#!/usr/bin/env python
# -*- coding: utf-8 -*-
# ------------------ #
#   RTL 102.5 info   #
# ------------------ #

import urllib
from xml.dom.minidom import parse
import re
import json


def uni(s):
    """ Decode text. """
    ascii_char = re.findall(r'\[e\]\[c\](\d+)\[p\]', s)
    other_char = re.findall(r'\[[a-z]\]+', s)
    # find and replace number to ascii character
    for char in ascii_char:
        if char in s:
            s = s.replace(char, unichr(int(char)))
    # find and remove [*]
    for char in other_char:
        if char in s:
            s = s.replace(char, '')
    return s


def get_info():
    """ Get information. """
    # check if VLC is turned on
    try:
        urllib.urlretrieve('http://127.0.0.1:8080/requests/status.xml', '/tmp/info.xml')
    except IOError:
        print 'VLC is closed.'
        return
    # replace html characters with xml
    with open('/tmp/info.xml', 'r') as fr, open('/tmp/info2.xml', 'w') as fw:
        z = ['&lt;', '&gt;']
        x = ['<', '>']
        for line in fr.readlines():
            for i in range(len(z)):
                if z[i] in line:
                    line = line.replace(z[i], x[i])
            fw.write(line)
    # open xml file, get information and make json file
    with open('/tmp/info2.xml', 'r') as fr, open('rtl1025-playlist.json', 'w') as fw:
        dom = parse(fr)
        cnodes = dom.childNodes
        info_dict = {"program_title": "",
                     "speakers": "",
                     "program_image": "",
                     "artist_name": "",
                     "song_title": "",
                     "song_cover": ""}
        try:
            info_dict["program_title"] = uni(cnodes[0].\
                getElementsByTagName('prg_title')[0].firstChild.data)
            info_dict["speakers"] = uni(cnodes[0].\
                getElementsByTagName('speakers')[0].firstChild.data)
            info_dict["program_image"] = cnodes[0].\
                getElementsByTagName('image400')[0].firstChild.data
            info_dict["artist_name"] = uni(cnodes[0].\
                getElementsByTagName('mus_art_name')[0].firstChild.data)
            info_dict["song_title"] = uni(cnodes[0].\
                getElementsByTagName('mus_sng_title')[0].firstChild.data)
            info_dict["song_cover"] = cnodes[0].\
                getElementsByTagName('mus_sng_itunescoverbig')[0].firstChild.data
        except (IndexError, AttributeError):
            pass
        # my_dict as json file
        fw.write(json.dumps(info_dict))
    # display data
    with open('rtl1025-playlist.json', 'r') as fw:
        j = json.load(fw)
        for k, v in j.iteritems():
            print "{:15}{:2}{:1}".format(k, ":", v.encode('utf-8'))


if __name__ == '__main__':
    get_info()
""" Django settings for django_project project. For more information on this file, see https://docs.djangoproject.com/en/1.7/topics/settings/ For the full list of settings and their values, see https://docs.djangoproject.com/en/1.7/ref/settings/ """ # Build paths inside the project like this: os.path.join(BASE_DIR, ...) import os BASE_DIR = os.path.dirname(os.path.dirname(__file__)) # Quick-start development settings - unsuitable for production # See https://docs.djangoproject.com/en/1.7/howto/deployment/checklist/ # SECURITY WARNING: keep the secret key used in production secret! SECRET_KEY = '75dlm!mxn6a$_wa-1kti3_u(_97-hya!ov@8=rcdk364#cy^9g' # SECURITY WARNING: don't run with debug turned on in production! DEBUG = True ALLOWED_HOSTS = [] TEMPLATES = [{ 'BACKEND': 'django.template.backends.django.DjangoTemplates', 'DIRS': [os.path.join(BASE_DIR, 'templates'),], 'APP_DIRS': True, # 'TEMPLATE_DEBUG': True, 'OPTIONS': { 'context_processors': [ 'django.template.context_processors.debug', 'django.template.context_processors.request', 'django.contrib.auth.context_processors.auth', 'django.contrib.messages.context_processors.messages', ], }, }] # Application definition INSTALLED_APPS = ( 'django.contrib.admin', 'django.contrib.auth', 'django.contrib.contenttypes', 'django.contrib.sessions', 'django.contrib.messages', 'django.contrib.staticfiles', 'django_app', ) MIDDLEWARE_CLASSES = ( 'django.contrib.sessions.middleware.SessionMiddleware', 'django.middleware.common.CommonMiddleware', 'django.middleware.csrf.CsrfViewMiddleware', 'django.contrib.auth.middleware.AuthenticationMiddleware', 'django.contrib.auth.middleware.SessionAuthenticationMiddleware', 'django.contrib.messages.middleware.MessageMiddleware', 'django.middleware.clickjacking.XFrameOptionsMiddleware', ) ROOT_URLCONF = 'django_project.urls' WSGI_APPLICATION = 'django_project.wsgi.application' # Database # https://docs.djangoproject.com/en/1.7/ref/settings/#databases DATABASES = { 'default': { 'ENGINE': 'django.db.backends.sqlite3', 'NAME': os.path.join(BASE_DIR, 'db.sqlite3'), } } # Internationalization # https://docs.djangoproject.com/en/1.7/topics/i18n/ LANGUAGE_CODE = 'en-us' TIME_ZONE = 'UTC' USE_I18N = True USE_L10N = True USE_TZ = True # Static files (CSS, JavaScript, Images) # https://docs.djangoproject.com/en/1.7/howto/static-files/ # STATICFILES_DIRS = ( # os.path.join(BASE_DIR, "static"), # os.path.abspath(os.path.join(BASE_DIR, "../../_shared-web-resources/autobahn")), # ) # STATIC_URL = '/static/' # TEMPLATE_DIRS = ['/templates/',] STATIC_URL = '/static/' STATIC_ROOT = "/vagrant/static/"
Justin Timberlake reunites with Levi's for a second Fresh Leaves collection. The spring 2019 range takes inspiration from Timberlake's hometown of Memphis. The singer shares, "Memphis and music have always been part of me...and also part of this collection, inspired by the places and things that I love since childhood." The lineup fuses Timberlake's personal references to Memphis with iconic Levi's pieces such as the trucker jacket. Other key items range from slim taper Levi's 501 jeans to a variety of hoodies. Levi Strauss & Co.'s Jennifer Sey explains, "Justin is the perfect ally for Levi's and this collection is a celebration of his hometown Memphis, combining references to American music and authentic personal expression." Shop the Justin Timberlake x Levi's Fresh Leaves collaboration at Levis.com. Singer Justin Timberlake wears a hybrid trucker jacket and military shirt from his Fresh Leaves Levi's collaboration. Collaborating with Levi's, Justin Timberlake wears a Fresh Leaves trucker jacket. Front and center, Justin Timberlake wears a Levi's Fresh Leaves long-sleeve graphic tee. Going sporty, Justin Timberlake dons a Levi's Fresh Leaves hybrid hooded trucker jacket.
# -*- coding: utf-8 -*-

from __future__ import absolute_import, unicode_literals

import base64
import socket
import struct
import time

try:
    from cryptography.hazmat.primitives.ciphers import (
        Cipher, algorithms, modes
    )
    from cryptography.hazmat.backends import default_backend
except ImportError:  # pragma: no cover
    raise RuntimeError("You need to install Cryptography.")  # pragma: no cover

from . import pkcs7
from .exceptions import (
    UnvalidEncodingAESKey, AppIdValidationError, InvalidSignature
)
from werobot.utils import (
    to_text, to_binary, generate_token, byte2int, get_signature
)


class PrpCrypto(object):
    """
    Provides the encryption/decryption interface for messages received
    from and pushed to the WeChat official-account platform.
    """

    def __init__(self, key):
        key = to_binary(key)
        self.cipher = Cipher(
            algorithms.AES(key),
            modes.CBC(key[:16]),
            backend=default_backend()
        )

    def get_random_string(self):
        """
        :return: a random string of length 16
        """
        return generate_token(16)

    def encrypt(self, text, app_id):
        """
        Encrypt plaintext.

        :param text: the plaintext to encrypt
        :param app_id: the AppID of the WeChat official-account platform
        :return: the encrypted string
        """
        text = b"".join(
            [
                to_binary(self.get_random_string()),
                struct.pack(b"I", socket.htonl(len(to_binary(text)))),
                to_binary(text),
                to_binary(app_id)
            ]
        )
        text = pkcs7.encode(text)

        encryptor = self.cipher.encryptor()
        ciphertext = to_binary(encryptor.update(text) + encryptor.finalize())

        return base64.b64encode(ciphertext)

    def decrypt(self, text, app_id):
        """
        Decrypt ciphertext.

        :param text: the ciphertext to decrypt
        :param app_id: the AppID of the WeChat official-account platform
        :return: the decrypted string
        """
        text = to_binary(text)

        decryptor = self.cipher.decryptor()
        plain_text = decryptor.update(base64.b64decode(text)) + decryptor.finalize()

        padding = byte2int(plain_text, -1)
        content = plain_text[16:-padding]

        xml_len = socket.ntohl(struct.unpack("I", content[:4])[0])
        xml_content = content[4:xml_len + 4]
        from_appid = content[xml_len + 4:]

        if to_text(from_appid) != app_id:
            raise AppIdValidationError(text, app_id)

        return xml_content


class MessageCrypt(object):
    ENCRYPTED_MESSAGE_XML = """
<xml>
<Encrypt><![CDATA[{encrypt}]]></Encrypt>
<MsgSignature><![CDATA[{signature}]]></MsgSignature>
<TimeStamp>{timestamp}</TimeStamp>
<Nonce><![CDATA[{nonce}]]></Nonce>
</xml>
""".strip()

    def __init__(self, token, encoding_aes_key, app_id):
        key = base64.b64decode(to_binary(encoding_aes_key + '='))
        if len(key) != 32:
            raise UnvalidEncodingAESKey(encoding_aes_key)
        self.prp_crypto = PrpCrypto(key)

        self.token = token
        self.app_id = app_id

    def decrypt_message(self, timestamp, nonce, msg_signature, encrypt_msg):
        """
        Decrypt a received WeChat message.

        :param timestamp: the timestamp received in the request URL
        :param nonce: the nonce received in the request URL
        :param msg_signature: the msg_signature received in the request URL
        :param encrypt_msg: the received encrypted text (the <Encrypt> part of the XML)
        :return: the decrypted XML text
        """
        signature = get_signature(self.token, timestamp, nonce, encrypt_msg)
        if signature != msg_signature:
            raise InvalidSignature(msg_signature)
        return self.prp_crypto.decrypt(encrypt_msg, self.app_id)

    def encrypt_message(self, reply, timestamp=None, nonce=None):
        """
        Encrypt a WeChat reply.

        :param reply: the reply before encryption
        :type reply: WeChatReply or XML text
        :return: the encrypted reply text
        """
        if hasattr(reply, "render"):
            reply = reply.render()
        timestamp = timestamp or to_text(int(time.time()))
        nonce = nonce or generate_token(5)
        encrypt = to_text(self.prp_crypto.encrypt(reply, self.app_id))
        signature = get_signature(self.token, timestamp, nonce, encrypt)
        return to_text(
            self.ENCRYPTED_MESSAGE_XML.format(
                encrypt=encrypt,
                signature=signature,
                timestamp=timestamp,
                nonce=nonce
            )
        )
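A minimal usage sketch for the MessageCrypt class above, assuming the module is importable as werobot.crypto (which the relative imports suggest); the token, EncodingAESKey and AppID below are made-up placeholders, not real credentials:

from werobot.crypto import MessageCrypt

# All three values are illustrative placeholders.
crypt = MessageCrypt(
    token='my_token',                 # token configured in the official-account console (placeholder)
    encoding_aes_key='x' * 43,        # 43-character EncodingAESKey; decodes to 32 bytes (placeholder)
    app_id='wx1234567890abcdef',      # AppID of the official account (placeholder)
)

# Encrypt an XML reply; timestamp and nonce are generated when omitted.
encrypted_xml = crypt.encrypt_message('<xml><Content>hello</Content></xml>')

# To decrypt an incoming message, pass the query-string parameters from the callback URL:
# plain_xml = crypt.decrypt_message(timestamp, nonce, msg_signature, encrypt_msg)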
"... Abstract. Moss and Rabani study constrained node-weighted Steiner tree problems with two independent weight values associated with each node, namely, cost and prize (or penalty). They give an O(logn)-approximation algorithm for the prize-collecting node-weighted Steiner tree problem (PCST)—wher ..." "... given a graph with costs/weights on edges and/or nodes and prescribed connectivity require-ments/demands. Among the subgraphs of G that satisfy the requirements, we seek to find one of minimum cost. Formally, the problem is defined as follows. Given a graph G = (V,E) and Q ⊆ V, the Q-connectivity λQ ..."
# -*- coding: utf-8 -*- # Maestro Music Manager - https://github.com/maestromusic/maestro # Copyright (C) 2013-2015 Martin Altmayer, Michael Helmling # # This program is free software: you can redistribute it and/or modify # it under the terms of the GNU General Public License as published by # the Free Software Foundation, either version 3 of the License, or # (at your option) any later version. # # This program is distributed in the hope that it will be useful, # but WITHOUT ANY WARRANTY; without even the implied warranty of # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the # GNU General Public License for more details. # # You should have received a copy of the GNU General Public License # along with this program. If not, see <http://www.gnu.org/licenses/>. # import itertools import os.path from PyQt5 import QtCore, QtWidgets from PyQt5.QtCore import Qt from PyQt5.QtWidgets import QDialogButtonBox from maestro import config, utils from maestro.core import levels, tags, domains, urls from maestro.core.elements import ContainerType from maestro.gui import actions, dialogs, delegates, mainwindow, tagwidgets, treeview from maestro.gui.delegates.abstractdelegate import * from maestro.models import leveltreemodel from maestro.plugins.musicbrainz import plugin as mbplugin, xmlapi, elements translate = QtCore.QCoreApplication.translate class ImportAudioCDAction(actions.TreeAction): label = translate('ImportAudioCDAction', 'Rip audio CD ...') ripper = None @staticmethod def _getRelease(theDiscid): releases = xmlapi.findReleasesForDiscid(theDiscid) if len(releases) > 1: dialog = ReleaseSelectionDialog(releases, theDiscid) if dialog.exec_(): return dialog.selectedRelease else: return None else: return releases[0] @staticmethod def askForDiscId(): """Asks the user for a CD-ROM device to use. :returns: Three-tuple of the *device*, *disc id*, and number of tracks. """ import discid device, ok = QtWidgets.QInputDialog.getText( mainwindow.mainWindow, translate('AudioCD Plugin', 'Select device'), translate('AudioCD Plugin', 'CDROM device:'), QtWidgets.QLineEdit.Normal, discid.get_default_device()) if not ok: return None try: with discid.read(device) as disc: disc.read() except discid.disc.DiscError as e: dialogs.warning(translate("AudioCD Plugin", "CDROM drive is empty"), str(e)) return None return device, disc.id, len(disc.tracks) def doAction(self): # device, theDiscid, trackCount = '/dev/sr0', 'qx_MV1nqkljh.L37bA_rgVoyAgU-', 3 ans = self.askForDiscId() if ans is None: return device, theDiscid, trackCount = ans from . 
import ripper self.ripper = ripper.Ripper(device, theDiscid) if config.options.audiocd.earlyrip: self.ripper.start() try: release = self._getRelease(theDiscid) if release is None: return progress = dialogs.WaitingDialog("Querying MusicBrainz", "please wait", False) progress.open() def callback(url): progress.setText(self.tr("Fetching data from:\n{}").format(url)) QtWidgets.qApp.processEvents() xmlapi.queryCallback = callback xmlapi.fillReleaseForDisc(release, theDiscid) progress.close() xmlapi.queryCallback = None QtWidgets.qApp.processEvents() stack = self.level().stack.createSubstack(modalDialog=True) level = levels.Level("audiocd", self.level(), stack=stack) dialog = ImportAudioCDDialog(level, release) if dialog.exec_(): model = self.parent().model() model.insertElements(model.root, len(model.root.contents), [dialog.container]) if not config.options.audiocd.earlyrip: self.ripper.start() stack.close() except xmlapi.UnknownDiscException: dialog = SimpleRipDialog(theDiscid, trackCount, self.level()) if dialog.exec_(): if not config.options.audiocd.earlyrip: self.ripper.start() self.level().stack.beginMacro(self.tr("Load Audio CD")) model = self.parent().model() model.insertElements(model.root, len(model.root.contents), [dialog.container]) self.level().stack.endMacro() except ConnectionError as e: dialogs.warning(self.tr('Error communicating with MusicBrainz'), str(e)) if 'progress' in locals(): progress.close() class ReleaseSelectionDialog(QtWidgets.QDialog): def __init__(self, releases, discid): super().__init__(mainwindow.mainWindow) self.setModal(True) lay = QtWidgets.QVBoxLayout() lay.addWidget(QtWidgets.QLabel(self.tr('Select release:'))) for release in releases: text = "" if len(release.children) > 1: pos, medium = release.mediumForDiscid(discid) text = "[Disc {}: '{}' of {} in] ".format(pos, medium, len(release.children)) text += release.tags["title"][0] + "\nby {}".format(release.tags["artist"][0]) if "date" in release.tags: text += "\nreleased {}".format(release.tags["date"][0]) if "country" in release.tags: text += " ({})".format(release.tags["country"][0]) if "barcode" in release.tags: text += ", barcode={}".format(release.tags["barcode"][0]) but = QtWidgets.QPushButton(text) but.release = release but.setStyleSheet("text-align: left") but.clicked.connect(self._handleClick) lay.addWidget(but) btbx = QtWidgets.QDialogButtonBox(QtWidgets.QDialogButtonBox.Cancel) btbx.rejected.connect(self.reject) lay.addWidget(btbx) self.setLayout(lay) def _handleClick(self): self.selectedRelease = self.sender().release self.accept() class CDROMDelegate(delegates.StandardDelegate): def __init__(self, view): self.profile = delegates.profiles.DelegateProfile("cdrom") self.profile.options['appendRemainingTags'] = True self.profile.options['showPaths'] = True self.profile.options['showType'] = True super().__init__(view, self.profile) def getUrlWarningItem(self, wrapper): element = wrapper.element from . 
import plugin if element.isFile() and element.url.scheme == 'audiocd': tracknr = plugin.parseNetloc(element.url)[1] return TextItem(self.tr('[Track {:2d}]').format(tracknr), DelegateStyle(bold=True, color=Qt.blue)) return super().getUrlWarningItem(wrapper) class AliasComboDelegate(QtWidgets.QStyledItemDelegate): def __init__(self, box, parent=None): super().__init__(parent) self.box = box def paint(self, painter, option, index): alias = self.box.entity.aliases[index.row()] if alias.primary: option.font.setBold(True) super().paint(painter, option, index) option.font.setBold(False) class AliasComboBox(QtWidgets.QComboBox): aliasChanged = QtCore.pyqtSignal(object) def __init__(self, entity, sortNameItem): super().__init__() self.addItem(entity.aliases[0].name) self.entity = entity self.setEditable(True) self.setEditText(entity.name) self.sortNameItem = sortNameItem self.setItemDelegate(AliasComboDelegate(self)) self.activated.connect(self._handleActivate) self.editTextChanged.connect(self._handleEditTextChanged) def showPopup(self): if not self.entity.loaded: self.entity.loadAliases() for alias in self.entity.aliases[1:]: self.addItem(alias.name) if alias.locale: self.setItemData(self.count() - 1, ("primary " if alias.primary else "") + \ "alias for locale {}".format(alias.locale), Qt.ToolTipRole) QtWidgets.qApp.processEvents() return super().showPopup() def _handleActivate(self, index): alias = self.entity.aliases[index] sortname = alias.sortName self.sortNameItem.setText(sortname) if self.currentText() != self.entity.name: self.entity.selectAlias(index) self.aliasChanged.emit(self.entity) def _handleEditTextChanged(self, text): self.entity.name = text self.aliasChanged.emit(self.entity) class AliasWidget(QtWidgets.QTableWidget): """ TODO: use sort names! 
""" aliasChanged = QtCore.pyqtSignal(object) def __init__(self, entities): super().__init__() self.entities = sorted(entities, key=lambda ent: "".join(sorted(ent.asTag))) self.columns = [self.tr("Roles"), self.tr("WWW"), self.tr("Name"), self.tr("Sort-Name")] self.setColumnCount(len(self.columns)) self.verticalHeader().hide() self.setHorizontalHeaderLabels(self.columns) self.horizontalHeader().setSectionResizeMode(QtWidgets.QHeaderView.ResizeToContents) self.horizontalHeader().setStretchLastSection(True) self.setRowCount(len(self.entities)) self.cellChanged.connect(self._handleCellChanged) for row, ent in enumerate(self.entities): label = QtWidgets.QTableWidgetItem(", ".join(ent.asTag)) label.setFlags(Qt.ItemIsEnabled) self.setItem(row, 0, label) label = QtWidgets.QLabel('<a href="{}">{}</a>'.format(ent.url(), self.tr("lookup"))) label.setToolTip(ent.url()) label.setOpenExternalLinks(True) self.setCellWidget(row, 1, label) sortNameItem = QtWidgets.QTableWidgetItem(ent.sortName) combo = AliasComboBox(ent, sortNameItem) combo.aliasChanged.connect(self.aliasChanged) self.setCellWidget(row, 2, combo) self.setItem(row, 3, sortNameItem) def activeEntities(self): entities = [] for row, ent in enumerate(self.entities): if self.cellWidget(row, 2).isEnabled(): entities.append(ent) return entities def updateDisabledTags(self, mapping): for row, ent in enumerate(self.entities): state = not all((val in mapping and mapping[val] is None) for val in ent.asTag) for col in range(self.columnCount()): item = self.item(row, col) if item: if state: item.setFlags(item.flags() | Qt.ItemIsEnabled) else: item.setFlags(item.flags() ^ Qt.ItemIsEnabled) else: widget = self.cellWidget(row, col) widget.setEnabled(state) def _handleCellChanged(self, row, col): if col != 3: return self.entities[row].sortName = self.item(row, col).text() class TagMapWidget(QtWidgets.QTableWidget): tagConfigChanged = QtCore.pyqtSignal(dict) def __init__(self, newtags): super().__init__() self.columns = [self.tr("Import"), self.tr("MusicBrainz Name"), self.tr("Maestro Tag")] self.setColumnCount(len(self.columns)) self.verticalHeader().hide() self.setHorizontalHeaderLabels(self.columns) self.horizontalHeader().setSectionResizeMode(QtWidgets.QHeaderView.ResizeToContents) self.setRowCount(len(newtags)) self.tagMapping = mbplugin.tagMap.copy() from ...gui.tagwidgets import TagTypeBox for row, tagname in enumerate(newtags): if tagname in self.tagMapping: tag = self.tagMapping[tagname] else: tag = tags.get(tagname) checkbox = QtWidgets.QTableWidgetItem() ttBox = TagTypeBox(tag, editable=True) ttBox.tagChanged.connect(self._handleTagTypeChanged) mbname = QtWidgets.QTableWidgetItem(tagname) self.setCellWidget(row, 2, ttBox) if tag is None: checkbox.setCheckState(Qt.Unchecked) ttBox.setEnabled(False) mbname.setFlags(mbname.flags() ^ Qt.ItemIsEnabled) else: checkbox.setCheckState(Qt.Checked) self.tagMapping[tagname] = tag mbname.setFlags(Qt.ItemIsEnabled) self.setItem(row, 0, checkbox) self.setItem(row, 1, mbname) self.cellChanged.connect(self._handleCellChange) def _handleCellChange(self, row, col): if col != 0: return state = self.item(row, 0).checkState() == Qt.Checked item = self.item(row, 1) if state: item.setFlags(item.flags() | Qt.ItemIsEnabled) self.tagMapping[item.text()] = self.cellWidget(row, 2).getTag() else: item.setFlags(item.flags() ^ Qt.ItemIsEnabled) self.tagMapping[item.text()] = None self.cellWidget(row, 2).setEnabled(state) self.tagConfigChanged.emit(self.tagMapping) def _handleTagTypeChanged(self, tag): for row in 
range(self.rowCount()): if self.cellWidget(row, 2) is self.sender(): break self.tagMapping[self.item(row, 1).text()] = tag self.tagConfigChanged.emit(self.tagMapping) class ImportAudioCDDialog(QtWidgets.QDialog): """The main dialog of this plugin, which is used for adding audioCDs to the editor. Shows the container structure obtained from musicbrainz and allows to configure alias handling and some other options. """ def __init__(self, level, release): super().__init__(mainwindow.mainWindow) self.setModal(True) self.level = level self.mbNode = elements.MBNode(release) self.release = release self.maestroModel = leveltreemodel.LevelTreeModel(level) self.maestroView = treeview.TreeView(level, affectGlobalSelection=False) self.maestroView.setModel(self.maestroModel) self.maestroView.setItemDelegate(CDROMDelegate(self.maestroView)) # collect alias entities in this release entities = set() for item in release.walk(): if not item.ignore: entities.update(val for val in itertools.chain.from_iterable(item.tags.values()) if isinstance(val, xmlapi.AliasEntity)) self.aliasWidget = AliasWidget(entities) self.aliasWidget.aliasChanged.connect(self.makeElements) self.newTagWidget = TagMapWidget(release.collectExternalTags()) self.newTagWidget.tagConfigChanged.connect(self.aliasWidget.updateDisabledTags) self.newTagWidget.tagConfigChanged.connect(self.makeElements) configLayout = QtWidgets.QVBoxLayout() self.searchReleaseBox = QtWidgets.QCheckBox(self.tr('search for existing release')) self.searchReleaseBox.setChecked(True) self.searchReleaseBox.stateChanged.connect(self.makeElements) configLayout.addWidget(self.searchReleaseBox) self.mediumContainerBox = QtWidgets.QCheckBox(self.tr('add containers for discs')) self.mediumContainerBox.stateChanged.connect(self.makeElements) self.forceBox = QtWidgets.QCheckBox(self.tr('...even without title')) self.forceBox.stateChanged.connect(self.makeElements) configLayout.addWidget(self.mediumContainerBox) configLayout.addWidget(self.forceBox) btbx = QDialogButtonBox(QDialogButtonBox.Ok | QDialogButtonBox.Cancel) btbx.accepted.connect(self.finalize) btbx.rejected.connect(self.reject) lay = QtWidgets.QVBoxLayout() topLayout = QtWidgets.QHBoxLayout() topLayout.addLayout(configLayout) topLayout.addWidget(self.maestroView) lay.addLayout(topLayout, stretch=5) lay.addWidget(QtWidgets.QLabel(self.tr("Alias handling:"))) lay.addWidget(self.aliasWidget, stretch=2) lay.addWidget(QtWidgets.QLabel(self.tr("New tagtypes:"))) lay.addWidget(self.newTagWidget, stretch=1) lay.addWidget(btbx, stretch=1) self.setLayout(lay) self.makeElements() self.resize(mainwindow.mainWindow.size() * 0.9) def makeElements(self, *args, **kwargs): self.maestroModel.clear() self.level.removeElements(list(self.level.elements.values())) elemConfig = elements.ElementConfiguration(self.newTagWidget.tagMapping) elemConfig.searchRelease = self.searchReleaseBox.isChecked() elemConfig.mediumContainer = self.mediumContainerBox.isChecked() elemConfig.forceMediumContainer = self.forceBox.isChecked() self.container = self.release.makeElements(self.level, elemConfig) self.maestroModel.insertElements(self.maestroModel.root, 0, [self.container]) def finalize(self): mbplugin.updateDBAliases(self.aliasWidget.activeEntities()) for mbname, maestroTag in self.newTagWidget.tagMapping.items(): config.storage.musicbrainz.tagmap[mbname] = maestroTag.name if maestroTag else None self.level.commit() self.accept() class SimpleRipDialog(QtWidgets.QDialog): """Dialog for ripping CDs that are not found in the MusicBrainz database. 
Allows to enter album title, artist, date, and a title for each track, """ def __init__(self, discId, trackCount, level): super().__init__(mainwindow.mainWindow) self.setModal(True) self.level = level self.discid = discId topLayout = QtWidgets.QHBoxLayout() topLayout.addWidget(QtWidgets.QLabel(self.tr('Album title:'))) self.titleEdit = tagwidgets.TagValueEditor(tags.TITLE) self.titleEdit.setValue('unknown album') topLayout.addWidget(self.titleEdit) midLayout = QtWidgets.QHBoxLayout() midLayout.addWidget(QtWidgets.QLabel(self.tr('Artist:'))) self.artistEdit = tagwidgets.TagValueEditor(tags.get('artist')) self.artistEdit.setValue('unknown artist') midLayout.addWidget(self.artistEdit) midLayout.addStretch() midLayout.addWidget(QtWidgets.QLabel(self.tr('Date:'))) self.dateEdit = tagwidgets.TagValueEditor(tags.get('date')) self.dateEdit.setValue(utils.FlexiDate(1900)) midLayout.addWidget(self.dateEdit) layout = QtWidgets.QVBoxLayout() description = QtWidgets.QLabel(self.tr('The MusicBrainz database does not contain a release ' 'for this disc. Please fill the tags manually.')) description.setWordWrap(True) layout.addWidget(description) layout.addLayout(topLayout) layout.addLayout(midLayout) tableLayout = QtWidgets.QGridLayout() edits = [] for i in range(1, trackCount+1): tableLayout.addWidget(QtWidgets.QLabel(self.tr('Track {:2d}:').format(i)), i-1, 0) edits.append(tagwidgets.TagValueEditor(tags.TITLE)) edits[-1].setValue('unknown title') tableLayout.addWidget(edits[-1], i-1, 1) layout.addLayout(tableLayout) box = QDialogButtonBox(QDialogButtonBox.Ok | QDialogButtonBox.Cancel) box.accepted.connect(self.finish) box.rejected.connect(self.reject) layout.addWidget(box) self.setLayout(layout) self.edits = edits def finish(self): elems = [] for i, edit in enumerate(self.edits, start=1): url = urls.URL("audiocd://{0}.{1}{2}/{0}/{1}.flac".format( self.discid, i, os.path.abspath(config.options.audiocd.rippath))) elem = self.level.collect(url) elTags = tags.Storage() elTags[tags.TITLE] = [edit.getValue()] elTags[tags.ALBUM] = [self.titleEdit.getValue()] elTags[tags.get('artist')] = [self.artistEdit.getValue()] elTags[tags.get('date')] = [self.dateEdit.getValue()] diff = tags.TagStorageDifference(None, elTags) self.level.changeTags({elem: diff}) elems.append(elem) contTags = tags.Storage() contTags[tags.TITLE] = [self.titleEdit.getValue()] contTags[tags.ALBUM] = [self.titleEdit.getValue()] contTags[tags.get('date')] = [self.dateEdit.getValue()] contTags[tags.get('artist')] = [self.artistEdit.getValue()] cont = self.level.createContainer(contents=elems, type=ContainerType.Album, domain=domains.default(), tags=contTags) self.container = cont self.accept()
When you get an invitation to attend the launch of the new LINDT Chocolate Boutique at the Pavilion, it is very hard not to squeal with delight. Talk about perfect timing for the store opening too – a box of LINDT chocolate has been my go-to for a gorgeous gift on the go plenty of times. And guess what, Durbanites! This is the very first interactive LINDT store launched in South Africa, and it's on my doorstep! YASSSS, sooo excited – I don't think a shopping trip to 'The Pav' will be complete now without popping in for a treat or two. The VIP event was just that: absolutely gorgeous, as expected from this amazing brand. The red carpet treatment was on top form. It was lovely to see familiar faces on arrival, topped with a glass of bubbly and LINDT chocolate served by the friendliest staff. I literally felt like a kid in a chocolate factory whose golden ticket had arrived. Wall-to-wall CHOCOLATE! It was hard to hold back. I loved the warmth of the store, and the fine finishings added to the atmosphere – it felt like a high-end jewelry store, where the jewelry is glistening wrapped chocolate delights. The boutique will afford chocolate lovers the chance to explore a wide selection of international LINDT flavours, as well as the full range of well-known favourites. A first-of-its-kind Master Chocolatier station will be a permanent feature in this store, where consumers will have the opportunity to create their own LINDT slab under the guidance of the in-store LINDT Master Chocolatier. Freshly made LINDT products such as macarons, brownies and the signature hot chocolate and milkshakes will also be available.
import gobject import gtk from gtk import gdk import gtkimageview import time def test_version_string(): ''' The module has a __version__ attribute which is a string containing three numbers separted by periods. The version string is >= 1.0.0. ''' major, minor, micro = gtkimageview.__version__.split('.') major, minor, micro = int(major), int(minor), int(micro) assert major >= 1 if major == 1: assert minor >= 0 def test_default_tool(): ''' The default tool is ImageToolDragger. ''' view = gtkimageview.ImageView() assert isinstance(view.get_tool(), gtkimageview.ImageToolDragger) def test_set_wrong_pixbuf_type(): ''' A TypeError is raised when set_pixbuf() is called with something that is not a pixbuf. ''' view = gtkimageview.ImageView() try: view.set_pixbuf('Hi mom!', True) assert False except TypeError: assert True def set_pixbuf_null(): view = gtkimageview.ImageView() view.set_pixbuf(None, True) assert not view.get_pixbuf() def test_set_pixbuf_default(): ''' Make sure that set_pixbuf():s second parameter has the default value True. ''' view = gtkimageview.ImageView() view.set_fitting(False) view.set_pixbuf(None) assert view.get_fitting() def check_class(parent, init_args): class TheClass(parent): __gsignals__ = {'my-signal' : (gobject.SIGNAL_RUN_FIRST, gobject.TYPE_NONE, (gobject.TYPE_INT,))} def __init__(self): parent.__init__(self, *init_args) self.arg = 0 def do_my_signal(self, arg): self.arg = arg gobject.type_register(TheClass) obj = TheClass() obj.emit('my-signal', 20) assert obj.arg == 20 def test_nav_subclass_with_signals(): ''' Ensure that a subclass of ImageNav which adds a signal to the class works as expected. ''' check_class(gtkimageview.ImageNav, [gtkimageview.ImageView()]) def test_view_subclass_with_signals(): ''' Ensure that a subclass of ImageView which adds a signal to the class works as expected. ''' check_class(gtkimageview.ImageView, []) def test_selector_subclass_with_signals(): ''' Ensure that a subclass of ImageToolSelector which adds a signal to the class works as expected. ''' check_class(gtkimageview.ImageToolSelector, [gtkimageview.ImageView()]) def test_dragger_subclass_with_signals(): ''' Ensure that a subclass of ImageToolDragger which adds a signal to the class works as expected. ''' check_class(gtkimageview.ImageToolDragger, [gtkimageview.ImageView()]) def test_scroll_win_subclass_with_signals(): ''' Ensure that a subclass of ImageScrollWin which adds a signal to the class works as expected. ''' check_class(gtkimageview.ImageScrollWin, [gtkimageview.ImageView()]) def test_min_max_zoom_functions(): ''' Ensure that the gtkimageview.zooms_* functions are present and works as expected. ''' min_zoom = float(gtkimageview.zooms_get_min_zoom()) max_zoom = float(gtkimageview.zooms_get_max_zoom()) assert min_zoom < max_zoom def test_get_viewport(): ''' Ensure that getting the viewport of the view works as expected. ''' view = gtkimageview.ImageView() assert not view.get_viewport() view.size_allocate(gdk.Rectangle(0, 0, 100, 100)) pixbuf = gdk.Pixbuf(gdk.COLORSPACE_RGB, False, 8, 50, 50) view.set_pixbuf(pixbuf) rect = view.get_viewport() assert rect.x == 0 and rect.y == 0 assert rect.width == 50 and rect.height == 50 def test_get_viewport_unallocated(): ''' If the view is not allocated, get_viewport returns a rectangle with 0 width and height. 
''' pixbuf = gdk.Pixbuf(gdk.COLORSPACE_RGB, False, 8, 50, 50) view = gtkimageview.ImageView() view.set_pixbuf(pixbuf) for ofs_x, ofs_y in [(30, 30), (10, 20), (5, 10)]: view.set_offset(ofs_x, ofs_y) rect = view.get_viewport() assert rect.x == ofs_x assert rect.y == ofs_y assert rect.width == 0 assert rect.height == 0 def test_get_check_colors(): ''' Ensure that getting the view:s check colors works as expected. ''' view = gtkimageview.ImageView() col1, col2 = view.get_check_colors() assert int(col1) assert int(col2) def test_get_check_colors_many_args(): ''' Ensure that a correct error is thrown when get_check_colors() is invoked with to many arguments. ''' view = gtkimageview.ImageView() try: view.get_check_colors(1, 2, 3) assert False except TypeError: assert True def test_image_nav_wrong_nr_args(): ''' Ensure that TypeError is raised when ImageNav is instantiated with the wrong nr of args. ''' try: nav = gtkimageview.ImageNav() assert False except TypeError: assert True try: nav = gtkimageview.ImageNav(gtkimageview.ImageView(), None, None) assert False except TypeError: assert True def test_get_draw_rect(): ''' Ensure that getting the draw rectangle works as expected. ''' view = gtkimageview.ImageView() assert not view.get_draw_rect() view.size_allocate(gdk.Rectangle(0, 0, 100, 100)) pixbuf = gdk.Pixbuf(gdk.COLORSPACE_RGB, False, 8, 50, 50) view.set_pixbuf(pixbuf) rect = view.get_draw_rect() assert rect.x == 25 and rect.y == 25 assert rect.width == 50 and rect.height == 50 def test_draw_rect_unallocated(): ''' Ensure that get_draw_rect() always return a zero rectangle when the view is not allocated. ''' pixbuf = gdk.Pixbuf(gdk.COLORSPACE_RGB, False, 8, 50, 50) view = gtkimageview.ImageView() view.set_pixbuf(pixbuf) for x_ofs, y_ofs in [(30, 30), (-10, 20), (0, 0), (5, 10)]: view.set_offset(x_ofs, y_ofs) rect = view.get_draw_rect() assert rect.x == 0 assert rect.y == 0 assert rect.width == 0 assert rect.height == 0 def test_set_offset(): ''' Ensure that setting the offset works as expected. ''' view = gtkimageview.ImageView() view.size_allocate(gdk.Rectangle(0, 0, 100, 100)) pixbuf = gdk.Pixbuf(gdk.COLORSPACE_RGB, False, 8, 200, 200) view.set_pixbuf(pixbuf) view.set_zoom(1) view.set_offset(0, 0) rect = view.get_viewport() assert rect.x == 0 and rect.y == 0 view.set_offset(100, 100, invalidate = True) rect = view.get_viewport() assert rect.x == 100 and rect.y == 100 def test_set_transp(): ''' Ensure that setting the views transparency settings works as expected. ''' view = gtkimageview.ImageView() view.set_transp(gtkimageview.TRANSP_COLOR, transp_color = 0xff0000) col1, col2 = view.get_check_colors() assert col1 == col2 == 0xff0000 view.set_transp(gtkimageview.TRANSP_GRID) def test_presence_of_constants(): ''' Check that all enum constants exist in the module. ''' assert hasattr(gtkimageview, 'TRANSP_BACKGROUND') assert hasattr(gtkimageview, 'TRANSP_COLOR') assert hasattr(gtkimageview, 'TRANSP_GRID') assert hasattr(gtkimageview, 'DRAW_METHOD_CONTAINS') assert hasattr(gtkimageview, 'DRAW_METHOD_SCALE') assert hasattr(gtkimageview, 'DRAW_METHOD_SCROLL') def test_incomplete_iimage_tool(): ''' Ensure that NotImplementedError is raised if an attempt is made to instantiate an incomplete IImageTool. ''' class Foo(gtkimageview.IImageTool): pass try: Foo() assert False except NotImplementedError: assert True def test_pixbuf_draw_opts(): ''' Ensure that the PixbufDrawOpts class is present. 
''' assert hasattr(gtkimageview, 'PixbufDrawOpts') def test_pixbuf_draw_cache(): ''' Ensure that the PixbufDrawCache class is present. ''' assert hasattr(gtkimageview, 'PixbufDrawCache') def test_pixbuf_draw_opts_attrs(): ''' Ensure that all required attributes are present on the PixbufDrawOpts object. ''' obj = gtkimageview.PixbufDrawOpts() assert hasattr(obj, 'zoom') assert hasattr(obj, 'zoom_rect') assert hasattr(obj, 'widget_x') assert hasattr(obj, 'widget_y') assert hasattr(obj, 'interp') assert hasattr(obj, 'pixbuf') assert hasattr(obj, 'check_color1') assert hasattr(obj, 'check_color2') def test_pixbuf_draw_cache_attrs(): ''' Ensure that all required attributes are present on the PixbufDrawCache object. ''' obj = gtkimageview.PixbufDrawCache() assert hasattr(obj, 'last_pixbuf') assert hasattr(obj, 'old') assert hasattr(obj, 'check_size') assert callable(obj.draw) def test_get_draw_method(): ''' Sanity test for the PixbufDrawCache.get_method() classmethod. ''' obj = gtkimageview.PixbufDrawCache() assert hasattr(obj, 'get_method') opts = gtkimageview.PixbufDrawOpts() gtkimageview.PixbufDrawCache.get_method(opts, opts) def test_return_of_get_draw_method(): ''' Ensure that PixbufDrawCache.get_method() returns either DRAW_METHOD_CONTAINS, DRAW_METHOD_SCALE or DRAW_METHOD_SCROLL. ''' obj = gtkimageview.PixbufDrawCache() opts = gtkimageview.PixbufDrawOpts() ret = obj.get_method(opts, opts) assert ret in (gtkimageview.DRAW_METHOD_CONTAINS, gtkimageview.DRAW_METHOD_SCALE, gtkimageview.DRAW_METHOD_SCROLL) def test_type_error_for_draw_method(): ''' Ensure that TypeError is raised if PixbufDrawCache.get_method() is called with an argument that is not a PixbufDrawOpts object. ''' arg_pairs = [(None, None), (gtkimageview.PixbufDrawOpts(), None), (None, gtkimageview.PixbufDrawOpts()), ("Hello", "Foo")] for last_opts, new_opts in arg_pairs: try: gtkimageview.PixbufDrawCache.get_method(last_opts, new_opts) assert False except TypeError: assert True def test_invalidate(): ''' Sanity test for the PixbufDrawCache.invalidate() method. ''' cache = gtkimageview.PixbufDrawCache() assert hasattr(cache, 'invalidate') def test_library_verson(): ''' Ensure sanity of the library_version() function. ''' version = gtkimageview.library_version() maj, min, mic = version.split('.') digits = int(maj), int(min), int(mic) def test_tool_selector_get_selection(): ''' Ensure that the default selection rectangle is (0,0)-[0,0]. ''' view = gtkimageview.ImageView() tool = gtkimageview.ImageToolSelector(view) view.set_tool(tool) sel = tool.get_selection() assert sel.x == 0 and sel.y == 0 assert sel.width == 0 and sel.height == 0 def test_set_anim_none(): ''' gtkimageview.AnimView.set_anim can be called with None. ''' view = gtkimageview.AnimView() view.set_anim(None) def test_damage_pixels(): ''' Ensure that gtkimageview.ImageView.damage_pixels can be called. ''' view = gtkimageview.ImageView() view.damage_pixels(gdk.Rectangle(0, 0, 100, 100)) view.damage_pixels(None) view.damage_pixels() view.damage_pixels(rect = gdk.Rectangle(0, 0, 100, 100)) def test_damage_pixels_badarg(): ''' Ensure that TypeError is raised if argument to gtkimageview.ImageView.damage_pixels is not None or a gdk.Rectangle. ''' view = gtkimageview.ImageView() try: view.damage_pixels('hello') assert False except TypeError: assert True def test_widget_to_image_rect(): ''' Tests that gtkimageview.ImageView.widget_to_image_rect works. 
''' view = gtkimageview.ImageView() rect = gdk.Rectangle(0, 0, 20, 20) assert not view.widget_to_image_rect(rect) view.size_allocate(gdk.Rectangle(0, 0, 100, 100)) assert not view.widget_to_image_rect(rect) pixbuf = gdk.Pixbuf(gdk.COLORSPACE_RGB, False, 8, 50, 50) view.set_pixbuf(pixbuf) r = view.widget_to_image_rect(gdk.Rectangle(25, 25, 50, 50)) assert r.x == 0 assert r.y == 0 assert r.width == 50 assert r.height == 50 def test_image_to_widget_rect(): ''' Test that gtkimageview.ImageView.image_to_widget_rect works. ''' view = gtkimageview.ImageView() rect = gdk.Rectangle(0, 0, 50, 50) assert not view.image_to_widget_rect(rect) view.size_allocate(gdk.Rectangle(0, 0, 100, 100)) pixbuf = gdk.Pixbuf(gdk.COLORSPACE_RGB, False, 8, 50, 50) view.set_pixbuf(pixbuf) r = view.image_to_widget_rect(rect) assert r.x == 25 assert r.y == 25 assert r.width == 50 assert r.height == 50 def test_image_to_widget_less_than_1_size(): ''' If the width or the height of the image space rectangle occupies less than one widget space pixel, then it is rounded up to 1. ''' pixbuf = gdk.Pixbuf(gdk.COLORSPACE_RGB, False, 8, 100, 100) view = gtkimageview.ImageView() view.size_allocate(gdk.Rectangle(0, 0, 100, 100)) view.set_pixbuf(pixbuf) view.set_zoom(0.5) rect = gdk.Rectangle(0, 0, 1, 1) r = view.image_to_widget_rect(rect) assert r.x == 25 assert r.y == 25 assert r.width == 1 assert r.height == 1 def test_big_image_small_allocation(): ''' This is a test for #23. If it eats up all memory it is a failure. ''' pixbuf = gdk.Pixbuf(gdk.COLORSPACE_RGB, False, 8, 5000, 5000); alloc = gdk.Rectangle(0, 0, 5, 5) view = gtkimageview.ImageView() view.set_pixbuf(pixbuf) view.set_show_frame(False) view.window = gdk.Window(None, alloc.width, alloc.height, gdk.WINDOW_TOPLEVEL, 0, gdk.INPUT_OUTPUT) view.size_allocate(alloc) ev = gdk.Event(gdk.EXPOSE) ev.area = alloc view.emit('expose-event', ev) def test_zoom_to_fit_keybinding(): ''' Ensure that the 'x' keybinding works as expected. ''' view = gtkimageview.ImageView() view.set_fitting(False) gtk.bindings_activate(view, gtk.keysyms.x, 0) assert view.get_fitting() def test_step_on_non_anim(): ''' Ensure that calling ``gtkimageview.AnimView.step()`` works as expected when the view shows a static image. ''' anim = gdk.PixbufSimpleAnim(100, 100, 10) anim.add_frame(gdk.Pixbuf(gdk.COLORSPACE_RGB, False, 8, 5000, 5000)) view = gtkimageview.AnimView() view.set_anim(anim) view.step() def test_unload_animation(): ''' Ensure that a running animation can be unloaded. Tests #34. ''' # Flush the event queue. while gtk.events_pending(): gtk.main_iteration(True) anim = gdk.PixbufSimpleAnim(100, 100, 10) for x in range(100): pb = gdk.Pixbuf(gdk.COLORSPACE_RGB, False, 8, 300, 300) anim.add_frame(pb) view = gtkimageview.AnimView() view.set_anim(anim) assert view.get_pixbuf() for x in range(10): gtk.main_iteration(True) view.set_anim(None) # No further events for 1 second and no pixbuf in the view. start = time.time() while time.time() < start + 1: assert not gtk.events_pending() assert not view.get_pixbuf() gtk.main_iteration(False) def test_step_on_last_frame_of_anim(): ''' Ensure that calling ``gtkimageview.AnimView.step()`` on the last frame of the animation that the view shows works as expected. 
''' anim = gdk.PixbufSimpleAnim(100, 100, 10) for x in range(2): anim.add_frame(gdk.Pixbuf(gdk.COLORSPACE_RGB, False, 8, 5000, 5000)) view = gtkimageview.AnimView() view.set_anim(anim) for x in range(2): view.step() def test_step_on_fast_player(): ''' Ensure that calling step always advances the frame even if the animation is one of those that play to fast. ''' # 50 fps = 20ms delay -> will be delayed anim = gdk.PixbufSimpleAnim(100, 100, 50) for x in range(10): anim.add_frame(gdk.Pixbuf(gdk.COLORSPACE_RGB, False, 8, 400, 300)) view = gtkimageview.AnimView() view.set_anim(anim) for x in range(9): pb_old = view.get_pixbuf() view.step() pb_new = view.get_pixbuf() assert pb_old != pb_new def test_zoom_in_destroyed_window(): ''' This test exposes a bug in GtkRange which causes a segfault when the window the ``gtkimageview.ImageScrollWin`` widget is in, is destroyed. Unfortunately it will never be fixed, see #551317. ''' # view = gtkimageview.ImageView() # view.set_pixbuf(gdk.Pixbuf(gdk.COLORSPACE_RGB, False, 8, 500, 500)) # scroll = gtkimageview.ImageScrollWin(view) # win = gtk.Window() # win.add(scroll) # win.set_default_size(100, 100) # win.show_all() # win.destroy() #view.set_zoom(3.0) def test_return_of_motion_notify(): ''' Ensure that all tools returns True if it handled a motion notify event and False otherwise. ''' # The pixbuf is larger than the view and should be draggable. view = gtkimageview.ImageView() view.size_allocate((0, 0, 50, 50)) pixbuf = gdk.Pixbuf(gdk.COLORSPACE_RGB, False, 8, 100, 100) view.set_pixbuf(pixbuf) view.set_zoom(1.0) button_ev = gdk.Event(gdk.BUTTON_PRESS) button_ev.x = 10.0 button_ev.y = 10.0 button_ev.button = 1 button_ev.window = window = gdk.Window(None, 100, 100, gdk.WINDOW_TOPLEVEL, 0, gdk.INPUT_OUTPUT, x = 100, y = 100) motion_ev = gdk.Event(gdk.MOTION_NOTIFY) motion_ev.x = 20.0 motion_ev.y = 20.0 for tool_cls in (gtkimageview.ImageToolDragger, gtkimageview.ImageToolSelector): tool = tool_cls(view) # Simulate a dragging motion. Left mouse button is pressed # down at (10, 10) and then moved to (20, 20). tool.do_button_press(tool, button_ev) assert tool.do_motion_notify(tool, motion_ev) def test_return_of_button_release(): ''' Ensure that all tools return True if it released the grab in response to a button release event. ''' # The pixbuf is larger than the view and should be draggable. view = gtkimageview.ImageView() view.size_allocate((0, 0, 50, 50)) pixbuf = gdk.Pixbuf(gdk.COLORSPACE_RGB, False, 8, 100, 100) view.set_pixbuf(pixbuf) view.set_zoom(1.0) press_ev = gdk.Event(gdk.BUTTON_PRESS) press_ev.x = 10.0 press_ev.y = 10.0 press_ev.button = 1 press_ev.window = window = gdk.Window(None, 100, 100, gdk.WINDOW_TOPLEVEL, 0, gdk.INPUT_OUTPUT, x = 100, y = 100) rel_ev = gdk.Event(gdk.BUTTON_RELEASE) rel_ev.button = 1 for tool_cls in (gtkimageview.ImageToolDragger, gtkimageview.ImageToolSelector): tool = tool_cls(view) tool.do_button_press(tool, press_ev) assert tool.do_button_release(tool, rel_ev) class DummyTool(gobject.GObject, gtkimageview.IImageTool): def __init__(self): gobject.GObject.__init__(self) self.zoom_rect = None def do_button_press(self, ev_button): pass def do_button_release(self, ev_button): pass def do_motion_notify(self, ev_motion): pass def do_pixbuf_changed(self, reset_fit, rect): pass def do_paint_image(self, opts, drawable): self.zoom_rect = opts.zoom_rect def do_cursor_at_point(self, x, y): pass gobject.type_register(DummyTool) def test_correct_repaint_offset(): ''' Ensure that there is no off by one error when repainting. 
A 1600*1600 pixbuf viewed in a 700*700 image view widget at zoom 1.0 should be perfectly centered so that draw starts at coordinates 450, 450. However, there may be a mishandled floating point conversion which causes the draw to start at 449, 449. See #31. ''' pixbuf = gdk.Pixbuf(gdk.COLORSPACE_RGB, False, 8, 1600, 1600) tool = DummyTool() view = gtkimageview.ImageView() view.set_tool(tool) view.set_show_frame(False) view.set_pixbuf(pixbuf) view.size_allocate((0, 0, 700, 700)) view.window = gdk.Window(None, 700, 700, gdk.WINDOW_TOPLEVEL, 0, gdk.INPUT_OUTPUT) view.set_zoom(1.0) ev = gdk.Event(gdk.EXPOSE) ev.area = view.allocation view.do_expose_event(view, ev) assert tool.zoom_rect.x == 450 assert tool.zoom_rect.y == 450 def test_scrolling_offbyone(): ''' Ensure that there is no off by one error when scrolling the view. The view is scrolled two pixels in the vertical direction, the result should be that the tool is asked to draw two new horizontal lines of the pixbuf. This is the other problem in bug #31. ''' pixbuf = gdk.Pixbuf(gdk.COLORSPACE_RGB, False, 8, 1600, 1600) tool = DummyTool() view = gtkimageview.ImageView() view.set_tool(tool) view.set_show_frame(False) view.set_pixbuf(pixbuf) view.size_allocate((0, 0, 700, 700)) view.window = gdk.Window(None, 700, 700, gdk.WINDOW_TOPLEVEL, 0, gdk.INPUT_OUTPUT) view.set_zoom(1.0) view.set_offset(450.0, 448.0) assert tool.zoom_rect == gdk.Rectangle(450, 448, 700, 2) view.set_offset(448.0, 450.0) def test_scrolling_adjustments_offbyone(): ''' Ensure that there is no off by one error when scrolling the view using the horizontal adjustment. First the view is scrolled two small steps, then the same distance in one go. In each case, the same number of pixels should be painted by the tool. The steps include numbers with the fractional part >= 0.5 to test the rounding. ''' pixbuf = gdk.Pixbuf(gdk.COLORSPACE_RGB, False, 8, 1600, 1600) hadj = gtk.Adjustment() vadj = gtk.Adjustment() tool = DummyTool() view = gtkimageview.ImageView() view.set_show_frame(False) view.set_tool(tool) view.set_scroll_adjustments(hadj, vadj) view.set_pixbuf(pixbuf) view.size_allocate((0, 0, 700, 700)) view.window = gdk.Window(None, 700, 700, gdk.WINDOW_TOPLEVEL, 0, gdk.INPUT_OUTPUT) view.set_zoom(3.0) pix_drawn = 0 hadj.value = 2050.0 hadj.value_changed() for ofs in [2073.71, 2088.41]: hadj.value = ofs hadj.value_changed() pix_drawn += tool.zoom_rect.width hadj.value = 2050 hadj.value_changed() hadj.value = 2088.41 hadj.value_changed() assert tool.zoom_rect.width == pix_drawn def test_setting_float_offsets_offbyone(): ''' Another test for #31. ''' pixbuf = gdk.Pixbuf(gdk.COLORSPACE_RGB, False, 8, 1600, 1600) hadj = gtk.Adjustment() vadj = gtk.Adjustment() tool = DummyTool() view = gtkimageview.ImageView() view.set_show_frame(False) view.set_tool(tool) view.set_scroll_adjustments(hadj, vadj) view.set_pixbuf(pixbuf) view.size_allocate((0, 0, 700, 700)) view.window = gdk.Window(None, 700, 700, gdk.WINDOW_TOPLEVEL, 0, gdk.INPUT_OUTPUT) view.set_zoom(3.0) view.set_offset(2050.0, 2050.0) pix_drawn = 0 for ofs in [2073.71, 2088.41]: view.set_offset(ofs, 2050.0) pix_drawn += tool.zoom_rect.width view.set_offset(2050.0, 2050.0) view.set_offset(2088.41, 2050) assert tool.zoom_rect.width == pix_drawn
Gymboree Launches Hop ‘n’ Roll Playwear – 1% of Proceeds to KaBOOM! Pinterest Playdate Map: Partners and top bloggers will continue to contribute to Gymboree’s Pinterest Playdate Map – an incredible resource for families to discover new and exciting places to play, whether on a trip or in their very own backyard. With every purchase of Hop ‘n’ Roll playwear, 1% of the proceeds will be donated to KaBOOM! – a national non-profit that inspires communities to support play and creates great places to play for kids in underserved communities across America. Your readers can also donate directly to KaBOOM! on top of their online or in-store Hop ‘n’ Roll purchase. Now that calls for a happy dance!
import os import sys import pytest def test_completion_for_bash(script): """ Test getting completion for bash shell """ bash_completion = """\ _pip_completion() { COMPREPLY=( $( COMP_WORDS="${COMP_WORDS[*]}" \\ COMP_CWORD=$COMP_CWORD \\ PIP_AUTO_COMPLETE=1 $1 ) ) } complete -o default -F _pip_completion pip""" result = script.pip('completion', '--bash') assert bash_completion in result.stdout, 'bash completion is wrong' def test_completion_for_zsh(script): """ Test getting completion for zsh shell """ zsh_completion = """\ function _pip_completion { local words cword read -Ac words read -cn cword reply=( $( COMP_WORDS="$words[*]" \\ COMP_CWORD=$(( cword-1 )) \\ PIP_AUTO_COMPLETE=1 $words[1] ) ) } compctl -K _pip_completion pip""" result = script.pip('completion', '--zsh') assert zsh_completion in result.stdout, 'zsh completion is wrong' def test_completion_for_fish(script): """ Test getting completion for fish shell """ fish_completion = """\ function __fish_complete_pip set -lx COMP_WORDS (commandline -o) "" set -lx COMP_CWORD ( \\ math (contains -i -- (commandline -t) $COMP_WORDS)-1 \\ ) set -lx PIP_AUTO_COMPLETE 1 string split \\ -- (eval $COMP_WORDS[1]) end complete -fa "(__fish_complete_pip)" -c pip""" result = script.pip('completion', '--fish') assert fish_completion in result.stdout, 'fish completion is wrong' def test_completion_for_unknown_shell(script): """ Test getting completion for an unknown shell """ error_msg = 'no such option: --myfooshell' result = script.pip('completion', '--myfooshell', expect_error=True) assert error_msg in result.stderr, 'tests for an unknown shell failed' def test_completion_alone(script): """ Test getting completion for none shell, just pip completion """ result = script.pip('completion', expect_error=True) assert 'ERROR: You must pass --bash or --fish or --zsh' in result.stderr, \ 'completion alone failed -- ' + result.stderr def setup_completion(script, words, cword, cwd=None): script.environ = os.environ.copy() script.environ['PIP_AUTO_COMPLETE'] = '1' script.environ['COMP_WORDS'] = words script.environ['COMP_CWORD'] = cword # expect_error is True because autocomplete exists with 1 status code result = script.run( 'python', '-c', 'import pip._internal;pip._internal.autocomplete()', expect_error=True, cwd=cwd, ) return result, script def test_completion_for_un_snippet(script): """ Test getting completion for ``un`` should return uninstall """ res, env = setup_completion(script, 'pip un', '1') assert res.stdout.strip().split() == ['uninstall'], res.stdout def test_completion_for_default_parameters(script): """ Test getting completion for ``--`` should contain --help """ res, env = setup_completion(script, 'pip --', '1') assert '--help' in res.stdout,\ "autocomplete function could not complete ``--``" def test_completion_option_for_command(script): """ Test getting completion for ``--`` in command (e.g. ``pip search --``) """ res, env = setup_completion(script, 'pip search --', '2') assert '--help' in res.stdout,\ "autocomplete function could not complete ``--``" def test_completion_short_option(script): """ Test getting completion for short options after ``-`` (eg. pip -) """ res, env = setup_completion(script, 'pip -', '1') assert '-h' in res.stdout.split(),\ "autocomplete function could not complete short options after ``-``" def test_completion_short_option_for_command(script): """ Test getting completion for short options after ``-`` in command (eg. 
pip search -) """ res, env = setup_completion(script, 'pip search -', '2') assert '-h' in res.stdout.split(),\ "autocomplete function could not complete short options after ``-``" def test_completion_files_after_option(script, data): """ Test getting completion for <file> or <dir> after options in command (e.g. ``pip install -r``) """ res, env = setup_completion( script=script, words=('pip install -r r'), cword='3', cwd=data.completion_paths, ) assert 'requirements.txt' in res.stdout, ( "autocomplete function could not complete <file> " "after options in command" ) assert os.path.join('resources', '') in res.stdout, ( "autocomplete function could not complete <dir> " "after options in command" ) assert not any(out in res.stdout for out in (os.path.join('REPLAY', ''), 'README.txt')), ( "autocomplete function completed <file> or <dir> that " "should not be completed" ) if sys.platform != 'win32': return assert 'readme.txt' in res.stdout, ( "autocomplete function could not complete <file> " "after options in command" ) assert os.path.join('replay', '') in res.stdout, ( "autocomplete function could not complete <dir> " "after options in command" ) def test_completion_not_files_after_option(script, data): """ Test not getting completion files after options which not applicable (e.g. ``pip install``) """ res, env = setup_completion( script=script, words=('pip install r'), cword='2', cwd=data.completion_paths, ) assert not any(out in res.stdout for out in ('requirements.txt', 'readme.txt',)), ( "autocomplete function completed <file> when " "it should not complete" ) assert not any(os.path.join(out, '') in res.stdout for out in ('replay', 'resources')), ( "autocomplete function completed <dir> when " "it should not complete" ) def test_completion_directories_after_option(script, data): """ Test getting completion <dir> after options in command (e.g. 
``pip --cache-dir``) """ res, env = setup_completion( script=script, words=('pip --cache-dir r'), cword='2', cwd=data.completion_paths, ) assert os.path.join('resources', '') in res.stdout, ( "autocomplete function could not complete <dir> after options" ) assert not any(out in res.stdout for out in ( 'requirements.txt', 'README.txt', os.path.join('REPLAY', ''))), ( "autocomplete function completed <dir> when " "it should not complete" ) if sys.platform == 'win32': assert os.path.join('replay', '') in res.stdout, ( "autocomplete function could not complete <dir> after options" ) def test_completion_subdirectories_after_option(script, data): """ Test getting completion <dir> after options in command given path of a directory """ res, env = setup_completion( script=script, words=('pip --cache-dir ' + os.path.join('resources', '')), cword='2', cwd=data.completion_paths, ) assert os.path.join('resources', os.path.join('images', '')) in res.stdout, ( "autocomplete function could not complete <dir> " "given path of a directory after options" ) def test_completion_path_after_option(script, data): """ Test getting completion <path> after options in command given absolute path """ res, env = setup_completion( script=script, words=('pip install -e ' + os.path.join(data.completion_paths, 'R')), cword='3', ) assert all(os.path.normcase(os.path.join(data.completion_paths, out)) in res.stdout for out in ( 'README.txt', os.path.join('REPLAY', ''))), ( "autocomplete function could not complete <path> " "after options in command given absolute path" ) @pytest.mark.parametrize('flag', ['--bash', '--zsh', '--fish']) def test_completion_uses_same_executable_name(script, flag): expect_stderr = sys.version_info[:2] == (3, 3) executable_name = 'pip{}'.format(sys.version_info[0]) result = script.run( executable_name, 'completion', flag, expect_stderr=expect_stderr ) assert executable_name in result.stdout
The postsecondary world can produce curious intellectuals, but it’s also ground zero for some heinous germs. The solution? Vaccinate. And beware: you probably use these two items every day.
# Copyright 2013-2015 University of Warsaw
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
#     http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.

import os.path

import avro.schema
from avro.datafile import DataFileWriter
from avro.io import DatumWriter


def create(standard_out_path, nested_out_path, binary_out_path):
    """Create example Avro data stores"""
    __create_standard(standard_out_path)
    __create_nested(nested_out_path)
    __create_binary(binary_out_path)


def __create_standard(out_path):
    """Create a multi-part store of user records, including an empty part."""
    os.makedirs(out_path)
    schema_path = os.path.join(os.path.dirname(__file__), 'data/user.avsc')
    schema = avro.schema.parse(open(schema_path).read())
    # Avro container files are binary, so the output files are opened in 'wb' mode.
    with DataFileWriter(open(os.path.join(out_path, 'part-m-00000.avro'), 'wb'),
                        DatumWriter(), schema) as writer:
        writer.append({'position': 0, 'name': 'Alyssa', 'favorite_number': 256})
        writer.append({'position': 1, 'name': 'Ben', 'favorite_number': 4,
                       'favorite_color': 'red'})
    with DataFileWriter(open(os.path.join(out_path, 'part-m-00001.avro'), 'wb'),
                        DatumWriter(), schema) as writer:
        writer.append({'position': 2, 'name': 'Alyssa2', 'favorite_number': 512})
        writer.append({'position': 3, 'name': 'Ben2', 'favorite_number': 8,
                       'favorite_color': 'blue', 'secret': b'0987654321'})
        writer.append({'position': 4, 'name': 'Ben3', 'favorite_number': 2,
                       'favorite_color': 'green', 'secret': b'12345abcd'})
    # This part is intentionally left empty.
    with DataFileWriter(open(os.path.join(out_path, 'part-m-00002.avro'), 'wb'),
                        DatumWriter(), schema) as writer:
        pass
    with DataFileWriter(open(os.path.join(out_path, 'part-m-00003.avro'), 'wb'),
                        DatumWriter(), schema) as writer:
        writer.append({'position': 5, 'name': 'Alyssa3', 'favorite_number': 16})
        writer.append({'position': 6, 'name': 'Mallet', 'favorite_color': 'blue',
                       'secret': b'asdfgf'})
        writer.append({'position': 7, 'name': 'Mikel', 'favorite_color': ''})


def __create_nested(out_path):
    """Create a store whose records contain a nested record."""
    os.makedirs(out_path)
    schema_path = os.path.join(os.path.dirname(__file__), 'data/nested.avsc')
    schema = avro.schema.parse(open(schema_path).read())
    with DataFileWriter(open(os.path.join(out_path, 'part-m-00004.avro'), 'wb'),
                        DatumWriter(), schema) as writer:
        writer.append({'sup': 1, 'sub': {'level2': 2}})
        writer.append({'sup': 2, 'sub': {'level2': 1}})


def __create_binary(out_path):
    """Create a store whose records embed binary (tar.gz) payloads."""
    os.makedirs(out_path)
    schema_path = os.path.join(os.path.dirname(__file__), 'data/binary.avsc')
    schema = avro.schema.parse(open(schema_path).read())
    with DataFileWriter(open(os.path.join(out_path, 'content.avro'), 'wb'),
                        DatumWriter(), schema) as writer:
        # The packed payloads are read in binary mode to avoid newline translation.
        various_stuff_data = open(os.path.join(
            os.path.dirname(__file__),
            'data/binary_stuff/various_stuff.tar.gz'), 'rb').read()
        writer.append({'description': 'various stuff',
                       'packed_files': various_stuff_data})
        greetings_data = open(os.path.join(
            os.path.dirname(__file__),
            'data/binary_stuff/greetings.tar.gz'), 'rb').read()
        writer.append({'description': 'greetings',
                       'packed_files': greetings_data})
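For checking what the module above actually produced, a minimal reader sketch follows. It is not part of the original file: it only uses the standard avro DataFileReader API, and the example path in the trailing comment is a hypothetical output directory name.

from avro.datafile import DataFileReader
from avro.io import DatumReader


def dump_store(path):
    """Print every record stored in a single Avro container file."""
    # The reader recovers the writer's schema from the file header,
    # so no external .avsc file is needed here.
    reader = DataFileReader(open(path, 'rb'), DatumReader())
    try:
        for record in reader:
            print(record)
    finally:
        reader.close()


# Example (hypothetical output directory created by __create_standard):
# dump_store('standard_store/part-m-00000.avro')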
The weeklong Pride Week event features several activities aimed at increasing the attention paid to LGBTQ issues around Grounds. Scott Rheinheimer, coordinator for LGBTQ student services, said he is currently working with the administration to address several topics, including gender-neutral housing options and co-ed bathrooms.

Saturday marked the beginning of Pride Week 2014, hosted by the University LGBTQ Center. The event runs through Friday and features 11 separate activities celebrating the diverse aspects of LGBTQ communities.

Highlights for the week include an LGBTQ Career Panel, Safe Space training, a talk from asexual activist Julie Sondra Decker, and the Day of Silence, during which students will abstain from speaking to represent the silence felt by many LGBTQ individuals. The week will wrap up with the Over the Rainbow Paint Fight Saturday at 3 p.m. at Nameless Field.

QSU Vice-President Abe Wapner, a fourth-year College student, said Pride Week is an opportunity to bring LGBTQ issues to the foreground of dialogues across Grounds.

Founded under the Office of the Dean of Students in 2001, the LGBTQ Center advocates for the inclusion of sexual and gender diversities through programs, outreach and services intended to support the advancement of LGBTQ individuals. Rheinheimer said he hopes events such as Pride Week will provide some type of cultural education to the general community, as well as support for LGBTQ and non-identifying students.

Two of the community’s current concerns are bathroom usage and housing. Rheinheimer said some LGBTQ students feel anxiety when deciding whether to use male or female bathrooms. The Center is working to address LGBTQ living situations by looking into inclusive gender-neutral housing on Grounds. Rheinheimer pointed out that the challenge will be making the housing inclusive and secure while ensuring that it is not segregated.

Gay Perez, associate dean of students and executive director of housing and residence life, said the University does not currently have an explicit policy about gender-neutral housing.

The inclusion of LGBTQ students within traditional university organizations such as fraternities, sororities, and other visible groups around Grounds is another central goal of the Center. Rheinheimer said initiatives such as Safe Space training in the Inter-Fraternity and Inter-Sorority Councils are steps in the right direction, but there are still problems that need to be addressed.

The admissions office is currently looking into adding a question to the demographics section of applications that would allow students to identify their sexual orientation. Roberts said the Admissions Office has reached out to LGBTQ individuals in the past six months to work on engaging with, enrolling and supporting LGBTQ students.
# -*- coding: utf-8 -*-

"""
Class in the Tiger type hierarchy representing the array type.
"""

from pytiger2c.types.tigertype import TigerType


class ArrayType(TigerType):
    """
    Class in the Tiger type hierarchy representing the array type.
    """

    def _get_code_name(self):
        """
        Method to get the value of the C{code_name} property.
        """
        return self._code_name

    def _set_code_name(self, value):
        """
        Method to set the value of the C{code_name} property.
        """
        self._code_name = value

    code_name = property(_get_code_name, _set_code_name)

    def _get_fields_typenames(self):
        """
        Method to get the value of the C{fields_typenames} property.
        """
        return self._fields_typenames

    fields_typenames = property(_get_fields_typenames)

    def _get_fields_types(self):
        """
        Method to get the value of the C{fields_types} property.
        """
        return self._fields_types

    def _set_fields_types(self, fields_types):
        """
        Method to set the value of the C{fields_types} property.
        """
        self._fields_types = fields_types

    fields_types = property(_get_fields_types, _set_fields_types)

    def __init__(self, values_typename):
        """
        Initializes the class representing the array type.

        @type values_typename: C{str}
        @param values_typename: Name of the type that the values of the
            array will have.
        """
        super(ArrayType, self).__init__()
        self._fields_typenames = [values_typename]
        self._fields_types = None
        self._code_name = None
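For illustration only, here is a small sketch of how the class above might be exercised on its own. The import path is an assumption based on the layout implied by the pytiger2c.types.tigertype import, and the compiler's semantic-analysis machinery that normally fills in these properties is not involved.

from pytiger2c.types.arraytype import ArrayType  # assumed module path

# An array type whose values are of the type named 'int'.
int_array = ArrayType('int')

# Until semantic analysis resolves the name, only the typename is known.
assert int_array.fields_typenames == ['int']
assert int_array.fields_types is None

# code_name is assigned later, during code generation.
int_array.code_name = 'int_array_t'
print(int_array.code_name)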
All your family's lesson needs at one location. Group sibling lessons so that you can make one trip. Excellent teachers. New studios, a unique make-up lesson program, and ribbons and trophies earned and recognized in the monthly newsletter. Hi everybody! My name is Stetzon and I love music. I have been playing violin since I was 8 years old, for a total of 11 years now. Along the way I have picked up guitar, ukulele, percussion, and singing, and I always try to learn more. I have been teaching violin for a few years now. I just moved to Logan and I'm looking to continue my teaching here! Please contact me for more information.
#!/usr/bin/env python # -*- coding: utf-8 -*- # pylint: disable-msg=C0103 ##***** BEGIN LICENSE BLOCK ***** ##Version: MPL 1.1 ## ##The contents of this file are subject to the Mozilla Public License Version ##1.1 (the "License") you may not use this file except in compliance with ##the License. You may obtain a copy of the License at ##http:##www.mozilla.org/MPL/ ## ##Software distributed under the License is distributed on an "AS IS" basis, ##WITHOUT WARRANTY OF ANY KIND, either express or implied. See the License ##for the specific language governing rights and limitations under the ##License. ## ##The Original Code is the AllegroGraph Java Client interface. ## ##The Original Code was written by Franz Inc. ##Copyright (C) 2006 Franz Inc. All Rights Reserved. ## ##***** END LICENSE BLOCK ***** from __future__ import absolute_import from ..model.value import URI NS = "http://www.w3.org/2001/XMLSchema#" class XMLSchema: """ A 'static' class containing useful XMLSchema URIs. """ NAMESPACE = NS DURATION = URI(namespace=NS, localname="duration") DATETIME = URI(namespace=NS, localname="dateTime") TIME = URI(namespace=NS, localname="time") DATE = URI(namespace=NS, localname="date") GYEARMONTH = URI(namespace=NS, localname="gYearMonth") GYEAR = URI(namespace=NS, localname="gYear") GMONTHDAY = URI(namespace=NS, localname="gMonthDay") GDAY = URI(namespace=NS, localname="gDay") GMONTH = URI(namespace=NS, localname="gMonth") STRING = URI(namespace=NS, localname="string") BOOLEAN = URI(namespace=NS, localname="boolean") BASE64BINARY = URI(namespace=NS, localname="base64Binary") HEXBINARY = URI(namespace=NS, localname="hexBinary") FLOAT = URI(namespace=NS, localname="float") DECIMAL = URI(namespace=NS, localname="decimal") DOUBLE = URI(namespace=NS, localname="double") ANYURI = URI(namespace=NS, localname="anyURI") QNAME = URI(namespace=NS, localname="QName") NOTATION = URI(namespace=NS, localname="NOTATION") NORMALIZEDSTRING = URI(namespace=NS, localname="normalizedString") TOKEN = URI(namespace=NS, localname="token") LANGUAGE = URI(namespace=NS, localname="language") NMTOKEN = URI(namespace=NS, localname="NMTOKEN") NMTOKENS = URI(namespace=NS, localname="NMTOKENS") NAME = URI(namespace=NS, localname="Name") NCNAME = URI(namespace=NS, localname="NCName") ID = URI(namespace=NS, localname="ID") IDREF = URI(namespace=NS, localname="IDREF") IDREFS = URI(namespace=NS, localname="IDREFS") ENTITY = URI(namespace=NS, localname="ENTITY") ENTITIES = URI(namespace=NS, localname="ENTITIES") INTEGER = URI(namespace=NS, localname="integer") LONG = URI(namespace=NS, localname="long") INT = URI(namespace=NS, localname="int") SHORT = URI(namespace=NS, localname="short") NUMBER = URI(namespace=NS, localname="number") BYTE = URI(namespace=NS, localname="byte") NON_POSITIVE_INTEGER = URI(namespace=NS, localname="nonPositiveInteger") NEGATIVE_INTEGER = URI(namespace=NS, localname="negativeInteger") NON_NEGATIVE_INTEGER = URI(namespace=NS, localname="nonNegativeInteger") POSITIVE_INTEGER = URI(namespace=NS, localname="positiveInteger") UNSIGNED_LONG = URI(namespace=NS, localname="unsignedLong") UNSIGNED_INT = URI(namespace=NS, localname="unsignedInt") UNSIGNED_SHORT = URI(namespace=NS, localname="unsignedShort") UNSIGNED_BYTE = URI(namespace=NS, localname="unsignedByte") ## map of uri strings to URI objects: uristr2obj = {} for name, uri in XMLSchema.__dict__.iteritems(): if name.upper() == name: XMLSchema.uristr2obj[str(uri)] = uri del XMLSchema.uristr2obj[NS]
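A brief usage sketch, not part of the original file: it relies only on the class shown above and uses the same str() conversion the module itself uses when building the uristr2obj table, so it stays agnostic about the exact string form a URI renders to.

# Assuming XMLSchema has been imported from the module above.
key = str(XMLSchema.DATETIME)

# Every datatype constant can be looked up by the string form of its URI.
assert XMLSchema.uristr2obj[key] is XMLSchema.DATETIME

# The bare namespace string was deliberately deleted from the table,
# so it does not resolve to a datatype.
assert XMLSchema.NAMESPACE not in XMLSchema.uristr2obj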
If you’ve made it this far – congratulations! And thank you. I just needed to get this out of my system. Today I am not the same person I was yesterday. Tomorrow I will be someone different to today. If you are part of my life, it’s important to keep up!
# Copyright (c) 2013 VMware, Inc. # All Rights Reserved. # # Licensed under the Apache License, Version 2.0 (the "License"); you may # not use this file except in compliance with the License. You may obtain # a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, WITHOUT # WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the # License for the specific language governing permissions and limitations # under the License. """ Test suite for VMware VMDK driver. """ from distutils.version import LooseVersion import os import mock import mox from oslo.utils import units from cinder import exception from cinder.image import glance from cinder import test from cinder.volume import configuration from cinder.volume.drivers.vmware import api from cinder.volume.drivers.vmware import datastore as hub from cinder.volume.drivers.vmware import error_util from cinder.volume.drivers.vmware import vim from cinder.volume.drivers.vmware import vim_util from cinder.volume.drivers.vmware import vmdk from cinder.volume.drivers.vmware import vmware_images from cinder.volume.drivers.vmware import volumeops class FakeVim(object): @property def service_content(self): return mox.MockAnything() @property def client(self): return mox.MockAnything() def Login(self, session_manager, userName, password): return mox.MockAnything() def Logout(self, session_manager): pass def TerminateSession(self, session_manager, sessionId): pass def SessionIsActive(self, session_manager, sessionID, userName): pass class FakeTaskInfo(object): def __init__(self, state, result=None): self.state = state self.result = result class FakeError(object): def __init__(self): self.localizedMessage = None self.error = FakeError() class FakeMor(object): def __init__(self, type, val): self._type = type self.value = val class FakeObject(object): def __init__(self): self._fields = {} def __setitem__(self, key, value): self._fields[key] = value def __getitem__(self, item): return self._fields[item] class FakeManagedObjectReference(object): def __init__(self, lis=None): self.ManagedObjectReference = lis or [] class FakeDatastoreSummary(object): def __init__(self, freeSpace, capacity, datastore=None, name=None): self.freeSpace = freeSpace self.capacity = capacity self.datastore = datastore self.name = name class FakeSnapshotTree(object): def __init__(self, tree=None, name=None, snapshot=None, childSnapshotList=None): self.rootSnapshotList = tree self.name = name self.snapshot = snapshot self.childSnapshotList = childSnapshotList class FakeElem(object): def __init__(self, prop_set=None): self.propSet = prop_set class FakeProp(object): def __init__(self, name=None, val=None): self.name = name self.val = val class FakeRetrieveResult(object): def __init__(self, objects, token): self.objects = objects self.token = token class FakeObj(object): def __init__(self, obj=None): self.obj = obj # TODO(vbala) Split test methods handling multiple cases into multiple methods, # each handling a specific case. 
class VMwareEsxVmdkDriverTestCase(test.TestCase): """Test class for VMwareEsxVmdkDriver.""" IP = 'localhost' USERNAME = 'username' PASSWORD = 'password' VOLUME_FOLDER = 'cinder-volumes' API_RETRY_COUNT = 3 TASK_POLL_INTERVAL = 5.0 IMG_TX_TIMEOUT = 10 MAX_OBJECTS = 100 TMP_DIR = "/vmware-tmp" VMDK_DRIVER = vmdk.VMwareEsxVmdkDriver def setUp(self): super(VMwareEsxVmdkDriverTestCase, self).setUp() self._config = mox.MockObject(configuration.Configuration) self._config.append_config_values(mox.IgnoreArg()) self._config.vmware_host_ip = self.IP self._config.vmware_host_username = self.USERNAME self._config.vmware_host_password = self.PASSWORD self._config.vmware_wsdl_location = None self._config.vmware_volume_folder = self.VOLUME_FOLDER self._config.vmware_api_retry_count = self.API_RETRY_COUNT self._config.vmware_task_poll_interval = self.TASK_POLL_INTERVAL self._config.vmware_image_transfer_timeout_secs = self.IMG_TX_TIMEOUT self._config.vmware_max_objects_retrieval = self.MAX_OBJECTS self._config.vmware_tmp_dir = self.TMP_DIR self._db = mock.Mock() self._driver = vmdk.VMwareEsxVmdkDriver(configuration=self._config, db=self._db) api_retry_count = self._config.vmware_api_retry_count, task_poll_interval = self._config.vmware_task_poll_interval, self._session = api.VMwareAPISession(self.IP, self.USERNAME, self.PASSWORD, api_retry_count, task_poll_interval, create_session=False) self._volumeops = volumeops.VMwareVolumeOps(self._session, self.MAX_OBJECTS) self._vim = FakeVim() def test_retry(self): """Test Retry.""" class TestClass(object): def __init__(self): self.counter1 = 0 self.counter2 = 0 @api.Retry(max_retry_count=2, inc_sleep_time=0.001, exceptions=(Exception)) def fail(self): self.counter1 += 1 raise exception.CinderException('Fail') @api.Retry(max_retry_count=2) def success(self): self.counter2 += 1 return self.counter2 test_obj = TestClass() self.assertRaises(exception.CinderException, test_obj.fail) self.assertEqual(test_obj.counter1, 3) ret = test_obj.success() self.assertEqual(1, ret) def test_create_session(self): """Test create_session.""" m = self.mox m.StubOutWithMock(api.VMwareAPISession, 'vim') self._session.vim = self._vim m.ReplayAll() self._session.create_session() m.UnsetStubs() m.VerifyAll() def test_do_setup(self): """Test do_setup.""" m = self.mox m.StubOutWithMock(self._driver.__class__, 'session') self._driver.session = self._session m.ReplayAll() self._driver.do_setup(mox.IgnoreArg()) m.UnsetStubs() m.VerifyAll() def test_check_for_setup_error(self): """Test check_for_setup_error.""" self._driver.check_for_setup_error() def test_get_volume_stats(self): """Test get_volume_stats.""" stats = self._driver.get_volume_stats() self.assertEqual(stats['vendor_name'], 'VMware') self.assertEqual(stats['driver_version'], self._driver.VERSION) self.assertEqual(stats['storage_protocol'], 'LSI Logic SCSI') self.assertEqual(stats['reserved_percentage'], 0) self.assertEqual(stats['total_capacity_gb'], 'unknown') self.assertEqual(stats['free_capacity_gb'], 'unknown') def test_create_volume(self): """Test create_volume.""" driver = self._driver host = mock.sentinel.host rp = mock.sentinel.resource_pool folder = mock.sentinel.folder summary = mock.sentinel.summary driver._select_ds_for_volume = mock.MagicMock() driver._select_ds_for_volume.return_value = (host, rp, folder, summary) # invoke the create_volume call volume = {'name': 'fake_volume'} driver.create_volume(volume) # verify calls made driver._select_ds_for_volume.assert_called_once_with(volume) # test create_volume call when 
_select_ds_for_volume fails driver._select_ds_for_volume.side_effect = error_util.VimException('') self.assertRaises(error_util.VimFaultException, driver.create_volume, volume) # Clear side effects. driver._select_ds_for_volume.side_effect = None def test_success_wait_for_task(self): """Test successful wait_for_task.""" m = self.mox m.StubOutWithMock(api.VMwareAPISession, 'vim') self._session.vim = self._vim result = FakeMor('VirtualMachine', 'my_vm') success_task_info = FakeTaskInfo('success', result=result) m.StubOutWithMock(vim_util, 'get_object_property') vim_util.get_object_property(self._session.vim, mox.IgnoreArg(), 'info').AndReturn(success_task_info) m.ReplayAll() ret = self._session.wait_for_task(mox.IgnoreArg()) self.assertEqual(ret.result, result) m.UnsetStubs() m.VerifyAll() def test_failed_wait_for_task(self): """Test failed wait_for_task.""" m = self.mox m.StubOutWithMock(api.VMwareAPISession, 'vim') self._session.vim = self._vim failed_task_info = FakeTaskInfo('failed') m.StubOutWithMock(vim_util, 'get_object_property') vim_util.get_object_property(self._session.vim, mox.IgnoreArg(), 'info').AndReturn(failed_task_info) m.ReplayAll() self.assertRaises(error_util.VimFaultException, self._session.wait_for_task, mox.IgnoreArg()) m.UnsetStubs() m.VerifyAll() def test_delete_volume_without_backing(self): """Test delete_volume without backing.""" m = self.mox m.StubOutWithMock(self._driver.__class__, 'volumeops') self._driver.volumeops = self._volumeops m.StubOutWithMock(self._volumeops, 'get_backing') self._volumeops.get_backing('hello_world').AndReturn(None) m.ReplayAll() volume = FakeObject() volume['name'] = 'hello_world' self._driver.delete_volume(volume) m.UnsetStubs() m.VerifyAll() def test_delete_volume_with_backing(self): """Test delete_volume with backing.""" m = self.mox m.StubOutWithMock(self._driver.__class__, 'volumeops') self._driver.volumeops = self._volumeops backing = FakeMor('VirtualMachine', 'my_vm') FakeMor('Task', 'my_task') m.StubOutWithMock(self._volumeops, 'get_backing') m.StubOutWithMock(self._volumeops, 'delete_backing') self._volumeops.get_backing('hello_world').AndReturn(backing) self._volumeops.delete_backing(backing) m.ReplayAll() volume = FakeObject() volume['name'] = 'hello_world' self._driver.delete_volume(volume) m.UnsetStubs() m.VerifyAll() def test_create_export(self): """Test create_export.""" self._driver.create_export(mox.IgnoreArg(), mox.IgnoreArg()) def test_ensure_export(self): """Test ensure_export.""" self._driver.ensure_export(mox.IgnoreArg(), mox.IgnoreArg()) def test_remove_export(self): """Test remove_export.""" self._driver.remove_export(mox.IgnoreArg(), mox.IgnoreArg()) def test_terminate_connection(self): """Test terminate_connection.""" self._driver.terminate_connection(mox.IgnoreArg(), mox.IgnoreArg(), force=mox.IgnoreArg()) def test_create_backing_in_inventory_multi_hosts(self): """Test _create_backing_in_inventory scanning multiple hosts.""" m = self.mox m.StubOutWithMock(self._driver.__class__, 'volumeops') self._driver.volumeops = self._volumeops host1 = FakeObj(obj=FakeMor('HostSystem', 'my_host1')) host2 = FakeObj(obj=FakeMor('HostSystem', 'my_host2')) retrieve_result = FakeRetrieveResult([host1, host2], None) m.StubOutWithMock(self._volumeops, 'get_hosts') self._volumeops.get_hosts().AndReturn(retrieve_result) m.StubOutWithMock(self._driver, '_create_backing') volume = FakeObject() volume['name'] = 'vol_name' backing = FakeMor('VirtualMachine', 'my_back') mux = self._driver._create_backing(volume, host1.obj, {}) 
mux.AndRaise(error_util.VimException('Maintenance mode')) mux = self._driver._create_backing(volume, host2.obj, {}) mux.AndReturn(backing) m.StubOutWithMock(self._volumeops, 'cancel_retrieval') self._volumeops.cancel_retrieval(retrieve_result) m.StubOutWithMock(self._volumeops, 'continue_retrieval') m.ReplayAll() result = self._driver._create_backing_in_inventory(volume) self.assertEqual(result, backing) m.UnsetStubs() m.VerifyAll() def test_init_conn_with_instance_and_backing(self): """Test initialize_connection with instance and backing.""" m = self.mox m.StubOutWithMock(self._driver.__class__, 'volumeops') self._driver.volumeops = self._volumeops m.StubOutWithMock(self._volumeops, 'get_backing') volume = FakeObject() volume['name'] = 'volume_name' volume['id'] = 'volume_id' volume['size'] = 1 connector = {'instance': 'my_instance'} backing = FakeMor('VirtualMachine', 'my_back') self._volumeops.get_backing(volume['name']).AndReturn(backing) m.StubOutWithMock(self._volumeops, 'get_host') host = FakeMor('HostSystem', 'my_host') self._volumeops.get_host(mox.IgnoreArg()).AndReturn(host) m.ReplayAll() conn_info = self._driver.initialize_connection(volume, connector) self.assertEqual(conn_info['driver_volume_type'], 'vmdk') self.assertEqual(conn_info['data']['volume'], 'my_back') self.assertEqual(conn_info['data']['volume_id'], 'volume_id') m.UnsetStubs() m.VerifyAll() def test_get_volume_group_folder(self): """Test _get_volume_group_folder.""" m = self.mox m.StubOutWithMock(self._driver.__class__, 'volumeops') self._driver.volumeops = self._volumeops datacenter = FakeMor('Datacenter', 'my_dc') m.StubOutWithMock(self._volumeops, 'get_vmfolder') self._volumeops.get_vmfolder(datacenter) m.ReplayAll() self._driver._get_volume_group_folder(datacenter) m.UnsetStubs() m.VerifyAll() def test_select_datastore_summary(self): """Test _select_datastore_summary.""" m = self.mox m.StubOutWithMock(self._driver.__class__, 'volumeops') self._driver.volumeops = self._volumeops datastore1 = FakeMor('Datastore', 'my_ds_1') datastore2 = FakeMor('Datastore', 'my_ds_2') datastore3 = FakeMor('Datastore', 'my_ds_3') datastore4 = FakeMor('Datastore', 'my_ds_4') datastores = [datastore1, datastore2, datastore3, datastore4] m.StubOutWithMock(self._volumeops, 'get_summary') summary1 = FakeDatastoreSummary(5, 100) summary2 = FakeDatastoreSummary(25, 100) summary3 = FakeDatastoreSummary(50, 100) summary4 = FakeDatastoreSummary(75, 100) self._volumeops.get_summary( datastore1).MultipleTimes().AndReturn(summary1) self._volumeops.get_summary( datastore2).MultipleTimes().AndReturn(summary2) self._volumeops.get_summary( datastore3).MultipleTimes().AndReturn(summary3) self._volumeops.get_summary( datastore4).MultipleTimes().AndReturn(summary4) m.StubOutWithMock(self._volumeops, 'get_connected_hosts') host1 = FakeMor('HostSystem', 'my_host_1') host2 = FakeMor('HostSystem', 'my_host_2') host3 = FakeMor('HostSystem', 'my_host_3') host4 = FakeMor('HostSystem', 'my_host_4') self._volumeops.get_connected_hosts( datastore1).MultipleTimes().AndReturn([host1, host2, host3, host4]) self._volumeops.get_connected_hosts( datastore2).MultipleTimes().AndReturn([host1, host2, host3]) self._volumeops.get_connected_hosts( datastore3).MultipleTimes().AndReturn([host1, host2]) self._volumeops.get_connected_hosts( datastore4).MultipleTimes().AndReturn([host1, host2]) m.ReplayAll() summary = self._driver._select_datastore_summary(1, datastores) self.assertEqual(summary, summary1) summary = self._driver._select_datastore_summary(10, datastores) 
self.assertEqual(summary, summary2) summary = self._driver._select_datastore_summary(40, datastores) self.assertEqual(summary, summary4) self.assertRaises(error_util.VimException, self._driver._select_datastore_summary, 100, datastores) m.UnsetStubs() m.VerifyAll() @mock.patch('cinder.volume.drivers.vmware.vmdk.VMwareEsxVmdkDriver.' 'session', new_callable=mock.PropertyMock) @mock.patch('cinder.volume.drivers.vmware.vmdk.VMwareEsxVmdkDriver.' 'volumeops', new_callable=mock.PropertyMock) def test_get_folder_ds_summary(self, volumeops, session): """Test _get_folder_ds_summary.""" volumeops = volumeops.return_value driver = self._driver volume = {'size': 10, 'volume_type_id': 'fake_type'} rp = mock.sentinel.resource_pool dss = mock.sentinel.datastores # patch method calls from _get_folder_ds_summary volumeops.get_dc.return_value = mock.sentinel.dc volumeops.get_vmfolder.return_value = mock.sentinel.folder driver._get_storage_profile = mock.MagicMock() driver._select_datastore_summary = mock.MagicMock() driver._select_datastore_summary.return_value = mock.sentinel.summary # call _get_folder_ds_summary (folder, datastore_summary) = driver._get_folder_ds_summary(volume, rp, dss) # verify returned values and calls made self.assertEqual(mock.sentinel.folder, folder, "Folder returned is wrong.") self.assertEqual(mock.sentinel.summary, datastore_summary, "Datastore summary returned is wrong.") volumeops.get_dc.assert_called_once_with(rp) volumeops.get_vmfolder.assert_called_once_with(mock.sentinel.dc) driver._get_storage_profile.assert_called_once_with(volume) size = volume['size'] * units.Gi driver._select_datastore_summary.assert_called_once_with(size, dss) @mock.patch('cinder.volume.volume_types.get_volume_type_extra_specs') def test_get_disk_type(self, get_volume_type_extra_specs): """Test _get_disk_type.""" # Test with no volume type. volume = {'volume_type_id': None} self.assertEqual(vmdk.THIN_VMDK_TYPE, vmdk.VMwareEsxVmdkDriver._get_disk_type(volume)) # Test with valid vmdk_type. volume_type_id = mock.sentinel.volume_type_id volume = {'volume_type_id': volume_type_id} get_volume_type_extra_specs.return_value = vmdk.THICK_VMDK_TYPE self.assertEqual(vmdk.THICK_VMDK_TYPE, vmdk.VMwareEsxVmdkDriver._get_disk_type(volume)) get_volume_type_extra_specs.assert_called_once_with(volume_type_id, 'vmware:vmdk_type') # Test with invalid vmdk_type. 
get_volume_type_extra_specs.return_value = 'sparse' self.assertRaises(error_util.InvalidDiskTypeException, vmdk.VMwareEsxVmdkDriver._get_disk_type, volume) def test_init_conn_with_instance_no_backing(self): """Test initialize_connection with instance and without backing.""" m = self.mox m.StubOutWithMock(self._driver.__class__, 'volumeops') self._driver.volumeops = self._volumeops m.StubOutWithMock(self._volumeops, 'get_backing') volume = FakeObject() volume['name'] = 'volume_name' volume['id'] = 'volume_id' volume['size'] = 1 volume['volume_type_id'] = None connector = {'instance': 'my_instance'} self._volumeops.get_backing(volume['name']) m.StubOutWithMock(self._volumeops, 'get_host') host = FakeMor('HostSystem', 'my_host') self._volumeops.get_host(mox.IgnoreArg()).AndReturn(host) m.StubOutWithMock(self._volumeops, 'get_dss_rp') resource_pool = FakeMor('ResourcePool', 'my_rp') datastores = [FakeMor('Datastore', 'my_ds')] self._volumeops.get_dss_rp(host).AndReturn((datastores, resource_pool)) m.StubOutWithMock(self._driver, '_get_folder_ds_summary') folder = FakeMor('Folder', 'my_fol') summary = FakeDatastoreSummary(1, 1) self._driver._get_folder_ds_summary(volume, resource_pool, datastores).AndReturn((folder, summary)) backing = FakeMor('VirtualMachine', 'my_back') m.StubOutWithMock(self._volumeops, 'create_backing') self._volumeops.create_backing(volume['name'], volume['size'] * units.Mi, mox.IgnoreArg(), folder, resource_pool, host, mox.IgnoreArg(), mox.IgnoreArg(), mox.IgnoreArg()).AndReturn(backing) m.ReplayAll() conn_info = self._driver.initialize_connection(volume, connector) self.assertEqual(conn_info['driver_volume_type'], 'vmdk') self.assertEqual(conn_info['data']['volume'], 'my_back') self.assertEqual(conn_info['data']['volume_id'], 'volume_id') m.UnsetStubs() m.VerifyAll() def test_init_conn_without_instance(self): """Test initialize_connection without instance and a backing.""" m = self.mox m.StubOutWithMock(self._driver.__class__, 'volumeops') self._driver.volumeops = self._volumeops m.StubOutWithMock(self._volumeops, 'get_backing') backing = FakeMor('VirtualMachine', 'my_back') volume = FakeObject() volume['name'] = 'volume_name' volume['id'] = 'volume_id' connector = {} self._volumeops.get_backing(volume['name']).AndReturn(backing) m.ReplayAll() conn_info = self._driver.initialize_connection(volume, connector) self.assertEqual(conn_info['driver_volume_type'], 'vmdk') self.assertEqual(conn_info['data']['volume'], 'my_back') self.assertEqual(conn_info['data']['volume_id'], 'volume_id') m.UnsetStubs() m.VerifyAll() def test_create_snapshot_without_backing(self): """Test vmdk.create_snapshot without backing.""" m = self.mox m.StubOutWithMock(self._driver.__class__, 'volumeops') self._driver.volumeops = self._volumeops m.StubOutWithMock(self._volumeops, 'get_backing') snapshot = FakeObject() snapshot['volume_name'] = 'volume_name' snapshot['name'] = 'snap_name' snapshot['volume'] = FakeObject() snapshot['volume']['status'] = 'available' self._volumeops.get_backing(snapshot['volume_name']) m.ReplayAll() self._driver.create_snapshot(snapshot) m.UnsetStubs() m.VerifyAll() def test_create_snapshot_with_backing(self): """Test vmdk.create_snapshot with backing.""" m = self.mox m.StubOutWithMock(self._driver.__class__, 'volumeops') self._driver.volumeops = self._volumeops m.StubOutWithMock(self._volumeops, 'get_backing') snapshot = FakeObject() snapshot['volume_name'] = 'volume_name' snapshot['name'] = 'snapshot_name' snapshot['display_description'] = 'snapshot_desc' 
snapshot['volume'] = FakeObject() snapshot['volume']['status'] = 'available' backing = FakeMor('VirtualMachine', 'my_back') self._volumeops.get_backing(snapshot['volume_name']).AndReturn(backing) m.StubOutWithMock(self._volumeops, 'create_snapshot') self._volumeops.create_snapshot(backing, snapshot['name'], snapshot['display_description']) m.ReplayAll() self._driver.create_snapshot(snapshot) m.UnsetStubs() m.VerifyAll() def test_create_snapshot_when_attached(self): """Test vmdk.create_snapshot when volume is attached.""" snapshot = FakeObject() snapshot['volume'] = FakeObject() snapshot['volume']['status'] = 'in-use' self.assertRaises(exception.InvalidVolume, self._driver.create_snapshot, snapshot) def test_delete_snapshot_without_backing(self): """Test delete_snapshot without backing.""" m = self.mox m.StubOutWithMock(self._driver.__class__, 'volumeops') self._driver.volumeops = self._volumeops m.StubOutWithMock(self._volumeops, 'get_backing') snapshot = FakeObject() snapshot['volume_name'] = 'volume_name' snapshot['name'] = 'snap_name' snapshot['volume'] = FakeObject() snapshot['volume']['status'] = 'available' self._volumeops.get_backing(snapshot['volume_name']) m.ReplayAll() self._driver.delete_snapshot(snapshot) m.UnsetStubs() m.VerifyAll() def test_delete_snapshot_with_backing(self): """Test delete_snapshot with backing.""" m = self.mox m.StubOutWithMock(self._driver.__class__, 'volumeops') self._driver.volumeops = self._volumeops m.StubOutWithMock(self._volumeops, 'get_backing') snapshot = FakeObject() snapshot['name'] = 'snapshot_name' snapshot['volume_name'] = 'volume_name' snapshot['name'] = 'snap_name' snapshot['volume'] = FakeObject() snapshot['volume']['status'] = 'available' backing = FakeMor('VirtualMachine', 'my_back') self._volumeops.get_backing(snapshot['volume_name']).AndReturn(backing) m.StubOutWithMock(self._volumeops, 'delete_snapshot') self._volumeops.delete_snapshot(backing, snapshot['name']) m.ReplayAll() self._driver.delete_snapshot(snapshot) m.UnsetStubs() m.VerifyAll() def test_delete_snapshot_when_attached(self): """Test delete_snapshot when volume is attached.""" snapshot = FakeObject() snapshot['volume'] = FakeObject() snapshot['volume']['status'] = 'in-use' self.assertRaises(exception.InvalidVolume, self._driver.delete_snapshot, snapshot) @mock.patch('cinder.volume.drivers.vmware.vmdk.VMwareEsxVmdkDriver.' 'volumeops', new_callable=mock.PropertyMock) def test_create_cloned_volume_without_backing(self, mock_vops): """Test create_cloned_volume without a backing.""" mock_vops = mock_vops.return_value driver = self._driver volume = {'name': 'mock_vol'} src_vref = {'name': 'src_snapshot_name'} driver._verify_volume_creation = mock.MagicMock() mock_vops.get_backing.return_value = None # invoke the create_volume_from_snapshot api driver.create_cloned_volume(volume, src_vref) # verify calls driver._verify_volume_creation.assert_called_once_with(volume) mock_vops.get_backing.assert_called_once_with('src_snapshot_name') @mock.patch('cinder.volume.drivers.vmware.vmdk.VMwareEsxVmdkDriver.' 
'volumeops', new_callable=mock.PropertyMock) def test_create_cloned_volume_with_backing(self, mock_vops): """Test create_cloned_volume with a backing.""" mock_vops = mock_vops.return_value driver = self._driver volume = mock.sentinel.volume fake_size = 1 src_vref = {'name': 'src_snapshot_name', 'size': fake_size} backing = mock.sentinel.backing driver._verify_volume_creation = mock.MagicMock() mock_vops.get_backing.return_value = backing src_vmdk = "[datastore] src_vm/src_vm.vmdk" mock_vops.get_vmdk_path.return_value = src_vmdk driver._create_backing_by_copying = mock.MagicMock() # invoke the create_volume_from_snapshot api driver.create_cloned_volume(volume, src_vref) # verify calls driver._verify_volume_creation.assert_called_once_with(volume) mock_vops.get_backing.assert_called_once_with('src_snapshot_name') mock_vops.get_vmdk_path.assert_called_once_with(backing) driver._create_backing_by_copying.assert_called_once_with(volume, src_vmdk, fake_size) @mock.patch.object(VMDK_DRIVER, '_extend_volumeops_virtual_disk') @mock.patch.object(VMDK_DRIVER, '_create_backing_in_inventory') @mock.patch.object(VMDK_DRIVER, 'volumeops') def test_create_backing_by_copying(self, volumeops, create_backing, _extend_virtual_disk): self._test_create_backing_by_copying(volumeops, create_backing, _extend_virtual_disk) def _test_create_backing_by_copying(self, volumeops, create_backing, _extend_virtual_disk): """Test _create_backing_by_copying.""" fake_volume = {'size': 2, 'name': 'fake_volume-0000000000001'} fake_size = 1 fake_src_vmdk_path = "[datastore] src_vm/src_vm.vmdk" fake_backing = mock.sentinel.backing fake_vmdk_path = mock.sentinel.path #"[datastore] dest_vm/dest_vm.vmdk" fake_dc = mock.sentinel.datacenter create_backing.return_value = fake_backing volumeops.get_vmdk_path.return_value = fake_vmdk_path volumeops.get_dc.return_value = fake_dc # Test with fake_volume['size'] greater than fake_size self._driver._create_backing_by_copying(fake_volume, fake_src_vmdk_path, fake_size) create_backing.assert_called_once_with(fake_volume) volumeops.get_vmdk_path.assert_called_once_with(fake_backing) volumeops.get_dc.assert_called_once_with(fake_backing) volumeops.delete_vmdk_file.assert_called_once_with(fake_vmdk_path, fake_dc) volumeops.copy_vmdk_file.assert_called_once_with(fake_dc, fake_src_vmdk_path, fake_vmdk_path) _extend_virtual_disk.assert_called_once_with(fake_volume['size'], fake_vmdk_path, fake_dc) # Reset all the mocks and test with fake_volume['size'] # not greater than fake_size _extend_virtual_disk.reset_mock() fake_size = 2 self._driver._create_backing_by_copying(fake_volume, fake_src_vmdk_path, fake_size) self.assertFalse(_extend_virtual_disk.called) @mock.patch('cinder.volume.drivers.vmware.vmdk.VMwareEsxVmdkDriver.' 'volumeops', new_callable=mock.PropertyMock) def test_create_volume_from_snapshot_without_backing(self, mock_vops): """Test create_volume_from_snapshot without a backing.""" mock_vops = mock_vops.return_value driver = self._driver volume = {'name': 'mock_vol'} snapshot = {'volume_name': 'mock_vol', 'name': 'mock_snap'} driver._verify_volume_creation = mock.MagicMock() mock_vops.get_backing.return_value = None # invoke the create_volume_from_snapshot api driver.create_volume_from_snapshot(volume, snapshot) # verify calls driver._verify_volume_creation.assert_called_once_with(volume) mock_vops.get_backing.assert_called_once_with('mock_vol') @mock.patch('cinder.volume.drivers.vmware.vmdk.VMwareEsxVmdkDriver.' 
'volumeops', new_callable=mock.PropertyMock) def test_create_volume_from_snap_without_backing_snap(self, mock_vops): """Test create_volume_from_snapshot without a backing snapshot.""" mock_vops = mock_vops.return_value driver = self._driver volume = {'volume_type_id': None, 'name': 'mock_vol'} snapshot = {'volume_name': 'mock_vol', 'name': 'mock_snap'} backing = mock.sentinel.backing driver._verify_volume_creation = mock.MagicMock() mock_vops.get_backing.return_value = backing mock_vops.get_snapshot.return_value = None # invoke the create_volume_from_snapshot api driver.create_volume_from_snapshot(volume, snapshot) # verify calls driver._verify_volume_creation.assert_called_once_with(volume) mock_vops.get_backing.assert_called_once_with('mock_vol') mock_vops.get_snapshot.assert_called_once_with(backing, 'mock_snap') @mock.patch('cinder.volume.drivers.vmware.vmdk.VMwareEsxVmdkDriver.' 'volumeops', new_callable=mock.PropertyMock) def test_create_volume_from_snapshot(self, mock_vops): """Test create_volume_from_snapshot.""" mock_vops = mock_vops.return_value driver = self._driver volume = {'volume_type_id': None, 'name': 'mock_vol'} snapshot = {'volume_name': 'mock_vol', 'name': 'mock_snap', 'volume_size': 1} fake_size = snapshot['volume_size'] backing = mock.sentinel.backing snap_moref = mock.sentinel.snap_moref driver._verify_volume_creation = mock.MagicMock() mock_vops.get_backing.return_value = backing mock_vops.get_snapshot.return_value = snap_moref src_vmdk = "[datastore] src_vm/src_vm-001.vmdk" mock_vops.get_vmdk_path.return_value = src_vmdk driver._create_backing_by_copying = mock.MagicMock() # invoke the create_volume_from_snapshot api driver.create_volume_from_snapshot(volume, snapshot) # verify calls driver._verify_volume_creation.assert_called_once_with(volume) mock_vops.get_backing.assert_called_once_with('mock_vol') mock_vops.get_snapshot.assert_called_once_with(backing, 'mock_snap') mock_vops.get_vmdk_path.assert_called_once_with(snap_moref) driver._create_backing_by_copying.assert_called_once_with(volume, src_vmdk, fake_size) @mock.patch.object(VMDK_DRIVER, '_select_ds_for_volume') @mock.patch.object(VMDK_DRIVER, '_extend_vmdk_virtual_disk') @mock.patch.object(VMDK_DRIVER, 'volumeops') def test_extend_volume(self, volume_ops, _extend_virtual_disk, _select_ds_for_volume): """Test extend_volume.""" self._test_extend_volume(volume_ops, _extend_virtual_disk, _select_ds_for_volume) def _test_extend_volume(self, volume_ops, _extend_virtual_disk, _select_ds_for_volume): fake_name = u'volume-00000001' new_size = '21' fake_size = '20' fake_vol = {'project_id': 'testprjid', 'name': fake_name, 'size': fake_size, 'id': 'a720b3c0-d1f0-11e1-9b23-0800200c9a66'} fake_host = mock.sentinel.host fake_rp = mock.sentinel.rp fake_folder = mock.sentinel.folder fake_summary = mock.Mock(spec=object) fake_summary.datastore = mock.sentinel.datastore fake_summary.name = 'fake_name' fake_backing = mock.sentinel.backing volume_ops.get_backing.return_value = fake_backing # If there is enough space in the datastore, where the volume is # located, then the rest of this method will not be called. 
self._driver.extend_volume(fake_vol, new_size) _extend_virtual_disk.assert_called_with(fake_name, new_size) self.assertFalse(_select_ds_for_volume.called) self.assertFalse(volume_ops.get_backing.called) self.assertFalse(volume_ops.relocate_backing.called) self.assertFalse(volume_ops.move_backing_to_folder.called) # If there is not enough space in the datastore, where the volume is # located, then the rest of this method will be called. The first time # _extend_virtual_disk is called, VimFaultException is raised. The # second time it is called, there is no exception. _extend_virtual_disk.reset_mock() _extend_virtual_disk.side_effect = [error_util. VimFaultException(mock.Mock(), 'Error'), None] # When _select_ds_for_volume raises no exception. _select_ds_for_volume.return_value = (fake_host, fake_rp, fake_folder, fake_summary) self._driver.extend_volume(fake_vol, new_size) _select_ds_for_volume.assert_called_with(new_size) volume_ops.get_backing.assert_called_with(fake_name) volume_ops.relocate_backing.assert_called_with(fake_backing, fake_summary.datastore, fake_rp, fake_host) _extend_virtual_disk.assert_called_with(fake_name, new_size) volume_ops.move_backing_to_folder.assert_called_with(fake_backing, fake_folder) # If get_backing raises error_util.VimException, # this exception will be caught for volume extend. _extend_virtual_disk.reset_mock() _extend_virtual_disk.side_effect = [error_util. VimFaultException(mock.Mock(), 'Error'), None] volume_ops.get_backing.side_effect = error_util.VimException('Error') self.assertRaises(error_util.VimException, self._driver.extend_volume, fake_vol, new_size) # If _select_ds_for_volume raised an exception, the rest code will # not be called. _extend_virtual_disk.reset_mock() volume_ops.get_backing.reset_mock() volume_ops.relocate_backing.reset_mock() volume_ops.move_backing_to_folder.reset_mock() _extend_virtual_disk.side_effect = [error_util. 
VimFaultException(mock.Mock(), 'Error'), None] _select_ds_for_volume.side_effect = error_util.VimException('Error') self.assertRaises(error_util.VimException, self._driver.extend_volume, fake_vol, new_size) _extend_virtual_disk.assert_called_once_with(fake_name, new_size) self.assertFalse(volume_ops.get_backing.called) self.assertFalse(volume_ops.relocate_backing.called) self.assertFalse(volume_ops.move_backing_to_folder.called) def test_copy_image_to_volume_non_vmdk(self): """Test copy_image_to_volume for a non-vmdk disk format.""" fake_context = mock.sentinel.context fake_image_id = 'image-123456789' fake_image_meta = {'disk_format': 'novmdk'} image_service = mock.Mock() image_service.show.return_value = fake_image_meta fake_volume = {'name': 'fake_name', 'size': 1} self.assertRaises(exception.ImageUnacceptable, self._driver.copy_image_to_volume, fake_context, fake_volume, image_service, fake_image_id) @mock.patch.object(VMDK_DRIVER, '_extend_vmdk_virtual_disk') @mock.patch('cinder.openstack.common.uuidutils.generate_uuid') @mock.patch.object(VMDK_DRIVER, '_select_ds_for_volume') @mock.patch.object(VMDK_DRIVER, 'volumeops') @mock.patch.object(VMDK_DRIVER, '_create_virtual_disk_from_preallocated_image') @mock.patch.object(VMDK_DRIVER, '_create_virtual_disk_from_sparse_image') @mock.patch( 'cinder.volume.drivers.vmware.vmdk.VMwareEsxVmdkDriver._get_disk_type') @mock.patch.object(VMDK_DRIVER, '_get_ds_name_folder_path') @mock.patch.object(VMDK_DRIVER, '_create_backing_in_inventory') def test_copy_image_to_volume_non_stream_optimized( self, create_backing, get_ds_name_folder_path, get_disk_type, create_disk_from_sparse_image, create_disk_from_preallocated_image, vops, select_ds_for_volume, generate_uuid, extend_disk): self._test_copy_image_to_volume_non_stream_optimized( create_backing, get_ds_name_folder_path, get_disk_type, create_disk_from_sparse_image, create_disk_from_preallocated_image, vops, select_ds_for_volume, generate_uuid, extend_disk) def _test_copy_image_to_volume_non_stream_optimized( self, create_backing, get_ds_name_folder_path, get_disk_type, create_disk_from_sparse_image, create_disk_from_preallocated_image, vops, select_ds_for_volume, generate_uuid, extend_disk): image_size_in_bytes = 2 * units.Gi adapter_type = 'lsiLogic' image_meta = {'disk_format': 'vmdk', 'size': image_size_in_bytes, 'properties': {'vmware_disktype': 'sparse', 'vmwware_adaptertype': adapter_type}} image_service = mock.Mock(glance.GlanceImageService) image_service.show.return_value = image_meta backing = mock.Mock() def create_backing_mock(volume, create_params): self.assertTrue(create_params[vmdk.CREATE_PARAM_DISK_LESS]) return backing create_backing.side_effect = create_backing_mock ds_name = mock.Mock() folder_path = mock.Mock() get_ds_name_folder_path.return_value = (ds_name, folder_path) summary = mock.Mock() select_ds_for_volume.return_value = (mock.sentinel.host, mock.sentinel.rp, mock.sentinel.folder, summary) uuid = "6b77b25a-9136-470e-899e-3c930e570d8e" generate_uuid.return_value = uuid host = mock.Mock() dc_ref = mock.Mock() vops.get_host.return_value = host vops.get_dc.return_value = dc_ref disk_type = vmdk.EAGER_ZEROED_THICK_VMDK_TYPE get_disk_type.return_value = disk_type path = mock.Mock() create_disk_from_sparse_image.return_value = path create_disk_from_preallocated_image.return_value = path volume_size = 2 vops.get_disk_size.return_value = volume_size * units.Gi context = mock.Mock() volume = {'name': 'volume_name', 'id': 'volume_id', 'size': volume_size} image_id = mock.Mock() 
self._driver.copy_image_to_volume( context, volume, image_service, image_id) create_params = {vmdk.CREATE_PARAM_DISK_LESS: True, vmdk.CREATE_PARAM_BACKING_NAME: uuid} create_backing.assert_called_once_with(volume, create_params) create_disk_from_sparse_image.assert_called_once_with( context, image_service, image_id, image_size_in_bytes, dc_ref, ds_name, folder_path, uuid) vops.attach_disk_to_backing.assert_called_once_with( backing, image_size_in_bytes / units.Ki, disk_type, adapter_type, path.get_descriptor_ds_file_path()) select_ds_for_volume.assert_called_once_with(volume) vops.clone_backing.assert_called_once_with( volume['name'], backing, None, volumeops.FULL_CLONE_TYPE, summary.datastore, disk_type, mock.sentinel.host) vops.delete_backing.assert_called_once_with(backing) self.assertFalse(extend_disk.called) vops.get_disk_size.return_value = 1 * units.Gi create_backing.reset_mock() vops.attach_disk_to_backing.reset_mock() vops.delete_backing.reset_mock() image_meta['properties']['vmware_disktype'] = 'preallocated' self._driver.copy_image_to_volume( context, volume, image_service, image_id) del create_params[vmdk.CREATE_PARAM_BACKING_NAME] create_backing.assert_called_once_with(volume, create_params) create_disk_from_preallocated_image.assert_called_once_with( context, image_service, image_id, image_size_in_bytes, dc_ref, ds_name, folder_path, volume['name'], adapter_type) vops.attach_disk_to_backing.assert_called_once_with( backing, image_size_in_bytes / units.Ki, disk_type, adapter_type, path.get_descriptor_ds_file_path()) extend_disk.assert_called_once_with(volume['name'], volume['size']) extend_disk.reset_mock() create_disk_from_preallocated_image.side_effect = ( error_util.VimException("Error")) self.assertRaises(error_util.VimException, self._driver.copy_image_to_volume, context, volume, image_service, image_id) vops.delete_backing.assert_called_once_with(backing) self.assertFalse(extend_disk.called) @mock.patch( 'cinder.volume.drivers.vmware.volumeops.FlatExtentVirtualDiskPath') @mock.patch.object(VMDK_DRIVER, '_copy_image') @mock.patch.object(VMDK_DRIVER, 'volumeops') def test_create_virtual_disk_from_preallocated_image( self, vops, copy_image, flat_extent_path): self._test_create_virtual_disk_from_preallocated_image( vops, copy_image, flat_extent_path) def _test_create_virtual_disk_from_preallocated_image( self, vops, copy_image, flat_extent_path): context = mock.Mock() image_service = mock.Mock() image_id = mock.Mock() image_size_in_bytes = 2 * units.Gi dc_ref = mock.Mock() ds_name = "nfs" folder_path = "A/B/" disk_name = "disk-1" adapter_type = "ide" src_path = mock.Mock() flat_extent_path.return_value = src_path ret = self._driver._create_virtual_disk_from_preallocated_image( context, image_service, image_id, image_size_in_bytes, dc_ref, ds_name, folder_path, disk_name, adapter_type) create_descriptor = vops.create_flat_extent_virtual_disk_descriptor create_descriptor.assert_called_once_with( dc_ref, src_path, image_size_in_bytes / units.Ki, adapter_type, vmdk.EAGER_ZEROED_THICK_VMDK_TYPE) copy_image.assert_called_once_with( context, dc_ref, image_service, image_id, image_size_in_bytes, ds_name, src_path.get_flat_extent_file_path()) self.assertEqual(src_path, ret) create_descriptor.reset_mock() copy_image.reset_mock() copy_image.side_effect = error_util.VimException("error") self.assertRaises( error_util.VimException, self._driver._create_virtual_disk_from_preallocated_image, context, image_service, image_id, image_size_in_bytes, dc_ref, ds_name, folder_path, disk_name, 
adapter_type) vops.delete_file.assert_called_once_with( src_path.get_descriptor_ds_file_path(), dc_ref) @mock.patch( 'cinder.volume.drivers.vmware.volumeops.' 'MonolithicSparseVirtualDiskPath') @mock.patch( 'cinder.volume.drivers.vmware.volumeops.FlatExtentVirtualDiskPath') @mock.patch.object(VMDK_DRIVER, '_copy_temp_virtual_disk') @mock.patch.object(VMDK_DRIVER, '_copy_image') def test_create_virtual_disk_from_sparse_image( self, copy_image, copy_temp_virtual_disk, flat_extent_path, sparse_path): self._test_create_virtual_disk_from_sparse_image( copy_image, copy_temp_virtual_disk, flat_extent_path, sparse_path) def _test_create_virtual_disk_from_sparse_image( self, copy_image, copy_temp_virtual_disk, flat_extent_path, sparse_path): context = mock.Mock() image_service = mock.Mock() image_id = mock.Mock() image_size_in_bytes = 2 * units.Gi dc_ref = mock.Mock() ds_name = "nfs" folder_path = "A/B/" disk_name = "disk-1" src_path = mock.Mock() sparse_path.return_value = src_path dest_path = mock.Mock() flat_extent_path.return_value = dest_path ret = self._driver._create_virtual_disk_from_sparse_image( context, image_service, image_id, image_size_in_bytes, dc_ref, ds_name, folder_path, disk_name) copy_image.assert_called_once_with( context, dc_ref, image_service, image_id, image_size_in_bytes, ds_name, src_path.get_descriptor_file_path()) copy_temp_virtual_disk.assert_called_once_with( dc_ref, src_path, dest_path) self.assertEqual(dest_path, ret) @mock.patch.object(vmware_images, 'fetch_stream_optimized_image') @mock.patch.object(VMDK_DRIVER, '_extend_vmdk_virtual_disk') @mock.patch.object(VMDK_DRIVER, '_select_ds_for_volume') @mock.patch.object(VMDK_DRIVER, '_get_storage_profile_id') @mock.patch.object(VMDK_DRIVER, 'session') @mock.patch.object(VMDK_DRIVER, 'volumeops') def test_copy_image_to_volume_stream_optimized(self, volumeops, session, get_profile_id, _select_ds_for_volume, _extend_virtual_disk, fetch_optimized_image): """Test copy_image_to_volume. Test with an acceptable vmdk disk format and streamOptimized disk type. 
""" self._test_copy_image_to_volume_stream_optimized(volumeops, session, get_profile_id, _select_ds_for_volume, _extend_virtual_disk, fetch_optimized_image) def _test_copy_image_to_volume_stream_optimized(self, volumeops, session, get_profile_id, _select_ds_for_volume, _extend_virtual_disk, fetch_optimized_image): fake_context = mock.Mock() fake_backing = mock.sentinel.backing fake_image_id = 'image-id' size = 5 * units.Gi size_gb = float(size) / units.Gi fake_volume_size = 1 + size_gb adapter_type = 'ide' fake_image_meta = {'disk_format': 'vmdk', 'size': size, 'properties': {'vmware_disktype': 'streamOptimized', 'vmware_adaptertype': adapter_type}} image_service = mock.Mock(glance.GlanceImageService) fake_host = mock.sentinel.host fake_rp = mock.sentinel.rp fake_folder = mock.sentinel.folder fake_summary = mock.sentinel.summary fake_summary.name = "datastore-1" fake_vm_create_spec = mock.sentinel.spec fake_disk_type = 'thin' vol_name = 'fake_volume name' vol_id = '12345' fake_volume = {'name': vol_name, 'id': vol_id, 'size': fake_volume_size, 'volume_type_id': None} cf = session.vim.client.factory vm_import_spec = cf.create('ns0:VirtualMachineImportSpec') vm_import_spec.configSpec = fake_vm_create_spec timeout = self._config.vmware_image_transfer_timeout_secs image_service.show.return_value = fake_image_meta volumeops.get_create_spec.return_value = fake_vm_create_spec volumeops.get_backing.return_value = fake_backing # If _select_ds_for_volume raises an exception, get_create_spec # will not be called. _select_ds_for_volume.side_effect = error_util.VimException('Error') self.assertRaises(exception.VolumeBackendAPIException, self._driver.copy_image_to_volume, fake_context, fake_volume, image_service, fake_image_id) self.assertFalse(volumeops.get_create_spec.called) # If the volume size is greater then than the backing's disk size, # _extend_vmdk_virtual_disk will be called. _select_ds_for_volume.side_effect = None _select_ds_for_volume.return_value = (fake_host, fake_rp, fake_folder, fake_summary) profile_id = 'profile-1' get_profile_id.return_value = profile_id volumeops.get_disk_size.return_value = size self._driver.copy_image_to_volume(fake_context, fake_volume, image_service, fake_image_id) image_service.show.assert_called_with(fake_context, fake_image_id) _select_ds_for_volume.assert_called_with(fake_volume) get_profile_id.assert_called_once_with(fake_volume) volumeops.get_create_spec.assert_called_with(fake_volume['name'], 0, fake_disk_type, fake_summary.name, profile_id, adapter_type) self.assertTrue(fetch_optimized_image.called) fetch_optimized_image.assert_called_with(fake_context, timeout, image_service, fake_image_id, session=session, host=self.IP, resource_pool=fake_rp, vm_folder=fake_folder, vm_create_spec= vm_import_spec, image_size=size) _extend_virtual_disk.assert_called_once_with(fake_volume['name'], fake_volume_size) # If the volume size is not greater then than backing's disk size, # _extend_vmdk_virtual_disk will not be called. volumeops.get_disk_size.return_value = fake_volume_size * units.Gi _extend_virtual_disk.reset_mock() self._driver.copy_image_to_volume(fake_context, fake_volume, image_service, fake_image_id) self.assertFalse(_extend_virtual_disk.called) # If fetch_stream_optimized_image raises an exception, # get_backing and delete_backing will be called. 
fetch_optimized_image.side_effect = exception.CinderException self.assertRaises(exception.CinderException, self._driver.copy_image_to_volume, fake_context, fake_volume, image_service, fake_image_id) volumeops.get_backing.assert_called_with(fake_volume['name']) volumeops.delete_backing.assert_called_with(fake_backing) self.assertFalse(_extend_virtual_disk.called) def test_copy_volume_to_image_non_vmdk(self): """Test copy_volume_to_image for a non-vmdk disk format.""" m = self.mox image_meta = FakeObject() image_meta['disk_format'] = 'novmdk' volume = FakeObject() volume['name'] = 'vol-name' volume['instance_uuid'] = None volume['attached_host'] = None m.ReplayAll() self.assertRaises(exception.ImageUnacceptable, self._driver.copy_volume_to_image, mox.IgnoreArg(), volume, mox.IgnoreArg(), image_meta) m.UnsetStubs() m.VerifyAll() def test_copy_volume_to_image_when_attached(self): """Test copy_volume_to_image when volume is attached.""" m = self.mox volume = FakeObject() volume['instance_uuid'] = 'my_uuid' m.ReplayAll() self.assertRaises(exception.InvalidVolume, self._driver.copy_volume_to_image, mox.IgnoreArg(), volume, mox.IgnoreArg(), mox.IgnoreArg()) m.UnsetStubs() m.VerifyAll() def test_copy_volume_to_image_vmdk(self): """Test copy_volume_to_image for a valid vmdk disk format.""" m = self.mox m.StubOutWithMock(self._driver.__class__, 'session') self._driver.session = self._session m.StubOutWithMock(api.VMwareAPISession, 'vim') self._session.vim = self._vim m.StubOutWithMock(self._driver.__class__, 'volumeops') self._driver.volumeops = self._volumeops image_id = 'image-id-1' image_meta = FakeObject() image_meta['disk_format'] = 'vmdk' image_meta['id'] = image_id image_meta['name'] = image_id image_service = FakeObject() vol_name = 'volume-123456789' project_id = 'project-owner-id-123' volume = FakeObject() volume['name'] = vol_name size_gb = 5 size = size_gb * units.Gi volume['size'] = size_gb volume['project_id'] = project_id volume['instance_uuid'] = None volume['attached_host'] = None # volumeops.get_backing backing = FakeMor("VirtualMachine", "my_vm") m.StubOutWithMock(self._volumeops, 'get_backing') self._volumeops.get_backing(vol_name).AndReturn(backing) # volumeops.get_vmdk_path datastore_name = 'datastore1' file_path = 'my_folder/my_nested_folder/my_vm.vmdk' vmdk_file_path = '[%s] %s' % (datastore_name, file_path) m.StubOutWithMock(self._volumeops, 'get_vmdk_path') self._volumeops.get_vmdk_path(backing).AndReturn(vmdk_file_path) # vmware_images.upload_image timeout = self._config.vmware_image_transfer_timeout_secs host_ip = self.IP m.StubOutWithMock(vmware_images, 'upload_image') vmware_images.upload_image(mox.IgnoreArg(), timeout, image_service, image_id, project_id, session=self._session, host=host_ip, vm=backing, vmdk_file_path=vmdk_file_path, vmdk_size=size, image_name=image_id, image_version=1) m.ReplayAll() self._driver.copy_volume_to_image(mox.IgnoreArg(), volume, image_service, image_meta) m.UnsetStubs() m.VerifyAll() def test_retrieve_properties_ex_fault_checker(self): """Test retrieve_properties_ex_fault_checker is called.""" m = self.mox class FakeVim(vim.Vim): def __init__(self): pass @property def client(self): class FakeRetrv(object): def RetrievePropertiesEx(self, collector): pass def __getattr__(self, name): if name == 'service': return FakeRetrv() return FakeRetrv() def RetrieveServiceContent(self, type='ServiceInstance'): return mox.MockAnything() _vim = FakeVim() m.ReplayAll() # retrieve_properties_ex_fault_checker throws authentication error 
self.assertRaises(error_util.VimFaultException, _vim.RetrievePropertiesEx, mox.IgnoreArg()) m.UnsetStubs() m.VerifyAll() @mock.patch.object(VMDK_DRIVER, '_delete_temp_backing') @mock.patch('cinder.openstack.common.uuidutils.generate_uuid') @mock.patch.object(VMDK_DRIVER, '_get_volume_group_folder') @mock.patch('cinder.volume.volume_types.get_volume_type_extra_specs') @mock.patch.object(VMDK_DRIVER, 'volumeops') @mock.patch.object(VMDK_DRIVER, 'ds_sel') def test_retype(self, ds_sel, vops, get_volume_type_extra_specs, get_volume_group_folder, generate_uuid, delete_temp_backing): self._test_retype(ds_sel, vops, get_volume_type_extra_specs, get_volume_group_folder, generate_uuid, delete_temp_backing) def _test_retype(self, ds_sel, vops, get_volume_type_extra_specs, get_volume_group_folder, genereate_uuid, delete_temp_backing): self._driver._storage_policy_enabled = True context = mock.sentinel.context diff = mock.sentinel.diff host = mock.sentinel.host new_type = {'id': 'abc'} # Test with in-use volume. vol = {'size': 1, 'status': 'retyping', 'name': 'vol-1', 'volume_type_id': 'def', 'instance_uuid': '583a8dbb'} self.assertFalse(self._driver.retype(context, vol, new_type, diff, host)) # Test with no backing. vops.get_backing.return_value = None vol['instance_uuid'] = None self.assertTrue(self._driver.retype(context, vol, new_type, diff, host)) # Test with no disk type conversion, no profile change and # compliant datastore. ds_value = mock.sentinel.datastore_value datastore = mock.Mock(value=ds_value) vops.get_datastore.return_value = datastore backing = mock.sentinel.backing vops.get_backing.return_value = backing get_volume_type_extra_specs.side_effect = [vmdk.THIN_VMDK_TYPE, vmdk.THIN_VMDK_TYPE, None, None] ds_sel.is_datastore_compliant.return_value = True self.assertTrue(self._driver.retype(context, vol, new_type, diff, host)) # Test with no disk type conversion, profile change and # compliant datastore. new_profile = mock.sentinel.new_profile get_volume_type_extra_specs.side_effect = [vmdk.THIN_VMDK_TYPE, vmdk.THIN_VMDK_TYPE, 'gold-1', new_profile] ds_sel.is_datastore_compliant.return_value = True profile_id = mock.sentinel.profile_id ds_sel.get_profile_id.return_value = profile_id self.assertTrue(self._driver.retype(context, vol, new_type, diff, host)) vops.change_backing_profile.assert_called_once_with(backing, profile_id) # Test with disk type conversion, profile change and a backing with # snapshots. Also test the no candidate datastore case. get_volume_type_extra_specs.side_effect = [vmdk.THICK_VMDK_TYPE, vmdk.THIN_VMDK_TYPE, 'gold-1', new_profile] vops.snapshot_exists.return_value = True ds_sel.select_datastore.return_value = () self.assertFalse(self._driver.retype(context, vol, new_type, diff, host)) exp_req = {hub.DatastoreSelector.HARD_ANTI_AFFINITY_DS: [ds_value], hub.DatastoreSelector.PROFILE_NAME: new_profile, hub.DatastoreSelector.SIZE_BYTES: units.Gi} ds_sel.select_datastore.assert_called_once_with(exp_req) # Modify the previous case with a candidate datastore which is # different than the backing's current datastore. 
get_volume_type_extra_specs.side_effect = [vmdk.THICK_VMDK_TYPE, vmdk.THIN_VMDK_TYPE, 'gold-1', new_profile] vops.snapshot_exists.return_value = True host = mock.sentinel.host rp = mock.sentinel.rp candidate_ds = mock.Mock(value=mock.sentinel.candidate_ds_value) summary = mock.Mock(datastore=candidate_ds) ds_sel.select_datastore.return_value = (host, rp, summary) folder = mock.sentinel.folder get_volume_group_folder.return_value = folder vops.change_backing_profile.reset_mock() self.assertTrue(self._driver.retype(context, vol, new_type, diff, host)) vops.relocate_backing.assert_called_once_with( backing, candidate_ds, rp, host, vmdk.THIN_VMDK_TYPE) vops.move_backing_to_folder.assert_called_once_with(backing, folder) vops.change_backing_profile.assert_called_once_with(backing, profile_id) # Modify the previous case with no profile change. get_volume_type_extra_specs.side_effect = [vmdk.THICK_VMDK_TYPE, vmdk.THIN_VMDK_TYPE, 'gold-1', 'gold-1'] ds_sel.select_datastore.reset_mock() vops.relocate_backing.reset_mock() vops.move_backing_to_folder.reset_mock() vops.change_backing_profile.reset_mock() self.assertTrue(self._driver.retype(context, vol, new_type, diff, host)) exp_req = {hub.DatastoreSelector.HARD_ANTI_AFFINITY_DS: [ds_value], hub.DatastoreSelector.PROFILE_NAME: 'gold-1', hub.DatastoreSelector.SIZE_BYTES: units.Gi} ds_sel.select_datastore.assert_called_once_with(exp_req) vops.relocate_backing.assert_called_once_with( backing, candidate_ds, rp, host, vmdk.THIN_VMDK_TYPE) vops.move_backing_to_folder.assert_called_once_with(backing, folder) self.assertFalse(vops.change_backing_profile.called) # Test with disk type conversion, profile change, backing with # no snapshots and candidate datastore which is same as the backing # datastore. get_volume_type_extra_specs.side_effect = [vmdk.THICK_VMDK_TYPE, vmdk.THIN_VMDK_TYPE, 'gold-1', new_profile] vops.snapshot_exists.return_value = False summary.datastore = datastore uuid = '025b654b-d4ed-47f9-8014-b71a7744eafc' genereate_uuid.return_value = uuid clone = mock.sentinel.clone vops.clone_backing.return_value = clone vops.change_backing_profile.reset_mock() self.assertTrue(self._driver.retype(context, vol, new_type, diff, host)) vops.rename_backing.assert_called_once_with(backing, uuid) vops.clone_backing.assert_called_once_with( vol['name'], backing, None, volumeops.FULL_CLONE_TYPE, datastore, vmdk.THIN_VMDK_TYPE, host) delete_temp_backing.assert_called_once_with(backing) vops.change_backing_profile.assert_called_once_with(clone, profile_id) # Modify the previous case with exception during clone. 
get_volume_type_extra_specs.side_effect = [vmdk.THICK_VMDK_TYPE, vmdk.THIN_VMDK_TYPE, 'gold-1', new_profile] vops.clone_backing.side_effect = error_util.VimException('error') vops.rename_backing.reset_mock() vops.change_backing_profile.reset_mock() self.assertRaises( error_util.VimException, self._driver.retype, context, vol, new_type, diff, host) exp_rename_calls = [mock.call(backing, uuid), mock.call(backing, vol['name'])] self.assertEqual(exp_rename_calls, vops.rename_backing.call_args_list) self.assertFalse(vops.change_backing_profile.called) @mock.patch.object(VMDK_DRIVER, 'volumeops') def test_extend_vmdk_virtual_disk(self, volume_ops): """Test vmdk._extend_vmdk_virtual_disk.""" self._test_extend_vmdk_virtual_disk(volume_ops) def _test_extend_vmdk_virtual_disk(self, volume_ops): fake_backing = mock.sentinel.backing fake_vmdk_path = "[datastore] dest_vm/dest_vm.vmdk" fake_dc = mock.sentinel.datacenter fake_name = 'fake_name' fake_size = 7 # If the backing is None, get_vmdk_path and get_dc # will not be called volume_ops.get_backing.return_value = None volume_ops.get_vmdk_path.return_value = fake_vmdk_path volume_ops.get_dc.return_value = fake_dc self._driver._extend_vmdk_virtual_disk(fake_name, fake_size) volume_ops.get_backing.assert_called_once_with(fake_name) self.assertFalse(volume_ops.get_vmdk_path.called) self.assertFalse(volume_ops.get_dc.called) self.assertFalse(volume_ops.extend_virtual_disk.called) # Reset the mock and set the backing with a fake, # all the mocks should be called. volume_ops.get_backing.reset_mock() volume_ops.get_backing.return_value = fake_backing self._driver._extend_vmdk_virtual_disk(fake_name, fake_size) volume_ops.get_vmdk_path.assert_called_once_with(fake_backing) volume_ops.get_dc.assert_called_once_with(fake_backing) volume_ops.extend_virtual_disk.assert_called_once_with(fake_size, fake_vmdk_path, fake_dc) # Test the exceptional case for extend_virtual_disk volume_ops.extend_virtual_disk.side_effect = error_util.VimException( 'VimException raised.') self.assertRaises(error_util.VimException, self._driver._extend_vmdk_virtual_disk, fake_name, fake_size) @mock.patch.object(vmware_images, 'download_stream_optimized_disk') @mock.patch('cinder.openstack.common.fileutils.file_open') @mock.patch.object(VMDK_DRIVER, '_temporary_file') @mock.patch('cinder.openstack.common.uuidutils.generate_uuid') @mock.patch.object(VMDK_DRIVER, '_create_backing_in_inventory') @mock.patch.object(VMDK_DRIVER, 'volumeops') @mock.patch.object(VMDK_DRIVER, 'session') def test_backup_volume(self, session, vops, create_backing, generate_uuid, temporary_file, file_open, download_disk): self._test_backup_volume(session, vops, create_backing, generate_uuid, temporary_file, file_open, download_disk) def _test_backup_volume(self, session, vops, create_backing, generate_uuid, temporary_file, file_open, download_disk): volume = {'name': 'vol-1', 'id': 1, 'size': 1} self._db.volume_get.return_value = volume vops.get_backing.return_value = None backing = mock.sentinel.backing create_backing.return_value = backing uuid = "c1037b23-c5e9-4446-815f-3e097cbf5bb0" generate_uuid.return_value = uuid tmp_file_path = mock.sentinel.tmp_file_path temporary_file_ret = mock.Mock() temporary_file.return_value = temporary_file_ret temporary_file_ret.__enter__ = mock.Mock(return_value=tmp_file_path) temporary_file_ret.__exit__ = mock.Mock(return_value=None) vmdk_path = mock.sentinel.vmdk_path vops.get_vmdk_path.return_value = vmdk_path tmp_file = mock.sentinel.tmp_file file_open_ret = mock.Mock() 
file_open.return_value = file_open_ret file_open_ret.__enter__ = mock.Mock(return_value=tmp_file) file_open_ret.__exit__ = mock.Mock(return_value=None) context = mock.sentinel.context backup = {'id': 2, 'volume_id': 1} backup_service = mock.Mock() self._driver.backup_volume(context, backup, backup_service) create_backing.assert_called_once_with(volume) temporary_file.assert_called_once_with(suffix=".vmdk", prefix=uuid) self.assertEqual(mock.call(tmp_file_path, "wb"), file_open.call_args_list[0]) download_disk.assert_called_once_with( context, self.IMG_TX_TIMEOUT, tmp_file, session=session, host=self.IP, vm=backing, vmdk_file_path=vmdk_path, vmdk_size=volume['size'] * units.Gi) self.assertEqual(mock.call(tmp_file_path, "rb"), file_open.call_args_list[1]) backup_service.backup.assert_called_once_with(backup, tmp_file) @mock.patch.object(VMDK_DRIVER, 'extend_volume') @mock.patch.object(VMDK_DRIVER, '_restore_backing') @mock.patch('cinder.openstack.common.fileutils.file_open') @mock.patch.object(VMDK_DRIVER, '_temporary_file') @mock.patch('cinder.openstack.common.uuidutils.generate_uuid') @mock.patch.object(VMDK_DRIVER, 'volumeops') def test_restore_backup(self, vops, generate_uuid, temporary_file, file_open, restore_backing, extend_volume): self._test_restore_backup(vops, generate_uuid, temporary_file, file_open, restore_backing, extend_volume) def _test_restore_backup( self, vops, generate_uuid, temporary_file, file_open, restore_backing, extend_volume): volume = {'name': 'vol-1', 'id': 1, 'size': 1} backup = {'id': 2, 'size': 1} context = mock.sentinel.context backup_service = mock.Mock() backing = mock.sentinel.backing vops.get_backing.return_value = backing vops.snapshot_exists.return_value = True self.assertRaises( exception.InvalidVolume, self._driver.restore_backup, context, backup, volume, backup_service) uuid = "c1037b23-c5e9-4446-815f-3e097cbf5bb0" generate_uuid.return_value = uuid tmp_file_path = mock.sentinel.tmp_file_path temporary_file_ret = mock.Mock() temporary_file.return_value = temporary_file_ret temporary_file_ret.__enter__ = mock.Mock(return_value=tmp_file_path) temporary_file_ret.__exit__ = mock.Mock(return_value=None) tmp_file = mock.sentinel.tmp_file file_open_ret = mock.Mock() file_open.return_value = file_open_ret file_open_ret.__enter__ = mock.Mock(return_value=tmp_file) file_open_ret.__exit__ = mock.Mock(return_value=None) vops.snapshot_exists.return_value = False self._driver.restore_backup(context, backup, volume, backup_service) temporary_file.assert_called_once_with(suffix=".vmdk", prefix=uuid) file_open.assert_called_once_with(tmp_file_path, "wb") backup_service.restore.assert_called_once_with( backup, volume['id'], tmp_file) restore_backing.assert_called_once_with( context, volume, backing, tmp_file_path, backup['size'] * units.Gi) self.assertFalse(extend_volume.called) temporary_file.reset_mock() file_open.reset_mock() backup_service.reset_mock() restore_backing.reset_mock() volume = {'name': 'vol-1', 'id': 1, 'size': 2} self._driver.restore_backup(context, backup, volume, backup_service) temporary_file.assert_called_once_with(suffix=".vmdk", prefix=uuid) file_open.assert_called_once_with(tmp_file_path, "wb") backup_service.restore.assert_called_once_with( backup, volume['id'], tmp_file) restore_backing.assert_called_once_with( context, volume, backing, tmp_file_path, backup['size'] * units.Gi) extend_volume.assert_called_once_with(volume, volume['size']) @mock.patch.object(VMDK_DRIVER, '_delete_temp_backing') @mock.patch.object(VMDK_DRIVER, 'volumeops') 
@mock.patch( 'cinder.volume.drivers.vmware.vmdk.VMwareEsxVmdkDriver._get_disk_type') @mock.patch.object(VMDK_DRIVER, '_select_ds_for_volume') @mock.patch.object(VMDK_DRIVER, '_create_backing_from_stream_optimized_file') @mock.patch('cinder.openstack.common.uuidutils.generate_uuid') def test_restore_backing( self, generate_uuid, create_backing, select_ds, get_disk_type, vops, delete_temp_backing): self._test_restore_backing( generate_uuid, create_backing, select_ds, get_disk_type, vops, delete_temp_backing) def _test_restore_backing( self, generate_uuid, create_backing, select_ds, get_disk_type, vops, delete_temp_backing): src_uuid = "c1037b23-c5e9-4446-815f-3e097cbf5bb0" generate_uuid.return_value = src_uuid src = mock.sentinel.src create_backing.return_value = src summary = mock.Mock() summary.datastore = mock.sentinel.datastore select_ds.return_value = (mock.sentinel.host, mock.ANY, mock.ANY, summary) disk_type = vmdk.THIN_VMDK_TYPE get_disk_type.return_value = disk_type context = mock.sentinel.context volume = {'name': 'vol-1', 'id': 1, 'size': 1} backing = None tmp_file_path = mock.sentinel.tmp_file_path backup_size = units.Gi self._driver._restore_backing( context, volume, backing, tmp_file_path, backup_size) create_backing.assert_called_once_with( context, src_uuid, volume, tmp_file_path, backup_size) vops.clone_backing.assert_called_once_with( volume['name'], src, None, volumeops.FULL_CLONE_TYPE, summary.datastore, disk_type, mock.sentinel.host) delete_temp_backing.assert_called_once_with(src) create_backing.reset_mock() vops.clone_backing.reset_mock() delete_temp_backing.reset_mock() dest_uuid = "de4b0708-f947-4abe-98f8-75e52ce03b7b" tmp_uuid = "82c2a4f0-9064-4d95-bd88-6567a36018fa" generate_uuid.side_effect = [src_uuid, dest_uuid, tmp_uuid] dest = mock.sentinel.dest vops.clone_backing.return_value = dest backing = mock.sentinel.backing self._driver._restore_backing( context, volume, backing, tmp_file_path, backup_size) create_backing.assert_called_once_with( context, src_uuid, volume, tmp_file_path, backup_size) vops.clone_backing.assert_called_once_with( dest_uuid, src, None, volumeops.FULL_CLONE_TYPE, summary.datastore, disk_type, mock.sentinel.host) exp_rename_calls = [mock.call(backing, tmp_uuid), mock.call(dest, volume['name'])] self.assertEqual(exp_rename_calls, vops.rename_backing.call_args_list) exp_delete_temp_backing_calls = [mock.call(backing), mock.call(src)] self.assertEqual(exp_delete_temp_backing_calls, delete_temp_backing.call_args_list) delete_temp_backing.reset_mock() vops.rename_backing.reset_mock() def vops_rename(backing, new_name): if backing == dest and new_name == volume['name']: raise error_util.VimException("error") vops.rename_backing.side_effect = vops_rename generate_uuid.side_effect = [src_uuid, dest_uuid, tmp_uuid] self.assertRaises( error_util.VimException, self._driver._restore_backing, context, volume, backing, tmp_file_path, backup_size) exp_rename_calls = [mock.call(backing, tmp_uuid), mock.call(dest, volume['name']), mock.call(backing, volume['name'])] self.assertEqual(exp_rename_calls, vops.rename_backing.call_args_list) exp_delete_temp_backing_calls = [mock.call(dest), mock.call(src)] self.assertEqual(exp_delete_temp_backing_calls, delete_temp_backing.call_args_list) @mock.patch.object(VMDK_DRIVER, '_delete_temp_backing') @mock.patch.object(vmware_images, 'upload_stream_optimized_disk') @mock.patch('cinder.openstack.common.fileutils.file_open') @mock.patch.object(VMDK_DRIVER, 'volumeops') @mock.patch( 
'cinder.volume.drivers.vmware.vmdk.VMwareEsxVmdkDriver._get_disk_type') @mock.patch.object(VMDK_DRIVER, '_get_storage_profile_id') @mock.patch.object(VMDK_DRIVER, 'session') @mock.patch.object(VMDK_DRIVER, '_select_ds_for_volume') def test_create_backing_from_stream_optimized_file( self, select_ds, session, get_storage_profile_id, get_disk_type, vops, file_open, upload_disk, delete_temp_backing): self._test_create_backing_from_stream_optimized_file( select_ds, session, get_storage_profile_id, get_disk_type, vops, file_open, upload_disk, delete_temp_backing) def _test_create_backing_from_stream_optimized_file( self, select_ds, session, get_storage_profile_id, get_disk_type, vops, file_open, upload_disk, delete_temp_backing): rp = mock.sentinel.rp folder = mock.sentinel.folder summary = mock.Mock() summary.name = mock.sentinel.name select_ds.return_value = (mock.ANY, rp, folder, summary) import_spec = mock.Mock() session.vim.client.factory.create.return_value = import_spec profile_id = 'profile-1' get_storage_profile_id.return_value = profile_id disk_type = vmdk.THIN_VMDK_TYPE get_disk_type.return_value = disk_type create_spec = mock.Mock() vops.get_create_spec.return_value = create_spec tmp_file = mock.sentinel.tmp_file file_open_ret = mock.Mock() file_open.return_value = file_open_ret file_open_ret.__enter__ = mock.Mock(return_value=tmp_file) file_open_ret.__exit__ = mock.Mock(return_value=None) vm_ref = mock.sentinel.vm_ref upload_disk.return_value = vm_ref context = mock.sentinel.context name = 'vm-1' volume = {'name': 'vol-1', 'id': 1, 'size': 1} tmp_file_path = mock.sentinel.tmp_file_path file_size_bytes = units.Gi ret = self._driver._create_backing_from_stream_optimized_file( context, name, volume, tmp_file_path, file_size_bytes) self.assertEqual(vm_ref, ret) vops.get_create_spec.assert_called_once_with( name, 0, disk_type, summary.name, profile_id) file_open.assert_called_once_with(tmp_file_path, "rb") upload_disk.assert_called_once_with( context, self.IMG_TX_TIMEOUT, tmp_file, session=session, host=self.IP, resource_pool=rp, vm_folder=folder, vm_create_spec=import_spec, vmdk_size=file_size_bytes) upload_disk.side_effect = error_util.VimException("error") backing = mock.sentinel.backing vops.get_backing.return_value = backing self.assertRaises( error_util.VimException, self._driver._create_backing_from_stream_optimized_file, context, name, volume, tmp_file_path, file_size_bytes) delete_temp_backing.assert_called_once_with(backing) class VMwareVcVmdkDriverTestCase(VMwareEsxVmdkDriverTestCase): """Test class for VMwareVcVmdkDriver.""" VMDK_DRIVER = vmdk.VMwareVcVmdkDriver DEFAULT_VC_VERSION = '5.5' def setUp(self): super(VMwareVcVmdkDriverTestCase, self).setUp() self._config.vmware_host_version = self.DEFAULT_VC_VERSION self._driver = vmdk.VMwareVcVmdkDriver(configuration=self._config, db=self._db) def test_get_pbm_wsdl_location(self): # no version returns None wsdl = self._driver._get_pbm_wsdl_location(None) self.assertIsNone(wsdl) def expected_wsdl(version): driver_dir = os.path.join(os.path.dirname(__file__), '..', 'volume', 'drivers', 'vmware') driver_abs_dir = os.path.abspath(driver_dir) return 'file://' + os.path.join(driver_abs_dir, 'wsdl', version, 'pbmService.wsdl') # verify wsdl path for different version strings with mock.patch('os.path.exists') as path_exists: path_exists.return_value = True wsdl = self._driver._get_pbm_wsdl_location(LooseVersion('5')) self.assertEqual(expected_wsdl('5'), wsdl) wsdl = self._driver._get_pbm_wsdl_location(LooseVersion('5.5')) 
self.assertEqual(expected_wsdl('5.5'), wsdl) wsdl = self._driver._get_pbm_wsdl_location(LooseVersion('5.5.1')) self.assertEqual(expected_wsdl('5.5'), wsdl) # if wsdl path does not exist, then it returns None path_exists.return_value = False wsdl = self._driver._get_pbm_wsdl_location(LooseVersion('5.5')) self.assertIsNone(wsdl) @mock.patch('cinder.volume.drivers.vmware.vmdk.VMwareVcVmdkDriver.' 'session', new_callable=mock.PropertyMock) def test_get_vc_version(self, session): # test config overrides fetching from VC server version = self._driver._get_vc_version() self.assertEqual(self.DEFAULT_VC_VERSION, version) # explicitly remove config entry self._driver.configuration.vmware_host_version = None session.return_value.vim.service_content.about.version = '6.0.1' version = self._driver._get_vc_version() self.assertEqual(LooseVersion('6.0.1'), version) @mock.patch('cinder.volume.drivers.vmware.vmdk.VMwareVcVmdkDriver.' '_get_vc_version') @mock.patch('cinder.volume.drivers.vmware.vmdk.VMwareVcVmdkDriver.' 'session', new_callable=mock.PropertyMock) def test_do_setup_with_pbm_disabled(self, session, get_vc_version): session_obj = mock.Mock(name='session') session.return_value = session_obj get_vc_version.return_value = LooseVersion('5.0') self._driver.do_setup(mock.ANY) self.assertFalse(self._driver._storage_policy_enabled) get_vc_version.assert_called_once_with() self.assertEqual(session_obj, self._driver.volumeops._session) self.assertEqual(session_obj, self._driver.ds_sel._session) @mock.patch('cinder.volume.drivers.vmware.vmdk.VMwareVcVmdkDriver.' '_get_pbm_wsdl_location') @mock.patch('cinder.volume.drivers.vmware.vmdk.VMwareVcVmdkDriver.' '_get_vc_version') def test_do_setup_with_invalid_pbm_wsdl(self, get_vc_version, get_pbm_wsdl_location): vc_version = LooseVersion('5.5') get_vc_version.return_value = vc_version get_pbm_wsdl_location.return_value = None self.assertRaises(error_util.VMwareDriverException, self._driver.do_setup, mock.ANY) self.assertFalse(self._driver._storage_policy_enabled) get_vc_version.assert_called_once_with() get_pbm_wsdl_location.assert_called_once_with(vc_version) @mock.patch('cinder.volume.drivers.vmware.vmdk.VMwareVcVmdkDriver.' '_get_pbm_wsdl_location') @mock.patch('cinder.volume.drivers.vmware.vmdk.VMwareVcVmdkDriver.' '_get_vc_version') @mock.patch('cinder.volume.drivers.vmware.vmdk.VMwareVcVmdkDriver.' 
'session', new_callable=mock.PropertyMock) def test_do_setup(self, session, get_vc_version, get_pbm_wsdl_location): session_obj = mock.Mock(name='session') session.return_value = session_obj vc_version = LooseVersion('5.5') get_vc_version.return_value = vc_version get_pbm_wsdl_location.return_value = 'file:///pbm.wsdl' self._driver.do_setup(mock.ANY) self.assertTrue(self._driver._storage_policy_enabled) get_vc_version.assert_called_once_with() get_pbm_wsdl_location.assert_called_once_with(vc_version) self.assertEqual(session_obj, self._driver.volumeops._session) self.assertEqual(session_obj, self._driver.ds_sel._session) @mock.patch.object(VMDK_DRIVER, '_extend_volumeops_virtual_disk') @mock.patch.object(VMDK_DRIVER, '_create_backing_in_inventory') @mock.patch.object(VMDK_DRIVER, 'volumeops') def test_create_backing_by_copying(self, volumeops, create_backing, extend_virtual_disk): self._test_create_backing_by_copying(volumeops, create_backing, extend_virtual_disk) def test_init_conn_with_instance_and_backing(self): """Test initialize_connection with instance and backing.""" m = self.mox m.StubOutWithMock(self._driver.__class__, 'volumeops') self._driver.volumeops = self._volumeops m.StubOutWithMock(self._volumeops, 'get_backing') volume = FakeObject() volume['name'] = 'volume_name' volume['id'] = 'volume_id' volume['size'] = 1 connector = {'instance': 'my_instance'} backing = FakeMor('VirtualMachine', 'my_back') self._volumeops.get_backing(volume['name']).AndReturn(backing) m.StubOutWithMock(self._volumeops, 'get_host') host = FakeMor('HostSystem', 'my_host') self._volumeops.get_host(mox.IgnoreArg()).AndReturn(host) datastore = FakeMor('Datastore', 'my_ds') resource_pool = FakeMor('ResourcePool', 'my_rp') m.StubOutWithMock(self._volumeops, 'get_dss_rp') self._volumeops.get_dss_rp(host).AndReturn(([datastore], resource_pool)) m.StubOutWithMock(self._volumeops, 'get_datastore') self._volumeops.get_datastore(backing).AndReturn(datastore) m.ReplayAll() conn_info = self._driver.initialize_connection(volume, connector) self.assertEqual(conn_info['driver_volume_type'], 'vmdk') self.assertEqual(conn_info['data']['volume'], 'my_back') self.assertEqual(conn_info['data']['volume_id'], 'volume_id') m.UnsetStubs() m.VerifyAll() def test_get_volume_group_folder(self): """Test _get_volume_group_folder.""" m = self.mox m.StubOutWithMock(self._driver.__class__, 'volumeops') self._driver.volumeops = self._volumeops datacenter = FakeMor('Datacenter', 'my_dc') m.StubOutWithMock(self._volumeops, 'get_vmfolder') self._volumeops.get_vmfolder(datacenter) m.StubOutWithMock(self._volumeops, 'create_folder') self._volumeops.create_folder(mox.IgnoreArg(), self._config.vmware_volume_folder) m.ReplayAll() self._driver._get_volume_group_folder(datacenter) m.UnsetStubs() m.VerifyAll() def test_init_conn_with_instance_and_backing_and_relocation(self): """Test initialize_connection with backing being relocated.""" m = self.mox m.StubOutWithMock(self._driver.__class__, 'volumeops') self._driver.volumeops = self._volumeops m.StubOutWithMock(self._volumeops, 'get_backing') volume = FakeObject() volume['name'] = 'volume_name' volume['id'] = 'volume_id' volume['size'] = 1 connector = {'instance': 'my_instance'} backing = FakeMor('VirtualMachine', 'my_back') self._volumeops.get_backing(volume['name']).AndReturn(backing) m.StubOutWithMock(self._volumeops, 'get_host') host = FakeMor('HostSystem', 'my_host') self._volumeops.get_host(mox.IgnoreArg()).AndReturn(host) datastore1 = FakeMor('Datastore', 'my_ds_1') datastore2 = 
FakeMor('Datastore', 'my_ds_2') resource_pool = FakeMor('ResourcePool', 'my_rp') m.StubOutWithMock(self._volumeops, 'get_dss_rp') self._volumeops.get_dss_rp(host).AndReturn(([datastore1], resource_pool)) m.StubOutWithMock(self._volumeops, 'get_datastore') self._volumeops.get_datastore(backing).AndReturn(datastore2) m.StubOutWithMock(self._driver, '_get_folder_ds_summary') folder = FakeMor('Folder', 'my_fol') summary = FakeDatastoreSummary(1, 1, datastore1) self._driver._get_folder_ds_summary(volume, resource_pool, [datastore1]).AndReturn((folder, summary)) m.StubOutWithMock(self._volumeops, 'relocate_backing') self._volumeops.relocate_backing(backing, datastore1, resource_pool, host) m.StubOutWithMock(self._volumeops, 'move_backing_to_folder') self._volumeops.move_backing_to_folder(backing, folder) m.ReplayAll() conn_info = self._driver.initialize_connection(volume, connector) self.assertEqual(conn_info['driver_volume_type'], 'vmdk') self.assertEqual(conn_info['data']['volume'], 'my_back') self.assertEqual(conn_info['data']['volume_id'], 'volume_id') m.UnsetStubs() m.VerifyAll() @mock.patch.object(VMDK_DRIVER, '_extend_vmdk_virtual_disk') @mock.patch.object(VMDK_DRIVER, 'volumeops') def test_clone_backing_linked(self, volume_ops, _extend_vmdk_virtual_disk): """Test _clone_backing with clone type - linked.""" fake_size = 3 fake_volume = {'volume_type_id': None, 'name': 'fake_name', 'size': fake_size} fake_snapshot = {'volume_name': 'volume_name', 'name': 'snapshot_name', 'volume_size': 2} fake_type = volumeops.LINKED_CLONE_TYPE fake_backing = mock.sentinel.backing self._driver._clone_backing(fake_volume, fake_backing, fake_snapshot, volumeops.LINKED_CLONE_TYPE, fake_snapshot['volume_size']) volume_ops.clone_backing.assert_called_with(fake_volume['name'], fake_backing, fake_snapshot, fake_type, None, host=None) # If the volume size is greater than the original snapshot size, # _extend_vmdk_virtual_disk will be called. _extend_vmdk_virtual_disk.assert_called_with(fake_volume['name'], fake_volume['size']) # If the volume size is not greater than the original snapshot size, # _extend_vmdk_virtual_disk will not be called. 
fake_size = 2 fake_volume['size'] = fake_size _extend_vmdk_virtual_disk.reset_mock() self._driver._clone_backing(fake_volume, fake_backing, fake_snapshot, volumeops.LINKED_CLONE_TYPE, fake_snapshot['volume_size']) self.assertFalse(_extend_vmdk_virtual_disk.called) @mock.patch.object(VMDK_DRIVER, '_extend_vmdk_virtual_disk') @mock.patch.object(VMDK_DRIVER, '_select_ds_for_volume') @mock.patch.object(VMDK_DRIVER, 'volumeops') def test_clone_backing_full(self, volume_ops, _select_ds_for_volume, _extend_vmdk_virtual_disk): """Test _clone_backing with clone type - full.""" fake_host = mock.sentinel.host fake_backing = mock.sentinel.backing fake_folder = mock.sentinel.folder fake_datastore = mock.sentinel.datastore fake_resource_pool = mock.sentinel.resourcePool fake_summary = mock.Mock(spec=object) fake_summary.datastore = fake_datastore fake_size = 3 fake_volume = {'volume_type_id': None, 'name': 'fake_name', 'size': fake_size} fake_snapshot = {'volume_name': 'volume_name', 'name': 'snapshot_name', 'volume_size': 2} _select_ds_for_volume.return_value = (fake_host, fake_resource_pool, fake_folder, fake_summary) self._driver._clone_backing(fake_volume, fake_backing, fake_snapshot, volumeops.FULL_CLONE_TYPE, fake_snapshot['volume_size']) _select_ds_for_volume.assert_called_with(fake_volume) volume_ops.clone_backing.assert_called_with(fake_volume['name'], fake_backing, fake_snapshot, volumeops.FULL_CLONE_TYPE, fake_datastore, host=fake_host) # If the volume size is greater than the original snapshot size, # _extend_vmdk_virtual_disk will be called. _extend_vmdk_virtual_disk.assert_called_with(fake_volume['name'], fake_volume['size']) # If the volume size is not greater than the original snapshot size, # _extend_vmdk_virtual_disk will not be called. fake_size = 2 fake_volume['size'] = fake_size _extend_vmdk_virtual_disk.reset_mock() self._driver._clone_backing(fake_volume, fake_backing, fake_snapshot, volumeops.FULL_CLONE_TYPE, fake_snapshot['volume_size']) self.assertFalse(_extend_vmdk_virtual_disk.called) @mock.patch('cinder.volume.drivers.vmware.vmdk.VMwareVcVmdkDriver.' 'volumeops', new_callable=mock.PropertyMock) def test_create_volume_from_snapshot_without_backing(self, mock_vops): """Test create_volume_from_snapshot without a backing.""" mock_vops = mock_vops.return_value driver = self._driver volume = {'name': 'mock_vol'} snapshot = {'volume_name': 'mock_vol', 'name': 'mock_snap'} driver._verify_volume_creation = mock.MagicMock() mock_vops.get_backing.return_value = None # invoke the create_volume_from_snapshot api driver.create_volume_from_snapshot(volume, snapshot) # verify calls driver._verify_volume_creation.assert_called_once_with(volume) mock_vops.get_backing.assert_called_once_with('mock_vol') @mock.patch('cinder.volume.drivers.vmware.vmdk.VMwareVcVmdkDriver.' 
'volumeops', new_callable=mock.PropertyMock) def test_create_volume_from_snap_without_backing_snap(self, mock_vops): """Test create_volume_from_snapshot without a backing snapshot.""" mock_vops = mock_vops.return_value driver = self._driver volume = {'volume_type_id': None, 'name': 'mock_vol'} snapshot = {'volume_name': 'mock_vol', 'name': 'mock_snap'} backing = mock.sentinel.backing driver._verify_volume_creation = mock.MagicMock() mock_vops.get_backing.return_value = backing mock_vops.get_snapshot.return_value = None # invoke the create_volume_from_snapshot api driver.create_volume_from_snapshot(volume, snapshot) # verify calls driver._verify_volume_creation.assert_called_once_with(volume) mock_vops.get_backing.assert_called_once_with('mock_vol') mock_vops.get_snapshot.assert_called_once_with(backing, 'mock_snap') @mock.patch('cinder.volume.drivers.vmware.vmdk.VMwareVcVmdkDriver.' 'volumeops', new_callable=mock.PropertyMock) def test_create_volume_from_snapshot(self, mock_vops): """Test create_volume_from_snapshot.""" mock_vops = mock_vops.return_value driver = self._driver volume = {'volume_type_id': None, 'name': 'mock_vol'} snapshot = {'volume_name': 'mock_vol', 'name': 'mock_snap', 'volume_size': 2} backing = mock.sentinel.backing snap_moref = mock.sentinel.snap_moref driver._verify_volume_creation = mock.MagicMock() mock_vops.get_backing.return_value = backing mock_vops.get_snapshot.return_value = snap_moref driver._clone_backing = mock.MagicMock() # invoke the create_volume_from_snapshot api driver.create_volume_from_snapshot(volume, snapshot) # verify calls driver._verify_volume_creation.assert_called_once_with(volume) mock_vops.get_backing.assert_called_once_with('mock_vol') mock_vops.get_snapshot.assert_called_once_with(backing, 'mock_snap') default_clone_type = volumeops.FULL_CLONE_TYPE driver._clone_backing.assert_called_once_with(volume, backing, snap_moref, default_clone_type, snapshot['volume_size']) @mock.patch('cinder.volume.drivers.vmware.vmdk.VMwareVcVmdkDriver.' 'volumeops', new_callable=mock.PropertyMock) def test_create_cloned_volume_without_backing(self, mock_vops): """Test create_cloned_volume without a backing.""" mock_vops = mock_vops.return_value driver = self._driver volume = {'name': 'mock_vol'} src_vref = {'name': 'src_snapshot_name'} driver._verify_volume_creation = mock.MagicMock() mock_vops.get_backing.return_value = None # invoke the create_volume_from_snapshot api driver.create_cloned_volume(volume, src_vref) @mock.patch('cinder.volume.drivers.vmware.vmdk.VMwareVcVmdkDriver.' 'volumeops', new_callable=mock.PropertyMock) def test_create_cloned_volume_with_backing(self, mock_vops): """Test create_cloned_volume with clone type - full.""" mock_vops = mock_vops.return_value driver = self._driver volume = {'volume_type_id': None, 'name': 'mock_vol'} src_vref = {'name': 'src_snapshot_name', 'size': 1} backing = mock.sentinel.backing driver._verify_volume_creation = mock.MagicMock() mock_vops.get_backing.return_value = backing default_clone_type = volumeops.FULL_CLONE_TYPE driver._clone_backing = mock.MagicMock() # invoke the create_volume_from_snapshot api driver.create_cloned_volume(volume, src_vref) # verify calls driver._verify_volume_creation.assert_called_once_with(volume) mock_vops.get_backing.assert_called_once_with('src_snapshot_name') driver._clone_backing.assert_called_once_with(volume, backing, None, default_clone_type, src_vref['size']) @mock.patch('cinder.volume.drivers.vmware.vmdk.VMwareVcVmdkDriver.' 
'volumeops', new_callable=mock.PropertyMock) @mock.patch('cinder.volume.drivers.vmware.vmdk.VMwareVcVmdkDriver.' '_get_clone_type') def test_create_linked_cloned_volume_with_backing(self, get_clone_type, mock_vops): """Test create_cloned_volume with clone type - linked.""" mock_vops = mock_vops.return_value driver = self._driver volume = {'volume_type_id': None, 'name': 'mock_vol', 'id': 'mock_id'} src_vref = {'name': 'src_snapshot_name', 'status': 'available', 'size': 1} backing = mock.sentinel.backing driver._verify_volume_creation = mock.MagicMock() mock_vops.get_backing.return_value = backing linked_clone = volumeops.LINKED_CLONE_TYPE get_clone_type.return_value = linked_clone driver._clone_backing = mock.MagicMock() mock_vops.create_snapshot = mock.MagicMock() mock_vops.create_snapshot.return_value = mock.sentinel.snapshot # invoke the create_volume_from_snapshot api driver.create_cloned_volume(volume, src_vref) # verify calls driver._verify_volume_creation.assert_called_once_with(volume) mock_vops.get_backing.assert_called_once_with('src_snapshot_name') get_clone_type.assert_called_once_with(volume) name = 'snapshot-%s' % volume['id'] mock_vops.create_snapshot.assert_called_once_with(backing, name, None) driver._clone_backing.assert_called_once_with(volume, backing, mock.sentinel.snapshot, linked_clone, src_vref['size']) @mock.patch('cinder.volume.drivers.vmware.vmdk.VMwareVcVmdkDriver.' 'volumeops', new_callable=mock.PropertyMock) @mock.patch('cinder.volume.drivers.vmware.vmdk.VMwareVcVmdkDriver.' '_get_clone_type') def test_create_linked_cloned_volume_when_attached(self, get_clone_type, mock_vops): """Test create_cloned_volume linked clone when volume is attached.""" mock_vops = mock_vops.return_value driver = self._driver volume = {'volume_type_id': None, 'name': 'mock_vol', 'id': 'mock_id'} src_vref = {'name': 'src_snapshot_name', 'status': 'in-use'} backing = mock.sentinel.backing driver._verify_volume_creation = mock.MagicMock() mock_vops.get_backing.return_value = backing linked_clone = volumeops.LINKED_CLONE_TYPE get_clone_type.return_value = linked_clone # invoke the create_volume_from_snapshot api self.assertRaises(exception.InvalidVolume, driver.create_cloned_volume, volume, src_vref) # verify calls driver._verify_volume_creation.assert_called_once_with(volume) mock_vops.get_backing.assert_called_once_with('src_snapshot_name') get_clone_type.assert_called_once_with(volume) @mock.patch('cinder.volume.volume_types.get_volume_type_extra_specs') def test_get_storage_profile(self, get_volume_type_extra_specs): """Test vmdk _get_storage_profile.""" # volume with no type id returns None volume = FakeObject() volume['volume_type_id'] = None sp = self._driver._get_storage_profile(volume) self.assertEqual(None, sp, "Without a volume_type_id no storage " "profile should be returned.") # profile associated with the volume type should be returned fake_id = 'fake_volume_id' volume['volume_type_id'] = fake_id get_volume_type_extra_specs.return_value = 'fake_profile' profile = self._driver._get_storage_profile(volume) self.assertEqual('fake_profile', profile) spec_key = 'vmware:storage_profile' get_volume_type_extra_specs.assert_called_once_with(fake_id, spec_key) # None should be returned when no storage profile is # associated with the volume type get_volume_type_extra_specs.return_value = False profile = self._driver._get_storage_profile(volume) self.assertIsNone(profile) @mock.patch('cinder.volume.drivers.vmware.vim_util.' 
'convert_datastores_to_hubs') @mock.patch('cinder.volume.drivers.vmware.vim_util.' 'convert_hubs_to_datastores') @mock.patch('cinder.volume.drivers.vmware.vmdk.VMwareVcVmdkDriver.' 'session', new_callable=mock.PropertyMock) @mock.patch('cinder.volume.drivers.vmware.vmdk.VMwareVcVmdkDriver.' 'volumeops', new_callable=mock.PropertyMock) def test_filter_ds_by_profile(self, volumeops, session, hubs_to_ds, ds_to_hubs): """Test vmdk _filter_ds_by_profile() method.""" volumeops = volumeops.return_value session = session.return_value # Test with no profile id datastores = [mock.sentinel.ds1, mock.sentinel.ds2] profile = 'fake_profile' volumeops.retrieve_profile_id.return_value = None self.assertRaises(error_util.VimException, self._driver._filter_ds_by_profile, datastores, profile) volumeops.retrieve_profile_id.assert_called_once_with(profile) # Test with a fake profile id profileId = 'fake_profile_id' filtered_dss = [mock.sentinel.ds1] # patch method calls from _filter_ds_by_profile volumeops.retrieve_profile_id.return_value = profileId pbm_cf = mock.sentinel.pbm_cf session.pbm.client.factory = pbm_cf hubs = [mock.sentinel.hub1, mock.sentinel.hub2] ds_to_hubs.return_value = hubs volumeops.filter_matching_hubs.return_value = mock.sentinel.hubs hubs_to_ds.return_value = filtered_dss # call _filter_ds_by_profile with a fake profile actual_dss = self._driver._filter_ds_by_profile(datastores, profile) # verify return value and called methods self.assertEqual(filtered_dss, actual_dss, "Wrong filtered datastores returned.") ds_to_hubs.assert_called_once_with(pbm_cf, datastores) volumeops.filter_matching_hubs.assert_called_once_with(hubs, profileId) hubs_to_ds.assert_called_once_with(mock.sentinel.hubs, datastores) @mock.patch('cinder.volume.drivers.vmware.vmdk.VMwareVcVmdkDriver.' 'session', new_callable=mock.PropertyMock) @mock.patch('cinder.volume.drivers.vmware.vmdk.VMwareVcVmdkDriver.' 
'volumeops', new_callable=mock.PropertyMock) def test_get_folder_ds_summary(self, volumeops, session): """Test _get_folder_ds_summary.""" volumeops = volumeops.return_value driver = self._driver driver._storage_policy_enabled = True volume = {'size': 10, 'volume_type_id': 'fake_type'} rp = mock.sentinel.resource_pool dss = [mock.sentinel.datastore1, mock.sentinel.datastore2] filtered_dss = [mock.sentinel.datastore1] profile = mock.sentinel.profile def filter_ds(datastores, storage_profile): return filtered_dss # patch method calls from _get_folder_ds_summary volumeops.get_dc.return_value = mock.sentinel.dc volumeops.get_vmfolder.return_value = mock.sentinel.vmfolder volumeops.create_folder.return_value = mock.sentinel.folder driver._get_storage_profile = mock.MagicMock() driver._get_storage_profile.return_value = profile driver._filter_ds_by_profile = mock.MagicMock(side_effect=filter_ds) driver._select_datastore_summary = mock.MagicMock() driver._select_datastore_summary.return_value = mock.sentinel.summary # call _get_folder_ds_summary (folder, datastore_summary) = driver._get_folder_ds_summary(volume, rp, dss) # verify returned values and calls made self.assertEqual(mock.sentinel.folder, folder, "Folder returned is wrong.") self.assertEqual(mock.sentinel.summary, datastore_summary, "Datastore summary returned is wrong.") volumeops.get_dc.assert_called_once_with(rp) volumeops.get_vmfolder.assert_called_once_with(mock.sentinel.dc) volumeops.create_folder.assert_called_once_with(mock.sentinel.vmfolder, self.VOLUME_FOLDER) driver._get_storage_profile.assert_called_once_with(volume) driver._filter_ds_by_profile.assert_called_once_with(dss, profile) size = volume['size'] * units.Gi driver._select_datastore_summary.assert_called_once_with(size, filtered_dss) # Clear side effects. driver._filter_ds_by_profile.side_effect = None @mock.patch.object(VMDK_DRIVER, 'volumeops') def test_extend_vmdk_virtual_disk(self, volume_ops): """Test vmdk._extend_vmdk_virtual_disk.""" self._test_extend_vmdk_virtual_disk(volume_ops) @mock.patch.object(VMDK_DRIVER, '_extend_vmdk_virtual_disk') @mock.patch('cinder.openstack.common.uuidutils.generate_uuid') @mock.patch.object(VMDK_DRIVER, '_select_ds_for_volume') @mock.patch.object(VMDK_DRIVER, 'volumeops') @mock.patch.object(VMDK_DRIVER, '_create_virtual_disk_from_preallocated_image') @mock.patch.object(VMDK_DRIVER, '_create_virtual_disk_from_sparse_image') @mock.patch( 'cinder.volume.drivers.vmware.vmdk.VMwareEsxVmdkDriver._get_disk_type') @mock.patch.object(VMDK_DRIVER, '_get_ds_name_folder_path') @mock.patch.object(VMDK_DRIVER, '_create_backing_in_inventory') def test_copy_image_to_volume_non_stream_optimized( self, create_backing, get_ds_name_folder_path, get_disk_type, create_disk_from_sparse_image, create_disk_from_preallocated_image, vops, select_ds_for_volume, generate_uuid, extend_disk): self._test_copy_image_to_volume_non_stream_optimized( create_backing, get_ds_name_folder_path, get_disk_type, create_disk_from_sparse_image, create_disk_from_preallocated_image, vops, select_ds_for_volume, generate_uuid, extend_disk) @mock.patch( 'cinder.volume.drivers.vmware.volumeops.FlatExtentVirtualDiskPath') @mock.patch.object(VMDK_DRIVER, '_copy_image') @mock.patch.object(VMDK_DRIVER, 'volumeops') def test_create_virtual_disk_from_preallocated_image( self, vops, copy_image, flat_extent_path): self._test_create_virtual_disk_from_preallocated_image( vops, copy_image, flat_extent_path) @mock.patch( 'cinder.volume.drivers.vmware.volumeops.' 
'MonolithicSparseVirtualDiskPath') @mock.patch( 'cinder.volume.drivers.vmware.volumeops.FlatExtentVirtualDiskPath') @mock.patch.object(VMDK_DRIVER, '_copy_temp_virtual_disk') @mock.patch.object(VMDK_DRIVER, '_copy_image') def test_create_virtual_disk_from_sparse_image( self, copy_image, copy_temp_virtual_disk, flat_extent_path, sparse_path): self._test_create_virtual_disk_from_sparse_image( copy_image, copy_temp_virtual_disk, flat_extent_path, sparse_path) @mock.patch.object(vmware_images, 'fetch_stream_optimized_image') @mock.patch.object(VMDK_DRIVER, '_extend_vmdk_virtual_disk') @mock.patch.object(VMDK_DRIVER, '_select_ds_for_volume') @mock.patch.object(VMDK_DRIVER, '_get_storage_profile_id') @mock.patch.object(VMDK_DRIVER, 'session') @mock.patch.object(VMDK_DRIVER, 'volumeops') def test_copy_image_to_volume_stream_optimized(self, volumeops, session, get_profile_id, _select_ds_for_volume, _extend_virtual_disk, fetch_optimized_image): """Test copy_image_to_volume. Test with an acceptable vmdk disk format and streamOptimized disk type. """ self._test_copy_image_to_volume_stream_optimized(volumeops, session, get_profile_id, _select_ds_for_volume, _extend_virtual_disk, fetch_optimized_image) @mock.patch.object(VMDK_DRIVER, '_delete_temp_backing') @mock.patch('cinder.openstack.common.uuidutils.generate_uuid') @mock.patch.object(VMDK_DRIVER, '_get_volume_group_folder') @mock.patch('cinder.volume.volume_types.get_volume_type_extra_specs') @mock.patch.object(VMDK_DRIVER, 'volumeops') @mock.patch.object(VMDK_DRIVER, 'ds_sel') def test_retype(self, ds_sel, vops, get_volume_type_extra_specs, get_volume_group_folder, generate_uuid, delete_temp_backing): self._test_retype(ds_sel, vops, get_volume_type_extra_specs, get_volume_group_folder, generate_uuid, delete_temp_backing) @mock.patch.object(VMDK_DRIVER, '_select_ds_for_volume') @mock.patch.object(VMDK_DRIVER, '_extend_vmdk_virtual_disk') @mock.patch.object(VMDK_DRIVER, 'volumeops') def test_extend_volume(self, volume_ops, _extend_virtual_disk, _select_ds_for_volume): """Test extend_volume.""" self._test_extend_volume(volume_ops, _extend_virtual_disk, _select_ds_for_volume) @mock.patch.object(vmware_images, 'download_stream_optimized_disk') @mock.patch('cinder.openstack.common.fileutils.file_open') @mock.patch.object(VMDK_DRIVER, '_temporary_file') @mock.patch('cinder.openstack.common.uuidutils.generate_uuid') @mock.patch.object(VMDK_DRIVER, '_create_backing_in_inventory') @mock.patch.object(VMDK_DRIVER, 'volumeops') @mock.patch.object(VMDK_DRIVER, 'session') def test_backup_volume(self, session, vops, create_backing, generate_uuid, temporary_file, file_open, download_disk): self._test_backup_volume(session, vops, create_backing, generate_uuid, temporary_file, file_open, download_disk) @mock.patch.object(VMDK_DRIVER, 'extend_volume') @mock.patch.object(VMDK_DRIVER, '_restore_backing') @mock.patch('cinder.openstack.common.fileutils.file_open') @mock.patch.object(VMDK_DRIVER, '_temporary_file') @mock.patch('cinder.openstack.common.uuidutils.generate_uuid') @mock.patch.object(VMDK_DRIVER, 'volumeops') def test_restore_backup(self, vops, generate_uuid, temporary_file, file_open, restore_backing, extend_volume): self._test_restore_backup(vops, generate_uuid, temporary_file, file_open, restore_backing, extend_volume) @mock.patch.object(VMDK_DRIVER, '_delete_temp_backing') @mock.patch.object(VMDK_DRIVER, 'volumeops') @mock.patch( 'cinder.volume.drivers.vmware.vmdk.VMwareEsxVmdkDriver._get_disk_type') @mock.patch.object(VMDK_DRIVER, '_select_ds_for_volume') 
@mock.patch.object(VMDK_DRIVER, '_create_backing_from_stream_optimized_file') @mock.patch('cinder.openstack.common.uuidutils.generate_uuid') def test_restore_backing( self, generate_uuid, create_backing, select_ds, get_disk_type, vops, delete_temp_backing): self._test_restore_backing( generate_uuid, create_backing, select_ds, get_disk_type, vops, delete_temp_backing) @mock.patch.object(VMDK_DRIVER, '_delete_temp_backing') @mock.patch.object(vmware_images, 'upload_stream_optimized_disk') @mock.patch('cinder.openstack.common.fileutils.file_open') @mock.patch.object(VMDK_DRIVER, 'volumeops') @mock.patch( 'cinder.volume.drivers.vmware.vmdk.VMwareEsxVmdkDriver._get_disk_type') @mock.patch.object(VMDK_DRIVER, '_get_storage_profile_id') @mock.patch.object(VMDK_DRIVER, 'session') @mock.patch.object(VMDK_DRIVER, '_select_ds_for_volume') def test_create_backing_from_stream_optimized_file( self, select_ds, session, get_storage_profile_id, get_disk_type, vops, file_open, upload_disk, delete_temp_backing): self._test_create_backing_from_stream_optimized_file( select_ds, session, get_storage_profile_id, get_disk_type, vops, file_open, upload_disk, delete_temp_backing) @mock.patch.object(VMDK_DRIVER, '_get_folder_ds_summary') @mock.patch.object(VMDK_DRIVER, 'volumeops') def test_create_backing_with_params(self, vops, get_folder_ds_summary): resource_pool = mock.sentinel.resource_pool vops.get_dss_rp.return_value = (mock.Mock(), resource_pool) folder = mock.sentinel.folder summary = mock.sentinel.summary get_folder_ds_summary.return_value = (folder, summary) volume = {'name': 'vol-1', 'volume_type_id': None, 'size': 1} host = mock.Mock() create_params = {vmdk.CREATE_PARAM_DISK_LESS: True} self._driver._create_backing(volume, host, create_params) vops.create_backing_disk_less.assert_called_once_with('vol-1', folder, resource_pool, host, summary.name, None) create_params = {vmdk.CREATE_PARAM_ADAPTER_TYPE: 'ide'} self._driver._create_backing(volume, host, create_params) vops.create_backing.assert_called_once_with('vol-1', units.Mi, vmdk.THIN_VMDK_TYPE, folder, resource_pool, host, summary.name, None, 'ide') vops.create_backing.reset_mock() backing_name = "temp-vol" create_params = {vmdk.CREATE_PARAM_BACKING_NAME: backing_name} self._driver._create_backing(volume, host, create_params) vops.create_backing.assert_called_once_with(backing_name, units.Mi, vmdk.THIN_VMDK_TYPE, folder, resource_pool, host, summary.name, None, 'lsiLogic') @mock.patch('cinder.openstack.common.fileutils.ensure_tree') @mock.patch('cinder.openstack.common.fileutils.delete_if_exists') @mock.patch('tempfile.mkstemp') @mock.patch('os.close') def test_temporary_file( self, close, mkstemp, delete_if_exists, ensure_tree): fd = mock.sentinel.fd tmp = mock.sentinel.tmp mkstemp.return_value = (fd, tmp) prefix = ".vmdk" suffix = "test" with self._driver._temporary_file(prefix=prefix, suffix=suffix) as tmp_file: self.assertEqual(tmp, tmp_file) ensure_tree.assert_called_once_with(self.TMP_DIR) mkstemp.assert_called_once_with(dir=self.TMP_DIR, prefix=prefix, suffix=suffix) close.assert_called_once_with(fd) delete_if_exists.assert_called_once_with(tmp) class ImageDiskTypeTest(test.TestCase): """Unit tests for ImageDiskType.""" def test_is_valid(self): self.assertTrue(vmdk.ImageDiskType.is_valid("thin")) self.assertTrue(vmdk.ImageDiskType.is_valid("preallocated")) self.assertTrue(vmdk.ImageDiskType.is_valid("streamOptimized")) self.assertTrue(vmdk.ImageDiskType.is_valid("sparse")) self.assertFalse(vmdk.ImageDiskType.is_valid("thick")) def 
test_validate(self): vmdk.ImageDiskType.validate("thin") vmdk.ImageDiskType.validate("preallocated") vmdk.ImageDiskType.validate("streamOptimized") vmdk.ImageDiskType.validate("sparse") self.assertRaises(exception.ImageUnacceptable, vmdk.ImageDiskType.validate, "thick")
This incense dish is an exact replica of an original from the 1st to 2nd century AD. The legs are decorated with three stylized lion heads. The dish is completely handmade of bronze. Its height is approx. 8.5 cm and its width is 7.2 cm.
""" A limited-memory DFP method for unconstrained minimization. A symmetric and positive definite approximation of the Hessian matrix is built and updated at each iteration following the Davidon-Fletcher-Powell formula. For efficiency, only the recent observed curvature is incorporated into the approximation, resulting in a *limited-memory* scheme. The main idea of this method is that the DFP formula is dual to the BFGS formula. Therefore, by swapping s and y in the (s,y) pairs, the InverseLBFGS class updates a limited-memory DFP approximation to the Hessian, rather than a limited-memory BFGS approximation to its inverse. """ from nlpy.model.amplpy import AmplModel from nlpy.optimize.solvers.lbfgs import InverseLBFGS from nlpy.optimize.solvers.trunk import TrunkFramework import numpy as np __docformat__ = 'restructuredtext' # Subclass InverseLBFGS to update a LDFP approximation to the Hessian # (as opposed to a LBFGS approximation to its inverse). class LDFP(InverseLBFGS): """ A limited-memory DFP framework for quasi-Newton methods. See the documentation of `InverseLBFGS`. """ def __init__(self, n, npairs=5, **kwargs): InverseLBFGS.__init__(self, n, npairs, **kwargs) def store(self, new_s, new_y): # Simply swap s and y. InverseLBFGS.store(self, new_y, new_s) class StructuredLDFP(InverseLBFGS): """ A limited-memory DFP framework for quasi-Newton methods that only memorizes updates corresponding to certain variables. This is useful when approximating the Hessian of a constraint with a sparse Jacobian. """ def __init__(self, n, npairs=5, **kwargs): """ See the documentation of `InverseLBFGS` for complete information. :keywords: :vars: List of variables participating in the quasi-Newton update. If `None`, all variables participate. """ self.on = n # Original value of n. self.vars = kwargs.get('vars', None) # None means all variables. if self.vars is None: nvars = n else: nvars = len(self.vars) # This next initialization will set self.n to nvars. # The original value of n was saved in self.on. InverseLBFGS.__init__(self, nvars, npairs, **kwargs) def store(self, new_s, new_y): """ Store a new (s,y) pair. This method takes "small" vectors as input, i.e., corresponding to the variables participating in the quasi-Newton update. """ InverseLBFGS.store(self, new_y, new_s) def matvec(self, v): """ Take a small vector and return a small vector giving the contribution of the Hessian approximation to the matrix-vector product. """ return InverseLBFGS.matvec(self, v) # Subclass solver TRUNK to maintain an LDFP approximation to the Hessian and # perform the LDFP matrix update at the end of each iteration. class LDFPTrunkFramework(TrunkFramework): def __init__(self, nlp, TR, TrSolver, **kwargs): TrunkFramework.__init__(self, nlp, TR, TrSolver, **kwargs) self.ldfp = LDFP(self.nlp.n, **kwargs) self.save_g = True def hprod(self, v, **kwargs): """ Compute the matrix-vector product between the limited-memory DFP approximation kept in storage and the vector `v`. """ return self.ldfp.matvec(v) def PostIteration(self, **kwargs): """ This method updates the limited-memory DFP approximation by appending the most recent (s,y) pair to it and possibly discarding the oldest one if all the memory has been used. """ if self.status != 'Rej': s = self.alpha * self.solver.step y = self.g - self.g_old self.ldfp.store(s, y) return None
The Canadian Beard Balm is a True North Strong and Free scent of Maple Bark and Wild Portage. Free the beard, eh.
#!/usr/bin/python
# -*- coding:utf-8 -*-
from flask import jsonify, request, url_for

from ..models import Post, Comment
from . import api
from ..shares import do_pagination


@api.route('/comments/')
def get_comments():
    """Return a paginated list of all comments, newest first."""
    query = Comment.query.order_by(Comment.created.desc())
    page = request.args.get('page', 1, type=int)
    pagination, comments = do_pagination(query)
    prev = None
    if pagination.has_prev:
        prev = url_for('api.get_comments', page=page - 1, _external=True)
    next_ = None
    if pagination.has_next:
        next_ = url_for('api.get_comments', page=page + 1, _external=True)
    return jsonify({
        'posts': [comment.to_json() for comment in comments],
        'prev': prev,
        'next': next_,
        'count': pagination.total
    })


@api.route('/comments/<int:id_>/')
def get_comment(id_):
    """Return a single comment by its id."""
    comment = Comment.query.get_or_404(id_)
    return jsonify(comment.to_json())


@api.route('/posts/<int:id_>/comments/')
def get_post_comments(id_):
    """Return a paginated list of the comments on one post, newest first."""
    post = Post.query.get_or_404(id_)
    page = request.args.get('page', 1, type=int)
    query = Comment.query.filter_by(post_id=post.id).order_by(Comment.created.desc())
    pagination, comments = do_pagination(query)
    prev = None
    if pagination.has_prev:
        # Link back to this endpoint (not get_comments) so the post filter is kept.
        prev = url_for('api.get_post_comments', id_=id_, page=page - 1, _external=True)
    next_ = None
    if pagination.has_next:
        next_ = url_for('api.get_post_comments', id_=id_, page=page + 1, _external=True)
    return jsonify({
        'posts': [comment.to_json() for comment in comments],
        'prev': prev,
        'next': next_,
        'count': pagination.total
    })
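Assuming the blueprint above is registered under an /api prefix on a running application (both assumptions, for illustration only), a client can page through the comment list by following the 'next' links until they run out. A minimal sketch with the requests library:

# Hypothetical client-side sketch, not part of the blueprint above.
import requests

BASE_URL = 'http://localhost:5000/api'   # assumed mount point of the blueprint

def iter_all_comments():
    """Yield every comment by walking the paginated /comments/ endpoint."""
    url = BASE_URL + '/comments/'
    while url:
        payload = requests.get(url).json()
        for comment in payload['posts']:   # list key as returned by get_comments()
            yield comment
        url = payload['next']              # None on the last page

if __name__ == '__main__':
    for c in iter_all_comments():
        print(c)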
Healdsburg heating professionals are always just a short phone call away. Property owners seeking a heating company in Healdsburg can contact Just-In Time Home Services. Our customers might need a heater repair in Healdsburg to remain warm during the cooler times of the year. We provide fast Healdsburg heater repairs late at night or on the weekends. We do so at both residential and commercial properties. Our trained technicians have a service van ready to arrive at a home or business to fix baseboard heaters or furnaces. In most cases, we can complete a repair to a building’s heating system in one service call. We also remove debris from heating systems before replacing filters to improve the airflow. So why wait? Contact our team of Healdsburg heating experts today.

Our Healdsburg heating technicians understand how to work on a variety of heating systems and can determine whether a blower is broken or a thermostat is nonfunctional. In some cases, a heating system has more than one problem that requires specialized diagnostics from our technicians. We know that adequate heating in Healdsburg is necessary to keep a building comfortable and healthy. Our heating company in Healdsburg hires trained and licensed technicians. These techs are capable of fixing any brand and model of climate-control device. Besides hiring experts, we use only the best parts to make repairs. We do so to ensure a furnace or baseboard heater continues to function. Our Healdsburg heating company also provides a great guarantee on labor and parts.

Our Healdsburg heating company offers an assortment of services to customers. These services include furnace installation and replacements. Replacing old climate-control equipment is a great idea if you're experiencing high utility bills. Heating in Healdsburg businesses and homes is less expensive with modern furnaces. Our technicians also inspect your building’s ventilation system to check for blockages. These blockages can lead to dangerous gas buildup from carbon monoxide. Call us today to request a furnace tune-up or baseboard heater cleaning. Remember, we're the go-to Healdsburg heating professionals.
# -*- coding: utf-8 -*- # # Copyright (C) 2015-2016 Jakub Beranek # # This file is part of Devi. # # Devi is free software: you can redistribute it and/or modify # it under the terms of the GNU General Public License as published by # the Free Software Foundation, version 3 of the License, or # (at your option) any later version. # # Devi is distributed in the hope that it will be useful, # but WITHOUT ANY WARRANTY; without even the implied warranty of # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the # GNU General Public License for more details. # # You should have received a copy of the GNU General Public License # along with Devi. If not, see <http://www.gnu.org/licenses/>. # import time import util from enums import DebuggerState, ProcessState class ProcessExitedEventData(object): def __init__(self, return_code): self.return_code = return_code class ProcessStoppedEventData(object): def __init__(self, stop_reason): self.stop_reason = stop_reason class StartupInfo(object): def __init__(self, cmd_arguments="", working_directory="", env_vars=None): """ @type cmd_arguments: str @type working_directory: str @type env_vars: list of tuple of (str, str) """ self.cmd_arguments = cmd_arguments self.working_directory = working_directory self.env_vars = env_vars if env_vars is not None else [] def copy(self): return StartupInfo(self.cmd_arguments, self.working_directory, list(self.env_vars)) def __repr__(self): return "StartupInfo: [{}, {}, {}]".format( self.cmd_arguments, self.working_directory, self.env_vars ) class HeapManager(object): def __init__(self, debugger): """ @type debugger: debugger.Debugger """ self.debugger = debugger self.on_heap_change = util.EventBroadcaster() self.on_free_error = util.EventBroadcaster() def watch(self): """ @rtype: str """ raise NotImplementedError() def stop(self): raise NotImplementedError() def find_block_by_address(self, addr): """ @type addr: str @rtype: HeapBlock | None """ raise NotImplementedError() def get_total_allocations(self): """ @rtype: int """ raise NotImplementedError() def get_total_deallocations(self): """ @rtype: int """ raise NotImplementedError() class IOManager(object): def __init__(self): self.stdin = None self.stdout = None self.stderr = None def handle_io(self): raise NotImplementedError() def stop_io(self): raise NotImplementedError() class BreakpointManager(object): def __init__(self, debugger): """ @type debugger: Debugger """ self.debugger = debugger self.on_breakpoint_changed = util.EventBroadcaster() def add_breakpoint(self, location, line): """ Adds a breakpoint, if there is not a breakpoint with the same location and line already. @type location: str @type line: int @rtype: boolean """ raise NotImplementedError() def toggle_breakpoint(self, location, line): """ Toggles a breakpoint on the given location and line. 
@type location: str @type line: int @rtype: boolean """ raise NotImplementedError() def get_breakpoints(self): """ @rtype: list of debugger.Breakpoint """ raise NotImplementedError() def find_breakpoint(self, location, line): """ @type location: str @type line: int @rtype: debugger.Breakpoint | None """ raise NotImplementedError() def remove_breakpoint(self, location, line): """ @type location: str @type line: int @rtype: boolean """ raise NotImplementedError() class FileManager(object): def __init__(self, debugger): """ @type debugger: Debugger """ self.debugger = debugger def get_main_source_file(self): raise NotImplementedError() def get_current_location(self): """ Returns the current file and line of the debugged process. @rtype: tuple of basestring, int | None """ raise NotImplementedError() def get_line_address(self, filename, line): """ Returns the starting address and ending address in hexadecimal format of code at the specified line in the given file. Returns None if no code is at the given location. @type filename: str @type line: int @rtype: tuple of int | None """ raise NotImplementedError() def disassemble(self, filename, line): """ Returns disassembled code for the given location. Returns None if no code was found, @type filename: str @type line: int @rtype: str | None """ raise NotImplementedError() def disassemble_raw(self, filename, line): """ Disassembles the given line in a raw form (returns a string with the line and all assembly instructions for it). @type filename: str @type line: int @rtype: str | None """ raise NotImplementedError() class ThreadManager(object): def __init__(self, debugger): """ @type debugger: Debugger """ self.debugger = debugger def get_current_thread(self): """ @rtype: debugee.Thread """ raise NotImplementedError() def get_thread_info(self): """ Returns (active_thread_id, all_threads). @rtype: debugee.ThreadInfo | None """ raise NotImplementedError() def set_thread_by_index(self, thread_id): """ @type thread_id: int @rtype: bool """ raise NotImplementedError() def get_current_frame(self, with_variables=False): """ @type with_variables: bool @rtype: debugee.Frame | None """ raise NotImplementedError() def get_frames(self): """ @rtype: list of debugee.Frame """ raise NotImplementedError() def get_frames_with_variables(self): """ @rtype: list of debugee.Frame """ raise NotImplementedError() def change_frame(self, frame_index): """ @type frame_index: int @rtype: bool """ raise NotImplementedError() class VariableManager(object): """ Handles retrieval and updating of variables and raw memory of the debugged process. """ def __init__(self, debugger): """ @type debugger: Debugger """ self.debugger = debugger def get_type(self, expression, level=0): """ Returns type for the given expression. @type expression: str @type level: int @rtype: debugee.Type """ raise NotImplementedError() def get_variable(self, expression, level=0): """ Returns a variable for the given expression- @type expression: str @type level: int @rtype: debugee.Variable """ raise NotImplementedError() def update_variable(self, variable): """ Updates the variable's value in the debugged process. @type variable: debugee.Variable """ raise NotImplementedError() def get_memory(self, address, count): """ Returns count bytes from the given address. @type address: str @type count: int @rtype: list of int """ raise NotImplementedError() def get_registers(self): """ Returns the register values as a list of tuples with name and value of the given register. 
@rtype: list of register.Register """ raise NotImplementedError() def get_vector_items(self, vector): """ @type vector: debugger.debugee.VectorVariable @rtype: list of debugger.debugee.Variable """ raise NotImplementedError() class Debugger(object): def __init__(self): self.state = util.Flags(DebuggerState, DebuggerState.Started) self.process_state = ProcessState.Invalid self.io_manager = IOManager() self.breakpoint_manager = BreakpointManager(self) self.file_manager = FileManager(self) self.thread_manager = ThreadManager(self) self.variable_manager = VariableManager(self) self.heap_manager = HeapManager(self) self.on_process_state_changed = util.EventBroadcaster() self.on_debugger_state_changed = util.EventBroadcaster() self.state.on_value_changed.redirect(self.on_debugger_state_changed) self.on_process_state_changed = util.EventBroadcaster() self.on_frame_changed = util.EventBroadcaster() self.on_thread_changed = util.EventBroadcaster() def require_state(self, required_state): if not self.get_state().is_set(required_state): raise util.BadStateError(required_state, self.state) def get_state(self): return self.state def get_process_state(self): return self.process_state def load_binary(self, binary_path): raise NotImplementedError() def launch(self, startup_info=None): """ Launches the program with the given startup info. @type startup_info: StartupInfo | None @rtype: bool """ raise NotImplementedError() def exec_continue(self): raise NotImplementedError() def exec_pause(self): raise NotImplementedError() def exec_step_over(self): raise NotImplementedError() def exec_step_in(self): raise NotImplementedError() def exec_step_out(self): raise NotImplementedError() def quit_program(self, return_code=1): raise NotImplementedError() def terminate(self): raise NotImplementedError() def wait_for_stop(self): while self.process_state not in (ProcessState.Stopped, ProcessState.Exited): time.sleep(0.1) return self.process_state def wait_for_exit(self): while self.process_state != ProcessState.Exited: time.sleep(0.1) return self.process_state
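The classes above only define the debugger's interface; a real backend would subclass Debugger and its managers. As a rough illustration, here is how calling code might drive such a concrete implementation (GdbDebugger is a hypothetical subclass, not part of this module):

# Illustrative driver only; GdbDebugger is an assumed concrete subclass of Debugger.
def run_to_completion(binary_path):
    debugger = GdbDebugger()
    debugger.load_binary(binary_path)
    info = StartupInfo(cmd_arguments="--verbose", working_directory=".")
    if not debugger.launch(info):
        raise RuntimeError("could not launch {}".format(binary_path))
    # Block until the debuggee either stops (e.g. on a breakpoint) or exits.
    state = debugger.wait_for_stop()
    while state == ProcessState.Stopped:
        debugger.exec_continue()   # resume execution and wait for the next stop
        state = debugger.wait_for_stop()
    return state                   # ProcessState.Exited once the process is gone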
Do you have cats? Then you know about kitty litter issues. Yes, Kitty Litter Issues! You know what I mean. If you have kitties, or let's just call them what they are, cats, you have issues. Let it be said that when you visit cat owners' homes you generally only see the cat. Sometimes you see the cat food dispenser, sometimes even the litter box. Have you ever heard cat owners talk about how miserable litter maintenance can be? Bet not. I'm here to save you from cat litter ignorance. Why? Cause I just know you are like me, and in your initial "I think I'd like to have a cat" emotional attraction, you will be sorely unaware of the downside of having a new living, breathing being in your home. Cat hair, you might ask? No, that's no biggie. Fur balls? No, that's manageable too. Ok, you asked, here it is: Cat Pee. Once again, if you have cats you know what I'm talking about. One whiff of the stuff and, man, you better get a grip on something solid pretty fast because you are possibly going to faint. I sure almost did. On the hottest day of the summer I opened the door of my bedroom and nearly dropped to the floor from the wave of smell that instantly overwhelmed me. One of the cats had soaked a nice neat little pool right up there on our cozy duvet.

First thing you do after you overcome the nasty first impression is to close the door and panic over how you are going to get that mess cleaned up. But that is really the secondary issue at hand. What you need to do is figure out why she did it in the first place, and provided she doesn't have a UTI (Urinary Tract Infection), you probably have a simple matter of litter management. In our house we have tried the litter trays, then the enclosed litter tray (apparently cats like privacy), but the daily cleaning of these trays is nasty. This just means that whoever is assigned the duty for today is probably going to conveniently forget to do it as often as possible. Neglecting to clean the litter daily can cause it to smell and discourage your cat from entering it. After all, what if you... Naw, forget it, I won't go there.

Cats are very clean animals. Heck, they spend every waking hour of their day cleaning themselves; they'd better be! They also don't like being in smelly environments; what's more, they don't like climbing over their previous deposits. So the litter has to be kept clean. This led me on a worldwide search for the most effective automatic litter box. I found a few good ones. Rakes, auto scoopers, etc. But these two seem the coolest and best. Stay tuned for my update and review once we have tried one.
#!/usr/bin/env python ## calverter.py (2008/08/16) ## ## Copyright (C) 2008 Mehdi Bayazee (Bayazee@Gmail.com) ## ## Iranian (Jalali) calendar: ## http://en.wikipedia.org/wiki/Iranian_calendar ## Islamic (Hijri) calendar: ## http://en.wikipedia.org/wiki/Islamic_calendar ## Gregorian calendar: ## http://en.wikipedia.org/wiki/Gregorian_calendar ## ## This program is free software; you can redistribute it and/or modify ## it under the terms of the GNU General Public License as published by ## the Free Software Foundation; either version 2, or (at your option) ## any later version. ## ## This program is distributed in the hope that it will be useful, ## but WITHOUT ANY WARRANTY; without even the implied warranty of ## MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the ## GNU General Public License for more details. __author__ = "Mehdi Bayazee" __copyright__ = "Copyright (C) 2008 Mehdi Bayazee" __revision__ = "$Id$" __version__ = "0.1.5" import math ## \defgroup Utility ## @{ class calverter: def __init__(self): self.J0000 = 1721424.5 # Julian date of Gregorian epoch: 0000-01-01 self.J1970 = 2440587.5 # Julian date at Unix epoch: 1970-01-01 self.JMJD = 2400000.5 # Epoch of Modified Julian Date system self.J1900 = 2415020.5 # Epoch (day 1) of Excel 1900 date system (PC) self.J1904 = 2416480.5 # Epoch (day 0) of Excel 1904 date system (Mac) self.NormLeap = ("Normal year", "Leap year") self.GREGORIAN_EPOCH = 1721425.5 self.GREGORIAN_WEEKDAYS = ("Sunday", "Monday", "Tuesday", "Wednesday", "Thursday", "Friday", "Saturday") self.ISLAMIC_EPOCH = 1948439.5; self.ISLAMIC_WEEKDAYS = ("al-ahad", "al-'ithnayn", "ath-thalatha'", "al-arbia`aa'", "al-khamis", "al-jumu`a", "as-sabt") self.JALALI_EPOCH = 1948320.5; self.JALALI_WEEKDAYS = ("Yekshanbeh", "Doshanbeh", "Seshhanbeh", "Chaharshanbeh", "Panjshanbeh", "Jomeh", "Shanbeh") def jwday(self, j): "JWDAY: Calculate day of week from Julian day" return int(math.floor((j + 1.5))) % 7 def weekday_before(self, weekday, jd): """ WEEKDAY_BEFORE: Return Julian date of given weekday (0 = Sunday) in the seven days ending on jd. """ return jd - self.jwday(jd - weekday) def search_weekday(self, weekday, jd, direction, offset): """ SEARCH_WEEKDAY: Determine the Julian date for: weekday Day of week desired, 0 = Sunday jd Julian date to begin search direction 1 = next weekday, -1 = last weekday offset Offset from jd to begin search """ return self.weekday_before(weekday, jd + (direction * offset)) # Utility weekday functions, just wrappers for search_weekday def nearest_weekday(self, weekday, jd): return self.search_weekday(weekday, jd, 1, 3) def next_weekday(self, weekday, jd): return self.search_weekday(weekday, jd, 1, 7) def next_or_current_weekday(self, weekday, jd): return self.search_weekday(weekday, jd, 1, 6) def previous_weekday(self, weekday, jd): return self.search_weekday(weekday, jd, -1, 1) def previous_or_current_weekday(self, weekday, jd): return self.search_weekday(weekday, jd, 1, 0) def leap_gregorian(self, year): "LEAP_GREGORIAN: Is a given year in the Gregorian calendar a leap year ?" 
        return ((year % 4) == 0) and (not(((year % 100) == 0) and ((year % 400) != 0)))

    def gregorian_to_jd(self, year, month, day):
        "GREGORIAN_TO_JD: Determine Julian day number from Gregorian calendar date"
        # Python <= 2.5
        if month <= 2:
            tm = 0
        elif self.leap_gregorian(year):
            tm = -1
        else:
            tm = -2
        # Python 2.5
        #tm = 0 if month <= 2 else (-1 if self.leap_gregorian(year) else -2)
        return (self.GREGORIAN_EPOCH - 1) + (365 * (year - 1)) + math.floor((year - 1) / 4) + (-math.floor((year - 1) / 100)) + \
               math.floor((year - 1) / 400) + math.floor((((367 * month) - 362) / 12) + tm + day)

    def jd_to_gregorian(self, jd):
        "JD_TO_GREGORIAN: Calculate Gregorian calendar date from Julian day"
        wjd = math.floor(jd - 0.5) + 0.5
        depoch = wjd - self.GREGORIAN_EPOCH
        quadricent = math.floor(depoch / 146097)
        dqc = depoch % 146097
        cent = math.floor(dqc / 36524)
        dcent = dqc % 36524
        quad = math.floor(dcent / 1461)
        dquad = dcent % 1461
        yindex = math.floor(dquad / 365)
        year = int((quadricent * 400) + (cent * 100) + (quad * 4) + yindex)
        if not((cent == 4) or (yindex == 4)):
            year += 1
        yearday = wjd - self.gregorian_to_jd(year, 1, 1)

        # Python <= 2.5
        if wjd < self.gregorian_to_jd(year, 3, 1):
            leapadj = 0
        elif self.leap_gregorian(year):
            leapadj = 1
        else:
            leapadj = 2
        # Python 2.5
        #leapadj = 0 if wjd < self.gregorian_to_jd(year, 3, 1) else (1 if self.leap_gregorian(year) else 2)

        month = int(math.floor((((yearday + leapadj) * 12) + 373) / 367))
        day = int(wjd - self.gregorian_to_jd(year, month, 1)) + 1
        return year, month, day

    def n_weeks(self, weekday, jd, nthweek):
        j = 7 * nthweek
        if nthweek > 0:
            j += self.previous_weekday(weekday, jd)
        else:
            # Bug fix: next_weekday is a method, so it must be called on self.
            j += self.next_weekday(weekday, jd)
        return j

    def iso_to_julian(self, year, week, day):
        "ISO_TO_JULIAN: Return Julian day of given ISO year, week, and day"
        return day + self.n_weeks(0, self.gregorian_to_jd(year - 1, 12, 28), week)

    def jd_to_iso(self, jd):
        "JD_TO_ISO: Return array of ISO (year, week, day) for Julian day"
        year = self.jd_to_gregorian(jd - 3)[0]
        if jd >= self.iso_to_julian(year + 1, 1, 1):
            year += 1
        week = int(math.floor((jd - self.iso_to_julian(year, 1, 1)) / 7) + 1)
        day = self.jwday(jd)
        if day == 0:
            day = 7
        return year, week, day

    def iso_day_to_julian(self, year, day):
        "ISO_DAY_TO_JULIAN: Return Julian day of given ISO year, and day of year"
        return (day - 1) + self.gregorian_to_jd(year, 1, 1)

    def jd_to_iso_day(self, jd):
        "JD_TO_ISO_DAY: Return array of ISO (year, day_of_year) for Julian day"
        year = self.jd_to_gregorian(jd)[0]
        day = int(math.floor(jd - self.gregorian_to_jd(year, 1, 1))) + 1
        return year, day

    def pad(self, Str, howlong, padwith):
        "PAD: Pad a string to a given length with a given fill character."
        s = str(Str)
        # Bug fix: Python strings have no .length attribute; use len() instead.
        while len(s) < howlong:
            s = padwith + s
        return s

    def leap_islamic(self, year):
        "LEAP_ISLAMIC: Is a given year a leap year in the Islamic calendar ?"
return (((year * 11) + 14) % 30) < 11 def islamic_to_jd(self, year, month, day): "ISLAMIC_TO_JD: Determine Julian day from Islamic date" return (day + math.ceil(29.5 * (month - 1)) + \ (year - 1) * 354 + \ math.floor((3 + (11 * year)) / 30) + \ self.ISLAMIC_EPOCH) - 1 def jd_to_islamic(self, jd): "JD_TO_ISLAMIC: Calculate Islamic date from Julian day" jd = math.floor(jd) + 0.5 year = int(math.floor(((30 * (jd - self.ISLAMIC_EPOCH)) + 10646) / 10631)) month = int(min(12, math.ceil((jd - (29 + self.islamic_to_jd(year, 1, 1))) / 29.5) + 1)) day = int(jd - self.islamic_to_jd(year, month, 1)) + 1; return year, month, day def leap_jalali(self, year): "LEAP_jalali: Is a given year a leap year in the Jalali calendar ?" # Python <= 2.5 if year > 0: rm = 474 else: rm = 473 # Python 2.5 #return ((((((year - 474 if year > 0 else 473 ) % 2820) + 474) + 38) * 682) % 2816) < 682 return ((((((year - rm) % 2820) + 474) + 38) * 682) % 2816) < 682 def jalali_to_jd(self, year, month, day): "JALALI_TO_JD: Determine Julian day from Jalali date" # Python <= 2.5 if year >=0 : rm = 474 else: rm = 473 epbase = year - (rm) # Python 2.5 #epbase = year - 474 if year>=0 else 473 epyear = 474 + (epbase % 2820) if month <= 7 : mm = (month - 1) * 31 else: mm = ((month - 1) * 30) + 6 return day + mm + \ math.floor(((epyear * 682) - 110) / 2816) + \ (epyear - 1) * 365 + \ math.floor(epbase / 2820) * 1029983 + \ (self.JALALI_EPOCH - 1) def jd_to_jalali(self, jd): "JD_TO_JALALI: Calculate Jalali date from Julian day" jd = math.floor(jd) + 0.5 depoch = jd - self.jalali_to_jd(475, 1, 1) cycle = math.floor(depoch / 1029983) cyear = depoch % 1029983 if cyear == 1029982 : ycycle = 2820 else : aux1 = math.floor(cyear / 366) aux2 = cyear % 366 ycycle = math.floor(((2134 * aux1) + (2816 * aux2) + 2815) / 1028522) + aux1 + 1 year = int(ycycle + (2820 * cycle) + 474) if year <= 0 : year -= 1 yday = (jd - self.jalali_to_jd(year, 1, 1)) + 1 if yday <= 186: month = int(math.ceil(yday / 31)) else: month = int(math.ceil((yday - 6) / 30)) day = int(jd - self.jalali_to_jd(year, month, 1)) + 1 return year, month, day ## @}
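A short usage example of the converter class above. The round trip goes through a Julian day number, which is the common intermediate representation for all three calendars (16 August 2008 corresponds to 26 Mordad 1387 in the Jalali calendar):

# Example usage of calverter; the dates are only illustrative.
cal = calverter()
jd = cal.gregorian_to_jd(2008, 8, 16)        # Gregorian date -> Julian day number
print cal.jd_to_jalali(jd)                    # -> (1387, 5, 26)
print cal.jd_to_islamic(jd)                   # Islamic (Hijri) date for the same day
print cal.jd_to_gregorian(cal.jalali_to_jd(1387, 5, 26))  # -> (2008, 8, 16)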
The most appalling website I've encountered in a while. Make certain you read all of it. Our options have changed. You have none. The cable internet at my Chicago apartment wasn't working (again) last night, so I watched an interesting documentary on filmmaker Roger Corman and fell asleep about 8 pm. When I awoke at 4 am, the broadband connection was still down. I knew it wasn't the cable itself, since I was watching World News Now, and I knew it wasn't the modem, since all the status lights were green. Something was wrong with RCN's server, since I couldn't get a DHCP address. So, at 4:01 am, I called their customer service number. Due to the unusual high call volume, you may experience a long wait time. At 4 am? If this is not of an urgent nature, please call back the next business day between 11 am and 3 pm. Hmm. The next business day is Monday, and I'm going to be in Manhattan. Not an option. ...or you may visit us online at www.rcnchicago.com. Don't taunt me at this hour of the morning, you cretinous oafs. If I could visit you online, I wouldn't be sitting here in my underwear at 4 am. We have recently changed our menu. Please listen to all options. No, you haven't, you lying scum. The last time I called you, three months ago, I wrote down your menu options. They haven't changed at all. Screw you. I'm pressing "0" for a human. All of our representatives are assisting other callers at this time. Sure. I'm sure your crack outsourced staff at some callbank in a northeastern prison are really inundated with calls from Chicago cable subscribers. After about a minute, "Jose" answers and tells me that the people in Internet support don't start work until 6 am. Could I call back then? I could, but I'm not going to. I get dressed, wait for my wife's 4:45 am call (it's 5:45 am east coast time; she's on her way for her cardiac therapy session), brave the 10 degree temperature and head into the office. What do I do at the office? Why, I'm a customer support representative. And I just forwarded all my calls to RCN. I saw a television news report last night which featured the Navy's aircraft carrier USS Harry S. Truman. Note the period after the "S". I was always taught that there wasn't supposed to be one. Bummer. Tex Henson, 78, the guy who animated the Rocky and Bullwinkle cartoons, has died after being hit by a pickup truck. The New York Post story is here. Oh Canada. You whining little twerps. I'm not surprised that America's popularity is slipping. It's inevitable. When you're the only superpower left in the world, everything you do is going to torque someone off somewhere. I'm not even particularly bothered by the fact 90% of the world's Muslims despise the U.S. But what really gets me is, according to the AP story, "Next door, only 25 percent of Canadians said America is a considerate world citizen." Aside from hockey and the cold fronts they send south to screw up our morning rush hours, name something distinctive about Canada. Can't think of anything, can you? Canadians constantly complain about the "Americanization" of their culture, yet more than 80% of its population lives within 200 miles of the American border. They boast about the number of talented Canadians, especially in the entertainment industry, yet the only reason these talented individuals are known at all is because they got the hell out of Canada and made names for themselves in the United States. While lecturing the U.S. 
about our politically incorrect immigration policies post-9/11, they continue to allow Saudis and other potential terrorists into their country without visas, where they can, for the most part, just mosey across the largest unprotected border in the world. I had to spend time in Canada this past summer. While there, I was accosted by an individual who was incensed because I possessed the obscene nationalistic bad taste to have a small U.S. flag pin on my backpack. "Why do you Americans constantly display your flag everywhere?" he asked, barely disguising his distaste with my very existence. "I wear it to identify myself as an American," I explained, "so terrorists won't waste ammo shooting at Canadians." I registered the kgb.com domain name back in 1993, prior to the explosive growth of the World Wide Web, and two years after the Soviet Union's collapse. The reds' secret police (KGB) also went out of business then, or at least changed its name. I ran a company named KGB Consulting at the time, and it seemed like a great address: short, easy to remember, and a bit droll, especially for espionage enthusiasts. It didn't take long for e-mails to boris@kgb.com and natasha@kgb.com to start showing up in my mailbox. But every so often I get e-mail from people who apparently think kgb@kgbreport.com is where Russian spy wannabes should direct their missives. They could be jokes, but the content and tone of the mail appear to be serious. I usually send a reply along the lines of "Wrong KGB, Skippy," and never hear from the sender again. But there's this persistent fella in Germany who keeps sending me stuff- specifically, detailed descriptions of a neighbor's activities. This Smersh-head apparently believes the guy down the street is a clear and present danger to the well-being of the the Eastern bloc, but it sounds to me like this alleged subversive has a lifestyle that would bore the good folks in Mayberry. Since the "Skippy" approach didn't work, I sent him an e-mail stating that kgb.com was a commercial website in the United States and that I had no connection whatsoever to the Russians. I even sent him to the specific page on my website that I had to create to deal with all the KGB crud. I understand Comrade need for discression (sic). Will continue per schedule. With FBI and CIA use of Carnivore e-mail not secure. For next report (Wednesday 1500 hours), write report on paper. Line hat with aluminum foil. Make sure ears are covered by foil. Exit building, turn left, go to corner. Place paper in teeth. Insert right finger into left nostril. Wiggle pinky. Wait for instructions. Worst case, he amuses the neighbors. Best case, he attracts attention and gets some professional help. Ok, I have exactly ten minutes to write this, so hold on to your eyeballs. One thing I miss about not driving when I'm in Chicago is the opportunity to talk to myself. You know what I mean. Look closely when you're on the road, and you'll see a fair number of people having rather animated conversations; soliloquies, actually, since there's no one else in their cars. I've always talked to myself. Part of it stems from being an only child, but since my teens the habit's acquired a practical utility: to hone arguments and to improve my writing. Unfortunately, it's rather hard to do that when you walk to work in a herd of commuters. They have a tendency to look at you strangely and offer you loose change. The solution: the cell phone. I keep one stuck to my ear during my daily travels. 
Fellow pedestrians no longer think I'm psychotic. Unless they actually listen to what I'm saying. When you're stuck in a crowd and need to clear a path quickly, the most effective method is to yell, "Oh God, I'm gonna puke!" Those Heinz ketchup bottles in restaurants that are painted red, so they always look full. It makes me wonder what other shortcuts are being used by the restaurant to simulate attention to detail. Maybe they ScotchGuard the plates. McDonalds' new "healthy" fries. They've removed the beef flavoring, and they just don't taste right. And I have it from an unimpeachable source: my dog won't eat them any more. How can you be expected to take aircraft security and safety procedures seriously when you know some of them are just plain wrong? Take cell phones, for instance. There is no supporting evidence their onboard use interferes with any aircraft systems. In the mid-90s the FAA reviewed thousands of flight reports and couldn't find any instances of interference. Boeing and Airbus have loaded their jumbo jets with hundreds of cell phones and not a single aircraft communication or navigation device was affected. The FCC, not the FAA, has banned the use of cell phones in flight, for good reason. Cellular networks are designed to be used from the ground, where the phone is in contact with one cellular repeater at a time. A cell phone used in-flight "hits" many cellular sites simultaneously, reducing the available capacity of the network. It also makes it difficult for the cellular providers to accurately track and bill the call. And, of course, the airlines hope you'll use their $6 a minute Airfones. Okay, an air ban I can understand. But when you're stuck on the ground because another aircraft is in your gate? Don't give me that "interference with navigational systems" malarkey. If the pilot can't find the gate from here without using his navigation system, you have worse problems than rogue Nokia users. And what about the fact that 100 feet away, inside the terminal building, there are about 10,000 active cell phones all beaming their deadly signals into the skies above the airport? "Available" instead of "optional" in automobile advertising. Another perfectly good word ruined for the sake of deceptive communication. With optional, you knew it was available, but you had to pay for it. "Available" is non-specific. "Yeah, babe, it's available... and so am I. Heh heh. Let's go in the back room and do some paperwork..." Right now. "MSNBC News... right now!" As opposed to when? You're re-running yesterday's news? Something's happened to them, at least in the suburbs of Pittsburgh. Those suckers have started flying. In the past week I saw a dozen of them in mid-air and one nearly became a hood ornament on my car. This is an unsettling development, especially around Thanksgiving. I think they're planning something.
# -*- coding: utf-8 -*- # Generated by Django 1.11.1 on 2017-05-25 00:59 from __future__ import unicode_literals from django.conf import settings from django.db import migrations, models import django.db.models.deletion class Migration(migrations.Migration): initial = True dependencies = [ migrations.swappable_dependency(settings.AUTH_USER_MODEL), ] operations = [ migrations.CreateModel( name='UserAddress', fields=[ ('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')), ('address', models.CharField(max_length=120)), ('address2', models.CharField(blank=True, max_length=120, null=True)), ('city', models.CharField(max_length=120)), ('province', models.CharField(max_length=120, null=True)), ('phone', models.CharField(max_length=16)), ('timestamp', models.DateTimeField(auto_now_add=True)), ('updated', models.DateTimeField(auto_now=True)), ('user', models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, to=settings.AUTH_USER_MODEL)), ], options={ 'ordering': ['updated', '-timestamp'], }, ), migrations.CreateModel( name='UserDefaultAddress', fields=[ ('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')), ('address', models.ForeignKey(blank=True, null=True, on_delete=django.db.models.deletion.CASCADE, related_name='user_address_default', to='accounts.UserAddress')), ('user', models.OneToOneField(on_delete=django.db.models.deletion.CASCADE, to=settings.AUTH_USER_MODEL)), ], ), ]
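For context, the migration above (accounts app) corresponds to models along the following lines. This is a sketch reconstructed from the migration's operations, not the project's actual accounts/models.py:

# Hypothetical accounts/models.py consistent with the generated migration above.
from django.conf import settings
from django.db import models


class UserAddress(models.Model):
    user = models.ForeignKey(settings.AUTH_USER_MODEL, on_delete=models.CASCADE)
    address = models.CharField(max_length=120)
    address2 = models.CharField(max_length=120, null=True, blank=True)
    city = models.CharField(max_length=120)
    province = models.CharField(max_length=120, null=True)
    phone = models.CharField(max_length=16)
    timestamp = models.DateTimeField(auto_now_add=True)
    updated = models.DateTimeField(auto_now=True)

    class Meta:
        ordering = ['updated', '-timestamp']


class UserDefaultAddress(models.Model):
    user = models.OneToOneField(settings.AUTH_USER_MODEL, on_delete=models.CASCADE)
    address = models.ForeignKey(UserAddress, null=True, blank=True,
                                on_delete=models.CASCADE,
                                related_name='user_address_default')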
Spanish for Healthcare Providers is a specialized program designed to help health professionals communicate with their patients in Spanish. Students learn the names of body parts, action verbs, and useful phrases typically used in a medical setting. The material is divided into 10 lessons. Each lesson consists of 3 parts: the introduction of the new words, guided practice, and translation exercises. The 60-minute CD provides correct pronunciation by a native speaker. The combination of the 90-page Audioscript, the oral exercises, and the Workbook, when used concurrently, gives the student the confidence needed to communicate effectively. Students read the lessons while listening to them, then apply the skills acquired to complete the 74 written exercises. After concluding the exercises, students can check their answers at the back of the Workbook. If you are a beginner or an advanced student eager to increase vocabulary or improve your pronunciation and communication, this program will provide the tools you need. Includes 15 contact hours - CEUs - certificate upon completion of course with passing score.

1. Students have one year from the date of purchase to complete this home study program. Therefore, before beginning your studies, schedule your lessons with specific time goals in mind, allowing plenty of time for review before taking the final test.

2. After completing all your practice exercises, take the open book test. If you have followed each lesson in sequence and allowed for a full review, you should be able to answer the questions with minimal searching for the correct answers.

3. Write your answers clearly on the forms provided.

4. You must get a minimum of 70% of the answers correct in order to receive credit for the course. If you score lower than 70%, you will be notified in writing. At the same time, you will be notified of a test retaking opportunity and a new date of completion.

5. If you score 70% or higher, a Certificate of Completion will be issued.

6. The test will be reviewed and the certificate issued within two weeks of receipt.

Maria Oliveira Language Learning Center is approved as a Continuing Education Provider by the California Board of Registered Nursing. CEP 15113.
# Copyright 2016 Michael Thomas # # See www.whatang.org for more information. # # This file is part of DrumBurp. # # DrumBurp is free software: you can redistribute it and/or modify # it under the terms of the GNU General Public License as published by # the Free Software Foundation, either version 3 of the License, or # (at your option) any later version. # # DrumBurp is distributed in the hope that it will be useful, # but WITHOUT ANY WARRANTY; without even the implied warranty of # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the # GNU General Public License for more details. # # You should have received a copy of the GNU General Public License # along with DrumBurp. If not, see <http://www.gnu.org/licenses/> ''' Created on Feb 22, 2015 @author: mike_000 ''' import copy from PyQt4.QtGui import (QDialog, QColor, QLabel, QPushButton, QComboBox, QColorDialog, QPen) from PyQt4 import QtCore from GUI.ui_dbColours import Ui_ColourPicker STYLE_MAP = {"None": QtCore.Qt.NoPen, "Solid": QtCore.Qt.SolidLine, "Dashed": QtCore.Qt.DashLine} STYLES = ["None", "Dashed", "Solid"] REVERSE_STYLE_MAP = dict((x, y) for (y, x) in STYLE_MAP.iteritems()) class ColouredItem(object): def __init__(self, backgroundColour, borderStyle, borderColour): self._borderStyle = None self.backgroundColour = backgroundColour self.borderStyle = borderStyle self.borderColour = borderColour @property def borderStyle(self): return self._borderStyle @borderStyle.setter def borderStyle(self, value): if not isinstance(value, QtCore.Qt.PenStyle): value = STYLE_MAP.get(value, QtCore.Qt.NoPen) self._borderStyle = value @staticmethod def _colourToString(name, colour): return name + ":%02x,%02x,%02x,%02x" % colour.getRgb() @staticmethod def _colourFromString(colString): rgba = [int(x, 16) for x in colString.split(",")] return QColor.fromRgb(*rgba) @staticmethod def _lineToString(name, line): return "%s:%s" % (name, REVERSE_STYLE_MAP[line]) @staticmethod def _lineFromString(lineString): return STYLE_MAP[lineString] def toString(self): answer = "/".join([self._colourToString("backgroundColour", self.backgroundColour), self._lineToString("borderStyle", self.borderStyle), self._colourToString("borderColour", self.borderColour)]) return answer def fromString(self, colString): for item in str(colString).split("/"): if ":" not in item: continue name, detail = item.split(":") if name.endswith("Colour"): setattr(self, name, self._colourFromString(detail)) elif name.endswith("Style"): setattr(self, name, self._lineFromString(detail)) class TextColouredItem(ColouredItem): def __init__(self, textColour): super(TextColouredItem, self).__init__(QColor(QtCore.Qt.transparent), "None", textColour) class BorderColouredItem(ColouredItem): def __init__(self, borderStyle, borderColour): super(BorderColouredItem, self).__init__(QColor(QtCore.Qt.transparent), borderStyle, borderColour) class ColAttrs(object): KNOWN_COLOURS = [] def __init__(self, longName, attrName, default, background=True, border=True, text=False): self.longName = longName self.attrName = attrName self.default = default self.background = background self.border = border self.text = text self.KNOWN_COLOURS.append(self) def makeInstance(self, scheme): inst = ColourInstance(self) setattr(scheme, self.attrName, inst) def setPainter(self, painter, colour): raise NotImplementedError() def getInstance(self, scheme): return getattr(scheme, self.attrName) class TextColAttrs(ColAttrs): def __init__(self, longName, attrName, default): super(TextColAttrs, self).__init__(longName, attrName, 
default, False, False, True) def setPainter(self, painter, colour): pen = QPen() pen.setColor(colour.borderColour) painter.setPen(pen) class SolidBoxAttrs(ColAttrs): def __init__(self, longName, attrName, default): super(SolidBoxAttrs, self).__init__(longName, attrName, default, True, True, False) def setPainter(self, painter, colour): pen = QPen(colour.borderStyle) pen.setColor(colour.borderColour) painter.setPen(pen) painter.setBrush(colour.backgroundColour) class BorderAttrs(ColAttrs): def __init__(self, longName, attrName, default): super(BorderAttrs, self).__init__(longName, attrName, default, False, True, False) def setPainter(self, painter, colour): pen = QPen(colour.borderStyle) pen.setColor(colour.borderColour) painter.setPen(pen) painter.setBrush(QColor(QtCore.Qt.transparent)) _TEXT_ATTRS = TextColAttrs("Text", "text", TextColouredItem(QColor(QtCore.Qt.black))) _POTENTIAL_ATTRS = TextColAttrs("New notes", "potential", TextColouredItem(QColor(QtCore.Qt.blue))) _DELETE_ATTRS = TextColAttrs("Notes to delete", "delete", TextColouredItem(QColor(QtCore.Qt.red))) _NOTE_HIGHLIGHT_ATTRS = SolidBoxAttrs("Note Highlight", "noteHighlight", ColouredItem(QColor(QtCore.Qt.yellow).lighter(), "None", QColor(QtCore.Qt.black))) _TIME_HIGHLIGHT_ATTRS = BorderAttrs("Time Highlight", "timeHighlight", BorderColouredItem("Dashed", QColor(QtCore.Qt.blue).lighter())) _SEL_MEASURE_ATTRS = SolidBoxAttrs("Selected Measure", "selectedMeasure", ColouredItem(QColor(QtCore.Qt.gray).lighter(), "Solid", QColor(QtCore.Qt.gray).lighter())) _PLAY_HL_ATTRS = BorderAttrs("Playing Highlight", "playingHighlight", BorderColouredItem("Solid", QColor(QtCore.Qt.blue).lighter())) _NEXT_PLAY_HL_ATTRS = BorderAttrs("Next Playing Highlight", "nextPlayingHighlight", BorderColouredItem("Dashed", QColor(QtCore.Qt.blue).lighter())) _STICKING_ATTRS = SolidBoxAttrs("Sticking Display", "sticking", ColouredItem(QColor(QtCore.Qt.white), "Dashed", QColor(QtCore.Qt.gray))) class ColourInstance(object): def __init__(self, colourAttrs): self.colour = copy.deepcopy(colourAttrs.default) self.colourAttrs = colourAttrs def setPainter(self, painter): self.colourAttrs.setPainter(painter, self) @property def borderStyle(self): return self.colour.borderStyle @borderStyle.setter def borderStyle(self, value): self.colour.borderStyle = value @property def borderColour(self): return self.colour.borderColour @borderColour.setter def borderColour(self, value): self.colour.borderColour = value @property def backgroundColour(self): return self.colour.backgroundColour @backgroundColour.setter def backgroundColour(self, value): self.colour.backgroundColour = value def toString(self): return self.colour.toString() def fromString(self, colString): return self.colour.fromString(colString) class ColourScheme(object): def __init__(self): for colAttr in ColAttrs.KNOWN_COLOURS: colAttr.makeInstance(self) def iterColours(self): for colour in ColAttrs.KNOWN_COLOURS: yield getattr(self, colour.attrName) def iterTextColours(self): for colour in ColAttrs.KNOWN_COLOURS: if colour.text: yield getattr(self, colour.attrName) def iterAreaColours(self): for colour in ColAttrs.KNOWN_COLOURS: if not colour.text: yield getattr(self, colour.attrName) class DBColourPicker(QDialog, Ui_ColourPicker): def __init__(self, colour_scheme, parent=None): super(DBColourPicker, self).__init__(parent) self.setupUi(self) self._originalScheme = copy.deepcopy(colour_scheme) self._currentScheme = copy.deepcopy(colour_scheme) reset = self.buttonBox.button(self.buttonBox.Reset) 
reset.clicked.connect(self.reset) restore = self.buttonBox.button(self.buttonBox.RestoreDefaults) restore.clicked.connect(self.restoreDefaults) self._colourSelectors = [] self._lineSelectors = [] for row, colour in enumerate(self._currentScheme.iterTextColours()): colourAttr = colour.colourAttrs label = QLabel(self.frame) label.setText(colourAttr.longName) label.setAlignment(QtCore.Qt.AlignRight) self.textGrid.addWidget(label, row + 1, 0, 1, 1) textButton = self._makeLineButton(colourAttr) self.textGrid.addWidget(textButton, row + 1, 1, 1, 1) for row, colour in enumerate(self._currentScheme.iterAreaColours()): colourAttr = colour.colourAttrs label = QLabel(self.frame_2) label.setText(colourAttr.longName) label.setAlignment(QtCore.Qt.AlignRight) self.areaGrid.addWidget(label, row + 1, 0, 1, 1) if colourAttr.background: backgroundButton = self._makeBackgroundButton(colourAttr) self.areaGrid.addWidget(backgroundButton, row + 1, 1, 1, 1) if colourAttr.border: combo = self._makeLineCombo(colourAttr) self.areaGrid.addWidget(combo, row + 1, 2, 1, 1) lineButton = self._makeLineButton(colourAttr) self.areaGrid.addWidget(lineButton, row + 1, 3, 1, 1) self._setColourValues() @staticmethod def _styleButton(button, colour): button.setText("") button.setAutoFillBackground(True) ss = """QPushButton { background: rgba(%d, %d, %d, %d); border-color: black; border-width:1px; color: black; border-style: ridge; } QPushButton:hover { border-width:2px; border-color: red; }""" ss %= colour.getRgb() if colour.getRgb()[3] == 0: button.setText("Transparent") button.setStyleSheet(ss) def _makeColourSelector(self, button, colourAttr, colourType): def selectColour(): colour = colourAttr.getInstance(self._currentScheme) currentColour = getattr(colour, colourType) colourDialog = QColorDialog(currentColour, self) if colourDialog.exec_(): selected = colourDialog.selectedColor() if selected != currentColour: self._styleButton(button, selected) setattr(colour, colourType, selected) button.clicked.connect(selectColour) self._colourSelectors.append((button, colourAttr, colourType)) def _makeBackgroundButton(self, colourAttr): backgroundButton = QPushButton(self) backgroundButton.setObjectName(colourAttr.attrName + "background_col") self._makeColourSelector( backgroundButton, colourAttr, "backgroundColour") return backgroundButton def _makeLineCombo(self, colourAttr): combo = QComboBox(self) combo.setObjectName(colourAttr.attrName + "border_style") for lineStyle in STYLES: combo.addItem(lineStyle) def setLineStyle(newIndex): colour = colourAttr.getInstance(self._currentScheme) colour.borderStyle = STYLES[newIndex] combo.currentIndexChanged.connect(setLineStyle) self._lineSelectors.append((combo, colourAttr)) return combo def _makeLineButton(self, colourAttr): lineButton = QPushButton(self) lineButton.setObjectName(colourAttr.attrName + "border_col") self._makeColourSelector(lineButton, colourAttr, "borderColour") return lineButton def getColourScheme(self): return self._currentScheme def _setColourValues(self): for button, colourAttr, colourType in self._colourSelectors: colour = colourAttr.getInstance(self._currentScheme) colourVal = getattr(colour, colourType) self._styleButton(button, colourVal) for combo, colourAttr in self._lineSelectors: colour = colourAttr.getInstance(self._currentScheme) currentStyle = colour.borderStyle for selected, lineStyle in enumerate(STYLES): if STYLE_MAP[lineStyle] == currentStyle: combo.setCurrentIndex(selected) def reset(self): self._currentScheme = copy.deepcopy(self._originalScheme) 
self._setColourValues() def restoreDefaults(self): self._currentScheme = copy.deepcopy(ColourScheme()) self._setColourValues() def main(): from PyQt4.QtGui import QApplication import sys app = QApplication(sys.argv) scheme = ColourScheme() dialog = DBColourPicker(scheme) dialog.show() app.exec_() if dialog.result(): scheme = dialog.getColourScheme() for col in scheme.iterColours(): print col.colourAttrs.longName, col.toString() if __name__ == "__main__": main()
Slowly catching up with Project Life. I was really hoping to have 2012 finished by the end of December. My goal is the end of January! Only 14 more weeks to go! This entry was posted on January 18, 2013 by debbe. It was filed under Project Life and was tagged with crate paper, echo park, hello, life, project, project life, scrapbook, scrapping, week 38.
#!/usr/bin/env python var_map = {} val_map = {} undo = [] rolling_back = False def in_transaction_block(): return len(undo) != 0 and not rolling_back def set_var(name, value): if name in var_map: if in_transaction_block(): undo.append((set_var, name, var_map[name])) val_map[var_map[name]] -= 1 elif in_transaction_block(): undo.append((unset, name)) if value in val_map: val_map[value] += 1 else: val_map[value] = 1 var_map[name] = value def get_val(name): if name in var_map: print var_map[name] else: print "NULL" def unset(name): if in_transaction_block(): undo.append((set_var, name, var_map[name])) val_map[var_map[name]] -= 1 del var_map[name] def num_equal_to(value): if value in val_map: print val_map[value] else: print "0" def commit(): if not in_transaction_block(): print "NO TRANSACTION" return global undo undo = [] def rollback(): if not in_transaction_block(): print "NO TRANSACTION" return global rolling_back rolling_back = True rolling = None while rolling != "begin": rolling = undo.pop() if rolling != "begin": rolling[0](*rolling[1:]) rolling_back = False def begin(): undo.append("begin") def main(): dispatch_table = { "GET": get_val, "SET": set_var, "UNSET": unset, "NUMEQUALTO": num_equal_to, "END": exit, "BEGIN": begin, "COMMIT": commit, "ROLLBACK": rollback, } while True: try: line = raw_input() except EOFError: exit() command = line.split() dispatch_table[command[0]](*command[1:]) if __name__ == "__main__": main()
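The script above implements a simple in-memory database with transactions: SET, GET, UNSET and NUMEQUALTO, plus BEGIN, ROLLBACK and COMMIT, backed by an undo log that records the inverse of each mutation made inside a transaction. An illustrative session (input lines as typed on stdin, expected output shown in comments):

# Illustrative session; the commands are read from stdin by main().
#
#   SET a 10
#   GET a            # prints: 10
#   BEGIN
#   SET a 20
#   GET a            # prints: 20
#   ROLLBACK         # undo log replays (set_var, 'a', '10')
#   GET a            # prints: 10
#   COMMIT           # prints: NO TRANSACTION (no transaction is open any more)
#   NUMEQUALTO 10    # prints: 1
#   END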
In the year 2000, Racing Engineering, based in Sanlúcar de Barrameda (near the Jerez F1 track), became the first Spanish team to participate in the legendary Le Mans 24 Hours. Since then, the Andalusian team has raced at the highest levels of motorsport, both at the national and the international level. Exceptional dedication and a pure passion for motorsports form the ethos that drives Racing Engineering in its pursuit of competitive brilliance and the basis for its success. In eight years, the team won 11 titles, a record for any motorsport team within such a short period of time. Its participations and achievements span categories as diverse as the Spanish GT Championship and Spanish Formula 3, where Racing Engineering took the championship title for six consecutive years, and the World Series single-seater championship, where it won the title in 2002 and the vice-championship in 2003. The team has not stopped growing, evolving and striving for success and new objectives, establishing itself in recent years as one of the strongest teams in GP2, the category regarded as the stepping-stone to Formula 1. During these years in GP2 and F2, Racing Engineering obtained numerous victories. In 2008 Racing Engineering claimed the GP2 championship title with its Italian driver, Giorgio Pantano. Lucas Di Grassi, who drove for the Spanish outfit in the 2009 GP2 season while serving as Renault F1's third driver, finished the championship in third place and raced in Formula 1 in 2010. In 2013 Racing Engineering won the GP2 drivers' title again, this time with Fabio Leimer. More success followed in 2015, when Racing Engineering took second place in both the GP2 Team and Driver Championships with eight podium finishes and Alexander Rossi winning three races. 2016 saw the team once again finish second in the Team Championship, with four race wins and ten podium finishes for Norman Nato and Jordan King. For 2018 the team moved to the European Le Mans Series, and it was another great year, with Racing Engineering winning their debut race at the Paul Ricard 4 Hours and taking second position in both the Team and Driver Championships.

Continued expansion and development mean the Racing Engineering facilities - near Jerez in southern Spain - house state-of-the-art, fully equipped workshops and engineering divisions. The purpose-built facilities also host a whole range of amenities: the Techno gym for driver and team fitness, a restaurant, conference facilities and one of the finest golf courses in Europe at Costa Ballena. Our hospitality unit provides seating for up to 50 guests for on-track corporate entertainment and team accommodation.
import os from collections import namedtuple import pytest import sqlalchemy as sa import sqlalchemy.engine as sa_engine import sqlalchemy.orm as sa_orm import testing.postgresql from app import models # re-useable test database subdirectory KEEPDB_PATH = '.test_db' # Test database options DatabaseConfig = namedtuple( 'DatabaseConfig', ['keepdb_active', 'keepdb_path'] ) @pytest.fixture(scope='session') def db_options(request, root_path:str) -> DatabaseConfig: """ Fixture of test database options for the entire pytest session :param request: pytest fixture request (FixtureRequest) """ keepdb_active = request.config.getoption('--keepdb') if keepdb_active: keepdb_path = os.path.join(root_path, KEEPDB_PATH) else: keepdb_path = None return DatabaseConfig(keepdb_active, keepdb_path) @pytest.fixture(scope='session') def db_url(db_options: DatabaseConfig): """ Postgres conninfo URL for the test database. This URL is usually a transient database managed by the 'testing.postgres' library. If the '--keepdb' option is specified, it will force it to be persistent at a known local path. :param db_options: test database options """ testdb_kwargs = {} if db_options.keepdb_path: testdb_kwargs['base_dir'] = db_options.keepdb_path with testing.postgresql.Postgresql(**testdb_kwargs) as postgresql: yield postgresql.url() @pytest.fixture(scope='session') def db_engine(db_options:DatabaseConfig, db_url: str): """ Fixture providing SQLAlchemy test database connectivity :param db_options: database options :param db_url: test database conninfo URL """ db_engine = sa.create_engine(db_url) # speed up tests by only installing schema if there was no prior database created with --keepdb if not db_options.keepdb_active or os.path.exists(db_options.keepdb_path): models.init_database(db_engine) yield db_engine db_engine.dispose() @pytest.fixture def session(db_engine: sa_engine.Engine): """ Fixture providing SQLAlchemy session for operations on ORM-mapped objects :param db_engine: test database connectivity instance """ sessionmaker = sa_orm.sessionmaker(db_engine) # session is automatically rolled back regardless of test result # if an uncaught exception occurred, ensure it is still propagated to pytest with the original traceback session = None try: session = sessionmaker() yield session except: raise finally: session.rollback()
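These fixtures rely on two things defined elsewhere in the test suite: the --keepdb command-line option and a session-scoped root_path fixture. A minimal conftest.py that would satisfy both might look like this (a sketch under those assumptions, not the project's actual conftest):

# Hypothetical conftest.py providing the pieces the fixtures above rely on.
import os

import pytest


def pytest_addoption(parser):
    # Register the --keepdb flag read by the db_options fixture.
    parser.addoption('--keepdb', action='store_true', default=False,
                     help='Reuse a persistent test database between runs.')


@pytest.fixture(scope='session')
def root_path() -> str:
    # Repository root, used to anchor the persistent test database directory.
    return os.path.dirname(os.path.dirname(os.path.abspath(__file__)))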
If you want to run the entire towpath, be prepared for a workout: It spans 20 miles from its beginning in Valley View to the penultimate visitor's center off Smith Road in Akron. But that should be no cause for despair. Almost any piece of the crushed limestone trail makes for a good run. It's usually less crowded than Metroparks trails and, in many places, significantly more beautiful, with the Cuyahoga River edging one side and the wonderfully green Ohio & Erie Canal hugging the other.
# -*- coding: utf-8 -*- ############################################################################## # # Author: Yannick Vaucher # Copyright 2014 Camptocamp SA # # This program is free software: you can redistribute it and/or modify # it under the terms of the GNU Affero General Public License as # published by the Free Software Foundation, either version 3 of the # License, or (at your option) any later version. # # This program is distributed in the hope that it will be useful, # but WITHOUT ANY WARRANTY; without even the implied warranty of # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the # GNU Affero General Public License for more details. # # You should have received a copy of the GNU Affero General Public License # along with this program. If not, see <http://www.gnu.org/licenses/>. # ############################################################################## from openerp.tests import common class test_lp_1282584(common.TransactionCase): """ Test wizard open the right type of view The wizard can generate picking.in and picking.out Let's ensure it open the right view for each picking type """ def setUp(self): super(test_lp_1282584, self).setUp() cr, uid = self.cr, self.uid self.WizardMakePicking = self.registry('claim_make_picking.wizard') ClaimLine = self.registry('claim.line') Claim = self.registry('crm.claim') self.product_id = self.ref('product.product_product_4') self.partner_id = self.ref('base.res_partner_12') # Create the claim with a claim line self.claim_id = Claim.create( cr, uid, { 'name': 'TEST CLAIM', 'number': 'TEST CLAIM', 'claim_type': 'customer', 'delivery_address_id': self.partner_id, }) claim = Claim.browse(cr, uid, self.claim_id) self.warehouse_id = claim.warehouse_id.id self.claim_line_id = ClaimLine.create( cr, uid, { 'name': 'TEST CLAIM LINE', 'claim_origine': 'none', 'product_id': self.product_id, 'claim_id': self.claim_id, 'location_dest_id': claim.warehouse_id.lot_stock_id.id }) def test_00(self): """Test wizard opened view model for a new product return """ cr, uid = self.cr, self.uid wiz_context = { 'active_id': self.claim_id, 'partner_id': self.partner_id, 'warehouse_id': self.warehouse_id, 'picking_type': 'in', } wizard_id = self.WizardMakePicking.create(cr, uid, { }, context=wiz_context) res = self.WizardMakePicking.action_create_picking( cr, uid, [wizard_id], context=wiz_context) self.assertEquals(res.get('res_model'), 'stock.picking.in', "Wrong model defined") def test_01(self): """Test wizard opened view model for a new delivery """ cr, uid = self.cr, self.uid WizardChangeProductQty = self.registry('stock.change.product.qty') wiz_context = {'active_id': self.product_id} wizard_chg_qty_id = WizardChangeProductQty.create(cr, uid, { 'product_id': self.product_id, 'new_quantity': 12}) WizardChangeProductQty.change_product_qty(cr, uid, [wizard_chg_qty_id], context=wiz_context) wiz_context = { 'active_id': self.claim_id, 'partner_id': self.partner_id, 'warehouse_id': self.warehouse_id, 'picking_type': 'out', } wizard_id = self.WizardMakePicking.create(cr, uid, { }, context=wiz_context) res = self.WizardMakePicking.action_create_picking( cr, uid, [wizard_id], context=wiz_context) self.assertEquals(res.get('res_model'), 'stock.picking.out', "Wrong model defined")
Evans Hybrid Coated Snare Batter Drum Head, 13" Evans Hybrid Coated 13" Snare Batter Drum Head- B13MHG. The Hybrid coated snare batter drum head from Evans comes fitted with two unique fibers to enhance durability, projection, flexibility and response. The Evans Hybrid series drum heads were built to hold up to the heaviest of hitters and they offer a unique wear-resistant texture, perfect for brushes and achieving maximum articulation.
from globfile import *
from lib import *


def tableprint(z):
    # prints table with the summary
    print rowprint(colname[z]), '%10s' % 'notes'
    print rowprint(expected(colname[z], z)), '%10s' % 'expected'
    temp = [c for c in range(len(colname[z]))]
    for c in colname[z]:
        if c in nump[z]:
            temp[colname[z].index(c)] = str('%0.2f' % round(sd[z][c], 2))
        else:
            temp[colname[z].index(c)] = str('%0.2f' % round(float(most[z][c]) / float(n[z][c]), 2))
    print rowprint(temp), '%10s' % 'certainty'
    for row in data[z]:
        print rowprint(row)


def tableprint1(z):
    print rowprint(colname[z])
    for row in data[z]:
        print rowprint(row)


def klass1(data, z):
    # returns the class value of a row (the column named in klass[z])
    for k in klass[z]:
        return data[colname[z].index(k)]


def klassAt(z):
    # returns the column index of the class attribute
    for k in klass[z]:
        return colname[z].index(k)


def fromHell(row, z, more, less):
    # normalised distance of a row from the worst possible ("hell") values
    m = 0
    out = 0
    aLittle = 0.001
    if z in more:
        for c in more[z]:
            ind = colname[z].index(c)
            if row[ind] != '?':
                m += 1
                out += ((row[ind] - hi[z][c]) / (hi[z][c] - lo[z][c] + aLittle)) ** 2
    if z in less:
        for c in less[z]:
            ind = colname[z].index(c)
            if row[ind] != '?':
                m += 1
                out += ((row[ind] - hi[z][c]) / (hi[z][c] - lo[z][c] + aLittle)) ** 2
    return out ** 0.5 / m ** 5 if m == 1 else 1
F. Stephen Hodi, M.D. - Deciphera Pharmaceuticals, Inc.

F. Stephen Hodi, M.D. is the Director of the Melanoma Center and the Center for Immuno-Oncology at Dana-Farber/Brigham and Women's Cancer Center and Assistant Professor of Medicine at Harvard Medical School. His research focuses on gene therapy, the development of immune therapies, and first-in-human studies for malignant melanoma. Dr. Hodi is a member of the National Comprehensive Cancer Network, the American Society of Clinical Oncology, the Eastern Cooperative Oncology Group Melanoma Committee, the Society for Immunotherapy of Cancer and the American Association for Cancer Research, and a founding member of the Society for Melanoma Research. Dr. Hodi received his MD degree from Cornell University Medical College. He completed his postdoctoral training in Internal Medicine at the Hospital of the University of Pennsylvania and his Medical Oncology training at Dana-Farber Cancer Institute.
from typing import Any, Iterable, List, Mapping, Set, Tuple, Optional, Union from django.utils.translation import ugettext as _ from zerver.lib.actions import check_stream_name, create_streams_if_needed from zerver.lib.request import JsonableError from zerver.models import UserProfile, Stream, Subscription, \ Realm, Recipient, get_stream, \ bulk_get_streams, get_realm_stream, DefaultStreamGroup, get_stream_by_id_in_realm from django.db.models.query import QuerySet def check_for_exactly_one_stream_arg(stream_id: Optional[int], stream: Optional[str]) -> None: if stream_id is None and stream is None: raise JsonableError(_("Please supply 'stream'.")) if stream_id is not None and stream is not None: raise JsonableError(_("Please choose one: 'stream' or 'stream_id'.")) def access_stream_for_delete_or_update(user_profile: UserProfile, stream_id: int) -> Stream: # We should only ever use this for realm admins, who are allowed # to delete or update all streams on their realm, even private streams # to which they are not subscribed. We do an assert here, because # all callers should have the require_realm_admin decorator. assert(user_profile.is_realm_admin) error = _("Invalid stream id") try: stream = Stream.objects.get(id=stream_id) except Stream.DoesNotExist: raise JsonableError(error) if stream.realm_id != user_profile.realm_id: raise JsonableError(error) return stream # Only set allow_realm_admin flag to True when you want to allow realm admin to # access unsubscribed private stream content. def access_stream_common(user_profile: UserProfile, stream: Stream, error: str, require_active: bool=True, allow_realm_admin: bool=False) -> Tuple[Recipient, Optional[Subscription]]: """Common function for backend code where the target use attempts to access the target stream, returning all the data fetched along the way. If that user does not have permission to access that stream, we throw an exception. A design goal is that the error message is the same for streams you can't access and streams that don't exist.""" # First, we don't allow any access to streams in other realms. if stream.realm_id != user_profile.realm_id: raise JsonableError(error) recipient = stream.recipient try: sub = Subscription.objects.get(user_profile=user_profile, recipient=recipient, active=require_active) except Subscription.DoesNotExist: sub = None # If the stream is in your realm and public, you can access it. if stream.is_public() and not user_profile.is_guest: return (recipient, sub) # Or if you are subscribed to the stream, you can access it. if sub is not None: return (recipient, sub) # For some specific callers (e.g. getting list of subscribers, # removing other users from a stream, and updating stream name and # description), we allow realm admins to access stream even if # they are not subscribed to a private stream. if user_profile.is_realm_admin and allow_realm_admin: return (recipient, sub) # Otherwise it is a private stream and you're not on it, so throw # an error. 
raise JsonableError(error) def access_stream_by_id(user_profile: UserProfile, stream_id: int, require_active: bool=True, allow_realm_admin: bool=False) -> Tuple[Stream, Recipient, Optional[Subscription]]: stream = get_stream_by_id(stream_id) error = _("Invalid stream id") (recipient, sub) = access_stream_common(user_profile, stream, error, require_active=require_active, allow_realm_admin=allow_realm_admin) return (stream, recipient, sub) def get_public_streams_queryset(realm: Realm) -> 'QuerySet[Stream]': return Stream.objects.filter(realm=realm, invite_only=False, history_public_to_subscribers=True) def get_stream_by_id(stream_id: int) -> Stream: error = _("Invalid stream id") try: stream = Stream.objects.get(id=stream_id) except Stream.DoesNotExist: raise JsonableError(error) return stream def check_stream_name_available(realm: Realm, name: str) -> None: check_stream_name(name) try: get_stream(name, realm) raise JsonableError(_("Stream name '%s' is already taken.") % (name,)) except Stream.DoesNotExist: pass def access_stream_by_name(user_profile: UserProfile, stream_name: str, allow_realm_admin: bool=False) -> Tuple[Stream, Recipient, Optional[Subscription]]: error = _("Invalid stream name '%s'") % (stream_name,) try: stream = get_realm_stream(stream_name, user_profile.realm_id) except Stream.DoesNotExist: raise JsonableError(error) (recipient, sub) = access_stream_common(user_profile, stream, error, allow_realm_admin=allow_realm_admin) return (stream, recipient, sub) def access_stream_for_unmute_topic_by_name(user_profile: UserProfile, stream_name: str, error: str) -> Stream: """ It may seem a little silly to have this helper function for unmuting topics, but it gets around a linter warning, and it helps to be able to review all security-related stuff in one place. Our policy for accessing streams when you unmute a topic is that you don't necessarily need to have an active subscription or even "legal" access to the stream. Instead, we just verify the stream_id has been muted in the past (not here, but in the caller). Long term, we'll probably have folks just pass us in the id of the MutedTopic row to unmute topics. """ try: stream = get_stream(stream_name, user_profile.realm) except Stream.DoesNotExist: raise JsonableError(error) return stream def access_stream_for_unmute_topic_by_id(user_profile: UserProfile, stream_id: int, error: str) -> Stream: try: stream = Stream.objects.get(id=stream_id, realm_id=user_profile.realm_id) except Stream.DoesNotExist: raise JsonableError(error) return stream def can_access_stream_history(user_profile: UserProfile, stream: Stream) -> bool: """Determine whether the provided user is allowed to access the history of the target stream. The stream is specified by name. This is used by the caller to determine whether this user can get historical messages before they joined for a narrowing search. Because of the way our search is currently structured, we may be passed an invalid stream here. We return False in that situation, and subsequent code will do validation and raise the appropriate JsonableError. Note that this function should only be used in contexts where access_stream is being called elsewhere to confirm that the user can actually see this stream. """ if stream.is_history_realm_public() and not user_profile.is_guest: return True if stream.is_history_public_to_subscribers(): # In this case, we check if the user is subscribed. 
error = _("Invalid stream name '%s'") % (stream.name,) try: (recipient, sub) = access_stream_common(user_profile, stream, error) except JsonableError: return False return True return False def can_access_stream_history_by_name(user_profile: UserProfile, stream_name: str) -> bool: try: stream = get_stream(stream_name, user_profile.realm) except Stream.DoesNotExist: return False return can_access_stream_history(user_profile, stream) def can_access_stream_history_by_id(user_profile: UserProfile, stream_id: int) -> bool: try: stream = get_stream_by_id_in_realm(stream_id, user_profile.realm) except Stream.DoesNotExist: return False return can_access_stream_history(user_profile, stream) def filter_stream_authorization(user_profile: UserProfile, streams: Iterable[Stream]) -> Tuple[List[Stream], List[Stream]]: streams_subscribed = set() # type: Set[int] recipient_ids = [stream.recipient_id for stream in streams] subs = Subscription.objects.filter(user_profile=user_profile, recipient_id__in=recipient_ids, active=True) for sub in subs: streams_subscribed.add(sub.recipient.type_id) unauthorized_streams = [] # type: List[Stream] for stream in streams: # The user is authorized for their own streams if stream.id in streams_subscribed: continue # Users are not authorized for invite_only streams, and guest # users are not authorized for any streams if stream.invite_only or user_profile.is_guest: unauthorized_streams.append(stream) authorized_streams = [stream for stream in streams if stream.id not in set(stream.id for stream in unauthorized_streams)] return authorized_streams, unauthorized_streams def list_to_streams(streams_raw: Iterable[Mapping[str, Any]], user_profile: UserProfile, autocreate: bool=False) -> Tuple[List[Stream], List[Stream]]: """Converts list of dicts to a list of Streams, validating input in the process For each stream name, we validate it to ensure it meets our requirements for a proper stream name using check_stream_name. This function in autocreate mode should be atomic: either an exception will be raised during a precheck, or all the streams specified will have been created if applicable. @param streams_raw The list of stream dictionaries to process; names should already be stripped of whitespace by the caller. @param user_profile The user for whom we are retreiving the streams @param autocreate Whether we should create streams if they don't already exist """ # Validate all streams, getting extant ones, then get-or-creating the rest. stream_set = set(stream_dict["name"] for stream_dict in streams_raw) for stream_name in stream_set: # Stream names should already have been stripped by the # caller, but it makes sense to verify anyway. assert stream_name == stream_name.strip() check_stream_name(stream_name) existing_streams = [] # type: List[Stream] missing_stream_dicts = [] # type: List[Mapping[str, Any]] existing_stream_map = bulk_get_streams(user_profile.realm, stream_set) member_creating_announcement_only_stream = False for stream_dict in streams_raw: stream_name = stream_dict["name"] stream = existing_stream_map.get(stream_name.lower()) if stream is None: if stream_dict.get("is_announcement_only", False) and not user_profile.is_realm_admin: member_creating_announcement_only_stream = True missing_stream_dicts.append(stream_dict) else: existing_streams.append(stream) if len(missing_stream_dicts) == 0: # This is the happy path for callers who expected all of these # streams to exist already. 
created_streams = [] # type: List[Stream] else: # autocreate=True path starts here if not user_profile.can_create_streams(): raise JsonableError(_('User cannot create streams.')) elif not autocreate: raise JsonableError(_("Stream(s) (%s) do not exist") % ", ".join( stream_dict["name"] for stream_dict in missing_stream_dicts)) elif member_creating_announcement_only_stream: raise JsonableError(_('User cannot create a stream with these settings.')) # We already filtered out existing streams, so dup_streams # will normally be an empty list below, but we protect against somebody # else racing to create the same stream. (This is not an entirely # paranoid approach, since often on Zulip two people will discuss # creating a new stream, and both people eagerly do it.) created_streams, dup_streams = create_streams_if_needed(realm=user_profile.realm, stream_dicts=missing_stream_dicts) existing_streams += dup_streams return existing_streams, created_streams def access_default_stream_group_by_id(realm: Realm, group_id: int) -> DefaultStreamGroup: try: return DefaultStreamGroup.objects.get(realm=realm, id=group_id) except DefaultStreamGroup.DoesNotExist: raise JsonableError(_("Default stream group with id '%s' does not exist.") % (group_id,)) def get_stream_by_narrow_operand_access_unchecked(operand: Union[str, int], realm: Realm) -> Stream: """This is required over access_stream_* in certain cases where we need the stream data only to prepare a response that user can access and not send it out to unauthorized recipients. """ if isinstance(operand, str): return get_stream(operand, realm) return get_stream_by_id_in_realm(operand, realm)
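Taken together, the helpers above give callers two entry points (by id or by name) behind a single argument check. Below is a minimal sketch of that calling pattern as it might appear in view code; the wrapper function name and the assumption that it lives alongside these helpers (so the imported names above resolve without further imports) are illustrative, not taken from this file.

from typing import Optional, Tuple

def resolve_stream_for_request(user_profile: UserProfile,
                               stream_id: Optional[int],
                               stream_name: Optional[str]) -> Tuple[Stream, Recipient, Optional[Subscription]]:
    # Exactly one of the two identifiers may be supplied; otherwise the
    # helper raises a JsonableError explaining which argument is missing.
    check_for_exactly_one_stream_arg(stream_id=stream_id, stream=stream_name)
    if stream_id is not None:
        # Raises JsonableError("Invalid stream id") for unknown, foreign-realm,
        # or otherwise inaccessible streams.
        return access_stream_by_id(user_profile, stream_id)
    # The argument check guarantees a name was supplied on this branch.
    assert stream_name is not None
    # Same access policy, keyed by name; by design the error message does not
    # distinguish "no such stream" from "no access".
    return access_stream_by_name(user_profile, stream_name)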
Barclays bank is closing about 100 UK accounts held by cash transfer businesses, over fears they are being used for money laundering. The businesses are vital for Somali expatriates sending remittances back home, where banking facilities have collapsed. Aid workers say the service is a "lifeline" for 40% of the Somali population, who rely on the transfers. It is feared that the cash transfer business could now go underground. Money transfer shops are a lifeline for millions of people in the developing world. Their relatives living in richer countries send cash for school fees, medical care and even basic food. The companies having their Barclays accounts in the UK closed have branches in countries as diverse as Bangladesh, South Africa and Romania. Barclays says some of the companies don't have checks in place to stop cash reaching money launderers or terrorists. Worldwide, the remittance business is estimated to be worth $350bn a year. That's more than the annual value of the entire economy of Denmark - and more than double the total of international development aid. The long war in Somalia means there is no formal banking sector at all, so millions of Somalis depend on money sent from abroad. Several money transfer businesses - including Dahabshiil, the largest such business providing services to Somalia - say Barclays has given them a temporary reprieve of one month. Dahabshiil says it is urgently trying to meet the bank's criteria to keep its account open. Abdirashid Duale, chief executive officer of Dahabshiil, has said Barclays' decision could see money transfers pushed underground into the hands of "unregulated and illegal providers". Barclays is the last major UK bank that still provides such money transfer services to Somalia, which has an estimated 1.5 million of its nationals living overseas. The UK Serious Organised Crime Agency has identified money service businesses generally as a potential money laundering risk. All international banks have been tightening rules in a bid to cut money-laundering and funding of groups accused of terrorism. "Some money service businesses don't have the proper checks in place to spot criminal activity and could therefore unwittingly be facilitating money-laundering and terrorist financing," Barclays said in a statement last month. The bank emphasised that it was "very happy" to serve companies with strong anti-financial crime controls. Last month, more than 100 researchers and aid workers signed a letter urging the UK government to stop Barclays closing its account with Dahabshiil. They said the move would cause a crisis for the families that rely on the transfers. According to Dominic Thorncroft of the UK Money Transmitters Association (UKMTA) trade body, "closing these accounts will lead to a humanitarian crisis in Somalia". UKMTA represents over a third of money transfer firms in the UK. Oxfam says there is no need for a blanket shut-down of these kinds of accounts. Instead, it would like to see better regulation and the money service businesses investigated on a case-by-case basis. Research for the charity suggests that Somali migrants in the UK may be sending as much as £100 million a year back to Somalia, about 12.5% of the total amount. That would make it the second largest proportion of remittances, after the US. Oxfam's study also estimates that remittances could account for about 60% of a recipient's annual income.
import logging
import pkg_resources

from flask import Flask, request, Response
from lintreview.config import load_config
from lintreview.github import get_repository, get_lintrc
from lintreview.tasks import process_pull_request

config = load_config()
app = Flask("lintreview")
app.config.update(config)

log = logging.getLogger(__name__)
version = pkg_resources.get_distribution('lintreview').version


@app.route("/ping")
def ping():
    return "lint-review: %s pong\n" % (version,)


@app.route("/review/start", methods=["POST"])
def start_review():
    event = request.headers.get('X-Github-Event')
    if event == 'ping':
        return Response(status=200)

    try:
        payload = request.get_json()
        action = payload["action"]
        pull_request = payload["pull_request"]
        number = pull_request["number"]
        base_repo_url = pull_request["base"]["repo"]["git_url"]
        head_repo_url = pull_request["head"]["repo"]["git_url"]
        head_repo_ref = pull_request["head"]["ref"]
        user = pull_request["base"]["repo"]["owner"]["login"]
        head_user = pull_request["head"]["repo"]["owner"]["login"]
        repo = pull_request["base"]["repo"]["name"]
        head_repo = pull_request["head"]["repo"]["name"]
    except Exception as e:
        log.error("Got an invalid JSON body. '%s'", e)
        return Response(status=403,
                        response="You must provide a valid JSON body\n")

    log.info("Received GitHub pull request notification for "
             "%s %s, (%s) from: %s",
             base_repo_url, number, action, head_repo_url)

    if action not in ("opened", "synchronize", "reopened"):
        log.info("Ignored '%s' action." % action)
        return Response(status=204)

    gh = get_repository(app.config, head_user, head_repo)
    try:
        lintrc = get_lintrc(gh, head_repo_ref)
        log.debug("lintrc file contents '%s'", lintrc)
    except Exception as e:
        log.warn("Cannot download .lintrc file for '%s', "
                 "skipping lint checks.", base_repo_url)
        log.warn(e)
        return Response(status=204)

    try:
        log.info("Scheduling pull request for %s/%s %s", user, repo, number)
        process_pull_request.delay(user, repo, number, lintrc)
    except Exception:
        log.error("Could not publish job to celery. Make sure it's running.")
        return Response(status=500)

    return Response(status=204)
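For reference, here is a minimal sketch of the payload shape this handler actually reads, exercised through Flask's built-in test client. The PR number, repository URLs, names, and owner logins below are invented illustration values, and a real call would also need get_repository / get_lintrc and celery to be reachable or stubbed out, so treat this as a shape check rather than a full integration test.

import json

minimal_payload = {
    "action": "opened",
    "pull_request": {
        "number": 42,
        "base": {"repo": {"git_url": "git://example.test/owner/repo.git",
                          "name": "repo",
                          "owner": {"login": "owner"}}},
        "head": {"repo": {"git_url": "git://example.test/forker/repo.git",
                          "name": "repo",
                          "owner": {"login": "forker"}},
                 "ref": "feature-branch"},
    },
}

client = app.test_client()
resp = client.post("/review/start",
                   data=json.dumps(minimal_payload),
                   content_type="application/json",
                   headers={"X-Github-Event": "pull_request"})
# Expected outcomes per the handler above: 204 when the job is queued or the
# .lintrc lookup fails, 500 if celery cannot be reached, 403 for a body that
# is missing any of the fields read in the try block.
print(resp.status_code)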
In 2015, Rep. Marcia Fudge (D-Ohio) wrote a letter supporting a judge who was sentenced for beating his wife. This weekend, that judge’s wife was murdered, and he has been arrested as a suspect. Fudge is considered to be a favorite to challenge Rep. Nancy Pelosi (D-Calif.) for the role of speaker of the House of Representatives. In 2014, Lance Mason, a former Cuyahoga County judge in Ohio, punched his wife, Aisha Fraser, 20 times and slammed her head against the dashboard of his car while their children watched. She ended up with a broken bone in her skull, while he ended up pleading guilty and spending nine months in prison. On Saturday, his wife’s sister called the police, saying that her brother-in-law had murdered her sister. Mason tried to flee the scene, but crashed his car into a police vehicle. The police officer in that vehicle was sent to the hospital with serious injuries. Mason has been charged with felonious assault for the crash. The murder is still being investigated, so no charges have been pressed in that case so far, but Fraser’s death has been ruled a homicide. Mason was employed by the city of Cleveland as Minority Business Development Administrator at the time of the murder. The city has since terminated his employment. WOIO-TV published a letter it had obtained from Fudge to the county prosecutor’s office in support of Mason after his assault charge in 2015. “This letter of support, on behalf of Lance T. Mason, comes as a result of more than 20 years of friendship,” she wrote. In the letter, Fudge called Mason’s actions “out of character and totally contrary to everything I know about him.” She commended Mason for “recognizing that he needed help” and entering counseling. “The Lance T. Mason I know,” she concluded, “is a kind, intelligent man and loyal friend. The Lance T. Mason I know is an advocate for the people of his community. Whether as a County Prosecutor, State Senator or Common Pleas Judge. He is well respected and well liked.
from pyfuzzy_toolbox import transformation as trans from pyfuzzy_toolbox import preprocessing as pre import pyfuzzy_toolbox.features.count as count_features import pyfuzzy_toolbox.features.max as max_features import pyfuzzy_toolbox.features.sum as sum_features import test_preprocessing as tpre import nose print 'Loading test text 1' bow_sentences_1 = pre.start(tpre.text_1) bow_sentences_1 = trans.start(bow_sentences_1) print 'Loading test text 1a' bow_sentences_1a = pre.start(tpre.text_1a) bow_sentences_1a = trans.start(bow_sentences_1a) print 'Loading test text 2a' bow_sentences_2a = pre.start(tpre.text_2a) bow_sentences_2a = trans.start(bow_sentences_2a) """ ----------------------------- SUM FEATURES ----------------------------- """ """UNIGRAMS""" def test_sum_of_positive_adjectives_scores(): expected_sum = 0.0855961827957 sum_of_positive_adjectives = sum_features.sum_of_unigrams_scores(bow_sentences_1) nose.tools.assert_almost_equal(expected_sum, sum_of_positive_adjectives) def test_sum_of_positive_adverbs_scores(): expected_sum = 0.0 sum_of_positive_adverbs = sum_features.sum_of_unigrams_scores( bow_sentences_1, unigram=count_features.ADVS) nose.tools.assert_almost_equal(expected_sum, sum_of_positive_adverbs) def test_sum_of_positive_verbs_scores(): expected_sum = 0.02447258064516129 sum_of_positive_verbs = sum_features.sum_of_unigrams_scores( bow_sentences_1, unigram=count_features.VERBS) nose.tools.assert_almost_equal(expected_sum, sum_of_positive_verbs) def test_sum_of_negative_adjectives_scores(): expected_sum = -0.06547738317757008 sum_of_negative_adjectives = sum_features.sum_of_unigrams_scores( bow_sentences_1a, positive=False) nose.tools.assert_almost_equal(expected_sum, sum_of_negative_adjectives) def test_sum_of_negative_adverbs_scores(): expected_sum = -0.00891862928349 sum_of_negative_adverbs = sum_features.sum_of_unigrams_scores( bow_sentences_1a, unigram=count_features.ADVS, positive=False) nose.tools.assert_almost_equal(expected_sum, sum_of_negative_adverbs) def test_sum_of_negative_verbs_scores(): expected_sum = 0.0 sum_of_negative_verbs = sum_features.sum_of_unigrams_scores( bow_sentences_1a, unigram=count_features.VERBS, positive=False) nose.tools.assert_almost_equal(expected_sum, sum_of_negative_verbs) def test_sum_ratio_of_positive_adjectives_scores(): expected_sum = 0.0004601945311596716 sum_of_positive_adjectives = sum_features.sum_of_unigrams_scores( bow_sentences_1, ratio=True) nose.tools.assert_almost_equal(expected_sum, sum_of_positive_adjectives) def test_sum_ratio_of_positive_adverbs_scores(): expected_sum = 0.0 sum_of_positive_adverbs = sum_features.sum_of_unigrams_scores( bow_sentences_1, unigram=count_features.ADVS, ratio=True) nose.tools.assert_almost_equal(expected_sum, sum_of_positive_adverbs) def test_sum_ratio_of_positive_verbs_scores(): expected_sum = 0.00013157301422129724 sum_of_positive_verbs = sum_features.sum_of_unigrams_scores( bow_sentences_1, unigram=count_features.VERBS, ratio=True, positive=True) nose.tools.assert_almost_equal(expected_sum, sum_of_positive_verbs) def test_sum_ratio_of_negative_adjectives_scores(): expected_sum = -0.0008910665972944851 sum_of_negative_adjectives = sum_features.sum_of_unigrams_scores( bow_sentences_1, ratio=True, positive=False) nose.tools.assert_almost_equal(expected_sum, sum_of_negative_adjectives) def test_sum_ratio_of_negative_adverbs_scores(): expected_sum = -2.7783891848875693e-05 sum_of_negative_adverbs = sum_features.sum_of_unigrams_scores( bow_sentences_1a, unigram=count_features.ADVS, ratio=True, 
positive=False) nose.tools.assert_almost_equal(expected_sum, sum_of_negative_adverbs) def test_sum_ratio_of_negative_verbs_scores(): expected_sum = -0.000179220719158 sum_of_negative_verbs = sum_features.sum_of_unigrams_scores( bow_sentences_1, unigram=count_features.VERBS, ratio=True, positive=False) nose.tools.assert_almost_equal(expected_sum, sum_of_negative_verbs) def test_positive_to_negative_ratio_sum_scores_adjectives(): expected_ratio_sum = (0.0855961827957 + (-0.165738387097)) positive_to_negative_ratio = sum_features.positive_to_negative_ratio_sum_unigrams_scores( bow_sentences_1) nose.tools.assert_almost_equal( expected_ratio_sum, positive_to_negative_ratio) def test_positive_to_negative_ratio_sum_scores_adverbs(): expected_ratio_sum = (0.0105152647975 + (-0.00891862928349)) positive_to_negative_ratio = sum_features.positive_to_negative_ratio_sum_unigrams_scores( bow_sentences_1a, unigram=count_features.ADVS) nose.tools.assert_almost_equal( expected_ratio_sum, positive_to_negative_ratio) def test_positive_to_negative_ratio_sum_scores_verbs(): expected_ratio_sum = (0.0223977570093 + (0.0)) positive_to_negative_ratio = sum_features.positive_to_negative_ratio_sum_unigrams_scores( bow_sentences_1a, unigram=count_features.VERBS) nose.tools.assert_almost_equal( expected_ratio_sum, positive_to_negative_ratio) """BIGRAMS""" def test_sum_of_positive_adjectives_scores_and_bigrams_with_adjectives(): expected_sum = 0.0855961827957 sum_of_positive_adjectives_and_bigrams_with_adjectives = sum_features.sum_of_unigrams_and_bigrams_scores( bow_sentences_1) nose.tools.assert_almost_equal( expected_sum, sum_of_positive_adjectives_and_bigrams_with_adjectives) def test_sum_of_negative_adjectives_scores_and_bigrams_with_adjectives(): expected_sum = -2.2411307476635516 sum_of_negative_adjectives_and_bigrams_with_adjectives = sum_features.sum_of_unigrams_and_bigrams_scores( bow_sentences_1a, positive=False) nose.tools.assert_almost_equal( expected_sum, sum_of_negative_adjectives_and_bigrams_with_adjectives) def test_sum_of_positive_adverbs_scores_and_bigrams_with_adverbs(): expected_sum = 0.0 sum_of_positive_adverbs_and_bigrams_with_adverbs = sum_features.sum_of_unigrams_and_bigrams_scores( bow_sentences_1, unigram=count_features.ADVS, bigram_word_1=count_features.ADVS, bigram_word_2=count_features.ADVS) nose.tools.assert_almost_equal( expected_sum, sum_of_positive_adverbs_and_bigrams_with_adverbs) def test_sum_of_negative_adverbs_scores_and_bigrams_with_adverbs(): expected_sum = -0.00891862928349 sum_of_negative_adverbs_and_bigrams_with_adverbs = sum_features.sum_of_unigrams_and_bigrams_scores( bow_sentences_1a, unigram=count_features.ADVS, bigram_word_1=count_features.ADVS, bigram_word_2=count_features.ADVS, positive=False) nose.tools.assert_almost_equal( expected_sum, sum_of_negative_adverbs_and_bigrams_with_adverbs) def test_sum_of_positive_verbs_scores_and_bigrams_with_verbs(): expected_sum = 0.7079659139784946 sum_of_positive_verbs_and_bigrams_with_verbs = sum_features.sum_of_unigrams_and_bigrams_scores( bow_sentences_1, unigram=count_features.VERBS, bigram_word_1=count_features.ADVS, bigram_word_2=count_features.VERBS) nose.tools.assert_almost_equal( expected_sum, sum_of_positive_verbs_and_bigrams_with_verbs) def test_sum_of_negative_verbs_scores_and_bigrams_with_verbs(): expected_sum = -0.0333350537634 sum_of_negative_verbs_and_bigrams_with_verbs = sum_features.sum_of_unigrams_and_bigrams_scores( bow_sentences_1, unigram=count_features.VERBS, bigram_word_1=count_features.ADVS, 
bigram_word_2=count_features.VERBS, positive=False) nose.tools.assert_almost_equal( expected_sum, sum_of_negative_verbs_and_bigrams_with_verbs) def test_sum_ratio_of_positive_adjectives_scores_and_bigrams_with_adjectives(): expected_sum = 0.0855961827957 / 186 sum_of_positive_adjectives_and_bigrams_with_adjectives = sum_features.sum_of_unigrams_and_bigrams_scores( bow_sentences_1, ratio=True) nose.tools.assert_almost_equal( expected_sum, sum_of_positive_adjectives_and_bigrams_with_adjectives) def test_sum_ratio_of_negative_adjectives_scores_and_bigrams_with_adjectives(): expected_sum = -0.006981715724808572 sum_of_negative_adjectives_and_bigrams_with_adjectives = sum_features.sum_of_unigrams_and_bigrams_scores( bow_sentences_1a, positive=False, ratio=True) nose.tools.assert_almost_equal( expected_sum, sum_of_negative_adjectives_and_bigrams_with_adjectives) def test_sum_ratio_of_positive_adverbs_scores_and_bigrams_with_adverbs(): expected_sum = 0.0 sum_of_positive_adverbs_and_bigrams_with_adverbs = sum_features.sum_of_unigrams_and_bigrams_scores( bow_sentences_1, unigram=count_features.ADVS, bigram_word_1=count_features.ADVS, bigram_word_2=count_features.ADVS, ratio=True) nose.tools.assert_almost_equal( expected_sum, sum_of_positive_adverbs_and_bigrams_with_adverbs) def test_sum_ratio_of_negative_adverbs_scores_and_bigrams_with_adverbs(): expected_sum = -0.00891862928349 / 321 sum_of_negative_adverbs_and_bigrams_with_adverbs = sum_features.sum_of_unigrams_and_bigrams_scores( bow_sentences_1a, unigram=count_features.ADVS, bigram_word_1=count_features.ADVS, bigram_word_2=count_features.ADVS, positive=False, ratio=True) nose.tools.assert_almost_equal( expected_sum, sum_of_negative_adverbs_and_bigrams_with_adverbs) def test_sum_ratio_of_positive_verbs_scores_and_bigrams_with_verbs(): expected_sum = 0.003806268354723089 sum_of_positive_verbs_and_bigrams_with_verbs = sum_features.sum_of_unigrams_and_bigrams_scores( bow_sentences_1, unigram=count_features.VERBS, bigram_word_1=count_features.ADVS, bigram_word_2=count_features.VERBS, ratio=True) nose.tools.assert_almost_equal( expected_sum, sum_of_positive_verbs_and_bigrams_with_verbs) def test_sum_ratio_of_negative_verbs_scores_and_bigrams_with_verbs(): expected_sum = -0.0333350537634 / 186 sum_of_negative_verbs_and_bigrams_with_verbs = sum_features.sum_of_unigrams_and_bigrams_scores( bow_sentences_1, unigram=count_features.VERBS, bigram_word_1=count_features.ADVS, bigram_word_2=count_features.VERBS, positive=False, ratio=True) nose.tools.assert_almost_equal( expected_sum, sum_of_negative_verbs_and_bigrams_with_verbs) def test_positive_to_negative_ratio_sum_scores_adjectives_and_bigrams_with_adjectives(): expected_ratio_sum = 0.0855961827957 - 0.165738387097 positive_to_negative_ratio_sum = sum_features.positive_to_negative_ratio_sum_unigrams_and_bigrams_scores( bow_sentences_1) nose.tools.assert_almost_equal( expected_ratio_sum, positive_to_negative_ratio_sum) def test_positive_to_negative_ratio_sum_scores_adverbs_and_bigrams_with_adverbs(): expected_ratio_sum = 0.0 positive_to_negative_ratio_sum = sum_features.positive_to_negative_ratio_sum_unigrams_and_bigrams_scores( bow_sentences_1, unigram=count_features.ADVS, bigram_word_1=count_features.ADVS, bigram_word_2=count_features.ADVS) nose.tools.assert_almost_equal( expected_ratio_sum, positive_to_negative_ratio_sum) def test_positive_to_negative_ratio_sum_scores_verbs_and_bigrams_with_verbs(): expected_ratio_sum = 0.6746308602150538 positive_to_negative_ratio_sum = 
sum_features.positive_to_negative_ratio_sum_unigrams_and_bigrams_scores( bow_sentences_1, unigram=count_features.VERBS, bigram_word_1=count_features.ADVS, bigram_word_2=count_features.VERBS) nose.tools.assert_almost_equal( expected_ratio_sum, positive_to_negative_ratio_sum) """ ----------------------------- COUNT FEATURES ----------------------------- """ """UNIGRAMS""" def test_positive_scores_adjectives_count(): expected_count = count_features.count_of_unigrams_scores( bow_sentences_1a, unigram=count_features.ADJS, positive=True) assert expected_count == 16 def test_negative_scores_adjectives_count(): expected_count = count_features.count_of_unigrams_scores( bow_sentences_1a, unigram=count_features.ADJS, positive=False) assert expected_count == 4 def test_positive_scores_adverbs_count(): expected_count = count_features.count_of_unigrams_scores( bow_sentences_1a, unigram=count_features.ADVS, positive=True) assert expected_count == 1 def test_negative_scores_adverbs_count(): expected_count = count_features.count_of_unigrams_scores( bow_sentences_1a, unigram=count_features.ADVS, positive=False) assert expected_count == 2 def test_positive_scores_verbs_count(): expected_count = count_features.count_of_unigrams_scores( bow_sentences_1a, unigram=count_features.VERBS, positive=True) assert expected_count == 5 def test_negative_scores_verbs_count(): expected_count = count_features.count_of_unigrams_scores( bow_sentences_1a, unigram=count_features.VERBS, positive=False) assert expected_count == 0 def test_positive_to_negative_scores_ratio_of_adjectives_count(): expected_count = count_features.positive_to_negative_ratio_count_unigrams_scores( bow_sentences_1a, unigram=count_features.ADJS) assert expected_count == (16 - 4) def test_positive_to_negative_scores_ratio_of_adverbs_count(): expected_count = count_features.positive_to_negative_ratio_count_unigrams_scores( bow_sentences_1a, unigram=count_features.ADVS) assert expected_count == (1 - 2) def test_positive_to_negative_scores_ratio_of_verbs_count(): expected_count = count_features.positive_to_negative_ratio_count_unigrams_scores( bow_sentences_1a, unigram=count_features.VERBS) assert expected_count == (5 - 0) """BIGRAMS""" def test_positive_scores_adjectives_count_and_bigrams_with_adjectives(): expected_count = count_features.count_of_unigrams_and_bigrams_scores( bow_sentences_1a, unigram=count_features.ADJS, bigram_word_1=count_features.ADVS, bigram_word_2=count_features.ADJS, positive=True) assert expected_count == (16 + 1) def test_negative_scores_adjectives_count_and_bigrams_with_adjectives(): expected_count = count_features.count_of_unigrams_and_bigrams_scores( bow_sentences_1a, unigram=count_features.ADJS, bigram_word_1=count_features.ADVS, bigram_word_2=count_features.ADJS, positive=False) assert expected_count == (4 + 3) def test_positive_scores_adverbs_count_and_bigrams_with_adverbs(): expected_count = count_features.count_of_unigrams_and_bigrams_scores( bow_sentences_1a, unigram=count_features.ADVS, bigram_word_1=count_features.ADVS, bigram_word_2=count_features.ADVS, positive=True) assert expected_count == (1 + 0) def test_negative_scores_adverbs_count_and_bigrams_with_adverbs(): expected_count = count_features.count_of_unigrams_and_bigrams_scores( bow_sentences_1a, unigram=count_features.ADVS, bigram_word_1=count_features.ADVS, bigram_word_2=count_features.ADVS, positive=False) assert expected_count == (2 + 0) def test_positive_scores_verbs_count_and_bigrams_with_verbs(): expected_count = 
count_features.count_of_unigrams_and_bigrams_scores( bow_sentences_1a, unigram=count_features.VERBS, bigram_word_1=count_features.ADVS, bigram_word_2=count_features.VERBS, positive=True) assert expected_count == (5 + 1) def test_negative_scores_verbs_count_and_bigrams_with_verbs(): expected_count = count_features.count_of_unigrams_and_bigrams_scores( bow_sentences_1a, unigram=count_features.VERBS, bigram_word_1=count_features.ADVS, bigram_word_2=count_features.VERBS, positive=False) assert expected_count == (0 + 0) def test_positive_to_negative_scores_ratio_of_adjectives_count_and_bigrams_with_adjectives(): expected_count = count_features.positive_to_negative_ratio_count_unigrams_and_bigrams_scores( bow_sentences_1a, unigram=count_features.ADJS, bigram_word_1=count_features.ADVS, bigram_word_2=count_features.ADJS) assert expected_count == (16 + 1) - (4 + 3) def test_positive_to_negative_scores_ratio_of_adverbs_count_and_bigrams_with_adverbs(): expected_count = count_features.positive_to_negative_ratio_count_unigrams_and_bigrams_scores( bow_sentences_1a, unigram=count_features.ADVS, bigram_word_1=count_features.ADVS, bigram_word_2=count_features.ADVS) assert expected_count == (1 + 0) - (2 + 0) def test_positive_to_negative_scores_ratio_of_verbs_count_and_bigrams_with_verbs(): expected_count = count_features.positive_to_negative_ratio_count_unigrams_and_bigrams_scores( bow_sentences_1a, unigram=count_features.VERBS, bigram_word_1=count_features.ADVS, bigram_word_2=count_features.VERBS) assert expected_count == (5 + 1) - (0 + 0) def test_count_selected_ngrams(): assert count_features.count_selected_ngrams(bow_sentences_1) == 17 assert count_features.count_selected_ngrams(bow_sentences_1a) == 33 assert count_features.count_selected_ngrams(bow_sentences_2a) == 13 """ ----------------------------- MAX FEATURES ----------------------------- """ """UNIGRAMS""" def test_max_rule_score_for_adjective(): assert max_features.max_rule_score_for_unigrams( bow_sentences_1a, unigram=count_features.ADJS)['sign'] == 0 def test_max_rule_score_for_adverbs(): assert max_features.max_rule_score_for_unigrams( bow_sentences_1a, unigram=count_features.ADVS)['sign'] == 1 def test_max_rule_score_for_verbs(): assert max_features.max_rule_score_for_unigrams( bow_sentences_1a, unigram=count_features.VERBS)['sign'] == 1 """BIGRAMS""" def test_max_rule_score_for_adjective_and_bigrams_with_adjectives(): assert max_features.max_rule_score_for_unigrams_and_bigrams(bow_sentences_1a, unigram=count_features.ADJS, bigram_word_1=count_features.ADVS, bigram_word_2=count_features.ADJS) == 0 def test_max_rule_score_for_adverbs_and_bigrams_with_adverbs(): assert max_features.max_rule_score_for_unigrams_and_bigrams(bow_sentences_1a, unigram=count_features.ADVS, bigram_word_1=count_features.ADVS, bigram_word_2=count_features.ADVS) == 1 def test_max_rule_score_for_verbs_and_bigrams_with_verbs(): assert max_features.max_rule_score_for_unigrams_and_bigrams(bow_sentences_1a, unigram=count_features.VERBS, bigram_word_1=count_features.ADVS, bigram_word_2=count_features.VERBS) == 1 """ ----------------------------- PERCENTAGE FEATURES ----------------------------- """ def test_percentage_of_negated_ngrams_by_document_size(): nose.tools.assert_almost_equal(0.00537634408602, count_features.percentage_of_negated_ngrams_by_document_size(bow_sentences_1)['value']) nose.tools.assert_almost_equal(0.0155763239875, count_features.percentage_of_negated_ngrams_by_document_size(bow_sentences_1a)['value']) nose.tools.assert_almost_equal(0.0127388535032, 
count_features.percentage_of_negated_ngrams_by_document_size(bow_sentences_2a)['value']) """ ----------------------------- MODULE TESTS ----------------------------- """ def test_all_count_features(): features_list = count_features.all(bow_sentences_1) attributes = [] data = [] for fl in features_list: attributes.append((fl['name'], 'REAL')) data.append(fl['value']) bow_sentences_1_dict = {'attributes': attributes, 'data': data} # print bow_sentences_1_dict['attributes'] # print '------------------------------------------' # print bow_sentences_1_dict['data'] assert len(bow_sentences_1_dict['attributes']) == len(bow_sentences_1_dict['data']) == 27 def test_all_sum_features(): features_list = sum_features.all(bow_sentences_1) attributes = [] data = [] for fl in features_list: attributes.append((fl['name'], 'REAL')) data.append(fl['value']) bow_sentences_1_dict = {'attributes': attributes, 'data': data} # print bow_sentences_1_dict['attributes'] # print '------------------------------------------' # print bow_sentences_1_dict['data'] assert len(bow_sentences_1_dict['attributes']) == len(bow_sentences_1_dict['data']) == 40 def test_all_max_features(): features_list = max_features.all(bow_sentences_1) attributes = [] data = [] for fl in features_list: attributes.append((fl['name'], 'REAL')) data.append(fl['value']) bow_sentences_1_dict = {'attributes': attributes, 'data': data} # print bow_sentences_1_dict['attributes'] # print '------------------------------------------' # print bow_sentences_1_dict['data'] assert len(bow_sentences_1_dict['attributes']) == len(bow_sentences_1_dict['data']) == 8
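The three module tests above each flatten one extractor's all() output into parallel attributes/data sequences. A small sketch of doing that across all three extractors at once follows; the helper name extract_feature_row is hypothetical, and the idea that the resulting row feeds an ARFF-style dataset is an assumption suggested only by the 'REAL' attribute type used in the tests.

def extract_feature_row(bow_sentences):
    # Collect count, sum and max features for one preprocessed document,
    # mirroring the attribute/data pairing used in the module tests above.
    attributes = []
    data = []
    for module in (count_features, sum_features, max_features):
        for fl in module.all(bow_sentences):
            attributes.append((fl['name'], 'REAL'))
            data.append(fl['value'])
    return {'attributes': attributes, 'data': data}

# For the first test text this should give 27 + 40 + 8 = 75 features,
# matching the per-module lengths asserted in the tests above.
row = extract_feature_row(bow_sentences_1)
assert len(row['attributes']) == len(row['data']) == 75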
Those waves behind me - I know they're not your typical soundtrack for election coverage. But here in Orange County, Calif., there's a really interesting race that could help determine whether Republicans hang on to control of the House. Republican Congressman Dana Rohrabacher has served the 48th Congressional District here for nearly three decades. But there is a political newcomer, Democrat Harley Rouda, who seems to have a shot at unseating him. This district - it has been moving to the left, and we got a hint of why that is the other night. We were in Laguna Niguel. It's one of the uber-wealthy towns in the hills above the Pacific here. And this is where some very rich voters are changing the political landscape. UNIDENTIFIED PERSON: So what was your name again? ERIC MCDANIEL, BYLINE: Eric McDaniel. GREENE: That was our editor Eric giving his name, so we could pull through the gate. We then went up to a house where there was a Porsche parked in front of this huge fountain. And our host, Duraid Antone, brought us inside. DURAID ANTONE: Morning, David. How are you? GREENE: Nice to meet you. ANTONE: I was a Republican for 31 years. When I actually turned 18 and joined the military, we were just coming off of, you know, Ronald Reagan being president and really the strong stance he had against, you know, Russia and Gorbachev and his charm and charisma. It was really cool to be a Republican at the time. GREENE: But he doesn't see it that way anymore. Antone registered as a Democrat for the first time this year. He's been high-dollar fundraising for Democrats, including Congressman Rohrabacher's opponent. This really has been a defining year for him. He says President Trump has been undermining democratic institutions, though he says his thinking was starting to change a few years ago. Antone voted for President Barack Obama, even though Democrats usually push tax policies that cost wealthy people like him a whole lot. ANTONE: To me, his policies, from a business perspective, hurt me, OK? Now, mind you - I say that very loosely. I live in a beautiful home. I got beautiful cars (laughter) outside. I got a wonderful wife - life, not even just wife - phenomenal wife, by the way - wonderful wife (laughter). GREENE: Wonderful wife and life. ANTONE: And life - exactly. GREENE: What about Rohrabacher? Have you supported him in the past? GREENE: Up through, like, the most recent elections? ANTONE: Yeah, even the last ones. I believe so, yeah, because it's not like I spend a lot of time looking at Dana and his policies or what he's done for my district or not. And I would say a lot of people were in that element, you know? GREENE: So you would vote for, say, Barack Obama but then Republicans straight through in everything else. ANTONE: Conservative - fiscally, fiscally. It took us to this point where we finally have to go back and say, why did we get here? How did we get here? GREENE: ...Making a ton of money - good riddance. This is our party now. GREENE: And Trump gave them hope. ANTONE: Great. So what is it that you identify with him? I mean, outside of this rhetoric of, you know, draining the swamp, do you sacrifice where we are as a nation as far as the level of insult, derogatory comments, integrity, character, civility - for what? - just for a $300, $400 tax break? Maybe you would, you know, and I think that's where we differ. GREENE: The fact that you have become a Democrat this year, registered, and this - something has clicked for you. 
GREENE: What does that say about our country right now, this moment? ANTONE: Yeah, it's crazy. I mean, yeah, we're in a very unique time. And I just feel like what we have at risk is just losing our democracy and our ways as - because I go back. And I took for granted what I stand for and what I represent. And it really took me a while to understand that I am the American dream - someone that came here, humble beginnings. You know, and I look at my parents - the struggles. They've sacrificed. To leave a country like Iraq, come to a state like Detroit, Mich. - cold as hell - with barely any money in their pocket, you know, and to all sacrifice for the greater good of their kids - you go, what a sacrifice. And we've taken that sacrifice for granted. GREENE: All right. So that was Duraid Antone. He is one of the voters we've been speaking to here in California's 48th Congressional District. It is right along the ocean in Orange County, Calif. We're going to hear a lot more voices throughout the morning. Transcript provided by NPR, Copyright NPR.
#!/usr/bin/env python3 import sys import glob import numpy as np import matplotlib.pyplot as plt import matplotlib.patches as patches from utilities import dataFromDict from matplotlib.ticker import FormatStrFormatter # 08.04.2016: Mona added an option for colorbar bounds to addImagePlot plt.rc('xtick', labelsize=24); #plt.rc('ytick.major', size=10) plt.rc('ytick', labelsize=24); #plt.rc('ytick.minor', size=6) plt.rcParams["font.family"] = "serif" plt.rcParams["legend.fontsize"] = "large" #plt.rcParams["font.serif"] = "Utopia" #plt.rcParams["font.family"] = "monospace" #plt.rcParams["font.monospace"] = "Courier" #plt.rcParams["legend.labelspacing"] = 1. iCg = 0 # Global integer for color iMg = 0 # Global integer for markers iLg = 0 # Global integer for line styles gxI = -1 # Global x-index for csv animations gyLst = [] # Global y-value list for csv animations # The available color maps: cmaps = { 1:'rainbow', 2:'jet', 3:'hot', 4:'gist_earth', 5:'nipy_spectral',\ 6:'coolwarm', 7:'gist_rainbow', 8:'Spectral', 9:'CMRmap', 10:'cubehelix',\ 11:'seismic', 12:'bwr', 13:'terrain', 14:'gist_ncar', 15:'gnuplot2', \ 16:'BuPu', 17:'GnBu', 18:'RdPu', 19:'YlGnBu', 20:'YlOrRd',\ 21:'Oranges', 22:'Reds', 23:'Purples', 24:'Blues'} # NOTE! Some good ones: 2, 5, 12, 14 # The available color maps in the new version of matplotlib: cmaps_new = { 1:'viridis', 2:'inferno', 3:'plasma', 4:'magma', 5:'Blues', 6:'BuGn', 7:'BuPu', 8:'GnBu', 9:'Greens', 10:'Greys', 11:'Oranges', 12:'OrRd', 13:'PuBu', 14:'PuBuGn', 15:'PuRd', 16:'Purples', 17:'RdPu', 18:'afmhot', 19:'autumn', 20:'bone', 22:'cool', 23:'copper', 24:'gist_heat', 25:'gray', 26:'hot', 27:'pink', 28:'spring', 29:'summer', 30:'winter', 31:'Reds', 32:'YlGn', 33:'YlGnBu', 34:'YlOrBr', 35:'YlOrRd', 36:'BrBG', 37:'bwr', 38:'coolwarm', 39:'PiYG', 40:'PRGn', 41:'PuOr', 42:'RdBu', 43:'RdGy', 44:'RdYlBu', 45:'RdYlGn', 46:'Spectral', 47:'seismic', 48:'Accent', 49:'Dark2', 50:'Paired', 51:'Pastel1', 52:'Pastel2', 53:'Set1', 54:'Set2', 55:'Set3', 56:'gist_earth',57:'terrain', 58:'ocean', 59:'gist_stern', 60:'brg', 61:'CMRmap', 62:'cubehelix', 63:'gnuplot', 64:'gnuplot2', 65:'gist_ncar',66:'nipy_spectral', 67:'jet', 68:'rainbow', 69:'gist_rainbow', 70:'hsv', 71:'flag', 72:'prism'} # =*=*=*=* FUNCTION DEFINITIONS *=*=*=*=*=*=*=*=*=*=*=* # =*=*=*=*=*=*=*=*=*=*=*=*=*=*=*=*=*=*=*=*=*=*=*=*=*=* def addFigAxes( fig ): if( len(fig.get_axes()) == 0 ): ax = fig.add_axes( [0.115, 0.09 , 0.85 , 0.81] ) #[left, up, width, height] else: ax = fig.get_axes()[0] return ax # =*=*=*=*=*=*=*=*=*=*=*=*=*=*=*=*=*=*=*=*=*=*=*=*=*=* def printDict( D , ncols=3 ): i = 0; pStr = str() for k, v in D.items(): i+=1 # use at least 13 chars to make columns line up pStr += ' {}: {:13s} \t'.format(k,v) if( i%ncols == 0 ): print(pStr); pStr = str() # print whatever is left at the end print(pStr+'\n'); pStr = None; i = None # =*=*=*=*=*=*=*=*=*=*=*=*=*=*=*=*=*=*=*=*=*=*=*=*=*=* def setColormap( img ): global cmaps # Select the desired colormap try: printDict( cmaps_new, 3 ) icmap = int(input(' Enter integer key for the colormap = ')) try: nc = int(input(' Number of discrete colors in colormap = ')) except: nc = None cm = plt.get_cmap( cmaps_new[icmap], nc ) img.set_cmap(cm) except: print(' Using default colormap.') pass return img # =*=*=*=*=*=*=*=*=*=*=*=*=*=*=*=*=*=*=*=*=*=*=*=*=*=* def setColorbarLims( img, lMax=None, lMin=None ): # Specify the bounds in the colorbar if( (lMax is None) or (lMin is None) ): try: mm = input(' Enter limits for colorbar: <min> <max> =') lMin,lMax = list( map(float, 
mm.split()) ) img.set_clim([lMin,lMax]) except: pass else: try: lMin = float(lMin); lMax = float(lMax) img.set_clim([lMin,lMax]) except: pass return img # =*=*=*=*=*=*=*=*=*=*=*=*=*=*=*=*=*=*=*=*=*=*=*=*=*=* def userColormapSettings( fig, im, Rmax=None, Rmin=None ): uticks =None # User-defined ticks. <None> leads to default setting. eformat=None im = setColorbarLims( im ) im = setColormap( im ) try: uticks=list( map(float, input(' Enter ticks separated by comma (empty=default):').split(',')) ) except: uticks=None if(Rmax is not None): if(Rmax<1.e-3): eformat='%.2e' cb = fig.colorbar(im, ticks=uticks, format=eformat) return cb # =*=*=*=*=*=*=*=*=*=*=*=*=*=*=*=*=*=*=*=*=*=*=*=*=*=* def random_marker(): markerList = ['x','s','p','h','d','*','o','+'] nm = len(markerList) im = np.random.random_integers(nm) - 1 mrk = markerList[im] return mrk # =*=*=*=*=*=*=*=*=*=*=*=*=*=*=*=*=*=*=*=*=*=*=*=*=*=* def marker_stack(): global iMg markerList = ['+','s','D','o','h','p','*','x'] mrk = markerList[ iMg ] iMg = min( ( iMg + 1 ), ( len(markerList)-1 ) ) return mrk # =*=*=*=*=*=*=*=*=*=*=*=*=*=*=*=*=*=*=*=*=*=*=*=*=*=* def linestyle_stack(lm=1, il=None): global iLg # '-' : solid line style, '--': dashed line style # '-.' : dash-dot line style, ':' : dotted line style if( lm == 1 ): lstyleList = ['-','--','-.',':'] else: lstyleList = ['-','--'] # ['x','+'] # ['-','--'] # nlinestyles = len(lstyleList) if( il is not None and np.isscalar(il) ): iLg = min( int(il) , (nlinestyles-1) ) lstyle = lstyleList[iLg] iLg += 1 if( iLg > (nlinestyles-1) ): iLg = 0 return lstyle # =*=*=*=*=*=*=*=*=*=*=*=*=*=*=*=*=*=*=*=*=*=*=*=*=*=* def color_stack(lm=1, ic=None): global iCg ''' Brown '#A52A2A', DeepPink '#FF1493', BlueViolet '#8A2BE2', DarkCyan '#008B8B', DarkOrange '#FF8C00', DarkMagenta '#8B008B', GoldenRod '#DAA520', SeaGreen '#2E8B57', OrangeRed '#FF4500', SlateBlue '#6A5ACD' ''' if( lm == 1 ): colorList = ['b','r','c','k','#FF8C00','g','#8B008B',\ '#FF1493','#8A2BE2','#008B8B','m',\ '#2E8B57','#FF4500','#6A5ACD',\ '#A52A2A','#DAA520'] else: colorList = ['b','b','r','r','c','c','k','k','#FF8C00','#FF8C00','g','g','#8B008B','#8B008B',\ '#FF1493','#FF1493','#8A2BE2','#8A2BE2','#008B8B','#008B8B','m','m',\ '#2E8B57','#2E8B57','#FF4500','#FF4500','#6A5ACD','#6A5ACD',\ '#A52A2A','#A52A2A','#DAA520','#DAA520'] ncolors = len(colorList) if( ic is not None and np.isscalar(ic) ): iCg = min( int(ic) , ( ncolors-1 ) ) clr = colorList[iCg] iCg += 1 if( iCg > (ncolors-1) ): iCg = 0 return clr # =*=*=*=*=*=*=*=*=*=*=*=*=*=*=*=*=*=*=*=*=*=*=*=*=*=* def plotBar(fig, xb, yb, labelStr, plotStr=["","",""], wb=0.6, errb=0): ax = addFigAxes( fig ) bars=ax.bar(xb,yb,width=wb, label=labelStr, yerr=errb, ecolor='r') ax.set_title( plotStr[0], fontsize=22) ax.set_xlabel(plotStr[1], fontsize=22) ax.set_ylabel(plotStr[2], fontsize=22); ax.grid(True) return fig # =*=*=*=*=*=*=*=*=*=*=*=*=*=*=*=*=*=*=*=*=*=*=*=*=*=* def addImagePlot( fig, R, titleStr, gridOn=False, limsOn=False, plotNests=False): global cmaps ax = addFigAxes( fig ) im = ax.imshow(np.real(R), aspect='auto') while (plotNests): try: nestParams=list( map(float, input(' Please enter nest location (top left x, top left y, width, height).\n' ' Leave empty to continue plotting.\n').split(',')) ) annotation=str(input(' Please enter annotation for nest.\n')) except: break try: nesti = patches.Rectangle((nestParams[0],nestParams[1]),nestParams[2],nestParams[3], linewidth=1, edgecolor='r', facecolor='none') ax.add_patch(nesti) 
ax.annotate(annotation,(nestParams[0],nestParams[1]),textcoords='offset pixels',xytext=(4,-18),color='r',size='medium') except: print(' Nest drawing failed.') ax.set_title(titleStr) ax.grid(gridOn) if(limsOn): cbar = userColormapSettings( fig, im, np.nanmax(R) ) else: minval = np.nanmin(R); maxval = np.nanmax(R) minSign = np.sign( minval ) maxSign = np.sign( maxval ) vmin = min( np.abs(minval), np.abs(maxval) ) vmax = max( np.abs(minval), np.abs(maxval) ) if( vmax/(vmin+1.E-5) < 1.5 ): vmax *= maxSign; vmin = minSign * vmax else: vmax *= maxSign; vmin *= minSign im = setColorbarLims( im, vmax, vmin ) cbar = fig.colorbar(im) return fig # =*=*=*=*=*=*=*=*=*=*=*=*=*=*=*=*=*=*=*=*=*=*=*=*=*=* def addImagePlotDict(fig, RDict ): global cmaps R = dataFromDict('R', RDict, allowNone=False) ex = dataFromDict('extent', RDict, allowNone=True) ttl = dataFromDict('title', RDict, allowNone=True) xlbl = dataFromDict('xlabel', RDict, allowNone=True) ylbl = dataFromDict('ylabel', RDict, allowNone=True) gOn = dataFromDict('gridOn', RDict, allowNone=False) lOn = dataFromDict('limsOn', RDict, allowNone=False) cm = dataFromDict('cmap', RDict, allowNone=True) orig = dataFromDict('origin', RDict, allowNone=True) ax = addFigAxes( fig ) im = ax.imshow(np.real(R), origin=orig, extent=ex, aspect='auto', cmap=cm) ax.set_title(ttl); ax.set_xlabel(xlbl); ax.set_ylabel(ylbl) ax.grid(gOn) if(lOn): cbar = userColormapSettings( fig, im, np.nanmax(R), np.nanmin(R) ) else: cbar = fig.colorbar(im) return fig # =*=*=*=*=*=*=*=*=*=*=*=*=*=*=*=*=*=*=*=*=*=*=*=*=*=* def addToPlot(fig, x,y,labelStr, plotStr=["","",""], logOn=False): ''' Add variables x,y to a given plot. Test whether y has multiple columns --> Require different treatment. ''' ax = addFigAxes( fig ) d = np.size(np.shape(y)) # Test if y has multiple columns for i in range(d): if(d==1): yt = y else: yt = y[:,i]; labelStr+='['+str(i)+']' if(logOn): lines=ax.semilogy(x,yt,linestyle_stack(1, None), linewidth=2.0, label=labelStr) #lines=ax.loglog(x,yt,'-', linewidth=1.3, label=labelStr) else: lines=ax.plot(x,yt,linestyle_stack(1, None), linewidth=2.0, label=labelStr) ax.set_title( plotStr[0], fontsize=22) ax.set_xlabel(plotStr[1], fontsize=28) ax.set_ylabel(plotStr[2], fontsize=28); ax.grid(True) return fig # =*=*=*=*=*=*=*=*=*=*=*=*=*=*=*=*=*=*=*=*=*=*=*=*=*=* def plotXX( fig, pDict, ax=None ): global iCg, iMg, iLg fileStr = dataFromDict('filename', pDict, allowNone=False) logOn = dataFromDict('logOn', pDict, allowNone=False) llogOn = dataFromDict('llogOn', pDict, allowNone=False) Cx = dataFromDict('Cx', pDict, allowNone=False) Cy = dataFromDict('Cy', pDict, allowNone=False) revAxes = dataFromDict('revAxes', pDict, allowNone=False) linemode = dataFromDict('lm', pDict, allowNone=False) linewidth= dataFromDict('lw', pDict, allowNone=False) ylims = dataFromDict('ylims', pDict, allowNone=True) xlims = dataFromDict('xlims', pDict, allowNone=True) reset = dataFromDict('reset', pDict, allowNone=True) try: x = np.loadtxt(fileStr) except: x = np.loadtxt(fileStr,delimiter=',') if( ax is None ): ax = addFigAxes( fig ) # Reset global integer for color, marker and linestyle. if( reset ): iCg = 0; iMg = 0; iLg = 0 labelStr = labelString( fileStr ) #lStr = fileStr.rsplit(".", 1)[0] # Remove the ".dat" #rStr = lStr.rsplit("_")[-1] #tStr = lStr.split("/", 2) #if( tStr[0] is "." ): # lStr = tStr[1] #else: # lStr = tStr[0] #labelStr = lStr+"_"+rStr # Print each column separately amax = 0. 
Ny = (x.shape[1]-1) for i in range(Ny): if( Ny == 1 ): labelXX = labelStr else: labelXX = labelStr+'['+str(i)+']' if( revAxes ): yp = Cy*x[:,0]; xp = Cx*x[:,i+1]; dp = xp else: xp = Cx*x[:,0]; yp = Cy*x[:,i+1]; dp = yp if( logOn or llogOn ): if( revAxes ): xp = np.abs( xp ) plotf = ax.semilogx else: yp = np.abs( yp ) plotf = ax.semilogy if( llogOn ): plotf = ax.loglog else: plotf = ax.plot lines = plotf( xp, yp, \ linestyle_stack(lm=linemode), linewidth=linewidth, \ label=labelXX, color=color_stack(lm=linemode)) lmax = np.abs(np.max(dp)) # Local maximum if( lmax > amax ): amax = lmax #if( amax <5.e-4 and revAxes): # if( revAxes ): ax.xaxis.set_major_formatter(FormatStrFormatter('%.2e')) # else: ax.yaxis.set_major_formatter(FormatStrFormatter('%.2e')) ax.set_ybound(lower=ylims[0], upper=ylims[1] ) ax.set_xbound(lower=xlims[0], upper=xlims[1] ) ax.set_xlabel(" ") ax.set_ylabel(" ") return fig # =*=*=*=*=*=*=*=*=*=*=*=*=*=*=*=*=*=*=*=*=*=*=*=*=*=* def ciDataFromFile( filename ): try: x = np.loadtxt(filename) except: x = np.loadtxt(filename,delimiter=',') nrows, ncols = x.shape #print(' nrows, ncols = {}, {}'.format(nrows,ncols)) if( ncols > 3 ): # Copy values and clear memory d = x[:,0]; v = x[:,1]; vl = x[:,2]; vu = x[:,3] elif( ncols == 2 ): d = x[:,0]; v = x[:,1]; vl = x[:,1]; vu = x[:,1] else: msg = ''' Error! ncols has a strange value {}. The data must be in [x, v, v_lower, v_upper, (possibly something else)] format. Or alternatively [x,v] format in which case no confidence intervals will be present. Exiting...'''.format( ncols ) sys.exit(msg) # clear memory x = None return d, v, vl, vu # =*=*=*=*=*=*=*=*=*=*=*=*=*=*=*=*=*=*=*=*=*=*=*=*=*=* def ciScaleVals( d, v, vl, vu, Cx, Cy, revAxes ): if( revAxes ): xx = Cx*v vl *= Cx; vu *= Cx d *= Cy yy = d else: yy = Cy*v vl *= Cy; vu *= Cy d *= Cx xx = d return d, xx, yy, vl, vu # =*=*=*=*=*=*=*=*=*=*=*=*=*=*=*=*=*=*=*=*=*=*=*=*=*=* def ciDiffVals( x1, y1, v1l, v1u, x2, y2, v2l, v2u, revAxes ): # Because it is possible that all v2u > v1u (or v2u < v1u) we have to prepare for that. id2 = (v2u>v1u) id1 = ~id2 id2On=False; id1On=False if( np.count_nonzero(id2) > 0 ): id2On = True v1mu = np.abs( np.mean(v1u[id2]) ) if( np.count_nonzero(id1) > 0 ): id1On = True v1ml = np.abs( np.mean(v1l[id1]) ) if( revAxes ): #if( id2On ): x1[id2] =np.maximum( ((v2l[id2]-v1u[id2])/v1mu)*100., 0.) # If diff is pos, there's overlap #if( id1On ): x1[id1] =np.minimum( ((v2u[id1]-v1l[id1])/v1ml)*100., 0.) # If diff is neg, -- " -- if( id2On ): x1[id2] =np.maximum( (v2l[id2]-v1u[id2]), 0.) # If diff is pos, there's overlap if( id1On ): x1[id1] =np.minimum( (v2u[id1]-v1l[id1]), 0.) # If diff is neg, -- " -- y1 = 0.5*( y1 + y2 ) dm = np.mean( np.abs(x1) ) else: #if( id2On ): y1[id2] =np.maximum( ((v2l[id2]-v1u[id2])/v1mu)*100., 0.) # If diff is pos, there's overlap #if( id1On ): y1[id1] =np.minimum( ((v2u[id1]-v1l[id1])/v1ml)*100., 0.) # If diff is neg, -- " -- if( id2On ): y1[id2] =np.maximum( (v2l[id2]-v1u[id2]), 0.) # If diff is pos, there's overlap if( id1On ): y1[id1] =np.minimum( (v2u[id1]-v1l[id1]), 0.) # If diff is neg, -- " -- x1 = 0.5*( x1 + x2 ) dm = np.mean( np.abs(y1) ) return x1, y1, dm # =*=*=*=*=*=*=*=*=*=*=*=*=*=*=*=*=*=*=*=*=*=*=*=*=*=* def labelString( fname ): ls = fname if( "." in ls ): ls = ls.rsplit(".", 1)[0] if( "/" in ls ): sL = ls.split('/') if( len(sL) > 1 ): lL = list(map( len, sL )) if( (lL[0] > 1) and ("." not in sL[0]) ): ls = sL[0] elif((lL[1] > 1) and ("." 
not in sL[1]) ): ls = sL[1] else: ls = sL[-1] return ls # =*=*=*=*=*=*=*=*=*=*=*=*=*=*=*=*=*=*=*=*=*=*=*=*=*=* def plotCiXY( fig, pDict ): fn = dataFromDict('filename', pDict, allowNone=False) Cx = dataFromDict('Cx', pDict, allowNone=True) Cy = dataFromDict('Cy', pDict, allowNone=True) linemode= dataFromDict('lm', pDict, allowNone=False) logOn = dataFromDict('logOn', pDict, allowNone=True) revAxes = dataFromDict('revAxes', pDict, allowNone=True) ylims = dataFromDict('ylims', pDict, allowNone=True) xlims = dataFromDict('xlims', pDict, allowNone=True) labelStr = labelString( fn ) if( Cx is None ): Cx = 1. if( Cy is None ): Cy = 1. d, v, v_l, v_u = ciDataFromFile( fn ) ax = addFigAxes( fig ) d, xp, yp, v_l, v_u = ciScaleVals( d, v, v_l, v_u, Cx, Cy, revAxes ) if( revAxes ): xlb = 'V(d)'; ylb = 'd' else: ylb = 'V(d)'; xlb = 'd' if( logOn ): if( revAxes ): plotf = ax.semilogx fillbf = ax.fill_betweenx else: plotf = ax.semilogy fillbf= ax.fill_between else: plotf = ax.plot if( revAxes ): fillbf = ax.fill_betweenx else: fillbf = ax.fill_between lines = plotf( xp, yp, linestyle_stack(lm=linemode), lw=3., \ label=labelStr, color=color_stack(lm=linemode)) linef = fillbf( d, v_u, v_l, facecolor='white', edgecolor='white', alpha=0.25) ax.set_ybound(lower=ylims[0], upper=ylims[1] ) ax.set_xbound(lower=xlims[0], upper=xlims[1] ) ax.set_xlabel(xlb) ax.set_ylabel(ylb) return fig # =*=*=*=*=*=*=*=*=*=*=*=*=*=*=*=*=*=*=*=*=*=*=*=*=*=* def plotCiDiffXY( fig, pDict ): f1 = dataFromDict('fileref', pDict, allowNone=False) fn = dataFromDict('filename', pDict, allowNone=False) Cx = dataFromDict('Cx', pDict, allowNone=True) Cy = dataFromDict('Cy', pDict, allowNone=True) linemode= dataFromDict('lm', pDict, allowNone=False) logOn = dataFromDict('logOn', pDict, allowNone=True) revAxes = dataFromDict('revAxes', pDict, allowNone=True) ylims = dataFromDict('ylims', pDict, allowNone=True) xlims = dataFromDict('xlims', pDict, allowNone=True) labelStr = labelString( fn ) if( Cx is None ): Cx = 1. if( Cy is None ): Cy = 1. 
d1, v1, v1_l, v1_u = ciDataFromFile( f1 ) d2, v2, v2_l, v2_u = ciDataFromFile( fn ) if( d2[-1] != d1[-1] ): if( d2[-1] > d1[-1] ): # Quick and dirty handling for cases when d2[-1] > d1[-1] idx = ( d2 <= d1[-1] ) # Take the terms where values match d2 = d2[idx]; v2 = v2[idx]; v2_l = v2_l[idx]; v2_u = v2_u[idx] # Shorten # Compute the ratio to match the resolutions (roughly) r = np.round( (d2[1]-d2[0])/(d1[1]-d1[0]) ).astype(int) # Use the matching indecies only idm = ( np.mod((np.arange(len(d1))+1) , r) == 0 ) d1 = d1[idm]; v1 = v1[idm]; v1_l = v1_l[idm]; v1_u = v1_u[idm] Lm = min( len(v2), len(v1) ) d2 = d2[:Lm]; v2 = v2[:Lm]; v2_l = v2_l[:Lm]; v2_u = v2_u[:Lm] d1 = d1[:Lm]; v1 = v1[:Lm]; v1_l = v1_l[:Lm]; v1_u = v1_u[:Lm] d1, x1, y1, v1_l, v1_u = ciScaleVals( d1, v1, v1_l, v1_u, Cx, Cy, revAxes ) d2, x2, y2, v2_l, v2_u = ciScaleVals( d2, v2, v2_l, v2_u, Cx, Cy, revAxes ) xp, yp, dm = ciDiffVals( x1, y1, v1_l, v1_u, x2, y2, v2_l, v2_u, revAxes ) if( revAxes ): xlb = 'D(d)'; ylb = 'd' else: ylb = 'D(d)'; xlb = 'd' ax = addFigAxes( fig ) if( logOn ): if( revAxes ): plotf = ax.semilogx fillbf = ax.fill_betweenx else: plotf = ax.semilogy fillbf= ax.fill_between else: plotf = ax.plot if( revAxes ): fillbf = ax.fill_betweenx else: fillbf = ax.fill_between lines = plotf( xp, yp, linestyle_stack(lm=linemode), lw=3., \ label=labelStr+r': $\left< | \Delta | \right>$={:.2g}'.format(dm) , color=color_stack(lm=linemode)) #label=r': $\left< | \Delta | \right>$={:.2f}'.format(dm) , color=color_stack(lm=linemode)) #linef = fillbf( d, v_u, v_l, facecolor='gray', alpha=0.25) ax.set_ybound(lower=ylims[0], upper=ylims[1] ) ax.set_xbound(lower=xlims[0], upper=xlims[1] ) ax.set_xlabel(xlb) ax.set_ylabel(ylb) return fig # =*=*=*=*=*=*=*=*=*=*=*=*=*=*=*=*=*=*=*=*=*=*=*=*=*=* def plotDY( fig, fileStr, dim=3, revAxes=False ): dim = min( dim, 3 ); dim=max(dim , 1) x = np.loadtxt(fileStr) r = np.zeros( len(x[:,0]), float ) for i in range(dim): x0 = np.min( x[:,i] ) r += (x[:,i]-x0)**2 d = np.sqrt(r) ax = addFigAxes( fig ) labelStr = labelString( fileStr ) # Print each column separately for i in range((x.shape[1]-dim)): if( revAxes ): lines=ax.plot(x[:,i+dim],d[:],marker=marker_stack(), color=color_stack(), fillstyle='none', ls='None' , label=labelStr+'['+str(i)+']' ) else: lines=ax.plot(d[:],x[:,i+dim],marker=marker_stack(), mew=1.7, color=color_stack(), fillstyle='none', ls='None', label=labelStr+'['+str(i)+']') if( revAxes ): ax.set_ylabel(" D(X,Y,Z) "); ax.set_xlabel(" F(D) ") else: ax.set_xlabel(" D(X,Y,Z) "); ax.set_ylabel(" F(D) ") return fig # =*=*=*=*=*=*=*=*=*=*=*=*=*=*=*=*=*=*=*=*=*=*=*=*=*=* def plotYX( fig, fileStr, logOn ): x = np.loadtxt(fileStr) y = x[:,1] ax = addFigAxes( fig ) # Print each column separately for i in range((x.shape[1]-3)): if( logOn ): lines=ax.semilogy(np.abs(x[:,i+3]), y[:] , linewidth=1.1 , label=fileStr+'_'+str(i)) else: lines=ax.plot(x[:,i+3], y[:], linewidth=1.1, label=fileStr+'_'+str(i) ) ax.set_xlabel(" F(Y) ") ax.set_ylabel(" Y ") return fig # =*=*=*=*=*=*=*=*=*=*=*=*=*=*=*=*=*=*=*=*=*=*=*=*=*=* def fullPlotXY(fig,fileStr,figStr,xlabelStr,ylabelStr,lwidth=1.2,fsize=16,logOn=False): x = np.loadtxt(fileStr) y = x[:,1] ax = addFigAxes( fig ) # Print each column separately for i in range((x.shape[1]-3)): if( logOn ): lines=ax.semilogy(np.abs(x[:,i+3]), y[:] , linewidth=lw , label=figStr+'_'+str(i)) else: lines=ax.plot(x[:,i+3], y[:], linewidth=lwidth, label=figStr+'_'+str(i) ) ax.set_xlabel(xlabelStr, fontsize=fsize) ax.set_ylabel(ylabelStr, fontsize=fsize) return fig # 
=*=*=*=*=*=*=*=*=*=*=*=*=*=*=*=*=*=*=*=*=*=*=*=*=*=* def plotCSV( fig, fileStr, revAxes=False, magY=False, globalValues=False ): global gxI global gyLst fl = open( fileStr, 'r' ) line = fl.readline() # Read first line which contains all variable names as str. fl.close() varList = line.split(',') for i in range(len(varList)): varList[i]=varList[i].strip("\"") x = np.loadtxt(fileStr, delimiter=',', skiprows=1) if( not globalValues or (globalValues and gxI == -1) ): n = 0 for v in varList: print(" => ["+str(n)+"]: "+ v) n+=1 try: xI = int(input(" X [index] = ")) except: sys.exit(' No selection. Exiting program. ') e = input(" Y [List] = ") if( e == ''): select=input(" Select All? [1-9]=> Yes, [Empty]=> No: ") if( select == ''): sys.exit(' Exiting program.') else: yLst = list(range(len(fileList))) else: try: yLst = list( map( int, e.split(',') ) ) except: sys.exit(' Bad entry. Exiting program.') if( globalValues and gxI == -1 ): gxI = xI # Store the global values gyLst = yLst else: # (globalValues and gxI /= -1) #print ' Utilizing global values ' xI = gxI # Extract the global values yLst = gyLst labelStr = fileStr.split(".")[0] ax = addFigAxes( fig ) if( not magY ): yLbl = "" # Start with empty label for yJ in yLst: yLbl = yLbl+varList[yJ]+"; " # Compile label if( revAxes ): lines=ax.plot(x[:,yJ],x[:,xI],'-', markersize=6, linewidth=1.5, label=labelStr+": "+varList[yJ]) else: lines=ax.plot(x[:,xI],x[:,yJ],'o-', markersize=6, linewidth=1.5, label=labelStr+": "+varList[yJ]) #writeXY( x[:,xI],x[:,yJ], 'out.dat' ) else: yt = np.zeros(len(x[:,0])) yLbl = " Mag(y[:]) " # Set fixed label for yJ in yLst: yt += x[:,yJ]**2 if( revAxes ): lines=ax.plot(np.sqrt(yt),x[:,xI],'-', markersize=6, linewidth=1.5, label=labelStr) else: lines=ax.plot(x[:,xI],np.sqrt(yt),'o-', markersize=6, linewidth=1.5, label=labelStr) if( revAxes ): ax.set_ylabel(varList[xI]); ax.set_xlabel(yLbl) else: ax.set_xlabel(varList[xI]); ax.set_ylabel(yLbl) return fig # =*=*=*=*=*=*=*=*=*=*=*=*=*=*=*=*=*=*=*=*=*=*=*=*=*=* def extractFromCSV( csvFile, varNames ): fl = open( csvFile, 'r' ) line = fl.readline() # Read first line which contains all variable names as str. fl.close() varList = line.split(',') for i in range(len(varList)): varList[i]=varList[i].strip("\"") varList[i]=varList[i].strip("\""+"\n") # This is in case the line contain '\n' Ix = [] for varStr in varNames: try: Ix.append( varList.index(varStr) )#; print "Index List= {}".format(Ix) except: None if (len(Ix) == 0): print("None of the variables in {0} were found in {1}".format(varNames,varList)) print("Exiting program. ") sys.exit(1) x = np.loadtxt(csvFile, delimiter=',', skiprows=1) data = [] for jcol in Ix: data.append( np.array(x[:,jcol]) ) return np.array(data) # =*=*=*=*=*=*=*=*=*=*=*=*=*=*=*=*=*=*=*=*=*=*=*=*=*=* def extractFromRAW( rawFile, varNames ): fl = open( rawFile, 'r' ) # Read (first or second) line which contains all var names as str. while 1: line = fl.readline() if('#' and 'x' in line): break fl.close() varList = line.split(); varList.remove('#') #print varList Ix = [] for varStr in varNames: try: Ix.append( varList.index(varStr) )#; print "Index List= {}".format(Ix) except: None #print Ix if (len(Ix) == 0): print("None of the variables in {0} were found in {1}".format(varNames,varList)) print("Exiting program. 
") sys.exit(1) x = np.loadtxt(rawFile) data = [] for jcol in Ix: data.append(x[:,jcol]) return data # =*=*=*=*=*=*=*=*=*=*=*=*=*=*=*=*=*=*=*=*=*=*=*=*=*=* def addQuiver( X, Y, Ux, Uy , fc, labelStr, titleStr=" " ): plt.figure() Q = plt.quiver(X[::fc, ::fc],Y[::fc, ::fc],Ux[::fc, ::fc],Uy[::fc, ::fc],\ pivot='tail', color='b', units='xy', scale=1.5 ) #qk = plt.quiverkey(Q, 0.9, 1.05, 1, labelStr, labelpos='E',fontproperties={'weight': 'bold'}) plt.title(titleStr) return Q # =*=*=*=*=*=*=*=*=*=*=*=*=*=*=*=*=*=*=*=*=*=*=*=*=*=* def addContourf( X, Y, Q, CfDict=None ): Xdims = np.array(X.shape) figDims = 12.*(Xdims[::-1].astype(float)/np.max(Xdims)) #figDims = (11,11) #figDims = (9,11) fig = plt.figure(figsize=figDims) #fig, ax = plt.subplots() ax = addFigAxes( fig ) # Default values labelStr = ' Q(X,Y) ' titleStr = ' Title: Q(X,Y) ' cm = None vx = None vn = None levels = None N = 12 if( CfDict is not None ): titleStr = dataFromDict('title', CfDict, allowNone=False) labelStr = dataFromDict('label', CfDict, allowNone=False) cm = dataFromDict('cmap', CfDict, allowNone=True ) N = dataFromDict('N', CfDict, allowNone=True ) vn = dataFromDict('vmin', CfDict, allowNone=True ) vx = dataFromDict('vmax', CfDict, allowNone=True ) levels = dataFromDict('levels', CfDict, allowNone=True ) if( N is None ): N = 12 #print(' vmax = {}, vmin = {} '.format(vx,vn)) #levels = [-1e-6, -1e-7, 0, 1e-7, 1e-6] #CO = plt.contourf(X,Y,Q, levels ) if( levels is not None ): CO = ax.contourf(X,Y,Q, levels, cmap=cm, vmin=vn, vmax=vx ) else: CO = ax.contourf(X,Y,Q, N , cmap=cm, vmin=vn, vmax=vx ) ax.set_title( titleStr ) cbar = fig.colorbar(CO) if( vx is not None ): cbar.vmax = vx if( vn is not None ): cbar.vmin = vn cbar.ax.set_ylabel(labelStr, fontsize=20, fontstyle='normal', fontweight='book', fontname='serif') return CO # =*=*=*=*=*=*=*=*=*=*=*=*=*=*=*=*=*=*=*=*=*=*=*=*=*=* def addScatterPlot(fig, X, Y, C, fc=4 ): ax = addFigAxes( fig ) dims = np.array(np.shape(X))//fc # NOTE: integer division necessary N = np.prod(dims) ax.scatter(X[::fc,::fc].reshape(N), Y[::fc,::fc].reshape(N), s=10, \ c=C[::fc,::fc].reshape(N), marker=',', cmap=plt.cm.rainbow) return fig # =*=*=*=*=*=*=*=*=*=*=*=*=*=*=*=*=*=*=*=*=*=*=*=*=*=* def arrow2DPlot( fig, fileStr , scale=1.0, ic=0, fillOn=True ): d = np.loadtxt(fileStr) labelStr = fileStr.split(".")[0] try: x = d[:,0]; y =d[:,1]; dx = d[:,2]; dy =d[:,3] except: print(' The file must contain (at least) 4 columns: x, y, dx, dy ') sys.exit(1) ax = addFigAxes( fig ) lx = max(scale, 0.825 )*0.0008 lx = min( lx, 0.0016 ) for i in range( len(x) ): ax.arrow( x[i], y[i], scale*dx[i], scale*dy[i], color=color_stack(ic) , width=lx, \ head_width=5.85*lx, head_length=2.85*lx, overhang=0.25, fill=fillOn ) return fig # =*=*=*=*=*=*=*=*=*=*=*=*=*=*=*=*=*=*=*=*=*=*=*=*=*=* def writeXY( x , y , fileName ): f = open( fileName ,'w') #'w' = for writing for i in range(len(x)): f.write("%13.7e \t %13.7e \n" %(x[i], y[i]) ) print('Writing file '+fileName) f.close() # =*=*=*=*=*=*=*=*=*=*=*=*=*=*=*=*=*=*=*=*=*=*=*=*=*=* def wavePlot( fig, fileStr, logOn ): x = np.loadtxt(fileStr) ax = addFigAxes( fig ) labelStr = fileStr.split(".")[0] # Print each column separately Ny = (x.shape[1]-1) for i in range(Ny): if( Ny == 1 ): labelXX = labelStr else: labelXX = labelStr+'['+str(i)+']' if( logOn ): #lines=ax.loglog(x[:,0],np.abs(x[:,i+1]),'o-', linewidth=1.3 , label=labelXX) lines=ax.semilogy(x[:,0],np.abs(x[:,i+1]),'-', linewidth=1.1 , label=labelXX) else: lines=ax.plot(x[:,0],x[:,i+1],'o', linewidth=1.1, label=labelXX) 
ax.set_xlabel(" X ") ax.set_ylabel(" Y ") return fig # =*=*=*=*=*=*=*=*=*=*=*=*=*=*=*=*=*=*=*=*=*=*=*=*=*=* def userLabels( pFig ): #print('A) pFig.get_axes()[] ') ax=pFig.get_axes()[0] # Get a handle on the first axes #print('B) pFig.get_axes()[] ') #pl.rc('text', usetex=True ) #pl.rc('xtick', labelsize=24) #pl.rc('ytick', labelsize=24) titleStr = strEntry( " Plot Title = " , " " ) yLbl = strEntry( " Y Label = " , " Y " ) xLbl = strEntry( " X Label = " , " X " ) """ fontname: [ FONTNAME | 'serif' | 'sans-serif' | 'cursive' | 'fantasy' | 'monospace' ] fontsize: [ size in points ] fontweight: [ a numeric value in range 0-1000 | 'ultralight' | 'light' | 'normal' | 'regular' | 'book' | 'medium' | 'roman' | 'semibold' | 'demibold' | 'demi' | 'bold' | 'heavy' | 'extra bold' | 'black' ] fontstyle: [ 'normal' | 'italic' | 'oblique'] """ ax.set_title(titleStr, fontsize=20, fontstyle='normal', fontweight='demibold', fontname='serif') ax.set_ylabel(yLbl, fontsize=20, fontstyle='normal', fontweight='book', fontname='serif') ax.set_xlabel(xLbl, fontsize=20, fontstyle='normal', fontweight='book', fontname='serif') return pFig # =*=*=*=*=*=*=*=*=*=*=*=*=*=*=*=*=*=*=*=*=*=*=*=*=*=* def strEntry( questionStr , defaultStr ): try: oStr = input(str(questionStr)) except: oStr = str(defaultStr) return oStr # =*=*=*=*=*=*=*=*=*=*=*=*=*=*=*=*=*=*=*=*=*=*=*=*=*=* def numEntry( questionStr , defaultValue ): try: value = input(str(questionStr)) except: value = float(defaultValue) return value # =*=*=*=*=*=*=*=*=*=*=*=*=*=*=*=*=*=*=*=*=*=*=*=*=*=* def maxValues( fileStr ): x = np.loadtxt(fileStr) mv = [] for i in range(x.shape[1]): mv.append(np.max(x[:,i])) return mv # =*=*=*=*=*=*=*=*=*=*=*=*=*=*=*=*=*=*=*=*=*=*=*=*=*=* # ADDED MY MONA KURPPA, 2016: # =*=*=*=*=*=*=*=*=*=*=*=*=*=*=*=*=*=*=*=*=*=*=*=*=*=* def addToPlot_marker(fig, x,y,labelStr, plotStr=["","",""], logOn=False, marker='-'): ''' Add variables x,y to a given plot. Test whether y has multiple columns --> Require different treatment. e.g. marker = '-' or '--' or 'v-' ''' ax = addFigAxes( fig ) d = np.size(np.shape(y)) # Test if y has multiple columns for i in range(d): if(d==1): yt = y else: yt = y[:,i]; labelStr+='['+str(i)+']' if(logOn): lines=ax.loglog(x,yt,marker, linewidth=1.3, label=labelStr) else: lines=ax.plot(x,yt,marker, linewidth=1.6, label=labelStr) ax.set_title( plotStr[0], fontsize=22) ax.set_xlabel(plotStr[1], fontsize=22) ax.set_ylabel(plotStr[2], fontsize=22); ax.grid(True) return fig
Bob Haugen passed away last Sunday. I really have two favorite Haugen articles. In Commonalities in the Determinants of Expected Stock Returns (1995, with Nardin Baker), he basically showed that there are lots of strange things going on in the stock market. He looked at 40 or so specific metrics related to liquidity, price ratios, prior returns, growth, and risk, and found many of them significantly related to future returns in the US, Germany, France, and the UK. The paper was important because at that time Fama and French came out with their influential paper showing the value and size anomalies, and reconciled this within a risk model that they said must have some orthogonal value- and size-related factors. Lakonishok, Shleifer, and Vishny, meanwhile, were arguing these were due to inefficiencies: investor over- and under-reactions. Haugen favored the inefficiency explanation, and more importantly highlighted that there were a lot more than the value and size anomalies.

Most importantly to me, in my dissertation I remember highlighting his The efficient market inefficiency of capitalization-weighted stock portfolios (1991, with Nardin Baker). This paper highlighted that a very fundamental portfolio, the Minimum Variance Portfolio that sits at the extreme left of the convex hull created via Markowitzian diversification in mean-variance space, actually had a slightly higher than average return. Haugen focused on the inefficiency of the market portfolio, but I used it to support my contention that low volatility stocks actually had slightly higher than average returns. Most low volatility funds reference this paper in presentations of their approach, as it was the first paper to highlight the dominance of this special portfolio. He was an independent spirit, and will be greatly missed.

Nice to pay tribute to an original thinker, but please quote his paper correctly: "Commonality In The Determinants Of Expected Stock Returns"

Good paper. Are there any other favorite papers you would recommend on the topic? Is Broker-Dealer Leverage the Elusive SDF?
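Since the post leans on the geometry of the Minimum Variance Portfolio, here is a small illustrative numpy sketch of the standard closed-form MVP weights, w = inv(S) 1 / (1' inv(S) 1). The return series below is randomly generated, not real market data, and this is not Haugen and Baker's actual methodology, just the textbook construction the post refers to.

# Illustrative only: global minimum variance portfolio from a sample covariance matrix.
import numpy as np

rng = np.random.default_rng(0)
returns = rng.normal(0.0005, 0.01, size=(1000, 5))   # 1000 days, 5 hypothetical stocks

S = np.cov(returns, rowvar=False)      # sample covariance matrix
ones = np.ones(S.shape[0])
w = np.linalg.solve(S, ones)           # inv(S) 1
w /= ones @ w                          # normalize so weights sum to one

port_vol = np.sqrt(w @ S @ w)          # volatility of the MVP
print("MVP weights:", np.round(w, 3))
print("MVP volatility:", round(float(port_vol), 4))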
# coding=utf-8 ''' GestPYPay 1.0.0 (C) 2012 Gianfranco Reppucci <gianfranco@gdlabs.it> https://github.com/giefferre/gestpypay GestPYPay is an implementation in Python of GestPayCrypt and GestPayCryptHS italian bank Banca Sella Java classes. It allows to connect to online credit card payment GestPay. This program is free software; you can redistribute it and/or modify it under the terms of the GNU Lesser General Public License version 2.1 as published by the Free Software Foundation. This library is distributed in the hope that it will be useful, but WITHOUT ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU Lesser General Public License for more details at http://www.gnu.org/copyleft/lgpl.html You should have received a copy of the GNU General Public License along with this program; if not, write to the Free Software Foundation, Inc., 59 Temple Place - Suite 330, Boston, MA 02111-1307, USA. ''' import urllib import re import requests def empty(variable): if not variable: return True return False class GestPayCrypt: # attributes ShopLogin = None # Shop Login (e.g. Codice Esercente) Currency = None # Currency code (242 = EUR) Amount = None # Transaction Amount (e.g. 100.00) ShopTransactionID = None # Merchant transaction id CardNumber = None # Credit Card Number ExpMonth = None # Credit Card Expiration Month ExpYear = None # Credit Card Expiration Year BuyerName = None # Client Name and Surname BuyerEmail = None # Client Email Address Language = None # Language CustomInfo = None # Additional Informations AuthorizationCode = None # Transaction Authorization Code ErrorCode = None # Error code ErrorDescription = None # Error description BankTransactionID = None # GestPay transaction id AlertCode = None # Alert code AlertDescription = None # Alert description EncryptedString = None # Crypted string ToBeEncript = None # String to be encrypted Decrypted = None # Decrypted string TransactionResult = None # Transaction result ProtocolAuthServer = None # 'http' or 'https' DomainName = None # GetPay Domain separator = None # Separator string for crypted string Version = None Min = None CVV = None country = None vbvrisp = None vbv = None # constructor def __init__(self, *args, **kwargs): self.ShopLogin = "" self.Currency = "" self.Amount = "" self.ShopTransactionID = "" self.CardNumber = "" self.ExpMonth = "" self.ExpYear = "" self.BuyerName = "" self.BuyerEmail = "" self.Language = "" self.CustomInfo = "" self.AuthorizationCode = "" self.ErrorCode = "0" self.ErrorDescription = "" self.BankTransactionID = "" self.AlertCode = "" self.AlertDescription = "" self.EncryptedString = "" self.ToBeEncrypt = "" self.Decrypted = "" self.ProtocolAuthServer = "http" self.DomainName = "ecomm.sella.it" self.ScriptEnCrypt = "/CryptHTTP/Encrypt.asp" self.ScriptDecrypt = "/CryptHTTP/Decrypt.asp" self.separator = "*P1*" self.Version = "1.0" self.Min = "" self.CVV = "" self.country = "" self.vbvrisp = "" self.vbv = "" self.debug = False # write methods def SetShopLogin(self, val): self.ShopLogin = val def SetCurrency(self, val): self.Currency = val def SetAmount(self, val): self.Amount = val def SetShopTransactionID(self, val): self.ShopTransactionID = urllib.quote_plus(val.strip()) def SetCardNumber(self, val): self.CardNumber = val def SetExpMonth(self, val): self.ExpMonth = val def SetExpYear(self, val): self.ExpYear = val def SetMIN(self, val): self.Min = val def SetCVV(self, val): self.CVV = val def SetBuyerName(self, val): self.BuyerName = 
urllib.quote_plus(val.strip()) def SetBuyerEmail(self, val): self.BuyerEmail = val.strip() def SetLanguage(self, val): self.Language = val.strip() def SetCustomInfo(self, val): self.CustomInfo = urllib.quote_plus(val.strip()) def SetEncryptedString(self, val): self.EncryptedString = val # read only methods def GetShopLogin(self): return self.ShopLogin def GetCurrency(self): return self.Currency def GetAmount(self): return self.Amount def GetCountry(self): return self.country def GetVBV(self): return self.vbv def GetVBVrisp(self): return self.vbvrisp def GetShopTransactionID(self): return urllib.unquote_plus(self.ShopTransactionID) def GetBuyerName(self): return urllib.unquote_plus(self.BuyerName) def GetBuyerEmail(self): return self.BuyerEmail def GetCustomInfo(self): return urllib.unquote_plus(self.CustomInfo) def GetAuthorizationCode(self): return self.AuthorizationCode def GetErrorCode(self): return self.ErrorCode def GetErrorDescription(self): return self.ErrorDescription def GetBankTransactionID(self): return self.BankTransactionID def GetTransactionResult(self): return self.TransactionResult def GetAlertCode(self): return self.AlertCode def GetAlertDescription(self): return self.AlertDescription def GetEncryptedString(self): return self.EncryptedString # encryption / decryption def Encrypt(self): err = "" self.ErrorCode = "0" self.ErrorDescription = "" self.ToBeEncrypt = "" if empty(self.ShopLogin): self.ErrorCode = "546" self.ErrorDescription = "IDshop not valid" return False if empty(self.Currency): self.ErrorCode = "552" self.ErrorDescription = "Currency not valid" return False if empty(self.Amount): self.ErrorCode = "553" self.ErrorDescription = "Amount not valid" return False if empty(self.ShopTransactionID): self.ErrorCode = "551" self.ErrorDescription = "Shop Transaction ID not valid" return False self.ToEncrypt(self.CVV, "PAY1_CVV") self.ToEncrypt(self.Min, "PAY1_MIN") self.ToEncrypt(self.Currency, "PAY1_UICCODE") self.ToEncrypt(self.Amount, "PAY1_AMOUNT") self.ToEncrypt(self.ShopTransactionID, "PAY1_SHOPTRANSACTIONID") self.ToEncrypt(self.CardNumber, "PAY1_CARDNUMBER") self.ToEncrypt(self.ExpMonth, "PAY1_EXPMONTH") self.ToEncrypt(self.ExpYear, "PAY1_EXPYEAR") self.ToEncrypt(self.BuyerName, "PAY1_CHNAME") self.ToEncrypt(self.BuyerEmail, "PAY1_CHEMAIL") self.ToEncrypt(self.Language, "PAY1_IDLANGUAGE") self.ToEncrypt(self.CustomInfo, "") self.ToBeEncrypt = self.ToBeEncrypt.replace(" ", "+") uri = self.ScriptEnCrypt + "?a=" + self.ShopLogin + "&b=" + self.ToBeEncrypt[len(self.separator):] full_url = self.ProtocolAuthServer + "://" + self.DomainName + uri if self.debug: print "URL richiesta: " + full_url + "\n" self.EncryptedString = self.HttpGetResponse(full_url, True) if self.EncryptedString == -1: return False if self.debug: print "Stringa criptata: " + self.EncryptedString + "\n" return True def Decrypt(self): err = "" self.ErrorCode = "0" self.ErrorDescription = "" if empty(self.ShopLogin): self.ErrorCode = "546" self.ErrorDescription = "IDshop not valid" return False if empty(self.EncryptedString): self.ErrorCode = "1009" self.ErrorDescription = "String to Decrypt not valid" return False uri = self.ScriptDecrypt + "?a=" + self.ShopLogin + "&b=" + self.EncryptedString full_url = self.ProtocolAuthServer + "://" + self.DomainName + uri if self.debug: print "URL richiesta: " + full_url + "\n" self.Decrypted = self.HttpGetResponse(full_url, False) if self.Decrypted == -1: return False elif empty(self.Decrypted): self.ErrorCode = "9999" self.ErrorDescription = "Empty decrypted 
string" return False self.Decrypted = self.Decrypted.replace("+", " ") if self.debug: print "Stringa decriptata: " + self.Decrypted + "\n" self.Parsing() return True # helpers def ToEncrypt(self, value, tagvalue): equal = "=" if tagvalue else "" if not empty(value): self.ToBeEncrypt += "%s%s%s%s" % (self.separator, tagvalue, equal, value) def HttpGetResponse(self, url, crypt): response = "" req = "crypt" if crypt else "decrypt" line = self.HttpGetLine(url) if line == -1: return -1 if self.debug: print line reg = re.compile("#" + req + "string#([\w\W]*)#\/" + req + "string#").findall(line) err = re.compile("#error#([\w\W]*)#\/error#").findall(line) if self.debug: print url print req print line print reg print err if len(reg) > 0: response = reg[0].strip() elif len(err) > 0: err = err[0].split('-') if empty(err[0]) and empty(err[1]): self.ErrorCode = "9999" self.ErrorDescription = "Unknown error" else: self.ErrorCode = err[0].strip() self.ErrorDescription = err[1].strip() return -1 else: self.ErrorCode = "9999" self.ErrorDescription = "Response from server not valid" return -1 return response def HttpGetLine(self, url): try: r = requests.get(url) except Exception, e: print e self.ErrorCode = "9999" self.ErrorDescription = "Impossible to connect to host: " + host return -1 output = "" for line in r.iter_lines(): output = line break return output def Parsing(self): keyval = self.Decrypted.split(self.separator) for tagPAY1 in keyval: tagPAY1val = tagPAY1.split("=") if re.search("^PAY1_UICCODE", tagPAY1): self.Currency = tagPAY1val[1] elif re.search("^PAY1_AMOUNT", tagPAY1): self.Amount = tagPAY1val[1] elif re.search("^PAY1_SHOPTRANSACTIONID", tagPAY1): self.ShopTransactionID = tagPAY1val[1] elif re.search("^PAY1_CHNAME", tagPAY1): self.BuyerName = tagPAY1val[1] elif re.search("^PAY1_CHEMAIL", tagPAY1): self.BuyerEmail = tagPAY1val[1] elif re.search("^PAY1_AUTHORIZATIONCODE", tagPAY1): self.AuthorizationCode = tagPAY1val[1] elif re.search("^PAY1_ERRORCODE", tagPAY1): self.ErrorCode = tagPAY1val[1] elif re.search("^PAY1_ERRORDESCRIPTION", tagPAY1): self.ErrorDescription = tagPAY1val[1] elif re.search("^PAY1_BANKTRANSACTIONID", tagPAY1): self.BankTransactionID = tagPAY1val[1] elif re.search("^PAY1_ALERTCODE", tagPAY1): self.AlertCode = tagPAY1val[1] elif re.search("^PAY1_ALERTDESCRIPTION", tagPAY1): self.AlertDescription = tagPAY1val[1] elif re.search("^PAY1_CARDNUMBER", tagPAY1): self.CardNumber = tagPAY1val[1] elif re.search("^PAY1_EXPMONTH", tagPAY1): self.ExpMonth = tagPAY1val[1] elif re.search("^PAY1_EXPYEAR", tagPAY1): self.ExpYear = tagPAY1val[1] elif re.search("^PAY1_COUNTRY", tagPAY1): self.ExpYear = tagPAY1val[1] elif re.search("^PAY1_VBVRISP", tagPAY1): self.ExpYear = tagPAY1val[1] elif re.search("^PAY1_VBV", tagPAY1): self.ExpYear = tagPAY1val[1] elif re.search("^PAY1_IDLANGUAGE", tagPAY1): self.Language = tagPAY1val[1] elif re.search("^PAY1_TRANSACTIONRESULT", tagPAY1): self.TransactionResult = tagPAY1val[1] else: self.CustomInfo += tagPAY1 + self.separator self.CustomInfo = self.CustomInfo[:-len(self.separator)] class GestPayCryptHS(GestPayCrypt): # constructor def __init__(self, *args, **kwargs): self.ShopLogin = "" self.Currency = "" self.Amount = "" self.ShopTransactionID = "" self.CardNumber = "" self.ExpMonth = "" self.ExpYear = "" self.BuyerName = "" self.BuyerEmail = "" self.Language = "" self.CustomInfo = "" self.AuthorizationCode = "" self.ErrorCode = "0" self.ErrorDescription = "" self.BankTransactionID = "" self.AlertCode = "" self.AlertDescription = "" 
self.EncryptedString = "" self.ToBeEncrypt = "" self.Decrypted = "" self.ProtocolAuthServer = "https" self.DomainName = "ecomm.sella.it" self.ScriptEnCrypt = "/CryptHTTPS/Encrypt.asp" self.ScriptDecrypt = "/CryptHTTPS/Decrypt.asp" self.separator = "*P1*" self.Version = "1.0" self.Min = "" self.CVV = "" self.country = "" self.vbvrisp = "" self.vbv = "" self.debug = False
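For illustration, a minimal usage sketch of the class above. All values are placeholders (the merchant code, amount, order id and card data are made up), and a real call will only succeed if the calling server's IP is authorized on the GestPay side.

# Hypothetical usage of GestPayCrypt defined above; every value is a placeholder.
gp = GestPayCrypt()
gp.SetShopLogin("9000001")              # placeholder merchant code
gp.SetCurrency("242")                   # 242 = EUR, per the comment in the class
gp.SetAmount("100.00")
gp.SetShopTransactionID("ORDER-0001")   # placeholder order id
gp.SetCardNumber("4111111111111111")    # test card number
gp.SetExpMonth("12")
gp.SetExpYear("25")

if gp.Encrypt():
    encrypted = gp.GetEncryptedString()  # string to pass on to the GestPay payment page
else:
    print("Error %s: %s" % (gp.GetErrorCode(), gp.GetErrorDescription()))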
Mock Texture Collection Asus ZenPad S 8 Skin – MightySkins

Your Asus ZenPad S 8 should be as unique as you are, and these Mock Textures Skins for it are the perfect way to express your personality! This awesome vinyl decal will let you show off your favorite gear with a unique style that's totally you!
from django.core.management.base import BaseCommand
from xbrowse_server.base.model_utils import create_xbrowse_model
from xbrowse_server.base.models import Project
import sys
from django.utils import timezone


class Command(BaseCommand):

    def add_arguments(self, parser):
        parser.add_argument("project_id")
        parser.add_argument("project_name", nargs="?")

    def handle(self, *args, **options):
        if 'project_id' not in options:
            print('\n')
            print('Creates a project in Seqr.\n')
            print('Please provide a project ID as an argument. Optionally, provide a more '
                  'human-readable project name as a second argument.')
            print('Example: python manage.py add_project 1kg\n')
            sys.exit()

        project_id = options['project_id']
        if "." in project_id:
            sys.exit("ERROR: A '.' in the project ID is not supported")

        if Project.objects.filter(project_id=project_id).exists():
            print('\nSorry, I am unable to create that project since it exists already\n')
            sys.exit()

        project_name = options.get('project_name') or project_id
        print('Creating project with id "%(project_id)s" and name "%(project_name)s"' % locals())
        try:
            create_xbrowse_model(Project, project_id=project_id, project_name=project_name,
                                 created_date=timezone.now())
        except Exception as e:
            print('\nError creating project: %s\n' % e)
            sys.exit()
Two members of the local entertainment industry remain in custody in the United States following their arrest last week for alleged drug trafficking. One of those arrested is 37-year-old disc jockey and entertainment promoter, ‘Crazy Chris’, from Montego Bay. Chris, whose given name is Christopher Samuels, was arrested in North Carolina on cocaine charges on Tuesday, November 6 by the Charlotte Mecklenburg Police Department. Also arrested is 25-year-old stylist Keshon Hawthorne, who was booked on drug trafficking charges following his arrival at the Charlotte Douglas International Airport in Charlotte, North Carolina on Sunday, November 4. According to US media reports, Hawthorne arrived in Charlotte on a flight from Montego Bay. A white powdery substance that field-tested positive for cocaine was discovered during an inspection of his luggage. The police said that two days later, Samuels landed at the Charlotte Douglas International Airport, also from Montego Bay, and was detained after it was determined that he had used his luggage to smuggle cocaine. The total weight of the cocaine seized from the Jamaicans was more than six pounds. It had an estimated street value of US $90,000. Both men were arrested for importation of a controlled substance, and turned over to Homeland Security investigators. Samuels was subsequently slapped with charges of possession with intent to sell and deliver cocaine, and two counts of trafficking cocaine. The specific charges laid against Hawthorne are not immediately clear.
import logging from random import getrandbits from ming import Field from ming.declarative import Document from ming import schema as S from .m_base import doc_session, dumps, pickle_property, Resource log = logging.getLogger(__name__) class TaskState(Document): class __mongometa__: name = 'chapman.task' session = doc_session indexes = [ [('parent_id', 1), ('data.composite_position', 1)], ] _id = Field(int, if_missing=lambda: getrandbits(63)) type = Field(str) parent_id = Field(int, if_missing=None) status = Field(str, if_missing='pending') _result = Field('result', S.Binary) data = Field({str: None}) options = Field(dict( queue=S.String(if_missing='chapman'), priority=S.Int(if_missing=10), immutable=S.Bool(if_missing=False), ignore_result=S.Bool(if_missing=False), path=S.String(if_missing=None), semaphores = [str], )) on_complete = Field(int, if_missing=None) active = Field([int]) # just one message active queued = Field([int]) # any number queued result = pickle_property('_result') @classmethod def set_result(cls, id, result): cls.m.update_partial( {'_id': id}, {'$set': { 'result': dumps(result), 'status': result.status}}) def __repr__(self): parts = [self.type, self._id] if self.options['path']: parts.append(self.options['path']) return '<{}>'.format( ' '.join(map(str, parts))) class TaskStateResource(Resource): cls=TaskState def __init__(self, id): self.id = id def __repr__(self): obj = TaskState.m.get(_id=self.id) return '<TaskStateResource({}:{}): {} / {}>'.format( obj.type, obj._id, obj.active, obj.queued) def acquire(self, msg_id): return super(TaskStateResource, self).acquire(msg_id, 1) def release(self, msg_id): return super(TaskStateResource, self).release(msg_id, 1)
Personal blog for Matt Burris, and home to 2,748 mashups! Something that has been at the back of our minds for years: do the things you own intentionally break down after a set period of time? Did that DVD player you just bought have a destruction time tuned to December 24th, 2004? Are certain parts of your car designed to wear out after a couple of years? Do manufacturers deliberately create products to not last long, so you'd have to buy a new one, thus increasing profits? This is called planned obsolescence, and it's very real.
# The Open Corpus of Newswriting (OCON) # Copyright (C) 2014 Mahangu Weerasinghe (mahangu@gmail.com) # This program is free software: you can redistribute it and/or modify # it under the terms of the GNU General Public License as published by # the Free Software Foundation, either version 3 of the License, or # (at your option) any later version. # This program is distributed in the hope that it will be useful, # but WITHOUT ANY WARRANTY; without even the implied warranty of # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the # GNU General Public License for more details. # # You should have received a copy of the GNU General Public License # along with this program. If not, see <http://www.gnu.org/licenses/>. import urlparse import os import justext import urllib2 import re #regex stuff from ftfy import fix_text #unicode cleanup INPUT_DIR="input/" OUTPUT_DIR="output/" def get_article_list(filename): lines = tuple(open(filename, "r")) lines_seen = set() # holds lines already seen for line in open(filename, "r"): if line not in lines_seen: # not a duplicate lines_seen.add(line) return tuple(lines_seen) def grab_article(url): article = "" import requests import justext #url = "http://archives.dailynews.lk/2008/01/11/news38.asp" url = url.strip("\n") url = url.strip("\r") url = url.strip(" ") print url response = requests.get(url) print response paragraphs = justext.justext(response.content, justext.get_stoplist("English")) for paragraph in paragraphs: if not paragraph.is_boilerplate: #print paragraph.text article = article + unicode(paragraph.text) + "\n\n" if article!=None: return unicode(article) else: return None; for file in os.listdir(INPUT_DIR): if file.endswith(".txt"): split_filename = re.findall(r"[^\W_]+", file) #splitting up the filename so we can get newspaper name and date from it NEWSPAPER = split_filename[0] DATE = split_filename[1] article_url_list = "" article_url_list = get_article_list(INPUT_DIR + file) print article_url_list for article_url in article_url_list: scheme = urlparse.urlparse(article_url).scheme if article_url!="\n" and scheme=="http": #checking for newlines and mailto: links hostname = urlparse.urlparse(article_url).hostname path = urlparse.urlparse(article_url).path #grab the part after the .TLD path = path.replace("/", "") #remove forward slashes raw_text = unicode(grab_article(article_url)) if raw_text!=None: text = fix_text(raw_text) text = text + "\n\n\n\n" split_path = re.findall(r"[^\W_]+", path) #sanitising the path so it doesn't end up crazy long short_path = split_path[0] print short_path text_file = open(OUTPUT_DIR + NEWSPAPER + "_" + DATE + "_" + hostname + "_" + short_path + ".txt", "a+") text_file.write(text.encode('utf8')) text_file.close()
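As a hypothetical illustration of the input the scraper above expects (one URL per line, in a file named <newspaper>_<date>.txt under input/, since the newspaper name and date are taken from the first two underscore-separated tokens of the filename), such a file could be generated like this; the newspaper token, date token and URL are modeled on the script's own comments, not taken from any real corpus run.

urls = [
    "http://archives.dailynews.lk/2008/01/11/news38.asp",   # example URL from the comment in grab_article()
]
# "<newspaper>_<date>.txt"; both tokens below are assumptions for illustration.
with open("input/dailynews_20080111.txt", "w") as f:
    for u in urls:
        f.write(u + "\n")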
SeaWorld CEO Joel Manby has attacked activists for failing to understand animal welfare issues, and he has hit out at claims that SeaWorld is responsible for whales dying in captivity.

Manby said SeaWorld was a valuable haven for sea life in the face of increasing levels of sea pollution. He said: “I get frustrated with the small-minded arguments from activists that really don’t know what they’re talking about.

“One hundred years from now, people are going to be begging for zoos and aquariums to take the animals from the wild because the extinction rate is so high.

In 2016 SeaWorld announced that it was terminating its in-park orca breeding program and cutting back its live killer whale shows. Mr Manby said that SeaWorld would not be restarting its breeding program.

He added: “We will still have the whales for 50 years.

“They live a long time. This is a decision that is for the immediate.
import tw2.forms as twf
import tw2.core as twc


class UserExists(twc.Validator):
    """Validate that the user exists in the DB.

    It's used when we want to authenticate it.
    """
    __unpackargs__ = ('login', 'password', 'validate_func', 'request')
    msgs = {
        'mismatch': ('Login failed. Please check your '
                     'credentials and try again.'),
    }

    def _validate_python(self, value, state):
        super(UserExists, self)._validate_python(value, state)
        login = value[self.login]
        password = value[self.password]
        for v in [login, password]:
            try:
                if issubclass(v, twc.validation.Invalid):
                    # No need to validate the credentials: the login or
                    # password field is already invalid.
                    return
            except TypeError:
                pass
        res = self.validate_func(self.request, login, password)
        if not res:
            raise twc.ValidationError('mismatch', self)
        if res is not True:
            value['user'] = res


def create_login_form(request, validate_func):

    class LoginForm(twf.TableForm):
        login = twf.TextField(validator=twc.Validator(required=True))
        password = twf.PasswordField(validator=twc.Validator(required=True))
        submit = twf.SubmitButton(id='submit', value='Login')
        validator = UserExists(
            login='login',
            password='password',
            validate_func=validate_func,
            request=request,
        )

    return LoginForm
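A small, hypothetical sketch of how create_login_form above might be used: validate_func receives the request plus the submitted login and password, and should return a user object (which UserExists stores under value['user']) or a falsy value, which triggers the 'mismatch' error. The credential check and function names below are placeholders.

def check_credentials(request, login, password):
    # Placeholder check only; a real implementation would query the database.
    if login == "admin" and password == "secret":
        return {"login": login}   # any truthy, non-True object is stored as value['user']
    return False

def build_form(request):
    LoginForm = create_login_form(request, check_credentials)
    # LoginForm is a tw2 TableForm subclass; render and validate it with the
    # usual tw2 calls for whatever web framework hosts it.
    return LoginForm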
At 19:37 the Milton Fire Department was dispatched to the intersection of Lincoln and Locust Streets for a two-vehicle MVA. Chief 1502 arrived to find two vehicles in the roadway with all occupants out of the vehicles. Rescue 15 arrived and took care of vehicle hazards. Units on scene: Chief 1502 Lupo, Rescue 15, MICU 15, CO 15 Fire Police, and the Milton Police Department. Photos by Jay Mabus.
""" RequestHandlers for the Jukebox application IndexHandler - Show version TracklistHandler - Show current tracklist SongHandler - Show track information VoteHandler - Add and remove votes SkipHandler - Add and remove skips SearchHandler - Search the library """ from __future__ import absolute_import, unicode_literals import json import uuid from datetime import datetime from functools import wraps from mopidy.models import ModelJSONEncoder from tornado import web, escape, gen, auth from .library import Tracklist from .models import Vote, User, Session from .util import track_json def authenticate(f): """ Decorator for checking if a user is authenticated """ @wraps(f) def wrapper(self): """ :type self: RequestHandler """ try: self.request.session = Session.get(Session.secret == self.get_cookie('session')) f(self) except Session.DoesNotExist: self.set_status(403) return wrapper class LoginHandler(web.RequestHandler): def get(self): cookie = self.get_cookie('session') if cookie: try: session = Session.get(Session.secret == cookie) self.set_status(200) self.write("Successfully logged in") except Session.DoesNotExist: self.redirect('/jukebox-api/auth/google') else: self.redirect('/jukebox-api/auth/google') class LogoutHandler(web.RequestHandler): @authenticate def get(self): self.request.session.delete() self.clear_cookie('session') self.set_status(200) self.write("Successfully logged out") class GoogleOAuth2LoginHandler(web.RequestHandler, auth.GoogleOAuth2Mixin): def initialize(self, google_oauth, google_oauth_secret): self.settings[self._OAUTH_SETTINGS_KEY] = { 'key': google_oauth, 'secret': google_oauth_secret, } @gen.coroutine def get(self): # own url without GET variables redirect_uri = self.request.protocol + "://" + self.request.host + self.request.uri.split('?')[0] if self.get_argument('code', False): try: access = yield self.get_authenticated_user( redirect_uri=redirect_uri, code=self.get_argument('code')) google_user = yield self.oauth2_request( "https://www.googleapis.com/oauth2/v1/userinfo", access_token=access["access_token"]) try: user = User.get(uid=google_user['id']) except User.DoesNotExist: user = User.create(uid=google_user['id'], name=google_user['name'], email=google_user['email'], picture=google_user['picture']) user.save() # a user can have 1 session Session.delete().where(Session.user == user).execute() session = Session(user=user, secret=uuid.uuid1()) session.save() self.set_cookie('session', str(session.secret)) self.set_status(200) self.write("Successfully logged in") except auth.AuthError: self.set_status(400, "Bad Request") self.write("400: Bad Request") else: yield self.authorize_redirect( redirect_uri=redirect_uri, client_id=self.settings[self._OAUTH_SETTINGS_KEY]['key'], scope=['profile', 'email'], response_type='code', extra_params={'approval_prompt': 'auto'}) class IndexHandler(web.RequestHandler): def initialize(self, version, core): self.core = core self.version = version def get(self): self.write({'message': 'Welcome to the Jukebox API', 'version': self.version}) self.set_header("Content-Type", "application/json") class TracklistHandler(web.RequestHandler): def initialize(self, core): self.core = core @authenticate def get(self): tracklist = self.core.tracklist.get_tl_tracks().get() self.write({ 'tracklist': [{'id': id, 'track': track_json(track)} for (id, track) in tracklist] }) self.set_header("Content-Type", "application/json") class TrackHandler(web.RequestHandler): def initialize(self, core): self.core = core def post(self): """ Get information for a 
specific track :return: """ try: track_uri = self.get_body_argument('track', '') track = self.core.library.lookup(track_uri).get()[0] self.write(track_json(track)) except web.MissingArgumentError: self.write({"error": "'track' key not found"}) self.set_status(400) class UserHandler(web.RequestHandler): def initialize(self, core): self.core = core @authenticate def get(self): """ Get information about the active user :return: """ user = self.request.session.user self.set_header("Content-Type", "application/json") self.write({ 'name': user.name, 'picture': user.picture, 'email': user.email, 'uid': user.uid, }) class VoteHandler(web.RequestHandler): def initialize(self, core): self.core = core @authenticate def post(self): """ Get the vote for a specific track :return: """ user = self.request.session.user try: track_uri = self.get_body_argument('track') vote = Vote.get(Vote.track_uri == track_uri) track = self.core.library.lookup(track_uri).get()[0] self.write({'track': track_json(track), 'user': user.name, 'timestamp': vote.timestamp.isoformat()}) self.set_header("Content-Type", "application/json") except web.MissingArgumentError: self.set_status(400) self.write({"error": "'track' key not found"}) @authenticate def put(self): """ Vote for a specific track :return: """ try: track_uri = self.get_body_argument('track') active_user = self.request.session.user if Vote.select().where(Vote.track_uri == track_uri, Vote.user == active_user): return self.set_status(409, 'Vote already exists') my_vote = Vote(track_uri=track_uri, user=active_user, timestamp=datetime.now()) if my_vote.save() is 1: # Add this track to now playing TODO: remove Tracklist.update_tracklist(self.core.tracklist) self.set_status(201) else: self.set_status(500) except web.MissingArgumentError: self.set_status(400) self.write({"error": "'track' key not found"}) @authenticate def delete(self): """ Delete the vote for a specific track :return: """ try: track_uri = self.get_body_argument('track') if not track_uri: self.write({"error": "'track' key not found"}) return self.set_status(400) active_user = self.request.session.user q = Vote.delete().where(Vote.track_uri == track_uri and Vote.user == active_user) if q.execute() is 0: self.set_status(404, "No vote deleted") else: Tracklist.update_tracklist(self.core.tracklist) self.set_status(204, "Vote deleted") except web.MissingArgumentError: self.set_status(400) self.write({"error":"'track' key not found"}) class SkipHandler(web.RequestHandler): def initialize(self, core): self.core = core class SearchHandler(web.RequestHandler): def initialize(self, core): self.core = core def error(self, code, message): self.write({ 'error': code, 'message': message }) self.set_status(code, message) def post(self): field = self.get_body_argument('field', '') values = self.get_body_argument('values', '') if not field: return self.error(400, 'Please provide a field') search = {field: [values]} search_result = self.core.library.search(search).get()[0] self.set_header("Content-Type", "application/json") self.write("""{ "uri": "%s", "albums": %s, "artists": %s, "tracks": %s }""" % (search_result.uri, json.dumps(search_result.albums, cls=ModelJSONEncoder), json.dumps(search_result.artists, cls=ModelJSONEncoder), json.dumps(search_result.tracks, cls=ModelJSONEncoder)))
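For illustration, a minimal routing sketch for the handlers above. The URL prefix, version string and core object are assumptions; in a Mopidy frontend these would normally be supplied by the extension's web-app factory, and the Google OAuth handlers additionally need client credentials, so they are omitted here.

# Hypothetical Tornado wiring for the handlers defined above; 'core' would be
# Mopidy's core API proxy and 'version' the extension version string.
from tornado import web

def make_app(core, version="0.1"):
    return web.Application([
        (r"/jukebox-api/", IndexHandler, dict(version=version, core=core)),
        (r"/jukebox-api/tracklist", TracklistHandler, dict(core=core)),
        (r"/jukebox-api/track", TrackHandler, dict(core=core)),
        (r"/jukebox-api/vote", VoteHandler, dict(core=core)),
        (r"/jukebox-api/search", SearchHandler, dict(core=core)),
    ])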
Research in the fourth edition of the Aviva Working Lives Report, which examines the attitudes and experiences of employers and employees on issues affecting the present and future of the UK workplace, also carries a wake-up call to businesses: more than two in five (43%) employees feel their employer puts the results of the company ahead of their health and wellbeing. Seven in ten UK employees – equivalent to 18 million nationally – have gone to work feeling unwell when they should have taken the day off, according to the report. The findings suggest private sector workers are fearful of heavy workloads if they take time off, as more than two in five (41%) say their work will pile up if they are off sick. With people continuing to work while they are unwell, it is likely they are less productive as a consequence, and they could also affect the health of other employees. The trend comes against the backdrop of a historic fall in the average number of sick days taken annually by UK employees, dropping to a record low of 4.3 days in 2016 compared with 7.2 days in 1993, when tracking began.
# Copyright 2014 Red Hat, Inc. # All Rights Reserved. # # Licensed under the Apache License, Version 2.0 (the "License"); you may # not use this file except in compliance with the License. You may obtain # a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, WITHOUT # WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the # License for the specific language governing permissions and limitations # under the License. import base64 import gzip import logging import math import os import re import requests import shutil import six import stat import tempfile import time from oslo_concurrency import processutils from oslo_config import cfg from oslo_utils import excutils from oslo_utils import units from ironic_lib.openstack.common._i18n import _ from ironic_lib.openstack.common._i18n import _LE from ironic_lib.openstack.common._i18n import _LW from ironic_lib.openstack.common import imageutils from ironic_lib import disk_partitioner from ironic_lib import exception from ironic_lib import utils opts = [ cfg.IntOpt('efi_system_partition_size', default=200, help='Size of EFI system partition in MiB when configuring ' 'UEFI systems for local boot.', deprecated_group='deploy'), cfg.StrOpt('dd_block_size', default='1M', help='Block size to use when writing to the nodes disk.', deprecated_group='deploy'), cfg.IntOpt('iscsi_verify_attempts', default=3, help='Maximum attempts to verify an iSCSI connection is ' 'active, sleeping 1 second between attempts.', deprecated_group='deploy'), ] CONF = cfg.CONF CONF.register_opts(opts, group='disk_utils') LOG = logging.getLogger(__name__) _PARTED_PRINT_RE = re.compile(r"^(\d+):([\d\.]+)MiB:" "([\d\.]+)MiB:([\d\.]+)MiB:(\w*)::(\w*)") _ISCSI_RE = re.compile(r"^ip-[\d+.]*:\w+-iscsi-[\w+.]*-lun-\d+") def list_partitions(device): """Get partitions information from given device. :param device: The device path. :returns: list of dictionaries (one per partition) with keys: number, start, end, size (in MiB), filesystem, flags """ output = utils.execute( 'parted', '-s', '-m', device, 'unit', 'MiB', 'print', use_standard_locale=True, run_as_root=True)[0] if isinstance(output, bytes): output = output.decode("utf-8") lines = [line for line in output.split('\n') if line.strip()][2:] # Example of line: 1:1.00MiB:501MiB:500MiB:ext4::boot fields = ('number', 'start', 'end', 'size', 'filesystem', 'flags') result = [] for line in lines: match = _PARTED_PRINT_RE.match(line) if match is None: LOG.warn(_LW("Partition information from parted for device " "%(device)s does not match " "expected format: %(line)s"), dict(device=device, line=line)) continue # Cast int fields to ints (some are floats and we round them down) groups = [int(float(x)) if i < 4 else x for i, x in enumerate(match.groups())] result.append(dict(zip(fields, groups))) return result def is_iscsi_device(dev): """check whether the device path belongs to an iscsi device. """ basename = os.path.basename(dev) return bool(_ISCSI_RE.match(basename)) def get_disk_identifier(dev): """Get the disk identifier from the disk being exposed by the ramdisk. This disk identifier is appended to the pxe config which will then be used by chain.c32 to detect the correct disk to chainload. This is helpful in deployments to nodes with multiple disks. http://www.syslinux.org/wiki/index.php/Comboot/chain.c32#mbr: :param dev: Path for the already populated disk device. 
:returns The Disk Identifier. """ disk_identifier = utils.execute('hexdump', '-s', '440', '-n', '4', '-e', '''\"0x%08x\"''', dev, run_as_root=True, check_exit_code=[0], attempts=5, delay_on_retry=True) return disk_identifier[0] def make_partitions(dev, root_mb, swap_mb, ephemeral_mb, configdrive_mb, commit=True, boot_option="netboot", boot_mode="bios"): """Partition the disk device. Create partitions for root, swap, ephemeral and configdrive on a disk device. :param root_mb: Size of the root partition in mebibytes (MiB). :param swap_mb: Size of the swap partition in mebibytes (MiB). If 0, no partition will be created. :param ephemeral_mb: Size of the ephemeral partition in mebibytes (MiB). If 0, no partition will be created. :param configdrive_mb: Size of the configdrive partition in mebibytes (MiB). If 0, no partition will be created. :param commit: True/False. Default for this setting is True. If False partitions will not be written to disk. :param boot_option: Can be "local" or "netboot". "netboot" by default. :param boot_mode: Can be "bios" or "uefi". "bios" by default. :returns: A dictionary containing the partition type as Key and partition path as Value for the partitions created by this method. """ LOG.debug("Starting to partition the disk device: %(dev)s", {'dev': dev}) if is_iscsi_device(dev): part_template = dev + '-part%d' else: part_template = dev + '%d' part_dict = {} # For uefi localboot, switch partition table to gpt and create the efi # system partition as the first partition. if boot_mode == "uefi" and boot_option == "local": dp = disk_partitioner.DiskPartitioner(dev, disk_label="gpt") part_num = dp.add_partition(CONF.disk_utils.efi_system_partition_size, fs_type='fat32', bootable=True) part_dict['efi system partition'] = part_template % part_num else: dp = disk_partitioner.DiskPartitioner(dev) if ephemeral_mb: LOG.debug("Add ephemeral partition (%(size)d MB) to device: %(dev)s", {'dev': dev, 'size': ephemeral_mb}) part_num = dp.add_partition(ephemeral_mb) part_dict['ephemeral'] = part_template % part_num if swap_mb: LOG.debug("Add Swap partition (%(size)d MB) to device: %(dev)s", {'dev': dev, 'size': swap_mb}) part_num = dp.add_partition(swap_mb, fs_type='linux-swap') part_dict['swap'] = part_template % part_num if configdrive_mb: LOG.debug("Add config drive partition (%(size)d MB) to device: " "%(dev)s", {'dev': dev, 'size': configdrive_mb}) part_num = dp.add_partition(configdrive_mb) part_dict['configdrive'] = part_template % part_num # NOTE(lucasagomes): Make the root partition the last partition. This # enables tools like cloud-init's growroot utility to expand the root # partition until the end of the disk. LOG.debug("Add root partition (%(size)d MB) to device: %(dev)s", {'dev': dev, 'size': root_mb}) part_num = dp.add_partition(root_mb, bootable=(boot_option == "local" and boot_mode == "bios")) part_dict['root'] = part_template % part_num if commit: # write to the disk dp.commit() return part_dict def is_block_device(dev): """Check whether a device is block or not.""" attempts = CONF.disk_utils.iscsi_verify_attempts for attempt in range(attempts): try: s = os.stat(dev) except OSError as e: LOG.debug("Unable to stat device %(dev)s. Attempt %(attempt)d " "out of %(total)d. 
Error: %(err)s", {"dev": dev, "attempt": attempt + 1, "total": attempts, "err": e}) time.sleep(1) else: return stat.S_ISBLK(s.st_mode) msg = _("Unable to stat device %(dev)s after attempting to verify " "%(attempts)d times.") % {'dev': dev, 'attempts': attempts} LOG.error(msg) raise exception.InstanceDeployFailure(msg) def dd(src, dst): """Execute dd from src to dst.""" utils.dd(src, dst, 'bs=%s' % CONF.disk_utils.dd_block_size, 'oflag=direct') def qemu_img_info(path): """Return an object containing the parsed output from qemu-img info.""" if not os.path.exists(path): return imageutils.QemuImgInfo() out, err = utils.execute('env', 'LC_ALL=C', 'LANG=C', 'qemu-img', 'info', path) return imageutils.QemuImgInfo(out) def convert_image(source, dest, out_format, run_as_root=False): """Convert image to other format.""" cmd = ('qemu-img', 'convert', '-O', out_format, source, dest) utils.execute(*cmd, run_as_root=run_as_root) def populate_image(src, dst): data = qemu_img_info(src) if data.file_format == 'raw': dd(src, dst) else: convert_image(src, dst, 'raw', True) # TODO(rameshg87): Remove this one-line method and use utils.mkfs # directly. def mkfs(fs, dev, label=None): """Execute mkfs on a device.""" utils.mkfs(fs, dev, label) def block_uuid(dev): """Get UUID of a block device.""" out, _err = utils.execute('blkid', '-s', 'UUID', '-o', 'value', dev, run_as_root=True, check_exit_code=[0]) return out.strip() def get_image_mb(image_path, virtual_size=True): """Get size of an image in Megabyte.""" mb = 1024 * 1024 if not virtual_size: image_byte = os.path.getsize(image_path) else: data = qemu_img_info(image_path) image_byte = data.virtual_size # round up size to MB image_mb = int((image_byte + mb - 1) / mb) return image_mb def get_dev_block_size(dev): """Get the device size in 512 byte sectors.""" block_sz, cmderr = utils.execute('blockdev', '--getsz', dev, run_as_root=True, check_exit_code=[0]) return int(block_sz) def destroy_disk_metadata(dev, node_uuid): """Destroy metadata structures on node's disk. Ensure that node's disk appears to be blank without zeroing the entire drive. To do this we will zero the first 18KiB to clear MBR / GPT data and the last 18KiB to clear GPT and other metadata like LVM, veritas, MDADM, DMRAID, etc. """ # NOTE(NobodyCam): This is needed to work around bug: # https://bugs.launchpad.net/ironic/+bug/1317647 LOG.debug("Start destroy disk metadata for node %(node)s.", {'node': node_uuid}) try: utils.execute('dd', 'if=/dev/zero', 'of=%s' % dev, 'bs=512', 'count=36', run_as_root=True, check_exit_code=[0]) except processutils.ProcessExecutionError as err: with excutils.save_and_reraise_exception(): LOG.error(_LE("Failed to erase beginning of disk for node " "%(node)s. Command: %(command)s. Error: %(error)s."), {'node': node_uuid, 'command': err.cmd, 'error': err.stderr}) # now wipe the end of the disk. # get end of disk seek value try: block_sz = get_dev_block_size(dev) except processutils.ProcessExecutionError as err: with excutils.save_and_reraise_exception(): LOG.error(_LE("Failed to get disk block count for node %(node)s. " "Command: %(command)s. Error: %(error)s."), {'node': node_uuid, 'command': err.cmd, 'error': err.stderr}) else: seek_value = block_sz - 36 try: utils.execute('dd', 'if=/dev/zero', 'of=%s' % dev, 'bs=512', 'count=36', 'seek=%d' % seek_value, run_as_root=True, check_exit_code=[0]) except processutils.ProcessExecutionError as err: with excutils.save_and_reraise_exception(): LOG.error(_LE("Failed to erase the end of the disk on node " "%(node)s. 
Command: %(command)s. " "Error: %(error)s."), {'node': node_uuid, 'command': err.cmd, 'error': err.stderr}) def _get_configdrive(configdrive, node_uuid): """Get the information about size and location of the configdrive. :param configdrive: Base64 encoded Gzipped configdrive content or configdrive HTTP URL. :param node_uuid: Node's uuid. Used for logging. :raises: InstanceDeployFailure if it can't download or decode the config drive. :returns: A tuple with the size in MiB and path to the uncompressed configdrive file. """ # Check if the configdrive option is a HTTP URL or the content directly is_url = utils.is_http_url(configdrive) if is_url: try: data = requests.get(configdrive).content except requests.exceptions.RequestException as e: raise exception.InstanceDeployFailure( _("Can't download the configdrive content for node %(node)s " "from '%(url)s'. Reason: %(reason)s") % {'node': node_uuid, 'url': configdrive, 'reason': e}) else: data = configdrive try: data = six.BytesIO(base64.b64decode(data)) except TypeError: error_msg = (_('Config drive for node %s is not base64 encoded ' 'or the content is malformed.') % node_uuid) if is_url: error_msg += _(' Downloaded from "%s".') % configdrive raise exception.InstanceDeployFailure(error_msg) configdrive_file = tempfile.NamedTemporaryFile(delete=False, prefix='configdrive', dir=CONF.ironic_lib.tempdir) configdrive_mb = 0 with gzip.GzipFile('configdrive', 'rb', fileobj=data) as gunzipped: try: shutil.copyfileobj(gunzipped, configdrive_file) except EnvironmentError as e: # Delete the created file utils.unlink_without_raise(configdrive_file.name) raise exception.InstanceDeployFailure( _('Encountered error while decompressing and writing ' 'config drive for node %(node)s. Error: %(exc)s') % {'node': node_uuid, 'exc': e}) else: # Get the file size and convert to MiB configdrive_file.seek(0, os.SEEK_END) bytes_ = configdrive_file.tell() configdrive_mb = int(math.ceil(float(bytes_) / units.Mi)) finally: configdrive_file.close() return (configdrive_mb, configdrive_file.name) def work_on_disk(dev, root_mb, swap_mb, ephemeral_mb, ephemeral_format, image_path, node_uuid, preserve_ephemeral=False, configdrive=None, boot_option="netboot", boot_mode="bios"): """Create partitions and copy an image to the root partition. :param dev: Path for the device to work on. :param root_mb: Size of the root partition in megabytes. :param swap_mb: Size of the swap partition in megabytes. :param ephemeral_mb: Size of the ephemeral partition in megabytes. If 0, no ephemeral partition will be created. :param ephemeral_format: The type of file system to format the ephemeral partition. :param image_path: Path for the instance's disk image. :param node_uuid: node's uuid. Used for logging. :param preserve_ephemeral: If True, no filesystem is written to the ephemeral block device, preserving whatever content it had (if the partition table has not changed). :param configdrive: Optional. Base64 encoded Gzipped configdrive content or configdrive HTTP URL. :param boot_option: Can be "local" or "netboot". "netboot" by default. :param boot_mode: Can be "bios" or "uefi". "bios" by default. :returns: a dictionary containing the following keys: 'root uuid': UUID of root partition 'efi system partition uuid': UUID of the uefi system partition (if boot mode is uefi). NOTE: If key exists but value is None, it means partition doesn't exist. """ # the only way for preserve_ephemeral to be set to true is if we are # rebuilding an instance with --preserve_ephemeral. 
commit = not preserve_ephemeral # now if we are committing the changes to disk clean first. if commit: destroy_disk_metadata(dev, node_uuid) try: # If requested, get the configdrive file and determine the size # of the configdrive partition configdrive_mb = 0 configdrive_file = None if configdrive: configdrive_mb, configdrive_file = _get_configdrive(configdrive, node_uuid) part_dict = make_partitions(dev, root_mb, swap_mb, ephemeral_mb, configdrive_mb, commit=commit, boot_option=boot_option, boot_mode=boot_mode) ephemeral_part = part_dict.get('ephemeral') swap_part = part_dict.get('swap') configdrive_part = part_dict.get('configdrive') root_part = part_dict.get('root') if not is_block_device(root_part): raise exception.InstanceDeployFailure( _("Root device '%s' not found") % root_part) for part in ('swap', 'ephemeral', 'configdrive', 'efi system partition'): part_device = part_dict.get(part) LOG.debug("Checking for %(part)s device (%(dev)s) on node " "%(node)s.", {'part': part, 'dev': part_device, 'node': node_uuid}) if part_device and not is_block_device(part_device): raise exception.InstanceDeployFailure( _("'%(partition)s' device '%(part_device)s' not found") % {'partition': part, 'part_device': part_device}) # If it's a uefi localboot, then we have created the efi system # partition. Create a fat filesystem on it. if boot_mode == "uefi" and boot_option == "local": efi_system_part = part_dict.get('efi system partition') mkfs(dev=efi_system_part, fs='vfat', label='efi-part') if configdrive_part: # Copy the configdrive content to the configdrive partition dd(configdrive_file, configdrive_part) finally: # If the configdrive was requested make sure we delete the file # after copying the content to the partition if configdrive_file: utils.unlink_without_raise(configdrive_file) populate_image(image_path, root_part) if swap_part: mkfs(dev=swap_part, fs='swap', label='swap1') if ephemeral_part and not preserve_ephemeral: mkfs(dev=ephemeral_part, fs=ephemeral_format, label="ephemeral0") uuids_to_return = { 'root uuid': root_part, 'efi system partition uuid': part_dict.get('efi system partition') } try: for part, part_dev in six.iteritems(uuids_to_return): if part_dev: uuids_to_return[part] = block_uuid(part_dev) except processutils.ProcessExecutionError: with excutils.save_and_reraise_exception(): LOG.error(_LE("Failed to detect %s"), part) return uuids_to_return
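A hedged sketch of a call into work_on_disk() defined above. Every value is a placeholder (device path, partition sizes, image path, node UUID), the process needs enough privileges to run parted and dd, and real callers live in ironic's deploy drivers rather than in ad-hoc scripts like this.

# Hypothetical call into work_on_disk(); all values below are placeholders.
uuids = work_on_disk(
    dev='/dev/sda',                      # target block device
    root_mb=10240,                       # 10 GiB root partition
    swap_mb=1024,                        # 1 GiB swap
    ephemeral_mb=0,                      # no ephemeral partition
    ephemeral_format=None,
    image_path='/tmp/user_image.raw',    # instance disk image
    node_uuid='1be26c0b-03f2-4d2e-ae87-c02d7f33c123',
    boot_option='local',
    boot_mode='bios')
print(uuids['root uuid'])                # UUID of the freshly imaged root partition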
It’s that most wonderful time of year again: Banned Super Bowl Ad time! SodaStream is a relatively new, second-year entry, this time with an ad featuring Scarlett Johansson. Typically, banned Super Bowl ads are pretty low-rent stuff, ixnayed owing to shoddy production or controversial or skeevy content; sometimes companies make and submit ads knowing they haven’t a prayer of getting into the game, purely for the sake of the publicity (I know, shocking, right?). The bet is that this ad will go viral in advance of the game in an even bigger way than did last year’s banned SodaStream ad, which had only depicted truck drivers wearing uniforms featuring Coke and Pepsi logos. This year’s SodaStream ad is far more sophisticated, and easier to tweak, so as to maybe end up in the game after all, for double the exposure. This year’s ad was “banned” because it ends with Johansson purring “Sorry, Coke and Pepsi” after extolling the virtues of her employer’s sodas over the competition (and major Super Bowl advertisers).
""" Example using a filtered back-projection (FBP) in fan beam using `fbp_op`. Note that the FBP is only approximate in this geometry, but still gives a decent reconstruction that can be used as an initial guess in more complicated methods. Here we look at a partial scan, where the angular interval is not 2 * pi. This caues issues for the regular FBP reconstruction, but can be improved via a Parker weighting. """ import numpy as np import odl # --- Set-up geometry of the problem --- # # Discrete reconstruction space: discretized functions on the cube # [-20, 20]^2 with 300 samples per dimension. reco_space = odl.uniform_discr( min_pt=[-20, -20], max_pt=[20, 20], shape=[300, 300], dtype='float32') # Make a circular cone beam geometry with flat detector # Angles: uniformly spaced, n = 360, min = 0, max = pi + fan angle angle_partition = odl.uniform_partition(0, np.pi + 0.7, 360) # Detector: uniformly sampled, n = 558, min = -40, max = 40 detector_partition = odl.uniform_partition(-40, 40, 558) # Geometry with large fan angle geometry = odl.tomo.FanFlatGeometry( angle_partition, detector_partition, src_radius=80, det_radius=40) # --- Create Filtered Back-Projection (FBP) operator --- # # Ray transform (= forward projection). We use the ASTRA CUDA backend. ray_trafo = odl.tomo.RayTransform(reco_space, geometry, impl='astra_cuda') # Create FBP operator using utility function # We select a Hann filter, and only use the lowest 80% of frequencies to avoid # high frequency noise. fbp = odl.tomo.fbp_op(ray_trafo, filter_type='Hann', frequency_scaling=0.8) # Apply parker weighting in order to improve reconstruction parker_weighting = odl.tomo.parker_weighting(ray_trafo) parker_weighting.show() parker_weighted_fbp = fbp * parker_weighting # --- Show some examples --- # # Create a discrete Shepp-Logan phantom (modified version) phantom = odl.phantom.shepp_logan(reco_space, modified=True) # Create projection data by calling the ray transform on the phantom proj_data = ray_trafo(phantom) # Calculate filtered back-projection of data fbp_reconstruction = fbp(proj_data) pw_fbp_reconstruction = parker_weighted_fbp(proj_data) # Shows a slice of the phantom, projections, and reconstruction phantom.show(title='Phantom') proj_data.show(title='Projection data (sinogram)') fbp_reconstruction.show(title='Filtered back-projection') pw_fbp_reconstruction.show(title='Parker weighted filtered back-projection')
It is never too late. Don't give up. Let the fear go. Tomorrow begins today.

Have you ever felt your stomach flutter, felt how you can't see or hear anything? That's how I feel right now. But I am still here, trying to tell you why you need to fight for all the things you want. Why you need to live every moment fully, because it never comes back; a moment can never be lived twice. That's why I tell you, "It is never too late to start to fight, to enjoy your life." I will not tell you that it is going to be easy, but that is why I told you never to give up.

How many times a week do you tell yourself "I can't do it," or something like that? Those are exactly the things we should not give up on; try again and again and again up to the end, and when you can finally say "I DID IT," you will see everything you wanted, and the pain will not matter anymore. But if you ignore what I am telling you, do you want me to tell you when you are going to be happy? Tomorrow. Do you think that is too early? Don't be surprised, because tomorrow I am going to tell you tomorrow, tomorrow, and tomorrow again. Think about it; try to never give up. Tomorrow starts today. And today has to be happy.

WriteWork contributors. "Speech : Tomorrow Begins Today" WriteWork.com. WriteWork.com, 01 February, 2008. Web. 24 Apr. 2019.
# -*- coding: utf-8 -*- from PySide import QtGui, QtCore from .ui_add_carrier import Ui_AddCarrier from mango.models.carrier import Carrier from .error_message_box import ErrorMessageBox class AddCarrier(QtGui.QDialog): def __init__(self, parent): super(AddCarrier, self).__init__(parent) self.api = parent.api self.ui = Ui_AddCarrier() self.ui.setupUi(self) self.api.get_carriers() self.ui.frequentWidget.setEnabled(False) self.carriersTableModel = CarriersTableModel([], self) self.filterCarriersProxyModel = QtGui.QSortFilterProxyModel() self.filterCarriersProxyModel.setSourceModel(self.carriersTableModel) self.filterCarriersProxyModel.setFilterKeyColumn(-1) self.filterCarriersProxyModel.setFilterCaseSensitivity(QtCore.Qt.CaseInsensitive) self.ui.carriersTableView.setModel(self.filterCarriersProxyModel) self.ui.filterLineEdit.textChanged.connect(self.filterCarriersProxyModel.setFilterRegExp) self.api.getCarriersFinished.connect(self.carriersTableModel.refreshCarriers) self.ui.newButton.clicked.connect(self.enableCarrierType) self.ui.frequentButton.clicked.connect(self.enableCarrierType) self.ui.carriersTableView.doubleClicked.connect(self.addCarrier) self.ui.addButton.clicked.connect(self.addCarrier) self.ui.cancelButton.clicked.connect(self.reject) def addCarrier(self): if self.ui.newButton.isChecked(): errors = [] name = self.ui.nameLineEdit.text() if name == "": errors.append("Debe indicar un nombre") if not errors: self.new = True self.carrier = Carrier(name, None) if self.ui.saveAsFrequentBox.isChecked(): self.carrier.frequent = True self.accept() else: ErrorMessageBox(errors).exec_() else: errors = [] carrierFilteredIndex = self.ui.carriersTableView.currentIndex() if carrierFilteredIndex.row() == -1: errors.append("Debe seleccionar una transportista") if not errors: self.new = False carrierIndex = self.filterCarriersProxyModel.mapToSource(carrierFilteredIndex) self.carrier = self.carriersTableModel.getCarrier(carrierIndex.row()) self.accept() else: ErrorMessageBox(errors).exec_() def enableCarrierType(self): if self.ui.newButton.isChecked(): self.ui.newWidget.setEnabled(True) self.ui.frequentWidget.setEnabled(False) else: self.ui.newWidget.setEnabled(False) self.ui.frequentWidget.setEnabled(True) class CarriersTableModel(QtCore.QAbstractTableModel): def __init__(self, carriers, parent): super(CarriersTableModel, self).__init__(parent) self._carriers = carriers self._headers = ['Nombre'] def getCarrier(self, row): return self._carriers[row] def refreshCarriers(self, carriers): self.beginResetModel() self._carriers = carriers self.endResetModel() def headerData(self, section, orientation, role): if role == QtCore.Qt.DisplayRole: if orientation == QtCore.Qt.Horizontal: return self._headers[section] def rowCount(self, parent): return len(self._carriers) def columnCount(self, parent): return len(self._headers) def data(self, index, role): row = index.row() column = index.column() if role == QtCore.Qt.DisplayRole: if column == 0: return self._carriers[row].name
Mac users are complaining that Apple's Time Capsule backup devices are failing after about 18 months. Reports of failed Time Capsules have been common on Apple’s support site for months. Most users posting to the two most popular threads, which together boast more than 60,000 views and over 600 messages, claim that their Time Capsules failed without any warning. “My Time Capsule was running just fine, then spontaneously just powered off by itself,” said one user, who kicked off one of the two main threads on the issue in late August. “I’m guessing that there’s a power connection fried, because if it were a power supply failure the lights would not have blinked on at all.” “My Time Capsule has just failed too, after 18 months of use,” reported another user on the same thread Monday.

To show that you’re not alone in this process, irate Mac users have opened the Apple Time Capsule Memorial Register. To Apple’s credit, however, users have reported that Apple has been replacing dead devices under the standard 12-month warranty issued on purchase of a Time Capsule, as well as under the AppleCare replacement warranty, if the user has purchased it. Apple currently sells two models of the Time Capsule: a 1TB backup device for $299 and a 2TB model for $499.

Does anything Apple makes work?

@crankenstein – What an ignorant comment. Do some research. Lots of MS stuff is crap, too (hello, Zune). Neither company is immune.
from joernInterface.JoernInterface import jutils class Node(object): def __init__(self, node_id, properties = None): self.node_id = node_id self.properties = properties self.node_selection = 'g.v("{}")'.format(self.node_id) def __str__(self): return str(self.node_id) def __repr__(self): return str(self) def __eq__(self, other): return self.node_id == other.node_id def __ne__(self, other): return self.node_id != other.node_id def __hash__(self): return self.node_id def load_properties(self): _, node = jutils.raw_lookup(self.node_selection)[0] self.properties = node.get_properties() def get_property(self, label): if not self.properties: self.load_properties() if label in self.properties: return self.properties[label] else: return None def getId(self): return self.node_id @property def node_type(self): return self.get_property('type')
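A hypothetical usage sketch for the wrapper above (the node id and properties are made up, and the joernInterface package must be importable). Passing properties up front avoids the gremlin round-trip that load_properties() would otherwise make against a running joern instance.

n = Node(42, properties={'type': 'Function', 'code': 'main'})
print(n.node_type)                        # 'Function', resolved via get_property()
print(n.get_property('code'))             # 'main'
print(n.get_property('missing') is None)  # absent keys return None
print(n == Node(42), hash(n))             # equality and hashing use node_id only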
An unsought-for but unexpectedly perfect book. This summer I've been reading books from 1914, as well as a few written about that momentous year, so when I was at the library last week and spotted this book on the display shelf it felt serendipitous. Bookish serendipity at its best. This novel, written by French author Jean Echenoz, is only 109 pages long and begins in France on the first day of World War I. Charles and Anthime Seze, two brothers in love with the same girl, are quickly mobilized. Charles is positive the war won't last long; Anthime is not so sure. Echenoz paints perfect snapshot moments of what follows next for these brothers: in the air, on the ground, and in the trenches. There's mud, gas, shrapnel, and death; there's also survival, life, and hope. I love how Echenoz was able to show so much with so few words. This is a quiet and beautifully written novel. I'm so glad I stumbled on it so serendipitously.

I love bookish serendipity! And I have been meaning to read more about the Great War, as the centennial gets closer.

This was a quick read and gives you a taste of what the war was like.

It's always lovely when you unexpectedly find a brilliant book :) Great review!

How perfect! I love finding a book that I didn't know I needed! I'm glad you enjoyed 1914.

Thanks! It was a happy accident.
from __future__ import print_function from tornado.gen import Task, Return, coroutine import tornado.process import tornado.web from tornado.ioloop import IOLoop import re from repl import Repl import os.path import time import json import subprocess repls = {} pattern = re.compile(r"/(\d+)") safe_repls = ["prolog","scala","python","haskell","ruby","clojure","erlang","kotlin","nodejs"] def create_repl(ioloop,repl_type): global repls repl = Repl(ioloop, repl_type) repls[repl.identity] = repl return repl.identity def clean_idle_repls(): global repls try: to_del = [] for key, repl in repls.iteritems(): if repl.is_expired(): to_del.append(key) repl.close() for key in to_del: del repls[key] ioloop = tornado.ioloop.IOLoop.current() finally: ioloop.call_later(2, clean_idle_repls) class KillReplHandler(tornado.web.RequestHandler): def get(self, path): num = int(path) if num in repls: repls[num].close() del repls[num] self.set_status(200) self.finish() else: self.clear() self.set_status(404) self.finish("<html><body>non existant repl type</body></html>") class NewReplHandler(tornado.web.RequestHandler): def get(self, repl_type): if repl_type in safe_repls: repl_id = create_repl(ioloop, repl_type) self.write(json.dumps(repl_id)) else: self.clear() self.set_status(404) self.finish("<html><body>non existant repl type</body></html>") @tornado.web.stream_request_body class MainHandler(tornado.web.RequestHandler): def get(self, path): num = int(path) if num not in repls: self.set_status(404) else: repls[num].drain_to_handler(self) def post(self, path): self.write("") def data_received(self, chunk): num = int(pattern.match(self.request.path).group(1)) if num not in repls: self.set_status(404) else: repls[num].write_async(chunk) settings = { "static_path": os.path.join(os.path.dirname(__file__), "static") } class RootHandler(tornado.web.RequestHandler): def get(self): self.render("static/index.html") application = tornado.web.Application([ (r"/", RootHandler), (r"/kill/(\d+)", KillReplHandler), (r"/(\d+)", MainHandler), (r"/new/([a-zA-Z0-9\-]+)", NewReplHandler), ], **settings) if __name__ == "__main__": application.listen(8888) ioloop = tornado.ioloop.IOLoop.current() ioloop.call_later(5, clean_idle_repls) ioloop.start()
We pick the very best images at the ideal resolution just for you, and this image is among the picture selections in our very best photographs gallery concerning Ab Testing Resume. I hope you like it. Published by fiforlife on 2018-08-14 12:15:44. To see all photos in the Ab Testing Resume images gallery, please follow this page link. Don't forget to look at the next photo gallery, which also contains the Ab Testing Resume In Qa Engineer Sample Resume image shown above. If you have any thoughts or questions, or just want to say hello to other people, please do not hesitate to submit your opinions/ideas via the following comment form.
#!/usr/bin/env python from flask import Flask, request, redirect, url_for, abort, render_template, make_response, after_this_request import flickrapi from werkzeug import secure_filename import xml.etree.cElementTree as ET import xml.etree.ElementTree as xml import os import md5 import tarfile import hashlib import binascii import random import string import webbrowser # Eye-Fi Port PORT = 59278 # KEY for Eye-Fi Mobi Cards KEY = u'00000000000000000000000000000000' # Server nonce SERVER_CRED = '' # Client nonce SESSION = '' FILE_ID = 1 # UPLOAD_FOLDER = '/sd/uploads' UPLOAD_FOLDER = '/tmp' FLICKR_API_KEY = u'034800f3a9eb9d88d054c9d00a67d82e' FLICKR_API_SECRET = u'fa6a19f351f9aced' # Create application. app = Flask(__name__) app.config['UPLOAD_FOLDER'] = UPLOAD_FOLDER # Env vars. # app.config.from_envvar('FLASKR_SETTINGS', silent=False) @app.route('/') def index(): return render_template('index.html') @app.route('/api/soap/eyefilm/v1', methods=['POST']) def start_session(): if 'Soapaction' not in request.headers: abort(400) header_value = request.headers['Soapaction'] if header_value == '"urn:StartSession"': app.logger.info('Running Start session..') root = ET.fromstring(request.data) for child in root: for step_child in child: for step_step_child in step_child: if step_step_child.tag == 'macaddress': macaddress = step_step_child.text elif step_step_child.tag == 'cnonce': cnonce = step_step_child.text elif step_step_child.tag == 'transfermode': transfermode = step_step_child.text elif step_step_child.tag == 'transfermodetimestamp': transfermode_timestamp = step_step_child.text credential = _get_credential(macaddress, cnonce, KEY) _set_cnonce(credential) new_snonce = _get_new_snonce() _set_snonce(new_snonce) resp = make_response(render_template( 'start_session_response.xml', credential=credential, snonce=SERVER_CRED, transfermode=transfermode, transfermode_timestamp=transfermode_timestamp)) resp.headers['Content-Type'] = 'text/xml; charset="utf-8"' resp.headers['Connection'] = 'keep-alive' return resp elif header_value == '"urn:GetPhotoStatus"': app.logger.info('Running Get Photo Status..') root = ET.fromstring(request.data) for child in root: for step_child in child: for step_step_child in step_child: if step_step_child.tag == 'credential': credential = step_step_child.text elif step_step_child.tag == 'macaddress': macaddress = step_step_child.text elif step_step_child.tag == 'filename': file_name = step_step_child.text elif step_step_child.tag == 'filesize': file_size = step_step_child.text elif step_step_child.tag == 'filesignature': file_sig = step_step_child.text elif step_step_child.tag == 'flags': flags = step_step_child.text old_credential = _get_credential(macaddress, KEY, SERVER_CRED) if old_credential == credential: @after_this_request def set_file_id(resp): global FILE_ID FILE_ID += 1 return resp resp = make_response(render_template( 'get_photo_status_response.xml', file_id=FILE_ID, offset=0)) resp.headers['Content-Type'] = 'text/xml; charset="utf-8"' resp.headers['Connection'] = 'keep-alive' return resp else: abort(400) else: abort(400) @app.route('/api/soap/eyefilm/v1/upload', methods=['POST']) def capture_upload(): app.logger.info('Running file upload...') app.logger.info(request.headers) app.logger.info(request.form) app.logger.info(request.files) # We ignore this for now.. 
integrity_digest = request.form['INTEGRITYDIGEST'] app.logger.info('integrity_digest') app.logger.info(integrity_digest) upload_data = request.form['SOAPENVELOPE'] app.logger.info('upload_data') app.logger.info(upload_data) # Image object image_tar = request.files['FILENAME'] app.logger.info('image_tar') app.logger.info(image_tar) # Get file from req tar_filename = secure_filename(image_tar.filename) image_filename = tar_filename.rsplit('.', 1)[0] app.logger.info('image_filename') app.logger.info(image_filename) # Save file to upload dir tar_file_path = os.path.join(app.config['UPLOAD_FOLDER'], tar_filename) app.logger.info('tar_file_path') app.logger.info(tar_file_path) image_tar.save(tar_file_path) image_file_path = os.path.join(app.config['UPLOAD_FOLDER'], image_filename) app.logger.info('image_file_path') app.logger.info(image_file_path) ar = tarfile.open(tar_file_path, mode='r') ar.extractall(path=app.config['UPLOAD_FOLDER']) ar.close() root = ET.fromstring(upload_data) for child in root: for step_child in child: for step_step_child in step_child: if step_step_child.tag == 'fileid': file_id = step_step_child.text elif step_step_child.tag == 'macaddress': macaddress = step_step_child.text elif step_step_child.tag == 'filename': pass elif step_step_child.tag == 'filesize': filesize = step_step_child.text elif step_step_child.tag == 'filesignature': file_sig = step_step_child.text elif step_step_child.tag == 'encryption': encryption = step_step_child.text elif step_step_child.tag == 'flags': flags = step_step_child.text @after_this_request def flickr(resp): _flickr_upload_photo(image_filename, image_file_path) return resp return render_template('upload_photo_response.xml') def _flickr_upload_photo(file_name, file_path): flickr = flickrapi.FlickrAPI(FLICKR_API_KEY, FLICKR_API_SECRET) if not flickr.token_valid(perms=u'write'): # Get a request token flickr.get_request_token(oauth_callback='oob') # Open a browser at the authentication URL. Do this however # you want, as long as the user visits that URL. authorize_url = flickr.auth_url(perms=u'write') webbrowser.open_new_tab(authorize_url) # Get the verifier code from the user. Do this however you # want, as long as the user gives the application the code. verifier = unicode(raw_input('Verifier code: ')) # Trade the request token for an access token flickr.get_access_token(verifier) return flickr.upload( is_public=1, fileobj=open(file_path, 'rb'), filename=file_name, content_type=1, format='rest') else: return flickr.upload( is_public=1, fileobj=open(file_path, 'rb'), filename=file_name, content_type=1, format='rest') def _get_new_snonce(): m = md5.new() random_word = '.'.join(random.choice(string.lowercase) for i in range(40)) m.update(random_word) return m.hexdigest() def _set_cnonce(cnonce): global SESSION SESSION = cnonce def _set_snonce(credential): global SERVER_CRED SERVER_CRED = credential def _get_credential(mac, cnonce, key): cred_str = mac + cnonce + key bin_cred_str = binascii.unhexlify(cred_str) m = hashlib.md5() m.update(bin_cred_str) return m.hexdigest() if __name__ == '__main__': app.debug = True app.run(port=PORT, host='0.0.0.0')
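As a side note, the credential exchanged in the StartSession/GetPhotoStatus handshake above is simply the MD5 digest of the hex-decoded concatenation of MAC address, nonce, and card key. A small illustration of _get_credential, using a fabricated MAC address and nonce together with the all-zero Mobi KEY defined at the top of the script:

mac = '001856417729'                         # fabricated MAC address
cnonce = '1234567890abcdef1234567890abcdef'  # fabricated client nonce
print(_get_credential(mac, cnonce, KEY))     # hex digest the card is expected to present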
You are right, the camera is not a high point of this tablet. I subscribe to Consumer Reports online, and they said it has a relatively coarse screen resolution of 149 pixels per inch, and that the rectangular screen shape is better for videos than for photos. Consumer Reports does rate this tablet overall as a Best Buy. If you want a tablet specifically for pictures, this may not be the one for you, but you might still want to consider using your phone or an actual camera, because using a 10-inch tablet to take pictures is cumbersome anyway.
# # jython examples for jas. # $Id$ # import sys from java.lang import System from jas import PolyRing, QQ, AN, RF from jas import terminate, startLog # polynomial examples: prime/primary decomposition in Q(sqrt(2))(x)(sqrt(x))[y,z] Q = PolyRing(QQ(),"w2",PolyRing.lex); print "Q = " + str(Q); [e,a] = Q.gens(); #print "e = " + str(e); print "a = " + str(a); root = a**2 - 2; print "root = " + str(root); Q2 = AN(root,field=True); print "Q2 = " + str(Q2.factory()); [one,w2] = Q2.gens(); #print "one = " + str(one); #print "w2 = " + str(w2); print; Qp = PolyRing(Q2,"x",PolyRing.lex); print "Qp = " + str(Qp); [ep,wp,ap] = Qp.gens(); #print "ep = " + str(ep); #print "wp = " + str(wp); #print "ap = " + str(ap); print; Qr = RF(Qp); print "Qr = " + str(Qr.factory()); [er,wr,ar] = Qr.gens(); #print "er = " + str(er); #print "wr = " + str(wr); #print "ar = " + str(ar); print; Qwx = PolyRing(Qr,"wx",PolyRing.lex); print "Qwx = " + str(Qwx); [ewx,wwx,ax,wx] = Qwx.gens(); #print "ewx = " + str(ewx); print "ax = " + str(ax); #print "wwx = " + str(wwx); print "wx = " + str(wx); print; rootx = wx**2 - ax; print "rootx = " + str(rootx); Q2x = AN(rootx,field=True); print "Q2x = " + str(Q2x.factory()); [ex2,w2x2,ax2,wx] = Q2x.gens(); #print "ex2 = " + str(ex2); #print "w2x2 = " + str(w2x2); #print "ax2 = " + str(ax2); #print "wx = " + str(wx); print; Yr = PolyRing(Q2x,"y,z",PolyRing.lex) print "Yr = " + str(Yr); [e,w2,x,wx,y,z] = Yr.gens(); print "e = " + str(e); print "w2 = " + str(w2); print "x = " + str(x); print "wx = " + str(wx); print "y = " + str(y); print "z = " + str(z); print; f1 = ( y**2 - x ) * ( y**2 - 2 ); #f1 = ( y**2 - x )**3 * ( y**2 - 2 )**2; f2 = ( z**2 - y**2 ); print "f1 = ", f1; print "f2 = ", f2; print; F = Yr.ideal( list=[f1,f2] ); print "F = ", F; print; #sys.exit(); startLog(); t = System.currentTimeMillis(); P = F.primeDecomp(); #P = F.primaryDecomp(); t1 = System.currentTimeMillis() - t; print "P = ", P; print; print "prime/primary decomp time =", t1, "milliseconds"; print; print "F = ", F; print; #startLog(); terminate();
IEE uses an index-matched optical bonding technique to create an optical stack with an integral heater for low-temperature operations, EMI shielding, and a cover glass featuring anti-reflective/anti-glare treatments. In addition, the 10.1-inch display boasts very wide viewing angles and features a selectable dual-mode LED backlight for sunlight-readable daytime operation and NVIS-compatible operation at night. The fully featured operator interface includes an 8-way joystick and encoder knobs, along with 13 programmable bezel pushbuttons, supporting the control of a wide range of applications. IEE also offers a 12.1-inch Control Display Unit (CDU) designed with networked infrastructure and time-critical video processing for the next generation of ground vehicles and airborne architectures.
# Copyright 2014 The Chromium Authors. All rights reserved. # Use of this source code is governed by a BSD-style license that can be # found in the LICENSE file. import threading import zlib from profile_chrome import controllers from profile_chrome import util from pylib.constants import host_paths with host_paths.SysPath(host_paths.DEVIL_PATH): from devil.utils import cmd_helper _SYSTRACE_OPTIONS = [ # Compress the trace before sending it over USB. '-z', # Use a large trace buffer to increase the polling interval. '-b', '16384' ] # Interval in seconds for sampling systrace data. _SYSTRACE_INTERVAL = 15 _TRACING_ON_PATH = '/sys/kernel/debug/tracing/tracing_on' class SystraceController(controllers.BaseController): def __init__(self, device, categories, ring_buffer): controllers.BaseController.__init__(self) self._device = device self._categories = categories self._ring_buffer = ring_buffer self._done = threading.Event() self._thread = None self._trace_data = None def __repr__(self): return 'systrace' @staticmethod def GetCategories(device): return device.RunShellCommand('atrace --list_categories') def StartTracing(self, _): self._thread = threading.Thread(target=self._CollectData) self._thread.start() def StopTracing(self): self._done.set() def PullTrace(self): self._thread.join() self._thread = None if self._trace_data: output_name = 'systrace-%s' % util.GetTraceTimestamp() with open(output_name, 'w') as out: out.write(self._trace_data) return output_name def IsTracingOn(self): result = self._RunAdbShellCommand(['cat', _TRACING_ON_PATH]) return result.strip() == '1' def _RunAdbShellCommand(self, command): # We use a separate interface to adb because the one from AndroidCommands # isn't re-entrant. # TODO(jbudorick) Look at providing a way to unhandroll this once the # adb rewrite has fully landed. device_param = (['-s', str(self._device)] if str(self._device) else []) cmd = ['adb'] + device_param + ['shell'] + command return cmd_helper.GetCmdOutput(cmd) def _RunATraceCommand(self, command): cmd = ['atrace', '--%s' % command] + _SYSTRACE_OPTIONS + self._categories return self._RunAdbShellCommand(cmd) def _ForceStopAtrace(self): # atrace on pre-M Android devices cannot be stopped asynchronously # correctly. Use synchronous mode to force stop. cmd = ['atrace', '-t', '0'] return self._RunAdbShellCommand(cmd) def _CollectData(self): trace_data = [] self._RunATraceCommand('async_start') try: while not self._done.is_set(): self._done.wait(_SYSTRACE_INTERVAL) if not self._ring_buffer or self._done.is_set(): trace_data.append( self._DecodeTraceData(self._RunATraceCommand('async_dump'))) finally: trace_data.append( self._DecodeTraceData(self._RunATraceCommand('async_stop'))) if self.IsTracingOn(): self._ForceStopAtrace() self._trace_data = ''.join([zlib.decompress(d) for d in trace_data]) @staticmethod def _DecodeTraceData(trace_data): try: trace_start = trace_data.index('TRACE:') except ValueError: raise RuntimeError('Systrace start marker not found') trace_data = trace_data[trace_start + 6:] # Collapse CRLFs that are added by adb shell. if trace_data.startswith('\r\n'): trace_data = trace_data.replace('\r\n', '\n') # Skip the initial newline. return trace_data[1:]
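To illustrate what _DecodeTraceData does with raw atrace output, here is a tiny fabricated payload: everything before the TRACE: marker is dropped, adb's CRLFs are collapsed, and the leading newline is skipped.

sample = 'capturing trace... done\nTRACE:\r\n# tracer: nop\r\nline 1\r\n'
print(repr(SystraceController._DecodeTraceData(sample)))
# -> '# tracer: nop\nline 1\n'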
> -z gives you even more help as far as I remember. still underway. Or, if I can help, let me know.
# Copyright (c) 2009 Reza Lotun http://reza.lotun.name/ # # Permission is hereby granted, free of charge, to any person obtaining a # copy of this software and associated documentation files (the # "Software"), to deal in the Software without restriction, including # without limitation the rights to use, copy, modify, merge, publish, dis- # tribute, sublicense, and/or sell copies of the Software, and to permit # persons to whom the Software is furnished to do so, subject to the fol- # lowing conditions: # # The above copyright notice and this permission notice shall be included # in all copies or substantial portions of the Software. # # THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS # OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABIL- # ITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT # SHALL THE AUTHOR BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, # WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, # OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS # IN THE SOFTWARE. class Activity(object): def __init__(self, connection=None): self.connection = connection self.start_time = None self.activity_id = None self.progress = None self.status_code = None self.cause = None self.description = None def __repr__(self): return 'Activity:%s status:%s progress:%s' % (self.description, self.status_code, self.progress) def startElement(self, name, attrs, connection): return None def endElement(self, name, value, connection): if name == 'ActivityId': self.activity_id = value elif name == 'StartTime': self.start_time = value elif name == 'Progress': self.progress = value elif name == 'Cause': self.cause = value elif name == 'Description': self.description = value elif name == 'StatusCode': self.status_code = value else: setattr(self, name, value)
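A minimal sketch of how a SAX-style boto handler like Activity gets populated: walk an XML response and hand each element's text to endElement(). The XML below is illustrative, not a captured AWS response.

import xml.etree.ElementTree as ET

doc = """<Activity>
  <ActivityId>a-123</ActivityId>
  <StartTime>2009-01-01T00:00:00Z</StartTime>
  <StatusCode>InProgress</StatusCode>
  <Progress>40</Progress>
  <Description>Launching a new EC2 instance</Description>
</Activity>"""

activity = Activity()
for elem in ET.fromstring(doc):
    activity.endElement(elem.tag, elem.text, None)
print(activity)
# -> Activity:Launching a new EC2 instance status:InProgress progress:40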
Independence Day is celebrated on July 4 because that was the day in 1776 when the Continental Congress adopted the final draft of the Declaration of Independence. The War of Independence, however, dragged on until 1783, and in that year Independence Day was made an official holiday. In 1941 Congress declared the 4th of July a federal holiday.

The 50-star flag became the “Official flag of the United States” on July 4, 1960. It was put into place by President Dwight D. Eisenhower’s Executive Order number 10834, published on August 25, 1959.

On July 4th, we proudly celebrate the freedom and civility for which our forefathers fought so bravely… the freedom that is represented by the Stars and Stripes of our respected flag. The color red was selected for courage, white for purity, and blue for perseverance and justice.

Here are some etiquette tips for honoring our flag and our nation this 4th of July and all year long, whether at a business meeting or a public event.

When the Pledge of Allegiance is recited, citizens should stand at attention and salute by placing their right hand over the heart with the first word and hold the salute through the last line of the Pledge. Those in uniform give the appropriate formal salute as specified by the uniform service. The salute is directed to the flag by facing the flag during the Pledge. Men wearing a head cover (a hat or cap) are to remove it. Women may wear a head covering.

The Flag of the United States is saluted as it is hoisted and lowered. The salute is held until the flag is unsnapped from the halyard or through the last note of the national anthem, whichever is longer.

It was the valiant defense of Fort McHenry by American forces during the British attack on September 13, 1814 that inspired 35-year-old poet-lawyer Francis Scott Key to write the poem which was to become our national anthem, “The Star-Spangled Banner.” In 1931 the Congress of the United States of America enacted legislation that made “The Star-Spangled Banner” the official national anthem.

Then conquer we must, when our cause it is just,
And this be our motto: “In God is our trust”
And the Star-Spangled Banner in triumph shall wave
O’er the land of the free and the home of the brave!

When the national anthem is played or sung, citizens should stand at attention and salute, by placing their right hand over the heart, at the first note and hold the salute through the last note. The salute is directed to the flag, if displayed, otherwise to the music. If in uniform, the person should salute in the formal manner. It is proper to salute whenever the national anthem is played: in a public park, in a church, at a sporting event, etc.
# Copyright 2015 OpenStack LLC. # All Rights Reserved. # # Licensed under the Apache License, Version 2.0 (the "License"); you may # not use this file except in compliance with the License. You may obtain # a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, WITHOUT # WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the # License for the specific language governing permissions and limitations # under the License. from neutron_lib.db import model_base import sqlalchemy as sa from sqlalchemy import orm as sa_orm # Database models used by the neutron DB IPAM driver # NOTE(salv-orlando): The following data model creates redundancy with # models_v2.IPAllocationPool. This level of data redundancy could be tolerated # considering that the following model is specific to the IPAM driver logic. # It therefore represents an internal representation of a subnet allocation # pool and can therefore change in the future, where as # models_v2.IPAllocationPool is the representation of IP allocation pools in # the management layer and therefore its evolution is subject to APIs backward # compatibility policies class IpamAllocationPool(model_base.BASEV2, model_base.HasId): """Representation of an allocation pool in a Neutron subnet.""" ipam_subnet_id = sa.Column(sa.String(36), sa.ForeignKey('ipamsubnets.id', ondelete="CASCADE"), nullable=False) first_ip = sa.Column(sa.String(64), nullable=False) last_ip = sa.Column(sa.String(64), nullable=False) def __repr__(self): return "%s - %s" % (self.first_ip, self.last_ip) class IpamSubnet(model_base.BASEV2, model_base.HasId): """Association between IPAM entities and neutron subnets. For subnet data persistency - such as cidr and gateway IP, the IPAM driver relies on Neutron's subnet model as source of truth to limit data redundancy. """ neutron_subnet_id = sa.Column(sa.String(36), nullable=True) allocation_pools = sa_orm.relationship(IpamAllocationPool, backref='subnet', lazy="joined", cascade='delete') class IpamAllocation(model_base.BASEV2): """Model class for IP Allocation requests. """ ip_address = sa.Column(sa.String(64), nullable=False, primary_key=True) status = sa.Column(sa.String(36)) # The subnet identifier is redundant but come handy for looking up # IP addresses to remove. ipam_subnet_id = sa.Column(sa.String(36), sa.ForeignKey('ipamsubnets.id', ondelete="CASCADE"), primary_key=True, nullable=False)
In the wake of a school shooting in his hometown of Raleigh three days before his bid for reelection, a Republican senator (Brett LaRosa) makes an off-the-cuff comment that gets leaked. The comment calls into question the senator’s stance on guns and God, and sends his devoutly Christian wife (Leslie Williams) and liberal Jewish campaign manager (Candy Fox) scrambling to contain the damage. “Church & State” tackles the issues of religion in politics, gun control, and being true to oneself in a simultaneously funny, heartbreaking, and uplifting way. The production is directed by Gordon Wiener, and the cast is rounded out by Kyle Walton playing multiple roles.

“Church & State” runs March 8-23: Fridays and Saturdays at 8 pm, Thursday, March 21 at 8 pm, and Sunday, March 10 at 3 pm. For reservations, email us at alliancerepco@gmail.com or visit www.alliancerep.org.
import sys import logging import textwrap from argparse import ArgumentParser from argparse import RawDescriptionHelpFormatter from pprint import pprint import json from csirtg_mail import parse_email_from_string LOG_FORMAT = '%(asctime)s - %(levelname)s - %(name)s[%(lineno)s] - %(message)s' logger = logging.getLogger(__name__) def main(): p = ArgumentParser( description=textwrap.dedent('''\ csirtg-mail is a CLI tool for debugging, it allows you to easily input a email message and print out the py-cgmail data structure. example usage: $ cat test.eml | csirtg-mail $ csirtg-mail --file test.eml '''), formatter_class=RawDescriptionHelpFormatter, prog='csirtg-mail' ) p.add_argument("-f", "--file", dest="file", help="specify email file") p.add_argument("-d", "--debug", help="enable debugging", action="store_true") p.add_argument("-s", "--sanitize", help="strip parameters (...?foo=bar) from parsed URLs", action="store_true") p.add_argument("--urls", help="print URLS to stdout", action="store_true") args = p.parse_args() loglevel = logging.INFO if args.debug: loglevel = logging.DEBUG console = logging.StreamHandler() logging.getLogger('').setLevel(loglevel) console.setFormatter(logging.Formatter(LOG_FORMAT)) logging.getLogger('').addHandler(console) options = vars(args) # get email from file or stdin if options.get("file"): with open(options["file"], errors='ignore') as f: email = f.read() else: email = sys.stdin.read() # parse email message results = parse_email_from_string( email, sanitize_urls=options.get("sanitize")) if args.urls: for e in results: for u in e['urls']: print(u) raise SystemExit if args.debug: results = json.dumps(results, indent=4) else: results = json.dumps(results) print(results) if __name__ == "__main__": main()
On September 8, 2016, NASA's OSIRIS-REx spacecraft began its journey to near-Earth asteroid Bennu. Just as the sun began to set over Cape Canaveral, OSIRIS-REx made a picture-perfect liftoff at 7:05 pm EDT. It departed Space Launch Complex 41 aboard a United Launch Alliance Atlas V 411 rocket, cheered on by crowds of mission personnel and space enthusiasts. The launch sent OSIRIS-REx on a seven-year journey to asteroid Bennu and back. An excerpt of the launch broadcast appears at the top of this page. Raw camera feeds from Cape Canaveral and Kennedy Space Center appear below. These clips are intended as a video editor's resource, and are available for download in their original DVCPRO HD format. Launch commentary is provided by KSC host Mike Curie. Learn more about OSIRIS-REx from NASA and the University of Arizona. Excerpt of OSIRIS-REx launch coverage by NASA-TV. The unedited camera feeds from this broadcast are available below. Ground-level view of OSIRIS-REx lifting off from Space Launch Complex 41 at Cape Canaveral. OSIRIS-REx launch viewed from the Vehicle Assembly Building at Kennedy Space Center. OSIRIS-REx launch viewed from the Vertical Integration Facility at SLC-41 on Cape Canaveral. OSIRIS-REx launch footage from the UCS-3 camera. OSIRIS-REx launch filmed from the NASA Causeway, between Kennedy Space Center and Cape Canaveral. This view looks north along the Banana River toward SLC-41. Crowd gathered at Kennedy Space Center to watch OSIRIS-REx begin its journey. "Rocket Cam" view looking down the Atlas V 411 launch vehicle. The single solid rocket booster jettisons at 3:21 in the video. OSIRIS-REx launch footage from the UCS-3 tracker camera. OSIRIS-REx launch footage from the UCS-23 tracker camera.
#!/usr/bin/env python # -*- coding: utf-8 -*- try: from setuptools import setup, Extension except ImportError: from distutils.core import setup, Extension from Cython.Build import cythonize extensions = [ Extension("libchardet._libchardet", ["libchardet/_libchardet.pyx"], include_dirs = ["../src"], libraries = ["chardet"], library_dirs = ["../build/src"], ), ] setup( name = 'libchardet', author = 'Jérôme Carretero (zougloub)', author_email = 'cJ-libchardet@zougloub.eu', url = r"https://github.com/zougloub/libchardet", description = "Character Encoding Detector", long_description= "libchardet detects the most probable character" \ " encodings in a string.\n" \ "It doesn't give the best results but the goal is for the library to be" \ " expandable and clear.\n", version = '0.1', license = 'MPL 2.0', classifiers = [ # http://pypi.python.org/pypi?:action=list_classifiers 'Development Status :: 4 - Beta', 'License :: OSI Approved :: MPL License', 'Programming Language :: Cython', 'Programming Language :: Python', 'Topic :: Software Development :: Libraries', 'Programming Language :: Python :: 2', 'Programming Language :: Python :: 3', ], keywords = [ 'cython', 'libchardet', 'chardet', 'encoding', ], packages = ['libchardet'], ext_modules = cythonize(extensions), )
Exhibition Centre Liverpool is a brand new, state-of-the-art, purpose-built exhibition centre. It joins the ACC Liverpool family of world-class venues, alongside its interconnected sister venues, the BT Convention Centre and the Echo Arena. Located on Liverpool’s Waterfront, Exhibition Centre Liverpool is an exciting part of the city and a great place to visit for events.

The 1,600-space multi-storey Liverpool Waterfront Car Park is an integral part of the campus and just a few minutes’ walk from Exhibition Centre Liverpool. It is fully secure and open 24 hours a day, 7 days a week.

Exhibition Centre Liverpool is just two hours from London via direct train services, and regional and local rail connections bring it within easy reach of the North West and nearby regions. From Lime Street Station, it is just a short taxi journey away. Many bus routes serve the bus station at the Liverpool ONE retail development, just a five-minute walk from Exhibition Centre Liverpool. For door-to-door service, there are drop-off areas and dedicated coach parking.
__author__ = 'Christopher Nelson' import os import time import psutil from infinisqlmgr.management.metric import Metric memory = ["total", "available", "percent", "used", "free", "active", "inactive", "buffers", "cached"] swap = ["total", "used", "free", "percent", "sin", "sout"] cpu = ["user", "nice", "system", "idle", "iowait", "irq", "softirq", "steal", "guest", "guest_nice"] disk_space = ["total", "used", "free", "percent"] disk_io = ["read_count", "write_count", "read_bytes", "write_bytes", "read_time", "write_time"] net_io = ["bytes_sent", "bytes_recv", "packets_sent", "packets_recv", "errin", "errout", "dropin", "dropout"] class Health(object): def __init__(self, node_id, config): """ Creates a new health object for the given node. Databases for health statistics will be created in the 'data_dir'. The databases use the "whisper" database format from graphite, so they automatically handle long-term storage with decreasing resolution. Once the databases are created they never grow or shrink, regardless of the amount of data stored. :param node_id: The node to create this health object for. :param data_dir: The data directory to use for the health stats. """ self.path = os.path.join(config.get("metrics", "data_dir"), "health", node_id[0], str(node_id[1])) self.node_id = node_id self.memory_alert = False self.swap_alert = False self.cpu_load = Metric(self.path, "cpu.load") self.mem = [Metric(self.path, "mem.%s" % item) for item in memory] self.swp = [Metric(self.path, "swp.%s" % item) for item in swap] self.cpu = [Metric(self.path, "cpu.%s" % item) for item in cpu] self.dsk_sp = {} self.dsk_io = {} self.net = {} def get_metric_names(self): metrics = [] for root, dirnames, filenames in os.walk(self.path): for filename in filenames: if filename.endswith(".dp"): metrics.append(os.path.join(root, filename).replace(self.path, "").replace("/", ".")[1:-3]) return sorted(metrics) def capture(self): """ Captures stats of the local system and writes them into the series database. :return: None """ self.cpu_load.update(psutil.cpu_percent(interval=None)) for i, value in enumerate(psutil.cpu_times()): self.cpu[i].update(value) for i,value in enumerate(psutil.virtual_memory()): self.mem[i].update(value) for i,value in enumerate(psutil.swap_memory()): self.swp[i].update(value) net_io_data = psutil.net_io_counters(pernic=True) for name in net_io_data: if name not in self.net: self.net[name] = [Metric(self.path, "net.io.%s.%s" % (name,item)) for item in net_io] net = self.net[name] for i,value in enumerate(net_io_data[name]): net[i].update(value) dsk_io_data = psutil.disk_io_counters(perdisk=True) for name in dsk_io_data: if name not in self.dsk_io: self.dsk_io[name] = [Metric(self.path, "dsk.io.%s.%s" % (name,item)) for item in disk_io] dsk_io = self.dsk_io[name] for i,value in enumerate(dsk_io_data[name]): dsk_io[i].update(value) self.disk_partitions = psutil.disk_partitions() for disks in self.disk_partitions: device = disks[0].replace("/dev/", "") name = "-".join([el for el in device.split("/") if el]) # Create an new set of data points if we find a new disk. if name not in self.dsk_sp: self.dsk_sp[name] = [Metric(self.path, "dsk.space.%s.%s" % (name,item)) for item in disk_space] # Find the disk we are storing data for dsk = self.dsk_sp[name] # Update the disk stats for i, value in enumerate(psutil.disk_usage(disks[1])): dsk[i].update(value) def lookup(self, name): """ Lookup a metric name and resolve it to a metric database. :param name: The metric name to resolve. 
        :return: A data point if it was resolvable, or None
        """
        parts = name.split(".")
        if parts[0] == "cpu":
            if parts[1] == "load":
                return self.cpu_load
            return self.cpu[cpu.index(parts[1])]
        elif parts[0] == "mem":
            return self.mem[memory.index(parts[1])]
        elif parts[0] == "dsk":
            if parts[1] == "space":
                return self.dsk_sp[parts[2]][disk_space.index(parts[3])]
            elif parts[1] == "io":
                return self.dsk_io[parts[2]][disk_io.index(parts[3])]
        elif parts[0] == "net":
            if parts[1] == "io":
                # Note: the network counters live in self.net (see __init__ and
                # capture()); net_io is only the module-level list of field names.
                return self.net[parts[2]][net_io.index(parts[3])]
        return None

    def min(self, dp, from_time, until_time=None):
        """
        Request the minimum value from the given metric.

        :param dp: The metric to check for minimum value.
        :param from_time: The earliest time in the series.
        :param until_time: The latest time in the series (optional). If omitted this defaults to now.
        :return: The minimum value from the series requested.
        """
        if type(dp) == type(str()):
            dp = self.lookup(dp)
        return min([x for x in dp.fetch(from_time, until_time)[1] if x is not None])

    def max(self, dp, from_time, until_time=None):
        """
        Request the maximum value from the given metric.

        :param dp: The metric to check for maximum value.
        :param from_time: The earliest time in the series.
        :param until_time: The latest time in the series (optional). If omitted this defaults to now.
        :return: The maximum value from the series requested.
        """
        if type(dp) == type(str()):
            dp = self.lookup(dp)
        return max([x for x in dp.fetch(from_time, until_time)[1] if x is not None])

    def avg(self, dp, from_time, until_time=None):
        """
        Request the average value for the given metric.

        :param dp: The metric to use to compute the average value.
        :param from_time: The earliest time in the series.
        :param until_time: The latest time in the series (optional). If omitted this defaults to now.
        :return: The average value from the series requested.
        """
        if type(dp) == type(str()):
            dp = self.lookup(dp)
        values = [x for x in dp.fetch(from_time, until_time)[1] if x is not None]
        return sum(values) / len(values)

    def is_healthy(self, dp, seconds, has_alert, low_water, high_water):
        """
        Checks to see if the given metric has been healthy over the last 'seconds' seconds. If 'has_alert' is
        true then the metric must be lower than 'low_water', otherwise it must be lower than 'high_water'.
        Returns True if it's healthy, False if it's not.

        :param dp: The metric to check.
        :param seconds: The number of seconds of history to evaluate.
        :param has_alert: True if the metric was previously in an unhealthy state.
        :param low_water: The low water mark if has_alert is True.
        :param high_water: The high water mark.
        :return: True if the metric is healthy, False otherwise.
        """
        percent_used = self.avg(dp, time.time() - seconds)
        if has_alert:
            return percent_used < low_water
        return percent_used < high_water

    def is_memory_healthy(self, seconds, low_water, high_water):
        """
        Checks to see if memory is in a healthy state. This is a convenience for is_healthy("mem.percent")

        :param seconds: The number of seconds of history to check for health.
        :param low_water: The low water level in memory percent used.
        :param high_water: The high water level in memory percent used.
        :return: True if memory is healthy, False otherwise.
        """
        self.memory_alert = not self.is_healthy("mem.percent", seconds, self.memory_alert, low_water, high_water)
        return not self.memory_alert

    def is_swap_healthy(self, seconds, low_water, high_water):
        """
        Checks to see if swap is in a healthy state. This is a convenience for is_healthy("swp.percent")

        :param seconds: The number of seconds of history to check for health.
        :param low_water: The low water level in swap percent used.
        :param high_water: The high water level in swap percent used.
        :return: True if swap is healthy, False otherwise.
        """
        self.swap_alert = not self.is_healthy("swp.percent", seconds, self.swap_alert, low_water, high_water)
        return not self.swap_alert
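A hypothetical wiring example for the Health class, assuming the surrounding infinisqlmgr package (including the Metric class) and psutil are importable; the node id, data directory, and thresholds below are invented for illustration.

import time
import configparser

config = configparser.ConfigParser()
config['metrics'] = {'data_dir': '/tmp/infinisql-metrics'}   # made-up location

health = Health(('10.0.0.1', 11000), config)
for _ in range(3):                        # take a few samples so averages have data
    health.capture()
    time.sleep(1)

print(health.get_metric_names()[:5])                  # e.g. cpu.* and dsk.* series
print(health.avg('mem.percent', time.time() - 60))    # average over the last minute
print(health.is_memory_healthy(60, low_water=70, high_water=85))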
The Franklin Mint states that the Siberian Tiger Collector Knife is the very first collector’s knife authorised and authenticated by the International Wildlife Coalition and that it bears the original art of J. Nahra. It is also certified that this first-class army knife has an original portrait of a Siberian Tiger on the handle, complemented by a natural Tiger Eye gemstone and accented with 24 karat gold. The knife comes in its own individual case. Design of the handle: natural Tiger Eye gemstone with original Siberian Tiger art. Number of blades: 1; blade length range: 2.76 to 4 inches.
""" Class OWTextableIntersect Copyright 2012-2019 LangTech Sarl (info@langtech.ch) ----------------------------------------------------------------------------- This file is part of the Orange3-Textable package. Orange3-Textable is free software: you can redistribute it and/or modify it under the terms of the GNU General Public License as published by the Free Software Foundation, either version 3 of the License, or (at your option) any later version. Orange3-Textable is distributed in the hope that it will be useful, but WITHOUT ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License for more details. You should have received a copy of the GNU General Public License along with Orange3-Textable. If not, see <http://www.gnu.org/licenses/>. """ __version__ = '0.15.2' import LTTL.Segmenter as Segmenter from LTTL.Segmentation import Segmentation from .TextableUtils import ( OWTextableBaseWidget, InfoBox, SendButton, AdvancedSettings, pluralize, updateMultipleInputs, SegmentationListContextHandler, SegmentationsInputList, ProgressBar ) from Orange.widgets import widget, gui, settings class OWTextableIntersect(OWTextableBaseWidget): """Orange widget for segment in-/exclusion based on other segmentation""" name = "Intersect" description = "In-/exclude segments based on another segmentation" icon = "icons/Intersect.png" priority = 4004 # Input and output channels... inputs = [('Segmentation', Segmentation, "inputData", widget.Multiple)] outputs = [ ('Selected data', Segmentation, widget.Default), ('Discarded data', Segmentation) ] settingsHandler = SegmentationListContextHandler( version=__version__.rsplit(".", 1)[0] ) segmentations = SegmentationsInputList() # type: list # Settings... copyAnnotations = settings.Setting(True) mode = settings.Setting(u'Include') autoNumber = settings.Setting(False) autoNumberKey = settings.Setting('num') displayAdvancedSettings = settings.Setting(False) source = settings.ContextSetting(0) filtering = settings.ContextSetting(0) sourceAnnotationKey = settings.ContextSetting(u'(none)') filteringAnnotationKey = settings.ContextSetting(u'(none)') want_main_area = False def __init__(self, *args, **kwargs): super().__init__(*args, **kwargs) self.infoBox = InfoBox(widget=self.controlArea) self.sendButton = SendButton( widget=self.controlArea, master=self, callback=self.sendData, infoBoxAttribute='infoBox', sendIfPreCallback=self.updateGUI, ) self.advancedSettings = AdvancedSettings( widget=self.controlArea, master=self, callback=self.sendButton.settingsChanged, ) # GUI... # TODO: update docs to match removal of source annotation from basic self.advancedSettings.draw() # Intersect box self.intersectBox = gui.widgetBox( widget=self.controlArea, box=u'Intersect', orientation='vertical', addSpace=False, ) self.modeCombo = gui.comboBox( widget=self.intersectBox, master=self, value='mode', sendSelectedValue=True, items=[u'Include', u'Exclude'], orientation='horizontal', label=u'Mode:', labelWidth=180, callback=self.sendButton.settingsChanged, tooltip=( u"Specify whether source segments whose type is\n" u"present in the filter segmentation should be\n" u"included in or excluded from the output\n" u"segmentation." 
), ) self.modeCombo.setMinimumWidth(140) gui.separator(widget=self.intersectBox, height=3) self.sourceCombo = gui.comboBox( widget=self.intersectBox, master=self, value='source', orientation='horizontal', label=u'Source segmentation:', labelWidth=180, callback=self.sendButton.settingsChanged, tooltip=( u"The segmentation from which a subset of segments\n" u"will be selected to build the output segmentation." ), ) gui.separator(widget=self.intersectBox, height=3) self.sourceAnnotationCombo = gui.comboBox( widget=self.intersectBox, master=self, value='sourceAnnotationKey', sendSelectedValue=True, emptyString=u'(none)', orientation='horizontal', label=u'Source annotation key:', labelWidth=180, callback=self.sendButton.settingsChanged, tooltip=( u"Indicate whether source segments will be selected\n" u"based on annotation values corresponding to a\n" u"specific annotation key or rather on their content\n" u"(value 'none')." ), ) gui.separator(widget=self.intersectBox, height=3) self.filteringCombo = gui.comboBox( widget=self.intersectBox, master=self, value='filtering', orientation='horizontal', label=u'Filter segmentation:', labelWidth=180, callback=self.sendButton.settingsChanged, tooltip=( u"The segmentation whose types will be used to\n" u"include source segments in (or exclude them from)\n" u"the output segmentation." ), ) gui.separator(widget=self.intersectBox, height=3) self.filteringAnnotationCombo = gui.comboBox( widget=self.intersectBox, master=self, value='filteringAnnotationKey', sendSelectedValue=True, emptyString=u'(none)', orientation='horizontal', label=u'Filter annotation key:', labelWidth=180, callback=self.sendButton.settingsChanged, tooltip=( u"Indicate whether filter segment types are based\n" u"on annotation values corresponding to a specific\n" u"annotation key or rather on segment content\n" u"(value 'none')." ), ) gui.separator(widget=self.intersectBox, height=3) self.advancedSettings.advancedWidgets.append(self.intersectBox) self.advancedSettings.advancedWidgetsAppendSeparator() # Options box... optionsBox = gui.widgetBox( widget=self.controlArea, box=u'Options', orientation='vertical', addSpace=False ) optionsBoxLine2 = gui.widgetBox( widget=optionsBox, box=False, orientation='horizontal', addSpace=True, ) gui.checkBox( widget=optionsBoxLine2, master=self, value='autoNumber', label=u'Auto-number with key:', labelWidth=180, callback=self.sendButton.settingsChanged, tooltip=( u"Annotate output segments with increasing numeric\n" u"indices." ), ) self.autoNumberKeyLineEdit = gui.lineEdit( widget=optionsBoxLine2, master=self, value='autoNumberKey', orientation='horizontal', callback=self.sendButton.settingsChanged, tooltip=( u"Annotation key for output segment auto-numbering." ), ) gui.checkBox( widget=optionsBox, master=self, value='copyAnnotations', label=u'Copy annotations', callback=self.sendButton.settingsChanged, tooltip=( u"Copy all annotations from input to output segments." 
), ) gui.separator(widget=optionsBox, height=2) self.advancedSettings.advancedWidgets.append(optionsBox) self.advancedSettings.advancedWidgetsAppendSeparator() # Basic intersect box self.basicIntersectBox = gui.widgetBox( widget=self.controlArea, box=u'Intersect', orientation='vertical', ) self.basicModeCombo = gui.comboBox( widget=self.basicIntersectBox, master=self, value='mode', sendSelectedValue=True, items=[u'Include', u'Exclude'], orientation='horizontal', label=u'Mode:', labelWidth=180, callback=self.sendButton.settingsChanged, tooltip=( u"Specify whether source segments whose type is\n" u"present in the filter segmentation should be\n" u"included in or excluded from the output\n" u"segmentation." ), ) self.basicModeCombo.setMinimumWidth(140) gui.separator(widget=self.basicIntersectBox, height=3) self.basicSourceCombo = gui.comboBox( widget=self.basicIntersectBox, master=self, value='source', orientation='horizontal', label=u'Source segmentation:', labelWidth=180, callback=self.sendButton.settingsChanged, tooltip=( u"The segmentation from which a subset of segments\n" u"will be selected to build the output segmentation." ), ) gui.separator(widget=self.basicIntersectBox, height=3) self.basicFilteringCombo = gui.comboBox( widget=self.basicIntersectBox, master=self, value='filtering', orientation='horizontal', label=u'Filter segmentation:', labelWidth=180, callback=self.sendButton.settingsChanged, tooltip=( u"The segmentation whose types will be used to\n" u"include source segments in (or exclude them from)\n" u"the output segmentation." ), ) gui.separator(widget=self.basicIntersectBox, height=3) self.advancedSettings.basicWidgets.append(self.basicIntersectBox) self.advancedSettings.basicWidgetsAppendSeparator() gui.rubber(self.controlArea) # Send button... self.sendButton.draw() # Info box... self.infoBox.draw() self.sendButton.sendIf() self.adjustSizeWithTimer() def sendData(self): """(Have LTTL.Segmenter) perform the actual filtering""" # Check that there's something on input... if len(self.segmentations) == 0: self.infoBox.setText(u'Widget needs input.', 'warning') self.send('Selected data', None, self) self.send('Discarded data', None, self) return assert self.source >= 0 assert self.filtering >= 0 # TODO: remove message 'No label was provided.' from docs # Source and filtering parameter... source = self.segmentations[self.source][1] filtering = self.segmentations[self.filtering][1] if self.displayAdvancedSettings: source_annotation_key = self.sourceAnnotationKey or None if self.sourceAnnotationKey == u'(none)': source_annotation_key = None filtering_annotation_key = self.filteringAnnotationKey or None if filtering_annotation_key == u'(none)': filtering_annotation_key = None else: source_annotation_key = None filtering_annotation_key = None # Check that autoNumberKey is not empty (if necessary)... if self.displayAdvancedSettings and self.autoNumber: if self.autoNumberKey: autoNumberKey = self.autoNumberKey num_iterations = 2 * len(source['segmentation']) else: self.infoBox.setText( u'Please enter an annotation key for auto-numbering.', 'warning' ) self.send('Selected data', None, self) self.send('Discarded data', None, self) return else: autoNumberKey = None num_iterations = len(source) # Basic settings... if self.displayAdvancedSettings: copyAnnotations = self.copyAnnotations else: copyAnnotations = True # Perform filtering... 
self.infoBox.setText(u"Processing, please wait...", "warning") self.controlArea.setDisabled(True) progressBar = ProgressBar( self, iterations=num_iterations ) (filtered_data, discarded_data) = Segmenter.intersect( source=source, source_annotation_key=source_annotation_key, filtering=filtering, filtering_annotation_key=filtering_annotation_key, mode=self.mode.lower(), label=self.captionTitle, copy_annotations=self.copyAnnotations, auto_number_as=autoNumberKey, progress_callback=progressBar.advance, ) progressBar.finish() self.controlArea.setDisabled(False) message = u'%i segment@p sent to output.' % len(filtered_data) message = pluralize(message, len(filtered_data)) self.infoBox.setText(message) self.send('Selected data', filtered_data, self) self.send('Discarded data', discarded_data, self) self.sendButton.resetSettingsChangedFlag() def inputData(self, newItem, newId=None): """Process incoming data.""" self.closeContext() updateMultipleInputs( self.segmentations, newItem, newId, self.onInputRemoval ) self.infoBox.inputChanged() self.updateGUI() def onInputRemoval(self, index): """Handle removal of input with given index""" if index < self.source: self.source -= 1 elif index == self.source \ and self.source == len(self.segmentations) - 1: self.source -= 1 if index < self.filtering: self.filtering -= 1 elif index == self.filtering \ and self.filtering == len(self.segmentations) - 1: self.filtering -= 1 def updateGUI(self): """Update GUI state""" if self.displayAdvancedSettings: sourceCombo = self.sourceCombo filteringCombo = self.filteringCombo intersectBox = self.intersectBox else: sourceCombo = self.basicSourceCombo filteringCombo = self.basicFilteringCombo intersectBox = self.basicIntersectBox sourceCombo.clear() self.sourceAnnotationCombo.clear() self.sourceAnnotationCombo.addItem(u'(none)') self.advancedSettings.setVisible(self.displayAdvancedSettings) if len(self.segmentations) == 0: self.source = -1 self.sourceAnnotationKey = u'' intersectBox.setDisabled(True) return else: if len(self.segmentations) == 1: self.source = 0 for segmentation in self.segmentations: sourceCombo.addItem(segmentation[1].label) self.source = max(self.source, 0) sourceAnnotationKeys \ = self.segmentations[self.source][1].get_annotation_keys() for k in sourceAnnotationKeys: self.sourceAnnotationCombo.addItem(k) if self.sourceAnnotationKey not in sourceAnnotationKeys: self.sourceAnnotationKey = u'(none)' self.sourceAnnotationKey = self.sourceAnnotationKey intersectBox.setDisabled(False) self.autoNumberKeyLineEdit.setDisabled(not self.autoNumber) filteringCombo.clear() for index in range(len(self.segmentations)): filteringCombo.addItem(self.segmentations[index][1].label) self.filtering = max(self.filtering, 0) segmentation = self.segmentations[self.filtering] if self.displayAdvancedSettings: self.filteringAnnotationCombo.clear() self.filteringAnnotationCombo.addItem(u'(none)') filteringAnnotationKeys = segmentation[1].get_annotation_keys() for key in filteringAnnotationKeys: self.filteringAnnotationCombo.addItem(key) if self.filteringAnnotationKey not in filteringAnnotationKeys: self.filteringAnnotationKey = u'(none)' self.filteringAnnotationKey = self.filteringAnnotationKey def setCaption(self, title): if 'captionTitle' in dir(self): changed = title != self.captionTitle super().setCaption(title) if changed: self.sendButton.settingsChanged() else: super().setCaption(title) def handleNewSignals(self): """Overridden: called after multiple signals have been added""" self.openContext(self.uuid, self.segmentations) 
self.updateGUI() self.sendButton.sendIf() if __name__ == '__main__': import sys import re from PyQt5.QtWidgets import QApplication from LTTL.Input import Input appl = QApplication(sys.argv) ow = OWTextableIntersect() seg1 = Input(u'hello world', 'text') seg2 = Segmenter.tokenize( seg1, [ (re.compile(r'hello'), u'tokenize', {'tag': 'interj'}), (re.compile(r'world'), u'tokenize', {'tag': 'noun'}), ], label='words', ) seg3 = Segmenter.tokenize( seg2, [(re.compile(r'[aeiou]'), u'tokenize')], label='V' ) seg4 = Segmenter.tokenize( seg2, [(re.compile(r'[hlwrdc]'), u'tokenize')], label='C' ) seg5 = Segmenter.tokenize( seg2, [(re.compile(r' '), u'tokenize')], label='S' ) seg6 = Segmenter.concatenate( [seg3, seg4, seg5], import_labels_as='category', label='chars', sort=True, merge_duplicates=True, ) seg7 = Segmenter.tokenize( seg6, [(re.compile(r'l'), u'tokenize')], label='pivot' ) ow.inputData(seg2, 1) ow.inputData(seg6, 2) ow.inputData(seg7, 3) ow.show() appl.exec_() ow.saveSettings()
We offer St Albans a professional cleaning service with a friendlier face. We're not just the friendliest cleaners in AL2, though - we'll give you brilliantly thorough, fast & professional home cleaning at some of the lowest prices in the country. And we use eco-friendly cleaning products which are healthier for your home.
# Copyright 2017 The UAI-SDK Authors. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# ==============================================================================
'''
UCloud Train basic flags
'''
import tensorflow as tf

flags = tf.app.flags

# =======================================================================
# Constant variables
#   --work_dir=/data
#   --data_dir=/data/data
#   --output_dir=/data/output
#
# Note: Use these params as constant values
#       Do not set these params !!!
# =======================================================================
'''
Default work dir. The working dir for the training job, it will contain:
    /data/data   --data_dir
    /data/output --output_dir

Note: DO NOT CHANGE THIS VALUE
      UCloud Train Job Executor Will Set it Automatically
'''
flags.DEFINE_string("work_dir", "/data", "Default work path")

'''
Default data path used in Training, all data will be downloaded into this path.
Please use data in this path as input for Training.

Note: DO NOT CHANGE THIS VALUE
      UCloud Train Job Executor Will Set it Automatically
'''
flags.DEFINE_string("data_dir", "/data/data", "Default data path")

'''
Default output path used in Training, files in this path will be uploaded to
UFile after training has finished.
You can also assume your checkpoint files are inside output_path (if you
provided one in the UCloud console); files will also be downloaded into this
path before Training starts.

Note: DO NOT CHANGE THIS VALUE
      UCloud Train Job Executor Will Set it Automatically
'''
flags.DEFINE_string("output_dir", "/data/output", "Default output path")

'''
Default tensorboard output path used in Training, files in this path will be
uploaded to UFile after training has finished.
This dir is the same as output_dir.

Note: DO NOT CHANGE THIS VALUE
      UCloud Train Job Executor Will Set it Automatically
'''
flags.DEFINE_string("log_dir", "/data/output", "Default log path")

'''
Define num_gpus for training

Note: DO NOT CHANGE THIS VALUE
      UCloud Train Job Executor Will Set it Automatically
'''
flags.DEFINE_integer("num_gpus", 0, "Num of available gpus")

# =======================================================================
# Usable variables
#   --max_step=<int>
#
# Note: You can SET and USE these params
#       UCloud may use these params as guidance for training projects
# =======================================================================
'''
You can use this param to transfer the max_step value

Note: You can use it as you wish
'''
flags.DEFINE_integer("max_step", 0, "Max Step")
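# ----------------------------------------------------------------------
# Minimal consumption sketch (not part of the SDK): one way a training
# entry point might read the flags defined above, assuming the usual
# TF 1.x tf.app.run() pattern. The loop body is a placeholder.
# ----------------------------------------------------------------------
FLAGS = flags.FLAGS

def _example_main(_):
    print("data_dir: %s" % FLAGS.data_dir)      # input data mounted by the executor
    print("output_dir: %s" % FLAGS.output_dir)  # checkpoints/results uploaded after training
    print("num_gpus: %d" % FLAGS.num_gpus)
    for _step in range(FLAGS.max_step):
        pass  # placeholder for a single training step

if __name__ == "__main__":
    tf.app.run(main=_example_main)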
We are so grateful to the Manchester StreetFest for inviting us to have a tent at this evening’s StreetFest in Manchester, Vermont. While nothing is official, Manchester is looking to be our home for competing in the 2015 season. We handed out fliers to adults and puzzle sheets to children. A small group of 17-year-olds was disappointed that we haven’t started junior derby yet. People bought t-shirts and hoodies, and we hung our brand-new banner from Turcotte Design inside our tent. We talked to a lot of people about joining, and even more people who are really looking forward to coming to our games next year. The entire Manchester community seemed to be excited about having us here in the Shires! We can’t wait to play for you in 2015.
"""Script used to generate evoked spike test data Usage: python -i import_spike_detection.py expt_id cell_id This will load all spikes evoked in the specified cell one at a time. For each one you can select whether to write the data out to a new test file. Note that files are saved without results; to generate these, you must run unit tests with --audit. """ import pickle, sys import numpy as np from scipy.optimize import curve_fit from neuroanalysis.spike_detection import detect_evoked_spikes, SpikeDetectTestCase from neuroanalysis.ui.spike_detection import SpikeDetectUI from neuroanalysis.data import TSeries, TSeriesList, PatchClampRecording from multipatch_analysis.database import default_db as db from multipatch_analysis.data import Analyzer, PulseStimAnalyzer, MultiPatchProbe import pyqtgraph as pg pg.dbg() # for inspecting exception stack expt_id = float(sys.argv[1]) cell_id = int(sys.argv[2]) ui = SpikeDetectUI() skip_btn = pg.QtGui.QPushButton('skip') ui.widget.addWidget(skip_btn) save_btn = pg.QtGui.QPushButton('save') ui.widget.addWidget(save_btn) session = db.session() def iter_pulses(): """Generator that yields all selected pulses one at a time. """ # look up experiment from database and load the NWB file expt = db.experiment_from_timestamp(expt_id) cell = expt.cells[cell_id] channel = cell.electrode.device_id sweeps = expt.data.contents for sweep in sweeps: # Ignore sweep if it doesn't have the requested channel, or the correct stimulus try: pre_rec = sweep[channel] except KeyError: continue if not isinstance(pre_rec, MultiPatchProbe): continue print("sweep: %d channel: %d" % (sweep.key, channel)) # Get chunks for each stim pulse pulse_stim = PulseStimAnalyzer.get(pre_rec) chunks = pulse_stim.pulse_chunks() for chunk in chunks: yield (expt_id, cell_id, sweep, channel, chunk) all_pulses = iter_pulses() last_result = None def load_next(): global all_pulses, ui, last_result try: (expt_id, cell_id, sweep, channel, chunk) = next(all_pulses) except StopIteration: ui.widget.hide() return # run spike detection on each chunk pulse_edges = chunk.meta['pulse_edges'] spikes = detect_evoked_spikes(chunk, pulse_edges, ui=ui) ui.show_result(spikes) # copy just the necessary parts of recording data for export to file export_chunk = PatchClampRecording(channels={k:TSeries(chunk[k].data, t0=chunk[k].t0, sample_rate=chunk[k].sample_rate) for k in chunk.channels}) export_chunk.meta.update(chunk.meta) # construct test case tc = SpikeDetectTestCase() tc._meta = { 'expt_id': expt_id, 'cell_id': cell_id, 'device_id': channel, 'sweep_id': sweep.key, } tc._input_args = { 'data': export_chunk, 'pulse_edges': chunk.meta['pulse_edges'], } last_result = tc def save_and_load_next(): global last_result # write results out to test file test_file = 'test_data/evoked_spikes/%s.pkl' % (last_result.name) last_result.save_file(test_file) load_next() skip_btn.clicked.connect(load_next) save_btn.clicked.connect(save_and_load_next) load_next()
Are you looking for a website template for your web hosting business? If so, check out the following list of the best web hosting website templates that we have put together for you. Do you need help customizing your hosting template? We can help – see our web design packages. UpHosting is a powerful and passionately crafted hosting template. It strikes a fantastic balance between a lovely design and a lot of hosting functionality, and the UpHosting HTML template includes 24 well-organized files with a clean, professionally crafted design. Hosting City is a popular web hosting template. It has an elegant design perfect for web hosting businesses, looks great on desktop and mobile, and uses a clean, modern, minimalistic layout. This is a simple, fast and professional domain and hosting HTML template for your web hosting website. It comes with all the features that you will need, such as domain registration, and it is ideal if you are looking to sell services such as cloud hosting, shared hosting, virtual servers and dedicated servers. BREED is a WHMCS & HTML template for web hosting services and domain registration companies. It has a clean, responsive design, which means that it will adapt to any device, such as desktops, tablets and smartphones. Digital Hosting is a great-looking web hosting template ideal for web hosting companies and domain registration businesses. It is mobile-friendly, which means that it will adapt to all devices, whether tablet, desktop or mobile. ServerEast is a fully responsive HTML template, ideal for a web hosting business, which means that it will look good on any device, e.g. your PC or mobile phone. MoonHost is a responsive HTML5 hosting and WHMCS template ideal for a domain or hosting business. You can easily customize the template, and it is fully responsive, so it adapts to any device. Foxuhost is a responsive HTML5 / CSS3 template that is simple to use and great for a web hosting business. It is responsive and comes with tons of features. HostHubs is a professional template for web hosting companies or domain registration businesses. It is built with the Bootstrap framework, HTML5 and CSS, and you can easily customize it to suit your needs. Did you find this list of the best web hosting templates useful? Are there any other templates that you prefer?
from describe.mock.utils import TWO_OPS_FULL, ONE_OPS, NIL from describe.mock.expectations import Invoke, ExpectationList, ExpectationSet, Expectation from describe.mock.mock import Mock from describe.utils import Replace def stub(*args, **attributes): if args: stub = Stub(args[0]) else: stub = Stub() for key, value in attributes.items(): setattr(stub, key, value) return stub def stub_attr(obj, key, value=NIL): if value is NIL: value = stub() return Replace(obj, key, value) class StubErrorDelegate(object): def __init__(self): self.instance = None self.items = {} def _new_stub(self, attrname): new_stub = self.instance.__class__() setattr(self.instance, attrname, new_stub) return new_stub def no_expectations(self, expectations, sender, attrname, args, kwargs): recent_history = reversed(expectations.history) for expect in recent_history: try: return expectations.validate_expectation(expect, sender, attrname, args, kwargs) except (ExpectationList.FailedToSatisfyArgumentsError, ExpectationList.FailedToSatisfyAttrnameError): pass return self._new_stub(attrname) def fails_to_satisfy_attrname(self, expectations, sender, attrname, args, kwargs, expectation): return self._new_stub(attrname) def fails_to_satisfy_arguments(self, expectations, sender, attrname, args, kwargs, expectation): return self._new_stub(attrname) class Stub(Mock): def __init__(self, name='Stub'): delegate = StubErrorDelegate() super(self.__class__, self).__init__(name=name, error_delegate=delegate) delegate.instance = self # saved for reference if 0: IGNORE_LIST = set(( '_Stub__attributes', '_Stub__magic', '_Stub__items', '__class__', '_create_magic_method', # 'expects' )) def process(dictionary, name, cls): if name not in dictionary: dictionary[name] = cls() if isinstance(dictionary[name], Invoke): return dictionary[name]() return dictionary[name] class Stub(object): """Stubs are objects that can stand-in for any other object. It simply returns more stubs when accessed or invoked. This is used for testing functionality that doesn't particularly care about the objects they are manipulating, (ie - a function that splits an array in half doesn't care about what kinds of elements are in there) """ def __init__(self, **attributes): self.__attributes = attributes self.__items = {} self.__magic = {} @classmethod def attr(cls, obj, name, value=NIL): return StubAttr(obj, name, getattr(obj, name, NIL), value).replace() # @property # def expects(self): # raise TypeError('reserved for API') def __getattribute__(self, name): if name in IGNORE_LIST: return super(Stub, self).__getattribute__(name) return process(self.__attributes, name, self.__class__) def __setattr__(self, name, value): if name in IGNORE_LIST: return super(Stub, self).__setattr__(name, value) self.__attributes[name] = value def __getitem__(self, name): return self.__items.get(name, None) def __setitem__(self, name, value): self.__items[name] = value def __call__(self, *args, **kwargs): full_name = '__call__' return process(self.__magic, full_name, self.__class__) def _create_magic_method(name): full_name = '__%s__' % name def getter(self): return process(self.__magic, full_name, self.__class__) getter.__name__ = full_name return property(getter) for op in TWO_OPS_FULL + ONE_OPS: exec('__%s__ = _create_magic_method(%r)' % (op, op)) class StubAttr(object): "Manages the lifetime of a stub on an attribute." 
def __init__(self, obj, name, orig_value, new_value): self.obj, self.name, self.orig_value, self.new_value = obj, name, orig_value, new_value @property def stub(self): return self.new_value def replace(self): if self.new_value is NIL: self.new_value = Stub() setattr(self.obj, self.name, self.new_value) return self def restore(self): if self.orig_value is NIL: delattr(self.obj, self.name) else: setattr(self.obj, self.name, self.orig_value) return self def __enter__(self): return self.replace().stub def __exit__(self, type, info, tb): self.restore() def __del__(self): self.restore()
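# ----------------------------------------------------------------------
# Hedged usage sketch (illustrative only, not part of the library). It
# assumes the Mock base class lets attributes assigned via setattr be
# read back, and that the Replace helper returned by stub_attr() acts as
# a context manager that swaps the attribute in and restores it on exit.
# The _Config class below is purely hypothetical.
# ----------------------------------------------------------------------
class _Config(object):
    timeout = 30

def _example_stub_usage():
    fake = stub(retries=3)                 # stand-in object with a preset attribute
    print(fake.retries)                    # expected to echo back 3
    with stub_attr(_Config, 'timeout', 5):
        print(_Config.timeout)             # 5 while the replacement is active
    print(_Config.timeout)                 # restored to 30 afterwards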
“I am in Irbil (sometimes spelled Erbil). It is the capital city of the Kurds (you know, in Iraq, they have Arabs and Kurds) – you should look for it on a map. The Kurds have a culture closer to Turkey's. Also, Irbil is a lot closer to Turkey than to Baghdad. The camp is run by the Korean Army. Only about a dozen Americans here. I flew down on a Japanese Self Defense Force C-130 painted pastel blue. My Japanese buddy, Major Natori, hooked me up. Today I got a tour of a vocational school and a hospital that the Koreans built here. I met lots of really friendly Iraqis who are getting an education. A lot of them speak English really well and even make jokes. This place is so nice. Lots of hills and green grass. The air is very clean and clear. Maybe like North Dakota from pictures I’ve seen, or Mongolia. It is fun being with the Koreans. I ate lunch and dinner at the Korean cafe. For lunch I had bulgogi and the red hot spicy soup that Kaito likes. It has meat and tofu and big green onions. For dinner, it was mackerel Korean style, two types of KIMCHI, and beef with Toppogi mochi. And the rice is sticky rice. It is delicious after four months of all-American food in Camp Victory! After reading the 07 FEB 07 Letters section of the Stars and Stripes, I felt it my patriotic duty to hop on the First Amendment Rights bandwagon as SFC Elesky (ret.) chose to do. How dare he push his criteria for serving in the Armed Forces as the norm? The “Special Breed” he describes is a wildly romantic notion at best. Indeed, there is an abundance of philosophies among our ranks; this diversity is what makes our country and military strong, and keeps me in. But getting more practical, if we get very general and categorize, as many do, liberals as non-Republicans (Mr. Elesky did not provide an adequate definition himself), why do so many decorated war veteran Congressmen and Senators belong to the Democratic Party? And why are they the ones with the most children in theater serving with me here in Iraq? If you want to talk of insult, then this viewpoint is the ultimate insult to the many brave men and women who fought valiantly on the foreign soil of battlefields from WWI to Vietnam as draftees. Older brother Lt. Bob is a naval engineer who – due to his Japanese language ability – ended up translating for some joint military exercise which involved a trip to an aircraft carrier named for the esteemed Republican President Lincoln (yes, the same one on which Bush II unfurled the Mission Accomplished banner). Besides his trip to my old stomping grounds of Guam, he also recently embarked on trips to Italy, Hawaii and the Japanese Alps for summits and soaks.
from types import * from robot import utils from selenium.webdriver.remote.webdriver import WebDriver from selenium.common.exceptions import NoSuchWindowException class WindowManager(object): def __init__(self): self._strategies = { 'title': self._select_by_title, 'name': self._select_by_name, 'url': self._select_by_url, None: self._select_by_default } def get_window_ids(self, browser): return [ window_info[1] for window_info in self._get_window_infos(browser) ] def get_window_names(self, browser): return [ window_info[2] for window_info in self._get_window_infos(browser) ] def get_window_titles(self, browser): return [ window_info[3] for window_info in self._get_window_infos(browser) ] def select(self, browser, locator): assert browser is not None if locator is not None: if isinstance(locator, list): self._select_by_excludes(browser, locator) return if locator.lower() == "self" or locator.lower() == "current": return if locator.lower() == "new" or locator.lower() == "popup": self._select_by_last_index(browser) return (prefix, criteria) = self._parse_locator(locator) strategy = self._strategies.get(prefix) if strategy is None: raise ValueError("Window locator with prefix '" + prefix + "' is not supported") return strategy(browser, criteria) # Strategy routines, private def _select_by_title(self, browser, criteria): self._select_matching( browser, lambda window_info: window_info[3].strip().lower() == criteria.lower(), "Unable to locate window with title '" + criteria + "'") def _select_by_name(self, browser, criteria): self._select_matching( browser, lambda window_info: window_info[2].strip().lower() == criteria.lower(), "Unable to locate window with name '" + criteria + "'") def _select_by_url(self, browser, criteria): self._select_matching( browser, lambda window_info: window_info[4].strip().lower() == criteria.lower(), "Unable to locate window with URL '" + criteria + "'") def _select_by_default(self, browser, criteria): if criteria is None or len(criteria) == 0 or criteria.lower() == "null": handles = browser.get_window_handles() browser.switch_to_window(handles[0]) return try: starting_handle = browser.get_current_window_handle() except NoSuchWindowException: starting_handle = None for handle in browser.get_window_handles(): browser.switch_to_window(handle) if criteria == handle: return for item in browser.get_current_window_info()[2:4]: if item.strip().lower() == criteria.lower(): return if starting_handle: browser.switch_to_window(starting_handle) raise ValueError("Unable to locate window with handle or name or title or URL '" + criteria + "'") def _select_by_last_index(self, browser): handles = browser.get_window_handles() try: if handles[-1] == browser.get_current_window_handle(): raise AssertionError("No new window at last index. Please use '@{ex}= | List Windows' + new window trigger + 'Select Window | ${ex}' to find it.") except IndexError: raise AssertionError("No window found") except NoSuchWindowException: raise AssertionError("Currently no focus window. 
where are you making a popup window?") browser.switch_to_window(handles[-1]) def _select_by_excludes(self, browser, excludes): for handle in browser.get_window_handles(): if handle not in excludes: browser.switch_to_window(handle) return raise ValueError("Unable to locate new window") # Private def _parse_locator(self, locator): prefix = None criteria = locator if locator is not None and len(locator) > 0: locator_parts = locator.partition('=') if len(locator_parts[1]) > 0: prefix = locator_parts[0].strip().lower() criteria = locator_parts[2].strip() if prefix is None or prefix == 'name': if criteria is None or criteria.lower() == 'main': criteria = '' return (prefix, criteria) def _get_window_infos(self, browser): window_infos = [] try: starting_handle = browser.get_current_window_handle() except NoSuchWindowException: starting_handle = None try: for handle in browser.get_window_handles(): browser.switch_to_window(handle) window_infos.append(browser.get_current_window_info()) finally: if starting_handle: browser.switch_to_window(starting_handle) return window_infos def _select_matching(self, browser, matcher, error): try: starting_handle = browser.get_current_window_handle() except NoSuchWindowException: starting_handle = None for handle in browser.get_window_handles(): browser.switch_to_window(handle) if matcher(browser.get_current_window_info()): return if starting_handle: browser.switch_to_window(starting_handle) raise ValueError(error)
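# ----------------------------------------------------------------------
# Hedged usage sketch (illustrative, not part of the library): how a
# keyword implementation might drive the manager. It assumes `browser`
# is the wrapped WebDriver instance exposing the helper methods used
# above (get_window_handles, switch_to_window, get_current_window_info).
# ----------------------------------------------------------------------
def _example_window_switching(browser):
    manager = WindowManager()
    manager.select(browser, 'title=Order Confirmation')     # match by window title
    manager.select(browser, 'name=popup1')                   # match by window name
    manager.select(browser, 'url=http://example.com/help')   # match by window URL
    manager.select(browser, 'new')                            # most recently opened window
    manager.select(browser, 'main')                           # back to the first (main) window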
Our firm handled a case recently where a defendant was stopped for Driving Under the Influence in a mall parking lot. The officer cited as the reason for initially stopping the defendant the fact that the defendant “rolled” through a stop sign posted in the mall’s parking lot – i.e., our client did not come to a full and complete stop at the stop sign. When the client was stopped, it was determined that he was under the influence of alcohol, and he was arrested. The District Attorney initially offered that our client should plead guilty or no contest to DUI charges and pay a steep fine and attend alcohol classes, and be on probation for three years. After reviewing the relevant law, we found that the Vehicle Code does not impose a duty upon a driver to stop at a stop sign posted on private property. Our office retained an investigator to investigate the property, and we obtained certified copies of the deed to the mall parking lot, which showed conclusively that the parking lot was owned by a private entity. Our investigator also took photographs of the mall’s parking lot entrances, and, especially, the signs showing that the parking lot was “private property” thereby negating any inference of mistake. After our attorneys filed a written motion to suppress evidence – citing the fact that the stop was conducted illegally on private property – the District Attorney agreed to dismiss all DUI charges against our client. No probation. No classes. No fine. Nothing. Case dismissed. While certainly no one condones ignoring a stop sign, whether posted on private property or public property, and failing to stop at a sign could give rise to civil liability if there is an accident and someone is hurt, the Vehicle Code does not permit an officer to stop and cite a person simply for failing to stop at a stop sign on private property.
#!/usr/bin/env python import rospy import smach import smach_ros import time from std_msgs.msg import Empty,String,Int8 from gps_handler.srv import * from ai_mapping_robot.msg import ErrorMessage from ai_mapping_robot.msg import InitData import tf.transformations as trans from math import * from pwm_serial_py.srv import Over_int ############### wait Init Data ############################## def initDataCallback(msg): global waitInitDataMsg global initDataMsg initDataMsg = msg waitInitDataMsg = 0 def waitForInitData(time): global waitInitDataMsg,initDataMsg start = rospy.get_time() waitInitDataMsg = 1 rospy.loginfo("wait InitData ...") s = rospy.Subscriber('init_data',InitData,initDataCallback) while waitInitDataMsg and rospy.get_time()-start < time: rospy.sleep(1.0/20.0) s.unregister() if not waitInitDataMsg: return initDataMsg else: return 'Error' #################Wait GPS List of waypoints ################### def casesCallback(msg): global waitGPSData global lastGPS,t1 t1 = msg.data.split('\n') lastGPS = t1[:-1].split(";") waitGPSData = 0 def waitForGPSData(AI,time): global waitGPSData,lastGPS,t1 start = rospy.get_time() waitGPSData = 1 s = rospy.Subscriber('gps_string',String,casesCallback) rospy.loginfo("wait GPSData ...") while waitGPSData and rospy.get_time()-start < time: if AI.preempt_requested(): rospy.loginfo("Go building GPS is being preempted") AI.service_preempt() return 'preempted' rospy.sleep(1.0/20.0) s.unregister() if not waitGPSData: return (lastGPS,t1) else: return 'Error' ################ Entry Init ###################################### def findHeading(listener,cmdPublisher,heading): (r,p,yaw)=(0,0,0) try: (trans1,rot1) = listener.lookupTransform("local_origin", "fcu", rospy.Time(0)) except (tf.LookupException, tf.ConnectivityException, tf.ExtrapolationException): (r,p,yaw) = trans.euler_from_quaternion(rot1) e = heading-yaw; #insert lidar data while abs(e)>0.1: try: (trans1,rot1) = listener.lookupTransform("local_origin", "fcu", rospy.Time(0)) except (tf.LookupException, tf.ConnectivityException, tf.ExtrapolationException): rospy.loginfo("echec local_origin fcu") (r,p,yaw) = trans.euler_from_quaternion(rot1) err= heading-yaw u=20*(2/PI)*atan(tan(err/2));#atan for modulo 2*pi*/ sendCommand(1500,1500+u); sendCommand(1500,1500); def sendCommand(channelSpeed,channelYaw): try: send_pwm = rospy.ServiceProxy('/pwm_serial_send',Over_int) resp1 = send_pwm([channelSpeed,0,channelYaw,0,0,0,0,0]) return resp1.result except rospy.ServiceException, e: print "Service call failed : %s"%e def dataCallback(msg): global waitDataMsg waitDataMsg = 0 def waitForRemote(time): global waitDataMsg start = rospy.get_time() waitDataMsg = 1 rospy.loginfo("wait For Remote ...") s = rospy.Subscriber('/restart_msg',Int8,dataCallback) while waitDataMsg and rospy.get_time()-start < time: rospy.sleep(1.0/20.0) s.unregister() return not waitDataMsg ############ Restart ######################### def remoteGoCallback(msg): global waitRestartMsg,message waitRestartMsg = 0 message = msg def waitForRemoteGo(time): global waitRestartMsg,message message = Int8(0) start = rospy.get_time() waitRestartMsg = 1 rospy.loginfo("wait RemoteGo ...") s = rospy.Subscriber('/restart_msg',Int8,remoteGoCallback) while waitRestartMsg and rospy.get_time()-start < time: rospy.sleep(1.0/20.0) s.unregister() return message
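# ----------------------------------------------------------------------
# Hedged usage sketch (illustrative, not part of the original node): one
# way the helpers above might be exercised from a standalone entry point.
# Topic and service names come from the code above; the node name and
# timeout values are arbitrary placeholders.
# ----------------------------------------------------------------------
if __name__ == '__main__':
    rospy.init_node('ai_init_helpers_demo')
    init_data = waitForInitData(30)       # wait up to 30 s for an InitData message
    if init_data == 'Error':
        rospy.logwarn('no InitData received, continuing without it')
    if waitForRemote(10):                 # True once a message arrives on /restart_msg
        sendCommand(1500, 1500)           # neutral throttle and yaw via /pwm_serial_send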
1477 – Battle of Nancy: Charles the Bold is defeated and killed in a conflict with René II, Duke of Lorraine; Burgundy subsequently becomes part of France.
1664 – Maratha forces under Chhatrapati Shivaji defeat the Mughals in the Battle of Surat.
1875 – The Palais Garnier, one of the most famous opera houses in the world, is inaugurated in Paris.
1882 – Charles J. Guiteau is found guilty of assassinating US President James A. Garfield, and is sentenced to death by hanging.
1911 – Kappa Alpha Psi, the world's third oldest and largest black fraternity, is founded at Indiana University.
1914 – The Ford Motor Company announces an eight-hour workday and a minimum daily wage of $5 in salary plus bonuses, subject to restrictions and imposed "character standards."
1949 – United States President Harry S. Truman unveils his Fair Deal program.
1950 – In the Sverdlovsk air disaster, all 19 of those on board are killed, including almost the entire national ice hockey team (VVS Moscow) of the Soviet Air Force – 11 players, as well as a team doctor and a masseur.
1953 – The play Waiting for Godot by Samuel Beckett is first performed.
1957 – In a speech given to the United States Congress, United States President Dwight D. Eisenhower announces the establishment of what will later be called the Eisenhower Doctrine.
1968 – Alexander Dubček comes to power; "Prague Spring" begins in Czechoslovakia.
1970 – The 7.1 Mw Tonghai earthquake shakes Tonghai County, Yunnan province, China, with a maximum Mercalli intensity of X (Extreme). Between 10,000 and 15,621 people were killed and 26,783 were injured.
1974 – The warmest reliably measured temperature below the Antarctic Circle, +59 °F (+15 °C), is recorded at Vanda Station.
1976 – The Troubles: Gunmen shoot dead ten Protestant civilians after stopping their minibus at Kingsmill in County Armagh, Northern Ireland, UK, allegedly as retaliation for a string of attacks on Catholic civilians in the area by Loyalists, particularly the killing of six Catholics the night before.
1991 – Georgian forces enter Tskhinvali, the capital of South Ossetia, Georgia, opening the 1991–92 South Ossetia War.
2000 – Sri Lankan Civil War: Sri Lankan Tamil politician Kumar Ponnambalam is shot dead in Colombo.
# -*- coding: utf-8 *-* import os import sys import subprocess try: from setuptools import setup except ImportError: from distribute_setup import use_setuptools use_setuptools() from setuptools import setup from distutils.cmd import Command version = "1.2.+" class doc(Command): description = "generate documentation" user_options = [] def initialize_options(self): pass def finalize_options(self): pass def run(self): path = "doc/build/%s" % version try: os.makedirs(path) except: pass status = subprocess.call(["sphinx-build", "-E", "-b", "html", "doc", path]) if status: raise RuntimeError("documentation step '%s' failed" % ("html",)) sys.stdout.write("\nDocumentation step '%s' performed, results here:\n" " %s/\n" % ("html", path)) f = open("README.rst") try: try: readme_content = f.read() except: readme_content = "" finally: f.close() setup( name="pymongolab", version=version, description="PyMongoLab is a client library for MongoLab REST API.", long_description=readme_content, author=u"Jorge Puente Sarrín", author_email="puentesarrin@gmail.com", url="http://pymongolab.puentesarr.in", packages=['mongolabclient', 'pymongolab'], keywords=["mongolab", "pymongolab", "mongolabclient", "mongo", "mongodb"], install_requires=["pymongo", "requests"], license="Apache License, Version 2.0", classifiers=[ "Development Status :: 4 - Beta", "Intended Audience :: Developers", "License :: OSI Approved :: Apache Software License", "Operating System :: OS Independent", "Programming Language :: Python", "Topic :: Database"], cmdclass={"doc": doc}, )
Explore Chicago like a local and be in the center of everything. Experience smart luxury in every room and suite at Conrad Chicago. View Michigan Avenue and the city skyline from our rooftop restaurant. Sample our house-made bourbon from the 20th floor at Baptiste and Bottle. Relax and take it all in from our Michigan Ave. King rooms and spacious suites. An experience like no other awaits. In order to receive our Book More, Earn More booking incentive, please send us a list of bookings that actualized for the year, complete the 2019 cash booking bonus form, and then fax or email it to the number or email address below. *Terms & Conditions: Receive a $100 American Express Gift Card for ten confirmed bookings; no minimum length of stay required. Valid on all guestrooms and suites. For additional information or to redeem, please see the Cash Booking Bonus Form. Looking for stunning photos of the hotel, informational fact sheets, or additional information? Submissions must be submitted at least 72 hours before client arrival. If this booking is coming through a 3rd party wholesaler, please let us know which one. If you would like us to include a message with your amenity, please type out the message below. *Travel Agency/Agent must be IATA/IATAN/CLIA approved. Bookings made by travel agents via a preferred Wholesale account are acceptable. The Booking Bonus program is not applicable to wholesale reservation agents. The Gift Card bonus incentive is per cumulative bookings of guestrooms/suites and accommodations. The report must be received no later than forty-five (45) days post guest departure for program eligibility. Membership bookings, group programs of ten (10) rooms or more, travel industry discounted bookings and free night stays are not eligible. The bonus will be processed following the guest departure and within forty-five (45) days of the last day of the month. The bonus program is subject to change without notice.