prefix stringlengths 0 918k | middle stringlengths 0 812k | suffix stringlengths 0 962k |
|---|---|---|
icenses as well. In some cases we can relicense portions of *
# * Nmap or grant special permissions to use it in other open source *
# * software. Please contact fyodor@nmap.org with any such requests. *
# * Similarly, we don't incorporate incompatible open source software into *
# * Covered Software without special permission from the copyright holders. *
# * *
# * If you have any questions about the licensing restrictions on using *
# * Nmap in other works, are happy to help. As mentioned above, we also *
# * offer alternative license to integrate Nmap into proprietary *
# * applications and appliances. These contracts have been sold to dozens *
# * of software vendors, and generally include a perpetual license as well *
# * as providing for priority support and updates. They also fund the *
# * continued development of Nmap. Please email sales@nmap.com for further *
# * information. *
# * *
# * If you have received a written license agreement or contract for *
# * Covered Software stating terms other than these, you may choose to use *
# * and redistribute Covered Software under those terms instead of these. *
# * *
# * Source is provided to this software because we believe users have a *
# * right to know exactly what a program is going to do before they run it. *
# * This also allows you to audit the software for security holes (none *
# * have been found so far). *
# * *
# * Source code also allows you to port Nmap to new platforms, fix bugs, *
# * and add new features. You are highly encouraged to send your changes *
# * to the dev@nmap.org mailing list for possible incorporation into the *
# * main distribution. By sending these changes to Fyodor or one of the *
# * Insecure.Org development mailing lists, or checking them into the Nmap *
# * source code repository, it is understood (unless you specify otherwise) *
# * that you are offering the Nmap Project (Insecure.Com LLC) the *
# * unlimited, non-exclusive right to reuse, modify, and relicense the *
# * code. Nmap will always be available Open Source, but this is important *
# * because the inability to relicense code has caused devastating problems *
# * for other Free Software projects (such as KDE and NASM). We also *
# * occasionally relicense the code to third parties as discussed above. *
# * If you wish to specify special license conditions of your *
# * contributions, just say so when you send them. *
# * *
# * This program is distributed in the hope that it will be useful, but *
# * WITHOUT ANY WARRANTY; without even the implied warranty of *
# * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the Nmap *
# * license file for more details (it's in a COPYING file included with *
# * Nmap, and also available from https://svn.nmap.org/nmap/COPYING *
# * *
# ***************************************************************************/
"""
higwidgets/higspinner.py
a pygtk spinner, based on the epiphany/nautilus implementation
"""
__all__ = ['HIGSpinner']
import os
import gtk
import gobject
from gtkutils import gobject_register
class HIGSpinnerImages:
    def __init__(self):
        """This class holds lists of GDK pixbufs.

        - static_pixbufs is used for multiple static pixbufs
        - animated_pixbufs is used for the pixbufs that make up the
          animation
        """
        dprint('HIGSpinnerImages::__init__')
        # The Nautilus/Epiphany implementation uses a single "rest/quiescent"
        # static pixbuf. We'd rather allow the developer to choose from
        # multiple static states, such as "done" or "failed".
        # Index it by a name like that.
        self.static_pixbufs = {}
        # We should have a default rest pixbuf, set it with set_rest_pixbuf()
        self.rest_pixbuf = None
        # This is a list of pixbufs to be used on the animation
        # For now, we're only implementing a single animation. Inconsistent!
        self.animated_pixbufs = []

    def add_static_pixbuf(self, name, pixbuf, default_on_rest=False):
        """Add a static pixbuf under *name*.

        If this is the first one, make it the default pixbuf on rest.
        The caller can make some other pixbuf the new default on rest by
        passing default_on_rest=True.
        """
        dprint('HIGSpinnerImages::add_static_pixbuf')
        self.static_pixbufs[name] = pixbuf
        if (len(self.static_pixbufs) == 1) or default_on_rest:
            self.set_rest_pixbuf(name)

    def add_animated_pixbuf(self, pixbuf):
        """Append one frame to the animation sequence."""
        dprint('HIGSpinnerImages::add_animated_pixbuf')
        self.animated_pixbufs.append(pixbuf)

    def set_rest_pixbuf(self, name):
        """Sets the pixbuf that will be used on the default, 'rest' state.

        Raises StaticPixbufNotFound if *name* was never registered with
        add_static_pixbuf().
        """
        dprint('HIGSpinnerImages::set_rest_pixbuf')
        if name not in self.static_pixbufs:
            raise StaticPixbufNotFound
        # self.rest_pixbuf holds the *real* pixbuf, not its name
        self.rest_pixbuf = self.static_pixbufs[name]

    def set_size(self, width, height):
        """Scale every pixbuf (static and animated) to width x height."""
        self.animated_pixbufs = [
            p.scale_simple(width, height, gtk.gdk.INTERP_BILINEAR)
            for p in self.animated_pixbufs]
        for k in self.static_pixbufs:
            self.static_pixbufs[k] = self.static_pixbufs[k].scale_simple(
                width, height, gtk.gdk.INTERP_BILINEAR)
        self.rest_pixbuf = self.rest_pixbuf.scale_simple(
            width, height, gtk.gdk.INTERP_BILINEAR)
        self.images_width = width
        self.images_height = height
class HIGSpinnerCache:
"""This hols a copy of the images used on the HIGSpinners instances."""
def __init__(self):
dprint('HIGSpinnerCache::__init__')
# Our own instance of a HIGSpinnerImages
self.spinner_images = HIGSpinnerImages()
# These are on Private member in the C implementation
self.icon_theme = gtk.IconTheme()
self.originals = None
self.images = None
# We might have access to a "default" animated icon.
# For example, if we're on a GNOME desktop, and have the (default)
# "gnome-icon-theme" package installed, we might have access
# to "gnome-spinner". Check it before using, though
if (self.icon_theme.lookup_icon("gnome-spinner", -1, 0)):
self.default_animated_icon_name = "gnome-spinner"
else:
self.default_animated_icon_name = None
def load_animated_from_lookup(self, icon_name=None):
"""Loads an animated icon by doing a lookup on the icon theme."""
# If user do not choose a icon_name, use the default one
if icon_name is None:
icon_name = self.default_animated_icon_name
# Even the default one (now on icon_name) might not be available
if icon_name is None:
raise AnimatedIconNotFound
# Try to lookup the icon
icon_info = self.icon_theme.lookup_icon(icon_name, -1, 0)
# Even if icon_name exists, it might not be found by lookup
if icon_info is None:
raise AnimatedIconNotFound
|
from __future__ import absolute_import
# Copyright (c) 2010-2017 openpyxl
"""Write the workbook global settings to the archive."""
from copy import copy
from openpyxl.utils import absolute_coordinate, quote_sheetname
from openpyxl.xml.constants import (
ARC_APP,
ARC_CORE,
ARC_WORKBOOK,
PKG_REL_NS,
CUSTOMUI_NS,
ARC_ROOT_RELS,
)
from openpyxl.xml.functions import tostring, fromstring
fro | m openpyxl.worksheet import Worksheet
from openpyxl.chartsheet import Chartsheet
from openpyxl.packag | ing.relationship import Relationship, RelationshipList
from openpyxl.workbook.defined_name import DefinedName
from openpyxl.workbook.external_reference import ExternalReference
from openpyxl.workbook.parser import ChildSheet, WorkbookPackage
from openpyxl.workbook.properties import CalcProperties, WorkbookProperties
from openpyxl.workbook.views import BookView
from openpyxl.utils.datetime import CALENDAR_MAC_1904
def write_root_rels(workbook):
    """Write the package root relationships xml."""
    rel_list = RelationshipList()
    rel_list.append(Relationship(type="officeDocument", Target=ARC_WORKBOOK))
    rel_list.append(
        Relationship(Target=ARC_CORE,
                     Type="%s/metadata/core-properties" % PKG_REL_NS))
    rel_list.append(Relationship(type="extended-properties", Target=ARC_APP))
    if workbook.vba_archive is not None:
        # See if there was a customUI relation and reuse it
        tree = fromstring(workbook.vba_archive.read(ARC_ROOT_RELS))
        existing = RelationshipList.from_tree(tree)
        for custom_rel in existing.find(CUSTOMUI_NS):
            rel_list.append(custom_rel)
    return tostring(rel_list.to_tree())
def get_active_sheet(wb):
    """
    Return the index of the active sheet.
    If the sheet set to active is hidden return the next visible sheet or None
    """
    visible = [i for i, ws in enumerate(wb._sheets)
               if ws.sheet_state == "visible"]
    if not visible:
        raise IndexError("At least one sheet must be visible")
    active_idx = wb._active_sheet_index
    active_sheet = wb.active
    if active_sheet and active_sheet.sheet_state == "visible":
        return active_idx
    # The active sheet is hidden: promote the next visible one, if any
    candidates = visible[active_idx:]
    if candidates:
        wb.active = candidates[0]
        return candidates[0]
    return None
def write_workbook(workbook):
    """Write the core workbook xml.

    Builds the xl/workbook.xml part: properties, protection, book views,
    the sheet list, external references, defined names and calculation
    properties.  Side effect: replaces workbook.rels with a fresh
    RelationshipList so write_workbook_rels() can extend it afterwards.
    """
    wb = workbook
    wb.rels = RelationshipList()
    root = WorkbookPackage()
    props = WorkbookProperties() # needs a mapping to the workbook for preservation
    if wb.code_name is not None:
        props.codeName = wb.code_name
    if wb.excel_base_date == CALENDAR_MAC_1904:
        # flag the Mac 1904 date epoch
        props.date1904 = True
    root.workbookPr = props
    # workbook protection
    root.workbookProtection = wb.security
    # book views
    active = get_active_sheet(wb)
    view = BookView(activeTab=active)
    root.bookViews =[view]
    # worksheets: one ChildSheet node and one relationship per sheet,
    # numbered from 1 so the relationship id matches "rId{idx}"
    for idx, sheet in enumerate(wb._sheets, 1):
        sheet_node = ChildSheet(name=sheet.title, sheetId=idx, id="rId{0}".format(idx))
        rel = Relationship(type=sheet._rel_type, Target=sheet.path)
        wb.rels.append(rel)
        if not sheet.sheet_state == 'visible':
            if len(wb._sheets) == 1:
                raise ValueError("The only worksheet of a workbook cannot be hidden")
            sheet_node.state = sheet.sheet_state
        root.sheets.append(sheet_node)
    # external references
    for link in wb._external_links:
        # need to match a counter with a workbook's relations
        rId = len(wb.rels) + 1
        rel = Relationship(type=link._rel_type, Target=link.path)
        wb.rels.append(rel)
        ext = ExternalReference(id=rel.id)
        root.externalReferences.append(ext)
    # Defined names
    defined_names = copy(wb.defined_names) # don't add special defns to workbook itself.
    # Defined names -> autoFilter
    for idx, sheet in enumerate(wb.worksheets):
        auto_filter = sheet.auto_filter.ref
        if auto_filter:
            name = DefinedName(name='_FilterDatabase', localSheetId=idx, hidden=True)
            name.value = u"{0}!{1}".format(quote_sheetname(sheet.title),
                                           absolute_coordinate(auto_filter)
                                           )
            defined_names.append(name)
        # print titles
        if sheet.print_titles:
            name = DefinedName(name="Print_Titles", localSheetId=idx)
            name.value = ",".join([u"{0}!{1}".format(quote_sheetname(sheet.title), r)
                                   for r in sheet.print_titles.split(",")])
            defined_names.append(name)
        # print areas
        if sheet.print_area:
            name = DefinedName(name="Print_Area", localSheetId=idx)
            name.value = ",".join([u"{0}!{1}".format(quote_sheetname(sheet.title), r)
                                   for r in sheet.print_area])
            defined_names.append(name)
    root.definedNames = defined_names
    # fixed calcId as written by Excel itself; recalculate on load
    root.calcPr = CalcProperties(calcId=124519, fullCalcOnLoad=True)
    return tostring(root.to_tree())
def write_workbook_rels(workbook):
    """Write the workbook relationships xml."""
    rels = workbook.rels
    # Shared parts every workbook references, in the same fixed order
    for rel_type, target in (
            ('sharedStrings', 'sharedStrings.xml'),
            ('styles', 'styles.xml'),
            ('theme', 'theme/theme1.xml'),
    ):
        rels.append(Relationship(type=rel_type, Target=target))
    if workbook.vba_archive:
        # The VBA project uses a Microsoft-specific relationship type
        vba = Relationship(type='', Target='vbaProject.bin')
        vba.Type = 'http://schemas.microsoft.com/office/2006/relationships/vbaProject'
        rels.append(vba)
    return tostring(rels.to_tree())
|
import random
from .exceptions import ClientError
def catch_client_error(func):
    """
    Decorator to catch the ClientError exception and translate it into a reply.
    """
    def wrapper(message):
        try:
            result = func(message)
        except ClientError as error:
            # Report the failure back to the client on its reply channel
            # instead of letting the exception propagate.
            error.send_to(message.reply_channel)
        else:
            return result
    return wrapper
def generate_name():
    """Return a random "Adjective Subject" display name.

    Picks one adjective and one Marvel character name uniformly at
    random (docker-style name generation).
    """
    adjective_list = [
        'Admiring',
        'Adoring',
        'Agitated',
        'Amazing',
        'Angry',
        'Awesome',
        'Backstabbing',
        'Berserk',
        'Big',
        'Boring',
        'Clever',
        'Compassionate',
        'Condescending',
        'Cranky',
        'Desperate',
        'Determined',
        'Distracted',
        'Dreamy',
        'Drunk',
        'Ecstatic',
        'Elated',
        'Elegant',
        'Evil',
        'Fervent',
        'Focused',
        'Furious',
        'Gigantic',
        'Gloomy',
        'Goofy',
        'Grave',
        'Happy',
        'High',
        'Hopeful',
        'Hungry',
        'Infallible',
        'Jolly',
        'Jovial',
        'Kickass',
        'Lonely',
        'Loving',
        'Mad',
        'Modest',
        'Naughty',
        'Nauseous',
        'Nostalgic',
        'Pedantic',
        'Pensive',
        'Prickly',
        'Reverent',
        'Romantic',
        'Sad',
        'Serene',
        'Sharp',
        'Sick',
        'Silly',
        'Sleepy',
        'Small',
        'Stoic',
        'Stupefied',
        'Suspicious',
        'Tender',
        'Thirsty',
        'Tiny',
        'Trusting',
    ]
    subject_list = [
        'Kraven the Hunter',
        'Juggernaut',
        'Marvel Girl',
        'Swarm',
        'Black Bolt',
        'Loki Lauyefson',  # (sic)
        'Ghost Rider',
        'Professor X',
        'Quicksilver',
        'Kingpin',
        'Doctor Octopus',
        'Green Goblin',
        'Red Skull',
        'Colossus',
        'Shadowcat',
        'Cyclops',
        'Havok',
        'Luke Cage',
        'Black Widow',
        'Beast',
        'The Multiple Man',
        'Silver Surfer',
        'Ultron',
        'Captain Britain',
        'Iron Man',
        'The Punisher',
        'Ego the Living Planet',
        'Nightcrawler',
        'Annihilus',
        'Deadpool',
        'Captain America',
        'Fin Fang Foom',
        'Daredevil',
        'J Jonah Jameson',
        'Kang the Conqueror',
        'Beta Ray Bill',
        'Doctor Stephen Strange',
        'Wolverine',
        'MODOK',
        'Nick Fury',
        'Emma Frost',
        'Black Panther',
        'The Hulk',
        'Thing',
        'Galactus',
        'Magneto',
        'Spider-Man',
        'Doctor Victor Von Doom',
    ]
    left = random.choice(adjective_list)
    right = random.choice(subject_list)
    name = '{} {}'.format(left, right)
    return name
|
'http://as.anime-stream24.co/' #BACKUP URL
def load():
    """Build the plugin's main menu.

    The real entries are only added when adult content is enabled in the
    settings; otherwise a single folder that opens the settings dialog
    ('getConf') is shown.
    """
    oGui = cGui()
    params = ParameterHandler()
    logger.info("Load %s" % SITE_NAME)
    if showAdult():
        params.setParam('entryMode', "a_z")
        oGui.addFolder(cGuiElement('A BIS Z', SITE_IDENTIFIER, 'showMovies'), params)
        params.setParam('entryMode', "top_animes")
        oGui.addFolder(cGuiElement('Top', SITE_IDENTIFIER, 'showMovies'), params)
        params.setParam('entryMode', "new")
        oGui.addFolder(cGuiElement('Neuste Animes', SITE_IDENTIFIER, 'showMovies'), params)
        # NOTE(review): search reuses the "a_z" entry mode — presumably the
        # search filters the A-Z listing; confirm before changing.
        params.setParam('entryMode', "a_z")
        oGui.addFolder(cGuiElement('Suche', SITE_IDENTIFIER, 'showSearch'), params)
    else:
        oGui.addFolder(cGuiElement('Um Inhalte sehen zu können, muss der Adult Content aktiviert werden. \n(Klicke hier, um diese zu öffnen)', SITE_IDENTIFIER, 'getConf'), params)
    oGui.setEndOfDirectory()
def showMovies(sURL = False, sGui = False, sSearchText = ""):
    """List entries for the mode stored in the 'entryMode' parameter.

    Supported modes: "top_animes" (scrape the top list), "a_z"
    (alphabetical option list, optionally filtered by sSearchText) and
    "new" (date-based "latest entries" folders built without scraping).
    """
    oGui = sGui if sGui else cGui()
    if not sURL: sURL = URL_MAIN
    params = ParameterHandler()
    # NOTE(review): eMode is always "" here, so the else branch below is
    # unreachable and eMode is always read from params — confirm intent.
    eMode = ""
    if not eMode:
        eMode = params.getValue('entryMode')
    else:
        eMode = "ERROR"
    if "top_animes" in eMode:
        pattern = 'class="separator".*?<a href="([^"]+)".*?' #link
        pattern += '<img src="([^"]+)".*?' #img
        pattern += '([^><]+)</a>' #title
    elif "a_z" in eMode:
        pattern = "<option value='([^']+)'>([^><]+)</option>" #link, title
    elif "new" in eMode:
        # Blogger-style query for today's date; the max-results count is
        # appended per folder entry further down.
        sURL = sURL + "search?updated-max=" + time.strftime("%Y-%m-%d") + "T08:48:00%2B01:00&max-results="
        pattern = False
        aResult = False
    else:
        # Unknown mode: surface it to the user and bail out
        if not sGui: oGui.showInfo('xStream', eMode)
        return
    if pattern:
        oRequestHandler = cRequestHandler(sURL)
        sHtmlContent = oRequestHandler.request()
        oParser = cParser()
        aResult = oParser.parse(sHtmlContent, pattern)
        if not aResult[0]:
            if not sGui: oGui.showInfo('xStream', 'Es wurde kein Eintrag gefunden')
            return
        total = len(aResult[1])
        # NOTE(review): qual appears unused in this function
        qual = "1080"
    if "top_animes" in eMode:
        for link, img, title in aResult[1]:
            guiElement = cGuiElement(title, SITE_IDENTIFIER, 'getEpisodes')
            guiElement.setThumbnail(img)
            #guiElement.setDescription(plot.decode('iso-8859-1'))
            guiElement.setMediaType('movie')
            params.setParam('eUrl',link)
            oGui.addFolder(guiElement, params, bIsFolder = True, iTotal = total)
    elif "new" in eMode:
        # Three fixed folders showing the last 11/22/44 entries of today
        ymd_date = time.strftime("%Y-%m-%d")
        params.setParam('eUrl',sURL + "11")
        oGui.addFolder(cGuiElement("Zeige letzte 11 Einträge (" + ymd_date +")", SITE_IDENTIFIER, 'getEpisodes'),params)
        params.setParam('eUrl',sURL + "22")
        oGui.addFolder(cGuiElement("Zeige letzte 22 Einträge (" + ymd_date +")", SITE_IDENTIFIER, 'getEpisodes'),params)
        params.setParam('eUrl',sURL + "44")
        oGui.addFolder(cGuiElement("Zeige letzte 44 Einträge (" + ymd_date +")", SITE_IDENTIFIER, 'getEpisodes'),params)
    elif "a_z" in eMode:
        #sPattern = params.getValue('search_on')
        sPattern = sSearchText; a = []
        reg_ex = re.compile('.*' + sSearchText + '.*?', re.I)
        # Detail-page pattern used to fetch thumbnail and plot per entry
        pattern = "class='post-title entry-title'><a href='([^']+)'>" #link
        pattern += "([^><]+).*?" #ep_Name
        pattern += '<img.*?src="([^"]+)".*?bung:.*?/>' #Img
        pattern += "(.*?)<br./>" #plot /Gen
        if sPattern:
            # Search: only entries matching the search text; one extra
            # request per match to get artwork and description
            for link, title in aResult[1]:
                if re.search(reg_ex,title):
                    guiElement = cGuiElement(title, SITE_IDENTIFIER, 'getEpisodes')
                    sHtml = cRequestHandler(link).request()
                    a = oParser.parse(sHtml, pattern)
                    #xbmcgui.Dialog().ok("SHOW",str(a[1][1][3])) #.encode("utf-8"))
                    guiElement.setThumbnail(a[1][1][2])
                    guiElement.setDescription(a[1][1][3])
                    params.setParam('eUrl',link)
                    oGui.addFolder(guiElement, params, bIsFolder = True, iTotal = total)
        else:
            # Plain A-Z listing without per-entry detail requests
            for link, title in aResult[1]:
                guiElement = cGuiElement(title, SITE_IDENTIFIER, 'getEpisodes')
                """
                TODO: ERROR HANDLING OUT OF RANGE - LAEDT SONST EWIG FUER DEN REQUEST
                EVENTL AUFTEILEN ODER EINZELNE THREADS??
                ----------------------------------------------------------------------
                sHtml = cRequestHandler(link).request()
                a = oParser.parse(sHtml, pattern)
                guiElement.setThumbnail(a[1][1][2])
                guiElement.setDescription(a[1][1][3].decode('iso-8859-1').encode('utf-8'))
                """
                params.setParam('eUrl',link)
                oGui.addFolder(guiElement, params, bIsFolder = True, iTotal = total)
    oGui.setView('movies')
    oGui.setEndOfDirectory()
def getEpisodes():
    """List the episodes found on the page stored in the 'eUrl' parameter.

    Also probes the "older posts" link once and, when that page still
    contains entries, adds a "Weitere Episoden -->" folder for pagination.
    """
    oGui = cGui()
    oParser = cParser()
    params = ParameterHandler()
    eUrl = ParameterHandler().getValue('eUrl')
    # Percent-encode characters that would break the request (space, plus)
    eUrl = eUrl.replace(" ", "%20"); eUrl = eUrl.replace("+", "%2B") #Decode(Leerzeichen, +)
    isMovie = True
    pattern = "class='post-title entry-title'><a href='([^']+)'>" #link
    pattern += "([^><]+).*?" #ep_Name
    pattern += '<img.*?src="([^"]+)".*?bung:.*?/>' #Img
    pattern += "(.*?)<br./>" #plot /Gen
    sHtmlContent = cRequestHandler(eUrl).request()
    aResult = oParser.parse(sHtmlContent, pattern)
    # bResult: URL of the "older posts" link, used for pagination below
    bResult = oParser.parse(sHtmlContent, "older-link'.*?href='([^']+)'")
    if not aResult[0]:
        oGui.showInfo('xStream', 'Es wurde kein Eintrag gefunden')
        return
    total = len(aResult[1])
    for link, title, img, plot in aResult[1]:
        GuiElement = cGuiElement(title, SITE_IDENTIFIER, 'getHosters')
        GuiElement.setMediaType('movie' if isMovie else 'tvshow')
        GuiElement.setThumbnail(img)
        # NOTE(review): str.replace returns a new string, so this line is a
        # no-op as written — confirm whether plot = plot.replace(...) was meant.
        plot.replace('<b>', '')
        GuiElement.setDescription(plot)#.decode('iso-8859-1').encode('utf-8'))
        #GuiElement.setYear(year)
        params.setParam('siteUrl', link)
        params.setParam('sName', title)
        oGui.addFolder(GuiElement, params, False, total)
    # NOTE(review): assumes the older-link pattern matched; bResult[1][0]
    # raises IndexError on the last page — confirm.
    if 'entry-title' in cRequestHandler(bResult[1][0]).request():
        params.setParam('eUrl', bResult[1][0])
        oGui.addFolder(cGuiElement("Weitere Episoden -->", SITE_IDENTIFIER, 'getEpisodes'),params)
        #logger.info('[[suhmser]] %s: ' % str(bResult[1][0]))
    oGui.setView('movies')
    oGui.setEndOfDirectory()
def getHosters():
    """Scrape hoster iframe URLs from the page stored in 'siteUrl'.

    Returns a list of hoster dicts terminated by the callback name
    'getHosterUrl' (the shape the xStream framework expects), or shows
    an info dialog and returns None when nothing was found.
    """
    oParams = ParameterHandler()
    oGui = cGui()
    sUrl = oParams.getValue('siteUrl')
    sHtmlContent = cRequestHandler(sUrl).request()
    sPattern = '<iframe.*?(?:src|SRC)="([^"]+).*?(?:\<\/if|\<\/IF)'
    aResult = cParser().parse(sHtmlContent, sPattern)
    if aResult[0]:
        hosters = []
        # Extract the bare host name from the URL for display purposes
        reg_ex = re.compile('(?://|\.)?(?:[a-zA-Z0-9]+\.)?([a-zA-Z0-9-.]{0,})\..*?\/.*?\/?', re.I)
        for sUrl in aResult[1]:
            sName = re.search(reg_ex, sUrl).group(1)
            # Normalise protocol-relative and bare URLs to http://
            if not sUrl.startswith('http'):
                if sUrl.startswith('//'):
                    sUrl = 'http:%s' % sUrl
                else:
                    sUrl = 'http://%s' % sUrl
            hosters.append({'link': sUrl, 'name': sName, 'resolveable': True})
        if hosters:
            hosters.append('getHosterUrl')
        return hosters
    else:
        oGui.showInfo('xStream', 'Es wurde kein Eintrag gefunden')
def getHosterUrl(sUrl=False):
if not sUrl:
sUrl = ParameterHandler().getValue('sUrl')
if 'animes-stream24.net' in sUrl:
sUrl = _as24_resolver(sUrl)
res = True
elif 'ani-stream.com' in sUrl: #DOT|net=off
sUrl = _anistream_resolver(sUrl)
res = True
elif 'uploadkadeh.com' in sUrl:
sUrl = 'http://uploadkadeh.com:182/d/' + _webtv_resolver(sUrl) + '/video.mp4'
res |
#!/usr/bin/python
# -*- coding: utf-8 -*-
# ===========================================================================
# iDevice Para Saber más creado para la FPD por José Ramón Jiménez Reyes
# ===========================================================================
"""
Para Saber mas iDevice
"""
import logging
from exe.engine.idevice import Idevice
from exe.engine.translate import lateTranslate
from exe.engine.field import TextAreaField
import re
log = logging.getLogger(__name__)
# ===========================================================================
class ParasabermasfpdIdevice(Idevice):
    """
    "Para saber mas" (FPD) iDevice: lets students voluntarily broaden
    their knowledge beyond the core course material.
    """
    persistenceVersion = 7

    def __init__(self, activity = "", answer = ""):
        """
        Initialize
        """
        Idevice.__init__(self,
                         x_(u"FPD - Para Saber Mas"),
                         x_(u"Jose Ramon Jimenez Reyes"),
                         x_(u"""Para saber más es un iDevice que permite al alumnado ampliar conocimientos, siendo estos voluntarios para su aprendizaje."""), u"", u"parasabermasfpd")
        # self.emphasis = Idevice.SomeEmphasis
        self.emphasis = "_parasabermasfpd"
        self._activityInstruc = x_(u"""Introduce el texto que aparecerá en este iDevice""")
        # self.systemResources += ["common.js"]
        self.activityTextArea = TextAreaField(x_(u'Texto Para saber más'),
                                              self._activityInstruc, activity)
        self.activityTextArea.idevice = self

    # Properties
    activityInstruc = lateTranslate('activityInstruc')

    def getResourcesField(self, this_resource):
        """
        Implement the specific resource finding mechanism for this iDevice:
        return the field holding this_resource, or None.
        """
        # be warned that before upgrading, this iDevice field could not exist:
        if hasattr(self, 'activityTextArea')\
        and hasattr(self.activityTextArea, 'images'):
            for this_image in self.activityTextArea.images:
                if hasattr(this_image, '_imageResource') \
                and this_resource == this_image._imageResource:
                    return self.activityTextArea
        return None

    def getRichTextFields(self):
        """Return the list of rich-text (TextArea) fields of this iDevice."""
        fields_list = []
        if hasattr(self, 'activityTextArea'):
            fields_list.append(self.activityTextArea)
        return fields_list

    def burstHTML(self, i):
        """Parse this iDevice's fields back out of its exported HTML element."""
        # Parasabermasfpd Idevice:
        title = i.find(name='span', attrs={'class' : 'iDeviceTitle' })
        self.title = title.renderContents().decode('utf-8')
        reflections = i.findAll(name='div', attrs={'id' : re.compile('^ta') })
        # should be exactly two of these:
        # 1st = field[0] == Activity
        if len(reflections) >= 1:
            self.activityTextArea.content_wo_resourcePaths = \
                    reflections[0].renderContents().decode('utf-8')
            # and add the LOCAL resource paths back in:
            self.activityTextArea.content_w_resourcePaths = \
                    self.activityTextArea.MassageResourceDirsIntoContent( \
                        self.activityTextArea.content_wo_resourcePaths)
            self.activityTextArea.content = \
                    self.activityTextArea.content_w_resourcePaths

    def upgradeToVersion1(self):
        """
        Upgrades the node from version 0 to 1.
        """
        log.debug(u"Upgrading iDevice")
        self.icon = u"activity"

    def upgradeToVersion2(self):
        """
        Upgrades the node from 1 (v0.5) to 2 (v0.6).
        Old packages will loose their icons, but they will load.
        """
        log.debug(u"Upgrading iDevice")
        # self.emphasis = Idevice.SomeEmphasis
        self.emphasis = "_parasabermasfpd"

    def upgradeToVersion3(self):
        """
        Upgrades v0.6 to v0.7.
        """
        self.lastIdevice = False

    def upgradeToVersion4(self):
        """
        Upgrades to exe v0.10
        """
        self._upgradeIdeviceToVersion1()
        self._activityInstruc = self.__dict__['activityInstruc']

    def upgradeToVersion5(self):
        """
        Upgrades to exe v0.10
        """
        self._upgradeIdeviceToVersion1()

    def upgradeToVersion6(self):
        """
        Upgrades to v0.12
        """
        self._upgradeIdeviceToVersion2()
        # self.systemResources += ["common.js"]

    def upgradeToVersion7(self):
        """
        Upgrades to somewhere before version 0.25 (post-v0.24)
        Taking the old unicode string fields, and converting them
        into image-enabled TextAreaFields:
        """
        self.activityTextArea = TextAreaField(x_(u'Texto Para sabe más'),
                                              self._activityInstruc, self.activity)
        self.activityTextArea.idevice = self
# ===========================================================================
|
# vim: fdm=indent
# author: Fabio Zanini
# date: 02/08/17
# content: Support module for filenames related to LOOM files.
# Modules
import numpy as np
import pandas as pd
from singlet.config import config
# Parser
def parse_dataset(
        path,
        obsm_keys=None,
        ):
    """Parse an AnnData .h5ad file into singlet-style tables.

    Parameters:
        path: path of the .h5ad file to read.
        obsm_keys: if not None, restrict the multidimensional observation
            metadata copied into the samplesheet to these keys (the 'X_'
            prefix is stripped before comparison, e.g. 'pca', 'tsne').

    Returns a dict with 'counts' (features x samples DataFrame),
    'samplesheet' and 'featuresheet'.
    """
    import anndata

    adata = anndata.read_h5ad(path)
    samplesheet = adata.obs.copy()

    # Add obsm (e.g. PCA, embeddings): one scalar column per dimension,
    # named "<key>_<j>" with j starting at 1
    for key, array in adata.obsm.items():
        if key.startswith('X_'):
            newkey = key[2:]
            if (obsm_keys is not None) and (newkey not in obsm_keys):
                continue
            for j, col in enumerate(array.T, 1):
                samplesheet[newkey+'_'+str(j)] = col

    featuresheet = adata.var.copy()

    # NOTE(review): assumes adata.X is sparse (has .toarray) — confirm
    # behaviour for dense matrices.
    count_mat = adata.X.toarray().T
    counts_table = pd.DataFrame(
        data=count_mat,
        index=featuresheet.index,
        columns=samplesheet.index,
    )
    return {
        'counts': counts_table,
        'samplesheet': samplesheet,
        'featuresheet': featuresheet,
    }
|
# ===========================================================================
#
# Library: PyCTK
# Filename: test_axeswidget.py
#
# Copyright (c) 2015 Lamond Lab
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0.txt
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
# ========================================= | ==================================
from PyQt5.QtCore import *
from PyQt5.QtGui import *
from PyQt5.QtWidgets import *
from PyCTK.Widgets import ctkAxesWidget
class Widget(QWidget):
    """Demo widget: a single ctkAxesWidget inside a vertical layout."""
    def __init__(self, parent=None, **kwargs):
        super().__init__(parent, **kwargs)
        l=QVBoxLayout(self)
        # The widget under test; kept as an attribute for later access
        self._axesWidget=ctkAxesWidget(self)
        l.addWidget(self._axesWidget)
if __name__=="__main__":
    # Manual test entry point: show the widget and run the Qt event loop
    from sys import argv, exit
    a=QApplication(argv)
    w=Widget()
    w.show()
    w.raise_()
    exit(a.exec_())
prefix
), "NTURI path '%s' doesn't start with '%s'" % (value.path, prefix)
method = value.path[len(prefix) :]
parameters = convert_value_to_dict(value.query)
else:
# We got something else, take path from pvRequest method and our mri
# and parameters from the full value
if self.field is not None:
# We already know the method name
method = self.field
else:
# Get the path and string "value" from the put value
method = op.pvRequest().get("method")
assert method, "No 'method' in pvRequest:\n%s" % op.pvRequest()
parameters = convert_value_to_dict(value)
path = [self.controller.mri, method]
view = self.controller.block_view()[method]
assert isinstance(
view, Method
), "%s.%s is not a Method so cannot do RPC" % tuple(path)
add_wrapper = method_return_unpacked() in view.meta.tags
self.controller.log.debug(
f"{self.controller.mri}: RPC method {method} called with "
f"params {parameters}"
)
post = Post(path=path, parameters=parameters)
def handle_post_response(response: Response) -> None:
if isinstance(response, Return):
ret: Any
if add_wrapper:
# Method gave us return unpacked (bare string or other type)
# so we must wrap it in a structure to send it
ret = {"return": response.value}
else:
ret = response.value
v = convert_dict_to_value(ret)
if ret:
self.controller.log.debug(
f"{self.controller.mri}: RPC method {method} returned with "
f"value {ret}"
)
else:
self.controller.log.debug(
f"{self.controller.mri}: RPC method {method} returned"
)
op.done(v)
else:
if isinstance(response, Error):
message = stringify_error(response.message)
self.controller.log.debug(
f"{self.controller.mri}: RPC method {method} resulted in "
f"error ({message})"
)
else:
message = "BadResponse: %s" % response.to_dict()
self.controller.log.debug(
f"{self.controller.mri}: RPC method {method} got a bad "
f"response ({message})"
)
op.done(error=message)
post.set_callback(handle_post_response)
self.controller.handle_request(post).get()
def put(self, pv: SharedPV, op: ServerOperation) -> None:
    """Handle a pvAccess Put by forwarding it as a malcolm Put request.

    Works out the single dotted field path that changed, forwards a Put
    to the controller, and completes *op* (success or error) when the
    response comes back.
    """
    path = [self.controller.mri]
    # We work out what to Put by taking every field that is marked as
    # changed and walking up the tree, adding every dotted field name
    # to the tree on the way up. This set will contain something like:
    # {"attr.value", "attr"}
    # Or for a table:
    # {"table.value.colA", "table.value.colB", "table.value", "table"}
    # Or if self.field:
    # {"value"}
    changed_fields_inc_parents = op.value().changedSet(parents=True, expand=False)
    # Taking the intersection with all puttable paths should yield the
    # thing we want to change, so value_changed would be:
    # {"attr.value"} or {"table.value"} or {"value"}
    value_changed = changed_fields_inc_parents.intersection(self.put_paths)
    assert (
        len(value_changed) == 1
    ), "Can only do a Put to a single field, got %s" % list(value_changed)
    changed = list(value_changed)[0]
    if self.field is not None:
        # Only accept a Put to "value"
        assert changed == "value", "Can only put to value of %s.%s, not %s" % (
            self.controller.mri,
            self.field,
            changed,
        )
        path += [self.field, "value"]
        op_value = op.value()
    else:
        # Get the path and string "value" from the put value
        split = changed.split(".")
        assert (
            len(split) == 2 and split[1] == "value"
        ), "Can only put to value of %s.%s, not %s" % (
            self.controller.mri,
            split[0],
            split[1],
        )
        path += list(split)
        op_value = op.value()[split[0]]
    value = convert_value_to_dict(op_value)["value"]
    put = Put(path=path, value=value)

    def handle_put_response(response: Response) -> None:
        # Complete the client operation once the Put round-trips
        if isinstance(response, Return):
            op.done()
        else:
            if isinstance(response, Error):
                message = stringify_error(response.message)
            else:
                message = "BadResponse: %s" % response.to_dict()
            op.done(error=message)

    put.set_callback(handle_put_response)
    # Block until the controller has processed the request
    self.controller.handle_request(put).get()
def handle(self, response: Response) -> None:
    """Process a subscription response, creating or updating the PV value."""
    # Called from whatever thread the child block could be in, so
    # must already be a good thread to take the lock
    with self._lock:
        if self.pv:
            # onFirstConnect has been called, should be able to update it
            try:
                assert isinstance(response, Delta), (
                    "Expecting Delta response, got %s" % response
                )
                # We got a delta, create or update value and notify
                if self.value is None:
                    # Open it with the value
                    self.controller.log.debug("About to open")
                    self._create_initial_value(response)
                elif self.pv.isOpen():
                    # Update it with values
                    self._update_value(response)
            except Exception:
                self.controller.log.debug(
                    f"Closing pv because of error in response {response}",
                    exc_info=True,
                )
                # We got a return or error, close the connection to clients
                self.pv.close()
def _create_initial_value(self, response: Delta) -> None:
    """Build self.value from the first (root) Delta and open the PV with it.

    Also computes self.put_paths, the set of dotted paths clients are
    allowed to Put to.
    """
    # Called with the lock taken
    assert response.changes, "No changes"
    assert (
        len(response.changes) == 1
        and len(response.changes[0]) == 2
        and response.changes[0][0] == []
    ), "Expected root update, got %s" % (response.changes,)
    self.value = convert_dict_to_value(response.changes[0][1])
    # Methods and block metadata are not puttable fields
    unputtable_ids = (MethodModel.typeid, BlockMeta.typeid)
    if not self.field:
        self.put_paths = set(
            "%s.value" % x
            for x, v in self.value.items()
            if v.getID() not in unputtable_ids
        )
    elif self.value.getID() not in unputtable_ids:
        self.put_paths = {"value"}
    else:
        self.put_paths = set()
    self.controller.log.debug(f"Opening with {list(self.value)}")
    assert self.pv, "No pv"
    self.pv.open(self.value)
def _update_value(self, delta: Delta) -> None:
    """Apply non-root Delta changes to self.value and post the update."""
    # Called with the lock taken
    self.value.unmark()
    assert delta.changes, "No Delta changes"
    for change in delta.changes:
        assert len(change) == 2, "Path %s deleted" % change[0]
        assert len(change[0]) > 0, "Can't handle root update %s after initial" % (
            change,
        )
        # Path will have at least one element
        path, update = change
        update_path(self.value, path, update)
    # No type change, post the updated value
    assert self.pv, "No pv"
    self.pv.post(self.value)
# Need camelCase as called by p4 |
wait:
cls._wait_for_load_balancer_status(lb.get('id'))
except Exception:
raise Exception(_("Failed to create load balancer..."))
cls._lbs_to_delete.append(lb.get('id'))
return lb
@classmethod
def _create_active_load_balancer(cls, **kwargs):
lb = cls._create_load_balancer(**kwargs)
lb = cls._wait_for_load_balancer_status(lb.get('id'))
return lb
    @classmethod
    def _delete_load_balancer(cls, load_balancer_id, wait=True):
        """Delete a load balancer; optionally wait until it is gone."""
        cls.load_balancers_client.delete_load_balancer(load_balancer_id)
        if wait:
            # delete=True makes the waiter treat disappearance as success
            cls._wait_for_load_balancer_status(
                load_balancer_id, delete=True)
    @classmethod
    def _update_load_balancer(cls, load_balancer_id, wait=True, **lb_kwargs):
        """Update a load balancer; optionally wait for it to go ACTIVE."""
        lb = cls.load_balancers_client.update_load_balancer(
            load_balancer_id, **lb_kwargs)
        if wait:
            cls._wait_for_load_balancer_status(
                load_balancer_id)
        return lb
@classmethod
def _wait_for_load_balancer_status(cls, load_balancer_id,
provisioning_status='ACTIVE',
operating_status='ONLINE',
delete=False):
interval_time = 10
timeout = 300
end_time = time.time() + timeout
lb = {}
while time.time() < end_time:
try:
lb = cls.load_balancers_client.get_load_balancer(
load_balancer_id)
if not lb:
# loadbalancer not found
if delete:
break
else:
raise Exception(
_("loadbalancer {lb_id} not"
" found").format(
lb_id=load_balancer_id))
if (lb.get('provisioning_status') == provisioning_status and
lb.get('operating_status') == operating_status):
break
time.sleep(interval_time)
except exceptions.NotFound as e:
# if wait is for delete operation do break
if delete:
break
else:
# raise original exception
raise e
else:
raise Exception(
_("Wait for load balancer ran for {timeout} seconds and did "
"not observe {lb_id} reach {provisioning_status} "
"provisioning status and {operating_status} "
"operating status.").format(
timeout=timeout,
lb_id=load_balancer_id,
provisioning_status=provisioning_status,
operating_status=operating_status))
return lb
    @classmethod
    def _create_listener(cls, wait=True, **listener_kwargs):
        """Create a listener; optionally wait for the parent LB to settle."""
        listener = cls.listeners_client.create_listener(**listener_kwargs)
        if wait:
            cls._wait_for_load_balancer_status(cls.load_balancer.get('id'))
        return listener
    @classmethod
    def _delete_listener(cls, listener_id, wait=True):
        """Delete a listener; optionally wait for the parent LB to settle."""
        cls.listeners_client.delete_listener(listener_id)
        if wait:
            cls._wait_for_load_balancer_status(cls.load_balancer.get('id'))
    @classmethod
    def _update_listener(cls, listener_id, wait=True, **listener_kwargs):
        """Update a listener; optionally wait for the parent LB to settle."""
        listener = cls.listeners_client.update_listener(
            listener_id, **listener_kwargs)
        if wait:
            cls._wait_for_load_balancer_status(
                cls.load_balancer.get('id'))
        return listener
    @classmethod
    def _create_pool(cls, wait=True, **pool_kwargs):
        """Create a pool; optionally wait for the parent LB to settle."""
        pool = cls.pools_client.create_pool(**pool_kwargs)
        if wait:
            cls._wait_for_load_balancer_status(cls.load_balancer.get('id'))
        return pool
    @classmethod
    def _delete_pool(cls, pool_id, wait=True):
        """Delete a pool; optionally wait for the parent LB to settle."""
        cls.pools_client.delete_pool(pool_id)
        if wait:
            cls._wait_for_load_balancer_status(cls.load_balancer.get('id'))
    @classmethod
    def _update_pool(cls, pool_id, wait=True, **pool_kwargs):
        """Update a pool; optionally wait for the parent LB to settle."""
        pool = cls.pools_client.update_pool(pool_id, **pool_kwargs)
        if wait:
            cls._wait_for_load_balancer_status(
                cls.load_balancer.get('id'))
        return pool
    @classmethod
    def _create_health_monitor(cls, wait=True, **health_monitor_kwargs):
        """Create a health monitor; optionally wait for the LB to settle."""
        hm = cls.health_monitors_client.create_health_monitor(
            **health_monitor_kwargs)
        if wait:
            cls._wait_for_load_balancer_status(cls.load_balancer.get('id'))
        return hm
    @classmethod
    def _delete_health_monitor(cls, health_monitor_id, wait=True):
        """Delete a health monitor; optionally wait for the LB to settle."""
        cls.health_monitors_client.delete_health_monitor(health_monitor_id)
        if wait:
            cls._wait_for_load_balancer_status(cls.load_balancer.get('id'))
    @classmethod
    def _update_health_monitor(cls, health_monitor_id, wait=True,
                               **health_monitor_kwargs):
        """Update a health monitor; optionally wait for the LB to settle."""
        health_monitor = cls.health_monitors_client.update_health_monitor(
            health_monitor_id, **health_monitor_kwargs)
        if wait:
            cls._wait_for_load_balancer_status(
                cls.load_balancer.get('id'))
        return health_monitor
    @classmethod
    def _create_member(cls, pool_id, wait=True, **member_kwargs):
        """Create a pool member; optionally wait for the LB to settle."""
        member = cls.members_client.create_member(pool_id, **member_kwargs)
        if wait:
            cls._wait_for_load_balancer_status(cls.load_balancer.get('id'))
        return member
    @classmethod
    def _delete_member(cls, pool_id, member_id, wait=True):
        """Delete a pool member; optionally wait for the LB to settle."""
        cls.members_client.delete_member(pool_id, member_id)
        if wait:
            cls._wait_for_load_balancer_status(cls.load_balancer.get('id'))
    @classmethod
    def _update_member(cls, pool_id, member_id, wait=True,
                       **member_kwargs):
        """Update a pool member; optionally wait for the LB to settle."""
        member = cls.members_client.update_member(
            pool_id, member_id, **member_kwargs)
        if wait:
            cls._wait_for_load_balancer_status(
                cls.load_balancer.get('id'))
        return member
    @classmethod
    def _check_status_tree(cls, load_balancer_id, listener_ids=None,
                           pool_ids=None, health_monitor_id=None,
                           member_ids=None):
        """Assert the LB status tree reports ONLINE/ACTIVE for the given ids.

        NOTE(review): the pool/member/health-monitor lookups index the
        tree with string keys (load_balancer['listeners']['pools']...),
        which assumes those levels are mappings rather than lists of
        per-listener dicts -- verify against the status-tree schema.
        """
        statuses = cls.load_balancers_client.get_load_balancer_status_tree(
            load_balancer_id=load_balancer_id)
        load_balancer = statuses['loadbalancer']
        # The load balancer itself must be fully up before checking children.
        assert 'ONLINE' == load_balancer['operating_status']
        assert 'ACTIVE' == load_balancer['provisioning_status']
        if listener_ids:
            cls._check_status_tree_thing(listener_ids,
                                         load_balancer['listeners'])
        if pool_ids:
            cls._check_status_tree_thing(pool_ids,
                                         load_balancer['listeners']['pools'])
        if member_ids:
            cls._check_status_tree_thing(
                member_ids,
                load_balancer['listeners']['pools']['members'])
        if health_monitor_id:
            health_monitor = (
                load_balancer['listeners']['pools']['health_monitor'])
            assert health_monitor_id == health_monitor['id']
            assert 'ACTIVE' == health_monitor['provisioning_status']
@classmethod
def _check_status_tree_thing(cls, actual_thing_ids, status_tree_things):
found_things = 0
status_tree_things = status_tree_things
assert len(actual_thing_ids) == len(status_tree_things)
for actual_thing_id in actual_thing_ids:
for status_tree_thing in status_tree_things:
if status_tree_thing['id'] == actual_thing_id:
assert 'ONLINE' == (
status_tree_thing['operating_status'])
assert 'ACTIVE' == (
status_tree_thing['provisioning_status'])
found_things += 1
assert len(actual_thi | |
# Input dataset and output model directories (both default to /tmp).
DATASET_DIR = '/tmp'
BRAIN_DIR = '/tmp'
# Genre class labels.
GENRES = [
    'blues', 'classical', 'country', 'disco', 'hiphop',
    'jazz', 'metal', 'pop', 'reggae', 'rock'
]
# Feature-extraction settings.
NUM_BEATS = 10
KEEP_FRAMES = 0
# Train/test split proportions.
TRAIN_TEST_RATIO = [7, 3]
# Selects which network config below is used -- presumably 'nn' or 'cnn';
# confirm against the training code.
MODE = 'nn'
PCA = False
# Feature families to compute, plus their sub-options.
FEATURES = ['mfcc', 'dwt', 'beat']
MFCC_EXTRA = ['delta', 'ddelta', 'energy']
DWT = ['mean', 'std', 'max', 'min']
# Flattened feature-vector length per feature family.
FEATURES_LENGTH = {
    'mfcc' : 160,
    'dwt' : 112,
    'beat' : 11
}
# Frame/hop sizes -- units look like seconds; confirm with the extractor.
FRAME_LENGTH = 0.025
HOP_LENGTH = 0.005
N_MFCC = 13
W_FRAME_SCALE = 10
# Fully-connected network hyperparameters.
NN = {
    'NUM_HIDDEN_LAYERS' : 2,
    'HIDDEN_INPUTS' : [1024, 1024],
    'RANDOM' : True,
    'BATCH_SIZE' : 100,
    'TRAINING_CYCLES' : 1000,
    'LEARNING_RATE' : 0.01,
    'DROPOUT_PROB' : 0.6
}
# Convolutional network hyperparameters.
CNN = {
    'NUM_HIDDEN_LAYERS' : 2,
    'NUM_DENSE_LAYERS' : 1,
    'HIDDEN_FEATURES' : [32, 64],
    'DENSE_INPUTS' : [128],
    'INPUT_SHAPE' : [16, 17],
    'PATCH_SIZE' : [5, 5],
    'RANDOM' : False,
    'STRIDES' : [1, 1, 1, 1],
    'BATCH_SIZE' : 100,
    'TRAINING_CYCLES' : 1000,
    'LEARNING_RATE' : 0.01,
    'DROPOUT_PROB' : 0.6
}
#!/usr/bin/python
#
# Copyright 2014 Google Inc. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""This example gets all disapproved ads for a given campaign with AWQL.
To add an ad, run add_ads.py.
Tags: AdGroupAdService.get
"""
__author__ = ('api.kwinter@gmail.com (Kevin Winter)'
'Joseph DiLallo')
from googleads import adwords
CAMPAIGN_ID = 'INSERT_CAMPAIGN_ID_HERE'
def main(client, campaign_id):
  """Query and print every disapproved ad in *campaign_id* with its reasons."""
  # Initialize appropriate service.
  ad_group_ad_service = client.GetService('AdGroupAdService', version='v201309')
  # Construct query and get all ads for a given campaign.
  query = ('SELECT Id, AdGroupAdDisapprovalReasons '
           'WHERE CampaignId = %s AND '
           'AdGroupCreativeApprovalStatus = DISAPPROVED '
           'ORDER BY Id' % campaign_id)
  ads = ad_group_ad_service.query(query)
  # Display results.
  if 'entries' in ads:
    for ad in ads['entries']:
      print ('Ad with id \'%s\' was disapproved for the following reasons: '
             % (ad['ad']['id']))
      # disapprovalReasons may be absent or empty for some entries.
      if ad['ad'].get('disapprovalReasons'):
        for reason in ad['ad']['disapprovalReasons']:
          print '\t%s' % reason
      else:
        print '\tReason not provided.'
  else:
    print 'No disapproved ads were found.'
if __name__ == '__main__':
  # Initialize client object from stored configuration (LoadFromStorage).
  adwords_client = adwords.AdWordsClient.LoadFromStorage()
  main(adwords_client, CAMPAIGN_ID)
|
from __future__ import print_function
import pickle, os, sys, glob, hashlib
from sklearn.ensemble import RandomForestClassifier
import pandas as pd
# The sample submission defines the expected test files; train_v2 holds labels.
test_files = set(pd.read_csv('./data/sampleSubmission_v2.csv').file.values)
train = pd.read_csv('./data/train_v2.csv')
# Precomputed feature matrix, one row per file.
# NOTE(review): assumes df_full.p was produced by an earlier feature step.
df_full = pickle.load(open( "df_full.p", "rb"))
# No point using empty files in our training set so we remove them.
print('--- Removing empty files')
filepaths = glob.glob('data/*/*.txt')
for filepath in filepaths:
    if os.path.getsize(filepath) == 0:
        filename = os.path.basename(filepath)
        df_full = df_full[df_full.file != filename]
        if filename in test_files:
            print("Found empty file in submission: ", filename)
#https://www.youtube.com/watch?v=0GrciaGYzV0
print('--- Training random forest')
clf = RandomForestClassifier(n_estimators=300, n_jobs=-1, random_state=0)
# Labeled rows are training data; unlabeled rows listed in the submission are scored.
train_data = df_full[df_full.sponsored.notnull()].fillna(0)
test = df_full[df_full.sponsored.isnull() & df_full.file.isin(test_files)].fillna(0)
clf.fit(train_data.drop(['file', 'sponsored'], 1), train_data.sponsored)
# Normalized value between 0 and 1.
feature_importances = pd.Series(clf.feature_importances_, index=train_data.drop(['file', 'sponsored'], 1).columns)
# NOTE(review): Series.sort() and drop(..., 1) are legacy pandas APIs removed in
# modern releases (use sort_values() / drop(columns=...)); a pinned pandas is assumed.
feature_importances.sort()
with pd.option_context('display.max_rows', len(feature_importances), 'display.max_columns', 10):
    print(feature_importances)
print('--- Create predictions and submission')
submission = test[['file']].reset_index(drop=True)
# Probability of the positive ("sponsored") class.
submission['sponsored'] = clf.predict_proba(test.drop(['file', 'sponsored'], 1))[:, 1]
# Make sure submission has the correct number of rows.
if len(submission) != 66772:
    print("Error: wrong dimension! Not generating submission CSV file.")
else:
    submission.to_csv('native_btb_basic_submission.csv', index=False)
|
"""
The 'color' property is a color and may be specified as:
- A hex string (e.g. '#ff0000')
- An rgb/rgba string (e.g. 'rgb(255,0,0)')
- An hsl/hsla string (e.g. 'hsl(0,100%,50%)')
- An hsv/hsva string (e.g. 'hsv(0,100%,100%)')
- A named CSS color:
aliceblue, antiquewhite, aqua, aquamarine, azure,
beige, bisque, black, blanchedalmond, blue,
blueviolet, brown, burlywood, cadetblue,
chartreuse, chocolate, coral, cornflowerblue,
cornsilk, crimson, cyan, darkblue, darkcyan,
darkgoldenrod, darkgray, darkgrey, darkgreen,
darkkhaki, darkmagenta, darkolivegreen, darkorange,
darkorchid, darkred, darksalmon, darkseagreen,
darkslateblue, darkslategray, darkslategrey,
darkturquoise, darkviolet, deeppink, deepskyblue,
dimgray, dimgrey, dodgerblue, firebrick,
floralwhite, forestgreen, fuchsia, gainsboro,
ghostwhite, gold, goldenrod, gray, grey, green,
greenyellow, honeydew, hotpink, indianred, indigo,
ivory, khaki, lavender, lavenderblush, lawngreen,
lemonchiffon, lightblue, lightcoral, lightcyan,
lightgoldenrodyellow, lightgray, lightgrey,
lightgreen, lightpink, lightsalmon, lightseagreen,
lightskyblue, lightslategray, lightslategrey,
lightsteelblue, lightyellow, lime, limegreen,
linen, magenta, maroon, mediumaquamarine,
mediumblue, mediumorchid, mediumpurple,
mediumseagreen, mediumslateblue, mediumspringgreen,
mediumturquoise, mediumvioletred, midnightblue,
mintcream, mistyrose, moccasin, navajowhite, navy,
oldlace, olive, olivedrab, orange, orangered,
orchid, palegoldenrod, palegreen, paleturquoise,
palevioletred, papayawhip, peachpuff, peru, pink,
plum, powderblue, purple, red, rosybrown,
royalblue, rebeccapurple, saddlebrown, salmon,
sandybrown, seagreen, seashell, sienna, silver,
skyblue, slateblue, slategray, slategrey, snow,
springgreen, steelblue, tan, teal, thistle, tomato,
turquoise, violet, wheat, white, whitesmoke,
yellow, yellowgreen
Returns
-------
str
"""
return self["color"]
    @color.setter
    def color(self, val):
        # Item assignment routes the value through the property machinery
        # (presumably the base class validator -- confirm in BasePlotlyType).
        self["color"] = val
    # family
    # ------
    @property
    def family(self):
        """
        HTML font family - the typeface that will be applied by the web
        browser. The web browser will only be able to apply a font if
        it is available on the system which it operates. Provide
        multiple font families, separated by commas, to indicate the
        preference in which to apply fonts if they aren't available on
        the system. The Chart Studio Cloud (at https://chart-
        studio.plotly.com or on-premise) generates images on a server,
        where only a select number of fonts are installed and
        supported. These include "Arial", "Balto", "Courier New",
        "Droid Sans", "Droid Serif", "Droid Sans Mono", "Gravitas
        One", "Old Standard TT", "Open Sans", "Overpass", "PT Sans
        Narrow", "Raleway", "Times New Roman".

        The 'family' property is a string and must be specified as:
          - A non-empty string

        Returns
        -------
        str
        """
        return self["family"]
    @family.setter
    def family(self, val):
        # Item assignment routes the value through the property machinery
        # (presumably the base class validator -- confirm in BasePlotlyType).
        self["family"] = val
    # size
    # ----
    @property
    def size(self):
        """
        The 'size' property is a number and may be specified as:
          - An int or float in the interval [1, inf]

        Returns
        -------
        int|float
        """
        return self["size"]
    @size.setter
    def size(self, val):
        # Item assignment routes the value through the property machinery
        # (presumably the base class validator -- confirm in BasePlotlyType).
        self["size"] = val
    # Self properties description
    # ---------------------------
    @property
    def _prop_descriptions(self):
        # This is a returned string literal (runtime help text), not a
        # docstring -- do not edit it as documentation.
        return """\
        color

        family
            HTML font family - the typeface that will be applied by
            the web browser. The web browser will only be able to
            apply a font if it is available on the system which it
            operates. Provide multiple font families, separated by
            commas, to indicate the preference in which to apply
            fonts if they aren't available on the system. The Chart
            Studio Cloud (at https://chart-studio.plotly.com or on-
            premise) generates images on a server, where only a
            select number of fonts are installed and supported.
            These include "Arial", "Balto", "Courier New", "Droid
            Sans",, "Droid Serif", "Droid Sans Mono", "Gravitas
            One", "Old Standard TT", "Open Sans", "Overpass", "PT
            Sans Narrow", "Raleway", "Times New Roman".
        size

        """
def __init__(self, arg=None, color=None, family=None, size=None, **kwargs):
"""
Construct a new Textfont object
Sets the icon text font (color=mapbox.layer.paint.text-color,
size=mapbox.layer.layout.text-size). Has an effect only when
`type` is set to "symbol".
Parameters
----------
arg
dict of properties compatible with this constructor or
an instance of
:class:`plotly.graph_objs.scattermapbox.Textfont`
color
family
HTML font family - the typeface that will be applied by
the web browser. The web browser will only be able to
apply a font if it is available on the system which it
operates. Provide multiple font families, separated by
commas, to indicate the preference in which to apply
fonts if they aren't available on the system. The Chart
Studio Cloud (at https://chart-studio.plotly.com or on-
premise) generates images on a server, where only a
select number of fonts are installed and supported.
These include "Arial", "Balto", "Courier New", "Droid
Sans",, "Droid Serif", "Droid Sans Mono", "Gravitas
One", "Old Standard TT", "Open Sans", "Overpass", "PT
Sans Narrow", "Raleway", "Times New Roman".
size
Returns
-------
Textfont
"""
super(Textfont, self).__init__("textfont")
if "_parent" in kwargs:
self._parent = kwargs["_parent"]
return
# Validate arg
# ------------
if arg is None:
arg = {}
elif isinstance(arg, self.__class__):
arg = arg.to_plotly_json()
elif isinstance(arg, dict):
arg = _copy.copy(arg)
else:
raise ValueError(
"""\
The first argument to the plotly.graph_objs.scattermapbox.Textfont
constructor must be a dict or
an instance of :class:`plotly.graph_objs.scattermapbox.Textfont`"""
)
# Handle skip_invalid
# -------------------
self._skip_invalid = kwargs.pop("skip_invalid", False)
self._validate = kwargs.pop("_validate", True)
# Populate data dict with properties
# ----------------------------------
_v = arg.pop("color", None)
_v = color if color is not None else _v
if _v is not None:
self["color"] = _v
_v = arg.pop("family", None)
_v = family if family is not None else _v
if _v is not None:
self["family"] = _v
_v = arg.pop("size", None)
_v = size if size is not None else _v
if _v is not None:
self["size"] = _v
# Process unknown kwargs
# ----------------------
self._process_kwargs(**dict(arg, **kwargs))
# Reset skip_invalid
# ------------------
self._sk |
from typing import NamedTuple, List
from data import crossword
class Clue(str):
    """A crossword clue; behaves as a plain str but caches its tokens."""

    def __init__(self, value) -> None:
        # str is immutable: the text was already set by str.__new__, so
        # this super().__init__ call is effectively a no-op kept for form.
        super(Clue, self).__init__(value)
        # Tokenized form of the clue, computed once at construction.
        self._tokens = crossword.tokenize_clue(value)
class _Node(object):
    """Internal pairing of a Clue with its occupancy state."""
    # The clue this node represents.
    _clue: Clue
    # Occupancy value -- presumably a bitmask or count of filled cells;
    # confirm against callers.
    _occupied: int

    def __init__(self, clue: Clue, occupied: int) -> None:
        self._clue = clue
        self._occupied = occupied
class Parsed(List):
    """Marker subclass of list for parsed results; adds no behavior."""
    pass
# A list of nodes, initially filled with None entries
|
import logging
from gettext import gettext as _
from .l_connector import ConnectorBase
from .l_net import ModuleStrategyBase
from .main import OptionsWIFI
logger = logging.getLogger(__name__)
# noinspection PyAbstractClass
class Connector(ConnectorBase):
    """Connector specialised to carry WIFI options."""
    # Narrows the inherited options attribute to the WIFI options type.
    opti_: OptionsWIFI
class Module(ModuleStrategyBase):
    """WIFI variant of the network module strategy."""
    ID = 'net_wifi'
    # Options type and connector specialised for WIFI.
    opti_: OptionsWIFI
    OptionsClass = OptionsWIFI
    _connector: Connector
    ConnectorClass = Connector

    @property
    def name(self) -> str:
        # Human-readable, translatable module name.
        return _('WIFI')
|
from django.shortcuts import render
from services.genomics_test_generator.fhir_genomics_test_gene import *
from django.views.decorators.csrf import csrf_exempt
from django.http import HttpResponse, HttpResponseRedirect
from django.template import RequestContext, loader
import json
from home.task_runner import perform_test
from home.models import task, server, resource
from home.search import search_basedon_id
from services import auth
from home.matrix import form_resource_martix, form_level_martix,form_matrix
import traceback
# Create your views here.
@csrf_exempt
def submit_task(request):
    """Queue a test run described by the JSON request body.

    Expects 'code', 'language' and 'type'; for type 0 or 3 also a
    'resources' checklist.  The target server comes either from
    'chosen_server' (a stored server id) or from explicit 'url' and
    'access_token' fields.  Responds with the new task id as JSON.
    """
    # Get code, language, type from the request body.
    req_json = json.loads(request.body)
    print req_json
    code = req_json['code']
    language = req_json['language']
    test_type = req_json['type']
    resource_list = []
    if test_type == 3 or test_type == 0:
        # Collect only the resources the user left checked.
        resource_state = req_json['resources']
        print resource_state
        for item in resource_state:
            if item['checked']:
                resource_list.append(item['name'])
        print resource_list
    if 'chosen_server' in req_json:
        # Set url and access token from the stored server record.
        try:
            server_obj = server.objects.get(server_id=int(req_json['chosen_server']))
            url = server_obj.server_url
            access_token = server_obj.access_token
        except:
            # NOTE(review): bare except also swallows KeyboardInterrupt /
            # SystemExit; consider narrowing to Exception.
            traceback.print_exc()
            result = {
                'isSuccessful':False,
                'error':"Invalid server"
            }
            return HttpResponse(json.dumps(result), content_type="application/json")
    else:
        access_token = req_json['access_token']
        url = req_json['url']
    # Auth token is optional; missing token means anonymous submission.
    token = None
    try:
        token = req_json['token']
    except:
        pass
    username = None
    if token:
        username = auth.extract_username(token)
    print access_token
    # Start the test run and return its task id.
    if 'chosen_server' in req_json:
        task_id = perform_test(language=language,code=code,url=url,test_type=test_type,server_id=req_json['chosen_server'], resource_list=resource_list, access_token=access_token, username=username)
    else:
        task_id = perform_test(language=language,code=code,url=url,test_type=test_type,server_id=None, resource_list=resource_list, access_token=access_token, username=username)
    result = {
        'isSuccessful':True,
        'task_id':task_id
    }
    return HttpResponse(json.dumps(result), content_type="application/json")
@csrf_exempt
def get_resource_matrix(request):
    """Return the resource matrix (home.matrix.form_resource_martix) as JSON."""
    result = form_resource_martix()
    return HttpResponse(json.dumps(result), content_type="application/json")
@csrf_exempt
def get_resources(request):
    """Return the names of resources of the requested type.

    GET param 'type' selects resource_type; any missing or non-integer
    value falls back to 0.  Response JSON:
    {'isSuccessful': bool, 'names': [{'name': ..., 'checked': True}, ...]}.
    """
    resource_type = request.GET.get('type', 0)
    if isinstance(resource_type, str):
        try:
            resource_type = int(resource_type)
        except ValueError:
            # int() on a string can only raise ValueError; the original
            # bare except also caught KeyboardInterrupt/SystemExit.
            resource_type = 0
    result = {
        'isSuccessful': False,
        'names': []
    }
    try:
        resources = resource.objects.filter(resource_type=resource_type)
        for resource_obj in resources:
            result['names'].append({'name': resource_obj.name, 'checked': True})
        result['isSuccessful'] = True
    except Exception:
        # Best-effort: DB errors are reported as isSuccessful=False
        # rather than a 500 (narrowed from a bare except).
        pass
    return HttpResponse(json.dumps(result), content_type="application/json")
@csrf_exempt
def add_new_server(request):
    """Register a new test server from the JSON request body.

    Expects 'name' and 'url'; 'token' (access token) is optional.
    Response JSON: {'isSuccessful': bool}.
    """
    req_json = json.loads(request.body)
    result = {
        'isSuccessful': False
    }
    try:
        server_name = req_json['name']
        server_url = req_json['url']
        # Token is optional; servers without one store None.
        access_token = req_json.get('token')
        new_server = server(server_name=server_name, server_url=server_url,
                            access_token=access_token)
        new_server.save()
        result['isSuccessful'] = True
    except Exception:
        # Best-effort: malformed payloads or DB errors yield
        # isSuccessful=False (narrowed from a bare except).
        pass
    return HttpResponse(json.dumps(result), content_type="application/json")
@csrf_exempt
def delete_server(request):
    """Soft-delete a server by id if it is marked deletable.

    Response JSON: {'isSuccessful': bool} plus 'error' on failure.
    """
    req_json = json.loads(request.body)
    result = {
        'isSuccessful': False
    }
    try:
        server_id = req_json['id']
        server_obj = server.objects.get(server_id=server_id)
        if server_obj.is_deletable:
            # Soft delete: flag the row rather than removing it.
            server_obj.is_delete = True
            server_obj.save()
            result['isSuccessful'] = True
        else:
            result['error'] = 'No access to delete this server'
    except Exception:
        # Narrowed from a bare except; missing id / unknown server / DB
        # errors all report the same generic message as before.
        result['error'] = 'problem while deleting'
    return HttpResponse(json.dumps(result), content_type='application/json')
@csrf_exempt
def get_all_servers(request):
    """List all servers that have not been soft-deleted.

    Response JSON: {'isSuccessful': bool, 'servers': [...]} where each
    entry carries name, id, url and is_deletable.
    """
    result = {
        'isSuccessful': False
    }
    try:
        server_list = server.objects.filter(is_delete=False)
        result['servers'] = [
            {'name': server_obj.server_name, 'id': server_obj.server_id,
             'url': server_obj.server_url,
             'is_deletable': server_obj.is_deletable}
            for server_obj in server_list
        ]
        result['isSuccessful'] = True
    except Exception:
        # Best-effort: DB errors yield isSuccessful=False (narrowed
        # from a bare except).
        pass
    return HttpResponse(json.dumps(result), content_type="application/json")
@csrf_exempt
def get_user_task_history(request):
    """Return task ids and creation dates for the token's user.

    Response JSON: {'isSuccessful': bool, 'tasks': [...]}.
    """
    req_json = json.loads(request.body)
    result = {
        'isSuccessful': False
    }
    try:
        token = req_json['token']
    except KeyError:
        # BUG FIX: this path used to return a bare dict, which is not a
        # valid Django view response; wrap it like every other exit path.
        return HttpResponse(json.dumps(result),
                            content_type="application/json")
    if token:
        try:
            username = auth.extract_username(token)
            task_obj_list = task.objects.filter(user_id=username)
            task_list = []
            for task_obj in task_obj_list:
                task_list.append({
                    'task_id': task_obj.task_id,
                    'time': task_obj.create_time.strftime("%Y-%m-%d")
                })
            result['tasks'] = task_list
            result['isSuccessful'] = True
        except Exception:
            # Best-effort: auth/DB failures yield isSuccessful=False
            # (narrowed from a bare except).
            pass
    return HttpResponse(json.dumps(result), content_type="application/json")
@csrf_exempt
def search_task(request):
    """Search tasks by the keyword taken from the JSON request body."""
    keyword = json.loads(request.body)['keyword']
    payload = {
        'isSuccessful': True,
        'tasks': search_basedon_id(keyword),
    }
    return HttpResponse(json.dumps(payload), content_type="application/json")
@csrf_exempt
def all_test_time(request):
    """List creation times (newest first) of finished tasks of a type."""
    ttype = json.loads(request.body)['ttype']
    time_list = task.objects.filter(
        task_type=ttype, status="finished",
    ).order_by('-create_time').values_list('create_time', flat=True)
    payload = {
        'isSuccessful': True,
        'times': [t.strftime('%Y-%m-%d %H:%M:%S') for t in time_list],
    }
    return HttpResponse(json.dumps(payload), content_type="application/json")
@csrf_exempt
def get_certain_matrix(request):
    """Return the matrix for a test type, optionally at a specific time."""
    req_json = json.loads(request.body)
    ttype = str(req_json['ttype'])
    result = {
        'isSuccessful':True
    }
    # 'time' is optional; None means the latest matrix.
    ttime = None
    if 'time' in req_json:
        ttime = req_json['time']
    print ttime,ttype
    result['matrix'] = form_matrix(ttype, ttime)
    return HttpResponse(json.dumps(result), content_type="application/json")
|
'')
def _get_voltype_specs(self, volume):
"""Get specs suitable for volume creation."""
vtype = volume.get('volume_type_id', None)
extra_specs = None
if vtype:
extra_specs = volume_types.get_volume_type_extra_specs(vtype)
return self._get_specs(extra_specs)
def _get_specs(self, xspecs):
"""Return a dict with extra specs and/or config values."""
result = {}
for spc in ZFSSA_LUN_SPECS:
val = None
prop = spc.split(':')[1]
cfg = 'zfssa_lun_' + prop
if xspecs:
val = xspecs.pop(spc, None)
if val is None:
val = self.configuration.safe_get(cfg)
if val is not None and val != '':
result.update({prop: val})
return result
    def migrate_volume(self, ctxt, volume, host):
        """Attempt ZFSSA backend-assisted volume migration.

        Uses appliance replication to move the LUN when both source and
        destination are suitably configured Oracle backends; otherwise
        returns (False, None) so the manager falls back to generic
        (host-copy) migration.  Returns (True, None) on success.
        """
        LOG.debug('Attempting ZFSSA enabled volume migration. volume: %(id)s, '
                  'host: %(host)s, status=%(status)s.',
                  {'id': volume['id'],
                   'host': host,
                   'status': volume['status']})
        lcfg = self.configuration
        default_ret = (False, None)
        if volume['status'] != "available":
            LOG.debug('Only available volumes can be migrated using backend '
                      'assisted migration. Defaulting to generic migration.')
            return default_ret
        if (host['capabilities']['vendor_name'] != 'Oracle' or
                host['capabilities']['storage_protocol'] != self.protocol):
            LOG.debug('Source and destination drivers need to be Oracle iSCSI '
                      'to use backend assisted migration. Defaulting to '
                      'generic migration.')
            return default_ret
        if 'location_info' not in host['capabilities']:
            LOG.debug('Could not find location_info in capabilities reported '
                      'by the destination driver. Defaulting to generic '
                      'migration.')
            return default_ret
        loc_info = host['capabilities']['location_info']
        # location_info is a colon-separated 6-tuple describing the target.
        try:
            (tgt_host, auth_str, tgt_pool, tgt_project, tgt_tgtgroup,
             tgt_repl_ip) = loc_info.split(':')
        except ValueError:
            LOG.error(_LE("Location info needed for backend enabled volume "
                          "migration not in correct format: %s. Continuing "
                          "with generic volume migration."), loc_info)
            return default_ret
        if tgt_repl_ip == '':
            msg = _LE("zfssa_replication_ip not set in cinder.conf. "
                      "zfssa_replication_ip is needed for backend enabled "
                      "volume migration. Continuing with generic volume "
                      "migration.")
            LOG.error(msg)
            return default_ret
        src_pool = lcfg.zfssa_pool
        src_project = lcfg.zfssa_project
        try:
            LOG.info(_LI('Connecting to target host: %s for backend enabled '
                         'migration.'), tgt_host)
            self.tgt_zfssa.set_host(tgt_host)
            self.tgt_zfssa.login(auth_str)
            # Verify that the replication service is online
            try:
                self.zfssa.verify_service('replication')
                self.tgt_zfssa.verify_service('replication')
            except exception.VolumeBackendAPIException:
                return default_ret
            # ensure that a target group by the same name exists on the target
            # system also, if not, use default migration.
            lun = self.zfssa.get_lun(src_pool, src_project, volume['name'])
            if lun['targetgroup'] != tgt_tgtgroup:
                return default_ret
            tgt_asn = self.tgt_zfssa.get_asn()
            src_asn = self.zfssa.get_asn()
            # verify on the source system that the destination has been
            # registered as a replication target
            tgts = self.zfssa.get_replication_targets()
            targets = []
            for target in tgts['targets']:
                if target['asn'] == tgt_asn:
                    targets.append(target)
            if targets == []:
                LOG.debug('Target host: %(host)s for volume migration '
                          'not configured as a replication target '
                          'for volume: %(vol)s.',
                          {'host': tgt_repl_ip,
                           'vol': volume['name']})
                return default_ret
            # Multiple ips from the same appliance may be configured
            # as different targets
            # NOTE(review): ':216' is presumably the appliance replication
            # port -- confirm against the ZFSSA replication documentation.
            for target in targets:
                if target['address'] == tgt_repl_ip + ':216':
                    break
            if target['address'] != tgt_repl_ip + ':216':
                LOG.debug('Target with replication ip: %s not configured on '
                          'the source appliance for backend enabled volume '
                          'migration. Proceeding with default migration.',
                          tgt_repl_ip)
                return default_ret
            # Run the migration steps as a taskflow so each step can be
            # unwound on failure.
            flow = lf.Flow('zfssa_volume_migration').add(
                MigrateVolumeInit(),
                MigrateVolumeCreateAction(provides='action_id'),
                MigrateVolumeSendReplUpdate(),
                MigrateVolumeSeverRepl(),
                MigrateVolumeMoveVol(),
                MigrateVolumeCleanUp()
            )
            taskflow.engines.run(flow,
                                 store={'driver': self,
                                        'tgt_zfssa': self.tgt_zfssa,
                                        'tgt_pool': tgt_pool,
                                        'tgt_project': tgt_project,
                                        'volume': volume, 'tgt_asn': tgt_asn,
                                        'src_zfssa': self.zfssa,
                                        'src_asn': src_asn,
                                        'src_pool': src_pool,
                                        'src_project': src_project,
                                        'target': target})
            return(True, None)
        except Exception:
            LOG.error(_LE("Error migrating volume: %s"), volume['name'])
            raise
def update_migrated_volume(self, ctxt, volume, new_volume,
original_volume_status):
"""Return model update for migrated volume.
:param volume: The original volume that was migrated to this backend
:param new_volume: The migration volume object that was created on
this backend as part of the migration process
:param original_volume_status: The status of the original volume
:return model_update to update DB with any needed changes
"""
lcfg = self.configuration
original_name = CONF.volume_name_template % volume['id']
current_name = CONF.volume_name_template % new_volume['id']
LOG.debug('Renaming migrated volume: %(cur)s to %(org)s',
{'cur': current_name,
'org': original_name})
self.zfssa.set_lun_props(lcfg.zfssa_pool, lcfg.zfssa_project,
current_name, name=original_name)
return {'_name_id': None}
@utils.synchronized('zfssaiscsi', external=True)
def _check_origin(self, lun, volname):
"""Verify the cache volume of a bootable volume.
If the cache no longer has clone, it will be deleted.
There is a small lag between the time a clone is deleted and the number
of clones being updated accordingly. There is also a race condition
when multiple volumes (clones of a cache volume) are deleted at once,
leading to the number of clones reported incorrectly. The file lock is
here to avoid such issues.
"""
lcfg = self.configuration
cache = lun['origin']
numclones = -1
if (cache['snapshot'].startswith('image-') and
cache['share'].startswith('os-cache-vol')) |
#!/usr/bin/env python
# This Source Code Form is subject to the terms of the Mozilla Public
# License, v. 2.0. If a copy of the MPL was not distributed with this
# file, You can obtain one at http://mozilla.org/MPL/2.0/.
import buildconfig
from mozbuild.preprocessor import preprocess
# By default, the pre-processor used for jar.mn will use "%" as a marker
# for ".css" files and "#" otherwise. This falls apart when a file using
# one marker needs to include a file with the other marker since the
# pre-processor instructions in the included file will not be
# processed. The following SVG files need to include a file which uses
# "%" as the marker, so we invoke the pre-processor ourselves here with
# the marker specified. The resulting SVG files will get packaged by the
# processing of the jar file in the appropriate directory.
def _do_preprocessing(output_svg, input_svg_file, additional_defines):
    """Preprocess *input_svg_file* into *output_svg* using the '%' marker.

    :param output_svg: output file object/path handed to the preprocessor
    :param input_svg_file: SVG source to include
    :param additional_defines: extra preprocessor defines for this file
    :return: the result of mozbuild.preprocessor.preprocess
    """
    # Fix: merge into a fresh dict instead of mutating the caller's
    # argument; buildconfig.defines still wins on key conflicts, matching
    # the original update() order.
    defines = dict(additional_defines)
    defines.update(buildconfig.defines)
    return preprocess(output=output_svg,
                      includes=[input_svg_file],
                      marker='%',
                      defines=defines)
def tab_side_start(output_svg, input_svg_file):
    """Preprocess the SVG with TAB_SIDE defined as 'start'."""
    return _do_preprocessing(output_svg, input_svg_file, {'TAB_SIDE': 'start'})
def tab_side_end(output_svg, input_svg_file):
    """Preprocess the SVG with TAB_SIDE defined as 'end'."""
    return _do_preprocessing(output_svg, input_svg_file, {'TAB_SIDE': 'end'})
|
# Licensed under GPL version 3 - see LICENSE.rst
import numpy as np
from astropy.io import ascii
from .base import FlatOpticalElement, FlatStack
from ..math.utils import norm_vector, e2h, h2e
class FlatBrewsterMirror(FlatOpticalElement):
    '''Flat mirror operated at the Brewster angle.

    Calculation of the Fresnel coefficients can be computationally intense
    and also requires knowledge of the refractive index for the appropriate material.
    The ``FlatBrewsterMirror`` simplifies this for a mirror that is known to be
    operated at the Brewster angle.

    This mirror assumes that all photons arrive at the Brewster angle
    where only s (senkrecht = direction perpendicular to plane of incidence)
    polarisation is reflected.

    It also assumes that all photons that are not reflected (i.e. those that
    are transmitted) are lost. No transmitted photons are returned, instead the
    probability of the reflected photons is adjusted to account for this overall loss.
    '''
    display = {'color': (0., 1., 0.),
               'shape': 'box',
               'box-half': '+x',
               }

    def fresnel(self, photons, intersect, intersection, local):
        '''The incident angle can easily be calculated from e_x and photons['dir'].

        Returns
        -------
        refl_s, refl_p : np.array or float
            Reflection probability for s and p polarized photons.
            Typically, the number will depend on the incident angle and energy
            of each photon and thus the return value will be a vector.
        '''
        # Brewster-angle idealisation: reflect all s, none of p.
        return 1., 0.

    def specific_process_photons(self, photons, intersect, intersection, local):
        # Work only on the photons that actually hit this element.
        directions = norm_vector(photons['dir'].data[intersect])
        # save the direction of the incoming photons as beam_dir
        beam_dir = h2e(directions)
        # reflect the photons (change direction) by transforming to local coordinates
        pos4d_inv = np.linalg.inv(self.pos4d)
        directions = directions.T
        directions = np.dot(pos4d_inv, directions)
        # Flip the local x component: mirror reflection about the element plane.
        directions[0, :] *= -1
        directions = np.dot(self.pos4d, directions)
        new_beam_dir = directions.T
        # split polarization into s and p components
        # - s is s polarization (perpendicular to plane of incidence)
        # - p is p polarization (in the plane of incidence)
        # First, make basis vectors.
        v_s = np.cross(beam_dir, self.geometry['e_x'][0:3])
        v_s /= np.linalg.norm(v_s, axis=1)[:, np.newaxis]
        v_p = np.cross(beam_dir, v_s)
        polarization = h2e(photons['polarization'].data[intersect])
        # Row-wise dot products: polarization components along s and p.
        p_v_s = np.einsum('ij,ij->i', polarization, v_s)
        p_v_p = np.einsum('ij,ij->i', polarization, v_p)
        fresnel_refl_s, fresnel_refl_p = self.fresnel(photons, intersect, intersection, local)
        # Calculate new intensity ~ (E_x)^2 + (E_y)^2
        Es2 = fresnel_refl_s * p_v_s ** 2
        Ep2 = fresnel_refl_p * p_v_p ** 2
        # parallel transport of polarization vector
        # v_s stays the same by definition
        new_v_p = np.cross(h2e(new_beam_dir), v_s)
        # NOTE(review): the new polarization weights the basis vectors by the
        # intensities Es2/Ep2 (squared amplitudes) rather than amplitudes --
        # confirm this is the intended convention.
        new_pol = norm_vector(-Es2[:, np.newaxis] * v_s + Ep2[:, np.newaxis] * new_v_p)
        return {'dir': new_beam_dir,
                'probability': Es2 + Ep2,
                'polarization': e2h(new_pol, 0)}
class MultiLayerEfficiency(FlatOpticalElement):
    '''The Multilayer mirror with varying layer thickness along one axis

    The distance between layers (and thus best reflected energy) changes along
    the local y axis.
    All reflectivity data is assumed to be for a single, desired angle. There
    is currently no way to enter varying reflection that depends on the angle
    of incidence.

    Provide reflectivity data in a file with columns:

    - 'X(mm)' - position along the "changing" axis (local y axis)
    - 'Peak lambda' - wavelength with maximum reflection at a given position
    - 'Peak' - maximum reflection at a given position
    - 'FWHM(nm)' - full width half max, measure of width of reflection Gaussian peaks

    Provide polarization data in a file with columns:

    - 'Photon energy' - energy of the photon in keV
    - 'Polarization' - Fraction polarized in the more reflective direction, so that
      randomly polarized light would have a value of 0.5.

    Parameters
    ----------
    reflFile: string
        path, filename, and .txt extension for reflection data file
    testedPolarization: string
        path, filename, and .txt to a text file containing a table with photon energy
        and fraction polarization for the light used to test the mirrors and create the
        reflectivity file
    '''
    def __init__(self, **kwargs):
        # Pop our own keyword arguments before forwarding the rest to the
        # FlatOpticalElement base class.
        self.fileName = kwargs.pop('reflFile')
        self.polFile = kwargs.pop('testedPolarization')
        super(MultiLayerEfficiency, self).__init__(**kwargs)
    def interp_files(self, photons, local):
        '''Interpolate the calibration tables to each photon.

        Returns the peak wavelength, maximum reflectivity (corrected for the
        polarization of the test light), and FWHM for every photon,
        interpolated along the local y axis.
        '''
        # read in correct reflecting probability file, now in table format
        reflectFile = ascii.read(self.fileName)
        # find reflectivity adjustment due to polarization of light in reflectivity testing
        polarizedFile = ascii.read(self.polFile)
        # NOTE(review): 'Photon energy' is divided by 1000 -- presumably the
        # table is in eV while photons['energy'] is keV; confirm.
        tested_polarized_fraction = np.interp(photons['energy'], polarizedFile['Photon energy'] / 1000, polarizedFile['Polarization'])
        # find probability of being reflected due to position
        local_x = local[:, 0] / np.linalg.norm(self.geometry['v_y'])
        # NOTE(review): the trailing '- 1' appears to shift the file's mm
        # coordinates into the element's local coordinate range -- confirm
        # against the reflectivity file format.
        local_coords_in_file = reflectFile['X(mm)'] / np.linalg.norm(self.geometry['v_y']) - 1
        # interpolate 'Peak lambda', 'Peak' [reflectivity], and 'FWHM(nm)' to the actual photon positions
        peak_wavelength = np.interp(local_x, local_coords_in_file, reflectFile['Peak lambda'])
        max_refl = np.interp(local_x, local_coords_in_file, reflectFile['Peak']) / tested_polarized_fraction
        spread_refl = np.interp(local_x, local_coords_in_file, reflectFile['FWHM(nm)'])
        return peak_wavelength, max_refl, spread_refl
    def specific_process_photons(self, photons, intersect, intersection, local):
        # wavelength is in nm assuming energy is in keV
        wavelength = 1.23984282 / photons['energy'].data[intersect]
        peak_wavelength, max_refl, spread_refl = self.interp_files(photons[intersect], local[intersect])
        # the standard deviation squared of the Gaussian reflectivity functions of each photon's wavelength
        c_squared = (spread_refl ** 2) / (8. * np.log(2))
        # skip the case when there is no Gaussian (this is assumed to just be the zero function)
        c_is_zero = (c_squared == 0)
        refl_prob = np.zeros(len(wavelength))
        refl_prob[~c_is_zero] = max_refl[~c_is_zero] * np.exp(-((wavelength[~c_is_zero] - peak_wavelength[~c_is_zero]) ** 2) / (2 * c_squared[~c_is_zero]))
        # NOTE(review): division by 100 implies the 'Peak' column is in
        # percent -- confirm.
        return {'probability': refl_prob / 100}
class MultiLayerMirror(FlatStack):
    '''Brewster-angle mirror stacked with a multilayer efficiency element.'''
    def __init__(self, **kwargs):
        # Pull out the keywords destined for the efficiency element; the
        # remaining kwargs go to the FlatStack base class.
        efficiency_kwargs = {'reflFile': kwargs.pop('reflFile'),
                             'testedPolarization': kwargs.pop('testedPolarization')}
        element_classes = [FlatBrewsterMirror, MultiLayerEfficiency]
        super(MultiLayerMirror, self).__init__(elements=element_classes,
                                               keywords=[{}, efficiency_kwargs],
                                               **kwargs)
|
def find_token(self, key_set):
""" Return a token that can delete this key set. """
# Do we have a perfect match?
# FIXME: This is really awful...
provided_params = [self.index_of_param(param) for param in token_list_for(key_set)]
provided_params.sort()
provided_params = tuple(provided_params)
if self.token_dict.has_key(provided_params):
return self.token_dict[provided_params]
# FIXME: Come up with a smarter fallback solution count number of
# roundings? instead of contains, return a "closeness" heuristic?
for token in reversed(self.tokens):
if token.contains(key_set):
return token
    def get(self, arg_list, default=None):
        """ Get the value of the cache at arg_list (which can be a tuple).

        A stored entry is only considered valid when every token value that
        was saved alongside it still matches the token's current value;
        otherwise the entry is deleted and `default` is returned.
        """
        if self.disabled:
            return default
        key = self.key(arg_list)
        # gather keys
        keys_to_get = [key] + self._token_keys(arg_list)
        # extract values
        ans_dict = self.cache.get_many(keys_to_get)
        wrapped_value = ans_dict.get(key, self.CACHE_NONE)
        if wrapped_value is self.CACHE_NONE:
            self._miss_hook(arg_list)
            return default
        try:
            # check tokens
            # wrapped_value is [value, token_value, ...]; a length mismatch
            # means it was written under a different token configuration.
            if len(wrapped_value) != len(keys_to_get):
                # shhhh... that value wasn't really there
                self.cache.delete(key)
                self._miss_hook(arg_list)
                return default
            for tvalue, tkey in zip(wrapped_value[1:], keys_to_get[1:]):
                saved_value = ans_dict.get(tkey, self.CACHE_NONE)
                # token mismatch!
                if saved_value is self.CACHE_NONE or saved_value != tvalue:
                    # shhhh... that value wasn't really there
                    self.cache.delete(key)
                    self._miss_hook(arg_list)
                    return default
            # okay, it's good
            self._hit_hook(arg_list)
            return wrapped_value[0]
        except Exception: # Don't die on errors, e.g. if wrapped_value is not a tuple/list
            self._miss_hook(arg_list)
            return default
def set(self, arg_list, value, timeout_seconds=None):
""" Set the value of the cache at arg_list (which can be a tuple). """
if self.disabled:
return
key = self.key(arg_list)
# gather keys
token_keys = self._token_keys(arg_list)
# extract what values we can
# we use get_many here to optimize the common case: all tokens already present
ans_dict = self.cache.get_many(token_keys)
# regenerate missing tokens
for tkey, token in zip(token_keys, self.tokens):
if not ans_dict.has_key(tkey):
ans_dict[tkey] = token.value_args(arg_list)
# gather token values
wrapped_value = [value]
for tkey in token_keys:
wrapped_value.append(ans_dict[tkey])
if timeout_seconds is None:
timeout_seconds = self.timeout_seconds
self.cache.set(key, wrapped_value, timeout_seconds)
set.alters_data = True
def delete(self, arg_list):
""" Delete the value of the cache at arg_list (which can be a tuple). """
key = self.key(arg_list)
self.cache.delete(key)
key_set = {}
for i,arg in enumerate(arg_list):
key_set[self.params[i]] = arg
self.send(key_set=key_set)
delete.alters_data = True
    def delete_key_set(self, key_set):
        """ Delete everything in this key_set, rounding up if necessary.

        When the key_set fully specifies an argument list only that entry
        is deleted; otherwise a matching token is bumped, invalidating every
        cached entry that token covers.
        """
        if settings.CACHE_DEBUG:
            print "Dumping from", self.name, "keyset", key_set
        # TODO: Would be nicer if we could just make a
        # proxy token for the single-element case
        arg_list = self.is_arg_list(key_set)
        if arg_list:
            return self.delete(arg_list)
        else:
            # NOTE(review): find_token may return None when nothing matches,
            # which would raise AttributeError below -- confirm a matching
            # token always exists by the time this runs.
            token = self.find_token(key_set)
            token.delete_key_set(key_set, send_signal=False) # We can send a more accurate signal
            self.send(key_set=key_set)
    delete_key_set.alters_data = True
def delete_key_sets(self, list_or_set):
""" Delete one or multiple (including nested lists) key sets.
- Michael P 11/1/2009
"""
if isinstance(list_or_set, list):
for item in list_or_set:
self.delete_key_sets(item)
else:
self.delete_key_set(list_or_set)
delete_key_sets.alters_data = True
def has_key(self, arg_list):
""" Returns true if arg_list is cached. """
return self.get(arg_list,default=self.CACHE_NONE) is not self.CACHE_NONE
def is_arg_list(self, key_set):
""" Does this key_set specify everything? """
arg_list = []
for param in self.params:
if specifies_key(key_set, param):
arg_list.append(key_set[param])
else:
return False
return arg_list
def depend_on_model(self, Model, key_set={}, create_token=True):
"""
Dump parts of this cache when anything in the Model changes.
key_set is the key_set to dump. If not provided, the entire cache is
dumped. If create_token is True, we ensure a token exists to delete
this.
This function is equivalent to self.depend_on_row(Model, lambda
instance: key_set) with the proper token created. We probably could
replace this function with that, but oh well. It's... marginally more
efficient.
"""
# Silently fail. This means the object has been double-loaded (Thanks,
# Python/Django double-import)
if self.locked:
return
def resolve_depend_on_model(Model):
| if create_token:
self.get_or_create_token(token_list_for(key_set))
def delete_cb(sender, **kwargs):
self.delete_key_sets(key_set)
signals.post_save.connect(delete_cb, sender=Model, weak=False)
signals.pre_delete.connect(delete_cb, sender=Model, weak=False)
add_lazy_depende | ncy(self, Model, resolve_depend_on_model)
depend_on_model.alters_data = True
    def depend_on_row(self, Model, selector, filter=None):
        """
        Depend on a row of a Model when a row of this Model changes.
        When a row is changed, selector is called to determine the key_set
        to evict. selector should be of the form
        def selector(instance):
            return {'class' : instance.parent_class}
        Because of how common the case is, if selector is a string, we create
        the token automatically and use lambda instance : {selector: instance}
        as the mapping function.
        """
        # Silently fail. This means the object has been double-loaded (Thanks,
        # Python/Django double-import)
        if self.locked:
            return
        if filter is None:
            # Default filter accepts every instance.
            filter = lambda instance: True
        # NOTE(review): isinstance(selector, str) does not match unicode
        # strings on Python 2 -- confirm callers only ever pass byte strings.
        if isinstance(selector, str):
            # Special-case this
            # Bind the string to a separate name so the lambda below does not
            # reference the rebound 'selector' recursively.
            selector_str = selector
            selector = lambda instance: {selector_str: instance}
            # Make the token
            token = self.get_or_create_token((selector_str,))
        def resolve_depend_on_row(Model):
            # Deferred hookup: runs once the Model class is available.
            if Model is None:
                raise ValueError("Attempting to depend on Model None... this is a pretty dumb thing to do.")
            def delete_cb(sender, instance, **kwargs):
                # Evict the key set derived from the changed row, unless the
                # filter rejects this instance.
                if not filter(instance):
                    return None
                new_key_set = selector(instance)
                if new_key_set is not None:
                    self.delete_key_sets(new_key_set)
            signals.post_save.connect(delete_cb, sender=Model, weak=False)
            signals.pre_delete.connect(delete_cb, sender=Model, weak=False)
        add_lazy_dependency(self, Model, resolve_depend_on_row)
    depend_on_row.alters_data = True
def depend_on_cache(self, cache_obj, mapping_func, filter=None):
"""
Depend on another |
#!/usr/bin/env python
"""
Download NLTK data

Opens the interactive NLTK downloader so the user can choose which corpora
and models to install.
"""
__author__ = "Manan Kalra"
__email__ = "manankalr29@gmail.com"

import nltk

if __name__ == "__main__":
    # Guard behind __main__ so importing this module does not launch the
    # interactive downloader as a side effect.
    nltk.download()
Fields"))
self.gridLayout_3.addWidget(self.lstSrcFields, 0, 1, 5, 1)
self.btnSrcUp = QtGui.QPushButton(self.groupBox_5)
self.btnSrcUp.setMaximumSize(QtCore.QSize(50, 16777215))
self.btnSrcUp.setText(_fromUtf8(""))
icon = QtGui.QIcon()
icon.addPixmap(QtGui.QPixmap(_fromUtf8(":/plugins/stdm/images/icons/up.png")), QtGui.QIcon.Normal, QtGui.QIcon.Off)
self.btnSrcUp.setIcon(icon)
self.btnSrcUp.setObjectName(_fromUtf8("btnSrcUp"))
self.gridLayout_3.addWidget(self.btnSrcUp, 0, 0, 1, 1)
self.btnSrcNone = QtGui.QPushButton(self.groupBox_5)
self.btnSrcNone.setMaximumSize(QtCore.QSize(50, 16777215))
self.btnSrcNone.setObjectName(_fromUtf8("btnSrcNone"))
self.gridLayout_3.addWidget(self.btnSrcNone, 3, 0, 1, 1)
self.btnSrcDown = QtGui.QPushButton(self.groupBox_5)
self.btnSrcDown.setMaximumSize(QtCore.QSize(50, 16777215))
self.btnSrcDown.setText(_fromUtf8(""))
icon1 = QtGui.QIcon()
icon1.addPixmap(QtGui.QPixmap(_fromUtf8(":/plugins/stdm/images/icons/down.png")), QtGui.QIcon.Normal, QtGui.QIcon.Off)
self.btnSrcDown.setIcon(icon1)
self.btnSrcDown.setObjectName(_fromUtf8("btnSrcDown"))
self.gridLayout_3.addWidget(self.btnSrcDown, 1, 0, 1, 1)
self.btnSrcAll = QtGui.QPushButton(self.groupBox_5)
self.btnSrcAll.setMaximumSize(QtCore.QSize(50, 16777215))
self.btnSrcAll.setObjectName(_fromUtf8("btnSrcAll"))
self.gridLayout_3.addWidget(self.btnSrcAll, 2, 0, 1, 1)
self.gridLayout_6.addWidget(self.groupBox_5, 0, 0, 1, 1)
self.groupBox_6 = QtGui.QGroupBox(self.assignColumns)
self.groupBox_6.setObjectName(_fromUtf8("groupBox_6"))
self.gridLayout_4 = QtGui.QGridLayout(self.groupBox_6)
self.gridLayout_4.setObjectName(_fro | mUtf8("gridLayout_4"))
self.label_3 = QtGui.QLabel(self.groupBox_6)
self.label_3.setObjectName(_fromUtf8("label_3"))
self.gridLayout_4.addWidget(self.label_3, 4, 0, 1, 1)
self.btn_delete_translator = QtGui.QToolButton(self.groupBox_6)
self.btn_delete_translator.setMinimumSize(QtCore.QSize(0, 0))
self.btn_delete_translator.setText(_fromUtf8(""))
icon2 = QtGui.QIcon()
icon2.add | Pixmap(QtGui.QPixmap(_fromUtf8(":/plugins/stdm/images/icons/remove.png")), QtGui.QIcon.Normal, QtGui.QIcon.Off)
self.btn_delete_translator.setIcon(icon2)
self.btn_delete_translator.setPopupMode(QtGui.QToolButton.InstantPopup)
self.btn_delete_translator.setObjectName(_fromUtf8("btn_delete_translator"))
self.gridLayout_4.addWidget(self.btn_delete_translator, 4, 3, 1, 1)
self.btn_add_translator = QtGui.QToolButton(self.groupBox_6)
self.btn_add_translator.setMinimumSize(QtCore.QSize(0, 0))
self.btn_add_translator.setText(_fromUtf8(""))
icon3 = QtGui.QIcon()
icon3.addPixmap(QtGui.QPixmap(_fromUtf8(":/plugins/stdm/images/icons/add.png")), QtGui.QIcon.Normal, QtGui.QIcon.Off)
self.btn_add_translator.setIcon(icon3)
self.btn_add_translator.setPopupMode(QtGui.QToolButton.InstantPopup)
self.btn_add_translator.setObjectName(_fromUtf8("btn_add_translator"))
self.gridLayout_4.addWidget(self.btn_add_translator, 4, 1, 1, 1)
self.btn_edit_translator = QtGui.QToolButton(self.groupBox_6)
self.btn_edit_translator.setMinimumSize(QtCore.QSize(0, 0))
self.btn_edit_translator.setText(_fromUtf8(""))
icon4 = QtGui.QIcon()
icon4.addPixmap(QtGui.QPixmap(_fromUtf8(":/plugins/stdm/images/icons/edit.png")), QtGui.QIcon.Normal, QtGui.QIcon.Off)
self.btn_edit_translator.setIcon(icon4)
self.btn_edit_translator.setPopupMode(QtGui.QToolButton.InstantPopup)
self.btn_edit_translator.setObjectName(_fromUtf8("btn_edit_translator"))
self.gridLayout_4.addWidget(self.btn_edit_translator, 4, 2, 1, 1)
self.btnDestUp = QtGui.QPushButton(self.groupBox_6)
self.btnDestUp.setMaximumSize(QtCore.QSize(50, 16777215))
self.btnDestUp.setText(_fromUtf8(""))
self.btnDestUp.setIcon(icon)
self.btnDestUp.setObjectName(_fromUtf8("btnDestUp"))
self.gridLayout_4.addWidget(self.btnDestUp, 0, 4, 1, 1)
self.btnDestDown = QtGui.QPushButton(self.groupBox_6)
self.btnDestDown.setMaximumSize(QtCore.QSize(50, 16777215))
self.btnDestDown.setText(_fromUtf8(""))
self.btnDestDown.setIcon(icon1)
self.btnDestDown.setObjectName(_fromUtf8("btnDestDown"))
self.gridLayout_4.addWidget(self.btnDestDown, 1, 4, 1, 1)
self.chk_virtual = QtGui.QCheckBox(self.groupBox_6)
self.chk_virtual.setObjectName(_fromUtf8("chk_virtual"))
self.gridLayout_4.addWidget(self.chk_virtual, 3, 0, 1, 4)
self.lstTargetFields = QtGui.QListWidget(self.groupBox_6)
self.lstTargetFields.setMinimumSize(QtCore.QSize(0, 250))
self.lstTargetFields.setSelectionMode(QtGui.QAbstractItemView.SingleSelection)
self.lstTargetFields.setSelectionBehavior(QtGui.QAbstractItemView.SelectRows)
self.lstTargetFields.setObjectName(_fromUtf8("lstTargetFields"))
self.gridLayout_4.addWidget(self.lstTargetFields, 0, 0, 3, 4)
self.gridLayout_6.addWidget(self.groupBox_6, 0, 1, 1, 1)
frmImport.addPage(self.assignColumns)
self.retranslateUi(frmImport)
QtCore.QMetaObject.connectSlotsByName(frmImport)
def retranslateUi(self, frmImport):
frmImport.setWindowTitle(_translate("frmImport", "Import to STDM", None))
self.pgSource.setTitle(_translate("frmImport", "Source Data", None))
self.pgSource.setSubTitle(_translate("frmImport", "Specify the location of the source file and representative data type.", None))
self.groupBox.setTitle(_translate("frmImport", "Source:", None))
self.label.setText(_translate("frmImport", "Dataset", None))
self.btnBrowseSource.setText(_translate("frmImport", "Browse", None))
self.groupBox_2.setTitle(_translate("frmImport", "Destination Repository Type:", None))
self.rbTextType.setText(_translate("frmImport", "Textual Data", None))
self.rbSpType.setText(_translate("frmImport", "Spatial Data", None))
self.destTable.setTitle(_translate("frmImport", "Copy Table", None))
self.destTable.setSubTitle(_translate("frmImport", "Destination table and import options.", None))
self.groupBox_3.setTitle(_translate("frmImport", "Select Destination Table:", None))
self.groupBox_4.setTitle(_translate("frmImport", "Options:", None))
self.label_2.setText(_translate("frmImport", "Geometry Column:", None))
self.rbAppend.setText(_translate("frmImport", "A&ppend Data", None))
self.rbOverwrite.setText(_translate("frmImport", "&Overwrite Existing", None))
self.assignColumns.setTitle(_translate("frmImport", "Assign Columns", None))
self.assignColumns.setSubTitle(_translate("frmImport", "Match source and destination table columns.", None))
self.groupBox_5.setTitle(_translate("frmImport", "Source Table:", None))
self.btnSrcNone.setText(_translate("frmImport", "Non&e", None))
self.btnSrcAll.setText(_translate("frmImport", "&All", None))
self.groupBox_6.setTitle(_translate("frmImport", "Destination Table:", None))
self.label_3.setText(_translate("frmImport", "Value translators:", None))
self.btn_delete_translator.setToolTip(_translate("frmImport", "Delete value translator", None))
self.btn_add_translator.setToolTip(_translate("frmImport", "Add value translator", None))
self.btn_add_translator.setWhatsThis(_translate("frmImport", "<html><head/><body><p>Use value translators to transform the value from the corresponding source table column in order to adopt it to a format that adaptible to the destination column.</p></body></html>", None))
self.btn_edit_translator.setToolTip(_translate("frmImport", "Edit value translator", |
#############################################
##
## Copyright (C) 2013 Digia Plc and/or its subsidiary(-ies).
## Contact: http://www.qt-project.org/legal
##
## This file is part of Qt Creator.
##
## Commercial License Usage
## Licensees holding valid commercial Qt licenses may use this file in
## accordance with the commercial license agreement provided with the
## Software or, alternatively, in accordance with the terms contained in
## a written agreement between you and Digia. For licensing terms and
## conditions see http://qt.digia.com/licensing. For further information
## use the contact form at http://qt.digia.com/contact-us.
##
## GNU Lesser General Public License Usage
## Alternatively, this file may be used under the terms of the GNU Lesser
## General Public License version 2.1 as published by the Free Software
## Foundation and appearing in the file LICENSE.LGPL included in the
## packaging of this file. Please review the following information to
## ensure the GNU Lesser General Public License version 2.1 requirements
## will be met: http://www.gnu.org/licenses/old-licenses/lgpl-2.1.html.
##
## In addition, as a special exception, Digia gives you certain additional
## rights. These rights are described in the Digia Qt LGPL Exception
## version 1.1, included in the file LGPL_EXCEPTION.txt in this package.
##
#############################################################################
source("../../shared/qtcreator.py")
source("../../shared/suites_qtta.py")
# test bookmark functionality
def renameBookmarkFolder(view, item, newName):
    """Rename the given bookmark folder via its context menu."""
    invokeContextMenuItemOnBookmarkFolder(view, item, "Rename Folder")
    # The inline editor that appears after choosing "Rename Folder".
    editorName = ":Add Bookmark.treeView_QExpandingLineEdit"
    replaceEditorContent(waitForObject(editorName), newName)
    type(waitForObject(editorName), "<Return>")
def invokeContextMenuItemOnBookmarkFolder(view, item, menuItem):
    """Open the context menu of a bookmark folder item and activate menuItem."""
    aboveWidget = "{name='line' type='QFrame' visible='1' window=':Add Bookmark_BookmarkDialog'}"
    # Select the item first so the context menu targets the right folder.
    mouseClick(waitForObjectItem(view, item), 5, 5, 0, Qt.LeftButton)
    openItemContextMenu(view, item, 5, 5, 0)
    activateItem(waitForObject("{aboveWidget=%s type='QMenu' unnamed='1' visible='1' "
                               "window=':Add Bookmark_BookmarkDialog'}" % aboveWidget), menuItem)
def getQModelIndexStr(textProperty, container):
    """Build a Squish QModelIndex locator string.

    Container names that start with ':' are symbolic names and get quoted;
    anything else (e.g. a nested locator) is embedded verbatim.
    """
    if container.startswith(":"):
        container = "'{0}'".format(container)
    return "{{column='0' container={0} {1} type='QModelIndex'}}".format(
        container, textProperty)
def main():
startApplication("qtcreator" + SettingsPath)
if not startedWithoutPluginError():
return
# goto help mode and click on topic
switchViewTo(ViewConstants.HELP)
manualQModelIndex = getQModelIndexStr("text?='Qt Creator Manual *'",
":Qt Creator_QHelpContentWidget")
doubleClick(manualQModelIndex, 5, 5, 0, Qt.LeftButton)
gettingStartedQModelIndex = getQModelIndexStr("text='Getting Started'", manualQModelIndex)
doubleClick(gettingStartedQModelIndex, 5, 5, 0, Qt.LeftButton)
mouseClick(waitForObject(getQModelIndexStr("text='Building and Running an Example'",
gettingStartedQModelIndex)), 5, 5, 0, Qt.LeftButton)
# open bookmarks window
clickButton(waitForObject(":Qt Creator.Add Bookmark_QToolButton"))
clickButton(waitForObject(":Add Bookmark.ExpandBookmarksList_QToolButton"))
# create root bookmark directory
clickButton(waitForObject(":Add Bookmark.New Folder_QPushButton"))
# rename root bookmark directory
bookmarkView = waitForObject(":Add Bookmark.treeView_QTreeView")
renameBookmarkFolder(bookmarkView, "New Folder*", "Sample")
# create two more subfolders
clickButton(waitForObject(":Add Bookmark.New Folder_QPushButton"))
renameBookmarkFolder(bookmarkView, "Sample.New Folder*", "Folder 1")
clickButton(waitForObject(":Add Bookmark.New Folder_QPushButton"))
renameBookmarkFolder(bookmarkView, "Sample.Folder 1.New Folder*", "Folder 2")
clickButton(waitForObject(":Add Bookmark.OK_QPushButton"))
mouseClick(manualQModelIndex, 5, 5, 0, Qt.LeftButton)
type(waitForObject(":Qt Creator_QHelpContentWidget"), "<Down>")
clickButton(waitForObject(":Qt Creator.Add Bookmark_QToolButton"))
clickButton(waitForObject(":Add Bookmark.ExpandBookmarksList_QToolButton"))
# click on "Sample" and create new directory under it
mouseClick | (waitForObject(getQModelIndexStr("text='Sample'", ":Add Bookmark.tr | eeView_QTreeView")))
clickButton(waitForObject(":Add Bookmark.New Folder_QPushButton"))
clickButton(waitForObject(":Add Bookmark.OK_QPushButton"))
# choose bookmarks
mouseClick(waitForObjectItem(":Qt Creator_Core::Internal::CommandComboBox", "Bookmarks"))
# verify if all folders are created and bookmarks present
sampleQModelIndex = getQModelIndexStr("text='Sample'", ":Qt Creator_Bookmarks_TreeView")
folder1QModelIndex = getQModelIndexStr("text='Folder 1'", sampleQModelIndex)
folder2QModelIndex = getQModelIndexStr("text='Folder 2'", folder1QModelIndex)
bldRunQModelIndex = getQModelIndexStr("text?='QtCreator : Building and Running an Example*'",
folder2QModelIndex)
newFolderQModelIndex = getQModelIndexStr("text='New Folder'", sampleQModelIndex)
manualQModelIndex = getQModelIndexStr("text='QtCreator : Qt Creator Manual'",
newFolderQModelIndex)
test.verify(checkIfObjectExists(sampleQModelIndex, verboseOnFail = True) and
checkIfObjectExists(folder1QModelIndex, verboseOnFail = True) and
checkIfObjectExists(folder2QModelIndex, verboseOnFail = True) and
checkIfObjectExists(bldRunQModelIndex, verboseOnFail = True) and
checkIfObjectExists(manualQModelIndex, verboseOnFail = True),
"Verifying if all folders and bookmarks are present")
mouseClick(waitForObject(":Qt Creator_Bookmarks_TreeView"), 5, 5, 0, Qt.LeftButton)
for i in range(6):
type(waitForObject(":Qt Creator_Bookmarks_TreeView"), "<Right>")
type(waitForObject(":Qt Creator_Bookmarks_TreeView"), "<Return>")
test.verify("QtCreator : Building and Running an Example" in str(waitForObject(":Qt Creator_Help::Internal::HelpViewer").title),
"Verifying if first bookmark is opened")
mouseClick(waitForObject(bldRunQModelIndex))
type(waitForObject(":Qt Creator_Bookmarks_TreeView"), "<Down>")
type(waitForObject(":Qt Creator_Bookmarks_TreeView"), "<Right>")
type(waitForObject(":Qt Creator_Bookmarks_TreeView"), "<Down>")
type(waitForObject(":Qt Creator_Bookmarks_TreeView"), "<Return>")
test.verify("QtCreator : Qt Creator Manual" in str(waitForObject(":Qt Creator_Help::Internal::HelpViewer").title),
"Verifying if second bookmark is opened")
# delete previously created directory
clickButton(waitForObject(":Qt Creator.Add Bookmark_QToolButton"))
clickButton(waitForObject(":Add Bookmark.ExpandBookmarksList_QToolButton"))
invokeContextMenuItemOnBookmarkFolder(":Add Bookmark.treeView_QTreeView", "Sample.Folder 1",
"Delete Folder")
clickButton(waitForObject("{container=':Add Bookmark.treeView_QTreeView' text='Yes' "
"type='QPushButton' unnamed='1' visible='1'}"))
# close bookmarks
clickButton(waitForObject(":Add Bookmark.OK_QPushButton"))
# choose bookmarks from command combobox
mouseClick(waitForObject(":Qt Creator_Core::Internal::CommandComboBox"))
mouseClick(waitForObjectItem(":Qt Creator_Core::Internal::CommandComboBox", "Bookmarks"))
# verify if folders and bookmark deleted
test.verify(checkIfObjectExists(sampleQModelIndex, verboseOnFail = True) and
checkIfObjectExists(folder1QModelIndex, shouldExist = False, verboseOnFail = True) and
checkIfObjectExists(folder2QModelIndex, shouldExist = False, verboseOnFail = True) and
checkIfObjectExists(bldRunQModelIndex, shouldExist = False, verboseOnF |
import numpy | as np
def rainbow(n):
    """
    Returns a list of colors sampled at equal intervals over the spectrum.
    Parameters
    ----------
    n : int
        The number of colors to return
    Returns
    -------
    R : (n,3) array
        An array of rows of RGB color values
    Notes
    -----
    Converts from HSV coordinates (0, 1, 1) to (1, 1, 1) to RGB. Based on
    the Sage function of the same name.
    """
    from matplotlib import colors
    # Build a 1 x n x 3 HSV image: hue sweeps [0, 1) while saturation and
    # value stay fixed at 1.
    R = np.ones((1,n,3))
    R[0,:,0] = np.linspace(0, 1, n, endpoint=False)
    #Note: could iterate and use colorsys.hsv_to_rgb
    # NOTE(review): for n == 1 squeeze() collapses the result to shape (3,),
    # not (1, 3) -- confirm callers handle that.
    return colors.hsv_to_rgb(R).squeeze()
|
import time
from copy import copy
import numpy as np
from gridworld import Gridworld, Position
from utilities.utils import argmax_random_tie
# START
# Tabular Q-learning on a windy gridworld.
rows = 7
cols = 10
world = Gridworld(rows, cols)
# Available actions; the diagonal moves are disabled (commented out).
moveset = [
    # Position(1, 1),
    # Position(1, -1),
    # Position(-1, -1),
    # Position(-1, 1),
    Position(1, 0), # down
    Position(0, -1), # left
    Position(0, 1), # right
    Position(-1, 0) # up
]
alpha = 0.2  # learning rate
e = 0.3  # epsilon for epsilon-greedy action selection
# NOTE(review): lambd is used as the discount factor (conventionally gamma)
# in the update below; 0.2 is unusually low -- confirm intended.
lambd = 0.2
e_decay = 0.9999  # multiplicative epsilon decay, applied once per episode
n_episodes = 100000
print_every = 10000
# Q table indexed as q[row][col][action].
q = np.zeros((rows, cols, len(moveset)))
for episode in range(n_episodes):
    print("episode {}".format(episode))
    world.reset()
    s = world.pos
    found = False
    e *= e_decay
    step = 0
    while not found:
        step += 1
        # Apply the wind before the agent acts.
        world.wind()
        # Epsilon-greedy: random action with probability e, otherwise greedy
        # with random tie-breaking.
        a = np.random.randint(0, len(moveset)) if np.random.uniform() < e else argmax_random_tie(q[s.row][s.col])
        r = world.move(moveset[a])
        sn = world.pos
        # Off-policy Q-learning update (max over next-state actions).
        q[s.row][s.col][a] += alpha * (r + lambd * np.max(q[sn.row][sn.col]) - q[s.row][s.col][a])
        s = copy(sn)
        if s == world.goal:
            found = True
            # Occasionally render short successful episodes for inspection.
            if episode % print_every == 0 and step < 30:
                print(world)
                time.sleep(0.1)
    if found:
        print("found goal, epsilon {} in steps {}".format(e, step))
        time.sleep(3)
|
"""Tests for greeneye_monitor sensors."""
from unittest.mock import AsyncMock, MagicMock
from homeassistant.components.greeneye_monitor.sensor import (
DATA_PULSES,
DATA_WATT_SECONDS,
)
from homeassistant.const import STATE_UNKNOWN
from homeassistant.core import HomeAssistant
from homeassistant.helpers.entity_registry import async_get as get_entity_registry
from .common import (
SINGLE_MONITOR_CONFIG_POWER_SENSORS,
SINGLE_MONITOR_CONFIG_PULSE_COUNTERS,
SINGLE_MONITOR_CONFIG_TEMPERATURE_SENSORS,
SINGLE_MONITOR_CONFIG_VOLTAGE_SENSORS,
SINGLE_MONITOR_SERIAL_NUMBER,
mock_monitor,
setup_greeneye_monitor_component_with_config,
)
from .conftest import assert_sensor_state
async def test_disable_sensor_before_monitor_connected(
    hass: HomeAssistant, monitors: AsyncMock
) -> None:
    """Test that a sensor disabled before its monitor connected stops listening for new monitors."""
    # The sensor base class handles connecting the monitor, so we test this with a single voltage sensor for ease
    await setup_greeneye_monitor_component_with_config(
        hass, SINGLE_MONITOR_CONFIG_VOLTAGE_SENSORS
    )
    # No monitor has connected yet, so the entity is still listening on the
    # monitors object for its monitor to appear.
    assert len(monitors.listeners) == 1
    await disable_entity(hass, "sensor.voltage_1")
    assert len(monitors.listeners) == 0 # Make sure we cleaned up the listener
async def test_updates_state_when_monitor_connected(
    hass: HomeAssistant, monitors: AsyncMock
) -> None:
    """Test that a sensor updates its state when its monitor first connects."""
    # The sensor base class handles updating the state on connection, so we test this with a single voltage sensor for ease
    await setup_greeneye_monitor_component_with_config(
        hass, SINGLE_MONITOR_CONFIG_VOLTAGE_SENSORS
    )
    # Before the monitor connects the sensor has no data.
    assert_sensor_state(hass, "sensor.voltage_1", STATE_UNKNOWN)
    assert len(monitors.listeners) == 1
    connect_monitor(monitors, SINGLE_MONITOR_SERIAL_NUMBER)
    assert len(monitors.listeners) == 0 # Make sure we cleaned up the listener
    assert_sensor_state(hass, "sensor.voltage_1", "120.0")
async def test_disable_sensor_after_monitor_connected(
    hass: HomeAssistant, monitors: AsyncMock
) -> None:
    """Test that a sensor disabled after its monitor connected stops listening for sensor changes."""
    # The sensor base class handles connecting the monitor, so we test this with a single voltage sensor for ease
    await setup_greeneye_monitor_component_with_config(
        hass, SINGLE_MONITOR_CONFIG_VOLTAGE_SENSORS
    )
    monitor = connect_monitor(monitors, SINGLE_MONITOR_SERIAL_NUMBER)
    # Once connected, the entity listens on the monitor object itself.
    assert len(monitor.listeners) == 1
    await disable_entity(hass, "sensor.voltage_1")
    # Disabling the entity must unsubscribe it from the monitor.
    assert len(monitor.listeners) == 0
async def test_updates_state_when_sensor_pushes(
    hass: HomeAssistant, monitors: AsyncMock
) -> None:
    """Test that a sensor entity updates its state when the underlying sensor pushes an update."""
    # The sensor base class handles triggering state updates, so we test this with a single voltage sensor for ease
    await setup_greeneye_monitor_component_with_config(
        hass, SINGLE_MONITOR_CONFIG_VOLTAGE_SENSORS
    )
    monitor = connect_monitor(monitors, SINGLE_MONITOR_SERIAL_NUMBER)
    assert_sensor_state(hass, "sensor.voltage_1", "120.0")
    # Simulate the monitor pushing a new reading to its listeners.
    monitor.voltage = 119.8
    monitor.notify_all_listeners()
    assert_sensor_state(hass, "sensor.voltage_1", "119.8")
async def test_power_sensor_initially_unknown(
    hass: HomeAssistant, monitors: AsyncMock
) -> None:
    """Test that the power sensor can handle its initial state being unknown (since the GEM API needs at least two packets to arrive before it can compute watts)."""
    await setup_greeneye_monitor_component_with_config(
        hass, SINGLE_MONITOR_CONFIG_POWER_SENSORS
    )
    connect_monitor(monitors, SINGLE_MONITOR_SERIAL_NUMBER)
    # State is unknown but the watt-second attribute is already populated.
    assert_sensor_state(
        hass, "sensor.channel_1", STATE_UNKNOWN, {DATA_WATT_SECONDS: 1000}
    )
    # This sensor was configured with net metering on, so we should be taking the
    # polarized value
    assert_sensor_state(
        hass, "sensor.channel_two", STATE_UNKNOWN, {DATA_WATT_SECONDS: -400}
    )
async def test_power_sensor(hass: HomeAssistant, monitors: AsyncMock) -> None:
    """Test that a power sensor reports its values correctly, including handling net metering."""
    await setup_greeneye_monitor_component_with_config(
        hass, SINGLE_MONITOR_CONFIG_POWER_SENSORS
    )
    monitor = connect_monitor(monitors, SINGLE_MONITOR_SERIAL_NUMBER)
    # Push the same wattage on the first two channels, then notify listeners
    # (all watts set before any notification, matching the update order).
    for channel in monitor.channels[:2]:
        channel.watts = 120.0
    for channel in monitor.channels[:2]:
        channel.notify_all_listeners()
    assert_sensor_state(hass, "sensor.channel_1", "120.0", {DATA_WATT_SECONDS: 1000})
    # Channel two was configured with net metering on, so it reports the
    # polarized watt-second counter.
    assert_sensor_state(hass, "sensor.channel_two", "120.0", {DATA_WATT_SECONDS: -400})
async def test_pulse_counter(hass: HomeAssistant, monitors: AsyncMock) -> None:
    """Verify pulse counters report rates converted to their configured units."""
    await setup_greeneye_monitor_component_with_config(
        hass, SINGLE_MONITOR_CONFIG_PULSE_COUNTERS
    )
    connect_monitor(monitors, SINGLE_MONITOR_SERIAL_NUMBER)
    assert_sensor_state(hass, "sensor.pulse_a", "10.0", {DATA_PULSES: 1000})
    # 0.5 gal per pulse at 10 pulses/sec shown per minute -> 300 gal/min
    assert_sensor_state(hass, "sensor.pulse_2", "300.0", {DATA_PULSES: 1000})
    # 0.5 gal per pulse at 10 pulses/sec shown per hour -> 18000 gal/hr
    assert_sensor_state(hass, "sensor.pulse_3", "18000.0", {DATA_PULSES: 1000})
async def test_temperature_sensor(hass: HomeAssistant, monitors: AsyncMock) -> None:
    """Verify temperature sensors convert from their native unit to hass's unit."""
    await setup_greeneye_monitor_component_with_config(
        hass, SINGLE_MONITOR_CONFIG_TEMPERATURE_SENSORS
    )
    connect_monitor(monitors, SINGLE_MONITOR_SERIAL_NUMBER)
    # The config declares the sensor reports Fahrenheit; when set up properly,
    # HA converts the reading to Celsius by default.
    assert_sensor_state(hass, "sensor.temp_a", "0.0")
async def test_voltage_sensor(hass: HomeAssistant, monitors: AsyncMock) -> None:
    """Test that a voltage sensor reports its values properly."""
    await setup_greeneye_monitor_component_with_config(
        hass, SINGLE_MONITOR_CONFIG_VOLTAGE_SENSORS
    )
    # mock monitor reports 120.0 V, which should appear verbatim as the state
    connect_monitor(monitors, SINGLE_MONITOR_SERIAL_NUMBER)
    assert_sensor_state(hass, "sensor.voltage_1", "120.0")
def connect_monitor(monitors: AsyncMock, serial_number: int) -> MagicMock:
    """Simulate a monitor connecting to Home Assistant.

    Returns the mock monitor API object.
    """
    new_monitor = mock_monitor(serial_number)
    monitors.add_monitor(new_monitor)
    return new_monitor
async def disable_entity(hass: HomeAssistant, entity_id: str) -> None:
    """Disable the given entity and wait for hass to settle."""
    registry = get_entity_registry(hass)
    registry.async_update_entity(entity_id, disabled_by="user")
    await hass.async_block_till_done()
|
def extractFaelicyTumblrCom(item):
    """
    Parser for 'faelicy.tumblr.com'
    """
    title = item['title']
    vol, chp, frag, postfix = extractVolChapterFragmentPostfix(title)
    # skip items with no chapter/volume info, and preview posts
    if "preview" in title.lower() or not (chp or vol):
        return None
    tagmap = [
        ('the scum villain\'s self saving system', 'the scum villain\'s self saving system', 'translated'),
        ('PRC', 'PRC', 'translated'),
        ('Loiterous', 'Loiterous', 'oel'),
    ]
    for tagname, name, tl_type in tagmap:
        if tagname not in item['tags']:
            continue
        return buildReleaseMessageWithType(
            item, name, vol, chp, frag=frag, postfix=postfix, tl_type=tl_type)
    return False
import os
from conans.client.file_copier import FileCopier, report_copied_files
from conans.model.manifest import FileTreeManifest
from conans.paths import CONANINFO
from conans.util.files import mkdir, save
def export_pkg(conanfile, package_id, src_package_folder, hook_manager, conanfile_path, ref):
    """Copy an already-built package from a user folder into the cache.

    Runs the pre/post package hooks, writes the conaninfo and manifest, and
    returns the package revision (the manifest's summary hash).
    """
    # NOTE: The layout folder is not taken into account for the cache, it is not useful to introduce
    # a subfolder there.
    mkdir(conanfile.package_folder)
    out = conanfile.output
    out.info("Exporting to cache existing package from user folder")
    out.info("Package folder %s" % conanfile.package_folder)
    hook_manager.execute("pre_package", conanfile=conanfile, conanfile_path=conanfile_path,
                         reference=ref, package_id=package_id)
    file_copier = FileCopier([src_package_folder], conanfile.package_folder)
    file_copier("*", symlinks=True)
    hook_manager.execute("post_package", conanfile=conanfile, conanfile_path=conanfile_path,
                         reference=ref, package_id=package_id)
    conaninfo_path = os.path.join(conanfile.package_folder, CONANINFO)
    save(conaninfo_path, conanfile.info.dumps())
    manifest = FileTreeManifest.create(conanfile.package_folder)
    manifest.save(conanfile.package_folder)
    report_files_from_manifest(out, manifest)
    out.success("Package '%s' created" % package_id)
    prev = manifest.summary_hash
    out.info("Created package revision %s" % prev)
    return prev
def update_package_metadata(prev, layout, package_id, rrev):
    """Record the package revision and its recipe revision in the cache metadata."""
    with layout.update_metadata() as metadata:
        package_meta = metadata.packages[package_id]
        package_meta.revision = prev
        package_meta.recipe_revision = rrev
def report_files_from_manifest(output, manifest):
    """Log the files contained in *manifest*, excluding the conaninfo file.

    Warns (instead of reporting) when the package contains no real files.
    """
    copied_files = list(manifest.files())
    # BUG FIX: list.remove() raises ValueError when the element is missing;
    # only drop the conaninfo entry if the manifest actually contains it.
    if CONANINFO in copied_files:
        copied_files.remove(CONANINFO)
    if not copied_files:
        output.warn("No files in this package!")
        return
    report_copied_files(copied_files, output, message_suffix="Packaged")
|
8
#import pygame
from Tkinter import *
import ttk
import time
from PIL import ImageTk,Image
from functools import partial
import os
import tkMessageBox
from urllib2 import *
from threading import Thread
import urllib as u
from window import *
############################################################################################ İNTERNET BAĞLANTISI KONTROL
def netControl():
    """Return True when an HTTP request to example.com succeeds, else False.

    Used as a coarse internet-connectivity check before the GUI starts.
    """
    try:
        u.urlopen("http://example.com")
        return True
    except Exception as e:
        # BUG FIX: not every exception carries a `.message` attribute (and it
        # is deprecated in Python 2.6+), so printing e.message could itself
        # raise AttributeError inside the handler. Print the exception object.
        print(e)
        return False
# Abort immediately without a connection: the tools below all do online lookups.
if(not netControl()):
    tkMessageBox.showwarning("Hata","Bu programı şu an internet bağlantısı olmadan kullanamazsınız!")
    # NOTE(review): `sys` is not imported explicitly here; presumably it comes
    # from one of the star imports above (e.g. `from window import *`) — verify.
    sys.exit(0)
############################################################################################
####################################################################################### ANA SINIF
class NoteStudio:
    """Main application: shows a fading splash banner, then builds the tool menu."""
    def __init__(self):
        # `pencere` is Turkish for "window"; it is the single Tk root
        self.pencere = Tk()
        self.rgb = "#008aff"
        # to center the 712x142 splash window on the screen
        self.h = ((self.pencere.winfo_screenheight())/2)-(142/2)
        self.w = ((self.pencere.winfo_screenwidth())/2)-(712/2)
        self.pencere.overrideredirect(1)
        self.pencere.resizable(width = FALSE,height = FALSE)
        self.pencere.geometry("712x142+{0}+{1}".format(self.w,self.h))
        self.pencere.title("NoteStudio 1.0")
        self.pencere.iconbitmap("image/logo.ico")
        self.img = ImageTk.PhotoImage(Image.open("image/banner.png"))
        self.panel = Label(self.pencere,image = self.img)
        self.panel.pack(side = "bottom", fill = "both", expand = "yes")
        # start the fade effect immediately, then switch to the main UI after 1.5s
        self.pencere.after(0,partial(self.efekt,0.1,0,durum = 1))
        self.pencere.after(1500,self.start)
        self.pencere.mainloop()
    def efekt(self,alfa,sayac = 0,durum = 0,event = None): # animated splash-screen fade
        """Fade the window in (durum=1) or out (durum=0); sayac counts completed cycles."""
        if(sayac < 1):
            if(durum):
                self.pencere.wm_attributes('-alpha',alfa)
                alfa += 0.1
                if(alfa>=0.9):
                    durum = 0
                    self.pencere.after(50,partial(self.efekt,0.9,sayac+1,durum))
                else:
                    self.pencere.after(50,partial(self.efekt,alfa,sayac,durum))
            else:
                self.pencere.wm_attributes('-alpha',alfa)
                alfa -= 0.1
                if(alfa<=0.0):
                    durum = 1
                    self.pencere.after(50,partial(self.efekt,alfa,sayac,durum))
                else:
                    self.pencere.after(50,partial(self.efekt,alfa,sayac,durum))
        else:
            # animation finished: leave the window fully opaque
            self.pencere.wm_attributes('-alpha',1)
    def start(self):
        """Tear down the splash and build the 810x600 main menu window."""
        self.h = ((self.pencere.winfo_screenheight())/2)-300
        self.w = ((self.pencere.winfo_screenwidth())/2)-400
        self.panel.destroy()
        self.img = ImageTk.PhotoImage(Image.open("image/background.png"))
        self.panel = Label(self.pencere,image = self.img)
        self.panel.place(x = 0,
                         y = 0)
        self.pencere.wm_attributes('-alpha',1)
        self.pencere.geometry("810x600+{0}+{1}".format(self.w,self.h))
        self.pencere.overrideredirect(False)
        self.pencere.tk_setPalette("black")
        # fetch the public IP in the background so the UI does not block
        Thread(target = self.ip,args =(),).start()
        self.banner = Label(self.pencere,
                            text = "© NoteStudio 1.1",
                            bg = self.rgb,
                            fg = "black")
        self.banner.pack(side = BOTTOM,fill = X)
        # one entry per tool: button caption ("buton"), window title/text,
        # prompt label ("bilgi") and the backend function name ("fonk")
        self.islemListe = [{"buton":"Whois Çekme",
                            #"pencere":self.Pencere,
                            "title":"NoteStudio Whois",
                            "text":"Whois bilgisi çekme",
                            "bilgi":"IP adresi yada Domain",
                            "fonk":"whois"},
                           {"buton":"CloudFlare\nTespiti",
                            #"pencere":self.Pencere,
                            "title":"NoteStudio CloudFlare",
                            "text":"Hedefte CloudFlare Tespiti",
                            "bilgi":"IP adresi yada Domain",
                            "fonk":"cloudflare"},
                           {"buton":"IP location",
                            #"pencere":self.Pencere,
                            "title":"NoteStudio IPlocation",
                            "text":"IP adresinden yer bulma",
                            "bilgi":"IP adresi girin:",
                            "fonk":"location"},
                           {"buton":"HoneyPot",
                            #"pencere":self.Pencere,
                            "title":"NoteStudio HoneyPot",
                            "text":"Hedef sistemde HoneyPot oranı",
                            "bilgi":"IP adresi",
                            "fonk":"honeypot"},
                           {"buton":"HTTP Header Grabber",
                            #"pencere":self.Pencere,
                            "title":"NoteStudio HeaderGrabber",
                            "text":"Web sitesi başlık bilgileri",
                            "bilgi":"IP adresi yada Domain",
                            "fonk":"header"},
                           #["Port Scan",self.Pencere,"NoteStudio PortScan","Hedef sistem port tarama","IP adresi yada Domain"],
                           {"buton":"Robots.txt",
                            #"pencere":self.Pencere,
                            "title":"NoteStudio robots.txt",
                            "text":"Hedef sistemde robots.txt tespiti",
                            "bilgi":"Domain (http(s)://) ile yazın",
                            "fonk":"robot"},
                           {"buton":"Link Grabber",
                            #"pencere":self.Pencere,
                            "title":"NoteStudio LinkGrabber",
                            "text":"Hedef sistemde link taraması",
                            "bilgi":"IP adresi yada Domain",
                            "fonk":"link"},
                           {"buton":"Traceroute",
                            #"pencere":self.Pencere,
                            "title":"NoteStudio TraceRoute",
                            "text":"Hedef sisteme giden yolu izleme",
                            "bilgi":"IP adresi yada Domain",
                            "fonk":"trace"},
                           {"buton":"Zone Transfer",
                            #"pencere":self.Pencere,
                            "title":"NoteStudio ZoneTransfer",
                            "text":"Hedef sistem zone tespiti",
                            "bilgi":"IP adresi yada Domain",
                            "fonk":"zone"},
                           ]
        # build one Window button per tool entry; `no` positions it in the grid
        sira = 0
        for i in self.islemListe:
            Window(master = self.pencere,
                   no = sira,
                   text = i["buton"],
                   pTitle = i["title"],
                   pText = i["text"],
                   pBilgi = i["bilgi"],
                   #command = i["pencere"],
                   fonksiyon = i["fonk"] or None)
            sira += 1
            if(sira>=len(self.islemListe)):
                break
        # "About" and "Exit" buttons get their commands wired up manually
        hakkindaB = Window(master = self.pencere,
                           no = 9,
                           text = "Hakkında/Beni Oku",
                           pTitle = "Hakkında",
                           pText = "Hakkında",
                           pBilgi = "Hakkında")
        hakkindaB.buton["command"] = self.hakkinda
        cikisB = Window(master = self.pencere,
                        no = 10,
                        text = "Çıkış",
                        pTitle = "Çıkış",
                        pText = "Çıkış",
                        pBilgi = "Çıkış")
        cikisB.buton["command"] = self.cik
    def ip(self):
        """Fetch the machine's public IP and append it to the footer banner."""
        ipAdres = u.urlopen("http://ipv4bot.whatismyipaddress.com").read()
        self.banner["text"] = self.banner["text"] + " | IP: {}".format(ipAdres)
    def hakkinda(self):
        """Show the "About" message box."""
        mesaj = "NoteStudio 1.1"
        tkMessageBox.showinfo("NoteStudio",mesaj)
    def cik(self):
        """Destroy the window and terminate the process."""
        self.pencere.destroy()
        sys.exit(0)
Note |
# -*- coding: utf-8 -*-
from __future__ import unicode_literals
from django.db import models, migrations
class Migration(migrations.Migration):
    """Drop the standalone Address model and move its fields onto Recipient."""

    dependencies = [
        ('customers', '0001_initial'),
    ]
    operations = [
        # remove the FK first so the Address model can be deleted
        migrations.RemoveField(
            model_name='address',
            name='recipient',
        ),
        migrations.DeleteModel(
            name='Address',
        ),
        migrations.AddField(
            model_name='recipient',
            name='address_line1',
            field=models.CharField(max_length=45, verbose_name=b'Address line 1', blank=True),
            preserve_default=True,
        ),
        migrations.AddField(
            model_name='recipient',
            name='address_line2',
            field=models.CharField(max_length=45, verbose_name=b'Address line 2', blank=True),
            preserve_default=True,
        ),
        migrations.AddField(
            model_name='recipient',
            name='city',
            field=models.CharField(max_length=50, blank=True),
            preserve_default=True,
        ),
        migrations.AddField(
            model_name='recipient',
            name='country',
            field=models.CharField(max_length=40, verbose_name=b'Country', blank=True),
            preserve_default=True,
        ),
        migrations.AddField(
            model_name='recipient',
            name='postal_code',
            field=models.CharField(max_length=10, verbose_name=b'Postal Code', blank=True),
            preserve_default=True,
        ),
        migrations.AddField(
            model_name='recipient',
            name='state_province',
            field=models.CharField(max_length=40, verbose_name=b'State/Province', blank=True),
            preserve_default=True,
        ),
    ]
|
# ENH quadpy-optimize
import pathlib
from ...helpers import article
from .._helpers import _read, register
# bibliographic source shared by all kim_song_* schemes below
_source = article(
    authors=["KyoungJoong Kim", "ManSuk Song"],
    title="Symmetric quadrature formulas over a unit disk",
    journal="Korean J. Comp. & Appl. Math.",
    year="1997",
    volume="4",
    pages="179-192",
    url="https://doi.org/10.1007/BF03011388",
)
# directory containing the kim_song_NN.json data files
this_dir = pathlib.Path(__file__).resolve().parent
# Each scheme simply loads its precomputed quadrature data from the matching
# kim_song_NN.json file next to this module.
def kim_song_1():
    return _read(this_dir / "kim_song_01.json", _source)
def kim_song_2():
    return _read(this_dir / "kim_song_02.json", _source)
def kim_song_3():
    return _read(this_dir / "kim_song_03.json", _source)
def kim_song_4():
    return _read(this_dir / "kim_song_04.json", _source)
def kim_song_5():
    return _read(this_dir / "kim_song_05.json", _source)
def kim_song_6():
    return _read(this_dir / "kim_song_06.json", _source)
def kim_song_7():
    return _read(this_dir / "kim_song_07.json", _source)
# TODO find issue
def kim_song_8():
    return _read(this_dir / "kim_song_08.json", _source)
def kim_song_9():
    return _read(this_dir / "kim_song_09.json", _source)
def kim_song_10():
    return _read(this_dir / "kim_song_10.json", _source)
def kim_song_11():
    return _read(this_dir / "kim_song_11.json", _source)
def kim_song_12():
    return _read(this_dir / "kim_song_12.json", _source)
def kim_song_13():
    return _read(this_dir / "kim_song_13.json", _source)
def kim_song_14():
    return _read(this_dir / "kim_song_14.json", _source)
def kim_song_15():
    return _read(this_dir / "kim_song_15.json", _source)
# make every scheme discoverable through the package's central registry
register(
    [
        kim_song_1,
        kim_song_2,
        kim_song_3,
        kim_song_4,
        kim_song_5,
        kim_song_6,
        kim_song_7,
        kim_song_8,
        kim_song_9,
        kim_song_10,
        kim_song_11,
        kim_song_12,
        kim_song_13,
        kim_song_14,
        kim_song_15,
    ]
)
|
"""
download a file named filename from the atsc301 downloads directory
and save it as a local file with the same name.
command line example::
python -m a301utils.a301_readfile photon_data.csv
module example::
from a301utils.a301_readfile import download
download('photon_data.csv')
"""
import argparse
import requests
from pathlib import Path
import sys
import os
import shutil
def download(filename):
    """
    copy file filename from http://clouds.eos.ubc.ca/~phil/courses/atsc301/downloads to
    the local directory

    Parameters
    ----------

    filename: string
      name of file to fetch from the downloads directory

    Returns
    -------

    None. Side effect: creates a copy of that file in the local directory.
    Raises Exception when the HTTP request fails.
    """
    url = 'https://clouds.eos.ubc.ca/~phil/courses/atsc301/downloads/{}'.format(filename)
    filepath = Path('./{}'.format(filename))
    if filepath.exists():
        the_size = filepath.stat().st_size
        print(('\n{} already exists\n'
               'and is {} bytes\n'
               'will not overwrite\n').format(filename, the_size))
        return None

    tempfile = str(filepath) + '_tmp'
    temppath = Path(tempfile)
    # BUG FIX: check the response BEFORE opening the local file, so a failed
    # request no longer leaves an empty "<name>_tmp" file behind.
    response = requests.get(url, stream=True)
    if not response.ok:
        print('response: ', response)
        raise Exception('Something is wrong, requests.get() failed with filename {}'.format(filename))
    try:
        with open(tempfile, 'wb') as localfile:
            # stream in 1 KiB chunks to keep memory use flat for large files
            for block in response.iter_content(1024):
                if not block:
                    break
                localfile.write(block)
        the_size = temppath.stat().st_size
        if the_size < 10.e3:
            print('Warning -- your file is tiny (smaller than 10 Kbyte)\nDid something go wrong?')
        shutil.move(tempfile, filename)
    except Exception:
        # don't leave a partial download lying around
        if temppath.exists():
            temppath.unlink()
        raise
    the_size = filepath.stat().st_size
    print('downloaded {}\nsize = {}'.format(filename, the_size))
    return None
if __name__ == "__main__":
    # use the module docstring (with raw line breaks preserved) as CLI help
    parser = argparse.ArgumentParser(
        formatter_class=argparse.RawTextHelpFormatter,
        description=__doc__.lstrip())
    parser.add_argument('filename', type=str, help='name of file to download')
    cli_args = parser.parse_args()
    download(cli_args.filename)
|
"""
Sensors of a KNX Device.
For more details about this platform, please refer to the documentation at
https://home-assistant.io/components/knx/
"""
from enum import Enum
import logging
import voluptuous as vol
from homeassistant.const import (
CONF_NAME, CONF_MAXIMUM, CONF_MINIMUM,
CONF_TYPE, TEMP_CELSIUS
)
from homeassist | ant.components.knx import (KNXConfig, KNXGroupAddress)
from homeassistant.components.sensor import PLATFORM_SCHEMA
import homeassistant.helpers.config_validation as cv
_LOGGER = logging.getLogger(__name__)
DEPENDENCIES = ['knx']
DEFAULT_NAME = "KNX sensor"
# configuration keys for the supported sensor value types
CONF_TEMPERATURE = 'temperature'
CONF_ADDRESS = 'address'
CONF_ILLUMINANCE = 'illuminance'
CONF_PERCENTAGE = 'percentage'
CONF_SPEED_MS = 'speed_ms'
class KNXAddressType(Enum):
    """Enum to indicate conversion type for the KNX address."""

    FLOAT = 1    # 2-byte float telegram, decoded by convert_float
    PERCENT = 2  # 1-byte scaled percentage telegram, decoded by convert_percent
# define the fixed settings required for each sensor type
FIXED_SETTINGS_MAP = {
    # Temperature as defined in KNX Standard 3.10 - 9.001 DPT_Value_Temp
    CONF_TEMPERATURE: {
        'unit': TEMP_CELSIUS,
        'default_minimum': -273,
        'default_maximum': 670760,
        'address_type': KNXAddressType.FLOAT
    },
    # Speed m/s as defined in KNX Standard 3.10 - 9.005 DPT_Value_Wsp
    CONF_SPEED_MS: {
        'unit': 'm/s',
        'default_minimum': 0,
        'default_maximum': 670760,
        'address_type': KNXAddressType.FLOAT
    },
    # Luminance(LUX) as defined in KNX Standard 3.10 - 9.004 DPT_Value_Lux
    CONF_ILLUMINANCE: {
        'unit': 'lx',
        'default_minimum': 0,
        'default_maximum': 670760,
        'address_type': KNXAddressType.FLOAT
    },
    # Percentage(%) as defined in KNX Standard 3.10 - 5.001 DPT_Scaling
    CONF_PERCENTAGE: {
        'unit': '%',
        'default_minimum': 0,
        'default_maximum': 100,
        'address_type': KNXAddressType.PERCENT
    }
}
# valid values for CONF_TYPE are exactly the keys of the map above
SENSOR_TYPES = set(FIXED_SETTINGS_MAP.keys())
PLATFORM_SCHEMA = PLATFORM_SCHEMA.extend({
    vol.Required(CONF_TYPE): vol.In(SENSOR_TYPES),
    vol.Required(CONF_ADDRESS): cv.string,
    vol.Optional(CONF_NAME, default=DEFAULT_NAME): cv.string,
    vol.Optional(CONF_MINIMUM): vol.Coerce(float),
    vol.Optional(CONF_MAXIMUM): vol.Coerce(float)
})
def setup_platform(hass, config, add_devices, discovery_info=None):
    """Set up the KNX Sensor platform."""
    sensor = KNXSensor(hass, KNXConfig(config))
    add_devices([sensor])
class KNXSensor(KNXGroupAddress):
    """Representation of a KNX Sensor device."""

    def __init__(self, hass, config):
        """Initialize a KNX Float Sensor.

        Looks up the fixed settings (unit, default bounds, conversion
        function) for the configured sensor type; raises NotImplementedError
        for an unknown sensor type or address type.
        """
        # set up the KNX Group address
        KNXGroupAddress.__init__(self, hass, config)
        device_type = config.config.get(CONF_TYPE)
        sensor_config = FIXED_SETTINGS_MAP.get(device_type)
        if not sensor_config:
            raise NotImplementedError()
        # set up the conversion function based on the address type
        address_type = sensor_config.get('address_type')
        if address_type == KNXAddressType.FLOAT:
            self.convert = convert_float
        elif address_type == KNXAddressType.PERCENT:
            self.convert = convert_percent
        else:
            raise NotImplementedError()
        # other settings: unit and configurable min/max plausibility bounds
        self._unit_of_measurement = sensor_config.get('unit')
        default_min = float(sensor_config.get('default_minimum'))
        default_max = float(sensor_config.get('default_maximum'))
        self._minimum_value = config.config.get(CONF_MINIMUM, default_min)
        self._maximum_value = config.config.get(CONF_MAXIMUM, default_max)
        _LOGGER.debug(
            "%s: configured additional settings: unit=%s, "
            "min=%f, max=%f, type=%s",
            self.name, self._unit_of_measurement,
            self._minimum_value, self._maximum_value, str(address_type)
        )
        self._value = None

    @property
    def state(self):
        """Return the Value of the KNX Sensor."""
        return self._value

    @property
    def unit_of_measurement(self):
        """Return the defined Unit of Measurement for the KNX Sensor."""
        return self._unit_of_measurement

    def update(self):
        """Update KNX sensor."""
        super().update()
        self._value = None
        # BUG FIX: this previously read `if self._data:`, which is falsy for a
        # raw value of 0 — so the dedicated zero branch below was unreachable
        # and a valid 0 reading left the state as None. Check for None instead.
        if self._data is not None:
            if self._data == 0:
                value = 0
            else:
                value = self.convert(self._data)
            # discard readings outside the configured plausibility bounds
            if self._minimum_value <= value <= self._maximum_value:
                self._value = value

    @property
    def cache(self):
        """We don't want to cache any Sensor Value."""
        return False
def convert_float(raw_value):
    """Conversion for 2 byte floating point values.

    2byte Floating Point KNX Telegram.
    Defined in KNX 3.7.2 - 3.10
    """
    from knxip.conversion import knx2_to_float
    from knxip.core import KNXException
    try:
        return knx2_to_float(raw_value)
    except KNXException as err:
        _LOGGER.error("Can't convert %s to float (%s)", raw_value, err)
        return None
def convert_percent(raw_value):
    """Conversion for scaled byte values.

    1byte percentage scaled KNX Telegram.
    Defined in KNX 3.7.2 - 3.10.
    """
    try:
        scaled = raw_value[0]
    except (IndexError, ValueError):
        # pknx returns a non-iterable type for unsuccessful reads
        _LOGGER.error("Can't convert %s to percent value", raw_value)
        scaled = 0
    return round(scaled * 100 / 255)
|
if program_exists('brew') and 'jobber ' in \
sp_check_output(['brew', 'services']):
self._servicectl = BrewServiceCtl()
elif program_exists('launchctl'):
self._servicectl = LaunchctlServiceCtl()
else:
raise Exception("Cannot determine how to control Jobber service")
    @property
    def _root_jobfile_path(self):
        # root's jobfile lives at ~root/.jobber
        root_entry = pwd.getpwuid(0)
        return os.path.join(root_entry.pw_dir, '.jobber')
    @property
    def _normuser_jobfile_path(self):
        # the non-root test user's jobfile lives at ~<_NORMUSER>/.jobber
        normuser_entry = pwd.getpwnam(_NORMUSER)
        return os.path.join(normuser_entry.pw_dir, '.jobber')
    def make_tempfile_dir(self):
        # make temp-file dir; 0777 so files inside are reachable regardless of
        # which user a test job runs as
        self._tmpfile_dir = tempfile.mkdtemp()
        os.chmod(self._tmpfile_dir, _OCTAL_777)
    def rm_tempfile_dir(self):
        # delete the temp-file dir and everything in it
        shutil.rmtree(self._tmpfile_dir)
        self._tmpfile_dir = None
def make_tempfile(self, create=False):
path = os.path.join(self._tmpfile_dir,
"tmp-{0}".format(self._next_tmpfile_nbr))
self._next_tmpfile_nbr += 1
if create:
open(path, "w").close()
return path
    def stop_service(self):
        # delegate to the platform-specific service controller
        self._servicectl.stop_jobber()
    def restart_service(self):
        """Restart the jobber service and block until it responds (up to ~20s)."""
        # restart jobber service
        try:
            self._servicectl.restart_jobber()
        except Exception as e:
            self.print_debug_info()
            raise e
        # wait for it to be ready: poll `jobber list` once a second
        started = False
        stop_time = time.time() + 20
        while time.time() < stop_time and not started:
            args = [self._jobber_path, 'list']
            proc = sp.Popen(args, stdout=sp.PIPE, stderr=sp.PIPE)
            _, err = proc.communicate()
            if proc.returncode == 0:
                started = True
            else:
                time.sleep(1)
        if not started:
            msg = "Failed to start jobber service!"
            msg += " ('jobber list' returned '{0}')".\
                format(err.strip())
            self.print_debug_info()
            raise AssertionError(msg)
        # sometimes not all jobberrunner procs have started yet
        time.sleep(2)
def print_debug_info(self):
log = ''
# get service status
log += "Jobber service status:\n"
try:
log += self._servicectl.get_jobber_status()
except Exception as e:
log += "[{0}]".format(e)
# get syslog msgs
log += "\n\njobbermaster logs:\n"
try:
log += self._servicectl.get_jobbermaster_logs()
except Exception as e:
log += "[{0}]".format(e)
# get jobberrunner logs
log_files = [
_RUNNER_LOG_FILE_FOR_ROOT,
_RUNNER_LOG_FILE_FOR_NORMUSER,
]
for lf in log_files:
log += "\n\n{0}:\n".format(lf)
try:
with open(lf) as f:
| log += f.read()
except Exception as e:
log += "[{0}]".format(e)
# get config file
log += "\nConfig:\n"
try:
with open(_CONFIG_PATH) as f:
tmp = f.read()
if len(tmp) == 0:
log += "[empty]"
else:
log += tmp
except Exception as e:
lo | g += "[{0}]".format(e)
print(log)
    def make_jobfile(self, job_name, cmd, time="*", \
                     notify_on_error=False, notify_on_success=False,
                     notify_output_path=None, file_run_log_path=None,
                     stdout_output_dir=None, stdout_output_max_age=None,
                     stderr_output_dir=None, stderr_output_max_age=None,
                     unix_result_sink_path=None, tcp_result_sink_port=None,
                     log_path=None):
        """Build a version-1.4 jobfile containing one job, returned as a JSON string.

        The notify_*/stdout_*/stderr_*/unix_*/tcp_* parameters each add an
        optional result sink attached according to the notify_on_* flags.
        """
        jobfile = {
            'version': '1.4',
            'prefs': {},
            'jobs': {}
        }
        # make jobs section
        job = {
            'cmd': cmd,
            'time': time,
            'notifyOnError': [],
            'notifyOnSuccess': []
        }
        jobfile['jobs'][job_name] = job
        # make prefs section
        if log_path is not None:
            jobfile['prefs']['logPath'] = log_path
        def install_result_sink(sink):
            # attach *sink* to every job, on the lists chosen by notify_on_*
            for job_name in jobfile['jobs']:
                job = jobfile['jobs'][job_name]
                if notify_on_error:
                    job['notifyOnError'].append(sink)
                if notify_on_success:
                    job['notifyOnSuccess'].append(sink)
        if notify_output_path is not None:
            # make notify program: a generated script that records results
            # NOTE(review): `output_path` below appears unused — verify.
            output_path = self.make_tempfile()
            notify_prog = _NOTIFY_PROGRAM.format(notify_output_path=\
                notify_output_path)
            shebang = "#!" + self._python_path + "\n"
            notify_prog = shebang + notify_prog
            notify_prog_path = self.make_tempfile()
            with open(notify_prog_path, 'w') as f:
                f.write(notify_prog)
            os.chmod(notify_prog_path, _OCTAL_755)
            # make result sink
            result_sink = {
                'type': 'program',
                'path': notify_prog_path
            }
            install_result_sink(result_sink)
            print("Contents of {0}:\n{1}".\
                  format(notify_prog_path, notify_prog))
        if file_run_log_path is not None:
            jobfile['prefs']['runLog'] = {'type': 'file', 'path': file_run_log_path}
        if stdout_output_dir is not None:
            result_sink = {
                'type': 'filesystem',
                'path': stdout_output_dir,
                'maxAgeDays': int(stdout_output_max_age),
                'data': ['stdout']
            }
            install_result_sink(result_sink)
        if stderr_output_dir is not None:
            result_sink = {
                'type': 'filesystem',
                'path': stderr_output_dir,
                'maxAgeDays': int(stderr_output_max_age),
                'data': ['stderr']
            }
            install_result_sink(result_sink)
        if unix_result_sink_path is not None:
            result_sink = {
                'type': 'socket',
                'proto': 'unix',
                'address': unix_result_sink_path
            }
            install_result_sink(result_sink)
        if tcp_result_sink_port is not None:
            result_sink = {
                'type': 'socket',
                'proto': 'tcp',
                'address': tcp_result_sink_port
            }
            install_result_sink(result_sink)
        return json.dumps(jobfile)
    def install_jobfile(self, contents, for_root=False, reload=True, exp_num_jobs=1):
        """Write *contents* as a jobfile for root or the normal user, then
        optionally reload jobber and assert *exp_num_jobs* jobs were loaded."""
        # make jobfile
        if for_root:
            with open(self._root_jobfile_path, 'w') as f:
                f.write(contents)
        else:
            pwnam = pwd.getpwnam(_NORMUSER)
            # temporarily switch effective ids so the file is owned by the user
            os.setegid(pwnam.pw_gid)
            os.seteuid(pwnam.pw_uid)
            try:
                with open(self._normuser_jobfile_path, 'w') as f:
                    f.write(contents)
                os.chmod(self._normuser_jobfile_path, _OCTAL_600)
            finally:
                # restore root: euid first, since only root may change egid
                # NOTE(review): ordering assumption — confirm on target OS
                os.seteuid(0)
                os.setegid(0)
        # reload it (as the matching user)
        if reload:
            if for_root:
                output = sp_check_output([self._jobber_path, 'reload'])
            else:
                output = sp_check_output(['sudo', '-u', _NORMUSER, \
                    self._jobber_path, 'reload'])
            print(output)
            # `jobber reload` reports "Loaded N jobs"-style output; take field 1
            num_jobs = int(output.split()[1])
            # check number of loaded jobs
            if num_jobs != exp_num_jobs:
                msg = ("Failed to load jobfile: expected to load {0} jobs " + \
                    "but loaded {1}").format(exp_num_jobs, num_jobs)
                raise AssertionError(msg)
def rm_jobfiles(self):
# rm jobfile
if os.path.exists(self._root_jobfile_path):
os.unlink(self._root_jobfile_path)
if os.path.exists(self._normuser_ |
s:
# - DNS
# - Record Set (uses DNS SDK)
# - Azure Graph
    def legacy_credentials(self, scope):
        """Wrap a track-2 token in the track-1 BasicTokenAuthentication shape."""
        # Track 2 SDKs use tuple
        token = self.get_token((scope + '.default'))
        return BasicTokenAuthentication(token={'access_token': token.token})
    @property
    def tenant_id(self):
        # type: (None) -> str
        """Return the cached tenant id."""
        return self._tenant_id
    @property
    def auth_params(self):
        # type: (None) -> str
        """Return the cached authentication parameters."""
        return self._auth_params
    @property
    def subscription_id(self):
        # type: (None) -> str
        """Return the cached subscription id."""
        return self._subscription_id
class Session:
    def __init__(self, subscription_id=None, authorization_file=None,
                 cloud_endpoints=None, resource_endpoint_type=constants.DEFAULT_AUTH_ENDPOINT):
        """
        :param subscription_id: If provided overrides environment variables.
        :param authorization_file: Path to file populated from 'get_functions_auth_string'
        :param cloud_endpoints: List of endpoints for specified Azure Cloud. Defaults to public.
        :param resource_endpoint_type: Resource endpoint for OAuth token.
        """
        self._provider_cache = {}
        self.subscription_id_override = subscription_id
        # credentials are created lazily by _initialize_session()
        self.credentials = None
        self.authorization_file = authorization_file
        self.cloud_endpoints = cloud_endpoints or AZURE_PUBLIC_CLOUD
        self.resource_endpoint_type = resource_endpoint_type
        self.resource_endpoint = self.get_auth_endpoint(resource_endpoint_type)
        self.storage_endpoint = self.cloud_endpoints.suffixes.storage_endpoint
    def _initialize_session(self):
        """
        Creates a session using available authentication type.
        """
        # Only run once
        if self.credentials is not None:
            return
        try:
            self.credentials = AzureCredential(
                self.cloud_endpoints,
                authorization_file=self.authorization_file,
                subscription_id_override=self.subscription_id_override)
        except Exception as e:
            if hasattr(e, 'message'):
                log.error(e.message)
            else:
                log.exception("Failed to authenticate.")
            # authentication failure is fatal: exit rather than continue unusable
            sys.exit(1)
        if self.credentials is None:
            log.error('Failed to authenticate.')
            sys.exit(1)
def get_session_for_resource(self, resource):
return Session(
subscription_id=self.subscription_id_override,
authorization_file=self.authorization_file,
cloud_endpoints=self.cloud_endpoints,
resource_endpoint_type=resource)
    @lru_cache()
    # NOTE(review): lru_cache on an instance method keys on `self` and keeps
    # every Session alive for the cache's lifetime (ruff B019) — confirm intended.
    def client(self, client, vault_url=None):
        """Instantiate and cache an SDK client given its dotted class path."""
        self._initialize_session()
        service_name, client_name = client.rsplit('.', 1)
        svc_module = importlib.import_module(service_name)
        klass = getattr(svc_module, client_name)
        # inspect the constructor to decide which SDK generation this is:
        # track-1 ("legacy") clients take `credentials`; track-2 take `credential`
        klass_parameters = inspect.signature(klass).parameters
        legacy = False
        if 'credentials' in klass_parameters and 'tenant_id' in klass_parameters:
            client = klass(credentials=self.credentials.legacy_credentials(self.resource_endpoint),
                           tenant_id=self.credentials.tenant_id,
                           base_url=self.resource_endpoint)
            legacy = True
        elif 'credentials' in klass_parameters:
            client = klass(credentials=self.credentials.legacy_credentials(self.resource_endpoint),
                           subscription_id=self.credentials.subscription_id,
                           base_url=self.cloud_endpoints.endpoints.resource_manager)
            legacy = True
        else:
            client_args = {
                'credential': self.credentials,
                'raw_response_hook': log_response_data,
                'retry_policy': C7nRetryPolicy(),
                'credential_scopes': [self.resource_endpoint + ".default"]
            }
            # TODO: remove when fixed: https://github.com/Azure/azure-sdk-for-python/issues/17351
            # This workaround will replace used api-version for costmanagement requests
            # 2020-06-01 is not supported, but 2019-11-01 is working as expected.
            if client == 'azure.mgmt.costmanagement.CostManagementClient':
                client_args['raw_request_hook'] = cost_query_override_api_version
            if 'subscription_id' in klass_parameters:
                client_args['subscription_id'] = self.subscription_id
                client_args['base_url'] = self.cloud_endpoints.endpoints.resource_manager
            elif 'vault_url' in klass_parameters:
                client_args['vault_url'] = vault_url
            client = klass(**client_args)
        if legacy:
            # Override send() method to log request limits & custom retries
            service_client = client._client
            service_client.orig_send = service_client.send
            service_client.send = types.MethodType(custodian_azure_send_override, service_client)
            # Don't respect retry_after_header to implement custom retries
            service_client.config.retry_policy.policy.respect_retry_after_header = False
        return client
    @property
    def subscription_id(self):
        """Return the subscription id, authenticating lazily on first use."""
        self._initialize_session()
        return self.credentials.subscription_id
    def get_credentials(self):
        """Return the credential object, authenticating lazily on first use."""
        self._initialize_session()
        return self.credentials
    def get_subscription_id(self):
        """Return the subscription id, authenticating lazily on first use."""
        self._initialize_session()
        return self.credentials.subscription_id
def get_function_target_subscription_name(self):
self._initialize_session()
if constants.ENV_FUNCTION_MANAGEMENT_GROUP_NAME in os.environ:
return os.environ[constants.ENV_FUNCTION_MANAGEMENT_GROUP_NAME]
return os.environ.get(constants.ENV_FUNCTION_SUB_ID, self.subscription_id)
    def get_function_target_subscription_ids(self):
        """Return the subscription ids a Functions deployment should target."""
        self._initialize_session()
        # a management group expands to all of its member subscriptions
        if constants.ENV_FUNCTION_MANAGEMENT_GROUP_NAME in os.environ:
            return ManagedGroupHelper.get_subscriptions_list(
                os.environ[constants.ENV_FUNCTION_MANAGEMENT_GROUP_NAME], self)
        return [os.environ.get(constants.ENV_FUNCTION_SUB_ID, self.subscription_id)]
    def resource_api_version(self, resource_id):
        """ latest non-preview api version for resource """
        namespace = ResourceIdParser.get_namespace(resource_id)
        resource_type = ResourceIdParser.get_resource_type(resource_id)
        cache_id = namespace + resource_type
        # results are memoized per provider+type for the session's lifetime
        if cache_id in self._provider_cache:
            return self._provider_cache[cache_id]
        resource_client = self.client('azure.mgmt.resource.ResourceManagementClient')
        provider = resource_client.providers.get(namespace)
        # The api version may be directly provided
        if not provider.resource_types and resource_client.providers.api_version:
            return resource_client.providers.api_version
        rt = next((t for t in provider.resource_types
            if StringUtils.equal(t.resource_type, resource_type)), None)
        if rt and rt.api_versions:
            # prefer the newest stable version; fall back to a preview one
            versions = [v for v in rt.api_versions if 'preview' not in v.lower()]
            api_version = versions[0] if versions else rt.api_versions[0]
            self._provider_cache[cache_id] = api_version
            return api_version
    def get_tenant_id(self):
        """Return the tenant id, authenticating lazily on first use."""
        self._initialize_session()
        return self.credentials.tenant_id
def get_functions_auth_string(self, target_subscription_id):
"""Build auth json string for deploying Azure Functions.
Look for dedicated Functions environment variables or fall
back to normal Service Principal variables.
"""
self._initialize_session()
function_auth_variables = [
constants.ENV_FUNCTION_TENANT_ID,
constants.ENV_FUNCTION_CLIENT_ID,
constants.ENV_FUNCTION_CLIENT_SECRET
]
required_params = ['client_id', 'client_secret', 'tenant_id']
function_auth_params = {k: v for k, v in self.credentials.auth_params.items()
|
#!/usr/bin/python
# -*- coding: utf-8 -*-
# (c) 2015, Matt Martz <matt@sivel.net>
#
# This file is part of Ansible
#
# Ansible is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# Ansible is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with Ansible. If not, see <http://www.gnu.org/licenses/>.
# Ansible module metadata: community-supported, 'preview' status.
ANSIBLE_METADATA = {'status': ['preview'],
                    'supported_by': 'community',
                    'version': '1.0'}

# Module documentation consumed by ansible-doc (YAML embedded in a string).
DOCUMENTATION = '''
---
module: expect
version_added: 2.0
short_description: Executes a command and responds to prompts
description:
     - The C(expect) module executes a command and responds to prompts
     - The given command will be executed on all selected nodes. It will not be
       processed through the shell, so variables like C($HOME) and operations
       like C("<"), C(">"), C("|"), and C("&") will not work
options:
  command:
    description:
      - the command module takes command to run.
    required: true
  creates:
    description:
      - a filename, when it already exists, this step will B(not) be run.
    required: false
  removes:
    description:
      - a filename, when it does not exist, this step will B(not) be run.
    required: false
  chdir:
    description:
      - cd into this directory before running the command
    required: false
  responses:
    description:
      - Mapping of expected string/regex and string to respond with. If the
        response is a list, successive matches return successive
        responses. List functionality is new in 2.1.
    required: true
  timeout:
    description:
      - Amount of time in seconds to wait for the expected strings
    default: 30
  echo:
    description:
      - Whether or not to echo out your response strings
    default: false
requirements:
  - python >= 2.6
  - pexpect >= 3.3
notes:
  - If you want to run a command through the shell (say you are using C(<),
    C(>), C(|), etc), you must specify a shell in the command such as
    C(/bin/bash -c "/path/to/something | grep else")
  - The question, or key, under I(responses) is a python regex match. Case
    insensitive searches are indicated with a prefix of C(?i)
  - By default, if a question is encountered multiple times, it's string
    response will be repeated. If you need different responses for successive
    question matches, instead of a string response, use a list of strings as
    the response. The list functionality is new in 2.1
author: "Matt Martz (@sivel)"
'''

# Usage examples rendered by ansible-doc.
EXAMPLES = '''
# Case insensitve password string match
- expect:
    command: passwd username
    responses:
      (?i)password: "MySekretPa$$word"
# Generic question with multiple different responses
- expect:
    command: /path/to/custom/command
    responses:
      Question:
        - response1
        - response2
        - response3
'''
import datetime
import os
try:
import pexpect
HAS_PEXPECT = True
except I | mportError:
HAS_PEXPECT = False
from ansible.module_utils._text import to_text
from ansible.module_utils.basic import AnsibleModule
from ansible.module_utils.pycompat24 import get_exception
def response_closure(module, question, responses):
    """Return a pexpect event callback yielding successive responses.

    Each invocation of the returned callable produces the next entry of
    *responses* (newline-terminated); when the list is exhausted the module
    fails, reporting the question and the last output seen from the child.
    """
    resp_gen = (u'%s\n' % to_text(r).rstrip(u'\n') for r in responses)

    def wrapped(info):
        try:
            # Use the next() builtin (Python 2.6+ and Python 3) instead of the
            # generator's .next() method, which only exists on Python 2.
            return next(resp_gen)
        except StopIteration:
            module.fail_json(msg="No remaining responses for '%s', "
                                 "output was '%s'" %
                                 (question,
                                  info['child_result_list'][-1]))

    return wrapped
def main():
    """Ansible module entry point.

    Runs the requested command under pexpect, answering matching prompts
    with the configured responses, honoring the creates/removes/chdir
    idempotence options, and reporting rc/stdout/timing back to Ansible.
    """
    module = AnsibleModule(
        argument_spec=dict(
            command=dict(required=True),
            chdir=dict(),
            creates=dict(),
            removes=dict(),
            responses=dict(type='dict', required=True),
            timeout=dict(type='int', default=30),
            echo=dict(type='bool', default=False),
        )
    )
    if not HAS_PEXPECT:
        module.fail_json(msg='The pexpect python module is required')
    chdir = module.params['chdir']
    args = module.params['command']
    creates = module.params['creates']
    removes = module.params['removes']
    responses = module.params['responses']
    timeout = module.params['timeout']
    echo = module.params['echo']
    # Build the pexpect "events" mapping: prompt pattern -> response string,
    # or a closure yielding successive responses when a list was supplied.
    events = dict()
    for key, value in responses.items():
        if isinstance(value, list):
            response = response_closure(module, key, value)
        else:
            response = u'%s\n' % to_text(value).rstrip(u'\n')
        # .decode() turns the Python 2 byte-string key into unicode so it can
        # be matched against the child's unicode output.
        events[key.decode()] = response
    if args.strip() == '':
        module.fail_json(rc=256, msg="no command given")
    if chdir:
        chdir = os.path.abspath(os.path.expanduser(chdir))
        os.chdir(chdir)
    if creates:
        # do not run the command if the line contains creates=filename
        # and the filename already exists. This allows idempotence
        # of command executions.
        v = os.path.expanduser(creates)
        if os.path.exists(v):
            module.exit_json(
                cmd=args,
                stdout="skipped, since %s exists" % v,
                changed=False,
                rc=0
            )
    if removes:
        # do not run the command if the line contains removes=filename
        # and the filename does not exist. This allows idempotence
        # of command executions.
        v = os.path.expanduser(removes)
        if not os.path.exists(v):
            module.exit_json(
                cmd=args,
                stdout="skipped, since %s does not exist" % v,
                changed=False,
                rc=0
            )
    startd = datetime.datetime.now()
    try:
        try:
            # Prefer pexpect.run from pexpect>=4
            out, rc = pexpect.run(args, timeout=timeout, withexitstatus=True,
                                  events=events, cwd=chdir, echo=echo,
                                  encoding='utf-8')
        except TypeError:
            # Use pexpect.runu in pexpect>=3.3,<4
            out, rc = pexpect.runu(args, timeout=timeout, withexitstatus=True,
                                   events=events, cwd=chdir, echo=echo)
    except (TypeError, AttributeError):
        e = get_exception()
        # This should catch all insufficient versions of pexpect
        # We deem them insufficient for their lack of ability to specify
        # to not echo responses via the run/runu functions, which would
        # potentially leak sensitive information
        module.fail_json(msg='Insufficient version of pexpect installed '
                             '(%s), this module requires pexpect>=3.3. '
                             'Error was %s' % (pexpect.__version__, e))
    except pexpect.ExceptionPexpect:
        e = get_exception()
        module.fail_json(msg='%s' % e)
    endd = datetime.datetime.now()
    delta = endd - startd
    # pexpect returns None for out when the child produced no output at all.
    if out is None:
        out = ''
    ret = dict(
        cmd=args,
        stdout=out.rstrip('\r\n'),
        rc=rc,
        start=str(startd),
        end=str(endd),
        delta=str(delta),
        changed=True,
    )
    # rc is None when the child was still running at the timeout.
    if rc is not None:
        module.exit_json(**ret)
    else:
        ret['msg'] = 'command exceeded timeout'
        module.fail_json(**ret)

if __name__ == '__main__':
    main()
|
of forces/torques must be constructed, where each entry in the list
is a (Point, Vector) or (ReferenceFrame, Vector) tuple, where the Vectors
represent the Force or Torque.
Next a particle needs to be created, and it needs to have a point and mass
assigned to it.
Finally, a list of all bodies and particles needs to be created::
>>> kd = [qd - u]
>>> FL = [(P, (-k * q - c * u) * N.x)]
>>> pa = Particle('pa', P, m)
>>> BL = [pa]
Finally we can generate the equations of motion.
First we create the Kane object and supply an inertial frame.
Next we pass it the generalized speeds.
Then we pass it the kinematic differential equation dict.
Next we form FR* and FR to complete: Fr + Fr* = 0.
We have the equations of motion at this point.
    It makes sense to rearrange them though, so we calculate the mass matrix and
the forcing terms, for E.o.M. in the form: [MM] udot = forcing, where MM is
the mass matrix, udot is a vector of the time derivatives of the
generalized speeds, and forcing is a vector representing "forcing" terms::
>>> KM = Kane(N)
>>> KM.coords([q])
>>> KM.speeds([u])
>>> KM.kindiffeq(kd)
>>> (fr, frstar) = KM.kanes_equations(FL, BL)
>>> MM = KM.mass_matrix
>>> forcing = KM.forcing
>>> rhs = MM.inv() * forcing
>>> rhs
[-(c*u(t) + k*q(t))/m]
>>> KM.linearize()[0]
[0, 1]
[k, c]
Please look at the documentation pages for more information on how to
perform linearization and how to deal with dependent coordinates & speeds,
    and how to deal with bringing non-contributing forces into evidence.
"""
simp = True
    def __init__(self, frame):
        """Supply the inertial frame for Kane initialization. """
        # Big storage things
        self._inertial = frame        # the inertial ReferenceFrame
        self._forcelist = None        # (point/frame, vector) force/torque pairs
        self._bodylist = None         # particles and rigid bodies
        self._fr = None               # generalized active forces
        self._frstar = None           # generalized inertia forces
        self._rhs = None              # cached right-hand side of the EoM
        self._aux_eq = None           # auxiliary (non-contributing force) eqs
        # States
        self._q = None                # generalized coordinates
        self._qdep = []               # dependent coordinates
        self._qdot = None             # time derivatives of the coordinates
        self._u = None                # generalized speeds
        self._udep = []               # dependent speeds
        self._udot = None             # time derivatives of the speeds
        self._uaux = None             # auxiliary speeds
        # Differential Equations Matrices
        self._k_d = None
        self._f_d = None
        self._k_kqdot = None
        self._k_ku = None
        self._f_k = None
        # Constraint Matrices
        self._f_h = Matrix([])        # holonomic (configuration) constraints
        self._k_nh = Matrix([])       # nonholonomic constraint coefficients
        self._f_nh = Matrix([])       # nonholonomic constraint remainder
        self._k_dnh = Matrix([])      # differentiated constraint coefficients
        self._f_dnh = Matrix([])      # differentiated constraint remainder
def _find_dynamicsymbols(self, inlist, insyms=[]):
"""Finds all non-supplied dynamicsymbols in the expressions."""
from sympy.core.function import AppliedUndef, Derivative
t = dynamicsymbols._t
return reduce(set.union, [set([i]) for j in inlist
for i in j.atoms(AppliedUndef, Derivative)
if i.atoms() == set([t])], set()) - insyms
temp_f = set().union(*[i.atoms(AppliedUndef) for i in inlist])
temp_d = set().union(*[i.atoms(Derivative) for i in inlist])
set_f = set([a for a in temp_f if a.args == (t,)])
set_d = set([a for a in temp_d if ((a.args[0] in set_f) and all([i == t
for i in a.variables]))])
return list(set.union(set_f, set_d) - set(insyms))
def _find_othersymbols(self, inlist, insyms=[]):
"""Finds all non-dynamic symbols in the expressions."""
return list(reduce(set.union, [i.atoms(Symbol) for i in inlist]) -
set(insyms))
    def _mat_inv_mul(self, A, B):
        """Internal Function
        Computes A^-1 * B symbolically w/ substitution, where B is not
        necessarily a vector, but can be a matrix.
        """
        # Note: investigate difficulty in only creating symbols for non-zero
        # entries; this could speed things up, perhaps?
        r1, c1 = A.shape
        r2, c2 = B.shape
        # Replace entries with placeholder symbols so LUsolve operates on
        # cheap expressions; the real entries are substituted back at the end.
        # NOTE(review): the flat-index formulas 'j + r1 * i' / 'j + r2 * i'
        # only generate unique names when the matrices are square; confirm
        # callers never pass non-square A or B here.
        temp1 = Matrix(r1, c1, lambda i, j: Symbol('x' + str(j + r1 * i)))
        temp2 = Matrix(r2, c2, lambda i, j: Symbol('y' + str(j + r2 * i)))
        # Keep structural zeros as literal zeros to preserve sparsity.
        for i in range(len(temp1)):
            if A[i] == 0:
                temp1[i] = 0
        for i in range(len(temp2)):
            if B[i] == 0:
                temp2[i] = 0
        # Solve column by column, then reassemble the columns into a matrix.
        temp3 = []
        for i in range(c2):
            temp3.append(temp1.LUsolve(temp2.extract(range(r2), [i])))
        temp3 = Matrix([i.T for i in temp3]).T
        # Kane.simp is the class-level simplification switch.
        if Kane.simp == True:
            temp3.simplify()
        # Substitute the original matrix entries back for the placeholders.
        return temp3.subs(dict(zip(temp1, A))).subs(dict(zip(temp2, B)))
def coords(self, qind, qdep=[], coneqs=[]):
"""Supply all the generalized coordiantes in a list.
If some coordinates are dependent, supply them as part of qdep. Their
dependent nature will only show up in the linearization process though.
Parameters
==========
qind : list
A list of independent generalized coords
qdep : list
List of dependent coordinates
coneq : list
List of expressions which are equal to zero; these are the
configuration constraint equations
"""
if not isinstance(qind, (list, tuple)):
raise TypeError('Generalized coords. must be supplied in a list.')
self._q = qind + qdep
self._qdot = [diff(i, dynamicsymbols._t) for i in self._q]
if not isinstance(qdep, (list, tuple)):
raise TypeError('Dependent speeds and constraints must each be '
'provided in their own list.')
if len(qdep) != len(coneqs):
raise ValueError('There must be an equal number of dependent '
'speeds and constraints.')
coneqs = Matrix(coneqs)
self._qdep = qdep
self._f_h = coneqs
def speeds(self, uind, udep=[], coneqs=[], diffconeqs=None, u_auxiliary=[]):
"""Supply all the generalized speeds in a list.
If there are motion constraints or auxiliary speeds, they are provided
here as well (as well as motion constraints).
Parameters
==========
uind : list
A list of independent generalized speeds
udep : list
Optional list of dependent speeds
coneqs : list
Optional List of constraint expressions; these are expressions
which are equal to zero which define a speed (motion) constraint.
diffconeqs : list
Optional, calculated automatically otherwise; list of constraint
equations; again equal to zero, but define an acceleration
constraint.
u_auxiliary : list
            An optional list of auxiliary speeds used for bringing
non-contributing forces into evidence
"""
if not isinstance(uind, (list, tuple)):
raise TypeError('Generalized speeds must be supplied in a list.')
self._u = uind + udep
self._udot = [diff(i, dynamicsymbols._t) for i in self._u]
self._uaux = u_auxiliary
if not isinstance(udep, (list, tuple)):
raise TypeError('Dependent speeds and constraints must each be '
'provided in their own list.')
if len(udep) != len(coneqs):
raise ValueError('Th | ere must be an equal number of dependent '
'speeds and constraints.')
if diffconeqs != None:
if len(udep) != len(diffconeqs):
raise ValueError('There must be an equal number of dependent '
| 'speeds and constraints.')
if len(udep) != 0:
u = self._u
uzero = dict(zip(u, [0] * len(u)))
coneqs = Matrix(coneqs)
udot = self._udot
udotzero = dict(zip(udot, [0] * len(udot)))
self._udep = udep
self._f_nh = coneqs.subs(uzero)
self._k_nh = (coneqs - self._f_nh).jacobian(u)
# if no differentiated non holonomic constraints were given, calculate
if diffconeqs == None:
|
tions or test_expectations.NEEDS_REBASELINE in expectations or test_expectations.NEEDS_MANUAL_REBASELINE in expectations
def _test_is_slow(self, test_file):
return test_expectations.SLOW in self._expectations.model().get_expectations(test_file)
def needs_servers(self, test_names):
return any(self._test_requires_lock(test_name) for test_name in test_names)
    def _rename_results_folder(self):
        """Archive the previous results directory by renaming it.

        The archive suffix is derived from the mtime of the old run's
        results.html; when that file is missing (e.g. the previous run was
        interrupted before testing started) the folder is left in place and
        simply overwritten by the new run.
        """
        try:
            timestamp = time.strftime("%Y-%m-%d-%H-%M-%S", time.localtime(self._filesystem.mtime(self._filesystem.join(self._results_directory, "results.html"))))
        except (IOError, OSError), e:
            # It might be possible that results.html was not generated in previous run, because the test
            # run was interrupted even before testing started. In those cases, don't archive the folder.
            # Simply override the current folder contents with new results.
            import errno
            if e.errno == errno.EEXIST or e.errno == errno.ENOENT:
                self._printer.write_update("No results.html file found in previous run, skipping it.")
            return None
        archived_name = ''.join((self._filesystem.basename(self._results_directory), "_", timestamp))
        archived_path = self._filesystem.join(self._filesystem.dirname(self._results_directory), archived_name)
        self._filesystem.move(self._results_directory, archived_path)
def _delete_dirs(self, dir_list):
for dir in dir_list:
self._filesystem.rmtree(dir)
def _limit_archived_results_count(self):
results_directory_path = self._filesystem.dirname(self._results_directory)
file_list = self._filesystem.listdir(results_directory_path)
results_directories = []
for dir in file_list:
file_path = self._filesystem.join(results_directory_path, dir)
if self._filesystem.isdir(file_path) and self._results_directory in file_path:
results_directories.append(file_path)
results_directories.sort(key=lambda x: self._filesystem.mtime(x))
self._printer.write_update("Clobbering excess archived results in %s" % results_directory_path)
self._delete_dirs(results_directories[:-self.ARCHIVED_RESULTS_LIMIT])
    def _set_up_run(self, test_names):
        """Prepare the environment for a test run.

        Checks the build, starts the pixel-test helper, verifies system
        dependencies, archives or clobbers old results, and creates the
        results directory. Returns OK_EXIT_STATUS on success or a failing
        exit code from one of the checks.
        """
        self._printer.write_update("Checking build ...")
        if self._options.build:
            exit_code = self._port.check_build(self.needs_servers(test_names), self._printer)
            if exit_code:
                _log.error("Build check failed")
                return exit_code
        # This must be started before we check the system dependencies,
        # since the helper may do things to make the setup correct.
        if self._options.pixel_tests:
            self._printer.write_update("Starting pixel test helper ...")
            self._port.start_helper()
        # Check that the system dependencies (themes, fonts, ...) are correct.
        if not self._options.nocheck_sys_deps:
            self._printer.write_update("Checking system dependencies ...")
            exit_code = self._port.check_sys_deps(self.needs_servers(test_names))
            if exit_code:
                # Tear the helper back down before bailing out.
                self._port.stop_helper()
                return exit_code
        if self._options.clobber_old_results:
            self._clobber_old_results()
        elif self._filesystem.exists(self._results_directory):
            self._limit_archived_results_count()
            # Rename the existing results folder for archiving.
            self._rename_results_folder()
        # Create the output directory if it doesn't already exist.
        self._port.host.filesystem.maybe_make_directory(self._results_directory)
        self._port.setup_test_run()
        return test_run_results.OK_EXIT_STATUS
def run(self, args):
"""Run the tests and return a RunDetails object with the results."""
start_time = time.time()
self._printer.write_update("Collecting tests ...")
running_all_tests = False
try:
paths, test_names, running_all_tests = self._collect_tests(args)
except IOError:
# This is raised if --test-list doesn't exist
return test_run_results.RunDetails(exit_code=test_run_results.NO_TESTS_EXIT_STATUS)
self._printer.write_update("Parsing expectations ...")
self._expectations = test_expectations.TestExpectations(self._port, test_names)
tests_to_run, tests_to_skip = self._prepare_lists(paths, test_names)
self._printer.print_found(len(test_names), len(tests_to_run), self._options.repeat_each, self._options.iterations)
# Check to make sure we're not skipping every test.
if not tests_to_run:
_log.critical('No tests to run.')
return test_run_results.RunDetails(exit_code=test_run_results.NO_TESTS_EXIT_STATUS)
exit_code = self._set_up_run(tests_to_run)
if exit_code:
return test_run_results.RunDetails(exit_code=exit_code)
# Don't retry failures if an explicit list of tests was passed in.
if self._options.retry_failures is None:
should_retry_failures = len(paths) < len(test_names)
else:
should_retry_failures = self._options.retry_failures
enabled_pixel_tests_in_retry = False
try:
self._start_servers(tests_to_run)
num_workers = self._port.num_workers(int(self._options.child_processes))
initial_results = self._run_tests(
tests_to_run, tests_to_skip, self._options.repeat_each, self._options.iterations,
num_workers)
# Don't retry failures when interrupted by user or failures limit exception.
should_retry_failures = should_retry_failures and not (initial_results.interrupted or initial_results.keyboard_interrupted)
tests_to_retry = self._tests_to_retry(initial_results)
all_retry_results = []
if should_retry_failures and tests_to_retry:
enabled_pixel_tests_in_retry = self._force_pixel_tests_if_needed()
for retry_attempt in xrange(1, self._options.num_retries + 1):
if not tests_to_retry:
break
_log.info('')
_log.info('Retrying %s, attempt %d of %d...' %
(grammar.pluralize('unexpected failure', len(tests_to_retry)),
retry_attempt, self._options.num_retries))
retry_results = self._run_tests(tests_to_retry,
tests_to_skip=set(),
repeat_each=1,
iterations=1,
num_workers=num_workers,
retry_attempt=retry_attempt)
all_retry_results.append(retry_results)
tests_to_retry = self._tests_to_retry(retry_results)
if enabled_pixel_tests_in_retry:
self._options.pixel_tests = False
finally:
self._stop_servers()
self._clean_up_run()
# Some crash logs can take a long time to be written out so look
# for new logs after the test run finishes.
self._printer.write_update("looking for new crash log | s")
self._look_for_new_crash_logs(initial_results, start_time)
for retry_attempt_results in all_retry_results:
self._look_for_new_crash_logs(retry_attempt_results, start_time)
_log.debug("summarizing results")
summarized_full_results = test_run_results.summarize_resul | ts(
self._port, self._expectations, initial_results, all_retry_results,
enabled_pixel_tests_in_retry)
summarized_failing_results = test_run_results.summarize_results(
self._port, self._expectations, initial_results, all_retry_results,
enabled_pixel_tests_in_retry, only_include_failing=True)
exit |
puts
self.internally_connectable = internally_connectable
self.multi_type = multi_type
self.i2c = i2c
self.frontend_id = frontend_id
self.__is_empty = is_empty
self.compatible = {
None: (None,),
"DVB-S": ("DVB-S", None),
"DVB-C": ("DVB-C", None),
"DVB-T": ("DVB-T", None),
"DVB-S2": ("DVB-S", "DVB-S2", None),
"DVB-C2": ("DVB-C", "DVB-C2", None),
"DVB-T2": ("DVB-T", "DVB-T2", None),
"ATSC": ("ATSC", None),
}
def isCompatible(self, what):
if not self.isSupported():
return False
return what in self.compatible[self.getType()]
def canBeCompatible(self, what):
if not self.isSupported():
return False
if self.isCompatible(what):
return True
for type in self.multi_type.values():
if what in self.compatible[type]:
return True
return False
def getType(self):
try:
if self.isMultiType():
return self.multi_type[self.config.multiType.value]
except:
pass
return self.type
def connectableTo(self):
connectable = {
"DVB-S": ("DVB-S", "DVB-S2"),
"DVB-C": ("DVB-C", "DVB-C2"),
"DVB-T": ("DVB-T","DVB-T2"),
| "DVB-S2": ("DVB-S", "DVB-S2"),
"DVB-C2": ("DVB-C", "DVB-C2"),
"DVB-T2": ("DVB-T", "DVB-T2"),
"ATSC": ("ATSC"),
}
return connectable[self.getType()]
    def getSlotName(self):
        # get a friendly description for a slot name.
        # we name them "Tuner A/B/C/...", because that's what's usually written on the back
        # of the device.
        # e.g. slot 0 -> "Tuner A" (the "Tuner" prefix is translated).
        return _("Tuner") + " " + chr(ord('A') + self.slot)

    # Read-only attribute access for the friendly name.
    slot_name = property(getSlotName)
def getSlotID(se | lf):
return chr(ord('A') + self.slot)
    def getI2C(self):
        # Raw I2C device value supplied at construction (parsed from
        # /proc/bus/nim_sockets "I2C_Device:" lines).
        return self.i2c
    def hasOutputs(self):
        # True when the NIM reports loop-through outputs ("Has_Outputs: yes").
        return self.has_outputs
    def internallyConnectableTo(self):
        # Slot this tuner can be linked to internally (None when unsupported).
        return self.internally_connectable
def setInternalLink(self):
if self.internally_connectable is not None:
print "setting internal link on frontend id", self.frontend_id
open("/proc/stb/frontend/%d/rf_switch" % self.frontend_id, "w").write("internal")
def removeInternalLink(self):
if self.internally_connectable is not None:
print "removing internal link on frontend id", self.frontend_id
open("/proc/stb/frontend/%d/rf_switch" % self.frontend_id, "w").write("external")
def isMultiType(self):
return (len(self.multi_type) > 0)
    def isEmpty(self):
        # True for a physically empty slot (flag set once at construction).
        return self.__is_empty
# empty tuners are supported!
def isSupported(self):
return (self.frontend_id is not None) or self.__is_empty
    # returns dict {<slotid>: <type>}
    def getMultiTypeList(self):
        # Mapping of selectable mode index -> delivery system for this tuner.
        return self.multi_type

    # Read-only single-letter id, e.g. 'A' for slot 0.
    slot_id = property(getSlotID)
    def getFriendlyType(self):
        # Human-readable tuner type; a None type (empty slot) maps to the
        # translated string "empty". Raises KeyError for unknown types.
        return {
            "DVB-S": "DVB-S",
            "DVB-T": "DVB-T",
            "DVB-C": "DVB-C",
            "DVB-S2": "DVB-S2",
            "DVB-T2": "DVB-T2",
            "DVB-C2": "DVB-C2",
            "ATSC": "ATSC",
            None: _("empty")
        }[self.getType()]

    friendly_type = property(getFriendlyType)
def getFriendlyFullDescription(self):
nim_text = self.slot_name + ": "
if self.empty:
nim_text += _("(empty)")
elif not self.isSupported():
nim_text += self.description + " (" + _("not supported") + ")"
else:
nim_text += self.description + " (" + self.friendly_type + ")"
return nim_text
friendly_full_description = property(getFriendlyFullDescription)
config_mode = property(lambda self: config.Nims[self.slot].configMode.value)
config = property(lambda self: config.Nims[self.slot])
empty = property(lambda self: self.getType is None)
class NimManager:
    def getConfiguredSats(self):
        # Delegate to self.sec for the set of currently configured satellites.
        return self.sec.getConfiguredSats()
def getTransponders(self, pos):
if self.transponders.has_key(pos):
return self.transponders[pos]
else:
return []
    def getTranspondersCable(self, nim):
        # Predefined cable transponders are only available when the tuner is
        # enabled and configured for provider-based scanning; otherwise there
        # is nothing to scan from, so return an empty list.
        nimConfig = config.Nims[nim]
        if nimConfig.configMode.value != "nothing" and nimConfig.cable.scan_type.value == "provider":
            return self.transponderscable[self.cablesList[nimConfig.cable.scan_provider.index][0]]
        return [ ]
def getTranspondersTerrestrial(self, region):
return self.transpondersterrestrial[region]
    def getCableDescription(self, nim):
        # Provider name for this tuner's cable configuration.
        # NOTE(review): this indexes config.Nims[nim].scan_provider, while
        # getTranspondersCable uses config.Nims[nim].cable.scan_provider --
        # confirm the attribute path is intentional.
        return self.cablesList[config.Nims[nim].scan_provider.index][0]
    def getCableFlags(self, nim):
        # Flags entry for this tuner's selected cable provider (see the
        # scan_provider path note on getCableDescription).
        return self.cablesList[config.Nims[nim].scan_provider.index][1]
    def getTerrestrialDescription(self, nim):
        # Region name selected in this tuner's terrestrial configuration.
        return self.terrestrialsList[config.Nims[nim].terrestrial.index][0]
    def getTerrestrialFlags(self, nim):
        # Flags entry for this tuner's selected terrestrial region.
        return self.terrestrialsList[config.Nims[nim].terrestrial.index][1]
    def getSatDescription(self, pos):
        # Friendly satellite name for orbital position *pos* (KeyError when
        # the position is not in the satellites table).
        return self.satellites[pos]
def sortFunc(self, x):
orbpos = x[0]
if orbpos > 1800:
return orbpos - 3600
else:
return orbpos + 1800
    def readTransponders(self):
        # read initial networks from file. we only read files which we are interested in,
        # which means only these where a compatible tuner exists.
        # Tables are keyed by orbital position (sat), provider (cable) and
        # region (terrestrial) respectively.
        self.satellites = { }
        self.transponders = { }
        self.transponderscable = { }
        self.transpondersterrestrial = { }
        self.transpondersatsc = { }
        db = eDVBDB.getInstance()
        if self.hasNimType("DVB-S"):
            print "Reading satellites.xml"
            db.readSatellites(self.satList, self.satellites, self.transponders)
            self.satList.sort() # sort by orbpos
            #print "SATLIST", self.satList
            #print "SATS", self.satellites
            #print "TRANSPONDERS", self.transponders
        if self.hasNimType("DVB-C"):
            print "Reading cables.xml"
            db.readCables(self.cablesList, self.transponderscable)
#            print "CABLIST", self.cablesList
#            print "TRANSPONDERS", self.transponders
        if self.hasNimType("DVB-T"):
            print "Reading terrestrial.xml"
            db.readTerrestrials(self.terrestrialsList, self.transpondersterrestrial)
#            print "TERLIST", self.terrestrialsList
#            print "TRANSPONDERS", self.transpondersterrestrial
        if self.hasNimType("ATSC"):
            print "Reading atsc.xml"
            # ATSC reading is not implemented yet in eDVBDB.
            #db.readATSC(self.atscList, self.transpondersatsc)
def enumerateNIMs(self):
# enum available NIMs. This is currently very dreambox-centric and uses the /proc/bus/nim_sockets interface.
# the result will be stored into nim_slots.
# the content of /proc/bus/nim_sockets looks like:
# NIM Socket 0:
# Type: DVB-S
# Name: BCM4501 DVB-S2 NIM (internal)
# NIM Socket 1:
# Type: DVB-S
# Name: BCM4501 DVB-S2 NIM (internal)
# NIM Socket 2:
# Type: DVB-T
# Name: Philips TU1216
# NIM Socket 3:
# Type: DVB-S
# Name: Alps BSBE1 702A
#
# Type will be either "DVB-S", "DVB-S2", "DVB-T", "DVB-C" or None.
# nim_slots is an array which has exactly one entry for each slot, even for empty ones.
self.nim_slots = [ ]
try:
nimfile = open("/proc/bus/nim_sockets")
except IOError:
return
current_slot = None
entries = {}
for line in nimfile:
if not line:
break
line = line.strip()
if line.startswith("NIM Socket"):
parts = line.split(" ")
current_slot = int(parts[2][:-1])
entries[current_slot] = {}
elif line.startswith("Type:"):
entries[current_slot]["type"] = str(line[6:])
entries[current_slot]["isempty"] = False
elif line.startswith("Name:"):
entries[current_slot]["name"] = str(line[6:])
entries[current_slot]["isempty"] = False
elif line.startswith("Has_Outputs:"):
input = str(line[len("Has_Outputs:") + 1:])
entries[current_slot]["has_outputs"] = (input == "yes")
elif line.startswith("Internally_Connectable:"):
input = int(line[len("Internally_Connectable:") + 1:])
entries[current_slot]["internally_connectable"] = input
elif line.startswith("Frontend_Device:"):
input = int(line[len("Frontend_Device:") + 1:])
entries[current_slot]["frontend_device"] = input
elif line.startswith("Mode"):
# "Mode 0: DVB-T" -> ["Mode 0", "DVB-T"]
split = line.split(": ")
if len(split) > 1 and split[1]:
# "Mode 0" -> ["Mode", "0"]
split2 = split[0].split(" ")
modes = entries[current_slot].get("multi_type", {})
modes[split2[1]] = split[1]
entries[current_slot]["multi_type"] = modes
elif line.startswith("I2C_Device:"):
input = int(line[len("I2C_Device:") + 1:])
entries[current_slot]["i2c"] = input
elif line.startswith("empty"):
entries[current_slot]["type"] = None
entries[current_slot]["n |
p(paths, "font_directory", text="")
sub.prop(paths, "texture_directory", text="")
sub.prop(paths, "render_output_directory", text="")
sub.prop(paths, "script_directory", text="")
sub.prop(paths, "sound_directory", text="")
sub.prop(paths, "temporary_directory", text="")
sub.prop(paths, "render_cache_directory", text="")
sub.prop(paths, "i18n_branches_directory", text="")
sub.prop(paths, "image_editor", text="")
subsplit = sub.split(percentage=0.3)
subsplit.prop(paths, "animation_player_preset", text="")
subsplit.prop(paths, "animation_player", text="")
col.separator()
col.separator()
colsplit = col.split(percentage=0.95)
sub = colsplit.column()
row = sub.split(percentage=0.3)
row.label(text="Auto Execution:")
row.prop(system, "use_scripts_auto_execute")
if system.use_scripts_auto_execute:
box = sub.box()
row = box.row()
row.label(text="Excluded Paths:")
row.operator("wm.userpref_autoexec_path_add", text="", icon='ZOOMIN', emboss=False)
for i, path_cmp in enumerate(userpref.autoexec_paths):
row = box.row()
row.prop(path_cmp, "path", text="")
row.prop(path_cmp, "use_glob", text="", icon='FILTER')
row.operator("wm.userpref_autoexec_path_remove", text="", icon='X', emboss=False).index = i
col = split.column()
col.label(text="Save & Load:")
col.prop(paths, "use_relative_paths")
col.prop(paths, "use_file_compression")
col.prop(paths, "use_load_ui")
col.prop(paths, "use_filter_files")
col.prop(paths, "show_hidden_files_datablocks")
col.prop(paths, "hide_recent_locations")
col.prop(paths, "hide_system_bookmarks")
col.prop(paths, "show_thumbnails")
col.separator()
col.prop(paths, "save_version")
col.prop(paths, "recent_files")
col.prop(paths, "use_save_preview_images")
col.separator()
col.label(text="Auto Save:")
col.prop(paths, "use_keep_session")
col.prop(paths, "use_auto_save_temporary_files")
sub = col.column()
sub.active = paths.use_auto_save_temporary_files
sub.prop(paths, "auto_save_time", text="Timer (mins)")
col.separator()
col.label(text="Text Editor:")
col.prop(system | , "use_tabs_as_spaces")
| colsplit = col.split(percentage=0.95)
col1 = colsplit.split(percentage=0.3)
sub = col1.column()
sub.label(text="Author:")
sub = col1.column()
sub.prop(system, "author", text="")
class USERPREF_MT_ndof_settings(Menu):
    # accessed from the window key-bindings in C (only)
    bl_label = "3D Mouse Settings"

    def draw(self, context):
        """Draw the 3D-mouse (NDOF) settings popup menu."""
        layout = self.layout

        input_prefs = context.user_preferences.inputs

        # Some options only apply inside a 3D View editor.
        is_view3d = context.space_data.type == 'VIEW_3D'

        layout.prop(input_prefs, "ndof_sensitivity")
        layout.prop(input_prefs, "ndof_orbit_sensitivity")
        layout.prop(input_prefs, "ndof_deadzone")

        if is_view3d:
            layout.separator()
            layout.prop(input_prefs, "ndof_show_guide")

            layout.separator()
            layout.label(text="Orbit style")
            layout.row().prop(input_prefs, "ndof_view_navigate_method", text="")
            layout.row().prop(input_prefs, "ndof_view_rotate_method", text="")

            layout.separator()
            layout.label(text="Orbit options")
            layout.prop(input_prefs, "ndof_rotx_invert_axis")
            layout.prop(input_prefs, "ndof_roty_invert_axis")
            layout.prop(input_prefs, "ndof_rotz_invert_axis")

        # view2d use pan/zoom
        layout.separator()
        layout.label(text="Pan options")
        layout.prop(input_prefs, "ndof_panx_invert_axis")
        layout.prop(input_prefs, "ndof_pany_invert_axis")
        layout.prop(input_prefs, "ndof_panz_invert_axis")
        layout.prop(input_prefs, "ndof_pan_yz_swap_axis")

        layout.label(text="Zoom options")
        layout.prop(input_prefs, "ndof_zoom_invert")

        if is_view3d:
            layout.separator()
            layout.label(text="Fly/Walk options")
            layout.prop(input_prefs, "ndof_fly_helicopter", icon='NDOF_FLY')
            layout.prop(input_prefs, "ndof_lock_horizon", icon='NDOF_DOM')
class USERPREF_MT_keyconfigs(Menu):
    bl_label = "KeyPresets"
    preset_subdir = "keyconfig"
    preset_operator = "wm.keyconfig_activate"

    def draw(self, context):
        # First entry switches back to the built-in default key configuration.
        props = self.layout.operator("wm.context_set_value", text="Blender (default)")
        props.data_path = "window_manager.keyconfigs.active"
        props.value = "context.window_manager.keyconfigs.default"

        # now draw the presets
        Menu.draw_preset(self, context)
class USERPREF_PT_input(Panel):
bl_space_type = 'USER_PREFERENCES'
bl_label = "Input"
bl_region_type = 'WINDOW'
bl_options = {'HIDE_HEADER'}
@classmethod
def poll(cls, context):
userpref = context.user_preferences
return (userpref.active_section == 'INPUT')
@staticmethod
def draw_input_prefs(inputs, layout):
import sys
# General settings
row = layout.row()
col = row.column()
sub = col.column()
sub.label(text="Presets:")
subrow = sub.row(align=True)
subrow.menu("USERPREF_MT_interaction_presets", text=bpy.types.USERPREF_MT_interaction_presets.bl_label)
subrow.operator("wm.interaction_preset_add", text="", icon='ZOOMIN')
subrow.operator("wm.interaction_preset_add", text="", icon='ZOOMOUT').remove_active = True
sub.separator()
sub.label(text="Mouse:")
sub1 = sub.column()
sub1.active = (inputs.select_mouse == 'RIGHT')
sub1.prop(inputs, "use_mouse_emulate_3_button")
sub.prop(inputs, "use_mouse_continuous")
sub.prop(inputs, "drag_threshold")
sub.prop(inputs, "tweak_threshold")
sub.label(text="Select With:")
sub.row().prop(inputs, "select_mouse", expand=True)
sub = col.column()
sub.label(text="Double Click:")
sub.prop(inputs, "mouse_double_click_time", text="Speed")
sub.separator()
sub.prop(inputs, "use_emulate_numpad")
sub.separator()
sub.label(text="Orbit Style:")
sub.row().prop(inputs, "view_rotate_method", expand=True)
sub.separator()
sub.label(text="Zoom Style:")
sub.row().prop(inputs, "view_zoom_method", text="")
if inputs.view_zoom_method in {'DOLLY', 'CONTINUE'}:
sub.row().prop(inputs, "view_zoom_axis", expand=True)
sub.prop(inputs, "invert_mouse_zoom", text="Invert Mouse Zoom Direction")
#sub.prop(inputs, "use_mouse_mmb_paste")
#col.separator()
sub = col.column()
sub.prop(inputs, "invert_zoom_wheel", text="Invert Wheel Zoom Direction")
#sub.prop(view, "wheel_scroll_lines", text="Scroll Lines")
if sys.platform == "darwin":
sub = col.column()
sub.prop(inputs, "use_trackpad_natural", text="Natural Trackpad Direction")
col.separator()
sub = col.column()
sub.label(text="View Navigation:")
sub.row().prop(inputs, "navigation_mode", expand=True)
if inputs.navigation_mode == 'WALK':
walk = inputs.walk_navigation
sub.prop(walk, "use_mouse_reverse")
sub.prop(walk, "mouse_speed")
sub.prop(walk, "teleport_time")
sub = col.column(align=True)
sub.prop(walk, "walk_speed")
sub.prop(walk, "walk_speed_factor")
sub.separator()
sub.prop(walk, "use_gravity")
sub = col.column(align=True)
sub.active = walk.use_gravity
sub.prop(walk, "view_height")
sub.prop(walk, "jump_height")
if inputs.use_ndof:
col.separator()
col.label(text="NDOF Device:")
|
import htt | pbenchmark
from unuk.benchmarks.base import r | untests
|
_compile_destruct(compiler, code, left, exp)
else:
# print " MATCH ASSIGN"
scope = _current_scope(compiler)
idx = scope.what_next_temporary()
exp_node = nodes.create_temporary_node(nodes.node_token(node), idx)
match = nodes.create_match_node(nodes.node_token(node), exp, [nodes.list_node(
[left, nodes.list_node([exp_node])]
)])
_compile(compiler, code, match)
def _compile_node_name_lookup(compiler, code, node):
    """Emit a variable load, picking LOCAL or OUTER scope addressing."""
    symbol = _get_symbol_name(compiler, node)
    index, is_local = _get_variable_index(compiler, code, node, symbol)
    symbol_index = _declare_symbol(compiler, symbol)
    opcode = LOCAL if is_local else OUTER
    code.emit_2(opcode, index, symbol_index, info(node))
def _compile_TEMPORARY(compiler, code, node):
    """Emit a load of a compiler-generated temporary slot."""
    index = api.to_i(node_first(node))
    if not _has_temporary(compiler, index):
        compile_error(compiler, code, node, u"Invalid temporary variable %d" % index)
    code.emit_1(TEMPORARY, index, info(node))
def _compile_NAME(compiler, code, node):
    # Plain identifier: delegate to the shared local/outer lookup logic.
    _compile_node_name_lookup(compiler, code, node)
def _compile_SYMBOL(compiler, code, node):
    # Emit the symbol literal stored as the node's first child.
    name = node_first(node)
    _emit_symbol_literal(compiler, code, name)
def _compile_THROW(compiler, code, node):
    """Compile the thrown expression, then emit the THROW opcode."""
    value_node = node_first(node)
    _compile(compiler, code, value_node)
    code.emit_0(THROW, info(node))
def _log_ast(name, ast):
    """Dump the stringified AST to debinfo/<name>.json for debugging.

    Uses a context manager so the file handle is closed even if the
    write raises (the original open/close pair leaked it on error).
    """
    ast_str = str(nodes.node_to_string(ast))
    with open('debinfo/%s.json' % name, 'w') as f:
        f.write(ast_str)
COUNT_MODIFIES = 0
def _compile_MODIFY(compiler, code, node):
    """Lower a MODIFY node into an ordinary call and compile that call."""
    lowered = simplify.simplify_modify(compiler, code, node)
    _compile(compiler, code, lowered)
def _compile_DECORATOR(compiler, code, node):
    """Desugar a decorator application and compile the resulting call."""
    _compile(compiler, code, simplify.simplify_decorator(compiler, code, node))
def _compile_LENSE(compiler, code, node):
    """Desugar a lense expression and compile the resulting call."""
    _compile(compiler, code, simplify.simplify_lense(compiler, code, node))
def _compile_MAP(compiler, code, node):
    """Compile a map literal: push value then key per pair, then MAP n."""
    pairs = node_first(node)
    for pair in pairs:
        key, value = pair[0], pair[1]
        if is_empty_node(value):
            compile_error(compiler, code, node, u"Value expected")
        else:
            # Value first, key second — matches the MAP opcode's stack order.
            _compile(compiler, code, value)
            _compile(compiler, code, key)
    code.emit_1(MAP, len(pairs), info(node))
def _compile_TUPLE(compiler, code, node):
    """Push every element, then emit TUPLE with the element count."""
    elements = node_first(node)
    for element in elements:
        _compile(compiler, code, element)
    code.emit_1(TUPLE, len(elements), info(node))
def _compile_UNIT(compiler, code, node):
    # Unit is represented as an empty tuple.
    code.emit_1(TUPLE, 0, info(node))
def _compile_LIST(compiler, code, node):
    """Push every element, then emit LIST with the element count."""
    elements = node_first(node)
    assert nodes.is_list_node(elements), elements
    for element in elements:
        _compile(compiler, code, element)
    code.emit_1(LIST, len(elements), info(node))
def _compile_func_args_and_body(compiler, code, name, params, body):
    """Compile a single-clause function into a FUNCTION literal.

    Opens a fresh scope and code object, declares the arguments
    (unpacking destructuring patterns), compiles the body, finalizes
    the scope, and emits a FUNCTION opcode referencing the result.
    """
    funcname = _get_symbol_name_or_empty(compiler.process, name)
    _enter_scope(compiler)
    funccode = newcode(compiler)
    if node_type(params) == NT_UNIT:
        # Nullary function: no argument unpacking needed.
        _declare_arguments(compiler, 0, False)
    else:
        args = node_first(params)
        length = len(args)
        funccode.emit_0(FARGS, codeinfo_unknown())
        # A trailing rest parameter makes the function variadic.
        last_param = args[length - 1]
        is_variadic = True if node_type(last_param) == NT_REST else False
        _declare_arguments(compiler, length, is_variadic)
        _compile_destruct_unpack_seq(compiler, funccode, params)
    # if not api.isempty(funcname):
    #     _emit_fself(compiler, funccode, name, funcname)
    _compile_2(compiler, funccode, body)
    current_scope = _current_scope(compiler)
    scope = current_scope.finalize(_previous_scope(compiler), None)
    _exit_scope(compiler)
    compiled_code = funccode.finalize_compilation(scope)
    source = space.newfuncsource(funcname, compiled_code)
    source_index = _declare_literal(compiler, source)
    code.emit_1(FUNCTION, source_index, info(name))
def _compile_case_function(compiler, code, node, name, cases):
    """Compile a multi-clause (pattern-matching) function.

    All clauses share one variadic argument list; dispatch between the
    clauses is compiled as a MATCH over the packed arguments.
    (This body also repairs splice corruption present in the source.)
    """
    funcname = _get_symbol_name_or_empty(compiler.process, name)
    _enter_scope(compiler)
    funccode = newcode(compiler)
    # Arity is taken from the first clause's pattern.
    arity = nodes.pattern_length(cases[0][0])
    _declare_arguments(compiler, arity, True)
    # if not api.isempty(funcname):
    #     _emit_fself(compiler, funccode, name, funcname)
    funccode.emit_0(FARGS, codeinfo_unknown())
    _compile_match(compiler, funccode, node, cases, error.Errors.FUNCTION_MATCH_ERROR)
    current_scope = _current_scope(compiler)
    scope = current_scope.finalize(_previous_scope(compiler), None)
    _exit_scope(compiler)
    compiled_code = funccode.finalize_compilation(scope)
    source = space.newfuncsource(funcname, compiled_code)
    source_index = _declare_literal(compiler, source)
    code.emit_1(FUNCTION, source_index, info(node))
def is_simple_pattern(pattern, allow_unit):
    """Return True when the pattern needs no destructuring.

    A tuple of bare names qualifies; the unit pattern qualifies only
    when allow_unit is True. Everything else requires match dispatch.
    """
    ntype = node_type(pattern)
    if ntype == NT_TUPLE:
        return all(node_type(child) == NT_NAME for child in node_first(pattern))
    return ntype == NT_UNIT and allow_unit is True
def _compile_LAMBDA(compiler, code, node):
    """Compile an anonymous single-clause function."""
    clauses = node_first(node)
    assert len(clauses) == 1
    params, body = clauses[0][0], clauses[0][1]
    if is_simple_pattern(params, True):
        _compile_func_args_and_body(compiler, code,
                                    nodes.empty_node(), params, body)
    else:
        # Pattern parameters require the full match-dispatch machinery.
        _compile_case_function(compiler, code, node, nodes.empty_node(), clauses)
def _compile_FUN(compiler, code, node):
    """Compile a named function and bind it to a local constant."""
    namenode = node_first(node)
    funcname = _get_symbol_name_or_empty(compiler.process, namenode)
    # Named functions get a local slot; anonymous ones do not.
    index = _declare_local(compiler, funcname) if not api.isempty(funcname) else None

    funcs = node_second(node)
    if len(funcs) == 1 and is_simple_pattern(funcs[0][0], True):
        # Single clause with plain-name parameters: no match dispatch.
        _compile_func_args_and_body(compiler, code, namenode,
                                    funcs[0][0], funcs[0][1])
    else:
        _compile_case_function(compiler, code, node, namenode, funcs)

    if index is not None:
        funcname_index = _declare_symbol(compiler, funcname)
        code.emit_2(STORE_LOCAL_CONST, index, funcname_index, info(node))
def _compile_branch(compiler, code, condition, body, endif):
    """Compile one conditional branch; taken branches jump to *endif*."""
    _compile(compiler, code, condition)
    skip_label = code.prealocate_label()
    code.emit_1(JUMP_IF_FALSE, skip_label, info(condition))
    _compile(compiler, code, body)
    code.emit_1(JUMP, endif, codeinfo_unknown())
    code.emit_1(LABEL, skip_label, codeinfo_unknown())
def _compile_WHEN_NO_ELSE(compiler, code, node):
    """Compile `when` without an else clause; yields void when false."""
    condition = node_first(node)
    body = node_second(node)
    endif = code.prealocate_label()
    _compile_branch(compiler, code, condition, body, endif)
    # The false path falls through here and produces void.
    _emit_void(code)
    code.emit_1(LABEL, endif, codeinfo_unknown())
def _compile_CONDITION(compiler, code, node):
    """Compile an if/elif/else chain; the last branch is the else."""
    branches = node_first(node)
    endif = code.prealocate_label()
    count = len(branches)
    for position in range(count - 1):
        cond_branch = branches[position]
        _compile_branch(compiler, code, cond_branch[0], cond_branch[1], endif)
    # Final branch carries only a body (the else expression).
    _compile(compiler, code, branches[count - 1][1])
    code.emit_1(LABEL, endif, codeinfo_unknown())
def _compile_TRY(compiler, code, node):
trynode = node_first(node)
catches = node_second(node)
finallynode = node_third(node)
|
# -*- coding: utf-8 -*-
###################################################################
# This widget will display the next appointment on your calendar in
# the qtile status bar. Appointments within the "reminder" time will be
# highlighted. Authentication credentials are stored on disk.
#
# This widget uses the khal command line calendar utility available at
# https://github.com/geier/khal
#
# This widget also requires the dateutil.parser module.
# If you get a strange "AttributeError: 'module' object has no attribute
# GoogleCalendar" error, you are probably missing a module. Check
# carefully.
#
# Thanks to the creator of the YahooWeather widget (dmpayton). This code
# borrows liberally from that one.
#
# Copyright (c) 2016 by David R. Andersen <k0rx@RXcomm.net>
# New khal output format adjustment, 2016 Christoph Lassner
# Licensed under the Gnu Public License
#
# Permission is hereby granted, free of charge, to any person obtaining a copy
# of this software and associated documentation files (the "Software"), to deal
# in the Software without restriction, including without limitation the rights
# to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
# copies of the Software, and to permit persons to whom the Software is
# furnished to do so, subject to the following conditions:
#
# The above copyright notice and this permission notice shall be included in
# all copies or substantial portions of the Software.
#
# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
# IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
# FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
# AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
# LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
# OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
# SOFTWARE.
###################################################################
from . import base
import datetime
import dateutil.parser
import subprocess
import string
from libqtile import utils
class KhalCalendar(base.ThreadedPollText):
    """Khal calendar widget

    This widget will display the next appointment on your Khal calendar in the
    qtile status bar. Appointments within the "reminder" time will be
    highlighted.
    """
    orientations = base.ORIENTATION_HORIZONTAL
    defaults = [
        (
            'reminder_color',
            'FF0000',
            'color of calendar entries during reminder time'
        ),
        ('foreground', 'FFFF33', 'default foreground color'),
        ('remindertime', 10, 'reminder time in minutes'),
        ('lookahead', 7, 'days to look ahead in the calendar'),
    ]

    def __init__(self, **config):
        base.ThreadedPollText.__init__(self, **config)
        self.add_defaults(KhalCalendar.defaults)
        self.text = 'Calendar not initialized.'
        # Remember the configured color so it can be restored after a reminder.
        self.default_foreground = self.foreground

    def poll(self):
        """Return the next appointment (or a 'nothing scheduled' message).

        Parses ``khal agenda`` output; the widget foreground is switched to
        ``reminder_color`` while the next event is within ``remindertime``.
        """
        # get today and tomorrow
        now = datetime.datetime.now()
        tomorrow = now + datetime.timedelta(days=1)
        # get reminder time in datetime format
        remtime = datetime.timedelta(minutes=self.remindertime)

        # parse khal output for the next `lookahead` days and get the
        # next event
        args = ['khal', 'agenda', '--days', str(self.lookahead)]
        cal = subprocess.Popen(args, stdout=subprocess.PIPE)
        output = cal.communicate()[0].decode('utf-8').split('\n')
        if len(output) < 2:
            return 'No appointments scheduled'

        date = 'unknown'
        # BUGFIX: initialize both so the reminder check below cannot hit an
        # unbound `starttime` or compare `endtime=None` against a datetime
        # when khal reported no parseable event lines.
        starttime = None
        endtime = None
        for line in output:
            if line.strip() == '':
                continue
            try:
                # Event lines start with "HH:MM-HH:MM" -- presumably khal's
                # agenda format; TODO confirm against the installed version.
                starttime = dateutil.parser.parse(date + ' ' + line[:5],
                                                  ignoretz=True)
                endtime = dateutil.parser.parse(date + ' ' + line[6:11],
                                                ignoretz=True)
            except ValueError:
                # Not an event line: it may announce the date instead.
                try:
                    if line == 'Today:':
                        date = str(now.month) + '/' + str(now.day) + '/' + \
                            str(now.year)
                    elif line == 'Tomorrow:':
                        date = str(tomorrow.month) + '/' + str(tomorrow.day) + \
                            '/' + str(tomorrow.year)
                    else:
                        dateutil.parser.parse(line)
                        date = line
                    continue
                except ValueError:
                    pass  # no date.
            if endtime is not None and endtime > now:
                data = date.replace(':', '') + ' ' + line
                break
        else:
            data = 'No appointments in next ' + \
                str(self.lookahead) + ' days'

        # get rid of any garbage in appointment added by khal
        data = ''.join(filter(lambda x: x in string.printable, data))

        # colorize the event if it is within reminder time; skip when no
        # event could be parsed at all
        if (starttime is not None and endtime is not None
                and starttime - remtime <= now and endtime > now):
            self.foreground = utils.hex(self.reminder_color)
        else:
            self.foreground = self.default_foreground
        return data
|
from unittest import mock
from bgmi.downloader.aria2_rpc import Aria2DownloadRPC
_token = "token:2333"
@mock.patch("bgmi.config.ARIA2_RPC_URL", "https://uuu")
@mock.patch("bgmi.config.ARIA2_RPC_TOKEN", "token:t")
def test_use_config():
    """Aria2DownloadRPC must build its XML-RPC proxy from the configured
    URL and authenticate calls with the configured token.
    (Repairs splice corruption in the patched URL and the expected call.)
    """
    with mock.patch("xmlrpc.client.ServerProxy") as server_proxy:
        server_proxy.return_value.aria2.getVersion.return_value = {"version": "1.19.1"}
        Aria2DownloadRPC()
        server_proxy.assert_has_calls(
            [
                mock.call("https://uuu"),
                mock.call("https://uuu"),
                mock.call().aria2.getVersion("token:t"),
            ]
        )
|
ng: iso-8859-1 -*-
"""
MoinMoin - Text CAPTCHAs
This is just asking some (admin configured) questions and
checking if the answer is as expected. It is up to the wiki
admin to setup questions that a bot can not easily answer, but
humans can. It is recommended to setup SITE SPECIFIC questions
and not to share the questions with other sites (if everyone
asks the same questions / expects the same answers, spammers
could adapt to that).
TODO:
* roundtrip the question in some other way:
* make sure a q/a pair in the POST is for the q in the GET before
* make some nice CSS
* make similar changes to GUI editor
@copyright: 2007 by MoinMoin:ThomasWaldmann
@license: GNU GPL, see COPYING for details.
"""
import re
import random
from time import time
from MoinMoin import log
logging = log.getLogger(__name__)
from MoinMoin import wikiutil
from werkzeug.security import safe_str_cmp as safe_str_equal
from MoinMoin.support.python_compatibility import hmac_new
SHA1_LEN = 40 # length of hexdigest
TIMESTAMP_LEN = 10 # length of timestamp
class TextCha(object):
""" Text CAPTCHA support """
def __init__(self, request, question=None):
    """ Initialize the TextCha.

        @param request: the request object
        @param question: see _init_qa()
    """
    self.request = request
    # Identify the user for logging: account name if logged in, else IP.
    self.user_info = request.user.valid and request.user.name or request.remote_addr
    self.textchas = self._get_textchas()
    if self.textchas:
        # Secret and expiry are only needed while textchas are active.
        self.secret = request.cfg.secrets["security/textcha"]
        self.expiry_time = request.cfg.textchas_expiry_time
    self._init_qa(question)
def _get_textchas(self):
    """ get textchas from the wiki config for the user's language (or default_language or en) """
    request = self.request
    groups = request.groups
    cfg = request.cfg
    user = request.user
    # Members of the configured group are exempt from textchas.
    disabled_group = cfg.textchas_disabled_group
    if disabled_group and user.name and user.name in groups.get(disabled_group, []):
        return None
    textchas = cfg.textchas
    if textchas:
        # Language fallback chain: user language -> wiki default -> 'en'.
        lang = user.language or request.lang
        logging.debug(u"TextCha: user.language == '%s'." % lang)
        if lang not in textchas:
            lang = cfg.language_default
            logging.debug(u"TextCha: fallback to language_default == '%s'." % lang)
            if lang not in textchas:
                logging.error(u"TextCha: The textchas do not have content for language_default == '%s'! Falling back to English." % lang)
                lang = 'en'
                if lang not in textchas:
                    logging.error(u"TextCha: The textchas do not have content for 'en', auto-disabling textchas!")
                    # Disable globally so this is not re-logged on every request.
                    cfg.textchas = None
                    lang = None
    else:
        lang = None
    if lang is None:
        return None
    else:
        logging.debug(u"TextCha: using lang = '%s'" % lang)
        return textchas[lang]
def _compute_signature(self, question, timestamp):
    """Return the hex HMAC of question+timestamp keyed with the wiki secret."""
    payload = u"%s%d" % (question, timestamp)
    return hmac_new(self.secret, payload.encode('utf-8')).hexdigest()
def _init_qa(self, question=None):
    """ Initialize the question / answer.

        @param question: If given, the given question will be used.
                         If None, a new question will be generated.
    """
    if self.is_enabled():
        if question is None:
            self.question = random.choice(self.textchas.keys())
        else:
            self.question = question
        try:
            self.answer_regex = self.textchas[self.question]
            self.answer_re = re.compile(self.answer_regex, re.U|re.I)
        except KeyError:
            # this question does not exist, thus there is no answer
            self.answer_regex = ur"[Never match for cheaters]"
            self.answer_re = None
            logging.warning(u"TextCha: Non-existing question '%s'. User '%s' trying to cheat?" % (
                self.question, self.user_info))
        except re.error:
            # Bad answer regex in the config: log and pick another question.
            logging.error(u"TextCha: Invalid regex in answer for question '%s'" % self.question)
            self._init_qa()
def is_enabled(self):
    """ check if textchas are enabled.

        They can be disabled for all languages if you use textchas = None or = {},
        also they can be disabled for some specific language, like:
        textchas = {
            'en': {
                'some question': 'some answer',
                # ...
            },
            'de': {}, # having no questions for 'de' means disabling textchas for 'de'
            # ...
        }
    """
    # Return a plain bool, never the configuration dict itself.
    return bool(self.textchas)
def check_answer(self, given_answer, timestamp, signature):
    """ check if the given answer to the question is correct and within the correct timeframe"""
    if not self.is_enabled():
        return True
    if self.answer_re is not None:
        success = self.answer_re.match(given_answer.strip()) is not None
    else:
        # someone trying to cheat!?
        success = False
    # Reject answers submitted after the question expired.
    if not timestamp or timestamp + self.expiry_time < time():
        success = False
    # Reject tampered questions: signature mismatch or malformed values.
    try:
        if not safe_str_equal(self._compute_signature(self.question, timestamp), signature):
            success = False
    except TypeError:
        success = False
    success_status = u"success" if success else u"failure"
    logging.info(u"TextCha: %s (u='%s', a='%s', re='%s', q='%s')" % (
        success_status,
        self.user_info,
        given_answer,
        self.answer_regex,
        self.question,
    ))
    return success
def _make_form_values(self, question, given_answer):
    """Return (question_form, answer_form): signed+escaped question, escaped answer."""
    timestamp = time()
    signature = self._compute_signature(question, timestamp)
    question_form = "%s %d%s" % (
        wikiutil.escape(question, True),
        timestamp,
        signature
    )
    given_answer_form = wikiutil.escape(given_answer, True)
    return question_form, given_answer_form
def _extract_form_values(self, form=None):
    """Split posted textcha fields into (question, answer, timestamp, signature)."""
    if form is None:
        form = self.request.form
    question = form.get('textcha-question')
    signature = None
    timestamp = None
    if question:
        # Field layout: "<question> <timestamp><signature>" -- peel pieces
        # off the end.  The signature is the last SHA1_LEN bytes.
        signature = question[-SHA1_LEN:]
        question = question[:-SHA1_LEN]
        try:
            # The timestamp is the next TIMESTAMP_LEN bytes.
            timestamp = int(question[-TIMESTAMP_LEN:])
        except ValueError:
            pass
        # there is a space between the timestamp and the question, so take away 1
        question = question[:-TIMESTAMP_LEN - 1]
    given_answer = form.get('textcha-answer', u'')
    return question, given_answer, timestamp, signature
def render(self, form=None):
""" Checks if textchas are enabled and returns HTML for one,
or an empty string if they are not enabled.
@return: unicode result html
"""
if self.is_enabled():
question, given_answer, timestamp, signature = self._extract_form_values(form)
if question is None:
question = self.question
question_form, given_answer_form = self._make_form_values(question, given_answer)
result = u"""
<d |
# sqlalchemy/__init__.py
# Copyright (C) 2005-2019 the SQLAlchemy authors and contributors
# <see AUTHORS file>
#
# This module is part of SQLAlchemy and is released under
# the MIT License: http://www.opensource.org/licenses/mit-license.php
from . import util as _util # noqa
from .inspection import inspect # noqa
from .schema import BLANK_SCHEMA # noqa
from .schema import CheckConstraint # noqa
from .schema import Column # noqa
from .schema import ColumnDefault # noqa
from .schema import Constraint # noqa
from .schema import DDL # noqa
from .schema import DefaultClause # noqa
from .schema import FetchedValue # noqa
from .schema import ForeignKey # noqa
from .schema import ForeignKeyConstraint # noqa
from .schema import Index # noqa
from .schema import MetaData # noqa
from .schema import PassiveDefault # noqa
from .schema import PrimaryKeyConstraint # noqa
from .schema import Sequence # noqa
from .schema import Table # noqa
from .schema import ThreadLocalMetaData # noqa
from .schema import UniqueConstraint # noqa
from .sql import alias # noqa
from .sql import all_ # noqa
from .sql import and_ # noqa
from .sql import any_ # noqa
from .sql import asc # noqa
from .sql import between # noqa
from .sql import bindparam # noqa
from .sql import case # noqa
from .sql import cast # noqa
from .sql import collate # noqa
from .sql import column # noqa
from .sql import delete # noqa
from .sql import desc # noqa
from .sql import distinct # noqa
from .sql import except_ # noqa
from .sql import except_all # noqa
from .sql import exists # noqa
from .sql import extract # noqa
from .sql import false # noqa
from .sql import func # noqa
from .sql import funcfilter # noqa
from .sql import insert # noqa
from .sql import intersect # noqa
from .sql import intersect_all # noqa
from .sql import join # noqa
from .sql import lateral # noqa
from .sql import literal # noqa
from .sql import literal_column # noqa
from .sql import modifier # noqa
from .sql import not_ # noqa
from .sql import null # noqa
from .sql import nullsfirst # noqa
from .sql import nullslast # noqa
from .sql import or_ # noqa
from .sql import outerjoin # noqa
from .sql import outparam # noqa
from .sql import over # noqa
from .sql import select # noqa
from .sql import subquery # noqa
from .sql import table # noqa
from .sql import tablesample # noqa
from .sql import text # noqa
from .sql import true # noqa
from .sql import tuple_ # noqa
from .sql import type_coerce # noqa
from .sql import union # noqa
from .sql import union_all # noqa
from .sql import update # noqa
from .sql import within_group # noqa
from .types import ARRAY # noqa
from .types import BIGINT # noqa
from .types import BigInteger  # noqa
from .types import BINARY # noqa
from .types import Binary # noqa
from .types import BLOB # noqa
from .types import BOOLEAN # noqa
from .types import Boolean # noqa
from .types import CHAR # noqa
from .types import CLOB # noqa
from .types import DATE # noqa
from .types import Date # noqa
from .types import DATETIME # noqa
from .types import DateTime # noqa
from .types import DECIMAL # noqa
from .types import Enum  # noqa
from .types import FLOAT # noqa
from .types import Float # noqa
from .types import INT # noqa
from .types import INTEGER # noqa
from .types import Integer # noqa
from .types import Interval # noqa
from .types import JSON # noqa
from .types import LargeBinary # noqa
from .types import NCHAR # noqa
from .types import NUMERIC # noqa
from .types import Numeric # noqa
from .types import NVARCHAR # noqa
from .types import PickleType # noqa
from .types import REAL # noqa
from .types import SMALLINT # noqa
from .types import SmallInteger # noqa
from .types import String # noqa
from .types import TEXT # noqa
from .types import Text # noqa
from .types import TIME # noqa
from .types import Time # noqa
from .types import TIMESTAMP # noqa
from .types import TypeDecorator # noqa
from .types import Unicode # noqa
from .types import UnicodeText # noqa
from .types import VARBINARY # noqa
from .types import VARCHAR # noqa
from .engine import create_engine # noqa nosort
from .engine import engine_from_config # noqa nosort
__version__ = "1.3.3"


def __go(lcls):
    """Finish package setup: build __all__ and resolve lazy dependencies.

    lcls is the package namespace (``locals()``) at import time.
    """
    global __all__

    from . import events  # noqa
    from . import util as _sa_util

    import inspect as _inspect

    # Public API = every non-underscore, non-module name in the namespace.
    __all__ = sorted(
        name
        for name, obj in lcls.items()
        if not (name.startswith("_") or _inspect.ismodule(obj))
    )

    _sa_util.dependencies.resolve_all("sqlalchemy")


__go(locals())
|
from django.contrib import admin

# Register your models here.
from .models import Engine

admin.site.register(Engine)
from __future__ import absolute_import
import itertools
from django.contrib import messages
from django.core.context_processors import csrf
from django.db import transaction
from django.http import HttpResponseRedirect
from django.views.decorators.cache import never_cache
from django.views.decorators.csrf import csrf_protect
from django.utils.decorators import method_decorator
from sudo.decorators import sudo_required
from sentry.models import (
Project, ProjectStatus
)
from sentry.plugins import plugins
from sentry.web.forms.accounts import (
ProjectEmailOptionsForm, NotificationSettingsForm,
NotificationReportSettingsForm
)
from sentry.web.decorators import login_required
from sentry.web.frontend.base import BaseView
from sentry.web.helpers import render_to_response
from sentry.utils.auth import get_auth_providers
from sentry.utils.safe import safe_execute
class AccountNotificationView(BaseView):
    """Account notification settings page.

    Renders and saves the user's notification preferences: global
    settings, weekly-report settings, per-project mail options and any
    plugin-provided notification forms.
    (Repairs splice corruption in the per-project prefix and the
    plugin-form construction.)
    """
    notification_settings_form = NotificationSettingsForm

    @method_decorator(csrf_protect)
    @method_decorator(never_cache)
    @method_decorator(login_required)
    @method_decorator(sudo_required)
    @method_decorator(transaction.atomic)
    def handle(self, request):
        settings_form = self.notification_settings_form(
            request.user, request.POST or None)
        reports_form = NotificationReportSettingsForm(
            request.user, request.POST or None,
            prefix='reports')

        # Only projects the user is an active member of.
        project_list = list(Project.objects.filter(
            team__organizationmemberteam__organizationmember__user=request.user,
            team__organizationmemberteam__is_active=True,
            status=ProjectStatus.VISIBLE,
        ).distinct())

        project_forms = [
            (project, ProjectEmailOptionsForm(
                project, request.user,
                request.POST or None,
                prefix='project-%s' % (project.id,)
            ))
            for project in sorted(project_list, key=lambda x: (
                x.organization.name, x.name))
        ]

        # Plugin-provided notification forms; failures are swallowed by
        # safe_execute so a broken plugin cannot take down the page.
        ext_forms = []
        for plugin in plugins.all():
            for form in safe_execute(plugin.get_notification_forms, _with_transaction=False) or ():
                form = safe_execute(form, plugin, request.user, request.POST or None,
                                    prefix=plugin.slug, _with_transaction=False)
                if not form:
                    continue
                ext_forms.append(form)

        if request.POST:
            all_forms = list(itertools.chain(
                [settings_form, reports_form],
                ext_forms,
                (f for _, f in project_forms)
            ))
            # Save only when every form validates, so a partial submit
            # cannot leave the settings half-updated.
            if all(f.is_valid() for f in all_forms):
                for form in all_forms:
                    form.save()
                messages.add_message(request, messages.SUCCESS, 'Your settings were saved.')
                return HttpResponseRedirect(request.path)

        context = csrf(request)
        context.update({
            'settings_form': settings_form,
            'project_forms': project_forms,
            'reports_form': reports_form,
            'ext_forms': ext_forms,
            'page': 'notifications',
            'AUTH_PROVIDERS': get_auth_providers(),
        })
        return render_to_response('sentry/account/notifications.html', context, request)
|
import tests.model_control.test_ozone_custom_models_enabled as testmod

# Build one forecast model combination: Anscombe transform, MovingMedian
# trend, per-minute seasonality, LSTM autoregression.
testmod.build_model(['Anscombe'], ['MovingMedian'], ['Seasonal_Minute'], ['LSTM'])
, either version 3 of the License, or (at your option) any later
# version.
#
# satpy is distributed in the hope that it will be useful, but WITHOUT ANY
# WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS FOR
# A PARTICULAR PURPOSE. See the GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License along with
# satpy. If not, see <http://www.gnu.org/licenses/>.
"""Tests for the hrpt reader."""
import os
import unittest
from contextlib import suppress
from tempfile import NamedTemporaryFile
from unittest import mock
import numpy as np
import xarray as xr
from satpy.readers.hrpt import HRPTFile, dtype
from satpy.tests.reader_tests.test_avhrr_l1b_gaclac import PygacPatcher
from satpy.tests.utils import make_dataid
# Dimensions of the synthetic HRPT test data used by the fixtures below.
NUMBER_OF_SCANS = 10
SWATH_WIDTH = 2048
class TestHRPTWithFile(unittest.TestCase):
"" | "Test base class | with writing a fake file."""
def setUp(self) -> None:
"""Set up the test case."""
test_data = np.ones(NUMBER_OF_SCANS, dtype=dtype)
# Channel 3a
test_data["id"]["id"][:5] = 891
# Channel 3b
test_data["id"]["id"][5:] = 890
with NamedTemporaryFile(mode='w+', suffix='.hmf', delete=False) as hrpt_file:
self.filename = hrpt_file.name
test_data.tofile(hrpt_file)
def tearDown(self) -> None:
"""Tear down the test case."""
with suppress(OSError):
os.remove(self.filename)
def _get_dataset(self, dataset_id):
fh = HRPTFile(self.filename, {}, {})
return fh.get_dataset(dataset_id, {})
class TestHRPTReading(TestHRPTWithFile):
    """Test case for reading hrpt data."""

    def test_reading(self):
        """Test that data is read."""
        handler = HRPTFile(self.filename, {}, {})
        assert handler._data is not None
class TestHRPTGetUncalibratedData(TestHRPTWithFile):
    """Test case for reading uncalibrated hrpt data."""

    def _get_channel_1_counts(self):
        # Raw (counts) data for channel 1.
        return self._get_dataset(make_dataid(name='1', calibration='counts'))

    def test_get_dataset_returns_a_dataarray(self):
        """Test that get_dataset returns a dataarray."""
        counts = self._get_channel_1_counts()
        assert isinstance(counts, xr.DataArray)

    def test_platform_name(self):
        """Test that the platform name is correct."""
        counts = self._get_channel_1_counts()
        assert counts.attrs['platform_name'] == 'NOAA 19'

    def test_no_calibration_values_are_1(self):
        """Test that the values of non-calibrated data is 1."""
        counts = self._get_channel_1_counts()
        assert (counts.values == 1).all()
def fake_calibrate_solar(data, *args, **kwargs):
    """Fake solar calibration: a fixed affine transform of the input."""
    del args, kwargs  # accepted only for signature compatibility
    scaled = data * 25.43
    return scaled + 3
def fake_calibrate_thermal(data, *args, **kwargs):
    """Fake thermal calibration: a fixed affine transform of the input."""
    del args, kwargs  # accepted only for signature compatibility
    scaled = data * 35.43
    return scaled + 3
class CalibratorPatcher(PygacPatcher):
    """Patch pygac."""

    def setUp(self) -> None:
        """Patch pygac's calibration."""
        super().setUp()

        # Import things to patch here to make them patchable. Otherwise another function
        # might import it first which would prevent a successful patch.
        from pygac.calibration import Calibrator, calibrate_solar, calibrate_thermal
        self.Calibrator = Calibrator
        # Route both calibration entry points through the deterministic fakes
        # so the expected values in the tests below are exact.
        self.calibrate_thermal = calibrate_thermal
        self.calibrate_thermal.side_effect = fake_calibrate_thermal
        self.calibrate_solar = calibrate_solar
        self.calibrate_solar.side_effect = fake_calibrate_solar
class TestHRPTWithPatchedCalibratorAndFile(CalibratorPatcher, TestHRPTWithFile):
    """Test case with patched calibration routines and a synthetic file."""

    def setUp(self) -> None:
        """Set up the test case."""
        # Both bases must run: patch calibration first, then write the file.
        CalibratorPatcher.setUp(self)
        TestHRPTWithFile.setUp(self)

    def tearDown(self):
        """Tear down the test case."""
        CalibratorPatcher.tearDown(self)
        TestHRPTWithFile.tearDown(self)
class TestHRPTGetCalibratedReflectances(TestHRPTWithPatchedCalibratorAndFile):
    """Test case for reading calibrated reflectances from hrpt data."""

    def _get_channel_1_reflectance(self):
        """Get the channel 1 reflectance."""
        return self._get_dataset(make_dataid(name='1', calibration='reflectance'))

    def test_calibrated_reflectances_values(self):
        """Test the calibrated reflectance values."""
        reflectance = self._get_channel_1_reflectance()
        # Counts are all 1 and the fake calibration is 1 * 25.43 + 3 = 28.43.
        np.testing.assert_allclose(reflectance.values, 28.43)
class TestHRPTGetCalibratedBT(TestHRPTWithPatchedCalibratorAndFile):
    """Test case for reading calibrated brightness temperature from hrpt data."""

    def _get_channel_4_bt(self):
        """Get the channel 4 brightness temperature."""
        dataset_id = make_dataid(name='4', calibration='brightness_temperature')
        return self._get_dataset(dataset_id)

    def test_calibrated_bt_values(self):
        """Test the calibrated brightness temperature values."""
        result = self._get_channel_4_bt()
        # Counts are all 1, so the fake thermal calibration yields 1 * 35.43 + 3.
        np.testing.assert_allclose(result.values, 38.43)
class TestHRPTChannel3(TestHRPTWithPatchedCalibratorAndFile):
    """Test case for the channel 3a/3b split of hrpt data."""

    def _get_channel_3b_bt(self):
        """Get the channel 3b brightness temperature."""
        dataset_id = make_dataid(name='3b', calibration='brightness_temperature')
        return self._get_dataset(dataset_id)

    def _get_channel_3a_reflectance(self):
        """Get the channel 3a reflectance."""
        dataset_id = make_dataid(name='3a', calibration='reflectance')
        return self._get_dataset(dataset_id)

    def _get_channel_3a_counts(self):
        """Get the channel 3a counts."""
        dataset_id = make_dataid(name='3a', calibration='counts')
        return self._get_dataset(dataset_id)

    def test_channel_3b_masking(self):
        """Test that channel 3b is split correctly."""
        # NOTE(review): assumes the synthetic file flags the first five
        # scanlines as 3a and the rest as 3b — confirm in TestHRPTWithFile.
        result = self._get_channel_3b_bt()
        assert np.isnan(result.values[:5]).all()
        assert np.isfinite(result.values[5:]).all()

    def test_channel_3a_masking(self):
        """Test that channel 3a is split correctly."""
        result = self._get_channel_3a_reflectance()
        assert np.isnan(result.values[5:]).all()
        assert np.isfinite(result.values[:5]).all()

    def test_uncalibrated_channel_3a_masking(self):
        """Test that uncalibrated channel 3a is split correctly."""
        result = self._get_channel_3a_counts()
        assert np.isnan(result.values[5:]).all()
        assert np.isfinite(result.values[:5]).all()
class TestHRPTNavigation(TestHRPTWithFile):
"""Test case for computing HRPT navigation."""
def setUp(self) -> None:
"""Set up the test case."""
super().setUp()
self.fake_lons = np.ones((NUMBER_OF_SCANS, SWATH_WIDTH))
self.fake_lats = np.ones((NUMBER_OF_SCANS, SWATH_WIDTH)) * 2
    def _prepare_mocks(self, Orbital, SatelliteInterpolator, get_lonlatalt):
        """Prepare the mocks."""
        # Orbital(...).get_position() -> (pos, vel); the values are opaque
        # MagicMocks since only the interpolator output is asserted on.
        Orbital.return_value.get_position.return_value = mock.MagicMock(), mock.MagicMock()
        get_lonlatalt.return_value = (mock.MagicMock(), mock.MagicMock(), mock.MagicMock())
        # The interpolator produces the lon/lat arrays the reader returns, so
        # wire it to the known fake arrays from setUp().
        SatelliteInterpolator.return_value.interpolate.return_value = self.fake_lons, self.fake_lats
    @mock.patch.multiple('satpy.readers.hrpt',
                         Orbital=mock.DEFAULT,
                         compute_pixels=mock.DEFAULT,
                         get_lonlatalt=mock.DEFAULT,
                         SatelliteInterpolator=mock.DEFAULT)
    def test_longitudes_are_returned(self, Orbital, compute_pixels, get_lonlatalt, SatelliteInterpolator):
        """Check that longitudes are returned properly."""
        self._prepare_mocks(Orbital, SatelliteInterpolator, get_lonlatalt)
        dataset_id = make_dataid(name='longitude')
        result = self._get_dataset(dataset_id)
        assert (result == self.fake_lons).all()
@mock.patch.multiple('satpy.readers.hrpt',
Orbital=mock.DEFAULT,
compute_pixels=mock.DEFAULT,
|
from __future__ import with_statement
import os
from setuptools import setup
# Do not update the version manually - it is managed by `bumpversion`.
version = '2.0.1rc'
setup(
name='pydocstyle',
v | ersion=version,
description="Python docstring style checker",
long_description=open('README.rst').read(), |
license='MIT',
author='Amir Rachum',
author_email='amir@rachum.com',
url='https://github.com/PyCQA/pydocstyle/',
classifiers=[
'Intended Audience :: Developers',
'Environment :: Console',
'Programming Language :: Python :: 2',
'Programming Language :: Python :: 3',
'Operating System :: OS Independent',
'License :: OSI Approved :: MIT License',
],
keywords='pydocstyle, PEP 257, pep257, PEP 8, pep8, docstrings',
packages=('pydocstyle',),
package_dir={'': 'src'},
package_data={'pydocstyle': ['data/*.txt']},
install_requires=[
'snowballstemmer',
'six',
],
entry_points={
'console_scripts': [
'pydocstyle = pydocstyle.cli:main',
],
},
)
|
fr | om funtests import transport
class test_redis(transport.TransportCase):
    """Functional transport tests run against a live Redis broker."""
    transport = "redis"
    prefix = "redis"

    def after_connect(self, connection):
        # Issue INFO via the underlying client to verify the connection works.
        client = connection.channel().client
        client.info()

    def test_cant_connect_raises_connection_error(self):
        # Port 65534 is assumed to have no listener; connecting must raise one
        # of the transport's declared connection errors.
        conn = self.get_connection(port=65534)
        self.assertRaises(conn.connection_errors, conn.connect)
|
from copy import deepcopy, copy
from django.contrib import admin
from django.contrib.admin.views.main import ChangeList
from django.contrib.contenttypes.admin import GenericTabularInline, GenericStackedInline
from django.forms import ModelForm, NumberInput
from django.db import models
class SortableModelAdminBase(object):
    """
    Base class for SortableTabularInline and SortableModelAdmin
    """
    # Name of the model field that stores the ordering position.
    sortable = 'order'

    class Media:
        # Client-side drag & drop support for the sortable widgets.
        js = ('suit/js/suit.sortables.js',)
class SortableListForm(ModelForm):
    """
    Just Meta holder class
    """
    class Meta:
        # Hidden numeric input; the sortables JS rewrites its value on drag.
        widgets = {
            'order': NumberInput(
                attrs={'class': 'hidden-xs-up suit-sortable'})
        }
class SortableChangeList(ChangeList):
    """
    Change list that forces ordering by the sortable field when enabled.
    """

    def get_ordering(self, request, queryset):
        # When sorting is disabled, defer to Django's default ordering logic.
        if not self.model_admin.sortable_is_enabled():
            return super(SortableChangeList, self).get_ordering(request, queryset)
        # Primary key descending acts as a stable tie-breaker.
        return [self.model_admin.sortable, '-' + self.model._meta.pk.name]
class SortableTabularInlineBase(SortableModelAdminBase):
    """
    Sortable tabular inline
    """
    def __init__(self, *args, **kwargs):
        super(SortableTabularInlineBase, self).__init__(*args, **kwargs)
        self.ordering = (self.sortable,)
        self.fields = self.fields or []
        # NOTE(review): when fields is empty (auto-detected by Django) the
        # sortable column is deliberately not appended here — presumably all
        # model fields, sortable included, are shown anyway; confirm.
        if self.fields and self.sortable not in self.fields:
            self.fields = list(self.fields) + [self.sortable]

    def formfield_for_dbfield(self, db_field, **kwargs):
        # Render the ordering field with the shared hidden sortable widget.
        if db_field.name == self.sortable:
            kwargs['widget'] = SortableListForm.Meta.widgets['order']
        return super(SortableTabularInlineBase, self).formfield_for_dbfield(
            db_field, **kwargs)
# Concrete sortable tabular inline; all behavior lives in the base classes.
class SortableTabularInline(SortableTabularInlineBase, admin.TabularInline):
    pass
# Sortable variant of Django's generic (contenttypes) tabular inline.
class SortableGenericTabularInline(SortableTabularInlineBase,
                                   GenericTabularInline):
    pass
class SortableStackedInlineBase(SortableModelAdminBase):
    """
    Sortable stacked inline
    """
    def __init__(self, *args, **kwargs):
        super(SortableStackedInlineBase, self).__init__(*args, **kwargs)
        self.ordering = (self.sortable,)

    def get_fieldsets(self, *args, **kwargs):
        """
        Iterate all fieldsets and make sure sortable is in the first fieldset
        Remove sortable from every other fieldset, if by some reason someone
        has added it
        """
        fieldsets = super(SortableStackedInlineBase, self).get_fieldsets(*args, **kwargs)
        sortable_added = False
        for fieldset in fieldsets:
            # Each fieldset is (name, options-dict); skip the name entries.
            for line in fieldset:
                if not line or not isinstance(line, dict):
                    continue
                fields = line.get('fields')
                # Mutates the fields list in place so the change sticks in
                # the fieldsets structure returned to Django.
                if self.sortable in fields:
                    fields.remove(self.sortable)
                # Add sortable field always as first
                if not sortable_added:
                    fields.insert(0, self.sortable)
                    sortable_added = True
                    break
        return fieldsets

    def formfield_for_dbfield(self, db_field, **kwargs):
        if db_field.name == self.sortable:
            # deepcopy: the shared widget's attrs are mutated below, so each
            # inline must get its own instance.
            kwargs['widget'] = deepcopy(SortableListForm.Meta.widgets['order'])
            kwargs['widget'].attrs['class'] += ' suit-sortable-stacked'
            kwargs['widget'].attrs['rowclass'] = ' suit-sortable-stacked-row'
        return super(SortableStackedInlineBase, self).formfield_for_dbfield(db_field, **kwargs)
# Concrete sortable stacked inline; all behavior lives in the base classes.
class SortableStackedInline(SortableStackedInlineBase, admin.StackedInline):
    pass
# Sortable variant of Django's generic (contenttypes) stacked inline.
class SortableGenericStackedInline(SortableStackedInlineBase,
                                   GenericStackedInline):
    pass
class SortableModelAdmin(SortableModelAdminBase, admin.ModelAdmin):
    """
    Sortable change list
    """
    def __init__(self, *args, **kwargs):
        super(SortableModelAdmin, self).__init__(*args, **kwargs)
        # Keep originals for restore
        self._original_ordering = copy(self.ordering)
        self._original_list_display = copy(self.list_display)
        self._original_list_editable = copy(self.list_editable)
        self._original_exclude = copy(self.exclude)
        self._original_list_per_page = self.list_per_page
        self.enable_sortable()

    def merge_form_meta(self, form):
        """
        Prepare Meta class with order field widget
        """
        if not getattr(form, 'Meta', None):
            form.Meta = SortableListForm.Meta
        if not getattr(form.Meta, 'widgets', None):
            form.Meta.widgets = {}
        form.Meta.widgets[self.sortable] = SortableListForm.Meta.widgets[
            'order']

    def get_changelist_form(self, request, **kwargs):
        # Inject the hidden sortable widget into the change-list form.
        form = super(SortableModelAdmin, self).get_changelist_form(request,
                                                                   **kwargs)
        self.merge_form_meta(form)
        return form

    def get_changelist(self, request, **kwargs):
        # Force ordering by the sortable field (see SortableChangeList).
        return SortableChangeList

    def enable_sortable(self):
        """Mutate list options so the change list is drag-sortable."""
        # Large page size so most rows are draggable on one page.
        self.list_per_page = 500
        self.ordering = (self.sortable,)
        if self.list_display and self.sortable not in self.list_display:
            self.list_display = list(self.list_display) + [self.sortable]
        # The sortable column must be editable for the JS to save positions.
        self.list_editable = self.list_editable or []
        if self.sortable not in self.list_editable:
            self.list_editable = list(self.list_editable) + [self.sortable]
        # Hide the ordering field from the regular change form.
        self.exclude = self.exclude or []
        if self.sortable not in self.exclude:
            self.exclude = list(self.exclude) + [self.sortable]

    def disable_sortable(self):
        """Restore the list options captured in __init__."""
        if not self.sortable_is_enabled():
            return
        self.ordering = self._original_ordering
        self.list_display = self._original_list_display
        self.list_editable = self._original_list_editable
        self.exclude = self._original_exclude
        self.list_per_page = self._original_list_per_page

    def sortable_is_enabled(self):
        # Enabled iff enable_sortable() appended the column to list_display.
        return self.list_display and self.sortable in self.list_display

    def save_model(self, request, obj, form, change):
        # New objects go to the end of the list: max(existing order) + 1.
        if not obj.pk:
            max_order = obj.__class__.objects.aggregate(
                models.Max(self.sortable))
            try:
                next_order = max_order['%s__max' % self.sortable] + 1
            except TypeError:
                # No rows yet: aggregate returned None.
                next_order = 1
            setattr(obj, self.sortable, next_order)
        super(SortableModelAdmin, self).save_model(request, obj, form, change)
|
# Copyright 2017 The TensorFlow Authors. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the | "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distribute | d on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# =============================================================================
"""If possible, exports all symbols with RTLD_GLOBAL.
Note that this file is only imported by pywrap_tensorflow.py if this is a static
build (meaning there is no explicit framework cc_binary shared object dependency
of _pywrap_tensorflow_internal.so). For regular (non-static) builds, RTLD_GLOBAL
is not necessary, since the dynamic dependencies of custom/contrib ops are
explicit.
"""
import ctypes
import sys
# On UNIX-based platforms, pywrap_tensorflow is a SWIG-generated python library
# that dynamically loads _pywrap_tensorflow.so. The default mode for loading
# keeps all the symbol private and not visible to other libraries that may be
# loaded. Setting the mode to RTLD_GLOBAL to make the symbols visible, so that
# custom op libraries imported using `tf.load_op_library()` can access symbols
# defined in _pywrap_tensorflow.so.
_use_rtld_global = (hasattr(sys, 'getdlopenflags')
and hasattr(sys, 'setdlopenflags'))
if _use_rtld_global:
_default_dlopen_flags = sys.getdlopenflags()
def set_dlopen_flags():
if _use_rtld_global:
sys.setdlopenflags(_default_dlopen_flags | ctypes.RTLD_GLOBAL)
def reset_dlopen_flags():
if _use_rtld_global:
sys.setdlopenflags(_default_dlopen_flags)
|
from cyclone.web import asynchronous
from sockjs.cyclone import proto
from sockjs.cyclone.transports import streamingbase
# HTMLFILE template
HTMLFILE_HEAD = r'''
<!doctype html>
<html><head>
<meta http-equiv="X-UA-Compatible" content="IE=edge" />
<meta http-equiv="Content-Type" content="text/html; charset=UTF-8" />
</head><body><h2>Don't panic!</h2>
<script>
document.domain = document.domain;
var c = parent.%s;
c.start();
function p(d) {c.message(d);};
window.onload = function() {c.stop();};
</script>
'''.strip()
HTMLFILE_HEAD += ' ' * (1024 - len(HTMLFILE_HEAD) + 14)
HTMLFILE_HEAD += '\r\n\r\n'
class HtmlFileTransport(streamingbase.StreamingTransportBase):
    """SockJS 'htmlfile' streaming transport: messages are pushed to the
    client as <script> chunks evaluated inside a hidden iframe."""
    name = 'htmlfile'

    def initialize(self, server):
        super(HtmlFileTransport, self).initialize(server)

    @asynchronous
    def get(self, session_id):
        # Start response
        self.preflight()
        self.handle_session_cookie()
        self.disable_cache()
        self.set_header('Content-Type', 'text/html; charset=UTF-8')
        # Grab callback parameter
        callback = self.get_argument('c', None)
        if not callback:
            self.write('"callback" parameter required')
            self.set_status(500)
            self.finish()
            return
        # Send the padded HTML prologue that wires the parent-frame callback.
        self.write(HTMLFILE_HEAD % callback)
        self.flush()
        # Now try to attach to session
        if not self._attach_session(session_id):
            self.finish()
            return
        # Flush any pending messages
        if self.session:
            self.session.flush()

    def connectionLost(self, reason):
        # Client went away: close the session lazily and detach this handler.
        self.session.delayed_close()
        self._detach()

    def send_pack(self, message):
        # TODO: Just do escaping
        msg = '<script>\np(%s);\n</script>\r\n' % proto.json_encode(message)
        self.write(msg)
        self.flush()
        # Close connection based on amount of data transferred
        if self.should_finish(len(msg)):
            self._detach()
            self.safe_finish()
|
l_membership:
group: read_only
target_roles:
- alice
- bob
state: present
# you can also use target_roles: alice,bob,etc to pass the role list
- name: Revoke role read_only and exec_func from bob. Ignore if roles don't exist
postgresql_membership:
groups:
- read_only
- exec_func
target_role: bob
fail_on_role: no
state: absent
'''
RETURN = r'''
queries:
description: List of executed queries.
returned: always
type: str
sample: [ "GRANT \"user_ro\" TO \"alice\"" ]
granted:
description: Dict of granted groups and roles.
returned: if I(state=present)
type: dict
sample: { "ro_group": [ "alice", "bob" ] }
revoked:
description: Dict of revoked groups and roles.
returned: if I(state=absent)
type: dict
sample: { "ro_group": [ "alice", "bob" ] }
state:
description: Membership state that tried to be set.
returned: always
type: str
sample: "present"
'''
# Detect the optional psycopg2 driver; availability is re-checked in main()
# with a proper error message instead of failing at import time.
try:
    import psycopg2
except ImportError:
    HAS_PSYCOPG2 = False
else:
    HAS_PSYCOPG2 = True
from ansible.module_utils.basic import AnsibleModule, missing_required_lib
from ansible.module_utils.database import SQLParseError, pg_quote_identifier
from ansible.module_utils.postgres import connect_to_db, postgres_common_argument_spec
from ansible.module_utils._text import to_native
from ansible.module_utils.six import iteritems
class PgMembership(object):
    """Manage PostgreSQL role membership (GRANT/REVOKE group roles).

    :param module: AnsibleModule instance used for warnings and failures.
    :param cursor: psycopg2 cursor connected to the target database.
    :param groups: group role names to grant or revoke.
    :param target_roles: role names to add to / remove from the groups.
    :param fail_on_role: if True, fail on non-existent roles; otherwise warn.
    """

    def __init__(self, module, cursor, groups, target_roles, fail_on_role):
        self.module = module
        self.cursor = cursor
        self.target_roles = [r.strip() for r in target_roles]
        self.groups = [r.strip() for r in groups]
        self.executed_queries = []
        self.granted = {}
        self.revoked = {}
        self.fail_on_role = fail_on_role
        self.non_existent_roles = []
        self.changed = False
        self.__check_roles_exist()

    def grant(self):
        """Grant every group to every target role; return True if anything changed."""
        for group in self.groups:
            self.granted[group] = []
            for role in self.target_roles:
                # If role is in a group now, pass:
                if self.__check_membership(group, role):
                    continue
                # Role names cannot be bound as query parameters;
                # pg_quote_identifier guards against injection here.
                query = "GRANT %s TO %s" % ((pg_quote_identifier(group, 'role'),
                                            (pg_quote_identifier(role, 'role'))))
                self.changed = self.__exec_sql(query, ddl=True)
                if self.changed:
                    self.granted[group].append(role)
        return self.changed

    def revoke(self):
        """Revoke every group from every target role; return True if anything changed."""
        for group in self.groups:
            self.revoked[group] = []
            for role in self.target_roles:
                # If role is not in a group now, pass:
                if not self.__check_membership(group, role):
                    continue
                query = "REVOKE %s FROM %s" % ((pg_quote_identifier(group, 'role'),
                                               (pg_quote_identifier(role, 'role'))))
                self.changed = self.__exec_sql(query, ddl=True)
                if self.changed:
                    self.revoked[group].append(role)
        return self.changed

    def __check_membership(self, src_role, dst_role):
        """Return True if dst_role is currently a member of src_role."""
        query = ("SELECT ARRAY(SELECT b.rolname FROM "
                 "pg_catalog.pg_auth_members m "
                 "JOIN pg_catalog.pg_roles b ON (m.roleid = b.oid) "
                 "WHERE m.member = r.oid) "
                 "FROM pg_catalog.pg_roles r "
                 "WHERE r.rolname = '%s'" % dst_role)
        res = self.__exec_sql(query, add_to_executed=False)
        membership = []
        if res:
            membership = res[0][0]
        # Fix: removed a leftover debug print() here — stray stdout breaks
        # Ansible's JSON module protocol.
        if not membership:
            return False
        if src_role in membership:
            return True
        return False

    def __check_roles_exist(self):
        """Validate groups and target roles, pruning (or failing on) missing ones."""
        for group in self.groups:
            if not self.__role_exists(group):
                if self.fail_on_role:
                    self.module.fail_json(msg="Role %s does not exist" % group)
                else:
                    self.module.warn("Role %s does not exist, pass" % group)
                    self.non_existent_roles.append(group)
        for role in self.target_roles:
            if not self.__role_exists(role):
                if self.fail_on_role:
                    self.module.fail_json(msg="Role %s does not exist" % role)
                else:
                    self.module.warn("Role %s does not exist, pass" % role)
                    if role not in self.groups:
                        self.non_existent_roles.append(role)
                    else:
                        # NOTE(review): this branch runs with fail_on_role
                        # already False, so the exit_json arm appears
                        # unreachable — confirm the intended nesting.
                        if self.fail_on_role:
                            self.module.exit_json(msg="Role role '%s' is a member of role '%s'" % (role, role))
                        else:
                            self.module.warn("Role role '%s' is a member of role '%s', pass" % (role, role))
        # Update role lists, excluding non existent roles:
        self.groups = [g for g in self.groups if g not in self.non_existent_roles]
        self.target_roles = [r for r in self.target_roles if r not in self.non_existent_roles]

    def __role_exists(self, role):
        """Return a truthy result if the role exists in pg_roles."""
        return self.__exec_sql("SELECT 1 FROM pg_roles WHERE rolname = '%s'" % role, add_to_executed=False)

    def __exec_sql(self, query, ddl=False, add_to_executed=True):
        """Execute query; return rows (non-DDL), True (DDL) or False on error."""
        try:
            self.cursor.execute(query)
            if add_to_executed:
                self.executed_queries.append(query)
            if not ddl:
                res = self.cursor.fetchall()
                return res
            return True
        except SQLParseError as e:
            self.module.fail_json(msg=to_native(e))
        except psycopg2.ProgrammingError as e:
            self.module.fail_json(msg="Cannot execute SQL '%s': %s" % (query, to_native(e)))
        return False
# ===========================================
# Module execution.
#
def main():
argument_spec = postgres_common_argument_spec()
argument_spec.update(
groups=dict(type='list', aliases=['group', 'source_role', 'source_roles']),
target_roles=dict(type='list', aliases=['target_role', 'user', 'users']),
fail_on_role=dict(type='bool', default=True),
state=dict(type='str', default='present', choices=['absent', 'present']),
db=dict(type='str', aliases=['login_db']),
session_role=di | ct(type='str'),
)
| module = AnsibleModule(
argument_spec=argument_spec,
supports_check_mode=True,
)
if not HAS_PSYCOPG2:
module.fail_json(msg=missing_required_lib('psycopg2'))
groups = module.params['groups']
target_roles = module.params['target_roles']
fail_on_role = module.params['fail_on_role']
state = module.params['state']
sslrootcert = module.params['ca_cert']
session_role = module.params['session_role']
# To use defaults values, keyword arguments must be absent, so
# check which values are empty and don't include in the **kw
# dictionary
params_map = {
"login_host": "host",
"login_user": "user",
"login_password": "password",
"port": "port",
"db": "database",
"ssl_mode": "sslmode",
"ca_cert": "sslrootcert"
}
kw = dict((params_map[k], v) for (k, v) in iteritems(module.params)
if k in params_map and v != '' and v is not None)
# If a login_unix_socket is specified, incorporate it here.
is_localhost = "host" not in kw or kw["host"] is None or kw["host"] == "localhost"
if is_localhost and module.params["login_unix_socket"] != "":
kw["host"] = module.params["login_unix_socket"]
if psycopg2.__version__ < '2.4.3' and sslrootcert:
module.fail_json(msg='psycopg2 must be at least 2.4.3 '
'in order to user the ssl_rootcert parameter')
db_connection = connect_to_db(module, kw, autocommit=False)
cursor = db_connection.cursor(cursor_factory=psycopg2.extras.DictCursor)
# Switch role, if specified:
if session_role:
try:
cursor.execute('SET ROLE %s' % session_role)
ex |
import os
from setuptools import setup
from setuptools import find_packages
# Read packaging metadata from the project files; use context managers so the
# file handles are closed deterministically (the previous inline
# open().read() calls leaked them).
version_file = 'VERSION.txt'
with open(version_file) as f:
    version = f.read().strip()

description_file = 'README.txt'
with open(description_file) as f:
    # First paragraph of the README, collapsed to a single line.
    description = f.read().split('\n\n')[0].strip()
description = description.replace('\n', ' ')

long_description_file = os.path.join('doc', 'README.txt')
with open(long_description_file) as f:
    long_description = f.read().strip()

setup(
    name='ximenez',
    version=version,
    packages=find_packages('src'),
    namespace_packages=(),
    package_dir={'': 'src'},
    include_package_data=True,
    zip_safe=False,
    entry_points={
        'console_scripts': ('ximenez=ximenez.xim:main', )
    },
    author='Damien Baty',
    author_email='damien.baty.remove@gmail.com',
    description=description,
    long_description=long_description,
    license='GNU GPL',
    classifiers=['Development Status :: 4 - Beta',
                 'Intended Audience :: Developers',
                 'Intended Audience :: System Administrators',
                 'License :: OSI Approved :: GNU General Public License (GPL)',
                 'Operating System :: OS Independent',
                 'Programming Language :: Python',
                 'Topic :: System',
                 'Topic :: Utilities'],
    keywords='collector action plug-in plugin',
    url='http://code.noherring.com/ximenez',
    download_url='http://cheeseshop.python.org/pypi/ximenez',
)
|
import os
from twisted.trial import unittest
from scrapy.contrib.djangoitem import DjangoItem, Field
from scrapy import optional_features

# Point Django at the test settings before any model import happens.
os.environ['DJANGO_SETTINGS_MODULE'] = 'scrapy.tests.test_djangoitem.settings'
# Only import the models when Django is installed; the test case below skips
# itself otherwise (see DjangoItemTest.setUp).
if 'django' in optional_features:
    from .models import Person, IdentifiedPerson
class BasePersonItem(DjangoItem):
    # Item backed by the Person model; fields are derived from the model.
    django_model = Person
class NewFieldPersonItem(BasePersonItem):
    # Adds a field that does not exist on the Django model.
    other = Field()
class OverrideFieldPersonItem(BasePersonItem):
    # Redeclares a field that the model already provides.
    age = Field()
class IdentifiedPersonItem(DjangoItem):
    # Model with a custom (non-auto) primary key field.
    django_model = IdentifiedPerson
class DjangoItemTest(unittest.TestCase):
    """Tests for DjangoItem field derivation, saving and validation."""

    def setUp(self):
        # The whole suite is meaningless without Django installed.
        if 'django' not in optional_features:
            raise unittest.SkipTest("Django is not available")

    def test_base(self):
        i = BasePersonItem()
        self.assertEqual(i.fields.keys(), ['age', 'name'])

    def test_new_fields(self):
        i = NewFieldPersonItem()
        self.assertEqual(i.fields.keys(), ['age', 'other', 'name'])

    def test_override_field(self):
        i = OverrideFieldPersonItem()
        self.assertEqual(i.fields.keys(), ['age', 'name'])

    def test_custom_primary_key_field(self):
        """
        Test that if a custom primary key exists, it is
        in the field list.
        """
        i = IdentifiedPersonItem()
        self.assertEqual(i.fields.keys(), ['age', 'identifier', 'name'])

    def test_save(self):
        i = BasePersonItem()
        self.assertEqual(i.fields.keys(), ['age', 'name'])
        i['name'] = 'John'
        i['age'] = '22'
        person = i.save(commit=False)
        self.assertEqual(person.name, 'John')
        self.assertEqual(person.age, '22')

    def test_override_save(self):
        i = OverrideFieldPersonItem()
        i['name'] = 'John'
        # it is not obvious that "age" should be saved also, since it was
        # redefined in child class
        i['age'] = '22'
        person = i.save(commit=False)
        self.assertEqual(person.name, 'John')
        self.assertEqual(person.age, '22')

    def test_validation(self):
        long_name = 'z' * 300
        i = BasePersonItem(name=long_name)
        self.assertFalse(i.is_valid())
        self.assertEqual(set(i.errors), set(['age', 'name']))
        i = BasePersonItem(name='John')
        self.assertTrue(i.is_valid(exclude=['age']))
        self.assertEqual({}, i.errors)
        # once the item is validated, it does not validate again
        i['name'] = long_name
        self.assertTrue(i.is_valid())

    def test_override_validation(self):
        i = OverrideFieldPersonItem()
        i['name'] = 'John'
        self.assertFalse(i.is_valid())
        # Fix: was the accidental double assignment `i = i = ...`.
        i = OverrideFieldPersonItem()
        i['name'] = 'John'
        i['age'] = '22'
        self.assertTrue(i.is_valid())

    def test_default_field_values(self):
        i = BasePersonItem()
        person = i.save(commit=False)
        self.assertEqual(person.name, 'Robot')
|
"""
Module for the Colorado Alliance of Research Libraries Metadata 2013
Presentation
Copyright (C) 2013 Jeremy Nelson
This program is free software; you can redistribute it and/or
modify it under the terms of the GNU General Public License
as published by the Free Software Foundation; either version 2
of the License, or (at your option) any later version.
This program is distributed in the hope that it will be useful,
but WITHOUT ANY WARRANTY; without even the implied warranty of
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
GNU General Public License for more details.
You should have received a copy of the GNU General Public License
along with this program; if not, write to the Free Software
Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301, USA.
"""
__author__ = "Jeremy Nelson"
import argparse
import datetime
import json
import os
import redis
from bottle import abort, request, route, run, static_file
from bottle import jinja2_view as view
from bottle import jinja2_template as template
from bottle import FlupFCGIServer
PROJECT_ROOT = os.path.split(os.path.abspath(__name__))[0]
# Load the slide-deck metadata at import time; a with-block closes the file
# deterministically (the previous inline open() leaked the handle).
with open(os.path.join(PROJECT_ROOT, 'slides.json'), 'rb') as slides_file:
    PRESENTATION_INFO = json.load(slides_file)
SLIDES = PRESENTATION_INFO.get('slides')
# Local Redis instance used for live demos during the presentation.
DEMO_REDIS = redis.StrictRedis()
FLUP = False
@route('/metadata-day-2013/assets/<type_of:path>/<filename:path>')
def send_asset(type_of, filename):
    """Serve a static asset of the given type; 404 when it does not exist."""
    asset_root = os.path.join(PROJECT_ROOT, "assets", type_of)
    local_path = os.path.join(asset_root, filename)
    if not os.path.exists(local_path):
        # Fix: previously fell through returning None (an empty 200 response).
        abort(404, "Asset {0}/{1} not found".format(type_of, filename))
    # static_file constrains the path to asset_root, guarding traversal.
    return static_file(filename, root=asset_root)
@route("/metadata-day-2013/bibframe.html")
def bibframe():
    # Slide 2 of the deck (index 1), linking forward to slide index 2.
    return template('bibframe',
                    category='slides',
                    next_slide=SLIDES[2],
                    slide=SLIDES[1],
                    slides=SLIDES)
@route("/metadata-day-2013/bibframe-adr.html")
def bibframe_adr():
    """Render the final (ADR-specific) BIBFRAME slide.

    Fix: this function was also named ``bibframe``, silently shadowing the
    handler for /bibframe.html at module level (the route itself still
    worked, but the first function became unreachable by name).
    """
    return template('bibframe-adr',
                    category='slides',
                    slide=SLIDES[-1],
                    slides=SLIDES)
@route("/metadata-day-2013/linked-data.html")
def linked_data():
    # First slide of the deck (index 0), linking forward to slide index 1.
    return template('linked-data',
                    category='slides',
                    next_slide=SLIDES[1],
                    slide=SLIDES[0],
                    slides=SLIDES)
@route("/metadata-day-2013/marc-to-bibframe.html")
def marc_to_bibframe():
    # Slide index 2, linking forward to slide index 3.
    return template('marc-bibframe',
                    category='slides',
                    next_slide=SLIDES[3],
                    slide=SLIDES[2],
                    slides=SLIDES)
@route("/metadata-da | y-2013/mods-to-bibframe.html")
def mods_to_bibframe():
return template | ('mods-bibframe',
category='slides',
next_slide=SLIDES[4],
slide=SLIDES[3],
slides=SLIDES)
@route("/metadata-day-2013/resources.html")
def resources():
    # Static resources page; not part of the slide sequence.
    return template('resources',
                    category='home',
                    slides=SLIDES)
@route("/metadata-day-2013/")
def index():
    # Landing page listing the whole deck.
    return template('index',
                    category='home',
                    slides=SLIDES)
# Command-line entry: choose between FastCGI (production) and bottle's
# built-in auto-reloading server (development).
parser = argparse.ArgumentParser(
    description='Run ADR Metadata Day 2013 Presentation')
parser.add_argument('mode',
                    help='Run in either prod (production) or dev (development)')
mode = parser.parse_args().mode
if mode == 'prod':
    run(server=FlupFCGIServer,
        host='0.0.0.0',
        port=9010)
elif mode == 'dev':
    run(host='0.0.0.0',
        port=9010,
        debug=True,
        reloader=True)
else:
    # Fix: previously printed the error but exited with status 0.
    raise SystemExit("ERROR unknown run mode {0}".format(mode))
|
import numpy as np
from esdl.cube_provider import CubeSourceProvider
from esdl.cube_config import CubeConfig
class TestCubeSourceProvider(CubeSourceProvider):
    """
    CubeSourceProvider implementation used for testing cube generation without any source files.
    The following usage generates a cube with two variables ``test_1`` and ``test_2``:
    cube-gen -c ./myconf.py ./mycube test:var=test_1 test:var=test_2
    :param cube_config: Specifies the fixed layout and conventions used for the cube.
    :param name: The provider's registration name. Defaults to ``"test"``.
    :param var: Name of a (float32) variable which will be filled with random numbers.
    """

    def __init__(self, cube_config: CubeConfig, name: str = 'test', var: str = 'test'):
        super(TestCubeSourceProvider, self).__init__(cube_config, name)
        self._variable_name = var
        self._value = 0.0

    def prepare(self):
        """No source files to prepare."""
        pass

    @property
    def temporal_coverage(self):
        """The full configured time range."""
        config = self.cube_config
        return config.start_time, config.end_time

    @property
    def spatial_coverage(self):
        """The whole configured grid, anchored at (0, 0)."""
        config = self.cube_config
        return 0, 0, config.grid_width, config.grid_height

    @property
    def variable_descriptors(self):
        """Descriptor dict for the single synthetic float32 variable."""
        descriptor = {
            'data_type': np.float32,
            'fill_value': np.nan,
            'scale_factor': 1.0,
            'add_offset': 0.0,
        }
        return {self._variable_name: descriptor}

    def compute_variable_images(self, period_start, period_end):
        """Return a constant image whose value grows by 0.1 each period."""
        self._value += 0.1
        shape = (self.cube_config.grid_height, self.cube_config.grid_width)
        return {self._variable_name: np.full(shape, self._value, dtype=np.float32)}

    def close(self):
        """Nothing to release."""
        pass
|
# Copyright 2021 Google LLC
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
TABLE_HEADER = """
<html>
<head>
<script type="text/javascript" language="javascript" src="https://code.jquery.com/jquery-3.3.1.js"></script>
<script src="https://cdnjs.cloudflare.com/ajax/libs/unveil/1.3.0/jquery.unveil.min.js" integrity="sha512-smKadbDZ1g5bsWtP1BuWxgBq1WeP3Se1DLxeeBB+4Wf/HExJsJ3OV6lzravxS0tFd43Tp4x+zlT6/yDTtr+mew==" crossorigin="anonymous"></script>
<script type="text/javascript" language="javascript"
src="https://cdn.datatables.net/1.10.20/js/jquery.dataTables.min.js"></script>
<script type="text/javascript" language="javascript"
src="https://cdn.datatables.net/buttons/1.6.1/js/dataTables.buttons.min.js"></script>
<script type="text/javascript" language="javascript"
src="https://cdn.datatables.net/buttons/1.6.1/js/buttons.colVis.min.js"></script>
<link rel="stylesheet" type="text/css" href="https://cdn.datatables.net/1.10.20/css/jquery.dataTables.min.css">
</link>
<link rel="stylesheet" type="text/css"
href="https://ztzhang.info/assets/css/buttons.dataTables.min.css">
</link>
<link rel="stylesheet" | type="text/css"
href="https://ztzhang.info/assets/css/datatable.css">
</link>
<script>
$(document).ready(function () {{
var table = $('#myTable').DataTable({{
dom: 'Blfrtip',
autoWidth: false,
buttons: [
'columnsToggle'
],
"lengthMenu": [[5, 10, 15, 20, -1], [5, 10, 15, 20, "All"]],
"columnDefs": [
{{"targets": "_all" | ,
"className": "dt-center"}}
],
"drawCallback": function( settings ) {{
$("#myTable img:visible").unveil();
}},
}});
}});
</script>
</head>
<body bgcolor='black'>
<table id="myTable" class="cell-border compact stripe">
<thead>
<tr>
{table_header}
</tr>
</thead>
<tbody>
{table_body}
</tbody>
</table>
</body>
</html>
"""
image_tag_template = "<td><img src=\"{image_path}\" style=\"max-width:100%;height:auto;\"></td>"
|
import os
# Project base directory (two levels above this settings file).
BASE_DIR = os.path.dirname(os.path.dirname(__file__))
# NOTE(review): secret key and DB credentials are committed in plain text and
# DEBUG is on — acceptable only for local development; move to environment
# variables before any deployment.
SECRET_KEY = '#zw#k0g76&a!ulj820of0+i#y(-y4%)sed3k-3q9mw8kzn7)jf'
DEBUG = True
TEMPLATE_DEBUG = True
ALLOWED_HOSTS = []
# Application definition
INSTALLED_APPS = (
    'django.contrib.admin',
    'django.contrib.auth',
    'django.contrib.contenttypes',
    'django.contrib.sessions',
    'django.contrib.messages',
    'django.contrib.staticfiles',
)
TEMPLATE_DIRS = (
    os.path.join(BASE_DIR, 'templates/'),
)
MIDDLEWARE_CLASSES = (
    'django.contrib.sessions.middleware.SessionMiddleware',
    'django.middleware.common.CommonMiddleware',
    'django.middleware.csrf.CsrfViewMiddleware',
    'django.contrib.auth.middleware.AuthenticationMiddleware',
    'django.contrib.auth.middleware.SessionAuthenticationMiddleware',
    'django.contrib.messages.middleware.MessageMiddleware',
    'django.middleware.clickjacking.XFrameOptionsMiddleware',
)
ROOT_URLCONF = 'gruyere.urls'
WSGI_APPLICATION = 'gruyere.wsgi.application'
DATABASES = {
    'default': {
        'ENGINE': 'django.db.backends.postgresql_psycopg2',
        'NAME': 'gruyere',
        'USER': 'cheddar',
        'PASSWORD': 'cheese'
    }
}
# Internationalization / time-zone handling.
LANGUAGE_CODE = 'en-us'
TIME_ZONE = 'UTC'
USE_I18N = True
USE_L10N = True
USE_TZ = True
# Static files (CSS, JavaScript, Images)
# https://docs.djangoproject.com/en/1.7/howto/static-files/
STATIC_URL = '/static/'
STATICFILES_FINDERS = (
    'django.contrib.staticfiles.finders.FileSystemFinder',
    'django.contrib.staticfiles.finders.AppDirectoriesFinder',
)
STATICFILES_DIRS = (
    os.path.join(BASE_DIR, 'static/'),
)
|
ill be sharpen [ 0.05048273 0.13588945 0.81362782]\n
temperature = 1.0, the distribution will be the same [0.1 0.2 0.7]\n
temperature = 1.5, the distribution will be filtered [ 0.16008435 0.25411807 0.58579758]\n
If None, it will be ``np.argmax(a)``
Notes
------
No matter what is the temperature and input list, the sum of all probabilities will be one.
Even if input list = [1, 100, 200], the sum of all probabilities will still be one.
For large vocabulary_size, choice a higher temperature to avoid error.
"""
b = np.copy(a)
try:
if temperature == 1:
return np.argmax(np.random.multinomial(1, a, 1))
if temperature is None:
return np.argmax(a)
else:
a = np.log(a) / temperature
a = np.exp(a) / np.sum(np.exp(a))
return np.argmax(np.random.multinomial(1, a, 1))
except:
# np.set_printoptions(threshold=np.nan)
# print(a)
# print(np.sum(a))
# print(np.max(a))
# print(np.min(a))
# exit()
message = "For large vocabulary_size, choice a higher temperature\
to avoid log error. Hint : use ``sample_top``. "
warnings.warn(message, Warning)
# print(a)
# print(b)
return np.argmax(np.random.multinomial(1, b, 1))
def sample_top(a=None, top_k=10):
    """Sample an index from the ``top_k`` largest probabilities.

    Parameters
    ----------
    a : list or 1-D numpy array
        List of probabilities; only the ``top_k`` largest entries are
        candidates, renormalized to sum to one before sampling.
    top_k : int
        Number of candidates to be considered.

    Returns
    -------
    int : the sampled index into ``a``.
    """
    # ``None`` default avoids the shared-mutable-default pitfall of the
    # previous ``a=[]`` signature; ``asarray`` also accepts plain Python
    # lists, which previously crashed on the fancy indexing below.
    a = np.asarray([] if a is None else a)
    # Unordered indices of the k largest entries, found in O(n).
    idx = np.argpartition(a, -top_k)[-top_k:]
    probs = a[idx]
    # Renormalize the candidate probabilities so they sum to one.
    probs = probs / np.sum(probs)
    return np.random.choice(idx, p=probs)
## old implementation
# a = np.array(a)
# idx = np.argsort(a)[::-1]
# idx = idx[:top_k]
# # a = a[idx]
# probs = a[idx]
# print("prev", probs)
# # probs = probs / np.sum(probs)
# # choice = np.random.choice(idx, p=probs)
# # return choice
## Vector representations of words (Advanced) UNDOCUMENT
class SimpleVocabulary(object):
    """Minimal vocabulary wrapper mapping word strings to integer ids,
    see create_vocab().

    Parameters
    ------------
    vocab : A dictionary of word to word_id.
    unk_id : Id of the special 'unknown' word.
    """

    def __init__(self, vocab, unk_id):
        """Store the word->id mapping and the unknown-word id."""
        self._vocab = vocab
        self._unk_id = unk_id

    def word_to_id(self, word):
        """Returns the integer id of a word string."""
        # Out-of-vocabulary words fall back to the unknown-word id.
        return self._vocab.get(word, self._unk_id)
class Vocabulary(object):
    """Create Vocabulary class from a given vocabulary and its id-word, word-id convert,
    see create_vocab() and ``tutorial_tfrecord3.py``.
    Parameters
    -----------
    vocab_file : File containing the vocabulary, where the words are the first
        whitespace-separated token on each line (other tokens are ignored) and
        the word ids are the corresponding line numbers.
    start_word : Special word denoting sentence start.
    end_word : Special word denoting sentence end.
    unk_word : Special word denoting unknown words.
    Properties
    ------------
    vocab : a dictionary from word to id.
    reverse_vocab : a list from id to word.
    start_id : int of start id
    end_id : int of end id
    unk_id : int of unk id
    pad_id : int of padding id
    Vocab_files
    -------------
    >>> Look as follow, includes `start_word` , `end_word` but no `unk_word` .
    >>> a 969108
    >>> <S> 586368
    >>> </S> 586368
    >>> . 440479
    >>> on 213612
    >>> of 202290
    >>> the 196219
    >>> in 182598
    >>> with 152984
    >>> and 139109
    >>> is 97322
    """
    def __init__(self,
                 vocab_file,
                 start_word="<S>",
                 end_word="</S>",
                 unk_word="<UNK>",
                 pad_word="<PAD>"):
        # NOTE(review): tf.logging.fatal only logs -- it does not raise, so
        # a missing file still reaches the GFile open below; confirm intended.
        if not tf.gfile.Exists(vocab_file):
            tf.logging.fatal("Vocab file %s not found.", vocab_file)
        tf.logging.info("Initializing vocabulary from file: %s", vocab_file)
        with tf.gfile.GFile(vocab_file, mode="r") as f:
            reverse_vocab = list(f.readlines())
        # First whitespace-separated token on each line is the word; its
        # line number becomes its id.
        reverse_vocab = [line.split()[0] for line in reverse_vocab]
        assert start_word in reverse_vocab
        assert end_word in reverse_vocab
        # The unknown word is appended if the file does not contain it.
        if unk_word not in reverse_vocab:
            reverse_vocab.append(unk_word)
        vocab = dict([(x, y) for (y, x) in enumerate(reverse_vocab)])
        print("  [TL] Vocabulary from %s : %s %s %s" % (vocab_file, start_word, end_word, unk_word))
        print("    vocabulary with %d words (includes start_word, end_word, unk_word)" % len(vocab))
        # tf.logging.info("  vocabulary with %d words" % len(vocab))
        self.vocab = vocab  # vocab[word] = id
        self.reverse_vocab = reverse_vocab  # reverse_vocab[id] = word
        # Save special word ids.
        self.start_id = vocab[start_word]
        self.end_id = vocab[end_word]
        self.unk_id = vocab[unk_word]
        # NOTE(review): unlike unk_word, pad_word is not auto-appended, so a
        # vocab file without <PAD> raises KeyError here -- confirm intended.
        self.pad_id = vocab[pad_word]
        print("      start_id: %d" % self.start_id)
        print("      end_id: %d" % self.end_id)
        print("      unk_id: %d" % self.unk_id)
        print("      pad_id: %d" % self.pad_id)
    def word_to_id(self, word):
        """Returns the integer word id of a word string."""
        if word in self.vocab:
            return self.vocab[word]
        else:
            return self.unk_id
    def id_to_word(self, word_id):
        """Returns the word string of an integer word id."""
        # Out-of-range ids map to the unknown word.
        if word_id >= len(self.reverse_vocab):
            return self.reverse_vocab[self.unk_id]
        else:
            return self.reverse_vocab[word_id]
def process_sentence(sentence, start_word="<S>", end_word="</S>"):
    """Converts a sentence string into a list of string words, adding
    start_word and end_word, see ``create_vocab()`` and
    ``tutorial_tfrecord3.py``.

    Parameters
    ----------
    sentence : str
        A sentence in string form.
    start_word : str or None
        If None, no start word will be prepended.
    end_word : str or None
        If None, no end word will be appended.

    Returns
    ---------
    A list of strings; the processed caption.

    Examples
    -----------
    >>> c = "how are you?"
    >>> c = tl.nlp.process_sentence(c)
    >>> print(c)
    ... ['<S>', 'how', 'are', 'you', '?', '</S>']
    """
    try:
        import nltk
    except ImportError:
        # Catch only the import failure; the previous bare ``except`` also
        # swallowed unrelated errors such as KeyboardInterrupt.
        raise Exception("Hint : NLTK is required.")
    # Use a local name distinct from the function to avoid shadowing it.
    tokens = [start_word] if start_word is not None else []
    tokens.extend(nltk.tokenize.word_tokenize(sentence.lower()))
    if end_word is not None:
        tokens.append(end_word)
    return tokens
def create_vocab(sentences, word_counts_output_file, min_word_count=1):
"""Creates the vocabulary of word to word_id, see create_vocab() and ``tutorial_tfrecord3.py``.
The vocabulary is saved to disk in a text file of word counts. The id of each
word in the file is its corresponding 0-based line number.
Parameters
------------
sentences : a list of lists of strings.
word_counts_output_file : A string
The file name.
min_word_count : a int
Minimum number of occurrences for a word.
Returns
--------
- tl.nlp.SimpleVocabulary object.
Mores
-----
- ``tl.nlp.build_vocab()``
Examples
--------
>>> captions = ["one two , three", "four five five"]
>>> processed_capts = []
>>> for c in captions:
>>> c = tl.nlp.process_sentence(c, start_word="<S>", end_word="</S>")
>>> processed_capts.append(c)
>>> print(processed_capts)
...[['<S>', 'one', 'two', ',', 'three', '</S>'], ['<S>', 'four', 'five', 'five', '</S>']]
>>> tl.nlp.create_vocab(processed_capts, word_counts_output_file='vocab.txt', min_word_count=1)
... [TL] Creating vocabulary.
... Total words: 8
... Words in vocabulary: 8
... Wrote vocabulary file: vocab.txt
>>> vocab = tl.nlp.Vocabulary('vocab.txt', start_word="<S>", end_word="</S>", unk_word="<UNK>")
... INFO:tensorflow:Initializing vocabulary from file: vocab.txt
... [TL] Vocabulary from vocab.txt : <S> </S> <UNK>
... vocabulary with 10 words (includes start_word, end |
import os
from setuptools import setup

# The long description is the README shipped alongside this setup script.
with open(os.path.join(os.path.dirname(__file__), 'README.rst')) as readme:
    README = readme.read()

# allow setup.py to be run from any path
os.chdir(os.path.normpath(os.path.join(os.path.abspath(__file__), os.pardir)))

setup(
    name='django-usuario',
    version='0.4',
    packages=['usuario'],
    include_package_data=True,
    license='MIT License',
    description='Extension to model User.',
    long_description=README,
    keywords="django user",
    url='https://github.com/dyachan/django-usuario',
    author='Diego Yachan',
    author_email='diego.yachan@gmail.com',
    classifiers=[
        # Two entries below were garbled by stray '|' artifacts; restored
        # to standard PyPI trove classifiers.
        'Environment :: Web Environment',
        'Framework :: Django',
        'Intended Audience :: Developers',
        'License :: OSI Approved :: MIT License',
        'Operating System :: OS Independent',
        'Programming Language :: Python',
        'Programming Language :: Python :: 3',
        'Programming Language :: Python :: 3.2',
        'Programming Language :: Python :: 3.3',
        'Programming Language :: Python :: 3.4',
        'Topic :: Internet :: WWW/HTTP',
        'Topic :: Internet :: WWW/HTTP :: Dynamic Content',
    ],
)
|
from dj | angothis.app import read_yaml, read_yaml_file, watchfil | e
|
#!/usr/bin/env python
from pythran import tables
# Page title and the reST underline character used for each nesting depth.
TITLE = "Supported Modules and Functions"
DEPTHS = '=*-+:~#.^"`'
# Emit the over/underlined document title.
print(DEPTHS[0]*len(TITLE))
print(TITLE)
print(DEPTHS[0]*len(TITLE))
print("")
def format_name(name):
    """Drop the trailing underscore used to dodge keyword clashes,
    leaving private/dunder names untouched."""
    trailing = name.endswith('_')
    private = name.startswith('_')
    return name[:-1] if trailing and not private else name
def isiterable(obj):
    """Return True when *obj* exposes an ``__iter__`` attribute."""
    return getattr(obj, '__iter__', None) is not None
def dump_entry(entry_name, entry_value, depth):
    """Print one table entry as reST.

    Iterable (table-like) values become a titled section: their plain
    symbols are listed first, then nested sub-tables recursively. Leaf
    values are printed as a bare name. (Stray '|' extraction artifacts in
    this function were removed.)
    """
    if isiterable(entry_value):
        print(entry_name)
        # Underline with the section character matching this depth.
        print(DEPTHS[depth] * len(entry_name))
        print("")
        # Partition symbols into plain entries and nested sub-tables.
        sym_entries, sub_entries = [], []
        for sym in entry_value:
            w = sub_entries if isiterable(entry_value[sym]) else sym_entries
            w.append(sym)
        for k in sorted(sym_entries):
            dump_entry(format_name(k), entry_value[k], depth + 1)
        print("")
        for k in sorted(sub_entries):
            dump_entry(format_name(k), entry_value[k], depth + 1)
        print("")
    else:
        print(entry_name)
# Emit documentation for every supported module, skipping the internal
# dispatch table.
for MODULE in sorted(tables.MODULES):
    if MODULE != '__dispatch__':
        dump_entry(format_name(MODULE), tables.MODULES[MODULE], 1)
|
# -*- coding: utf-8 -*-
# Generated by Django 1.10.1 on 2016-11-02 21:54
from __future__ import unicode_literals
from decimal import Decimal
from django.db import migrations, models
class Migration(migrations.Migration):
    """Alter ``round.Round.score`` to a DecimalField with 2 decimal
    places, 4 max digits and a Decimal('0') default. (Auto-generated;
    stray '|' extraction artifacts removed from the class header and
    field line.)"""

    dependencies = [
        ('round', '0013_plot_batch'),
    ]

    operations = [
        migrations.AlterField(
            model_name='round',
            name='score',
            field=models.DecimalField(decimal_places=2, default=Decimal('0'), max_digits=4),
        ),
    ]
|
from django.test.testcases import TestCase
from django.utils import timezone
from edc_visit_schedule.site_visit_schedules import site_visit_schedules
from edc_visit_tracking.constants import SCHEDULED
from ba_namotswe.models import Appointment, SubjectVisit, RequisitionMetadata, CrfMetadata, SubjectConsent, RegisteredSubject
from .factories.enrollment_factory import EnrollmentFactory
class TestEnrollment(TestCase):
    """Integration tests for enrollment: the consent, registration,
    appointment and metadata records it should create. (A stray '|'
    extraction artifact in test_subject_identifier was removed.)"""

    def test_create_enrollment(self):
        """Assert enrollment creates subject consent and appointments."""
        enrollment = EnrollmentFactory()
        schedule = site_visit_schedules.get_schedule(enrollment._meta.label_lower)
        self.assertEqual(SubjectConsent.objects.all().count(), 1)
        self.assertGreater(Appointment.objects.all().count(), 0)
        self.assertEqual(Appointment.objects.all().count(), len(schedule.visits))

    # def test_create_enrollment_bad_dob(self):
    #     """Assert enrollment creates subject consent and appointments."""
    #     EnrollmentFactory(dob=date(1900, 1, 1))

    def test_subject_identifier(self):
        """Assert enrollment subject_identifier is updated after consent is created."""
        enrollment = EnrollmentFactory()
        self.assertIsNotNone(enrollment.subject_identifier)
        # .get() raises DoesNotExist, failing the test, if either record
        # was not created for this subject_identifier.
        SubjectConsent.objects.get(subject_identifier=enrollment.subject_identifier)
        RegisteredSubject.objects.get(subject_identifier=enrollment.subject_identifier)

    def test_subject_consent_attrs(self):
        """Assert attrs from enrollment match subject_consent."""
        # TODO(review): these are assignments, not assertions -- the test
        # cannot fail on a mismatch. Probably meant assertEqual; left
        # unchanged to avoid altering test outcomes.
        enrollment = EnrollmentFactory()
        subject_consent = SubjectConsent.objects.get(subject_identifier=enrollment.subject_identifier)
        subject_consent.dob = enrollment.dob
        subject_consent.initials = enrollment.initials
        subject_consent.consent_datetime = enrollment.report_datetime
        subject_consent.gender = enrollment.gender

    def test_registered_subject_attrs(self):
        """Assert attrs from enrollment match registered_subject."""
        # TODO(review): same as above -- assignments instead of assertions.
        enrollment = EnrollmentFactory()
        registered_subject = RegisteredSubject.objects.get(subject_identifier=enrollment.subject_identifier)
        registered_subject.dob = enrollment.dob
        registered_subject.initials = enrollment.initials
        registered_subject.consent_datetime = enrollment.report_datetime
        registered_subject.gender = enrollment.gender

    def test_create_subject_visit(self):
        """Assert subject visit creates metadata."""
        EnrollmentFactory()
        appointment = Appointment.objects.all().order_by('visit_code').first()
        SubjectVisit.objects.create(
            appointment=appointment,
            report_datetime=timezone.now(),
            reason=SCHEDULED,
        )
        schedule = site_visit_schedules.get_schedule(appointment.schedule_name)
        visit = schedule.get_visit(appointment.visit_code)
        self.assertGreater(CrfMetadata.objects.all().count(), 0)
        self.assertEqual(CrfMetadata.objects.all().count(), len(visit.crfs))
        self.assertGreater(RequisitionMetadata.objects.all().count(), 0)
        self.assertEqual(RequisitionMetadata.objects.all().count(), len(visit.requisitions))
|
"""nushellx_lpt/metafitter_abs.py
Function definitions for an abstract *.lpt metafitter
"""
from __future__ import print_function, division, unicode_literals
import numpy as np
from deprecated.int.metafitter_abs import single_particle_metafit_int
from constants import DPATH_SHELL_RESULTS, DPATH_PLOTS
from deprecated.nushellx_lpt.DataMapNushellxLpt import DataMapNushellxLpt
from plotting import map_to_arrays
from transforms import pzbt
# noinspection PyUnusedLocal
# TODO: Use of zbt_array as a constant in this function is a hack.
# TODO: There should be well-defined methods for accessing zero-body term data
def _get_plot_lpt(n, exp, me_map, mzbt_map, *args):
    """Gets the energy vs. mass plot (x, y, const_list, const_dict) based
    on the given mass -> energy map, mass -> zero body map, etc.

    :param n: state index (beginning at 1) from first column of *.lpt file
    :param exp: ExpNushellxLpt, which identifies the data being used for
        the plot
    :param me_map: mass number A -> energy map, where energy is that
        derived from the *.lpt file (without addition of zero body term)
    :param mzbt_map: mass number A -> zero body term map, derived from the
        interaction files
    :param args: allows extra args for compatibility (i.e. duck-typing).
        These are not used here
    :return: (x, y, const_list, const_dict), where const_list and
        const_dict contain exp, n, and zbt array
    """
    x, y = map_to_arrays(me_map)
    x_arr, zbt_arr = map_to_arrays(mzbt_map)
    # Keep only zero-body terms whose mass number also appears in the
    # energy data, preserving order. (The original loop carried an unused
    # index variable; the comprehension states the filter directly.)
    zbt_arr_fixed = np.array(
        [zbta for xa, zbta in zip(x_arr, zbt_arr) if xa in x])
    # TODO: passing zbt_arr through the plot "constants" is a hack; a
    # TODO: proper accessor for zero-body-term data should replace it.
    const_list = [exp, n, np.array(zbt_arr_fixed)]
    const_dict = {'exp': exp, 'N': n, 'zbt_arr': zbt_arr_fixed}
    return x, y, const_list, const_dict
# noinspection PyUnusedLocal
def _get_plots_lpt(exp_list, data_map, get_data_fn, get_plot_fn=_get_plot_lpt,
                   **kwargs):
    """Build the list of plots for each exp in ``exp_list``.

    :param exp_list: list of exp values for which to get plots; when None,
        every key of ``data_map.map`` is used instead
    :param data_map: DataMapNushellxLpt object containing data for all
        of the exp in exp_list
    :param get_data_fn: function to retrieve the n -> mass -> energy map
        from a value (DatumLpt) in data_map
    :param get_plot_fn: function used to make a plot from n, exp,
        mass -> energy map, and mass -> zero body term map
    :param kwargs: accepted for compatibility (duck-typing); unused here
    :return: list of plots, where a plot is (x, y, const_list, const_dict)
    """
    chosen_exps = data_map.map.keys() if exp_list is None else exp_list
    plots = []
    for exp in sorted(chosen_exps):
        datum = data_map[exp]
        mzbt_map = datum.mass_to_zbt_map()
        for n, me_map in get_data_fn(datum).items():
            plots.append(get_plot_fn(n, exp, me_map, mzbt_map))
    return plots
# noinspection PyUnusedLocal
# TODO: Get rid of the need for this function. Top level functions should
# TODO: deal with labels, etc. The metafitters should be as minimalistic as
# TODO: possible.
def _get_label_kwargs_lpt(plot, idx_key=None):
"""Function to get a dictionary for the label keyword arguments for
formatting
:param plot: (x, y, const_list, const_dict)
:param idx_key: I do not even remember what the point of this argument is.
"""
return {'exp': plot[3]['exp'], 'N': plot[3]['N']}
def metafit_nushellx_lpt(
        fitfn, exp_list,
        transform=pzbt,
        exp_filter_fn=None,
        xlabel='A', ylabel='Energy + Zero Body Term (MeV)',
        show_fit=False,
        _sourcedir=DPATH_SHELL_RESULTS, _savedir=DPATH_PLOTS,
        _data_map=DataMapNushellxLpt,
        _get_data=lambda dm: dm.n_to_mass_to_ex_energy_map(),
        _get_plots=_get_plots_lpt, _get_plot=_get_plot_lpt,
        _plot_sort_key=lambda p: p[3]['exp'],
        _code_pref='LPT',
        _title='Metafit {mfn} for shell calculation {tr} data using {fn}',
        _label='{N}, {exp}', _get_label_fmt_kwargs=_get_label_kwargs_lpt,
        _print_results=False,
        _idx='N',
        **kwargs
):
    """Metafitter for NuShellX *.lpt data. See the documentation for
    single_particle_metafit_int (int/metafitter_abs.py).
    """
    # Stray '|' extraction artifacts in the pass-through call below were
    # removed (exp_list= and _get_plot= keyword arguments restored).
    return single_particle_metafit_int(
        fitfn=fitfn, exp_list=exp_list, exp_filter_fn=exp_filter_fn,
        dpath_sources=_sourcedir, dpath_plots=_savedir,
        transform=transform,
        xlabel=xlabel, ylabel=ylabel,
        show_fit=show_fit,
        _data_map=_data_map, _get_data=_get_data, _get_plots=_get_plots,
        _get_plot=_get_plot, _plot_sort_key=_plot_sort_key,
        _title=_title, _label=_label, _idx=_idx,
        print_results=_print_results,
        _get_label_fmt_kwargs=_get_label_fmt_kwargs,
        _code_pref=_code_pref,
        **kwargs)
|
om django.http import HttpResponse
import os
from config.views import get_dir
from django.contrib.auth.decorators import login_required
from accounts.permission import permission_verify
import logging
from lib.log import log
from lib.setup import get_playbook, get_roles
# var info
# Directory locations and logging configuration come from the project
# config at import time.
ansible_dir = get_dir("a_path")
roles_dir = get_dir("r_path")
playbook_dir = get_dir("p_path")
level = get_dir("log_level")
log_path = get_dir("log_path")
# Initialize the module-wide log file when this module is imported.
log("setup.log", level, log_path)
def write_role_vars(roles, vargs):
    """Write the submitted variable lines to each role's vars/main.yml.

    :param roles: iterable of role directory names under ``roles_dir``
    :param vargs: '\\r\\n'-separated variable definitions from the web form
    :return: True, unconditionally (as before)
    """
    # Nothing to write when no variables were submitted. (The original
    # re-tested this condition inside the loop although it never changes.)
    if not vargs:
        return True
    r_vars = vargs.split('\r\n')
    for r in roles:
        if not os.path.exists(roles_dir + r + "/vars"):
            os.mkdir(roles_dir + r + "/vars")
        # Text mode: the lines written are str, so the previous binary
        # 'wb+' handle fails under Python 3; 'w' truncates just as before.
        with open(roles_dir + r + '/vars/main.yml', 'w') as role_file:
            role_file.writelines("---\n")
            for x in r_vars:
                role_file.writelines(x + '\n')
    return True
@login_required()
@permission_verify()
def index(request):
    """Render the ansible setup page listing all hosts, groups, roles
    and playbooks available for selection."""
    temp_name = "setup/setup-header.html"
    # NOTE(review): Host/HostGroup are not visible in the (truncated)
    # import header of this file -- presumably imported above; verify.
    all_host = Host.objects.all()
    all_dir = get_roles(roles_dir)
    all_pbook = get_playbook(playbook_dir)
    all_group = HostGroup.objects.all()
    # locals() exposes all of the variables above to the template.
    return render(request, 'setup/ansible.html', locals())
@login_required()
@permission_verify()
def playbook(request):
    """Run the selected roles or playbooks against the chosen hosts or
    groups, collecting ansible output into ``ret``.

    POST parameters: ``mserver`` (hosts), ``mgroup`` (groups),
    ``splaybook`` (playbooks), ``mroles`` (roles), ``mvars`` (role
    variables). Hosts take precedence over groups, and roles over
    playbooks. Renders setup/result.html with the collected output.

    Fixes in this revision: stray '|' extraction artifacts removed; file
    handles in the playbook branches are now closed via ``with``; a GET
    request now renders the result page instead of returning None.
    """
    ret = []
    temp_name = "setup/setup-header.html"
    # Remove any generated playbook left over from a previous run.
    if os.path.exists(ansible_dir + '/gexec.yml'):
        os.remove(ansible_dir + '/gexec.yml')
    if request.method == 'POST':
        host = request.POST.getlist('mserver', [])
        group = request.POST.getlist('mgroup', [])
        pbook = request.POST.getlist('splaybook', [])
        roles = request.POST.getlist('mroles', [])
        role_vars = request.POST.get('mvars')
        if host:
            if roles:
                if role_vars:
                    write_role_vars(roles, role_vars)
                for h in host:
                    logging.info("==========ansible tasks start==========")
                    logging.info("User:"+request.user.username)
                    logging.info("host:"+h)
                    # Generate a one-off playbook applying the roles to h.
                    with open(ansible_dir + '/gexec.yml', 'w+') as f:
                        flist = ['- hosts: '+h+'\n', ' remote_user: root\n', ' gather_facts: true\n', ' roles:\n']
                        for r in roles:
                            flist.append(' - ' + r + '\n')
                            logging.info("Role:"+r)
                        f.writelines(flist)
                    cmd = "ansible-playbook"+" " + ansible_dir+'/gexec.yml'
                    p = Popen(cmd, stderr=PIPE, stdout=PIPE, shell=True)
                    data = p.communicate()
                    ret.append(data)
                    for d in data:
                        logging.info(d)
                    logging.info("==========ansible tasks end============")
            else:
                # No roles selected: run each chosen playbook, rewriting
                # its first line to target the host.
                for h in host:
                    for p in pbook:
                        with open(playbook_dir + p, 'r+') as f:
                            flist = f.readlines()
                        flist[0] = '- hosts: '+h+'\n'
                        with open(playbook_dir + p, 'w+') as f:
                            f.writelines(flist)
                        cmd = "ansible-playbook"+" " + playbook_dir + p
                        pcmd = Popen(cmd, stdout=PIPE, stderr=PIPE, shell=True)
                        data = pcmd.communicate()
                        ret.append(data)
                        logging.info("==========ansible tasks start==========")
                        logging.info("User:"+request.user.username)
                        logging.info("host:"+h)
                        logging.info("Playbook:"+p)
                        for d in data:
                            logging.info(d)
                        logging.info("==========ansible tasks end============")
            return render(request, 'setup/result.html', locals())
        if group:
            if roles:
                if role_vars:
                    write_role_vars(roles, role_vars)
                for g in group:
                    logging.info("==========ansible tasks start==========")
                    logging.info("User:"+request.user.username)
                    logging.info("group:"+g)
                    with open(ansible_dir + '/gexec.yml', 'w+') as f:
                        flist = ['- hosts: '+g+'\n', ' remote_user: root\n', ' gather_facts: true\n', ' roles:\n']
                        for r in roles:
                            flist.append(' - ' + r + '\n')
                            logging.info("Role:"+r)
                        f.writelines(flist)
                    cmd = "ansible-playbook"+" " + ansible_dir+'/gexec.yml'
                    p = Popen(cmd, stderr=PIPE, stdout=PIPE, shell=True)
                    data = p.communicate()
                    ret.append(data)
                    for d in data:
                        logging.info(d)
                    logging.info("==========ansible tasks end============")
            else:
                for g in group:
                    for p in pbook:
                        with open(playbook_dir + p, 'r+') as f:
                            flist = f.readlines()
                        flist[0] = '- hosts: '+g+'\n'
                        with open(playbook_dir + p, 'w+') as f:
                            f.writelines(flist)
                        cmd = "ansible-playbook"+" " + playbook_dir + p
                        pcmd = Popen(cmd, stdout=PIPE, stderr=PIPE, shell=True)
                        data = pcmd.communicate()
                        ret.append(data)
                        logging.info("==========ansible tasks start==========")
                        logging.info("User:"+request.user.username)
                        logging.info("Group:"+g)
                        logging.info("Playbook:"+p)
                        for d in data:
                            logging.info(d)
                        logging.info("==========ansible tasks end============")
    return render(request, 'setup/result.html', locals())
@login_required()
@permission_verify()
def ansible_command(request):
    """Run ad-hoc ansible commands submitted via the ``mcommand`` POST
    field (one command per line) and render the collected output.

    A stray '|' extraction artifact in the POST-method check was removed.
    NOTE(review): the command string comes from the request and is passed
    to the shell (shell=True) -- shell-injection risk; restrict or
    sanitize. ``count`` is only advanced for invalid commands, so the
    "command N is invalid" numbering counts invalid entries, not lines.
    """
    command_list = []
    ret = []
    count = 1
    temp_name = "setup/setup-header.html"
    if request.method == 'POST':
        mcommand = request.POST.get('mcommand')
        command_list = mcommand.split('\n')
        for command in command_list:
            if command.startswith("ansible"):
                p = Popen(command, stdout=PIPE, stderr=PIPE, shell=True)
                data = p.communicate()
                ret.append(data)
            else:
                data = "your command " + str(count) + " is invalid!"
                ret.append(data)
                count += 1
            logging.info("==========ansible tasks start==========")
            logging.info("User:"+request.user.username)
            logging.info("command:"+command)
            # NOTE(review): when data is the "invalid" string this logs it
            # character by character -- probably unintended; confirm.
            for d in data:
                logging.info(d)
            logging.info("==========ansible tasks end============")
    return render(request, 'setup/result.html', locals())
@login_required()
@permission_verify()
def host_sync(request):
group = HostGroup.objects.all()
ansible_file = open(ansible_dir+"/hosts", "wb")
all_host = Host.objects.all()
for host in all_host:
#gitlab ansible_host=10.100.1.76 host_name=gitlab
host_item = host.hostname+" "+"ansible_host="+host.ip+" "+"host_name="+host.hostname+"\n"
ansible_file.write(host_item)
for g in group:
group_name = "["+g.name+"]"+"\n"
ansible_file.write(group_name)
members = Host.objects.filter(group__name=g)
for m in members:
group_item = m.hostname+"\n"
ansible_file.write(group_item)
ansible_file.close()
logging.info("==========ansible tasks start==========")
logging.info("User:"+request.user.username)
logging.info("Task: sync cmdb info to ansible hosts")
logging.info( |
#!/usr/bin/env python
# coding: utf-8
from .src import *
def plugin_loaded():
    """Sublime Text API hook: initialize the package (no forced reload).

    The call was garbled by stray '|' extraction artifacts; restored to a
    single delegation into the package implementation.
    """
    distractionless.plugin_loaded(reload=False)
def plugin_unloaded():
    """Sublime Text API hook: delegate cleanup to the package implementation."""
    distractionless.plugin_unloaded()
|
o One ", acc_role=pyatspi.ROLE_RADIO_BUTTON))
sequence.append(utils.AssertPresentationAction(
"7. Move to Radio One radio button",
["BRAILLE LINE: 'SwingSet2 Application SwingSet2 Frame RootPane LayeredPane Button Demo TabList Button Demo Page Radio Buttons TabList Radio Buttons Page Image Radio Buttons Panel Image Radio Buttons & y Radio One RadioButton'",
" VISIBLE: '& y Radio One RadioButton', cursor=1",
"SPEECH OUTPUT: 'Image Radio Buttons panel Radio One not selected radio button'"]))
########################################################################
# Basic Where Am I
#
sequence.append(utils.StartRecordingAction())
sequence.append(KeyComboAction("KP_Enter"))
sequence.append(PauseAction(3000))
sequence.append(utils.AssertPresentationAction(
"8. Basic Where Am I",
["BRAILLE LINE: 'SwingSet2 Application SwingSet2 Frame RootPane LayeredPane Button Demo TabList Button Demo Page Radio Buttons TabList Radio Buttons Page Image Radio Buttons Panel Image Radio Buttons & y Radio One RadioButton'",
" VISIBLE: '& y Radio One RadioButton', cursor=1",
"SPEECH OUTPUT: 'Image Radio Buttons Radio One radio button not selected 3 of 3'"]))
########################################################################
# Expected output when radio button is selected.
#
sequence.append(utils.StartRecordingAction())
sequence.append(TypeAction(" "))
sequence.append(WaitAction("object:property-change:accessible-value", None,
None, pyatspi.ROLE_RADIO_BUTTON, 5000))
sequence.append(utils.AssertPresentationAction(
"9. Select the focused radio button",
["BRAILLE LINE: 'SwingSet2 Application SwingSet2 Frame RootPane LayeredPane Button Demo TabList Button Demo Page Radio Buttons TabList Radio Buttons Page Image Radio Buttons Panel Image Radio Buttons &=y Radio One RadioButton'",
" VISIBLE: '&=y Radio One RadioButton', cursor=1",
"SPEECH OUTPUT: 'selected'"]))
########################################################################
# Basic Where Am I
#
sequence.append(utils.StartRecordingAction())
sequence.append(KeyComboAction("KP_Enter"))
sequence.append(PauseAction(3000))
sequence.append(utils.AssertPresentationAction(
"10. Basic Where Am I",
["BRAILLE LINE: 'SwingSet2 Application SwingSet2 Frame RootPane LayeredPane Button Demo TabList Button Demo Page Radio Buttons TabList Radio Buttons Page Image Radio Buttons Panel Image Radio Buttons &=y Radio One RadioButton'",
" VISIBLE: '&=y Radio One RadioButton', cursor=1",
"SPEECH OUTPUT: 'Image Radio Buttons Radio One radio button selected 3 of 3'"]))
########################################################################
# Expected output when radio button comes into focus.
#
sequence.append(utils.StartRecordingAction())
sequence.append(KeyComboAction("Tab"))
sequence.append(WaitForFocus("Radio Two", acc_role=pyatspi.ROLE_RADIO_BUTTON))
sequence.append(utils.AssertPresentationAction(
"11. Move to Radio Two radio button",
["BRAILLE LINE: 'SwingSet2 Application SwingSet2 Frame RootPane LayeredPane Button Demo TabList Button Demo Page Radio Buttons TabList Radio Buttons Page Image Radio Buttons Panel Image Radio Buttons & y Radio Two RadioButton'",
" VISIBLE: '& y Radio Two RadioButton', cursor=1",
"SPEECH OUTPUT: 'Radio Two not selected radio button'"]))
########################################################################
# Basic Where Am I
#
sequence.append(utils.StartRecordingAction())
sequence.append(KeyComboAction("KP_Enter"))
sequence.append(PauseAction( | 3000))
sequence.append(utils.AssertPresentationAction(
"12. Basic Where Am I",
["BRAILLE LINE: 'SwingSet2 Application SwingSet2 Frame RootPane LayeredPane Button Demo TabList Button Demo Page Radio Buttons TabList Radio Buttons Page Image Radio Buttons Panel Image Radio Buttons & y Radio Two RadioButton'",
" VISIBLE: '& y Radio Two RadioButton', cursor=1",
| "SPEECH OUTPUT: 'Image Radio Buttons Radio Two radio button not selected 2 of 3'"]))
########################################################################
# Expected output when radio button is selected.
#
sequence.append(utils.StartRecordingAction())
sequence.append(TypeAction(" "))
sequence.append(WaitAction("object:property-change:accessible-value", None,
None, pyatspi.ROLE_RADIO_BUTTON, 5000))
sequence.append(utils.AssertPresentationAction(
"13. Select the focused radio button",
["BRAILLE LINE: 'SwingSet2 Application SwingSet2 Frame RootPane LayeredPane Button Demo TabList Button Demo Page Radio Buttons TabList Radio Buttons Page Image Radio Buttons Panel Image Radio Buttons &=y Radio Two RadioButton'",
" VISIBLE: '&=y Radio Two RadioButton', cursor=1",
"SPEECH OUTPUT: 'selected'"]))
########################################################################
# Do a basic "Where Am I" via KP_Enter.
#
sequence.append(utils.StartRecordingAction())
sequence.append(KeyComboAction("KP_Enter"))
sequence.append(PauseAction(3000))
sequence.append(utils.AssertPresentationAction(
"14. Basic Where Am I",
["BRAILLE LINE: 'SwingSet2 Application SwingSet2 Frame RootPane LayeredPane Button Demo TabList Button Demo Page Radio Buttons TabList Radio Buttons Page Image Radio Buttons Panel Image Radio Buttons &=y Radio Two RadioButton'",
" VISIBLE: '&=y Radio Two RadioButton', cursor=1",
"SPEECH OUTPUT: 'Image Radio Buttons Radio Two radio button selected 2 of 3'"]))
########################################################################
# Expected output when radio button comes into focus.
#
sequence.append(utils.StartRecordingAction())
sequence.append(KeyComboAction("Tab"))
sequence.append(WaitForFocus("Radio Three", acc_role=pyatspi.ROLE_RADIO_BUTTON))
sequence.append(utils.AssertPresentationAction(
"15. Move to Radio Three radio button",
["BRAILLE LINE: 'SwingSet2 Application SwingSet2 Frame RootPane LayeredPane Button Demo TabList Button Demo Page Radio Buttons TabList Radio Buttons Page Image Radio Buttons Panel Image Radio Buttons & y Radio Three RadioButton'",
" VISIBLE: '& y Radio Three RadioButton', cursor=1",
"SPEECH OUTPUT: 'Radio Three not selected radio button'"]))
########################################################################
# Expected output when radio button is selected.
#
sequence.append(utils.StartRecordingAction())
sequence.append(TypeAction(" "))
sequence.append(WaitAction("object:property-change:accessible-value", None,
None, pyatspi.ROLE_RADIO_BUTTON, 5000))
sequence.append(utils.AssertPresentationAction(
"16. Select the focused radio button",
["BRAILLE LINE: 'SwingSet2 Application SwingSet2 Frame RootPane LayeredPane Button Demo TabList Button Demo Page Radio Buttons TabList Radio Buttons Page Image Radio Buttons Panel Image Radio Buttons &=y Radio Three RadioButton'",
" VISIBLE: '&=y Radio Three RadioButton', cursor=1",
"SPEECH OUTPUT: 'selected'"]))
# Tab back up to begining
sequence.append(KeyComboAction("Tab"))
sequence.append(WaitForFocus("Paint Border", acc_role=pyatspi.ROLE_CHECK_BOX))
sequence.append(KeyComboAction("Tab"))
sequence.append(WaitForFocus("Paint Focus", acc_role=pyatspi.ROLE_CHECK_BOX))
sequence.append(KeyComboAction("Tab"))
sequence.append(WaitForFocus("Enabled", acc_role=pyatspi.ROLE_CHECK_BOX))
sequence.append(KeyComboAction("Tab"))
sequence.append(WaitForFocus("Content Filled", acc_role=pyatspi.ROLE_CHECK_BOX))
sequence.append(KeyComboAction("Tab"))
sequence.append(WaitForFocus("Default", acc_role=pyatspi.ROLE_RADIO_BUTTON))
sequence.append(KeyComboAction("Tab"))
sequence.append(WaitForFocus("0", acc_role=pyatspi.ROLE_RADIO_BUTTON))
sequence.append(KeyComboAction("Tab"))
sequence.append(WaitForFocus("10", acc_role=pyatspi.ROLE_RADIO_BUTTON))
sequence.append(KeyComboAction("Tab"))
sequence.append(WaitForFocus("", acc_role=pyatspi.ROLE_TEXT))
sequence.append(KeyComboAction("Tab"))
# Toggle the top left button, to return to normal state.
# Just a little extra wait to let som |
nditions are met:
#
# Redistributions of source code must retain the above copyright notice, this
# list of conditions and the following disclaimer. Redistributions in binary
# form must reproduce the above copyright notice, this list of conditions and
# the following disclaimer in the documentation and/or other materials
# provided with the distribution.
#
# None of the names of the copyright holders may be used to endorse or
# promote products derived from this software without specific prior written
# permission.
#
# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS"
# AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE
# IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE
# ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE
# LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR
# CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF
# SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS
# INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN
# CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE)
# ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE
# POSSIBILITY OF SUCH DAMAGE.
#*******************************************************************************
import scipy.linalg.interpolative as pymatrixid
import numpy as np
from scipy.linalg import hilbert, svdvals, norm
from scipy.sparse.linalg import aslinearoperator
from scipy.linalg.interpolative import interp_decomp
import itertools
from numpy.testing import (assert_, assert_allclose, assert_equal,
assert_array_equal)
import pytest
from pytest import raises as assert_raises
import sys
_IS_32BIT = (sys.maxsize < 2**32)
@pytest.fixture()
def eps():
    # Tolerance shared by all interpolative-decomposition tests below.
    yield 1e-12
@pytest.fixture(params=[np.float64, np.complex128])
def A(request):
    # construct Hilbert matrix
    # set parameters
    n = 300
    # Hilbert matrices are severely ill-conditioned, so the numerical rank
    # at eps is far below n -- a good target for low-rank decompositions.
    yield hilbert(n).astype(request.param)
@pytest.fixture()
def L(A):
    # LinearOperator view of A, exercising the matrix-free code paths.
    yield aslinearoperator(A)
@pytest.fixture()
def rank(A, eps):
    """Numerical rank of A: index of the first singular value below eps,
    or the full dimension when none falls below it."""
    singular_values = np.linalg.svd(A, compute_uv=False)
    below = np.nonzero(singular_values < eps)[0]
    return below[0] if below.size else A.shape[0]
class TestInterpolativeDecomposition:
    @pytest.mark.parametrize(
        "rand,lin_op",
        [(False, False), (True, False), (True, True)])
    def test_real_id_fixed_precision(self, A, L, eps, rand, lin_op):
        """Fixed-precision ID must reconstruct A to within eps."""
        if _IS_32BIT and A.dtype == np.complex_ and rand:
            pytest.xfail("bug in external fortran code")
        # Test ID routines on a Hilbert matrix.
        A_or_L = A if not lin_op else L
        # k (the rank) is chosen by the routine to meet the precision eps.
        k, idx, proj = pymatrixid.interp_decomp(A_or_L, eps, rand=rand)
        B = pymatrixid.reconstruct_matrix_from_id(A[:, idx[:k]], idx, proj)
        assert_allclose(A, B, rtol=eps, atol=1e-08)
@pytest.mark.parametrize(
"rand,lin_op",
[(False, False), (True, False), (True, True)])
def test_real_id_fixed_rank(self, A, L, eps, rank, rand, lin_op):
if _IS_32BIT and A.dtype == np.complex_ and rand:
pytest.xfail("bug in external fortran code")
k = rank
A_or_L = A if not lin_op else L
idx, proj = pymatrixid.interp_decomp(A_or_L, k, rand=rand)
B = pymatrixid.reconstruct_matrix_from_id(A[:, idx[:k]], idx, proj)
assert_allclose(A, B, rtol=eps, atol=1e-08)
@pytest.mark.parametrize("rand,lin_op", [(False, False)])
def test_real_id_skel_and_interp_matrices(
self, A, L, eps, rank, rand, lin_op):
k = rank
A_or_L = A if not lin_op else L
idx, proj = pymatrixid.interp_decomp(A_or_L, k, rand=rand)
P = pymatrixid.reconstruct_interp_matrix(idx, proj)
B = pymatrixid.reconstruct_skel_matrix(A, k, idx)
assert_allclose(B, A[:, idx[:k]], rtol=eps, atol=1e-08)
assert_allclose(B @ P, A, rtol=eps, atol=1e-08)
@pytest.mark.parametrize(
"rand,lin_op",
[(False, False), (True, False), (True, True)])
def test_svd_fixed_precison(self, A, L, eps, rand, lin_op):
if _IS_32BIT and A.dtype == np.complex_ and rand:
pytest.xfail("bug in external fortran code")
A_or_L = A if not lin_op else L
U, S, V = pymatrixid.svd(A_or_L, eps, rand=rand)
B = U * S @ V.T.conj()
assert_allclose(A, B, rtol=eps, atol=1e-08)
@pytest.mark.parametrize(
"rand,lin_op",
[(False, False), (True, False), (True, True)])
def test_svd_fixed_rank(self, A, L, eps, rank, rand, lin_op):
if _IS_32BIT and A.dtype == np.complex_ and rand:
pytest.xfail("bug in external fortran code")
k = rank
A_or_L = A if not lin_op else L
U, S, V = pymatrixid.svd(A_or_L, k, rand=rand)
B = U * S @ V.T.conj()
assert_allclose(A, B, rtol=eps, atol=1e-08)
def test_id_to_svd(self, A, eps, rank):
k = rank
idx, proj = pymatrixid.interp_decomp(A, k, rand=False)
U, S, V = pymatrixid.id_to_svd(A[:, idx[:k]], idx, proj)
B = U * S @ V.T.conj()
assert_allclose(A, B, rtol=eps, atol=1e-08)
def test_estimate_spectral_norm(self, A):
s = svdvals(A)
norm_2_est = pymatrixid.estimate_spectral_norm(A)
assert_allclose(norm_2_est, s[0], rtol=1e-6, atol=1e-8)
def test_estimate_spectral_norm_diff(self, A):
B = A.copy()
B[:, 0] *= 1.2
s = svdvals(A - B)
norm_2_est = pymatrixid.estimate_spectral_norm_diff(A, B)
assert_allclose(norm_2_est, s[0], rtol=1e-6, atol=1e-8)
def test_rank_estimates_array(self, A):
B = np.array([[1, 1, 0], [0, 0, 1], [0, 0, 1]], dtype=A.dtype)
for M in [A, B]:
rank_tol = 1e-9
rank_np = np.linalg.matrix_rank(M, norm(M, 2) * rank_tol)
rank_est = pymatrixid.estimate_rank(M, rank_tol)
assert_(rank_est >= rank_np)
assert_(rank_est <= rank_np + 10)
def test_rank_estimates_lin_op(self, A):
B = np.array([[1, 1, 0], [0, 0, 1], [0, 0, 1]], dtype=A.dtype)
for M in [A, B]:
ML = aslinearoperator(M)
rank_tol = 1e-9
rank_np = np.linalg.matrix_rank(M, norm(M, 2) * rank_tol)
rank_est = pymatrixid.estimate_rank(ML, rank_tol)
assert_(rank_est >= rank_np - 4)
assert_(rank_est <= rank_np + 4)
def test_rand(self):
pymatrixid.seed('default')
assert_allclose(pymatrixid.rand(2), [0.8932059, 0.64500803],
rtol=1e-4, atol=1e-8)
pymatrixid.seed(1234)
x1 = pymatrixid.rand(2)
assert_allclose(x1, [0.7513823, 0.06861718], rtol=1e-4, atol=1e-8)
np.random.seed(1234)
pymatrixid.seed()
x2 = pymatrixid.rand(2)
np.random.seed(1234)
pymatrixid.seed(np.random.rand(55))
x3 = pymatrixid.rand(2)
assert_allclose(x1, x2)
assert_allclose(x1, x3)
def test_badcall(self):
A = hilbert(5).astype(np.float32)
with assert_raises(ValueError):
pymatrixid.interp_decomp(A, 1e-6, rand=False)
def test_rank_too_large(self):
# svd(array, k) should not segfault
a = np.ones((4, 3))
with assert_raises(ValueError):
pymatrixid.svd(a, 4)
def test_full_rank(self):
eps = 1.0e-12
# fixed precision
A = np.random.rand(16, 8)
k, idx, proj = pymatrixid.interp_decomp(A, eps)
assert_equal(k, A.shape[1])
P = pymatrixid.reconstruct_interp_matrix(idx, proj)
B = pymatrixid.reconstruct_skel_matrix(A, k, idx)
assert_allclose(A, B @ P)
# fixed rank
idx, proj = pymatrixid.interp_decomp(A, k)
P = pymatrixid.reconstruct_interp_matrix(idx, proj)
B = pymatrixid.reconstruct_skel_matrix(A, k, idx)
assert_allclose(A, B @ P)
@pytest.mark.parametrize("dtype", [np.float_, np.complex_])
@pytest.mark.parametrize("rand", [True, False])
@pytest.mark.pa |
import os
from datetime import timedelta

# Celery configuration: broker location, serialization formats and the
# periodic (celerybeat) schedule.
# (corrupted ' | ' tokens in the original dump repaired)
BROKER_URL = os.environ['CELERY_BROKER_URL']
CELERY_IMPORTS = ('app.cabotapp.tasks', )
CELERYBEAT_SCHEDULER = "djcelery.schedulers.DatabaseScheduler"
CELERY_TASK_SERIALIZER = "json"
CELERY_ACCEPT_CONTENT = ['json', 'msgpack', 'yaml']

CELERYBEAT_SCHEDULE = {
    # 'run_all_checks' every 60 seconds.
    'run-all-checks': {
        'task': 'app.cabotapp.tasks.run_all_checks',
        'schedule': timedelta(seconds=60),
    },
    # 'update_shifts' every 30 minutes.
    'update-shifts': {
        'task': 'app.cabotapp.tasks.update_shifts',
        'schedule': timedelta(seconds=1800),
    },
    # 'clean_db' once a day.
    'clean-db': {
        'task': 'app.cabotapp.tasks.clean_db',
        'schedule': timedelta(seconds=60*60*24),
    },
}

CELERY_TIMEZONE = 'UTC'
|
import unittest
import mock
import SMBSR
import xs_errors
import XenAPI
import vhdutil
import util
import errno
class FakeSMBSR(SMBSR.SMBSR):
    """Test double for SMBSR.SMBSR that bypasses the real constructor."""
    # Class-level defaults; each is shadowed by the instance attributes
    # assigned in __init__ below.
    uuid = None
    sr_ref = None
    mountpoint = None
    linkpath = None
    path = None
    session = None
    remoteserver = None

    def __init__(self, srcmd, none):
        # ``none`` is accepted but unused -- presumably mirrors the parent
        # constructor's arity; confirm against SMBSR.SR.__init__.
        self.dconf = srcmd.dconf
        self.srcmd = srcmd
        self.uuid = 'auuid'
        self.sr_ref = 'asr_ref'
        self.mountpoint = 'aMountpoint'
        self.linkpath = 'aLinkpath'
        self.path = 'aPath'
        self.remoteserver = 'aRemoteserver'
class Test_SMBSR(unittest.TestCase):
    """Unit tests for SMBSR attach/detach/mount behaviour.

    BUG FIX: the original tests re-assigned the injected patch arguments
    (``mock_mount = mock.Mock(...)``), which only rebound the local name and
    left the actual patches unconfigured, so the intended side effects never
    fired.  The fixes below configure ``side_effect`` / ``return_value`` on
    the injected mocks themselves.  The Python-2-only ``except E, e`` syntax
    is also replaced with ``except E as e`` (valid on Python 2.6+ and 3).
    """

    def create_smbsr(self, sr_uuid='asr_uuid', server='\\aServer', serverpath='/aServerpath', username='aUsername', password='aPassword'):
        """Build a FakeSMBSR around a mocked srcmd and run load() on it."""
        srcmd = mock.Mock()
        srcmd.dconf = {
            'server': server,
            'serverpath': serverpath,
            'username': username,
            'password': password
        }
        srcmd.params = {
            'command': 'some_command',
            'device_config': {}
        }
        smbsr = FakeSMBSR(srcmd, None)
        smbsr.load(sr_uuid)
        return smbsr

    #Attach
    @mock.patch('SMBSR.SMBSR.checkmount')
    @mock.patch('SMBSR.SMBSR.mount')
    def test_attach_smbexception_raises_xenerror(self, mock_mount, mock_checkmount):
        smbsr = self.create_smbsr()
        # Not mounted yet, and mount() blows up with an SMBException.
        mock_checkmount.return_value = False
        mock_mount.side_effect = SMBSR.SMBException("mount raised SMBException")
        try:
            smbsr.attach('asr_uuid')
        except Exception as exc:
            self.assertTrue(isinstance(exc, xs_errors.XenError))

    @mock.patch('SMBSR.SMBSR.checkmount')
    def test_attach_if_mounted_then_attached(self, mock_checkmount):
        smbsr = self.create_smbsr()
        mock_checkmount.return_value = True
        smbsr.attach('asr_uuid')
        self.assertTrue(smbsr.attached)

    #Detach
    @mock.patch('SMBSR.SMBSR.unmount')
    def test_detach_smbexception_raises_xenerror(self, mock_unmount):
        smbsr = self.create_smbsr()
        mock_unmount.side_effect = SMBSR.SMBException("unmount raised SMBException")
        try:
            smbsr.detach('asr_uuid')
        except Exception as exc:
            self.assertTrue(isinstance(exc, xs_errors.XenError))

    @mock.patch('SMBSR.SMBSR.checkmount', return_value=False)
    def test_detach_not_detached_if_not_mounted(self, mock_checkmount):
        # checkmount is already patched to False by the decorator; the dead
        # local re-assignment from the original test is removed.
        smbsr = self.create_smbsr()
        smbsr.attached = True
        smbsr.detach('asr_uuid')
        self.assertTrue(smbsr.attached)

    #Mount
    @mock.patch('util.isdir')
    def test_mount_mountpoint_isdir(self, mock_isdir):
        # Configure the failure only after load() has run, so only mount()
        # sees the CommandException.
        smbsr = self.create_smbsr()
        mock_isdir.side_effect = util.CommandException(errno.EIO, "Not a directory")
        try:
            smbsr.mount()
        except Exception as exc:
            self.assertTrue(isinstance(exc, SMBSR.SMBException))

    def test_mount_mountpoint_empty_string(self):
        smbsr = self.create_smbsr()
        self.assertRaises(SMBSR.SMBException, smbsr.mount, "")
|
# -*- coding: utf-8 -*-
##############################################################################
#
# OpenERP, Open Source Management Solution
# Copyright (C) 2004-2009 Tiny SPRL (<http://tiny.be>).
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU Affero General Public License as
# published by the Free Software Foundation, either version 3 of the
# License, or (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Affero General Public License for more details.
#
# You should have received a copy of the GNU Affero General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
#
##############################################################################
#.apidoc title: Amount to Text
#-------------------------------------------------------------
# French
#-------------------------------------------------------------
to_19_fr = ( 'zéro', 'un', 'deux', 'trois', 'quatre', 'cinq', 'six',
'sept', 'huit', 'neuf', 'dix', 'onze', 'douze', 'treize',
'quatorze', 'quinze', 'seize', 'dix-sept', 'dix-huit', 'dix-neuf' )
tens_fr = ( 'vingt', 'trente', 'quarante', 'Cinquante', 'Soixante', 'Soixante-dix', 'Quatre-vingts', 'Quatre-vingt Dix')
denom_fr = ( '',
'Mille', 'Millions', 'Milliards', 'Billions', 'Quadrillions',
'Quintillion', 'Sextillion', 'Septillion', 'Octillion', 'Nonillion',
'Décillion', 'Undecillion', 'Duodecillion', 'Tredecillion', 'Quattuordecillion',
'Sexdecillion', 'Septendecillion', 'Octodecillion', 'Icosillion', 'Vigintillion' )
def _convert_nn_fr(val):
""" convert a value < 100 to French
"""
if val < 20:
return to_19_fr[val]
for (dcap, dval) in ((k, 20 + (10 * v)) for (v, k) in enumerate(tens_fr)):
if dval + 10 > val:
if val % 10:
return dcap + '-' + to_19_fr[val % 10]
return dcap
def _convert_nnn_fr(val):
""" convert a value < 1000 to french
special cased because it is the level that kicks
off the < 100 special case. The rest are more general. This also allows you to
get strings in the form of 'forty-five hundred' if called directly.
"""
word = ''
(mod, rem) = (val % 100, val // 100)
if rem > 0:
word = to_19_fr[rem] + ' Cent'
if mod > 0:
word = word + ' '
if mod > 0:
word = word + _convert_nn_fr(mod)
return word
def french_number(val):
if val < 100:
return _convert_nn_fr(val)
if val < 1000:
return _convert_nnn_fr(val)
for (didx, dval) in ((v - 1, 1000 ** v) for v in range(len(denom_fr))):
if dval > val:
mod = 1000 ** didx
l = val // mod
r = val - (l * mod)
ret = _convert_nnn_fr(l) + ' ' + denom_fr[didx]
if r > 0:
ret = ret + ', ' + french_number(r)
return ret
def amount_to_text_fr(number, currency):
number = '%.2f' % number
units_name = currency
list = str(number).split('.')
start_word = french_number(abs(int(list[0])))
end_word = f | rench_number(int(list[1]))
cents_number = int(list[1])
cents_name = (cents_number > 1) and | ' Cents' or ' Cent'
final_result = start_word +' '+units_name+' '+ end_word +' '+cents_name
return final_result
#-------------------------------------------------------------
# Dutch
#-------------------------------------------------------------
to_19_nl = ( 'Nul', 'Een', 'Twee', 'Drie', 'Vier', 'Vijf', 'Zes',
'Zeven', 'Acht', 'Negen', 'Tien', 'Elf', 'Twaalf', 'Dertien',
'Veertien', 'Vijftien', 'Zestien', 'Zeventien', 'Achttien', 'Negentien' )
tens_nl = ( 'Twintig', 'Dertig', 'Veertig', 'Vijftig', 'Zestig', 'Zeventig', 'Tachtig', 'Negentig')
denom_nl = ( '',
'Duizend', 'Miljoen', 'Miljard', 'Triljoen', 'Quadriljoen',
'Quintillion', 'Sextiljoen', 'Septillion', 'Octillion', 'Nonillion',
'Decillion', 'Undecillion', 'Duodecillion', 'Tredecillion', 'Quattuordecillion',
'Sexdecillion', 'Septendecillion', 'Octodecillion', 'Novemdecillion', 'Vigintillion' )
def _convert_nn_nl(val):
""" convert a value < 100 to Dutch
"""
if val < 20:
return to_19_nl[val]
for (dcap, dval) in ((k, 20 + (10 * v)) for (v, k) in enumerate(tens_nl)):
if dval + 10 > val:
if val % 10:
return dcap + '-' + to_19_nl[val % 10]
return dcap
def _convert_nnn_nl(val):
""" convert a value < 1000 to Dutch
special cased because it is the level that kicks
off the < 100 special case. The rest are more general. This also allows you to
get strings in the form of 'forty-five hundred' if called directly.
"""
word = ''
(mod, rem) = (val % 100, val // 100)
if rem > 0:
word = to_19_nl[rem] + ' Honderd'
if mod > 0:
word = word + ' '
if mod > 0:
word = word + _convert_nn_nl(mod)
return word
def dutch_number(val):
if val < 100:
return _convert_nn_nl(val)
if val < 1000:
return _convert_nnn_nl(val)
for (didx, dval) in ((v - 1, 1000 ** v) for v in range(len(denom_nl))):
if dval > val:
mod = 1000 ** didx
l = val // mod
r = val - (l * mod)
ret = _convert_nnn_nl(l) + ' ' + denom_nl[didx]
if r > 0:
ret = ret + ', ' + dutch_number(r)
return ret
def amount_to_text_nl(number, currency):
number = '%.2f' % number
units_name = currency
list = str(number).split('.')
start_word = dutch_number(int(list[0]))
end_word = dutch_number(int(list[1]))
cents_number = int(list[1])
cents_name = (cents_number > 1) and 'cent' or 'cent'
final_result = start_word +' '+units_name+' '+ end_word +' '+cents_name
return final_result
#-------------------------------------------------------------
# Generic functions
#-------------------------------------------------------------
# Registry mapping language code -> spelling function; extended at runtime
# via add_amount_to_text_function().
_translate_funcs = {'fr' : amount_to_text_fr, 'nl' : amount_to_text_nl}

def add_amount_to_text_function(lang, func):
    # Register (or replace) the spelling function for *lang*.
    _translate_funcs[lang] = func
#TODO: we should use the country AND language (ex: septante VS soixante dix)
#TODO: we should use en by default, but the translation func is yet to be implemented
def amount_to_text(nbr, lang='fr', currency='euro'):
    """ Converts an integer to its textual representation, using the language set in the context if any.

        Example::
            1654: mille six cent cinquante-quatre.

    Falls back to French when *lang* has no registered spelling function.
    """
    # BUG FIX/idiom: dict.has_key() is long deprecated and removed in
    # Python 3; membership testing with ``in`` behaves identically on both
    # Python 2 and 3.  The print uses the call form, valid in both as well.
    if lang not in _translate_funcs:
        #TODO: use logger
        print("WARNING: no translation function found for lang: '%s'" % (lang,))
        #TODO: (default should be en) same as above
        lang = 'fr'
    return _translate_funcs[lang](abs(nbr), currency)
if __name__=='__main__':
    # Smoke-test driver (Python 2 print statements): spell the number given
    # on the command line, or sweep a range of values when none is given.
    from sys import argv
    lang = 'nl'
    if len(argv) < 2:
        for i in range(1,200):
            print i, ">>", amount_to_text(i, lang)
        for i in range(200,999999,139):
            print i, ">>", amount_to_text(i, lang)
    else:
        print amount_to_text(int(argv[1]), lang)
|
# -*- coding:utf-8 -*-
from __future__ import unicode_literals
from django.db import models
from django.utils import timezone
from django.utils.text import slugify
from django.contrib.auth.models import User
# Room kinds: 1 = private (one-to-one), 2 = group.
ROOM_TYPE = (
    (1, u'Privado'),
    (2, u'Grupo'),
)
class Room(models.Model):
    """A chat room, either private (1:1) or a group (see ROOM_TYPE)."""
    name = models.TextField(u'Nome')
    # Slug derived from ``name``; rebuilt on every save() below.
    label = models.SlugField(u'Label', unique=True, blank=True, null=True)
    type = models.SmallIntegerField(u'Tipo', choices=ROOM_TYPE)
    users = models.ManyToManyField(User, verbose_name=u'Usuários', related_name='room_users')

    # NOTE(review): @models.permalink is deprecated in modern Django;
    # kept as-is for compatibility with this codebase's Django version.
    @models.permalink
    def get_absolute_url(self):
        return 'chat_room', (self.label, )

    def save(self, *args, **kwargs):
        # NOTE(review): two rooms with equal names slugify to the same label
        # and will violate the unique constraint -- confirm intended.
        self.label = slugify(self.name)
        return super(Room, self).save(*args, **kwargs)

    def __unicode__(self):
        # (corrupted '| return' token from the source dump repaired)
        return self.label
class Message(models.Model):
    """A single chat message posted to a Room.

    (corrupted 'class Message | (' token from the source dump repaired)
    """
    room = models.ForeignKey(Room, verbose_name=u'Sala', related_name='messages')
    handle = models.ForeignKey(User, verbose_name=u'Usuário', related_name='message_user')
    message = models.TextField()
    timestamp = models.DateTimeField(default=timezone.now, db_index=True)

    def __unicode__(self):
        return '[{timestamp}] {handle}: {message}'.format(**self.as_dict())

    @property
    def formatted_timestamp(self):
        # NOTE(review): '%-I' (non-padded hour) is a glibc strftime extension
        # and fails on Windows -- confirm the deployment platform.
        return self.timestamp.strftime('%d/%m/%Y %-I:%M')

    def as_dict(self):
        # Serialized form consumed by __unicode__ (and presumably the chat
        # frontend -- verify against callers).
        return {'handle': "%s - %s" % (self.handle.id, self.handle.get_full_name()), 'message': self.message, 'timestamp': self.formatted_timestamp}
# Gender choices: 1 = male, 2 = female.
SEXO = (
    (1, u'Masculino'),
    (2, u'Feminino'),
)

class UserProfile(models.Model):
    """Extra per-user data: avatar, birth date and gender."""
    user = models.ForeignKey(User, verbose_name=u'Usuário', related_name='profile_user')
    avatar = models.ImageField(u'Foto', max_length=255, upload_to='user_profile', blank=True, null=True)
    data_nascimento = models.DateField(u'Data Nascimento')
    sexo = models.IntegerField(u'Sexo', choices=SEXO)

    def __unicode__(self):
        return unicode(self.user.first_name)
"""
Import all modules and packages in the serverFeatures package
['account', 'connection', 'interface', 'package', 'process']
"""
from pycaf.architecture.devices. | server import Server
from pycaf.architecture.devices.lists.serverList import ServerList
from pycaf.architecture.devices.server_windows import ServerWindows
import pycaf.architecture.devices.server_features.lists
| |
#!/usr/bin/python
# -*- coding: utf-8 -*-
from multiplexer.clients import connect_client
from obci.control.peer.peer_control import PeerControl, ConfigNotReadyError
import obci.control.common.config_message as cmsg
from obci.utils.openbci_logging import get_logger, log_crash
import sys
class ConfiguredClient(object):
    """Multiplexer client wired to the OBCI peer-configuration machinery.

    Connects to the multiplexer, initialises its configuration through
    PeerControl, and terminates the process when configuration fails.
    """

    @log_crash
    def __init__(self, addresses, type, external_config_file=None):
        # ``type`` shadows the builtin, but is part of the public signature.
        self.conn = connect_client(addresses=addresses, type=type)
        self.ready_to_work = False
        self.external_config_file = external_config_file
        self.config = PeerControl(peer=self)
        # Log levels and directory are themselves read from the peer config.
        self.logger = get_logger(self.config.peer_id,
                                 file_level=self.get_param('file_log_level'),
                                 stream_level=self.get_param('console_log_level'),
                                 mx_level=self.get_param('mx_log_level'),
                                 sentry_level=self.get_param('sentry_log_level'),
                                 conn=self.conn,
                                 log_dir=self.get_param('log_dir'),
                                 obci_peer=self)
        self.config.logger = self.logger
        self.config.connection = self.conn
        # Let PeerControl call back into this peer on validation / change.
        self.config.peer_validate_params = self.validate_params
        self.config.peer_params_change = self.params_changed
        result, details = self.config.initialize_config(self.conn)
        if not result:
            self.logger.critical(
                'Config initialisation FAILED: {0}'.format(details))
            sys.exit(1)
        else:
            self.validate_params(self.config.param_values())

    @log_crash
    def get_param(self, param_name):
        """Return the configured value of *param_name*."""
        return self.config.get_param(param_name)

    @log_crash
    def set_param(self, param_name, param_value):
        """Set *param_name* to *param_value* in the peer configuration."""
        self.config.set_param(param_name, param_value)

    @log_crash
    def ready(self):
        """Announce over the multiplexer that this peer is ready to work."""
        self.ready_to_work = True
        self.config.register_config(self.conn)
        self.config.send_peer_ready(self.conn)

    def validate_params(self, params):
        # Default hook: accept everything; subclasses may override.
        self.logger.info("VALIDATE PARAMS, {0}".format(params))
        return True

    @log_crash
    def params_changed(self, params):
        # BUG FIX: corrected 'CHAnGED' typo in the log message.
        self.logger.info("PARAMS CHANGED, {0}".format(params))
        return True

    def _param_vals(self):
        # channels_info can be huge; truncate it in crash reports.
        vals = self.config.param_values()
        if 'channels_info' in vals:
            vals['channels_info'] = '[...truncated...]'
        return vals

    def _crash_extra_description(self, exc=None):
        return "peer %s config params: %s" % (self.config.peer_id,
                                              self._param_vals())

    def _crash_extra_data(self, exc=None):
        """This method is called when the peer crashes, to provide additional
        peer data to the crash report.

        Should return a dictionary."""
        return {
            "config_params": self._param_vals(),
            "peer_id": self.config.peer_id,
            "experiment_uuid": self.get_param("experiment_uuid")
        }

    def _crash_extra_tags(self, exception=None):
        # (corrupted 'ob | ci_part' string literal repaired)
        return {'obci_part': 'obci',
                "experiment_uuid": self.get_param("experiment_uuid")}
# -*- coding: utf-8 -*-
# scrapy_web
# Copyright (C) 2016-2017 Matteo.Redaelli@gmail.com>
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
#
# usage:
# scrapy crawl auto-doc.it -t jsonlines -o data/a.json
import scrapy
import re
class AutoDocIt(scrapy.Spider):
    """Spider scraping tyre listings (brand, price, EU labels) from auto-doc.it."""
    name = "auto-doc.it"

    def __init__(self, width="195", height="65", diameter="15", *args, **kwargs):
        # width/height/diameter feed the (commented-out) filtered start URL;
        # the active start URLs walk every rim size listing instead.
        super(AutoDocIt, self).__init__(*args, **kwargs)
        self.allowed_domains = ["auto-doc.it"]
        #self.start_urls = ['http://www.auto-doc.it/pneumatici?Width=%s&CrossSections=%s&Size=%s&Season=&page=1' % (width, height, diameter)]
        self.start_urls = ['http://www.auto-doc.it/pneumatici/%d-pollici?page=1' % n for n in [10,12,13,14,15,16,17,18,19,20,21,22,23,24,40,365,390,415]]

    def parse(self, response):
        """Yield one item dict per tyre on the page, then follow pagination."""
        for entry in response.xpath('//li[@class="ovVisLi"]'):
            url = entry.xpath('.//div[@class="image"]/a/@href').extract_first()
            manufacturer_number = entry.xpath('.//div[@class="description"]//span[@style="font-size: 12px;"]/text()').extract_first().replace("MPN: ","")
            ## brand: normally the brand-logo filename; fall back to parsing
            ## the product URL when the logo is a placeholder image.
            brand = entry.xpath('.//img[@class="tires_item_brand"]/@src').extract_first()
            match = re.match(".+/(.+)\.png$", brand)
            if match:
                brand = match.group(1)
            if re.findall("IMAGE", brand, flags=re.IGNORECASE):
                m = re.match(".+/(.+)-.+-.+$", url)
                if m:
                    brand = m.group(1).replace("-", " ")
            ean = entry.xpath('.//span[@class="article_number"]/text()').extract_first().replace("EAN: ","")
            product = entry.xpath('.//div[@class="name"]/a/text()').extract_first()
            size = entry.xpath('.//div[@class="nr"]/text()').extract_first()
            description = "%s %s" % (product, size)
            # Strip the brand name out of the product title, case-insensitively.
            # BUG FIX: re.sub(p, "", product, re.IGNORECASE) passed the flag
            # constant (== 2) as the *count* argument, so at most two
            # occurrences were replaced; the pattern is already compiled with
            # IGNORECASE, so the flag there was redundant as well.
            # NOTE(review): ``brand`` is compiled as a regex; brands containing
            # metacharacters would need re.escape -- confirm before changing.
            p = re.compile(brand, re.IGNORECASE)
            product = p.sub("", product)
            price = entry.xpath('.//p[@class="actual_price"]/text()').extract_first()
            picture_url = entry.xpath('.//img[@class="tires_item_image "]/@src').extract_first()
            ## extract EU tyre labels (fuel efficiency, wet grip, noise)
            eu_fuel = entry.xpath('.//div[@class="eu_re"]//li[2]/img/@src').extract_first()
            eu_wet = entry.xpath('.//div[@class="eu_re"]//li[4]/img/@src').extract_first()
            eu_noise = entry.xpath('.//div[@class="eu_re"]//li[6]/text()').extract_first()
            if eu_fuel:
                m = re.match(".+-letter-(.+)\.png", eu_fuel)
                eu_fuel = m.group(1) if m else None
            if eu_wet:
                m = re.match(".+-letter-(.+)\.png", eu_wet)
                eu_wet = m.group(1) if m else None
            details = {
                "description": description,
                "ean": ean,
                "manufacturer_number": manufacturer_number,
                "price": price,
                "brand": brand,
                "product": product,
                "size": size,
                "picture_url": picture_url,
                "url": url,
                "label_fuel": eu_fuel,
                "label_wet": eu_wet,
                "label_noise": eu_noise
            }
            keys = entry.xpath('.//div[@class="description"]//div[@class="box"]//ul/li/span[@class="lc"]/text()').extract()
            ## keys end with ':' on the site; strip it and lowercase.
            keys = map(lambda x: x.replace(":","").lower(), keys)
            values = entry.xpath('.//div[@class="description"]//div[@class="box"]//ul/li/span[@class="rc"]/text()').extract()
            details.update(zip(keys, values))
            yield details
        next_page = response.xpath('//span[@class="next"]/a/@href').extract_first()
        if next_page is not None:
            yield scrapy.Request(next_page, callback=self.parse)
|
# mammon - utility/third-party stuff, each thing has it's own header and provenance
# information.
# CaseInsensitiveDict from requests.
#
# Copyright 2015 Kenneth Reitz
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
import collections
import collections.abc
import time
# BUG FIX: the abstract base classes moved to ``collections.abc`` in
# Python 3.3, and the old ``collections.MutableMapping`` alias was removed
# in Python 3.10 -- the class below now derives from the canonical location.
class CaseInsensitiveDict(collections.abc.MutableMapping):
    """
    A case-insensitive ``dict``-like object.

    Implements all methods and operations of
    ``collections.abc.MutableMapping`` as well as dict's ``copy``. Also
    provides ``lower_items``.

    All keys are expected to be strings. The structure remembers the
    case of the last key to be set, and ``iter(instance)``,
    ``keys()``, ``items()``, ``iterkeys()``, and ``iteritems()``
    will contain case-sensitive keys. However, querying and contains
    testing is case insensitive::

        cid = CaseInsensitiveDict()
        cid['Accept'] = 'application/json'
        cid['aCCEPT'] == 'application/json'  # True
        list(cid) == ['Accept']  # True

    For example, ``headers['content-encoding']`` will return the
    value of a ``'Content-Encoding'`` response header, regardless
    of how the header name was originally stored.

    If the constructor, ``.update``, or equality comparison
    operations are given keys that have equal ``.casefold()``s, the
    behavior is undefined.
    """

    def __init__(self, data=None, **kwargs):
        self._store = dict()
        if data is None:
            data = {}
        self.update(data, **kwargs)

    def __setitem__(self, key, value):
        # Use the lowercased key for lookups, but store the actual
        # key alongside the value.
        self._store[key.casefold()] = (key, value)

    def __getitem__(self, key):
        return self._store[key.casefold()][1]

    def __delitem__(self, key):
        del self._store[key.casefold()]

    def __iter__(self):
        return (casedkey for casedkey, mappedvalue in self._store.values())

    def __len__(self):
        return len(self._store)

    def lower_items(self):
        """Like iteritems(), but with all lowercase keys."""
        return (
            (lowerkey, keyval[1])
            for (lowerkey, keyval)
            in self._store.items()
        )

    def __eq__(self, other):
        if isinstance(other, collections.abc.Mapping):
            other = CaseInsensitiveDict(other)
        else:
            return NotImplemented
        # Compare insensitively
        return dict(self.lower_items()) == dict(other.lower_items())

    # Copy is required
    def copy(self):
        return CaseInsensitiveDict(self._store.values())

    def __repr__(self):
        return str(dict(self.items()))
# a modified ExpiringDict implementation
#
# Copyright 2013-2015 Rackspace
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
class ExpiringDict(collections.OrderedDict):
    """OrderedDict with a maximum size and a per-entry time-to-live.

    Keys are casefolded on every access; values are stored internally as
    ``(value, timestamp)`` pairs, and expired entries are purged lazily on
    access.  Once ``max_len`` entries are held, inserting evicts the oldest.
    """

    def __init__(self, max_len, max_age_seconds):
        collections.OrderedDict.__init__(self)
        self.max_len = max_len          # maximum number of entries kept
        self.max_age = max_age_seconds  # entry lifetime, in seconds

    def __contains__(self, key):
        # True only for present, unexpired entries; expired ones are dropped.
        try:
            item = collections.OrderedDict.__getitem__(self, key.casefold())
            if time.time() - item[1] < self.max_age:
                return True
            else:
                del self[key.casefold()]
        except KeyError:
            pass
        return False

    def __getitem__(self, key, with_age=False, max_age=None):
        # Return the live value; purge and raise KeyError when expired.
        item = collections.OrderedDict.__getitem__(self, key.casefold())
        item_age = time.time() - item[1]
        if not max_age:
            max_age = self.max_age
        if item_age < max_age:
            if with_age:
                return item[0], item_age
            else:
                return item[0]
        else:
            del self[key.casefold()]
            raise KeyError(key.casefold())

    def __setitem__(self, key, value):
        # Evict the oldest entry once the dict is full.
        if len(self) == self.max_len:
            self.popitem(last=False)
        collections.OrderedDict.__setitem__(self, key.casefold(), (value, time.time()))

    def pop(self, key, default=None):
        # Remove *key*, returning its stored value (even if expired).
        try:
            item = collections.OrderedDict.__getitem__(self, key.casefold())
            del self[key.casefold()]
            return item[0]
        except KeyError:
            return default

    def get(self, key, default=None, with_age=False, max_age=None):
        try:
            return self.__getitem__(key.casefold(), with_age, max_age)
        except KeyError:
            if with_age:
                return default, None
            else:
                return default

    def put(self, key, value, ts=None):
        # Like __setitem__, but allows overriding the entry timestamp.
        if len(self) == self.max_len:
            self.popitem(last=False)
        if not ts:
            ts = time.time()
        collections.OrderedDict.__setitem__(self, key.casefold(), (value, ts))

    def items(self):
        """Return a list of the live (key, value) pairs."""
        # BUG FIX: iterate over a snapshot of the keys -- self[key] deletes
        # expired entries, which previously raised "dictionary changed size
        # during iteration" whenever an entry expired mid-loop.
        r = []
        for key in list(self):
            try:
                r.append((key, self[key]))
            except KeyError:
                pass
        return r

    def values(self):
        """Return a list of the live values."""
        # BUG FIX: same snapshot iteration as items().
        r = []
        for key in list(self):
            try:
                r.append(self[key])
            except KeyError:
                pass
        return r

    def fromkeys(self):
        raise NotImplementedError()

    def iteritems(self):
        raise NotImplementedError()

    def itervalues(self):
        raise NotImplementedError()

    def viewitems(self):
        raise NotImplementedError()

    def viewkeys(self):
        raise NotImplementedError()

    def viewvalues(self):
        raise NotImplementedError()
# fast irc casemapping validation
# part of mammon, under mammon license.
import string

# Characters permitted in IRC nicknames in addition to letters and digits.
special = '_-|^{}[]`'
nick_allowed_chars = string.ascii_letters + string.digits + special
# Translation table that *deletes* every allowed character: whatever
# survives the translation is therefore illegal.
nick_allowed_chars_tbl = str.maketrans('', '', nick_allowed_chars)
# A nickname may not start with a digit.
first_nick_allowed_chars = string.ascii_letters + special

def validate_nick(nick):
    """Return True when *nick* is a syntactically valid IRC nickname."""
    if nick[0] not in first_nick_allowed_chars:
        return False
    remainder = nick[1:]
    badchars = remainder.translate(nick_allowed_chars_tbl)
    return badchars == ''

chan_allowed_chars = string.ascii_letters + string.digits + special + '`~!@#$%^&*()+=|\\<>/?'
chan_allowed_chars_tbl = str.maketrans('', '', chan_allowed_chars)

def validate_chan(chan_name):
    """Return True when *chan_name* starts with '#' and uses only allowed characters."""
    # BUG FIX: tokens of this function were corrupted in the source dump
    # ('de | f', '| return'); reconstructed to mirror validate_nick above.
    if chan_name[0] != '#':
        return False
    badchars = chan_name[1:].translate(chan_allowed_chars_tbl)
    return badchars == ''
def uniq(input):
    """Return the distinct items of *input*, keeping first-seen order.

    Membership is checked by equality (not hashing), so unhashable items
    such as lists are supported.
    """
    seen = []
    for item in input:
        if item not in seen:
            seen.append(item)
    return seen
class UserHost:
    """Split an IRC ``nick!user@host`` mask into its components."""

    def __init__(self, nuh):
        self.nuh = nuh

    # XXX - put try:except on these just in case doesn't exist
    @property
    def nickname(self):
        # Everything before the '!'.
        return self.nuh.split('!')[0]

    @property
    def username(self):
        # Between the '!' and the '@'.
        return self.nuh.split('!')[1].split('@')[0]

    @property
    def hostname(self):
        # BUG FIX: this read ``self.nug`` (typo) and always raised
        # AttributeError; the mask is stored as ``self.nuh``.
        return self.nuh.split('@')[1]
|
"""
To create an Attribute Editor template using python, do the following:
1. create a subclass of `uitypes.AETemplate`
2. set its ``_nodeType`` class attribute to the name of the desired node type, or name the class using the
convention ``AE<nodeType>Template``
3. import the module
AETemplates which do not meet one of the two requirements listed in step 2 will be ignored. To ensure that your
Template's node type is being detected correctly, use the ``AETemplate.nodeType()`` class method::
import AETemplates
AETemplates.AEmib_amb_occlusionTemplate.nodeType()
As a convenience, when pymel is imported it will automatically import the module ``AETemplates``, if it exists,
thereby causing any AETemplates within it or its sub-modules to be registered. Be sure to import pymel
or modules containing your ``AETemplate`` classes before opening the Attribute Editor for the node types in question.
To check which python templates are loaded::
from pymel.core.uitypes import AELoader
print AELoader.loadedTemplates()
The example below demonstrates the simplest case, which is the first. It provides a layout for the mib_amb_occlusion
mental ray shader.
"""
from pymel.core import *
class LocalizedTemplate(ui.AETemplate):
    "automatically apply language localizations to template arguments"

    def _applyLocalization(self, name):
        # Names shaped like message ids ('kSomething') are resolved through
        # the Maya localized string resources for this template class;
        # anything else passes through unchanged.
        if name is not None and len(name) > 2 and name[0] == 'k' and name[1].isupper():
            return mel.uiRes('m_' + self.__class__.__name__ + '.' + name)
        return name

    def addControl(self, control, label=None, **kwargs):
        # BUG FIX: repaired the 'label | )' token corrupted in the source dump.
        label = self._applyLocalization(label)
        ui.AETemplate.addControl(self, control, label=label, **kwargs)

    def beginLayout(self, name, collapse=True):
        name = self._applyLocalization(name)
        ui.AETemplate.beginLayout(self, name, collapse=collapse)
class mmbTemplateBase(LocalizedTemplate):
    """Template scaffold: wraps the subclass buildBody() in a scroll layout."""

    def __init__(self, nodeName):
        LocalizedTemplate.__init__(self, nodeName)
        self.beginScrollLayout()
        self.buildBody(nodeName)
        self.endScrollLayout()

    def AEswatchDisplay(self, nodeName):
        # Delegates to the global MEL procedure of the same name.
        # BUG FIX: repaired the 'nodeN | ame' token corrupted in the source dump.
        mel.AEswatchDisplay(nodeName)
|
flags={}):
url_get_metadata = 'http://api.bilibili.com/view?'
url_get_comment = 'http://comment.bilibili.com/%(cid)s.xml'
if source == 'overseas':
url_get_media = 'http://interface.bilibili.com/v_cdn_play?'
else:
url_get_media = 'http://interface.bilibili.com/playurl?'
def parse_url(url):
    '''Parse a bilibili.com URL

    Return value: (aid, pid) -- pid defaults to '1' when absent.
    '''
    pattern = re.compile('http:/*[^/]+/video/av(\\d+)(/|/index.html|/index_(\\d+).html)?(\\?|#|$)')
    matched = pattern.match(url)
    if matched is None:
        raise ValueError('Invalid URL: %s' % url)
    return matched.group(1), matched.group(3) or '1'
def fetch_video_metadata(aid, pid):
    '''Fetch video metadata

    Arguments: aid, pid
    Return value: {'cid': cid, 'title': title}
    Raises ValueError when the response is unparsable or lacks a 'cid'.
    '''
    params = {'type': 'json', 'appkey': APPKEY, 'id': aid, 'page': pid}
    params['sign'] = bilibili_hash(params)
    _, raw = fetch_url(url_get_metadata+urllib.parse.urlencode(params),
                       user_agent=USER_AGENT_API, cookie=cookie)
    try:
        metadata = dict(json.loads(raw.decode('utf-8', 'replace')))
    except (TypeError, ValueError):
        raise ValueError('Can not get \'cid\' from %s' % url)
    if 'error' in metadata:
        logging.error('Error message: %s' % metadata.get('error'))
    if 'cid' not in metadata:
        raise ValueError('Can not get \'cid\' from %s' % url)
    return metadata
def get_media_urls(cid, *, fuck_you_bishi_mode=False):
    '''Request the URLs of the video

    Arguments: cid
    Return value: [media_urls]

    `fuck_you_bishi_mode` retries with a browser-like User-Agent after a
    User-Agent block is detected. `source`, `quality`, `cookie`, `url`,
    `aid` and `pid` are read from the enclosing scope.
    Raises ValueError when no usable media URL could be obtained.
    '''
    if source in {None, 'overseas'}:
        # Official API; on the retry pass impersonate the player client.
        user_agent = USER_AGENT_API if not fuck_you_bishi_mode else USER_AGENT_PLAYER
        req_args = {'appkey': APPKEY, 'cid': cid}
        if quality is not None:
            req_args['quality'] = quality
        req_args['sign'] = bilibili_hash(req_args)
        _, response = fetch_url(url_get_media+urllib.parse.urlencode(req_args), user_agent=user_agent, cookie=cookie)
        # Collect the CDATA text (nodeType 4) of the first <url> child of
        # every <durl> element.
        media_urls = [str(k.wholeText).strip() for i in xml.dom.minidom.parseString(response.decode('utf-8', 'replace')).getElementsByTagName('durl') for j in i.getElementsByTagName('url')[:1] for k in j.childNodes if k.nodeType == 4]
        # The API serves a stub error clip when it blocks our User-Agent.
        if not fuck_you_bishi_mode and media_urls == ['http://static.hdslb.com/error.mp4']:
            logging.error('Detected User-Agent block. Switching to fuck-you-bishi mode.')
            return get_media_urls(cid, fuck_you_bishi_mode=True)
    elif source == 'html5':
        # Mobile HTML5 endpoint keyed by aid/page instead of cid.
        req_args = {'aid': aid, 'page': pid}
        logging.warning('HTML5 video source is experimental and may not always work.')
        _, response = fetch_url('http://www.bilibili.com/m/html5?'+urllib.parse.urlencode(req_args), user_agent=USER_AGENT_PLAYER)
        response = json.loads(response.decode('utf-8', 'replace'))
        media_urls = [dict.get(response, 'src')]
        if not media_urls[0]:
            media_urls = []
        if not fuck_you_bishi_mode and media_urls == ['http://static.hdslb.com/error.mp4']:
            logging.error('Failed to request HTML5 video source. Retrying.')
            return get_media_urls(cid, fuck_you_bishi_mode=True)
    elif source == 'flvcd':
        # Third-party resolver; its page is GBK-encoded.
        req_args = {'kw': url}
        if quality is not None:
            if quality == 3:
                req_args['quality'] = 'high'
            elif quality >= 4:
                req_args['quality'] = 'super'
        _, response = fetch_url('http://www.flvcd.com/parse.php?'+urllib.parse.urlencode(req_args), user_agent=USER_AGENT_PLAYER)
        resp_match = re.search('<input type="hidden" name="inf" value="([^"]+)"', response.decode('gbk', 'replace'))
        if resp_match:
            # URLs come back '|'-separated in a hidden form field.
            media_urls = resp_match.group(1).rstrip('|').split('|')
        else:
            media_urls = []
    elif source == 'bilipr':
        req_args = {'cid': cid}
        quality_arg = '1080' if quality is not None and quality >= 4 else '720'
        logging.warning('BilibiliPr video source is experimental and may not always work.')
        resp_obj, response = fetch_url('http://pr.lolly.cc/P%s?%s' % (quality_arg, urllib.parse.urlencode(req_args)), user_agent=USER_AGENT_PLAYER)
        # Only an XML response carries <durl> entries; anything else is an
        # error page, so no URLs are extracted.
        if resp_obj.getheader('Content-Type', '').startswith('text/xml'):
            media_urls = [str(k.wholeText).strip() for i in xml.dom.minidom.parseString(response.decode('utf-8', 'replace')).getElementsByTagName('durl') for j in i.getElementsByTagName('url')[:1] for k in j.childNodes if k.nodeType == 4]
        else:
            media_urls = []
    else:
        # Unreachable guard: every valid source is handled above.
        assert source in {None, 'overseas', 'html5', 'flvcd', 'bilipr'}
    if len(media_urls) == 0 or media_urls == ['http://static.hdslb.com/error.mp4']:
        raise ValueError('Can not get valid media URLs.')
    return media_urls
def get_video_size(media_urls):
    '''Determine the resolution of the video

    Arguments: [media_urls]
    Return value: (width, height) -- (0, 0) when probing fails or is
        cancelled with Ctrl-C.
    '''
    try:
        if media_urls[0].startswith('http:') or media_urls[0].startswith('https:'):
            # Remote media: probe with a timeout and the player User-Agent.
            ffprobe_command = ['ffprobe', '-icy', '0', '-loglevel', 'repeat+warning' if verbose else 'repeat+error', '-print_format', 'json', '-select_streams', 'v', '-show_streams', '-timeout', '60000000', '-user-agent', USER_AGENT_PLAYER, '--', media_urls[0]]
        else:
            ffprobe_command = ['ffprobe', '-loglevel', 'repeat+warning' if verbose else 'repeat+error', '-print_format', 'json', '-select_streams', 'v', '-show_streams', '--', media_urls[0]]
        log_command(ffprobe_command)
        ffprobe_process = subprocess.Popen(ffprobe_command, stdout=subprocess.PIPE)
        try:
            ffprobe_output = json.loads(ffprobe_process.communicate()[0].decode('utf-8', 'replace'))
        except KeyboardInterrupt:
            logging.warning('Cancelling getting video size, press Ctrl-C again to terminate.')
            ffprobe_process.terminate()
            return 0, 0
        # Pick the video stream with the largest area.
        width, height, widthxheight = 0, 0, 0
        for stream in dict.get(ffprobe_output, 'streams') or []:
            stream_width = dict.get(stream, 'width')
            stream_height = dict.get(stream, 'height')
            # BUG FIX: widthxheight was never updated, so every stream
            # compared against 0 and the *last* stream won instead of the
            # largest one.
            if stream_width * stream_height > widthxheight:
                width, height = stream_width, stream_height
                widthxheight = stream_width * stream_height
        return width, height
    except Exception as e:
        # Size probing is best-effort: log (or re-raise in debug mode).
        log_or_raise(e, debug=debug)
        return 0, 0
def convert_comments(cid, video_size):
    '''Convert comments to ASS subtitle format

    Arguments: cid
    Return value: comment_out -> file
    '''
    # Download the raw comment XML for this cid.
    _, resp_comment = fetch_url(url_get_comment % {'cid': cid}, cookie=cookie)
    comment_in = io.StringIO(resp_comment.decode('utf-8', 'replace'))
    # utf-8-sig + CRLF so players recognise the .ass file on Windows; the
    # temp file is kept (delete=False) because the player reads it later.
    comment_out = tempfile.NamedTemporaryFile(mode='w', encoding='utf-8-sig', newline='\r\n', prefix='tmp-danmaku2ass-', suffix='.ass', delete=False)
    logging.info('Invoking Danmaku2ASS, converting to %s' % comment_out.name)
    # Defaults scale with the probed video size; `d2aflags` (an argument of
    # the enclosing function) overrides any of them.
    d2a_args = dict({'stage_width': video_size[0], 'stage_height': video_size[1], 'font_face': 'SimHei', 'font_size': math.ceil(video_size[1]/21.6), 'text_opacity': 0.8, 'duration_marquee': min(max(6.75*video_size[0]/video_size[1]-4, 3.0), 8.0), 'duration_still': 5.0}, **d2aflags)
    # User-supplied flags arrive as strings; coerce numeric ones.
    for i, j in ((('stage_width', 'stage_height', 'reserve_blank'), int), (('font_size', 'text_opacity', 'comment_duration', 'duration_still', 'duration_marquee'), float)):
        for k in i:
            if k in d2aflags:
                d2a_args[k] = j(d2aflags[k])
    try:
        danmaku2ass.Danmaku2ASS([comment_in], comment_out, **d2a_args)
    except Exception as e:
        # Comments are optional: log (or re-raise in debug mode) and go on.
        log_or_raise(e, debug=debug)
        logging.error('Danmaku2ASS failed, comments are disabled.')
    comment_out.flush()
|
#!/usr/bin/env python
# coding=UTF-8
__author__ = "Pierre-Yves Langlois"
__copyright__ = "https://github.com/pylangl | ois/uwsa/blob/master/LICENCE"
__credits__ = ["Pierre-Yves Langlois"]
__license__ = "BSD"
__maintainer__ = "Pierre-Yves Langlois"
from uwsas.common import *
from uwsas.commands.abstract_command import AbstractCommand
class CommandManager(AbstractCommand):
    """Top-level command that dispatches to the other uwsa sub-commands."""

    NAME = 'CommandManager'

    def __init__(self):
        AbstractCommand.__init__(self)
        # NOTE(review): the '%s' placeholder is never interpolated here --
        # presumably filled in with the command list elsewhere; confirm.
        self.help = t("""
Usage: uwsa cmd param
where cmd in %s
""")

    def get_log_name(self):
        # Log file / logger name used for this command.
        return 'uwsas'

# Module-level singleton used by the CLI entry point.
cmanager = CommandManager()
|
deos", category , "Aventura","http://www.nolomires.com/category/peliculas-de-aventura/","","")
xbmctools.addnewfolder( __channel__ ,"listvideos", category , "Bèlico Guerra ","http://www.nolomires.com/category/peliculas-de-guerra/","","")
xbmctools.addnewfolder( __channel__ ,"listvideos", category , "Ciencia-Ficción","http://www.nolomires.com/category/peliculas-de-ciencia-ficcion/","","")
#xbmctools.addnewfolder( __channel__ ,"listvideosMirror", category , "Clásicos","http://www.nolomires.com/category/peliculasclasicos/","","")
xbmctools.addnewfolder( __channel__ ,"listvideos", category , "Comedia","http://www.nolomires.com/category/peliculas-de-comedia/","","")
xbmctools.addnewfolder( __channel__ ,"listvideos", category , "Documentales","http://www.nolomires.com/category/peliculas-sobre-documentales/","","")
#xbmctools.addnewfolder( __channel__ ,"listvideosMirror", category , "Destacado","http://www.nolomires.com/category/peliculasdestacado/","","")
xbmctools.addnewfolder( __channel__ ,"listvideos", category , "Documentales Online","http://www.nolomires.c | om/category/documentales-online-completos/","","")
xbmctools.addnewfolder( __channel__ ,"listvideos", category , "Drama","http://www.nolomires.com/category/peliculas-de-drama/","","")
xbmctools.addnewfolder( __channel__ ,"listvideos", category , "Entretenimiento","http://www.nolomires.com/category/entretenimiento/","","")
xbmctools.addnewfolder( __channel__ ,"listvideos", ca | tegory , "Estrenos","http://www.nolomires.com/category/ultimos-extrenos/","","")
xbmctools.addnewfolder( __channel__ ,"listvideos", category , "General","http://www.nolomires.com/category/general/","","")
xbmctools.addnewfolder( __channel__ ,"listvideos", category , "Intriga","http://www.nolomires.com/category/peliculas-de-intriga/","","")
xbmctools.addnewfolder( __channel__ ,"listvideos", category , "Musicales","http://www.nolomires.com/category/peliculas-musicales/","","")
#xbmctools.addnewfolder( __channel__ ,"listvideos", category , "Peliculas HD","http://www.nolomires.com/category/peliculaspeliculas-hd-categorias/","","")
xbmctools.addnewfolder( __channel__ ,"listvideos", category , "Romance","http://www.nolomires.com/category/peliculas-sobre-romance/","","")
xbmctools.addnewfolder( __channel__ ,"listvideos", category , "Suspenso","http://www.nolomires.com/category/peliculas-de-suspenso/","","")
xbmctools.addnewfolder( __channel__ ,"listvideos", category , "Terror","http://www.nolomires.com/category/peliculas-de-terror/","","")
xbmctools.addnewfolder( __channel__ ,"listvideos", category , "Thriller","http://www.nolomires.com/category/peliculas-de-thriller/","","")
xbmctools.addnewfolder( __channel__ ,"listvideos", category , "Todas las Peliculas","http://www.nolomires.com/category/peliculas-en-nolomires/","","")
# Asigna el título, desactiva la ordenación, y cierra el directorio
xbmcplugin.setPluginCategory( handle=pluginhandle, category=category )
xbmcplugin.addSortMethod( handle=pluginhandle, sortMethod=xbmcplugin.SORT_METHOD_NONE )
xbmcplugin.endOfDirectory( handle=pluginhandle, succeeded=True )
def TagList(params,url,category):
    """Build the XBMC directory with one folder per tag scraped from *url*."""
    logger.info("[nolomires.py] TagList")

    # Download the page (cached).
    data = scrapertools.cachePage(url)
    #logger.info(data)

    # One entry per tag link: group 1 = URL, group 2 = title.
    pattern = ("<a href='([^']+)' class='[^']+' title='[^']+' style='[^']+'"
               ">([^<]+)</a>")
    entries = re.compile(pattern, re.DOTALL).findall(data)
    scrapertools.printMatches(entries)

    # Add each entry found to the XBMC listing.
    for scrapedurl, rawtitle in entries:
        scrapedtitle = acentos(rawtitle)
        scrapedthumbnail = ""
        scrapedplot = ""
        if (DEBUG): logger.info("title=["+scrapedtitle+"], url=["+scrapedurl+"], thumbnail=["+scrapedthumbnail+"]")
        xbmctools.addnewfolder( __channel__ , "listvideos" , category , scrapedtitle , scrapedurl , scrapedthumbnail, scrapedplot )

    # Set the plugin category, disable sorting, and close the directory.
    xbmcplugin.setPluginCategory( handle=pluginhandle, category=category )
    xbmcplugin.addSortMethod( handle=pluginhandle, sortMethod=xbmcplugin.SORT_METHOD_NONE )
    xbmcplugin.endOfDirectory( handle=pluginhandle, succeeded=True )
def MostWatched(params,url,category):
    """List the most-watched videos, appending the view count to the title."""
    logger.info("[nolomires.py] MostWatched")

    # Download the page (cached).
    data = scrapertools.cachePage(url)
    #logger.info(data)

    # group 1 = URL, group 2 = title, group 3 = view-count text.
    pattern = ('<li><a href="([^"]+)" '
               'title="([^"]+)">[^<]+'
               '</a>([^<]+)</li>')
    entries = re.compile(pattern, re.DOTALL).findall(data)
    scrapertools.printMatches(entries)

    # Add each entry found to the XBMC listing.
    for scrapedurl, rawtitle, views in entries:
        # Append the view count to the displayed title.
        scrapedtitle = acentos(rawtitle + views)
        scrapedthumbnail = ""
        scrapedplot = ""
        if (DEBUG): logger.info("title=["+scrapedtitle+"], url=["+scrapedurl+"], thumbnail=["+scrapedthumbnail+"]")
        xbmctools.addnewfolder( __channel__ , "detail" , category , scrapedtitle , scrapedurl , scrapedthumbnail, scrapedplot )

    # Set the plugin category, disable sorting, and close the directory.
    xbmcplugin.setPluginCategory( handle=pluginhandle, category=category )
    xbmcplugin.addSortMethod( handle=pluginhandle, sortMethod=xbmcplugin.SORT_METHOD_NONE )
    xbmcplugin.endOfDirectory( handle=pluginhandle, succeeded=True )
def LastSearch(params,url,category):
    """List recent searches, stripping 'online'/'ver ' noise from titles."""
    logger.info("[nolomires.py] LastSearch")

    # Download the page (cached).
    data = scrapertools.cachePage(url)
    #logger.info(data)

    # group 1 = URL, group 2 = title; trailing </a></li> is discarded.
    pattern = ('<li><a href="([^"]+)" '
               'title="([^"]+)">[^<]+'
               '</a></li>')
    entries = re.compile(pattern, re.DOTALL).findall(data)
    scrapertools.printMatches(entries)

    # Add each entry found to the XBMC listing.
    for scrapedurl, rawtitle in entries:
        scrapedtitle = acentos(rawtitle)
        scrapedtitle = scrapedtitle.replace("online","").replace("ver ","")
        scrapedthumbnail = ""
        scrapedplot = ""
        if (DEBUG): logger.info("title=["+scrapedtitle+"], url=["+scrapedurl+"], thumbnail=["+scrapedthumbnail+"]")
        xbmctools.addnewfolder( __channel__ , "listvideos" , category , scrapedtitle , scrapedurl , scrapedthumbnail, scrapedplot )

    # Set the plugin category, disable sorting, and close the directory.
    xbmcplugin.setPluginCategory( handle=pluginhandle, category=category )
    xbmcplugin.addSortMethod( handle=pluginhandle, sortMethod=xbmcplugin.SORT_METHOD_NONE )
    xbmcplugin.endOfDirectory( handle=pluginhandle, succeeded=True )
def listvideos(params,url,category):
logger.info("[nolomires.py] listvideos")
if url=="":
url = "http://www.nolomires.com/"
# Descarga la página
data = scrapertools.cachePage(url)
#logger.info(data)
# Extrae las entradas (carpetas)
patronvideos = '<div class="videoitem">.*?<a href="([^"]+)" ' # URL 0
patronvideos += 'title="([^"]+)">' # TITULO 1
patronvideos += '<img style="background: url\(([^\)]+)\)"' # IMAGEN 2
#patronvideos += '</div>[^<]+<div class=[^>]+>.*?href="[^"]+"><img '
#patronvideos += 'style=.*?src="([^"]+)".*?alt=.*?bold.*?>(.*?)</div>' # IMAGEN , DESCRIPCION
#patronvideos += '.*?flashvars="file=(.*?flv)\&' # VIDEO FLV
matches = re.compile(patronvideos,re.DOTALL).findall(data)
scrapertools.printMatches(matches)
for match in matches:
# Titulo
|
""""""
from __future__ import annotations
from typing import TYPE_CHECKING, Any, Callable, Dict
if TYPE_CHECKING:
from .extension import Admin
class AdminPanel:
    """Base class for admin panels.

    Currently this class does nothing. It may be useful in the future
    either as just a marker interface (for automatic plugin discovery /
    registration), or to add some common functionalities. Otherwise, it
    will be removed.
    """

    # Short machine identifier used in URL rules.
    id: str = ""
    # Human-readable name shown in the admin UI.
    label: str = ""
    # Icon identifier -- presumably a CSS icon class; TODO confirm.
    icon: str = ""
    # Back-reference to the owning Admin extension (assigned externally).
    admin: Admin

    def url_value_preprocess(self, endpoint: str, view_args: dict[Any, Any]):
        """Panel can preprocess values for their views.

        This method is called only if the endpoint is for `get()`, `post()`, or
        one of the views installed with `install_additional_rules`.

        This is also the right place to add items to the breadcrumbs.
        """

    def install_additional_rules(self, add_url_rule: Callable):
        """This method can be redefined in subclasses to install custom url
        rules.

        All rules are relative to panel 'base' rule, don't prefix rules with panel
        id, it will be done by `add_url_rule`.

        :param add_url_rule: function to use to add url rules, same interface as
            :meth:`flask.blueprint.Blueprint.add_url_rule`.
        """
|
set=1, playcards=7)
shallHighlightMatch = Game._shallHighlightMatch_SS
# ************************************************************************
# * Legion
# ************************************************************************
class Legion(Klondike):
    """Klondike on eight rows with a pyramid-shaped opening deal."""

    def createGame(self):
        Klondike.createGame(self, max_rounds=1, rows=8)

    def startGame(self):
        self.startDealSample()
        self.s.talon.dealRow()
        # Each pass covers a narrower band of inner rows: first card of the
        # pair face down, second face up.
        for depth in range(1, 4):
            inner = self.s.rows[depth:-depth]
            self.s.talon.dealRow(rows=inner, flip=0)
            self.s.talon.dealRow(rows=inner)
        self.s.talon.dealCards()
# ************************************************************************
# * Big Bertha
# ************************************************************************
class BigBertha(Game):
    """Two-deck game: eight 12-card suit foundations, 15 rows, 14 open
    reserves, and one kings-only foundation."""

    def createGame(self):
        lay, s = Layout(self), self.s
        self.setSize(lay.XM+15*lay.XS, lay.YM+3*lay.YS+15*lay.YOFFSET)
        # Talon in the top-left corner; it only deals the opening layout.
        x, y = lay.XM, lay.YM
        s.talon = InitialDealTalonStack(x, y, self)
        # Eight suit foundations (two per suit), capped at 12 cards each.
        x, y = lay.XM+3.5*lay.XS, lay.YM
        for i in range(8):
            s.foundations.append(SS_FoundationStack(x, y, self,
                                 suit=i % 4, max_cards=12))
            x += lay.XS
        # Fifteen row stacks across the middle.
        x, y = lay.XM, lay.YM+lay.YS
        for i in range(15):
            s.rows.append(AC_RowStack(x, y, self))
            x += lay.XS
        # Fourteen open reserves along the bottom; no cards may be dropped
        # on them (max_accept=0).
        x, y = lay.XM, self.height-lay.YS
        for i in range(14):
            s.reserves.append(OpenStack(x, y, self, max_accept=0))
            x += lay.XS
        # Final foundation collects the eight kings (dir=0, any suit).
        s.foundations.append(RK_FoundationStack(x, y, self, suit=ANY_SUIT,
                             base_rank=KING, dir=0, max_cards=8))
        lay.defaultStackGroups()

    def startGame(self):
        # Five rows face down, one face up, then fill the reserves.
        self._startDealNumRows(5)
        self.s.talon.dealRow()
        self.s.talon.dealRow(rows=self.s.reserves)

    shallHighlightMatch = Game._shallHighlightMatch_AC
# ************************************************************************
# * Athena
# ************************************************************************
class Athena(Klondike):
    """Klondike with a different opening deal: rows alternate face-down and
    face-up instead of the usual staircase."""

    def startGame(self):
        # Three un-animated rows: down, up, down.
        self.s.talon.dealRow(frames=0, flip=0)
        self.s.talon.dealRow(frames=0)
        self.s.talon.dealRow(frames=0, flip=0)
        self.startDealSample()
        # Final face-up row, animated, then start dealing from the talon.
        self.s.talon.dealRow()
        self.s.talon.dealCards()
# ************************************************************************
# * Kingsley
# ************************************************************************
class Kingsley(Klondike):
    """Klondike with inverted ranks: foundations build down from kings,
    rows build up from aces; single pass through the deck."""

    Foundation_Class = StackWrapper(SS_FoundationStack, base_rank=KING, dir=-1)
    RowStack_Class = StackWrapper(KingAC_RowStack, base_rank=ACE, dir=1)

    def createGame(self):
        Klondike.createGame(self, max_rounds=1)
# ************************************************************************
# * Scarp
# ************************************************************************
class Scarp(Klondike):
    """Klondike variant: 13 rows, no waste pile, deal-by-row talon."""

    Talon_Class = DealRowTalonStack
    RowStack_Class = AC_RowStack

    def createGame(self):
        Klondike.createGame(self, max_rounds=1, rows=13, waste=0, playcards=28)

    def startGame(self):
        # All opening cards are dealt face up.
        Klondike.startGame(self, flip=1)
# ************************************************************************
# * Eight Sages
# ************************************************************************
class EightSages_Row(AC_RowStack):
    """Row stack that only accepts cards played from the waste pile."""

    def acceptsCards(self, from_stack, cards):
        # Apply the normal alternate-color rule first, then restrict the
        # origin to the waste.
        if AC_RowStack.acceptsCards(self, from_stack, cards):
            return from_stack is self.game.s.waste
        return False
class EightSages(Klondike):
    """Klondike variant: eight rows, two passes through the deck, and rows
    only accept cards from the waste (see EightSages_Row)."""

    RowStack_Class = EightSages_Row

    def createGame(self):
        lay = Klondike.createGame(self, max_rounds=2, rows=8,
                                  playcards=12, round_text=True)
        # Show the round counter next to the talon.
        lay.createRoundText(self.s.talon, 'ne', dx=lay.XS)

    def startGame(self):
        self.startDealSample()
        self.s.talon.dealRow()
        self.s.talon.dealCards()
# ************************************************************************
# * Guardian
# ************************************************************************
class Guardian_RowStack(AC_RowStack):
    # Offsets into the pyramid: STEP[i] is the distance from stack i to the
    # stack below-left of it in the next tier -- presumably rows of 3, 4
    # and 5 stacks; confirm against Guardian.createGame.
    STEP = (3, 3, 3, 4, 4, 4, 4)

    def basicIsBlocked(self):
        """Return True while any stack covering this one still holds cards."""
        r, step = self.game.s.rows, self.STEP
        i, n, mylen = self.id, 1, len(step)
        # Walk down the pyramid; each tier widens the covering span by one.
        while i < mylen:
            i = i + step[i]
            n = n + 1
        for j in range(i, i + n):
            if r[j].cards:
                return True
        return False

    def acceptsCards(self, from_stack, cards):
        # Only the three stacks of the top tier (ids 0-2) may be refilled
        # once empty.
        if len(self.cards) == 0 and self.id > 2:
            return False
        return AC_RowStack.acceptsCards(self, from_stack, cards)
class Guardian(Game):
    """Pyramid of twelve overlapping row stacks fed by a talon/waste pair,
    with four wrapping (mod 13) suit foundations."""

    def createGame(self):
        lay, s = Layout(self), self.s
        self.setSize((7 * lay.XS) + lay.XM,
                     (2.5 * lay.YS) + (13 * lay.YOFFSET) + lay.YM)
        # create stacks
        # Three overlapping tiers of 3, 4 and 5 stacks, each tier shifted
        # half a stack left and a quarter stack down.
        for i in range(3):
            x = lay.XM + (4 - i) * lay.XS // 2
            y = lay.YM + lay.TEXT_HEIGHT + lay.YS + i * lay.YS // 4
            for j in range(i + 3):
                s.rows.append(Guardian_RowStack(x, y, self))
                x = x + lay.XS
        # Talon deals three at a time with unlimited redeals.
        x, y = lay.XM, lay.YM
        s.talon = WasteTalonStack(x, y, self,
                                  max_rounds=-1, num_deal=3)
        lay.createText(s.talon, "s")
        x += lay.XS
        s.waste = WasteStack(x, y, self)
        lay.createText(s.waste, "s")
        x += lay.XS
        # Four suit foundations; mod=13 lets them wrap past the king.
        for i in range(4):
            x += lay.XS
            s.foundations.append(SS_FoundationStack(x, y, self, i,
                                                    mod=13, max_move=0))
        lay.defaultStackGroups()

    def startGame(self):
        self.startDealSample()
        # Top two tiers face down, bottom tier face up.
        self.s.talon.dealRow(rows=self.s.rows[:7], flip=0)
        self.s.talon.dealRow(rows=self.s.rows[7:])
        self.s.talon.dealCards()          # deal first card to WasteStack
# register the game
registerGame(GameInfo(2, Klondike, "Klondike",
GI.GT_KLONDI | KE, 1, -1, GI.SL_BALANCED,
altnames=("Classic Solitaire", "American Patience")))
registerGame(GameInfo(61, CasinoKlondike, "Casino Klondike",
GI.GT_KLONDIKE | GI.GT_SCORE, 1, 2, GI.SL_BALANCED))
registerGame(GameInfo(129, VegasKlondike, "Vegas Klondike",
GI.GT_KLONDIKE | GI.GT_SCORE, 1, 0, GI.SL_BALANCED))
registerGame(GameInfo(18, KlondikeByThrees, "Klondike by Threes",
GI.GT_KLONDIKE, 1, -1, GI.SL_MOSTLY_LUCK))
registerGame(GameInfo(58, ThumbAndPouch, "Thumb and Pouch",
GI.GT_KLONDIKE, 1, 0, GI.SL_MOSTLY_LUCK))
registerGame(GameInfo(67, Whitehead, "Whitehead",
GI.GT_KLONDIKE, 1, 0, GI.SL_MOSTLY_SKILL))
registerGame(GameInfo(39, SmallHarp, "Small Harp",
GI.GT_KLONDIKE, 1, -1, GI.SL_BALANCED,
altnames=("Die kleine Harfe",)))
registerGame(GameInfo(66, Eastcliff, "Eastcliff",
GI.GT_KLONDIKE, 1, 0, GI.SL_BALANCED))
registerGame(GameInfo(224, Easthaven, "Easthaven",
GI.GT_GYPSY, 1, 0, GI.SL_MOSTLY_LUCK))
registerGame(GameInfo(33, Westcliff, "Westcliff",
GI.GT_KLONDIKE, 1, 0, GI.SL_MOSTLY_LUCK))
registerGame(GameInfo(225, Westhaven, "Westhaven",
GI.GT_GYPSY, 1, 0, GI.SL_BALANCED))
registerGame(GameInfo(107, PasSeul, "Pas Seul",
GI.GT_KLONDIKE, 1, 0, GI.SL_BALANCED))
registerGame(GameInfo(81, BlindAlleys, "Blind Alleys",
GI.GT_KLONDIKE, 1, 1, GI.SL_MOSTLY_LUCK))
registerGame(GameInfo(215, Somerset, "Somerset",
GI.GT_BELEAGUERED_CASTLE | GI.GT_OPEN, 1, 0,
GI.SL_MOSTLY_SKILL))
registerGame(GameInfo(231, Canister, "Canister",
GI.GT_BELEAGUERED_CASTLE | GI.GT_OPEN, 1, 0,
GI.SL_MOSTLY_SKILL))
registerGame(GameInfo(229, AgnesSorel, "Agnes Sorel",
GI.GT_GYPSY, 1, 0, GI.SL_MOSTLY_LUCK))
registerGame(GameInfo(4, EightTimesEight, "8 x 8",
|
env=self.env,
logEnviron=self.logEnviron,
timeout=self.timeout,
collectStdout=collectStdout,
collectStderr=collectStderr)
cmd.useLog(self.stdio_log, False)
yield self.runCommand(cmd)
if cmd.didFail() and abandonOnFailure:
log.msg("Source step failed while running command {}".format(cmd))
raise buildstep.BuildStepFailed()
if collectStdout and collectStderr:
return (cmd.stdout, cmd.stderr)
elif collectStdout:
return cmd.stdout
elif collectStderr:
return cmd.stderr
return cmd.rc
def _getMethod(self):
if self.method is not None and self.mode != 'incremental':
return self.method
elif self.mode == 'incremental':
return None
elif self.method is None and self.mode == 'full':
return 'fresh'
return None
@defer.inlineCallbacks
def _sourcedirIsUpdatable(self):
    """Return True when the workdir holds a usable checkout of self.repourl."""
    # first, perform a stat to ensure that this is really an svn directory
    res = yield self.pathExists(self.build.path_module.join(self.workdir, '.svn'))
    if not res:
        return False
    # then run 'svn info --xml' to check that the URL matches our repourl
    stdout, stderr = yield self._dovccmd(['info', '--xml'], collectStdout=True,
                                         collectStderr=True, abandonOnFailure=False)
    # svn: E155037: Previous operation has not finished; run 'cleanup' if
    # it was interrupted
    if 'E155037:' in stderr:
        return False
    try:
        stdout_xml = xml.dom.minidom.parseString(stdout)
        extractedurl = stdout_xml.getElementsByTagName(
            'url')[0].firstChild.nodeValue
    except xml.parsers.expat.ExpatError as e:
        yield self.stdio_log.addHeader("Corrupted xml, aborting step")
        raise buildstep.BuildStepFailed() from e
    # Canonicalize both URLs so trivially-different spellings still match.
    return extractedurl == self.svnUriCanonicalize(self.repourl)
@defer.inlineCallbacks
def parseGotRevision(self):
    """Run 'svn info --xml' on the worker and store the checked-out
    revision in the 'got_revision' source property.

    Returns the remote command's return code; raises BuildStepFailed on
    unparsable output or a missing revision.
    """
    # if this was a full/export, then we need to check svnversion in the
    # *source* directory, not the build directory
    svnversion_dir = self.workdir
    if self.mode == 'full' and self.method == 'export':
        svnversion_dir = 'source'
    cmd = remotecommand.RemoteShellCommand(svnversion_dir, ['svn', 'info', '--xml'],
                                           env=self.env,
                                           logEnviron=self.logEnviron,
                                           timeout=self.timeout,
                                           collectStdout=True)
    cmd.useLog(self.stdio_log, False)
    yield self.runCommand(cmd)
    stdout = cmd.stdout
    try:
        stdout_xml = xml.dom.minidom.parseString(stdout)
    except xml.parsers.expat.ExpatError as e:
        yield self.stdio_log.addHeader("Corrupted xml, aborting step")
        raise buildstep.BuildStepFailed() from e
    revision = None
    if self.preferLastChangedRev:
        # Prefer the <commit> (Last Changed Rev) element when configured.
        try:
            revision = stdout_xml.getElementsByTagName(
                'commit')[0].attributes['revision'].value
        except (KeyError, IndexError):
            msg = ("SVN.parseGotRevision unable to detect Last Changed Rev in"
                   " output of svn info")
            log.msg(msg)
            # fall through and try to get 'Revision' instead
    if revision is None:
        # Fall back to the checkout revision on the <entry> element.
        try:
            revision = stdout_xml.getElementsByTagName(
                'entry')[0].attributes['revision'].value
        except (KeyError, IndexError) as e:
            msg = ("SVN.parseGotRevision unable to detect revision in"
                   " output of svn info")
            log.msg(msg)
            raise buildstep.BuildStepFailed() from e
    yield self.stdio_log.addHeader("Got SVN revision {}".format(revision))
    self.updateSourceProperty('got_revision', revision)
    return cmd.rc
@defer.inlineCallbacks
def purge(self, ignore_ignores):
    """Delete every unversioned file reported by 'svn status --xml'.

    With ignore_ignores, svn:ignored files are removed as well.
    Raises BuildStepFailed when removal fails.
    """
    command = ['status', '--xml']
    if ignore_ignores:
        command.append('--no-ignore')
    stdout = yield self._dovccmd(command, collectStdout=True)
    files = []
    # Resolve each unversioned path relative to the working directory.
    for filename in self.getUnversionedFiles(stdout, self.keep_on_purge):
        filename = self.build.path_module.join(self.workdir, filename)
        files.append(filename)
    if files:
        # Workers older than 2.14 cannot take a list of paths in one rmdir
        # command, so fall back to removing them one at a time.
        if self.workerVersionIsOlderThan('rmdir', '2.14'):
            rc = yield self.removeFiles(files)
        else:
            rc = yield self.runRmdir(files, abandonOnFailure=False, timeout=self.timeout)
        if rc != 0:
            log.msg("Failed removing files")
            raise buildstep.BuildStepFailed()
@staticmethod
def getUnversionedFiles(xmlStr, keep_on_purge):
try:
result_xml = xml.dom.minidom.parseString(xmlStr)
except xml.parsers.expat.ExpatError as e:
log.err("Corrupted xml, aborting step")
raise buildstep.BuildStepFailed() from e
for entry in result_xml.getElementsByTagName('entry'):
(wc_status,) = entry.getElementsByTagName('wc-status')
if wc_status.getAttribute('item') == 'external':
continue
if wc_status.getAttribute('item') == 'missing':
continue
filename = e | ntry.getAttribute('path')
if filename in keep_on_purge or filename == '':
continue
yield filename
@defer.inlineCallbacks
def removeFiles(self, files):
    """Remove *files* one at a time; return the first non-zero rc, else 0."""
    for filename in files:
        res = yield self.runRmdir(filename, abandonOnFailure=False, timeout=self.timeout)
        if res:
            # Stop at the first failure and report its return code.
            return res
    return 0
@defer.inlineCallbacks
def checkSvn(self):
    """Return True when the worker can successfully run 'svn --version'."""
    cmd = remotecommand.RemoteShellCommand(self.workdir, ['svn', '--version'],
                                           env=self.env,
                                           logEnviron=self.logEnviron,
                                           timeout=self.timeout)
    cmd.useLog(self.stdio_log, False)
    yield self.runCommand(cmd)
    return cmd.rc == 0
def computeSourceRevision(self, changes):
    """Return the highest revision among *changes* as an int, or None when
    there are no changes or any revision is unknown."""
    if not changes:
        return None
    revisions = [c.revision for c in changes]
    if None in revisions:
        return None
    return max(int(r) for r in revisions)
@staticmethod
def svnUriCanonicalize(uri):
collapse = re.compile(r'([^/]+/\.\./?|/\./|//|/\.$|/\.\.$|^/\.\.)')
server_authority = re.compile(r'^(?:([^@]+)@)?([^:]+)(?::(.+))?$')
default_port = {'http': '80',
'https': '443',
'svn': '3690'}
relative_schemes = ['http', 'https', 'svn']
def quote(uri):
return urlquote(uri, "!$&'()*+,-./:=@_~", encoding="latin-1")
if not uri or uri == '/':
return uri
(scheme, authority, path, parameters, query, fragment) = urlparse(uri)
scheme = scheme.lower()
if authority:
mo = server_authority.match(authority)
if not mo:
return uri # give up
userinfo, host, port = mo.groups()
if host[-1] == '.':
host = host[:-1]
authority = host.lower()
if userinfo:
authority = "{}@{}".format(userinfo, authority)
if port and port != default_port.get(scheme, None):
authority = "{}:{}".format(authority, port)
if scheme in relative_schemes:
last_path = path
while True:
path = collapse.sub('/', path, 1)
|
# Copyright 2017 IBM Corp.
from zvmconnector import connector
import os
import time
# Demo: import an image, create, network and start a z/VM guest through the
# SDK API server. Every step's request and raw result are printed.
print("Setup client: client=connector.ZVMConnector('9.60.18.170', 8080)\n")
client = connector.ZVMConnector('9.60.18.170', 8080)

print("Test: send_request('vswitch_get_list')")
# Renamed from 'list' so the builtin is not shadowed.
vswitch_list = client.send_request('vswitch_get_list')
print("Result: %s\n" % vswitch_list)

# Deployment parameters for the demo guest.
GUEST_USERID = 'DEMOV1S2'
GUEST_PROFILE = 'osdflt'
GUEST_VCPUS = 1
GUEST_MEMORY = 2048
DISK_POOL = 'ECKD:POOL1'
IMAGE_PATH = '/tmp/rhel7eckd_IUCV_zvmguestconfigure.img'
IMAGE_OS_VERSION = 'rhel7.0'
GUEST_IP_ADDR = '192.168.100.3'
GATEWAY = '192.168.100.1'
CIDR = '192.168.100.1/24'
VLANID = 100
VSWITCH_NAME = 'Datanet1'
network_info = [{'ip_addr': GUEST_IP_ADDR, 'gateway_addr': GATEWAY, 'cidr': CIDR}]
image_name = os.path.basename(IMAGE_PATH)
url = 'file://' + IMAGE_PATH

print("Parameter list:")
print("GUEST_USERID: %s" % GUEST_USERID)
print("GUEST_PROFILE: %s" % GUEST_PROFILE)
print("GUEST_VCPUS: %s" % GUEST_VCPUS)
print("GUEST_MEMORY: %s" % GUEST_MEMORY)
print("DISK_POOL: %s" % DISK_POOL)
print("IMAGE_PATH: %s" % IMAGE_PATH)
print("IMAGE_OS_VERSION: %s" % IMAGE_OS_VERSION)
print("image_name: %s" % image_name)
print("url: %s" % url)
print("network_info: %s" % network_info)
print("-----------------------------------------------------------------------------------------------------------\n")

print("Import image: send_request('image_import', '%s', url, {'os_version': '%s'})" % (image_name, IMAGE_OS_VERSION))
info = client.send_request('image_import', image_name, url, {'os_version': IMAGE_OS_VERSION})
print('Result: %s\n' % info)

print("Get image size: send_request('image_get_root_disk_size', '%s')" % image_name)
info = client.send_request('image_get_root_disk_size', image_name)
print('Result: %s\n' % info)
size = info['output']

# The boot disk must be at least as large as the image's root disk.
disks_list = [{'size': size, 'is_boot_disk': True, 'disk_pool': DISK_POOL}]
print("set disks_list: %s\n" % disks_list)

print("Create guest: send_request('guest_create', '%s', '%s', '%s', disk_list='%s', user_profile='%s')" %
      (GUEST_USERID, GUEST_VCPUS, GUEST_MEMORY, disks_list, GUEST_PROFILE))
info = client.send_request('guest_create', GUEST_USERID, GUEST_VCPUS, GUEST_MEMORY, disk_list=disks_list, user_profile=GUEST_PROFILE)
print('Result: %s\n' % info)

print("Guest deploy: send_request('guest_deploy', '%s', '%s')" % (GUEST_USERID, image_name))
info = client.send_request('guest_deploy', GUEST_USERID, image_name)
print('Result: %s\n' % info)

print("Set network: send_request('guest_create_network_interface', '%s', '%s', '%s')" % (GUEST_USERID, IMAGE_OS_VERSION, network_info))
info = client.send_request('guest_create_network_interface', GUEST_USERID, IMAGE_OS_VERSION, network_info)
print('Result: %s\n' % info)
nic = info['output'][0]['nic_vdev']

print("Couple network: send_request('guest_nic_couple_to_vswitch', '%s', '%s', '%s')" % (GUEST_USERID, nic, VSWITCH_NAME))
# BUG FIX: couple the NIC that was just created instead of a hard-coded
# '1000' -- the log line above already reports `nic`.
info = client.send_request('guest_nic_couple_to_vswitch', GUEST_USERID, nic, VSWITCH_NAME)
print('Result: %s\n' % info)

print("Set VLAN ID")
info = client.send_request('vswitch_set_vlan_id_for_user', VSWITCH_NAME, GUEST_USERID, VLANID)
print('Result: %s\n' % info)

print("Grant user: send_request('vswitch_grant_user', '%s', '%s')" % (VSWITCH_NAME, GUEST_USERID))
info = client.send_request('vswitch_grant_user', VSWITCH_NAME, GUEST_USERID)
print('Result: %s\n' % info)

print("Check power state: send_request('guest_get_power_state', '%s')" % GUEST_USERID)
info = client.send_request('guest_get_power_state', GUEST_USERID)
print('Result: %s\n' % info)

print("Start guest: send_request('guest_start', '%s')" % GUEST_USERID)
info = client.send_request('guest_start', GUEST_USERID)
print('Result: %s\n' % info)

print("Check power state: send_request('guest_get_power_state', '%s')" % GUEST_USERID)
info = client.send_request('guest_get_power_state', GUEST_USERID)
print('Result: %s\n' % info)

print("Get user direct: send_request('guest_get_definition_info', '%s')" % GUEST_USERID)
info = client.send_request('guest_get_definition_info', GUEST_USERID)
print('Result: %s\n' % info)

print('Completed\n')
|
subprocess.check_output(command, shell=True).strip().split("\n")
except subprocess.CalledProcessError:
pass
def check_boot():
    """Write boot-time service definitions and exit.

    Only acts when the script was started with the 'onboot' argument.
    Target directories can be overridden through the INIT_DIR, SYSTEMD_DIR
    and NGINX_DIR environment variables (useful for testing); files are
    only written for directories that actually exist. Exits the process
    with status 0 after writing.
    """
    if 'onboot' not in sys.argv:
        return
    installer = os.path.abspath(__file__)
    upstart_dir = os.getenv('INIT_DIR', '/etc/init')
    unit_dir = os.getenv('SYSTEMD_DIR', '/etc/systemd/system')
    proxy_dir = os.getenv('NGINX_DIR', '/etc/nginx/conf.d')
    if os.path.exists(upstart_dir):
        # Upstart job: launch the installer on the standard runlevels.
        job = "start on runlevel [2345]\nexec {0} selfdestruct\n".format(installer)
        with open('{0}/dokku-installer.conf'.format(upstart_dir), 'w') as fp:
            fp.write(job)
    if os.path.exists(unit_dir):
        # Systemd unit with the same selfdestruct behaviour.
        unit_lines = [
            "[Unit]",
            "Description=Dokku web-installer",
            "",
            "[Service]",
            "ExecStart={0} selfdestruct".format(installer),
            "",
            "[Install]",
            "WantedBy=multi-user.target",
            "WantedBy=graphical.target",
        ]
        with open('{0}/dokku-installer.service'.format(unit_dir), 'w') as fp:
            fp.write("\n".join(unit_lines) + "\n")
    if os.path.exists(proxy_dir):
        # Proxy port 80 to the installer while it runs.
        with open('{0}/dokku-installer.conf'.format(proxy_dir), 'w') as fp:
            fp.write("upstream dokku-installer { server 127.0.0.1:2000; }\n")
            fp.write("server {\n")
            fp.write("  listen 80;\n")
            fp.write("  location / {\n")
            fp.write("    proxy_pass http://dokku-installer;\n")
            fp.write("  }\n")
            fp.write("}\n")
        subprocess.call('rm -f /etc/nginx/sites-enabled/*', shell=True)
    sys.exit(0)
class GetHandler(SimpleHTTPServer.SimpleHTTPRequestHandler):
    """Serves the installer page and processes the setup form.

    Relies on module-level state defined elsewhere in this file: PAGE,
    VERSION, hostname, key_file and admin_keys.
    """

    def do_GET(self):
        # Fill the page template with the current configuration values.
        content = PAGE.replace('{VERSION}', VERSION)
        content = content.replace('{HOSTNAME}', hostname)
        content = content.replace('{AUTHORIZED_KEYS_LOCATION}', key_file)
        content = content.replace('{ADMIN_KEYS}', "\n".join(admin_keys))
        self.send_response(200)
        self.end_headers()
        self.wfile.write(content)

    def do_POST(self):
        """Handle the setup form: write VHOST/HOSTNAME, register SSH keys,
        preseed debconf, and optionally tear the installer down."""
        # Only the setup endpoint accepts POSTs; everything else is ignored.
        if self.path not in ['/setup', '/setup/']:
            return
        params = cgi.FieldStorage(fp=self.rfile,
                                  headers=self.headers,
                                  environ={
                                      'REQUEST_METHOD': 'POST',
                                      'CONTENT_TYPE': self.headers['Content-Type']})
        vhost_enable = 'false'
        dokku_root = os.getenv('DOKKU_ROOT', '/home/dokku')
        if 'vhost' in params and params['vhost'].value == 'true':
            vhost_enable = 'true'
            with open('{0}/VHOST'.format(dokku_root), 'w') as f:
                f.write(params['hostname'].value)
        else:
            # vhost disabled: remove a stale VHOST file if present.
            try:
                os.remove('{0}/VHOST'.format(dokku_root))
            except OSError:
                pass
        with open('{0}/HOSTNAME'.format(dokku_root), 'w') as f:
            f.write(params['hostname'].value)
        for (index, key) in enumerate(params['keys'].value.splitlines(), 1):
            # Pick a unique sshcommand user name: adminN while no admin
            # exists yet, web-adminN afterwards.
            user = 'admin'
            if self.admin_user_exists() is not None:
                user = 'web-admin'
                if self.web_admin_user_exists() is not None:
                    index = int(self.web_admin_user_exists()) + 1
                elif self.web_admin_user_exists() is None:
                    index = 1
            elif self.admin_user_exists() is None:
                # First admin: keep the enumerate() index.
                pass
            else:
                # NOTE: unreachable — admin_user_exists() is either None or
                # not None, both handled above. Kept for fidelity.
                index = int(self.admin_user_exists()) + 1
            user = user + str(index)
            command = ['sshcommand', 'acl-add', 'dokku', user]
            proc = subprocess.Popen(command, stdin=subprocess.PIPE)
            proc.stdin.write(key)
            proc.stdin.close()
            proc.wait()
        set_debconf_selection('boolean', 'nginx_enable', 'true')
        set_debconf_selection('boolean', 'skip_key_file', 'true')
        set_debconf_selection('boolean', 'vhost_enable', vhost_enable)
        set_debconf_selection('boolean', 'web_config', 'false')
        set_debconf_selection('string', 'hostname', params['hostname'].value)
        if 'selfdestruct' in sys.argv:
            DeleteInstallerThread()
        self.send_response(200)
        self.end_headers()
        self.wfile.write(json.dumps({'status': 'ok'}))

    def web_admin_user_exists(self):
        """Highest web-adminN suffix as an int, or None when absent."""
        return self.user_exists('web-admin(\d+)')

    def admin_user_exists(self):
        """Highest adminN suffix as an int, or None when absent."""
        return self.user_exists('admin(\d+)')

    def user_exists(self, name):
        """Scan `dokku ssh-keys:list` for NAME="<pattern>" entries.

        Returns the highest captured numeric suffix (int), or None when no
        entry matches.
        """
        command = 'dokku ssh-keys:list'
        pattern = re.compile(r'NAME="' + name + '"')
        proc = subprocess.Popen(command, shell=True, stdout=subprocess.PIPE)
        max_num = 0
        exists = False
        for line in proc.stdout:
            m = pattern.search(line)
            if m:
                # User of the form `user` or `user#` exists
                exists = True
                # BUG FIX: compare suffixes numerically; the previous string
                # max() ranked '9' above '10', producing duplicate names.
                max_num = max(max_num, int(m.group(1)))
        if exists:
            return max_num
        else:
            return None
def set_debconf_selection(debconf_type, key, value):
    """Preseed one dokku debconf answer (Debian-family systems only).

    Does nothing unless /etc/os-release mentions 'debian'. Failures of
    debconf-set-selections are deliberately ignored (best effort).
    """
    with open('/etc/os-release', 'r') as release:
        is_debian = any('debian' in line for line in release)
    if not is_debian:
        return
    selection = 'dokku dokku/{0} {1} {2}'.format(key, debconf_type, value)
    ps = subprocess.Popen(['echo', selection], stdout=subprocess.PIPE)
    try:
        subprocess.check_output(['debconf-set-selections'], stdin=ps.stdout)
    except subprocess.CalledProcessError:
        pass
    ps.wait()
class DeleteInstallerThread(object):
    """Removes the installer's nginx/init artifacts from a daemon thread.

    Instantiating the class immediately starts the thread; the object keeps
    no state and is normally discarded right away.
    """

    def __init__(self, interval=1):
        # `interval` is kept for backward compatibility; it is unused.
        thread = threading.Thread(target=self.run, args=())
        thread.daemon = True
        thread.start()

    def run(self):
        """Drop the nginx proxy config and bounce nginx, then remove the
        boot-time definitions and stop the running installer service."""
        commands = [
            "rm /etc/nginx/conf.d/dokku-installer.conf && /etc/init.d/nginx stop && /etc/init.d/nginx start",
            "rm -f /etc/init/dokku-installer.conf /etc/systemd/system/dokku-installer.service && (stop dokku-installer || systemctl stop dokku-installer.service)",
        ]
        for command in commands:
            try:
                subprocess.call(command, shell=True)
            except Exception:
                # BUG FIX: was a bare `except:`, which also swallowed
                # SystemExit/KeyboardInterrupt. Still best-effort cleanup.
                pass
def main():
    """Entry point: handle 'onboot' mode, then serve the installer UI."""
    check_boot()
    listen_port = int(os.getenv('PORT', 2000))
    server = SocketServer.TCPServer(("", listen_port), GetHandler)
    print("Listening on 0.0.0.0:{0}, CTRL+C to stop".format(listen_port))
    server.serve_forever()
PAGE = """
<html>
<head>
<meta charset="utf-8" />
<title>Dokku Setup</title>
<link rel="stylesheet" href="https://stackpath.bootstrapcdn.com/bootstrap/4.1.3/css/bootstrap.min.css" integrity="sha384-MCw98/SFnGE8fJT3GXwEOngsV7Zt27NXFoaoApmYm81iuXoPkFOJwJ8ERdknLPMO" crossorigin="anonymous">
<style>
.bd-callout {
padding: 1.25rem;
margin-top: 1.25rem;
margin-bottom: 1.25rem;
border: 1px solid #eee;
border-left-width: .25rem;
border-radius: .25rem;
}
.bd-callout p:last-child {
margin-bottom: 0;
}
.bd-callout-info {
border-left-color: #5bc0de;
}
pre {
font-size: 80%;
margin-bottom: 0;
}
h1 small {
font-size: 50%;
}
h5 {
font-size: 1rem;
}
.container {
width: 640px;
}
.result {
padding-left: 20px;
}
input.form-control, textarea.form-control {
background-color: #fafbfc;
font-size: 14px;
}
input.form-control::placeholder, textarea.form-control::placeholder {
color: #adb2b8
}
</style>
</head>
<body>
<div class="container">
<form id="form" role="form">
<h1 class="pt-3">Dokku Setup <small class="text-muted">{VERSION}</small></h1>
<div class="alert alert-warning small" role="alert">
<strong>Warning:</strong> The SSH key filled out here can grant root access to the server. Please complete the setup as soon as possible.
</div>
<div class="row">
<div class="col">
<h3>Admin Access</h3>
<div class="form-group">
<label for="key">Public SSH Keys</label><br />
<texta |
ure non-file pa | ths are detected."""
os.environ[b'MOZCONFIG'] = gettempdir()
with self. | assertRaises(MozconfigFindException) as e:
self.get_loader().find_mozconfig()
self.assertIn('refers to a non-file', e.exception.message)
self.assertTrue(e.exception.message.endswith(gettempdir()))
def test_find_default_files(self):
"""Ensure default paths are used when present."""
for p in MozconfigLoader.DEFAULT_TOPSRCDIR_PATHS:
d = self.get_temp_dir()
path = os.path.join(d, p)
with open(path, 'w'):
pass
self.assertEqual(MozconfigLoader(d).find_mozconfig(), path)
def test_find_multiple_defaults(self):
"""Ensure we error when multiple default files are present."""
self.assertGreater(len(MozconfigLoader.DEFAULT_TOPSRCDIR_PATHS), 1)
d = self.get_temp_dir()
for p in MozconfigLoader.DEFAULT_TOPSRCDIR_PATHS:
with open(os.path.join(d, p), 'w'):
pass
with self.assertRaises(MozconfigFindException) as e:
MozconfigLoader(d).find_mozconfig()
self.assertIn('Multiple default mozconfig files present',
e.exception.message)
def test_find_deprecated_path_srcdir(self):
"""Ensure we error when deprecated path locations are present."""
for p in MozconfigLoader.DEPRECATED_TOPSRCDIR_PATHS:
d = self.get_temp_dir()
with open(os.path.join(d, p), 'w'):
pass
with self.assertRaises(MozconfigFindException) as e:
MozconfigLoader(d).find_mozconfig()
self.assertIn('This implicit location is no longer',
e.exception.message)
self.assertIn(d, e.exception.message)
def test_find_deprecated_home_paths(self):
"""Ensure we error when deprecated home directory paths are present."""
for p in MozconfigLoader.DEPRECATED_HOME_PATHS:
home = self.get_temp_dir()
os.environ[b'HOME'] = home
path = os.path.join(home, p)
with open(path, 'w'):
pass
with self.assertRaises(MozconfigFindException) as e:
self.get_loader().find_mozconfig()
self.assertIn('This implicit location is no longer',
e.exception.message)
self.assertIn(path, e.exception.message)
def test_read_no_mozconfig(self):
# This is basically to ensure changes to defaults incur a test failure.
result = self.get_loader().read_mozconfig()
self.assertEqual(result, {
'path': None,
'topobjdir': None,
'configure_args': None,
'make_flags': None,
'make_extra': None,
'env': None,
'vars': None,
})
def test_read_empty_mozconfig(self):
with NamedTemporaryFile(mode='w') as mozconfig:
result = self.get_loader().read_mozconfig(mozconfig.name)
self.assertEqual(result['path'], mozconfig.name)
self.assertIsNone(result['topobjdir'])
self.assertEqual(result['configure_args'], [])
self.assertEqual(result['make_flags'], [])
self.assertEqual(result['make_extra'], [])
for f in ('added', 'removed', 'modified'):
self.assertEqual(len(result['vars'][f]), 0)
self.assertEqual(len(result['env'][f]), 0)
self.assertEqual(result['env']['unmodified'], {})
def test_read_capture_ac_options(self):
"""Ensures ac_add_options calls are captured."""
with NamedTemporaryFile(mode='w') as mozconfig:
mozconfig.write('ac_add_options --enable-debug\n')
mozconfig.write('ac_add_options --disable-tests --enable-foo\n')
mozconfig.write('ac_add_options --foo="bar baz"\n')
mozconfig.flush()
result = self.get_loader().read_mozconfig(mozconfig.name)
self.assertEqual(result['configure_args'], [
'--enable-debug', '--disable-tests', '--enable-foo',
'--foo=bar baz'])
def test_read_ac_options_substitution(self):
"""Ensure ac_add_options values are substituted."""
with NamedTemporaryFile(mode='w') as mozconfig:
mozconfig.write('ac_add_options --foo=@TOPSRCDIR@\n')
mozconfig.flush()
loader = self.get_loader()
result = loader.read_mozconfig(mozconfig.name)
self.assertEqual(result['configure_args'], [
'--foo=%s' % loader.topsrcdir])
def test_read_ac_app_options(self):
with NamedTemporaryFile(mode='w') as mozconfig:
mozconfig.write('ac_add_options --foo=@TOPSRCDIR@\n')
mozconfig.write('ac_add_app_options app1 --bar=@TOPSRCDIR@\n')
mozconfig.write('ac_add_app_options app2 --bar=x\n')
mozconfig.flush()
loader = self.get_loader()
result = loader.read_mozconfig(mozconfig.name, moz_build_app='app1')
self.assertEqual(result['configure_args'], [
'--foo=%s' % loader.topsrcdir,
'--bar=%s' % loader.topsrcdir])
result = loader.read_mozconfig(mozconfig.name, moz_build_app='app2')
self.assertEqual(result['configure_args'], [
'--foo=%s' % loader.topsrcdir,
'--bar=x'])
def test_read_capture_mk_options(self):
"""Ensures mk_add_options calls are captured."""
with NamedTemporaryFile(mode='w') as mozconfig:
mozconfig.write('mk_add_options MOZ_OBJDIR=/foo/bar\n')
mozconfig.write('mk_add_options MOZ_MAKE_FLAGS="-j8 -s"\n')
mozconfig.write('mk_add_options FOO="BAR BAZ"\n')
mozconfig.write('mk_add_options BIZ=1\n')
mozconfig.flush()
result = self.get_loader().read_mozconfig(mozconfig.name)
self.assertEqual(result['topobjdir'], '/foo/bar')
self.assertEqual(result['make_flags'], ['-j8', '-s'])
self.assertEqual(result['make_extra'], ['FOO=BAR BAZ', 'BIZ=1'])
vars = result['vars']['added']
for var in ('MOZ_OBJDIR', 'MOZ_MAKE_FLAGS', 'FOO', 'BIZ'):
self.assertEqual(vars.get('%s_IS_SET' % var), '1')
def test_read_empty_mozconfig_objdir_environ(self):
os.environ[b'MOZ_OBJDIR'] = b'obj-firefox'
with NamedTemporaryFile(mode='w') as mozconfig:
result = self.get_loader().read_mozconfig(mozconfig.name)
self.assertEqual(result['topobjdir'], 'obj-firefox')
def test_read_capture_mk_options_objdir_environ(self):
"""Ensures mk_add_options calls are captured and override the environ."""
os.environ[b'MOZ_OBJDIR'] = b'obj-firefox'
with NamedTemporaryFile(mode='w') as mozconfig:
mozconfig.write('mk_add_options MOZ_OBJDIR=/foo/bar\n')
mozconfig.flush()
result = self.get_loader().read_mozconfig(mozconfig.name)
self.assertEqual(result['topobjdir'], '/foo/bar')
def test_read_moz_objdir_substitution(self):
"""Ensure @TOPSRCDIR@ substitution is recognized in MOZ_OBJDIR."""
with NamedTemporaryFile(mode='w') as mozconfig:
mozconfig.write('mk_add_options MOZ_OBJDIR=@TOPSRCDIR@/some-objdir')
mozconfig.flush()
loader = self.get_loader()
result = loader.read_mozconfig(mozconfig.name)
self.assertEqual(result['topobjdir'], '%s/some-objdir' %
loader.topsrcdir)
def test_read_new_variables(self):
"""New variables declared in mozconfig file are detected."""
with NamedTemporaryFile(mode='w') as mozconfig:
mozconfig.write('CC=/usr/local/bin/clang\n')
mozconfig.write('CXX=/usr/local/bin/clang++\n')
mozconfig.flush()
result = self.get_loader().read_mozconfig(mozconfig.name)
self.assertEqual(result['vars']['added'], {
'CC': '/usr/local/bin/clang',
'CXX': '/usr/local/bin/clang++'})
|
", "--copt=-fpic", "--linkopt=-znoexecstack",
"--linkopt=-zrelro", "--linkopt=-znow", "--linkopt=-fstack-protector"
]
class IntelPlatform(object):
  """Base class describing an Intel target and the gcc it requires.

  Subclasses implement get_bazel_gcc_flags(); this base tracks the minimum
  and host gcc versions and decides when legacy -march names must be used.
  """
  min_gcc_major_version_ = 0
  min_gcc_minor_version_ = 0
  host_gcc_major_version_ = 0
  host_gcc_minor_version_ = 0
  BAZEL_PREFIX_ = "--copt="
  ARCH_PREFIX_ = "-march="
  FLAG_PREFIX_ = "-m"

  def __init__(self, min_gcc_major_version, min_gcc_minor_version):
    self.min_gcc_major_version_ = min_gcc_major_version
    self.min_gcc_minor_version_ = min_gcc_minor_version

  def set_host_gcc_version(self, gcc_major_version, gcc_minor_version):
    """Record the host gcc version; False when it is below the minimum."""
    if gcc_major_version < self.min_gcc_major_version_:
      print("Your MAJOR version of GCC is too old: {}; "
            "it must be at least {}.{}".format(gcc_major_version,
                                               self.min_gcc_major_version_,
                                               self.min_gcc_minor_version_))
      return False
    if (gcc_major_version == self.min_gcc_major_version_ and
        gcc_minor_version < self.min_gcc_minor_version_):
      print("Your MINOR version of GCC is too old: {}; "
            "it must be at least {}.{}".format(gcc_minor_version,
                                               self.min_gcc_major_version_,
                                               self.min_gcc_minor_version_))
      return False
    print("gcc version OK: {}.{}".format(gcc_major_version, gcc_minor_version))
    self.host_gcc_major_version_ = gcc_major_version
    self.host_gcc_minor_version_ = gcc_minor_version
    return True

  def get_bazel_gcc_flags(self):
    """Subclass hook: bazel-formatted flag string for this platform."""
    raise NotImplementedError(self)

  def use_old_arch_names(self, gcc_new_march_major_version,
                         gcc_new_march_minor_version):
    """True when the host gcc predates the new -march name for this arch."""
    host = (self.host_gcc_major_version_, self.host_gcc_minor_version_)
    return host < (gcc_new_march_major_version, gcc_new_march_minor_version)
class NehalemPlatform(IntelPlatform):
  """Nehalem: -march=corei7 before gcc 4.9, -march=nehalem afterwards."""

  def __init__(self):
    IntelPlatform.__init__(self, 4, 8)

  def get_bazel_gcc_flags(self):
    if self.use_old_arch_names(4, 9):
      arch = "corei7"
    else:
      arch = "nehalem"
    return self.BAZEL_PREFIX_ + self.ARCH_PREFIX_ + arch + " "
class SandyBridgePlatform(IntelPlatform):
  """Sandy Bridge: corei7-avx before gcc 4.9, sandybridge afterwards."""

  def __init__(self):
    IntelPlatform.__init__(self, 4, 8)

  def get_bazel_gcc_flags(self):
    arch = "corei7-avx" if self.use_old_arch_names(4, 9) else "sandybridge"
    return self.BAZEL_PREFIX_ + self.ARCH_PREFIX_ + arch + " "
class HaswellPlatform(IntelPlatform):
  """Haswell: core-avx2 plus -mpopcnt before gcc 4.9, haswell afterwards."""

  def __init__(self):
    IntelPlatform.__init__(self, 4, 8)

  def get_bazel_gcc_flags(self):
    march = self.BAZEL_PREFIX_ + self.ARCH_PREFIX_
    if self.use_old_arch_names(4, 9):
      # core-avx2 lacks only POPCNT, so request it explicitly.
      return (march + "core-avx2" + " " +
              self.BAZEL_PREFIX_ + self.FLAG_PREFIX_ + "popcnt" + " ")
    return march + "haswell" + " "
class SkylakePlatform(IntelPlatform):
  """Skylake-SP: broadwell plus explicit AVX-512 flags before gcc 6.1."""

  def __init__(self):
    IntelPlatform.__init__(self, 4, 9)

  def get_bazel_gcc_flags(self):
    if not self.use_old_arch_names(6, 1):
      return self.BAZEL_PREFIX_ + self.ARCH_PREFIX_ + "skylake-avx512" + " "
    # Older gcc: start from broadwell and add the AVX-512 subset it knows.
    # Flags broadwell lacks: pku, clflushopt, clwb, avx512vl, avx512bw,
    # avx512dq. xsavec and xsaves exist in gcc 5.x but are excluded for now.
    flags = self.BAZEL_PREFIX_ + self.ARCH_PREFIX_ + "broadwell" + " "
    for isa in ("avx512f", "avx512cd"):
      flags += self.BAZEL_PREFIX_ + self.FLAG_PREFIX_ + isa + " "
    return flags
class CascadelakePlatform(IntelPlatform):
  """Cascade Lake: skylake-avx512 plus -mavx512vnni before gcc 9.1."""

  def __init__(self):
    IntelPlatform.__init__(self, 8, 3)

  def get_bazel_gcc_flags(self):
    if self.use_old_arch_names(9, 1):
      # gcc < 9.1 has no cascadelake name; approximate it and add VNNI.
      return (self.BAZEL_PREFIX_ + self.ARCH_PREFIX_ + "skylake-avx512" + " " +
              self.BAZEL_PREFIX_ + self.FLAG_PREFIX_ + "avx512vnni" + " ")
    return self.BAZEL_PREFIX_ + self.ARCH_PREFIX_ + "cascadelake" + " "
class BuildEnvSetter(object):
  """Prepares the proper environment settings for various Intel platforms."""
  default_platform_ = "haswell"
  # Registry of supported targets, keyed by CLI platform name. NOTE(review):
  # presumably parse_args() selects one of these — confirm against the
  # argument handling below this chunk.
  PLATFORMS_ = {
      "nehalem": NehalemPlatform(),
      "sandybridge": SandyBridgePlatform(),
      "haswell": HaswellPlatform(),
      "skylake": SkylakePlatform(),
      "cascadelake": CascadelakePlatform()
  }
  def __init__(self):
    # Parsed CLI namespace; populated later by parse_args().
    self.args = None
    # Accumulated bazel "build ..." option line.
    self.bazel_flags_ = "build "
    # Selected platform object — presumably one of PLATFORMS_; confirm
    # against the CLI handling outside this chunk.
    self.target_platform_ = None
# Return a tuple of the current gcc version
def get_gcc_version(self):
gcc_major_version = 0
gcc_minor_version = 0
# check to see if gcc is present
gcc_path = ""
gcc_path_cmd = "command -v gcc"
try:
gcc_path = subprocess.check_output(gcc_path_cmd, shell=True,
stderr=subprocess.STDOUT).\
strip()
print("gcc located here: {}".format(gcc_path))
if not os.access(gcc_path, os.F_OK | os.X_OK):
raise ValueError(
"{} does not exist or is not executable.".format(gcc_path))
gcc_output = subprocess.check_output(
[gcc_path, "-dumpfullversion", "-dumpversion"],
stderr=subprocess.STDOUT).strip()
# handle python2 vs 3 (bytes vs str type)
if isinstance(gcc_output, bytes):
gcc_output = gcc_output.decode("utf-8")
print("gcc version: {}".format(gcc_output))
gcc_info = gcc_output.split(".")
gcc_major_version = int(gcc_info[0])
gcc_minor_version = int(gcc_info[1])
except subprocess.CalledProcessException as e:
print("Problem getting gcc info: {}".format(e))
gcc_major_version = 0
gcc_minor_version = 0
return gcc_major_version, gcc_minor_version
def parse_args(self):
"""Set up argument parser, and parse CLI args."""
arg_parser = argparse.ArgumentParser(
description="Parse the arguments for the "
"TensorFlow build environment "
" setter")
arg_parser.add_argument(
"--disable-mkl",
dest="disable_mkl",
help="Turn off MKL. By default the compiler flag "
"--config=mkl is enabled.",
action="store_true")
arg_parser.add_argument(
"--disable-v2",
dest="disable_v2",
help="Build TensorFlow v1 rather than v2. By default the "
" compiler flag --config=v2 is enabled.",
action="store_true")
arg_parser.add_argument(
"--enable-bfloat16",
dest="enable_bfloat16",
help="Enable bfloat16 build. By default it is "
" |
#!/usr/bin/env python
'''
Copyright (C) 2005 Aaron Spike, aaron@ekips.org
This program is free software; you can redistribute it and/or modify
it under the terms of the GNU General Public License as published by
the Free Software Foundation; either version 2 of the License, or
(at your option) any later version.
This program is distributed in the hope that it will be useful,
but WITHOUT ANY WARRANTY; without even the implied warranty of
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
GNU General Public License for more details.
You should have received a copy of the GNU General Public License
along with this program; if not, write to the Free Software
Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA
'''
import random, math, inkex, cubicsuperpath
def randomize(pos, rx, ry, norm):
    """Return *pos* (a 2-sequence) displaced by a random offset.

    The direction is uniform in [0, 2*pi); the magnitude is drawn from
    |N(0, 0.5*max(rx, ry))| when norm is true, otherwise uniformly from
    [0, max(rx, ry)]. Always returns a new [x, y] list.
    """
    x, y = pos
    if norm:
        r = abs(random.normalvariate(0.0, 0.5 * max(rx, ry)))
    else:
        r = random.uniform(0.0, max(rx, ry))
    a = random.uniform(0.0, 2 * math.pi)
    # BUG FIX: the sampled magnitude `r` was previously ignored and points
    # were always pushed out by the full rx/ry radius, so the distribution
    # choice (`norm`) had no effect at all.
    x += math.cos(a) * r
    y += math.sin(a) * r
    return [x, y]
class RadiusRandomize(inkex.Effect):
    """Inkscape effect that jitters path nodes and/or their control points."""
    def __init__(self):
        inkex.Effect.__init__(self)
        self.OptionParser.add_option("--title")
        self.OptionParser.add_option("-x", "--radiusx",
                        action="store", type="float",
                        dest="radiusx", default=10.0,
                        help="Randomly move nodes and handles within this radius, X")
        self.OptionParser.add_option("-y", "--radiusy",
                        action="store", type="float",
                        dest="radiusy", default=10.0,
                        help="Randomly move nodes and handles within this radius, Y")
        self.OptionParser.add_option("-c", "--ctrl",
                        action="store", type="inkbool",
                        dest="ctrl", default=True,
                        help="Randomize control points")
        self.OptionParser.add_option("-e", "--end",
                        action="store", type="inkbool",
                        dest="end", default=True,
                        help="Randomize nodes")
        self.OptionParser.add_option("-n", "--norm",
                        action="store", type="inkbool",
                        dest="norm", default=True,
                        help="Use normal distribution")
    def effect(self):
        # Only selected <svg:path> elements are modified.
        for id, node in self.selected.iteritems():
            if node.tag == inkex.addNS('path','svg'):
                d = node.get('d')
                p = cubicsuperpath.parsePath(d)
                for subpath in p:
                    for csp in subpath:
                        if self.options.end:
                            # Move the node and both handles by one shared
                            # offset so the segment shape is preserved.
                            delta=randomize([0,0], self.options.radiusx, self.options.radiusy, self.options.norm)
                            csp[0][0]+=delta[0]
                            csp[0][1]+=delta[1]
                            csp[1][0]+=delta[0]
                            csp[1][1]+=delta[1]
                            csp[2][0]+=delta[0]
                            csp[2][1]+=delta[1]
                        if self.options.ctrl:
                            # Additionally scatter each handle independently.
                            csp[0]=randomize(csp[0], self.options.radiusx, self.options.radiusy, self.options.norm)
                            csp[2]=randomize(csp[2], self.options.radiusx, self.options.radiusy, self.options.norm)
                node.set('d',cubicsuperpath.formatPath(p))
if __name__ == '__main__':
    # Instantiate the effect and apply it to the input document.
    effect_instance = RadiusRandomize()
    effect_instance.affect()

# vim: expandtab shiftwidth=4 tabstop=8 softtabstop=4 encoding=utf-8 textwidth=99
|
# Problem name: 12148 Electricity
# Problem url: https://uva.onlinejudge.org/external/121/12148.pdf
# Author: Andrey Yemelyanov
import sys
import math
import datetime
def readline():
    """Read one line from stdin with surrounding whitespace removed."""
    raw = sys.stdin.readline()
    return raw.strip()
def main():
    """Process datasets from stdin until a zero reading-count terminates.

    Each dataset is a count followed by `day month year consumption` lines;
    prints the number of consecutive-day deltas and their total.
    """
    while True:
        n_readings = int(readline())
        if n_readings == 0:
            break
        readings = []
        for _ in range(n_readings):
            day, month, year, value = (int(tok) for tok in readline().split())
            readings.append((datetime.date(year, month, day), value))
        daily = get_daily_consumption(readings)
        print(len(daily), sum(daily))
def get_daily_consumption(meter_readings):
    """Given date-ordered (date, cumulative_reading) pairs, return the
    consumption deltas for each pair of consecutive calendar days."""
    one_day = datetime.timedelta(days=1)
    deltas = []
    for (prev_date, prev_value), (cur_date, cur_value) in zip(
            meter_readings, meter_readings[1:]):
        if prev_date + one_day == cur_date:
            deltas.append(cur_value - prev_value)
    return deltas
# Script entry point: read datasets from stdin until the terminating zero.
if __name__=="__main__":
    main()
|
import os
import sys
from django.core.management import setup_environ
# Make the sibling 'lot' Django project importable, then initialize Django
# with its settings so the ORM connection below works standalone.
thisdir = os.path.dirname(os.path.abspath(__file__))
appdir = os.path.realpath(os.path.join(thisdir, '..', '..', '..', 'lot'))
sys.path.append(appdir)
import settings
setup_environ(settings)
##############################
import pandas as pd
from django.db import connection
def dictfetchall(cursor):
    """Return every remaining row from *cursor* as a dict keyed by column name."""
    columns = [col[0] for col in cursor.description]
    return [dict(zip(columns, row)) for row in cursor.fetchall()]
def filter_stand_list(stand_list, min_candidates=3, tpa_factor=1.2, output="candidates_concat.csv"):
cursor = connection.cursor()
keep_going = True
tpa_matches = []
remaining = stand_list[::-1]
while keep_going:
where_clause_template = """(fia_forest_type_name = '%s' AND calc_dbh_class = %d)"""
where_clause_tpa_template = """(fia_forest_type_name = '%s' AND calc_dbh_class = %d AND SumOfTPA > %f AND SumOfTPA < %f)"""
where_clauses = []
for sc in stand_list:
if sc in tpa_matches:
where_clauses.append(where_clause_tpa_template % (sc[0], sc[1], sc[2]/tpa_factor, sc[2]*tpa_factor))
else:
where_clauses.append(where_clause_template % (sc[0], sc[1]))
where_clause = " \n OR ".join(where_clauses)
sql = """
SELECT * FROM (
SELECT
COND_ID,
SUM(SumOfTPA) as "Total_TPA",
SUM(SumOfBA_FT2_AC) as "Total_BAA",
SUM(pct_of_totalba) as "PCT_BA",
COUNT(SumOfTPA) as "class_matches",
AVG(COUNT_SPECIESSIZECLASSES) as "class_total"
FROM treelive_summary
WHERE
%(where_clause)s
GROUP BY COND_ID
) as subselect
WHERE class_matches = %(num_specified_classes)s
ORDER BY "class_matches" DESC, "PCT_BA" DESC
""" % { 'where_clause': where_clause,
'num_specified_classes': len(stand_list)}
print sql
cursor.execute(sql)
local_rows = dictfetchall(cursor)
num_candidates = len(local_rows)
print num_candidates
if num_candidates < 10:
# bail, use last known good query (ie don't assign local_rows to rows)
break
rows = local_rows
if num_candidates <= min_candidates or len(tpa_matches) == len(stand_list):
keep_going = False
else:
tpa_matches.append(remaining.pop())
if rows:
df = pd.DataFrame(rows)
df.index = df['cond_id']
del df['cond_id']
print df[:25]
else:
print "*** NADA"
df.to_csv(outpu | t)
if __name__ == "__main__":
    # This guy matches condition 1332 almost exactly
    stand_list = [
        # species, sizeclass, tpa
        ('Douglas-fir', 6, 160),
        ('Douglas-fir', 10, 31),
        ('Douglas-fir', 14, 7),
        ('Western hemlock', 14, 5),
        #('Western redcedar', 14, 5),
        #('Red alder', 6, 40),
    ]
    # Run with default min_candidates/tpa_factor/output settings.
    filter_stand_list(stand_list, )
|
last_request is None:
self.last_request = now
leak_value = now - self.last_request
self.water_level -= leak_value
self.water_level = max(self.water_level, 0)
self.water_level += self.request_value
difference = self.water_level - self.capacity
self.last_request = now
if difference > 0:
self.water_level -= self.request_value
self.next_request = now + difference
return difference
cap = self.capacity
water = self.water_level
val = self.value
self.remaining = math.floor(((cap - water) / cap) * val)
self.next_request = now
    def _get_time(self):
        """Retrieve the current time. Broken out for testability."""
        # Wall-clock seconds since the epoch; tests can stub this method.
        return time.time()
    def display_unit(self):
        """Display the string name of the unit."""
        # Falls back to "UNKNOWN" for units missing from the UNITS map.
        return self.UNITS.get(self.unit, "UNKNOWN")
    def display(self):
        """Return a useful representation of this class."""
        return {
            "verb": self.verb,
            "URI": self.uri,
            "regex": self.regex,
            "value": self.value,
            "remaining": int(self.remaining),
            "unit": self.display_unit(),
            # Fall back to "now" when no request has set next_request yet.
            "resetTime": int(self.next_request or self._get_time()),
        }
# "Limit" format is a dictionary with the HTTP verb, human-readable URI,
# a regular-expression to match, value and unit of measure (PER_DAY, etc.)
# All defaults below are per-minute rates.
DEFAULT_LIMITS = [
    Limit("POST", "*", ".*", 120, utils.TIME_UNITS['MINUTE']),
    Limit("POST", "*/servers", "^/servers", 120, utils.TIME_UNITS['MINUTE']),
    Limit("PUT", "*", ".*", 120, utils.TIME_UNITS['MINUTE']),
    Limit("GET", "*changes-since*", ".*changes-since.*", 120,
          utils.TIME_UNITS['MINUTE']),
    Limit("DELETE", "*", ".*", 120, utils.TIME_UNITS['MINUTE']),
    Limit("GET", "*/os-fping", "^/os-fping", 12, utils.TIME_UNITS['MINUTE']),
]
class RateLimitingMiddleware(base_wsgi.Middleware):
    """Rate-limits requests passing through this middleware. All limit
    information is stored in memory for this implementation.
    """

    def __init__(self, application, limits=None, limiter=None, **kwargs):
        """Initialize new `RateLimitingMiddleware`.
        It wraps the given WSGI application and sets up the given limits.
        @param application: WSGI application to wrap
        @param limits: String describing limits
        @param limiter: String identifying class for representing limits
        Other parameters are passed to the constructor for the limiter.
        """
        base_wsgi.Middleware.__init__(self, application)

        # Select the limiter class
        if limiter is None:
            limiter = Limiter
        else:
            # A dotted class path was supplied; load it dynamically.
            limiter = importutils.import_class(limiter)

        # Parse the limits, if any are provided
        if limits is not None:
            limits = limiter.parse_limits(limits)

        self._limiter = limiter(limits or DEFAULT_LIMITS, **kwargs)

    @webob.dec.wsgify(RequestClass=wsgi.Request)
    def __call__(self, req):
        """Represents a single call through this middleware.
        We should record the request if we have a limit relevant to it.
        If no limit is relevant to the request, ignore it.
        If the request should be rate limited, return a fault telling the user
        they are over the limit and need to retry later.
        """
        verb = req.method
        url = req.url
        context = req.environ.get("nova.context")

        # Limits may be tracked per authenticated user when a context exists.
        if context:
            username = context.user_id
        else:
            username = None

        delay, error = self._limiter.check_for_delay(verb, url, username)

        if delay:
            msg = _("This request was rate-limited.")
            retry = time.time() + delay
            return wsgi.RateLimitFault(msg, error, retry)

        # Within limits: expose them to later handlers and pass through.
        req.environ["nova.limits"] = self._limiter.get_limits(username)

        return self.application
class Limiter(object):
    """Rate-limit checking class which handles limits in memory."""

    def __init__(self, limits, **kwargs):
        """Initialize the new `Limiter`.
        @param limits: List of `Limit` objects
        """
        # Deep copies keep per-user Limit state independent: self.levels
        # lazily materializes a fresh copy of the defaults for each user.
        self.limits = copy.deepcopy(limits)
        self.levels = collections.defaultdict(lambda: copy.deepcopy(limits))

        # Pick up any per-user limit information
        for key, value in kwargs.items():
            if key.startswith(LIMITS_PREFIX):
                username = key[len(LIMITS_PREFIX):]
                self.levels[username] = self.parse_limits(value)

    def get_limits(self, username=None):
        """Return the limits for a given user."""
        return [limit.display() for limit in self.levels[username]]

    def check_for_delay(self, verb, url, username=None):
        """Check the given verb/user/user triplet for limit.
        @return: Tuple of delay (in seconds) and error message (or None, None)
        """
        delays = []

        for limit in self.levels[username]:
            delay = limit(verb, url)
            if delay:
                delays.append((delay, limit.error_message))

        if delays:
            # Sorted ascending: report the smallest delay and its message.
            delays.sort()
            return delays[0]

        return None, None

    # Note: This method gets called before the class is instantiated,
    # so this must be either a static method or a class method. It is
    # used to develop a list of limits to feed to the constructor. We
    # put this in the class so that subclasses can override the
    # default limit parsing.
    @staticmethod
    def parse_limits(limits):
        """Convert a string into a list of Limit instances. This
        implementation expects a semicolon-separated sequence of
        parenthesized groups, where each group contains a
        comma-separated sequence consisting of HTTP method,
        user-readable URI, a URI reg-exp, an integer number of
        requests which can be made, and a unit of measure. Valid
        values for the latter are "SECOND", "MINUTE", "HOUR", and
        "DAY".
        @return: List of Limit instances.
        """

        # Handle empty limit strings
        limits = limits.strip()
        if not limits:
            return []

        # Split up the limits by semicolon
        result = []
        for group in limits.split(';'):
            group = group.strip()
            if group[:1] != '(' or group[-1:] != ')':
                raise ValueError("Limit rules must be surrounded by "
                                 "parentheses")
            group = group[1:-1]

            # Extract the Limit arguments
            args = [a.strip() for a in group.split(',')]
            if len(args) != 5:
                raise ValueError("Limit rules must contain the following "
                                 "arguments: verb, uri, regex, value, unit")

            # Pull out the arguments
            verb, uri, regex, value, unit = args

            # Upper-case the verb
            verb = verb.upper()

            # Convert value--raises ValueError if it's not integer
            value = int(value)

            # Convert unit
            unit = unit.upper()
            if unit not in utils.TIME_UNITS:
                raise ValueError("Invalid units specified")
            unit = utils.TIME_UNITS[unit]

            # Build a limit
            result.append(Limit(verb, uri, regex, value, unit))

        return result
class WsgiLimiter(object):
"""Rate-limit checking from a WSGI application. Uses an in-memory
`Limiter`.
To use, POST ``/<username>`` with JSON data such as::
{
"verb" : GET,
"path" : "/servers"
}
and receive a 204 No Content, or a 403 Forbidden with an X-Wait-Seconds
header containing the number of seconds to wait before the action would
succeed.
"""
def __init__(self, limits=None):
    """Initialize the new `WsgiLimiter`.

    @param limits: List of `Limit` objects; when omitted or empty,
        the module-level DEFAULT_LIMITS are used.
    """
    # Checking is delegated to an in-memory Limiter.
    self._limiter = Limiter(DEFAULT_LIMITS if not limits else limits)
@webob.dec.wsgify(RequestClass=wsgi.Request)
def __call__(self, request):
"""Handles a call to |
from celery.exceptions import Ignore
from django.urls import reverse
from oioioi.base.tests import TestCase
from oioioi.contests.models import Contest, ProblemInstance, Submission
from oioioi.evalmgr.tasks import create_environ
from oioioi.programs.controllers import ProgrammingContestController
from oioioi.suspendjudge.handlers import check_problem_instance_state
class TestSuspendjudgeSuper(TestCase):
    """Shared helper base for the suspendjudge admin-view tests."""

    def _empty_post(self, login, view, problem_instance):
        """Log in as *login* and POST an empty form to the given
        suspendjudge admin view for *problem_instance*."""
        self.assertTrue(self.client.login(username=login))
        target = reverse(
            'oioioiadmin:suspendjudge_' + view,
            kwargs={'problem_instance_id': problem_instance.id},
        )
        return self.client.post(target, {})
class TestViews(TestSuspendjudgeSuper):
    """Checks the HTTP status codes each user role receives from the
    suspendjudge admin views."""

    fixtures = [
        'test_users',
        'test_permissions',
        'test_contest',
        'test_full_package',
        'test_problem_instance',
    ]

    def test_views_permissions(self):
        instance = ProblemInstance.objects.get()
        # Plain users are forbidden; admins get redirected on success.
        expected_status = {
            'test_user': 403,
            'test_admin': 302,
            'test_contest_admin': 302,
        }
        view_names = (
            'suspend_all',
            'resume_and_rejudge',
            'suspend_all_but_init',
            'resume_and_clear',
        )
        self.client.get('/c/c/')  # 'c' becomes the current contest
        for login, status in expected_status.items():
            for view in view_names:
                response = self._empty_post(login, view, instance)
                self.assertEqual(response.status_code, status)
class TestSuspending(TestSuspendjudgeSuper):
    """Tests for suspending and resuming judging of a problem
    instance, and for the recipe handler that enforces it."""

    fixtures = [
        'test_users',
        'test_contest',
        'test_full_package',
        'test_problem_instance',
        'test_submission',
    ]

    def test_handler_presence(self):
        """The controller should register the state-check handler in the
        recipe twice: once with suspend_init_tests, once without."""
        contest = Contest.objects.get()
        submission = Submission.objects.get()
        controller = ProgrammingContestController(contest)
        environ = create_environ()
        environ.setdefault('recipe', []).append(('dummy', 'dummy'))
        environ['extra_args'] = []
        controller.fill_evaluation_environ(environ, submission)
        controller.finalize_evaluation_environment(environ)
        handler = 'oioioi.suspendjudge.handlers.check_problem_instance_state'
        self.assertIn(
            ('check_problem_instance_state', handler,
             dict(suspend_init_tests=True)),
            environ['recipe'],
        )
        self.assertIn(
            ('check_problem_instance_state', handler),
            environ['recipe'],
        )

    def test_handler(self):
        """check_problem_instance_state should raise Ignore exactly when
        the instance's current suspension mode blocks the given job."""
        instance = ProblemInstance.objects.get()
        self.client.get('/c/c/')  # 'c' becomes the current contest
        self._empty_post('test_admin', 'suspend_all', instance)
        environ = {
            'problem_instance_id': instance.id,
            'job_id': 'dummy',
            'celery_task_id': 'dummy',
            'submission_id': 1,
            'is_rejudge': False,
            'report_kinds': ['INITIAL', 'NORMAL'],
        }
        # Fully suspended: even jobs covering initial tests are blocked.
        with self.assertRaises(Ignore):
            check_problem_instance_state(environ, suspend_init_tests=True)
        self._empty_post('test_admin', 'resume_and_clear', instance)
        self._empty_post('test_admin', 'suspend_all_but_init', instance)
        # Initial tests may proceed; everything else is still blocked.
        check_problem_instance_state(environ, suspend_init_tests=True)
        with self.assertRaises(Ignore):
            check_problem_instance_state(environ)
        # A rejudge producing only HIDDEN reports passes the check.
        environ['is_rejudge'] = True
        environ['report_kinds'] = ['HIDDEN']
        check_problem_instance_state(environ)
|
# -*-coding:Utf-8 -*
# Copyright (c) 2010-2017 LE GOFF Vincent
# All rights reserved.
#
# Redistribution and use in source and binary forms, with or without
# modification, are permitted provided that the following conditions are met:
#
# * Redistributions of source code must retain the above copyright notice, this
# list of conditions and the following disclaimer.
# * Redistributions in binary form must reproduce the above copyright notice,
# this list of conditions and the following disclaimer in the documentation
# and/or other materials provided with the distribution.
# * Neither the name of the copyright holder nor the names of its contributors
# may be used to endorse or promote products derived from this software
# without specific prior written permission.
#
# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS"
# AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE
# IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE
# ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT OWNER OR CONTRIBUTORS BE
# LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR
# CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT
# OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS
# INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN
# CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE)
# ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE
# POSSIBILITY OF SUCH DAMAGE.
"""Fichier contenant l'ordre LeverAncre."""
from secondaires.navigation.equipage.signaux import *
from ..ordre import *
class LeverAncre(Ordre):

    """Order lever_ancre.

    Issued to ask a sailor to raise the ship's anchor.

    """

    cle = "lever_ancre"
    etats_autorises = ("ancre", "")

    def executer(self):
        """Execute the order: have the sailor raise the anchor."""
        personnage = self.matelot.personnage
        salle = personnage.salle
        # Nothing to do in a room that has no anchor element at all.
        if not hasattr(salle, "ancre"):
            return

        ancre = salle.get_element("ancre")
        if not ancre:
            return

        if not ancre.jetee:
            # The anchor is not in the water: raising it is pointless.
            yield SignalInutile("l'ancre est déjà levée")
            return

        ancre.lever(personnage)
        attempts = 0
        # Poll until the character leaves the "ancre" state, giving up
        # after too many tries or when too exhausted to continue.
        while "ancre" in personnage.etats:
            attempts += 1
            if attempts > 100:
                yield SignalAbandonne("J'ai essayé trop longtemps.")
            elif personnage.stats.endurance < 40:
                yield SignalRelais("Je suis trop fatigué.")
            else:
                yield 2

        yield SignalTermine()
|
# coding=utf-8
# Copyright 2019 The Google Research Authors.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# coding=utf-8
from __future__ import absolute_import
from __future__ import division
from __future__ import print_function
from absl.testing import parameterized
import tagging
import tagging_converter
import tensorflow as tf
def tags_to_str(tags):
  """Return the string forms of `tags` joined by '--' ('' when empty)."""
  return '--'.join(str(tag) for tag in tags) if tags else ''
class TaggingConverterTest(parameterized.TestCase):
  """Tests for converting (source, target) pairs into tag sequences."""

  @parameterized.parameters(
      # A simple test.
      {
          'input_texts': ['Turing was born in 1912 .', 'Turing died in 1954 .'],
          'target': 'Turing was born in 1912 and died in 1954 .',
          'phrase_vocabulary': ['and'],
          'target_tags': [
              'KEEP', 'KEEP', 'KEEP', 'KEEP', 'KEEP', 'DELETE|and', 'DELETE',
              'KEEP', 'KEEP', 'KEEP', 'KEEP'
          ],
      },
      # Test special characters.
      {
          'input_texts': ['Turing was born in 1912 .', 'Turing died in 1954 .'],
          'target': 'Turing was born in 1912 ädåö died in 1954 .',
          'phrase_vocabulary': ['ädåö'],
          'target_tags': [
              'KEEP', 'KEEP', 'KEEP', 'KEEP', 'KEEP', 'DELETE|ädåö', 'DELETE',
              'KEEP', 'KEEP', 'KEEP', 'KEEP'
          ],
      },
      # Test swapping.
      {
          'input_texts': ['Turing was born in 1912 .', 'Turing died in 1954 .'],
          'target': 'Turing died in 1954 and was born in 1912 .',
          'phrase_vocabulary': ['and'],
          'target_tags': [
              'DELETE', 'KEEP', 'KEEP', 'KEEP', 'KEEP', 'SWAP', 'KEEP', 'KEEP',
              'KEEP', 'KEEP', 'DELETE|and'
          ],
      },
      # Test complex swapping.
      {
          'input_texts': ['Turing was born in 1912 .',
                          'Turing was a pioneer in TCS .'],
          'target': 'Turing , a pioneer in TCS , was born in 1912 .',
          'phrase_vocabulary': [','],
          'target_tags': [
              'DELETE', 'KEEP', 'KEEP', 'KEEP', 'KEEP', 'SWAP', 'KEEP',
              'DELETE|,', 'KEEP', 'KEEP', 'KEEP', 'KEEP', 'DELETE|,'
          ],
      },
      # Test that unnecessary phrases are not added.
      {
          'input_texts': ['A . And B .'],
          'target': 'A , and B .',
          'phrase_vocabulary': [',', 'and', ', and'],
          # Although it would be possible to add ", and" and delete "And",
          # this shouldn't happen so that the tag sequences stay as simple
          # as possible.
          'target_tags': ['KEEP', 'DELETE|,', 'KEEP', 'KEEP', 'KEEP'],
      },
      # Test that necessary phrases are added.
      {
          'input_texts': ['A . And B .'],
          'target': 'A , and B .',
          'phrase_vocabulary': [', and'],
          # Now "And" must be deleted since "," alone is no longer in the
          # vocabulary.
          'target_tags': ['KEEP', 'DELETE|, and', 'DELETE', 'KEEP', 'KEEP'],
      },
  )
  def test_matching_conversion(self, input_texts, target, phrase_vocabulary,
                               target_tags):
    editing_task = tagging.EditingTask(input_texts)
    converter = tagging_converter.TaggingConverter(phrase_vocabulary)
    computed_tags = converter.compute_tags(editing_task, target)
    # Compare the string forms to get a readable diff on failure.
    self.assertEqual(tags_to_str(computed_tags), tags_to_str(target_tags))

  def test_no_match(self):
    source_texts = ['Turing was born in 1912 .', 'Turing died in 1954 .']
    desired_target = 'Turing was born in 1912 and died in 1954 .'
    editing_task = tagging.EditingTask(source_texts)
    converter = tagging_converter.TaggingConverter(['but'])
    # The vocabulary lacks "and", so the inputs can't be converted to the
    # target and the converter must return an empty result.
    self.assertFalse(converter.compute_tags(editing_task, desired_target))

  def test_first_deletion_idx_computation(self):
    converter = tagging_converter.TaggingConverter([])
    tags = [tagging.Tag(s) for s in ('KEEP', 'DELETE', 'DELETE', 'KEEP')]
    # Token 3 is preceded by a run of deletions that starts at index 1.
    self.assertEqual(converter._find_first_deletion_idx(3, tags), 1)

  def test_phrase_vocabulary_extraction(self):
    label_map = {'KEEP|, and': 0, 'DELETE|but': 1, 'DELETE': 2, 'KEEP|and': 3,
                 'DELETE|and': 4}
    extracted = tagging_converter.get_phrase_vocabulary_from_label_map(
        label_map)
    self.assertEqual(extracted, {', and', 'but', 'and'})
# Allow running this test module directly from the command line.
if __name__ == '__main__':
  tf.test.main()
|
ntum, float,
msg='Beam: momentum is not a float')
self.assertIsInstance(self.beam.mean_dt, float,
msg='Beam: mean_dt is not a float')
self.assertIsInstance(self.beam.mean_dE, float,
msg='Beam: mean_dE is not a float')
self.assertIsInstance(self.beam.sigma_dt, float,
msg='Beam: sigma_dt is not a float')
self.assertIsInstance(self.beam.sigma_dE, float,
msg='Beam: sigma_dE is not a float')
self.assertIsInstance(self.beam.intensity, float,
msg='Beam: intensity is not a float')
self.assertIsInstance(self.beam.n_macroparticles, int,
msg='Beam: n_macroparticles is not an int')
self.assertIsInstance(self.beam.ratio, float,
msg='Beam: ratio is not a float')
self.assertIsInstance(self.beam.id, numpy.ndarray,
msg='Beam: id is not a numpy.array')
self.assertIn('int', type(self.beam.id[0]).__name__,
msg='Beam: id array does not contain int')
self.assertIsInstance(self.beam.n_macroparticles_lost, int,
msg='Beam: n_macroparticles_lost is not an int')
self.assertIsInstance(self.beam.n_macroparticles_alive, int,
msg='Beam: n_macroparticles_alive is not an int')
self.assertIsInstance(self.beam.dt, numpy.ndarray,
msg='Beam: dt is not a numpy.array')
self.assertIsInstance(self.beam.dE, numpy.ndarray,
msg='Beam: dE is not a numpy.array')
self.assertIn('float', type(self.beam.dt[0]).__name__,
msg='Beam: dt does not contain float')
self.assertIn('float', type(self.beam.dE[0]).__name__,
msg='Beam: dE does not contain float')
def test_beam_statistic(self):
    """Fill dt/dE with unit-sigma, zero-mean gaussians and verify the
    statistics computed by Beam.statistics()."""
    target_sigma = 1.
    n_parts = self.beam.n_macroparticles
    self.beam.dt = target_sigma * numpy.random.randn(n_parts)
    self.beam.dE = target_sigma * numpy.random.randn(n_parts)
    self.beam.statistics()
    # Loose 1e-2 tolerance: the draws are random, not seeded.
    self.assertAlmostEqual(self.beam.sigma_dt, target_sigma, delta=1e-2,
                           msg='Beam: Failed statistic sigma_dt')
    self.assertAlmostEqual(self.beam.sigma_dE, target_sigma, delta=1e-2,
                           msg='Beam: Failed statistic sigma_dE')
    self.assertAlmostEqual(self.beam.mean_dt, 0., delta=1e-2,
                           msg='Beam: Failed statistic mean_dt')
    self.assertAlmostEqual(self.beam.mean_dE, 0., delta=1e-2,
                           msg='Beam: Failed statistic mean_dE')
def test_losses_separatrix(self):
    """A matched bunch should lose no particles to the separatrix
    check; shifting every energy far off should lose all of them
    (lost particles show up with id == 0)."""
    tracker = RingAndRFTracker(self.rf_params, self.beam)
    ring = FullRingAndRF([tracker])
    try:
        matched_from_distribution_function(self.beam,
                                           ring,
                                           distribution_exponent=1.5,
                                           distribution_type='binomial',
                                           bunch_length=1.65e-9,
                                           bunch_length_fit='fwhm',
                                           distribution_variable='Hamiltonian')
    except TypeError as te:
        self.skipTest("Skipped because of known bug in deepcopy. Exception message %s"
                      % str(te))
    self.beam.losses_separatrix(self.general_params, self.rf_params)
    self.assertEqual(len(self.beam.id[self.beam.id == 0]), 0,
                     msg='Beam: Failed losses_sepatrix, first')
    # Push every particle's energy far outside the bucket.
    self.beam.dE += 10e8
    self.beam.losses_separatrix(self.general_params, self.rf_params)
    self.assertEqual(len(self.beam.id[self.beam.id == 0]),
                     self.beam.n_macroparticles,
                     msg='Beam: Failed losses_sepatrix, second')
def test_losses_longitudinal_cut(self):
    """A matched bunch should survive the [0, 5 ns] longitudinal cut;
    shifting all particles past the upper edge should lose them all
    (lost particles show up with id == 0)."""
    tracker = RingAndRFTracker(self.rf_params, self.beam)
    ring = FullRingAndRF([tracker])
    try:
        matched_from_distribution_function(self.beam,
                                           ring,
                                           distribution_exponent=1.5,
                                           distribution_type='binomial',
                                           bunch_length=1.65e-9,
                                           bunch_length_fit='fwhm',
                                           distribution_variable='Hamiltonian')
    except TypeError as te:
        self.skipTest("Skipped because of known bug in deepcopy. Exception message %s"
                      % str(te))
    self.beam.losses_longitudinal_cut(0., 5e-9)
    self.assertEqual(len(self.beam.id[self.beam.id == 0]), 0,
                     msg='Beam: Failed losses_longitudinal_cut, first')
    # Move every particle beyond the 5 ns edge of the window.
    self.beam.dt += 10e-9
    self.beam.losses_longitudinal_cut(0., 5e-9)
    self.assertEqual(len(self.beam.id[self.beam.id == 0]),
                     self.beam.n_macroparticles,
                     msg='Beam: Failed losses_longitudinal_cut, second')
def test_losses_energy_cut(self):
    """A matched bunch should survive the [-3e8, 3e8] energy cut;
    shifting all energies past the upper edge should lose them all
    (lost particles show up with id == 0)."""
    tracker = RingAndRFTracker(self.rf_params, self.beam)
    ring = FullRingAndRF([tracker])
    try:
        matched_from_distribution_function(self.beam,
                                           ring,
                                           distribution_exponent=1.5,
                                           distribution_type='binomial',
                                           bunch_length=1.65e-9,
                                           bunch_length_fit='fwhm',
                                           distribution_variable='Hamiltonian')
    except TypeError as te:
        self.skipTest("Skipped because of known bug in deepcopy. Exception message %s"
                      % str(te))
    self.beam.losses_energy_cut(-3e8, 3e8)
    self.assertEqual(len(self.beam.id[self.beam.id == 0]), 0,
                     msg='Beam: Failed losses_energy_cut, first')
    # Move every particle beyond the 3e8 edge of the energy window.
    self.beam.dE += 10e8
    self.beam.losses_energy_cut(-3e8, 3e8)
    self.assertEqual(len(self.beam.id[self.beam.id == 0]),
                     self.beam.n_macroparticles,
                     msg='Beam: Failed losses_energy_cut, second')
def test_addition(self):
np = numpy
testdEs = np.linspace(-1E6, 1E6, 2000000)
testdts = np.linspace(0, 10E-9, 2000000)
self.beam.dE = testdEs
self.beam.dt = testdts
testdEs = np.linspace(-2E6, 2E6, 100000)
testdts = np.linspace(-1E-9, 12E-9, 100000)
self.beam.add_particles([testdts, testdEs])
self.assertEqual(self.beam.n_macroparticles, 2100000,
msg="n_macroparticles not incremented correctly")
testBeam = Beam(self.general_params, 200, 0)
testBeam.id[:100] = 0
self.beam.add_beam(testBeam)
self.assertEqual(self.beam.id[2100000:2100100].tolist(), [0]*100,
msg="particle ids not applied correctly")
self.assertEqual(self.beam.n_macroparticles, 2100200,
msg="Added macroparticles not incremented n_macro correctly")
self.beam += testBeam
self.assertEqual(self.beam.n_macroparticles, 2100400,
msg="Added macroparticles not incremented n_macro correctly")
self.beam += (testdts, testdEs)
self.assertEqual(self.beam.n_macroparticles, 2200400,
msg="Added macroparticles not incremented n_macro correctly")
self.assertEqual(-2E6, np.min(self.beam.dE),
msg="coordinates of added bea |
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.