code
stringlengths 1
199k
|
|---|
import sys
# Python 2/3 compatibility: expose one name for "any string type".
# On Python 2 this is basestring; on Python 3, where basestring is gone,
# the NameError falls through and we bind plain str instead.
try:
    basestr = basestring
except NameError:
    basestr = str
from .converters import (LanguageConverter, LanguageReverseConverter, LanguageEquivalenceConverter, CountryConverter,
CountryReverseConverter)
from .country import country_converters, COUNTRIES, COUNTRY_MATRIX, Country
from .exceptions import Error, LanguageConvertError, LanguageReverseError, CountryConvertError, CountryReverseError
from .language import language_converters, LANGUAGES, LANGUAGE_MATRIX, Language
from .script import SCRIPTS, SCRIPT_MATRIX, Script
|
from exchanges.bitfinex import Bitfinex
import re
def bitcoinValue(msg):
    """Return the current Bitcoin price as a chat reply.

    If `msg` mentions "moon" (case-insensitive) the reply gets the
    enthusiastic prefix, otherwise a plain label is used.
    """
    price = Bitfinex().get_current_price()
    pretty = "${:,.2f}".format(price)
    if re.search(r"(?i)moon", msg):
        return "To the moon! " + pretty
    return "Bitcoin: " + pretty
|
def caught(pyn, fpyn):
    """Return True when `fpyn`'s position is within distance 1 of `pyn`."""
    target_x, target_y = fpyn.xy()
    return pyn.distance(target_x, target_y) <= 1
|
import os, sys, time
from reportlab.graphics.barcode.common import *
from reportlab.graphics.barcode.code39 import *
from reportlab.graphics.barcode.code93 import *
from reportlab.graphics.barcode.code128 import *
from reportlab.graphics.barcode.usps import *
from reportlab.graphics.barcode.usps4s import USPS_4State
from reportlab.platypus import Spacer, SimpleDocTemplate, Table, TableStyle, Preformatted, PageBreak
from reportlab.lib.units import inch, cm
from reportlab.lib import colors
from reportlab.pdfgen.canvas import Canvas
from reportlab.lib.styles import getSampleStyleSheet
from reportlab.platypus.paragraph import Paragraph
from reportlab.platypus.frames import Frame
from reportlab.platypus.flowables import XBox, KeepTogether
from reportlab.graphics.shapes import Drawing
from reportlab.graphics.barcode import getCodes, getCodeNames, createBarcodeDrawing, createBarcodeImageInMemory
def run():
    """Render one sample of every barcode flowable into out.pdf.

    Builds a platypus story with one paragraph label plus one barcode
    flowable (or Drawing) per symbology, then draws it into a single
    framed page.  Side effect: writes 'out.pdf' in the working directory.
    """
    styles = getSampleStyleSheet()
    styleN = styles['Normal']
    styleH = styles['Heading1']
    story = []
    #for codeNames in code
    story.append(Paragraph('I2of5', styleN))
    story.append(I2of5(1234, barWidth = inch*0.02, checksum=0))
    story.append(Paragraph('MSI', styleN))
    story.append(MSI(1234))
    story.append(Paragraph('Codabar', styleN))
    story.append(Codabar("A012345B", barWidth = inch*0.02))
    story.append(Paragraph('Code 11', styleN))
    story.append(Code11("01234545634563"))
    story.append(Paragraph('Code 39', styleN))
    story.append(Standard39("A012345B%R"))
    story.append(Paragraph('Extended Code 39', styleN))
    story.append(Extended39("A012345B}"))
    story.append(Paragraph('Code93', styleN))
    story.append(Standard93("CODE 93"))
    story.append(Paragraph('Extended Code93', styleN))
    story.append(Extended93("L@@K! Code 93 :-)")) #, barWidth=0.005 * inch))
    story.append(Paragraph('Code 128', styleN))
    c=Code128("AB-12345678") #, barWidth=0.005 * inch)
    #print 'WIDTH =', (c.width / inch), 'barWidth =', (c.barWidth / inch)
    #print 'LQ =', (c.lquiet / inch), 'RQ =', (c.rquiet / inch)
    story.append(c)
    story.append(Paragraph('USPS FIM', styleN))
    story.append(FIM("A"))
    story.append(Paragraph('USPS POSTNET', styleN))
    story.append(POSTNET('78247-1043'))
    story.append(Paragraph('USPS 4 State', styleN))
    story.append(USPS_4State('01234567094987654321','01234567891'))
    # EAN/UPC symbologies are only exposed through the drawing factory.
    from reportlab.graphics.barcode import createBarcodeDrawing
    story.append(Paragraph('EAN13', styleN))
    bcd = createBarcodeDrawing('EAN13', value='123456789012')
    story.append(bcd)
    story.append(Paragraph('EAN8', styleN))
    bcd = createBarcodeDrawing('EAN8', value='1234567')
    story.append(bcd)
    story.append(Paragraph('UPCA', styleN))
    bcd = createBarcodeDrawing('UPCA', value='03600029145')
    story.append(bcd)
    story.append(Paragraph('USPS_4State', styleN))
    bcd = createBarcodeDrawing('USPS_4State', value='01234567094987654321',routing='01234567891')
    story.append(bcd)
    story.append(Paragraph('Label Size', styleN))
    story.append(XBox((2.0 + 5.0/8.0)*inch, 1 * inch, '1x2-5/8"'))
    story.append(Paragraph('Label Size', styleN))
    story.append(XBox((1.75)*inch, .5 * inch, '1/2x1-3/4"'))
    c = Canvas('out.pdf')
    f = Frame(inch, inch, 6*inch, 9*inch, showBoundary=1)
    f.addFromList(story, c)
    c.save()
    # BUG FIX: was a Python 2 print statement ("print 'saved out.pdf'"),
    # a syntax error under Python 3; the call form works on both.
    print('saved out.pdf')
def fullTest(fileName="test_full.pdf"):
    """Creates large-ish test document with a variety of parameters.

    Renders every registered barcode symbology (via getCodeNames()) at one
    or more scales, with and without human-readable text, into `fileName`.
    Command-line flags --width=, --height=, --isoscale= and --scale= tune
    the drawings.  Side effect: writes the PDF to disk.
    """
    styles = getSampleStyleSheet()
    styleN = styles['Normal']
    styleH = styles['Heading1']
    styleH2 = styles['Heading2']
    story = []
    story.append(Paragraph('ReportLab Barcode Test Suite - full output', styleH))
    story.append(Paragraph('Generated on %s' % time.ctime(time.time()), styleN))
    story.append(Paragraph('', styleN))
    story.append(Paragraph('Repository information for this build:', styleN))
    #see if we can figure out where it was built, if we're running in source
    if os.path.split(os.getcwd())[-1] == 'barcode' and os.path.isdir('.svn'):
        #running in a filesystem svn copy
        infoLines = os.popen('svn info').read()
        story.append(Preformatted(infoLines, styles["Code"]))
    story.append(Paragraph('About this document', styleH2))
    story.append(Paragraph('History and Status', styleH2))
    story.append(Paragraph("""
    This is the test suite and docoumentation for the ReportLab open source barcode API,
    being re-released as part of the forthcoming ReportLab 2.0 release.
    """, styleN))
    story.append(Paragraph("""
    Several years ago Ty Sarna contributed a barcode module to the ReportLab community.
    Several of the codes were used by him in hiw work and to the best of our knowledge
    this was correct. These were written as flowable objects and were available in PDFs,
    but not in our graphics framework. However, we had no knowledge of barcodes ourselves
    and did not advertise or extend the package.
    """, styleN))
    story.append(Paragraph("""
    We "wrapped" the barcodes to be usable within our graphics framework; they are now available
    as Drawing objects which can be rendered to EPS files or bitmaps. For the last 2 years this
    has been available in our Diagra and Report Markup Language products. However, we did not
    charge separately and use was on an "as is" basis.
    """, styleN))
    story.append(Paragraph("""
    A major licensee of our technology has kindly agreed to part-fund proper productisation
    of this code on an open source basis in Q1 2006. This has involved addition of EAN codes
    as well as a proper testing program. Henceforth we intend to publicise the code more widely,
    gather feedback, accept contributions of code and treat it as "supported".
    """, styleN))
    story.append(Paragraph("""
    This involved making available both downloads and testing resources. This PDF document
    is the output of the current test suite. It contains codes you can scan (if you use a nice sharp
    laser printer!), and will be extended over coming weeks to include usage examples and notes on
    each barcode and how widely tested they are. This is being done through documentation strings in
    the barcode objects themselves so should always be up to date.
    """, styleN))
    story.append(Paragraph('Usage examples', styleH2))
    story.append(Paragraph("""
    To be completed
    """, styleN))
    story.append(Paragraph('The codes', styleH2))
    story.append(Paragraph("""
    Below we show a scannable code from each barcode, with and without human-readable text.
    These are magnified about 2x from the natural size done by the original author to aid
    inspection. This will be expanded to include several test cases per code, and to add
    explanations of checksums. Be aware that (a) if you enter numeric codes which are too
    short they may be prefixed for you (e.g. "123" for an 8-digit code becomes "00000123"),
    and that the scanned results and readable text will generally include extra checksums
    at the end.
    """, styleN))
    codeNames = getCodeNames()
    from reportlab.lib.utils import flatten
    # Pull drawing options out of sys.argv by prefix.
    width = [float(x[8:]) for x in sys.argv if x.startswith('--width=')]
    height = [float(x[9:]) for x in sys.argv if x.startswith('--height=')]
    isoScale = [int(x[11:]) for x in sys.argv if x.startswith('--isoscale=')]
    options = {}
    if width: options['width'] = width[0]
    if height: options['height'] = height[0]
    if isoScale: options['isoScale'] = isoScale[0]
    scales = [x[8:].split(',') for x in sys.argv if x.startswith('--scale=')]
    # BUG FIX: this mapping was duplicated; under Python 3 map() returns a
    # one-shot iterator, so re-feeding it through flatten/map a second time
    # was at best redundant and at worst wrong.  One pass is enough.
    scales = map(float,scales and flatten(scales) or [1])
    for scale in scales:
        story.append(PageBreak())
        story.append(Paragraph('Scale = %.1f'%scale, styleH2))
        story.append(Spacer(36, 12))
        for codeName in codeNames:
            s = [Paragraph('Code: ' + codeName, styleH2)]
            for hr in (0,1):
                s.append(Spacer(36, 12))
                dr = createBarcodeDrawing(codeName, humanReadable=hr,**options)
                dr.renderScale = scale
                s.append(dr)
                s.append(Spacer(36, 12))
                s.append(Paragraph('Barcode should say: ' + dr._bc.value, styleN))
            story.append(KeepTogether(s))
    SimpleDocTemplate(fileName).build(story)
    # BUG FIX: was the Python 2 statement "print 'created', fileName";
    # %-formatting keeps identical textual output on both 2 and 3.
    print('created %s' % fileName)
if __name__=='__main__':
    # Exercise both renderers, then emit one standalone image per format.
    run()
    fullTest()
    def createSample(name,memory):
        """Write the in-memory barcode image bytes to file `name`."""
        # BUG FIX: use a context manager so the handle is closed even if
        # write() raises (the original open/write/close leaked on error).
        with open(name,'wb') as f:
            f.write(memory)
    createSample('test_cbcim.png',createBarcodeImageInMemory('EAN13', value='123456789012'))
    createSample('test_cbcim.gif',createBarcodeImageInMemory('EAN8', value='1234567', format='gif'))
    createSample('test_cbcim.pdf',createBarcodeImageInMemory('UPCA', value='03600029145',format='pdf'))
    createSample('test_cbcim.tiff',createBarcodeImageInMemory('USPS_4State', value='01234567094987654321',routing='01234567891',format='tiff'))
|
from __future__ import unicode_literals
from django.db import models, migrations
class Migration(migrations.Migration):
    """Add an optional free-text invoice footer to TherapeutSettings."""
    # Must run after the migration that added the SIRET field.
    dependencies = [
        ('libreosteoweb', '0021_therapeutsettings_siret'),
    ]
    operations = [
        migrations.AddField(
            model_name='therapeutsettings',
            name='invoice_footer',
            # Nullable + blank: the footer is entirely optional.
            field=models.TextField(null=True, verbose_name='Invoice footer', blank=True),
            preserve_default=True,
        ),
    ]
|
"""
poller-wrapper A small tool which wraps around the poller and tries to
guide the polling process with a more modern approach with a
Queue and workers
Authors: Job Snijders <job.snijders@atrato.com>
Orsiris de Jong <contact@netpower.fr>
Date: Oct 2019
Usage: This program accepts one command line argument: the number of threads
that should run simultaneously. If no argument is given it will assume
a default of 16 threads.
Ubuntu Linux: apt-get install python-mysqldb
FreeBSD: cd /usr/ports/*/py-MySQLdb && make install clean
RHEL 7: yum install MySQL-python
RHEL 8: dnf install mariadb-connector-c-devel gcc && python -m pip install mysqlclient
Tested on: Python 3.6.8 / PHP 7.2.11 / CentOS 8.0
License: To the extent possible under law, Job Snijders has waived all
copyright and related or neighboring rights to this script.
This script has been put into the Public Domain. This work is
published from: The Netherlands.
"""
import LibreNMS.library as LNMS
try:
import json
import os
import queue
import subprocess
import sys
import threading
import time
from optparse import OptionParser
except ImportError as exc:
print('ERROR: missing one or more of the following python modules:')
print('threading, queue, sys, subprocess, time, os, json')
print('ERROR: %s' % exc)
sys.exit(2)
APP_NAME = "poller_wrapper"
LOG_FILE = "logs/" + APP_NAME + ".log"  # log path, relative to working dir
_DEBUG = False  # passed to the LNMS logger; flip for verbose logging
# Mutable module state shared with the worker threads below.
distpoll = False      # True when participating in a distributed-poller setup
real_duration = 0     # summed per-device elapsed time (sequential equivalent)
polled_devices = 0    # devices completed by this wrapper run
"""
Threading helper functions
"""
def memc_alive():
    """Probe memcached with a throwaway key; True iff a round-trip works.

    NOTE(review): relies on the module-global `memc` client and on `uuid`,
    which is only imported inside the distributed-poller setup block in
    __main__.  If either is missing, the bare except swallows the
    NameError and this simply reports "dead" -- apparently by design.
    """
    try:
        global memc
        key = str(uuid.uuid4())
        # Write, read back and delete a unique key with a 60s TTL.
        memc.set('poller.ping.' + key, key, 60)
        if memc.get('poller.ping.' + key) == key:
            memc.delete('poller.ping.' + key)
            return True
        else:
            return False
    except:
        # Any failure (connection error, missing client) counts as dead.
        return False
def memc_touch(key, time):
    """Refresh `key`'s expiry by re-setting its current value with TTL `time`.

    NOTE(review): the `time` parameter shadows the stdlib `time` module
    inside this function.  Harmless here (the module is not used in the
    body), but rename with care if that ever changes.
    """
    try:
        global memc
        val = memc.get(key)
        memc.set(key, val, time)
    except:
        # Best-effort: a dead memcached must not kill the worker loop.
        pass
def get_time_tag(step):
    """Return the current UNIX time rounded down to a multiple of `step`.

    Used to tag a polling interval so all distributed pollers agree on
    one shared key per interval.
    """
    now = int(time.time())
    return now - (now % step)
"""
A separate queue and a single worker for printing information to the screen prevents
the good old joke:
Some people, when confronted with a problem, think,
"I know, I'll use threads," and then two they hav erpoblesms.
"""
def printworker():
    """Single consumer of `print_queue`: reports per-device results.

    Runs forever as a daemon thread.  In distributed mode it also keeps
    the master/node heartbeat keys alive in memcached.  Reads the globals
    set up in __main__ (print_queue, memc, master_tag, nodes_tag, step).
    """
    nodeso = 0  # last node count we reported, to log only on change
    while True:
        # (c) 2015, GPLv3, Daniel Preussker <f0o@devilcode.org> <<<EOC4
        global IsNode
        global distpoll
        if distpoll:
            if not IsNode:
                # Master: refresh our claim and watch the node count.
                memc_touch(master_tag, 10)
                nodes = memc.get(nodes_tag)
                if nodes is None and not memc_alive():
                    print("WARNING: Lost Memcached. Taking over all devices. Nodes will quit shortly.")
                    distpoll = False
                    nodes = nodeso
                if nodes is not nodeso:
                    print("INFO: %s Node(s) Total" % (nodes))
                    nodeso = nodes
            else:
                # Node: just keep the shared node counter alive.
                memc_touch(nodes_tag, 10)
            try:
                # Non-blocking get so the heartbeat above keeps ticking.
                worker_id, device_id, elapsed_time = print_queue.get(False)
            except:
                pass
                # Queue empty: sleep briefly, then loop back to heartbeat.
                try:
                    time.sleep(1)
                except:
                    pass
                continue
        else:
            # Standalone mode: plain blocking get is fine.
            worker_id, device_id, elapsed_time = print_queue.get()
        # EOC4
        global real_duration
        global per_device_duration
        global polled_devices
        # Accumulate stats used for the summary at the end of __main__.
        real_duration += elapsed_time
        per_device_duration[device_id] = elapsed_time
        polled_devices += 1
        # A device slower than the polling interval is worth a warning.
        if elapsed_time < step:
            print("INFO: worker %s finished device %s in %s seconds" % (worker_id, device_id, elapsed_time))
        else:
            print("WARNING: worker %s finished device %s in %s seconds" % (worker_id, device_id, elapsed_time))
        print_queue.task_done()
"""
This class will fork off single instances of the poller.php process, record
how long it takes, and push the resulting reports to the printer queue
"""
def poll_worker():
    """Worker thread: pull device ids off `poll_queue` and run poller.php.

    Runs forever as a daemon thread.  In distributed mode a memcached
    add() on a per-device, per-interval key acts as a lock so only one
    poller touches each device per interval.  Reads globals from
    __main__ (poll_queue, print_queue, memc, config, time_tag, step,
    log_dir, debug, poller_path, IsNode).
    """
    while True:
        device_id = poll_queue.get()
        # (c) 2015, GPLv3, Daniel Preussker <f0o@devilcode.org> <<<EOC5
        if not distpoll or memc.get('poller.device.%s.%s' % (device_id, time_tag)) is None:
            if distpoll:
                # add() is atomic: it fails if another poller claimed first.
                result = memc.add('poller.device.%s.%s' % (device_id, time_tag), config['distributed_poller_name'],
                                  step)
                if not result:
                    print("This device (%s) appears to be being polled by another poller" % (device_id))
                    poll_queue.task_done()
                    continue
                if not memc_alive() and IsNode:
                    print("Lost Memcached, Not polling Device %s as Node. Master will poll it." % device_id)
                    poll_queue.task_done()
                    continue
            # EOC5
            try:
                start_time = time.time()
                output = "-d >> %s/poll_device_%s.log" % (log_dir, device_id) if debug else ">> /dev/null"
                command = "/usr/bin/env php %s -h %s %s 2>&1" % (poller_path, device_id, output)
                # TODO: replace with command_runner
                # shell=True is needed for the redirection; inputs come
                # from the local DB/config, not from untrusted users.
                subprocess.check_call(command, shell=True)
                elapsed_time = int(time.time() - start_time)
                print_queue.put([threading.current_thread().name, device_id, elapsed_time])
            except (KeyboardInterrupt, SystemExit):
                raise
            except:
                # A failed poll of one device must not kill the worker.
                pass
        poll_queue.task_done()
if __name__ == '__main__':
    # --- bootstrap: logging, install dir and LibreNMS config ------------
    logger = LNMS.logger_get_logger(LOG_FILE, debug=_DEBUG)
    install_dir = os.path.dirname(os.path.realpath(__file__))
    LNMS.check_for_file(install_dir + '/.env')
    config = json.loads(LNMS.get_config_data(install_dir))
    poller_path = config['install_dir'] + '/poller.php'
    log_dir = config['log_dir']
    # Polling interval: RRD step from config, or the 300s default.
    if 'rrd' in config and 'step' in config['rrd']:
        step = config['rrd']['step']
    else:
        step = 300
    # (c) 2015, GPLv3, Daniel Preussker <f0o@devilcode.org> <<<EOC1
    if 'distributed_poller_group' in config:
        poller_group = str(config['distributed_poller_group'])
    else:
        poller_group = False
    # Join (or create) the distributed-poller cluster via memcached.
    if ('distributed_poller' in config and
            'distributed_poller_memcached_host' in config and
            'distributed_poller_memcached_port' in config and
            config['distributed_poller']):
        time_tag = str(get_time_tag(step))
        master_tag = "poller.master." + time_tag
        nodes_tag = "poller.nodes." + time_tag
        try:
            import memcache
            import uuid
            memc = memcache.Client([config['distributed_poller_memcached_host'] + ':' +
                                    str(config['distributed_poller_memcached_port'])])
            # Refuse to run twice under the same poller name as master.
            if str(memc.get(master_tag)) == config['distributed_poller_name']:
                print("This system is already joined as the poller master.")
                sys.exit(2)
            if memc_alive():
                if memc.get(master_tag) is None:
                    # First one in this interval becomes master.
                    print("Registered as Master")
                    memc.set(master_tag, config['distributed_poller_name'], 10)
                    memc.set(nodes_tag, 0, step)
                    IsNode = False
                else:
                    print("Registered as Node joining Master %s" % memc.get(master_tag))
                    IsNode = True
                    memc.incr(nodes_tag)
                distpoll = True
            else:
                print("Could not connect to memcached, disabling distributed poller.")
                distpoll = False
                IsNode = False
        except SystemExit:
            raise
        except ImportError:
            print("ERROR: missing memcache python module:")
            print("On deb systems: apt-get install python3-memcache")
            print("On other systems: pip3 install python-memcached")
            print("Disabling distributed poller.")
            distpoll = False
    else:
        distpoll = False
    # EOC1
    s_time = time.time()
    real_duration = 0
    per_device_duration = {}
    polled_devices = 0
    """
    Take the amount of threads we want to run in parallel from the commandline
    if None are given or the argument was garbage, fall back to default of 16
    """
    usage = "usage: %prog [options] <workers> (Default: 16 (Do not set too high)"
    description = "Spawn multiple poller.php processes in parallel."
    parser = OptionParser(usage=usage, description=description)
    parser.add_option('-d', '--debug', action='store_true', default=False,
                      help="Enable debug output. WARNING: Leaving this enabled will consume a lot of disk space.")
    (options, args) = parser.parse_args()
    debug = options.debug
    try:
        amount_of_workers = int(args[0])
    except (IndexError, ValueError):
        amount_of_workers = 16
    devices_list = []
    """
    This query specifically orders the results depending on the last_polled_timetaken variable
    Because this way, we put the devices likely to be slow, in the top of the queue
    thus greatening our chances of completing _all_ the work in exactly the time it takes to
    poll the slowest device! cool stuff he
    """
    # (c) 2015, GPLv3, Daniel Preussker <f0o@devilcode.org> <<<EOC2
    # NOTE(review): queries are built by string interpolation; the values
    # come from local config/DB, not user input, but parameterized queries
    # would still be safer.
    if poller_group is not False:
        query = 'select device_id from devices where poller_group IN(' + poller_group + \
                ') and disabled = 0 order by last_polled_timetaken desc'
    else:
        query = 'select device_id from devices where disabled = 0 order by last_polled_timetaken desc'
    # EOC2
    db = LNMS.db_open(config['db_socket'], config['db_host'], config['db_port'], config['db_user'], config['db_pass'], config['db_name'])
    cursor = db.cursor()
    cursor.execute(query)
    devices = cursor.fetchall()
    for row in devices:
        devices_list.append(int(row[0]))
    # (c) 2015, GPLv3, Daniel Preussker <f0o@devilcode.org> <<<EOC3
    # Masters need the device-id range to clear per-device locks later.
    if distpoll and not IsNode:
        query = "select max(device_id),min(device_id) from devices"
        cursor.execute(query)
        devices = cursor.fetchall()
        maxlocks = devices[0][0] or 0
        minlocks = devices[0][1] or 0
    # EOC3
    db.close()
    poll_queue = queue.Queue()
    print_queue = queue.Queue()
    print(
        "INFO: starting the poller at %s with %s threads, slowest devices first" % (time.strftime("%Y-%m-%d %H:%M:%S"),
                                                                                   amount_of_workers))
    for device_id in devices_list:
        poll_queue.put(device_id)
    # Daemon threads: they die with the main thread once the queues drain.
    # NOTE(review): setDaemon() is a deprecated alias for `.daemon = True`.
    for i in range(amount_of_workers):
        t = threading.Thread(target=poll_worker)
        t.setDaemon(True)
        t.start()
    p = threading.Thread(target=printworker)
    p.setDaemon(True)
    p.start()
    try:
        poll_queue.join()
        print_queue.join()
    except (KeyboardInterrupt, SystemExit):
        raise
    total_time = int(time.time() - s_time)
    print("INFO: poller-wrapper polled %s devices in %s seconds with %s workers" % (
        polled_devices, total_time, amount_of_workers))
    # (c) 2015, GPLv3, Daniel Preussker <f0o@devilcode.org> <<<EOC6
    # Distributed teardown: master waits for nodes then clears shared keys.
    if distpoll or memc_alive():
        master = memc.get(master_tag)
        if master == config['distributed_poller_name'] and not IsNode:
            print("Wait for all poller-nodes to finish")
            nodes = memc.get(nodes_tag)
            while nodes is not None and nodes > 0:
                try:
                    time.sleep(1)
                    nodes = memc.get(nodes_tag)
                except:
                    pass
            print("Clearing Locks for %s" % time_tag)
            # minlocks/maxlocks come from the EOC3 block above (master only).
            x = minlocks
            while x <= maxlocks:
                res = memc.delete('poller.device.%s.%s' % (x, time_tag))
                x += 1
            print("%s Locks Cleared" % x)
            print("Clearing Nodes")
            memc.delete(master_tag)
            memc.delete(nodes_tag)
        else:
            # Node: deregister from the shared node counter.
            memc.decr(nodes_tag)
        print("Finished %.3fs after interval start." % (time.time() - int(time_tag)))
    # EOC6
    # --- record run statistics in the pollers table ---------------------
    show_stopper = False
    db = LNMS.db_open(config['db_socket'], config['db_host'], config['db_port'], config['db_user'], config['db_pass'], config['db_name'])
    cursor = db.cursor()
    query = "update pollers set last_polled=NOW(), devices='%d', time_taken='%d' where poller_name='%s'" % (
        polled_devices,
        total_time,
        config['distributed_poller_name'])
    response = cursor.execute(query)
    if response == 1:
        db.commit()
    else:
        # No row updated: first run under this name, insert instead.
        query = "insert into pollers set poller_name='%s', last_polled=NOW(), devices='%d', time_taken='%d'" % (
            config['distributed_poller_name'], polled_devices, total_time)
        cursor.execute(query)
        db.commit()
    db.close()
    # Warn (and exit non-zero) when the run overflowed one interval.
    if total_time > step:
        print(
            "WARNING: the process took more than %s seconds to finish, you need faster hardware or more threads" % step)
        print("INFO: in sequential style polling the elapsed time would have been: %s seconds" % real_duration)
        for device in per_device_duration:
            if per_device_duration[device] > step:
                print("WARNING: device %s is taking too long: %s seconds" % (device, per_device_duration[device]))
                show_stopper = True
        if show_stopper:
            print(
                "ERROR: Some devices are taking more than %s seconds, the script cannot recommend you what to do." % step)
        else:
            recommend = int(total_time / step * amount_of_workers + 1)
            print(
                "WARNING: Consider setting a minimum of %d threads. (This does not constitute professional advice!)" % recommend)
        sys.exit(2)
|
import copy
import secrets
# Per-channel race state: channel_id -> {'icons', 'users', 'buyin'}.
races = {}
# Embed color (hex RGB) per participant icon.
# NOTE(review): the icon literals below appear mojibake-mangled in this
# copy of the file (several keys may even collide); verify against the
# original UTF-8 source before relying on them.
colors = {
    'πΆ': 0xccd6dd,
    'π±': 0xffcb4e,
    'π': 0x99aab5,
    'π°': 0x99aab5,
    'π': 0x9266cc,
    'π ': 0xffcc4d,
    'π¦': 0xf4900c,
    'π¦': 0xbe1931,
    'πΈ': 0x77b255,
    'π§': 0xf5f8fa
}
# Human-readable name per participant icon.
names = {
    'πΆ': 'dog',
    'π±': 'cat',
    'π': 'mouse',
    'π°': 'rabbit',
    'π': 'octopus',
    'π ': 'fish',
    'π¦': 'fox',
    'π¦': 'crab',
    'πΈ': 'frog',
    'π§': 'penguin'
}
# Pool of icons handed out to race participants.
participant_icons = ['πΆ', 'π±', 'π', 'π°', 'π', 'π ', 'π¦', 'π¦', 'πΈ', 'π§']
def make_race(channel_id, buyin):
    """Register a fresh race for `channel_id` with the given buy-in.

    Seeds the race with its own copy of the icon pool and an empty
    participant list, overwriting any previous race in that channel.
    """
    races[channel_id] = {
        'icons': copy.deepcopy(participant_icons),
        'users': [],
        'buyin': buyin,
    }
def add_participant(channel_id, user):
    """Give `user` a random unused icon in the channel's race; return it.

    The chosen icon is removed from the race's pool so no two
    participants share one.  Raises KeyError if no race exists for
    `channel_id`.
    """
    race = races[channel_id]
    chosen = secrets.choice(race['icons'])
    race['icons'].remove(chosen)
    race['users'].append({'user': user, 'icon': chosen})
    races[channel_id] = race
    return chosen
|
"""Extra SQLAlchemy ORM types"""
__all__ = ['JSONEncodeDict']
import cjson
from sqlalchemy.types import TypeDecorator, String
from sqlalchemy.ext.mutable import Mutable
from pynojo.exc import PynojoRuntimeError
class JSONEncodeDict(TypeDecorator):
    """Represents an mutable python *dict* as a json-encoded string."""
    # pylint: disable=W0223
    impl = String

    def process_bind_param(self, value, dialect):
        # Outbound: encode the dict to JSON before it hits the String column.
        if value is not None:
            value = cjson.encode(value)
            # Guard against silent truncation by the column length limit.
            # NOTE(review): `_` looks like an installed gettext-style i18n
            # helper -- confirm it is available in builtins at runtime.
            if len(value) > self.length:
                raise PynojoRuntimeError(_(
                    '{class_name}: encoded string too long',
                    class_name = self.__class__.__name__))
        return value

    def process_result_value(self, value, dialect):
        # Inbound: decode JSON text from the database back into a dict.
        if value is not None:
            value = cjson.decode(value)
        return value
class _JSONEncodeDictMutabilize(Mutable, dict):
    """Dict that notifies SQLAlchemy's mutation tracking on every change."""

    @classmethod
    def coerce(cls, key, value):
        """Convert plain dicts to this tracked type; defer other values."""
        if isinstance(value, _JSONEncodeDictMutabilize):
            return value
        if isinstance(value, dict):
            return _JSONEncodeDictMutabilize(value)
        return Mutable.coerce(key, value)

    def __setitem__(self, key, value):
        """Set an item and flag the parent object as changed."""
        dict.__setitem__(self, key, value)
        self.changed()

    def __delitem__(self, key):
        """Delete an item and flag the parent object as changed."""
        dict.__delitem__(self, key)
        self.changed()

# Hook the mutation-tracking wrapper up to the column type.
_JSONEncodeDictMutabilize.associate_with(JSONEncodeDict)
|
"""An FTP client class and some helper functions.
Based on RFC 959: File Transfer Protocol (FTP), by J. Postel and J. Reynolds
Example:
>>> from ftplib import FTP
>>> ftp = FTP('ftp.python.org') # connect to host, default port
>>> ftp.login() # default, i.e.: user anonymous, passwd anonymous@
'230 Guest login ok, access restrictions apply.'
>>> ftp.retrlines('LIST') # list directory contents
total 9
drwxr-xr-x 8 root wheel 1024 Jan 3 1994 .
drwxr-xr-x 8 root wheel 1024 Jan 3 1994 ..
drwxr-xr-x 2 root wheel 1024 Jan 3 1994 bin
drwxr-xr-x 2 root wheel 1024 Jan 3 1994 etc
d-wxrwxr-x 2 ftp wheel 1024 Sep 5 13:43 incoming
drwxr-xr-x 2 root wheel 1024 Nov 17 1993 lib
drwxr-xr-x 6 1094 wheel 1024 Sep 13 19:07 pub
drwxr-xr-x 3 root wheel 1024 Jan 3 1994 usr
-rw-r--r-- 1 root root 312 Aug 1 1994 welcome.msg
'226 Transfer complete.'
>>> ftp.quit()
'221 Goodbye.'
>>>
A nice test that reveals some of the network dialogue would be:
python ftplib.py -d localhost -l -p -l
"""
import os
import sys
try:
import SOCKS; socket = SOCKS; del SOCKS # import SOCKS as socket
from socket import getfqdn; socket.getfqdn = getfqdn; del getfqdn
except ImportError:
import socket
from socket import _GLOBAL_DEFAULT_TIMEOUT
__all__ = ["FTP","Netrc"]
MSG_OOB = 0x1                           # Process data out of band
FTP_PORT = 21                           # standard FTP control-connection port
# Exception hierarchy: every protocol error derives from Error, so callers
# can catch the base class (or the all_errors tuple) in one place.
class Error(Exception): pass
class error_reply(Error): pass          # unexpected [123]xx reply
class error_temp(Error): pass           # 4xx errors
class error_perm(Error): pass           # 5xx errors
class error_proto(Error): pass          # response does not begin with [1-5]
all_errors = (Error, IOError, EOFError)
CRLF = '\r\n'                           # line terminator per RFC 959
class FTP:
    '''An FTP client class.
    To create a connection, call the class using these arguments:
    host, user, passwd, acct, timeout
    The first four arguments are all strings, and have default value ''.
    timeout must be numeric and defaults to None if not passed,
    meaning that no timeout will be set on any ftp socket(s)
    If a timeout is passed, then this is now the default timeout for all ftp
    socket operations for this instance.
    Then use self.connect() with optional host and port argument.
    To download a file, use ftp.retrlines('RETR ' + filename),
    or ftp.retrbinary() with slightly different arguments.
    To upload a file, use ftp.storlines() or ftp.storbinary(),
    which have an open file as argument (see their definitions
    below for details).
    The download/upload functions first issue appropriate TYPE
    and PORT or PASV commands.
    '''
    # Class-level defaults; instances override these in __init__/connect().
    debugging = 0        # debug verbosity (see set_debuglevel)
    host = ''            # remote hostname, set by connect()
    port = FTP_PORT      # remote port, default 21
    sock = None          # control-connection socket
    file = None          # buffered file wrapper around sock
    welcome = None       # server greeting captured by connect()
    passiveserver = 1    # truthy: PASV/EPSV; falsy: PORT/EPRT
    # Initialization method (called by class instantiation).
    # Initialize host to localhost, port to standard ftp port
    # Optional arguments are host (for connect()),
    # and user, passwd, acct (for login())
    def __init__(self, host='', user='', passwd='', acct='',
                 timeout=_GLOBAL_DEFAULT_TIMEOUT):
        """Store the default timeout; optionally connect and log in now.

        If `host` is given, connect immediately; if `user` is also given,
        log in right after connecting (login only happens post-connect).
        """
        self.timeout = timeout
        if host:
            self.connect(host)
            if user:
                self.login(user, passwd, acct)
    def connect(self, host='', port=0, timeout=-999):
        '''Connect to host. Arguments are:
        - host: hostname to connect to (string, default previous host)
        - port: port to connect to (integer, default previous port)
        '''
        # -999 is a sentinel: "keep the timeout given to __init__".
        if host != '':
            self.host = host
        if port > 0:
            self.port = port
        if timeout != -999:
            self.timeout = timeout
        self.sock = socket.create_connection((self.host, self.port), self.timeout)
        self.af = self.sock.family  # remember address family for PORT/EPRT
        # Buffered reader over the control connection for line-based replies.
        self.file = self.sock.makefile('rb')
        self.welcome = self.getresp()
        return self.welcome
    def getwelcome(self):
        '''Get the welcome message from the server.
        (this is read and squirreled away by connect())'''
        if self.debugging:
            print '*welcome*', self.sanitize(self.welcome)
        return self.welcome
    def set_debuglevel(self, level):
        '''Set the debugging level.
        The required argument level means:
        0: no debugging output (default)
        1: print commands and responses but not body text etc.
        2: also print raw lines read and sent before stripping CR/LF'''
        self.debugging = level
    # Convenience alias so callers can write ftp.debug(level).
    debug = set_debuglevel
    def set_pasv(self, val):
        '''Use passive or active mode for data transfers.
        With a false argument, use the normal PORT mode,
        With a true argument, use the PASV command.'''
        self.passiveserver = val
# Internal: "sanitize" a string for printing
def sanitize(self, s):
if s[:5] == 'pass ' or s[:5] == 'PASS ':
i = len(s)
while i > 5 and s[i-1] in '\r\n':
i = i-1
s = s[:5] + '*'*(i-5) + s[i:]
return repr(s)
    # Internal: send one line to the server, appending CRLF
    def putline(self, line):
        line = line + CRLF
        # Passwords are masked by sanitize() before being echoed.
        if self.debugging > 1: print '*put*', self.sanitize(line)
        self.sock.sendall(line)
    # Internal: send one command to the server (through putline())
    def putcmd(self, line):
        # Level 1 debugging echoes commands; putline handles the raw echo.
        if self.debugging: print '*cmd*', self.sanitize(line)
        self.putline(line)
    # Internal: return one line from the server, stripping CRLF.
    # Raise EOFError if the connection is closed
    def getline(self):
        line = self.file.readline()
        if self.debugging > 1:
            print '*get*', self.sanitize(line)
        if not line: raise EOFError
        # Strip CRLF; tolerate servers that terminate with a lone CR or LF.
        if line[-2:] == CRLF: line = line[:-2]
        elif line[-1:] in CRLF: line = line[:-1]
        return line
# Internal: get a response from the server, which may possibly
# consist of multiple lines. Return a single string with no
# trailing CRLF. If the response consists of multiple lines,
# these are separated by '\n' characters in the string
def getmultiline(self):
line = self.getline()
if line[3:4] == '-':
code = line[:3]
while 1:
nextline = self.getline()
line = line + ('\n' + nextline)
if nextline[:3] == code and \
nextline[3:4] != '-':
break
return line
    # Internal: get a response from the server.
    # Raise various errors if the response indicates an error
    def getresp(self):
        resp = self.getmultiline()
        if self.debugging: print '*resp*', self.sanitize(resp)
        self.lastresp = resp[:3]  # remember the 3-digit reply code
        c = resp[:1]
        # First digit classifies the reply: 1-3 success/continue,
        # 4 transient failure, 5 permanent failure, anything else bogus.
        if c in ('1', '2', '3'):
            return resp
        if c == '4':
            raise error_temp, resp
        if c == '5':
            raise error_perm, resp
        raise error_proto, resp
    def voidresp(self):
        """Expect a response beginning with '2'."""
        resp = self.getresp()
        # Anything other than a 2xx completion reply is unexpected here.
        if resp[0] != '2':
            raise error_reply, resp
        return resp
    def abort(self):
        '''Abort a file transfer. Uses out-of-band data.
        This does not follow the procedure from the RFC to send Telnet
        IP and Synch; that doesn't seem to work with the servers I've
        tried. Instead, just send the ABOR command as OOB data.'''
        line = 'ABOR' + CRLF
        if self.debugging > 1: print '*put urgent*', self.sanitize(line)
        self.sock.sendall(line, MSG_OOB)
        resp = self.getmultiline()
        # 426 (transfer aborted) and 226 (already complete) are both fine.
        if resp[:3] not in ('426', '226'):
            raise error_proto, resp
    def sendcmd(self, cmd):
        '''Send a command and return the response.'''
        self.putcmd(cmd)
        return self.getresp()
    def voidcmd(self, cmd):
        """Send a command and expect a response beginning with '2'."""
        self.putcmd(cmd)
        return self.voidresp()
def sendport(self, host, port):
'''Send a PORT command with the current host and the given
port number.
'''
hbytes = host.split('.')
pbytes = [repr(port//256), repr(port%256)]
bytes = hbytes + pbytes
cmd = 'PORT ' + ','.join(bytes)
return self.voidcmd(cmd)
    def sendeprt(self, host, port):
        '''Send a EPRT command with the current host and the given port number.'''
        # RFC 2428 protocol numbers: 1 = IPv4, 2 = IPv6.
        af = 0
        if self.af == socket.AF_INET:
            af = 1
        if self.af == socket.AF_INET6:
            af = 2
        if af == 0:
            raise error_proto, 'unsupported address family'
        # EPRT syntax: |protocol|address|port|
        fields = ['', repr(af), host, repr(port), '']
        cmd = 'EPRT ' + '|'.join(fields)
        return self.voidcmd(cmd)
    def makeport(self):
        '''Create a new socket and send a PORT command for it.'''
        msg = "getaddrinfo returns an empty list"
        sock = None
        # Try each candidate local address until bind() succeeds.
        for res in socket.getaddrinfo(None, 0, self.af, socket.SOCK_STREAM, 0, socket.AI_PASSIVE):
            af, socktype, proto, canonname, sa = res
            try:
                sock = socket.socket(af, socktype, proto)
                sock.bind(sa)
            except socket.error, msg:
                if sock:
                    sock.close()
                sock = None
                continue
            break
        if not sock:
            raise socket.error, msg
        sock.listen(1)
        port = sock.getsockname()[1] # Get proper port
        host = self.sock.getsockname()[0] # Get proper host
        # PORT for IPv4, EPRT otherwise (per the control-connection family).
        if self.af == socket.AF_INET:
            resp = self.sendport(host, port)
        else:
            resp = self.sendeprt(host, port)
        return sock
    def makepasv(self):
        # Ask the server for a data port: PASV for IPv4, EPSV otherwise.
        # parse227/parse229 are module-level reply parsers defined elsewhere
        # in this module.
        if self.af == socket.AF_INET:
            host, port = parse227(self.sendcmd('PASV'))
        else:
            host, port = parse229(self.sendcmd('EPSV'), self.sock.getpeername())
        return host, port
    def ntransfercmd(self, cmd, rest=None):
        """Initiate a transfer over the data connection.
        If the transfer is active, send a port command and the
        transfer command, and accept the connection. If the server is
        passive, send a pasv command, connect to it, and start the
        transfer command. Either way, return the socket for the
        connection and the expected size of the transfer. The
        expected size may be None if it could not be determined.
        Optional `rest' argument can be a string that is sent as the
        argument to a REST command. This is essentially a server
        marker used to tell the server to skip over any data up to the
        given marker.
        """
        size = None
        if self.passiveserver:
            # Passive: we connect out to the port the server names.
            host, port = self.makepasv()
            conn = socket.create_connection((host, port), self.timeout)
            if rest is not None:
                self.sendcmd("REST %s" % rest)
            resp = self.sendcmd(cmd)
            # Some servers apparently send a 200 reply to
            # a LIST or STOR command, before the 150 reply
            # (and way before the 226 reply). This seems to
            # be in violation of the protocol (which only allows
            # 1xx or error messages for LIST), so we just discard
            # this response.
            if resp[0] == '2':
                resp = self.getresp()
            if resp[0] != '1':
                raise error_reply, resp
        else:
            # Active: we listen and the server connects back to us.
            sock = self.makeport()
            if rest is not None:
                self.sendcmd("REST %s" % rest)
            resp = self.sendcmd(cmd)
            # See above.
            if resp[0] == '2':
                resp = self.getresp()
            if resp[0] != '1':
                raise error_reply, resp
            conn, sockaddr = sock.accept()
        if resp[:3] == '150':
            # this is conditional in case we received a 125
            size = parse150(resp)
        return conn, size
def transfercmd(self, cmd, rest=None):
"""Like ntransfercmd() but returns only the socket."""
return self.ntransfercmd(cmd, rest)[0]
    def login(self, user = '', passwd = '', acct = ''):
        '''Login, default anonymous.'''
        if not user: user = 'anonymous'
        if not passwd: passwd = ''
        if not acct: acct = ''
        if user == 'anonymous' and passwd in ('', '-'):
            # If there is no anonymous ftp password specified
            # then we'll just use anonymous@
            # We don't send any other thing because:
            # - We want to remain anonymous
            # - We want to stop SPAM
            # - We don't want to let ftp sites to discriminate by the user,
            #   host or country.
            passwd = passwd + 'anonymous@'
        resp = self.sendcmd('USER ' + user)
        # A '3' (intermediate) reply means the server wants the next credential.
        if resp[0] == '3': resp = self.sendcmd('PASS ' + passwd)
        if resp[0] == '3': resp = self.sendcmd('ACCT ' + acct)
        if resp[0] != '2':
            raise error_reply, resp
        return resp
def retrbinary(self, cmd, callback, blocksize=8192, rest=None):
"""Retrieve data in binary mode. A new port is created for you.
Args:
cmd: A RETR command.
callback: A single parameter callable to be called on each
block of data read.
blocksize: The maximum number of bytes to read from the
socket at one time. [default: 8192]
rest: Passed to transfercmd(). [default: None]
Returns:
The response code.
"""
self.voidcmd('TYPE I')
conn = self.transfercmd(cmd, rest)
while 1:
data = conn.recv(blocksize)
if not data:
break
callback(data)
conn.close()
return self.voidresp()
    def retrlines(self, cmd, callback = None):
        """Retrieve data in line mode.  A new port is created for you.
        Args:
            cmd: A RETR, LIST, NLST, or MLSD command.
            callback: An optional single parameter callable that is called
                      for each line with the trailing CRLF stripped.
                      [default: print_line()]
        Returns:
            The response code.
        """
        if callback is None: callback = print_line
        resp = self.sendcmd('TYPE A')
        conn = self.transfercmd(cmd)
        fp = conn.makefile('rb')
        while 1:
            line = fp.readline()
            if self.debugging > 2: print '*retr*', repr(line)
            if not line:
                break
            # Strip the line terminator: full CRLF if present, else a bare LF.
            if line[-2:] == CRLF:
                line = line[:-2]
            elif line[-1:] == '\n':
                line = line[:-1]
            callback(line)
        fp.close()
        conn.close()
        return self.voidresp()
def storbinary(self, cmd, fp, blocksize=8192, callback=None):
"""Store a file in binary mode. A new port is created for you.
Args:
cmd: A STOR command.
fp: A file-like object with a read(num_bytes) method.
blocksize: The maximum data size to read from fp and send over
the connection at once. [default: 8192]
callback: An optional single parameter callable that is called on
on each block of data after it is sent. [default: None]
Returns:
The response code.
"""
self.voidcmd('TYPE I')
conn = self.transfercmd(cmd)
while 1:
buf = fp.read(blocksize)
if not buf: break
conn.sendall(buf)
if callback: callback(buf)
conn.close()
return self.voidresp()
    def storlines(self, cmd, fp, callback=None):
        """Store a file in line mode.  A new port is created for you.
        Args:
            cmd: A STOR command.
            fp: A file-like object with a readline() method.
            callback: An optional single parameter callable that is called
                      on each line after it is sent. [default: None]
        Returns:
            The response code.
        """
        self.voidcmd('TYPE A')
        conn = self.transfercmd(cmd)
        while 1:
            buf = fp.readline()
            if not buf: break
            # Normalize the line ending to CRLF before sending.
            if buf[-2:] != CRLF:
                # Drop a lone '\r' or '\n' terminator, then append CRLF.
                if buf[-1] in CRLF: buf = buf[:-1]
                buf = buf + CRLF
            conn.sendall(buf)
            if callback: callback(buf)
        conn.close()
        return self.voidresp()
def acct(self, password):
'''Send new account name.'''
cmd = 'ACCT ' + password
return self.voidcmd(cmd)
def nlst(self, *args):
'''Return a list of files in a given directory (default the current).'''
cmd = 'NLST'
for arg in args:
cmd = cmd + (' ' + arg)
files = []
self.retrlines(cmd, files.append)
return files
    def dir(self, *args):
        '''List a directory in long form.
        By default list current directory to stdout.
        Optional last argument is callback function; all
        non-empty arguments before it are concatenated to the
        LIST command.  (This *should* only be used for a pathname.)'''
        cmd = 'LIST'
        func = None
        # A trailing non-string argument is treated as the line callback.
        if args[-1:] and type(args[-1]) != type(''):
            args, func = args[:-1], args[-1]
        for arg in args:
            if arg:
                cmd = cmd + (' ' + arg)
        self.retrlines(cmd, func)
    def rename(self, fromname, toname):
        '''Rename a file.'''
        # RNFR must get an intermediate ('3') reply before RNTO is legal.
        resp = self.sendcmd('RNFR ' + fromname)
        if resp[0] != '3':
            raise error_reply, resp
        return self.voidcmd('RNTO ' + toname)
    def delete(self, filename):
        '''Delete a file.'''
        resp = self.sendcmd('DELE ' + filename)
        # Some servers answer 200 instead of the RFC-mandated 250.
        if resp[:3] in ('250', '200'):
            return resp
        elif resp[:1] == '5':
            raise error_perm, resp
        else:
            raise error_reply, resp
    def cwd(self, dirname):
        '''Change to a directory.'''
        if dirname == '..':
            # Prefer the dedicated CDUP command; fall back to 'CWD ..'
            # when the server does not implement it (500 reply).
            try:
                return self.voidcmd('CDUP')
            except error_perm, msg:
                if msg.args[0][:3] != '500':
                    raise
        elif dirname == '':
            dirname = '.'  # does nothing, but could return error
        cmd = 'CWD ' + dirname
        return self.voidcmd(cmd)
    def size(self, filename):
        '''Retrieve the size of a file.

        Returns the size as an integer, or None (implicitly) if the
        server reply is not a 213 response.
        '''
        # The SIZE command is defined in RFC-3659
        resp = self.sendcmd('SIZE ' + filename)
        if resp[:3] == '213':
            s = resp[3:].strip()
            try:
                return int(s)
            except (OverflowError, ValueError):
                # Python 2: sizes beyond sys.maxint still parse as longs.
                return long(s)
def mkd(self, dirname):
'''Make a directory, return its full pathname.'''
resp = self.sendcmd('MKD ' + dirname)
return parse257(resp)
def rmd(self, dirname):
'''Remove a directory.'''
return self.voidcmd('RMD ' + dirname)
def pwd(self):
'''Return current working directory.'''
resp = self.sendcmd('PWD')
return parse257(resp)
def quit(self):
'''Quit, and close the connection.'''
resp = self.voidcmd('QUIT')
self.close()
return resp
def close(self):
'''Close the connection without assuming anything about it.'''
if self.file:
self.file.close()
self.sock.close()
self.file = self.sock = None
_150_re = None
def parse150(resp):
'''Parse the '150' response for a RETR request.
Returns the expected transfer size or None; size is not guaranteed to
be present in the 150 message.
'''
if resp[:3] != '150':
raise error_reply, resp
global _150_re
if _150_re is None:
import re
_150_re = re.compile("150 .* \((\d+) bytes\)", re.IGNORECASE)
m = _150_re.match(resp)
if not m:
return None
s = m.group(1)
try:
return int(s)
except (OverflowError, ValueError):
return long(s)
# Lazily-compiled regex for the six comma-separated numbers in a 227 reply.
_227_re = None
def parse227(resp):
    '''Parse the '227' response for a PASV request.
    Raises error_proto if it does not contain '(h1,h2,h3,h4,p1,p2)'
    Return ('host.addr.as.numbers', port#) tuple.'''
    if resp[:3] != '227':
        raise error_reply, resp
    global _227_re
    if _227_re is None:
        import re
        _227_re = re.compile(r'(\d+),(\d+),(\d+),(\d+),(\d+),(\d+)')
    m = _227_re.search(resp)
    if not m:
        raise error_proto, resp
    numbers = m.groups()
    # First four numbers are the dotted-quad host, last two the port
    # encoded as high byte, low byte.
    host = '.'.join(numbers[:4])
    port = (int(numbers[4]) << 8) + int(numbers[5])
    return host, port
def parse229(resp, peer):
    '''Parse the '229' response for a EPSV request.
    Raises error_proto if it does not contain '(|||port|)'
    Return ('host.addr.as.numbers', port#) tuple.'''
    if resp[:3] != '229':
        raise error_reply, resp
    left = resp.find('(')
    if left < 0: raise error_proto, resp
    right = resp.find(')', left + 1)
    if right < 0:
        raise error_proto, resp  # should contain '(|||port|)'
    # The delimiter character must open and close the field list.
    if resp[left + 1] != resp[right - 1]:
        raise error_proto, resp
    # Splitting '|||port|' on the delimiter yields 5 parts; part 3 is the port.
    parts = resp[left + 1:right].split(resp[left+1])
    if len(parts) != 5:
        raise error_proto, resp
    # EPSV gives no host; reuse the peer address of the control connection.
    host = peer[0]
    port = int(parts[3])
    return host, port
def parse257(resp):
    '''Parse the '257' response for a MKD or PWD request.
    This is a response to a MKD or PWD request: a directory name.
    Returns the directoryname in the 257 reply.'''
    if resp[:3] != '257':
        raise error_reply, resp
    if resp[3:5] != ' "':
        return ''  # Not compliant to RFC 959, but UNIX ftpd does this
    dirname = ''
    i = 5
    n = len(resp)
    # Scan the quoted name; a doubled '"' inside the quotes is an
    # escaped literal quote, a single '"' terminates the name.
    while i < n:
        c = resp[i]
        i = i+1
        if c == '"':
            if i >= n or resp[i] != '"':
                break
            i = i+1
        dirname = dirname + c
    return dirname
def print_line(line):
    '''Default retrlines callback to print a line.'''
    # Python 2 print statement: writes the line to stdout with a newline.
    print line
def ftpcp(source, sourcename, target, targetname = '', type = 'I'):
    '''Copy file from one FTP-instance to another.

    Puts the source in passive mode and tells the target to connect
    directly to it, so the data never passes through this host.
    '''
    if not targetname: targetname = sourcename
    type = 'TYPE ' + type
    source.voidcmd(type)
    target.voidcmd(type)
    sourcehost, sourceport = parse227(source.sendcmd('PASV'))
    target.sendport(sourcehost, sourceport)
    # RFC 959: the user must "listen" [...] BEFORE sending the
    # transfer request.
    # So: STOR before RETR, because here the target is a "user".
    treply = target.sendcmd('STOR ' + targetname)
    if treply[:3] not in ('125', '150'): raise error_proto  # RFC 959
    sreply = source.sendcmd('RETR ' + sourcename)
    if sreply[:3] not in ('125', '150'): raise error_proto  # RFC 959
    source.voidresp()
    target.voidresp()
class Netrc:
    """Class to parse & provide access to 'netrc' format files.
    See the netrc(4) man page for information on the file format.
    WARNING: This class is obsolete -- use module netrc instead.
    """
    # Fall-back credentials from a 'default' entry (name-mangled class attrs).
    __defuser = None
    __defpasswd = None
    __defacct = None
    def __init__(self, filename=None):
        # Default to ~/.netrc when no explicit filename is given.
        if filename is None:
            if "HOME" in os.environ:
                filename = os.path.join(os.environ["HOME"],
                                        ".netrc")
            else:
                raise IOError, \
                      "specify file to load or set $HOME"
        self.__hosts = {}
        self.__macros = {}
        fp = open(filename, "r")
        # Simple line-oriented state machine; in_macro is set while the
        # body of a 'macdef' entry (terminated by a blank line) is read.
        in_macro = 0
        while 1:
            line = fp.readline()
            if not line: break
            if in_macro and line.strip():
                macro_lines.append(line)
                continue
            elif in_macro:
                # Blank line ends the macro definition.
                self.__macros[macro_name] = tuple(macro_lines)
                in_macro = 0
            words = line.split()
            host = user = passwd = acct = None
            default = 0
            i = 0
            # Parse keyword/value pairs on this line.
            while i < len(words):
                w1 = words[i]
                if i+1 < len(words):
                    w2 = words[i + 1]
                else:
                    w2 = None
                if w1 == 'default':
                    default = 1
                elif w1 == 'machine' and w2:
                    host = w2.lower()
                    i = i + 1
                elif w1 == 'login' and w2:
                    user = w2
                    i = i + 1
                elif w1 == 'password' and w2:
                    passwd = w2
                    i = i + 1
                elif w1 == 'account' and w2:
                    acct = w2
                    i = i + 1
                elif w1 == 'macdef' and w2:
                    macro_name = w2
                    macro_lines = []
                    in_macro = 1
                    break
                i = i + 1
            if default:
                self.__defuser = user or self.__defuser
                self.__defpasswd = passwd or self.__defpasswd
                self.__defacct = acct or self.__defacct
            if host:
                # Merge with an earlier entry for the same host, keeping
                # previously-seen fields when the new line omits them.
                if host in self.__hosts:
                    ouser, opasswd, oacct = \
                           self.__hosts[host]
                    user = user or ouser
                    passwd = passwd or opasswd
                    acct = acct or oacct
                self.__hosts[host] = user, passwd, acct
        fp.close()
    def get_hosts(self):
        """Return a list of hosts mentioned in the .netrc file."""
        return self.__hosts.keys()
    def get_account(self, host):
        """Returns login information for the named host.
        The return value is a triple containing userid,
        password, and the accounting field.
        """
        host = host.lower()
        user = passwd = acct = None
        if host in self.__hosts:
            user, passwd, acct = self.__hosts[host]
        # Fill missing fields from the 'default' entry, if any.
        user = user or self.__defuser
        passwd = passwd or self.__defpasswd
        acct = acct or self.__defacct
        return user, passwd, acct
    def get_macros(self):
        """Return a list of all defined macro names."""
        return self.__macros.keys()
    def get_macro(self, macro):
        """Return a sequence of lines which define a named macro."""
        return self.__macros[macro]
def test():
    '''Test program.
    Usage: ftp [-d] [-r[file]] host [-l[dir]] [-d[dir]] [-p] [file] ...
    -d dir
    -l list
    -p password
    '''
    if len(sys.argv) < 2:
        print test.__doc__
        sys.exit(0)
    debugging = 0
    rcfile = None
    # Each leading '-d' raises the debug level by one.
    while sys.argv[1] == '-d':
        debugging = debugging+1
        del sys.argv[1]
    if sys.argv[1][:2] == '-r':
        # get name of alternate ~/.netrc file:
        rcfile = sys.argv[1][2:]
        del sys.argv[1]
    host = sys.argv[1]
    ftp = FTP(host)
    ftp.set_debuglevel(debugging)
    userid = passwd = acct = ''
    try:
        netrc = Netrc(rcfile)
    except IOError:
        if rcfile is not None:
            sys.stderr.write("Could not open account file"
                             " -- using anonymous login.")
    else:
        try:
            userid, passwd, acct = netrc.get_account(host)
        except KeyError:
            # no account for host
            sys.stderr.write(
                    "No account -- using anonymous login.")
    ftp.login(userid, passwd, acct)
    # Remaining arguments: -l lists, -d changes directory, -p toggles
    # passive mode, anything else is retrieved as a binary file to stdout.
    for file in sys.argv[2:]:
        if file[:2] == '-l':
            ftp.dir(file[2:])
        elif file[:2] == '-d':
            cmd = 'CWD'
            if file[2:]: cmd = cmd + ' ' + file[2:]
            resp = ftp.sendcmd(cmd)
        elif file == '-p':
            ftp.set_pasv(not ftp.passiveserver)
        else:
            ftp.retrbinary('RETR ' + file, \
                           sys.stdout.write, 1024)
    ftp.quit()
# Run the interactive test program when executed as a script.
if __name__ == '__main__':
    test()
|
from __future__ import division
import numpy as np
import pytest
import odl
from odl.trafos.backends import pyfftw_call, PYFFTW_AVAILABLE
from odl.util import (
is_real_dtype, complex_dtype)
from odl.util.testutils import (
all_almost_equal, simple_fixture)
# Skip every test in this module when the pyfftw backend is not installed.
pytestmark = pytest.mark.skipif(not PYFFTW_AVAILABLE,
                                reason='`pyfftw` backend not available')
# Parametrized fixtures for the pyfftw planning effort and transform direction.
planning = simple_fixture('planning', ['estimate', 'measure', 'patient',
                                       'exhaustive'])
direction = simple_fixture('direction', ['forward', 'backward'])
def _random_array(shape, dtype):
    """Return a uniform random array of ``shape`` with the given dtype.

    Complex dtypes get independent random real and imaginary parts.
    """
    real_part = np.random.rand(*shape).astype(dtype)
    if is_real_dtype(dtype):
        return real_part
    return real_part + 1j * np.random.rand(*shape).astype(dtype)
def _params_from_dtype(dtype):
    """Return ``(halfcomplex, out_dtype)`` for a transform input dtype.

    Real input dtypes use the halfcomplex (r2c) transform.
    """
    return bool(is_real_dtype(dtype)), complex_dtype(dtype)
def _halfcomplex_shape(shape, axes=None):
if axes is None:
axes = tuple(range(len(shape)))
try:
axes = (int(axes),)
except TypeError:
pass
shape = list(shape)
shape[axes[-1]] = shape[axes[-1]] // 2 + 1
return shape
def test_pyfftw_call_forward(odl_floating_dtype):
    """Forward pyfftw transform agrees with numpy.fft for all dtypes."""
    # Test against Numpy's FFT
    dtype = odl_floating_dtype
    if dtype == np.dtype('float16'):  # not supported, skipping
        return
    halfcomplex, out_dtype = _params_from_dtype(dtype)
    for shape in [(10,), (3, 4, 5)]:
        arr = _random_array(shape, dtype)
        if halfcomplex:
            true_dft = np.fft.rfftn(arr)
            dft_arr = np.empty(_halfcomplex_shape(shape), dtype=out_dtype)
        else:
            true_dft = np.fft.fftn(arr)
            dft_arr = np.empty(shape, dtype=out_dtype)
        pyfftw_call(arr, dft_arr, direction='forward',
                    halfcomplex=halfcomplex, preserve_input=False)
        assert all_almost_equal(dft_arr, true_dft)
def test_pyfftw_call_threads():
    """Multi-threaded and default-thread transforms both match numpy.fft."""
    shape = (3, 4, 5)
    arr = _random_array(shape, dtype='complex64')
    true_dft = np.fft.fftn(arr)
    dft_arr = np.empty(shape, dtype='complex64')
    pyfftw_call(arr, dft_arr, direction='forward', preserve_input=False,
                threads=4)
    assert all_almost_equal(dft_arr, true_dft)
    shape = (1000,)  # Trigger cpu_count() as number of threads
    arr = _random_array(shape, dtype='complex64')
    true_dft = np.fft.fftn(arr)
    dft_arr = np.empty(shape, dtype='complex64')
    pyfftw_call(arr, dft_arr, direction='forward', preserve_input=False)
    assert all_almost_equal(dft_arr, true_dft)
def test_pyfftw_call_backward(odl_floating_dtype):
    """Backward transform matches numpy's (unnormalized) inverse FFT."""
    # Test against Numpy's IFFT, no normalization
    dtype = odl_floating_dtype
    if dtype == np.dtype('float16'):  # not supported, skipping
        return
    halfcomplex, in_dtype = _params_from_dtype(dtype)
    for shape in [(10,), (3, 4, 5)]:
        # Scaling happens wrt output (large) shape
        idft_scaling = np.prod(shape)
        if halfcomplex:
            arr = _random_array(_halfcomplex_shape(shape), in_dtype)
            true_idft = np.fft.irfftn(arr, shape) * idft_scaling
        else:
            arr = _random_array(shape, in_dtype)
            true_idft = np.fft.ifftn(arr) * idft_scaling
        idft_arr = np.empty(shape, dtype=dtype)
        pyfftw_call(arr, idft_arr, direction='backward',
                    halfcomplex=halfcomplex)
        assert all_almost_equal(idft_arr, true_idft)
def test_pyfftw_call_bad_input(direction):
    """Invalid dtype/shape/axes combinations must raise ValueError."""
    # Complex
    # Bad dtype
    dtype_in = np.dtype('complex128')
    arr_in = np.empty(3, dtype=dtype_in)
    bad_dtypes_out = np.sctypes['float'] + np.sctypes['complex']
    if dtype_in in bad_dtypes_out:
        # This one is correct, so we remove it
        bad_dtypes_out.remove(dtype_in)
    for bad_dtype in bad_dtypes_out:
        arr_out = np.empty(3, dtype=bad_dtype)
        with pytest.raises(ValueError):
            pyfftw_call(arr_in, arr_out, halfcomplex=False,
                        direction=direction)
    # Bad shape
    shape = (3, 4)
    arr_in = np.empty(shape, dtype='complex128')
    bad_shapes_out = [(3, 3), (3,), (4,), (3, 4, 5), ()]
    for bad_shape in bad_shapes_out:
        arr_out = np.empty(bad_shape, dtype='complex128')
        with pytest.raises(ValueError):
            pyfftw_call(arr_in, arr_out, halfcomplex=False,
                        direction=direction)
    # Duplicate axes
    arr_in = np.empty((3, 4, 5), dtype='complex128')
    arr_out = np.empty_like(arr_in)
    bad_axes_list = [(0, 0, 1), (1, 1, 1), (-1, -1)]
    for bad_axes in bad_axes_list:
        with pytest.raises(ValueError):
            pyfftw_call(arr_in, arr_out, axes=bad_axes,
                        direction=direction)
    # Axis entry out of range
    arr_in = np.empty((3, 4, 5), dtype='complex128')
    arr_out = np.empty_like(arr_in)
    bad_axes_list = [(0, 3), (-4,)]
    for bad_axes in bad_axes_list:
        with pytest.raises(ValueError):
            pyfftw_call(arr_in, arr_out, axes=bad_axes,
                        direction=direction)
    # Halfcomplex not possible for complex data
    arr_in = np.empty((3, 4, 5), dtype='complex128')
    arr_out = np.empty_like(arr_in)
    with pytest.raises(ValueError):
        pyfftw_call(arr_in, arr_out, halfcomplex=True,
                    direction=direction)
    # Data type mismatch
    arr_in = np.empty((3, 4, 5), dtype='complex128')
    arr_out = np.empty_like(arr_in, dtype='complex64')
    with pytest.raises(ValueError):
        pyfftw_call(arr_in, arr_out, direction=direction)
    # Halfcomplex
    # Bad dtype
    dtype_in = 'float64'
    arr_in = np.empty(10, dtype=dtype_in)
    bad_dtypes_out = np.sctypes['float'] + np.sctypes['complex']
    try:
        # This one is correct, so we remove it
        bad_dtypes_out.remove(np.dtype('complex128'))
    except ValueError:
        pass
    for bad_dtype in bad_dtypes_out:
        arr_out = np.empty(6, dtype=bad_dtype)
        with pytest.raises(ValueError):
            # For the backward transform the roles of the arrays swap.
            if direction == 'forward':
                pyfftw_call(arr_in, arr_out, halfcomplex=True,
                            direction='forward')
            else:
                pyfftw_call(arr_out, arr_in, halfcomplex=True,
                            direction='backward')
    # Bad shape
    shape = (3, 4, 5)
    axes_list = [None, (0, 1), (1,), (1, 2), (2, 1), (-1, -2, -3)]
    arr_in = np.empty(shape, dtype='float64')
    # Correct shapes:
    # [(3, 4, 3), (3, 3, 5), (3, 3, 5), (3, 4, 3), (3, 3, 5), (2, 4, 5)]
    bad_shapes_out = [(3, 4, 2), (3, 4, 3), (2, 3, 5), (3, 2, 3),
                      (3, 4, 3), (3, 4, 3)]
    always_bad_shapes = [(3, 4), (3, 4, 5)]
    for bad_shape, axes in zip(bad_shapes_out, axes_list):
        for always_bad_shape in always_bad_shapes:
            arr_out = np.empty(always_bad_shape, dtype='complex128')
            with pytest.raises(ValueError):
                if direction == 'forward':
                    pyfftw_call(arr_in, arr_out, axes=axes, halfcomplex=True,
                                direction='forward')
                else:
                    pyfftw_call(arr_out, arr_in, axes=axes, halfcomplex=True,
                                direction='backward')
        arr_out = np.empty(bad_shape, dtype='complex128')
        with pytest.raises(ValueError):
            if direction == 'forward':
                pyfftw_call(arr_in, arr_out, axes=axes, halfcomplex=True,
                            direction='forward')
            else:
                pyfftw_call(arr_out, arr_in, axes=axes, halfcomplex=True,
                            direction='backward')
def test_pyfftw_call_forward_real_not_halfcomplex():
    """Real input with halfcomplex=False yields the full complex FFT."""
    # Test against Numpy's FFT
    for shape in [(10,), (3, 4, 5)]:
        arr = _random_array(shape, dtype='float64')
        true_dft = np.fft.fftn(arr)
        dft_arr = np.empty(shape, dtype='complex128')
        pyfftw_call(arr, dft_arr, direction='forward', halfcomplex=False)
        assert all_almost_equal(dft_arr, true_dft)
def test_pyfftw_call_backward_real_not_halfcomplex():
    """Real input, backward direction equals numpy's unnormalized IFFT."""
    # Test against Numpy's IFFT, no normalization
    for shape in [(10,), (3, 4, 5)]:
        # Scaling happens wrt output (large) shape
        idft_scaling = np.prod(shape)
        arr = _random_array(shape, dtype='float64')
        true_idft = np.fft.ifftn(arr) * idft_scaling
        idft_arr = np.empty(shape, dtype='complex128')
        pyfftw_call(arr, idft_arr, direction='backward', halfcomplex=False)
        assert all_almost_equal(idft_arr, true_idft)
def test_pyfftw_call_plan_preserve_input(planning):
    """Input array is left untouched for every planning effort."""
    for shape in [(10,), (3, 4)]:
        arr = _random_array(shape, dtype='complex128')
        arr_cpy = arr.copy()
        idft_scaling = np.prod(shape)
        true_idft = np.fft.ifftn(arr) * idft_scaling
        idft_arr = np.empty(shape, dtype='complex128')
        pyfftw_call(arr, idft_arr, direction='backward', halfcomplex=False,
                    planning=planning)
        assert all_almost_equal(arr, arr_cpy)  # Input preserved
        assert all_almost_equal(idft_arr, true_idft)
def test_pyfftw_call_forward_with_axes(odl_floating_dtype):
    """Forward transform over axis subsets agrees with numpy.fft."""
    dtype = odl_floating_dtype
    if dtype == np.dtype('float16'):  # not supported, skipping
        return
    halfcomplex, out_dtype = _params_from_dtype(dtype)
    shape = (3, 4, 5)
    test_axes = [(0, 1), [1], (-1,), (1, 0), (-1, -2, -3)]
    for axes in test_axes:
        arr = _random_array(shape, dtype)
        if halfcomplex:
            true_dft = np.fft.rfftn(arr, axes=axes)
            dft_arr = np.empty(_halfcomplex_shape(shape, axes),
                               dtype=out_dtype)
        else:
            true_dft = np.fft.fftn(arr, axes=axes)
            dft_arr = np.empty(shape, dtype=out_dtype)
        pyfftw_call(arr, dft_arr, direction='forward', axes=axes,
                    halfcomplex=halfcomplex)
        assert all_almost_equal(dft_arr, true_dft)
def test_pyfftw_call_backward_with_axes(odl_floating_dtype):
    """Backward transform over axis subsets agrees with numpy's IFFT."""
    dtype = odl_floating_dtype
    if dtype == np.dtype('float16'):  # not supported, skipping
        return
    halfcomplex, in_dtype = _params_from_dtype(dtype)
    shape = (3, 4, 5)
    test_axes = [(0, 1), [1], (-1,), (1, 0), (-1, -2, -3)]
    for axes in test_axes:
        # Only the shape indexed by axes count for the scaling
        active_shape = np.take(shape, axes)
        idft_scaling = np.prod(active_shape)
        if halfcomplex:
            arr = _random_array(_halfcomplex_shape(shape, axes), in_dtype)
            true_idft = (np.fft.irfftn(arr, s=active_shape, axes=axes) *
                         idft_scaling)
        else:
            arr = _random_array(shape, in_dtype)
            true_idft = (np.fft.ifftn(arr, s=active_shape, axes=axes) *
                         idft_scaling)
        idft_arr = np.empty(shape, dtype=dtype)
        pyfftw_call(arr, idft_arr, direction='backward', axes=axes,
                    halfcomplex=halfcomplex)
        assert all_almost_equal(idft_arr, true_idft)
def test_pyfftw_call_forward_with_plan():
    """A plan created on the first call can be reused on a second call."""
    for shape in [(10,), (3, 4, 5)]:
        arr = _random_array(shape, dtype='complex128')
        arr_cpy = arr.copy()
        true_dft = np.fft.fftn(arr)
        # First run, create plan
        dft_arr = np.empty(shape, dtype='complex128')
        plan = pyfftw_call(arr, dft_arr, direction='forward',
                           halfcomplex=False, planning_effort='measure')
        # Second run, reuse with fresh output array
        dft_arr = np.empty(shape, dtype='complex128')
        pyfftw_call(arr, dft_arr, direction='forward', fftw_plan=plan,
                    halfcomplex=False)
        assert all_almost_equal(arr, arr_cpy)  # Input preserved
        assert all_almost_equal(dft_arr, true_dft)
def test_pyfftw_call_backward_with_plan():
    """A backward plan created on the first call can be reused."""
    for shape in [(10,), (3, 4, 5)]:
        arr = _random_array(shape, dtype='complex128')
        arr_cpy = arr.copy()
        idft_scaling = np.prod(shape)
        true_idft = np.fft.ifftn(arr) * idft_scaling
        # First run, create plan
        idft_arr = np.empty(shape, dtype='complex128')
        plan = pyfftw_call(arr, idft_arr, direction='backward',
                           halfcomplex=False, planning_effort='measure')
        # Second run, reuse with fresh output array
        idft_arr = np.empty(shape, dtype='complex128')
        pyfftw_call(arr, idft_arr, direction='backward', fftw_plan=plan,
                    halfcomplex=False)
        assert all_almost_equal(arr, arr_cpy)  # Input preserved
        assert all_almost_equal(idft_arr, true_idft)
# Allow running this test module directly via ODL's test runner.
if __name__ == '__main__':
    odl.util.test_file(__file__)
|
from __future__ import unicode_literals
import base64
import calendar
import datetime
import re
import sys
import unicodedata
from binascii import Error as BinasciiError
from email.utils import formatdate
from django.utils import six
from django.utils.datastructures import MultiValueDict
from django.utils.encoding import force_bytes, force_str, force_text
from django.utils.functional import allow_lazy
from django.utils.six.moves.urllib.parse import (
quote, quote_plus, unquote, unquote_plus, urlencode as original_urlencode,
urlparse,
)
# Matches a (possibly weak, 'W/'-prefixed) quoted ETag, capturing the value.
ETAG_MATCH = re.compile(r'(?:W/)?"((?:\\.|[^"])*)"')
# Lowercase three-letter month abbreviations, indexed 0-11.
MONTHS = 'jan feb mar apr may jun jul aug sep oct nov dec'.split()
# Named sub-patterns used to build the three HTTP date formats below.
__D = r'(?P<day>\d{2})'
__D2 = r'(?P<day>[ \d]\d)'
__M = r'(?P<mon>\w{3})'
__Y = r'(?P<year>\d{4})'
__Y2 = r'(?P<year>\d{2})'
__T = r'(?P<hour>\d{2}):(?P<min>\d{2}):(?P<sec>\d{2})'
# The three date formats accepted by RFC 2616 section 3.3.1.
RFC1123_DATE = re.compile(r'^\w{3}, %s %s %s %s GMT$' % (__D, __M, __Y, __T))
RFC850_DATE = re.compile(r'^\w{6,9}, %s-%s-%s %s GMT$' % (__D, __M, __Y2, __T))
ASCTIME_DATE = re.compile(r'^\w{3} %s %s %s %s$' % (__M, __D2, __T, __Y))
# RFC 3986 general and sub-delimiter characters (as native str).
RFC3986_GENDELIMS = str(":/?#[]@")
RFC3986_SUBDELIMS = str("!$&'()*+,;=")
# Default ports used by same_origin() when a URL omits the port.
PROTOCOL_TO_PORT = {
    'http': 80,
    'https': 443,
}
def urlquote(url, safe='/'):
    """
    A unicode-aware version of urllib.quote(): the url is UTF-8 encoded
    before quoting, and the result is returned as text. Safe to feed into
    a later iri_to_uri() call without double-quoting.
    """
    quoted = quote(force_str(url), force_str(safe))
    return force_text(quoted)
urlquote = allow_lazy(urlquote, six.text_type)
def urlquote_plus(url, safe=''):
    """
    A unicode-aware version of urllib.quote_plus(): the url is UTF-8
    encoded before quoting, and the result is returned as text. Safe to
    feed into a later iri_to_uri() call without double-quoting.
    """
    quoted = quote_plus(force_str(url), force_str(safe))
    return force_text(quoted)
urlquote_plus = allow_lazy(urlquote_plus, six.text_type)
def urlunquote(quoted_url):
    """
    Undo django.utils.http.urlquote(): unquote the string and return it
    as text.
    """
    unquoted = unquote(force_str(quoted_url))
    return force_text(unquoted)
urlunquote = allow_lazy(urlunquote, six.text_type)
def urlunquote_plus(quoted_url):
    """
    Undo django.utils.http.urlquote_plus(): unquote the string (turning
    '+' into spaces) and return it as text.
    """
    unquoted = unquote_plus(force_str(quoted_url))
    return force_text(unquoted)
urlunquote_plus = allow_lazy(urlunquote_plus, six.text_type)
def urlencode(query, doseq=0):
    """
    A version of Python's urllib.urlencode() function that can operate on
    unicode strings. The parameters are first cast to UTF-8 encoded strings and
    then encoded as per normal.
    """
    # Accept a MultiValueDict, a plain mapping, or a sequence of pairs.
    if isinstance(query, MultiValueDict):
        query = query.lists()
    elif hasattr(query, 'items'):
        query = query.items()
    # Encode keys and (possibly list-valued) values to bytestrings first.
    return original_urlencode(
        [(force_str(k),
         [force_str(i) for i in v] if isinstance(v, (list, tuple)) else force_str(v))
            for k, v in query],
        doseq)
def cookie_date(epoch_seconds=None):
    """
    Format a timestamp for Netscape's cookie standard.

    Accepts seconds since the epoch in UTC (as from time.time()), or None
    for the current time. Returns 'Wdy, DD-Mon-YYYY HH:MM:SS GMT'.
    """
    stamp = formatdate(epoch_seconds)
    # formatdate() yields 'Wdy, DD Mon YYYY HH:MM:SS -0000'; re-join the
    # date parts with dashes and replace the numeric zone with 'GMT'.
    day_part = stamp[:7]
    month = stamp[8:11]
    rest = stamp[12:25]
    return '%s-%s-%s GMT' % (day_part, month, rest)
def http_date(epoch_seconds=None):
    """
    Format a timestamp in the RFC 1123 date format required by HTTP
    (RFC 2616 section 3.3.1).

    Accepts seconds since the epoch in UTC (as from time.time()), or None
    for the current time. Returns 'Wdy, DD Mon YYYY HH:MM:SS GMT'.
    """
    # usegmt makes formatdate() emit the literal 'GMT' zone required by HTTP.
    return formatdate(epoch_seconds, usegmt=True)
def parse_http_date(date):
    """
    Parses a date format as specified by HTTP RFC2616 section 3.3.1.
    The three formats allowed by the RFC are accepted, even if only the first
    one is still in widespread use.
    Returns an integer expressed in seconds since the epoch, in UTC.
    Raises ValueError if the string matches none of the formats or holds
    an impossible date.
    """
    # emails.Util.parsedate does the job for RFC1123 dates; unfortunately
    # RFC2616 makes it mandatory to support RFC850 dates too. So we roll
    # our own RFC-compliant parsing.
    for regex in RFC1123_DATE, RFC850_DATE, ASCTIME_DATE:
        m = regex.match(date)
        if m is not None:
            break
    else:
        raise ValueError("%r is not in a valid HTTP date format" % date)
    try:
        year = int(m.group('year'))
        # RFC850 uses two-digit years; pivot at 70 per POSIX convention.
        if year < 100:
            if year < 70:
                year += 2000
            else:
                year += 1900
        month = MONTHS.index(m.group('mon').lower()) + 1
        day = int(m.group('day'))
        hour = int(m.group('hour'))
        min = int(m.group('min'))
        sec = int(m.group('sec'))
        result = datetime.datetime(year, month, day, hour, min, sec)
        return calendar.timegm(result.utctimetuple())
    except Exception:
        # Re-raise as ValueError, preserving the original traceback.
        six.reraise(ValueError, ValueError("%r is not a valid date" % date), sys.exc_info()[2])
def parse_http_date_safe(date):
    """
    Like parse_http_date(), but return None on invalid input instead of
    raising an exception.
    """
    try:
        return parse_http_date(date)
    except Exception:
        return None
def base36_to_int(s):
    """
    Converts a base 36 string to an ``int``. Raises ``ValueError`` if the
    input won't fit into an int.
    """
    # To prevent overconsumption of server resources, reject any
    # base36 string that is long than 13 base36 digits (13 digits
    # is sufficient to base36-encode any 64-bit integer)
    if len(s) > 13:
        raise ValueError("Base36 input too large")
    value = int(s, 36)
    # ... then do a final check that the value will fit into an int to avoid
    # returning a long (#15067). The long type was removed in Python 3.
    if six.PY2 and value > sys.maxint:
        raise ValueError("Base36 input too large")
    return value
def int_to_base36(i):
    """
    Converts an integer to a base36 string.

    Raises ValueError for negative input (and, on Python 2, for values
    above sys.maxint) and TypeError for non-integer input on Python 2.
    """
    char_set = '0123456789abcdefghijklmnopqrstuvwxyz'
    if i < 0:
        raise ValueError("Negative base36 conversion input.")
    if six.PY2:
        if not isinstance(i, six.integer_types):
            raise TypeError("Non-integer base36 conversion input.")
        if i > sys.maxint:
            raise ValueError("Base36 conversion input too large.")
    # Single-digit fast path (also handles 0, which the loop below cannot).
    if i < 36:
        return char_set[i]
    b36 = ''
    while i != 0:
        i, n = divmod(i, 36)
        b36 = char_set[n] + b36
    return b36
def urlsafe_base64_encode(s):
    """
    Encode a bytestring in URL-safe base64, stripping any trailing
    newlines and padding equal signs.
    """
    encoded = base64.urlsafe_b64encode(s)
    return encoded.rstrip(b'\n=')
def urlsafe_base64_decode(s):
    """
    Decodes a base64 encoded string, adding back any trailing equal signs that
    might have been stripped.

    Raises ValueError on malformed input.
    """
    s = force_bytes(s)
    try:
        # Re-pad to a multiple of 4 before decoding.
        return base64.urlsafe_b64decode(s.ljust(len(s) + len(s) % 4, b'='))
    except (LookupError, BinasciiError) as e:
        raise ValueError(e)
def parse_etags(etag_str):
    """
    Parses a string with one or several etags passed in If-None-Match and
    If-Match headers by the rules in RFC 2616. Returns a list of etags
    without surrounding double quotes (") and unescaped from \<CHAR>.
    """
    etags = ETAG_MATCH.findall(etag_str)
    if not etags:
        # etag_str has wrong format, treat it as an opaque string then
        return [etag_str]
    # Undo backslash-escaping inside each quoted etag value.
    etags = [e.encode('ascii').decode('unicode_escape') for e in etags]
    return etags
def quote_etag(etag):
    """
    Return the etag wrapped in double quotes, with embedded backslashes
    and double quotes escaped.
    """
    escaped = etag.replace('\\', '\\\\').replace('"', '\\"')
    return '"%s"' % escaped
def same_origin(url1, url2):
    """
    Checks if two URLs are 'same-origin', i.e. they share scheme, hostname
    and port (filling in the default port for http/https when absent).

    Returns False for unparseable URLs or unknown schemes.
    """
    p1, p2 = urlparse(url1), urlparse(url2)
    try:
        o1 = (p1.scheme, p1.hostname, p1.port or PROTOCOL_TO_PORT[p1.scheme])
        o2 = (p2.scheme, p2.hostname, p2.port or PROTOCOL_TO_PORT[p2.scheme])
        return o1 == o2
    except (ValueError, KeyError):
        return False
def is_safe_url(url, host=None):
    """
    Return ``True`` if the url is a safe redirection (i.e. it doesn't point to
    a different host and uses a safe scheme).
    Always returns ``False`` on an empty url.
    """
    if url is not None:
        url = url.strip()
    if not url:
        return False
    if six.PY2:
        # Reject bytestrings that aren't valid UTF-8 rather than crash later.
        try:
            url = force_text(url)
        except UnicodeDecodeError:
            return False
    # Chrome treats \ completely as / in paths but it could be part of some
    # basic auth credentials so we need to check both URLs.
    return _is_safe_url(url, host) and _is_safe_url(url.replace('\\', '/'), host)
def _is_safe_url(url, host):
# Chrome considers any URL with more than two slashes to be absolute, but
# urlparse is not so flexible. Treat any url with three slashes as unsafe.
if url.startswith('///'):
return False
url_info = urlparse(url)
# Forbid URLs like http:///example.com - with a scheme, but without a hostname.
# In that URL, example.com is not the hostname but, a path component. However,
# Chrome will still consider example.com to be the hostname, so we must not
# allow this syntax.
if not url_info.netloc and url_info.scheme:
return False
# Forbid URLs that start with control characters. Some browsers (like
# Chrome) ignore quite a few control characters at the start of a
# URL and might consider the URL as scheme relative.
if unicodedata.category(url[0])[0] == 'C':
return False
return ((not url_info.netloc or url_info.netloc == host) and
(not url_info.scheme or url_info.scheme in ['http', 'https']))
|
from datetime import date, timedelta
from django.conf import settings

# A date two weeks from now, used as the simulated "election day".
date_in_near_future = date.today() + timedelta(days=14)
FOUR_YEARS_IN_DAYS = 1462


def election_date_before(r):
    """Context processor: today's real date, i.e. before the election."""
    return {'DATE_TODAY': date.today()}


def election_date_on_election_day(r):
    """Context processor: pretend today is election day."""
    return {'DATE_TODAY': date_in_near_future}


def election_date_after(r):
    """Context processor: a date four weeks on, i.e. after the election."""
    return {'DATE_TODAY': date.today() + timedelta(days=28)}


# Context-processor tuples for overriding settings in tests.
processors = settings.TEMPLATE_CONTEXT_PROCESSORS
processors_before = processors + (
    "candidates.tests.dates.election_date_before",)
processors_on_election_day = processors + (
    "candidates.tests.dates.election_date_on_election_day",)
processors_after = processors + (
    "candidates.tests.dates.election_date_after",)
|
"""
XBlock runtime services for LibraryContentModule
"""
from django.core.exceptions import PermissionDenied
from opaque_keys.edx.locator import LibraryLocator
from xmodule.library_content_module import ANY_CAPA_TYPE_VALUE
from xmodule.modulestore.exceptions import ItemNotFoundError
from xmodule.capa_module import CapaDescriptor
class LibraryToolsService(object):
    """
    Service that allows LibraryContentModule to interact with libraries in the
    modulestore.
    """
    def __init__(self, modulestore):
        # Modulestore used for every library/course read and write below.
        self.store = modulestore

    def _get_library(self, library_key):
        """
        Given a library key like "library-v1:ProblemX+PR0B", return the
        'library' XBlock with meta-information about the library.

        Returns None on error (e.g. the library does not exist).
        """
        if not isinstance(library_key, LibraryLocator):
            library_key = LibraryLocator.from_string(library_key)
        # Callers must pass an unversioned key; the store resolves the current
        # version itself (kept in the result via remove_version=False).
        assert library_key.version_guid is None
        try:
            return self.store.get_library(library_key, remove_version=False, remove_branch=False)
        except ItemNotFoundError:
            return None

    def get_library_version(self, lib_key):
        """
        Get the version (an ObjectID) of the given library.

        Returns None if the library does not exist.
        """
        library = self._get_library(lib_key)
        if library:
            # We need to know the library's version so ensure it's set in library.location.library_key.version_guid
            assert library.location.library_key.version_guid is not None
            return library.location.library_key.version_guid
        return None

    def create_block_analytics_summary(self, course_key, block_keys):
        """
        Given a CourseKey and a list of (block_type, block_id) pairs,
        prepare the JSON-ready metadata needed for analytics logging.

        This is [
            {"usage_key": x, "original_usage_key": y, "original_usage_version": z, "descendants": [...]}
        ]
        where the main list contains all top-level blocks, and descendants contains a *flat* list of all
        descendants of the top level blocks, if any.
        """
        def summarize_block(usage_key):
            """ Basic information about the given block """
            orig_key, orig_version = self.store.get_block_original_usage(usage_key)
            return {
                "usage_key": unicode(usage_key),
                "original_usage_key": unicode(orig_key) if orig_key else None,
                "original_usage_version": unicode(orig_version) if orig_version else None,
            }

        result_json = []
        for block_key in block_keys:
            key = course_key.make_usage_key(*block_key)
            info = summarize_block(key)
            info['descendants'] = []
            try:
                block = self.store.get_item(key, depth=None)  # Load the item and all descendants
                children = list(getattr(block, "children", []))
                # Walk the subtree: pop one child, summarize it, then queue
                # its own children until the subtree is exhausted.
                while children:
                    child_key = children.pop()
                    child = self.store.get_item(child_key)
                    info['descendants'].append(summarize_block(child_key))
                    children.extend(getattr(child, "children", []))
            except ItemNotFoundError:
                pass  # The block has been deleted
            result_json.append(info)
        return result_json

    def _filter_child(self, usage_key, capa_type):
        """
        Filters children by CAPA problem type, if configured
        """
        # ANY_CAPA_TYPE_VALUE disables filtering entirely.
        if capa_type == ANY_CAPA_TYPE_VALUE:
            return True
        if usage_key.block_type != "problem":
            return False
        descriptor = self.store.get_item(usage_key, depth=0)
        assert isinstance(descriptor, CapaDescriptor)
        return capa_type in descriptor.problem_types

    def can_use_library_content(self, block):
        """
        Determines whether a modulestore holding a course_id supports libraries.
        """
        return self.store.check_supports(block.location.course_key, 'copy_from_template')

    def update_children(self, dest_block, user_id, user_perms=None):
        """
        This method is to be used when the library that a LibraryContentModule
        references has been updated. It will re-fetch all matching blocks from
        the libraries, and copy them as children of dest_block. The children
        will be given new block_ids, but the definition ID used should be the
        exact same definition ID used in the library.

        This method will update dest_block's 'source_library_version' field to
        store the version number of the libraries used, so we easily determine
        if dest_block is up to date or not.

        Raises PermissionDenied if user_perms forbids writing the course or
        reading the library; raises ValueError if the library is missing.
        """
        if user_perms and not user_perms.can_write(dest_block.location.course_key):
            raise PermissionDenied()
        if not dest_block.source_library_id:
            # No library configured: clear the version and leave children alone.
            dest_block.source_library_version = ""
            return
        source_blocks = []
        library_key = dest_block.source_library_key
        library = self._get_library(library_key)
        if library is None:
            raise ValueError("Requested library not found.")
        if user_perms and not user_perms.can_read(library_key):
            raise PermissionDenied()
        filter_children = (dest_block.capa_type != ANY_CAPA_TYPE_VALUE)
        if filter_children:
            # Apply simple filtering based on CAPA problem types:
            source_blocks.extend([key for key in library.children if self._filter_child(key, dest_block.capa_type)])
        else:
            source_blocks.extend(library.children)
        with self.store.bulk_operations(dest_block.location.course_key):
            dest_block.source_library_version = unicode(library.location.library_key.version_guid)
            self.store.update_item(dest_block, user_id)
            dest_block.children = self.store.copy_from_template(source_blocks, dest_block.location, user_id)
            # ^-- copy_from_template updates the children in the DB
            # but we must also set .children here to avoid overwriting the DB again

    def list_available_libraries(self):
        """
        List all known libraries.

        Returns tuples of (LibraryLocator, display_name)
        """
        return [
            (lib.location.library_key.replace(version_guid=None, branch=None), lib.display_name)
            for lib in self.store.get_libraries()
        ]
|
from PyQt4 import QtCore, QtGui
import os
class ConfigPage(QtGui.QWizardPage):
    """Wizard page showing a template tree on the left and, on the right, a
    configuration panel above an info browser, separated by splitters."""

    def __init__(self, templates, parent=None):
        # ``templates`` must provide getModel(panel); the model is (re)built
        # each time the page is shown -- see initializePage().
        super(ConfigPage, self).__init__(parent)
        #self.setTitle("Configuration")
        #self.setSubTitle("Alter configuration and build your own platform.")
        #self.setPixmap(QtGui.QWizard.WatermarkPixmap,
        #    QtGui.QPixmap(':/images/watermark1.png'))
        self.templates = templates
        # Layout: vsplit holds the tree view (left) and hsplit (right);
        # hsplit stacks the config panel above the info browser.
        self.view = QtGui.QTreeView()
        self.panel = QtGui.QWidget()
        self.info = QtGui.QTextBrowser()
        self.hsplit = QtGui.QSplitter(QtCore.Qt.Vertical)
        self.vsplit = QtGui.QSplitter(QtCore.Qt.Horizontal)
        self.hsplit.addWidget(self.panel)
        self.hsplit.addWidget(self.info)
        self.vsplit.addWidget(self.view)
        self.vsplit.addWidget(self.hsplit)
        def click(index):
            # Show the selected item's description and forward the click to
            # the model (self.model is assigned in initializePage).
            item = index.internalPointer()
            self.info.setText(QtCore.QVariant(item.description).toString())
            self.model.clicked(item)
        # React identically to keyboard activation, hovering and clicking.
        self.view.activated.connect(click)
        self.view.entered.connect(click)
        self.view.clicked.connect(click)
        #self.view.setModel(model)
        self.layout = QtGui.QGridLayout()
        self.layout.addWidget(self.vsplit)
        #self.setStyleSheet("* { background: yellow }")
        #self.setMaximumHeight(0xFFFFFF)
        #self.vsplit.setMaximumHeight(0xFFFFFF)
        #self.hsplit.setMaximumHeight(0xFFFFFF)
        #self.view.setMaximumHeight(0xFFFFFF)
        self.setLayout(self.layout)
        #self.hsplit.moveSplitter(340,0)

    def initializePage(self):
        # QWizardPage hook: rebuild the config panel and tree model each time
        # the page becomes current.
        self.panel.setParent(None)
        self.panel = QtGui.QWidget()
        self.hsplit.insertWidget(0, self.panel)
        self.model = self.templates.getModel(self.panel)
        self.view.setModel(self.model)
        self.view.expandAll()
        self.view.setColumnWidth(0, 220)
        self.view.setColumnWidth(1, 20)
        self.setLayout(self.layout)
        #self.vsplit.moveSplitter(280,1)
        #self.hsplit.moveSplitter(120,1)
|
import platform
import shutil
import sys
import os
from spack import *
class Namd(MakefilePackage):
    """NAMD is a parallel molecular dynamics code designed for
    high-performance simulation of large biomolecular systems."""

    homepage = "http://www.ks.uiuc.edu/Research/namd/"
    # The tarball is expected in the current working directory (file:// URL),
    # i.e. the user provides the source archive themselves.
    url = "file://{0}/NAMD_2.12_Source.tar.gz".format(os.getcwd())

    version('2.12', '2a1191909b1ab03bf0205971ad4d8ee9')

    variant('fftw', default='3', values=('none', '2', '3', 'mkl'),
            description='Enable the use of FFTW/FFTW3/MKL FFT')

    variant('interface', default='none', values=('none', 'tcl', 'python'),
            description='Enables TCL and/or python interface')

    depends_on('charm')
    depends_on('fftw@:2.99', when="fftw=2")
    depends_on('fftw@3:', when="fftw=3")
    depends_on('intel-mkl', when="fftw=mkl")
    depends_on('tcl', when='interface=tcl')
    # The python interface requires tcl as well.
    depends_on('tcl', when='interface=python')
    depends_on('python', when='interface=python')

    def _copy_arch_file(self, lib):
        """Copy the Linux-x86_64 arch fragment for ``lib`` under this arch's
        name, patching the tcl link flag to match the actual tcl version."""
        config_filename = 'arch/{0}.{1}'.format(self.arch, lib)
        shutil.copy('arch/Linux-x86_64.{0}'.format(lib),
                    config_filename)
        if lib == 'tcl':
            filter_file(r'-ltcl8\.5',
                        '-ltcl{0}'.format(self.spec['tcl'].version.up_to(2)),
                        config_filename)

    def _append_option(self, opts, lib):
        """Append ``--with-<lib>`` plus its installation prefix to ``opts``,
        copying the matching arch file first (python has no arch file)."""
        if lib != 'python':
            self._copy_arch_file(lib)
        spec = self.spec
        opts.extend([
            '--with-{0}'.format(lib),
            '--{0}-prefix'.format(lib), spec[lib].prefix
        ])

    @property
    def arch(self):
        """NAMD arch string, e.g. 'linux-x86_64'."""
        plat = sys.platform
        if plat.startswith("linux"):
            plat = "linux"
        march = platform.machine()
        return '{0}-{1}'.format(plat, march)

    @property
    def build_directory(self):
        """Directory created by ./config in which the build takes place."""
        return '{0}-spack'.format(self.arch)

    def edit(self, spec, prefix):
        """Write the <arch>.arch file and run NAMD's ./config script."""
        with working_dir('arch'):
            with open('{0}.arch'.format(self.build_directory), 'w') as fh:
                # these options are taken from the default provided
                # configuration files
                optims_opts = {
                    'gcc': '-m64 -O3 -fexpensive-optimizations -ffast-math',
                    'intel': '-O2 -ip'
                }
                optim_opts = optims_opts[self.compiler.name] \
                    if self.compiler.name in optims_opts else ''
                fh.write('\n'.join([
                    'NAMD_ARCH = {0}'.format(self.arch),
                    'CHARMARCH = ',
                    'CXX = {0.cxx} {0.cxx11_flag}'.format(
                        self.compiler),
                    'CXXOPTS = {0}'.format(optim_opts),
                    'CC = {0}'.format(self.compiler.cc),
                    'COPTS = {0}'.format(optim_opts),
                    ''
                ]))
        self._copy_arch_file('base')
        opts = ['--charm-base', spec['charm'].prefix]
        fftw_version = spec.variants['fftw'].value
        if fftw_version == 'none':
            opts.append('--without-fftw')
        elif fftw_version == 'mkl':
            self._append_option(opts, 'mkl')
        else:
            # variant '2' -> arch file 'fftw', variant '3' -> 'fftw3'
            _fftw = 'fftw{0}'.format('' if fftw_version == '2' else '3')
            self._copy_arch_file(_fftw)
            opts.extend(['--with-{0}'.format(_fftw),
                         '--fftw-prefix', spec['fftw'].prefix])
        interface_type = spec.variants['interface'].value
        if interface_type != 'none':
            self._append_option(opts, 'tcl')
            if interface_type == 'python':
                self._append_option(opts, 'python')
        else:
            opts.extend([
                '--without-tcl',
                '--without-python'
            ])
        config = Executable('./config')
        config(self.build_directory, *opts)

    def install(self, spec, prefix):
        """Copy the namd2 binary and the charmrun launcher into the prefix."""
        with working_dir(self.build_directory):
            mkdirp(prefix.bin)
            install('namd2', prefix.bin)
            # I'm not sure this is a good idea or if an autoload of the charm
            # module would not be better.
            install('charmrun', prefix.bin)
|
from twisted.internet import defer
from twisted.spread import pb
from flumotion.common import testsuite
from flumotion.test import realm
from flumotion.twisted import pb as fpb
from flumotion.worker import medium
class TestWorkerAvatar(fpb.PingableAvatar):
    """Pingable avatar used by the test realm; binds the remote mind at
    construction time."""

    def __init__(self, avatarId, mind):
        fpb.PingableAvatar.__init__(self, avatarId)
        self.setMind(mind)
class TestWorkerRealm(realm.TestRealm):
    """Test realm exposing deferreds that fire on avatar login and logout."""

    deferredAvatar = None
    deferredLogout = None

    def getDeferredAvatar(self):
        """Return the deferred fired with the avatar on login, creating it
        lazily on first access."""
        d = self.deferredAvatar
        if d is None:
            d = self.deferredAvatar = defer.Deferred()
        return d

    def getDeferredLogout(self):
        """Return the deferred fired with the avatar on logout, creating it
        lazily on first access."""
        d = self.deferredLogout
        if d is None:
            d = self.deferredLogout = defer.Deferred()
        return d

    def requestAvatar(self, avatarId, keycard, mind, *ifaces):
        # Build the avatar, announce the login, and hand back the standard
        # (interface, avatar, logout-callable) triple expected by PB.
        avatar = TestWorkerAvatar(avatarId, mind)
        self.getDeferredAvatar().callback(avatar)
        logout = lambda: self.avatarLogout(avatar)
        return (pb.IPerspective, avatar, logout)

    def avatarLogout(self, avatar):
        # Announce the logout so tests waiting on the deferred can proceed.
        self.debug('worker logged out: %s', avatar.avatarId)
        self.getDeferredLogout().callback(avatar)
class TestWorkerMedium(testsuite.TestCase):
    """End-to-end test for WorkerMedium connecting to a PB realm."""

    def setUp(self):
        self.realm = TestWorkerRealm()

    def tearDown(self):
        # Return the deferred so trial waits for the realm to shut down.
        return self.realm.shutdown()

    def testConnect(self):
        """Connect a medium as 'foo', then verify the same avatar logs out."""
        m = medium.WorkerMedium(None)
        connectionInfo = self.realm.getConnectionInfo()
        connectionInfo.authenticator.avatarId = 'foo'
        m.startConnecting(connectionInfo)

        def connected(avatar):
            # Once logged in, drop the connection and wait for the logout.
            m.stopConnecting()
            return self.realm.getDeferredLogout()

        def disconnected(avatar):
            # The logged-out avatar must be the one we connected as.
            self.assertEquals(avatar.avatarId, 'foo')
        d = self.realm.getDeferredAvatar()
        d.addCallback(connected)
        d.addCallback(disconnected)
        return d
|
from spack import *
class PyLocalcider(PythonPackage):
    """Tools for calculating sequence properties of disordered proteins"""

    homepage = "http://pappulab.github.io/localCIDER"
    url = "https://pypi.io/packages/source/l/localcider/localcider-0.1.14.tar.gz"

    version('0.1.14', sha256='54ff29e8a011947cca5df79e96f3c69a76c49c4db41dcf1608663992be3e3f5f')

    # Build-time only dependency.
    depends_on('py-setuptools', type='build')
    # Declared runtime dependencies of localcider.
    depends_on('py-numpy', type=('build', 'run'))
    depends_on('py-matplotlib', type=('build', 'run'))
    depends_on('py-scipy', type=('build', 'run'))
|
"""distutils.unixccompiler
Contains the UnixCCompiler class, a subclass of CCompiler that handles
the "typical" Unix-style command-line C compiler:
* macros defined with -Dname[=value]
* macros undefined with -Uname
* include search directories specified with -Idir
* libraries specified with -lllib
* library search directories specified with -Ldir
* compile handled by 'cc' (or similar) executable with -c option:
compiles .c to .o
* link static library handled by 'ar' command (possibly with 'ranlib')
* link shared library handled by 'cc -shared'
"""
import os, sys, re
from distutils import sysconfig
from distutils.dep_util import newer
from distutils.ccompiler import \
CCompiler, gen_preprocess_options, gen_lib_options
from distutils.errors import \
DistutilsExecError, CompileError, LibError, LinkError
from distutils import log
if sys.platform == 'darwin':
import _osx_support
class UnixCCompiler(CCompiler):
    """CCompiler implementation for typical Unix-style toolchains: cc for
    compiling/linking, ar (+ optional ranlib) for static libraries."""

    compiler_type = 'unix'

    # These are used by CCompiler in two places: the constructor sets
    # instance attributes 'preprocessor', 'compiler', etc. from them, and
    # 'set_executable()' allows any of these to be set. The defaults here
    # are pretty generic; they will probably have to be set by an outsider
    # (eg. using information discovered by the sysconfig about building
    # Python extensions).
    executables = {'preprocessor' : None,
                   'compiler' : ["cc"],
                   'compiler_so' : ["cc"],
                   'compiler_cxx' : ["cc"],
                   'linker_so' : ["cc", "-shared"],
                   'linker_exe' : ["cc"],
                   'archiver' : ["ar", "-cr"],
                   'ranlib' : None,
                  }

    if sys.platform[:6] == "darwin":
        executables['ranlib'] = ["ranlib"]

    # Needed for the filename generation methods provided by the base
    # class, CCompiler. NB. whoever instantiates/uses a particular
    # UnixCCompiler instance should set 'shared_lib_ext' -- we set a
    # reasonable common default here, but it's not necessarily used on all
    # Unices!
    src_extensions = [".c",".C",".cc",".cxx",".cpp",".m"]
    obj_extension = ".o"
    static_lib_extension = ".a"
    shared_lib_extension = ".so"
    dylib_lib_extension = ".dylib"
    static_lib_format = shared_lib_format = dylib_lib_format = "lib%s%s"
    if sys.platform == "cygwin":
        exe_extension = ".exe"

    def preprocess(self, source, output_file=None, macros=None,
                   include_dirs=None, extra_preargs=None, extra_postargs=None):
        """Run the preprocessor over ``source``, writing to ``output_file``
        (or stdout when None). Raises CompileError on failure."""
        fixed_args = self._fix_compile_args(None, macros, include_dirs)
        ignore, macros, include_dirs = fixed_args
        pp_opts = gen_preprocess_options(macros, include_dirs)
        pp_args = self.preprocessor + pp_opts
        if output_file:
            pp_args.extend(['-o', output_file])
        if extra_preargs:
            pp_args[:0] = extra_preargs
        if extra_postargs:
            pp_args.extend(extra_postargs)
        pp_args.append(source)

        # We need to preprocess: either we're being forced to, or we're
        # generating output to stdout, or there's a target output file and
        # the source file is newer than the target (or the target doesn't
        # exist).
        if self.force or output_file is None or newer(source, output_file):
            if output_file:
                self.mkpath(os.path.dirname(output_file))
            try:
                self.spawn(pp_args)
            except DistutilsExecError as msg:
                raise CompileError(msg)

    def _compile(self, obj, src, ext, cc_args, extra_postargs, pp_opts):
        """Compile one source file ``src`` to object file ``obj``."""
        compiler_so = self.compiler_so
        if sys.platform == 'darwin':
            # Strip flags (e.g. unsupported archs) that would break the build.
            compiler_so = _osx_support.compiler_fixup(compiler_so,
                                                    cc_args + extra_postargs)
        try:
            self.spawn(compiler_so + cc_args + [src, '-o', obj] +
                       extra_postargs)
        except DistutilsExecError as msg:
            raise CompileError(msg)

    def create_static_lib(self, objects, output_libname,
                          output_dir=None, debug=0, target_lang=None):
        """Archive ``objects`` into a static library with ar (+ ranlib when
        configured). Raises LibError if ranlib fails."""
        objects, output_dir = self._fix_object_args(objects, output_dir)

        output_filename = \
            self.library_filename(output_libname, output_dir=output_dir)

        if self._need_link(objects, output_filename):
            self.mkpath(os.path.dirname(output_filename))
            self.spawn(self.archiver +
                       [output_filename] +
                       objects + self.objects)

            # Not many Unices required ranlib anymore -- SunOS 4.x is, I
            # think the only major Unix that does. Maybe we need some
            # platform intelligence here to skip ranlib if it's not
            # needed -- or maybe Python's configure script took care of
            # it for us, hence the check for leading colon.
            if self.ranlib:
                try:
                    self.spawn(self.ranlib + [output_filename])
                except DistutilsExecError as msg:
                    raise LibError(msg)
        else:
            log.debug("skipping %s (up-to-date)", output_filename)

    def link(self, target_desc, objects,
             output_filename, output_dir=None, libraries=None,
             library_dirs=None, runtime_library_dirs=None,
             export_symbols=None, debug=0, extra_preargs=None,
             extra_postargs=None, build_temp=None, target_lang=None):
        """Link ``objects`` into an executable or shared object, picking the
        C++ driver when target_lang is "c++". Raises LinkError on failure."""
        objects, output_dir = self._fix_object_args(objects, output_dir)
        fixed_args = self._fix_lib_args(libraries, library_dirs,
                                       runtime_library_dirs)
        libraries, library_dirs, runtime_library_dirs = fixed_args

        lib_opts = gen_lib_options(self, library_dirs, runtime_library_dirs,
                                   libraries)
        if not isinstance(output_dir, (str, type(None))):
            raise TypeError("'output_dir' must be a string or None")
        if output_dir is not None:
            output_filename = os.path.join(output_dir, output_filename)

        if self._need_link(objects, output_filename):
            ld_args = (objects + self.objects +
                       lib_opts + ['-o', output_filename])
            if debug:
                ld_args[:0] = ['-g']
            if extra_preargs:
                ld_args[:0] = extra_preargs
            if extra_postargs:
                ld_args.extend(extra_postargs)
            self.mkpath(os.path.dirname(output_filename))
            try:
                if target_desc == CCompiler.EXECUTABLE:
                    linker = self.linker_exe[:]
                else:
                    linker = self.linker_so[:]
                if target_lang == "c++" and self.compiler_cxx:
                    # skip over environment variable settings if /usr/bin/env
                    # is used to set up the linker's environment.
                    # This is needed on OSX. Note: this assumes that the
                    # normal and C++ compiler have the same environment
                    # settings.
                    i = 0
                    if os.path.basename(linker[0]) == "env":
                        i = 1
                        while '=' in linker[i]:
                            i += 1
                    linker[i] = self.compiler_cxx[i]

                if sys.platform == 'darwin':
                    linker = _osx_support.compiler_fixup(linker, ld_args)

                self.spawn(linker + ld_args)
            except DistutilsExecError as msg:
                raise LinkError(msg)
        else:
            log.debug("skipping %s (up-to-date)", output_filename)

    # -- Miscellaneous methods -----------------------------------------
    # These are all used by the 'gen_lib_options() function, in
    # ccompiler.py.

    def library_dir_option(self, dir):
        """Return the -L flag adding ``dir`` to the library search path."""
        return "-L" + dir

    def _is_gcc(self, compiler_name):
        # Heuristic: matches both 'gcc' and 'g++' driver names.
        return "gcc" in compiler_name or "g++" in compiler_name

    def runtime_library_dir_option(self, dir):
        """Return the platform-specific flag(s) adding ``dir`` to the runtime
        (rpath) search path of the produced binary."""
        # XXX Hackish, at the very least. See Python bug #445902:
        # http://sourceforge.net/tracker/index.php
        # ?func=detail&aid=445902&group_id=5470&atid=105470
        # Linkers on different platforms need different options to
        # specify that directories need to be added to the list of
        # directories searched for dependencies when a dynamic library
        # is sought. GCC on GNU systems (Linux, FreeBSD, ...) has to
        # be told to pass the -R option through to the linker, whereas
        # other compilers and gcc on other systems just know this.
        # Other compilers may need something slightly different. At
        # this time, there's no way to determine this information from
        # the configuration data stored in the Python installation, so
        # we use this hack.
        compiler = os.path.basename(sysconfig.get_config_var("CC"))
        if sys.platform[:6] == "darwin":
            # MacOSX's linker doesn't understand the -R flag at all
            return "-L" + dir
        elif sys.platform[:5] == "hp-ux":
            if self._is_gcc(compiler):
                return ["-Wl,+s", "-L" + dir]
            return ["+s", "-L" + dir]
        elif sys.platform[:7] == "irix646" or sys.platform[:6] == "osf1V5":
            return ["-rpath", dir]
        else:
            if self._is_gcc(compiler):
                # gcc on non-GNU systems does not need -Wl, but can
                # use it anyway. Since distutils has always passed in
                # -Wl whenever gcc was used in the past it is probably
                # safest to keep doing so.
                if sysconfig.get_config_var("GNULD") == "yes":
                    # GNU ld needs an extra option to get a RUNPATH
                    # instead of just an RPATH.
                    return "-Wl,--enable-new-dtags,-R" + dir
                else:
                    return "-Wl,-R" + dir
            else:
                # No idea how --enable-new-dtags would be passed on to
                # ld if this system was using GNU ld. Don't know if a
                # system like this even exists.
                return "-R" + dir

    def library_option(self, lib):
        """Return the -l flag linking against library ``lib``."""
        return "-l" + lib

    def find_library_file(self, dirs, lib, debug=0):
        """Search ``dirs`` for library ``lib``, preferring dylib, then
        shared, then static. Returns the full path or None if not found."""
        shared_f = self.library_filename(lib, lib_type='shared')
        dylib_f = self.library_filename(lib, lib_type='dylib')
        static_f = self.library_filename(lib, lib_type='static')

        if sys.platform == 'darwin':
            # On OSX users can specify an alternate SDK using
            # '-isysroot', calculate the SDK root if it is specified
            # (and use it further on)
            cflags = sysconfig.get_config_var('CFLAGS')
            m = re.search(r'-isysroot\s+(\S+)', cflags)
            if m is None:
                sysroot = '/'
            else:
                sysroot = m.group(1)

        for dir in dirs:
            shared = os.path.join(dir, shared_f)
            dylib = os.path.join(dir, dylib_f)
            static = os.path.join(dir, static_f)

            if sys.platform == 'darwin' and (
                dir.startswith('/System/') or (
                dir.startswith('/usr/') and not dir.startswith('/usr/local/'))):
                # System locations live under the SDK root when one is set.
                shared = os.path.join(sysroot, dir[1:], shared_f)
                dylib = os.path.join(sysroot, dir[1:], dylib_f)
                static = os.path.join(sysroot, dir[1:], static_f)

            # We're second-guessing the linker here, with not much hard
            # data to go on: GCC seems to prefer the shared library, so I'm
            # assuming that *all* Unix C compilers do. And of course I'm
            # ignoring even GCC's "-static" option. So sue me.
            if os.path.exists(dylib):
                return dylib
            elif os.path.exists(shared):
                return shared
            elif os.path.exists(static):
                return static

        # Oops, didn't find it in *any* of 'dirs'
        return None
|
from __future__ import division
import sys, os, re
# Make the package importable when Sphinx runs from the doc directory.
sys.path.append(os.path.dirname(os.path.dirname(__file__)))

extensions = ['sphinx.ext.autodoc', 'sphinx.ext.doctest']
templates_path = ['_templates']
source_suffix = '.rst'
master_doc = 'index'
project = 'SpiffWorkflow'
# Build the copyright line from the AUTHORS file; use a context manager so
# the file handle is closed instead of leaked.
with open('../AUTHORS') as _authors_file:
    copyright = '2012 ' + ', '.join(_authors_file.readlines())
import SpiffWorkflow
version = SpiffWorkflow.__version__
release = version
show_authors = True
pygments_style = 'friendly'
html_style = 'sphinxdoc.css'
html_static_path = ['figures']
html_last_updated_fmt = '%b %d, %Y'
html_index = 'index.html'
html_sidebars = {'index': 'indexsidebar.html'}
html_additional_pages = {'index': 'index.html'}
html_use_opensearch = 'http://sphinx.pocoo.org'
htmlhelp_basename = 'Sphinxdoc'
latex_documents = [('contents', 'sphinx.tex', 'Sphinx Documentation',
                    'Georg Brandl', 'manual', 1)]
latex_logo = '_static/sphinx.png'
latex_elements = {
    'fontpkg': '\\usepackage{palatino}'
}
from sphinx import addnodes

dir_sig_re = re.compile(r'\.\. ([^:]+)::(.*)$')

def parse_directive(env, sig, signode):
    """Render a directive signature as '.. name::' (plus arguments) and
    return the index entry name."""
    if not sig.startswith('.'):
        # Bare directive name: decorate it and keep the raw name.
        decorated = '.. %s::' % sig
        signode += addnodes.desc_name(decorated, decorated)
        return sig
    m = dir_sig_re.match(sig)
    if not m:
        # Unparseable signature: emit it verbatim.
        signode += addnodes.desc_name(sig, sig)
        return sig
    name, args = m.groups()
    decorated = '.. %s::' % name
    signode += addnodes.desc_name(decorated, decorated)
    signode += addnodes.desc_addname(args, args)
    return name
def parse_role(env, sig, signode):
    """Render a role signature as ':name:' and return the raw name."""
    label = ':%s:' % sig
    signode += addnodes.desc_name(label, label)
    return sig
event_sig_re = re.compile(r'([a-zA-Z-]+)\s*\((.*)\)')

def parse_event(env, sig, signode):
    """Render an event signature 'name(arg, ...)' as a name plus parameter
    list; fall back to the verbatim signature when it does not match."""
    match = event_sig_re.match(sig)
    if match is None:
        signode += addnodes.desc_name(sig, sig)
        return sig
    name, args = match.groups()
    signode += addnodes.desc_name(name, name)
    params = addnodes.desc_parameterlist()
    for raw_arg in args.split(','):
        cleaned = raw_arg.strip()
        params += addnodes.desc_parameter(cleaned, cleaned)
    signode += params
    return name
def setup(app):
    """Sphinx extension entry point: register the custom description units
    used throughout these docs."""
    from sphinx.ext.autodoc import cut_lines
    # Drop the first 4 lines of every module docstring (presumably a
    # boilerplate header -- confirm against the package's modules).
    app.connect('autodoc-process-docstring', cut_lines(4, what=['module']))
    app.add_description_unit('directive', 'dir', 'pair: %s; directive', parse_directive)
    app.add_description_unit('role', 'role', 'pair: %s; role', parse_role)
    # 'confval' has no custom parser; default rendering is used.
    app.add_description_unit('confval', 'confval', 'pair: %s; configuration value')
    app.add_description_unit('event', 'event', 'pair: %s; event', parse_event)
|
"""
Management class for basic VM operations.
"""
import functools
import os
from oslo.config import cfg
from nova.api.metadata import base as instance_metadata
from nova import exception
from nova.openstack.common import excutils
from nova.openstack.common.gettextutils import _
from nova.openstack.common import importutils
from nova.openstack.common import log as logging
from nova.openstack.common import processutils
from nova.openstack.common import units
from nova import utils
from nova.virt import configdrive
from nova.virt.hyperv import constants
from nova.virt.hyperv import imagecache
from nova.virt.hyperv import utilsfactory
from nova.virt.hyperv import vhdutilsv2
from nova.virt.hyperv import vmutils
from nova.virt.hyperv import volumeops
# Module-level logger for Hyper-V VM operations.
LOG = logging.getLogger(__name__)

# Configuration options specific to the Hyper-V driver; registered below
# under the 'hyperv' group.
hyperv_opts = [
    cfg.BoolOpt('limit_cpu_features',
                default=False,
                help='Required for live migration among '
                     'hosts with different CPU features'),
    cfg.BoolOpt('config_drive_inject_password',
                default=False,
                help='Sets the admin password in the config drive image'),
    cfg.StrOpt('qemu_img_cmd',
               default="qemu-img.exe",
               help='Path of qemu-img command which is used to convert '
                    'between different image types'),
    cfg.BoolOpt('config_drive_cdrom',
                default=False,
                help='Attaches the Config Drive image as a cdrom drive '
                     'instead of a disk drive'),
    cfg.BoolOpt('enable_instance_metrics_collection',
                default=False,
                help='Enables metrics collections for an instance by using '
                     'Hyper-V\'s metric APIs. Collected data can by retrieved '
                     'by other apps and services, e.g.: Ceilometer. '
                     'Requires Hyper-V / Windows Server 2012 and above'),
    cfg.FloatOpt('dynamic_memory_ratio',
                 default=1.0,
                 help='Enables dynamic memory allocation (ballooning) when '
                      'set to a value greater than 1. The value expresses '
                      'the ratio between the total RAM assigned to an '
                      'instance and its startup RAM amount. For example a '
                      'ratio of 2.0 for an instance with 1024MB of RAM '
                      'implies 512MB of RAM allocated at startup')
]

CONF = cfg.CONF
CONF.register_opts(hyperv_opts, 'hyperv')
# Options defined by other nova modules that this module reads.
CONF.import_opt('use_cow_images', 'nova.virt.driver')
CONF.import_opt('network_api_class', 'nova.network')
def check_admin_permissions(function):
    """Decorator for VMOps methods that verifies the Windows account has the
    required admin permissions before running the wrapped method."""
    @functools.wraps(function)
    def _checked(self, *args, **kwargs):
        # Validate permissions first; any failure surfaces before the
        # wrapped operation starts.
        self._vmutils.check_admin_permissions()
        return function(self, *args, **kwargs)
    return _checked
class VMOps(object):
_vif_driver_class_map = {
'nova.network.neutronv2.api.API':
'nova.virt.hyperv.vif.HyperVNeutronVIFDriver',
'nova.network.api.API':
'nova.virt.hyperv.vif.HyperVNovaNetworkVIFDriver',
}
    def __init__(self):
        """Build the utility helpers this class delegates to and resolve the
        VIF driver matching the configured network API class."""
        self._vmutils = utilsfactory.get_vmutils()
        self._vhdutils = utilsfactory.get_vhdutils()
        self._pathutils = utilsfactory.get_pathutils()
        self._volumeops = volumeops.VolumeOps()
        self._imagecache = imagecache.ImageCache()
        self._vif_driver = None
        self._load_vif_driver_class()
def _load_vif_driver_class(self):
try:
class_name = self._vif_driver_class_map[CONF.network_api_class]
self._vif_driver = importutils.import_object(class_name)
except KeyError:
raise TypeError(_("VIF driver not found for "
"network_api_class: %s") %
CONF.network_api_class)
    def list_instances(self):
        """List the instances on this host (delegates to vmutils)."""
        return self._vmutils.list_instances()
def get_info(self, instance):
"""Get information about the VM."""
LOG.debug(_("get_info called for instance"), instance=instance)
instance_name = instance['name']
if not self._vmutils.vm_exists(instance_name):
raise exception.InstanceNotFound(instance_id=instance['uuid'])
info = self._vmutils.get_vm_summary_info(instance_name)
state = constants.HYPERV_POWER_STATE[info['EnabledState']]
return {'state': state,
'max_mem': info['MemoryUsage'],
'mem': info['MemoryUsage'],
'num_cpu': info['NumberOfProcessors'],
'cpu_time': info['UpTime']}
    def _create_root_vhd(self, context, instance):
        """Create the instance's root VHD from the cached base image.

        Uses a differencing (copy-on-write) disk when use_cow_images is set;
        otherwise copies the base image and resizes it to the flavor's root
        size. Returns the path of the new root VHD.

        Raises HyperVException if the requested size is smaller than the base
        image. On any failure the partially created VHD is removed before the
        exception is re-raised.
        """
        base_vhd_path = self._imagecache.get_cached_image(context, instance)
        format_ext = base_vhd_path.split('.')[-1]
        root_vhd_path = self._pathutils.get_root_vhd_path(instance['name'],
                                                          format_ext)
        try:
            if CONF.use_cow_images:
                LOG.debug(_("Creating differencing VHD. Parent: "
                            "%(base_vhd_path)s, Target: %(root_vhd_path)s"),
                          {'base_vhd_path': base_vhd_path,
                           'root_vhd_path': root_vhd_path})
                self._vhdutils.create_differencing_vhd(root_vhd_path,
                                                       base_vhd_path)
            else:
                LOG.debug(_("Copying VHD image %(base_vhd_path)s to target: "
                            "%(root_vhd_path)s"),
                          {'base_vhd_path': base_vhd_path,
                           'root_vhd_path': root_vhd_path})
                self._pathutils.copyfile(base_vhd_path, root_vhd_path)
                base_vhd_info = self._vhdutils.get_vhd_info(base_vhd_path)
                base_vhd_size = base_vhd_info['MaxInternalSize']
                root_vhd_size = instance['root_gb'] * units.Gi
                # NOTE(lpetrut): Checking the namespace is needed as the
                # following method is not yet implemented in vhdutilsv2.
                if not isinstance(self._vhdutils, vhdutilsv2.VHDUtilsV2):
                    root_vhd_internal_size = (
                        self._vhdutils.get_internal_vhd_size_by_file_size(
                            root_vhd_path, root_vhd_size))
                else:
                    root_vhd_internal_size = root_vhd_size
                if root_vhd_internal_size < base_vhd_size:
                    error_msg = _("Cannot resize a VHD to a smaller size, the"
                                  " original size is %(base_vhd_size)s, the"
                                  " newer size is %(root_vhd_size)s"
                                  ) % {'base_vhd_size': base_vhd_size,
                                       'root_vhd_size': root_vhd_internal_size}
                    raise vmutils.HyperVException(error_msg)
                elif root_vhd_internal_size > base_vhd_size:
                    LOG.debug(_("Resizing VHD %(root_vhd_path)s to new "
                                "size %(root_vhd_size)s"),
                              {'root_vhd_size': root_vhd_internal_size,
                               'root_vhd_path': root_vhd_path})
                    self._vhdutils.resize_vhd(root_vhd_path, root_vhd_size)
        except Exception:
            with excutils.save_and_reraise_exception():
                # Clean up the partially created disk before re-raising.
                if self._pathutils.exists(root_vhd_path):
                    self._pathutils.remove(root_vhd_path)
        return root_vhd_path
def create_ephemeral_vhd(self, instance):
eph_vhd_size = instance.get('ephemeral_gb', 0) * units.Gi
if eph_vhd_size:
vhd_format = self._vhdutils.get_best_supported_vhd_format()
eph_vhd_path = self._pathutils.get_ephemeral_vhd_path(
instance['name'], vhd_format)
self._vhdutils.create_dynamic_vhd(eph_vhd_path, eph_vhd_size,
vhd_format)
return eph_vhd_path
    @check_admin_permissions
    def spawn(self, context, instance, image_meta, injected_files,
              admin_password, network_info, block_device_info=None):
        """Create a new VM and start it.

        Builds the root/ephemeral disks, defines the VM, optionally attaches
        a config drive, and powers the instance on. Any failure tears the
        half-built VM down and raises HyperVException.
        """
        LOG.info(_("Spawning new instance"), instance=instance)
        instance_name = instance['name']
        if self._vmutils.vm_exists(instance_name):
            raise exception.InstanceExists(name=instance_name)
        # Make sure we're starting with a clean slate.
        self._delete_disk_files(instance_name)
        # When the root device is a boot volume, no root VHD is created.
        if self._volumeops.ebs_root_in_block_devices(block_device_info):
            root_vhd_path = None
        else:
            root_vhd_path = self._create_root_vhd(context, instance)
        eph_vhd_path = self.create_ephemeral_vhd(instance)
        try:
            self.create_instance(instance, network_info, block_device_info,
                                 root_vhd_path, eph_vhd_path)
            if configdrive.required_by(instance):
                self._create_config_drive(instance, injected_files,
                                          admin_password)
            self.power_on(instance)
        except Exception as ex:
            # Destroy whatever was created so we don't leak a broken VM.
            LOG.exception(ex)
            self.destroy(instance)
            raise vmutils.HyperVException(_('Spawn instance failed'))
    def create_instance(self, instance, network_info, block_device_info,
                        root_vhd_path, eph_vhd_path):
        """Define the VM and attach its disks, volumes and NICs.

        root_vhd_path/eph_vhd_path may be None; a None root indicates the
        instance boots from a volume instead of a local VHD.
        """
        instance_name = instance['name']
        self._vmutils.create_vm(instance_name,
                                instance['memory_mb'],
                                instance['vcpus'],
                                CONF.hyperv.limit_cpu_features,
                                CONF.hyperv.dynamic_memory_ratio)
        # IDE controller 0: root disk first (address 0), then ephemeral.
        ctrl_disk_addr = 0
        if root_vhd_path:
            self._vmutils.attach_ide_drive(instance_name,
                                           root_vhd_path,
                                           0,
                                           ctrl_disk_addr,
                                           constants.IDE_DISK)
            ctrl_disk_addr += 1
        if eph_vhd_path:
            self._vmutils.attach_ide_drive(instance_name,
                                           eph_vhd_path,
                                           0,
                                           ctrl_disk_addr,
                                           constants.IDE_DISK)
        self._vmutils.create_scsi_controller(instance_name)
        # Third argument flags that no root VHD exists -- presumably signals
        # boot-from-volume to volumeops; confirm there.
        self._volumeops.attach_volumes(block_device_info,
                                       instance_name,
                                       root_vhd_path is None)
        for vif in network_info:
            LOG.debug(_('Creating nic for instance: %s'), instance_name)
            self._vmutils.create_nic(instance_name,
                                     vif['id'],
                                     vif['address'])
            self._vif_driver.plug(instance, vif)
        if CONF.hyperv.enable_instance_metrics_collection:
            self._vmutils.enable_vm_metrics_collection(instance_name)
def _create_config_drive(self, instance, injected_files, admin_password):
if CONF.config_drive_format != 'iso9660':
vmutils.HyperVException(_('Invalid config_drive_format "%s"') %
CONF.config_drive_format)
LOG.info(_('Using config drive for instance: %s'), instance=instance)
extra_md = {}
if admin_password and CONF.hyperv.config_drive_inject_password:
extra_md['admin_pass'] = admin_password
inst_md = instance_metadata.InstanceMetadata(instance,
content=injected_files,
extra_md=extra_md)
instance_path = self._pathutils.get_instance_dir(
instance['name'])
configdrive_path_iso = os.path.join(instance_path, 'configdrive.iso')
LOG.info(_('Creating config drive at %(path)s'),
{'path': configdrive_path_iso}, instance=instance)
with configdrive.ConfigDriveBuilder(instance_md=inst_md) as cdb:
try:
cdb.make_drive(configdrive_path_iso)
except processutils.ProcessExecutionError as e:
with excutils.save_and_reraise_exception():
LOG.error(_('Creating config drive failed with error: %s'),
e, instance=instance)
if not CONF.hyperv.config_drive_cdrom:
drive_type = constants.IDE_DISK
configdrive_path = os.path.join(instance_path,
'configdrive.vhd')
utils.execute(CONF.hyperv.qemu_img_cmd,
'convert',
'-f',
'raw',
'-O',
'vpc',
configdrive_path_iso,
configdrive_path,
attempts=1)
self._pathutils.remove(configdrive_path_iso)
else:
drive_type = constants.IDE_DVD
configdrive_path = configdrive_path_iso
self._vmutils.attach_ide_drive(instance['name'], configdrive_path,
1, 0, drive_type)
def _disconnect_volumes(self, volume_drives):
for volume_drive in volume_drives:
self._volumeops.disconnect_volume(volume_drive)
def _delete_disk_files(self, instance_name):
self._pathutils.get_instance_dir(instance_name,
create_dir=False,
remove_dir=True)
def destroy(self, instance, network_info=None, block_device_info=None,
destroy_disks=True):
instance_name = instance['name']
LOG.info(_("Got request to destroy instance: %s"), instance_name)
try:
if self._vmutils.vm_exists(instance_name):
#Stop the VM first.
self.power_off(instance)
storage = self._vmutils.get_vm_storage_paths(instance_name)
(disk_files, volume_drives) = storage
self._vmutils.destroy_vm(instance_name)
self._disconnect_volumes(volume_drives)
else:
LOG.debug(_("Instance not found: %s"), instance_name)
if destroy_disks:
self._delete_disk_files(instance_name)
except Exception as ex:
LOG.exception(ex)
raise vmutils.HyperVException(_('Failed to destroy instance: %s') %
instance_name)
def reboot(self, instance, network_info, reboot_type):
"""Reboot the specified instance."""
LOG.debug(_("reboot instance"), instance=instance)
self._set_vm_state(instance['name'],
constants.HYPERV_VM_STATE_REBOOT)
def pause(self, instance):
"""Pause VM instance."""
LOG.debug(_("Pause instance"), instance=instance)
self._set_vm_state(instance["name"],
constants.HYPERV_VM_STATE_PAUSED)
def unpause(self, instance):
"""Unpause paused VM instance."""
LOG.debug(_("Unpause instance"), instance=instance)
self._set_vm_state(instance["name"],
constants.HYPERV_VM_STATE_ENABLED)
def suspend(self, instance):
"""Suspend the specified instance."""
LOG.debug(_("Suspend instance"), instance=instance)
self._set_vm_state(instance["name"],
constants.HYPERV_VM_STATE_SUSPENDED)
def resume(self, instance):
"""Resume the suspended VM instance."""
LOG.debug(_("Resume instance"), instance=instance)
self._set_vm_state(instance["name"],
constants.HYPERV_VM_STATE_ENABLED)
def power_off(self, instance):
"""Power off the specified instance."""
LOG.debug(_("Power off instance"), instance=instance)
self._set_vm_state(instance["name"],
constants.HYPERV_VM_STATE_DISABLED)
def power_on(self, instance):
"""Power on the specified instance."""
LOG.debug(_("Power on instance"), instance=instance)
self._set_vm_state(instance["name"],
constants.HYPERV_VM_STATE_ENABLED)
def _set_vm_state(self, vm_name, req_state):
try:
self._vmutils.set_vm_state(vm_name, req_state)
LOG.debug(_("Successfully changed state of VM %(vm_name)s"
" to: %(req_state)s"),
{'vm_name': vm_name, 'req_state': req_state})
except Exception as ex:
LOG.exception(ex)
msg = (_("Failed to change vm state of %(vm_name)s"
" to %(req_state)s") %
{'vm_name': vm_name, 'req_state': req_state})
raise vmutils.HyperVException(msg)
|
from foam.sfa.util.xrn import urn_to_hrn
from foam.sfa.trust.credential import Credential
from foam.sfa.trust.auth import Auth
class Start:
    """Validate credentials for the 'startslice' operation on a slice.

    NOTE(review): all work happens in __init__ and nothing is stored on
    the instance — callers appear to instantiate this purely for its
    credential-checking side effect (checkCredentials raises on failure).
    """
    def __init__(self, xrn, creds, **kwargs):
        # Renamed the second local from 'type' (shadowed the builtin).
        hrn, obj_type = urn_to_hrn(xrn)
        valid_creds = Auth().checkCredentials(creds, 'startslice', hrn)
        # Parse the first valid credential; kept for its validation side
        # effect even though the caller HRN itself is unused here.
        origin_hrn = Credential(string=valid_creds[0]).get_gid_caller().get_hrn()
|
from datetime import datetime
class AliveSquidsCSV(object):
    """Write per-battle CSV logs: alive squids per team and tower track."""

    def write_record(self, file, record):
        """Append *record* (text) to *file*, best-effort.

        Failures are reported but never raised, so logging cannot crash
        the capture pipeline.
        """
        try:
            # Bug fix: the original called 'csv_file.close' without
            # parentheses, never closing the file; use a context manager.
            with open(file, "a") as csv_file:
                csv_file.write(record)
        except OSError:
            print("CSV: Failed to write CSV File")

    def write_alive_squids_csv(self, context, basename="ikabattle_log", debug=False):
        """Write one CSV per team with a "<tick>, <squid index>" row for
        every alive squid in every livesTrack sample.
        """
        # One buffer per team; the game has two teams.
        csv = ["tick,y\n", "tick,y\n"]
        for sample in context['game']['livesTrack']:
            if debug:
                print('lives sample = %s' % (sample,))
            time = sample[0]
            # Iterate a slice instead of 'del sample[0]' so the shared
            # context data is not mutated.
            for num_team, team in enumerate(sample[1:]):
                for num_squid, alive in enumerate(team, start=1):
                    if alive:
                        csv[num_team] = "%s%d, %d\n" % (
                            csv[num_team], time, num_squid)

        for num_team, f in enumerate(csv):
            self.write_record('%s/%s_team%d.csv' %
                              (self.dest_dir, basename, num_team), f)

    def write_flags_csv(self, context, basename="ikabattle_log", debug=False):
        """Write the tower (flag) position track to a CSV file."""
        # Don't write anything if there is no data.
        if len(context['game']['towerTrack']) == 0:
            return

        # Accumulate rows in a list and join once (avoids quadratic
        # string concatenation).
        rows = ["tick,pos,max,min\n"]
        for sample in context['game']['towerTrack']:
            if debug:
                print('tower sample = %s' % (sample,))
            time = sample[0]
            data = sample[1]
            rows.append("%d, %d, %d, %d\n" %
                        (time, data['pos'], data['max'], data['min']))
        self.write_record('%s/%s_tower.csv' % (self.dest_dir, basename),
                          ''.join(rows))

    def on_game_individual_result(self, context):
        """IkaLog hook: dump both CSV logs with a timestamped basename."""
        t = datetime.now()
        basename = t.strftime("ikabattle_log_%Y%m%d_%H%M")
        self.write_alive_squids_csv(context, basename=basename, debug=self.debug)
        self.write_flags_csv(context, basename=basename, debug=self.debug)

    def __init__(self, dir='./log/', debug=False):
        """Create the writer.

        :param dir: destination directory (relative or absolute path).
        :param debug: when True, echo every processed sample to stdout.
        """
        self.dest_dir = dir
        self.debug = debug
|
import os
import sys
import argparse
import subprocess
import random
from os.path import join as pjoin
DIMENSIONS = '150x150'  # Dimensions of the resized image (<width>x<height>)
GEOMETRY = '+4+4'  # How to arrange images (+<rows>+<columns>)
TO_CREATE_DIRS = ['resized/', 'final/']  # Output subdirectories created by setup()
def setup(output_path):
    """Create any missing output subdirectories under *output_path*."""
    for directory in TO_CREATE_DIRS:
        target = pjoin(output_path, directory)
        if not os.path.exists(target):
            os.makedirs(target)
def get_logo_files(input_path):
    """Return paths of provider logo PNGs in *input_path*.

    Previously resized files (containing 'resized') are skipped.
    """
    candidates = os.listdir(input_path)
    selected = [name for name in candidates
                if 'resized' not in name and name.endswith('png')]
    return [pjoin(input_path, name) for name in selected]
def resize_images(logo_files, output_path):
    """Resize each logo with ImageMagick's `convert`; return the new paths."""
    resized_images = []
    for logo_file in logo_files:
        name, ext = os.path.splitext(os.path.basename(logo_file))
        out_name = pjoin(output_path, 'resized/', '%s%s' % (name, ext))

        print('Resizing image: %(name)s' % {'name': logo_file})
        cmd = 'convert %(name)s -resize %(dimensions)s %(out_name)s' % {
            'name': logo_file, 'out_name': out_name,
            'dimensions': DIMENSIONS}
        subprocess.call(cmd, shell=True)
        resized_images.append(out_name)
    return resized_images
def assemble_final_image(resized_images, output_path):
    """Montage the resized logos (in random order) into one final image."""
    final_name = pjoin(output_path, 'final/logos.png')
    random.shuffle(resized_images)

    cmd = 'montage %(images)s -geometry %(geometry)s %(out_name)s' % {
        'images': ' '.join(resized_images), 'geometry': GEOMETRY,
        'out_name': final_name}
    print('Generating final image: %(name)s' % {'name': final_name})
    subprocess.call(cmd, shell=True)
def main(input_path, output_path):
    """Run the resize-and-montage pipeline for the given directories."""
    for path in (input_path, output_path):
        if not os.path.exists(path):
            print('Path doesn\'t exist: %s' % (path))
            sys.exit(2)

    logo_files = get_logo_files(input_path=input_path)
    setup(output_path=output_path)
    resized = resize_images(logo_files=logo_files, output_path=output_path)
    assemble_final_image(resized_images=resized, output_path=output_path)
if __name__ == '__main__':
    parser = argparse.ArgumentParser(description='Assemble provider logos '
                                     ' in a single image')
    # required=True: previously a missing argument crashed later with
    # TypeError in os.path.abspath(None) instead of a clean usage error.
    parser.add_argument('--input-path', action='store', required=True,
                        help='Path to directory which contains provider '
                             'logo files')
    parser.add_argument('--output-path', action='store', required=True,
                        help='Path where the new files will be written')
    args = parser.parse_args()

    main(input_path=os.path.abspath(args.input_path),
         output_path=os.path.abspath(args.output_path))
|
ο»Ώ# QUANTCONNECT.COM - Democratizing Finance, Empowering Individuals.
from clr import AddReference
AddReference("System.Core")
AddReference("QuantConnect.Common")
AddReference("QuantConnect.Algorithm")
from System import *
from QuantConnect import *
from QuantConnect.Algorithm import QCAlgorithm
from QuantConnect.Data.UniverseSelection import *
class CoarseFundamentalTop3Algorithm(QCAlgorithm):
    '''Hold an equal-weighted portfolio of the top 3 symbols by daily dollar volume.'''

    def Initialize(self):
        '''Set dates, cash and the coarse-selection universe.'''
        self.SetStartDate(2014,3,24)
        self.SetEndDate(2014,4,7)
        self.SetCash(50000)

        # Data *added* to the universe is subscribed at daily resolution.
        self.UniverseSettings.Resolution = Resolution.Daily

        # AddUniverse takes a function mapping IEnumerable<CoarseFundamental>
        # to IEnumerable<Symbol>.
        self.AddUniverse(self.CoarseSelectionFunction)

        self.__numberOfSymbols = 3
        self._changes = None

    def CoarseSelectionFunction(self, coarse):
        '''Return the symbols of the top entries ranked by daily dollar volume.'''
        ranked = sorted(coarse, key=lambda cf: cf.DollarVolume, reverse=True)
        return [cf.Symbol for cf in ranked[:self.__numberOfSymbols]]

    def OnData(self, data):
        self.Log(f"OnData({self.UtcTime}): Keys: {', '.join([key.Value for key in data.Keys])}")

        changes = self._changes
        # Nothing to do until the universe actually changed.
        if changes is None:
            return

        # Liquidate securities that left the universe.
        for security in changes.RemovedSecurities:
            if security.Invested:
                self.Liquidate(security.Symbol)

        # Allocate 1/N to each security entering the universe.
        for security in changes.AddedSecurities:
            self.SetHoldings(security.Symbol, 1 / self.__numberOfSymbols)

        self._changes = None

    def OnSecuritiesChanged(self, changes):
        '''Record universe changes so OnData can act on them.'''
        self._changes = changes
        self.Log(f"OnSecuritiesChanged({self.UtcTime}):: {changes}")

    def OnOrderEvent(self, fill):
        self.Log(f"OnOrderEvent({self.UtcTime}):: {fill}")
|
import json
import os
import re
import cherrypy
import mako
from girder import constants
from girder.models.setting import Setting
from girder.settings import SettingKey
from girder.utility import config
class WebrootBase:
    """
    Serve a rendered template file in response to GET requests.

    This will typically be the base class of any non-API endpoints.
    """
    exposed = True

    def __init__(self, templatePath):
        self.vars = {}
        self.config = config.getConfig()
        self._templateDirs = []
        self.setTemplatePath(templatePath)

    def updateHtmlVars(self, vars):
        """
        Merge *vars* into the set of variables used to render the template.
        """
        self.vars.update(vars)

    def setTemplatePath(self, templatePath):
        """
        Render the given template file instead of the default template.

        The default template remains available so custom templates can
        inherit from it: save the previous ``templateFilename`` before
        calling this, pass it to the custom template via
        ``updateHtmlVars()``, and reference it in an
        ``<%inherit file="${context.get('defaultTemplateFilename')}"/>``
        directive.
        """
        templateDir, templateFilename = os.path.split(templatePath)
        self._templateDirs.append(templateDir)
        self.templateFilename = templateFilename

        # Drop the cached TemplateLookup; it is re-created lazily on the
        # next GET, so it always reflects the latest template directories.
        self._templateLookup = None

    @staticmethod
    def _escapeJavascript(string):
        # Per OWASP's XSS prevention cheat sheet, replace every character
        # that is not an ASCII letter or digit with its \uXXXX escape:
        # https://developer.mozilla.org/en-US/docs/Web/JavaScript/Reference/Lexical_grammar#Unicode_escape_sequences
        def _escape(match):
            return '\\u%04X' % ord(match.group())
        return re.sub(r'[^a-zA-Z0-9]', _escape, string)

    def _renderHTML(self):
        if self._templateLookup is None:
            self._templateLookup = mako.lookup.TemplateLookup(
                directories=self._templateDirs)
        template = self._templateLookup.get_template(self.templateFilename)
        return template.render(js=self._escapeJavascript,
                               json=json.dumps, **self.vars)

    def GET(self, **params):
        return self._renderHTML()

    def DELETE(self, **params):
        raise cherrypy.HTTPError(405)

    def PATCH(self, **params):
        raise cherrypy.HTTPError(405)

    def POST(self, **params):
        raise cherrypy.HTTPError(405)

    def PUT(self, **params):
        raise cherrypy.HTTPError(405)
class Webroot(WebrootBase):
    """
    The webroot endpoint simply serves the main index HTML file.
    """

    def __init__(self, templatePath=None):
        super().__init__(templatePath or os.path.join(
            constants.PACKAGE_DIR, 'utility', 'webroot.mako'))
        self.vars = {}

    def _renderHTML(self):
        from girder.utility import server
        from girder.plugin import loadedPlugins

        plugins = loadedPlugins()
        builtDir = os.path.join(constants.STATIC_ROOT_DIR, 'built', 'plugins')
        # Only advertise plugin assets that were actually built.
        pluginCss = [
            plugin for plugin in plugins
            if os.path.exists(os.path.join(builtDir, plugin, 'plugin.min.css'))]
        pluginJs = [
            plugin for plugin in plugins
            if os.path.exists(os.path.join(builtDir, plugin, 'plugin.min.js'))]

        self.vars.update({
            'plugins': plugins,
            'pluginCss': pluginCss,
            'pluginJs': pluginJs,
            'apiRoot': server.getApiRoot(),
            'staticPublicPath': server.getStaticPublicPath(),
            'brandName': Setting().get(SettingKey.BRAND_NAME),
            'contactEmail': Setting().get(SettingKey.CONTACT_EMAIL_ADDRESS),
            'privacyNoticeHref': Setting().get(SettingKey.PRIVACY_NOTICE),
            'bannerColor': Setting().get(SettingKey.BANNER_COLOR),
            'registrationPolicy': Setting().get(SettingKey.REGISTRATION_POLICY),
            'enablePasswordLogin': Setting().get(SettingKey.ENABLE_PASSWORD_LOGIN),
        })

        return super()._renderHTML()
|
from sahara.service.edp import base_engine
from sahara.utils import edp
class FakeJobEngine(base_engine.JobEngine):
    """A no-op EDP job engine used for testing.

    Every operation either does nothing or reports immediate success.
    """

    def cancel_job(self, job_execution):
        """Nothing to cancel for a fake job."""

    def get_job_status(self, job_execution):
        """Fake jobs expose no status."""

    def run_job(self, job_execution):
        """Report that the job ran and succeeded immediately."""
        return 'engine_job_id', edp.JOB_STATUS_SUCCEEDED, None

    def run_scheduled_job(self, job_execution):
        """Scheduled runs are a no-op."""

    def validate_job_execution(self, cluster, job, data):
        """Every job execution is considered valid."""

    @staticmethod
    def get_possible_job_config(job_type):
        """No configuration hints are offered for any job type."""
        return None

    @staticmethod
    def get_supported_job_types():
        """All known EDP job types are 'supported'."""
        return edp.JOB_TYPES_ALL
|
from heat.common.i18n import _
from heat.engine import attributes
from heat.engine import properties
from heat.engine.resources.openstack.neutron import neutron
from heat.engine import support
class Net(neutron.NeutronResource):
    """A Heat resource for managing Neutron networks."""

    PROPERTIES = (
        NAME, VALUE_SPECS, ADMIN_STATE_UP, TENANT_ID, SHARED,
        DHCP_AGENT_IDS, PORT_SECURITY_ENABLED,
    ) = (
        'name', 'value_specs', 'admin_state_up', 'tenant_id', 'shared',
        'dhcp_agent_ids', 'port_security_enabled',
    )

    ATTRIBUTES = (
        STATUS, NAME_ATTR, SUBNETS, ADMIN_STATE_UP_ATTR, TENANT_ID_ATTR,
        PORT_SECURITY_ENABLED_ATTR, MTU_ATTR,
    ) = (
        "status", "name", "subnets", "admin_state_up", "tenant_id",
        "port_security_enabled", "mtu",
    )

    properties_schema = {
        NAME: properties.Schema(
            properties.Schema.STRING,
            _('A string specifying a symbolic name for the network, which is '
              'not required to be unique.'),
            update_allowed=True
        ),
        VALUE_SPECS: properties.Schema(
            properties.Schema.MAP,
            _('Extra parameters to include in the "network" object in the '
              'creation request. Parameters are often specific to installed '
              'hardware or extensions.'),
            default={},
            update_allowed=True
        ),
        ADMIN_STATE_UP: properties.Schema(
            properties.Schema.BOOLEAN,
            _('A boolean value specifying the administrative status of the '
              'network.'),
            default=True,
            update_allowed=True
        ),
        TENANT_ID: properties.Schema(
            properties.Schema.STRING,
            _('The ID of the tenant which will own the network. Only '
              'administrative users can set the tenant identifier; this '
              'cannot be changed using authorization policies.')
        ),
        SHARED: properties.Schema(
            properties.Schema.BOOLEAN,
            _('Whether this network should be shared across all tenants. '
              'Note that the default policy setting restricts usage of this '
              'attribute to administrative users only.'),
            default=False,
            update_allowed=True
        ),
        DHCP_AGENT_IDS: properties.Schema(
            properties.Schema.LIST,
            _('The IDs of the DHCP agent to schedule the network. Note that '
              'the default policy setting in Neutron restricts usage of this '
              'property to administrative users only.'),
            update_allowed=True
        ),
        PORT_SECURITY_ENABLED: properties.Schema(
            properties.Schema.BOOLEAN,
            _('Flag to enable/disable port security on the network. It '
              'provides the default value for the attribute of the ports '
              'created on this network'),
            update_allowed=True,
            support_status=support.SupportStatus(version='5.0.0')
        ),
    }

    attributes_schema = {
        STATUS: attributes.Schema(
            _("The status of the network."),
            type=attributes.Schema.STRING
        ),
        NAME_ATTR: attributes.Schema(
            _("The name of the network."),
            type=attributes.Schema.STRING
        ),
        SUBNETS: attributes.Schema(
            _("Subnets of this network."),
            type=attributes.Schema.LIST
        ),
        ADMIN_STATE_UP_ATTR: attributes.Schema(
            _("The administrative status of the network."),
            type=attributes.Schema.STRING
        ),
        TENANT_ID_ATTR: attributes.Schema(
            _("The tenant owning this network."),
            type=attributes.Schema.STRING
        ),
        PORT_SECURITY_ENABLED_ATTR: attributes.Schema(
            _("Port security enabled of the network."),
            support_status=support.SupportStatus(version='5.0.0'),
            type=attributes.Schema.BOOLEAN
        ),
        MTU_ATTR: attributes.Schema(
            _("The maximum transmission unit size(in bytes) for the network."),
            support_status=support.SupportStatus(version='5.0.0'),
            type=attributes.Schema.INTEGER
        ),
    }

    def handle_create(self):
        """Create the network and optionally schedule it on DHCP agents."""
        props = self.prepare_properties(
            self.properties,
            self.physical_resource_name())
        # dhcp_agent_ids is handled out-of-band, not part of the network body.
        dhcp_agent_ids = props.pop(self.DHCP_AGENT_IDS, None)
        net = self.neutron().create_network({'network': props})['network']
        self.resource_id_set(net['id'])

        if dhcp_agent_ids:
            self._replace_dhcp_agents(dhcp_agent_ids)

    def _show_resource(self):
        return self.neutron().show_network(
            self.resource_id)['network']

    def check_create_complete(self, *args):
        # Local renamed from 'attributes', which shadowed the imported module.
        attrs = self._show_resource()
        return self.is_built(attrs)

    def handle_delete(self):
        client = self.neutron()
        try:
            client.delete_network(self.resource_id)
        except Exception as ex:
            # Deleting an already-gone network is a success.
            self.client_plugin().ignore_not_found(ex)
        else:
            return True

    def handle_update(self, json_snippet, tmpl_diff, prop_diff):
        props = self.prepare_update_properties(json_snippet)
        dhcp_agent_ids = props.pop(self.DHCP_AGENT_IDS, None)

        if self.DHCP_AGENT_IDS in prop_diff:
            if dhcp_agent_ids is not None:
                self._replace_dhcp_agents(dhcp_agent_ids)
            del prop_diff[self.DHCP_AGENT_IDS]

        if prop_diff:
            self.neutron().update_network(
                self.resource_id, {'network': props})

    def check_update_complete(self, *args):
        # Local renamed from 'attributes', which shadowed the imported module.
        attrs = self._show_resource()
        return self.is_built(attrs)

    def _replace_dhcp_agents(self, dhcp_agent_ids):
        """Reconcile the set of DHCP agents hosting this network."""
        ret = self.neutron().list_dhcp_agent_hosting_networks(
            self.resource_id)
        old = {agent['id'] for agent in ret['agents']}
        new = set(dhcp_agent_ids)

        for dhcp_agent_id in new - old:
            try:
                self.neutron().add_network_to_dhcp_agent(
                    dhcp_agent_id, {'network_id': self.resource_id})
            except Exception as ex:
                # if 409 is happened, the agent is already associated.
                if not self.client_plugin().is_conflict(ex):
                    raise

        for dhcp_agent_id in old - new:
            try:
                self.neutron().remove_network_from_dhcp_agent(
                    dhcp_agent_id, self.resource_id)
            except Exception as ex:
                # assume 2 patterns about status_code following:
                #  404: the network or agent is already gone
                #  409: the network isn't scheduled by the dhcp_agent
                if not (self.client_plugin().is_conflict(ex) or
                        self.client_plugin().is_not_found(ex)):
                    raise
def resource_mapping():
    """Map Heat resource type names to their implementing classes."""
    mapping = {
        'OS::Neutron::Net': Net,
    }
    return mapping
|
# ASP (answer set programming) encoding of graph 3-colourability for a
# fixed 5-node graph, used as solver test input.
input = """
% Guess colours.
chosenColour(N,C) | notChosenColour(N,C) :- node(N), colour(C).
% At least one color per node.
:- #count{ C : chosenColour(X,C) } > 1, node(X).
:- #count{ C : chosenColour(X,C) } < 1, node(X).
% No two adjacent nodes have the same colour.
:- link(X,Y), X<Y, chosenColour(X,C), chosenColour(Y,C).
node(1).
node(2).
node(3).
node(4).
node(5).
link(1,2).
link(2,1).
link(1,3).
link(3,1).
link(2,3).
link(3,2).
link(3,5).
link(5,3).
link(4,5).
link(5,4).
colour(red0).
colour(green0).
colour(blue0).
"""
# Expected answer sets (one model per line) for the program above.
output = """
{chosenColour(1,blue0), chosenColour(2,green0), chosenColour(3,red0), chosenColour(4,blue0), chosenColour(5,green0), colour(blue0), colour(green0), colour(red0), link(1,2), link(1,3), link(2,1), link(2,3), link(3,1), link(3,2), link(3,5), link(4,5), link(5,3), link(5,4), node(1), node(2), node(3), node(4), node(5), notChosenColour(1,green0), notChosenColour(1,red0), notChosenColour(2,blue0), notChosenColour(2,red0), notChosenColour(3,blue0), notChosenColour(3,green0), notChosenColour(4,green0), notChosenColour(4,red0), notChosenColour(5,blue0), notChosenColour(5,red0)}
{chosenColour(1,blue0), chosenColour(2,green0), chosenColour(3,red0), chosenColour(4,green0), chosenColour(5,blue0), colour(blue0), colour(green0), colour(red0), link(1,2), link(1,3), link(2,1), link(2,3), link(3,1), link(3,2), link(3,5), link(4,5), link(5,3), link(5,4), node(1), node(2), node(3), node(4), node(5), notChosenColour(1,green0), notChosenColour(1,red0), notChosenColour(2,blue0), notChosenColour(2,red0), notChosenColour(3,blue0), notChosenColour(3,green0), notChosenColour(4,blue0), notChosenColour(4,red0), notChosenColour(5,green0), notChosenColour(5,red0)}
{chosenColour(1,blue0), chosenColour(2,green0), chosenColour(3,red0), chosenColour(4,red0), chosenColour(5,blue0), colour(blue0), colour(green0), colour(red0), link(1,2), link(1,3), link(2,1), link(2,3), link(3,1), link(3,2), link(3,5), link(4,5), link(5,3), link(5,4), node(1), node(2), node(3), node(4), node(5), notChosenColour(1,green0), notChosenColour(1,red0), notChosenColour(2,blue0), notChosenColour(2,red0), notChosenColour(3,blue0), notChosenColour(3,green0), notChosenColour(4,blue0), notChosenColour(4,green0), notChosenColour(5,green0), notChosenColour(5,red0)}
{chosenColour(1,blue0), chosenColour(2,green0), chosenColour(3,red0), chosenColour(4,red0), chosenColour(5,green0), colour(blue0), colour(green0), colour(red0), link(1,2), link(1,3), link(2,1), link(2,3), link(3,1), link(3,2), link(3,5), link(4,5), link(5,3), link(5,4), node(1), node(2), node(3), node(4), node(5), notChosenColour(1,green0), notChosenColour(1,red0), notChosenColour(2,blue0), notChosenColour(2,red0), notChosenColour(3,blue0), notChosenColour(3,green0), notChosenColour(4,blue0), notChosenColour(4,green0), notChosenColour(5,blue0), notChosenColour(5,red0)}
{chosenColour(1,blue0), chosenColour(2,red0), chosenColour(3,green0), chosenColour(4,blue0), chosenColour(5,red0), colour(blue0), colour(green0), colour(red0), link(1,2), link(1,3), link(2,1), link(2,3), link(3,1), link(3,2), link(3,5), link(4,5), link(5,3), link(5,4), node(1), node(2), node(3), node(4), node(5), notChosenColour(1,green0), notChosenColour(1,red0), notChosenColour(2,blue0), notChosenColour(2,green0), notChosenColour(3,blue0), notChosenColour(3,red0), notChosenColour(4,green0), notChosenColour(4,red0), notChosenColour(5,blue0), notChosenColour(5,green0)}
{chosenColour(1,blue0), chosenColour(2,red0), chosenColour(3,green0), chosenColour(4,green0), chosenColour(5,blue0), colour(blue0), colour(green0), colour(red0), link(1,2), link(1,3), link(2,1), link(2,3), link(3,1), link(3,2), link(3,5), link(4,5), link(5,3), link(5,4), node(1), node(2), node(3), node(4), node(5), notChosenColour(1,green0), notChosenColour(1,red0), notChosenColour(2,blue0), notChosenColour(2,green0), notChosenColour(3,blue0), notChosenColour(3,red0), notChosenColour(4,blue0), notChosenColour(4,red0), notChosenColour(5,green0), notChosenColour(5,red0)}
{chosenColour(1,blue0), chosenColour(2,red0), chosenColour(3,green0), chosenColour(4,green0), chosenColour(5,red0), colour(blue0), colour(green0), colour(red0), link(1,2), link(1,3), link(2,1), link(2,3), link(3,1), link(3,2), link(3,5), link(4,5), link(5,3), link(5,4), node(1), node(2), node(3), node(4), node(5), notChosenColour(1,green0), notChosenColour(1,red0), notChosenColour(2,blue0), notChosenColour(2,green0), notChosenColour(3,blue0), notChosenColour(3,red0), notChosenColour(4,blue0), notChosenColour(4,red0), notChosenColour(5,blue0), notChosenColour(5,green0)}
{chosenColour(1,blue0), chosenColour(2,red0), chosenColour(3,green0), chosenColour(4,red0), chosenColour(5,blue0), colour(blue0), colour(green0), colour(red0), link(1,2), link(1,3), link(2,1), link(2,3), link(3,1), link(3,2), link(3,5), link(4,5), link(5,3), link(5,4), node(1), node(2), node(3), node(4), node(5), notChosenColour(1,green0), notChosenColour(1,red0), notChosenColour(2,blue0), notChosenColour(2,green0), notChosenColour(3,blue0), notChosenColour(3,red0), notChosenColour(4,blue0), notChosenColour(4,green0), notChosenColour(5,green0), notChosenColour(5,red0)}
{chosenColour(1,green0), chosenColour(2,blue0), chosenColour(3,red0), chosenColour(4,blue0), chosenColour(5,green0), colour(blue0), colour(green0), colour(red0), link(1,2), link(1,3), link(2,1), link(2,3), link(3,1), link(3,2), link(3,5), link(4,5), link(5,3), link(5,4), node(1), node(2), node(3), node(4), node(5), notChosenColour(1,blue0), notChosenColour(1,red0), notChosenColour(2,green0), notChosenColour(2,red0), notChosenColour(3,blue0), notChosenColour(3,green0), notChosenColour(4,green0), notChosenColour(4,red0), notChosenColour(5,blue0), notChosenColour(5,red0)}
{chosenColour(1,green0), chosenColour(2,blue0), chosenColour(3,red0), chosenColour(4,green0), chosenColour(5,blue0), colour(blue0), colour(green0), colour(red0), link(1,2), link(1,3), link(2,1), link(2,3), link(3,1), link(3,2), link(3,5), link(4,5), link(5,3), link(5,4), node(1), node(2), node(3), node(4), node(5), notChosenColour(1,blue0), notChosenColour(1,red0), notChosenColour(2,green0), notChosenColour(2,red0), notChosenColour(3,blue0), notChosenColour(3,green0), notChosenColour(4,blue0), notChosenColour(4,red0), notChosenColour(5,green0), notChosenColour(5,red0)}
{chosenColour(1,green0), chosenColour(2,blue0), chosenColour(3,red0), chosenColour(4,red0), chosenColour(5,blue0), colour(blue0), colour(green0), colour(red0), link(1,2), link(1,3), link(2,1), link(2,3), link(3,1), link(3,2), link(3,5), link(4,5), link(5,3), link(5,4), node(1), node(2), node(3), node(4), node(5), notChosenColour(1,blue0), notChosenColour(1,red0), notChosenColour(2,green0), notChosenColour(2,red0), notChosenColour(3,blue0), notChosenColour(3,green0), notChosenColour(4,blue0), notChosenColour(4,green0), notChosenColour(5,green0), notChosenColour(5,red0)}
{chosenColour(1,green0), chosenColour(2,blue0), chosenColour(3,red0), chosenColour(4,red0), chosenColour(5,green0), colour(blue0), colour(green0), colour(red0), link(1,2), link(1,3), link(2,1), link(2,3), link(3,1), link(3,2), link(3,5), link(4,5), link(5,3), link(5,4), node(1), node(2), node(3), node(4), node(5), notChosenColour(1,blue0), notChosenColour(1,red0), notChosenColour(2,green0), notChosenColour(2,red0), notChosenColour(3,blue0), notChosenColour(3,green0), notChosenColour(4,blue0), notChosenColour(4,green0), notChosenColour(5,blue0), notChosenColour(5,red0)}
{chosenColour(1,green0), chosenColour(2,red0), chosenColour(3,blue0), chosenColour(4,blue0), chosenColour(5,green0), colour(blue0), colour(green0), colour(red0), link(1,2), link(1,3), link(2,1), link(2,3), link(3,1), link(3,2), link(3,5), link(4,5), link(5,3), link(5,4), node(1), node(2), node(3), node(4), node(5), notChosenColour(1,blue0), notChosenColour(1,red0), notChosenColour(2,blue0), notChosenColour(2,green0), notChosenColour(3,green0), notChosenColour(3,red0), notChosenColour(4,green0), notChosenColour(4,red0), notChosenColour(5,blue0), notChosenColour(5,red0)}
{chosenColour(1,green0), chosenColour(2,red0), chosenColour(3,blue0), chosenColour(4,blue0), chosenColour(5,red0), colour(blue0), colour(green0), colour(red0), link(1,2), link(1,3), link(2,1), link(2,3), link(3,1), link(3,2), link(3,5), link(4,5), link(5,3), link(5,4), node(1), node(2), node(3), node(4), node(5), notChosenColour(1,blue0), notChosenColour(1,red0), notChosenColour(2,blue0), notChosenColour(2,green0), notChosenColour(3,green0), notChosenColour(3,red0), notChosenColour(4,green0), notChosenColour(4,red0), notChosenColour(5,blue0), notChosenColour(5,green0)}
{chosenColour(1,green0), chosenColour(2,red0), chosenColour(3,blue0), chosenColour(4,green0), chosenColour(5,red0), colour(blue0), colour(green0), colour(red0), link(1,2), link(1,3), link(2,1), link(2,3), link(3,1), link(3,2), link(3,5), link(4,5), link(5,3), link(5,4), node(1), node(2), node(3), node(4), node(5), notChosenColour(1,blue0), notChosenColour(1,red0), notChosenColour(2,blue0), notChosenColour(2,green0), notChosenColour(3,green0), notChosenColour(3,red0), notChosenColour(4,blue0), notChosenColour(4,red0), notChosenColour(5,blue0), notChosenColour(5,green0)}
{chosenColour(1,green0), chosenColour(2,red0), chosenColour(3,blue0), chosenColour(4,red0), chosenColour(5,green0), colour(blue0), colour(green0), colour(red0), link(1,2), link(1,3), link(2,1), link(2,3), link(3,1), link(3,2), link(3,5), link(4,5), link(5,3), link(5,4), node(1), node(2), node(3), node(4), node(5), notChosenColour(1,blue0), notChosenColour(1,red0), notChosenColour(2,blue0), notChosenColour(2,green0), notChosenColour(3,green0), notChosenColour(3,red0), notChosenColour(4,blue0), notChosenColour(4,green0), notChosenColour(5,blue0), notChosenColour(5,red0)}
{chosenColour(1,red0), chosenColour(2,blue0), chosenColour(3,green0), chosenColour(4,blue0), chosenColour(5,red0), colour(blue0), colour(green0), colour(red0), link(1,2), link(1,3), link(2,1), link(2,3), link(3,1), link(3,2), link(3,5), link(4,5), link(5,3), link(5,4), node(1), node(2), node(3), node(4), node(5), notChosenColour(1,blue0), notChosenColour(1,green0), notChosenColour(2,green0), notChosenColour(2,red0), notChosenColour(3,blue0), notChosenColour(3,red0), notChosenColour(4,green0), notChosenColour(4,red0), notChosenColour(5,blue0), notChosenColour(5,green0)}
{chosenColour(1,red0), chosenColour(2,blue0), chosenColour(3,green0), chosenColour(4,green0), chosenColour(5,blue0), colour(blue0), colour(green0), colour(red0), link(1,2), link(1,3), link(2,1), link(2,3), link(3,1), link(3,2), link(3,5), link(4,5), link(5,3), link(5,4), node(1), node(2), node(3), node(4), node(5), notChosenColour(1,blue0), notChosenColour(1,green0), notChosenColour(2,green0), notChosenColour(2,red0), notChosenColour(3,blue0), notChosenColour(3,red0), notChosenColour(4,blue0), notChosenColour(4,red0), notChosenColour(5,green0), notChosenColour(5,red0)}
{chosenColour(1,red0), chosenColour(2,blue0), chosenColour(3,green0), chosenColour(4,green0), chosenColour(5,red0), colour(blue0), colour(green0), colour(red0), link(1,2), link(1,3), link(2,1), link(2,3), link(3,1), link(3,2), link(3,5), link(4,5), link(5,3), link(5,4), node(1), node(2), node(3), node(4), node(5), notChosenColour(1,blue0), notChosenColour(1,green0), notChosenColour(2,green0), notChosenColour(2,red0), notChosenColour(3,blue0), notChosenColour(3,red0), notChosenColour(4,blue0), notChosenColour(4,red0), notChosenColour(5,blue0), notChosenColour(5,green0)}
{chosenColour(1,red0), chosenColour(2,blue0), chosenColour(3,green0), chosenColour(4,red0), chosenColour(5,blue0), colour(blue0), colour(green0), colour(red0), link(1,2), link(1,3), link(2,1), link(2,3), link(3,1), link(3,2), link(3,5), link(4,5), link(5,3), link(5,4), node(1), node(2), node(3), node(4), node(5), notChosenColour(1,blue0), notChosenColour(1,green0), notChosenColour(2,green0), notChosenColour(2,red0), notChosenColour(3,blue0), notChosenColour(3,red0), notChosenColour(4,blue0), notChosenColour(4,green0), notChosenColour(5,green0), notChosenColour(5,red0)}
{chosenColour(1,red0), chosenColour(2,green0), chosenColour(3,blue0), chosenColour(4,blue0), chosenColour(5,green0), colour(blue0), colour(green0), colour(red0), link(1,2), link(1,3), link(2,1), link(2,3), link(3,1), link(3,2), link(3,5), link(4,5), link(5,3), link(5,4), node(1), node(2), node(3), node(4), node(5), notChosenColour(1,blue0), notChosenColour(1,green0), notChosenColour(2,blue0), notChosenColour(2,red0), notChosenColour(3,green0), notChosenColour(3,red0), notChosenColour(4,green0), notChosenColour(4,red0), notChosenColour(5,blue0), notChosenColour(5,red0)}
{chosenColour(1,red0), chosenColour(2,green0), chosenColour(3,blue0), chosenColour(4,blue0), chosenColour(5,red0), colour(blue0), colour(green0), colour(red0), link(1,2), link(1,3), link(2,1), link(2,3), link(3,1), link(3,2), link(3,5), link(4,5), link(5,3), link(5,4), node(1), node(2), node(3), node(4), node(5), notChosenColour(1,blue0), notChosenColour(1,green0), notChosenColour(2,blue0), notChosenColour(2,red0), notChosenColour(3,green0), notChosenColour(3,red0), notChosenColour(4,green0), notChosenColour(4,red0), notChosenColour(5,blue0), notChosenColour(5,green0)}
{chosenColour(1,red0), chosenColour(2,green0), chosenColour(3,blue0), chosenColour(4,green0), chosenColour(5,red0), colour(blue0), colour(green0), colour(red0), link(1,2), link(1,3), link(2,1), link(2,3), link(3,1), link(3,2), link(3,5), link(4,5), link(5,3), link(5,4), node(1), node(2), node(3), node(4), node(5), notChosenColour(1,blue0), notChosenColour(1,green0), notChosenColour(2,blue0), notChosenColour(2,red0), notChosenColour(3,green0), notChosenColour(3,red0), notChosenColour(4,blue0), notChosenColour(4,red0), notChosenColour(5,blue0), notChosenColour(5,green0)}
{chosenColour(1,red0), chosenColour(2,green0), chosenColour(3,blue0), chosenColour(4,red0), chosenColour(5,green0), colour(blue0), colour(green0), colour(red0), link(1,2), link(1,3), link(2,1), link(2,3), link(3,1), link(3,2), link(3,5), link(4,5), link(5,3), link(5,4), node(1), node(2), node(3), node(4), node(5), notChosenColour(1,blue0), notChosenColour(1,green0), notChosenColour(2,blue0), notChosenColour(2,red0), notChosenColour(3,green0), notChosenColour(3,red0), notChosenColour(4,blue0), notChosenColour(4,green0), notChosenColour(5,blue0), notChosenColour(5,red0)}
"""
|
"""
Unit tests for :py:obj:`OpenSSL.rand`.
"""
from unittest import main
import os
import stat
from OpenSSL.test.util import TestCase, b
from OpenSSL import rand
class RandTests(TestCase):
    """Tests for the wrappers around the OpenSSL PRNG in :py:mod:`OpenSSL.rand`."""

    def test_bytes_wrong_args(self):
        """
        :py:obj:`OpenSSL.rand.bytes` raises :py:obj:`TypeError` if called with the wrong
        number of arguments or with a non-:py:obj:`int` argument.
        """
        self.assertRaises(TypeError, rand.bytes)
        self.assertRaises(TypeError, rand.bytes, None)
        self.assertRaises(TypeError, rand.bytes, 3, None)
    # XXX Test failure of the malloc() in rand_bytes.

    def test_bytes(self):
        """
        Verify that we can obtain bytes from rand_bytes() and
        that they are different each time. Test the parameter
        of rand_bytes() for bad values.
        """
        b1 = rand.bytes(50)
        self.assertEqual(len(b1), 50)
        b2 = rand.bytes(num_bytes=50)  # parameter by name
        self.assertNotEqual(b1, b2)  # FIPS compliance: consecutive draws must differ
        b3 = rand.bytes(num_bytes=0)
        self.assertEqual(len(b3), 0)
        exc = self.assertRaises(ValueError, rand.bytes, -1)
        self.assertEqual(str(exc), "num_bytes must not be negative")

    def test_add_wrong_args(self):
        """
        When called with the wrong number of arguments, or with arguments not of
        type :py:obj:`str` and :py:obj:`int`, :py:obj:`OpenSSL.rand.add` raises :py:obj:`TypeError`.
        """
        self.assertRaises(TypeError, rand.add)
        self.assertRaises(TypeError, rand.add, b("foo"), None)
        self.assertRaises(TypeError, rand.add, None, 3)
        self.assertRaises(TypeError, rand.add, b("foo"), 3, None)

    def test_add(self):
        """
        :py:obj:`OpenSSL.rand.add` adds entropy to the PRNG.
        """
        rand.add(b('hamburger'), 3)

    def test_seed_wrong_args(self):
        """
        When called with the wrong number of arguments, or with a non-:py:obj:`str`
        argument, :py:obj:`OpenSSL.rand.seed` raises :py:obj:`TypeError`.
        """
        self.assertRaises(TypeError, rand.seed)
        self.assertRaises(TypeError, rand.seed, None)
        self.assertRaises(TypeError, rand.seed, b("foo"), None)

    def test_seed(self):
        """
        :py:obj:`OpenSSL.rand.seed` adds entropy to the PRNG.
        """
        rand.seed(b('milk shake'))

    def test_status_wrong_args(self):
        """
        :py:obj:`OpenSSL.rand.status` raises :py:obj:`TypeError` when called with any
        arguments.
        """
        self.assertRaises(TypeError, rand.status, None)

    def test_status(self):
        """
        :py:obj:`OpenSSL.rand.status` returns :py:obj:`True` if the PRNG has sufficient
        entropy, :py:obj:`False` otherwise.
        """
        # It's hard to know what it is actually going to return. Different
        # OpenSSL random engines decide differently whether they have enough
        # entropy or not.
        # NOTE(review): the docstring describes a True/False result; confirm
        # that the (1, 2) membership set below is intentional.
        self.assertTrue(rand.status() in (1, 2))

    def test_egd_wrong_args(self):
        """
        :py:obj:`OpenSSL.rand.egd` raises :py:obj:`TypeError` when called with the wrong
        number of arguments or with arguments not of type :py:obj:`str` and :py:obj:`int`.
        """
        self.assertRaises(TypeError, rand.egd)
        self.assertRaises(TypeError, rand.egd, None)
        self.assertRaises(TypeError, rand.egd, "foo", None)
        self.assertRaises(TypeError, rand.egd, None, 3)
        self.assertRaises(TypeError, rand.egd, "foo", 3, None)

    def test_egd_missing(self):
        """
        :py:obj:`OpenSSL.rand.egd` returns :py:obj:`0` or :py:obj:`-1` if the
        EGD socket passed to it does not exist.
        """
        result = rand.egd(self.mktemp())
        expected = (-1, 0)
        self.assertTrue(
            result in expected,
            "%r not in %r" % (result, expected))

    def test_cleanup_wrong_args(self):
        """
        :py:obj:`OpenSSL.rand.cleanup` raises :py:obj:`TypeError` when called with any
        arguments.
        """
        self.assertRaises(TypeError, rand.cleanup, None)

    def test_cleanup(self):
        """
        :py:obj:`OpenSSL.rand.cleanup` releases the memory used by the PRNG and returns
        :py:obj:`None`.
        """
        self.assertIdentical(rand.cleanup(), None)

    def test_load_file_wrong_args(self):
        """
        :py:obj:`OpenSSL.rand.load_file` raises :py:obj:`TypeError` when called the wrong
        number of arguments or arguments not of type :py:obj:`str` and :py:obj:`int`.
        """
        self.assertRaises(TypeError, rand.load_file)
        self.assertRaises(TypeError, rand.load_file, "foo", None)
        self.assertRaises(TypeError, rand.load_file, None, 1)
        self.assertRaises(TypeError, rand.load_file, "foo", 1, None)

    def test_write_file_wrong_args(self):
        """
        :py:obj:`OpenSSL.rand.write_file` raises :py:obj:`TypeError` when called with the
        wrong number of arguments or a non-:py:obj:`str` argument.
        """
        self.assertRaises(TypeError, rand.write_file)
        self.assertRaises(TypeError, rand.write_file, None)
        self.assertRaises(TypeError, rand.write_file, "foo", None)

    def test_files(self):
        """
        Test reading and writing of files via rand functions.
        """
        # Write random bytes to a file
        tmpfile = self.mktemp()
        # Make sure it exists (so cleanup definitely succeeds)
        fObj = open(tmpfile, 'w')
        fObj.close()
        try:
            rand.write_file(tmpfile)
            # Verify length of written file
            size = os.stat(tmpfile)[stat.ST_SIZE]
            # assertEqual: assertEquals is a deprecated unittest alias.
            self.assertEqual(size, 1024)
            # Read random bytes from file
            rand.load_file(tmpfile)
            rand.load_file(tmpfile, 4)  # specify a length
        finally:
            # Cleanup
            os.unlink(tmpfile)
# Allow running this test module directly via unittest.main.
if __name__ == '__main__':
    main()
|
from binascii import hexlify
import mock
import socket
import unittest
from networking_cisco.plugins.cisco.cpnr.cpnr_client import UnexpectedError
from networking_cisco.plugins.cisco.cpnr.cpnr_dns_relay_agent import (
DnsRelayAgent)
from networking_cisco.plugins.cisco.cpnr.cpnr_dns_relay_agent import cfg
from networking_cisco.plugins.cisco.cpnr.cpnr_dns_relay_agent import DnsPacket
from networking_cisco.plugins.cisco.cpnr.cpnr_dns_relay_agent import OPTS
class TestDnsRelayAgent(unittest.TestCase):
    """Unit tests for DnsRelayAgent socket setup and namespace/view mapping.

    netns interface listing and the socket module are mocked throughout, so
    no real sockets are opened.
    """

    @mock.patch('networking_cisco.plugins.cisco.'
                'cpnr.cpnr_dns_relay_agent.netns')
    @mock.patch('socket.socket')
    def test_open_dns_ext_socket(self,
                                 mock_socket,
                                 mock_netns):
        # _open_dns_ext_socket should bind to the loopback interface and
        # connect to the upstream DNS port (53).
        cfg.CONF.register_opts(OPTS, 'cisco_pnr')
        relay = DnsRelayAgent()
        mock_netns.iflist.return_value = []
        mock_netns.iflist.return_value.append(('lo', '127.0.0.1', '255.0.0.0'))
        sock = mock_socket.return_value
        sock.getsockname.return_value = ('127.0.0.1', 123456)
        sock, addr, port = relay._open_dns_ext_socket()
        mock_socket.assert_has_calls([
            mock.call(socket.AF_INET, socket.SOCK_DGRAM),
            mock.call().bind(('127.0.0.1', 0)),
            mock.call().getsockname(),
            mock.call().connect(('127.0.0.1', 53))]
        )
        # check exception thrown if no interfaces
        with self.assertRaises(UnexpectedError):
            mock_netns.iflist.return_value = []
            sock, addr, port = relay._open_dns_ext_socket()
        # check exception thrown if no matching interfaces
        with self.assertRaises(UnexpectedError):
            mock_netns.iflist.return_value = []
            mock_netns.iflist.return_value.append(('eth0', '10.0.0.10',
                                                   '255.255.255.0'))
            sock, addr, port = relay._open_dns_ext_socket()
        # check matching interface found if not first in list
        mock_netns.iflist.return_value = []
        mock_netns.iflist.return_value.append(('eth0', '10.0.0.10',
                                               '255.255.255.0'))
        mock_netns.iflist.return_value.append(('lo', '127.0.0.1', '255.0.0.0'))
        sock, addr, port = relay._open_dns_ext_socket()

    @mock.patch('networking_cisco.plugins.cisco.'
                'cpnr.cpnr_dns_relay_agent.netns')
    @mock.patch('socket.socket')
    def test_open_dns_int_socket(self,
                                 mock_socket,
                                 mock_netns):
        # _open_dns_int_socket should bind port 53 on the first interface
        # reported by netns, with SO_REUSEADDR set.
        cfg.CONF.register_opts(OPTS, 'cisco_pnr')
        relay = DnsRelayAgent()
        mock_netns.iflist.return_value = []
        mock_netns.iflist.return_value.append(('eth0', '10.21.1.13',
                                               '255.255.255.0'))
        sock, addr, port = relay._open_dns_int_socket()
        self.assertTrue(mock_netns.iflist.called, "Failed to call iflist.")
        mock_socket.assert_has_calls([
            mock.call(socket.AF_INET, socket.SOCK_DGRAM),
            mock.call().setsockopt(socket.SOL_SOCKET, socket.SO_REUSEADDR, 1),
            mock.call().bind(('10.21.1.13', 53))]
        )
        # check exception thrown if no interfaces
        with self.assertRaises(UnexpectedError):
            mock_netns.iflist.return_value = []
            sock, addr, port = relay._open_dns_int_socket()

    def test_convert_namespace_to_viewid(self):
        # The view id is derived from the low 31 bits of the final segment
        # of the namespace UUID.
        cfg.CONF.register_opts(OPTS, 'cisco_pnr')
        relay = DnsRelayAgent()
        namespace = 'qdhcp-d7c31f74-5d9e-47b7-86f2-64879023c04d'
        viewid = relay._convert_namespace_to_viewid(namespace)
        tmp = 0x64879023c04d & 0x7fffffff
        self.assertEqual(viewid, str(tmp))
class TestDnsPacket(unittest.TestCase):
    """Unit tests for DnsPacket parsing, view-id tagging, and serialization."""

    def test_parse(self):
        """Parse request, EDNS0 request, and response packets."""
        # test regular DNS request
        line = ('84 a5 01 00 00 01 00 00 00 00 00 00 06 72 '
                '65 64 68 61 74 03 63 6f 6d 00 00 01 00 01')
        buf = bytearray.fromhex(line)
        pkt = DnsPacket.parse(buf, 28)
        self.assertEqual(0x84a5, pkt.get_msgid())
        self.assertTrue(pkt.isreq)
        self.assertEqual(0, pkt.arcnt)
        self.assertEqual(0, pkt.optlen)
        self.assertEqual(28, pkt.txt_insert_pos)
        # test DNS request with EDNS0
        line = ('81 71 01 20 00 01 00 00 00 00 00 01 06 72 65 '
                '64 68 61 74 03 63 6f 6d 00 00 01 00 01 00 00 '
                '29 10 00 00 00 00 00 00 00')
        buf = bytearray.fromhex(line)
        pkt = DnsPacket.parse(buf, 38)
        self.assertEqual(0x8171, pkt.get_msgid())
        self.assertTrue(pkt.isreq)
        self.assertEqual(1, pkt.arcnt)
        self.assertEqual(10, pkt.optlen)
        self.assertEqual(28, pkt.txt_insert_pos)
        # test regular DNS response
        line = ('b6 5e 81 80 00 01 00 01 00 00 00 00 06 72 65 '
                '64 68 61 74 03 63 6f 6d 00 00 01 00 01 c0 0c '
                '00 01 00 01 00 00 00 08 00 04 d1 84 b7 69')
        buf = bytearray.fromhex(line)
        pkt = DnsPacket.parse(buf, 44)
        self.assertEqual(0xb65e, pkt.get_msgid())
        self.assertFalse(pkt.isreq)
        self.assertEqual(0, pkt.arcnt)
        self.assertEqual(0, pkt.optlen)
        # responses get no TXT insertion point
        self.assertEqual(-1, pkt.txt_insert_pos)

    def test_set_viewid(self):
        """set_viewid stores the view id string on the packet."""
        pkt = DnsPacket()
        pkt.set_viewid('123456789')
        self.assertEqual(pkt.viewid, '123456789')

    def test_data(self):
        """data() inserts a TXT record and bumps the additional-record count."""
        # call with regular DNS request
        line = ('84 a5 01 00 00 01 00 00 00 00 00 00 06 72 '
                '65 64 68 61 74 03 63 6f 6d 00 00 01 00 01')
        buf = bytearray.fromhex(line)
        pktbuf = bytearray(4096)
        pktbuf[0:len(buf)] = buf
        pkt = DnsPacket.parse(pktbuf, 28)
        pkt.set_viewid('123456')
        mod_buf = pkt.data()
        self.assertEqual(pkt.arcnt, 1)
        hextxtstr = hexlify(DnsPacket.TXT_RR)
        hexstr = hexlify(mod_buf)
        self.assertNotEqual(-1, hexstr.find(hextxtstr))
        # call with DNS request with EDNS0
        line = ('81 71 01 20 00 01 00 00 00 00 00 01 06 72 65 '
                '64 68 61 74 03 63 6f 6d 00 00 01 00 01 00 00 '
                '29 10 00 00 00 00 00 00 00')
        buf = bytearray.fromhex(line)
        pktbuf = bytearray(4096)
        pktbuf[0:len(buf)] = buf
        pkt = DnsPacket.parse(pktbuf, 38)
        pkt.set_viewid('123456')
        mod_buf = pkt.data()
        self.assertEqual(2, pkt.arcnt)
        hexstr = hexlify(mod_buf)
        self.assertNotEqual(-1, hexstr.find(hextxtstr))

    def test_skip_over_domain_name(self):
        """skip_over_domain_name advances past labels and compression pointers."""
        # Locals renamed from `bytes` to `raw`: the old name shadowed the
        # builtin `bytes` type.
        # test skip over name at beginning, end up on ^
        # 4test5cisco3com0^
        raw = bytearray(b'\x04\x74\x65\x73\x74\x05\x63\x69\x73\x63'
                        b'\x6f\x03\x63\x6f\x6d\x00\x5e')
        pos = DnsPacket.skip_over_domain_name(raw, 0)
        self.assertEqual(16, pos)
        self.assertEqual('^', chr(raw[pos]))
        # test skip over name in the middle, end up on ^
        # 2552552552554test5cisco3com0^
        raw = bytearray(b'\xff\xff\xff\xff\x04\x74\x65\x73\x74\x05\x63'
                        b'\x69\x73\x63\x6f\x03\x63\x6f\x6d\x00\x5e')
        pos = DnsPacket.skip_over_domain_name(raw, 4)
        self.assertEqual(20, pos)
        self.assertEqual('^', chr(raw[pos]))
        # test skip over length and pointer at beginning, end up on ^
        raw = bytearray(b'\xc0\x55\x5e')
        pos = DnsPacket.skip_over_domain_name(raw, 0)
        self.assertEqual(2, pos)
        self.assertEqual('^', chr(raw[pos]))
        # test skip over length and pointer in the middle, end up on ^
        raw = bytearray(b'\xff\xff\xff\xff\xff\xff\xff\xff\xff\xc0\x55\x5e')
        pos = DnsPacket.skip_over_domain_name(raw, 9)
        self.assertEqual(11, pos)
        self.assertEqual('^', chr(raw[pos]))
|
import os

# RT-Thread build configuration for an ARM Cortex-M4 target (read by SCons).
ARCH='arm'
CPU='cortex-m4'
CROSS_TOOL='gcc'
BSP_LIBRARY_TYPE = None

# Environment overrides: RTT_CC selects the toolchain, RTT_ROOT the source tree.
if os.getenv('RTT_CC'):
    CROSS_TOOL = os.getenv('RTT_CC')
if os.getenv('RTT_ROOT'):
    RTT_ROOT = os.getenv('RTT_ROOT')

# Map toolchain name to SCons platform id and default install path.
# NOTE(review): an unrecognized CROSS_TOOL leaves PLATFORM/EXEC_PATH unset,
# which raises NameError below — confirm whether a fallback is wanted.
if CROSS_TOOL == 'gcc':
    PLATFORM = 'gcc'
    EXEC_PATH = r'C:\Users\XXYYZZ'
elif CROSS_TOOL == 'keil':
    PLATFORM = 'armcc'
    EXEC_PATH = r'C:/Keil_v5'
elif CROSS_TOOL == 'iar':
    PLATFORM = 'iar'
    EXEC_PATH = r'C:/Program Files (x86)/IAR Systems/Embedded Workbench 8.0'

# RTT_EXEC_PATH overrides the default toolchain location.
if os.getenv('RTT_EXEC_PATH'):
    EXEC_PATH = os.getenv('RTT_EXEC_PATH')
BUILD = 'debug'

if PLATFORM == 'gcc':
    # toolchains
    PREFIX = 'arm-none-eabi-'
    CC = PREFIX + 'gcc'
    AS = PREFIX + 'gcc'
    AR = PREFIX + 'ar'
    CXX = PREFIX + 'g++'
    LINK = PREFIX + 'gcc'
    TARGET_EXT = 'elf'
    SIZE = PREFIX + 'size'
    OBJDUMP = PREFIX + 'objdump'
    OBJCPY = PREFIX + 'objcopy'
    # Cortex-M4 with single-precision hard-float ABI; per-section placement
    # so the linker can garbage-collect unused code.
    DEVICE = ' -mcpu=cortex-m4 -mthumb -mfpu=fpv4-sp-d16 -mfloat-abi=hard -ffunction-sections -fdata-sections'
    CFLAGS = DEVICE + ' -Dgcc'
    AFLAGS = ' -c' + DEVICE + ' -x assembler-with-cpp -Wa,-mimplicit-it=thumb '
    LFLAGS = DEVICE + ' -Wl,--gc-sections,-Map=rt-thread.map,-cref,-u,Reset_Handler -T board/linker_scripts/link.lds'
    CPATH = ''
    LPATH = ''
    if BUILD == 'debug':
        CFLAGS += ' -O0 -gdwarf-2 -g'
        AFLAGS += ' -gdwarf-2'
    else:
        CFLAGS += ' -O2'
    CXXFLAGS = CFLAGS
    # Produce rtthread.bin and print section sizes after linking.
    POST_ACTION = OBJCPY + ' -O binary $TARGET rtthread.bin\n' + SIZE + ' $TARGET \n'
elif PLATFORM == 'armcc':
    # toolchains
    CC = 'armcc'
    CXX = 'armcc'
    AS = 'armasm'
    AR = 'armar'
    LINK = 'armlink'
    TARGET_EXT = 'axf'
    DEVICE = ' --cpu Cortex-M4.fp '
    CFLAGS = '-c ' + DEVICE + ' --apcs=interwork --c99'
    AFLAGS = DEVICE + ' --apcs=interwork '
    LFLAGS = DEVICE + ' --info sizes --info totals --info unused --info veneers --list rtthread.map --strict --scatter "board\linker_scripts\link.sct"'
    CFLAGS += ' -I' + EXEC_PATH + '/ARM/ARMCC/include'
    LFLAGS += ' --libpath=' + EXEC_PATH + '/ARM/ARMCC/lib'
    # Build against Keil's MicroLIB runtime.
    CFLAGS += ' -D__MICROLIB '
    AFLAGS += ' --pd "__MICROLIB SETA 1" '
    LFLAGS += ' --library_type=microlib '
    EXEC_PATH += '/ARM/ARMCC/bin/'
    if BUILD == 'debug':
        CFLAGS += ' -g -O0'
        AFLAGS += ' -g'
    else:
        CFLAGS += ' -O2'
    CXXFLAGS = CFLAGS
    CFLAGS += ' -std=c99'
    POST_ACTION = 'fromelf --bin $TARGET --output rtthread.bin \nfromelf -z $TARGET'
elif PLATFORM == 'iar':
    # toolchains
    CC = 'iccarm'
    CXX = 'iccarm'
    AS = 'iasmarm'
    AR = 'iarchive'
    LINK = 'ilinkarm'
    TARGET_EXT = 'out'
    DEVICE = '-Dewarm'
    CFLAGS = DEVICE
    CFLAGS += ' --diag_suppress Pa050'
    # Disable IAR optimizations for predictable debug builds.
    CFLAGS += ' --no_cse'
    CFLAGS += ' --no_unroll'
    CFLAGS += ' --no_inline'
    CFLAGS += ' --no_code_motion'
    CFLAGS += ' --no_tbaa'
    CFLAGS += ' --no_clustering'
    CFLAGS += ' --no_scheduling'
    CFLAGS += ' --endian=little'
    CFLAGS += ' --cpu=Cortex-M4'
    CFLAGS += ' -e'
    CFLAGS += ' --fpu=VFPv4_sp'
    CFLAGS += ' --dlib_config "' + EXEC_PATH + '/arm/INC/c/DLib_Config_Normal.h"'
    CFLAGS += ' --silent'
    AFLAGS = DEVICE
    AFLAGS += ' -s+'
    AFLAGS += ' -w+'
    AFLAGS += ' -r'
    AFLAGS += ' --cpu Cortex-M4'
    AFLAGS += ' --fpu VFPv4_sp'
    AFLAGS += ' -S'
    if BUILD == 'debug':
        CFLAGS += ' --debug'
        CFLAGS += ' -On'
    else:
        CFLAGS += ' -Oh'
    LFLAGS = ' --config "board/linker_scripts/link.icf"'
    LFLAGS += ' --entry __iar_program_start'
    CXXFLAGS = CFLAGS
    EXEC_PATH = EXEC_PATH + '/arm/bin/'
    POST_ACTION = 'ielftool --bin $TARGET rtthread.bin'
|
"""Unique operator"""
from tvm import te, tir
from ..te import hybrid
from .scan import cumsum
from .sort import sort, argsort
def _calc_adjacent_diff_ir(data, output, binop=tir.Sub):
    """Build low-level TIR computing the adjacent difference of a 1-D buffer.

    Parameters
    ----------
    data : Buffer
        Input 1-D Buffer.
    output: Buffer
        Buffer receiving the adjacent difference; same shape as ``data``.
        Defined as output[0] = 0 and output[i] = binop(data[i], data[i-1])
        for 0 < i < len(data).
    binop: function, optional
        Binary associative op combining two TIR expressions into the
        per-element result; defaults to tvm.tir.Sub.
    """
    builder = tir.ir_builder.create()
    src = builder.buffer_ptr(data)
    dst = builder.buffer_ptr(output)
    # Parallel elementwise pass; element 0 is pinned to zero by definition.
    with builder.for_range(0, data.shape[0], kind="parallel") as idx:
        with builder.if_scope(idx == 0):
            dst[0] = 0
        with builder.else_scope():
            dst[idx] = tir.Cast(output.dtype, binop(src[idx], src[idx - 1]))
    return builder.get()
def _calc_adjacent_diff(data, out_dtype="int32", binop=tir.Sub):
    """Compute the adjacent difference of a 1-D tensor as an extern op.

    Parameters
    ----------
    data : tvm.te.Tensor
        Input 1-D tensor.
    out_dtype : str
        The output tensor data type.
    binop: function, optional
        Binary associative op used for the difference; takes two TIR
        expressions and returns one. Defaults to tvm.tir.Sub.

    Returns
    -------
    output : tvm.te.Tensor
        1-D tensor with output[0] = 0 and output[i] = binop(data[i], data[i-1])
        for 0 < i < len(data).
    """
    def _build_ir(inputs, outputs):
        # Delegate the actual IR construction to the low-level builder.
        return _calc_adjacent_diff_ir(inputs[0], outputs[0], binop=binop)

    return te.extern(
        [data.shape],
        [data],
        _build_ir,
        dtype=[out_dtype],
        name="_calc_adjacent_diff",
        tag="_calc_adjacent_diff_cpu",
    )
@hybrid.script
def _calc_num_unique(inc_scan):
    """Return a 1-element int32 tensor holding the number of unique elements.

    The last entry of the inclusive scan is the zero-based index of the final
    unique element, so the count is that value plus one.
    """
    output = output_tensor((1,), "int32")
    output[0] = inc_scan[inc_scan.shape[0] - 1] + int32(1)
    return output
def _calc_unique_ir(
    data, argsorted_indices, inc_scan, index_converter, unique_elements, inverse_indices, counts
):
    """Low level IR to calculate unique elements, inverse indices, and counts (optional) of
    unique elements of 1-D array.

    Parameters
    ----------
    data : Buffer
        Input 1-D Buffer.
    argsorted_indices : Buffer
        A buffer that stores the argsorted indices of the input data.
    inc_scan : Buffer
        A buffer that stores the inclusive scan of the binary tir.NE adjacent difference
        of the sorted data.
    index_converter (optional) : Buffer
        An optional index converter that transforms the unique element index
        such that new_idx = index_converter[old_idx].
    unique_elements : Buffer
        A buffer that stores the unique elements.
    inverse_indices : Buffer
        A buffer that stores the index of each input data element in the unique element array.
    counts (optional) : Buffer
        A buffer that stores the count of each unique element.
    """
    ib = tir.ir_builder.create()
    data_ptr = ib.buffer_ptr(data)
    argsorted_indices_ptr = ib.buffer_ptr(argsorted_indices)
    inc_scan_ptr = ib.buffer_ptr(inc_scan)
    unique_elements_ptr = ib.buffer_ptr(unique_elements)
    inverse_indices_ptr = ib.buffer_ptr(inverse_indices)
    index_converter_ptr = None
    if isinstance(index_converter, tir.Buffer):
        index_converter_ptr = ib.buffer_ptr(index_converter)
    if isinstance(counts, tir.Buffer):
        counts_ptr = ib.buffer_ptr(counts)
        # use indices_ptr as a tmp buffer to store tids with inc_scan[tid] != inc_scan[tid-1]
        unique_seq_indices_ptr = ib.buffer_ptr(inverse_indices)
    data_length = data.shape[0]
    # if need to return counts
    if isinstance(counts, tir.Buffer):
        num_unique = inc_scan_ptr[inc_scan.shape[0] - 1] + 1
        num_elements = data.shape[0]
        # Sentinel: the last run extends to the end of the data.
        unique_seq_indices_ptr[num_unique - 1] = num_elements
        # Record the start index of every unique run in the sorted order.
        with ib.new_scope():
            with ib.for_range(0, data_length, kind="parallel") as i:
                with ib.if_scope(i > 0):
                    with ib.if_scope(inc_scan_ptr[i] != inc_scan_ptr[i - 1]):
                        unique_seq_indices_ptr[inc_scan_ptr[i] - 1] = i
        # Counts are the differences between consecutive run boundaries.
        with ib.new_scope():
            with ib.for_range(0, num_unique, kind="parallel") as i:
                unique_idx = i if not index_converter_ptr else index_converter_ptr[i]
                with ib.if_scope(i == 0):
                    counts_ptr[unique_idx] = unique_seq_indices_ptr[i]
                with ib.else_scope():
                    counts_ptr[unique_idx] = (
                        unique_seq_indices_ptr[i] - unique_seq_indices_ptr[i - 1]
                    )
    # calculate unique elements and inverse indices
    with ib.new_scope():
        with ib.for_range(0, data_length, kind="parallel") as i:
            data_idx = argsorted_indices_ptr[i]
            unique_idx = (
                inc_scan_ptr[i] if not index_converter_ptr else index_converter_ptr[inc_scan_ptr[i]]
            )
            inverse_indices_ptr[data_idx] = unique_idx
            # Only the first element of each run writes the unique value.
            with ib.if_scope(i == 0):
                unique_elements_ptr[unique_idx] = data_ptr[data_idx]
            with ib.else_scope():
                with ib.if_scope(inc_scan_ptr[i] != inc_scan_ptr[i - 1]):
                    unique_elements_ptr[unique_idx] = data_ptr[data_idx]
    return ib.get()
@hybrid.script
def _calc_first_occurence(argsorted_indices, inc_scan):
    """Hybrid script to calculate the first occurrence of each unique element
    in the input data.

    Parameters
    ----------
    argsorted_indices : tvm.te.Tensor
        A tensor that stores the argsorted indices of the input data.
    inc_scan : tvm.te.Tensor
        A tensor that stores the inclusive scan of the binary tir.NE adjacent difference
        of the sorted data.

    Returns
    -------
    first_occurence : tvm.te.Tensor
        A tensor that stores the first occurrence of each unique element in the input data;
        slots beyond the number of unique elements keep the sentinel len(data).
    """
    first_occurence = output_tensor(argsorted_indices.shape, "int32")
    # Pad every slot with the data length sentinel first.
    for i in parallel(argsorted_indices.shape[0]):
        first_occurence[i] = argsorted_indices.shape[0]
    # The first sorted element of each unique run contributes its original index.
    for i in parallel(argsorted_indices.shape[0]):
        if i == 0 or inc_scan[i] != inc_scan[i - 1]:
            first_occurence[inc_scan[i]] = argsorted_indices[i]
    return first_occurence
def unique(data, is_sorted=True, return_counts=False):
    """
    Find the unique elements of a 1-D tensor. Please note `output` and `counts` are all padded to
    have the same length of `data` and element with index >= num_unique[0] has undefined value.

    Parameters
    ----------
    data : tvm.te.Tensor
        A 1-D tensor of integers.
    is_sorted : bool
        Whether to sort the unique elements in ascending order before returning as output.
    return_counts : bool
        Whether to return the count of each unique element.

    Returns
    -------
    unique : tvm.te.Tensor
        A 1-D tensor containing the unique elements of the input data tensor. The same size as
        the input data. If there are less unique elements than input data, the end of the tensor
        is padded with zeros.
    indices : tvm.te.Tensor
        A 1-D tensor. The same size as output. For each entry in output, it contains
        the index of its first occurrence in the input data. The end of the tensor is padded
        with the length of the input data.
    inverse_indices : tvm.te.Tensor
        A 1-D tensor. For each entry in data, it contains the index of that data element in
        the unique array. (Note that inverse_indices is very similar to indices if output is not
        sorted.)
    num_unique : tvm.te.Tensor
        A 1-D tensor with size=1 containing the number of unique elements in the input data tensor.
    counts (optional) : tvm.te.Tensor
        A 1-D tensor containing the count of each unique element in the output.

    Examples
    --------
    .. code-block:: python

        [output, indices, num_unique] = unique([4, 5, 1, 2, 3, 3, 4, 5], False, False)
        output          =  [4, 5, 1, 2, 3, _, _, _]
        indices         =  [0, 1, 2, 3, 4, _, _, _]
        inverse_indices =  [0, 1, 2, 3, 4, 4, 0, 1]
        num_unique      =  [5]

        [output, indices, num_unique, counts] = unique([4, 5, 1, 2, 3, 3, 4, 5], False, True)
        output          =  [4, 5, 1, 2, 3, _, _, _]
        indices         =  [0, 1, 2, 3, 4, _, _, _]
        inverse_indices =  [0, 1, 2, 3, 4, 4, 0, 1]
        num_unique      =  [5]
        counts          =  [2, 2, 1, 1, 2, _, _, _]

        [output, indices, num_unique] = unique([4, 5, 1, 2, 3, 3, 4, 5], True)
        output          =  [1, 2, 3, 4, 5, _, _, _]
        indices         =  [2, 3, 4, 0, 1, _, _, _]
        inverse_indices =  [3, 4, 0, 1, 2, 2, 3, 4]
        num_unique      =  [5]
    """
    sorted_data = sort(data)
    argsorted_indices = argsort(data, dtype="int32")
    # adjacent difference
    adjacent_diff = _calc_adjacent_diff(sorted_data, "int32", tir.NE)
    # inclusive scan
    inc_scan = cumsum(adjacent_diff, dtype="int32", exclusive=0)
    # total number of unique elements
    num_unique_elements = _calc_num_unique(inc_scan)
    # prepare outputs
    if return_counts:
        out_data_shape = [data.shape] * 3
        out_dtypes = [data.dtype, "int32", "int32"]
    else:
        out_data_shape = [data.shape] * 2
        out_dtypes = [data.dtype, "int32"]
    # prepare inputs and fcompute
    first_occurence = _calc_first_occurence(argsorted_indices, inc_scan)
    if is_sorted:
        in_data = [data, argsorted_indices, inc_scan]
        if return_counts:
            fcompute = lambda ins, outs: _calc_unique_ir(*ins, None, *outs)
        else:
            fcompute = lambda ins, outs: _calc_unique_ir(*ins, None, *outs, None)
        indices = first_occurence
    else:
        # calculate index converter by sorting unique elements by their first occurrence
        argsorted_first_occurence = argsort(first_occurence, dtype="int32")
        index_converter = argsort(argsorted_first_occurence, dtype="int32")
        in_data = [data, argsorted_indices, inc_scan, index_converter]
        if return_counts:
            fcompute = lambda ins, outs: _calc_unique_ir(*ins, *outs)
        else:
            fcompute = lambda ins, outs: _calc_unique_ir(*ins, *outs, None)
        # First occurrence is in order of sorted unique output; if we sort the first_occurence
        # array we get the correct result
        indices = sort(first_occurence)
    outs = te.extern(
        out_data_shape,
        in_data,
        fcompute,
        dtype=out_dtypes,
        name="_calc_unique",
        tag="_calc_unique_cpu",
    )
    if return_counts:
        return [outs[0], indices, outs[1], num_unique_elements, outs[2]]
    return [outs[0], indices, outs[1], num_unique_elements]
|
def this_is_the_outer_lib():
    """Print a marker string used by the import-resolution tests.

    Modernized from a Python 2 ``print`` statement to the function form,
    which behaves identically on both Python 2 and 3 for a single string.
    """
    print('For imports test')
|
from tempest.api.orchestration import base
from tempest.common.utils import data_utils
from tempest.openstack.common import log as logging
from tempest.test import attr
LOG = logging.getLogger(__name__)
class StacksTestJSON(base.BaseOrchestrationTest):
    """Smoke tests for Heat stack CRUD through the JSON interface."""
    _interface = 'json'
    # A template containing only the format-version line: yields a stack
    # with no resources, so create/delete complete almost instantly.
    empty_template = "HeatTemplateFormatVersion: '2012-12-12'\n"

    @classmethod
    def setUpClass(cls):
        super(StacksTestJSON, cls).setUpClass()
        cls.client = cls.orchestration_client

    @attr(type='smoke')
    def test_stack_list_responds(self):
        # list_stacks returns HTTP 200 with a list body.
        resp, stacks = self.client.list_stacks()
        self.assertEqual('200', resp['status'])
        self.assertIsInstance(stacks, list)

    @attr(type='smoke')
    def test_stack_crud_no_resources(self):
        # Full lifecycle: create, list, fetch by identifier/name/id, delete.
        stack_name = data_utils.rand_name('heat')
        # create the stack
        stack_identifier = self.create_stack(
            stack_name, self.empty_template)
        stack_id = stack_identifier.split('/')[1]
        # wait for create complete (with no resources it should be instant)
        self.client.wait_for_stack_status(stack_identifier, 'CREATE_COMPLETE')
        # check for stack in list
        resp, stacks = self.client.list_stacks()
        list_ids = list([stack['id'] for stack in stacks])
        self.assertIn(stack_id, list_ids)
        # fetch the stack
        resp, stack = self.client.get_stack(stack_identifier)
        self.assertEqual('CREATE_COMPLETE', stack['stack_status'])
        # fetch the stack by name
        resp, stack = self.client.get_stack(stack_name)
        self.assertEqual('CREATE_COMPLETE', stack['stack_status'])
        # fetch the stack by id
        resp, stack = self.client.get_stack(stack_id)
        self.assertEqual('CREATE_COMPLETE', stack['stack_status'])
        # delete the stack
        resp = self.client.delete_stack(stack_identifier)
        self.assertEqual('204', resp[0]['status'])
|
"""This module is deprecated. Please use `airflow.providers.amazon.aws.operators.emr_terminate_job_flow`."""
import warnings
from airflow.providers.amazon.aws.operators.emr_terminate_job_flow import EmrTerminateJobFlowOperator # noqa
# Emit a deprecation warning at import time; stacklevel=2 attributes the
# warning to the module importing this shim rather than to the shim itself.
warnings.warn(
    "This module is deprecated. Please use `airflow.providers.amazon.aws.operators.emr_terminate_job_flow`.",
    DeprecationWarning,
    stacklevel=2,
)
|
"""
WSGI config for mdotproject project.
It exposes the WSGI callable as a module-level variable named ``application``.
For more information on this file, see
https://docs.djangoproject.com/en/1.8/howto/deployment/wsgi/
"""
import os
from django.core.wsgi import get_wsgi_application
# Point Django at the settings module (unless already set), then build the
# WSGI callable that servers such as gunicorn/uwsgi import.
os.environ.setdefault("DJANGO_SETTINGS_MODULE", "project.settings")
application = get_wsgi_application()
|
import base64
import json
import os
import re
import smtplib
import subprocess
import sys
import time
import urllib
import urllib.request

from email.mime.text import MIMEText
def getJSON(url, creds = None, cookie = None):
    """Fetch *url* and return its body parsed as JSON.

    Parameters
    ----------
    url : str
        The URL to fetch.
    creds : str, optional
        "user:password" string; when non-empty it is sent as an HTTP Basic
        Authorization header. Requires the ``base64`` module, which this file
        previously failed to import (NameError on this path).
    cookie : optional
        Currently unused; kept for interface compatibility with callers.
    """
    headers = {}
    if creds and len(creds) > 0:
        xcreds = creds.encode(encoding='ascii', errors='replace')
        # encodebytes inserts newlines every 76 chars; strip them for the header.
        auth = base64.encodebytes(xcreds).decode('ascii', errors='replace').replace("\n", '')
        headers = {"Content-type": "application/json",
                   "Accept": "*/*",
                   "Authorization": "Basic %s" % auth
                   }
    request = urllib.request.Request(url, headers = headers)
    result = urllib.request.urlopen(request)
    return json.loads(result.read().decode('utf-8', errors = 'replace'))
# Top-level driver: pull the repo-request queue and enable GitHub
# integration for every eligible entry.
js = getJSON("https://reporeq.apache.org/queue.json")
created = 0
if js:
    print("analysing %u items" % len(js))
    # For each item:
    # - Check that it hasn't been mirrored yet
    # - Check that a repo with this name doesn't exist already
    # - Check that name is valid
    # - Mirror repo if all is okay
    for item in js:
        # Make sure this is a GH integration request AND it's been mirrored more than a day ago, so GH caught up.
        if not 'githubbed' in item and item['github'] == True and 'mirrordate' in item and item['mirrordate'] < (time.time()-86400):
            reponame = item['name']
            # Check valid name (reject short names and path traversal).
            if len(reponame) < 5 or reponame.find("..") != -1 or reponame.find("/") != -1:
                print("Invalid repo name!")
                continue
            # Set some vars
            # NOTE(review): notify/description are assigned but never used below.
            notify = item['notify']
            description = item['description'] if 'description' in item else "Unknown"
            # Make sure the repo exists!
            if os.path.exists("/x1/git/mirrors/%s" % reponame):
                print("%s is there, adding web hooks" % reponame)
                try:
                    xreponame = reponame.replace(".git", "") # Cut off the .git part, so GH will not bork
                    # shell=True with an interpolated name: guarded only by the
                    # name validation above — keep that check in sync.
                    inp = subprocess.check_output("/usr/local/etc/git_self_serve/add-webhook.sh %s" % xreponame, shell = True).decode('ascii', 'replace')
                except subprocess.CalledProcessError as err:
                    print("Borked: %s" % err.output)
                    continue
            else:
                print("Repo doesn't exist, ignoring this request...sort of")
            # Notify reporeq that we've GH'ed this repository!
            print("Notifying https://reporeq.apache.org/ss.lua?githubbed=%s" % reponame)
            request = urllib.request.Request("https://reporeq.apache.org/ss.lua?githubbed=%s" % reponame)
            result = urllib.request.urlopen(request)
            # Inform infra@ and private@$pmc that the mirror has been set up
            msg = MIMEText("New repository %s has now had GitHub integration enabled!\n\nWith regards,\nApache Infrastructure." % (reponame))
            msg['Subject'] = 'Github integration set up: %s' % reponame
            msg['From'] = "git@apache.org"
            msg['Reply-To'] = "users@infra.apache.org"
            msg['To'] = "users@infra.apache.org, private@%s.apache.org" % item['pmc']
            s = smtplib.SMTP(host='mail.apache.org', port=2025)
            s.send_message(msg)
            s.quit()
            # We made a thing!
            created += 1
print("All done for today! Made %u new repos" % created)
|
import os
import signal
import subprocess
import logging
import socket
import time
import redis
logging.basicConfig(level=logging.INFO,
                    format='%(asctime)s %(levelname)s %(message)s')
# Pinned redis version and per-user scratch directory for the local build.
redis_ver = '2.6.13'
redis_bdir = '/tmp/cache/' + os.environ['USER'] + '/systemless_test'
# NOTE: despite the name, redis_url is the local tarball path, not a URL.
redis_url = redis_bdir + '/redis-'+redis_ver+'.tar.gz'
redis_exe = redis_bdir + '/bin/redis-server'
def install_redis():
    """Download, unpack and build redis into redis_bdir if not already there.

    Each stage (download, untar, make install) is skipped when its artifact
    already exists; a failing stage raises SystemError.
    """
    if not os.path.exists(redis_url):
        process = subprocess.Popen(['wget', '-P', redis_bdir,
                                    'https://redis.googlecode.com/files/redis-'\
                                    + redis_ver + '.tar.gz'],
                                   cwd=redis_bdir)
        process.wait()
        # '!= 0' rather than 'is not 0': identity comparison against int
        # literals is unreliable and a SyntaxWarning on modern Pythons.
        if process.returncode != 0:
            raise SystemError('wget '+redis_url)
    if not os.path.exists(redis_bdir + '/redis-'+redis_ver):
        process = subprocess.Popen(['tar', 'xzvf', redis_url],
                                   cwd=redis_bdir)
        process.wait()
        if process.returncode != 0:
            raise SystemError('untar '+redis_url)
    if not os.path.exists(redis_exe):
        process = subprocess.Popen(['make', 'PREFIX=' + redis_bdir, 'install'],
                                   cwd=redis_bdir + '/redis-'+redis_ver)
        process.wait()
        if process.returncode != 0:
            raise SystemError('install '+redis_url)
def get_redis_path():
    """Return the path to the redis-server binary, building it on demand."""
    if os.path.exists(redis_exe):
        return redis_exe
    install_redis()
    return redis_exe
def redis_version():
    '''
    Determine redis-server version.

    Returns the pinned version 2.6 to match redis_ver above. The old
    subprocess-based probe ("redis-server --version") was disabled upstream
    and lingered as an unreachable string literal after the return; it has
    been removed as dead code.
    '''
    return 2.6
def start_redis(port, password=None):
    '''
    Client uses this function to start an instance of redis.

    Arguments:
        port : An unused TCP port for redis to use as the client port
        password : optional requirepass value written into the config

    Returns True once the server answers PING, False if it fails to come
    up within the configured wait budget.
    '''
    exe = get_redis_path()
    version = redis_version()
    if version == 2.6:
        redis_conf = "redis.26.conf"
    else:
        redis_conf = "redis.24.conf"
    conftemplate = os.path.dirname(os.path.abspath(__file__)) + "/" +\
        redis_conf
    # Per-user, per-port scratch directory for pid/log/cache/config.
    redisbase = "/tmp/redis.%s.%d/" % (os.getenv('USER', 'None'), port)
    output, _ = call_command_("rm -rf " + redisbase)
    output, _ = call_command_("mkdir " + redisbase)
    output, _ = call_command_("mkdir " + redisbase + "cache")
    logging.info('Redis Port %d' % port)
    output, _ = call_command_("cp " + conftemplate + " " + redisbase +
                              redis_conf)
    # Rewrite the stock config to point at the scratch dir and chosen port.
    replace_string_(redisbase + redis_conf,
                    [("/var/run/redis_6379.pid", redisbase + "pid"),
                     ("port 6379", "port " + str(port)),
                     ("/var/log/redis_6379.log", redisbase + "log"),
                     ("/var/lib/redis/6379", redisbase + "cache")])
    if password:
        replace_string_(redisbase + redis_conf,[("# requirepass foobared","requirepass " + password)])
    command = exe + " " + redisbase + redis_conf
    subprocess.Popen(command.split(' '),
                     stdout=subprocess.PIPE,
                     stderr=subprocess.PIPE)
    r = redis.StrictRedis(host='localhost', port=port, db=0, password=password)
    done = False
    # NOTE(review): 'CONTRIAL' looks like a typo for 'CONTRAIL'; kept as-is
    # since renaming the env var would change behavior for deployments.
    # int() cast fixes the original bug where an env-supplied value (a str)
    # was compared against the integer retry counter.
    start_wait = int(os.getenv('CONTRIAL_ANALYTICS_TEST_MAX_START_WAIT_TIME', 15))
    cnt = 0
    while not done:
        try:
            r.ping()
        except Exception:  # narrowed from bare 'except:'; still best-effort retry
            cnt += 1
            if cnt > start_wait:
                logging.info('Redis Failed. Logs below: ')
                with open(redisbase + "log", 'r') as fin:
                    logging.info(fin.read())
                return False
            logging.info('Redis not ready')
            time.sleep(1)
        else:
            done = True
    logging.info('Redis ready')
    return True
def stop_redis(port, password=None):
    '''
    Client uses this function to stop an instance of redis
    This will only work for redis instances that were started by this module
    Arguments:
        cport : The Client Port for the instance of redis to be stopped
        password : auth password of the target instance, if one was set
    '''
    # Ask the server to shut itself down over the client connection.
    r = redis.StrictRedis(host='localhost', port=port, db=0, password=password)
    r.shutdown()
    del r
    # Remove the per-user/per-port scratch directory created by start_redis().
    redisbase = "/tmp/redis.%s.%d/" % (os.getenv('USER', 'None'), port)
    output, _ = call_command_("rm -rf " + redisbase)
def replace_string_(filePath, findreplace):
    """Replace every (findStr, repStr) pair from findreplace in filePath.

    The edited text is written to a temporary sibling file which is then
    renamed over the original, so the file is never left half-written.

    Fixes over the original: removed a leftover debug print, stopped
    shadowing the builtins `input`/`output`, and used `with` so the file
    handles are always closed.
    """
    tempName = filePath + '~~~'
    with open(filePath) as infile:
        text = infile.read()
    for old, new in findreplace:
        text = text.replace(old, new)
    with open(tempName, 'w') as outfile:
        outfile.write(text)
    os.rename(tempName, filePath)
def call_command_(command):
    """Run *command* (split on single spaces) and return (stdout, stderr)."""
    proc = subprocess.Popen(command.split(' '),
                            stdout=subprocess.PIPE,
                            stderr=subprocess.PIPE)
    out, err = proc.communicate()
    return (out, err)
if __name__ == "__main__":
    # Ask the OS for a free TCP port by binding to port 0, then release it
    # and hand it to redis.  (NOTE: small race window -- the port could be
    # re-taken between close() and the redis-server startup.)
    cs = socket.socket(socket.AF_INET, socket.SOCK_STREAM)
    cs.bind(("", 0))
    cport = cs.getsockname()[1]
    cs.close()
    start_redis(cport)
|
import logging
from shotgun.settings import LOG_FILE
def configure_logger():
    """Configures shotgun logger.

    Attaches a stream handler and a file handler (writing to LOG_FILE),
    both at DEBUG level, sharing one timestamped formatter.  Calling this
    more than once used to stack duplicate handlers (every record emitted
    twice per extra call), so it is now a no-op when the logger already
    has handlers attached.
    """
    logger = logging.getLogger('shotgun')
    if logger.handlers:
        # Already configured; don't attach duplicate handlers.
        return
    logger.setLevel(logging.DEBUG)
    formatter = logging.Formatter(
        '%(asctime)s %(levelname)s %(process)d (%(module)s) %(message)s',
        "%Y-%m-%d %H:%M:%S")
    stream_handler = logging.StreamHandler()
    stream_handler.setLevel(logging.DEBUG)
    stream_handler.setFormatter(formatter)
    file_handler = logging.FileHandler(LOG_FILE)
    file_handler.setLevel(logging.DEBUG)
    file_handler.setFormatter(formatter)
    logger.addHandler(stream_handler)
    logger.addHandler(file_handler)
|
import mock
from oslo_config import cfg
from ryu.services.protocols.bgp import bgpspeaker
from ryu.services.protocols.bgp.rtconf.neighbors import CONNECT_MODE_ACTIVE
from neutron.services.bgp.agent import config as bgp_config
from neutron.services.bgp.driver import exceptions as bgp_driver_exc
from neutron.services.bgp.driver.ryu import driver as ryu_driver
from neutron.tests import base
# Fixture values shared by the TestRyuBgpDriver cases below.
FAKE_LOCAL_AS1 = 12345
FAKE_LOCAL_AS2 = 23456
FAKE_ROUTER_ID = '1.1.1.1'
FAKE_PEER_AS = 45678
FAKE_PEER_IP = '2.2.2.5'
FAKE_AUTH_TYPE = 'md5'
FAKE_PEER_PASSWORD = 'awesome'
FAKE_ROUTE = '2.2.2.0/24'
FAKE_NEXTHOP = '5.5.5.5'
class TestRyuBgpDriver(base.BaseTestCase):
    """Unit tests for the Ryu BGP driver: speaker lifecycle, peer
    add/remove, route advertise/withdraw, and parameter validation.
    """
    def setUp(self):
        super(TestRyuBgpDriver, self).setUp()
        cfg.CONF.register_opts(bgp_config.BGP_PROTO_CONFIG_OPTS, 'BGP')
        cfg.CONF.set_override('bgp_router_id', FAKE_ROUTER_ID, 'BGP')
        self.ryu_bgp_driver = ryu_driver.RyuBgpDriver(cfg.CONF.BGP)
        mock_ryu_speaker_p = mock.patch.object(bgpspeaker, 'BGPSpeaker')
        self.mock_ryu_speaker = mock_ryu_speaker_p.start()
        # BUG FIX: the patcher was started but never stopped, leaking the
        # mock into everything that runs after this class.
        self.addCleanup(mock_ryu_speaker_p.stop)
    def test_add_new_bgp_speaker(self):
        self.ryu_bgp_driver.add_bgp_speaker(FAKE_LOCAL_AS1)
        self.assertEqual(1,
            self.ryu_bgp_driver.cache.get_hosted_bgp_speakers_count())
        self.mock_ryu_speaker.assert_called_once_with(
            as_number=FAKE_LOCAL_AS1, router_id=FAKE_ROUTER_ID,
            bgp_server_port=0,
            best_path_change_handler=ryu_driver.best_path_change_cb,
            peer_down_handler=ryu_driver.bgp_peer_down_cb,
            peer_up_handler=ryu_driver.bgp_peer_up_cb)
    def test_remove_bgp_speaker(self):
        self.ryu_bgp_driver.add_bgp_speaker(FAKE_LOCAL_AS1)
        self.assertEqual(1,
            self.ryu_bgp_driver.cache.get_hosted_bgp_speakers_count())
        speaker = self.ryu_bgp_driver.cache.get_bgp_speaker(FAKE_LOCAL_AS1)
        self.ryu_bgp_driver.delete_bgp_speaker(FAKE_LOCAL_AS1)
        self.assertEqual(0,
            self.ryu_bgp_driver.cache.get_hosted_bgp_speakers_count())
        self.assertEqual(1, speaker.shutdown.call_count)
    def test_add_bgp_peer_without_password(self):
        self.ryu_bgp_driver.add_bgp_speaker(FAKE_LOCAL_AS1)
        self.assertEqual(1,
            self.ryu_bgp_driver.cache.get_hosted_bgp_speakers_count())
        self.ryu_bgp_driver.add_bgp_peer(FAKE_LOCAL_AS1,
                                         FAKE_PEER_IP,
                                         FAKE_PEER_AS)
        speaker = self.ryu_bgp_driver.cache.get_bgp_speaker(FAKE_LOCAL_AS1)
        speaker.neighbor_add.assert_called_once_with(
            address=FAKE_PEER_IP,
            remote_as=FAKE_PEER_AS,
            password=None,
            connect_mode=CONNECT_MODE_ACTIVE)
    def test_add_bgp_peer_with_password(self):
        self.ryu_bgp_driver.add_bgp_speaker(FAKE_LOCAL_AS1)
        self.assertEqual(1,
            self.ryu_bgp_driver.cache.get_hosted_bgp_speakers_count())
        self.ryu_bgp_driver.add_bgp_peer(FAKE_LOCAL_AS1,
                                         FAKE_PEER_IP,
                                         FAKE_PEER_AS,
                                         FAKE_AUTH_TYPE,
                                         FAKE_PEER_PASSWORD)
        speaker = self.ryu_bgp_driver.cache.get_bgp_speaker(FAKE_LOCAL_AS1)
        speaker.neighbor_add.assert_called_once_with(
            address=FAKE_PEER_IP,
            remote_as=FAKE_PEER_AS,
            password=FAKE_PEER_PASSWORD,
            connect_mode=CONNECT_MODE_ACTIVE)
    def test_remove_bgp_peer(self):
        self.ryu_bgp_driver.add_bgp_speaker(FAKE_LOCAL_AS1)
        self.assertEqual(1,
            self.ryu_bgp_driver.cache.get_hosted_bgp_speakers_count())
        self.ryu_bgp_driver.delete_bgp_peer(FAKE_LOCAL_AS1, FAKE_PEER_IP)
        speaker = self.ryu_bgp_driver.cache.get_bgp_speaker(FAKE_LOCAL_AS1)
        speaker.neighbor_del.assert_called_once_with(address=FAKE_PEER_IP)
    def test_advertise_route(self):
        self.ryu_bgp_driver.add_bgp_speaker(FAKE_LOCAL_AS1)
        self.assertEqual(1,
            self.ryu_bgp_driver.cache.get_hosted_bgp_speakers_count())
        self.ryu_bgp_driver.advertise_route(FAKE_LOCAL_AS1,
                                            FAKE_ROUTE,
                                            FAKE_NEXTHOP)
        speaker = self.ryu_bgp_driver.cache.get_bgp_speaker(FAKE_LOCAL_AS1)
        speaker.prefix_add.assert_called_once_with(prefix=FAKE_ROUTE,
                                                   next_hop=FAKE_NEXTHOP)
    def test_withdraw_route(self):
        self.ryu_bgp_driver.add_bgp_speaker(FAKE_LOCAL_AS1)
        self.assertEqual(1,
            self.ryu_bgp_driver.cache.get_hosted_bgp_speakers_count())
        self.ryu_bgp_driver.withdraw_route(FAKE_LOCAL_AS1, FAKE_ROUTE)
        speaker = self.ryu_bgp_driver.cache.get_bgp_speaker(FAKE_LOCAL_AS1)
        speaker.prefix_del.assert_called_once_with(prefix=FAKE_ROUTE)
    def test_add_same_bgp_speakers_twice(self):
        self.ryu_bgp_driver.add_bgp_speaker(FAKE_LOCAL_AS1)
        self.assertRaises(bgp_driver_exc.BgpSpeakerAlreadyScheduled,
                          self.ryu_bgp_driver.add_bgp_speaker, FAKE_LOCAL_AS1)
    def test_add_different_bgp_speakers_when_one_already_added(self):
        self.ryu_bgp_driver.add_bgp_speaker(FAKE_LOCAL_AS1)
        self.assertRaises(bgp_driver_exc.BgpSpeakerMaxScheduled,
                          self.ryu_bgp_driver.add_bgp_speaker,
                          FAKE_LOCAL_AS2)
    def test_add_bgp_speaker_with_invalid_asnum_paramtype(self):
        self.assertRaises(bgp_driver_exc.InvalidParamType,
                          self.ryu_bgp_driver.add_bgp_speaker, '12345')
    def test_add_bgp_speaker_with_invalid_asnum_range(self):
        self.assertRaises(bgp_driver_exc.InvalidParamRange,
                          self.ryu_bgp_driver.add_bgp_speaker, -1)
        self.assertRaises(bgp_driver_exc.InvalidParamRange,
                          self.ryu_bgp_driver.add_bgp_speaker, 65536)
    def test_add_bgp_peer_with_invalid_paramtype(self):
        # Test with an invalid asnum data-type
        self.ryu_bgp_driver.add_bgp_speaker(FAKE_LOCAL_AS1)
        self.assertRaises(bgp_driver_exc.InvalidParamType,
                          self.ryu_bgp_driver.add_bgp_peer,
                          FAKE_LOCAL_AS1, FAKE_PEER_IP, '12345')
        # Test with an invalid auth-type and an invalid password
        self.assertRaises(bgp_driver_exc.InvalidParamType,
                          self.ryu_bgp_driver.add_bgp_peer,
                          FAKE_LOCAL_AS1, FAKE_PEER_IP, FAKE_PEER_AS,
                          'sha-1', 1234)
        # Test with an invalid auth-type and a valid password
        # (NOTE: "InvaildAuthType" is the exception's actual spelling in
        # the driver's exceptions module.)
        self.assertRaises(bgp_driver_exc.InvaildAuthType,
                          self.ryu_bgp_driver.add_bgp_peer,
                          FAKE_LOCAL_AS1, FAKE_PEER_IP, FAKE_PEER_AS,
                          'hmac-md5', FAKE_PEER_PASSWORD)
        # Test with none auth-type and a valid password
        self.assertRaises(bgp_driver_exc.InvaildAuthType,
                          self.ryu_bgp_driver.add_bgp_peer,
                          FAKE_LOCAL_AS1, FAKE_PEER_IP, FAKE_PEER_AS,
                          'none', FAKE_PEER_PASSWORD)
        # Test with none auth-type and an invalid password
        self.assertRaises(bgp_driver_exc.InvalidParamType,
                          self.ryu_bgp_driver.add_bgp_peer,
                          FAKE_LOCAL_AS1, FAKE_PEER_IP, FAKE_PEER_AS,
                          'none', 1234)
        # Test with a valid auth-type and no password
        self.assertRaises(bgp_driver_exc.PasswordNotSpecified,
                          self.ryu_bgp_driver.add_bgp_peer,
                          FAKE_LOCAL_AS1, FAKE_PEER_IP, FAKE_PEER_AS,
                          FAKE_AUTH_TYPE, None)
    def test_add_bgp_peer_with_invalid_asnum_range(self):
        self.ryu_bgp_driver.add_bgp_speaker(FAKE_LOCAL_AS1)
        self.assertRaises(bgp_driver_exc.InvalidParamRange,
                          self.ryu_bgp_driver.add_bgp_peer,
                          FAKE_LOCAL_AS1, FAKE_PEER_IP, -1)
        self.assertRaises(bgp_driver_exc.InvalidParamRange,
                          self.ryu_bgp_driver.add_bgp_peer,
                          FAKE_LOCAL_AS1, FAKE_PEER_IP, 65536)
    def test_add_bgp_peer_without_adding_speaker(self):
        self.assertRaises(bgp_driver_exc.BgpSpeakerNotAdded,
                          self.ryu_bgp_driver.add_bgp_peer,
                          FAKE_LOCAL_AS1, FAKE_PEER_IP, FAKE_PEER_AS)
    def test_remove_bgp_peer_with_invalid_paramtype(self):
        self.ryu_bgp_driver.add_bgp_speaker(FAKE_LOCAL_AS1)
        self.assertRaises(bgp_driver_exc.InvalidParamType,
                          self.ryu_bgp_driver.delete_bgp_peer,
                          FAKE_LOCAL_AS1, 12345)
    def test_remove_bgp_peer_without_adding_speaker(self):
        self.assertRaises(bgp_driver_exc.BgpSpeakerNotAdded,
                          self.ryu_bgp_driver.delete_bgp_peer,
                          FAKE_LOCAL_AS1, FAKE_PEER_IP)
    def test_advertise_route_with_invalid_paramtype(self):
        self.ryu_bgp_driver.add_bgp_speaker(FAKE_LOCAL_AS1)
        self.assertRaises(bgp_driver_exc.InvalidParamType,
                          self.ryu_bgp_driver.advertise_route,
                          FAKE_LOCAL_AS1, 12345, FAKE_NEXTHOP)
        self.assertRaises(bgp_driver_exc.InvalidParamType,
                          self.ryu_bgp_driver.advertise_route,
                          FAKE_LOCAL_AS1, FAKE_ROUTE, 12345)
    def test_advertise_route_without_adding_speaker(self):
        self.assertRaises(bgp_driver_exc.BgpSpeakerNotAdded,
                          self.ryu_bgp_driver.advertise_route,
                          FAKE_LOCAL_AS1, FAKE_ROUTE, FAKE_NEXTHOP)
    def test_withdraw_route_with_invalid_paramtype(self):
        self.ryu_bgp_driver.add_bgp_speaker(FAKE_LOCAL_AS1)
        self.assertRaises(bgp_driver_exc.InvalidParamType,
                          self.ryu_bgp_driver.withdraw_route,
                          FAKE_LOCAL_AS1, 12345)
        self.assertRaises(bgp_driver_exc.InvalidParamType,
                          self.ryu_bgp_driver.withdraw_route,
                          FAKE_LOCAL_AS1, 12345)
    def test_withdraw_route_without_adding_speaker(self):
        self.assertRaises(bgp_driver_exc.BgpSpeakerNotAdded,
                          self.ryu_bgp_driver.withdraw_route,
                          FAKE_LOCAL_AS1, FAKE_ROUTE)
    def test_add_multiple_bgp_speakers(self):
        self.ryu_bgp_driver.add_bgp_speaker(FAKE_LOCAL_AS1)
        self.assertEqual(1,
            self.ryu_bgp_driver.cache.get_hosted_bgp_speakers_count())
        self.assertRaises(bgp_driver_exc.BgpSpeakerMaxScheduled,
                          self.ryu_bgp_driver.add_bgp_speaker,
                          FAKE_LOCAL_AS2)
        self.assertRaises(bgp_driver_exc.BgpSpeakerNotAdded,
                          self.ryu_bgp_driver.delete_bgp_speaker,
                          FAKE_LOCAL_AS2)
        self.assertEqual(1,
            self.ryu_bgp_driver.cache.get_hosted_bgp_speakers_count())
        self.ryu_bgp_driver.delete_bgp_speaker(FAKE_LOCAL_AS1)
        self.assertEqual(0,
            self.ryu_bgp_driver.cache.get_hosted_bgp_speakers_count())
|
import optparse
import StringIO
import webkitpy.thirdparty.unittest2 as unittest
from webkitpy.common.host_mock import MockHost
from webkitpy.layout_tests import lint_test_expectations
class FakePort(object):
    """Minimal stand-in for a webkitpy Port; records on the shared host
    which ports had their expectations read."""

    def __init__(self, host, name, path):
        self.host = host
        self.name = name
        self.path = path

    def expectations_dict(self):
        # Note on the shared host that this port's expectations were read.
        self.host.ports_parsed.append(self.name)
        return {self.path: ''}

    def test_configuration(self):
        return None

    def bot_expectations(self):
        return {}

    def skipped_layout_tests(self, _):
        return set()

    def all_test_configurations(self):
        return []

    def configuration_specifier_macros(self):
        return []

    def path_to_generic_test_expectations_file(self):
        return ''

    def get_option(self, _, val):
        return val
class FakeFactory(object):
    """Port factory stub serving a fixed set of FakePorts keyed by name."""

    def __init__(self, host, ports):
        self.host = host
        self.ports = dict((port.name, port) for port in ports)

    def get(self, port_name, *args, **kwargs):  # pylint: disable=W0613,E0202
        return self.ports[port_name]

    def all_port_names(self, platform=None):  # pylint: disable=W0613,E0202
        return sorted(self.ports.keys())
class LintTest(unittest.TestCase):
    """Tests for lint_test_expectations.lint() across port configurations."""
    def test_all_configurations(self):
        # With no --platform, every port name the factory knows must be
        # parsed exactly once.
        host = MockHost()
        host.ports_parsed = []
        host.port_factory = FakeFactory(host, (FakePort(host, 'a', 'path-to-a'),
                                               FakePort(host, 'b', 'path-to-b'),
                                               FakePort(host, 'b-win', 'path-to-b')))
        logging_stream = StringIO.StringIO()
        options = optparse.Values({'platform': None})
        res = lint_test_expectations.lint(host, options, logging_stream)
        self.assertEqual(res, 0)
        self.assertEqual(host.ports_parsed, ['a', 'b', 'b-win'])
    def test_lint_test_files(self):
        logging_stream = StringIO.StringIO()
        options = optparse.Values({'platform': 'test-mac-leopard'})
        host = MockHost()
        # pylint appears to complain incorrectly about the method overrides pylint: disable=E0202,C0322
        # FIXME: incorrect complaints about spacing pylint: disable=C0322
        host.port_factory.all_port_names = lambda platform=None: [platform]
        res = lint_test_expectations.lint(host, options, logging_stream)
        self.assertEqual(res, 0)
        self.assertIn('Lint succeeded', logging_stream.getvalue())
    def test_lint_test_files__errors(self):
        # Broken expectation files must produce a non-zero result and
        # per-file error lines in the log output.
        options = optparse.Values({'platform': 'test', 'debug_rwt_logging': False})
        host = MockHost()
        # FIXME: incorrect complaints about spacing pylint: disable=C0322
        port = host.port_factory.get(options.platform, options=options)
        port.expectations_dict = lambda: {'foo': '-- syntax error1', 'bar': '-- syntax error2'}
        host.port_factory.get = lambda platform, options=None: port
        host.port_factory.all_port_names = lambda platform=None: [port.name()]
        logging_stream = StringIO.StringIO()
        res = lint_test_expectations.lint(host, options, logging_stream)
        self.assertEqual(res, -1)
        self.assertIn('Lint failed', logging_stream.getvalue())
        self.assertIn('foo:1', logging_stream.getvalue())
        self.assertIn('bar:1', logging_stream.getvalue())
class MainTest(unittest.TestCase):
    """Tests for lint_test_expectations.main()'s exit-status mapping."""
    def test_success(self):
        # Swap lint() for stubs to drive each exit path of main().
        orig_lint_fn = lint_test_expectations.lint
        # unused args pylint: disable=W0613
        def interrupting_lint(host, options, logging_stream):
            raise KeyboardInterrupt
        def successful_lint(host, options, logging_stream):
            return 0
        def exception_raising_lint(host, options, logging_stream):
            assert False
        stdout = StringIO.StringIO()
        stderr = StringIO.StringIO()
        try:
            lint_test_expectations.lint = interrupting_lint
            res = lint_test_expectations.main([], stdout, stderr)
            self.assertEqual(res, lint_test_expectations.INTERRUPTED_EXIT_STATUS)
            lint_test_expectations.lint = successful_lint
            res = lint_test_expectations.main(['--platform', 'test'], stdout, stderr)
            self.assertEqual(res, 0)
            lint_test_expectations.lint = exception_raising_lint
            res = lint_test_expectations.main([], stdout, stderr)
            self.assertEqual(res, lint_test_expectations.EXCEPTIONAL_EXIT_STATUS)
        finally:
            # Always restore the real lint() so other tests are unaffected.
            lint_test_expectations.lint = orig_lint_fn
|
from oslo_config import cfg
# Ensure the stack_scheduler_hints option is registered before it is read
# in SchedulerHintsMixin._scheduler_hints() below.
cfg.CONF.import_opt('stack_scheduler_hints', 'heat.common.config')
class SchedulerHintsMixin(object):
    '''
    Utility class to encapsulate Scheduler Hint related logic shared
    between resources.
    '''

    # Keys under which Heat metadata is injected into scheduler hints.
    HEAT_ROOT_STACK_ID = 'heat_root_stack_id'
    HEAT_STACK_ID = 'heat_stack_id'
    HEAT_STACK_NAME = 'heat_stack_name'
    HEAT_PATH_IN_STACK = 'heat_path_in_stack'
    HEAT_RESOURCE_NAME = 'heat_resource_name'
    HEAT_RESOURCE_UUID = 'heat_resource_uuid'

    def _scheduler_hints(self, scheduler_hints):
        '''Augment scheduler hints with supplemental content.

        Only active when the stack_scheduler_hints option is enabled;
        otherwise the input is returned untouched.
        '''
        if not cfg.CONF.stack_scheduler_hints:
            return scheduler_hints
        hints = {} if scheduler_hints is None else scheduler_hints
        hints.update({
            self.HEAT_ROOT_STACK_ID: self.stack.root_stack_id(),
            self.HEAT_STACK_ID: self.stack.id,
            self.HEAT_STACK_NAME: self.stack.name,
            self.HEAT_PATH_IN_STACK: self.stack.path_in_stack(),
            self.HEAT_RESOURCE_NAME: self.name,
            self.HEAT_RESOURCE_UUID: self.uuid,
        })
        return hints
|
"""Utilities and helper functions."""
import abc
import contextlib
import datetime
import functools
import hashlib
import inspect
import logging as py_logging
import os
import pyclbr
import random
import re
import shutil
import socket
import stat
import sys
import tempfile
import time
import types
from xml.dom import minidom
from xml.parsers import expat
from xml import sax
from xml.sax import expatreader
from xml.sax import saxutils
from os_brick.initiator import connector
from oslo_concurrency import lockutils
from oslo_concurrency import processutils
from oslo_config import cfg
from oslo_log import log as logging
from oslo_utils import encodeutils
from oslo_utils import importutils
from oslo_utils import strutils
from oslo_utils import timeutils
import retrying
import six
from cinder import exception
from cinder.i18n import _, _LE, _LW
CONF = cfg.CONF
LOG = logging.getLogger(__name__)
# Timestamp layouts: ISO_TIME_FORMAT has whole-second resolution,
# PERFECT_TIME_FORMAT keeps microseconds.
ISO_TIME_FORMAT = "%Y-%m-%dT%H:%M:%S"
PERFECT_TIME_FORMAT = "%Y-%m-%dT%H:%M:%S.%f"
# Trace flags accepted by configuration; module-level toggles flipped at
# runtime to enable method/API tracing.
VALID_TRACE_FLAGS = {'method', 'api'}
TRACE_METHOD = False
TRACE_API = False
# Inter-process lock decorator using cinder-prefixed lock files.
synchronized = lockutils.synchronized_with_prefix('cinder-')
def find_config(config_path):
    """Find a configuration file using the given hint.

    :param config_path: Full or relative path to the config.
    :returns: Full path of the config, if it exists.
    :raises: `cinder.exception.ConfigNotFound`
    """
    # Candidate locations, tried in order of decreasing specificity.
    candidates = (
        config_path,
        os.path.join(CONF.state_path, "etc", "cinder", config_path),
        os.path.join(CONF.state_path, "etc", config_path),
        os.path.join(CONF.state_path, config_path),
        "/etc/cinder/%s" % config_path,
    )
    for candidate in candidates:
        if os.path.exists(candidate):
            return os.path.abspath(candidate)
    raise exception.ConfigNotFound(path=os.path.abspath(config_path))
def as_int(obj, quiet=True):
    """Best-effort conversion of *obj* to an int.

    Tries int() first (handles "2"), then int(float()) (handles "2.5").
    On failure the original object is returned unchanged, unless quiet is
    False, in which case a TypeError is raised.
    """
    for convert in (int, lambda value: int(float(value))):
        try:
            return convert(obj)
        except (ValueError, TypeError):
            pass
    # Eck, not sure what this is then.
    if not quiet:
        raise TypeError(_("Can not translate %s to integer.") % (obj))
    return obj
def is_int_like(val):
    """Check if a value looks like an int (round-trips through int/str)."""
    try:
        looks_alike = str(int(val)) == str(val)
    except Exception:
        return False
    return looks_alike
def check_exclusive_options(**kwargs):
    """Checks that only one of the provided options is actually not-none.

    Iterates over all the kwargs passed in and checks that only one of said
    arguments is not-none; if more than one is not-none then an exception
    will be raised with the names of those arguments who were not-none.

    :param pretty_keys: reserved keyword (default True); when set,
        underscores in the offending option names are rendered as spaces
        in the error message.
    :raises: `cinder.exception.InvalidInput` if more than one option is set
    """
    if not kwargs:
        return
    pretty_keys = kwargs.pop("pretty_keys", True)
    exclusive_options = [k for (k, v) in kwargs.items() if v is not None]
    if len(exclusive_options) > 1:
        # Change the format of the names from pythonic to
        # something that is more readable.
        #
        # Ex: 'the_key' -> 'the key'
        # BUG FIX: the message is built from the options that are actually
        # set; the original listed *every* passed option, including the
        # None-valued ones.
        if pretty_keys:
            names = [k.replace('_', ' ') for k in exclusive_options]
        else:
            names = exclusive_options
        names = ", ".join(sorted(names))
        msg = (_("May specify only one of %s") % (names))
        raise exception.InvalidInput(reason=msg)
def execute(*cmd, **kwargs):
    """Convenience wrapper around oslo's execute() method.

    Fills in the cinder rootwrap helper whenever run_as_root is requested
    without an explicit root_helper.
    """
    needs_helper = 'run_as_root' in kwargs and 'root_helper' not in kwargs
    if needs_helper:
        kwargs['root_helper'] = get_root_helper()
    return processutils.execute(*cmd, **kwargs)
def check_ssh_injection(cmd_list):
    """Scan a command argument list for shell-injection attempts.

    :param cmd_list: list of command arguments destined for a remote shell
    :raises: `cinder.exception.SSHInjectionThreat` when an argument contains
             an unescaped shell metacharacter or suspicious quoting
    """
    ssh_injection_pattern = ['`', '$', '|', '||', ';', '&', '&&', '>', '>>',
                             '<']
    # Check whether injection attacks exist
    for arg in cmd_list:
        arg = arg.strip()
        # Check for matching quotes on the ends
        is_quoted = re.match('^(?P<quote>[\'"])(?P<quoted>.*)(?P=quote)$', arg)
        if is_quoted:
            # Check for unescaped quotes within the quoted argument
            quoted = is_quoted.group('quoted')
            if quoted:
                # A quote at the very start, or any quote not preceded by a
                # backslash, could terminate the quoting early.
                if (re.match('[\'"]', quoted) or
                        re.search('[^\\\\][\'"]', quoted)):
                    raise exception.SSHInjectionThreat(command=cmd_list)
        else:
            # We only allow spaces within quoted arguments, and that
            # is the only special character allowed within quotes
            if len(arg.split()) > 1:
                raise exception.SSHInjectionThreat(command=cmd_list)
        # Second, check whether danger character in command. So the shell
        # special operator must be a single argument.
        for c in ssh_injection_pattern:
            if c not in arg:
                continue
            result = arg.find(c)
            if not result == -1:
                # The metacharacter is only tolerated when escaped with a
                # preceding backslash (and not at position 0).
                if result == 0 or not arg[result - 1] == '\\':
                    raise exception.SSHInjectionThreat(command=cmd_list)
def create_channel(client, width, height):
    """Invoke an interactive shell session on server."""
    shell = client.invoke_shell()
    shell.resize_pty(width, height)
    return shell
def cinderdir():
    """Return the directory that contains the cinder package (with a
    trailing slash)."""
    # Imported locally rather than at module level -- presumably to avoid a
    # circular import; confirm before moving it.
    import cinder
    return os.path.abspath(cinder.__file__).split('cinder/__init__.py')[0]
def last_completed_audit_period(unit=None):
    """This method gives you the most recently *completed* audit period.
    arguments:
            units: string, one of 'hour', 'day', 'month', 'year'
                Periods normally begin at the beginning (UTC) of the
                period unit (So a 'day' period begins at midnight UTC,
                a 'month' unit on the 1st, a 'year' on Jan, 1)
                unit string may be appended with an optional offset
                like so:  'day@18'  This will begin the period at 18:00
                UTC.  'month@15' starts a monthly period on the 15th,
                and year@3 begins a yearly one on March 1st.
    returns:  2 tuple of datetimes (begin, end)
              The begin timestamp of this audit period is the same as the
              end of the previous.
    """
    if not unit:
        unit = CONF.volume_usage_audit_period
    offset = 0
    if '@' in unit:
        # Optional '@<offset>' suffix shifts where each period starts,
        # e.g. 'day@18' starts daily periods at 18:00 UTC.
        unit, offset = unit.split("@", 1)
        offset = int(offset)
    rightnow = timeutils.utcnow()
    if unit not in ('month', 'day', 'year', 'hour'):
        raise ValueError('Time period must be hour, day, month or year')
    if unit == 'month':
        if offset == 0:
            offset = 1
        # 'end' starts as this month's boundary; if that boundary hasn't
        # passed yet, step back one month (wrapping the year in January).
        end = datetime.datetime(day=offset,
                                month=rightnow.month,
                                year=rightnow.year)
        if end >= rightnow:
            year = rightnow.year
            if 1 >= rightnow.month:
                year -= 1
                month = 12 + (rightnow.month - 1)
            else:
                month = rightnow.month - 1
            end = datetime.datetime(day=offset,
                                    month=month,
                                    year=year)
        # 'begin' is exactly one month before 'end', with the same wrap.
        year = end.year
        if 1 >= end.month:
            year -= 1
            month = 12 + (end.month - 1)
        else:
            month = end.month - 1
        begin = datetime.datetime(day=offset, month=month, year=year)
    elif unit == 'year':
        if offset == 0:
            offset = 1
        end = datetime.datetime(day=1, month=offset, year=rightnow.year)
        if end >= rightnow:
            end = datetime.datetime(day=1,
                                    month=offset,
                                    year=rightnow.year - 1)
            begin = datetime.datetime(day=1,
                                      month=offset,
                                      year=rightnow.year - 2)
        else:
            begin = datetime.datetime(day=1,
                                      month=offset,
                                      year=rightnow.year - 1)
    elif unit == 'day':
        end = datetime.datetime(hour=offset,
                                day=rightnow.day,
                                month=rightnow.month,
                                year=rightnow.year)
        if end >= rightnow:
            end = end - datetime.timedelta(days=1)
        begin = end - datetime.timedelta(days=1)
    elif unit == 'hour':
        end = rightnow.replace(minute=offset, second=0, microsecond=0)
        if end >= rightnow:
            end = end - datetime.timedelta(hours=1)
        begin = end - datetime.timedelta(hours=1)
    return (begin, end)
def list_of_dicts_to_dict(seq, key):
    """Convert list of dicts to an indexed dict.

    Takes a list of dicts, and converts it a nested dict
    indexed by <key>

    :param seq: list of dicts
    :param key: key in dicts to index by

    example:
      lst = [{'id': 1, ...}, {'id': 2, ...}...]
      key = 'id'
      returns {1:{'id': 1, ...}, 2:{'id':2, ...}
    """
    # The original iterated enumerate(seq) but never used the index; a
    # plain iteration is equivalent.  Each value is a copy of the input
    # dict, additionally carrying the index value under an 'index' key.
    return {d[key]: dict(d, index=d[key]) for d in seq}
class ProtectedExpatParser(expatreader.ExpatParser):
    """An expat parser which disables DTD's and entities by default.

    Forbidding DTDs and entity declarations defends against XML
    entity-expansion ("billion laughs") and external-entity attacks.
    """
    def __init__(self, forbid_dtd=True, forbid_entities=True,
                 *args, **kwargs):
        # Python 2.x old style class
        expatreader.ExpatParser.__init__(self, *args, **kwargs)
        self.forbid_dtd = forbid_dtd
        self.forbid_entities = forbid_entities
    def start_doctype_decl(self, name, sysid, pubid, has_internal_subset):
        # Installed as StartDoctypeDeclHandler in reset(); any DOCTYPE
        # aborts the parse.
        raise ValueError("Inline DTD forbidden")
    def entity_decl(self, entityName, is_parameter_entity, value, base,
                    systemId, publicId, notationName):
        raise ValueError("<!ENTITY> forbidden")
    def unparsed_entity_decl(self, name, base, sysid, pubid, notation_name):
        # expat 1.2
        raise ValueError("<!ENTITY> forbidden")
    def reset(self):
        expatreader.ExpatParser.reset(self)
        # reset() runs before every parse, so the forbidding handlers are
        # re-installed on the fresh underlying expat parser each time.
        if self.forbid_dtd:
            self._parser.StartDoctypeDeclHandler = self.start_doctype_decl
        if self.forbid_entities:
            self._parser.EntityDeclHandler = self.entity_decl
            self._parser.UnparsedEntityDeclHandler = self.unparsed_entity_decl
def safe_minidom_parse_string(xml_string):
    """Parse an XML string using minidom safely.

    Uses ProtectedExpatParser, so documents containing DTDs or entity
    declarations are rejected.  Parse failures are normalized to
    expat.ExpatError for callers.
    """
    try:
        return minidom.parseString(xml_string, parser=ProtectedExpatParser())
    except sax.SAXParseException:
        raise expat.ExpatError()
def xhtml_escape(value):
    """Escapes a string so it is valid within XML or XHTML."""
    extra_entities = {'"': '&quot;', "'": '&apos;'}
    return saxutils.escape(value, extra_entities)
def get_from_path(items, path):
    """Returns a list of items matching the specified path.

    Takes an XPath-like expression e.g. prop1/prop2/prop3, and for each item
    in items, looks up items[prop1][prop2][prop3].  Like XPath, if any of the
    intermediate results are lists it will treat each list item individually.
    A 'None' in items or any child expressions will be ignored; this function
    will not throw because of None (anywhere) in items.  The returned list
    will contain no None values.
    """
    if path is None:
        raise exception.Error('Invalid mini_xpath')
    (first_token, sep, remainder) = path.partition('/')
    if first_token == '':
        raise exception.Error('Invalid mini_xpath')
    results = []
    if items is None:
        return results
    if not isinstance(items, list):
        # Wrap single objects in a list
        items = [items]
    for entry in items:
        if entry is None:
            continue
        # Only dict-likes (anything with .get) participate in the lookup.
        getter = getattr(entry, 'get', None)
        if getter is None:
            continue
        value = getter(first_token)
        if value is None:
            continue
        if isinstance(value, list):
            # Flatten intermediate lists
            results.extend(value)
        else:
            results.append(value)
    # With path components remaining, recurse into the gathered children.
    return get_from_path(results, remainder) if sep else results
def is_valid_boolstr(val):
    """Check if the provided string is a valid bool string or not."""
    return str(val).lower() in ('true', 'false', 'yes', 'no',
                                'y', 'n', '1', '0')
def is_none_string(val):
    """Check if a string represents a None value (case-insensitive)."""
    return isinstance(val, six.string_types) and val.lower() == 'none'
def monkey_patch():
    """Patches decorators for all functions in a specified module.

    If the CONF.monkey_patch set as True,
    this function patches a  decorator
    for all functions in specified modules.

    You can set decorators for each modules
    using CONF.monkey_patch_modules.
    The format is "Module path:Decorator function".
    Example: 'cinder.api.ec2.cloud:' \
     cinder.openstack.common.notifier.api.notify_decorator'

    Parameters of the decorator is as follows.
    (See cinder.openstack.common.notifier.api.notify_decorator)

    :param name: name of the function
    :param function: object of the function
    """
    # If CONF.monkey_patch is not True, this function do nothing.
    if not CONF.monkey_patch:
        return
    # Get list of modules and decorators
    for module_and_decorator in CONF.monkey_patch_modules:
        module, decorator_name = module_and_decorator.split(':')
        # import decorator function
        decorator = importutils.import_class(decorator_name)
        __import__(module)
        # Retrieve module information using pyclbr
        module_data = pyclbr.readmodule_ex(module)
        for key in module_data.keys():
            # set the decorator for the class methods
            if isinstance(module_data[key], pyclbr.Class):
                clz = importutils.import_class("%s.%s" % (module, key))
                # NOTE(review): inspect.ismethod matches unbound methods on
                # Python 2; on Python 3 functions on a class are not
                # "methods", so this loop may find nothing -- confirm when
                # porting.
                for method, func in inspect.getmembers(clz, inspect.ismethod):
                    setattr(
                        clz, method,
                        decorator("%s.%s.%s" % (module, key, method), func))
            # set the decorator for the function
            if isinstance(module_data[key], pyclbr.Function):
                func = importutils.import_class("%s.%s" % (module, key))
                setattr(sys.modules[module], key,
                        decorator("%s.%s" % (module, key), func))
def make_dev_path(dev, partition=None, base='/dev'):
    """Return a path to a particular device.

    >>> make_dev_path('xvdc')
    /dev/xvdc

    >>> make_dev_path('xvdc', 1)
    /dev/xvdc1
    """
    # A falsy partition (None or 0) adds no suffix, matching the original.
    suffix = str(partition) if partition else ''
    return os.path.join(base, dev) + suffix
def sanitize_hostname(hostname):
    """Return a hostname which conforms to RFC-952 and RFC-1123 specs.

    Spaces and underscores become hyphens, characters outside latin-1 or
    outside [A-Za-z0-9_.-] are dropped, and the result is lower-cased with
    leading/trailing '.' and '-' stripped.
    """
    # Normalize to latin-1-representable text, dropping unmappable chars.
    if six.PY3:
        hostname = hostname.encode('latin-1', 'ignore')
        hostname = hostname.decode('latin-1')
    else:
        if isinstance(hostname, six.text_type):
            hostname = hostname.encode('latin-1', 'ignore')

    hostname = re.sub(r'[ _]', '-', hostname)
    # Raw string: '\w' in a plain literal is an invalid escape sequence
    # (DeprecationWarning on Python 3.6+, error later).
    hostname = re.sub(r'[^\w.-]+', '', hostname)
    hostname = hostname.lower()
    hostname = hostname.strip('.-')

    return hostname
def hash_file(file_like_object):
    """Generate a SHA-1 hex digest for the contents of a file."""
    digest = hashlib.sha1()
    # Read in 32 KiB chunks until the file reports EOF (empty bytes).
    for chunk in iter(lambda: file_like_object.read(32768), b''):
        digest.update(chunk)
    return digest.hexdigest()
def service_is_up(service):
    """Check whether a service is up based on last heartbeat."""
    last_heartbeat = service['updated_at'] or service['created_at']
    # Timestamps in DB are UTC.
    delta = timeutils.utcnow(with_timezone=True) - last_heartbeat
    return abs(delta.total_seconds()) <= CONF.service_down_time
def read_file_as_root(file_path):
    """Secure helper to read file as root.

    :raises: `cinder.exception.FileNotFound` if the privileged read fails
    """
    try:
        stdout, _stderr = execute('cat', file_path, run_as_root=True)
    except processutils.ProcessExecutionError:
        raise exception.FileNotFound(file_path=file_path)
    return stdout
@contextlib.contextmanager
def temporary_chown(path, owner_uid=None):
    """Temporarily chown a path.

    :params owner_uid: UID of temporary owner (defaults to current user)
    """
    if owner_uid is None:
        owner_uid = os.getuid()

    original_uid = os.stat(path).st_uid
    needs_chown = original_uid != owner_uid

    if needs_chown:
        execute('chown', owner_uid, path, run_as_root=True)
    try:
        yield
    finally:
        # Restore the original owner even if the body raised.
        if needs_chown:
            execute('chown', original_uid, path, run_as_root=True)
@contextlib.contextmanager
def tempdir(**kwargs):
    """Yield a freshly-created temporary directory, removed on exit.

    Removal failures are logged at debug level rather than raised.
    """
    created = tempfile.mkdtemp(**kwargs)
    try:
        yield created
    finally:
        try:
            shutil.rmtree(created)
        except OSError as err:
            LOG.debug('Could not remove tmpdir: %s',
                      six.text_type(err))
def walk_class_hierarchy(clazz, encountered=None):
    """Walk class hierarchy, yielding most derived classes first."""
    encountered = encountered or []
    for child in clazz.__subclasses__():
        if child in encountered:
            continue
        encountered.append(child)
        # Depth-first: emit the leaves below this subclass before it.
        for descendant in walk_class_hierarchy(child, encountered):
            yield descendant
        yield child
def get_root_helper():
    """Return the rootwrap command prefix used to run privileged commands."""
    return 'sudo cinder-rootwrap %s' % CONF.rootwrap_config
def brick_get_connector_properties(multipath=False, enforce_multipath=False):
    """Wrapper to automatically set root_helper in brick calls.

    :param multipath: A boolean indicating whether the connector can
                      support multipath.
    :param enforce_multipath: If True, it raises exception when multipath=True
                              is specified but multipathd is not running.
                              If False, it falls back to multipath=False
                              when multipathd is not running.
    """
    # root_helper and the local IP come from cinder configuration; the rest
    # is delegated to os-brick.
    root_helper = get_root_helper()
    return connector.get_connector_properties(root_helper,
                                              CONF.my_ip,
                                              multipath,
                                              enforce_multipath)
def brick_get_connector(protocol, driver=None,
                        execute=processutils.execute,
                        use_multipath=False,
                        device_scan_attempts=3,
                        *args, **kwargs):
    """Wrapper to get a brick connector object.

    This automatically populates the required protocol as well
    as the root_helper needed to execute commands.

    :param protocol: storage protocol name passed to the os-brick factory
    :param driver: optional driver override forwarded to os-brick
    :param execute: command-execution callable (defaults to processutils)
    :param use_multipath: whether the connector should use multipath
    :param device_scan_attempts: retries for device discovery
    """
    root_helper = get_root_helper()
    return connector.InitiatorConnector.factory(protocol, root_helper,
                                                driver=driver,
                                                execute=execute,
                                                use_multipath=use_multipath,
                                                device_scan_attempts=
                                                device_scan_attempts,
                                                *args, **kwargs)
def require_driver_initialized(driver):
    """Verifies if `driver` is initialized

    If the driver is not initialized, an exception will be raised.

    :params driver: The driver instance.
    :raises: `exception.DriverNotInitialized`
    """
    # we can't do anything if the driver didn't init
    if not driver.initialized:
        driver_name = driver.__class__.__name__
        # Log before raising so operators see which driver failed.
        LOG.error(_LE("Volume driver %s not initialized"), driver_name)
        raise exception.DriverNotInitialized()
def get_file_mode(path):
    """Return the permission bits of *path*.

    This primarily exists to make unit testing easier.
    """
    file_stat = os.stat(path)
    return stat.S_IMODE(file_stat.st_mode)
def get_file_gid(path):
    """Return the group id owning *path*.

    This primarily exists to make unit testing easier.
    """
    file_stat = os.stat(path)
    return file_stat.st_gid
def get_file_size(path):
    """Return the size of *path* in bytes."""
    file_stat = os.stat(path)
    return file_stat.st_size
def _get_disk_of_partition(devpath, st=None):
"""Gets a disk device path and status from partition path.
Returns a disk device path from a partition device path, and stat for
the device. If devpath is not a partition, devpath is returned as it is.
For example, '/dev/sda' is returned for '/dev/sda1', and '/dev/disk1' is
for '/dev/disk1p1' ('p' is prepended to the partition number if the disk
name ends with numbers).
"""
diskpath = re.sub('(?:(?<=\d)p)?\d+$', '', devpath)
if diskpath != devpath:
try:
st_disk = os.stat(diskpath)
if stat.S_ISBLK(st_disk.st_mode):
return (diskpath, st_disk)
except OSError:
pass
# devpath is not a partition
if st is None:
st = os.stat(devpath)
return (devpath, st)
def get_bool_param(param_string, params):
    """Fetch *param_string* from *params* and coerce it to a boolean.

    Missing keys default to False.

    :raises: `exception.InvalidParameterValue` when the value is not a
        recognized boolean string
    """
    param = params.get(param_string, False)
    if is_valid_boolstr(param):
        return strutils.bool_from_string(param, strict=True)
    msg = _('Value %(param)s for %(param_string)s is not a '
            'boolean.') % {'param': param, 'param_string': param_string}
    raise exception.InvalidParameterValue(err=msg)
def get_blkdev_major_minor(path, lookup_for_file=True):
    """Get the 'major:minor' number of a block device.

    Used to control the I/O rate limit of the specified path.  If
    *lookup_for_file* is True and the path is a regular file, the disk
    device the file lies on is looked up and its numbers returned.

    :param path: block device, character device or regular file path
    :param lookup_for_file: resolve regular files to their backing disk
    :returns: 'major:minor' string, or None when rate limiting does not
        apply (character devices, network file systems)
    :raises: `exception.Error` for other file types
    """
    st = os.stat(path)
    if stat.S_ISBLK(st.st_mode):
        path, st = _get_disk_of_partition(path, st)
        return '%d:%d' % (os.major(st.st_rdev), os.minor(st.st_rdev))
    elif stat.S_ISCHR(st.st_mode):
        # No I/O ratelimit control is provided for character devices
        return None
    elif lookup_for_file:
        # lookup the mounted disk which the file lies on
        out, _err = execute('df', path)
        devpath = out.split("\n")[1].split()[0]
        # Value comparison ('!='), not identity ('is not'): identity of
        # string literals is never a reliable test.
        if devpath[0] != '/':
            # the file is on a network file system
            return None
        return get_blkdev_major_minor(devpath, False)
    else:
        msg = _("Unable to get a block device for file \'%s\'") % path
        raise exception.Error(msg)
def check_string_length(value, name, min_length=0, max_length=None):
    """Validate that *value* is a string of acceptable length.

    :param value: the value to check
    :param name: label used in error messages
    :param min_length: minimum allowed length
    :param max_length: maximum allowed length (None disables the check)
    :raises: `exception.InvalidInput` on any violation
    """
    if not isinstance(value, six.string_types):
        raise exception.InvalidInput(
            message=_("%s is not a string or unicode") % name)
    if len(value) < min_length:
        raise exception.InvalidInput(
            message=_("%(name)s has a minimum character requirement of "
                      "%(min_length)s.") % {'name': name,
                                            'min_length': min_length})
    if max_length and len(value) > max_length:
        raise exception.InvalidInput(
            message=_("%(name)s has more than %(max_length)s "
                      "characters.") % {'name': name,
                                        'max_length': max_length})
# Admin metadata keys that may be exposed to non-admin users.
_visible_admin_metadata_keys = ['readonly', 'attached_mode']


def add_visible_admin_metadata(volume):
    """Merge user-visible admin metadata into the regular metadata.

    Extracts the admin metadata keys that may be shown to
    non-administrators and folds them into the normal metadata structure
    of the passed-in volume (modified in place).
    """
    visible_admin_meta = {}

    admin_meta = volume.get('volume_admin_metadata')
    if admin_meta:
        if isinstance(admin_meta, dict):
            for key, value in admin_meta.items():
                if key in _visible_admin_metadata_keys:
                    visible_admin_meta[key] = value
        else:
            for item in admin_meta:
                if item['key'] in _visible_admin_metadata_keys:
                    visible_admin_meta[item['key']] = item['value']
    # avoid circular ref when volume is a Volume instance
    elif isinstance(volume.get('admin_metadata'), dict):
        for key in _visible_admin_metadata_keys:
            if key in volume['admin_metadata']:
                visible_admin_meta[key] = volume['admin_metadata'][key]

    if not visible_admin_meta:
        return

    # NOTE(zhiyan): admin metadata overwrites any existing user
    # metadata entry that uses the same key.
    if volume.get('volume_metadata'):
        orig_meta = list(volume.get('volume_metadata'))
        for item in orig_meta:
            if item['key'] in visible_admin_meta:
                item['value'] = visible_admin_meta.pop(item['key'])
        for key, value in visible_admin_meta.items():
            orig_meta.append({'key': key, 'value': value})
        volume['volume_metadata'] = orig_meta
    # avoid circular ref when vol is a Volume instance
    elif isinstance(volume.get('metadata'), dict):
        volume['metadata'].update(visible_admin_meta)
    else:
        volume['metadata'] = visible_admin_meta
def remove_invalid_filter_options(context, filters,
                                  allowed_search_options):
    """Remove search options that are not valid for non-admin API/context.

    Admin contexts may filter on anything; otherwise any key of
    *filters* not listed in *allowed_search_options* is deleted in
    place.
    """
    if context.is_admin:
        # Allow all options
        return
    unknown_options = [opt for opt in filters
                       if opt not in allowed_search_options]
    LOG.debug("Removing options '%s' from query.",
              ", ".join(unknown_options))
    for opt in unknown_options:
        del filters[opt]
def is_blk_device(dev):
    """Return True when *dev* is a block device, False otherwise."""
    try:
        return stat.S_ISBLK(os.stat(dev).st_mode)
    except Exception:
        # Unreachable/missing paths are simply "not a block device".
        LOG.debug('Path %s not found in is_blk_device check', dev)
        return False
def retry(exceptions, interval=1, retries=3, backoff_rate=2,
          wait_random=False):
    """Decorator factory retrying the wrapped call on given exceptions.

    :param exceptions: exception class or tuple of classes to retry on
    :param interval: base sleep interval between attempts, in seconds
    :param retries: total number of attempts before giving up
    :param backoff_rate: exponential backoff multiplier per attempt
    :param wait_random: sleep a random time between *interval* and the
        computed backoff wait instead of the fixed backoff wait
    :raises ValueError: when *retries* is less than 1
    """
    # Validate up front, before building any closures.
    if retries < 1:
        raise ValueError('Retries must be greater than or '
                         'equal to 1 (received: %s). ' % retries)

    def _retry_on_exception(e):
        return isinstance(e, exceptions)

    def _backoff_sleep(previous_attempt_number, delay_since_first_attempt_ms):
        exp = backoff_rate ** previous_attempt_number
        wait_for = interval * exp
        if wait_random:
            # NOTE: random.uniform accepts float bounds;
            # random.randrange() raises ValueError for non-integers.
            wait_val = random.uniform(interval, wait_for) * 1000.0
        else:
            wait_val = wait_for * 1000.0
        LOG.debug("Sleeping for %s seconds", (wait_val / 1000.0))
        return wait_val

    def _print_stop(previous_attempt_number, delay_since_first_attempt_ms):
        delay_since_first_attempt = delay_since_first_attempt_ms / 1000.0
        LOG.debug("Failed attempt %s", previous_attempt_number)
        LOG.debug("Have been at this for %s seconds",
                  delay_since_first_attempt)
        return previous_attempt_number == retries

    def _decorator(f):
        @six.wraps(f)
        def _wrapper(*args, **kwargs):
            r = retrying.Retrying(retry_on_exception=_retry_on_exception,
                                  wait_func=_backoff_sleep,
                                  stop_func=_print_stop)
            return r.call(f, *args, **kwargs)
        return _wrapper

    return _decorator
def convert_version_to_int(version):
    """Convert a dotted version string (or tuple) to a packed integer.

    Each component occupies three decimal digits: '1.2.3' -> 1002003.

    :raises: `exception.CinderException` when the version is malformed
    """
    try:
        if isinstance(version, six.string_types):
            version = convert_version_to_tuple(version)
        if isinstance(version, tuple):
            return six.moves.reduce(lambda x, y: (x * 1000) + y, version)
    except Exception:
        raise exception.CinderException(_("Version %s is invalid.") % version)
def convert_version_to_str(version_int):
    """Convert a packed integer version back to a dotted string.

    Inverse of convert_version_to_int(): 1002003 -> '1.2.3'.
    """
    parts = []
    remaining = version_int
    while remaining != 0:
        # Each version component occupies three decimal digits.
        remaining, component = divmod(remaining, 1000)
        parts.insert(0, component)
    return '.'.join(str(p) for p in parts)
def convert_version_to_tuple(version_str):
    """Split a dotted version string into a tuple of ints: '1.2' -> (1, 2)."""
    return tuple(map(int, version_str.split('.')))
def convert_str(text):
    """Convert to native string.

    Convert bytes and Unicode strings to native strings:

    * on Python 2, encode Unicode using encodeutils.safe_encode()
    * on Python 3, decode bytes from UTF-8 and pass str through unchanged
    """
    if six.PY2:
        return encodeutils.safe_encode(text)
    if isinstance(text, bytes):
        return text.decode('utf-8')
    return text
def trace_method(f):
    """Decorate *f* so it is traced only while TRACE_METHOD is true."""
    @functools.wraps(f)
    def trace_method_logging_wrapper(*args, **kwargs):
        # The flag is checked at call time, so tracing can be toggled
        # after the function has been decorated.
        target = trace(f) if TRACE_METHOD else f
        return target(*args, **kwargs)
    return trace_method_logging_wrapper
def trace_api(f):
    """Decorate *f* so it is traced only while TRACE_API is true."""
    @functools.wraps(f)
    def trace_api_logging_wrapper(*args, **kwargs):
        # The flag is checked at call time, so tracing can be toggled
        # after the function has been decorated.
        target = trace(f) if TRACE_API else f
        return target(*args, **kwargs)
    return trace_api_logging_wrapper
def trace(f):
    """Trace calls to the decorated function.

    This decorator should always be defined as the outermost decorator so it
    is defined last. This is important so it does not interfere
    with other decorators.

    Using this decorator on a function will cause its execution to be logged at
    `DEBUG` level with arguments, return values, and exceptions.

    :returns: a function decorator
    """
    func_name = f.__name__

    @functools.wraps(f)
    def trace_logging_wrapper(*args, **kwargs):
        # Prefer the logger of the traced object's own module so messages
        # are attributed to the caller rather than this utility module.
        if len(args) > 0:
            maybe_self = args[0]
        else:
            maybe_self = kwargs.get('self', None)

        if maybe_self and hasattr(maybe_self, '__module__'):
            logger = logging.getLogger(maybe_self.__module__)
        else:
            logger = LOG

        # NOTE(ameade): Don't bother going any further if DEBUG log level
        # is not enabled for the logger.
        if not logger.isEnabledFor(py_logging.DEBUG):
            return f(*args, **kwargs)

        all_args = inspect.getcallargs(f, *args, **kwargs)
        logger.debug('==> %(func)s: call %(all_args)r',
                     {'func': func_name, 'all_args': all_args})

        # Wall-clock time in milliseconds; also logged for exceptions.
        start_time = time.time() * 1000
        try:
            result = f(*args, **kwargs)
        except Exception as exc:
            total_time = int(round(time.time() * 1000)) - start_time
            logger.debug('<== %(func)s: exception (%(time)dms) %(exc)r',
                         {'func': func_name,
                          'time': total_time,
                          'exc': exc})
            raise
        total_time = int(round(time.time() * 1000)) - start_time
        logger.debug('<== %(func)s: return (%(time)dms) %(result)r',
                     {'func': func_name,
                      'time': total_time,
                      'result': result})
        return result
    return trace_logging_wrapper
class TraceWrapperMetaclass(type):
    """Metaclass that wraps all methods of a class with trace_method.

    Every plain function found in the class body is replaced with a
    trace_method-wrapped version. To use the metaclass you define a
    class like so:

        @six.add_metaclass(utils.TraceWrapperMetaclass)
        class MyClass(object):
    """
    def __new__(meta, classname, bases, classDict):
        newClassDict = {}
        for name, attr in classDict.items():
            if isinstance(attr, types.FunctionType):
                # Replace the function with a traced wrapper.
                attr = functools.update_wrapper(trace_method(attr), attr)
            newClassDict[name] = attr
        return type.__new__(meta, classname, bases, newClassDict)
class TraceWrapperWithABCMetaclass(abc.ABCMeta, TraceWrapperMetaclass):
    """Metaclass that wraps all methods of a class with trace."""
    # Combines ABCMeta with TraceWrapperMetaclass so abstract base
    # classes can also have their methods traced.
    pass
def setup_tracing(trace_flags):
    """Set global variables for each trace flag.

    Sets variables TRACE_METHOD and TRACE_API, which represent
    whether to log method and api traces.  Unknown flags are logged
    and ignored.

    :param trace_flags: a list of strings
    """
    global TRACE_METHOD
    global TRACE_API
    try:
        trace_flags = [flag.strip() for flag in trace_flags]
    except TypeError:  # Handle when trace_flags is None or a test mock
        trace_flags = []
    for invalid_flag in set(trace_flags) - VALID_TRACE_FLAGS:
        LOG.warning(_LW('Invalid trace flag: %s'), invalid_flag)
    TRACE_METHOD = 'method' in trace_flags
    TRACE_API = 'api' in trace_flags
def resolve_hostname(hostname):
    """Resolve a host name to an IP address.

    An input that is already an IP address resolves to itself.

    :param hostname: Host name to resolve.
    :return: IP Address for Host name.
    """
    family, socktype, proto, canonname, sockaddr = \
        socket.getaddrinfo(hostname, None)[0]
    LOG.debug('Asked to resolve hostname %(host)s and got IP %(ip)s.',
              {'host': hostname, 'ip': sockaddr[0]})
    return sockaddr[0]
|
import sys
import bottle
import commands
from bottle import route, send_file, template
@route('/')
def index():
    """Render the master web UI front page."""
    bottle.TEMPLATES.clear()  # For rapid development
    return template("index", master_port=master_port)
@route('/framework/:id#[0-9-]*#')
def framework(id):
    """Render the detail page for a single framework."""
    bottle.TEMPLATES.clear()  # For rapid development
    return template("framework", master_port=master_port, framework_id=id)
@route('/static/:filename#.*#')
def static(filename):
    """Serve a static asset from the web UI directory."""
    send_file(filename, root='./webui/static')
@route('/log/:level#[A-Z]*#')
def log_full(level):
    """Serve the whole master log file at the given level as plain text."""
    send_file('mesos-master.' + level, root=log_dir,
              guessmime=False, mimetype='text/plain')
@route('/log/:level#[A-Z]*#/:lines#[0-9]*#')
def log_tail(level, lines):
    """Return the last *lines* lines of the master log at *level*."""
    bottle.response.content_type = 'text/plain'
    # The route pattern restricts *lines* to digits and *level* to
    # uppercase letters, bounding what reaches the shell command.
    return commands.getoutput(
        'tail -%s %s/mesos-master.%s' % (lines, log_dir, level))
# Template lookup path for the master web UI pages.
bottle.TEMPLATE_PATH.append('./webui/master/')

# Command line arguments: master port, web UI port, log directory.
master_port = sys.argv[1]
webui_port = sys.argv[2]
log_dir = sys.argv[3]

# Debug mode on; serve on all interfaces at the requested port.
bottle.debug(True)
bottle.run(host = '0.0.0.0', port = webui_port)
|
from requestbuilder import Arg
from requestbuilder.response import PaginatedResponse
from euca2ools.commands.iam import IAMRequest, arg_account_name
from euca2ools.commands.iam.getaccountpolicy import GetAccountPolicy
class ListAccountPolicies(IAMRequest):
    """List the policies attached to a Eucalyptus account."""

    # Fixed duplicated word ("policies policies") in the description.
    DESCRIPTION = ('[Eucalyptus only] List one or all policies '
                   'attached to an account')
    ARGS = [arg_account_name(help='''name or ID of the account owning
                             the policies to list (required)'''),
            Arg('-p', '--policy-name', metavar='POLICY', route_to=None,
                help='display a specific policy'),
            Arg('-v', '--verbose', action='store_true', route_to=None,
                help='''display the contents of the resulting policies (in
                addition to their names)'''),
            Arg('--pretty-print', action='store_true', route_to=None,
                help='''when printing the contents of policies, reformat them
                for easier reading''')]
    LIST_TAGS = ['PolicyNames']

    def main(self):
        # Pages are fetched lazily through prepare_for_page/get_next_page.
        return PaginatedResponse(self, (None,), ('PolicyNames',))

    def prepare_for_page(self, page):
        # Pages are defined by markers
        self.params['Marker'] = page

    def get_next_page(self, response):
        # The service sets IsTruncated when more pages remain.
        if response.get('IsTruncated') == 'true':
            return response['Marker']

    def print_result(self, result):
        if self.args.get('policy_name'):
            # Look for the specific policy the user asked for
            for policy_name in result.get('PolicyNames', []):
                if policy_name == self.args['policy_name']:
                    if self.args['verbose']:
                        self.print_policy(policy_name)
                    else:
                        print(policy_name)
                    break
        else:
            for policy_name in result.get('PolicyNames', []):
                print(policy_name)
                if self.args['verbose']:
                    self.print_policy(policy_name)

    def print_policy(self, policy_name):
        # Delegate to GetAccountPolicy so the output formatting of a
        # single policy stays consistent with that command.
        req = GetAccountPolicy(
            service=self.service, AccountName=self.args['AccountName'],
            PolicyName=policy_name, pretty_print=self.args['pretty_print'])
        response = req.main()
        req.print_result(response)
|
from typing import List
import numpy as np
from scipy import signal
from cerebralcortex.data_processor.signalprocessing.dataquality import Quality
from cerebralcortex.kernel.datatypes.datapoint import DataPoint
from cerebralcortex.kernel.datatypes.datastream import DataStream
def filter_bad_ecg(ecg: DataStream,
                   ecg_quality: DataStream) -> DataStream:
    """
    Combine the raw ecg and ecg quality datastreams, keeping only the
    datapoints whose quality window is marked acceptable.

    :param ecg: raw ecg datastream
    :param ecg_quality: ecg quality datastream
    :return: filtered ecg datastream
    """
    ecg_filtered = DataStream.from_datastream([ecg])
    raw_timestamps = np.array([dp.start_time.timestamp() for dp in ecg.data])

    kept_datapoints = []
    scan_start = 0
    for window in ecg_quality.data:
        if window.sample != Quality.ACCEPTABLE:
            continue
        last_kept = scan_start
        for idx in range(scan_start, len(ecg.data)):
            if (window.start_time.timestamp() <= raw_timestamps[idx]
                    <= window.end_time.timestamp()):
                kept_datapoints.append(ecg.data[idx])
                last_kept = idx
        # Quality windows arrive in order; resume the scan where the
        # previous acceptable window left off.
        scan_start = last_kept

    ecg_filtered.data = kept_datapoints
    return ecg_filtered
def compute_rr_intervals(ecg: DataStream,
                         ecg_quality: DataStream,
                         fs: float) -> DataStream:
    """
    Filter the ecg datastream by quality first, then compute the
    rr-interval datastream from it.

    :param ecg: ecg datastream
    :param ecg_quality: ecg quality annotated datastream
    :param fs: sampling frequency
    :return: rr-interval datastream
    """
    # R-peaks are detected on the quality-filtered signal only.
    return detect_rpeak(filter_bad_ecg(ecg, ecg_quality), fs)
def rr_interval_update(rpeak_temp1: List[DataPoint],
                       rr_ave: float,
                       min_size: int = 8) -> float:
    """
    Running average of the RR interval over the last *min_size* peaks.

    :param min_size: number of trailing R-peaks used for the average
    :param rpeak_temp1: R peak locations
    :param rr_ave: previous rr-interval average, returned unchanged when
        fewer than *min_size* intervals are available
    :return: the new rr-interval average
    """
    intervals = np.diff([0] + rpeak_temp1)
    if len(intervals) < min_size:
        return rr_ave
    return np.sum(intervals[-min_size:]) / min_size
def compute_moving_window_int(sample: np.ndarray,
                              fs: float,
                              blackman_win_length: int,
                              filter_length: int = 257,
                              delta: float = .02) -> np.ndarray:
    """
    Moving-window integration of an ecg sample array (Pan-Tompkins style).

    The signal is bandpass filtered, differentiated, squared, then
    integrated under a Blackman window; every stage is normalized by its
    own 90th percentile.

    :param sample: ecg sample array
    :param fs: sampling frequency
    :param blackman_win_length: Blackman window length for the integration
    :param filter_length: length of the FIR bandpass filter
    :param delta: controls the weights of each band in the FIR design
    :return: the moving window integration of the sample array
    """
    # FIR bandpass design: band edges in normalized frequency, with the
    # pass band roughly 5-20 Hz.
    band_edges = [0, 4.5 * 2 / fs, 5 * 2 / fs, 20 * 2 / fs, 20.5 * 2 / fs, 1]
    band_gains = [0, 0, 1, 1, 0, 0]
    band_weights = [500 / delta, 1 / delta, 500 / delta]
    fir_coeff = signal.firls(filter_length, band_edges, band_gains,
                             band_weights)

    bandpassed = signal.convolve(sample, fir_coeff, 'same')
    bandpassed /= np.percentile(bandpassed, 90)

    # Five-point derivative; squaring afterwards emphasizes QRS slopes.
    diff_kernel = (np.array([-1.0, -2.0, 0, 2.0, 1.0])) * (1 / 8)
    differentiated = signal.convolve(bandpassed, diff_kernel, 'same')
    differentiated /= np.percentile(differentiated, 90)

    squared = differentiated ** 2
    squared /= np.percentile(squared, 90)

    # Smooth with a Blackman window to get the integration waveform.
    integrated = signal.convolve(squared, np.blackman(blackman_win_length),
                                 'same')
    integrated /= np.percentile(integrated, 90)
    return integrated
def check_peak(data: List[DataPoint]) -> bool:
    """
    Check whether the midpoint of *data* is a simple peak: values must
    strictly increase up to the midpoint and strictly decrease after it.

    :param data: window of samples centered on the candidate peak
    :return: whether the center of the window is a peak
    """
    if len(data) < 3:
        return False

    midpoint = int(len(data) / 2)
    current = data[0]
    # Strictly rising up to and including the midpoint...
    for value in data[1:midpoint + 1]:
        if not current < value:
            return False
        current = value
    # ...then strictly falling afterwards.
    for value in data[midpoint + 1:]:
        if not current > value:
            return False
        current = value
    return True
def compute_r_peaks(threshold_1: float,
                    rr_ave: float,
                    mov_win_int_signal: np.ndarray,
                    peak_tuple_array: List[tuple]) -> list:
    """
    This function does the adaptive thresholding of the signal to get the R-peak locations

    :param threshold_1: Thr1 is the threshold above which the R peak
    :param rr_ave: running RR-interval average
    :param mov_win_int_signal: signal sample array
    :param peak_tuple_array: A tuple array containing location and values of the simple peaks detected in the process before
    :returns rpeak_array_indices: The location of the R peaks in the signal sample array once found this is returned
    """
    peak_location_in_signal_array = [i[0] for i in peak_tuple_array]  # location of the simple peaks in signal array
    amplitude_in_peak_locations = [i[1] for i in peak_tuple_array]  # simple peak's amplitude in signal array
    threshold_2 = 0.5 * threshold_1  # any signal value between threshold_2 and threshold_1 is a noise peak
    sig_lev = 4 * threshold_1  # current signal level -any signal above thrice the signal level is discarded as a spurious value
    noise_lev = 0.1 * sig_lev  # current noise level of the signal
    ind_rpeak = 0
    rpeak_array_indices = []
    rpeak_inds_in_peak_array = []
    while ind_rpeak < len(peak_location_in_signal_array):
        # if for 166 percent of the present RR interval no peak is detected as R peak then threshold_2 is taken as the
        # R peak threshold and the maximum of the range is taken as a R peak and RR interval is updated accordingly
        if len(rpeak_array_indices) >= 1 and peak_location_in_signal_array[ind_rpeak] - peak_location_in_signal_array[
                rpeak_inds_in_peak_array[-1]] > 1.66 * rr_ave and ind_rpeak - rpeak_inds_in_peak_array[-1] > 1:
            # values and indexes of previous peaks discarded as not an R peak whose magnitude is above threshold_2
            searchback_array = [(k - rpeak_inds_in_peak_array[-1], amplitude_in_peak_locations[k]) for k in
                                range(rpeak_inds_in_peak_array[-1] + 1, ind_rpeak) if
                                3 * sig_lev > amplitude_in_peak_locations[k] > threshold_2]
            if len(searchback_array) > 0:
                # maximum inside the range calculated beforehand is taken as R peak
                searchback_array_inrange_values = [x[1] for x in searchback_array]
                searchback_max_index = np.argmax(searchback_array_inrange_values)
                rpeak_array_indices.append(peak_location_in_signal_array[
                    rpeak_inds_in_peak_array[-1] + searchback_array[searchback_max_index][
                        0]])
                rpeak_inds_in_peak_array.append(
                    rpeak_inds_in_peak_array[-1] + searchback_array[searchback_max_index][0])
                sig_lev = ewma(sig_lev, mov_win_int_signal[peak_location_in_signal_array[ind_rpeak]],
                               .125)  # update the current signal level
                threshold_1 = noise_lev + 0.25 * (sig_lev - noise_lev)
                threshold_2 = 0.5 * threshold_1
                rr_ave = rr_interval_update(rpeak_array_indices, rr_ave)
                # resume scanning right after the searched-back R peak
                ind_rpeak = rpeak_inds_in_peak_array[-1] + 1
            else:
                # no candidate found in the search-back window: lower the
                # thresholds and move on to the next simple peak
                threshold_1 = noise_lev + 0.25 * (sig_lev - noise_lev)
                threshold_2 = 0.5 * threshold_1
                ind_rpeak += 1
        else:
            # R peak checking
            if threshold_1 <= mov_win_int_signal[peak_location_in_signal_array[ind_rpeak]] < 3 * sig_lev:
                rpeak_array_indices.append(peak_location_in_signal_array[ind_rpeak])
                rpeak_inds_in_peak_array.append(ind_rpeak)
                sig_lev = ewma(sig_lev, mov_win_int_signal[peak_location_in_signal_array[ind_rpeak]],
                               .125)  # update the signal level
            # noise peak checking
            elif threshold_1 > mov_win_int_signal[peak_location_in_signal_array[ind_rpeak]] > threshold_2:
                noise_lev = ewma(noise_lev, mov_win_int_signal[peak_location_in_signal_array[ind_rpeak]],
                                 .125)  # update the noise level
            # recompute both thresholds from the updated levels
            threshold_1 = noise_lev + 0.25 * (sig_lev - noise_lev)
            threshold_2 = 0.5 * threshold_1
            ind_rpeak += 1
            rr_ave = rr_interval_update(rpeak_array_indices, rr_ave)
    return rpeak_array_indices
def ewma(value: float, new_value: float, alpha: float) -> float:
    """
    Exponentially weighted moving average update.

    :param value: current running average
    :param new_value: newly observed sample
    :param alpha: smoothing factor; higher values favor the new sample
    :return: the updated average
    """
    return alpha * new_value + (1 - alpha) * value
def remove_close_peaks(rpeak_temp1: list,
                       sample: np.ndarray,
                       fs: float,
                       min_range: float = .5) -> list:
    """
    Of any two consecutive R peaks closer than ``min_range * fs``
    samples, drop the one with the smaller amplitude.  Passes are
    repeated until nothing more is removed.

    :param min_range: minimum allowed peak distance, in seconds
    :param rpeak_temp1: R peak array containing the index of the R peaks
    :param sample: sample array
    :param fs: sampling frequency
    :return: R peak array with no close R peaks
    """
    # Work on a copy: the previous code aliased the caller's list and
    # mutated it in place via remove().
    rpeak_temp2 = list(rpeak_temp1)
    difference = 0
    while difference != 1:
        length_before = len(rpeak_temp2)
        gaps = np.diff(rpeak_temp2)
        # Left/right members of every neighbouring pair that is too close.
        close_left = [rpeak_temp2[i] for i in range(len(gaps))
                      if gaps[i] < min_range * fs]
        close_right = [rpeak_temp2[i + 1] for i in range(len(gaps))
                       if gaps[i] < min_range * fs]
        amplitudes = np.matrix([sample[close_left], sample[close_right]])
        pair_positions = [i for i in range(len(gaps))
                          if gaps[i] < min_range * fs]
        # argmin gives 0 for the left peak of a pair, 1 for the right;
        # adding the pair position yields the list index to delete.
        smaller_of_pair = np.argmin(np.array(amplitudes), axis=0)
        to_delete = np.unique(pair_positions + smaller_of_pair)
        removed = 0
        for idx in to_delete:
            # Offset by the removals already done this pass.
            rpeak_temp2.remove(rpeak_temp2[idx - removed])
            removed += 1
        # One extra so "nothing removed" makes the loop terminate.
        difference = length_before - len(rpeak_temp2) + 1
    return rpeak_temp2
def confirm_peaks(rpeak_temp1: list,
                  sample: np.ndarray,
                  fs: float,
                  range_for_checking: float = 1 / 10) -> np.ndarray:
    """
    Final check on the detected R peaks: each one (except the first and
    last) is snapped to the maximum sample within a window of fs/10
    around its current location.

    :param rpeak_temp1: R peak array containing the index of the R peaks
    :param sample: sample array
    :param fs: sampling frequency
    :param range_for_checking: half-width of the search window (seconds)
    :return: final R peak array
    """
    half_window = np.ceil(range_for_checking * fs)
    for i in range(1, len(rpeak_temp1) - 1):
        window = sample[int(rpeak_temp1[i] - half_window):
                        int(rpeak_temp1[i] + half_window + 1)]
        rpeak_temp1[i] = rpeak_temp1[i] - half_window + np.argmax(window)
    return np.array(rpeak_temp1).astype(np.int64)
def detect_rpeak(ecg: DataStream,
                 fs: float = 64,
                 threshold: float = 0.5,
                 blackman_win_len_range: float = 0.2) -> DataStream:
    """
    Pan-Tompkins R-peak detection over an ECG datastream.

    Since the ecg array can have discontinuities in the timestamps, the
    rr-interval used internally is computed in terms of sample indices.
    The major steps are:

    1. moving window integration with a Blackman window of a prescribed length
    2. detection of all simple peaks of the integrated signal
    3. adaptive thresholding to select the R peak locations
    4. confirmation of the R peaks and removal of false ones

    :param ecg: ecg array of tuples (timestamp, value)
    :param fs: sampling frequency
    :param threshold: initial threshold over the normalized signal (.5 default)
    :param blackman_win_len_range: factor for the Blackman window length
    :return: R peak array of tuples (timestamp, Rpeak interval)
    """
    data = ecg.data
    result = DataStream.from_datastream([ecg])
    if not data:
        result.data = []
        return result

    sample = np.array([dp.sample for dp in data])
    timestamp = np.array([dp.start_time for dp in data])

    # Step 1: moving window integration.
    blackman_win_len = np.ceil(fs * blackman_win_len_range)
    integrated = compute_moving_window_int(sample, fs, blackman_win_len)

    # Step 2: simple peaks of the integrated signal.
    peak_location_values = [(i, integrated[i])
                            for i in range(2, len(integrated) - 1)
                            if check_peak(integrated[i - 2:i + 3])]
    peak_location = [loc for loc, _value in peak_location_values]

    # Initial RR interval average.
    running_rr_avg = sum(np.diff(peak_location)) / (len(peak_location) - 1)

    # Steps 3 and 4: adaptive thresholding, then clean-up/confirmation.
    rpeak_candidates = compute_r_peaks(threshold, running_rr_avg, integrated,
                                       peak_location_values)
    rpeak_spaced = remove_close_peaks(rpeak_candidates, sample, fs)
    index = confirm_peaks(rpeak_spaced, sample, fs)

    # RR intervals in seconds, stamped at the later peak of each pair.
    rpeak_timestamp = timestamp[index]
    rpeak_value = np.diff(rpeak_timestamp)
    rpeak_timestamp = rpeak_timestamp[1:]

    result.data = [
        DataPoint.from_tuple(
            rpeak_timestamp[k],
            rpeak_value[k].seconds + rpeak_value[k].microseconds / 1e6)
        for k in range(len(rpeak_value))]
    return result
|
from .apitask import APITask
from thing.models import RefType
class RefTypes(APITask):
    """Synchronize the RefType table with the API's reference type list."""

    name = 'thing.ref_types'

    def run(self, url, taskstate_id, apikey_id, zero):
        if self.init(taskstate_id, apikey_id) is False:
            return

        # Fetch the API data
        if self.fetch_api(url, {}, use_auth=False) is False or self.root is None:
            return

        # Map refTypeID -> API row for every entry in the response.
        rows_by_id = {}
        for row in self.root.findall('result/rowset/row'):
            rows_by_id[int(row.attrib['refTypeID'])] = row

        # Bulk retrieve the RefTypes that already exist.
        rt_map = RefType.objects.in_bulk(rows_by_id.keys())

        new_reftypes = []
        for ref_type_id, row in rows_by_id.items():
            reftype = rt_map.get(ref_type_id)
            if reftype is None:
                # RefType does not exist, make a new one
                new_reftypes.append(RefType(
                    id=ref_type_id,
                    name=row.attrib['refTypeName'],
                ))
            elif reftype.name != row.attrib['refTypeName']:
                # RefType exists and its name has changed, update it
                reftype.name = row.attrib['refTypeName']
                reftype.save()

        # Create any new RefTypes in a single query.
        if new_reftypes:
            RefType.objects.bulk_create(new_reftypes)

        return True
|
from django.conf import settings as dsettings
from django.contrib.auth import models as authModels
from django.core.paginator import Paginator, InvalidPage, EmptyPage
from django.http import HttpResponse, Http404
from django.shortcuts import render, render_to_response, get_object_or_404
from django.template import RequestContext
from django.template.defaultfilters import slugify
from microblog import models, settings
from taggit.models import Tag, TaggedItem
from decorator import decorator
try:
import json
except ImportError:
import simplejson as json
def render_json(f):
    """
    Decorator to apply to a view in order to serialize its result as JSON.
    """
    # In DEBUG the payload is pretty-printed and served as text/plain so
    # it is easy to inspect in a browser; otherwise compact JSON.
    if dsettings.DEBUG:
        ct = 'text/plain'
        j = lambda d: json.dumps(d, indent=2)
    else:
        ct = 'application/json'
        j = json.dumps
    def wrapper(func, *args, **kw):
        try:
            result = func(*args, **kw)
        except Exception, e:
            # Serialize the error message and report a server error.
            result = j(str(e))
            status = 500
        else:
            if isinstance(result, HttpResponse):
                # Already a full response: pass it through untouched.
                return result
            else:
                result = j(result)
                status = 200
        return HttpResponse(content=result, content_type=ct, status=status)
    return decorator(wrapper, f)
def post_list(request):
    # Render the post list template; it needs no extra context here.
    return render(request, 'microblog/post_list.html', {})
def category(request, category):
    """Render the page listing posts of a single category (404 if unknown)."""
    instance = get_object_or_404(models.Category, name=category)
    return render_to_response(
        'microblog/category.html',
        {'category': instance},
        context_instance=RequestContext(request)
    )
def post_list_by_year(request, year, month=None):
    """Render the archive page for a given year and optional month."""
    context = {'year': year, 'month': month}
    return render_to_response('microblog/list_by_year.html', context,
                              context_instance=RequestContext(request))
def tag(request, tag):
    """Render the page listing posts marked with a single tag (404 if unknown)."""
    found = get_object_or_404(Tag, name=tag)
    return render_to_response('microblog/tag.html', {'tag': found},
                              context_instance=RequestContext(request))
def author(request, author):
    """Render the page of the first user whose name slug matches *author*."""
    for candidate in authModels.User.objects.all():
        slug = slugify('%s-%s' % (candidate.first_name, candidate.last_name))
        if slug == author:
            user = candidate
            break
    else:
        raise Http404()
    return render_to_response(
        'microblog/author.html',
        {'author': user},
        context_instance=RequestContext(request)
    )
def _paginate_posts(post_list, request):
    """Return the page of *post_list* requested via ?page=N.

    When MICROBLOG_POST_LIST_PAGINATION is off, everything is returned
    as one single page.  Invalid or out-of-range page numbers fall back
    to page 1.
    """
    if not settings.MICROBLOG_POST_LIST_PAGINATION:
        paginator = Paginator(post_list, len(post_list) or 1)
        return paginator.page(1)

    paginator = Paginator(post_list, settings.MICROBLOG_POST_PER_PAGE)
    try:
        page_number = int(request.GET.get("page", "1"))
    except ValueError:
        page_number = 1
    try:
        return paginator.page(page_number)
    except (EmptyPage, InvalidPage):
        return paginator.page(1)
def _posts_list(request, featured=False):
    """Published posts in the viewer's language, optionally featured-only.

    When language fallback is enabled the language filter is skipped
    entirely (lang=None).
    """
    if settings.MICROBLOG_LANGUAGE_FALLBACK_ON_POST_LIST:
        lang = None
    else:
        lang = request.LANGUAGE_CODE
    qs = models.Post.objects.byLanguage(lang)
    qs = qs.byFeatured(featured)
    return qs.published()
def _post_detail(request, content):
    """Render a single post, honoring the configured visibility filter."""
    if not settings.MICROBLOG_POST_FILTER([content.post], request.user):
        raise Http404()
    context = {'post': content.post, 'content': content}
    return render_to_response('microblog/post_detail.html', context,
                              context_instance=RequestContext(request))
def _trackback_ping(request, content):
    # Server side of the Trackback protocol: respond with a small XML
    # document whose <error> element is 0 on success, 1 (plus a message)
    # on failure.
    def success():
        x = ('<?xml version="1.0" encoding="utf-8"?>\n'
             '<response><error>0</error></response>')
        return HttpResponse(content=x, content_type='text/xml')
    def failure(message=''):
        x = ('<?xml version="1.0" encoding="utf-8"?>\n'
             '<response><error>1</error><message>%s</message></response>') % message
        return HttpResponse(content=x, content_type='text/xml', status=400)
    if request.method != 'POST':
        return failure('only POST method is supported')
    if not request.POST.get('url'):
        return failure('url argument is mandatory')
    t = {
        'url': request.POST['url'],
        'blog_name': request.POST.get('blog_name', ''),
        'title': request.POST.get('title', ''),
        'excerpt': request.POST.get('excerpt', ''),
    }
    # NOTE(review): imported here rather than at module level —
    # presumably to avoid an import cycle; confirm.
    from microblog.moderation import moderate
    if not moderate(request, 'trackback', t['title'], url=t['url']):
        return failure('moderated')
    content.new_trackback(**t)
    return success()
@render_json
def _comment_count(request, content):
    # Number of comments for a post: either from the local
    # django_comments tables or from the remote Disqus API, depending on
    # the MICROBLOG_COMMENT setting.  Returns -1 when the remote count
    # cannot be determined.
    post = content.post
    if settings.MICROBLOG_COMMENT == 'comment':
        import django_comments as comments
        from django.contrib.contenttypes.models import ContentType
        model = comments.get_model()
        q = model.objects.filter(
            content_type=ContentType.objects.get_for_model(post),
            object_pk=post.id,
            is_public=True
        )
        return q.count()
    else:
        import httplib2
        from urllib import quote
        h = httplib2.Http()
        params = {
            'forum_api_key': settings.MICROBLOG_COMMENT_DISQUS_FORUM_KEY,
            'url': content.get_url(),
        }
        args = '&'.join('%s=%s' % (k, quote(v)) for k, v in params.items())
        url = settings.MICROBLOG_COMMENT_DISQUS_API_URL + 'get_thread_by_url?%s' % args
        resp, page = h.request(url)
        if resp.status != 200:
            # HTTP failure: count unknown.
            return -1
        page = json.loads(page)
        if not page['succeeded']:
            return -1
        elif page['message'] is None:
            # No thread exists yet for this URL.
            return 0
        else:
            return page['message']['num_comments']
def _post404(f):
def wrapper(*args, **kw):
try:
return f(*args, **kw)
except models.PostContent.DoesNotExist:
raise Http404()
return wrapper
if settings.MICROBLOG_URL_STYLE == 'date':
    def _get(slug, year, month, day):
        """Resolve a PostContent by slug and publication date."""
        return models.PostContent.objects \
            .select_related('post') \
            .getBySlugAndDate(slug, year, month, day)

    @_post404
    def post_detail(request, year, month, day, slug):
        return _post_detail(request, content=_get(slug, year, month, day))

    @_post404
    def trackback_ping(request, year, month, day, slug):
        return _trackback_ping(request, content=_get(slug, year, month, day))

    @_post404
    def comment_count(request, year, month, day, slug):
        return _comment_count(request, content=_get(slug, year, month, day))
elif settings.MICROBLOG_URL_STYLE == 'category':
    def _get(slug, category):
        """Resolve a PostContent by slug and category."""
        return models.PostContent.objects \
            .select_related('post') \
            .getBySlugAndCategory(slug, category)

    @_post404
    def post_detail(request, category, slug):
        return _post_detail(request, content=_get(slug, category))

    @_post404
    def trackback_ping(request, category, slug):
        return _trackback_ping(request, content=_get(slug, category))

    @_post404
    def comment_count(request, category, slug):
        return _comment_count(request, content=_get(slug, category))
|
import sys
from collections import namedtuple
import poppler
import cairo
from os.path import abspath
# Basic geometric primitives used to describe annotation shapes.
Point = namedtuple('Point', ['x', 'y'])
Line = namedtuple('Line', ['start', 'end'])  # start/end are Points
Polygon = namedtuple('Polygon', 'points')  # points: sequence of Points
Rectangle = namedtuple('Rectangle', ['top_left', 'bottom_right'])
# A named, colored collection of shapes drawn as one logical layer.
AnnotationGroup = namedtuple('AnnotationGroup', ['name', 'color', 'shapes'])
Color = namedtuple('Color', ['red', 'green', 'blue'])  # channels in 0..1
__all__ = [
    'render_page',
    'make_annotations',
]
def draw_line(context, line):
    """Stroke a single straight segment on *context*."""
    start, end = line.start, line.end
    context.move_to(start.x, start.y)
    context.line_to(end.x, end.y)
    context.stroke()
def draw_polygon(context, polygon):
    """Stroke an open poly-line through polygon.points (no-op when empty)."""
    if not polygon.points:
        return
    head = polygon.points[0]
    context.move_to(head.x, head.y)
    for vertex in polygon.points[1:]:
        context.line_to(vertex.x, vertex.y)
    context.stroke()
def draw_rectangle(context, rectangle):
    """Stroke an axis-aligned rectangle defined by two corner points."""
    top_left = rectangle.top_left
    bottom_right = rectangle.bottom_right
    context.rectangle(top_left.x,
                      top_left.y,
                      abs(bottom_right.x - top_left.x),
                      abs(bottom_right.y - top_left.y))
    context.stroke()
# Dispatch table mapping a shape type to the function that draws it; used
# by CairoPdfPageRenderer.draw().
RENDERERS = {}
RENDERERS[Line] = draw_line
RENDERERS[Rectangle] = draw_rectangle
RENDERERS[Polygon] = draw_polygon
class CairoPdfPageRenderer(object):
    """Renders a poppler PDF page (plus annotation overlays) to SVG/PNG.

    The output surface is three page-widths wide; the page is rendered three
    times side by side with different text/background colours (see
    ``_offset_colors``).  Annotation shapes are drawn onto the middle pane.
    """
    def __init__(self, pdf_page, svg_filename, png_filename):
        # PNG output is optional; the SVG is always written.
        self._svg_filename = abspath(svg_filename)
        self._png_filename = abspath(png_filename) if png_filename else None
        self._context, self._surface = self._get_context(
            svg_filename, *pdf_page.get_size())
        # Poppler colour channels are 16-bit (0..65535).
        white = poppler.Color()
        white.red = white.green = white.blue = 65535
        black = poppler.Color()
        black.red = black.green = black.blue = 0
        # red = poppler.Color()
        # red.red = red.green = red.blue = 0
        # red.red = 65535
        width = pdf_page.get_size()[0]
        # We render everything 3 times, moving
        # one page-width to the right each time.
        # Tuple layout: (x offset, text fg, text bg, render page graphics?).
        self._offset_colors = [
            (0, white, white, True),
            (width, black, white, True),
            (2 * width, black, black, False)
        ]
        for offset, fg_color, bg_color, render_graphics in self._offset_colors:
            # Render into context, with a different offset
            # each time.
            self._context.save()
            self._context.translate(offset, 0)
            # Select the whole page so every glyph is drawn by
            # render_selection below.
            sel = poppler.Rectangle()
            sel.x1, sel.y1 = (0, 0)
            sel.x2, sel.y2 = pdf_page.get_size()
            if render_graphics:
                pdf_page.render(self._context)
            pdf_page.render_selection(
                self._context, sel, sel, poppler.SELECTION_GLYPH,
                fg_color, bg_color)
            self._context.restore()
    @staticmethod
    def _get_context(filename, width, height):
        # Build the cairo surface/context sized for three side-by-side
        # renderings of one page.
        SCALE = 1
        # left, middle, right
        N_RENDERINGS = 3
        surface = cairo.SVGSurface(
            filename, N_RENDERINGS * width * SCALE, height * SCALE)
        # srf = cairo.ImageSurface(
        #     cairo.FORMAT_RGB24, int(w*SCALE), int(h*SCALE))
        context = cairo.Context(surface)
        context.scale(SCALE, SCALE)
        # Set background color to white
        context.set_source_rgb(1, 1, 1)
        context.paint()
        return context, surface
    def draw(self, shape, color):
        """Draw one annotation shape, semi-transparent, on the middle pane."""
        self._context.save()
        self._context.set_line_width(1)
        self._context.set_source_rgba(color.red,
                                      color.green,
                                      color.blue,
                                      0.5)
        # Shift into the middle rendering's coordinate space.
        self._context.translate(self._offset_colors[1][0], 0)
        # Dispatch on the shape's type (Line/Rectangle/Polygon).
        RENDERERS[type(shape)](self._context, shape)
        self._context.restore()
    def flush(self):
        """Finish the surface, writing the SVG (and optional PNG) to disk."""
        if self._png_filename is not None:
            self._surface.write_to_png(self._png_filename)
        # NOTE! The flush is rather expensive, since it writes out the svg
        # data. The profile will show a large amount of time spent inside it.
        # Removing it won't help the execution time at all, it will just move
        # it somewhere that the profiler can't see it
        # (at garbage collection time)
        self._surface.flush()
        self._surface.finish()
def render_page(pdf_filename, page_number, annotations, svg_file=None,
                png_file=None):
    """
    Render a single page of a pdf with graphical annotations added.
    """
    pdf_page = extract_pdf_page(pdf_filename, page_number)
    renderer = CairoPdfPageRenderer(pdf_page, svg_file, png_file)
    for group in annotations:
        # Fail loudly on malformed input rather than drawing garbage.
        assert isinstance(group, AnnotationGroup), (
            "annotations: {0}, annotation: {1}".format(annotations, group))
        for shape in group.shapes:
            renderer.draw(shape, group.color)
    renderer.flush()
def extract_pdf_page(filename, page_number):
    """Load *filename* with poppler and return the requested page object."""
    uri = "file://{0}".format(abspath(filename))
    document = poppler.document_new_from_file(uri, "")
    return document.get_page(page_number)
def make_annotations(table_container):
    """
    Take the output of the table-finding algorithm (TableFinder) and create
    AnnotationGroups. These can be drawn on top of the original PDF page to
    visualise how the algorithm arrived at its output.
    """
    annotations = []
    # Container-wide layers: every glyph/word box plus barycenters and the
    # vertical "hat" profile used to find text lines.
    annotations.append(
        AnnotationGroup(
            name='all_glyphs',
            color=Color(0, 1, 0),
            shapes=convert_rectangles(table_container.all_glyphs)))
    annotations.append(
        AnnotationGroup(
            name='all_words',
            color=Color(0, 0, 1),
            shapes=convert_rectangles(table_container.all_words)))
    annotations.append(
        AnnotationGroup(
            name='text_barycenters',
            color=Color(0, 0, 1),
            shapes=convert_barycenters(table_container.all_glyphs)))
    annotations.append(
        AnnotationGroup(
            name='hat_graph_vertical',
            color=Color(0, 1, 0),
            shapes=make_hat_graph(
                table_container._y_point_values,
                table_container._center_lines,
                direction="vertical")))
    # Per-table layers: row/column edges, glyph histograms and the
    # above-threshold segment markers.
    for table in table_container:
        annotations.append(
            AnnotationGroup(
                name='row_edges',
                color=Color(1, 0, 0),
                shapes=convert_horizontal_lines(
                    table.row_edges, table.bounding_box)))
        annotations.append(
            AnnotationGroup(
                name='column_edges',
                color=Color(1, 0, 0),
                shapes=convert_vertical_lines(
                    table.column_edges, table.bounding_box)))
        annotations.append(
            AnnotationGroup(
                name='glyph_histogram_horizontal',
                color=Color(1, 0, 0),
                shapes=make_glyph_histogram(
                    table._x_glyph_histogram, table.bounding_box,
                    direction="horizontal")))
        annotations.append(
            AnnotationGroup(
                name='glyph_histogram_vertical',
                color=Color(1, 0, 0),
                shapes=make_glyph_histogram(
                    table._y_glyph_histogram, table.bounding_box,
                    direction="vertical")))
        annotations.append(
            AnnotationGroup(
                name='horizontal_glyph_above_threshold',
                color=Color(0, 0, 0),
                shapes=make_thresholds(
                    table._x_threshold_segs, table.bounding_box,
                    direction="horizontal")))
        annotations.append(
            AnnotationGroup(
                name='vertical_glyph_above_threshold',
                color=Color(0, 0, 0),
                shapes=make_thresholds(
                    table._y_threshold_segs, table.bounding_box,
                    direction="vertical")))
    # Draw bounding boxes last so that they appear on top
    annotations.append(
        AnnotationGroup(
            name='table_bounding_boxes',
            color=Color(0, 0, 1),
            shapes=convert_rectangles(table_container.bounding_boxes)))
    return annotations
def make_thresholds(segments, box, direction):
    """Visualise above-threshold segments as short marker lines.

    Horizontal segments are drawn 10 units below the bounding box's bottom
    edge; vertical ones are drawn at x=10.
    """
    if direction == "horizontal":
        y = box.bottom + 10
        return [Line(Point(seg.start, y), Point(seg.end, y))
                for seg in segments]
    return [Line(Point(10, seg.start), Point(10, seg.end))
            for seg in segments]
def make_hat_graph(hats, center_lines, direction):
    """
    Draw estimated text barycenter
    """
    # Tallest hat value, used to normalise the profile to DISPLAY_WIDTH.
    max_value = max(v for _, v in hats)
    DISPLAY_WIDTH = 25
    points = []
    polygon = Polygon(points)
    def point(x, y):
        # Helper: append to the polygon's (mutable) point list.
        points.append(Point(x, y))
    for position, value in hats:
        # NOTE(review): value / max_value floor-divides when both are ints
        # under Python 2 -- presumably these are floats; confirm upstream.
        point(((value / max_value - 1) * DISPLAY_WIDTH), position)
    # One short tick per detected center line, left of the profile.
    lines = []
    for position in center_lines:
        lines.append(Line(Point(-DISPLAY_WIDTH, position),
                          Point(0, position)))
    return [polygon] + lines
def make_glyph_histogram(histogram, box, direction):
    """Convert a (bin_edges, bin_values) glyph histogram into a step-outline
    Polygon anchored on the table's bounding box."""
    # if direction == "vertical":
    #     return []
    bin_edges, bin_values = histogram
    if not bin_edges:
        # There are no glyphs, and nothing to render!
        return []
    points = []
    polygon = Polygon(points)
    def point(x, y):
        # Helper: append to the polygon's (mutable) point list.
        points.append(Point(x, y))
    # def line(*args):
    #     lines.append(Line(*args))
    previous_value = 0 if direction == "horizontal" else box.bottom
    x = zip(bin_edges, bin_values)
    for edge, value in x:
        if direction == "horizontal":
            # Scale down and flip so bars grow upwards from the box bottom.
            value *= 0.75
            value = box.bottom - value
            point(edge, previous_value)
            point(edge, value)
        else:
            value *= 0.25
            value += 7  # shift pixels to the right
            point(previous_value, edge)
            point(value, edge)
        previous_value = value
    # Final point is at 0
    # (`edge` is the last bin edge; bin_edges is non-empty, checked above)
    if direction == "horizontal":
        point(edge, 0)
    else:
        point(box.bottom, edge)
    # Draw edge density plot (not terribly interesting, should probably be
    # deleted)
    # lines = []
    # if direction == "horizontal":
    #     for edge in bin_edges:
    #         lines.append(Line(Point(edge, box.bottom),
    #                           Point(edge, box.bottom + 5)))
    # else:
    #     for edge in bin_edges:
    #         lines.append(Line(Point(0, edge), Point(5, edge)))
    return [polygon]  # + lines
def convert_rectangles(boxes):
    """Turn pdf box objects into drawable Rectangle shapes."""
    result = []
    for box in boxes:
        result.append(Rectangle(Point(box.left, box.top),
                                Point(box.right, box.bottom)))
    return result
def convert_barycenters(boxes):
    """One horizontal Line per box at its barycenter midpoint (when set)."""
    lines = []
    for box in boxes:
        if box.barycenter is None:
            continue
        midpoint = box.barycenter.midpoint
        lines.append(Line(Point(box.left, midpoint),
                          Point(box.right, midpoint)))
    return lines
def convert_horizontal_lines(y_edges, bbox):
    """Full-width horizontal Lines across *bbox* at each y coordinate."""
    left, right = bbox.left, bbox.right
    return [Line(Point(left, y), Point(right, y)) for y in y_edges]
def convert_vertical_lines(x_edges, bbox):
    """Full-height vertical Lines across *bbox* at each x coordinate."""
    top, bottom = bbox.top, bbox.bottom
    return [Line(Point(x, top), Point(x, bottom)) for x in x_edges]
if __name__ == '__main__':
    # Manual smoke test: overlay a single red rectangle on page 0 of the
    # PDF given as the first command-line argument.
    annotations = [
        AnnotationGroup(
            name='',
            color=Color(1, 0, 0),
            shapes=[Rectangle(Point(100, 100), Point(200, 200))])
    ]
    render_page(sys.argv[1], 0, annotations)
|
"""
Maximum likelihood covariance estimator.
"""
import warnings
import numpy as np
from scipy import linalg
from ..base import BaseEstimator
from ..utils import check_array
from ..utils.extmath import fast_logdet
from ..metrics.pairwise import pairwise_distances
def log_likelihood(emp_cov, precision):
    """Sample mean of the log-likelihood under a Gaussian covariance model.

    Includes the normalization terms and scaling, so the value is
    universally comparable (beyond this software package).

    Parameters
    ----------
    emp_cov : 2D ndarray (n_features, n_features)
        Maximum Likelihood Estimator of covariance.
    precision : 2D ndarray (n_features, n_features)
        The precision matrix of the covariance model to be tested.

    Returns
    -------
    Sample mean of the log-likelihood.
    """
    n_features = precision.shape[0]
    log_lik = fast_logdet(precision) - np.sum(emp_cov * precision)
    log_lik -= n_features * np.log(2 * np.pi)
    return log_lik / 2.
def empirical_covariance(X, assume_centered=False):
    """Maximum likelihood covariance estimator.

    Parameters
    ----------
    X : ndarray, shape (n_samples, n_features)
        Data from which to compute the covariance estimate.
    assume_centered : boolean
        If True, the data are assumed already centered and are not centered
        before computation (useful when the mean is almost but not exactly
        zero).  If False, data will be centered before computation.

    Returns
    -------
    covariance : 2D ndarray, shape (n_features, n_features)
        Empirical covariance (Maximum Likelihood Estimator).
    """
    X = np.asarray(X)
    # A 1-D input is interpreted as a single sample.
    if X.ndim == 1:
        X = np.reshape(X, (1, -1))
    n_samples = X.shape[0]
    if n_samples == 1:
        warnings.warn("Only one sample available. "
                      "You may want to reshape your data array")
    if assume_centered:
        covariance = np.dot(X.T, X) / n_samples
    else:
        covariance = np.cov(X.T, bias=1)
    # np.cov of a single feature yields a 0-d array; promote it to (1, 1).
    if covariance.ndim == 0:
        covariance = np.array([[covariance]])
    return covariance
class EmpiricalCovariance(BaseEstimator):
    """Maximum likelihood covariance estimator
    Read more in the :ref:`User Guide <covariance>`.
    Parameters
    ----------
    store_precision : bool
        Specifies if the estimated precision is stored.
    assume_centered : bool
        If True, data are not centered before computation.
        Useful when working with data whose mean is almost, but not exactly
        zero.
        If False (default), data are centered before computation.
    Attributes
    ----------
    location_ : array-like, shape (n_features,)
        Estimated location, i.e. the estimated mean.
    covariance_ : 2D ndarray, shape (n_features, n_features)
        Estimated covariance matrix
    precision_ : 2D ndarray, shape (n_features, n_features)
        Estimated pseudo-inverse matrix.
        (stored only if store_precision is True)
    Examples
    --------
    >>> import numpy as np
    >>> from sklearn.covariance import EmpiricalCovariance
    >>> from sklearn.datasets import make_gaussian_quantiles
    >>> real_cov = np.array([[.8, .3],
    ...                      [.3, .4]])
    >>> rng = np.random.RandomState(0)
    >>> X = rng.multivariate_normal(mean=[0, 0],
    ...                             cov=real_cov,
    ...                             size=500)
    >>> cov = EmpiricalCovariance().fit(X)
    >>> cov.covariance_
    array([[0.7569..., 0.2818...],
           [0.2818..., 0.3928...]])
    >>> cov.location_
    array([0.0622..., 0.0193...])
    """
    def __init__(self, store_precision=True, assume_centered=False):
        # Plain attribute assignment only, per scikit-learn estimator API.
        self.store_precision = store_precision
        self.assume_centered = assume_centered
    def _set_covariance(self, covariance):
        """Saves the covariance and precision estimates
        Storage is done accordingly to `self.store_precision`.
        Precision stored only if invertible.
        Parameters
        ----------
        covariance : 2D ndarray, shape (n_features, n_features)
            Estimated covariance matrix to be stored, and from which precision
            is computed.
        """
        covariance = check_array(covariance)
        # set covariance
        self.covariance_ = covariance
        # set precision
        if self.store_precision:
            # pinvh: pseudo-inverse for symmetric (possibly singular) matrices.
            self.precision_ = linalg.pinvh(covariance)
        else:
            self.precision_ = None
    def get_precision(self):
        """Getter for the precision matrix.
        Returns
        -------
        precision_ : array-like
            The precision matrix associated to the current covariance object.
        """
        if self.store_precision:
            precision = self.precision_
        else:
            # Not stored: recompute on demand from the covariance estimate.
            precision = linalg.pinvh(self.covariance_)
        return precision
    def fit(self, X, y=None):
        """Fits the Maximum Likelihood Estimator covariance model
        according to the given training data and parameters.
        Parameters
        ----------
        X : array-like, shape = [n_samples, n_features]
            Training data, where n_samples is the number of samples and
            n_features is the number of features.
        y
            not used, present for API consistence purpose.
        Returns
        -------
        self : object
        """
        X = check_array(X)
        # Location is the per-feature mean, or zero when assume_centered.
        if self.assume_centered:
            self.location_ = np.zeros(X.shape[1])
        else:
            self.location_ = X.mean(0)
        covariance = empirical_covariance(
            X, assume_centered=self.assume_centered)
        self._set_covariance(covariance)
        return self
    def score(self, X_test, y=None):
        """Computes the log-likelihood of a Gaussian data set with
        `self.covariance_` as an estimator of its covariance matrix.
        Parameters
        ----------
        X_test : array-like, shape = [n_samples, n_features]
            Test data of which we compute the likelihood, where n_samples is
            the number of samples and n_features is the number of features.
            X_test is assumed to be drawn from the same distribution than
            the data used in fit (including centering).
        y
            not used, present for API consistence purpose.
        Returns
        -------
        res : float
            The likelihood of the data set with `self.covariance_` as an
            estimator of its covariance matrix.
        """
        # compute empirical covariance of the test set
        # (centered with the location learned during fit)
        test_cov = empirical_covariance(
            X_test - self.location_, assume_centered=True)
        # compute log likelihood
        res = log_likelihood(test_cov, self.get_precision())
        return res
    def error_norm(self, comp_cov, norm='frobenius', scaling=True,
                   squared=True):
        """Computes the Mean Squared Error between two covariance estimators.
        (In the sense of the Frobenius norm).
        Parameters
        ----------
        comp_cov : array-like, shape = [n_features, n_features]
            The covariance to compare with.
        norm : str
            The type of norm used to compute the error. Available error types:
            - 'frobenius' (default): sqrt(tr(A^t.A))
            - 'spectral': sqrt(max(eigenvalues(A^t.A))
            where A is the error ``(comp_cov - self.covariance_)``.
        scaling : bool
            If True (default), the squared error norm is divided by n_features.
            If False, the squared error norm is not rescaled.
        squared : bool
            Whether to compute the squared error norm or the error norm.
            If True (default), the squared error norm is returned.
            If False, the error norm is returned.
        Returns
        -------
        The Mean Squared Error (in the sense of the Frobenius norm) between
        `self` and `comp_cov` covariance estimators.
        """
        # compute the error
        error = comp_cov - self.covariance_
        # compute the error norm
        if norm == "frobenius":
            squared_norm = np.sum(error ** 2)
        elif norm == "spectral":
            squared_norm = np.amax(linalg.svdvals(np.dot(error.T, error)))
        else:
            raise NotImplementedError(
                "Only spectral and frobenius norms are implemented")
        # optionally scale the error norm
        if scaling:
            squared_norm = squared_norm / error.shape[0]
        # finally get either the squared norm or the norm
        if squared:
            result = squared_norm
        else:
            result = np.sqrt(squared_norm)
        return result
    def mahalanobis(self, X):
        """Computes the squared Mahalanobis distances of given observations.
        Parameters
        ----------
        X : array-like, shape = [n_samples, n_features]
            The observations, the Mahalanobis distances of the which we
            compute. Observations are assumed to be drawn from the same
            distribution than the data used in fit.
        Returns
        -------
        dist : array, shape = [n_samples,]
            Squared Mahalanobis distances of the observations.
        """
        precision = self.get_precision()
        # compute mahalanobis distances
        # (pairwise_distances returns the unsquared distance; square below)
        dist = pairwise_distances(X, self.location_[np.newaxis, :],
                                  metric='mahalanobis', VI=precision)
        return np.reshape(dist, (len(X),)) ** 2
|
import sys
# Canonical Willow Garage BSD license text; every checked file must contain
# each of these lines verbatim (unless it carries the skip tag below).
bsd = '''
Redistribution and use in source and binary forms, with or without
modification, are permitted provided that the following conditions are met:
    * Redistributions of source code must retain the above copyright
      notice, this list of conditions and the following disclaimer.
    * Redistributions in binary form must reproduce the above copyright
      notice, this list of conditions and the following disclaimer in the
      documentation and/or other materials provided with the distribution.
    * Neither the name of the Willow Garage, Inc. nor the names of its
      contributors may be used to endorse or promote products derived from
      this software without specific prior written permission.
THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS"
AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE
IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE
ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT OWNER OR CONTRIBUTORS BE
LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR
CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF
SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS
INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN
CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE)
ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE
POSSIBILITY OF SUCH DAMAGE.
'''
# Files containing this tag are exempt from the license check.
skip_check_tag = "Willow Garage BSD License not applicable"
nerrors = 0
import os
# When ECTO_LICENSE_AUTOFIX is set, offending files are rewritten in place
# with the license prepended as a comment block.
autofix = False
if "ECTO_LICENSE_AUTOFIX" in os.environ:
    autofix = True
files = sys.argv[1:]
# Comment leader to use per file extension when auto-fixing.
commentchars = { '.cpp' : '//',
                 '.hpp' : '//',
                 '.py' : '#',
                 '.cmake' : '#',
                 '.txt' : '#'
                 }
# NOTE(review): Python 2 script (print statements, dict.iteritems below).
for filename in files:
    txt = open(filename).read()
    thiserror = False
    result = filename + "..."
    if skip_check_tag in txt:
        result += "ok"
    else:
        # Every line of the license must appear somewhere in the file.
        for l in bsd.split('\n'):
            if l not in txt:
                result += "missing: " + l + "\n"
                thiserror = True
        if thiserror:
            nerrors += 1
        else:
            result += "ok"
    if thiserror and autofix:
        newf = open(filename, "w")
        # Pick the comment leader matching this file's extension.
        # NOTE(review): `cmt` stays unbound when no known extension matches.
        for k, v in commentchars.iteritems():
            if filename.endswith(k):
                cmt = v
        # Keep a leading "#!" line first, before the license block.
        if txt.startswith('#!'):
            hashbang, rest = txt.split('\n', 1)
            print >>newf, hashbang
        else:
            rest = txt
        print >>newf, cmt, bsd.replace('\n', '\n' + cmt + ' ')
        print >>newf, rest
        newf.close()
        result += filename + "AUTOFIXED"
    print result
# Exit status is the number of files that failed the check.
sys.exit(nerrors)
|
"""Makes sure that files include headers from allowed directories.
Checks DEPS files in the source tree for rules, and applies those rules to
"#include" commands in source files. Any source file including something not
permitted by the DEPS files will fail.
The format of the deps file:
First you have the normal module-level deps. These are the ones used by
gclient. An example would be:
deps = {
"base":"http://foo.bar/trunk/base"
}
DEPS files not in the top-level of a module won't need this. Then you have
any additional include rules. You can add (using "+") or subtract (using "-")
from the previously specified rules (including module-level deps).
include_rules = [
  # Code should be able to use base (it's specified in the module-level
  # deps above), but nothing in "base/evil" because it's evil.
  "-base/evil",
  # But this one subdirectory of evil is OK.
  "+base/evil/not",
  # And it can include files from this other directory even though there is
  # no deps rule for it.
  "+tools/crime_fighter"
]
DEPS files may be placed anywhere in the tree. Each one applies to all
subdirectories, where there may be more DEPS files that provide additions or
subtractions for their own sub-trees.
There is an implicit rule for the current directory (where the DEPS file lives)
and all of its subdirectories. This prevents you from having to explicitly
allow the current directory everywhere. This implicit rule is applied first,
so you can modify or remove it using the normal include rules.
The rules are processed in order. This means you can explicitly allow a higher
directory and then take away permissions from sub-parts, or the reverse.
Note that all directory separators must be slashes (Unix-style) and not
backslashes. All directories should be relative to the source root and use
only lowercase.
"""
import os
import optparse
import pipes
import re
import sys
import copy
# Names of the variables read from each DEPS file's scope.
DEPS_VAR_NAME = "deps"
INCLUDE_RULES_VAR_NAME = "include_rules"
SKIP_SUBDIRS_VAR_NAME = "skip_child_includes"
# Only the first MAX_LINES lines (at most MAX_LINE_LENGTH chars each) of a
# source file are scanned for include directives.
MAX_LINES = 150
MAX_LINE_LENGTH = 128
VERBOSE = False
# Matches `#include "..."` / `#import "..."` and captures the quoted path.
EXTRACT_INCLUDE_PATH = re.compile('[ \t]*#[ \t]*(?:include|import)[ \t]+"(.*)"')
# Absolute path of the repository root; set by checkdeps().
BASE_DIRECTORY = ""
# Directories managed by git, filled in by GetGitSourceDirectory().
GIT_SOURCE_DIRECTORY = set()
class Rule(object):
  """A single allow ("+") or deny ("-") rule for one directory prefix."""

  def __init__(self, allow, dir, source):
    # allow: True for "+" rules, False for "-" rules.
    # source: human-readable origin, used only in error messages.
    self._allow = allow
    self._dir = dir
    self._source = source

  def __str__(self):
    if self._allow:
      return '"+%s" from %s.' % (self._dir, self._source)
    return '"-%s" from %s.' % (self._dir, self._source)

  def ParentOrMatch(self, other):
    """True when *other* equals this rule's directory or is a parent of it.

    For example, the input "foo" matches a rule for "foo/bar".
    """
    return self._dir == other or self._dir.startswith(other + "/")

  def ChildOrMatch(self, other):
    """True when *other* is covered by this rule.

    For example, the input "foo/bar" matches a rule for "foo".
    """
    return other == self._dir or other.startswith(self._dir + "/")
def ParseRuleString(rule_string, source):
  """Splits an include rule into its parts.

  Returns a tuple (is_allow, directory) where is_allow is True for "+"
  rules and False for "-" rules.  *source* is only used in error messages.
  """
  if len(rule_string) < 1:
    raise Exception('The rule string "%s" is too short\nin %s' %
                    (rule_string, source))
  prefix, rule_dir = rule_string[0], rule_string[1:]
  if prefix == "+":
    return (True, rule_dir)
  if prefix == "-":
    return (False, rule_dir)
  raise Exception('The rule string "%s" does not begin with a "+" or a "-"' %
                  rule_string)
class Rules:
  """An ordered collection of include rules; the newest matching rule wins."""

  def __init__(self):
    """Initializes the current rules with an empty rule list."""
    self._rules = []

  def __str__(self):
    text = "Rules = [\n"
    text += "\n".join(["  %s" % rule for rule in self._rules])
    text += "]\n"
    return text

  def AddRule(self, rule_string, source):
    """Parses *rule_string* and prepends the resulting rule.

    Args:
      rule_string: The include_rule string read from the DEPS file to apply.
      source: A string representing the location of that string (filename,
          etc.) so that we can give meaningful errors.
    """
    (add_rule, rule_dir) = ParseRuleString(rule_string, source)
    # Remove any existing rules or sub-rules that apply. For example, if
    # we're passed "foo", drop "foo" and "foo/bar", but keep "foobar".
    self._rules = [r for r in self._rules if not r.ParentOrMatch(rule_dir)]
    self._rules.insert(0, Rule(add_rule, rule_dir, source))

  def DirAllowed(self, allowed_dir):
    """Checks *allowed_dir* against the rule list.

    Returns a (success, message) tuple; message explains the failure when
    success is False.
    """
    for rule in self._rules:
      if rule.ChildOrMatch(allowed_dir):
        # First matching rule decides (rules are newest-first).
        if rule._allow:
          return (True, "")
        return (False, str(rule))
    # No rules apply, fail.
    return (False, "no rule applying")
def ApplyRules(existing_rules, deps, includes, cur_dir):
  """Builds the effective rule set for *cur_dir*.

  Starts from a copy of existing_rules, adds the implicit allow rule for
  the directory itself, then one allow rule per DEPS dependency, and
  finally the directory's explicit include_rules.

  Args:
    existing_rules: A set of existing rules that will be combined.
    deps: The list of imports from the "deps" section of the DEPS file.
    includes: The list of rules from the "include_rules" section of DEPS.
    cur_dir: The current directory.

  Returns: A new set of rules combining the existing_rules with the other
      arguments.
  """
  rules = copy.copy(existing_rules)

  # Implicit rule: a directory may always include from itself.  cur_dir
  # must live under BASE_DIRECTORY or something has gone badly wrong.
  if not cur_dir.lower().startswith(BASE_DIRECTORY):
    raise Exception("Internal error: base directory is not at the beginning" +
                    " for\n  %s and base dir\n  %s" %
                    (cur_dir, BASE_DIRECTORY))
  relative_dir = cur_dir[len(BASE_DIRECTORY) + 1:]
  # Normalize path separators to slashes.
  relative_dir = relative_dir.replace("\\", "/")
  source = relative_dir or "top level"  # more meaningful help string
  rules.AddRule("+" + relative_dir, "Default rule for " + source)

  # DEPS entries are always "allow" rules; trim the leading "src/".
  for key in deps:
    if key.startswith("src/"):
      key = key[4:]
    rules.AddRule("+" + key, relative_dir + "'s deps for " + key)

  # Last, apply the additional explicit rules.
  for rule_str in includes:
    if relative_dir:
      rule_description = relative_dir + "'s include_rules"
    else:
      rule_description = "the top level include_rules"
    rules.AddRule(rule_str, rule_description)
  return rules
def ApplyDirectoryRules(existing_rules, dir_name):
  """Combines rules from the existing rules and the new directory.
  Any directory can contain a DEPS file. Toplevel DEPS files can contain
  module dependencies which are used by gclient. We use these, along with
  additional include rules and implicit rules for the given directory, to
  come up with a combined set of rules to apply for the directory.
  Args:
    existing_rules: The rules for the parent directory. We'll add-on to these.
    dir_name: The directory name that the deps file may live in (if it exists).
        This will also be used to generate the implicit rules.
  Returns: A tuple containing: (1) the combined set of rules to apply to the
      sub-tree, and (2) a list of all subdirectories that should NOT be
      checked, as specified in the DEPS file (if any).
  """
  # Check for a .svn directory in this directory or check this directory is
  # contained in git source direcotries. This will tell us if it's a source
  # directory and should be checked.
  if not (os.path.exists(os.path.join(dir_name, ".svn")) or
          (dir_name.lower() in GIT_SOURCE_DIRECTORY)):
    return (None, [])
  # Check the DEPS file in this directory.
  if VERBOSE:
    print "Applying rules from", dir_name
  # Stub implementations so gclient-specific constructs in DEPS files
  # evaluate without error.
  def FromImpl(unused, unused2):
    pass  # NOP function so "From" doesn't fail.
  def FileImpl(unused):
    pass  # NOP function so "File" doesn't fail.
  class _VarImpl:
    def __init__(self, local_scope):
      self._local_scope = local_scope
    def Lookup(self, var_name):
      """Implements the Var syntax."""
      if var_name in self._local_scope.get("vars", {}):
        return self._local_scope["vars"][var_name]
      raise Error("Var is not defined: %s" % var_name)
  local_scope = {}
  global_scope = {
      "File": FileImpl,
      "From": FromImpl,
      "Var": _VarImpl(local_scope).Lookup,
  }
  deps_file = os.path.join(dir_name, "DEPS")
  # The DEPS file is plain Python; evaluate it to populate local_scope.
  if os.path.isfile(deps_file):
    execfile(deps_file, global_scope, local_scope)
  elif VERBOSE:
    print "  No deps file found in", dir_name
  # Even if a DEPS file does not exist we still invoke ApplyRules
  # to apply the implicit "allow" rule for the current directory
  deps = local_scope.get(DEPS_VAR_NAME, {})
  include_rules = local_scope.get(INCLUDE_RULES_VAR_NAME, [])
  skip_subdirs = local_scope.get(SKIP_SUBDIRS_VAR_NAME, [])
  return (ApplyRules(existing_rules, deps, include_rules, dir_name),
          skip_subdirs)
def ShouldCheckFile(file_name):
  """Returns True if the given file is a type we want to check."""
  _, extension = os.path.splitext(file_name)
  return extension in ('.h', '.cc', '.m', '.mm')
def CheckLine(rules, line):
"""Checks the given file with the given rule set. If the line is an #include
directive and is illegal, a string describing the error will be returned.
Otherwise, None will be returned."""
found_item = EXTRACT_INCLUDE_PATH.match(line)
if not found_item:
return None # Not a match
include_path = found_item.group(1)
# Fix up backslashes in case somebody accidentally used them.
include_path.replace("\\", "/")
if include_path.find("/") < 0:
# Don't fail when no directory is specified. We may want to be more
# strict about this in the future.
if VERBOSE:
print " WARNING: directory specified with no path: " + include_path
return None
(allowed, why_failed) = rules.DirAllowed(include_path)
if not allowed:
if VERBOSE:
retval = "\nFor " + rules.__str__()
else:
retval = ""
return retval + ('Illegal include: "%s"\n Because of %s' %
(include_path, why_failed))
return None
def CheckFile(rules, file_name):
"""Checks the given file with the given rule set.
Args:
rules: The set of rules that apply to files in this directory.
file_name: The source file to check.
Returns: Either a string describing the error if there was one, or None if
the file checked out OK.
"""
if VERBOSE:
print "Checking: " + file_name
ret_val = "" # We'll collect the error messages in here
try:
cur_file = open(file_name, "r")
in_if0 = 0
for cur_line in range(MAX_LINES):
cur_line = cur_file.readline(MAX_LINE_LENGTH).strip()
# Check to see if we're at / inside a #if 0 block
if cur_line == '#if 0':
in_if0 += 1
continue
if in_if0 > 0:
if cur_line.startswith('#if'):
in_if0 += 1
elif cur_line == '#endif':
in_if0 -= 1
continue
line_status = CheckLine(rules, cur_line)
if line_status is not None:
if len(line_status) > 0: # Add newline to separate messages.
line_status += "\n"
ret_val += line_status
cur_file.close()
except IOError:
if VERBOSE:
print "Unable to open file: " + file_name
cur_file.close()
# Map empty string to None for easier checking.
if len(ret_val) == 0:
return None
return ret_val
def CheckDirectory(parent_rules, dir_name):
  """Recursively checks *dir_name* against the accumulated include rules.
  Returns True when this directory and all checked children are clean.
  """
  (rules, skip_subdirs) = ApplyDirectoryRules(parent_rules, dir_name)
  # A None rule set means this is not a source directory; skip it entirely.
  if rules == None:
    return True
  # Collect a list of all files and directories to check.
  files_to_check = []
  dirs_to_check = []
  success = True
  contents = os.listdir(dir_name)
  for cur in contents:
    if cur in skip_subdirs:
      continue  # Don't check children that DEPS has asked us to skip.
    full_name = os.path.join(dir_name, cur)
    if os.path.isdir(full_name):
      dirs_to_check.append(full_name)
    elif ShouldCheckFile(full_name):
      files_to_check.append(full_name)
  # First check all files in this directory.
  for cur in files_to_check:
    file_status = CheckFile(rules, cur)
    if file_status != None:
      print "ERROR in " + cur + "\n" + file_status
      success = False
  # Next recurse into the subdirectories.
  for cur in dirs_to_check:
    if not CheckDirectory(rules, cur):
      success = False
  return success
def GetGitSourceDirectory(root):
  """Returns a set of the directories to be checked.

  Args:
    root: The repository root where .git directory exists.

  Returns:
    A set of directories which contain sources managed by git.
  """
  source_dirs = set([root])
  command = "cd %s && git ls-files --full-name ." % pipes.quote(root)
  for line in os.popen(command):
    cur_dir = os.path.join(root, os.path.dirname(line))
    # Record the file's directory plus every ancestor up to (not
    # including) the repository root.
    while cur_dir != root:
      source_dirs.add(cur_dir)
      cur_dir = os.path.dirname(cur_dir)
  return source_dirs
def PrintUsage():
  # Emits command-line help (Python 2 print statement). The string is
  # user-facing runtime text, so it is kept verbatim.
  print """Usage: python checkdeps.py [--root <root>] [tocheck]
  --root Specifies the repository root. This defaults to "../../.." relative
         to the script file. This will be correct given the normal location
         of the script in "<root>/tools/checkdeps".
  tocheck Specifies the directory, relative to root, to check. This defaults
          to "." so it checks everything. Only one level deep is currently
          supported, so you can say "chrome" but not "chrome/browser".
Examples:
  python checkdeps.py
  python checkdeps.py --root c:\\source chrome"""
def checkdeps(options, args):
  """Runs the dependency check and returns a process exit code (0 = ok).

  Args:
    options: Parsed optparse options (base_directory, verbose).
    args: Positional arguments; at most one directory to check.
  """
  global VERBOSE
  if options.verbose:
    VERBOSE = True

  # Optional base directory of the repository.
  global BASE_DIRECTORY
  if not options.base_directory:
    # Default: resolved relative to this script's own location.
    BASE_DIRECTORY = os.path.abspath(
        os.path.join(os.path.abspath(os.path.dirname(__file__)), "../.."))
  else:
    BASE_DIRECTORY = os.path.abspath(options.base_directory)

  # Figure out which directory we have to check.
  if len(args) == 0:
    # No directory to check specified, use the repository root.
    start_dir = BASE_DIRECTORY
  elif len(args) == 1:
    # Directory specified. Start here. It's supposed to be relative to the
    # base directory.
    start_dir = os.path.abspath(os.path.join(BASE_DIRECTORY, args[0]))
  else:
    # More than one argument, we don't handle this.
    PrintUsage()
    return 1

  print "Using base directory:", BASE_DIRECTORY
  print "Checking:", start_dir

  base_rules = Rules()

  # The base directory should be lower case from here on since it will be used
  # for substring matching on the includes, and we compile on case-insensitive
  # systems. Plus, we always use slashes here since the include parsing code
  # will also normalize to slashes.
  BASE_DIRECTORY = BASE_DIRECTORY.lower()
  BASE_DIRECTORY = BASE_DIRECTORY.replace("\\", "/")
  start_dir = start_dir.replace("\\", "/")

  # When inside a git checkout, restrict checking to git-managed sources.
  if os.path.exists(os.path.join(BASE_DIRECTORY, ".git")):
    global GIT_SOURCE_DIRECTORY
    GIT_SOURCE_DIRECTORY = GetGitSourceDirectory(BASE_DIRECTORY)

  success = CheckDirectory(base_rules, start_dir)
  if not success:
    print "\nFAILED\n"
    return 1
  print "\nSUCCESS\n"
  return 0
def main():
  """Parses command-line flags and delegates to checkdeps()."""
  parser = optparse.OptionParser()
  parser.add_option("", "--root", default="", dest="base_directory",
                    help='Specifies the repository root. This defaults '
                         'to "../../.." relative to the script file, which '
                         'will normally be the repository root.')
  parser.add_option("-v", "--verbose", action="store_true",
                    default=False, help="Print debug logging")
  parsed_options, remaining_args = parser.parse_args()
  return checkdeps(parsed_options, remaining_args)
# Script entry point: exit with the checker's return code.
if __name__ == '__main__':
  sys.exit(main())
|
"""
Bottle is a fast and simple micro-framework for small web applications. It
offers request dispatching (Routes) with url parameter support, templates,
a built-in HTTP Server and adapters for many third party WSGI/HTTP-server and
template engines - all in a single file and with no dependencies other than the
Python Standard Library.
Homepage and documentation: http://bottlepy.org/
Copyright (c) 2014, Marcel Hellkamp.
License: MIT (see LICENSE for details)
"""
from __future__ import with_statement
__author__ = 'Marcel Hellkamp'
__version__ = '0.13-dev'
__license__ = 'MIT'
if __name__ == '__main__':
    # Parse the command line *before* the rest of the module-level imports:
    # the --server choice may require gevent's monkey patching, which must
    # happen before the stdlib modules it patches are imported below.
    from optparse import OptionParser
    _cmd_parser = OptionParser(usage="usage: %prog [options] package.module:app")
    _opt = _cmd_parser.add_option
    _opt("--version", action="store_true", help="show version number.")
    _opt("-b", "--bind", metavar="ADDRESS", help="bind socket to ADDRESS.")
    _opt("-s", "--server", default='wsgiref', help="use SERVER as backend.")
    _opt("-p", "--plugin", action="append", help="install additional plugin/s.")
    _opt("--debug", action="store_true", help="start server in debug mode.")
    _opt("--reload", action="store_true", help="auto-reload on file changes.")
    _cmd_options, _cmd_args = _cmd_parser.parse_args()
    if _cmd_options.server and _cmd_options.server.startswith('gevent'):
        import gevent.monkey; gevent.monkey.patch_all()
import base64, cgi, email.utils, functools, hmac, imp, itertools, mimetypes,\
os, re, subprocess, sys, tempfile, threading, time, warnings
from datetime import date as datedate, datetime, timedelta
from tempfile import TemporaryFile
from traceback import format_exc, print_exc
from inspect import getargspec
from unicodedata import normalize
# JSON support: try simplejson first, then the stdlib json module, then
# Django's bundled copy. If none is importable, fail lazily: json_dumps
# raises ImportError only when JSON support is actually used.
try: from simplejson import dumps as json_dumps, loads as json_lds
except ImportError: # pragma: no cover
    try: from json import dumps as json_dumps, loads as json_lds
    except ImportError:
        try: from django.utils.simplejson import dumps as json_dumps, loads as json_lds
        except ImportError:
            def json_dumps(data):
                raise ImportError("JSON support requires Python 2.6 or simplejson.")
            json_lds = json_dumps
# Interpreter version flags driving the 2.x/3.x compatibility layer below.
py = sys.version_info
py3k = py >= (3, 0, 0)       # any Python 3
py25 = py < (2, 6, 0)        # legacy 2.5
py31 = (3, 1, 0) <= py < (3, 2, 0)

# Workaround for the missing "as" keyword in py3k.
def _e():
    # Returns the exception instance currently being handled.
    return sys.exc_info()[1]
# Bind the raw stream writers once; if the streams are unavailable at
# import time (IOError), fall back to looking them up at call time.
try:
    _stdout, _stderr = sys.stdout.write, sys.stderr.write
except IOError:
    _stdout = lambda x: sys.stdout.write(x)
    _stderr = lambda x: sys.stderr.write(x)
# 2.x/3.x compatibility layer: import renamed stdlib modules under their
# py2 names and alias builtins so the rest of the file can be version
# agnostic. Intentionally shadows several builtins (basestring, unicode,
# callable, bytes) at module scope.
if py3k:
    import http.client as httplib
    import _thread as thread
    from urllib.parse import urljoin, SplitResult as UrlSplitResult
    from urllib.parse import urlencode, quote as urlquote, unquote as urlunquote
    urlunquote = functools.partial(urlunquote, encoding='latin1')
    from http.cookies import SimpleCookie
    from collections import MutableMapping as DictMixin
    import pickle
    from io import BytesIO
    from configparser import ConfigParser
    basestring = str
    unicode = str
    json_loads = lambda s: json_lds(touni(s))
    callable = lambda x: hasattr(x, '__call__')
    imap = map
    def _raise(*a): raise a[0](a[1]).with_traceback(a[2])
else: # 2.x
    import httplib
    import thread
    from urlparse import urljoin, SplitResult as UrlSplitResult
    from urllib import urlencode, quote as urlquote, unquote as urlunquote
    from Cookie import SimpleCookie
    from itertools import imap
    import cPickle as pickle
    from StringIO import StringIO as BytesIO
    from ConfigParser import SafeConfigParser as ConfigParser
    if py25:
        msg = "Python 2.5 support may be dropped in future versions of Bottle."
        warnings.warn(msg, DeprecationWarning)
        from UserDict import DictMixin
        def next(it): return it.next()
        bytes = str
    else: # 2.6, 2.7
        from collections import MutableMapping as DictMixin
    unicode = unicode
    json_loads = json_lds
    # The py2 three-argument raise is a SyntaxError on py3, so it is
    # compiled from a string to keep this file importable on both.
    eval(compile('def _raise(*a): raise a[0], a[1], a[2]', '<py3fix>', 'exec'))
def tob(s, enc='utf8'):
    """Coerce *s* to a byte string: encode text, pass anything else to bytes()."""
    if isinstance(s, unicode):
        return s.encode(enc)
    return bytes(s)
def touni(s, enc='utf8', err='strict'):
    """Coerce *s* to a text (unicode) string; None becomes the empty string."""
    if isinstance(s, bytes):
        return s.decode(enc, err)
    return unicode(s) if s else unicode("" if s is None else s)

# The "native" string type: text on py3, bytes on py2.
tonat = touni if py3k else tob
# 3.1-only helper: a TextIOWrapper whose close() is a no-op, so the
# wrapped buffer is not closed along with the wrapper.
if py31:
    from io import TextIOWrapper
    class NCTextIOWrapper(TextIOWrapper):
        def close(self): pass # Keep wrapped buffer open.
def update_wrapper(wrapper, wrapped, *a, **ka):
    """functools.update_wrapper that silently ignores AttributeError
    (some callables lack the attributes being copied)."""
    try:
        functools.update_wrapper(wrapper, wrapped, *a, **ka)
    except AttributeError:
        pass
def depr(message, hard=False):
    """Emit *message* as a DeprecationWarning attributed to the caller's caller."""
    warnings.warn(message, DeprecationWarning, stacklevel=3)
def makelist(data):
    """Coerce *data* into a list: containers are converted, truthy scalars
    are wrapped, and falsy values yield an empty list."""
    if isinstance(data, (tuple, list, set, dict)):
        return list(data)
    return [data] if data else []
class DictProperty(object):
    ''' Property that maps to a key in a local dict-like attribute. '''

    def __init__(self, attr, key=None, read_only=False):
        self.attr, self.key, self.read_only = attr, key, read_only

    def __call__(self, func):
        # Decorator usage: remember the getter; default the key to its name.
        functools.update_wrapper(self, func, updated=[])
        self.getter, self.key = func, self.key or func.__name__
        return self

    def __get__(self, obj, cls):
        if obj is None:
            return self
        storage = getattr(obj, self.attr)
        # Lazily compute and cache the value inside the backing dict.
        if self.key not in storage:
            storage[self.key] = self.getter(obj)
        return storage[self.key]

    def __set__(self, obj, value):
        if self.read_only:
            raise AttributeError("Read-Only property.")
        getattr(obj, self.attr)[self.key] = value

    def __delete__(self, obj):
        if self.read_only:
            raise AttributeError("Read-Only property.")
        del getattr(obj, self.attr)[self.key]
class cached_property(object):
    ''' A property that is only computed once per instance and then replaces
        itself with an ordinary attribute. Deleting the attribute resets the
        property. '''

    def __init__(self, func):
        self.__doc__ = getattr(func, '__doc__')
        self.func = func

    def __get__(self, obj, cls):
        if obj is None:
            return self
        # Store the result in the instance dict; as a non-data descriptor,
        # this class is shadowed by that attribute from now on.
        obj.__dict__[self.func.__name__] = value = self.func(obj)
        return value
class lazy_attribute(object):
    ''' A property that caches itself to the class object. '''

    def __init__(self, func):
        functools.update_wrapper(self, func, updated=[])
        self.getter = func

    def __get__(self, obj, cls):
        # Compute once with the class as argument, then overwrite this
        # descriptor on the class with the plain value.
        value = self.getter(cls)
        setattr(cls, self.__name__, value)
        return value
class BottleException(Exception):
    """ A base class for exceptions used by bottle. """
# Routing-related exception hierarchy, all rooted at BottleException.
class RouteError(BottleException):
    """ This is a base class for all routing related exceptions """


class RouteReset(BottleException):
    """ If raised by a plugin or request handler, the route is reset and all
        plugins are re-applied. """


class RouterUnknownModeError(RouteError): pass


class RouteSyntaxError(RouteError):
    """ The route parser found something not supported by this router. """


class RouteBuildError(RouteError):
    """ The route could not be built. """
def _re_flatten(p):
''' Turn all capturing groups in a regular expression pattern into
non-capturing groups. '''
if '(' not in p: return p
return re.sub(r'(\\*)(\(\?P<[^>]+>|\((?!\?))',
lambda m: m.group(0) if len(m.group(1)) % 2 else m.group(1) + '(?:', p)
class Router(object):
    ''' A Router is an ordered collection of route->target pairs. It is used to
        efficiently match WSGI requests against a number of routes and return
        the first target that satisfies the request. The target may be anything,
        usually a string, ID or callable object. A route consists of a path-rule
        and a HTTP method.

        The path-rule is either a static path (e.g. `/contact`) or a dynamic
        path that contains wildcards (e.g. `/wiki/<page>`). The wildcard syntax
        and details on the matching order are described in docs:`routing`.
    '''

    default_pattern = '[^/]+'
    default_filter = 're'

    #: The current CPython regexp implementation does not allow more
    #: than 99 matching groups per regular expression.
    _MAX_GROUPS_PER_PATTERN = 99

    def __init__(self, strict=False):
        self.rules = [] # All rules in order
        self._groups = {} # index of regexes to find them in dyna_routes
        self.builder = {} # Data structure for the url builder
        self.static = {} # Search structure for static routes
        self.dyna_routes = {}
        self.dyna_regexes = {} # Search structure for dynamic routes
        #: If true, static routes are no longer checked first.
        self.strict_order = strict
        # Built-in wildcard filters: each maps a config string to a
        # (regexp, to_python, to_url) triple.
        self.filters = {
            're': lambda conf:
                (_re_flatten(conf or self.default_pattern), None, None),
            'int': lambda conf: (r'-?\d+', int, lambda x: str(int(x))),
            'float': lambda conf: (r'-?[\d.]+', float, lambda x: str(float(x))),
            'path': lambda conf: (r'.+?', None, None)}

    def add_filter(self, name, func):
        ''' Add a filter. The provided function is called with the configuration
            string as parameter and must return a (regexp, to_python, to_url) tuple.
            The first element is a string, the last two are callables or None. '''
        self.filters[name] = func

    # Matches both the old ``:name#conf#`` and the new ``<name:filter:conf>``
    # wildcard syntaxes, with backslash escaping.
    rule_syntax = re.compile('(\\\\*)'\
        '(?:(?::([a-zA-Z_][a-zA-Z_0-9]*)?()(?:#(.*?)#)?)'\
        '|(?:<([a-zA-Z_][a-zA-Z_0-9]*)?(?::([a-zA-Z_]*)'\
        '(?::((?:\\\\.|[^\\\\>]+)+)?)?)?>))')

    def _itertokens(self, rule):
        # Yields (prefix, None, None) for static parts and
        # (name, filter, config) for wildcard tokens.
        offset, prefix = 0, ''
        for match in self.rule_syntax.finditer(rule):
            prefix += rule[offset:match.start()]
            g = match.groups()
            if len(g[0])%2: # Escaped wildcard
                prefix += match.group(0)[len(g[0]):]
                offset = match.end()
                continue
            if prefix:
                yield prefix, None, None
            # g[2] is the empty marker group of the ':' syntax; when it is
            # None the '<...>' syntax matched instead.
            name, filtr, conf = g[4:7] if g[2] is None else g[1:4]
            yield name, filtr or 'default', conf or None
            offset, prefix = match.end(), ''
        if offset <= len(rule) or prefix:
            yield prefix+rule[offset:], None, None

    def add(self, rule, method, target, name=None):
        ''' Add a new rule or replace the target for an existing rule. '''
        anons = 0 # Number of anonymous wildcards found
        keys = [] # Names of keys
        pattern = '' # Regular expression pattern with named groups
        filters = [] # Lists of wildcard input filters
        builder = [] # Data structure for the URL builder
        is_static = True

        for key, mode, conf in self._itertokens(rule):
            if mode:
                is_static = False
                if mode == 'default': mode = self.default_filter
                mask, in_filter, out_filter = self.filters[mode](conf)
                if not key:
                    # Anonymous wildcard: non-capturing group, synthetic key.
                    pattern += '(?:%s)' % mask
                    key = 'anon%d' % anons
                    anons += 1
                else:
                    pattern += '(?P<%s>%s)' % (key, mask)
                    keys.append(key)
                if in_filter: filters.append((key, in_filter))
                builder.append((key, out_filter or str))
            elif key:
                pattern += re.escape(key)
                builder.append((None, key))

        self.builder[rule] = builder
        if name: self.builder[name] = builder

        if is_static and not self.strict_order:
            # Static routes go into a plain dict for O(1) lookup.
            self.static.setdefault(method, {})
            self.static[method][self.build(rule)] = (target, None)
            return

        try:
            re_pattern = re.compile('^(%s)$' % pattern)
            re_match = re_pattern.match
        except re.error:
            raise RouteSyntaxError("Could not add Route: %s (%s)" % (rule, _e()))

        if filters:
            def getargs(path):
                url_args = re_match(path).groupdict()
                for name, wildcard_filter in filters:
                    try:
                        url_args[name] = wildcard_filter(url_args[name])
                    except ValueError:
                        raise HTTPError(400, 'Path has wrong format.')
                return url_args
        elif re_pattern.groupindex:
            def getargs(path):
                return re_match(path).groupdict()
        else:
            getargs = None

        flatpat = _re_flatten(pattern)
        whole_rule = (rule, flatpat, target, getargs)

        if (flatpat, method) in self._groups:
            # Same flattened pattern for the same method: replace in place.
            if DEBUG:
                msg = 'Route <%s %s> overwrites a previously defined route'
                warnings.warn(msg % (method, rule), RuntimeWarning)
            self.dyna_routes[method][self._groups[flatpat, method]] = whole_rule
        else:
            self.dyna_routes.setdefault(method, []).append(whole_rule)
            self._groups[flatpat, method] = len(self.dyna_routes[method]) - 1

        self._compile(method)

    def _compile(self, method):
        # Combine up to _MAX_GROUPS_PER_PATTERN rules into one alternation
        # regex each, so matching needs only a few regex calls.
        all_rules = self.dyna_routes[method]
        comborules = self.dyna_regexes[method] = []
        maxgroups = self._MAX_GROUPS_PER_PATTERN
        for x in range(0, len(all_rules), maxgroups):
            some = all_rules[x:x+maxgroups]
            combined = (flatpat for (_, flatpat, _, _) in some)
            combined = '|'.join('(^%s$)' % flatpat for flatpat in combined)
            combined = re.compile(combined).match
            rules = [(target, getargs) for (_, _, target, getargs) in some]
            comborules.append((combined, rules))

    def build(self, _name, *anons, **query):
        ''' Build an URL by filling the wildcards in a rule. '''
        builder = self.builder.get(_name)
        if not builder: raise RouteBuildError("No route with that name.", _name)
        try:
            for i, value in enumerate(anons): query['anon%d'%i] = value
            url = ''.join([f(query.pop(n)) if n else f for (n,f) in builder])
            # Leftover keyword arguments become the query string.
            return url if not query else url+'?'+urlencode(query)
        except KeyError:
            raise RouteBuildError('Missing URL argument: %r' % _e().args[0])

    def match(self, environ):
        ''' Return a (target, url_args) tuple or raise HTTPError(400/404/405). '''
        verb = environ['REQUEST_METHOD'].upper()
        path = environ['PATH_INFO'] or '/'
        target = None
        # HEAD requests fall back to GET handlers; 'PROXY' and 'ANY' are
        # checked for every verb.
        if verb == 'HEAD':
            methods = ['PROXY', verb, 'GET', 'ANY']
        else:
            methods = ['PROXY', verb, 'ANY']

        for method in methods:
            if method in self.static and path in self.static[method]:
                target, getargs = self.static[method][path]
                return target, getargs(path) if getargs else {}
            elif method in self.dyna_regexes:
                for combined, rules in self.dyna_regexes[method]:
                    match = combined(path)
                    if match:
                        # lastindex identifies which alternation branch hit.
                        target, getargs = rules[match.lastindex - 1]
                        return target, getargs(path) if getargs else {}

        # No matching route found. Collect alternative methods for 405 response
        allowed = set([])
        nocheck = set(methods)
        for method in set(self.static) - nocheck:
            if path in self.static[method]:
                # NOTE(review): this adds *verb* (the request's own method),
                # not *method* (the one that would match) — confirm intent.
                allowed.add(verb)
        for method in set(self.dyna_regexes) - allowed - nocheck:
            for combined, rules in self.dyna_regexes[method]:
                match = combined(path)
                if match:
                    allowed.add(method)
        if allowed:
            allow_header = ",".join(sorted(allowed))
            raise HTTPError(405, "Method not allowed.", Allow=allow_header)

        # No matching route and no alternative method found. We give up
        raise HTTPError(404, "Not found: " + repr(path))
class Route(object):
    ''' This class wraps a route callback along with route specific metadata and
        configuration and applies Plugins on demand. It is also responsible for
        turning an URL path rule into a regular expression usable by the Router.
    '''

    def __init__(self, app, rule, method, callback, name=None,
                 plugins=None, skiplist=None, **config):
        #: The application this route is installed to.
        self.app = app
        #: The path-rule string (e.g. ``/wiki/:page``).
        self.rule = rule
        #: The HTTP method as a string (e.g. ``GET``).
        self.method = method
        #: The original callback with no plugins applied. Useful for introspection.
        self.callback = callback
        #: The name of the route (if specified) or ``None``.
        self.name = name or None
        #: A list of route-specific plugins (see :meth:`Bottle.route`).
        self.plugins = plugins or []
        #: A list of plugins to not apply to this route (see :meth:`Bottle.route`).
        self.skiplist = skiplist or []
        #: Additional keyword arguments passed to the :meth:`Bottle.route`
        #: decorator are stored in this dictionary. Used for route-specific
        #: plugin configuration and meta-data.
        self.config = ConfigDict().load_dict(config)

    @cached_property
    def call(self):
        ''' The route callback with all plugins applied. This property is
            created on demand and then cached to speed up subsequent requests.'''
        return self._make_callback()

    def reset(self):
        ''' Forget any cached values. The next time :attr:`call` is accessed,
            all plugins are re-applied. '''
        self.__dict__.pop('call', None)

    def prepare(self):
        ''' Do all on-demand work immediately (useful for debugging).'''
        self.call

    def all_plugins(self):
        ''' Yield all Plugins affecting this route. '''
        unique = set()
        # App-level plugins first (reversed), then route-level ones; skip
        # anything listed in skiplist by instance, type or name.
        for p in reversed(self.app.plugins + self.plugins):
            if True in self.skiplist: break
            name = getattr(p, 'name', False)
            if name and (name in self.skiplist or name in unique): continue
            if p in self.skiplist or type(p) in self.skiplist: continue
            if name: unique.add(name)
            yield p

    def _make_callback(self):
        ''' Apply all plugins to the raw callback and return the result. '''
        callback = self.callback
        for plugin in self.all_plugins():
            try:
                if hasattr(plugin, 'apply'):
                    callback = plugin.apply(callback, self)
                else:
                    callback = plugin(callback)
            except RouteReset: # Try again with changed configuration.
                return self._make_callback()
            if not callback is self.callback:
                update_wrapper(callback, self.callback)
        return callback

    def get_undecorated_callback(self):
        ''' Return the callback. If the callback is a decorated function, try to
            recover the original function. '''
        func = self.callback
        func = getattr(func, '__func__' if py3k else 'im_func', func)
        closure_attr = '__closure__' if py3k else 'func_closure'
        # Walk down wrapper closures until no closure cell is left.
        while hasattr(func, closure_attr) and getattr(func, closure_attr):
            func = getattr(func, closure_attr)[0].cell_contents
        return func

    def get_callback_args(self):
        ''' Return a list of argument names the callback (most likely) accepts
            as keyword arguments. If the callback is a decorated function, try
            to recover the original function before inspection. '''
        return getargspec(self.get_undecorated_callback())[0]

    def get_config(self, key, default=None):
        ''' Lookup a config field and return its value, first checking the
            route.config, then route.app.config.'''
        # Bug fix: was ``self.app.conifg`` (typo), which raised
        # AttributeError whenever the key was missing from route.config.
        for conf in (self.config, self.app.config):
            if key in conf: return conf[key]
        return default

    def __repr__(self):
        cb = self.get_undecorated_callback()
        return '<%s %r %r>' % (self.method, self.rule, cb)
class Bottle(object):
""" Each Bottle object represents a single, distinct web application and
consists of routes, callbacks, plugins, resources and configuration.
Instances are callable WSGI applications.
:param catchall: If true (default), handle all exceptions. Turn off to
let debugging middleware handle exceptions.
"""
def __init__(self, catchall=True, autojson=True):
#: A :class:`ConfigDict` for app specific configuration.
self.config = ConfigDict()
self.config._on_change = functools.partial(self.trigger_hook, 'config')
self.config.meta_set('autojson', 'validate', bool)
self.config.meta_set('catchall', 'validate', bool)
self.config['catchall'] = catchall
self.config['autojson'] = autojson
#: A :class:`ResourceManager` for application files
self.resources = ResourceManager()
self.routes = [] # List of installed :class:`Route` instances.
self.router = Router() # Maps requests to :class:`Route` instances.
self.error_handler = {}
# Core plugins
self.plugins = [] # List of installed plugins.
if self.config['autojson']:
self.install(JSONPlugin())
self.install(TemplatePlugin())
#: If true, most exceptions are caught and returned as :exc:`HTTPError`
catchall = DictProperty('config', 'catchall')
__hook_names = 'before_request', 'after_request', 'app_reset', 'config'
__hook_reversed = 'after_request'
@cached_property
def _hooks(self):
return dict((name, []) for name in self.__hook_names)
def add_hook(self, name, func):
''' Attach a callback to a hook. Three hooks are currently implemented:
before_request
Executed once before each request. The request context is
available, but no routing has happened yet.
after_request
Executed once after each request regardless of its outcome.
app_reset
Called whenever :meth:`Bottle.reset` is called.
'''
if name in self.__hook_reversed:
self._hooks[name].insert(0, func)
else:
self._hooks[name].append(func)
def remove_hook(self, name, func):
''' Remove a callback from a hook. '''
if name in self._hooks and func in self._hooks[name]:
self._hooks[name].remove(func)
return True
def trigger_hook(self, __name, *args, **kwargs):
''' Trigger a hook and return a list of results. '''
return [hook(*args, **kwargs) for hook in self._hooks[__name][:]]
def hook(self, name):
""" Return a decorator that attaches a callback to a hook. See
:meth:`add_hook` for details."""
def decorator(func):
self.add_hook(name, func)
return func
return decorator
def mount(self, prefix, app, **options):
''' Mount an application (:class:`Bottle` or plain WSGI) to a specific
URL prefix. Example::
root_app.mount('/admin/', admin_app)
:param prefix: path prefix or `mount-point`. If it ends in a slash,
that slash is mandatory.
:param app: an instance of :class:`Bottle` or a WSGI application.
All other parameters are passed to the underlying :meth:`route` call.
'''
segments = [p for p in prefix.split('/') if p]
if not segments: raise ValueError('Empty path prefix.')
path_depth = len(segments)
def mountpoint_wrapper():
try:
request.path_shift(path_depth)
rs = HTTPResponse([])
def start_response(status, headerlist, exc_info=None):
if exc_info:
try:
_raise(*exc_info)
finally:
exc_info = None
rs.status = status
for name, value in headerlist: rs.add_header(name, value)
return rs.body.append
body = app(request.environ, start_response)
if body and rs.body: body = itertools.chain(rs.body, body)
rs.body = body or rs.body
return rs
finally:
request.path_shift(-path_depth)
options.setdefault('skip', True)
options.setdefault('method', 'PROXY')
options.setdefault('mountpoint', {'prefix': prefix, 'target': app})
options['callback'] = mountpoint_wrapper
self.route('/%s/<:re:.*>' % '/'.join(segments), **options)
if not prefix.endswith('/'):
self.route('/' + '/'.join(segments), **options)
def merge(self, routes):
''' Merge the routes of another :class:`Bottle` application or a list of
:class:`Route` objects into this application. The routes keep their
'owner', meaning that the :data:`Route.app` attribute is not
changed. '''
if isinstance(routes, Bottle):
routes = routes.routes
for route in routes:
self.add_route(route)
def install(self, plugin):
''' Add a plugin to the list of plugins and prepare it for being
applied to all routes of this application. A plugin may be a simple
decorator or an object that implements the :class:`Plugin` API.
'''
if hasattr(plugin, 'setup'): plugin.setup(self)
if not callable(plugin) and not hasattr(plugin, 'apply'):
raise TypeError("Plugins must be callable or implement .apply()")
self.plugins.append(plugin)
self.reset()
return plugin
def uninstall(self, plugin):
''' Uninstall plugins. Pass an instance to remove a specific plugin, a type
object to remove all plugins that match that type, a string to remove
all plugins with a matching ``name`` attribute or ``True`` to remove all
plugins. Return the list of removed plugins. '''
removed, remove = [], plugin
for i, plugin in list(enumerate(self.plugins))[::-1]:
if remove is True or remove is plugin or remove is type(plugin) \
or getattr(plugin, 'name', True) == remove:
removed.append(plugin)
del self.plugins[i]
if hasattr(plugin, 'close'): plugin.close()
if removed: self.reset()
return removed
def reset(self, route=None):
''' Reset all routes (force plugins to be re-applied) and clear all
caches. If an ID or route object is given, only that specific route
is affected. '''
if route is None: routes = self.routes
elif isinstance(route, Route): routes = [route]
else: routes = [self.routes[route]]
for route in routes: route.reset()
if DEBUG:
for route in routes: route.prepare()
self.trigger_hook('app_reset')
def close(self):
''' Close the application and all installed plugins. '''
for plugin in self.plugins:
if hasattr(plugin, 'close'): plugin.close()
self.stopped = True
def run(self, **kwargs):
''' Calls :func:`run` with the same parameters. '''
run(self, **kwargs)
def match(self, environ):
""" Search for a matching route and return a (:class:`Route` , urlargs)
tuple. The second value is a dictionary with parameters extracted
from the URL. Raise :exc:`HTTPError` (404/405) on a non-match."""
return self.router.match(environ)
def get_url(self, routename, **kargs):
""" Return a string that matches a named route """
scriptname = request.environ.get('SCRIPT_NAME', '').strip('/') + '/'
location = self.router.build(routename, **kargs).lstrip('/')
return urljoin(urljoin('/', scriptname), location)
def add_route(self, route):
''' Add a route object, but do not change the :data:`Route.app`
attribute.'''
self.routes.append(route)
self.router.add(route.rule, route.method, route, name=route.name)
if DEBUG: route.prepare()
def route(self, path=None, method='GET', callback=None, name=None,
apply=None, skip=None, **config):
""" A decorator to bind a function to a request URL. Example::
@app.route('/hello/:name')
def hello(name):
return 'Hello %s' % name
The ``:name`` part is a wildcard. See :class:`Router` for syntax
details.
:param path: Request path or a list of paths to listen to. If no
path is specified, it is automatically generated from the
signature of the function.
:param method: HTTP method (`GET`, `POST`, `PUT`, ...) or a list of
methods to listen to. (default: `GET`)
:param callback: An optional shortcut to avoid the decorator
syntax. ``route(..., callback=func)`` equals ``route(...)(func)``
:param name: The name for this route. (default: None)
:param apply: A decorator or plugin or a list of plugins. These are
applied to the route callback in addition to installed plugins.
:param skip: A list of plugins, plugin classes or names. Matching
plugins are not installed to this route. ``True`` skips all.
Any additional keyword arguments are stored as route-specific
configuration and passed to plugins (see :meth:`Plugin.apply`).
"""
if callable(path): path, callback = None, path
plugins = makelist(apply)
skiplist = makelist(skip)
def decorator(callback):
# TODO: Documentation and tests
if isinstance(callback, basestring): callback = load(callback)
for rule in makelist(path) or yieldroutes(callback):
for verb in makelist(method):
verb = verb.upper()
route = Route(self, rule, verb, callback, name=name,
plugins=plugins, skiplist=skiplist, **config)
self.add_route(route)
return callback
return decorator(callback) if callback else decorator
def get(self, path=None, method='GET', **options):
""" Equals :meth:`route`. """
return self.route(path, method, **options)
def post(self, path=None, method='POST', **options):
""" Equals :meth:`route` with a ``POST`` method parameter. """
return self.route(path, method, **options)
def put(self, path=None, method='PUT', **options):
""" Equals :meth:`route` with a ``PUT`` method parameter. """
return self.route(path, method, **options)
def delete(self, path=None, method='DELETE', **options):
""" Equals :meth:`route` with a ``DELETE`` method parameter. """
return self.route(path, method, **options)
def error(self, code=500):
""" Decorator: Register an output handler for a HTTP error code"""
def wrapper(handler):
self.error_handler[int(code)] = handler
return handler
return wrapper
def default_error_handler(self, res):
return tob(template(ERROR_PAGE_TEMPLATE, e=res))
def _handle(self, environ):
path = environ['bottle.raw_path'] = environ['PATH_INFO']
if py3k:
try:
environ['PATH_INFO'] = path.encode('latin1').decode('utf8')
except UnicodeError:
return HTTPError(400, 'Invalid path string. Expected UTF-8')
try:
environ['bottle.app'] = self
request.bind(environ)
response.bind()
try:
self.trigger_hook('before_request')
route, args = self.router.match(environ)
environ['route.handle'] = route
environ['bottle.route'] = route
environ['route.url_args'] = args
return route.call(**args)
finally:
self.trigger_hook('after_request')
except HTTPResponse:
return _e()
except RouteReset:
route.reset()
return self._handle(environ)
except (KeyboardInterrupt, SystemExit, MemoryError):
raise
except Exception:
if not self.catchall: raise
stacktrace = format_exc()
environ['wsgi.errors'].write(stacktrace)
return HTTPError(500, "Internal Server Error", _e(), stacktrace)
def _cast(self, out, peek=None):
""" Try to convert the parameter into something WSGI compatible and set
correct HTTP headers when possible.
Support: False, str, unicode, dict, HTTPResponse, HTTPError, file-like,
iterable of strings and iterable of unicodes
"""
# Empty output is done here
if not out:
if 'Content-Length' not in response:
response['Content-Length'] = 0
return []
# Join lists of byte or unicode strings. Mixed lists are NOT supported
if isinstance(out, (tuple, list))\
and isinstance(out[0], (bytes, unicode)):
out = out[0][0:0].join(out) # b'abc'[0:0] -> b''
# Encode unicode strings
if isinstance(out, unicode):
out = out.encode(response.charset)
# Byte Strings are just returned
if isinstance(out, bytes):
if 'Content-Length' not in response:
response['Content-Length'] = len(out)
return [out]
# HTTPError or HTTPException (recursive, because they may wrap anything)
# TODO: Handle these explicitly in handle() or make them iterable.
if isinstance(out, HTTPError):
out.apply(response)
out = self.error_handler.get(out.status_code, self.default_error_handler)(out)
return self._cast(out)
if isinstance(out, HTTPResponse):
out.apply(response)
return self._cast(out.body)
# File-like objects.
if hasattr(out, 'read'):
if 'wsgi.file_wrapper' in request.environ:
return request.environ['wsgi.file_wrapper'](out)
elif hasattr(out, 'close') or not hasattr(out, '__iter__'):
return WSGIFileWrapper(out)
# Handle Iterables. We peek into them to detect their inner type.
try:
iout = iter(out)
first = next(iout)
while not first:
first = next(iout)
except StopIteration:
return self._cast('')
except HTTPResponse:
first = _e()
except (KeyboardInterrupt, SystemExit, MemoryError):
raise
except Exception:
if not self.catchall: raise
first = HTTPError(500, 'Unhandled exception', _e(), format_exc())
# These are the inner types allowed in iterator or generator objects.
if isinstance(first, HTTPResponse):
return self._cast(first)
elif isinstance(first, bytes):
new_iter = itertools.chain([first], iout)
elif isinstance(first, unicode):
encoder = lambda x: x.encode(response.charset)
new_iter = imap(encoder, itertools.chain([first], iout))
else:
msg = 'Unsupported response type: %s' % type(first)
return self._cast(HTTPError(500, msg))
if hasattr(out, 'close'):
new_iter = _closeiter(new_iter, out.close)
return new_iter
    def wsgi(self, environ, start_response):
        """ The bottle WSGI-interface. """
        try:
            out = self._cast(self._handle(environ))
            # rfc2616 section 4.3
            # 1xx, 204 and 304 responses (and replies to HEAD requests) MUST
            # NOT include a message body: close and drop whatever the handler
            # produced before answering.
            if response._status_code in (100, 101, 204, 304)\
            or environ['REQUEST_METHOD'] == 'HEAD':
                if hasattr(out, 'close'): out.close()
                out = []
            start_response(response._status_line, response.headerlist)
            return out
        except (KeyboardInterrupt, SystemExit, MemoryError):
            raise
        except Exception:
            # Last-resort error page. Application errors are normally handled
            # inside _handle()/_cast(), so reaching this point indicates a bug
            # in bottle itself or in an error handler. Honour self.catchall.
            if not self.catchall: raise
            err = '<h1>Critical error while processing request: %s</h1>' \
                  % html_escape(environ.get('PATH_INFO', '/'))
            if DEBUG:
                err += '<h2>Error:</h2>\n<pre>\n%s\n</pre>\n' \
                       '<h2>Traceback:</h2>\n<pre>\n%s\n</pre>\n' \
                       % (html_escape(repr(_e())), html_escape(format_exc()))
            environ['wsgi.errors'].write(err)
            headers = [('Content-Type', 'text/html; charset=UTF-8')]
            # Passing exc_info allows the server to replace headers that were
            # already sent for this request (PEP 3333).
            start_response('500 INTERNAL SERVER ERROR', headers, sys.exc_info())
            return [tob(err)]
    def __call__(self, environ, start_response):
        ''' Each instance of :class:`Bottle` is a WSGI application. '''
        return self.wsgi(environ, start_response)
    def __enter__(self):
        ''' Use this application as default for all module-level shortcuts. '''
        # Pushed onto the global application stack; __exit__ pops it again.
        default_app.push(self)
        return self
    def __exit__(self, exc_type, exc_value, traceback):
        ''' Restore the previous default application (undo __enter__). '''
        default_app.pop()
class BaseRequest(object):
    """ A wrapper for WSGI environment dictionaries that adds a lot of
        convenient access methods and properties. Most of them are read-only.
        Adding new attributes to a request actually adds them to the environ
        dictionary (as 'bottle.request.ext.<name>'). This is the recommended
        way to store and access request-specific data.
    """
    __slots__ = ('environ')
    #: Maximum size of memory buffer for :attr:`body` in bytes.
    MEMFILE_MAX = 102400
    def __init__(self, environ=None):
        """ Wrap a WSGI environ dictionary. """
        #: The wrapped WSGI environ dictionary. This is the only real attribute.
        #: All other attributes actually are read-only properties.
        self.environ = {} if environ is None else environ
        self.environ['bottle.request'] = self
    @DictProperty('environ', 'bottle.app', read_only=True)
    def app(self):
        ''' Bottle application handling this request. '''
        raise RuntimeError('This request is not connected to an application.')
    @DictProperty('environ', 'bottle.route', read_only=True)
    def route(self):
        """ The bottle :class:`Route` object that matches this request. """
        raise RuntimeError('This request is not connected to a route.')
    @DictProperty('environ', 'route.url_args', read_only=True)
    def url_args(self):
        """ The arguments extracted from the URL. """
        raise RuntimeError('This request is not connected to a route.')
    @property
    def path(self):
        ''' The value of ``PATH_INFO`` with exactly one prefixed slash (to fix
            broken clients and avoid the "empty path" edge case). '''
        return '/' + self.environ.get('PATH_INFO','').lstrip('/')
    @property
    def method(self):
        ''' The ``REQUEST_METHOD`` value as an uppercase string. '''
        return self.environ.get('REQUEST_METHOD', 'GET').upper()
    @DictProperty('environ', 'bottle.request.headers', read_only=True)
    def headers(self):
        ''' A :class:`WSGIHeaderDict` that provides case-insensitive access to
            HTTP request headers. '''
        return WSGIHeaderDict(self.environ)
    def get_header(self, name, default=None):
        ''' Return the value of a request header, or a given default value. '''
        return self.headers.get(name, default)
    @DictProperty('environ', 'bottle.request.cookies', read_only=True)
    def cookies(self):
        """ Cookies parsed into a :class:`FormsDict`. Signed cookies are NOT
            decoded. Use :meth:`get_cookie` if you expect signed cookies. """
        cookies = SimpleCookie(self.environ.get('HTTP_COOKIE','')).values()
        return FormsDict((c.key, c.value) for c in cookies)
    def get_cookie(self, key, default=None, secret=None):
        """ Return the content of a cookie. To read a `Signed Cookie`, the
            `secret` must match the one used to create the cookie (see
            :meth:`BaseResponse.set_cookie`). If anything goes wrong (missing
            cookie or wrong signature), return a default value. """
        value = self.cookies.get(key)
        if secret and value:
            dec = cookie_decode(value, secret) # (key, value) tuple or None
            return dec[1] if dec and dec[0] == key else default
        return value or default
    @DictProperty('environ', 'bottle.request.query', read_only=True)
    def query(self):
        ''' The :attr:`query_string` parsed into a :class:`FormsDict`. These
            values are sometimes called "URL arguments" or "GET parameters", but
            not to be confused with "URL wildcards" as they are provided by the
            :class:`Router`. '''
        get = self.environ['bottle.get'] = FormsDict()
        pairs = _parse_qsl(self.environ.get('QUERY_STRING', ''))
        for key, value in pairs:
            get[key] = value
        return get
    @DictProperty('environ', 'bottle.request.forms', read_only=True)
    def forms(self):
        """ Form values parsed from an `url-encoded` or `multipart/form-data`
            encoded POST or PUT request body. The result is returned as a
            :class:`FormsDict`. All keys and values are strings. File uploads
            are stored separately in :attr:`files`. """
        forms = FormsDict()
        for name, item in self.POST.allitems():
            if not isinstance(item, FileUpload):
                forms[name] = item
        return forms
    @DictProperty('environ', 'bottle.request.params', read_only=True)
    def params(self):
        """ A :class:`FormsDict` with the combined values of :attr:`query` and
            :attr:`forms`. File uploads are stored in :attr:`files`. """
        params = FormsDict()
        # Form values are added last and therefore shadow query values with
        # the same key (MultiDict returns the newest value).
        for key, value in self.query.allitems():
            params[key] = value
        for key, value in self.forms.allitems():
            params[key] = value
        return params
    @DictProperty('environ', 'bottle.request.files', read_only=True)
    def files(self):
        """ File uploads parsed from `multipart/form-data` encoded POST or PUT
            request body. The values are instances of :class:`FileUpload`.
        """
        files = FormsDict()
        for name, item in self.POST.allitems():
            if isinstance(item, FileUpload):
                files[name] = item
        return files
    @DictProperty('environ', 'bottle.request.json', read_only=True)
    def json(self):
        ''' If the ``Content-Type`` header is ``application/json``, this
            property holds the parsed content of the request body. Only requests
            smaller than :attr:`MEMFILE_MAX` are processed to avoid memory
            exhaustion. '''
        if 'application/json' in self.environ.get('CONTENT_TYPE', ''):
            return json_loads(self._get_body_string())
        return None
    def _iter_body(self, read, bufsize):
        # Yield the request body in chunks of at most `bufsize` bytes until
        # Content-Length bytes were read or the stream ends early.
        maxread = max(0, self.content_length)
        while maxread:
            part = read(min(maxread, bufsize))
            if not part: break
            yield part
            maxread -= len(part)
    def _iter_chunked(self, read, bufsize):
        # Decode a `Transfer-Encoding: chunked` body and yield the raw payload
        # in chunks of at most `bufsize` bytes (see RFC 2616 section 3.6.1).
        err = HTTPError(400, 'Error while parsing chunked transfer body.')
        rn, sem, bs = tob('\r\n'), tob(';'), tob('')
        while True:
            # Read the chunk-size line byte by byte until CRLF.
            header = read(1)
            while header[-2:] != rn:
                c = read(1)
                header += c
                if not c: raise err
                if len(header) > bufsize: raise err
            # Strip optional chunk extensions (after ';'), parse the hex size.
            size, _, _ = header.partition(sem)
            try:
                maxread = int(tonat(size.strip()), 16)
            except ValueError:
                raise err
            if maxread == 0: break  # last-chunk (size zero) terminates the body
            buff = bs
            while maxread > 0:
                if not buff:
                    buff = read(min(maxread, bufsize))
                part, buff = buff[:maxread], buff[maxread:]
                if not part: raise err
                yield part
                maxread -= len(part)
            # Each chunk payload is followed by a mandatory CRLF.
            if read(2) != rn:
                raise err
    @DictProperty('environ', 'bottle.request.body', read_only=True)
    def _body(self):
        # Read the entire body into a buffer. Small bodies stay in memory;
        # anything larger than MEMFILE_MAX is spooled to a temporary file.
        # The buffer replaces 'wsgi.input' so the body can be re-read.
        body_iter = self._iter_chunked if self.chunked else self._iter_body
        read_func = self.environ['wsgi.input'].read
        body, body_size, is_temp_file = BytesIO(), 0, False
        for part in body_iter(read_func, self.MEMFILE_MAX):
            body.write(part)
            body_size += len(part)
            if not is_temp_file and body_size > self.MEMFILE_MAX:
                body, tmp = TemporaryFile(mode='w+b'), body
                body.write(tmp.getvalue())
                del tmp
                is_temp_file = True
        self.environ['wsgi.input'] = body
        body.seek(0)
        return body
    def _get_body_string(self):
        ''' read body until content-length or MEMFILE_MAX into a string. Raise
            HTTPError(413) on requests that are too large. '''
        clen = self.content_length
        if clen > self.MEMFILE_MAX:
            raise HTTPError(413, 'Request to large')
        if clen < 0: clen = self.MEMFILE_MAX + 1
        data = self.body.read(clen)
        if len(data) > self.MEMFILE_MAX: # Fail fast
            raise HTTPError(413, 'Request to large')
        return data
    @property
    def body(self):
        """ The HTTP request body as a seek-able file-like object. Depending on
            :attr:`MEMFILE_MAX`, this is either a temporary file or a
            :class:`io.BytesIO` instance. Accessing this property for the first
            time reads and replaces the ``wsgi.input`` environ variable.
            Subsequent accesses just do a `seek(0)` on the file object. """
        self._body.seek(0)
        return self._body
    @property
    def chunked(self):
        ''' True if Chunked transfer encoding was used for the request body. '''
        return 'chunked' in self.environ.get('HTTP_TRANSFER_ENCODING', '').lower()
    #: An alias for :attr:`query`.
    GET = query
    @DictProperty('environ', 'bottle.request.post', read_only=True)
    def POST(self):
        """ The values of :attr:`forms` and :attr:`files` combined into a single
            :class:`FormsDict`. Values are either strings (form values) or
            instances of :class:`cgi.FieldStorage` (file uploads).
        """
        post = FormsDict()
        # We default to application/x-www-form-urlencoded for everything that
        # is not multipart and take the fast path (also: 3.1 workaround)
        if not self.content_type.startswith('multipart/'):
            pairs = _parse_qsl(tonat(self._get_body_string(), 'latin1'))
            for key, value in pairs:
                post[key] = value
            return post
        safe_env = {'QUERY_STRING':''} # Build a safe environment for cgi
        for key in ('REQUEST_METHOD', 'CONTENT_TYPE', 'CONTENT_LENGTH'):
            if key in self.environ: safe_env[key] = self.environ[key]
        args = dict(fp=self.body, environ=safe_env, keep_blank_values=True)
        if py31:
            args['fp'] = NCTextIOWrapper(args['fp'], encoding='utf8',
                                         newline='\n')
        elif py3k:
            args['encoding'] = 'utf8'
        data = cgi.FieldStorage(**args)
        data = data.list or []
        for item in data:
            if item.filename:
                post[item.name] = FileUpload(item.file, item.name,
                                             item.filename, item.headers)
            else:
                post[item.name] = item.value
        return post
    @property
    def url(self):
        """ The full request URI including hostname and scheme. If your app
            lives behind a reverse proxy or load balancer and you get confusing
            results, make sure that the ``X-Forwarded-Host`` header is set
            correctly. """
        return self.urlparts.geturl()
    @DictProperty('environ', 'bottle.request.urlparts', read_only=True)
    def urlparts(self):
        ''' The :attr:`url` string as an :class:`urlparse.SplitResult` tuple.
            The tuple contains (scheme, host, path, query_string and fragment),
            but the fragment is always empty because it is not visible to the
            server. '''
        env = self.environ
        # Prefer proxy-supplied headers over the raw WSGI values.
        http = env.get('HTTP_X_FORWARDED_PROTO') or env.get('wsgi.url_scheme', 'http')
        host = env.get('HTTP_X_FORWARDED_HOST') or env.get('HTTP_HOST')
        if not host:
            # HTTP 1.1 requires a Host-header. This is for HTTP/1.0 clients.
            host = env.get('SERVER_NAME', '127.0.0.1')
            port = env.get('SERVER_PORT')
            # Only append the port if it differs from the scheme's default.
            if port and port != ('80' if http == 'http' else '443'):
                host += ':' + port
        path = urlquote(self.fullpath)
        return UrlSplitResult(http, host, path, env.get('QUERY_STRING'), '')
    @property
    def fullpath(self):
        """ Request path including :attr:`script_name` (if present). """
        return urljoin(self.script_name, self.path.lstrip('/'))
    @property
    def query_string(self):
        """ The raw :attr:`query` part of the URL (everything in between ``?``
            and ``#``) as a string. """
        return self.environ.get('QUERY_STRING', '')
    @property
    def script_name(self):
        ''' The initial portion of the URL's `path` that was removed by a higher
            level (server or routing middleware) before the application was
            called. This script path is returned with leading and tailing
            slashes. '''
        script_name = self.environ.get('SCRIPT_NAME', '').strip('/')
        return '/' + script_name + '/' if script_name else '/'
    def path_shift(self, shift=1):
        ''' Shift path segments from :attr:`path` to :attr:`script_name` and
            vice versa.
            :param shift: The number of path segments to shift. May be negative
                          to change the shift direction. (default: 1)
        '''
        script = self.environ.get('SCRIPT_NAME','/')
        self['SCRIPT_NAME'], self['PATH_INFO'] = path_shift(script, self.path, shift)
    @property
    def content_length(self):
        ''' The request body length as an integer. The client is responsible to
            set this header. Otherwise, the real length of the body is unknown
            and -1 is returned. In this case, :attr:`body` will be empty. '''
        return int(self.environ.get('CONTENT_LENGTH') or -1)
    @property
    def content_type(self):
        ''' The Content-Type header as a lowercase-string (default: empty). '''
        return self.environ.get('CONTENT_TYPE', '').lower()
    @property
    def is_xhr(self):
        ''' True if the request was triggered by a XMLHttpRequest. This only
            works with JavaScript libraries that support the `X-Requested-With`
            header (most of the popular libraries do). '''
        requested_with = self.environ.get('HTTP_X_REQUESTED_WITH','')
        return requested_with.lower() == 'xmlhttprequest'
    @property
    def is_ajax(self):
        ''' Alias for :attr:`is_xhr`. "Ajax" is not the right term. '''
        return self.is_xhr
    @property
    def auth(self):
        """ HTTP authentication data as a (user, password) tuple. This
            implementation currently supports basic (not digest) authentication
            only. If the authentication happened at a higher level (e.g. in the
            front web-server or a middleware), the password field is None, but
            the user field is looked up from the ``REMOTE_USER`` environ
            variable. On any errors, None is returned. """
        basic = parse_auth(self.environ.get('HTTP_AUTHORIZATION',''))
        if basic: return basic
        ruser = self.environ.get('REMOTE_USER')
        if ruser: return (ruser, None)
        return None
    @property
    def remote_route(self):
        """ A list of all IPs that were involved in this request, starting with
            the client IP and followed by zero or more proxies. This does only
            work if all proxies support the ```X-Forwarded-For`` header. Note
            that this information can be forged by malicious clients. """
        proxy = self.environ.get('HTTP_X_FORWARDED_FOR')
        if proxy: return [ip.strip() for ip in proxy.split(',')]
        remote = self.environ.get('REMOTE_ADDR')
        return [remote] if remote else []
    @property
    def remote_addr(self):
        """ The client IP as a string. Note that this information can be forged
            by malicious clients. """
        route = self.remote_route
        return route[0] if route else None
    def copy(self):
        """ Return a new :class:`Request` with a shallow :attr:`environ` copy. """
        return Request(self.environ.copy())
    def get(self, value, default=None): return self.environ.get(value, default)
    def __getitem__(self, key): return self.environ[key]
    def __delitem__(self, key): self[key] = ""; del(self.environ[key])
    def __iter__(self): return iter(self.environ)
    def __len__(self): return len(self.environ)
    def keys(self): return self.environ.keys()
    def __setitem__(self, key, value):
        """ Change an environ value and clear all caches that depend on it. """
        if self.environ.get('bottle.request.readonly'):
            raise KeyError('The environ dictionary is read-only.')
        self.environ[key] = value
        # Invalidate cached DictProperty values derived from the changed key.
        todelete = ()
        if key == 'wsgi.input':
            todelete = ('body', 'forms', 'files', 'params', 'post', 'json')
        elif key == 'QUERY_STRING':
            todelete = ('query', 'params')
        elif key.startswith('HTTP_'):
            todelete = ('headers', 'cookies')
        for key in todelete:
            self.environ.pop('bottle.request.'+key, None)
    def __repr__(self):
        return '<%s: %s %s>' % (self.__class__.__name__, self.method, self.url)
    def __getattr__(self, name):
        ''' Search in self.environ for additional user defined attributes. '''
        try:
            var = self.environ['bottle.request.ext.%s'%name]
            # Support descriptor protocol for stored values.
            return var.__get__(self) if hasattr(var, '__get__') else var
        except KeyError:
            raise AttributeError('Attribute %r not defined.' % name)
    def __setattr__(self, name, value):
        if name == 'environ': return object.__setattr__(self, name, value)
        self.environ['bottle.request.ext.%s'%name] = value
def _hkey(s):
    """ Normalize a header name: underscores become dashes, words Title-Cased. """
    return s.replace('_', '-').title()
class HeaderProperty(object):
    """ Descriptor that exposes a single entry of ``obj.headers`` as an
        attribute. Values pass through ``writer`` (default: ``str``) on
        assignment and, if given, through ``reader`` on access. """
    def __init__(self, name, reader=None, writer=str, default=''):
        self.name = name
        self.default = default
        self.reader = reader
        self.writer = writer
        self.__doc__ = 'Current value of the %r header.' % name.title()
    def __get__(self, obj, cls):
        if obj is None:
            # Accessed on the class itself: return the descriptor.
            return self
        raw = obj.headers.get(self.name, self.default)
        if self.reader:
            return self.reader(raw)
        return raw
    def __set__(self, obj, value):
        obj.headers[self.name] = self.writer(value)
    def __delete__(self, obj):
        del obj.headers[self.name]
class BaseResponse(object):
    """ Storage class for a response body as well as headers and cookies.
        This class does support dict-like case-insensitive item-access to
        headers, but is NOT a dict. Most notably, iterating over a response
        yields parts of the body and not the headers.
        :param body: The response body as one of the supported types.
        :param status: Either an HTTP status code (e.g. 200) or a status line
                       including the reason phrase (e.g. '200 OK').
        :param headers: A dictionary or a list of name-value pairs.
        Additional keyword arguments are added to the list of headers.
        Underscores in the header name are replaced with dashes.
    """
    default_status = 200
    default_content_type = 'text/html; charset=UTF-8'
    # Header blacklist for specific response codes
    # (rfc2616 section 10.2.3 and 10.3.5)
    bad_headers = {
        204: set(('Content-Type',)),
        304: set(('Allow', 'Content-Encoding', 'Content-Language',
                  'Content-Length', 'Content-Range', 'Content-Type',
                  'Content-Md5', 'Last-Modified'))}
    def __init__(self, body='', status=None, headers=None, **more_headers):
        self._cookies = None
        self._headers = {}
        self.body = body
        self.status = status or self.default_status
        if headers:
            if isinstance(headers, dict):
                headers = headers.items()
            for name, value in headers:
                self.add_header(name, value)
        if more_headers:
            for name, value in more_headers.items():
                self.add_header(name, value)
    def copy(self, cls=None):
        ''' Returns a copy of self. '''
        cls = cls or BaseResponse
        assert issubclass(cls, BaseResponse)
        copy = cls()
        copy.status = self.status
        # Copy the value lists, too, so the copies do not share header state.
        copy._headers = dict((k, v[:]) for (k, v) in self._headers.items())
        if self._cookies:
            copy._cookies = SimpleCookie()
            copy._cookies.load(self._cookies.output())
        return copy
    def __iter__(self):
        return iter(self.body)
    def close(self):
        if hasattr(self.body, 'close'):
            self.body.close()
    @property
    def status_line(self):
        ''' The HTTP status line as a string (e.g. ``404 Not Found``).'''
        return self._status_line
    @property
    def status_code(self):
        ''' The HTTP status code as an integer (e.g. 404).'''
        return self._status_code
    def _set_status(self, status):
        # Accept either an int code or a full status line and keep
        # _status_code and _status_line in sync.
        if isinstance(status, int):
            code, status = status, _HTTP_STATUS_LINES.get(status)
        elif ' ' in status:
            status = status.strip()
            code = int(status.split()[0])
        else:
            raise ValueError('String status line without a reason phrase.')
        if not 100 <= code <= 999: raise ValueError('Status code out of range.')
        self._status_code = code
        self._status_line = str(status or ('%d Unknown' % code))
    def _get_status(self):
        return self._status_line
    status = property(_get_status, _set_status, None,
        ''' A writeable property to change the HTTP response status. It accepts
            either a numeric code (100-999) or a string with a custom reason
            phrase (e.g. "404 Brain not found"). Both :data:`status_line` and
            :data:`status_code` are updated accordingly. The return value is
            always a status string. ''')
    del _get_status, _set_status
    @property
    def headers(self):
        ''' An instance of :class:`HeaderDict`, a case-insensitive dict-like
            view on the response headers. '''
        hdict = HeaderDict()
        hdict.dict = self._headers
        return hdict
    def __contains__(self, name): return _hkey(name) in self._headers
    def __delitem__(self, name): del self._headers[_hkey(name)]
    def __getitem__(self, name): return self._headers[_hkey(name)][-1]
    def __setitem__(self, name, value): self._headers[_hkey(name)] = [str(value)]
    def get_header(self, name, default=None):
        ''' Return the value of a previously defined header. If there is no
            header with that name, return a default value. '''
        return self._headers.get(_hkey(name), [default])[-1]
    def set_header(self, name, value):
        ''' Create a new response header, replacing any previously defined
            headers with the same name. '''
        self._headers[_hkey(name)] = [str(value)]
    def add_header(self, name, value):
        ''' Add an additional response header, not removing duplicates. '''
        self._headers.setdefault(_hkey(name), []).append(str(value))
    def iter_headers(self):
        ''' Yield (header, value) tuples, skipping headers that are not
            allowed with the current response status code. '''
        return self.headerlist
    @property
    def headerlist(self):
        ''' WSGI conform list of (header, value) tuples. '''
        out = []
        headers = list(self._headers.items())
        if 'Content-Type' not in self._headers:
            headers.append(('Content-Type', [self.default_content_type]))
        # Drop headers forbidden for this status code (see bad_headers).
        if self._status_code in self.bad_headers:
            bad_headers = self.bad_headers[self._status_code]
            headers = [h for h in headers if h[0] not in bad_headers]
        out += [(name, val) for name, vals in headers for val in vals]
        if self._cookies:
            for c in self._cookies.values():
                out.append(('Set-Cookie', c.OutputString()))
        return out
    content_type = HeaderProperty('Content-Type')
    content_length = HeaderProperty('Content-Length', reader=int)
    expires = HeaderProperty('Expires',
        reader=lambda x: datetime.utcfromtimestamp(parse_date(x)),
        writer=lambda x: http_date(x))
    @property
    def charset(self, default='UTF-8'):
        """ Return the charset specified in the content-type header (default: utf8). """
        if 'charset=' in self.content_type:
            return self.content_type.split('charset=')[-1].split(';')[0].strip()
        return default
    def set_cookie(self, name, value, secret=None, **options):
        ''' Create a new cookie or replace an old one. If the `secret` parameter is
            set, create a `Signed Cookie` (described below).
            :param name: the name of the cookie.
            :param value: the value of the cookie.
            :param secret: a signature key required for signed cookies.
            Additionally, this method accepts all RFC 2109 attributes that are
            supported by :class:`cookie.Morsel`, including:
            :param max_age: maximum age in seconds. (default: None)
            :param expires: a datetime object or UNIX timestamp. (default: None)
            :param domain: the domain that is allowed to read the cookie.
              (default: current domain)
            :param path: limits the cookie to a given path (default: current path)
            :param secure: limit the cookie to HTTPS connections (default: off).
            :param httponly: prevents client-side javascript to read this cookie
              (default: off, requires Python 2.6 or newer).
            If neither `expires` nor `max_age` is set (default), the cookie will
            expire at the end of the browser session (as soon as the browser
            window is closed).
            Signed cookies may store any pickle-able object and are
            cryptographically signed to prevent manipulation. Keep in mind that
            cookies are limited to 4kb in most browsers.
            Warning: Signed cookies are not encrypted (the client can still see
            the content) and not copy-protected (the client can restore an old
            cookie). The main intention is to make pickling and unpickling
            safe, not to store secret information at client side.
        '''
        if not self._cookies:
            self._cookies = SimpleCookie()
        if secret:
            value = touni(cookie_encode((name, value), secret))
        elif not isinstance(value, basestring):
            raise TypeError('Secret key missing for non-string Cookie.')
        if len(value) > 4096: raise ValueError('Cookie value to long.')
        self._cookies[name] = value
        for key, value in options.items():
            if key == 'max_age':
                if isinstance(value, timedelta):
                    value = value.seconds + value.days * 24 * 3600
            if key == 'expires':
                # Normalize datetime/date or numeric timestamps to the
                # RFC 1123 date format expected by browsers.
                if isinstance(value, (datedate, datetime)):
                    value = value.timetuple()
                elif isinstance(value, (int, float)):
                    value = time.gmtime(value)
                value = time.strftime("%a, %d %b %Y %H:%M:%S GMT", value)
            self._cookies[name][key.replace('_', '-')] = value
    def delete_cookie(self, key, **kwargs):
        ''' Delete a cookie. Be sure to use the same `domain` and `path`
            settings as used to create the cookie. '''
        kwargs['max_age'] = -1
        kwargs['expires'] = 0
        self.set_cookie(key, '', **kwargs)
    def __repr__(self):
        out = ''
        for name, value in self.headerlist:
            out += '%s: %s\n' % (name.title(), value.strip())
        return out
def _local_property():
    """ Return a property whose value is stored separately for each thread. """
    storage = threading.local()
    def fget(self):
        try:
            return storage.var
        except AttributeError:
            raise RuntimeError("Request context not initialized.")
    def fset(self, value):
        storage.var = value
    def fdel(self):
        del storage.var
    return property(fget, fset, fdel, 'Thread-local property')
class LocalRequest(BaseRequest):
    ''' A thread-local subclass of :class:`BaseRequest` with a different
        set of attributes for each thread. There is usually only one global
        instance of this class (:data:`request`). If accessed during a
        request/response cycle, this instance always refers to the *current*
        request (even on a multithreaded server). '''
    #: Re-initialize the instance with a new environ (alias for __init__).
    bind = BaseRequest.__init__
    #: The wrapped environ, stored per thread instead of per instance.
    environ = _local_property()
class LocalResponse(BaseResponse):
    ''' A thread-local subclass of :class:`BaseResponse` with a different
        set of attributes for each thread. There is usually only one global
        instance of this class (:data:`response`). Its attributes are used
        to build the HTTP response at the end of the request/response cycle.
    '''
    #: Re-initialize the instance (alias for __init__).
    bind = BaseResponse.__init__
    # All mutable response state lives in thread-local storage so concurrent
    # requests do not interfere with each other.
    _status_line = _local_property()
    _status_code = _local_property()
    _cookies = _local_property()
    _headers = _local_property()
    body = _local_property()
# Backwards-compatible aliases for the non-thread-local base classes.
Request = BaseRequest
Response = BaseResponse
class HTTPResponse(Response, BottleException):
    """ A :class:`Response` that doubles as an exception: raising it aborts
        request handling and sends this response to the client. """
    def __init__(self, body='', status=None, headers=None, **more_headers):
        super(HTTPResponse, self).__init__(body, status, headers, **more_headers)
    def apply(self, response):
        # Transfer this response's state onto another response object (usually
        # the thread-local one). Note: _headers and _cookies are assigned by
        # reference, not copied.
        response._status_code = self._status_code
        response._status_line = self._status_line
        response._headers = self._headers
        response._cookies = self._cookies
        response.body = self.body
class HTTPError(HTTPResponse):
    """ A :class:`HTTPResponse` used to signal an error condition. Keeps the
        original exception and traceback (if any) for debug output. """
    default_status = 500
    def __init__(self, status=None, body=None, exception=None, traceback=None,
                 **options):
        #: The exception that caused this error (or None).
        self.exception = exception
        #: A formatted traceback string (or None).
        self.traceback = traceback
        super(HTTPError, self).__init__(body, status, **options)
# Raised for plugin-related configuration or installation problems.
class PluginError(BottleException): pass
class JSONPlugin(object):
    """ Serialize handler return values of type ``dict`` to JSON and set the
        ``Content-Type`` header accordingly. Installed by default. """
    name = 'json'
    api = 2
    def __init__(self, json_dumps=json_dumps):
        # The serializer to use; a falsy value disables the plugin entirely.
        self.json_dumps = json_dumps
    def apply(self, callback, route):
        dumps = self.json_dumps
        if not dumps: return callback
        def wrapper(*a, **ka):
            try:
                rv = callback(*a, **ka)
            except HTTPError:
                # Treat a raised HTTPError like a returned one so its dict
                # body (if any) is serialized as well.
                rv = _e()
            if isinstance(rv, dict):
                # Attempt to serialize, raises exception on failure
                json_response = dumps(rv)
                # Set content type only if serialization successful
                response.content_type = 'application/json'
                return json_response
            elif isinstance(rv, HTTPResponse) and isinstance(rv.body, dict):
                rv.body = dumps(rv.body)
                rv.content_type = 'application/json'
            return rv
        return wrapper
class TemplatePlugin(object):
    ''' This plugin applies the :func:`view` decorator to all routes with a
        `template` config parameter. If the parameter is a tuple, the second
        element must be a dict with additional options (e.g. `template_engine`)
        or default variables for the template. '''
    name = 'template'
    api = 2
    def apply(self, callback, route):
        conf = route.config.get('template')
        if isinstance(conf, (tuple, list)) and len(conf) == 2:
            # (template_name, options_dict) form
            return view(conf[0], **conf[1])(callback)
        elif isinstance(conf, str):
            return view(conf)(callback)
        else:
            # No template configured for this route: leave callback untouched.
            return callback
class _ImportRedirect(object):
    def __init__(self, name, impmask):
        ''' Create a virtual package that redirects imports (see PEP 302). '''
        self.name = name
        #: Pattern with one '%s' placeholder for the real module name.
        self.impmask = impmask
        self.module = sys.modules.setdefault(name, imp.new_module(name))
        self.module.__dict__.update({'__file__': __file__, '__path__': [],
                                    '__all__': [], '__loader__': self})
        # Register as an import hook so find_module() is consulted.
        sys.meta_path.append(self)
    def find_module(self, fullname, path=None):
        # PEP 302 finder: only handle submodules of our virtual package.
        if '.' not in fullname: return
        packname = fullname.rsplit('.', 1)[0]
        if packname != self.name: return
        return self
    def load_module(self, fullname):
        # PEP 302 loader: import the real module and alias it under the
        # virtual name, so both names refer to the same module object.
        if fullname in sys.modules: return sys.modules[fullname]
        modname = fullname.rsplit('.', 1)[1]
        realname = self.impmask % modname
        __import__(realname)
        module = sys.modules[fullname] = sys.modules[realname]
        setattr(self.module, modname, module)
        module.__loader__ = self
        return module
class MultiDict(DictMixin):
    """ This dict stores multiple values per key, but behaves exactly like a
        normal dict in that it returns only the newest value for any given key.
        There are special methods available to access the full list of values.
    """
    def __init__(self, *a, **k):
        # Values are stored internally as lists; [-1] is the newest value.
        self.dict = dict((k, [v]) for (k, v) in dict(*a, **k).items())
    def __len__(self): return len(self.dict)
    def __iter__(self): return iter(self.dict)
    def __contains__(self, key): return key in self.dict
    def __delitem__(self, key): del self.dict[key]
    def __getitem__(self, key): return self.dict[key][-1]
    def __setitem__(self, key, value): self.append(key, value)
    def keys(self): return self.dict.keys()
    if py3k:
        # Python 3: views/generators; iter* names kept as aliases.
        def values(self): return (v[-1] for v in self.dict.values())
        def items(self): return ((k, v[-1]) for k, v in self.dict.items())
        def allitems(self):
            return ((k, v) for k, vl in self.dict.items() for v in vl)
        iterkeys = keys
        itervalues = values
        iteritems = items
        iterallitems = allitems
    else:
        # Python 2: list-returning methods plus lazy iter* variants.
        def values(self): return [v[-1] for v in self.dict.values()]
        def items(self): return [(k, v[-1]) for k, v in self.dict.items()]
        def iterkeys(self): return self.dict.iterkeys()
        def itervalues(self): return (v[-1] for v in self.dict.itervalues())
        def iteritems(self):
            return ((k, v[-1]) for k, v in self.dict.iteritems())
        def iterallitems(self):
            return ((k, v) for k, vl in self.dict.iteritems() for v in vl)
        def allitems(self):
            return [(k, v) for k, vl in self.dict.iteritems() for v in vl]
    def get(self, key, default=None, index=-1, type=None):
        ''' Return the most recent value for a key.
            :param default: The default value to be returned if the key is not
                   present or the type conversion fails.
            :param index: An index for the list of available values.
            :param type: If defined, this callable is used to cast the value
                    into a specific type. Exceptions are suppressed and result in
                    the default value to be returned.
        '''
        try:
            val = self.dict[key][index]
            return type(val) if type else val
        except Exception:
            pass
        return default
    def append(self, key, value):
        ''' Add a new value to the list of values for this key. '''
        self.dict.setdefault(key, []).append(value)
    def replace(self, key, value):
        ''' Replace the list of values with a single value. '''
        self.dict[key] = [value]
    def getall(self, key):
        ''' Return a (possibly empty) list of values for a key. '''
        return self.dict.get(key) or []
    #: Aliases for WTForms to mimic other multi-dict APIs (Django)
    getone = get
    getlist = getall
class FormsDict(MultiDict):
    ''' This :class:`MultiDict` subclass is used to store request form data.
        Additionally to the normal dict-like item access methods (which return
        unmodified data as native strings), this container also supports
        attribute-like access to its values. Attributes are automatically de-
        or recoded to match :attr:`input_encoding` (default: 'utf8'). Missing
        attributes default to an empty string. '''
    #: Encoding used for attribute values.
    input_encoding = 'utf8'
    #: If true (default), unicode strings are first encoded with `latin1`
    #: and then decoded to match :attr:`input_encoding`.
    recode_unicode = True
    def _fix(self, s, encoding=None):
        # Re-decode a native string to the configured input encoding.
        if isinstance(s, unicode) and self.recode_unicode: # Python 3 WSGI
            return s.encode('latin1').decode(encoding or self.input_encoding)
        elif isinstance(s, bytes): # Python 2 WSGI
            return s.decode(encoding or self.input_encoding)
        else:
            return s
    def decode(self, encoding=None):
        ''' Returns a copy with all keys and values de- or recoded to match
            :attr:`input_encoding`. Some libraries (e.g. WTForms) want a
            unicode dictionary. '''
        copy = FormsDict()
        enc = copy.input_encoding = encoding or self.input_encoding
        # The copy holds already-decoded values; prevent double recoding.
        copy.recode_unicode = False
        for key, value in self.allitems():
            copy.append(self._fix(key, enc), self._fix(value, enc))
        return copy
    def getunicode(self, name, default=None, encoding=None):
        ''' Return the value as a unicode string, or the default. '''
        try:
            return self._fix(self[name], encoding)
        except (UnicodeError, KeyError):
            return default
    def __getattr__(self, name, default=unicode()):
        # Without this guard, pickle generates a cryptic TypeError:
        if name.startswith('__') and name.endswith('__'):
            return super(FormsDict, self).__getattr__(name)
        return self.getunicode(name, default=default)
class HeaderDict(MultiDict):
    """ A case-insensitive variant of :class:`MultiDict`. Item assignment
        replaces the stored value list instead of appending to it. """
    def __init__(self, *a, **ka):
        self.dict = {}
        if a or ka:
            self.update(*a, **ka)
    def __contains__(self, key):
        return _hkey(key) in self.dict
    def __delitem__(self, key):
        del self.dict[_hkey(key)]
    def __getitem__(self, key):
        return self.dict[_hkey(key)][-1]
    def __setitem__(self, key, value):
        self.dict[_hkey(key)] = [str(value)]
    def append(self, key, value):
        self.dict.setdefault(_hkey(key), []).append(str(value))
    def replace(self, key, value):
        self.dict[_hkey(key)] = [str(value)]
    def getall(self, key):
        return self.dict.get(_hkey(key)) or []
    def get(self, key, default=None, index=-1):
        return MultiDict.get(self, _hkey(key), default, index)
    def filter(self, names):
        # Remove every listed header that is present; missing ones are ignored.
        for name in (_hkey(n) for n in names):
            self.dict.pop(name, None)
class WSGIHeaderDict(DictMixin):
    ''' Read-only, dict-like view onto the HTTP_* fields of a WSGI environ.

        Keys and values are native strings (2.x bytes or 3.x unicode) and
        keys are case-insensitive. If the WSGI environment contains
        non-native string values, these are de- or encoded using the
        lossless 'latin1' character set.

        The API will remain stable even on changes to the relevant PEPs.
        Currently PEP 333, 444 and 3333 are supported. (PEP 444 is the only
        one that uses non-native strings.)
    '''
    #: Environ keys that do not carry the ``HTTP_`` prefix.
    cgikeys = ('CONTENT_TYPE', 'CONTENT_LENGTH')

    def __init__(self, environ):
        self.environ = environ

    def _ekey(self, key):
        ''' Map a header field name to its CGI/WSGI environ key. '''
        key = key.replace('-', '_').upper()
        return key if key in self.cgikeys else 'HTTP_' + key

    def raw(self, key, default=None):
        ''' Return the header value as is (may be bytes or unicode). '''
        return self.environ.get(self._ekey(key), default)

    def __getitem__(self, key):
        return tonat(self.environ[self._ekey(key)], 'latin1')

    def __setitem__(self, key, value):
        raise TypeError("%s is read-only." % self.__class__)

    def __delitem__(self, key):
        raise TypeError("%s is read-only." % self.__class__)

    def __iter__(self):
        for key in self.environ:
            if key[:5] == 'HTTP_':
                yield key[5:].replace('_', '-').title()
            elif key in self.cgikeys:
                yield key.replace('_', '-').title()

    def keys(self):
        return list(self)

    def __len__(self):
        return len(self.keys())

    def __contains__(self, key):
        return self._ekey(key) in self.environ
class ConfigDict(dict):
    ''' A dict-like configuration storage with additional support for
        namespaces, validators, meta-data, on_change listeners and more.
    '''

    __slots__ = ('_meta', '_on_change')

    def __init__(self):
        self._meta = {}
        # Called as _on_change(key, value) on every change; value is None
        # for deletions. Defaults to a no-op.
        self._on_change = lambda name, value: None

    def load_config(self, filename):
        ''' Load values from an *.ini style config file.

            If the config file contains sections, their names are used as
            namespaces for the values within. The two special sections
            ``DEFAULT`` and ``bottle`` refer to the root namespace (no prefix).
        '''
        conf = ConfigParser()
        conf.read(filename)
        for section in conf.sections():
            for key, value in conf.items(section):
                if section not in ('DEFAULT', 'bottle'):
                    key = section + '.' + key
                self[key] = value
        return self

    def load_dict(self, source, namespace=''):
        ''' Load values from a dictionary structure. Nesting can be used to
            represent namespaces.

            >>> c.load_dict({'some': {'namespace': {'key': 'value'} } })
            {'some.namespace.key': 'value'}
        '''
        for key, value in source.items():
            if isinstance(key, str):
                nskey = (namespace + '.' + key).strip('.')
                if isinstance(value, dict):
                    self.load_dict(value, namespace=nskey)
                else:
                    self[nskey] = value
            else:
                raise TypeError('Key has type %r (not a string)' % type(key))
        return self

    def update(self, *a, **ka):
        ''' If the first parameter is a string, all keys are prefixed with this
            namespace. Apart from that it works just as the usual dict.update().
            Example: ``update('some.namespace', key='value')`` '''
        prefix = ''
        if a and isinstance(a[0], str):
            prefix = a[0].strip('.') + '.'
            a = a[1:]
        for key, value in dict(*a, **ka).items():
            self[prefix+key] = value

    def setdefault(self, key, value):
        ''' Insert *value* for *key* only if the key is missing, then return
            the value now stored for *key*.

            Bug fix: this used to return ``None`` unconditionally, breaking
            the :meth:`dict.setdefault` contract for callers that use the
            return value. '''
        if key not in self:
            self[key] = value
        return self[key]

    def __setitem__(self, key, value):
        if not isinstance(key, str):
            raise TypeError('Key has type %r (not a string)' % type(key))
        # Run the value through the registered 'filter' meta hook, if any.
        value = self.meta_get(key, 'filter', lambda x: x)(value)
        if key in self and self[key] is value:
            return  # Identity re-assignment: skip the change listener.
        self._on_change(key, value)
        dict.__setitem__(self, key, value)

    def __delitem__(self, key):
        self._on_change(key, None)
        dict.__delitem__(self, key)

    def meta_get(self, key, metafield, default=None):
        ''' Return the value of a meta field for a key. '''
        return self._meta.get(key, {}).get(metafield, default)

    def meta_set(self, key, metafield, value):
        ''' Set the meta field for a key to a new value. This triggers the
            on-change handler for existing keys. '''
        self._meta.setdefault(key, {})[metafield] = value
        if key in self:
            # Re-assign so a new 'filter' hook is applied to the stored value.
            self[key] = self[key]

    def meta_list(self, key):
        ''' Return an iterable of meta field names defined for a key. '''
        return self._meta.get(key, {}).keys()
class AppStack(list):
    """ A stack-like list. Calling the stack returns its topmost element. """

    def __call__(self):
        """ Return the current default application. """
        return self[-1]

    def push(self, value=None):
        """ Add a new :class:`Bottle` instance to the stack and return it.
            Non-Bottle values (including None) are replaced by a fresh app. """
        if not isinstance(value, Bottle):
            value = Bottle()
        self.append(value)
        return value
class WSGIFileWrapper(object):
    ''' Iterate over a file(-like) object in fixed-size chunks, exposing the
        wrapped object's common file methods directly. '''

    def __init__(self, fp, buffer_size=1024*64):
        self.fp, self.buffer_size = fp, buffer_size
        # Forward the usual file API of the wrapped object, when present.
        for attr in ('fileno', 'close', 'read', 'readlines', 'tell', 'seek'):
            if hasattr(fp, attr):
                setattr(self, attr, getattr(fp, attr))

    def __iter__(self):
        read, chunk_size = self.read, self.buffer_size
        while True:
            chunk = read(chunk_size)
            if not chunk:
                return
            yield chunk
class _closeiter(object):
    ''' Attach a .close() hook to iterators that do not support attribute
        assignment (most of itertools). '''

    def __init__(self, iterator, close=None):
        self.iterator = iterator
        self.close_callbacks = makelist(close)

    def __iter__(self):
        return iter(self.iterator)

    def close(self):
        # Run every registered callback, in registration order.
        for callback in self.close_callbacks:
            callback()
class ResourceManager(object):
    ''' This class manages a list of search paths and helps to find and open
        application-bound resources (files).

        :param base: default value for :meth:`add_path` calls.
        :param opener: callable used to open resources.
        :param cachemode: controls which lookups are cached. One of 'all',
                          'found' or 'none'.
    '''

    def __init__(self, base='./', opener=open, cachemode='all'):
        # Bug fix: the `opener` argument used to be ignored (the builtin
        # `open` was always stored), so custom openers never took effect.
        self.opener = opener
        self.base = base
        self.cachemode = cachemode
        #: A list of search paths. See :meth:`add_path` for details.
        self.path = []
        #: A cache for resolved paths. ``res.cache.clear()`` clears the cache.
        self.cache = {}

    def add_path(self, path, base=None, index=None, create=False):
        ''' Add a new path to the list of search paths. Return False if the
            path does not exist.

            :param path: The new search path. Relative paths are turned into
                an absolute and normalized form. If the path looks like a file
                (not ending in `/`), the filename is stripped off.
            :param base: Path used to absolutize relative search paths.
                Defaults to :attr:`base` which defaults to ``os.getcwd()``.
            :param index: Position within the list of search paths. Defaults
                to last index (appends to the list).
            :param create: If True, create the directory if it is missing.

            The `base` parameter makes it easy to reference files installed
            along with a python module or package::

                res.add_path('./resources/', __file__)
        '''
        base = os.path.abspath(os.path.dirname(base or self.base))
        path = os.path.abspath(os.path.join(base, os.path.dirname(path)))
        path += os.sep
        # Re-adding an existing path moves it to the requested position.
        if path in self.path:
            self.path.remove(path)
        if create and not os.path.isdir(path):
            os.makedirs(path)
        if index is None:
            self.path.append(path)
        else:
            self.path.insert(index, path)
        self.cache.clear()
        return os.path.exists(path)

    def __iter__(self):
        ''' Iterate over all existing files in all registered paths. '''
        search = self.path[:]
        while search:
            path = search.pop()
            if not os.path.isdir(path):
                continue
            for name in os.listdir(path):
                full = os.path.join(path, name)
                # Recurse into subdirectories; yield plain files.
                if os.path.isdir(full):
                    search.append(full)
                else:
                    yield full

    def lookup(self, name):
        ''' Search for a resource and return an absolute file path, or `None`.

            The :attr:`path` list is searched in order. The first match is
            returned. Symlinks are followed. The result is cached to speed up
            future lookups. '''
        if name not in self.cache or DEBUG:
            for path in self.path:
                fpath = os.path.join(path, name)
                if os.path.isfile(fpath):
                    if self.cachemode in ('all', 'found'):
                        self.cache[name] = fpath
                    return fpath
            if self.cachemode == 'all':
                self.cache[name] = None
        # Bug fix: use .get() so a miss with cachemode 'found' or 'none'
        # returns None (as documented) instead of raising KeyError.
        return self.cache.get(name)

    def open(self, name, mode='r', *args, **kwargs):
        ''' Find a resource and return a file object, or raise IOError. '''
        fname = self.lookup(name)
        if not fname:
            raise IOError("Resource %r not found." % name)
        return self.opener(fname, mode=mode, *args, **kwargs)
class FileUpload(object):
    ''' Wrapper around a single file uploaded through a form field. '''

    def __init__(self, fileobj, name, filename, headers=None):
        #: Open file(-like) object (BytesIO buffer or temporary file)
        self.file = fileobj
        #: Name of the upload form field
        self.name = name
        #: Raw filename as sent by the client (may contain unsafe characters)
        self.raw_filename = filename
        #: A :class:`HeaderDict` with additional headers (e.g. content-type)
        self.headers = HeaderDict(headers) if headers else HeaderDict()

    content_type = HeaderProperty('Content-Type')
    content_length = HeaderProperty('Content-Length', reader=int, default=-1)

    @cached_property
    def filename(self):
        ''' Name of the file on the client file system, normalized for
            file system compatibility. An empty filename is returned as
            'empty'. Only ASCII letters, digits, dashes, underscores and
            dots survive; accents are removed if possible, whitespace
            collapses to a single dash, leading or trailing dots and
            dashes are stripped, and the result is capped at 255 chars.
        '''
        name = self.raw_filename
        if not isinstance(name, unicode):
            name = name.decode('utf8', 'ignore')
        # Strip accents down to plain ASCII where possible.
        name = normalize('NFKD', name).encode('ASCII', 'ignore').decode('ASCII')
        # Drop any client-side directory components (both separators).
        name = os.path.basename(name.replace('\\', os.path.sep))
        name = re.sub(r'[^a-zA-Z0-9-_.\s]', '', name).strip()
        name = re.sub(r'[-\s]+', '-', name).strip('.-')
        return name[:255] or 'empty'

    def _copy_file(self, fp, chunk_size=2**16):
        read, write = self.file.read, fp.write
        offset = self.file.tell()
        while 1:
            chunk = read(chunk_size)
            if not chunk:
                break
            write(chunk)
        # Restore the original position so the upload can be read again.
        self.file.seek(offset)

    def save(self, destination, overwrite=False, chunk_size=2**16):
        ''' Save file to disk or copy its content to an open file(-like)
            object. If *destination* is a directory, :attr:`filename` is
            added to the path. Existing files are not overwritten by
            default (IOError).

            :param destination: File path, directory or file(-like) object.
            :param overwrite: If True, replace existing files. (default: False)
            :param chunk_size: Bytes to read at a time. (default: 64kb)
        '''
        if not isinstance(destination, basestring):  # file-like target
            self._copy_file(destination, chunk_size)
            return
        if os.path.isdir(destination):
            destination = os.path.join(destination, self.filename)
        if not overwrite and os.path.exists(destination):
            raise IOError('File exists.')
        with open(destination, 'wb') as fp:
            self._copy_file(fp, chunk_size)
def abort(code=500, text='Unknown Error.'):
    """ Abort request handling by raising an :exc:`HTTPError`. """
    raise HTTPError(code, text)
def redirect(url, code=None):
    """ Abort execution and trigger a redirect. Defaults to 303 for
        HTTP/1.1 clients and 302 otherwise. """
    if not code:
        is_http11 = request.get('SERVER_PROTOCOL') == "HTTP/1.1"
        code = 303 if is_http11 else 302
    res = response.copy(cls=HTTPResponse)
    res.status = code
    res.body = ""
    # Resolve relative targets against the current request URL.
    res.set_header('Location', urljoin(request.url, url))
    raise res
def _file_iter_range(fp, offset, bytes, maxread=1024*1024):
''' Yield chunks from a range in a file. No chunk is bigger than maxread.'''
fp.seek(offset)
while bytes > 0:
part = fp.read(min(bytes, maxread))
if not part: break
bytes -= len(part)
yield part
def static_file(filename, root, mimetype='auto', download=False, charset='UTF-8'):
    """ Open a file in a safe way and return an :exc:`HTTPResponse` with
        status code 200, 206, 304, 403, 404 or 416. The ``Content-Type``,
        ``Content-Encoding``, ``Content-Length`` and ``Last-Modified``
        headers are set if possible. Special support for
        ``If-Modified-Since``, ``Range`` and ``HEAD`` requests.

        :param filename: Name or path of the file to send.
        :param root: Root path for file lookups. Should be an absolute directory
            path.
        :param mimetype: Defines the content-type header (default: guess from
            file extension)
        :param download: If True, ask the browser to open a `Save as...` dialog
            instead of opening the file with the associated program. You can
            specify a custom filename as a string. If not specified, the
            original filename is used (default: False).
        :param charset: The charset to use for files with a ``text/*``
            mime-type. (default: UTF-8)
    """
    root = os.path.abspath(root) + os.sep
    filename = os.path.abspath(os.path.join(root, filename.strip('/\\')))
    headers = dict()
    # Refuse paths that escape the root directory (e.g. via '..').
    if not filename.startswith(root):
        return HTTPError(403, "Access denied.")
    if not os.path.exists(filename) or not os.path.isfile(filename):
        return HTTPError(404, "File does not exist.")
    if not os.access(filename, os.R_OK):
        return HTTPError(403, "You do not have permission to access this file.")
    if mimetype == 'auto':
        # Guess type and encoding from the file extension.
        mimetype, encoding = mimetypes.guess_type(filename)
        if encoding: headers['Content-Encoding'] = encoding
    if mimetype:
        # Append the charset only to text types that do not carry one yet.
        if mimetype[:5] == 'text/' and charset and 'charset' not in mimetype:
            mimetype += '; charset=%s' % charset
        headers['Content-Type'] = mimetype
    if download:
        download = os.path.basename(filename if download == True else download)
        headers['Content-Disposition'] = 'attachment; filename="%s"' % download
    stats = os.stat(filename)
    headers['Content-Length'] = clen = stats.st_size
    lm = time.strftime("%a, %d %b %Y %H:%M:%S GMT", time.gmtime(stats.st_mtime))
    headers['Last-Modified'] = lm
    ims = request.environ.get('HTTP_IF_MODIFIED_SINCE')
    if ims:
        ims = parse_date(ims.split(";")[0].strip())
        # Answer conditional requests with 304 if the file is unchanged.
        if ims is not None and ims >= int(stats.st_mtime):
            headers['Date'] = time.strftime("%a, %d %b %Y %H:%M:%S GMT", time.gmtime())
            return HTTPResponse(status=304, **headers)
    # HEAD requests get headers only; all others stream the file body.
    body = '' if request.method == 'HEAD' else open(filename, 'rb')
    headers["Accept-Ranges"] = "bytes"
    ranges = request.environ.get('HTTP_RANGE')
    if 'HTTP_RANGE' in request.environ:
        # Serve partial content for Range requests (first range only).
        ranges = list(parse_range_header(request.environ['HTTP_RANGE'], clen))
        if not ranges:
            return HTTPError(416, "Requested Range Not Satisfiable")
        offset, end = ranges[0]
        headers["Content-Range"] = "bytes %d-%d/%d" % (offset, end-1, clen)
        headers["Content-Length"] = str(end-offset)
        if body: body = _file_iter_range(body, offset, end-offset)
        return HTTPResponse(body, status=206, **headers)
    return HTTPResponse(body, **headers)
def debug(mode=True):
    """ Toggle the global debug flag. Only one debug level is supported at
        the moment. Enabling also un-silences deprecation warnings. """
    global DEBUG
    if mode:
        warnings.simplefilter('default')
    DEBUG = bool(mode)
def http_date(value):
    ''' Render *value* as an RFC 1123 date string. Accepts date/datetime
        objects, epoch numbers, struct_time tuples, or pre-formatted
        strings (returned unchanged). '''
    if isinstance(value, (datedate, datetime)):
        value = value.utctimetuple()
    elif isinstance(value, (int, float)):
        value = time.gmtime(value)
    if isinstance(value, basestring):
        return value
    return time.strftime("%a, %d %b %Y %H:%M:%S GMT", value)
def parse_date(ims):
    """ Parse rfc1123, rfc850 and asctime timestamps and return UTC epoch.
        Returns None if the string cannot be parsed. """
    try:
        parts = email.utils.parsedate_tz(ims)
        # mktime works in local time (dst flag forced to 0); correct for
        # the header's own offset and the local timezone afterwards.
        local = time.mktime(parts[:8] + (0,))
        return local - (parts[9] or 0) - time.timezone
    except (TypeError, ValueError, IndexError, OverflowError):
        return None
def parse_auth(header):
    """ Parse rfc2617 HTTP authentication header string (basic) and return (user,pass) tuple or None"""
    try:
        method, data = header.split(None, 1)
        if method.lower() != 'basic':
            return None
        user, pwd = touni(base64.b64decode(tob(data))).split(':', 1)
        return user, pwd
    except (KeyError, ValueError):
        return None
def parse_range_header(header, maxlen=0):
    ''' Yield (start, end) ranges parsed from a HTTP Range header. Skip
        unsatisfiable ranges. The end index is non-inclusive.'''
    if not header or header[:6] != 'bytes=':
        return
    for spec in header[6:].split(','):
        if '-' not in spec:
            continue
        start, _, end = spec.partition('-')
        try:
            if not start:   # bytes=-100 -> the final 100 bytes
                start, end = max(0, maxlen - int(end)), maxlen
            elif not end:   # bytes=100- -> everything from offset 100 on
                start, end = int(start), maxlen
            else:           # bytes=100-200 -> inclusive range 100..200
                start, end = int(start), min(int(end) + 1, maxlen)
            if 0 <= start < end <= maxlen:
                yield start, end
        except ValueError:
            pass  # non-numeric range spec: skip it
def _parse_qsl(qs):
    ''' Parse a query string into a list of (key, value) pairs. Accepts
        both '&' and ';' as field separators. '''
    pairs = []
    for field in qs.replace(';', '&').split('&'):
        if not field:
            continue
        parts = field.split('=', 1)
        if len(parts) != 2:
            parts.append('')  # bare key without '=' gets an empty value
        key = urlunquote(parts[0].replace('+', ' '))
        value = urlunquote(parts[1].replace('+', ' '))
        pairs.append((key, value))
    return pairs
def _lscmp(a, b):
''' Compares two strings in a cryptographically safe way:
Runtime is not affected by length of common prefix. '''
return not sum(0 if x==y else 1 for x, y in zip(a, b)) and len(a) == len(b)
def cookie_encode(data, key):
    ''' Encode and sign a pickle-able object. Return a (byte) string.
        NOTE: the payload is pickled; anyone who knows *key* can craft a
        cookie that unpickles arbitrary objects on decode. Keep it secret. '''
    msg = base64.b64encode(pickle.dumps(data, -1))
    sig = base64.b64encode(hmac.new(tob(key), msg).digest())
    return tob('!') + sig + tob('?') + msg
def cookie_decode(data, key):
    ''' Verify and decode an encoded string. Return an object or None.
        The payload is only unpickled after the HMAC signature checks out. '''
    data = tob(data)
    if not cookie_is_encoded(data):
        return None
    sig, msg = data.split(tob('?'), 1)
    expected = base64.b64encode(hmac.new(tob(key), msg).digest())
    if _lscmp(sig[1:], expected):
        return pickle.loads(base64.b64decode(msg))
    return None
def cookie_is_encoded(data):
    ''' Heuristic check: does *data* look like a signed cookie string
        (leading '!' marker and a '?' separator)? '''
    return bool(data.startswith(tob('!')) and tob('?') in data)
def html_escape(string):
    ''' Escape HTML special characters ``&<>`` and quotes ``'"``.

        Bug fix: the replacement literals had been corrupted into no-ops
        (each character replaced by itself); restored the proper HTML
        entities. '''
    # '&' must be replaced first or the other entities get double-escaped.
    return string.replace('&', '&amp;').replace('<', '&lt;').replace('>', '&gt;')\
                 .replace('"', '&quot;').replace("'", '&#039;')
def html_quote(string):
    ''' Escape and quote a string to be used as an HTTP attribute.

        Bug fix: the replacement literals had been corrupted (newline/tab
        replaced by plain whitespace); restored the numeric entities so the
        quoted value stays on a single line. '''
    return '"%s"' % html_escape(string).replace('\n', '&#10;')\
                    .replace('\r', '&#13;').replace('\t', '&#9;')
def yieldroutes(func):
    """ Generate route strings that match the signature (name, args) of the
        func parameter. More than one route is yielded if the function takes
        optional keyword arguments. The output is best described by example::

            a()          -> '/a'
            b(x, y)      -> '/b/<x>/<y>'
            c(x, y=5)    -> '/c/<x>' and '/c/<x>/<y>'
            d(x=5, y=6)  -> '/d' and '/d/<x>' and '/d/<x>/<y>'
    """
    path = '/' + func.__name__.replace('__', '/').lstrip('/')
    spec = getargspec(func)
    # Arguments without defaults are mandatory path segments.
    required = len(spec[0]) - len(spec[3] or [])
    path += ('/<%s>' * required) % tuple(spec[0][:required])
    yield path
    # Each defaulted argument adds one more, longer route variant.
    for optional in spec[0][required:]:
        path += '/<%s>' % optional
        yield path
def path_shift(script_name, path_info, shift=1):
    ''' Shift path fragments from PATH_INFO to SCRIPT_NAME and vice versa.

        :return: The modified paths.
        :param script_name: The SCRIPT_NAME path.
        :param path_info: The PATH_INFO path.
        :param shift: The number of path fragments to shift. May be negative
          to change the shift direction. (default: 1)
    '''
    if shift == 0:
        return script_name, path_info
    frags = path_info.strip('/').split('/')
    script = script_name.strip('/').split('/')
    # ''.split('/') yields [''] — normalize empty paths to empty lists.
    if frags and frags[0] == '':
        frags = []
    if script and script[0] == '':
        script = []
    if 0 < shift <= len(frags):
        # Move the first *shift* fragments onto the script name.
        script = script + frags[:shift]
        frags = frags[shift:]
    elif 0 > shift >= -len(script):
        # Move the last *-shift* fragments back onto the path info.
        frags = script[shift:] + frags
        script = script[:shift]
    else:
        empty = 'SCRIPT_NAME' if shift < 0 else 'PATH_INFO'
        raise AssertionError("Cannot shift. Nothing left from %s" % empty)
    new_script_name = '/' + '/'.join(script)
    new_path_info = '/' + '/'.join(frags)
    # Preserve a trailing slash on the (non-empty) path info.
    if path_info.endswith('/') and frags:
        new_path_info += '/'
    return new_script_name, new_path_info
def auth_basic(check, realm="private", text="Access denied"):
    ''' Callback decorator to require HTTP auth (basic).
        TODO: Add route(check_auth=...) parameter. '''
    def decorator(func):
        @functools.wraps(func)
        def wrapper(*a, **ka):
            user, password = request.auth or (None, None)
            if user is not None and check(user, password):
                return func(*a, **ka)
            # Missing or rejected credentials: challenge the client.
            err = HTTPError(401, text)
            err.add_header('WWW-Authenticate', 'Basic realm="%s"' % realm)
            return err
        return wrapper
    return decorator
def make_default_app_wrapper(name):
    ''' Return a callable that relays calls to the current default app,
        carrying over the metadata of the matching Bottle method. '''
    def wrapper(*a, **ka):
        return getattr(app(), name)(*a, **ka)
    return functools.wraps(getattr(Bottle, name))(wrapper)
# Module-level convenience shortcuts; each forwards to the method of the
# same name on the current default application.
route = make_default_app_wrapper('route')
get = make_default_app_wrapper('get')
post = make_default_app_wrapper('post')
put = make_default_app_wrapper('put')
delete = make_default_app_wrapper('delete')
error = make_default_app_wrapper('error')
mount = make_default_app_wrapper('mount')
hook = make_default_app_wrapper('hook')
install = make_default_app_wrapper('install')
uninstall = make_default_app_wrapper('uninstall')
url = make_default_app_wrapper('get_url')
class ServerAdapter(object):
    ''' Base class for pluggable server backends. Subclasses override
        :meth:`run` to start the actual server (blocking). '''
    quiet = False

    def __init__(self, host='127.0.0.1', port=8080, **options):
        self.options = options
        self.host = host
        self.port = int(port)

    def run(self, handler):  # pragma: no cover
        pass

    def __repr__(self):
        args = ', '.join('%s=%r' % (k, v) for k, v in self.options.items())
        return "%s(%s)" % (self.__class__.__name__, args)
class CGIServer(ServerAdapter):
    ''' Run the application as a one-shot CGI script. '''
    quiet = True
    def run(self, handler): # pragma: no cover
        from wsgiref.handlers import CGIHandler
        def fixed_environ(environ, start_response):
            # Some CGI environments omit PATH_INFO entirely; default it.
            environ.setdefault('PATH_INFO', '')
            return handler(environ, start_response)
        CGIHandler().run(fixed_environ)
class FlupFCGIServer(ServerAdapter):
    ''' FastCGI adapter based on flup. '''
    def run(self, handler): # pragma: no cover
        import flup.server.fcgi
        # Respect a user-supplied bindAddress (e.g. a unix socket path).
        self.options.setdefault('bindAddress', (self.host, self.port))
        flup.server.fcgi.WSGIServer(handler, **self.options).run()
class WSGIRefServer(ServerAdapter):
    ''' Adapter for the single-threaded reference server from the standard
        library (wsgiref). '''
    def run(self, app): # pragma: no cover
        from wsgiref.simple_server import WSGIRequestHandler, WSGIServer
        from wsgiref.simple_server import make_server
        import socket
        class FixedHandler(WSGIRequestHandler):
            def address_string(self): # Prevent reverse DNS lookups please.
                return self.client_address[0]
            def log_request(*args, **kw):
                # Deliberately no `self` parameter: `self` resolves to the
                # enclosing adapter instance, so its `quiet` flag controls
                # request logging.
                if not self.quiet:
                    return WSGIRequestHandler.log_request(*args, **kw)
        handler_cls = self.options.get('handler_class', FixedHandler)
        server_cls = self.options.get('server_class', WSGIServer)
        if ':' in self.host: # Fix wsgiref for IPv6 addresses.
            if getattr(server_cls, 'address_family') == socket.AF_INET:
                # Rebind server_cls to an IPv6-capable subclass.
                class server_cls(server_cls):
                    address_family = socket.AF_INET6
        srv = make_server(self.host, self.port, app, server_cls, handler_cls)
        srv.serve_forever()
class CherryPyServer(ServerAdapter):
    ''' Adapter for CherryPy's threaded WSGI server, with optional SSL. '''
    def run(self, handler): # pragma: no cover
        from cherrypy import wsgiserver
        self.options['bind_addr'] = (self.host, self.port)
        self.options['wsgi_app'] = handler
        # The SSL-related options must be removed before constructing the
        # server (they are applied as attributes afterwards).
        certfile = self.options.get('certfile')
        if certfile:
            del self.options['certfile']
        keyfile = self.options.get('keyfile')
        if keyfile:
            del self.options['keyfile']
        ssl_module = self.options.get('ssl_module')
        if ssl_module:
            del self.options['ssl_module']
        server = wsgiserver.CherryPyWSGIServer(**self.options)
        if certfile:
            server.ssl_certificate = certfile
        if keyfile:
            server.ssl_private_key = keyfile
        if ssl_module:
            print("Setting SSL module = %s" % ssl_module)
            adapterClass = wsgiserver.get_ssl_adapter_class(ssl_module)
            adapter = adapterClass(certfile, keyfile)
            server.ssl_adapter = adapter
        try:
            server.start()
        finally:
            # Always shut the server down, even on KeyboardInterrupt.
            server.stop()
class WaitressServer(ServerAdapter):
    ''' Adapter for the pure-python waitress server. '''
    def run(self, handler):
        from waitress import serve
        serve(handler, host=self.host, port=self.port)
class PasteServer(ServerAdapter):
    ''' Adapter for paste.httpserver with access logging. '''
    def run(self, handler): # pragma: no cover
        from paste import httpserver
        from paste.translogger import TransLogger
        # Wrap the app in an access logger; console output follows `quiet`.
        handler = TransLogger(handler, setup_console_handler=(not self.quiet))
        httpserver.serve(handler, host=self.host, port=str(self.port),
                         **self.options)
class MeinheldServer(ServerAdapter):
    ''' Adapter for the meinheld server. '''
    def run(self, handler):
        from meinheld import server
        server.listen((self.host, self.port))
        server.run(handler)
class FapwsServer(ServerAdapter):
    """ Extremely fast webserver using libev. See http://www.fapws.org/ """
    def run(self, handler): # pragma: no cover
        import fapws._evwsgi as evwsgi
        from fapws import base, config
        port = self.port
        if float(config.SERVER_IDENT[-2:]) > 0.4:
            # fapws3 silently changed its API in 0.5
            port = str(port)
        evwsgi.start(self.host, port)
        # fapws3 never releases the GIL. Complain upstream. I tried. No luck.
        if 'BOTTLE_CHILD' in os.environ and not self.quiet:
            _stderr("WARNING: Auto-reloading does not work with Fapws3.\n")
            _stderr(" (Fapws3 breaks python thread support)\n")
        evwsgi.set_base_module(base)
        def app(environ, start_response):
            # Advertise the single-process nature of this server.
            environ['wsgi.multiprocess'] = False
            return handler(environ, start_response)
        evwsgi.wsgi_cb(('', app))
        evwsgi.run()
class TornadoServer(ServerAdapter):
    """ The super hyped asynchronous server by facebook. Untested. """
    def run(self, handler): # pragma: no cover
        import tornado.wsgi, tornado.httpserver, tornado.ioloop
        # Wrap the WSGI app so Tornado's async HTTP server can drive it.
        container = tornado.wsgi.WSGIContainer(handler)
        server = tornado.httpserver.HTTPServer(container)
        server.listen(port=self.port,address=self.host)
        tornado.ioloop.IOLoop.instance().start()
class AppEngineServer(ServerAdapter):
    """ Adapter for Google App Engine. """
    quiet = True
    def run(self, handler):
        from google.appengine.ext.webapp import util
        # A main() function in the handler script enables 'App Caching'.
        # Let's make sure it is there — this _really_ improves performance.
        module = sys.modules.get('__main__')
        if module and not hasattr(module, 'main'):
            module.main = lambda: util.run_wsgi_app(handler)
        util.run_wsgi_app(handler)
class TwistedServer(ServerAdapter):
    """ Untested. """
    def run(self, handler):
        from twisted.web import server, wsgi
        from twisted.python.threadpool import ThreadPool
        from twisted.internet import reactor
        thread_pool = ThreadPool()
        thread_pool.start()
        # Stop the worker threads once the reactor shuts down.
        reactor.addSystemEventTrigger('after', 'shutdown', thread_pool.stop)
        factory = server.Site(wsgi.WSGIResource(reactor, thread_pool, handler))
        reactor.listenTCP(self.port, factory, interface=self.host)
        reactor.run()
class DieselServer(ServerAdapter):
    """ Untested. """
    def run(self, handler):
        from diesel.protocols.wsgi import WSGIApplication
        # NOTE(review): self.host is not forwarded here — verify whether
        # diesel's WSGIApplication supports a host/interface argument.
        app = WSGIApplication(handler, port=self.port)
        app.run()
class GeventServer(ServerAdapter):
    """ Untested. Options:

        * `fast` (default: False) uses libevent's http server, but has some
          issues: No streaming, no pipelining, no SSL.
        * See gevent.wsgi.WSGIServer() documentation for more options.
    """
    def run(self, handler):
        from gevent import wsgi, pywsgi, local
        # threading.local must already be gevent's greenlet-local type,
        # i.e. monkey.patch_all() must have run before bottle was imported.
        if not isinstance(threading.local(), local.local):
            msg = "Bottle requires gevent.monkey.patch_all() (before import)"
            raise RuntimeError(msg)
        # Default to the streaming-capable pywsgi server unless 'fast' is set.
        if not self.options.pop('fast', None): wsgi = pywsgi
        self.options['log'] = None if self.quiet else 'default'
        address = (self.host, self.port)
        server = wsgi.WSGIServer(address, handler, **self.options)
        if 'BOTTLE_CHILD' in os.environ:
            # Let the auto-reloader parent terminate us cleanly via SIGINT.
            import signal
            signal.signal(signal.SIGINT, lambda s, f: server.stop())
        server.serve_forever()
class GeventSocketIOServer(ServerAdapter):
    ''' Adapter for gevent-socketio's SocketIOServer. '''
    def run(self,handler):
        from socketio import server
        address = (self.host, self.port)
        server.SocketIOServer(address, handler, **self.options).serve_forever()
class GunicornServer(ServerAdapter):
    """ Untested. See http://gunicorn.org/configure.html for options. """
    def run(self, handler):
        from gunicorn.app.base import Application
        config = {'bind': "%s:%d" % (self.host, int(self.port))}
        config.update(self.options)
        # Minimal Application subclass feeding gunicorn our config dict
        # and WSGI handler via closures.
        class GunicornApplication(Application):
            def init(self, parser, opts, args):
                return config
            def load(self):
                return handler
        GunicornApplication().run()
class EventletServer(ServerAdapter):
    """ Untested """
    def run(self, handler):
        from eventlet import wsgi, listen
        try:
            wsgi.server(listen((self.host, self.port)), handler,
                    log_output=(not self.quiet))
        except TypeError:
            # Fallback for old eventlet versions whose wsgi.server() does
            # not accept the log_output keyword.
            wsgi.server(listen((self.host, self.port)), handler)
class RocketServer(ServerAdapter):
    """ Untested. """
    def run(self, handler):
        from rocket import Rocket
        # Register the handler as a wsgi_app with Rocket's 'wsgi' method.
        server = Rocket((self.host, self.port), 'wsgi', { 'wsgi_app' : handler })
        server.start()
class BjoernServer(ServerAdapter):
    """ Fast server written in C: https://github.com/jonashaag/bjoern """
    def run(self, handler):
        from bjoern import run
        run(handler, self.host, self.port)
class AutoServer(ServerAdapter):
    """ Try a list of preferred backends in order and run the first one
        whose dependencies are importable. Untested. """
    adapters = [WaitressServer, PasteServer, TwistedServer, CherryPyServer, WSGIRefServer]

    def run(self, handler):
        for candidate in self.adapters:
            try:
                return candidate(self.host, self.port, **self.options).run(handler)
            except ImportError:
                pass  # backend not installed: try the next one
#: Maps server names (as accepted by :func:`run`'s ``server`` parameter)
#: to their :class:`ServerAdapter` classes.
server_names = {
    'cgi': CGIServer,
    'flup': FlupFCGIServer,
    'wsgiref': WSGIRefServer,
    'waitress': WaitressServer,
    'cherrypy': CherryPyServer,
    'paste': PasteServer,
    'fapws3': FapwsServer,
    'tornado': TornadoServer,
    'gae': AppEngineServer,
    'twisted': TwistedServer,
    'diesel': DieselServer,
    'meinheld': MeinheldServer,
    'gunicorn': GunicornServer,
    'eventlet': EventletServer,
    'gevent': GeventServer,
    'geventSocketIO':GeventSocketIOServer,
    'rocket': RocketServer,
    'bjoern' : BjoernServer,
    'auto': AutoServer,
}
def load(target, **namespace):
    """ Import a module or fetch an object from a module.

        * ``package.module`` returns `module` as a module object.
        * ``pack.mod:name`` returns the module variable `name` from `pack.mod`.
        * ``pack.mod:func()`` calls `pack.mod.func()` and returns the result.

        The last form accepts not only function calls, but any type of
        expression. Keyword arguments passed to this function are available as
        local variables. Example: ``import_string('re:compile(x)', x='[a-z]')``
    """
    if ':' in target:
        module, target = target.split(":", 1)
    else:
        module, target = target, None
    if module not in sys.modules:
        __import__(module)
    if not target:
        return sys.modules[module]
    if target.isalnum():
        # Plain name: simple attribute lookup, no evaluation needed.
        return getattr(sys.modules[module], target)
    # NOTE: the expression form is eval()'d — never pass untrusted strings.
    package_name = module.split('.')[0]
    namespace[package_name] = sys.modules[package_name]
    return eval('%s.%s' % (module, target), namespace)
def load_app(target):
    """ Load a bottle application from a module and make sure that the import
        does not affect the current default application, but returns a separate
        application object. See :func:`load` for the target parameter. """
    global NORUN
    NORUN, nr_old = True, NORUN  # suppress run() calls during the import
    try:
        tmp = default_app.push()  # Create a new "default application"
        rv = load(target)         # Import the target module
        # Prefer an explicitly returned callable over the implicit default.
        return rv if callable(rv) else tmp
    finally:
        default_app.remove(tmp)   # Drop the temporary default application
        NORUN = nr_old
#: Keep a reference to :func:`debug` so run() can still call it while its
#: own `debug` parameter shadows the name.
_debug = debug
def run(app=None, server='wsgiref', host='127.0.0.1', port=8080,
        interval=1, reloader=False, quiet=False, plugins=None,
        debug=None, **kargs):
    """ Start a server instance. This method blocks until the server terminates.

        :param app: WSGI application or target string supported by
               :func:`load_app`. (default: :func:`default_app`)
        :param server: Server adapter to use. See :data:`server_names` keys
               for valid names or pass a :class:`ServerAdapter` subclass.
               (default: `wsgiref`)
        :param host: Server address to bind to. Pass ``0.0.0.0`` to listens on
               all interfaces including the external one. (default: 127.0.0.1)
        :param port: Server port to bind to. Values below 1024 require root
               privileges. (default: 8080)
        :param reloader: Start auto-reloading server? (default: False)
        :param interval: Auto-reloader interval in seconds (default: 1)
        :param quiet: Suppress output to stdout and stderr? (default: False)
        :param plugins: Plugins to install into the application before it is
               started. (default: None)
        :param debug: If not None, set/override the global debug flag for
               this run. (default: None)
        :param options: Options passed to the server adapter.
     """
    if NORUN: return
    if reloader and not os.environ.get('BOTTLE_CHILD'):
        # Reloader parent process: re-spawn the same command line as a child
        # and keep touching a lockfile so the child knows we are alive. The
        # child signals "please restart me" by exiting with code 3.
        try:
            lockfile = None
            fd, lockfile = tempfile.mkstemp(prefix='bottle.', suffix='.lock')
            os.close(fd) # We only need this file to exist. We never write to it
            while os.path.exists(lockfile):
                args = [sys.executable] + sys.argv
                environ = os.environ.copy()
                environ['BOTTLE_CHILD'] = 'true'
                environ['BOTTLE_LOCKFILE'] = lockfile
                p = subprocess.Popen(args, env=environ)
                while p.poll() is None: # Busy wait...
                    os.utime(lockfile, None) # I am alive!
                    time.sleep(interval)
                if p.poll() != 3:
                    # Any other exit status terminates the restart loop and is
                    # propagated to the caller.
                    if os.path.exists(lockfile): os.unlink(lockfile)
                    sys.exit(p.poll())
        except KeyboardInterrupt:
            pass
        finally:
            if os.path.exists(lockfile):
                os.unlink(lockfile)
        return

    try:
        if debug is not None: _debug(debug)
        app = app or default_app()
        # Accept 'package.module:app'-style target strings, too.
        if isinstance(app, basestring):
            app = load_app(app)
        if not callable(app):
            raise ValueError("Application is not callable: %r" % app)

        for plugin in plugins or []:
            app.install(plugin)

        # 'server' may be a known name, an import string, a ServerAdapter
        # subclass, or an already-constructed adapter instance.
        if server in server_names:
            server = server_names.get(server)
        if isinstance(server, basestring):
            server = load(server)
        if isinstance(server, type):
            server = server(host=host, port=port, **kargs)
        if not isinstance(server, ServerAdapter):
            raise ValueError("Unknown or unsupported server: %r" % server)

        server.quiet = server.quiet or quiet
        if not server.quiet:
            _stderr("Bottle v%s server starting up (using %s)...\n" % (__version__, repr(server)))
            _stderr("Listening on http://%s:%d/\n" % (server.host, server.port))
            _stderr("Hit Ctrl-C to quit.\n\n")

        if reloader:
            # Reloader child process: watch module files in a background
            # thread and exit with status 3 to request a restart.
            lockfile = os.environ.get('BOTTLE_LOCKFILE')
            bgcheck = FileCheckerThread(lockfile, interval)
            with bgcheck:
                server.run(app)
            if bgcheck.status == 'reload':
                sys.exit(3)
        else:
            server.run(app)
    except KeyboardInterrupt:
        pass
    except (SystemExit, MemoryError):
        raise
    except:
        if not reloader: raise
        if not getattr(server, 'quiet', quiet):
            print_exc()
        time.sleep(interval)
        sys.exit(3)
class FileCheckerThread(threading.Thread):
    ''' Interrupt main-thread as soon as a changed module file is detected,
        the lockfile gets deleted or gets too old. '''

    def __init__(self, lockfile, interval):
        threading.Thread.__init__(self)
        self.lockfile, self.interval = lockfile, interval
        #: Is one of 'reload', 'error' or 'exit'
        self.status = None

    def run(self):
        """ Poll every `interval` seconds for changed module files or a
            stale/missing lockfile; interrupt the main thread when found. """
        exists = os.path.exists
        mtime = lambda path: os.stat(path).st_mtime
        files = dict()

        # Snapshot the modification time of every currently loaded module.
        for module in list(sys.modules.values()):
            path = getattr(module, '__file__', '')
            # Watch the source file, not the compiled .pyc/.pyo artifact.
            if path[-4:] in ('.pyo', '.pyc'): path = path[:-1]
            if path and exists(path): files[path] = mtime(path)

        while not self.status:
            # The reloader parent keeps touching the lockfile; a missing or
            # stale one means the parent process died.
            if not exists(self.lockfile)\
            or mtime(self.lockfile) < time.time() - self.interval - 5:
                self.status = 'error'
                thread.interrupt_main()
            for path, lmtime in list(files.items()):
                if not exists(path) or mtime(path) > lmtime:
                    self.status = 'reload'
                    thread.interrupt_main()
                    break
            time.sleep(self.interval)

    def __enter__(self):
        self.start()

    def __exit__(self, exc_type, exc_val, exc_tb):
        if not self.status: self.status = 'exit' # silent exit
        self.join()
        # Swallow the KeyboardInterrupt raised by interrupt_main().
        return exc_type is not None and issubclass(exc_type, KeyboardInterrupt)
class TemplateError(HTTPError):
    """ Template lookup or parsing failure, reported as a 500 server error. """
    def __init__(self, message):
        HTTPError.__init__(self, 500, message)
class BaseTemplate(object):
    """ Base class and minimal API for template adapters """
    extensions = ['tpl','html','thtml','stpl']  # tried in order by search()
    settings = {} #used in prepare()
    defaults = {} #used in render()

    def __init__(self, source=None, name=None, lookup=[], encoding='utf8', **settings):
        """ Create a new template.
        If the source parameter (str or buffer) is missing, the name argument
        is used to guess a template filename. Subclasses can assume that
        self.source and/or self.filename are set. Both are strings.
        The lookup, encoding and settings parameters are stored as instance
        variables.
        The lookup parameter stores a list containing directory paths.
        The encoding parameter should be used to decode byte strings or files.
        The settings parameter contains a dict for engine-specific settings.
        """
        self.name = name
        # Accept file-like objects as well as plain source strings.
        self.source = source.read() if hasattr(source, 'read') else source
        self.filename = source.filename if hasattr(source, 'filename') else None
        self.lookup = [os.path.abspath(x) for x in lookup]
        self.encoding = encoding
        self.settings = self.settings.copy() # Copy from class variable
        self.settings.update(settings) # Apply
        if not self.source and self.name:
            self.filename = self.search(self.name, self.lookup)
            if not self.filename:
                raise TemplateError('Template %s not found.' % repr(name))
        if not self.source and not self.filename:
            raise TemplateError('No template specified.')
        self.prepare(**self.settings)

    @classmethod
    def search(cls, name, lookup=[]):
        """ Search name in all directories specified in lookup.
        First without, then with common extensions. Return first hit. """
        if not lookup:
            depr('The template lookup path list should not be empty.', True) #0.12
            lookup = ['.']

        if os.path.isabs(name) and os.path.isfile(name):
            depr('Absolute template path names are deprecated.', True) #0.12
            return os.path.abspath(name)

        for spath in lookup:
            spath = os.path.abspath(spath) + os.sep
            fname = os.path.abspath(os.path.join(spath, name))
            # Reject names that escape the lookup directory (e.g. '../x').
            if not fname.startswith(spath): continue
            if os.path.isfile(fname): return fname
            for ext in cls.extensions:
                if os.path.isfile('%s.%s' % (fname, ext)):
                    return '%s.%s' % (fname, ext)

    @classmethod
    def global_config(cls, key, *args):
        ''' This reads or sets the global settings stored in class.settings. '''
        if args:
            cls.settings = cls.settings.copy() # Make settings local to class
            cls.settings[key] = args[0]
        else:
            return cls.settings[key]

    def prepare(self, **options):
        """ Run preparations (parsing, caching, ...).
        It should be possible to call this again to refresh a template or to
        update settings.
        """
        raise NotImplementedError

    def render(self, *args, **kwargs):
        """ Render the template with the specified local variables and return
        a single byte or unicode string. If it is a byte string, the encoding
        must match self.encoding. This method must be thread-safe!
        Local variables may be provided in dictionaries (args)
        or directly, as keywords (kwargs).
        """
        raise NotImplementedError
class MakoTemplate(BaseTemplate):
    """ Adapter that renders templates through the Mako engine. """

    def prepare(self, **options):
        from mako.template import Template
        from mako.lookup import TemplateLookup
        # Hand our encoding and debug preference on to Mako.
        options.update({'input_encoding':self.encoding})
        options.setdefault('format_exceptions', bool(DEBUG))
        tpl_lookup = TemplateLookup(directories=self.lookup, **options)
        if self.source:
            self.tpl = Template(self.source, lookup=tpl_lookup, **options)
        else:
            self.tpl = Template(uri=self.name, filename=self.filename,
                                lookup=tpl_lookup, **options)

    def render(self, *args, **kwargs):
        # Positional dicts and keyword args both become template variables;
        # later sources win, but class-level defaults lose to both.
        for extra in args:
            kwargs.update(extra)
        env = self.defaults.copy()
        env.update(kwargs)
        return self.tpl.render(**env)
class CheetahTemplate(BaseTemplate):
    """ Adapter that renders templates through the Cheetah engine. """

    def prepare(self, **options):
        from Cheetah.Template import Template
        # Per-thread variable store; Cheetah resolves names via searchList.
        self.context = threading.local()
        self.context.vars = {}
        options['searchList'] = [self.context.vars]
        if self.source:
            self.tpl = Template(source=self.source, **options)
        else:
            self.tpl = Template(file=self.filename, **options)

    def render(self, *args, **kwargs):
        for extra in args:
            kwargs.update(extra)
        # Publish defaults first so explicit variables override them.
        self.context.vars.update(self.defaults)
        self.context.vars.update(kwargs)
        rendered = str(self.tpl)
        self.context.vars.clear()
        return rendered
class Jinja2Template(BaseTemplate):
    """ Adapter that renders templates through the Jinja2 engine. """

    def prepare(self, filters=None, tests=None, globals={}, **kwargs):
        from jinja2 import Environment, FunctionLoader
        # Template names are resolved through our own loader() below.
        self.env = Environment(loader=FunctionLoader(self.loader), **kwargs)
        if filters:
            self.env.filters.update(filters)
        if tests:
            self.env.tests.update(tests)
        if globals:
            self.env.globals.update(globals)
        if self.source:
            self.tpl = self.env.from_string(self.source)
        else:
            self.tpl = self.env.get_template(self.filename)

    def render(self, *args, **kwargs):
        for extra in args:
            kwargs.update(extra)
        env = self.defaults.copy()
        env.update(kwargs)
        return self.tpl.render(**env)

    def loader(self, name):
        # Callback for Jinja2's FunctionLoader: resolve and decode a template.
        fname = self.search(name, self.lookup)
        if not fname:
            return
        with open(fname, "rb") as f:
            return f.read().decode(self.encoding)
class SimpleTemplate(BaseTemplate):
    """ Bottle's built-in stpl template adapter: compiles templates to python
        code via :class:`StplParser` and executes them with eval(). """

    # TODO: Figure out how to pass the arguments for this correctly
    def prepare(self, escape_func=html_escape, noescape=True, syntax=None, **ka):
        self.cache = {}  # name -> SimpleTemplate, for include/rebase targets
        enc = self.encoding
        self._str = lambda x: touni(x, enc)
        self._escape = lambda x: escape_func(touni(x, enc))
        self.syntax = syntax
        if noescape:
            # Swap the two helpers: {{x}} then prints raw and {{!x}} escapes.
            self._str, self._escape = self._escape, self._str

    @cached_property
    def co(self):
        # Compiled code object for the translated template (computed once).
        return compile(self.code, self.filename or '<string>', 'exec')

    @cached_property
    def code(self):
        # Python source produced by translating the stpl template.
        source = self.source or open(self.filename, 'rb').read()
        try:
            source, encoding = touni(source), 'utf8'
        except UnicodeError:
            depr('Template encodings other than utf8 are no longer supported.') #0.11
            source, encoding = touni(source, 'latin1'), 'latin1'
        parser = StplParser(source, encoding=encoding, syntax=self.syntax)
        code = parser.translate()
        self.encoding = parser.encoding
        return code

    def _rebase(self, _env, _name=None, **kwargs):
        # Record the rebase target; applied after the child template ran.
        _env['_rebase'] = (_name, kwargs)

    def _include(self, _env, _name=None, **kwargs):
        # Render another template with a copy of the current environment.
        env = _env.copy()
        env.update(kwargs)
        if _name not in self.cache:
            self.cache[_name] = self.__class__(name=_name, lookup=self.lookup)
        return self.cache[_name].execute(env['_stdout'], env)

    def execute(self, _stdout, kwargs):
        """ Run the compiled template; output fragments are appended to the
            _stdout list. Returns the final template environment. """
        env = self.defaults.copy()
        env.update(kwargs)
        env.update({'_stdout': _stdout, '_printlist': _stdout.extend,
            'include': functools.partial(self._include, env),
            'rebase': functools.partial(self._rebase, env), '_rebase': None,
            '_str': self._str, '_escape': self._escape, 'get': env.get,
            'setdefault': env.setdefault, 'defined': env.__contains__ })
        eval(self.co, env)
        if env.get('_rebase'):
            subtpl, rargs = env.pop('_rebase')
            rargs['base'] = ''.join(_stdout) #copy stdout
            del _stdout[:] # clear stdout
            return self._include(env, subtpl, **rargs)
        return env

    def render(self, *args, **kwargs):
        """ Render the template using keyword arguments as local variables. """
        env = {}; stdout = []
        for dictarg in args: env.update(dictarg)
        env.update(kwargs)
        self.execute(stdout, env)
        return ''.join(stdout)
class StplSyntaxError(TemplateError):
    """ Raised for syntax errors encountered while parsing an stpl template. """
    pass
class StplParser(object):
    ''' Parser for stpl templates: translates template source into python
        code (a sequence of _printlist() calls and embedded statements). '''
    _re_cache = {} #: Cache for compiled re patterns
    # This huge pile of voodoo magic splits python code into 8 different tokens.
    # 1: All kinds of python strings (trust me, it works)
    _re_tok = '((?m)[urbURB]?(?:\'\'(?!\')|""(?!")|\'{6}|"{6}' \
               '|\'(?:[^\\\\\']|\\\\.)+?\'|"(?:[^\\\\"]|\\\\.)+?"' \
               '|\'{3}(?:[^\\\\]|\\\\.|\\n)+?\'{3}' \
               '|"{3}(?:[^\\\\]|\\\\.|\\n)+?"{3}))'
    _re_inl = _re_tok.replace('|\\n','') # We re-use this string pattern later
    # 2: Comments (until end of line, but not the newline itself)
    _re_tok += '|(#.*)'
    # 3,4: Keywords that start or continue a python block (only start of line)
    _re_tok += '|^([ \\t]*(?:if|for|while|with|try|def|class)\\b)' \
               '|^([ \\t]*(?:elif|else|except|finally)\\b)'
    # 5: Our special 'end' keyword (but only if it stands alone)
    _re_tok += '|((?:^|;)[ \\t]*end[ \\t]*(?=(?:%(block_close)s[ \\t]*)?\\r?$|;|#))'
    # 6: A customizable end-of-code-block template token (only end of line)
    _re_tok += '|(%(block_close)s[ \\t]*(?=$))'
    # 7: And finally, a single newline. The 8th token is 'everything else'
    _re_tok += '|(\\r?\\n)'
    # Match the start tokens of code areas in a template
    _re_split = '(?m)^[ \t]*(\\\\?)((%(line_start)s)|(%(block_start)s))'
    # Match inline statements (may contain python strings)
    _re_inl = '%%(inline_start)s((?:%s|[^\'"\n]*?)+)%%(inline_end)s' % _re_inl
    default_syntax = '<% %> % {{ }}'

    def __init__(self, source, syntax=None, encoding='utf8'):
        self.source, self.encoding = touni(source, encoding), encoding
        self.set_syntax(syntax or self.default_syntax)
        self.code_buffer, self.text_buffer = [], []
        self.lineno, self.offset = 1, 0
        self.indent, self.indent_mod = 0, 0

    def get_syntax(self):
        ''' Tokens as a space separated string (default: <% %> % {{ }}) '''
        return self._syntax

    def set_syntax(self, syntax):
        # Build (and cache) the three regexes for this syntax specification.
        self._syntax = syntax
        self._tokens = syntax.split()
        if not syntax in self._re_cache:
            names = 'block_start block_close line_start inline_start inline_end'
            etokens = map(re.escape, self._tokens)
            pattern_vars = dict(zip(names.split(), etokens))
            patterns = (self._re_split, self._re_tok, self._re_inl)
            patterns = [re.compile(p%pattern_vars) for p in patterns]
            self._re_cache[syntax] = patterns
        self.re_split, self.re_tok, self.re_inl = self._re_cache[syntax]

    syntax = property(get_syntax, set_syntax)

    def translate(self):
        """ Translate the template source to python code and return it. """
        if self.offset: raise RuntimeError('Parser is a one time instance.')
        while True:
            m = self.re_split.search(self.source[self.offset:])
            if m:
                text = self.source[self.offset:self.offset+m.start()]
                self.text_buffer.append(text)
                self.offset += m.end()
                if m.group(1): # Escape syntax
                    # A backslash before the token emits it literally.
                    line, sep, _ = self.source[self.offset:].partition('\n')
                    self.text_buffer.append(m.group(2)+line+sep)
                    self.offset += len(line+sep)+1
                    continue
                self.flush_text()
                self.read_code(multiline=bool(m.group(4)))
            else: break
        self.text_buffer.append(self.source[self.offset:])
        self.flush_text()
        return ''.join(self.code_buffer)

    def read_code(self, multiline):
        """ Consume a code line or a multi-line code block starting at
            self.offset, emitting python code via write_code(). """
        code_line, comment = '', ''
        while True:
            m = self.re_tok.search(self.source[self.offset:])
            if not m:
                code_line += self.source[self.offset:]
                self.offset = len(self.source)
                self.write_code(code_line.strip(), comment)
                return
            code_line += self.source[self.offset:self.offset+m.start()]
            self.offset += m.end()
            _str, _com, _blk1, _blk2, _end, _cend, _nl = m.groups()
            if _str:    # Python string
                code_line += _str
            elif _com:  # Python comment (up to EOL)
                comment = _com
                if multiline and _com.strip().endswith(self._tokens[1]):
                    multiline = False # Allow end-of-block in comments
            elif _blk1: # Start-block keyword (if/for/while/def/try/...)
                code_line, self.indent_mod = _blk1, -1
                self.indent += 1
            elif _blk2: # Continue-block keyword (else/elif/except/...)
                code_line, self.indent_mod = _blk2, -1
            elif _end:  # The non-standard 'end'-keyword (ends a block)
                self.indent -= 1
            elif _cend: # The end-code-block template token (usually '%>')
                if multiline: multiline = False
                else: code_line += _cend
            else: # \n
                self.write_code(code_line.strip(), comment)
                self.lineno += 1
                code_line, comment, self.indent_mod = '', '', 0
                if not multiline:
                    break

    def flush_text(self):
        """ Emit the buffered template text as one _printlist() call,
            translating {{...}} inline expressions on the way. """
        text = ''.join(self.text_buffer)
        del self.text_buffer[:]
        if not text: return
        parts, pos, nl = [], 0, '\\\n'+'  '*self.indent
        for m in self.re_inl.finditer(text):
            prefix, pos = text[pos:m.start()], m.end()
            if prefix:
                parts.append(nl.join(map(repr, prefix.splitlines(True))))
            if prefix.endswith('\n'): parts[-1] += nl
            parts.append(self.process_inline(m.group(1).strip()))
        if pos < len(text):
            prefix = text[pos:]
            lines = prefix.splitlines(True)
            # A trailing double backslash suppresses the final newline.
            if lines[-1].endswith('\\\\\n'): lines[-1] = lines[-1][:-3]
            elif lines[-1].endswith('\\\\\r\n'): lines[-1] = lines[-1][:-4]
            parts.append(nl.join(map(repr, lines)))
        code = '_printlist((%s,))' % ', '.join(parts)
        self.lineno += code.count('\n')+1
        self.write_code(code)

    def process_inline(self, chunk):
        # {{!expr}} prints unescaped; {{expr}} goes through _escape().
        if chunk[0] == '!': return '_str(%s)' % chunk[1:]
        return '_escape(%s)' % chunk

    def write_code(self, line, comment=''):
        # Re-indent the generated line according to the current block depth.
        code  = '  ' * (self.indent+self.indent_mod)
        code += line.lstrip() + comment + '\n'
        self.code_buffer.append(code)
def template(*args, **kwargs):
    '''
    Get a rendered template as a string iterator.
    You can use a name, a filename or a template string as first parameter.
    Template rendering arguments can be passed as dictionaries
    or directly (as keyword arguments).
    '''
    tpl = args[0] if args else None
    # Adapter class and lookup path may be overridden per call; compiled
    # templates are cached per (lookup, source) pair unless DEBUG is on.
    adapter = kwargs.pop('template_adapter', SimpleTemplate)
    lookup = kwargs.pop('template_lookup', TEMPLATE_PATH)
    tplid = (id(lookup), tpl)
    if tplid not in TEMPLATES or DEBUG:
        settings = kwargs.pop('template_settings', {})
        if isinstance(tpl, adapter):
            TEMPLATES[tplid] = tpl
            if settings: TEMPLATES[tplid].prepare(**settings)
        # Heuristic: anything containing template markup or a newline is
        # treated as an inline template string, not a template name.
        # NOTE(review): a None tpl reaches this test and raises TypeError.
        elif "\n" in tpl or "{" in tpl or "%" in tpl or '$' in tpl:
            TEMPLATES[tplid] = adapter(source=tpl, lookup=lookup, **settings)
        else:
            TEMPLATES[tplid] = adapter(name=tpl, lookup=lookup, **settings)
    if not TEMPLATES[tplid]:
        abort(500, 'Template (%s) not found' % tpl)
    # Remaining positional args are dicts of template variables.
    for dictarg in args[1:]: kwargs.update(dictarg)
    return TEMPLATES[tplid].render(kwargs)
# Convenience shortcuts for template() bound to a specific engine adapter.
mako_template = functools.partial(template, template_adapter=MakoTemplate)
cheetah_template = functools.partial(template, template_adapter=CheetahTemplate)
jinja2_template = functools.partial(template, template_adapter=Jinja2Template)
def view(tpl_name, **defaults):
    ''' Decorator: renders a template for a handler.
        The handler can control its behavior like that:

          - return a dict of template vars to fill out the template
          - return something other than a dict and the view decorator will not
            process the template, but return the handler result as is.
            This includes returning a HTTPResponse(dict) to get,
            for instance, JSON with autojson or other castfilters.
    '''
    def decorator(func):
        @functools.wraps(func)
        def wrapper(*args, **kwargs):
            result = func(*args, **kwargs)
            if isinstance(result, (dict, DictMixin)):
                # Merge handler output over the decorator defaults.
                tplvars = defaults.copy()
                tplvars.update(result)
                return template(tpl_name, **tplvars)
            if result is None:
                return template(tpl_name, defaults)
            # Non-dict results bypass template rendering entirely.
            return result
        return wrapper
    return decorator
# Engine-specific shortcuts for the view() decorator.
mako_view = functools.partial(view, template_adapter=MakoTemplate)
cheetah_view = functools.partial(view, template_adapter=CheetahTemplate)
jinja2_view = functools.partial(view, template_adapter=Jinja2Template)

#: Directories searched (in order) when resolving template names.
TEMPLATE_PATH = ['./', './views/']
#: Cache of compiled templates, keyed by (id(lookup), source).
TEMPLATES = {}
DEBUG = False
NORUN = False # If set, run() does nothing. Used by load_app()

#: Map of HTTP status codes (e.g. 404) to reason phrases (e.g. 'Not Found').
HTTP_CODES = httplib.responses
HTTP_CODES[418] = "I'm a teapot" # RFC 2324
HTTP_CODES[428] = "Precondition Required"
HTTP_CODES[429] = "Too Many Requests"
HTTP_CODES[431] = "Request Header Fields Too Large"
HTTP_CODES[511] = "Network Authentication Required"
_HTTP_STATUS_LINES = dict((k, '%d %s'%(k,v)) for (k,v) in HTTP_CODES.items())
ERROR_PAGE_TEMPLATE = """
%%try:
%%from %s import DEBUG, HTTP_CODES, request, touni
<!DOCTYPE HTML PUBLIC "-//IETF//DTD HTML 2.0//EN">
<html>
<head>
<title>Error: {{e.status}}</title>
<style type="text/css">
html {background-color: #eee; font-family: sans;}
body {background-color: #fff; border: 1px solid #ddd;
padding: 15px; margin: 15px;}
pre {background-color: #eee; border: 1px solid #ddd; padding: 5px;}
</style>
</head>
<body>
<h1>Error: {{e.status}}</h1>
<p>Sorry, the requested URL <tt>{{repr(request.url)}}</tt>
caused an error:</p>
<pre>{{e.body}}</pre>
%%if DEBUG and e.exception:
<h2>Exception:</h2>
<pre>{{repr(e.exception)}}</pre>
%%end
%%if DEBUG and e.traceback:
<h2>Traceback:</h2>
<pre>{{e.traceback}}</pre>
%%end
</body>
</html>
%%except ImportError:
<b>ImportError:</b> Could not generate the error page. Please add bottle to
the import path.
%%end
""" % __name__
#: Thread-local proxy for the *current* request (valid inside callbacks).
request = LocalRequest()

#: Thread-local proxy used to modify the *current* response.
response = LocalResponse()

#: A thread-safe namespace for application use.
local = threading.local()

# Initialize the application stack and create the first (default) app.
app = default_app = AppStack()
app.push()

#: Virtual package: ``import bottle.ext.foo`` resolves to ``bottle_foo``.
ext = _ImportRedirect('bottle.ext' if __name__ == '__main__' else __name__+".ext", 'bottle_%s').module
# Command-line entry point: ``python bottle.py [options] package.module:app``
if __name__ == '__main__':
    opt, args, parser = _cmd_options, _cmd_args, _cmd_parser
    if opt.version:
        _stdout('Bottle %s\n'%__version__)
        sys.exit(0)
    if not args:
        parser.print_help()
        _stderr('\nError: No application specified.\n')
        sys.exit(1)

    # Make the application importable relative to the current directory and
    # ensure 'bottle' resolves to this very module.
    sys.path.insert(0, '.')
    sys.modules.setdefault('bottle', sys.modules['__main__'])

    # Parse --bind host[:port]; the rfind() check keeps IPv6 literals like
    # [::1]:8080 intact (the last ':' must come after the closing bracket).
    host, port = (opt.bind or 'localhost'), 8080
    if ':' in host and host.rfind(']') < host.rfind(':'):
        host, port = host.rsplit(':', 1)
    host = host.strip('[]')

    run(args[0], host=host, port=int(port), server=opt.server,
        reloader=opt.reload, plugins=opt.plugin, debug=opt.debug)
|
""" lazy generator of 2D pharmacophore signature data
"""
from __future__ import print_function
from rdkit.Chem.Pharm2D import SigFactory, Matcher
raise NotImplementedError('not finished yet')
class Generator(object):
  """ Lazy 2D pharmacophore signature generator: bits are only computed when
  they are queried.

  Important attributes:

   - mol: the molecule whose signature is being worked with

   - sigFactory : the SigFactory object with signature parameters
     NOTE: no preprocessing is carried out for _sigFactory_.
     It *must* be pre-initialized.

  **Notes**

    -

  """

  def __init__(self, sigFactory, mol, dMat=None, bitCache=True):
    """ constructor

    **Arguments**

      - sigFactory: a signature factory, see class docs

      - mol: a molecule, see class docs

      - dMat: (optional) a distance matrix for the molecule. If this
        is not provided, one will be calculated

      - bitCache: (optional) if nonzero, a local cache of which bits
        have been queried will be maintained. Otherwise things must
        be recalculated each time a bit is queried.

    """
    if not isinstance(sigFactory, SigFactory.SigFactory):
      raise ValueError('bad factory')
    self.sigFactory = sigFactory
    self.mol = mol
    if dMat is None:
      # Local import: 'Chem' is not imported at module level (it was only
      # pulled in by the __main__ block before, causing a NameError here).
      from rdkit import Chem
      useBO = sigFactory.includeBondOrder
      dMat = Chem.GetDistanceMatrix(mol, useBO)
    self.dMat = dMat

    # Bit cache: maps bit index -> 0/1; None when caching is disabled.
    self.bits = {} if bitCache else None

    # Collect, per retained feature family, the atom-id tuples of every
    # match in the molecule. Families in sigFactory.skipFeats are ignored.
    featFamilies = [fam for fam in sigFactory.featFactory.GetFeatureFamilies()
                    if fam not in sigFactory.skipFeats]
    featMatches = {fam: [] for fam in featFamilies}
    for feat in sigFactory.featFactory.GetFeaturesForMol(mol):
      fam = feat.GetFamily()
      if fam not in sigFactory.skipFeats:
        featMatches[fam].append(feat.GetAtomIds())
    # FIX: the previous code overwrote featMatches with incomplete
    # GetMolFeature() calls and then assigned the *undefined* name
    # 'pattMatches' (a guaranteed NameError). Keep one match list per
    # family, ordered like featFamilies.
    self.pattMatches = [featMatches[fam] for fam in featFamilies]

  def GetBit(self, idx):
    """ returns a bool indicating whether or not the bit is set

    NOTE(review): self.sig is never assigned anywhere in this class, so this
    method cannot currently work; this is why the module raises
    NotImplementedError at import time. TODO: resolve the signature source.
    """
    if idx < 0 or idx >= self.sig.GetSize():
      raise IndexError('Index %d invalid' % (idx))
    if self.bits is not None and idx in self.bits:
      return self.bits[idx]

    tmp = Matcher.GetAtomsMatchingBit(self.sig, idx, self.mol, dMat=self.dMat, justOne=1,
                                      matchingAtoms=self.pattMatches)
    res = 1 if tmp else 0

    if self.bits is not None:
      self.bits[idx] = res
    return res

  def __len__(self):
    """ allows class to support len()

    """
    return self.sig.GetSize()

  def __getitem__(self, itm):
    """ allows class to support random access.
    Calls self.GetBit()

    """
    return self.GetBit(itm)
# Micro-benchmark: compare lazy bit generation against computing the full
# fingerprint up front for the first molecules of the NCI demo set.
if __name__ == '__main__':
  import time
  from rdkit import RDConfig, Chem
  from rdkit.Chem.Pharm2D import Gobbi_Pharm2D, Generate
  import random

  factory = Gobbi_Pharm2D.factory
  nToDo = 100
  # FIX: close the input file deterministically instead of leaking the handle.
  with open(RDConfig.RDDataDir + "/NCI/first_5K.smi", 'r') as inF:
    inD = inF.readlines()[:nToDo]
  mols = [None] * len(inD)
  for i in range(len(inD)):
    # FIX: str.strip() returns a new string; the old code discarded it.
    smi = inD[i].split('\t')[0].strip()
    mols[i] = Chem.MolFromSmiles(smi)

  sig = factory.GetSignature()
  nBits = 300
  random.seed(23)
  bits = [random.randint(0, sig.GetSize() - 1) for x in range(nBits)]

  print('Using the Lazy Generator')
  t1 = time.time()
  for i in range(len(mols)):
    if not i % 10:
      print('done mol %d of %d' % (i, len(mols)))
    gen = Generator(factory, mols[i])
    for bit in bits:
      v = gen[bit]
  t2 = time.time()
  print('\tthat took %4.2f seconds' % (t2 - t1))

  print('Generating and checking signatures')
  t1 = time.time()
  for i in range(len(mols)):
    if not i % 10:
      print('done mol %d of %d' % (i, len(mols)))
    sig = Generate.Gen2DFingerprint(mols[i], factory)
    for bit in bits:
      v = sig[bit]
  t2 = time.time()
  print('\tthat took %4.2f seconds' % (t2 - t1))
|
import pickle
import unittest
import numpy as np
import scipy.sparse as sp
from sklearn.utils.testing import assert_array_equal
from sklearn.utils.testing import assert_almost_equal
from sklearn.utils.testing import assert_array_almost_equal
from sklearn.utils.testing import assert_greater
from sklearn.utils.testing import assert_less
from sklearn.utils.testing import raises
from sklearn.utils.testing import assert_raises
from sklearn.utils.testing import assert_true
from sklearn.utils.testing import assert_equal
from sklearn import linear_model, datasets, metrics
from sklearn.base import clone
from sklearn.linear_model import SGDClassifier, SGDRegressor
from sklearn.preprocessing import LabelEncoder, scale
class SparseSGDClassifier(SGDClassifier):
    """SGDClassifier variant that converts every input matrix to CSR sparse
    format before delegating to the dense implementation. Used to run the
    shared test suite against the sparse code path."""

    def fit(self, X, y, *args, **kw):
        return SGDClassifier.fit(self, sp.csr_matrix(X), y, *args, **kw)

    def partial_fit(self, X, y, *args, **kw):
        return SGDClassifier.partial_fit(self, sp.csr_matrix(X), y, *args, **kw)

    def decision_function(self, X, *args, **kw):
        return SGDClassifier.decision_function(self, sp.csr_matrix(X),
                                               *args, **kw)

    def predict_proba(self, X, *args, **kw):
        return SGDClassifier.predict_proba(self, sp.csr_matrix(X), *args, **kw)

    def predict_log_proba(self, X, *args, **kw):
        return SGDClassifier.predict_log_proba(self, sp.csr_matrix(X),
                                               *args, **kw)
class SparseSGDRegressor(SGDRegressor):
    """SGDRegressor variant that converts every input matrix to CSR sparse
    format before delegating to the dense implementation."""

    def fit(self, X, y, *args, **kw):
        return SGDRegressor.fit(self, sp.csr_matrix(X), y, *args, **kw)

    def partial_fit(self, X, y, *args, **kw):
        return SGDRegressor.partial_fit(self, sp.csr_matrix(X), y, *args, **kw)

    def decision_function(self, X, *args, **kw):
        return SGDRegressor.decision_function(self, sp.csr_matrix(X),
                                              *args, **kw)
# Fixture 1: linearly separable binary problem with integer labels.
X = np.array([[-2, -1], [-1, -1], [-1, -2], [1, 1], [1, 2], [2, 1]])
Y = [1, 1, 1, 2, 2, 2]
T = np.array([[-1, -1], [2, 2], [3, 2]])
true_result = [1, 2, 2]

# Fixture 2: three-class problem with string labels.
X2 = np.array([[-1, 1], [-0.75, 0.5], [-1.5, 1.5],
               [1, 1], [0.75, 0.5], [1.5, 1.5],
               [-1, -1], [0, -0.5], [1, -1]])
Y2 = ["one"] * 3 + ["two"] * 3 + ["three"] * 3
T2 = np.array([[-1.5, 0.5], [1, 2], [0, -2]])
true_result2 = ["one", "two", "three"]

# Fixture 3: binary problem on sparse-ish indicator features.
X3 = np.array([[1, 1, 0, 0, 0, 0], [1, 1, 0, 0, 0, 0],
               [0, 0, 1, 0, 0, 0], [0, 0, 1, 0, 0, 0],
               [0, 0, 0, 0, 1, 1], [0, 0, 0, 0, 1, 1],
               [0, 0, 0, 1, 0, 0], [0, 0, 0, 1, 0, 0]])
Y3 = np.array([1, 1, 1, 1, 2, 2, 2, 2])

# Fixture 4: like fixture 3 but with noisy real-valued features.
X4 = np.array([[1, 0.9, 0.8, 0, 0, 0], [1, .84, .98, 0, 0, 0],
               [1, .96, .88, 0, 0, 0], [1, .91, .99, 0, 0, 0],
               [0, 0, 0, .89, .91, 1], [0, 0, 0, .79, .84, 1],
               [0, 0, 0, .91, .95, 1], [0, 0, 0, .93, 1, 1]])
Y4 = np.array([1, 1, 1, 1, 2, 2, 2, 2])

iris = datasets.load_iris()

# Fixture 5: copy of fixture 1, used for binary intercept warm-start checks.
X5 = np.array([[-2, -1], [-1, -1], [-1, -2], [1, 1], [1, 2], [2, 1]])
Y5 = [1, 1, 1, 2, 2, 2]
true_result5 = [0, 1, 1]
class CommonTest(object):
    """Tests shared by the dense and sparse SGD test cases.

    Subclasses must set ``factory`` to the estimator class under test.
    """

    def _test_warm_start(self, X, Y, lr):
        # Test that explicit warm restart...
        clf = self.factory(alpha=0.01, eta0=0.01, n_iter=5, shuffle=False,
                           learning_rate=lr)
        clf.fit(X, Y)

        clf2 = self.factory(alpha=0.001, eta0=0.01, n_iter=5, shuffle=False,
                            learning_rate=lr)
        clf2.fit(X, Y,
                 coef_init=clf.coef_.copy(),
                 intercept_init=clf.intercept_.copy())

        #... and implicit warm restart are equivalent.
        clf3 = self.factory(alpha=0.01, eta0=0.01, n_iter=5, shuffle=False,
                            warm_start=True, learning_rate=lr)
        clf3.fit(X, Y)

        # First warm-started fit must match the cold fit exactly.
        assert_equal(clf3.t_, clf.t_)
        assert_array_almost_equal(clf3.coef_, clf.coef_)

        clf3.set_params(alpha=0.001)
        clf3.fit(X, Y)

        # Refit after changing alpha must match the explicit restart.
        assert_equal(clf3.t_, clf2.t_)
        assert_array_almost_equal(clf3.coef_, clf2.coef_)

    def test_warm_start_constant(self):
        self._test_warm_start(X, Y, "constant")

    def test_warm_start_invscaling(self):
        self._test_warm_start(X, Y, "invscaling")

    def test_warm_start_optimal(self):
        self._test_warm_start(X, Y, "optimal")

    def test_input_format(self):
        """Input format tests. """
        clf = self.factory(alpha=0.01, n_iter=5,
                           shuffle=False)
        clf.fit(X, Y)
        # A 2-column target is invalid input for these estimators.
        Y_ = np.array(Y)[:, np.newaxis]
        Y_ = np.c_[Y_, Y_]
        assert_raises(ValueError, clf.fit, X, Y_)

    def test_clone(self):
        """Test whether clone works ok. """
        clf = self.factory(alpha=0.01, n_iter=5, penalty='l1')
        clf = clone(clf)
        clf.set_params(penalty='l2')
        clf.fit(X, Y)

        clf2 = self.factory(alpha=0.01, n_iter=5, penalty='l2')
        clf2.fit(X, Y)

        # A cloned-then-reconfigured estimator must equal a fresh one.
        assert_array_equal(clf.coef_, clf2.coef_)
class DenseSGDClassifierTestCase(unittest.TestCase, CommonTest):
    """Test suite for the dense representation variant of SGD"""
    factory = SGDClassifier  # estimator class exercised by the shared tests
    def test_sgd(self):
        """Check that SGD gives any results :-)"""
        # Smoke test: every classification loss should fit and predict the
        # separable toy problem correctly.
        for loss in ("hinge", "squared_hinge", "log", "modified_huber"):
            clf = self.factory(penalty='l2', alpha=0.01, fit_intercept=True,
                               loss=loss, n_iter=10, shuffle=True)
            clf.fit(X, Y)
            #assert_almost_equal(clf.coef_[0], clf.coef_[1], decimal=7)
            assert_array_equal(clf.predict(T), true_result)
    @raises(ValueError)
    def test_sgd_bad_l1_ratio(self):
        """Check whether expected ValueError on bad l1_ratio"""
        # l1_ratio must lie in [0, 1].
        self.factory(l1_ratio=1.1)
    @raises(ValueError)
    def test_sgd_bad_learning_rate_schedule(self):
        """Check whether expected ValueError on bad learning_rate"""
        self.factory(learning_rate="<unknown>")
    @raises(ValueError)
    def test_sgd_bad_eta0(self):
        """Check whether expected ValueError on bad eta0"""
        # eta0 must be positive when the learning rate is constant.
        self.factory(eta0=0, learning_rate="constant")
    @raises(ValueError)
    def test_sgd_bad_alpha(self):
        """Check whether expected ValueError on bad alpha"""
        # The regularization strength must be non-negative.
        self.factory(alpha=-.1)
    @raises(ValueError)
    def test_sgd_bad_penalty(self):
        """Check whether expected ValueError on bad penalty"""
        self.factory(penalty='foobar', l1_ratio=0.85)
    @raises(ValueError)
    def test_sgd_bad_loss(self):
        """Check whether expected ValueError on bad loss"""
        self.factory(loss="foobar")
    @raises(ValueError)
    def test_sgd_n_iter_param(self):
        """Test parameter validity check"""
        # n_iter must be positive.
        self.factory(n_iter=-10000)
    @raises(ValueError)
    def test_sgd_shuffle_param(self):
        """Test parameter validity check"""
        # shuffle expects a bool, not the string "false".
        self.factory(shuffle="false")
    @raises(TypeError)
    def test_argument_coef(self):
        """Checks coef_init not allowed as model argument (only fit)"""
        # Provided coef_ does not match dataset.
        self.factory(coef_init=np.zeros((3,))).fit(X, Y)
    @raises(ValueError)
    def test_provide_coef(self):
        """Checks coef_init shape for the warm starts"""
        # Provided coef_ does not match dataset.
        self.factory().fit(X, Y, coef_init=np.zeros((3,)))
    @raises(ValueError)
    def test_set_intercept(self):
        """Checks intercept_ shape for the warm starts"""
        # Provided intercept_ does not match dataset.
        self.factory().fit(X, Y, intercept_init=np.zeros((3,)))
    def test_set_intercept_binary(self):
        """Checks intercept_ shape for the warm starts in binary case"""
        # A scalar intercept_init must be accepted for binary problems.
        self.factory().fit(X5, Y5, intercept_init=0)
    def test_set_intercept_to_intercept(self):
        """Checks intercept_ shape consistency for the warm starts"""
        # A fitted intercept_ must be reusable as intercept_init, for both
        # the binary and the multi-class fixtures.
        clf = self.factory().fit(X5, Y5)
        self.factory().fit(X5, Y5, intercept_init=clf.intercept_)
        clf = self.factory().fit(X, Y)
        self.factory().fit(X, Y, intercept_init=clf.intercept_)
    @raises(ValueError)
    def test_sgd_at_least_two_labels(self):
        """Target must have at least two labels"""
        self.factory(alpha=0.01, n_iter=20).fit(X2, np.ones(9))
    def test_sgd_multiclass(self):
        """Multi-class test case"""
        clf = self.factory(alpha=0.01, n_iter=20).fit(X2, Y2)
        # One-vs-all: one coefficient row and one intercept per class.
        assert_equal(clf.coef_.shape, (3, 2))
        assert_equal(clf.intercept_.shape, (3,))
        assert_equal(clf.decision_function([0, 0]).shape, (1, 3))
        pred = clf.predict(T2)
        assert_array_equal(pred, true_result2)
def test_sgd_multiclass_with_init_coef(self):
"""Multi-class test case"""
clf = self.factory(alpha=0.01, n_iter=20)
clf.fit(X2, Y2, coef_init=np.zeros((3, 2)),
intercept_init=np.zeros(3))
assert_equal(clf.coef_.shape, (3, 2))
assert_true(clf.intercept_.shape, (3,))
pred = clf.predict(T2)
assert_array_equal(pred, true_result2)
    def test_sgd_multiclass_njobs(self):
        """Multi-class test case with multi-core support"""
        # Same expectations as test_sgd_multiclass, but fitted with n_jobs=2.
        clf = self.factory(alpha=0.01, n_iter=20, n_jobs=2).fit(X2, Y2)
        assert_equal(clf.coef_.shape, (3, 2))
        assert_equal(clf.intercept_.shape, (3,))
        assert_equal(clf.decision_function([0, 0]).shape, (1, 3))
        pred = clf.predict(T2)
        assert_array_equal(pred, true_result2)
    def test_set_coef_multiclass(self):
        """Checks coef_init and intercept_init shape for multi-class
        problems"""
        # Provided coef_ does not match dataset
        clf = self.factory()
        assert_raises(ValueError, clf.fit, X2, Y2, coef_init=np.zeros((2, 2)))
        # Provided coef_ does match dataset
        clf = self.factory().fit(X2, Y2, coef_init=np.zeros((3, 2)))
        # Provided intercept_ does not match dataset
        clf = self.factory()
        assert_raises(ValueError, clf.fit, X2, Y2,
                      intercept_init=np.zeros((1,)))
        # Provided intercept_ does match dataset.
        clf = self.factory().fit(X2, Y2, intercept_init=np.zeros((3,)))
def test_sgd_proba(self):
"""Check SGD.predict_proba"""
# hinge loss does not allow for conditional prob estimate
clf = self.factory(loss="hinge", alpha=0.01, n_iter=10).fit(X, Y)
assert_raises(NotImplementedError, clf.predict_proba, [3, 2])
# log and modified_huber losses can output probability estimates
# binary case
for loss in ["log", "modified_huber"]:
clf = self.factory(loss="modified_huber", alpha=0.01, n_iter=10)
clf.fit(X, Y)
p = clf.predict_proba([3, 2])
assert_true(p[0, 1] > 0.5)
p = clf.predict_proba([-1, -1])
assert_true(p[0, 1] < 0.5)
p = clf.predict_log_proba([3, 2])
assert_true(p[0, 1] > p[0, 0])
p = clf.predict_log_proba([-1, -1])
assert_true(p[0, 1] < p[0, 0])
# log loss multiclass probability estimates
clf = self.factory(loss="log", alpha=0.01, n_iter=10).fit(X2, Y2)
d = clf.decision_function([[.1, -.1], [.3, .2]])
p = clf.predict_proba([[.1, -.1], [.3, .2]])
assert_array_equal(np.argmax(p, axis=1), np.argmax(d, axis=1))
assert_almost_equal(p[0].sum(), 1)
assert_true(np.all(p[0] >= 0))
p = clf.predict_proba([-1, -1])
d = clf.decision_function([-1, -1])
assert_array_equal(np.argsort(p[0]), np.argsort(d[0]))
l = clf.predict_log_proba([3, 2])
p = clf.predict_proba([3, 2])
assert_array_almost_equal(np.log(p), l)
l = clf.predict_log_proba([-1, -1])
p = clf.predict_proba([-1, -1])
assert_array_almost_equal(np.log(p), l)
# Modified Huber multiclass probability estimates; requires a separate
# test because the hard zero/one probabilities may destroy the
# ordering present in decision_function output.
clf = self.factory(loss="modified_huber", alpha=0.01, n_iter=10)
clf.fit(X2, Y2)
d = clf.decision_function([3, 2])
p = clf.predict_proba([3, 2])
if not isinstance(self, SparseSGDClassifierTestCase):
assert_equal(np.argmax(d, axis=1), np.argmax(p, axis=1))
else: # XXX the sparse test gets a different X2 (?)
assert_equal(np.argmin(d, axis=1), np.argmin(p, axis=1))
# the following sample produces decision_function values < -1,
# which would cause naive normalization to fail (see comment
# in SGDClassifier.predict_proba)
x = X.mean(axis=0)
d = clf.decision_function(x)
if np.all(d < -1): # XXX not true in sparse test case (why?)
p = clf.predict_proba(x)
assert_array_almost_equal(p[0], [1/3.] * 3)
    def test_sgd_l1(self):
        """Test L1 regularization"""
        n = len(X4)
        rng = np.random.RandomState(13)
        idx = np.arange(n)
        rng.shuffle(idx)
        X = X4[idx, :]
        Y = Y4[idx]
        clf = self.factory(penalty='l1', alpha=.2, fit_intercept=False,
                           n_iter=2000)
        clf.fit(X, Y)
        # With a strong L1 penalty the middle features (presumably
        # uninformative in the X4/Y4 fixture) are driven exactly to zero.
        assert_array_equal(clf.coef_[0, 1:-1], np.zeros((4,)))
        pred = clf.predict(X)
        assert_array_equal(pred, Y)
        # test sparsify with dense inputs
        clf.sparsify()
        assert_true(sp.issparse(clf.coef_))
        pred = clf.predict(X)
        assert_array_equal(pred, Y)
        # pickle and unpickle with sparse coef_
        clf = pickle.loads(pickle.dumps(clf))
        assert_true(sp.issparse(clf.coef_))
        pred = clf.predict(X)
        assert_array_equal(pred, Y)
    def test_class_weights(self):
        """
        Test class weights.
        """
        X = np.array([[-1.0, -1.0], [-1.0, 0], [-.8, -1.0],
                      [1.0, 1.0], [1.0, 0.0]])
        y = [1, 1, 1, -1, -1]
        # Unweighted baseline: the probe point lies on class 1's side.
        clf = self.factory(alpha=0.1, n_iter=1000, fit_intercept=False,
                           class_weight=None)
        clf.fit(X, y)
        assert_array_equal(clf.predict([[0.2, -1.0]]), np.array([1]))
        # we give a small weights to class 1
        clf = self.factory(alpha=0.1, n_iter=1000, fit_intercept=False,
                           class_weight={1: 0.001})
        clf.fit(X, y)
        # now the hyperplane should rotate clock-wise and
        # the prediction on this point should shift
        assert_array_equal(clf.predict([[0.2, -1.0]]), np.array([-1]))
    def test_equal_class_weight(self):
        """Test if equal class weights approx. equals no class weights. """
        X = [[1, 0], [1, 0], [0, 1], [0, 1]]
        y = [0, 0, 1, 1]
        clf = self.factory(alpha=0.1, n_iter=1000, class_weight=None)
        clf.fit(X, y)
        # Deduplicated dataset with explicit, equal per-class weights
        # should learn approximately the same separator.
        X = [[1, 0], [0, 1]]
        y = [0, 1]
        clf_weighted = self.factory(alpha=0.1, n_iter=1000,
                                    class_weight={0: 0.5, 1: 0.5})
        clf_weighted.fit(X, y)
        # should be similar up to some epsilon due to learning rate schedule
        assert_almost_equal(clf.coef_, clf_weighted.coef_, decimal=2)
    @raises(ValueError)
    def test_wrong_class_weight_label(self):
        """ValueError due to not existing class label."""
        # class_weight references label 0, which presumably does not occur
        # in the Y fixture, so fit() must raise.
        clf = self.factory(alpha=0.1, n_iter=1000, class_weight={0: 0.5})
        clf.fit(X, Y)
    @raises(ValueError)
    def test_wrong_class_weight_format(self):
        """ValueError due to wrong class_weight argument type."""
        # A list is not a valid class_weight (expects dict/str/None).
        clf = self.factory(alpha=0.1, n_iter=1000, class_weight=[0.5])
        clf.fit(X, Y)
    def test_auto_weight(self):
        """Test class weights for imbalanced data"""
        # compute reference metrics on iris dataset that is quite balanced by
        # default
        X, y = iris.data, iris.target
        X = scale(X)
        idx = np.arange(X.shape[0])
        rng = np.random.RandomState(0)
        rng.shuffle(idx)
        X = X[idx]
        y = y[idx]
        clf = self.factory(alpha=0.0001, n_iter=1000,
                           class_weight=None).fit(X, y)
        assert_almost_equal(metrics.f1_score(y, clf.predict(X)), 0.96,
                            decimal=1)
        # make the same prediction using automated class_weight
        clf_auto = self.factory(alpha=0.0001, n_iter=1000,
                                class_weight="auto").fit(X, y)
        assert_almost_equal(metrics.f1_score(y, clf_auto.predict(X)), 0.96,
                            decimal=1)
        # Make sure that in the balanced case it does not change anything
        # to use "auto"
        assert_array_almost_equal(clf.coef_, clf_auto.coef_, 6)
        # build an very very imbalanced dataset out of iris data
        X_0 = X[y == 0, :]
        y_0 = y[y == 0]
        X_imbalanced = np.vstack([X] + [X_0] * 10)
        y_imbalanced = np.concatenate([y] + [y_0] * 10)
        # fit a model on the imbalanced data without class weight info
        clf = self.factory(n_iter=1000, class_weight=None)
        clf.fit(X_imbalanced, y_imbalanced)
        y_pred = clf.predict(X)
        assert_less(metrics.f1_score(y, y_pred), 0.96)
        # fit a model with auto class_weight enabled
        clf = self.factory(n_iter=1000, class_weight="auto")
        clf.fit(X_imbalanced, y_imbalanced)
        y_pred = clf.predict(X)
        assert_greater(metrics.f1_score(y, y_pred), 0.96)
        # fit another using a fit parameter override
        # NOTE(review): this block is byte-identical to the previous one;
        # the comment suggests class_weight was meant to be overridden via
        # a fit() parameter instead -- confirm against project history.
        clf = self.factory(n_iter=1000, class_weight="auto")
        clf.fit(X_imbalanced, y_imbalanced)
        y_pred = clf.predict(X)
        assert_greater(metrics.f1_score(y, y_pred), 0.96)
    def test_sample_weights(self):
        """Test weights on individual samples"""
        X = np.array([[-1.0, -1.0], [-1.0, 0], [-.8, -1.0],
                      [1.0, 1.0], [1.0, 0.0]])
        y = [1, 1, 1, -1, -1]
        clf = self.factory(alpha=0.1, n_iter=1000, fit_intercept=False)
        clf.fit(X, y)
        # Unweighted baseline: the probe point lies on class 1's side.
        assert_array_equal(clf.predict([[0.2, -1.0]]), np.array([1]))
        # we give a small weights to class 1
        clf.fit(X, y, sample_weight=[0.001] * 3 + [1] * 2)
        # now the hyperplane should rotate clock-wise and
        # the prediction on this point should shift
        assert_array_equal(clf.predict([[0.2, -1.0]]), np.array([-1]))
    @raises(ValueError)
    def test_wrong_sample_weights(self):
        """Test if ValueError is raised if sample_weight has wrong shape"""
        clf = self.factory(alpha=0.1, n_iter=1000, fit_intercept=False)
        # provided sample_weight too long
        clf.fit(X, Y, sample_weight=np.arange(7))
    @raises(ValueError)
    def test_partial_fit_exception(self):
        """First partial_fit call must fail when classes is omitted."""
        clf = self.factory(alpha=0.01)
        # classes was not specified
        clf.partial_fit(X3, Y3)
def test_partial_fit_binary(self):
third = X.shape[0] // 3
clf = self.factory(alpha=0.01)
classes = np.unique(Y)
clf.partial_fit(X[:third], Y[:third], classes=classes)
assert_equal(clf.coef_.shape, (1, X.shape[1]))
assert_equal(clf.intercept_.shape, (1,))
assert_equal(clf.decision_function([0, 0]).shape, (1, ))
id1 = id(clf.coef_.data)
clf.partial_fit(X[third:], Y[third:])
id2 = id(clf.coef_.data)
# check that coef_ haven't been re-allocated
assert_true(id1, id2)
y_pred = clf.predict(T)
assert_array_equal(y_pred, true_result)
def test_partial_fit_multiclass(self):
third = X2.shape[0] // 3
clf = self.factory(alpha=0.01)
classes = np.unique(Y2)
clf.partial_fit(X2[:third], Y2[:third], classes=classes)
assert_equal(clf.coef_.shape, (3, X2.shape[1]))
assert_equal(clf.intercept_.shape, (3,))
assert_equal(clf.decision_function([0, 0]).shape, (1, 3))
id1 = id(clf.coef_.data)
clf.partial_fit(X2[third:], Y2[third:])
id2 = id(clf.coef_.data)
# check that coef_ haven't been re-allocated
assert_true(id1, id2)
    def test_fit_then_partial_fit(self):
        """Partial_fit should work after initial fit in the multiclass case.
        Non-regression test for #2496; fit would previously produce a
        Fortran-ordered coef_ that subsequent partial_fit couldn't handle.
        """
        clf = self.factory()
        clf.fit(X2, Y2)
        # On the buggy code path this call raised; success is the check.
        clf.partial_fit(X2, Y2)  # no exception here
    def _test_partial_fit_equal_fit(self, lr):
        """Helper: fit(n_iter=2) must match two partial_fit passes.

        Compares decision_function outputs and the sample counter t_ for
        the learning-rate schedule *lr*, on both the binary (X/Y) and
        multi-class (X2/Y2) fixtures.
        """
        for X_, Y_, T_ in ((X, Y, T), (X2, Y2, T2)):
            clf = self.factory(alpha=0.01, eta0=0.01, n_iter=2,
                               learning_rate=lr, shuffle=False)
            clf.fit(X_, Y_)
            y_pred = clf.decision_function(T_)
            t = clf.t_
            classes = np.unique(Y_)
            clf = self.factory(alpha=0.01, eta0=0.01, learning_rate=lr,
                               shuffle=False)
            for i in range(2):
                clf.partial_fit(X_, Y_, classes=classes)
            y_pred2 = clf.decision_function(T_)
            assert_equal(clf.t_, t)
            assert_array_almost_equal(y_pred, y_pred2, decimal=2)
    def test_partial_fit_equal_fit_constant(self):
        """partial_fit must match fit for the 'constant' schedule."""
        self._test_partial_fit_equal_fit("constant")
    def test_partial_fit_equal_fit_optimal(self):
        """partial_fit must match fit for the 'optimal' schedule."""
        self._test_partial_fit_equal_fit("optimal")
    def test_partial_fit_equal_fit_invscaling(self):
        """partial_fit must match fit for the 'invscaling' schedule."""
        self._test_partial_fit_equal_fit("invscaling")
    def test_regression_losses(self):
        """Classification with regression-style losses must still separate
        the X/Y fixture perfectly (mean accuracy of 1.0)."""
        clf = self.factory(alpha=0.01, learning_rate="constant",
                           eta0=0.1, loss="epsilon_insensitive")
        clf.fit(X, Y)
        assert_equal(1.0, np.mean(clf.predict(X) == Y))
        clf = self.factory(alpha=0.01, learning_rate="constant",
                           eta0=0.1, loss="squared_epsilon_insensitive")
        clf.fit(X, Y)
        assert_equal(1.0, np.mean(clf.predict(X) == Y))
        clf = self.factory(alpha=0.01, loss="huber")
        clf.fit(X, Y)
        assert_equal(1.0, np.mean(clf.predict(X) == Y))
        clf = self.factory(alpha=0.01, learning_rate="constant", eta0=0.01,
                           loss="squared_loss")
        clf.fit(X, Y)
        assert_equal(1.0, np.mean(clf.predict(X) == Y))
    def test_warm_start_multiclass(self):
        """Warm starts on the multi-class fixture (helper presumably
        defined on CommonTest -- see its definition)."""
        self._test_warm_start(X2, Y2, "optimal")
    def test_multiple_fit(self):
        """Test multiple calls of fit w/ different shaped inputs."""
        clf = self.factory(alpha=0.01, n_iter=5,
                           shuffle=False)
        clf.fit(X, Y)
        assert_true(hasattr(clf, "coef_"))
        # Non-regression test: try fitting with a different label set.
        # Refitting with fewer features and string labels must not raise.
        y = [["ham", "spam"][i] for i in LabelEncoder().fit_transform(Y)]
        clf.fit(X[:, :-1], y)
class SparseSGDClassifierTestCase(DenseSGDClassifierTestCase):
    """Run exactly the same tests using the sparse representation variant"""
    # Only the factory differs; SparseSGDClassifier presumably converts the
    # input to a scipy.sparse matrix before fitting -- see its definition
    # elsewhere in this module.
    factory = SparseSGDClassifier
class DenseSGDRegressorTestCase(unittest.TestCase, CommonTest):
    """Test suite for the dense representation variant of SGD"""
    factory = SGDRegressor
    def test_sgd(self):
        """Check that SGD gives any results."""
        clf = self.factory(alpha=0.1, n_iter=2,
                           fit_intercept=False)
        clf.fit([[0, 0], [1, 1], [2, 2]], [0, 1, 2])
        # symmetric training data -> both coefficients must agree
        assert_equal(clf.coef_[0], clf.coef_[1])
    @raises(ValueError)
    def test_sgd_bad_penalty(self):
        """Check whether expected ValueError on bad penalty"""
        self.factory(penalty='foobar', l1_ratio=0.85)
    @raises(ValueError)
    def test_sgd_bad_loss(self):
        """Check whether expected ValueError on bad loss"""
        self.factory(loss="foobar")
    def test_sgd_least_squares_fit(self):
        """Squared loss must recover a linear target, with/without noise."""
        xmin, xmax = -5, 5
        n_samples = 100
        rng = np.random.RandomState(0)
        X = np.linspace(xmin, xmax, n_samples).reshape(n_samples, 1)
        # simple linear function without noise
        y = 0.5 * X.ravel()
        clf = self.factory(loss='squared_loss', alpha=0.1, n_iter=20,
                           fit_intercept=False)
        clf.fit(X, y)
        score = clf.score(X, y)
        assert_greater(score, 0.99)
        # simple linear function with noise
        y = 0.5 * X.ravel() + rng.randn(n_samples, 1).ravel()
        clf = self.factory(loss='squared_loss', alpha=0.1, n_iter=20,
                           fit_intercept=False)
        clf.fit(X, y)
        score = clf.score(X, y)
        assert_greater(score, 0.5)
    def test_sgd_epsilon_insensitive(self):
        """Epsilon-insensitive loss on the same linear problems."""
        xmin, xmax = -5, 5
        n_samples = 100
        # CONSISTENCY FIX: use a seeded RNG like the sibling tests instead
        # of drawing noise from the global np.random state.
        rng = np.random.RandomState(0)
        X = np.linspace(xmin, xmax, n_samples).reshape(n_samples, 1)
        # simple linear function without noise
        y = 0.5 * X.ravel()
        clf = self.factory(loss='epsilon_insensitive', epsilon=0.01,
                           alpha=0.1, n_iter=20,
                           fit_intercept=False)
        clf.fit(X, y)
        score = clf.score(X, y)
        # CONSISTENCY FIX: assert_greater reports both operands on failure,
        # unlike assert_true(score > x); matches the sibling tests.
        assert_greater(score, 0.99)
        # simple linear function with noise
        y = 0.5 * X.ravel() + rng.randn(n_samples, 1).ravel()
        clf = self.factory(loss='epsilon_insensitive', epsilon=0.01,
                           alpha=0.1, n_iter=20,
                           fit_intercept=False)
        clf.fit(X, y)
        score = clf.score(X, y)
        assert_greater(score, 0.5)
    def test_sgd_huber_fit(self):
        """Huber loss must recover a linear target, with/without noise."""
        xmin, xmax = -5, 5
        n_samples = 100
        rng = np.random.RandomState(0)
        X = np.linspace(xmin, xmax, n_samples).reshape(n_samples, 1)
        # simple linear function without noise
        y = 0.5 * X.ravel()
        clf = self.factory(loss="huber", epsilon=0.1, alpha=0.1, n_iter=20,
                           fit_intercept=False)
        clf.fit(X, y)
        score = clf.score(X, y)
        assert_greater(score, 0.99)
        # simple linear function with noise
        y = 0.5 * X.ravel() + rng.randn(n_samples, 1).ravel()
        clf = self.factory(loss="huber", epsilon=0.1, alpha=0.1, n_iter=20,
                           fit_intercept=False)
        clf.fit(X, y)
        score = clf.score(X, y)
        assert_greater(score, 0.5)
    def test_elasticnet_convergence(self):
        """Check that the SGD output is consistent with coordinate descent"""
        n_samples, n_features = 1000, 5
        rng = np.random.RandomState(0)
        # BUG FIX: draw X from the seeded local rng; np.random.randn used
        # the global state and made the fixture nondeterministic.
        X = rng.randn(n_samples, n_features)
        # ground_truth linear model that generate y from X and to which the
        # models should converge if the regularizer would be set to 0.0
        ground_truth_coef = rng.randn(n_features)
        y = np.dot(X, ground_truth_coef)
        # XXX: alpha = 0.1 seems to cause convergence problems
        for alpha in [0.01, 0.001]:
            for l1_ratio in [0.5, 0.8, 1.0]:
                cd = linear_model.ElasticNet(alpha=alpha, l1_ratio=l1_ratio,
                                             fit_intercept=False)
                cd.fit(X, y)
                sgd = self.factory(penalty='elasticnet', n_iter=50,
                                   alpha=alpha, l1_ratio=l1_ratio,
                                   fit_intercept=False)
                sgd.fit(X, y)
                err_msg = ("cd and sgd did not converge to comparable "
                           "results for alpha=%f and l1_ratio=%f"
                           % (alpha, l1_ratio))
                assert_almost_equal(cd.coef_, sgd.coef_, decimal=2,
                                    err_msg=err_msg)
    def test_partial_fit(self):
        """Two partial_fit calls must reuse the allocated coef_ buffer."""
        third = X.shape[0] // 3
        clf = self.factory(alpha=0.01)
        clf.partial_fit(X[:third], Y[:third])
        assert_equal(clf.coef_.shape, (X.shape[1], ))
        assert_equal(clf.intercept_.shape, (1,))
        assert_equal(clf.decision_function([0, 0]).shape, (1, ))
        id1 = id(clf.coef_.data)
        clf.partial_fit(X[third:], Y[third:])
        id2 = id(clf.coef_.data)
        # check that coef_ haven't been re-allocated
        # BUG FIX: assert_true(id1, id2) treated id2 as a failure message
        # and always passed; the intended check is equality of buffer ids.
        assert_equal(id1, id2)
    def _test_partial_fit_equal_fit(self, lr):
        """Helper: fit(n_iter=2) must match two partial_fit passes for the
        learning-rate schedule *lr* (same predictions and sample count)."""
        clf = self.factory(alpha=0.01, n_iter=2, eta0=0.01,
                           learning_rate=lr, shuffle=False)
        clf.fit(X, Y)
        y_pred = clf.predict(T)
        t = clf.t_
        clf = self.factory(alpha=0.01, eta0=0.01,
                           learning_rate=lr, shuffle=False)
        for i in range(2):
            clf.partial_fit(X, Y)
        y_pred2 = clf.predict(T)
        assert_equal(clf.t_, t)
        assert_array_almost_equal(y_pred, y_pred2, decimal=2)
    def test_partial_fit_equal_fit_constant(self):
        """partial_fit must match fit for the 'constant' schedule."""
        self._test_partial_fit_equal_fit("constant")
    def test_partial_fit_equal_fit_optimal(self):
        """partial_fit must match fit for the 'optimal' schedule."""
        self._test_partial_fit_equal_fit("optimal")
    def test_partial_fit_equal_fit_invscaling(self):
        """partial_fit must match fit for the 'invscaling' schedule."""
        self._test_partial_fit_equal_fit("invscaling")
    def test_loss_function_epsilon(self):
        """set_params must propagate epsilon into the huber loss entry."""
        clf = self.factory(epsilon=0.9)
        clf.set_params(epsilon=0.1)
        # CONSISTENCY FIX: use assert_equal like the rest of the module;
        # a bare assert statement is stripped under python -O.
        assert_equal(clf.loss_functions['huber'][1], 0.1)
class SparseSGDRegressorTestCase(DenseSGDRegressorTestCase):
    """Run exactly the same tests using the sparse representation variant"""
    # Only the factory differs; SparseSGDRegressor presumably converts the
    # input to a scipy.sparse matrix before fitting -- see its definition
    # elsewhere in this module.
    factory = SparseSGDRegressor
def test_l1_ratio():
    """Test if l1 ratio extremes match L1 and L2 penalty settings. """
    X, y = datasets.make_classification(n_samples=1000, n_features=100,
                                        n_informative=20, random_state=1234)
    # elasticnet with l1_ratio -> 1 should behave like a pure L1 penalty
    almost_l1 = SGDClassifier(alpha=0.001, penalty='elasticnet',
                              l1_ratio=0.9999999999).fit(X, y)
    pure_l1 = SGDClassifier(alpha=0.001, penalty='l1').fit(X, y)
    assert_array_almost_equal(almost_l1.coef_, pure_l1.coef_)
    # elasticnet with l1_ratio -> 0 should behave like a pure L2 penalty
    almost_l2 = SGDClassifier(alpha=0.001, penalty='elasticnet',
                              l1_ratio=0.0000000001).fit(X, y)
    pure_l2 = SGDClassifier(alpha=0.001, penalty='l2').fit(X, y)
    assert_array_almost_equal(almost_l2.coef_, pure_l2.coef_)
|
from w3lib.url import parse_data_uri
from scrapy.http import TextResponse
from scrapy.responsetypes import responsetypes
from scrapy.utils.decorators import defers
class DataURIDownloadHandler(object):
    """Scrapy download handler for ``data:`` URIs (RFC 2397)."""
    # Instantiate the handler eagerly at startup rather than on first use.
    lazy = False
    def __init__(self, settings):
        # No settings are consumed; the parameter satisfies the
        # download-handler constructor contract.
        super(DataURIDownloadHandler, self).__init__()
    @defers
    def download_request(self, request, spider):
        # Decode the payload embedded in the URL; no network I/O happens.
        uri = parse_data_uri(request.url)
        # Choose a Response subclass that matches the declared media type.
        respcls = responsetypes.from_mimetype(uri.media_type)
        resp_kwargs = {}
        if (issubclass(respcls, TextResponse) and
                uri.media_type.split('/')[0] == 'text'):
            charset = uri.media_type_parameters.get('charset')
            # NOTE(review): charset may be None when the URI declares no
            # charset; TextResponse presumably falls back to its default
            # encoding in that case -- confirm.
            resp_kwargs['encoding'] = charset
        return respcls(url=request.url, body=uri.data, **resp_kwargs)
|
from functools import partial
from .primitives import EMPTY
# Public API: function-construction helpers (partial is re-exported
# directly from functools).
__all__ = ['identity', 'constantly', 'caller',
           'partial', 'rpartial', 'func_partial',
           'curry', 'rcurry', 'autocurry',
           'iffy']
def identity(x):
    """Return the argument unchanged."""
    return x
def constantly(x):
    """Return a function that ignores its arguments and always yields ``x``."""
    def _always(*args, **kwargs):
        return x
    return _always
def caller(*a, **kw):
    """Return a function that invokes its argument with the given a/kw."""
    def _invoke(f):
        return f(*a, **kw)
    return _invoke
def func_partial(func, *args, **kwargs):
    """
    A functools.partial alternative, which returns a real function.
    Can be used to construct methods.
    """
    def _applied(*call_args, **call_kwargs):
        # call-time keywords override the pre-bound ones
        merged = dict(kwargs, **call_kwargs)
        return func(*(args + call_args), **merged)
    return _applied
def rpartial(func, *args):
    """Partially apply ``func`` by binding ``args`` to its rightmost slots."""
    def _applied(*call_args):
        return func(*(call_args + args))
    return _applied
def curry(func, n=EMPTY):
    """Curry ``func`` so that it takes its ``n`` arguments one at a time."""
    if n is EMPTY:
        n = func.__code__.co_argcount
    if n <= 1:
        return func
    if n == 2:
        def take_first(x):
            def take_second(y):
                return func(x, y)
            return take_second
        return take_first
    def take(x):
        # bind one argument, then curry the remainder
        return curry(partial(func, x), n - 1)
    return take
def rcurry(func, n=EMPTY):
    """Curry ``func`` right to left, one argument at a time."""
    if n is EMPTY:
        n = func.__code__.co_argcount
    if n <= 1:
        return func
    if n == 2:
        def take_first(x):
            def take_second(y):
                # arguments are applied in reverse order
                return func(y, x)
            return take_second
        return take_first
    def take(x):
        # bind one rightmost argument, then curry the remainder
        return rcurry(rpartial(func, x), n - 1)
    return take
def autocurry(func, n=EMPTY, _args=(), _kwargs={}):
    """Curry ``func`` flexibly: accept any number of arguments per call and
    invoke ``func`` once ``n`` of them have been collected."""
    if n is EMPTY:
        n = func.__code__.co_argcount
    def autocurried(*a, **kw):
        collected_args = _args + a
        collected_kwargs = dict(_kwargs, **kw)
        if len(collected_args) + len(collected_kwargs) < n:
            # not enough arguments yet -- keep collecting
            return autocurry(func, n,
                             _args=collected_args, _kwargs=collected_kwargs)
        return func(*collected_args, **collected_kwargs)
    return autocurried
def iffy(pred, action=EMPTY, default=identity):
    """Build a conditional transformer: apply ``action`` when ``pred`` holds,
    otherwise fall back to ``default`` (called if callable, else returned)."""
    if action is EMPTY:
        # single-argument form: iffy(action) tests plain truthiness
        return iffy(bool, pred)
    def transform(v):
        if pred(v):
            return action(v)
        if callable(default):
            return default(v)
        return default
    return transform
|
"""Tools for solving inequalities and systems of inequalities. """
from __future__ import print_function, division
from sympy.core import Symbol
from sympy.sets import Interval
from sympy.core.relational import Relational, Eq, Ge, Lt
from sympy.sets.sets import FiniteSet, Union
from sympy.core.singleton import S
from sympy.assumptions import ask, AppliedPredicate, Q
from sympy.functions import re, im, Abs
from sympy.logic import And
from sympy.polys import Poly, PolynomialError, parallel_poly_from_expr
def solve_poly_inequality(poly, rel):
    """Solve a polynomial inequality with rational coefficients.
    Examples
    ========
    >>> from sympy import Poly
    >>> from sympy.abc import x
    >>> from sympy.solvers.inequalities import solve_poly_inequality
    >>> solve_poly_inequality(Poly(x, x, domain='ZZ'), '==')
    [{0}]
    >>> solve_poly_inequality(Poly(x**2 - 1, x, domain='ZZ'), '!=')
    [(-oo, -1), (-1, 1), (1, oo)]
    >>> solve_poly_inequality(Poly(x**2 - 1, x, domain='ZZ'), '==')
    [{-1}, {1}]
    See Also
    ========
    solve_poly_inequalities
    """
    # ``reals`` is a list of (root, multiplicity) pairs for the real roots.
    reals, intervals = poly.real_roots(multiple=False), []
    if rel == '==':
        # Equality: the solution is exactly the set of real roots.
        for root, _ in reals:
            interval = Interval(root, root)
            intervals.append(interval)
    elif rel == '!=':
        # Inequation: everything except the roots, as open intervals.
        left = S.NegativeInfinity
        for right, _ in reals + [(S.Infinity, 1)]:
            interval = Interval(left, right, True, True)
            intervals.append(interval)
            left = right
    else:
        # Ordering relation: sweep from +oo leftward, tracking the sign of
        # the polynomial on each interval between consecutive roots.
        if poly.LC() > 0:
            sign = +1
        else:
            sign = -1
        eq_sign, equal = None, False
        if rel == '>':
            eq_sign = +1
        elif rel == '<':
            eq_sign = -1
        elif rel == '>=':
            eq_sign, equal = +1, True
        elif rel == '<=':
            eq_sign, equal = -1, True
        else:
            raise ValueError("'%s' is not a valid relation" % rel)
        right, right_open = S.Infinity, True
        reals.sort(key=lambda w: w[0], reverse=True)
        for left, multiplicity in reals:
            if multiplicity % 2:
                # Odd multiplicity: the sign flips at this root.
                if sign == eq_sign:
                    intervals.insert(
                        0, Interval(left, right, not equal, right_open))
                sign, right, right_open = -sign, left, not equal
            else:
                # Even multiplicity: the sign is preserved, but the root
                # itself must be excluded (strict) or added (non-strict).
                if sign == eq_sign and not equal:
                    intervals.insert(
                        0, Interval(left, right, True, right_open))
                    right, right_open = left, True
                elif sign != eq_sign and equal:
                    intervals.insert(0, Interval(left, left))
        # Leftmost interval, extending to -oo.
        if sign == eq_sign:
            intervals.insert(
                0, Interval(S.NegativeInfinity, right, True, right_open))
    return intervals
def solve_poly_inequalities(polys):
    """Solve polynomial inequalities with rational coefficients.

    ``polys`` is an iterable of ``(Poly, rel)`` pairs; the union of the
    individual solution sets is returned.

    Examples
    ========
    >>> from sympy.solvers.inequalities import solve_poly_inequalities
    >>> from sympy.polys import Poly
    >>> from sympy.abc import x
    >>> solve_poly_inequalities(((
    ... Poly(x**2 - 3), ">"), (
    ... Poly(-x**2 + 1), ">")))
    (-oo, -sqrt(3)) U (-1, 1) U (sqrt(3), oo)
    """
    # FIX: Union is already imported at module level (sympy.sets.sets);
    # the previous local ``from sympy import Union`` re-imported the very
    # same class on every call.
    return Union(*[solve_poly_inequality(*p) for p in polys])
def solve_rational_inequalities(eqs):
    """Solve a system of rational inequalities with rational coefficients.
    Examples
    ========
    >>> from sympy.abc import x
    >>> from sympy import Poly
    >>> from sympy.solvers.inequalities import solve_rational_inequalities
    >>> solve_rational_inequalities([[
    ... ((Poly(-x + 1), Poly(1, x)), '>='),
    ... ((Poly(-x + 1), Poly(1, x)), '<=')]])
    {1}
    >>> solve_rational_inequalities([[
    ... ((Poly(x), Poly(1, x)), '!='),
    ... ((Poly(-x + 1), Poly(1, x)), '>=')]])
    (-oo, 0) U (0, 1]
    See Also
    ========
    solve_poly_inequality
    """
    result = S.EmptySet
    # ``eqs`` is a disjunction of conjunctions: the outer list is OR-ed,
    # each inner list of ((numer, denom), rel) items is AND-ed.
    for _eqs in eqs:
        global_intervals = None
        for (numer, denom), rel in _eqs:
            # numer/denom has the same sign as numer*denom wherever the
            # denominator is nonzero, reducing the rational inequality to
            # a polynomial one.
            numer_intervals = solve_poly_inequality(numer*denom, rel)
            denom_intervals = solve_poly_inequality(denom, '==')
            if global_intervals is None:
                global_intervals = numer_intervals
            else:
                # Conjunction: intersect with the solution found so far.
                intervals = []
                for numer_interval in numer_intervals:
                    for global_interval in global_intervals:
                        interval = numer_interval.intersect(global_interval)
                        if interval is not S.EmptySet:
                            intervals.append(interval)
                global_intervals = intervals
            # Remove the poles (zeros of the denominator).
            intervals = []
            for global_interval in global_intervals:
                for denom_interval in denom_intervals:
                    global_interval -= denom_interval
                if global_interval is not S.EmptySet:
                    intervals.append(global_interval)
            global_intervals = intervals
            if not global_intervals:
                break
        # Disjunction: accumulate this conjunction's solution.
        for interval in global_intervals:
            result = result.union(interval)
    return result
def reduce_rational_inequalities(exprs, gen, assume=True, relational=True):
    """Reduce a system of rational inequalities with rational coefficients.
    Examples
    ========
    >>> from sympy import Poly, Symbol
    >>> from sympy.solvers.inequalities import reduce_rational_inequalities
    >>> x = Symbol('x', real=True)
    >>> reduce_rational_inequalities([[x**2 <= 0]], x)
    x == 0
    >>> reduce_rational_inequalities([[x + 2 > 0]], x)
    x > -2
    >>> reduce_rational_inequalities([[(x + 2, ">")]], x)
    x > -2
    >>> reduce_rational_inequalities([[x + 2]], x)
    x == -2
    """
    exact = True
    eqs = []
    # Each item may be a Relational, an (expr, rel) pair, or a bare
    # expression (implicitly compared to zero with '==').
    for _exprs in exprs:
        _eqs = []
        for expr in _exprs:
            if isinstance(expr, tuple):
                expr, rel = expr
            else:
                if expr.is_Relational:
                    expr, rel = expr.lhs - expr.rhs, expr.rel_op
                else:
                    expr, rel = expr, '=='
            try:
                (numer, denom), opt = parallel_poly_from_expr(
                    expr.together().as_numer_denom(), gen)
            except PolynomialError:
                raise PolynomialError("only polynomials and "
                    "rational functions are supported in this context")
            if not opt.domain.is_Exact:
                # Inexact (e.g. floating-point) coefficients: solve exactly
                # and evalf the final solution set instead.
                numer, denom, exact = numer.to_exact(), denom.to_exact(), False
            domain = opt.domain.get_exact()
            if not (domain.is_ZZ or domain.is_QQ):
                raise NotImplementedError(
                    "inequality solving is not supported over %s" % opt.domain)
            _eqs.append(((numer, denom), rel))
        eqs.append(_eqs)
    solution = solve_rational_inequalities(eqs)
    if not exact:
        solution = solution.evalf()
    if not relational:
        return solution
    # For a symbol not known to be real, solve over its real part and force
    # the imaginary part to vanish.
    real = ask(Q.real(gen), assumptions=assume)
    if not real:
        result = And(solution.as_relational(re(gen)), Eq(im(gen), 0))
    else:
        result = solution.as_relational(gen)
    return result
def reduce_abs_inequality(expr, rel, gen, assume=True):
    """Reduce an inequality with nested absolute values.
    Examples
    ========
    >>> from sympy import Q, Abs
    >>> from sympy.abc import x
    >>> from sympy.solvers.inequalities import reduce_abs_inequality
    >>> reduce_abs_inequality(Abs(x - 5) - 3, '<', x, assume=Q.real(x))
    And(2 < x, x < 8)
    >>> reduce_abs_inequality(Abs(x + 2)*3 - 13, '<', x, assume=Q.real(x))
    And(-19/3 < x, x < 7/3)
    See Also
    ========
    reduce_abs_inequalities
    """
    if not ask(Q.real(gen), assumptions=assume):
        raise NotImplementedError("can't solve inequalities with absolute "
                                  "values of a complex variable")
    def _bottom_up_scan(expr):
        # Returns a list of (expr, conds) pairs: each pair is one way of
        # resolving the Abs() nodes, valid under the side conditions
        # ``conds`` (a list of Relationals).
        exprs = []
        if expr.is_Add or expr.is_Mul:
            op = expr.__class__
            for arg in expr.args:
                _exprs = _bottom_up_scan(arg)
                if not exprs:
                    exprs = _exprs
                else:
                    # Combine each resolution of this argument with each
                    # resolution collected so far (cartesian product).
                    args = []
                    for expr, conds in exprs:
                        for _expr, _conds in _exprs:
                            args.append((op(expr, _expr), conds + _conds))
                    exprs = args
        elif expr.is_Pow:
            n = expr.exp
            if not n.is_Integer or n < 0:
                raise ValueError(
                    "only non-negative integer powers are allowed")
            _exprs = _bottom_up_scan(expr.base)
            for expr, conds in _exprs:
                exprs.append((expr**n, conds))
        elif isinstance(expr, Abs):
            # Split Abs(e) into e (valid when e >= 0) and -e (when e < 0).
            _exprs = _bottom_up_scan(expr.args[0])
            for expr, conds in _exprs:
                exprs.append(( expr, conds + [Ge(expr, 0)]))
                exprs.append((-expr, conds + [Lt(expr, 0)]))
        else:
            exprs = [(expr, [])]
        return exprs
    exprs = _bottom_up_scan(expr)
    # Normalize '<'/'<=' to '>'/'>=' by negating the expression, so the
    # rational solver sees a single orientation.
    mapping = {'<': '>', '<=': '>='}
    inequalities = []
    for expr, conds in exprs:
        if rel not in mapping.keys():
            expr = Relational( expr, 0, rel)
        else:
            expr = Relational(-expr, 0, mapping[rel])
        inequalities.append([expr] + conds)
    return reduce_rational_inequalities(inequalities, gen, assume)
def reduce_abs_inequalities(exprs, gen, assume=True):
    """Reduce a system of inequalities with nested absolute values.
    Examples
    ========
    >>> from sympy import Q, Abs
    >>> from sympy.abc import x
    >>> from sympy.solvers.inequalities import reduce_abs_inequalities
    >>> reduce_abs_inequalities([(Abs(3*x - 5) - 7, '<'),
    ... (Abs(x + 25) - 13, '>')], x, assume=Q.real(x))
    And(-2/3 < x, Or(x < -38, x > -12), x < 4)
    >>> reduce_abs_inequalities([(Abs(x - 4) + Abs(3*x - 5) - 7, '<')], x,
    ... assume=Q.real(x))
    And(1/2 < x, x < 4)
    See Also
    ========
    reduce_abs_inequality
    """
    # Reduce each inequality on its own and conjoin the results.
    reduced = []
    for expr, rel in exprs:
        reduced.append(reduce_abs_inequality(expr, rel, gen, assume))
    return And(*reduced)
def solve_univariate_inequality(expr, gen, assume=True, relational=True):
    """Solves a real univariate inequality.
    Examples
    ========
    >>> from sympy.solvers.inequalities import solve_univariate_inequality
    >>> from sympy.core.symbol import Symbol
    >>> x = Symbol('x', real=True)
    >>> solve_univariate_inequality(x**2 >= 4, x)
    Or(x <= -2, x >= 2)
    >>> solve_univariate_inequality(x**2 >= 4, x, relational=False)
    (-oo, -2] U [2, oo)
    """
    # Implementation for continuous functions: find the real boundary
    # points where lhs == rhs, then test the inequality on one sample
    # taken from each interval between consecutive boundary points.
    from sympy.solvers.solvers import solve
    solns = solve(expr.lhs - expr.rhs, gen, assume=assume)
    oo = S.Infinity
    start = -oo
    sol_sets = [S.EmptySet]
    for x in sorted(s for s in solns if s.is_real):
        end = x
        # Sample the open interval (start, end); for the leftmost interval
        # use end - 1 since a midpoint with -oo is undefined.
        if expr.subs(gen, (start + end)/2 if start != -oo else end - 1):
            sol_sets.append(Interval(start, end, True, True))
        if expr.subs(gen, x):
            # Boundary point itself satisfies a non-strict relation.
            sol_sets.append(FiniteSet(x))
        start = end
    # Rightmost interval, extending to +oo.
    end = oo
    if expr.subs(gen, start + 1):
        sol_sets.append(Interval(start, end, True, True))
    rv = Union(*sol_sets)
    return rv if not relational else rv.as_relational(gen)
def _solve_inequality(ie, s, assume=True):
    """ A hacky replacement for solve, since the latter only works for
    univariate inequalities. """
    if not ie.rel_op in ('>', '>=', '<', '<='):
        raise NotImplementedError
    expr = ie.lhs - ie.rhs
    try:
        p = Poly(expr, s)
        if p.degree() != 1:
            raise NotImplementedError
    except (PolynomialError, NotImplementedError):
        try:
            # NOTE(review): n and d are unused; as_numer_denom() appears to
            # serve only as a probe whose PolynomialError selects the
            # fallback below -- confirm before simplifying.
            n, d = expr.as_numer_denom()
            return reduce_rational_inequalities([[ie]], s, assume=assume)
        except PolynomialError:
            return solve_univariate_inequality(ie, s, assume=assume)
    # Linear case: a*s + b rel 0  ->  s rel -b/a, with the relation's sides
    # swapped when a is negative.
    a, b = p.all_coeffs()
    if a.is_positive:
        return ie.func(s, -b/a)
    elif a.is_negative:
        return ie.func(-b/a, s)
    else:
        raise NotImplementedError
def reduce_inequalities(inequalities, assume=True, symbols=[]):
    """Reduce a system of inequalities with rational coefficients.
    Examples
    ========
    >>> from sympy import Q, sympify as S
    >>> from sympy.abc import x, y
    >>> from sympy.solvers.inequalities import reduce_inequalities
    >>> reduce_inequalities(S(0) <= x + 3, Q.real(x), [])
    x >= -3
    >>> reduce_inequalities(S(0) <= x + y*2 - 1, True, [x])
    -2*y + 1 <= x
    """
    # NOTE(review): mutable default ``symbols=[]`` -- harmless here since
    # it is never mutated, but a tuple default would be safer.
    if not hasattr(inequalities, '__iter__'):
        inequalities = [inequalities]
    # Fast path: a single relational in a single symbol.
    if len(inequalities) == 1 and len(symbols) == 1 \
            and inequalities[0].is_Relational:
        try:
            return _solve_inequality(inequalities[0], symbols[0],
                assume=assume)
        except NotImplementedError:
            pass
    # Partition into purely rational inequalities and Abs()-containing
    # ones; anything else is unsupported.
    poly_part, abs_part, extra_assume = {}, {}, []
    for inequality in inequalities:
        if inequality == True:
            continue
        elif inequality == False:
            return False
        if isinstance(inequality, AppliedPredicate):
            # Bare predicates (e.g. Q.real(x)) become extra assumptions.
            extra_assume.append(inequality)
            continue
        if inequality.is_Relational:
            expr, rel = inequality.lhs - inequality.rhs, inequality.rel_op
        else:
            expr, rel = inequality, '=='
        gens = expr.free_symbols
        if not gens:
            return False
        elif len(gens) == 1:
            gen = gens.pop()
        else:
            raise NotImplementedError(
                "only univariate inequalities are supported")
        components = expr.find(lambda u: u.is_Function)
        if not components:
            if gen in poly_part:
                poly_part[gen].append((expr, rel))
            else:
                poly_part[gen] = [(expr, rel)]
        else:
            if all(isinstance(comp, Abs) for comp in components):
                if gen in abs_part:
                    abs_part[gen].append((expr, rel))
                else:
                    abs_part[gen] = [(expr, rel)]
            else:
                raise NotImplementedError("can't reduce %s" % inequalities)
    extra_assume = And(*extra_assume)
    if assume is not None:
        assume = And(assume, extra_assume)
    else:
        assume = extra_assume
    # Reduce each partition per symbol and conjoin everything.
    poly_reduced = []
    abs_reduced = []
    for gen, exprs in poly_part.items():
        poly_reduced.append(reduce_rational_inequalities([exprs], gen, assume))
    for gen, exprs in abs_part.items():
        abs_reduced.append(reduce_abs_inequalities(exprs, gen, assume))
    return And(*(poly_reduced + abs_reduced))
|
# Minimal Tk menu-bar demo.
# NOTE: Python 2 module name; on Python 3 this would be ``tkinter``.
import Tkinter as tk
root = tk.Tk()
# Placeholder callback for menu entries that are not implemented.
def noop(): pass
menubar = tk.Menu(root)
# File menu: Open/Save placeholders, then a separated Exit entry.
filemenu = tk.Menu(menubar)
filemenu.add_command(label="Open", command=noop)
filemenu.add_command(label="Save", command=noop)
filemenu.add_separator()
filemenu.add_command(label="Exit", command=root.quit)
menubar.add_cascade(label="File", menu=filemenu)
# Edit menu: standard clipboard placeholders.
editmenu = tk.Menu(menubar)
editmenu.add_command(label="Cut", command=noop)
editmenu.add_command(label="Copy", command=noop)
editmenu.add_command(label="Paste", command=noop)
menubar.add_cascade(label="Edit", menu=editmenu)
# Help menu.
helpmenu = tk.Menu(menubar)
helpmenu.add_command(label="About", command=noop)
menubar.add_cascade(label="Help", menu=helpmenu)
# Attach the menu bar to the window and enter the event loop.
root.config(menu=menubar)
root.mainloop()
|
"""Test interact and interactive."""
from __future__ import print_function
from collections import OrderedDict
import nose.tools as nt
import IPython.testing.tools as tt
from IPython.kernel.comm import Comm
from IPython.html import widgets
from IPython.html.widgets import interact, interactive, Widget, interaction
from IPython.utils.py3compat import annotate
class DummyComm(Comm):
    """Comm stand-in that swallows all messages (no kernel round-trips)."""
    # Fixed id so tests are deterministic.
    comm_id = 'a-b-c-d'
    def open(self, *args, **kwargs):
        # suppress the open message normally sent to the frontend
        pass
    def send(self, *args, **kwargs):
        # drop outgoing state updates
        pass
    def close(self, *args, **kwargs):
        # nothing to tear down
        pass
_widget_attrs = {}  # original Widget attributes, saved by setup()
displayed = []  # objects captured by record_display()
undefined = object()  # sentinel: attribute was absent on Widget
def setup():
    """Patch Widget so tests can run without a real kernel/frontend."""
    # Save the originals (or the ``undefined`` sentinel) for teardown().
    _widget_attrs['_comm_default'] = getattr(Widget, '_comm_default', undefined)
    Widget._comm_default = lambda self: DummyComm()
    _widget_attrs['_ipython_display_'] = Widget._ipython_display_
    def raise_not_implemented(*args, **kwargs):
        raise NotImplementedError()
    # Any accidental display attempt should fail loudly in tests.
    Widget._ipython_display_ = raise_not_implemented
def teardown():
    """Undo setup(): restore every Widget attribute saved in _widget_attrs."""
    for attr, value in _widget_attrs.items():
        if value is not undefined:
            setattr(Widget, attr, value)
        else:
            # The attribute did not exist originally, so remove it again.
            delattr(Widget, attr)
def f(**kwargs):
    """Do-nothing target function handed to interactive() throughout the tests."""
    return None
def clear_display():
    """Reset the module-level list of recorded display() calls."""
    global displayed
    displayed = []
def record_display(*args):
    """Capture everything passed to display() while it is monkeypatched."""
    for obj in args:
        displayed.append(obj)
def check_widget(w, **d):
    """Assert that widget *w* matches every attribute/value pair in *d*.

    The special key 'cls' checks the widget's exact class instead of an
    attribute value.
    """
    for attr, expected in d.items():
        if attr != 'cls':
            value = getattr(w, attr)
            msg = "%s.%s = %r != %r" % (w.__class__.__name__, attr, value, expected)
            nt.assert_equal(value, expected, msg)
        else:
            nt.assert_is(w.__class__, expected)
def check_widgets(container, **to_check):
    """Look up each child of *container* by its description and check it
    against the corresponding expected-attributes dict in *to_check*."""
    # Index children by description so expectations can be matched by name
    # (avoids shadowing the module-level `widgets` import too).
    by_description = {w.description: w for w in container.children}
    for key, expected in to_check.items():
        nt.assert_in(key, by_description)
        check_widget(by_description[key], **expected)
# A plain string kwarg should map to a Text widget.
def test_single_value_string():
    a = u'hello'
    c = interactive(f, a=a)
    w = c.children[0]
    check_widget(w,
        cls=widgets.Text,
        description='a',
        value=a,
    )
# A bool kwarg should map to a Checkbox, preserving the value.
def test_single_value_bool():
    for a in (True, False):
        c = interactive(f, a=a)
        w = c.children[0]
        check_widget(w,
            cls=widgets.Checkbox,
            description='a',
            value=a,
        )
# A dict kwarg becomes a Dropdown whose options are the dict entries.
def test_single_value_dict():
    for d in [
        dict(a=5),
        dict(a=5, b='b', c=dict),
    ]:
        c = interactive(f, d=d)
        w = c.children[0]
        check_widget(w,
            cls=widgets.Dropdown,
            description='d',
            options=d,
            value=next(iter(d.values())),
        )
# A float kwarg becomes a FloatSlider; the min/max expressions mirror the
# range interaction derives from the initial value's sign.
def test_single_value_float():
    for a in (2.25, 1.0, -3.5):
        c = interactive(f, a=a)
        w = c.children[0]
        check_widget(w,
            cls=widgets.FloatSlider,
            description='a',
            value=a,
            min= -a if a > 0 else 3*a,
            max= 3*a if a > 0 else -a,
            step=0.1,
            readout=True,
        )
# An int kwarg becomes an IntSlider with a sign-dependent derived range.
def test_single_value_int():
    for a in (1, 5, -3):
        c = interactive(f, a=a)
        nt.assert_equal(len(c.children), 1)
        w = c.children[0]
        check_widget(w,
            cls=widgets.IntSlider,
            description='a',
            value=a,
            min= -a if a > 0 else 3*a,
            max= 3*a if a > 0 else -a,
            step=1,
            readout=True,
        )
# A 2-int tuple/list is (min, max); empty or inverted ranges must raise.
def test_list_tuple_2_int():
    with nt.assert_raises(ValueError):
        c = interactive(f, tup=(1,1))
    with nt.assert_raises(ValueError):
        c = interactive(f, tup=(1,-1))
    for min, max in [ (0,1), (1,10), (1,2), (-5,5), (-20,-19) ]:
        c = interactive(f, tup=(min, max), lis=[min, max])
        nt.assert_equal(len(c.children), 2)
        d = dict(
            cls=widgets.IntSlider,
            min=min,
            max=max,
            step=1,
            readout=True,
        )
        check_widgets(c, tup=d, lis=d)
# A 3-int tuple/list is (min, max, step); a non-positive step must raise.
def test_list_tuple_3_int():
    with nt.assert_raises(ValueError):
        c = interactive(f, tup=(1,2,0))
    with nt.assert_raises(ValueError):
        c = interactive(f, tup=(1,2,-1))
    for min, max, step in [ (0,2,1), (1,10,2), (1,100,2), (-5,5,4), (-100,-20,4) ]:
        c = interactive(f, tup=(min, max, step), lis=[min, max, step])
        nt.assert_equal(len(c.children), 2)
        d = dict(
            cls=widgets.IntSlider,
            min=min,
            max=max,
            step=step,
            readout=True,
        )
        check_widgets(c, tup=d, lis=d)
# Same (min, max) behavior for floats, producing a FloatSlider.
def test_list_tuple_2_float():
    with nt.assert_raises(ValueError):
        c = interactive(f, tup=(1.0,1.0))
    with nt.assert_raises(ValueError):
        c = interactive(f, tup=(0.5,-0.5))
    for min, max in [ (0.5, 1.5), (1.1,10.2), (1,2.2), (-5.,5), (-20,-19.) ]:
        c = interactive(f, tup=(min, max), lis=[min, max])
        nt.assert_equal(len(c.children), 2)
        d = dict(
            cls=widgets.FloatSlider,
            min=min,
            max=max,
            step=.1,
            readout=True,
        )
        check_widgets(c, tup=d, lis=d)
# (min, max, step) for floats; a single float anywhere makes it a FloatSlider.
def test_list_tuple_3_float():
    with nt.assert_raises(ValueError):
        c = interactive(f, tup=(1,2,0.0))
    with nt.assert_raises(ValueError):
        c = interactive(f, tup=(-1,-2,1.))
    with nt.assert_raises(ValueError):
        c = interactive(f, tup=(1,2.,-1.))
    for min, max, step in [ (0.,2,1), (1,10.,2), (1,100,2.), (-5.,5.,4), (-100,-20.,4.) ]:
        c = interactive(f, tup=(min, max, step), lis=[min, max, step])
        nt.assert_equal(len(c.children), 2)
        d = dict(
            cls=widgets.FloatSlider,
            min=min,
            max=max,
            step=step,
            readout=True,
        )
        check_widgets(c, tup=d, lis=d)
# A tuple/list of strings becomes a Dropdown defaulting to the first entry.
def test_list_tuple_str():
    values = ['hello', 'there', 'guy']
    first = values[0]
    c = interactive(f, tup=tuple(values), lis=list(values))
    nt.assert_equal(len(c.children), 2)
    d = dict(
        cls=widgets.Dropdown,
        value=first,
        options=values
    )
    check_widgets(c, tup=d, lis=d)
# Tuples that fit none of the recognized shapes must raise ValueError.
def test_list_tuple_invalid():
    for bad in [
        (),
        (5, 'hi'),
        ('hi', 5),
        ({},),
        (None,),
    ]:
        with nt.assert_raises(ValueError):
            print(bad) # because there is no custom message in assert_raises
            c = interactive(f, tup=bad)
# Annotated and default-argument values both seed the generated widgets.
def test_defaults():
    @annotate(n=10)
    def f(n, f=4.5, g=1):
        pass
    c = interactive(f)
    check_widgets(c,
        n=dict(
            cls=widgets.IntSlider,
            value=10,
        ),
        f=dict(
            cls=widgets.FloatSlider,
            value=4.5,
        ),
        g=dict(
            cls=widgets.IntSlider,
            value=5,
        ),
    )
# When a default lies inside an annotated range/option set, the widget keeps it.
def test_default_values():
    @annotate(n=10, f=(0, 10.), g=5, h={'a': 1, 'b': 2}, j=['hi', 'there'])
    def f(n, f=4.5, g=1, h=2, j='there'):
        pass
    c = interactive(f)
    check_widgets(c,
        n=dict(
            cls=widgets.IntSlider,
            value=10,
        ),
        f=dict(
            cls=widgets.FloatSlider,
            value=4.5,
        ),
        g=dict(
            cls=widgets.IntSlider,
            value=5,
        ),
        h=dict(
            cls=widgets.Dropdown,
            options={'a': 1, 'b': 2},
            value=2
        ),
        j=dict(
            cls=widgets.Dropdown,
            options=['hi', 'there'],
            value='there'
        ),
    )
# Defaults outside the annotated range/options fall back to a value the
# widget can represent (range midpoint, first option, etc.).
def test_default_out_of_bounds():
    @annotate(f=(0, 10.), h={'a': 1}, j=['hi', 'there'])
    def f(f='hi', h=5, j='other'):
        pass
    c = interactive(f)
    check_widgets(c,
        f=dict(
            cls=widgets.FloatSlider,
            value=5.,
        ),
        h=dict(
            cls=widgets.Dropdown,
            options={'a': 1},
            value=1,
        ),
        j=dict(
            cls=widgets.Dropdown,
            options=['hi', 'there'],
            value='hi',
        ),
    )
# A widget instance given as an annotation is used directly.
def test_annotations():
    @annotate(n=10, f=widgets.FloatText())
    def f(n, f):
        pass
    c = interactive(f)
    check_widgets(c,
        n=dict(
            cls=widgets.IntSlider,
            value=10,
        ),
        f=dict(
            cls=widgets.FloatText,
        ),
    )
# Explicit interactive() kwargs win over annotations, which win over defaults.
def test_priority():
    @annotate(annotate='annotate', kwarg='annotate')
    def f(kwarg='default', annotate='default', default='default'):
        pass
    c = interactive(f, kwarg='kwarg')
    check_widgets(c,
        kwarg=dict(
            cls=widgets.Text,
            value='kwarg',
        ),
        annotate=dict(
            cls=widgets.Text,
            value='annotate',
        ),
    )
# @interact(a=5) used as a decorator factory displays one widget container.
@nt.with_setup(clear_display)
def test_decorator_kwarg():
    with tt.monkeypatch(interaction, 'display', record_display):
        @interact(a=5)
        def foo(a):
            pass
    nt.assert_equal(len(displayed), 1)
    w = displayed[0].children[0]
    check_widget(w,
        cls=widgets.IntSlider,
        value=5,
    )
# interact() also works on a bound method; (1, 10) yields a slider at the
# range midpoint.
@nt.with_setup(clear_display)
def test_interact_instancemethod():
    class Foo(object):
        def show(self, x):
            print(x)
    f = Foo()
    with tt.monkeypatch(interaction, 'display', record_display):
        g = interact(f.show, x=(1,10))
    nt.assert_equal(len(displayed), 1)
    w = displayed[0].children[0]
    check_widget(w,
        cls=widgets.IntSlider,
        value=5,
    )
# Bare @interact (no call) infers the widget from the default argument.
@nt.with_setup(clear_display)
def test_decorator_no_call():
    with tt.monkeypatch(interaction, 'display', record_display):
        @interact
        def foo(a='default'):
            pass
    nt.assert_equal(len(displayed), 1)
    w = displayed[0].children[0]
    check_widget(w,
        cls=widgets.Text,
        value='default',
    )
# Plain function call form: interact(foo).
@nt.with_setup(clear_display)
def test_call_interact():
    def foo(a='default'):
        pass
    with tt.monkeypatch(interaction, 'display', record_display):
        ifoo = interact(foo)
    nt.assert_equal(len(displayed), 1)
    w = displayed[0].children[0]
    check_widget(w,
        cls=widgets.Text,
        value='default',
    )
# Call form with kwargs: interact(foo, a=10) overrides foo's default.
@nt.with_setup(clear_display)
def test_call_interact_kwargs():
    def foo(a='default'):
        pass
    with tt.monkeypatch(interaction, 'display', record_display):
        ifoo = interact(foo, a=10)
    nt.assert_equal(len(displayed), 1)
    w = displayed[0].children[0]
    check_widget(w,
        cls=widgets.IntSlider,
        value=10,
    )
# A decorated function stays directly callable, and widget trait changes
# re-invoke it.
@nt.with_setup(clear_display)
def test_call_decorated_on_trait_change():
    """test calling @interact decorated functions"""
    d = {}
    with tt.monkeypatch(interaction, 'display', record_display):
        @interact
        def foo(a='default'):
            d['a'] = a
            return a
    nt.assert_equal(len(displayed), 1)
    w = displayed[0].children[0]
    check_widget(w,
        cls=widgets.Text,
        value='default',
    )
    # test calling the function directly
    a = foo('hello')
    nt.assert_equal(a, 'hello')
    nt.assert_equal(d['a'], 'hello')
    # test that setting trait values calls the function
    w.value = 'called'
    nt.assert_equal(d['a'], 'called')
# Same as above but with the decorator-factory form @interact(a='kwarg').
@nt.with_setup(clear_display)
def test_call_decorated_kwargs_on_trait_change():
    """test calling @interact(foo=bar) decorated functions"""
    d = {}
    with tt.monkeypatch(interaction, 'display', record_display):
        @interact(a='kwarg')
        def foo(a='default'):
            d['a'] = a
            return a
    nt.assert_equal(len(displayed), 1)
    w = displayed[0].children[0]
    check_widget(w,
        cls=widgets.Text,
        value='kwarg',
    )
    # test calling the function directly
    a = foo('hello')
    nt.assert_equal(a, 'hello')
    nt.assert_equal(d['a'], 'hello')
    # test that setting trait values calls the function
    w.value = 'called'
    nt.assert_equal(d['a'], 'called')
# widgets.fixed(5) pins a value without creating a widget for it.
def test_fixed():
    c = interactive(f, a=widgets.fixed(5), b='text')
    nt.assert_equal(len(c.children), 1)
    w = c.children[0]
    check_widget(w,
        cls=widgets.Text,
        value='text',
        description='b',
    )
# Without an explicit description the parameter name is used.
def test_default_description():
    c = interactive(f, b='text')
    w = c.children[0]
    check_widget(w,
        cls=widgets.Text,
        value='text',
        description='b',
    )
# A widget passed in keeps its custom description, but the function still
# receives the value under the parameter name.
def test_custom_description():
    d = {}
    def record_kwargs(**kwargs):
        d.clear()
        d.update(kwargs)
    c = interactive(record_kwargs, b=widgets.Text(value='text', description='foo'))
    w = c.children[0]
    check_widget(w,
        cls=widgets.Text,
        value='text',
        description='foo',
    )
    w.value = 'different text'
    nt.assert_equal(d, {'b': 'different text'})
# __manual=True prepends a run Button to the container.
def test_interact_manual_button():
    c = interactive(f, __manual=True)
    w = c.children[0]
    check_widget(w, cls=widgets.Button)
def test_interact_manual_nocall():
    """In manual mode, changing a trait must NOT invoke the function."""
    # Use a one-element list as the counter: the original bare
    # `callcount += 1` inside the closure raised UnboundLocalError whenever
    # the callback fired, which would have masked the real failure mode.
    callcount = [0]
    def calltest(testarg):
        callcount[0] += 1
    c = interactive(calltest, testarg=5, __manual=True)
    c.children[0].value = 10
    nt.assert_equal(callcount[0], 0)
# IntRangeSlider invariants: values are ordered, clamped to [min, max], and
# inconsistent constructor arguments raise ValueError.
def test_int_range_logic():
    irsw = widgets.IntRangeSlider
    w = irsw(value=(2, 4), min=0, max=6)
    check_widget(w, cls=irsw, value=(2, 4), min=0, max=6)
    w.value = (4, 2)
    check_widget(w, cls=irsw, value=(2, 4), min=0, max=6)
    w.value = (-1, 7)
    check_widget(w, cls=irsw, value=(0, 6), min=0, max=6)
    w.min = 3
    check_widget(w, cls=irsw, value=(3, 6), min=3, max=6)
    w.max = 3
    check_widget(w, cls=irsw, value=(3, 3), min=3, max=3)
    w.min = 0
    w.max = 6
    w.lower = 2
    w.upper = 4
    check_widget(w, cls=irsw, value=(2, 4), min=0, max=6)
    w.value = (0, 1) #lower non-overlapping range
    check_widget(w, cls=irsw, value=(0, 1), min=0, max=6)
    w.value = (5, 6) #upper non-overlapping range
    check_widget(w, cls=irsw, value=(5, 6), min=0, max=6)
    w.value = (-1, 4) #semi out-of-range
    check_widget(w, cls=irsw, value=(0, 4), min=0, max=6)
    w.lower = 2
    check_widget(w, cls=irsw, value=(2, 4), min=0, max=6)
    w.value = (-2, -1) #wholly out of range
    check_widget(w, cls=irsw, value=(0, 0), min=0, max=6)
    w.value = (7, 8)
    check_widget(w, cls=irsw, value=(6, 6), min=0, max=6)
    with nt.assert_raises(ValueError):
        w.min = 7
    with nt.assert_raises(ValueError):
        w.max = -1
    with nt.assert_raises(ValueError):
        w.lower = 5
    with nt.assert_raises(ValueError):
        w.upper = 1
    w = irsw(min=2, max=3)
    check_widget(w, min=2, max=3)
    # With no explicit value, the range defaults to the middle half.
    w = irsw(min=100, max=200)
    check_widget(w, lower=125, upper=175, value=(125, 175))
    with nt.assert_raises(ValueError):
        irsw(value=(2, 4), lower=3)
    with nt.assert_raises(ValueError):
        irsw(value=(2, 4), upper=3)
    with nt.assert_raises(ValueError):
        irsw(value=(2, 4), lower=3, upper=3)
    with nt.assert_raises(ValueError):
        irsw(min=2, max=1)
    with nt.assert_raises(ValueError):
        irsw(lower=5)
    with nt.assert_raises(ValueError):
        irsw(upper=5)
# Same invariants as test_int_range_logic, for FloatRangeSlider.
def test_float_range_logic():
    frsw = widgets.FloatRangeSlider
    w = frsw(value=(.2, .4), min=0., max=.6)
    check_widget(w, cls=frsw, value=(.2, .4), min=0., max=.6)
    w.value = (.4, .2)
    check_widget(w, cls=frsw, value=(.2, .4), min=0., max=.6)
    w.value = (-.1, .7)
    check_widget(w, cls=frsw, value=(0., .6), min=0., max=.6)
    w.min = .3
    check_widget(w, cls=frsw, value=(.3, .6), min=.3, max=.6)
    w.max = .3
    check_widget(w, cls=frsw, value=(.3, .3), min=.3, max=.3)
    w.min = 0.
    w.max = .6
    w.lower = .2
    w.upper = .4
    check_widget(w, cls=frsw, value=(.2, .4), min=0., max=.6)
    w.value = (0., .1) #lower non-overlapping range
    check_widget(w, cls=frsw, value=(0., .1), min=0., max=.6)
    w.value = (.5, .6) #upper non-overlapping range
    check_widget(w, cls=frsw, value=(.5, .6), min=0., max=.6)
    w.value = (-.1, .4) #semi out-of-range
    check_widget(w, cls=frsw, value=(0., .4), min=0., max=.6)
    w.lower = .2
    check_widget(w, cls=frsw, value=(.2, .4), min=0., max=.6)
    w.value = (-.2, -.1) #wholly out of range
    check_widget(w, cls=frsw, value=(0., 0.), min=0., max=.6)
    w.value = (.7, .8)
    check_widget(w, cls=frsw, value=(.6, .6), min=.0, max=.6)
    with nt.assert_raises(ValueError):
        w.min = .7
    with nt.assert_raises(ValueError):
        w.max = -.1
    with nt.assert_raises(ValueError):
        w.lower = .5
    with nt.assert_raises(ValueError):
        w.upper = .1
    w = frsw(min=2, max=3)
    check_widget(w, min=2, max=3)
    # With no explicit value, the range defaults to the middle half.
    w = frsw(min=1., max=2.)
    check_widget(w, lower=1.25, upper=1.75, value=(1.25, 1.75))
    with nt.assert_raises(ValueError):
        frsw(value=(2, 4), lower=3)
    with nt.assert_raises(ValueError):
        frsw(value=(2, 4), upper=3)
    with nt.assert_raises(ValueError):
        frsw(value=(2, 4), lower=3, upper=3)
    with nt.assert_raises(ValueError):
        frsw(min=.2, max=.1)
    with nt.assert_raises(ValueError):
        frsw(lower=5)
    with nt.assert_raises(ValueError):
        frsw(upper=5)
# SelectMultiple: values must always be a subset of the current options.
def test_multiple_selection():
    smw = widgets.SelectMultiple
    # degenerate multiple select
    w = smw()
    check_widget(w, value=tuple(), options=None, selected_labels=tuple())
    # don't accept random other value when no options
    with nt.assert_raises(KeyError):
        w.value = (2,)
    check_widget(w, value=tuple(), selected_labels=tuple())
    # basic multiple select
    w = smw(options=[(1, 1)], value=[1])
    check_widget(w, cls=smw, value=(1,), options=[(1, 1)])
    # don't accept random other value
    with nt.assert_raises(KeyError):
        w.value = w.value + (2,)
    check_widget(w, value=(1,), selected_labels=(1,))
    # change options
    w.options = w.options + [(2, 2)]
    check_widget(w, options=[(1, 1), (2,2)])
    # change value
    w.value = w.value + (2,)
    check_widget(w, value=(1, 2), selected_labels=(1, 2))
    # change value name
    w.selected_labels = (1,)
    check_widget(w, value=(1,))
    # don't accept random other names when no options
    with nt.assert_raises(KeyError):
        w.selected_labels = (3,)
    check_widget(w, value=(1,))
    # don't accept selected_label (from superclass)
    with nt.assert_raises(AttributeError):
        w.selected_label = 3
    # don't return selected_label (from superclass)
    with nt.assert_raises(AttributeError):
        print(w.selected_label)
    # dict style
    w.options = {1: 1}
    check_widget(w, options={1: 1})
    # updating
    with nt.assert_raises(KeyError):
        w.value = (2,)
    check_widget(w, options={1: 1})
|
"""
EasyBuild support for MyMediaLite, implemented as an easyblock
@author: Stijn De Weirdt (Ghent University)
@author: Dries Verdegem (Ghent University)
@author: Kenneth Hoste (Ghent University)
@author: Pieter De Baets (Ghent University)
@author: Jens Timmerman (Ghent University)
"""
from distutils.version import LooseVersion
from easybuild.easyblocks.generic.configuremake import ConfigureMake
from easybuild.tools.run import run_cmd
class EB_MyMediaLite(ConfigureMake):
    """Support for building/installing MyMediaLite."""
    def configure_step(self):
        """Custom configure step for MyMediaLite, using "make CONFIGURE_OPTIONS='...' configure"."""
        # Versions before 3 use a configure target; from v3 on, the install
        # prefix is passed as PREFIX on the install command line instead.
        if LooseVersion(self.version) < LooseVersion('3'):
            cmd = "make CONFIGURE_OPTIONS='--prefix=%s' configure" % self.installdir
            run_cmd(cmd, log_all=True, simple=True)
        else:
            self.cfg.update('installopts', "PREFIX=%s" % self.installdir)
    def build_step(self):
        """Custom build step for MyMediaLite, using 'make all' in 'src' directory."""
        # The build is driven from the src/ subdirectory.
        cmd = "cd src && make all && cd .."
        run_cmd(cmd, log_all=True, simple=True)
    def sanity_check_step(self):
        """Custom sanity check for MyMediaLite."""
        # The set of installed binaries differs between the pre-3 and 3.x series.
        if LooseVersion(self.version) < LooseVersion('3'):
            bin_files = ["bin/%s_prediction" % x for x in ['item', 'mapping_item', 'mapping_rating', 'rating']]
        else:
            bin_files = ["bin/item_recommendation", "bin/rating_based_ranking", "bin/rating_prediction"]
        custom_paths = {
            'files': bin_files,
            'dirs': ["lib/mymedialite"],
        }
        super(EB_MyMediaLite, self).sanity_check_step(custom_paths=custom_paths)
|
import karamba
# Handle of the text item created by widgetUpdated(); None until first message.
drop_txt = None
def initWidget(widget):
    """Called by karamba when the theme loads.

    Resets our incoming-data slot to "" so widgetUpdated() can tell that
    nothing has been received from the other theme yet.
    """
    # this resets the text to "" so we know we've never
    # received anything yet from the other theme
    name = karamba.getPrettyThemeName(widget)
    print "2.py name: ", name
    karamba.setIncomingData(widget, name, "")
    karamba.redrawWidget(widget)
# Highest message sequence number processed so far (duplicates are skipped).
expected_seq = 0
def widgetUpdated(widget):
    """Poll for a message from the paired theme and display it.

    Messages arrive via karamba's incoming-data channel as a repr'd
    tuple "(seq, x, y, button)"; already-seen sequence numbers are ignored.
    """
    global expected_seq
    global drop_txt
    # Get the "message"; empty string means nothing has been sent yet.
    disp = karamba.getIncomingData(widget)
    if disp == "":
        return
    # Decode it. NOTE(review): eval() on data coming from another theme is
    # unsafe if that data can ever be attacker-controlled.
    (seq, x, y, button) = eval(disp)
    # BUGFIX: duplicates must actually be skipped -- the original used
    # 'pass' here, which fell through and redisplayed old messages.
    if seq <= expected_seq:
        return
    expected_seq += 1
    message = "seq:%d x:%d y:%d btn:%d" % (seq, x, y, button)
    # Delete the previously created text item, if any.
    if drop_txt is not None:
        karamba.deleteText(widget, drop_txt)
    # Display the new message and force a repaint.
    drop_txt = karamba.createText(widget, 0, 20, 300, 20, message)
    karamba.changeTextColor(widget, drop_txt, 252, 252, 252)
    karamba.redrawWidget(widget)
# Emitted at import time so the karamba log shows the extension was loaded.
print "Loaded my python 2.py extension!"
|
# Python 2 helper script: print steghide usage examples, then spawn an
# xterm so the user can run the commands themselves.
import sys
import os
print "-------------------------"
print "StegHide Options"
print "-------------------------"
print "Usage Example :"
print ""
print"To embed emb.txt in cvr.jpg: steghide embed -cf cvr.jpg -ef emb.txt"
print ""
print "To extract embedded data from stg.jpg: steghide extract -sf stg.jpg"
# Blocks until the spawned xterm exits; its exit status is kept in cmd1.
cmd1 = os.system ("xterm ")
|
"""BibObject Module providing BibObject prividing features for documents containing text (not necessarily as the main part of the content)"""
import os
import re
from datetime import datetime
from invenio.config import CFG_BIBINDEX_PERFORM_OCR_ON_DOCNAMES
from invenio.legacy.bibdocfile.api import BibDoc, InvenioBibDocFileError
from invenio.legacy.dbquery import run_sql
from invenio.ext.logging import register_exception
# Docnames matching this configured pattern require OCR for text extraction.
_RE_PERFORM_OCR = re.compile(CFG_BIBINDEX_PERFORM_OCR_ON_DOCNAMES)
class BibTextDoc(BibDoc):
    """BibDoc subclass for documents whose content includes extractable text.

    Extracted text is cached on disk next to the document as '.text;<version>'.
    """
    def get_text(self, version=None):
        """
        @param version: the requested version. If not set, the latest version
        will be used.
        @type version: integer
        @return: the textual content corresponding to the specified version
        of the document.
        @rtype: string
        """
        if version is None:
            version = self.get_latest_version()
        # BUGFIX: pass version by keyword -- positionally it was bound to
        # has_text's first parameter, require_up_to_date, so the wrong
        # (latest) version was checked.
        if self.has_text(version=version):
            # Close the cache file deterministically instead of leaking
            # the handle.
            with open(os.path.join(self.basedir, '.text;%i' % version)) as text_file:
                return text_file.read()
        else:
            return ""
    def is_ocr_required(self):
        """
        Return True if this document require OCR in order to extract text from it.
        """
        for bibrec_link in self.bibrec_links:
            if _RE_PERFORM_OCR.match(bibrec_link['docname']):
                return True
        return False
    def get_text_path(self, version=None):
        """
        @param version: the requested version. If not set, the latest version
        will be used.
        @type version: int
        @return: the full path to the textual content corresponding to the specified version
        of the document.
        @rtype: string
        """
        if version is None:
            version = self.get_latest_version()
        # BUGFIX: same keyword-argument issue as in get_text above.
        if self.has_text(version=version):
            return os.path.join(self.basedir, '.text;%i' % version)
        else:
            return ""
    def extract_text(self, version=None, perform_ocr=False, ln='en'):
        """
        Try what is necessary to extract the textual information of a document.
        @param version: the version of the document for which text is required.
        If not specified the text will be retrieved from the last version.
        @type version: integer
        @param perform_ocr: whether to perform OCR.
        @type perform_ocr: bool
        @param ln: a two letter language code to give as a hint to the OCR
        procedure.
        @type ln: string
        @raise InvenioBibDocFileError: in case of error.
        @note: the text is extracted and cached for later use. Use L{get_text}
        to retrieve it.
        """
        raise RuntimeError("Text extraction is not implemented.")
    def pdf_a_p(self):
        """
        @return: True if this document contains a PDF in PDF/A format.
        @rtype: bool"""
        return self.has_flag('PDF/A', 'pdf')
    def has_text(self, require_up_to_date=False, version=None):
        """
        Return True if the text of this document has already been extracted.
        @param require_up_to_date: if True check the text was actually
        extracted after the most recent format of the given version.
        @type require_up_to_date: bool
        @param version: a version for which the text should have been
        extracted. If not specified the latest version is considered.
        @type version: integer
        @return: True if the text has already been extracted.
        @rtype: bool
        """
        if version is None:
            version = self.get_latest_version()
        if os.path.exists(os.path.join(self.basedir, '.text;%i' % version)):
            if not require_up_to_date:
                return True
            else:
                # Text is stale if any file of this version is newer than
                # the cached text's mtime.
                docfiles = self.list_version_files(version)
                text_md = datetime.fromtimestamp(os.path.getmtime(os.path.join(self.basedir, '.text;%i' % version)))
                for docfile in docfiles:
                    if text_md <= docfile.md:
                        return False
                return True
        return False
    def __repr__(self):
        return 'BibTextDoc(%s, %s, %s)' % (repr(self.id), repr(self.doctype), repr(self.human_readable))
def supports(doctype, extensions):
    """Return True if this module can handle the document.

    A document is supported when its doctype is "Fulltext" or when any of
    its file extensions starts with ".pdf" or ".ps" (e.g. ".pdf;1", ".ps.gz").

    @param doctype: the document type name.
    @param extensions: iterable of file-extension strings.
    @rtype: bool
    """
    # any() + tuple startswith replaces the old reduce(lambda ...) fold,
    # which also relied on reduce being a builtin (Python 2 only).
    return doctype == "Fulltext" or any(
        ext.startswith((".pdf", ".ps")) for ext in extensions)
def create_instance(docid=None, doctype='Main', human_readable=False,  # pylint: disable=W0613
                    initial_data = None):
    """Factory returning a BibTextDoc.

    *doctype* is accepted but unused; it is kept so this factory has the
    same signature as the other BibObject plugin factories.
    """
    return BibTextDoc(docid=docid,
                      human_readable=human_readable,
                      initial_data=initial_data)
|
"""Rest alarm notifier with trusted authentication."""
from keystoneclient.v3 import client as keystone_client
from oslo.config import cfg
from six.moves.urllib import parse
from ceilometer.alarm.notifier import rest
# Make sure the options/groups this notifier reads are registered on CONF.
cfg.CONF.import_opt('http_timeout', 'ceilometer.service')
cfg.CONF.import_group('service_credentials', 'ceilometer.service')
class TrustRestAlarmNotifier(rest.RestAlarmNotifier):
    """Notifier supporting keystone trust authentication.

    This alarm notifier is intended to be used to call an endpoint using
    keystone authentication. It uses the ceilometer service user to
    authenticate using the trust ID provided.

    The URL must be in the form trust+http://trust-id@host/action.
    """
    @staticmethod
    def notify(action, alarm_id, previous, current, reason, reason_data):
        """Authenticate via the trust ID embedded in *action*, then delegate
        to the plain REST notifier with an X-Auth-Token header."""
        creds = cfg.CONF.service_credentials
        # The trust ID travels in the URL's user field.
        trust_id = action.username
        # Trusts need the v3 API; rewrite a v2.0 auth URL accordingly.
        auth_url = creds.os_auth_url.replace("v2.0", "v3")
        client = keystone_client.Client(
            username=creds.os_username,
            password=creds.os_password,
            cacert=creds.os_cacert,
            auth_url=auth_url,
            region_name=creds.os_region_name,
            insecure=creds.insecure,
            timeout=cfg.CONF.http_timeout,
            trust_id=trust_id)
        # Drop the fake "trust-id@" user part and the "trust+" scheme prefix
        # to recover the real endpoint URL.
        real_netloc = action.netloc.split("@")[1]
        real_scheme = action.scheme[6:]
        action = parse.SplitResult(real_scheme, real_netloc, action.path,
                                   action.query, action.fragment)
        headers = {'X-Auth-Token': client.auth_token}
        rest.RestAlarmNotifier.notify(
            action, alarm_id, previous, current, reason, reason_data, headers)
|
import gtk
import pango
import gobject
from radialnet.bestwidgets.boxes import *
from radialnet.bestwidgets.expanders import BWExpander
from radialnet.bestwidgets.labels import *
from radialnet.bestwidgets.textview import *
import zenmapCore.I18N
# Column headers for the per-port table.
PORTS_HEADER = [
    _('Port'), _('Protocol'), _('State'), _('Service'), _('Method')]
# Column headers for the aggregated "extraports" table.
EXTRAPORTS_HEADER = [_('Count'), _('State'), _('Reasons')]
# Row background colors keyed by port state.
SERVICE_COLORS = {'open': '#ffd5d5',
                  'closed': '#d5ffd5',
                  'filtered': '#ffffd5',
                  'unfiltered': '#ffd5d5',
                  'open|filtered': '#ffd5d5',
                  'closed|filtered': '#d5ffd5'}
# Fallback color for port states not listed above.
UNKNOWN_SERVICE_COLOR = '#d5d5d5'
TRACE_HEADER = [_('TTL'), _('RTT'), _('IP'), _('Hostname')]
TRACE_TEXT = _(
    "Traceroute on port <b>%s/%s</b> totalized <b>%d</b> known hops.")
NO_TRACE_TEXT = _("No traceroute information available.")
# Row colors for known vs. unknown traceroute hops.
HOP_COLOR = {'known': '#ffffff',
             'unknown': '#cccccc'}
# "[type] address" display format used for addresses and hostnames.
SYSTEM_ADDRESS_TEXT = "[%s] %s"
OSMATCH_HEADER = ['%', _('Name'), _('DB Line')]
OSCLASS_HEADER = ['%', _('Vendor'), _('Type'), _('Family'), _('Version')]
USED_PORTS_TEXT = "%d/%s %s"
TCP_SEQ_NOTE = _("""\
<b>*</b> TCP sequence <i>index</i> equal to %d and <i>difficulty</i> is "%s".\
""")
def get_service_color(state):
    """Return the display color for a port *state* (e.g. 'open', 'filtered'),
    falling back to UNKNOWN_SERVICE_COLOR for unrecognized states."""
    # dict.get with a default replaces the explicit None check; equivalent
    # because SERVICE_COLORS contains no None values.
    return SERVICE_COLORS.get(state, UNKNOWN_SERVICE_COLOR)
class NodeNotebook(gtk.Notebook):
    """Tabbed view (General / Services / Traceroute) for a single host node.
    """
    def __init__(self, node):
        """Build the notebook for *node*, a parsed scan-result host entry.
        """
        gtk.Notebook.__init__(self)
        self.set_tab_pos(gtk.POS_TOP)
        self.__node = node
        self.__create_widgets()
    def __create_widgets(self):
        """Create the three pages and append them with translated tab labels.
        """
        # create body elements
        self.__services_page = ServicesPage(self.__node)
        self.__system_page = SystemPage(self.__node)
        self.__trace_page = TraceroutePage(self.__node)
        # packing notebook elements
        self.append_page(self.__system_page, BWLabel(_('General')))
        self.append_page(self.__services_page, BWLabel(_('Services')))
        self.append_page(self.__trace_page, BWLabel(_('Traceroute')))
class ServicesPage(gtk.Notebook):
    """Notebook showing a node's ports, extraports and 'special field' texts.
    """
    def __init__(self, node):
        """Build the services view for *node*, a parsed scan-result host.
        """
        gtk.Notebook.__init__(self)
        self.set_border_width(6)
        self.set_tab_pos(gtk.POS_TOP)
        self.__node = node
        self.__font = pango.FontDescription('Monospace')
        self.__create_widgets()
    def __create_widgets(self):
        """Populate the ports/extraports trees and the special-fields viewer.
        """
        self.__cell = gtk.CellRendererText()
        # texteditor widgets
        self.__texteditor = BWTextEditor()
        self.__texteditor.bw_modify_font(self.__font)
        self.__texteditor.bw_set_editable(False)
        self.__texteditor.set_border_width(0)
        self.__select_combobox = gtk.combo_box_new_text()
        self.__select_combobox.connect('changed', self.__change_text_value)
        self.__viewer = BWVBox(spacing=6)
        self.__viewer.set_border_width(6)
        self.__viewer.bw_pack_start_noexpand_nofill(self.__select_combobox)
        self.__viewer.bw_pack_start_expand_fill(self.__texteditor)
        # Texts shown by the special-fields viewer, indexed in step with
        # the combobox entries.
        self.__text = list()
        # ports information
        number_of_ports = len(self.__node.get_info('ports'))
        self.__ports_label = BWLabel(_('Ports (%s)') % number_of_ports)
        self.__ports_scroll = BWScrolledWindow()
        # Columns: id, protocol, state, service, method, row color, editable.
        self.__ports_store = gtk.TreeStore(gobject.TYPE_INT,
                                           gobject.TYPE_STRING,
                                           gobject.TYPE_STRING,
                                           gobject.TYPE_STRING,
                                           gobject.TYPE_STRING,
                                           gobject.TYPE_STRING,
                                           gobject.TYPE_BOOLEAN)
        self.__ports_treeview = gtk.TreeView(self.__ports_store)
        for port in self.__node.get_info('ports'):
            color = get_service_color(port['state']['state'])
            service_name = port['service'].get('name', _('<unknown>'))
            service_method = port['service'].get('method', _('<none>'))
            reference = self.__ports_store.append(None,
                                                  [port['id'],
                                                   port['protocol'],
                                                   port['state']['state'],
                                                   service_name,
                                                   service_method,
                                                   color,
                                                   True])
            for key in port['state']:
                self.__ports_store.append(reference,
                                          [port['id'],
                                           'state',
                                           key,
                                           port['state'][key],
                                           '',
                                           'white',
                                           True])
            for key in port['service']:
                # Long fields (e.g. service fingerprints) go to the
                # special-fields viewer instead of the tree cell.
                if key in ['servicefp']:
                    text = _('[%d] service: %s') % (port['id'], key)
                    self.__select_combobox.append_text(text)
                    self.__text.append(port['service'][key])
                    value = _('<special field>')
                else:
                    value = port['service'][key]
                self.__ports_store.append(reference,
                                          [port['id'],
                                           'service',
                                           key,
                                           value,
                                           '',
                                           'white',
                                           True])
            #for script in port['scripts']:
            #    text = _('[%d] script: %s') % (port['id'], script['id'])
            #    self.__select_combobox.append_text(text)
            #    self.__text.append(script['output'])
            #
            #    self.__ports_store.append(reference,
            #                              [port['id'],
            #                               'script',
            #                               'id',
            #                               script['id'],
            #                               _('<special field>'),
            #                               'white',
            #                               True])
        self.__ports_column = list()
        for i in range(len(PORTS_HEADER)):
            column = gtk.TreeViewColumn(PORTS_HEADER[i],
                                        self.__cell,
                                        text=i)
            self.__ports_column.append(column)
            self.__ports_column[i].set_reorderable(True)
            self.__ports_column[i].set_resizable(True)
            self.__ports_column[i].set_sort_column_id(i)
            self.__ports_column[i].set_attributes(self.__cell,
                                                  text=i,
                                                  background=5,
                                                  editable=6)
            self.__ports_treeview.append_column(self.__ports_column[i])
        self.__ports_scroll.add_with_viewport(self.__ports_treeview)
        # extraports information
        number_of_xports = 0
        self.__xports_scroll = BWScrolledWindow()
        # Columns: count, state, reasons, row color, editable.
        self.__xports_store = gtk.TreeStore(gobject.TYPE_INT,
                                            gobject.TYPE_STRING,
                                            gobject.TYPE_STRING,
                                            gobject.TYPE_STRING,
                                            gobject.TYPE_BOOLEAN)
        self.__xports_treeview = gtk.TreeView(self.__xports_store)
        for xports in self.__node.get_info('extraports'):
            color = get_service_color(xports['state'])
            number_of_xports += xports['count']
            reference = self.__xports_store.append(
                    None, [xports['count'], xports['state'],
                           ", ".join(xports['reason']), color, True])
            for xreason in xports['all_reason']:
                self.__xports_store.append(reference,
                                           [xreason['count'],
                                            xports['state'],
                                            xreason['reason'],
                                            'white',
                                            True])
        self.__xports_column = list()
        for i in range(len(EXTRAPORTS_HEADER)):
            column = gtk.TreeViewColumn(EXTRAPORTS_HEADER[i],
                                        self.__cell,
                                        text=i)
            self.__xports_column.append(column)
            self.__xports_column[i].set_reorderable(True)
            self.__xports_column[i].set_resizable(True)
            self.__xports_column[i].set_sort_column_id(i)
            self.__xports_column[i].set_attributes(self.__cell,
                                                   text=i,
                                                   background=3,
                                                   editable=4)
            self.__xports_treeview.append_column(self.__xports_column[i])
        xports_label_text = _('Extraports (%s)') % number_of_xports
        self.__xports_label = BWLabel(xports_label_text)
        self.__xports_scroll.add_with_viewport(self.__xports_treeview)
        self.append_page(self.__ports_scroll, self.__ports_label)
        self.append_page(self.__xports_scroll, self.__xports_label)
        self.append_page(self.__viewer, BWLabel(_('Special fields')))
        if len(self.__text) > 0:
            self.__select_combobox.set_active(0)
    def __change_text_value(self, widget):
        """Show the text matching the current combobox selection.
        """
        # NOTE(review): get_active() returns -1 when nothing is selected,
        # which would display self.__text[-1]; presumably an entry is always
        # active when this fires -- confirm.
        id = self.__select_combobox.get_active()
        self.__texteditor.bw_set_text(self.__text[id])
class SystemPage(BWScrolledWindow):
"""
"""
def __init__(self, node):
"""
"""
BWScrolledWindow.__init__(self)
self.__node = node
self.__font = pango.FontDescription('Monospace')
self.__create_widgets()
def __create_widgets(self):
"""
"""
self.__vbox = BWVBox()
self.__vbox.set_border_width(6)
self.__cell = gtk.CellRendererText()
self.__general_frame = BWExpander(_('General information'))
self.__sequences_frame = BWExpander(_('Sequences'))
self.__os_frame = BWExpander(_('Operating System'))
self.__sequences_frame.bw_add(gtk.Label(_('No sequence information.')))
self.__os_frame.bw_add(gtk.Label(_('No OS information.')))
# general information widgets
self.__general = BWTable(3, 2)
self.__address_label = BWSectionLabel(_('Address:'))
self.__address_list = gtk.combo_box_entry_new_text()
self.__address_list.child.set_editable(False)
for address in self.__node.get_info('addresses'):
params = address['type'], address['addr']
address_text = SYSTEM_ADDRESS_TEXT % params
if address['vendor'] is not None and address['vendor'] != '':
address_text += " (%s)" % address['vendor']
self.__address_list.append_text(address_text)
self.__address_list.set_active(0)
self.__general.bw_attach_next(self.__address_label,
yoptions=gtk.FILL,
xoptions=gtk.FILL)
self.__general.bw_attach_next(self.__address_list, yoptions=gtk.FILL)
if self.__node.get_info('hostnames') is not None:
self.__hostname_label = BWSectionLabel(_('Hostname:'))
self.__hostname_list = gtk.combo_box_entry_new_text()
self.__hostname_list.child.set_editable(False)
for hostname in self.__node.get_info('hostnames'):
params = hostname['type'], hostname['name']
self.__hostname_list.append_text(SYSTEM_ADDRESS_TEXT % params)
self.__hostname_list.set_active(0)
self.__general.bw_attach_next(self.__hostname_label,
yoptions=gtk.FILL,
xoptions=gtk.FILL)
self.__general.bw_attach_next(self.__hostname_list,
yoptions=gtk.FILL)
if self.__node.get_info('uptime') is not None:
self.__uptime_label = BWSectionLabel(_('Last boot:'))
seconds = self.__node.get_info('uptime')['seconds']
lastboot = self.__node.get_info('uptime')['lastboot']
text = _('%s (%s seconds).') % (lastboot, seconds)
self.__uptime_value = BWLabel(text)
self.__uptime_value.set_selectable(True)
self.__uptime_value.set_line_wrap(False)
self.__general.bw_attach_next(self.__uptime_label,
yoptions=gtk.FILL,
xoptions=gtk.FILL)
self.__general.bw_attach_next(self.__uptime_value,
yoptions=gtk.FILL)
self.__general_frame.bw_add(self.__general)
self.__general_frame.set_expanded(True)
sequences = self.__node.get_info('sequences')
if len(sequences) > 0:
self.__sequences_frame.bw_add(
self.__create_sequences_widget(sequences))
# operating system information widgets
self.__os = gtk.Notebook()
os = self.__node.get_info('os')
if os is not None:
if 'matches' in os:
self.__match_scroll = BWScrolledWindow()
self.__match_store = gtk.ListStore(gobject.TYPE_STRING,
gobject.TYPE_STRING,
gobject.TYPE_INT,
gobject.TYPE_BOOLEAN)
self.__match_treeview = gtk.TreeView(self.__match_store)
for os_match in os['matches']:
self.__match_store.append([os_match['accuracy'],
os_match['name'],
#os_match['db_line'],
0, # unsupported
True])
self.__match_column = list()
for i in range(len(OSMATCH_HEADER)):
column = gtk.TreeViewColumn(OSMATCH_HEADER[i],
self.__cell,
text=i)
self.__match_column.append(column)
self.__match_column[i].set_reorderable(True)
self.__match_column[i].set_resizable(True)
self.__match_column[i].set_attributes(self.__cell,
text=i,
editable=3)
self.__match_column[i].set_sort_column_id(i)
self.__match_treeview.append_column(self.__match_column[i])
self.__match_scroll.add_with_viewport(self.__match_treeview)
self.__os.append_page(self.__match_scroll, BWLabel(_('Match')))
if 'classes' in os:
self.__class_scroll = BWScrolledWindow()
self.__class_store = gtk.ListStore(gobject.TYPE_STRING,
gobject.TYPE_STRING,
gobject.TYPE_STRING,
gobject.TYPE_STRING,
gobject.TYPE_STRING,
gobject.TYPE_BOOLEAN)
self.__class_treeview = gtk.TreeView(self.__class_store)
for os_class in os['classes']:
os_gen = os_class.get('os_gen', '')
self.__class_store.append([os_class['accuracy'],
os_class['vendor'],
os_class['type'],
os_class['os_family'],
os_gen,
True])
self.__class_column = list()
for i in range(len(OSCLASS_HEADER)):
column = gtk.TreeViewColumn(OSCLASS_HEADER[i],
self.__cell,
text=i)
self.__class_column.append(column)
self.__class_column[i].set_reorderable(True)
self.__class_column[i].set_resizable(True)
self.__class_column[i].set_attributes(self.__cell,
text=i,
editable=5)
self.__class_column[i].set_sort_column_id(i)
self.__class_treeview.append_column(self.__class_column[i])
self.__class_scroll.add_with_viewport(self.__class_treeview)
self.__os.append_page(self.__class_scroll, BWLabel(_('Class')))
self.__fp_viewer = BWTextEditor()
self.__fp_viewer.bw_modify_font(self.__font)
self.__fp_viewer.bw_set_editable(False)
self.__fp_viewer.bw_set_text(os['fingerprint'])
self.__fp_ports = BWHBox()
self.__fp_label = BWSectionLabel(_('Used ports:'))
self.__fp_ports_list = gtk.combo_box_entry_new_text()
self.__fp_ports_list.child.set_editable(False)
self.__fp_vbox = BWVBox()
if 'used_ports' in os:
used_ports = os['used_ports']
for port in used_ports:
params = port['id'], port['protocol'], port['state']
self.__fp_ports_list.append_text(USED_PORTS_TEXT % params)
self.__fp_ports_list.set_active(0)
self.__fp_ports.bw_pack_start_noexpand_nofill(self.__fp_label)
self.__fp_ports.bw_pack_start_expand_fill(self.__fp_ports_list)
self.__fp_vbox.bw_pack_start_noexpand_nofill(self.__fp_ports)
self.__os.append_page(self.__fp_viewer, BWLabel(_('Fingerprint')))
self.__fp_vbox.bw_pack_start_expand_fill(self.__os)
self.__os_frame.bw_add(self.__fp_vbox)
self.__os_frame.set_expanded(True)
self.__vbox.bw_pack_start_noexpand_nofill(self.__general_frame)
self.__vbox.bw_pack_start_expand_fill(self.__os_frame)
self.__vbox.bw_pack_start_noexpand_nofill(self.__sequences_frame)
self.add_with_viewport(self.__vbox)
    def __create_sequences_widget(self, sequences):
        """Return a widget representing various OS detection sequences. The
        sequences argument is a dict with zero or more of the keys 'tcp',
        'ip_id', and 'tcp_ts'."""
        # sequences information widgets
        # Table layout: row 0 is the header, rows 1-3 hold the TCP, IP ID
        # and TCP timestamp sequences, row 4 carries the TCP footnote.
        table = BWTable(5, 3)
        table.attach(BWSectionLabel(_('Class')), 1, 2, 0, 1)
        table.attach(BWSectionLabel(_('Values')), 2, 3, 0, 1)
        table.attach(BWSectionLabel(_('TCP *')), 0, 1, 1, 2)
        table.attach(BWSectionLabel(_('IP ID')), 0, 1, 2, 3)
        table.attach(BWSectionLabel(_('TCP Timestamp')), 0, 1, 3, 4)
        tcp = sequences.get('tcp')
        if tcp is not None:
            tcp_class = BWLabel(tcp['class'])
            tcp_class.set_selectable(True)
            table.attach(tcp_class, 1, 2, 1, 2)
            # Observed sequence values are listed in a combo box.
            tcp_values = gtk.combo_box_entry_new_text()
            for value in tcp['values']:
                tcp_values.append_text(value)
            tcp_values.set_active(0)
            table.attach(tcp_values, 2, 3, 1, 2)
            # Footnote (the '*' in the TCP row label) showing the TCP
            # sequence index and difficulty, right-aligned across row 4.
            tcp_note = BWLabel()
            tcp_note.set_selectable(True)
            tcp_note.set_line_wrap(False)
            tcp_note.set_alignment(1.0, 0.5)
            tcp_note.set_markup(
                    TCP_SEQ_NOTE % (tcp['index'], tcp['difficulty']))
            table.attach(tcp_note, 0, 3, 4, 5)
        ip_id = sequences.get('ip_id')
        if ip_id is not None:
            ip_id_class = BWLabel(ip_id['class'])
            ip_id_class.set_selectable(True)
            table.attach(ip_id_class, 1, 2, 2, 3)
            ip_id_values = gtk.combo_box_entry_new_text()
            for value in ip_id['values']:
                ip_id_values.append_text(value)
            ip_id_values.set_active(0)
            table.attach(ip_id_values, 2, 3, 2, 3)
        tcp_ts = sequences.get('tcp_ts')
        if tcp_ts is not None:
            tcp_ts_class = BWLabel(tcp_ts['class'])
            tcp_ts_class.set_selectable(True)
            table.attach(tcp_ts_class, 1, 2, 3, 4)
            # tcp_ts['values'] may be None (unlike the other sections),
            # so the values combo is created only when data exists.
            if tcp_ts['values'] is not None:
                tcp_ts_values = gtk.combo_box_entry_new_text()
                for value in tcp_ts['values']:
                    tcp_ts_values.append_text(value)
                tcp_ts_values.set_active(0)
                table.attach(tcp_ts_values, 2, 3, 3, 4)
        return table
class TraceroutePage(BWVBox):
    """Page showing the traceroute result for a node: either a
    'no traceroute' placeholder label, or a treeview with one row per
    TTL where hops that sent no reply are rendered as <unknown>."""
    def __init__(self, node):
        """Build the page for *node*, an object exposing get_info()
        whose 'trace' entry (if any) supplies the hop data."""
        BWVBox.__init__(self)
        self.set_border_width(6)
        self.__node = node
        self.__create_widgets()
    def __create_widgets(self):
        """Create the placeholder label or the hop-list treeview."""
        trace = self.__node.get_info('trace')
        hops = None
        if trace is not None:
            hops = trace.get("hops")
        if hops is None or len(hops) == 0:
            # No traceroute data at all: just show the canned message.
            self.__trace_label = gtk.Label(NO_TRACE_TEXT)
            self.pack_start(self.__trace_label, True, True)
        else:
            # add hops
            hops = self.__node.get_info('trace')['hops']
            ttls = [int(i['ttl']) for i in hops]
            self.__cell = gtk.CellRendererText()
            self.__trace_scroll = BWScrolledWindow()
            self.__trace_scroll.set_border_width(0)
            # Store columns: ttl, rtt, ip, hostname, row colour, editable.
            self.__trace_store = gtk.ListStore(gobject.TYPE_INT,
                                               gobject.TYPE_STRING,
                                               gobject.TYPE_STRING,
                                               gobject.TYPE_STRING,
                                               gobject.TYPE_STRING,
                                               gobject.TYPE_BOOLEAN)
            self.__trace_treeview = gtk.TreeView(self.__trace_store)
            count = 0
            # Walk every TTL from 1..max so gaps (non-responding hops)
            # get an explicit placeholder row.
            # NOTE(review): the sequential hops[count] indexing assumes
            # hops are ordered by ttl — confirm upstream ordering.
            for i in range(1, max(ttls) + 1):
                if i in ttls:
                    hop = hops[count]
                    count += 1
                    self.__trace_store.append([hop['ttl'],
                                               hop['rtt'],
                                               hop['ip'],
                                               hop['hostname'],
                                               HOP_COLOR['known'],
                                               True])
                else:
                    self.__trace_store.append([i,
                                               '',
                                               _('<unknown>'),
                                               '',
                                               HOP_COLOR['unknown'],
                                               True])
            self.__trace_column = list()
            for i in range(len(TRACE_HEADER)):
                column = gtk.TreeViewColumn(TRACE_HEADER[i],
                                            self.__cell,
                                            text=i)
                self.__trace_column.append(column)
                self.__trace_column[i].set_reorderable(True)
                self.__trace_column[i].set_resizable(True)
                self.__trace_column[i].set_attributes(self.__cell,
                                                      text=i,
                                                      background=4,
                                                      editable=5)
                self.__trace_treeview.append_column(self.__trace_column[i])
            # Only the TTL column gets a sort id.
            self.__trace_column[0].set_sort_column_id(0)
            self.__trace_scroll.add_with_viewport(self.__trace_treeview)
            # Summary label: traced port, protocol and hop count.
            self.__trace_info = (self.__node.get_info('trace')['port'],
                                 self.__node.get_info('trace')['protocol'],
                                 len(self.__node.get_info('trace')['hops']))
            self.__trace_label = BWLabel(TRACE_TEXT % self.__trace_info)
            self.__trace_label.set_use_markup(True)
            self.bw_pack_start_expand_fill(self.__trace_scroll)
            self.bw_pack_start_noexpand_nofill(self.__trace_label)
|
from robottelo.decorators.func_shared.shared import ( # noqa
shared,
SharedFunctionError,
SharedFunctionException,
)
|
from __future__ import with_statement
import os.path
import time
import urllib
import re
import threading
import datetime
import random
import locale
from Cheetah.Template import Template
import cherrypy.lib
import sickbeard
from sickbeard import config, sab
from sickbeard import clients
from sickbeard import history, notifiers, processTV
from sickbeard import ui
from sickbeard import logger, helpers, exceptions, classes, db
from sickbeard import encodingKludge as ek
from sickbeard import search_queue
from sickbeard import image_cache
from sickbeard import scene_exceptions
from sickbeard import naming
from sickbeard import subtitles
from sickbeard.providers import newznab
from sickbeard.common import Quality, Overview, statusStrings
from sickbeard.common import SNATCHED, SKIPPED, UNAIRED, IGNORED, ARCHIVED, WANTED
from sickbeard.exceptions import ex
from sickbeard.webapi import Api
from lib.tvdb_api import tvdb_api
from lib.dateutil import tz
import network_timezones
import subliminal
try:
import json
except ImportError:
from lib import simplejson as json
try:
import xml.etree.cElementTree as etree
except ImportError:
import xml.etree.ElementTree as etree
from sickbeard import browser
class PageTemplate (Template):
    """Cheetah Template subclass that pre-populates every rendered page
    with the common web-UI context: web root, ports, host (including
    reverse-proxy X-Forwarded-* overrides), navigation menu and the
    live error count for the Logs & Errors title."""
    def __init__(self, *args, **KWs):
        # Resolve the template name against the bundled default interface.
        KWs['file'] = os.path.join(sickbeard.PROG_DIR, "data/interfaces/default/", KWs['file'])
        super(PageTemplate, self).__init__(*args, **KWs)
        self.sbRoot = sickbeard.WEB_ROOT
        self.sbHttpPort = sickbeard.WEB_PORT
        # NOTE(review): the https port mirrors the plain WEB_PORT here —
        # confirm a dedicated HTTPS port setting isn't expected instead.
        self.sbHttpsPort = sickbeard.WEB_PORT
        self.sbHttpsEnabled = sickbeard.ENABLE_HTTPS
        # A leading '[' means the Host header is an IPv6 literal: keep the
        # bracketed address; otherwise strip any ':port' suffix.
        if cherrypy.request.headers['Host'][0] == '[':
            self.sbHost = re.match("^\[.*\]", cherrypy.request.headers['Host'], re.X|re.M|re.S).group(0)
        else:
            self.sbHost = re.match("^[^:]+", cherrypy.request.headers['Host'], re.X|re.M|re.S).group(0)
        self.projectHomePage = "http://code.google.com/p/sickbeard/"
        # Migration warning for the retired NZBs.org provider config.
        if sickbeard.NZBS and sickbeard.NZBS_UID and sickbeard.NZBS_HASH:
            logger.log(u"NZBs.org has been replaced, please check the config to configure the new provider!", logger.ERROR)
            ui.notifications.error("NZBs.org Config Update", "NZBs.org has a new site. Please <a href=\""+sickbeard.WEB_ROOT+"/config/providers\">update your config</a> with the api key from <a href=\"http://nzbs.org/login\">http://nzbs.org</a> and then disable the old NZBs.org provider.")
        # Honour reverse-proxy forwarding headers so generated links point
        # at the proxy's host/port/scheme rather than the local ones.
        if "X-Forwarded-Host" in cherrypy.request.headers:
            self.sbHost = cherrypy.request.headers['X-Forwarded-Host']
        if "X-Forwarded-Port" in cherrypy.request.headers:
            self.sbHttpPort = cherrypy.request.headers['X-Forwarded-Port']
            self.sbHttpsPort = self.sbHttpPort
        if "X-Forwarded-Proto" in cherrypy.request.headers:
            self.sbHttpsEnabled = True if cherrypy.request.headers['X-Forwarded-Proto'] == 'https' else False
        # Append the pending error count to the Logs & Errors menu title.
        logPageTitle = 'Logs & Errors'
        if len(classes.ErrorViewer.errors):
            logPageTitle += ' ('+str(len(classes.ErrorViewer.errors))+')'
        self.logPageTitle = logPageTitle
        self.sbPID = str(sickbeard.PID)
        # Top-level navigation menu entries.
        self.menu = [
            { 'title': 'Home', 'key': 'home' },
            { 'title': 'Coming Episodes', 'key': 'comingEpisodes' },
            { 'title': 'History', 'key': 'history' },
            { 'title': 'Manage', 'key': 'manage' },
            { 'title': 'Config', 'key': 'config' },
            { 'title': logPageTitle, 'key': 'errorlogs' },
        ]
def redirect(abspath, *args, **KWs):
    """Redirect the browser to *abspath* under the configured web root.

    abspath must be an absolute path starting with '/'; it is appended to
    sickbeard.WEB_ROOT. Extra positional/keyword arguments are forwarded
    to cherrypy.HTTPRedirect, which this function always raises.

    Raises ValueError if abspath is not absolute.
    """
    # Explicit check instead of `assert`: asserts are stripped when Python
    # runs with -O, which would silently let relative paths through (and
    # the old `abspath[0]` form crashed with IndexError on empty strings).
    if not abspath.startswith('/'):
        raise ValueError("redirect() requires an absolute path starting with '/': %r" % (abspath,))
    raise cherrypy.HTTPRedirect(sickbeard.WEB_ROOT + abspath, *args, **KWs)
class TVDBWebUI:
    """Custom UI hook handed to tvdb_api: instead of prompting the user
    to pick one series, it redirects the browser to the addShow page
    carrying every candidate series id."""
    def __init__(self, config, log=None):
        self.config = config
        self.log = log
    def selectSeries(self, allSeries):
        """Redirect to /home/addShows/addShow with all candidate ids."""
        series_ids = ",".join(show['id'] for show in allSeries)
        dir_params = "".join("showDir=" + one_dir + "&" for one_dir in self.config['_showDir'])
        redirect("/home/addShows/addShow?" + dir_params + "seriesList=" + series_ids)
def _munge(string):
    """Coerce *string* (typically a rendered PageTemplate) to a UTF-8
    encoded byte string for cherrypy to serve, replacing any character
    that cannot be encoded with an XML character reference.

    Relies on the Python 2 ``unicode`` builtin.
    """
    return unicode(string).encode('utf-8', 'xmlcharrefreplace')
def _genericMessage(subject, message):
    """Render the generic message template with the given subject and
    message and return it as a UTF-8 byte string."""
    page = PageTemplate(file="genericMessage.tmpl")
    page.submenu = HomeMenu()
    page.subject = subject
    page.message = message
    return _munge(page)
def _getEpisode(show, season, episode):
if show == None or season == None or episode == None:
return "Invalid parameters"
showObj = sickbeard.helpers.findCertainShow(sickbeard.showList, int(show))
if showObj == None:
return "Show not in show list"
epObj = showObj.getEpisode(int(season), int(episode))
if epObj == None:
return "Episode couldn't be retrieved"
return epObj
# Submenu entries shown on every /manage page.
# NOTE(review): 'manage/subtitleMissed' already appears unconditionally as
# 'Manage Missed Subtitles' below, and the conditional append adds a second
# entry with the same path when subtitles are enabled — this looks like a
# duplicate; confirm which entry is intended.
ManageMenu = [
    { 'title': 'Backlog Overview', 'path': 'manage/backlogOverview' },
    { 'title': 'Manage Searches', 'path': 'manage/manageSearches' },
    { 'title': 'Episode Status Management', 'path': 'manage/episodeStatuses' },
    { 'title': 'Manage Missed Subtitles', 'path': 'manage/subtitleMissed' },
]
if sickbeard.USE_SUBTITLES:
    ManageMenu.append({ 'title': 'Missed Subtitle Management', 'path': 'manage/subtitleMissed' })
class ManageSearches:
    """Exposed cherrypy handler for /manage/manageSearches: shows the
    search/backlog scheduler state and lets the user force or pause
    searches."""
    @cherrypy.expose
    def index(self):
        """Render the search-management page with current scheduler state."""
        t = PageTemplate(file="manage_manageSearches.tmpl")
        #t.backlogPI = sickbeard.backlogSearchScheduler.action.getProgressIndicator()
        t.backlogPaused = sickbeard.searchQueueScheduler.action.is_backlog_paused() #@UndefinedVariable
        t.backlogRunning = sickbeard.searchQueueScheduler.action.is_backlog_in_progress() #@UndefinedVariable
        t.searchStatus = sickbeard.currentSearchScheduler.action.amActive #@UndefinedVariable
        t.submenu = ManageMenu
        return _munge(t)
    @cherrypy.expose
    def forceSearch(self):
        """Ask the episode search scheduler to run on its next tick, then
        bounce back to the search-management page."""
        # force it to run the next time it looks
        result = sickbeard.currentSearchScheduler.forceRun()
        if result:
            logger.log(u"Search forced")
            ui.notifications.message('Episode search started',
                          'Note: RSS feeds may not be updated if retrieved recently')
        redirect("/manage/manageSearches")
    @cherrypy.expose
    def pauseBacklog(self, paused=None):
        """Pause (paused == "1") or unpause (any other value) the backlog."""
        if paused == "1":
            sickbeard.searchQueueScheduler.action.pause_backlog() #@UndefinedVariable
        else:
            sickbeard.searchQueueScheduler.action.unpause_backlog() #@UndefinedVariable
        redirect("/manage/manageSearches")
    @cherrypy.expose
    def forceVersionCheck(self):
        """Trigger an immediate check for a new application version."""
        # force a check to see if there is a new version
        result = sickbeard.versionCheckScheduler.action.check_for_new_version(force=True) #@UndefinedVariable
        if result:
            logger.log(u"Forcing version check")
        redirect("/manage/manageSearches")
class Manage:
    """Exposed cherrypy handler for the /manage section: bulk show
    editing, episode status management, missed-subtitle management,
    backlog overview and mass update/refresh/rename/delete actions."""
    # Child handler mounted at /manage/manageSearches.
    manageSearches = ManageSearches()
    @cherrypy.expose
    def index(self):
        """Render the /manage landing page."""
        t = PageTemplate(file="manage.tmpl")
        t.submenu = ManageMenu
        return _munge(t)
    @cherrypy.expose
    def showEpisodeStatuses(self, tvdb_id, whichStatus):
        """AJAX helper: return a JSON map {season: {episode: name}} of
        every non-special episode of *tvdb_id* in status *whichStatus*."""
        myDB = db.DBConnection()
        status_list = [int(whichStatus)]
        # SNATCHED is an umbrella for several concrete snatched qualities.
        if status_list[0] == SNATCHED:
            status_list = Quality.SNATCHED + Quality.SNATCHED_PROPER + Quality.SNATCHED_FRENCH
        cur_show_results = myDB.select("SELECT season, episode, name FROM tv_episodes WHERE showid = ? AND season != 0 AND status IN ("+','.join(['?']*len(status_list))+")", [int(tvdb_id)] + status_list)
        result = {}
        for cur_result in cur_show_results:
            cur_season = int(cur_result["season"])
            cur_episode = int(cur_result["episode"])
            if cur_season not in result:
                result[cur_season] = {}
            result[cur_season][cur_episode] = cur_result["name"]
        return json.dumps(result)
    @cherrypy.expose
    def episodeStatuses(self, whichStatus=None):
        """Render the episode-status management page, optionally filtered
        to shows that have episodes in *whichStatus*."""
        if whichStatus:
            whichStatus = int(whichStatus)
            status_list = [whichStatus]
            if status_list[0] == SNATCHED:
                status_list = Quality.SNATCHED + Quality.SNATCHED_PROPER + Quality.SNATCHED_FRENCH
        else:
            status_list = []
        t = PageTemplate(file="manage_episodeStatuses.tmpl")
        t.submenu = ManageMenu
        t.whichStatus = whichStatus
        # if we have no status then this is as far as we need to go
        if not status_list:
            return _munge(t)
        myDB = db.DBConnection()
        status_results = myDB.select("SELECT show_name, tv_shows.tvdb_id as tvdb_id FROM tv_episodes, tv_shows WHERE tv_episodes.status IN ("+','.join(['?']*len(status_list))+") AND season != 0 AND tv_episodes.showid = tv_shows.tvdb_id ORDER BY show_name", status_list)
        # Per-show episode counts and names, keeping the SQL (show_name)
        # ordering via the sorted_show_ids list.
        ep_counts = {}
        show_names = {}
        sorted_show_ids = []
        for cur_status_result in status_results:
            cur_tvdb_id = int(cur_status_result["tvdb_id"])
            if cur_tvdb_id not in ep_counts:
                ep_counts[cur_tvdb_id] = 1
            else:
                ep_counts[cur_tvdb_id] += 1
            show_names[cur_tvdb_id] = cur_status_result["show_name"]
            if cur_tvdb_id not in sorted_show_ids:
                sorted_show_ids.append(cur_tvdb_id)
        t.show_names = show_names
        t.ep_counts = ep_counts
        t.sorted_show_ids = sorted_show_ids
        return _munge(t)
    @cherrypy.expose
    def changeEpisodeStatuses(self, oldStatus, newStatus, *args, **kwargs):
        """Apply *newStatus* to the episodes ticked on the status page.
        Checkbox kwargs are keyed '<tvdb_id>-<SxE or "all">'."""
        status_list = [int(oldStatus)]
        if status_list[0] == SNATCHED:
            status_list = Quality.SNATCHED + Quality.SNATCHED_PROPER + Quality.SNATCHED_FRENCH
        to_change = {}
        # make a list of all shows and their associated args
        for arg in kwargs:
            tvdb_id, what = arg.split('-')
            # we don't care about unchecked checkboxes
            if kwargs[arg] != 'on':
                continue
            if tvdb_id not in to_change:
                to_change[tvdb_id] = []
            to_change[tvdb_id].append(what)
        myDB = db.DBConnection()
        for cur_tvdb_id in to_change:
            # get a list of all the eps we want to change if they just said "all"
            if 'all' in to_change[cur_tvdb_id]:
                all_eps_results = myDB.select("SELECT season, episode FROM tv_episodes WHERE status IN ("+','.join(['?']*len(status_list))+") AND season != 0 AND showid = ?", status_list + [cur_tvdb_id])
                all_eps = [str(x["season"])+'x'+str(x["episode"]) for x in all_eps_results]
                to_change[cur_tvdb_id] = all_eps
            Home().setStatus(cur_tvdb_id, '|'.join(to_change[cur_tvdb_id]), newStatus, direct=True)
        redirect('/manage/episodeStatuses')
    @cherrypy.expose
    def showSubtitleMissed(self, tvdb_id, whichSubs):
        """AJAX helper: return a JSON map of episodes of *tvdb_id* still
        missing subtitles for *whichSubs* ('all' or a language code)."""
        myDB = db.DBConnection()
        cur_show_results = myDB.select("SELECT season, episode, name, subtitles FROM tv_episodes WHERE showid = ? AND season != 0 AND status LIKE '%4'", [int(tvdb_id)])
        result = {}
        for cur_result in cur_show_results:
            if whichSubs == 'all':
                # Skip episodes that already have every wanted language.
                if len(set(cur_result["subtitles"].split(',')).intersection(set(subtitles.wantedLanguages()))) >= len(subtitles.wantedLanguages()):
                    continue
            elif whichSubs in cur_result["subtitles"].split(','):
                continue
            cur_season = int(cur_result["season"])
            cur_episode = int(cur_result["episode"])
            if cur_season not in result:
                result[cur_season] = {}
            if cur_episode not in result[cur_season]:
                result[cur_season][cur_episode] = {}
            result[cur_season][cur_episode]["name"] = cur_result["name"]
            result[cur_season][cur_episode]["subtitles"] = ",".join(subliminal.language.Language(subtitle).alpha2 for subtitle in cur_result["subtitles"].split(',')) if not cur_result["subtitles"] == '' else ''
        return json.dumps(result)
    @cherrypy.expose
    def subtitleMissed(self, whichSubs=None):
        """Render the missed-subtitles page filtered by *whichSubs* ('all'
        or a language code); without a filter just render the picker."""
        t = PageTemplate(file="manage_subtitleMissed.tmpl")
        t.submenu = ManageMenu
        t.whichSubs = whichSubs
        if not whichSubs:
            return _munge(t)
        myDB = db.DBConnection()
        status_results = myDB.select("SELECT show_name, tv_shows.tvdb_id as tvdb_id, tv_episodes.subtitles subtitles FROM tv_episodes, tv_shows WHERE tv_shows.subtitles = 1 AND tv_episodes.status LIKE '%4' AND tv_episodes.season != 0 AND tv_episodes.showid = tv_shows.tvdb_id ORDER BY show_name", )
        ep_counts = {}
        show_names = {}
        sorted_show_ids = []
        for cur_status_result in status_results:
            if whichSubs == 'all':
                if len(set(cur_status_result["subtitles"].split(',')).intersection(set(subtitles.wantedLanguages()))) >= len(subtitles.wantedLanguages()):
                    continue
            elif whichSubs in cur_status_result["subtitles"].split(','):
                continue
            cur_tvdb_id = int(cur_status_result["tvdb_id"])
            if cur_tvdb_id not in ep_counts:
                ep_counts[cur_tvdb_id] = 1
            else:
                ep_counts[cur_tvdb_id] += 1
            show_names[cur_tvdb_id] = cur_status_result["show_name"]
            if cur_tvdb_id not in sorted_show_ids:
                sorted_show_ids.append(cur_tvdb_id)
        t.show_names = show_names
        t.ep_counts = ep_counts
        t.sorted_show_ids = sorted_show_ids
        return _munge(t)
    @cherrypy.expose
    def downloadSubtitleMissed(self, *args, **kwargs):
        """Queue subtitle downloads for the episodes ticked on the
        missed-subtitles page (same checkbox key encoding as the status
        page)."""
        to_download = {}
        # make a list of all shows and their associated args
        for arg in kwargs:
            tvdb_id, what = arg.split('-')
            # we don't care about unchecked checkboxes
            if kwargs[arg] != 'on':
                continue
            if tvdb_id not in to_download:
                to_download[tvdb_id] = []
            to_download[tvdb_id].append(what)
        for cur_tvdb_id in to_download:
            # get a list of all the eps we want to download subtitles if they just said "all"
            if 'all' in to_download[cur_tvdb_id]:
                myDB = db.DBConnection()
                all_eps_results = myDB.select("SELECT season, episode FROM tv_episodes WHERE status LIKE '%4' AND season != 0 AND showid = ?", [cur_tvdb_id])
                to_download[cur_tvdb_id] = [str(x["season"])+'x'+str(x["episode"]) for x in all_eps_results]
            for epResult in to_download[cur_tvdb_id]:
                season, episode = epResult.split('x');
                show = sickbeard.helpers.findCertainShow(sickbeard.showList, int(cur_tvdb_id))
                # Assigning to `subtitles` makes it function-local here,
                # shadowing the module-level `subtitles` import.
                subtitles = show.getEpisode(int(season), int(episode)).downloadSubtitles()
        redirect('/manage/subtitleMissed')
    @cherrypy.expose
    def backlogShow(self, tvdb_id):
        """Force a backlog search for one show, then return to the
        backlog overview page."""
        show_obj = helpers.findCertainShow(sickbeard.showList, int(tvdb_id))
        if show_obj:
            sickbeard.backlogSearchScheduler.action.searchBacklog([show_obj]) #@UndefinedVariable
        redirect("/manage/backlogOverview")
    @cherrypy.expose
    def backlogOverview(self):
        """Render per-show episode counts and categories for the backlog
        overview page."""
        t = PageTemplate(file="manage_backlogOverview.tmpl")
        t.submenu = ManageMenu
        myDB = db.DBConnection()
        showCounts = {}
        showCats = {}
        showSQLResults = {}
        for curShow in sickbeard.showList:
            epCounts = {}
            epCats = {}
            epCounts[Overview.SKIPPED] = 0
            epCounts[Overview.WANTED] = 0
            epCounts[Overview.QUAL] = 0
            epCounts[Overview.GOOD] = 0
            epCounts[Overview.UNAIRED] = 0
            epCounts[Overview.SNATCHED] = 0
            sqlResults = myDB.select("SELECT * FROM tv_episodes WHERE showid = ? ORDER BY season DESC, episode DESC", [curShow.tvdbid])
            for curResult in sqlResults:
                curEpCat = curShow.getOverview(int(curResult["status"]))
                epCats[str(curResult["season"]) + "x" + str(curResult["episode"])] = curEpCat
                epCounts[curEpCat] += 1
            showCounts[curShow.tvdbid] = epCounts
            showCats[curShow.tvdbid] = epCats
            showSQLResults[curShow.tvdbid] = sqlResults
        t.showCounts = showCounts
        t.showCats = showCats
        t.showSQLResults = showSQLResults
        return _munge(t)
    @cherrypy.expose
    def massEdit(self, toEdit=None):
        """Render the mass-edit form for the pipe-separated show ids in
        *toEdit*; for each setting, a common value is pre-selected only
        when every selected show agrees on it."""
        t = PageTemplate(file="manage_massEdit.tmpl")
        t.submenu = ManageMenu
        if not toEdit:
            redirect("/manage")
        showIDs = toEdit.split("|")
        showList = []
        for curID in showIDs:
            curID = int(curID)
            showObj = helpers.findCertainShow(sickbeard.showList, curID)
            if showObj:
                showList.append(showObj)
        # For each setting track whether all shows share one value; the
        # "last_*" holders keep the common value when they do.
        flatten_folders_all_same = True
        last_flatten_folders = None
        paused_all_same = True
        last_paused = None
        frenched_all_same = True
        last_frenched = None
        quality_all_same = True
        last_quality = None
        subtitles_all_same = True
        last_subtitles = None
        lang_all_same = True
        last_lang_metadata= None
        lang_audio_all_same = True
        last_lang_audio = None
        root_dir_list = []
        for curShow in showList:
            cur_root_dir = ek.ek(os.path.dirname, curShow._location)
            if cur_root_dir not in root_dir_list:
                root_dir_list.append(cur_root_dir)
            # if we know they're not all the same then no point even bothering
            if paused_all_same:
                # if we had a value already and this value is different then they're not all the same
                if last_paused not in (curShow.paused, None):
                    paused_all_same = False
                else:
                    last_paused = curShow.paused
            if frenched_all_same:
                # if we had a value already and this value is different then they're not all the same
                if last_frenched not in (curShow.frenchsearch, None):
                    frenched_all_same = False
                else:
                    last_frenched = curShow.frenchsearch
            if flatten_folders_all_same:
                if last_flatten_folders not in (None, curShow.flatten_folders):
                    flatten_folders_all_same = False
                else:
                    last_flatten_folders = curShow.flatten_folders
            if quality_all_same:
                if last_quality not in (None, curShow.quality):
                    quality_all_same = False
                else:
                    last_quality = curShow.quality
            if subtitles_all_same:
                if last_subtitles not in (None, curShow.subtitles):
                    subtitles_all_same = False
                else:
                    last_subtitles = curShow.subtitles
            if lang_all_same:
                if last_lang_metadata not in (None, curShow.lang):
                    lang_all_same = False
                else:
                    last_lang_metadata = curShow.lang
            if lang_audio_all_same:
                if last_lang_audio not in (None, curShow.audio_lang):
                    lang_audio_all_same = False
                else:
                    last_lang_audio = curShow.audio_lang
        t.showList = toEdit
        t.paused_value = last_paused if paused_all_same else None
        t.frenched_value = last_frenched if frenched_all_same else None
        t.flatten_folders_value = last_flatten_folders if flatten_folders_all_same else None
        t.quality_value = last_quality if quality_all_same else None
        t.subtitles_value = last_subtitles if subtitles_all_same else None
        t.root_dir_list = root_dir_list
        t.lang_value = last_lang_metadata if lang_all_same else None
        t.audio_value = last_lang_audio if lang_audio_all_same else None
        return _munge(t)
    @cherrypy.expose
    def massEditSubmit(self, paused=None, frenched=None, flatten_folders=None, quality_preset=False, subtitles=None,
                       anyQualities=[], bestQualities=[], tvdbLang=None, audioLang = None, toEdit=None, *args, **kwargs):
        """Apply the mass-edit form: translate each show's 'keep'/'enable'/
        'disable' choices into concrete settings and delegate the actual
        edit to Home().editShow().

        NOTE(review): anyQualities/bestQualities use mutable default
        arguments — harmless only if never mutated in place; confirm.
        """
        # Map old root dirs to their replacements from the
        # orig_root_dir_N / new_root_dir_N form-field pairs.
        dir_map = {}
        for cur_arg in kwargs:
            if not cur_arg.startswith('orig_root_dir_'):
                continue
            which_index = cur_arg.replace('orig_root_dir_', '')
            end_dir = kwargs['new_root_dir_'+which_index]
            dir_map[kwargs[cur_arg]] = end_dir
        showIDs = toEdit.split("|")
        errors = []
        for curShow in showIDs:
            curErrors = []
            showObj = helpers.findCertainShow(sickbeard.showList, int(curShow))
            if not showObj:
                continue
            cur_root_dir = ek.ek(os.path.dirname, showObj._location)
            cur_show_dir = ek.ek(os.path.basename, showObj._location)
            if cur_root_dir in dir_map and cur_root_dir != dir_map[cur_root_dir]:
                new_show_dir = ek.ek(os.path.join, dir_map[cur_root_dir], cur_show_dir)
                logger.log(u"For show "+showObj.name+" changing dir from "+showObj._location+" to "+new_show_dir)
            else:
                new_show_dir = showObj._location
            # Each tri-state choice becomes the 'on'/'off' strings that
            # editShow() expects; 'keep' preserves the show's own value.
            if paused == 'keep':
                new_paused = showObj.paused
            else:
                new_paused = True if paused == 'enable' else False
            new_paused = 'on' if new_paused else 'off'
            if frenched == 'keep':
                new_frenched = showObj.frenchsearch
            else:
                new_frenched = True if frenched == 'enable' else False
            new_frenched = 'on' if new_frenched else 'off'
            if flatten_folders == 'keep':
                new_flatten_folders = showObj.flatten_folders
            else:
                new_flatten_folders = True if flatten_folders == 'enable' else False
            new_flatten_folders = 'on' if new_flatten_folders else 'off'
            if subtitles == 'keep':
                new_subtitles = showObj.subtitles
            else:
                new_subtitles = True if subtitles == 'enable' else False
            new_subtitles = 'on' if new_subtitles else 'off'
            if quality_preset == 'keep':
                anyQualities, bestQualities = Quality.splitQuality(showObj.quality)
            if tvdbLang == 'None':
                new_lang = 'en'
            else:
                new_lang = tvdbLang
            if audioLang == 'keep':
                new_audio_lang = showObj.audio_lang;
            else:
                new_audio_lang = audioLang
            exceptions_list = []
            curErrors += Home().editShow(curShow, new_show_dir, anyQualities, bestQualities, exceptions_list, new_flatten_folders, new_paused, new_frenched, subtitles=new_subtitles, tvdbLang=new_lang, audio_lang=new_audio_lang, directCall=True)
            if curErrors:
                logger.log(u"Errors: "+str(curErrors), logger.ERROR)
                errors.append('<b>%s:</b>\n<ul>' % showObj.name + ' '.join(['<li>%s</li>' % error for error in curErrors]) + "</ul>")
        if len(errors) > 0:
            ui.notifications.error('%d error%s while saving changes:' % (len(errors), "" if len(errors) == 1 else "s"),
                                   " ".join(errors))
        redirect("/manage")
    @cherrypy.expose
    def massUpdate(self, toUpdate=None, toRefresh=None, toRename=None, toDelete=None, toMetadata=None, toSubtitle=None):
        """Queue update/refresh/rename/subtitle/delete actions for the
        pipe-separated show-id lists, then report what was queued."""
        if toUpdate != None:
            toUpdate = toUpdate.split('|')
        else:
            toUpdate = []
        if toRefresh != None:
            toRefresh = toRefresh.split('|')
        else:
            toRefresh = []
        if toRename != None:
            toRename = toRename.split('|')
        else:
            toRename = []
        if toSubtitle != None:
            toSubtitle = toSubtitle.split('|')
        else:
            toSubtitle = []
        if toDelete != None:
            toDelete = toDelete.split('|')
        else:
            toDelete = []
        if toMetadata != None:
            toMetadata = toMetadata.split('|')
        else:
            toMetadata = []
        errors = []
        refreshes = []
        updates = []
        renames = []
        subtitles = []
        # Visit each selected show once even if it appears in several lists.
        for curShowID in set(toUpdate+toRefresh+toRename+toSubtitle+toDelete+toMetadata):
            if curShowID == '':
                continue
            showObj = sickbeard.helpers.findCertainShow(sickbeard.showList, int(curShowID))
            if showObj == None:
                continue
            if curShowID in toDelete:
                showObj.deleteShow()
                # don't do anything else if it's being deleted
                continue
            if curShowID in toUpdate:
                try:
                    sickbeard.showQueueScheduler.action.updateShow(showObj, True) #@UndefinedVariable
                    updates.append(showObj.name)
                except exceptions.CantUpdateException, e:
                    errors.append("Unable to update show "+showObj.name+": "+ex(e))
            # don't bother refreshing shows that were updated anyway
            if curShowID in toRefresh and curShowID not in toUpdate:
                try:
                    sickbeard.showQueueScheduler.action.refreshShow(showObj) #@UndefinedVariable
                    refreshes.append(showObj.name)
                except exceptions.CantRefreshException, e:
                    errors.append("Unable to refresh show "+showObj.name+": "+ex(e))
            if curShowID in toRename:
                sickbeard.showQueueScheduler.action.renameShowEpisodes(showObj) #@UndefinedVariable
                renames.append(showObj.name)
            if curShowID in toSubtitle:
                sickbeard.showQueueScheduler.action.downloadSubtitles(showObj) #@UndefinedVariable
                subtitles.append(showObj.name)
        if len(errors) > 0:
            ui.notifications.error("Errors encountered",
                    '<br >\n'.join(errors))
        # Build an HTML summary of everything that was queued.
        messageDetail = ""
        if len(updates) > 0:
            messageDetail += "<br /><b>Updates</b><br /><ul><li>"
            messageDetail += "</li><li>".join(updates)
            messageDetail += "</li></ul>"
        if len(refreshes) > 0:
            messageDetail += "<br /><b>Refreshes</b><br /><ul><li>"
            messageDetail += "</li><li>".join(refreshes)
            messageDetail += "</li></ul>"
        if len(renames) > 0:
            messageDetail += "<br /><b>Renames</b><br /><ul><li>"
            messageDetail += "</li><li>".join(renames)
            messageDetail += "</li></ul>"
        if len(subtitles) > 0:
            messageDetail += "<br /><b>Subtitles</b><br /><ul><li>"
            messageDetail += "</li><li>".join(subtitles)
            messageDetail += "</li></ul>"
        if len(updates+refreshes+renames+subtitles) > 0:
            ui.notifications.message("The following actions were queued:",
                    messageDetail)
        redirect("/manage")
class History:
    """Exposed cherrypy handler for the /history pages: viewing and
    clearing the download history and related bookkeeping tables."""
    @cherrypy.expose
    def index(self, limit=100):
        """Render the download history, newest first; a *limit* of "0"
        means no limit."""
        myDB = db.DBConnection()
        if limit == "0":
            sqlResults = myDB.select("SELECT h.*, show_name FROM history h, tv_shows s WHERE h.showid=s.tvdb_id ORDER BY date DESC")
        else:
            sqlResults = myDB.select("SELECT h.*, show_name FROM history h, tv_shows s WHERE h.showid=s.tvdb_id ORDER BY date DESC LIMIT ?", [limit])
        t = PageTemplate(file="history.tmpl")
        t.historyResults = sqlResults
        t.limit = limit
        t.submenu = [
            { 'title': 'Clear History', 'path': 'history/clearHistory' },
            { 'title': 'Trim History', 'path': 'history/trimHistory' },
            { 'title': 'Trunc Episode Links', 'path': 'history/truncEplinks' },
            { 'title': 'Trunc Episode List Processed', 'path': 'history/truncEpListProc' },
        ]
        return _munge(t)
    @cherrypy.expose
    def clearHistory(self):
        """Delete every history row, then bounce back to /history."""
        myDB = db.DBConnection()
        myDB.action("DELETE FROM history WHERE 1=1")
        ui.notifications.message('History cleared')
        redirect("/history")
    @cherrypy.expose
    def trimHistory(self):
        """Delete history entries older than 30 days."""
        myDB = db.DBConnection()
        myDB.action("DELETE FROM history WHERE date < "+str((datetime.datetime.today()-datetime.timedelta(days=30)).strftime(history.dateFormat)))
        ui.notifications.message('Removed history entries greater than 30 days old')
        redirect("/history")
    @cherrypy.expose
    def truncEplinks(self):
        """Empty the episode_links table and report how many rows were
        removed."""
        myDB = db.DBConnection()
        nbep=myDB.select("SELECT count(*) from episode_links")
        myDB.action("DELETE FROM episode_links WHERE 1=1")
        messnum = str(nbep[0][0]) + ' history links deleted'
        ui.notifications.message('All Episode Links Removed', messnum)
        redirect("/history")
    @cherrypy.expose
    def truncEpListProc(self):
        """Empty the processed_files table and report how many rows were
        removed."""
        myDB = db.DBConnection()
        nbep=myDB.select("SELECT count(*) from processed_files")
        myDB.action("DELETE FROM processed_files WHERE 1=1")
        messnum = str(nbep[0][0]) + ' record for file processed delete'
        ui.notifications.message('Clear list of file processed', messnum)
        redirect("/history")
# Submenu entries shared by all /config pages.
ConfigMenu = [
    { 'title': 'General', 'path': 'config/general/' },
    { 'title': 'Search Settings', 'path': 'config/search/' },
    { 'title': 'Search Providers', 'path': 'config/providers/' },
    { 'title': 'Subtitles Settings','path': 'config/subtitles/' },
    { 'title': 'Post Processing', 'path': 'config/postProcessing/' },
    { 'title': 'Notifications', 'path': 'config/notifications/' },
]
class ConfigGeneral:
    """Request handlers for the 'General' configuration page."""

    @cherrypy.expose
    def index(self):
        # Render the page template with the shared config submenu attached.
        tmpl = PageTemplate(file="config_general.tmpl")
        tmpl.submenu = ConfigMenu
        return _munge(tmpl)

    @cherrypy.expose
    def saveRootDirs(self, rootDirString=None):
        # Persist the packed root-directories string exactly as posted.
        sickbeard.ROOT_DIRS = rootDirString
        sickbeard.save_config()

    @cherrypy.expose
    def saveAddShowDefaults(self, defaultFlattenFolders, defaultStatus, anyQualities, bestQualities, audio_lang, subtitles=None):
        """Store the defaults that get applied whenever a new show is added."""
        # Quality lists arrive as comma-separated ID strings; empty means none.
        initial_qualities = anyQualities.split(',') if anyQualities else []
        archive_qualities = bestQualities.split(',') if bestQualities else []

        sickbeard.STATUS_DEFAULT = int(defaultStatus)
        sickbeard.QUALITY_DEFAULT = int(Quality.combineQualities(map(int, initial_qualities), map(int, archive_qualities)))
        sickbeard.AUDIO_SHOW_DEFAULT = str(audio_lang)
        # These two checkboxes post the string "true" when ticked.
        sickbeard.FLATTEN_FOLDERS_DEFAULT = int(defaultFlattenFolders == "true")
        sickbeard.SUBTITLES_DEFAULT = int(subtitles == "true")

        sickbeard.save_config()

    @cherrypy.expose
    def generateKey(self):
        """ Return a new randomized API_KEY
        """
        try:
            from hashlib import md5
        except ImportError:
            # Pre-2.5 Python fallback.
            from md5 import md5

        # Seed the digest with the current time plus a random number so
        # every invocation yields a different key.
        hasher = md5(str(time.time()))
        hasher.update(str(random.random()))

        logger.log(u"New API generated")
        # Hex digest, e.g. 49f68a5c8493ec2c0bf489821c21fc3b
        return hasher.hexdigest()

    @cherrypy.expose
    def saveGeneral(self, log_dir=None, web_port=None, web_log=None, web_ipv6=None,
                    update_shows_on_start=None, launch_browser=None, web_username=None, use_api=None, api_key=None,
                    web_password=None, version_notify=None, enable_https=None, https_cert=None, https_key=None, sort_article=None, french_column=None):
        """Persist the general settings posted from the form, then redirect back."""
        results = []

        def flag(box):
            # HTML checkboxes post the string "on" when ticked; store 1/0.
            return 1 if box == "on" else 0

        if not config.change_LOG_DIR(log_dir):
            results += ["Unable to create directory " + os.path.normpath(log_dir) + ", log dir not changed."]

        sickbeard.UPDATE_SHOWS_ON_START = flag(update_shows_on_start)
        sickbeard.LAUNCH_BROWSER = flag(launch_browser)
        sickbeard.SORT_ARTICLE = flag(sort_article)
        sickbeard.FRENCH_COLUMN = flag(french_column)
        sickbeard.WEB_PORT = int(web_port)
        sickbeard.WEB_IPV6 = flag(web_ipv6)
        sickbeard.WEB_LOG = flag(web_log)
        sickbeard.WEB_USERNAME = web_username
        sickbeard.WEB_PASSWORD = web_password
        sickbeard.USE_API = flag(use_api)
        sickbeard.API_KEY = api_key
        sickbeard.ENABLE_HTTPS = flag(enable_https)

        if not config.change_HTTPS_CERT(https_cert):
            results += ["Unable to create directory " + os.path.normpath(https_cert) + ", https cert dir not changed."]
        if not config.change_HTTPS_KEY(https_key):
            results += ["Unable to create directory " + os.path.normpath(https_key) + ", https key dir not changed."]

        config.change_VERSION_NOTIFY(flag(version_notify))

        sickbeard.save_config()

        if results:
            for message in results:
                logger.log(message, logger.ERROR)
            ui.notifications.error('Error(s) Saving Configuration',
                                   '<br />\n'.join(results))
        else:
            ui.notifications.message('Configuration Saved', ek.ek(os.path.join, sickbeard.CONFIG_FILE) )

        redirect("/config/general/")
class ConfigSearch:
    """Request handlers for the 'Search Settings' configuration page."""

    @cherrypy.expose
    def index(self):
        # Render the search-settings template with the shared submenu.
        t = PageTemplate(file="config_search.tmpl")
        t.submenu = ConfigMenu
        return _munge(t)

    @cherrypy.expose
    def saveSearch(self, use_nzbs=None, use_torrents=None, nzb_dir=None, sab_username=None, sab_password=None,
                   sab_apikey=None, sab_category=None, sab_host=None, nzbget_password=None, nzbget_category=None, nzbget_host=None,
                   torrent_dir=None, torrent_method=None, nzb_method=None, usenet_retention=None, search_frequency=None, french_delay=None,
                   download_propers=None, download_french=None, torrent_username=None, torrent_password=None, torrent_host=None,
                   torrent_label=None, torrent_path=None, torrent_custom_url=None, torrent_ratio=None, torrent_paused=None, ignore_words=None,
                   prefered_method=None, torrent_use_ftp=None, ftp_host=None, ftp_port=None, ftp_timeout=None, ftp_passive=None, ftp_login=None,
                   ftp_password=None, ftp_remotedir=None):
        """Persist search/download settings posted from the form.

        Checkbox inputs arrive as the string "on" when ticked and are stored
        as integer flags; text inputs are stored as-is. Redirects back to the
        search config page when done.
        """
        results = []

        def flag(box):
            # HTML checkboxes post "on" when checked; normalise to 1/0.
            return 1 if box == "on" else 0

        def normalize_host(host):
            # Ensure the host has a scheme and a trailing slash.
            # BUGFIX: guard against an empty/None host — the original called
            # .endswith() unconditionally and could raise AttributeError.
            if host and not re.match('https?://.*', host):
                host = 'http://' + host
            if host and not host.endswith('/'):
                host = host + '/'
            return host

        if not config.change_NZB_DIR(nzb_dir):
            results += ["Unable to create directory " + os.path.normpath(nzb_dir) + ", dir not changed."]
        if not config.change_TORRENT_DIR(torrent_dir):
            results += ["Unable to create directory " + os.path.normpath(torrent_dir) + ", dir not changed."]

        config.change_SEARCH_FREQUENCY(search_frequency)

        # Fall back to defaults when optional fields arrive empty.
        if usenet_retention is None:
            usenet_retention = 200
        if french_delay is None:
            french_delay = 120
        if ignore_words is None:
            ignore_words = ""
        if ftp_port is None:
            ftp_port = 21
        if ftp_timeout is None:
            ftp_timeout = 120

        sickbeard.USE_NZBS = flag(use_nzbs)
        sickbeard.USE_TORRENTS = flag(use_torrents)
        sickbeard.NZB_METHOD = nzb_method
        sickbeard.PREFERED_METHOD = prefered_method
        sickbeard.TORRENT_METHOD = torrent_method
        sickbeard.USENET_RETENTION = int(usenet_retention)
        sickbeard.FRENCH_DELAY = int(french_delay)
        sickbeard.IGNORE_WORDS = ignore_words
        sickbeard.DOWNLOAD_PROPERS = flag(download_propers)
        sickbeard.DOWNLOAD_FRENCH = flag(download_french)

        sickbeard.SAB_USERNAME = sab_username
        sickbeard.SAB_PASSWORD = sab_password
        # BUGFIX: the original called .strip() directly on a None-defaulted
        # parameter and raised AttributeError when the form omitted the key.
        sickbeard.SAB_APIKEY = (sab_apikey or '').strip()
        sickbeard.SAB_CATEGORY = sab_category
        sickbeard.SAB_HOST = normalize_host(sab_host)

        sickbeard.NZBGET_PASSWORD = nzbget_password
        sickbeard.NZBGET_CATEGORY = nzbget_category
        sickbeard.NZBGET_HOST = nzbget_host

        sickbeard.TORRENT_USERNAME = torrent_username
        sickbeard.TORRENT_PASSWORD = torrent_password
        sickbeard.TORRENT_LABEL = torrent_label
        sickbeard.TORRENT_PATH = torrent_path
        sickbeard.TORRENT_CUSTOM_URL = flag(torrent_custom_url)
        sickbeard.TORRENT_RATIO = torrent_ratio
        sickbeard.TORRENT_PAUSED = flag(torrent_paused)
        sickbeard.TORRENT_HOST = normalize_host(torrent_host)

        sickbeard.USE_TORRENT_FTP = flag(torrent_use_ftp)
        sickbeard.FTP_HOST = ftp_host
        # NOTE(review): port/timeout are stored without int() conversion, as
        # in the original — downstream code presumably coerces them; confirm.
        sickbeard.FTP_PORT = ftp_port
        sickbeard.FTP_TIMEOUT = ftp_timeout
        sickbeard.FTP_PASSIVE = flag(ftp_passive)
        sickbeard.FTP_LOGIN = ftp_login
        sickbeard.FTP_PASSWORD = ftp_password
        sickbeard.FTP_DIR = ftp_remotedir

        sickbeard.save_config()

        if results:
            for message in results:
                logger.log(message, logger.ERROR)
            ui.notifications.error('Error(s) Saving Configuration',
                                   '<br />\n'.join(results))
        else:
            ui.notifications.message('Configuration Saved', ek.ek(os.path.join, sickbeard.CONFIG_FILE) )

        redirect("/config/search/")
class ConfigPostProcessing:
    """Request handlers for the 'Post Processing' configuration page."""

    @cherrypy.expose
    def index(self):
        # Render the post-processing template with the shared submenu.
        tmpl = PageTemplate(file="config_postProcessing.tmpl")
        tmpl.submenu = ConfigMenu
        return _munge(tmpl)

    @cherrypy.expose
    def savePostProcessing(self, naming_pattern=None, naming_multi_ep=None,
                           xbmc_data=None, xbmc__frodo__data=None, mediabrowser_data=None, synology_data=None, sony_ps3_data=None, wdtv_data=None, tivo_data=None,
                           use_banner=None, keep_processed_dir=None, process_method=None, process_automatically=None, process_automatically_torrent=None, rename_episodes=None,
                           move_associated_files=None, tv_download_dir=None, torrent_download_dir=None, naming_custom_abd=None, naming_abd_pattern=None):
        """Persist post-processing settings posted from the form, then redirect back."""
        results = []

        def flag(box):
            # HTML checkboxes post "on" when ticked; store integer flags.
            return 1 if box == "on" else 0

        if not config.change_TV_DOWNLOAD_DIR(tv_download_dir):
            results += ["Unable to create directory " + os.path.normpath(tv_download_dir) + ", dir not changed."]
        if not config.change_TORRENT_DOWNLOAD_DIR(torrent_download_dir):
            results += ["Unable to create directory " + os.path.normpath(torrent_download_dir) + ", dir not changed."]

        custom_abd = flag(naming_custom_abd)

        sickbeard.PROCESS_AUTOMATICALLY = flag(process_automatically)
        sickbeard.PROCESS_AUTOMATICALLY_TORRENT = flag(process_automatically_torrent)
        sickbeard.KEEP_PROCESSED_DIR = flag(keep_processed_dir)
        sickbeard.PROCESS_METHOD = process_method
        sickbeard.RENAME_EPISODES = flag(rename_episodes)
        sickbeard.MOVE_ASSOCIATED_FILES = flag(move_associated_files)
        sickbeard.NAMING_CUSTOM_ABD = custom_abd

        # Hand each metadata generator its raw config string.
        sickbeard.metadata_provider_dict['XBMC'].set_config(xbmc_data)
        sickbeard.metadata_provider_dict['XBMC (Frodo)'].set_config(xbmc__frodo__data)
        sickbeard.metadata_provider_dict['MediaBrowser'].set_config(mediabrowser_data)
        sickbeard.metadata_provider_dict['Synology'].set_config(synology_data)
        sickbeard.metadata_provider_dict['Sony PS3'].set_config(sony_ps3_data)
        sickbeard.metadata_provider_dict['WDTV'].set_config(wdtv_data)
        sickbeard.metadata_provider_dict['TIVO'].set_config(tivo_data)

        # Only accept naming patterns that validate.
        if self.isNamingValid(naming_pattern, naming_multi_ep) != "invalid":
            sickbeard.NAMING_PATTERN = naming_pattern
            sickbeard.NAMING_MULTI_EP = int(naming_multi_ep)
            sickbeard.NAMING_FORCE_FOLDERS = naming.check_force_season_folders()
        else:
            results.append("You tried saving an invalid naming config, not saving your naming settings")

        if self.isNamingValid(naming_abd_pattern, None, True) != "invalid":
            sickbeard.NAMING_ABD_PATTERN = naming_abd_pattern
        elif custom_abd:
            # Only complain when the user actually enabled custom ABD naming.
            results.append("You tried saving an invalid air-by-date naming config, not saving your air-by-date settings")

        sickbeard.USE_BANNER = flag(use_banner)

        sickbeard.save_config()

        if results:
            for message in results:
                logger.log(message, logger.ERROR)
            ui.notifications.error('Error(s) Saving Configuration',
                                   '<br />\n'.join(results))
        else:
            ui.notifications.message('Configuration Saved', ek.ek(os.path.join, sickbeard.CONFIG_FILE) )

        redirect("/config/postProcessing/")

    @cherrypy.expose
    def testNaming(self, pattern=None, multi=None, abd=False):
        """Render a sample pathname for the given naming pattern."""
        if multi is not None:
            multi = int(multi)
        sample = naming.test_name(pattern, multi, abd)
        return ek.ek(os.path.join, sample['dir'], sample['name'])

    @cherrypy.expose
    def isNamingValid(self, pattern=None, multi=None, abd=False):
        """Classify a naming pattern: 'valid', 'seasonfolders' or 'invalid'."""
        if pattern is None:
            return "invalid"

        if abd:
            # Air-by-date shows need a single check; season folders are moot.
            is_valid = naming.check_valid_abd_naming(pattern)
            require_season_folders = False
        else:
            # Validate the whole path for single- and multi-episode cases,
            # then check whether the file-name-only form forces season folders.
            is_valid = naming.check_valid_naming(pattern, multi)
            require_season_folders = naming.check_force_season_folders(pattern, multi)

        if not is_valid:
            return "invalid"
        return "seasonfolders" if require_season_folders else "valid"
class ConfigProviders:
    """Request handlers for the 'Search Providers' configuration page."""

    @cherrypy.expose
    def index(self):
        # Render the providers template with the shared submenu.
        t = PageTemplate(file="config_providers.tmpl")
        t.submenu = ConfigMenu
        return _munge(t)

    @cherrypy.expose
    def canAddNewznabProvider(self, name):
        """Return a JSON blob saying whether a newznab provider name is free."""
        if not name:
            return json.dumps({'error': 'Invalid name specified'})

        providerDict = dict(zip([x.getID() for x in sickbeard.newznabProviderList], sickbeard.newznabProviderList))

        tempProvider = newznab.NewznabProvider(name, '')

        if tempProvider.getID() in providerDict:
            return json.dumps({'error': 'Exists as '+providerDict[tempProvider.getID()].name})
        return json.dumps({'success': tempProvider.getID()})

    @cherrypy.expose
    def saveNewznabProvider(self, name, url, key=''):
        """Create or update a custom newznab provider.

        Returns 'id|configString' on success, or '0' when name/url is missing.
        """
        if not name or not url:
            return '0'

        if not url.endswith('/'):
            url = url + '/'

        providerDict = dict(zip([x.name for x in sickbeard.newznabProviderList], sickbeard.newznabProviderList))

        if name in providerDict:
            # Built-in (default) providers keep their URL/key unchanged.
            if not providerDict[name].default:
                providerDict[name].name = name
                providerDict[name].url = url
                providerDict[name].key = key
            return providerDict[name].getID() + '|' + providerDict[name].configStr()

        newProvider = newznab.NewznabProvider(name, url, key)
        sickbeard.newznabProviderList.append(newProvider)
        return newProvider.getID() + '|' + newProvider.configStr()

    @cherrypy.expose
    def deleteNewznabProvider(self, id):
        """Remove a custom newznab provider; returns '1' on success, '0' otherwise."""
        providerDict = dict(zip([x.getID() for x in sickbeard.newznabProviderList], sickbeard.newznabProviderList))

        # Refuse unknown IDs and the built-in default providers.
        if id not in providerDict or providerDict[id].default:
            return '0'

        # Delete it from the list and from the saved ordering.
        sickbeard.newznabProviderList.remove(providerDict[id])
        if id in sickbeard.PROVIDER_ORDER:
            sickbeard.PROVIDER_ORDER.remove(id)

        return '1'

    @cherrypy.expose
    def saveProviders(self, nzbmatrix_username=None, nzbmatrix_apikey=None,
                      nzbs_r_us_uid=None, nzbs_r_us_hash=None, newznab_string='',
                      omgwtfnzbs_uid=None, omgwtfnzbs_key=None,
                      tvtorrents_digest=None, tvtorrents_hash=None,
                      torrentleech_key=None,
                      btn_api_key=None,
                      newzbin_username=None, newzbin_password=None, t411_username=None, t411_password=None, ftdb_username=None, ftdb_password=None, addict_username=None, addict_password=None, fnt_username=None, fnt_password=None, libertalia_username=None, libertalia_password=None, xthor_username=None, xthor_password=None, thinkgeek_username=None, thinkgeek_password=None,
                      ethor_key=None,
                      provider_order=None):
        """Persist provider enable flags, ordering, credentials and custom newznab entries."""
        results = []

        # Guard against a missing ordering field (param defaults to None).
        provider_str_list = (provider_order or '').split()
        provider_list = []

        newznabProviderDict = dict(zip([x.getID() for x in sickbeard.newznabProviderList], sickbeard.newznabProviderList))

        finishedNames = []

        # Add/refresh every newznab provider posted from the form
        # ("name|url|key" triples separated by '!!!').
        for curNewznabProviderStr in newznab_string.split('!!!'):
            if not curNewznabProviderStr:
                continue

            curName, curURL, curKey = curNewznabProviderStr.split('|')

            newProvider = newznab.NewznabProvider(curName, curURL, curKey)
            curID = newProvider.getID()

            # If it already exists then update it in place.
            if curID in newznabProviderDict:
                newznabProviderDict[curID].name = curName
                newznabProviderDict[curID].url = curURL
                newznabProviderDict[curID].key = curKey
            else:
                sickbeard.newznabProviderList.append(newProvider)

            finishedNames.append(curID)

        # Delete any provider the form no longer lists.
        # BUGFIX: iterate over a copy — removing from the list while
        # iterating it skips the element following each removal.
        for curProvider in list(sickbeard.newznabProviderList):
            if curProvider.getID() not in finishedNames:
                sickbeard.newznabProviderList.remove(curProvider)

        # Built-in provider IDs mapped to the sickbeard module attribute that
        # holds their enabled flag (replaces a 24-branch elif chain).
        provider_attrs = {
            'nzbs_r_us': 'NZBSRUS',
            'nzbs_org_old': 'NZBS',
            'nzbmatrix': 'NZBMATRIX',
            'newzbin': 'NEWZBIN',
            'bin_req': 'BINREQ',
            'womble_s_index': 'WOMBLE',
            'nzbx': 'NZBX',
            'omgwtfnzbs': 'OMGWTFNZBS',
            'ezrss': 'EZRSS',
            'tvtorrents': 'TVTORRENTS',
            'torrentleech': 'TORRENTLEECH',
            'btn': 'BTN',
            'binnewz': 'BINNEWZ',
            't411': 'T411',
            'ftdb': 'FTDB',
            'addict': 'ADDICT',
            'fnt': 'FNT',
            'libertalia': 'LIBERTALIA',
            'xthor': 'XTHOR',
            'thinkgeek': 'THINKGEEK',
            'cpasbien': 'Cpasbien',
            'kat': 'kat',
            'piratebay': 'THEPIRATEBAY',
            'ethor': 'ETHOR',
        }

        # Apply the enable/disable flags in the order they were posted.
        for curProviderStr in provider_str_list:
            curProvider, curEnabled = curProviderStr.split(':')
            curEnabled = int(curEnabled)

            provider_list.append(curProvider)

            if curProvider in provider_attrs:
                setattr(sickbeard, provider_attrs[curProvider], curEnabled)
            elif curProvider in newznabProviderDict:
                newznabProviderDict[curProvider].enabled = bool(curEnabled)
            else:
                logger.log(u"don't know what " + curProvider + " is, skipping")

        # Credentials. BUGFIX: the stripped fields default to None, so the
        # original .strip() calls raised AttributeError when a field was
        # omitted from the form; coerce to '' first.
        sickbeard.TVTORRENTS_DIGEST = (tvtorrents_digest or '').strip()
        sickbeard.TVTORRENTS_HASH = (tvtorrents_hash or '').strip()
        sickbeard.TORRENTLEECH_KEY = (torrentleech_key or '').strip()
        sickbeard.ETHOR_KEY = (ethor_key or '').strip()
        sickbeard.BTN_API_KEY = (btn_api_key or '').strip()
        sickbeard.T411_USERNAME = t411_username
        sickbeard.T411_PASSWORD = t411_password
        sickbeard.FTDB_USERNAME = ftdb_username
        sickbeard.FTDB_PASSWORD = ftdb_password
        sickbeard.ADDICT_USERNAME = addict_username
        sickbeard.ADDICT_PASSWORD = addict_password
        sickbeard.FNT_USERNAME = fnt_username
        sickbeard.FNT_PASSWORD = fnt_password
        sickbeard.LIBERTALIA_USERNAME = libertalia_username
        sickbeard.LIBERTALIA_PASSWORD = libertalia_password
        sickbeard.XTHOR_USERNAME = xthor_username
        sickbeard.XTHOR_PASSWORD = xthor_password
        sickbeard.THINKGEEK_USERNAME = thinkgeek_username
        sickbeard.THINKGEEK_PASSWORD = thinkgeek_password
        sickbeard.NZBSRUS_UID = (nzbs_r_us_uid or '').strip()
        sickbeard.NZBSRUS_HASH = (nzbs_r_us_hash or '').strip()
        sickbeard.OMGWTFNZBS_UID = (omgwtfnzbs_uid or '').strip()
        sickbeard.OMGWTFNZBS_KEY = (omgwtfnzbs_key or '').strip()

        sickbeard.PROVIDER_ORDER = provider_list

        sickbeard.save_config()

        if results:
            for message in results:
                logger.log(message, logger.ERROR)
            ui.notifications.error('Error(s) Saving Configuration',
                                   '<br />\n'.join(results))
        else:
            ui.notifications.message('Configuration Saved', ek.ek(os.path.join, sickbeard.CONFIG_FILE) )

        redirect("/config/providers/")
class ConfigNotifications:
    """Request handlers for the 'Notifications' configuration page."""

    @cherrypy.expose
    def index(self):
        # Render the notifications template with the shared submenu.
        t = PageTemplate(file="config_notifications.tmpl")
        t.submenu = ConfigMenu
        return _munge(t)

    @cherrypy.expose
    def saveNotifications(self, use_xbmc=None, xbmc_notify_onsnatch=None, xbmc_notify_ondownload=None, xbmc_update_onlyfirst=None, xbmc_notify_onsubtitledownload=None,
                          xbmc_update_library=None, xbmc_update_full=None, xbmc_host=None, xbmc_username=None, xbmc_password=None,
                          use_plex=None, plex_notify_onsnatch=None, plex_notify_ondownload=None, plex_notify_onsubtitledownload=None, plex_update_library=None,
                          plex_server_host=None, plex_host=None, plex_username=None, plex_password=None,
                          use_growl=None, growl_notify_onsnatch=None, growl_notify_ondownload=None, growl_notify_onsubtitledownload=None, growl_host=None, growl_password=None,
                          use_prowl=None, prowl_notify_onsnatch=None, prowl_notify_ondownload=None, prowl_notify_onsubtitledownload=None, prowl_api=None, prowl_priority=0,
                          use_twitter=None, twitter_notify_onsnatch=None, twitter_notify_ondownload=None, twitter_notify_onsubtitledownload=None,
                          use_boxcar=None, boxcar_notify_onsnatch=None, boxcar_notify_ondownload=None, boxcar_notify_onsubtitledownload=None, boxcar_username=None,
                          use_boxcar2=None, boxcar2_notify_onsnatch=None, boxcar2_notify_ondownload=None, boxcar2_notify_onsubtitledownload=None, boxcar2_access_token=None, boxcar2_sound=None,
                          use_pushover=None, pushover_notify_onsnatch=None, pushover_notify_ondownload=None, pushover_notify_onsubtitledownload=None, pushover_userkey=None, pushover_prio=None,
                          use_libnotify=None, libnotify_notify_onsnatch=None, libnotify_notify_ondownload=None, libnotify_notify_onsubtitledownload=None,
                          use_nmj=None, nmj_host=None, nmj_database=None, nmj_mount=None, use_synoindex=None,
                          use_nmjv2=None, nmjv2_host=None, nmjv2_dbloc=None, nmjv2_database=None,
                          use_trakt=None, trakt_username=None, trakt_password=None, trakt_api=None, trakt_remove_watchlist=None, trakt_use_watchlist=None, trakt_start_paused=None, trakt_method_add=None,
                          use_betaseries=None, betaseries_username=None, betaseries_password=None,
                          use_synologynotifier=None, synologynotifier_notify_onsnatch=None, synologynotifier_notify_ondownload=None, synologynotifier_notify_onsubtitledownload=None,
                          use_pytivo=None, pytivo_notify_onsnatch=None, pytivo_notify_ondownload=None, pytivo_notify_onsubtitledownload=None, pytivo_update_library=None,
                          pytivo_host=None, pytivo_share_name=None, pytivo_tivo_name=None,
                          use_nma=None, nma_notify_onsnatch=None, nma_notify_ondownload=None, nma_notify_onsubtitledownload=None, nma_api=None, nma_priority=0,
                          use_pushalot=None, pushalot_notify_onsnatch=None, pushalot_notify_ondownload=None, pushalot_notify_onsubtitledownload=None, pushalot_authorizationtoken=None,
                          use_pushbullet=None, pushbullet_notify_onsnatch=None, pushbullet_notify_ondownload=None, pushbullet_notify_onsubtitledownload=None, pushbullet_api=None, pushbullet_device=None, pushbullet_device_list=None, pushbullet_channel_list=None,
                          use_mail=None, mail_username=None, mail_password=None, mail_server=None, mail_ssl=None, mail_from=None, mail_to=None, mail_notify_onsnatch=None):
        """Persist notifier settings posted from the configuration form.

        Checkbox inputs arrive as the string "on" when ticked and are stored
        as integer flags (libnotify flags are stored as booleans, as before);
        text inputs are stored unchanged. Redirects back when done.
        """
        results = []

        def flag(box):
            # HTML checkboxes post "on" when checked; normalise to 1/0.
            return 1 if box == "on" else 0

        sickbeard.USE_XBMC = flag(use_xbmc)
        sickbeard.XBMC_NOTIFY_ONSNATCH = flag(xbmc_notify_onsnatch)
        sickbeard.XBMC_NOTIFY_ONDOWNLOAD = flag(xbmc_notify_ondownload)
        sickbeard.XBMC_NOTIFY_ONSUBTITLEDOWNLOAD = flag(xbmc_notify_onsubtitledownload)
        sickbeard.XBMC_UPDATE_LIBRARY = flag(xbmc_update_library)
        sickbeard.XBMC_UPDATE_FULL = flag(xbmc_update_full)
        sickbeard.XBMC_UPDATE_ONLYFIRST = flag(xbmc_update_onlyfirst)
        sickbeard.XBMC_HOST = xbmc_host
        sickbeard.XBMC_USERNAME = xbmc_username
        sickbeard.XBMC_PASSWORD = xbmc_password

        sickbeard.USE_PLEX = flag(use_plex)
        sickbeard.PLEX_NOTIFY_ONSNATCH = flag(plex_notify_onsnatch)
        sickbeard.PLEX_NOTIFY_ONDOWNLOAD = flag(plex_notify_ondownload)
        sickbeard.PLEX_NOTIFY_ONSUBTITLEDOWNLOAD = flag(plex_notify_onsubtitledownload)
        sickbeard.PLEX_UPDATE_LIBRARY = flag(plex_update_library)
        sickbeard.PLEX_HOST = plex_host
        sickbeard.PLEX_SERVER_HOST = plex_server_host
        sickbeard.PLEX_USERNAME = plex_username
        sickbeard.PLEX_PASSWORD = plex_password

        sickbeard.USE_GROWL = flag(use_growl)
        sickbeard.GROWL_NOTIFY_ONSNATCH = flag(growl_notify_onsnatch)
        sickbeard.GROWL_NOTIFY_ONDOWNLOAD = flag(growl_notify_ondownload)
        sickbeard.GROWL_NOTIFY_ONSUBTITLEDOWNLOAD = flag(growl_notify_onsubtitledownload)
        sickbeard.GROWL_HOST = growl_host
        sickbeard.GROWL_PASSWORD = growl_password

        sickbeard.USE_PROWL = flag(use_prowl)
        sickbeard.PROWL_NOTIFY_ONSNATCH = flag(prowl_notify_onsnatch)
        sickbeard.PROWL_NOTIFY_ONDOWNLOAD = flag(prowl_notify_ondownload)
        sickbeard.PROWL_NOTIFY_ONSUBTITLEDOWNLOAD = flag(prowl_notify_onsubtitledownload)
        sickbeard.PROWL_API = prowl_api
        sickbeard.PROWL_PRIORITY = prowl_priority

        sickbeard.USE_TWITTER = flag(use_twitter)
        sickbeard.TWITTER_NOTIFY_ONSNATCH = flag(twitter_notify_onsnatch)
        sickbeard.TWITTER_NOTIFY_ONDOWNLOAD = flag(twitter_notify_ondownload)
        sickbeard.TWITTER_NOTIFY_ONSUBTITLEDOWNLOAD = flag(twitter_notify_onsubtitledownload)

        sickbeard.USE_BOXCAR = flag(use_boxcar)
        sickbeard.BOXCAR_NOTIFY_ONSNATCH = flag(boxcar_notify_onsnatch)
        sickbeard.BOXCAR_NOTIFY_ONDOWNLOAD = flag(boxcar_notify_ondownload)
        sickbeard.BOXCAR_NOTIFY_ONSUBTITLEDOWNLOAD = flag(boxcar_notify_onsubtitledownload)
        sickbeard.BOXCAR_USERNAME = boxcar_username

        sickbeard.USE_BOXCAR2 = flag(use_boxcar2)
        sickbeard.BOXCAR2_NOTIFY_ONSNATCH = flag(boxcar2_notify_onsnatch)
        sickbeard.BOXCAR2_NOTIFY_ONDOWNLOAD = flag(boxcar2_notify_ondownload)
        sickbeard.BOXCAR2_NOTIFY_ONSUBTITLEDOWNLOAD = flag(boxcar2_notify_onsubtitledownload)
        sickbeard.BOXCAR2_ACCESS_TOKEN = boxcar2_access_token
        sickbeard.BOXCAR2_SOUND = boxcar2_sound

        sickbeard.USE_PUSHOVER = flag(use_pushover)
        sickbeard.PUSHOVER_NOTIFY_ONSNATCH = flag(pushover_notify_onsnatch)
        sickbeard.PUSHOVER_NOTIFY_ONDOWNLOAD = flag(pushover_notify_ondownload)
        sickbeard.PUSHOVER_NOTIFY_ONSUBTITLEDOWNLOAD = flag(pushover_notify_onsubtitledownload)
        sickbeard.PUSHOVER_USERKEY = pushover_userkey
        sickbeard.PUSHOVER_PRIO = pushover_prio

        # Libnotify flags have always been stored as booleans, not ints.
        sickbeard.USE_LIBNOTIFY = use_libnotify == "on"
        sickbeard.LIBNOTIFY_NOTIFY_ONSNATCH = libnotify_notify_onsnatch == "on"
        sickbeard.LIBNOTIFY_NOTIFY_ONDOWNLOAD = libnotify_notify_ondownload == "on"
        sickbeard.LIBNOTIFY_NOTIFY_ONSUBTITLEDOWNLOAD = libnotify_notify_onsubtitledownload == "on"

        sickbeard.USE_NMJ = flag(use_nmj)
        sickbeard.NMJ_HOST = nmj_host
        sickbeard.NMJ_DATABASE = nmj_database
        sickbeard.NMJ_MOUNT = nmj_mount

        sickbeard.USE_SYNOINDEX = flag(use_synoindex)

        sickbeard.USE_SYNOLOGYNOTIFIER = flag(use_synologynotifier)
        sickbeard.SYNOLOGYNOTIFIER_NOTIFY_ONSNATCH = flag(synologynotifier_notify_onsnatch)
        sickbeard.SYNOLOGYNOTIFIER_NOTIFY_ONDOWNLOAD = flag(synologynotifier_notify_ondownload)
        sickbeard.SYNOLOGYNOTIFIER_NOTIFY_ONSUBTITLEDOWNLOAD = flag(synologynotifier_notify_onsubtitledownload)

        sickbeard.USE_NMJv2 = flag(use_nmjv2)
        sickbeard.NMJv2_HOST = nmjv2_host
        sickbeard.NMJv2_DATABASE = nmjv2_database
        sickbeard.NMJv2_DBLOC = nmjv2_dbloc

        sickbeard.USE_TRAKT = flag(use_trakt)
        sickbeard.TRAKT_USERNAME = trakt_username
        sickbeard.TRAKT_PASSWORD = trakt_password
        sickbeard.TRAKT_API = trakt_api
        sickbeard.TRAKT_REMOVE_WATCHLIST = flag(trakt_remove_watchlist)
        sickbeard.TRAKT_USE_WATCHLIST = flag(trakt_use_watchlist)
        sickbeard.TRAKT_METHOD_ADD = trakt_method_add
        sickbeard.TRAKT_START_PAUSED = flag(trakt_start_paused)

        sickbeard.USE_BETASERIES = flag(use_betaseries)
        sickbeard.BETASERIES_USERNAME = betaseries_username
        sickbeard.BETASERIES_PASSWORD = betaseries_password

        sickbeard.USE_PYTIVO = flag(use_pytivo)
        # BUGFIX: the original compared the already-converted 1/0 value to the
        # string "off" (always False), so these three flags could never be
        # enabled from the form. Store the checkbox flag directly.
        sickbeard.PYTIVO_NOTIFY_ONSNATCH = flag(pytivo_notify_onsnatch)
        sickbeard.PYTIVO_NOTIFY_ONDOWNLOAD = flag(pytivo_notify_ondownload)
        sickbeard.PYTIVO_NOTIFY_ONSUBTITLEDOWNLOAD = flag(pytivo_notify_onsubtitledownload)
        sickbeard.PYTIVO_UPDATE_LIBRARY = flag(pytivo_update_library)
        sickbeard.PYTIVO_HOST = pytivo_host
        sickbeard.PYTIVO_SHARE_NAME = pytivo_share_name
        sickbeard.PYTIVO_TIVO_NAME = pytivo_tivo_name

        sickbeard.USE_NMA = flag(use_nma)
        sickbeard.NMA_NOTIFY_ONSNATCH = flag(nma_notify_onsnatch)
        sickbeard.NMA_NOTIFY_ONDOWNLOAD = flag(nma_notify_ondownload)
        sickbeard.NMA_NOTIFY_ONSUBTITLEDOWNLOAD = flag(nma_notify_onsubtitledownload)
        sickbeard.NMA_API = nma_api
        sickbeard.NMA_PRIORITY = nma_priority

        sickbeard.USE_MAIL = flag(use_mail)
        sickbeard.MAIL_USERNAME = mail_username
        sickbeard.MAIL_PASSWORD = mail_password
        sickbeard.MAIL_SERVER = mail_server
        sickbeard.MAIL_SSL = flag(mail_ssl)
        sickbeard.MAIL_FROM = mail_from
        sickbeard.MAIL_TO = mail_to
        sickbeard.MAIL_NOTIFY_ONSNATCH = flag(mail_notify_onsnatch)

        sickbeard.USE_PUSHALOT = flag(use_pushalot)
        sickbeard.PUSHALOT_NOTIFY_ONSNATCH = flag(pushalot_notify_onsnatch)
        sickbeard.PUSHALOT_NOTIFY_ONDOWNLOAD = flag(pushalot_notify_ondownload)
        sickbeard.PUSHALOT_NOTIFY_ONSUBTITLEDOWNLOAD = flag(pushalot_notify_onsubtitledownload)
        sickbeard.PUSHALOT_AUTHORIZATIONTOKEN = pushalot_authorizationtoken

        sickbeard.USE_PUSHBULLET = flag(use_pushbullet)
        sickbeard.PUSHBULLET_NOTIFY_ONSNATCH = flag(pushbullet_notify_onsnatch)
        sickbeard.PUSHBULLET_NOTIFY_ONDOWNLOAD = flag(pushbullet_notify_ondownload)
        sickbeard.PUSHBULLET_NOTIFY_ONSUBTITLEDOWNLOAD = flag(pushbullet_notify_onsubtitledownload)
        sickbeard.PUSHBULLET_API = pushbullet_api
        # NOTE(review): pushbullet_device is accepted but unused; the selected
        # device appears to come in via pushbullet_device_list — confirm.
        sickbeard.PUSHBULLET_DEVICE = pushbullet_device_list
        sickbeard.PUSHBULLET_CHANNEL = pushbullet_channel_list

        sickbeard.save_config()

        if results:
            for message in results:
                logger.log(message, logger.ERROR)
            ui.notifications.error('Error(s) Saving Configuration',
                                   '<br />\n'.join(results))
        else:
            ui.notifications.message('Configuration Saved', ek.ek(os.path.join, sickbeard.CONFIG_FILE) )

        redirect("/config/notifications/")
class ConfigSubtitles:
    """CherryPy controller for the /config/subtitles pages.

    ``index`` renders the form; ``saveSubtitles`` persists the posted
    settings into the ``sickbeard`` module-level config and saves it.
    """

    @cherrypy.expose
    def index(self):
        # Render the subtitles settings template with the shared config submenu.
        t = PageTemplate(file="config_subtitles.tmpl")
        t.submenu = ConfigMenu
        return _munge(t)

    @cherrypy.expose
    def saveSubtitles(self, use_subtitles=None, subsnewasold=None, subtitles_plugins=None, subtitles_languages=None, subtitles_dir=None, subtitles_dir_sub=None, subsnolang = None, service_order=None, subtitles_history=None, subtitles_clean_hi=None, subtitles_clean_team=None, subtitles_clean_music=None, subtitles_clean_punc=None):
        """Persist the subtitle settings posted from the config form.

        Checkbox parameters arrive as the string "on" when ticked and are
        coerced to 1/0 below.  Starts or stops the subtitle-finder thread to
        match the new ``use_subtitles`` value, then saves config and
        redirects back to the form.
        """
        # collected error strings; reported to the user at the end
        results = []

        if use_subtitles == "on":
            use_subtitles = 1
            # (re)start the finder thread if it is not currently alive
            if sickbeard.subtitlesFinderScheduler.thread == None or not sickbeard.subtitlesFinderScheduler.thread.isAlive():
                sickbeard.subtitlesFinderScheduler.initThread()
        else:
            use_subtitles = 0
            # ask the finder thread to stop and give it up to 5s to exit
            sickbeard.subtitlesFinderScheduler.abort = True
            logger.log(u"Waiting for the SUBTITLESFINDER thread to exit")
            try:
                sickbeard.subtitlesFinderScheduler.thread.join(5)
            except:
                # thread may already be gone; best-effort shutdown
                pass

        if subtitles_history == "on":
            subtitles_history = 1
        else:
            subtitles_history = 0

        if subtitles_dir_sub == "on":
            subtitles_dir_sub = 1
        else:
            subtitles_dir_sub = 0

        if subsnewasold == "on":
            subsnewasold = 1
        else:
            subsnewasold = 0

        if subsnolang == "on":
            subsnolang = 1
        else:
            subsnolang = 0

        sickbeard.USE_SUBTITLES = use_subtitles
        sickbeard.SUBSNEWASOLD = subsnewasold
        # map the comma-separated language names to their alpha2 codes;
        # empty input stores '' rather than an empty list
        sickbeard.SUBTITLES_LANGUAGES = [lang.alpha2 for lang in subtitles.isValidLanguage(subtitles_languages.replace(' ', '').split(','))] if subtitles_languages != '' else ''
        sickbeard.SUBTITLES_DIR = subtitles_dir
        sickbeard.SUBTITLES_DIR_SUB = subtitles_dir_sub
        sickbeard.SUBSNOLANG = subsnolang
        sickbeard.SUBTITLES_HISTORY = subtitles_history

        # Subtitles services: service_order is whitespace-separated
        # "name:enabled" pairs preserving the user's ordering
        services_str_list = service_order.split()
        subtitles_services_list = []
        subtitles_services_enabled = []
        for curServiceStr in services_str_list:
            curService, curEnabled = curServiceStr.split(':')
            subtitles_services_list.append(curService)
            subtitles_services_enabled.append(int(curEnabled))
        sickbeard.SUBTITLES_SERVICES_LIST = subtitles_services_list
        sickbeard.SUBTITLES_SERVICES_ENABLED = subtitles_services_enabled

        #Subtitles Cleansing
        if subtitles_clean_hi == "on":
            subtitles_clean_hi = 1
        else:
            subtitles_clean_hi = 0

        if subtitles_clean_team == "on":
            subtitles_clean_team = 1
        else:
            subtitles_clean_team = 0

        if subtitles_clean_music == "on":
            subtitles_clean_music = 1
        else:
            subtitles_clean_music = 0

        if subtitles_clean_punc == "on":
            subtitles_clean_punc = 1
        else:
            subtitles_clean_punc = 0

        sickbeard.SUBTITLES_CLEAN_HI = subtitles_clean_hi
        sickbeard.SUBTITLES_CLEAN_TEAM = subtitles_clean_team
        sickbeard.SUBTITLES_CLEAN_MUSIC = subtitles_clean_music
        sickbeard.SUBTITLES_CLEAN_PUNC = subtitles_clean_punc

        sickbeard.save_config()

        if len(results) > 0:
            for x in results:
                logger.log(x, logger.ERROR)
            ui.notifications.error('Error(s) Saving Configuration',
                        '<br />\n'.join(results))
        else:
            ui.notifications.message('Configuration Saved', ek.ek(os.path.join, sickbeard.CONFIG_FILE) )

        redirect("/config/subtitles/")
class Config:
    """Root controller for /config, exposing the per-area sub-controllers."""

    @cherrypy.expose
    def index(self):
        # Landing page for the configuration section.
        page = PageTemplate(file="config.tmpl")
        page.submenu = ConfigMenu
        return _munge(page)

    # sub-controllers mounted under /config/<name>/
    general = ConfigGeneral()
    search = ConfigSearch()
    postProcessing = ConfigPostProcessing()
    providers = ConfigProviders()
    notifications = ConfigNotifications()
    subtitles = ConfigSubtitles()
def haveXBMC():
    """Return a truthy value when XBMC notifications and library updates are both enabled."""
    use_it = sickbeard.USE_XBMC
    return use_it and sickbeard.XBMC_UPDATE_LIBRARY
def havePLEX():
    """Return a truthy value when Plex notifications and library updates are both enabled."""
    use_it = sickbeard.USE_PLEX
    return use_it and sickbeard.PLEX_UPDATE_LIBRARY
def HomeMenu():
    """Build the Home section submenu.

    Entries with a 'requires' callable are only shown when it returns true;
    'confirm' entries prompt the user before following the link.
    """
    pid_arg = str(sickbeard.PID)
    menu = [
        {'title': 'Add Shows', 'path': 'home/addShows/'},
        {'title': 'Manual Post-Processing', 'path': 'home/postprocess/'},
        {'title': 'Update XBMC', 'path': 'home/updateXBMC/', 'requires': haveXBMC},
        {'title': 'Update Plex', 'path': 'home/updatePLEX/', 'requires': havePLEX},
        {'title': 'Update', 'path': 'manage/manageSearches/forceVersionCheck', 'confirm': True},
        # PID is passed so a stale page cannot restart/stop a newer process
        {'title': 'Restart', 'path': 'home/restart/?pid=' + pid_arg, 'confirm': True},
        {'title': 'Shutdown', 'path': 'home/shutdown/?pid=' + pid_arg, 'confirm': True},
    ]
    return menu
class HomePostProcess:
    """Controller for the manual post-processing pages under /home/postprocess."""

    @cherrypy.expose
    def index(self):
        # Render the manual post-processing form.
        t = PageTemplate(file="home_postprocess.tmpl")
        t.submenu = HomeMenu()
        return _munge(t)

    @cherrypy.expose
    def processEpisode(self, dir=None, nzbName=None, jobName=None, quiet=None):
        """Run post-processing on ``dir``.

        With quiet=1 the raw result text is returned (for scripted callers);
        otherwise the result is HTML-ized and wrapped in the generic message
        page.  ``jobName`` is accepted but unused here.
        """
        if not dir:
            # nothing to process - back to the form
            redirect("/home/postprocess")
        else:
            result = processTV.processDir(dir, nzbName)
            if quiet != None and int(quiet) == 1:
                return result

            result = result.replace("\n","<br />\n")
            return _genericMessage("Postprocessing results", result)
class NewHomeAddShows:
    """Controller for the add-show workflow under /home/addShows.

    Supports searching TheTVDB, mass-adding from root folders, and the
    per-show "new show" wizard.  This fork defaults the language to 'fr'.
    """

    @cherrypy.expose
    def index(self):
        # Landing page listing the add-show options.
        t = PageTemplate(file="home_addShows.tmpl")
        t.submenu = HomeMenu()
        return _munge(t)

    @cherrypy.expose
    def getTVDBLanguages(self):
        """Return the TVDB language list as JSON, with 'fr' forced first."""
        result = tvdb_api.Tvdb().config['valid_languages']

        # Make sure list is sorted alphabetically but 'fr' is in front
        if 'fr' in result:
            del result[result.index('fr')]
        result.sort()
        result.insert(0, 'fr')

        return json.dumps({'results': result})

    @cherrypy.expose
    def sanitizeFileName(self, name):
        # AJAX helper: expose the filename sanitizer to the browser.
        return helpers.sanitizeFileName(name)

    @cherrypy.expose
    def searchTVDBForShowName(self, name, lang="fr"):
        """Search TheTVDB for ``name`` and return (id, name, first-aired) tuples as JSON."""
        if not lang or lang == 'null':
            lang = "fr"

        baseURL = "http://thetvdb.com/api/GetSeries.php?"
        nameUTF8 = name.encode('utf-8')

        logger.log(u"Trying to find Show on thetvdb.com with: " + nameUTF8.decode('utf-8'), logger.DEBUG)

        # Use each word in the show's name as a possible search term
        keywords = nameUTF8.split(' ')

        # Insert the whole show's name as the first search term so best results are first
        # ex: keywords = ['Some Show Name', 'Some', 'Show', 'Name']
        if len(keywords) > 1:
            keywords.insert(0, nameUTF8)

        # Query the TVDB for each search term and build the list of results
        results = []

        for searchTerm in keywords:
            params = {'seriesname': searchTerm,
                      'language': lang}

            finalURL = baseURL + urllib.urlencode(params)

            logger.log(u"Searching for Show with searchterm: \'" + searchTerm.decode('utf-8') + u"\' on URL " + finalURL, logger.DEBUG)
            urlData = helpers.getURL(finalURL)

            if urlData is None:
                # When urlData is None, trouble connecting to TVDB, don't try the rest of the keywords
                logger.log(u"Unable to get URL: " + finalURL, logger.ERROR)
                break
            else:
                try:
                    seriesXML = etree.ElementTree(etree.XML(urlData))
                    series = seriesXML.getiterator('Series')
                except Exception, e:
                    # use finalURL in log, because urlData can be too much information
                    logger.log(u"Unable to parse XML for some reason: " + ex(e) + " from XML: " + finalURL, logger.ERROR)
                    series = ''

                # add each result to our list
                for curSeries in series:
                    tvdb_id = int(curSeries.findtext('seriesid'))

                    # don't add duplicates
                    if tvdb_id in [x[0] for x in results]:
                        continue

                    results.append((tvdb_id, curSeries.findtext('SeriesName'), curSeries.findtext('FirstAired')))

        # numeric language id the frontend needs for artwork/API URLs
        lang_id = tvdb_api.Tvdb().config['langabbv_to_id'][lang]

        return json.dumps({'results': results, 'langid': lang_id})

    @cherrypy.expose
    def massAddTable(self, rootDir=None):
        """Build the table of candidate show folders found under the given root dir(s)."""
        t = PageTemplate(file="home_massAddTable.tmpl")
        t.submenu = HomeMenu()

        myDB = db.DBConnection()

        if not rootDir:
            return "No folders selected."
        elif type(rootDir) != list:
            # a single root dir arrives as a bare string
            root_dirs = [rootDir]
        else:
            root_dirs = rootDir

        root_dirs = [urllib.unquote_plus(x) for x in root_dirs]

        # move the configured default root dir to the front of the list
        default_index = int(sickbeard.ROOT_DIRS.split('|')[0])
        if len(root_dirs) > default_index:
            tmp = root_dirs[default_index]
            if tmp in root_dirs:
                root_dirs.remove(tmp)
                root_dirs = [tmp]+root_dirs

        dir_list = []

        for root_dir in root_dirs:
            try:
                file_list = ek.ek(os.listdir, root_dir)
            except:
                # unreadable/missing root dir: skip it silently
                continue

            for cur_file in file_list:

                cur_path = ek.ek(os.path.normpath, ek.ek(os.path.join, root_dir, cur_file))
                if not ek.ek(os.path.isdir, cur_path):
                    continue

                cur_dir = {
                           'dir': cur_path,
                           'display_dir': '<b>'+ek.ek(os.path.dirname, cur_path)+os.sep+'</b>'+ek.ek(os.path.basename, cur_path),
                           }

                # see if the folder is in XBMC already
                dirResults = myDB.select("SELECT * FROM tv_shows WHERE location = ?", [cur_path])

                if dirResults:
                    cur_dir['added_already'] = True
                else:
                    cur_dir['added_already'] = False

                dir_list.append(cur_dir)

                # try to recover a TVDB id / show name from metadata files in the folder
                tvdb_id = ''
                show_name = ''
                for cur_provider in sickbeard.metadata_provider_dict.values():
                    (tvdb_id, show_name) = cur_provider.retrieveShowMetadata(cur_path)
                    if tvdb_id and show_name:
                        break

                cur_dir['existing_info'] = (tvdb_id, show_name)

                # already in our show list -> flag it regardless of DB location match
                if tvdb_id and helpers.findCertainShow(sickbeard.showList, tvdb_id):
                    cur_dir['added_already'] = True

        t.dirList = dir_list

        return _munge(t)

    @cherrypy.expose
    def newShow(self, show_to_add=None, other_shows=None):
        """
        Display the new show page which collects a tvdb id, folder, and extra options and
        posts them to addNewShow
        """
        t = PageTemplate(file="home_newShow.tmpl")
        t.submenu = HomeMenu()

        # show_to_add is a 'dir|tvdbid|name' triple (see split_extra_show)
        show_dir, tvdb_id, show_name = self.split_extra_show(show_to_add)

        if tvdb_id and show_name:
            use_provided_info = True
        else:
            use_provided_info = False

        # tell the template whether we're giving it show name & TVDB ID
        t.use_provided_info = use_provided_info

        # use the given show_dir for the tvdb search if available
        if not show_dir:
            t.default_show_name = ''
        elif not show_name:
            t.default_show_name = ek.ek(os.path.basename, ek.ek(os.path.normpath, show_dir)).replace('.',' ')
        else:
            t.default_show_name = show_name

        # carry a list of other dirs if given
        if not other_shows:
            other_shows = []
        elif type(other_shows) != list:
            other_shows = [other_shows]

        if use_provided_info:
            t.provided_tvdb_id = tvdb_id
            t.provided_tvdb_name = show_name

        t.provided_show_dir = show_dir
        t.other_shows = other_shows

        return _munge(t)

    @cherrypy.expose
    def addNewShow(self, whichSeries=None, tvdbLang="fr", rootDir=None, defaultStatus=None,
                   anyQualities=None, bestQualities=None, flatten_folders=None, subtitles=None, fullShowPath=None,
                   other_shows=None, skipShow=None, audio_lang=None):
        """
        Receive tvdb id, dir, and other options and create a show from them. If extra show dirs are
        provided then it forwards back to newShow, if not it goes to /home.
        """

        # grab our list of other dirs if given
        if not other_shows:
            other_shows = []
        elif type(other_shows) != list:
            other_shows = [other_shows]

        def finishAddShow():
            # if there are no extra shows then go home
            if not other_shows:
                redirect('/home')

            # peel off the next one
            next_show_dir = other_shows[0]
            rest_of_show_dirs = other_shows[1:]

            # go to add the next show
            return self.newShow(next_show_dir, rest_of_show_dirs)

        # if we're skipping then behave accordingly
        if skipShow:
            return finishAddShow()

        # sanity check on our inputs
        if (not rootDir and not fullShowPath) or not whichSeries:
            return "Missing params, no tvdb id or folder:"+repr(whichSeries)+" and "+repr(rootDir)+"/"+repr(fullShowPath)

        # figure out what show we're adding and where
        # NOTE(review): str.partition always returns a 3-tuple, so the len check
        # below can never fire; a malformed value surfaces as the int() raising
        series_pieces = whichSeries.partition('|')
        if len(series_pieces) < 3:
            return "Error with show selection."
        tvdb_id = int(series_pieces[0])
        show_name = series_pieces[2]

        # use the whole path if it's given, or else append the show name to the root dir to get the full show path
        if fullShowPath:
            show_dir = ek.ek(os.path.normpath, fullShowPath)
        else:
            show_dir = ek.ek(os.path.join, rootDir, helpers.sanitizeFileName(show_name))

        # blanket policy - if the dir exists you should have used "add existing show" numbnuts
        if ek.ek(os.path.isdir, show_dir) and not fullShowPath:
            ui.notifications.error("Unable to add show", "Folder "+show_dir+" exists already")
            redirect('/home/addShows/existingShows')

        # don't create show dir if config says not to
        if sickbeard.ADD_SHOWS_WO_DIR:
            logger.log(u"Skipping initial creation of "+show_dir+" due to config.ini setting")
        else:
            dir_exists = helpers.makeDir(show_dir)
            if not dir_exists:
                logger.log(u"Unable to create the folder "+show_dir+", can't add the show", logger.ERROR)
                ui.notifications.error("Unable to add show", "Unable to create the folder "+show_dir+", can't add the show")
                redirect("/home")
            else:
                helpers.chmodAsParent(show_dir)

        # prepare the inputs for passing along
        if flatten_folders == "on":
            flatten_folders = 1
        else:
            flatten_folders = 0

        if subtitles == "on":
            subtitles = 1
        else:
            subtitles = 0

        if not anyQualities:
            anyQualities = []
        if not bestQualities:
            bestQualities = []
        if type(anyQualities) != list:
            anyQualities = [anyQualities]
        if type(bestQualities) != list:
            bestQualities = [bestQualities]
        newQuality = Quality.combineQualities(map(int, anyQualities), map(int, bestQualities))

        # add the show
        sickbeard.showQueueScheduler.action.addShow(tvdb_id, show_dir, int(defaultStatus), newQuality, flatten_folders, tvdbLang, subtitles, audio_lang) #@UndefinedVariable
        ui.notifications.message('Show added', 'Adding the specified show into '+show_dir)

        return finishAddShow()

    @cherrypy.expose
    def existingShows(self):
        """
        Prints out the page to add existing shows from a root dir
        """
        t = PageTemplate(file="home_addExistingShow.tmpl")
        t.submenu = HomeMenu()

        return _munge(t)

    def split_extra_show(self, extra_show):
        """Split a 'dir|tvdbid|name' string into its parts.

        Returns (dir, None, None) when the string has no id/name, and
        (None, None, None) for empty input.  Show names containing '|' are
        re-joined.
        """
        if not extra_show:
            return (None, None, None)
        split_vals = extra_show.split('|')
        if len(split_vals) < 3:
            return (extra_show, None, None)
        show_dir = split_vals[0]
        tvdb_id = split_vals[1]
        show_name = '|'.join(split_vals[2:])

        return (show_dir, tvdb_id, show_name)

    @cherrypy.expose
    def addExistingShows(self, shows_to_add=None, promptForSettings=None):
        """
        Receives a dir list and add them. Adds the ones with given TVDB IDs first, then forwards
        along to the newShow page.
        """

        # grab a list of other shows to add, if provided
        if not shows_to_add:
            shows_to_add = []
        elif type(shows_to_add) != list:
            shows_to_add = [shows_to_add]

        shows_to_add = [urllib.unquote_plus(x) for x in shows_to_add]

        if promptForSettings == "on":
            promptForSettings = 1
        else:
            promptForSettings = 0

        tvdb_id_given = []
        dirs_only = []
        # separate all the ones with TVDB IDs
        for cur_dir in shows_to_add:
            if not '|' in cur_dir:
                dirs_only.append(cur_dir)
            else:
                show_dir, tvdb_id, show_name = self.split_extra_show(cur_dir)
                if not show_dir or not tvdb_id or not show_name:
                    continue
                tvdb_id_given.append((show_dir, int(tvdb_id), show_name))

        # if they want me to prompt for settings then I will just carry on to the newShow page
        if promptForSettings and shows_to_add:
            return self.newShow(shows_to_add[0], shows_to_add[1:])

        # if they don't want me to prompt for settings then I can just add all the nfo shows now
        num_added = 0
        for cur_show in tvdb_id_given:
            show_dir, tvdb_id, show_name = cur_show

            # add the show using all-default settings (and 'fr' as the language)
            sickbeard.showQueueScheduler.action.addShow(tvdb_id, show_dir, int(sickbeard.STATUS_DEFAULT), sickbeard.QUALITY_DEFAULT, sickbeard.FLATTEN_FOLDERS_DEFAULT,"fr", sickbeard.SUBTITLES_DEFAULT, sickbeard.AUDIO_SHOW_DEFAULT) #@UndefinedVariable
            num_added += 1

        if num_added:
            ui.notifications.message("Shows Added", "Automatically added "+str(num_added)+" from their existing metadata files")

        # if we're done then go home
        if not dirs_only:
            redirect('/home')

        # for the remaining shows we need to prompt for each one, so forward this on to the newShow page
        return self.newShow(dirs_only[0], dirs_only[1:])
# Submenu for the error-log pages; the raw-log viewer entry is intentionally
# disabled (viewlog is still reachable directly by URL).
ErrorLogsMenu = [
    { 'title': 'Clear Errors', 'path': 'errorlogs/clearerrors' },
    #{ 'title': 'View Log', 'path': 'errorlogs/viewlog' },
]
class ErrorLogs:
    """Controller for the /errorlogs pages: error list, clearing, and log viewing."""

    @cherrypy.expose
    def index(self):
        # Render the collected error list.
        t = PageTemplate(file="errorlogs.tmpl")
        t.submenu = ErrorLogsMenu
        return _munge(t)

    @cherrypy.expose
    def clearerrors(self):
        # Drop all collected errors and return to the list page.
        classes.ErrorViewer.clear()
        redirect("/errorlogs")

    @cherrypy.expose
    def viewlog(self, minLevel=logger.MESSAGE, maxLines=500):
        """Render up to ``maxLines`` recent log lines at or above ``minLevel``.

        Scans the log file backwards (newest first).  Continuation lines
        (those not matching the timestamp pattern) are kept only when they
        follow an accepted line, and are prefixed with "AA".
        """
        t = PageTemplate(file="viewlogs.tmpl")
        t.submenu = ErrorLogsMenu

        minLevel = int(minLevel)

        data = []
        if os.path.isfile(logger.sb_log_instance.log_file):
            f = open(logger.sb_log_instance.log_file)
            data = f.readlines()
            f.close()

        # matches "Month-DD HH:MM:SS LEVEL message"; group 6 is the level name
        regex = "^(\w+).?\-(\d\d)\s+(\d\d)\:(\d\d):(\d\d)\s+([A-Z]+)\s+(.*)$"

        finalData = []

        numLines = 0
        # whether the previous scanned line was accepted (controls whether
        # continuation lines are kept)
        lastLine = False
        numToShow = min(maxLines, len(data))

        for x in reversed(data):

            x = x.decode('utf-8')
            match = re.match(regex, x)

            if match:
                level = match.group(6)
                if level not in logger.reverseNames:
                    lastLine = False
                    continue

                if logger.reverseNames[level] >= minLevel:
                    lastLine = True
                    finalData.append(x)
                else:
                    lastLine = False
                    continue

            elif lastLine:
                # continuation of an accepted multi-line entry
                finalData.append("AA"+x)

            numLines += 1

            if numLines >= numToShow:
                break

        result = "".join(finalData)

        t.logLines = result
        t.minLevel = minLevel

        return _munge(t)
class Home:
@cherrypy.expose
def is_alive(self, *args, **kwargs):
    """JSONP liveness probe.

    Requires 'callback' and '_' query parameters (as sent by jQuery JSONP
    requests).  Returns the process PID once startup has completed, or
    "nope" while still starting, wrapped in the callback.
    """
    if 'callback' in kwargs and '_' in kwargs:
        callback, _ = kwargs['callback'], kwargs['_']
    else:
        # fixed typo in the user-facing message: "stiring" -> "string"
        return "Error: Unsupported Request. Send jsonp request with 'callback' variable in the query string."

    # never cache the probe; allow cross-origin polling from the restart page
    cherrypy.response.headers['Cache-Control'] = "max-age=0,no-cache,no-store"
    cherrypy.response.headers['Content-Type'] = 'text/javascript'
    cherrypy.response.headers['Access-Control-Allow-Origin'] = '*'
    cherrypy.response.headers['Access-Control-Allow-Headers'] = 'x-requested-with'

    if sickbeard.started:
        return callback+'('+json.dumps({"msg": str(sickbeard.PID)})+');'
    else:
        return callback+'('+json.dumps({"msg": "nope"})+');'
@cherrypy.expose
def index(self):
    """Render the main Home page."""
    page = PageTemplate(file="home.tmpl")
    page.submenu = HomeMenu()
    return _munge(page)
# Sub-controllers mounted under /home/addShows and /home/postprocess
addShows = NewHomeAddShows()
postprocess = HomePostProcess()
@cherrypy.expose
def testSABnzbd(self, host=None, username=None, password=None, apikey=None):
    """Check connectivity and authentication against a SABnzbd server."""
    if not host.endswith("/"):
        host += "/"

    connection, accesMsg = sab.getSabAccesMethod(host, username, password, apikey)
    if not connection:
        return "Unable to connect to host"

    authed, authMsg = sab.testAuthentication(host, username, password, apikey)  # @UnusedVariable
    if authed:
        return "Success. Connected and authenticated"
    return "Authentication failed. SABnzbd expects '"+accesMsg+"' as authentication method"
@cherrypy.expose
def testTorrent(self, torrent_method=None, host=None, username=None, password=None):
    """Try authenticating against the configured torrent client; return its message."""
    if not host.endswith("/"):
        host += "/"

    client_factory = clients.getClientIstance(torrent_method)
    ok, msg = client_factory(host, username, password).testAuthentication()
    return msg
@cherrypy.expose
def testGrowl(self, host=None, password=None):
    """Register with a Growl host and send a test notification."""
    cherrypy.response.headers['Cache-Control'] = "max-age=0,no-cache,no-store"
    ok = notifiers.growl_notifier.test_notify(host, password)

    pw_append = '' if (password == None or password == '') else " with password: " + password
    host_part = urllib.unquote_plus(host)

    if ok:
        return "Registered and Tested growl successfully " + host_part + pw_append
    return "Registration and Testing of growl failed " + host_part + pw_append
@cherrypy.expose
def testProwl(self, prowl_api=None, prowl_priority=0):
    """Send a test Prowl notification; return a human-readable result."""
    cherrypy.response.headers['Cache-Control'] = "max-age=0,no-cache,no-store"
    if notifiers.prowl_notifier.test_notify(prowl_api, prowl_priority):
        return "Test prowl notice sent successfully"
    return "Test prowl notice failed"
@cherrypy.expose
def testBoxcar(self, username=None):
    """Send a test Boxcar notification; return a human-readable result."""
    cherrypy.response.headers['Cache-Control'] = "max-age=0,no-cache,no-store"
    if notifiers.boxcar_notifier.test_notify(username):
        return "Boxcar notification succeeded. Check your Boxcar clients to make sure it worked"
    return "Error sending Boxcar notification"
@cherrypy.expose
def testBoxcar2(self, accessToken=None, sound=None):
    """Send a test Boxcar2 notification; return a human-readable result."""
    cherrypy.response.headers['Cache-Control'] = "max-age=0,no-cache,no-store"
    if notifiers.boxcar2_notifier.test_notify(accessToken, sound):
        return "Boxcar2 notification succeeded. Check your Boxcar2 clients to make sure it worked"
    return "Error sending Boxcar2 notification"
@cherrypy.expose
def testPushover(self, userKey=None):
    """Send a test Pushover notification; return a human-readable result."""
    cherrypy.response.headers['Cache-Control'] = "max-age=0,no-cache,no-store"
    if notifiers.pushover_notifier.test_notify(userKey):
        return "Pushover notification succeeded. Check your Pushover clients to make sure it worked"
    return "Error sending Pushover notification"
@cherrypy.expose
def twitterStep1(self):
    """Begin Twitter OAuth: return the authorization URL for the user to visit."""
    cherrypy.response.headers['Cache-Control'] = "max-age=0,no-cache,no-store"
    auth_url = notifiers.twitter_notifier._get_authorization()
    return auth_url
@cherrypy.expose
def twitterStep2(self, key):
    """Finish Twitter OAuth by exchanging the user-supplied PIN/key for credentials."""
    cherrypy.response.headers['Cache-Control'] = "max-age=0,no-cache,no-store"
    verified = notifiers.twitter_notifier._get_credentials(key)
    logger.log(u"result: "+str(verified))
    if verified:
        return "Key verification successful"
    return "Unable to verify key"
@cherrypy.expose
def testTwitter(self):
    """Post a test tweet with the stored credentials; return a human-readable result."""
    cherrypy.response.headers['Cache-Control'] = "max-age=0,no-cache,no-store"
    if notifiers.twitter_notifier.test_notify():
        return "Tweet successful, check your twitter to make sure it worked"
    return "Error sending tweet"
@cherrypy.expose
def testXBMC(self, host=None, username=None, password=None):
    """Send a test notice to each comma-separated XBMC host; report per-host results."""
    cherrypy.response.headers['Cache-Control'] = "max-age=0,no-cache,no-store"

    messages = []
    for curHost in [x.strip() for x in host.split(",")]:
        curResult = notifiers.xbmc_notifier.test_notify(urllib.unquote_plus(curHost), username, password)
        parts = curResult.split(":")
        # the notifier's reply embeds "OK" in its third colon-separated field on success
        if len(parts) > 2 and 'OK' in parts[2]:
            messages.append("Test XBMC notice sent successfully to " + urllib.unquote_plus(curHost))
        else:
            messages.append("Test XBMC notice failed to " + urllib.unquote_plus(curHost))

    return "".join(m + "<br />\n" for m in messages)
@cherrypy.expose
def testPLEX(self, host=None, username=None, password=None):
    """Send a test notice to each comma-separated Plex host; report per-host results."""
    cherrypy.response.headers['Cache-Control'] = "max-age=0,no-cache,no-store"

    messages = []
    for curHost in [x.strip() for x in host.split(",")]:
        curResult = notifiers.plex_notifier.test_notify(urllib.unquote_plus(curHost), username, password)
        parts = curResult.split(":")
        # the notifier's reply embeds "OK" in its third colon-separated field on success
        if len(parts) > 2 and 'OK' in parts[2]:
            messages.append("Test Plex notice sent successfully to " + urllib.unquote_plus(curHost))
        else:
            messages.append("Test Plex notice failed to " + urllib.unquote_plus(curHost))

    return "".join(m + "<br />\n" for m in messages)
@cherrypy.expose
def testLibnotify(self):
    """Try a desktop notification via libnotify; on failure return diagnostics."""
    cherrypy.response.headers['Cache-Control'] = "max-age=0,no-cache,no-store"
    if notifiers.libnotify_notifier.test_notify():
        return "Tried sending desktop notification via libnotify"
    return notifiers.libnotify.diagnose()
@cherrypy.expose
def testNMJ(self, host=None, database=None, mount=None):
    """Kick off an NMJ library scan as a test; return a human-readable result."""
    cherrypy.response.headers['Cache-Control'] = "max-age=0,no-cache,no-store"
    result = notifiers.nmj_notifier.test_notify(urllib.unquote_plus(host), database, mount)
    if result:
        # fixed typo in the user-facing message: "Successfull" -> "Successfully"
        return "Successfully started the scan update"
    else:
        return "Test failed to start the scan update"
@cherrypy.expose
def settingsNMJ(self, host=None):
    """Query an NMJ device for its database/mount settings; return a JSON string."""
    cherrypy.response.headers['Cache-Control'] = "max-age=0,no-cache,no-store"
    found = notifiers.nmj_notifier.notify_settings(urllib.unquote_plus(host))
    if not found:
        return '{"message": "Failed! Make sure your Popcorn is on and NMJ is running. (see Log & Errors -> Debug for detailed info)", "database": "", "mount": ""}'
    return '{"message": "Got settings from %(host)s", "database": "%(database)s", "mount": "%(mount)s"}' % {"host": host, "database": sickbeard.NMJ_DATABASE, "mount": sickbeard.NMJ_MOUNT}
@cherrypy.expose
def testNMJv2(self, host=None):
    """Send a test notice to an NMJv2 device; return a human-readable result."""
    cherrypy.response.headers['Cache-Control'] = "max-age=0,no-cache,no-store"
    unquoted_host = urllib.unquote_plus(host)
    if notifiers.nmjv2_notifier.test_notify(unquoted_host):
        return "Test notice sent successfully to " + unquoted_host
    return "Test notice failed to " + unquoted_host
@cherrypy.expose
def settingsNMJv2(self, host=None, dbloc=None, instance=None):
    """Locate the NMJv2 database on the device; return a JSON string."""
    cherrypy.response.headers['Cache-Control'] = "max-age=0,no-cache,no-store"
    found = notifiers.nmjv2_notifier.notify_settings(urllib.unquote_plus(host), dbloc, instance)
    if not found:
        return '{"message": "Unable to find NMJ Database at location: %(dbloc)s. Is the right location selected and PCH running?", "database": ""}' % {"dbloc": dbloc}
    return '{"message": "NMJ Database found at: %(host)s", "database": "%(database)s"}' % {"host": host, "database": sickbeard.NMJv2_DATABASE}
@cherrypy.expose
def testTrakt(self, api=None, username=None, password=None):
    """Send a test notice to Trakt; return a human-readable result."""
    cherrypy.response.headers['Cache-Control'] = "max-age=0,no-cache,no-store"
    if notifiers.trakt_notifier.test_notify(api, username, password):
        return "Test notice sent successfully to Trakt"
    return "Test notice failed to Trakt"
@cherrypy.expose
def testBetaSeries(self, username=None, password=None):
    """Send a test notice to BetaSeries; return a human-readable result."""
    cherrypy.response.headers['Cache-Control'] = "max-age=0,no-cache,no-store"
    if notifiers.betaseries_notifier.test_notify(username, password):
        return "Test notice sent successfully to BetaSeries"
    return "Test notice failed to BetaSeries"
@cherrypy.expose
def testMail(self, mail_from=None, mail_to=None, mail_server=None, mail_ssl=None, mail_user=None, mail_password=None):
    """Send a test e-mail with the given SMTP settings; return a human-readable result."""
    cherrypy.response.headers['Cache-Control'] = "max-age=0,no-cache,no-store"
    result = notifiers.mail_notifier.test_notify(mail_from, mail_to, mail_server, mail_ssl, mail_user, mail_password)
    if result:
        return "Mail sent"
    else:
        # fixed grammar in the user-facing message: "Can't sent mail." -> "Can't send mail."
        return "Can't send mail."
@cherrypy.expose
def testNMA(self, nma_api=None, nma_priority=0):
    """Send a test NotifyMyAndroid notice; return a human-readable result."""
    cherrypy.response.headers['Cache-Control'] = "max-age=0,no-cache,no-store"
    if notifiers.nma_notifier.test_notify(nma_api, nma_priority):
        return "Test NMA notice sent successfully"
    return "Test NMA notice failed"
@cherrypy.expose
def testPushalot(self, authorizationToken=None):
    """Send a test Pushalot notification; return a human-readable result."""
    cherrypy.response.headers['Cache-Control'] = "max-age=0,no-cache,no-store"
    if notifiers.pushalot_notifier.test_notify(authorizationToken):
        return "Pushalot notification succeeded. Check your Pushalot clients to make sure it worked"
    return "Error sending Pushalot notification"
@cherrypy.expose
def testPushbullet(self, api=None):
    """Send a test Pushbullet notification; return a human-readable result."""
    cherrypy.response.headers['Cache-Control'] = "max-age=0,no-cache,no-store"
    if notifiers.pushbullet_notifier.test_notify(api):
        return "Pushbullet notification succeeded. Check your device to make sure it worked"
    return "Error sending Pushbullet notification"
@cherrypy.expose
def getPushbulletDevices(self, api=None):
    """Fetch the Pushbullet device list for the given API key."""
    cherrypy.response.headers['Cache-Control'] = "max-age=0,no-cache,no-store"
    devices = notifiers.pushbullet_notifier.get_devices(api)
    return devices if devices else "Error sending Pushbullet notification"
@cherrypy.expose
def getPushbulletChannels(self, api=None):
    """Fetch the Pushbullet channel list for the given API key."""
    cherrypy.response.headers['Cache-Control'] = "max-age=0,no-cache,no-store"
    channels = notifiers.pushbullet_notifier.get_channels(api)
    return channels if channels else "Error sending Pushbullet notification"
@cherrypy.expose
def shutdown(self, pid=None):
    """Schedule a shutdown, guarded by the caller supplying the running PID."""
    if str(pid) != str(sickbeard.PID):
        redirect("/home")

    # respond first, then shut down two seconds later
    threading.Timer(2, sickbeard.invoke_shutdown).start()

    return _genericMessage("Shutting down", "Sick Beard is shutting down...")
@cherrypy.expose
def restart(self, pid=None):
    """Schedule a soft restart, guarded by the caller supplying the running PID."""
    if str(pid) != str(sickbeard.PID):
        redirect("/home")

    page = PageTemplate(file="restart.tmpl")
    page.submenu = HomeMenu()

    # respond with the restart page first, then soft-restart two seconds later
    threading.Timer(2, sickbeard.invoke_restart, [False]).start()

    return _munge(page)
@cherrypy.expose
def update(self, pid=None):
    """Apply a pending update and restart, guarded by the running PID."""
    if str(pid) != str(sickbeard.PID):
        redirect("/home")

    updated = sickbeard.versionCheckScheduler.action.update() #@UndefinedVariable
    if not updated:
        return _genericMessage("Update Failed","Update wasn't successful, not restarting. Check your log for more information.")

    # update succeeded - schedule a restart and show the bare restart page
    threading.Timer(2, sickbeard.invoke_restart, [False]).start()
    return _munge(PageTemplate(file="restart_bare.tmpl"))
@cherrypy.expose
def displayShow(self, show=None):
    """Render the episode-list page for one show.

    ``show`` is the TVDB id as a string.  Builds the submenu (context
    dependent on queue state), per-episode category map, and category
    counters consumed by the template.
    """

    if show == None:
        return _genericMessage("Error", "Invalid show ID")
    else:
        showObj = sickbeard.helpers.findCertainShow(sickbeard.showList, int(show))

        if showObj == None:
            return _genericMessage("Error", "Show not in show list")

    showObj.exceptions = scene_exceptions.get_scene_exceptions(showObj.tvdbid)

    myDB = db.DBConnection()

    seasonResults = myDB.select(
        "SELECT DISTINCT season FROM tv_episodes WHERE showid = ? ORDER BY season desc",
        [showObj.tvdbid]
    )

    sqlResults = myDB.select(
        "SELECT * FROM tv_episodes WHERE showid = ? ORDER BY season DESC, episode DESC",
        [showObj.tvdbid]
    )

    t = PageTemplate(file="displayShow.tmpl")
    t.submenu = [ { 'title': 'Edit', 'path': 'home/editShow?show=%d'%showObj.tvdbid } ]

    try:
        t.showLoc = (showObj.location, True)
    except sickbeard.exceptions.ShowDirNotFoundException:
        # fall back to the stored (possibly missing) location, flagged invalid
        t.showLoc = (showObj._location, False)

    # status banner reflecting any in-progress or queued work for this show
    show_message = ''

    if sickbeard.showQueueScheduler.action.isBeingAdded(showObj): #@UndefinedVariable
        show_message = 'This show is in the process of being downloaded from theTVDB.com - the info below is incomplete.'

    elif sickbeard.showQueueScheduler.action.isBeingUpdated(showObj): #@UndefinedVariable
        show_message = 'The information below is in the process of being updated.'

    elif sickbeard.showQueueScheduler.action.isBeingRefreshed(showObj): #@UndefinedVariable
        show_message = 'The episodes below are currently being refreshed from disk'

    elif sickbeard.showQueueScheduler.action.isBeingSubtitled(showObj): #@UndefinedVariable
        show_message = 'Currently downloading subtitles for this show'

    elif sickbeard.showQueueScheduler.action.isBeingCleanedSubtitle(showObj): #@UndefinedVariable
        show_message = 'Currently cleaning subtitles for this show'

    elif sickbeard.showQueueScheduler.action.isInRefreshQueue(showObj): #@UndefinedVariable
        show_message = 'This show is queued to be refreshed.'

    elif sickbeard.showQueueScheduler.action.isInUpdateQueue(showObj): #@UndefinedVariable
        show_message = 'This show is queued and awaiting an update.'

    elif sickbeard.showQueueScheduler.action.isInSubtitleQueue(showObj): #@UndefinedVariable
        show_message = 'This show is queued and awaiting subtitles download.'

    # mutating actions are only offered while the show is not being added/updated
    if not sickbeard.showQueueScheduler.action.isBeingAdded(showObj): #@UndefinedVariable
        if not sickbeard.showQueueScheduler.action.isBeingUpdated(showObj): #@UndefinedVariable
            t.submenu.append({ 'title': 'Delete', 'path': 'home/deleteShow?show=%d'%showObj.tvdbid, 'confirm': True })
            t.submenu.append({ 'title': 'Re-scan files', 'path': 'home/refreshShow?show=%d'%showObj.tvdbid })
            t.submenu.append({ 'title': 'Force Full Update', 'path': 'home/updateShow?show=%d&force=1'%showObj.tvdbid })
            t.submenu.append({ 'title': 'Update show in XBMC', 'path': 'home/updateXBMC?showName=%s'%urllib.quote_plus(showObj.name.encode('utf-8')), 'requires': haveXBMC })
            t.submenu.append({ 'title': 'Preview Rename', 'path': 'home/testRename?show=%d'%showObj.tvdbid })
            t.submenu.append({ 'title': 'French Search', 'path': 'home/frenchSearch?show=%d'%showObj.tvdbid })
            if sickbeard.USE_SUBTITLES and not sickbeard.showQueueScheduler.action.isBeingSubtitled(showObj) and not sickbeard.showQueueScheduler.action.isBeingCleanedSubtitle(showObj) and showObj.subtitles:
                t.submenu.append({ 'title': 'Download Subtitles', 'path': 'home/subtitleShow?show=%d'%showObj.tvdbid })
                t.submenu.append({ 'title': 'Clean Subtitles', 'path': 'home/subtitleShowClean?show=%d'%showObj.tvdbid })

    t.show = showObj
    t.sqlResults = sqlResults
    t.seasonResults = seasonResults
    t.show_message = show_message

    # per-category episode counters and a "SxE" -> category map for the template
    epCounts = {}
    epCats = {}
    epCounts[Overview.SKIPPED] = 0
    epCounts[Overview.WANTED] = 0
    epCounts[Overview.QUAL] = 0
    epCounts[Overview.GOOD] = 0
    epCounts[Overview.UNAIRED] = 0
    epCounts[Overview.SNATCHED] = 0

    # only show the scene-number column if at least one episode has scene numbering
    showSceneNumberColum = False
    for curResult in sqlResults:
        if not showSceneNumberColum and (isinstance(curResult["scene_season"], int) and isinstance(curResult["scene_episode"], int)):
            showSceneNumberColum = True
        curEpCat = showObj.getOverview(int(curResult["status"]))
        epCats[str(curResult["season"])+"x"+str(curResult["episode"])] = curEpCat
        epCounts[curEpCat] += 1

    t.showSceneNumberColum = showSceneNumberColum

    def titler(x):
        # sort key helper: ignore leading "A "/"The " articles
        if not x:
            return x
        if x.lower().startswith('a '):
            x = x[2:]
        elif x.lower().startswith('the '):
            x = x[4:]
        return x

    # Python 2 cmp-style sort of the full show list for the show-navigation dropdown
    t.sortedShowList = sorted(sickbeard.showList, lambda x, y: cmp(titler(x.name), titler(y.name)))

    t.epCounts = epCounts
    t.epCats = epCats

    return _munge(t)
@cherrypy.expose
def plotDetails(self, show, season, episode):
    """Return the stored plot description for one episode, or a fallback message."""
    cursor = db.DBConnection().action(
        "SELECT description FROM tv_episodes WHERE showid = ? AND season = ? AND episode = ?",
        (show, season, episode))
    row = cursor.fetchone()
    if row:
        return row['description']
    return 'Episode not found.'
@cherrypy.expose
def editShow(self, show=None, location=None, anyQualities=[], bestQualities=[], exceptions_list=[], flatten_folders=None, paused=None, frenchsearch=None, directCall=False, air_by_date=None, tvdbLang=None, audio_lang=None, subtitles=None):
    """Render the edit form for a show (when no settings are submitted) or
    apply the submitted settings to it.

    When directCall is True (mass edit) a list of error strings is returned
    instead of rendering a page / redirecting.
    NOTE(review): the mutable default args ([]) are never mutated here
    (re-bound only), so they are safe; kept for interface compatibility.
    """
    if show == None:
        errString = "Invalid show ID: "+str(show)
        if directCall:
            return [errString]
        else:
            return _genericMessage("Error", errString)
    showObj = sickbeard.helpers.findCertainShow(sickbeard.showList, int(show))
    if showObj == None:
        errString = "Unable to find the specified show: "+str(show)
        if directCall:
            return [errString]
        else:
            return _genericMessage("Error", errString)
    showObj.exceptions = scene_exceptions.get_scene_exceptions(showObj.tvdbid)
    # Nothing submitted at all -> just show the edit form.
    if not location and not anyQualities and not bestQualities and not flatten_folders:
        t = PageTemplate(file="editShow.tmpl")
        t.submenu = HomeMenu()
        with showObj.lock:
            t.show = showObj
        return _munge(t)
    # Checkbox values arrive as "on"/None; normalize each one to 1/0.
    if flatten_folders == "on":
        flatten_folders = 1
    else:
        flatten_folders = 0
    logger.log(u"flatten folders: "+str(flatten_folders))
    if paused == "on":
        paused = 1
    else:
        paused = 0
    if frenchsearch == "on":
        frenchsearch = 1
    else:
        frenchsearch = 0
    if air_by_date == "on":
        air_by_date = 1
    else:
        air_by_date = 0
    if subtitles == "on":
        subtitles = 1
    else:
        subtitles = 0
    if tvdbLang and tvdbLang in tvdb_api.Tvdb().config['valid_languages']:
        tvdb_lang = tvdbLang
    else:
        tvdb_lang = showObj.lang
    # if we changed the language then kick off an update
    do_update = tvdb_lang != showObj.lang
    # Single-valued form fields arrive as scalars; force them to lists.
    if type(anyQualities) != list:
        anyQualities = [anyQualities]
    if type(bestQualities) != list:
        bestQualities = [bestQualities]
    if type(exceptions_list) != list:
        exceptions_list = [exceptions_list]
    # If directCall from mass_edit_update no scene exceptions handling
    if directCall:
        do_update_exceptions = False
    else:
        do_update_exceptions = set(exceptions_list) != set(showObj.exceptions)
    errors = []
    with showObj.lock:
        newQuality = Quality.combineQualities(map(int, anyQualities), map(int, bestQualities))
        showObj.quality = newQuality
        # reversed for now
        if bool(showObj.flatten_folders) != bool(flatten_folders):
            showObj.flatten_folders = flatten_folders
            try:
                sickbeard.showQueueScheduler.action.refreshShow(showObj) #@UndefinedVariable
            except exceptions.CantRefreshException as e:
                errors.append("Unable to refresh this show: "+ex(e))
        showObj.paused = paused
        showObj.air_by_date = air_by_date
        showObj.subtitles = subtitles
        showObj.frenchsearch = frenchsearch
        showObj.lang = tvdb_lang
        showObj.audio_lang = audio_lang
        # if we change location clear the db of episodes, change it, write to db, and rescan
        # BUGFIX: guard against location=None (e.g. only qualities submitted);
        # os.path.normpath(None) raises and the early-return above only fires
        # when *all* settings are empty.
        if location and os.path.normpath(showObj._location) != os.path.normpath(location):
            logger.log(os.path.normpath(showObj._location)+" != "+os.path.normpath(location), logger.DEBUG)
            if not ek.ek(os.path.isdir, location):
                errors.append("New location <tt>%s</tt> does not exist" % location)
            # don't bother if we're going to update anyway
            elif not do_update:
                # change it
                try:
                    showObj.location = location
                    try:
                        sickbeard.showQueueScheduler.action.refreshShow(showObj) #@UndefinedVariable
                    except exceptions.CantRefreshException as e:
                        errors.append("Unable to refresh this show:"+ex(e))
                    # grab updated info from TVDB
                    #showObj.loadEpisodesFromTVDB()
                    # rescan the episodes in the new folder
                except exceptions.NoNFOException:
                    errors.append("The folder at <tt>%s</tt> doesn't contain a tvshow.nfo - copy your files to that folder before you change the directory in Sick Beard." % location)
        # save it to the DB
        showObj.saveToDB()
    # force the update
    if do_update:
        try:
            sickbeard.showQueueScheduler.action.updateShow(showObj, True) #@UndefinedVariable
            time.sleep(1)
        except exceptions.CantUpdateException as e:
            errors.append("Unable to force an update on the show.")
    if do_update_exceptions:
        try:
            scene_exceptions.update_scene_exceptions(showObj.tvdbid, exceptions_list) #@UndefinedVariable
            time.sleep(1)
        except exceptions.CantUpdateException as e:
            errors.append("Unable to force an update on scene exceptions of the show.")
    if directCall:
        return errors
    if len(errors) > 0:
        ui.notifications.error('%d error%s while saving changes:' % (len(errors), "" if len(errors) == 1 else "s"),
                               '<ul>' + '\n'.join(['<li>%s</li>' % error for error in errors]) + "</ul>")
    redirect("/home/displayShow?show=" + show)
@cherrypy.expose
def deleteShow(self, show=None):
    """Remove a show from the library, unless it is busy being added/updated."""
    if show is None:
        return _genericMessage("Error", "Invalid show ID")
    target = sickbeard.helpers.findCertainShow(sickbeard.showList, int(show))
    if target is None:
        return _genericMessage("Error", "Unable to find the specified show")
    queue = sickbeard.showQueueScheduler.action
    if queue.isBeingAdded(target) or queue.isBeingUpdated(target): #@UndefinedVariable
        return _genericMessage("Error", "Shows can't be deleted while they're being added or updated.")
    target.deleteShow()
    ui.notifications.message('<b>%s</b> has been deleted' % target.name)
    redirect("/home")
@cherrypy.expose
def refreshShow(self, show=None):
    """Force a disk refresh of the given show, then return to its page."""
    if show is None:
        return _genericMessage("Error", "Invalid show ID")
    target = sickbeard.helpers.findCertainShow(sickbeard.showList, int(show))
    if target is None:
        return _genericMessage("Error", "Unable to find the specified show")
    # force the update from the DB
    try:
        sickbeard.showQueueScheduler.action.refreshShow(target) #@UndefinedVariable
    except exceptions.CantRefreshException as e:
        ui.notifications.error("Unable to refresh this show.", ex(e))
    time.sleep(3)
    redirect("/home/displayShow?show=" + str(target.tvdbid))
@cherrypy.expose
def updateShow(self, show=None, force=0):
    """Queue a TVDB update for the given show (optionally forced)."""
    if show is None:
        return _genericMessage("Error", "Invalid show ID")
    target = sickbeard.helpers.findCertainShow(sickbeard.showList, int(show))
    if target is None:
        return _genericMessage("Error", "Unable to find the specified show")
    # force the update
    try:
        sickbeard.showQueueScheduler.action.updateShow(target, bool(force)) #@UndefinedVariable
    except exceptions.CantUpdateException as e:
        ui.notifications.error("Unable to update this show.", ex(e))
    # just give it some time before redirecting
    time.sleep(3)
    redirect("/home/displayShow?show=" + str(target.tvdbid))
@cherrypy.expose
def subtitleShow(self, show=None, force=0):
    """Queue a subtitle search/download for every episode of the show."""
    if show is None:
        return _genericMessage("Error", "Invalid show ID")
    target = sickbeard.helpers.findCertainShow(sickbeard.showList, int(show))
    if target is None:
        return _genericMessage("Error", "Unable to find the specified show")
    sickbeard.showQueueScheduler.action.downloadSubtitles(target, bool(force)) #@UndefinedVariable
    time.sleep(3)
    redirect("/home/displayShow?show=" + str(target.tvdbid))
@cherrypy.expose
def subtitleShowClean(self, show=None, force=0):
    """Queue a subtitle clean-up pass for the show."""
    if show is None:
        return _genericMessage("Error", "Invalid show ID")
    target = sickbeard.helpers.findCertainShow(sickbeard.showList, int(show))
    if target is None:
        return _genericMessage("Error", "Unable to find the specified show")
    sickbeard.showQueueScheduler.action.cleanSubtitles(target, bool(force)) #@UndefinedVariable
    time.sleep(3)
    redirect("/home/displayShow?show=" + str(target.tvdbid))
@cherrypy.expose
def frenchSearch(self, show=None, force=0):
    """Queue a French-version search for the show.

    (Previous inline comment said "search and download subtitles" — it was a
    copy-paste leftover; this endpoint launches searchFrench.)
    """
    if show is None:
        return _genericMessage("Error", "Invalid show ID")
    target = sickbeard.helpers.findCertainShow(sickbeard.showList, int(show))
    if target is None:
        return _genericMessage("Error", "Unable to find the specified show")
    sickbeard.showQueueScheduler.action.searchFrench(target, bool(force)) #@UndefinedVariable
    time.sleep(3)
    redirect("/home/displayShow?show=" + str(target.tvdbid))
@cherrypy.expose
def updateXBMC(self, showName=None):
    """Ask the configured XBMC host(s) to rescan their library."""
    if sickbeard.XBMC_UPDATE_ONLYFIRST:
        # only send update to first host in the list -- workaround for xbmc sql backend users
        host = sickbeard.XBMC_HOST.split(",")[0].strip()
    else:
        host = sickbeard.XBMC_HOST
    sent_ok = notifiers.xbmc_notifier.update_library(showName=showName)
    if sent_ok:
        ui.notifications.message("Library update command sent to XBMC host(s): " + host)
    else:
        ui.notifications.error("Unable to contact one or more XBMC host(s): " + host)
    redirect('/home')
@cherrypy.expose
def updatePLEX(self):
    """Ask the configured Plex Media Server to rescan its library."""
    plex_host = sickbeard.PLEX_SERVER_HOST
    if notifiers.plex_notifier.update_library():
        ui.notifications.message("Library update command sent to Plex Media Server host: " + plex_host)
    else:
        ui.notifications.error("Unable to contact Plex Media Server host: " + plex_host)
    redirect('/home')
@cherrypy.expose
def setStatus(self, show=None, eps=None, status=None, direct=False):
    """Set the status of one or more episodes ("SxE|SxE|..." in eps).

    Episodes newly set to WANTED trigger a backlog search for their season
    (or airdate month for air-by-date shows). When direct is True the result
    is returned as JSON instead of redirecting.
    """
    if show == None or eps == None or status == None:
        errMsg = "You must specify a show and at least one episode"
        if direct:
            ui.notifications.error('Error', errMsg)
            return json.dumps({'result': 'error'})
        else:
            return _genericMessage("Error", errMsg)
    if not statusStrings.has_key(int(status)):
        errMsg = "Invalid status"
        if direct:
            ui.notifications.error('Error', errMsg)
            return json.dumps({'result': 'error'})
        else:
            return _genericMessage("Error", errMsg)
    showObj = sickbeard.helpers.findCertainShow(sickbeard.showList, int(show))
    if showObj == None:
        # BUGFIX: was `errMsg = "Error", "Show not in show list"` which made
        # errMsg a tuple and mangled the error output.
        errMsg = "Show not in show list"
        if direct:
            ui.notifications.error('Error', errMsg)
            return json.dumps({'result': 'error'})
        else:
            return _genericMessage("Error", errMsg)
    segment_list = []
    if eps != None:
        for curEp in eps.split('|'):
            logger.log(u"Attempting to set status on episode "+curEp+" to "+status, logger.DEBUG)
            epInfo = curEp.split('x')
            epObj = showObj.getEpisode(int(epInfo[0]), int(epInfo[1]))
            # BUGFIX: check for a missing episode BEFORE dereferencing it; the
            # original read epObj.show.air_by_date first and could raise
            # AttributeError when getEpisode returned None.
            if epObj == None:
                return _genericMessage("Error", "Episode couldn't be retrieved")
            if int(status) == WANTED:
                # figure out what segment the episode is in and remember it so we can backlog it
                if epObj.show.air_by_date:
                    ep_segment = str(epObj.airdate)[:7]
                else:
                    ep_segment = epObj.season
                if ep_segment not in segment_list:
                    segment_list.append(ep_segment)
            with epObj.lock:
                # don't let them mess up UNAIRED episodes
                if epObj.status == UNAIRED:
                    logger.log(u"Refusing to change status of "+curEp+" because it is UNAIRED", logger.ERROR)
                    continue
                if int(status) in Quality.DOWNLOADED and epObj.status not in Quality.SNATCHED + Quality.SNATCHED_PROPER + Quality.SNATCHED_FRENCH + Quality.DOWNLOADED + [IGNORED] and not ek.ek(os.path.isfile, epObj.location):
                    logger.log(u"Refusing to change status of "+curEp+" to DOWNLOADED because it's not SNATCHED/DOWNLOADED", logger.ERROR)
                    continue
                epObj.status = int(status)
                epObj.saveToDB()
    # BUGFIX: the <li> items were emitted without an opening <ul> tag.
    msg = "Backlog was automatically started for the following seasons of <b>"+showObj.name+"</b>:<br /><ul>"
    for cur_segment in segment_list:
        msg += "<li>Season "+str(cur_segment)+"</li>"
        logger.log(u"Sending backlog for "+showObj.name+" season "+str(cur_segment)+" because some eps were set to wanted")
        cur_backlog_queue_item = search_queue.BacklogQueueItem(showObj, cur_segment)
        sickbeard.searchQueueScheduler.action.add_item(cur_backlog_queue_item) #@UndefinedVariable
    msg += "</ul>"
    if segment_list:
        ui.notifications.message("Backlog started", msg)
    if direct:
        return json.dumps({'result': 'success'})
    else:
        redirect("/home/displayShow?show=" + show)
@cherrypy.expose
def setAudio(self, show=None, eps=None, audio_langs=None, direct=False):
    """Set the audio language tag on one or more episodes ("SxE|SxE|...").

    When direct is True the result is returned as JSON instead of redirecting.
    """
    if show == None or eps == None or audio_langs == None:
        errMsg = "You must specify a show and at least one episode"
        if direct:
            ui.notifications.error('Error', errMsg)
            return json.dumps({'result': 'error'})
        else:
            return _genericMessage("Error", errMsg)
    showObj = sickbeard.helpers.findCertainShow(sickbeard.showList, int(show))
    if showObj == None:
        return _genericMessage("Error", "Show not in show list")
    try:
        show_loc = showObj.location #@UnusedVariable -- accessed only to verify the dir exists
    except exceptions.ShowDirNotFoundException:
        return _genericMessage("Error", "Can't rename episodes when the show dir is missing.")
    # NOTE: removed unused leftover `ep_obj_rename_list = []` (copy-paste from testRename)
    for curEp in eps.split('|'):
        logger.log(u"Attempting to set audio on episode "+curEp+" to "+audio_langs, logger.DEBUG)
        epInfo = curEp.split('x')
        epObj = showObj.getEpisode(int(epInfo[0]), int(epInfo[1]))
        epObj.audio_langs = str(audio_langs)
        epObj.saveToDB()
    if direct:
        return json.dumps({'result': 'success'})
    else:
        redirect("/home/displayShow?show=" + show)
@cherrypy.expose
def testRename(self, show=None):
    """Preview what a rename would do for every located episode of a show."""
    if show is None:
        return _genericMessage("Error", "You must specify a show")
    show_obj = sickbeard.helpers.findCertainShow(sickbeard.showList, int(show))
    if show_obj is None:
        return _genericMessage("Error", "Show not in show list")
    try:
        show_loc = show_obj.location #@UnusedVariable -- accessed only to verify the dir exists
    except exceptions.ShowDirNotFoundException:
        return _genericMessage("Error", "Can't rename episodes when the show dir is missing.")
    rename_candidates = []
    for cur_ep in show_obj.getAllEpisodes(has_location=True):
        # Only want to rename if we have a location
        if not cur_ep.location:
            continue
        if cur_ep.relatedEps:
            # multi-episode file: only list the group once
            listed_already = False
            for rel in cur_ep.relatedEps + [cur_ep]:
                if rel in rename_candidates:
                    listed_already = True
                    break
            if not listed_already:
                rename_candidates.append(cur_ep)
        else:
            rename_candidates.append(cur_ep)
    if rename_candidates:
        # present season DESC episode DESC on screen
        rename_candidates.reverse()
    t = PageTemplate(file="testRename.tmpl")
    t.submenu = [{'title': 'Edit', 'path': 'home/editShow?show=%d' % show_obj.tvdbid}]
    t.ep_obj_list = rename_candidates
    t.show = show_obj
    return _munge(t)
@cherrypy.expose
def doRename(self, show=None, eps=None):
    """Rename the given episodes ("SxE|SxE|...") to their proper file names,
    grouping episodes that share a file so they are renamed together."""
    if show == None or eps == None:
        errMsg = "You must specify a show and at least one episode"
        return _genericMessage("Error", errMsg)
    show_obj = sickbeard.helpers.findCertainShow(sickbeard.showList, int(show))
    if show_obj == None:
        # BUGFIX: was `errMsg = "Error", "Show not in show list"` which made
        # errMsg a tuple and produced a mangled error page.
        errMsg = "Show not in show list"
        return _genericMessage("Error", errMsg)
    try:
        show_loc = show_obj.location #@UnusedVariable -- accessed only to verify the dir exists
    except exceptions.ShowDirNotFoundException:
        return _genericMessage("Error", "Can't rename episodes when the show dir is missing.")
    myDB = db.DBConnection()
    if eps == None:
        redirect("/home/displayShow?show=" + show)
    for curEp in eps.split('|'):
        epInfo = curEp.split('x')
        # this is probably the worst possible way to deal with double eps but I've kinda painted myself into a corner here with this stupid database
        ep_result = myDB.select("SELECT * FROM tv_episodes WHERE showid = ? AND season = ? AND episode = ? AND 5=5", [show, epInfo[0], epInfo[1]])
        if not ep_result:
            logger.log(u"Unable to find an episode for "+curEp+", skipping", logger.WARNING)
            continue
        # every other episode sharing the same file belongs to this rename
        related_eps_result = myDB.select("SELECT * FROM tv_episodes WHERE location = ? AND episode != ?", [ep_result[0]["location"], epInfo[1]])
        root_ep_obj = show_obj.getEpisode(int(epInfo[0]), int(epInfo[1]))
        for cur_related_ep in related_eps_result:
            related_ep_obj = show_obj.getEpisode(int(cur_related_ep["season"]), int(cur_related_ep["episode"]))
            if related_ep_obj not in root_ep_obj.relatedEps:
                root_ep_obj.relatedEps.append(related_ep_obj)
        root_ep_obj.rename()
    redirect("/home/displayShow?show=" + show)
@cherrypy.expose
def trunchistory(self, epid):
    """Delete all download-history links recorded for one episode."""
    myDB = db.DBConnection()
    count_rows = myDB.select("Select count(*) from episode_links where episode_id=?", [epid])
    myDB.action("DELETE from episode_links where episode_id=?", [epid])
    deleted_msg = str(count_rows[0][0]) + ' history links deleted'
    ui.notifications.message('Episode History Truncated', deleted_msg)
    return json.dumps({'result': 'ok'})
@cherrypy.expose
def searchEpisode(self, show=None, season=None, episode=None):
    """Start a manual search for one episode and report the outcome as JSON."""
    # _getEpisode returns an error string on failure
    ep_obj = _getEpisode(show, season, episode)
    if isinstance(ep_obj, str):
        return json.dumps({'result': 'failure'})
    # queue the manual search
    queue_item = search_queue.ManualSearchQueueItem(ep_obj)
    sickbeard.searchQueueScheduler.action.add_item(queue_item) #@UndefinedVariable
    # block until the queue item reports success or failure
    while queue_item.success == None: #@UndefinedVariable
        time.sleep(1)
    if queue_item.success:
        return json.dumps({'result': statusStrings[ep_obj.status]})
    return json.dumps({'result': 'failure'})
@cherrypy.expose
def searchEpisodeSubtitles(self, show=None, season=None, episode=None):
    # Search subtitles for one episode, relocate the downloaded files
    # according to the configured layout, and report the result as JSON.
    # retrieve the episode object and fail if we can't get one
    ep_obj = _getEpisode(show, season, episode)
    if isinstance(ep_obj, str):
        return json.dumps({'result': 'failure'})
    # try do download subtitles for that episode
    previous_subtitles = ep_obj.subtitles
    try:
        subtitles = ep_obj.downloadSubtitles()
        if sickbeard.SUBTITLES_DIR:
            # layout 1: move the subs into the user-configured subtitles dir
            for video in subtitles:
                subs_new_path = ek.ek(os.path.join, os.path.dirname(video.path), sickbeard.SUBTITLES_DIR)
                dir_exists = helpers.makeDir(subs_new_path)
                if not dir_exists:
                    logger.log(u"Unable to create subtitles folder "+subs_new_path, logger.ERROR)
                else:
                    helpers.chmodAsParent(subs_new_path)
                for subtitle in subtitles.get(video):
                    new_file_path = ek.ek(os.path.join, subs_new_path, os.path.basename(subtitle.path))
                    helpers.moveFile(subtitle.path, new_file_path)
                    if sickbeard.SUBSNOLANG:
                        # also keep a copy without the language suffix (".xx.srt" -> ".srt")
                        helpers.copyFile(new_file_path,new_file_path[:-6]+"srt")
                        helpers.chmodAsParent(new_file_path[:-6]+"srt")
                    helpers.chmodAsParent(new_file_path)
        else:
            if sickbeard.SUBTITLES_DIR_SUB:
                # layout 2: move the subs into a "Subs" folder next to the video
                for video in subtitles:
                    subs_new_path = os.path.join(os.path.dirname(video.path),"Subs")
                    dir_exists = helpers.makeDir(subs_new_path)
                    if not dir_exists:
                        logger.log(u"Unable to create subtitles folder "+subs_new_path, logger.ERROR)
                    else:
                        helpers.chmodAsParent(subs_new_path)
                    for subtitle in subtitles.get(video):
                        new_file_path = ek.ek(os.path.join, subs_new_path, os.path.basename(subtitle.path))
                        helpers.moveFile(subtitle.path, new_file_path)
                        if sickbeard.SUBSNOLANG:
                            helpers.copyFile(new_file_path,new_file_path[:-6]+"srt")
                            helpers.chmodAsParent(new_file_path[:-6]+"srt")
                        helpers.chmodAsParent(new_file_path)
            else:
                # layout 3: leave the subs beside the video, just fix permissions
                for video in subtitles:
                    for subtitle in subtitles.get(video):
                        if sickbeard.SUBSNOLANG:
                            helpers.copyFile(subtitle.path,subtitle.path[:-6]+"srt")
                            helpers.chmodAsParent(subtitle.path[:-6]+"srt")
                        helpers.chmodAsParent(subtitle.path)
    except:
        # NOTE(review): bare except silently swallows every error here;
        # narrowing it would change the best-effort behaviour, so it is
        # only flagged, not changed.
        return json.dumps({'result': 'failure'})
    # return the correct json value
    if previous_subtitles != ep_obj.subtitles:
        status = 'New subtitles downloaded: %s' % ' '.join(["<img src='"+sickbeard.WEB_ROOT+"/images/flags/"+subliminal.language.Language(x).alpha2+".png' alt='"+subliminal.language.Language(x).name+"'/>" for x in sorted(list(set(ep_obj.subtitles).difference(previous_subtitles)))])
    else:
        status = 'No subtitles downloaded'
    ui.notifications.message('Subtitles Search', status)
    return json.dumps({'result': status, 'subtitles': ','.join([x for x in ep_obj.subtitles])})
@cherrypy.expose
def mergeEpisodeSubtitles(self, show=None, season=None, episode=None):
    """Merge the subtitles of one episode; report the result as JSON."""
    # _getEpisode returns an error string on failure
    ep_obj = _getEpisode(show, season, episode)
    if isinstance(ep_obj, str):
        return json.dumps({'result': 'failure'})
    try:
        ep_obj.mergeSubtitles()
    except Exception as e:
        return json.dumps({'result': 'failure', 'exception': str(e)})
    status = 'Subtitles merged successfully '
    ui.notifications.message('Merge Subtitles', status)
    return json.dumps({'result': 'ok'})
class UI:
    """Notification endpoints polled by the web frontend."""

    @cherrypy.expose
    def add_message(self):
        # debug helper: push one info and one error notification
        ui.notifications.message('Test 1', 'This is test number 1')
        ui.notifications.error('Test 2', 'This is test number 2')
        return "ok"

    @cherrypy.expose
    def get_messages(self):
        # drain pending notifications into a numbered JSON dict
        messages = {}
        for number, note in enumerate(ui.notifications.get_notifications(), 1):
            messages['notification-' + str(number)] = {'title': note.title,
                                                       'message': note.message,
                                                       'type': note.type}
        return json.dumps(messages)
class WebInterface:
    """Root CherryPy application: mounts the sub-handlers (home, config,
    manage, ...) and serves the global pages (home redirect, show posters,
    layout/sort toggles, the coming-episodes page and the iCal feed)."""

    @cherrypy.expose
    def index(self):
        # "/" just forwards to the home page
        redirect("/home")

    @cherrypy.expose
    def showPoster(self, show=None, which=None):
        # Serve the poster/banner (or thumb) for a show from the image cache,
        # falling back to the bundled default images when missing.
        #Redirect initial poster/banner thumb to default images
        if which[0:6] == 'poster':
            default_image_name = 'poster.png'
        else:
            default_image_name = 'banner.png'
        default_image_path = ek.ek(os.path.join, sickbeard.PROG_DIR, 'data', 'images', default_image_name)
        if show is None:
            return cherrypy.lib.static.serve_file(default_image_path, content_type="image/png")
        else:
            showObj = sickbeard.helpers.findCertainShow(sickbeard.showList, int(show))
            if showObj is None:
                return cherrypy.lib.static.serve_file(default_image_path, content_type="image/png")
        cache_obj = image_cache.ImageCache()
        # map the requested image kind to its cached file path
        if which == 'poster':
            image_file_name = cache_obj.poster_path(showObj.tvdbid)
        if which == 'poster_thumb':
            image_file_name = cache_obj.poster_thumb_path(showObj.tvdbid)
        if which == 'banner':
            image_file_name = cache_obj.banner_path(showObj.tvdbid)
        if which == 'banner_thumb':
            image_file_name = cache_obj.banner_thumb_path(showObj.tvdbid)
        if ek.ek(os.path.isfile, image_file_name):
            return cherrypy.lib.static.serve_file(image_file_name, content_type="image/jpeg")
        else:
            return cherrypy.lib.static.serve_file(default_image_path, content_type="image/png")

    @cherrypy.expose
    def setHomeLayout(self, layout):
        # persist the home page layout choice (unknown values fall back to 'poster')
        if layout not in ('poster', 'banner', 'simple'):
            layout = 'poster'
        sickbeard.HOME_LAYOUT = layout
        redirect("/home")

    @cherrypy.expose
    def setHomeSearch(self, search):
        # toggle the home search flag; the value arrives as the strings 'True'/'False'
        if search not in ('True', 'False'):
            search = 'False'
        sickbeard.TOGGLE_SEARCH = search
        redirect("/home")

    @cherrypy.expose
    def toggleDisplayShowSpecials(self, show):
        # flip the "display specials" flag and return to the show page
        sickbeard.DISPLAY_SHOW_SPECIALS = not sickbeard.DISPLAY_SHOW_SPECIALS
        redirect("/home/displayShow?show=" + show)

    @cherrypy.expose
    def setComingEpsLayout(self, layout):
        # persist the coming-episodes layout choice (fallback: 'banner')
        if layout not in ('poster', 'banner', 'list'):
            layout = 'banner'
        sickbeard.COMING_EPS_LAYOUT = layout
        redirect("/comingEpisodes")

    @cherrypy.expose
    def toggleComingEpsDisplayPaused(self):
        # flip whether paused shows appear on the coming-episodes page
        sickbeard.COMING_EPS_DISPLAY_PAUSED = not sickbeard.COMING_EPS_DISPLAY_PAUSED
        redirect("/comingEpisodes")

    @cherrypy.expose
    def setComingEpsSort(self, sort):
        # persist the coming-episodes sort order (fallback: 'date')
        if sort not in ('date', 'network', 'show'):
            sort = 'date'
        sickbeard.COMING_EPS_SORT = sort
        redirect("/comingEpisodes")

    @cherrypy.expose
    def comingEpisodes(self, layout="None"):
        # Build the "coming episodes" page: episodes airing in the next week,
        # the next unaired episode of every other show, and recently-missed
        # WANTED episodes; each row gets a localized air time.
        # get local timezone and load network timezones
        sb_timezone = tz.tzlocal()
        network_dict = network_timezones.load_network_dict()
        myDB = db.DBConnection()
        today1 = datetime.date.today()
        today = today1.toordinal()
        next_week1 = (datetime.date.today() + datetime.timedelta(days=7))
        next_week = next_week1.toordinal()
        recently = (datetime.date.today() - datetime.timedelta(days=sickbeard.COMING_EPS_MISSED_RANGE)).toordinal()
        done_show_list = []
        qualList = Quality.DOWNLOADED + Quality.SNATCHED + [ARCHIVED, IGNORED]
        # episodes airing between today and next week that aren't already obtained
        sql_results1 = myDB.select("SELECT *, 0 as localtime, tv_shows.status as show_status FROM tv_episodes, tv_shows WHERE season != 0 AND airdate >= ? AND airdate < ? AND tv_shows.tvdb_id = tv_episodes.showid AND tv_episodes.status NOT IN ("+','.join(['?']*len(qualList))+")", [today, next_week] + qualList)
        for cur_result in sql_results1:
            done_show_list.append(helpers.tryInt(cur_result["showid"]))
        # for every show not covered above, pick up its next pending episode
        more_sql_results = myDB.select("SELECT *, tv_shows.status as show_status FROM tv_episodes outer_eps, tv_shows WHERE season != 0 AND showid NOT IN ("+','.join(['?']*len(done_show_list))+") AND tv_shows.tvdb_id = outer_eps.showid AND airdate IN (SELECT airdate FROM tv_episodes inner_eps WHERE inner_eps.showid = outer_eps.showid AND inner_eps.airdate >= ? AND inner_eps.status NOT IN ("+','.join(['?']*len(Quality.DOWNLOADED+Quality.SNATCHED))+") ORDER BY inner_eps.airdate ASC LIMIT 1)", done_show_list + [next_week] + Quality.DOWNLOADED + Quality.SNATCHED)
        sql_results1 += more_sql_results
        # still-WANTED episodes that aired within the missed range
        more_sql_results = myDB.select("SELECT *, 0 as localtime, tv_shows.status as show_status FROM tv_episodes, tv_shows WHERE season != 0 AND tv_shows.tvdb_id = tv_episodes.showid AND airdate < ? AND airdate >= ? AND tv_episodes.status = ? AND tv_episodes.status NOT IN ("+','.join(['?']*len(qualList))+")", [today, recently, WANTED] + qualList)
        sql_results1 += more_sql_results
        # sort by localtime
        sorts = {
            'date': (lambda x, y: cmp(x["localtime"], y["localtime"])),
            'show': (lambda a, b: cmp((a["show_name"], a["localtime"]), (b["show_name"], b["localtime"]))),
            'network': (lambda a, b: cmp((a["network"], a["localtime"]), (b["network"], b["localtime"]))),
        }
        # make a dict out of the sql results
        sql_results = [dict(row) for row in sql_results1]
        # regex to parse time (12/24 hour format)
        time_regex = re.compile(r"(\d{1,2}):(\d{2,2})( [PA]M)?\b", flags=re.IGNORECASE)
        # add localtime to the dict
        for index, item in enumerate(sql_results1):
            mo = time_regex.search(item['airs'])
            if mo != None and len(mo.groups()) >= 2:
                try:
                    hr = helpers.tryInt(mo.group(1))
                    m = helpers.tryInt(mo.group(2))
                    ap = mo.group(3)
                    # convert am/pm to 24 hour clock
                    if ap != None:
                        if ap.lower() == u" pm" and hr != 12:
                            hr += 12
                        elif ap.lower() == u" am" and hr == 12:
                            hr -= 12
                except:
                    hr = 0
                    m = 0
            else:
                hr = 0
                m = 0
            # clamp nonsense times to midnight
            if hr < 0 or hr > 23 or m < 0 or m > 59:
                hr = 0
                m = 0
            te = datetime.datetime.fromordinal(helpers.tryInt(item['airdate']))
            foreign_timezone = network_timezones.get_network_timezone(item['network'], network_dict, sb_timezone)
            foreign_naive = datetime.datetime(te.year, te.month, te.day, hr, m,tzinfo=foreign_timezone)
            sql_results[index]['localtime'] = foreign_naive.astimezone(sb_timezone)
            #Normalize/Format the Airing Time
            # NOTE(review): 'us_US' looks like a typo for 'en_US'; when the
            # locale is unavailable the except branch formats the time anyway.
            try:
                locale.setlocale(locale.LC_TIME, 'us_US')
                sql_results[index]['localtime_string'] = sql_results[index]['localtime'].strftime("%A %H:%M %p")
                locale.setlocale(locale.LC_ALL, '') #Reseting to default locale
            except:
                sql_results[index]['localtime_string'] = sql_results[index]['localtime'].strftime("%A %H:%M %p")
        sql_results.sort(sorts[sickbeard.COMING_EPS_SORT])
        t = PageTemplate(file="comingEpisodes.tmpl")
        paused_item = { 'title': 'View Paused:', 'path': {'': ''} }
        paused_item['path'] = {'Hide': 'toggleComingEpsDisplayPaused'} if sickbeard.COMING_EPS_DISPLAY_PAUSED else {'Show': 'toggleComingEpsDisplayPaused'}
        t.submenu = [
            { 'title': 'Sort by:', 'path': {'Date': 'setComingEpsSort/?sort=date',
                                            'Show': 'setComingEpsSort/?sort=show',
                                            'Network': 'setComingEpsSort/?sort=network',
                                           }},
            { 'title': 'Layout:', 'path': {'Banner': 'setComingEpsLayout/?layout=banner',
                                           'Poster': 'setComingEpsLayout/?layout=poster',
                                           'List': 'setComingEpsLayout/?layout=list',
                                          }},
            paused_item,
        ]
        t.next_week = datetime.datetime.combine(next_week1, datetime.time(tzinfo=sb_timezone))
        t.today = datetime.datetime.now().replace(tzinfo=sb_timezone)
        t.sql_results = sql_results
        # Allow local overriding of layout parameter
        if layout and layout in ('poster', 'banner', 'list'):
            t.layout = layout
        else:
            t.layout = sickbeard.COMING_EPS_LAYOUT
        return _munge(t)

    # Raw iCalendar implementation by Pedro Jose Pereira Vieito (@pvieito).
    #
    # iCalendar (iCal) - Standard RFC 5545 <http://tools.ietf.org/html/rfc5546>
    # Works with iCloud, Google Calendar and Outlook.
    @cherrypy.expose
    def calendar(self):
        """ Provides a subscribeable URL for iCal subscriptions
        """
        logger.log(u"Receiving iCal request from %s" % cherrypy.request.remote.ip)
        # NOTE(review): poster_url and time_re are assigned but unused below.
        poster_url = cherrypy.url().replace('ical', '')
        time_re = re.compile('([0-9]{1,2})\:([0-9]{2})(\ |)([AM|am|PM|pm]{2})')
        # Create a iCal string
        ical = 'BEGIN:VCALENDAR\n'
        ical += 'VERSION:2.0\n'
        ical += 'PRODID://Sick-Beard Upcoming Episodes//\n'
        # Get shows info
        myDB = db.DBConnection()
        # Limit dates
        past_date = (datetime.date.today() + datetime.timedelta(weeks=-2)).toordinal()
        future_date = (datetime.date.today() + datetime.timedelta(weeks=52)).toordinal()
        # Get all the shows that are not paused and are currently on air (from kjoconnor Fork)
        calendar_shows = myDB.select("SELECT show_name, tvdb_id, network, airs, runtime FROM tv_shows WHERE status = 'Continuing' AND paused != '1'")
        for show in calendar_shows:
            # Get all episodes of this show airing between today and next month
            episode_list = myDB.select("SELECT tvdbid, name, season, episode, description, airdate FROM tv_episodes WHERE airdate >= ? AND airdate < ? AND showid = ?", (past_date, future_date, int(show["tvdb_id"])))
            # Get local timezone and load network timezones
            local_zone = tz.tzlocal()
            try:
                network_zone = network_timezones.get_network_timezone(show['network'], network_timezones.load_network_dict(), local_zone)
            except:
                # Dummy network_zone for exceptions
                network_zone = None
            for episode in episode_list:
                # Get the air date and time
                air_date = datetime.datetime.fromordinal(int(episode['airdate']))
                air_time = re.compile('([0-9]{1,2})\:([0-9]{2})(\ |)([AM|am|PM|pm]{2})').search(show["airs"])
                # Parse out the air time
                try:
                    if (air_time.group(4).lower() == 'pm' and int(air_time.group(1)) == 12):
                        t = datetime.time(12, int(air_time.group(2)), 0, tzinfo=network_zone)
                    elif (air_time.group(4).lower() == 'pm'):
                        t = datetime.time((int(air_time.group(1)) + 12), int(air_time.group(2)), 0, tzinfo=network_zone)
                    elif (air_time.group(4).lower() == 'am' and int(air_time.group(1)) == 12):
                        t = datetime.time(0, int(air_time.group(2)), 0, tzinfo=network_zone)
                    else:
                        t = datetime.time(int(air_time.group(1)), int(air_time.group(2)), 0, tzinfo=network_zone)
                except:
                    # Dummy time for exceptions
                    t = datetime.time(22, 0, 0, tzinfo=network_zone)
                # Combine air time and air date into one datetime object
                air_date_time = datetime.datetime.combine(air_date, t).astimezone(local_zone)
                # Create event for episode
                ical = ical + 'BEGIN:VEVENT\n'
                ical = ical + 'DTSTART:' + str(air_date_time.date()).replace("-", "") + '\n'
                ical = ical + 'SUMMARY:' + show['show_name'] + ': ' + episode['name'] + '\n'
                ical = ical + 'UID:' + str(datetime.date.today().isoformat()) + '-' + str(random.randint(10000,99999)) + '@Sick-Beard\n'
                if (episode['description'] != ''):
                    ical = ical + 'DESCRIPTION:' + show['airs'] + ' on ' + show['network'] + '\\n\\n' + episode['description'] + '\n'
                else:
                    ical = ical + 'DESCRIPTION:' + show['airs'] + ' on ' + show['network'] + '\n'
                ical = ical + 'LOCATION:' + 'Episode ' + str(episode['episode']) + ' - Season ' + str(episode['season']) + '\n'
                ical = ical + 'END:VEVENT\n'
        # Ending the iCal
        ical += 'END:VCALENDAR\n'
        return ical

    # sub-application handlers mounted on the root URL
    manage = Manage()
    history = History()
    config = Config()
    home = Home()
    api = Api()
    browser = browser.WebFileBrowser()
    errorlogs = ErrorLogs()
    ui = UI()
|
import codecs
from ConfigParser import ConfigParser
import os
import subprocess
import sys
import six
import twiggy
from twiggy import log
from twiggy.levels import name2level
from xdg import BaseDirectory
def asbool(some_value):
    """ Cast config values to boolean. """
    truthy_words = ('y', 'yes', 't', 'true', '1', 'on')
    return six.text_type(some_value).lower() in truthy_words
def get_service_password(service, username, oracle=None, interactive=False):
    """
    Retrieve the sensitive password for a service by:
    * retrieving password from a secure store (@oracle:use_keyring, default)
    * asking the password from the user (@oracle:ask_password, interactive)
    * executing a command and use the output as password
    (@oracle:eval:<command>)
    Note that the keyring may or may not be locked
    which requires that the user provides a password (interactive mode).
    :param service: Service name, may be key into secure store (as string).
    :param username: Username for the service (as string).
    :param oracle: Hint which password oracle strategy to use.
    :param interactive: Allow prompting the user (and keyring learning mode).
    :return: Retrieved password (as string)
    .. seealso::
    https://bitbucket.org/kang/python-keyring-lib
    """
    import getpass
    import keyring
    password = None
    if not oracle or oracle == "@oracle:use_keyring":
        password = keyring.get_password(service, username)
        if interactive and password is None:
            # -- LEARNING MODE: Password is not stored in keyring yet.
            # Recurse with the ask_password oracle, then remember the answer.
            oracle = "@oracle:ask_password"
            password = get_service_password(service, username,
                                            oracle, interactive=True)
            if password:
                keyring.set_password(service, username, password)
    elif interactive and oracle == "@oracle:ask_password":
        prompt = "%s password: " % service
        password = getpass.getpass(prompt)
    elif oracle.startswith('@oracle:eval:'):
        # Run the rest of the oracle string as a shell command and use its
        # output (minus the trailing newline) as the password.
        command = oracle[13:]
        p = subprocess.Popen(
            command,
            shell=True,
            stdout=subprocess.PIPE,
            #stderr=subprocess.STDOUT
        )
        password = p.stdout.read()[:-1]
    if password is None:
        # No strategy produced a password: abort with the example config.
        die("MISSING PASSWORD: oracle='%s', interactive=%s for service=%s" %
            (oracle, interactive, service))
    return password
def load_example_rc():
    """Return the example bugwarriorrc template embedded in the docs."""
    docs_path = os.path.join(
        os.path.dirname(__file__),
        'docs/configuration.rst'
    )
    with open(docs_path, 'r') as handle:
        readme = handle.read()
    # The template sits after the '.. example' marker; skip the marker's
    # trailing four characters (newline plus indentation).
    return readme.split('.. example')[1][4:]
# Banner rendered by die(): the validation message plus an example config.
error_template = """
*************************************************
* There was a problem with your bugwarriorrc *
* {msg}
* Here's an example template to help: *
*************************************************
{example}"""
def die(msg):
    """Log a fatal configuration error (with the example rc) and exit(1).

    :param msg: human-readable description of the configuration problem
    """
    # suppress_newlines=False keeps the multi-line template readable in logs.
    log.options(suppress_newlines=False).critical(
        error_template,
        msg=msg,
        example=load_example_rc(),
    )
    sys.exit(1)
def validate_config(config, main_section):
    """Validate a parsed bugwarriorrc, dying with a helpful message on error.

    Checks the main section exists, configures twiggy logging from it, and
    verifies every declared target has a section naming a known service.

    :param config: loaded ConfigParser instance
    :param main_section: name of the main config section (e.g. 'general')
    """
    if not config.has_section(main_section):
        die("No [%s] section found." % main_section)
    twiggy.quickSetup(
        name2level(config.get(main_section, 'log.level')),
        config.get(main_section, 'log.file')
    )
    if not config.has_option(main_section, 'targets'):
        die("No targets= item in [%s] found." % main_section)
    targets = config.get(main_section, 'targets')
    # List comprehension instead of filter(): under Python 3 filter() returns
    # a lazy iterator, which would make `if not targets` always false and
    # leave nothing for the second iteration. Identical behavior on Python 2.
    targets = [t.strip() for t in targets.split(",") if t.strip()]
    if not targets:
        die("Empty targets= item in [%s]." % main_section)
    for target in targets:
        if target not in config.sections():
            die("No [%s] section found." % target)
    # Validate each target one by one.
    for target in targets:
        service = config.get(target, 'service')
        if not service:
            die("No 'service' in [%s]" % target)
        if service not in SERVICES:
            die("'%s' in [%s] is not a valid service." % (service, target))
        # Call the service-specific validator
        SERVICES[service].validate_config(config, target)
def load_config(main_section):
    """Locate, parse and validate the user's bugwarriorrc.

    Search order: XDG config dir, then legacy ~/.bugwarriorrc; if neither
    exists, the XDG path is used (and will fail to open if absent).

    :param main_section: name of the main config section (e.g. 'general')
    :return: validated ConfigParser instance
    """
    config = ConfigParser({'log.level': "DEBUG", 'log.file': None})
    path = None
    first_path = BaseDirectory.load_first_config('bugwarrior')
    if first_path is not None:
        path = os.path.join(first_path, 'bugwarriorrc')
    # Legacy location used before XDG support was added.
    old_path = os.path.expanduser("~/.bugwarriorrc")
    if path is None or not os.path.exists(path):
        if os.path.exists(old_path):
            path = old_path
        else:
            path = os.path.join(BaseDirectory.save_config_path('bugwarrior'), 'bugwarriorrc')
    # Read as UTF-8 regardless of the user's locale settings.
    config.readfp(
        codecs.open(
            path,
            "r",
            "utf-8",
        )
    )
    config.interactive = False # TODO: make this a command-line option
    validate_config(config, main_section)
    return config
def get_taskrc_path(conf, main_section):
    """Return the normalized, user-expanded path to the taskrc file.

    Defaults to ~/.taskrc unless the config overrides it with a 'taskrc'
    option in the main section.
    """
    if conf.has_option(main_section, 'taskrc'):
        raw_path = conf.get(main_section, 'taskrc')
    else:
        raw_path = '~/.taskrc'
    return os.path.normpath(
        os.path.expanduser(raw_path)
    )
from bugwarrior.services import SERVICES
|
from datetime import datetime, timedelta
import factory
import pytz
from factory.django import DjangoModelFactory
from factory.fuzzy import FuzzyText
from oauth2_provider.models import AccessToken, Application, RefreshToken
from openedx.core.djangoapps.oauth_dispatch.models import ApplicationAccess
from common.djangoapps.student.tests.factories import UserFactory
class ApplicationFactory(DjangoModelFactory):
    """Factory for confidential OAuth2 Application instances."""
    class Meta:
        model = Application
    user = factory.SubFactory(UserFactory)
    # Deterministic unique ids: client_0, client_1, ...
    client_id = factory.Sequence('client_{}'.format)
    client_secret = 'some_secret'
    client_type = 'confidential'
    authorization_grant_type = Application.CLIENT_CONFIDENTIAL
    name = FuzzyText(prefix='name', length=8)
class ApplicationAccessFactory(DjangoModelFactory):
    """Factory for ApplicationAccess rows with a default read-grades scope."""
    class Meta:
        model = ApplicationAccess
    application = factory.SubFactory(ApplicationFactory)
    scopes = ['grades:read']
class AccessTokenFactory(DjangoModelFactory):
    """Factory for OAuth2 AccessToken instances with a one-day expiry."""
    class Meta:
        model = AccessToken
        # Reuse an existing token for the same (user, application) pair.
        django_get_or_create = ('user', 'application')
    token = FuzzyText(length=32)
    # LazyFunction defers evaluation to instance-creation time. The original
    # computed `datetime.now() + 1 day` once at class-definition (import)
    # time, so tokens minted by a long-running process could already be near
    # or past expiry.
    expires = factory.LazyFunction(lambda: datetime.now(pytz.UTC) + timedelta(days=1))
class RefreshTokenFactory(DjangoModelFactory):
    """Factory for OAuth2 RefreshToken instances."""
    class Meta:
        model = RefreshToken
        # Reuse an existing token for the same (user, application) pair.
        django_get_or_create = ('user', 'application')
    token = FuzzyText(length=32)
|
from spack import *
class Sleef(CMakePackage):
"""SIMD Library for Evaluating Elementary Functions,
vectorized libm and DFT."""
homepage = "http://sleef.org"
url = "https://github.com/shibatch/sleef/archive/3.2.tar.gz"
version('3.2', '459215058f2c8d55cd2b644d56c8c4f0')
|
import httplib as http
import mock
from nose.tools import * # noqa
from boto.exception import S3ResponseError
from framework.auth import Auth
from tests.base import get_default_metaschema
from tests.factories import ProjectFactory, AuthUserFactory
from website.addons.base import testing
from website.addons.s3.tests.utils import S3AddonTestCase
from website.addons.s3.utils import validate_bucket_name, validate_bucket_location
from website.util import api_url_for
class TestS3Views(S3AddonTestCase, testing.views.OAuthAddonConfigViewsTestCaseMixin):
    """View-layer tests for the S3 addon: credential input, bucket
    configuration, and deauthorization. All AWS calls are mocked."""
    def setUp(self):
        # Patch the S3 utility helpers so no request ever reaches AWS.
        # NOTE(review): return_value is assigned on the *patcher* objects,
        # not on the mocks returned by .start(); the patched functions thus
        # return plain MagicMocks. The tests pass because those are truthy
        # -- verify before relying on the configured values.
        self.mock_can_list = mock.patch('website.addons.s3.views.utils.can_list')
        self.mock_can_list.return_value = True
        self.mock_can_list.start()
        self.mock_uid = mock.patch('website.addons.s3.views.utils.get_user_info')
        self.mock_uid.return_value = {'id': '1234567890', 'display_name': 's3.user'}
        self.mock_uid.start()
        self.mock_exists = mock.patch('website.addons.s3.views.utils.bucket_exists')
        self.mock_exists.return_value = True
        self.mock_exists.start()
        super(TestS3Views, self).setUp()
    def tearDown(self):
        # Stop every patcher started in setUp before base-class teardown.
        self.mock_can_list.stop()
        self.mock_uid.stop()
        self.mock_exists.stop()
        super(TestS3Views, self).tearDown()
    def test_s3_settings_input_empty_keys(self):
        # Both credential fields empty -> 400 with a required-fields message.
        url = self.project.api_url_for('s3_add_user_account')
        rv = self.app.post_json(url,{
            'access_key': '',
            'secret_key': ''
        }, auth=self.user.auth, expect_errors=True)
        assert_equals(rv.status_int, http.BAD_REQUEST)
        assert_in('All the fields above are required.', rv.body)
    def test_s3_settings_input_empty_access_key(self):
        # Missing access key alone is also rejected.
        url = self.project.api_url_for('s3_add_user_account')
        rv = self.app.post_json(url,{
            'access_key': '',
            'secret_key': 'Non-empty-secret-key'
        }, auth=self.user.auth, expect_errors=True)
        assert_equals(rv.status_int, http.BAD_REQUEST)
        assert_in('All the fields above are required.', rv.body)
    def test_s3_settings_input_empty_secret_key(self):
        # Missing secret key alone is also rejected.
        url = self.project.api_url_for('s3_add_user_account')
        rv = self.app.post_json(url,{
            'access_key': 'Non-empty-access-key',
            'secret_key': ''
        }, auth=self.user.auth, expect_errors=True)
        assert_equals(rv.status_int, http.BAD_REQUEST)
        assert_in('All the fields above are required.', rv.body)
    def test_s3_set_bucket_no_settings(self):
        # A contributor without the addon enabled cannot set a bucket.
        user = AuthUserFactory()
        self.project.add_contributor(user, save=True)
        url = self.project.api_url_for('s3_set_config')
        res = self.app.put_json(
            url, {'s3_bucket': 'hammertofall'}, auth=user.auth,
            expect_errors=True
        )
        assert_equal(res.status_code, http.BAD_REQUEST)
    def test_s3_set_bucket_no_auth(self):
        # Addon enabled but no S3 credentials linked -> forbidden.
        user = AuthUserFactory()
        user.add_addon('s3')
        self.project.add_contributor(user, save=True)
        url = self.project.api_url_for('s3_set_config')
        res = self.app.put_json(
            url, {'s3_bucket': 'hammertofall'}, auth=user.auth,
            expect_errors=True
        )
        assert_equal(res.status_code, http.FORBIDDEN)
    def test_s3_set_bucket_registered(self):
        # Registrations are immutable; configuring them is a bad request.
        registration = self.project.register_node(
            get_default_metaschema(), Auth(self.user), '', ''
        )
        url = registration.api_url_for('s3_set_config')
        res = self.app.put_json(
            url, {'s3_bucket': 'hammertofall'}, auth=self.user.auth,
            expect_errors=True,
        )
        assert_equal(res.status_code, http.BAD_REQUEST)
    @mock.patch('website.addons.s3.views.utils.can_list', return_value=False)
    def test_user_settings_cant_list(self, mock_can_list):
        # Credentials that cannot list buckets are rejected with 400.
        url = api_url_for('s3_add_user_account')
        rv = self.app.post_json(url, {
            'access_key': 'aldkjf',
            'secret_key': 'las'
        }, auth=self.user.auth, expect_errors=True)
        assert_equals(rv.status_int, http.BAD_REQUEST)
        assert_in('Unable to list buckets.', rv.body)
    def test_s3_remove_node_settings_owner(self):
        # Owner deauthorizes the node; serialized settings reflect it.
        url = self.node_settings.owner.api_url_for('s3_deauthorize_node')
        ret = self.app.delete(url, auth=self.user.auth)
        result = self.Serializer().serialize_settings(node_settings=self.node_settings, current_user=self.user)
        assert_equal(result['nodeHasAuth'], False)
    def test_s3_remove_node_settings_unauthorized(self):
        # Anonymous deauthorization attempt -> 401.
        url = self.node_settings.owner.api_url_for('s3_deauthorize_node')
        ret = self.app.delete(url, auth=None, expect_errors=True)
        assert_equal(ret.status_code, 401)
    def test_s3_get_node_settings_owner(self):
        # Owner fetches config; folder path mirrors the stored folder_id.
        self.node_settings.set_auth(self.external_account, self.user)
        self.node_settings.folder_id = 'bucket'
        self.node_settings.save()
        url = self.node_settings.owner.api_url_for('s3_get_config')
        res = self.app.get(url, auth=self.user.auth)
        result = res.json['result']
        assert_equal(result['nodeHasAuth'], True)
        assert_equal(result['userIsOwner'], True)
        assert_equal(result['folder']['path'], self.node_settings.folder_id)
    def test_s3_get_node_settings_unauthorized(self):
        # Non-contributor fetching config -> 403.
        url = self.node_settings.owner.api_url_for('s3_get_config')
        unauthorized = AuthUserFactory()
        ret = self.app.get(url, auth=unauthorized.auth, expect_errors=True)
        assert_equal(ret.status_code, 403)
    ## Overrides ##
    @mock.patch('website.addons.s3.model.get_bucket_names')
    def test_folder_list(self, mock_names):
        # Mixin override: stub the bucket listing before delegating.
        mock_names.return_value = ['bucket1', 'bucket2']
        super(TestS3Views, self).test_folder_list()
    @mock.patch('website.addons.s3.model.bucket_exists')
    @mock.patch('website.addons.s3.model.get_bucket_location_or_error')
    def test_set_config(self, mock_location, mock_exists):
        # Mixin override: stub existence/location checks, then verify the
        # config round-trip and the logged action.
        mock_exists.return_value = True
        mock_location.return_value = ''
        self.node_settings.set_auth(self.external_account, self.user)
        url = self.project.api_url_for('{0}_set_config'.format(self.ADDON_SHORT_NAME))
        res = self.app.put_json(url, {
            'selected': self.folder
        }, auth=self.user.auth)
        assert_equal(res.status_code, http.OK)
        self.project.reload()
        self.node_settings.reload()
        assert_equal(
            self.project.logs.latest().action,
            '{0}_bucket_linked'.format(self.ADDON_SHORT_NAME)
        )
        assert_equal(res.json['result']['folder']['name'], self.node_settings.folder_name)
class TestCreateBucket(S3AddonTestCase):
    """Tests for bucket name/location validation and the create-bucket view."""
    def setUp(self):
        # Project + contributor wired up with fake S3 credentials.
        super(TestCreateBucket, self).setUp()
        self.user = AuthUserFactory()
        self.consolidated_auth = Auth(user=self.user)
        self.auth = self.user.auth
        self.project = ProjectFactory(creator=self.user)
        self.project.add_addon('s3', auth=self.consolidated_auth)
        self.project.creator.add_addon('s3')
        self.user_settings = self.user.get_addon('s3')
        self.user_settings.access_key = 'We-Will-Rock-You'
        self.user_settings.secret_key = 'Idontknowanyqueensongs'
        self.user_settings.save()
        self.node_settings = self.project.get_addon('s3')
        self.node_settings.bucket = 'Sheer-Heart-Attack'
        self.node_settings.user_settings = self.project.creator.get_addon('s3')
        self.node_settings.save()
    def test_bad_names(self):
        # Names violating S3 bucket naming rules must all be rejected.
        assert_false(validate_bucket_name(''))
        assert_false(validate_bucket_name('no'))
        assert_false(validate_bucket_name('a' * 64))
        assert_false(validate_bucket_name(' leadingspace'))
        assert_false(validate_bucket_name('trailingspace '))
        assert_false(validate_bucket_name('bogus naMe'))
        assert_false(validate_bucket_name('.cantstartwithp'))
        assert_false(validate_bucket_name('or.endwith.'))
        assert_false(validate_bucket_name('..nodoubles'))
        assert_false(validate_bucket_name('no_unders_in'))
        assert_false(validate_bucket_name('-leadinghyphen'))
        assert_false(validate_bucket_name('trailinghyphen-'))
        assert_false(validate_bucket_name('Mixedcase'))
        assert_false(validate_bucket_name('empty..label'))
        assert_false(validate_bucket_name('label-.trailinghyphen'))
        assert_false(validate_bucket_name('label.-leadinghyphen'))
        assert_false(validate_bucket_name('8.8.8.8'))
        assert_false(validate_bucket_name('600.9000.0.28'))
        assert_false(validate_bucket_name('no_underscore'))
        assert_false(validate_bucket_name('_nounderscoreinfront'))
        assert_false(validate_bucket_name('no-underscore-in-back_'))
        assert_false(validate_bucket_name('no-underscore-in_the_middle_either'))
    def test_names(self):
        # Names conforming to the S3 rules must all be accepted.
        assert_true(validate_bucket_name('imagoodname'))
        assert_true(validate_bucket_name('still.passing'))
        assert_true(validate_bucket_name('can-have-dashes'))
        assert_true(validate_bucket_name('kinda.name.spaced'))
        assert_true(validate_bucket_name('a-o.valid'))
        assert_true(validate_bucket_name('11.12.m'))
        assert_true(validate_bucket_name('a--------a'))
        assert_true(validate_bucket_name('a' * 63))
    def test_bad_locations(self):
        # Unknown region identifiers are rejected.
        assert_false(validate_bucket_location('Venus'))
        assert_false(validate_bucket_location('AlphaCentari'))
        assert_false(validate_bucket_location('CostaRica'))
    def test_locations(self):
        # '' is the default (US) region; the rest are known AWS regions.
        assert_true(validate_bucket_location(''))
        assert_true(validate_bucket_location('us-east-2'))
        assert_true(validate_bucket_location('eu-central-1'))
        assert_true(validate_bucket_location('us-west-1'))
        assert_true(validate_bucket_location('us-west-2'))
        assert_true(validate_bucket_location('ap-northeast-1'))
        assert_true(validate_bucket_location('ap-northeast-2'))
        assert_true(validate_bucket_location('ap-southeast-1'))
        assert_true(validate_bucket_location('ap-southeast-2'))
        assert_true(validate_bucket_location('ap-south-1'))
        assert_true(validate_bucket_location('sa-east-1'))
        assert_true(validate_bucket_location('eu-west-1'))
    @mock.patch('website.addons.s3.views.utils.create_bucket')
    @mock.patch('website.addons.s3.views.utils.get_bucket_names')
    def test_create_bucket_pass(self, mock_names, mock_make):
        # Happy path: creation succeeds and the view returns an empty JSON.
        mock_make.return_value = True
        mock_names.return_value = [
            'butintheend',
            'it',
            'doesntevenmatter'
        ]
        url = self.project.api_url_for('create_bucket')
        ret = self.app.post_json(
            url,
            {
                'bucket_name': 'doesntevenmatter',
                'bucket_location': '',
            },
            auth=self.user.auth
        )
        assert_equal(ret.status_int, http.OK)
        assert_equal(ret.json, {})
    @mock.patch('website.addons.s3.views.utils.create_bucket')
    def test_create_bucket_fail(self, mock_make):
        # An S3ResponseError from boto surfaces as a JSON error payload.
        error = S3ResponseError(418, 'because Im a test')
        error.message = 'This should work'
        mock_make.side_effect = error
        url = "/api/v1/project/{0}/s3/newbucket/".format(self.project._id)
        ret = self.app.post_json(url, {'bucket_name': 'doesntevenmatter'}, auth=self.user.auth, expect_errors=True)
        assert_equals(ret.body, '{"message": "This should work", "title": "Problem connecting to S3"}')
    @mock.patch('website.addons.s3.views.utils.create_bucket')
    def test_bad_location_fails(self, mock_make):
        # Invalid region is rejected before create_bucket is ever called.
        url = "/api/v1/project/{0}/s3/newbucket/".format(self.project._id)
        ret = self.app.post_json(
            url,
            {
                'bucket_name': 'doesntevenmatter',
                'bucket_location': 'not a real bucket location',
            },
            auth=self.user.auth,
            expect_errors=True)
        assert_equals(ret.body, '{"message": "That bucket location is not valid.", "title": "Invalid bucket location"}')
|
import os
import time
import unittest
from multiprocessing import Process
import signal
import numpy
import paddle.fluid as fluid
import paddle.fluid.layers as layers
from paddle.fluid.layers.io import ListenAndServ
from paddle.fluid.layers.io import Recv
from paddle.fluid.layers.io import Send
import paddle.fluid.layers.ops as ops
from paddle.fluid.transpiler.details import program_to_code
class TestProgram2Code(unittest.TestCase):
    """Smoke test: build distributed send/recv Fluid programs and print them
    back as code via program_to_code."""
    def test_print(self):
        place = fluid.CPUPlace()
        self.init_serv(place)
        self.init_client(place, 9123)
    def init_serv(self, place):
        """Build (and print) a ListenAndServ program that scales X by 10."""
        main = fluid.Program()
        with fluid.program_guard(main):
            serv = ListenAndServ("127.0.0.1:0", ["X"], optimizer_mode=False)
            with serv.do():
                out_var = main.global_block().create_var(
                    name="scale_0.tmp_0",
                    # Bug fix: was misspelled 'psersistable', so the kwarg was
                    # silently ignored and the var was not marked persistable.
                    persistable=True,
                    dtype="float32",
                    shape=[32, 32])
                x = layers.data(
                    shape=[32, 32],
                    dtype='float32',
                    name="X",
                    append_batch_size=False)
                fluid.initializer.Constant(value=1.0)(x, main.global_block())
                ops._scale(x=x, scale=10.0, out=out_var)
        program_to_code(main)
    def init_client(self, place, port):
        """Build (and print) the client program that sends X and receives
        the scaled result from the server."""
        main = fluid.Program()
        with fluid.program_guard(main):
            x = layers.data(
                shape=[32, 32],
                dtype='float32',
                name='X',
                append_batch_size=False)
            fluid.initializer.Constant(value=2.3)(x, main.global_block())
            get_var = main.global_block().create_var(
                name="scale_0.tmp_0",  # server side var
                dtype="float32",
                persistable=False,
                shape=[32, 32])
            fluid.initializer.Constant(value=2.3)(get_var, main.global_block())
            Send("127.0.0.1:%d" % port, [x])
            o = Recv("127.0.0.1:%d" % port, [get_var])
        program_to_code(main)
# Allow running this test module directly.
if __name__ == "__main__":
    unittest.main()
|
import re
from django import template
from django import forms
# Library instance collecting this module's template filters.
register = template.Library()
def _process_field_attributes(field, attr, process):
    """Wrap *field*.as_widget so *process* can mutate the render attrs.

    :param field: BoundField whose rendering is being customized
    :param attr: "name:value" string (the ":value" part is optional)
    :param process: callback(widget, attrs, attribute, value) editing attrs
    :return: the same field, with its as_widget method replaced in place
    """
    # split attribute name and value from 'attr:value' string
    params = attr.split(':', 1)
    attribute = params[0]
    value = params[1] if len(params) == 2 else ''
    # decorate field.as_widget method with updated attributes
    old_as_widget = field.as_widget
    def as_widget(self, widget=None, attrs=None, only_initial=False):
        attrs = attrs or {}
        process(widget or self.field.widget, attrs, attribute, value)
        return old_as_widget(widget, attrs, only_initial)
    # Rebind the wrapper as a bound method on this specific field instance;
    # the Python 2 constructor takes (func, obj, cls), Python 3 (func, obj).
    bound_method = type(old_as_widget)
    try:
        field.as_widget = bound_method(as_widget, field, field.__class__)
    except TypeError: # python 3
        field.as_widget = bound_method(as_widget, field)
    return field
@register.filter
def subtract(value, arg):
    """Template filter: return ``value - arg``."""
    difference = value - arg
    return difference
@register.filter
def modulo(num, val):
    """Template filter: return ``num % val``."""
    remainder = num % val
    return remainder
@register.filter
def addcss(field, attr):
    """Template filter: append an attribute value (e.g. a CSS class) to a
    form field's widget, merging with any value already present."""
    def process(widget, attrs, attribute, value):
        existing = attrs.get(attribute)
        if existing:
            # Value already set for this render: append to it.
            attrs[attribute] = existing + ' ' + value
            return
        inherited = widget.attrs.get(attribute)
        if inherited:
            # Widget declared its own value: merge with it.
            attrs[attribute] = inherited + ' ' + value
        else:
            attrs[attribute] = value
    return _process_field_attributes(field, attr, process)
@register.filter
def is_checkbox(field):
    """Template filter: True when the field renders as a single checkbox."""
    widget = field.field.widget
    return isinstance(widget, forms.CheckboxInput)
@register.filter
def is_multiple_checkbox(field):
    """Template filter: True when the field renders as a checkbox group."""
    widget = field.field.widget
    return isinstance(widget, forms.CheckboxSelectMultiple)
@register.filter
def is_radio(field):
    """Template filter: True when the field renders as radio buttons."""
    widget = field.field.widget
    return isinstance(widget, forms.RadioSelect)
@register.filter
def is_file(field):
    """Template filter: True when the field renders as a file input
    (clearable or not)."""
    # Bug fix: the original's second check was isinstance(field, ...) on the
    # BoundField itself, which is never a widget and so was always False.
    # Test the widget for both input types (ClearableFileInput subclasses
    # FileInput, so this is also future-proof).
    widget = field.field.widget
    return isinstance(widget, (forms.FileInput, forms.ClearableFileInput))
@register.filter
def sum_dict(d):
    """Template filter: return the sum of all values in mapping *d*.

    Returns 0 for an empty mapping, matching the original accumulator loop.
    """
    return sum(d.values())
@register.filter
def nice_title(title):
    """Template filter: strip a leading 'Finding NNN:' prefix from *title*.

    Returns the title unchanged when no such prefix is present.
    """
    pat = re.compile(r'Finding [0-9][0-9][0-9]:*')
    parts = pat.split(title, 2)
    try:
        return parts[1]
    # Bug fix: narrowed from a bare `except:` (which also swallowed
    # SystemExit/KeyboardInterrupt) to the only expected failure.
    except IndexError:
        return title
@register.filter
def pad_zeroes(num):
    """Template filter: zero-pad *num* to at least three characters."""
    text = str(num)
    return text.zfill(3)
@register.filter
def hash(h, key):
    """Template filter: look up *key* in mapping *h* (``h[key]``)."""
    # NOTE: the filter name shadows the builtin hash() inside this module.
    return h[key]
@register.filter
def getZero(h, key):
    """Template filter: return element 0 of the sequence at ``h[key]``."""
    sequence = h[key]
    return sequence[0]
@register.filter
def getOne(h, key):
    """Template filter: return element 1 of the sequence at ``h[key]``."""
    sequence = h[key]
    return sequence[1]
@register.filter
def getTwo(h, key):
    """Template filter: return element 2 of the sequence at ``h[key]``."""
    sequence = h[key]
    return sequence[2]
@register.filter
def getThree(h, key):
    """Template filter: return element 3 of the sequence at ``h[key]``."""
    sequence = h[key]
    return sequence[3]
@register.filter
def getFour(h, key):
    """Template filter: return element 4 of the sequence at ``h[key]``."""
    sequence = h[key]
    return sequence[4]
|
import mock
import six
from designateclient import exceptions as designate_exceptions
from designateclient import v1 as designate_client
from heat.common import exception as heat_exception
from heat.engine.clients.os import designate as client
from heat.tests import common
class DesignateDomainConstraintTest(common.HeatTestCase):
    """Tests for the DesignateDomainConstraint custom constraint."""
    def test_expected_exceptions(self):
        # EntityNotFound is the only exception translated into a
        # constraint-validation failure.
        self.assertEqual((heat_exception.EntityNotFound,),
                         client.DesignateDomainConstraint.expected_exceptions,
                         "DesignateDomainConstraint expected exceptions error")
    def test_constrain(self):
        # validate_with_client returns None on success and delegates the
        # lookup to the client plugin's get_domain_id.
        constrain = client.DesignateDomainConstraint()
        client_mock = mock.MagicMock()
        client_plugin_mock = mock.MagicMock()
        client_plugin_mock.get_domain_id.return_value = None
        client_mock.client_plugin.return_value = client_plugin_mock
        self.assertIsNone(constrain.validate_with_client(client_mock,
                                                         'domain_1'))
        client_plugin_mock.get_domain_id.assert_called_once_with('domain_1')
class DesignateClientPluginTest(common.HeatTestCase):
    """Client-construction tests for DesignateClientPlugin (v1 API)."""
    @mock.patch.object(designate_client, 'Client')
    @mock.patch.object(client.DesignateClientPlugin, '_get_client_args')
    def test_client(self,
                    get_client_args,
                    client_designate):
        # Canned keystone-derived arguments; token is a callable, mirroring
        # the real plugin, and is expected to be *called* before being
        # forwarded to the designate Client.
        args = dict(
            auth_url='auth_url',
            project_id='project_id',
            token=lambda: '',
            os_endpoint='os_endpoint',
            cacert='cacert',
            insecure='insecure'
        )
        get_client_args.return_value = args
        client_plugin = client.DesignateClientPlugin(
            context=mock.MagicMock()
        )
        client_plugin.client()
        # Make sure the right args are created
        get_client_args.assert_called_once_with(
            service_name='designate',
            service_type='dns'
        )
        # Make sure proper client is created with expected args
        client_designate.assert_called_once_with(
            auth_url='auth_url',
            project_id='project_id',
            token='',
            endpoint='os_endpoint',
            cacert='cacert',
            insecure='insecure'
        )
class DesignateClientPluginDomainTest(common.HeatTestCase):
    """Domain lookup/create/update tests for DesignateClientPlugin."""
    sample_uuid = '477e8273-60a7-4c41-b683-fdb0bc7cd152'
    sample_name = 'test-domain.com'
    def _get_mock_domain(self):
        # Mock domain carrying the sample id and name.
        domain = mock.MagicMock()
        domain.id = self.sample_uuid
        domain.name = self.sample_name
        return domain
    def setUp(self):
        super(DesignateClientPluginDomainTest, self).setUp()
        self._client = mock.MagicMock()
        self.client_plugin = client.DesignateClientPlugin(
            context=mock.MagicMock()
        )
    @mock.patch.object(client.DesignateClientPlugin, 'client')
    def test_get_domain_id(self, client_designate):
        # Lookup by UUID resolves directly via domains.get.
        self._client.domains.get.return_value = self._get_mock_domain()
        client_designate.return_value = self._client
        self.assertEqual(self.sample_uuid,
                         self.client_plugin.get_domain_id(self.sample_uuid))
        self._client.domains.get.assert_called_once_with(
            self.sample_uuid)
    @mock.patch.object(client.DesignateClientPlugin, 'client')
    def test_get_domain_id_not_found(self, client_designate):
        # NotFound from designate is translated to heat's EntityNotFound.
        self._client.domains.get.side_effect = (designate_exceptions
                                                .NotFound)
        client_designate.return_value = self._client
        ex = self.assertRaises(heat_exception.EntityNotFound,
                               self.client_plugin.get_domain_id,
                               self.sample_uuid)
        msg = ("The Designate Domain (%(name)s) could not be found." %
               {'name': self.sample_uuid})
        self.assertEqual(msg, six.text_type(ex))
        self._client.domains.get.assert_called_once_with(
            self.sample_uuid)
    @mock.patch.object(client.DesignateClientPlugin, 'client')
    def test_get_domain_id_by_name(self, client_designate):
        # When the UUID lookup fails, the plugin falls back to matching
        # the domain name against domains.list().
        self._client.domains.get.side_effect = (designate_exceptions
                                                .NotFound)
        self._client.domains.list.return_value = [self._get_mock_domain()]
        client_designate.return_value = self._client
        self.assertEqual(self.sample_uuid,
                         self.client_plugin.get_domain_id(self.sample_name))
        self._client.domains.get.assert_called_once_with(
            self.sample_name)
        self._client.domains.list.assert_called_once_with()
    @mock.patch.object(client.DesignateClientPlugin, 'client')
    def test_get_domain_id_by_name_not_found(self, client_designate):
        # Neither UUID nor name matches -> EntityNotFound.
        self._client.domains.get.side_effect = (designate_exceptions
                                                .NotFound)
        self._client.domains.list.return_value = []
        client_designate.return_value = self._client
        ex = self.assertRaises(heat_exception.EntityNotFound,
                               self.client_plugin.get_domain_id,
                               self.sample_name)
        msg = ("The Designate Domain (%(name)s) could not be found." %
               {'name': self.sample_name})
        self.assertEqual(msg, six.text_type(ex))
        self._client.domains.get.assert_called_once_with(
            self.sample_name)
        self._client.domains.list.assert_called_once_with()
    @mock.patch.object(client.DesignateClientPlugin, 'client')
    @mock.patch('designateclient.v1.domains.Domain')
    def test_domain_create(self, mock_domain, client_designate):
        self._client.domains.create.return_value = None
        client_designate.return_value = self._client
        domain = dict(
            name='test-domain.com',
            description='updated description',
            ttl=4200,
            email='xyz@test-domain.com'
        )
        mock_sample_domain = mock.Mock()
        mock_domain.return_value = mock_sample_domain
        self.client_plugin.domain_create(**domain)
        # Make sure domain entity is created with right arguments
        mock_domain.assert_called_once_with(**domain)
        self._client.domains.create.assert_called_once_with(
            mock_sample_domain)
    @mock.patch.object(client.DesignateClientPlugin, 'client')
    def test_domain_update(self, client_designate):
        self._client.domains.update.return_value = None
        mock_domain = self._get_mock_domain()
        self._client.domains.get.return_value = mock_domain
        client_designate.return_value = self._client
        domain = dict(
            id='sample-id',
            description='updated description',
            ttl=4200,
            email='xyz@test-domain.com'
        )
        self.client_plugin.domain_update(**domain)
        self._client.domains.get.assert_called_once_with(
            mock_domain.id)
        # Mirror the attribute updates the plugin is expected to apply
        # before asserting it passed the mutated domain to update().
        for key in domain.keys():
            setattr(mock_domain, key, domain[key])
        self._client.domains.update.assert_called_once_with(
            mock_domain)
class DesignateClientPluginRecordTest(common.HeatTestCase):
    """Record CRUD tests for DesignateClientPlugin (v1 API)."""
    sample_uuid = '477e8273-60a7-4c41-b683-fdb0bc7cd152'
    sample_domain_id = '477e8273-60a7-4c41-b683-fdb0bc7cd153'
    def _get_mock_record(self):
        # Mock record carrying the sample record/domain ids.
        record = mock.MagicMock()
        record.id = self.sample_uuid
        record.domain_id = self.sample_domain_id
        return record
    def setUp(self):
        super(DesignateClientPluginRecordTest, self).setUp()
        self._client = mock.MagicMock()
        self.client_plugin = client.DesignateClientPlugin(
            context=mock.MagicMock()
        )
        # Domain resolution is stubbed out for every record operation.
        self.client_plugin.get_domain_id = mock.Mock(
            return_value=self.sample_domain_id)
    @mock.patch.object(client.DesignateClientPlugin, 'client')
    @mock.patch('designateclient.v1.records.Record')
    def test_record_create(self, mock_record, client_designate):
        self._client.records.create.return_value = None
        client_designate.return_value = self._client
        record = dict(
            name='test-record.com',
            description='updated description',
            ttl=4200,
            type='',
            priority=1,
            data='1.1.1.1',
            domain=self.sample_domain_id
        )
        mock_sample_record = mock.Mock()
        mock_record.return_value = mock_sample_record
        self.client_plugin.record_create(**record)
        # Make sure record entity is created with right arguments
        domain_id = record.pop('domain')
        mock_record.assert_called_once_with(**record)
        self._client.records.create.assert_called_once_with(
            domain_id,
            mock_sample_record)
    @mock.patch.object(client.DesignateClientPlugin, 'client')
    @mock.patch('designateclient.v1.records.Record')
    def test_record_update(self, mock_record, client_designate):
        self._client.records.update.return_value = None
        # NOTE(review): the decorator-injected mock_record parameter is
        # immediately shadowed here, so the Record-class patch is unused
        # in this test -- the patch decorator could likely be dropped.
        mock_record = self._get_mock_record()
        self._client.records.get.return_value = mock_record
        client_designate.return_value = self._client
        record = dict(
            id=self.sample_uuid,
            name='test-record.com',
            description='updated description',
            ttl=4200,
            type='',
            priority=1,
            data='1.1.1.1',
            domain=self.sample_domain_id
        )
        self.client_plugin.record_update(**record)
        self._client.records.get.assert_called_once_with(
            self.sample_domain_id,
            self.sample_uuid)
        # Mirror the attribute updates the plugin applies before asserting
        # it passed the mutated record to update().
        for key in record.keys():
            setattr(mock_record, key, record[key])
        self._client.records.update.assert_called_once_with(
            self.sample_domain_id,
            mock_record)
    @mock.patch.object(client.DesignateClientPlugin, 'client')
    @mock.patch('designateclient.v1.records.Record')
    def test_record_delete(self, mock_record, client_designate):
        self._client.records.delete.return_value = None
        client_designate.return_value = self._client
        record = dict(
            id=self.sample_uuid,
            domain=self.sample_domain_id
        )
        self.client_plugin.record_delete(**record)
        self._client.records.delete.assert_called_once_with(
            self.sample_domain_id,
            self.sample_uuid)
    @mock.patch.object(client.DesignateClientPlugin, 'client')
    @mock.patch('designateclient.v1.records.Record')
    def test_record_show(self, mock_record, client_designate):
        self._client.records.get.return_value = None
        client_designate.return_value = self._client
        record = dict(
            id=self.sample_uuid,
            domain=self.sample_domain_id
        )
        self.client_plugin.record_show(**record)
        self._client.records.get.assert_called_once_with(
            self.sample_domain_id,
            self.sample_uuid)
|
from __future__ import absolute_import, division, print_function
from lxml import etree
import os
def open_xml_file(filename, mode):
    """Opens an XML fixture file for use.

    :param filename: XML file name inside the xml_test_files directory
    :param mode: file mode for open
    :return: open file object
    """
    # Build the fixture path portably via os.path.join instead of
    # concatenating '/' into the directory string.
    base = os.path.join(os.path.dirname(__file__), 'xml_test_files')
    return open(os.path.join(base, filename), mode)
def create_node_from_file(filename):
    """Creates an xml node (element tree) from a given XML fixture file.

    :param filename: XML file to create the node from
    :return: parsed node
    """
    # Close the file handle deterministically; the original left it open
    # until garbage collection.
    with open_xml_file(filename, 'r') as xml_file:
        return etree.parse(xml_file)
|
class EmptyResult(object):
    '''
    Null Object pattern to prevent Null reference errors
    when there is no result
    '''
    def __init__(self):
        # Neutral defaults mirroring an HTTP-result-like object.
        self.status = 0
        self.body = ''
        self.msg = ''
        self.reason = ''
    def __nonzero__(self):
        return False
    # Bug fix: __nonzero__ is Python 2 only; without this alias an
    # EmptyResult is truthy on Python 3. Alias keeps both versions falsy.
    __bool__ = __nonzero__
class HapiError(ValueError):
    """Any problems get thrown as HapiError exceptions with the relevant info inside"""
    # Template used by __unicode__ to render the full request/response
    # context of a failed API call for debugging.
    as_str_template = u'''
---- request ----
{method} {host}{url}, [timeout={timeout}]
---- body ----
{body}
---- headers ----
{headers}
---- result ----
{result_status}
---- body -----
{result_body}
---- headers -----
{result_headers}
---- reason ----
{result_reason}
---- trigger error ----
{error}
'''
    def __init__(self, result, request, err=None):
        # The exception message is the HTTP reason phrase when available.
        super(HapiError,self).__init__(result and result.reason or "Unknown Reason")
        if result == None:
            # Fall back to the Null Object so attribute access never fails.
            self.result = EmptyResult()
        else:
            self.result = result
        if request == None:
            request = {}
        self.request = request
        self.err = err
    def __str__(self):
        # Python 2 str(): ASCII-fold the unicode rendering.
        return self.__unicode__().encode('ascii', 'replace')
    def __unicode__(self):
        # Collect request fields and result attributes, then fill in the
        # template; missing values render as None or ''.
        params = {}
        request_keys = ('method', 'host', 'url', 'data', 'headers', 'timeout', 'body')
        result_attrs = ('status', 'reason', 'msg', 'body', 'headers')
        params['error'] = self.err
        for key in request_keys:
            params[key] = self.request.get(key)
        for attr in result_attrs:
            params['result_%s' % attr] = getattr(self.result, attr, '')
        params = self._dict_vals_to_unicode(params)
        return self.as_str_template.format(**params)
    def _dict_vals_to_unicode(self, data):
        # Coerce every value to unicode (Python 2 types) so .format() on the
        # unicode template cannot raise on byte strings or other objects.
        unicode_data = {}
        for key, val in data.items():
            if not isinstance(val, basestring):
                unicode_data[key] = unicode(val)
            elif not isinstance(val, unicode):
                unicode_data[key] = unicode(val, 'utf8', 'ignore')
            else:
                unicode_data[key] = val
        return unicode_data
class HapiBadRequest(HapiError):
    '''Error wrapper for most 40X results and 501 results (client errors)'''
class HapiNotFound(HapiError):
    '''Error wrapper for 404 (not found) and 410 (gone) results'''
class HapiTimeout(HapiError):
    '''Wrapper for socket timeouts, sslerror, and 504 gateway timeouts'''
class HapiUnauthorized(HapiError):
    '''Wrapper for 401 Unauthorized errors'''
class HapiServerError(HapiError):
    '''Wrapper for most 500 (server-side) errors'''
|
from django.shortcuts import render_to_response
from django.template.context import RequestContext
from help.models import ConditionsChapter, FAQ
def faqs(request):
    """Render the FAQ listing page with every FAQ entry."""
    context = {'faqs': FAQ.objects.all()}
    return render_to_response('help/faqs.html',
                              context,
                              context_instance=RequestContext(request))
def terms(request):
    """Render the terms-and-conditions page from ConditionsChapter entries."""
    context = {'termsandconditions': ConditionsChapter.objects.all()}
    return render_to_response('help/terms-and-conditions.html',
                              context,
                              context_instance=RequestContext(request))
def about(request):
    """Render the static about page (no extra context needed)."""
    return render_to_response('help/about.html',
                              {},
                              context_instance=RequestContext(request))
|
from __future__ import (absolute_import, division, generators, nested_scopes, print_function,
unicode_literals, with_statement)
import os
from pants.backend.jvm.tasks.jvm_tool_task_mixin import JvmToolTaskMixin
from pants.base.exceptions import TaskError
from pants.java import util
from pants.java.executor import SubprocessExecutor
from pants.java.jar.jar_dependency import JarDependency
from pants.java.nailgun_executor import NailgunExecutor, NailgunProcessGroup
from pants.pantsd.subsystem.subprocess import Subprocess
from pants.task.task import Task, TaskBase
class NailgunTaskBase(JvmToolTaskMixin, TaskBase):
  """Base for JVM tasks that can run their java tools through a persistent
  nailgun server to amortize JVM startup cost across invocations."""
  # Prefix used to build this task's nailgun identity string.
  ID_PREFIX = 'ng'
  @classmethod
  def register_options(cls, register):
    super(NailgunTaskBase, cls).register_options(register)
    register('--use-nailgun', type=bool, default=True,
             help='Use nailgun to make repeated invocations of this task quicker.')
    register('--nailgun-timeout-seconds', advanced=True, default=10, type=float,
             help='Timeout (secs) for nailgun startup.')
    register('--nailgun-connect-attempts', advanced=True, default=5, type=int,
             help='Max attempts for nailgun connects.')
    cls.register_jvm_tool(register,
                          'nailgun-server',
                          classpath=[
                            JarDependency(org='com.martiansoftware',
                                          name='nailgun-server',
                                          rev='0.9.1'),
                          ])
  @classmethod
  def subsystem_dependencies(cls):
    return super(NailgunTaskBase, cls).subsystem_dependencies() + (Subprocess.Factory,)
  def __init__(self, *args, **kwargs):
    """
    :API: public
    """
    super(NailgunTaskBase, self).__init__(*args, **kwargs)
    # Identity/workdir are derived from the concrete subclass name, so each
    # task type gets its own nailgun server instance.
    id_tuple = (self.ID_PREFIX, self.__class__.__name__)
    self._identity = '_'.join(id_tuple)
    self._executor_workdir = os.path.join(self.context.options.for_global_scope().pants_workdir,
                                          *id_tuple)
  def create_java_executor(self):
    """Create java executor that uses this task's ng daemon, if allowed.

    Call only in execute() or later. TODO: Enforce this.
    """
    if self.get_options().use_nailgun:
      classpath = os.pathsep.join(self.tool_classpath('nailgun-server'))
      return NailgunExecutor(self._identity,
                             self._executor_workdir,
                             classpath,
                             self.dist,
                             connect_timeout=self.get_options().nailgun_timeout_seconds,
                             connect_attempts=self.get_options().nailgun_connect_attempts)
    else:
      return SubprocessExecutor(self.dist)
  def runjava(self, classpath, main, jvm_options=None, args=None, workunit_name=None,
              workunit_labels=None, workunit_log_config=None):
    """Runs the java main using the given classpath and args.

    If --no-use-nailgun is specified then the java main is run in a freshly spawned subprocess,
    otherwise a persistent nailgun server dedicated to this Task subclass is used to speed up
    amortized run times.

    :API: public
    """
    executor = self.create_java_executor()
    # Creating synthetic jar to work around system arg length limit is not necessary
    # when `NailgunExecutor` is used because args are passed through socket, therefore turning off
    # creating synthetic jar if nailgun is used.
    create_synthetic_jar = not self.get_options().use_nailgun
    try:
      return util.execute_java(classpath=classpath,
                               main=main,
                               jvm_options=jvm_options,
                               args=args,
                               executor=executor,
                               workunit_factory=self.context.new_workunit,
                               workunit_name=workunit_name,
                               workunit_labels=workunit_labels,
                               workunit_log_config=workunit_log_config,
                               create_synthetic_jar=create_synthetic_jar,
                               synthetic_jar_dir=self._executor_workdir)
    except executor.Error as e:
      # Normalize executor failures into the standard pants task failure.
      raise TaskError(e)
class NailgunTask(NailgunTaskBase, Task):
  """A concrete Task with nailgun support mixed in.

  :API: public
  """
class NailgunKillall(Task):
  """Kill running nailgun servers."""

  @classmethod
  def register_options(cls, register):
    """Register the --everywhere option controlling the kill scope."""
    super(NailgunKillall, cls).register_options(register)
    # Fixed typo in the user-facing help text: "nailguns servers" -> "nailgun servers".
    register('--everywhere', type=bool,
             help='Kill all nailgun servers launched by pants for all workspaces on the system.')

  def execute(self):
    # Delegates the actual process termination to NailgunProcessGroup.
    NailgunProcessGroup().killall(everywhere=self.get_options().everywhere)
|
"""Tests for supervisor.py."""
from __future__ import absolute_import
from __future__ import division
from __future__ import print_function
import glob
import os
import shutil
import time
import uuid
from six.moves import xrange # pylint: disable=redefined-builtin
import tensorflow as tf
from tensorflow.core.protobuf import meta_graph_pb2
from tensorflow.python.framework import meta_graph
def _summary_iterator(test_dir):
  """Reads events from test_dir/events.

  Args:
    test_dir: Name of the test directory.

  Returns:
    A summary_iterator over the lexicographically last events file.
  """
  pattern = os.path.join(test_dir, "event*")
  event_paths = sorted(glob.glob(pattern))
  return tf.train.summary_iterator(event_paths[-1])
def _test_dir(test_name):
  """Return a per-test directory path under the temp dir, removing any leftovers."""
  path = os.path.join(tf.test.get_temp_dir(), test_name)
  if os.path.exists(path):
    shutil.rmtree(path)
  return path
class SupervisorTest(tf.test.TestCase):
  """Tests for tf.train.Supervisor: managed sessions, summary/checkpoint
  writing, init ops, and interaction of chief vs. non-chief supervisors."""

  def _wait_for_glob(self, pattern, timeout_secs, for_checkpoint=True):
    """Wait for a checkpoint file to appear.
    Args:
      pattern: A string.
      timeout_secs: How long to wait for in seconds.
      for_checkpoint: whether we're globbing for checkpoints.
    """
    end_time = time.time() + timeout_secs
    while time.time() < end_time:
      if for_checkpoint:
        if tf.train.checkpoint_exists(pattern):
          return
      else:
        if len(tf.gfile.Glob(pattern)) >= 1:
          return
      time.sleep(0.05)
    self.assertFalse(True, "Glob never matched any file: %s" % pattern)

  # This test does not test much.
  def testBasics(self):
    logdir = _test_dir("basics")
    with tf.Graph().as_default():
      my_op = tf.constant(1.0)
      sv = tf.train.Supervisor(logdir=logdir)
      sess = sv.prepare_or_wait_for_session("")
      for _ in xrange(10):
        sess.run(my_op)
      sess.close()
      sv.stop()

  def testManagedSession(self):
    logdir = _test_dir("managed_session")
    with tf.Graph().as_default():
      my_op = tf.constant(1.0)
      sv = tf.train.Supervisor(logdir=logdir)
      with sv.managed_session("") as sess:
        for _ in xrange(10):
          sess.run(my_op)
      # Supervisor has been stopped.
      self.assertTrue(sv.should_stop())

  def testManagedSessionUserError(self):
    logdir = _test_dir("managed_user_error")
    with tf.Graph().as_default():
      my_op = tf.constant(1.0)
      sv = tf.train.Supervisor(logdir=logdir)
      last_step = None
      with self.assertRaisesRegexp(RuntimeError, "failing here"):
        with sv.managed_session("") as sess:
          for step in xrange(10):
            last_step = step
            if step == 1:
              raise RuntimeError("failing here")
            else:
              sess.run(my_op)
      # Supervisor has been stopped.
      self.assertTrue(sv.should_stop())
      self.assertEqual(1, last_step)

  def testManagedSessionIgnoreOutOfRangeError(self):
    logdir = _test_dir("managed_out_of_range")
    with tf.Graph().as_default():
      my_op = tf.constant(1.0)
      sv = tf.train.Supervisor(logdir=logdir)
      last_step = None
      with sv.managed_session("") as sess:
        for step in xrange(10):
          last_step = step
          if step == 3:
            raise tf.errors.OutOfRangeError(my_op.op.node_def, my_op.op,
                                            "all done")
          else:
            sess.run(my_op)
      # Supervisor has been stopped.  OutOfRangeError was not thrown.
      self.assertTrue(sv.should_stop())
      self.assertEqual(3, last_step)

  def testManagedSessionDoNotKeepSummaryWriter(self):
    logdir = _test_dir("managed_not_keep_summary_writer")
    with tf.Graph().as_default():
      tf.summary.scalar("c1", tf.constant(1))
      tf.summary.scalar("c2", tf.constant(2))
      tf.summary.scalar("c3", tf.constant(3))
      summ = tf.summary.merge_all()
      sv = tf.train.Supervisor(logdir=logdir, summary_op=None)
      with sv.managed_session("", close_summary_writer=True,
                              start_standard_services=False) as sess:
        sv.summary_computed(sess, sess.run(summ))
      # Sleep 1.2s to make sure that the next event file has a different name
      # than the current one.
      time.sleep(1.2)
      with sv.managed_session("", close_summary_writer=True,
                              start_standard_services=False) as sess:
        sv.summary_computed(sess, sess.run(summ))
      event_paths = sorted(glob.glob(os.path.join(logdir, "event*")))
      self.assertEquals(2, len(event_paths))
      # The two event files should have the same contents.
      for path in event_paths:
        # The summary iterator should report the summary once as we closed the
        # summary writer across the 2 sessions.
        rr = tf.train.summary_iterator(path)
        # The first event should list the file_version.
        ev = next(rr)
        self.assertEquals("brain.Event:2", ev.file_version)
        # The next one has the graph and metagraph.
        ev = next(rr)
        self.assertTrue(ev.graph_def)
        ev = next(rr)
        self.assertTrue(ev.meta_graph_def)
        # The next one should have the values from the summary.
        # But only once.
        ev = next(rr)
        self.assertProtoEquals("""
          value { tag: 'c1' simple_value: 1.0 }
          value { tag: 'c2' simple_value: 2.0 }
          value { tag: 'c3' simple_value: 3.0 }
          """, ev.summary)
        # The next one should be a stop message if we closed cleanly.
        ev = next(rr)
        self.assertEquals(tf.SessionLog.STOP, ev.session_log.status)
        # We should be done.
        with self.assertRaises(StopIteration):
          next(rr)

  def testManagedSessionKeepSummaryWriter(self):
    logdir = _test_dir("managed_keep_summary_writer")
    with tf.Graph().as_default():
      tf.summary.scalar("c1", tf.constant(1))
      tf.summary.scalar("c2", tf.constant(2))
      tf.summary.scalar("c3", tf.constant(3))
      summ = tf.summary.merge_all()
      sv = tf.train.Supervisor(logdir=logdir)
      with sv.managed_session("", close_summary_writer=False,
                              start_standard_services=False) as sess:
        sv.summary_computed(sess, sess.run(summ))
      with sv.managed_session("", close_summary_writer=False,
                              start_standard_services=False) as sess:
        sv.summary_computed(sess, sess.run(summ))
      # Now close the summary writer to flush the events.
      sv.summary_writer.close()
      # The summary iterator should report the summary twice as we reused
      # the same summary writer across the 2 sessions.
      rr = _summary_iterator(logdir)
      # The first event should list the file_version.
      ev = next(rr)
      self.assertEquals("brain.Event:2", ev.file_version)
      # The next one has the graph.
      ev = next(rr)
      self.assertTrue(ev.graph_def)
      ev = next(rr)
      self.assertTrue(ev.meta_graph_def)
      # The next one should have the values from the summary.
      ev = next(rr)
      self.assertProtoEquals("""
        value { tag: 'c1' simple_value: 1.0 }
        value { tag: 'c2' simple_value: 2.0 }
        value { tag: 'c3' simple_value: 3.0 }
        """, ev.summary)
      # The next one should also have the values from the summary.
      ev = next(rr)
      self.assertProtoEquals("""
        value { tag: 'c1' simple_value: 1.0 }
        value { tag: 'c2' simple_value: 2.0 }
        value { tag: 'c3' simple_value: 3.0 }
        """, ev.summary)
      # We should be done.
      self.assertRaises(StopIteration, lambda: next(rr))

  def _csv_data(self, logdir):
    """Write a small CSV file with 3 records into logdir; return its path."""
    # Create a small data file with 3 CSV records.
    data_path = os.path.join(logdir, "data.csv")
    with open(data_path, "w") as f:
      f.write("1,2,3\n")
      f.write("4,5,6\n")
      f.write("7,8,9\n")
    return data_path

  def testManagedEndOfInputOneQueue(self):
    # Tests that the supervisor finishes without an error when using
    # a fixed number of epochs, reading from a single queue.
    logdir = _test_dir("managed_end_of_input_one_queue")
    os.makedirs(logdir)
    data_path = self._csv_data(logdir)
    with tf.Graph().as_default():
      # Create an input pipeline that reads the file 3 times.
      filename_queue = tf.train.string_input_producer([data_path], num_epochs=3)
      reader = tf.TextLineReader()
      _, csv = reader.read(filename_queue)
      rec = tf.decode_csv(csv, record_defaults=[[1], [1], [1]])
      sv = tf.train.Supervisor(logdir=logdir)
      with sv.managed_session("") as sess:
        while not sv.should_stop():
          sess.run(rec)

  def testManagedEndOfInputTwoQueues(self):
    # Tests that the supervisor finishes without an error when using
    # a fixed number of epochs, reading from two queues, the second
    # one producing a batch from the first one.
    logdir = _test_dir("managed_end_of_input_two_queues")
    os.makedirs(logdir)
    data_path = self._csv_data(logdir)
    with tf.Graph().as_default():
      # Create an input pipeline that reads the file 3 times.
      filename_queue = tf.train.string_input_producer([data_path], num_epochs=3)
      reader = tf.TextLineReader()
      _, csv = reader.read(filename_queue)
      rec = tf.decode_csv(csv, record_defaults=[[1], [1], [1]])
      shuff_rec = tf.train.shuffle_batch(rec, 1, 6, 4)
      sv = tf.train.Supervisor(logdir=logdir)
      with sv.managed_session("") as sess:
        while not sv.should_stop():
          sess.run(shuff_rec)

  def testManagedMainErrorTwoQueues(self):
    # Tests that the supervisor correctly raises a main loop
    # error even when using multiple queues for input.
    logdir = _test_dir("managed_main_error_two_queues")
    os.makedirs(logdir)
    data_path = self._csv_data(logdir)
    with self.assertRaisesRegexp(RuntimeError, "fail at step 3"):
      with tf.Graph().as_default():
        # Create an input pipeline that reads the file 3 times.
        filename_queue = tf.train.string_input_producer([data_path],
                                                        num_epochs=3)
        reader = tf.TextLineReader()
        _, csv = reader.read(filename_queue)
        rec = tf.decode_csv(csv, record_defaults=[[1], [1], [1]])
        shuff_rec = tf.train.shuffle_batch(rec, 1, 6, 4)
        sv = tf.train.Supervisor(logdir=logdir)
        with sv.managed_session("") as sess:
          for step in range(9):
            if sv.should_stop():
              break
            elif step == 3:
              raise RuntimeError("fail at step 3")
            else:
              sess.run(shuff_rec)

  def testSessionConfig(self):
    logdir = _test_dir("session_config")
    with tf.Graph().as_default():
      with tf.device("/cpu:1"):
        my_op = tf.constant([1.0])
      sv = tf.train.Supervisor(logdir=logdir)
      sess = sv.prepare_or_wait_for_session(
          "", config=tf.ConfigProto(device_count={"CPU": 2}))
      for _ in xrange(10):
        sess.run(my_op)
      sess.close()
      sv.stop()

  def testChiefCanWriteEvents(self):
    logdir = _test_dir("can_write")
    with tf.Graph().as_default():
      tf.summary.scalar("c1", tf.constant(1))
      tf.summary.scalar("c2", tf.constant(2))
      tf.summary.scalar("c3", tf.constant(3))
      summ = tf.summary.merge_all()
      sv = tf.train.Supervisor(is_chief=True, logdir=logdir, summary_op=None)
      meta_graph_def = meta_graph.create_meta_graph_def()
      sess = sv.prepare_or_wait_for_session("")
      sv.summary_computed(sess, sess.run(summ))
      sess.close()
      # Wait to make sure everything is written to file before stopping.
      time.sleep(1)
      sv.stop()
    rr = _summary_iterator(logdir)
    # The first event should list the file_version.
    ev = next(rr)
    self.assertEquals("brain.Event:2", ev.file_version)
    # The next one has the graph.
    ev = next(rr)
    ev_graph = tf.GraphDef()
    ev_graph.ParseFromString(ev.graph_def)
    self.assertProtoEquals(sess.graph.as_graph_def(add_shapes=True), ev_graph)
    # Stored MetaGraphDef
    ev = next(rr)
    ev_meta_graph = meta_graph_pb2.MetaGraphDef()
    ev_meta_graph.ParseFromString(ev.meta_graph_def)
    self.assertProtoEquals(meta_graph_def, ev_meta_graph)
    self.assertProtoEquals(
        sess.graph.as_graph_def(add_shapes=True), ev_meta_graph.graph_def)
    # The next one should have the values from the summary.
    ev = next(rr)
    self.assertProtoEquals("""
      value { tag: 'c1' simple_value: 1.0 }
      value { tag: 'c2' simple_value: 2.0 }
      value { tag: 'c3' simple_value: 3.0 }
      """, ev.summary)
    # The next one should be a stop message if we closed cleanly.
    ev = next(rr)
    self.assertEquals(tf.SessionLog.STOP, ev.session_log.status)
    # We should be done.
    self.assertRaises(StopIteration, lambda: next(rr))

  def testNonChiefCannotWriteEvents(self):
    def _summary_computed():
      with tf.Graph().as_default():
        sv = tf.train.Supervisor(is_chief=False)
        sess = sv.prepare_or_wait_for_session("")
        tf.summary.scalar("c1", tf.constant(1))
        tf.summary.scalar("c2", tf.constant(2))
        summ = tf.summary.merge_all()
        sv.summary_computed(sess, sess.run(summ))
    def _start_standard_services():
      with tf.Graph().as_default():
        sv = tf.train.Supervisor(is_chief=False)
        sess = sv.prepare_or_wait_for_session("")
        sv.start_standard_services(sess)
    self.assertRaises(RuntimeError, _summary_computed)
    self.assertRaises(RuntimeError, _start_standard_services)

  def testNoLogdirButWantSummary(self):
    with tf.Graph().as_default():
      tf.summary.scalar("c1", tf.constant(1))
      tf.summary.scalar("c2", tf.constant(2))
      tf.summary.scalar("c3", tf.constant(3))
      summ = tf.summary.merge_all()
      sv = tf.train.Supervisor(logdir="", summary_op=None)
      sess = sv.prepare_or_wait_for_session("")
      with self.assertRaisesRegexp(RuntimeError, "requires a summary writer"):
        sv.summary_computed(sess, sess.run(summ))

  def testLogdirButExplicitlyNoSummaryWriter(self):
    logdir = _test_dir("explicit_no_summary_writer")
    with tf.Graph().as_default():
      tf.Variable([1.0], name="foo")
      tf.summary.scalar("c1", tf.constant(1))
      tf.summary.scalar("c2", tf.constant(2))
      tf.summary.scalar("c3", tf.constant(3))
      summ = tf.summary.merge_all()
      sv = tf.train.Supervisor(logdir=logdir, summary_writer=None)
      sess = sv.prepare_or_wait_for_session("")
      # Check that a checkpoint is still be generated.
      self._wait_for_glob(sv.save_path, 3.0)
      # Check that we cannot write a summary
      with self.assertRaisesRegexp(RuntimeError, "requires a summary writer"):
        sv.summary_computed(sess, sess.run(summ))

  def testNoLogdirButExplicitSummaryWriter(self):
    logdir = _test_dir("explicit_summary_writer")
    with tf.Graph().as_default():
      tf.summary.scalar("c1", tf.constant(1))
      tf.summary.scalar("c2", tf.constant(2))
      tf.summary.scalar("c3", tf.constant(3))
      summ = tf.summary.merge_all()
      sw = tf.train.SummaryWriter(logdir)
      sv = tf.train.Supervisor(logdir="", summary_op=None, summary_writer=sw)
      meta_graph_def = meta_graph.create_meta_graph_def()
      sess = sv.prepare_or_wait_for_session("")
      sv.summary_computed(sess, sess.run(summ))
      sess.close()
      # Wait to make sure everything is written to file before stopping.
      time.sleep(1)
      sv.stop()
    # Check the summary was written to 'logdir'
    rr = _summary_iterator(logdir)
    # The first event should list the file_version.
    ev = next(rr)
    self.assertEquals("brain.Event:2", ev.file_version)
    # The next one has the graph.
    ev = next(rr)
    ev_graph = tf.GraphDef()
    ev_graph.ParseFromString(ev.graph_def)
    self.assertProtoEquals(sess.graph.as_graph_def(add_shapes=True), ev_graph)
    # Stored MetaGraphDef
    ev = next(rr)
    ev_meta_graph = meta_graph_pb2.MetaGraphDef()
    ev_meta_graph.ParseFromString(ev.meta_graph_def)
    self.assertProtoEquals(meta_graph_def, ev_meta_graph)
    self.assertProtoEquals(
        sess.graph.as_graph_def(add_shapes=True), ev_meta_graph.graph_def)
    # The next one should have the values from the summary.
    ev = next(rr)
    self.assertProtoEquals("""
      value { tag: 'c1' simple_value: 1.0 }
      value { tag: 'c2' simple_value: 2.0 }
      value { tag: 'c3' simple_value: 3.0 }
      """, ev.summary)
    # The next one should be a stop message if we closed cleanly.
    ev = next(rr)
    self.assertEquals(tf.SessionLog.STOP, ev.session_log.status)
    # We should be done.
    self.assertRaises(StopIteration, lambda: next(rr))

  def testNoLogdirSucceeds(self):
    with tf.Graph().as_default():
      tf.Variable([1.0, 2.0, 3.0])
      sv = tf.train.Supervisor(logdir="", summary_op=None)
      sess = sv.prepare_or_wait_for_session("")
      sess.close()
      sv.stop()

  def testUseSessionManager(self):
    with tf.Graph().as_default():
      tf.Variable([1.0, 2.0, 3.0])
      sm = tf.train.SessionManager()
      # Pass in session_manager. The additional init_op is ignored.
      sv = tf.train.Supervisor(logdir="", session_manager=sm)
      sv.prepare_or_wait_for_session("")

  def testInitOp(self):
    logdir = _test_dir("default_init_op")
    with tf.Graph().as_default():
      v = tf.Variable([1.0, 2.0, 3.0])
      sv = tf.train.Supervisor(logdir=logdir)
      sess = sv.prepare_or_wait_for_session("")
      self.assertAllClose([1.0, 2.0, 3.0], sess.run(v))
      sv.stop()

  def testInitFn(self):
    logdir = _test_dir("default_init_op")
    with tf.Graph().as_default():
      v = tf.Variable([1.0, 2.0, 3.0])
      def _init_fn(sess):
        sess.run(v.initializer)
      sv = tf.train.Supervisor(logdir=logdir, init_op=None, init_fn=_init_fn)
      sess = sv.prepare_or_wait_for_session("")
      self.assertAllClose([1.0, 2.0, 3.0], sess.run(v))
      sv.stop()

  def testInitOpWithFeedDict(self):
    logdir = _test_dir("feed_dict_init_op")
    with tf.Graph().as_default():
      p = tf.placeholder(tf.float32, shape=(3,))
      v = tf.Variable(p, name="v")
      sv = tf.train.Supervisor(logdir=logdir,
                               init_op=tf.initialize_all_variables(),
                               init_feed_dict={p: [1.0, 2.0, 3.0]})
      sess = sv.prepare_or_wait_for_session("")
      self.assertAllClose([1.0, 2.0, 3.0], sess.run(v))
      sv.stop()

  def testReadyForLocalInitOp(self):
    server = tf.train.Server.create_local_server()
    logdir = _test_dir("default_ready_for_local_init_op")
    uid = uuid.uuid4().hex
    def get_session(is_chief):
      g = tf.Graph()
      with g.as_default():
        with tf.device("/job:local"):
          v = tf.Variable(
              1, name="default_ready_for_local_init_op_v_" + str(uid))
          vadd = v.assign_add(1)
          w = tf.Variable(
              v,
              trainable=False,
              collections=[tf.GraphKeys.LOCAL_VARIABLES],
              name="default_ready_for_local_init_op_w_" + str(uid))
          ready_for_local_init_op = tf.report_uninitialized_variables(
              tf.all_variables())
      sv = tf.train.Supervisor(
          logdir=logdir,
          is_chief=is_chief,
          graph=g,
          recovery_wait_secs=1,
          init_op=v.initializer,
          ready_for_local_init_op=ready_for_local_init_op)
      sess = sv.prepare_or_wait_for_session(server.target)
      return sv, sess, v, vadd, w
    sv0, sess0, v0, _, w0 = get_session(True)
    sv1, sess1, _, vadd1, w1 = get_session(False)
    self.assertEqual(1, sess0.run(w0))
    self.assertEqual(2, sess1.run(vadd1))
    self.assertEqual(1, sess1.run(w1))
    self.assertEqual(2, sess0.run(v0))
    sv0.stop()
    sv1.stop()

  def testReadyForLocalInitOpRestoreFromCheckpoint(self):
    server = tf.train.Server.create_local_server()
    logdir = _test_dir("ready_for_local_init_op_restore")
    uid = uuid.uuid4().hex
    # Create a checkpoint.
    with tf.Graph().as_default():
      v = tf.Variable(
          10.0, name="ready_for_local_init_op_restore_v_" + str(uid))
      tf.summary.scalar("ready_for_local_init_op_restore_v_" + str(uid), v)
      sv = tf.train.Supervisor(logdir=logdir)
      sv.prepare_or_wait_for_session(server.target)
      save_path = sv.save_path
      self._wait_for_glob(save_path, 3.0)
      self._wait_for_glob(
          os.path.join(logdir, "*events*"), 3.0, for_checkpoint=False)
      # Wait to make sure everything is written to file before stopping.
      time.sleep(1)
      sv.stop()
    def get_session(is_chief):
      g = tf.Graph()
      with g.as_default():
        with tf.device("/job:local"):
          v = tf.Variable(
              1.0, name="ready_for_local_init_op_restore_v_" + str(uid))
          vadd = v.assign_add(1)
          w = tf.Variable(
              v,
              trainable=False,
              collections=[tf.GraphKeys.LOCAL_VARIABLES],
              name="ready_for_local_init_op_restore_w_" + str(uid))
          ready_for_local_init_op = tf.report_uninitialized_variables(
              tf.all_variables())
      sv = tf.train.Supervisor(
          logdir=logdir,
          is_chief=is_chief,
          graph=g,
          recovery_wait_secs=1,
          ready_for_local_init_op=ready_for_local_init_op)
      sess = sv.prepare_or_wait_for_session(server.target)
      return sv, sess, v, vadd, w
    sv0, sess0, v0, _, w0 = get_session(True)
    sv1, sess1, _, vadd1, w1 = get_session(False)
    self.assertEqual(10, sess0.run(w0))
    self.assertEqual(11, sess1.run(vadd1))
    self.assertEqual(10, sess1.run(w1))
    self.assertEqual(11, sess0.run(v0))
    sv0.stop()
    sv1.stop()

  def testLocalInitOp(self):
    logdir = _test_dir("default_local_init_op")
    with tf.Graph().as_default():
      # A local variable.
      v = tf.Variable([1.0, 2.0, 3.0],
                      trainable=False,
                      collections=[tf.GraphKeys.LOCAL_VARIABLES])
      # An entity which is initialized through a TABLE_INITIALIZER.
      w = tf.Variable([4, 5, 6], trainable=False, collections=[])
      tf.add_to_collection(tf.GraphKeys.TABLE_INITIALIZERS, w.initializer)
      # This shouldn't add a variable to the VARIABLES collection responsible
      # for variables that are saved/restored from checkpoints.
      self.assertEquals(len(tf.all_variables()), 0)
      # Suppress normal variable inits to make sure the local one is
      # initialized via local_init_op.
      sv = tf.train.Supervisor(logdir=logdir, init_op=None)
      sess = sv.prepare_or_wait_for_session("")
      self.assertAllClose([1.0, 2.0, 3.0], sess.run(v))
      self.assertAllClose([4, 5, 6], sess.run(w))
      sv.stop()

  def testLocalInitOpForNonChief(self):
    logdir = _test_dir("default_local_init_op_non_chief")
    with tf.Graph().as_default():
      with tf.device("/job:localhost"):
        # A local variable.
        v = tf.Variable([1.0, 2.0, 3.0],
                        trainable=False,
                        collections=[tf.GraphKeys.LOCAL_VARIABLES])
        # This shouldn't add a variable to the VARIABLES collection responsible
        # for variables that are saved/restored from checkpoints.
        self.assertEquals(len(tf.all_variables()), 0)
      # Suppress normal variable inits to make sure the local one is
      # initialized via local_init_op.
      sv = tf.train.Supervisor(logdir=logdir, init_op=None, is_chief=False)
      sess = sv.prepare_or_wait_for_session("")
      self.assertAllClose([1.0, 2.0, 3.0], sess.run(v))
      sv.stop()

  def testInitOpFails(self):
    server = tf.train.Server.create_local_server()
    logdir = _test_dir("default_init_op_fails")
    with tf.Graph().as_default():
      v = tf.Variable([1.0, 2.0, 3.0], name="v")
      tf.Variable([4.0, 5.0, 6.0], name="w")
      # w will not be initialized.
      sv = tf.train.Supervisor(logdir=logdir, init_op=v.initializer)
      with self.assertRaisesRegexp(RuntimeError,
                                   "Variables not initialized: w"):
        sv.prepare_or_wait_for_session(server.target)

  def testInitOpFailsForTransientVariable(self):
    server = tf.train.Server.create_local_server()
    logdir = _test_dir("default_init_op_fails_for_local_variable")
    with tf.Graph().as_default():
      v = tf.Variable([1.0, 2.0, 3.0], name="v",
                      collections=[tf.GraphKeys.LOCAL_VARIABLES])
      tf.Variable([1.0, 2.0, 3.0], name="w",
                  collections=[tf.GraphKeys.LOCAL_VARIABLES])
      # w will not be initialized.
      sv = tf.train.Supervisor(logdir=logdir, local_init_op=v.initializer)
      with self.assertRaisesRegexp(
          RuntimeError, "Variables not initialized: w"):
        sv.prepare_or_wait_for_session(server.target)

  def testSetupFail(self):
    logdir = _test_dir("setup_fail")
    with tf.Graph().as_default():
      tf.Variable([1.0, 2.0, 3.0], name="v")
      with self.assertRaisesRegexp(ValueError, "must have their device set"):
        tf.train.Supervisor(logdir=logdir, is_chief=False)
    with tf.Graph().as_default(), tf.device("/job:ps"):
      tf.Variable([1.0, 2.0, 3.0], name="v")
      tf.train.Supervisor(logdir=logdir, is_chief=False)

  def testDefaultGlobalStep(self):
    logdir = _test_dir("default_global_step")
    with tf.Graph().as_default():
      tf.Variable(287, name="global_step")
      sv = tf.train.Supervisor(logdir=logdir)
      sess = sv.prepare_or_wait_for_session("")
      self.assertEquals(287, sess.run(sv.global_step))
      sv.stop()

  def testRestoreFromMetaGraph(self):
    logdir = _test_dir("restore_from_meta_graph")
    with tf.Graph().as_default():
      tf.Variable(1, name="v0")
      sv = tf.train.Supervisor(logdir=logdir)
      sess = sv.prepare_or_wait_for_session("")
      filename = sv.saver.save(sess, sv.save_path)
      sv.stop()
    # Create a new Graph and Supervisor and recover.
    with tf.Graph().as_default():
      new_saver = tf.train.import_meta_graph(".".join([filename, "meta"]))
      self.assertIsNotNone(new_saver)
      sv2 = tf.train.Supervisor(logdir=logdir, saver=new_saver)
      sess = sv2.prepare_or_wait_for_session("")
      self.assertEquals(1, sess.run("v0:0"))
      sv2.saver.save(sess, sv2.save_path)
      sv2.stop()

  # This test is based on the fact that the standard services start
  # right away and get to run once before sv.stop() returns.
  # We still sleep a bit to make the test robust.
  def testStandardServicesWithoutGlobalStep(self):
    logdir = _test_dir("standard_services_without_global_step")
    # Create a checkpoint.
    with tf.Graph().as_default():
      v = tf.Variable([1.0], name="foo")
      tf.summary.scalar("v", v[0])
      sv = tf.train.Supervisor(logdir=logdir)
      meta_graph_def = meta_graph.create_meta_graph_def(
          saver_def=sv.saver.saver_def)
      sess = sv.prepare_or_wait_for_session("")
      save_path = sv.save_path
      self._wait_for_glob(save_path, 3.0)
      self._wait_for_glob(
          os.path.join(logdir, "*events*"), 3.0, for_checkpoint=False)
      # Wait to make sure everything is written to file before stopping.
      time.sleep(1)
      sv.stop()
    # There should be an event file with a version number.
    rr = _summary_iterator(logdir)
    ev = next(rr)
    self.assertEquals("brain.Event:2", ev.file_version)
    ev = next(rr)
    ev_graph = tf.GraphDef()
    ev_graph.ParseFromString(ev.graph_def)
    self.assertProtoEquals(sess.graph.as_graph_def(add_shapes=True), ev_graph)
    # Stored MetaGraphDef
    ev = next(rr)
    ev_meta_graph = meta_graph_pb2.MetaGraphDef()
    ev_meta_graph.ParseFromString(ev.meta_graph_def)
    self.assertProtoEquals(meta_graph_def, ev_meta_graph)
    self.assertProtoEquals(
        sess.graph.as_graph_def(add_shapes=True), ev_meta_graph.graph_def)
    ev = next(rr)
    self.assertProtoEquals("value { tag: 'v' simple_value: 1.0 }", ev.summary)
    ev = next(rr)
    self.assertEquals(tf.SessionLog.STOP, ev.session_log.status)
    self.assertRaises(StopIteration, lambda: next(rr))
    # There should be a checkpoint file with the variable "foo"
    with tf.Graph().as_default(), self.test_session() as sess:
      v = tf.Variable([10.10], name="foo")
      sav = tf.train.Saver([v])
      sav.restore(sess, save_path)
      self.assertEqual(1.0, v.eval()[0])

  # Same as testStandardServicesNoGlobalStep but with a global step.
  # We should get a summary about the step time.
  def testStandardServicesWithGlobalStep(self):
    logdir = _test_dir("standard_services_with_global_step")
    # Create a checkpoint.
    with tf.Graph().as_default():
      v = tf.Variable([123], name="global_step")
      sv = tf.train.Supervisor(logdir=logdir)
      meta_graph_def = meta_graph.create_meta_graph_def(
          saver_def=sv.saver.saver_def)
      sess = sv.prepare_or_wait_for_session("")
      # This is where the checkpoint will appear, with step number 123.
      save_path = "%s-123" % sv.save_path
      self._wait_for_glob(save_path, 3.0)
      self._wait_for_glob(
          os.path.join(logdir, "*events*"), 3.0, for_checkpoint=False)
      # Wait to make sure everything is written to file before stopping.
      time.sleep(1)
      sv.stop()
    # There should be an event file with a version number.
    rr = _summary_iterator(logdir)
    ev = next(rr)
    self.assertEquals("brain.Event:2", ev.file_version)
    ev = next(rr)
    ev_graph = tf.GraphDef()
    ev_graph.ParseFromString(ev.graph_def)
    self.assertProtoEquals(sess.graph.as_graph_def(add_shapes=True), ev_graph)
    ev = next(rr)
    ev_meta_graph = meta_graph_pb2.MetaGraphDef()
    ev_meta_graph.ParseFromString(ev.meta_graph_def)
    self.assertProtoEquals(meta_graph_def, ev_meta_graph)
    self.assertProtoEquals(
        sess.graph.as_graph_def(add_shapes=True), ev_meta_graph.graph_def)
    ev = next(rr)
    # It is actually undeterministic whether SessionLog.START gets written
    # before the summary or the checkpoint, but this works when run 10000 times.
    self.assertEquals(123, ev.step)
    self.assertEquals(tf.SessionLog.START, ev.session_log.status)
    first = next(rr)
    second = next(rr)
    # It is undeterministic whether the value gets written before the checkpoint
    # since they are on separate threads, so we check for both conditions.
    if first.HasField("summary"):
      self.assertProtoEquals("""value { tag: 'global_step/sec'
                                        simple_value: 0.0 }""",
                             first.summary)
      self.assertEquals(123, second.step)
      self.assertEquals(tf.SessionLog.CHECKPOINT, second.session_log.status)
    else:
      self.assertEquals(123, first.step)
      self.assertEquals(tf.SessionLog.CHECKPOINT, first.session_log.status)
      self.assertProtoEquals("""value { tag: 'global_step/sec'
                                        simple_value: 0.0 }""",
                             second.summary)
    ev = next(rr)
    self.assertEquals(tf.SessionLog.STOP, ev.session_log.status)
    self.assertRaises(StopIteration, lambda: next(rr))
    # There should be a checkpoint file with the variable "foo"
    with tf.Graph().as_default(), self.test_session() as sess:
      v = tf.Variable([-12], name="global_step")
      sav = tf.train.Saver([v])
      sav.restore(sess, save_path)
      self.assertEqual(123, v.eval()[0])

  def testNoQueueRunners(self):
    with tf.Graph().as_default(), self.test_session() as sess:
      sv = tf.train.Supervisor(logdir=_test_dir("no_queue_runners"))
      self.assertEqual(0, len(sv.start_queue_runners(sess)))
      sv.stop()

  def testPrepareSessionAfterStopForChief(self):
    logdir = _test_dir("prepare_after_stop_chief")
    with tf.Graph().as_default():
      sv = tf.train.Supervisor(logdir=logdir, is_chief=True)
      # Create a first session and then stop.
      sess = sv.prepare_or_wait_for_session("")
      sv.stop()
      sess.close()
      self.assertTrue(sv.should_stop())
      # Now create a second session and test that we don't stay stopped, until
      # we ask to stop again.
      sess2 = sv.prepare_or_wait_for_session("")
      self.assertFalse(sv.should_stop())
      sv.stop()
      sess2.close()
      self.assertTrue(sv.should_stop())

  def testPrepareSessionAfterStopForNonChief(self):
    logdir = _test_dir("prepare_after_stop_nonchief")
    with tf.Graph().as_default():
      sv = tf.train.Supervisor(logdir=logdir, is_chief=False)
      # Create a first session and then stop.
      sess = sv.prepare_or_wait_for_session("")
      sv.stop()
      sess.close()
      self.assertTrue(sv.should_stop())
      # Now create a second session and test that we don't stay stopped, until
      # we ask to stop again.
      sess2 = sv.prepare_or_wait_for_session("")
      self.assertFalse(sv.should_stop())
      sv.stop()
      sess2.close()
      self.assertTrue(sv.should_stop())
# Run all test cases in this module under the TensorFlow test runner.
if __name__ == "__main__":
  tf.test.main()
|
import six
import webob
from nova.api.openstack.compute.schemas.v3 import flavors_extraspecs
from nova.api.openstack import extensions
from nova.api.openstack import wsgi
from nova.api import validation
from nova import exception
from nova.i18n import _
from nova import objects
from nova import utils
# Extension alias used for routing and policy naming of this API.
ALIAS = 'os-flavor-extra-specs'
# Policy enforcement helper for this extension; presumably produces rules
# keyed on 'v3:os-flavor-extra-specs:<action>' — verify against policy files.
authorize = extensions.extension_authorizer('compute', 'v3:' + ALIAS)
class FlavorExtraSpecsController(wsgi.Controller):
"""The flavor extra specs API controller for the OpenStack API."""
    def __init__(self, *args, **kwargs):
        # NOTE(review): this initializer only delegates to the parent class and
        # adds no state; it could be removed without changing behavior.
        super(FlavorExtraSpecsController, self).__init__(*args, **kwargs)
def _get_extra_specs(self, context, flavor_id):
flavor = objects.Flavor.get_by_flavor_id(context, flavor_id)
return dict(extra_specs=flavor.extra_specs)
# NOTE(gmann): Max length for numeric value is being checked
# explicitly as json schema cannot have max length check for numeric value
def _check_extra_specs_value(self, specs):
for key, value in specs.iteritems():
try:
if isinstance(value, (six.integer_types, float)):
value = six.text_type(value)
utils.check_string_length(value, 'extra_specs value',
max_length=255)
except exception.InvalidInput as error:
raise webob.exc.HTTPBadRequest(
explanation=error.format_message())
@extensions.expected_errors(())
def index(self, req, flavor_id):
"""Returns the list of extra specs for a given flavor."""
context = req.environ['nova.context']
authorize(context, action='index')
return self._get_extra_specs(context, flavor_id)
# NOTE(gmann): Here should be 201 instead of 200 by v2.1
# +microversions because the flavor extra specs has been created
# completely when returning a response.
@extensions.expected_errors((400, 404, 409))
@validation.schema(flavors_extraspecs.create)
def create(self, req, flavor_id, body):
context = req.environ['nova.context']
authorize(context, action='create')
specs = body['extra_specs']
self._check_extra_specs_value(specs)
try:
flavor = objects.Flavor.get_by_flavor_id(context, flavor_id)
flavor.extra_specs = dict(flavor.extra_specs, **specs)
flavor.save()
except exception.FlavorExtraSpecUpdateCreateFailed as e:
raise webob.exc.HTTPConflict(explanation=e.format_message())
except exception.FlavorNotFound as e:
raise webob.exc.HTTPNotFound(explanation=e.format_message())
return body
@extensions.expected_errors((400, 404, 409))
@validation.schema(flavors_extraspecs.update)
def update(self, req, flavor_id, id, body):
context = req.environ['nova.context']
authorize(context, action='update')
self._check_extra_specs_value(body)
if id not in body:
expl = _('Request body and URI mismatch')
raise webob.exc.HTTPBadRequest(explanation=expl)
try:
flavor = objects.Flavor.get_by_flavor_id(context, flavor_id)
flavor.extra_specs = dict(flavor.extra_specs, **body)
flavor.save()
except exception.FlavorExtraSpecUpdateCreateFailed as e:
raise webob.exc.HTTPConflict(explanation=e.format_message())
except exception.FlavorNotFound as e:
raise webob.exc.HTTPNotFound(explanation=e.format_message())
return body
@extensions.expected_errors(404)
def show(self, req, flavor_id, id):
"""Return a single extra spec item."""
context = req.environ['nova.context']
authorize(context, action='show')
try:
flavor = objects.Flavor.get_by_flavor_id(context, flavor_id)
return {id: flavor.extra_specs[id]}
except exception.FlavorNotFound as e:
raise webob.exc.HTTPNotFound(explanation=e.format_message())
except KeyError:
msg = _("Flavor %(flavor_id)s has no extra specs with "
"key %(key)s.") % dict(flavor_id=flavor_id,
key=id)
raise webob.exc.HTTPNotFound(explanation=msg)
# NOTE(gmann): Here should be 204(No Content) instead of 200 by v2.1
# +microversions because the flavor extra specs has been deleted
# completely when returning a response.
@extensions.expected_errors(404)
def delete(self, req, flavor_id, id):
"""Deletes an existing extra spec."""
context = req.environ['nova.context']
authorize(context, action='delete')
try:
flavor = objects.Flavor.get_by_flavor_id(context, flavor_id)
del flavor.extra_specs[id]
flavor.save()
except (exception.FlavorExtraSpecsNotFound,
exception.FlavorNotFound) as e:
raise webob.exc.HTTPNotFound(explanation=e.format_message())
except KeyError:
msg = _("Flavor %(flavor_id)s has no extra specs with "
"key %(key)s.") % dict(flavor_id=flavor_id,
key=id)
raise webob.exc.HTTPNotFound(explanation=msg)
class FlavorsExtraSpecs(extensions.V3APIExtensionBase):
    """Flavors extra specs support."""

    name = 'FlavorExtraSpecs'
    alias = ALIAS
    version = 1

    def get_resources(self):
        # Extra specs are exposed as a sub-resource of flavors.
        parent = dict(member_name='flavor', collection_name='flavors')
        resource = extensions.ResourceExtension(
            'os-extra_specs', FlavorExtraSpecsController(), parent=parent)
        return [resource]

    def get_controller_extensions(self):
        # This extension adds no controller extensions of its own.
        return []
|
"""Bayesian NN using factorized VI (Bayes By Backprop. Blundell et al. 2014).
See https://arxiv.org/abs/1505.05424 for details.
"""
from __future__ import absolute_import
from __future__ import division
from __future__ import print_function
import numpy as np
import tensorflow as tf
from absl import flags
from bandits.core.bayesian_nn import BayesianNN
# Module-level handle to absl command-line flags (FLAGS.logdir is read below).
FLAGS = flags.FLAGS
def log_gaussian(x, mu, sigma, reduce_sum=True):
  """Returns the log-density of a diagonal Gaussian, optionally summed."""
  # log N(x | mu, sigma^2) = -0.5*log(2*pi) - log(sigma) - (x-mu)^2 / (2*sigma^2)
  log_norm_const = -0.5 * np.log(2 * np.pi) - tf.log(sigma)
  quadratic = tf.square(x - mu) / (2 * tf.square(sigma))
  log_pdf = log_norm_const - quadratic
  if not reduce_sum:
    return log_pdf
  return tf.reduce_sum(log_pdf)
def analytic_kl(mu_1, sigma_1, mu_2, sigma_2):
  """KL for two Gaussian distributions with diagonal covariance matrix."""
  var_1 = tf.square(sigma_1)
  var_2 = tf.square(sigma_2)
  # Closed form: (mu1-mu2)^2 / (2*var2) + (var1/var2 - 1 - log(var1/var2)) / 2
  mean_term = tf.square(mu_1 - mu_2) / (2. * var_2)
  variance_term = (var_1/var_2 - 1. - tf.log(var_1) + tf.log(var_2))/2.
  return tf.reduce_sum(mean_term + variance_term)
class VariationalNeuralBanditModel(BayesianNN):
  """Implements an approximate Bayesian NN using Variational Inference."""

  def __init__(self, hparams, name="BBBNN"):
    # Network topology settings pulled from hparams.
    self.name = name
    self.hparams = hparams
    self.n_in = self.hparams.context_dim
    self.n_out = self.hparams.num_actions
    self.layers = self.hparams.layer_sizes
    self.init_scale = self.hparams.init_scale
    self.f_num_points = None
    if "f_num_points" in hparams:
      self.f_num_points = self.hparams.f_num_points

    # Training-length schedule: the first `cleared_times_trained` calls to
    # train() use a linearly increasing number of steps (see train()).
    self.cleared_times_trained = self.hparams.cleared_times_trained
    self.initial_training_steps = self.hparams.initial_training_steps
    self.training_schedule = np.linspace(self.initial_training_steps,
                                         self.hparams.training_epochs,
                                         self.cleared_times_trained)
    self.verbose = getattr(self.hparams, "verbose", True)

    # Variational parameters (means and std devs) per layer, keyed by layer id.
    self.weights_m = {}
    self.weights_std = {}
    self.biases_m = {}
    self.biases_std = {}

    self.times_trained = 0

    # sigma = transform(rho) keeps sigma positive; the matching inverse is
    # used to initialize rho from a desired sigma value.
    if self.hparams.use_sigma_exp_transform:
      self.sigma_transform = tf.exp
      self.inverse_sigma_transform = np.log
    else:
      self.sigma_transform = tf.nn.softplus
      # Inverse of softplus: rho = y + log(1 - exp(-y)).
      self.inverse_sigma_transform = lambda y: y + np.log(1. - np.exp(-y))

    # Whether to use the local reparameterization trick to compute the loss.
    # See details in https://arxiv.org/abs/1506.02557
    self.use_local_reparameterization = True

    self.build_graph()

  def build_mu_variable(self, shape):
    """Returns a mean variable initialized as N(0, 0.05)."""
    return tf.Variable(tf.random_normal(shape, 0.0, 0.05))

  def build_sigma_variable(self, shape, init=-5.):
    """Returns a sigma variable initialized as N(init, 0.05)."""
    # Initialize sigma to be very small initially to encourage MAP opt first
    return tf.Variable(tf.random_normal(shape, init, 0.05))

  def build_layer(self, input_x, input_x_local, shape,
                  layer_id, activation_fn=tf.nn.relu):
    """Builds a variational layer, and computes KL term.

    Args:
      input_x: Input to the variational layer.
      input_x_local: Input when the local reparameterization trick was applied.
      shape: [number_inputs, number_outputs] for the layer.
      layer_id: Number of layer in the architecture.
      activation_fn: Activation function to apply.

    Returns:
      output_h: Output of the variational layer.
      output_h_local: Output when local reparameterization trick was applied.
      neg_kl: Negative KL term for the layer.
    """
    w_mu = self.build_mu_variable(shape)
    w_sigma = self.sigma_transform(self.build_sigma_variable(shape))

    # Standard reparameterization: w = mu + sigma * eps, eps ~ N(0, 1).
    w_noise = tf.random_normal(shape)
    w = w_mu + w_sigma * w_noise

    b_mu = self.build_mu_variable([1, shape[1]])
    b_sigma = self.sigma_transform(self.build_sigma_variable([1, shape[1]]))
    # Bias is used at its mean; no noise is sampled for it.
    b = b_mu

    # Store means and stds
    self.weights_m[layer_id] = w_mu
    self.weights_std[layer_id] = w_sigma
    self.biases_m[layer_id] = b_mu
    self.biases_std[layer_id] = b_sigma

    # Create outputs
    output_h = activation_fn(tf.matmul(input_x, w) + b)

    if self.use_local_reparameterization:
      # Use analytic KL divergence wrt the prior
      # (prior: zero-mean Gaussian with std sqrt(2 / fan_in)).
      neg_kl = -analytic_kl(w_mu, w_sigma,
                            0., tf.to_float(np.sqrt(2./shape[0])))
    else:
      # Create empirical KL loss terms
      log_p = log_gaussian(w, 0., tf.to_float(np.sqrt(2./shape[0])))
      log_q = log_gaussian(w, tf.stop_gradient(w_mu), tf.stop_gradient(w_sigma))
      neg_kl = log_p - log_q

    # Apply local reparameterization trick: sample activations pre nonlinearity
    m_h = tf.matmul(input_x_local, w_mu) + b
    v_h = tf.matmul(tf.square(input_x_local), tf.square(w_sigma))
    # 1e-6 guards the sqrt against numerically zero variances.
    output_h_local = m_h + tf.sqrt(v_h + 1e-6) * tf.random_normal(tf.shape(v_h))
    output_h_local = activation_fn(output_h_local)

    return output_h, output_h_local, neg_kl

  def build_action_noise(self):
    """Defines a model for additive noise per action, and its KL term."""
    # Define mean and std variables (log-normal dist) for each action.
    noise_sigma_mu = (self.build_mu_variable([1, self.n_out])
                      + self.inverse_sigma_transform(self.hparams.noise_sigma))
    noise_sigma_sigma = self.sigma_transform(
        self.build_sigma_variable([1, self.n_out]))

    # Sample the pre-transform noise level with the reparameterization trick.
    pre_noise_sigma = (noise_sigma_mu
                       + tf.random_normal([1, self.n_out]) * noise_sigma_sigma)
    self.noise_sigma = self.sigma_transform(pre_noise_sigma)

    # Compute KL for additive noise sigma terms.
    if getattr(self.hparams, "infer_noise_sigma", False):
      # Empirical KL: log prior minus log variational posterior at the sample.
      neg_kl_term = log_gaussian(
          pre_noise_sigma,
          self.inverse_sigma_transform(self.hparams.noise_sigma),
          self.hparams.prior_sigma
      )
      neg_kl_term -= log_gaussian(pre_noise_sigma,
                                  noise_sigma_mu,
                                  noise_sigma_sigma)
    else:
      # Noise level is fixed; it contributes nothing to the KL.
      neg_kl_term = 0.

    return neg_kl_term

  def build_model(self, activation_fn=tf.nn.relu):
    """Defines the actual NN model with fully connected layers.

    The loss is computed for partial feedback settings (bandits), so only
    the observed outcome is backpropagated (see weighted loss).
    Selects the optimizer and, finally, it also initializes the graph.

    Args:
      activation_fn: the activation function used in the nn layers.
    """

    if self.verbose:
      print("Initializing model {}.".format(self.name))
    neg_kl_term, l_number = 0, 0
    use_local_reparameterization = self.use_local_reparameterization

    # Compute model additive noise for each action with log-normal distribution
    neg_kl_term += self.build_action_noise()

    # Build network.
    input_x = self.x
    input_local = self.x
    n_in = self.n_in

    # Hidden layers; a non-positive size in layer_sizes skips that layer.
    for l_number, n_nodes in enumerate(self.layers):
      if n_nodes > 0:
        h, h_local, neg_kl = self.build_layer(input_x, input_local,
                                              [n_in, n_nodes], l_number)
        neg_kl_term += neg_kl
        input_x, input_local = h, h_local
        n_in = n_nodes

    # Create last linear layer
    h, h_local, neg_kl = self.build_layer(input_x, input_local,
                                          [n_in, self.n_out],
                                          l_number + 1,
                                          activation_fn=lambda x: x)
    neg_kl_term += neg_kl
    self.y_pred = h
    self.y_pred_local = h_local

    # Compute log likelihood (with learned or fixed noise level)
    if getattr(self.hparams, "infer_noise_sigma", False):
      log_likelihood = log_gaussian(
          self.y, self.y_pred_local, self.noise_sigma, reduce_sum=False)
    else:
      y_hat = self.y_pred_local if use_local_reparameterization else self.y_pred
      log_likelihood = log_gaussian(
          self.y, y_hat, self.hparams.noise_sigma, reduce_sum=False)

    # Only take into account observed outcomes (bandits setting)
    batch_size = tf.to_float(tf.shape(self.x)[0])
    weighted_log_likelihood = tf.reduce_sum(
        log_likelihood * self.weights) / batch_size

    # The objective is 1/n * (\sum_i log_like_i - KL); neg_kl_term estimates -KL
    elbo = weighted_log_likelihood + (neg_kl_term / self.n)

    self.loss = -elbo
    self.global_step = tf.train.get_or_create_global_step()
    self.train_op = tf.train.AdamOptimizer(self.hparams.initial_lr).minimize(
        self.loss, global_step=self.global_step)

    # Create tensorboard metrics
    self.create_summaries()
    self.summary_writer = tf.summary.FileWriter(
        "{}/graph_{}".format(FLAGS.logdir, self.name), self.sess.graph)

  def build_graph(self):
    """Defines graph, session, placeholders, and model.

    Placeholders are: n (size of the dataset), x and y (context and observed
    reward for each action), and weights (one-hot encoding of selected action
    for each context, i.e., only possibly non-zero element in each y).
    """

    self.graph = tf.Graph()
    with self.graph.as_default():

      self.sess = tf.Session()

      self.n = tf.placeholder(shape=[], dtype=tf.float32)
      self.x = tf.placeholder(shape=[None, self.n_in], dtype=tf.float32)
      self.y = tf.placeholder(shape=[None, self.n_out], dtype=tf.float32)
      self.weights = tf.placeholder(shape=[None, self.n_out], dtype=tf.float32)

      self.build_model()
      self.sess.run(tf.global_variables_initializer())

  def create_summaries(self):
    """Defines summaries including mean loss, and global step."""

    with self.graph.as_default():
      with tf.name_scope(self.name + "_summaries"):
        tf.summary.scalar("loss", self.loss)
        tf.summary.scalar("global_step", self.global_step)
        self.summary_op = tf.summary.merge_all()

  def assign_lr(self):
    """Resets the learning rate in dynamic schedules for subsequent trainings.

    In bandits settings, we do expand our dataset over time. Then, we need to
    re-train the network with the new data. The algorithms that do not keep
    the step constant, can reset it at the start of each *training* process.
    """

    decay_steps = 1
    if self.hparams.activate_decay:
      current_gs = self.sess.run(self.global_step)
      with self.graph.as_default():
        # Decay restarts from the step count at the beginning of this training.
        self.lr = tf.train.inverse_time_decay(self.hparams.initial_lr,
                                              self.global_step - current_gs,
                                              decay_steps,
                                              self.hparams.lr_decay_rate)

  def train(self, data, num_steps):
    """Trains the BNN for num_steps, using the data in 'data'.

    Args:
      data: ContextualDataset object that provides the data.
      num_steps: Number of minibatches to train the network for.

    Returns:
      losses: Loss history during training.
    """

    # Early trainings are overridden by the linear schedule built in __init__.
    if self.times_trained < self.cleared_times_trained:
      num_steps = int(self.training_schedule[self.times_trained])
    self.times_trained += 1

    losses = []

    with self.graph.as_default():

      if self.verbose:
        print("Training {} for {} steps...".format(self.name, num_steps))

      for step in range(num_steps):
        x, y, weights = data.get_batch_with_weights(self.hparams.batch_size)
        _, summary, global_step, loss = self.sess.run(
            [self.train_op, self.summary_op, self.global_step, self.loss],
            feed_dict={
                self.x: x,
                self.y: y,
                self.weights: weights,
                self.n: data.num_points(self.f_num_points),
            })

        losses.append(loss)

        if step % self.hparams.freq_summary == 0:
          if self.hparams.show_training:
            print("{} | step: {}, loss: {}".format(
                self.name, global_step, loss))
          self.summary_writer.add_summary(summary, global_step)

    return losses
|
r"""Reverses an xxd dump back into a binary file
This script is used to convert models from C++ source file (dumped with xxd) to
the binary model weight file and analyze it with model visualizer like Netron
(https://github.com/lutzroeder/netron) or load the model in TensorFlow Python
API
to evaluate the results in Python.
The command to dump binary file to C++ source file looks like
xxd -i model_data.tflite > model_data.cc
Example usage:
python reverse_xxd_dump_from_cc.py \
--input_cc_file=model_data.cc \
--output_tflite_file=model_data.tflite
"""
from __future__ import absolute_import
from __future__ import division
from __future__ import print_function
import argparse
import sys
from tensorflow.lite.tools import flatbuffer_utils
from tensorflow.python.platform import app
def main(_):
  """Application run loop."""
  parser = argparse.ArgumentParser(
      description='Reverses xxd dump from to binary file')
  # Both flags are mandatory string paths.
  for flag_name, flag_help in (
      ('--input_cc_file', 'Full path name to the input cc file.'),
      ('--output_tflite_file',
       'Full path name to the stripped output tflite file.')):
    parser.add_argument(flag_name, type=str, required=True, help=flag_help)
  parsed_args = parser.parse_args()

  # Parse the xxd-generated C++ source back into a flatbuffer model object...
  model = flatbuffer_utils.xxd_output_to_object(parsed_args.input_cc_file)
  # ...and serialize it back out as a regular .tflite binary.
  flatbuffer_utils.write_model(model, parsed_args.output_tflite_file)
if __name__ == '__main__':
  # Pass only the program name through; flags are parsed by argparse in main().
  app.run(main=main, argv=sys.argv[:1])
|
from tests import BaseTestCase
import mock
import time
from redash.models import User
from redash.authentication.account import invite_token
from tests.handlers import get_request, post_request
class TestInvite(BaseTestCase):
    def test_expired_invite_token(self):
        # Mint the token 10 seconds beyond the 7-day expiry window, then hit
        # the endpoint at the real (current) time so it reads as expired.
        with mock.patch('time.time') as fake_time:
            fake_time.return_value = time.time() - (7 * 24 * 3600) - 10
            expired_token = invite_token(self.factory.user)
        rv = get_request('/invite/{}'.format(expired_token), org=self.factory.org)
        self.assertEqual(rv.status_code, 400)

    def test_invalid_invite_token(self):
        rv = get_request('/invite/badtoken', org=self.factory.org)
        self.assertEqual(rv.status_code, 400)

    def test_valid_token(self):
        tok = invite_token(self.factory.user)
        rv = get_request('/invite/{}'.format(tok), org=self.factory.org)
        self.assertEqual(rv.status_code, 200)

    def test_already_active_user(self):
        # TODO: not implemented yet.
        pass
class TestInvitePost(BaseTestCase):
    def _post_password(self, tok, pwd):
        # Helper: POST the given password to the invite endpoint.
        return post_request('/invite/{}'.format(tok), data={'password': pwd}, org=self.factory.org)

    def test_empty_password(self):
        rv = self._post_password(invite_token(self.factory.user), '')
        self.assertEqual(rv.status_code, 400)

    def test_invalid_password(self):
        # Too short to be accepted.
        rv = self._post_password(invite_token(self.factory.user), '1234')
        self.assertEqual(rv.status_code, 400)

    def test_bad_token(self):
        rv = self._post_password('jdsnfkjdsnfkj', '1234')
        self.assertEqual(rv.status_code, 400)

    def test_already_active_user(self):
        # TODO: not implemented yet.
        pass

    def test_valid_password(self):
        tok = invite_token(self.factory.user)
        pwd = 'test1234'
        rv = self._post_password(tok, pwd)
        self.assertEqual(rv.status_code, 302)
        # Reload the user and confirm the password really was set.
        self.factory.user = User.get_by_id(self.factory.user.id)
        self.assertTrue(self.factory.user.verify_password(pwd))
|
"""
==============
Blob Detection
==============
Blobs are bright on dark or dark on bright regions in an image. In
this example, blobs are detected using 3 algorithms. The image used
in this case is the Hubble eXtreme Deep Field. Each bright dot in the
image is a star or a galaxy.
Laplacian of Gaussian (LoG)
-----------------------------
This is the most accurate and slowest approach. It computes the Laplacian
of Gaussian images with successively increasing standard deviation and
stacks them up in a cube. Blobs are local maximas in this cube. Detecting
larger blobs is especially slower because of larger kernel sizes during
convolution. Only bright blobs on dark backgrounds are detected. See
:py:meth:`skimage.feature.blob_log` for usage.
Difference of Gaussian (DoG)
----------------------------
This is a faster approximation of LoG approach. In this case the image is
blurred with increasing standard deviations and the difference between
two successively blurred images are stacked up in a cube. This method
suffers from the same disadvantage as LoG approach for detecting larger
blobs. Blobs are again assumed to be bright on dark. See
:py:meth:`skimage.feature.blob_dog` for usage.
Determinant of Hessian (DoH)
----------------------------
This is the fastest approach. It detects blobs by finding maximas in the
matrix of the Determinant of Hessian of the image. The detection speed is
independent of the size of blobs as internally the implementation uses
box filters instead of convolutions. Bright on dark as well as dark on
bright blobs are detected. The downside is that small blobs (<3px) are not
detected accurately. See :py:meth:`skimage.feature.blob_doh` for usage.
"""
from math import sqrt
from skimage import data
from skimage.feature import blob_dog, blob_log, blob_doh
from skimage.color import rgb2gray
import matplotlib.pyplot as plt
image = data.hubble_deep_field()[0:500, 0:500]
image_gray = rgb2gray(image)

# For LoG and DoG the third column holds the sigma of the detecting kernel;
# the blob radius is approximately sqrt(2) * sigma, so convert for plotting.
blobs_log = blob_log(image_gray, max_sigma=30, num_sigma=10, threshold=.1)
blobs_log[:, 2] = blobs_log[:, 2] * sqrt(2)

blobs_dog = blob_dog(image_gray, max_sigma=30, threshold=.1)
blobs_dog[:, 2] = blobs_dog[:, 2] * sqrt(2)

# blob_doh already reports a radius-like scale in the third column.
blobs_doh = blob_doh(image_gray, max_sigma=30, threshold=.01)

blobs_list = [blobs_log, blobs_dog, blobs_doh]
colors = ['yellow', 'lime', 'red']
titles = ['Laplacian of Gaussian', 'Difference of Gaussian',
          'Determinant of Hessian']
sequence = zip(blobs_list, colors, titles)

# 'box-forced' was deprecated in Matplotlib 2.2 and removed in 3.0;
# 'box' is the supported equivalent for shared axes.
fig, axes = plt.subplots(1, 3, figsize=(9, 3), sharex=True, sharey=True,
                         subplot_kw={'adjustable': 'box'})
ax = axes.ravel()

# Draw each detector's blobs as circles on top of the original image.
for idx, (blobs, color, title) in enumerate(sequence):
    ax[idx].set_title(title)
    ax[idx].imshow(image, interpolation='nearest')
    for blob in blobs:
        y, x, r = blob
        c = plt.Circle((x, y), r, color=color, linewidth=2, fill=False)
        ax[idx].add_patch(c)
    ax[idx].set_axis_off()

plt.tight_layout()
plt.show()
|
from optparse import make_option
from webkitpy.common.host import Host
from webkitpy.tool.multicommandtool import MultiCommandTool
from webkitpy.tool import commands
class WebKitPatch(MultiCommandTool, Host):
    """Multi-command entry point for the webkit-patch tool."""

    global_options = [
        make_option("-v", "--verbose", action="store_true", dest="verbose", default=False, help="enable all logging"),
        make_option("-d", "--directory", action="append", dest="patch_directories",
                    default=[], help="Directory to look at for changed files"),
    ]

    def __init__(self, path):
        MultiCommandTool.__init__(self)
        Host.__init__(self)
        self._path = path

    def path(self):
        # Location of the script as handed to the constructor.
        return self._path

    def should_show_in_main_help(self, command):
        # Hidden commands never show; commands that need local commits show
        # only when the current SCM supports them.
        if not command.show_in_main_help:
            return False
        if not command.requires_local_commits:
            return True
        return self.scm().supports_local_commits()

    # FIXME: This may be unnecessary since we pass global options to all commands during execute() as well.
    def handle_global_options(self, options):
        self.initialize_scm(options.patch_directories)

    def should_execute_command(self, command):
        # Block commands that need local commits when the SCM cannot make them.
        if command.requires_local_commits and not self.scm().supports_local_commits():
            reason = "%s requires local commits using %s in %s." % (
                command.name, self.scm().display_name(), self.scm().checkout_root)
            return (False, reason)
        return (True, None)
|
"""
Filters bad words on outgoing messages from the bot, so the bot can't be made
to say bad words.
"""

import supybot
import supybot.world as world

# Plugin metadata consumed by supybot's plugin loader.
__version__ = ""
__author__ = supybot.authors.jemfinch
__contributors__ = {}

from . import config
from . import plugin
from imp import reload
reload(plugin) # In case we're being reloaded.

# Pull in the test suite only when the bot is running in testing mode.
if world.testing:
    from . import test

# Entry points supybot looks for when loading the plugin.
Class = plugin.Class
configure = config.configure
|
project_slug = '{{ cookiecutter.project_slug }}'

# str.isidentifier only exists on Python 3; skip the check on Python 2.
_isidentifier = getattr(project_slug, 'isidentifier', None)
if _isidentifier is not None:
    assert _isidentifier(), 'Project slug should be valid Python identifier!'
|
"""
This plugin handles various plugin-related things, such as getting help for
a plugin, getting a list of the loaded plugins, and searching and downloading
plugins from supybot.com.
"""

import supybot
import supybot.world as world

# %%VERSION%% is substituted by the release tooling.
__version__ = "%%VERSION%%"
__author__ = supybot.authors.jemfinch
__contributors__ = {
    supybot.authors.skorobeus: ['contributors'],
    }

from . import config
from . import plugin
from imp import reload
reload(plugin) # In case we're being reloaded.

# Pull in the test suite only when the bot is running in testing mode.
if world.testing:
    from . import test

# Entry points supybot looks for when loading the plugin.
Class = plugin.Class
configure = config.configure
|
from datetime import datetime
import numpy as np
import pytest
import pandas as pd
from pandas import (
DataFrame,
Index,
MultiIndex,
Series,
_testing as tm,
)
def test_split(any_string_dtype):
values = Series(["a_b_c", "c_d_e", np.nan, "f_g_h"], dtype=any_string_dtype)
result = values.str.split("_")
exp = Series([["a", "b", "c"], ["c", "d", "e"], np.nan, ["f", "g", "h"]])
tm.assert_series_equal(result, exp)
# more than one char
values = Series(["a__b__c", "c__d__e", np.nan, "f__g__h"], dtype=any_string_dtype)
result = values.str.split("__")
tm.assert_series_equal(result, exp)
result = values.str.split("__", expand=False)
tm.assert_series_equal(result, exp)
# regex split
values = Series(["a,b_c", "c_d,e", np.nan, "f,g,h"], dtype=any_string_dtype)
result = values.str.split("[,_]")
exp = Series([["a", "b", "c"], ["c", "d", "e"], np.nan, ["f", "g", "h"]])
tm.assert_series_equal(result, exp)
def test_split_object_mixed():
    # Non-string entries in an object Series come back as NaN.
    mixed = Series(["a_b_c", np.nan, "d_e_f", True, datetime.today(), None, 1, 2.0])
    expected = Series(
        [["a", "b", "c"], np.nan, ["d", "e", "f"]] + [np.nan] * 5
    )
    for res in (mixed.str.split("_"), mixed.str.split("_", expand=False)):
        assert isinstance(res, Series)
        tm.assert_almost_equal(res, expected)
@pytest.mark.parametrize("method", ["split", "rsplit"])
def test_split_n(any_string_dtype, method):
    # n=None and n=0 both mean "no limit on the number of splits".
    ser = Series(["a b", pd.NA, "b c"], dtype=any_string_dtype)
    expected = Series([["a", "b"], pd.NA, ["b", "c"]])
    for limit in (None, 0):
        res = getattr(ser.str, method)(" ", n=limit)
        tm.assert_series_equal(res, expected)
def test_rsplit(any_string_dtype):
values = Series(["a_b_c", "c_d_e", np.nan, "f_g_h"], dtype=any_string_dtype)
result = values.str.rsplit("_")
exp = Series([["a", "b", "c"], ["c", "d", "e"], np.nan, ["f", "g", "h"]])
tm.assert_series_equal(result, exp)
# more than one char
values = Series(["a__b__c", "c__d__e", np.nan, "f__g__h"], dtype=any_string_dtype)
result = values.str.rsplit("__")
tm.assert_series_equal(result, exp)
result = values.str.rsplit("__", expand=False)
tm.assert_series_equal(result, exp)
# regex split is not supported by rsplit
values = Series(["a,b_c", "c_d,e", np.nan, "f,g,h"], dtype=any_string_dtype)
result = values.str.rsplit("[,_]")
exp = Series([["a,b_c"], ["c_d,e"], np.nan, ["f,g,h"]])
tm.assert_series_equal(result, exp)
# setting max number of splits, make sure it's from reverse
values = Series(["a_b_c", "c_d_e", np.nan, "f_g_h"], dtype=any_string_dtype)
result = values.str.rsplit("_", n=1)
exp = Series([["a_b", "c"], ["c_d", "e"], np.nan, ["f_g", "h"]])
tm.assert_series_equal(result, exp)
def test_rsplit_object_mixed():
    # Non-string entries in an object Series come back as NaN.
    mixed = Series(["a_b_c", np.nan, "d_e_f", True, datetime.today(), None, 1, 2.0])
    expected = Series(
        [["a", "b", "c"], np.nan, ["d", "e", "f"]] + [np.nan] * 5
    )
    for res in (mixed.str.rsplit("_"), mixed.str.rsplit("_", expand=False)):
        assert isinstance(res, Series)
        tm.assert_almost_equal(res, expected)
def test_split_blank_string(any_string_dtype):
# expand blank split GH 20067
values = Series([""], name="test", dtype=any_string_dtype)
result = values.str.split(expand=True)
exp = DataFrame([[]], dtype=any_string_dtype) # NOTE: this is NOT an empty df
tm.assert_frame_equal(result, exp)
values = Series(["a b c", "a b", "", " "], name="test", dtype=any_string_dtype)
result = values.str.split(expand=True)
exp = DataFrame(
[
["a", "b", "c"],
["a", "b", np.nan],
[np.nan, np.nan, np.nan],
[np.nan, np.nan, np.nan],
],
dtype=any_string_dtype,
)
tm.assert_frame_equal(result, exp)
def test_split_noargs(any_string_dtype):
# #1859
s = Series(["Wes McKinney", "Travis Oliphant"], dtype=any_string_dtype)
result = s.str.split()
expected = ["Travis", "Oliphant"]
assert result[1] == expected
result = s.str.rsplit()
assert result[1] == expected
@pytest.mark.parametrize(
    "data, pat",
    [
        (["bd asdf jfg", "kjasdflqw asdfnfk"], None),
        (["bd asdf jfg", "kjasdflqw asdfnfk"], "asdf"),
        (["bd_asdf_jfg", "kjasdflqw_asdfnfk"], "_"),
    ],
)
def test_split_maxsplit(data, pat, any_string_dtype):
    # n=-1 (str.split's sentinel) and n=0 (re.split's) both mean "no limit".
    ser = Series(data, dtype=any_string_dtype)
    unlimited = ser.str.split(pat=pat)
    for limit in (-1, 0):
        tm.assert_series_equal(ser.str.split(pat=pat, n=limit), unlimited)
@pytest.mark.parametrize(
    "data, pat, expected",
    [
        (
            ["split once", "split once too!"],
            None,
            Series({0: ["split", "once"], 1: ["split", "once too!"]}),
        ),
        (
            ["split_once", "split_once_too!"],
            "_",
            Series({0: ["split", "once"], 1: ["split", "once_too!"]}),
        ),
    ],
)
def test_split_no_pat_with_nonzero_n(data, pat, expected, any_string_dtype):
    # n=1 limits the split to a single break, with or without an explicit pat.
    ser = Series(data, dtype=any_string_dtype)
    res = ser.str.split(pat=pat, n=1)
    tm.assert_series_equal(expected, res, check_index_type=False)
def test_split_to_dataframe(any_string_dtype):
    # No separator present: a single column holding the original strings.
    ser = Series(["nosplit", "alsonosplit"], dtype=any_string_dtype)
    res = ser.str.split("_", expand=True)
    expected = DataFrame({0: Series(["nosplit", "alsonosplit"], dtype=any_string_dtype)})
    tm.assert_frame_equal(res, expected)

    # Equal number of splits per row.
    ser = Series(["some_equal_splits", "with_no_nans"], dtype=any_string_dtype)
    res = ser.str.split("_", expand=True)
    expected = DataFrame(
        {0: ["some", "with"], 1: ["equal", "no"], 2: ["splits", "nans"]},
        dtype=any_string_dtype,
    )
    tm.assert_frame_equal(res, expected)

    # Unequal splits are padded with NaN on the right.
    ser = Series(
        ["some_unequal_splits", "one_of_these_things_is_not"], dtype=any_string_dtype
    )
    res = ser.str.split("_", expand=True)
    expected = DataFrame(
        {
            0: ["some", "one"],
            1: ["unequal", "of"],
            2: ["splits", "these"],
            3: [np.nan, "things"],
            4: [np.nan, "is"],
            5: [np.nan, "not"],
        },
        dtype=any_string_dtype,
    )
    tm.assert_frame_equal(res, expected)

    # The original index is preserved.
    ser = Series(
        ["some_splits", "with_index"], index=["preserve", "me"], dtype=any_string_dtype
    )
    res = ser.str.split("_", expand=True)
    expected = DataFrame(
        {0: ["some", "with"], 1: ["splits", "index"]},
        index=["preserve", "me"],
        dtype=any_string_dtype,
    )
    tm.assert_frame_equal(res, expected)

    # expand must be strictly boolean.
    with pytest.raises(ValueError, match="expand must be"):
        ser.str.split("_", expand="not_a_boolean")
def test_split_to_multiindex_expand():
    # https://github.com/pandas-dev/pandas/issues/23677
    # No separator present: expanding keeps a flat (1-level) index.
    idx = Index(["nosplit", "alsonosplit", np.nan])
    res = idx.str.split("_", expand=True)
    tm.assert_index_equal(res, idx)
    assert res.nlevels == 1

    idx = Index(["some_equal_splits", "with_no_nans", np.nan, None])
    res = idx.str.split("_", expand=True)
    expected = MultiIndex.from_tuples(
        [
            ("some", "equal", "splits"),
            ("with", "no", "nans"),
            [np.nan, np.nan, np.nan],
            [None, None, None],
        ]
    )
    tm.assert_index_equal(res, expected)
    assert res.nlevels == 3

    # Unequal splits pad the short rows with NaN.
    idx = Index(["some_unequal_splits", "one_of_these_things_is_not", np.nan, None])
    res = idx.str.split("_", expand=True)
    expected = MultiIndex.from_tuples(
        [
            ("some", "unequal", "splits", np.nan, np.nan, np.nan),
            ("one", "of", "these", "things", "is", "not"),
            (np.nan, np.nan, np.nan, np.nan, np.nan, np.nan),
            (None, None, None, None, None, None),
        ]
    )
    tm.assert_index_equal(res, expected)
    assert res.nlevels == 6

    # expand must be strictly boolean.
    with pytest.raises(ValueError, match="expand must be"):
        idx.str.split("_", expand="not_a_boolean")
def test_rsplit_to_dataframe_expand(any_string_dtype):
s = Series(["nosplit", "alsonosplit"], dtype=any_string_dtype)
result = s.str.rsplit("_", expand=True)
exp = DataFrame({0: Series(["nosplit", "alsonosplit"])}, dtype=any_string_dtype)
tm.assert_frame_equal(result, exp)
s = Series(["some_equal_splits", "with_no_nans"], dtype=any_string_dtype)
result = s.str.rsplit("_", expand=True)
exp = DataFrame(
{0: ["some", "with"], 1: ["equal", "no"], 2: ["splits", "nans"]},
dtype=any_string_dtype,
)
tm.assert_frame_equal(result, exp)
result = s.str.rsplit("_", expand=True, n=2)
exp = DataFrame(
{0: ["some", "with"], 1: ["equal", "no"], 2: ["splits", "nans"]},
dtype=any_string_dtype,
)
tm.assert_frame_equal(result, exp)
result = s.str.rsplit("_", expand=True, n=1)
exp = DataFrame(
{0: ["some_equal", "with_no"], 1: ["splits", "nans"]}, dtype=any_string_dtype
)
tm.assert_frame_equal(result, exp)
s = Series(
["some_splits", "with_index"], index=["preserve", "me"], dtype=any_string_dtype
)
result = s.str.rsplit("_", expand=True)
exp = DataFrame(
{0: ["some", "with"], 1: ["splits", "index"]},
index=["preserve", "me"],
dtype=any_string_dtype,
)
tm.assert_frame_equal(result, exp)
def test_rsplit_to_multiindex_expand():
    # No separator present: expanding keeps a flat (1-level) index.
    idx = Index(["nosplit", "alsonosplit"])
    res = idx.str.rsplit("_", expand=True)
    tm.assert_index_equal(res, idx)
    assert res.nlevels == 1

    idx = Index(["some_equal_splits", "with_no_nans"])
    res = idx.str.rsplit("_", expand=True)
    expected = MultiIndex.from_tuples([("some", "equal", "splits"), ("with", "no", "nans")])
    tm.assert_index_equal(res, expected)
    assert res.nlevels == 3

    # n=1 splits once from the right, giving two levels.
    idx = Index(["some_equal_splits", "with_no_nans"])
    res = idx.str.rsplit("_", expand=True, n=1)
    expected = MultiIndex.from_tuples([("some_equal", "splits"), ("with_no", "nans")])
    tm.assert_index_equal(res, expected)
    assert res.nlevels == 2
def test_split_nan_expand(any_string_dtype):
# gh-18450
s = Series(["foo,bar,baz", np.nan], dtype=any_string_dtype)
result = s.str.split(",", expand=True)
exp = DataFrame(
[["foo", "bar", "baz"], [np.nan, np.nan, np.nan]], dtype=any_string_dtype
)
tm.assert_frame_equal(result, exp)
# check that these are actually np.nan/pd.NA and not None
# TODO see GH 18463
# tm.assert_frame_equal does not differentiate
if any_string_dtype == "object":
assert all(np.isnan(x) for x in result.iloc[1])
else:
assert all(x is pd.NA for x in result.iloc[1])
def test_split_with_name(any_string_dtype):
# GH 12617
# should preserve name
s = Series(["a,b", "c,d"], name="xxx", dtype=any_string_dtype)
res = s.str.split(",")
exp = Series([["a", "b"], ["c", "d"]], name="xxx")
tm.assert_series_equal(res, exp)
res = s.str.split(",", expand=True)
exp = DataFrame([["a", "b"], ["c", "d"]], dtype=any_string_dtype)
tm.assert_frame_equal(res, exp)
idx = Index(["a,b", "c,d"], name="xxx")
res = idx.str.split(",")
exp = Index([["a", "b"], ["c", "d"]], name="xxx")
assert res.nlevels == 1
tm.assert_index_equal(res, exp)
res = idx.str.split(",", expand=True)
exp = MultiIndex.from_tuples([("a", "b"), ("c", "d")])
assert res.nlevels == 2
tm.assert_index_equal(res, exp)
def test_partition_series(any_string_dtype):
# https://github.com/pandas-dev/pandas/issues/23558
s = Series(["a_b_c", "c_d_e", np.nan, "f_g_h", None], dtype=any_string_dtype)
result = s.str.partition("_", expand=False)
expected = Series(
[("a", "_", "b_c"), ("c", "_", "d_e"), np.nan, ("f", "_", "g_h"), None]
)
tm.assert_series_equal(result, expected)
result = s.str.rpartition("_", expand=False)
expected = Series(
[("a_b", "_", "c"), ("c_d", "_", "e"), np.nan, ("f_g", "_", "h"), None]
)
tm.assert_series_equal(result, expected)
# more than one char
s = Series(["a__b__c", "c__d__e", np.nan, "f__g__h", None])
result = s.str.partition("__", expand=False)
expected = Series(
[
("a", "__", "b__c"),
("c", "__", "d__e"),
np.nan,
("f", "__", "g__h"),
None,
],
)
tm.assert_series_equal(result, expected)
result = s.str.rpartition("__", expand=False)
expected = Series(
[
("a__b", "__", "c"),
("c__d", "__", "e"),
np.nan,
("f__g", "__", "h"),
None,
],
)
tm.assert_series_equal(result, expected)
# None
s = Series(["a b c", "c d e", np.nan, "f g h", None], dtype=any_string_dtype)
result = s.str.partition(expand=False)
expected = Series(
[("a", " ", "b c"), ("c", " ", "d e"), np.nan, ("f", " ", "g h"), None]
)
tm.assert_series_equal(result, expected)
result = s.str.rpartition(expand=False)
expected = Series(
[("a b", " ", "c"), ("c d", " ", "e"), np.nan, ("f g", " ", "h"), None]
)
tm.assert_series_equal(result, expected)
# Not split
s = Series(["abc", "cde", np.nan, "fgh", None], dtype=any_string_dtype)
result = s.str.partition("_", expand=False)
expected = Series([("abc", "", ""), ("cde", "", ""), np.nan, ("fgh", "", ""), None])
tm.assert_series_equal(result, expected)
result = s.str.rpartition("_", expand=False)
expected = Series([("", "", "abc"), ("", "", "cde"), np.nan, ("", "", "fgh"), None])
tm.assert_series_equal(result, expected)
# unicode
s = Series(["a_b_c", "c_d_e", np.nan, "f_g_h"], dtype=any_string_dtype)
result = s.str.partition("_", expand=False)
expected = Series([("a", "_", "b_c"), ("c", "_", "d_e"), np.nan, ("f", "_", "g_h")])
tm.assert_series_equal(result, expected)
result = s.str.rpartition("_", expand=False)
expected = Series([("a_b", "_", "c"), ("c_d", "_", "e"), np.nan, ("f_g", "_", "h")])
tm.assert_series_equal(result, expected)
# compare to standard lib
s = Series(["A_B_C", "B_C_D", "E_F_G", "EFGHEF"], dtype=any_string_dtype)
result = s.str.partition("_", expand=False).tolist()
assert result == [v.partition("_") for v in s]
result = s.str.rpartition("_", expand=False).tolist()
assert result == [v.rpartition("_") for v in s]
def test_partition_index():
    # https://github.com/pandas-dev/pandas/issues/23558
    values = Index(["a_b_c", "c_d_e", "f_g_h", np.nan, None])

    # expand=False keeps a flat object Index of 3-tuples.
    res = values.str.partition("_", expand=False)
    want = Index(
        np.array(
            [("a", "_", "b_c"), ("c", "_", "d_e"), ("f", "_", "g_h"), np.nan, None],
            dtype=object,
        )
    )
    tm.assert_index_equal(res, want)
    assert res.nlevels == 1

    res = values.str.rpartition("_", expand=False)
    want = Index(
        np.array(
            [("a_b", "_", "c"), ("c_d", "_", "e"), ("f_g", "_", "h"), np.nan, None],
            dtype=object,
        )
    )
    tm.assert_index_equal(res, want)
    assert res.nlevels == 1

    # The default (expand=True) promotes to a three-level MultiIndex.
    res = values.str.partition("_")
    want = Index(
        [
            ("a", "_", "b_c"),
            ("c", "_", "d_e"),
            ("f", "_", "g_h"),
            (np.nan, np.nan, np.nan),
            (None, None, None),
        ]
    )
    tm.assert_index_equal(res, want)
    assert isinstance(res, MultiIndex)
    assert res.nlevels == 3

    res = values.str.rpartition("_")
    want = Index(
        [
            ("a_b", "_", "c"),
            ("c_d", "_", "e"),
            ("f_g", "_", "h"),
            (np.nan, np.nan, np.nan),
            (None, None, None),
        ]
    )
    tm.assert_index_equal(res, want)
    assert isinstance(res, MultiIndex)
    assert res.nlevels == 3
def test_partition_to_dataframe(any_string_dtype):
# https://github.com/pandas-dev/pandas/issues/23558
s = Series(["a_b_c", "c_d_e", np.nan, "f_g_h", None], dtype=any_string_dtype)
result = s.str.partition("_")
expected = DataFrame(
{
0: ["a", "c", np.nan, "f", None],
1: ["_", "_", np.nan, "_", None],
2: ["b_c", "d_e", np.nan, "g_h", None],
},
dtype=any_string_dtype,
)
tm.assert_frame_equal(result, expected)
result = s.str.rpartition("_")
expected = DataFrame(
{
0: ["a_b", "c_d", np.nan, "f_g", None],
1: ["_", "_", np.nan, "_", None],
2: ["c", "e", np.nan, "h", None],
},
dtype=any_string_dtype,
)
tm.assert_frame_equal(result, expected)
s = Series(["a_b_c", "c_d_e", np.nan, "f_g_h", None], dtype=any_string_dtype)
result = s.str.partition("_", expand=True)
expected = DataFrame(
{
0: ["a", "c", np.nan, "f", None],
1: ["_", "_", np.nan, "_", None],
2: ["b_c", "d_e", np.nan, "g_h", None],
},
dtype=any_string_dtype,
)
tm.assert_frame_equal(result, expected)
result = s.str.rpartition("_", expand=True)
expected = DataFrame(
{
0: ["a_b", "c_d", np.nan, "f_g", None],
1: ["_", "_", np.nan, "_", None],
2: ["c", "e", np.nan, "h", None],
},
dtype=any_string_dtype,
)
tm.assert_frame_equal(result, expected)
def test_partition_with_name(any_string_dtype):
# GH 12617
s = Series(["a,b", "c,d"], name="xxx", dtype=any_string_dtype)
result = s.str.partition(",")
expected = DataFrame(
{0: ["a", "c"], 1: [",", ","], 2: ["b", "d"]}, dtype=any_string_dtype
)
tm.assert_frame_equal(result, expected)
# should preserve name
result = s.str.partition(",", expand=False)
expected = Series([("a", ",", "b"), ("c", ",", "d")], name="xxx")
tm.assert_series_equal(result, expected)
def test_partition_index_with_name():
    idx = Index(["a,b", "c,d"], name="xxx")

    # Expanding yields an unnamed three-level MultiIndex.
    res = idx.str.partition(",")
    assert res.nlevels == 3
    tm.assert_index_equal(
        res, MultiIndex.from_tuples([("a", ",", "b"), ("c", ",", "d")])
    )

    # expand=False keeps a flat Index and preserves the name.
    res = idx.str.partition(",", expand=False)
    assert res.nlevels == 1
    tm.assert_index_equal(
        res, Index(np.array([("a", ",", "b"), ("c", ",", "d")]), name="xxx")
    )
def test_partition_sep_kwarg(any_string_dtype):
# GH 22676; depr kwarg "pat" in favor of "sep"
s = Series(["a_b_c", "c_d_e", np.nan, "f_g_h"], dtype=any_string_dtype)
expected = s.str.partition(sep="_")
result = s.str.partition("_")
tm.assert_frame_equal(result, expected)
expected = s.str.rpartition(sep="_")
result = s.str.rpartition("_")
tm.assert_frame_equal(result, expected)
def test_get():
    # get(1) picks the second token out of each split list.
    ser = Series(["a_b_c", "c_d_e", np.nan, "f_g_h"])
    res = ser.str.split("_").str.get(1)
    tm.assert_series_equal(res, Series(["b", "d", np.nan, "g"]))
def test_get_mixed_object():
    # Non-string entries cannot be split and come back as NaN.
    ser = Series(["a_b_c", np.nan, "c_d_e", True, datetime.today(), None, 1, 2.0])
    res = ser.str.split("_").str.get(1)
    want = Series(["b", np.nan, "d", np.nan, np.nan, np.nan, np.nan, np.nan])
    tm.assert_series_equal(res, want)
def test_get_bounds():
    ser = Series(["1_2_3_4_5", "6_7_8_9_10", "11_12"])

    # Indices past the end of a short list yield NaN, whether the index
    # is positive or negative.
    want = Series(["3", "8", np.nan])
    tm.assert_series_equal(ser.str.split("_").str.get(2), want)
    tm.assert_series_equal(ser.str.split("_").str.get(-3), want)
def test_get_complex():
    # GH 20671, getting value not in dict raising `KeyError`
    ser = Series([(1, 2, 3), [1, 2, 3], {1, 2, 3}, {1: "a", 2: "b", 3: "c"}])

    # Sets are not indexable -> NaN; dicts are looked up by key.
    tm.assert_series_equal(ser.str.get(1), Series([2, 2, np.nan, "a"]))
    # Negative indices only work for sequences.
    tm.assert_series_equal(ser.str.get(-1), Series([3, 3, np.nan, np.nan]))
@pytest.mark.parametrize("to_type", [tuple, list, np.array])
def test_get_complex_nested(to_type):
    # A single-element nested container: index 0 exists, index 1 does not.
    ser = Series([to_type([to_type([1, 2])])])

    tm.assert_series_equal(ser.str.get(0), Series([to_type([1, 2])]))
    tm.assert_series_equal(ser.str.get(1), Series([np.nan]))
def test_get_strings(any_string_dtype):
ser = Series(["a", "ab", np.nan, "abc"], dtype=any_string_dtype)
result = ser.str.get(2)
expected = Series([np.nan, np.nan, np.nan, "c"], dtype=any_string_dtype)
tm.assert_series_equal(result, expected)
|
import sys, re
import cairo
import numpy
import threading
import math
from io import BytesIO
from ginga import ImageView
from ginga.cairow.CanvasRenderCairo import CanvasRenderer
class ImageViewCairoError(ImageView.ImageViewError):
    """Exception raised for errors specific to the Cairo image viewer."""
    pass
class ImageViewCairo(ImageView.ImageViewBase):
    """ImageView backend that renders through Cairo.

    Rendering happens on an offscreen Cairo image surface
    (``self.surface``); ``update_image`` then blits that backing surface
    onto an optional destination surface (``self.dst_surface``).
    """

    def __init__(self, logger=None, rgbmap=None, settings=None):
        ImageView.ImageViewBase.__init__(self, logger=logger,
                                         rgbmap=rgbmap,
                                         settings=settings)

        # Offscreen backing surface and optional output (destination) surface.
        self.surface = None
        self.dst_surface = None
        # Cairo's ARGB32 pixels are stored in native machine byte order, so
        # the per-byte channel order (and the alpha byte's index) depends on
        # the host's endianness.
        if sys.byteorder == 'little':
            self._rgb_order = 'BGRA'
            self._alpha_idx = 3
        else:
            self._rgb_order = 'ARGB'
            self._alpha_idx = 0
        self.renderer = CanvasRenderer(self)
        # Most recently created Cairo context for the offscreen surface;
        # reused by pix2canvas/canvas2pix for coordinate transforms.
        self.cr = None
        # Optional text overlaid on the image by _render_offscreen().
        self.message = None

        self.t_.setDefaults(show_pan_position=False,
                            onscreen_ff='Sans Serif')

    def _render_offscreen(self, surface, data, dst_x, dst_y,
                          width, height):
        """Paint `data` onto `surface` at (dst_x, dst_y).

        Fills the whole window with the background color first, then
        composites the image array, the optional pan-position cross and
        the onscreen message on top.

        `data` is an HxWxdepth pixel array whose channel layout matches
        self._rgb_order; assumes it is already uint8 (the astype() call is
        commented out below) -- TODO confirm at the call site.
        """
        # NOTE [A]
        daht, dawd, depth = data.shape
        self.logger.debug("data shape is %dx%dx%d" % (dawd, daht, depth))

        cr = cairo.Context(surface)
        self.cr = cr

        # fill surface with background color
        imgwin_wd, imgwin_ht = self.get_window_size()
        cr.rectangle(0, 0, imgwin_wd, imgwin_ht)
        r, g, b = self.get_bg()
        cr.set_source_rgba(r, g, b)
        #cr.set_operator(cairo.OPERATOR_OVER)
        cr.fill()

        ## arr8 = data.astype(numpy.uint8).flatten()
        arr8 = data
        # Cairo may pad rows; ask it for the required stride rather than
        # assuming width * 4.
        ## stride = cairo.ImageSurface.format_stride_for_width(cairo.FORMAT_RGB24,
        stride = cairo.ImageSurface.format_stride_for_width(cairo.FORMAT_ARGB32,
                                                            width)

        # Wrap the pixel array in a Cairo surface without copying it.
        img_surface = cairo.ImageSurface.create_for_data(arr8,
                                                         #cairo.FORMAT_RGB24,
                                                         cairo.FORMAT_ARGB32,
                                                         dawd, daht, stride)

        # OPERATOR_SOURCE copies the image pixels verbatim (no blending
        # with the background fill); the mask limits painting to the
        # image's own extent.
        cr.set_source_surface(img_surface, dst_x, dst_y)
        cr.set_operator(cairo.OPERATOR_SOURCE)

        cr.mask_surface(img_surface, dst_x, dst_y)
        #cr.rectangle(dst_x, dst_y, dawd, daht)
        cr.fill()

        # Draw a cross in the center of the window in debug mode
        if self.t_['show_pan_position']:
            cr.set_source_rgb(1.0, 0.0, 0.0)
            cr.set_line_width(1)
            ctr_x, ctr_y = self.get_center()
            cr.move_to(ctr_x - 10, ctr_y)
            cr.line_to(ctr_x + 10, ctr_y)
            cr.move_to(ctr_x, ctr_y - 10)
            cr.line_to(ctr_x, ctr_y + 10)
            cr.close_path()
            cr.stroke_preserve()

        # render self.message
        if self.message:
            self.draw_message(cr, imgwin_wd, imgwin_ht,
                              self.message)

    def draw_message(self, cr, width, height, message):
        """Draw `message` horizontally centered, two thirds down the window."""
        r, g, b = self.img_fg
        #cr.set_source_rgb(1.0, 1.0, 1.0)
        cr.set_source_rgb(r, g, b)
        cr.select_font_face(self.t_['onscreen_ff'])
        cr.set_font_size(24.0)
        # text_extents -> (x_bearing, y_bearing, width, height,
        # x_advance, y_advance); only width/height are used here.
        a, b, wd, ht, i, j = cr.text_extents(message)
        y = ((height // 3) * 2) - (ht // 2)
        x = (width // 2) - (wd // 2)
        cr.move_to(x, y)
        cr.show_text(message)

    def get_offscreen_context(self):
        """Return a new Cairo context for the offscreen surface.

        Raises ImageViewCairoError if no surface has been configured yet
        (see configure_surface()).
        """
        if self.surface is None:
            raise ImageViewCairoError("No offscreen surface defined")
        cr = cairo.Context(self.surface)
        return cr

    def get_offscreen_surface(self):
        """Return the offscreen backing surface (may be None)."""
        return self.surface

    def render_image(self, rgbobj, dst_x, dst_y):
        """Render the image represented by (rgbobj) at dst_x, dst_y
        in the pixel space.

        No-op until configure_surface() has created the backing surface.
        """
        self.logger.debug("redraw surface")
        if self.surface is None:
            return

        # Prepare array for Cairo rendering
        arr = rgbobj.get_array(self._rgb_order)

        (height, width) = arr.shape[:2]
        return self._render_offscreen(self.surface, arr, dst_x, dst_y,
                                      width, height)

    def configure_surface(self, width, height):
        """(Re)create the offscreen ARGB32 surface for the given size and
        notify the base class of the new dimensions.
        """
        # Allocate the raw pixel buffer the surface will draw into.
        arr8 = numpy.zeros(height*width*4, dtype=numpy.uint8)
        #stride = cairo.ImageSurface.format_stride_for_width(cairo.FORMAT_RGB24,
        stride = cairo.ImageSurface.format_stride_for_width(cairo.FORMAT_ARGB32,
                                                            width)

        surface = cairo.ImageSurface.create_for_data(arr8,
                                                     #cairo.FORMAT_RGB24,
                                                     cairo.FORMAT_ARGB32,
                                                     width, height, stride)
        self.surface = surface
        self.configure(width, height)

    def save_image_as_surface(self, surface):
        """Redraw onto `surface` once, restoring dst_surface afterwards."""
        try:
            self.dst_surface = surface
            self.redraw()

        finally:
            self.dst_surface = None

    def get_png_image_as_buffer(self, output=None):
        """Write the offscreen surface as PNG into `output` (or a new
        BytesIO) and return that buffer.
        """
        ibuf = output
        if ibuf is None:
            ibuf = BytesIO()
        # NOTE(review): write_to_png() writes into ibuf and returns None;
        # qimg is unused.
        qimg = self.surface.write_to_png(ibuf)
        return ibuf

    def update_image(self):
        """Blit the offscreen backing surface onto the destination surface."""
        if not self.surface:
            return
        if not self.dst_surface:
            #raise ImageViewCairoError("Please set up the output destination")
            self.logger.error("Please set up the output destination")
            return

        cr = cairo.Context(self.dst_surface)

        self.logger.debug("updating destination cairo surface")
        # redraw the surface from backing surface
        cr.set_source_surface(self.surface, 0, 0)
        cr.set_operator(cairo.OPERATOR_SOURCE)
        cr.paint()
        return False

    def set_cursor(self, cursor):
        # No-op here; subclasses tied to a real widget toolkit override this.
        pass

    def define_cursor(self, ctype, cursor):
        # No-op here; subclasses tied to a real widget toolkit override this.
        pass

    def get_cursor(self, ctype):
        # NOTE(review): relies on self.cursor being provided elsewhere
        # (e.g. by a subclass or the base class) -- not defined in this class.
        return self.cursor[ctype]

    def switch_cursor(self, ctype):
        self.set_cursor(self.cursor[ctype])

    def get_rgb_order(self):
        """Return the channel order ('BGRA' or 'ARGB') this backend expects."""
        return self._rgb_order

    def onscreen_message(self, text, delay=None):
        # No-op; toolkit-specific subclasses implement timed messages.
        pass

    def show_pan_mark(self, tf):
        """Enable/disable the red center cross and trigger a redraw."""
        self.t_.set(show_pan_position=tf)
        self.redraw(whence=3)

    def pix2canvas(self, x, y):
        """Map device (pixel) coordinates to user (canvas) coordinates
        using the last rendering context.
        """
        x, y = self.cr.device_to_user(x, y)
        return (x, y)

    def canvas2pix(self, x, y):
        """Map user (canvas) coordinates to device (pixel) coordinates
        using the last rendering context.
        """
        x, y = self.cr.user_to_device(x, y)
        return (x, y)
|
"""
logbook._fallback
~~~~~~~~~~~~~~~~~
Fallback implementations in case speedups is not around.
:copyright: (c) 2010 by Armin Ronacher, Georg Brandl.
:license: BSD, see LICENSE for more details.
"""
from itertools import count
from logbook.helpers import get_iterator_next_method
from logbook.concurrency import (thread_get_ident, greenlet_get_ident,
thread_local, greenlet_local,
ThreadLock, GreenletRLock, is_gevent_enabled)
# Sentinel distinguishing "attribute never set" from any real value.
_missing = object()
# Flush the per-thread/greenlet cache once it holds this many entries.
_MAX_CONTEXT_OBJECT_CACHE = 256


def group_reflected_property(name, default, fallback=_missing):
    """Return a property for *name* whose value falls back to the owner's
    group when the instance value is unset (or equals *fallback*), and to
    *default* when there is no group either.
    """
    attr = '_' + name

    def getter(self):
        value = getattr(self, attr, _missing)
        # A stored value equal to the fallback counts as "not set".
        if value is not _missing and value != fallback:
            return value
        if self.group is None:
            return default
        return getattr(self.group, name)

    def setter(self, value):
        setattr(self, attr, value)

    def deleter(self):
        delattr(self, attr)

    return property(getter, setter, deleter)
class _StackBound(object):
def __init__(self, obj, push, pop):
self.__obj = obj
self.__push = push
self.__pop = pop
def __enter__(self):
self.__push()
return self.__obj
def __exit__(self, exc_type, exc_value, tb):
self.__pop()
class StackedObject(object):
    """Abstract base for objects that can be pushed onto and popped from
    the greenlet, thread, and application context stacks.  Subclasses
    supply the six push/pop primitives; this class layers the `with`
    protocol and the *bound helpers on top of them.
    """

    def push_greenlet(self):
        """Pushes the stacked object to the greenlet stack."""
        raise NotImplementedError()

    def pop_greenlet(self):
        """Pops the stacked object from the greenlet stack."""
        raise NotImplementedError()

    def push_thread(self):
        """Pushes the stacked object to the thread stack."""
        raise NotImplementedError()

    def pop_thread(self):
        """Pops the stacked object from the thread stack."""
        raise NotImplementedError()

    def push_application(self):
        """Pushes the stacked object to the application stack."""
        raise NotImplementedError()

    def pop_application(self):
        """Pops the stacked object from the application stack."""
        raise NotImplementedError()

    def __enter__(self):
        # Under gevent, bind to the current greenlet; otherwise the thread.
        push = self.push_greenlet if is_gevent_enabled() else self.push_thread
        push()
        return self

    def __exit__(self, exc_type, exc_value, tb):
        pop = self.pop_greenlet if is_gevent_enabled() else self.pop_thread
        pop()

    def greenletbound(self, _cls=_StackBound):
        """Can be used in combination with the `with` statement to
        execute code while the object is bound to the greenlet.
        """
        return _cls(self, self.push_greenlet, self.pop_greenlet)

    def threadbound(self, _cls=_StackBound):
        """Can be used in combination with the `with` statement to
        execute code while the object is bound to the thread.
        """
        return _cls(self, self.push_thread, self.pop_thread)

    def applicationbound(self, _cls=_StackBound):
        """Can be used in combination with the `with` statement to
        execute code while the object is bound to the application.
        """
        return _cls(self, self.push_application, self.pop_application)
class ContextStackManager(object):
    """Helper class for context objects that manages a stack of
    objects.

    Three scopes are layered: application-wide (`_global`), per-thread
    and per-greenlet.  Each pushed entry is an (order, obj) pair where
    `order` is a monotonically increasing counter, so sorting entries
    recovers push order across the scopes.  A per-thread/greenlet cache
    of the flattened, ordered object list is kept in `_cache` and
    invalidated on every push/pop.
    """
    def __init__(self):
        # Application-level (order, obj) pairs, shared by all threads.
        self._global = []
        self._thread_context_lock = ThreadLock()
        self._thread_context = thread_local()
        self._greenlet_context_lock = GreenletRLock()
        self._greenlet_context = greenlet_local()
        # thread/greenlet id -> flattened, ordered object list.
        self._cache = {}
        # Callable returning the next value of a global counter; used to
        # stamp each pushed entry with its push order.
        self._stackop = get_iterator_next_method(count())

    def iter_context_objects(self):
        """Returns an iterator over all objects for the combined
        application and context cache.

        Most recently pushed objects come first.  The flattened list is
        cached per thread/greenlet id; the cache is bounded by
        _MAX_CONTEXT_OBJECT_CACHE.
        """
        use_gevent = is_gevent_enabled()
        tid = greenlet_get_ident() if use_gevent else thread_get_ident()
        objects = self._cache.get(tid)
        if objects is None:
            # Crude bound: wipe the whole cache rather than evicting
            # individual entries.
            if len(self._cache) > _MAX_CONTEXT_OBJECT_CACHE:
                self._cache.clear()
            objects = self._global[:]
            objects.extend(getattr(self._thread_context, 'stack', ()))
            if use_gevent:
                objects.extend(getattr(self._greenlet_context, 'stack', ()))
            # (order, obj) pairs sort by push order; reverse puts the most
            # recently pushed first, then the counter is stripped.
            objects.sort(reverse=True)
            objects = [x[1] for x in objects]
            self._cache[tid] = objects
        return iter(objects)

    def push_greenlet(self, obj):
        """Push `obj` onto the current greenlet's stack."""
        self._greenlet_context_lock.acquire()
        try:
            # Invalidate the flattened cache for this greenlet before mutating.
            self._cache.pop(greenlet_get_ident(), None)  # remote chance to conflict with thread ids
            item = (self._stackop(), obj)
            stack = getattr(self._greenlet_context, 'stack', None)
            if stack is None:
                self._greenlet_context.stack = [item]
            else:
                stack.append(item)
        finally:
            self._greenlet_context_lock.release()

    def pop_greenlet(self):
        """Pop and return the most recently pushed greenlet-scope object."""
        self._greenlet_context_lock.acquire()
        try:
            self._cache.pop(greenlet_get_ident(), None)  # remote chance to conflict with thread ids
            stack = getattr(self._greenlet_context, 'stack', None)
            assert stack, 'no objects on stack'
            return stack.pop()[1]
        finally:
            self._greenlet_context_lock.release()

    def push_thread(self, obj):
        """Push `obj` onto the current thread's stack."""
        self._thread_context_lock.acquire()
        try:
            # Invalidate the flattened cache for this thread before mutating.
            self._cache.pop(thread_get_ident(), None)
            item = (self._stackop(), obj)
            stack = getattr(self._thread_context, 'stack', None)
            if stack is None:
                self._thread_context.stack = [item]
            else:
                stack.append(item)
        finally:
            self._thread_context_lock.release()

    def pop_thread(self):
        """Pop and return the most recently pushed thread-scope object."""
        self._thread_context_lock.acquire()
        try:
            self._cache.pop(thread_get_ident(), None)
            stack = getattr(self._thread_context, 'stack', None)
            assert stack, 'no objects on stack'
            return stack.pop()[1]
        finally:
            self._thread_context_lock.release()

    def push_application(self, obj):
        """Push `obj` onto the application-wide stack (visible everywhere)."""
        self._global.append((self._stackop(), obj))
        # Application stack affects every thread/greenlet: drop all caches.
        self._cache.clear()

    def pop_application(self):
        """Pop and return the most recently pushed application-scope object."""
        assert self._global, 'no objects on application stack'
        popped = self._global.pop()[1]
        self._cache.clear()
        return popped
|
from twisted.python import log
from twisted.internet import reactor, defer
from buildbot import util
# Flip the constant below to True to route lock-debugging messages to
# Twisted's log; by default they are discarded.
if False: # for debugging
    debuglog = log.msg
else:
    debuglog = lambda m: None
class BaseLock:
    """
    Class handling claiming and releasing of L{self}, and keeping track of
    current and waiting owners.

    A lock may be held either by one 'exclusive' owner, or by up to
    maxCount 'counting' owners at once.

    @note: Ideally, we'd like to maintain FIFO order. The place to do that
           would be the L{isAvailable()} function. However, this function is
           called by builds/steps both for the first time, and after waking
           them up by L{self} from the L{self.waiting} queue. There is
           currently no way of distinguishing between them.
    """
    description = "<BaseLock>"

    def __init__(self, name, maxCount=1):
        self.name = name          # Name of the lock
        self.waiting = []         # Current queue, tuples (LockAccess, deferred)
        self.owners = []          # Current owners, tuples (owner, LockAccess)
        self.maxCount = maxCount  # maximal number of counting owners

    def __repr__(self):
        return self.description

    def _getOwnersCount(self):
        """ Return the number of current exclusive and counting owners.

            @return: Tuple (number exclusive owners, number counting owners)
        """
        num_excl, num_counting = 0, 0
        for owner in self.owners:
            if owner[1].mode == 'exclusive':
                num_excl = num_excl + 1
            else: # mode == 'counting'
                num_counting = num_counting + 1

        # Invariant: exclusive ownership excludes everything else.
        assert (num_excl == 1 and num_counting == 0) \
                or (num_excl == 0 and num_counting <= self.maxCount)
        return num_excl, num_counting

    def isAvailable(self, access):
        """ Return a boolean whether the lock is available for claiming """
        debuglog("%s isAvailable(%s): self.owners=%r"
                                            % (self, access, self.owners))
        num_excl, num_counting = self._getOwnersCount()
        if access.mode == 'counting':
            # Wants counting access
            return num_excl == 0 and num_counting < self.maxCount
        else:
            # Wants exclusive access
            return num_excl == 0 and num_counting == 0

    def claim(self, owner, access):
        """ Claim the lock (lock must be available) """
        debuglog("%s claim(%s, %s)" % (self, owner, access.mode))
        assert owner is not None
        # Callers are responsible for checking availability first.
        assert self.isAvailable(access), "ask for isAvailable() first"

        assert isinstance(access, LockAccess)
        assert access.mode in ['counting', 'exclusive']
        self.owners.append((owner, access))
        debuglog(" %s is claimed '%s'" % (self, access.mode))

    def release(self, owner, access):
        """ Release the lock

        Removes (owner, access) from the owner list, then wakes as many
        queued waiters (in FIFO order) as could now succeed.
        """
        assert isinstance(access, LockAccess)

        debuglog("%s release(%s, %s)" % (self, owner, access.mode))
        entry = (owner, access)
        assert entry in self.owners
        self.owners.remove(entry)

        # who can we wake up?
        # After an exclusive access, we may need to wake up several waiting.
        # Break out of the loop when the first waiting client should not be awakened.
        num_excl, num_counting = self._getOwnersCount()
        while len(self.waiting) > 0:
            access, d = self.waiting[0]
            if access.mode == 'counting':
                if num_excl > 0 or num_counting == self.maxCount:
                    break
                else:
                    # Tentatively count this waiter as a future owner so the
                    # next iteration sees the reduced capacity.
                    num_counting = num_counting + 1
            else:
                # access.mode == 'exclusive'
                if num_excl > 0 or num_counting > 0:
                    break
                else:
                    num_excl = num_excl + 1

            del self.waiting[0]
            # Fire asynchronously; the woken client must re-check
            # isAvailable() (see waitUntilMaybeAvailable).
            reactor.callLater(0, d.callback, self)

    def waitUntilMaybeAvailable(self, owner, access):
        """Fire when the lock *might* be available. The caller will need to
        check with isAvailable() when the deferred fires. This loose form is
        used to avoid deadlocks. If we were interested in a stronger form,
        this would be named 'waitUntilAvailable', and the deferred would fire
        after the lock had been claimed.
        """
        debuglog("%s waitUntilAvailable(%s)" % (self, owner))
        assert isinstance(access, LockAccess)
        if self.isAvailable(access):
            return defer.succeed(self)
        d = defer.Deferred()
        self.waiting.append((access, d))
        return d

    def stopWaitingUntilAvailable(self, owner, access, d):
        """ Remove a pending (access, d) entry from the waiting queue. """
        debuglog("%s stopWaitingUntilAvailable(%s)" % (self, owner))
        assert isinstance(access, LockAccess)
        assert (access, d) in self.waiting
        self.waiting.remove( (access, d) )

    def isOwner(self, owner, access):
        """ Return whether (owner, access) currently holds this lock. """
        return (owner, access) in self.owners
class RealMasterLock(BaseLock):
    """Master-side lock: a single shared BaseLock, whatever the slave."""

    def __init__(self, lockid):
        BaseLock.__init__(self, lockid.name, lockid.maxCount)
        self.description = "<MasterLock(%s, %s)>" % (self.name, self.maxCount)

    def getLock(self, slave):
        """Every slave shares this very lock instance."""
        return self
class RealSlaveLock:
    """Slave-scoped lock: lazily creates one BaseLock per slavename,
    honouring per-slave owner-count overrides.

    @param lockid: a SlaveLock id carrying name, maxCount and
                   maxCountForSlave (dict mapping slavename -> count).
    """

    def __init__(self, lockid):
        self.name = lockid.name
        self.maxCount = lockid.maxCount
        self.maxCountForSlave = lockid.maxCountForSlave
        self.description = "<SlaveLock(%s, %s, %s)>" % (self.name,
                                                        self.maxCount,
                                                        self.maxCountForSlave)
        # Lazily-populated map of slavename -> BaseLock.
        self.locks = {}

    def __repr__(self):
        return self.description

    def getLock(self, slavebuilder):
        """Return (creating on first use) the lock for this builder's slave."""
        slavename = slavebuilder.slave.slavename
        # 'in' instead of dict.has_key(): has_key was removed in Python 3.
        if slavename not in self.locks:
            # Fall back to the global maxCount when there is no
            # per-slave override.
            maxCount = self.maxCountForSlave.get(slavename,
                                                 self.maxCount)
            # Store once; the original assigned self.locks[slavename]
            # twice redundantly.
            lock = self.locks[slavename] = BaseLock(self.name, maxCount)
            desc = "<SlaveLock(%s, %s)[%s] %d>" % (self.name, maxCount,
                                                   slavename, id(lock))
            lock.description = desc
        return self.locks[slavename]
class LockAccess(util.ComparableMixin):
    """ I am an object representing a way to access a lock.

    @param lockid: LockId instance that should be accessed.
    @type  lockid: A MasterLock or SlaveLock instance.

    @param mode: Mode of accessing the lock.
    @type  mode: A string, either 'counting' or 'exclusive'.
    """
    compare_attrs = ['lockid', 'mode']

    def __init__(self, lockid, mode):
        # Validate up front; an invalid access object is never useful.
        assert isinstance(lockid, (MasterLock, SlaveLock))
        assert mode in ['counting', 'exclusive']
        self.lockid = lockid
        self.mode = mode
class BaseLockId(util.ComparableMixin):
    """ Abstract base class for LockId classes.

    Provides the user-facing `access()` factory shared by MasterLock and
    SlaveLock.

    Derived classes should add
     - Comparison with the L{util.ComparableMixin} via the L{compare_attrs}
       class variable.
     - Link to the actual lock class should be added with the L{lockClass}
       class variable.
    """

    def access(self, mode):
        """ Express how the lock should be accessed """
        assert mode in ['counting', 'exclusive']
        return LockAccess(self, mode)

    def defaultAccess(self):
        """ For buildbot 0.7.7 compability: When user doesn't specify an access
            mode, this one is chosen.
        """
        return self.access('counting')
class MasterLock(BaseLockId):
    """I am a semaphore that limits the number of simultaneous actions.

    Builds and BuildSteps can declare that they wish to claim me as they run.
    Only a limited number of such builds or steps will be able to run
    simultaneously. By default this number is one, but my maxCount parameter
    can be raised to allow two or three or more operations to happen at the
    same time.

    Use this to protect a resource that is shared among all builders and all
    slaves, for example to limit the load on a common SVN repository.
    """
    compare_attrs = ['name', 'maxCount']
    lockClass = RealMasterLock

    def __init__(self, name, maxCount=1):
        self.name = name
        self.maxCount = maxCount
class SlaveLock(BaseLockId):
    """I am a semaphore that limits simultaneous actions on each buildslave.

    Builds and BuildSteps can declare that they wish to claim me as they run.
    Only a limited number of such builds or steps will be able to run
    simultaneously on any given buildslave. By default this number is one,
    but my maxCount parameter can be raised to allow two or three or more
    operations to happen on a single buildslave at the same time.

    Use this to protect a resource that is shared among all the builds taking
    place on each slave, for example to limit CPU or memory load on an
    underpowered machine.

    Each buildslave will get an independent copy of this semaphore. By
    default each copy will use the same owner count (set with maxCount), but
    you can provide maxCountForSlave with a dictionary that maps slavename to
    owner count, to allow some slaves more parallelism than others.
    """
    compare_attrs = ['name', 'maxCount', '_maxCountForSlaveList']
    lockClass = RealSlaveLock

    def __init__(self, name, maxCount=1, maxCountForSlave=None):
        self.name = name
        self.maxCount = maxCount
        # None (not {}) as the default avoids the shared-mutable-default
        # pitfall; an empty dict means "no per-slave overrides".
        if maxCountForSlave is None:
            maxCountForSlave = {}
        self.maxCountForSlave = maxCountForSlave
        # for comparison purposes, turn this dictionary into a stably-sorted
        # tuple of tuples.  sorted() works on both Python 2 lists and
        # Python 3 dict views (whose .items() result has no .sort() method).
        self._maxCountForSlaveList = tuple(sorted(self.maxCountForSlave.items()))
|
"""Entry point for running stress tests."""
import argparse
import threading
from grpc.beta import implementations
from six.moves import queue
from src.proto.grpc.testing import metrics_pb2
from src.proto.grpc.testing import test_pb2
from tests.interop import methods
from tests.qps import histogram
from tests.stress import metrics_server
from tests.stress import test_runner
def _args():
parser = argparse.ArgumentParser(description='gRPC Python stress test client')
parser.add_argument(
'--server_addresses',
help='comma seperated list of hostname:port to run servers on',
default='localhost:8080', type=str)
parser.add_argument(
'--test_cases',
help='comma seperated list of testcase:weighting of tests to run',
default='large_unary:100',
type=str)
parser.add_argument(
'--test_duration_secs',
help='number of seconds to run the stress test',
default=-1, type=int)
parser.add_argument(
'--num_channels_per_server',
help='number of channels per server',
default=1, type=int)
parser.add_argument(
'--num_stubs_per_channel',
help='number of stubs to create per channel',
default=1, type=int)
parser.add_argument(
'--metrics_port',
help='the port to listen for metrics requests on',
default=8081, type=int)
return parser.parse_args()
def _test_case_from_arg(test_case_arg):
    """Map a test-case name from the command line to a methods.TestCase,
    raising ValueError for unknown names.
    """
    for candidate in methods.TestCase:
        if candidate.value == test_case_arg:
            return candidate
    # Reached only when no candidate matched (the original spelled this
    # with for/else, which is equivalent since the loop never breaks).
    raise ValueError('No test case {}!'.format(test_case_arg))
def _parse_weighted_test_cases(test_case_args):
    """Parse 'name:weight,name:weight,...' into {TestCase: int weight}."""
    weighted = {}
    for spec in test_case_args.split(','):
        name, weight = spec.split(':', 1)
        weighted[_test_case_from_arg(name)] = int(weight)
    return weighted
def run_test(args):
    """Run the stress test.

    Starts a metrics server, creates num_channels_per_server channels per
    target server with num_stubs_per_channel stubs each, drives the
    weighted test cases from one TestRunner per stub until the configured
    duration elapses or a runner reports an exception, then shuts
    everything down.
    """
    test_cases = _parse_weighted_test_cases(args.test_cases)
    test_servers = args.server_addresses.split(',')
    # Propagate any client exceptions with a queue
    exception_queue = queue.Queue()
    stop_event = threading.Event()
    hist = histogram.Histogram(1, 1)
    runners = []

    server = metrics_pb2.beta_create_MetricsService_server(
        metrics_server.MetricsServer(hist))
    server.add_insecure_port('[::]:{}'.format(args.metrics_port))
    server.start()

    for test_server in test_servers:
        host, port = test_server.split(':', 1)
        # range, not the Python-2-only xrange: keeps this runnable under
        # Python 3, consistent with the file's use of six.moves.
        for _ in range(args.num_channels_per_server):
            channel = implementations.insecure_channel(host, int(port))
            for _ in range(args.num_stubs_per_channel):
                stub = test_pb2.beta_create_TestService_stub(channel)
                runner = test_runner.TestRunner(stub, test_cases, hist,
                                                exception_queue, stop_event)
                runners.append(runner)

    for runner in runners:
        runner.start()
    try:
        timeout_secs = args.test_duration_secs
        if timeout_secs < 0:
            timeout_secs = None
        # Block until a runner thread reports an exception (re-raised here)
        # or the test duration expires (queue.Empty -> success).
        raise exception_queue.get(block=True, timeout=timeout_secs)
    except queue.Empty:
        # No exceptions thrown, success
        pass
    finally:
        stop_event.set()
        for runner in runners:
            runner.join()
        runner = None
        server.stop(0)
# Script entry point: parse CLI flags and run the stress test.
if __name__ == '__main__':
    run_test(_args())
|
from __future__ import print_function
import unittest
import numpy as np
import pydrake
import os.path
class TestRBTCoM(unittest.TestCase):
    """Center-of-mass queries on the example Pendulum model."""

    def testCoM0(self):
        # At the zero configuration the CoM is known in closed form.
        tree = pydrake.rbtree.RigidBodyTree(
            os.path.join(pydrake.getDrakePath(),
                         "examples/Pendulum/Pendulum.urdf"))
        cache = tree.doKinematics(np.zeros((7, 1)), np.zeros((7, 1)))
        com = tree.centerOfMass(cache)
        self.assertTrue(np.allclose(com.flat, [0.0, 0.0, -0.2425], atol=1e-4))

    def testCoMJacobian(self):
        tree = pydrake.rbtree.RigidBodyTree(
            os.path.join(pydrake.getDrakePath(),
                         "examples/Pendulum/Pendulum.urdf"))
        # A random configuration only pins down the Jacobian's shape.
        q = tree.getRandomConfiguration()
        cache = tree.doKinematics(q, np.zeros((7, 1)))
        jac = tree.centerOfMassJacobian(cache)
        self.assertTrue(np.shape(jac) == (3, 7))
        # The zero configuration has a known Jacobian.
        q = tree.getZeroConfiguration()
        cache = tree.doKinematics(q, np.zeros((7, 1)))
        jac = tree.centerOfMassJacobian(cache)
        self.assertTrue(np.allclose(jac.flat,
                                    [1., 0., 0., 0., -0.2425, 0., -0.25,
                                     0., 1., 0., 0.2425, 0., 0., 0.,
                                     0., 0., 1., 0., 0., 0., 0.], atol=1e-4))
# Allow running this test module directly.
if __name__ == '__main__':
    unittest.main()
|
import unittest
from mock import Mock
from biicode.common.model.content import Content
from biicode.common.model.content import ContentDeserializer
from biicode.common.model.content import content_diff
from biicode.common.exception import BiiSerializationException
from biicode.common.model.id import ID
class ContentTest(unittest.TestCase):
    """Unit tests for Content deserialization, diffing and similarity."""

    def test_deserialize_exception(self):
        # A non-serialized payload must raise; None deserializes to None.
        deserializer = ContentDeserializer(ID((0, 0, 0)))
        self.assertRaises(BiiSerializationException,
                          deserializer.deserialize,
                          "wrong object")
        self.assertIsNone(ContentDeserializer(ID).deserialize(None))

    def test_content_diff(self):
        # Diffing refuses to compare when either side is binary.
        base_load = Mock()
        other_load = Mock()

        base_load.is_binary = Mock(return_value=True)
        self.assertEquals(content_diff(base_load, other_load),
                          "Unable to diff binary contents of base")

        base_load.is_binary = Mock(return_value=False)
        other_load.is_binary = Mock(return_value=True)
        self.assertEquals(content_diff(base_load, other_load),
                          "Unable to diff binary contents of base")

    def test_content_similarity(self):
        # A content compared with itself is fully similar.
        content = Content(ID((0, 0, 0)), load=None)
        self.assertEquals(content.similarity(content), 1)
|
from kolibri.auth.api import KolibriAuthPermissions, KolibriAuthPermissionsFilter
from kolibri.content.api import OptionalPageNumberPagination
from rest_framework import filters, viewsets
from .models import ContentRatingLog, ContentSessionLog, ContentSummaryLog, UserSessionLog
from .serializers import ContentRatingLogSerializer, ContentSessionLogSerializer, ContentSummaryLogSerializer, UserSessionLogSerializer
class ContentSessionLogFilter(filters.FilterSet):
    """Lets ContentSessionLog list requests filter by user_id/content_id."""

    class Meta:
        model = ContentSessionLog
        fields = ['user_id', 'content_id']
class ContentSessionLogViewSet(viewsets.ModelViewSet):
    """CRUD API over ContentSessionLog, guarded by Kolibri auth
    permissions and filterable by user/content (see
    ContentSessionLogFilter); pagination is optional.
    """

    permission_classes = (KolibriAuthPermissions,)
    filter_backends = (KolibriAuthPermissionsFilter, filters.DjangoFilterBackend)
    queryset = ContentSessionLog.objects.all()
    serializer_class = ContentSessionLogSerializer
    pagination_class = OptionalPageNumberPagination
    filter_class = ContentSessionLogFilter
class ContentSummaryFilter(filters.FilterSet):
    """Allows ContentSummaryLog listings to be filtered by ``user_id``
    and/or ``content_id`` query parameters."""
    class Meta:
        model = ContentSummaryLog
        fields = ['user_id', 'content_id']
class ContentSummaryLogViewSet(viewsets.ModelViewSet):
    """CRUD API over ContentSummaryLog records.

    Row visibility is constrained by Kolibri's auth permission classes and
    filter backend; ``filter_class`` adds user_id/content_id filtering.
    """
    permission_classes = (KolibriAuthPermissions,)
    filter_backends = (KolibriAuthPermissionsFilter, filters.DjangoFilterBackend)
    queryset = ContentSummaryLog.objects.all()
    serializer_class = ContentSummaryLogSerializer
    pagination_class = OptionalPageNumberPagination
    filter_class = ContentSummaryFilter
class ContentRatingLogViewSet(viewsets.ModelViewSet):
    """CRUD API over ContentRatingLog records.

    Only permission-based row filtering is applied; no query-parameter
    filtering backend is configured for this endpoint.
    """
    permission_classes = (KolibriAuthPermissions,)
    filter_backends = (KolibriAuthPermissionsFilter,)
    queryset = ContentRatingLog.objects.all()
    serializer_class = ContentRatingLogSerializer
    pagination_class = OptionalPageNumberPagination
class UserSessionLogViewSet(viewsets.ModelViewSet):
    """CRUD API over UserSessionLog records.

    Only permission-based row filtering is applied; no query-parameter
    filtering backend is configured for this endpoint.
    """
    permission_classes = (KolibriAuthPermissions,)
    filter_backends = (KolibriAuthPermissionsFilter,)
    queryset = UserSessionLog.objects.all()
    serializer_class = UserSessionLogSerializer
    pagination_class = OptionalPageNumberPagination
|
import logging
from pycsw.core import util
from pycsw.core.etree import etree
LOGGER = logging.getLogger(__name__)
class OAIPMH(object):
    """OAI-PMH wrapper class

    Maps incoming OAI-PMH protocol requests onto equivalent CSW KVP
    requests (``request``) and re-shapes the resulting CSW XML into an
    OAI-PMH response document (``response``).
    """
    def __init__(self, context, config):
        # Set up OAI-PMH protocol constants: namespaces, the legal
        # (lowercased) parameters per verb, supported metadata formats,
        # set definitions and error-code mappings.
        LOGGER.debug('Initializing OAI-PMH constants')
        self.oaipmh_version = '2.0'
        self.namespaces = {
            'oai': 'http://www.openarchives.org/OAI/2.0/',
            'oai_dc': 'http://www.openarchives.org/OAI/2.0/oai_dc/',
            'xsi': 'http://www.w3.org/2001/XMLSchema-instance'
        }
        # Allowed KVP parameters for each OAI-PMH verb.
        self.request_model = {
            'Identify': [],
            'ListSets': ['resumptiontoken'],
            'ListMetadataFormats': ['identifier'],
            'GetRecord': ['identifier', 'metadataprefix'],
            'ListRecords': ['from', 'until', 'set', 'resumptiontoken', 'metadataprefix'],
            'ListIdentifiers': ['from', 'until', 'set', 'resumptiontoken', 'metadataprefix'],
        }
        # Per-format namespace/schema plus the XPaths used to extract the
        # identifier, datestamp and set token from a returned record.
        self.metadata_formats = {
            'iso19139': {
                'namespace': 'http://www.isotc211.org/2005/gmd',
                'schema': 'http://www.isotc211.org/2005/gmd/gmd.xsd',
                'identifier': './/gmd:fileIdentifier/gco:CharacterString',
                'dateStamp': './/gmd:dateStamp/gco:DateTime|.//gmd:dateStamp/gco:Date',
                'setSpec': './/gmd:hierarchyLevel/gmd:MD_ScopeCode'
            },
            'csw-record': {
                'namespace': 'http://www.opengis.net/cat/csw/2.0.2',
                'schema': 'http://schemas.opengis.net/csw/2.0.2/record.xsd',
                'identifier': './/dc:identifier',
                'dateStamp': './/dct:modified',
                'setSpec': './/dc:type'
            },
            'fgdc-std': {
                'namespace': 'http://www.opengis.net/cat/csw/csdgm',
                'schema': 'http://www.fgdc.gov/metadata/fgdc-std-001-1998.xsd',
                'identifier': './/idinfo/datasetid',
                'dateStamp': './/metainfo/metd',
                'setSpec': './/dataset'
            },
            'oai_dc': {
                'namespace': '%soai_dc/' % self.namespaces['oai'],
                'schema': 'http://www.openarchives.org/OAI/2.0/oai_dc.xsd',
                'identifier': './/dc:identifier',
                'dateStamp': './/dct:modified',
                'setSpec': './/dc:type'
            },
            'dif': {
                'namespace': 'http://gcmd.gsfc.nasa.gov/Aboutus/xml/dif/',
                'schema': 'http://gcmd.gsfc.nasa.gov/Aboutus/xml/dif/dif.xsd',
                'identifier': './/dif:Entry_ID',
                'dateStamp': './/dif:Last_DIF_Revision_Date',
                'setSpec': '//dataset'
            },
            'gm03': {
                'namespace': 'http://www.interlis.ch/INTERLIS2.3',
                'schema': 'http://www.geocat.ch/internet/geocat/en/home/documentation/gm03.parsys.50316.downloadList.86742.DownloadFile.tmp/gm0321.zip',
                # NOTE(review): 'fileIdentifer' spelling below — confirm it
                # matches the actual GM03 element name before "fixing" it.
                'identifier': './/gm03:DATASECTION//gm03:fileIdentifer',
                'dateStamp': './/gm03:DATASECTION//gm03:dateStamp',
                'setSpec': './/dataset'
            }
        }
        # setSpec -> (display name, record-type token) mapping.
        self.metadata_sets = {
            'datasets': ('Datasets', 'dataset'),
            'interactiveResources': ('Interactive Resources', 'service')
        }
        self.error_codes = {
            'badArgument': 'InvalidParameterValue',
            'badVerb': 'OperationNotSupported',
            'idDoesNotExist': None,
            'noRecordsMatch': None,
        }
        self.context = context
        self.context.namespaces.update(self.namespaces)
        self.context.namespaces.update({'gco': 'http://www.isotc211.org/2005/gco'})
        self.config = config
    def request(self, kvp):
        """process OAI-PMH request

        Translates the OAI-PMH KVP dict into the equivalent CSW 2.0.2 KVP
        request dict consumed by the CSW server core.
        """
        kvpout = {'service': 'CSW', 'version': '2.0.2', 'mode': 'oaipmh'}
        LOGGER.debug('Incoming kvp: %s', kvp)
        if 'verb' in kvp:
            # Resolve the requested metadataPrefix (default: csw-record).
            if 'metadataprefix' in kvp:
                self.metadata_prefix = kvp['metadataprefix']
                try:
                    kvpout['outputschema'] = self._get_metadata_prefix(kvp['metadataprefix'])
                except KeyError:
                    kvpout['outputschema'] = kvp['metadataprefix']
            else:
                self.metadata_prefix = 'csw-record'
            LOGGER.debug('metadataPrefix: %s', self.metadata_prefix)
            # Record-returning verbs map onto CSW GetRecords.
            if kvp['verb'] in ['ListRecords', 'ListIdentifiers', 'GetRecord']:
                kvpout['request'] = 'GetRecords'
                kvpout['resulttype'] = 'results'
                kvpout['typenames'] = 'csw:Record'
                kvpout['elementsetname'] = 'full'
            if kvp['verb'] in ['Identify', 'ListMetadataFormats', 'ListSets']:
                kvpout['request'] = 'GetCapabilities'
            elif kvp['verb'] == 'GetRecord':
                kvpout['request'] = 'GetRecordById'
                if 'identifier' in kvp:
                    kvpout['id'] = kvp['identifier']
                if ('outputschema' in kvpout and
                        kvp['metadataprefix'] == 'oai_dc'):  # just use default DC
                    del kvpout['outputschema']
            elif kvp['verb'] in ['ListRecords', 'ListIdentifiers']:
                # resumptionToken carries the CSW startposition.
                if 'resumptiontoken' in kvp:
                    kvpout['startposition'] = kvp['resumptiontoken']
                # NOTE(review): dead branch kept as-is — the 'del' is
                # commented out, so ListIdentifiers keeps its outputschema.
                if ('outputschema' in kvpout and
                        kvp['verb'] == 'ListIdentifiers'):  # simple output only
                    pass  # del kvpout['outputschema']
                if ('outputschema' in kvpout and
                        kvp['metadataprefix'] in ['dc', 'oai_dc']):  # just use default DC
                    del kvpout['outputschema']
                # Translate from/until into a CQL constraint on dc:date.
                start = end = None
                LOGGER.debug('Scanning temporal parameters')
                if 'from' in kvp:
                    start = 'dc:date >= %s' % kvp['from']
                if 'until' in kvp:
                    end = 'dc:date <= %s' % kvp['until']
                if any([start is not None, end is not None]):
                    if all([start is not None, end is not None]):
                        time_query = '%s and %s' % (start, end)
                    elif end is None:
                        time_query = start
                    elif start is None:
                        time_query = end
                    kvpout['constraintlanguage'] = 'CQL_TEXT'
                    kvpout['constraint'] = time_query
        LOGGER.debug('Resulting parameters: %s', kvpout)
        return kvpout
    def response(self, response, kvp, repository, server_url):
        """Re-shape a CSW XML response into an OAI-PMH response document."""
        mode = kvp.pop('mode', None)
        if 'config' in kvp:
            # NOTE(review): the popped value is never used — it appears the
            # intent is only to strip 'config' from the echoed request
            # attributes; confirm.
            config_val = kvp.pop('config')
        url = '%smode=oaipmh' % util.bind_url(server_url)
        # Root element with schemaLocation, responseDate and echoed request.
        node = etree.Element(util.nspath_eval('oai:OAI-PMH', self.namespaces), nsmap=self.namespaces)
        node.set(util.nspath_eval('xsi:schemaLocation', self.namespaces), '%s http://www.openarchives.org/OAI/2.0/OAI-PMH.xsd' % self.namespaces['oai'])
        LOGGER.debug(etree.tostring(node))
        etree.SubElement(node, util.nspath_eval('oai:responseDate', self.namespaces)).text = util.get_today_and_now()
        etree.SubElement(node, util.nspath_eval('oai:request', self.namespaces), attrib=kvp).text = url
        # Protocol-level validation: verb present, known, no CSW exception.
        if 'verb' not in kvp:
            etree.SubElement(node, util.nspath_eval('oai:error', self.namespaces), code='badArgument').text = 'Missing \'verb\' parameter'
            return node
        if kvp['verb'] not in self.request_model.keys():
            etree.SubElement(node, util.nspath_eval('oai:error', self.namespaces), code='badArgument').text = 'Unknown verb \'%s\'' % kvp['verb']
            return node
        if etree.QName(response).localname == 'ExceptionReport':
            etree.SubElement(node, util.nspath_eval('oai:error', self.namespaces), code='badArgument').text = response.xpath('//ows:ExceptionText|//ows20:ExceptionText', namespaces=self.context.namespaces)[0].text
            return node
        verb = kvp.pop('verb')
        if verb in ['GetRecord', 'ListIdentifiers', 'ListRecords']:
            if 'metadataprefix' not in kvp:
                etree.SubElement(node, util.nspath_eval('oai:error', self.namespaces), code='badArgument').text = 'Missing metadataPrefix parameter'
                return node
            elif kvp['metadataprefix'] not in self.metadata_formats.keys():
                etree.SubElement(node, util.nspath_eval('oai:error', self.namespaces), code='badArgument').text = 'Invalid metadataPrefix parameter'
                return node
        # Reject parameters that are not legal for this verb.
        for key, value in kvp.items():
            if key != 'mode' and key not in self.request_model[verb]:
                etree.SubElement(node, util.nspath_eval('oai:error', self.namespaces), code='badArgument').text = 'Illegal parameter \'%s\'' % key
                return node
        verbnode = etree.SubElement(node, util.nspath_eval('oai:%s' % verb, self.namespaces))
        if verb == 'Identify':
            etree.SubElement(verbnode, util.nspath_eval('oai:repositoryName', self.namespaces)).text = self.config.get('metadata:main', 'identification_title')
            etree.SubElement(verbnode, util.nspath_eval('oai:baseURL', self.namespaces)).text = url
            etree.SubElement(verbnode, util.nspath_eval('oai:protocolVersion', self.namespaces)).text = '2.0'
            etree.SubElement(verbnode, util.nspath_eval('oai:adminEmail', self.namespaces)).text = self.config.get('metadata:main', 'contact_email')
            etree.SubElement(verbnode, util.nspath_eval('oai:earliestDatestamp', self.namespaces)).text = repository.query_insert('min')
            etree.SubElement(verbnode, util.nspath_eval('oai:deletedRecord', self.namespaces)).text = 'no'
            etree.SubElement(verbnode, util.nspath_eval('oai:granularity', self.namespaces)).text = 'YYYY-MM-DDThh:mm:ssZ'
        elif verb == 'ListSets':
            for key, value in sorted(self.metadata_sets.items()):
                setnode = etree.SubElement(verbnode, util.nspath_eval('oai:set', self.namespaces))
                etree.SubElement(setnode, util.nspath_eval('oai:setSpec', self.namespaces)).text = key
                etree.SubElement(setnode, util.nspath_eval('oai:setName', self.namespaces)).text = value[0]
        elif verb == 'ListMetadataFormats':
            for key, value in sorted(self.metadata_formats.items()):
                mdfnode = etree.SubElement(verbnode, util.nspath_eval('oai:metadataFormat', self.namespaces))
                etree.SubElement(mdfnode, util.nspath_eval('oai:metadataPrefix', self.namespaces)).text = key
                etree.SubElement(mdfnode, util.nspath_eval('oai:schema', self.namespaces)).text = value['schema']
                etree.SubElement(mdfnode, util.nspath_eval('oai:metadataNamespace', self.namespaces)).text = value['namespace']
        elif verb in ['GetRecord', 'ListIdentifiers', 'ListRecords']:
            if verb == 'GetRecord':  # GetRecordById
                records = response.getchildren()
            else:  # GetRecords
                records = response.getchildren()[1].getchildren()
            for child in records:
                recnode = etree.SubElement(verbnode, util.nspath_eval('oai:record', self.namespaces))
                header = etree.SubElement(recnode, util.nspath_eval('oai:header', self.namespaces))
                self._transform_element(header, child, 'oai:identifier')
                self._transform_element(header, child, 'oai:dateStamp')
                self._transform_element(header, child, 'oai:setSpec')
                if verb in ['GetRecord', 'ListRecords']:
                    metadata = etree.SubElement(recnode, util.nspath_eval('oai:metadata', self.namespaces))
                    if 'metadataprefix' in kvp and kvp['metadataprefix'] == 'oai_dc':
                        child.tag = util.nspath_eval('oai_dc:dc', self.namespaces)
                    metadata.append(child)
            if verb != 'GetRecord':
                # Build the resumptionToken from the CSW paging attributes.
                complete_list_size = response.xpath('//@numberOfRecordsMatched')[0]
                next_record = response.xpath('//@nextRecord')[0]
                cursor = str(int(complete_list_size) - int(next_record) - 1)
                resumption_token = etree.SubElement(verbnode, util.nspath_eval('oai:resumptionToken', self.namespaces),
                                                    completeListSize=complete_list_size, cursor=cursor).text = next_record
        return node
    def _get_metadata_prefix(self, prefix):
        """Convenience function to return metadataPrefix as CSW outputschema"""
        try:
            outputschema = self.metadata_formats[prefix]['namespace']
        except KeyError:
            outputschema = prefix
        return outputschema
    def _transform_element(self, parent, element, elname):
        """tests for existence of a given xpath, writes out text if exists"""
        # XPath (or literal) configured for this format/field combination.
        xpath = self.metadata_formats[self.metadata_prefix][elname.split(':')[1]]
        if xpath.startswith(('.//', '//')):
            value = element.xpath(xpath, namespaces=self.context.namespaces)
            if value:
                value = value[0].text
        else:  # bare string literal
            value = xpath
        el = etree.SubElement(parent, util.nspath_eval(elname, self.context.namespaces))
        if value:
            if elname == 'oai:setSpec':
                value = None
                # NOTE(review): v[1] holds tokens like 'dataset'/'service',
                # so comparing it against elname ('oai:setSpec') can never
                # match and value always stays None here — should this
                # compare against the extracted value instead? TODO confirm.
                for k, v in self.metadata_sets.items():
                    if v[1] == elname:
                        value = k
                        break
            el.text = value
|
"""
Menu utilities.
"""
from fnmatch import fnmatch
from django.utils.importlib import import_module
from django.core.urlresolvers import reverse
from wpadmin.utils import (
get_wpadmin_settings, get_admin_site, get_admin_site_name)
def get_menu_cls(menu, admin_site_name='admin'):
    """
    Return the configured dotted class path for *menu* ('top' or 'left'),
    or None when this admin site has no such menu configured.
    """
    site_settings = get_wpadmin_settings(admin_site_name)
    menu_settings = site_settings.get('menu', {})
    return menu_settings.get(menu, None)
def get_menu(menu, admin_site_name='admin'):
    """
    Instantiate and return the configured menu class for *menu*
    ('top' or 'left'); None when nothing is configured.
    """
    dotted_path = get_menu_cls(menu, admin_site_name)
    if not dotted_path:
        return None
    module_path, class_name = dotted_path.rsplit('.', 1)
    module = import_module(module_path)
    return getattr(module, class_name)()
def get_avail_models(context):
    """ Returns (model, perm,) for all models user can possibly see """
    admin_site = get_admin_site(context)
    request = context.get('request')
    visible = []
    for model, model_admin in admin_site._registry.items():
        perms = model_admin.get_model_perms(request)
        # Skip models for which the user holds no permission at all.
        if any(perms.values()):
            visible.append((model, perms))
    return visible
def filter_models(context, models, exclude):
    """
    Returns (model, perm,) for all models that match models/exclude patterns
    and are visible by current user.
    """
    available = get_avail_models(context)

    def dotted_name(model):
        return '%s.%s' % (model.__module__, model.__name__)

    # The O(len(patterns) * len(models)) scans below are acceptable because
    # admin model lists are small and admin performance is not a
    # bottleneck; optimize only if that stops being true.
    if not models:
        included = list(available)
    else:
        included = []
        for pattern in models:
            for entry in available:
                if fnmatch(dotted_name(entry[0]), pattern) and entry not in included:
                    included.append(entry)
    result = list(included)
    for pattern in exclude:
        for entry in included:
            if fnmatch(dotted_name(entry[0]), pattern) and entry in result:
                result.remove(entry)
    return result
class UserTestElementMixin(object):
    """
    Mixin which adds a method for checking if current user is allowed to see
    something (menu, menu item, etc.).
    """
    def is_user_allowed(self, user):
        """
        Hook for subclasses: decide whether *user* may see this element.
        The default implementation allows everyone.
        """
        return True
class AppListElementMixin(object):
    """
    Mixin class for AppList and ModelList MenuItem.
    """
    def _visible_models(self, context):
        """Return (model, perms) pairs matching this element's patterns."""
        include_patterns = list(self.models)
        exclude_patterns = list(self.exclude)
        # An exclude-only configuration implicitly starts from all models.
        if exclude_patterns and not include_patterns:
            include_patterns = ["*"]
        return filter_models(context, include_patterns, exclude_patterns)

    def _get_admin_app_list_url(self, model, context):
        """Return the admin app-index url for *model*'s application."""
        return reverse('%s:app_list' % get_admin_site_name(context),
                       args=(model._meta.app_label,))

    def _get_admin_change_url(self, model, context):
        """Return the admin changelist url for *model*."""
        url_name = '%s:%s_%s_changelist' % (get_admin_site_name(context),
                                            model._meta.app_label,
                                            model.__name__.lower())
        return reverse(url_name)

    def _get_admin_add_url(self, model, context):
        """Return the admin add-form url for *model*."""
        url_name = '%s:%s_%s_add' % (get_admin_site_name(context),
                                     model._meta.app_label,
                                     model.__name__.lower())
        return reverse(url_name)

    def is_empty(self):
        """True when this element has no children to display."""
        return not self.children
|
import email.utils
import errno
import hashlib
import mimetypes
import os
import re
import base64
import binascii
import math
from hashlib import md5
import boto.utils
from boto.compat import BytesIO, six, urllib, encodebytes
from boto.exception import BotoClientError
from boto.exception import StorageDataError
from boto.exception import PleaseRetryException
from boto.provider import Provider
from boto.s3.keyfile import KeyFile
from boto.s3.user import User
from boto import UserAgent
from boto.utils import compute_md5, compute_hash
from boto.utils import find_matching_headers
from boto.utils import merge_headers_by_name
class Key(object):
    """
    Represents a key (object) in an S3 bucket.
    :ivar bucket: The parent :class:`boto.s3.bucket.Bucket`.
    :ivar name: The name of this Key object.
    :ivar metadata: A dictionary containing user metadata that you
        wish to store with the object or that has been retrieved from
        an existing object.
    :ivar cache_control: The value of the `Cache-Control` HTTP header.
    :ivar content_type: The value of the `Content-Type` HTTP header.
    :ivar content_encoding: The value of the `Content-Encoding` HTTP header.
    :ivar content_disposition: The value of the `Content-Disposition` HTTP
        header.
    :ivar content_language: The value of the `Content-Language` HTTP header.
    :ivar etag: The `etag` associated with this object.
    :ivar last_modified: The string timestamp representing the last
        time this object was modified in S3.
    :ivar owner: The ID of the owner of this object.
    :ivar storage_class: The storage class of the object. Currently, one of:
        STANDARD | REDUCED_REDUNDANCY | GLACIER
    :ivar md5: The MD5 hash of the contents of the object.
    :ivar size: The size, in bytes, of the object.
    :ivar version_id: The version ID of this object, if it is a versioned
        object.
    :ivar encrypted: Whether the object is encrypted while at rest on
        the server.
    """
    DefaultContentType = 'application/octet-stream'
    # XML payload template for restoring an archived (Glacier) object;
    # %s is the number of days the restored copy stays available.
    RestoreBody = """<?xml version="1.0" encoding="UTF-8"?>
    <RestoreRequest xmlns="http://s3.amazonaws.com/doc/2006-03-01">
    <Days>%s</Days>
    </RestoreRequest>"""
    # Chunk size (bytes) used when iterating/streaming key contents;
    # overridable via the Boto 'key_buffer_size' config option.
    BufferSize = boto.config.getint('Boto', 'key_buffer_size', 8192)
    # The object metadata fields a user can set, other than custom metadata
    # fields (i.e., those beginning with a provider-specific prefix like
    # x-amz-meta).
    base_user_settable_fields = set(["cache-control", "content-disposition",
                                     "content-encoding", "content-language",
                                     "content-md5", "content-type",
                                     "x-robots-tag", "expires"])
    # Same fields with '-' replaced by '_' (attribute-name form).
    _underscore_base_user_settable_fields = set()
    for f in base_user_settable_fields:
        _underscore_base_user_settable_fields.add(f.replace('-', '_'))
    # Metadata fields, whether user-settable or not, other than custom
    # metadata fields (i.e., those beginning with a provider specific prefix
    # like x-amz-meta).
    base_fields = (base_user_settable_fields |
                   set(["last-modified", "content-length", "date", "etag"]))
    def __init__(self, bucket=None, name=None):
        """Create a local handle for key *name* in *bucket*; no request
        is made to the service here."""
        self.bucket = bucket
        self.name = name
        self.metadata = {}
        self.cache_control = None
        self.content_type = self.DefaultContentType
        self.content_encoding = None
        self.content_disposition = None
        self.content_language = None
        self.filename = None
        self.etag = None
        self.is_latest = False
        self.last_modified = None
        self.owner = None
        self._storage_class = None
        self.path = None
        # resp/mode track an in-progress streaming request; see open_read().
        self.resp = None
        self.mode = None
        self.size = None
        self.version_id = None
        self.source_version_id = None
        self.delete_marker = False
        self.encrypted = None
        # If the object is being restored, this attribute will be set to True.
        # If the object is restored, it will be set to False. Otherwise this
        # value will be None. If the restore is completed (ongoing_restore =
        # False), the expiry_date will be populated with the expiry date of the
        # restored object.
        self.ongoing_restore = None
        self.expiry_date = None
        # Locally computed digests (e.g. 'md5' -> raw digest bytes), backing
        # the md5/base64md5 properties.
        self.local_hashes = {}
def __repr__(self):
if self.bucket:
name = u'<Key: %s,%s>' % (self.bucket.name, self.name)
else:
name = u'<Key: None,%s>' % self.name
# Encode to bytes for Python 2 to prevent display decoding issues
if not isinstance(name, str):
name = name.encode('utf-8')
return name
    def __iter__(self):
        # The key is its own iterator; iteration yields BufferSize-byte
        # chunks of the object's contents via next()/__next__.
        return self
@property
def provider(self):
provider = None
if self.bucket and self.bucket.connection:
provider = self.bucket.connection.provider
return provider
def _get_key(self):
return self.name
def _set_key(self, value):
self.name = value
key = property(_get_key, _set_key);
def _get_md5(self):
if 'md5' in self.local_hashes and self.local_hashes['md5']:
return binascii.b2a_hex(self.local_hashes['md5'])
def _set_md5(self, value):
if value:
self.local_hashes['md5'] = binascii.a2b_hex(value)
elif 'md5' in self.local_hashes:
self.local_hashes.pop('md5', None)
md5 = property(_get_md5, _set_md5);
def _get_base64md5(self):
if 'md5' in self.local_hashes and self.local_hashes['md5']:
md5 = self.local_hashes['md5']
if not isinstance(md5, bytes):
md5 = md5.encode('utf-8')
return binascii.b2a_base64(md5).decode('utf-8').rstrip('\n')
def _set_base64md5(self, value):
if value:
if not isinstance(value, six.string_types):
value = value.decode('utf-8')
self.local_hashes['md5'] = binascii.a2b_base64(value)
elif 'md5' in self.local_hashes:
del self.local_hashes['md5']
base64md5 = property(_get_base64md5, _set_base64md5);
    def _get_storage_class(self):
        # Lazily determine the storage class by listing the key, since a
        # previously fetched response may not have reported it.
        if self._storage_class is None and self.bucket:
            # Attempt to fetch storage class
            list_items = list(self.bucket.list(self.name.encode('utf-8')))
            if len(list_items) and getattr(list_items[0], '_storage_class',
                                           None):
                self._storage_class = list_items[0]._storage_class
            else:
                # Key is not yet saved? Just use default...
                self._storage_class = 'STANDARD'
        return self._storage_class

    def _set_storage_class(self, value):
        # An explicit assignment overrides the lazily-determined value.
        self._storage_class = value

    storage_class = property(_get_storage_class, _set_storage_class)
def get_md5_from_hexdigest(self, md5_hexdigest):
"""
A utility function to create the 2-tuple (md5hexdigest, base64md5)
from just having a precalculated md5_hexdigest.
"""
digest = binascii.unhexlify(md5_hexdigest)
base64md5 = encodebytes(digest)
if base64md5[-1] == '\n':
base64md5 = base64md5[0:-1]
return (md5_hexdigest, base64md5)
def handle_encryption_headers(self, resp):
provider = self.bucket.connection.provider
if provider.server_side_encryption_header:
self.encrypted = resp.getheader(
provider.server_side_encryption_header, None)
else:
self.encrypted = None
def handle_storage_class_header(self, resp):
provider = self.bucket.connection.provider
if provider.storage_class_header:
self._storage_class = resp.getheader(
provider.storage_class_header, None)
if (self._storage_class is None and
provider.get_provider_name() == 'aws'):
# S3 docs for HEAD object requests say S3 will return this
# header for all objects except Standard storage class objects.
self._storage_class = 'STANDARD'
def handle_version_headers(self, resp, force=False):
provider = self.bucket.connection.provider
# If the Key object already has a version_id attribute value, it
# means that it represents an explicit version and the user is
# doing a get_contents_*(version_id=<foo>) to retrieve another
# version of the Key. In that case, we don't really want to
# overwrite the version_id in this Key object. Comprende?
if self.version_id is None or force:
self.version_id = resp.getheader(provider.version_id, None)
self.source_version_id = resp.getheader(provider.copy_source_version_id,
None)
if resp.getheader(provider.delete_marker, 'false') == 'true':
self.delete_marker = True
else:
self.delete_marker = False
def handle_restore_headers(self, response):
provider = self.bucket.connection.provider
header = response.getheader(provider.restore_header)
if header is None:
return
parts = header.split(',', 1)
for part in parts:
key, val = [i.strip() for i in part.split('=')]
val = val.replace('"', '')
if key == 'ongoing-request':
self.ongoing_restore = True if val.lower() == 'true' else False
elif key == 'expiry-date':
self.expiry_date = val
    def handle_addl_headers(self, headers):
        """
        Used by Key subclasses to do additional, provider-specific
        processing of response headers. No-op for this base class.

        :param headers: Response headers as returned by ``getheaders()``
            (a sequence of (name, value) pairs).
        """
        pass
    def open_read(self, headers=None, query_args='',
                  override_num_retries=None, response_headers=None):
        """
        Open this key for reading
        :type headers: dict
        :param headers: Headers to pass in the web request
        :type query_args: string
        :param query_args: Arguments to pass in the query string
            (ie, 'torrent')
        :type override_num_retries: int
        :param override_num_retries: If not None will override configured
            num_retries parameter for underlying GET.
        :type response_headers: dict
        :param response_headers: A dictionary containing HTTP
            headers/values that will override any headers associated
            with the stored object in the response. See
            http://goo.gl/EWOPb for details.

        NOTE(review): the *response_headers* argument is never passed to
        make_request here and the local name is rebound to the response's
        headers below — presumably overrides are applied by a caller
        further up the stack; confirm before relying on it.
        """
        # Idempotent: a second call while a response is open is a no-op.
        if self.resp is None:
            self.mode = 'r'
            provider = self.bucket.connection.provider
            self.resp = self.bucket.connection.make_request(
                'GET', self.bucket.name, self.name, headers,
                query_args=query_args,
                override_num_retries=override_num_retries)
            if self.resp.status < 199 or self.resp.status > 299:
                body = self.resp.read()
                raise provider.storage_response_error(self.resp.status,
                                                      self.resp.reason, body)
            response_headers = self.resp.msg
            self.metadata = boto.utils.get_aws_metadata(response_headers,
                                                        provider)
            for name, value in response_headers.items():
                # To get correct size for Range GETs, use Content-Range
                # header if one was returned. If not, use Content-Length
                # header.
                if (name.lower() == 'content-length' and
                        'Content-Range' not in response_headers):
                    self.size = int(value)
                elif name.lower() == 'content-range':
                    end_range = re.sub('.*/(.*)', '\\1', value)
                    self.size = int(end_range)
                elif name.lower() in Key.base_fields:
                    self.__dict__[name.lower().replace('-', '_')] = value
            self.handle_version_headers(self.resp)
            self.handle_encryption_headers(self.resp)
            self.handle_restore_headers(self.resp)
            self.handle_addl_headers(self.resp.getheaders())
    def open_write(self, headers=None, override_num_retries=None):
        """
        Open this key for writing.
        Not yet implemented

        :type headers: dict
        :param headers: Headers to pass in the write request
        :type override_num_retries: int
        :param override_num_retries: If not None will override configured
            num_retries parameter for underlying PUT.

        :raises BotoClientError: always — write-mode streaming is not
            supported by this class.
        """
        raise BotoClientError('Not Implemented')
def open(self, mode='r', headers=None, query_args=None,
override_num_retries=None):
if mode == 'r':
self.mode = 'r'
self.open_read(headers=headers, query_args=query_args,
override_num_retries=override_num_retries)
elif mode == 'w':
self.mode = 'w'
self.open_write(headers=headers,
override_num_retries=override_num_retries)
else:
raise BotoClientError('Invalid mode: %s' % mode)
    # True once close() has been called on this key.
    closed = False

    def close(self, fast=False):
        """
        Close this key.
        :type fast: bool
        :param fast: True if you want the connection to be closed without first
        reading the content. This should only be used in cases where subsequent
        calls don't need to return the content from the open HTTP connection.
        Note: As explained at
        http://docs.python.org/2/library/httplib.html#httplib.HTTPConnection.getresponse,
        callers must read the whole response before sending a new request to the
        server. Calling Key.close(fast=True) and making a subsequent request to
        the server will work because boto will get an httplib exception and
        close/reopen the connection.
        """
        # Draining the response keeps the underlying HTTP connection
        # reusable; fast=True skips that at the cost of a reconnect.
        if self.resp and not fast:
            self.resp.read()
        self.resp = None
        self.mode = None
        self.closed = True
    def next(self):
        """
        By providing a next method, the key object supports use as an iterator.
        For example, you can now say:
        for bytes in key:
        write bytes to a file or whatever
        All of the HTTP connection stuff is handled for you.
        """
        # Opens the streaming GET lazily on first call (no-op afterwards),
        # then yields BufferSize-byte chunks until the body is exhausted.
        self.open_read()
        data = self.resp.read(self.BufferSize)
        if not data:
            self.close()
            raise StopIteration
        return data
    # Python 3 iterator support
    __next__ = next
def read(self, size=0):
self.open_read()
if size == 0:
data = self.resp.read()
else:
data = self.resp.read(size)
if not data:
self.close()
return data
def change_storage_class(self, new_storage_class, dst_bucket=None,
validate_dst_bucket=True):
"""
Change the storage class of an existing key.
Depending on whether a different destination bucket is supplied
or not, this will either move the item within the bucket, preserving
all metadata and ACL info bucket changing the storage class or it
will copy the item to the provided destination bucket, also
preserving metadata and ACL info.
:type new_storage_class: string
:param new_storage_class: The new storage class for the Key.
Possible values are:
* STANDARD
* REDUCED_REDUNDANCY
:type dst_bucket: string
:param dst_bucket: The name of a destination bucket. If not
provided the current bucket of the key will be used.
:type validate_dst_bucket: bool
:param validate_dst_bucket: If True, will validate the dst_bucket
by using an extra list request.
"""
bucket_name = dst_bucket or self.bucket.name
if new_storage_class == 'STANDARD':
return self.copy(bucket_name, self.name,
reduced_redundancy=False, preserve_acl=True,
validate_dst_bucket=validate_dst_bucket)
elif new_storage_class == 'REDUCED_REDUNDANCY':
return self.copy(bucket_name, self.name,
reduced_redundancy=True, preserve_acl=True,
validate_dst_bucket=validate_dst_bucket)
else:
raise BotoClientError('Invalid storage class: %s' %
new_storage_class)
    def copy(self, dst_bucket, dst_key, metadata=None,
             reduced_redundancy=False, preserve_acl=False,
             encrypt_key=False, validate_dst_bucket=True):
        """
        Copy this Key to another bucket.
        :type dst_bucket: string
        :param dst_bucket: The name of the destination bucket
        :type dst_key: string
        :param dst_key: The name of the destination key
        :type metadata: dict
        :param metadata: Metadata to be associated with new key. If
        metadata is supplied, it will replace the metadata of the
        source key being copied. If no metadata is supplied, the
        source key's metadata will be copied to the new key.
        :type reduced_redundancy: bool
        :param reduced_redundancy: If True, this will force the
        storage class of the new Key to be REDUCED_REDUNDANCY
        regardless of the storage class of the key being copied.
        The Reduced Redundancy Storage (RRS) feature of S3,
        provides lower redundancy at lower storage cost.
        :type preserve_acl: bool
        :param preserve_acl: If True, the ACL from the source key will
        be copied to the destination key. If False, the
        destination key will have the default ACL. Note that
        preserving the ACL in the new key object will require two
        additional API calls to S3, one to retrieve the current
        ACL and one to set that ACL on the new object. If you
        don't care about the ACL, a value of False will be
        significantly more efficient.
        :type encrypt_key: bool
        :param encrypt_key: If True, the new copy of the object will
        be encrypted on the server-side by S3 and will be stored
        in an encrypted form while at rest in S3.
        :type validate_dst_bucket: bool
        :param validate_dst_bucket: If True, will validate the dst_bucket
        by using an extra list request.
        :rtype: :class:`boto.s3.key.Key` or subclass
        :returns: An instance of the newly created key object
        """
        dst_bucket = self.bucket.connection.lookup(dst_bucket,
                                                   validate_dst_bucket)
        # reduced_redundancy forces RRS; otherwise keep the source's class.
        if reduced_redundancy:
            storage_class = 'REDUCED_REDUNDANCY'
        else:
            storage_class = self.storage_class
        # src_version_id ensures a specific version is copied when this
        # Key represents a versioned object.
        return dst_bucket.copy_key(dst_key, self.bucket.name,
                                   self.name, metadata,
                                   storage_class=storage_class,
                                   preserve_acl=preserve_acl,
                                   encrypt_key=encrypt_key,
                                   src_version_id=self.version_id)
def startElement(self, name, attrs, connection):
if name == 'Owner':
self.owner = User(self)
return self.owner
else:
return None
def endElement(self, name, value, connection):
if name == 'Key':
self.name = value
elif name == 'ETag':
self.etag = value
elif name == 'IsLatest':
if value == 'true':
self.is_latest = True
else:
self.is_latest = False
elif name == 'LastModified':
self.last_modified = value
elif name == 'Size':
self.size = int(value)
elif name == 'StorageClass':
self.storage_class = value
elif name == 'Owner':
pass
elif name == 'VersionId':
self.version_id = value
else:
setattr(self, name, value)
def exists(self, headers=None):
"""
Returns True if the key exists
:rtype: bool
:return: Whether the key exists on S3
"""
return bool(self.bucket.lookup(self.name, headers=headers))
    def delete(self, headers=None):
        """
        Delete this key from S3

        Passes this Key's version_id so that, for versioned objects, the
        specific version this Key represents is the one deleted.
        """
        return self.bucket.delete_key(self.name, version_id=self.version_id,
                                      headers=headers)
    def get_metadata(self, name):
        # Return the user-metadata value stored under *name*, or None.
        return self.metadata.get(name)
def set_metadata(self, name, value):
# Ensure that metadata that is vital to signing is in the correct
# case. Applies to ``Content-Type`` & ``Content-MD5``.
if name.lower() == 'content-type':
self.metadata['Content-Type'] = value
elif name.lower() == 'content-md5':
self.metadata['Content-MD5'] = value
else:
self.metadata[name] = value
if name.lower() in Key.base_user_settable_fields:
self.__dict__[name.lower().replace('-', '_')] = value
    def update_metadata(self, d):
        # Merge the dict *d* into this key's user metadata.
        self.metadata.update(d)
    # convenience methods for setting/getting ACL
    def set_acl(self, acl_str, headers=None):
        """Apply *acl_str* to this key via its bucket; no-op when the key
        is not attached to a bucket."""
        if self.bucket is not None:
            self.bucket.set_acl(acl_str, self.name, headers=headers)
    def get_acl(self, headers=None):
        """Return this key's ACL via its bucket; None when unattached."""
        if self.bucket is not None:
            return self.bucket.get_acl(self.name, headers=headers)
    def get_xml_acl(self, headers=None):
        """Return this key's ACL as XML via its bucket; None when unattached."""
        if self.bucket is not None:
            return self.bucket.get_xml_acl(self.name, headers=headers)
def set_xml_acl(self, acl_str, headers=None):
    """Set this key's ACL from the XML document *acl_str* (no-op without a bucket)."""
    if self.bucket is None:
        return None
    return self.bucket.set_xml_acl(acl_str, self.name, headers=headers)
def set_canned_acl(self, acl_str, headers=None):
    """Apply the canned ACL *acl_str* to this key via its bucket.

    NOTE(review): unlike the other ACL helpers above, this does not
    guard against ``self.bucket`` being None and passes ``headers``
    positionally -- confirm that asymmetry is intentional.
    """
    return self.bucket.set_canned_acl(acl_str, self.name, headers)
def get_redirect(self):
    """Return the redirect location configured for this key.

    If no redirect is configured (via set_redirect), then None
    will be returned.
    """
    response = self.bucket.connection.make_request(
        'HEAD', self.bucket.name, self.name)
    if response.status != 200:
        raise self.provider.storage_response_error(
            response.status, response.reason, response.read())
    return response.getheader('x-amz-website-redirect-location')
def set_redirect(self, redirect_location, headers=None):
    """Configure this key to redirect to another location.

    When the bucket associated with this key is accessed from the website
    endpoint, a 301 redirect will be issued to the specified
    `redirect_location`.

    :type redirect_location: string
    :param redirect_location: The location to redirect.
    """
    # Work on a copy so the caller's dict is never mutated.
    request_headers = {} if headers is None else headers.copy()
    request_headers['x-amz-website-redirect-location'] = redirect_location
    response = self.bucket.connection.make_request(
        'PUT', self.bucket.name, self.name, request_headers)
    if response.status != 200:
        raise self.provider.storage_response_error(
            response.status, response.reason, response.read())
    return True
def make_public(self, headers=None):
    """Grant anonymous read access by applying the 'public-read' canned ACL."""
    return self.bucket.set_canned_acl('public-read', self.name, headers)
def generate_url(self, expires_in, method='GET', headers=None,
                 query_auth=True, force_http=False, response_headers=None,
                 expires_in_absolute=False, version_id=None,
                 policy=None, reduced_redundancy=False, encrypt_key=False):
    """Build a URL that grants access to this key.

    :type expires_in: int
    :param expires_in: How long the URL is valid for, in seconds.

    :type method: string
    :param method: HTTP method the URL will be used with (default GET).

    :type headers: dict
    :param headers: Headers to include when signing the request.

    :type query_auth: bool
    :param query_auth: If True, signs the request in the URL.

    :type force_http: bool
    :param force_http: If True, produce an http URL instead of https.

    :type response_headers: dict
    :param response_headers: HTTP headers/values that will override any
        headers associated with the stored object in the response.
        See http://goo.gl/EWOPb for details.

    :type expires_in_absolute: bool
    :param expires_in_absolute: Passed through to the connection's
        URL generator.

    :type version_id: string
    :param version_id: Version of the object to address; overrides any
        value already set on this key.

    :type policy: :class:`boto.s3.acl.CannedACLStrings`
    :param policy: Canned ACL applied when the URL is used for a PUT.

    :type reduced_redundancy: bool
    :param reduced_redundancy: If True, sign for the REDUCED_REDUNDANCY
        storage class.

    :type encrypt_key: bool
    :param encrypt_key: If True, sign for server-side AES256 encryption
        while at rest in S3.

    :rtype: string
    :return: The URL to access the key
    """
    conn = self.bucket.connection
    provider = conn.provider
    version_id = version_id or self.version_id
    # Work on a copy so the caller's dict is never mutated.
    request_headers = {} if headers is None else headers.copy()
    # Add signing-relevant headers (these matter mostly for PUT URLs).
    if policy:
        request_headers[provider.acl_header] = policy
    if reduced_redundancy:
        self.storage_class = 'REDUCED_REDUNDANCY'
        if provider.storage_class_header:
            request_headers[provider.storage_class_header] = \
                self.storage_class
    if encrypt_key:
        request_headers[provider.server_side_encryption_header] = 'AES256'
    request_headers = boto.utils.merge_meta(
        request_headers, self.metadata, provider)
    return conn.generate_url(expires_in, method, self.bucket.name,
                             self.name, request_headers, query_auth,
                             force_http, response_headers,
                             expires_in_absolute, version_id)
def send_file(self, fp, headers=None, cb=None, num_cb=10,
              query_args=None, chunked_transfer=False, size=None):
    """Upload the contents of *fp* to this key's location in S3.

    :type fp: file
    :param fp: The file pointer to upload, positioned at the offset from
        which the upload should start (normally the start of the file).

    :type headers: dict
    :param headers: Headers to pass along with the PUT request.

    :type num_cb: int
    :param num_cb: (optional) With a ``cb`` callback, the maximum number
        of times the callback is invoked during the transfer; a negative
        value invokes it for every buffer read.

    :type query_args: string
    :param query_args: (optional) Arguments to pass in the query string.

    :type chunked_transfer: boolean
    :param chunked_transfer: (optional) If true, use chunked
        Transfer-Encoding.

    :type size: int
    :param size: (optional) Maximum number of bytes to read from ``fp``;
        useful for multipart uploads where the file is split into ranges.
        All remaining bytes are read when omitted.
    """
    self._send_file_internal(fp, headers=headers, cb=cb, num_cb=num_cb,
                             query_args=query_args,
                             chunked_transfer=chunked_transfer,
                             size=size)
def _send_file_internal(self, fp, headers=None, cb=None, num_cb=10,
                        query_args=None, chunked_transfer=False, size=None,
                        hash_algs=None):
    """Core PUT implementation behind send_file().

    Streams the contents of *fp* to this key through a ``sender``
    callback handed to make_request(), optionally with chunked
    Transfer-Encoding, and records any digests computed over the body
    in ``self.local_hashes``.  ``hash_algs`` maps algorithm names to
    hash constructors; when omitted (and no MD5 is already known) the
    body is MD5-hashed on the fly.
    """
    provider = self.bucket.connection.provider
    try:
        spos = fp.tell()
    except IOError:
        # fp is not seekable; remember that nothing has been read yet
        # so a retry can still be attempted safely.
        spos = None
        self.read_from_stream = False
    # If hash_algs is unset and the MD5 hasn't already been computed,
    # default to an MD5 hash_alg to hash the data on-the-fly.
    if hash_algs is None and not self.md5:
        hash_algs = {'md5': md5}
    digesters = dict((alg, hash_algs[alg]()) for alg in hash_algs or {})
    def sender(http_conn, method, path, data, headers):
        # This function is called repeatedly for temporary retries
        # so we must be sure the file pointer is pointing at the
        # start of the data.
        if spos is not None and spos != fp.tell():
            fp.seek(spos)
        elif spos is None and self.read_from_stream:
            # if seek is not supported, and we've read from this
            # stream already, then we need to abort retries to
            # avoid setting bad data.
            raise provider.storage_data_error(
                'Cannot retry failed request. fp does not support seeking.')
        # If the caller explicitly specified host header, tell putrequest
        # not to add a second host header. Similarly for accept-encoding.
        skips = {}
        if boto.utils.find_matching_headers('host', headers):
            skips['skip_host'] = 1
        if boto.utils.find_matching_headers('accept-encoding', headers):
            skips['skip_accept_encoding'] = 1
        http_conn.putrequest(method, path, **skips)
        for key in headers:
            http_conn.putheader(key, headers[key])
        http_conn.endheaders()
        save_debug = self.bucket.connection.debug
        self.bucket.connection.debug = 0
        # If the debuglevel < 4 we don't want to show connection
        # payload, so turn off HTTP connection-level debug output (to
        # be restored below).
        # Use the getattr approach to allow this to work in AppEngine.
        if getattr(http_conn, 'debuglevel', 0) < 4:
            http_conn.set_debuglevel(0)
        data_len = 0
        if cb:
            if size:
                cb_size = size
            elif self.size:
                cb_size = self.size
            else:
                cb_size = 0
            if chunked_transfer and cb_size == 0:
                # For chunked Transfer, we call the cb for every 1MB
                # of data transferred, except when we know size.
                # NOTE(review): this division yields a float on
                # Python 3; ``i == cb_count`` below still matches when
                # BufferSize divides 1MB evenly -- confirm for other
                # buffer sizes.
                cb_count = (1024 * 1024) / self.BufferSize
            elif num_cb > 1:
                cb_count = int(
                    math.ceil(cb_size / self.BufferSize / (num_cb - 1.0)))
            elif num_cb < 0:
                cb_count = -1
            else:
                cb_count = 0
            i = 0
            cb(data_len, cb_size)
        # Stream the body in BufferSize chunks, feeding every chunk to
        # the active digesters.
        bytes_togo = size
        if bytes_togo and bytes_togo < self.BufferSize:
            chunk = fp.read(bytes_togo)
        else:
            chunk = fp.read(self.BufferSize)
        if not isinstance(chunk, bytes):
            chunk = chunk.encode('utf-8')
        if spos is None:
            # read at least something from a non-seekable fp.
            self.read_from_stream = True
        while chunk:
            chunk_len = len(chunk)
            data_len += chunk_len
            if chunked_transfer:
                http_conn.send('%x;\r\n' % chunk_len)
                http_conn.send(chunk)
                http_conn.send('\r\n')
            else:
                http_conn.send(chunk)
            for alg in digesters:
                digesters[alg].update(chunk)
            if bytes_togo:
                bytes_togo -= chunk_len
                if bytes_togo <= 0:
                    break
            if cb:
                i += 1
                if i == cb_count or cb_count == -1:
                    cb(data_len, cb_size)
                    i = 0
            if bytes_togo and bytes_togo < self.BufferSize:
                chunk = fp.read(bytes_togo)
            else:
                chunk = fp.read(self.BufferSize)
            if not isinstance(chunk, bytes):
                chunk = chunk.encode('utf-8')
        self.size = data_len
        for alg in digesters:
            self.local_hashes[alg] = digesters[alg].digest()
        if chunked_transfer:
            # Terminating zero-length chunk for chunked encoding.
            http_conn.send('0\r\n')
            # http_conn.send("Content-MD5: %s\r\n" % self.base64md5)
            http_conn.send('\r\n')
        if cb and (cb_count <= 1 or i > 0) and data_len > 0:
            cb(data_len, cb_size)
        http_conn.set_debuglevel(save_debug)
        self.bucket.connection.debug = save_debug
        response = http_conn.getresponse()
        body = response.read()
        if not self.should_retry(response, chunked_transfer):
            raise provider.storage_response_error(
                response.status, response.reason, body)
        return response
    if not headers:
        headers = {}
    else:
        headers = headers.copy()
    # Overwrite user-supplied user-agent.
    for header in find_matching_headers('User-Agent', headers):
        del headers[header]
    headers['User-Agent'] = UserAgent
    # If storage_class is None, then a user has not explicitly requested
    # a storage class, so we can assume STANDARD here
    if self._storage_class not in [None, 'STANDARD']:
        headers[provider.storage_class_header] = self.storage_class
    if find_matching_headers('Content-Encoding', headers):
        self.content_encoding = merge_headers_by_name(
            'Content-Encoding', headers)
    if find_matching_headers('Content-Language', headers):
        self.content_language = merge_headers_by_name(
            'Content-Language', headers)
    content_type_headers = find_matching_headers('Content-Type', headers)
    if content_type_headers:
        # Some use cases need to suppress sending of the Content-Type
        # header and depend on the receiving server to set the content
        # type. This can be achieved by setting headers['Content-Type']
        # to None when calling this method.
        if (len(content_type_headers) == 1 and
                headers[content_type_headers[0]] is None):
            # Delete null Content-Type value to skip sending that header.
            del headers[content_type_headers[0]]
        else:
            self.content_type = merge_headers_by_name(
                'Content-Type', headers)
    elif self.path:
        # No explicit Content-Type; guess one from the source file name.
        self.content_type = mimetypes.guess_type(self.path)[0]
        if self.content_type is None:
            self.content_type = self.DefaultContentType
        headers['Content-Type'] = self.content_type
    else:
        headers['Content-Type'] = self.content_type
    if self.base64md5:
        headers['Content-MD5'] = self.base64md5
    if chunked_transfer:
        headers['Transfer-Encoding'] = 'chunked'
        #if not self.base64md5:
        #    headers['Trailer'] = "Content-MD5"
    else:
        headers['Content-Length'] = str(self.size)
    # This is terrible. We need a SHA256 of the body for SigV4, but to do
    # the chunked ``sender`` behavior above, the ``fp`` isn't available to
    # the auth mechanism (because closures). Detect if it's SigV4 & embelish
    # while we can before the auth calculations occur.
    if 'hmac-v4-s3' in self.bucket.connection._required_auth_capability():
        kwargs = {'fp': fp, 'hash_algorithm': hashlib.sha256}
        if size is not None:
            kwargs['size'] = size
        headers['_sha256'] = compute_hash(**kwargs)[0]
    headers['Expect'] = '100-Continue'
    headers = boto.utils.merge_meta(headers, self.metadata, provider)
    resp = self.bucket.connection.make_request(
        'PUT',
        self.bucket.name,
        self.name,
        headers,
        sender=sender,
        query_args=query_args
    )
    self.handle_version_headers(resp, force=True)
    self.handle_addl_headers(resp.getheaders())
def should_retry(self, response, chunked_transfer=False):
    """Decide how to proceed after a PUT response.

    Returns True when the response is a success or a plainly retryable
    condition (5xx / redirect on non-chunked uploads), False when the
    request should not be retried, and raises a storage data error on an
    ETag/MD5 mismatch or a PleaseRetryException on a 400 RequestTimeout.
    """
    provider = self.bucket.connection.provider
    if not chunked_transfer:
        # Non-chunked uploads are seekable, so server errors and
        # redirects can simply be retried.
        if response.status in [500, 503]:
            return True
        if response.getheader('location'):
            return True
    if 200 <= response.status <= 299:
        self.etag = response.getheader('etag')
        md5 = self.md5
        if isinstance(md5, bytes):
            md5 = md5.decode('utf-8')
        # If you use customer-provided encryption keys, the ETag value that
        # Amazon S3 returns in the response will not be the MD5 of the
        # object.
        sse_customer_alg = response.getheader(
            'x-amz-server-side-encryption-customer-algorithm', None)
        if sse_customer_alg is None and self.etag != '"%s"' % md5:
            raise provider.storage_data_error(
                'ETag from S3 did not match computed MD5. '
                '%s vs. %s' % (self.etag, self.md5))
        return True
    if response.status == 400:
        # The 400 must be trapped so the retry handler can check to
        # see if it was a timeout.
        # If ``RequestTimeout`` is present, we'll retry. Otherwise, bomb
        # out.
        body = response.read()
        err = provider.storage_response_error(
            response.status,
            response.reason,
            body
        )
        if err.error_code in ['RequestTimeout']:
            raise PleaseRetryException(
                "Saw %s, retrying" % err.error_code,
                response=response
            )
    return False
def compute_md5(self, fp, size=None):
    """
    :type fp: file
    :param fp: File pointer to the file to MD5 hash.  The file
        pointer will be reset to the same position before the
        method returns.

    :type size: int
    :param size: (optional) Maximum number of bytes to read from
        ``fp``; useful when uploading a file in multiple parts that
        is being split in place.  Less bytes may be available.
    """
    digest_info = compute_md5(fp, size=size)
    # The helper also reports how many bytes it hashed.  Record that on
    # the key, but keep the public return value a (hexdigest, base64)
    # two-tuple so the existing class interface is preserved.
    self.size = digest_info[2]
    return digest_info[:2]
def set_contents_from_stream(self, fp, headers=None, replace=True,
                             cb=None, num_cb=10, policy=None,
                             reduced_redundancy=False, query_args=None,
                             size=None):
    """Upload the contents of a non-seekable stream to this key.

    Because the stream cannot be rewound and its total size is unknown,
    the upload is sent with chunked Transfer-Encoding and without
    Content-Size/Content-MD5 headers; the delay of computing an MD5 is
    avoided at the cost of not being able to verify the integrity of
    the uploaded data.

    :type fp: file
    :param fp: the file whose contents are to be uploaded

    :type headers: dict
    :param headers: additional HTTP headers to be sent with the
        PUT request.

    :type replace: bool
    :param replace: If False and an object with this key already exists
        in the bucket, it is left untouched.  The default of True
        overwrites the object.

    :type cb: function
    :param cb: progress callback invoked with two integers: bytes
        transmitted so far and total bytes to transmit.

    :type num_cb: int
    :param num_cb: (optional) with ``cb``, the maximum number of times
        the callback is invoked during the transfer.

    :type policy: :class:`boto.gs.acl.CannedACLStrings`
    :param policy: A canned ACL policy applied to the new key in GS.

    :type reduced_redundancy: bool
    :param reduced_redundancy: If True, store the new key with the
        REDUCED_REDUNDANCY storage class.

    :type size: int
    :param size: (optional) Maximum number of bytes to read from ``fp``;
        useful for multipart uploads where the file is split into
        ranges.  All bytes are read when omitted.
    """
    provider = self.bucket.connection.provider
    if not provider.supports_chunked_transfer():
        raise BotoClientError('%s does not support chunked transfer'
                              % provider.get_provider_name())
    # Streams carry no name of their own, so the key must already be
    # named explicitly.
    if not self.name:
        raise BotoClientError('Cannot determine the destination '
                              'object name for the given stream')
    if headers is None:
        headers = {}
    if policy:
        headers[provider.acl_header] = policy
    if reduced_redundancy:
        self.storage_class = 'REDUCED_REDUNDANCY'
        if provider.storage_class_header:
            headers[provider.storage_class_header] = self.storage_class
    if self.bucket is not None:
        if not replace and self.bucket.lookup(self.name):
            return
        self.send_file(fp, headers, cb, num_cb, query_args,
                       chunked_transfer=True, size=size)
def set_contents_from_file(self, fp, headers=None, replace=True,
                           cb=None, num_cb=10, policy=None, md5=None,
                           reduced_redundancy=False, query_args=None,
                           encrypt_key=False, size=None, rewind=False):
    """
    Store an object in S3 using the name of the Key object as the
    key in S3 and the contents of the file pointed to by 'fp' as the
    contents. The data is read from 'fp' from its current position until
    'size' bytes have been read or EOF.

    :type fp: file
    :param fp: the file whose contents to upload

    :type headers: dict
    :param headers: Additional HTTP headers that will be sent with
        the PUT request.

    :type replace: bool
    :param replace: If this parameter is False, the method will
        first check to see if an object exists in the bucket with
        the same key. If it does, it won't overwrite it. The
        default value is True which will overwrite the object.

    :type cb: function
    :param cb: a callback function that will be called to report
        progress on the upload. The callback should accept two
        integer parameters, the first representing the number of
        bytes that have been successfully transmitted to S3 and
        the second representing the size of the to be transmitted
        object.

    :type num_cb: int
    :param num_cb: (optional) If a callback is specified with the
        cb parameter this parameter determines the granularity of
        the callback by defining the maximum number of times the
        callback will be called during the file transfer.

    :type policy: :class:`boto.s3.acl.CannedACLStrings`
    :param policy: A canned ACL policy that will be applied to the
        new key in S3.

    :type md5: A tuple containing the hexdigest version of the MD5
        checksum of the file as the first element and the
        Base64-encoded version of the plain checksum as the second
        element. This is the same format returned by the
        compute_md5 method.
    :param md5: If you need to compute the MD5 for any reason
        prior to upload, it's silly to have to do it twice so this
        param, if present, will be used as the MD5 values of the
        file. Otherwise, the checksum will be computed.

    :type reduced_redundancy: bool
    :param reduced_redundancy: If True, this will set the storage
        class of the new Key to be REDUCED_REDUNDANCY. The Reduced
        Redundancy Storage (RRS) feature of S3, provides lower
        redundancy at lower storage cost.

    :type encrypt_key: bool
    :param encrypt_key: If True, the new copy of the object will
        be encrypted on the server-side by S3 and will be stored
        in an encrypted form while at rest in S3.

    :type size: int
    :param size: (optional) The Maximum number of bytes to read
        from the file pointer (fp). This is useful when uploading
        a file in multiple parts where you are splitting the file
        up into different ranges to be uploaded. If not specified,
        the default behaviour is to read all bytes from the file
        pointer. Less bytes may be available.

    :type rewind: bool
    :param rewind: (optional) If True, the file pointer (fp) will
        be rewound to the start before any bytes are read from
        it. The default behaviour is False which reads from the
        current position of the file pointer (fp).

    :rtype: int
    :return: The number of bytes written to the key.
    """
    provider = self.bucket.connection.provider
    headers = headers or {}
    if policy:
        headers[provider.acl_header] = policy
    if encrypt_key:
        headers[provider.server_side_encryption_header] = 'AES256'
    if rewind:
        # caller requests reading from beginning of fp.
        fp.seek(0, os.SEEK_SET)
    else:
        # The following seek/tell/seek logic is intended
        # to detect applications using the older interface to
        # set_contents_from_file(), which automatically rewound the
        # file each time the Key was reused. This changed with commit
        # 14ee2d03f4665fe20d19a85286f78d39d924237e, to support uploads
        # split into multiple parts and uploaded in parallel, and at
        # the time of that commit this check was added because otherwise
        # older programs would get a success status and upload an empty
        # object. Unfortuantely, it's very inefficient for fp's implemented
        # by KeyFile (used, for example, by gsutil when copying between
        # providers). So, we skip the check for the KeyFile case.
        # TODO: At some point consider removing this seek/tell/seek
        # logic, after enough time has passed that it's unlikely any
        # programs remain that assume the older auto-rewind interface.
        if not isinstance(fp, KeyFile):
            spos = fp.tell()
            fp.seek(0, os.SEEK_END)
            if fp.tell() == spos:
                fp.seek(0, os.SEEK_SET)
                if fp.tell() != spos:
                    # Raise an exception as this is likely a programming
                    # error whereby there is data before the fp but nothing
                    # after it.
                    fp.seek(spos)
                    raise AttributeError('fp is at EOF. Use rewind option '
                                         'or seek() to data start.')
            # seek back to the correct position.
            fp.seek(spos)
    if reduced_redundancy:
        self.storage_class = 'REDUCED_REDUNDANCY'
        if provider.storage_class_header:
            headers[provider.storage_class_header] = self.storage_class
            # TODO - What if provider doesn't support reduced reduncancy?
            # What if different providers provide different classes?
    if hasattr(fp, 'name'):
        # Remember the source path; _send_file_internal uses it to guess
        # a Content-Type when none was supplied.
        self.path = fp.name
    if self.bucket is not None:
        if not md5 and provider.supports_chunked_transfer():
            # defer md5 calculation to on the fly and
            # we don't know anything about size yet.
            chunked_transfer = True
            self.size = None
        else:
            chunked_transfer = False
            if isinstance(fp, KeyFile):
                # Avoid EOF seek for KeyFile case as it's very inefficient.
                key = fp.getkey()
                size = key.size - fp.tell()
                self.size = size
                # At present both GCS and S3 use MD5 for the etag for
                # non-multipart-uploaded objects. If the etag is 32 hex
                # chars use it as an MD5, to avoid having to read the file
                # twice while transferring.
                if (re.match('^"[a-fA-F0-9]{32}"$', key.etag)):
                    etag = key.etag.strip('"')
                    md5 = (etag, base64.b64encode(binascii.unhexlify(etag)))
            if not md5:
                # compute_md5() and also set self.size to actual
                # size of the bytes read computing the md5.
                md5 = self.compute_md5(fp, size)
                # adjust size if required
                size = self.size
            elif size:
                self.size = size
            else:
                # If md5 is provided, still need to size so
                # calculate based on bytes to end of content
                spos = fp.tell()
                fp.seek(0, os.SEEK_END)
                self.size = fp.tell() - spos
                fp.seek(spos)
                size = self.size
            self.md5 = md5[0]
            self.base64md5 = md5[1]
        if self.name is None:
            # Default the key name to the content MD5 when none was given.
            self.name = self.md5
        if not replace:
            if self.bucket.lookup(self.name):
                return
        self.send_file(fp, headers=headers, cb=cb, num_cb=num_cb,
                       query_args=query_args,
                       chunked_transfer=chunked_transfer, size=size)
    # return number of bytes written.
    return self.size
def set_contents_from_filename(self, filename, headers=None, replace=True,
                               cb=None, num_cb=10, policy=None, md5=None,
                               reduced_redundancy=False,
                               encrypt_key=False):
    """Upload the file named *filename* to this key in S3.

    See set_contents_from_file for full parameter semantics.

    :type filename: string
    :param filename: The name of the file that you want to put onto S3

    :type headers: dict
    :param headers: Additional headers to pass along with the
        request to AWS.

    :type replace: bool
    :param replace: If True, replaces the contents of the file
        if it already exists.

    :type cb: function
    :param cb: progress callback invoked with (bytes transmitted,
        total bytes) during the upload.

    :type num_cb: int
    :param num_cb: (optional) With ``cb``, the maximum number of times
        the callback is invoked during the transfer.

    :type policy: :class:`boto.s3.acl.CannedACLStrings`
    :param policy: A canned ACL policy applied to the new key in S3.

    :type md5: tuple
    :param md5: (optional) Precomputed (hexdigest, base64) MD5 pair in
        the format returned by compute_md5; computed when omitted.

    :type reduced_redundancy: bool
    :param reduced_redundancy: If True, store the new key with the
        REDUCED_REDUNDANCY storage class.

    :type encrypt_key: bool
    :param encrypt_key: If True, the new copy of the object will be
        encrypted on the server-side by S3 and stored encrypted at rest.

    :rtype: int
    :return: The number of bytes written to the key.
    """
    # Open in binary mode so no text decoding happens; the context
    # manager guarantees the handle is closed even on failure.
    with open(filename, 'rb') as source:
        return self.set_contents_from_file(source, headers, replace, cb,
                                           num_cb, policy, md5,
                                           reduced_redundancy,
                                           encrypt_key=encrypt_key)
def set_contents_from_string(self, string_data, headers=None, replace=True,
                             cb=None, num_cb=10, policy=None, md5=None,
                             reduced_redundancy=False,
                             encrypt_key=False):
    """Store *string_data* as the contents of this key in S3.

    See set_contents_from_file for full parameter semantics.

    :type headers: dict
    :param headers: Additional headers to pass along with the
        request to AWS.

    :type replace: bool
    :param replace: If True, replaces the contents of the file if
        it already exists.

    :type cb: function
    :param cb: progress callback invoked with (bytes transmitted,
        total bytes) during the upload.

    :type num_cb: int
    :param num_cb: (optional) With ``cb``, the maximum number of times
        the callback is invoked during the transfer.

    :type policy: :class:`boto.s3.acl.CannedACLStrings`
    :param policy: A canned ACL policy applied to the new key in S3.

    :type md5: tuple
    :param md5: (optional) Precomputed (hexdigest, base64) MD5 pair in
        the format returned by compute_md5; computed when omitted.

    :type reduced_redundancy: bool
    :param reduced_redundancy: If True, store the new key with the
        REDUCED_REDUNDANCY storage class.

    :type encrypt_key: bool
    :param encrypt_key: If True, the new copy of the object will be
        encrypted on the server-side by S3 and stored encrypted at rest.
    """
    payload = string_data
    if not isinstance(payload, bytes):
        # Text input is always serialized as UTF-8.
        payload = payload.encode("utf-8")
    buf = BytesIO(payload)
    try:
        return self.set_contents_from_file(buf, headers, replace, cb,
                                           num_cb, policy, md5,
                                           reduced_redundancy,
                                           encrypt_key=encrypt_key)
    finally:
        buf.close()
def get_file(self, fp, headers=None, cb=None, num_cb=10,
             torrent=False, version_id=None, override_num_retries=None,
             response_headers=None):
    """Retrieve this key's contents from S3 into *fp*.

    :type fp: file
    :param fp: File pointer to put the data into

    :type headers: string
    :param: headers to send when retrieving the files

    :type cb: function
    :param cb: progress callback invoked with (bytes transmitted,
        total bytes) during the transfer.

    :type num_cb: int
    :param num_cb: (optional) With ``cb``, the maximum number of times
        the callback is invoked during the transfer.

    :type torrent: bool
    :param torrent: Flag for whether to get a torrent for the file

    :type override_num_retries: int
    :param override_num_retries: If not None will override configured
        num_retries parameter for underlying GET.

    :type response_headers: dict
    :param response_headers: HTTP headers/values that will override any
        headers associated with the stored object in the response.
        See http://goo.gl/EWOPb for details.

    :type version_id: str
    :param version_id: The ID of a particular version of the object.
        When omitted, the key's own ``version_id`` attribute (if set)
        is used; set it to None to always fetch the latest version from
        a version-enabled bucket.
    """
    self._get_file_internal(fp, headers=headers, cb=cb, num_cb=num_cb,
                            torrent=torrent, version_id=version_id,
                            override_num_retries=override_num_retries,
                            response_headers=response_headers,
                            hash_algs=None, query_args=None)
def _get_file_internal(self, fp, headers=None, cb=None, num_cb=10,
                       torrent=False, version_id=None, override_num_retries=None,
                       response_headers=None, hash_algs=None, query_args=None):
    """Core GET implementation behind get_file().

    Downloads this key's contents into *fp*, driving the optional
    progress callback and recording any digests computed over the
    downloaded bytes in ``self.local_hashes``.
    """
    if headers is None:
        headers = {}
    save_debug = self.bucket.connection.debug
    if self.bucket.connection.debug == 1:
        self.bucket.connection.debug = 0
    # NOTE(review): a caller-supplied list is mutated in place here;
    # get_file() passes None, so a fresh list is normally built.
    query_args = query_args or []
    if torrent:
        query_args.append('torrent')
    if hash_algs is None and not torrent:
        # Default to hashing the payload with MD5 unless a torrent is
        # being fetched.
        hash_algs = {'md5': md5}
    digesters = dict((alg, hash_algs[alg]()) for alg in hash_algs or {})
    # If a version_id is passed in, use that.  If not, check to see
    # if the Key object has an explicit version_id and, if so, use that.
    # Otherwise, don't pass a version_id query param.
    if version_id is None:
        version_id = self.version_id
    if version_id:
        query_args.append('versionId=%s' % version_id)
    if response_headers:
        for key in response_headers:
            query_args.append('%s=%s' % (
                key, urllib.parse.quote(response_headers[key])))
    query_args = '&'.join(query_args)
    self.open('r', headers, query_args=query_args,
              override_num_retries=override_num_retries)

    data_len = 0
    if cb:
        if self.size is None:
            cb_size = 0
        else:
            cb_size = self.size
        if self.size is None and num_cb != -1:
            # If size is not available due to chunked transfer for example,
            # we'll call the cb for every 1MB of data transferred.
            cb_count = (1024 * 1024) / self.BufferSize
        elif num_cb > 1:
            cb_count = int(math.ceil(cb_size/self.BufferSize/(num_cb-1.0)))
        elif num_cb < 0:
            cb_count = -1
        else:
            cb_count = 0
        i = 0
        cb(data_len, cb_size)
    try:
        # Iterating the key yields the response body in chunks (the loop
        # variable shadows the builtin ``bytes`` name).
        for bytes in self:
            fp.write(bytes)
            data_len += len(bytes)
            for alg in digesters:
                digesters[alg].update(bytes)
            if cb:
                if cb_size > 0 and data_len >= cb_size:
                    break
                i += 1
                if i == cb_count or cb_count == -1:
                    cb(data_len, cb_size)
                    i = 0
    except IOError as e:
        if e.errno == errno.ENOSPC:
            raise StorageDataError('Out of space for destination file '
                                   '%s' % fp.name)
        raise
    if cb and (cb_count <= 1 or i > 0) and data_len > 0:
        cb(data_len, cb_size)
    for alg in digesters:
        self.local_hashes[alg] = digesters[alg].digest()
    if self.size is None and not torrent and "Range" not in headers:
        self.size = data_len
    self.close()
    self.bucket.connection.debug = save_debug
def get_torrent_file(self, fp, headers=None, cb=None, num_cb=10):
"""
Get a torrent file (see to get_file)
:type fp: file
:param fp: The file pointer of where to put the torrent
:type headers: dict
:param headers: Headers to be passed
:type cb: function
:param cb: a callback function that will be called to report
progress on the upload. The callback should accept two
integer parameters, the first representing the number of
bytes that have been successfully transmitted to S3 and
the second representing the size of the to be transmitted
object.
:type cb: int
:param num_cb: (optional) If a callback is specified with the
cb parameter this parameter determines the granularity of
the callback by defining the maximum number of times the
callback will be called during the file transfer.
"""
return self.get_file(fp, headers, cb, num_cb, torrent=True)
def get_contents_to_file(self, fp, headers=None,
cb=None, num_cb=10,
torrent=False,
version_id=None,
res_download_handler=None,
response_headers=None):
"""
Retrieve an object from S3 using the name of the Key object as the
key in S3. Write the contents of the object to the file pointed
to by 'fp'.
:type fp: File -like object
:param fp:
:type headers: dict
:param headers: additional HTTP headers that will be sent with
the GET request.
:type cb: function
:param cb: a callback function that will be called to report
progress on the upload. The callback should accept two
integer parameters, the first representing the number of
bytes that have been successfully transmitted to S3 and
the second representing the size of the to be transmitted
object.
:type cb: int
:param num_cb: (optional) If a callback is specified with the
cb parameter this parameter determines the granularity of
the callback by defining the maximum number of times the
callback will be called during the file transfer.
:type torrent: bool
:param torrent: If True, returns the contents of a torrent
file as a string.
:type res_upload_handler: ResumableDownloadHandler
:param res_download_handler: If provided, this handler will
perform the download.
:type response_headers: dict
:param response_headers: A dictionary containing HTTP
headers/values that will override any headers associated
with the stored object in the response. See
http://goo.gl/EWOPb for details.
:type version_id: str
:param version_id: The ID of a particular version of the object.
If this parameter is not supplied but the Key object has
a ``version_id`` attribute, that value will be used when
retrieving the object. You can set the Key object's
``version_id`` attribute to None to always grab the latest
version from a version-enabled bucket.
"""
if self.bucket is not None:
if res_download_handler:
res_download_handler.get_file(self, fp, headers, cb, num_cb,
torrent=torrent,
version_id=version_id)
else:
self.get_file(fp, headers, cb, num_cb, torrent=torrent,
version_id=version_id,
response_headers=response_headers)
def get_contents_to_filename(self, filename, headers=None,
cb=None, num_cb=10,
torrent=False,
version_id=None,
res_download_handler=None,
response_headers=None):
"""
Retrieve an object from S3 using the name of the Key object as the
key in S3. Store contents of the object to a file named by 'filename'.
See get_contents_to_file method for details about the
parameters.
:type filename: string
:param filename: The filename of where to put the file contents
:type headers: dict
:param headers: Any additional headers to send in the request
:type cb: function
:param cb: a callback function that will be called to report
progress on the upload. The callback should accept two
integer parameters, the first representing the number of
bytes that have been successfully transmitted to S3 and
the second representing the size of the to be transmitted
object.
:type num_cb: int
:param num_cb: (optional) If a callback is specified with the
cb parameter this parameter determines the granularity of
the callback by defining the maximum number of times the
callback will be called during the file transfer.
:type torrent: bool
:param torrent: If True, returns the contents of a torrent file
as a string.
:type res_upload_handler: ResumableDownloadHandler
:param res_download_handler: If provided, this handler will
perform the download.
:type response_headers: dict
:param response_headers: A dictionary containing HTTP
headers/values that will override any headers associated
with the stored object in the response. See
http://goo.gl/EWOPb for details.
:type version_id: str
:param version_id: The ID of a particular version of the object.
If this parameter is not supplied but the Key object has
a ``version_id`` attribute, that value will be used when
retrieving the object. You can set the Key object's
``version_id`` attribute to None to always grab the latest
version from a version-enabled bucket.
"""
try:
with open(filename, 'wb') as fp:
self.get_contents_to_file(fp, headers, cb, num_cb,
torrent=torrent,
version_id=version_id,
res_download_handler=res_download_handler,
response_headers=response_headers)
except Exception:
os.remove(filename)
raise
# if last_modified date was sent from s3, try to set file's timestamp
if self.last_modified is not None:
try:
modified_tuple = email.utils.parsedate_tz(self.last_modified)
modified_stamp = int(email.utils.mktime_tz(modified_tuple))
os.utime(fp.name, (modified_stamp, modified_stamp))
except Exception:
pass
def get_contents_as_string(self, headers=None,
cb=None, num_cb=10,
torrent=False,
version_id=None,
response_headers=None, encoding=None):
"""
Retrieve an object from S3 using the name of the Key object as the
key in S3. Return the contents of the object as a string.
See get_contents_to_file method for details about the
parameters.
:type headers: dict
:param headers: Any additional headers to send in the request
:type cb: function
:param cb: a callback function that will be called to report
progress on the upload. The callback should accept two
integer parameters, the first representing the number of
bytes that have been successfully transmitted to S3 and
the second representing the size of the to be transmitted
object.
:type cb: int
:param num_cb: (optional) If a callback is specified with the
cb parameter this parameter determines the granularity of
the callback by defining the maximum number of times the
callback will be called during the file transfer.
:type torrent: bool
:param torrent: If True, returns the contents of a torrent file
as a string.
:type response_headers: dict
:param response_headers: A dictionary containing HTTP
headers/values that will override any headers associated
with the stored object in the response. See
http://goo.gl/EWOPb for details.
:type version_id: str
:param version_id: The ID of a particular version of the object.
If this parameter is not supplied but the Key object has
a ``version_id`` attribute, that value will be used when
retrieving the object. You can set the Key object's
``version_id`` attribute to None to always grab the latest
version from a version-enabled bucket.
:type encoding: str
:param encoding: The text encoding to use, such as ``utf-8``
or ``iso-8859-1``. If set, then a string will be returned.
Defaults to ``None`` and returns bytes.
:rtype: bytes or str
:returns: The contents of the file as bytes or a string
"""
fp = BytesIO()
self.get_contents_to_file(fp, headers, cb, num_cb, torrent=torrent,
version_id=version_id,
response_headers=response_headers)
value = fp.getvalue()
if encoding is not None:
value = value.decode(encoding)
return value
def add_email_grant(self, permission, email_address, headers=None):
"""
Convenience method that provides a quick way to add an email grant
to a key. This method retrieves the current ACL, creates a new
grant based on the parameters passed in, adds that grant to the ACL
and then PUT's the new ACL back to S3.
:type permission: string
:param permission: The permission being granted. Should be one of:
(READ, WRITE, READ_ACP, WRITE_ACP, FULL_CONTROL).
:type email_address: string
:param email_address: The email address associated with the AWS
account your are granting the permission to.
:type recursive: boolean
:param recursive: A boolean value to controls whether the
command will apply the grant to all keys within the bucket
or not. The default value is False. By passing a True
value, the call will iterate through all keys in the
bucket and apply the same grant to each key. CAUTION: If
you have a lot of keys, this could take a long time!
"""
policy = self.get_acl(headers=headers)
policy.acl.add_email_grant(permission, email_address)
self.set_acl(policy, headers=headers)
def add_user_grant(self, permission, user_id, headers=None,
display_name=None):
"""
Convenience method that provides a quick way to add a canonical
user grant to a key. This method retrieves the current ACL,
creates a new grant based on the parameters passed in, adds that
grant to the ACL and then PUT's the new ACL back to S3.
:type permission: string
:param permission: The permission being granted. Should be one of:
(READ, WRITE, READ_ACP, WRITE_ACP, FULL_CONTROL).
:type user_id: string
:param user_id: The canonical user id associated with the AWS
account your are granting the permission to.
:type display_name: string
:param display_name: An option string containing the user's
Display Name. Only required on Walrus.
"""
policy = self.get_acl(headers=headers)
policy.acl.add_user_grant(permission, user_id,
display_name=display_name)
self.set_acl(policy, headers=headers)
def _normalize_metadata(self, metadata):
if type(metadata) == set:
norm_metadata = set()
for k in metadata:
norm_metadata.add(k.lower())
else:
norm_metadata = {}
for k in metadata:
norm_metadata[k.lower()] = metadata[k]
return norm_metadata
def _get_remote_metadata(self, headers=None):
"""
Extracts metadata from existing URI into a dict, so we can
overwrite/delete from it to form the new set of metadata to apply to a
key.
"""
metadata = {}
for underscore_name in self._underscore_base_user_settable_fields:
if hasattr(self, underscore_name):
value = getattr(self, underscore_name)
if value:
# Generate HTTP field name corresponding to "_" named field.
field_name = underscore_name.replace('_', '-')
metadata[field_name.lower()] = value
# self.metadata contains custom metadata, which are all user-settable.
prefix = self.provider.metadata_prefix
for underscore_name in self.metadata:
field_name = underscore_name.replace('_', '-')
metadata['%s%s' % (prefix, field_name.lower())] = (
self.metadata[underscore_name])
return metadata
def set_remote_metadata(self, metadata_plus, metadata_minus, preserve_acl,
headers=None):
metadata_plus = self._normalize_metadata(metadata_plus)
metadata_minus = self._normalize_metadata(metadata_minus)
metadata = self._get_remote_metadata()
metadata.update(metadata_plus)
for h in metadata_minus:
if h in metadata:
del metadata[h]
src_bucket = self.bucket
# Boto prepends the meta prefix when adding headers, so strip prefix in
# metadata before sending back in to copy_key() call.
rewritten_metadata = {}
for h in metadata:
if (h.startswith('x-goog-meta-') or h.startswith('x-amz-meta-')):
rewritten_h = (h.replace('x-goog-meta-', '')
.replace('x-amz-meta-', ''))
else:
rewritten_h = h
rewritten_metadata[rewritten_h] = metadata[h]
metadata = rewritten_metadata
src_bucket.copy_key(self.name, self.bucket.name, self.name,
metadata=metadata, preserve_acl=preserve_acl,
headers=headers)
def restore(self, days, headers=None):
"""Restore an object from an archive.
:type days: int
:param days: The lifetime of the restored object (must
be at least 1 day). If the object is already restored
then this parameter can be used to readjust the lifetime
of the restored object. In this case, the days
param is with respect to the initial time of the request.
If the object has not been restored, this param is with
respect to the completion time of the request.
"""
response = self.bucket.connection.make_request(
'POST', self.bucket.name, self.name,
data=self.RestoreBody % days,
headers=headers, query_args='restore')
if response.status not in (200, 202):
provider = self.bucket.connection.provider
raise provider.storage_response_error(response.status,
response.reason,
response.read())
# NOTE(review): removed non-source residue ("Subsets and Splits" dataset-viewer
# boilerplate) that was accidentally appended here and broke Python syntax.