import sys
if sys.version_info[0] < 3:
from exceptions import NotImplementedError, ValueError, IOError
if (sys.version_info[0] < 2 or
        (sys.version_info[0] == 2 and sys.version_info[1] < 5)):
    print("Please consider upgrading your interpreter\nThis script requires Python 2.5 or newer")
    sys.exit(1)
import os
import re
import hashlib
from optparse import OptionParser
version = '1.0'
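# Matches one line of a coreutils-style checksum file: a 32-128 character
# digest, whitespace, an optional '*' binary marker, then the file name,
# e.g. "d41d8cd98f00b204e9800998ecf8427e *archive.tar.gz" (hypothetical file).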
hash_pattern = re.compile(r"(\w{32,128})\s+[*]{0,1}(.*)")
class Worker():
def __init__(self, hashname, rmode='rb', bufsize=8192, name=""):
self.hash_known = ['md5', 'sha1', 'sha224', 'sha256', 'sha384', 'sha512']
self.hash_length = {32: "md5", 40: "sha1", 56: "sha224", 64: "sha256", 96: "sha384", 128: "sha512"}
self.rmode = rmode
self.bufsize = bufsize
self.hashname = hashname
self.hashdata = None
if len(name) > 0:
self.name = name
else:
self.name = "worker-%s" % self.hashname
        if self.hashname in self.hash_known:
            # Every supported algorithm prints the same banner; the digest
            # object itself is created lazily in compute() via hashlib.new().
            print("# %s : %s hash algorithm selected" % (self.name, self.hashname))
elif self.hashname == 'auto':
print("# %s : auto hash algorithm selected" % (self.name))
else:
print("# %s : hash algorithm [ %s ] not implemented" % (self.name, self.hashname))
def compute(self, fname):
try:
self.hashdata = hashlib.new(self.hashname)
except ValueError:
raise NotImplementedError("# %s : hash algorithm [ %s ] not implemented" % (self.name, self.hashname))
        with open(fname, self.rmode) as fhandle:
            data = fhandle.read(self.bufsize)
            while data:
                self.hashdata.update(data)
                data = fhandle.read(self.bufsize)
return self.hashdata.hexdigest()
def guess_hash(self, hexdigest):
length = len(hexdigest)
if length in self.hash_length:
return self.hash_length[length]
else:
return None
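# Minimal usage sketch (assumes a file named 'somefile.bin' exists):
#   w = Worker('sha256')                # prints the selection banner
#   digest = w.compute('somefile.bin')
#   w.guess_hash(digest)                # -> 'sha256', inferred from digest length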
def main():
usage = "usage: %prog [options] arg"
parser = OptionParser(usage=usage)
parser.add_option("-r", dest="recursive", action="store_true", default=False, help="recursively calculate checksums")
parser.add_option("-c", dest="check", action="store_true", default=False, help="check sums")
parser.add_option("--hash", dest="hashname", default="auto", help="select hash algorithm")
(options, args) = parser.parse_args()
print("# generated by pysummer version %s" % (version))
print("# recursive option : %s" % ("True" if options.recursive else "False"))
print("# action : %s sums" % ("checking" if options.check else "generating"))
if args:
arg0 = args[0]
if not options.check:
if options.hashname == "auto":
options.hashname = "sha1"
print("# WARNING : 'auto' as hash selected, so defaulting to 'sha1'")
w = Worker(options.hashname)
if os.path.isfile(arg0):
hw = w.compute(arg0)
print("%s *%s" % (hw, arg0))
elif os.path.isdir(arg0):
topdir = os.path.abspath(arg0)
if options.recursive:
for root, dirs, files in os.walk(topdir):
for fname in files:
fullpath = os.path.abspath(os.path.join(root, fname))
                            relpath = fullpath[len(os.path.dirname(topdir)):].lstrip("\\/")
                            relpath = os.path.normpath(relpath)  # OS independence
hw = w.compute(fullpath)
print("%s *%s" % (hw, relpath))
else:
                    for item in os.listdir(topdir):
                        fullpath = os.path.join(topdir, item)  # listdir() yields bare names; resolve against topdir
                        if os.path.isfile(fullpath):
                            hw = w.compute(fullpath)
                            item = os.path.normpath(item)  # OS independence
                            print("%s *%s" % (hw, item))
else:
raise IOError("Specified file or directory not found")
else:
if os.path.isfile(arg0):
            fhandle = open(arg0, 'r')
            filechk = list()
            for line in fhandle:
                mp = hash_pattern.match(line)
                if mp:
                    filechk.append(mp.groups())
            fhandle.close()
w = Worker(options.hashname)
if options.hashname == "auto":
print("# guessing hash algorithm for each line")
for item in filechk:
if options.hashname == "auto":
w.hashname = w.guess_hash(item[0])
try:
hw = w.compute(item[1])
if item[0] == hw:
print("[%s] %s : OK" % (w.hashname, item[1]))
else:
print("[%s] %s : FAILED" % (w.hashname, item[1]))
except IOError:
print("[%s] %s : EXCEPTION : IOError" % (w.hashname, item[1]))
else:
raise IOError("Argument missing : use -h flag to get help")
if __name__ == "__main__":
main()
|
import datetime
import logging
import requests
from redash.query_runner import *
from redash.utils import json_dumps
logger = logging.getLogger(__name__)
def _transform_result(response):
columns = ({'name': 'Time::x', 'type': TYPE_DATETIME},
{'name': 'value::y', 'type': TYPE_FLOAT},
{'name': 'name::series', 'type': TYPE_STRING})
rows = []
for series in response.json():
for values in series['datapoints']:
timestamp = datetime.datetime.fromtimestamp(int(values[1]))
rows.append({'Time::x': timestamp, 'name::series': series['target'], 'value::y': values[0]})
data = {'columns': columns, 'rows': rows}
return json_dumps(data)
class Graphite(BaseQueryRunner):
@classmethod
def configuration_schema(cls):
return {
'type': 'object',
'properties': {
'url': {
'type': 'string'
},
'username': {
'type': 'string'
},
'password': {
'type': 'string'
},
'verify': {
'type': 'boolean',
'title': 'Verify SSL certificate'
}
},
'required': ['url'],
'secret': ['password']
}
@classmethod
def annotate_query(cls):
return False
def __init__(self, configuration):
super(Graphite, self).__init__(configuration)
self.syntax = 'custom'
if "username" in self.configuration and self.configuration["username"]:
self.auth = (self.configuration["username"], self.configuration["password"])
else:
self.auth = None
self.verify = self.configuration.get("verify", True)
self.base_url = "%s/render?format=json&" % self.configuration['url']
def test_connection(self):
r = requests.get("{}/render".format(self.configuration['url']), auth=self.auth, verify=self.verify)
if r.status_code != 200:
raise Exception("Got invalid response from Graphite (http status code: {0}).".format(r.status_code))
def run_query(self, query, user):
url = "%s%s" % (self.base_url, "&".join(query.split("\n")))
error = None
data = None
try:
response = requests.get(url, auth=self.auth, verify=self.verify)
if response.status_code == 200:
data = _transform_result(response)
else:
error = "Failed getting results (%d)" % response.status_code
        except Exception as ex:
            data = None
            error = str(ex)
return data, error
register(Graphite)
|
from django import VERSION as DJANGO_VERSION
if DJANGO_VERSION[0:2] < (1, 4):
from django.conf.urls.defaults import include, patterns, url
else:
from django.conf.urls import include, patterns, url
from django.views import generic
from django_comments_xtd import views, models, django_comments_urls
from django_comments_xtd.conf import settings
urlpatterns = patterns('',
url(r'', include(django_comments_urls)),
url(r'^sent/$', views.sent, name='comments-xtd-sent'),
url(r'^confirm/(?P<key>[^/]+)$', views.confirm, name='comments-xtd-confirm'),
url(r'^mute/(?P<key>[^/]+)$', views.mute, name='comments-xtd-mute'),
)
if settings.COMMENTS_XTD_MAX_THREAD_LEVEL > 0:
urlpatterns += patterns("",
url(r'^reply/(?P<cid>[\d]+)$', views.reply, name='comments-xtd-reply'),
)
|
import logging
from collections import namedtuple
from io import IOBase
from itertools import chain, islice
from threading import Thread
from streamlink.buffers import RingBuffer
from streamlink.packages.flashmedia import FLVError
from streamlink.packages.flashmedia.tag import (
AACAudioData, AAC_PACKET_TYPE_SEQUENCE_HEADER, AUDIO_CODEC_ID_AAC, AVCVideoData,
AVC_PACKET_TYPE_SEQUENCE_HEADER, AudioData, Header, ScriptData, TAG_TYPE_AUDIO,
TAG_TYPE_VIDEO, Tag, VIDEO_CODEC_ID_AVC, VideoCommandFrame, VideoData
)
__all__ = ["extract_flv_header_tags", "FLVTagConcat", "FLVTagConcatIO"]
log = logging.getLogger(__name__)
FLVHeaderTags = namedtuple("FLVHeaderTags", "metadata aac vc")
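# Fields: 'metadata' is the onMetaData script tag, 'aac' the AAC sequence
# header tag and 'vc' the AVC sequence header tag; any of them may be None
# if extract_flv_header_tags() gives up before finding it.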
def iter_flv_tags(fd=None, buf=None, strict=False, skip_header=False):
if not (fd or buf):
return
offset = 0
if not skip_header:
if fd:
Header.deserialize(fd)
elif buf:
header, offset = Header.deserialize_from(buf, offset)
    while fd or (buf and offset < len(buf)):
try:
if fd:
tag = Tag.deserialize(fd, strict=strict)
elif buf:
tag, offset = Tag.deserialize_from(buf, offset, strict=strict)
except (OSError, FLVError) as err:
if "Insufficient tag header" in str(err):
break
raise OSError(err)
yield tag
def extract_flv_header_tags(stream):
fd = stream.open()
metadata = aac_header = avc_header = None
for tag_index, tag in enumerate(iter_flv_tags(fd)):
if isinstance(tag.data, ScriptData) and tag.data.name == "onMetaData":
metadata = tag
elif (isinstance(tag.data, VideoData) and isinstance(tag.data.data, AVCVideoData)):
if tag.data.data.type == AVC_PACKET_TYPE_SEQUENCE_HEADER:
avc_header = tag
elif (isinstance(tag.data, AudioData) and isinstance(tag.data.data, AACAudioData)):
if tag.data.data.type == AAC_PACKET_TYPE_SEQUENCE_HEADER:
aac_header = tag
if aac_header and avc_header and metadata:
break
# Give up after 10 tags
if tag_index == 9:
break
return FLVHeaderTags(metadata, aac_header, avc_header)
class FLVTagConcat:
def __init__(self, duration=None, tags=[], has_video=True, has_audio=True,
flatten_timestamps=False, sync_headers=False):
self.duration = duration
self.flatten_timestamps = flatten_timestamps
self.has_audio = has_audio
self.has_video = has_video
self.sync_headers = sync_headers
self.tags = tags
if not (has_audio and has_video):
self.sync_headers = False
self.audio_header_written = False
self.flv_header_written = False
self.video_header_written = False
self.timestamps_add = {}
self.timestamps_orig = {}
self.timestamps_sub = {}
@property
def headers_written(self):
return self.audio_header_written and self.video_header_written
def verify_tag(self, tag):
if tag.filter:
raise OSError("Tag has filter flag set, probably encrypted")
        # Only AAC and AVC have detectable headers
if isinstance(tag.data, AudioData) and tag.data.codec != AUDIO_CODEC_ID_AAC:
self.audio_header_written = True
if isinstance(tag.data, VideoData) and tag.data.codec != VIDEO_CODEC_ID_AVC:
self.video_header_written = True
# Make sure there is no timestamp gap between audio and video when syncing
if self.sync_headers and self.timestamps_sub and not self.headers_written:
self.timestamps_sub = {}
if isinstance(tag.data, AudioData):
if isinstance(tag.data.data, AACAudioData):
if tag.data.data.type == AAC_PACKET_TYPE_SEQUENCE_HEADER:
if self.audio_header_written:
return
self.audio_header_written = True
else:
if self.sync_headers and not self.headers_written:
return
if not self.audio_header_written:
return
else:
if self.sync_headers and not self.headers_written:
return
elif isinstance(tag.data, VideoData):
if isinstance(tag.data.data, AVCVideoData):
if tag.data.data.type == AVC_PACKET_TYPE_SEQUENCE_HEADER:
if self.video_header_written:
return
self.video_header_written = True
else:
if self.sync_headers and not self.headers_written:
return
if not self.video_header_written:
return
elif isinstance(tag.data.data, VideoCommandFrame):
return
else:
if self.sync_headers and not self.headers_written:
return
elif isinstance(tag.data, ScriptData):
if tag.data.name == "onMetaData":
if self.duration:
tag.data.value["duration"] = self.duration
elif "duration" in tag.data.value:
del tag.data.value["duration"]
else:
return False
return True
def adjust_tag_gap(self, tag):
timestamp_gap = tag.timestamp - self.timestamps_orig.get(tag.type, 0)
timestamp_sub = self.timestamps_sub.get(tag.type)
if timestamp_gap > 1000 and timestamp_sub is not None:
self.timestamps_sub[tag.type] += timestamp_gap
self.timestamps_orig[tag.type] = tag.timestamp
def adjust_tag_timestamp(self, tag):
timestamp_offset_sub = self.timestamps_sub.get(tag.type)
if timestamp_offset_sub is None and tag not in self.tags:
self.timestamps_sub[tag.type] = tag.timestamp
timestamp_offset_sub = self.timestamps_sub.get(tag.type)
timestamp_offset_add = self.timestamps_add.get(tag.type)
if timestamp_offset_add:
tag.timestamp = max(0, tag.timestamp + timestamp_offset_add)
elif timestamp_offset_sub:
tag.timestamp = max(0, tag.timestamp - timestamp_offset_sub)
def analyze_tags(self, tag_iterator):
tags = list(islice(tag_iterator, 10))
audio_tags = len(list(filter(lambda t: t.type == TAG_TYPE_AUDIO, tags)))
video_tags = len(list(filter(lambda t: t.type == TAG_TYPE_VIDEO, tags)))
self.has_audio = audio_tags > 0
self.has_video = video_tags > 0
if not (self.has_audio and self.has_video):
self.sync_headers = False
return tags
def iter_tags(self, fd=None, buf=None, skip_header=None):
        if skip_header is None:
            skip_header = bool(self.tags)
tags_iterator = filter(None, self.tags)
flv_iterator = iter_flv_tags(fd=fd, buf=buf, skip_header=skip_header)
yield from chain(tags_iterator, flv_iterator)
def iter_chunks(self, fd=None, buf=None, skip_header=None):
"""Reads FLV tags from fd or buf and returns them with adjusted
timestamps."""
timestamps = dict(self.timestamps_add)
tag_iterator = self.iter_tags(fd=fd, buf=buf, skip_header=skip_header)
if not self.flv_header_written:
analyzed_tags = self.analyze_tags(tag_iterator)
else:
analyzed_tags = []
for tag in chain(analyzed_tags, tag_iterator):
if not self.flv_header_written:
flv_header = Header(has_video=self.has_video,
has_audio=self.has_audio)
yield flv_header.serialize()
self.flv_header_written = True
if self.verify_tag(tag):
self.adjust_tag_gap(tag)
self.adjust_tag_timestamp(tag)
if self.duration:
norm_timestamp = tag.timestamp / 1000
if norm_timestamp > self.duration:
break
yield tag.serialize()
timestamps[tag.type] = tag.timestamp
if not self.flatten_timestamps:
self.timestamps_add = timestamps
self.tags = []
class FLVTagConcatWorker(Thread):
def __init__(self, iterator, stream):
self.error = None
self.stream = stream
self.stream_iterator = iterator
self.concater = FLVTagConcat(stream.duration, stream.tags,
**stream.concater_params)
Thread.__init__(self)
self.daemon = True
def run(self):
for fd in self.stream_iterator:
try:
chunks = self.concater.iter_chunks(
fd, skip_header=self.stream.skip_header
)
for chunk in chunks:
self.stream.buffer.write(chunk)
if not self.running:
return
except OSError as err:
self.error = err
break
self.stop()
def stop(self):
self.running = False
self.stream.buffer.close()
def start(self):
self.running = True
return Thread.start(self)
class FLVTagConcatIO(IOBase):
__worker__ = FLVTagConcatWorker
def __init__(self, session, duration=None, tags=[], skip_header=None,
timeout=30, **concater_params):
self.session = session
self.timeout = timeout
self.concater_params = concater_params
self.duration = duration
self.skip_header = skip_header
self.tags = tags
def open(self, iterator):
self.buffer = RingBuffer(self.session.get_option("ringbuffer-size"))
self.worker = self.__worker__(iterator, self)
self.worker.start()
def close(self):
self.worker.stop()
if self.worker.is_alive():
self.worker.join()
def read(self, size=-1):
if not self.buffer:
return b""
if self.worker.error:
raise self.worker.error
return self.buffer.read(size, block=self.worker.is_alive(),
timeout=self.timeout)
|
'''
Created on Nov 23, 2011
@author: Mirna Lerotic, 2nd Look Consulting
http://www.2ndlookconsulting.com/
Copyright (c) 2013, Stefan Vogt, Argonne National Laboratory
All rights reserved.
Redistribution and use in source and binary forms, with or without modification,
are permitted provided that the following conditions are met:
Redistributions of source code must retain the above copyright notice, this
list of conditions and the following disclaimer.
Redistributions in binary form must reproduce the above copyright notice, this
list of conditions and the following disclaimer in the documentation and/or
other materials provided with the distribution.
Neither the name of the Argonne National Laboratory nor the names of its
contributors may be used to endorse or promote products derived from this
software without specific prior written permission.
THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" AND ANY
EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES
OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT
SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT,
INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED
TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS;
OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER
IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING
IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF
SUCH DAMAGE.
! This is from file COMPOUND.DAT for HENKE. Entries are:
! compound formula density (g/cc)
water H2O 1.0
protein H48.6C32.9N8.9O8.9S0.6 1.35
lipid H62.5C31.5O6.3 1.0
nucleosome H42.1C31.9N10.3O13.9P1.6S0.3 1.5
dna H35.5C30.8N11.7O18.9P3.1 1.7
helium He 0.1663e-3
chromatin H49.95C24.64N8.66O15.57P1.07S0.03 1.527
air N78.08O20.95Ar0.93 1.2047e-3
pmma C5H8O2 1.18
nitride Si3N4 3.44
graphite C 2.26
nickel Ni 8.876
beryl Be 1.845
copper Cu 8.96
quartz SiO2 2.20
aluminum Al 2.70
gold Au 19.3
ice H2O 0.92
carbon C 1.0
polystyrene C8H8 1.06
silicon Si 2.33
germanium Ge 5.323
'''
from __future__ import division
import numpy as np
from xdrlib import Unpacker
class henke:
def __init__(self, logger):
self.logger = logger
self.compound_name = [ 'water' , 'protein', 'lipid', 'nucleosome', 'dna', 'helium', 'chromatin',
'air', 'pmma', 'nitride', 'graphite', 'nickel', 'beryl', 'copper',
'quartz', 'aluminum', 'gold', 'ice', 'carbon', 'polystyrene',
'silicon', 'germanium']
        self.compound_formula = ['H2O', 'H48.6C32.9N8.9O8.9S0.6', 'H62.5C31.5O6.3',
'H42.1C31.9N10.3O13.9P1.6S0.3', 'H35.5C30.8N11.7O18.9P3.1',
'He' , 'H49.95C24.64N8.66O15.57P1.07S0.03',
'N78.08O20.95Ar0.93', 'C5H8O2', 'Si3N4', 'C', 'Ni', 'Be',
'Cu', 'SiO2', 'Al', 'Au', 'H2O', 'C', 'C8H8',
'Si', 'Ge']
self.compound_density = [ 1.0, 1.35, 1.0, 1.5, 1.7, 1.66E-04, 1.527, 1.20E-03, 1.18,
3.44, 2.26, 8.876, 1.845, 8.96, 2.2, 2.7, 19.3, 0.92, 1, 1.06,
2.33, 5.323 ]
def compound(self, compound_string, density):
z_array = []
atwt = 0
if compound_string in self.compound_name:
            compound_string = self.compound_formula[self.compound_name.index(compound_string)]
        if compound_string in self.compound_formula:
z_array = self.zcompound(compound_string, z_array)
atwt = self.zatwt(z_array)
return z_array, atwt
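    # Example (sketch): compound('water', 1.0) resolves the name to 'H2O' and
    # returns z_array with 2.0 at index 0 (H) and 1.0 at index 7 (O),
    # atwt = 2 * 1.00794 + 15.9994 ~= 18.015.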
def zcompound(self, compound_string, z_array, paren_multiplier=False):
verbose = False
if verbose:
self.logger.debug('compound_string: %s', compound_string)
        if paren_multiplier is False:
z_array = np.zeros(92)
paren_multiplier=1.
max_z_index=93
last_char_index = len(compound_string) - 1
# If we don't start off with a parenthesis, all we have to do
# is strip off the first element and process it. We then
# call the routine over again to handle the next part of
# the string...
if compound_string[0] != '(':
# Look to see if the string has an element
# like "C" or like "He".
first_char=compound_string[0]
if len(compound_string) > 1:
second_char = compound_string[1]
else:
second_char = ''
this_element_name = first_char
if second_char >= 'a' and second_char <= 'z':
this_element_name = this_element_name + second_char
num_start_index = 2
else:
this_element_name = this_element_name + ' '
num_start_index = 1
if verbose:
self.logger.debug('this_element_name: %s num_start_index: %s', this_element_name, num_start_index)
            # Map padded element symbols to atomic number Z; single-letter
            # symbols carry a trailing space to keep every key two chars wide.
            element_z = {
                'H ': 1, 'He': 2, 'Li': 3, 'Be': 4, 'B ': 5, 'C ': 6, 'N ': 7,
                'O ': 8, 'F ': 9, 'Ne': 10, 'Na': 11, 'Mg': 12, 'Al': 13,
                'Si': 14, 'P ': 15, 'S ': 16, 'Cl': 17, 'Ar': 18, 'K ': 19,
                'Ca': 20, 'Sc': 21, 'Ti': 22, 'V ': 23, 'Cr': 24, 'Mn': 25,
                'Fe': 26, 'Co': 27, 'Ni': 28, 'Cu': 29, 'Zn': 30, 'Ga': 31,
                'Ge': 32, 'As': 33, 'Se': 34, 'Br': 35, 'Kr': 36, 'Rb': 37,
                'Sr': 38, 'Y ': 39, 'Zr': 40, 'Nb': 41, 'Mo': 42, 'Tc': 43,
                'Ru': 44, 'Rh': 45, 'Pd': 46, 'Ag': 47, 'Cd': 48, 'In': 49,
                'Sn': 50, 'Sb': 51, 'Te': 52, 'I ': 53, 'Xe': 54, 'Cs': 55,
                'Ba': 56, 'La': 57, 'Ce': 58, 'Pr': 59, 'Nd': 60, 'Pm': 61,
                'Sm': 62, 'Eu': 63, 'Gd': 64, 'Tb': 65, 'Dy': 66, 'Ho': 67,
                'Er': 68, 'Tm': 69, 'Yb': 70, 'Lu': 71, 'Hf': 72, 'Ta': 73,
                'W ': 74, 'Re': 75, 'Os': 76, 'Ir': 77, 'Pt': 78, 'Au': 79,
                'Hg': 80, 'Tl': 81, 'Pb': 82, 'Bi': 83, 'Po': 84, 'At': 85,
                'Rn': 86, 'Fr': 87, 'Ra': 88, 'Ac': 89, 'Th': 90, 'Pa': 91,
                'U ': 92,
            }
            this_z = element_z.get(this_element_name, 0)
if (this_z == 0):
self.logger.info('zcompound is confused: %s', compound_string)
compound_string=''
return np.zeros(0)
# Find the next element or parenthesis, as
# anything before it must be a number.
postnum_index = num_start_index
if len(compound_string) > num_start_index + 1:
test_char = compound_string[postnum_index]
else:
test_char = ''
            while test_char in '0123456789.' and postnum_index <= last_char_index:
                postnum_index = postnum_index + 1
                if postnum_index <= last_char_index:
                    test_char = compound_string[postnum_index]
                else:
                    test_char = ''
# is there more?
            if num_start_index != postnum_index:
                number_string = compound_string[num_start_index:postnum_index]
            else:
                number_string = ''
            # Default to a count of 1 when no number follows the symbol.
            num_multiplier = 1.
            if verbose:
                self.logger.debug('Trying to interpret %s as a number.', number_string)
            if len(number_string) != 0:
                num_multiplier = float(number_string)
# We've handled this element, so pop it into the
# matrix and continue.
if (this_z <= max_z_index) :
z_array[this_z - 1] = z_array[this_z - 1] + num_multiplier
else:
self.logger.debug('zcompound: z_array smaller than %s', max_z_index)
return np.zeros(0)
# And deal with what's left
remaining_string=compound_string[postnum_index:last_char_index + 1]
if len(remaining_string) > 0:
z_array = self.zcompound(remaining_string, z_array, paren_multiplier=True)
return z_array
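    # Example (sketch): zcompound('Si3N4', []) leaves 3.0 at z_array[13] (Si)
    # and 4.0 at z_array[6] (N); fractional counts such as 'H48.6...' are
    # accumulated the same way via the float() conversion above.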
def zatwt(self, z_array):
maxz=z_array.size
atwt=0.
for i in range(maxz):
if (z_array[i] != 0.):
if i+1 == 1: this_atwt=1.00794
elif i+1 == 2: this_atwt=4.0026
elif i+1 == 3: this_atwt=6.941
elif i+1 == 4: this_atwt=9.01218
elif i+1 == 5: this_atwt=10.81
elif i+1 == 6: this_atwt=12.011
elif i+1 == 7: this_atwt=14.0067
elif i+1 == 8: this_atwt=15.9994
elif i+1 == 9: this_atwt=18.9984
elif i+1 == 10: this_atwt=21.179
elif i+1 == 11: this_atwt=22.98977
elif i+1 == 12: this_atwt=24.305
elif i+1 == 13: this_atwt=26.98154
elif i+1 == 14: this_atwt=28.0855
elif i+1 == 15: this_atwt=30.97376
elif i+1 == 16: this_atwt=32.06
elif i+1 == 17: this_atwt=35.453
elif i+1 == 18: this_atwt=39.948
elif i+1 == 19: this_atwt=39.0983
elif i+1 == 20: this_atwt=40.08
elif i+1 == 21: this_atwt=44.9559
elif i+1 == 22: this_atwt=47.88
elif i+1 == 23: this_atwt=50.9415
elif i+1 == 24: this_atwt=51.996
elif i+1 == 25: this_atwt=54.9380
elif i+1 == 26: this_atwt=55.847
elif i+1 == 27: this_atwt=58.9332
elif i+1 == 28: this_atwt=58.69
elif i+1 == 29: this_atwt=63.546
elif i+1 == 30: this_atwt=65.38
elif i+1 == 31: this_atwt=69.72
elif i+1 == 32: this_atwt=72.59
elif i+1 == 33: this_atwt=74.9216
elif i+1 == 34: this_atwt=78.96
elif i+1 == 35: this_atwt=79.904
elif i+1 == 36: this_atwt=83.80
elif i+1 == 37: this_atwt=85.4678
elif i+1 == 38: this_atwt=87.62
elif i+1 == 39: this_atwt=88.9059
elif i+1 == 40: this_atwt=91.22
elif i+1 == 41: this_atwt=92.9064
elif i+1 == 42: this_atwt=95.94
elif i+1 == 43: this_atwt=98.
elif i+1 == 44: this_atwt=101.07
elif i+1 == 45: this_atwt=102.9055
elif i+1 == 46: this_atwt=106.42
elif i+1 == 47: this_atwt=107.8682
elif i+1 == 48: this_atwt=112.41
elif i+1 == 49: this_atwt=114.82
elif i+1 == 50: this_atwt=118.69
elif i+1 == 51: this_atwt=121.75
elif i+1 == 52: this_atwt=127.60
elif i+1 == 53: this_atwt=126.9054
elif i+1 == 54: this_atwt=131.29
elif i+1 == 55: this_atwt=132.9054
elif i+1 == 56: this_atwt=137.33
elif i+1 == 57: this_atwt=138.9055
elif i+1 == 58: this_atwt=140.12
elif i+1 == 59: this_atwt=140.9077
elif i+1 == 60: this_atwt=144.24
elif i+1 == 61: this_atwt=145.
elif i+1 == 62: this_atwt=150.36
elif i+1 == 63: this_atwt=151.96
elif i+1 == 64: this_atwt=157.25
elif i+1 == 65: this_atwt=158.9254
elif i+1 == 66: this_atwt=162.5
elif i+1 == 67: this_atwt=164.9304
elif i+1 == 68: this_atwt=167.26
elif i+1 == 69: this_atwt=168.9342
elif i+1 == 70: this_atwt=173.04
elif i+1 == 71: this_atwt=174.967
elif i+1 == 72: this_atwt=178.49
elif i+1 == 73: this_atwt=180.9479
elif i+1 == 74: this_atwt=183.85
elif i+1 == 75: this_atwt=186.207
elif i+1 == 76: this_atwt=190.2
elif i+1 == 77: this_atwt=192.22
elif i+1 == 78: this_atwt=195.08
elif i+1 == 79: this_atwt=196.9665
elif i+1 == 80: this_atwt=200.59
elif i+1 == 81: this_atwt=204.383
elif i+1 == 82: this_atwt=207.2
elif i+1 == 83: this_atwt=208.9804
elif i+1 == 84: this_atwt=209.
elif i+1 == 85: this_atwt=210.
elif i+1 == 86: this_atwt=222.
elif i+1 == 87: this_atwt=223.
elif i+1 == 88: this_atwt=226.0254
elif i+1 == 89: this_atwt=227.0278
elif i+1 == 90: this_atwt=232.0381
elif i+1 == 91: this_atwt=231.0359
elif i+1 == 92: this_atwt=238.0289
else: this_atwt=0.
atwt = atwt + z_array[i] * this_atwt
return atwt
def extra(self, ielement = -1):
        energies, f1, f2, n_extra, energies_extra, f1_extra, f2_extra = self.read(ielement, all=False)
        if n_extra is not None and n_extra != 0:
energies_all=np.concatenate((energies,energies_extra), axis=0)
f1_all=np.concatenate((f1,f1_extra), axis=0)
f2_all=np.concatenate((f2,f2_extra), axis=0)
sort_order=energies_all.argsort()
energies_all=energies_all[sort_order]
f1_all=f1_all[sort_order]
f2_all=f2_all[sort_order]
else:
energies_all=energies
f1_all=f1
f2_all=f2
        return energies_all, f1_all, f2_all, energies_extra, f1_extra, f2_extra
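    # Layout of henke.xdr as implied by the reader below (all values 4-byte XDR):
    #   n_elements, n_energies,
    #   energies[n_energies],
    #   per element: f1[n_energies], f2[n_energies],
    #   n_extra_energies,
    #   per element: n_extra, extra_energies[...], extra_f1[...], extra_f2[...]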
def read(self, ielement=-1, all=True):
        # If we don't specify an element, return all energies
if ielement == -1:
all = True
verbose = False
expected_pos = 0
filename = 'reference/henke.xdr'
        try:
            file = open(str(filename), 'rb')
        except IOError:
            try:
                filename = '../reference/henke.xdr'
                file = open(str(filename), 'rb')
            except IOError:
                self.logger.error('Could not open file %s', filename)
                return None, None, None, None, None, None, None
if verbose:
self.logger.debug('File: %s', filename)
buf = file.read()
u = Unpacker(buf)
if all:
n_elements = u.unpack_int()
n_energies = u.unpack_int()
if verbose:
self.logger.debug('n_energies: %s', n_energies)
self.logger.debug('n_elements: %s', n_elements)
expected_pos = expected_pos + 2 * 4
self.logger.debug('Actual, expected file position before reading energies: %s %s', u.get_position(), expected_pos)
energies = u.unpack_farray(n_energies, u.unpack_float)
energies = np.array(energies)
if verbose:
self.logger.debug('energies: %s', energies)
f1 = np.zeros((n_elements, n_energies))
f2 = np.zeros((n_elements, n_energies))
this_f1 = np.zeros((n_energies))
this_f2 = np.zeros((n_energies))
if verbose:
expected_pos = expected_pos + 4 * n_energies
self.logger.debug('Actual, expected file position before reading elements: %s %s', u.get_position(), expected_pos)
for i_element in range(n_elements):
this_f1 = u.unpack_farray(n_energies, u.unpack_float)
this_f2 = u.unpack_farray(n_energies, u.unpack_float)
f1[i_element, :] = this_f1
f2[i_element, :] = this_f2
#self.logger.debug( f1
if verbose:
expected_pos = expected_pos + n_elements * n_energies * 2 * 4
self.logger.debug('Actual, expected file position before reading n_extra_energies: %s %s', u.get_position(), expected_pos)
n_extra_energies = u.unpack_int()
if verbose:
self.logger.debug('n_extra_energies: %s', n_extra_energies)
if verbose:
expected_pos = expected_pos + 4
self.logger.debug('Actual, expected file position before reading extras: %s %s', u.get_position(), expected_pos)
            n_extra = np.zeros(n_elements, dtype=int)  # np.int was removed from NumPy; plain int is equivalent
extra_energies = np.zeros((n_elements, n_extra_energies))
extra_f1 = np.zeros((n_elements, n_extra_energies))
extra_f2 = np.zeros((n_elements, n_extra_energies))
this_n_extra = 0
this_extra_energies = np.zeros((n_extra_energies))
this_extra_f1 = np.zeros((n_extra_energies))
this_extra_f2 = np.zeros((n_extra_energies))
for i_element in range(n_elements):
this_n_extra = u.unpack_int()
this_extra_energies = u.unpack_farray(n_extra_energies, u.unpack_float)
this_extra_f1 = u.unpack_farray(n_extra_energies, u.unpack_float)
this_extra_f2 = u.unpack_farray(n_extra_energies, u.unpack_float)
n_extra[i_element] = this_n_extra
extra_energies[i_element, :] = this_extra_energies
extra_f1[i_element, :] = this_extra_f1
extra_f2[i_element, :] = this_extra_f2
else:
n_elements = u.unpack_int()
n_energies = u.unpack_int()
energies = u.unpack_farray(n_energies, u.unpack_float)
energies = np.array(energies)
if verbose:
self.logger.debug('energies: %s', energies)
byte_offset = 4 + 4 + 4 * n_energies + 8 * ielement * n_energies
u.set_position(byte_offset)
f1 = u.unpack_farray(n_energies, u.unpack_float)
f2 = u.unpack_farray(n_energies, u.unpack_float)
byte_offset = 4 + 4 + 4 * n_energies + 8 * n_elements * n_energies
u.set_position(byte_offset)
n_extra_energies = u.unpack_int()
if verbose:
self.logger.debug('n_extra_energies %s', n_extra_energies)
            # Skip the shared header plus ielement blocks, each holding n_extra
            # (one 4-byte int) plus three arrays of n_extra_energies 4-byte floats
            # (12 bytes per extra energy).
            byte_offset = 4 + 4 + 4 * n_energies + 8 * n_elements * n_energies + 4 + ielement * (4 + 12 * n_extra_energies)
u.set_position(byte_offset)
n_extra = u.unpack_int()
this_extra_energies = u.unpack_farray(n_extra_energies, u.unpack_float)
this_extra_f1 = u.unpack_farray(n_extra_energies, u.unpack_float)
this_extra_f2 = u.unpack_farray(n_extra_energies, u.unpack_float)
extra_energies = this_extra_energies[0:n_extra]
extra_f1 = this_extra_f1[0:n_extra]
extra_f2 = this_extra_f2[0:n_extra]
file.close()
return energies, f1, f2, n_extra, extra_energies, extra_f1, extra_f2
# -----------------------------------------------------------------------------
def array(self, compound_name, density, graze_mrad=0):
z_array = []
z_array, atwt = self.compound(compound_name,density)
if len(z_array) == 0:
z_array = self.zcompound(compound_name, z_array)
atwt = self.zatwt(z_array)
maxz = 92
first_time = 1
for i in range(maxz):
if z_array[i] != 0.0:
energies, this_f1, this_f2, n_extra, extra_energies, extra_f1, extra_f2 = self.read(ielement=i)
                if energies is None:
continue
self.logger.debug('this_f1.shape: %s', this_f1.shape)
if first_time == 1:
f1 = z_array[i] * this_f1
f2 = z_array[i] * this_f2
first_time = 0
else:
f1 = f1 + z_array[i] * this_f1
f2 = f2 + z_array[i] * this_f2
num_energies = len(energies)
AVOGADRO=6.02204531e23
HC_ANGSTROMS=12398.52
RE=2.817938070e-13 # in cm
if atwt != 0.0:
molecules_per_cc = density * AVOGADRO / atwt
else:
molecules_per_cc = 0.0
wavelength_angstroms = HC_ANGSTROMS/energies
# This constant has wavelength in angstroms and then
# they are converted to centimeters.
        constant = RE * (1.0e-16 * wavelength_angstroms * wavelength_angstroms) * molecules_per_cc / (2.0 * np.pi)
delta = constant * f1
beta = constant * f2
# Alpha is in inverse meters squared
        alpha = 1.e4 * density * AVOGADRO * RE / (2. * np.pi * atwt)
#alpha = alpha[0]
if graze_mrad == 0.0:
reflect = np.ones((num_energies))
else:
theta = 1.0e-3 * graze_mrad
sinth = np.sin(theta)
sinth2 = sinth * sinth
coscot = np.cos(theta)
coscot = coscot * coscot / sinth
alpha = 2.0 * delta - delta * delta + beta * beta
gamma = 2.0 * (1.0 - delta) * beta
rhosq = 0.5 * (sinth2 - alpha + np.sqrt((sinth2 - alpha)*(sinth2-alpha) + gamma*gamma) )
rho = np.sqrt(rhosq)
i_sigma = (4.0 * rhosq * (sinth - rho) * (sinth - rho) + \
gamma * gamma) / \
(4.0 * rhosq * (sinth + rho) * (sinth + rho) + \
gamma * gamma)
piosig = (4.0 * rhosq * (rho - coscot) * (rho - coscot) + \
gamma * gamma) / \
(4.0 * rhosq * (rho + coscot) * (rho + coscot) + \
gamma * gamma)
reflect= 50.0 * i_sigma * (1 + piosig)
        denom = energies * 4. * np.pi * beta
        zeroes = np.where(denom == 0.)
        denom[zeroes] = 1e-8
        inverse_mu = 1.239852 / denom
        if len(zeroes[0]) > 0:
            inverse_mu[zeroes] = np.inf
return energies, f1, f2, delta, beta, graze_mrad, reflect, inverse_mu, atwt, alpha
# -----------------------------------------------------------------------------
def get_henke(self, compound_name, density, energy):
        if len(compound_name) == 0:
            self.logger.warning(
                'usage: get_henke(compound_name, density, energy) -> '
                '(f1, f2, delta, beta, graze_mrad, reflect, inverse_mu, atwt); '
                'inverse_mu is the 1/e absorption length in microns and atwt is '
                'the atom-averaged atomic weight of the compound.')
            return None, None, None, None, None, None, None, None
enarr, f1arr, f2arr, deltaarr, betaarr, graze_mrad, reflect_arr, inverse_mu, atwt, alpha = self.array(compound_name, density)
num_energies = len(enarr)
high_index = 0
while (energy > enarr[high_index]) and (high_index < (num_energies - 1)):
high_index = high_index + 1
if high_index == 0:
high_index = 1
low_index = high_index - 1
        ln_lower_energy = np.log(enarr[low_index])
        ln_higher_energy = np.log(enarr[high_index])
        fraction = (np.log(energy) - ln_lower_energy) / (ln_higher_energy - ln_lower_energy)
f1_lower = f1arr[low_index]
f1_higher = f1arr[high_index]
f1 = f1_lower + fraction * (f1_higher - f1_lower)
        ln_f2_lower = np.log(np.abs(f2arr[low_index]))
        ln_f2_higher = np.log(np.abs(f2arr[high_index]))
        f2 = np.exp(ln_f2_lower + fraction * (ln_f2_higher - ln_f2_lower))
delta_lower = deltaarr[low_index]
delta_higher = deltaarr[high_index]
delta = delta_lower + fraction * (delta_higher - delta_lower)
        ln_beta_lower = np.log(np.abs(betaarr[low_index]))
        ln_beta_higher = np.log(np.abs(betaarr[high_index]))
        beta = np.exp(ln_beta_lower + fraction * (ln_beta_higher - ln_beta_lower))
reflect_lower = reflect_arr[low_index]
reflect_higher = reflect_arr[high_index]
reflect = reflect_lower + fraction * (reflect_higher - reflect_lower)
        if beta != 0.0:
            inverse_mu = 1.239852 / (energy * 4. * np.pi * beta)
        else:
            inverse_mu = np.inf
return f1, f2, delta, beta, graze_mrad, reflect, inverse_mu, atwt
# -----------------------------------------------------------------------------
def get_henke_single(self, name, density, energy_array):
AVOGADRO = 6.02204531e23
HC_ANGSTROMS = 12398.52
RE = 2.817938070e-13 # in cm
z_array, atwt = self.compound(name.strip(), density)
if len(z_array) == 0:
z_array = self.zcompound(name, z_array)
atwt = self.zatwt(z_array)
wo = np.where(z_array > 0)[0]
if len(wo) == 0 :
self.logger.warning('Warning: get_henke_single() name=%s encountered error, will return', name)
return 0, 0, 0, 0
z = wo+1
if (atwt != 0.0):
molecules_per_cc = density * AVOGADRO / atwt
else:
molecules_per_cc = 0.0
if len(wo) > 1:
energies_all, f1_all, f2_all, energies_extra, f1_extra, f2_extra = self.extra(ielement=z[0])
else:
energies_all, f1_all, f2_all, energies_extra, f1_extra, f2_extra = self.extra(ielement=z[0] - 1)
if isinstance(energy_array, float):
n_array = 1
else:
n_array = len(energy_array)
f1_array = np.zeros((n_array))
f2_array = np.zeros((n_array))
delta_array = np.zeros((n_array))
beta_array = np.zeros((n_array))
        for i in range(n_array):
            # Support both a scalar energy and an array of energies.
            energy = energy_array if n_array == 1 else energy_array[i]
            wavelength_angstroms = HC_ANGSTROMS / energy
# This constant has wavelength in angstroms and then
# they are converted to centimeters.
constant = RE * (1.0e-16 * wavelength_angstroms * wavelength_angstroms) * \
molecules_per_cc / (2.0 * np.pi)
wo = np.where(energies_all > energy)[0]
# the first value that is larger than energy must be the closest value
if len(wo) == 0:
hi_e_ind = 0
else:
hi_e_ind = wo[0]
wo = np.where(energies_all < energy)[0]
# the last value that is smaller than energy must be the closest value
if len(wo) == 0 :
lo_e_ind = len(energies_all) - 1
else:
lo_e_ind = wo[-1]
            ln_lower_energy = np.log(energies_all[lo_e_ind])
            ln_higher_energy = np.log(energies_all[hi_e_ind])
            fraction = (np.log(energy) - ln_lower_energy) / (ln_higher_energy - ln_lower_energy)
f1_lower = f1_all[lo_e_ind]
f1_higher = f1_all[hi_e_ind]
f1_array[i] = f1_lower + fraction * (f1_higher - f1_lower)
            ln_f2_lower = np.log(np.abs(f2_all[lo_e_ind]))
            ln_f2_higher = np.log(np.abs(f2_all[hi_e_ind]))
            f2_array[i] = np.exp(ln_f2_lower + fraction * (ln_f2_higher - ln_f2_lower))
delta_array[i] = constant * f1_array[i]
beta_array[i] = constant * f2_array[i]
return f1_array, f2_array, delta_array, beta_array
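# Usage sketch (assumes reference/henke.xdr is present and logger is any
# logging.Logger instance):
#   f1, f2, delta, beta = henke(logger).get_henke_single('Si', 2.33, 1000.0)
# Energies are in eV (HC_ANGSTROMS = 12398.52 eV*Angstrom).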
|
"""JIRA utils used internally."""
from __future__ import unicode_literals
import threading
from jira.resilientsession import raise_on_error
class CaseInsensitiveDict(dict):
"""A case-insensitive ``dict``-like object.
Implements all methods and operations of
``collections.MutableMapping`` as well as dict's ``copy``. Also
provides ``lower_items``.
All keys are expected to be strings. The structure remembers the
case of the last key to be set, and ``iter(instance)``,
``keys()``, ``items()``, ``iterkeys()``, and ``iteritems()``
will contain case-sensitive keys. However, querying and contains
testing is case insensitive::
cid = CaseInsensitiveDict()
cid['Accept'] = 'application/json'
cid['accept'] == 'application/json' # True
list(cid) == ['Accept'] # True
For example, ``headers['content-encoding']`` will return the
value of a ``'Content-Encoding'`` response header, regardless
of how the header name was originally stored.
If the constructor, ``.update``, or equality comparison
operations are given keys that have equal ``.lower()``s, the
behavior is undefined.
"""
def __init__(self, *args, **kw):
super(CaseInsensitiveDict, self).__init__(*args, **kw)
self.itemlist = {}
        for key, value in list(super(CaseInsensitiveDict, self).items()):  # copy: we mutate while normalizing keys
if key != key.lower():
self[key.lower()] = value
self.pop(key, None)
# self.itemlist[key.lower()] = value
def __setitem__(self, key, value):
"""Overwrite [] implementation."""
super(CaseInsensitiveDict, self).__setitem__(key.lower(), value)
# def __iter__(self):
# return iter(self.itemlist)
# def keys(self):
# return self.itemlist
# def values(self):
# return [self[key] for key in self]
# def itervalues(self):
# return (self[key] for key in self)
def threaded_requests(requests):
for fn, url, request_args in requests:
th = threading.Thread(
target=fn, args=(url,), kwargs=request_args, name=url,
)
th.start()
for th in threading.enumerate():
if th.name.startswith('http'):
th.join()
def json_loads(r):
raise_on_error(r)
try:
return r.json()
except ValueError:
# json.loads() fails with empty bodies
if not r.text:
return {}
raise
|
"""
MINDBODY Public API
No description provided (generated by Swagger Codegen https://github.com/swagger-api/swagger-codegen) # noqa: E501
OpenAPI spec version: v6
Generated by: https://github.com/swagger-api/swagger-codegen.git
"""
from __future__ import absolute_import
import unittest
import swagger_client
from swagger_client.models.get_clients_request import GetClientsRequest # noqa: E501
from swagger_client.rest import ApiException
class TestGetClientsRequest(unittest.TestCase):
"""GetClientsRequest unit test stubs"""
def setUp(self):
pass
def tearDown(self):
pass
def testGetClientsRequest(self):
"""Test GetClientsRequest"""
# FIXME: construct object with mandatory attributes with example values
# model = swagger_client.models.get_clients_request.GetClientsRequest() # noqa: E501
pass
if __name__ == '__main__':
unittest.main()
|
"""Production settings and globals."""
from base import *
ALLOWED_HOSTS = [
'devncode.it',
'euoserver.devncode.it',
'localhost',
'127.0.0.1',
]
MEDIA_ROOT = normpath(join(SITE_ROOT, '../media'))
MEDIA_URL = '/media/'
STATIC_ROOT = normpath(join(SITE_ROOT, '../assets'))
STATIC_URL = '/static/'
EMAIL_BACKEND = 'django.core.mail.backends.console.EmailBackend'
EMAIL_HOST = 'localhost'
EMAIL_HOST_USER = ''
EMAIL_HOST_PASSWORD = None
EMAIL_PORT = 25
EMAIL_SUBJECT_PREFIX = '[%s] ' % SITE_NAME
EMAIL_USE_TLS = False
SERVER_EMAIL = 'marco.cotrufo@devncode.it'
LOGGING = {
'version': 1,
'disable_existing_loggers': False,
'formatters': {
'standard': {
'format': "[%(asctime)s] %(levelname)s [%(name)s] %(message)s",
'datefmt': "%d/%b/%Y %H:%M:%S"
},
},
'handlers': {
'logfile': {
'level': 'DEBUG',
'class': 'logging.handlers.RotatingFileHandler',
'filename': SITE_ROOT + "/../logs/django.log",
'maxBytes': 50000,
'backupCount': 10,
'formatter': 'standard',
},
'console': {
'class': 'logging.StreamHandler',
},
},
'loggers': {
        'backend': {
            'handlers': ['logfile'],
            'level': 'DEBUG',
            'propagate': True
        },
'euoserver': {
'handlers': ['logfile', 'console'],
'level': 'DEBUG',
'propagate': True
},
'django': {
'handlers': ['logfile', 'console'],
'level': 'INFO',
'propagate': True
}
},
}
SECRET_KEY = env('SECRET_KEY')
|
from migen.fhdl.std import *
from migen.flow.plumbing import Buffer
from migen.actorlib.fifo import SyncFIFO, AsyncFIFO
from migen.flow.network import DataFlowGraph, CompositeActor
from ezusbfifo import SimUSBActor, AsyncUSBActor
from mimisc.actors.plumbing import Relax
class Echo(Module):
def __init__(self, usb_actor):
g = DataFlowGraph()
fifo = SyncFIFO([('d', 16)], 64)
in_buffer = Relax([('d', 16)])
out_buffer = Relax([('d', 16)])
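        # Loop topology: usb_actor -> in_buffer -> fifo -> out_buffer -> usb_actor,
        # so every 16-bit word received over the EZ-USB FIFO is echoed back.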
g.add_connection(in_buffer, fifo)
g.add_connection(fifo, out_buffer)
g.add_connection(out_buffer, usb_actor)
g.add_connection(usb_actor, in_buffer)
self.submodules.composite = CompositeActor(g)
self.busy = self.composite.busy
if __name__ == '__main__':
import sys
try:
command = sys.argv.pop(1)
except IndexError:
command = 'help'
if command == 'build':
from mimisc.platforms import ztex_115d as board
plat = board.Platform(manual_timing=True)
fx2_fifo = plat.request('fx2_fifo')
clk_if = plat.request('clk_if')
echo = Echo(AsyncUSBActor(fx2_fifo))
clk_ezusbfifo = Signal()
clk_ezusbfifo_ub = Signal()
clk_sys = Signal()
clk_sys_ub = Signal()
fraction = (3, 1)
echo.specials += [
Instance('DCM_SP',
Instance.Input('CLKIN', clk_if),
Instance.Input('CLKFB', clk_ezusbfifo),
Instance.Input('RST', 0),
Instance.Input('DSSEN', 0),
Instance.Input('PSCLK', 0),
Instance.Input('PSEN', 0),
Instance.Input('PSINCDEC', 0),
Instance.Output('CLK0', clk_ezusbfifo_ub),
Instance.Parameter('STARTUP_WAIT', True),
),
Instance('BUFG',
Instance.Input('I', clk_ezusbfifo_ub),
Instance.Output('O', clk_ezusbfifo),
),
Instance('DCM_CLKGEN',
Instance.Input('CLKIN', clk_if),
Instance.Input('RST', 0),
Instance.Input('FREEZEDCM', 0),
Instance.Input('PROGDATA', 0),
Instance.Input('PROGEN', 0),
Instance.Input('PROGCLK', 0),
Instance.Output('CLKFX', clk_sys_ub),
Instance.Parameter('CLKFX_MULTIPLY', fraction[0]),
Instance.Parameter('CLKFX_DIVIDE', fraction[1]),
Instance.Parameter('STARTUP_WAIT', True),
),
Instance('BUFG',
Instance.Input('I', clk_sys_ub),
Instance.Output('O', clk_sys),
),
]
echo.clock_domains.cd_sys = ClockDomain(
'sys', reset_less=True)
echo.clock_domains.cd_ezusbfifo = ClockDomain(
'ezusbfifo', reset_less=True)
echo.comb += [
echo.cd_sys.clk.eq(clk_sys),
echo.cd_ezusbfifo.clk.eq(clk_ezusbfifo)
]
plat.add_platform_command("""
NET "{clk_if}" TNM_NET = "GRP_clk_if";
TIMESPEC "TS_clk_if" = PERIOD "GRP_clk_if" 33.33 ns HIGH 50%;
NET "{clk_sys}" TNM_NET = "GRP_clk_sys";
NET "{clk_ezusbfifo}" TNM_NET = "GRP_clk_ezusbfifo";
TIMESPEC "TS_cdc_fwd" =
FROM "GRP_clk_sys" TO "GRP_clk_ezusbfifo"
[delay] ns DATAPATHONLY;
TIMESPEC "TS_cdc_bwd" =
FROM "GRP_clk_ezusbfifo" TO "GRP_clk_sys"
[delay] ns DATAPATHONLY;
OFFSET = IN 15 ns VALID 30 ns BEFORE "{clk_if}";
OFFSET = OUT 15 ns AFTER "{clk_if}";
""".replace('[delay]',
str(0.5 * 33.33 * fraction[1] / fraction[0])),
clk_if=clk_if,
clk_sys=clk_sys,
clk_ezusbfifo=clk_ezusbfifo,
)
plat.build_cmdline(echo)
elif command == 'sim':
from migen.sim.generic import Simulator, TopLevel
echo = Echo(SimUSBActor(loop=True))
sim = Simulator(echo, TopLevel("echo.vcd"))
with sim:
sim.run()
else:
print('usage: python %s build|sim [options]' % sys.argv[0])
|
"""
@package mi.dataset.parser.test
@file marine-integrations/mi/dataset/parser/test/test_flort_dj_cspp.py
@author Jeremy Amundson
@brief Test code for a flort_dj_cspp data parser
"""
import os
from nose.plugins.attrib import attr
from mi.core.log import get_logger
from mi.dataset.test.test_parser import ParserUnitTestCase
from mi.dataset.dataset_parser import DataSetDriverConfigKeys
from mi.dataset.parser.cspp_base import METADATA_PARTICLE_CLASS_KEY, DATA_PARTICLE_CLASS_KEY
from mi.dataset.parser.flort_dj_cspp import \
FlortDjCsppParser, \
FlortDjCsppMetadataRecoveredDataParticle, \
FlortDjCsppInstrumentRecoveredDataParticle, FlortDjCsppMetadataTelemeteredDataParticle, \
FlortDjCsppInstrumentTelemeteredDataParticle
from mi.core.exceptions import RecoverableSampleException
from mi.idk.config import Config
log = get_logger()
RESOURCE_PATH = os.path.join(Config().base_dir(), 'mi', 'dataset', 'driver', 'flort_dj', 'cspp', 'resource')
TEST_RECOVERED = 'first_data_recovered.yml'
@attr('UNIT', group='mi')
class FlortDjCsppParserUnitTestCase(ParserUnitTestCase):
"""
flort_dj_cspp Parser unit test suite
"""
def setUp(self):
ParserUnitTestCase.setUp(self)
self._recovered_config = {
DataSetDriverConfigKeys.PARTICLE_CLASS: None,
DataSetDriverConfigKeys.PARTICLE_CLASSES_DICT: {
METADATA_PARTICLE_CLASS_KEY: FlortDjCsppMetadataRecoveredDataParticle,
DATA_PARTICLE_CLASS_KEY: FlortDjCsppInstrumentRecoveredDataParticle
}
}
self._telemetered_config = {
DataSetDriverConfigKeys.PARTICLE_CLASS: None,
DataSetDriverConfigKeys.PARTICLE_CLASSES_DICT: {
METADATA_PARTICLE_CLASS_KEY: FlortDjCsppMetadataTelemeteredDataParticle,
DATA_PARTICLE_CLASS_KEY: FlortDjCsppInstrumentTelemeteredDataParticle,
}
}
def test_simple(self):
"""
        retrieves and verifies the first 20 particles
"""
file_path = os.path.join(RESOURCE_PATH, 'first_data.txt')
stream_handle = open(file_path, 'rU')
parser = FlortDjCsppParser(self._recovered_config,
stream_handle,
self.exception_callback)
particles = parser.get_records(20)
self.assert_particles(particles, 'first_data_20_recovered.yml', RESOURCE_PATH)
self.assertEqual(len(self.exception_callback_value), 0)
stream_handle.close()
def test_simple_telem(self):
"""
        retrieves and verifies the first 20 particles
"""
file_path = os.path.join(RESOURCE_PATH, 'first_data.txt')
stream_handle = open(file_path, 'rU')
parser = FlortDjCsppParser(self._telemetered_config,
stream_handle,
self.exception_callback)
particles = parser.get_records(20)
self.assert_particles(particles, 'first_data_20_telemetered.yml', RESOURCE_PATH)
self.assertEqual(len(self.exception_callback_value), 0)
stream_handle.close()
def test_long_stream(self):
"""
retrieve all of particles, verify the expected number, confirm results
"""
file_path = os.path.join(RESOURCE_PATH, 'first_data.txt')
stream_handle = open(file_path, 'rU')
parser = FlortDjCsppParser(self._recovered_config,
stream_handle,
self.exception_callback)
particles = parser.get_records(1000)
        self.assertEqual(len(particles), 193)
self.assert_particles(particles, 'first_data_recovered.yml', RESOURCE_PATH)
self.assertEqual(len(self.exception_callback_value), 0)
stream_handle.close()
def test_long_stream_telem(self):
"""
retrieve all of particles, verify the expected number, confirm results
"""
file_path = os.path.join(RESOURCE_PATH, 'first_data.txt')
stream_handle = open(file_path, 'rU')
parser = FlortDjCsppParser(self._telemetered_config,
stream_handle,
self.exception_callback)
particles = parser.get_records(1000)
        self.assertEqual(len(particles), 193)
self.assert_particles(particles, 'first_data_telemetered.yml', RESOURCE_PATH)
self.assertEqual(len(self.exception_callback_value), 0)
stream_handle.close()
def test_bad_data(self):
"""
Ensure that bad data is skipped when it exists. A variety of malformed
records are used in order to verify this
"""
file_path = os.path.join(RESOURCE_PATH, 'BAD.txt')
stream_handle = open(file_path, 'rU')
parser = FlortDjCsppParser(self._recovered_config,
stream_handle,
self.exception_callback)
particles = parser.get_records(4)
self.assert_particles(particles, 'BAD_recovered.yml', RESOURCE_PATH)
self.assertIsInstance(self.exception_callback_value[0], RecoverableSampleException)
stream_handle.close()
def test_bad_data_telem(self):
"""
Ensure that bad data is skipped when it exists. A variety of malformed
records are used in order to verify this
"""
file_path = os.path.join(RESOURCE_PATH, 'BAD.txt')
stream_handle = open(file_path, 'rU')
parser = FlortDjCsppParser(self._telemetered_config,
stream_handle,
self.exception_callback)
particles = parser.get_records(4)
self.assert_particles(particles, 'BAD_telemetered.yml', RESOURCE_PATH)
self.assertIsInstance(self.exception_callback_value[0], RecoverableSampleException)
stream_handle.close()
|
import unittest
import textform
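# The template syntax exercised below follows Perl-style format pictures:
# '@<<<' left-justifies a value, '@>>>' right-justifies, '@|||' centres, and
# the field width is the length of the '@...' run; long values wrap onto
# continuation lines (see test_multiline).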
class TestStringMethods(unittest.TestCase):
def test_basic(self):
r = textform.format('hello world', [])
self.assertEqual(r, 'hello world')
r = textform.format('', [])
self.assertEqual(r, '')
def test_wrong_number_of_values(self):
with self.assertRaises(textform.Mismatch):
textform.format('foo', [1])
with self.assertRaises(textform.Mismatch):
textform.format('@>>>', [1, 2])
with self.assertRaises(textform.Mismatch):
textform.format('@>>>', [])
def test3(self):
r = textform.format('@<<<<<< @|||||| @>>>>>>', (101, 202, 303))
self.assertEqual(r, '101 202 303')
r = textform.format(': @<<<<<< : @|||||| : @>>>>>> :', (101, 202, 303))
self.assertEqual(r, ': 101 : 202 : 303 :')
def test_left_middle_right(self):
r = textform.format('@<<<<<<', ['foo'])
self.assertEqual(r, 'foo')
r = textform.format('@>>>>>>', ['foo'])
self.assertEqual(r, ' foo')
r = textform.format('@||||||', ['foo'])
self.assertEqual(r, ' foo')
# Leading whitespace in the template is preserved.
# Trailing whitespace is not.
def test_left_middle_right_spaces(self):
r = textform.format(' @<<<<<< ', ['foo'])
self.assertEqual(r, ' foo')
r = textform.format(' @>>>>>> ', ['foo'])
self.assertEqual(r, ' foo')
r = textform.format(' @|||||| ', ['foo'])
self.assertEqual(r, ' foo')
def test_multiline(self):
t = 'now is the time for all good men to come to the aid of their party'.split()
r = textform.format('@<<<<<<<<<:@|||||||||:@>>>>>>>>>',
[' '.join(t),
' '.join(reversed(t)),
' '.join(t[:-3]).upper()])
print('\n{}\n{}\n{}'.format('=' * 70, r, '=' * 70))
self.assertEqual(r,
'now is the: party :NOW IS THE\n' +
'time for : their of : TIME FOR\n' +
'all good :aid the to: ALL GOOD\n' +
'men to : come to : MEN TO\n' +
'come to : men good : COME TO\n' +
'the aid of: all for : THE AID\n' +
'their : time the :\n' +
'party : is now :')
r = textform.format('@<<<<<<<<<:@|||||||||:@>>>>>>>>>',
[' '.join(t[:-3]),
' '.join(reversed(t)),
' '.join(t).upper()])
print('\n{}\n{}\n{}'.format('=' * 70, r, '=' * 70))
self.assertEqual(r,
'now is the: party :NOW IS THE\n' +
'time for : their of : TIME FOR\n' +
'all good :aid the to: ALL GOOD\n' +
'men to : come to : MEN TO\n' +
'come to : men good : COME TO\n' +
'the aid : all for :THE AID OF\n' +
' : time the : THEIR\n' +
' : is now : PARTY')
r = textform.format('@<<<<<<<<<:@|||||||||:@>>>>>>>>>',
[' '.join(t),
' '.join(reversed(t[:-3])),
' '.join(t).upper()])
print('\n{}\n{}\n{}'.format('=' * 70, r, '=' * 70))
self.assertEqual(r,
'now is the:aid the to:NOW IS THE\n' +
'time for : come to : TIME FOR\n' +
'all good : men good : ALL GOOD\n' +
'men to : all for : MEN TO\n' +
'come to : time the : COME TO\n' +
'the aid of: is now :THE AID OF\n' +
'their : : THEIR\n' +
'party : : PARTY')
r = textform.format('@<<<<<<<<<<:@>>>>>>>>>>',
['now-is-the-time-for-all-good-men',
'hello-world'])
print('\n{}\n{}\n{}'.format('=' * 70, r, '=' * 70))
self.assertEqual(r,
'now-is-the-:hello-world\n' +
'time-for- :\n' +
'all-good- :\n' +
'men :')
r = textform.format('@<<<<<<<<:@|||||||||:@>>>>>>>>>',
['now is the time for all',
'good men to',
'come to the aid of their party'])
print('\n{}\n{}\n{}'.format('=' * 70, r, '=' * 70))
self.assertEqual(r,
'now is : good men : come to\n' +
'the time : to :the aid of\n' +
'for all : : their\n' +
' : : party')
if __name__ == '__main__':
unittest.main()
|
import memcache
from . import HandlerBase
from uuid import uuid4
class Handler(HandlerBase):
def __init__(self, host='127.0.0.1', port=11211):
self.db = memcache.Client(['%s:%s' % (host, port)], debug=0)
def set(self, sid, data, ttl=0):
if self.db.set(sid, data, ttl):
return sid
def get(self, sid):
return self.db.get(sid)
def delete(self, sid):
return self.db.delete(sid)
def make_sid(self):
return uuid4().hex
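# Usage sketch (assumes a memcached server on 127.0.0.1:11211):
#   h = Handler()
#   sid = h.make_sid()
#   h.set(sid, 'session-payload', ttl=3600)
#   h.get(sid)       # -> 'session-payload'
#   h.delete(sid)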
|
"""This is a module providing FSM actions for Install Operations plugins."""
def a_error(plugin_ctx, ctx):
"""Display the error message."""
message = ctx.ctrl.after.strip().splitlines()[-1]
plugin_ctx.error(message)
return False
|
DATABASES = {"default": {"ENGINE": "django.db.backends.sqlite3", "NAME": ":memory:"}}
INSTALLED_APPS = ("honeypot",)
SECRET_KEY = "honeyisfrombees"
MIDDLEWARE_CLASSES = ()
TEMPLATES = [
{
"BACKEND": "django.template.backends.django.DjangoTemplates",
"DIRS": [],
"APP_DIRS": True,
"OPTIONS": {
"context_processors": [
"django.contrib.auth.context_processors.auth",
"django.template.context_processors.debug",
"django.template.context_processors.i18n",
"django.template.context_processors.media",
"django.template.context_processors.static",
"django.template.context_processors.tz",
"django.contrib.messages.context_processors.messages",
],
},
},
]
|
from __future__ import absolute_import
from __future__ import unicode_literals
from django.contrib import admin
from django.contrib.auth import get_permission_codename
import logging
logger = logging.getLogger(__name__)
try:
from django.utils.timezone import now
except ImportError:
from datetime import datetime
now = datetime.now
class DorsaleBaseAdmin(admin.ModelAdmin):
"""
ModelAdmin for DorsaleBaseModels, automaticalls sets createdby,
createdon, lastchangedby, lastchangedon, site and deleted fields.
Beware, this overrides "queryset" and "has_change_permissions"!
TODO: Permissions
TODO: see http://www.stereoplex.com/blog/filtering-dropdown-lists-in-the-django-admin
"""
view_on_site = False
def save_model(self, request, obj, form, change):
if not change:
if hasattr(obj, 'createdby'):
obj.createdby = request.user
if hasattr(obj, 'createdon'):
obj.createdon = now()
if hasattr(obj, 'deleted'):
obj.deleted = False
if hasattr(obj, 'site'):
from django.contrib.sites.models import Site
obj.site = Site.objects.get_current()
# we could allow superusers to change the site
if hasattr(obj, 'lastchangedby'):
obj.lastchangedby = request.user
if hasattr(obj, 'lastchangedon'):
obj.lastchangedon = now()
try:
obj.save(user=request.user)
        except TypeError:
            logger.info("Object.save doesn't accept a user parameter.")
obj.save()
def queryset(self, request):
# TODO: query (group) permissions
qs = self.model._default_manager.get_queryset()
ordering = self.ordering or ()
if not request.user.is_superuser and hasattr(self.model, 'createdby'):
qs = qs.filter(createdby=request.user)
if ordering:
qs = qs.order_by(*ordering)
return qs
def has_class_permission(self, request, obj=None):
return super(DorsaleBaseAdmin, self).has_change_permission(request, obj)
def has_change_permission(self, request, obj=None):
if not self.has_class_permission(request, obj):
return False
if obj is not None \
and not request.user.is_superuser \
and request.user.id != obj.createdby_id:
# TODO: Permissions!
return False
opts = self.opts
codename = get_permission_codename('change', opts)
return request.user.has_perm("%s.%s" % (opts.app_label, codename))
try:
    from registration.models import RegistrationProfile
    # RegistrationProfile should not show up in admin
    admin.site.unregister(RegistrationProfile)
except Exception:
    # django-registration is not installed, or it was activated before dorsale
    pass
|
from distutils.core import setup, Extension
ctc = Extension('constant_time_compare',
sources=['src/constant_time_compare.c'], language='c')
setup(name='constant_time_compare',
version='1.3',
description='This package includes a secure constant time comparison function written in C',
author='Levi Gross',
author_email='yriv@yrivtebff.pbz'.decode('rot13'),
url="https://github.com/levigross/constant_time_compare",
platforms=['any'],
ext_modules=[ctc])
|
"""
Base module variables
"""
from __future__ import unicode_literals
__version__ = 'dev'
__author__ = 'The CRN developers'
__copyright__ = 'Copyright 2016, Center for Reproducible Neuroscience, Stanford University'
__credits__ = ['Craig Moodie', 'Ross Blair', 'Oscar Esteban', 'Chris Gorgolewski', 'Shoshana Berleant',
'Russell A. Poldrack']
__license__ = '3-clause BSD'
__maintainer__ = 'Ross Blair'
__email__ = 'crn.poldracklab@gmail.com'
__status__ = 'Prototype'
__url__ = 'https://github.com/poldracklab/fmriprep'
__packagename__ = 'fmriprep'
__description__ = """fMRIprep is a functional magnetic resonance image pre-processing pipeline that
is designed to provide an easily accessible, state-of-the-art interface that is robust to differences
in scan acquisition protocols and that requires minimal user input, while providing easily interpretable
and comprehensive error and output reporting."""
__longdesc__ = """
This package is a functional magnetic resonance image preprocessing pipeline that is designed to
provide an easily accessible, state-of-the-art interface that is robust to differences in scan
acquisition protocols and that requires minimal user input, while providing easily interpretable
and comprehensive error and output reporting. This open-source neuroimaging data processing tool
is being developed as a part of the MRI image analysis and reproducibility platform offered by the
CRN. This pipeline is heavily influenced by the `Human Connectome Project analysis pipelines
<https://github.com/Washington-University/Pipelines>`_ and, as such, the backbone of this pipeline
is a python reimplementation of the HCP `GenericfMRIVolumeProcessingPipeline.sh` script. However, a
major difference is that this pipeline is executed using a `nipype workflow framework
<http://nipype.readthedocs.io/en/latest/>`_. This allows for each call to a software module or binary
to be controlled within the workflows, which removes the need for manual curation at every stage, while
still providing all the output and error information that would be necessary for debugging and interpretation
purposes. The fmriprep pipeline primarily utilizes FSL tools, but also utilizes ANTs tools at several stages
such as skull stripping and template registration. This pipeline was designed to provide the best software
implementation for each state of preprocessing, and will be updated as newer and better neuroimaging software
become available.
"""
DOWNLOAD_URL = ('https://pypi.python.org/packages/source/f/fmriprep/' +
                'fmriprep-{}.tar.gz').format(__version__)
REQUIRES = [
'numpy',
'lockfile',
'future',
'scikit-learn',
'matplotlib',
'nilearn',
'sklearn',
'nibabel',
'pandas',
'grabbit',
'pybids>=0.0.1',
'nitime',
'niworkflows',
'nipype>=0.13.0rc1'
]
LINKS_REQUIRES = []
TESTS_REQUIRES = [
"mock",
"codecov"
]
EXTRA_REQUIRES = {
'doc': ['sphinx'],
'tests': TESTS_REQUIRES,
'duecredit': ['duecredit']
}
EXTRA_REQUIRES['all'] = [val for _, vals in list(EXTRA_REQUIRES.items()) for val in vals]  # flatten the per-extra lists
CLASSIFIERS = [
'Development Status :: 3 - Alpha',
'Intended Audience :: MRI processing',
'Topic :: Scientific/Engineering :: Biomedical Imaging',
'License :: OSI Approved :: 3-clause BSD License',
'Programming Language :: Python :: 2.7',
'Programming Language :: Python :: 3.5'
]
|
from fractions import Fraction as Fract
import sys
def sqr(x): return x*x
def greenq(x,y,x2,y2): return 2*(x2-x)*(y2-y)
def redq(x,y,x2,y2): return sqr(x2-x)-sqr(y2-y)
def blueq(x,y,x2,y2): return sqr(x2-x)+sqr(y2-y)
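# For integers (m, n): redq(0,0,m,n) = m**2 - n**2, greenq(0,0,m,n) = 2*m*n and
# blueq(0,0,m,n) = m**2 + n**2 form a Pythagorean triple, so (redq/blueq,
# greenq/blueq) lies on the unit circle; the loops below build rational points
# from these quadratics and rescale them before plotting.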
xs,ys=[],[]
depth = 20
for m in range(-depth,depth):
for n in range(-depth,depth):
if redq(0,0,m,n)==0: continue
if greenq(0,0,m,n)==0: continue
bq,rq,gq = blueq(0,0,m,n),redq(0,0,m,n),greenq(0,0,m,n)
x = Fract( Fract(bq,gq), blueq(0,0,Fract(rq,gq),Fract(bq,gq)) )
        y = Fract( Fract(rq,gq), blueq(0,0,Fract(rq,gq),Fract(bq,gq)) )  # denominator mirrors x's; the original (bq/gq, bq/gq) looks like a typo
xs += [x]
ys += [y]
max_val = max(xs + ys)  # avoid shadowing the builtin max()
for i in range(0, 2):
    print(xs[i], ',', ys[i], end=' ')
print('....')
for i in range(0, len(xs)):
    xs[i] = Fract(xs[i], max_val)
    ys[i] = Fract(ys[i], max_val)
print(len(xs), 'points')
import numpy as np
import matplotlib.pylab as plt
fig,ax = plt.subplots(figsize=(8,8))
ax.set_ylim([-1.2,1.2])
ax.set_xlim([-1.2,1.2])
ax.scatter(xs,ys)
plt.show()
|
airports_red = list()
j = 0
for lineB in open("airports.dat","r",encoding="utf-8-sig"):
    letalisca = lineB.split(",")  # split the line's fields into a list of strings
    for line0B in open("routes.dat","r",encoding="utf-8-sig"):
        poti = line0B.split(",")
        if len(letalisca) == 12 and len(poti) == 9:  # check that both records are well-formed
            if letalisca[0] == poti[3] or letalisca[0] == poti[5]:
                airports_red.append(lineB)
                j+=1
                print(j, "entries")  # progress while collecting the data
                break
print("Number of lines:", j)
f = open("airports_red.dat", "w+",encoding="utf-8")
f.write("".join(airports_red))
f.close()
print("\nDone, CSV output: airports_red.dat")
airlines_red = list()
j = 0
for lineB in open("airlines.dat","r",encoding="utf-8-sig"):
druzba = lineB.split(",")
for line0B in open("routes.dat","r",encoding="utf-8-sig"):
leti = line0B.split(",")
if len(druzba) == 8 and len(leti) == 9:
if druzba[0] == leti[1]:
airlines_red.append(lineB)
j+=1
print (j, "vpisov")
break
print ("Število vrstic:",j)
f = open("airlines_red.dat", "w+",encoding="utf-8")
f.write("".join(airlines_red))
f.close()
print("\nKončano, csv oblika: airlines_red.dat")
|
from moderation.tests.utils.testsettingsmanager import SettingsTestCase
from moderation.tests.utils import setup_moderation, teardown_moderation
from moderation.tests.apps.test_app2.models import Book
class AutoDiscoverAcceptanceTestCase(SettingsTestCase):
    '''
    As a developer I want a way to auto-discover all apps that have a
    moderator module and register them with moderation.
    '''
test_settings = 'moderation.tests.settings.auto_discover'
def setUp(self):
setup_moderation()
def tearDown(self):
teardown_moderation()
def test_all_app_containing_moderator_module_should_be_registered(self):
import moderation.tests.urls.auto_discover
from moderation import moderation
self.assertTrue(Book in moderation._registered_models)
|
from sklearn2sql_heroku.tests.classification import generic as class_gen
class_gen.test_model("MLPClassifier" , "BreastCancer" , "oracle")
|
from rest_framework import status, views, response, generics
from oscar.core.loading import get_model
from oscarapi.permissions import IsOwner
from oscarapi.views.utils import BasketPermissionMixin
from oscarapi.loading import get_api_classes
from oscarapi.signals import oscarapi_post_checkout
Order = get_model('order', 'Order')
OrderLine = get_model('order', 'Line')
OrderLineAttribute = get_model('order', 'LineAttribute')
__all__ = (
'CheckoutView',
'OrderList', 'OrderDetail',
'OrderLineList', 'OrderLineDetail',
'OrderLineAttributeDetail'
)
(OrderSerializer,
CheckoutSerializer,
OrderLineSerializer,
OrderLineAttributeSerializer
) \
= get_api_classes('oscarapi.serializers.checkout',
(
'OrderSerializer',
'CheckoutSerializer',
'OrderLineSerializer',
'OrderLineAttributeSerializer'
))
class OrderList(generics.ListAPIView):
serializer_class = OrderSerializer
permission_classes = (IsOwner,)
def get_queryset(self):
qs = Order.objects.all()
return qs.filter(user=self.request.user)
class OrderDetail(generics.RetrieveAPIView):
queryset = Order.objects.all()
serializer_class = OrderSerializer
permission_classes = (IsOwner,)
class OrderLineList(generics.ListAPIView):
queryset = OrderLine.objects.all()
serializer_class = OrderLineSerializer
def get(self, request, pk=None, format=None):
if pk is not None:
self.queryset = self.queryset.filter(
order__id=pk, order__user=request.user)
elif not request.user.is_staff:
self.permission_denied(request)
return super(OrderLineList, self).get(request, format)
class OrderLineDetail(generics.RetrieveAPIView):
queryset = OrderLine.objects.all()
serializer_class = OrderLineSerializer
def get(self, request, pk=None, format=None):
if not request.user.is_staff:
self.queryset = self.queryset.filter(
order__id=pk, order__user=request.user)
return super(OrderLineDetail, self).get(request, format)
class OrderLineAttributeDetail(generics.RetrieveAPIView):
queryset = OrderLineAttribute.objects.all()
serializer_class = OrderLineAttributeSerializer
class CheckoutView(BasketPermissionMixin, views.APIView):
"""
Prepare an order for checkout.
POST(basket, shipping_address,
[total, shipping_method_code, shipping_charge, billing_address]):
{
"basket": "http://testserver/oscarapi/baskets/1/",
"guest_email": "foo@example.com",
"total": "100.0",
"shipping_charge": {
"currency": "EUR",
"excl_tax": "10.0",
"tax": "0.6"
},
"shipping_method_code": "no-shipping-required",
"shipping_address": {
"country": "http://127.0.0.1:8000/oscarapi/countries/NL/",
"first_name": "Henk",
"last_name": "Van den Heuvel",
"line1": "Roemerlaan 44",
"line2": "",
"line3": "",
"line4": "Kroekingen",
"notes": "Niet STUK MAKEN OK!!!!",
"phone_number": "+31 26 370 4887",
"postcode": "7777KK",
"state": "Gerendrecht",
"title": "Mr"
}
}
returns the order object.
"""
order_serializer_class = OrderSerializer
serializer_class = CheckoutSerializer
def post(self, request, format=None):
# TODO: Make it possible to create orders with options.
# at the moment, no options are passed to this method, which means they
# are also not created.
data_basket = self.get_data_basket(request.data, format)
basket = self.check_basket_permission(request,
basket_pk=data_basket.pk)
# by now an error should have been raised if someone was messing
        # around with the basket, so assume the invariant holds
assert(data_basket == basket)
c_ser = self.serializer_class(
data=request.data, context={'request': request})
if c_ser.is_valid():
order = c_ser.save()
basket.freeze()
            o_ser = self.order_serializer_class(
                order, context={'request': request})
            resp = response.Response(o_ser.data)
            # send the actual Response object, not the rest_framework.response module
            oscarapi_post_checkout.send(
                sender=self, order=order, user=request.user,
                request=request, response=resp)
            return resp
return response.Response(c_ser.errors, status.HTTP_406_NOT_ACCEPTABLE)
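# A hedged sketch (not part of this module) of exercising CheckoutView from a
# Django test client; the /oscarapi/checkout/ path and the basket URL are
# assumptions based on the docstring above:
#
#     import json
#     from django.test import Client
#     client = Client()
#     payload = {"basket": "http://testserver/oscarapi/baskets/1/",
#                "total": "100.0",
#                "shipping_method_code": "no-shipping-required",
#                "shipping_address": {...}}  # see the docstring for a full example
#     resp = client.post('/oscarapi/checkout/', json.dumps(payload),
#                        content_type='application/json')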
|
import json
from binascii import b2a_hex
try:
from django.utils.six.moves import cPickle as pickle
except ImportError:
import pickle
import unittest
from unittest import skipUnless
from django.contrib.gis.gdal import HAS_GDAL
from django.contrib.gis.geometry.test_data import TestDataMixin
from django.utils.six.moves import xrange
if HAS_GDAL:
from django.contrib.gis.gdal import (OGRGeometry, OGRGeomType,
OGRException, OGRIndexError, SpatialReference, CoordTransform,
GDAL_VERSION)
@skipUnless(HAS_GDAL, "GDAL is required")
class OGRGeomTest(unittest.TestCase, TestDataMixin):
"This tests the OGR Geometry."
def test00a_geomtype(self):
"Testing OGRGeomType object."
# OGRGeomType should initialize on all these inputs.
OGRGeomType(1)
OGRGeomType(7)
OGRGeomType('point')
OGRGeomType('GeometrycollectioN')
OGRGeomType('LINearrING')
OGRGeomType('Unknown')
        # Should raise OGRException on these invalid inputs.
self.assertRaises(OGRException, OGRGeomType, 23)
self.assertRaises(OGRException, OGRGeomType, 'fooD')
self.assertRaises(OGRException, OGRGeomType, 9)
# Equivalence can take strings, ints, and other OGRGeomTypes
self.assertEqual(OGRGeomType(1), OGRGeomType(1))
self.assertEqual(OGRGeomType(7), 'GeometryCollection')
self.assertEqual(OGRGeomType('point'), 'POINT')
self.assertNotEqual(OGRGeomType('point'), 2)
self.assertEqual(OGRGeomType('unknown'), 0)
self.assertEqual(OGRGeomType(6), 'MULtiPolyGON')
self.assertEqual(OGRGeomType(1), OGRGeomType('point'))
self.assertNotEqual(OGRGeomType('POINT'), OGRGeomType(6))
# Testing the Django field name equivalent property.
self.assertEqual('PointField', OGRGeomType('Point').django)
self.assertEqual('GeometryField', OGRGeomType('Geometry').django)
self.assertEqual('GeometryField', OGRGeomType('Unknown').django)
self.assertIsNone(OGRGeomType('none').django)
# 'Geometry' initialization implies an unknown geometry type.
gt = OGRGeomType('Geometry')
self.assertEqual(0, gt.num)
self.assertEqual('Unknown', gt.name)
def test00b_geomtype_25d(self):
"Testing OGRGeomType object with 25D types."
wkb25bit = OGRGeomType.wkb25bit
self.assertEqual(OGRGeomType(wkb25bit + 1), 'Point25D')
self.assertEqual(OGRGeomType('MultiLineString25D'), (5 + wkb25bit))
self.assertEqual('GeometryCollectionField', OGRGeomType('GeometryCollection25D').django)
def test01a_wkt(self):
"Testing WKT output."
for g in self.geometries.wkt_out:
geom = OGRGeometry(g.wkt)
self.assertEqual(g.wkt, geom.wkt)
def test01a_ewkt(self):
"Testing EWKT input/output."
for ewkt_val in ('POINT (1 2 3)', 'LINEARRING (0 0,1 1,2 1,0 0)'):
# First with ewkt output when no SRID in EWKT
self.assertEqual(ewkt_val, OGRGeometry(ewkt_val).ewkt)
            # Now test consumption with an SRID specified.
ewkt_val = 'SRID=4326;%s' % ewkt_val
geom = OGRGeometry(ewkt_val)
self.assertEqual(ewkt_val, geom.ewkt)
self.assertEqual(4326, geom.srs.srid)
def test01b_gml(self):
"Testing GML output."
for g in self.geometries.wkt_out:
geom = OGRGeometry(g.wkt)
exp_gml = g.gml
if GDAL_VERSION >= (1, 8):
# In GDAL 1.8, the non-conformant GML tag <gml:GeometryCollection> was
# replaced with <gml:MultiGeometry>.
exp_gml = exp_gml.replace('GeometryCollection', 'MultiGeometry')
self.assertEqual(exp_gml, geom.gml)
def test01c_hex(self):
"Testing HEX input/output."
for g in self.geometries.hex_wkt:
geom1 = OGRGeometry(g.wkt)
self.assertEqual(g.hex.encode(), geom1.hex)
# Constructing w/HEX
geom2 = OGRGeometry(g.hex)
self.assertEqual(geom1, geom2)
def test01d_wkb(self):
"Testing WKB input/output."
for g in self.geometries.hex_wkt:
geom1 = OGRGeometry(g.wkt)
wkb = geom1.wkb
self.assertEqual(b2a_hex(wkb).upper(), g.hex.encode())
# Constructing w/WKB.
geom2 = OGRGeometry(wkb)
self.assertEqual(geom1, geom2)
def test01e_json(self):
"Testing GeoJSON input/output."
for g in self.geometries.json_geoms:
geom = OGRGeometry(g.wkt)
if not hasattr(g, 'not_equal'):
# Loading jsons to prevent decimal differences
self.assertEqual(json.loads(g.json), json.loads(geom.json))
self.assertEqual(json.loads(g.json), json.loads(geom.geojson))
self.assertEqual(OGRGeometry(g.wkt), OGRGeometry(geom.json))
def test02_points(self):
"Testing Point objects."
OGRGeometry('POINT(0 0)')
for p in self.geometries.points:
if not hasattr(p, 'z'): # No 3D
pnt = OGRGeometry(p.wkt)
self.assertEqual(1, pnt.geom_type)
self.assertEqual('POINT', pnt.geom_name)
self.assertEqual(p.x, pnt.x)
self.assertEqual(p.y, pnt.y)
self.assertEqual((p.x, p.y), pnt.tuple)
def test03_multipoints(self):
"Testing MultiPoint objects."
for mp in self.geometries.multipoints:
mgeom1 = OGRGeometry(mp.wkt) # First one from WKT
self.assertEqual(4, mgeom1.geom_type)
self.assertEqual('MULTIPOINT', mgeom1.geom_name)
mgeom2 = OGRGeometry('MULTIPOINT') # Creating empty multipoint
mgeom3 = OGRGeometry('MULTIPOINT')
for g in mgeom1:
mgeom2.add(g) # adding each point from the multipoints
mgeom3.add(g.wkt) # should take WKT as well
self.assertEqual(mgeom1, mgeom2) # they should equal
self.assertEqual(mgeom1, mgeom3)
self.assertEqual(mp.coords, mgeom2.coords)
self.assertEqual(mp.n_p, mgeom2.point_count)
def test04_linestring(self):
"Testing LineString objects."
prev = OGRGeometry('POINT(0 0)')
for ls in self.geometries.linestrings:
linestr = OGRGeometry(ls.wkt)
self.assertEqual(2, linestr.geom_type)
self.assertEqual('LINESTRING', linestr.geom_name)
self.assertEqual(ls.n_p, linestr.point_count)
self.assertEqual(ls.coords, linestr.tuple)
self.assertEqual(linestr, OGRGeometry(ls.wkt))
self.assertNotEqual(linestr, prev)
self.assertRaises(OGRIndexError, linestr.__getitem__, len(linestr))
prev = linestr
# Testing the x, y properties.
x = [tmpx for tmpx, tmpy in ls.coords]
y = [tmpy for tmpx, tmpy in ls.coords]
self.assertEqual(x, linestr.x)
self.assertEqual(y, linestr.y)
def test05_multilinestring(self):
"Testing MultiLineString objects."
prev = OGRGeometry('POINT(0 0)')
for mls in self.geometries.multilinestrings:
mlinestr = OGRGeometry(mls.wkt)
self.assertEqual(5, mlinestr.geom_type)
self.assertEqual('MULTILINESTRING', mlinestr.geom_name)
self.assertEqual(mls.n_p, mlinestr.point_count)
self.assertEqual(mls.coords, mlinestr.tuple)
self.assertEqual(mlinestr, OGRGeometry(mls.wkt))
self.assertNotEqual(mlinestr, prev)
prev = mlinestr
for ls in mlinestr:
self.assertEqual(2, ls.geom_type)
self.assertEqual('LINESTRING', ls.geom_name)
self.assertRaises(OGRIndexError, mlinestr.__getitem__, len(mlinestr))
def test06_linearring(self):
"Testing LinearRing objects."
prev = OGRGeometry('POINT(0 0)')
for rr in self.geometries.linearrings:
lr = OGRGeometry(rr.wkt)
#self.assertEqual(101, lr.geom_type.num)
self.assertEqual('LINEARRING', lr.geom_name)
self.assertEqual(rr.n_p, len(lr))
self.assertEqual(lr, OGRGeometry(rr.wkt))
self.assertNotEqual(lr, prev)
prev = lr
def test07a_polygons(self):
"Testing Polygon objects."
# Testing `from_bbox` class method
bbox = (-180, -90, 180, 90)
p = OGRGeometry.from_bbox(bbox)
self.assertEqual(bbox, p.extent)
prev = OGRGeometry('POINT(0 0)')
for p in self.geometries.polygons:
poly = OGRGeometry(p.wkt)
self.assertEqual(3, poly.geom_type)
self.assertEqual('POLYGON', poly.geom_name)
self.assertEqual(p.n_p, poly.point_count)
self.assertEqual(p.n_i + 1, len(poly))
# Testing area & centroid.
self.assertAlmostEqual(p.area, poly.area, 9)
x, y = poly.centroid.tuple
self.assertAlmostEqual(p.centroid[0], x, 9)
self.assertAlmostEqual(p.centroid[1], y, 9)
# Testing equivalence
self.assertEqual(poly, OGRGeometry(p.wkt))
self.assertNotEqual(poly, prev)
if p.ext_ring_cs:
ring = poly[0]
self.assertEqual(p.ext_ring_cs, ring.tuple)
self.assertEqual(p.ext_ring_cs, poly[0].tuple)
self.assertEqual(len(p.ext_ring_cs), ring.point_count)
for r in poly:
self.assertEqual('LINEARRING', r.geom_name)
def test07b_closepolygons(self):
"Testing closing Polygon objects."
# Both rings in this geometry are not closed.
poly = OGRGeometry('POLYGON((0 0, 5 0, 5 5, 0 5), (1 1, 2 1, 2 2, 2 1))')
self.assertEqual(8, poly.point_count)
with self.assertRaises(OGRException):
poly.centroid
poly.close_rings()
self.assertEqual(10, poly.point_count) # Two closing points should've been added
self.assertEqual(OGRGeometry('POINT(2.5 2.5)'), poly.centroid)
def test08_multipolygons(self):
"Testing MultiPolygon objects."
OGRGeometry('POINT(0 0)')
for mp in self.geometries.multipolygons:
mpoly = OGRGeometry(mp.wkt)
self.assertEqual(6, mpoly.geom_type)
self.assertEqual('MULTIPOLYGON', mpoly.geom_name)
if mp.valid:
self.assertEqual(mp.n_p, mpoly.point_count)
self.assertEqual(mp.num_geom, len(mpoly))
self.assertRaises(OGRIndexError, mpoly.__getitem__, len(mpoly))
for p in mpoly:
self.assertEqual('POLYGON', p.geom_name)
self.assertEqual(3, p.geom_type)
self.assertEqual(mpoly.wkt, OGRGeometry(mp.wkt).wkt)
def test09a_srs(self):
"Testing OGR Geometries with Spatial Reference objects."
for mp in self.geometries.multipolygons:
# Creating a geometry w/spatial reference
sr = SpatialReference('WGS84')
mpoly = OGRGeometry(mp.wkt, sr)
self.assertEqual(sr.wkt, mpoly.srs.wkt)
# Ensuring that SRS is propagated to clones.
klone = mpoly.clone()
self.assertEqual(sr.wkt, klone.srs.wkt)
# Ensuring all children geometries (polygons and their rings) all
# return the assigned spatial reference as well.
for poly in mpoly:
self.assertEqual(sr.wkt, poly.srs.wkt)
for ring in poly:
self.assertEqual(sr.wkt, ring.srs.wkt)
# Ensuring SRS propagate in topological ops.
a = OGRGeometry(self.geometries.topology_geoms[0].wkt_a, sr)
b = OGRGeometry(self.geometries.topology_geoms[0].wkt_b, sr)
diff = a.difference(b)
union = a.union(b)
self.assertEqual(sr.wkt, diff.srs.wkt)
self.assertEqual(sr.srid, union.srs.srid)
# Instantiating w/an integer SRID
mpoly = OGRGeometry(mp.wkt, 4326)
self.assertEqual(4326, mpoly.srid)
mpoly.srs = SpatialReference(4269)
self.assertEqual(4269, mpoly.srid)
self.assertEqual('NAD83', mpoly.srs.name)
            # Incrementing through the multipolygon after the spatial reference
# has been re-assigned.
for poly in mpoly:
self.assertEqual(mpoly.srs.wkt, poly.srs.wkt)
poly.srs = 32140
for ring in poly:
# Changing each ring in the polygon
self.assertEqual(32140, ring.srs.srid)
self.assertEqual('NAD83 / Texas South Central', ring.srs.name)
ring.srs = str(SpatialReference(4326)) # back to WGS84
self.assertEqual(4326, ring.srs.srid)
# Using the `srid` property.
ring.srid = 4322
self.assertEqual('WGS 72', ring.srs.name)
self.assertEqual(4322, ring.srid)
def test09b_srs_transform(self):
"Testing transform()."
orig = OGRGeometry('POINT (-104.609 38.255)', 4326)
trans = OGRGeometry('POINT (992385.4472045 481455.4944650)', 2774)
# Using an srid, a SpatialReference object, and a CoordTransform object
        # for transformations.
t1, t2, t3 = orig.clone(), orig.clone(), orig.clone()
t1.transform(trans.srid)
t2.transform(SpatialReference('EPSG:2774'))
ct = CoordTransform(SpatialReference('WGS84'), SpatialReference(2774))
t3.transform(ct)
# Testing use of the `clone` keyword.
k1 = orig.clone()
k2 = k1.transform(trans.srid, clone=True)
self.assertEqual(k1, orig)
self.assertNotEqual(k1, k2)
prec = 3
for p in (t1, t2, t3, k2):
self.assertAlmostEqual(trans.x, p.x, prec)
self.assertAlmostEqual(trans.y, p.y, prec)
def test09c_transform_dim(self):
"Testing coordinate dimension is the same on transformed geometries."
ls_orig = OGRGeometry('LINESTRING(-104.609 38.255)', 4326)
ls_trans = OGRGeometry('LINESTRING(992385.4472045 481455.4944650)', 2774)
prec = 3
ls_orig.transform(ls_trans.srs)
# Making sure the coordinate dimension is still 2D.
self.assertEqual(2, ls_orig.coord_dim)
self.assertAlmostEqual(ls_trans.x[0], ls_orig.x[0], prec)
self.assertAlmostEqual(ls_trans.y[0], ls_orig.y[0], prec)
def test10_difference(self):
"Testing difference()."
for i in xrange(len(self.geometries.topology_geoms)):
a = OGRGeometry(self.geometries.topology_geoms[i].wkt_a)
b = OGRGeometry(self.geometries.topology_geoms[i].wkt_b)
d1 = OGRGeometry(self.geometries.diff_geoms[i].wkt)
d2 = a.difference(b)
self.assertEqual(d1, d2)
self.assertEqual(d1, a - b) # __sub__ is difference operator
a -= b # testing __isub__
self.assertEqual(d1, a)
def test11_intersection(self):
"Testing intersects() and intersection()."
for i in xrange(len(self.geometries.topology_geoms)):
a = OGRGeometry(self.geometries.topology_geoms[i].wkt_a)
b = OGRGeometry(self.geometries.topology_geoms[i].wkt_b)
i1 = OGRGeometry(self.geometries.intersect_geoms[i].wkt)
self.assertTrue(a.intersects(b))
i2 = a.intersection(b)
self.assertEqual(i1, i2)
self.assertEqual(i1, a & b) # __and__ is intersection operator
a &= b # testing __iand__
self.assertEqual(i1, a)
def test12_symdifference(self):
"Testing sym_difference()."
for i in xrange(len(self.geometries.topology_geoms)):
a = OGRGeometry(self.geometries.topology_geoms[i].wkt_a)
b = OGRGeometry(self.geometries.topology_geoms[i].wkt_b)
d1 = OGRGeometry(self.geometries.sdiff_geoms[i].wkt)
d2 = a.sym_difference(b)
self.assertEqual(d1, d2)
self.assertEqual(d1, a ^ b) # __xor__ is symmetric difference operator
a ^= b # testing __ixor__
self.assertEqual(d1, a)
def test13_union(self):
"Testing union()."
for i in xrange(len(self.geometries.topology_geoms)):
a = OGRGeometry(self.geometries.topology_geoms[i].wkt_a)
b = OGRGeometry(self.geometries.topology_geoms[i].wkt_b)
u1 = OGRGeometry(self.geometries.union_geoms[i].wkt)
u2 = a.union(b)
self.assertEqual(u1, u2)
self.assertEqual(u1, a | b) # __or__ is union operator
a |= b # testing __ior__
self.assertEqual(u1, a)
def test14_add(self):
"Testing GeometryCollection.add()."
# Can't insert a Point into a MultiPolygon.
mp = OGRGeometry('MultiPolygon')
pnt = OGRGeometry('POINT(5 23)')
self.assertRaises(OGRException, mp.add, pnt)
# GeometryCollection.add may take an OGRGeometry (if another collection
# of the same type all child geoms will be added individually) or WKT.
for mp in self.geometries.multipolygons:
mpoly = OGRGeometry(mp.wkt)
mp1 = OGRGeometry('MultiPolygon')
mp2 = OGRGeometry('MultiPolygon')
mp3 = OGRGeometry('MultiPolygon')
for poly in mpoly:
mp1.add(poly) # Adding a geometry at a time
mp2.add(poly.wkt) # Adding WKT
mp3.add(mpoly) # Adding a MultiPolygon's entire contents at once.
for tmp in (mp1, mp2, mp3):
self.assertEqual(mpoly, tmp)
def test15_extent(self):
"Testing `extent` property."
# The xmin, ymin, xmax, ymax of the MultiPoint should be returned.
mp = OGRGeometry('MULTIPOINT(5 23, 0 0, 10 50)')
self.assertEqual((0.0, 0.0, 10.0, 50.0), mp.extent)
# Testing on the 'real world' Polygon.
poly = OGRGeometry(self.geometries.polygons[3].wkt)
ring = poly.shell
x, y = ring.x, ring.y
xmin, ymin = min(x), min(y)
xmax, ymax = max(x), max(y)
self.assertEqual((xmin, ymin, xmax, ymax), poly.extent)
def test16_25D(self):
"Testing 2.5D geometries."
pnt_25d = OGRGeometry('POINT(1 2 3)')
self.assertEqual('Point25D', pnt_25d.geom_type.name)
self.assertEqual(3.0, pnt_25d.z)
self.assertEqual(3, pnt_25d.coord_dim)
ls_25d = OGRGeometry('LINESTRING(1 1 1,2 2 2,3 3 3)')
self.assertEqual('LineString25D', ls_25d.geom_type.name)
self.assertEqual([1.0, 2.0, 3.0], ls_25d.z)
self.assertEqual(3, ls_25d.coord_dim)
def test17_pickle(self):
"Testing pickle support."
g1 = OGRGeometry('LINESTRING(1 1 1,2 2 2,3 3 3)', 'WGS84')
g2 = pickle.loads(pickle.dumps(g1))
self.assertEqual(g1, g2)
self.assertEqual(4326, g2.srs.srid)
self.assertEqual(g1.srs.wkt, g2.srs.wkt)
def test18_ogrgeometry_transform_workaround(self):
"Testing coordinate dimensions on geometries after transformation."
# A bug in GDAL versions prior to 1.7 changes the coordinate
# dimension of a geometry after it has been transformed.
# This test ensures that the bug workarounds employed within
# `OGRGeometry.transform` indeed work.
wkt_2d = "MULTILINESTRING ((0 0,1 1,2 2))"
wkt_3d = "MULTILINESTRING ((0 0 0,1 1 1,2 2 2))"
srid = 4326
# For both the 2D and 3D MultiLineString, ensure _both_ the dimension
# of the collection and the component LineString have the expected
# coordinate dimension after transform.
geom = OGRGeometry(wkt_2d, srid)
geom.transform(srid)
self.assertEqual(2, geom.coord_dim)
self.assertEqual(2, geom[0].coord_dim)
self.assertEqual(wkt_2d, geom.wkt)
geom = OGRGeometry(wkt_3d, srid)
geom.transform(srid)
self.assertEqual(3, geom.coord_dim)
self.assertEqual(3, geom[0].coord_dim)
self.assertEqual(wkt_3d, geom.wkt)
def test19_equivalence_regression(self):
"Testing equivalence methods with non-OGRGeometry instances."
self.assertIsNotNone(OGRGeometry('POINT(0 0)'))
self.assertNotEqual(OGRGeometry('LINESTRING(0 0, 1 1)'), 3)
|
import numpy as np
import itertools
from copy import deepcopy
from .variables import create_variable
from ..errors import InvalidConfigError, InvalidVariableNameError
class Design_space(object):
"""
Class to handle the input domain of the function.
The format of a input domain, possibly with restrictions:
    The domain is defined as a list of dictionaries, each containing a list of attributes, e.g.:
- Arm bandit
space =[{'name': 'var_1', 'type': 'bandit', 'domain': [(-1,1),(1,0),(0,1)]},
{'name': 'var_2', 'type': 'bandit', 'domain': [(-1,4),(0,0),(1,2)]}]
- Continuous domain
space =[ {'name': 'var_1', 'type': 'continuous', 'domain':(-1,1), 'dimensionality':1},
{'name': 'var_2', 'type': 'continuous', 'domain':(-3,1), 'dimensionality':2},
{'name': 'var_3', 'type': 'bandit', 'domain': [(-1,1),(1,0),(0,1)], 'dimensionality':2},
{'name': 'var_4', 'type': 'bandit', 'domain': [(-1,4),(0,0),(1,2)]},
{'name': 'var_5', 'type': 'discrete', 'domain': (0,1,2,3)}]
- Discrete domain
space =[ {'name': 'var_3', 'type': 'discrete', 'domain': (0,1,2,3)}]
{'name': 'var_3', 'type': 'discrete', 'domain': (-10,10)}]
- Mixed domain
space =[{'name': 'var_1', 'type': 'continuous', 'domain':(-1,1), 'dimensionality' :1},
{'name': 'var_4', 'type': 'continuous', 'domain':(-3,1), 'dimensionality' :2},
{'name': 'var_3', 'type': 'discrete', 'domain': (0,1,2,3)}]
Restrictions can be added to the problem. Each restriction is of the form c(x) <= 0 where c(x) is a function of
the input variables previously defined in the space. Restrictions should be written as a list
    of dictionaries. For instance, this is an example of a space coupled with a constraint
    space =[ {'name': 'var_1', 'type': 'continuous', 'domain':(-1,1), 'dimensionality' :2}]
    constraints = [ {'name': 'const_1', 'constraint': 'x[:,0]**2 + x[:,1]**2 - 1'}]
    If no constraints are provided, the hypercube determined by the bounds is used.
    Note about the internal representation of the variables: for variables whose dimensionality
    has been specified in the domain, a subindex is internally assigned. For instance, if a variable
    is called 'var1' and has dimensionality 3, the first three positions in the internal representation
    of the domain will be occupied by variables 'var1_1', 'var1_2' and 'var1_3'. If no dimensionality
    is given, the internal naming remains the same. For instance, in the example above 'var3'
    keeps its original name.
param space: list of dictionaries as indicated above.
param constraints: list of dictionaries as indicated above (default, none)
"""
supported_types = ['continuous', 'discrete', 'bandit','categorical']
def __init__(self, space, constraints=None, store_noncontinuous = False):
## --- Complete and expand attributes
self.store_noncontinuous = store_noncontinuous
self.config_space = space
## --- Transform input config space into the objects used to run the optimization
self._translate_space(self.config_space)
self._expand_space()
self._compute_variables_indices()
self._create_variables_dic()
## -- Compute raw and model dimensionalities
self.objective_dimensionality = len(self.space_expanded)
self.model_input_dims = [v.dimensionality_in_model for v in self.space_expanded]
self.model_dimensionality = sum(self.model_input_dims)
        # Because of a misspelling, the API used to expect "constrain" as the key.
        # The key is now "constraint", but the old form is still supported.
if constraints is not None:
for c in constraints:
if 'constrain' in c:
c['constraint'] = c['constrain']
self.constraints = constraints
@staticmethod
def fromConfig(space, constraints):
import six
from ast import literal_eval
for d in space:
if isinstance(d['dimensionality'],six.string_types):
d['dimensionality'] = int(d['dimensionality'])
d['domain'] = literal_eval(d['domain'])
return Design_space(space, None if len(constraints)==0 else constraints)
def _expand_config_space(self):
"""
        Expands the config input space into a list of dictionaries, one per variable,
        in which the dimensionality is always one.
        Example: it would transform
        config_space =[ {'name': 'var_1', 'type': 'continuous', 'domain':(-1,1), 'dimensionality':1},
                        {'name': 'var_2', 'type': 'continuous', 'domain':(-3,1), 'dimensionality':2}]
        into
        config_space_expanded =[ {'name': 'var_1_1', 'type': 'continuous', 'domain':(-1,1), 'dimensionality':1},
                                 {'name': 'var_2_1', 'type': 'continuous', 'domain':(-3,1), 'dimensionality':1},
                                 {'name': 'var_2_2', 'type': 'continuous', 'domain':(-3,1), 'dimensionality':1}]
"""
self.config_space_expanded = []
for variable in self.config_space:
variable_dic = variable.copy()
if 'dimensionality' in variable_dic.keys():
dimensionality = variable_dic['dimensionality']
variable_dic['dimensionality'] = 1
variables_set = [variable_dic.copy() for d in range(dimensionality)]
k=1
for variable in variables_set:
variable['name'] = variable['name'] + '_'+str(k)
k+=1
self.config_space_expanded += variables_set
else:
self.config_space_expanded += [variable_dic]
def _compute_variables_indices(self):
"""
Computes and saves the index location of each variable (as a list) in the objectives
space and in the model space. If no categorical variables are available, these two are
equivalent.
"""
counter_objective = 0
counter_model = 0
for variable in self.space_expanded:
variable.set_index_in_objective([counter_objective])
counter_objective +=1
            if variable.type != 'categorical':
variable.set_index_in_model([counter_model])
counter_model +=1
else:
num_categories = len(variable.domain)
variable.set_index_in_model(list(range(counter_model,counter_model + num_categories)))
counter_model +=num_categories
def find_variable(self,variable_name):
if variable_name not in self.name_to_variable.keys():
raise InvalidVariableNameError('Name of variable not in the input domain')
else:
return self.name_to_variable[variable_name]
def _create_variables_dic(self):
"""
Returns the variable by passing its name
"""
self.name_to_variable = {}
for variable in self.space_expanded:
self.name_to_variable[variable.name] = variable
def _translate_space(self, space):
"""
Translates a list of dictionaries into internal list of variables
"""
self.space = []
self.dimensionality = 0
        self.has_types = {t: False for t in self.supported_types}
for i, d in enumerate(space):
descriptor = deepcopy(d)
descriptor['name'] = descriptor.get('name', 'var_' + str(i))
descriptor['type'] = descriptor.get('type', 'continuous')
if 'domain' not in descriptor:
raise InvalidConfigError('Domain attribute is missing for variable ' + descriptor['name'])
variable = create_variable(descriptor)
self.space.append(variable)
self.dimensionality += variable.dimensionality
self.has_types[variable.type] = True
# Check if there are any bandit and non-bandit variables together in the space
if any(v.is_bandit() for v in self.space) and any(not v.is_bandit() for v in self.space):
raise InvalidConfigError('Invalid mixed domain configuration. Bandit variables cannot be mixed with other types.')
def _expand_space(self):
"""
Creates an internal list where the variables with dimensionality larger than one are expanded.
This list is the one that is used internally to do the optimization.
"""
## --- Expand the config space
self._expand_config_space()
## --- Expand the space
self.space_expanded = []
for variable in self.space:
self.space_expanded += variable.expand()
def objective_to_model(self, x_objective):
''' This function serves as interface between objective input vectors and
model input vectors'''
x_model = []
for k in range(self.objective_dimensionality):
variable = self.space_expanded[k]
new_entry = variable.objective_to_model(x_objective[0,k])
x_model += new_entry
return x_model
def unzip_inputs(self,X):
if self._has_bandit():
Z = X
else:
Z = []
for k in range(X.shape[0]):
Z.append(self.objective_to_model(X[k,:][None,:]))
return np.atleast_2d(Z)
def zip_inputs(self,X):
if self._has_bandit():
Z = X
else:
Z = []
for k in range(X.shape[0]):
Z.append(self.model_to_objective(X[k,:][None,:]))
return np.atleast_2d(Z)
def model_to_objective(self, x_model):
''' This function serves as interface between model input vectors and
objective input vectors
'''
idx_model = 0
x_objective = []
for idx_obj in range(self.objective_dimensionality):
variable = self.space_expanded[idx_obj]
new_entry = variable.model_to_objective(x_model, idx_model)
x_objective += new_entry
idx_model += variable.dimensionality_in_model
return x_objective
def has_constraints(self):
"""
Checks if the problem has constraints. Note that the coordinates of the constraints are defined
in terms of the model inputs and not in terms of the objective inputs. This means that if bandit or
        discrete variables are in place, the restrictions should reflect this fact (TODO: implement the
mapping of constraints defined on the objective to constraints defined on the model).
"""
return self.constraints is not None
def get_bounds(self):
"""
Extracts the bounds of all the inputs of the domain of the *model*
"""
bounds = []
for variable in self.space_expanded:
bounds += variable.get_bounds()
return bounds
def has_continuous(self):
"""
Returns `true` if the space contains at least one continuous variable, and `false` otherwise
"""
return any(v.is_continuous() for v in self.space)
def _has_bandit(self):
return any(v.is_bandit() for v in self.space)
def get_subspace(self, dims):
        '''
        Extracts the subspace formed by the variables whose model-input indices
        appear in ``dims``.
        '''
subspace = []
k = 0
for variable in self.space_expanded:
if k in dims:
subspace.append(variable)
k += variable.dimensionality_in_model
return subspace
def indicator_constraints(self,x):
"""
Returns array of ones and zeros indicating if x is within the constraints
"""
x = np.atleast_2d(x)
I_x = np.ones((x.shape[0],1))
if self.constraints is not None:
for d in self.constraints:
try:
exec('constraint = lambda x:' + d['constraint'], globals())
ind_x = (constraint(x) <= 0) * 1
I_x *= ind_x.reshape(x.shape[0],1)
except:
                    print('Failed to evaluate the constraint: ' + str(d))
raise
return I_x
def input_dim(self):
"""
Extracts the input dimension of the domain.
"""
n_cont = len(self.get_continuous_dims())
n_disc = len(self.get_discrete_dims())
return n_cont + n_disc
def round_optimum(self, x):
"""
Rounds some value x to a feasible value in the design space.
x is expected to be a vector or an array with a single row
"""
x = np.array(x)
if not ((x.ndim == 1) or (x.ndim == 2 and x.shape[0] == 1)):
raise ValueError("Unexpected dimentionality of x. Got {}, expected (1, N) or (N,)".format(x.ndim))
if x.ndim == 2:
x = x[0]
x_rounded = []
value_index = 0
for variable in self.space_expanded:
var_value = x[value_index : value_index + variable.dimensionality_in_model]
var_value_rounded = variable.round(var_value)
x_rounded.append(var_value_rounded)
value_index += variable.dimensionality_in_model
return np.atleast_2d(np.concatenate(x_rounded))
###
    ### ---- Attributes for the continuous variables
###
def get_continuous_bounds(self):
"""
Extracts the bounds of the continuous variables.
"""
bounds = []
for d in self.space:
if d.type == 'continuous':
bounds.extend([d.domain]*d.dimensionality)
return bounds
def get_continuous_dims(self):
"""
Returns the dimension of the continuous components of the domain.
"""
continuous_dims = []
for i in range(self.dimensionality):
if self.space_expanded[i].type == 'continuous':
continuous_dims += [i]
return continuous_dims
def get_continuous_space(self):
"""
Extracts the list of dictionaries with continuous components
"""
return [d for d in self.space if d.type == 'continuous']
###
    ### ---- Attributes for the discrete variables
###
def get_discrete_grid(self):
"""
Computes a Numpy array with the grid of points that results after crossing the possible outputs of the discrete
variables
"""
sets_grid = []
for d in self.space:
if d.type == 'discrete':
sets_grid.extend([d.domain]*d.dimensionality)
return np.array(list(itertools.product(*sets_grid)))
def get_discrete_dims(self):
"""
Returns the dimension of the discrete components of the domain.
"""
discrete_dims = []
for i in range(self.dimensionality):
if self.space_expanded[i].type == 'discrete':
discrete_dims += [i]
return discrete_dims
def get_discrete_space(self):
"""
        Extracts the list of dictionaries with discrete components
"""
return [d for d in self.space if d.type == 'discrete']
###
    ### ---- Attributes for the bandit variables
###
def get_bandit(self):
"""
Extracts the arms of the bandit if any.
"""
arms_bandit = []
for d in self.space:
if d.type == 'bandit':
arms_bandit += tuple(map(tuple, d.domain))
return np.asarray(arms_bandit)
def bounds_to_space(bounds):
"""
    Takes as input a list of tuples with bounds and creates a list of dictionaries to be
    processed by the Design_space class. This function is used to keep compatibility with
    previous versions of GPyOpt, in which only bounded continuous optimization was possible
    (and the optimization domain was passed as a list of tuples).
"""
space = []
for k in range(len(bounds)):
space += [{'name': 'var_'+str(k+1), 'type': 'continuous', 'domain':bounds[k], 'dimensionality':1}]
return space
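# A hedged usage sketch (names as defined above; output shapes depend on the
# variable implementations in .variables): build a mixed space, attach a
# constraint, and round a candidate point.
#
#     space = Design_space(
#         [{'name': 'var_1', 'type': 'continuous', 'domain': (-1, 1), 'dimensionality': 2},
#          {'name': 'var_2', 'type': 'discrete', 'domain': (0, 1, 2, 3)}],
#         constraints=[{'name': 'const_1', 'constraint': 'x[:,0]**2 + x[:,1]**2 - 1'}])
#     space.get_bounds()                            # bounds of the model inputs
#     space.indicator_constraints([[0.5, 0.5, 2]])  # 1 where c(x) <= 0 holds
#     space.round_optimum([0.2, 0.7, 1.6])          # discrete part gets rounded
#
# bounds_to_space() recovers the same structure from the legacy bounds-only format:
#
#     legacy = bounds_to_space([(-1, 1), (-3, 1)])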
|
import numpy as np
from . import settings
def wrap_180(values):
values_new = values % 360.
values_new[values_new > 180.] -= 360
return values_new
def find_coordinate_range(transform, extent, coord_types):
'''
Find the range of coordinates to use for ticks/grids
Parameters
----------
    transform : object
        Transform with a ``transform`` method that takes an (N, 2) array of
        pixel coordinates and returns an (N, 2) array of world coordinates.
extent : iterable
The range of the image viewport in pixel coordinates, given as [xmin,
xmax, ymin, ymax].
coord_types : list of str
Whether each coordinate is a ``'longitude'``, ``'latitude'``, or
``'scalar'`` value.
'''
# Sample coordinates on a NX x NY grid.
NX = NY = settings.COORDINATE_RANGE_SAMPLES
x = np.linspace(extent[0], extent[1], NX + 1)
y = np.linspace(extent[2], extent[3], NY + 1)
xp, yp = np.meshgrid(x, y)
world = transform.transform(np.vstack([xp.ravel(), yp.ravel()]).transpose())
ranges = []
for coord_index, coord_type in enumerate(coord_types):
xw = world[:, coord_index].reshape(xp.shape)
if coord_type in ['longitude', 'latitude']:
# Iron out coordinates along first row
wjump = xw[0, 1:] - xw[0, :-1]
reset = np.abs(wjump) > 180.
if np.any(reset):
wjump = wjump + np.sign(wjump) * 180.
wjump = 360. * (wjump / 360.).astype(int)
xw[0, 1:][reset] -= wjump[reset]
# Now iron out coordinates along all columns, starting with first row.
wjump = xw[1:] - xw[:1]
reset = np.abs(wjump) > 180.
if np.any(reset):
wjump = wjump + np.sign(wjump) * 180.
wjump = 360. * (wjump / 360.).astype(int)
xw[1:][reset] -= wjump[reset]
xw_min = np.nanmin(xw)
xw_max = np.nanmax(xw)
# Check if range is smaller when normalizing to the range 0 to 360
if coord_type in ['longitude', 'latitude']:
xw_min_check = np.min(xw % 360.)
xw_max_check = np.max(xw % 360.)
if xw_max - xw_min < 360. and xw_max - xw_min >= xw_max_check - xw_min_check:
xw_min = xw_min_check
xw_max = xw_max_check
# Check if range is smaller when normalizing to the range -180 to 180
if coord_type in ['longitude', 'latitude']:
xw_min_check = np.min(wrap_180(xw))
xw_max_check = np.max(wrap_180(xw))
if xw_max_check - xw_min_check < 360. and xw_max - xw_min >= xw_max_check - xw_min_check:
xw_min = xw_min_check
xw_max = xw_max_check
x_range = xw_max - xw_min
if coord_type == 'longitude':
if x_range > 300.:
xw_min = 0.
xw_max = 360 - np.spacing(360.)
elif xw_min < 0.:
xw_min = max(-180., xw_min - 0.1 * x_range)
xw_max = min(+180., xw_max + 0.1 * x_range)
else:
xw_min = max(0., xw_min - 0.1 * x_range)
xw_max = min(360., xw_max + 0.1 * x_range)
elif coord_type == 'latitude':
xw_min = max(-90., xw_min - 0.1 * x_range)
xw_max = min(+90., xw_max + 0.1 * x_range)
ranges.append((xw_min, xw_max))
return ranges
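# A hedged usage sketch: any object exposing a ``transform`` method over an
# (N, 2) array works; with an identity transform, the pixel extent is treated
# directly as world degrees and padded/wrapped by the logic above.
#
#     class IdentityTransform(object):
#         def transform(self, pixel):
#             return pixel
#
#     ranges = find_coordinate_range(IdentityTransform(), [10, 350, -80, 80],
#                                    ['longitude', 'latitude'])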
|
from collections import Counter
from datetime import datetime
from datetime import date
import enumerations
import pytz
import re
flags = re.IGNORECASE|re.MULTILINE|re.UNICODE
global tzinfo
tzinfo = pytz.UTC
month_to_number = enumerations.month_to_number
def num(text):
if text:
if text.isdigit():
return int(text)
else:
text_title = text.title()
if text_title in month_to_number:
return month_to_number[text_title]
def normalize_year(y):
current_year = date.today().year
    # run int(y) first to normalize the value (e.g. when the digits are Arabic-Indic numerals)
y_str = str(int(y)).rstrip('s').rstrip("'")
if len(y_str) == 2:
y_int = int(y_str)
if y_int > int(str(current_year+1)[2:]):
y_int = int(str(current_year-100)[0:2] + y_str)
else:
y_int = int(str(current_year)[0:2] + y_str)
return y_int
else:
return int(y)
def generate_patterns():
global patterns
patterns = {}
# iterate through the names of the variables in the enumerations
for key in dir(enumerations):
# ignore inherited methods that come with most python modules
# also ignore short variables of 1 length
if not key.startswith("__") and len(key) > 1 and isinstance(getattr(enumerations, key), list):
pattern = "(?:" + "|".join(getattr(enumerations, key)) + ")"
# check to see if pattern is in unicode
# if it's not convert it
if isinstance(pattern, str):
pattern = pattern.decode("utf-8")
patterns[key] = pattern
    # merge days of the month as numbers and as ordinals together
patterns['day'] = u'(?P<day>' + patterns['days_of_the_month_as_numbers'] + u'|' + patterns['days_of_the_month_as_ordinal'] + ')(?!\d{2,4})'
#merge months as regular name, abbreviation and number all together
# makes sure that it doesn't pull out 3 as the month in January 23, 2015
#patterns['month'] = u'(?<! \d)(?P<month>' + patterns['months_verbose'] + u'|' + patterns['months_abbreviated'] + u'|' + patterns['months_as_numbers'] + u"(?:\/" + patterns['months_verbose'] + u")?" + u')'
patterns['month'] = u'(?<! \d)(?P<month>' + patterns['months_verbose'] + u'|' + patterns['months_abbreviated'] + u'|' + patterns['months_as_numbers'] + u')' + u"(?:" + "/" + patterns['months_verbose'] + u")?"
# matches the year as two digits or four
# tried to match the four digits first
# (?!, \d{2,4}) makes sure it doesn't pick out 23 as the year in January 23, 2015
patterns['year'] = u"(?P<year>" + patterns['years'] + u")"
    # spaces or punctuation separating days, months and years
# blank space, comma, dash, period, backslash
# todo: write code for forward slash, an escape character
#patterns['punctuation'] = u"(?P<punctuation>, |:| |,|-|\.|\/|)"
patterns['punctuation'] = u"(?:, |:| |,|-|\.|\/|)"
patterns['punctuation_nocomma'] = u"(?: |-|\.|\/)"
#patterns['punctuation_second'] = u"\g<punctuation>"
patterns['punctuation_second'] = patterns['punctuation']
patterns['dmy'] = u"(?P<dmy>" + patterns['day'].replace("day", "day_dmy") + patterns['punctuation'].replace("punctuation","punctuation_dmy") + patterns['month'].replace("month","month_dmy") + patterns['punctuation_second'].replace("punctuation","punctuation_dmy") + patterns['year'].replace("year", "year_dmy") + u")" + u"(?!-\d{1,2})"
print "patterns_dmy is"
print patterns['dmy']
patterns['mdy'] = u"(?P<mdy>" + patterns['month'].replace("month", "month_mdy") + patterns['punctuation'].replace("punctuation","punctuation_mdy") + patterns['day'].replace("day","day_mdy") + "(?:" + patterns['punctuation_second'].replace("punctuation","punctuation_mdy") + "|, )" + patterns['year'].replace("mdy","year_mdy") + u")" + u"(?!-\d{1,2})"
patterns['ymd'] = u"(?<![\dA-Za-z])" + u"(?P<ymd>" + patterns['year'].replace("year","year_ymd") + patterns['punctuation'].replace("punctuation","punctuation_ymd") + patterns['month'].replace("month","month_ymd") + patterns['punctuation_second'].replace("punctuation","punctuation_ymd") + patterns['day'].replace("day","day_ymd") + u")" + u"(?!-\d{1,2}-\d{1,2})(?![\dA-Za-z])"
patterns['my'] = u"(?P<my>" + patterns['month'].replace("month","month_my") + patterns['punctuation_nocomma'] + patterns['year'].replace("year","year_my") + u")"
patterns['date'] = u"(?P<date>" + patterns['mdy'] + "|" + patterns['dmy'] + "|" + patterns['ymd'] + "|" + patterns['my'] + u")"
generate_patterns()
def date_from_dict(match):
    # ``match`` is a dict built from a regex groupdict (see extract_dates below),
    # so use dictionary access throughout instead of mixing in .group() calls
    month = match['month']
    if not isinstance(month, int):
        if month.isdigit():
            month = int(month)
        else:
            month = month_to_number[month.title()]
    try:
        day = int(match["day"])
    except (KeyError, TypeError, ValueError):
        day = 1
    try:
        return datetime(int(match["year"]), month, day, tzinfo=tzinfo)
    except Exception as e:
        print e
def is_date_in_list(date, list_of_dates):
return any((are_dates_same(date, d) for d in list_of_dates))
def are_dates_same(a,b):
for level in ("year","month","day"):
if level in a and level in b and a[level] != b[level]:
return False
return True
def remove_duplicates(seq):
seen = set()
seen_add = seen.add
return [ x for x in seq if not (x in seen or seen_add(x))]
def extract_dates(text, sorting=None):
global patterns
# convert to unicode if the text is in a bytestring
    # we convert to unicode because it is easier to work with
# and it handles text in foreign languages much better
if isinstance(text, str):
text = text.decode('utf-8')
matches = []
completes = []
partials = []
for match in re.finditer(re.compile(patterns['date'], flags), text):
# print "match is", match.groupdict()
        # this goes through the dictionary, removes empties and changes the keys back, e.g. from month_mdy to month
match = dict((k.split("_")[0], num(v)) for k, v in match.groupdict().iteritems() if num(v))
if all(k in match for k in ("day","month", "year")):
completes.append(match)
else:
partials.append(match)
#print "\ncompletes are", completes
# iterate through partials
# if a more specific date is given in the completes, drop the partial
# for example if Feb 1, 2014 is picked up and February 2014, too, drop February 2014
partials = [partial for partial in partials if not is_date_in_list(partial, completes)]
#print "\npartials are", partials
# convert completes and partials and return list ordered by:
# complete/partial, most common, most recent
completes = [datetime(normalize_year(d['year']),int(d['month']),int(d['day'])) for d in completes]
if sorting:
counter = Counter(completes)
completes = remove_duplicates(sorted(completes, key = lambda x: (counter[x], x.toordinal()), reverse=True))
#average_date = mean([d for d in completes])
return completes
e=extract_dates
def getFirstDateFromText(text):
    global patterns
    # convert to unicode if the text is in a bytestring
    # we convert to unicode because it is easier to work with
    # and it handles text in foreign languages much better
    if isinstance(text, str):
        text = text.decode('utf-8')
    for match in re.finditer(re.compile(patterns['date'], flags), text):
        if not isDefinitelyNotDate(match.group(0)):
            match = dict((k.split("_")[0], num(v)) for k, v in match.groupdict().iteritems() if num(v))
            if all(k in match for k in ("day", "month", "year")):
                return datetime(normalize_year(match['year']), int(match['month']), int(match['day']), tzinfo=tzinfo)
g = getPageDate = getFirstDateFromText
def isDefinitelyNotDate(text):
if re.match("\d{1,2}-\d{1,2}.\d{1,2}",text, flags=flags):
return True
return False
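# A hedged usage sketch (assuming the local ``enumerations`` module is importable,
# as required above); this file targets Python 2, hence the print statements:
#
#     print extract_dates("The meeting moved from January 23, 2015 to 2015-02-01.")
#     print getFirstDateFromText("Posted on 12 March 2014 by admin")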
|
from sympycore.arithmetic.evalf import *
from sympycore.arithmetic.evalf import mpmath, compile_mpmath
from sympycore.calculus import Symbol, I, Number, Exp, Sin, Cos, E, pi
import math
import cmath
def test_evalf():
expr1 = Number(1)/3
expr2 = Sin(E)**2 + Cos(E)**2 - 1
expr3 = Exp(I) - Cos(1) - I*Sin(1)
assert abs(pi.evalf(15) - math.pi) < 1e-14, str( abs(pi.evalf(15) - math.pi))
assert abs(expr1.evalf(30) - expr1) < 1e-29
    assert abs(expr2.evalf(30)) < 1e-29, repr(abs(expr2.evalf(30)))
assert abs(expr2.evalf(100)) < 1e-99
assert abs(expr2.evalf(300)) < 1e-99
#assert abs(expr3.evalf(20)) < 1e-19
def test_compiled():
x = Symbol('x')
y = Symbol('y')
f1 = compile_mpmath([], Exp(2))
f2 = compile_mpmath('x', Exp(x))
f3 = compile_mpmath(['x', 'y'], Cos(x)+Sin(y)*I)
mpmath.mp.dps = 15
assert abs(f1() - math.exp(2)) < 1e-14
assert abs(f2(2) - math.exp(2)) < 1e-14
assert abs(f3(3,4) - (cmath.cos(3)+cmath.sin(4)*1j)) < 1e-14
|
from pcaspy.cas import gdd
import time
while True:
gddValue = gdd()
gddValue.put(range(1000000))
v = gddValue.get()
gddCtrl = gdd.createDD(34) # gddAppType_dbr_ctrl_double
gddCtrl[1].put('eV')
gddCtrl[2].put(0)
gddCtrl[3].put(1)
gddCtrl[4].put(0)
gddCtrl[5].put(1)
gddCtrl[6].put(0)
gddCtrl[7].put(1)
gddCtrl[8].put(0)
gddCtrl[9].put(1)
gddCtrl[10].put(3)
gddCtrl[11].put(gddValue)
time.sleep(0.1)
|
import datetime
from dateutil import tz
def current_update():
""" Return a datetime object representing the current Canadian
Weather Office forecast update """
# The data is updated 5:00 UTC and 17:00 UTC every day. We need
# to see which update to get.
hour = 5
date = datetime.datetime.now().replace(
tzinfo=tz.gettz('America/Toronto'))
# XXX: We're adding 3 to all of these because the forecasts
# start 3 hours out from the update — none of these are current
# conditions.
if date.hour > (5 + 3) and date.hour < (17 + 3):
# The datetime of the 00 Canadian Weather Office update
date = date.replace(hour=0, minute=0, second=0, microsecond=0)
else:
        # Roll back one day if we're before the morning update; timedelta
        # handles month boundaries, unlike replace(day=date.day - 1).
        if date.hour < (5 + 3):
            date = date - datetime.timedelta(days=1)
# The datetime of the 12 Canadian Weather Office update
date = date.replace(hour=12, minute=0, second=0, microsecond=0)
return date
def next_update():
""" Return a datetime object representing the next Canadian Weather
Office forecast update """
return current_update() + datetime.timedelta(hours=12)
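if __name__ == '__main__':
    # Minimal usage sketch: show the current and next forecast update times.
    print('current update:', current_update())
    print('next update:', next_update())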
|
'''active knives'''
from threading import local
from collections import deque
from contextlib import contextmanager
from stuf.utils import clsname
from knife._compat import loads, optimize
class _ActiveMixin(local):
'''active knife mixin'''
def __init__(self, *things, **kw):
'''
Initialize :mod:`knife`.
:argument things: incoming things
:keyword integer snapshots: snapshots to keep (default: ``5``)
'''
incoming = deque()
incoming.extend(things)
super(_ActiveMixin, self).__init__(incoming, deque(), **kw)
# working things
self._work = deque()
# holding things
self._hold = deque()
@property
@contextmanager
def _chain(self, d=optimize):
# take snapshot
snapshot = d(self._in)
# rebalance incoming with outcoming
if self._history:
self._in.clear()
self._in.extend(self._out)
# make snapshot original snapshot?
else:
self._original = snapshot
# place snapshot at beginning of snapshot stack
self._history.appendleft(snapshot)
# move incoming things to working things
self._work.extend(self._in)
yield
out = self._out
# clear outgoing things
out.clear()
# extend outgoing things with holding things
out.extend(self._hold)
# clear working things
self._work.clear()
# clear holding things
self._hold.clear()
@property
def _iterable(self):
# derived from Raymond Hettinger Python Cookbook recipe # 577155
call = self._work.popleft
try:
while 1:
yield call()
except IndexError:
pass
def _append(self, thing):
# append thing after other holding things
self._hold.append(thing)
return self
def _xtend(self, things):
# place things after holding things
self._hold.extend(things)
return self
def _prependit(self, things, d=optimize):
# take snapshot
snapshot = d(self._in)
# make snapshot original snapshot?
if self._original is None:
self._original = snapshot
# place snapshot at beginning of snapshot stack
self._history.appendleft(snapshot)
# place thing before other holding things
self._in.extendleft(reversed(things))
return self
def _appendit(self, things, d=optimize):
# take snapshot
snapshot = d(self._in)
# make snapshot original snapshot?
if self._original is None:
self._original = snapshot
# place snapshot at beginning of snapshot stack
self._history.appendleft(snapshot)
# place things after other incoming things
self._in.extend(things)
return self
def _pipeit(self, knife):
knife.clear()
knife._history.clear()
knife._history.extend(self._history)
knife._original = self._original
knife._baseline = self._baseline
knife._out.extend(self._out)
knife._worker = self._worker
knife._args = self._args
knife._kw = self._kw
knife._wrapper = self._wrapper
knife._pipe = self
return knife
def _unpipeit(self):
piped = self._pipe
piped.clear()
piped._history.clear()
piped._history.extend(self._history)
piped._original = self._original
piped._baseline = self._baseline
piped._out.extend(self._out)
piped._worker = self._worker
piped._args = self._args
piped._kw = self._kw
piped._wrapper = self._wrapper
self.clear()
return piped
def _repr(self, clsname_=clsname, list_=list):
# object representation
return self._REPR.format(
self.__module__,
clsname_(self),
list_(self._in),
list_(self._work),
list_(self._hold),
list_(self._out),
)
def _len(self, len=len):
# length of incoming things
return len(self._in)
class _OutMixin(_ActiveMixin):
'''active output mixin'''
def _undo(self, snapshot=0, loads_=loads):
# clear everything
self.clear()
# if specified, use a specific snapshot
if snapshot:
self._history.rotate(-(snapshot - 1))
self._in.extend(loads_(self._history.popleft()))
return self
def _snapshot(self, d=optimize):
# take baseline snapshot of incoming things
self._baseline = d(self._in)
return self
def _rollback(self, loads_=loads):
# clear everything
self.clear()
# clear snapshots
self._clearsp()
# revert to baseline snapshot of incoming things
self._in.extend(loads_(self._baseline))
return self
def _revert(self, loads_=loads):
# clear everything
self.clear()
# clear snapshots
self._clearsp()
# clear baseline
self._baseline = None
# restore original snapshot of incoming things
self._in.extend(loads_(self._original))
return self
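    # A short summary of the snapshot semantics above (illustrative):
    #   _undo()     restores a snapshot from the history stack
    #   _snapshot() records a baseline for _rollback() to return to
    #   _rollback() clears snapshots and restores the baseline
    #   _revert()   clears everything and restores the original snapshot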
def _clear(self, list_=list):
# clear worker
self._worker = None
# clear worker positional arguments
self._args = ()
# clear worker keyword arguments
self._kw = {}
# default iterable wrapper
self._wrapper = list_
# clear pipe
self._pipe = None
# clear incoming things
self._in.clear()
# clear working things
self._work.clear()
# clear holding things
self._hold.clear()
# clear outgoing things
self._out.clear()
return self
def _iterate(self, iter_=iter):
return iter_(self._out)
def _peek(self, len_=len, list_=list):
wrap, out = self._wrapper, self._in
value = list_(wrap(i) for i in out) if self._each else wrap(out)
self._each = False
self._wrapper = list_
return value[0] if len_(value) == 1 else value
def _get(self, len_=len, list_=list):
wrap, out = self._wrapper, self._out
value = list_(wrap(i) for i in out) if self._each else wrap(out)
self._each = False
self._wrapper = list_
return value[0] if len_(value) == 1 else value
|
from functools import wraps
from urllib.parse import urlparse
from django.conf import settings
from django.contrib.auth.mixins import PermissionRequiredMixin as DjangoPermissionRequiredMixin
from django.contrib.auth.models import Permission
from django.core.exceptions import PermissionDenied
from django.shortcuts import get_object_or_404, resolve_url
DEFAULT_403 = getattr(settings, 'DJEM_DEFAULT_403', False)
def get_user_log_verbosity():
return getattr(settings, 'DJEM_PERM_LOG_VERBOSITY', 0)
class ObjectPermissionsBackend:
def authenticate(self, *args, **kwargs):
return None
def _get_model_permission(self, perm, user_obj):
verbosity = get_user_log_verbosity()
if not verbosity:
access = user_obj.has_perm(perm)
else:
access = user_obj.logged_has_perm(perm)
# Add the log from the model-level check to the log for the
# object-level check, replacing the "result" line
log = user_obj.get_last_log(raw=True)
log.pop()
log.append('Model-level Result: {}\n'.format('Granted' if access else 'Denied'))
user_obj.log(*log)
return access
def _get_object_permission(self, perm, user_obj, obj, from_name):
"""
Test if a user has a permission on a specific model object.
``from_name`` can be either "user" or "group", to determine permissions
using the user object itself or the groups it belongs to, respectively.
"""
if not user_obj.is_active: # pragma: no cover
# An inactive user won't normally get this far as they would not
# pass the model-level permissions check
return False
try:
perm_cache = user_obj._olp_cache
except AttributeError:
# OLP cache dictionary will not exist by default if not using
# OLPMixin and no permissions have yet been checked on this user
# object
perm_cache = user_obj._olp_cache = {}
perm_cache_name = '{0}-{1}-{2}'.format(from_name, perm, obj.pk)
if perm_cache_name not in perm_cache:
access_fn_name = '_{0}_can_{1}'.format(
from_name,
perm.split('.')[-1]
)
access_fn = getattr(obj, access_fn_name, None)
if not access_fn:
# No function defined on obj to determine access - assume
# access should be granted if no explicit object-level logic
# exists to determine otherwise
access = None
else:
try:
if from_name == 'user':
access = access_fn(user_obj)
else:
access = access_fn(user_obj.groups.all())
except PermissionDenied:
access = False
perm_cache[perm_cache_name] = access
return perm_cache[perm_cache_name]
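    # A hypothetical model sketch illustrating the access-method naming
    # convention used above: for the permission 'polls.change_poll', the
    # backend looks for '_user_can_change_poll' (or '_group_can_change_poll')
    # on the object being checked:
    #
    #   class Poll(models.Model):
    #       def _user_can_change_poll(self, user):
    #           return self.owner_id == user.pk
    #
    #       def _group_can_change_poll(self, groups):
    #           return groups.filter(name='editors').exists()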
def _get_object_permissions(self, user_obj, obj, from_name=None):
"""
Return a set of the permissions a user has on a specific model object.
``from_name`` can be either "user" or "group", to determine permissions
using the user object itself or the groups it belongs to, respectively.
        It can also be None to determine the user's permissions from both sources.
"""
if not obj or not user_obj.is_active or user_obj.is_anonymous:
return set()
perms_for_model = Permission.objects.filter(
content_type__app_label=obj._meta.app_label,
content_type__model=obj._meta.model_name,
).values_list('content_type__app_label', 'codename')
perms_for_model = ['{0}.{1}'.format(app, name) for app, name in perms_for_model]
if user_obj.is_superuser and not getattr(settings, 'DJEM_UNIVERSAL_OLP', False):
# Superusers get all permissions, regardless of obj or from_name,
# unless using "universal" OLP, in which case they are subject to
# the same OLP logic as regular users
perms = set(perms_for_model)
else:
# If using any level of automated logging for permissions, create a
# temporary log to act as the target for any log entries (either
# automatic entries appended by this backend or any manual entries
# that may be present in object-level access methods). The usual
# automatic log started as part of OLPMixin.has_perm() will not
# have been created, so this acts as a replacement.
log_verbosity = get_user_log_verbosity()
if log_verbosity:
user_obj.start_log('temp-{0}'.format(obj.pk))
perms = set()
for perm in perms_for_model:
if not self._get_model_permission(perm, user_obj):
continue
user_access = None
group_access = None
# Check user first, unless only checking for group
if from_name != 'group':
user_access = self._get_object_permission(perm, user_obj, obj, 'user')
# Check group if user didn't grant the permission, unless only
# checking for user
if not user_access and from_name != 'user':
group_access = self._get_object_permission(perm, user_obj, obj, 'group')
# The permission is granted if either of the user or group
# checks grant it, or if neither of them have a defined
# object-level access method
if user_access or group_access or (user_access is None and group_access is None):
perms.add(perm)
# Remove the temporary log, if one was created
if log_verbosity:
user_obj.discard_log()
return perms
def get_user_permissions(self, user_obj, obj=None):
return self._get_object_permissions(user_obj, obj, 'user')
def get_group_permissions(self, user_obj, obj=None):
return self._get_object_permissions(user_obj, obj, 'group')
def get_all_permissions(self, user_obj, obj=None):
return self._get_object_permissions(user_obj, obj)
def has_perm(self, user_obj, perm, obj=None):
if not obj:
return False # not dealing with non-object permissions
if not self._get_model_permission(perm, user_obj):
return False
user_access = self._get_object_permission(perm, user_obj, obj, 'user')
group_access = None
# Check group if user didn't grant the permission
if not user_access:
group_access = self._get_object_permission(perm, user_obj, obj, 'group')
# The permission is granted if either of the user or group
# checks grant it, or if neither of them have a defined
# object-level access method
return user_access or group_access or (user_access is None and group_access is None)
def _check_perms(perms, user, view_kwargs):
for perm in perms:
if isinstance(perm, str):
obj = None
else:
perm, obj_arg = perm # expand two-tuple
obj_pk = view_kwargs[obj_arg]
# Get the model this permission belongs to
try:
perm_app, perm_code = perm.split('.')
perm_obj = Permission.objects.get(
content_type__app_label=perm_app,
codename=perm_code
)
except (ValueError, Permission.DoesNotExist):
# Treat malformed (missing a '.') or non-existent
# permission names as permission denied
raise PermissionDenied
model = perm_obj.content_type.model_class()
# Get the object instance using the inferred model and the
# primary key passed to the view
obj = get_object_or_404(model, pk=obj_pk)
# Swap out the primary key with the instance itself in the view
# kwargs, so the view doesn't have to query for it again
view_kwargs[obj_arg] = obj
if not user.has_perm(perm, obj):
raise PermissionDenied
def permission_required(*perms, **kwargs):
"""
Replacement for Django's ``permission_required`` decorator, providing
support for object-level permissions. Instead of accepting either a string
or an iterable of strings naming the permission/s to check, this version
accepts multiple positional arguments, one for each permission to check.
These arguments can be either strings or two-tuples. If two-tuples, the
items should be:
- a string naming the permission to check (in the <app label>.<permission code>
format)
- a string naming the keyword argument of the view that contains the
primary key of the object to check the permission against
Behaviour of the ``login_url`` and ``raise_exception`` keyword arguments is
as per the original, except that the default value for ``raise_exception``
can be specified with the ``DJEM_DEFAULT_403`` setting.
"""
login_url = kwargs.pop('login_url', None)
raise_exception = kwargs.pop('raise_exception', DEFAULT_403)
def decorator(view_func):
@wraps(view_func)
def _wrapped_view(request, *args, **kwargs):
# First, check if the user has the permission (even anon users)
try:
_check_perms(perms, request.user, kwargs)
except PermissionDenied:
# In case the 403 handler should be called, raise the exception
if raise_exception:
raise
else:
return view_func(request, *args, **kwargs)
# As the last resort, show the login form
path = request.build_absolute_uri()
resolved_login_url = resolve_url(login_url or settings.LOGIN_URL)
# If the login url is the same scheme and net location then just
# use the path as the "next" url.
login_scheme, login_netloc = urlparse(resolved_login_url)[:2]
current_scheme, current_netloc = urlparse(path)[:2]
if ((not login_scheme or login_scheme == current_scheme)
and (not login_netloc or login_netloc == current_netloc)):
path = request.get_full_path()
from django.contrib.auth.views import redirect_to_login
return redirect_to_login(path, resolved_login_url)
return _wrapped_view
return decorator
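# A hedged usage sketch of the decorator above; the view, permission names,
# and the 'question_pk' URL kwarg are hypothetical:
#
#   @permission_required('polls.add_question', ('polls.change_question', 'question_pk'))
#   def edit_question(request, question_pk):
#       # 'question_pk' now holds the Question instance itself, not the pk
#       ...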
class PermissionRequiredMixin(DjangoPermissionRequiredMixin):
"""
CBV mixin which verifies that the current user has all specified
permissions, on the specified object where applicable.
"""
raise_exception = DEFAULT_403
def has_permission(self, view_kwargs):
perms = self.get_permission_required()
try:
_check_perms(perms, self.request.user, view_kwargs)
except PermissionDenied:
return False
else:
return True
# Overridden to pass kwargs to has_permission() and skip the immediate
# parent's dispatch() when calling the super method (because it attempts
# to call has_permission without the kwargs).
def dispatch(self, request, *args, **kwargs):
if not self.has_permission(kwargs):
return self.handle_no_permission()
# Skip DjangoPermissionRequiredMixin.dispatch() and call *its* parent directly
return super(DjangoPermissionRequiredMixin, self).dispatch(request, *args, **kwargs)
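# A corresponding sketch for the mixin (names hypothetical): two-tuples work
# the same way as in the decorator above.
#
#   class QuestionUpdate(PermissionRequiredMixin, UpdateView):
#       model = Question
#       permission_required = [('polls.change_question', 'question_pk')]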
|
""" Vanilla RNN
@author Graham Taylor
"""
import numpy as np
import theano
import theano.tensor as T
from sklearn.base import BaseEstimator
import logging
import time
import os
import datetime
import pickle as pickle
import math
import matplotlib.pyplot as plt
plt.ion()
mode = theano.Mode(linker='cvm')
class RNN(object):
""" Recurrent neural network class
Supported output types:
real : linear output units, use mean-squared error
binary : binary output units, use cross-entropy error
softmax : single softmax out, use cross-entropy error
"""
def __init__(self, input, n_in, n_hidden, n_out, activation=T.tanh,
output_type='real', use_symbolic_softmax=False):
self.input = input
self.activation = activation
self.output_type = output_type
# when using HF, SoftmaxGrad.grad is not implemented
# use a symbolic softmax which is slightly slower than T.nnet.softmax
# See: http://groups.google.com/group/theano-dev/browse_thread/
# thread/3930bd5a6a67d27a
if use_symbolic_softmax:
def symbolic_softmax(x):
e = T.exp(x)
return e / T.sum(e, axis=1).dimshuffle(0, 'x')
self.softmax = symbolic_softmax
else:
self.softmax = T.nnet.softmax
# recurrent weights as a shared variable
W_init = np.asarray(np.random.uniform(size=(n_hidden, n_hidden),
low=-.01, high=.01),
dtype=theano.config.floatX)
self.W = theano.shared(value=W_init, name='W')
# input to hidden layer weights
W_in_init = np.asarray(np.random.uniform(size=(n_in, n_hidden),
low=-.01, high=.01),
dtype=theano.config.floatX)
self.W_in = theano.shared(value=W_in_init, name='W_in')
# hidden to output layer weights
W_out_init = np.asarray(np.random.uniform(size=(n_hidden, n_out),
low=-.01, high=.01),
dtype=theano.config.floatX)
self.W_out = theano.shared(value=W_out_init, name='W_out')
h0_init = np.zeros((n_hidden,), dtype=theano.config.floatX)
self.h0 = theano.shared(value=h0_init, name='h0')
bh_init = np.zeros((n_hidden,), dtype=theano.config.floatX)
self.bh = theano.shared(value=bh_init, name='bh')
by_init = np.zeros((n_out,), dtype=theano.config.floatX)
self.by = theano.shared(value=by_init, name='by')
self.params = [self.W, self.W_in, self.W_out, self.h0,
self.bh, self.by]
        # for every parameter, we maintain its last update
# the idea here is to use "momentum"
# keep moving mostly in the same direction
self.updates = {}
for param in self.params:
init = np.zeros(param.get_value(borrow=True).shape,
dtype=theano.config.floatX)
self.updates[param] = theano.shared(init)
# recurrent function (using tanh activation function) and linear output
# activation function
def step(x_t, h_tm1):
h_t = self.activation(T.dot(x_t, self.W_in) + \
T.dot(h_tm1, self.W) + self.bh)
y_t = T.dot(h_t, self.W_out) + self.by
return h_t, y_t
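        # In equation form, step() computes:
        #   h_t = activation(x_t . W_in + h_{t-1} . W + b_h)
        #   y_t = h_t . W_out + b_y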
# the hidden state `h` for the entire sequence, and the output for the
# entire sequence `y` (first dimension is always time)
[self.h, self.y_pred], _ = theano.scan(step,
sequences=self.input,
outputs_info=[self.h0, None])
# L1 norm ; one regularization option is to enforce L1 norm to
# be small
self.L1 = 0
self.L1 += abs(self.W.sum())
self.L1 += abs(self.W_in.sum())
self.L1 += abs(self.W_out.sum())
# square of L2 norm ; one regularization option is to enforce
# square of L2 norm to be small
self.L2_sqr = 0
self.L2_sqr += (self.W ** 2).sum()
self.L2_sqr += (self.W_in ** 2).sum()
self.L2_sqr += (self.W_out ** 2).sum()
if self.output_type == 'real':
self.loss = lambda y: self.mse(y)
elif self.output_type == 'binary':
# push through sigmoid
self.p_y_given_x = T.nnet.sigmoid(self.y_pred) # apply sigmoid
self.y_out = T.round(self.p_y_given_x) # round to {0,1}
self.loss = lambda y: self.nll_binary(y)
elif self.output_type == 'softmax':
# push through softmax, computing vector of class-membership
# probabilities in symbolic form
self.p_y_given_x = self.softmax(self.y_pred)
# compute prediction as class whose probability is maximal
self.y_out = T.argmax(self.p_y_given_x, axis=-1)
self.loss = lambda y: self.nll_multiclass(y)
else:
raise NotImplementedError
def mse(self, y):
# error between output and target
return T.mean((self.y_pred - y) ** 2)
def nll_binary(self, y):
# negative log likelihood based on binary cross entropy error
return T.mean(T.nnet.binary_crossentropy(self.p_y_given_x, y))
def nll_multiclass(self, y):
# negative log likelihood based on multiclass cross entropy error
# y.shape[0] is (symbolically) the number of rows in y, i.e.,
# number of time steps (call it T) in the sequence
# T.arange(y.shape[0]) is a symbolic vector which will contain
# [0,1,2,... n-1] T.log(self.p_y_given_x) is a matrix of
# Log-Probabilities (call it LP) with one row per example and
# one column per class LP[T.arange(y.shape[0]),y] is a vector
# v containing [LP[0,y[0]], LP[1,y[1]], LP[2,y[2]], ...,
# LP[n-1,y[n-1]]] and T.mean(LP[T.arange(y.shape[0]),y]) is
# the mean (across minibatch examples) of the elements in v,
# i.e., the mean log-likelihood across the minibatch.
return -T.mean(T.log(self.p_y_given_x)[T.arange(y.shape[0]), y])
def errors(self, y):
"""Return a float representing the number of errors in the sequence
over the total number of examples in the sequence ; zero one
loss over the size of the sequence
:type y: theano.tensor.TensorType
:param y: corresponds to a vector that gives for each example the
correct label
"""
# check if y has same dimension of y_pred
if y.ndim != self.y_out.ndim:
raise TypeError('y should have the same shape as self.y_out',
('y', y.type, 'y_out', self.y_out.type))
if self.output_type in ('binary', 'softmax'):
# check if y is of the correct datatype
if y.dtype.startswith('int'):
# the T.neq operator returns a vector of 0s and 1s, where 1
# represents a mistake in prediction
return T.mean(T.neq(self.y_out, y))
else:
raise NotImplementedError()
class MetaRNN(BaseEstimator):
def __init__(self, n_in=5, n_hidden=50, n_out=5, learning_rate=0.01,
n_epochs=100, L1_reg=0.00, L2_reg=0.00, learning_rate_decay=1,
activation='tanh', output_type='real',
final_momentum=0.9, initial_momentum=0.5,
momentum_switchover=5,
use_symbolic_softmax=False):
self.n_in = int(n_in)
self.n_hidden = int(n_hidden)
self.n_out = int(n_out)
self.learning_rate = float(learning_rate)
self.learning_rate_decay = float(learning_rate_decay)
self.n_epochs = int(n_epochs)
self.L1_reg = float(L1_reg)
self.L2_reg = float(L2_reg)
self.activation = activation
self.output_type = output_type
self.initial_momentum = float(initial_momentum)
self.final_momentum = float(final_momentum)
self.momentum_switchover = int(momentum_switchover)
self.use_symbolic_softmax = use_symbolic_softmax
self.ready()
def ready(self):
# input (where first dimension is time)
self.x = T.matrix()
# target (where first dimension is time)
if self.output_type == 'real':
self.y = T.matrix(name='y', dtype=theano.config.floatX)
elif self.output_type == 'binary':
self.y = T.matrix(name='y', dtype='int32')
elif self.output_type == 'softmax': # only vector labels supported
self.y = T.vector(name='y', dtype='int32')
else:
raise NotImplementedError
# initial hidden state of the RNN
self.h0 = T.vector()
# learning rate
self.lr = T.scalar()
if self.activation == 'tanh':
activation = T.tanh
elif self.activation == 'sigmoid':
activation = T.nnet.sigmoid
elif self.activation == 'relu':
activation = lambda x: x * (x > 0)
elif self.activation == 'cappedrelu':
activation = lambda x: T.minimum(x * (x > 0), 6)
else:
raise NotImplementedError
self.rnn = RNN(input=self.x, n_in=self.n_in,
n_hidden=self.n_hidden, n_out=self.n_out,
activation=activation, output_type=self.output_type,
use_symbolic_softmax=self.use_symbolic_softmax)
if self.output_type == 'real':
self.predict = theano.function(inputs=[self.x, ],
outputs=self.rnn.y_pred,
mode=mode)
elif self.output_type == 'binary':
self.predict_proba = theano.function(inputs=[self.x, ],
outputs=self.rnn.p_y_given_x, mode=mode)
self.predict = theano.function(inputs=[self.x, ],
outputs=T.round(self.rnn.p_y_given_x),
mode=mode)
elif self.output_type == 'softmax':
self.predict_proba = theano.function(inputs=[self.x, ],
outputs=self.rnn.p_y_given_x, mode=mode)
self.predict = theano.function(inputs=[self.x, ],
outputs=self.rnn.y_out, mode=mode)
else:
raise NotImplementedError
def shared_dataset(self, data_xy):
""" Load the dataset into shared variables """
data_x, data_y = data_xy
shared_x = theano.shared(np.asarray(data_x,
dtype=theano.config.floatX))
shared_y = theano.shared(np.asarray(data_y,
dtype=theano.config.floatX))
if self.output_type in ('binary', 'softmax'):
return shared_x, T.cast(shared_y, 'int32')
else:
return shared_x, shared_y
def __getstate__(self):
""" Return state sequence."""
params = self._get_params() # parameters set in constructor
weights = [p.get_value() for p in self.rnn.params]
state = (params, weights)
return state
def _set_weights(self, weights):
""" Set fittable parameters from weights sequence.
Parameters must be in the order defined by self.params:
W, W_in, W_out, h0, bh, by
"""
i = iter(weights)
for param in self.rnn.params:
            param.set_value(next(i))
def __setstate__(self, state):
""" Set parameters from state sequence.
Parameters must be in the order defined by self.params:
W, W_in, W_out, h0, bh, by
"""
params, weights = state
self.set_params(**params)
self.ready()
self._set_weights(weights)
def save(self, fpath='.', fname=None):
""" Save a pickled representation of Model state. """
fpathstart, fpathext = os.path.splitext(fpath)
if fpathext == '.pkl':
# User supplied an absolute path to a pickle file
fpath, fname = os.path.split(fpath)
elif fname is None:
# Generate filename based on date
date_obj = datetime.datetime.now()
date_str = date_obj.strftime('%Y-%m-%d-%H:%M:%S')
class_name = self.__class__.__name__
fname = '%s.%s.pkl' % (class_name, date_str)
fabspath = os.path.join(fpath, fname)
logging.info("Saving to %s ..." % fabspath)
file = open(fabspath, 'wb')
state = self.__getstate__()
pickle.dump(state, file, protocol=pickle.HIGHEST_PROTOCOL)
file.close()
def load(self, path):
""" Load model parameters from path. """
logging.info("Loading from %s ..." % path)
file = open(path, 'rb')
state = pickle.load(file)
self.__setstate__(state)
file.close()
def fit(self, X_train, Y_train, X_test=None, Y_test=None,
validation_frequency=100):
""" Fit model
Pass in X_test, Y_test to compute test error and report during
training.
X_train : ndarray (n_seq x n_steps x n_in)
Y_train : ndarray (n_seq x n_steps x n_out)
validation_frequency : int
in terms of number of sequences (or number of weight updates)
"""
        f = open('trainOutput.txt', 'a+')
if X_test is not None:
assert(Y_test is not None)
self.interactive = True
test_set_x, test_set_y = self.shared_dataset((X_test, Y_test))
else:
self.interactive = False
train_set_x, train_set_y = self.shared_dataset((X_train, Y_train))
n_train = train_set_x.get_value(borrow=True).shape[0]
if self.interactive:
n_test = test_set_x.get_value(borrow=True).shape[0]
######################
# BUILD ACTUAL MODEL #
######################
logging.info('... building the model')
index = T.lscalar('index') # index to a case
# learning rate (may change)
l_r = T.scalar('l_r', dtype=theano.config.floatX)
mom = T.scalar('mom', dtype=theano.config.floatX) # momentum
cost = self.rnn.loss(self.y) \
+ self.L1_reg * self.rnn.L1 \
+ self.L2_reg * self.rnn.L2_sqr
compute_train_error = theano.function(inputs=[index, ],
outputs=self.rnn.loss(self.y),
givens={
self.x: train_set_x[index],
self.y: train_set_y[index]},
mode=mode)
if self.interactive:
compute_test_error = theano.function(inputs=[index, ],
outputs=self.rnn.loss(self.y),
givens={
self.x: test_set_x[index],
self.y: test_set_y[index]},
mode=mode)
# compute the gradient of cost with respect to theta = (W, W_in, W_out)
# gradients on the weights using BPTT
gparams = []
for param in self.rnn.params:
gparam = T.grad(cost, param)
gparams.append(gparam)
updates = {}
for param, gparam in zip(self.rnn.params, gparams):
weight_update = self.rnn.updates[param]
upd = mom * weight_update - l_r * gparam
updates[weight_update] = upd
updates[param] = param + upd
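        # In equation form: upd_t = mom * upd_{t-1} - l_r * grad_t, then
        # param_t = param_{t-1} + upd_t (classical momentum).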
# compiling a Theano function `train_model` that returns the
# cost, but in the same time updates the parameter of the
# model based on the rules defined in `updates`
train_model = theano.function(inputs=[index, l_r, mom],
outputs=cost,
updates=updates,
givens={
self.x: train_set_x[index],
self.y: train_set_y[index]},
mode=mode)
###############
# TRAIN MODEL #
###############
logging.info('... training')
epoch = 0
while (epoch < self.n_epochs):
epoch = epoch + 1
            for idx in range(n_train):
effective_momentum = self.final_momentum \
if epoch > self.momentum_switchover \
else self.initial_momentum
example_cost = train_model(idx, self.learning_rate,
effective_momentum)
# iteration number (how many weight updates have we made?)
# epoch is 1-based, index is 0 based
iter = (epoch - 1) * n_train + idx + 1
if iter % validation_frequency == 0:
# compute loss on training set
                    train_losses = [compute_train_error(i)
                                    for i in range(n_train)]
this_train_loss = np.mean(train_losses)
if self.interactive:
                        test_losses = [compute_test_error(i)
                                       for i in range(n_test)]
this_test_loss = np.mean(test_losses)
f.write('epoch %i, seq %i/%i, tr loss %f '
'te loss %f lr: %f \n' % \
(epoch, idx + 1, n_train,
this_train_loss, this_test_loss, self.learning_rate))
print('epoch %i, seq %i/%i, tr loss %f '
'te loss %f lr: %f' % \
(epoch, idx + 1, n_train,
this_train_loss, this_test_loss, self.learning_rate))
else:
f.write('epoch %i, seq %i/%i, train loss %f '
'lr: %f \n' % \
(epoch, idx + 1, n_train, this_train_loss,
self.learning_rate))
print('epoch %i, seq %i/%i, train loss %f '
'lr: %f' % \
(epoch, idx + 1, n_train, this_train_loss,
self.learning_rate))
self.learning_rate *= self.learning_rate_decay
f.close()
def test_real():
""" Test RNN with real-valued outputs. """
n_hidden = 200
n_in = 20
n_out = 5
n_steps = 10
n_seq = 100
np.random.seed(0)
# simple lag test
seq = np.random.randn(n_seq, n_steps, n_in)
targets = np.zeros((n_seq, n_steps, n_out))
targets[:, 1:, 0] = seq[:, :-1, 3] # delayed 1
targets[:, 1:, 1] = seq[:, :-1, 2] # delayed 1
targets[:, 2:, 2] = seq[:, :-2, 0] # delayed 2
targets += 0.01 * np.random.standard_normal(targets.shape)
model = MetaRNN(n_in=n_in, n_hidden=n_hidden, n_out=n_out,
learning_rate=0.001, learning_rate_decay=0.999,
n_epochs=400, activation='tanh')
model.fit(seq, targets, validation_frequency=1000)
[seqNum,lineNum,colNum] = targets.shape
print(seqNum,lineNum,colNum)
error = [0 for i in range(colNum)]
plt.close('all')
fig = plt.figure()
ax1 = plt.subplot(211)
plt.plot(seq[0])
ax1.set_title('input')
ax2 = plt.subplot(212)
true_targets = plt.plot(targets[0])
guess = model.predict(seq[0])
guessed_targets = plt.plot(guess, linestyle='--')
for i, x in enumerate(guessed_targets):
x.set_color(true_targets[i].get_color())
ax2.set_title('solid: true output, dashed: model output')
dif = abs(guess - targets[0])
[linedif,coldif] = dif.shape
print(linedif,coldif)
errorsum = 0
for i in range (colNum):
sum = 0
for j in range (lineNum):
sum += dif[j][i] ** 2
error[i] = math.sqrt(sum/lineNum)
errorsum += error[i]
print(error[i])
print("average error = ", errorsum/colNum)
def test_binary(multiple_out=False, n_epochs=250):
""" Test RNN with binary outputs. """
n_hidden = 40
n_in = 11
n_out = 17
n_steps = 10
n_seq = 500
np.random.seed(0)
# simple lag test
seqlist = []
count = 0
data = []
for l in open("inputdata-b10-500-10.txt"):
count += 1
row = [int(x) for x in l.split()]
if len(row) > 0:
data.append(row)
if (count == n_steps):
count = 0
if len(data) >0:
seqlist.append(data)
data = []
seqarray = np.asarray(seqlist)
seq = seqarray[:,:,:n_in]
targets = seqarray[:,:,n_in:]
seqlistTest = []
count = 0
dataTest = []
for l in open("inputdata-b10-200-10.txt"):
count += 1
row = [int(x) for x in l.split()]
if len(row) > 0:
dataTest.append(row)
if (count == n_steps):
count = 0
if len(dataTest) >0:
seqlistTest.append(dataTest)
dataTest = []
seqarrayTest = np.asarray(seqlistTest)
seqTest = seqarrayTest[:,:,:n_in]
targetsTest = seqarrayTest[:,:,n_in:]
model = MetaRNN(n_in=n_in, n_hidden=n_hidden, n_out=n_out,
learning_rate=0.03, learning_rate_decay=0.999,
n_epochs=n_epochs, activation='tanh', output_type='binary')
#model.fit(seq, targets, validation_frequency=1000)
model.fit(seq, targets, seqTest, targetsTest, validation_frequency=1000)
    ferror = open('errorRate.txt', 'a+')
[seqNum,lineNum,colNum] = targetsTest.shape
#print (seqTest.shape)
    seqs = range(seqNum)
error = [0 for i in range(lineNum*seqNum)]
errorsum = 0
for k in seqs:
guess = model.predict_proba(seqTest[k])
dif = abs(guess - targetsTest[k])
[lineDif,colDif] = dif.shape
#print(lineDif,colDif)
for i in range (lineDif):
ki = k*lineDif+i
for j in range (colDif):
if (dif[i][j] > 0.5):
error[ki] += 1
ferror.write('error %d = %d \n' % (ki,error[ki]))
if (error[ki]>0):
errorsum += 1
print(errorsum)
errorRate = errorsum/1.0/seqNum/lineNum
ferror.write("average error = %f \n" % (errorRate))
def test_softmax(n_epochs=250):
""" Test RNN with softmax outputs. """
n_hidden = 10
n_in = 5
n_steps = 10
n_seq = 100
n_classes = 3
n_out = n_classes # restricted to single softmax per time step
np.random.seed(0)
# simple lag test
seq = np.random.randn(n_seq, n_steps, n_in)
    targets = np.zeros((n_seq, n_steps), dtype=int)
thresh = 0.5
# if lag 1 (dim 3) is greater than lag 2 (dim 0) + thresh
# class 1
# if lag 1 (dim 3) is less than lag 2 (dim 0) - thresh
# class 2
    # if lag 2 (dim 0) - thresh <= lag 1 (dim 3) <= lag 2 (dim 0) + thresh
# class 0
targets[:, 2:][seq[:, 1:-1, 3] > seq[:, :-2, 0] + thresh] = 1
targets[:, 2:][seq[:, 1:-1, 3] < seq[:, :-2, 0] - thresh] = 2
#targets[:, 2:, 0] = np.cast[np.int](seq[:, 1:-1, 3] > seq[:, :-2, 0])
model = MetaRNN(n_in=n_in, n_hidden=n_hidden, n_out=n_out,
learning_rate=0.001, learning_rate_decay=0.999,
n_epochs=n_epochs, activation='tanh',
output_type='softmax', use_symbolic_softmax=False)
model.fit(seq, targets, validation_frequency=1000)
    seqs = range(10)
[seqNum,lineNum,colNum] = seq.shape
print(seqNum,lineNum,colNum)
error = [0 for i in range(colNum)]
plt.close('all')
for seq_num in seqs:
fig = plt.figure()
ax1 = plt.subplot(211)
plt.plot(seq[seq_num])
        ax1.set_title('input')
ax2 = plt.subplot(212)
# blue line will represent true classes
        true_targets = plt.step(range(n_steps), targets[seq_num], marker='o')
# show probabilities (in b/w) output by model
guess = model.predict_proba(seq[seq_num])
guessed_probs = plt.imshow(guess.T, interpolation='nearest',
cmap='gray')
ax2.set_title('blue: true class, grayscale: probs assigned by model')
        # NOTE: comparing the raw inputs against the integer class labels is
        # not meaningful (and the shapes do not broadcast); instead, compare
        # the predicted class probabilities against one-hot targets and
        # report a per-class RMS error.
        onehot = np.zeros_like(guess)
        onehot[np.arange(guess.shape[0]), targets[seq_num]] = 1
        dif = abs(guess - onehot)
        for i in range(dif.shape[1]):
            sum_sq = 0
            for j in range(dif.shape[0]):
                sum_sq += dif[j, i] ** 2
            error[i] = math.sqrt(sum_sq / dif.shape[0])
            print(error[i])
if __name__ == "__main__":
logging.basicConfig(
level = logging.INFO,
format = 'LINE %(lineno)-4d %(levelname)-8s %(message)s',
        datefmt = '%m-%d %H:%M',
filename = "D:/logresult20160123/one.log",
filemode = 'w')
t0 = time.time()
#test_real()
# problem takes more epochs to solve
test_binary(multiple_out=True, n_epochs=28)
#test_softmax(n_epochs=250)
print ("Elapsed time: %f" % (time.time() - t0))
|
from setuptools import setup
import os
import translatable
setup(
name = 'django-translatable',
packages = ['translatable',],
version = translatable.__version__,
description = "Django app providing simple translatable models system",
long_description = open(os.path.join(os.path.dirname(__file__), 'README.txt')).read(),
author = "Kossouho",
author_email = 'artscoop93@gmail.com',
url = 'https://github.com/artscoop/django-translatable',
license = 'BSD License',
platforms = ['OS Independent',],
install_requires=[
'Django>=1.8',
],
classifiers = [
'Development Status :: 4 - Beta',
'Environment :: Web Environment',
'Framework :: Django',
'Intended Audience :: Developers',
'License :: OSI Approved :: BSD License',
'Operating System :: OS Independent',
'Programming Language :: Python',
'Topic :: Software Development :: Internationalization',
]
)
|
"""
Export all open fonts as UFO
Iterate through all open fonts, and export UFOs.
Note:
This script will look for a UFO named <vfb_name_without_extension>.ufo.
If it can't find this UFO, it will make a new one.
If it can find the UFO, it will only export:
glyphs
feature data
font.lib
alignment zones (if the installed robofab supports them)
Kerning and kerning groups are NOT exported. This is to make it
easier to edit the glyphs in FontLab, but edit the kerning in the UFO,
with (for instance) MetricsMachine.
This script is useful for workflows which edit in FontLab,
but take interpolation, metrics and OTF compilation outside.
When running, the script will first save and close all open fonts.
Then it will open the .vfb files one by one, and export, then close.
Finally it will open all fonts again.
"""
from robofab.world import AllFonts, OpenFont
from robofab.tools.glyphNameSchemes import glyphNameToShortFileName
from robofab.glifLib import GlyphSet
from robofab.objects.objectsFL import RGlyph
from robofab.ufoLib import makeUFOPath, UFOWriter
from robofab.interface.all.dialogs import ProgressBar
try:
from robofab.objects.objectsFL import PostScriptFontHintValues, postScriptHintDataLibKey
supportHints = True
except ImportError:
supportHints = False
import os
paths = []
for f in AllFonts():
paths.append(f.path)
f.close()
for p in paths:
ufoPath = p.replace(".vfb", ".ufo")
if os.path.exists(ufoPath):
# the ufo exists, only export the glyphs and the features
print("There is a UFO for this font already, exporting glyphs.")
path = os.path.join(os.path.dirname(ufoPath), os.path.basename(ufoPath), "glyphs")
f = OpenFont(p)
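        # fl and fl_cmd are FontLab built-ins; the following line assumes
        # the script is run inside FontLab, where they are available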
fl.CallCommand(fl_cmd.FontSortByCodepage)
gs = GlyphSet(path, glyphNameToFileNameFunc=glyphNameToShortFileName)
for g in f:
print("saving glyph %s in %s"%(g.name, path))
gs.writeGlyph(g.name, g, g.drawPoints)
gs.writeContents()
# make a new writer
u = UFOWriter(ufoPath)
# font info
print("exporting font info..")
u.writeInfo(f.info)
# features
print("exporting features..")
glyphOrder = []
for nakedGlyph in f.naked().glyphs:
glyph = RGlyph(nakedGlyph)
glyphOrder.append(glyph.name)
assert None not in glyphOrder, glyphOrder
        # We make a shallow copy of lib, since we add some stuff for export
# that doesn't need to be retained in memory.
fontLib = dict(f.lib)
if supportHints:
psh = PostScriptFontHintValues(f)
d = psh.asDict()
fontLib[postScriptHintDataLibKey] = d
fontLib["org.robofab.glyphOrder"] = glyphOrder
f._writeOpenTypeFeaturesToLib(fontLib)
print("fontLib", fontLib)
u.writeLib(fontLib)
f.close()
else:
print("Making a new UFO at", ufoPath)
f = OpenFont(p)
f.writeUFO()
f.close()
for p in paths:
OpenFont(p)
|
"""Module containing the list default command."""
from command import Command
class List(Command):
"""Command 'list'.
    This command should be used as a container to list information
    such as the different bundles, routes and so on.
"""
name = "list"
brief = "list specific informations"
description = \
"This command is used to list specific informations, like " \
"the created bundles, the available plugins, the connected " \
"routes and so on."
|
from __future__ import print_function
from ploy.common import BaseInstance, BaseMaster, StartupScriptMixin
from ploy.config import HooksMassager
from ploy.config import StartupScriptMassager
import logging
log = logging.getLogger('ploy.dummy_plugin')
class MockSock(object):
def close(self):
log.info('sock.close')
class MockTransport(object):
sock = MockSock()
class MockClient(object):
def get_transport(self):
log.info('client.get_transport')
return MockTransport()
def close(self):
log.info('client.close')
class Instance(BaseInstance, StartupScriptMixin):
sectiongroupname = 'dummy-instance'
max_startup_script_size = 1024
def get_host(self):
return self.config['host']
def get_massagers(self):
return get_instance_massagers()
def snapshot(self):
log.info('snapshot: %s', self.id)
def start(self, overrides=None):
self.startup_script(overrides=overrides)
log.info('start: %s %s', self.id, overrides)
# this is here to get full coverage of the cmd_start method in common.py
if list(overrides.keys()) != ['instances']:
return overrides
def status(self):
log.info('status: %s', self.id)
def stop(self):
log.info('stop: %s', self.id)
def terminate(self):
log.info('terminate: %s', self.id)
def init_ssh_key(self, user=None):
host = self.get_host()
port = self.config.get('port', 22)
log.info('init_ssh_key: %s %s', self.id, user)
if user is None:
user = self.config.get('user', 'root')
return dict(
user=user,
host=host,
port=port,
client=MockClient(),
UserKnownHostsFile=self.master.known_hosts)
class Master(BaseMaster):
sectiongroupname = 'dummy-instance'
instance_class = Instance
def list_dummy(argv, help):
print("list_dummy")
def get_instance_massagers(sectiongroupname='instance'):
return [
HooksMassager(sectiongroupname, 'hooks'),
StartupScriptMassager(sectiongroupname, 'startup_script')]
def get_list_commands(ctrl):
return [('dummy', list_dummy)]
def get_massagers():
return get_instance_massagers('dummy-instance')
def get_masters(ctrl):
masters = ctrl.config.get('dummy-master', {'default': {}})
for master, master_config in masters.items():
yield Master(ctrl, master, master_config)
plugin = dict(
get_list_commands=get_list_commands,
get_massagers=get_massagers,
get_masters=get_masters)
|
"""
============
rdflib.store
============
``Context-aware``: An RDF store capable of storing statements within contexts
is considered context-aware. Essentially, such a store is able to partition
the RDF model it represents into individual, named, and addressable
sub-graphs.
Relevant Notation3 reference regarding formulae, quoted statements, and such:
http://www.w3.org/DesignIssues/Notation3.html
``Formula-aware``: An RDF store capable of distinguishing between statements
that are asserted and statements that are quoted is considered formula-aware.
``Conjunctive Graph``: This refers to the 'top-level' Graph. It is the
aggregation of all the contexts within it and is also the appropriate,
absolute boundary for closed world assumptions / models.
For the sake of persistence, Conjunctive Graphs must be distinguished by
identifiers (that may not necessarily be RDF identifiers or may be an RDF
identifier normalized - SHA1/MD5 perhaps - for database naming purposes).
``Conjunctive Query``: Any query that doesn't limit the store to search
within a named context only. Such a query expects a context-aware store to
search the entire asserted universe (the conjunctive graph). A formula-aware
store is expected not to include quoted statements when matching such a query.
"""
VALID_STORE = 1
CORRUPTED_STORE = 0
NO_STORE = -1
UNKNOWN = None
from rdflib.events import Dispatcher, Event
__all__ = ['StoreCreatedEvent', 'TripleAddedEvent', 'TripleRemovedEvent', 'NodePickler', 'Store']
class StoreCreatedEvent(Event):
"""
    This event is fired when the Store is created; it has the following attribute:
- 'configuration' string that is used to create the store
"""
class TripleAddedEvent(Event):
"""
    This event is fired when a triple is added; it has the following attributes:
- 'triple' added to the graph
- 'context' of the triple if any
- 'graph' that the triple was added to
"""
class TripleRemovedEvent(Event):
"""
    This event is fired when a triple is removed; it has the following attributes:
- 'triple' removed from the graph
- 'context' of the triple if any
- 'graph' that the triple was removed from
"""
from cPickle import Pickler, Unpickler, UnpicklingError
try:
from io import BytesIO
except ImportError:
from cStringIO import StringIO as BytesIO
class NodePickler(object):
def __init__(self):
self._objects = {}
self._ids = {}
self._get_object = self._objects.__getitem__
def _get_ids(self, key):
try:
return self._ids.get(key)
except TypeError, e:
return None
def register(self, object, id):
self._objects[id] = object
self._ids[object] = id
def loads(self, s):
up = Unpickler(BytesIO(s))
up.persistent_load = self._get_object
try:
return up.load()
except KeyError, e:
raise UnpicklingError, "Could not find Node class for %s" % e
def dumps(self, obj, protocol=None, bin=None):
src = BytesIO()
p = Pickler(src)
p.persistent_id = self._get_ids
p.dump(obj)
return src.getvalue()
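# A minimal round-trip sketch for NodePickler (illustrative; assumes
# rdflib.term.URIRef is importable): registered classes are stored as short
# persistent ids instead of being pickled, and resolved again on load.
#
#   np = NodePickler()
#   np.register(URIRef, "U")
#   data = np.dumps(URIRef("http://example.org/x"))
#   assert np.loads(data) == URIRef("http://example.org/x")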
class Store(object):
#Properties
context_aware = False
formula_aware = False
transaction_aware = False
batch_unification = False
def __init__(self, configuration=None, identifier=None):
"""
identifier: URIRef of the Store. Defaults to CWD
        configuration: string containing information that open() can use to
        connect to the datastore.
"""
self.__node_pickler = None
self.dispatcher = Dispatcher()
if configuration:
self.open(configuration)
def __get_node_pickler(self):
if self.__node_pickler is None:
from rdflib.term import URIRef
from rdflib.term import BNode
from rdflib.term import Literal
from rdflib.graph import Graph, QuotedGraph, GraphValue
from rdflib.term import Variable
from rdflib.term import Statement
self.__node_pickler = np = NodePickler()
np.register(self, "S")
np.register(URIRef, "U")
np.register(BNode, "B")
np.register(Literal, "L")
np.register(Graph, "G")
np.register(QuotedGraph, "Q")
np.register(Variable, "V")
np.register(Statement, "s")
np.register(GraphValue, "v")
return self.__node_pickler
node_pickler = property(__get_node_pickler)
#Database management methods
def create(self, configuration):
self.dispatcher.dispatch(StoreCreatedEvent(configuration=configuration))
def open(self, configuration, create=False):
"""
Opens the store specified by the configuration string. If
create is True a store will be created if it does not already
exist. If create is False and a store does not already exist
an exception is raised. An exception is also raised if a store
        exists, but there are insufficient permissions to open the
        store. This should return one of VALID_STORE, CORRUPTED_STORE, or NO_STORE.
"""
return UNKNOWN
def close(self, commit_pending_transaction=False):
"""
This closes the database connection. The commit_pending_transaction parameter specifies whether to
commit all pending transactions before closing (if the store is transactional).
"""
def destroy(self, configuration):
"""
This destroys the instance of the store identified by the configuration string.
"""
def gc(self):
"""
Allows the store to perform any needed garbage collection
"""
pass
#RDF APIs
def add(self, (subject, predicate, object), context, quoted=False):
"""
Adds the given statement to a specific context or to the model. The quoted argument
is interpreted by formula-aware stores to indicate this statement is quoted/hypothetical
It should be an error to not specify a context and have the quoted argument be True.
It should also be an error for the quoted argument to be True when the store is not formula-aware.
"""
self.dispatcher.dispatch(TripleAddedEvent(triple=(subject, predicate, object), context=context))
def addN(self, quads):
"""
Adds each item in the list of statements to a specific context. The quoted argument
is interpreted by formula-aware stores to indicate this statement is quoted/hypothetical.
Note that the default implementation is a redirect to add
"""
for s,p,o,c in quads:
assert c is not None, "Context associated with %s %s %s is None!"%(s,p,o)
self.add(
(s,p,o),
c
)
def remove(self, (subject, predicate, object), context=None):
""" Remove the set of triples matching the pattern from the store """
self.dispatcher.dispatch(TripleRemovedEvent(triple=(subject, predicate, object), context=context))
def triples_choices(self, (subject, predicate, object_),context=None):
"""
A variant of triples that can take a list of terms instead of a single
term in any slot. Stores can implement this to optimize the response time
from the default 'fallback' implementation, which will iterate
        over each term in the list and dispatch to triples
"""
if isinstance(object_,list):
assert not isinstance(subject,list), "object_ / subject are both lists"
assert not isinstance(predicate,list), "object_ / predicate are both lists"
if object_:
for obj in object_:
for (s1, p1, o1), cg in self.triples((subject,predicate,obj),context):
yield (s1, p1, o1), cg
else:
for (s1, p1, o1), cg in self.triples((subject,predicate,None),context):
yield (s1, p1, o1), cg
elif isinstance(subject,list):
assert not isinstance(predicate,list), "subject / predicate are both lists"
if subject:
for subj in subject:
for (s1, p1, o1), cg in self.triples((subj,predicate,object_),context):
yield (s1, p1, o1), cg
else:
for (s1, p1, o1), cg in self.triples((None,predicate,object_),context):
yield (s1, p1, o1), cg
elif isinstance(predicate,list):
assert not isinstance(subject,list), "predicate / subject are both lists"
if predicate:
for pred in predicate:
for (s1, p1, o1), cg in self.triples((subject,pred,object_),context):
yield (s1, p1, o1), cg
else:
for (s1, p1, o1), cg in self.triples((subject,None,object_),context):
yield (s1, p1, o1), cg
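    # An illustrative list-valued pattern (assuming the rdflib RDFS and DC
    # namespaces): a store could answer this in one optimized call instead
    # of falling back to three separate triples() calls.
    #
    #   store.triples_choices((subj, [RDFS.label, RDFS.comment, DC.title], None))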
def triples(self, triple_pattern, context=None):
"""
A generator over all the triples matching the pattern. Pattern can
        include any objects used for comparing against nodes in the store, for
example, REGEXTerm, URIRef, Literal, BNode, Variable, Graph, QuotedGraph, Date? DateRange?
A conjunctive query can be indicated by either providing a value of None
for the context or the identifier associated with the Conjunctive Graph (if it's context aware).
"""
subject, predicate, object = triple_pattern
# variants of triples will be done if / when optimization is needed
def __len__(self, context=None):
"""
Number of statements in the store. This should only account for non-quoted (asserted) statements
if the context is not specified, otherwise it should return the number of statements in the formula or context given.
"""
def contexts(self, triple=None):
"""
Generator over all contexts in the graph. If triple is specified, a generator over all
contexts the triple is in.
"""
# Optional Namespace methods
def bind(self, prefix, namespace):
""" """
def prefix(self, namespace):
""" """
def namespace(self, prefix):
""" """
def namespaces(self):
""" """
if False:
yield None
# Optional Transactional methods
def commit(self):
""" """
def rollback(self):
""" """
|
'''
anaconda upload CONDA_PACKAGE_1.bz2
* [Uploading a Conda Package](http://docs.anaconda.org/using.html#Uploading)
* [Uploading a PyPI Package](http://docs.anaconda.org/using.html#UploadingPypiPackages)
'''
from __future__ import unicode_literals
import argparse
from glob import glob
import logging
import os
from os.path import exists
import sys
from binstar_client import errors, requests_ext
from binstar_client.utils import bool_input
from binstar_client.utils import get_server_api
from binstar_client.utils import get_config
from binstar_client.utils import upload_print_callback
from binstar_client.utils.detect import detect_package_type, get_attrs
try:
input = raw_input
except NameError:
input = input
log = logging.getLogger('binstar.upload')
def create_release(aserver_api, username, package_name, version, description, announce=None):
aserver_api.add_release(username, package_name, version, [],
announce, description)
def create_release_interactive(aserver_api, username, package_name, version):
log.info('\nThe release %s/%s/%s does not exist' % (username, package_name, version))
if not bool_input('Would you like to create it now?'):
log.info('good-bye')
raise SystemExit(-1)
description = input('Enter a short description of the release:\n')
log.info("\nAnnouncements are emailed to your package followers.")
make_announcement = bool_input('Would you like to make an announcement to the package followers?', False)
if make_announcement:
announce = input('Markdown Announcement:\n')
else:
announce = ''
aserver_api.add_release(username, package_name, version, [],
announce, description)
def determine_package_type(filename, args):
"""
return the file type from the inspected package or from the
-t/--package-type argument
"""
if args.package_type:
package_type = args.package_type
else:
log.info('detecting package type ...')
sys.stdout.flush()
package_type = detect_package_type(filename)
if package_type is None:
        raise errors.BinstarError('Could not detect package type of file %r, please specify package type with option --package-type' % filename)
log.info(package_type)
return package_type
def get_package_name(args, package_attrs, filename, package_type):
if args.package:
if 'name' in package_attrs and package_attrs['name'].lower() != args.package.lower():
            msg = 'Package name on the command line "%s" does not match the package name in the file "%s"'
raise errors.BinstarError(msg % (args.package.lower(), package_attrs['name'].lower()))
package_name = args.package
else:
if 'name' not in package_attrs:
raise errors.BinstarError("Could not detect package name for package type %s, please use the --package option" % (package_type,))
package_name = package_attrs['name']
return package_name
def get_version(args, release_attrs, package_type):
if args.version:
version = args.version
else:
if 'version' not in release_attrs:
raise errors.BinstarError("Could not detect package version for package type %s, please use the --version option" % (package_type,))
version = release_attrs['version']
return version
def add_package(aserver_api, args, username, package_name, package_attrs, package_type):
try:
aserver_api.package(username, package_name)
except errors.NotFound:
if not args.auto_register:
raise errors.UserError('Anaconda Cloud package %s/%s does not exist. '
'Please run "anaconda package --create" to create this package namespace in the cloud.' % (username, package_name))
else:
if args.summary:
summary = args.summary
else:
if 'summary' not in package_attrs:
raise errors.BinstarError("Could not detect package summary for package type %s, please use the --summary option" % (package_type,))
summary = package_attrs['summary']
aserver_api.add_package(username, package_name, summary, package_attrs.get('license'),
public=True)
def add_release(aserver_api, args, username, package_name, version, release_attrs):
try:
aserver_api.release(username, package_name, version)
except errors.NotFound:
if args.mode == 'interactive':
create_release_interactive(aserver_api, username, package_name, version)
else:
create_release(aserver_api, username, package_name, version, release_attrs['description'])
def remove_existing_file(aserver_api, args, username, package_name, version, file_attrs):
try:
aserver_api.distribution(username, package_name, version, file_attrs['basename'])
except errors.NotFound:
return False
else:
if args.mode == 'force':
log.warning('Distribution %s already exists ... removing' % (file_attrs['basename'],))
aserver_api.remove_dist(username, package_name, version, file_attrs['basename'])
if args.mode == 'interactive':
if bool_input('Distribution %s already exists. Would you like to replace it?' % (file_attrs['basename'],)):
aserver_api.remove_dist(username, package_name, version, file_attrs['basename'])
else:
log.info('Not replacing distribution %s' % (file_attrs['basename'],))
return True
def main(args):
aserver_api = get_server_api(args.token, args.site, args.log_level)
if args.user:
username = args.user
else:
user = aserver_api.user()
username = user['login']
uploaded_packages = []
# Flatten file list because of 'windows_glob' function
files = [f for fglob in args.files for f in fglob]
for filename in files:
if not exists(filename):
raise errors.BinstarError('file %s does not exist' % (filename))
package_type = determine_package_type(filename, args)
log.info('extracting package attributes for upload ...')
sys.stdout.flush()
try:
package_attrs, release_attrs, file_attrs = get_attrs(package_type,
filename, parser_args=args)
except Exception:
if args.show_traceback:
raise
            raise errors.BinstarError('Trouble reading metadata from %r. Is this a valid %s package?' % (filename, package_type))
if args.build_id:
file_attrs['attrs']['binstar_build'] = args.build_id
log.info('done')
package_name = get_package_name(args, package_attrs, filename, package_type)
version = get_version(args, release_attrs, package_type)
add_package(aserver_api, args, username, package_name, package_attrs, package_type)
add_release(aserver_api, args, username, package_name, version, release_attrs)
binstar_package_type = file_attrs.pop('binstar_package_type', package_type)
with open(filename, 'rb') as fd:
log.info('\nUploading file %s/%s/%s/%s ... ' % (username, package_name, version, file_attrs['basename']))
sys.stdout.flush()
if remove_existing_file(aserver_api, args, username, package_name, version, file_attrs):
continue
try:
upload_info = aserver_api.upload(username, package_name, version, file_attrs['basename'],
fd, binstar_package_type,
args.description,
dependencies=file_attrs.get('dependencies'),
attrs=file_attrs['attrs'],
channels=args.labels,
callback=upload_print_callback(args))
except errors.Conflict:
full_name = '%s/%s/%s/%s' % (username, package_name, version, file_attrs['basename'])
log.info('Distribution already exists. Please use the -i/--interactive or --force options or `anaconda remove %s`' % full_name)
raise
except requests_ext.OpenSslError:
requests_ext.warn_openssl()
if args.show_traceback != 'never':
raise
else:
raise errors.BinstarError('Could not upload package')
uploaded_packages.append([package_name, upload_info])
log.info("\n\nUpload(s) Complete\n")
for package, upload_info in uploaded_packages:
package_url = upload_info.get('url', 'https://anaconda.org/%s/%s' % (username, package))
log.info("Package located at:\n%s\n" % package_url)
def windows_glob(item):
if os.name == 'nt' and '*' in item:
return glob(item)
else:
return [item]
def add_parser(subparsers):
description = 'Upload packages to Anaconda Cloud'
parser = subparsers.add_parser('upload',
formatter_class=argparse.RawDescriptionHelpFormatter,
help=description, description=description,
epilog=__doc__)
parser.add_argument('files', nargs='+', help='Distributions to upload', default=[], type=windows_glob)
label_help = (
'{deprecation}Add this file to a specific {label}. '
        'Warning: if the file {label}s do not include "main", '
        'the file will not show up in your user {label}')
parser.add_argument('-c', '--channel', action='append', default=[], dest='labels',
help=label_help.format(deprecation='[DEPRECATED]\n', label='channel'),
metavar='CHANNELS')
parser.add_argument('-l', '--label', action='append', dest='labels',
help=label_help.format(deprecation='', label='label'))
parser.add_argument('--no-progress', help="Don't show upload progress", action='store_true')
parser.add_argument('-u', '--user', help='User account, defaults to the current user')
mgroup = parser.add_argument_group('metadata options')
mgroup.add_argument('-p', '--package', help='Defaults to the package name in the uploaded file')
mgroup.add_argument('-v', '--version', help='Defaults to the package version in the uploaded file')
mgroup.add_argument('-s', '--summary', help='Set the summary of the package')
mgroup.add_argument('-t', '--package-type', help='Set the package type, defaults to autodetect')
mgroup.add_argument('-d', '--description', help='description of the file(s)')
mgroup.add_argument('--thumbnail', help='Notebook\'s thumbnail image')
register_group = parser.add_mutually_exclusive_group()
register_group.add_argument("--no-register", dest="auto_register", action="store_false",
help='Don\'t create a new package namespace if it does not exist')
register_group.add_argument("--register", dest="auto_register", action="store_true",
help='Create a new package namespace if it does not exist')
parser.set_defaults(auto_register=bool(get_config().get('auto_register', True)))
parser.add_argument('--build-id', help='Anaconda Cloud Build ID (internal only)')
group = parser.add_mutually_exclusive_group()
group.add_argument('-i', '--interactive', action='store_const', help='Run an interactive prompt if any packages are missing',
dest='mode', const='interactive')
group.add_argument('-f', '--fail', help='Fail if a package or release does not exist (default)',
action='store_const', dest='mode', const='fail')
group.add_argument('--force', help='Force a package upload regardless of errors',
action='store_const', dest='mode', const='force')
parser.set_defaults(main=main)
|
from celery.task import task
from django.db import transaction
from tardis.tardis_portal.staging import stage_file
from tardis.tardis_portal.models import Dataset_File
try:
from tardis.tardis_portal.filters import FilterInitMiddleware
FilterInitMiddleware()
except Exception:
pass
try:
from tardis.tardis_portal.logging_middleware import LoggingMiddleware
LoggingMiddleware()
except Exception:
pass
@task(name="tardis_portal.verify_files", ignore_result=True)
def verify_files():
for datafile in Dataset_File.objects.filter(verified=False):
if datafile.stay_remote or datafile.is_local():
verify_as_remote.delay(datafile.id)
else:
make_local_copy.delay(datafile.id)
@task(name="tardis_portal.verify_as_remote", ignore_result=True)
def verify_as_remote(datafile_id):
datafile = Dataset_File.objects.get(id=datafile_id)
# Check that we still need to verify - it might have been done already
if datafile.verified:
return
# Use a transaction for safety
with transaction.commit_on_success():
# Get datafile locked for write (to prevent concurrent actions)
datafile = Dataset_File.objects.select_for_update().get(id=datafile.id)
# Second check after lock (concurrency paranoia)
if not datafile.verified:
datafile.verify()
@task(name="tardis_portal.make_local_copy", ignore_result=True)
def make_local_copy(datafile_id):
datafile = Dataset_File.objects.get(id=datafile_id)
# Check that we still need to verify - it might have been done already
if datafile.is_local():
return
# Use a transaction for safety
with transaction.commit_on_success():
# Get datafile locked for write (to prevent concurrent actions)
datafile = Dataset_File.objects.select_for_update().get(id=datafile_id)
# Second check after lock (concurrency paranoia)
if not datafile.is_local():
stage_file(datafile)
|
from os.path import join, dirname, abspath, relpath
import pytest
from pinner.api import check_requirement, find_requirements
from pinner.exceptions import *
def test_requirement_needs_version():
with pytest.raises(UnpinnedDependency):
check_requirement('Django')
with pytest.raises(NotStrictSpec):
check_requirement('pytest>=2.6')
with pytest.raises(NotStrictSpec):
check_requirement('requirements-parser<=1')
def test_requirement_needs_revision():
with pytest.raises(UnpinnedVcs):
check_requirement('-e git+git://github.com/mitsuhiko/jinja2.git#egg=jinja2')
with pytest.raises(NotStrictVcs):
check_requirement('git://git.myproject.org/MyProject.git@master#egg=MyProject')
def test_requirement_has_version():
assert check_requirement('coveralls==0.5')
assert check_requirement('-r otherfile.pip'), 'Skipping'
def test_requirement_has_revision():
assert check_requirement('-e hg+https://bitbucket.org/coagulant/django-autoslug@903a9fd#egg=django-autoslug')
assert check_requirement('-e svn+http://svn.myproject.org/svn/MyProject/trunk@2019#egg=MyProject')
assert check_requirement('-e git+https://github.com/mvasilkov/django-google-charts@abcde1#egg=django-google-charts')
def test_find_requirements():
    test_dir = abspath(join(dirname(__file__), '..', 'test_project'))
assert list([relpath(path, test_dir) for path in find_requirements(test_dir)]) == [
'reqs.txt',
'requirements-dev.txt',
'requirements.pip',
'requirements.txt',
'requirements/local.txt',
'requirements/production.pip'
]
|
from __future__ import print_function
import sys
sys.path.append('..')
from os import mkdir
from os.path import basename, exists, join as joinpath
from sasmodels.core import load_model_info
try:
    from typing import Optional, IO, List, Dict
except ImportError:
pass
else:
from sasmodels.modelinfo import ModelInfo
TEMPLATE = """\
..
Generated from doc/gentoc.py -- DO NOT EDIT --
.. _%(label)s:
%(bar)s
%(title)s
%(bar)s
.. toctree::
"""
MODEL_TOC_PATH = "guide/models"
def _make_category(category_name, label, title, parent=None):
    # type: (str, str, str, Optional[IO[str]]) -> IO[str]
file = open(joinpath(MODEL_TOC_PATH, category_name+".rst"), "w")
file.write(TEMPLATE%{'label':label, 'title':title, 'bar':'*'*len(title)})
if parent:
_add_subcategory(category_name, parent)
return file
def _add_subcategory(category_name, parent):
    # type: (str, IO[str]) -> None
parent.write(" %s.rst\n"%category_name)
def _add_model(file, model_name):
# type: (IO[str], str) -> None
file.write(" ../../model/%s.rst\n"%model_name)
def _maybe_make_category(category, models, cat_files, model_toc):
    # type: (str, List[str], Dict[str, IO[str]], IO[str]) -> None
if category not in cat_files:
print("Unexpected category %s containing"%category, models, file=sys.stderr)
title = category.capitalize()+" Functions"
cat_files[category] = _make_category(category, category, title, model_toc)
def generate_toc(model_files):
# type: (List[str]) -> None
if not model_files:
print("gentoc needs a list of model files", file=sys.stderr)
# find all categories
category = {} # type: Dict[str, List[str]]
for item in model_files:
# assume model is in sasmodels/models/name.py, and ignore the full path
model_name = basename(item)[:-3]
if model_name.startswith('_'):
continue
model_info = load_model_info(model_name)
if model_info.category is None:
print("Missing category for", item, file=sys.stderr)
else:
category.setdefault(model_info.category, []).append(model_name)
# Check category names
for k, v in category.items():
if len(v) == 1:
print("Category %s contains only %s"%(k, v[0]), file=sys.stderr)
# Generate category files for the table of contents.
# Initially we had "shape functions" as an additional TOC level, but we
# have revised it so that the individual shape categories now go at
# the top level. Judicious rearrangement of comments will make the
# "shape functions" level reappear.
# We are forcing shape-independent, structure-factor and custom-models
# to come at the end of the TOC. All other categories will come in
# alphabetical order before them.
if not exists(MODEL_TOC_PATH):
mkdir(MODEL_TOC_PATH)
model_toc = _make_category(
'index', 'Models', 'Model Functions')
#shape_toc = _make_category(
# 'shape', 'Shapes', 'Shape Functions', model_toc)
free_toc = _make_category(
'shape-independent', 'Shape-independent',
'Shape-Independent Functions')
struct_toc = _make_category(
'structure-factor', 'Structure-factor', 'Structure Factors')
#custom_toc = _make_category(
# 'custom-models', 'Custom-models', 'Custom Models')
    # remember the top-level categories
cat_files = {
#'shape':shape_toc,
'shape':model_toc,
'shape-independent':free_toc,
'structure-factor': struct_toc,
#'custom': custom_toc,
}
# Process the model lists
for k, v in sorted(category.items()):
if ':' in k:
cat, subcat = k.split(':')
_maybe_make_category(cat, v, cat_files, model_toc)
cat_file = cat_files[cat]
label = "-".join((cat, subcat))
filename = label
title = subcat.capitalize() + " Functions"
sub_toc = _make_category(filename, label, title, cat_file)
for model in sorted(v):
_add_model(sub_toc, model)
sub_toc.close()
else:
_maybe_make_category(k, v, cat_files, model_toc)
cat_file = cat_files[k]
for model in sorted(v):
_add_model(cat_file, model)
#_add_subcategory('shape', model_toc)
_add_subcategory('shape-independent', model_toc)
_add_subcategory('structure-factor', model_toc)
#_add_subcategory('custom-models', model_toc)
# Close the top-level category files
#model_toc.close()
for f in cat_files.values():
f.close()
if __name__ == "__main__":
generate_toc(sys.argv[1:])
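# Example invocation, assuming this script lives in doc/ next to the sasmodels
# package (see the path assumption noted in generate_toc above):
#   python gentoc.py ../sasmodels/models/*.py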
|
import re
import os
import sys
import yaml
NUM_TO_KEEP = 5
PACKAGE_DEFS = {}
FOUND_PACKAGES = {}
if __name__ == '__main__':
with open('freight-clean.yml', 'r') as f:
        yml = yaml.safe_load(f)
NUM_TO_KEEP = yml.get('settings', {}).get('num-to-keep', 5)
PACKAGE_DEFS = {}
for name, pkgSettings in yml['packages'].items():
PACKAGE_DEFS[name] = pkgSettings
        # expected regex shape, e.g.:
        # '(?P<pkgID>[a-z0-9\-]+)_(?P<version>[0-9a-z\.]+)~release-(?P<sortkey>[0-9]+)\.amd64\.deb'
PACKAGE_DEFS[name]['regex'] = re.compile(pkgSettings['regex'])
for root, _, files in os.walk('/var/lib/freight/apt/jessie'):
for file in files:
fullpath = os.path.join(root, file)
filename = os.path.basename(fullpath)
_, ext = os.path.splitext(fullpath)
            if ext != '.deb':
continue
for pkgID, packagecfg in PACKAGE_DEFS.items():
m = packagecfg['regex'].match(filename)
if m:
sortkey = m.group('sortkey')
fPkgID = m.group('pkgID')
if fPkgID is None:
fPkgID = pkgID
if fPkgID not in FOUND_PACKAGES:
FOUND_PACKAGES[fPkgID] = {}
FOUND_PACKAGES[fPkgID][sortkey] = fullpath
break
for pkgID, archDict in FOUND_PACKAGES.items():
pkgsLeft = NUM_TO_KEEP
print('Scanning for outdated {} packages...'.format(pkgID))
for sortKey in reversed(sorted(list(archDict.keys()))):
fullpath = archDict[sortKey]
if pkgsLeft <= 0:
if os.path.isfile(fullpath):
print(' RM {}'.format(fullpath))
os.remove(fullpath)
#else:
# print(' KEEP {}'.format(fullpath))
pkgsLeft -= 1
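# Illustrative freight-clean.yml layout, inferred from the fields read above
# (the real file may differ):
#
#   settings:
#     num-to-keep: 5
#   packages:
#     my-package:
#       regex: 'my-package_(?P<version>[0-9a-z\.]+)~release-(?P<sortkey>[0-9]+)\.amd64\.deb'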
|
from troposphere import (ecs, Ref, Join)
import yaml, os
class Compose2TaskDefinition(object):
def __init__(self, compose_file, name_image_map):
self.compose_file = compose_file
self.container_definitions = {}
f = open(self.compose_file)
data_map = yaml.safe_load(f)
f.close()
defs = []
for name in data_map:
environment = []
port_mappings = Ref("AWS::NoValue")
volumes_from = Ref("AWS::NoValue")
if data_map[name].get("ports") is not None:
port_mappings = []
for pm in data_map[name].get("ports"):
port_mappings.append(
ecs.PortMapping(
ContainerPort=pm.split(":")[1],
HostPort=pm.split(":")[0],
)
)
if data_map[name].get("volumes_from") is not None:
volumes_from = []
for vf in data_map[name].get("volumes_from"):
volumes_from.append(
ecs.VolumesFrom(
SourceContainer=vf
)
)
for env in data_map[name].get("environment", []):
environment.append(ecs.Environment(
Name=env.split("=", 1)[0],
Value=env.split("=", 1)[1]
))
self.container_definitions[name] = ecs.ContainerDefinition(
Name=name,
Image=name_image_map[name],
Cpu=data_map[name].get("cpu_shares", Ref("AWS::NoValue")),
# TODO Convert correct with M and G
Memory=data_map[name]["mem_limit"][:-1],
Essential=True,
PortMappings=port_mappings,
Links=data_map[name].get("links", Ref("AWS::NoValue")),
VolumesFrom=volumes_from,
Environment=environment,
LogConfiguration=ecs.LogConfiguration(
LogDriver="awslogs",
Options={
"awslogs-region": Ref("AWS::Region"),
"awslogs-group": Ref("AWS::StackName"),
"awslogs-stream-prefix": name
}
)
)
defs.append(
self.container_definitions[name]
)
def get_container_definition(self, name):
return self.container_definitions[name]
def get_task_definition(self, title):
defs = []
for name in self.container_definitions:
defs.append(self.container_definitions[name])
return ecs.TaskDefinition(
title,
ContainerDefinitions=defs,
Volumes=[]
)
def environment(self, name):
return ContainerEnvironment(self.get_container_definition(name))
class Compose2TaskDefinitionDataDog(Compose2TaskDefinition):
def __init__(self, compose_file, name_image_map, datadog_image, api_key):
super(Compose2TaskDefinitionDataDog, self).__init__(compose_file, name_image_map)
self.api_key = api_key
self.datadog_image = datadog_image
self.container_definitions["ddagent"] = ecs.ContainerDefinition(
Name="ddagent",
Image=datadog_image,
Cpu=10,
Memory=128,
Essential=True,
MountPoints=[
ecs.MountPoint(
ContainerPath="/var/run/docker.sock",
SourceVolume="docker_sock"
),
ecs.MountPoint(
ContainerPath="/host/sys/fs/cgroup",
SourceVolume="cgroup",
ReadOnly=True
),
ecs.MountPoint(
ContainerPath="/host/proc",
SourceVolume="proc",
ReadOnly=True
)
],
Environment=[
ecs.Environment(
Name="API_KEY",
Value=self.api_key
)
],
LogConfiguration=ecs.LogConfiguration(
LogDriver="awslogs",
Options={
"awslogs-region": Ref("AWS::Region"),
"awslogs-group": Ref("AWS::StackName"),
"awslogs-stream-prefix": "ddagent"
}
)
)
def get_task_definition(self, title):
val = super(Compose2TaskDefinitionDataDog, self).get_task_definition(title)
val.Volumes.extend([
ecs.Volume(
Host=ecs.Host(
SourcePath="/var/run/docker.sock"
),
Name="docker_sock"
),
ecs.Volume(
Host=ecs.Host(
SourcePath="/proc/"
),
Name="proc"
),
ecs.Volume(
Host=ecs.Host(
SourcePath="/cgroup/"
),
Name="cgroup"
),
])
return val
class ContainerEnvironment(object):
def __init__(self, container):
self._items = {}
self.container = container
def __setitem__(self, key, value):
for e in self.container.Environment:
if e.Name == key:
e.Value = value
return
self.container.Environment.append(ecs.Environment(
Name=key,
Value=value
))
def __getitem__(self, key):
for e in self.container.Environment:
if e.Name == key:
return e.Value
return None
def __len__(self):
return len(self.container.Environment)
def __iter__(self):
ret = {}
for e in self.container.Environment:
ret[e.Name] = e.Value
return iter(ret)
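# Minimal usage sketch (hypothetical compose file name and image mapping,
# for illustration only):
#
#   c2t = Compose2TaskDefinition('docker-compose.yml', {'web': 'repo/web:latest'})
#   env = c2t.environment('web')
#   env['DEBUG'] = 'false'  # upsert an environment variable on the container
#   task_def = c2t.get_task_definition('WebTask')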
|
import re
_DAYS_IN_MONTH = (31, 28, 31, 30, 31, 30, 31, 31, 30, 31, 30, 31)
_INTERPOLATION = re.compile(r'\${(\w*)}')
def is_leap_year(year):
"""Return whether the given year is a leap year."""
if year % 4:
return False
if year % 400 == 0:
return True
return year % 100 != 0
def get_number_of_days(yyyymm):
"""Return number of days of the given month."""
month, year = map(int, (yyyymm[-2:], yyyymm[:-2]))
n = _DAYS_IN_MONTH[month - 1]
if month == 2 and is_leap_year(year):
n += 1
return n
def interpolate(s, **bindings):
"""Substitute ``${var}`` fragments in ``s`` by their value
provided in ``bindings``.
"""
if not bindings:
return s
def _sub(matchobj):
var = matchobj.group(1)
return bindings.get(var, matchobj.group(0))
return _INTERPOLATION.sub(_sub, s)
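if __name__ == '__main__':
    # Minimal demonstration added for illustration (not part of the original
    # module): exercises the helpers above with known values.
    assert is_leap_year(2000) and not is_leap_year(1900)
    assert get_number_of_days('200002') == 29   # leap-year February
    assert get_number_of_days('210002') == 28   # 2100 is not a leap year
    assert interpolate('Hello ${name}!', name='world') == 'Hello world!'
    assert interpolate('${unknown}', name='x') == '${unknown}'  # unbound vars kept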
|
from __future__ import unicode_literals
from django.db import models, migrations
import datetime
class Migration(migrations.Migration):
dependencies = [
('diagnosticos', '0003_auto_20150407_2123'),
]
operations = [
migrations.AlterField(
model_name='diagnosticos',
name='fecha',
field=models.DateField(default=datetime.datetime(2015, 4, 25, 14, 59, 14, 459617), help_text='Formato: dd/mm/yyyy'),
preserve_default=True,
),
migrations.AlterField(
model_name='diagnosticos',
name='hora',
field=models.TimeField(default=datetime.datetime(2015, 4, 25, 14, 59, 14, 459671), help_text='Formato: hh:mm'),
preserve_default=True,
),
]
|
'''
RSS feeds for blog posts
@copyright: Copyright 2012 Faraz Masood Khan, mk.faraz@gmail.com
@author: Faraz Masood Khan
'''
from foo.blog.models import Post
from django.conf import settings
from django.contrib.sites.models import Site
from django.utils.feedgenerator import Rss201rev2Feed
from django.contrib.syndication.views import Feed
site = Site.objects.get_current()
class RssFeedGenerator(Rss201rev2Feed):
mime_type = u'application/rss+xml'
def add_root_elements(self, handler):
super(RssFeedGenerator, self).add_root_elements(handler)
handler.startElement(u'image', {})
handler.addQuickElement(u"url", u'http://%s%s%s' % (site.domain, settings.STATIC_URL, 'blog/img/logo-50x50.png'))
handler.addQuickElement(u"title", unicode(site.name))
handler.addQuickElement(u"link", u'http://%s' % site.domain)
handler.endElement(u'image')
class LatestFeed(Feed):
feed_type = RssFeedGenerator
def title(self):
return unicode(site.name)
def link(self):
        return ''
def description(self):
return settings.META_DESCRIPTION
def items(self):
return Post.latest().order_by('-published')[:30]
def item_title(self, feed):
return unicode(feed.title)
def item_description(self, feed):
return feed.content
def item_link(self, feed):
return feed.get_absolute_url()
def item_guid(self, feed):
return self.item_link(feed)
def item_author_name(self, feed):
return unicode(feed.author)
def item_pubdate(self, feed):
return feed.published
def item_categories(self, feed):
return '' #feed.categories.split(',')
|
import numpy as np
import scipy.linalg as lg
import matplotlib.pyplot as plt
from parametrix.monte_carlo.estimators import MC_Simulations_MSE
from parametrix.bayesian_linear_model.signal_models import M_Bayesian_L
from parametrix.bayesian_linear_model.estimators import E_Bayesian_L
from parametrix.bayesian_linear_model.statistics import S_x_Bayesian_L
""" This scripts shows the evolution of the MSE versus signal length for 2 linear estimators:
* Exact Least Square estimator (LSE),
* Least Square estimator (LSE) with model mismatch.
The Cramer Rao is also plotted for comparison. """
H=np.array([[1,2],[3,4],[1,6]])
m_x=np.array([3,5])
C_x=np.array([[1,0.2],[0.2,1]])
C_w=0.1*np.array([[1,0.2,0.05],[0.2,0.5,0.02],[0.05,0.02,0.9]])
signal=M_Bayesian_L(H,m_x,C_x,C_w)
estimator=E_Bayesian_L(H,m_x,C_x,C_w,name="Bayesian estimator")
statistic=S_x_Bayesian_L()
mc=MC_Simulations_MSE("SNR",np.arange(0,100,10),estimator,statistic_list=[statistic],short_param_names="x")
output=mc.trials(signal,nb_trials=1000,verbose=1,plot=1)
plt.show()
|
"""
Tests for dit.profiles.entropy_triangle. Known examples taken from http://arxiv.org/abs/1409.4708 .
"""
from __future__ import division
import pytest
from dit import Distribution
from dit.profiles import EntropyTriangle, EntropyTriangle2
ex1 = Distribution(['000', '001', '010', '011', '100', '101', '110', '111'], [1/8]*8)
ex2 = Distribution(['000', '111'], [1/2]*2)
ex3 = Distribution(['000', '001', '110', '111'], [1/4]*4)
ex4 = Distribution(['000', '011', '101', '110'], [1/4]*4)
@pytest.mark.parametrize(('d', 'val'), [
(ex1, (0, 0, 1)),
(ex2, (0, 1, 0)),
(ex3, (0, 2/3, 1/3)),
(ex4, (0, 1, 0)),
])
def test_et_1(d, val):
"""
Test EntropyTriangle against known values.
"""
assert EntropyTriangle(d).points[0] == val
@pytest.mark.parametrize('val', [(0, 0, 1), (0, 1, 0), (0, 2/3, 1/3), (0, 1, 0)])
def test_et_2(val):
"""
Test EntropyTriangle against known values.
"""
et = EntropyTriangle([ex1, ex2, ex3, ex4])
assert val in et.points
@pytest.mark.parametrize(('d', 'val'), [
(ex1, (1, 0, 0)),
(ex2, (0, 2/3, 1/3)),
(ex3, (1/3, 1/3, 1/3)),
(ex4, (0, 1/3, 2/3)),
])
def test_et2_1(d, val):
"""
Test EntropyTriangle2 against known values.
"""
assert EntropyTriangle2(d).points[0] == val
@pytest.mark.parametrize('val', [(1, 0, 0), (0, 2/3, 1/3), (1/3, 1/3, 1/3), (0, 1/3, 2/3)])
def test_et2_2(val):
"""
    Test EntropyTriangle2 against known values.
"""
et = EntropyTriangle2([ex1, ex2, ex3, ex4])
assert val in et.points
|
from __future__ import absolute_import
from six.moves.urllib.parse import urlencode
from datetime import timedelta
from django.core.urlresolvers import reverse
from django.utils import timezone
from sentry.models import EventUser, GroupTagValue, OrganizationMemberTeam
from sentry.testutils import APITestCase
class OrganizationUserIssuesSearchTest(APITestCase):
def setUp(self):
super(OrganizationUserIssuesSearchTest, self).setUp()
self.org = self.create_organization()
self.org.flags.allow_joinleave = False
self.org.save()
self.team1 = self.create_team(organization=self.org)
self.team2 = self.create_team(organization=self.org)
self.project1 = self.create_project(team=self.team1)
self.project2 = self.create_project(team=self.team2)
group1 = self.create_group(
project=self.project1, last_seen=timezone.now() - timedelta(minutes=1)
)
group2 = self.create_group(project=self.project2)
EventUser.objects.create(email='foo@example.com', project=self.project1)
EventUser.objects.create(email='bar@example.com', project=self.project1)
EventUser.objects.create(email='foo@example.com', project=self.project2)
GroupTagValue.objects.create(
key='sentry:user',
value='email:foo@example.com',
group_id=group1.id,
project_id=self.project1.id
)
GroupTagValue.objects.create(
key='sentry:user',
value='email:bar@example.com',
group_id=group1.id,
project_id=self.project1.id
)
GroupTagValue.objects.create(
key='sentry:user',
value='email:foo@example.com',
group_id=group2.id,
project_id=self.project2.id
)
def get_url(self):
return reverse('sentry-api-0-organization-issue-search', args=[self.org.slug])
def test_no_team_access(self):
user = self.create_user()
self.create_member(user=user, organization=self.org)
self.login_as(user=user)
url = '%s?%s' % (self.get_url(), urlencode({'email': 'foo@example.com'}))
response = self.client.get(url, format='json')
assert response.status_code == 200
assert len(response.data) == 0
def test_has_access(self):
user = self.create_user()
member = self.create_member(user=user, organization=self.org)
self.login_as(user=user)
OrganizationMemberTeam.objects.create(
team=self.team1,
organizationmember=member,
is_active=True,
)
url = '%s?%s' % (self.get_url(), urlencode({'email': 'foo@example.com'}))
response = self.client.get(url, format='json')
# result shouldn't include results from team2/project2 or bar@example.com
assert response.status_code == 200
assert len(response.data) == 1
assert response.data[0]['project']['slug'] == self.project1.slug
OrganizationMemberTeam.objects.create(
team=self.team2,
organizationmember=member,
is_active=True,
)
response = self.client.get(url, format='json')
# now result should include results from team2/project2
assert response.status_code == 200
assert len(response.data) == 2
assert response.data[0]['project']['slug'] == self.project2.slug
assert response.data[1]['project']['slug'] == self.project1.slug
|
from __future__ import print_function, division, absolute_import
from marvin.tests.api.conftest import ApiPage
import pytest
@pytest.mark.parametrize('page', [('api', 'mangaid2plateifu')], ids=['mangaid2plateifu'], indirect=True)
class TestGeneralMangaid2Plateifu(object):
@pytest.mark.parametrize('reqtype', [('get'), ('post')])
def test_getplateifu_success(self, galaxy, page, params, reqtype):
data = galaxy.plateifu
page.load_page(reqtype, page.url.format(mangaid=galaxy.mangaid), params=params)
page.assert_success(data)
@pytest.mark.parametrize('reqtype', [('get'), ('post')])
@pytest.mark.parametrize('mangaid', [('1209232')], ids=['badid'])
def test_getplateifu_noresult(self, mangaid, page, params, reqtype):
data = None
error = "manga2plateifu failed with error: no plate-ifus found for mangaid={0}".format(mangaid)
page.load_page(reqtype, page.url.format(mangaid=mangaid), params=params)
assert page.json['status'] == -1
assert page.json['error'] == error
@pytest.mark.parametrize('reqtype', [('get'), ('post')])
@pytest.mark.parametrize('mangaid, missing, errmsg', [(None, 'release', 'Missing data for required field.'),
('12', 'mangaid', 'Length must be between 4 and 20.')],
ids=['norelease', 'shortname'])
def test_getplateifu_failure(self, galaxy, page, reqtype, params, mangaid, missing, errmsg):
if mangaid is None:
page.route_no_valid_params(page.url.format(mangaid=galaxy.mangaid), missing, reqtype=reqtype, errmsg=errmsg)
else:
page.route_no_valid_params(page.url.format(mangaid=mangaid), missing, reqtype=reqtype, params=params, errmsg=errmsg)
@pytest.mark.parametrize('page', [('api', 'nsa_full')], ids=['nsa_full'], indirect=True)
class TestGeneralNSAFull(object):
@pytest.mark.parametrize('reqtype', [('get'), ('post')])
def test_getnsa_success(self, galaxy, page, params, reqtype):
page.load_page(reqtype, page.url.format(mangaid=galaxy.mangaid), params=params)
page.assert_success(galaxy.nsa_data['nsa'])
@pytest.mark.parametrize('reqtype', [('get'), ('post')])
@pytest.mark.parametrize('mangaid', [('1209232')], ids=['badid'])
def test_getnsa_noresult(self, mangaid, page, params, reqtype):
error = "get_nsa_data failed with error: get_nsa_data: cannot find NSA row for mangaid={0}".format(mangaid)
page.load_page(reqtype, page.url.format(mangaid=mangaid), params=params)
assert page.json['data'] is None
assert page.json['status'] == -1
assert page.json['error'] == error
@pytest.mark.parametrize('page', [('api', 'nsa_drpall')], ids=['nsa_drpall'], indirect=True)
class TestGeneralNSADrpall(object):
@pytest.mark.parametrize('reqtype', [('get'), ('post')])
def test_getnsa_success(self, galaxy, page, params, reqtype):
page.load_page(reqtype, page.url.format(mangaid=galaxy.mangaid), params=params)
page.assert_success(galaxy.nsa_data['drpall'])
@pytest.mark.parametrize('reqtype', [('get'), ('post')])
@pytest.mark.parametrize('mangaid', [('1209232')], ids=['badid'])
def test_getnsa_noresult(self, mangaid, page, params, reqtype):
error = "get_nsa_data failed with error: no plate-ifus found for mangaid={0}".format(mangaid)
page.load_page(reqtype, page.url.format(mangaid=mangaid), params=params)
assert page.json['data'] is None
assert page.json['status'] == -1
assert page.json['error'] == error
|
def extractTaffyTranslations(item):
"""
"""
vol, chp, frag, postfix = extractVolChapterFragmentPostfix(item['title'])
if not (chp or vol) or 'preview' in item['title'].lower():
return None
tagmap = [
('CCM', 'Close Combat Mage', 'translated'),
('CC', 'Cheating Craft', 'translated'),
('KSM', 'Key of Sunken Moon', 'translated'),
('YBCB', 'Yu Brothers\' Case Book', 'translated'),
]
for tagname, name, tl_type in tagmap:
if tagname in item['tags']:
return buildReleaseMessageWithType(item, name, vol, chp, frag=frag, postfix=postfix, tl_type=tl_type)
return False
|
"""
negotiated.py
Created by Thomas Mangin on 2012-07-19.
Copyright (c) 2009-2015 Exa Networks. All rights reserved.
"""
from exabgp.bgp.message.open.asn import ASN
from exabgp.bgp.message.open.asn import AS_TRANS
from exabgp.bgp.message.open.holdtime import HoldTime
from exabgp.bgp.message.open.capability import Capability
from exabgp.bgp.message.open.capability import REFRESH
from exabgp.bgp.message.open.routerid import RouterID
class Negotiated (object):
def __init__ (self,neighbor):
self.neighbor = neighbor
self.sent_open = None
self.received_open = None
self.holdtime = HoldTime(0)
self.local_as = ASN(0)
self.peer_as = ASN(0)
self.families = []
self.asn4 = False
self.addpath = RequirePath()
self.multisession = False
self.msg_size = 4096
self.operational = False
self.refresh = REFRESH.absent
self.aigp = None
def sent (self,sent_open):
self.sent_open = sent_open
if self.received_open:
self._negotiate()
def received (self,received_open):
self.received_open = received_open
if self.sent_open:
self._negotiate()
#else:
# import pdb; pdb.set_trace()
def _negotiate (self):
sent_capa = self.sent_open.capabilities
recv_capa = self.received_open.capabilities
self.holdtime = HoldTime(min(self.sent_open.hold_time,self.received_open.hold_time))
self.addpath.setup(self.sent_open,self.received_open)
self.asn4 = sent_capa.announced(Capability.ID.FOUR_BYTES_ASN) and recv_capa.announced(Capability.ID.FOUR_BYTES_ASN)
self.operational = sent_capa.announced(Capability.ID.OPERATIONAL) and recv_capa.announced(Capability.ID.OPERATIONAL)
self.local_as = self.sent_open.asn
self.peer_as = self.received_open.asn
if self.received_open.asn == AS_TRANS:
self.peer_as = recv_capa[Capability.ID.FOUR_BYTES_ASN]
self.families = []
if recv_capa.announced(Capability.ID.MULTIPROTOCOL) \
and sent_capa.announced(Capability.ID.MULTIPROTOCOL):
for family in recv_capa[Capability.ID.MULTIPROTOCOL]:
if family in sent_capa[Capability.ID.MULTIPROTOCOL]:
self.families.append(family)
if recv_capa.announced(Capability.ID.ENHANCED_ROUTE_REFRESH) and sent_capa.announced(Capability.ID.ENHANCED_ROUTE_REFRESH):
self.refresh=REFRESH.enhanced
elif recv_capa.announced(Capability.ID.ROUTE_REFRESH) and sent_capa.announced(Capability.ID.ROUTE_REFRESH):
self.refresh=REFRESH.normal
self.multisession = sent_capa.announced(Capability.ID.MULTISESSION) and recv_capa.announced(Capability.ID.MULTISESSION)
self.multisession |= sent_capa.announced(Capability.ID.MULTISESSION_CISCO) and recv_capa.announced(Capability.ID.MULTISESSION_CISCO)
if self.multisession:
sent_ms_capa = set(sent_capa[Capability.ID.MULTISESSION])
recv_ms_capa = set(recv_capa[Capability.ID.MULTISESSION])
if sent_ms_capa == set([]):
sent_ms_capa = set([Capability.ID.MULTIPROTOCOL])
if recv_ms_capa == set([]):
recv_ms_capa = set([Capability.ID.MULTIPROTOCOL])
if sent_ms_capa != recv_ms_capa:
self.multisession = (2,8,'multisession, our peer did not reply with the same sessionid')
# The way we implement MS-BGP, we only send one MP per session
# therefore we can not collide due to the way we generate the configuration
for capa in sent_ms_capa:
                # no need to check that the capability exists, we generated it;
                # we checked it is what we sent, and we only send MULTIPROTOCOL
if sent_capa[capa] != recv_capa[capa]:
self.multisession = (2,8,'when checking session id, capability %s did not match' % str(capa))
break
elif sent_capa.announced(Capability.ID.MULTISESSION):
self.multisession = (2,9,'multisession is mandatory with this peer')
# XXX: Does not work as the capa is not yet defined
#if received_open.capabilities.announced(Capability.ID.EXTENDED_MESSAGE) \
#and sent_open.capabilities.announced(Capability.ID.EXTENDED_MESSAGE):
# if self.peer.bgp.received_open_size:
# self.received_open_size = self.peer.bgp.received_open_size - 19
def validate (self,neighbor):
if not self.asn4:
if neighbor.local_as.asn4():
return (2,0,'peer does not speak ASN4, we are stuck')
else:
# we will use RFC 4893 to convey new ASN to the peer
# XXX: FIXME
pass
if self.peer_as != neighbor.peer_as:
return (2,2,'ASN in OPEN (%d) did not match ASN expected (%d)' % (self.received_open.asn,neighbor.peer_as))
# RFC 6286 : http://tools.ietf.org/html/rfc6286
#if message.router_id == RouterID('0.0.0.0'):
# message.router_id = RouterID(ip)
if self.received_open.router_id == RouterID('0.0.0.0'):
return (2,3,'0.0.0.0 is an invalid router_id')
if self.received_open.asn == neighbor.local_as:
# router-id must be unique within an ASN
if self.received_open.router_id == neighbor.router_id:
                return (2,3,'BGP Identifier collision, same router-id (%s) on both sides of this IBGP session' % self.received_open.router_id)
if self.received_open.hold_time and self.received_open.hold_time < 3:
return (2,6,'Hold Time is invalid (%d)' % self.received_open.hold_time)
if self.multisession not in (True,False):
            # XXX: FIXME: should we not use a string and perform a split like we do elsewhere?
            # XXX: FIXME: or should we use this trick in the other case?
return self.multisession
return None
class RequirePath (object):
REFUSE = 0
ACCEPT = 1
ANNOUNCE = 2
def __init__ (self):
self._send = {}
self._receive = {}
def setup (self,received_open,sent_open):
# A Dict always returning False
class FalseDict (dict):
def __getitem__(self,key):
return False
receive = received_open.capabilities.get(Capability.ID.ADD_PATH,FalseDict())
send = sent_open.capabilities.get(Capability.ID.ADD_PATH,FalseDict())
        # python 2.4 compatibility means no simple set union, so build the union manually
union = []
union.extend(send.keys())
union.extend([k for k in receive.keys() if k not in send.keys()])
for k in union:
self._send[k] = bool(receive.get(k,self.REFUSE) & self.ANNOUNCE and send.get(k,self.REFUSE) & self.ACCEPT)
self._receive[k] = bool(receive.get(k,self.REFUSE) & self.ACCEPT and send.get(k,self.REFUSE) & self.ANNOUNCE)
def send (self,afi,safi):
return self._send.get((afi,safi),False)
def receive (self,afi,safi):
return self._receive.get((afi,safi),False)
|
import math
import torch
import torch.nn.functional as F
from fairseq import metrics, modules, utils
from fairseq.criterions import FairseqCriterion, register_criterion
@register_criterion('masked_lm')
class MaskedLmLoss(FairseqCriterion):
"""
Implementation for the loss used in masked language model (MLM) training.
"""
def __init__(self, task, tpu):
super().__init__(task)
self.tpu = tpu
def forward(self, model, sample, reduce=True):
"""Compute the loss for the given sample.
Returns a tuple with three elements:
1) the loss
2) the sample size, which is used as the denominator for the gradient
3) logging outputs to display while training
"""
masked_tokens = sample['target'].ne(self.padding_idx)
sample_size = masked_tokens.int().sum()
# Rare: when all tokens are masked, project all tokens.
# We use torch.where to avoid device-to-host transfers,
# except on CPU where torch.where is not well supported
# (see github.com/pytorch/pytorch/issues/26247).
if self.tpu:
masked_tokens = None # always project all tokens on TPU
elif masked_tokens.device == torch.device('cpu'):
if not masked_tokens.any():
masked_tokens = None
else:
masked_tokens = torch.where(
masked_tokens.any(),
masked_tokens,
masked_tokens.new([True]),
)
logits = model(**sample['net_input'], masked_tokens=masked_tokens)[0]
targets = model.get_targets(sample, [logits])
if masked_tokens is not None:
targets = targets[masked_tokens]
loss = modules.cross_entropy(
logits.view(-1, logits.size(-1)),
targets.view(-1),
reduction='sum',
ignore_index=self.padding_idx,
)
logging_output = {
'loss': loss if self.tpu else loss.data,
'ntokens': sample['ntokens'],
'nsentences': sample['nsentences'],
'sample_size': sample_size,
}
return loss, sample_size, logging_output
@staticmethod
def reduce_metrics(logging_outputs) -> None:
"""Aggregate logging outputs from data parallel training."""
loss_sum = sum(log.get('loss', 0) for log in logging_outputs)
sample_size = sum(log.get('sample_size', 0) for log in logging_outputs)
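        # the summed loss is a natural-log cross-entropy; dividing by ln(2)
        # converts the reported per-token loss to bits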
metrics.log_scalar('loss', loss_sum / sample_size / math.log(2), sample_size, round=3)
metrics.log_derived('ppl', lambda meters: utils.get_perplexity(meters['loss'].avg))
@staticmethod
def logging_outputs_can_be_summed() -> bool:
"""
Whether the logging outputs returned by `forward` can be summed
across workers prior to calling `reduce_metrics`. Setting this
        to True will improve distributed training speed.
"""
return True
|
import datetime
import unittest
from django.conf import settings
from django.core import management
from django.core.management.color import no_style
from django.db import backend, connection, DEFAULT_DB_ALIAS
from django.db.backends.signals import connection_created
from django.test import TestCase
from regressiontests.backends import models
class Callproc(unittest.TestCase):
def test_dbms_session(self):
# If the backend is Oracle, test that we can call a standard
# stored procedure through our cursor wrapper.
if settings.DATABASES[DEFAULT_DB_ALIAS]['ENGINE'] == 'django.db.backends.oracle':
convert_unicode = backend.convert_unicode
cursor = connection.cursor()
cursor.callproc(convert_unicode('DBMS_SESSION.SET_IDENTIFIER'),
[convert_unicode('_django_testing!'),])
return True
else:
return True
def test_cursor_var(self):
# If the backend is Oracle, test that we can pass cursor variables
# as query parameters.
if settings.DATABASES[DEFAULT_DB_ALIAS]['ENGINE'] == 'django.db.backends.oracle':
cursor = connection.cursor()
var = cursor.var(backend.Database.STRING)
cursor.execute("BEGIN %s := 'X'; END; ", [var])
self.assertEqual(var.getvalue(), 'X')
class LongString(unittest.TestCase):
def test_long_string(self):
# If the backend is Oracle, test that we can save a text longer
# than 4000 chars and read it properly
if settings.DATABASES[DEFAULT_DB_ALIAS]['ENGINE'] == 'django.db.backends.oracle':
c = connection.cursor()
c.execute('CREATE TABLE ltext ("TEXT" NCLOB)')
long_str = ''.join([unicode(x) for x in xrange(4000)])
c.execute('INSERT INTO ltext VALUES (%s)',[long_str])
c.execute('SELECT text FROM ltext')
row = c.fetchone()
self.assertEquals(long_str, row[0].read())
c.execute('DROP TABLE ltext')
class DateQuotingTest(TestCase):
def test_django_date_trunc(self):
"""
Test the custom ``django_date_trunc method``, in particular against
fields which clash with strings passed to it (e.g. 'year') - see
#12818__.
__: http://code.djangoproject.com/ticket/12818
"""
updated = datetime.datetime(2010, 2, 20)
models.SchoolClass.objects.create(year=2009, last_updated=updated)
years = models.SchoolClass.objects.dates('last_updated', 'year')
self.assertEqual(list(years), [datetime.datetime(2010, 1, 1, 0, 0)])
def test_django_extract(self):
"""
Test the custom ``django_extract method``, in particular against fields
which clash with strings passed to it (e.g. 'day') - see #12818__.
__: http://code.djangoproject.com/ticket/12818
"""
updated = datetime.datetime(2010, 2, 20)
models.SchoolClass.objects.create(year=2009, last_updated=updated)
classes = models.SchoolClass.objects.filter(last_updated__day=20)
self.assertEqual(len(classes), 1)
class ParameterHandlingTest(TestCase):
def test_bad_parameter_count(self):
"An executemany call with too many/not enough parameters will raise an exception (Refs #12612)"
cursor = connection.cursor()
query = ('INSERT INTO %s (%s, %s) VALUES (%%s, %%s)' % (
connection.introspection.table_name_converter('backends_square'),
connection.ops.quote_name('root'),
connection.ops.quote_name('square')
))
self.assertRaises(Exception, cursor.executemany, query, [(1,2,3),])
self.assertRaises(Exception, cursor.executemany, query, [(1,),])
if settings.DATABASES[DEFAULT_DB_ALIAS]['ENGINE'] != 'django.db.backends.mysql':
class LongNameTest(TestCase):
"""Long primary keys and model names can result in a sequence name
that exceeds the database limits, which will result in truncation
on certain databases (e.g., Postgres). The backend needs to use
the correct sequence name in last_insert_id and other places, so
        check that it does. Refs #8901.
"""
def test_sequence_name_length_limits_create(self):
"""Test creation of model with long name and long pk name doesn't error. Ref #8901"""
models.VeryLongModelNameZZZZZZZZZZZZZZZZZZZZZZZZZZZZZZZZZZZZZZZZZZZZZZZZZZZZZ.objects.create()
def test_sequence_name_length_limits_m2m(self):
"""Test an m2m save of a model with a long name and a long m2m field name doesn't error as on Django >=1.2 this now uses object saves. Ref #8901"""
obj = models.VeryLongModelNameZZZZZZZZZZZZZZZZZZZZZZZZZZZZZZZZZZZZZZZZZZZZZZZZZZZZZ.objects.create()
rel_obj = models.Person.objects.create(first_name='Django', last_name='Reinhardt')
obj.m2m_also_quite_long_zzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzz.add(rel_obj)
def test_sequence_name_length_limits_flush(self):
"""Test that sequence resetting as part of a flush with model with long name and long pk name doesn't error. Ref #8901"""
# A full flush is expensive to the full test, so we dig into the
# internals to generate the likely offending SQL and run it manually
# Some convenience aliases
VLM = models.VeryLongModelNameZZZZZZZZZZZZZZZZZZZZZZZZZZZZZZZZZZZZZZZZZZZZZZZZZZZZZ
VLM_m2m = VLM.m2m_also_quite_long_zzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzz.through
tables = [
VLM._meta.db_table,
VLM_m2m._meta.db_table,
]
sequences = [
{
'column': VLM._meta.pk.column,
'table': VLM._meta.db_table
},
]
cursor = connection.cursor()
for statement in connection.ops.sql_flush(no_style(), tables, sequences):
cursor.execute(statement)
def connection_created_test(sender, **kwargs):
print 'connection_created signal'
__test__ = {'API_TESTS': """
>>> from django.db.backends.postgresql import version as pg_version
>>> pg_version._parse_version("PostgreSQL 8.3.1 on i386-apple-darwin9.2.2, compiled by GCC i686-apple-darwin9-gcc-4.0.1 (GCC) 4.0.1 (Apple Inc. build 5478)")
(8, 3, 1)
>>> pg_version._parse_version("PostgreSQL 8.3.6")
(8, 3, 6)
>>> pg_version._parse_version("PostgreSQL 8.3")
(8, 3, None)
>>> pg_version._parse_version("EnterpriseDB 8.3")
(8, 3, None)
>>> pg_version._parse_version("PostgreSQL 8.3 beta4")
(8, 3, None)
>>> pg_version._parse_version("PostgreSQL 8.4beta1")
(8, 4, None)
"""}
if settings.DATABASES[DEFAULT_DB_ALIAS]['ENGINE'] != 'django.db.backends.sqlite3':
__test__['API_TESTS'] += """
>>> connection_created.connect(connection_created_test)
>>> connection.close() # Ensure the connection is closed
>>> cursor = connection.cursor()
connection_created signal
>>> connection_created.disconnect(connection_created_test)
>>> cursor = connection.cursor()
"""
if __name__ == '__main__':
unittest.main()
|
import unittest
import numpy as np
import scipy.sparse as sp
from multimodal.lib.array_utils import normalize_features
from multimodal.evaluation import (evaluate_label_reco,
evaluate_NN_label,
chose_examples)
class TestLabelEvaluation(unittest.TestCase):
def test(self):
labels = [2, 0]
reco = np.array([[.1, .5, .6, .1],
[.6, .5, .2, .1]])
good = evaluate_label_reco(reco, labels)
self.assertEqual(good, 1.)
bad = evaluate_label_reco(reco[[1, 0], :], labels)
self.assertEqual(bad, 0.)
medium = evaluate_label_reco(reco[[1, 1], :], labels)
self.assertEqual(medium, .5)
def test_fails_on_multiple_labels(self):
labels = [[2], [0]]
reco = np.array([[.1, .5, .6, .1],
[.6, .5, .2, .1]])
with self.assertRaises(AssertionError):
evaluate_label_reco(reco, labels)
class TestNNEvaluation(unittest.TestCase):
def setUp(self):
self.labels_a = np.random.randint(10, size=13)
self.labels_b = [i for i in reversed(range(10))]
# Encode label on third coordinate of a and fourth of b
self.a = np.random.random((13, 5))
for i in range(13):
self.a[i, 2] = self.labels_a[i]
self.b = np.random.random((10, 5))
for i in range(10):
self.b[i, 3] = self.labels_b[i]
def fake_metrics(self, a, b, axis=-1):
assert(axis == -1) # Test does not work if not...
return 1. - (a[:, :, 2] == b[:, :, 3])
def test_good_on_fake_measure(self):
self.assertEqual(evaluate_NN_label(self.a, self.b, self.labels_a,
self.labels_b, self.fake_metrics
), 1.)
def test_bad_on_fake_measure(self):
self.assertEqual(evaluate_NN_label(self.a, 1 + self.b,
self.labels_a, self.labels_b,
self.fake_metrics), 0.)
def test_on_fake_measure_sparse(self):
a = sp.lil_matrix(self.a).tocsr()
b = sp.lil_matrix(self.b).tocsr()
self.assertEqual(
evaluate_NN_label(a, b, self.labels_a, self.labels_b,
self.fake_metrics),
1.)
self.assertEqual(
evaluate_NN_label(a, 1 + self.b, self.labels_a,
self.labels_b, self.fake_metrics),
0.)
class TestChoseExamples(unittest.TestCase):
def setUp(self):
self.label_set = list(range(3))
self.labels = self.label_set * 5
np.random.seed(0)
np.random.shuffle(self.labels)
def test_choses_as_many_examples_as_labels(self):
r = chose_examples(self.labels, self.label_set)
self.assertEqual(len(r), len(self.label_set))
r = chose_examples(self.labels) # And without giving labels
self.assertEqual(len(r), len(self.label_set))
def test_choses_twice_as_many_examples_as_labels(self):
r = chose_examples(self.labels, self.label_set, number=2)
self.assertEqual(len(r), 2 * len(self.label_set))
def test_all_chosen_are_indices(self):
r = chose_examples(self.labels, self.label_set, number=2)
assert(all([0 <= i < len(self.labels) for i in r]))
def test_all_labels_are_chosen_once(self):
r = chose_examples(self.labels, self.label_set)
lab = [self.labels[i] for i in r]
assert(all([lab.count(l) == 1 for l in self.label_set]))
def test_all_labels_are_chosen_twice(self):
r = chose_examples(self.labels, self.label_set, number=2)
lab = [self.labels[i] for i in r]
assert(all([lab.count(l) == 2 for l in self.label_set]))
class TestNormalizeFeatures(unittest.TestCase):
def setUp(self):
self.mat = np.random.random((32, 13))
self.mat = 10. * self.mat * (self.mat < .2)
def test_on_sparse_same_shape(self):
m = self.mat
m[0, :] += 1 # Ensures that no column has zero sum
m = sp.csc_matrix(m)
norm = normalize_features(m)
assert(np.allclose(norm.sum(axis=0), 1))
self.assertEqual(norm.shape, m.shape)
def test_removes_columns_sparse(self):
m = self.mat
m[0, :] += 1 # Ensures that no column has zero sum
m[:, [1, 3]] = 0 # Ensures column 1 and 3 have zero sum
m = sp.csc_matrix(m)
norm = normalize_features(m)
assert(np.allclose(norm.sum(axis=0), 1))
self.assertEqual(norm.shape, (m.shape[0], m.shape[1] - 2))
def test_on_dense_same_shape(self):
m = self.mat
m[0, :] += 1 # Ensures that no column has zero sum
norm = normalize_features(m)
assert(np.allclose(norm.sum(axis=0), 1))
self.assertEqual(norm.shape, m.shape)
def test_removes_columns_dense(self):
m = self.mat
m[0, :] += 1 # Ensures that no column has zero sum
m[:, [1, 3]] = 0 # Ensures column 1 and 3 have zero sum
norm = normalize_features(m)
assert(np.allclose(norm.sum(axis=0), 1))
self.assertEqual(norm.shape, (m.shape[0], m.shape[1] - 2))
def test_same_on_dense_and_sparse(self):
m1 = sp.csc_matrix(self.mat)
m2 = sp.csr_matrix(self.mat)
n = normalize_features(self.mat)
n1 = normalize_features(m1)
n2 = normalize_features(m2)
assert(np.allclose(n1.todense(), n))
assert(np.allclose(n2.todense(), n))
def test_does_not_modify(self):
m = self.mat.copy()
normalize_features(m)
ms = sp.csr_matrix(m)
normalize_features(ms)
assert(np.allclose(m, self.mat))
assert(np.allclose(ms.todense(), self.mat))
def test_OK(self):
n = normalize_features(np.array([[1., 0., 1.5, .1],
[1., 0., .5, .3]]))
ok = np.array([[.5, .75, .25],
[.5, .25, .75]])
assert(np.allclose(n, ok))
|
import roslib; roslib.load_manifest('hanse_navigation')
import rospy
import smach
import smach_ros
import math
import numpy
import collections
import actionlib
import tf
from tf.transformations import euler_from_quaternion
from hanse_navigation.msg import NavigateAction, NavigateFeedback, NavigateResult
from hanse_navigation.cfg import NavigationConfig
from dynamic_reconfigure.server import Server
from hanse_msgs.msg import sollSpeed
from std_msgs.msg import Float64, Float32
from geometry_msgs.msg import PoseStamped, Point, Twist, Vector3
from sensor_msgs.msg import Imu
from nav_msgs.msg import Path
class Config:
hysteresis_goal = 0
hysteresis_heading = 0
forward_max_speed = 0
forward_max_dist = 0
angular_min_speed = 0
angular_max_speed = 0
p_heading = 0
simulator = False
class Global:
abortFlag = False
currentHeading = 0.0
currentPosition = Point()
    # goal handling
    path = collections.deque() # holds the PoseStamped waypoints
currentGoal = None
headingToGoal = 0
distanceToGoal = 0
class States:
Idle = 'Idle'
Paused = 'Paused'
FailedToGotoGoal = 'FailedToGotoGoal'
ReachedGoal = 'ReachedGoal'
AdjustDepth = 'AdjustDepth'
AdjustHeading = 'AdjustHeading'
MoveForward = 'MoveForward'
class Transitions:
HeadingAdjusted = 'HeadingAdjusted'
DepthAdjusted = 'DepthAdjusted'
CloseEnoughToGoal = 'CloseEnoughToGoal'
HasGoal = 'HasGoal'
HeadingAdjustmentNeeded = 'HeadingAdjustmentNeeded'
Idle = 'Idle'
Aborted = 'Aborted'
Exit = 'Exit'
class Idle(smach.State):
def __init__(self):
smach.State.__init__(self, outcomes=[Transitions.HasGoal, Transitions.Exit])
def execute(self, userdata):
rospy.loginfo('Executing state Idle')
        # while idling, the AUV should not move
        setMotorSpeed(0,0)
        # wait until a path is provided
while not rospy.is_shutdown():
Global.abortFlag = False
if len(Global.path) > 0:
Global.currentGoal = Global.path.popleft()
return Transitions.HasGoal
rospy.sleep(0.1)
return Transitions.Exit
class AdjustDepth(smach.State):
def __init__(self):
smach.State.__init__(self, outcomes=[Transitions.DepthAdjusted, Transitions.Aborted])
def execute(self, userdata):
rospy.loginfo('Executing state AdjustDepth')
while not rospy.is_shutdown():
            # check whether the current navigation should be aborted
if Global.abortFlag:
return Transitions.Aborted
rospy.sleep(1.0)
if True:
return Transitions.DepthAdjusted
class AdjustHeading(smach.State):
def __init__(self):
smach.State.__init__(self, outcomes=[Transitions.HeadingAdjusted, Transitions.Aborted, Transitions.Exit])
def execute(self, userdata):
rospy.loginfo('Executing state AdjustHeading')
while not rospy.is_shutdown():
            # check whether the current navigation should be aborted
if Global.abortFlag:
return Transitions.Aborted
#diffHeading = Global.headingToGoal - Global.currentHeading
diffHeading = normalize_angle(Global.headingToGoal - Global.currentHeading)
rospy.loginfo('diffHeading = ' + repr(diffHeading))
            # check whether the heading is within the acceptable range
if math.fabs(diffHeading) < Config.hysteresis_heading:
return Transitions.HeadingAdjusted
            # compute angular speed (positive: rotate left / counter-clockwise)
maxAngSpeed = Config.angular_max_speed
minAngSpeed = Config.angular_min_speed
val = Config.p_heading * diffHeading
if val > 0: val = numpy.clip(val, minAngSpeed, maxAngSpeed)
if val < 0: val = numpy.clip(val, -maxAngSpeed, -minAngSpeed)
if Config.simulator:
setMotorSpeed(0, -val)
else:
setMotorSpeed(0, val)
rospy.sleep(0.1)
return Transitions.Exit
class MoveForward(smach.State):
def __init__(self):
smach.State.__init__(self, outcomes=[Transitions.CloseEnoughToGoal, Transitions.HeadingAdjustmentNeeded, Transitions.Aborted, Transitions.Exit])
def execute(self, userdata):
rospy.loginfo('Executing state MoveForward')
while not rospy.is_shutdown():
            # check whether the current navigation should be aborted
if Global.abortFlag:
return Transitions.Aborted
            # check whether the AUV is close enough to the goal
if closeEnoughToGoal():
return Transitions.CloseEnoughToGoal
            # check whether the heading needs to be corrected
if math.fabs(normalize_angle(Global.headingToGoal - Global.currentHeading)) > Config.hysteresis_heading:
return Transitions.HeadingAdjustmentNeeded
forwardspeed = Config.forward_max_speed
# Move slower if we are close to the goal.
if Global.distanceToGoal < Config.forward_max_dist:
forwardspeed -= 0.5 * Config.forward_max_speed * (1 - Global.distanceToGoal / Config.forward_max_dist)
setMotorSpeed(forwardspeed, 0)
rospy.sleep(0.1)
return Transitions.Exit
class ReachedGoal(smach.State):
def __init__(self):
smach.State.__init__(self, outcomes=[Transitions.Idle])
def execute(self, userdata):
rospy.loginfo('Executing state ReachedGoal')
setMotorSpeed(0,0)
Global.currentGoal = None
        Global.actionServer.set_succeeded() # mark the action as completed successfully
return Transitions.Idle
def closeEnoughToGoal():
if Global.currentGoal == None:
return False
distanceToGoal = math.sqrt( math.pow(Global.currentPosition.x - Global.currentGoal.pose.position.x, 2) +
math.pow(Global.currentPosition.y - Global.currentGoal.pose.position.y, 2) )
rospy.loginfo('distance to goal: ' + repr(distanceToGoal))
return distanceToGoal < Config.hysteresis_goal
#rospy.loginfo('imuCallback: ' + repr(Global.currentHeading))
def positionCallback(msg):
Global.currentPosition = msg.pose.position
    # if currently navigating to a goal, update the relevant values
if Global.currentGoal!=None:
dx = Global.currentGoal.pose.position.x - Global.currentPosition.x
dy = Global.currentGoal.pose.position.y - Global.currentPosition.y
Global.headingToGoal = normalize_angle(math.atan2(dy, dx))
Global.distanceToGoal = math.sqrt(dx*dx + dy*dy)
rospy.loginfo('headingToGoal='+repr(Global.headingToGoal)+' ### currentHeading='+repr(Global.currentHeading))
q = msg.pose.orientation
(roll,pitch,yaw) = euler_from_quaternion([q.x, q.y, q.z, q.w])
Global.currentHeading = yaw
def goalCallback(msg):
Global.actionServer.set_aborted()
Global.abortFlag = True
Global.path = collections.deque()
Global.path.append(msg)
def timerCallback(event):
p = Path()
p.header.frame_id = '/map'
    # add the current position
currentPose = PoseStamped()
currentPose.header.frame_id = '/map'
currentPose.pose.position = Global.currentPosition
p.poses.append(currentPose)
    # add the waypoints
if Global.currentGoal != None:
p.poses.append(Global.currentGoal)
for goal in Global.path:
p.poses.append(goal)
pub_path.publish(p)
def configCallback(config, level):
    rospy.loginfo('Reconfigure Request: ' + repr(config['forward_max_speed']))
Config.hysteresis_heading = config['hysteresis_heading']
Config.hysteresis_goal = config['hysteresis_goal']
Config.forward_max_speed = config['forward_max_speed']
Config.forward_max_dist = config['forward_max_dist']
Config.angular_min_speed = config['angular_min_speed']
Config.angular_max_speed = config['angular_max_speed']
Config.p_heading = config['p_heading']
Config.simulator = config['simulator']
return config
def setMotorSpeed(lin, ang):
rospy.loginfo("angularoutput: " + repr(ang))
twist = Twist(linear=Vector3(x=lin,z=0), angular=Vector3(z=ang))
pub_cmd_vel.publish(twist)
#left = lin*127 + ang*127
#right = lin*127 - ang*127
    # clamp to the value range -127 to 127
#left = numpy.clip(left, -127, 127)
#right = numpy.clip(right, -127, 127)
    # publish the messages to the motors
#pub_motor_left.publish(sollSpeed(data = left))
#pub_motor_right.publish(sollSpeed(data = right))
def normalize_angle_positive(angle):
return math.fmod(math.fmod(angle, 2.0*math.pi) + 2.0*math.pi, 2.0*math.pi)
def normalize_angle(angle):
a = normalize_angle_positive(angle)
if a > math.pi:
a -= 2.0 *math.pi
return a
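# Illustration: normalize_angle wraps any angle (radians) into (-pi, pi], e.g.
#   normalize_angle(3*math.pi)    -> math.pi
#   normalize_angle(-3*math.pi/2) -> math.pi/2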
class NavigateActionServer(object):
# create messages that are used to publish feedback/result
_feedback = NavigateFeedback()
_result = NavigateResult()
def __init__(self, name):
self._action_name = name
self._as = actionlib.SimpleActionServer(self._action_name, NavigateAction, execute_cb=self.execute_cb)
self._as.start()
def set_succeeded(self):
rospy.loginfo('NavigateActionServer.set_succeeded')
self._result.successful = True
self._as.set_succeeded(self._result)
def set_aborted(self):
rospy.loginfo('NavigateActionServer.set_aborted')
Global.abortFlag = True
self._as.set_aborted()
Global.path = collections.deque()
Global.currentGoal = None
def execute_cb(self, goal):
rospy.loginfo('NavigateActionServer.execute_cb' + repr(goal))
Global.path = collections.deque()
Global.path.append(goal.goal)
        # the loop is left only when the action server is no longer active or the current goal has changed
rospy.loginfo('is_active='+repr(self._as.is_active()) + ' ### ' + repr(self._as.current_goal.get_goal()==goal))
while self._as.is_active() and self._as.current_goal.get_goal()==goal:
rospy.loginfo('is_active='+repr(self._as.is_active()) + ' ### ' + repr(self._as.current_goal.get_goal()==goal))
            # on preemption, abort the current goal
if self._as.is_preempt_requested():
self.set_aborted()
return
            # publish the current position as feedback
pose = PoseStamped()
pose.pose.position = Global.currentPosition
self._feedback.current_position = pose
self._as.publish_feedback(self._feedback)
rospy.sleep(0.2)
return ''
if __name__ == '__main__':
rospy.init_node('navigation')
    # start the action server
Global.actionServer = NavigateActionServer(rospy.get_name())
# Config server
configSrv = Server(NavigationConfig, configCallback)
# Subscriber/Publisher
if Config.simulator:
rospy.Subscriber('posemeter', PoseStamped, positionCallback)
else:
rospy.Subscriber('position/estimate', PoseStamped, positionCallback)
rospy.Subscriber('/goal', PoseStamped, goalCallback)
pub_cmd_vel = rospy.Publisher('commands/cmd_vel_behaviour', Twist)
#pub_motor_left = rospy.Publisher('/hanse/motors/left', sollSpeed)
#pub_motor_right = rospy.Publisher('/hanse/motors/right', sollSpeed)
pub_path = rospy.Publisher('/path', Path)
    # timer for publishing the current path
rospy.Timer(rospy.Duration(1.0), timerCallback)
# Create a SMACH state machine
sm = smach.StateMachine(outcomes=[Transitions.Exit])
# Open the container
with sm:
# Add states to the container
smach.StateMachine.add(States.Idle, Idle(),
transitions={Transitions.HasGoal : 'GotoGoal'})
smach.StateMachine.add(States.ReachedGoal, ReachedGoal(),
transitions={Transitions.Idle : States.Idle})
# Create the sub SMACH state machine
sm_sub = smach.StateMachine(outcomes=[Transitions.CloseEnoughToGoal, Transitions.Aborted, Transitions.Exit])
# Open the container
with sm_sub:
# Add states to the container
smach.StateMachine.add(States.AdjustDepth, AdjustDepth(),
transitions={Transitions.DepthAdjusted : States.AdjustHeading})
smach.StateMachine.add(States.AdjustHeading, AdjustHeading(),
transitions={Transitions.HeadingAdjusted : States.MoveForward})
smach.StateMachine.add(States.MoveForward, MoveForward(),
transitions={Transitions.HeadingAdjustmentNeeded : States.AdjustHeading})
smach.StateMachine.add('GotoGoal', sm_sub,
transitions={Transitions.CloseEnoughToGoal : States.ReachedGoal,
Transitions.Aborted : States.Idle})
# Create and start the introspection server
sis = smach_ros.IntrospectionServer('server_name', sm, '/SM_ROOT')
sis.start()
# Execute SMACH plan
outcome = sm.execute()
rospy.loginfo('state machine stopped')
#rospy.spin()
sis.stop()
|
import random
from CellModeller.Regulation.ModuleRegulator import ModuleRegulator
from CellModeller.Biophysics.BacterialModels.CLBacterium import CLBacterium
from CellModeller.GUI import Renderers
import numpy
import math
max_cells = 400000
cell_colors = numpy.random.uniform(0,1,(9,3))
def setup(sim):
# Set biophysics, signalling, and regulation models
biophys = CLBacterium(sim, max_substeps=8, max_cells=max_cells, max_contacts=32, max_sqs=192**2, jitter_z=False, reg_param=0.04, gamma=40)
#biophys.addPlane((0,0,-0.5), (0,0,1), 1.0)
#biophys.addPlane((0,0,0.5), (0,0,-1), math.sqrt(7.5e-4))
regul = ModuleRegulator(sim, __file__) # use this file for reg too
# Only biophys and regulation
sim.init(biophys, regul, None, None)
sim.addCell(cellType=0, pos=(0,0,0))
#sim.addCell(cellType=0, pos=(0,-10.0,0))
#sim.addCell(cellType=1, pos=(0,10.0,0))
#sim.addCell(cellType=0, pos=(16,16,0))
#sim.addCell(cellType=1, pos=(0,16,0))
#sim.addCell(cellType=2, pos=(-16,16,0))
#sim.addCell(cellType=3, pos=(16,0,0))
#sim.addCell(cellType=4, pos=(0,0,0))
#sim.addCell(cellType=5, pos=(-16,0,0))
#sim.addCell(cellType=6, pos=(16,-16,0))
#sim.addCell(cellType=7, pos=(0,-16,0))
#sim.addCell(cellType=8, pos=(-16,-16,0))
# Add some objects to draw the models
therenderer = Renderers.GLBacteriumRenderer(sim)
sim.addRenderer(therenderer)
sim.savePickle = True
sim.pickleSteps = 20
def init(cell):
cell.targetVol = 3.5 + random.uniform(0.0,0.5)
cell.growthRate = 1.0
def numSignals():
return 0
def numSpecies():
return 0
def update(cells):
for (id, cell) in cells.iteritems():
cell.color = cell_colors[cell.cellType]
if cell.volume > cell.targetVol:
cell.asymm = [1,1]
cell.divideFlag = True
def divide(parent, d1, d2):
d1.targetVol = 3.5 + random.uniform(0.0,0.5)
d2.targetVol = 3.5 + random.uniform(0.0,0.5)
|
""" Make base line result averages from the output of the get_results command
"""
import sys
import re
print "Making baseline averages with data from:"
for line in sys.stdin:
searchObj = re.search(r'jid\(', line, re.M | re.I)
if searchObj:
        # Do some substitution work here; an older pattern is kept commented below
#info = re.sub(r'\S*jid.*-\d+\s', "", line)
info = re.sub(r'\S*jid.*:[\d-]+\s', "", line)
print info,
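# Hypothetical illustration of the substitution above (the input format is
# assumed, not taken from real data):
#   "jid(1234) avg:2014-03 0.82"  ->  "0.82"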
if __name__ == '__main__':
sys.exit()
|
from __future__ import division, print_function
from collections import defaultdict
from vod.entropy import kullback_leiber_divergence
import numpy as np
import plac
import sys
def load_text_file(features_fpath, classes, use):
#TODO: stemming and category names abbrv
num_classes = len(set(classes))
count_class = [0] * num_classes
prob_col = defaultdict(float)
count_class_col = defaultdict(lambda: defaultdict(float))
with open(features_fpath) as features_file:
for curr_line, line in enumerate(features_file):
spl = line.split()
class_num = classes[curr_line]
if use == 'user':
count_class_col[spl[1]][class_num] += 1
prob_col[spl[1]] += 1
elif use == 'cat':
if len(spl) > 2:
count_class_col[spl[2]][class_num] += 1
prob_col[spl[2]] += 1
else:
for token in spl[3:]:
prob_col[token] += 1
count_class_col[token][class_num] += 1
count_class[int(class_num)] += 1
prob_class = np.array(count_class, dtype='f')
prob_class /= prob_class.sum()
prob_class_col = {}
sum_col = sum(prob_col.values())
for token in count_class_col:
prob_col[token] = prob_col[token] / sum_col
aux = np.zeros(num_classes, dtype='f')
for class_num in xrange(num_classes):
aux[class_num] = count_class_col[token][class_num]
aux /= aux.sum()
prob_class_col[token] = aux
return prob_class, prob_col, prob_class_col
def load_svm_file(features_fpath, classes):
col_dict = {
'EXTERNAL':8,
'FEATURED':9,
'INTERNAL':10,
'MOBILE':11,
'SEARCH':12,
'SOCIAL':13,
'VIRAL':14
}
num_classes = len(set(classes))
count_class = [0] * num_classes
prob_col = defaultdict(float)
count_class_col = defaultdict(lambda: defaultdict(float))
with open(features_fpath) as features_file:
curr_line = 0
for line in features_file:
if '#' in line:
continue
spl = line.split()
for ref_name, col_id in col_dict.items():
ref_abbrv = ref_name
class_num = classes[curr_line]
weight = float(spl[col_id])
prob_col[ref_abbrv] += weight
count_class[int(class_num)] += 1
count_class_col[ref_abbrv][class_num] += weight
curr_line += 1
prob_class = np.array(count_class, dtype='f')
prob_class /= prob_class.sum()
prob_class_col = {}
sum_col = sum(prob_col.values())
for token in count_class_col:
prob_col[token] = prob_col[token] / sum_col
aux = np.zeros(num_classes, dtype='f')
for class_num in xrange(num_classes):
aux[class_num] = count_class_col[token][class_num]
aux /= aux.sum()
prob_class_col[token] = aux
return prob_class, prob_col, prob_class_col
@plac.annotations(features_fpath=plac.Annotation('Input file', type=str),
classes_fpath=plac.Annotation('Video classes file', type=str),
use=plac.Annotation('Indicates which information to use',
type=str,
choices=['user', 'tags', 'cat', 'ref']))
def main(features_fpath, classes_fpath, use):
classes = np.loadtxt(classes_fpath)
if use in {'user', 'tags', 'cat'}:
prob_class, prob_col, prob_class_col = load_text_file(features_fpath,
classes, use)
else:
prob_class, prob_col, prob_class_col = load_svm_file(features_fpath,
classes)
info_gains = []
mutual_info = 0
for token in prob_class_col:
dkl = kullback_leiber_divergence(prob_class_col[token], prob_class)
mutual_info += prob_col[token] * dkl
info_gains.append((dkl, token))
print('Mutual info: ', mutual_info)
for dkl, token in sorted(info_gains, reverse=True):
print(dkl, token)
if __name__ == '__main__':
sys.exit(plac.call(main))
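# --- Editor's note (a sketch of the quantity computed above, not part of the
# original script): the loop in main() accumulates the identity
#     I(C; T) = sum_t p(t) * KL( p(C | t) || p(C) )
# i.e. the mutual information between class C and token T, with each token's
# KL divergence doubling as that token's information gain.
# Example with two equiprobable classes and a token seen only in class 0:
#     p(C) = [0.5, 0.5], p(C|t) = [1.0, 0.0]
#     KL = 1.0 * log2(1.0 / 0.5) = 1 bit of information gain for t.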
|
import unittest
from paws.conf import Config, env
class ConfTest(unittest.TestCase):
def test_attr(self):
        class TestConfig(Config):  # subclass the imported Config; the original referenced an undefined `Conf`
            FOO: int = 0
        self.assertEqual(TestConfig.FOO, 0)
|
from PLC.Faults import *
from PLC.Method import Method
from PLC.Parameter import Parameter, Mixed
from PLC.Table import Row
from PLC.Auth import Auth
from PLC.Namespace import hostname_to_hrn
from PLC.Peers import Peers
from PLC.Sites import Sites
from PLC.Nodes import Node, Nodes
from PLC.TagTypes import TagTypes
from PLC.NodeTags import NodeTags, NodeTag
admin_only = [ 'key', 'session', 'boot_nonce', 'site_id']
can_update = ['hostname', 'node_type', 'boot_state', 'model', 'version'] + admin_only
class UpdateNode(Method):
"""
Updates a node. Only the fields specified in node_fields are
updated, all other fields are left untouched.
PIs and techs can update only the nodes at their sites. Only
admins can update the key, session, and boot_nonce fields.
Returns 1 if successful, faults otherwise.
"""
roles = ['admin', 'pi', 'tech']
accepted_fields = Row.accepted_fields(can_update,Node.fields)
# xxx check the related_fields feature
accepted_fields.update(Node.related_fields)
accepted_fields.update(Node.tags)
accepts = [
Auth(),
Mixed(Node.fields['node_id'],
Node.fields['hostname']),
accepted_fields
]
returns = Parameter(int, '1 if successful')
def call(self, auth, node_id_or_hostname, node_fields):
# split provided fields
[native,related,tags,rejected] = Row.split_fields(node_fields,[Node.fields,Node.related_fields,Node.tags])
# type checking
native = Row.check_fields (native, self.accepted_fields)
if rejected:
raise PLCInvalidArgument("Cannot update Node column(s) %r"%rejected)
# Authenticated function
assert self.caller is not None
# Remove admin only fields
if 'admin' not in self.caller['roles']:
for key in admin_only:
if key in native:
del native[key]
# Get account information
nodes = Nodes(self.api, [node_id_or_hostname])
if not nodes:
raise PLCInvalidArgument("No such node %r"%node_id_or_hostname)
node = nodes[0]
if node['peer_id'] is not None:
raise PLCInvalidArgument("Not a local node %r"%node_id_or_hostname)
# If we are not an admin, make sure that the caller is a
# member of the site at which the node is located.
if 'admin' not in self.caller['roles']:
if node['site_id'] not in self.caller['site_ids']:
                raise PLCPermissionDenied("Not allowed to update nodes at specified site")
# Make requested associations
for (k,v) in related.items():
node.associate(auth, k,v)
node.update(native)
node.update_last_updated(commit=False)
node.sync(commit=True)
        # if hostname was modified make sure to update the hrn
        # tag
if 'hostname' in native:
root_auth = self.api.config.PLC_HRN_ROOT
# sub auth is the login base of this node's site
sites = Sites(self.api, node['site_id'], ['login_base'])
site = sites[0]
login_base = site['login_base']
tags['hrn'] = hostname_to_hrn(root_auth, login_base, node['hostname'])
for (tagname,value) in tags.items():
# the tagtype instance is assumed to exist, just check that
tag_types = TagTypes(self.api,{'tagname':tagname})
if not tag_types:
raise PLCInvalidArgument("No such TagType %s"%tagname)
tag_type = tag_types[0]
node_tags=NodeTags(self.api,{'tagname':tagname,'node_id':node['node_id']})
if not node_tags:
node_tag = NodeTag(self.api)
node_tag['node_id'] = node['node_id']
node_tag['tag_type_id'] = tag_type['tag_type_id']
node_tag['tagname'] = tagname
node_tag['value'] = value
node_tag.sync()
else:
node_tag = node_tags[0]
node_tag['value'] = value
node_tag.sync()
# Logging variables
self.event_objects = {'Node': [node['node_id']]}
if 'hostname' in node:
self.message = 'Node %s updated'%node['hostname']
else:
self.message = 'Node %d updated'%node['node_id']
self.message += " [%s]." % (", ".join(list(node_fields.keys())),)
if 'boot_state' in list(node_fields.keys()):
self.message += ' boot_state updated to %s' % node_fields['boot_state']
return 1
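# --- Editor's usage sketch (assumptions: a PLCAPI XML-RPC endpoint and a
# password auth struct; the URL, hostname and credentials are illustrative,
# not taken from this file) ---
# import xmlrpclib
# api = xmlrpclib.ServerProxy('https://plc.example.org/PLCAPI/')
# auth = {'AuthMethod': 'password', 'Username': 'pi@site.example.org',
#         'AuthString': 'secret'}
# api.UpdateNode(auth, 'node1.site.example.org', {'boot_state': 'reinstall'})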
|
from django import template
from django.utils.encoding import iri_to_uri
register = template.Library()
class PrefixNode(template.Node):
def __repr__(self):
return "<PrefixNode for %r>" % self.name
def __init__(self, varname=None, name=None):
if name is None:
raise template.TemplateSyntaxError(
"Prefix nodes must be given a name to return.")
self.varname = varname
self.name = name
@classmethod
def handle_token(cls, parser, token, name):
"""
Class method to parse prefix node and return a Node.
"""
tokens = token.contents.split()
if len(tokens) > 1 and tokens[1] != 'as':
raise template.TemplateSyntaxError(
"First argument in '%s' must be 'as'" % tokens[0])
if len(tokens) > 1:
varname = tokens[2]
else:
varname = None
return cls(varname, name)
@classmethod
def handle_simple(cls, name):
try:
from staticfiles.conf import settings
except ImportError:
prefix = ''
else:
prefix = iri_to_uri(getattr(settings, name, ''))
return prefix
def render(self, context):
prefix = self.handle_simple(self.name)
if self.varname is None:
return prefix
context[self.varname] = prefix
return ''
@register.tag
def get_static_prefix(parser, token):
"""
Populates a template variable with the static prefix,
``settings.STATIC_URL``.
Usage::
{% get_static_prefix [as varname] %}
Examples::
{% get_static_prefix %}
{% get_static_prefix as static_prefix %}
"""
return PrefixNode.handle_token(parser, token, "STATIC_URL")
@register.tag
def get_media_prefix(parser, token):
"""
    Populates a template variable with the media prefix,
    ``settings.MEDIA_URL``.
Usage::
{% get_media_prefix [as varname] %}
Examples::
{% get_media_prefix %}
{% get_media_prefix as media_prefix %}
"""
return PrefixNode.handle_token(parser, token, "MEDIA_URL")
|
def pal(value):
return str(value) == str(value)[::-1]
p = 1
for i in xrange(999, 99, -1):
for j in xrange(999, 99, -1):
n = i * j
if n > p and pal(n):
p = n
print p
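# Editor's note: brute force over all products of two 3-digit numbers, keeping
# the largest palindrome; for this canonical problem the script prints
# 906609 (= 913 * 993).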
|
from __future__ import print_function
from BinPy import *
class FlipFlop:
"""
Super Class for all FlipFlops
"""
def __init__(self, enable, clk, a, b):
self.a = a
self.b = b
self.clk = clk
self.clkoldval = 1
self.enable = enable
def Enable(self):
self.enable.state = 1
def Disable(self):
self.enable.state = 0
def setff(self):
# Sets the FlipFlop
self.a.state = 1
self.b.state = 0
return [self.a(), self.b()]
def resetff(self):
# Resets the FlipFlop
self.a.state = 0
self.b.state = 1
return [self.a(), self.b()]
class SRLatch(FlipFlop):
"""
S and R are the two primary inputs.
They are enabled by the third input enable.
Clock is used to trigger the Latch.
Outputs are a ( q ) and b ( ~q )
To Use :
    Set the inputs of SRLatch; to propagate any change in the inputs, use \
    the trigger() method.
"""
def __init__(
self,
S,
R,
enable,
clk,
preset=Connector(1),
clear=Connector(1),
a=Connector(0),
b=Connector(1)):
FlipFlop.__init__(self, enable, clk, a, b)
# Initiated to support numerical inputs --> See trigger method's doc
self.S = Connector(0)
self.R = Connector(1)
self.preset = Connector(1)
self.clear = Connector(1)
# Initiated to initiate the gates
self.enabledS = Connector(0)
self.enabledR = Connector(1)
# Initiating the gates with inputs - Will be overwritten when the
# self.setInputs() is called 4 lines hence.
# This is just to initiate the gates.
self.en1 = AND(S, enable)
self.en2 = AND(R, enable)
self.g1 = NOR(self.enabledS, a)
self.g2 = NOR(self.enabledR, b)
self.setInputs(S=S, R=R, enable=enable, preset=preset, clear=clear)
self.setOutputs(A=a, B=b)
def setInputs(self, **inputs):
"""
Sets the input connectors of SRLatch.
Give input parameters as a dictionary
Ex.: sr1.setInputs(S = S, R = R)
Ex.2: sr2.setInputs(enable = en1)
[ where S, R, foo are all Connector class instances. ]
This is done to support partial change in input [ only S or R etc ]
Note:
1) When inputs are given as type-int - The S and R states alone are
changed. The connections remain intact.
2) Setting the inputs does not trigger the Latch.
Use trigger separately to trigger any change.
"""
# To support both upper and lower case
for key in inputs:
if key.lower() == 's':
# To support both numerical values or Connector instances
if isinstance(inputs[key], Connector):
self.S = inputs[key]
else:
self.S.state = int(inputs[key])
elif key.lower() == 'r':
if isinstance(inputs[key], Connector):
self.R = inputs[key]
else:
self.R.state = int(inputs[key])
elif key.lower() == 'enable':
if isinstance(inputs[key], Connector):
self.enable = inputs[key]
else:
self.enable.state = int(inputs[key])
elif key.lower() == 'clk':
if isinstance(inputs[key], Connector):
self.clk = inputs[key]
else:
self.clk.state = int(inputs[key])
elif key.lower() == "preset":
if isinstance(inputs[key], Connector):
self.preset = inputs[key]
else:
self.preset.state = int(inputs[key])
elif key.lower() == "clear":
if isinstance(inputs[key], Connector):
self.clear = inputs[key]
else:
self.clear.state = int(inputs[key])
else:
print("ERROR: Unknow parameter passed" + str(key))
if not (bool(self.S) ^ bool(self.R)):
print("ERROR: Invalid State - Resetting the Latch")
self.S.state = 0
self.R.state = 1
if not (self.preset or self.clear):
print("ERROR: Invalid State - Resetting the Latch")
self.preset.state = 1
self.clear.state = 1
self.en1.setInput(0, self.S)
self.en1.setInput(1, self.enable)
self.en1.setOutput(self.enabledS)
self.en2.setInput(0, self.R)
self.en2.setInput(1, self.enable)
self.en2.setOutput(self.enabledR)
self.g1.setInput(0, self.enabledS)
self.g1.setInput(1, self.a)
self.g2.setInput(0, self.enabledR)
self.g2.setInput(1, self.b)
def setOutputs(self, **outputs):
for key in outputs:
if not isinstance(outputs[key], Connector):
raise Exception("ERROR: Output not a connector instance")
if key.lower() == 'a':
self.a = outputs[key]
elif key.lower() == 'b':
self.b = outputs[key]
else:
print("ERROR: Unknow parameter passed" + str(key))
self.g1.setOutput(self.b)
self.g1.setInput(1, self.a)
self.g2.setOutput(self.a)
self.g2.setInput(1, self.b)
def trigger(self):
if self.clear.state == 1 and self.preset.state == 0:
return self.setff()
elif self.preset.state == 1 and self.clear.state == 0:
return self.resetff()
elif not(self.clear.state or self.preset.state):
print("Error: Invalid State - Resetting the Latch")
self.clear.state = 1
self.preset.state = 1
else:
if self.clkoldval == 1 and self.clk.state == 0:
if bool(self.S) and bool(self.R):
print("ERROR: Invalid State - Resetting the Latch")
self.S.state = 0
self.R.state = 1
self.enable.trigger()
# This will trigger the gates which will trigger the a and b
self.clkoldval = self.clk.state
# stores the current clock state
return [self.a(), self.b()]
def __call__(self):
return self.trigger()
def state(self):
"""Returns the current state of the SRLatch"""
return [self.a(), self.b()]
class DFlipFlop(FlipFlop):
"""
DATA Flip Flop ( Negative edge triggered )
D is the primary input.
enable activates the Flip Flop.
( Negative edge triggered )
Clock triggers the output
Outputs are a ( q ) and b ( ~q )
"""
def __init__(
self,
D,
enable,
clk,
preset=Connector(1),
clear=Connector(1),
a=Connector(0),
b=Connector(0)):
FlipFlop.__init__(self, enable, clk, a, b)
# Initiated to support numerical inputs --> See trigger method's doc
self.D = Connector(0)
self.g1 = AND(self.D, self.enable)
self.g2 = NOT(self.a)
self.preset = Connector(1)
self.clear = Connector(1)
self.setInputs(D=D, enable=enable, preset=preset, clear=clear)
self.setOutputs(A=a, B=b)
def setInputs(self, **inputs):
"""
Sets the input connectors of DFlipFlop.
Give input parameters as a dictionary
Ex.: dff.setInputs(D = dconnector, enable = enable_connector)
Ex.2: dff.setInputs(enable = foo)
        Usage of **inputs is to pass parameters as a dict, to support \
        partial change in input [ D or enable alone ]
Note:
1) When inputs are given as type-int - The D state alone is
changed. The connections remain intact.
2) Setting the inputs does not trigger the Latch.
Use trigger separately to trigger any change.
"""
# To support both upper and lower case
for key in inputs:
if key.lower() == "d":
# To support both numerical/boolean values or Connector
# instances
if isinstance(inputs[key], Connector):
self.D = inputs[key]
else:
self.D.state = int(inputs[key])
elif key.lower() == "enable":
if isinstance(inputs[key], Connector):
self.enable = inputs[key]
else:
self.enable.state = int(inputs[key])
elif key.lower() == "clk":
if isinstance(inputs[key], Connector):
self.clk = inputs[key]
else:
self.clk.state = int(inputs[key])
elif key.lower() == "preset":
if isinstance(inputs[key], Connector):
self.preset = inputs[key]
else:
self.preset.state = int(inputs[key])
elif key.lower() == "clear":
if isinstance(inputs[key], Connector):
self.clear = inputs[key]
else:
self.clear.state = int(inputs[key])
else:
print("ERROR: Unknow parameter passed" + str(key))
if not(self.preset.state or self.clear.state):
print("ERROR : Invalid State - Resetting the Latch")
self.preset.state = 1
self.clear.state = 1
self.g1.setInput(0, self.D)
self.g1.setInput(1, self.enable)
self.g1.setOutput(self.a)
self.g2.setInput(self.a)
self.g2.setOutput(self.b)
def setOutputs(self, **outputs):
for key in outputs:
if not isinstance(outputs[key], Connector):
raise Exception("ERROR: Output not a connector instance")
if key.lower() == "a":
self.a = outputs[key]
elif key.lower() == "b":
self.b = outputs[key]
else:
print("ERROR: Unknow parameter passed" + str(key))
self.g1.setOutput(self.a)
self.g2.setInput(self.a)
self.g2.setOutput(self.b)
def trigger(self):
if self.clear.state == 1 and self.preset.state == 0:
return self.setff()
elif self.preset.state == 1 and self.clear.state == 0:
return self.resetff()
elif not(self.clear.state or self.preset.state):
print("Error: Invalid State - Resetting the Latch")
self.clear.state = 1
self.preset.state = 1
else:
if self.clkoldval == 1 and self.clk.state == 0:
self.D.trigger()
self.clkoldval = self.clk.state
return [self.a(), self.b()]
    def __call__(self):
        """Call to the FlipFlop instance will invoke the trigger method"""
        return self.trigger()
def state(self):
"""Returns the current state of the DFlipflop"""
return [self.a(), self.b()]
class JKFlipFlop(FlipFlop):
"""
J K Flip Flop - Negative edge triggered
J and K are the two primary inputs.
They are enabled by the third input enable.
Clock triggers the Flip flop.
Outputs are a ( q ) and b ( ~q )
To Use :
    Set the inputs of JKFlipFlop; to propagate any change in the inputs, \
    use the trigger() method.
    A call to the JKFlipFlop instance also triggers it and returns the \
    current state as a list.
"""
def __init__(
self,
J,
K,
enable,
clk,
preset=Connector(1),
clear=Connector(1),
a=Connector(0),
b=Connector(1)):
FlipFlop.__init__(self, enable, clk, a, b)
self.J = Connector(0)
self.K = Connector(0)
self.preset = Connector(1)
self.clear = Connector(1)
self.setInputs(J=J, K=K, enable=enable, preset=preset, clear=clear)
self.setOutputs(A=a, B=b)
self.J.tap(self, "input")
self.K.tap(self, "input")
self.enable.tap(self, "input")
self.clk.tap(self, "input")
self.a.tap(self, "output")
self.b.tap(self, "output")
def setInputs(self, **inputs):
"""
Sets the input connectors of Jk Flip flop.
Give input parameters as a dictionary
Ex.: jk1.setInputs(J = J, K = K)
Ex.2: jk2.setInputs(enable = foo)
Where J, K, foo are all Connector class instances.
This is done to support partial change in input [ only J or K etc ]
Note:
1) When inputs are given as type-int - The J and K states alone are
changed. The connections remain intact.
2) Setting the inputs does not trigger the Latch.
Use trigger separately to trigger any change.
"""
for key in inputs:
# To support both upper and lower case
if key.lower() == "j":
# To support both numerical/boolean values or Connector
# instances
if isinstance(inputs[key], Connector):
self.J = inputs[key]
else:
self.J.state = int(inputs[key])
elif key.lower() == "k":
if isinstance(inputs[key], Connector):
self.K = inputs[key]
else:
self.K.state = int(inputs[key])
elif key.lower() == "enable":
if isinstance(inputs[key], Connector):
self.enable = inputs[key]
else:
self.enable.state = int(inputs[key])
elif key.lower() == "clk":
if isinstance(inputs[key], Connector):
self.clk = inputs[key]
else:
self.clk.state = int(inputs[key])
elif key.lower() == "preset":
if isinstance(inputs[key], Connector):
self.preset = inputs[key]
else:
self.preset.state = int(inputs[key])
elif key.lower() == "clear":
if isinstance(inputs[key], Connector):
self.clear = inputs[key]
else:
self.clear.state = int(inputs[key])
else:
print("ERROR: Unknow parameter passed" + str(key))
if not(self.preset.state or self.clear.state):
print("ERROR : Invalid State - Resetting the Latch")
self.preset.state = 1
self.clear.state = 1
self.J.tap(self, "input")
self.K.tap(self, "input")
self.enable.tap(self, "input")
self.clk.tap(self, "input")
def setOutputs(self, **outputs):
for key in outputs:
if not isinstance(outputs[key], Connector):
raise Exception("ERROR: Output not a connector instance")
if key.lower() == "a":
self.a = outputs[key]
elif key.lower() == "b":
self.b = outputs[key]
else:
print("ERROR: Unknow parameter passed" + str(key))
self.a.tap(self, "output")
self.b.tap(self, "output")
def trigger(self):
"""
Trigger will update the output when any of the inputs change.
"""
if self.clear.state == 1 and self.preset.state == 0:
return self.setff()
elif self.preset.state == 1 and self.clear.state == 0:
return self.resetff()
elif not(self.clear.state or self.preset.state):
print("Error: Invalid State - Resetting the Latch")
self.clear.state = 1
self.preset.state = 1
else:
# Using behavioural Modelling
if self.clkoldval == 1 and self.clk.state == 0:
if bool(self.enable):
if bool(self.J) and bool(self.K):
self.a.state = 0 if bool(self.a) else 1
elif not bool(self.J) and bool(self.K):
self.a.state = 0
elif bool(self.J) and not bool(self.K):
self.a.state = 1
self.b.state = 0 if self.a.state else 1
self.a.trigger()
self.b.trigger()
self.clkoldval = self.clk.state
return [self.a(), self.b()]
def __call__(self):
return self.trigger()
def state(self):
return [self.a(), self.b()]
class TFlipFlop(JKFlipFlop):
"""
Toggle Flip Flop. Negative edge triggered.
Inputs are T and enable.
Clock triggers the circuit
Outputs are:
a = ( q )
b = ( q~ )
"""
def __init__(
self,
T,
enable,
clk,
preset=Connector(1),
clear=Connector(1),
a=Connector(),
b=Connector()):
JKFlipFlop.__init__(self, T, T, enable, clk, preset, clear, a, b)
def setOutputs(self, **outputs):
JKFlipFlop.setOutputs(self, **outputs)
def trigger(self):
JKFlipFlop.trigger(self)
# Triggering of the outputs is done by the JKFlipFlop Module.
def state(self):
return [self.a(), self.b()]
def __call__(self):
self.trigger()
return [self.a(), self.b()]
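# --- Editor's usage sketch (assumes BinPy Connector semantics as used above;
# kept commented so importing this module stays side-effect free) ---
# s, r = Connector(1), Connector(0)
# en, clk = Connector(1), Connector(1)
# latch = SRLatch(s, r, en, clk)
# clk.state = 0            # falling edge: these latches are negative edge triggered
# print(latch.trigger())   # expected [1, 0], i.e. q set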
|
from django import forms
from django.utils.safestring import mark_safe
import datetime
import re
from django.forms.widgets import Widget, Select
from django.utils.dates import MONTHS
class DatePicker(forms.TextInput):
def render(self, name, value=None, attrs=None):
input = self.renderTimeField(name, value)
script = self.renderScript(name)
return input + script
def renderTimeField(self, fieldId, value):
        if value is None:
value = ''
return '<input type="date" name="%s" value="%s"/>' % (fieldId, value)
def renderScript(self, fieldId):
return '<script type="text/javascript">if(typeof jQuery != \'undefined\') {$("input[name=%s]").datepicker({inline: true, dateFormat: "yy-mm-dd"});}</script>' % (fieldId)
class TimePicker(forms.TextInput):
def render(self, name, value=None, attrs=None):
input = self.renderTimeField(name, value)
script = self.renderScript(name)
return input + script
def renderTimeField(self, fieldId, value):
        if value is None:
value = ''
return '<input type="time" name="%s" value="%s"/>' % (fieldId, value)
def renderScript(self, fieldId):
return '<script type="text/javascript">if(typeof jQuery != \'undefined\') {$("input[name=%s]").timepicker();}</script>' % (fieldId)
RE_DATE = re.compile(r'(\d{4})-(\d\d?)-(\d\d?)$')
class MonthYearWidget(Widget):
"""
A Widget that splits date input into two <select> boxes for month and year,
with 'day' defaulting to the first of the month.
Based on SelectDateWidget, in
django/trunk/django/forms/extras/widgets.py
"""
none_value = (0, '---')
month_field = '%s_month'
year_field = '%s_year'
def __init__(self, attrs=None, years=None, required=True):
# years is an optional list/tuple of years to use in the "year" select box.
self.attrs = attrs or {}
self.required = required
if years:
self.years = years
else:
this_year = datetime.date.today().year
self.years = range(this_year, this_year+10)
def render(self, name, value, attrs=None):
try:
year_val, month_val = value.year, value.month
except AttributeError:
year_val = month_val = None
if isinstance(value, basestring):
match = RE_DATE.match(value)
if match:
year_val, month_val, day_val = [int(v) for v in match.groups()]
output = []
if 'id' in self.attrs:
id_ = self.attrs['id']
else:
id_ = 'id_%s' % name
month_choices = MONTHS.items()
if not (self.required and value):
month_choices.append(self.none_value)
month_choices.sort()
local_attrs = self.build_attrs(id=self.month_field % id_)
s = Select(choices=month_choices)
select_html = s.render(self.month_field % name, month_val, local_attrs)
output.append(select_html)
year_choices = [(i, i) for i in self.years]
if not (self.required and value):
year_choices.insert(0, self.none_value)
local_attrs['id'] = self.year_field % id_
s = Select(choices=year_choices)
select_html = s.render(self.year_field % name, year_val, local_attrs)
output.append(select_html)
return mark_safe(u'\n'.join(output))
def id_for_label(self, id_):
return '%s_month' % id_
id_for_label = classmethod(id_for_label)
def value_from_datadict(self, data, files, name):
y = data.get(self.year_field % name)
m = data.get(self.month_field % name)
if y == m == "0":
return None
if y and m:
return '%s-%s-%s' % (y, m, 1)
return data.get(name, None)
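# --- Editor's usage sketch (hypothetical form; not part of the original file) ---
# class SubscriptionForm(forms.Form):
#     start = forms.DateField(widget=MonthYearWidget(years=range(2010, 2021)))
# value_from_datadict() then yields 'YYYY-M-1', which DateField parses as the
# first day of the selected month.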
|
from flask_wtf import FlaskForm
from wtforms import StringField
from wtforms.validators import DataRequired
class SettingsForm(FlaskForm):
filename = StringField('filename', validators=[DataRequired()])
|
from django.contrib import admin
from tidings.models import Watch, WatchFilter
class FilterInline(admin.TabularInline):
model = WatchFilter
class WatchAdmin(admin.ModelAdmin):
list_filter = ['content_type', 'event_type']
raw_id_fields = ['user']
inlines = [FilterInline]
class WatchFilterAdmin(admin.ModelAdmin):
list_filter = ['name']
raw_id_fields = ['watch']
admin.site.register(Watch, WatchAdmin)
admin.site.register(WatchFilter, WatchFilterAdmin)
|
from sympy import (plot_implicit, cos, Symbol, symbols, Eq, sin, re, And, Or, exp, I,
                   tan, pi)
from sympy.plotting.plot import unset_show
from tempfile import NamedTemporaryFile, mkdtemp
from sympy.utilities.pytest import skip, warns
from sympy.external import import_module
from sympy.utilities.tmpfiles import TmpFileManager
import os
unset_show()
def tmp_file(dir=None, name=''):
return NamedTemporaryFile(
suffix='.png', dir=dir, delete=False).name
def plot_and_save(expr, *args, **kwargs):
name = kwargs.pop('name', '')
dir = kwargs.pop('dir', None)
p = plot_implicit(expr, *args, **kwargs)
p.save(tmp_file(dir=dir, name=name))
# Close the plot to avoid a warning from matplotlib
p._backend.close()
def plot_implicit_tests(name):
temp_dir = mkdtemp()
TmpFileManager.tmp_folder(temp_dir)
x = Symbol('x')
y = Symbol('y')
z = Symbol('z')
#implicit plot tests
plot_and_save(Eq(y, cos(x)), (x, -5, 5), (y, -2, 2), name=name, dir=temp_dir)
plot_and_save(Eq(y**2, x**3 - x), (x, -5, 5),
(y, -4, 4), name=name, dir=temp_dir)
plot_and_save(y > 1 / x, (x, -5, 5),
(y, -2, 2), name=name, dir=temp_dir)
plot_and_save(y < 1 / tan(x), (x, -5, 5),
(y, -2, 2), name=name, dir=temp_dir)
plot_and_save(y >= 2 * sin(x) * cos(x), (x, -5, 5),
(y, -2, 2), name=name, dir=temp_dir)
plot_and_save(y <= x**2, (x, -3, 3),
(y, -1, 5), name=name, dir=temp_dir)
#Test all input args for plot_implicit
plot_and_save(Eq(y**2, x**3 - x), dir=temp_dir)
plot_and_save(Eq(y**2, x**3 - x), adaptive=False, dir=temp_dir)
plot_and_save(Eq(y**2, x**3 - x), adaptive=False, points=500, dir=temp_dir)
plot_and_save(y > x, (x, -5, 5), dir=temp_dir)
plot_and_save(And(y > exp(x), y > x + 2), dir=temp_dir)
plot_and_save(Or(y > x, y > -x), dir=temp_dir)
plot_and_save(x**2 - 1, (x, -5, 5), dir=temp_dir)
plot_and_save(x**2 - 1, dir=temp_dir)
plot_and_save(y > x, depth=-5, dir=temp_dir)
plot_and_save(y > x, depth=5, dir=temp_dir)
plot_and_save(y > cos(x), adaptive=False, dir=temp_dir)
plot_and_save(y < cos(x), adaptive=False, dir=temp_dir)
plot_and_save(And(y > cos(x), Or(y > x, Eq(y, x))), dir=temp_dir)
plot_and_save(y - cos(pi / x), dir=temp_dir)
#Test plots which cannot be rendered using the adaptive algorithm
with warns(UserWarning, match="Adaptive meshing could not be applied"):
plot_and_save(Eq(y, re(cos(x) + I*sin(x))), name=name, dir=temp_dir)
plot_and_save(x**2 - 1, title='An implicit plot', dir=temp_dir)
def test_line_color():
x, y = symbols('x, y')
p = plot_implicit(x**2 + y**2 - 1, line_color="green", show=False)
assert p._series[0].line_color == "green"
p = plot_implicit(x**2 + y**2 - 1, line_color='r', show=False)
assert p._series[0].line_color == "r"
def test_matplotlib():
matplotlib = import_module('matplotlib', min_module_version='1.1.0', catch=(RuntimeError,))
if matplotlib:
try:
plot_implicit_tests('test')
test_line_color()
finally:
TmpFileManager.cleanup()
else:
skip("Matplotlib not the default backend")
def test_region_and():
matplotlib = import_module('matplotlib', min_module_version='1.1.0', catch=(RuntimeError,))
if not matplotlib:
skip("Matplotlib not the default backend")
from matplotlib.testing.compare import compare_images
test_directory = os.path.dirname(os.path.abspath(__file__))
try:
temp_dir = mkdtemp()
TmpFileManager.tmp_folder(temp_dir)
x, y = symbols('x y')
r1 = (x - 1)**2 + y**2 < 2
r2 = (x + 1)**2 + y**2 < 2
test_filename = tmp_file(dir=temp_dir, name="test_region_and")
cmp_filename = os.path.join(test_directory, "test_region_and.png")
p = plot_implicit(r1 & r2, x, y)
p.save(test_filename)
compare_images(cmp_filename, test_filename, 0.005)
test_filename = tmp_file(dir=temp_dir, name="test_region_or")
cmp_filename = os.path.join(test_directory, "test_region_or.png")
p = plot_implicit(r1 | r2, x, y)
p.save(test_filename)
compare_images(cmp_filename, test_filename, 0.005)
test_filename = tmp_file(dir=temp_dir, name="test_region_not")
cmp_filename = os.path.join(test_directory, "test_region_not.png")
p = plot_implicit(~r1, x, y)
p.save(test_filename)
compare_images(cmp_filename, test_filename, 0.005)
test_filename = tmp_file(dir=temp_dir, name="test_region_xor")
cmp_filename = os.path.join(test_directory, "test_region_xor.png")
p = plot_implicit(r1 ^ r2, x, y)
p.save(test_filename)
compare_images(cmp_filename, test_filename, 0.005)
finally:
TmpFileManager.cleanup()
|
from django.http import HttpResponse
from django.views.generic import View
import json
import redis
import requests
redis_instance = redis.StrictRedis()
class GetJson(View):
""" Returns next departures per stop, only for currently active lines. """
def get(self, request, *args, **kwargs):
data = redis_instance.get("realtime-bus")
if data:
data = json.loads(data)
else:
data = []
return HttpResponse(json.dumps(data), content_type="application/json")
class GetData(View):
def get(self, request, *args, **kwargs):
resp = requests.get("http://localhost:5019/" + kwargs.get("url"))
return HttpResponse(resp.content, content_type=resp.headers.get('content-type'))
class Poikkeustiedotteet(View):
def get(self, request, *args, **kwargs):
data = redis_instance.get("hsl-poikkeusinfo")
return HttpResponse(data, content_type="application/json")
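# --- Editor's note: the producer side is assumed to populate these keys
# elsewhere, e.g. (illustrative, not part of this app):
# redis_instance.set("realtime-bus", json.dumps(departures))
# redis_instance.set("hsl-poikkeusinfo", json.dumps(alerts))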
|
from cudatree import RandomForestClassifier, load_data, timer
from cudatree import util
from hybridforest import RandomForestClassifier as hybridForest
import numpy as np
import math
debug = False
verbose = False
bootstrap = False
n_estimators = 100
def benchmark_cuda(dataset, bfs_threshold = None):
x_train, y_train = load_data(dataset)
#Just use this forest to compile the code.
throw_away = RandomForestClassifier(n_estimators = 1, bootstrap = bootstrap, verbose = False,
max_features = None, debug = debug)
throw_away.fit(x_train, y_train, bfs_threshold = bfs_threshold)
with timer("%s benchmark cuda (bfs_threshold = %s)" % (dataset, bfs_threshold)):
forest = RandomForestClassifier(n_estimators = n_estimators, bootstrap = bootstrap, verbose = verbose,
max_features = None, debug = debug)
forest.fit(x_train, y_train, bfs_threshold = bfs_threshold)
forest = None
def benchmark_hybrid(dataset, bfs_threshold = None):
x_train, y_train = load_data(dataset)
#Just use this forest to compile the code.
throw_away = hybridForest(n_estimators = 2, bootstrap = bootstrap,
max_features = None)
throw_away.fit(x_train, y_train, bfs_threshold = bfs_threshold)
with timer("%s benchmark hybrid (bfs_threshold = %s)" % (dataset, bfs_threshold)):
forest = hybridForest(n_estimators = n_estimators, bootstrap = bootstrap, n_jobs = 2,
max_features = None)
forest.fit(x_train, y_train, bfs_threshold = bfs_threshold)
forest = None
benchmark_hybrid("covtype", None)
benchmark_hybrid("poker")
benchmark_hybrid("cf100")
"""
benchmark_hybrid("cf100")
benchmark_hybrid("kdd")
benchmark_hybrid("covtype")
benchmark_hybrid("cf10")
benchmark_cuda("cf100", True)
benchmark_cuda("kdd", True)
benchmark_cuda("covtype", True)
benchmark_cuda("cf10", True)
"""
|
"""
Flaskr
~~~~~~
A microblog example application written as Flask tutorial with
Flask and sqlite3.
:copyright: (c) 2014 by Armin Ronacher.
:license: BSD, see LICENSE for more details.
"""
from sqlite3 import dbapi2 as sqlite3
from flask import Flask, request, session, g, redirect, url_for, abort, \
render_template, flash
app = Flask(__name__)
app.config.update(dict(
DATABASE='/tmp/flaskr.db',
DEBUG=True,
SECRET_KEY='development key',
USERNAME='admin',
PASSWORD='default'
))
app.config.from_envvar('FLASKR_SETTINGS', silent=True)
def connect_db():
"""Connects to the specific database."""
rv = sqlite3.connect(app.config['DATABASE'])
rv.row_factory = sqlite3.Row
return rv
def init_db():
"""Creates the database tables."""
with app.app_context():
db = get_db()
with app.open_resource('schema.sql', mode='r') as f:
db.cursor().executescript(f.read())
db.commit()
def get_db():
"""Opens a new database connection if there is none yet for the
current application context.
"""
if not hasattr(g, 'sqlite_db'):
g.sqlite_db = connect_db()
return g.sqlite_db
@app.teardown_appcontext
def close_db(error):
"""Closes the database again at the end of the request."""
if hasattr(g, 'sqlite_db'):
g.sqlite_db.close()
@app.route('/')
def show_entries():
db = get_db()
cur = db.execute('select title, text from entries order by id desc')
entries = cur.fetchall()
return render_template('show_entries.html', entries=entries)
@app.route('/add', methods=['POST'])
def add_entry():
if not session.get('logged_in'):
abort(401)
db = get_db()
db.execute('insert into entries (title, text) values (?, ?)',
[request.form['title'], request.form['text']])
db.commit()
flash('New entry was successfully posted')
return redirect(url_for('show_entries'))
@app.route('/login', methods=['GET', 'POST'])
def login():
error = None
if request.method == 'POST':
if request.form['username'] != app.config['USERNAME']:
error = 'Invalid username'
elif request.form['password'] != app.config['PASSWORD']:
error = 'Invalid password'
else:
session['logged_in'] = True
flash('You were logged in')
return redirect(url_for('show_entries'))
return render_template('login.html', error=error)
@app.route('/logout')
def logout():
session.pop('logged_in', None)
flash('You were logged out')
return redirect(url_for('show_entries'))
if __name__ == '__main__':
init_db()
app.run()
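# --- Editor's note: init_db() executes app.open_resource('schema.sql'); the
# canonical Flaskr schema that file is assumed to contain is shown here as a
# comment for reference ---
# drop table if exists entries;
# create table entries (
#   id integer primary key autoincrement,
#   title text not null,
#   'text' text not null
# );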
|
import datetime
from south.db import db
from south.v2 import SchemaMigration
from django.db import models
class Migration(SchemaMigration):
def forwards(self, orm):
# Adding field 'Rule.rule'
db.add_column('poll_rule', 'rule',
self.gf('django.db.models.fields.IntegerField')(max_length=10, null=True),
keep_default=False)
def backwards(self, orm):
# Deleting field 'Rule.rule'
db.delete_column('poll_rule', 'rule')
models = {
'auth.group': {
'Meta': {'object_name': 'Group'},
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'name': ('django.db.models.fields.CharField', [], {'unique': 'True', 'max_length': '80'}),
'permissions': ('django.db.models.fields.related.ManyToManyField', [], {'to': "orm['auth.Permission']", 'symmetrical': 'False', 'blank': 'True'})
},
'auth.permission': {
'Meta': {'ordering': "('content_type__app_label', 'content_type__model', 'codename')", 'unique_together': "(('content_type', 'codename'),)", 'object_name': 'Permission'},
'codename': ('django.db.models.fields.CharField', [], {'max_length': '100'}),
'content_type': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['contenttypes.ContentType']"}),
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'name': ('django.db.models.fields.CharField', [], {'max_length': '50'})
},
'auth.user': {
'Meta': {'object_name': 'User'},
'date_joined': ('django.db.models.fields.DateTimeField', [], {'default': 'datetime.datetime.now'}),
'email': ('django.db.models.fields.EmailField', [], {'max_length': '75', 'blank': 'True'}),
'first_name': ('django.db.models.fields.CharField', [], {'max_length': '30', 'blank': 'True'}),
'groups': ('django.db.models.fields.related.ManyToManyField', [], {'to': "orm['auth.Group']", 'symmetrical': 'False', 'blank': 'True'}),
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'is_active': ('django.db.models.fields.BooleanField', [], {'default': 'True'}),
'is_staff': ('django.db.models.fields.BooleanField', [], {'default': 'False'}),
'is_superuser': ('django.db.models.fields.BooleanField', [], {'default': 'False'}),
'last_login': ('django.db.models.fields.DateTimeField', [], {'default': 'datetime.datetime.now'}),
'last_name': ('django.db.models.fields.CharField', [], {'max_length': '30', 'blank': 'True'}),
'password': ('django.db.models.fields.CharField', [], {'max_length': '128'}),
'user_permissions': ('django.db.models.fields.related.ManyToManyField', [], {'to': "orm['auth.Permission']", 'symmetrical': 'False', 'blank': 'True'}),
'username': ('django.db.models.fields.CharField', [], {'unique': 'True', 'max_length': '30'})
},
'contenttypes.contenttype': {
'Meta': {'ordering': "('name',)", 'unique_together': "(('app_label', 'model'),)", 'object_name': 'ContentType', 'db_table': "'django_content_type'"},
'app_label': ('django.db.models.fields.CharField', [], {'max_length': '100'}),
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'model': ('django.db.models.fields.CharField', [], {'max_length': '100'}),
'name': ('django.db.models.fields.CharField', [], {'max_length': '100'})
},
'eav.attribute': {
'Meta': {'ordering': "['name']", 'unique_together': "(('site', 'slug'),)", 'object_name': 'Attribute'},
'created': ('django.db.models.fields.DateTimeField', [], {'default': 'datetime.datetime.now'}),
'datatype': ('eav.fields.EavDatatypeField', [], {'max_length': '6'}),
'description': ('django.db.models.fields.CharField', [], {'max_length': '256', 'null': 'True', 'blank': 'True'}),
'enum_group': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['eav.EnumGroup']", 'null': 'True', 'blank': 'True'}),
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'modified': ('django.db.models.fields.DateTimeField', [], {'auto_now': 'True', 'blank': 'True'}),
'name': ('django.db.models.fields.CharField', [], {'max_length': '100'}),
'required': ('django.db.models.fields.BooleanField', [], {'default': 'False'}),
'site': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['sites.Site']"}),
'slug': ('eav.fields.EavSlugField', [], {'max_length': '50'}),
'type': ('django.db.models.fields.CharField', [], {'max_length': '20', 'null': 'True', 'blank': 'True'})
},
'eav.enumgroup': {
'Meta': {'object_name': 'EnumGroup'},
'enums': ('django.db.models.fields.related.ManyToManyField', [], {'to': "orm['eav.EnumValue']", 'symmetrical': 'False'}),
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'name': ('django.db.models.fields.CharField', [], {'unique': 'True', 'max_length': '100'})
},
'eav.enumvalue': {
'Meta': {'object_name': 'EnumValue'},
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'value': ('django.db.models.fields.CharField', [], {'unique': 'True', 'max_length': '50', 'db_index': 'True'})
},
'eav.value': {
'Meta': {'object_name': 'Value'},
'attribute': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['eav.Attribute']"}),
'created': ('django.db.models.fields.DateTimeField', [], {'default': 'datetime.datetime.now'}),
'entity_ct': ('django.db.models.fields.related.ForeignKey', [], {'related_name': "'value_entities'", 'to': "orm['contenttypes.ContentType']"}),
'entity_id': ('django.db.models.fields.IntegerField', [], {}),
'generic_value_ct': ('django.db.models.fields.related.ForeignKey', [], {'blank': 'True', 'related_name': "'value_values'", 'null': 'True', 'to': "orm['contenttypes.ContentType']"}),
'generic_value_id': ('django.db.models.fields.IntegerField', [], {'null': 'True', 'blank': 'True'}),
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'modified': ('django.db.models.fields.DateTimeField', [], {'auto_now': 'True', 'blank': 'True'}),
'value_bool': ('django.db.models.fields.NullBooleanField', [], {'null': 'True', 'blank': 'True'}),
'value_date': ('django.db.models.fields.DateTimeField', [], {'null': 'True', 'blank': 'True'}),
'value_enum': ('django.db.models.fields.related.ForeignKey', [], {'blank': 'True', 'related_name': "'eav_values'", 'null': 'True', 'to': "orm['eav.EnumValue']"}),
'value_float': ('django.db.models.fields.FloatField', [], {'null': 'True', 'blank': 'True'}),
'value_int': ('django.db.models.fields.IntegerField', [], {'null': 'True', 'blank': 'True'}),
'value_text': ('django.db.models.fields.TextField', [], {'null': 'True', 'blank': 'True'})
},
'locations.location': {
'Meta': {'object_name': 'Location'},
'code': ('django.db.models.fields.CharField', [], {'max_length': '100', 'null': 'True', 'blank': 'True'}),
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'is_active': ('django.db.models.fields.BooleanField', [], {'default': 'True'}),
'level': ('django.db.models.fields.PositiveIntegerField', [], {'db_index': 'True'}),
'lft': ('django.db.models.fields.PositiveIntegerField', [], {'db_index': 'True'}),
'name': ('django.db.models.fields.CharField', [], {'max_length': '100'}),
'parent_id': ('django.db.models.fields.PositiveIntegerField', [], {'null': 'True', 'blank': 'True'}),
'parent_type': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['contenttypes.ContentType']", 'null': 'True', 'blank': 'True'}),
'point': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['locations.Point']", 'null': 'True', 'blank': 'True'}),
'rght': ('django.db.models.fields.PositiveIntegerField', [], {'db_index': 'True'}),
'tree_id': ('django.db.models.fields.PositiveIntegerField', [], {'db_index': 'True'}),
'tree_parent': ('django.db.models.fields.related.ForeignKey', [], {'blank': 'True', 'related_name': "'children'", 'null': 'True', 'to': "orm['locations.Location']"}),
'type': ('django.db.models.fields.related.ForeignKey', [], {'blank': 'True', 'related_name': "'locations'", 'null': 'True', 'to': "orm['locations.LocationType']"})
},
'locations.locationtype': {
'Meta': {'object_name': 'LocationType'},
'name': ('django.db.models.fields.CharField', [], {'max_length': '100'}),
'slug': ('django.db.models.fields.SlugField', [], {'unique': 'True', 'max_length': '50', 'primary_key': 'True'})
},
'locations.point': {
'Meta': {'object_name': 'Point'},
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'latitude': ('django.db.models.fields.DecimalField', [], {'max_digits': '13', 'decimal_places': '10'}),
'longitude': ('django.db.models.fields.DecimalField', [], {'max_digits': '13', 'decimal_places': '10'})
},
'poll.category': {
'Meta': {'ordering': "['name']", 'object_name': 'Category'},
'color': ('django.db.models.fields.CharField', [], {'max_length': '6'}),
'default': ('django.db.models.fields.BooleanField', [], {'default': 'False'}),
'error_category': ('django.db.models.fields.BooleanField', [], {'default': 'False'}),
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'name': ('django.db.models.fields.CharField', [], {'max_length': '50'}),
'poll': ('django.db.models.fields.related.ForeignKey', [], {'related_name': "'categories'", 'to': "orm['poll.Poll']"}),
'priority': ('django.db.models.fields.PositiveSmallIntegerField', [], {'null': 'True'}),
'response': ('django.db.models.fields.CharField', [], {'max_length': '160', 'null': 'True'})
},
'poll.poll': {
'Meta': {'ordering': "['-end_date']", 'object_name': 'Poll'},
'contacts': ('django.db.models.fields.related.ManyToManyField', [], {'related_name': "'polls'", 'symmetrical': 'False', 'to': "orm['rapidsms.Contact']"}),
'default_response': ('django.db.models.fields.CharField', [], {'max_length': '160', 'null': 'True', 'blank': 'True'}),
'end_date': ('django.db.models.fields.DateTimeField', [], {'null': 'True'}),
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'messages': ('django.db.models.fields.related.ManyToManyField', [], {'to': "orm['rapidsms_httprouter.Message']", 'null': 'True', 'symmetrical': 'False'}),
'name': ('django.db.models.fields.CharField', [], {'max_length': '32'}),
'question': ('django.db.models.fields.CharField', [], {'max_length': '160'}),
'response_type': ('django.db.models.fields.CharField', [], {'default': "'a'", 'max_length': '1', 'null': 'True', 'blank': 'True'}),
'sites': ('django.db.models.fields.related.ManyToManyField', [], {'to': "orm['sites.Site']", 'symmetrical': 'False'}),
'start_date': ('django.db.models.fields.DateTimeField', [], {'null': 'True'}),
'type': ('django.db.models.fields.SlugField', [], {'max_length': '8', 'null': 'True', 'blank': 'True'}),
'user': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['auth.User']"})
},
'poll.response': {
'Meta': {'object_name': 'Response'},
'contact': ('django.db.models.fields.related.ForeignKey', [], {'blank': 'True', 'related_name': "'responses'", 'null': 'True', 'to': "orm['rapidsms.Contact']"}),
'date': ('django.db.models.fields.DateTimeField', [], {'auto_now_add': 'True', 'blank': 'True'}),
'has_errors': ('django.db.models.fields.BooleanField', [], {'default': 'False'}),
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'message': ('django.db.models.fields.related.ForeignKey', [], {'related_name': "'poll_responses'", 'null': 'True', 'to': "orm['rapidsms_httprouter.Message']"}),
'poll': ('django.db.models.fields.related.ForeignKey', [], {'related_name': "'responses'", 'to': "orm['poll.Poll']"})
},
'poll.responsecategory': {
'Meta': {'object_name': 'ResponseCategory'},
'category': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['poll.Category']"}),
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'is_override': ('django.db.models.fields.BooleanField', [], {'default': 'False'}),
'response': ('django.db.models.fields.related.ForeignKey', [], {'related_name': "'categories'", 'to': "orm['poll.Response']"}),
'user': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['auth.User']", 'null': 'True'})
},
'poll.rule': {
'Meta': {'object_name': 'Rule'},
'category': ('django.db.models.fields.related.ForeignKey', [], {'related_name': "'rules'", 'to': "orm['poll.Category']"}),
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'regex': ('django.db.models.fields.CharField', [], {'max_length': '256'}),
'rule': ('django.db.models.fields.IntegerField', [], {'max_length': '10', 'null': 'True'}),
'rule_string': ('django.db.models.fields.CharField', [], {'max_length': '256', 'null': 'True'}),
'rule_type': ('django.db.models.fields.CharField', [], {'max_length': '2'})
},
'poll.translation': {
'Meta': {'unique_together': "(('field', 'language'),)", 'object_name': 'Translation'},
'field': ('django.db.models.fields.TextField', [], {'db_index': 'True'}),
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'language': ('django.db.models.fields.CharField', [], {'max_length': '5', 'db_index': 'True'}),
'value': ('django.db.models.fields.TextField', [], {'blank': 'True'})
},
'rapidsms.backend': {
'Meta': {'object_name': 'Backend'},
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'name': ('django.db.models.fields.CharField', [], {'unique': 'True', 'max_length': '20'})
},
'rapidsms.connection': {
'Meta': {'unique_together': "(('backend', 'identity'),)", 'object_name': 'Connection'},
'backend': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['rapidsms.Backend']"}),
'contact': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['rapidsms.Contact']", 'null': 'True', 'blank': 'True'}),
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'identity': ('django.db.models.fields.CharField', [], {'max_length': '100'})
},
'rapidsms.contact': {
'Meta': {'object_name': 'Contact'},
'active': ('django.db.models.fields.BooleanField', [], {'default': 'False'}),
'birthdate': ('django.db.models.fields.DateTimeField', [], {'null': 'True'}),
'gender': ('django.db.models.fields.CharField', [], {'max_length': '1', 'null': 'True'}),
'groups': ('django.db.models.fields.related.ManyToManyField', [], {'symmetrical': 'False', 'to': "orm['auth.Group']", 'null': 'True', 'blank': 'True'}),
'health_facility': ('django.db.models.fields.CharField', [], {'max_length': '50', 'null': 'True', 'blank': 'True'}),
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'is_caregiver': ('django.db.models.fields.BooleanField', [], {'default': 'False'}),
'language': ('django.db.models.fields.CharField', [], {'max_length': '6', 'blank': 'True'}),
'name': ('django.db.models.fields.CharField', [], {'max_length': '100', 'blank': 'True'}),
'reporting_location': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['locations.Location']", 'null': 'True', 'blank': 'True'}),
'user': ('django.db.models.fields.related.ForeignKey', [], {'blank': 'True', 'related_name': "'contact'", 'unique': 'True', 'null': 'True', 'to': "orm['auth.User']"}),
'user_permissions': ('django.db.models.fields.related.ManyToManyField', [], {'to': "orm['auth.Permission']", 'symmetrical': 'False', 'blank': 'True'}),
'village': ('django.db.models.fields.related.ForeignKey', [], {'blank': 'True', 'related_name': "'villagers'", 'null': 'True', 'to': "orm['locations.Location']"}),
'village_name': ('django.db.models.fields.CharField', [], {'max_length': '100', 'null': 'True', 'blank': 'True'})
},
'rapidsms_httprouter.message': {
'Meta': {'object_name': 'Message'},
'application': ('django.db.models.fields.CharField', [], {'max_length': '100', 'null': 'True'}),
'batch': ('django.db.models.fields.related.ForeignKey', [], {'related_name': "'messages'", 'null': 'True', 'to': "orm['rapidsms_httprouter.MessageBatch']"}),
'connection': ('django.db.models.fields.related.ForeignKey', [], {'related_name': "'messages'", 'to': "orm['rapidsms.Connection']"}),
'date': ('django.db.models.fields.DateTimeField', [], {'auto_now_add': 'True', 'blank': 'True'}),
'direction': ('django.db.models.fields.CharField', [], {'max_length': '1', 'db_index': 'True'}),
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'in_response_to': ('django.db.models.fields.related.ForeignKey', [], {'related_name': "'responses'", 'null': 'True', 'to': "orm['rapidsms_httprouter.Message']"}),
'priority': ('django.db.models.fields.IntegerField', [], {'default': '10', 'db_index': 'True'}),
'status': ('django.db.models.fields.CharField', [], {'max_length': '1', 'db_index': 'True'}),
'text': ('django.db.models.fields.TextField', [], {'db_index': 'True'})
},
'rapidsms_httprouter.messagebatch': {
'Meta': {'object_name': 'MessageBatch'},
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'name': ('django.db.models.fields.CharField', [], {'max_length': '15', 'null': 'True', 'blank': 'True'}),
'status': ('django.db.models.fields.CharField', [], {'max_length': '1'})
},
'sites.site': {
'Meta': {'ordering': "('domain',)", 'object_name': 'Site', 'db_table': "'django_site'"},
'domain': ('django.db.models.fields.CharField', [], {'max_length': '100'}),
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'name': ('django.db.models.fields.CharField', [], {'max_length': '50'})
}
}
complete_apps = ['poll']
|
import shlex
class Player(object):
def __init__(self, location):
self.location = location
self.location.here.append(self)
self.playing = True
def get_input(self):
return raw_input(">")
def process_input(self, input):
parts = shlex.split(input)
if len(parts) == 0:
return []
if len(parts) == 1:
parts.append("")
verb = parts[0]
noun = " ".join(parts[1:])
handler = self.find_handler(verb, noun)
if handler is None:
return [input +
"? I don't know how to do that!"]
return handler(self, noun)
    def find_handler(self, verb, noun):
        # Prefer an object in the room that matches the noun and supports the
        # verb, then fall back to the player's actions, then the location's.
        if noun != "":
            matches = [x for x in self.location.here
                       if x is not self and
                       x.name == noun and
                       verb in x.actions]
            if len(matches) > 0:
                return getattr(matches[0], verb)
        if verb.lower() in self.actions:
            return getattr(self, verb)
        elif verb.lower() in self.location.actions:
            return getattr(self.location, verb)
def look(self, player, noun):
return [self.location.name,
self.location.description]
def quit(self, player, noun):
self.playing = False
return ["bye bye!"]
actions = ['look', 'quit']
if __name__ == '__main__':
import cave
empty_cave = cave.Cave(
"Empty Cave",
"A desolate, empty cave, waiting for someone to fill it.")
player = Player(empty_cave)
print player.location.name
print player.location.description
while player.playing:
input = player.get_input()
result = player.process_input(input)
print "\n".join(result)
"""
look self.look
inv self.inv
go north self.location.go
north self.location.go
look sword sword.look
get sword sword.get
kill orc orc.kill
"""
|
from django.conf.urls import patterns
from django.conf.urls import url
from . import views
urlpatterns = patterns('',
url(r'^$', views.IndexView.as_view(), name='index'),
url(r'^(?P<pk>\d+)/$', views.DetailView.as_view(), name='detail'),
url(r'^(?P<pk>\d+)/results/$', views.ResultsView.as_view(), name='results'),
url(r'^(?P<poll_id>\d+)/vote/$', views.vote, name='vote'),
    url(r'^suggestions/$', views.suggestionView, name='suggestions'),
    url(r'^uploadSuggestion/$', views.uploadSuggestion, name='uploadSuggestion'),
)
|
from django.apps import apps
from rest_framework import viewsets
from rest_framework.serializers import ModelSerializer
def get_model_class(class_name):
app_name, model_name = class_name.split('.')
return apps.get_model(app_name, model_name)
class GenericViewSet(viewsets.ModelViewSet):
"""
API endpoint that allows {models} to be viewed or edited.
"""
def get_serializer_class(self):
'''Get serializer from concrete django model class or
create generic serializer which is sufficient for most cases:
class MyModel:
serializer_class = MyModelSerializer
'''
def get_generic_serializer_class(model_class):
'''Just as last options create generic serializer'''
class Serializer(ModelSerializer):
class Meta:
model = model_class
return Serializer
        # merge the generic serializer with a custom one, when the model
        # declares `serializer_class`
        serializer = getattr(self.model_class, 'serializer_class', None)
        generic_serializer = get_generic_serializer_class(self.model_class)
        if serializer:
            return type('Serializer', (generic_serializer, serializer), {})
        return generic_serializer
def get_queryset(self):
return self.model_class.objects.all()
@property
def model_class(self):
'''load and returns model class from class_name parameter'''
if not hasattr(self, '_model_class'):
self._model_class = get_model_class(
self.kwargs['class_name'])
return self._model_class
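# --- Editor's usage sketch (hypothetical URL wiring; `class_name` arrives as a
# URL kwarg in 'app.Model' form, matching get_model_class above) ---
# from django.conf.urls import url
# urlpatterns = [
#     url(r'^api/(?P<class_name>\w+\.\w+)/$',
#         GenericViewSet.as_view({'get': 'list', 'post': 'create'})),
# ]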
|
import cv2
import numpy as np
def draw_circles(image, circles, color=(0,255,0), thickness=1, center_color=(0,0,255), center_thickness=2):
    # `circles` is expected in cv2.HoughCircles output form: shape (1, N, 3)
    # with rows of (x, y, radius), or None when nothing was detected.
    if circles is None:
        return
    for circle in np.uint16(np.around(circles))[0,:]:
        center = tuple(circle[:2])
        radius = circle[2]
        cv2.circle(image, center, radius, color=color, thickness=thickness)
        # mark the detected center with a small filled dot
        cv2.circle(image, center, center_thickness, color=center_color, thickness=cv2.FILLED)
def draw_roi(image, roi, color=(255,0,0), thickness=2):
roi = tuple(map(tuple, roi))
cv2.rectangle(image, *roi, color=color, thickness=thickness)
def extract_roi(image, roi):
return image[roi[0][1]:roi[1][1], roi[0][0]:roi[1][0]]
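# --- Editor's usage sketch (assumes a BGR frame; detector parameter values are
# illustrative) ---
# gray = cv2.cvtColor(frame, cv2.COLOR_BGR2GRAY)
# circles = cv2.HoughCircles(gray, cv2.HOUGH_GRADIENT, dp=1, minDist=20,
#                            param1=50, param2=30)
# draw_circles(frame, circles)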
|
"""
Core protocol implementation
"""
import os
import socket
import sys
import threading
import time
import weakref
from hashlib import md5, sha1
import paramiko
from paramiko import util
from paramiko.auth_handler import AuthHandler
from paramiko.channel import Channel
from paramiko.common import xffffffff, cMSG_CHANNEL_OPEN, cMSG_IGNORE, \
cMSG_GLOBAL_REQUEST, DEBUG, MSG_KEXINIT, MSG_IGNORE, MSG_DISCONNECT, \
MSG_DEBUG, ERROR, WARNING, cMSG_UNIMPLEMENTED, INFO, cMSG_KEXINIT, \
cMSG_NEWKEYS, MSG_NEWKEYS, cMSG_REQUEST_SUCCESS, cMSG_REQUEST_FAILURE, \
CONNECTION_FAILED_CODE, OPEN_FAILED_ADMINISTRATIVELY_PROHIBITED, \
OPEN_SUCCEEDED, cMSG_CHANNEL_OPEN_FAILURE, cMSG_CHANNEL_OPEN_SUCCESS, \
MSG_GLOBAL_REQUEST, MSG_REQUEST_SUCCESS, MSG_REQUEST_FAILURE, \
MSG_CHANNEL_OPEN_SUCCESS, MSG_CHANNEL_OPEN_FAILURE, MSG_CHANNEL_OPEN, \
MSG_CHANNEL_SUCCESS, MSG_CHANNEL_FAILURE, MSG_CHANNEL_DATA, \
MSG_CHANNEL_EXTENDED_DATA, MSG_CHANNEL_WINDOW_ADJUST, MSG_CHANNEL_REQUEST, \
MSG_CHANNEL_EOF, MSG_CHANNEL_CLOSE
from paramiko.compress import ZlibCompressor, ZlibDecompressor
from paramiko.dsskey import DSSKey
from paramiko.kex_gex import KexGex
from paramiko.kex_group1 import KexGroup1
from paramiko.message import Message
from paramiko.packet import Packetizer, NeedRekeyException
from paramiko.primes import ModulusPack
from paramiko.py3compat import string_types, long, byte_ord, b
from paramiko.rsakey import RSAKey
from paramiko.ecdsakey import ECDSAKey
from paramiko.server import ServerInterface
from paramiko.sftp_client import SFTPClient
from paramiko.ssh_exception import (SSHException, BadAuthenticationType,
ChannelException, ProxyCommandFailure)
from paramiko.util import retry_on_signal
from Crypto.Cipher import Blowfish, AES, DES3, ARC4
try:
from Crypto.Util import Counter
except ImportError:
from paramiko.util import Counter
_active_threads = []
def _join_lingering_threads():
for thr in _active_threads:
thr.stop_thread()
import atexit
atexit.register(_join_lingering_threads)
class Transport (threading.Thread):
"""
An SSH Transport attaches to a stream (usually a socket), negotiates an
encrypted session, authenticates, and then creates stream tunnels, called
`channels <.Channel>`, across the session. Multiple channels can be
multiplexed across a single session (and often are, in the case of port
forwardings).
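    A minimal client-side sketch (host and credentials are placeholders)::
        t = Transport(('ssh.example.com', 22))
        t.connect(username='user', password='secret')
        chan = t.open_session()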
"""
_PROTO_ID = '2.0'
_CLIENT_ID = 'paramiko_%s' % paramiko.__version__
_preferred_ciphers = ('aes128-ctr', 'aes256-ctr', 'aes128-cbc', 'blowfish-cbc',
'aes256-cbc', '3des-cbc', 'arcfour128', 'arcfour256')
_preferred_macs = ('hmac-sha1', 'hmac-md5', 'hmac-sha1-96', 'hmac-md5-96')
_preferred_keys = ('ssh-rsa', 'ssh-dss', 'ecdsa-sha2-nistp256')
_preferred_kex = ('diffie-hellman-group1-sha1', 'diffie-hellman-group-exchange-sha1')
_preferred_compression = ('none',)
_cipher_info = {
'aes128-ctr': {'class': AES, 'mode': AES.MODE_CTR, 'block-size': 16, 'key-size': 16},
'aes256-ctr': {'class': AES, 'mode': AES.MODE_CTR, 'block-size': 16, 'key-size': 32},
'blowfish-cbc': {'class': Blowfish, 'mode': Blowfish.MODE_CBC, 'block-size': 8, 'key-size': 16},
'aes128-cbc': {'class': AES, 'mode': AES.MODE_CBC, 'block-size': 16, 'key-size': 16},
'aes256-cbc': {'class': AES, 'mode': AES.MODE_CBC, 'block-size': 16, 'key-size': 32},
'3des-cbc': {'class': DES3, 'mode': DES3.MODE_CBC, 'block-size': 8, 'key-size': 24},
'arcfour128': {'class': ARC4, 'mode': None, 'block-size': 8, 'key-size': 16},
'arcfour256': {'class': ARC4, 'mode': None, 'block-size': 8, 'key-size': 32},
}
_mac_info = {
'hmac-sha1': {'class': sha1, 'size': 20},
'hmac-sha1-96': {'class': sha1, 'size': 12},
'hmac-md5': {'class': md5, 'size': 16},
'hmac-md5-96': {'class': md5, 'size': 12},
}
_key_info = {
'ssh-rsa': RSAKey,
'ssh-dss': DSSKey,
'ecdsa-sha2-nistp256': ECDSAKey,
}
_kex_info = {
'diffie-hellman-group1-sha1': KexGroup1,
'diffie-hellman-group-exchange-sha1': KexGex,
}
_compression_info = {
# zlib@openssh.com is just zlib, but only turned on after a successful
# authentication. openssh servers may only offer this type because
# they've had troubles with security holes in zlib in the past.
'zlib@openssh.com': (ZlibCompressor, ZlibDecompressor),
'zlib': (ZlibCompressor, ZlibDecompressor),
'none': (None, None),
}
_modulus_pack = None
def __init__(self, sock):
"""
Create a new SSH session over an existing socket, or socket-like
object. This only creates the `.Transport` object; it doesn't begin the
SSH session yet. Use `connect` or `start_client` to begin a client
session, or `start_server` to begin a server session.
If the object is not actually a socket, it must have the following
methods:
- ``send(str)``: Writes from 1 to ``len(str)`` bytes, and returns an
int representing the number of bytes written. Returns
0 or raises ``EOFError`` if the stream has been closed.
- ``recv(int)``: Reads from 1 to ``int`` bytes and returns them as a
string. Returns 0 or raises ``EOFError`` if the stream has been
closed.
- ``close()``: Closes the socket.
- ``settimeout(n)``: Sets a (float) timeout on I/O operations.
For ease of use, you may also pass in an address (as a tuple) or a host
string as the ``sock`` argument. (A host string is a hostname with an
optional port (separated by ``":"``) which will be converted into a
tuple of ``(hostname, port)``.) A socket will be connected to this
address and used for communication. Exceptions from the ``socket``
call may be thrown in this case.
:param socket sock:
a socket or socket-like object to create the session over.
"""
self.active = False
if isinstance(sock, string_types):
# convert "host:port" into (host, port)
hl = sock.split(':', 1)
if len(hl) == 1:
sock = (hl[0], 22)
else:
sock = (hl[0], int(hl[1]))
if type(sock) is tuple:
# connect to the given (host, port)
hostname, port = sock
reason = 'No suitable address family'
for (family, socktype, proto, canonname, sockaddr) in socket.getaddrinfo(hostname, port, socket.AF_UNSPEC, socket.SOCK_STREAM):
if socktype == socket.SOCK_STREAM:
af = family
addr = sockaddr
sock = socket.socket(af, socket.SOCK_STREAM)
try:
retry_on_signal(lambda: sock.connect((hostname, port)))
except socket.error as e:
reason = str(e)
else:
break
else:
raise SSHException(
'Unable to connect to %s: %s' % (hostname, reason))
# okay, normal socket-ish flow here...
threading.Thread.__init__(self)
self.setDaemon(True)
self.sock = sock
# Python < 2.3 doesn't have the settimeout method - RogerB
try:
# we set the timeout so we can check self.active periodically to
# see if we should bail. socket.timeout exception is never
# propagated.
self.sock.settimeout(0.1)
except AttributeError:
pass
# negotiated crypto parameters
self.packetizer = Packetizer(sock)
self.local_version = 'SSH-' + self._PROTO_ID + '-' + self._CLIENT_ID
self.remote_version = ''
self.local_cipher = self.remote_cipher = ''
self.local_kex_init = self.remote_kex_init = None
self.local_mac = self.remote_mac = None
self.local_compression = self.remote_compression = None
self.session_id = None
self.host_key_type = None
self.host_key = None
# state used during negotiation
self.kex_engine = None
self.H = None
self.K = None
self.initial_kex_done = False
self.in_kex = False
self.authenticated = False
self._expected_packet = tuple()
self.lock = threading.Lock() # synchronization (always higher level than write_lock)
# tracking open channels
self._channels = ChannelMap()
self.channel_events = {} # (id -> Event)
self.channels_seen = {} # (id -> True)
self._channel_counter = 1
self.window_size = 65536
self.max_packet_size = 34816
self._forward_agent_handler = None
self._x11_handler = None
self._tcp_handler = None
self.saved_exception = None
self.clear_to_send = threading.Event()
self.clear_to_send_lock = threading.Lock()
self.clear_to_send_timeout = 30.0
self.log_name = 'paramiko.transport'
self.logger = util.get_logger(self.log_name)
self.packetizer.set_log(self.logger)
self.auth_handler = None
self.global_response = None # response Message from an arbitrary global request
self.completion_event = None # user-defined event callbacks
self.banner_timeout = 15 # how long (seconds) to wait for the SSH banner
# server mode:
self.server_mode = False
self.server_object = None
self.server_key_dict = {}
self.server_accepts = []
self.server_accept_cv = threading.Condition(self.lock)
self.subsystem_table = {}
def __repr__(self):
"""
Returns a string representation of this object, for debugging.
"""
out = '<paramiko.Transport at %s' % hex(long(id(self)) & xffffffff)
if not self.active:
out += ' (unconnected)'
else:
if self.local_cipher != '':
out += ' (cipher %s, %d bits)' % (self.local_cipher,
self._cipher_info[self.local_cipher]['key-size'] * 8)
if self.is_authenticated():
out += ' (active; %d open channel(s))' % len(self._channels)
elif self.initial_kex_done:
out += ' (connected; awaiting auth)'
else:
out += ' (connecting)'
out += '>'
return out
def atfork(self):
"""
Terminate this Transport without closing the session. On posix
systems, if a Transport is open during process forking, both parent
and child will share the underlying socket, but only one process can
use the connection (without corrupting the session). Use this method
to clean up a Transport object without disrupting the other process.
.. versionadded:: 1.5.3
"""
self.close()
def get_security_options(self):
"""
Return a `.SecurityOptions` object which can be used to tweak the
encryption algorithms this transport will permit (for encryption,
digest/hash operations, public keys, and key exchanges) and the order
of preference for them.
"""
return SecurityOptions(self)
def start_client(self, event=None):
"""
Negotiate a new SSH2 session as a client. This is the first step after
creating a new `.Transport`. A separate thread is created for protocol
negotiation.
If an event is passed in, this method returns immediately. When
negotiation is done (successful or not), the given ``Event`` will
be triggered. On failure, `is_active` will return ``False``.
        (Since 1.4) If ``event`` is ``None``, this method will not return until
        negotiation is done. On success, the method returns normally.
Otherwise an SSHException is raised.
After a successful negotiation, you will usually want to authenticate,
calling `auth_password <Transport.auth_password>` or
`auth_publickey <Transport.auth_publickey>`.
.. note:: `connect` is a simpler method for connecting as a client.
.. note:: After calling this method (or `start_server` or `connect`),
you should no longer directly read from or write to the original
socket object.
:param .threading.Event event:
an event to trigger when negotiation is complete (optional)
:raises SSHException: if negotiation fails (and no ``event`` was passed
in)
"""
self.active = True
if event is not None:
# async, return immediately and let the app poll for completion
self.completion_event = event
self.start()
return
# synchronous, wait for a result
self.completion_event = event = threading.Event()
self.start()
while True:
event.wait(0.1)
if not self.active:
e = self.get_exception()
if e is not None:
raise e
raise SSHException('Negotiation failed.')
if event.isSet():
break
def start_server(self, event=None, server=None):
"""
Negotiate a new SSH2 session as a server. This is the first step after
creating a new `.Transport` and setting up your server host key(s). A
separate thread is created for protocol negotiation.
If an event is passed in, this method returns immediately. When
negotiation is done (successful or not), the given ``Event`` will
be triggered. On failure, `is_active` will return ``False``.
        (Since 1.4) If ``event`` is ``None``, this method will not return until
        negotiation is done. On success, the method returns normally.
Otherwise an SSHException is raised.
After a successful negotiation, the client will need to authenticate.
Override the methods `get_allowed_auths
<.ServerInterface.get_allowed_auths>`, `check_auth_none
<.ServerInterface.check_auth_none>`, `check_auth_password
<.ServerInterface.check_auth_password>`, and `check_auth_publickey
<.ServerInterface.check_auth_publickey>` in the given ``server`` object
to control the authentication process.
After a successful authentication, the client should request to open a
channel. Override `check_channel_request
<.ServerInterface.check_channel_request>` in the given ``server``
object to allow channels to be opened.
.. note::
After calling this method (or `start_client` or `connect`), you
should no longer directly read from or write to the original socket
object.
:param .threading.Event event:
an event to trigger when negotiation is complete.
:param .ServerInterface server:
an object used to perform authentication and create `channels
<.Channel>`
:raises SSHException: if negotiation fails (and no ``event`` was passed
in)
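        A server-side sketch; ``client_sock``, the key path, and
        ``MyServerInterface`` (a `.ServerInterface` subclass) are
        illustrative placeholders::
            t = Transport(client_sock)
            t.add_server_key(RSAKey.from_private_key_file('host_rsa_key'))
            t.start_server(server=MyServerInterface())
            chan = t.accept(timeout=30)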
"""
if server is None:
server = ServerInterface()
self.server_mode = True
self.server_object = server
self.active = True
if event is not None:
# async, return immediately and let the app poll for completion
self.completion_event = event
self.start()
return
# synchronous, wait for a result
self.completion_event = event = threading.Event()
self.start()
while True:
event.wait(0.1)
if not self.active:
e = self.get_exception()
if e is not None:
raise e
raise SSHException('Negotiation failed.')
if event.isSet():
break
def add_server_key(self, key):
"""
Add a host key to the list of keys used for server mode. When behaving
as a server, the host key is used to sign certain packets during the
SSH2 negotiation, so that the client can trust that we are who we say
we are. Because this is used for signing, the key must contain private
key info, not just the public half. Only one key of each type (RSA or
DSS) is kept.
:param .PKey key:
the host key to add, usually an `.RSAKey` or `.DSSKey`.
"""
self.server_key_dict[key.get_name()] = key
def get_server_key(self):
"""
Return the active host key, in server mode. After negotiating with the
client, this method will return the negotiated host key. If only one
type of host key was set with `add_server_key`, that's the only key
that will ever be returned. But in cases where you have set more than
one type of host key (for example, an RSA key and a DSS key), the key
type will be negotiated by the client, and this method will return the
key of the type agreed on. If the host key has not been negotiated
yet, ``None`` is returned. In client mode, the behavior is undefined.
:return:
host key (`.PKey`) of the type negotiated by the client, or
``None``.
"""
try:
return self.server_key_dict[self.host_key_type]
except KeyError:
pass
return None
    @staticmethod
    def load_server_moduli(filename=None):
"""
(optional)
Load a file of prime moduli for use in doing group-exchange key
negotiation in server mode. It's a rather obscure option and can be
safely ignored.
In server mode, the remote client may request "group-exchange" key
negotiation, which asks the server to send a random prime number that
fits certain criteria. These primes are pretty difficult to compute,
so they can't be generated on demand. But many systems contain a file
of suitable primes (usually named something like ``/etc/ssh/moduli``).
If you call `load_server_moduli` and it returns ``True``, then this
file of primes has been loaded and we will support "group-exchange" in
server mode. Otherwise server mode will just claim that it doesn't
support that method of key negotiation.
:param str filename:
optional path to the moduli file, if you happen to know that it's
not in a standard location.
:return:
True if a moduli file was successfully loaded; False otherwise.
.. note:: This has no effect when used in client mode.
"""
Transport._modulus_pack = ModulusPack()
# places to look for the openssh "moduli" file
file_list = ['/etc/ssh/moduli', '/usr/local/etc/moduli']
if filename is not None:
file_list.insert(0, filename)
for fn in file_list:
try:
Transport._modulus_pack.read_file(fn)
return True
except IOError:
pass
# none succeeded
Transport._modulus_pack = None
return False
def close(self):
"""
Close this session, and any open channels that are tied to it.
"""
if not self.active:
return
self.stop_thread()
for chan in list(self._channels.values()):
chan._unlink()
self.sock.close()
def get_remote_server_key(self):
"""
Return the host key of the server (in client mode).
.. note::
Previously this call returned a tuple of ``(key type, key
string)``. You can get the same effect by calling `.PKey.get_name`
for the key type, and ``str(key)`` for the key string.
:raises SSHException: if no session is currently active.
:return: public key (`.PKey`) of the remote server
"""
if (not self.active) or (not self.initial_kex_done):
raise SSHException('No existing session')
return self.host_key
def is_active(self):
"""
Return true if this session is active (open).
:return:
True if the session is still active (open); False if the session is
closed
"""
return self.active
def open_session(self):
"""
Request a new channel to the server, of type ``"session"``. This is
just an alias for calling `open_channel` with an argument of
``"session"``.
:return: a new `.Channel`
:raises SSHException: if the request is rejected or the session ends
prematurely
"""
return self.open_channel('session')
def open_x11_channel(self, src_addr=None):
"""
Request a new channel to the client, of type ``"x11"``. This
is just an alias for ``open_channel('x11', src_addr=src_addr)``.
:param tuple src_addr:
the source address (``(str, int)``) of the x11 server (port is the
x11 port, ie. 6010)
:return: a new `.Channel`
:raises SSHException: if the request is rejected or the session ends
prematurely
"""
return self.open_channel('x11', src_addr=src_addr)
def open_forward_agent_channel(self):
"""
Request a new channel to the client, of type
``"auth-agent@openssh.com"``.
This is just an alias for ``open_channel('auth-agent@openssh.com')``.
:return: a new `.Channel`
:raises SSHException:
if the request is rejected or the session ends prematurely
"""
return self.open_channel('auth-agent@openssh.com')
def open_forwarded_tcpip_channel(self, src_addr, dest_addr):
"""
Request a new channel back to the client, of type ``"forwarded-tcpip"``.
This is used after a client has requested port forwarding, for sending
incoming connections back to the client.
:param src_addr: originator's address
:param dest_addr: local (server) connected address
"""
return self.open_channel('forwarded-tcpip', dest_addr, src_addr)
def open_channel(self, kind, dest_addr=None, src_addr=None):
"""
Request a new channel to the server. `Channels <.Channel>` are
socket-like objects used for the actual transfer of data across the
session. You may only request a channel after negotiating encryption
(using `connect` or `start_client`) and authenticating.
:param str kind:
the kind of channel requested (usually ``"session"``,
``"forwarded-tcpip"``, ``"direct-tcpip"``, or ``"x11"``)
:param tuple dest_addr:
the destination address (address + port tuple) of this port
forwarding, if ``kind`` is ``"forwarded-tcpip"`` or
``"direct-tcpip"`` (ignored for other channel types)
:param src_addr: the source address of this port forwarding, if
``kind`` is ``"forwarded-tcpip"``, ``"direct-tcpip"``, or ``"x11"``
:return: a new `.Channel` on success
:raises SSHException: if the request is rejected or the session ends
prematurely
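        For example, a sketch of opening a direct TCP tunnel (the addresses
        are placeholders)::
            chan = t.open_channel('direct-tcpip',
                                  dest_addr=('10.0.0.1', 80),
                                  src_addr=('127.0.0.1', 0))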
"""
if not self.active:
raise SSHException('SSH session not active')
self.lock.acquire()
try:
chanid = self._next_channel()
m = Message()
m.add_byte(cMSG_CHANNEL_OPEN)
m.add_string(kind)
m.add_int(chanid)
m.add_int(self.window_size)
m.add_int(self.max_packet_size)
if (kind == 'forwarded-tcpip') or (kind == 'direct-tcpip'):
m.add_string(dest_addr[0])
m.add_int(dest_addr[1])
m.add_string(src_addr[0])
m.add_int(src_addr[1])
elif kind == 'x11':
m.add_string(src_addr[0])
m.add_int(src_addr[1])
chan = Channel(chanid)
self._channels.put(chanid, chan)
self.channel_events[chanid] = event = threading.Event()
self.channels_seen[chanid] = True
chan._set_transport(self)
chan._set_window(self.window_size, self.max_packet_size)
finally:
self.lock.release()
self._send_user_message(m)
while True:
event.wait(0.1)
if not self.active:
e = self.get_exception()
if e is None:
e = SSHException('Unable to open channel.')
raise e
if event.isSet():
break
chan = self._channels.get(chanid)
if chan is not None:
return chan
e = self.get_exception()
if e is None:
e = SSHException('Unable to open channel.')
raise e
def request_port_forward(self, address, port, handler=None):
"""
Ask the server to forward TCP connections from a listening port on
the server, across this SSH session.
If a handler is given, that handler is called from a different thread
whenever a forwarded connection arrives. The handler parameters are::
handler(channel, (origin_addr, origin_port), (server_addr, server_port))
where ``server_addr`` and ``server_port`` are the address and port that
the server was listening on.
If no handler is set, the default behavior is to send new incoming
forwarded connections into the accept queue, to be picked up via
`accept`.
:param str address: the address to bind when forwarding
:param int port:
the port to forward, or 0 to ask the server to allocate any port
:param callable handler:
optional handler for incoming forwarded connections, of the form
``func(Channel, (str, int), (str, int))``.
:return: the port number (`int`) allocated by the server
:raises SSHException: if the server refused the TCP forward request
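        A sketch of a custom handler (names are placeholders)::
            def handler(channel, origin, server):
                print('forwarded connection from %s:%d' % origin)
                channel.close()
            port = t.request_port_forward('', 8080, handler)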
"""
if not self.active:
raise SSHException('SSH session not active')
port = int(port)
response = self.global_request('tcpip-forward', (address, port), wait=True)
if response is None:
raise SSHException('TCP forwarding request denied')
if port == 0:
port = response.get_int()
if handler is None:
            def default_handler(channel, src_addr_port, dest_addr_port):
                # default: hand incoming connections to the accept() queue
                self._queue_incoming_channel(channel)
            handler = default_handler
self._tcp_handler = handler
return port
def cancel_port_forward(self, address, port):
"""
Ask the server to cancel a previous port-forwarding request. No more
connections to the given address & port will be forwarded across this
ssh connection.
:param str address: the address to stop forwarding
:param int port: the port to stop forwarding
"""
if not self.active:
return
self._tcp_handler = None
self.global_request('cancel-tcpip-forward', (address, port), wait=True)
def open_sftp_client(self):
"""
Create an SFTP client channel from an open transport. On success, an
SFTP session will be opened with the remote host, and a new
`.SFTPClient` object will be returned.
:return:
a new `.SFTPClient` referring to an sftp session (channel) across
this transport
"""
return SFTPClient.from_transport(self)
def send_ignore(self, byte_count=None):
"""
Send a junk packet across the encrypted link. This is sometimes used
to add "noise" to a connection to confuse would-be attackers. It can
also be used as a keep-alive for long lived connections traversing
firewalls.
:param int byte_count:
the number of random bytes to send in the payload of the ignored
packet -- defaults to a random number from 10 to 41.
"""
m = Message()
m.add_byte(cMSG_IGNORE)
if byte_count is None:
byte_count = (byte_ord(os.urandom(1)) % 32) + 10
m.add_bytes(os.urandom(byte_count))
self._send_user_message(m)
def renegotiate_keys(self):
"""
Force this session to switch to new keys. Normally this is done
automatically after the session hits a certain number of packets or
bytes sent or received, but this method gives you the option of forcing
new keys whenever you want. Negotiating new keys causes a pause in
traffic both ways as the two sides swap keys and do computations. This
method returns when the session has switched to new keys.
:raises SSHException: if the key renegotiation failed (which causes the
session to end)
"""
self.completion_event = threading.Event()
self._send_kex_init()
while True:
self.completion_event.wait(0.1)
if not self.active:
e = self.get_exception()
if e is not None:
raise e
raise SSHException('Negotiation failed.')
if self.completion_event.isSet():
break
return
def set_keepalive(self, interval):
"""
Turn on/off keepalive packets (default is off). If this is set, after
``interval`` seconds without sending any data over the connection, a
"keepalive" packet will be sent (and ignored by the remote host). This
can be useful to keep connections alive over a NAT, for example.
:param int interval:
seconds to wait before sending a keepalive packet (or
0 to disable keepalives).
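        For example, ``t.set_keepalive(30)`` sends a keepalive packet after 30
        idle seconds, while ``t.set_keepalive(0)`` disables keepalives.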
"""
self.packetizer.set_keepalive(interval,
lambda x=weakref.proxy(self): x.global_request('keepalive@lag.net', wait=False))
def global_request(self, kind, data=None, wait=True):
"""
Make a global request to the remote host. These are normally
extensions to the SSH2 protocol.
:param str kind: name of the request.
:param tuple data:
an optional tuple containing additional data to attach to the
request.
:param bool wait:
``True`` if this method should not return until a response is
received; ``False`` otherwise.
:return:
a `.Message` containing possible additional data if the request was
successful (or an empty `.Message` if ``wait`` was ``False``);
``None`` if the request was denied.
"""
if wait:
self.completion_event = threading.Event()
m = Message()
m.add_byte(cMSG_GLOBAL_REQUEST)
m.add_string(kind)
m.add_boolean(wait)
if data is not None:
m.add(*data)
self._log(DEBUG, 'Sending global request "%s"' % kind)
self._send_user_message(m)
if not wait:
return None
while True:
self.completion_event.wait(0.1)
if not self.active:
return None
if self.completion_event.isSet():
break
return self.global_response
def accept(self, timeout=None):
"""
Return the next channel opened by the client over this transport, in
server mode. If no channel is opened before the given timeout, ``None``
is returned.
:param int timeout:
seconds to wait for a channel, or ``None`` to wait forever
:return: a new `.Channel` opened by the client
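        A typical server accept loop (sketch; ``handle`` is a placeholder)::
            while t.is_active():
                chan = t.accept(timeout=1)
                if chan is not None:
                    handle(chan)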
"""
self.lock.acquire()
try:
if len(self.server_accepts) > 0:
chan = self.server_accepts.pop(0)
else:
self.server_accept_cv.wait(timeout)
if len(self.server_accepts) > 0:
chan = self.server_accepts.pop(0)
else:
# timeout
chan = None
finally:
self.lock.release()
return chan
def connect(self, hostkey=None, username='', password=None, pkey=None):
"""
Negotiate an SSH2 session, and optionally verify the server's host key
and authenticate using a password or private key. This is a shortcut
for `start_client`, `get_remote_server_key`, and
`Transport.auth_password` or `Transport.auth_publickey`. Use those
methods if you want more control.
You can use this method immediately after creating a Transport to
negotiate encryption with a server. If it fails, an exception will be
thrown. On success, the method will return cleanly, and an encrypted
session exists. You may immediately call `open_channel` or
`open_session` to get a `.Channel` object, which is used for data
transfer.
.. note::
If you fail to supply a password or private key, this method may
succeed, but a subsequent `open_channel` or `open_session` call may
fail because you haven't authenticated yet.
:param .PKey hostkey:
the host key expected from the server, or ``None`` if you don't
want to do host key verification.
:param str username: the username to authenticate as.
:param str password:
a password to use for authentication, if you want to use password
authentication; otherwise ``None``.
:param .PKey pkey:
a private key to use for authentication, if you want to use private
key authentication; otherwise ``None``.
:raises SSHException: if the SSH2 negotiation fails, the host key
supplied by the server is incorrect, or authentication fails.
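        A sketch combining host-key checking and password auth (the host,
        ``expected_key``, and credentials are placeholders)::
            t = Transport(('ssh.example.com', 22))
            t.connect(hostkey=expected_key, username='user', password='secret')
            sftp = t.open_sftp_client()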
"""
if hostkey is not None:
self._preferred_keys = [hostkey.get_name()]
self.start_client()
# check host key if we were given one
if hostkey is not None:
key = self.get_remote_server_key()
if (key.get_name() != hostkey.get_name()) or (key.asbytes() != hostkey.asbytes()):
self._log(DEBUG, 'Bad host key from server')
self._log(DEBUG, 'Expected: %s: %s' % (hostkey.get_name(), repr(hostkey.asbytes())))
self._log(DEBUG, 'Got : %s: %s' % (key.get_name(), repr(key.asbytes())))
raise SSHException('Bad host key from server')
self._log(DEBUG, 'Host key verified (%s)' % hostkey.get_name())
if (pkey is not None) or (password is not None):
if password is not None:
self._log(DEBUG, 'Attempting password auth...')
self.auth_password(username, password)
else:
self._log(DEBUG, 'Attempting public-key auth...')
self.auth_publickey(username, pkey)
return
def get_exception(self):
"""
Return any exception that happened during the last server request.
This can be used to fetch more specific error information after using
calls like `start_client`. The exception (if any) is cleared after
this call.
:return:
an exception, or ``None`` if there is no stored exception.
.. versionadded:: 1.1
"""
self.lock.acquire()
try:
e = self.saved_exception
self.saved_exception = None
return e
finally:
self.lock.release()
def set_subsystem_handler(self, name, handler, *larg, **kwarg):
"""
Set the handler class for a subsystem in server mode. If a request
for this subsystem is made on an open ssh channel later, this handler
will be constructed and called -- see `.SubsystemHandler` for more
detailed documentation.
Any extra parameters (including keyword arguments) are saved and
passed to the `.SubsystemHandler` constructor later.
:param str name: name of the subsystem.
:param class handler:
subclass of `.SubsystemHandler` that handles this subsystem.
"""
try:
self.lock.acquire()
self.subsystem_table[name] = (handler, larg, kwarg)
finally:
self.lock.release()
def is_authenticated(self):
"""
Return true if this session is active and authenticated.
:return:
True if the session is still open and has been authenticated
successfully; False if authentication failed and/or the session is
closed.
"""
return self.active and (self.auth_handler is not None) and self.auth_handler.is_authenticated()
def get_username(self):
"""
Return the username this connection is authenticated for. If the
session is not authenticated (or authentication failed), this method
returns ``None``.
:return: username that was authenticated (a `str`), or ``None``.
"""
if not self.active or (self.auth_handler is None):
return None
return self.auth_handler.get_username()
def get_banner(self):
"""
Return the banner supplied by the server upon connect. If no banner is
supplied, this method returns C{None}.
@return: server supplied banner, or C{None}.
@rtype: string
"""
if not self.active or (self.auth_handler is None):
return None
return self.auth_handler.banner
def auth_none(self, username):
"""
Try to authenticate to the server using no authentication at all.
This will almost always fail. It may be useful for determining the
list of authentication types supported by the server, by catching the
`.BadAuthenticationType` exception raised.
:param str username: the username to authenticate as
:return:
`list` of auth types permissible for the next stage of
authentication (normally empty)
:raises BadAuthenticationType: if "none" authentication isn't allowed
by the server for this user
:raises SSHException: if the authentication failed due to a network
error
.. versionadded:: 1.5
"""
if (not self.active) or (not self.initial_kex_done):
raise SSHException('No existing session')
my_event = threading.Event()
self.auth_handler = AuthHandler(self)
self.auth_handler.auth_none(username, my_event)
return self.auth_handler.wait_for_response(my_event)
def auth_password(self, username, password, event=None, fallback=True):
"""
Authenticate to the server using a password. The username and password
are sent over an encrypted link.
If an ``event`` is passed in, this method will return immediately, and
the event will be triggered once authentication succeeds or fails. On
success, `is_authenticated` will return ``True``. On failure, you may
use `get_exception` to get more detailed error information.
Since 1.1, if no event is passed, this method will block until the
authentication succeeds or fails. On failure, an exception is raised.
Otherwise, the method simply returns.
Since 1.5, if no event is passed and ``fallback`` is ``True`` (the
default), if the server doesn't support plain password authentication
but does support so-called "keyboard-interactive" mode, an attempt
will be made to authenticate using this interactive mode. If it fails,
the normal exception will be thrown as if the attempt had never been
made. This is useful for some recent Gentoo and Debian distributions,
which turn off plain password authentication in a misguided belief
that interactive authentication is "more secure". (It's not.)
If the server requires multi-step authentication (which is very rare),
this method will return a list of auth types permissible for the next
step. Otherwise, in the normal case, an empty list is returned.
:param str username: the username to authenticate as
:param basestring password: the password to authenticate with
:param .threading.Event event:
an event to trigger when the authentication attempt is complete
(whether it was successful or not)
:param bool fallback:
``True`` if an attempt at an automated "interactive" password auth
should be made if the server doesn't support normal password auth
:return:
`list` of auth types permissible for the next stage of
authentication (normally empty)
:raises BadAuthenticationType: if password authentication isn't
allowed by the server for this user (and no event was passed in)
:raises AuthenticationException: if the authentication failed (and no
event was passed in)
:raises SSHException: if there was a network error
"""
if (not self.active) or (not self.initial_kex_done):
# we should never try to send the password unless we're on a secure link
raise SSHException('No existing session')
if event is None:
my_event = threading.Event()
else:
my_event = event
self.auth_handler = AuthHandler(self)
self.auth_handler.auth_password(username, password, my_event)
if event is not None:
# caller wants to wait for event themselves
return []
try:
return self.auth_handler.wait_for_response(my_event)
except BadAuthenticationType as e:
# if password auth isn't allowed, but keyboard-interactive *is*, try to fudge it
if not fallback or ('keyboard-interactive' not in e.allowed_types):
raise
try:
def handler(title, instructions, fields):
if len(fields) > 1:
raise SSHException('Fallback authentication failed.')
if len(fields) == 0:
# for some reason, at least on os x, a 2nd request will
# be made with zero fields requested. maybe it's just
# to try to fake out automated scripting of the exact
# type we're doing here. *shrug* :)
return []
return [password]
return self.auth_interactive(username, handler)
except SSHException:
# attempt failed; just raise the original exception
raise e
def auth_publickey(self, username, key, event=None):
"""
Authenticate to the server using a private key. The key is used to
sign data from the server, so it must include the private part.
If an ``event`` is passed in, this method will return immediately, and
the event will be triggered once authentication succeeds or fails. On
success, `is_authenticated` will return ``True``. On failure, you may
use `get_exception` to get more detailed error information.
Since 1.1, if no event is passed, this method will block until the
authentication succeeds or fails. On failure, an exception is raised.
Otherwise, the method simply returns.
If the server requires multi-step authentication (which is very rare),
this method will return a list of auth types permissible for the next
step. Otherwise, in the normal case, an empty list is returned.
:param str username: the username to authenticate as
:param .PKey key: the private key to authenticate with
:param .threading.Event event:
an event to trigger when the authentication attempt is complete
(whether it was successful or not)
:return:
`list` of auth types permissible for the next stage of
authentication (normally empty)
:raises BadAuthenticationType: if public-key authentication isn't
allowed by the server for this user (and no event was passed in)
:raises AuthenticationException: if the authentication failed (and no
event was passed in)
:raises SSHException: if there was a network error
"""
if (not self.active) or (not self.initial_kex_done):
# we should never try to authenticate unless we're on a secure link
raise SSHException('No existing session')
if event is None:
my_event = threading.Event()
else:
my_event = event
self.auth_handler = AuthHandler(self)
self.auth_handler.auth_publickey(username, key, my_event)
if event is not None:
# caller wants to wait for event themselves
return []
return self.auth_handler.wait_for_response(my_event)
def auth_interactive(self, username, handler, submethods=''):
"""
Authenticate to the server interactively. A handler is used to answer
arbitrary questions from the server. On many servers, this is just a
dumb wrapper around PAM.
This method will block until the authentication succeeds or fails,
        periodically calling the handler asynchronously to get answers to
authentication questions. The handler may be called more than once
if the server continues to ask questions.
The handler is expected to be a callable that will handle calls of the
form: ``handler(title, instructions, prompt_list)``. The ``title`` is
meant to be a dialog-window title, and the ``instructions`` are user
instructions (both are strings). ``prompt_list`` will be a list of
prompts, each prompt being a tuple of ``(str, bool)``. The string is
the prompt and the boolean indicates whether the user text should be
echoed.
A sample call would thus be:
``handler('title', 'instructions', [('Password:', False)])``.
The handler should return a list or tuple of answers to the server's
questions.
If the server requires multi-step authentication (which is very rare),
this method will return a list of auth types permissible for the next
step. Otherwise, in the normal case, an empty list is returned.
:param str username: the username to authenticate as
:param callable handler: a handler for responding to server questions
:param str submethods: a string list of desired submethods (optional)
:return:
`list` of auth types permissible for the next stage of
authentication (normally empty).
        :raises BadAuthenticationType: if keyboard-interactive authentication
            isn't allowed by the server for this user
:raises AuthenticationException: if the authentication failed
:raises SSHException: if there was a network error
.. versionadded:: 1.5
"""
if (not self.active) or (not self.initial_kex_done):
# we should never try to authenticate unless we're on a secure link
raise SSHException('No existing session')
my_event = threading.Event()
self.auth_handler = AuthHandler(self)
self.auth_handler.auth_interactive(username, handler, my_event, submethods)
return self.auth_handler.wait_for_response(my_event)
def set_log_channel(self, name):
"""
Set the channel for this transport's logging. The default is
``"paramiko.transport"`` but it can be set to anything you want. (See
the `.logging` module for more info.) SSH Channels will log to a
sub-channel of the one specified.
:param str name: new channel name for logging
.. versionadded:: 1.1
"""
self.log_name = name
self.logger = util.get_logger(name)
self.packetizer.set_log(self.logger)
def get_log_channel(self):
"""
Return the channel name used for this transport's logging.
:return: channel name as a `str`
.. versionadded:: 1.2
"""
return self.log_name
def set_hexdump(self, hexdump):
"""
Turn on/off logging a hex dump of protocol traffic at DEBUG level in
the logs. Normally you would want this off (which is the default),
but if you are debugging something, it may be useful.
:param bool hexdump:
            ``True`` to log protocol traffic (in hex) to the log; ``False``
otherwise.
"""
self.packetizer.set_hexdump(hexdump)
def get_hexdump(self):
"""
Return ``True`` if the transport is currently logging hex dumps of
protocol traffic.
:return: ``True`` if hex dumps are being logged, else ``False``.
.. versionadded:: 1.4
"""
return self.packetizer.get_hexdump()
def use_compression(self, compress=True):
"""
        Turn on/off compression. This will only have an effect before starting
        the transport (i.e., before calling `connect`, etc.). By default,
compression is off since it negatively affects interactive sessions.
:param bool compress:
``True`` to ask the remote client/server to compress traffic;
``False`` to refuse compression
.. versionadded:: 1.5.2
"""
if compress:
self._preferred_compression = ('zlib@openssh.com', 'zlib', 'none')
else:
self._preferred_compression = ('none',)
def getpeername(self):
"""
Return the address of the remote side of this Transport, if possible.
This is effectively a wrapper around ``'getpeername'`` on the underlying
socket. If the socket-like object has no ``'getpeername'`` method,
then ``("unknown", 0)`` is returned.
:return:
the address of the remote host, if known, as a ``(str, int)``
tuple.
"""
gp = getattr(self.sock, 'getpeername', None)
if gp is None:
return 'unknown', 0
return gp()
def stop_thread(self):
self.active = False
self.packetizer.close()
while self.isAlive():
self.join(10)
### internals...
def _log(self, level, msg, *args):
if issubclass(type(msg), list):
for m in msg:
self.logger.log(level, m)
else:
self.logger.log(level, msg, *args)
def _get_modulus_pack(self):
"""used by KexGex to find primes for group exchange"""
return self._modulus_pack
def _next_channel(self):
"""you are holding the lock"""
chanid = self._channel_counter
while self._channels.get(chanid) is not None:
self._channel_counter = (self._channel_counter + 1) & 0xffffff
chanid = self._channel_counter
self._channel_counter = (self._channel_counter + 1) & 0xffffff
return chanid
def _unlink_channel(self, chanid):
"""used by a Channel to remove itself from the active channel list"""
self._channels.delete(chanid)
def _send_message(self, data):
self.packetizer.send_message(data)
def _send_user_message(self, data):
"""
send a message, but block if we're in key negotiation. this is used
for user-initiated requests.
"""
start = time.time()
while True:
self.clear_to_send.wait(0.1)
if not self.active:
self._log(DEBUG, 'Dropping user packet because connection is dead.')
return
self.clear_to_send_lock.acquire()
if self.clear_to_send.isSet():
break
self.clear_to_send_lock.release()
if time.time() > start + self.clear_to_send_timeout:
                raise SSHException('Timed out waiting for key negotiation to finish')
try:
self._send_message(data)
finally:
self.clear_to_send_lock.release()
def _set_K_H(self, k, h):
"""used by a kex object to set the K (root key) and H (exchange hash)"""
self.K = k
self.H = h
if self.session_id is None:
self.session_id = h
def _expect_packet(self, *ptypes):
"""used by a kex object to register the next packet type it expects to see"""
self._expected_packet = tuple(ptypes)
def _verify_key(self, host_key, sig):
key = self._key_info[self.host_key_type](Message(host_key))
if key is None:
raise SSHException('Unknown host key type')
if not key.verify_ssh_sig(self.H, Message(sig)):
raise SSHException('Signature verification (%s) failed.' % self.host_key_type)
self.host_key = key
def _compute_key(self, id, nbytes):
"""id is 'A' - 'F' for the various keys used by ssh"""
m = Message()
m.add_mpint(self.K)
m.add_bytes(self.H)
m.add_byte(b(id))
m.add_bytes(self.session_id)
out = sofar = sha1(m.asbytes()).digest()
while len(out) < nbytes:
m = Message()
m.add_mpint(self.K)
m.add_bytes(self.H)
m.add_bytes(sofar)
digest = sha1(m.asbytes()).digest()
out += digest
sofar += digest
return out[:nbytes]
def _get_cipher(self, name, key, iv):
if name not in self._cipher_info:
raise SSHException('Unknown client cipher ' + name)
if name in ('arcfour128', 'arcfour256'):
# arcfour cipher
cipher = self._cipher_info[name]['class'].new(key)
# as per RFC 4345, the first 1536 bytes of keystream
# generated by the cipher MUST be discarded
cipher.encrypt(" " * 1536)
return cipher
elif name.endswith("-ctr"):
# CTR modes, we need a counter
counter = Counter.new(nbits=self._cipher_info[name]['block-size'] * 8, initial_value=util.inflate_long(iv, True))
return self._cipher_info[name]['class'].new(key, self._cipher_info[name]['mode'], iv, counter)
else:
return self._cipher_info[name]['class'].new(key, self._cipher_info[name]['mode'], iv)
def _set_forward_agent_handler(self, handler):
if handler is None:
def default_handler(channel):
self._queue_incoming_channel(channel)
self._forward_agent_handler = default_handler
else:
self._forward_agent_handler = handler
def _set_x11_handler(self, handler):
# only called if a channel has turned on x11 forwarding
if handler is None:
# by default, use the same mechanism as accept()
def default_handler(channel, src_addr_port):
self._queue_incoming_channel(channel)
self._x11_handler = default_handler
else:
self._x11_handler = handler
def _queue_incoming_channel(self, channel):
self.lock.acquire()
try:
self.server_accepts.append(channel)
self.server_accept_cv.notify()
finally:
self.lock.release()
def run(self):
# (use the exposed "run" method, because if we specify a thread target
# of a private method, threading.Thread will keep a reference to it
# indefinitely, creating a GC cycle and not letting Transport ever be
# GC'd. it's a bug in Thread.)
# Hold reference to 'sys' so we can test sys.modules to detect
# interpreter shutdown.
self.sys = sys
# active=True occurs before the thread is launched, to avoid a race
_active_threads.append(self)
if self.server_mode:
self._log(DEBUG, 'starting thread (server mode): %s' % hex(long(id(self)) & xffffffff))
else:
self._log(DEBUG, 'starting thread (client mode): %s' % hex(long(id(self)) & xffffffff))
try:
try:
self.packetizer.write_all(b(self.local_version + '\r\n'))
self._check_banner()
self._send_kex_init()
self._expect_packet(MSG_KEXINIT)
while self.active:
if self.packetizer.need_rekey() and not self.in_kex:
self._send_kex_init()
try:
ptype, m = self.packetizer.read_message()
except NeedRekeyException:
continue
if ptype == MSG_IGNORE:
continue
elif ptype == MSG_DISCONNECT:
self._parse_disconnect(m)
self.active = False
self.packetizer.close()
break
elif ptype == MSG_DEBUG:
self._parse_debug(m)
continue
if len(self._expected_packet) > 0:
if ptype not in self._expected_packet:
raise SSHException('Expecting packet from %r, got %d' % (self._expected_packet, ptype))
self._expected_packet = tuple()
if (ptype >= 30) and (ptype <= 39):
self.kex_engine.parse_next(ptype, m)
continue
if ptype in self._handler_table:
self._handler_table[ptype](self, m)
elif ptype in self._channel_handler_table:
chanid = m.get_int()
chan = self._channels.get(chanid)
if chan is not None:
self._channel_handler_table[ptype](chan, m)
elif chanid in self.channels_seen:
self._log(DEBUG, 'Ignoring message for dead channel %d' % chanid)
else:
self._log(ERROR, 'Channel request for unknown channel %d' % chanid)
self.active = False
self.packetizer.close()
elif (self.auth_handler is not None) and (ptype in self.auth_handler._handler_table):
self.auth_handler._handler_table[ptype](self.auth_handler, m)
else:
self._log(WARNING, 'Oops, unhandled type %d' % ptype)
msg = Message()
msg.add_byte(cMSG_UNIMPLEMENTED)
msg.add_int(m.seqno)
self._send_message(msg)
except SSHException as e:
self._log(ERROR, 'Exception: ' + str(e))
self._log(ERROR, util.tb_strings())
self.saved_exception = e
except EOFError as e:
self._log(DEBUG, 'EOF in transport thread')
#self._log(DEBUG, util.tb_strings())
self.saved_exception = e
except socket.error as e:
                if isinstance(e.args, tuple) and len(e.args) >= 2:
                    emsg = '%s (%d)' % (e.args[1], e.args[0])
                else:
                    # empty or single-element args, e.g. socket.timeout
                    emsg = str(e) or repr(e)
self._log(ERROR, 'Socket exception: ' + emsg)
self.saved_exception = e
except Exception as e:
self._log(ERROR, 'Unknown exception: ' + str(e))
self._log(ERROR, util.tb_strings())
self.saved_exception = e
_active_threads.remove(self)
for chan in list(self._channels.values()):
chan._unlink()
if self.active:
self.active = False
self.packetizer.close()
if self.completion_event is not None:
self.completion_event.set()
if self.auth_handler is not None:
self.auth_handler.abort()
for event in self.channel_events.values():
event.set()
try:
self.lock.acquire()
self.server_accept_cv.notify()
finally:
self.lock.release()
self.sock.close()
except:
# Don't raise spurious 'NoneType has no attribute X' errors when we
# wake up during interpreter shutdown. Or rather -- raise
# everything *if* sys.modules (used as a convenient sentinel)
# appears to still exist.
if self.sys.modules is not None:
raise
### protocol stages
def _negotiate_keys(self, m):
# throws SSHException on anything unusual
self.clear_to_send_lock.acquire()
try:
self.clear_to_send.clear()
finally:
self.clear_to_send_lock.release()
if self.local_kex_init is None:
# remote side wants to renegotiate
self._send_kex_init()
self._parse_kex_init(m)
self.kex_engine.start_kex()
def _check_banner(self):
# this is slow, but we only have to do it once
for i in range(100):
# give them 15 seconds for the first line, then just 2 seconds
# each additional line. (some sites have very high latency.)
if i == 0:
timeout = self.banner_timeout
else:
timeout = 2
try:
buf = self.packetizer.readline(timeout)
except ProxyCommandFailure:
raise
except Exception as e:
                raise SSHException('Error reading SSH protocol banner: ' + str(e))
if buf[:4] == 'SSH-':
break
self._log(DEBUG, 'Banner: ' + buf)
if buf[:4] != 'SSH-':
raise SSHException('Indecipherable protocol version "' + buf + '"')
# save this server version string for later
self.remote_version = buf
# pull off any attached comment
comment = ''
i = buf.find(' ')
if i >= 0:
comment = buf[i+1:]
buf = buf[:i]
# parse out version string and make sure it matches
segs = buf.split('-', 2)
if len(segs) < 3:
raise SSHException('Invalid SSH banner')
version = segs[1]
client = segs[2]
if version != '1.99' and version != '2.0':
raise SSHException('Incompatible version (%s instead of 2.0)' % (version,))
self._log(INFO, 'Connected (version %s, client %s)' % (version, client))
def _send_kex_init(self):
"""
announce to the other side that we'd like to negotiate keys, and what
kind of key negotiation we support.
"""
self.clear_to_send_lock.acquire()
try:
self.clear_to_send.clear()
finally:
self.clear_to_send_lock.release()
self.in_kex = True
if self.server_mode:
if (self._modulus_pack is None) and ('diffie-hellman-group-exchange-sha1' in self._preferred_kex):
# can't do group-exchange if we don't have a pack of potential primes
pkex = list(self.get_security_options().kex)
pkex.remove('diffie-hellman-group-exchange-sha1')
self.get_security_options().kex = pkex
available_server_keys = list(filter(list(self.server_key_dict.keys()).__contains__,
self._preferred_keys))
else:
available_server_keys = self._preferred_keys
m = Message()
m.add_byte(cMSG_KEXINIT)
m.add_bytes(os.urandom(16))
m.add_list(self._preferred_kex)
m.add_list(available_server_keys)
m.add_list(self._preferred_ciphers)
m.add_list(self._preferred_ciphers)
m.add_list(self._preferred_macs)
m.add_list(self._preferred_macs)
m.add_list(self._preferred_compression)
m.add_list(self._preferred_compression)
m.add_string(bytes())
m.add_string(bytes())
m.add_boolean(False)
m.add_int(0)
# save a copy for later (needed to compute a hash)
self.local_kex_init = m.asbytes()
self._send_message(m)
def _parse_kex_init(self, m):
cookie = m.get_bytes(16)
kex_algo_list = m.get_list()
server_key_algo_list = m.get_list()
client_encrypt_algo_list = m.get_list()
server_encrypt_algo_list = m.get_list()
client_mac_algo_list = m.get_list()
server_mac_algo_list = m.get_list()
client_compress_algo_list = m.get_list()
server_compress_algo_list = m.get_list()
client_lang_list = m.get_list()
server_lang_list = m.get_list()
kex_follows = m.get_boolean()
unused = m.get_int()
self._log(DEBUG, 'kex algos:' + str(kex_algo_list) + ' server key:' + str(server_key_algo_list) +
' client encrypt:' + str(client_encrypt_algo_list) +
' server encrypt:' + str(server_encrypt_algo_list) +
' client mac:' + str(client_mac_algo_list) +
' server mac:' + str(server_mac_algo_list) +
' client compress:' + str(client_compress_algo_list) +
' server compress:' + str(server_compress_algo_list) +
' client lang:' + str(client_lang_list) +
' server lang:' + str(server_lang_list) +
' kex follows?' + str(kex_follows))
# as a server, we pick the first item in the client's list that we support.
# as a client, we pick the first item in our list that the server supports.
if self.server_mode:
agreed_kex = list(filter(self._preferred_kex.__contains__, kex_algo_list))
else:
agreed_kex = list(filter(kex_algo_list.__contains__, self._preferred_kex))
if len(agreed_kex) == 0:
raise SSHException('Incompatible ssh peer (no acceptable kex algorithm)')
self.kex_engine = self._kex_info[agreed_kex[0]](self)
if self.server_mode:
available_server_keys = list(filter(list(self.server_key_dict.keys()).__contains__,
self._preferred_keys))
agreed_keys = list(filter(available_server_keys.__contains__, server_key_algo_list))
else:
agreed_keys = list(filter(server_key_algo_list.__contains__, self._preferred_keys))
if len(agreed_keys) == 0:
raise SSHException('Incompatible ssh peer (no acceptable host key)')
self.host_key_type = agreed_keys[0]
if self.server_mode and (self.get_server_key() is None):
raise SSHException('Incompatible ssh peer (can\'t match requested host key type)')
if self.server_mode:
agreed_local_ciphers = list(filter(self._preferred_ciphers.__contains__,
server_encrypt_algo_list))
agreed_remote_ciphers = list(filter(self._preferred_ciphers.__contains__,
client_encrypt_algo_list))
else:
agreed_local_ciphers = list(filter(client_encrypt_algo_list.__contains__,
self._preferred_ciphers))
agreed_remote_ciphers = list(filter(server_encrypt_algo_list.__contains__,
self._preferred_ciphers))
if (len(agreed_local_ciphers) == 0) or (len(agreed_remote_ciphers) == 0):
raise SSHException('Incompatible ssh server (no acceptable ciphers)')
self.local_cipher = agreed_local_ciphers[0]
self.remote_cipher = agreed_remote_ciphers[0]
self._log(DEBUG, 'Ciphers agreed: local=%s, remote=%s' % (self.local_cipher, self.remote_cipher))
if self.server_mode:
agreed_remote_macs = list(filter(self._preferred_macs.__contains__, client_mac_algo_list))
agreed_local_macs = list(filter(self._preferred_macs.__contains__, server_mac_algo_list))
else:
agreed_local_macs = list(filter(client_mac_algo_list.__contains__, self._preferred_macs))
agreed_remote_macs = list(filter(server_mac_algo_list.__contains__, self._preferred_macs))
if (len(agreed_local_macs) == 0) or (len(agreed_remote_macs) == 0):
raise SSHException('Incompatible ssh server (no acceptable macs)')
self.local_mac = agreed_local_macs[0]
self.remote_mac = agreed_remote_macs[0]
if self.server_mode:
agreed_remote_compression = list(filter(self._preferred_compression.__contains__, client_compress_algo_list))
agreed_local_compression = list(filter(self._preferred_compression.__contains__, server_compress_algo_list))
else:
agreed_local_compression = list(filter(client_compress_algo_list.__contains__, self._preferred_compression))
agreed_remote_compression = list(filter(server_compress_algo_list.__contains__, self._preferred_compression))
if (len(agreed_local_compression) == 0) or (len(agreed_remote_compression) == 0):
raise SSHException('Incompatible ssh server (no acceptable compression) %r %r %r' % (agreed_local_compression, agreed_remote_compression, self._preferred_compression))
self.local_compression = agreed_local_compression[0]
self.remote_compression = agreed_remote_compression[0]
self._log(DEBUG, 'using kex %s; server key type %s; cipher: local %s, remote %s; mac: local %s, remote %s; compression: local %s, remote %s' %
(agreed_kex[0], self.host_key_type, self.local_cipher, self.remote_cipher, self.local_mac,
self.remote_mac, self.local_compression, self.remote_compression))
# save for computing hash later...
# now wait! openssh has a bug (and others might too) where there are
# actually some extra bytes (one NUL byte in openssh's case) added to
# the end of the packet but not parsed. turns out we need to throw
# away those bytes because they aren't part of the hash.
self.remote_kex_init = cMSG_KEXINIT + m.get_so_far()
def _activate_inbound(self):
"""switch on newly negotiated encryption parameters for inbound traffic"""
block_size = self._cipher_info[self.remote_cipher]['block-size']
if self.server_mode:
IV_in = self._compute_key('A', block_size)
key_in = self._compute_key('C', self._cipher_info[self.remote_cipher]['key-size'])
else:
IV_in = self._compute_key('B', block_size)
key_in = self._compute_key('D', self._cipher_info[self.remote_cipher]['key-size'])
engine = self._get_cipher(self.remote_cipher, key_in, IV_in)
mac_size = self._mac_info[self.remote_mac]['size']
mac_engine = self._mac_info[self.remote_mac]['class']
# initial mac keys are done in the hash's natural size (not the potentially truncated
# transmission size)
if self.server_mode:
mac_key = self._compute_key('E', mac_engine().digest_size)
else:
mac_key = self._compute_key('F', mac_engine().digest_size)
self.packetizer.set_inbound_cipher(engine, block_size, mac_engine, mac_size, mac_key)
compress_in = self._compression_info[self.remote_compression][1]
if (compress_in is not None) and ((self.remote_compression != 'zlib@openssh.com') or self.authenticated):
self._log(DEBUG, 'Switching on inbound compression ...')
self.packetizer.set_inbound_compressor(compress_in())
def _activate_outbound(self):
"""switch on newly negotiated encryption parameters for outbound traffic"""
m = Message()
m.add_byte(cMSG_NEWKEYS)
self._send_message(m)
block_size = self._cipher_info[self.local_cipher]['block-size']
if self.server_mode:
IV_out = self._compute_key('B', block_size)
key_out = self._compute_key('D', self._cipher_info[self.local_cipher]['key-size'])
else:
IV_out = self._compute_key('A', block_size)
key_out = self._compute_key('C', self._cipher_info[self.local_cipher]['key-size'])
engine = self._get_cipher(self.local_cipher, key_out, IV_out)
mac_size = self._mac_info[self.local_mac]['size']
mac_engine = self._mac_info[self.local_mac]['class']
# initial mac keys are done in the hash's natural size (not the potentially truncated
# transmission size)
if self.server_mode:
mac_key = self._compute_key('F', mac_engine().digest_size)
else:
mac_key = self._compute_key('E', mac_engine().digest_size)
sdctr = self.local_cipher.endswith('-ctr')
self.packetizer.set_outbound_cipher(engine, block_size, mac_engine, mac_size, mac_key, sdctr)
compress_out = self._compression_info[self.local_compression][0]
if (compress_out is not None) and ((self.local_compression != 'zlib@openssh.com') or self.authenticated):
self._log(DEBUG, 'Switching on outbound compression ...')
self.packetizer.set_outbound_compressor(compress_out())
if not self.packetizer.need_rekey():
self.in_kex = False
# we always expect to receive NEWKEYS now
self._expect_packet(MSG_NEWKEYS)
def _auth_trigger(self):
self.authenticated = True
# delayed initiation of compression
if self.local_compression == 'zlib@openssh.com':
compress_out = self._compression_info[self.local_compression][0]
self._log(DEBUG, 'Switching on outbound compression ...')
self.packetizer.set_outbound_compressor(compress_out())
if self.remote_compression == 'zlib@openssh.com':
compress_in = self._compression_info[self.remote_compression][1]
self._log(DEBUG, 'Switching on inbound compression ...')
self.packetizer.set_inbound_compressor(compress_in())
def _parse_newkeys(self, m):
self._log(DEBUG, 'Switch to new keys ...')
self._activate_inbound()
# can also free a bunch of stuff here
self.local_kex_init = self.remote_kex_init = None
self.K = None
self.kex_engine = None
if self.server_mode and (self.auth_handler is None):
# create auth handler for server mode
self.auth_handler = AuthHandler(self)
if not self.initial_kex_done:
# this was the first key exchange
self.initial_kex_done = True
# send an event?
if self.completion_event is not None:
self.completion_event.set()
# it's now okay to send data again (if this was a re-key)
if not self.packetizer.need_rekey():
self.in_kex = False
self.clear_to_send_lock.acquire()
try:
self.clear_to_send.set()
finally:
self.clear_to_send_lock.release()
return
def _parse_disconnect(self, m):
code = m.get_int()
desc = m.get_text()
self._log(INFO, 'Disconnect (code %d): %s' % (code, desc))
def _parse_global_request(self, m):
kind = m.get_text()
self._log(DEBUG, 'Received global request "%s"' % kind)
want_reply = m.get_boolean()
if not self.server_mode:
self._log(DEBUG, 'Rejecting "%s" global request from server.' % kind)
ok = False
elif kind == 'tcpip-forward':
address = m.get_text()
port = m.get_int()
ok = self.server_object.check_port_forward_request(address, port)
if ok:
ok = (ok,)
elif kind == 'cancel-tcpip-forward':
address = m.get_text()
port = m.get_int()
self.server_object.cancel_port_forward_request(address, port)
ok = True
else:
ok = self.server_object.check_global_request(kind, m)
extra = ()
if type(ok) is tuple:
extra = ok
ok = True
if want_reply:
msg = Message()
if ok:
msg.add_byte(cMSG_REQUEST_SUCCESS)
msg.add(*extra)
else:
msg.add_byte(cMSG_REQUEST_FAILURE)
self._send_message(msg)
def _parse_request_success(self, m):
self._log(DEBUG, 'Global request successful.')
self.global_response = m
if self.completion_event is not None:
self.completion_event.set()
def _parse_request_failure(self, m):
self._log(DEBUG, 'Global request denied.')
self.global_response = None
if self.completion_event is not None:
self.completion_event.set()
def _parse_channel_open_success(self, m):
chanid = m.get_int()
server_chanid = m.get_int()
server_window_size = m.get_int()
server_max_packet_size = m.get_int()
chan = self._channels.get(chanid)
if chan is None:
self._log(WARNING, 'Success for unrequested channel! [??]')
return
self.lock.acquire()
try:
chan._set_remote_channel(server_chanid, server_window_size, server_max_packet_size)
self._log(INFO, 'Secsh channel %d opened.' % chanid)
if chanid in self.channel_events:
self.channel_events[chanid].set()
del self.channel_events[chanid]
finally:
self.lock.release()
return
def _parse_channel_open_failure(self, m):
chanid = m.get_int()
reason = m.get_int()
reason_str = m.get_text()
lang = m.get_text()
reason_text = CONNECTION_FAILED_CODE.get(reason, '(unknown code)')
self._log(INFO, 'Secsh channel %d open FAILED: %s: %s' % (chanid, reason_str, reason_text))
self.lock.acquire()
try:
self.saved_exception = ChannelException(reason, reason_text)
if chanid in self.channel_events:
self._channels.delete(chanid)
if chanid in self.channel_events:
self.channel_events[chanid].set()
del self.channel_events[chanid]
finally:
self.lock.release()
return
def _parse_channel_open(self, m):
kind = m.get_text()
chanid = m.get_int()
initial_window_size = m.get_int()
max_packet_size = m.get_int()
reject = False
if (kind == 'auth-agent@openssh.com') and (self._forward_agent_handler is not None):
self._log(DEBUG, 'Incoming forward agent connection')
self.lock.acquire()
try:
my_chanid = self._next_channel()
finally:
self.lock.release()
elif (kind == 'x11') and (self._x11_handler is not None):
origin_addr = m.get_text()
origin_port = m.get_int()
self._log(DEBUG, 'Incoming x11 connection from %s:%d' % (origin_addr, origin_port))
self.lock.acquire()
try:
my_chanid = self._next_channel()
finally:
self.lock.release()
elif (kind == 'forwarded-tcpip') and (self._tcp_handler is not None):
server_addr = m.get_text()
server_port = m.get_int()
origin_addr = m.get_text()
origin_port = m.get_int()
self._log(DEBUG, 'Incoming tcp forwarded connection from %s:%d' % (origin_addr, origin_port))
self.lock.acquire()
try:
my_chanid = self._next_channel()
finally:
self.lock.release()
elif not self.server_mode:
self._log(DEBUG, 'Rejecting "%s" channel request from server.' % kind)
reject = True
reason = OPEN_FAILED_ADMINISTRATIVELY_PROHIBITED
else:
self.lock.acquire()
try:
my_chanid = self._next_channel()
finally:
self.lock.release()
if kind == 'direct-tcpip':
                # handle direct-tcpip requests coming from the client
dest_addr = m.get_text()
dest_port = m.get_int()
origin_addr = m.get_text()
origin_port = m.get_int()
reason = self.server_object.check_channel_direct_tcpip_request(
my_chanid, (origin_addr, origin_port), (dest_addr, dest_port))
else:
reason = self.server_object.check_channel_request(kind, my_chanid)
if reason != OPEN_SUCCEEDED:
self._log(DEBUG, 'Rejecting "%s" channel request from client.' % kind)
reject = True
if reject:
msg = Message()
msg.add_byte(cMSG_CHANNEL_OPEN_FAILURE)
msg.add_int(chanid)
msg.add_int(reason)
msg.add_string('')
msg.add_string('en')
self._send_message(msg)
return
chan = Channel(my_chanid)
self.lock.acquire()
try:
self._channels.put(my_chanid, chan)
self.channels_seen[my_chanid] = True
chan._set_transport(self)
chan._set_window(self.window_size, self.max_packet_size)
chan._set_remote_channel(chanid, initial_window_size, max_packet_size)
finally:
self.lock.release()
m = Message()
m.add_byte(cMSG_CHANNEL_OPEN_SUCCESS)
m.add_int(chanid)
m.add_int(my_chanid)
m.add_int(self.window_size)
m.add_int(self.max_packet_size)
self._send_message(m)
self._log(INFO, 'Secsh channel %d (%s) opened.', my_chanid, kind)
if kind == 'auth-agent@openssh.com':
self._forward_agent_handler(chan)
elif kind == 'x11':
self._x11_handler(chan, (origin_addr, origin_port))
elif kind == 'forwarded-tcpip':
chan.origin_addr = (origin_addr, origin_port)
self._tcp_handler(chan, (origin_addr, origin_port), (server_addr, server_port))
else:
self._queue_incoming_channel(chan)
def _parse_debug(self, m):
always_display = m.get_boolean()
msg = m.get_string()
lang = m.get_string()
self._log(DEBUG, 'Debug msg: ' + util.safe_string(msg))
def _get_subsystem_handler(self, name):
try:
self.lock.acquire()
if name not in self.subsystem_table:
return None, [], {}
return self.subsystem_table[name]
finally:
self.lock.release()
_handler_table = {
MSG_NEWKEYS: _parse_newkeys,
MSG_GLOBAL_REQUEST: _parse_global_request,
MSG_REQUEST_SUCCESS: _parse_request_success,
MSG_REQUEST_FAILURE: _parse_request_failure,
MSG_CHANNEL_OPEN_SUCCESS: _parse_channel_open_success,
MSG_CHANNEL_OPEN_FAILURE: _parse_channel_open_failure,
MSG_CHANNEL_OPEN: _parse_channel_open,
MSG_KEXINIT: _negotiate_keys,
}
_channel_handler_table = {
MSG_CHANNEL_SUCCESS: Channel._request_success,
MSG_CHANNEL_FAILURE: Channel._request_failed,
MSG_CHANNEL_DATA: Channel._feed,
MSG_CHANNEL_EXTENDED_DATA: Channel._feed_extended,
MSG_CHANNEL_WINDOW_ADJUST: Channel._window_adjust,
MSG_CHANNEL_REQUEST: Channel._handle_request,
MSG_CHANNEL_EOF: Channel._handle_eof,
MSG_CHANNEL_CLOSE: Channel._handle_close,
}
class SecurityOptions (object):
"""
Simple object containing the security preferences of an ssh transport.
These are tuples of acceptable ciphers, digests, key types, and key
exchange algorithms, listed in order of preference.
Changing the contents and/or order of these fields affects the underlying
`.Transport` (but only if you change them before starting the session).
If you try to add an algorithm that paramiko doesn't recognize,
``ValueError`` will be raised. If you try to assign something besides a
tuple to one of the fields, ``TypeError`` will be raised.
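
    Example (illustrative; assumes an already-constructed `.Transport`
    instance ``t``)::

        >>> opts = t.get_security_options()
        >>> opts.ciphers = ('aes256-cbc', 'aes128-cbc')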
"""
#__slots__ = [ 'ciphers', 'digests', 'key_types', 'kex', 'compression', '_transport' ]
__slots__ = '_transport'
def __init__(self, transport):
self._transport = transport
def __repr__(self):
"""
Returns a string representation of this object, for debugging.
"""
return '<paramiko.SecurityOptions for %s>' % repr(self._transport)
def _get_ciphers(self):
return self._transport._preferred_ciphers
def _get_digests(self):
return self._transport._preferred_macs
def _get_key_types(self):
return self._transport._preferred_keys
def _get_kex(self):
return self._transport._preferred_kex
def _get_compression(self):
return self._transport._preferred_compression
def _set(self, name, orig, x):
if type(x) is list:
x = tuple(x)
if type(x) is not tuple:
raise TypeError('expected tuple or list')
possible = list(getattr(self._transport, orig).keys())
forbidden = [n for n in x if n not in possible]
if len(forbidden) > 0:
            raise ValueError('unknown algorithm(s): %r' % (forbidden,))
setattr(self._transport, name, x)
def _set_ciphers(self, x):
self._set('_preferred_ciphers', '_cipher_info', x)
def _set_digests(self, x):
self._set('_preferred_macs', '_mac_info', x)
def _set_key_types(self, x):
self._set('_preferred_keys', '_key_info', x)
def _set_kex(self, x):
self._set('_preferred_kex', '_kex_info', x)
def _set_compression(self, x):
self._set('_preferred_compression', '_compression_info', x)
ciphers = property(_get_ciphers, _set_ciphers, None,
"Symmetric encryption ciphers")
digests = property(_get_digests, _set_digests, None,
"Digest (one-way hash) algorithms")
key_types = property(_get_key_types, _set_key_types, None,
"Public-key algorithms")
kex = property(_get_kex, _set_kex, None, "Key exchange algorithms")
compression = property(_get_compression, _set_compression, None,
"Compression algorithms")
class ChannelMap (object):
def __init__(self):
# (id -> Channel)
self._map = weakref.WeakValueDictionary()
self._lock = threading.Lock()
def put(self, chanid, chan):
self._lock.acquire()
try:
self._map[chanid] = chan
finally:
self._lock.release()
def get(self, chanid):
self._lock.acquire()
try:
return self._map.get(chanid, None)
finally:
self._lock.release()
def delete(self, chanid):
self._lock.acquire()
try:
try:
del self._map[chanid]
except KeyError:
pass
finally:
self._lock.release()
def values(self):
self._lock.acquire()
try:
return list(self._map.values())
finally:
self._lock.release()
def __len__(self):
self._lock.acquire()
try:
return len(self._map)
finally:
self._lock.release()
|
from setuptools import setup, find_packages
with open('README.rst') as f:
readme = f.read()
with open('CHANGELOG.rst') as f:
changelog = f.read()
setup(
name='devpi-builder',
use_scm_version=True,
packages=find_packages(exclude=['tests']),
author='Matthias Bach',
author_email='matthias.bach@blue-yonder.com',
url='https://github.com/blue-yonder/devpi-builder',
description='Devpi-builder takes a requirements.txt and incrementally fills a devpi index with wheels of the listed python packages.',
long_description='%s\n\n%s' % (readme, changelog),
license='new BSD',
install_requires=[
'devpi-plumber>=0.2.3',
'setuptools',
'wheel',
'pip>=1.5.3',
'junit-xml'
],
setup_requires=[
'setuptools_scm',
'nose',
'nose-progressive',
],
tests_require=[
'nose',
'mock',
'coverage',
],
test_suite='nose.collector',
classifiers=[
'Development Status :: 5 - Production/Stable',
'Environment :: Console',
'Intended Audience :: Developers',
'Intended Audience :: System Administrators',
'License :: OSI Approved :: BSD License',
'Topic :: System :: Archiving :: Packaging',
'Programming Language :: Python :: 2',
'Programming Language :: Python :: 2.7',
'Programming Language :: Python :: 3',
'Programming Language :: Python :: 3.2',
'Programming Language :: Python :: 3.3',
'Programming Language :: Python :: 3.4',
],
entry_points={
'console_scripts': [
'devpi-builder = devpi_builder.cli:main',
],
},
)
|
import math
from typing import Tuple
import torch
from torch import Tensor
from torchvision.transforms import functional as F
class RandomMixup(torch.nn.Module):
"""Randomly apply Mixup to the provided batch and targets.
The class implements the data augmentations as described in the paper
`"mixup: Beyond Empirical Risk Minimization" <https://arxiv.org/abs/1710.09412>`_.
Args:
num_classes (int): number of classes used for one-hot encoding.
p (float): probability of the batch being transformed. Default value is 0.5.
alpha (float): hyperparameter of the Beta distribution used for mixup.
Default value is 1.0.
inplace (bool): boolean to make this transform inplace. Default set to False.
"""
def __init__(self, num_classes: int, p: float = 0.5, alpha: float = 1.0, inplace: bool = False) -> None:
super().__init__()
assert num_classes > 0, "Please provide a valid positive value for the num_classes."
        assert alpha > 0, "Alpha param must be positive."
self.num_classes = num_classes
self.p = p
self.alpha = alpha
self.inplace = inplace
def forward(self, batch: Tensor, target: Tensor) -> Tuple[Tensor, Tensor]:
"""
Args:
batch (Tensor): Float tensor of size (B, C, H, W)
target (Tensor): Integer tensor of size (B, )
        Returns:
            Tuple[Tensor, Tensor]: Randomly transformed batch and target.
"""
if batch.ndim != 4:
raise ValueError(f"Batch ndim should be 4. Got {batch.ndim}")
if target.ndim != 1:
raise ValueError(f"Target ndim should be 1. Got {target.ndim}")
if not batch.is_floating_point():
raise TypeError(f"Batch dtype should be a float tensor. Got {batch.dtype}.")
if target.dtype != torch.int64:
raise TypeError(f"Target dtype should be torch.int64. Got {target.dtype}")
if not self.inplace:
batch = batch.clone()
target = target.clone()
if target.ndim == 1:
target = torch.nn.functional.one_hot(target, num_classes=self.num_classes).to(dtype=batch.dtype)
if torch.rand(1).item() >= self.p:
return batch, target
# It's faster to roll the batch by one instead of shuffling it to create image pairs
batch_rolled = batch.roll(1, 0)
target_rolled = target.roll(1, 0)
# Implemented as on mixup paper, page 3.
lambda_param = float(torch._sample_dirichlet(torch.tensor([self.alpha, self.alpha]))[0])
batch_rolled.mul_(1.0 - lambda_param)
batch.mul_(lambda_param).add_(batch_rolled)
target_rolled.mul_(1.0 - lambda_param)
target.mul_(lambda_param).add_(target_rolled)
return batch, target
def __repr__(self) -> str:
s = (
f"{self.__class__.__name__}("
f"num_classes={self.num_classes}"
f", p={self.p}"
f", alpha={self.alpha}"
f", inplace={self.inplace}"
f")"
)
return s
class RandomCutmix(torch.nn.Module):
"""Randomly apply Cutmix to the provided batch and targets.
The class implements the data augmentations as described in the paper
`"CutMix: Regularization Strategy to Train Strong Classifiers with Localizable Features"
<https://arxiv.org/abs/1905.04899>`_.
Args:
num_classes (int): number of classes used for one-hot encoding.
p (float): probability of the batch being transformed. Default value is 0.5.
alpha (float): hyperparameter of the Beta distribution used for cutmix.
Default value is 1.0.
inplace (bool): boolean to make this transform inplace. Default set to False.
"""
def __init__(self, num_classes: int, p: float = 0.5, alpha: float = 1.0, inplace: bool = False) -> None:
super().__init__()
assert num_classes > 0, "Please provide a valid positive value for the num_classes."
        assert alpha > 0, "Alpha param must be positive."
self.num_classes = num_classes
self.p = p
self.alpha = alpha
self.inplace = inplace
def forward(self, batch: Tensor, target: Tensor) -> Tuple[Tensor, Tensor]:
"""
Args:
batch (Tensor): Float tensor of size (B, C, H, W)
target (Tensor): Integer tensor of size (B, )
        Returns:
            Tuple[Tensor, Tensor]: Randomly transformed batch and target.
"""
if batch.ndim != 4:
raise ValueError(f"Batch ndim should be 4. Got {batch.ndim}")
if target.ndim != 1:
raise ValueError(f"Target ndim should be 1. Got {target.ndim}")
if not batch.is_floating_point():
raise TypeError(f"Batch dtype should be a float tensor. Got {batch.dtype}.")
if target.dtype != torch.int64:
raise TypeError(f"Target dtype should be torch.int64. Got {target.dtype}")
if not self.inplace:
batch = batch.clone()
target = target.clone()
if target.ndim == 1:
target = torch.nn.functional.one_hot(target, num_classes=self.num_classes).to(dtype=batch.dtype)
if torch.rand(1).item() >= self.p:
return batch, target
# It's faster to roll the batch by one instead of shuffling it to create image pairs
batch_rolled = batch.roll(1, 0)
target_rolled = target.roll(1, 0)
# Implemented as on cutmix paper, page 12 (with minor corrections on typos).
lambda_param = float(torch._sample_dirichlet(torch.tensor([self.alpha, self.alpha]))[0])
_, H, W = F.get_dimensions(batch)
r_x = torch.randint(W, (1,))
r_y = torch.randint(H, (1,))
r = 0.5 * math.sqrt(1.0 - lambda_param)
r_w_half = int(r * W)
r_h_half = int(r * H)
x1 = int(torch.clamp(r_x - r_w_half, min=0))
y1 = int(torch.clamp(r_y - r_h_half, min=0))
x2 = int(torch.clamp(r_x + r_w_half, max=W))
y2 = int(torch.clamp(r_y + r_h_half, max=H))
batch[:, :, y1:y2, x1:x2] = batch_rolled[:, :, y1:y2, x1:x2]
lambda_param = float(1.0 - (x2 - x1) * (y2 - y1) / (W * H))
target_rolled.mul_(1.0 - lambda_param)
target.mul_(lambda_param).add_(target_rolled)
return batch, target
def __repr__(self) -> str:
s = (
f"{self.__class__.__name__}("
f"num_classes={self.num_classes}"
f", p={self.p}"
f", alpha={self.alpha}"
f", inplace={self.inplace}"
f")"
)
return s
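# A minimal smoke test (illustrative sketch; the batch shape, class count and
# hyperparameters below are assumptions, not part of the original module).
if __name__ == "__main__":
    mixup = RandomMixup(num_classes=10, p=1.0, alpha=0.2)
    cutmix = RandomCutmix(num_classes=10, p=1.0, alpha=1.0)
    images = torch.rand(8, 3, 32, 32)    # (B, C, H, W) float batch
    labels = torch.randint(0, 10, (8,))  # (B,) int64 class targets
    mixed, soft = mixup(images, labels)  # soft labels sum to 1 per row
    cut, soft2 = cutmix(images, labels)
    print(mixed.shape, soft.shape, cut.shape, soft2.shape)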
|
import tests.periodicities.period_test as per
per.buildModel((5, 'BH', 100))
|
from django.db import models
from django.db.models import Q
from django.utils.encoding import force_text, python_2_unicode_compatible
from cms.models import CMSPlugin, Placeholder
@python_2_unicode_compatible
class AliasPluginModel(CMSPlugin):
cmsplugin_ptr = models.OneToOneField(CMSPlugin, related_name='cms_aliasplugin', parent_link=True)
plugin = models.ForeignKey(CMSPlugin, editable=False, related_name="alias_reference", null=True)
alias_placeholder = models.ForeignKey(Placeholder, editable=False, related_name="alias_placeholder", null=True)
class Meta:
app_label = 'cms'
def __str__(self):
if self.plugin_id:
return "(%s) %s" % (force_text(self.plugin.get_plugin_name()), self.plugin.get_plugin_instance()[0])
else:
return force_text(self.alias_placeholder.get_label())
def is_recursive(self):
if self.plugin_id:
placeholder = self.plugin.placeholder_id
else:
placeholder = self.alias_placeholder_id
if not placeholder:
return False
plugins = AliasPluginModel.objects.filter(
plugin_type='AliasPlugin',
placeholder=placeholder,
)
plugins = plugins.filter(Q(plugin=self) | Q(alias_placeholder=self.placeholder_id))
return plugins.exists()
|
import tuplespace
ts = tuplespace.TupleSpace()
print ts.set('event:1', 'some event')
print ts.set('event:2', 'some event')
while True:
print ts.take('event*')
|
from datetime import datetime, timedelta
from django.db import models, transaction
from django.utils.translation import ugettext_lazy as _
from registration.managers import RegistrationManager
from registration.user import User
class RegistrationProfile(models.Model):
"""A simple profile which stores an activation key for use during user
account registration.
Generally, you will not want to interact directly with instances of this
model; the provided manager includes methods for creating and activating
new accounts, as well as for cleaning out accounts which have never been
activated.
While it is possible to use this model as the value of the
``AUTH_PROFILE_MODULE`` setting, it's not recommended that you do so. This
model's sole purpose is to store data temporarily during account
registration and activation.
"""
user = models.OneToOneField(User, related_name='registration_profile',
verbose_name=_('user'))
activation_key = models.CharField(_('activation key'), max_length=40)
# for moderated account registration, this denotes the user has verified
# they account e.g. clicked on a link sent to their email address
verified = models.BooleanField(_('verified'), default=False)
# denotes this user has been activated from a registration's perspective.
# note, this is independent of ``User.is_active``
activated = models.BooleanField(_('activated'), default=False)
# denotes the user has been moderated
moderated = models.BooleanField(_('moderated'), default=False)
# denotes the user has been moderated
moderator = models.ForeignKey(User, related_name='moderated_profiles',
null=True, verbose_name=_('moderator'))
# the time the user was moderated
moderation_time = models.DateTimeField(_('moderation_time'), null=True)
objects = RegistrationManager()
class Meta(object):
verbose_name = _('registration profile')
verbose_name_plural = _('registration profiles')
def __unicode__(self):
return u'Registration Profile for %s' % self.user
def save(self, *args, **kwargs):
if not self.moderation_time and self.moderated:
self.moderation_time = datetime.now()
super(RegistrationProfile, self).save(*args, **kwargs)
@transaction.commit_on_success
def activate(self):
user = self.user
user.is_active = True
user.save()
self.activated = True
self.save()
return user
def activation_expired(self, activation_days=None):
"""Determine whether this ``RegistrationProfile``'s activation key has
expired, returning a boolean -- ``True`` if the key has expired.
        Key expiration is determined by a two-step process:
        1. If ``activation_days`` is not supplied (or is zero), activation
        keys never expire and this method returns ``False``.
        2. Otherwise, the date the user signed up is incremented by
        ``activation_days`` (typically the ``ACCOUNT_ACTIVATION_DAYS``
        setting, i.e. the number of days after signup during which a user is
        allowed to activate their account); if the result is less than or
        equal to the current date, the key has expired and this method
        returns ``True``.
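        For example (illustrative values): with ``activation_days=7``, a
        profile whose user joined ten days ago has expired, while one whose
        user joined three days ago has not.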
"""
# if this is not set or is 0, always return False (no expiration)
if not activation_days:
return False
        # check if this has expired given the activation days
expiration_date = timedelta(days=activation_days)
return self.user.date_joined + expiration_date <= datetime.now()
# for the admin..
activation_expired.boolean = True
|
import unittest
from pyasm.x86asm import assembler, CDECL, STDCALL, PYTHON
from pyasm.x86cpToMemory import CpToMemory
class test_python_funcs(unittest.TestCase):
def test_simple_function(self):
a = assembler()
a.ADStr("hello_world", "Hello world!\n\0")
a.AP("test_print", PYTHON)
a.AddLocal("self")
a.AddLocal("args")
#a.AI("INT 3")
a.AI("PUSH hello_world")
a.AI("CALL PySys_WriteStdout")
a.AI("MOV EAX,%s" % id(None))
a.AI("ADD [EAX],0x1") #refcount
a.EP()
a.AP("test_print2", PYTHON)
a.AddLocal("self")
a.AddLocal("args")
#a.AI("INT 3")
a.AI("PUSH hello_world")
a.AI("CALL PySys_WriteStdout")
a.AI("MOV EAX,%s" % id(None))
a.AI("ADD [EAX],0x1") #refcount
a.EP()
mem = CpToMemory(a.Compile())
mem.MakeMemory()
mem.BindPythonFunctions(globals())
test_print("Foo")
test_print2('bar')
if __name__ == "__main__":
unittest.main()
|
from tornado import gen
from tornadowebapi import exceptions
from tornadowebapi.resource import Resource
from tornadowebapi.resource_handler import ResourceHandler
from tornadowebapi.traitlets import Unicode
from remoteappmanager.webapi.decorators import authenticated
class Container(Resource):
"""Represents a container as seen from the administrator.
It can only be stopped.
"""
docker_id = Unicode(allow_empty=False, strip=True, scope="output")
name = Unicode(allow_empty=False, strip=True, scope="output")
image_name = Unicode(allow_empty=False, strip=True, scope="output")
image_id = Unicode(allow_empty=False, strip=True, scope="output")
mapping_id = Unicode(allow_empty=False, strip=True, scope="output")
user = Unicode(allow_empty=False, strip=True, scope="output")
realm = Unicode(allow_empty=False, strip=True, scope="output")
class ContainerHandler(ResourceHandler):
resource_class = Container
@gen.coroutine
@authenticated
def delete(self, resource, **kwargs):
"""Stop the container."""
identifier = resource.identifier
container_manager = self.application.container_manager
container = yield container_manager.find_container(
url_id=identifier)
if not container:
raise exceptions.NotFound()
try:
yield self.application.reverse_proxy.unregister(container.urlpath)
except Exception:
# If we can't remove the reverse proxy, we cannot do much more
# than log the problem and keep going, because we want to stop
# the container regardless.
self.log.exception(
"Could not remove reverse proxy for id {}".format(
identifier))
try:
yield container_manager.stop_and_remove_container(
container.docker_id)
except Exception:
self.log.exception(
"Could not stop and remove container for id {}".format(
identifier))
@gen.coroutine
@authenticated
def items(self, items_response, **kwargs):
"""Get all the currently running containers."""
manager = self.application.container_manager
containers = (yield manager.find_containers())
items = []
for c in containers:
item = Container(identifier=c.url_id)
item.fill(c)
items.append(item)
items_response.set(items)
|
import Queue
import threading
import time
import traceback
from logger import Logger
from task import ExecutableTask, Task
class ThreadPool(object):
def __init__(self):
self._init_core_worker()
def _init_core_worker(self):
self.core_worker = CoreWorker()
self.core_worker.setDaemon(True)
def add_task(self, task):
self.core_worker.add_task(task)
def start(self):
self.core_worker.start()
def wait(self):
self.core_worker.wait()
class CoreWorker(threading.Thread):
def __init__(self, workers_num=6):
threading.Thread.__init__(self)
self.queue = Queue.Queue()
self.workers = []
self.workers_num = workers_num
self._init_workers()
def add_task(self, task):
self.queue.put(task)
def run(self):
        for worker in self.workers:
            worker.start()
def _init_workers(self):
for i in range(0, self.workers_num):
worker = Worker(self.queue)
worker.setDaemon(True)
self.workers.append(worker)
def wait(self):
self.queue.join()
class Worker(threading.Thread):
def __init__(self, queue):
threading.Thread.__init__(self)
self.queue = queue
self._tpl_debug_message = '[worker_thread] {}'
def run(self):
while True:
try:
task = self.queue.get()
if isinstance(task, ExecutableTask):
task.execute()
except Exception as e:
self.debug('worker thread catch exception')
self.debug(traceback.format_exc())
finally:
self.queue.task_done()
def debug(self, message):
Logger.debug(self._tpl_debug_message.format(message))
class TaskEngine(object):
def __init__(self, logger):
self._logger = logger
self.queue = Queue.Queue()
self.condition = threading.Condition()
self.start_time = 0
self.cost_time = 0
self.pool = ThreadPool()
self.pool.start()
self._interrupt_exception = None
self._init_attr()
self.tpl_logger_message = '[task_engine] {}'
def _init_attr(self):
self.root_tasks = []
self.tasks_dict = {}
self.tasks_depth_dict = {}
self.sorted_tasks = []
def debug(self, message):
Logger.debug(self.tpl_logger_message.format(message))
def add_root_task(self, task):
if isinstance(task, list):
            for t in task:
                self._add_root_task(t)
else:
self._add_root_task(task)
def start(self):
self.start_time = time.time()
self._interrupt_exception = None
self._prepare()
self.wait()
if self._interrupt_exception is not None:
raise self._interrupt_exception
def finish(self):
self.cost_time = time.time() - self.start_time
self.debug('it takes task engine {}s to execute tasks.'.format(round(self.cost_time, 2)))
self._init_attr()
self.notify()
def is_all_tasks_finished(self):
tasks = self.tasks_dict.values()
for task in tasks:
if task.status != 3 and task.status != -1:
return False
return True
def get_running_tasks(self):
tasks = self.tasks_dict.values()
return [task for task in tasks if task.status != 3 and task.status != -1]
def wait(self):
self.condition.acquire()
self.condition.wait()
self.condition.release()
def notify(self):
self.condition.acquire()
self.condition.notify()
self.condition.release()
def interrupt(self, exception):
self._interrupt_exception = exception
self.debug('task engine occurs exception, engine will exit.')
def _add_root_task(self, task):
if isinstance(task, Task) and task not in self.root_tasks:
self.root_tasks.append(task)
def _prepare(self):
tasks_queue = Queue.Queue()
for task in self.root_tasks:
tasks_queue.put(task)
has_added_tasks = []
while not tasks_queue.empty():
task = tasks_queue.get()
has_added_tasks.append(task)
            if task.name not in self.tasks_dict:
self.tasks_dict[task.name] = task
for child in task.child_tasks:
if child not in has_added_tasks:
tasks_queue.put(child)
depth_array = []
for task in self.tasks_dict.values():
depth = TaskEngine.calculate_task_depth(task)
            self.tasks_depth_dict.setdefault(depth, []).append(task)
depth_array.append(depth)
depth_array.sort()
for depth in depth_array:
tasks = self.tasks_depth_dict[depth]
for task in tasks:
self.debug("depth: {}, task: {}".format(depth, task))
self.sorted_tasks.append(task)
self._logger.set_sorted_tasks(self.sorted_tasks)
for task in self.sorted_tasks:
self.pool.add_task(ExecutableTask(task, self))
@staticmethod
def calculate_task_depth(task):
depth = []
parent_task_queue = Queue.Queue()
parent_task_queue.put(task)
while not parent_task_queue.empty():
has_recursive_task = False
parent_task = parent_task_queue.get()
if parent_task.name not in depth:
depth.append(parent_task.name)
for parent in parent_task.parent_tasks:
if parent.name == task.name:
has_recursive_task = True
                    parent_task.parent_tasks.remove(parent)
                    break
if parent.name not in depth:
parent_task_queue.put(parent)
if has_recursive_task:
Logger.info("[CAUTIONS] Recursive Tasks Detected! Drop {} for {} !!".format(depth.pop(), task.name))
return len(depth)
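# Illustrative wiring sketch (hypothetical task objects; assumes Task
# instances expose the .name, .parent_tasks and .child_tasks attributes used
# above):
#   engine = TaskEngine(logger)
#   engine.add_root_task([build_task, package_task])
#   engine.start()
#   engine.finish()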
|
import datetime
import unittest
from types import GeneratorType
from phoxpy import exceptions
from phoxpy import xml
from phoxpy import xmlcodec
class XMLDecodeTestCase(unittest.TestCase):
def test_decode_fallback(self):
self.assertRaises(ValueError, xml.decode, '<foooooo/>')
def test_decode_untyped(self):
value = xml.decode('<f v="42" />')
self.assertEqual(value, '42')
def test_decode_boolean_false(self):
value = xml.decode('<f t="B" v="false" />')
self.assertEqual(value, False)
def test_decode_boolean_true(self):
value = xml.decode('<f t="B" v="true" />')
self.assertEqual(value, True)
def test_fail_decode_invalid_boolean(self):
self.assertRaises(ValueError, xml.decode, '<f t="B" v="foo" />')
def test_decode_int(self):
value = xml.decode('<f t="I" v="42" />')
self.assertEqual(value, 42)
def test_decode_long(self):
value = xml.decode('<f t="L" v="100500" />')
self.assertEqual(value, 100500L)
def test_decode_float(self):
value = xml.decode('<f t="F" v="3.14" />')
self.assertEqual(value, 3.14)
def test_decode_text(self):
value = xml.decode('<f t="S" v="привет, world!" />')
self.assertEqual(value, u'привет, world!')
def test_decode_date(self):
value = xml.decode('<f t="D" v="14.02.2009 02:30:31" />')
self.assertEqual(value, datetime.datetime(2009, 2, 14, 2, 30, 31))
def test_decode_reference(self):
value = xml.decode('<r i="42" />')
self.assertTrue(isinstance(value, xmlcodec.Reference))
self.assertEqual(value, '42')
def test_decode_sequence(self):
value = xml.decode('<s><f t="I" v="1"/><f t="I" v="2"/><f t="I" v="3"/></s>')
self.assertTrue(isinstance(value, GeneratorType))
self.assertEqual(list(value), [1, 2, 3])
def test_decode_empty_sequence(self):
value = xml.decode('<s/>')
self.assertTrue(isinstance(value, GeneratorType))
self.assertEqual(list(value), [])
def test_decode_object(self):
value = xml.decode('<o><f n="foo" t="I" v="42"/><f n="bar" t="S" v="baz"/></o>')
self.assertTrue(isinstance(value, dict))
self.assertEqual(value, {'foo': 42, 'bar': 'baz'})
def test_decode_empty_object(self):
value = xml.decode('<o/>')
self.assertTrue(isinstance(value, dict))
self.assertEqual(value, {})
def test_decode_complex(self):
value = xml.decode('''<s>
<o></o>
<o>
<s n="foo"><r i="foo"/><r i="bar"/><r i="baz"/></s>
<s n="zoo">
<o><s n="bar"><r i="bar"/><r i="baz"/></s></o>
<o><s n="baz"><r i="baz"/></s></o>
</s>
</o></s>
''')
self.assertTrue(isinstance(value, GeneratorType))
self.assertEqual(list(value), [
{},
{'foo': [xmlcodec.Reference('foo'),
xmlcodec.Reference('bar'),
xmlcodec.Reference('baz')],
'zoo': [
{'bar': [xmlcodec.Reference('bar'),
xmlcodec.Reference('baz')]},
{'baz': [xmlcodec.Reference('baz')]}
]}
])
def test_decode_object_with_attributes(self):
value = xml.decode('<o id="test"><f n="foo" t="S" v="bar"/></o>')
self.assertTrue('id' in value)
self.assertTrue(isinstance(value['id'], xmlcodec.Attribute))
self.assertEqual(value['id'], 'test')
def test_fail_decode_unnamed_object_item(self):
self.assertRaises(ValueError, xml.decode, '<o><s/></o>')
def test_fail_decode_if_collision_occurs(self):
self.assertRaises(AssertionError,
xml.decode,
'<o id="test"><f n="id" t="S" v="bar"/></o>')
def test_fail_decode_unknown(self):
self.assertRaises(ValueError, xml.decode, '<foo/>')
def test_decode_error(self):
self.assertRaises(exceptions.UnknownUser,
xml.decode,
'<error code="500" description="foo"/>')
class XMLEncodeTestCase(unittest.TestCase):
    def test_encode_fallback(self):
self.assertRaises(ValueError, xml.encode, object())
def test_encode_none(self):
elem = xml.encode(None)
self.assertTrue(isinstance(elem, xml.ElementType))
self.assertEqual(elem.tag, 'f')
self.assertTrue('t' not in elem.attrib)
self.assertTrue('v' not in elem.attrib)
def test_encode_false(self):
elem = xml.encode(False)
self.assertTrue(isinstance(elem, xml.ElementType))
self.assertEqual(elem.tag, 'f')
self.assertTrue('t' in elem.attrib)
self.assertEqual(elem.attrib['t'], 'B')
self.assertTrue('v' in elem.attrib)
self.assertEqual(elem.attrib['v'], 'false')
def test_encode_true(self):
elem = xml.encode(True)
self.assertTrue(isinstance(elem, xml.ElementType))
self.assertEqual(elem.tag, 'f')
self.assertTrue('t' in elem.attrib)
self.assertEqual(elem.attrib['t'], 'B')
self.assertTrue('v' in elem.attrib)
self.assertEqual(elem.attrib['v'], 'true')
def test_encode_int(self):
elem = xml.encode(42)
self.assertTrue(isinstance(elem, xml.ElementType))
self.assertEqual(elem.tag, 'f')
self.assertTrue('t' in elem.attrib)
self.assertEqual(elem.attrib['t'], 'I')
self.assertTrue('v' in elem.attrib)
self.assertEqual(elem.attrib['v'], '42')
def test_encode_long(self):
elem = xml.encode(100500L)
self.assertTrue(isinstance(elem, xml.ElementType))
self.assertEqual(elem.tag, 'f')
self.assertTrue('t' in elem.attrib)
self.assertEqual(elem.attrib['t'], 'L')
self.assertTrue('v' in elem.attrib)
self.assertEqual(elem.attrib['v'], '100500')
def test_encode_float(self):
elem = xml.encode(3.14)
self.assertTrue(isinstance(elem, xml.ElementType))
self.assertEqual(elem.tag, 'f')
self.assertTrue('t' in elem.attrib)
self.assertEqual(elem.attrib['t'], 'F')
self.assertTrue('v' in elem.attrib)
self.assertEqual(elem.attrib['v'], '3.14')
def test_encode_string(self):
elem = xml.encode('foo')
self.assertTrue(isinstance(elem, xml.ElementType))
self.assertEqual(elem.tag, 'f')
self.assertTrue('t' in elem.attrib)
self.assertEqual(elem.attrib['t'], 'S')
self.assertTrue('v' in elem.attrib)
self.assertEqual(elem.attrib['v'], 'foo')
def test_encode_ref(self):
elem = xml.encode(xmlcodec.Reference('foo'))
self.assertTrue(isinstance(elem, xml.ElementType))
self.assertEqual(elem.tag, 'r')
self.assertTrue('i' in elem.attrib)
self.assertEqual(elem.attrib['i'], 'foo')
def test_encode_ref_id(self):
elem = xml.encode(xmlcodec.Reference(42))
self.assertTrue(isinstance(elem, xml.ElementType))
self.assertEqual(elem.tag, 'r')
self.assertTrue('i' in elem.attrib)
self.assertEqual(elem.attrib['i'], '42')
def test_encode_datetime(self):
elem = xml.encode(datetime.datetime(2009, 2, 14, 2, 31, 30))
self.assertTrue(isinstance(elem, xml.ElementType))
self.assertEqual(elem.tag, 'f')
self.assertTrue('t' in elem.attrib)
self.assertEqual(elem.attrib['t'], 'D')
self.assertTrue('v' in elem.attrib)
self.assertEqual(elem.attrib['v'], '14.02.2009 02:31:30')
def test_encode_empty_list(self):
elem = xml.encode(list())
self.assertTrue(isinstance(elem, xml.ElementType))
self.assertEqual(elem.tag, 's')
self.assertEqual(len(elem), 0)
def test_encode_list(self):
elem = xml.encode(['foo', 42, 'baz'])
self.assertTrue(isinstance(elem, xml.ElementType))
self.assertEqual(elem.tag, 's')
self.assertEqual(len(elem), 3)
item = elem[0]
self.assertEqual(item.tag, 'f')
self.assertEqual(item.attrib['v'], 'foo')
item = elem[1]
self.assertEqual(item.tag, 'f')
self.assertEqual(item.attrib['v'], '42')
item = elem[2]
self.assertEqual(item.tag, 'f')
self.assertEqual(item.attrib['v'], 'baz')
def test_encode_tuple(self):
elem = xml.encode(tuple())
self.assertTrue(isinstance(elem, xml.ElementType))
self.assertEqual(elem.tag, 's')
def test_encode_set(self):
elem = xml.encode(set(list([1,2,3])))
self.assertTrue(isinstance(elem, xml.ElementType))
self.assertEqual(elem.tag, 's')
def test_encode_frozenset(self):
elem = xml.encode(frozenset(list([1,2,3])))
self.assertTrue(isinstance(elem, xml.ElementType))
self.assertEqual(elem.tag, 's')
def test_encode_object(self):
elem = xml.encode({'foo': 'bar'})
self.assertTrue(isinstance(elem, xml.ElementType))
self.assertEqual(elem.tag, 'o')
item = elem[0]
self.assertTrue('n' in item.attrib)
self.assertEqual(item.attrib['n'], 'foo')
self.assertTrue('t' in item.attrib)
self.assertEqual(item.attrib['t'], 'S')
self.assertTrue('v' in item.attrib)
self.assertEqual(item.attrib['v'], 'bar')
def test_encode_object_with_attribs(self):
elem = xml.encode({'id': xmlcodec.Attribute('foo'), 'bar': 'baz'})
self.assertTrue(isinstance(elem, xml.ElementType))
self.assertEqual(elem.tag, 'o')
self.assertTrue('id' in elem.attrib)
self.assertEqual(elem.attrib['id'], 'foo')
def test_encode_object_skips_none_values(self):
elem = xml.encode({'id': None, 'foo': 'bar'})
self.assertTrue(isinstance(elem, xml.ElementType))
self.assertEqual(elem.tag, 'o')
self.assertEqual(len(elem), 1)
self.assertEqual(elem[0].attrib['v'], 'bar')
def test_encode_inherited_item(self):
class Dummy(dict):
pass
item = Dummy({'id': xmlcodec.Attribute('foo'), 'bar': 'baz'})
elem = xml.encode(item)
self.assertTrue(isinstance(elem, xml.ElementType))
self.assertEqual(elem.tag, 'o')
self.assertTrue('id' in elem.attrib)
self.assertEqual(elem.attrib['id'], 'foo')
def test_encode_lis_base_exception(self):
exc = exceptions.LisBaseException('foobarbaz')
exc.code = 123
elem = xml.encode(exc)
self.assertTrue(isinstance(elem, xml.ElementType))
self.assertEqual(elem.tag, 'error')
self.assertTrue('code' in elem.attrib)
self.assertEqual(elem.attrib['code'], '123')
self.assertTrue('description' in elem.attrib)
self.assertEqual(elem.attrib['description'], 'foobarbaz')
def test_encode_some_lis_exception(self):
elem = xml.encode(exceptions.UnknownUser('foobarbaz'))
self.assertTrue(isinstance(elem, xml.ElementType))
self.assertEqual(elem.tag, 'error')
self.assertTrue('code' in elem.attrib)
self.assertEqual(elem.attrib['code'], '500')
self.assertTrue('description' in elem.attrib)
self.assertEqual(elem.attrib['description'], 'foobarbaz')
if __name__ == '__main__':
unittest.main()
|
import os
import sys
try:
from setuptools import setup
# hush pyflakes
setup
except ImportError:
from distutils.core import setup
if sys.argv[-1] == 'publish':
os.system('python setup.py sdist upload')
sys.exit()
setup(
name='requests-aws',
version='0.1.8',
author='Paul Tax',
author_email='paultax@gmail.com',
include_package_data=True,
install_requires=['requests>=0.14.0'],
py_modules=['awsauth'],
url='https://github.com/tax/python-requests-aws',
license='BSD licence, see LICENCE.txt',
description='AWS authentication for Amazon S3 for the python requests module',
long_description=open('README.md').read(),
)
|
from __future__ import unicode_literals
from shop.models.defaults.order import Order # nopyflakes - materialize the default models
from shop.models.defaults.order_item import OrderItem # nopyflakes - materialize the default models
from shop.models.defaults.order_shipping import OrderShipping # nopyflakes - materialize the default model
from shop.models.defaults.cart import Cart # nopyflakes - materialize the default model
from shop.models.defaults.cart_item import CartItem # nopyflakes - materialize the default model
from shop.models.notification import Notification # nopyflakes - materialize the default model
from .auth import Customer
from . import address
from . import shopmodels
from . import commodity
|
"""Utilities to work with blueprints."""
from __future__ import print_function
import os
from chromite.lib import brick_lib
from chromite.lib import workspace_lib
BRICKS_FIELD = 'bricks'
BSP_FIELD = 'bsp'
_IMPLICIT_PACKAGES = (
'virtual/target-os',
'virtual/target-os-dev',
'virtual/target-os-test',
)
class BlueprintNotFoundError(Exception):
"""The blueprint does not exist."""
class BlueprintCreationError(Exception):
"""Blueprint creation failed."""
class Blueprint(object):
"""Encapsulates the interaction with a blueprint."""
def __init__(self, blueprint_loc, initial_config=None):
"""Instantiates a blueprint object.
Args:
blueprint_loc: blueprint locator. This can be a relative path to CWD, an
absolute path, or a relative path to the root of the workspace prefixed
with '//'.
initial_config: A dictionary of key-value pairs to seed a new blueprint
with if the specified blueprint doesn't already exist.
Raises:
BlueprintNotFoundError: No blueprint exists at |blueprint_loc| and no
|initial_config| was given to create a new one.
BlueprintCreationError: |initial_config| was specified but a file
already exists at |blueprint_loc|.
"""
self._path = (workspace_lib.LocatorToPath(blueprint_loc)
if workspace_lib.IsLocator(blueprint_loc) else blueprint_loc)
self._locator = workspace_lib.PathToLocator(self._path)
if initial_config is not None:
self._CreateBlueprintConfig(initial_config)
try:
self.config = workspace_lib.ReadConfigFile(self._path)
except IOError:
raise BlueprintNotFoundError('Blueprint %s not found.' % self._path)
@property
def path(self):
return self._path
@property
def locator(self):
return self._locator
def _CreateBlueprintConfig(self, config):
"""Create an initial blueprint config file.
Converts all brick paths in |config| into locators then saves the
configuration file to |self._path|.
Currently fails if |self._path| already exists, but could be
generalized to allow re-writing config files if needed.
Args:
config: configuration dictionary.
Raises:
BlueprintCreationError: A brick in |config| doesn't exist or an
error occurred while saving the config file.
"""
if os.path.exists(self._path):
raise BlueprintCreationError('File already exists at %s.' % self._path)
try:
# Turn brick specifications into locators. If bricks or BSPs are
# unspecified, assign default values so the config file has the proper
# structure for easy manual editing.
if config.get(BRICKS_FIELD):
config[BRICKS_FIELD] = [brick_lib.Brick(b).brick_locator
for b in config[BRICKS_FIELD]]
else:
config[BRICKS_FIELD] = []
if config.get(BSP_FIELD):
config[BSP_FIELD] = brick_lib.Brick(config[BSP_FIELD]).brick_locator
else:
config[BSP_FIELD] = None
# Create the config file.
workspace_lib.WriteConfigFile(self._path, config)
except (brick_lib.BrickNotFound, workspace_lib.ConfigFileError) as e:
raise BlueprintCreationError('Blueprint creation failed. %s' % e)
def GetBricks(self):
"""Returns the bricks field of a blueprint."""
return self.config.get(BRICKS_FIELD, [])
def GetBSP(self):
"""Returns the BSP field of a blueprint."""
return self.config.get(BSP_FIELD)
def FriendlyName(self):
"""Returns the friendly name for this blueprint."""
return workspace_lib.LocatorToFriendlyName(self._locator)
def GetUsedBricks(self):
"""Returns the set of bricks used by this blueprint."""
brick_map = {}
for top_brick in self.GetBricks() + [self.GetBSP()]:
for b in brick_lib.Brick(top_brick).BrickStack():
brick_map[b.brick_locator] = b
return brick_map.values()
def GetPackages(self, with_implicit=True):
"""Returns the list of packages needed by this blueprint.
This includes the main packages for the bricks and the bsp of this
blueprint. We don't add the main packages of the bricks dependencies to
allow inheriting a brick without inheriting its required packages.
Args:
with_implicit: If True, include packages that are implicitly required by
the core system.
"""
packages = []
for locator in self.GetBricks() + [self.GetBSP()]:
packages.extend(brick_lib.Brick(locator).MainPackages())
if with_implicit:
packages.extend(_IMPLICIT_PACKAGES)
return packages
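# Illustrative usage sketch (hypothetical locators; assumes a workspace that
# contains the referenced brick and BSP):
#   bp = Blueprint('//blueprints/gizmo.json',
#                  initial_config={BRICKS_FIELD: ['//bricks/foo'],
#                                  BSP_FIELD: '//bsps/my-board'})
#   print(bp.GetPackages())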
|
import os
import tempfile
from nose.tools import eq_, raises
from helper import TestCase
from appvalidator.zip import ZipPackage
RESOURCES_PATH = os.path.join(os.path.dirname(__file__), 'resources')
def get_path(fn):
return os.path.join(RESOURCES_PATH, fn)
class TestZipManager(TestCase):
def setUp(self):
self.z = ZipPackage(get_path('xpi/install_rdf_only.xpi'))
super(TestZipManager, self).setUp()
def test_open(self):
"""Test that the manager will open the package."""
assert self.z is not None
def test_get_list(self):
"""Test that the manager can read the file listing."""
assert not self.z.contents_cache
assert self.z.package_contents()
assert self.z.contents_cache # Spelling check!
self.z.contents_cache = 'foo'
eq_(self.z.package_contents(), 'foo')
def test_get_list_broken_fail(self):
"""
Test that the manager will generate a new package listing when broken
        files have been detected.
"""
assert not self.z.contents_cache
assert self.z.package_contents()
assert self.z.contents_cache # Spelling check!
self.z.broken_files.add("foo")
self.z.contents_cache = "foo"
assert self.z.package_contents() != "foo"
def test_valid_name(self):
"Test that the manager can retrieve the correct file name."
assert 'install.rdf' in self.z.package_contents()
def test_read_file(self):
"""Test that a file can be read from the package."""
assert self.z.read('install.rdf') is not None
class TestWriteZip(TestCase):
def test_write_file(self):
"""Test that a file can be written in UTF-8 to the package."""
with tempfile.NamedTemporaryFile(delete=False) as t:
temp_fn = t.name
try:
z = ZipPackage(temp_fn, mode='w')
f, d = 'install.rdf', '注目のコレクション'.decode('utf-8')
z.write(f, d)
eq_(z.read(f), d.encode('utf-8'))
finally:
os.unlink(temp_fn)
class TestBadZipFile(TestCase):
@raises(IOError)
def test_missing_file(self):
"""Tests that the XPI manager correctly reports a missing XPI file."""
ZipPackage("foo.bar")
def test_corrupt_zip(self):
"""Tests that the XPI manager correctly reports a missing XPI file."""
x = ZipPackage(get_path("corrupt.xpi"))
try:
x.read("install.rdf")
except Exception:
pass
else:
raise "Exception should have been raised on corrupt file access."
assert "install.rdf" in x.broken_files
|
from django.db import models
from django.core.exceptions import ObjectDoesNotExist, ValidationError
import cyder
from cyder.cydns.domain.models import Domain
from cyder.cydns.models import CydnsRecord
from cyder.cydns.validation import validate_name
from cyder.cydns.mixins import ObjectUrlMixin
from cyder.cydns.validation import validate_srv_label, validate_srv_port
from cyder.cydns.validation import validate_srv_priority, validate_srv_weight
from cyder.cydns.validation import validate_srv_name
class SRV(models.Model, ObjectUrlMixin):
"""
>>> SRV(domain=domain, label=label, target=target, port=port,
... priority=priority, weight=weight)
"""
id = models.AutoField(primary_key=True)
label = models.CharField(max_length=100, blank=True, null=True,
validators=[validate_srv_label])
domain = models.ForeignKey(Domain, null=False)
fqdn = models.CharField(max_length=255, blank=True, null=True,
validators=[validate_srv_name])
# fqdn = label + domain.name <--- see set_fqdn
target = models.CharField(max_length=100,
validators=[validate_name])
port = models.PositiveIntegerField(null=False,
validators=[validate_srv_port])
priority = models.PositiveIntegerField(null=False,
validators=[validate_srv_priority])
weight = models.PositiveIntegerField(null=False,
validators=[validate_srv_weight])
def details(self):
return (
('FQDN', self.fqdn),
('Record Type', 'SRV'),
            ('Target', self.target),
('Port', self.port),
('Priority', self.priority),
('Weight', self.weight),
)
class Meta:
db_table = 'srv'
unique_together = ('label', 'domain', 'target', 'port',
'priority', 'weight')
def delete(self, *args, **kwargs):
super(SRV, self).delete(*args, **kwargs)
def save(self, *args, **kwargs):
self.full_clean()
self.domain.dirty = True
self.domain.clean()
super(SRV, self).save(*args, **kwargs)
def clean(self):
self.set_fqdn()
self.check_for_cname()
self.check_for_delegation()
def __str__(self):
return "{0} {1} {2} {3} {4} {5} {6}".format(self.fqdn, 'IN', 'SRV',
self.priority, self.weight,
self.port, self.target)
def __repr__(self):
return "<{0}>".format(str(self))
def set_fqdn(self):
try:
self.fqdn = "{0}.{1}".format(self.label, self.domain.name)
except ObjectDoesNotExist:
return
def check_for_delegation(self):
"""If an object's domain is delegated it should not be able to
be changed. Delegated domains cannot have objects created in
them.
"""
if not self.domain.delegated:
return
if not self.pk: # We don't exist yet.
raise ValidationError("No objects can be created in the {0}"
"domain. It is delegated.".
format(self.domain.name))
def check_for_cname(self):
""""If a CNAME RR is preent at a node, no other data should be
present; this ensures that the data for a canonical name and
its aliases cannot be different."
-- `RFC 1034 <http://tools.ietf.org/html/rfc1034>`_
Call this function in models that can't overlap with an existing
CNAME.
"""
CNAME = cyder.cydns.cname.models.CNAME
if CNAME.objects.filter(fqdn=self.fqdn).exists():
raise ValidationError("A CNAME with this name already exists.")
|
from __future__ import absolute_import, print_function
import logging
from time import time
from urllib import urlencode
from uuid import uuid4
from sentry.auth import Provider, AuthView
from sentry.auth.exceptions import IdentityNotValid
from sentry.http import safe_urlopen, safe_urlread
from sentry.utils import json
from sentry.utils.http import absolute_uri
ERR_INVALID_STATE = 'An error occurred while validating your request.'
class OAuth2Login(AuthView):
authorize_url = None
client_id = None
scope = ''
def __init__(self, authorize_url=None, client_id=None, scope=None, *args,
**kwargs):
super(OAuth2Login, self).__init__(*args, **kwargs)
if authorize_url is not None:
self.authorize_url = authorize_url
if client_id is not None:
self.client_id = client_id
if scope is not None:
self.scope = scope
def get_scope(self):
return self.scope
def get_authorize_url(self):
return self.authorize_url
def get_authorize_params(self, state, redirect_uri):
return {
"client_id": self.client_id,
"response_type": "code",
"scope": self.get_scope(),
"state": state,
"redirect_uri": redirect_uri,
}
def dispatch(self, request, helper):
if 'code' in request.GET:
return helper.next_step()
state = str(uuid4())
params = self.get_authorize_params(
state=state,
redirect_uri=absolute_uri(helper.get_redirect_url()),
)
redirect_uri = '{}?{}'.format(
self.get_authorize_url(), urlencode(params)
)
helper.bind_state('state', state)
return self.redirect(redirect_uri)
class OAuth2Callback(AuthView):
access_token_url = None
client_id = None
client_secret = None
def __init__(self, access_token_url=None, client_id=None,
client_secret=None, *args, **kwargs):
super(OAuth2Callback, self).__init__(*args, **kwargs)
if access_token_url is not None:
self.access_token_url = access_token_url
if client_id is not None:
self.client_id = client_id
if client_secret is not None:
self.client_secret = client_secret
def get_token_params(self, code, redirect_uri):
return {
"grant_type": "authorization_code",
"code": code,
"redirect_uri": redirect_uri,
"client_id": self.client_id,
"client_secret": self.client_secret,
}
def exchange_token(self, request, helper, code):
        # TODO: the token exchange still needs proper auth handling
data = self.get_token_params(
code=code,
redirect_uri=absolute_uri(helper.get_redirect_url()),
)
req = safe_urlopen(self.access_token_url, data=data)
body = safe_urlread(req)
return json.loads(body)
def dispatch(self, request, helper):
error = request.GET.get('error')
state = request.GET.get('state')
code = request.GET.get('code')
if error:
return helper.error(error)
if state != helper.fetch_state('state'):
return helper.error(ERR_INVALID_STATE)
data = self.exchange_token(request, helper, code)
if 'error_description' in data:
return helper.error(data['error_description'])
if 'error' in data:
logging.info('Error exchanging token: %s', data['error'])
return helper.error('Unable to retrieve your token')
# we can either expect the API to be implicit and say "im looking for
# blah within state data" or we need to pass implementation + call a
# hook here
helper.bind_state('data', data)
return helper.next_step()
class OAuth2Provider(Provider):
def get_auth_pipeline(self):
return [OAuth2Login(), OAuth2Callback()]
def get_refresh_token_url(self):
raise NotImplementedError
def get_refresh_token_params(self, refresh_token):
return {
"client_id": self.client_id,
"client_secret": self.client_secret,
"grant_type": "refresh_token",
"refresh_token": refresh_token,
}
def get_oauth_data(self, payload):
return {
'access_token': payload['access_token'],
'refresh_token': payload.get('refresh_token'),
'token_type': payload['token_type'],
'expires': time() + payload['expires_in'],
}
def build_identity(self, state):
# data = state['data']
# return {
# 'id': '',
# 'email': '',
# 'name': '',
# 'data': self.get_oauth_data(data),
# }
raise NotImplementedError
def refresh_identity(self, auth_identity):
refresh_token = auth_identity.data.get('refresh_token')
if not refresh_token:
raise IdentityNotValid
data = self.get_refresh_token_params(
refresh_token=refresh_token,
)
req = safe_urlopen(self.get_refresh_token_url(), data=data)
try:
body = safe_urlread(req)
payload = json.loads(body)
except Exception:
payload = {}
error = payload.get('error', 'unknown_error')
error_description = payload.get('error_description', 'no description available')
formatted_error = 'HTTP {} ({}): {}'.format(
req.status_code, error, error_description
)
if req.status_code == 401:
raise IdentityNotValid(formatted_error)
if req.status_code == 400:
# this may not be common, but at the very least Google will return
# an invalid grant when a user is suspended
if error == 'invalid_grant':
raise IdentityNotValid(formatted_error)
if req.status_code != 200:
raise Exception(formatted_error)
        auth_identity.data = self.get_oauth_data(payload)
auth_identity.update(data=auth_identity.data)
return True
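# A minimal provider sketch (illustrative only; the endpoint URLs, client id
# and secret below are placeholders, not a real integration).
class ExampleOAuth2Provider(OAuth2Provider):
    name = 'example'

    def get_auth_pipeline(self):
        return [
            OAuth2Login(authorize_url='https://example.com/oauth/authorize',
                        client_id='my-client-id',
                        scope='read'),
            OAuth2Callback(access_token_url='https://example.com/oauth/token',
                           client_id='my-client-id',
                           client_secret='my-client-secret'),
        ]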
|
"""
===========================
Reading an inverse operator
===========================
The inverse operator's source space is shown in 3D.
"""
import mne
from mne.datasets import sample
from mne.minimum_norm import read_inverse_operator
from mne.viz import set_3d_view
print(__doc__)
data_path = sample.data_path()
subjects_dir = data_path / 'subjects'
meg_path = data_path / 'MEG' / 'sample'
fname_trans = meg_path / 'sample_audvis_raw-trans.fif'
inv_fname = meg_path / 'sample_audvis-meg-oct-6-meg-inv.fif'
inv = read_inverse_operator(inv_fname)
print("Method: %s" % inv['methods'])
print("fMRI prior: %s" % inv['fmri_prior'])
print("Number of sources: %s" % inv['nsource'])
print("Number of channels: %s" % inv['nchan'])
src = inv['src'] # get the source space
print("Number of vertices on the left hemisphere: %d" % len(src[0]['rr']))
print("Number of triangles on left hemisphere: %d" % len(src[0]['use_tris']))
print("Number of vertices on the right hemisphere: %d" % len(src[1]['rr']))
print("Number of triangles on right hemisphere: %d" % len(src[1]['use_tris']))
fig = mne.viz.plot_alignment(subject='sample', subjects_dir=subjects_dir,
trans=fname_trans, surfaces='white', src=src)
set_3d_view(fig, focalpoint=(0., 0., 0.06))
|
"""
Not stable therefore removed in the Beta. Might come back later.
"""
import time
import socket
import logging
import gc
class Portrange(object):
def __init__(self):
pass
@staticmethod
def get_kind():
"""
return sensor kind
"""
return "mpportrange"
@staticmethod
def get_sensordef():
"""
Definition of the sensor and data to be shown in the PRTG WebGUI
"""
sensordefinition = {
"kind": Portrange.get_kind(),
"name": "Port Range",
"description": "Checks the availability of a port range on a target system",
"help": "Checks the availability of a port range on a target system",
"tag": "mpportrangesensor",
"groups": [
{
"name": " portspecific",
"caption": "Port specific",
"fields": [
{
"type": "integer",
"name": "timeout",
"caption": "Timeout (in s)",
"required": "1",
"default": 60,
"minimum": 1,
"maximum": 900,
"help": "If the reply takes longer than this value the request is aborted "
"and an error message is triggered. Max. value is 900 sec. (=15 min.)"
},
{
"type": "integer",
"name": "startport",
"caption": "Port",
"required": "1",
"default": 110,
"minimum": 1,
"maximum": 65534,
"help": "Specify the port ranges starting port"
},
{
"type": "integer",
"name": "endport",
"caption": "Port",
"required": "1",
"default": 110,
"minimum": 1,
"maximum": 65534,
"help": "Specify the port ranges end port"
}
]
}
]
}
return sensordefinition
def portrange(self, target, timeout, start, end):
remote_server = socket.gethostbyname(target)
numberofports = int(end) - int(start)
        result = 1234  # non-zero sentinel; overwritten by connect_ex below
a = 0
start_time = time.time()
for port in range(int(start), int(end)):
try:
conn = socket.socket(socket.AF_INET, socket.SOCK_STREAM)
conn.settimeout(float(timeout))
result = conn.connect_ex((remote_server, int(port)))
conn.close()
            except socket.gaierror as e:
                print(e)
            except socket.timeout as e:
                print(e)
            except Exception as e:
                print("unexpected error: %s" % e)
if result == 0:
a += 1
else:
raise Exception('port %s not open' % port)
end_time = time.time()
response_time = (end_time - start_time) * 1000
if a == numberofports:
channel_list = [
{
"name": "Available",
"mode": "float",
"kind": "TimeResponse",
"value": float(response_time)
}
]
return channel_list
else:
            raise Exception('only %s of %s ports open' % (a, numberofports))
@staticmethod
def get_data(data):
port = Portrange()
try:
port_data = port.portrange(data['host'], data['timeout'], data['startport'], data['endport'])
except Exception as e:
logging.error("Ooops Something went wrong with '%s' sensor %s. Error: %s" % (port.get_kind(),
data['sensorid'], e))
sensor_data = {
"sensorid": int(data['sensorid']),
"error": "Exception",
"code": 1,
"message": "Port check failed or ports closed. See log for details"
}
return sensor_data
sensor_data = {
"sensorid": int(data['sensorid']),
"message": "OK Ports open",
"channel": port_data
}
del port
gc.collect()
return sensor_data
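# A minimal local smoke test (hypothetical host/port values; in production the
# PRTG probe supplies `data` from the sensor configuration):
if __name__ == '__main__':
    sample = {
        'host': '127.0.0.1',
        'timeout': 2,
        'startport': 22,
        'endport': 23,
        'sensorid': 1,
    }
    print(Portrange.get_data(sample))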
|
import datetime
import json
import random
import string
import unicodedata
import urllib
import dns.resolver
import config
from aws import awsutils
from db_utils import db
def to_json(inst, cls, bonusProps=()):
"""
Jsonify the sql alchemy query result.
Inspired from http://stackoverflow.com/a/9746249
"""
convert = dict()
    # add your conversions for things like datetimes
    # and what-not that aren't serializable
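    # e.g. (hypothetical; none are registered by default):
    #   convert[db.DateTime] = lambda value: value.isoformat()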
d = dict()
for c in cls.__table__.columns:
if not hasattr(inst, c.name): # If the field was inherited
continue
v = getattr(inst, c.name)
        if c.type in convert.keys() and v is not None:
            try:
                d[c.name] = convert[c.type](v)
            except Exception:
                d[c.name] = "Error: Failed to convert using %s" % str(convert[c.type])
elif v is None:
d[c.name] = str()
else:
d[c.name] = v
for p in bonusProps:
d[p] = getattr(inst, p)
return d
class ServiceProvider(db.Model):
id = db.Column(db.Integer, primary_key=True)
codops = db.Column(db.String(255), index=True)
short_name = db.Column(db.String(8))
medium_name = db.Column(db.String(16))
long_name = db.Column(db.String(128))
short_description = db.Column(db.String(180))
long_description = db.Column(db.String(1200))
url_default = db.Column(db.String(255))
postal_name = db.Column(db.String(255))
street = db.Column(db.String(255))
city = db.Column(db.String(255))
zipcode = db.Column(db.String(25))
phone_number = db.Column(db.String(128))
keywords = db.Column(db.String(255))
default_language = db.Column(db.String(5))
location_country = db.Column(db.String(5))
default_logo_image_id = db.Column(db.Integer,
db.ForeignKey('logo_image.id', use_alter=True, name='fk_default_logo_id'))
default_logo_image = db.relationship("LogoImage", foreign_keys=[default_logo_image_id])
stations = db.relationship('Station', backref='service_provider', lazy='dynamic')
def __init__(self, codops):
self.codops = codops
def __eq__(self, other):
return other is not None and self.id == other.id
def check_aws(self):
return awsutils.check_serviceprovider(self)
def escape_slash_rfc3986(self, value):
if value:
return value.replace('/', '%2F')
return ''
@property
def default_logo_image_data(self):
if self.default_logo_image:
return self.default_logo_image.json
else:
return None
@property
def epg_country(self):
if self.location_country:
ecc = Ecc.query.filter_by(iso=self.location_country).first()
if ecc:
return ecc.name
return None
@property
def epg_postal(self):
if self.postal_name:
return "postal:%s/%s/%s/%s/%s" % (
self.escape_slash_rfc3986(self.postal_name), self.escape_slash_rfc3986(self.street),
self.escape_slash_rfc3986(self.city), self.escape_slash_rfc3986(self.zipcode),
self.escape_slash_rfc3986(self.epg_country))
return None
@property
def epg_phone_number(self):
if self.phone_number:
return "tel:%s" % (self.phone_number)
return None
@property
def fqdn(self):
if self.codops:
return "%s.%s" % (self.codops.lower(), config.DOMAIN)
return None
@property
def vis_fqdn(self):
if self.codops:
return "%s.%s" % (self.codops.lower(), config.RADIOVIS_DNS)
return None
@property
def epg_fqdn(self):
if self.codops:
return "%s.%s" % (self.codops.lower(), config.RADIOEPG_DNS)
return None
@property
def spi_fqdn(self):
if self.codops:
return "%s.%s" % (self.codops.lower(), config.RADIOSPI_DNS)
return None
@property
def tag_fqdn(self):
if self.codops:
return "%s.%s" % (self.codops.lower(), config.RADIOTAG_DNS)
return None
@property
def vis_service(self):
if self.codops:
return "%s.%s" % (self.codops.lower(), config.RADIOVIS_SERVICE_DEFAULT)
return None
@property
def epg_service(self):
if self.codops:
return "%s.%s" % (self.codops.lower(), config.RADIOEPG_SERVICE_DEFAULT)
return None
@property
def tag_service(self):
if self.codops:
return "%s.%s" % (self.codops.lower(), config.RADIOTAG_SERVICE_DEFAULT)
return None
@property
def spi_service(self):
if self.codops:
return "%s.%s" % (self.codops.lower(), config.RADIOSPI_SERVICE_DEFAULT)
return None
@property
def image_url_prefix(self):
if config.STANDALONE:
return config.LOGO_PUBLIC_URL + "/"
else:
return awsutils.get_public_urlprefix(self)
@property
def json(self):
return to_json(self, self.__class__, ['default_logo_image_data', 'image_url_prefix',
'epg_postal', 'epg_phone_number', 'epg_country', 'fqdn',
'vis_fqdn', 'epg_fqdn', 'tag_fqdn',
'vis_service', 'epg_service', 'tag_service'])
class Station(db.Model):
id = db.Column(db.Integer, primary_key=True)
orga = db.Column(db.Integer, index=True)
parent = db.Column(db.Integer)
name = db.Column(db.String(80))
short_name = db.Column(db.String(8))
medium_name = db.Column(db.String(16))
long_name = db.Column(db.String(128))
short_description = db.Column(db.String(180))
long_description = db.Column(db.String(1200))
url_default = db.Column(db.String(255))
random_password = db.Column(db.String(32))
postal_name = db.Column(db.String(255))
street = db.Column(db.String(255))
city = db.Column(db.String(255))
zipcode = db.Column(db.String(25))
phone_number = db.Column(db.String(128))
sms_number = db.Column(db.String(128))
sms_body = db.Column(db.String(255))
sms_description = db.Column(db.String(255))
email_address = db.Column(db.String(255))
email_description = db.Column(db.String(255))
keywords = db.Column(db.String(255))
default_language = db.Column(db.String(5))
location_country = db.Column(db.String(5))
# Services
# fqdn_station_prefix = db.Column(db.String(255)) maybe to add due to filtering issue in Alchemy
radiovis_enabled = db.Column(db.Boolean, index=True)
radiovis_service = db.Column(db.String(255))
radioepg_enabled = db.Column(db.Boolean, index=True)
radioepgpi_enabled = db.Column(db.Boolean, default=False, index=True)
radioepg_service = db.Column(db.String(255))
radiotag_enabled = db.Column(db.Boolean, index=True)
radiotag_service = db.Column(db.String(255))
radiospi_enabled = db.Column(db.Boolean, index=True)
radiospi_service = db.Column(db.String(255))
service_provider_id = db.Column(db.Integer, db.ForeignKey('service_provider.id'))
    ip_allowed = db.Column(db.String(256))  # A comma-separated list of IPs/subnets
genres = db.Column(db.Text())
channels = db.relationship('Channel', backref='station', lazy='dynamic')
shows = db.relationship('Show', backref='station', lazy='dynamic')
schedules = db.relationship('Schedule', backref='station', lazy='dynamic')
servicefollowingentries = db.relationship('GenericServiceFollowingEntry', backref='station', lazy='dynamic')
# epg_picture_id = db.Column(db.Integer, db.ForeignKey('picture_forEPG.id'))
default_logo_image_id = db.Column(db.Integer,
db.ForeignKey('logo_image.id', use_alter=True, name='fk_epg_default_logo_id'))
default_logo_image = db.relationship("LogoImage", foreign_keys=[default_logo_image_id])
fk_client = db.Column(db.Integer, db.ForeignKey('clients.id', use_alter=True, name='station_clients_id_fk'))
client = db.relationship("Clients", foreign_keys=[fk_client])
__table_args__ = (db.Index('ix_station_spid_radioepg_enabled', "service_provider_id", "radioepg_enabled"),)
def __getitem__(self, item):
return getattr(self, item)
def __eq__(self, other):
return other is not None and self.id == other.id
def escape_slash_rfc3986(self, value):
if value:
return value.replace('/', '%2F')
return None
@property
def service_provider_data(self):
if self.service_provider:
return self.service_provider.json
else:
return None
@property
def epg_country(self):
if self.location_country:
ecc = Ecc.query.filter_by(iso=self.location_country).first()
if ecc:
return ecc.name
return None
@property
def epg_postal(self):
if self.postal_name:
return "postal:%s/%s/%s/%s/%s" % (
self.escape_slash_rfc3986(self.postal_name), self.escape_slash_rfc3986(self.street),
self.escape_slash_rfc3986(self.city), self.escape_slash_rfc3986(self.zipcode),
self.escape_slash_rfc3986(self.epg_country))
return None
@property
def epg_phone_number(self):
if self.phone_number:
return "tel:%s" % (self.phone_number)
return None
@property
def epg_email(self):
if self.email_address:
return "mailto:%s" % (self.email_address)
return None
@property
def epg_sms(self):
        if self.sms_number:
            if self.sms_body:
                return "sms:%s?%s" % (self.sms_number, urllib.urlencode({'body': self.sms_body}))
            else:
                return "sms:%s" % (self.sms_number)
        return None
@property
def genres_list(self):
try:
return json.loads(self.genres)
        except Exception:
return []
@property
def default_logo_image_data(self):
if self.default_logo_image:
return self.default_logo_image.json
else:
return None
@property
def service_identifier(self):
if self.service_provider:
if self.service_provider.codops:
return "ebu%s%s" % (self.id, self.service_provider.codops.lower())
return None
def __init__(self, orga, name=u''):
self.orga = orga
self.name = name
@property
def ascii_name(self):
return unicodedata.normalize('NFKD', self.name if self.name else u'').encode('ascii', 'ignore')
def gen_random_password(self):
self.random_password = ''.join(random.choice(string.ascii_letters + string.digits) for x in range(32))
def check_aws(self):
return awsutils.check_station(self)
@property
def short_name_to_use(self):
"""Return the shortname, based on the name or the short one"""
return (self.short_name or self.name)[:8] if self.short_name or self.name else u''
@property
def fqdn_prefix(self):
return filter(lambda x: x in string.ascii_letters + string.digits, self.ascii_name.lower())
@property
def fqdn(self):
if self.service_provider:
return "%s.%s" % (self.fqdn_prefix, self.service_provider.fqdn)
return None
@property
def stomp_username(self):
return str(self.id) + '.' + filter(lambda x: x in string.ascii_letters + string.digits, self.ascii_name.lower())
@property
def json(self):
return to_json(self, self.__class__,
['stomp_username', 'short_name_to_use', 'service_provider_data', 'default_logo_image_data',
'epg_country', 'epg_postal', 'epg_phone_number', 'epg_sms', 'epg_email',
'genres_list', 'ascii_name', 'fqdn', 'fqdn_prefix', 'service_identifier'])
class Ecc(db.Model):
id = db.Column(db.Integer, primary_key=True)
name = db.Column(db.String(255))
iso = db.Column(db.String(2), index=True)
pi = db.Column(db.String(2))
ecc = db.Column(db.String(3))
__table_args__ = (db.Index('ix_ecc_pi_ecc', "pi", "ecc"),)
def __repr__(self):
return '<Ecc %r>' % self.name
@property
def json(self):
return to_json(self, self.__class__)
class CountryCode(db.Model):
id = db.Column(db.Integer, primary_key=True)
name = db.Column(db.String(255))
iso = db.Column(db.String(2), index=True)
cc = db.Column(db.String(3))
__tablename__ = "country_code"
def __repr__(self):
return '<Cc %r>' % self.name
@property
def json(self):
return to_json(self, self.__class__)
class Clients(db.Model):
id = db.Column(db.Integer, primary_key=True)
name = db.Column(db.String(500))
orga = db.Column(db.Integer)
identifier = db.Column(db.String(128))
email = db.Column(db.String(255))
def __eq__(self, other):
return other is not None and self.id == other.id
def __hash__(self):
return self.id
@property
def json(self):
return to_json(self, self.__class__)
class Channel(db.Model):
id = db.Column(db.Integer, primary_key=True)
station_id = db.Column(db.Integer, db.ForeignKey('station.id'))
name = db.Column(db.String(255))
TYPE_ID_CHOICES = [('fm', 'VHF/FM', ['ecc_id', 'pi', 'frequency']),
('dab', 'DAB', ['ecc_id', 'eid', 'sid', 'scids', 'appty_uatype', 'pa', 'mime_type']),
('drm', 'DRM', ['sid']),
('amss', 'AMSS', ['sid']),
('hd', 'HD Radio', ['cc', 'tx', 'mid']),
('id', 'IP', ['fqdn', 'serviceIdentifier', 'stream_url', 'mime_type', 'bitrate'])
]
TO_IGNORE_IN_DNS = ['stream_url', 'mime_type', 'bitrate']
type_id = db.Column(db.String(5), index=True)
# FM
ecc_id = db.Column(db.Integer, db.ForeignKey('ecc.id'))
pi = db.Column(db.String(4))
frequency = db.Column(db.String(5))
# DAB/DAB+
eid = db.Column(db.String(4))
sid = db.Column(db.String(8))
scids = db.Column(db.String(3))
appty_uatype = db.Column(db.String(6))
pa = db.Column(db.Integer)
# IP
stream_url = db.Column(db.String(255))
bitrate = db.Column(db.Integer)
mime_type = db.Column(db.String(64))
# hd
tx = db.Column(db.String(5))
cc = db.Column(db.String(3))
mid = db.Column(db.Integer)
# ID
fqdn = db.Column(db.String(255))
serviceIdentifier = db.Column(db.String(16))
fk_client = db.Column(db.Integer, db.ForeignKey('clients.id', use_alter=True, name='channel_clients_id_fk'))
client = db.relationship("Clients", foreign_keys=[fk_client])
default_picture_id = db.Column(db.Integer, db.ForeignKey('picture.id'))
servicefollowingentries = db.relationship('GenericServiceFollowingEntry', backref='channel', lazy='dynamic')
def __repr__(self):
        return '<Channel %r[%s]>' % (self.name, repr(self.station))
def updateservicefollowingentry(self):
"""Updates the existing service following entry linked to the channel if one"""
entries = self.servicefollowingentries.all()
for entry in entries:
            # DAB and IP channels default to MPEG audio when no mime type is set
            if self.type_id in ('dab', 'id') and not self.mime_type:
                entry.mime_type = 'audio/mpeg'
            else:
                entry.mime_type = self.mime_type
            if self.type_id == 'id' and not self.bitrate:
                entry.bitrate = 128
            else:
                entry.bitrate = self.bitrate
db.session.commit()
@property
def servicefollowingentry(self):
"""Return (or create) the associated service following entry"""
        # Find in existing objects
entries = self.servicefollowingentries.all()
if len(entries) > 0:
return entries[0]
# Create a new one
        entry = GenericServiceFollowingEntry()
        entry.channel_id = self.id
        entry.active = True
        entry.cost = 100
        entry.offset = 0
        # Mime type and default values
        if self.type_id == 'id':
            entry.cost = 100
            entry.offset = 2000
            if not self.mime_type:
                entry.mime_type = 'audio/mpeg'
            else:
                entry.mime_type = self.mime_type
            if not self.bitrate:
                entry.bitrate = 128
            else:
                entry.bitrate = self.bitrate
        if self.type_id == 'fm':
            entry.cost = 50
        if self.type_id == 'dab':
            entry.cost = 20
            if not self.mime_type:
                entry.mime_type = 'audio/mpeg'
            else:
                entry.mime_type = self.mime_type
        db.session.add(entry)
        db.session.commit()
        return entry
    @property
    def service_identifier(self):
        if self.type_id not in ("fm", "dab"):
            return None
        ecc = Ecc.query.filter_by(id=self.ecc_id).first()
        gcc = ecc.pi + ecc.ecc
        if self.type_id == "fm":
            return "fm/{}/{}/{}".format(gcc, self.pi, self.frequency)
        return "dab/{}/{}/{}/{}".format(gcc, self.eid, self.sid, self.scids)
@property
def topic(self):
return self.topic_no_slash + '/'
@property
def topic_no_slash(self):
return '/topic/' + '/'.join(self.dns_entry.split('.')[::-1])
def generate_dns_entry(self, return_iso):
val = self.type_id
for (t, _, props) in Channel.TYPE_ID_CHOICES:
if t == self.type_id:
for v in props:
if getattr(self, v) is not None:
value = str(getattr(self, v)).lower()
if v == 'ecc_id': # Special case
cc_obj = Ecc.query.filter_by(id=value).first()
if return_iso:
value = (cc_obj.iso).lower()
else:
value = (cc_obj.pi + cc_obj.ecc).lower()
# Exclude certain parameters from the RadioDNS FQDN construction
if v in Channel.TO_IGNORE_IN_DNS: continue
if v == 'mid' and value == "1": continue
val = value + '.' + val
return val
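    # Worked example (illustrative values): an FM channel whose ECC resolves
    # to gcc '4e1', with pi 'c479' and frequency '09580', yields the entry
    # '09580.c479.4e1.fm'; reversed, this gives the STOMP topic
    # '/topic/fm/4e1/c479/09580/' (see the topic properties above).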
@property
def dns_entry(self):
return self.generate_dns_entry(False)
@property
def dns_entry_iso(self):
return self.generate_dns_entry(True)
@property
def radiodns_entry(self):
return self.dns_entry + '.radiodns.org.'
@property
def station_name(self):
if self.station:
return self.station.name
else:
return ''
@property
def station_ascii_name(self):
if self.station:
return self.station.ascii_name
else:
return ''
@property
def default_picture_data(self):
if self.default_picture:
return self.default_picture.json
else:
return None
@property
def json(self):
return to_json(self, self.__class__,
['topic', 'station_json', 'radiodns_entry', 'station_name', 'station_ascii_name',
'default_picture_data', 'topic_no_slash', 'client_json'])
@property
def client_json(self):
return self.client.json if self.client else {'name': 'default'}
@property
def station_json(self):
if self.station:
return self.station.json
else:
return None
@property
def dns_values(self):
fqdn = ''
vis = ''
epg = ''
tag = ''
dns_entry = self.radiodns_entry
# Special case with *
if dns_entry[0] == '*':
dns_entry = '10800' + dns_entry[1:]
# Find radiodns servers
ns = str(dns.resolver.query('radiodns.org', 'NS')[0])
ip = str(dns.resolver.query(ns, 'A')[0])
        # Build a resolver that queries the radiodns.org nameserver (2 s timeout) to be sure to get the latest FQDN
resolver = dns.resolver.Resolver()
resolver.lifetime = 2 # Timeout of 2
resolver.nameservers = [ip] # Use radiodns.org servers
try:
fqdn = str(resolver.query(dns_entry, 'CNAME')[0])
        except Exception:
pass
        # Build a resolver for the other queries using the local nameserver
resolver = dns.resolver.Resolver()
resolver.lifetime = 2 # Timeout of 2
if fqdn:
try:
vis = str(resolver.query('_radiovis._tcp.' + fqdn, 'SRV')[0])
            except Exception:
pass
try:
epg = str(resolver.query('_radioepg._tcp.' + fqdn, 'SRV')[0])
            except Exception:
pass
try:
tag = str(resolver.query('_radiotag._tcp.' + fqdn, 'SRV')[0])
            except Exception:
pass
return (fqdn, vis, epg, tag)
@property
def epg_uri(self):
# Special Case Urls / Streaming / Ip / Ids
if self.type_id == 'id' and self.stream_url:
return self.stream_url
        parts = self.dns_entry.split('.')
        return parts[-1] + ':' + '.'.join(parts[::-1][1:])
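        # e.g. a dns_entry of '09580.c479.4e1.fm' yields 'fm:4e1.c479.09580'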
class Picture(db.Model):
id = db.Column(db.Integer, primary_key=True)
orga = db.Column(db.Integer)
name = db.Column(db.String(80))
filename = db.Column(db.String(255))
radiotext = db.Column(db.String(255))
radiolink = db.Column(db.String(255))
image_url_prefix = db.Column(db.String(255))
channels = db.relationship('Channel', backref='default_picture', lazy='dynamic')
def __init__(self, orga):
self.orga = orga
def __repr__(self):
return '<Picture %r[%s]>' % (self.name, self.orga)
@property
def clean_filename(self):
if not self.filename:
return ''
return self.filename.split('/')[-1]
@property
def public_url(self):
return "%s%s" % (self.image_url_prefix, self.filename)
@property
def json(self):
return to_json(self, self.__class__, ['clean_filename', 'public_url'])
class LogEntry(db.Model):
id = db.Column(db.Integer, primary_key=True)
topic = db.Column(db.String(255), index=True)
body = db.Column(db.Text())
headers = db.Column(db.Text())
reception_timestamp = db.Column(db.Integer(), index=True)
@property
def reception_date(self):
return datetime.datetime.fromtimestamp(self.reception_timestamp)
@property
def json(self):
return to_json(self, self.__class__, ['reception_date'])
class Show(db.Model):
id = db.Column(db.Integer, primary_key=True)
orga = db.Column(db.Integer)
medium_name = db.Column(db.String(255))
long_name = db.Column(db.String(255))
description = db.Column(db.String(255))
color = db.Column(db.String(7))
station_id = db.Column(db.Integer, db.ForeignKey('station.id'))
schedules = db.relationship('Schedule', backref='show', lazy='dynamic')
def __init__(self, orga):
self.orga = orga
def __repr__(self):
return '<Show %r[%s]>' % (self.medium_name, self.orga)
@property
def json(self):
return to_json(self, self.__class__, [])
class Schedule(db.Model):
id = db.Column(db.Integer, primary_key=True)
show_id = db.Column(db.Integer, db.ForeignKey('show.id'))
station_id = db.Column(db.Integer, db.ForeignKey('station.id'))
day = db.Column(db.Integer)
start_hour = db.Column(db.Integer)
start_minute = db.Column(db.Integer)
length = db.Column(db.Integer)
@property
def seconds_from_base(self):
"""The number, in seconds of start, based on monday 00:00"""
return self.day * 24 * 60 * 60 + self.start_hour * 60 * 60 + self.start_minute * 60
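        # e.g. day=1 (Tuesday), 06:30 -> 1*86400 + 6*3600 + 30*60 = 109800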
@property
def duration(self):
return 'PT' + str(int(self.length / 60)) + 'H' + str(self.length % 60) + 'M'
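        # e.g. length=90 (minutes) -> 'PT1H30M' (ISO 8601 duration)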
@property
def date_of_start_time(self):
"""Return the start time as a date, assuming start_date has been set as a reference"""
        return self.start_date + datetime.timedelta(days=self.day, hours=self.start_hour, minutes=self.start_minute)
@property
def start_time(self):
"""Return the start time as a string, assuming start_date has been set as a reference"""
        time_format = '%Y-%m-%dT%H:%M:%S%z'
if not hasattr(self, 'start_date'):
return ''
        return self.date_of_start_time.strftime(time_format)
@property
def json_show(self):
return self.show.json
@property
def json(self):
return to_json(self, self.__class__, ['json_show', 'start_time', 'duration'])
class GenericServiceFollowingEntry(db.Model):
"""A generic entry for service following"""
"""If channel id is set, object is linked to a channel, otherwise station_id and channel_uri must be set, linking to a station"""
id = db.Column(db.Integer, primary_key=True)
active = db.Column(db.Boolean)
cost = db.Column(db.Integer)
offset = db.Column(db.Integer)
mime_type = db.Column(db.String(255))
bitrate = db.Column(db.Integer)
channel_id = db.Column(db.Integer, db.ForeignKey('channel.id'), nullable=True)
station_id = db.Column(db.Integer, db.ForeignKey('station.id'), nullable=True)
channel_uri = db.Column(db.String(255), nullable=True)
@property
def channel_name(self):
"""The name of the channel, if linked to a channel"""
if self.channel:
return self.channel.name
return ''
@property
def channel_type(self):
"""The type of the channel, if linked to a channel"""
if self.channel:
return self.channel.type_id
return ''
@property
def uri(self):
"""The uri to use"""
if self.channel:
return self.channel.epg_uri
else:
return self.channel_uri
@property
def type(self):
if self.channel:
return 'channel'
else:
return 'ip'
@property
def json(self):
return to_json(self, self.__class__, ['channel_name', 'uri', 'type', 'channel_type'])
class LogoImage(db.Model):
id = db.Column(db.Integer, primary_key=True)
orga = db.Column(db.Integer)
codops = db.Column(db.String(255))
name = db.Column(db.String(255))
filename = db.Column(db.String(255))
url32x32 = db.Column(db.String(255))
url112x32 = db.Column(db.String(255))
url128x128 = db.Column(db.String(255))
url320x240 = db.Column(db.String(255))
url600x600 = db.Column(db.String(255))
service_provider_id = db.Column(db.Integer, db.ForeignKey('service_provider.id'))
service_provider = db.relationship("ServiceProvider", backref='logo_images', uselist=False,
foreign_keys=[service_provider_id])
stations = db.relationship('Station', backref='epg_picture', lazy='dynamic')
def __init__(self, orga):
self.orga = orga
def __repr__(self):
return '<LogoImage %r[%s]>' % (self.filename, self.orga)
@property
def clean_filename(self):
if not self.filename:
return ''
return self.filename.split('/')[-1]
@property
def public_url(self):
if self.service_provider:
return "%s%s" % (self.service_provider.image_url_prefix, self.filename)
else:
return None
@property
def public_32x32_url(self):
if self.url32x32:
return "%s%s" % (self.service_provider.image_url_prefix, self.url32x32)
else:
return self.public_url
@property
def public_112x32_url(self):
if self.url112x32:
return "%s%s" % (self.service_provider.image_url_prefix, self.url112x32)
else:
return self.public_url
@property
def public_128x128_url(self):
if self.url128x128:
return "%s%s" % (self.service_provider.image_url_prefix, self.url128x128)
else:
return self.public_url
@property
def public_320x240_url(self):
if self.url320x240:
return "%s%s" % (self.service_provider.image_url_prefix, self.url320x240)
else:
return self.public_url
@property
def public_600x600_url(self):
if self.url600x600:
return "%s%s" % (self.service_provider.image_url_prefix, self.url600x600)
else:
return self.public_url
@property
def json(self):
return to_json(self, self.__class__, ['clean_filename', 'public_url',
'public_32x32_url', 'public_112x32_url', 'public_128x128_url',
'public_320x240_url', 'public_600x600_url'])
|
"""
Copyright (c) 2015 Red Hat, Inc
All rights reserved.
This software may be modified and distributed under the terms
of the BSD license. See the LICENSE file for details.
"""
from __future__ import print_function, unicode_literals
from atomic_reactor.core import DockerTasker
from atomic_reactor.inner import DockerBuildWorkflow
from atomic_reactor.plugin import PostBuildPluginsRunner
from atomic_reactor.plugins.post_tag_by_labels import TagByLabelsPlugin
from atomic_reactor.util import ImageName
from atomic_reactor.constants import INSPECT_CONFIG
from tests.constants import LOCALHOST_REGISTRY, TEST_IMAGE, INPUT_IMAGE, MOCK
if MOCK:
from tests.docker_mock import mock_docker
class Y(object):
pass
class X(object):
image_id = INPUT_IMAGE
source = Y()
source.dockerfile_path = None
source.path = None
base_image = ImageName(repo="qwe", tag="asd")
image = ImageName.parse("test-image:unique_tag_123")
def test_tag_by_labels_plugin(tmpdir):
if MOCK:
mock_docker()
tasker = DockerTasker()
workflow = DockerBuildWorkflow({"provider": "git", "uri": "asd"}, "test-image")
version = "1.0"
release = "1"
workflow.built_image_inspect = {
INSPECT_CONFIG: {
"Labels": {
"Name": TEST_IMAGE,
"Version": version,
"Release": release
}
}
}
workflow.push_conf.add_docker_registry(LOCALHOST_REGISTRY, insecure=True)
image = ImageName(repo=TEST_IMAGE,
tag="%s_%s" % (version, release),
registry=LOCALHOST_REGISTRY)
setattr(workflow, 'builder', X)
runner = PostBuildPluginsRunner(
tasker,
workflow,
[{
'name': TagByLabelsPlugin.key,
}]
)
output = runner.run()
assert TagByLabelsPlugin.key in output.keys()
assert len(workflow.tag_conf.images) == 4
images = [i.to_str() for i in workflow.tag_conf.images]
primary_images = [i.to_str() for i in workflow.tag_conf.primary_images]
unique_images = [i.to_str() for i in workflow.tag_conf.unique_images]
assert ("%s:%s" % (TEST_IMAGE, "unique_tag_123")) in images
assert ("%s:%s-%s" % (TEST_IMAGE, version, release)) in images
assert ("%s:%s" % (TEST_IMAGE, version)) in images
assert ("%s:latest" % (TEST_IMAGE, )) in images
assert ("%s:%s" % (TEST_IMAGE, "unique_tag_123")) in unique_images
assert ("%s:%s-%s" % (TEST_IMAGE, version, release)) in primary_images
assert ("%s:%s" % (TEST_IMAGE, version)) in primary_images
assert ("%s:latest" % (TEST_IMAGE, )) in primary_images
tasker.remove_image(image)
|
"""
Django dummy settings for docs project.
"""
import os
import sys
sys.path.insert(0, os.getcwd())
sys.path.insert(0, os.path.join(os.getcwd(), os.pardir))
SITE_ID = 666
DEBUG = True
TEMPLATE_DEBUG = DEBUG
DATABASES = {"default": {
"NAME": ":memory:",
"ENGINE": "django.db.backends.sqlite3",
}}
INSTALLED_APPS = (
'django.contrib.auth',
'django.contrib.contenttypes',
'django.contrib.sessions',
'django.contrib.sites',
'dajaxice',
'editlive',
)
|
from fabric.api import *
def ssh_config(host):
from os.path import expanduser
from paramiko.config import SSHConfig
def hostinfo(host, config):
hive = config.lookup(host)
if 'hostname' in hive:
host = hive['hostname']
if 'user' in hive:
host = '%s@%s' % (hive['user'], host)
if 'port' in hive:
host = '%s:%s' % (host, hive['port'])
return host
    try:
        config_file = open(expanduser('~/.ssh/config'))
    except IOError:
        return host
else:
config = SSHConfig()
config.parse(config_file)
key = config.lookup(host).get('identityfile', None)
        key_filename = expanduser(key) if key else None
        env.key_filename = [key_filename] if key_filename else []
return hostinfo(host, config)
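# Typical usage (the 'staging' alias is hypothetical and must exist in
# ~/.ssh/config):
#   env.hosts = [ssh_config('staging')]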
|
from __future__ import unicode_literals, print_function, division
from django.db import models
from editor_md.models import EditorMdField
class Blog(models.Model):
    title = models.CharField(max_length=100, verbose_name="title", blank=True)
    content = EditorMdField(imagepath="editor_md_image/", verbose_name="article content", blank=True)
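# A minimal admin registration sketch (hypothetical; assumes the standard
# Django admin is installed):
#
#   from django.contrib import admin
#   admin.site.register(Blog)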
|