code
stringlengths 1
199k
|
|---|
# Thin CLI wrapper around common.delete(): deletes the object at `url`
# using API `key`, with an optional purge flag.  Python 2 script.
import os, sys
# Make sibling module `common` importable when run from anywhere.
sys.path.insert( 0, os.path.dirname( __file__ ) )
from common import delete
try:
    # Require at least the key (argv[1]) and url (argv[2]) arguments;
    # indexing past the end raises IndexError and triggers the usage text.
    assert sys.argv[2]
except IndexError:
    print 'usage: %s key url [purge (true/false)] ' % os.path.basename( sys.argv[0] )
    sys.exit( 1 )
try:
    data = {}
    # Optional third argument is forwarded as the 'purge' payload field.
    data[ 'purge' ] = sys.argv[3]
except IndexError:
    # No purge flag supplied; send the payload without it.
    pass
delete( sys.argv[1], sys.argv[2], data )
|
import rospy
import time
from collections import deque
class Publisher(object):
    """Buffers (alias, message) pairs and publishes them in FIFO order
    through rospy publishers registered per alias."""
    def __init__(self):
        # Maps alias -> rospy publisher object.
        self.publishers = {}
        # FIFO buffer of (alias, msg) tuples awaiting publication.
        self.queue = deque()
    def add_publisher(self, alias, publisher):
        # Register (or replace) the publisher used for `alias`.
        self.publishers[alias] = publisher
    def publish(self):
        # Drain the queue, sending each message on its alias's publisher.
        # NOTE(review): raises KeyError if a queued alias was never
        # registered via add_publisher.
        while len(self.queue) > 0:
            alias, msg = self.queue.popleft()
            # Python 2 debug trace.
            print "publishing " + alias + ":" + str(msg)
            self.publishers[alias].publish(msg)
    def append(self, alias, msg):
        # Queue a message for later delivery by publish().
        self.queue.append((alias, msg))
|
import os, h5py, numpy
from scipy.sparse import csc_matrix
import ml2h5.task
from ml2h5 import VERSION_MLDATA
from ml2h5.converter import ALLOWED_SEPERATORS
class BaseHandler(object):
    """Base handler class.
    It is the base for classes to handle different data formats.
    It implicitely handles HDF5.
    @cvar str_type: string type to be used for variable length strings in h5py
    @type str_type: numpy.dtype
    @ivar fname: name of file to handle
    @type fname: string
    @ivar seperator: seperator to seperate variables in examples
    @type seperator: string
    """
    # Variable-length string dtype used when writing string data to HDF5.
    # NOTE(review): h5py.new_vlen and numpy.str are deprecated in current
    # h5py/numpy (h5py.special_dtype(vlen=str) is the modern spelling, as
    # used in _convert_to_ndarray below) — confirm target library versions.
    str_type = h5py.new_vlen(numpy.str)
    def __init__(self, fname, seperator=None, compression=None, merge=False):
        """
        @param fname: name of in-file
        @type fname: string
        @param seperator: seperator used to seperate examples
        @type seperator: string
        @param compression: h5py compression filter passed to
            create_dataset (e.g. 'gzip'), or None for no compression
        @param merge: when True, callers may use _get_merged() to collapse
            homogeneous numeric attributes into matrices
        """
        self.fname = fname
        self.compression = compression
        self.set_seperator(seperator)
        self.merge = merge
    def set_seperator(self, seperator):
        """Set the seperator to seperate variables in examples.
        @param seperator: seperator to use
        @type seperator: string
        @raise AttributeError: if the seperator is not in ALLOWED_SEPERATORS
        """
        if seperator in ALLOWED_SEPERATORS:
            self.seperator = seperator
        else:
            # NOTE(review): `_` (gettext-style) is not imported in this
            # module as far as visible here — this line would raise
            # NameError before the AttributeError; verify `_` is installed
            # as a builtin elsewhere in the project.
            raise AttributeError(_("Seperator '%s' not allowed!" % seperator))
    def warn(self, msg):
        """Print a warning message.
        @param msg: message to print
        @type msg: string
        """
        # NOTE(review): the early return deliberately disables the warning
        # output below; the print statement is dead code.
        return
        print('WARNING: ' + msg)
    def _convert_to_ndarray(self,path,val):
        """converts a attribut to a set of ndarrays depending on the datatype
        @param path: path of the attribute in the h5 file
        @type path: string
        @param val: data of the attribute
        @type val: csc_matrix/ndarray
        @rtype: list of (string,ndarray) tuples
        """
        A=val
        out=[]
        dt = h5py.special_dtype(vlen=str)
        if type(A)==csc_matrix: # sparse
            # Sparse matrices are flattened into three parallel datasets
            # (CSC representation): indices, indptr and the data itself.
            out.append((path+'_indices', A.indices))
            out.append((path+'_indptr', A.indptr))
            out.append((path, A.data))
        elif type(A)==list and len(A)>0 and type(A[0])==str:
            # Lists of strings become a variable-length string dataset.
            out.append((path, numpy.array(A, dtype=dt)))
        else: # dense
            out.append((path, numpy.array(A)))
        return out
    def get_data_as_list(self,data):
        """ this needs to `transpose' the data

        Convert the attribute-major structure returned by read() into a
        list of per-example rows.
        @param data: data structure as returned by read()
        @return: one list per example, each concatenating all attributes
        """
        dl=[]
        group=self.get_data_group(data)
        lengths=dict()
        # Determine the number of examples contributed by each attribute;
        # 2-d arrays count columns, everything else counts elements.
        for o in data['ordering']:
            x=data[group][o]
            #if numpy.issubdtype(x.dtype, numpy.int):
            #    data[group][o]=x.astype(numpy.float64)
            try:
                lengths[o]=data[group][o].shape[1]
            except (AttributeError, IndexError):
                lengths[o]=len(data[group][o])
        # All attributes must agree on the example count.
        l=set(lengths.values())
        assert(len(l)==1)
        l=l.pop()
        for i in range(l):
            line=[]
            for o in data['ordering']:
                # Column slice for matrices, single item otherwise.
                try:
                    line.extend(data[group][o][:,i])
                except:
                    line.append(data[group][o][i])
            dl.append(line)
        return dl
    def get_name(self):
        """Get dataset name from non-HDF5 file
        @return: comment
        @rtype: string
        """
        # without str() it might barf
        return str(os.path.basename(self.fname).split('.')[0])
    def get_data_group(self, data):
        # Name of the HDF5 group holding the data; defaults to 'data'
        # when the structure does not carry an explicit group.
        if data and 'group' in data:
            return data['group']
        else:
            return 'data'
    def get_descr_group(self, data):
        # Name of the companion description group ('<group>_descr').
        if data and 'group' in data:
            return data['group'] + '_descr'
        else:
            return 'data_descr'
    def get_datatype(self, values):
        """Get data type of given values.
        @param values: list of values to check
        @type values: list
        @return: data type to use for conversion
        @rtype: numpy.int32/numpy.double/self.str_type
        """
        dtype = None
        for v in values:
            if isinstance(v, int):
                dtype = numpy.int32
            elif isinstance(v, float):
                dtype = numpy.double
            else: # maybe int/double in string
                try:
                    tmp = int(v)
                    if not dtype: # a previous nan might set it to double
                        dtype = numpy.int32
                except ValueError:
                    try:
                        tmp = float(v)
                        dtype = numpy.double
                    except ValueError:
                        # Any non-numeric value forces string storage.
                        return self.str_type
        return dtype
    def read(self):
        """Get data and description in-memory
        Retrieve contents from file.
        @return: example names, ordering and the examples
        @rtype: dict of: list of names, list of ordering and dict of examples
        """
        # we want the exception handled elsewhere
        if not h5py.is_hdf5(self.fname):
            return
        h5 = h5py.File(self.fname, 'r')
        contents = {
            'name': h5.attrs['name'],
            'comment': h5.attrs['comment'],
            'mldata': h5.attrs['mldata'],
        }
        if contents['comment']=='Task file':
            # Task files keep their variables under /task, in the fixed
            # field order defined by ml2h5.task.task_data_fields.
            contents['task']=dict()
            contents['ordering']=list()
            group='task'
            for field in ml2h5.task.task_data_fields:
                if field in h5[group]:
                    contents['ordering'].append(field)
        else:
            # Regular data files store their ordering explicitly.
            contents['data']=dict()
            contents['ordering']=h5['/data_descr/ordering'][...].tolist()
            group='data'
        contents['group']=group
        if '/%s_descr/names' % group in h5:
            contents['names']=h5['/%s_descr/names' % group][...].tolist()
        if '/%s_descr/types' % group in h5:
            contents['types'] = h5['/%s_descr/types' % group ][...]
        for name in contents['ordering']:
            vname='/%s/%s' % (group, name)
            sp_indices=vname+'_indices'
            sp_indptr=vname+'_indptr'
            # The presence of companion _indices/_indptr datasets marks a
            # sparse (CSC) attribute; reassemble the csc_matrix from them.
            if sp_indices in h5['/%s' % group] and sp_indptr in h5['/%s' % group]:
                contents[group][name] = csc_matrix((h5[vname], h5[sp_indices], h5[sp_indptr]))
            else:
                d = numpy.array(h5[vname],order='F')
                # Unwrap variable-length string wrapper field if present;
                # a plain array has no 'vlen' field and raises, which we
                # deliberately ignore.
                try:
                    d=d['vlen']
                except:
                    pass
                contents[group][name] = d
        h5.close()
        return contents
    def read_data_as_array(self):
        """Read data from file, and return an array
        @return: an array with all data
        @rtype: numpy ndarray
        """
        contents = self.read()
        #group = self.get_data_group(data)
        data = contents['data']
        ordering = contents['ordering']
        # Example count: columns for 2-d attributes, length otherwise.
        if len(data[ordering[0]].shape)>1:
            num_examples = data[ordering[0]].shape[1]
        else:
            num_examples = len(data[ordering[0]])
        # Stack attributes row-wise, then transpose to examples-by-features.
        data_array = numpy.zeros((0, num_examples))
        for cur_feat in ordering:
            data_array = numpy.vstack([data_array, data[cur_feat]])
        return data_array.T
    def _get_merged(self, data):
        """Merge given data where appropriate.
        String arrays are not merged, but all int and all double are merged
        into one matrix.
        @param data: data structure as returned by read()
        @type data: dict
        @return: merged data structure
        @rtype: dict
        """
        merged = {}
        ordering = []
        path = ''
        idx = 0
        # Tracks the type ('int'/'double'/None) of the merge run currently
        # being accumulated; a None value breaks the run.
        merging = None
        group = self.get_data_group(data)
        for name in data['ordering']:
            val = data[group][name]
            # Sparse matrices are kept as-is and break any merge run.
            if type(val) == csc_matrix:
                merging = None
                path = name
                merged[path] = val
                ordering.append(path)
                continue
            # Sparse companion arrays are carried over but not ordered.
            if name.endswith('_indices') or name.endswith('_indptr'):
                merging = None
                path = name
                merged[path] = val
                continue
            if len(val) < 1: continue
            t = type(val[0])
            if t in [numpy.int32, numpy.int64]:
                # Consecutive int attributes accumulate into one matrix.
                if merging == 'int':
                    merged[path].append(val)
                else:
                    merging = 'int'
                    path = 'int' + str(idx)
                    ordering.append(path)
                    merged[path] = [val]
                    idx += 1
            elif t == numpy.double:
                # Consecutive double attributes accumulate likewise.
                if merging == 'double':
                    merged[path].append(val)
                else:
                    merging = 'double'
                    path = 'double' + str(idx)
                    ordering.append(path)
                    merged[path] = [val]
                    idx += 1
            else: # string or matrix
                merging = None
                if name.find('/') != -1: # / sep belongs to hdf5 path
                    path = name.replace('/', '+')
                    data['ordering'][data['ordering'].index(name)] = path
                else:
                    path = name
                ordering.append(path)
                merged[path] = val
        # Rebuild the group from the merge result; single-element runs are
        # unwrapped so they do not gain a spurious leading dimension.
        data[group] = {}
        for k in merged:
            if len(merged[k])==1:
                merged[k] = merged[k][0]
            data[group][k] = numpy.array(merged[k])
        data['ordering'] = ordering
        return data
    def write(self, data):
        """Write given data to HDF5 file.
        @param data: data to write to HDF5 file.
        @type data: dict of lists
        """
        # we want the exception handled elsewhere
        h5 = h5py.File(self.fname, 'w')
        h5.attrs['name'] = data['name']
        h5.attrs['mldata'] = VERSION_MLDATA
        h5.attrs['comment'] = data['comment']
        data_group = self.get_data_group(data)
        descr_group = self.get_descr_group(data)
        try:
            group = h5.create_group('/%s' % data_group)
            for path, val in data[data_group].items():
                # Each attribute may expand to several datasets (sparse).
                for path, val in self._convert_to_ndarray(path,val):
                    group.create_dataset(path, data=val, compression=self.compression)
            group = h5.create_group('/%s' % descr_group)
            names = numpy.array(data['names']).astype(self.str_type)
            if names.size > 0: # simple 'if names' throws exception if array
                group.create_dataset('names', data=names, compression=self.compression)
            ordering = numpy.array(data['ordering']).astype(self.str_type)
            if ordering.size > 0:
                group.create_dataset('ordering', data=ordering, compression=self.compression)
            if 'types' in data:
                types = numpy.array(data['types']).astype(self.str_type)
                group.create_dataset('types', data=types, compression=self.compression)
        except: # just do some clean-up
            # Remove the partially-written file so no corrupt HDF5 remains,
            # then re-raise for the caller to handle.
            h5.close()
            os.remove(self.fname)
            raise
        else:
            h5.close()
|
from __future__ import unicode_literals
import re
from hashlib import sha1
from .common import InfoExtractor
from ..compat import compat_str
from ..utils import (
ExtractorError,
determine_ext,
float_or_none,
int_or_none,
unified_strdate,
)
class ProSiebenSat1BaseIE(InfoExtractor):
    """Shared extraction logic for ProSiebenSat.1 Digital sites.

    Talks to the vas.sim-technik.de VAS API: clip metadata first, then the
    source list, then per-source stream URLs.  Each request after the first
    is signed with a salted SHA1 `client_id`.
    """
    def _extract_video_info(self, url, clip_id):
        client_location = url
        # Clip metadata: duration, DRM flag and available source ids.
        video = self._download_json(
            'http://vas.sim-technik.de/vas/live/v2/videos',
            clip_id, 'Downloading videos JSON', query={
                'access_token': self._TOKEN,
                'client_location': client_location,
                'client_name': self._CLIENT_NAME,
                'ids': clip_id,
            })[0]
        if video.get('is_protected') is True:
            raise ExtractorError('This video is DRM protected.', expected=True)
        duration = float_or_none(video.get('duration'))
        source_ids = [compat_str(source['id']) for source in video['sources']]
        # Request signature: first two salt chars + SHA1 of the salted,
        # concatenated request fields (order matters to the server).
        client_id = self._SALT[:2] + sha1(''.join([clip_id, self._SALT, self._TOKEN, client_location, self._SALT, self._CLIENT_NAME]).encode('utf-8')).hexdigest()
        sources = self._download_json(
            'http://vas.sim-technik.de/vas/live/v2/videos/%s/sources' % clip_id,
            clip_id, 'Downloading sources JSON', query={
                'access_token': self._TOKEN,
                'client_id': client_id,
                'client_location': client_location,
                'client_name': self._CLIENT_NAME,
            })
        server_id = sources['server_id']
        def fix_bitrate(bitrate):
            # The API reports some bitrates in bps; exact multiples of
            # 1000 are scaled down to kbps, other values pass through.
            bitrate = int_or_none(bitrate)
            if not bitrate:
                return None
            return (bitrate // 1000) if bitrate % 1000 == 0 else bitrate
        formats = []
        for source_id in source_ids:
            # Per-source signature uses a different field order than above.
            client_id = self._SALT[:2] + sha1(''.join([self._SALT, clip_id, self._TOKEN, server_id, client_location, source_id, self._SALT, self._CLIENT_NAME]).encode('utf-8')).hexdigest()
            urls = self._download_json(
                'http://vas.sim-technik.de/vas/live/v2/videos/%s/sources/url' % clip_id,
                clip_id, 'Downloading urls JSON', fatal=False, query={
                    'access_token': self._TOKEN,
                    'client_id': client_id,
                    'client_location': client_location,
                    'client_name': self._CLIENT_NAME,
                    'server_id': server_id,
                    'source_ids': source_id,
                })
            if not urls:
                continue
            if urls.get('status_code') != 0:
                raise ExtractorError('This video is unavailable', expected=True)
            urls_sources = urls['sources']
            # Some responses key the sources by id; only the values matter.
            if isinstance(urls_sources, dict):
                urls_sources = urls_sources.values()
            for source in urls_sources:
                source_url = source.get('url')
                if not source_url:
                    continue
                protocol = source.get('protocol')
                mimetype = source.get('mimetype')
                # Dispatch on delivery type: HDS (f4m), HLS (m3u8),
                # DASH (mpd), then RTMP or plain HTTP progressive.
                if mimetype == 'application/f4m+xml' or 'f4mgenerator' in source_url or determine_ext(source_url) == 'f4m':
                    formats.extend(self._extract_f4m_formats(
                        source_url, clip_id, f4m_id='hds', fatal=False))
                elif mimetype == 'application/x-mpegURL':
                    formats.extend(self._extract_m3u8_formats(
                        source_url, clip_id, 'mp4', 'm3u8_native',
                        m3u8_id='hls', fatal=False))
                elif mimetype == 'application/dash+xml':
                    formats.extend(self._extract_mpd_formats(
                        source_url, clip_id, mpd_id='dash', fatal=False))
                else:
                    tbr = fix_bitrate(source['bitrate'])
                    if protocol in ('rtmp', 'rtmpe'):
                        # Split the RTMP URL into app and play path around
                        # the 'mp4:' marker, as the player expects.
                        mobj = re.search(r'^(?P<url>rtmpe?://[^/]+)/(?P<path>.+)$', source_url)
                        if not mobj:
                            continue
                        path = mobj.group('path')
                        mp4colon_index = path.rfind('mp4:')
                        app = path[:mp4colon_index]
                        play_path = path[mp4colon_index:]
                        formats.append({
                            'url': '%s/%s' % (mobj.group('url'), app),
                            'app': app,
                            'play_path': play_path,
                            'player_url': 'http://livepassdl.conviva.com/hf/ver/2.79.0.17083/LivePassModuleMain.swf',
                            'page_url': 'http://www.prosieben.de',
                            'tbr': tbr,
                            'ext': 'flv',
                            'format_id': 'rtmp%s' % ('-%d' % tbr if tbr else ''),
                        })
                    else:
                        formats.append({
                            'url': source_url,
                            'tbr': tbr,
                            'format_id': 'http%s' % ('-%d' % tbr if tbr else ''),
                        })
        self._sort_formats(formats)
        return {
            'duration': duration,
            'formats': formats,
        }
class ProSiebenSat1IE(ProSiebenSat1BaseIE):
    """Extractor for the ProSiebenSat.1 Digital family of sites.

    Scrapes the clip/playlist id and display metadata out of the webpage
    with the regex lists below, then delegates stream extraction to
    ProSiebenSat1BaseIE._extract_video_info.
    """
    IE_NAME = 'prosiebensat1'
    IE_DESC = 'ProSiebenSat.1 Digital'
    _VALID_URL = r'''(?x)
                    https?://
                        (?:www\.)?
                        (?:
                            (?:
                                prosieben(?:maxx)?|sixx|sat1(?:gold)?|kabeleins(?:doku)?|the-voice-of-germany|7tv|advopedia
                            )\.(?:de|at|ch)|
                            ran\.de|fem\.com|advopedia\.de
                        )
                        /(?P<id>.+)
                    '''
    # Test fixtures used by the youtube-dl test harness.
    _TESTS = [
        {
            # Tests changes introduced in https://github.com/rg3/youtube-dl/pull/6242
            # in response to fixing https://github.com/rg3/youtube-dl/issues/6215:
            # - malformed f4m manifest support
            # - proper handling of URLs starting with `https?://` in 2.0 manifests
            # - recursive child f4m manifests extraction
            'url': 'http://www.prosieben.de/tv/circus-halligalli/videos/218-staffel-2-episode-18-jahresrueckblick-ganze-folge',
            'info_dict': {
                'id': '2104602',
                'ext': 'mp4',
                'title': 'Episode 18 - Staffel 2',
                'description': 'md5:8733c81b702ea472e069bc48bb658fc1',
                'upload_date': '20131231',
                'duration': 5845.04,
            },
        },
        {
            'url': 'http://www.prosieben.de/videokatalog/Gesellschaft/Leben/Trends/video-Lady-Umstyling-f%C3%BCr-Audrina-Rebekka-Audrina-Fergen-billig-aussehen-Battal-Modica-700544.html',
            'info_dict': {
                'id': '2570327',
                'ext': 'mp4',
                'title': 'Lady-Umstyling für Audrina',
                'description': 'md5:4c16d0c17a3461a0d43ea4084e96319d',
                'upload_date': '20131014',
                'duration': 606.76,
            },
            'params': {
                # rtmp download
                'skip_download': True,
            },
            'skip': 'Seems to be broken',
        },
        {
            'url': 'http://www.prosiebenmaxx.de/tv/experience/video/144-countdown-fuer-die-autowerkstatt-ganze-folge',
            'info_dict': {
                'id': '2429369',
                'ext': 'mp4',
                'title': 'Countdown für die Autowerkstatt',
                'description': 'md5:809fc051a457b5d8666013bc40698817',
                'upload_date': '20140223',
                'duration': 2595.04,
            },
            'params': {
                # rtmp download
                'skip_download': True,
            },
            'skip': 'This video is unavailable',
        },
        {
            'url': 'http://www.sixx.de/stars-style/video/sexy-laufen-in-ugg-boots-clip',
            'info_dict': {
                'id': '2904997',
                'ext': 'mp4',
                'title': 'Sexy laufen in Ugg Boots',
                'description': 'md5:edf42b8bd5bc4e5da4db4222c5acb7d6',
                'upload_date': '20140122',
                'duration': 245.32,
            },
            'params': {
                # rtmp download
                'skip_download': True,
            },
            'skip': 'This video is unavailable',
        },
        {
            'url': 'http://www.sat1.de/film/der-ruecktritt/video/im-interview-kai-wiesinger-clip',
            'info_dict': {
                'id': '2906572',
                'ext': 'mp4',
                'title': 'Im Interview: Kai Wiesinger',
                'description': 'md5:e4e5370652ec63b95023e914190b4eb9',
                'upload_date': '20140203',
                'duration': 522.56,
            },
            'params': {
                # rtmp download
                'skip_download': True,
            },
            'skip': 'This video is unavailable',
        },
        {
            'url': 'http://www.kabeleins.de/tv/rosins-restaurants/videos/jagd-auf-fertigkost-im-elsthal-teil-2-ganze-folge',
            'info_dict': {
                'id': '2992323',
                'ext': 'mp4',
                'title': 'Jagd auf Fertigkost im Elsthal - Teil 2',
                'description': 'md5:2669cde3febe9bce13904f701e774eb6',
                'upload_date': '20141014',
                'duration': 2410.44,
            },
            'params': {
                # rtmp download
                'skip_download': True,
            },
            'skip': 'This video is unavailable',
        },
        {
            'url': 'http://www.ran.de/fussball/bundesliga/video/schalke-toennies-moechte-raul-zurueck-ganze-folge',
            'info_dict': {
                'id': '3004256',
                'ext': 'mp4',
                'title': 'Schalke: Tönnies möchte Raul zurück',
                'description': 'md5:4b5b271d9bcde223b54390754c8ece3f',
                'upload_date': '20140226',
                'duration': 228.96,
            },
            'params': {
                # rtmp download
                'skip_download': True,
            },
            'skip': 'This video is unavailable',
        },
        {
            'url': 'http://www.the-voice-of-germany.de/video/31-andreas-kuemmert-rocket-man-clip',
            'info_dict': {
                'id': '2572814',
                'ext': 'mp4',
                'title': 'Andreas Kümmert: Rocket Man',
                'description': 'md5:6ddb02b0781c6adf778afea606652e38',
                'upload_date': '20131017',
                'duration': 469.88,
            },
            'params': {
                'skip_download': True,
            },
        },
        {
            'url': 'http://www.fem.com/wellness/videos/wellness-video-clip-kurztripps-zum-valentinstag.html',
            'info_dict': {
                'id': '2156342',
                'ext': 'mp4',
                'title': 'Kurztrips zum Valentinstag',
                'description': 'Romantischer Kurztrip zum Valentinstag? Nina Heinemann verrät, was sich hier wirklich lohnt.',
                'duration': 307.24,
            },
            'params': {
                'skip_download': True,
            },
        },
        {
            'url': 'http://www.prosieben.de/tv/joko-gegen-klaas/videos/playlists/episode-8-ganze-folge-playlist',
            'info_dict': {
                'id': '439664',
                'title': 'Episode 8 - Ganze Folge - Playlist',
                'description': 'md5:63b8963e71f481782aeea877658dec84',
            },
            'playlist_count': 2,
            'skip': 'This video is unavailable',
        },
        {
            'url': 'http://www.7tv.de/circus-halligalli/615-best-of-circus-halligalli-ganze-folge',
            'info_dict': {
                'id': '4187506',
                'ext': 'mp4',
                'title': 'Best of Circus HalliGalli',
                'description': 'md5:8849752efd90b9772c9db6fdf87fb9e9',
                'upload_date': '20151229',
            },
            'params': {
                'skip_download': True,
            },
        },
        {
            # geo restricted to Germany
            'url': 'http://www.kabeleinsdoku.de/tv/mayday-alarm-im-cockpit/video/102-notlandung-im-hudson-river-ganze-folge',
            'only_matching': True,
        },
        {
            # geo restricted to Germany
            'url': 'http://www.sat1gold.de/tv/edel-starck/video/11-staffel-1-episode-1-partner-wider-willen-ganze-folge',
            'only_matching': True,
        },
        {
            'url': 'http://www.sat1gold.de/tv/edel-starck/playlist/die-gesamte-1-staffel',
            'only_matching': True,
        },
        {
            'url': 'http://www.advopedia.de/videos/lenssen-klaert-auf/lenssen-klaert-auf-folge-8-staffel-3-feiertage-und-freie-tage',
            'only_matching': True,
        },
    ]
    # Credentials used to sign VAS API requests (see the base class).
    _TOKEN = 'prosieben'
    _SALT = '01!8d8F_)r9]4s[qeuXfP%'
    _CLIENT_NAME = 'kolibri-2.0.19-splec4'
    # The regex lists below are tried in order against the webpage HTML;
    # the first match wins (different site skins use different markup).
    _CLIPID_REGEXES = [
        r'"clip_id"\s*:\s+"(\d+)"',
        r'clipid: "(\d+)"',
        r'clip[iI]d=(\d+)',
        r'clip[iI]d\s*=\s*["\'](\d+)',
        r"'itemImageUrl'\s*:\s*'/dynamic/thumbnails/full/\d+/(\d+)",
    ]
    _TITLE_REGEXES = [
        r'<h2 class="subtitle" itemprop="name">\s*(.+?)</h2>',
        r'<header class="clearfix">\s*<h3>(.+?)</h3>',
        r'<!-- start video -->\s*<h1>(.+?)</h1>',
        r'<h1 class="att-name">\s*(.+?)</h1>',
        r'<header class="module_header">\s*<h2>([^<]+)</h2>\s*</header>',
        r'<h2 class="video-title" itemprop="name">\s*(.+?)</h2>',
        r'<div[^>]+id="veeseoTitle"[^>]*>(.+?)</div>',
    ]
    _DESCRIPTION_REGEXES = [
        r'<p itemprop="description">\s*(.+?)</p>',
        r'<div class="videoDecription">\s*<p><strong>Beschreibung</strong>: (.+?)</p>',
        r'<div class="g-plusone" data-size="medium"></div>\s*</div>\s*</header>\s*(.+?)\s*<footer>',
        r'<p class="att-description">\s*(.+?)\s*</p>',
        r'<p class="video-description" itemprop="description">\s*(.+?)</p>',
        r'<div[^>]+id="veeseoDescription"[^>]*>(.+?)</div>',
    ]
    _UPLOAD_DATE_REGEXES = [
        r'<meta property="og:published_time" content="(.+?)">',
        r'<span>\s*(\d{2}\.\d{2}\.\d{4} \d{2}:\d{2}) \|\s*<span itemprop="duration"',
        r'<footer>\s*(\d{2}\.\d{2}\.\d{4}) \d{2}:\d{2} Uhr',
        r'<span style="padding-left: 4px;line-height:20px; color:#404040">(\d{2}\.\d{2}\.\d{4})</span>',
        r'(\d{2}\.\d{2}\.\d{4}) \| \d{2}:\d{2} Min<br/>',
    ]
    _PAGE_TYPE_REGEXES = [
        r'<meta name="page_type" content="([^"]+)">',
        r"'itemType'\s*:\s*'([^']*)'",
    ]
    _PLAYLIST_ID_REGEXES = [
        r'content[iI]d=(\d+)',
        r"'itemId'\s*:\s*'([^']*)'",
    ]
    _PLAYLIST_CLIP_REGEXES = [
        r'(?s)data-qvt=.+?<a href="([^"]+)"',
    ]
    def _extract_clip(self, url, webpage):
        """Extract a single clip: scrape id/title/description/date from the
        page, fetch formats via the base class, and merge the results."""
        clip_id = self._html_search_regex(
            self._CLIPID_REGEXES, webpage, 'clip id')
        title = self._html_search_regex(self._TITLE_REGEXES, webpage, 'title')
        info = self._extract_video_info(url, clip_id)
        description = self._html_search_regex(
            self._DESCRIPTION_REGEXES, webpage, 'description', default=None)
        if description is None:
            # Fall back to the OpenGraph description meta tag.
            description = self._og_search_description(webpage)
        thumbnail = self._og_search_thumbnail(webpage)
        upload_date = unified_strdate(self._html_search_regex(
            self._UPLOAD_DATE_REGEXES, webpage, 'upload date', default=None))
        info.update({
            'id': clip_id,
            'title': title,
            'description': description,
            'thumbnail': thumbnail,
            'upload_date': upload_date,
        })
        return info
    def _extract_playlist(self, url, webpage):
        """Extract a playlist page: parse the embedded contentResources
        JSON and build one entry per contained clip."""
        playlist_id = self._html_search_regex(
            self._PLAYLIST_ID_REGEXES, webpage, 'playlist id')
        playlist = self._parse_json(
            self._search_regex(
                r'var\s+contentResources\s*=\s*(\[.+?\]);\s*</script',
                webpage, 'playlist'),
            playlist_id)
        entries = []
        for item in playlist:
            clip_id = item.get('id') or item.get('upc')
            if not clip_id:
                continue
            info = self._extract_video_info(url, clip_id)
            info.update({
                'id': clip_id,
                'title': item.get('title') or item.get('teaser', {}).get('headline'),
                'description': item.get('teaser', {}).get('description'),
                'thumbnail': item.get('poster'),
                'duration': float_or_none(item.get('duration')),
                'series': item.get('tvShowTitle'),
                'uploader': item.get('broadcastPublisher'),
            })
            entries.append(info)
        return self.playlist_result(entries, playlist_id)
    def _real_extract(self, url):
        """Entry point: dispatch to clip or playlist extraction based on
        the page_type declared in the page (defaults to 'clip')."""
        video_id = self._match_id(url)
        webpage = self._download_webpage(url, video_id)
        page_type = self._search_regex(
            self._PAGE_TYPE_REGEXES, webpage,
            'page type', default='clip').lower()
        if page_type == 'clip':
            return self._extract_clip(url, webpage)
        elif page_type == 'playlist':
            return self._extract_playlist(url, webpage)
        else:
            raise ExtractorError(
                'Unsupported page type %s' % page_type, expected=True)
|
from random import *
import numpy
import pdb
import cPickle
import bz2
import sys
import pylab
import nupic.bindings.algorithms as algo
from nupic.bindings.math import GetNumpyDataType
# dtype used for all sample arrays below.
# NOTE(review): `type` shadows the builtin of the same name, and the
# GetNumpyDataType result is immediately overwritten by the 'float32'
# literal, making the first assignment dead code.  Renaming would require
# touching every use in this file, so it is only flagged here.
type = GetNumpyDataType('NTA_Real')
type = 'float32'
def simple():
    """Train a dense SVM on four 2-D ring clusters and exercise the full
    nupic svm API: problem/model inspection, training, prediction,
    probability estimates and cross-validation.  Python 2 demo script."""
    print "Simple"
    numpy.random.seed(42)
    n_dims = 2
    n_class = 4
    size = 200
    # One random class label per sample; classes correspond to the four
    # corners of the unit square (scaled by 2 below).
    labels = numpy.random.random_integers(0, n_class-1, size)
    samples = numpy.zeros((size, n_dims), dtype=type)
    do_plot = False
    print "Generating data"
    centers = numpy.array([[0,0],[0,1],[1,0],[1,1]])
    for i in range(0, size):
        # Scatter each sample on a small random ring around its center.
        t = 6.28 * numpy.random.random_sample()
        samples[i][0] = 2 * centers[labels[i]][0] + .5*numpy.random.random() * numpy.cos(t)
        samples[i][1] = 2 * centers[labels[i]][1] + .5*numpy.random.random() * numpy.sin(t)
    classifier = algo.svm_dense(0, n_dims, probability=True, seed=42)
    print "Adding sample vectors"
    for y, x_list in zip(labels, samples):
        x = numpy.array(x_list, dtype=type)
        classifier.add_sample(float(y), x)
    print "Displaying problem"
    problem = classifier.get_problem()
    print "Problem size:", problem.size()
    print "Problem dimensionality:", problem.n_dims()
    print "Problem samples:"
    # First column of s is the label, remaining columns are the features.
    s = numpy.zeros((problem.size(), problem.n_dims()+1), dtype=type)
    problem.get_samples(s)
    print s
    if do_plot:
        pylab.ion()
        pylab.plot(s[s[:,0]==0,1], s[s[:,0]==0,2], '.', color='r')
        pylab.plot(s[s[:,0]==1,1], s[s[:,0]==1,2], '+', color='b')
        pylab.plot(s[s[:,0]==2,1], s[s[:,0]==2,2], '^', color='g')
        pylab.plot(s[s[:,0]==3,1], s[s[:,0]==3,2], 'v', color='g')
    print "Training"
    classifier.train(gamma = 1./3., C = 100, eps=1e-1)
    print "Displaying model"
    model = classifier.get_model()
    print "Number of support vectors:", model.size()
    print "Number of classes:", model.n_class()
    print "Number of dimensions: ", model.n_dims()
    print "Support vectors:"
    sv = numpy.zeros((model.size(), model.n_dims()), dtype=type)
    model.get_support_vectors(sv)
    print sv
    if do_plot:
        pylab.plot(sv[:,0], sv[:,1], 'o', color='g')
    print "Support vector coefficients:"
    svc = numpy.zeros((model.n_class()-1, model.size()), dtype=type)
    model.get_support_vector_coefficients(svc)
    print svc
    print "Hyperplanes (for linear kernel only):"
    h = model.get_hyperplanes()
    print h
    if do_plot:
        # Evaluate class-0 probability on a 10x10 grid over the sample
        # bounding box and draw the decision contours.
        xmin = numpy.min(samples[:,0])
        xmax = numpy.max(samples[:,0])
        xstep = (xmax - xmin) / 10
        X = numpy.arange(xmin, xmax, xstep)
        ymin = numpy.min(samples[:,1])
        ymax = numpy.max(samples[:,1])
        ystep = (ymax - ymin) / 10
        Y = numpy.arange(ymin, ymax, ystep)
        points = numpy.zeros((len(X), len(Y)))
        for i,x in enumerate(X):
            for j,y in enumerate(Y):
                proba = numpy.zeros(model.n_class(), dtype=type)
                classifier.predict_probability(numpy.array([x,y]), proba)
                points[i,j] = proba[0]
        pylab.contour(X,Y,points)
    print "Cross-validation"
    print classifier.cross_validate(2, gamma = .5, C = 10, eps = 1e-3)
    print "Predicting"
    for y, x_list in zip(labels, samples):
        x = numpy.array(x_list, dtype=type)
        proba = numpy.zeros(model.n_class(), dtype=type)
        print x, ': real=', y,
        print 'p1=', classifier.predict(x),
        print 'p2=', classifier.predict_probability(x, proba),
        print 'proba=', proba
    print "Discarding problem"
    # Dropping the training problem frees memory; predictions must still
    # work from the retained model.
    classifier.discard_problem()
    print "Predicting after discarding the problem"
    for y, x_list in zip(labels, samples):
        x = numpy.array(x_list, dtype=type)
        proba = numpy.zeros(model.n_class(), dtype=type)
        print x, ': real=', y,
        print 'p1=', classifier.predict(x),
        print 'p2=', classifier.predict_probability(x, proba),
        print 'proba=', proba
def persistence():
    """Check that svm_dense and svm_01 classifiers survive a pickle
    round-trip, both before training (dense) and after training (0/1)."""
    print "Persistence"
    numpy.random.seed(42)
    n_dims = 2
    n_class = 12
    size = 100
    labels = numpy.random.random_integers(0, 256, size)
    samples = numpy.zeros((size, n_dims), dtype=type)
    print "Generating data"
    for i in range(0, size):
        # Samples lie on small rings along the diagonal, one per label.
        t = 6.28 * numpy.random.random_sample()
        samples[i][0] = 2 * labels[i] + 1.5 * numpy.cos(t)
        samples[i][1] = 2 * labels[i] + 1.5 * numpy.sin(t)
    print "Creating dense classifier"
    classifier = algo.svm_dense(0, n_dims = n_dims, seed=42)
    print "Adding sample vectors to dense classifier"
    for y, x_list in zip(labels, samples):
        x = numpy.array(x_list, dtype=type)
        classifier.add_sample(float(y), x)
    print "Pickling dense classifier"
    # Round-trip through a file named 'test' in the working directory.
    cPickle.dump(classifier, open('test', 'wb'))
    classifier = cPickle.load(open('test', 'rb'))
    print "Training dense classifier"
    classifier.train(gamma = 1, C = 10, eps=1e-1)
    print "Predicting with dense classifier"
    print classifier.predict(samples[0])
    print "Creating 0/1 classifier"
    classifier01 = algo.svm_01(n_dims = n_dims, seed=42)
    print "Adding sample vectors to 0/1 classifier"
    for y, x_list in zip(labels, samples):
        x = numpy.array(x_list, dtype=type)
        classifier01.add_sample(float(y), x)
    print "Training 0/1 classifier"
    classifier01.train(gamma = 1./3., C = 100, eps=1e-1)
    print "Pickling 0/1 classifier"
    # This round-trip happens after training, unlike the dense case above.
    cPickle.dump(classifier01, open('test', 'wb'))
    classifier01 = cPickle.load(open('test', 'rb'))
    print "Predicting with 0/1 classifier"
    print classifier01.predict(numpy.array(samples[0], dtype=type))
def cross_validation():
    """Cross-validate a tiny hand-written 3-D problem.

    NOTE(review): the early return below deliberately disables this test;
    everything after it is dead code kept for reference."""
    return
    print "Cross validation"
    numpy.random.seed(42)
    labels = [0, 1, 1, 2, 1, 2]
    samples = [[0, 0, 0], [0, 1, 0], [1, 0, 1], [1, 1, 1], [1, 1, 0], [0, 1, 1]]
    classifier = algo.svm_dense(0, n_dims = 3, seed=42)
    print "Adding sample vectors"
    for y, x_list in zip(labels, samples):
        x = numpy.array(x_list, dtype=type)
        classifier.add_sample(float(y), x)
    # Pickle round-trip before training, as in persistence().
    cPickle.dump(classifier, open('test', 'wb'))
    classifier = cPickle.load(open('test', 'rb'))
    print "Training"
    classifier.train(gamma = 1./3., C = 100, eps=1e-1)
    print "Cross validation =",
    print classifier.cross_validate(3, gamma = .5, C = 10, eps = 1e-3)
# Script entry: run each demo in sequence when the module is executed.
simple()
persistence()
cross_validation()
|
from cantilever_divingboard import *
# Optimize a diving-board cantilever design for the given frequency band,
# then report its performance.  All numeric values are SI units.
freq_min = 1e3
freq_max = 1e5
omega_min = 100e3
# Nine design parameters (dimensions in meters plus two dimensionless
# terms); see cantilever_divingboard for the exact parameter meaning.
initial_guess = (50e-6, 1e-6, 1e-6,
    30e-6, 1e-6, 1e-6, 500e-9, 5., 1e15)
# (min, max) bounds, one pair per design parameter above.
constraints = ((30e-6, 100e-6), (500e-9, 20e-6), (1e-6, 10e-6),
    (2e-6, 100e-6), (500e-9, 5e-6), (500e-9, 20e-6), (30e-9, 10e-6),
    (1., 10.), (1e15, 4e19))
x = optimize_cantilever(initial_guess, constraints, freq_min, freq_max, omega_min)
c = cantilever_divingboard(freq_min, freq_max, x)
c.print_performance()
|
"""
Django settings for spa_movies project.
Generated by 'django-admin startproject' using Django 1.9.7.
For more information on this file, see
https://docs.djangoproject.com/en/1.9/topics/settings/
For the full list of settings and their values, see
https://docs.djangoproject.com/en/1.9/ref/settings/
"""
import os
BASE_DIR = os.path.dirname(os.path.dirname(os.path.abspath(__file__)))
SECRET_KEY = 'ss!@cvdm$38bkbuk5hw!_csg(_@kfl3_)3vi$!@_2q(f!l1q!q'
DEBUG = True
ALLOWED_HOSTS = ['davgibbs.pythonanywhere.com', '127.0.0.1']
SECURE_SSL_REDIRECT = True
INSTALLED_APPS = [
'django.contrib.admin',
'django.contrib.auth',
'django.contrib.contenttypes',
'django.contrib.sessions',
'django.contrib.messages',
'django.contrib.staticfiles',
'rest_framework',
'rest_framework.authtoken',
'rest_auth',
'movies.apps.MoviesConfig',
'rest_framework_swagger',
]
MIDDLEWARE = [
'django.middleware.security.SecurityMiddleware',
'django.contrib.sessions.middleware.SessionMiddleware',
'django.middleware.common.CommonMiddleware',
'django.middleware.csrf.CsrfViewMiddleware',
'django.contrib.auth.middleware.AuthenticationMiddleware',
'django.contrib.auth.middleware.SessionAuthenticationMiddleware',
'django.contrib.messages.middleware.MessageMiddleware',
'django.middleware.clickjacking.XFrameOptionsMiddleware',
]
ROOT_URLCONF = 'spa_movies.urls'
TEMPLATES = [
{
'BACKEND': 'django.template.backends.django.DjangoTemplates',
'DIRS': [],
'APP_DIRS': True,
'OPTIONS': {
'context_processors': [
'django.template.context_processors.debug',
'django.template.context_processors.request',
'django.contrib.auth.context_processors.auth',
'django.contrib.messages.context_processors.messages',
],
},
},
]
WSGI_APPLICATION = 'spa_movies.wsgi.application'
DATABASES = {
'default': {
'ENGINE': 'django.db.backends.sqlite3',
'NAME': os.path.join(BASE_DIR, 'db.sqlite3'),
}
}
AUTH_PASSWORD_VALIDATORS = [
{
'NAME': 'django.contrib.auth.password_validation.UserAttributeSimilarityValidator',
},
{
'NAME': 'django.contrib.auth.password_validation.MinimumLengthValidator',
},
{
'NAME': 'django.contrib.auth.password_validation.CommonPasswordValidator',
},
{
'NAME': 'django.contrib.auth.password_validation.NumericPasswordValidator',
},
]
LANGUAGE_CODE = 'en-us'
TIME_ZONE = 'UTC'
USE_I18N = True
USE_L10N = True
USE_TZ = True
STATIC_URL = '/static/'
STATIC_ROOT = os.path.join(BASE_DIR, "static")
MEDIA_URL = '/media/'
MEDIA_ROOT = os.path.join(BASE_DIR, 'media')
SESSION_COOKIE_AGE = 60 * 60 * 3
REST_FRAMEWORK = {
'DEFAULT_PERMISSION_CLASSES': (
'rest_framework.permissions.IsAuthenticatedOrReadOnly',
)
}
|
# Nagios livestatus endpoints: candidate hostnames to query, and the TCP
# port the livestatus socket listens on.
HOST = ['nagios', 'nagios1']
PORT = 6557
|
from module.plugins.internal.XFSHoster import XFSHoster, create_getInfo
class SendmywayCom(XFSHoster):
    """SendMyWay hoster plugin for pyLoad (XFileSharing-based site)."""
    __name__ = "SendmywayCom"
    __type__ = "hoster"
    __version__ = "0.04"
    __pattern__ = r'http://(?:www\.)?sendmyway\.com/\w{12}'
    __description__ = """SendMyWay hoster plugin"""
    __license__ = "GPLv3"
    __authors__ = [("zoidberg", "zoidberg@mujmail.cz")]
    # XFSHoster configuration: site domain plus the regexes used to scrape
    # the file name (group N) and size in bytes (group S) from the page.
    HOSTER_DOMAIN = "sendmyway.com"
    NAME_PATTERN = r'<p class="file-name" ><.*?>\s*(?P<N>.+)'
    SIZE_PATTERN = r'<small>\((?P<S>\d+) bytes\)</small>'
# Module-level hook used by pyLoad to probe link status for this hoster.
getInfo = create_getInfo(SendmywayCom)
|
from __future__ import unicode_literals
from frappe import _
# ERPNext app metadata and hook registrations consumed by the Frappe framework.
app_name = "erpnext"
app_title = "ERPNext"
app_publisher = "Frappe Technologies Pvt. Ltd."
app_description = """ERP made simple"""
app_icon = "fa fa-th"
app_color = "#e74c3c"
app_email = "info@erpnext.com"
app_license = "GNU General Public License (v3)"
source_link = "https://github.com/frappe/erpnext"
develop_version = '12.x.x-develop'
# Bundled JS/CSS injected on desk (app_*) and website (web_*) pages.
app_include_js = "assets/js/erpnext.min.js"
app_include_css = "assets/css/erpnext.css"
web_include_js = "assets/js/erpnext-web.min.js"
web_include_css = "assets/css/erpnext-web.css"
# Extra client scripts attached to core doctypes.
doctype_js = {
	"Communication": "public/js/communication.js",
	"Event": "public/js/event.js"
}
welcome_email = "erpnext.setup.utils.welcome_email"
# Setup-wizard assets and callbacks.
setup_wizard_requires = "assets/erpnext/js/setup_wizard.js"
setup_wizard_stages = "erpnext.setup.setup_wizard.setup_wizard.get_setup_stages"
setup_wizard_test = "erpnext.setup.setup_wizard.test_setup_wizard.run_setup_wizard_test"
# Install and session lifecycle hooks.
before_install = "erpnext.setup.install.check_setup_wizard_not_completed"
after_install = "erpnext.setup.install.after_install"
boot_session = "erpnext.startup.boot.boot_session"
notification_config = "erpnext.startup.notifications.get_notification_config"
get_help_messages = "erpnext.utilities.activation.get_help_messages"
get_user_progress_slides = "erpnext.utilities.user_progress.get_user_progress_slides"
update_and_get_user_progress = "erpnext.utilities.user_progress_utils.update_default_domain_actions_and_get_state"
# Shopping-cart counters maintained per session.
on_session_creation = "erpnext.shopping_cart.utils.set_cart_count"
on_logout = "erpnext.shopping_cart.utils.clear_cart_count"
# Doctypes rendered with a tree view instead of a flat list.
treeviews = ['Account', 'Cost Center', 'Warehouse', 'Item Group', 'Customer Group', 'Sales Person', 'Territory', 'Assessment Group']
update_website_context = "erpnext.shopping_cart.utils.update_website_context"
my_account_context = "erpnext.shopping_cart.utils.update_my_account_context"
# Doctypes that can be created from incoming e-mail threads.
email_append_to = ["Job Applicant", "Lead", "Opportunity", "Issue"]
# Doctypes exposed on the calendar view.
calendars = ["Task", "Work Order", "Leave Application", "Sales Order", "Holiday List", "Course Schedule"]
# Business-domain presets: selecting a domain applies the mapped module's settings.
domains = {
	'Agriculture': 'erpnext.domains.agriculture',
	'Distribution': 'erpnext.domains.distribution',
	'Education': 'erpnext.domains.education',
	'Healthcare': 'erpnext.domains.healthcare',
	'Hospitality': 'erpnext.domains.hospitality',
	'Manufacturing': 'erpnext.domains.manufacturing',
	'Non Profit': 'erpnext.domains.non_profit',
	'Retail': 'erpnext.domains.retail',
	'Services': 'erpnext.domains.services',
}
# Doctypes whose records can be published as website pages.
website_generators = ["Item Group", "Item", "BOM", "Sales Partner",
	"Job Opening", "Student Admission"]
# Default context injected into every website page render.
website_context = {
	"favicon": "/assets/erpnext/images/favicon.png",
	"splash_image": "/assets/erpnext/images/erp-icon.svg"
}
# Portal URL routing: list routes map to a doctype listing, <path:name> routes
# map to a detail template with breadcrumb "parents".
website_route_rules = [
	{"from_route": "/orders", "to_route": "Sales Order"},
	{"from_route": "/orders/<path:name>", "to_route": "order",
		"defaults": {
			"doctype": "Sales Order",
			"parents": [{"label": _("Orders"), "route": "orders"}]
		}
	},
	{"from_route": "/invoices", "to_route": "Sales Invoice"},
	{"from_route": "/invoices/<path:name>", "to_route": "order",
		"defaults": {
			"doctype": "Sales Invoice",
			"parents": [{"label": _("Invoices"), "route": "invoices"}]
		}
	},
	{"from_route": "/supplier-quotations", "to_route": "Supplier Quotation"},
	{"from_route": "/supplier-quotations/<path:name>", "to_route": "order",
		"defaults": {
			"doctype": "Supplier Quotation",
			"parents": [{"label": _("Supplier Quotation"), "route": "supplier-quotations"}]
		}
	},
	{"from_route": "/quotations", "to_route": "Quotation"},
	{"from_route": "/quotations/<path:name>", "to_route": "order",
		"defaults": {
			"doctype": "Quotation",
			"parents": [{"label": _("Quotations"), "route": "quotations"}]
		}
	},
	{"from_route": "/shipments", "to_route": "Delivery Note"},
	{"from_route": "/shipments/<path:name>", "to_route": "order",
		"defaults": {
			"doctype": "Delivery Note",
			"parents": [{"label": _("Shipments"), "route": "shipments"}]
		}
	},
	{"from_route": "/rfq", "to_route": "Request for Quotation"},
	{"from_route": "/rfq/<path:name>", "to_route": "rfq",
		"defaults": {
			"doctype": "Request for Quotation",
			"parents": [{"label": _("Request for Quotation"), "route": "rfq"}]
		}
	},
	{"from_route": "/addresses", "to_route": "Address"},
	{"from_route": "/addresses/<path:name>", "to_route": "addresses",
		"defaults": {
			"doctype": "Address",
			"parents": [{"label": _("Addresses"), "route": "addresses"}]
		}
	},
	{"from_route": "/jobs", "to_route": "Job Opening"},
	{"from_route": "/admissions", "to_route": "Student Admission"},
	{"from_route": "/boms", "to_route": "BOM"},
	{"from_route": "/timesheets", "to_route": "Timesheet"},
]
# Entries shown in the website portal sidebar; "role" restricts visibility.
# FIX: the "Timesheets" entry was listed twice; the duplicate was removed.
standard_portal_menu_items = [
	{"title": _("Personal Details"), "route": "/personal-details", "reference_doctype": "Patient", "role": "Patient"},
	{"title": _("Projects"), "route": "/project", "reference_doctype": "Project"},
	{"title": _("Request for Quotations"), "route": "/rfq", "reference_doctype": "Request for Quotation", "role": "Supplier"},
	{"title": _("Supplier Quotation"), "route": "/supplier-quotations", "reference_doctype": "Supplier Quotation", "role": "Supplier"},
	{"title": _("Quotations"), "route": "/quotations", "reference_doctype": "Quotation", "role":"Customer"},
	{"title": _("Orders"), "route": "/orders", "reference_doctype": "Sales Order", "role":"Customer"},
	{"title": _("Invoices"), "route": "/invoices", "reference_doctype": "Sales Invoice", "role":"Customer"},
	{"title": _("Shipments"), "route": "/shipments", "reference_doctype": "Delivery Note", "role":"Customer"},
	{"title": _("Issues"), "route": "/issues", "reference_doctype": "Issue", "role":"Customer"},
	{"title": _("Addresses"), "route": "/addresses", "reference_doctype": "Address"},
	{"title": _("Timesheets"), "route": "/timesheets", "reference_doctype": "Timesheet", "role":"Customer"},
	{"title": _("Lab Test"), "route": "/lab-test", "reference_doctype": "Lab Test", "role":"Patient"},
	{"title": _("Prescription"), "route": "/prescription", "reference_doctype": "Patient Encounter", "role":"Patient"},
	{"title": _("Patient Appointment"), "route": "/patient-appointments", "reference_doctype": "Patient Appointment", "role":"Patient"},
	{"title": _("Fees"), "route": "/fees", "reference_doctype": "Fees", "role":"Student"},
	{"title": _("Newsletter"), "route": "/newsletters", "reference_doctype": "Newsletter"},
	{"title": _("Admission"), "route": "/admissions", "reference_doctype": "Student Admission"},
	{"title": _("Certification"), "route": "/certification", "reference_doctype": "Certification Application"},
]
# Roles granted automatically when a user is linked via the given doctype/e-mail field.
default_roles = [
	{'role': 'Customer', 'doctype':'Contact', 'email_field': 'email_id'},
	{'role': 'Supplier', 'doctype':'Contact', 'email_field': 'email_id'},
	{'role': 'Student', 'doctype':'Student', 'email_field': 'student_email_id'},
]
# Per-doctype callbacks deciding whether a website user may see a record.
has_website_permission = {
	"Sales Order": "erpnext.controllers.website_list_for_contact.has_website_permission",
	"Quotation": "erpnext.controllers.website_list_for_contact.has_website_permission",
	"Sales Invoice": "erpnext.controllers.website_list_for_contact.has_website_permission",
	"Supplier Quotation": "erpnext.controllers.website_list_for_contact.has_website_permission",
	"Delivery Note": "erpnext.controllers.website_list_for_contact.has_website_permission",
	"Issue": "erpnext.support.doctype.issue.issue.has_website_permission",
	"Timesheet": "erpnext.controllers.website_list_for_contact.has_website_permission",
	"Lab Test": "erpnext.healthcare.web_form.lab_test.lab_test.has_website_permission",
	"Patient Encounter": "erpnext.healthcare.web_form.prescription.prescription.has_website_permission",
	"Patient Appointment": "erpnext.healthcare.web_form.patient_appointments.patient_appointments.has_website_permission",
	"Patient": "erpnext.healthcare.web_form.personal_details.personal_details.has_website_permission"
}
dump_report_map = "erpnext.startup.report_data_map.data_map"
# Called before the test runner to prepare fixtures.
before_tests = "erpnext.setup.utils.before_tests"
# Custom link-field search queries per doctype.
standard_queries = {
	"Customer": "erpnext.selling.doctype.customer.customer.get_customer_list",
	"Healthcare Practitioner": "erpnext.healthcare.doctype.healthcare_practitioner.healthcare_practitioner.get_practitioner_list"
}
# Document lifecycle hooks: {doctype (or tuple of doctypes): {event: dotted path(s)}}.
doc_events = {
	"Stock Entry": {
		"on_submit": "erpnext.stock.doctype.material_request.material_request.update_completed_and_requested_qty",
		"on_cancel": "erpnext.stock.doctype.material_request.material_request.update_completed_and_requested_qty"
	},
	"User": {
		"after_insert": "frappe.contacts.doctype.contact.contact.update_contact",
		"validate": "erpnext.hr.doctype.employee.employee.validate_employee_role",
		"on_update": ["erpnext.hr.doctype.employee.employee.update_user_permissions",
			"erpnext.portal.utils.set_default_role"]
	},
	("Sales Taxes and Charges Template", 'Price List'): {
		"on_update": "erpnext.shopping_cart.doctype.shopping_cart_settings.shopping_cart_settings.validate_cart_settings"
	},
	"Website Settings": {
		"validate": "erpnext.portal.doctype.products_settings.products_settings.home_page_is_products"
	},
	"Sales Invoice": {
		"on_submit": ["erpnext.regional.france.utils.create_transaction_log", "erpnext.regional.italy.utils.sales_invoice_on_submit"],
		"on_cancel": "erpnext.regional.italy.utils.sales_invoice_on_cancel",
		"on_trash": "erpnext.regional.check_deletion_permission"
	},
	"Payment Entry": {
		"on_submit": ["erpnext.regional.france.utils.create_transaction_log", "erpnext.accounts.doctype.payment_request.payment_request.make_status_as_paid"],
		"on_trash": "erpnext.regional.check_deletion_permission"
	},
	'Address': {
		'validate': ['erpnext.regional.india.utils.validate_gstin_for_india', 'erpnext.regional.italy.utils.set_state_code']
	},
	('Sales Invoice', 'Purchase Invoice', 'Delivery Note'): {
		'validate': 'erpnext.regional.india.utils.set_place_of_supply'
	},
	"Contact":{
		"on_trash": "erpnext.support.doctype.issue.issue.update_issue"
	}
}
# Background jobs grouped by scheduler frequency.
scheduler_events = {
	"all": [
		"erpnext.projects.doctype.project.project.project_status_update_reminder"
	],
	"hourly": [
		'erpnext.hr.doctype.daily_work_summary_group.daily_work_summary_group.trigger_emails',
		"erpnext.accounts.doctype.subscription.subscription.process_all",
		"erpnext.erpnext_integrations.doctype.amazon_mws_settings.amazon_mws_settings.schedule_get_order_details",
		"erpnext.projects.doctype.project.project.hourly_reminder",
		"erpnext.projects.doctype.project.project.collect_project_status"
	],
	"daily": [
		"erpnext.stock.reorder_item.reorder_item",
		"erpnext.setup.doctype.email_digest.email_digest.send",
		"erpnext.support.doctype.issue.issue.auto_close_tickets",
		"erpnext.crm.doctype.opportunity.opportunity.auto_close_opportunity",
		"erpnext.controllers.accounts_controller.update_invoice_status",
		"erpnext.accounts.doctype.fiscal_year.fiscal_year.auto_create_fiscal_year",
		"erpnext.hr.doctype.employee.employee.send_birthday_reminders",
		"erpnext.projects.doctype.task.task.set_tasks_as_overdue",
		"erpnext.assets.doctype.asset.depreciation.post_depreciation_entries",
		"erpnext.hr.doctype.daily_work_summary_group.daily_work_summary_group.send_summary",
		"erpnext.stock.doctype.serial_no.serial_no.update_maintenance_status",
		"erpnext.buying.doctype.supplier_scorecard.supplier_scorecard.refresh_scorecards",
		"erpnext.setup.doctype.company.company.cache_companies_monthly_sales_history",
		"erpnext.assets.doctype.asset.asset.update_maintenance_status",
		"erpnext.assets.doctype.asset.asset.make_post_gl_entry",
		"erpnext.crm.doctype.contract.contract.update_status_for_contracts",
		"erpnext.projects.doctype.project.project.update_project_sales_billing",
		"erpnext.projects.doctype.project.project.send_project_status_email_to_users"
	],
	# Long-running daily jobs, run in the long worker queue.
	"daily_long": [
		"erpnext.manufacturing.doctype.bom_update_tool.bom_update_tool.update_latest_price_in_all_boms"
	],
	"monthly": [
		"erpnext.accounts.deferred_revenue.convert_deferred_revenue_to_income",
		"erpnext.accounts.deferred_revenue.convert_deferred_expense_to_expense",
		"erpnext.hr.utils.allocate_earned_leaves"
	]
}
email_brand_image = "assets/erpnext/images/erpnext-logo.jpg"
default_mail_footer = """
<span>
Sent via
<a class="text-muted" href="https://erpnext.com?source=via_email_footer" target="_blank">
ERPNext
</a>
</span>
"""
get_translated_dict = {
("doctype", "Global Defaults"): "frappe.geo.country_info.get_translated_dict"
}
bot_parsers = [
'erpnext.utilities.bot.FindItemBot',
]
get_site_info = 'erpnext.utilities.get_site_info'
payment_gateway_enabled = "erpnext.accounts.utils.create_payment_gateway_account"
regional_overrides = {
'France': {
'erpnext.tests.test_regional.test_method': 'erpnext.regional.france.utils.test_method'
},
'India': {
'erpnext.tests.test_regional.test_method': 'erpnext.regional.india.utils.test_method',
'erpnext.controllers.taxes_and_totals.get_itemised_tax_breakup_header': 'erpnext.regional.india.utils.get_itemised_tax_breakup_header',
'erpnext.controllers.taxes_and_totals.get_itemised_tax_breakup_data': 'erpnext.regional.india.utils.get_itemised_tax_breakup_data',
'erpnext.accounts.party.get_regional_address_details': 'erpnext.regional.india.utils.get_regional_address_details',
'erpnext.hr.utils.calculate_annual_eligible_hra_exemption': 'erpnext.regional.india.utils.calculate_annual_eligible_hra_exemption',
'erpnext.hr.utils.calculate_hra_exemption_for_period': 'erpnext.regional.india.utils.calculate_hra_exemption_for_period'
},
'United Arab Emirates': {
'erpnext.controllers.taxes_and_totals.update_itemised_tax_data': 'erpnext.regional.united_arab_emirates.utils.update_itemised_tax_data'
},
'Saudi Arabia': {
'erpnext.controllers.taxes_and_totals.update_itemised_tax_data': 'erpnext.regional.united_arab_emirates.utils.update_itemised_tax_data'
},
'Italy': {
'erpnext.controllers.taxes_and_totals.update_itemised_tax_data': 'erpnext.regional.italy.utils.update_itemised_tax_data',
'erpnext.controllers.accounts_controller.validate_regional': 'erpnext.regional.italy.utils.sales_invoice_validate',
}
}
|
from __future__ import unicode_literals
from django.db import migrations, models
class Migration(migrations.Migration):
    """Add the nullable ``json`` text field to the ``pattern`` model."""
    dependencies = [
        ('pattern', '0014_pattern_editnumber'),
    ]
    operations = [
        migrations.AddField(
            model_name='pattern',
            name='json',
            # Nullable so existing rows need no default value.
            field=models.TextField(null=True),
        ),
    ]
|
import pyarchey.pyarchey as py
def test_slack():
    """readDistro must identify a Slackware release file."""
    expected = ('Slackware', 'Slackware 14.1')
    assert py.Output().readDistro('./test/slack.test') == expected
def test_arch():
    """readDistro must identify an Arch Linux release file."""
    expected = ('Arch Linux', 'Arch Linux')
    assert py.Output().readDistro('./test/arch.test') == expected
def test_raspbian():
    """readDistro must identify a Raspbian release file."""
    expected = ('Raspbian', 'Raspbian 7 (wheezy)')
    assert py.Output().readDistro('./test/raspbian.test') == expected
|
"""__Main__."""
import sys
import os
import logging
import argparse
import traceback
import shelve
from datetime import datetime
from CONSTANTS import CONSTANTS
from settings.settings import load_config, load_core, load_remote, load_email
from settings.settings import load_html, load_sms
from core import read_structure, readStructureFromFile, updateStructure
from core import clean_video_db, syncDirTree, transferLongVersions
from core import executeToDoFile, build_html_report, umount
from core import check_and_correct_videos_errors, clean_remote
from core import get_new_file_ids_from_structure, mount, check_mkv_videos
from notifications import send_sms_notification, send_mail_report, send_mail_log
def get_args():
    """Parse and return pyHomeVM's command line arguments."""
    p = argparse.ArgumentParser(description='pyHomeVM')
    p.add_argument(
        '-c', '--config_file_path',
        action='store',
        default='settings/dev_config.cfg',
        help='path to config file that is to be used.')
    # Simple boolean feature toggles, all off by default.
    toggles = (
        ('-s', '--sms', 'Enables sms notifications'),
        ('-l', '--log', 'Enables log sending by e-mail'),
        ('-r', '--report', 'Enables html report sending by e-mail'),
        ('-rem', '--remote', 'Enables transfer of long versions to remote storage'),
        ('-b', '--backup', 'Enables backup of first videos'),
    )
    for short_opt, long_opt, help_text in toggles:
        p.add_argument(short_opt, long_opt, help=help_text, action='store_true')
    p.add_argument('-stats',
                   help='Gets you statistics about your videos',
                   action='store_true')
    return p.parse_args()
def load_logger():
    """Return the module logger, writing DEBUG+ records to the configured file."""
    log = logging.getLogger(__name__)
    log.setLevel(logging.DEBUG)
    file_handler = logging.FileHandler(CONSTANTS['log_file_path'])
    file_handler.setFormatter(
        logging.Formatter('%(asctime)s - %(name)s - %(levelname)s - %(message)s'))
    log.addHandler(file_handler)
    return log
def main(argv=None):
    """Run main.

    One pyHomeVM pass: acquire the pid-file lock, load configuration,
    repair and sync the video library, and send the configured
    notifications (sms / html report / log mail).
    """
    start_time = datetime.now()
    args = get_args()  # Get args
    logger = load_logger()  # Set logger
    logger.info('PROGRAM STARTED')
    pid = str(os.getpid())
    pidfile = "/tmp/pyHomeVM.pid"
    config = load_config(args.config_file_path)  # load config file
    if os.path.isfile(pidfile):
        # A previous run is still active: mail the log so the operator
        # notices, then stop.
        logger.info('Program already running')
        html = load_html(config)
        email = load_email(config)
        send_mail_log(CONSTANTS['log_file_path'], email, html)
        sys.exit()
    # FIX: was `file(pidfile, 'w').write(pid)` -- the Python-2-only file()
    # builtin, and the handle was never closed. Use a context manager.
    with open(pidfile, 'w') as pid_fh:
        pid_fh.write(pid)
    (ffmpeg, local) = load_core(config)  # load core configs
    remote = load_remote(config)
    html = load_html(config)
    sms = load_sms(config)
    email = load_email(config)
    # NOTE(review): the flag-guarded loads below re-load settings already
    # loaded unconditionally above; kept verbatim in case the load_*
    # helpers have side effects -- confirm and simplify.
    if(args.log):
        email = load_email(config)
    if(args.report):
        html = load_html(config)
    if(args.remote):
        remote = load_remote(config)
    if(args.sms):
        sms = load_sms(config)
    # Persistent per-video metadata cache.
    video_db = shelve.open(CONSTANTS['video_db_path'], writeback=True)
    try:
        if not os.path.exists(CONSTANTS['structure_file_path']):
            raise Exception("Directory structure definition file not found.")
        past_structure = readStructureFromFile(CONSTANTS)
    except Exception:
        logger.info(traceback.format_exc())
        logger.info('{} not found'.format(CONSTANTS['structure_file_path']))
        past_structure = {}  # Start as new
    new_structure = read_structure(local)
    video_ids = get_new_file_ids_from_structure(new_structure, video_db)
    check_and_correct_videos_errors(video_ids, video_db, local, ffmpeg)
    logger.info('Checked for errors and corrupted')
    # Diff past vs. current structure; returns the data for the html report.
    html_data = updateStructure(
        past_structure,
        read_structure(local),
        local,
        ffmpeg,
        remote,
        video_db)
    # Marker file that suppresses repeated sms alerts about a failed mount.
    sms_sent_file = os.path.join(CONSTANTS['script_root_dir'], 'sms_sent')
    if(mount(remote)):
        logger.info('Mount succesfull')
        syncDirTree(local, remote)
        transferLongVersions(local, remote, video_db)
        if(os.path.isfile(CONSTANTS['todo_file_path'])):
            executeToDoFile(CONSTANTS['todo_file_path'], local, CONSTANTS)
        if(os.path.exists(sms_sent_file)):
            os.remove(sms_sent_file)
            logger.info('sms_sent file has been deleted')
        clean_remote(remote)
        umount(remote)
    else:
        logger.info('Mount unssuccesfull')
        if(not os.path.exists(sms_sent_file) and args.sms):
            send_sms_notification(sms)
            logger.info('Sms sent')
            with open(sms_sent_file, 'w') as sms_not:
                msg = 'SMS has been sent {}'.format(CONSTANTS['TODAY'])
                sms_not.write(msg)
                logger.info(msg)
    # Send the html report only when something actually changed.
    if(args.report and (
            html_data['new'] != '' or
            html_data['modified'] != '' or
            html_data['deleted'] != '' or
            html_data['moved'] != '')):
        html_report = build_html_report(html_data, CONSTANTS, html)
        send_mail_report(html_report, email)
        logger.info('Mail report sent')
    if(args.log):
        send_mail_log(CONSTANTS['log_file_path'], email, html)
        logger.info('log file sent')
    clean_video_db(video_db)
    check_mkv_videos(local, video_db)
    logger.info('DB cleaned')
    video_db.close()
    logger.info('Script ran in {}'.format(datetime.now() - start_time))
    # Release the pid-file lock.
    os.unlink(pidfile)
if __name__ == "__main__":
    # Propagate main()'s return value as the process exit status (None -> 0).
    sys.exit(main())
|
import gensim, logging
class SemanticVector:
    """Thin wrapper around gensim Word2Vec training on a sentence structure."""
    # Holds the trained gensim Word2Vec model; '' until model_word2vec()
    # or load_model() has been called.
    model = ''
    def __init__(self, structure):
        # structure must provide prepare_list_of_words_in_sentences().
        self.structure = structure
    def model_word2vec(self, min_count=15, window=15, size=100):
        """Train a CBOW Word2Vec model (sg=0) on the structure's sentences
        and return it (also stored on self.model)."""
        print 'preparing sentences list'
        sentences = self.structure.prepare_list_of_words_in_sentences()
        print 'start modeling'
        self.model = gensim.models.Word2Vec(sentences, size=size, window=window, min_count=min_count, workers=4, sample=0.001, sg=0)
        return self.model
    def save_model(self, name):
        """Persist the trained model to disk under *name*."""
        self.model.save(name)
    def load_model(self, name):
        """Load a previously saved model from *name* into self.model."""
        self.model = gensim.models.Word2Vec.load(name)
|
import logging
from scap.model.oval_5 import PE_SUBSYSTEM_ENUMERATION
from scap.model.oval_5.defs.EntityStateType import EntityStateType
logger = logging.getLogger(__name__)
class EntityStatePeSubsystemType(EntityStateType):
    """OVAL 5 entity state whose value must be a PE subsystem name."""
    # No attributes/elements beyond the base EntityStateType mapping.
    MODEL_MAP = {
    }
    def get_value_enum(self):
        """Return the enumeration of allowed PE subsystem values."""
        return PE_SUBSYSTEM_ENUMERATION
|
import numpy
import scipy
import random
from gnuradio import gr, gr_unittest
import blocks_swig as blocks
import digital_swig as digital
import channels_swig as channels
from ofdm_txrx import ofdm_tx, ofdm_rx
from utils import tagged_streams
LOG_DEBUG_INFO=False
class ofdm_tx_fg (gr.top_block):
    """Tx flow graph: tagged byte source -> ofdm_tx -> complex vector sink."""
    def __init__(self, data, len_tag_key):
        gr.top_block.__init__(self, "ofdm_tx")
        # Build length tags marking the packet boundary for the Tx.
        tx_data, tags = tagged_streams.packets_to_vectors((data,), len_tag_key)
        src = blocks.vector_source_b(data, False, 1, tags)
        self.tx = ofdm_tx(packet_length_tag_key=len_tag_key, debug_log=LOG_DEBUG_INFO)
        self.sink = blocks.vector_sink_c()
        self.connect(src, self.tx, self.sink)
    def get_tx_samples(self):
        """Return the modulated complex baseband samples."""
        return self.sink.data()
class ofdm_rx_fg (gr.top_block):
    """Rx flow graph: complex sample source -> optional channel -> ofdm_rx -> byte sink."""
    def __init__(self, samples, len_tag_key, channel=None, prepend_zeros=100):
        gr.top_block.__init__(self, "ofdm_rx")
        # Leading zeros exercise the frame detector; trailing zeros flush
        # the receiver pipeline.
        if prepend_zeros:
            samples = (0,) * prepend_zeros + tuple(samples)
        src = blocks.vector_source_c(tuple(samples) + (0,) * 1000)
        self.rx = ofdm_rx(frame_length_tag_key=len_tag_key, debug_log=LOG_DEBUG_INFO)
        if channel is not None:
            self.connect(src, channel, self.rx)
        else:
            self.connect(src, self.rx)
        self.sink = blocks.vector_sink_b()
        self.connect(self.rx, self.sink)
    def get_rx_bytes(self):
        """Return the demodulated payload bytes."""
        return self.sink.data()
class test_ofdm_txrx (gr_unittest.TestCase):
    """QA for the ofdm_tx / ofdm_rx hierarchical blocks."""
    def setUp(self):
        self.tb = gr.top_block()

    def tearDown(self):
        self.tb = None

    def test_001_tx(self):
        """ Just make sure the Tx works in general """
        len_tag_key = 'frame_len'
        n_bytes = 52
        # Expected samples: (ceil((n_bytes + 4) / 6) payload symbols + 3
        # sync/header symbols) * 80 samples per symbol.
        # NOTE(review): the constants 4/6/3/80 mirror ofdm_tx's defaults --
        # confirm against ofdm_txrx.py if those defaults ever change.
        n_samples_expected = (numpy.ceil(1.0 * (n_bytes + 4) / 6) + 3) * 80
        test_data = [random.randint(0, 255) for x in range(n_bytes)]
        # FIX: removed dead code that built an unconnected vector source and
        # a second, unused ofdm_tx instance before running the flow graph.
        tx_fg = ofdm_tx_fg(test_data, len_tag_key)
        tx_fg.run()
        self.assertEqual(len(tx_fg.get_tx_samples()), n_samples_expected)

    def test_002_rx_only_noise(self):
        """ Run the RX with only noise, check it doesn't crash
        or return a burst. """
        len_tag_key = 'frame_len'
        samples = (0,) * 1000
        channel = channels.channel_model(0.1)
        rx_fg = ofdm_rx_fg(samples, len_tag_key, channel)
        rx_fg.run()
        self.assertEqual(len(rx_fg.get_rx_bytes()), 0)

    def test_003_tx1packet(self):
        """ Transmit one packet, with slight AWGN and slight frequency + timing offset.
        Check packet is received and no bit errors have occurred. """
        len_tag_key = 'frame_len'
        n_bytes = 21
        fft_len = 64
        test_data = tuple([random.randint(0, 255) for x in range(n_bytes)])
        # 1.0/fft_len is one sub-carrier, a fine freq offset stays below that
        freq_offset = 1.0 / fft_len * 0.7
        #channel = channels.channel_model(0.01, freq_offset)
        channel = None
        # Tx
        tx_fg = ofdm_tx_fg(test_data, len_tag_key)
        tx_fg.run()
        tx_samples = tx_fg.get_tx_samples()
        # Rx
        rx_fg = ofdm_rx_fg(tx_samples, len_tag_key, channel, prepend_zeros=100)
        rx_fg.run()
        rx_data = rx_fg.get_rx_bytes()
        # Tx and Rx must agree on sync words before the payload can match.
        self.assertEqual(tuple(tx_fg.tx.sync_word1), tuple(rx_fg.rx.sync_word1))
        self.assertEqual(tuple(tx_fg.tx.sync_word2), tuple(rx_fg.rx.sync_word2))
        self.assertEqual(test_data, rx_data)

    def test_004_tx1packet_large_fO(self):
        """ Transmit one packet, with slight AWGN and large frequency offset.
        Check packet is received and no bit errors have occurred. """
        fft_len = 64
        len_tag_key = 'frame_len'
        n_bytes = 21
        test_data = tuple([random.randint(0, 255) for x in range(n_bytes)])
        #test_data = tuple([255 for x in range(n_bytes)])
        # 1.0/fft_len is one sub-carrier
        frequency_offset = 1.0 / fft_len * 2.5
        channel = channels.channel_model(0.00001, frequency_offset)
        # Tx
        tx_fg = ofdm_tx_fg(test_data, len_tag_key)
        tx_fg.run()
        tx_samples = tx_fg.get_tx_samples()
        # Rx
        rx_fg = ofdm_rx_fg(tx_samples, len_tag_key, channel, prepend_zeros=100)
        rx_fg.run()
        rx_data = rx_fg.get_rx_bytes()
        self.assertEqual(test_data, rx_data)
if __name__ == '__main__':
    # Run the QA suite and write the XML results file.
    gr_unittest.run(test_ofdm_txrx, "test_ofdm_txrx.xml")
|
from sqlalchemy import create_engine
from sqlalchemy import Column, Integer, String, UniqueConstraint
from sqlalchemy.orm import sessionmaker
from sqlalchemy.ext.declarative import declarative_base
# SQLite database next to the script; echo=False silences SQL logging.
engine = create_engine('sqlite:///challenge.sqlite', echo=False)
Session = sessionmaker(bind=engine)
# Module-level session shared by importers of this module.
session = Session()
Base = declarative_base()
class Endereco(Base):
    """Brazilian address record keyed by CEP (postal code)."""
    __tablename__ = "endereco"
    #id = Column(Integer, primary_key=True)
    logradouro = Column(String)  # street
    bairro = Column(String)  # neighbourhood
    cidade = Column(String)  # city
    estado = Column(String)  # state
    cep = Column(String, primary_key=True)  # postal code, primary key
    # NOTE(review): this UniqueConstraint is redundant -- cep is already the
    # primary key and therefore unique.
    __table_args__ = (UniqueConstraint('cep'),)
    def __repr__(self):
        # Represent the address by its CEP only.
        return "{}".format(self.cep)
# Create the endereco table if it does not already exist.
Base.metadata.create_all(engine)
|
"""
Contains code for nicely reporting errors to the user.
"""
import logging
import traceback
from PyQt4 import QtGui
from xVClient import ClientGlobals
mainlog = logging.getLogger("")
FatalError = 1
"""Fatal error, forces termination of application."""
NormalError = 2
"""Normal error, this has impact but does not crash the program."""
WarningError = 3
"""Warning, this does not affect function but should cause concern."""
NoticeError = 4
"""General information."""
def ShowError(message, severity=NormalError, parent=None):
    """
    Displays an error message to the user and waits for a response.
    """
    # Icon and window title for each severity; anything else gets the fallback.
    presentation = {
        FatalError: (QtGui.QMessageBox.Critical, "Fatal Error"),
        NormalError: (QtGui.QMessageBox.Critical, "Error"),
        WarningError: (QtGui.QMessageBox.Warning, "Warning"),
        NoticeError: (QtGui.QMessageBox.Information, "Notice"),
    }
    icon, title = presentation.get(severity,
                                   (QtGui.QMessageBox.NoIcon, "Message"))
    dlg = QtGui.QMessageBox(parent)
    dlg.setText(message)
    dlg.setIcon(icon)
    dlg.setWindowTitle(title)
    # Modal: block until the user dismisses the box.
    dlg.exec_()
def ShowException(severity=NormalError, start_msg='An error has occurred!', parent=None):
    '''
    Displays the currently-handled exception in an error box.
    '''
    details = traceback.format_exc()
    ShowError("%s\n\n%s" % (start_msg, details), severity, parent)
class ErrorMessageHandler(logging.Handler):
    '''
    Logging handler that displays messages in Qt message boxes.
    '''
    def __init__(self, parent=None):
        '''
        Creates a new handler.

        @type parent: QtGui.QWidget
        @param parent: Parent widget for errors to be displayed under.
        '''
        super(ErrorMessageHandler, self).__init__()
        # Parent widget for errors to be displayed under.
        self.Parent = parent

    def _ShowError(self, message):
        '''
        Shows an error message and returns immediately.

        @type message: string
        @param message: Message to display.
        '''
        # FIX: removed an unused local lookup of ClientGlobals.Application.
        wnd = QtGui.QMessageBox(parent=self.Parent)
        wnd.setIcon(QtGui.QMessageBox.Critical)
        wnd.setWindowTitle("Error")
        wnd.setStandardButtons(QtGui.QMessageBox.Ok)
        wnd.setText(message)
        wnd.exec_()

    def emit(self, record):
        '''Display the formatted log record in a modal error box.'''
        self._ShowError(record.getMessage())
def ConfigureLogging(parent=None):
    '''
    Configures the logging mechanism to report errors as dialog boxes.

    @type parent: QtGui.QWidget
    @param parent: Parent widget for errors to be displayed under.
    '''
    fmt = logging.Formatter("%(message)s")
    # ERROR and above pop up a message box.
    box_handler = ErrorMessageHandler(parent)
    box_handler.setFormatter(fmt)
    box_handler.setLevel(logging.ERROR)
    mainlog.addHandler(box_handler)
    # Everything else streams to stderr.
    stream_handler = logging.StreamHandler()
    stream_handler.setFormatter(fmt)
    stream_handler.setLevel(logging.DEBUG)
    mainlog.addHandler(stream_handler)
    # Let the handlers, not the logger, do the level filtering.
    mainlog.setLevel(logging.DEBUG)
|
import fs
import numpy as np
import h5py
import pm_setup
# Load the reference particle ids and forces produced by an earlier run.
# FIX: the handle was previously bound to the name `file` (shadowing the
# builtin) and closed manually; use a context manager instead.
with h5py.File('force_%s.h5' % fs.config_precision(), 'r') as fh:
    ref_id = fh['id'][:]
    ref_force = fh['f'][:]
fs.msg.set_loglevel(0)
particles = pm_setup.force()
particle_id = particles.id
particle_force = particles.force
# Only the root node validates the result.
if fs.comm.this_node() == 0:
    assert(np.all(particle_id == ref_id))
    print('pm_force id OK')
    force_rms = np.std(ref_force)
    diff = particle_force - ref_force
    diff_rms = np.std(diff)
    print('pm_force rms error %e / %e' % (diff_rms, force_rms))
    diff_max = np.max(np.abs(diff))
    print('pm_force max error %e / %e' % (diff_max, force_rms))
    # Tolerances scale with the floating-point precision in use.
    eps = np.finfo(particle_force.dtype).eps
    assert(diff_rms < 20*eps)
    assert(diff_max < 1000*eps)
    print('pm_force OK')
|
__author__ = 'Marko Čibej'
import argparse
from svgmapper import *
from helper import logger
def main(config, resources=None, maps=None, simulate=False):
    """Run the mapping job described by *config*.

    Optional *resources* are extra resource files, *maps* overrides the
    configured targets, and *simulate* parses everything without running.
    """
    logger.info('Starting job')
    with SvgMapper() as svg_mapper:
        svg_mapper.load_config(config, resources)
        if maps:
            svg_mapper.replace_targets(maps)
        if not simulate:
            svg_mapper.run()
    logger.info('Finished')
def parse_args():
    """Parse command line arguments and return the argparse namespace.

    FIX: the ``--verbosity`` help text promises levels 0-3, but
    ``choices=range(0, 3)`` rejected 3 (debug); the range now includes 3.
    """
    parser = argparse.ArgumentParser(description='Transform maps in SVG format in various ways.')
    parser.add_argument('config_file', help='The name of the configuration file')
    parser.add_argument('-r', '--resource', help='Additional resource file(s)',
                        action='append', metavar='resource_file')
    parser.add_argument('-m', '--map', help='Map(s) to run instead of those listed in config file', metavar='map_name')
    parser.add_argument('-v', '--verbosity', help='Set verbosity: 0=errors only, 1=warnings, 2=info, 3=debug',
                        type=int, choices=range(0, 4), dest='verbosity')
    parser.add_argument('-l', '--log', help='Output to named log file', metavar=('level(0-3)', 'logFile'), nargs=2)
    parser.add_argument('-s', '--simulate', help='Don\'t actually do anything, just parse all the configurations',
                        action='store_true')
    return parser.parse_args()
def set_logging(the_log, verbosity):
    """Configure the shared logger.

    *the_log* is an optional ``(level, filename)`` pair for a file handler;
    *verbosity* (0-3, or None) sets the console handler level.
    """
    log_levels = [logging.ERROR, logging.WARNING, logging.INFO, logging.DEBUG]
    # Logger passes everything; the handlers filter.
    logger.setLevel(logging.DEBUG)
    if the_log:
        level = log_levels[int(the_log[0])]
        lf = logging.FileHandler(the_log[1], mode='w')
        lf.setLevel(level)
        lf.setFormatter(logging.Formatter('%(levelname)s: %(message)s'))
        logger.addHandler(lf)
    lc = logging.StreamHandler()
    # FIX: `if verbosity:` treated verbosity 0 (errors only) as "not given"
    # and silently fell back to the INFO default; compare against None.
    if verbosity is not None:
        lc.setLevel(log_levels[verbosity])
    else:
        lc.setLevel(log_levels[2])
    logger.addHandler(lc)
|
from distutils.core import setup
import os
import sys
def main():
    """Collect the packaged data files under share/ and run distutils setup.

    FIX: the inner loop variable previously shadowed the ``file`` builtin,
    and the appended value was wrapped in a redundant ``tuple()`` call
    around a tuple literal.
    """
    SHARE_PATH = os.path.join(os.path.dirname(os.path.abspath(__file__)),
        "share")
    data_files = []
    # don't trash the users system icons!!
    black_list = ['index.theme', 'index.theme~']
    for path, dirs, files in os.walk(SHARE_PATH):
        # Map each share/ subdirectory to its installable (non-blacklisted) files.
        data_files.append((path.replace(SHARE_PATH, "share", 1),
            [os.path.join(path, fname) for fname in files if fname not in
            black_list]))
    setup(name="caffeine",
        version="2.4.1",
        description="""A status bar application able to temporarily prevent
        the activation of both the screensaver and the "sleep" powersaving
        mode.""",
        author="The Caffeine Developers",
        author_email="bnsmith@gmail.com",
        url="https://launchpad.net/caffeine",
        packages=["caffeine"],
        data_files=data_files,
        scripts=[os.path.join("bin", "caffeine")]
        )
if __name__ == "__main__":
    # Entry point when invoked as `python setup.py ...`.
    main()
|
import numpy as np
import cv2
from scipy import interpolate
from random import randint
import IPython
from alan.rgbd.basic_imaging import cos,sin
from alan.synthetic.synthetic_util import rand_sign
from alan.core.points import Point
"""
generates rope using non-holonomic car model dynamics (moves with turn radius)
generates labels at ends of rope
parameters:
h, w of image matrix
l, w of rope
returns:
image matrix with rope drawn
[left label, right label]
"""
def get_rope_car(h = 420, w = 420, rope_l_pixels = 800 , rope_w_pixels = 8, pix_per_step = 10, steps_per_curve = 10, lo_turn_delta = 5, hi_turn_delta = 10):
    """Draw a random rope with car-model dynamics and label its endpoints.

    Returns (image, [left_label, right_label], flag) where each label is
    [x, y, 0] and flag is -1 when the sample should be rejected (endpoint
    out of bounds or lying on the rope), else 1.
    """
    #randomize start
    # State vector is (x, y, heading). NOTE(review): cos/sin come from
    # alan.rgbd.basic_imaging and appear to take degrees -- confirm.
    init_pos = np.array([randint(0, w - 1), randint(0, h - 1), randint(0, 360)])
    all_positions = np.array([init_pos])
    #dependent parameter (use float division)
    num_curves = int(rope_l_pixels/(steps_per_curve * pix_per_step * 1.0))
    #point generation
    for c in range(num_curves):
        # One constant turn rate per curve, random sign and magnitude.
        turn_delta = rand_sign() * randint(lo_turn_delta, hi_turn_delta)
        for s in range(steps_per_curve):
            curr_pos = all_positions[-1]
            delta_pos = np.array([pix_per_step * cos(curr_pos[2]), pix_per_step * sin(curr_pos[2]), turn_delta])
            all_positions = np.append(all_positions, [curr_pos + delta_pos], axis = 0)
    #center the points (avoid leaving image bounds)
    mid_x_points = (min(all_positions[:,0]) + max(all_positions[:,0]))/2.0
    mid_y_points = (min(all_positions[:,1]) + max(all_positions[:,1]))/2.0
    for pos in all_positions:
        pos[0] -= (mid_x_points - w/2.0)
        pos[1] -= (mid_y_points - h/2.0)
    #draw rope
    image = np.zeros((h, w))
    prev_pos = all_positions[0]
    for curr_pos in all_positions[1:]:
        cv2.line(image, (int(prev_pos[0]), int(prev_pos[1])), (int(curr_pos[0]), int(curr_pos[1])), 255, rope_w_pixels)
        prev_pos = curr_pos
    #get endpoint labels, sorted by x
    labels = [all_positions[0], all_positions[-1]]
    if labels[0][0] > labels[1][0]:
        labels = [labels[1], labels[0]]
    #labels = [[l[0], l[1], l[2] + 90] for l in labels]
    #Ignoring Rotation for Now
    labels = [[l[0], l[1], 0] for l in labels]
    #rejection sampling
    for num_label in range(2):
        c_label = labels[num_label]
        #case 1- endpoints not in image
        if check_bounds(c_label, [w, h]) == -1:
            return image, labels, -1
        #case 2- endpoint on top of other rope segment
        if check_overlap(c_label, [w, h], image, rope_w_pixels) == -1:
            return image, labels, -1
    return image, labels, 1
def check_bounds(label, bounds):
    """Return -1 when the endpoint is within 5 px of (or beyond) the image
    border in either dimension, else 0."""
    margin = 5
    inside = all(margin <= label[d] <= bounds[d] - 1 - margin for d in range(2))
    return 0 if inside else -1
def check_overlap(label, bounds, image, rope_w_pixels):
    """Return -1 when the endpoint appears to sit on another rope segment,
    else 0, by measuring the white-pixel density around the endpoint."""
    lo = [int(max(0, label[d] - rope_w_pixels)) for d in range(2)]
    hi = [int(min(bounds[d] - 1, label[d] + rope_w_pixels)) for d in range(2)]
    white = sum(image[y][x] / 255.0
                for x in range(lo[0], hi[0])
                for y in range(lo[1], hi[1]))
    # More than 60% white pixels in the (2w x 2w) neighbourhood means the
    # endpoint is probably lying on the rope body.
    threshold = 0.6 * (hi[1] - lo[1]) * (hi[0] - lo[0])
    return -1 if white > threshold else 0
|
__author__ = 'nicolas'
from os.path import expanduser
from ordereddict import OrderedDict
from Bio import SwissProt
import time
import MySQLdb as mdb
"""
Fuck!
from ordereddict import OrderedDict
import MySQLdb as mdb
dicc = {}
dictdebug_empty = OrderedDict()
dictdebug = dictdebug_empty
dictdebug['hola'] = 'chau'
print(dictdebug.items())
print(dictdebug_empty.items())
dictdebug_empty.clear()
print(dictdebug_empty.items())
print(dictdebug.items())
"""
# Run configuration: target database/tables, input file and entry range.
start_time = time.time()
database = "ptmdb"
tabla_cuentas = "sprot_count1"  # per-protein amino-acid count table
tabla_ptms = "sprot_ptms1"  # per-PTM record table
file_name = "uniprot_sprot.dat"
desde = 0
hasta = 542783 # There are 542782 AC entries??
# NOTE(review): database credentials are hard-coded here — move to a config file.
con = mdb.connect('localhost', 'nicolas', passwd="nicolaslfp", db=database)
cur = con.cursor()
cur.execute("SELECT VERSION()")
cur.execute("USE " + database)
print("USE ptmdb;")
uniprot_file = expanduser("~") + '/QB9_Files/' + file_name
output_file = expanduser("~") + '/QB9-git/QB9/resources/output.txt'
def count_amino_acids_ext(seq):  # takes a sequence and counts its letters
    """Return an OrderedDict mapping every key of ``prot_dic`` (the letters
    A-Z) to the number of times it occurs in ``seq``.

    Bug fix: the previous implementation did ``prot_dic2 = prot_dic``, which
    aliases (does not copy) the shared module-level template, so every call
    silently mutated ``prot_dic``. Working on a real copy keeps the template
    at its all-zero defaults.
    """
    counts = prot_dic.copy()  # copy, not alias, of the ordered A-Z template
    for aa in counts:
        counts[aa] = seq.count(aa)
    return counts  # ordered dict of (AA, #AA) pairs
abc = 'ABCDEFGHIJKLMNOPQRSTUVWXYZ'
# Ordered template: one zeroed counter per letter (used by count_amino_acids_ext).
prot_dic = OrderedDict((k, 0) for k in abc)
# Uniprot feature (FT) keys that describe post-translational modifications.
ptmrecords = ["MOD_RES", "LIPID", "CARBOHYD", "DISULFID", "CROSSLNK"]
neqs = ["Probable", "Potential", "By similarity"] # And "Experimental"
# Column definitions of the PTM table, in INSERT order.
categories = OrderedDict()
categories['AC'] = "varchar(30) NOT NULL" # accession number
categories['FT'] = "varchar(30) NOT NULL"
categories['STATUS'] = "varchar(30) NOT NULL"
categories['PTM'] = "varchar(100) NOT NULL"
categories['FROM_RES'] = "varchar(10) NOT NULL"
categories['TO_RES'] = "varchar(10) NOT NULL"
categories['FROM_AA'] = "varchar(10) NOT NULL" # we implement the target residue directly!!!! =D
categories['TO_AA'] = "varchar(10) NOT NULL"
categories['SQ'] = "text(45000) NOT NULL" # SQ SEQUENCE XXXX AA; XXXXX MW; XXXXXXXXXXXXXXXX CRC64;
categories['LENGTH'] = "varchar(200) NOT NULL" # SQ SEQUENCE XXXX AA; XXXXX MW; XXXXXXXXXXXXXXXX CRC64;
categories['ORG'] = "text(500) NOT NULL" # organism
categories['OC'] = "varchar(30) NOT NULL" # organism classification, we only keep the domain
categories['OX'] = "varchar(200) NOT NULL" # taxonomic ID
categories['HO'] = "text(500)" # host organism
categories['inumber'] = "varchar(200) NOT NULL"
# Default (empty) record: reuses the keys of categories with a default value,
# so fields are "empty" markers rather than NULL.
empty_data = OrderedDict()
for gato in categories:
    empty_data[gato] = 'NOFT'
empty_data['FROM_RES'] = '?'
empty_data['TO_RES'] = '?'
empty_data['FROM_AA'] = '?'
empty_data['TO_AA'] = '?'
data = empty_data.copy() # this is the empty record dict template used below
print("DROP TABLE " + tabla_cuentas + ";")
print("DROP TABLE " + tabla_ptms + ";")
prot_dic_def_items = []
prot_dic_def = OrderedDict((k, 'SMALLINT') for k in abc)
for cat, value in prot_dic_def.items(): # key and value concatenations
    prot_dic_def_items.append(cat + ' ' + value) # stored in the list
table_def = ', '.join(prot_dic_def_items) # table definition
print("CREATE TABLE IF NOT EXISTS "
      + tabla_cuentas
      + " (AC VARCHAR(30) UNIQUE, OC_ID VARCHAR(30), LENGTH MEDIUMINT,"
      + table_def
      + ") ENGINE=InnoDB;")
print("commit;")
table_def_items = [] # list for key and value concatenations
for cat, value in categories.items(): # key and value concatenations
    table_def_items.append(cat + ' ' + value) # stored in the list
table_def_2 = ', '.join(table_def_items) # table definition
print("CREATE TABLE IF NOT EXISTS " + tabla_ptms + " (" + table_def_2 + ") ENGINE=InnoDB;")
print("commit;")
# Loop state shared by the parsing pass below.
i = 0
j = 0
ptm = ''
out = []
listap = []
listaq = []
listar = []
olista = []
interes = []
# Main pass: stream the SwissProt flat file and emit SQL INSERT statements.
with open(uniprot_file) as uniprot: # opens the file and closes it at the end
    for record in SwissProt.parse(uniprot): # parse the uniprot records
        i += 1
        if i % 100 == 0:
            print("commit;")
        data = empty_data.copy() # instead of clearing the dict, re-bind fresh defaults without aliasing the template
        # Load the general data for the PTMs of one protein/uniprot entry (entry instances);
        # they must be loaded in the order of the ptmdb columns and of the INSERT
        # print(record.accessions[0])
        data['AC'] = record.accessions[0] # only the primary accession, not sure about the rest
        data['SQ'] = record.sequence
        data['LENGTH'] = record.sequence_length # todo is there a problem here? entries over 999 residues do not fit?
        data['ORG'] = record.organism # the organism
        data['OC'] = record.organism_classification[0] # the organism's domain
        data['OX'] = record.taxonomy_id[0] # the organism's taxonomy id
        del olista[:]
        if not record.host_organism:
            data['HO'] = 'No host'
        else:
            for o in record.host_organism:
                olista.append((o.split(";"))[0])
            data['HO'] = ', '.join(olista) # and this is the host of the virus (or parasite?)
        data['inumber'] = str(i) # debugging only =) shows how far it got
        # Generate and store the INSERT with the per-amino-acid counts of the sequence
        del listaq[:]
        contenido_aa = count_amino_acids_ext(record.sequence) # dict with AA, #AA pairs of the sequence
        for q in contenido_aa.itervalues():
            listaq.append(str(q)) # collected into a list
        sql_insert_values_q = ', '.join(listaq)
        if i >= desde:
            print("INSERT INTO " + tabla_cuentas + " VALUES ('"
                  + record.accessions[0] + "', '"
                  + record.organism_classification[0] + "', "
                  + str(record.sequence_length)
                  + ", " + sql_insert_values_q + ");")
            # print("commit;")
            # con.commit()
        # Now the features: is any of them interesting?
        features = record.features # todo insert the FTs into another table along with OC, OX, OR...?
        del out[:]
        del interes[:]
        for a in range(0, len(features)): # store the FT "candidate" fields in a list called out
            out.append(features[a][0])
        interes = list(set(out).intersection(ptmrecords)) # build a set of the interesting ones, turned into the list interes
        if interes: # if interes is not empty, there is something to load
            # todo avoid sequence duplicates, relation via AC?
            # now load each PTM into data (entry sub-instances)
            for feature in features: # iterate the entry's features
                if feature[0] in interes: # if the FT title is interesting, continue. slightly faster! =D
                    for tipo in interes: # iterate the interesting types that were found in the features
                        if feature[0] in tipo: # if the evaluated feature is interesting, load its data into data[]
                            A = feature[1] # from residue A (will equal the next one for single sites)
                            B = feature[2] # to residue B. BEWARE: some unknowns are marked with a "?"
                            C = feature[3] # this one holds the real info?
                            D = feature[4] # this one shows up sometimes? todo wtf?
                            # reset FT, FROM and TO
                            data['FT'] = 'NOFT'
                            data['FROM_RES'] = '?'
                            data['TO_RES'] = '?'
                            data['FROM_AA'] = '?'
                            data['TO_AA'] = '?'
                            # Assign FT
                            data['FT'] = feature[0]
                            data['FROM_RES'] = A
                            data['TO_RES'] = B
                            # reset PTM and STATUS
                            ptm = ''
                            data['PTM'] = 'NOFT'
                            data['STATUS'] = "Experimental"
                            # Assign STATUS and PTM
                            if C: # if C (which holds the PTM name and the STATUS) contains something
                                for neq in neqs: # iterate the possible STATUS values
                                    if neq in C: # if C contains this STATUS
                                        data['STATUS'] = neq # assign the value to STATUS
                                        C = C.replace('(' + neq + ")", '') # strip this junk out
                                        C = C.replace(neq, '')
                                        # also strip it when it appears without parentheses
                                        break # this breaks the innermost loop
                                ptm = ((C.split(" /"))[0].split(';')[0]). \
                                    rstrip(" ").rstrip(".").rstrip(" ")
                                # Obs: sometimes the mods carry stable identifiers starting with "/",
                                # so that part must go, and so does anything after a ";". BYE.
                                # There are also CROSSLNKs with extra annotations which make them look unique
                                # when counted even though they are nearly the same. todo maybe happens with others?
                                # See http://web.expasy.org/docs/userman.html#FT_line
                                # Trailing spaces and periods are stripped too.
                                # I hate this format... todo isn't there something that handles it better?
                            if tipo == 'DISULFID': # disulfide bridges need no further description
                                ptm = "S-cysteinyl 3-(oxidosulfanyl)alanine (Cys-Cys)"
                                data['FROM_AA'] = 'C'
                                data['TO_AA'] = 'C'
                            else: # otherwise store things the usual way
                                # Assign target residue
                                if A != '?':
                                    data['FROM_AA'] = data['SQ'][int(data['FROM_RES'])-1]
                                else:
                                    data['FROM_AA'] = '?'
                                if B != '?':
                                    data['TO_AA'] = data['SQ'][int(data['TO_RES'])-1]
                                else:
                                    data['TO_AA'] = '?'
                                if ptm.find("with") != -1: # if the ptm contains the word "with" (crosslink case)
                                    ptm = ptm.split(" (with")[0].split(" (int")[0] # trim it
                            data['PTM'] = ptm
                            del listap[:]
                            for p in data.itervalues(): # iterate the values loaded into the dict
                                listap.append(str(p).replace("'", "''")) # collected into a list
                            sql_insert_values_p = '\'' + \
                                '\', \''.join(listap) + \
                                '\''
                            # which are later joined the way they go in the INSERT
                            # The INSERT, where ' is replaced by '' to escape it for SQL
                            if i >= desde: # to process the file in chunks
                                print(("INSERT INTO " + tabla_ptms + " VALUES (%r);"
                                       % sql_insert_values_p).replace("-...", "").replace("\"", '').replace('.', ''))
                                # print("commit;")
                                # con.commit()
                            # join the values elements with commas
        else:
            # If instead the entry has no interesting FTs, only the general data and defaults are loaded
            del listar[:]
            for r in data.itervalues():
                listar.append(str(r).replace("'", "''"))
            sql_insert_values_r = '\'' + '\', \''.join(listar) + '\''
            if i >= desde: # to process the file in chunks
                print(("INSERT INTO " + tabla_ptms + " VALUES (%r);"
                       % sql_insert_values_r).replace("\"", '').replace('.', ''))
                # print("commit;")
                # con.commit()
        if i >= hasta: # according to uniprot the number of sequence entries is 54247468
            # print("\n")
            # print(i)
            break
|
from odoo import models, fields, api, _
from odoo.exceptions import ValidationError, UserError
class AccountInvoiceLine(models.Model):
    _inherit = 'account.invoice.line'

    start_date = fields.Date('Start Date')
    end_date = fields.Date('End Date')
    must_have_dates = fields.Boolean(
        related='product_id.must_have_dates', readonly=True)

    @api.multi
    @api.constrains('start_date', 'end_date')
    def _check_start_end_dates(self):
        """Ensure start/end dates are either both set or both empty, and
        that the start date never falls after the end date."""
        for line in self:
            has_start = bool(line.start_date)
            has_end = bool(line.end_date)
            if has_start and not has_end:
                raise ValidationError(
                    _("Missing End Date for invoice line with "
                      "Description '%s'.") % line.name)
            if has_end and not has_start:
                raise ValidationError(
                    _("Missing Start Date for invoice line with "
                      "Description '%s'.") % line.name)
            if has_start and has_end and line.start_date > line.end_date:
                raise ValidationError(
                    _("Start Date should be before or be the same as "
                      "End Date for invoice line with Description '%s'.")
                    % line.name)
        # Note: requiring dates for must_have_dates products here would
        # block automatic invoice generation/import, so that check happens
        # on invoice validation instead (see action_move_create).
class AccountInvoice(models.Model):
    _inherit = 'account.invoice'

    def inv_line_characteristic_hashcode(self, invoice_line):
        """Add start and end dates to the hashcode used when the option
        "Group Invoice Lines" is active on the Account Journal."""
        base = super(AccountInvoice, self).inv_line_characteristic_hashcode(
            invoice_line)
        return '%s-%s-%s' % (
            base,
            invoice_line.get('start_date', 'False'),
            invoice_line.get('end_date', 'False'),
        )

    @api.model
    def line_get_convert(self, line, part):
        """Copy the dates from invoice lines to move lines."""
        vals = super(AccountInvoice, self).line_get_convert(line, part)
        vals.update(
            start_date=line.get('start_date', False),
            end_date=line.get('end_date', False),
        )
        return vals

    @api.model
    def invoice_line_move_line_get(self):
        """Copy the dates from each invoice line onto its move line dict."""
        res = super(AccountInvoice, self).invoice_line_move_line_get()
        line_model = self.env['account.invoice.line']
        for vals in res:
            inv_line = line_model.browse(vals['invl_id'])
            vals['start_date'] = inv_line.start_date
            vals['end_date'] = inv_line.end_date
        return res

    @api.multi
    def action_move_create(self):
        """Refuse validation when a product flagged must_have_dates is
        missing its Start or End Date."""
        for invoice in self:
            for line in invoice.invoice_line_ids:
                product = line.product_id
                if not (product and product.must_have_dates):
                    continue
                if line.start_date and line.end_date:
                    continue
                raise UserError(_(
                    "Missing Start Date and End Date for invoice "
                    "line with Product '%s' which has the "
                    "property 'Must Have Start and End Dates'.")
                    % product.name)
        return super(AccountInvoice, self).action_move_create()
|
from __future__ import unicode_literals
from django.db import models, migrations
class Migration(migrations.Migration):
    """Add the nullable ``sorttimestamp`` datetime column to DataPoint."""

    dependencies = [
        ('agentex', '0014_remove_decision_datacollect'),
    ]
    operations = [
        migrations.AddField(
            model_name='datapoint',
            name='sorttimestamp',
            field=models.DateTimeField(null=True, blank=True),
        ),
    ]
|
import numpy as np
import struct
import wave
from winsound import PlaySound, SND_FILENAME, SND_ASYNC
import matplotlib.pyplot as plt
CHUNK = 1 << 8  # frames requested per wave.readframes() call (256)


def play(filename):
    """Start asynchronous playback of *filename* via the Windows winsound API."""
    PlaySound(filename, SND_FILENAME | SND_ASYNC)
# Load the whole WAV file into memory in CHUNK-sized reads.
fn = r"D:\b.wav"
f = wave.open(fn)
print(f.getparams())
ch = f.getnchannels()
sw = f.getsampwidth()
n = f.getnframes()
data = bytearray()
while len(data) < n * ch * sw:
    data.extend(f.readframes(CHUNK))
# NOTE(review): the 'h' format assumes 16-bit samples (sw == 2) — confirm
# before using this on files with other sample widths.
data = np.array(struct.unpack('{n}h'.format(n=n * ch), data))
# FFT over the raw (interleaved) samples; report the dominant frequency
# by scaling the normalized bin frequency with the sample rate.
w = np.fft.fft(data)
freqs = np.fft.fftfreq(len(w))
module = np.abs(w)
idmax = module.argmax()
print(abs(freqs[idmax]) * f.getframerate())
# NOTE(review): specgram is called without Fs, so its time/frequency axes
# use matplotlib's default sample rate of 2 — pass Fs=f.getframerate() to fix.
plt.specgram(data)
plt.show()
|
import tensorflow as tf
from tensorflow.python.ops import rnn_cell
from tensorflow.python.ops import seq2seq
import numpy as np
class Model():
    """Recurrent model built on the pre-1.0 TensorFlow RNN API.

    NOTE(review): despite the char-RNN style sample() method below, the
    training graph regresses one scalar per timestep (vocab_size is forced
    to 1) — confirm which of the two usages is the current one.
    """

    def __init__(self, args, infer=False):
        """Build the training (or single-step inference) graph.

        @param args: namespace with model, rnn_size, num_layers, batch_size,
                     seq_length, grad_clip (vocab_size is overwritten to 1)
        @param infer: when True the graph is unrolled one step at a time
        """
        self.args = args
        if infer:
            args.batch_size = 1
            args.seq_length = 1

        # Select the recurrent cell implementation.
        if args.model == 'rnn':
            cell_fn = rnn_cell.BasicRNNCell
        elif args.model == 'gru':
            cell_fn = rnn_cell.GRUCell
        elif args.model == 'lstm':
            cell_fn = rnn_cell.BasicLSTMCell
        else:
            raise Exception("model type not supported: {}".format(args.model))

        cell = cell_fn(args.rnn_size)
        self.cell = cell = rnn_cell.MultiRNNCell([cell] * args.num_layers)

        # Inputs are raw floats (no embedding lookup); targets are integers.
        self.input_data = tf.placeholder(tf.float32, [args.batch_size, args.seq_length], name="input")
        self.targets = tf.placeholder(tf.int32, [args.batch_size, args.seq_length], name="targets")
        self.initial_state = cell.zero_state(args.batch_size, tf.float32)
        # Split along the time axis: one [batch_size, 1] tensor per step.
        inputs_data = tf.split(1, args.seq_length, self.input_data)
        args.vocab_size = 1

        with tf.variable_scope('rnnlm'):
            softmax_w = tf.get_variable("softmax_w", [args.rnn_size, args.vocab_size])
            softmax_b = tf.get_variable("softmax_b", [args.vocab_size])
            # with tf.device("/cpu:0"):
            #     embedding = tf.get_variable("embedding", [args.vocab_size, args.rnn_size])
            #     inputs = tf.split(1, args.seq_length, tf.nn.embedding_lookup(embedding, self.input_data))
            #inputs = tf.split(1, args.seq_length, self.input_data)
            #     inputs = [tf.squeeze(input_, [1]) for input_ in inputs]
            #def loop(prev, _):
            #    prev = tf.matmul(prev, softmax_w) + softmax_b
            #    prev_symbol = tf.stop_gradient(tf.argmax(prev, 1))
            #    return tf.nn.embedding_lookup(embedding, prev_symbol)
            #outputs, last_state = seq2seq.rnn_decoder(inputs, self.initial_state, cell, loop_function=loop if infer else None, scope='rnnlm')
            outputs, last_state = seq2seq.rnn_decoder(inputs_data, self.initial_state, cell)
        output = tf.reshape(tf.concat(1, outputs), [-1, args.rnn_size])
        self.logits = tf.matmul(output, softmax_w) + softmax_b
        self.probs = tf.nn.softmax(self.logits)
        #loss = seq2seq.sequence_loss_by_example([self.logits],
        #        [tf.reshape(self.targets, [-1])],
        #        [tf.ones([args.batch_size * args.seq_length])],
        #        args.vocab_size)
        # L2 penalty on the output projection weights.
        self.reg_cost = tf.reduce_sum(1e-1 * (tf.nn.l2_loss(softmax_w)))
        target = tf.cast(self.targets, tf.float32)
        self.target_vector = tf.reshape(target, [-1])
        # NOTE(review): this is a squared *ratio* (logits / target), not a
        # squared difference; a zero target yields inf/NaN — confirm intended.
        loss = tf.pow(self.logits / self.target_vector, 2)
        self.cost = tf.reduce_sum(loss) / args.batch_size / args.seq_length + self.reg_cost
        self.final_state = last_state
        # Learning rate is assigned externally by the training loop.
        self.lr = tf.Variable(0.0, trainable=False)
        tvars = tf.trainable_variables()
        grads, _ = tf.clip_by_global_norm(tf.gradients(self.cost, tvars),
                args.grad_clip)
        optimizer = tf.train.AdamOptimizer(self.lr)
        self.train_op = optimizer.apply_gradients(zip(grads, tvars))

    def sample(self, sess, chars, vocab, num=200, prime='The ', sampling_type=1):
        """Generate ``num`` characters from the model, seeded with ``prime``.

        sampling_type: 0 = argmax every step, 1 = sample every step (default),
        2 = sample only after a space, argmax otherwise.
        NOTE(review): assumes vocab maps chars to indices and probs spans the
        vocabulary, which conflicts with vocab_size = 1 set in __init__.
        """
        state = self.cell.zero_state(1, tf.float32).eval()
        # Warm up the hidden state on all but the last prime character.
        for char in prime[:-1]:
            x = np.zeros((1, 1))
            x[0, 0] = vocab[char]
            feed = {self.input_data: x, self.initial_state:state}
            [state] = sess.run([self.final_state], feed)

        def weighted_pick(weights):
            # Draw an index proportionally to the (unnormalised) weights.
            t = np.cumsum(weights)
            s = np.sum(weights)
            return(int(np.searchsorted(t, np.random.rand(1)*s)))

        ret = prime
        char = prime[-1]
        for n in range(num):
            x = np.zeros((1, 1))
            x[0, 0] = vocab[char]
            feed = {self.input_data: x, self.initial_state:state}
            [probs, state] = sess.run([self.probs, self.final_state], feed)
            p = probs[0]

            if sampling_type == 0:
                sample = np.argmax(p)
            elif sampling_type == 2:
                if char == ' ':
                    sample = weighted_pick(p)
                else:
                    sample = np.argmax(p)
            else: # sampling_type == 1 default:
                sample = weighted_pick(p)

            pred = chars[sample]
            ret += pred
            char = pred
        return ret
|
import terrariumLogging
logger = terrariumLogging.logging.getLogger(__name__)
from pathlib import Path
import inspect
from importlib import import_module
import sys
import statistics
from hashlib import md5
from time import time, sleep
from operator import itemgetter
from func_timeout import func_timeout, FunctionTimedOut
import RPi.GPIO as GPIO
from retry import retry
from gpiozero import MCP3008
import smbus2
from bluepy.btle import Scanner
from terrariumUtils import terrariumUtils, terrariumCache, classproperty
class terrariumSensorException(TypeError):
  '''There is a problem with loading a hardware sensor.'''
  pass

class terrariumSensorUnknownHardwareException(terrariumSensorException):
  '''The requested hardware type is not among the loaded sensor drivers.'''
  pass

class terrariumSensorInvalidSensorTypeException(terrariumSensorException):
  '''The hardware driver does not support the requested sensor type.'''
  pass

class terrariumSensorLoadingException(terrariumSensorException):
  '''Creating or initialising the underlying hardware device failed.'''
  pass

class terrariumSensorUpdateException(terrariumSensorException):
  '''Reading a measurement from the hardware failed.'''
  pass
class terrariumSensor(object):
  '''Polymorphic base class for all hardware sensors.

  Subclasses set HARDWARE, TYPES and NAME and implement _load_hardware()
  and _get_data(); this base class adds driver discovery, measurement
  caching, retries, timeouts and optional GPIO based power management.
  '''
  HARDWARE = None
  TYPES = []
  NAME = None

  # Seconds a measurement stays valid in the cache.
  _CACHE_TIMEOUT = 30
  # Seconds before loading or reading a sensor is aborted.
  _UPDATE_TIME_OUT = 10

  @classproperty
  def available_hardware(__cls__):
    '''Return a dict {HARDWARE id: driver class}, built once by scanning the
    *_sensor.py modules next to this file and cached indefinitely.'''
    __CACHE_KEY = 'known_sensors'
    cache = terrariumCache()
    known_sensors = cache.get_data(__CACHE_KEY)
    if known_sensors is None:
      known_sensors = {}
      all_types = []
      # Start dynamically loading sensors (based on: https://www.bnmetrics.com/blog/dynamic-import-in-python3)
      for file in sorted(Path(__file__).parent.glob('*_sensor.py')):
        imported_module = import_module( '.' + file.stem, package='{}'.format(__name__))
        for i in dir(imported_module):
          attribute = getattr(imported_module, i)
          if inspect.isclass(attribute) and attribute != __cls__ and issubclass(attribute, __cls__):
            setattr(sys.modules[__name__], file.stem, attribute)
            if attribute.HARDWARE is not None:
              known_sensors[attribute.HARDWARE] = attribute
              all_types += attribute.TYPES
      # Update sensors that do not have a known type. Those are remote and scripts sensors
      all_types = list(set(all_types))
      for hardware in known_sensors:
        if len(known_sensors[hardware].TYPES) == 0:
          known_sensors[hardware].TYPES = all_types
      cache.set_data(__CACHE_KEY,known_sensors,-1)
    return known_sensors

  # Return a list with type and names of supported switches
  @classproperty
  def available_sensors(__cls__):
    '''Return a name-sorted list of dicts describing every supported sensor.'''
    data = []
    all_types = ['conductivity'] # For now 'conductivity' is only available through script or remote
    for (hardware_type, sensor) in __cls__.available_hardware.items():
      if sensor.NAME is not None:
        data.append({'hardware' : hardware_type, 'name' : sensor.NAME, 'types' : sensor.TYPES})
        all_types += sensor.TYPES
    # Remote and script sensors can handle all the known types
    all_types = list(set(all_types))
    for sensor in data:
      if len(sensor['types']) == 0:
        sensor['types'] = all_types
    return sorted(data, key=itemgetter('name'))

  @classproperty
  def sensor_types(__cls__):
    '''Return the sorted set of all known sensor type names.'''
    sensor_types = []
    for sensor in __cls__.available_sensors:
      sensor_types += sensor['types']
    return sorted(list(set(sensor_types)))

  # Return polymorph sensor....
  def __new__(cls, sensor_id, hardware_type, sensor_type, address, name = '', unit_value_callback = None, trigger_callback = None):
    '''Dispatch instantiation to the driver class registered for hardware_type.

    @raise terrariumSensorUnknownHardwareException: unknown hardware_type
    @raise terrariumSensorInvalidSensorTypeException: driver lacks sensor_type
    '''
    known_sensors = terrariumSensor.available_hardware
    if hardware_type not in known_sensors:
      raise terrariumSensorUnknownHardwareException(f'Trying to load an unknown hardware device {hardware_type} at address {address} with name {name}')
    if sensor_type not in known_sensors[hardware_type].TYPES:
      raise terrariumSensorInvalidSensorTypeException(f'Hardware does not have a {sensor_type} sensor at address {address} with name {name}')
    return super(terrariumSensor, cls).__new__(known_sensors[hardware_type])

  def __init__(self, id, _, sensor_type, address, name = '', unit_value_callback = None, trigger_callback = None):
    '''Store the sensor state and load the hardware driver.

    NOTE(review): the second (ignored) parameter is the hardware type; it is
    consumed by __new__ for driver dispatch.
    '''
    self._device = {'id'      : None,
                    'name'    : None,
                    'address' : None,
                    'type'    : sensor_type, # Readonly property
                    'device'  : None,
                    'cache_key'      : None,
                    'power_mngt'     : None,
                    'erratic_errors' : 0,
                    'last_update'    : 0,
                    'value'          : None}
    self._sensor_cache = terrariumCache()
    self.__unit_value_callback = unit_value_callback
    self.__trigger_callback = trigger_callback
    # Set the properties
    self.id = id
    self.name = name
    self.address = address
    # Load hardware can update the address value that is used for making a unique ID when not set
    self.load_hardware()
    # REMINDER: We do not take a measurement at this point. That is up to the developer to explicit request an update.

  def __power_management(self, on):
    # Some kind of 'power management' with the last gpio pin number :) https://raspberrypi.stackexchange.com/questions/68123/preventing-corrosion-on-yl-69
    if self._device['power_mngt'] is not None:
      logger.debug(f'Sensor {self} has power management enabled')
      # NOTE(review): the two debug strings below are missing the f-prefix,
      # so '{self}' is logged literally.
      if on:
        logger.debug('Enable power to the sensor {self} now.')
        GPIO.output(self._device['power_mngt'], GPIO.HIGH)
        sleep(1)
      else:
        logger.debug('Close power to the sensor {self} now.')
        GPIO.output(self._device['power_mngt'], GPIO.LOW)

  @property
  def __sensor_cache_key(self):
    # Key is hardware+address only: all sensor types on the same physical
    # device share one measurement cache entry.
    if self._device['cache_key'] is None:
      self._device['cache_key'] = md5(f'{self.HARDWARE}{self.address}'.encode()).hexdigest()
    return self._device['cache_key']

  @property
  def id(self):
    # Generated lazily from hardware, address and type when not set explicitly.
    if self._device['id'] is None:
      self._device['id'] = md5(f'{self.HARDWARE}{self.address}{self.type}'.encode()).hexdigest()
    return self._device['id']

  @id.setter
  def id(self, value):
    if value is not None:
      self._device['id'] = value.strip()

  @property
  def hardware(self):
    return self.HARDWARE

  @property
  def name(self):
    return self._device['name']

  @name.setter
  def name(self, value):
    if '' != value.strip():
      self._device['name'] = value.strip()

  @property
  def address(self):
    return self._device['address']

  @property
  def _address(self):
    # Split the comma separated address string into its non-empty parts.
    address = [ part.strip() for part in self.address.split(',') if '' != part.strip()]
    return address

  @address.setter
  def address(self, value):
    value = terrariumUtils.clean_address(value)
    if value is not None and '' != value:
      self._device['address'] = value

  # Readonly property
  @property
  def device(self):
    return self._device['device']

  # Readonly property
  @property
  def sensor_type(self):
    return self._device['type']

  # Readonly property
  @property
  def type(self):
    return self._device['type']

  @property
  def value(self):
    return self._device['value']

  @property
  def last_update(self):
    return self._device['last_update']

  @property
  def erratic(self):
    return self._device['erratic_errors']

  @erratic.setter
  def erratic(self, value):
    self._device['erratic_errors'] = value

  def get_hardware_state(self):
    # Optional hook for subclasses; the base class has no extra state.
    pass

  @retry(terrariumSensorLoadingException, tries=3, delay=0.5, max_delay=2, logger=logger)
  def load_hardware(self, reload = False):
    '''Create (or fetch from cache) the underlying hardware device handle.

    @param reload: force re-creating the device even when cached
    @raise terrariumSensorLoadingException: loading failed or timed out
           (retried up to 3 times by the decorator)
    '''
    # Get hardware cache key based on the combination of hardware and address
    hardware_cache_key = md5(f'HW-{self.HARDWARE}-{self.address}'.encode()).hexdigest()
    # Load hardware device from cache
    hardware = self._sensor_cache.get_data(hardware_cache_key)
    if reload or hardware is None:
      # Could not find valid hardware cache. So create a new hardware device
      try:
        hardware = func_timeout(self._UPDATE_TIME_OUT, self._load_hardware)
        if hardware is not None:
          # Store the hardware in the cache for unlimited of time
          self._sensor_cache.set_data(hardware_cache_key,hardware,-1)
        else:
          # Raise error that hard is not loaded with an unknown message :(
          raise terrariumSensorLoadingException(f'Unable to load sensor {self}: Did not return a device.')
      except FunctionTimedOut:
        # What ever fails... does not matter, as the data is still None and will raise a terrariumSensorUpdateException and trigger the retry
        raise terrariumSensorLoadingException(f'Unable to load sensor {self}: timed out ({self._UPDATE_TIME_OUT} seconds) during loading.')
      except Exception as ex:
        raise terrariumSensorLoadingException(f'Unable to load sensor {self}: {ex}')
    self._device['device'] = hardware
    # Check for power management features and enable it if set
    if self._device['power_mngt'] is not None:
      GPIO.setup(self._device['power_mngt'], GPIO.OUT)

  # When we get Runtime errors retry up to 3 times
  @retry(terrariumSensorUpdateException, tries=3, delay=0.5, max_delay=2, logger=logger)
  def get_data(self):
    '''Take one measurement, guarded by a timeout and wrapped in the
    optional power management cycle.

    @raise terrariumSensorUpdateException: reading failed or returned None
           (retried up to 3 times by the decorator)
    '''
    data = None
    self.__power_management(True)
    try:
      data = func_timeout(self._UPDATE_TIME_OUT, self._get_data)
    except FunctionTimedOut:
      # What ever fails... does not matter, as the data is still None and will raise a terrariumSensorUpdateException and trigger the retry
      logger.error(f'Sensor {self} timed out after {self._UPDATE_TIME_OUT} seconds during updating...')
    except Exception as ex:
      logger.error(f'Sensor {self} has exception: {ex}')
    self.__power_management(False)
    if data is None:
      raise terrariumSensorUpdateException(f'Invalid reading from sensor {self}')
    return data

  def update(self, force = False):
    '''Return the current measurement value, served from cache when fresh.

    @param force: bypass the cache and force a new hardware reading
    @raise terrariumSensorLoadingException: hardware was never loaded
    '''
    if self._device['device'] is None:
      raise terrariumSensorLoadingException(f'Sensor {self} is not loaded! Can not update!')
    starttime = time()
    data = self._sensor_cache.get_data(self.__sensor_cache_key)
    # set_running acts as a lock so only one caller refreshes the cache entry.
    if (data is None or force) and self._sensor_cache.set_running(self.__sensor_cache_key):
      logger.debug(f'Start getting new data from sensor {self}')
      try:
        data = self.get_data()
        self._sensor_cache.set_data(self.__sensor_cache_key,data, self._CACHE_TIMEOUT)
      except Exception as ex:
        logger.error(f'Error updating sensor {self}. Check your hardware! {ex}')
      self._sensor_cache.clear_running(self.__sensor_cache_key)
    current = None if data is None or self.sensor_type not in data else data[self.sensor_type]
    if current is None:
      self._sensor_cache.clear_data(self.__sensor_cache_key)
    else:
      self._device['last_update'] = int(starttime)
      self._device['value'] = current
    return current

  def stop(self):
    # Release the power management GPIO pin if it was claimed.
    if self._device['power_mngt'] is not None:
      GPIO.cleanup(self._device['power_mngt'])

  def __repr__(self):
    return f'{self.NAME} {self.type} named \'{self.name}\' at address \'{self.address}\''

  # Auto discovery of known and connected sensors
  @staticmethod
  def scan_sensors(unit_value_callback = None, trigger_callback = None, **kwargs):
    '''Yield sensor instances from every driver that implements _scan_sensors().'''
    for (hardware_type,sensor_device) in terrariumSensor.available_hardware.items():
      try:
        for sensor in sensor_device._scan_sensors(unit_value_callback, trigger_callback, **kwargs):
          yield sensor
      except AttributeError as ex:
        # Scanning not supported, just ignore
        pass
class terrariumAnalogSensor(terrariumSensor):
  '''Base class for analog sensors read through an MCP3008 A/D converter.'''
  HARDWARE = None
  TYPES = []
  NAME = None

  __SAMPLE_COUNT = 5

  def _load_hardware(self):
    # Address part 1 is the ADC channel; optional part 2 the SPI device number.
    parts = self._address
    spi_device = 0 if len(parts) == 1 or int(parts[1]) < 0 else int(parts[1])
    return MCP3008(channel=int(parts[0]), device=spi_device)

  def _get_data(self):
    '''Sample the analog voltage a few times and return a trimmed mean.'''
    readings = []
    for _ in range(self.__SAMPLE_COUNT):
      raw = self.device.value
      if terrariumUtils.is_float(raw):
        readings.append(float(raw))
      sleep(0.2)
    # Average after discarding the lowest and the highest reading.
    return statistics.mean(sorted(readings)[1:-1])
class terrariumI2CSensor(terrariumSensor):
  '''Base class for sensors connected through an I2C bus.'''

  @property
  def _address(self):
    # Normalise the first address part ('20' or '0x20') to an integer.
    parts = super()._address
    if type(parts[0]) is str:
      hex_text = parts[0] if parts[0].startswith('0x') else '0x' + parts[0]
      parts[0] = int(hex_text, 16)
    return parts

  def __bus_number(self):
    # Optional second address part selects the I2C bus (default bus 1).
    parts = self._address
    if len(parts) == 1 or int(parts[1]) < 1:
      return 1
    return int(parts[1])

  def _open_hardware(self):
    # Fresh SMBus handle, usable as a context manager by callers.
    return smbus2.SMBus(self.__bus_number())

  def _load_hardware(self):
    # Keep the device address together with an open bus handle.
    return (self._address[0], smbus2.SMBus(self.__bus_number()))

  # def __exit__(self):
  #   print('I2C close with block')
class terrariumI2CSensorMixin():
  '''Shared measurement logic for I2C temperature/humidity sensors that use
  triggered no-hold reads. NOTE(review): the conversion constants suggest
  SHT21/HTU21-class devices — confirm against the datasheet.'''
  # control constants
  SOFTRESET = 0xFE
  SOFTRESET_TIMEOUT = 0.1
  TEMPERATURE_TRIGGER_NO_HOLD = 0xF3
  TEMPERATURE_WAIT_TIME = 0.1
  HUMIDITY_TRIGGER_NO_HOLD = 0xF5
  HUMIDITY_WAIT_TIME = 0.1

  def __soft_reset(self, i2c_bus):
    # Reset the chip and give it time to come back up.
    i2c_bus.write_byte(self.device[0], self.SOFTRESET)
    sleep(self.SOFTRESET_TIMEOUT)

  def __read_raw(self, i2c_bus, trigger, wait_time):
    # Request a measurement, wait, then read the two result bytes.
    i2c_bus.write_byte(self.device[0], trigger)
    sleep(wait_time)
    msb = i2c_bus.read_byte(self.device[0])
    try:
      lsb = i2c_bus.read_byte(self.device[0])
    except Exception:
      # Some devices deliver only one byte; fall back to duplicating it.
      lsb = msb
    return (msb, lsb)

  def _get_data(self):
    '''Return a dict with the temperature/humidity readings this sensor supports.'''
    measurements = {}
    with self._open_hardware() as i2c_bus:
      # The datasheet recommends a soft reset before measuring.
      self.__soft_reset(i2c_bus)
      if 'temperature' in self.TYPES:
        msb, lsb = self.__read_raw(i2c_bus, self.TEMPERATURE_TRIGGER_NO_HOLD, self.TEMPERATURE_WAIT_TIME)
        measurements['temperature'] = ((msb * 256.0 + lsb) * 175.72 / 65536.0) - 46.85
      if 'humidity' in self.TYPES:
        msb, lsb = self.__read_raw(i2c_bus, self.HUMIDITY_TRIGGER_NO_HOLD, self.HUMIDITY_WAIT_TIME)
        measurements['humidity'] = ((msb * 256.0 + lsb) * 125.0 / 65536.0) - 6.0
    return measurements
"""
TCA9548A I2C switch driver, Texas instruments
8 bidirectional translating switches
I2C SMBus protocol
Manual: tca9548.pdf
Source: https://github.com/IRNAS/tca9548a-python/blob/master/tca9548a.py
Added option for different I2C bus
"""
class TCA9548A(object):
  '''TCA9548A 8-channel bidirectional I2C switch driver (Texas Instruments).

  All channel state lives in one control register (bit N = channel N).
  When initialisation fails, ``i2c_bus`` is set to None and every later
  operation fails soft (returns None/False instead of raising).

  Fix: the bare ``except:`` clauses were narrowed to ``except Exception:``
  so SystemExit/KeyboardInterrupt are no longer swallowed.
  '''

  def __init__(self, address, bus = 1):
    """Init smbus channel and tca driver on specified address."""
    try:
      self.PORTS_COUNT = 8 # number of switches
      self.i2c_bus = smbus2.SMBus(bus)
      self.i2c_address = address
      if self.get_control_register() is None:
        raise ValueError
    except ValueError:
      logger.error("No device found on specified address!")
      self.i2c_bus = None
    except Exception:
      logger.error("Bus on channel {} is not available.".format(bus))
      logger.info("Available busses are listed as /dev/i2c*")
      self.i2c_bus = None

  def get_control_register(self):
    """Read value (length: 1 byte) from control register; None on failure."""
    try:
      return self.i2c_bus.read_byte(self.i2c_address)
    except Exception:
      # Bus missing or unreadable: report failure instead of raising.
      return None

  def get_channel(self, ch_num):
    """Get channel state (specified with ch_num), return 0=disabled or 1=enabled, None on error."""
    if ch_num < 0 or ch_num > self.PORTS_COUNT - 1:
      return None
    register = self.get_control_register()
    if register is None:
      return None
    return (register >> ch_num) & 1

  def set_control_register(self, value):
    """Write value (length: 1 byte) to control register; True on success."""
    try:
      if value < 0 or value > 255:
        return False
      self.i2c_bus.write_byte(self.i2c_address, value)
      return True
    except Exception:
      return False

  def set_channel(self, ch_num, state):
    """Change state (0=disable, 1=enable) of a channel specified in ch_num."""
    if ch_num < 0 or ch_num > self.PORTS_COUNT - 1:
      return False
    if state != 0 and state != 1:
      return False
    current_value = self.get_control_register()
    if current_value is None:
      return False
    if state:
      new_value = current_value | 1 << ch_num
    else:
      new_value = current_value & (255 - (1 << ch_num))
    return self.set_control_register(new_value)

  def __del__(self):
    """Driver destructor."""
    self.i2c_bus = None
class terrariumBluetoothSensor(terrariumSensor):
    """Base class for Bluetooth LE sensors: address parsing plus scanning."""

    # Ignore devices with a weaker signal than this (dBm).
    __MIN_DB = -90
    # Seconds to scan per bluetooth adapter.
    __SCAN_TIME = 3

    @property
    def _address(self):
        """Return [mac_address, adapter_index]; the index defaults to 0.

        NOTE(review): `terrariumUtils.is_float(address[1]) > 0` compares the
        *result of is_float* with 0 — presumably `float(address[1]) > 0` was
        intended; confirm before relying on non-zero adapter indexes.
        """
        address = super()._address
        if len(address) == 1:
            address.append(0)
        elif len(address) == 2:
            address[1] = int(address[1]) if terrariumUtils.is_float(address[1]) and terrariumUtils.is_float(address[1]) > 0 else 0

        return address

    @staticmethod
    def _scan_sensors(sensorclass, ids = [], unit_value_callback = None, trigger_callback = None):
        """Yield terrariumSensor instances for BLE devices whose advertised
        name (AD type 9) matches one of `ids`.

        Tries up to 10 bluetooth adapters and stops at the first one that
        scans successfully. NOTE(review): `ids = []` is a mutable default
        argument — harmless while only read, but fragile.
        """
        # Due to multiple bluetooth dongles, we are looping 10 times to see which devices can scan. Exit after first success
        ok = True
        for counter in range(10):
            try:
                devices = Scanner(counter).scan(terrariumBluetoothSensor.__SCAN_TIME)
                for device in devices:
                    if device.rssi > terrariumBluetoothSensor.__MIN_DB and device.getValueText(9) is not None and device.getValueText(9).lower() in ids:
                        for sensor_type in sensorclass.TYPES:
                            logger.debug(sensor_type, sensorclass, device.addr)
                            yield terrariumSensor(None,
                                                  sensorclass.HARDWARE,
                                                  sensor_type,
                                                  device.addr + ('' if counter == 0 else f',{counter}'),
                                                  f'{sensorclass.NAME} measuring {sensor_type}',
                                                  unit_value_callback = unit_value_callback,
                                                  trigger_callback = trigger_callback)

                # we found devices, so this device is ok! Stop trying more bluetooth devices
                break
            except Exception as ex:
                ok = False

        if not ok:
            logger.warning('Bluetooth scanning is not enabled for normal users or there are zero Bluetooth LE devices available.... bluetooth is disabled!')

        return []
|
import os
import matplotlib.pyplot as plt
import numpy as np
import scipy as sp
import my_config

# Input WAV location comes from a local, user-created config module.
path = my_config.ROOT_DIR # Please create your config file
file = my_config.FILE # Please create your config file

import wave
def time_series(file, i_ch=0):
    """Read a WAV file and return (sample_rate, time_axis, channel_samples).

    Parameters:
        file: path to a 16-bit PCM WAV file.
        i_ch: index of the channel to return.

    Returns:
        (fs, Time, samples) where fs is the frame rate in Hz, Time is a
        numpy array of sample times in seconds and samples is the list of
        16-bit samples for the requested channel.

    Fixes over the original: np.fromstring was removed from NumPy (use
    np.frombuffer with an explicit dtype), and np.linspace requires an
    integer `num` — the float division made this a TypeError on Python 3.
    """
    with wave.open(file, 'r') as wav_file:
        # Extract raw interleaved 16-bit samples from the WAV file.
        signal = wav_file.readframes(-1)
        signal = np.frombuffer(signal, dtype=np.int16)

        # De-interleave the frames into one list per channel.
        n_ch = wav_file.getnchannels()
        channels = [[] for _ in range(n_ch)]
        for index, datum in enumerate(signal):
            channels[index % n_ch].append(datum)

        # Time axis: n_frames samples at fs Hz.
        fs = wav_file.getframerate()
        n_frames = len(signal) // n_ch
        Time = np.linspace(0, n_frames / fs, num=n_frames)

        return fs, Time, channels[i_ch]
# ---- Raw time series -------------------------------------------------------
fs, t, y = time_series(os.path.join(path, file), i_ch=0)

plt.figure(1)
plt.plot(t, y)
plt.title('Time series (Fs = {})'.format(fs))
plt.xlabel('Time [s]')
plt.ylabel('Signal')
plt.grid()

# ---- Remove the linear trend / DC offset -----------------------------------
from scipy.signal import detrend
y_detrend = detrend(y)

plt.figure(2)
plt.plot(t, y_detrend)
plt.title('Time series (Fs = {})'.format(fs))
plt.xlabel('Time [s]')
plt.ylabel('Signal-detrend')
plt.grid()

# ---- Auto-correlation ------------------------------------------------------
from scipy.signal import correlate, convolve
corr = correlate(y_detrend, y_detrend, mode='full')
n_data = np.minimum(len(t), len(corr))

plt.figure(3)
plt.plot(t[0:n_data], corr[0:n_data])
plt.title('Auto-Correlation (Fs = {})'.format(fs))
plt.xlabel('Time Lag [s]')
plt.ylabel('Auto-Correlation')
plt.grid()

# ---- Low-pass Butterworth filter -------------------------------------------
from scipy.signal import butter, lfilter
cutoff = 500
N = 4  # filter order
Wn = cutoff / (fs * 0.5)  # cutoff normalized to the Nyquist frequency
b, a = butter(N, Wn, btype='low', analog=False)
y_filtered = lfilter(b, a, y_detrend)  # low pass filter

plt.figure(4)
plt.plot(t, y_filtered)
plt.title('Time series (Fs = {}) (Cutoff Freq. = {})'.format(fs, cutoff))
plt.xlabel('Time [s]')
plt.ylabel('Signal - filtered')
plt.grid()

# ---- FFT -------------------------------------------------------------------
T = 1.0 / fs  # sample interval
n_sample = len(y_filtered)
freq = np.linspace(0.0, 1.0 / (2.0 * T), n_sample // 2)
# Fixed: in modern SciPy `scipy.fft` is a module, not a function, so the
# original `sp.fft(y_filtered)` raises TypeError; use NumPy's FFT instead.
yf = np.fft.fft(y_filtered)

plt.figure(5)
plt.plot(freq, 2.0 / n_sample * np.abs(yf[0:n_sample // 2]))
plt.title('FFT')
plt.xlabel('Freq. [Hz]')
plt.ylabel('Fourier Coef.')
plt.grid()

# ---- Welch power spectral density ------------------------------------------
from scipy.signal import welch
nperseg = fs // 4  # size of segment to fft
noverlap = nperseg // 100 * 90  # segments overlapped at 90%
# Fixed: pass the window by name; `sp.signal.hamming` needed an explicit
# `import scipy.signal` and the function moved to scipy.signal.windows.
f, Pxx = welch(y_filtered, fs=fs, nperseg=nperseg, noverlap=noverlap, window='hamming')

plt.figure(6)
plt.plot(f, Pxx)
plt.title('PSD')
plt.xlabel('Freq. [Hz]')
plt.ylabel('Power')
plt.grid()

# ---- Spectrogram ------------------------------------------------------------
from scipy.signal import spectrogram
nperseg = fs // 4  # size of segment to fft
noverlap = nperseg // 100 * 90  # segments overlapped at 90%
f, t, Sxx = spectrogram(y_filtered, fs=fs, nperseg=nperseg, noverlap=noverlap, window='hamming')

plt.figure(7)
plt.pcolormesh(t, f, Sxx)
plt.title('Spectrogram')
plt.xlabel('Time [s]')
plt.ylabel('Freq. [Hz]')
plt.grid()

plt.show()
|
self.Step (Message = "Receptionist-N ->> Klient-N [genvej: fokus-modtagerliste] (måske)")
self.Step (Message = "Receptionist-N ->> Klient-N [retter modtagerlisten]")
|
import re
import scipy.interpolate
import numpy as np
vectors = []  # NOTE(review): module-level list, unused within this chunk — confirm before removing
class OmnetVector:
    """Parser for OMNeT++ .vec vector output files.

    Collects, per vector id, the (time, value) series plus the mapping from
    (statistic name, module) to vector id, and any file-level attributes.
    """

    def __init__(self, file_input):
        """Parse an iterable of lines from a .vec file."""
        self.vectors = {}
        self.dataTime = {}
        self.dataValues = {}
        self.maxtime = 0
        self.attrs = {}
        for line in file_input:
            # Data line: "<id>\t<event-no>\t<time>\t<value>"
            m = re.search(r"([0-9]+)\t([0-9]+)\t([0-9.e\-+]+)\t([0-9.e\-+na]+)", line)
            if m:
                vector = int(m.group(1))
                if vector not in self.dataTime:
                    self.dataTime[vector] = []
                    self.dataValues[vector] = []
                time = float(m.group(3))
                self.dataTime[vector].append(time)
                self.maxtime = max(self.maxtime, time)
                self.dataValues[vector].append(float(m.group(4)))
            else:
                # Declaration, e.g.:
                # vector 7 Net802154.host[0].ipApp[0] referenceChangeStat:vector ETV
                m = re.search(r"vector *([0-9]*) *([^ ]*) *(.*):vector", line)
                if m:
                    number = int(m.group(1))
                    module = m.group(2)
                    name = m.group(3)
                    if name not in self.vectors:
                        self.vectors[name] = {}
                    self.vectors[name][module] = number
                else:
                    # File attribute line: "attr <name> <value>"
                    m = re.search(r"attr ([^ ]*) ([^ ]*)\n", line)
                    if m:
                        self.attrs[m.group(1)] = m.group(2)

    def get_vector(self, name, module, resample=None):
        """Return (times, values) for a statistic of a module.

        When `resample` is given, the series is resampled on a regular grid
        with that step using zero-order-hold interpolation: points before
        the first sample are 0 and points after the last hold its value.
        """
        num = self.vectors[name][module]
        time, values = self.dataTime[num], self.dataValues[num]
        if resample is not None:  # fixed: was `!= None`
            newpoints = np.arange(0, self.maxtime, resample)
            interpolator = scipy.interpolate.interp1d(
                time, values, 'zero', assume_sorted=True,
                bounds_error=False, fill_value=(0, values[-1]))
            # call the interpolator directly instead of via .__call__()
            return (newpoints, interpolator(newpoints))
        return (time, values)

    def get_attr(self, name):
        """Return the value of a file attribute (KeyError if absent)."""
        return self.attrs[name]
|
"""598. Split Divisibilities
https://projecteuler.net/problem=598
Consider the number 48.
There are five pairs of integers $a$ and $b$ ($a \leq b$) such that $a \times
b=48$: (1,48), (2,24), (3,16), (4,12) and (6,8).
It can be seen that both 6 and 8 have 4 divisors.
So of those five pairs one consists of two integers with the same number of
divisors.
In general:
Let $C(n)$ be the number of pairs of positive integers $a \times b=n$, ($a
\leq b$) such that $a$ and $b$ have the same number of divisors;
so $C(48)=1$.
You are given $C(10!)=3$: (1680, 2160), (1800, 2016) and (1890,1920).
Find $C(100!)$
"""
|
import pyes
import os
from models import *
from sqlalchemy import select
from downloader import download
import utils
import re
import time
class Search(object):
    """Thin wrapper around a pyes ElasticSearch connection for one index/type."""

    def __init__(self, host, index, map_name, mapping=None, id_key=None):
        self.es = pyes.ES(host)
        self.index = index
        self.map_name = map_name
        self.mapping = mapping
        self.id_key = id_key

    def create_index(self):
        """Create the index if missing and register the mapping, if any."""
        self.es.create_index_if_missing(self.index)
        if self.mapping:
            type_body = {'properties': self.mapping}
            if self.id_key:
                # Route document ids from the configured field.
                type_body['_id'] = {'path': self.id_key}
            self.es.put_mapping(self.map_name, {self.map_name: type_body}, [self.index])
        self.es.refresh(self.index)

    def index_item(self, item):
        """Index one document and refresh so it is searchable immediately."""
        self.es.index(item, self.index, self.map_name)
        self.es.refresh(self.index)
def convert_to_document(revision):
    """Flatten a bill revision and its parent bill into a single dict,
    attaching the downloaded source file (when available) as 'document'."""
    document = {}
    skip_rev = ('metadata', 'bill')
    skip_bill = ('metadata', 'id', 'bill_revs')
    # Public attributes of the revision, minus relational/meta fields.
    for attr in dir(revision):
        if not attr.startswith('_') and attr not in skip_rev:
            document[attr] = getattr(revision, attr)
    # Parent bill attributes overwrite same-named revision attributes.
    for attr in dir(revision.bill):
        if not attr.startswith('_') and attr not in skip_bill:
            document[attr] = getattr(revision.bill, attr)
    full_path = download(document['url'])
    if full_path:
        document['document'] = pyes.file_to_attachment(full_path)
    return document
def initial_index():
    """Create the bill index with its mapping and index every bill revision.

    Connects to a local elasticsearch node, registers an attachment-capable
    mapping, then walks all BillRevision rows joined to their Bill and
    indexes one document per revision, sleeping 5s between documents.
    """
    host = '127.0.0.1:9200'
    index = 'bill-index'
    map_name = 'bill-type'
    # 'document' stores the downloaded file as an attachment; the other
    # fields mirror Bill/BillRevision columns flattened by convert_to_document.
    mapping = {
        'document':{
            'type':'attachment',
            'fields':{
                "title" : { "store" : "yes" },
                "file" : {
                    "term_vector":"with_positions_offsets",
                    "store":"yes"
                }
            }
        },
        'name':{
            'type':'string',
            'store':'yes',
            'boost':1.0,
            'index':'analyzed'
        },
        'long_name':{
            'type':'string',
            'store':'yes',
            'boost':1.0,
            'index':'analyzed'
        },
        'status':{
            'type':'string',
            'store':'yes',
        },
        'year':{
            'type':'integer',
            'store':'yes'
        },
        'read_by':{
            'type':'string',
            'store':'yes',
            'index':'analyzed'
        },
        'date_presented':{
            'type':'date',
            'store':'yes'
        },
        'bill_id':{
            'type':'integer',
            'store':'yes'
        },
        'id':{
            'type':'integer',
            'store':'yes'
        }
    }
    search = Search(host,index,map_name,mapping)
    search.create_index()
    initdb()
    session = DBSession()
    revision = (session.query(BillRevision)
                .join((BillRevision.bill,Bill)).all()
                )
    for rev in revision:
        temp = convert_to_document(rev)
        search.index_item(temp)
        # Throttle indexing so the node is not overwhelmed.
        time.sleep(5)
def index_single(rev_id):
    """Index one bill revision, identified by its primary key."""
    host = '127.0.0.1:9200'
    index = 'bill-index'
    map_name = 'bill-type'
    initdb()
    session = DBSession()
    revision = session.query(BillRevision).get(rev_id)
    document = convert_to_document(revision)
    # Index without re-registering the mapping.
    Search(host, index, map_name).index_item(document)
# Script entry point: run the full indexing job.
if __name__ == '__main__':
    initial_index()
|
import requests
from bs4 import BeautifulSoup
def trade_spider(max_pages):
    """Crawl up to `max_pages` listing pages and print each item link's href.

    Fixes over the original: the imported module is `requests`, not the
    undefined name `request`; the page counter is actually advanced so the
    while loop terminates; an explicit BeautifulSoup parser is passed; and
    the `href` variable (not the literal string "href") is printed.
    """
    page = 1
    while page <= max_pages:
        url = "https://thenewboston.com/videos.php?cat=98&video=20144"  # + str(page)
        source_code = requests.get(url)
        plain_text = source_code.text
        soup = BeautifulSoup(plain_text, "html.parser")
        for link in soup.findAll("a", {"class": "itemname"}):
            href = link.get("href")
            print(href)
        page += 1
trade_spider(1)  # NOTE(review): runs the crawler at module import time, before get_single_item_data is defined
def get_single_item_data(item_url):
    """Fetch one item page and print the text of every 'i-name' link.

    Fixes over the original: `requests` (the imported module) instead of the
    undefined `request`, and an explicit BeautifulSoup parser.
    """
    source_code = requests.get(item_url)
    plain_text = source_code.text
    soup = BeautifulSoup(plain_text, "html.parser")
    for item_name in soup.findAll("a", {"class": "i-name"}):
        print(item_name.string)
|
import os
import sys
# Standard Django management entry point (manage.py).
if __name__ == "__main__":
    os.environ.setdefault("DJANGO_SETTINGS_MODULE", "djangorest.settings")
    try:
        from django.core.management import execute_from_command_line
    except ImportError:
        # The above import may fail for some other reason. Ensure that the
        # issue is really that Django is missing to avoid masking other
        # exceptions on Python 2.
        try:
            import django
        except ImportError:
            raise ImportError(
                "Couldn't import Django. Are you sure it's installed and "
                "available on your PYTHONPATH environment variable? Did you "
                "forget to activate a virtual environment?"
            )
        raise
    execute_from_command_line(sys.argv)
|
# Demonstration of range() argument handling. The loop bodies are no-ops:
# rebinding the loop variable does not affect iteration.
for i in range(0):
    i += 1

for j in range(0, 1, 3):
    j += 1

for k in range(9, 1, -9):
    k += 1

for n in range(0, 1.1): # Error on this line -- range() only accepts integers, so the float stop raises TypeError
    n += 1

for m in range(4, 5):
    m += 1
|
import datetime
from sqlalchemy import MetaData, Table, Column, String, Integer
from Interface import AbstractDataObject
from utils.String import attributes_repr
def define_event_table(meta: MetaData):
    """Return the SQLAlchemy Table for 'events'.

    The composite primary key (book_id, reader_id, event_date, event_type)
    identifies a distinct event; 'times' counts repeated occurrences.
    """
    return Table(
        'events', meta,
        Column('book_id', String, primary_key=True),
        Column('reader_id', String, primary_key=True),
        Column('event_date', String, primary_key=True),
        Column('event_type', String, primary_key=True),
        Column('times', Integer),
    )
class Event(AbstractDataObject):
    """A library event (book, reader, date, type) with an occurrence counter.

    Two events are equal when all four identifying attributes match exactly
    (and the concrete types are the same); `times` counts how often the same
    event was seen.
    """

    __attributes__ = ('book_id', 'reader_id', 'event_date', 'event_type')
    __repr__ = attributes_repr

    def __init__(self, book_id: str, reader_id: str, event_date: str, event_type: str):
        self.book_id = book_id
        self.reader_id = reader_id
        self.event_date = event_date
        self.event_type = event_type
        self.times = 1  # occurrences of this exact event

    def __eq__(self, other):
        # Flattened from a nested if/else pyramid; same comparison order
        # (date first, then the remaining identifying attributes).
        if type(self) != type(other):
            return False
        return (self.event_date == other.event_date
                and self.book_id == other.book_id
                and self.reader_id == other.reader_id
                and self.event_type == other.event_type)

    @property
    def date(self):
        """event_date parsed as a datetime.date (format YYYYMMDD)."""
        return datetime.datetime.strptime(self.event_date, '%Y%m%d').date()

    def update_from(self, value):
        """Merge a duplicate event by bumping the occurrence counter.

        Raises NotImplementedError when `value` is not the same type.
        """
        if type(value) != type(self):
            raise NotImplementedError
        if self == value:
            self.times += 1
        return self

    def compare_by(self, **kwargs):
        """Return True if every given attribute matches this event.

        Raises AttributeError for names outside __attributes__.
        """
        for tag, expected in kwargs.items():
            if tag not in self.__attributes__:
                raise AttributeError('Event has no attribute {}'.format(tag))
            if expected != getattr(self, tag):
                return False
        return True

    @classmethod
    def init_from(cls, value):
        """Build an Event from a raw record dict (keys sysID/userID/...).

        Raises NotImplementedError for any other input type.
        """
        if not isinstance(value, dict):
            raise NotImplementedError
        return cls(
            book_id=value['sysID'],
            reader_id=value['userID'],
            event_date=value['event_date'],
            event_type=value['event_type'],
        )

    @property
    def hashable_key(self):
        """Stable string key uniquely identifying this event."""
        return '|'.join([self.book_id, self.reader_id, self.event_date, self.event_type])
|
import logging
import os
import urllib
from cvmfsreplica.cvmfsreplicaex import PluginConfigurationFailure
from cvmfsreplica.interfaces import RepositoryPluginAcceptanceInterface
import cvmfsreplica.pluginsmanagement as pm
class Updatedserver(RepositoryPluginAcceptanceInterface):
    """Acceptance plugin: accept a replication run only when the remote
    .cvmfspublished revision differs from the locally stored one.

    NOTE(review): uses Python 2's `urllib.urlopen`; this module is Python 2
    code as written.
    """

    def __init__(self, repository, conf):
        # repository: the repository being replicated; conf: plugin config.
        self.log = logging.getLogger('cvmfsreplica.updatedserver')
        self.repository = repository
        self.conf = conf
        try:
            self.url = self.repository.cvmfsconf.get('CVMFS_STRATUM0')
            self.reportplugins = pm.readplugins(self.repository,
                                                'repository',
                                                'report',
                                                self.conf.namespace('acceptance.updatedserver.',
                                                                    exclude=True)
                                                )
        except:
            # NOTE(review): the bare except hides the original error; only the
            # generic message below reaches the operator.
            raise PluginConfigurationFailure('failed to initialize Updatedserver plugin')
        self.log.debug('plugin Updatedserver initialized properly')

    #def verify(self):
    #    '''
    #    checks if file .cvmfspublished
    #    was updated more recently than variable
    #    repository.last_published
    #    '''
    #    try:
    #        # FIXME
    #        # maybe we should try a couple of times in case of failures before failing definitely
    #        for line in urllib.urlopen('%s/.cvmfspublished' %self.url).readlines():
    #            if line.startswith('T'):
    #                time = int(line[1:-1])
    #                break
    #        out = time > self.repository.last_published
    #        if out == False:
    #            self._notify_failure('No new content at the server for repository %s' \
    #                %self.repository.repositoryname)
    #        return out
    #    except:
    #        self.log.warning('file %s/.cvmfspublished cannot be read. Returning False' %self.url)
    #        return False

    def verify(self):
        '''
        checks if the revision number in local copy of .cvmfspublished
        is different that the revision number of remote .cvmfspublished
        '''
        try:
            # FIXME
            # maybe we should try a couple of times in case of failures before failing definitely
            # The 'S' line of .cvmfspublished carries the revision number.
            for line in urllib.urlopen('%s/.cvmfspublished' %self.url).readlines():
                if line.startswith('S'):
                    serverrevision = int(line[1:-1])
                    break

            # read the local revision number
            cvmfs_upstream_storage = self.repository._get_cvmfs_upstream_storage() # FIXME, this should not be here
            localfile = '%s/.cvmfspublished' %cvmfs_upstream_storage
            if not os.path.isfile(localfile):
                # No local copy yet: treat as "update needed".
                self.log.warning('local file %s does not exist. Returning True' %localfile)
                return True
            else:
                # FIXME: too much duplicated code
                for line in open(localfile):
                    if line.startswith('S'):
                        localrevision = int(line[1:-1])
                        break

                out = (serverrevision != localrevision)
                if out == False:
                    self._notify_failure('No new content at the server for repository %s' \
                                         %self.repository.repositoryname)
                return out
        except:
            # Best-effort: any failure (network, parse, missing 'S' line)
            # is reported as "no update".
            self.log.warning('file %s/.cvmfspublished cannot be read. Returning False' %self.url)
            return False

    def _notify_failure(self, msg):
        # Fan the failure message out to every configured report plugin.
        for report in self.reportplugins:
            report.notifyfailure(msg)
|
from django.db import models
class Licensor(models.Model):
    """A content licensor, identified by a unique display name."""

    name = models.CharField(max_length=255, unique=True)

    def __unicode__(self):
        # Python 2 string representation (used e.g. by the Django admin).
        return self.name

    class Meta:
        ordering = ['name']
|
# Python 2 client for the bob system Unix-domain socket.
# Usage: script.py <get|set> <key> [value] -- 'set' appends the value,
# any other command sends the literal placeholder "null".
import socket
import sys
import time

server_add = './bob_system_socket'
sock = socket.socket(socket.AF_UNIX, socket.SOCK_STREAM)

# Wire format: "<command> <key> <value|null>"
message = sys.argv[1]+" "+sys.argv[2]
if sys.argv[1] == 'set':
    message+= " "+sys.argv[3]
else:
    message+= " null"

try:
    sock.connect(server_add)
except socket.error, msg:
    # Python 2 except syntax; a connection failure is fatal.
    print >>sys.stderr, msg
    sys.exit(1)

sock.send(message)
data = sock.recv(1024)
if data: print 'reply from server:', data
time.sleep(1)
sock.close()
|
import inspect
from django.utils.translation import activate
class MenuItemMixin:
    """
    Injects every view attribute whose name starts with 'menu_' into the
    template context produced by the view this mixin is applied to.
    Useful to highlight the active item of a navigation component.
    """

    def get_context_data(self, **kwargs):
        context = super(MenuItemMixin, self).get_context_data(**kwargs)
        # Non-callable members of the view instance (and its classes).
        members = inspect.getmembers(self, lambda attr: not inspect.isroutine(attr))
        context.update({name: value for name, value in members if name.startswith("menu_")})
        return context
class ActivateLegacyLanguageViewMixin:
    """Activates English or French based on a legacy `lang` parameter."""

    def activate_legacy_language(self, *args, **kwargs):
        """Activate 'en' when requested via URL kwarg or query string, else 'fr'.

        NOTE(review): `and` binds tighter than `or`, so this reads as
        (kwarg lang == 'en') or (GET lang == 'en') — presumably intended,
        but the unparenthesized mix is easy to misread; confirm.
        """
        if "lang" in kwargs and kwargs["lang"] == "en" or self.request.GET.get("lang") == "en":
            activate("en")
        else:
            activate("fr")
|
import xauth
import xtemplate
import xutils
import os
import re
import sys
import platform
import xconfig
from xutils import dateutil
from xutils import fsutil
from xutils import Storage
from xutils import mem_util
try:
import sqlite3
except ImportError:
sqlite3 = None
def get_xnote_version():
    """Return the xnote version string from the global config."""
    return xconfig.get_global_config("system.version")
def get_mem_info():
    """Return memory usage as 'process/system-used/system-total'.

    Removed the dead locals of the original (`mem_total` was never used and
    the `mem_used = 0` initialization was immediately overwritten).
    """
    result = mem_util.get_mem_info()
    return "%s/%s/%s" % (result.mem_used, result.sys_mem_used, result.sys_mem_total)
def get_python_version():
    """Return the full interpreter version string (sys.version)."""
    return sys.version
def get_startup_time():
    """Return the process start time formatted for display."""
    return dateutil.format_time(xconfig.START_TIME)
def get_free_data_space():
    """Return the free space of the data directory as a human-readable string.

    Best-effort: any failure is logged and the placeholder '<未知>'
    ("unknown") is returned instead of raising.
    """
    try:
        size = fsutil.get_free_space(xconfig.get_system_dir("data"))
        return xutils.format_size(size)
    except:
        xutils.print_exc()
        return "<未知>"
class SystemInfoItem:
    """A single (name, value) row shown on the system information page."""

    def __init__(self, name = "", value = ""):
        self.name = name    # display label
        self.value = value  # preformatted value string
class InfoHandler:
    """Admin-only page rendering runtime and host system information."""

    @xauth.login_required("admin")
    def GET(self):
        # Each row on the rendered page is a (label, value) pair.
        items = [
            SystemInfoItem("Python版本", value = get_python_version()),
            SystemInfoItem("Xnote版本", value = get_xnote_version()),
            SystemInfoItem("内存信息", value = get_mem_info()),
            SystemInfoItem("磁盘可用容量", get_free_data_space()),
            SystemInfoItem("sqlite版本", sqlite3.sqlite_version if sqlite3 != None else ''),
            SystemInfoItem("CPU型号", platform.processor()),
            SystemInfoItem("操作系统", platform.system()),
            SystemInfoItem("操作系统版本", platform.version()),
            SystemInfoItem("系统启动时间", get_startup_time()),
        ]
        return xtemplate.render("system/page/system_info.html", items = items,
                                runtime_id = xconfig.RUNTIME_ID)
# URL routing table consumed by the xnote framework.
xurls = (
    r"/system/info", InfoHandler
)
|
from bottle import run, get, post, view, request, redirect, route, static_file, template
import bottle
import json
import threading
import requests
import time
import sys
# In-memory chat state: (name, message) pairs shared by all clients.
messages = set([])

@bottle.route('/static/<path:path>')
def server_static(path):
    """Serve static assets from the local 'static' directory."""
    return static_file(path, root='static')

@get('/chat')
@view('chat')
def chat():
    """Render the chat page with all messages and the caller's name."""
    name = request.query.name
    return dict(msg=list(messages), name=name)

@route('/')
def index():
    """Redirect the root URL to the chat page."""
    redirect('chat')

@post('/send')
def sendmsg():
    """Store a submitted (name, msg) pair and bounce back to the chat page.

    NOTE(review): `name` is interpolated into the redirect URL without
    percent-encoding -- names containing '&', '#' or spaces will produce a
    malformed query string; confirm whether that matters here.
    """
    name = request.forms.getunicode('name')
    msg = request.forms.getunicode('msg')
    global messages
    if name != None and msg != None:
        messages.add((name, msg))
        redirect('chat?name=' + name)
    else:
        redirect('chat')

# The listen port is taken from the first command line argument.
run(host='localhost', port=int(sys.argv[1]))
|
'''
This is a simple example of how to use the dbm.gnu module of the
standard python library

NOTES:
- the attempt to insert None as value throws an exception.
  so only strings and bytes are allowed.
'''
import dbm.gnu # for open

# 'c' opens the database read/write, creating it if necessary.
d = dbm.gnu.open('/tmp/foo.gdbm', 'c')
d['one'] = 'ehad'
d['two'] = 'shtaim'
d['three'] = None  # intentionally raises TypeError: values must be str/bytes
d.close()
|
import gc
import os
import argparse

# presumably silences TensorFlow C++ info/warning logs before the TF-backed
# util import below -- confirm that util imports TensorFlow
os.environ['TF_CPP_MIN_LOG_LEVEL'] = '2'
from util import generate_features
def get_arguments():
    """Parse and return the script's command-line arguments."""
    arg_parser = argparse.ArgumentParser(
        description='Generate features using a previously trained model')
    arg_parser.add_argument('data', type=str,
                            help='File containing the input smiles matrices')
    arg_parser.add_argument('model', type=str, help='The model file')
    arg_parser.add_argument('features', type=str,
                            help='Output file that will contain the generated features')
    arg_parser.add_argument('--batch_size', type=int, default=100,
                            help='Size of the batches (default: 100)')
    return arg_parser.parse_args()
# Parse CLI options, run feature generation, then force a GC pass.
args = get_arguments()
generate_features.generate_features(args.data, args.model, args.features, args.batch_size)
gc.collect()
|
"""
Diabicus: A calculator that plays music, lights up, and displays facts.
Copyright (C) 2016 Michael Lipschultz
This program is free software: you can redistribute it and/or modify
it under the terms of the GNU General Public License as published by
the Free Software Foundation, either version 3 of the License, or
(at your option) any later version.
This program is distributed in the hope that it will be useful,
but WITHOUT ANY WARRANTY; without even the implied warranty of
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
GNU General Public License for more details.
You should have received a copy of the GNU General Public License
along with this program. If not, see <http://www.gnu.org/licenses/>.
"""
import math
import os
import re
from functools import reduce
from .compute import ComputationError
def is_int(val):
    """Return True if val is an int or a float with zero fractional part."""
    if isinstance(val, int):
        return True
    return isinstance(val, float) and val % 1 == 0

def is_rational(val):
    """
    Return True if val is an int or float and not irrational.

    Irrationality is decided by the is_irrational function.
    """
    if not isinstance(val, (int, float)):
        return False
    return not is_irrational(val)

def is_irrational(val):
    """
    Return True if val is irrational.

    A value counts as irrational when it is transcendental (see
    is_transcendental), sqrt(2), or the golden ratio.
    """
    if is_transcendental(val):
        return True
    return val in {2**.5, GOLDEN_RATIO}

def is_transcendental(val):
    """Return True if val is a known transcendental number (pi or e)."""
    return val == math.pi or val == math.e

def is_real(val):
    """Return True if val is an int or a float."""
    return isinstance(val, (int, float))

def is_complex(val):
    """Return True if val is a complex number."""
    return isinstance(val, complex)

def is_surreal(val):
    """Surreal numbers are not supported, so this is always False."""
    return False

def is_number(val):
    """Return True if val is any built-in numeric type."""
    return isinstance(val, (int, float, complex))

def is_error(val):
    """Return True if val is a ComputationError."""
    return isinstance(val, ComputationError)
# The golden ratio, phi = (1 + sqrt(5)) / 2.
GOLDEN_RATIO = (1 + 5**0.5) / 2
# presumably a placeholder -- Graham's number is far too large to represent
GRAHAMS_NUMBER = False
# The imaginary unit.
I = complex(0, 1)
# Leading decimal digits of pi.
PI_DIGITS = (3, 1, 4, 1, 5, 9, 2, 6, 5, 3, 5, 8, 9, 7, 9, 3, 2, 3, 8, 4, 6, 2,
             6, 4, 3, 3, 8, 3, 2, 7, 9, 5, 0, 2, 8, 8, 4, 1, 9, 7, 1, 6, 9, 3,
             9, 9, 3, 7, 5, 1, 0, 5, 8, 2, 0, 9, 7, 4, 9, 4, 4, 5, 9, 2, 3, 0,
             7, 8, 1, 6, 4, 0, 6, 2, 8, 6, 2, 0, 8, 9, 9, 8, 6, 2, 8, 0, 3, 4,
             8, 2, 5, 3, 4, 2, 1, 1, 7, 0, 6, 7, 9, 8, 2, 1, 4
             )
# Table of known primes, populated once at import time by __load_primes().
PRIME_NUMBERS = []
def __load_primes():
    """
    Loads a comma-delimited list of prime numbers into PRIME_NUMBERS.

    Prime numbers are loaded from the file prime_numbers.csv in the same
    location as this python file and stores them into the global
    variable PRIME_NUMBERS.
    """
    global PRIME_NUMBERS
    path = os.path.dirname(__file__)
    with open(os.path.join(path, 'prime_numbers.csv')) as fin:
        PRIME_NUMBERS = [int(v) for v in fin.read().split(',')]
__load_primes()  # populate the prime table once at import time
def is_prime(number):
    """Return True if number is a prime number.

    Membership is checked against the loaded PRIME_NUMBERS table, so any
    value beyond the largest prime in prime_numbers.csv reports False.
    """
    return is_int(number) and number > 1 and int(number) in PRIME_NUMBERS
# Selector values for factors(); see its docstring.
FACTORS_ALL = 'all'
FACTORS_PROPER = 'proper'
FACTORS_PRIME = 'prime'
def factors(num, form=FACTORS_PROPER):
    """
    Return a list of factors for the provided number.

    If form is FACTORS_PRIME, the list is the prime factorization of num,
    with repeats, so the product of the list equals num
    (e.g. factors(12, FACTORS_PRIME) == [2, 2, 3]).

    If form is FACTORS_ALL, the list contains every positive integer that
    exactly divides num, produced as (i, num//i) pairs while scanning i up
    to sqrt(num) -- e.g. factors(12, FACTORS_ALL) == [1, 12, 2, 6, 3, 4].
    (The original docstring's example [1, 2, 3, 4, 12] was wrong: it missed
    6 and implied sorted order.)

    If form is FACTORS_PROPER (default), the result is the same as
    FACTORS_ALL but without num itself.

    If num is not an integer (as determined by is_int) greater than 1,
    return an empty list.
    """
    if not is_int(num) or num < 2:
        return []
    if form == FACTORS_PRIME:
        primes = []
        remaining = int(num)
        # Strip factors of 2 first so the main loop may step over evens.
        while remaining % 2 == 0:
            primes.append(2)
            remaining //= 2  # // keeps ints (was /=, which drifted to float)
        divisor = 3
        while remaining > 1:
            while remaining % divisor == 0:
                primes.append(divisor)
                remaining //= divisor
            divisor += 2
        return primes
    all_factors = reduce(list.__add__,
                         ([i, num // i] for i in range(1, int(num**0.5) + 1)
                          if num % i == 0))
    if form == FACTORS_PROPER:
        all_factors.remove(num)
    return all_factors
# Leading Fibonacci numbers, starting from F(0) = 0.
FIBONACCI_NUMBERS = [0, 1, 1, 2, 3, 5, 8, 13, 21, 34, 55, 89, 144, 233,
                     377, 610, 987, 1597, 2584, 4181, 6765, 10946, 17711,
                     28657, 46368, 75025, 121393, 196418, 317811, 514229,
                     832040, 1346269
                     ]
# Leading Lucas numbers, starting from L(0) = 2.
LUCAS_NUMBERS = (2, 1, 3, 4, 7, 11, 18, 29, 47, 76, 123, 199, 322, 521, 843,
                 1364, 2207, 3571, 5778, 9349, 15127, 24476, 39603, 64079,
                 103682, 167761, 271443, 439204, 710647, 1149851, 1860498,
                 3010349, 4870847, 7881196, 12752043, 20633239, 33385282
                 )
def is_subsequence_of(needle, haystack):
    """
    Return True if needle occurs as a consecutive run inside haystack.

    Both arguments must be ordered containers. The values of needle must
    appear in haystack in the same order and at adjacent positions: with
    needle=[1,2,3] and haystack=[1,1,2,3,4] the answer is True (match at
    index 1), while needle=[1,2,4] gives False (values present, not
    adjacent). An empty needle always yields False.
    """
    if not needle:
        return False
    size = len(needle)
    for start, item in enumerate(haystack):
        if item != needle[0]:
            continue
        # A match starting here would run off the end -- and so would any
        # later start, so give up immediately.
        if start + size > len(haystack):
            return False
        if all(needle[j] == haystack[start + j] for j in range(1, size)):
            return True
    return False
def is_close(num1, num2, threshold=1e-5, method='raw'):
    """
    Return True if num1 is within threshold of num2.

    method='raw' compares the absolute difference; method='pct' compares the
    absolute percent difference. Either argument may be iterable, in which
    case one close pair is enough. ComputationError values never compare
    close, and a complex number is only comparable to another complex.
    """
    if isinstance(num1, ComputationError) or isinstance(num2, ComputationError):
        return False
    elif hasattr(num1, '__iter__'):
        # Bug fix: propagate `method` into the recursive calls -- it was
        # dropped, silently forcing 'raw' comparison for iterable inputs.
        return any(is_close(n, num2, threshold, method) for n in num1)
    elif hasattr(num2, '__iter__'):
        return any(is_close(num1, n, threshold, method) for n in num2)
    elif ((isinstance(num1, complex) or isinstance(num2, complex))
            and not isinstance(num1, type(num2))):
        return False
    if method == 'pct':
        if num1 == num2 == 0:
            # Both exactly zero: percent difference is undefined, call it close.
            return True
        return abs(num1 - num2) / max(abs(v) for v in (num1, num2) if v != 0) < threshold
    return abs(num1 - num2) < threshold
|
from __future__ import absolute_import, division, print_function
__metaclass__ = type
ANSIBLE_METADATA = {'metadata_version': '1.1',
'status': ['preview'],
'supported_by': 'community'}
DOCUMENTATION = r'''
---
module: aci_bd
short_description: Manage Bridge Domains (BD) on Cisco ACI Fabrics (fv:BD)
description:
- Manages Bridge Domains (BD) on Cisco ACI Fabrics.
- More information from the internal APIC class
I(fv:BD) at U(https://developer.cisco.com/media/mim-ref/MO-fvBD.html).
author:
- Swetha Chunduri (@schunduri)
- Dag Wieers (@dagwieers)
- Jacob McGill (@jmcgill298)
requirements:
- ACI Fabric 1.0(3f)+
version_added: '2.4'
notes:
- The C(tenant) used must exist before using this module in your playbook.
The M(aci_tenant) module can be used for this.
options:
arp_flooding:
description:
- Determines if the Bridge Domain should flood ARP traffic.
- The APIC defaults new Bridge Domains to C(no).
choices: [ no, yes ]
default: no
bd:
description:
- The name of the Bridge Domain.
aliases: [ bd_name, name ]
bd_type:
description:
- The type of traffic on the Bridge Domain.
- The APIC defaults new Bridge Domains to C(ethernet).
choices: [ ethernet, fc ]
default: ethernet
description:
description:
- Description for the Bridge Domain.
enable_multicast:
description:
- Determines if PIM is enabled
- The APIC defaults new Bridge Domains to C(no).
choices: [ no, yes ]
default: no
enable_routing:
description:
- Determines if IP forwarding should be allowed.
- The APIC defaults new Bridge Domains to C(yes).
choices: [ no, yes ]
default: yes
endpoint_clear:
description:
- Clears all End Points in all Leaves when C(yes).
- The APIC defaults new Bridge Domains to C(no).
- The value is not reset to disabled once End Points have been cleared; that requires a second task.
choices: [ no, yes ]
default: no
endpoint_move_detect:
description:
- Determines if GARP should be enabled to detect when End Points move.
- The APIC defaults new Bridge Domains to C(garp).
choices: [ default, garp ]
default: garp
endpoint_retention_action:
description:
- Determines if the Bridge Domain should inherit or resolve the End Point Retention Policy.
- The APIC defaults new Bridge Domain to End Point Retention Policies to C(resolve).
choices: [ inherit, resolve ]
default: resolve
endpoint_retention_policy:
description:
- The name of the End Point Retention Policy the Bridge Domain should use when
overriding the default End Point Retention Policy.
igmp_snoop_policy:
description:
- The name of the IGMP Snooping Policy the Bridge Domain should use when
overriding the default IGMP Snooping Policy.
ip_learning:
description:
- Determines if the Bridge Domain should learn End Point IPs.
- The APIC defaults new Bridge Domains to C(yes).
choices: [ no, yes ]
ipv6_nd_policy:
description:
- The name of the IPv6 Neighbor Discovery Policy the Bridge Domain should use when
overriding the default IPv6 ND Policy.
l2_unknown_unicast:
description:
- Determines what forwarding method to use for unknown l2 destinations.
- The APIC defaults new Bridge domains to C(proxy).
choices: [ proxy, flood ]
default: proxy
l3_unknown_multicast:
description:
- Determines the forwarding method to use for unknown multicast destinations.
- The APIC defaults new Bridge Domains to C(flood).
choices: [ flood, opt-flood ]
default: flood
limit_ip_learn:
description:
- Determines if the BD should limit IP learning to only subnets owned by the Bridge Domain.
- The APIC defaults new Bridge Domains to C(yes).
choices: [ no, yes ]
default: yes
multi_dest:
description:
- Determines the forwarding method for L2 multicast, broadcast, and link layer traffic.
- The APIC defaults new Bridge Domains to C(bd-flood).
choices: [ bd-flood, drop, encap-flood ]
default: bd-flood
state:
description:
- Use C(present) or C(absent) for adding or removing.
- Use C(query) for listing an object or multiple objects.
choices: [ absent, present, query ]
default: present
tenant:
description:
- The name of the Tenant.
aliases: [ tenant_name ]
vrf:
description:
- The name of the VRF.
aliases: [ vrf_name ]
'''
EXAMPLES = r'''
- name: Add Bridge Domain
aci_bd:
host: "{{ inventory_hostname }}"
username: "{{ username }}"
password: "{{ password }}"
validate_certs: false
state: present
tenant: prod
bd: web_servers
vrf: prod_vrf
- name: Add an FC Bridge Domain
aci_bd:
host: "{{ inventory_hostname }}"
username: "{{ username }}"
password: "{{ password }}"
validate_certs: false
state: present
tenant: prod
bd: storage
bd_type: fc
vrf: fc_vrf
enable_routing: no
- name: Modify a Bridge Domain
aci_bd:
host: "{{ inventory_hostname }}"
username: "{{ username }}"
password: "{{ password }}"
validate_certs: true
state: present
tenant: prod
bd: web_servers
arp_flooding: yes
l2_unknown_unicast: flood
- name: Query All Bridge Domains
aci_bd:
host: "{{ inventory_hostname }}"
username: "{{ username }}"
password: "{{ password }}"
validate_certs: true
state: query
- name: Query a Bridge Domain
aci_bd:
host: "{{ inventory_hostname }}"
username: "{{ username }}"
password: "{{ password }}"
validate_certs: true
state: query
tenant: prod
bd: web_servers
- name: Delete a Bridge Domain
aci_bd:
host: "{{ inventory_hostname }}"
username: "{{ username }}"
password: "{{ password }}"
validate_certs: true
state: absent
tenant: prod
bd: web_servers
'''
RETURN = r''' # '''
from ansible.module_utils.aci import ACIModule, aci_argument_spec
from ansible.module_utils.basic import AnsibleModule
def main():
    """Ansible entry point: create, update, delete or query an ACI
    Bridge Domain (fvBD) under a tenant, including its VRF, IGMP snoop,
    IPv6 ND and endpoint-retention relations."""
    # Copy the shared spec before updating it: calling .update() on the
    # imported aci_argument_spec dict itself would leak these
    # module-specific options into every other ACI module loaded in the
    # same process.
    argument_spec = aci_argument_spec.copy()
    argument_spec.update(
        arp_flooding=dict(choices=['no', 'yes']),
        bd=dict(type='str', aliases=['bd_name', 'name']),
        bd_type=dict(type='str', choices=['ethernet', 'fc']),
        description=dict(type='str'),
        enable_multicast=dict(type='str', choices=['no', 'yes']),
        enable_routing=dict(type='str', choices=['no', 'yes']),
        endpoint_clear=dict(type='str', choices=['no', 'yes']),
        endpoint_move_detect=dict(type='str', choices=['default', 'garp']),
        endpoint_retention_action=dict(type='str', choices=['inherit', 'resolve']),
        endpoint_retention_policy=dict(type='str'),
        igmp_snoop_policy=dict(type='str'),
        ip_learning=dict(type='str', choices=['no', 'yes']),
        ipv6_nd_policy=dict(type='str'),
        l2_unknown_unicast=dict(choices=['proxy', 'flood']),
        l3_unknown_multicast=dict(choices=['flood', 'opt-flood']),
        limit_ip_learn=dict(type='str', choices=['no', 'yes']),
        multi_dest=dict(choices=['bd-flood', 'drop', 'encap-flood']),
        state=dict(choices=['absent', 'present', 'query'], type='str', default='present'),
        tenant=dict(type='str', aliases=['tenant_name']),
        vrf=dict(type='str', aliases=['vrf_name']),
        gateway_ip=dict(type='str', removed_in_version='2.4'),  # Deprecated starting from v2.4
        method=dict(type='str', choices=['delete', 'get', 'post'], aliases=['action'], removed_in_version='2.6'),  # Deprecated starting from v2.6
        scope=dict(type='str', removed_in_version='2.4'),  # Deprecated starting from v2.4
        subnet_mask=dict(type='str', removed_in_version='2.4'),  # Deprecated starting from v2.4
    )
    module = AnsibleModule(
        argument_spec=argument_spec,
        supports_check_mode=True,
        # bd and tenant are mandatory only for create/delete; plain
        # queries may list every Bridge Domain.
        required_if=[
            ['state', 'absent', ['bd', 'tenant']],
            ['state', 'present', ['bd', 'tenant']],
        ],
    )
    arp_flooding = module.params['arp_flooding']
    bd = module.params['bd']
    bd_type = module.params['bd_type']
    if bd_type == 'ethernet':
        # ethernet type is represented as regular, but that is not clear to the users
        bd_type = 'regular'
    description = module.params['description']
    enable_multicast = module.params['enable_multicast']
    enable_routing = module.params['enable_routing']
    endpoint_clear = module.params['endpoint_clear']
    endpoint_move_detect = module.params['endpoint_move_detect']
    if endpoint_move_detect == 'default':
        # the ACI default setting is an empty string, but that is not a good input value
        endpoint_move_detect = ''
    endpoint_retention_action = module.params['endpoint_retention_action']
    endpoint_retention_policy = module.params['endpoint_retention_policy']
    igmp_snoop_policy = module.params['igmp_snoop_policy']
    ip_learning = module.params['ip_learning']
    ipv6_nd_policy = module.params['ipv6_nd_policy']
    l2_unknown_unicast = module.params['l2_unknown_unicast']
    l3_unknown_multicast = module.params['l3_unknown_multicast']
    limit_ip_learn = module.params['limit_ip_learn']
    multi_dest = module.params['multi_dest']
    state = module.params['state']
    tenant = module.params['tenant']
    vrf = module.params['vrf']
    # Give warning when fvSubnet parameters are passed as those have been
    # moved to the aci_subnet module
    if module.params['gateway_ip'] or module.params['subnet_mask'] or module.params['scope']:
        # NOTE(review): pokes the private _warnings attribute; switch to
        # module.warn() once the minimum Ansible version provides it.
        # Implicit string concatenation replaces the old backslash line
        # continuation, which embedded a run of indentation spaces
        # inside the user-visible message.
        module._warnings = [
            "The support for managing Subnets has been moved to its own module, aci_subnet. "
            "The new modules still supports 'gateway_ip' and 'subnet_mask' along with more features"
        ]
    aci = ACIModule(module)
    aci.construct_url(
        root_class=dict(
            aci_class='fvTenant',
            aci_rn='tn-{}'.format(tenant),
            filter_target='(fvTenant.name, "{}")'.format(tenant),
            module_object=tenant,
        ),
        subclass_1=dict(
            aci_class='fvBD',
            aci_rn='BD-{}'.format(bd),
            filter_target='(fvBD.name, "{}")'.format(bd),
            module_object=bd,
        ),
        child_classes=['fvRsCtx', 'fvRsIgmpsn', 'fvRsBDToNdP', 'fvRsBdToEpRet'],
    )
    aci.get_existing()
    if state == 'present':
        # Filter out module params with null values
        aci.payload(
            aci_class='fvBD',
            class_config=dict(
                arpFlood=arp_flooding,
                descr=description,
                epClear=endpoint_clear,
                epMoveDetectMode=endpoint_move_detect,
                ipLearning=ip_learning,
                limitIpLearnToSubnets=limit_ip_learn,
                mcastAllow=enable_multicast,
                multiDstPktAct=multi_dest,
                name=bd,
                type=bd_type,
                unicastRoute=enable_routing,
                unkMacUcastAct=l2_unknown_unicast,
                unkMcastAct=l3_unknown_multicast,
            ),
            child_configs=[
                {'fvRsCtx': {'attributes': {'tnFvCtxName': vrf}}},
                {'fvRsIgmpsn': {'attributes': {'tnIgmpSnoopPolName': igmp_snoop_policy}}},
                {'fvRsBDToNdP': {'attributes': {'tnNdIfPolName': ipv6_nd_policy}}},
                {'fvRsBdToEpRet': {'attributes': {'resolveAct': endpoint_retention_action, 'tnFvEpRetPolName': endpoint_retention_policy}}},
            ],
        )
        # generate config diff which will be used as POST request body
        aci.get_diff(aci_class='fvBD')
        # submit changes if module not in check_mode and the proposed is different than existing
        aci.post_config()
    elif state == 'absent':
        aci.delete_config()
    module.exit_json(**aci.result)
# Standard script entry point: only run when executed directly, not on import.
if __name__ == "__main__":
    main()
|
# Trivial smoke-test script: prints a greeting to stdout.
print("Hello")
|
import sys
import time
from envirophat import light, weather, motion, analog
def write():
    """Sample the Enviro pHAT once and print a JSON-style reading."""
    try:
        pressure = round(weather.pressure(), 2)
        lux = light.light()
        # Output format (including the space before the closing brace)
        # is kept exactly as consumers may parse it as-is.
        print('{"light": ' + str(lux) + ', "pressure": ' + str(pressure) + ' }')
    except KeyboardInterrupt:
        # Allow a clean Ctrl-C without a traceback.
        pass
write()
|
class NodeSuperimposeTr(object):
    """Superimpose the graph ``aug`` onto ``g``.

    The root of ``aug`` replaces the payload of the existing node
    ``node_uid`` in ``g``; every other node of ``aug`` is added to ``g``
    as a fresh object, and all edges of ``aug`` are re-created between
    the mapped node ids.
    """
    def __call__(self, g, node_uid, aug):
        # First pass: map each uid in ``aug`` to a uid in ``g``.
        mapping = {}
        for src_uid in aug:
            if src_uid == aug.get_root():
                # The root lands on the designated target node.
                g[node_uid] = aug[src_uid]
                mapping[src_uid] = node_uid
            else:
                mapping[src_uid] = g.add_object(aug[src_uid])
        # Second pass: replay every edge through the uid mapping.
        for src_uid in aug:
            for dst_uid in aug.neighbors(src_uid):
                g.add_arrow(mapping[src_uid], mapping[dst_uid])
|
"""
Pitch follower via DFT peak with Tkinter GUI
"""
import sys
from audiolazy import (tostream, AudioIO, freq2str, sHz, chunks,
lowpass, envelope, pi, thub, Stream, maverage)
from numpy.fft import rfft
def limiter(sig, threshold=.1, size=256, env=envelope.rms, cutoff=pi/2048):
    # Dynamic-range limiter: samples whose smoothed envelope exceeds
    # ``threshold`` are scaled by threshold/envelope, quieter samples
    # pass through with unit gain.
    # ``thub`` lets the lazy ``sig`` stream be consumed twice below:
    # once by the envelope follower, once by the final product.
    sig = thub(sig, 2)
    return sig * Stream( 1. if el <= threshold else threshold / el
                         for el in maverage(size)(env(sig, cutoff=cutoff)) )
@tostream
def dft_pitch(sig, size=2048, hop=None):
    """Block-wise pitch track of ``sig``.

    For every block, finds the strongest rFFT bin and yields its
    normalized angular frequency (2*pi*bin/size, in rad/sample).
    """
    for block in Stream(sig).blocks(size=size, hop=hop):
        spectrum = rfft(block)
        # Bin magnitudes are divided by (2*k/size + 1), which biases the
        # peak search toward lower-frequency bins.
        def weighted(pair):
            k, coeff = pair
            return abs(coeff) / (2 * k / size + 1)
        peak_idx, _ = max(enumerate(spectrum), key=weighted)
        yield 2 * pi * peak_idx / size
def pitch_from_mic(upd_time_in_ms):
    # Generator: records from the default (or CLI-selected) audio input
    # and yields one formatted pitch string per hop of upd_time_in_ms.
    rate = 44100
    s, Hz = sHz(rate)
    api = sys.argv[1] if sys.argv[1:] else None # Choose API via command-line
    # Smaller chunk size for JACK, larger for other audio APIs.
    chunks.size = 1 if api == "jack" else 16
    with AudioIO(api=api) as recorder:
        snd = recorder.record(rate=rate)
        # Limit dynamics, then low-pass below 400 Hz before pitch search.
        sndlow = lowpass(400 * Hz)(limiter(snd, cutoff=20 * Hz))
        hop = int(upd_time_in_ms * 1e-3 * s)
        # dft_pitch yields rad/sample; dividing by Hz converts to hertz,
        # freq2str formats each value as a note-name string.
        for pitch in freq2str(dft_pitch(sndlow, size=2*hop, hop=hop) / Hz):
            yield pitch
if __name__ == "__main__":
    # Python 3 / Python 2 Tkinter import compatibility.
    try:
        import tkinter
    except ImportError:
        import Tkinter as tkinter
    import threading
    import re
    # Window (Tk init), text label and button
    tk = tkinter.Tk()
    tk.title(__doc__.strip().splitlines()[0])
    lbldata = tkinter.StringVar(tk)
    lbltext = tkinter.Label(tk, textvariable=lbldata, font=("Purisa", 72),
                            width=10)
    lbltext.pack(expand=True, fill=tkinter.BOTH)
    btnclose = tkinter.Button(tk, text="Close", command=tk.destroy,
                              default="active")
    btnclose.pack(fill=tkinter.X)
    # Needed data
    # Splits a pitch string into (note part, sign, remainder) so each
    # part can go on its own label line.
    regex_note = re.compile(r"^([A-Gb#]*-?[0-9]*)([?+-]?)(.*?%?)$")
    upd_time_in_ms = 200
    # Update functions for each thread
    def upd_value(): # Recording thread
        # Pull pitch strings from the microphone generator forever,
        # publishing the latest one on the shared ``tk.value``.
        pitches = iter(pitch_from_mic(upd_time_in_ms))
        while not tk.should_finish:
            tk.value = next(pitches)
    def upd_timer(): # GUI mainloop thread
        # Re-render the label from ``tk.value`` and reschedule itself.
        lbldata.set("\n".join(regex_note.findall(tk.value)[0]))
        tk.after(upd_time_in_ms, upd_timer)
    # Multi-thread management initialization
    tk.should_finish = False
    tk.value = freq2str(0) # Starting value
    lbldata.set(tk.value)
    tk.upd_thread = threading.Thread(target=upd_value)
    # Go
    tk.upd_thread.start()
    tk.after_idle(upd_timer)
    tk.mainloop()
    # Window closed: signal the recording thread and wait for it.
    tk.should_finish = True
    tk.upd_thread.join()
|
import os
import tempfile
import zipfile
from PyQt5 import QtCore, QtWidgets
import util
from vaults.modvault import utils
FormClass, BaseClass = util.THEME.loadUiType("vaults/modvault/upload.ui")
class UploadModWidget(FormClass, BaseClass):
    """Dialog that displays mod metadata, validates the name and icon
    location, and zips the mod folder in preparation for uploading.

    The actual network transfer is not implemented yet (see ``upload``).
    """
    def __init__(self, parent, modDir, modinfo, *args, **kwargs):
        BaseClass.__init__(self, *args, **kwargs)
        self.setupUi(self)
        self.parent = parent
        self.client = self.parent.client # type - ClientWindow
        self.modinfo = modinfo
        self.modDir = modDir
        # Restyle whenever the theme is reloaded at runtime.
        util.THEME.stylesheets_reloaded.connect(self.load_stylesheet)
        self.load_stylesheet()
        self.setWindowTitle("Uploading Mod")
        # Populate the form widgets from the parsed mod info.
        self.Name.setText(modinfo.name)
        self.Version.setText(str(modinfo.version))
        if modinfo.ui_only:
            self.isUILabel.setText("is UI Only")
        else:
            self.isUILabel.setText("not UI Only")
        self.UID.setText(modinfo.uid)
        self.Description.setPlainText(modinfo.description)
        if modinfo.icon != "":
            self.IconURI.setText(utils.iconPathToFull(modinfo.icon))
            self.updateThumbnail()
        else:
            # No icon declared: fall back to the generic placeholder.
            self.Thumbnail.setPixmap(
                util.THEME.pixmap("games/unknown_map.png"),
            )
        self.UploadButton.pressed.connect(self.upload)
    def load_stylesheet(self):
        # Re-read the shared client stylesheet (also used on theme reload).
        self.setStyleSheet(util.THEME.readstylesheet("client/client.css"))
    @QtCore.pyqtSlot()
    def upload(self):
        """Validate the mod name and icon location, then zip the mod
        directory into a temp file; aborts with a message box on error.
        The HTTP upload itself is still a TODO."""
        n = self.Name.text()
        # Reject characters that are invalid in file names / vault names.
        if any([(i in n) for i in '"<*>|?/\\:']):
            QtWidgets.QMessageBox.information(
                self.client,
                "Invalid Name",
                "The mod name contains invalid characters: /\\<>|?:\"",
            )
            return
        iconpath = utils.iconPathToFull(self.modinfo.icon)
        infolder = False
        if (
            iconpath != ""
            and (
                os.path.commonprefix([
                    os.path.normcase(self.modDir),
                    os.path.normcase(iconpath),
                ])
                == os.path.normcase(self.modDir)
            )
        ): # the icon is in the game folder
            # localpath = utils.fullPathToIcon(iconpath)
            infolder = True
        if iconpath != "" and not infolder:
            QtWidgets.QMessageBox.information(
                self.client,
                "Invalid Icon File",
                (
                    "The file {} is not located inside the modfolder. Copy the"
                    " icon file to your modfolder and change the mod_info.lua "
                    "accordingly".format(iconpath)
                ),
            )
            return
        try:
            # Zip into a named temp file (delete=False) so a later
            # upload step can reopen it by name.
            temp = tempfile.NamedTemporaryFile(
                mode='w+b', suffix=".zip", delete=False,
            )
            zipped = zipfile.ZipFile(temp, "w", zipfile.ZIP_DEFLATED)
            zipdir(self.modDir, zipped, os.path.basename(self.modDir))
            zipped.close()
            temp.flush()
        except BaseException:
            QtWidgets.QMessageBox.critical(
                self.client,
                "Mod uploading error",
                "Something went wrong zipping the mod files.",
            )
            return
        # qfile = QtCore.QFile(temp.name)
        # TODO: implement uploading via API
        ...
    @QtCore.pyqtSlot()
    def updateThumbnail(self):
        """Show the mod icon in the dialog, converting DDS to PNG first.
        Returns True on success, False when the icon is missing or
        unreadable (after informing the user)."""
        iconfilename = utils.iconPathToFull(self.modinfo.icon)
        if iconfilename == "":
            return False
        if os.path.splitext(iconfilename)[1].lower() == ".dds":
            # Qt cannot display DDS; convert to a PNG in the mod folder.
            old = iconfilename
            iconfilename = os.path.join(
                self.modDir,
                os.path.splitext(os.path.basename(iconfilename))[0] + ".png",
            )
            succes = utils.generateThumbnail(old, iconfilename)
            if not succes:
                QtWidgets.QMessageBox.information(
                    self.client,
                    "Invalid Icon File",
                    (
                        "Because FAF can't read DDS files, it tried to convert"
                        " it to a png. This failed. Try something else"
                    ),
                )
                return False
        try:
            self.Thumbnail.setPixmap(util.THEME.pixmap(iconfilename, False))
        except BaseException:
            QtWidgets.QMessageBox.information(
                self.client,
                "Invalid Icon File",
                "This was not a valid icon file. Please pick a png or jpeg",
            )
            return False
        self.modinfo.thumbnail = utils.fullPathToIcon(iconfilename)
        self.IconURI.setText(iconfilename)
        return True
def zipdir(path, zipf, fname):
    """Zip the directory tree at ``path`` into the open ZipFile ``zipf``.

    Every archive member is stored under the prefix ``fname``: zipping
    "/foo/bar/hello" with fname "test" yields members "test/...".
    """
    path = os.path.normcase(path)
    # Drop a trailing separator so relative names are computed cleanly
    # (guard against an empty path to avoid an IndexError).
    if path and path[-1] in r'\/':
        path = path[:-1]
    for root, dirs, files in os.walk(path):
        for f in files:
            name = os.path.join(os.path.normcase(root), f)
            # relpath handles both separators; the previous
            # commonprefix + manual "\\" strip left a leading "/" on
            # POSIX, which made os.path.join discard the fname prefix.
            arcname = os.path.join(fname, os.path.relpath(name, path))
            zipf.write(name, arcname)
|
from django.shortcuts import render_to_response, get_object_or_404
from django.views.decorators.cache import cache_page
from weblate.trans import appsettings
from django.core.servers.basehttp import FileWrapper
from django.utils.translation import ugettext as _
import django.utils.translation
from django.template import RequestContext, loader
from django.http import (
HttpResponse, HttpResponseRedirect, HttpResponseNotFound, Http404
)
from django.contrib import messages
from django.contrib.auth.decorators import (
login_required, permission_required, user_passes_test
)
from django.contrib.auth.models import AnonymousUser
from django.db.models import Q, Count, Sum
from django.core.paginator import Paginator, EmptyPage, PageNotAnInteger
from django.core.urlresolvers import reverse
from django.contrib.sites.models import Site
from django.utils.safestring import mark_safe
from weblate.trans.models import (
Project, SubProject, Translation, Unit, Suggestion, Check,
Dictionary, Change, Comment, get_versions
)
from weblate.lang.models import Language
from weblate.trans.checks import CHECKS
from weblate.trans.forms import (
TranslationForm, UploadForm, SimpleUploadForm, ExtraUploadForm, SearchForm,
MergeForm, AutoForm, WordForm, DictUploadForm, ReviewForm, LetterForm,
AntispamForm, CommentForm
)
from weblate.trans.util import join_plural
from weblate.accounts.models import Profile, send_notification_email
import weblate
from whoosh.analysis import StandardAnalyzer, StemmingAnalyzer
import datetime
import logging
import os.path
import json
import csv
from xml.etree import ElementTree
import urllib2
class FixedFileWrapper(FileWrapper):
    """FileWrapper that rewinds the underlying file before iteration,
    so the whole file is served regardless of the current position."""
    def __iter__(self):
        self.filelike.seek(0)
        return self
# Module-level logger shared by all views in this file.
logger = logging.getLogger('weblate')
def home(request):
    '''
    Home page of Weblate showing list of projects, stats
    and user links if logged in.
    '''
    projects = Project.objects.all_acl(request.user)
    acl_projects = projects
    # With exactly one visible project, list its subprojects directly.
    if projects.count() == 1:
        projects = SubProject.objects.filter(project=projects[0])
    # Warn about not filled in username (usually caused by migration of
    # users from older system)
    if not request.user.is_anonymous() and request.user.get_full_name() == '':
        messages.warning(
            request,
            _('Please set your full name in your profile.')
        )
    # Load user translations if user is authenticated
    usertranslations = None
    if request.user.is_authenticated():
        profile = request.user.get_profile()
        usertranslations = Translation.objects.filter(
            language__in=profile.languages.all()
        ).order_by(
            'subproject__project__name', 'subproject__name'
        )
    # Some stats
    top_translations = Profile.objects.order_by('-translated')[:10]
    top_suggestions = Profile.objects.order_by('-suggested')[:10]
    # Recent changes restricted to ACL-visible projects (captured above).
    last_changes = Change.objects.filter(
        translation__subproject__project__in=acl_projects,
    ).order_by( '-timestamp')[:10]
    return render_to_response('index.html', RequestContext(request, {
        'projects': projects,
        'top_translations': top_translations,
        'top_suggestions': top_suggestions,
        'last_changes': last_changes,
        'last_changes_rss': reverse('rss'),
        'usertranslations': usertranslations,
    }))
def show_checks(request):
    '''
    List of failing checks.

    Aggregates non-ignored checks by check name with a per-name count.
    '''
    failing = Check.objects.filter(ignore=False)
    summary = failing.values('check').annotate(count=Count('id'))
    context = {
        'checks': summary,
        'title': _('Failing checks'),
    }
    return render_to_response('checks.html', RequestContext(request, context))
def show_check(request, name):
    '''
    Details about failing check, aggregated per project.
    '''
    try:
        check = CHECKS[name]
    except KeyError:
        raise Http404('No check matches the given query.')
    per_project = Check.objects.filter(
        check=name, ignore=False
    ).values('project__slug').annotate(count=Count('id'))
    context = {
        'checks': per_project,
        'title': check.name,
        'check': check,
    }
    return render_to_response('check.html', RequestContext(request, context))
def show_check_project(request, name, project):
    '''
    Show checks failing in a project, aggregated per subproject,
    covering both target-side and source-side checks.
    '''
    prj = get_object_or_404(Project, slug=project)
    prj.check_acl(request)
    try:
        check = CHECKS[name]
    except KeyError:
        raise Http404('No check matches the given query.')
    units = Unit.objects.none()
    if check.target:
        # Target checks are per language: gather failing checksums for
        # each language, then count matching translated units.
        langs = Check.objects.filter(
            check=name, project=prj, ignore=False
        ).values_list('language', flat=True).distinct()
        for lang in langs:
            checks = Check.objects.filter(
                check=name, project=prj, language=lang, ignore=False
            ).values_list('checksum', flat=True)
            res = Unit.objects.filter(
                checksum__in=checks,
                translation__language=lang,
                translation__subproject__project=prj,
                translated=True
            ).values(
                'translation__subproject__slug',
                'translation__subproject__project__slug'
            ).annotate(count=Count('id'))
            units |= res
    if check.source:
        # Source checks carry no language; count them against the first
        # translation's language of each subproject.
        checks = Check.objects.filter(
            check=name,
            project=prj,
            language=None,
            ignore=False
        ).values_list(
            'checksum', flat=True
        )
        for subproject in prj.subproject_set.all():
            lang = subproject.translation_set.all()[0].language
            res = Unit.objects.filter(
                checksum__in=checks,
                translation__language=lang,
                translation__subproject=subproject
            ).values(
                'translation__subproject__slug',
                'translation__subproject__project__slug'
            ).annotate(count=Count('id'))
            units |= res
    return render_to_response('check_project.html', RequestContext(request, {
        'checks': units,
        'title': '%s/%s' % (prj.__unicode__(), check.name),
        'check': check,
        'project': prj,
    }))
def show_check_subproject(request, name, project, subproject):
    '''
    Show checks failing in a subproject, aggregated per language,
    with a separate count for source-side checks.
    '''
    subprj = get_object_or_404(
        SubProject,
        slug=subproject,
        project__slug=project
    )
    subprj.check_acl(request)
    try:
        check = CHECKS[name]
    except KeyError:
        raise Http404('No check matches the given query.')
    units = Unit.objects.none()
    if check.target:
        # Target checks: count failing translated units per language.
        langs = Check.objects.filter(
            check=name,
            project=subprj.project,
            ignore=False
        ).values_list(
            'language', flat=True
        ).distinct()
        for lang in langs:
            checks = Check.objects.filter(
                check=name,
                project=subprj.project,
                language=lang,
                ignore=False
            ).values_list('checksum', flat=True)
            res = Unit.objects.filter(
                translation__subproject=subprj,
                checksum__in=checks,
                translation__language=lang,
                translated=True
            ).values(
                'translation__language__code'
            ).annotate(count=Count('id'))
            units |= res
    source_checks = []
    if check.source:
        # Source checks have no language; count them against the first
        # translation of this subproject.
        checks = Check.objects.filter(
            check=name, project=subprj.project,
            language=None,
            ignore=False
        ).values_list('checksum', flat=True)
        lang = subprj.translation_set.all()[0].language
        res = Unit.objects.filter(
            translation__subproject=subprj,
            checksum__in=checks,
            translation__language=lang
        ).count()
        if res > 0:
            source_checks.append(res)
    return render_to_response(
        'check_subproject.html',
        RequestContext(request, {
            'checks': units,
            'source_checks': source_checks,
            'anychecks': len(units) + len(source_checks) > 0,
            'title': '%s/%s' % (subprj.__unicode__(), check.name),
            'check': check,
            'subproject': subprj,
        })
    )
def show_languages(request):
    '''
    Overview of all languages that have at least one translation.
    '''
    context = {
        'languages': Language.objects.have_translation(),
        'title': _('Languages'),
    }
    return render_to_response(
        'languages.html', RequestContext(request, context)
    )
def show_language(request, lang):
    '''
    Language detail: recent changes plus projects with dictionaries.
    '''
    language = get_object_or_404(Language, code=lang)
    recent = Change.objects.filter(translation__language=language)
    recent = recent.order_by('-timestamp')[:10]
    # Projects that have glossary entries for this language.
    dict_projects = Dictionary.objects.filter(
        language=language
    ).values_list('project', flat=True).distinct()
    return render_to_response('language.html', RequestContext(request, {
        'object': language,
        'last_changes': recent,
        'last_changes_rss': reverse(
            'rss-language', kwargs={'lang': language.code}
        ),
        'dicts': Project.objects.filter(id__in=dict_projects),
    }))
def show_dictionaries(request, project):
    '''
    Lists the languages a project is translated into, as dictionary
    entry points.
    '''
    prj = get_object_or_404(Project, slug=project)
    prj.check_acl(request)
    languages = Translation.objects.filter(
        subproject__project=prj
    ).values_list('language', flat=True).distinct()
    context = {
        'title': _('Dictionaries'),
        'dicts': Language.objects.filter(id__in=languages),
        'project': prj,
    }
    return render_to_response(
        'dictionaries.html', RequestContext(request, context)
    )
@login_required
@permission_required('trans.change_dictionary')
def edit_dictionary(request, project, lang):
    '''
    Edits a single dictionary word; the word id comes from the query
    string, the new source/target from the POSTed WordForm.
    '''
    prj = get_object_or_404(Project, slug=project)
    prj.check_acl(request)
    lang = get_object_or_404(Language, code=lang)
    word = get_object_or_404(
        Dictionary,
        project=prj,
        language=lang,
        id=request.GET.get('id')
    )
    if request.method == 'POST':
        form = WordForm(request.POST)
        if form.is_valid():
            word.source = form.cleaned_data['source']
            word.target = form.cleaned_data['target']
            word.save()
            # Back to the dictionary listing after a successful edit.
            return HttpResponseRedirect(reverse(
                'weblate.trans.views.show_dictionary',
                kwargs={'project': prj.slug, 'lang': lang.code}
            ))
    else:
        # Plain GET: pre-fill the form with the current word.
        form = WordForm(
            initial={'source': word.source, 'target': word.target}
        )
    return render_to_response('edit_dictionary.html', RequestContext(request, {
        'title': _('%(language)s dictionary for %(project)s') %
            {'language': lang, 'project': prj},
        'project': prj,
        'language': lang,
        'form': form,
    }))
@login_required
@permission_required('trans.delete_dictionary')
def delete_dictionary(request, project, lang):
    '''
    Deletes a single dictionary word (id taken from POST data).
    '''
    prj = get_object_or_404(Project, slug=project)
    prj.check_acl(request)
    language = get_object_or_404(Language, code=lang)
    entry = get_object_or_404(
        Dictionary,
        project=prj,
        language=language,
        id=request.POST.get('id')
    )
    entry.delete()
    # Back to the dictionary listing for this project/language.
    return HttpResponseRedirect(reverse(
        'weblate.trans.views.show_dictionary',
        kwargs={'project': prj.slug, 'lang': language.code})
    )
@login_required
@permission_required('trans.upload_dictionary')
def upload_dictionary(request, project, lang):
    '''
    Imports words from an uploaded file into the project/language
    glossary, reports the outcome via messages and redirects back to
    the dictionary page.
    '''
    prj = get_object_or_404(Project, slug=project)
    prj.check_acl(request)
    lang = get_object_or_404(Language, code=lang)
    if request.method == 'POST':
        form = DictUploadForm(request.POST, request.FILES)
        if form.is_valid():
            try:
                count = Dictionary.objects.upload(
                    prj,
                    lang,
                    request.FILES['file'],
                    form.cleaned_data['overwrite']
                )
                if count == 0:
                    messages.warning(
                        request,
                        _('No words to import found in file.')
                    )
                else:
                    messages.info(
                        request,
                        _('Imported %d words from file.') % count
                    )
            except Exception as e:
                # Translate the template first, then interpolate --
                # interpolating inside _() produced a dynamic msgid
                # that could never match the translation catalog.
                messages.error(
                    request,
                    _('File content merge failed: %s') % unicode(e)
                )
        else:
            messages.error(request, _('Failed to process form!'))
    else:
        messages.error(request, _('Failed to process form!'))
    return HttpResponseRedirect(reverse(
        'weblate.trans.views.show_dictionary',
        kwargs={'project': prj.slug, 'lang': lang.code}
    ))
def download_dictionary(request, project, lang):
    '''
    Exports dictionary in the requested format: CSV (default) or PO.
    '''
    prj = get_object_or_404(Project, slug=project)
    prj.check_acl(request)
    lang = get_object_or_404(Language, code=lang)
    # Parse parameters; anything unknown (or missing) falls back to CSV.
    export_format = request.GET.get('format')
    if export_format not in ('csv', 'po'):
        export_format = 'csv'
    # Grab all words
    words = Dictionary.objects.filter(
        project=prj,
        language=lang
    ).order_by('source')
    if export_format == 'csv':
        response = HttpResponse(mimetype='text/csv; charset=utf-8')
        filename = 'dictionary-%s-%s.csv' % (prj.slug, lang.code)
        response['Content-Disposition'] = 'attachment; filename=%s' % filename
        writer = csv.writer(response)
        # The Python 2 csv module needs encoded byte strings.
        for word in words.iterator():
            writer.writerow((
                word.source.encode('utf8'), word.target.encode('utf8')
            ))
        return response
    # export_format == 'po' (guaranteed by the fallback above)
    from translate.storage.po import pounit, pofile
    response = HttpResponse(mimetype='text/x-po; charset=utf-8')
    filename = 'dictionary-%s-%s.po' % (prj.slug, lang.code)
    response['Content-Disposition'] = 'attachment; filename=%s' % filename
    store = pofile()
    site = Site.objects.get_current()
    store.updateheader(
        add=True,
        language=lang.code,
        x_generator='Weblate %s' % weblate.VERSION,
        project_id_version='%s dictionary for %s' % (lang.name, prj.name),
        language_team='%s <http://%s%s>' % (
            lang.name,
            site.domain,
            reverse(
                'weblate.trans.views.show_dictionary',
                kwargs={'project': prj.slug, 'lang': lang.code}
            ),
        )
    )
    for word in words.iterator():
        unit = pounit(word.source)
        unit.target = word.target
        store.addunit(unit)
    store.savefile(response)
    return response
def show_dictionary(request, project, lang):
    '''
    Browse (and, with permission, extend) the glossary of a project for
    one language: inline word adding, letter filter and pagination.
    '''
    prj = get_object_or_404(Project, slug=project)
    prj.check_acl(request)
    lang = get_object_or_404(Language, code=lang)
    # POST with the add permission creates a new word, then redirects
    # back to the same (filtered/paginated) URL.
    if (request.method == 'POST'
        and request.user.has_perm('trans.add_dictionary')):
        form = WordForm(request.POST)
        if form.is_valid():
            Dictionary.objects.create(
                project=prj,
                language=lang,
                source=form.cleaned_data['source'],
                target=form.cleaned_data['target']
            )
            return HttpResponseRedirect(request.get_full_path())
    else:
        form = WordForm()
    uploadform = DictUploadForm()
    words = Dictionary.objects.filter(
        project=prj, language=lang
    ).order_by('source')
    limit = request.GET.get('limit', 25)
    page = request.GET.get('page', 1)
    letterform = LetterForm(request.GET)
    # Optional first-letter filter from the letter bar.
    if letterform.is_valid() and letterform.cleaned_data['letter'] != '':
        words = words.filter(
            source__istartswith=letterform.cleaned_data['letter']
        )
        letter = letterform.cleaned_data['letter']
    else:
        letter = ''
    paginator = Paginator(words, limit)
    try:
        words = paginator.page(page)
    except PageNotAnInteger:
        # If page is not an integer, deliver first page.
        words = paginator.page(1)
    except EmptyPage:
        # If page is out of range (e.g. 9999), deliver last page of results.
        words = paginator.page(paginator.num_pages)
    return render_to_response('dictionary.html', RequestContext(request, {
        'title': _('%(language)s dictionary for %(project)s') %
            {'language': lang, 'project': prj},
        'project': prj,
        'language': lang,
        'words': words,
        'form': form,
        'uploadform': uploadform,
        'letterform': letterform,
        'letter': letter,
    }))
def show_engage(request, project, lang=None):
    '''
    "Engage" widget page: renders a translation-status blurb for a
    project, optionally localized for one language.
    '''
    # Get project object
    obj = get_object_or_404(Project, slug=project)
    obj.check_acl(request)
    # Handle language parameter
    language = None
    if lang is not None:
        try:
            django.utils.translation.activate(lang)
        except Exception:
            # Ignore failure on activating language; the previous bare
            # ``except:`` also swallowed SystemExit/KeyboardInterrupt.
            pass
        try:
            language = Language.objects.get(code=lang)
        except Language.DoesNotExist:
            pass
    context = {
        'object': obj,
        'project': obj.name,
        'languages': obj.get_language_count(),
        'total': obj.get_total(),
        'percent': obj.get_translated_percent(language),
        'url': obj.get_absolute_url(),
        'language': language,
    }
    # Render text
    if language is None:
        status_text = _(
            '<a href="%(url)s">Translation project for %(project)s</a> '
            'currently contains %(total)s strings for translation and is '
            '<a href="%(url)s">being translated into %(languages)s languages'
            '</a>. Overall, these translations are %(percent)s%% complete.'
        )
    else:
        # Translators: line of text in engagement widget, please use your
        # language name instead of English
        status_text = _(
            '<a href="%(url)s">Translation project for %(project)s</a> into '
            'English currently contains %(total)s strings for translation and '
            'is %(percent)s%% complete.'
        )
        # Substitute the real language name when the translation kept
        # the literal word 'English'.
        if 'English' in status_text:
            status_text = status_text.replace('English', language.name)
    context['status_text'] = mark_safe(status_text % context)
    return render_to_response('engage.html', RequestContext(request, context))
def show_project(request, project):
    '''
    Project overview: dictionary languages and recent changes.
    '''
    prj = get_object_or_404(Project, slug=project)
    prj.check_acl(request)
    # Languages for which this project has glossary entries.
    dict_langs = Dictionary.objects.filter(
        project=prj
    ).values_list('language', flat=True).distinct()
    recent = Change.objects.filter(
        translation__subproject__project=prj
    ).order_by('-timestamp')[:10]
    return render_to_response('project.html', RequestContext(request, {
        'object': prj,
        'dicts': Language.objects.filter(id__in=dict_langs),
        'last_changes': recent,
        'last_changes_rss': reverse(
            'rss-project',
            kwargs={'project': prj.slug}
        ),
    }))
def show_subproject(request, project, subproject):
    '''
    Subproject overview with the ten most recent changes.
    '''
    subprj = get_object_or_404(
        SubProject, slug=subproject, project__slug=project
    )
    subprj.check_acl(request)
    recent = Change.objects.filter(translation__subproject=subprj)
    recent = recent.order_by('-timestamp')[:10]
    context = {
        'object': subprj,
        'last_changes': recent,
        'last_changes_rss': reverse(
            'rss-subproject',
            kwargs={'subproject': subprj.slug, 'project': subprj.project.slug}
        ),
    }
    return render_to_response(
        'subproject.html', RequestContext(request, context)
    )
@login_required
@permission_required('trans.automatic_translation')
def auto_translation(request, project, subproject, lang):
    '''
    Automatically translates selected units by copying translations of
    identical source strings from elsewhere in the project (or from one
    chosen subproject).
    '''
    obj = get_object_or_404(
        Translation,
        language__code=lang,
        subproject__slug=subproject,
        subproject__project__slug=project,
        enabled=True
    )
    obj.check_acl(request)
    obj.commit_pending()
    autoform = AutoForm(obj, request.POST)
    change = None
    if not obj.subproject.locked and autoform.is_valid():
        # Select which units to (re)translate.
        if autoform.cleaned_data['inconsistent']:
            units = obj.unit_set.filter_type('inconsistent', obj)
        elif autoform.cleaned_data['overwrite']:
            units = obj.unit_set.all()
        else:
            units = obj.unit_set.filter(translated=False)
        sources = Unit.objects.filter(
            translation__language=obj.language,
            translated=True
        )
        # Source pool: whole project (minus self) or one subproject.
        if autoform.cleaned_data['subproject'] == '':
            sources = sources.filter(
                translation__subproject__project=obj.subproject.project
            ).exclude(
                translation=obj
            )
        else:
            subprj = SubProject.objects.get(
                project=obj.subproject.project,
                slug=autoform.cleaned_data['subproject']
            )
            sources = sources.filter(translation__subproject=subprj)
        for unit in units.iterator():
            # Units with the same checksum share the same source string.
            update = sources.filter(checksum=unit.checksum)
            if update.exists():
                # Get first entry
                update = update[0]
                # No save if translation is same
                if unit.fuzzy == update.fuzzy and unit.target == update.target:
                    continue
                # Copy translation
                unit.fuzzy = update.fuzzy
                unit.target = update.target
                # Create single change object for whole merge
                if change is None:
                    change = Change.objects.create(
                        unit=unit,
                        translation=unit.translation,
                        user=request.user
                    )
                # Save unit to backend
                unit.save_backend(request, False, False)
        messages.info(request, _('Automatic translation completed.'))
    else:
        messages.error(request, _('Failed to process form!'))
    return HttpResponseRedirect(obj.get_absolute_url())
def review_source(request, project, subproject):
    '''
    Listing of source strings to review.
    '''
    obj = get_object_or_404(SubProject, slug=subproject, project__slug=project)
    obj.check_acl(request)
    if not obj.translation_set.exists():
        raise Http404('No translation exists in this subproject.')
    # Grab first translation in subproject
    # (this assumes all have same source strings)
    source = obj.translation_set.all()[0]
    # Grab search type and page number
    rqtype = request.GET.get('type', 'all')
    limit = request.GET.get('limit', 50)
    page = request.GET.get('page', 1)
    # Filter units
    sources = source.unit_set.filter_type(rqtype, source)
    paginator = Paginator(sources, limit)
    try:
        sources = paginator.page(page)
    except PageNotAnInteger:
        # If page is not an integer, deliver first page.
        sources = paginator.page(1)
    except EmptyPage:
        # If page is out of range (e.g. 9999), deliver last page of results.
        sources = paginator.page(paginator.num_pages)
    return render_to_response('source-review.html', RequestContext(request, {
        'object': obj,
        'source': source,
        'sources': sources,
        'title': _('Review source strings in %s') % obj.__unicode__(),
    }))
def show_source(request, project, subproject):
    '''
    Show source strings summary and checks.
    '''
    subprj = get_object_or_404(
        SubProject, slug=subproject, project__slug=project
    )
    subprj.check_acl(request)
    if not subprj.translation_set.exists():
        raise Http404('No translation exists in this subproject.')
    # Any translation works as the source-string carrier: all
    # translations of a subproject share the same source strings.
    source = subprj.translation_set.all()[0]
    context = {
        'object': subprj,
        'source': source,
        'title': _('Source strings in %s') % subprj.__unicode__(),
    }
    return render_to_response('source.html', RequestContext(request, context))
def show_translation(request, project, subproject, lang):
    '''
    Translation overview: recent changes plus upload, auto-translate,
    search and review forms tailored to the user's permissions.
    '''
    obj = get_object_or_404(
        Translation,
        language__code=lang,
        subproject__slug=subproject,
        subproject__project__slug=project,
        enabled=True
    )
    obj.check_acl(request)
    last_changes = Change.objects.filter(
        translation=obj
    ).order_by('-timestamp')[:10]
    # Check locks
    obj.is_locked(request)
    # How much is user allowed to configure upload?
    if request.user.has_perm('trans.author_translation'):
        form = ExtraUploadForm()
    elif request.user.has_perm('trans.overwrite_translation'):
        form = UploadForm()
    else:
        form = SimpleUploadForm()
    # Is user allowed to do automatic translation?
    if request.user.has_perm('trans.automatic_translation'):
        autoform = AutoForm(obj)
    else:
        autoform = None
    # Search form for everybody
    search_form = SearchForm()
    # Review form for logged in users
    if request.user.is_anonymous():
        review_form = None
    else:
        # Default review window: the last 31 days.
        review_form = ReviewForm(
            initial={
                'date': datetime.date.today() - datetime.timedelta(days=31)
            }
        )
    return render_to_response('translation.html', RequestContext(request, {
        'object': obj,
        'form': form,
        'autoform': autoform,
        'search_form': search_form,
        'review_form': review_form,
        'last_changes': last_changes,
        'last_changes_rss': reverse(
            'rss-translation',
            kwargs={
                'lang': obj.language.code,
                'subproject': obj.subproject.slug,
                'project': obj.subproject.project.slug
            }
        ),
    }))
@login_required
@permission_required('trans.commit_translation')
def commit_project(request, project):
    '''
    Commits all pending translations in the whole project.
    '''
    project_obj = get_object_or_404(Project, slug=project)
    project_obj.check_acl(request)
    project_obj.commit_pending()
    messages.info(request, _('All pending translations were committed.'))
    return HttpResponseRedirect(project_obj.get_absolute_url())
@login_required
@permission_required('trans.commit_translation')
def commit_subproject(request, project, subproject):
    '''
    Commits all pending translations in a subproject.
    '''
    subprj = get_object_or_404(SubProject, slug=subproject, project__slug=project)
    subprj.check_acl(request)
    subprj.commit_pending()
    messages.info(request, _('All pending translations were committed.'))
    return HttpResponseRedirect(subprj.get_absolute_url())
@login_required
@permission_required('trans.commit_translation')
def commit_translation(request, project, subproject, lang):
    '''
    Commits pending changes for a single translation.
    '''
    translation = get_object_or_404(
        Translation,
        enabled=True,
        language__code=lang,
        subproject__slug=subproject,
        subproject__project__slug=project,
    )
    translation.check_acl(request)
    translation.commit_pending()
    messages.info(request, _('All pending translations were committed.'))
    return HttpResponseRedirect(translation.get_absolute_url())
@login_required
@permission_required('trans.update_translation')
def update_project(request, project):
    '''
    Updates all repositories in the project from their remotes.
    '''
    project_obj = get_object_or_404(Project, slug=project)
    project_obj.check_acl(request)
    if project_obj.do_update(request):
        messages.info(request, _('All repositories were updated.'))
    return HttpResponseRedirect(project_obj.get_absolute_url())
@login_required
@permission_required('trans.update_translation')
def update_subproject(request, project, subproject):
    '''
    Updates the subproject repository from its remote.
    '''
    subprj = get_object_or_404(SubProject, slug=subproject, project__slug=project)
    subprj.check_acl(request)
    if subprj.do_update(request):
        messages.info(request, _('All repositories were updated.'))
    return HttpResponseRedirect(subprj.get_absolute_url())
@login_required
@permission_required('trans.update_translation')
def update_translation(request, project, subproject, lang):
    '''
    Updates the repository backing a single translation.
    '''
    translation = get_object_or_404(
        Translation,
        enabled=True,
        language__code=lang,
        subproject__slug=subproject,
        subproject__project__slug=project,
    )
    translation.check_acl(request)
    if translation.do_update(request):
        messages.info(request, _('All repositories were updated.'))
    return HttpResponseRedirect(translation.get_absolute_url())
@login_required
@permission_required('trans.push_translation')
def push_project(request, project):
    '''
    Pushes all repositories of the project to their remotes.
    '''
    project_obj = get_object_or_404(Project, slug=project)
    project_obj.check_acl(request)
    if project_obj.do_push(request):
        messages.info(request, _('All repositories were pushed.'))
    return HttpResponseRedirect(project_obj.get_absolute_url())
@login_required
@permission_required('trans.push_translation')
def push_subproject(request, project, subproject):
    '''
    Pushes the subproject repository to its remote.
    '''
    subprj = get_object_or_404(SubProject, slug=subproject, project__slug=project)
    subprj.check_acl(request)
    if subprj.do_push(request):
        messages.info(request, _('All repositories were pushed.'))
    return HttpResponseRedirect(subprj.get_absolute_url())
@login_required
@permission_required('trans.push_translation')
def push_translation(request, project, subproject, lang):
    '''
    Pushes the repository backing a single translation to its remote.
    '''
    translation = get_object_or_404(
        Translation,
        enabled=True,
        language__code=lang,
        subproject__slug=subproject,
        subproject__project__slug=project,
    )
    translation.check_acl(request)
    if translation.do_push(request):
        messages.info(request, _('All repositories were pushed.'))
    return HttpResponseRedirect(translation.get_absolute_url())
@login_required
@permission_required('trans.reset_translation')
def reset_project(request, project):
    '''
    Resets all repositories of the project to match their remotes.
    '''
    project_obj = get_object_or_404(Project, slug=project)
    project_obj.check_acl(request)
    if project_obj.do_reset(request):
        messages.info(request, _('All repositories have been reset.'))
    return HttpResponseRedirect(project_obj.get_absolute_url())
@login_required
@permission_required('trans.reset_translation')
def reset_subproject(request, project, subproject):
    '''
    Resets the subproject repository to match its remote.
    '''
    subprj = get_object_or_404(SubProject, slug=subproject, project__slug=project)
    subprj.check_acl(request)
    if subprj.do_reset(request):
        messages.info(request, _('All repositories have been reset.'))
    return HttpResponseRedirect(subprj.get_absolute_url())
@login_required
@permission_required('trans.reset_translation')
def reset_translation(request, project, subproject, lang):
    '''
    Resets the repository backing a single translation.
    '''
    translation = get_object_or_404(
        Translation,
        enabled=True,
        language__code=lang,
        subproject__slug=subproject,
        subproject__project__slug=project,
    )
    translation.check_acl(request)
    if translation.do_reset(request):
        messages.info(request, _('All repositories have been reset.'))
    return HttpResponseRedirect(translation.get_absolute_url())
@login_required
@permission_required('trans.lock_translation')
def lock_translation(request, project, subproject, lang):
    '''
    Locks a translation for exclusive use by the current user.
    '''
    translation = get_object_or_404(
        Translation,
        enabled=True,
        language__code=lang,
        subproject__slug=subproject,
        subproject__project__slug=project,
    )
    translation.check_acl(request)
    # Only take the lock when nobody else already holds it
    if not translation.is_user_locked(request):
        translation.create_lock(request.user, True)
        messages.info(request, _('Translation is now locked for you.'))
    return HttpResponseRedirect(translation.get_absolute_url())
@login_required
def update_lock(request, project, subproject, lang):
    '''
    AJAX endpoint refreshing the expiry of the current user's lock.
    '''
    translation = get_object_or_404(
        Translation,
        enabled=True,
        language__code=lang,
        subproject__slug=subproject,
        subproject__project__slug=project,
    )
    translation.check_acl(request)
    if not translation.is_user_locked(request):
        translation.update_lock_time()
    return HttpResponse('ok')
@login_required
@permission_required('trans.lock_translation')
def unlock_translation(request, project, subproject, lang):
    '''
    Releases the user lock on a translation.
    '''
    translation = get_object_or_404(
        Translation,
        enabled=True,
        language__code=lang,
        subproject__slug=subproject,
        subproject__project__slug=project,
    )
    translation.check_acl(request)
    if not translation.is_user_locked(request):
        # Passing None clears the lock owner
        translation.create_lock(None)
        messages.info(
            request,
            _('Translation is now open for translation updates.')
        )
    return HttpResponseRedirect(translation.get_absolute_url())
@login_required
@permission_required('trans.lock_subproject')
def lock_subproject(request, project, subproject):
    '''
    Locks the whole subproject against translation updates.
    '''
    subprj = get_object_or_404(SubProject, slug=subproject, project__slug=project)
    subprj.check_acl(request)
    # Flush pending changes before locking
    subprj.commit_pending()
    subprj.locked = True
    subprj.save()
    messages.info(
        request,
        _('Subproject is now locked for translation updates!')
    )
    return HttpResponseRedirect(subprj.get_absolute_url())
@login_required
@permission_required('trans.lock_subproject')
def unlock_subproject(request, project, subproject):
    '''
    Removes the lock from a subproject, allowing translation updates.
    '''
    subprj = get_object_or_404(SubProject, slug=subproject, project__slug=project)
    subprj.check_acl(request)
    subprj.locked = False
    subprj.save()
    messages.info(
        request,
        _('Subproject is now open for translation updates.')
    )
    return HttpResponseRedirect(subprj.get_absolute_url())
@login_required
@permission_required('trans.lock_subproject')
def lock_project(request, project):
    '''
    Locks every subproject in the project against translation updates.
    '''
    project_obj = get_object_or_404(Project, slug=project)
    project_obj.check_acl(request)
    # Flush pending changes before locking
    project_obj.commit_pending()
    for subprj in project_obj.subproject_set.all():
        subprj.locked = True
        subprj.save()
    messages.info(
        request,
        _('All subprojects are now locked for translation updates!')
    )
    return HttpResponseRedirect(project_obj.get_absolute_url())
@login_required
@permission_required('trans.lock_subproject')
def unlock_project(request, project):
    '''
    Unlocks every subproject in the project.
    '''
    project_obj = get_object_or_404(Project, slug=project)
    project_obj.check_acl(request)
    for subprj in project_obj.subproject_set.all():
        subprj.locked = False
        subprj.save()
    messages.info(request, _('Project is now open for translation updates.'))
    return HttpResponseRedirect(project_obj.get_absolute_url())
def download_translation(request, project, subproject, lang):
    '''
    Serves the translation file for download as an attachment.

    Mime type and file extension are derived from the ttkit store class
    backing the translation; falls back to plain text when the store
    does not expose them.
    '''
    obj = get_object_or_404(
        Translation,
        language__code=lang,
        subproject__slug=subproject,
        subproject__project__slug=project,
        enabled=True
    )
    obj.check_acl(request)
    # Retrieve ttkit store to get extension and mime type
    store = obj.get_store()
    srcfilename = obj.get_filename()
    if store.Mimetypes is None:
        # Properties files do not expose mimetype
        mime = 'text/plain'
    else:
        mime = store.Mimetypes[0]
    if store.Extensions is None:
        # Typo in translate-toolkit 1.9, see
        # https://github.com/translate/translate/pull/10
        if hasattr(store, 'Exensions'):
            ext = store.Exensions[0]
        else:
            ext = 'txt'
    else:
        ext = store.Extensions[0]
    # Construct file name (do not use real filename as it is usually not
    # that useful)
    filename = '%s-%s-%s.%s' % (project, subproject, lang, ext)
    # Django wrapper for sending file
    wrapper = FixedFileWrapper(file(srcfilename))
    response = HttpResponse(wrapper, mimetype=mime)
    # Fill in response headers
    response['Content-Disposition'] = 'attachment; filename=%s' % filename
    response['Content-Length'] = os.path.getsize(srcfilename)
    return response
def bool2str(val):
    '''
    Serializes a truthy value to the form-style string 'on', else ''.
    '''
    return 'on' if val else ''
def parse_search_url(request):
    '''
    Parses filtering, navigation and search parameters from a request.

    Returns tuple of (rqtype, direction, pos, search_query, search_type,
    search_source, search_target, search_context, search_url) where
    search_url is a query-string fragment used to carry the search state
    through subsequent redirects.
    '''
    # Check where we are
    rqtype = request.REQUEST.get('type', 'all')
    direction = request.REQUEST.get('dir', 'forward')
    pos = request.REQUEST.get('pos', '-1')
    try:
        pos = int(pos)
    except (ValueError, TypeError):
        # Invalid position in URL, fall back to start
        pos = -1

    # Pre-process search form
    if request.method == 'POST':
        search_form = SearchForm(request.POST)
    else:
        search_form = SearchForm(request.GET)
    if search_form.is_valid():
        search_query = search_form.cleaned_data['q']
        search_type = search_form.cleaned_data['search']
        if search_type == '':
            search_type = 'ftx'
        search_source = search_form.cleaned_data['src']
        search_target = search_form.cleaned_data['tgt']
        search_context = search_form.cleaned_data['ctx']
        # Sane defaults
        if not search_context and not search_source and not search_target:
            search_source = True
            search_target = True

        # NOTE(review): search_query is not URL-encoded here; queries
        # containing '&' or '=' would corrupt the fragment — confirm
        # whether callers rely on the raw form before changing.
        search_url = '&q=%s&src=%s&tgt=%s&ctx=%s&search=%s' % (
            search_query,
            bool2str(search_source),
            bool2str(search_target),
            bool2str(search_context),
            search_type,
        )
    else:
        search_query = ''
        search_type = 'ftx'
        search_source = True
        search_target = True
        search_context = False
        search_url = ''

    # Propagate review date if present
    if 'date' in request.REQUEST:
        search_url += '&date=%s' % request.REQUEST['date']

    return (
        rqtype,
        direction,
        pos,
        search_query,
        search_type,
        search_source,
        search_target,
        search_context,
        search_url
    )
def get_filter_name(rqtype, search_query):
    '''
    Returns human readable name of the currently active filter,
    or None when no filter is in effect.
    '''
    if search_query != '':
        return _('Search for "%s"') % search_query
    if rqtype == 'all':
        return None
    if rqtype == 'fuzzy':
        return _('Fuzzy strings')
    if rqtype == 'untranslated':
        return _('Untranslated strings')
    if rqtype == 'suggestions':
        return _('Strings with suggestions')
    if rqtype == 'allchecks':
        return _('Strings with any failing checks')
    if rqtype in CHECKS:
        return CHECKS[rqtype].name
    return None
def translate(request, project, subproject, lang):
    '''
    Main translation view.

    Handles saving translations and suggestions (POST), merging from
    another unit, accepting/deleting suggestions, and navigation through
    review, search, checksum and type-filtered unit sets.
    '''
    obj = get_object_or_404(
        Translation,
        language__code=lang,
        subproject__slug=subproject,
        subproject__project__slug=project,
        enabled=True
    )
    obj.check_acl(request)
    # Check locks
    project_locked, user_locked, own_lock = obj.is_locked(request, True)
    locked = project_locked or user_locked
    if request.user.is_authenticated():
        profile = request.user.get_profile()
        antispam = None
    else:
        profile = None
        antispam = AntispamForm()
    secondary = None
    unit = None
    rqtype, direction, pos, search_query, search_type, search_source, search_target, search_context, search_url = parse_search_url(request)
    # Any form submitted?
    if request.method == 'POST':
        # Antispam protection
        if not request.user.is_authenticated():
            antispam = AntispamForm(request.POST)
            if not antispam.is_valid():
                # Silently redirect to next entry
                return HttpResponseRedirect('%s?type=%s&pos=%d%s' % (
                    obj.get_translate_url(),
                    rqtype,
                    pos,
                    search_url
                ))
        form = TranslationForm(request.POST)
        if form.is_valid() and not project_locked:
            # Check whether translation is not outdated
            obj.check_sync()
            try:
                try:
                    unit = Unit.objects.get(
                        checksum=form.cleaned_data['checksum'],
                        translation=obj
                    )
                except Unit.MultipleObjectsReturned:
                    # Possible temporary inconsistency caused by ongoing update
                    # of repo, let's pretend everyting is okay
                    unit = Unit.objects.filter(
                        checksum=form.cleaned_data['checksum'],
                        translation=obj
                    )[0]
                if 'suggest' in request.POST:
                    # Handle suggesion saving
                    user = request.user
                    if isinstance(user, AnonymousUser):
                        user = None
                    if form.cleaned_data['target'] == len(form.cleaned_data['target']) * ['']:
                        messages.error(request, _('Your suggestion is empty!'))
                        # Stay on same entry
                        return HttpResponseRedirect(
                            '%s?type=%s&pos=%d&dir=stay%s' % (
                                obj.get_translate_url(),
                                rqtype,
                                pos,
                                search_url
                            )
                        )
                    # Create the suggestion
                    sug = Suggestion.objects.create(
                        target=join_plural(form.cleaned_data['target']),
                        checksum=unit.checksum,
                        language=unit.translation.language,
                        project=unit.translation.subproject.project,
                        user=user)
                    # Record in change
                    Change.objects.create(
                        unit=unit,
                        action=Change.ACTION_SUGGESTION,
                        translation=unit.translation,
                        user=user
                    )
                    # Invalidate counts cache
                    unit.translation.invalidate_cache('suggestions')
                    # Invite user to become translator if there is nobody else
                    recent_changes = Change.objects.content().filter(
                        translation=unit.translation,
                    ).exclude(
                        user=None
                    ).order_by('-timestamp')
                    # Fixed: dropped debug leftover 'or True' which made this
                    # condition always hold, showing the invite to everyone.
                    if recent_changes.count() == 0:
                        messages.info(
                            request,
                            _('There is currently no active translator for this translation, please consider becoming a translator as your suggestion might otherwise remain unreviewed.')
                        )
                    # Notify subscribed users
                    subscriptions = Profile.objects.subscribed_new_suggestion(
                        obj.subproject.project,
                        obj.language,
                        request.user
                    )
                    for subscription in subscriptions:
                        subscription.notify_new_suggestion(obj, sug, unit)
                    # Update suggestion stats
                    if profile is not None:
                        profile.suggested += 1
                        profile.save()
                elif not request.user.is_authenticated():
                    # We accept translations only from authenticated
                    messages.error(
                        request,
                        _('You need to log in to be able to save translations!')
                    )
                elif not request.user.has_perm('trans.save_translation'):
                    # Need privilege to save
                    messages.error(
                        request,
                        _('You don\'t have privileges to save translations!')
                    )
                elif not user_locked:
                    # Remember old checks
                    oldchecks = set(
                        unit.active_checks().values_list('check', flat=True)
                    )
                    # Update unit and save it
                    unit.target = join_plural(form.cleaned_data['target'])
                    unit.fuzzy = form.cleaned_data['fuzzy']
                    saved = unit.save_backend(request)
                    if saved:
                        # Get new set of checks
                        newchecks = set(
                            unit.active_checks().values_list('check', flat=True)
                        )
                        # Did we introduce any new failures?
                        # Fixed: superset comparison (newchecks > oldchecks)
                        # missed new failures whenever an old check vanished;
                        # set difference matches the stated intent.
                        if len(newchecks - oldchecks) > 0:
                            # Show message to user
                            messages.error(
                                request,
                                _('Some checks have failed on your translation!')
                            )
                            # Stay on same entry
                            return HttpResponseRedirect(
                                '%s?type=%s&pos=%d&dir=stay%s' % (
                                    obj.get_translate_url(),
                                    rqtype,
                                    pos,
                                    search_url
                                )
                            )
                # Redirect to next entry
                return HttpResponseRedirect('%s?type=%s&pos=%d%s' % (
                    obj.get_translate_url(),
                    rqtype,
                    pos,
                    search_url
                ))
            except Unit.DoesNotExist:
                logger.error(
                    'message %s disappeared!',
                    form.cleaned_data['checksum']
                )
                messages.error(
                    request,
                    _('Message you wanted to translate is no longer available!')
                )
    # Handle translation merging
    if 'merge' in request.GET and not locked:
        if not request.user.has_perm('trans.save_translation'):
            # Need privilege to save
            messages.error(
                request,
                _('You don\'t have privileges to save translations!')
            )
        else:
            try:
                mergeform = MergeForm(request.GET)
                if mergeform.is_valid():
                    try:
                        unit = Unit.objects.get(
                            checksum=mergeform.cleaned_data['checksum'],
                            translation=obj
                        )
                    except Unit.MultipleObjectsReturned:
                        # Possible temporary inconsistency caused by ongoing
                        # update of repo, let's pretend everyting is okay
                        unit = Unit.objects.filter(
                            checksum=mergeform.cleaned_data['checksum'],
                            translation=obj
                        )[0]
                    merged = Unit.objects.get(
                        pk=mergeform.cleaned_data['merge']
                    )
                    if unit.checksum != merged.checksum:
                        messages.error(
                            request,
                            _('Can not merge different messages!')
                        )
                    else:
                        # Store unit
                        unit.target = merged.target
                        unit.fuzzy = merged.fuzzy
                        saved = unit.save_backend(request)
                        # Update stats if there was change
                        if saved:
                            profile.translated += 1
                            profile.save()
                        # Redirect to next entry
                        return HttpResponseRedirect('%s?type=%s&pos=%d%s' % (
                            obj.get_translate_url(),
                            rqtype,
                            pos,
                            search_url
                        ))
            except Unit.DoesNotExist:
                # Fixed: this handler previously referenced 'form', which is
                # unbound on GET requests and would raise NameError here.
                logger.error(
                    'message %s disappeared!',
                    mergeform.cleaned_data['checksum']
                )
                messages.error(
                    request,
                    _('Message you wanted to translate is no longer available!')
                )
    # Handle accepting/deleting suggestions
    if not locked and ('accept' in request.GET or 'delete' in request.GET):
        # Check for authenticated users
        if not request.user.is_authenticated():
            messages.error(request, _('You need to log in to be able to manage suggestions!'))
            return HttpResponseRedirect('%s?type=%s&pos=%d&dir=stay%s' % (
                obj.get_translate_url(),
                rqtype,
                pos,
                search_url
            ))
        # Parse suggestion ID
        if 'accept' in request.GET:
            if not request.user.has_perm('trans.accept_suggestion'):
                messages.error(request, _('You do not have privilege to accept suggestions!'))
                return HttpResponseRedirect('%s?type=%s&pos=%d&dir=stay%s' % (
                    obj.get_translate_url(),
                    rqtype,
                    pos,
                    search_url
                ))
            sugid = request.GET['accept']
        else:
            if not request.user.has_perm('trans.delete_suggestion'):
                messages.error(request, _('You do not have privilege to delete suggestions!'))
                return HttpResponseRedirect('%s?type=%s&pos=%d&dir=stay%s' % (
                    obj.get_translate_url(),
                    rqtype,
                    pos,
                    search_url
                ))
            sugid = request.GET['delete']
        try:
            sugid = int(sugid)
            suggestion = Suggestion.objects.get(pk=sugid)
        except (ValueError, Suggestion.DoesNotExist):
            # Malformed or stale suggestion ID
            suggestion = None
        if suggestion is not None:
            if 'accept' in request.GET:
                # Accept suggesiont
                suggestion.accept(request)
                # Invalidate caches
                for unit in Unit.objects.filter(checksum=suggestion.checksum):
                    unit.translation.invalidate_cache('suggestions')
            # Delete suggestion in both cases (accepted ones are no longer
            # needed)
            suggestion.delete()
        else:
            messages.error(request, _('Invalid suggestion!'))
        # Redirect to same entry for possible editing
        return HttpResponseRedirect('%s?type=%s&pos=%d&dir=stay%s' % (
            obj.get_translate_url(),
            rqtype,
            pos,
            search_url
        ))
    reviewform = ReviewForm(request.GET)
    if reviewform.is_valid():
        allunits = obj.unit_set.review(
            reviewform.cleaned_data['date'],
            request.user
        )
        # Review
        if direction == 'stay':
            units = allunits.filter(position=pos)
        elif direction == 'back':
            units = allunits.filter(position__lt=pos).order_by('-position')
        else:
            units = allunits.filter(position__gt=pos)
    elif search_query != '':
        # Apply search conditions
        if search_type == 'exact':
            query = Q()
            if search_source:
                query |= Q(source=search_query)
            if search_target:
                query |= Q(target=search_query)
            if search_context:
                query |= Q(context=search_query)
            allunits = obj.unit_set.filter(query)
        elif search_type == 'substring':
            query = Q()
            if search_source:
                query |= Q(source__icontains=search_query)
            if search_target:
                query |= Q(target__icontains=search_query)
            if search_context:
                query |= Q(context__icontains=search_query)
            allunits = obj.unit_set.filter(query)
        else:
            # Fulltext search
            allunits = obj.unit_set.search(
                search_query,
                search_source,
                search_context,
                search_target
            )
        # NOTE(review): unlike the review branch above, 'stay' here looks up
        # the unit in the full unit set rather than in allunits — confirm
        # whether that is intended.
        if direction == 'stay':
            units = obj.unit_set.filter(position=pos)
        elif direction == 'back':
            units = allunits.filter(position__lt=pos).order_by('-position')
        else:
            units = allunits.filter(position__gt=pos)
    elif 'checksum' in request.GET:
        allunits = obj.unit_set.filter(checksum=request.GET['checksum'])
        units = allunits
    else:
        allunits = obj.unit_set.filter_type(rqtype, obj)
        # What unit set is about to show
        if direction == 'stay':
            units = obj.unit_set.filter(position=pos)
        elif direction == 'back':
            units = allunits.filter(position__lt=pos).order_by('-position')
        else:
            units = allunits.filter(position__gt=pos)
    # If we failed to get unit above or on no POST
    if unit is None:
        # Grab actual unit
        try:
            unit = units[0]
        except IndexError:
            messages.info(request, _('You have reached end of translating.'))
            return HttpResponseRedirect(obj.get_absolute_url())
    # Show secondary languages for logged in users
    if profile:
        secondary_langs = profile.secondary_languages.exclude(
            id=unit.translation.language.id
        )
        project = unit.translation.subproject.project
        secondary = Unit.objects.filter(
            checksum=unit.checksum,
            translated=True,
            translation__subproject__project=project,
            translation__language__in=secondary_langs,
        )
        # distinct('target') works with Django 1.4 so let's emulate that
        # based on presumption we won't get too many results
        targets = {}
        res = []
        for lang in secondary:
            if lang.target in targets:
                continue
            targets[lang.target] = 1
            res.append(lang)
        secondary = res
    # Prepare form
    form = TranslationForm(initial={
        'checksum': unit.checksum,
        'target': (unit.translation.language, unit.get_target_plurals()),
        'fuzzy': unit.fuzzy,
    })
    total = obj.unit_set.all().count()
    filter_count = allunits.count()
    return render_to_response(
        'translate.html',
        RequestContext(request, {
            'object': obj,
            'unit': unit,
            'last_changes': unit.change_set.all()[:10],
            'total': total,
            'type': rqtype,
            'filter_name': get_filter_name(rqtype, search_query),
            'filter_count': filter_count,
            'filter_pos': filter_count + 1 - units.count(),
            'form': form,
            'antispam': antispam,
            'comment_form': CommentForm(),
            'target_language': obj.language.code.replace('_', '-').lower(),
            'update_lock': own_lock,
            'secondary': secondary,
            'search_query': search_query,
            'search_url': search_url,
            'search_source': bool2str(search_source),
            'search_type': search_type,
            'search_target': bool2str(search_target),
            'search_context': bool2str(search_context),
            'locked': locked,
            'user_locked': user_locked,
            'project_locked': project_locked,
        },
    ))
@login_required
def comment(request, pk):
    '''
    Adds new comment.

    A comment posted with type 'source' is attached to the source string
    (language None); otherwise it is attached to the unit's translation
    language. Notifies subscribed users and, for source comments,
    optionally the upstream bug report address.
    '''
    obj = get_object_or_404(Unit, pk=pk)
    obj.check_acl(request)
    if request.POST.get('type', '') == 'source':
        # Source string comment, not tied to any language
        lang = None
    else:
        lang = obj.translation.language
    form = CommentForm(request.POST)
    if form.is_valid():
        new_comment = Comment.objects.create(
            user=request.user,
            checksum=obj.checksum,
            project=obj.translation.subproject.project,
            comment=form.cleaned_data['comment'],
            language=lang
        )
        # Record the action in the change log
        Change.objects.create(
            unit=obj,
            action=Change.ACTION_COMMENT,
            translation=obj.translation,
            user=request.user
        )
        # Invalidate counts cache
        if lang is None:
            obj.translation.invalidate_cache('sourcecomments')
        else:
            obj.translation.invalidate_cache('targetcomments')
        messages.info(request, _('Posted new comment'))
        # Notify subscribed users
        subscriptions = Profile.objects.subscribed_new_comment(
            obj.translation.subproject.project,
            lang,
            request.user
        )
        for subscription in subscriptions:
            subscription.notify_new_comment(obj, new_comment)
        # Notify upstream
        if lang is None and obj.translation.subproject.report_source_bugs != '':
            send_notification_email(
                'en',
                obj.translation.subproject.report_source_bugs,
                'new_comment',
                obj.translation,
                {
                    'unit': obj,
                    'comment': new_comment,
                    'subproject': obj.translation.subproject,
                },
                from_email=request.user.email,
            )
    else:
        messages.error(request, _('Failed to add comment!'))
    return HttpResponseRedirect(obj.get_absolute_url())
def get_string(request, checksum):
    '''
    AJAX handler for getting raw string.

    Returns an empty response when no unit matches the checksum.
    '''
    units = Unit.objects.filter(checksum=checksum)
    if units.count() == 0:
        return HttpResponse('')
    first_unit = units[0]
    first_unit.check_acl(request)
    return HttpResponse(first_unit.get_source_plurals()[0])
def get_similar(request, unit_id):
    '''
    AJAX handler for getting similar strings.
    '''
    unit = get_object_or_404(Unit, pk=int(unit_id))
    unit.check_acl(request)
    similar_units = Unit.objects.similar(unit)
    # distinct('target') works with Django 1.4 so let's emulate that
    # based on presumption we won't get too many results
    seen_targets = {}
    deduplicated = []
    for candidate in similar_units:
        if candidate.target in seen_targets:
            continue
        seen_targets[candidate.target] = 1
        deduplicated.append(candidate)
    return render_to_response('js/similar.html', RequestContext(request, {
        'similar': deduplicated,
    }))
def get_other(request, unit_id):
    '''
    AJAX handler for same strings in other subprojects.
    '''
    unit = get_object_or_404(Unit, pk=int(unit_id))
    unit.check_acl(request)
    other_units = Unit.objects.same(unit)
    # Only the filter type and search URL are needed from the search state
    rqtype, direction, pos, search_query, search_type, search_source, search_target, search_context, search_url = parse_search_url(request)
    context = {
        'other': other_units,
        'unit': unit,
        'type': rqtype,
        'search_url': search_url,
    }
    return render_to_response('js/other.html', RequestContext(request, context))
def get_dictionary(request, unit_id):
    '''
    Lists words from dictionary for current translation.
    '''
    unit = get_object_or_404(Unit, pk=int(unit_id))
    unit.check_acl(request)
    # Prepare analyzers
    # - standard analyzer simply splits words
    # - stemming extracts stems, to catch things like plurals
    analyzers = (StandardAnalyzer(), StemmingAnalyzer())
    # Extract words from all plurals and from context
    words = set()
    for text in unit.get_source_plurals() + [unit.context]:
        for analyzer in analyzers:
            words.update(token.text for token in analyzer(text))
    # Grab all words in the dictionary
    dictionary = Dictionary.objects.filter(
        project = unit.translation.subproject.project,
        language = unit.translation.language
    )
    if len(words) == 0:
        # No extracted words, no dictionary
        dictionary = dictionary.none()
    else:
        # Build the query (can not use __in as we want case insensitive lookup)
        query = Q()
        for word in words:
            query |= Q(source__iexact=word)
        # Filter dictionary
        dictionary = dictionary.filter(query)
    return render_to_response('js/dictionary.html', RequestContext(request, {
        'dictionary': dictionary,
    }))
@login_required
@permission_required('trans.ignore_check')
def ignore_check(request, check_id):
    '''
    Marks a quality check as ignored and refreshes affected caches.
    '''
    check = get_object_or_404(Check, pk=int(check_id))
    check.project.check_acl(request)
    # Mark check for ignoring
    check.ignore = True
    check.save()
    # Invalidate caches
    for unit in Unit.objects.filter(checksum=check.checksum):
        unit.translation.invalidate_cache()
    # response for AJAX
    return HttpResponse('ok')
@login_required
@permission_required('trans.upload_translation')
def upload_translation(request, project, subproject, lang):
    '''
    Handling of translation uploads.

    Merges an uploaded file into the translation, optionally overwriting
    existing strings and attributing the changes to another author,
    depending on the uploader's permissions.
    '''
    obj = get_object_or_404(
        Translation,
        language__code=lang,
        subproject__slug=subproject,
        subproject__project__slug=project,
        enabled=True
    )
    obj.check_acl(request)
    if not obj.is_locked(request) and request.method == 'POST':
        # Pick upload form matching the user's privileges
        if request.user.has_perm('trans.author_translation'):
            form = ExtraUploadForm(request.POST, request.FILES)
        elif request.user.has_perm('trans.overwrite_translation'):
            form = UploadForm(request.POST, request.FILES)
        else:
            form = SimpleUploadForm(request.POST, request.FILES)
        if form.is_valid():
            # Optional author override (privileged users only)
            if request.user.has_perm('trans.author_translation') and form.cleaned_data['author_name'] != '' and form.cleaned_data['author_email'] != '':
                author = '%s <%s>' % (form.cleaned_data['author_name'], form.cleaned_data['author_email'])
            else:
                author = None
            if request.user.has_perm('trans.overwrite_translation'):
                overwrite = form.cleaned_data['overwrite']
            else:
                overwrite = False
            try:
                ret = obj.merge_upload(request, request.FILES['file'], overwrite, author, merge_header=form.cleaned_data['merge_header'])
                if ret:
                    messages.info(request, _('File content successfully merged into translation.'))
                else:
                    messages.info(request, _('There were no new strings in uploaded file.'))
            except Exception as e:
                # Interpolate AFTER the gettext lookup; the previous code
                # passed an already-interpolated string to _() so the
                # message could never match the translation catalog.
                messages.error(request, _('File content merge failed: %s') % unicode(e))
    return HttpResponseRedirect(obj.get_absolute_url())
def not_found(request):
    '''
    Error handler showing list of available projects.
    '''
    template = loader.get_template('404.html')
    context = RequestContext(request, {
        'request_path': request.path,
        'title': _('Page Not Found'),
        'projects': Project.objects.all_acl(request.user),
    })
    return HttpResponseNotFound(template.render(context))
@cache_page(30 * 24 * 3600)
def js_config(request):
    '''
    Generates settings for javascript. Includes things like
    API keys for translaiton services or list of languages they
    support.

    The response is cached for 30 days because the remote language
    lists change rarely and the lookups hit external services.
    '''
    # Apertium support
    if appsettings.MT_APERTIUM_KEY is not None and appsettings.MT_APERTIUM_KEY != '':
        try:
            # Query Apertium for pairs translating from English
            listpairs = urllib2.urlopen('http://api.apertium.org/json/listPairs?key=%s' % appsettings.MT_APERTIUM_KEY)
            pairs = listpairs.read()
            parsed = json.loads(pairs)
            apertium_langs = [p['targetLanguage'] for p in parsed['responseData'] if p['sourceLanguage'] == 'en']
        except Exception as e:
            # Network or parsing failure: fall back to a known-good subset
            logger.error('failed to get supported languages from Apertium, using defaults (%s)', str(e))
            apertium_langs = ['gl', 'ca', 'es', 'eo']
    else:
        # No API key configured, disable the service
        apertium_langs = None
    # Microsoft translator support
    if appsettings.MT_MICROSOFT_KEY is not None and appsettings.MT_MICROSOFT_KEY != '':
        try:
            listpairs = urllib2.urlopen('http://api.microsofttranslator.com/V2/Http.svc/GetLanguagesForTranslate?appID=%s' % appsettings.MT_MICROSOFT_KEY)
            data = listpairs.read()
            parsed = ElementTree.fromstring(data)
            microsoft_langs = [p.text for p in parsed.getchildren()]
        except Exception as e:
            # Network or parsing failure: fall back to a known-good subset
            logger.error('failed to get supported languages from Microsoft, using defaults (%s)', str(e))
            microsoft_langs = [
                'ar', 'bg', 'ca', 'zh-CHS', 'zh-CHT', 'cs', 'da', 'nl', 'en',
                'et', 'fi', 'fr', 'de', 'el', 'ht', 'he', 'hi', 'mww', 'hu',
                'id', 'it', 'ja', 'ko', 'lv', 'lt', 'no', 'fa', 'pl', 'pt',
                'ro', 'ru', 'sk', 'sl', 'es', 'sv', 'th', 'tr', 'uk', 'vi'
            ]
    else:
        # No API key configured, disable the service
        microsoft_langs = None
    return render_to_response('js/config.js', RequestContext(request, {
        'apertium_langs': apertium_langs,
        'microsoft_langs': microsoft_langs,
    }),
    mimetype = 'application/javascript')
def about(request):
    '''
    Shows the about page with version information and global statistics.
    '''
    context = {}
    versions = get_versions()
    totals = Profile.objects.aggregate(Sum('translated'), Sum('suggested'))
    total_strings = 0
    for project in SubProject.objects.iterator():
        try:
            total_strings += project.translation_set.all()[0].total
        except (IndexError, Translation.DoesNotExist):
            # Indexing an empty queryset raises IndexError (not
            # DoesNotExist), so subprojects without translations used to
            # crash this page; catch both to be safe.
            pass
    context['title'] = _('About Weblate')
    context['total_translations'] = totals['translated__sum']
    context['total_suggestions'] = totals['suggested__sum']
    context['total_users'] = Profile.objects.count()
    context['total_strings'] = total_strings
    context['total_languages'] = Language.objects.filter(
        translation__total__gt=0
    ).distinct().count()
    context['total_checks'] = Check.objects.count()
    context['ignored_checks'] = Check.objects.filter(ignore=True).count()
    context['versions'] = versions
    return render_to_response('about.html', RequestContext(request, context))
@user_passes_test(lambda u: u.has_perm('trans.commit_translation') or u.has_perm('trans.update_translation'))
def git_status_project(request, project):
    '''
    Renders git repository status widget for a project.
    '''
    project_obj = get_object_or_404(Project, slug=project)
    project_obj.check_acl(request)
    context = RequestContext(request, {'object': project_obj})
    return render_to_response('js/git-status.html', context)
@user_passes_test(lambda u: u.has_perm('trans.commit_translation') or u.has_perm('trans.update_translation'))
def git_status_subproject(request, project, subproject):
    '''
    Renders git repository status widget for a subproject.
    '''
    subprj = get_object_or_404(SubProject, slug=subproject, project__slug=project)
    subprj.check_acl(request)
    context = RequestContext(request, {'object': subprj})
    return render_to_response('js/git-status.html', context)
@user_passes_test(lambda u: u.has_perm('trans.commit_translation') or u.has_perm('trans.update_translation'))
def git_status_translation(request, project, subproject, lang):
    '''
    Renders git repository status widget for a single translation.
    '''
    translation = get_object_or_404(
        Translation,
        enabled=True,
        language__code=lang,
        subproject__slug=subproject,
        subproject__project__slug=project,
    )
    translation.check_acl(request)
    context = RequestContext(request, {'object': translation})
    return render_to_response('js/git-status.html', context)
def data_root(request):
    '''
    Shows root page for data exports, listing accessible projects.
    '''
    site = Site.objects.get_current()
    context = {
        'site_domain': site.domain,
        'api_docs': weblate.get_doc_url('api', 'exports'),
        'rss_docs': weblate.get_doc_url('api', 'rss'),
        'projects': Project.objects.all_acl(request.user),
    }
    return render_to_response('data-root.html', RequestContext(request, context))
def data_project(request, project):
    '''
    Shows data exports page for a single project.
    '''
    project_obj = get_object_or_404(Project, slug=project)
    project_obj.check_acl(request)
    site = Site.objects.get_current()
    context = {
        'object': project_obj,
        'site_domain': site.domain,
        'api_docs': weblate.get_doc_url('api', 'exports'),
        'rss_docs': weblate.get_doc_url('api', 'rss'),
    }
    return render_to_response('data.html', RequestContext(request, context))
|
"""Dummy test.
Pointless dummy test.
"""
from __future__ import absolute_import
from __future__ import print_function
from __future__ import division
def inc(arg):
    """Return *arg* incremented by one."""
    incremented = arg + 1
    return incremented
def test_answer():
    """Assert that incrementing 3 yields 4."""
    expected = 4
    assert inc(3) == expected
|
from django.shortcuts import render
from django.contrib.auth.decorators import login_required
from django.utils.decorators import method_decorator
from django.views.generic import View
from .models import \
Course, Registration, Task, TaskSubmission, ScoreProfile
from .forms import TaskSubmissionForm
class CourseListView(View):
    """Course selection landing page for logged in users."""

    template_name = 'courses/course_select.html'

    @method_decorator(login_required)
    def get(self, request, *args, **kwargs):
        """Render the user's courses, score profile and top-ten list."""
        top_ten = ScoreProfile.objects.all().order_by('-score')[:10]
        context = {
            'courses': request.user.course_set.all(),
            'profile': ScoreProfile.get_score_profile(request.user),
            'highscore': top_ten,
        }
        return render(request, self.template_name, context)

    @method_decorator(login_required)
    def post(self, request, *args, **kwargs):
        # Stub: POST handling is not implemented yet.
        pass
class ProfileView(View):
    """Profile page showing rank, score, courses and valid submissions."""
    @method_decorator(login_required)
    def get(self, request, *args, **kwargs):
        profile = ScoreProfile.get_score_profile(request.user)
        # Task ids for which the user already has a validated submission.
        solved_tasks = TaskSubmission.objects.filter(
            submitted_by=request.user,
            valid=True).values_list('task', flat=True)
        context = {}
        context['username'] = request.user.username
        context['rank'] = profile.current_rank
        context['score'] = profile.score
        context['courses'] = request.user.course_set.all()
        context['valid_submissions'] = solved_tasks
        return render(request, 'courses/profile.html', context)
class TaskSubmissionView(View):
    """Task detail page with the submission form and submission history."""
    form_class = TaskSubmissionForm
    template_name = 'courses/task.html'
    @method_decorator(login_required)
    def get(self, request, *args, **kwargs):
        context = self.get_context_data()
        context['form'] = self.form_class()
        user_subs = TaskSubmission.objects.filter(
            submitted_by=request.user,
            task=self.kwargs['task_id']
        )
        context['subs'] = user_subs
        context['valid_subs'] = user_subs.filter(valid=True)
        return render(request, self.template_name, context)
    @method_decorator(login_required)
    def post(self, request, *args, **kwargs):
        # New submissions start out unvalidated.
        submission = TaskSubmission()
        submission.task = Task.objects.get(pk=self.kwargs['task_id'])
        submission.submitted_by = request.user
        submission.valid = False
        form = self.form_class(request.POST, request.FILES,
                               instance=submission)
        if form.is_valid():
            form.save()
        # Re-render the task page, now including the new submission.
        return self.get(request, *args, **kwargs)
    def get_context_data(self, **kwargs):
        return {'task': Task.objects.get(pk=self.kwargs['task_id'])}
class CourseRegistrationView(View):
    """Replace the user's registration for a course.

    Any existing registration is deleted, then a new, ungranted one is
    created whose role depends on the POSTed ``sign_up`` value
    (``master``, ``kid`` or ``reserve``).
    """
    @method_decorator(login_required)
    def post(self, request, *args, **kwargs):
        course_id = request.POST['course_id']
        # NOTE(review): Course.objects.get raises DoesNotExist for an unknown
        # pk, so the else branch below looks unreachable — confirm intent.
        course = Course.objects.get(pk=course_id)
        if course:
            # Drop any previous registration before creating the new one.
            Registration.objects.filter(user=request.user,
                                        course=course).delete()
        else:
            return
        # NOTE(review): a Django view should return an HttpResponse; the bare
        # returns below yield None — verify against the calling URLconf/tests.
        if request.POST['sign_up'] == u'master':
            Registration(user=request.user,
                         course=course,
                         granted=False,
                         code_master=True,
                         role=Registration.CODE_MASTER).save()
        elif request.POST['sign_up'] == u'kid':
            Registration(user=request.user,
                         course=course,
                         granted=False,
                         code_master=False,
                         role=Registration.KID).save()
        elif request.POST['sign_up'] == u'reserve':
            Registration(user=request.user,
                         course=course,
                         granted=False,
                         code_master=False,
                         role=Registration.RESERVE).save()
        return
|
import hashlib
import re
import os
import pickle
from functools import partial
from externals.lib.misc import file_scan, update_dict
import logging
log = logging.getLogger(__name__)
# Tool version reported by --version.
VERSION = "0.0"
# Default folder that symlinks are created in.
DEFAULT_DESTINATION = './files/'
# Default pickle file holding the hash cache between runs.
DEFAULT_CACHE_FILENAME = 'hash_cache.pickle'
# Video/subtitle extensions considered when scanning folders.
DEFAULT_FILE_EXTS = {'mp4', 'avi', 'rm', 'mkv', 'ogm', 'ssa', 'srt', 'ass'}
def hash_files(folder, file_regex=None, hasher=hashlib.sha256):
    """Map content hash -> scanned file record for every file under folder.

    Delegates the walking and hashing to ``file_scan``; only the mapping
    by ``.hash`` is built here.
    """
    hash_map = {}
    for scanned in file_scan(folder, file_regex=file_regex, hasher=hasher):
        hash_map[scanned.hash] = scanned
    return hash_map
def hash_source_dest(source_folder=None, destination_folder=None, hasher=hashlib.sha256, file_exts=DEFAULT_FILE_EXTS, **kwargs):
    """Hash both folders, returning {'source_files': ..., 'destination_files': ...}.

    Only files whose extension is in ``file_exts`` are considered; extra
    keyword arguments (e.g. CLI options) are accepted and ignored.
    """
    ext_pattern = re.compile(r'.*\.({})$'.format('|'.join(file_exts)))
    hash_folder = partial(hash_files, hasher=hasher, file_regex=ext_pattern)
    return {
        'source_files': hash_folder(source_folder),
        'destination_files': hash_folder(destination_folder),
    }
def symlink_matched_files(source_files=None, destination_files=None, destination_folder=None, dry_run=False, **kwargs):
    """Symlink every source file absent from the destination into destination_folder.

    Files are matched by content hash; already-present hashes are skipped.
    With ``dry_run`` the candidates are only logged. Failures to link are
    logged and do not abort the run.
    """
    missing = set(source_files.keys()) - set(destination_files.keys())
    for file_hash in sorted(missing, key=lambda h: source_files[h].file):
        source = source_files[file_hash]
        log.debug(source.file)
        if dry_run:
            continue
        try:
            os.symlink(source.absolute, os.path.join(destination_folder, source.file))
        except OSError:
            log.info('unable to symlink {0}'.format(source.file))
def move_files():
    """Placeholder for a future move operation; currently a no-op."""
    pass
def get_args():
    """Parse command-line options and return them as a plain dict."""
    import argparse
    arg_parser = argparse.ArgumentParser(
        description="""
        Find the duplicates
        """,
        epilog=""" """
    )
    # Folders
    arg_parser.add_argument('-d', '--destination_folder', action='store', help='', default=DEFAULT_DESTINATION)
    arg_parser.add_argument('-s', '--source_folder', action='store', help='', required=True)
    arg_parser.add_argument('-e', '--file_exts', nargs='*', help='file exts to find', default=DEFAULT_FILE_EXTS)
    # Operation
    #arg_parser.add_argument('-c', '--copy', action='store_true', help='copy files to destination (to be ready for importing)', default=False)
    # Cache
    arg_parser.add_argument('--cache_filename', action='store', help='', default=DEFAULT_CACHE_FILENAME)
    # Common
    arg_parser.add_argument('--dry_run', action='store_true', help='', default=False)
    arg_parser.add_argument('-v', '--verbose', action='store_true', help='', default=False)
    arg_parser.add_argument('--version', action='version', version=VERSION)
    return vars(arg_parser.parse_args())
def main():
    """CLI entry point: hash source/destination trees and symlink new files."""
    args = get_args()
    logging.basicConfig(level=logging.DEBUG if args['verbose'] else logging.INFO)
    # Load the cached hashes if present; otherwise scan both folders and
    # persist the result for the next run.
    # NOTE(review): the cache is never invalidated when folders or their
    # contents change — delete the pickle file to force a rescan.
    try:
        with open(args['cache_filename'], 'rb') as f:
            data = pickle.load(f)
    except IOError:
        with open(args['cache_filename'], 'wb') as f:
            data = hash_source_dest(**args)
            pickle.dump(data, f)
    # Merge CLI options with the hash maps and create the missing symlinks.
    symlink_matched_files(**update_dict(args.copy(), data))
# Run the CLI entry point when executed as a script.
if __name__ == "__main__":
    main()
|
# Demo control script (Python 2): `start` and `arguments` are injected
# into the namespace by the controlscript framework import below.
from controlscript import *
print "This is a simple control script. It just does nothing and exits successfully."
print "Start parameter is %s, additional parameters are %s" % (start, arguments)
class DoNothing(ControlAction):
    """Control action that only prints on start and stop; never fails."""
    def __init__(self):
        # Register under a human-readable action name.
        ControlAction.__init__(self, "Do nothing")
    def start(self):
        print "Do nothing on start"
        print
    def stop(self):
        print "Do nothing on stop"
        print
# Run the script with the single no-op action.
ControlScript([
	DoNothing()
])
|
import logging
from django.core.management.base import BaseCommand
from catalog.core.visualization.data_access import visualization_cache
logger = logging.getLogger(__name__)
class Command(BaseCommand):
    """Management command that warms the visualization dataframe cache."""
    help = '''Build pandas dataframe cache of primary data'''
    def handle(self, *args, **options):
        # Delegate the cache build to the shared visualization cache object.
        visualization_cache.get_or_create_many()
|
"""
Created on Fri Jul 3 13:38:36 2015
@author: madengr
"""
from gnuradio import gr
import osmosdr
from gnuradio import filter as grfilter # Don't redefine Python's filter()
from gnuradio import blocks
from gnuradio import fft
from gnuradio.fft import window
from gnuradio import analog
from gnuradio import audio
import os
import time
import numpy as np
from gnuradio.filter import pfb
class BaseTuner(gr.hier_block2):
    """Some base methods that are the same between the known tuner types.
    See TunerDemodNBFM and TunerDemodAM for better documentation.
    """
    def set_center_freq(self, center_freq, rf_center_freq):
        """Sets baseband center frequency and file name
        Sets baseband center frequency of frequency translating FIR filter
        Also sets file name of wave file sink
        If tuner is tuned to zero Hz then set to file name to /dev/null
        Otherwise set file name to tuned RF frequency in MHz
        Args:
            center_freq (float): Baseband center frequency in Hz
            rf_center_freq (float): RF center in Hz (for file name)
        """
        # Since the frequency (hence file name) changed, then close it
        self.blocks_wavfile_sink.close()
        # If we never wrote any data to the wavfile sink, delete the file
        self._delete_wavfile_if_empty()
        # Set the frequency
        self.freq_xlating_fir_filter_ccc.set_center_freq(center_freq)
        self.center_freq = center_freq
        # Set the file name
        if self.center_freq == 0 or not self.record:
            # If tuner at zero Hz, or record false, then file name to /dev/null
            file_name = "/dev/null"
        else:
            # Otherwise use frequency and time stamp for file name
            tstamp = "_" + str(int(time.time()))
            # File name carries the absolute RF frequency in MHz, 3 decimals
            file_freq = (rf_center_freq + self.center_freq)/1E6
            file_freq = np.round(file_freq, 3)
            file_name = 'wav/' + '{:.3f}'.format(file_freq) + tstamp + ".wav"
            # Make sure the 'wav' directory exists
            try:
                os.mkdir('wav')
            except OSError: # will need to add something here for Win support
                pass # directory already exists
        self.file_name = file_name
        self.blocks_wavfile_sink.open(self.file_name)
    def _delete_wavfile_if_empty(self):
        """Delete the current wavfile if it's empty."""
        if (not self.record or not self.file_name or
                self.file_name == '/dev/null'):
            return
        # If we never wrote any data to the wavfile sink, delete
        # the (empty) wavfile
        # 44 bytes is presumably a bare WAV header with no samples — confirm
        if os.stat(self.file_name).st_size in (44, 0): # ugly hack
            os.unlink(self.file_name) # delete the file
    def set_squelch(self, squelch_db):
        """Sets the threshold for both squelches
        Args:
            squelch_db (float): Squelch in dB
        """
        self.analog_pwr_squelch_cc.set_threshold(squelch_db)
    def __del__(self):
        """Called when the object is destroyed."""
        # Make a best effort attempt to clean up our wavfile if it's empty
        try:
            self._delete_wavfile_if_empty()
        except Exception:
            pass # oh well, we're dying anyway
class TunerDemodNBFM(BaseTuner):
    """Tuner, demodulator, and recorder chain for narrow band FM demodulation
    Kept as its own class so multiple can be instantiated in parallel
    Accepts complex baseband samples at 1 Msps minimum
    Frequency translating FIR filter tunes from -samp_rate/2 to +samp_rate/2
    The following sample rates assume 1 Msps input
    First two stages of decimation are 5 each for a total of 25
    Thus first two stages brings 1 Msps down to 40 ksps
    The third stage decimates by int(samp_rate/1E6)
    Thus output rate will vary from 40 ksps to 79.99 ksps
    The channel is filtered to 12.5 KHz bandwidth followed by squelch
    The squelch is non-blocking since samples will be added with other demods
    The quadrature demod is followed by a fourth stage of decimation by 5
    This brings the sample rate down to 8 ksps to 15.98 ksps
    The audio is low-pass filtered to 3.5 kHz bandwidth
    The polyphase resampler resamples by samp_rate/(decims[1] * decims[0]**3)
    This results in a constant 8 ksps, irrespective of RF sample rate
    This 8 ksps audio stream may be added to other demod streams
    The audio is run through an additional blocking squelch at -200 dB
    This stops the sample flow so squelched audio is not recorded to file
    The wav file sink stores 8-bit samples (grainy quality but compact)
    Default demodulator center frequency is 0 Hz
    This is desired since hardware DC removal reduces sensitivity at 0 Hz
    NBFM demod of LO leakage will just be 0 amplitude
    Args:
        samp_rate (float): Input baseband sample rate in sps (1E6 minimum)
        audio_rate (float): Output audio sample rate in sps (8 kHz minimum)
        record (bool): Record audio to file if True
    Attributes:
        center_freq (float): Baseband center frequency in Hz
        record (bool): Record audio to file if True
    """
    # pylint: disable=too-many-instance-attributes
    def __init__(self, samp_rate=4E6, audio_rate=8000, record=True):
        gr.hier_block2.__init__(self, "TunerDemodNBFM",
                                gr.io_signature(1, 1, gr.sizeof_gr_complex),
                                gr.io_signature(1, 1, gr.sizeof_float))
        # Default values
        self.center_freq = 0
        squelch_db = -60
        self.quad_demod_gain = 0.050
        self.file_name = "/dev/null"
        self.record = record
        # Decimation values for four stages of decimation
        decims = (5, int(samp_rate/1E6))
        # Low pass filter taps for decimation by 5
        low_pass_filter_taps_0 = \
            grfilter.firdes_low_pass(1, 1, 0.090, 0.010,
                                     grfilter.firdes.WIN_HAMMING)
        # Frequency translating FIR filter decimating by 5
        self.freq_xlating_fir_filter_ccc = \
            grfilter.freq_xlating_fir_filter_ccc(decims[0],
                                                 low_pass_filter_taps_0,
                                                 self.center_freq, samp_rate)
        # FIR filter decimating by 5
        fir_filter_ccc_0 = grfilter.fir_filter_ccc(decims[0],
                                                   low_pass_filter_taps_0)
        # Low pass filter taps for decimation from samp_rate/25 to 40-79.9 ksps
        # In other words, decimation by int(samp_rate/1E6)
        # 12.5 kHz cutoff for NBFM channel bandwidth
        low_pass_filter_taps_1 = grfilter.firdes_low_pass(
            1, samp_rate/decims[0]**2, 12.5E3, 1E3, grfilter.firdes.WIN_HAMMING)
        # FIR filter decimation by int(samp_rate/1E6)
        fir_filter_ccc_1 = grfilter.fir_filter_ccc(decims[1],
                                                   low_pass_filter_taps_1)
        # Non blocking power squelch
        self.analog_pwr_squelch_cc = analog.pwr_squelch_cc(squelch_db,
                                                           1e-1, 0, False)
        # Quadrature demod with gain set for decent audio
        # The gain will be later multiplied by the 0 dB normalized volume
        self.analog_quadrature_demod_cf = \
            analog.quadrature_demod_cf(self.quad_demod_gain)
        # 3.5 kHz cutoff for audio bandwidth
        low_pass_filter_taps_2 = grfilter.firdes_low_pass(1,\
                        samp_rate/(decims[1] * decims[0]**2),\
                        3.5E3, 500, grfilter.firdes.WIN_HAMMING)
        # FIR filter decimating by 5 from 40-79.9 ksps to 8-15.98 ksps
        fir_filter_fff_0 = grfilter.fir_filter_fff(decims[0],
                                                   low_pass_filter_taps_2)
        # Polyphase resampler allows arbitrary RF sample rates
        # Takes 8-15.98 ksps to a constant 8 ksps for audio
        pfb_resamp = audio_rate/float(samp_rate/(decims[1] * decims[0]**3))
        pfb_arb_resampler_fff = pfb.arb_resampler_fff(pfb_resamp, taps=None,
                                                      flt_size=32)
        # Connect the blocks for the demod
        self.connect(self, self.freq_xlating_fir_filter_ccc)
        self.connect(self.freq_xlating_fir_filter_ccc, fir_filter_ccc_0)
        self.connect(fir_filter_ccc_0, fir_filter_ccc_1)
        self.connect(fir_filter_ccc_1, self.analog_pwr_squelch_cc)
        self.connect(self.analog_pwr_squelch_cc,
                     self.analog_quadrature_demod_cf)
        self.connect(self.analog_quadrature_demod_cf, fir_filter_fff_0)
        self.connect(fir_filter_fff_0, pfb_arb_resampler_fff)
        self.connect(pfb_arb_resampler_fff, self)
        # Need to set this to a very low value of -200 since it is after demod
        # Only want it to gate when the previous squelch has gone to zero
        analog_pwr_squelch_ff = analog.pwr_squelch_ff(-200, 1e-1, 0, True)
        # File sink with single channel and 8 bits/sample
        self.blocks_wavfile_sink = blocks.wavfile_sink(self.file_name, 1,
                                                       audio_rate, 8)
        # Connect the blocks for recording
        self.connect(pfb_arb_resampler_fff, analog_pwr_squelch_ff)
        self.connect(analog_pwr_squelch_ff, self.blocks_wavfile_sink)
    def set_volume(self, volume_db):
        """Sets the volume
        Args:
            volume_db (float): Volume in dB
        """
        # Convert dB volume to linear gain relative to the 0 dB default
        gain = self.quad_demod_gain * 10**(volume_db/20.0)
        self.analog_quadrature_demod_cf.set_gain(gain)
class TunerDemodAM(BaseTuner):
    """Tuner, demodulator, and recorder chain for AM demodulation
    Kept as its own class so multiple can be instantiated in parallel
    Accepts complex baseband samples at 1 Msps minimum
    Frequency translating FIR filter tunes from -samp_rate/2 to +samp_rate/2
    The following sample rates assume 1 Msps input
    First two stages of decimation are 5 each for a total of 25
    Thus first two stages brings 1 Msps down to 40 ksps
    The third stage decimates by int(samp_rate/1E6)
    Thus output rate will vary from 40 ksps to 79.99 ksps
    The channel is filtered to 12.5 KHz bandwidth followed by squelch
    The squelch is non-blocking since samples will be added with other demods
    The AGC sets level (volume) prior to AM demod
    The AM demod is followed by a fourth stage of decimation by 5
    This brings the sample rate down to 8 ksps to 15.98 ksps
    The audio is low-pass filtered to 3.5 kHz bandwidth
    The polyphase resampler resamples by samp_rate/(decims[1] * decims[0]**3)
    This results in a constant 8 ksps, irrespective of RF sample rate
    This 8 ksps audio stream may be added to other demod streams
    The audio is run through an additional blocking squelch at -200 dB
    This stops the sample flow so squelched audio is not recorded to file
    The wav file sink stores 8-bit samples (grainy quality but compact)
    Default demodulator center frequency is 0 Hz
    This is desired since hardware DC removal reduces sensitivity at 0 Hz
    AM demod of LO leakage will just be 0 amplitude
    Args:
        samp_rate (float): Input baseband sample rate in sps (1E6 minimum)
        audio_rate (float): Output audio sample rate in sps (8 kHz minimum)
        record (bool): Record audio to file if True
    Attributes:
        center_freq (float): Baseband center frequency in Hz
        record (bool): Record audio to file if True
    """
    # pylint: disable=too-many-instance-attributes
    # pylint: disable=too-many-locals
    def __init__(self, samp_rate=4E6, audio_rate=8000, record=True):
        gr.hier_block2.__init__(self, "TunerDemodAM",
                                gr.io_signature(1, 1, gr.sizeof_gr_complex),
                                gr.io_signature(1, 1, gr.sizeof_float))
        # Default values
        self.center_freq = 0
        squelch_db = -60
        self.agc_ref = 0.1
        self.file_name = "/dev/null"
        self.record = record
        # Decimation values for four stages of decimation
        decims = (5, int(samp_rate/1E6))
        # Low pass filter taps for decimation by 5
        low_pass_filter_taps_0 = \
            grfilter.firdes_low_pass(1, 1, 0.090, 0.010,
                                     grfilter.firdes.WIN_HAMMING)
        # Frequency translating FIR filter decimating by 5
        self.freq_xlating_fir_filter_ccc = \
            grfilter.freq_xlating_fir_filter_ccc(decims[0],
                                                 low_pass_filter_taps_0,
                                                 self.center_freq, samp_rate)
        # FIR filter decimating by 5
        fir_filter_ccc_0 = grfilter.fir_filter_ccc(decims[0],
                                                   low_pass_filter_taps_0)
        # Low pass filter taps for decimation from samp_rate/25 to 40-79.9 ksps
        # In other words, decimation by int(samp_rate/1E6)
        # 12.5 kHz cutoff for NBFM channel bandwidth
        low_pass_filter_taps_1 = grfilter.firdes_low_pass(
            1, samp_rate/decims[0]**2, 12.5E3, 1E3, grfilter.firdes.WIN_HAMMING)
        # FIR filter decimation by int(samp_rate/1E6)
        fir_filter_ccc_1 = grfilter.fir_filter_ccc(decims[1],
                                                   low_pass_filter_taps_1)
        # Non blocking power squelch
        # Squelch level needs to be lower than NBFM or else choppy AM demod
        self.analog_pwr_squelch_cc = analog.pwr_squelch_cc(squelch_db,
                                                           1e-1, 0, False)
        # AGC with reference set for nominal 0 dB volume
        # Parameters tweaked to prevent impulse during squelching
        self.agc3_cc = analog.agc3_cc(1.0, 1E-4, self.agc_ref, 10, 1)
        self.agc3_cc.set_max_gain(65536)
        # AM demod with complex_to_mag()
        # Can't use analog.am_demod_cf() since it won't work with N>2 demods
        am_demod_cf = blocks.complex_to_mag(1)
        # 3.5 kHz cutoff for audio bandwidth
        low_pass_filter_taps_2 = grfilter.firdes_low_pass(1,\
                        samp_rate/(decims[1] * decims[0]**2),\
                        3.5E3, 500, grfilter.firdes.WIN_HAMMING)
        # FIR filter decimating by 5 from 40-79.9 ksps to 8-15.98 ksps
        fir_filter_fff_0 = grfilter.fir_filter_fff(decims[0],
                                                   low_pass_filter_taps_2)
        # Polyphase resampler allows arbitrary RF sample rates
        # Takes 8-15.98 ksps to a constant 8 ksps for audio
        pfb_resamp = audio_rate/float(samp_rate/(decims[1] * decims[0]**3))
        pfb_arb_resampler_fff = pfb.arb_resampler_fff(pfb_resamp, taps=None,
                                                      flt_size=32)
        # Connect the blocks for the demod
        self.connect(self, self.freq_xlating_fir_filter_ccc)
        self.connect(self.freq_xlating_fir_filter_ccc, fir_filter_ccc_0)
        self.connect(fir_filter_ccc_0, fir_filter_ccc_1)
        self.connect(fir_filter_ccc_1, self.analog_pwr_squelch_cc)
        self.connect(self.analog_pwr_squelch_cc, self.agc3_cc)
        self.connect(self.agc3_cc, am_demod_cf)
        self.connect(am_demod_cf, fir_filter_fff_0)
        self.connect(fir_filter_fff_0, pfb_arb_resampler_fff)
        self.connect(pfb_arb_resampler_fff, self)
        # Need to set this to a very low value of -200 since it is after demod
        # Only want it to gate when the previous squelch has gone to zero
        analog_pwr_squelch_ff = analog.pwr_squelch_ff(-200, 1e-1, 0, True)
        # File sink with single channel and 8 bits/sample
        self.blocks_wavfile_sink = blocks.wavfile_sink(self.file_name, 1,
                                                       audio_rate, 8)
        # Connect the blocks for recording
        self.connect(pfb_arb_resampler_fff, analog_pwr_squelch_ff)
        self.connect(analog_pwr_squelch_ff, self.blocks_wavfile_sink)
    def set_volume(self, volume_db):
        """Sets the volume
        Args:
            volume_db (float): Volume in dB
        """
        # Scale the AGC reference relative to the 0 dB default level
        agc_ref = self.agc_ref * 10**(volume_db/20.0)
        self.agc3_cc.set_reference(agc_ref)
class Receiver(gr.top_block):
    """Receiver for narrow band frequency modulation
    Controls hardware and instantiates multiple tuner/demodulators
    Generates FFT power spectrum for channel estimation
    Args:
        ask_samp_rate (float): Asking sample rate of hardware in sps (1E6 min)
        num_demod (int): Number of parallel demodulators
        type_demod (int): Type of demodulator (0=NBFM, 1=AM)
        hw_args (string): Argument string to pass to hardware
        freq_correction (int): Frequency correction in ppm
        record (bool): Record audio to file if True
    Attributes:
        center_freq (float): Hardware RF center frequency in Hz
        samp_rate (float): Hardware sample rate in sps (1E6 min)
        gain_db (int): Hardware RF gain in dB
        squelch_db (int): Squelch in dB
        volume_db (int): Volume in dB
    """
    # pylint: disable=too-many-instance-attributes
    # pylint: disable=too-many-locals
    # pylint: disable=too-many-arguments
    def __init__(self, ask_samp_rate=4E6, num_demod=4, type_demod=0,
                 hw_args="uhd", freq_correction=0, record=True, play=True):
        # Call the initialization method from the parent class
        gr.top_block.__init__(self, "Receiver")
        # Default values
        self.center_freq = 144E6
        self.gain_db = 10
        self.squelch_db = -70
        self.volume_db = 0
        audio_rate = 8000
        # Setup the USRP source, or use the USRP sim
        self.src = osmosdr.source(args="numchan=" + str(1) + " " + hw_args)
        self.src.set_sample_rate(ask_samp_rate)
        self.src.set_gain(self.gain_db)
        self.src.set_center_freq(self.center_freq)
        self.src.set_freq_corr(freq_correction)
        # Get the sample rate and center frequency from the hardware
        self.samp_rate = self.src.get_sample_rate()
        self.center_freq = self.src.get_center_freq()
        # Set the I/Q bandwidth to 80 % of sample rate
        self.src.set_bandwidth(0.8 * self.samp_rate)
        # NBFM channel is about 10 KHz wide
        # Want about 3 FFT bins to span a channel
        # Use length FFT so 4 Msps / 1024 = 3906.25 Hz/bin
        # This also means 3906.25 vectors/second
        # Using below formula keeps FFT size a power of two
        # Also keeps bin size constant for power of two sampling rates
        # Use of 256 sets 3906.25 Hz/bin; increase to reduce bin size
        samp_ratio = self.samp_rate / 1E6
        fft_length = 256 * int(pow(2, np.ceil(np.log(samp_ratio)/np.log(2))))
        # -----------Flow for FFT--------------
        # Convert USRP stream to vector
        stream_to_vector = blocks.stream_to_vector(gr.sizeof_gr_complex*1,
                                                   fft_length)
        # Want about 1000 vector/sec
        amount = int(round(self.samp_rate/fft_length/1000))
        keep_one_in_n = blocks.keep_one_in_n(gr.sizeof_gr_complex*
                                             fft_length, amount)
        # Take FFT
        fft_vcc = fft.fft_vcc(fft_length, True,
                              window.blackmanharris(fft_length), True, 1)
        # Compute the power
        complex_to_mag_squared = blocks.complex_to_mag_squared(fft_length)
        # Video average and decimate from 1000 vector/sec to 10 vector/sec
        integrate_ff = blocks.integrate_ff(100, fft_length)
        # Probe vector
        self.probe_signal_vf = blocks.probe_signal_vf(fft_length)
        # Connect the blocks
        self.connect(self.src, stream_to_vector, keep_one_in_n,
                     fft_vcc, complex_to_mag_squared,
                     integrate_ff, self.probe_signal_vf)
        # -----------Flow for Demod--------------
        # Create N parallel demodulators as a list of objects
        # Default to NBFM demod
        self.demodulators = []
        for idx in range(num_demod):
            if type_demod == 1:
                self.demodulators.append(TunerDemodAM(self.samp_rate,
                                                      audio_rate, record))
            else:
                self.demodulators.append(TunerDemodNBFM(self.samp_rate,
                                                        audio_rate, record))
        if play:
            # Create an adder
            add_ff = blocks.add_ff(1)
            # Connect the demodulators between the source and adder
            for idx, demodulator in enumerate(self.demodulators):
                self.connect(self.src, demodulator, (add_ff, idx))
            # Audio sink
            audio_sink = audio.sink(audio_rate)
            # Connect the summed outputs to the audio sink
            self.connect(add_ff, audio_sink)
        else:
            # Just connect each demodulator to the receiver source
            for demodulator in self.demodulators:
                self.connect(self.src, demodulator)
    def set_center_freq(self, center_freq):
        """Sets RF center frequency of hardware
        Args:
            center_freq (float): Hardware RF center frequency in Hz
        """
        # Tune the hardware
        self.src.set_center_freq(center_freq)
        # Update center frequency with hardware center frequency
        # Do this to account for slight hardware offsets
        self.center_freq = self.src.get_center_freq()
    def set_gain(self, gain_db):
        """Sets gain of RF hardware
        Args:
            gain_db (float): Hardware RF gain in dB
        """
        self.src.set_gain(gain_db)
        # Read back the gain the hardware actually applied
        self.gain_db = self.src.get_gain()
    def set_squelch(self, squelch_db):
        """Sets squelch of all demodulators and clamps range
        Args:
            squelch_db (float): Squelch in dB
        """
        # Clamp to [-100, 0] dB before applying to every demodulator
        self.squelch_db = max(min(0, squelch_db), -100)
        for demodulator in self.demodulators:
            demodulator.set_squelch(self.squelch_db)
    def set_volume(self, volume_db):
        """Sets volume of all demodulators and clamps range
        Args:
            volume_db (float): Volume in dB
        """
        # Clamp to [-20, 20] dB before applying to every demodulator
        self.volume_db = max(min(20, volume_db), -20)
        for demodulator in self.demodulators:
            demodulator.set_volume(self.volume_db)
    def get_demod_freqs(self):
        """Gets baseband frequencies of all demodulators
        Returns:
            List[float]: List of baseband center frequencies in Hz
        """
        center_freqs = []
        for demodulator in self.demodulators:
            center_freqs.append(demodulator.center_freq)
        return center_freqs
def main():
    """Test the receiver
    Sets up the hardware
    Tunes a couple of demodulators
    Prints the max power spectrum
    """
    # Create receiver object
    ask_samp_rate = 4E6
    num_demod = 4
    type_demod = 0
    hw_args = "uhd"
    freq_correction = 0
    record = False
    play = True
    receiver = Receiver(ask_samp_rate, num_demod, type_demod, hw_args,
                        freq_correction, record, play)
    # Start the receiver and wait for samples to accumulate
    receiver.start()
    time.sleep(1)
    # Set frequency, gain, squelch, and volume
    center_freq = 144.5E6
    receiver.set_center_freq(center_freq)
    receiver.set_gain(10)
    print "\n"
    print "Started %s at %.3f Msps" % (hw_args, receiver.samp_rate/1E6)
    print "RX at %.3f MHz with %d dB gain" % (receiver.center_freq/1E6,
                                              receiver.gain_db)
    receiver.set_squelch(-60)
    receiver.set_volume(0)
    print "%d demods of type %d at %d dB squelch and %d dB volume" % \
        (num_demod, type_demod, receiver.squelch_db, receiver.volume_db)
    # Create some baseband channels to tune based on 144 MHz center
    channels = np.zeros(num_demod)
    channels[0] = 144.39E6 - receiver.center_freq # APRS
    channels[1] = 144.6E6 - receiver.center_freq
    # Tune demodulators to baseband channels
    # If recording on, this creates empty wav file since manually tuning.
    for idx, demodulator in enumerate(receiver.demodulators):
        demodulator.set_center_freq(channels[idx], center_freq)
    # Print demodulator info
    for idx, channel in enumerate(channels):
        print "Tuned demod %d to %.3f MHz" % (idx,
                                              (channel+receiver.center_freq)
                                              /1E6)
    while 1:
        # No need to go faster than 10 Hz rate of GNU Radio probe
        # Just do 1 Hz here
        time.sleep(1)
        # Grab the FFT data and print max value
        spectrum = receiver.probe_signal_vf.level()
        print "Max spectrum of %.3f" % (np.max(spectrum))
    # Stop the receiver
    receiver.stop()
    receiver.wait()
# Run the manual receiver test; Ctrl-C exits cleanly.
if __name__ == '__main__':
    try:
        main()
    except KeyboardInterrupt:
        pass
|
"""Storage back-end for Mercurial.
This provides efficient delta storage with O(1) retrieve and append
and O(changes) merge between branches.
"""
from node import bin, hex, nullid, nullrev
from i18n import _
import ancestor, mdiff, parsers, error, util, dagutil
import struct, zlib, errno
# Local aliases for hot-path struct/zlib/hash helpers.
_pack = struct.pack
_unpack = struct.unpack
_compress = zlib.compress
_decompress = zlib.decompress
_sha = util.sha1
# Revlog format versions.
REVLOGV0 = 0
REVLOGNG = 1
# Feature flags stored in the high 16 bits of the version field.
REVLOGNGINLINEDATA = (1 << 16)
REVLOGGENERALDELTA = (1 << 17)
REVLOG_DEFAULT_FLAGS = REVLOGNGINLINEDATA
REVLOG_DEFAULT_FORMAT = REVLOGNG
REVLOG_DEFAULT_VERSION = REVLOG_DEFAULT_FORMAT | REVLOG_DEFAULT_FLAGS
REVLOGNG_FLAGS = REVLOGNGINLINEDATA | REVLOGGENERALDELTA
# Per-revision index flags currently understood (none).
REVIDX_KNOWN_FLAGS = 0
# Max total size of an inline revlog before it is split out (128 KiB).
_maxinline = 131072
# Preferred read chunk size when loading revision data (1 MiB).
_chunksize = 1048576
# Re-export the error types used by callers of this module.
RevlogError = error.RevlogError
LookupError = error.LookupError
def getoffset(q):
    """Extract the data-file offset packed into the high bits of an index entry."""
    shifted = q >> 16
    return int(shifted)
def gettype(q):
    """Extract the flag/type bits from the low 16 bits of an index entry."""
    return int(0xFFFF & q)
def offset_type(offset, type):
    """Pack a data-file offset and 16-bit type flags into one integer.

    Inverse of getoffset()/gettype(). Uses the Python 2 ``long`` type so
    large offsets do not overflow a plain int.
    """
    return long(long(offset) << 16 | type)
# Precomputed hasher seeded with nullid so hash() can cheaply copy it.
nullhash = _sha(nullid)
def hash(text, p1, p2):
    """generate a hash from the given text and its parent hashes

    This hash combines both the current file contents and its history
    in a manner that makes it easy to distinguish nodes with the same
    content in the revision graph.
    """
    # As of now, if one of the parent nodes is null, p2 is null,
    # so only p2 needs testing.
    if p2 == nullid:
        # Copying the precomputed nullid hash is faster than creating one.
        s = nullhash.copy()
        s.update(p1)
    else:
        # Neither parent is nullid: sort them so the result is
        # independent of parent order.
        first, second = sorted([p1, p2])
        s = _sha(first)
        s.update(second)
    s.update(text)
    return s.digest()
def compress(text):
    """ generate a possibly-compressed representation of text """
    if not text:
        return ("", text)
    size = len(text)
    bin = None
    if size < 44:
        # Too short for zlib to help; fall through and store raw.
        pass
    elif size > 1000000:
        # zlib makes an internal copy, thus doubling memory usage for
        # large files, so compress incrementally in 1 MB pieces.
        z = zlib.compressobj()
        pieces = []
        pos = 0
        while pos < size:
            end = pos + 2**20
            pieces.append(z.compress(text[pos:end]))
            pos = end
        pieces.append(z.flush())
        # Only keep the compressed form when it actually shrank.
        if sum(map(len, pieces)) < size:
            bin = "".join(pieces)
    else:
        bin = _compress(text)
    if bin is None or len(bin) > size:
        # Compression did not pay off: store raw, tagged 'u' unless the
        # text already starts with the NUL marker.
        if text[0] == '\0':
            return ("", text)
        return ('u', text)
    return ("", bin)
def decompress(bin):
    """ decompress the given input """
    if not bin:
        return bin
    # The first byte tags how the data was stored by compress().
    marker = bin[0]
    if marker == '\0':
        return bin
    elif marker == 'u':
        # Stored uncompressed; strip the tag byte.
        return bin[1:]
    elif marker == 'x':
        # zlib stream (zlib output always begins with 'x').
        return _decompress(bin)
    raise RevlogError(_("unknown compression type %r") % marker)
# Version-0 index entry: 4 longs followed by three 20-byte sha1 nodes.
indexformatv0 = ">4l20s20s20s"
# Byte offset of the entry's own node hash within a v0 record.
v0shaoffset = 56
class revlogoldio(object):
    """Index reader/writer for the legacy (version 0) revlog format."""
    def __init__(self):
        # Fixed byte size of one v0 index record.
        self.size = struct.calcsize(indexformatv0)
    def parseindex(self, data, inline):
        """Parse raw v0 index bytes into (index list, nodemap, cache).

        Entries are converted on the fly to the revlogv1 tuple layout;
        the returned cache is always None for this format.
        """
        s = self.size
        index = []
        nodemap = {nullid: nullrev}
        n = off = 0
        l = len(data)
        while off + s <= l:
            cur = data[off:off + s]
            off += s
            e = _unpack(indexformatv0, cur)
            # transform to revlogv1 format
            e2 = (offset_type(e[0], 0), e[1], -1, e[2], e[3],
                  nodemap.get(e[4], nullrev), nodemap.get(e[5], nullrev), e[6])
            index.append(e2)
            # Record node -> revision number as entries are discovered.
            nodemap[e[6]] = n
            n += 1
        # add the magic null revision at -1
        index.append((0, 0, 0, -1, -1, -1, -1, nullid))
        return index, nodemap, None
    def packentry(self, entry, node, version, rev):
        """Serialize a v1-style entry tuple back into v0 on-disk bytes.

        ``node`` is a callable mapping a revision number to its node hash.
        Raises RevlogError because v0 cannot represent per-entry flags.
        """
        if gettype(entry[0]):
            raise RevlogError(_("index entry flags need RevlogNG"))
        e2 = (getoffset(entry[0]), entry[1], entry[3], entry[4],
              node(entry[5]), node(entry[6]), entry[7])
        return _pack(indexformatv0, *e2)
# RevlogNG index entry: packed offset/flags, sizes, base, link, parents,
# 20-byte node hash, then 12 bytes of padding.
indexformatng = ">Qiiiiii20s12x"
# Byte offset of the node hash within an NG record.
ngshaoffset = 32
# The first 4 bytes of the first entry hold the version/flags word.
versionformat = ">I"
class revlogio(object):
    """Index reader/writer for the RevlogNG (version 1) format."""
    def __init__(self):
        # Fixed byte size of one NG index record.
        self.size = struct.calcsize(indexformatng)
    def parseindex(self, data, inline):
        # call the C implementation to parse the index data
        index, cache = parsers.parse_index2(data, inline)
        # The C index exposes its own nodemap attribute when available.
        return index, getattr(index, 'nodemap', None), cache
    def packentry(self, entry, node, version, rev):
        p = _pack(indexformatng, *entry)
        if rev == 0:
            # The first entry's leading 4 bytes double as the version word.
            p = _pack(versionformat, version) + p[4:]
        return p
class revlog(object):
"""
the underlying revision storage object
A revlog consists of two parts, an index and the revision data.
The index is a file with a fixed record size containing
information on each revision, including its nodeid (hash), the
nodeids of its parents, the position and offset of its data within
the data file, and the revision it's based on. Finally, each entry
contains a linkrev entry that can serve as a pointer to external
data.
The revision data itself is a linear collection of data chunks.
Each chunk represents a revision and is usually represented as a
delta against the previous chunk. To bound lookup time, runs of
deltas are limited to about 2 times the length of the original
version data. This makes retrieval of a version proportional to
its size, or O(1) relative to the number of revisions.
Both pieces of the revlog are written to in an append-only
fashion, which means we never need to rewrite a file to insert or
remove data, and can use some simple techniques to avoid the need
for locking while reading.
"""
def __init__(self, opener, indexfile):
"""
create a revlog object
opener is a function that abstracts the file opening operation
and can be used to implement COW semantics or the like.
"""
self.indexfile = indexfile
self.datafile = indexfile[:-2] + ".d"
self.opener = opener
self._cache = None
self._basecache = (0, 0)
self._chunkcache = (0, '')
self.index = []
self._pcache = {}
self._nodecache = {nullid: nullrev}
self._nodepos = None
v = REVLOG_DEFAULT_VERSION
opts = getattr(opener, 'options', None)
if opts is not None:
if 'revlogv1' in opts:
if 'generaldelta' in opts:
v |= REVLOGGENERALDELTA
else:
v = 0
i = ''
self._initempty = True
try:
f = self.opener(self.indexfile)
i = f.read()
f.close()
if len(i) > 0:
v = struct.unpack(versionformat, i[:4])[0]
self._initempty = False
except IOError, inst:
if inst.errno != errno.ENOENT:
raise
self.version = v
self._inline = v & REVLOGNGINLINEDATA
self._generaldelta = v & REVLOGGENERALDELTA
flags = v & ~0xFFFF
fmt = v & 0xFFFF
if fmt == REVLOGV0 and flags:
raise RevlogError(_("index %s unknown flags %#04x for format v0")
% (self.indexfile, flags >> 16))
elif fmt == REVLOGNG and flags & ~REVLOGNG_FLAGS:
raise RevlogError(_("index %s unknown flags %#04x for revlogng")
% (self.indexfile, flags >> 16))
elif fmt > REVLOGNG:
raise RevlogError(_("index %s unknown format %d")
% (self.indexfile, fmt))
self._io = revlogio()
if self.version == REVLOGV0:
self._io = revlogoldio()
try:
d = self._io.parseindex(i, self._inline)
except (ValueError, IndexError):
raise RevlogError(_("index %s is corrupted") % (self.indexfile))
self.index, nodemap, self._chunkcache = d
if nodemap is not None:
self.nodemap = self._nodecache = nodemap
if not self._chunkcache:
self._chunkclear()
def tip(self):
    """Return the node of the most recent revision.

    The index list carries one trailing sentinel entry, so the last
    real revision lives at len(index) - 2.
    """
    lastrev = len(self.index) - 2
    return self.node(lastrev)
def __len__(self):
    """Number of revisions (index holds one extra sentinel entry)."""
    entries = self.index
    return len(entries) - 1
def __iter__(self):
    """Iterate over all revision numbers in ascending order."""
    return iter(xrange(len(self)))
@util.propertycache
def nodemap(self):
    """Lazily-built node -> rev map.

    Calling ``rev`` on any node populates ``self._nodecache`` as a
    side effect; this property relies on that and returns the cache.
    """
    self.rev(self.node(0))
    return self._nodecache
def hasnode(self, node):
    """Return True when ``node`` is known to this revlog."""
    try:
        # rev() raises a KeyError-compatible LookupError on a miss
        self.rev(node)
    except KeyError:
        return False
    return True
def clearcaches(self):
    """Drop the node->rev lookup cache (native or pure-python)."""
    try:
        # the C radix-tree cache knows how to reset itself
        self._nodecache.clearcaches()
    except AttributeError:
        # plain-dict fallback: rebuild from scratch
        self._nodecache = {nullid: nullrev}
        self._nodepos = None
def rev(self, node):
    """Return the revision number for ``node``.

    Raises LookupError when the node is not present.
    """
    try:
        return self._nodecache[node]
    except RevlogError:
        # parsers.c radix tree lookup failed
        raise LookupError(node, self.indexfile, _('no node'))
    except KeyError:
        # pure python cache lookup failed
        n = self._nodecache
        i = self.index
        p = self._nodepos
        if p is None:
            # start from the last real entry (index ends with a sentinel)
            p = len(i) - 2
        for r in xrange(p, -1, -1):
            v = i[r][7]
            # memoize every node seen during the backward scan
            n[v] = r
            if v == node:
                # resume the next scan just below the hit
                self._nodepos = r - 1
                return r
        raise LookupError(node, self.indexfile, _('no node'))
def node(self, rev):
    """Return the node (binary id) stored for revision ``rev``."""
    entry = self.index[rev]
    return entry[7]
def linkrev(self, rev):
    """Return the linked (changelog) revision for ``rev``."""
    entry = self.index[rev]
    return entry[4]
def parents(self, node):
    """Return the two parent nodes of ``node``."""
    index = self.index
    entry = index[self.rev(node)]
    # slots 5 and 6 hold parent *revisions*; map each back to its node
    # through slot 7 of the parent's own index entry
    p1 = index[entry[5]][7]
    p2 = index[entry[6]][7]
    return p1, p2
def parentrevs(self, rev):
    """Return the pair of parent revision numbers of ``rev``."""
    entry = self.index[rev]
    return entry[5:7]
def start(self, rev):
    """Return the data-file byte offset of ``rev``.

    The high bits of index slot 0 hold the offset; the low 16 bits
    hold the flag word.
    """
    offset_flags = self.index[rev][0]
    return int(offset_flags >> 16)
def end(self, rev):
    """Return the end offset of ``rev``'s stored data."""
    return self.length(rev) + self.start(rev)
def length(self, rev):
    """Return the stored (compressed) data length of ``rev``."""
    entry = self.index[rev]
    return entry[1]
def chainbase(self, rev):
    """Follow ``rev``'s delta chain back to its base revision."""
    index = self.index
    current = rev
    base = index[current][3]
    # the chain ends where an entry is its own base (a full snapshot)
    while base != current:
        current = base
        base = index[current][3]
    return base
def flags(self, rev):
    """Return the 16-bit flag word stored for ``rev``."""
    flagmask = 0xFFFF
    return self.index[rev][0] & flagmask
def rawsize(self, rev):
    """return the length of the uncompressed text for a given revision"""
    stored = self.index[rev][2]
    if stored >= 0:
        return stored
    # a negative stored size means "unknown": rebuild the text to measure
    return len(self.revision(self.node(rev)))
size = rawsize
def reachable(self, node, stop=None):
    """return the set of all nodes ancestral to a given node, including
    the node itself, stopping when stop is matched"""
    # breadth-first walk from ``node`` toward the roots
    seen = set((node,))
    pending = [node]
    if stop:
        stopn = self.rev(stop)
    else:
        stopn = 0
    while pending:
        current = pending.pop(0)
        if current == stop:
            continue
        if current == nullid:
            continue
        for parent in self.parents(current):
            # skip anything at or below the stop revision, and
            # anything already visited
            if self.rev(parent) < stopn or parent in seen:
                continue
            seen.add(parent)
            pending.append(parent)
    return seen
def ancestors(self, *revs):
    """Generate the ancestors of 'revs' in reverse topological order.

    Yield a sequence of revision numbers starting with the parents
    of each revision in revs, i.e., each revision is *not* considered
    an ancestor of itself.  Results are in breadth-first order:
    parents of each rev in revs, then parents of those, etc.  Result
    does not include the null revision."""
    pending = list(revs)
    emitted = set([nullrev])
    while pending:
        current = pending.pop(0)
        for parent in self.parentrevs(current):
            if parent in emitted:
                continue
            pending.append(parent)
            emitted.add(parent)
            yield parent
def descendants(self, *revs):
    """Generate the descendants of 'revs' in revision order.

    Yield a sequence of revision numbers starting with a child of
    some rev in revs, i.e., each revision is *not* considered a
    descendant of itself.  Results are ordered by revision number (a
    topological sort)."""
    lowest = min(revs)
    if lowest == nullrev:
        # everything descends from the null revision
        for rev in self:
            yield rev
        return
    marked = set(revs)
    for rev in xrange(lowest + 1, len(self)):
        for parent in self.parentrevs(rev):
            if parent != nullrev and parent in marked:
                marked.add(rev)
                yield rev
                break
def findcommonmissing(self, common=None, heads=None):
    """Return a tuple of the ancestors of common and the ancestors of heads
    that are not ancestors of common.  In revset terminology, we return the
    tuple:

      ::common, (::heads) - (::common)

    The list is sorted by revision number, meaning it is
    topologically sorted.

    'heads' and 'common' are both lists of node IDs.  If heads is
    not supplied, uses all of the revlog's heads.  If common is not
    supplied, uses nullid."""
    if common is None:
        common = [nullid]
    if heads is None:
        heads = self.heads()

    common = [self.rev(n) for n in common]
    heads = [self.rev(n) for n in heads]

    # we want the ancestors, but inclusive
    has = set(self.ancestors(*common))
    has.add(nullrev)
    has.update(common)

    # take all ancestors from heads that aren't in has
    missing = set()
    # breadth-first walk from the heads, pruned at anything in 'has'
    visit = [r for r in heads if r not in has]
    while visit:
        r = visit.pop(0)
        if r in missing:
            continue
        else:
            missing.add(r)
            for p in self.parentrevs(r):
                if p not in has:
                    visit.append(p)

    missing = list(missing)
    missing.sort()
    return has, [self.node(r) for r in missing]
def findmissing(self, common=None, heads=None):
    """Return the ancestors of heads that are not ancestors of common.

    More specifically, return a list of nodes N such that every N
    satisfies the following constraints:

      1. N is an ancestor of some node in 'heads'
      2. N is not an ancestor of any node in 'common'

    The list is sorted by revision number, meaning it is
    topologically sorted.

    'heads' and 'common' are both lists of node IDs.  If heads is
    not supplied, uses all of the revlog's heads.  If common is not
    supplied, uses nullid."""
    # findcommonmissing returns (ancestors-of-common, missing); only
    # the second element is of interest here
    result = self.findcommonmissing(common, heads)
    return result[1]
def nodesbetween(self, roots=None, heads=None):
    """Return a topological path from 'roots' to 'heads'.

    Return a tuple (nodes, outroots, outheads) where 'nodes' is a
    topologically sorted list of all nodes N that satisfy both of
    these constraints:

      1. N is a descendant of some node in 'roots'
      2. N is an ancestor of some node in 'heads'

    Every node is considered to be both a descendant and an ancestor
    of itself, so every reachable node in 'roots' and 'heads' will be
    included in 'nodes'.

    'outroots' is the list of reachable nodes in 'roots', i.e., the
    subset of 'roots' that is returned in 'nodes'.  Likewise,
    'outheads' is the subset of 'heads' that is also in 'nodes'.

    'roots' and 'heads' are both lists of node IDs.  If 'roots' is
    unspecified, uses nullid as the only root.  If 'heads' is
    unspecified, uses list of all of the revlog's heads."""
    nonodes = ([], [], [])
    if roots is not None:
        roots = list(roots)
        if not roots:
            return nonodes
        lowestrev = min([self.rev(n) for n in roots])
    else:
        roots = [nullid] # Everybody's a descendant of nullid
        lowestrev = nullrev
    if (lowestrev == nullrev) and (heads is None):
        # We want _all_ the nodes!
        return ([self.node(r) for r in self], [nullid], list(self.heads()))
    if heads is None:
        # All nodes are ancestors, so the latest ancestor is the last
        # node.
        highestrev = len(self) - 1
        # Set ancestors to None to signal that every node is an ancestor.
        ancestors = None
        # Set heads to an empty dictionary for later discovery of heads
        heads = {}
    else:
        heads = list(heads)
        if not heads:
            return nonodes
        ancestors = set()
        # Turn heads into a dictionary so we can remove 'fake' heads.
        # Also, later we will be using it to filter out the heads we can't
        # find from roots.
        heads = dict.fromkeys(heads, False)
        # Start at the top and keep marking parents until we're done.
        nodestotag = set(heads)
        # Remember where the top was so we can use it as a limit later.
        highestrev = max([self.rev(n) for n in nodestotag])
        while nodestotag:
            # grab a node to tag
            n = nodestotag.pop()
            # Never tag nullid
            if n == nullid:
                continue
            # A node's revision number represents its place in a
            # topologically sorted list of nodes.
            r = self.rev(n)
            if r >= lowestrev:
                if n not in ancestors:
                    # If we are possibly a descendant of one of the roots
                    # and we haven't already been marked as an ancestor
                    ancestors.add(n) # Mark as ancestor
                    # Add non-nullid parents to list of nodes to tag.
                    nodestotag.update([p for p in self.parents(n) if
                                       p != nullid])
                elif n in heads: # We've seen it before, is it a fake head?
                    # So it is, real heads should not be the ancestors of
                    # any other heads.
                    heads.pop(n)
        if not ancestors:
            return nonodes
        # Now that we have our set of ancestors, we want to remove any
        # roots that are not ancestors.

        # If one of the roots was nullid, everything is included anyway.
        if lowestrev > nullrev:
            # But, since we weren't, let's recompute the lowest rev to not
            # include roots that aren't ancestors.

            # Filter out roots that aren't ancestors of heads
            roots = [n for n in roots if n in ancestors]
            # Recompute the lowest revision
            if roots:
                lowestrev = min([self.rev(n) for n in roots])
            else:
                # No more roots?  Return empty list
                return nonodes
        else:
            # We are descending from nullid, and don't need to care about
            # any other roots.
            lowestrev = nullrev
            roots = [nullid]
    # Transform our roots list into a set.
    descendants = set(roots)
    # Also, keep the original roots so we can filter out roots that aren't
    # 'real' roots (i.e. are descended from other roots).
    roots = descendants.copy()
    # Our topologically sorted list of output nodes.
    orderedout = []
    # Don't start at nullid since we don't want nullid in our output list,
    # and if nullid shows up in descedents, empty parents will look like
    # they're descendants.
    for r in xrange(max(lowestrev, 0), highestrev + 1):
        n = self.node(r)
        isdescendant = False
        if lowestrev == nullrev:  # Everybody is a descendant of nullid
            isdescendant = True
        elif n in descendants:
            # n is already a descendant
            isdescendant = True
            # This check only needs to be done here because all the roots
            # will start being marked is descendants before the loop.
            if n in roots:
                # If n was a root, check if it's a 'real' root.
                p = tuple(self.parents(n))
                # If any of its parents are descendants, it's not a root.
                if (p[0] in descendants) or (p[1] in descendants):
                    roots.remove(n)
        else:
            p = tuple(self.parents(n))
            # A node is a descendant if either of its parents are
            # descendants.  (We seeded the dependents list with the roots
            # up there, remember?)
            if (p[0] in descendants) or (p[1] in descendants):
                descendants.add(n)
                isdescendant = True
        if isdescendant and ((ancestors is None) or (n in ancestors)):
            # Only include nodes that are both descendants and ancestors.
            orderedout.append(n)
            if (ancestors is not None) and (n in heads):
                # We're trying to figure out which heads are reachable
                # from roots.
                # Mark this head as having been reached
                heads[n] = True
            elif ancestors is None:
                # Otherwise, we're trying to discover the heads.
                # Assume this is a head because if it isn't, the next step
                # will eventually remove it.
                heads[n] = True
                # But, obviously its parents aren't.
                for p in self.parents(n):
                    heads.pop(p, None)
    heads = [n for n, flag in heads.iteritems() if flag]
    roots = list(roots)
    assert orderedout
    assert roots
    assert heads
    return (orderedout, roots, heads)
def headrevs(self):
    """Return the revision numbers of all childless revisions."""
    total = len(self)
    if not total:
        return [nullrev]
    # one extra slot so nullrev (-1) parents land on the sentinel cell
    ishead = [1] * (total + 1)
    index = self.index
    for rev in xrange(total):
        entry = index[rev]
        # anything named as a parent cannot be a head
        ishead[entry[5]] = ishead[entry[6]] = 0
    return [rev for rev in xrange(total) if ishead[rev]]
def heads(self, start=None, stop=None):
    """return the list of all nodes that have no children

    if start is specified, only heads that are descendants of
    start will be returned
    if stop is specified, it will consider all the revs from stop
    as if they had no children
    """
    if start is None and stop is None:
        if not len(self):
            return [nullid]
        return [self.node(r) for r in self.headrevs()]

    if start is None:
        start = nullid
    if stop is None:
        stop = []
    stoprevs = set([self.rev(n) for n in stop])
    startrev = self.rev(start)
    # walk forward from startrev; a rev stays a tip until some later
    # reachable rev names it as a parent
    seen = set((startrev,))
    tips = set((startrev,))
    parentrevs = self.parentrevs
    for rev in xrange(startrev + 1, len(self)):
        for parent in parentrevs(rev):
            if parent in seen:
                if rev not in stoprevs:
                    seen.add(rev)
                tips.add(rev)
            if parent in tips and parent not in stoprevs:
                tips.remove(parent)
    return [self.node(rev) for rev in tips]
def children(self, node):
    """find the children of a given node"""
    kids = []
    parentrev = self.rev(node)
    for childrev in range(parentrev + 1, len(self)):
        realparents = [p for p in self.parentrevs(childrev)
                       if p != nullrev]
        if realparents:
            for p in realparents:
                if p == parentrev:
                    kids.append(self.node(childrev))
        elif parentrev == nullrev:
            # revisions with only null parents are children of nullid
            kids.append(self.node(childrev))
    return kids
def descendant(self, start, end):
    """Return True if rev ``end`` descends from rev ``start``."""
    if start == nullrev:
        # everything descends from the null revision
        return True
    for rev in self.descendants(start):
        if rev == end:
            return True
        if rev > end:
            # descendants() yields in increasing order; stop early
            break
    return False
def ancestor(self, a, b):
    """calculate the least common ancestor of nodes a and b"""
    arev, brev = self.rev(a), self.rev(b)
    start, end = min(arev, brev), max(arev, brev)
    # fast path, check if it is a descendant
    if self.descendant(start, end):
        return self.node(start)

    def parents(rev):
        return [p for p in self.parentrevs(rev) if p != nullrev]

    # ``ancestor`` here resolves to the module, not this method
    c = ancestor.ancestor(arev, brev, parents)
    if c is None:
        return nullid
    return self.node(c)
def _match(self, id):
    """Resolve ``id`` to a node by exact forms only.

    Accepted forms, tried in order: an integer rev, a 20-byte binary
    node, the decimal string form of a rev, or a 40-character hex
    node.  Returns None when nothing matches exactly.
    """
    if isinstance(id, (long, int)):
        # rev
        return self.node(id)
    if len(id) == 20:
        # possibly a binary node
        # odds of a binary node being all hex in ASCII are 1 in 10**25
        try:
            node = id
            self.rev(node) # quick search the index
            return node
        except LookupError:
            pass # may be partial hex id
    try:
        # str(rev)
        rev = int(id)
        if str(rev) != id:
            raise ValueError
        if rev < 0:
            # negative revs count back from the tip, like list indexing
            rev = len(self) + rev
        if rev < 0 or rev >= len(self):
            raise ValueError
        return self.node(rev)
    except (ValueError, OverflowError):
        pass
    if len(id) == 40:
        try:
            # a full hex nodeid?
            node = bin(id)
            self.rev(node)
            return node
        except (TypeError, LookupError):
            pass
def _partialmatch(self, id):
    """Resolve a hex-prefix ``id`` to a unique node, with caching.

    Returns None when the prefix is unknown or malformed; raises
    LookupError when it matches more than one node.
    """
    if id in self._pcache:
        return self._pcache[id]

    if len(id) < 40:
        try:
            # hex(node)[:...]
            l = len(id) // 2  # grab an even number of digits
            prefix = bin(id[:l * 2])
            # coarse pass on the binary prefix, then exact hex check
            # to handle an odd-length trailing digit
            nl = [e[7] for e in self.index if e[7].startswith(prefix)]
            nl = [n for n in nl if hex(n).startswith(id)]
            if len(nl) > 0:
                if len(nl) == 1:
                    self._pcache[id] = nl[0]
                    return nl[0]
                raise LookupError(id, self.indexfile,
                                  _('ambiguous identifier'))
            return None
        except TypeError:
            # bin() failed: id contained non-hex characters
            pass
def lookup(self, id):
    """locate a node based on:
        - revision number or str(revision number)
        - nodeid or subset of hex nodeid
    """
    # exact forms first (rev number, binary node, full hex)
    found = self._match(id)
    if found is not None:
        return found
    # then unique hex prefixes
    found = self._partialmatch(id)
    if found:
        return found

    raise LookupError(id, self.indexfile, _('no match found'))
def cmp(self, node, text):
    """compare text with a given file revision

    returns True if text is different than what is stored.
    """
    p1, p2 = self.parents(node)
    # ``hash`` is revlog's content hash helper, not the builtin
    expected = hash(text, p1, p2)
    return expected != node
def _addchunk(self, offset, data):
o, d = self._chunkcache
# try to add to existing cache
if o + len(d) == offset and len(d) + len(data) < _chunksize:
self._chunkcache = o, d + data
else:
self._chunkcache = offset, data
def _loadchunk(self, offset, length):
if self._inline:
df = self.opener(self.indexfile)
else:
df = self.opener(self.datafile)
readahead = max(65536, length)
df.seek(offset)
d = df.read(readahead)
df.close()
self._addchunk(offset, d)
if readahead > length:
return util.buffer(d, 0, length)
return d
def _getchunk(self, offset, length):
o, d = self._chunkcache
l = len(d)
# is it in the cache?
cachestart = offset - o
cacheend = cachestart + length
if cachestart >= 0 and cacheend <= l:
if cachestart == 0 and cacheend == l:
return d # avoid a copy
return util.buffer(d, cachestart, cacheend - cachestart)
return self._loadchunk(offset, length)
def _chunkraw(self, startrev, endrev):
start = self.start(startrev)
length = self.end(endrev) - start
if self._inline:
start += (startrev + 1) * self._io.size
return self._getchunk(start, length)
def _chunk(self, rev):
    """Return the decompressed stored payload of revision ``rev``."""
    compressed = self._chunkraw(rev, rev)
    return decompress(compressed)
def _chunkbase(self, rev):
return self._chunk(rev)
def _chunkclear(self):
self._chunkcache = (0, '')
def deltaparent(self, rev):
    """return deltaparent of the given revision"""
    base = self.index[rev][3]
    if base == rev:
        # rev is stored as a full snapshot, not a delta
        return nullrev
    if self._generaldelta:
        return base
    # classic revlogs always delta against the previous revision
    return rev - 1
def revdiff(self, rev1, rev2):
    """return or calculate a delta between two revisions"""
    # if the stored delta of rev2 is already against rev1, reuse it
    if rev1 != nullrev and self.deltaparent(rev2) == rev1:
        return str(self._chunk(rev2))

    full1 = self.revision(rev1)
    full2 = self.revision(rev2)
    return mdiff.textdiff(full1, full2)
def revision(self, nodeorrev):
    """return an uncompressed revision of a given node or revision
    number.
    """
    if isinstance(nodeorrev, int):
        rev = nodeorrev
        node = self.node(rev)
    else:
        node = nodeorrev
        rev = None

    cachedrev = None
    if node == nullid:
        return ""
    if self._cache:
        if self._cache[0] == node:
            return self._cache[2]
        cachedrev = self._cache[1]

    # look up what we need to read
    text = None
    if rev is None:
        rev = self.rev(node)

    # check rev flags
    if self.flags(rev) & ~REVIDX_KNOWN_FLAGS:
        raise RevlogError(_('incompatible revision flag %x') %
                          (self.flags(rev) & ~REVIDX_KNOWN_FLAGS))

    # build delta chain
    chain = []
    index = self.index # for performance
    generaldelta = self._generaldelta
    iterrev = rev
    e = index[iterrev]
    # walk back until we hit a full snapshot (its own base) or the
    # revision whose text is already in the cache
    while iterrev != e[3] and iterrev != cachedrev:
        chain.append(iterrev)
        if generaldelta:
            iterrev = e[3]
        else:
            iterrev -= 1
        e = index[iterrev]
    chain.reverse()
    base = iterrev

    if iterrev == cachedrev:
        # cache hit
        text = self._cache[2]

    # drop cache to save memory
    self._cache = None

    # warm the chunk cache with the whole chain in one read
    self._chunkraw(base, rev)
    if text is None:
        text = str(self._chunkbase(base))

    # apply the deltas from base to rev, then verify the result
    bins = [self._chunk(r) for r in chain]
    text = mdiff.patches(text, bins)
    text = self._checkhash(text, node, rev)

    self._cache = (node, rev, text)
    return text
def _checkhash(self, text, node, rev):
    """Verify that ``text`` hashes to ``node``; raise on mismatch."""
    p1, p2 = self.parents(node)
    computed = hash(text, p1, p2)
    if computed != node:
        raise RevlogError(_("integrity check failed on %s:%d")
                          % (self.indexfile, rev))
    return text
def checkinlinesize(self, tr, fp=None):
    """Split an inline revlog into separate index and data files once
    the inline data grows past _maxinline.  No-op otherwise.
    """
    if not self._inline or (self.start(-2) + self.length(-2)) < _maxinline:
        return

    trinfo = tr.find(self.indexfile)
    if trinfo is None:
        raise RevlogError(_("%s not found in the transaction")
                          % self.indexfile)

    trindex = trinfo[2]
    dataoff = self.start(trindex)

    tr.add(self.datafile, dataoff)

    if fp:
        fp.flush()
        fp.close()

    # copy every revision's raw data out into the new data file
    df = self.opener(self.datafile, 'w')
    try:
        for r in self:
            df.write(self._chunkraw(r, r))
    finally:
        df.close()

    # rewrite the index without the inline flag or inline data
    fp = self.opener(self.indexfile, 'w', atomictemp=True)
    self.version &= ~(REVLOGNGINLINEDATA)
    self._inline = False
    for i in self:
        e = self._io.packentry(self.index[i], self.node, self.version, i)
        fp.write(e)

    # if we don't call close, the temp file will never replace the
    # real index
    fp.close()

    tr.replace(self.indexfile, trindex * self._io.size)
    self._chunkclear()
def addrevision(self, text, transaction, link, p1, p2, cachedelta=None):
    """add a revision to the log

    text - the revision data to add
    transaction - the transaction object used for rollback
    link - the linkrev data to add
    p1, p2 - the parent nodeids of the revision
    cachedelta - an optional precomputed delta
    """
    node = hash(text, p1, p2)
    if node in self.nodemap:
        # identical content already stored; nothing to do
        return node

    datafh = None
    if not self._inline:
        datafh = self.opener(self.datafile, "a")
    indexfh = self.opener(self.indexfile, "a+")
    try:
        return self._addrevision(node, text, transaction, link, p1, p2,
                                 cachedelta, indexfh, datafh)
    finally:
        if datafh:
            datafh.close()
        indexfh.close()
def _addrevision(self, node, text, transaction, link, p1, p2,
                 cachedelta, ifh, dfh):
    """internal function to add revisions to the log

    see addrevision for argument descriptions.
    invariants:
    - text is optional (can be None); if not set, cachedelta must be set.
      if both are set, they must correspond to eachother.
    """
    # one-element list so the nested helpers can memoize the full text
    btext = [text]
    def buildtext():
        # reconstruct (and cache) the full text from cachedelta
        if btext[0] is not None:
            return btext[0]
        # flush any pending writes here so we can read it in revision
        if dfh:
            dfh.flush()
        ifh.flush()
        basetext = self.revision(self.node(cachedelta[0]))
        btext[0] = mdiff.patch(basetext, cachedelta[1])
        chk = hash(btext[0], p1, p2)
        if chk != node:
            raise RevlogError(_("consistency error in delta"))
        return btext[0]

    def builddelta(rev):
        # build a compressed delta of this revision against ``rev``
        # can we use the cached delta?
        if cachedelta and cachedelta[0] == rev:
            delta = cachedelta[1]
        else:
            t = buildtext()
            ptext = self.revision(self.node(rev))
            delta = mdiff.textdiff(ptext, t)
        data = compress(delta)
        l = len(data[1]) + len(data[0])
        if basecache[0] == rev:
            chainbase = basecache[1]
        else:
            chainbase = self.chainbase(rev)
        # total bytes needed to reconstruct from the chain base
        dist = l + offset - self.start(chainbase)
        if self._generaldelta:
            base = rev
        else:
            base = chainbase
        return dist, l, data, base, chainbase

    curr = len(self)
    prev = curr - 1
    base = chainbase = curr
    offset = self.end(prev)
    flags = 0
    d = None
    basecache = self._basecache
    p1r, p2r = self.rev(p1), self.rev(p2)

    # should we try to build a delta?
    if prev != nullrev:
        if self._generaldelta:
            if p1r >= basecache[1]:
                d = builddelta(p1r)
            elif p2r >= basecache[1]:
                d = builddelta(p2r)
            else:
                d = builddelta(prev)
        else:
            d = builddelta(prev)
        dist, l, data, base, chainbase = d

    # full versions are inserted when the needed deltas
    # become comparable to the uncompressed text
    if text is None:
        textlen = mdiff.patchedsize(self.rawsize(cachedelta[0]),
                                    cachedelta[1])
    else:
        textlen = len(text)
    if d is None or dist > textlen * 2:
        # store a full snapshot instead of a delta
        text = buildtext()
        data = compress(text)
        l = len(data[1]) + len(data[0])
        base = chainbase = curr

    e = (offset_type(offset, flags), l, textlen,
         base, link, p1r, p2r, node)
    # insert before the trailing sentinel entry
    self.index.insert(-1, e)
    self.nodemap[node] = curr

    entry = self._io.packentry(e, self.node, self.version, curr)
    if not self._inline:
        transaction.add(self.datafile, offset)
        transaction.add(self.indexfile, curr * len(entry))
        if data[0]:
            dfh.write(data[0])
        dfh.write(data[1])
        dfh.flush()
        ifh.write(entry)
    else:
        offset += curr * self._io.size
        transaction.add(self.indexfile, offset, curr)
        ifh.write(entry)
        ifh.write(data[0])
        ifh.write(data[1])
        # splitting out of inline mode may be triggered by this write
        self.checkinlinesize(transaction, ifh)

    if type(text) == str: # only accept immutable objects
        self._cache = (node, curr, text)
    self._basecache = (curr, chainbase)
    return node
def group(self, nodelist, bundler, reorder=None):
    """Calculate a delta group, yielding a sequence of changegroup chunks
    (strings).

    Given a list of changeset revs, return a set of deltas and
    metadata corresponding to nodes.  The first delta is
    first parent(nodelist[0]) -> nodelist[0], the receiver is
    guaranteed to have this parent as it has all history before
    these changesets.  In the case firstparent is nullrev the
    changegroup starts with a full revision.
    """

    # if we don't have any revisions touched by these changesets, bail
    if len(nodelist) == 0:
        yield bundler.close()
        return

    # for generaldelta revlogs, we linearize the revs; this will both be
    # much quicker and generate a much smaller bundle
    if (self._generaldelta and reorder is not False) or reorder:
        dag = dagutil.revlogdag(self)
        revs = set(self.rev(n) for n in nodelist)
        revs = dag.linearize(revs)
    else:
        revs = sorted([self.rev(n) for n in nodelist])

    # add the parent of the first rev
    p = self.parentrevs(revs[0])[0]
    revs.insert(0, p)

    # build deltas: each chunk is rev[r] -> rev[r + 1]
    for r in xrange(len(revs) - 1):
        prev, curr = revs[r], revs[r + 1]
        for c in bundler.revchunk(self, curr, prev):
            yield c

    yield bundler.close()
def addgroup(self, bundle, linkmapper, transaction):
    """
    add a delta group

    given a set of deltas, add them to the revision log.  the
    first delta is against its parent, which should be in our
    log, the rest are against the previous delta.
    """

    # track the base of the current delta log
    content = []
    node = None

    r = len(self)
    end = 0
    if r:
        end = self.end(r - 1)
    ifh = self.opener(self.indexfile, "a+")
    isize = r * self._io.size
    if self._inline:
        transaction.add(self.indexfile, end + isize, r)
        dfh = None
    else:
        transaction.add(self.indexfile, isize, r)
        transaction.add(self.datafile, end)
        dfh = self.opener(self.datafile, "a")

    try:
        # loop through our set of deltas
        chain = None
        while True:
            chunkdata = bundle.deltachunk(chain)
            if not chunkdata:
                break
            node = chunkdata['node']
            p1 = chunkdata['p1']
            p2 = chunkdata['p2']
            cs = chunkdata['cs']
            deltabase = chunkdata['deltabase']
            delta = chunkdata['delta']

            content.append(node)

            link = linkmapper(cs)
            if node in self.nodemap:
                # this can happen if two branches make the same change
                chain = node
                continue

            for p in (p1, p2):
                if not p in self.nodemap:
                    raise LookupError(p, self.indexfile,
                                      _('unknown parent'))

            if deltabase not in self.nodemap:
                raise LookupError(deltabase, self.indexfile,
                                  _('unknown delta base'))

            baserev = self.rev(deltabase)
            chain = self._addrevision(node, None, transaction, link,
                                      p1, p2, (baserev, delta), ifh, dfh)
            if not dfh and not self._inline:
                # addrevision switched from inline to conventional
                # reopen the index
                ifh.close()
                dfh = self.opener(self.datafile, "a")
                ifh = self.opener(self.indexfile, "a")
    finally:
        if dfh:
            dfh.close()
        ifh.close()

    return content
def strip(self, minlink, transaction):
    """truncate the revlog on the first revision with a linkrev >= minlink

    This function is called when we're stripping revision minlink and
    its descendants from the repository.

    We have to remove all revisions with linkrev >= minlink, because
    the equivalent changelog revisions will be renumbered after the
    strip.

    So we truncate the revlog on the first of these revisions, and
    trust that the caller has saved the revisions that shouldn't be
    removed and that it'll re-add them after this truncation.
    """
    if len(self) == 0:
        return

    for rev in self:
        if self.index[rev][4] >= minlink:
            break
    else:
        # no revision references minlink or later; nothing to strip
        return

    # first truncate the files on disk
    end = self.start(rev)
    if not self._inline:
        transaction.add(self.datafile, end)
        end = rev * self._io.size
    else:
        # inline: index entries and data are interleaved in one file
        end += rev * self._io.size

    transaction.add(self.indexfile, end)

    # then reset internal state in memory to forget those revisions
    self._cache = None
    self._chunkclear()
    for x in xrange(rev, len(self)):
        del self.nodemap[self.node(x)]

    # keep the trailing sentinel entry: delete up to, not including, -1
    del self.index[rev:-1]
def checksize(self):
    """Compare on-disk file sizes with what the index implies.

    Returns (dd, di): dd is the surplus (or deficit, if negative) of
    the data file, di the surplus of the index file; (0, 0) is clean.
    """
    expected = 0
    if len(self):
        expected = max(0, self.end(len(self) - 1))

    try:
        f = self.opener(self.datafile)
        f.seek(0, 2)
        actual = f.tell()
        f.close()
        dd = actual - expected
    except IOError, inst:
        if inst.errno != errno.ENOENT:
            raise
        dd = 0

    try:
        f = self.opener(self.indexfile)
        f.seek(0, 2)
        actual = f.tell()
        f.close()
        s = self._io.size
        i = max(0, actual // s)
        di = actual - (i * s)
        if self._inline:
            # inline files interleave index entries with data;
            # account for both when computing the index surplus
            databytes = 0
            for r in self:
                databytes += max(0, self.length(r))
            dd = 0
            di = actual - len(self) * s - databytes
    except IOError, inst:
        if inst.errno != errno.ENOENT:
            raise
        di = 0

    return (dd, di)
def files(self):
    """Return the list of on-disk files backing this revlog."""
    paths = [self.indexfile]
    if not self._inline:
        # split revlogs keep their data in a separate ".d" file
        paths.append(self.datafile)
    return paths
|
from PyQt5 import QtWidgets
from view.analysis_widget import AnalysisWidget
class TemporalAnalysisWidget(AnalysisWidget):
    """Temporal Analysis page: a plot canvas above the quotient table."""

    # noinspection PyArgumentList
    def __init__(self, mplCanvas):
        """
        Construct the Temporal Analysis page in the main window. |br|
        A ``ScatterPlot.mplCanvas`` will be shown on this page.

        :param mplCanvas: The ``ScatterPlot.mplCanvas`` widget.
        """
        super().__init__()

        graphLabel = QtWidgets.QLabel("Temporal Distribution &Graph:")
        graphLabel.setMargin(1)
        graphLabel.setBuddy(mplCanvas)

        quotientLabel = QtWidgets.QLabel("Temporal Correlation &Quotient:")
        quotientLabel.setMargin(1)
        quotientLabel.setBuddy(self.tableWidget)

        layout = QtWidgets.QVBoxLayout()
        for widget in (graphLabel, mplCanvas, quotientLabel,
                       self.tableWidget):
            layout.addWidget(widget)
        self.setLayout(layout)
|
import sys, os
sys.path.append(os.path.join(os.path.dirname(__file__), '..', '',''))
import numpy as np
from sklearn import svm
from math import sqrt
import sys
from sklearn.metrics import roc_auc_score

# Cross-validate an SVM over a precomputed kernel matrix stored in
# libsvm format: argv[1] = matrix file, argv[2] = C, argv[3] = output stem.
if len(sys.argv)<4:
    sys.exit("python cross_validation_from_matrix_norm.py inputMatrix.libsvm C outfile")

c=float(sys.argv[2])  # SVM regularization constant

from sklearn.datasets import load_svmlight_file
km, target_array = load_svmlight_file(sys.argv[1])

# column 0 of the libsvm matrix is the example index, not a kernel value
kmgood=km[:,1:].todense()
gram=km[:,1:].todense()
# normalize the kernel: K'(i,j) = K(i,j) / sqrt(K(i,i) * K(j,j))
for i in xrange(len(target_array)):
    for j in xrange(0,len(target_array)):
        #AUC cross validationprint i,j,kmgood[i,j],kmgood[i,i],kmgood[j,j]
        gram[i,j]=kmgood[i,j]/sqrt(kmgood[i,i]*kmgood[j,j])

from sklearn import cross_validation
# one full stratified 10-fold CV per shuffling seed
for rs in range(42,53):
    f=open(str(sys.argv[3]+".seed"+str(rs)+".c"+str(c)),'w')

    kf = cross_validation.StratifiedKFold(target_array, n_folds=10, shuffle=True,random_state=rs)
    #print kf
    #remove column zero because
    #first entry of each line is the index

    #gram=km[:,1:].todense()
    f.write("Total examples "+str(len(gram))+"\n")
    f.write("CV\t test_AUROC\n")

    #print gram
    # normalization
    #for i in range(len(gram)):
    #    for j in range(len(gram)):
    #        gram[i,j]=gram[i,j]/sqrt(gram[i,i]+gram[j,j])

    sc=[]

    for train_index, test_index in kf:
        #print("TRAIN:", train_index, "TEST:", test_index)
        #generated train and test lists, incuding indices of the examples in training/test
        #for the specific fold. Indices starts from 0 now
        clf = svm.SVC(C=c, kernel='precomputed',probability=True)
        train_gram = [] #[[] for x in xrange(0,len(train))]
        test_gram = []# [[] for x in xrange(0,len(test))]
        # slice the gram matrix: training rows/columns for fitting,
        # test rows (restricted to training columns) for prediction
        index=-1
        for row in gram:
            index+=1
            if index in train_index:
                train_gram.append([gram[index,i] for i in train_index])
            else:
                test_gram.append([gram[index,i] for i in train_index])

        #print gram

        X_train, X_test, y_train, y_test = np.array(train_gram), np.array(test_gram), target_array[train_index], target_array[test_index]

        #COMPUTE INNERKFOLD
        # NOTE(review): this rebinds ``kf`` while the outer loop is still
        # iterating; the ongoing iteration is unaffected, but the name now
        # refers to the inner split object.
        kf = cross_validation.StratifiedKFold(y_train, n_folds=10, shuffle=True,random_state=rs)
        inner_scores= cross_validation.cross_val_score(
        clf, X_train, y_train, cv=kf, scoring='roc_auc')
        #print "inner scores", inner_scores
        print "Inner AUROC: %0.4f (+/- %0.4f)" % (inner_scores.mean(), inner_scores.std() / 2)

        f.write(str(inner_scores.mean())+"\t")

        clf.fit(X_train, y_train)

        # predict on test examples
        y_test_predicted=clf.predict_proba(X_test)
        #print y_test_predicted
        sc.append(roc_auc_score(y_test, y_test_predicted[:,1]))
        f.write(str(roc_auc_score(y_test, y_test_predicted[:,1]))+"\n")

    f.close()
    scores=np.array(sc)
    print "AUROC: %0.4f (+/- %0.4f)" % (scores.mean(), scores.std() / 2)
|
import os
import tempfile
import urllib
from glob import glob
import shutil
import time
import gtk
import gobject
# GTK/GObject must be told early that this process uses threads.
gobject.threads_init()

if __name__ == "__main__":
    # allow running this module directly from the source tree
    import sys
    sys.path.insert(0, "..")

from chirpui import inputdialog, common
try:
    import serial
except ImportError,e:
    # pyserial is needed for radio communication; warn but keep running
    common.log_exception()
    common.show_error("\nThe Pyserial module is not installed!")
from chirp import platform, generic_xml, generic_csv, directory, util
from chirp import ic9x, kenwood_live, idrp, vx7, vx5, vx6
from chirp import CHIRP_VERSION, chirp_common, detect, errors
from chirp import icf, ic9x_icf
from chirpui import editorset, clone, miscwidgets, config, reporting, fips
from chirpui import bandplans
CONF = config.get()  # shared CHIRP configuration object
KEEP_RECENT = 8      # number of entries kept in the "recent files" menu

# Band menu label -> numeric band code used for repeater queries
RB_BANDS = {
    "--All--"                 : 0,
    "10 meters (29MHz)"       : 29,
    "6 meters (54MHz)"        : 5,
    "2 meters (144MHz)"       : 14,
    "1.25 meters (220MHz)"    : 22,
    "70 centimeters (440MHz)" : 4,
    "33 centimeters (900MHz)" : 9,
    "23 centimeters (1.2GHz)" : 12,
}
def key_bands(band):
    """Sort key for RB_BANDS labels: '--All--' first, then by wavelength.

    Labels look like "<amount> <meters|centimeters> (<freq>)".
    """
    if band.startswith("-"):
        return -1

    amount, units, mhz = band.split(" ")
    multiplier = 100 if units == "meters" else 1
    return 100000 - (float(amount) * multiplier)
class ModifiedError(Exception):
    """Exception used to signal an unsaved-modifications condition."""
class ChirpMain(gtk.Window):
def get_current_editorset(self):
    """Return the editor set on the active tab, or None when no tab exists.

    gtk.Notebook.get_current_page() returns -1 (an int, never None)
    when the notebook is empty, so the original ``is not None`` check
    was always true; guard on the index value instead.
    """
    page = self.tabs.get_current_page()
    if page >= 0:
        return self.tabs.get_nth_page(page)
    return None
def ev_tab_switched(self, pagenum=None):
    """Update menu action sensitivity for the newly-active tab."""
    def set_action_sensitive(action, sensitive):
        self.menu_ag.get_action(action).set_sensitive(sensitive)

    if pagenum is not None:
        eset = self.tabs.get_nth_page(pagenum)
    else:
        eset = self.get_current_editorset()

    # uploading only applies to clone-mode (image-based) radios
    upload_sens = bool(eset and
                       isinstance(eset.radio, chirp_common.CloneModeRadio))

    # live and network-sourced radios have nothing to save to disk
    if not eset or isinstance(eset.radio, chirp_common.LiveRadio):
        save_sens = False
    elif isinstance(eset.radio, chirp_common.NetworkSourceRadio):
        save_sens = False
    else:
        save_sens = True

    for i in ["import", "importsrc", "stock"]:
        set_action_sensitive(i,
                             eset is not None and not eset.get_read_only())

    for i in ["save", "saveas"]:
        set_action_sensitive(i, save_sens)
    for i in ["upload"]:
        set_action_sensitive(i, upload_sens)
    for i in ["cancelq"]:
        # cancel-queue only makes sense for non-saveable (live) radios
        set_action_sensitive(i, eset is not None and not save_sens)

    for i in ["export", "close", "columns", "irbook", "irfinder",
              "move_up", "move_dn", "exchange", "iradioreference",
              "cut", "copy", "paste", "delete", "viewdeveloper"]:
        set_action_sensitive(i, eset is not None)
def ev_status(self, editorset, msg):
    """Show editor status text in the radio statusbar."""
    bar = self.sb_radio
    bar.pop(0)
    bar.push(0, msg)
def ev_usermsg(self, editorset, msg):
    """Show a user-facing message in the general statusbar."""
    bar = self.sb_general
    bar.pop(0)
    bar.push(0, msg)
def ev_editor_selected(self, editorset, editortype):
    """Enable or disable menu actions based on the active editor type."""
    mappings = {
        "memedit" : ["view", "edit"],
    }

    for prefix, actionnames in mappings.items():
        enabled = editortype.startswith(prefix)
        for name in actionnames:
            self.menu_ag.get_action(name).set_sensitive(enabled)
def _connect_editorset(self, eset):
eset.connect("want-close", self.do_close)
eset.connect("status", self.ev_status)
eset.connect("usermsg", self.ev_usermsg)
eset.connect("editor-selected", self.ev_editor_selected)
    def do_diff_radio(self):
        """Diff two open tabs.

        Either compares one raw memory from each radio (channel >= 0),
        or the entire packed images when both radios are clone-mode and
        channel -1 ("whole image") was selected for both.
        """
        if self.tabs.get_n_pages() < 2:
            common.show_error("Diff tabs requires at least two open tabs!")
            return
        esets = []
        for i in range(0, self.tabs.get_n_pages()):
            esets.append(self.tabs.get_nth_page(i))
        d = gtk.Dialog(title="Diff Radios",
                       buttons=(gtk.STOCK_OK, gtk.RESPONSE_OK,
                                gtk.STOCK_CANCEL, gtk.RESPONSE_CANCEL),
                       parent=self)
        choices = []
        for eset in esets:
            choices.append("%s %s (%s)" % (eset.rthread.radio.VENDOR,
                                           eset.rthread.radio.MODEL,
                                           eset.filename))
        choice_a = miscwidgets.make_choice(choices, False, choices[0])
        choice_a.show()
        chan_a = gtk.SpinButton()
        # Channel range is -1..999; -1 selects a whole-image diff below
        chan_a.get_adjustment().set_all(1, -1, 999, 1, 10, 0)
        chan_a.show()
        hbox = gtk.HBox(False, 3)
        hbox.pack_start(choice_a, 1, 1, 1)
        hbox.pack_start(chan_a, 0, 0, 0)
        hbox.show()
        d.vbox.pack_start(hbox, 0, 0, 0)
        choice_b = miscwidgets.make_choice(choices, False, choices[1])
        choice_b.show()
        chan_b = gtk.SpinButton()
        chan_b.get_adjustment().set_all(1, -1, 999, 1, 10, 0)
        chan_b.show()
        hbox = gtk.HBox(False, 3)
        hbox.pack_start(choice_b, 1, 1, 1)
        hbox.pack_start(chan_b, 0, 0, 0)
        hbox.show()
        d.vbox.pack_start(hbox, 0, 0, 0)
        r = d.run()
        sel_a = choice_a.get_active_text()
        sel_chan_a = chan_a.get_value()
        sel_b = choice_b.get_active_text()
        sel_chan_b = chan_b.get_value()
        d.destroy()
        if r == gtk.RESPONSE_CANCEL:
            return
        if sel_a == sel_b:
            common.show_error("Can't diff the same tab!")
            return
        print "Selected %s@%i and %s@%i" % (sel_a, sel_chan_a,
                                            sel_b, sel_chan_b)
        eset_a = esets[choices.index(sel_a)]
        eset_b = esets[choices.index(sel_b)]
        # Memory diff runs as two chained RadioJobs on the radio threads
        def _show_diff(mem_b, mem_a):
            # Step 3: Show the diff
            diff = common.simple_diff(mem_a, mem_b)
            common.show_diff_blob("Differences", diff)
        def _get_mem_b(mem_a):
            # Step 2: Get memory b
            job = common.RadioJob(_show_diff, "get_raw_memory", int(sel_chan_b))
            job.set_cb_args(mem_a)
            eset_b.rthread.submit(job)
        if sel_chan_a >= 0 and sel_chan_b >= 0:
            # Diff numbered memory
            # Step 1: Get memory a
            job = common.RadioJob(_get_mem_b, "get_raw_memory", int(sel_chan_a))
            eset_a.rthread.submit(job)
        elif isinstance(eset_a.rthread.radio, chirp_common.CloneModeRadio) and\
                isinstance(eset_b.rthread.radio, chirp_common.CloneModeRadio):
            # Diff whole (can do this without a job, since both are clone-mode)
            a = util.hexprint(eset_a.rthread.radio._mmap.get_packed())
            b = util.hexprint(eset_b.rthread.radio._mmap.get_packed())
            common.show_diff_blob("Differences", common.simple_diff(a, b))
        else:
            common.show_error("Cannot diff whole live-mode radios!")
def do_new(self):
eset = editorset.EditorSet(_("Untitled") + ".csv", self)
self._connect_editorset(eset)
eset.prime()
eset.show()
tab = self.tabs.append_page(eset, eset.get_tab_label())
self.tabs.set_current_page(tab)
def _do_manual_select(self, filename):
radiolist = {}
for drv, radio in directory.DRV_TO_RADIO.items():
if not issubclass(radio, chirp_common.CloneModeRadio):
continue
radiolist["%s %s" % (radio.VENDOR, radio.MODEL)] = drv
lab = gtk.Label("""<b><big>Unable to detect model!</big></b>
If you think that it is valid, you can select a radio model below to force an open attempt. If selecting the model manually works, please file a bug on the website and attach your image. If selecting the model does not work, it is likely that you are trying to open some other type of file.
""")
lab.set_justify(gtk.JUSTIFY_FILL)
lab.set_line_wrap(True)
lab.set_use_markup(True)
lab.show()
choice = miscwidgets.make_choice(sorted(radiolist.keys()), False,
sorted(radiolist.keys())[0])
d = gtk.Dialog(title="Detection Failed",
buttons=(gtk.STOCK_OK, gtk.RESPONSE_OK,
gtk.STOCK_CANCEL, gtk.RESPONSE_CANCEL))
d.vbox.pack_start(lab, 0, 0, 0)
d.vbox.pack_start(choice, 0, 0, 0)
d.vbox.set_spacing(5)
choice.show()
d.set_default_size(400, 200)
#d.set_resizable(False)
r = d.run()
d.destroy()
if r != gtk.RESPONSE_OK:
return
try:
rc = directory.DRV_TO_RADIO[radiolist[choice.get_active_text()]]
return rc(filename)
except:
return
    def do_open(self, fname=None, tempname=None):
        """Open a radio image file in a new tab.

        Prompts for a filename when fname is not given.  ICF files are
        opened read-only; detection failures fall back to a manual
        model-selection dialog.
        """
        if not fname:
            types = [(_("CHIRP Radio Images") + " (*.img)", "*.img"),
                     (_("CHIRP Files") + " (*.chirp)", "*.chirp"),
                     (_("CSV Files") + " (*.csv)", "*.csv"),
                     (_("EVE Files (VX5)") + " (*.eve)", "*.eve"),
                     (_("ICF Files") + " (*.icf)", "*.icf"),
                     (_("VX5 Commander Files") + " (*.vx5)", "*.vx5"),
                     (_("VX6 Commander Files") + " (*.vx6)", "*.vx6"),
                     (_("VX7 Commander Files") + " (*.vx7)", "*.vx7"),
                     ]
            fname = platform.get_platform().gui_open_file(types=types)
            if not fname:
                return
        self.record_recent_file(fname)
        if icf.is_icf_file(fname):
            # Icom ICF files can only be displayed or imported, not edited
            a = common.ask_yesno_question(\
                _("ICF files cannot be edited, only displayed or imported "
                  "into another file. Open in read-only mode?"),
                self)
            if not a:
                return
            read_only = True
        else:
            read_only = False
        if icf.is_9x_icf(fname):
            # We have to actually instantiate the IC9xICFRadio to get its
            # sub-devices
            radio = ic9x_icf.IC9xICFRadio(fname)
        else:
            try:
                radio = directory.get_radio_by_image(fname)
            except errors.ImageDetectFailed:
                # Detection failed; let the user force a model
                radio = self._do_manual_select(fname)
                if not radio:
                    return
                print "Manually selected %s" % radio
            except Exception, e:
                common.log_exception()
                common.show_error(os.path.basename(fname) + ": " + str(e))
                return
        # NOTE(review): first_tab is never read in this method
        first_tab = False
        try:
            eset = editorset.EditorSet(radio, self,
                                       filename=fname,
                                       tempname=tempname)
        except Exception, e:
            common.log_exception()
            common.show_error(
                _("There was an error opening {fname}: {error}").format(
                    fname=fname,
                    error=e))
            return
        eset.set_read_only(read_only)
        self._connect_editorset(eset)
        eset.show()
        self.tabs.append_page(eset, eset.get_tab_label())
        # Surface any non-fatal parse errors collected by the driver
        if hasattr(eset.rthread.radio, "errors") and \
                eset.rthread.radio.errors:
            msg = _("{num} errors during open:").format(
                num=len(eset.rthread.radio.errors))
            common.show_error_text(msg,
                                   "\r\n".join(eset.rthread.radio.errors))
    def do_live_warning(self, radio):
        """Explain live-mode semantics for *radio* in a one-time dialog."""
        d = gtk.MessageDialog(parent=self, buttons=gtk.BUTTONS_OK)
        d.set_markup("<big><b>" + _("Note:") + "</b></big>")
        msg = _("The {vendor} {model} operates in <b>live mode</b>. "
                "This means that any changes you make are immediately sent "
                "to the radio. Because of this, you cannot perform the "
                "<u>Save</u> or <u>Upload</u> operations. If you wish to "
                "edit the contents offline, please <u>Export</u> to a CSV "
                "file, using the <b>File menu</b>.").format(vendor=radio.VENDOR,
                                                            model=radio.MODEL)
        d.format_secondary_markup(msg)
        again = gtk.CheckButton(_("Don't show this again"))
        again.show()
        d.vbox.pack_start(again, 0, 0, 0)
        d.run()
        # Persist the checkbox so the warning can be suppressed next time
        CONF.set_bool("live_mode", again.get_active(), "noconfirm")
        d.destroy()
    def do_open_live(self, radio, tempname=None, read_only=False):
        """Open an editor tab for a live or network-sourced radio."""
        eset = editorset.EditorSet(radio, self, tempname=tempname)
        # NOTE(review): unlike do_open(), this wires only two of the four
        # editorset signals (no "usermsg"/"editor-selected"); confirm
        # whether _connect_editorset() was intended here
        eset.connect("want-close", self.do_close)
        eset.connect("status", self.ev_status)
        eset.set_read_only(read_only)
        eset.show()
        self.tabs.append_page(eset, eset.get_tab_label())
        if isinstance(radio, chirp_common.LiveRadio):
            reporting.report_model_usage(radio, "live", True)
            if not CONF.get_bool("live_mode", "noconfirm"):
                self.do_live_warning(radio)
def do_save(self, eset=None):
if not eset:
eset = self.get_current_editorset()
# For usability, allow Ctrl-S to short-circuit to Save-As if
# we are working on a yet-to-be-saved image
if not os.path.exists(eset.filename):
return self.do_saveas()
eset.save()
    def do_saveas(self):
        """Prompt for a filename and save the current tab's image."""
        eset = self.get_current_editorset()
        label = _("{vendor} {model} image file").format(\
            vendor=eset.radio.VENDOR,
            model=eset.radio.MODEL)
        types = [(label + " (*.%s)" % eset.radio.FILE_EXTENSION,
                  eset.radio.FILE_EXTENSION)]
        # Offer extra export formats supported by specific Yaesu models
        if isinstance(eset.radio, vx7.VX7Radio):
            types += [(_("VX7 Commander") + " (*.vx7)", "vx7")]
        elif isinstance(eset.radio, vx6.VX6Radio):
            types += [(_("VX6 Commander") + " (*.vx6)", "vx6")]
        elif isinstance(eset.radio, vx5.VX5Radio):
            types += [(_("EVE") + " (*.eve)", "eve")]
            types += [(_("VX5 Commander") + " (*.vx5)", "vx5")]
        # Re-prompt until the user picks a fresh name, confirms an
        # overwrite, or cancels
        while True:
            fname = platform.get_platform().gui_save_file(types=types)
            if not fname:
                return
            if os.path.exists(fname):
                dlg = inputdialog.OverwriteDialog(fname)
                owrite = dlg.run()
                dlg.destroy()
                if owrite == gtk.RESPONSE_OK:
                    break
            else:
                break
        try:
            eset.save(fname)
        except Exception,e:
            d = inputdialog.ExceptionDialog(e)
            d.run()
            d.destroy()
    def cb_clonein(self, radio, emsg=None):
        """Completion callback for a clone-in (download) thread.

        emsg is None on success; on success the downloaded image is
        opened in a new untitled tab, otherwise the error is shown.
        """
        radio.pipe.close()
        # NOTE(review): bool(emsg) is True exactly when the download
        # FAILED -- if the third argument of report_model_usage() means
        # "success", this looks inverted; confirm against reporting.py
        reporting.report_model_usage(radio, "download", bool(emsg))
        if not emsg:
            self.do_open_live(radio, tempname="(" + _("Untitled") + ")")
        else:
            d = inputdialog.ExceptionDialog(emsg)
            d.run()
            d.destroy()
    def cb_cloneout(self, radio, emsg= None):
        """Completion callback for a clone-out (upload) thread.

        emsg is None on success; otherwise it is shown to the user.
        """
        radio.pipe.close()
        # NOTE(review): reports success=True unconditionally, even when
        # emsg indicates a failure below -- verify intended semantics
        reporting.report_model_usage(radio, "upload", True)
        if emsg:
            d = inputdialog.ExceptionDialog(emsg)
            d.run()
            d.destroy()
def _get_recent_list(self):
recent = []
for i in range(0, KEEP_RECENT):
fn = CONF.get("recent%i" % i, "state")
if fn:
recent.append(fn)
return recent
def _set_recent_list(self, recent):
for fn in recent:
CONF.set("recent%i" % recent.index(fn), fn, "state")
    def update_recent_files(self):
        """Rebuild the File->recent menu from the persisted list."""
        i = 0
        for fname in self._get_recent_list():
            action_name = "recent%i" % i
            path = "/MenuBar/file/recent"
            # Replace any stale action occupying this slot
            old_action = self.menu_ag.get_action(action_name)
            if old_action:
                self.menu_ag.remove_action(old_action)
            # Double the underscores so they are not eaten as mnemonics
            file_basename = os.path.basename(fname).replace("_", "__")
            action = gtk.Action(action_name,
                                "_%i. %s" % (i+1, file_basename),
                                _("Open recent file {name}").format(name=fname),
                                "")
            # Bind fname now (extra connect arg) to avoid the
            # late-binding closure pitfall
            action.connect("activate", lambda a,f: self.do_open(f), fname)
            mid = self.menu_uim.new_merge_id()
            self.menu_uim.add_ui(mid, path,
                                 action_name, action_name,
                                 gtk.UI_MANAGER_MENUITEM, False)
            self.menu_ag.add_action(action)
            i += 1
def record_recent_file(self, filename):
recent_files = self._get_recent_list()
if filename not in recent_files:
if len(recent_files) == KEEP_RECENT:
del recent_files[-1]
recent_files.insert(0, filename)
self._set_recent_list(recent_files)
self.update_recent_files()
def import_stock_config(self, action, config):
eset = self.get_current_editorset()
count = eset.do_import(config)
    def copy_shipped_stock_configs(self, stock_dir):
        """Copy the stock config CSVs shipped with CHIRP into *stock_dir*.

        Files already present in stock_dir are skipped.  Returns False
        on the first copy failure, True otherwise.
        """
        execpath = platform.get_platform().executable_path()
        basepath = os.path.abspath(os.path.join(execpath, "stock_configs"))
        if not os.path.exists(basepath):
            # Fall back to the path used by system-wide installs
            basepath = "/usr/share/chirp/stock_configs"
        files = glob(os.path.join(basepath, "*.csv"))
        for fn in files:
            if os.path.exists(os.path.join(stock_dir, os.path.basename(fn))):
                print "Skipping existing stock config"
                continue
            try:
                shutil.copy(fn, stock_dir)
                print "Copying %s -> %s" % (fn, stock_dir)
            except Exception, e:
                print "ERROR: Unable to copy %s to %s: %s" % (fn, stock_dir, e)
                return False
        return True
    def update_stock_configs(self):
        """Ensure stock configs exist on disk and build their menu items.

        Creates the per-user stock_configs directory if needed, copies
        the shipped CSVs into it, then adds an import and an open menu
        entry for each config found there.
        """
        stock_dir = platform.get_platform().config_file("stock_configs")
        if not os.path.isdir(stock_dir):
            try:
                os.mkdir(stock_dir)
            except Exception, e:
                print "ERROR: Unable to create directory: %s" % stock_dir
                return
        if not self.copy_shipped_stock_configs(stock_dir):
            return
        def _do_import_action(config):
            # Add a "Radio -> stock" menu item importing this config
            name = os.path.splitext(os.path.basename(config))[0]
            action_name = "stock-%i" % configs.index(config)
            path = "/MenuBar/radio/stock"
            action = gtk.Action(action_name,
                                name,
                                _("Import stock "
                                  "configuration {name}").format(name=name),
                                "")
            action.connect("activate", self.import_stock_config, config)
            mid = self.menu_uim.new_merge_id()
            mid = self.menu_uim.add_ui(mid, path,
                                       action_name, action_name,
                                       gtk.UI_MANAGER_MENUITEM, False)
            self.menu_ag.add_action(action)
        def _do_open_action(config):
            # Add a "File -> openstock" menu item opening this config
            name = os.path.splitext(os.path.basename(config))[0]
            action_name = "openstock-%i" % configs.index(config)
            path = "/MenuBar/file/openstock"
            action = gtk.Action(action_name,
                                name,
                                _("Open stock "
                                  "configuration {name}").format(name=name),
                                "")
            # Bind config now (extra connect arg) to avoid late binding
            action.connect("activate", lambda a,c: self.do_open(c), config)
            mid = self.menu_uim.new_merge_id()
            mid = self.menu_uim.add_ui(mid, path,
                                       action_name, action_name,
                                       gtk.UI_MANAGER_MENUITEM, False)
            self.menu_ag.add_action(action)
        configs = glob(os.path.join(stock_dir, "*.csv"))
        for config in configs:
            _do_import_action(config)
            _do_open_action(config)
def _confirm_experimental(self, rclass):
sql_key = "warn_experimental_%s" % directory.radio_class_id(rclass)
if CONF.is_defined(sql_key, "state") and \
not CONF.get_bool(sql_key, "state"):
return True
title = _("Proceed with experimental driver?")
text = rclass.get_prompts().experimental
msg = _("This radio's driver is experimental. "
"Do you want to proceed?")
resp, squelch = common.show_warning(msg, text,
title=title,
buttons=gtk.BUTTONS_YES_NO,
can_squelch=True)
if resp == gtk.RESPONSE_YES:
CONF.set_bool(sql_key, not squelch, "state")
return resp == gtk.RESPONSE_YES
    def _show_instructions(self, radio, message):
        """Show the driver's pre-clone instructions, unless suppressed."""
        if message is None:
            return
        if CONF.get_bool("clone_instructions", "noconfirm"):
            # User opted out of instructions for all radios
            return
        d = gtk.MessageDialog(parent=self, buttons=gtk.BUTTONS_OK)
        d.set_markup("<big><b>" + _("{name} Instructions").format(
                     name=radio.get_name()) + "</b></big>")
        msg = _("{instructions}").format(instructions=message)
        d.format_secondary_markup(msg)
        again = gtk.CheckButton(_("Don't show instructions for any radio again"))
        again.show()
        d.vbox.pack_start(again, 0, 0, 0)
        # Focus the OK button so Enter dismisses the dialog immediately.
        # NOTE(review): relies on the action area being the third vbox
        # child -- fragile across GTK versions
        h_button_box = d.vbox.get_children()[2]
        try:
            ok_button = h_button_box.get_children()[0]
            ok_button.grab_default()
            ok_button.grab_focus()
        except AttributeError:
            # don't grab focus on GTK+ 2.0
            pass
        d.run()
        d.destroy()
        CONF.set_bool("clone_instructions", again.get_active(), "noconfirm")
def do_download(self, port=None, rtype=None):
d = clone.CloneSettingsDialog(parent=self)
settings = d.run()
d.destroy()
if not settings:
return
rclass = settings.radio_class
if issubclass(rclass, chirp_common.ExperimentalRadio) and \
not self._confirm_experimental(rclass):
# User does not want to proceed with experimental driver
return
self._show_instructions(rclass, rclass.get_prompts().pre_download)
print "User selected %s %s on port %s" % (rclass.VENDOR,
rclass.MODEL,
settings.port)
try:
ser = serial.Serial(port=settings.port,
baudrate=rclass.BAUD_RATE,
rtscts=rclass.HARDWARE_FLOW,
timeout=0.25)
ser.flushInput()
except serial.SerialException, e:
d = inputdialog.ExceptionDialog(e)
d.run()
d.destroy()
return
radio = settings.radio_class(ser)
fn = tempfile.mktemp()
if isinstance(radio, chirp_common.CloneModeRadio):
ct = clone.CloneThread(radio, "in", cb=self.cb_clonein, parent=self)
ct.start()
else:
self.do_open_live(radio)
    def do_upload(self, port=None, rtype=None):
        """Upload the current tab's image back to its radio.

        The port/rtype arguments are unused; the clone settings dialog
        collects the port.
        """
        eset = self.get_current_editorset()
        radio = eset.radio
        settings = clone.CloneSettings()
        settings.radio_class = radio.__class__
        d = clone.CloneSettingsDialog(settings, parent=self)
        settings = d.run()
        d.destroy()
        if not settings:
            return
        if isinstance(radio, chirp_common.ExperimentalRadio) and \
                not self._confirm_experimental(radio.__class__):
            # User does not want to proceed with experimental driver
            return
        try:
            ser = serial.Serial(port=settings.port,
                                baudrate=radio.BAUD_RATE,
                                rtscts=radio.HARDWARE_FLOW,
                                timeout=0.25)
            ser.flushInput()
        except serial.SerialException, e:
            d = inputdialog.ExceptionDialog(e)
            d.run()
            d.destroy()
            return
        self._show_instructions(radio, radio.get_prompts().pre_upload)
        radio.set_pipe(ser)
        # Upload runs on a background thread; cb_cloneout reports result
        ct = clone.CloneThread(radio, "out", cb=self.cb_cloneout, parent=self)
        ct.start()
    def do_close(self, tab_child=None):
        """Close a tab (the current one by default), prompting to save.

        Raises ModifiedError when the user cancels closing a modified
        tab.  Returns True if the tab was closed, False if there was
        nothing to close.
        """
        if tab_child:
            eset = tab_child
        else:
            eset = self.get_current_editorset()
        if not eset:
            return False
        if eset.is_modified():
            dlg = miscwidgets.YesNoDialog(title=_("Save Changes?"),
                                          parent=self,
                                          buttons=(gtk.STOCK_YES, gtk.RESPONSE_YES,
                                                   gtk.STOCK_NO, gtk.RESPONSE_NO,
                                                   gtk.STOCK_CANCEL, gtk.RESPONSE_CANCEL))
            dlg.set_text(_("File is modified, save changes before closing?"))
            res = dlg.run()
            dlg.destroy()
            if res == gtk.RESPONSE_YES:
                self.do_save(eset)
            elif res == gtk.RESPONSE_CANCEL:
                raise ModifiedError()
        # Stop and join the radio job thread before tearing the tab down
        eset.rthread.stop()
        eset.rthread.join()
        eset.prepare_close()
        if eset.radio.pipe:
            eset.radio.pipe.close()
        if isinstance(eset.radio, chirp_common.LiveRadio):
            # The live connection is gone; re-enable the open-live action
            action = self.menu_ag.get_action("openlive")
            if action:
                action.set_sensitive(True)
        page = self.tabs.page_num(eset)
        if page is not None:
            self.tabs.remove_page(page)
        return True
    def do_import(self):
        """Prompt for a file and import its memories into the current tab."""
        types = [(_("CHIRP Files") + " (*.chirp)", "*.chirp"),
                 (_("CHIRP Radio Images") + " (*.img)", "*.img"),
                 (_("CSV Files") + " (*.csv)", "*.csv"),
                 (_("EVE Files (VX5)") + " (*.eve)", "*.eve"),
                 (_("ICF Files") + " (*.icf)", "*.icf"),
                 (_("Kenwood HMK Files") + " (*.hmk)", "*.hmk"),
                 (_("Kenwood ITM Files") + " (*.itm)", "*.itm"),
                 (_("Travel Plus Files") + " (*.tpe)", "*.tpe"),
                 (_("VX5 Commander Files") + " (*.vx5)", "*.vx5"),
                 (_("VX6 Commander Files") + " (*.vx6)", "*.vx6"),
                 (_("VX7 Commander Files") + " (*.vx7)", "*.vx7")]
        filen = platform.get_platform().gui_open_file(types=types)
        if not filen:
            return
        eset = self.get_current_editorset()
        count = eset.do_import(filen)
        # Report success only if at least one memory was imported
        reporting.report_model_usage(eset.rthread.radio, "import", count > 0)
    def do_repeaterbook_prompt(self):
        """Prompt for a RepeaterBook state/county/band query.

        Persists the chosen FIPS codes and band code to the config and
        returns True, or False if the user cancelled.
        """
        # One-time attribution dialog for the free service
        if not CONF.get_bool("has_seen_credit", "repeaterbook"):
            d = gtk.MessageDialog(parent=self, buttons=gtk.BUTTONS_OK)
            d.set_markup("<big><big><b>RepeaterBook</b></big>\r\n" + \
                             "<i>North American Repeater Directory</i></big>")
            d.format_secondary_markup("For more information about this " +\
                                          "free service, please go to\r\n" +\
                                          "http://www.repeaterbook.com")
            d.run()
            d.destroy()
            CONF.set_bool("has_seen_credit", True, "repeaterbook")
        default_state = "Oregon"
        default_county = "--All--"
        default_band = "--All--"
        try:
            # Restore the previous selections; the config stores numeric
            # FIPS/band codes, so map them back to display names
            try:
                code = int(CONF.get("state", "repeaterbook"))
            except:
                code = CONF.get("state", "repeaterbook")
            for k,v in fips.FIPS_STATES.items():
                if code == v:
                    default_state = k
                    break
            code = CONF.get("county", "repeaterbook")
            for k,v in fips.FIPS_COUNTIES[fips.FIPS_STATES[default_state]].items():
                if code == v:
                    default_county = k
                    break
            code = int(CONF.get("band", "repeaterbook"))
            for k,v in RB_BANDS.items():
                if code == v:
                    default_band = k
                    break
        except:
            # Best-effort restore; fall back to the defaults above
            pass
        state = miscwidgets.make_choice(sorted(fips.FIPS_STATES.keys()),
                                        False, default_state)
        county = miscwidgets.make_choice(sorted(fips.FIPS_COUNTIES[fips.FIPS_STATES[default_state]].keys()),
                                         False, default_county)
        band = miscwidgets.make_choice(sorted(RB_BANDS.keys(), key=key_bands),
                                       False, default_band)
        def _changed(box, county):
            # Repopulate the county list when the state selection changes
            state = fips.FIPS_STATES[box.get_active_text()]
            county.get_model().clear()
            for fips_county in sorted(fips.FIPS_COUNTIES[state].keys()):
                county.append_text(fips_county)
            county.set_active(0)
        state.connect("changed", _changed, county)
        d = inputdialog.FieldDialog(title=_("RepeaterBook Query"), parent=self)
        d.add_field("State", state)
        d.add_field("County", county)
        d.add_field("Band", band)
        r = d.run()
        d.destroy()
        if r != gtk.RESPONSE_OK:
            return False
        code = fips.FIPS_STATES[state.get_active_text()]
        county_id = fips.FIPS_COUNTIES[code][county.get_active_text()]
        freq = RB_BANDS[band.get_active_text()]
        CONF.set("state", str(code), "repeaterbook")
        CONF.set("county", str(county_id), "repeaterbook")
        CONF.set("band", str(freq), "repeaterbook")
        return True
    def do_repeaterbook(self, do_import):
        """Query RepeaterBook and import the result or open it live."""
        self.window.set_cursor(gtk.gdk.Cursor(gtk.gdk.WATCH))
        if not self.do_repeaterbook_prompt():
            self.window.set_cursor(None)
            return
        # Read back the values the prompt just persisted, with defaults
        try:
            code = "%02i" % int(CONF.get("state", "repeaterbook"))
        except:
            try:
                code = CONF.get("state", "repeaterbook")
            except:
                code = '41' # Oregon default
        try:
            county = CONF.get("county", "repeaterbook")
        except:
            county = '%' # --All-- default
        try:
            band = int(CONF.get("band", "repeaterbook"))
        except:
            band = 14 # 2m default
        query = "http://www.repeaterbook.com/repeaters/downloads/chirp.php?" + \
            "func=default&state_id=%s&band=%s&freq=%%&band6=%%&loc=%%" + \
            "&county_id=%s&status_id=%%&features=%%&coverage=%%&use=%%"
        query = query % (code, band and band or "%%", county and county or "%%")
        # Do this in case the import process is going to take a while
        # to make sure we process events leading up to this
        gtk.gdk.window_process_all_updates()
        while gtk.events_pending():
            gtk.main_iteration(False)
        fn = tempfile.mktemp(".csv")
        filename, headers = urllib.urlretrieve(query, fn)
        if not os.path.exists(filename):
            print "Failed, headers were:"
            print str(headers)
            common.show_error(_("RepeaterBook query failed"))
            self.window.set_cursor(None)
            return
        # Present the downloaded CSV as a network-sourced radio
        class RBRadio(generic_csv.CSVRadio,
                      chirp_common.NetworkSourceRadio):
            VENDOR = "RepeaterBook"
            MODEL = ""
        try:
            # Validate CSV
            radio = RBRadio(filename)
            if radio.errors:
                reporting.report_misc_error("repeaterbook",
                                            ("query=%s\n" % query) +
                                            ("\n") +
                                            ("\n".join(radio.errors)))
        except errors.InvalidDataError, e:
            common.show_error(str(e))
            self.window.set_cursor(None)
            return
        except Exception, e:
            # NOTE(review): if RBRadio() itself raised here, "radio" is
            # unbound and the report/open calls below will fail
            common.log_exception()
        reporting.report_model_usage(radio, "import", True)
        self.window.set_cursor(None)
        if do_import:
            eset = self.get_current_editorset()
            count = eset.do_import(filename)
        else:
            self.do_open_live(radio, read_only=True)
def do_przemienniki_prompt(self):
d = inputdialog.FieldDialog(title='przemienniki.net query',
parent=self)
fields = {
"Country":
(miscwidgets.make_choice(['by', 'cz', 'de', 'lt', 'pl',
'sk', 'uk'], False),
lambda x: str(x.get_active_text())),
"Band":
(miscwidgets.make_choice(['10m', '4m', '6m', '2m', '70cm',
'23cm', '13cm', '3cm'], False, '2m'),
lambda x: str(x.get_active_text())),
"Mode":
(miscwidgets.make_choice(['fm', 'dv'], False),
lambda x: str(x.get_active_text())),
"Only Working":
(miscwidgets.make_choice(['', 'yes'], False),
lambda x: str(x.get_active_text())),
"Latitude": (gtk.Entry(), lambda x: float(x.get_text())),
"Longitude": (gtk.Entry(), lambda x: float(x.get_text())),
"Range": (gtk.Entry(), lambda x: int(x.get_text())),
}
for name in sorted(fields.keys()):
value, fn = fields[name]
d.add_field(name, value)
while d.run() == gtk.RESPONSE_OK:
query = "http://przemienniki.net/export/chirp.csv?"
args = []
for name, (value, fn) in fields.items():
if isinstance(value, gtk.Entry):
contents = value.get_text()
else:
contents = value.get_active_text()
if contents:
try:
_value = fn(value)
except ValueError:
common.show_error(_("Invalid value for %s") % name)
query = None
continue
args.append("=".join((name.replace(" ", "").lower(),
contents)))
query += "&".join(args)
print query
d.destroy()
return query
d.destroy()
return query
    def do_przemienniki(self, do_import):
        """Fetch a przemienniki.net CSV and import it or open it live."""
        url = self.do_przemienniki_prompt()
        if not url:
            return
        fn = tempfile.mktemp(".csv")
        filename, headers = urllib.urlretrieve(url, fn)
        if not os.path.exists(filename):
            print "Failed, headers were:"
            print str(headers)
            common.show_error(_("Query failed"))
            return
        # Present the downloaded CSV as a network-sourced radio
        class PRRadio(generic_csv.CSVRadio,
                      chirp_common.NetworkSourceRadio):
            VENDOR = "przemienniki.net"
            MODEL = ""
        try:
            radio = PRRadio(filename)
        except Exception, e:
            common.show_error(str(e))
            return
        if do_import:
            eset = self.get_current_editorset()
            count = eset.do_import(filename)
        else:
            self.do_open_live(radio, read_only=True)
    def do_rfinder_prompt(self):
        """Collect and validate RFinder credentials and location.

        Field keys carry a leading digit only to force display order;
        the digit is stripped (k[1:]) for config storage.  Returns True
        when every field validates, False if the user cancels.
        """
        fields = {"1Email" : (gtk.Entry(),
                              lambda x: "@" in x),
                  "2Password" : (gtk.Entry(),
                                 lambda x: x),
                  "3Latitude" : (gtk.Entry(),
                                 lambda x: float(x) < 90 and \
                                     float(x) > -90),
                  "4Longitude": (gtk.Entry(),
                                 lambda x: float(x) < 180 and \
                                     float(x) > -180),
                  "5Range_in_Miles": (gtk.Entry(),
                                      lambda x: int(x) > 0 and int(x) < 5000),
                  }
        d = inputdialog.FieldDialog(title="RFinder Login", parent=self)
        for k in sorted(fields.keys()):
            d.add_field(k[1:].replace("_", " "), fields[k][0])
            fields[k][0].set_text(CONF.get(k[1:], "rfinder") or "")
            # Mask only the password entry
            fields[k][0].set_visibility(k != "2Password")
        while d.run() == gtk.RESPONSE_OK:
            valid = True
            for k in sorted(fields.keys()):
                widget, validator = fields[k]
                try:
                    if validator(widget.get_text()):
                        CONF.set(k[1:], widget.get_text(), "rfinder")
                        continue
                except Exception:
                    pass
                common.show_error("Invalid value for %s" % k[1:])
                valid = False
                break
            if valid:
                d.destroy()
                return True
        d.destroy()
        return False
    def do_rfinder(self, do_import):
        """Run an RFinder query and import the result or open it live."""
        self.window.set_cursor(gtk.gdk.Cursor(gtk.gdk.WATCH))
        if not self.do_rfinder_prompt():
            self.window.set_cursor(None)
            return
        lat = CONF.get_float("Latitude", "rfinder")
        lon = CONF.get_float("Longitude", "rfinder")
        passwd = CONF.get("Password", "rfinder")
        email = CONF.get("Email", "rfinder")
        miles = CONF.get_int("Range_in_Miles", "rfinder")
        # Do this in case the import process is going to take a while
        # to make sure we process events leading up to this
        gtk.gdk.window_process_all_updates()
        while gtk.events_pending():
            gtk.main_iteration(False)
        if do_import:
            # The editorset understands rfinder:// pseudo-URLs
            eset = self.get_current_editorset()
            count = eset.do_import("rfinder://%s/%s/%f/%f/%i" % (email, passwd, lat, lon, miles))
        else:
            from chirp import rfinder
            radio = rfinder.RFinderRadio(None)
            radio.set_params((lat, lon), miles, email, passwd)
            self.do_open_live(radio, read_only=True)
        self.window.set_cursor(None)
    def do_radioreference_prompt(self):
        """Collect and validate RadioReference.com credentials.

        Field keys carry a leading digit only to force display order;
        the digit is stripped (k[1:]) for config storage.  Returns True
        when every field validates, False if the user cancels.
        """
        fields = {"1Username" : (gtk.Entry(), lambda x: x),
                  "2Password" : (gtk.Entry(), lambda x: x),
                  "3Zipcode"  : (gtk.Entry(), lambda x: x),
                  }
        d = inputdialog.FieldDialog(title=_("RadioReference.com Query"),
                                    parent=self)
        for k in sorted(fields.keys()):
            d.add_field(k[1:], fields[k][0])
            fields[k][0].set_text(CONF.get(k[1:], "radioreference") or "")
            # Mask only the password entry
            fields[k][0].set_visibility(k != "2Password")
        while d.run() == gtk.RESPONSE_OK:
            valid = True
            for k in sorted(fields.keys()):
                widget, validator = fields[k]
                try:
                    if validator(widget.get_text()):
                        CONF.set(k[1:], widget.get_text(), "radioreference")
                        continue
                except Exception:
                    pass
                common.show_error("Invalid value for %s" % k[1:])
                valid = False
                break
            if valid:
                d.destroy()
                return True
        d.destroy()
        return False
    def do_radioreference(self, do_import):
        """Query RadioReference.com and import the result or open it live."""
        self.window.set_cursor(gtk.gdk.Cursor(gtk.gdk.WATCH))
        if not self.do_radioreference_prompt():
            self.window.set_cursor(None)
            return
        username = CONF.get("Username", "radioreference")
        passwd = CONF.get("Password", "radioreference")
        zipcode = CONF.get("Zipcode", "radioreference")
        # Do this in case the import process is going to take a while
        # to make sure we process events leading up to this
        gtk.gdk.window_process_all_updates()
        while gtk.events_pending():
            gtk.main_iteration(False)
        if do_import:
            # The editorset understands radioreference:// pseudo-URLs
            eset = self.get_current_editorset()
            count = eset.do_import("radioreference://%s/%s/%s" % (zipcode, username, passwd))
        else:
            try:
                from chirp import radioreference
                radio = radioreference.RadioReferenceRadio(None)
                radio.set_params(zipcode, username, passwd)
                self.do_open_live(radio, read_only=True)
            except errors.RadioError, e:
                common.show_error(e)
        self.window.set_cursor(None)
    def do_export(self):
        """Export the current tab's memories to a CSV or .chirp file."""
        types = [(_("CSV Files") + " (*.csv)", "csv"),
                 (_("CHIRP Files") + " (*.chirp)", "chirp"),
                 ]
        eset = self.get_current_editorset()
        if os.path.exists(eset.filename):
            # Suggest the image's basename without its extension
            base = os.path.basename(eset.filename)
            if "." in base:
                base = base[:base.rindex(".")]
            defname = base
        else:
            defname = "radio"
        filen = platform.get_platform().gui_save_file(default_name=defname,
                                                      types=types)
        if not filen:
            return
        if os.path.exists(filen):
            dlg = inputdialog.OverwriteDialog(filen)
            owrite = dlg.run()
            dlg.destroy()
            if owrite != gtk.RESPONSE_OK:
                return
            os.remove(filen)
        count = eset.do_export(filen)
        # Report success only if at least one memory was exported
        reporting.report_model_usage(eset.rthread.radio, "export", count > 0)
    def do_about(self):
        """Show the About dialog with version and credit information."""
        d = gtk.AboutDialog()
        d.set_transient_for(self)
        import sys
        verinfo = "GTK %s\nPyGTK %s\nPython %s\n" % ( \
            ".".join([str(x) for x in gtk.gtk_version]),
            ".".join([str(x) for x in gtk.pygtk_version]),
            sys.version.split()[0])
        d.set_name("CHIRP")
        d.set_version(CHIRP_VERSION)
        d.set_copyright("Copyright 2013 Dan Smith (KK7DS)")
        d.set_website("http://chirp.danplanet.com")
        d.set_authors(("Dan Smith KK7DS <dsmith@danplanet.com>",
                       _("With significant contributions from:"),
                       "Tom KD7LXL",
                       "Marco IZ3GME",
                       "Jim KC9HI"
                       ))
        d.set_translator_credits("Polish: Grzegorz SQ2RBY" +
                                 os.linesep +
                                 "Italian: Fabio IZ2QDH" +
                                 os.linesep +
                                 "Dutch: Michael PD4MT" +
                                 os.linesep +
                                 "German: Benjamin HB9EUK" +
                                 os.linesep +
                                 "Hungarian: Attila HA7JA" +
                                 os.linesep +
                                 "Russian: Dmitry Slukin" +
                                 os.linesep +
                                 "Portuguese (BR): Crezivando PP7CJ")
        d.set_comments(verinfo)
        d.run()
        d.destroy()
    def do_documentation(self):
        """Point the user at the CHIRP documentation wiki."""
        d = gtk.MessageDialog(buttons=gtk.BUTTONS_OK, parent=self,
                              type=gtk.MESSAGE_INFO)
        d.set_markup("<b><big>" + _("CHIRP Documentation") + "</big></b>\r\n")
        msg = _("Documentation for CHIRP, including FAQs, and help for common "
                "problems is available on the CHIRP web site, please go to\n\n"
                "<a href=\"http://chirp.danplanet.com/projects/chirp/wiki/"
                "Documentation\">"
                "http://chirp.danplanet.com/projects/chirp/wiki/"
                "Documentation</a>\n")
        # GTK markup wants CRLF line breaks here
        d.format_secondary_markup(msg.replace("\n","\r\n"))
        d.run()
        d.destroy()
def do_columns(self):
eset = self.get_current_editorset()
driver = directory.get_driver(eset.rthread.radio.__class__)
radio_name = "%s %s %s" % (eset.rthread.radio.VENDOR,
eset.rthread.radio.MODEL,
eset.rthread.radio.VARIANT)
d = gtk.Dialog(title=_("Select Columns"),
parent=self,
buttons=(gtk.STOCK_OK, gtk.RESPONSE_OK,
gtk.STOCK_CANCEL, gtk.RESPONSE_CANCEL))
vbox = gtk.VBox()
vbox.show()
sw = gtk.ScrolledWindow()
sw.set_policy(gtk.POLICY_NEVER, gtk.POLICY_AUTOMATIC)
sw.add_with_viewport(vbox)
sw.show()
d.vbox.pack_start(sw, 1, 1, 1)
d.set_size_request(-1, 300)
d.set_resizable(False)
label = gtk.Label(_("Visible columns for {radio}").format(radio=radio_name))
label.show()
vbox.pack_start(label)
fields = []
memedit = eset.get_current_editor() #.editors["memedit"]
unsupported = memedit.get_unsupported_columns()
for colspec in memedit.cols:
if colspec[0].startswith("_"):
continue
elif colspec[0] in unsupported:
continue
label = colspec[0]
visible = memedit.get_column_visible(memedit.col(label))
widget = gtk.CheckButton(label)
widget.set_active(visible)
fields.append(widget)
vbox.pack_start(widget, 1, 1, 1)
widget.show()
res = d.run()
selected_columns = []
if res == gtk.RESPONSE_OK:
for widget in fields:
colnum = memedit.col(widget.get_label())
memedit.set_column_visible(colnum, widget.get_active())
if widget.get_active():
selected_columns.append(widget.get_label())
d.destroy()
CONF.set(driver, ",".join(selected_columns), "memedit_columns")
def do_hide_unused(self, action):
eset = self.get_current_editorset()
if eset is None:
conf = config.get("memedit")
conf.set_bool("hide_unused", action.get_active())
else:
for editortype, editor in eset.editors.iteritems():
if "memedit" in editortype:
editor.set_hide_unused(action.get_active())
def do_clearq(self):
eset = self.get_current_editorset()
eset.rthread.flush()
def do_copy(self, cut):
eset = self.get_current_editorset()
eset.get_current_editor().copy_selection(cut)
def do_paste(self):
eset = self.get_current_editorset()
eset.get_current_editor().paste_selection()
def do_delete(self):
eset = self.get_current_editorset()
eset.get_current_editor().copy_selection(True)
    def do_toggle_report(self, action):
        """Toggle usage reporting, confirming before allowing disable."""
        if not action.get_active():
            d = gtk.MessageDialog(buttons=gtk.BUTTONS_YES_NO,
                                  parent=self)
            d.set_markup("<b><big>" + _("Reporting is disabled") + "</big></b>")
            msg = _("The reporting feature of CHIRP is designed to help "
                    "<u>improve quality</u> by allowing the authors to focus "
                    "on the radio drivers used most often and errors "
                    "experienced by the users. The reports contain no "
                    "identifying information and are used only for statistical "
                    "purposes by the authors. Your privacy is extremely "
                    "important, but <u>please consider leaving this feature "
                    "enabled to help make CHIRP better!</u>\n\n<b>Are you "
                    "sure you want to disable this feature?</b>")
            d.format_secondary_markup(msg.replace("\n", "\r\n"))
            r = d.run()
            d.destroy()
            if r == gtk.RESPONSE_NO:
                # User backed out of disabling; flip the toggle back
                action.set_active(not action.get_active())
        # Config stores the inverse ("no_report") of the toggle state
        conf = config.get()
        conf.set_bool("no_report", not action.get_active())
    def do_toggle_no_smart_tmode(self, action):
        # Config stores the inverse ("no_smart_tmode") of the toggle state
        CONF.set_bool("no_smart_tmode", not action.get_active(), "memedit")
def do_toggle_developer(self, action):
conf = config.get()
conf.set_bool("developer", action.get_active(), "state")
for name in ["viewdeveloper", "loadmod"]:
devaction = self.menu_ag.get_action(name)
devaction.set_visible(action.get_active())
    def do_change_language(self):
        """Let the user pick a UI language (takes effect after restart)."""
        langs = ["Auto", "English", "Polish", "Italian", "Dutch", "German",
                 "Hungarian", "Russian", "Portuguese (BR)"]
        d = inputdialog.ChoiceDialog(langs, parent=self,
                                     title="Choose Language")
        d.label.set_text(_("Choose a language or Auto to use the "
                           "operating system default. You will need to "
                           "restart the application before the change "
                           "will take effect"))
        d.label.set_line_wrap(True)
        r = d.run()
        if r == gtk.RESPONSE_OK:
            print "Chose language %s" % d.choice.get_active_text()
            conf = config.get()
            conf.set("language", d.choice.get_active_text(), "state")
        d.destroy()
def load_module(self):
    """Load a user-selected Python file as a radio driver module.

    Developer feature: exec()s arbitrary code from the chosen file into
    this module's globals, allowing a loaded driver to replace a
    built-in one.  SECURITY: this runs untrusted code by design; it is
    only reachable when developer mode is enabled.
    """
    types = [(_("Python Modules") + "*.py", "*.py")]
    filen = platform.get_platform().gui_open_file(types=types)
    if not filen:
        return
    # We're in development mode, so we need to tell the directory to
    # allow a loaded module to override an existing driver, against
    # its normal better judgement
    directory.enable_reregistrations()
    try:
        module = file(filen)
        code = module.read()
        module.close()
        pyc = compile(code, filen, 'exec')
        # See this for why:
        # http://stackoverflow.com/questions/2904274/globals-and-locals-in-python-exec
        exec(pyc, globals(), globals())
    except Exception, e:
        common.log_exception()
        common.show_error("Unable to load module: %s" % e)
def mh(self, _action, *args):
    """Central menu handler: dispatch a gtk.Action to its do_* method.

    Every menu item registered in make_menubar() routes here; the action
    name selects the handler.  After any handled action the tab-switched
    hook runs to refresh menu sensitivity.
    """
    action = _action.get_name()
    if action == "quit":
        gtk.main_quit()
    elif action == "new":
        self.do_new()
    elif action == "open":
        self.do_open()
    elif action == "save":
        self.do_save()
    elif action == "saveas":
        self.do_saveas()
    elif action.startswith("download"):
        self.do_download(*args)
    elif action.startswith("upload"):
        self.do_upload(*args)
    elif action == "close":
        self.do_close()
    elif action == "import":
        self.do_import()
    elif action in ["qrfinder", "irfinder"]:
        # Leading 'i' = import into current tab, 'q' = query into new tab.
        self.do_rfinder(action[0] == "i")
    elif action in ["qradioreference", "iradioreference"]:
        self.do_radioreference(action[0] == "i")
    elif action == "export":
        self.do_export()
    elif action in ["qrbook", "irbook"]:
        self.do_repeaterbook(action[0] == "i")
    elif action in ["qpr", "ipr"]:
        self.do_przemienniki(action[0] == "i")
    elif action == "about":
        self.do_about()
    elif action == "documentation":
        self.do_documentation()
    elif action == "columns":
        self.do_columns()
    elif action == "hide_unused":
        self.do_hide_unused(_action)
    elif action == "cancelq":
        self.do_clearq()
    elif action == "report":
        self.do_toggle_report(_action)
    elif action == "channel_defaults":
        # The memedit thread also has an instance of bandplans.
        bp = bandplans.BandPlans(CONF)
        bp.select_bandplan(self)
    elif action == "no_smart_tmode":
        self.do_toggle_no_smart_tmode(_action)
    elif action == "developer":
        self.do_toggle_developer(_action)
    elif action in ["cut", "copy", "paste", "delete",
                    "move_up", "move_dn", "exchange",
                    "devshowraw", "devdiffraw"]:
        # Editor-level actions are forwarded to the active editor.
        self.get_current_editorset().get_current_editor().hotkey(_action)
    elif action == "devdifftab":
        self.do_diff_radio()
    elif action == "language":
        self.do_change_language()
    elif action == "loadmod":
        self.load_module()
    else:
        # Unknown action: do nothing (and skip the tab refresh below).
        return
    self.ev_tab_switched()
def make_menubar(self):
    """Build the application menu bar from a UIManager XML definition.

    Creates all gtk.Actions (plain and toggle), wires them to self.mh,
    seeds the toggle states from saved config, and returns the menubar
    widget.  Also stashes the UIManager/ActionGroup on self for later
    lookups and remembers the "recent files" submenu widget.
    """
    menu_xml = """
<ui>
  <menubar name="MenuBar">
    <menu action="file">
      <menuitem action="new"/>
      <menuitem action="open"/>
      <menu action="openstock" name="openstock"/>
      <menu action="recent" name="recent"/>
      <menuitem action="save"/>
      <menuitem action="saveas"/>
      <menuitem action="loadmod"/>
      <separator/>
      <menuitem action="import"/>
      <menuitem action="export"/>
      <separator/>
      <menuitem action="close"/>
      <menuitem action="quit"/>
    </menu>
    <menu action="edit">
      <menuitem action="cut"/>
      <menuitem action="copy"/>
      <menuitem action="paste"/>
      <menuitem action="delete"/>
      <separator/>
      <menuitem action="move_up"/>
      <menuitem action="move_dn"/>
      <menuitem action="exchange"/>
    </menu>
    <menu action="view">
      <menuitem action="columns"/>
      <menuitem action="hide_unused"/>
      <menuitem action="no_smart_tmode"/>
      <menu action="viewdeveloper">
        <menuitem action="devshowraw"/>
        <menuitem action="devdiffraw"/>
        <menuitem action="devdifftab"/>
      </menu>
      <menuitem action="language"/>
    </menu>
    <menu action="radio" name="radio">
      <menuitem action="download"/>
      <menuitem action="upload"/>
      <menu action="importsrc" name="importsrc">
        <menuitem action="iradioreference"/>
        <menuitem action="irbook"/>
        <menuitem action="ipr"/>
        <menuitem action="irfinder"/>
      </menu>
      <menu action="querysrc" name="querysrc">
        <menuitem action="qradioreference"/>
        <menuitem action="qrbook"/>
        <menuitem action="qpr"/>
        <menuitem action="qrfinder"/>
      </menu>
      <menu action="stock" name="stock"/>
      <separator/>
      <menuitem action="channel_defaults"/>
      <separator/>
      <menuitem action="cancelq"/>
    </menu>
    <menu action="help">
      <menuitem action="about"/>
      <menuitem action="documentation"/>
      <menuitem action="report"/>
      <menuitem action="developer"/>
    </menu>
  </menubar>
</ui>
"""
    # (name, stock_id, label, accelerator, tooltip, callback) -- all route
    # to the central dispatcher self.mh.
    actions = [
        ('file', None, _("_File"), None, None, self.mh),
        ('new', gtk.STOCK_NEW, None, None, None, self.mh),
        ('open', gtk.STOCK_OPEN, None, None, None, self.mh),
        ('openstock', None, _("Open stock config"), None, None, self.mh),
        ('recent', None, _("_Recent"), None, None, self.mh),
        ('save', gtk.STOCK_SAVE, None, None, None, self.mh),
        ('saveas', gtk.STOCK_SAVE_AS, None, None, None, self.mh),
        ('loadmod', None, _("Load Module"), None, None, self.mh),
        ('close', gtk.STOCK_CLOSE, None, None, None, self.mh),
        ('quit', gtk.STOCK_QUIT, None, None, None, self.mh),
        ('edit', None, _("_Edit"), None, None, self.mh),
        ('cut', None, _("_Cut"), "<Ctrl>x", None, self.mh),
        ('copy', None, _("_Copy"), "<Ctrl>c", None, self.mh),
        ('paste', None, _("_Paste"), "<Ctrl>v", None, self.mh),
        ('delete', None, _("_Delete"), "Delete", None, self.mh),
        ('move_up', None, _("Move _Up"), "<Control>Up", None, self.mh),
        ('move_dn', None, _("Move Dow_n"), "<Control>Down", None, self.mh),
        ('exchange', None, _("E_xchange"), "<Control><Shift>x", None, self.mh),
        ('view', None, _("_View"), None, None, self.mh),
        ('columns', None, _("Columns"), None, None, self.mh),
        ('viewdeveloper', None, _("Developer"), None, None, self.mh),
        ('devshowraw', None, _('Show raw memory'), "<Control><Shift>r", None, self.mh),
        ('devdiffraw', None, _("Diff raw memories"), "<Control><Shift>d", None, self.mh),
        ('devdifftab', None, _("Diff tabs"), "<Control><Shift>t", None, self.mh),
        ('language', None, _("Change language"), None, None, self.mh),
        ('radio', None, _("_Radio"), None, None, self.mh),
        ('download', None, _("Download From Radio"), "<Alt>d", None, self.mh),
        ('upload', None, _("Upload To Radio"), "<Alt>u", None, self.mh),
        ('import', None, _("Import"), "<Alt>i", None, self.mh),
        ('export', None, _("Export"), "<Alt>x", None, self.mh),
        ('importsrc', None, _("Import from data source"), None, None, self.mh),
        ('iradioreference', None, _("RadioReference.com"), None, None, self.mh),
        ('irfinder', None, _("RFinder"), None, None, self.mh),
        ('irbook', None, _("RepeaterBook"), None, None, self.mh),
        ('ipr', None, _("przemienniki.net"), None, None, self.mh),
        ('querysrc', None, _("Query data source"), None, None, self.mh),
        ('qradioreference', None, _("RadioReference.com"), None, None, self.mh),
        ('qrfinder', None, _("RFinder"), None, None, self.mh),
        ('qpr', None, _("przemienniki.net"), None, None, self.mh),
        ('qrbook', None, _("RepeaterBook"), None, None, self.mh),
        ('export_chirp', None, _("CHIRP Native File"), None, None, self.mh),
        ('export_csv', None, _("CSV File"), None, None, self.mh),
        ('stock', None, _("Import from stock config"), None, None, self.mh),
        ('channel_defaults', None, _("Channel defaults"), None, None, self.mh),
        ('cancelq', gtk.STOCK_STOP, None, "Escape", None, self.mh),
        ('help', None, _('Help'), None, None, self.mh),
        ('about', gtk.STOCK_ABOUT, None, None, None, self.mh),
        ('documentation', None, _("Documentation"), None, None, self.mh),
        ]

    conf = config.get()
    # Initial toggle states come from config; "no_*" keys are stored
    # inverted relative to the menu item sense.
    # (Renamed from re/hu/dv/st: 're' shadowed the stdlib regex module
    # name, and none of the short names said what they held.)
    report_stats = not conf.get_bool("no_report")
    hide_unused = conf.get_bool("hide_unused", "memedit")
    developer = conf.get_bool("developer", "state")
    smart_tmode = not conf.get_bool("no_smart_tmode", "memedit")

    toggles = [
        ('report', None, _("Report statistics"), None, None, self.mh, report_stats),
        ('hide_unused', None, _("Hide Unused Fields"), None, None, self.mh, hide_unused),
        ('no_smart_tmode', None, _("Smart Tone Modes"), None, None, self.mh, smart_tmode),
        ('developer', None, _("Enable Developer Functions"), None, None, self.mh, developer),
        ]

    self.menu_uim = gtk.UIManager()
    self.menu_ag = gtk.ActionGroup("MenuBar")
    self.menu_ag.add_actions(actions)
    self.menu_ag.add_toggle_actions(toggles)

    self.menu_uim.insert_action_group(self.menu_ag, 0)
    self.menu_uim.add_ui_from_string(menu_xml)

    self.add_accel_group(self.menu_uim.get_accel_group())

    self.recentmenu = self.menu_uim.get_widget("/MenuBar/file/recent")

    # Initialize developer-item visibility from the saved toggle state.
    self.do_toggle_developer(self.menu_ag.get_action("developer"))

    return self.menu_uim.get_widget("/MenuBar")
def make_tabs(self):
    """Create the main tab notebook, remember it on self, and return it."""
    notebook = gtk.Notebook()
    self.tabs = notebook
    return notebook
def close_out(self):
    """Try to close every tab, then quit the GTK main loop.

    Returns True after everything closed and gtk.main_quit() ran;
    returns False if closing a modified tab was refused (ModifiedError),
    leaving the remaining tabs open.
    """
    num = self.tabs.get_n_pages()
    # Close from the last tab backwards so indices stay valid as pages go.
    while num > 0:
        num -= 1
        print "Closing %i" % num
        try:
            self.do_close(self.tabs.get_nth_page(num))
        except ModifiedError:
            # User declined to discard unsaved changes: abort shutdown.
            return False
    gtk.main_quit()
    return True
def make_status_bar(self):
    """Build the two-pane status bar: general messages + radio status."""
    box = gtk.HBox(False, 2)

    self.sb_general = gtk.Statusbar()
    self.sb_general.set_has_resize_grip(False)
    self.sb_radio = gtk.Statusbar()
    self.sb_radio.set_has_resize_grip(True)

    # Pack left-to-right: general first, radio second.
    for bar in (self.sb_general, self.sb_radio):
        bar.show()
        box.pack_start(bar, 1, 1, 1)

    box.show()
    return box
def ev_delete(self, window, event):
    """Window delete-event handler: veto the close if shutdown was refused."""
    # Returning True stops GTK from destroying the window.
    return True if not self.close_out() else None
def ev_destroy(self, window):
    """Window destroy handler: veto the close if shutdown was refused."""
    # Returning True signals "don't exit" to GTK.
    return True if not self.close_out() else None
def setup_extra_hotkeys(self):
    """Register accelerators that are not attached to any menu item.

    The actions list is currently empty -- it is a hook: add
    ("action_name", "key", function) tuples to bind extra hotkeys.
    `memedit` forwards an action to the current memory editor.
    """
    accelg = self.menu_uim.get_accel_group()
    memedit = lambda a: self.get_current_editorset().editors["memedit"].hotkey(a)
    actions = [
        # ("action_name", "key", function)
        ]

    for name, key, fn in actions:
        a = gtk.Action(name, name, name, "")
        a.connect("activate", fn)
        self.menu_ag.add_action_with_accel(a, key)
        a.set_accel_group(accelg)
        a.connect_accelerator()
def _set_icon(self):
    """Set the window icon from the install tree, else the system pixmap dir."""
    execpath = platform.get_platform().executable_path()
    path = os.path.abspath(os.path.join(execpath, "share", "chirp.png"))
    if not os.path.exists(path):
        # Fall back to the distro-installed location.
        path = "/usr/share/pixmaps/chirp.png"

    if os.path.exists(path):
        self.set_icon_from_file(path)
    else:
        # Non-fatal: just run without a custom icon.
        print "Icon %s not found" % path
def _updates(self, version):
    """Show an upgrade dialog if the server reports a newer version.

    No-ops when the check returned nothing or the server version equals
    the running CHIRP_VERSION.  Nags at most once per seven days, using
    the "last_update_check" timestamp persisted in config.
    """
    if not version:
        return

    if version == CHIRP_VERSION:
        return

    print "Server reports version %s is available" % version

    # Report new updates every seven days
    intv = 3600 * 24 * 7

    if CONF.is_defined("last_update_check", "state") and \
            (time.time() - CONF.get_int("last_update_check", "state")) < intv:
        # Nagged recently enough; stay quiet.
        return

    CONF.set_int("last_update_check", int(time.time()), "state")
    d = gtk.MessageDialog(buttons=gtk.BUTTONS_OK, parent=self,
                          type=gtk.MESSAGE_INFO)
    d.set_property("text",
                   _("A new version of CHIRP is available: " +
                     "{ver}. ".format(ver=version) +
                     "It is recommended that you upgrade, so " +
                     "go to http://chirp.danplanet.com soon!"))
    d.run()
    d.destroy()
def _init_macos(self, menu_bar):
    """Integrate with the native macOS menu bar via gtk_osxapplication.

    Best-effort: if the binding is not installed, log and run with the
    regular in-window GTK menu bar instead.
    """
    try:
        import gtk_osxapplication
        macapp = gtk_osxapplication.OSXApplication()
    except ImportError, e:
        print "No MacOS support: %s" % e
        return

    # Move the GTK menu bar into the global macOS menu.
    menu_bar.hide()
    macapp.set_menu_bar(menu_bar)

    # macOS supplies its own Quit item; hide ours and relocate About/Docs
    # into the application menu.
    quititem = self.menu_uim.get_widget("/MenuBar/file/quit")
    quititem.hide()

    aboutitem = self.menu_uim.get_widget("/MenuBar/help/about")
    macapp.insert_app_menu_item(aboutitem, 0)

    documentationitem = self.menu_uim.get_widget("/MenuBar/help/documentation")
    macapp.insert_app_menu_item(documentationitem, 0)

    macapp.set_use_quartz_accelerators(False)
    macapp.ready()

    print "Initialized MacOS support"
def __init__(self, *args, **kwargs):
    """Build the main CHIRP window: menus, tabs, status bar, persistence.

    Restores saved geometry/maximized state and last directory, shows the
    one-time reporting notice, and kicks off an async update check.
    """
    gtk.Window.__init__(self, *args, **kwargs)

    def expose(window, event):
        # Persist the window size so it can be restored on next start.
        allocation = window.get_allocation()
        CONF.set_int("window_w", allocation.width, "state")
        CONF.set_int("window_h", allocation.height, "state")
    self.connect("expose_event", expose)

    def state_change(window, event):
        # Remember whether the window is maximized.
        CONF.set_bool(
            "window_maximized",
            event.new_window_state == gtk.gdk.WINDOW_STATE_MAXIMIZED,
            "state")
    self.connect("window-state-event", state_change)

    # Restore the last-used file chooser directory, if still valid.
    d = CONF.get("last_dir", "state")
    if d and os.path.isdir(d):
        platform.get_platform().set_last_dir(d)

    vbox = gtk.VBox(False, 2)

    self._recent = []

    self.menu_ag = None
    mbar = self.make_menubar()

    if os.name != "nt":
        self._set_icon()  # Windows gets the icon from the exe
        if os.uname()[0] == "Darwin":
            self._init_macos(mbar)
    vbox.pack_start(mbar, 0, 0, 0)

    self.tabs = None
    tabs = self.make_tabs()
    tabs.connect("switch-page", lambda n, _, p: self.ev_tab_switched(p))
    tabs.connect("page-removed", lambda *a: self.ev_tab_switched())
    tabs.show()
    self.ev_tab_switched()
    vbox.pack_start(tabs, 1, 1, 1)

    vbox.pack_start(self.make_status_bar(), 0, 0, 0)

    vbox.show()

    self.add(vbox)

    try:
        width = CONF.get_int("window_w", "state")
        height = CONF.get_int("window_h", "state")
    except Exception:
        # No saved geometry yet: fall back to a sane default.
        width = 800
        height = 600
    self.set_default_size(width, height)
    if CONF.get_bool("window_maximized", "state"):
        self.maximize()
    self.set_title("CHIRP")

    self.connect("delete_event", self.ev_delete)
    self.connect("destroy", self.ev_destroy)

    if not CONF.get_bool("warned_about_reporting") and \
            not CONF.get_bool("no_report"):
        # One-time notice that anonymous usage reporting is on by default.
        d = gtk.MessageDialog(buttons=gtk.BUTTONS_OK, parent=self)
        d.set_markup("<b><big>" +
                     _("Error reporting is enabled") +
                     "</big></b>")
        d.format_secondary_markup(
            _("If you wish to disable this feature you may do so in "
              "the <u>Help</u> menu"))
        d.run()
        d.destroy()
    CONF.set_bool("warned_about_reporting", True)

    self.update_recent_files()
    self.update_stock_configs()
    self.setup_extra_hotkeys()

    def updates_callback(ver):
        # Marshal the async result back onto the GTK main loop.
        gobject.idle_add(self._updates, ver)

    if not CONF.get_bool("skip_update_check", "state"):
        reporting.check_for_updates(updates_callback)
|
# Package version string (PEP 396-style module attribute).
__version__ = '0.1.3'
|
import os
import sys
from ase import Atoms
from gpaw import GPAW
from gpaw import ConvergenceError
from gpaw.mpi import rank
from gpaw.eigensolvers.rmm_diis_old import RMM_DIIS
from gpaw import setup_paths
# Select run variant: 'A' = plain RMM-DIIS(keep_htpsit=False) run;
# 'B' = converge with the default solver first, then retry with
# keep_htpsit=False (documented below as failing to converge).
if len(sys.argv) == 1:
    run = 'A'
else:
    run = sys.argv[1]

assert run in ['A', 'B']

# Prefer locally provided PAW setups over the installed ones.
setup_paths.insert(0, '.')
setup_paths.insert(0, '../')

# Cartesian coordinates (Angstrom) for a 32-molecule water box; each row
# is one atom, grouped per molecule as listed in the 'OH2' pattern below.
positions=[
    (-0.069, 0.824,-1.295), ( 0.786, 0.943,-0.752), (-0.414,-0.001,-0.865),
    (-0.282,-0.674,-3.822), ( 0.018,-0.147,-4.624), (-0.113,-0.080,-3.034),
    ( 2.253, 1.261, 0.151), ( 2.606, 0.638,-0.539), ( 2.455, 0.790, 1.019),
    ( 3.106,-0.276,-1.795), ( 2.914, 0.459,-2.386), ( 2.447,-1.053,-1.919),
    ( 6.257,-0.625,-0.626), ( 7.107,-1.002,-0.317), ( 5.526,-1.129,-0.131),
    ( 5.451,-1.261,-2.937), ( 4.585,-0.957,-2.503), ( 6.079,-0.919,-2.200),
    (-0.515, 3.689, 0.482), (-0.218, 3.020,-0.189), ( 0.046, 3.568, 1.382),
    (-0.205, 2.640,-3.337), (-1.083, 2.576,-3.771), (-0.213, 1.885,-2.680),
    ( 0.132, 6.301,-0.278), ( 1.104, 6.366,-0.068), (-0.148, 5.363,-0.112),
    (-0.505, 6.680,-3.285), (-0.674, 7.677,-3.447), (-0.965, 6.278,-2.517),
    ( 4.063, 3.342,-0.474), ( 4.950, 2.912,-0.663), ( 3.484, 2.619,-0.125),
    ( 2.575, 2.404,-3.170), ( 1.694, 2.841,-3.296), ( 3.049, 2.956,-2.503),
    ( 6.666, 2.030,-0.815), ( 7.476, 2.277,-0.316), ( 6.473, 1.064,-0.651),
    ( 6.860, 2.591,-3.584), ( 6.928, 3.530,-3.176), ( 6.978, 2.097,-2.754),
    ( 2.931, 6.022,-0.243), ( 3.732, 6.562,-0.004), ( 3.226, 5.115,-0.404),
    ( 2.291, 7.140,-2.455), ( 1.317, 6.937,-2.532), ( 2.586, 6.574,-1.669),
    ( 6.843, 5.460, 1.065), ( 7.803, 5.290, 0.852), ( 6.727, 5.424, 2.062),
    ( 6.896, 4.784,-2.130), ( 6.191, 5.238,-2.702), ( 6.463, 4.665,-1.259),
    ( 0.398, 0.691, 4.098), ( 0.047, 1.567, 3.807), ( 1.268, 0.490, 3.632),
    ( 2.687, 0.272, 2.641), ( 3.078, 1.126, 3.027), ( 3.376,-0.501, 2.793),
    ( 6.002,-0.525, 4.002), ( 6.152, 0.405, 3.660), ( 5.987,-0.447, 4.980),
    ( 0.649, 3.541, 2.897), ( 0.245, 4.301, 3.459), ( 1.638, 3.457, 3.084),
    (-0.075, 5.662, 4.233), (-0.182, 6.512, 3.776), (-0.241, 5.961, 5.212),
    ( 3.243, 2.585, 3.878), ( 3.110, 2.343, 4.817), ( 4.262, 2.718, 3.780),
    ( 5.942, 2.582, 3.712), ( 6.250, 3.500, 3.566), ( 6.379, 2.564, 4.636),
    ( 2.686, 5.638, 5.164), ( 1.781, 5.472, 4.698), ( 2.454, 6.286, 5.887),
    ( 6.744, 5.276, 3.826), ( 6.238, 5.608, 4.632), ( 7.707, 5.258, 4.110),
    ( 8.573, 8.472, 0.407), ( 9.069, 7.656, 0.067), ( 8.472, 8.425, 1.397),
    ( 8.758, 8.245, 2.989), ( 9.294, 9.091, 3.172), ( 7.906, 8.527, 3.373),
    ( 4.006, 7.734, 3.021), ( 4.685, 8.238, 3.547), ( 3.468, 7.158, 3.624),
    ( 5.281, 6.089, 6.035), ( 5.131, 7.033, 6.378), ( 4.428, 5.704, 5.720),
    ( 5.067, 7.323, 0.662), ( 5.785, 6.667, 0.703), ( 4.718, 7.252, 1.585)]

prefix = 'b256H2O'
L = 9.8553729  # cubic box edge length in Angstrom
atoms = Atoms('32(OH2)',
              positions=positions)
atoms.set_cell((L,L,L),scale_atoms=False)
atoms.set_pbc(1)

# Replicate the box along z to get 64 molecules (256 atoms total).
r = [1, 1, 2]
atoms = atoms.repeat(r)
n = [56 * ri for ri in r]  # grid points per axis

# Scale band count with the repeats.
nbands = 2*6*11 # 132
for ri in r: nbands = nbands*ri

es = RMM_DIIS(keep_htpsit=False)
calc = GPAW(nbands=nbands,
            # uncomment next two lines to use lcao/sz
            #mode='lcao',
            #basis='sz',
            gpts=tuple(n),
            #maxiter=5,
            width = 0.01,
            eigensolver = es,
            txt=prefix + '.txt',
            )

if run == 'A':
    atoms.set_calculator(calc)
    pot = atoms.get_potential_energy()
elif run == 'B':
    # converge first with keep_htpsit=True
    calc.set(eigensolver='rmm-diis')
    calc.set(txt=prefix + '_True.txt')
    atoms.set_calculator(calc)
    pot = atoms.get_potential_energy()
    # fails to converge with keep_htpsit=False
    calc.set(eigensolver=es)
    calc.set(maxiter=200)
    calc.set(txt=prefix + '_False.txt')
    atoms.set_calculator(calc)
    pot = atoms.get_potential_energy()
|
import configparser
import os
import traceback
__version__ = '18.10.16'
class CCParser(object):
    """Typed convenience wrapper around ``configparser.ConfigParser``.

    Reads and writes values of one section of an INI file, returning
    typed values (bool/int/float/str/list) with configurable fallback
    defaults instead of raising when the file, section, or key is
    missing or malformed.  Lists are stored joined with ``'|'``.
    """

    def __init__(self, ini_path='', section='', debug=False):
        """
        To init CCParser you can enter a path and a section.  If you
        don't know them yet you can leave them empty and set them later
        with set_configuration_path() / set_section().

        If debug is set to True, failed type conversions print their
        traceback.
        """
        self._debug = debug
        self._config = configparser.ConfigParser()

        # Always define these so check_value()/get_section()/write()
        # never raise AttributeError on an instance built without
        # arguments (previously self.ini_path only existed after
        # set_configuration_path() had been called).
        self.ini_path = ''
        self.__section = ''

        if ini_path != '':
            self.set_configuration_path(ini_path)
        if section != '':
            self.set_section(section)

        self.__default_bool = False
        self.__default_string = ''
        self.__default_int = 0
        self.__default_float = 0.0
        self.__default_list = []

        self._accepted_true_bool = ('true', 'yes')    # must be lower case
        self._accepted_false_bool = ('false', 'no')   # must be lower case

    def __str__(self):
        return '''
CCParser instance: {}
Configuration Path: {}
Section: {}
Default boolean: {}
Default float: {}
Default integer: {}
Default string: {}
Default list: {}
'''.format( repr(self),
            self.get_configuration_path(),
            self.get_section(),
            self.get_default_bool(),
            self.get_default_float(),
            self.get_default_int(),
            self.get_default_str(),
            self.get_default_list())

    def check_value(self, value):
        """
        return False if the value don't exists,
        return True if the value exists
        """
        if not os.path.exists(self.ini_path):
            return False

        try:
            self._config.read(self.ini_path)
        except Exception:
            # Damaged file or a file without a section header.
            print("CCParser Warning: reading damaged file or file without section")
            print(traceback.format_exc())
            print()
            return False

        if not self._config.has_section(self.__section):
            return False
        return self._config.has_option(self.__section, value)

    def get_bool(self, value):
        """
        If the value exists, return the boolean
        corresponding to the string. If it does
        not exists, or the value can not be converted
        to a boolean, return the default boolean.
        """
        if not self.check_value(value):
            return self.__default_bool

        val = self._config.get(self.__section, value).lower()
        if val in self._accepted_false_bool:
            return False
        elif val in self._accepted_true_bool:
            return True
        return self.__default_bool

    def get_float(self, value):
        """
        If the value exists, return the float
        corresponding to the string. If it does
        not exists, or the value can not be converted
        to a float, return the default float.
        """
        if not self.check_value(value):
            return self.__default_float

        val = self._config.get(self.__section, value)
        try:
            return float(val)
        except Exception:
            if self._debug:
                print(traceback.format_exc())
            return self.__default_float

    def get_int(self, value):
        """
        If the value exists, return the integer
        corresponding to the string. If it does
        not exists, or the value can not be converted
        to a integer, return the default integer.
        """
        if not self.check_value(value):
            return self.__default_int

        val = self._config.get(self.__section, value)
        try:
            return int(val)
        except Exception:
            if self._debug:
                print(traceback.format_exc())
            return self.__default_int

    def get_list(self, value):
        """
        If the value exists, return the list obtained
        by splitting the stored string on '|'. If it
        does not exist, or the value can not be split,
        return the default list.

        (Docstring fixed: it previously described get_int.)
        """
        if not self.check_value(value):
            return self.__default_list

        val = self._config.get(self.__section, value)
        try:
            return val.split("|")
        except Exception:
            if self._debug:
                print(traceback.format_exc())
            return self.__default_list

    def get_str(self, value):
        """
        If the value exists, return the string,
        other wise return the default string.
        """
        if self.check_value(value):
            return self._config.get(self.__section, value)
        return self.__default_string

    def get_bool_defval(self, value, default):
        """
        If the value exists, return the boolean
        corresponding to the string. If it does
        not exists, or the value can not be converted
        to a boolean, return the second argument.
        """
        if not self.check_value(value):
            return default

        val = self._config.get(self.__section, value).lower()
        if val in self._accepted_false_bool:
            return False
        elif val in self._accepted_true_bool:
            return True
        return default

    def get_float_defval(self, value, default):
        """
        If the value exists, return the float
        corresponding to the string. If it does
        not exists, or the value can not be converted
        to a float, return the second argument.
        """
        if not self.check_value(value):
            return default

        val = self._config.get(self.__section, value)
        try:
            return float(val)
        except Exception:
            if self._debug:
                print(traceback.format_exc())
            return default

    def get_int_defval(self, value, default):
        """
        If the value exists, return the integer
        corresponding to the string. If it does
        not exists, or the value can not be converted
        to a integer, return the second argument.
        """
        if not self.check_value(value):
            return default

        val = self._config.get(self.__section, value)
        try:
            return int(val)
        except Exception:
            if self._debug:
                print(traceback.format_exc())
            return default

    def get_str_defval(self, value, default):
        """
        If the value exists, return the string,
        if it does not exists, return the
        second argument.
        """
        if self.check_value(value):
            return self._config.get(self.__section, value)
        return default

    def set_configuration_path(self, ini_path):
        """
        Set the path to the configuration file.
        """
        if isinstance(ini_path, str):
            self.ini_path = ini_path
            if not os.path.exists(ini_path) and self._debug:
                print("CCParser Warning: the path to the configuration file does not exists\n")
        else:
            print("CCParser Warning: The path is not valid.\n")
            self.ini_path = ''

    def set_section(self, section):
        """
        Set the section to check for values.
        """
        self.__section = str(section)

    def set_default_float(self, value):
        """
        Set the default float to return when
        a value does not exists. By default
        it returns 0.0
        """
        self.__default_float = value

    def set_default_string(self, value):
        """
        Set the default string to return when
        a value does not exists. By default
        it returns an empty string.
        """
        self.__default_string = value

    def set_default_bool(self, value):
        """
        Set the default boolean to return when
        a value does not exists. By default
        it returns False.
        """
        self.__default_bool = value

    def set_default_int(self, value):
        """
        Set the default integer to return when
        a value does not exists. By default
        it returns 0
        """
        self.__default_int = value

    def set_default_list(self, value):
        """
        Set the default list to return when
        a value does not exists. By default
        it returns an empty list.

        (Docstring fixed: it previously described set_default_int.)
        """
        self.__default_list = value

    def write(self, value_name, value):
        """
        Write the value name and its value.
        If the config file does not exists,
        or the directories to the path, they
        will be created.

        Lists/tuples are stored joined with '|'; all other values are
        stored via str().
        """
        if self.ini_path == '' or not isinstance(self.ini_path, str):
            print(
                "CCParser Error: Trying to write the configuration without an ini path.")
            print("Configuration Path: " + str(self.get_configuration_path()))
            print()
            return

        if not os.path.exists(os.path.dirname(self.ini_path)):
            os.makedirs(os.path.dirname(self.ini_path))

        if not os.path.exists(self.ini_path):
            open(self.ini_path, 'wt').close()

        try:
            self._config.read(self.ini_path)
        except Exception:
            print("CCParser Warning: reading damaged file or file without section")
            print(traceback.format_exc())
            print()
            return False

        if not self._config.has_section(self.__section):
            self._config.add_section(self.__section)

        if isinstance(value, (list, tuple)):
            # str(item): previously non-string items crashed the join.
            values = '|'.join(str(item) for item in value)
            self._config.set(self.__section, value_name, values)
        else:
            self._config.set(self.__section, value_name, str(value))

        with open(self.ini_path, 'w') as f:
            self._config.write(f)

    def get_default_bool(self):
        return self.__default_bool

    def get_default_float(self):
        return self.__default_float

    def get_default_str(self):
        return self.__default_string

    def get_default_int(self):
        return self.__default_int

    def get_default_list(self):
        return self.__default_list

    def get_section(self):
        return self.__section

    def get_configuration_path(self):
        return self.ini_path
if __name__ == '__main__':
    # Manual smoke test: exercises write/read round-trips.
    # NOTE(review): deletes the file at `path` and uses a hard-coded
    # developer home path -- run only with a throwaway path.
    def test(path):
        if os.path.exists(path):
            os.remove(path)
        cp = CCParser(path, 'test')
        print('section:', cp.get_section())
        cp.write('bool', False)
        print(cp.get_bool('bool'))
        cp.write('bool', True)
        print(cp.get_bool('bool'))
        cp.write('string1', 'this is a test')
        print(cp.get_str('string1'))
        print(cp)
    test('/home/rsm/Desktop/test.ini')  # unexisting file
|
from __future__ import unicode_literals
from django.db import migrations, models
import django.db.models.deletion
class Migration(migrations.Migration):
    """Replace per-room feeding periods with per-flock ones.

    Creates FeedingPeriodForFlock and drops the FeedingPeriodForRoom
    model (its FK fields must be removed before the model itself).
    """

    dependencies = [
        ('flocks', '0015_auto_20170624_1312'),
        ('feeding', '0005_auto_20170625_1129'),
    ]

    operations = [
        migrations.CreateModel(
            name='FeedingPeriodForFlock',
            fields=[
                ('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
                ('start_date', models.DateField()),
                # end_date is nullable: an open-ended (ongoing) period.
                ('end_date', models.DateField(null=True)),
                ('feed_type', models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, to='feeding.FeedType')),
                ('flock', models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, to='flocks.Flock')),
            ],
        ),
        migrations.RemoveField(
            model_name='feedingperiodforroom',
            name='feed_type',
        ),
        migrations.RemoveField(
            model_name='feedingperiodforroom',
            name='room',
        ),
        migrations.DeleteModel(
            name='FeedingPeriodForRoom',
        ),
    ]
|
from ptools import *

# Fetch PDB entry 1F88 and write it to a local file "1F88.pdb".
pdb1f88 = getPDB("1F88")
WritePDB(pdb1f88, "1F88.pdb")
|
from node import models
from django.forms import ModelForm
from . import cdmsportalfunc as cpf
from django.core.exceptions import ValidationError
from django import forms
class MoleculeForm(ModelForm):
    """Auto-generated admin/edit form exposing all Molecules fields."""
    class Meta:
        model = models.Molecules
        fields = '__all__'
class SpecieForm(ModelForm):
    """Form for Species; archive/activation dates are shown read-only."""
    datearchived = forms.DateField(
        widget=forms.TextInput(attrs={'readonly': 'readonly'})
    )
    dateactivated = forms.DateField(
        widget=forms.TextInput(attrs={'readonly': 'readonly'})
    )

    class Meta:
        model = models.Species
        fields = '__all__'
class FilterForm(ModelForm):
    """Auto-generated form exposing all QuantumNumbersFilter fields."""
    class Meta:
        model = models.QuantumNumbersFilter
        fields = '__all__'
class XsamsConversionForm(forms.Form):
    """Convert an XSAMS document, given either a URL or an uploaded file.

    clean() enforces "exactly one of inurl/infile", fetches/reads the
    document, and stores the transformed result in cleaned_data['result'].
    """
    inurl = forms.URLField(
        label='Input URL',
        required=False,
        widget=forms.TextInput(
            attrs={'size': 50,
                   'title': 'Paste here a URL that delivers an XSAMS '
                            'document.',
                   }))
    # required=False fixes the either/or validation below: with the old
    # required FileField, a URL-only submission failed field validation
    # before clean() could accept it.
    infile = forms.FileField(required=False)
    format = forms.ChoiceField(
        choices=[("RAD 3D", "RAD 3D"), ("CSV", "CSV")], )

    def clean(self):
        infile = self.cleaned_data.get('infile')
        inurl = self.cleaned_data.get('inurl')
        if (infile and inurl):
            raise ValidationError('Give either input file or URL!')
        if inurl:
            try:
                data = cpf.urlopen(inurl)
            except Exception as err:
                raise ValidationError('Could not open given URL: %s' % err)
        elif infile:
            data = infile
        else:
            raise ValidationError('Give either input file or URL!')

        try:
            self.cleaned_data['result'] = cpf.applyStylesheet2File(data)
        except Exception as err:
            raise ValidationError('Could not transform XML file: %s' % err)
        return self.cleaned_data
|
"""Clean db
Revision ID: 4f8bd7cac829
Revises: 3f249e0d2769
Create Date: 2014-01-09 14:03:13.997656
"""
revision = '4f8bd7cac829'
down_revision = '3f249e0d2769'
from alembic import op
import sqlalchemy as sa
def upgrade():
    ''' Drop the columns calendar_multiple_meetings and
    calendar_regional_meetings and rename meeting_region into
    meeting_location (widening it from String(100) to Text).
    '''
    op.drop_column('calendars', 'calendar_multiple_meetings')
    op.drop_column('calendars', 'calendar_regional_meetings')
    op.alter_column(
        'meetings',
        column_name='meeting_region',
        name='meeting_location',
        type_=sa.Text,
        existing_type=sa.String(100))
def downgrade():
    ''' Add the columns calendar_multiple_meetings and
    calendar_regional_meetings and rename meeting_location into
    meeting_region (narrowing it back to String(100)).
    '''
    # nullable=False relies on default=False to fill existing rows.
    op.add_column(
        'calendars',
        sa.Column(
            'calendar_multiple_meetings',
            sa.Boolean, default=False,
            nullable=False
        )
    )
    op.add_column(
        'calendars',
        sa.Column(
            'calendar_regional_meetings',
            sa.Boolean, default=False,
            nullable=False
        )
    )
    op.alter_column(
        'meetings',
        column_name='meeting_location',
        name='meeting_region',
        type_=sa.String(100),
        existing_type=sa.Text)
|
"""
Takes Google's json encoded spreadsheet and prints a python dictionary keyed by
the values in the first column of the SS. ©2017 J. J. Crump, GNU general public
license
"""
import urllib2
from pprint import pprint
import re
import json
# Public JSON feed of the source Google spreadsheet.
ssURL = "https://spreadsheets.google.com/feeds/list/1OPNQC3xBp3iQTpjVfd6cpvvA0BpHWhb3QiNOvGFZ9z8/od6/public/basic?prettyprint=true&alt=json"

# Network fetch happens at import time (Python 2 urllib2).
response = urllib2.urlopen(ssURL)
jsonIn = response.read()
pyDict = json.loads(jsonIn)
entryList = pyDict['feed']['entry']
# Ordered field names; the first ("name") is the row key carried in the
# entry title, not in the row content string.
fields = ["name", "city", "state", "zip"]
SSdict = {}
def parsestring(rowstring, fields):
    """Yield (fieldname, fieldvalue) tuples parsed from one row string.

    ``rowstring`` is the flattened "field: value, field: value" content of
    a spreadsheet row; ``fields`` is the ordered field list whose first
    entry (the row key) is skipped.  Fields absent from ``rowstring`` are
    silently omitted; each value has its leading "label:" prefix and
    surrounding commas/whitespace stripped.

    Fixes vs. the original: uses the builtin ``next()`` (works on
    Python 2.6+ and 3.x, unlike ``iterator.next()``), and returns cleanly
    when ``fields`` has no parseable entries (a leaked StopIteration
    would become RuntimeError under PEP 479).
    """
    it = iter(fields[1:])
    try:
        field = next(it)
    except StopIteration:
        return  # nothing to parse
    start = end = 0
    try:
        while True:
            lastfield = field
            field = next(it)
            if rowstring.find(field) == -1:
                # Field absent from this row: keep the previous boundary.
                field = lastfield
                continue
            end = rowstring.find(field)
            yield lastfield, re.sub('^.*?:', '', rowstring[start:end].strip().strip(',')).strip()
            start = end
    except StopIteration:
        # Ran out of field names: the rest of the row belongs to the last one.
        start = rowstring.find(field)
        yield lastfield, re.sub('^.*?:', '', rowstring[start:].strip().strip(',')).strip()
# Build SSdict: one parsed-field dict per spreadsheet row, keyed by the
# row's title (the "name" column).
for e in entryList:
    entrydict = dict([x for x in parsestring(e['content']['$t'], fields)])
    entrykey = e['title']['$t']
    SSdict[entrykey] = entrydict

pprint(SSdict)
|
from django.contrib.auth import authenticate, login, logout
from django.http import HttpResponseRedirect, HttpResponse
from django.shortcuts import render_to_response, RequestContext, render
from membro_profile.forms import MembroForm, MembroProfileForm, EditProfileForm
from django.contrib.auth.decorators import login_required
from django.contrib.auth.models import User
from django.core.urlresolvers import reverse
from membro_profile.models import MembroProfile
from submissao.models import Submissao
def some_view(request):
    """Report whether the requesting user is authenticated.

    Bug fix: the original condition was inverted -- it answered
    "You are logged in." exactly when the user was NOT authenticated.
    """
    if request.user.is_authenticated():
        return HttpResponse("You are logged in.")
    else:
        return HttpResponse("You are not logged in.")
def register(request):
    """Show (GET) or process (POST) the user + profile registration form."""
    context = RequestContext(request)
    registered = False

    if request.method == 'POST':
        membro_form = MembroForm(data=request.POST)
        membro_profile_form = MembroProfileForm(data=request.POST)
        if membro_form.is_valid() and membro_profile_form.is_valid():
            membro = membro_form.save()
            # Hash the password: the ModelForm saved it as plain text.
            membro.set_password(membro.password)
            membro.save()
            # Delay saving the profile until its user FK is attached.
            membro_profile = membro_profile_form.save(commit=False)
            membro_profile.user = membro
            if 'avatar' in request.FILES:
                membro_profile.picture = request.FILES['avatar']
            membro_profile.save()
            registered = True
        else:
            print (membro_form.errors, membro_profile_form.errors)
    else:
        membro_form = MembroForm()
        membro_profile_form = MembroProfileForm()

    return render_to_response(
        'profile/register.html',
        {'membro_form': membro_form, 'membro_profile_form': membro_profile_form, 'registered': registered},
        context)
def membro_login(request):
    """Authenticate a member; redirect home on success.

    Inactive accounts get a "not yet released" message; bad credentials
    get a generic error (Portuguese user-facing strings kept as-is).
    """
    context = RequestContext(request)
    if request.method == 'POST':
        username = request.POST['username']
        password = request.POST['password']
        membro = authenticate(username=username,password=password)
        if membro:
            if membro.is_active:
                login(request, membro)
                return HttpResponseRedirect('/')
            else:
                return HttpResponse('Sua conta ainda não foi liberada.')
        else:
            print ("Login e senha invalidos: {0}, {1}".format(username, password))
            return HttpResponse("Login ou Senha, Invalidos")
    else:
        return render_to_response('profile/login.html', {}, context)
@login_required
def user_logout(request):
    """Log the current user out and return to the homepage."""
    # Since we know the user is logged in, we can now just log them out.
    logout(request)
    # Take the user back to the homepage.
    return HttpResponseRedirect('/')
@login_required
def profile(request):
    """Render the logged-in member's profile page.

    NOTE(review): .get() raises DoesNotExist before the `if membro`
    check can ever be False, so the error branch looks unreachable --
    confirm intended behavior before relying on it.
    """
    context = RequestContext(request)
    print (context)
    usuario = User.objects.get(username=request.user)
    membro = MembroProfile.objects.get(user=usuario)
    if membro:
        return render_to_response('profile/profile.html', {'m':membro}, context)
    else:
        return HttpResponse('Inscrição não encontrado')
@login_required
def edit_profile(request):
    """Let the logged-in member edit first/last name and CPF.

    The form is pre-filled from the current user; on a valid POST the
    user record is updated and the browser redirected to the profile
    page.
    """
    membro = request.user
    initial_data = {
        'first_name': membro.first_name,
        'last_name': membro.last_name,
        'cpf': membro.membroprofile.cpf,
    }
    form = EditProfileForm(request.POST or None, initial=initial_data)
    if form.is_valid():
        membro.first_name = request.POST['first_name']
        membro.last_name = request.POST['last_name']
        membro.cpf = request.POST['cpf']
        membro.save()
        return HttpResponseRedirect('%s' % (reverse('profile')))
    return render(request, 'profile/editar.html', {"form": form})
def index(request):
    """Landing page: list the member's submissions, or show the login page.

    NOTE(review): the outer string comparison against 'AnonymousUser' and
    the inner is_authenticated() check are redundant with each other --
    the inner else branch looks unreachable; confirm before simplifying.
    """
    context = RequestContext(request)
    print (str(request.user) == 'AnonymousUser')
    if str(request.user) == 'AnonymousUser':
        return render_to_response('profile/login.html', context)
    else:
        # Submissions authored by this member's profile.
        queryset = Submissao.objects.filter(autor_id=request.user.membroprofile.id or None)
        if request.user.is_authenticated():
            membro = MembroProfile.objects.filter(user__username=request.user).latest('user').user
            context["membro"] = membro
            context['lista_resumos'] = queryset
            return render_to_response('profile/index.html', context)
        else:
            return render_to_response('profile/login.html', context)
|
class ImproperlyConfigured(Exception):
    """Raised when required configuration is missing or invalid."""
    pass
class TaskHandlingError(Exception):
    """Raised when a task cannot be handled."""
    pass
|
from datetime import datetime, timedelta
import json
import csv
import pytz
from django.core.exceptions import PermissionDenied, ObjectDoesNotExist
from django.http import Http404, HttpResponseRedirect, HttpResponse
from django.template.response import TemplateResponse
from django.core.urlresolvers import reverse
from django.template import RequestContext
from django.template.loader import render_to_string
from django.contrib.auth.decorators import login_required, permission_required
from django.contrib.auth.models import Group, Permission
from django.shortcuts import render, get_object_or_404
from django.db.models import Q
from django.views.decorators.cache import cache_page
from Map.models import *
from Map import utils, signals
from core.utils import get_config
from POS.models import POS
def require_map_permission(permission=2):
    """Decorator factory: require at least ``permission`` on the map.

    The wrapped view must take ``(request, map_id, ...)``.  The current
    user's access level on the map is looked up; PermissionDenied is
    raised when it is below the required level.

    Improvement: uses functools.wraps instead of hand-copying
    __name__/__doc__/__dict__, which also preserves __module__ and
    __wrapped__ for introspection tools.
    """
    from functools import wraps

    def _dec(view_func):
        @wraps(view_func)
        def _view(request, map_id, *args, **kwargs):
            current_map = get_object_or_404(Map, pk=map_id)
            if current_map.get_permission(request.user) < permission:
                raise PermissionDenied
            return view_func(request, map_id, *args, **kwargs)
        return _view
    return _dec
@login_required
@require_map_permission(permission=1)
def get_map(request, map_id):
    """Render the map page for map_id.

    404s when the map does not exist; the decorator raises
    PermissionDenied when the user may not view it.
    """
    requested_map = get_object_or_404(Map, pk=map_id)
    return TemplateResponse(request, 'map.html', {
        'map': requested_map,
        'access': requested_map.get_permission(request.user),
    })
@login_required
@require_map_permission(permission=1)
def map_checkin(request, map_id):
    """AJAX check-in: return map logs newer than POST['loadtime'] and,
    for trusted in-game browsers, possibly a system-add dialog.

    'loadtime' must be a '%Y-%m-%d %H:%M:%S.%f' timestamp (treated as UTC).
    """
    # Initialize json return dict
    json_values = {}
    current_map = get_object_or_404(Map, pk=map_id)
    # AJAX requests should post a JSON datetime called loadtime
    # back that we use to get recent logs.
    if 'loadtime' not in request.POST:
        return HttpResponse(json.dumps({'error': "No loadtime"}),
                            mimetype="application/json")
    time_string = request.POST['loadtime']
    if time_string == 'null':
        return HttpResponse(json.dumps({'error': "No loadtime"}),
                            mimetype="application/json")
    load_time = datetime.strptime(time_string, "%Y-%m-%d %H:%M:%S.%f")
    # Stored timestamps are UTC-aware; make the parsed value comparable.
    load_time = load_time.replace(tzinfo=pytz.utc)
    if request.is_igb_trusted:
        # Trusted IGB may get back HTML asking to add the pilot's new system.
        dialog_html = _checkin_igb_trusted(request, current_map)
        if dialog_html is not None:
            json_values.update({'dialogHTML': dialog_html})
    log_list = MapLog.objects.filter(timestamp__gt=load_time,
                                     visible=True,
                                     map=current_map)
    log_string = render_to_string('log_div.html', {'logs': log_list})
    json_values.update({'logs': log_string})
    return HttpResponse(json.dumps(json_values), mimetype="application/json")
@login_required
@require_map_permission(permission=1)
def map_refresh(request, map_id):
    """
    Returns an HttpResponse with the updated systemJSON for an asynchronous
    map refresh.  AJAX only.

    When called from the in-game browser and a cached location exists for
    the pilot, the pilot's active-pilot entry is refreshed on that system
    before the JSON is regenerated.

    Cleanup: removed the dead ``result = None`` store (immediately
    overwritten) and the large block of commented-out code.
    """
    if not request.is_ajax():
        raise PermissionDenied
    current_map = get_object_or_404(Map, pk=map_id)
    if request.is_igb:
        char_cache_key = 'char_%s_location' % request.eve_charid
        old_location = cache.get(char_cache_key)
        if old_location:
            my_sys = get_object_or_404(System, pk=old_location[0])
            my_sys.remove_active_pilot(request.eve_charid)
            my_sys.add_active_pilot(request.user.username, request.eve_charid,
                                    request.eve_charname, request.eve_shipname,
                                    request.eve_shiptypename)
    # TODO: update active pilots for systems not cached above.
    result = [
        datetime.strftime(datetime.now(pytz.utc),
                          "%Y-%m-%d %H:%M:%S.%f"),
        utils.MapJSONGenerator(current_map,
                               request.user).get_systems_json()
    ]
    return HttpResponse(json.dumps(result))
def log_movement(oldSys, newSys, charName, shipType, current_map, user):
    """Record a pilot jump from oldSys to newSys on current_map.

    If a wormhole connecting the two systems exists on the map, the
    jumping ship's mass is added to the hole's running total.  A JumpLog
    row is always written.

    Fixes: the bare ``except`` is narrowed to Exception (no longer
    swallows SystemExit/KeyboardInterrupt), ``!= None`` uses ``is not
    None``, the Python-2-only print statement is now a call, and the
    redundant ``save()`` after ``objects.create`` (which already
    persists the row) was dropped.
    """
    try:
        old_ms = current_map.systems.filter(system=oldSys).all()[0]
        new_ms = current_map.systems.filter(system=newSys).all()[0]
        wh = Wormhole.objects.filter(top__in=[old_ms, new_ms],
                                     bottom__in=[old_ms, new_ms]).all()[0]
        # Mass of the ship that just jumped.
        ship_mass = Ship.objects.get(shipname=shipType).shipmass
        if wh.mass_amount is not None:
            wh.mass_amount = wh.mass_amount + ship_mass
        else:
            wh.mass_amount = ship_mass
        wh.save()
    except Exception:
        # No mapped hole between the two systems (or unknown ship type);
        # still record the jump below.
        print("Hole didn't exist yet")
    JumpLog.objects.create(user_id=user.id, char_name=charName,
                           src=oldSys, dest=newSys)
def _checkin_igb_trusted(request, current_map):
    """
    Runs the specific code for the case that the request came from an igb that
    trusts us, returns None if no further action is required, returns a string
    containing the html for a system add dialog if we detect that a new system
    needs to be added
    """
    # XXX possibly where the logging needs to happen
    can_edit = current_map.get_permission(request.user) == 2
    # (system id, char name, ship name, ship type) snapshot for this pilot.
    current_location = (request.eve_systemid, request.eve_charname,
                        request.eve_shipname, request.eve_shiptypename)
    char_cache_key = 'char_%s_location' % request.eve_charid
    old_location = cache.get(char_cache_key)
    result = None
    #print old_location
    if old_location != current_location:
        current_system = get_object_or_404(System, pk=current_location[0])
        if old_location:
            # Pilot moved: clear them from the previous system and log the jump.
            old_system = get_object_or_404(System, pk=old_location[0])
            old_system.remove_active_pilot(request.eve_charid)
            log_movement(old_system, current_system, request.eve_charname, request.eve_shiptypename, current_map, request.user) #XXX vtadd
        current_system.add_active_pilot(request.user.username,
            request.eve_charid, request.eve_charname, request.eve_shipname,
            request.eve_shiptypename)
        request.user.get_profile().update_location(current_system.pk,
            request.eve_charid, request.eve_charname, request.eve_shipname,
            request.eve_shiptypename)
        # Remember the new location for five minutes.
        cache.set(char_cache_key, current_location, 60 * 5)
        #Conditions for the system to be automagically added to the map.
        if (can_edit and
            old_location and
            old_system in current_map
            and current_system not in current_map
            and not _is_moving_from_kspace_to_kspace(old_system, current_system)
            ):
            context = {
                'oldsystem': current_map.systems.filter(
                    system=old_system).all()[0],
                'newsystem': current_system,
                'wormholes': utils.get_possible_wh_types(old_system,
                                                         current_system),
            }
            # Unless the client asked for a silent add, return dialog HTML.
            if request.POST.get('silent', 'false') != 'true':
                result = render_to_string('igb_system_add_dialog.html', context,
                                          context_instance=RequestContext(request))
            else:
                # Silent add: connect via a K162 automatically.
                new_ms = current_map.add_system(request.user, current_system, '',
                                                context['oldsystem'])
                k162_type = WormholeType.objects.get(name="K162")
                new_ms.connect_to(context['oldsystem'], k162_type, k162_type)
                result = 'silent'
    # maybe fixes
    else:
        # Unchanged location: just refresh the cache TTL.
        cache.set(char_cache_key, current_location, 60 * 5)
    return result
def _is_moving_from_kspace_to_kspace(old_system, current_system):
"""
returns whether we are moving through kspace
:param old_system:
:param current_system:
:return:
"""
return old_system.is_kspace() and current_system.is_kspace()
def get_system_context(ms_id, user):
    """Build the template context shared by the system details/menu views.

    Resolves the MapSystem to its concrete KSystem/WSystem object,
    computes scan-staleness and interest flags from the MAP_* config
    values, and gathers active SiteTracker fleets plus cached pilot
    locations.
    """
    map_system = get_object_or_404(MapSystem, pk=ms_id)
    # Access level 2 == edit rights on the owning map.
    if map_system.map.get_permission(user) == 2:
        can_edit = True
    else:
        can_edit = False
    #If map_system represents a k-space system get the relevant KSystem object
    if map_system.system.is_kspace():
        system = map_system.system.ksystem
    else:
        system = map_system.system.wsystem
    # Systems last scanned before this moment get a staleness warning.
    scan_threshold = datetime.now(pytz.utc) - timedelta(
        hours=int(get_config("MAP_SCAN_WARNING", None).value)
    )
    interest_offset = int(get_config("MAP_INTEREST_TIME", None).value)
    interest_threshold = (datetime.now(pytz.utc)
                          - timedelta(minutes=interest_offset))
    scan_warning = system.lastscanned < scan_threshold
    if interest_offset > 0:
        interest = (map_system.interesttime and
                    map_system.interesttime > interest_threshold)
    else:
        # Offset of 0 disables expiry: any interesttime counts as interest.
        interest = map_system.interesttime
    # Include any SiteTracker fleets that are active
    st_fleets = map_system.system.stfleets.filter(ended=None).all()
    locations = cache.get('sys_%s_locations' % map_system.system.pk)
    if not locations:
        locations = {}
    return {'system': system, 'mapsys': map_system,
            'scanwarning': scan_warning, 'isinterest': interest,
            'stfleets': st_fleets, 'locations': locations,
            'can_edit': can_edit}
@login_required
@require_map_permission(permission=2)
def add_system(request, map_id):
    """
    AJAX view to add a system to a current_map. Requires POST containing:
    topMsID: map_system ID of the parent map_system
    bottomSystem: Name of the new system
    topType: WormholeType name of the parent side
    bottomType: WormholeType name of the new side
    timeStatus: Wormhole time status integer value
    massStatus: Wormhole mass status integer value
    topBubbled: 1 if Parent side bubbled
    bottomBubbled: 1 if new side bubbled
    friendlyName: Friendly name for the new map_system
    """
    if not request.is_ajax():
        raise PermissionDenied
    post = request.POST
    try:
        # Resolve all posted references; any miss yields a 400 below.
        current_map = Map.objects.get(pk=map_id)
        top_ms = MapSystem.objects.get(pk=post.get('topMsID'))
        bottom_sys = System.objects.get(name=post.get('bottomSystem'))
        top_type = WormholeType.objects.get(name=post.get('topType'))
        bottom_type = WormholeType.objects.get(name=post.get('bottomType'))
        time_status = int(post.get('timeStatus'))
        mass_status = int(post.get('massStatus'))
        top_bubbled = post.get('topBubbled', '0') != "0"
        bottom_bubbled = post.get('bottomBubbled', '0') != "0"
        # Create the system, then the wormhole joining it to its parent.
        bottom_ms = current_map.add_system(
            request.user, bottom_sys, post.get('friendlyName'), top_ms)
        bottom_ms.connect_to(top_ms, top_type, bottom_type, top_bubbled,
                             bottom_bubbled, time_status, mass_status)
        current_map.clear_caches()
        return HttpResponse()
    except ObjectDoesNotExist:
        return HttpResponse(status=400)
@login_required
@require_map_permission(permission=2)
def remove_system(request, map_id, ms_id):
    """
    Remove the supplied map_system from its map.
    """
    get_object_or_404(MapSystem, pk=ms_id).remove_system(request.user)
    return HttpResponse()
@login_required
@require_map_permission(permission=1)
def system_details(request, map_id, ms_id):
    """
    Render the details div for the System given by ms_id on map map_id.
    AJAX only.
    """
    if not request.is_ajax():
        raise PermissionDenied
    context = get_system_context(ms_id, request.user)
    return render(request, 'system_details.html', context)
@login_required
@require_map_permission(permission=1)
def system_menu(request, map_id, ms_id):
    """
    Render the context menu for the System given by ms_id.  AJAX only.
    """
    if not request.is_ajax():
        raise PermissionDenied
    context = get_system_context(ms_id, request.user)
    return render(request, 'system_menu.html', context)
@login_required
@require_map_permission(permission=1)
def system_tooltips(request, map_id):
    """
    Return the rendered system tooltips for map_id, cached for 60s.
    AJAX only.
    """
    if not request.is_ajax():
        raise PermissionDenied
    cache_key = 'map_%s_sys_tooltip' % map_id
    tips = cache.get(cache_key)
    if tips:
        return HttpResponse(tips)
    ms_list = MapSystem.objects.filter(map_id=map_id)\
        .select_related('parent_wormhole', 'system__region')\
        .iterator()
    tips = render_to_string('system_tooltip.html',
                            {'map_systems': ms_list}, RequestContext(request))
    cache.set(cache_key, tips, 60)
    return HttpResponse(tips)
@login_required
@require_map_permission(permission=1)
def wormhole_tooltips(request, map_id):
    """
    Return the rendered wormhole tooltips for map_id, cached for 60s.
    AJAX only.
    """
    if not request.is_ajax():
        raise PermissionDenied
    cache_key = 'map_%s_wh_tooltip' % map_id
    tips = cache.get(cache_key)
    if tips:
        return HttpResponse(tips)
    cur_map = get_object_or_404(Map, pk=map_id)
    ms_list = MapSystem.objects.filter(map=cur_map).all()
    whs = Wormhole.objects.filter(top__in=ms_list).all()
    tips = render_to_string('wormhole_tooltip.html',
                            {'wormholes': whs}, RequestContext(request))
    cache.set(cache_key, tips, 60)
    return HttpResponse(tips)
@login_required()
@require_map_permission(permission=2)
def collapse_system(request, map_id, ms_id):
    """
    Flag the parent wormhole of this map system as collapsed.  AJAX only.
    """
    if not request.is_ajax():
        raise PermissionDenied
    parent_wh = get_object_or_404(MapSystem, pk=ms_id).parent_wormhole
    parent_wh.collapsed = True
    parent_wh.save()
    return HttpResponse()
@login_required()
@require_map_permission(permission=2)
def resurrect_system(request, map_id, ms_id):
    """
    Clear the collapsed flag on this map system's parent wormhole.
    AJAX only.
    """
    if not request.is_ajax():
        raise PermissionDenied
    parent_wh = get_object_or_404(MapSystem, pk=ms_id).parent_wormhole
    parent_wh.collapsed = False
    parent_wh.save()
    return HttpResponse()
@login_required()
@require_map_permission(permission=2)
def mark_scanned(request, map_id, ms_id):
    """Mark the system behind ms_id as freshly scanned.  AJAX POST only."""
    if not request.is_ajax():
        raise PermissionDenied
    map_system = get_object_or_404(MapSystem, pk=ms_id)
    map_system.system.lastscanned = datetime.now(pytz.utc)
    map_system.system.save()
    return HttpResponse()
@login_required()
def manual_location(request, map_id, ms_id):
    """Manually mark the user as active in the given mapped system.

    Clears any cached previous location first, then registers the user
    in the new system and refreshes the map caches.  AJAX POST only.
    """
    if not request.is_ajax():
        raise PermissionDenied
    user_locations = cache.get('user_%s_locations' % request.user.pk)
    if user_locations:
        previous = user_locations.pop(request.user.pk, None)
        if previous:
            old_sys = get_object_or_404(System, pk=previous[0])
            old_sys.remove_active_pilot(request.user.pk)
    map_sys = get_object_or_404(MapSystem, pk=ms_id)
    map_sys.system.add_active_pilot(request.user.username, request.user.pk,
                                    'OOG Browser', 'Unknown', 'Unknown')
    request.user.get_profile().update_location(map_sys.system.pk, request.user.pk,
                                               'OOG Browser', 'Unknown', 'Unknown')
    map_sys.map.clear_caches()
    return HttpResponse()
@login_required()
@require_map_permission(permission=2)
def set_interest(request, map_id, ms_id):
    """Set or clear the interest timestamp on a map system.  AJAX POST.

    POST 'action' must be "set" or "remove"; missing/"none" yields 404,
    any other value the original 418 response.

    Bug fix: the original returned from the "set"/"remove" branches
    before reaching ``system.map.clear_caches()``, so caches were never
    invalidated after a change; the caches are now cleared on every
    successful path (and still on the unknown-action path, as before).
    """
    if not request.is_ajax():
        raise PermissionDenied
    action = request.POST.get("action", "none")
    if action == "none":
        raise Http404
    system = get_object_or_404(MapSystem, pk=ms_id)
    if action == "set":
        system.interesttime = datetime.now(pytz.utc)
    elif action == "remove":
        system.interesttime = None
    else:
        # Unrecognized action: keep the original behavior (caches cleared,
        # teapot status returned).
        system.map.clear_caches()
        return HttpResponse(status=418)
    system.save()
    system.map.clear_caches()
    return HttpResponse()
def _update_sig_from_tsv(signature, row):
COL_SIG = 0
COL_SIG_TYPE = 3
COL_SIG_GROUP = 2
COL_SIG_SCAN_GROUP = 1
COL_SIG_STRENGTH = 4
COL_DISTANCE = 5
info = row[COL_SIG_TYPE]
updated = False
sig_type = None
if (row[COL_SIG_SCAN_GROUP] == "Cosmic Signature"
or row[COL_SIG_SCAN_GROUP] == "Cosmic Anomaly"
):
try:
sig_type = SignatureType.objects.get(
longname=row[COL_SIG_GROUP])
except:
sig_type = None
else:
sig_type = None
if sig_type:
updated = True
if sig_type:
signature.sigtype = sig_type
signature.updated = updated or signature.updated
if info:
signature.info = info
if signature.info == None:
signature.info = ''
return signature
@login_required
@require_map_permission(permission=2)
def bulk_sig_import(request, map_id, ms_id):
    """
    GET gets a bulk signature import form. POST processes it, creating sigs
    with blank info and type for each sig ID detected.

    The paste is parsed as tab-separated scanner output; at most 75 rows
    are imported per request.
    """
    if not request.is_ajax():
        raise PermissionDenied
    map_system = get_object_or_404(MapSystem, pk=ms_id)
    # Count of signatures imported so far (capped at 75 below).
    k = 0
    if request.method == 'POST':
        # NOTE(review): str.decode('utf-8') is Python-2 only -- revisit
        # when porting to Python 3.
        reader = csv.reader(request.POST.get('paste', '').decode(
            'utf-8').splitlines(), delimiter="\t")
        COL_SIG = 0
        COL_STRENGTH = 4
        for row in reader:
            # To prevent pasting of POSes into the sig importer, make sure
            # the strength column is present
            try:
                test_var = row[COL_STRENGTH]
            except IndexError:
                return HttpResponse('A valid signature paste was not found',
                                    status=400)
            if k < 75:
                sig_id = utils.convert_signature_id(row[COL_SIG])
                # Reuse an existing row for this sig ID when present.
                sig = Signature.objects.get_or_create(sigid=sig_id,
                                                      system=map_system.system)[0]
                sig = _update_sig_from_tsv(sig, row)
                sig.modified_by = request.user
                sig.save()
                signals.signature_update.send_robust(sig, user=request.user,
                                                     map=map_system.map,
                                                     signal_strength=row[COL_STRENGTH])
                k += 1
        map_system.map.add_log(request.user,
                               "Imported %s signatures for %s(%s)."
                               % (k, map_system.system.name,
                                  map_system.friendlyname), True)
        map_system.system.lastscanned = datetime.now(pytz.utc)
        map_system.system.save()
        return HttpResponse()
    else:
        return TemplateResponse(request, "bulk_sig_form.html",
                                {'mapsys': map_system})
@login_required
@require_map_permission(permission=2)
def toggle_sig_owner(request, map_id, ms_id, sig_id=None):
    """Toggle ownership of a signature for the requesting user.  AJAX only."""
    if not request.is_ajax():
        raise PermissionDenied
    get_object_or_404(Signature, pk=sig_id).toggle_ownership(request.user)
    return HttpResponse()
@login_required
@require_map_permission(permission=1)
def edit_signature(request, map_id, ms_id, sig_id=None):
    """
    GET gets a pre-filled edit signature form.
    POST updates the signature with the new information and returns a
    blank add form.

    With sig_id=None this acts as the "add signature" endpoint; with a
    sig_id it edits (temporarily taking ownership of) that signature.
    """
    if not request.is_ajax():
        raise PermissionDenied
    map_system = get_object_or_404(MapSystem, pk=ms_id)
    # If the user can't edit signatures, return a blank response
    if map_system.map.get_permission(request.user) != 2:
        return HttpResponse()
    action = None
    if sig_id != None:
        signature = get_object_or_404(Signature, pk=sig_id)
        created = False
        # Claim the signature while editing so others see it is in use.
        if not signature.owned_by:
            signature.toggle_ownership(request.user)
    if request.method == 'POST':
        form = SignatureForm(request.POST)
        if form.is_valid():
            ingame_id = utils.convert_signature_id(form.cleaned_data['sigid'])
            if sig_id == None:
                # Adding: reuse an existing row for this sig ID if present.
                signature, created = Signature.objects.get_or_create(
                    system=map_system.system, sigid=ingame_id)
            signature.sigid = ingame_id
            signature.updated = True
            signature.info = form.cleaned_data['info']
            if request.POST['sigtype'] != '':
                sigtype = form.cleaned_data['sigtype']
            else:
                sigtype = None
            signature.sigtype = sigtype
            signature.modified_by = request.user
            signature.save()
            map_system.system.lastscanned = datetime.now(pytz.utc)
            map_system.system.save()
            if created:
                action = 'Created'
            else:
                action = 'Updated'
            # Release the ownership taken above now that the edit is done.
            if signature.owned_by:
                signature.toggle_ownership(request.user)
            map_system.map.add_log(request.user,
                                   "%s signature %s in %s (%s)" %
                                   (action, signature.sigid, map_system.system.name,
                                    map_system.friendlyname))
            signals.signature_update.send_robust(signature, user=request.user,
                                                 map=map_system.map)
        else:
            # Invalid form: re-render the edit form with the errors attached.
            return TemplateResponse(request, "edit_sig_form.html",
                                    {'form': form,
                                     'system': map_system, 'sig': signature})
    form = SignatureForm()
    if sig_id == None or action == 'Updated':
        return TemplateResponse(request, "add_sig_form.html",
                                {'form': form, 'system': map_system})
    else:
        return TemplateResponse(request, "edit_sig_form.html",
                                {'form': SignatureForm(instance=signature),
                                 'system': map_system, 'sig': signature})
@login_required()
@require_map_permission(permission=1)
@cache_page(1)
def get_signature_list(request, map_id, ms_id):
    """
    Render system_signatures.html with the configured escalation
    downtime threshold.  AJAX only.
    """
    if not request.is_ajax():
        raise PermissionDenied
    map_system = get_object_or_404(MapSystem, pk=ms_id)
    downtimes = int(get_config("MAP_ESCALATION_BURN", request.user).value)
    return TemplateResponse(request, "system_signatures.html",
                            {'system': map_system,
                             'downtimes': downtimes})
@login_required
@require_map_permission(permission=2)
def mark_signature_cleared(request, map_id, ms_id, sig_id):
    """
    Mark a signature as having its NPCs cleared.  AJAX only.
    """
    if not request.is_ajax():
        raise PermissionDenied
    get_object_or_404(Signature, pk=sig_id).clear_rats()
    return HttpResponse()
@login_required
@require_map_permission(permission=2)
def escalate_site(request, map_id, ms_id, sig_id):
    """
    Mark a site as having been escalated.  AJAX only.
    """
    if not request.is_ajax():
        raise PermissionDenied
    get_object_or_404(Signature, pk=sig_id).escalate()
    return HttpResponse()
@login_required
@require_map_permission(permission=2)
def activate_signature(request, map_id, ms_id, sig_id):
    """
    Mark a site as activated.  AJAX only.
    """
    if not request.is_ajax():
        raise PermissionDenied
    get_object_or_404(Signature, pk=sig_id).activate()
    return HttpResponse()
@login_required
@require_map_permission(permission=2)
def delete_signature(request, map_id, ms_id, sig_id):
    """
    Delete a signature and log the removal on the map.  AJAX only.
    """
    if not request.is_ajax():
        raise PermissionDenied
    sig = get_object_or_404(Signature, pk=sig_id)
    map_system = get_object_or_404(MapSystem, pk=ms_id)
    log_message = "Deleted signature %s in %s (%s)." % (
        sig.sigid, map_system.system.name, map_system.friendlyname)
    sig.delete()
    map_system.map.add_log(request.user, log_message)
    return HttpResponse()
@login_required
@require_map_permission(permission=2)
def manual_add_system(request, map_id, ms_id):
    """
    Render a blank add-system form with the given MapSystem as the top
    system; the form POSTs to the add_system view.
    """
    if request.is_igb_trusted:
        # Pre-select the pilot's current system when the IGB trusts us.
        current_system = System.objects.get(name=request.eve_systemname)
    else:
        current_system = ""
    context = {
        'topMs': get_object_or_404(MapSystem, pk=ms_id),
        'sysList': System.objects.all(),
        'whList': WormholeType.objects.all(),
        'newsystem': current_system,
    }
    return render(request, 'add_system_box.html', context)
@login_required
@require_map_permission(permission=2)
def edit_system(request, map_id, ms_id):
    """
    A GET request gets the edit system dialog pre-filled with current
    information; a POST request saves friendlyName, info, and occupied.
    AJAX only.
    """
    if not request.is_ajax():
        raise PermissionDenied
    map_system = get_object_or_404(MapSystem, pk=ms_id)
    if request.method == 'GET':
        # Stored values use <br /> for line breaks; show newlines instead.
        return TemplateResponse(request, 'edit_system.html', {
            'mapsys': map_system,
            'occupied': map_system.system.occupied.replace("<br />", "\n"),
            'info': map_system.system.info.replace("<br />", "\n"),
        })
    if request.method == 'POST':
        map_system.friendlyname = request.POST.get('friendlyName', '')
        new_info = request.POST.get('info', '')
        new_occupied = request.POST.get('occupied', '')
        # Only touch the shared System row when something really changed.
        if (map_system.system.info != new_info
                or map_system.system.occupied != new_occupied):
            map_system.system.info = new_info
            map_system.system.occupied = new_occupied
            map_system.system.save()
        map_system.save()
        map_system.map.add_log(request.user, "Edited System: %s (%s)"
                               % (map_system.system.name,
                                  map_system.friendlyname))
        return HttpResponse()
    raise PermissionDenied
@login_required
@require_map_permission(permission=2)
def edit_wormhole(request, map_id, wh_id):
    """
    A GET request gets the edit wormhole dialog pre-filled with current info.
    A POST request saves the posted data as the new info.
    POST values are topType, bottomType, massStatus, timeStatus, topBubbled,
    bottomBubbled, and optionally massAdd (a ship name whose mass is added
    to the hole's running total).

    Bug fixes: the final ``raise PermissiondDenied`` (typo) would itself
    crash with NameError; and a missing 'massAdd' key defaulted to the
    int 0, which passed the ``!= ""`` guard and triggered a bogus Ship
    lookup -- the default is now the empty string.
    """
    if not request.is_ajax():
        raise PermissionDenied
    wormhole = get_object_or_404(Wormhole, pk=wh_id)
    if request.method == 'GET':
        return TemplateResponse(request, 'edit_wormhole.html',
                                {'wormhole': wormhole}
                                )
    if request.method == 'POST':
        manual_ship = request.POST.get('massAdd', '')
        if manual_ship != "":
            # Add the named ship's mass to the hole's total.
            added_mass = Ship.objects.get(shipname=manual_ship).shipmass
            wormhole.mass_amount = (wormhole.mass_amount + added_mass)
        wormhole.mass_status = int(request.POST.get('massStatus', 0))
        wormhole.time_status = int(request.POST.get('timeStatus', 0))
        wormhole.top_type = get_object_or_404(
            WormholeType,
            name=request.POST.get('topType', 'K162')
        )
        wormhole.bottom_type = get_object_or_404(
            WormholeType,
            name=request.POST.get('bottomType', 'K162')
        )
        wormhole.top_bubbled = request.POST.get('topBubbled', '1') == '1'
        wormhole.bottom_bubbled = request.POST.get('bottomBubbled', '1') == '1'
        wormhole.save()
        wormhole.map.add_log(request.user,
                             ("Updated the wormhole between %s(%s) and %s(%s)."
                              % (wormhole.top.system.name,
                                 wormhole.top.friendlyname,
                                 wormhole.bottom.system.name,
                                 wormhole.bottom.friendlyname)))
        return HttpResponse()
    raise PermissionDenied
@permission_required('Map.add_map')
def create_map(request):
    """
    Create a new map from a posted MapForm and redirect to it; on GET
    (or an invalid POST) render the new-map form.

    Bug fix: the GET branch assigned the MapForm *class* instead of an
    instance (``form = MapForm`` was missing parentheses); the template
    now gets a form instance, matching the POST branch.
    """
    if request.method == 'POST':
        form = MapForm(request.POST)
        if form.is_valid():
            new_map = form.save()
            new_map.add_log(request.user, "Created the %s map." % new_map.name)
            # Seed the new map with its root system.
            new_map.add_system(request.user, new_map.root, "Root", None)
            return HttpResponseRedirect(reverse('Map.views.get_map',
                                                kwargs={'map_id': new_map.pk}))
        return TemplateResponse(request, 'new_map.html', {'form': form})
    form = MapForm()
    return TemplateResponse(request, 'new_map.html', {'form': form, })
def _sort_destinations(destinations):
"""
Takes a list of destination tuples and returns the same list, sorted in order of the jumps.
"""
results = []
onVal = 0
for dest in destinations:
if len(results) == 0:
results.append(dest)
else:
while onVal <= len(results):
if onVal == len(results):
results.append(dest)
onVal = 0
break
else:
if dest[1] > results[onVal][1]:
onVal += 1
else:
results.insert(onVal, dest)
onVal = 0
break
return results
@require_map_permission(permission=1)
def destination_list(request, map_id, ms_id):
    """
    Returns the destinations-of-interest tuples for K-space systems and
    a blank response for w-space systems.  AJAX only.
    """
    if not request.is_ajax():
        raise PermissionDenied
    destinations = Destination.objects.filter(Q(user=None) |
                                              Q(user=request.user))
    map_system = get_object_or_404(MapSystem, pk=ms_id)
    try:
        # W-space systems have no KSystem row and fall into the except.
        system = KSystem.objects.get(pk=map_system.system.pk)
        rf = utils.RouteFinder()
        result = [
            (dest.system,
             rf.route_length(system, dest.system) - 1,
             round(rf.ly_distance(system, dest.system), 3))
            for dest in destinations
        ]
    except ObjectDoesNotExist:
        return HttpResponse()
    return render(request, 'system_destinations.html',
                  {'system': system, 'destinations': _sort_destinations(result)})
def site_spawns(request, map_id, ms_id, sig_id):
    """
    Return the rendered spawn list for a signature's site type.

    When the generic spawn entry is class-specific (sysclass != 0) the
    query is narrowed to the signature's system class.

    Bug fix: guard against an empty queryset before indexing spawns[0],
    which previously raised IndexError for sig types with no spawns.
    """
    sig = get_object_or_404(Signature, pk=sig_id)
    spawns = SiteSpawn.objects.filter(sigtype=sig.sigtype).all()
    if spawns and spawns[0].sysclass != 0:
        spawns = SiteSpawn.objects.filter(sigtype=sig.sigtype,
                                          sysclass=sig.system.sysclass).all()
    return render(request, 'site_spawns.html', {'spawns': spawns})
@permission_required('Map.map_admin')
def general_settings(request):
    """
    Returns and processes the general settings section.

    GET renders the current config values; POST writes the posted
    integers back into the corresponding config entries.
    """
    # POST/template key -> config entry.
    settings_map = {
        'npcthreshold': get_config("MAP_NPC_THRESHOLD", None),
        'pvpthreshold': get_config("MAP_PVP_THRESHOLD", None),
        'scanwarn': get_config("MAP_SCAN_WARNING", None),
        'interesttimeout': get_config("MAP_INTEREST_TIME", None),
        'escdowntimes': get_config("MAP_ESCALATION_BURN", None),
    }
    if request.method == "POST":
        # Set all values first, then save, so a bad/missing field fails
        # before anything is persisted.
        for post_key, config in settings_map.items():
            config.value = int(request.POST[post_key])
        for config in settings_map.values():
            config.save()
        return HttpResponse()
    context = dict((key, config.value)
                   for key, config in settings_map.items())
    return TemplateResponse(request, 'general_settings.html', context)
@permission_required('Map.map_admin')
def sites_settings(request):
    """
    Render the site-spawns settings section.
    """
    spawn_list = SiteSpawn.objects.all()
    return TemplateResponse(request, 'spawns_settings.html',
                            {'spawns': spawn_list})
@permission_required('Map.map_admin')
def add_spawns(request):
    """
    Adds a site spawn.

    NOTE(review): unimplemented stub -- returns an empty 200 response.
    """
    return HttpResponse()
@permission_required('Map.map_admin')
def delete_spawns(request, spawn_id):
    """
    Deletes a site spawn.

    NOTE(review): unimplemented stub -- returns an empty 200 response.
    """
    return HttpResponse()
@permission_required('Map.map_admin')
def edit_spawns(request, spawn_id):
    """
    Alters a site spawn.

    NOTE(review): unimplemented stub -- returns an empty 200 response.
    """
    return HttpResponse()
def destination_settings(request, user=None):
    """
    Render the destinations section.

    Without a user context only the global (user=None) destinations are
    listed; with one, the requesting user's personal destinations are
    included too.
    """
    if user:
        dest_list = Destination.objects.filter(Q(user=None) |
                                               Q(user=request.user))
    else:
        dest_list = Destination.objects.filter(user=None)
    return TemplateResponse(request, 'dest_settings.html',
                            {'destinations': dest_list,
                             'user_context': user})
def add_destination(request, dest_user=None):
    """
    Add a destination (global unless dest_user is given).

    Only map admins may create global destinations.
    """
    if not dest_user and not request.user.has_perm('Map.map_admin'):
        raise PermissionDenied
    target = get_object_or_404(KSystem, name=request.POST['systemName'])
    Destination(system=target, user=dest_user).save()
    return HttpResponse()
def add_personal_destination(request):
    """
    Add a destination owned by the requesting user.
    """
    return add_destination(request, dest_user=request.user)
def delete_destination(request, dest_id):
    """
    Delete a destination.

    Global destinations require the map_admin permission; personal ones
    may only be deleted by their owner.
    """
    destination = get_object_or_404(Destination, pk=dest_id)
    is_global = not destination.user
    if is_global and not request.user.has_perm('Map.map_admin'):
        raise PermissionDenied
    if not is_global and not request.user == destination.user:
        raise PermissionDenied
    destination.delete()
    return HttpResponse()
@permission_required('Map.map_admin')
def sigtype_settings(request):
    """
    Render the signature-types settings section.
    """
    sigtype_list = SignatureType.objects.all()
    return TemplateResponse(request, 'sigtype_settings.html',
                            {'sigtypes': sigtype_list})
@permission_required('Map.map_admin')
def edit_sigtype(request, sigtype_id):
    """
    Alters a signature type.

    NOTE(review): unimplemented stub -- returns an empty 200 response.
    """
    return HttpResponse()
@permission_required('Map.map_admin')
def add_sigtype(request):
    """
    Adds a signature type.

    NOTE(review): unimplemented stub -- returns an empty 200 response.
    """
    return HttpResponse()
@permission_required('Map.map_admin')
def delete_sigtype(request, sigtype_id):
    """
    Deletes a signature type.

    NOTE(review): unimplemented stub -- returns an empty 200 response.
    """
    return HttpResponse()
@permission_required('Map.map_admin')
def map_settings(request, map_id):
    """
    Returns and processes the settings section for a map.

    POST updates the map's name / explicit-permissions flag and rewrites
    the per-group MapPermission rows from the posted values; the page is
    then re-rendered with the saved flag set.
    """
    saved = False
    subject = get_object_or_404(Map, pk=map_id)
    if request.method == 'POST':
        name = request.POST.get('name', None)
        explicit_perms = request.POST.get('explicitperms', False)
        if not name:
            return HttpResponse('The map name cannot be blank', status=400)
        subject.name = name
        subject.explicitperms = explicit_perms
        for group in Group.objects.all():
            # Replace any existing permission row for this group.
            MapPermission.objects.filter(group=group, map=subject).delete()
            setting = request.POST.get('map-%s-group-%s-permission' % (
                subject.pk, group.pk), 0)
            # NOTE(review): POST values are strings, so a posted '0' is
            # not equal to int 0 and would still create a row -- confirm
            # the template never posts '0' for "no access".
            if setting != 0:
                MapPermission(group=group, map=subject, access=setting).save()
        subject.save()
        saved = True
    groups = []
    for group in Group.objects.all():
        if MapPermission.objects.filter(map=subject, group=group).exists():
            perm = MapPermission.objects.get(map=subject, group=group).access
        else:
            perm = 0
        groups.append((group,perm))
    return TemplateResponse(request, 'map_settings_single.html',
                            {'map': subject, 'groups': groups, 'saved': saved})
@permission_required('Map.map_admin')
def delete_map(request, map_id):
    """
    Deletes the map identified by ``map_id``.
    """
    target = get_object_or_404(Map, pk=map_id)
    target.delete()
    return HttpResponse()
@permission_required('Map.map_admin')
def edit_map(request, map_id):
    """
    Alters a map.
    """
    # TODO: stub — performs no changes; returns an empty JSON array.
    return HttpResponse('[]')
@permission_required('Map.map_admin')
def global_permissions(request):
    """
    Renders and processes the global (non-map-specific) permission grid.

    POST synchronizes each group's map_unrestricted, add_map and
    map_admin permissions with the submitted checkboxes; GET renders
    the grid template. AJAX only.
    """
    if not request.is_ajax():
        raise PermissionDenied
    admin_perm = Permission.objects.get(codename="map_admin")
    unrestricted_perm = Permission.objects.get(codename="map_unrestricted")
    add_map_perm = Permission.objects.get(codename="add_map")
    # (form-field suffix, permission) pairs, processed in the same
    # order as the original per-permission branches.
    toggles = (
        ('unrestricted', unrestricted_perm),
        ('add', add_map_perm),
        ('admin', admin_perm),
    )
    if request.method == "POST":
        for group in Group.objects.all():
            for suffix, perm in toggles:
                wanted = request.POST.get('%s_%s' % (group.pk, suffix), None)
                if wanted:
                    if perm not in group.permissions.all():
                        group.permissions.add(perm)
                elif perm in group.permissions.all():
                    group.permissions.remove(perm)
        return HttpResponse()
    group_list = []
    for group in Group.objects.all():
        group_list.append({
            'group': group,
            'admin': admin_perm in group.permissions.all(),
            'unrestricted': unrestricted_perm in group.permissions.all(),
            'add_map': add_map_perm in group.permissions.all(),
        })
    return TemplateResponse(request, 'global_perms.html',
                            {'groups': group_list})
@require_map_permission(permission=2)
def purge_signatures(request, map_id, ms_id):
    """
    Deletes every signature attached to the given map system.
    AJAX POST only; any other method gets a 400.
    """
    if not request.is_ajax():
        raise PermissionDenied
    mapsys = get_object_or_404(MapSystem, pk=ms_id)
    if request.method != "POST":
        return HttpResponse(status=400)
    mapsys.system.signatures.all().delete()
    return HttpResponse()
|
from datetime import datetime
import uuid
class Torrent(object):
    """
    A single torrent search result.

    All fields default to None and are filled in by whatever scraper
    produced the result; ``uuid`` is a unique hex id for this object.
    """
    def __init__(self):
        self.tracker = None          # name of the source tracker
        self.url = None              # result page URL
        self.title = None
        self.magnet = None           # magnet link, if any
        self.seeders = None
        self.leechers = None
        self.size = None             # size in bytes
        self.date = None             # upload time as a datetime
        self.details = None
        self.uuid = uuid.uuid4().hex # unique id for bookkeeping
        self._remove = False         # internal flag: marked for removal

    @property
    def human_age(self):
        """Age in whole days as a string, or "Unknown" if no date is set."""
        if self.date:
            age = datetime.now() - self.date
            return "%s days" % (int(age.total_seconds() / (60 * 60 * 24)))
        else:
            return "Unknown"

    @property
    def human_size(self):
        """Size formatted as GB/MB/KB, or "Unknown" if no size is set.

        Fix: previously returned None for an unset/zero size, which made
        ``__unicode__`` print "None"; now mirrors ``human_age``.
        """
        if not self.size:
            return "Unknown"
        if self.size > 1000000000:
            return "%.2f GB" % (self.size / 1000000000)
        elif self.size > 1000000:
            return "%.2f MB" % (self.size / 1000000)
        else:
            return "%s KB" % (self.size / 1000)

    @property
    def html_friendly_title(self):
        """Title with zero-width spaces (U+200B) inserted next to dots and
        brackets so browsers can wrap long release names."""
        return (self.title
                .replace('.', '.\u200b')
                .replace('[', '\u200b[')
                .replace(']', ']\u200b'))

    def __unicode__(self):
        # Fixed-width, human-readable one-line summary.
        return "%s Size: %s Seeders: %s Age: %s %s" % (self.title.ljust(60)[0:60], str(self.human_size).ljust(12),
                                                       str(self.seeders).ljust(6), self.human_age,
                                                       self.tracker)

    def __str__(self):
        return self.__unicode__()
|
import math
import os
import re
import itertools
from types import LambdaType
import pkg_resources
import numpy
from PyQt4 import QtGui, QtCore, QtWebKit
from PyQt4.QtCore import Qt, pyqtSignal as Signal
from PyQt4.QtGui import QCursor, QApplication
import Orange.data
from Orange.widgets.utils import getdeepattr
from Orange.data import ContinuousVariable, StringVariable, DiscreteVariable, Variable
from Orange.widgets.utils import vartype
from Orange.widgets.utils.constants import CONTROLLED_ATTRIBUTES, ATTRIBUTE_CONTROLLERS
from Orange.util import namegen
# Convenience tuples mapping a boolean index to "No"/"Yes" labels.
YesNo = NoYes = ("No", "Yes")
# Lazily-loaded icon for the spin-box "commit" (enter) button; see _enterButton.
_enter_icon = None
# Matches %(name)s-style substitutions in label format strings, skipping
# escaped "%%" sequences; the attribute name is captured as group "value".
__re_label = re.compile(r"(^|[^%])%\((?P<value>[a-zA-Z]\w*)\)")
# Generator of unique Qt item-data roles, starting at Qt.UserRole.
OrangeUserRole = itertools.count(Qt.UserRole)
# Generator of unique names for dynamically created lambda callbacks.
LAMBDA_NAME = namegen('_lambda_')
def resource_filename(path):
    """
    Return a resource filename (package data) for path.

    ``path`` is resolved relative to this module's package via
    pkg_resources.
    """
    return pkg_resources.resource_filename(__name__, path)
class TableWidget(QtGui.QTableWidget):
    """ An easy to use, row-oriented table widget """
    # Data role under which per-row user data is stored (on column 0's item).
    ROW_DATA_ROLE = QtCore.Qt.UserRole + 1
    # Data role under which per-cell user data is stored.
    ITEM_DATA_ROLE = ROW_DATA_ROLE + 1
    class TableWidgetNumericItem(QtGui.QTableWidgetItem):
        """TableWidgetItem that sorts numbers correctly!"""
        def __lt__(self, other):
            # Compare the raw stored data, not the display strings.
            return (self.data(TableWidget.ITEM_DATA_ROLE) <
                    other.data(TableWidget.ITEM_DATA_ROLE))
    def selectionChanged(self, selected:[QtGui.QItemSelectionRange], deselected:[QtGui.QItemSelectionRange]):
        """Override or monkey-patch this method to catch selection changes"""
        super().selectionChanged(selected, deselected)
    def __setattr__(self, attr, value):
        """
        The following selectionChanged magic ensures selectionChanged
        slot, when monkey-patched, always calls the super's selectionChanged
        first (--> avoids Qt quirks), and the user needs not care about that.
        """
        if attr == 'selectionChanged':
            func = value
            @QtCore.pyqtSlot(QtGui.QItemSelection, QtGui.QItemSelection)
            def _f(selected, deselected):
                super(self.__class__, self).selectionChanged(selected, deselected)
                func(selected, deselected)
            value = _f
        self.__dict__[attr] = value
    def _update_headers(func):
        """Decorator to update certain table features after method calls"""
        def _f(self, *args, **kwargs):
            func(self, *args, **kwargs)
            if self.col_labels is not None:
                self.setHorizontalHeaderLabels(self.col_labels)
            if self.row_labels is not None:
                self.setVerticalHeaderLabels(self.row_labels)
            if self.stretch_last_section:
                self.horizontalHeader().setStretchLastSection(True)
        return _f
    @_update_headers
    def __init__(self,
                 parent=None,
                 col_labels=None,
                 row_labels=None,
                 stretch_last_section=True,
                 multi_selection=False,
                 select_rows=False):
        """
        Parameters
        ----------
        parent: QObject
            Parent QObject. If parent has layout(), this widget is added to it.
        col_labels: list of str
            Labels or [] (sequential numbers) or None (no horizontal header)
        row_label: list_of_str
            Labels or [] (sequential numbers) or None (no vertical header)
        stretch_last_section: bool
        multi_selection: bool
            Single selection if False
        select_rows: bool
            If True, select whole rows instead of individual cells.
        """
        super().__init__(parent)
        self._column_filter = {}
        self.col_labels = col_labels
        self.row_labels = row_labels
        self.stretch_last_section = stretch_last_section
        # Best-effort: add self to the parent's layout if it has one.
        try: parent.layout().addWidget(self)
        except (AttributeError, TypeError): pass
        if col_labels is None:
            self.horizontalHeader().setVisible(False)
        if row_labels is None:
            self.verticalHeader().setVisible(False)
        if multi_selection:
            self.setSelectionMode(self.MultiSelection)
        if select_rows:
            self.setSelectionBehavior(self.SelectRows)
        self.setHorizontalScrollMode(self.ScrollPerPixel)
        self.setVerticalScrollMode(self.ScrollPerPixel)
        self.setEditTriggers(self.NoEditTriggers)
        self.setAlternatingRowColors(True)
        self.setShowGrid(False)
        self.setSortingEnabled(True)
    @_update_headers
    def addRow(self, items:tuple, data=None):
        """
        Appends iterable of `items` as the next row, optionally setting row
        data to `data`. Each item of `items` can be a string or tuple
        (item_name, item_data) if individual, cell-data is required.
        """
        row_data = data
        row = self.rowCount()
        self.insertRow(row)
        # Grow the column count if this row is wider than any before it.
        col_count = max(len(items), self.columnCount())
        if col_count != self.columnCount():
            self.setColumnCount(col_count)
        for col, item_data in enumerate(items):
            if isinstance(item_data, str):
                name = item_data
            elif hasattr(item_data, '__iter__') and len(item_data) == 2:
                name, item_data = item_data
            elif isinstance(item_data, float):
                name = '{:.4f}'.format(item_data)
            else:
                name = str(item_data)
            # Numeric cells get the numerically-sorting item subclass.
            if isinstance(item_data, (float, int, numpy.number)):
                item = self.TableWidgetNumericItem(name)
            else:
                item = QtGui.QTableWidgetItem(name)
            item.setData(self.ITEM_DATA_ROLE, item_data)
            # Per-column filter may replace (or decorate) the item.
            if col in self._column_filter:
                item = self._column_filter[col](item) or item
            self.setItem(row, col, item)
        self.resizeColumnsToContents()
        self.resizeRowsToContents()
        if row_data is not None:
            self.setRowData(row, row_data)
    def rowData(self, row:int):
        """Return the user data stored for `row` (see setRowData)."""
        return self.item(row, 0).data(self.ROW_DATA_ROLE)
    def setRowData(self, row:int, data):
        """Store `data` for `row` on the row's first item."""
        self.item(row, 0).setData(self.ROW_DATA_ROLE, data)
    def setColumnFilter(self, item_filter_func, columns:int or list):
        """
        Pass item(s) at column(s) through `item_filter_func` before
        insertion. Useful for setting specific columns to bold or similar.
        """
        # Accept a single column index or an iterable of them.
        try: iter(columns)
        except TypeError: columns = [columns]
        for i in columns:
            self._column_filter[i] = item_filter_func
    def clear(self):
        """Clear contents and reset the table to zero rows and columns."""
        super().clear()
        self.setRowCount(0)
        self.setColumnCount(0)
    def selectFirstRow(self):
        """Select the first row, if the table is non-empty."""
        if self.rowCount() > 0:
            self.selectRow(0)
    def selectRowsWhere(self, col, value, n_hits=-1,
                        flags=QtCore.Qt.MatchExactly, _select=True):
        """
        Select (also return) at most `n_hits` rows where column `col`
        has value (``data()``) `value`.
        """
        model = self.model()
        matches = model.match(model.index(0, col),
                              self.ITEM_DATA_ROLE,
                              value,
                              n_hits,
                              flags)
        model = self.selectionModel()
        selection_flag = model.Select if _select else model.Deselect
        for index in matches:
            # Only touch rows whose selection state actually changes.
            if _select ^ model.isSelected(index):
                model.select(index, selection_flag | model.Rows)
        return matches
    def deselectRowsWhere(self, col, value, n_hits=-1,
                          flags=QtCore.Qt.MatchExactly):
        """
        Deselect (also return) at most `n_hits` rows where column `col`
        has value (``data()``) `value`.
        """
        return self.selectRowsWhere(col, value, n_hits, flags, False)
class WebviewWidget(QtWebKit.QWebView):
    """WebKit window in a window"""
    def __init__(self, parent=None, bridge=None, html=None, debug=None):
        """
        Parameters
        ----------
        parent: QObject
            Parent QObject. If parent has layout(), this widget is added to it.
        bridge: QObject
            The "bridge" object exposed as ``window.pybridge`` in JavaScript.
            Any bridge methods desired to be accessible from JS need to be
            decorated ``@QtCore.pyqtSlot(<*args>, result=<type>)``.
        html: str
            HTML content to set in the webview.
        debug: bool
            If True, enable context menu and webkit inspector.
        """
        super().__init__(parent)
        self.setSizePolicy(QtGui.QSizePolicy(QtGui.QSizePolicy.Expanding,
                                             QtGui.QSizePolicy.Expanding))
        self._bridge = bridge
        # Best-effort: add self to the parent's layout if it has one.
        try: parent.layout().addWidget(self)
        except (AttributeError, TypeError): pass
        settings = self.settings()
        settings.setAttribute(settings.LocalContentCanAccessFileUrls, True)
        if debug is None:
            # Default: enable the inspector when the root logger is at DEBUG.
            import logging
            debug = logging.getLogger().level <= logging.DEBUG
        if debug:
            settings.setAttribute(settings.DeveloperExtrasEnabled, True)
        else:
            self.setContextMenuPolicy(QtCore.Qt.NoContextMenu)
        if html:
            self.setHtml(html)
    def setContent(self, data, mimetype, url=''):
        """Set raw content, re-exposing the bridge after each load."""
        super().setContent(data, mimetype, QtCore.QUrl(url))
        if self._bridge:
            self.page().mainFrame().addToJavaScriptWindowObject('pybridge', self._bridge)
    def setHtml(self, html, url=''):
        """Set HTML content (encoded as UTF-8) via setContent."""
        self.setContent(html.encode('utf-8'), 'text/html', url)
    def sizeHint(self):
        # Preferred default size of the webview.
        return QtCore.QSize(600, 500)
    def evalJS(self, javascript):
        """Evaluate `javascript` in the page's main frame."""
        self.page().mainFrame().evaluateJavaScript(javascript)
class ControlledAttributesDict(dict):
    """
    Maps attribute names to lists of change callbacks for a master
    widget.  Assigning a callback to a key appends it to that key's
    list (never overwrites) and registers controllers on the master.
    """
    def __init__(self, master):
        super().__init__()
        self.master = master

    def __setitem__(self, key, value):
        # Accumulate callbacks per key instead of replacing them.
        if key in self:
            dict.__getitem__(self, key).append(value)
        else:
            dict.__setitem__(self, key, [value])
        set_controllers(self.master, key, self.master, "")
def callbacks(obj):
    """Return the name-to-callbacks mapping attached to *obj*, or {}."""
    # PEP 8 (E731): named lambdas replaced with proper defs; behavior
    # and the public names `callbacks`/`subcontrollers` are unchanged.
    return getattr(obj, CONTROLLED_ATTRIBUTES, {})


def subcontrollers(obj):
    """Return the (controller, prefix) registration mapping of *obj*, or {}."""
    return getattr(obj, ATTRIBUTE_CONTROLLERS, {})
def notify_changed(obj, name, value):
    """
    Dispatch value-change callbacks for attribute *name* on *obj*.

    Callbacks registered directly on *obj* take precedence; otherwise the
    controllers registered on *obj* are walked and callbacks registered
    under the dotted path "<prefix>.<name>" are fired.
    """
    if name in callbacks(obj):
        for callback in callbacks(obj)[name]:
            callback(value)
        return
    for controller, prefix in list(subcontrollers(obj)):
        if getdeepattr(controller, prefix, None) != obj:
            # Stale registration: the controller no longer reaches obj
            # through this prefix, so drop the entry.
            del subcontrollers(obj)[(controller, prefix)]
            continue
        full_name = prefix + "." + name
        if full_name in callbacks(controller):
            for callback in callbacks(controller)[full_name]:
                callback(value)
            continue
        # No direct callback for the full path: re-register controllers on
        # the new value for any deeper controlled paths passing through it.
        prefix = full_name + "."
        prefix_length = len(prefix)
        for controlled in callbacks(controller):
            if controlled[:prefix_length] == prefix:
                set_controllers(value, controlled[prefix_length:], controller, full_name)
def set_controllers(obj, controlled_name, controller, prefix):
    """
    Walk the dotted path *controlled_name* starting at *obj*, tagging every
    object along the way with a (controller, prefix) entry so attribute
    changes can be propagated back to *controller* (see notify_changed).
    """
    while obj:
        if prefix:
            # Register (or extend) the controller map on this object.
            if hasattr(obj, ATTRIBUTE_CONTROLLERS):
                getattr(obj, ATTRIBUTE_CONTROLLERS)[(controller, prefix)] = True
            else:
                setattr(obj, ATTRIBUTE_CONTROLLERS, {(controller, prefix): True})
        parts = controlled_name.split(".", 1)
        if len(parts) < 2:
            # Reached the leaf attribute name; nothing deeper to tag.
            break
        new_prefix, controlled_name = parts
        obj = getattr(obj, new_prefix, None)
        if prefix:
            prefix += '.'
        prefix += new_prefix
class OWComponent:
    """
    Mixin for objects whose attribute changes are propagated to registered
    callbacks (see notify_changed) and whose settings are initialized by
    the owning widget's settings handler.
    """
    def __init__(self, widget):
        setattr(self, CONTROLLED_ATTRIBUTES, ControlledAttributesDict(self))
        if widget.settingsHandler:
            widget.settingsHandler.initialize(self)
    def __setattr__(self, key, value):
        super().__setattr__(key, value)
        # Fire any callbacks registered for this attribute name.
        notify_changed(self, key, value)
def miscellanea(control, box, parent,
                addToLayout=True, stretch=0, sizePolicy=None, addSpace=False,
                disabled=False, tooltip=None):
    """
    Helper function that sets various properties of the widget using a common
    set of arguments.
    The function
    - sets the `control`'s attribute `box`, if `box` is given and `control.box`
    is not yet set,
    - attaches a tool tip to the `control` if specified,
    - disables the `control`, if `disabled` is set to `True`,
    - adds the `box` to the `parent`'s layout unless `addToLayout` is set to
    `False`; the stretch factor can be specified,
    - adds the control into the box's layout if the box is given (regardless
    of `addToLayout`!)
    - sets the size policy for the box or the control, if the policy is given,
    - adds space in the `parent`'s layout after the `box` if `addSpace` is set
    and `addToLayout` is not `False`.
    If `box` is the same as `parent` it is set to `None`; this is convenient
    because of the way complex controls are inserted.
    :param control: the control, e.g. a `QCheckBox`
    :type control: PyQt4.QtGui.QWidget
    :param box: the box into which the widget was inserted
    :type box: PyQt4.QtGui.QWidget or None
    :param parent: the parent into whose layout the box or the control will be
        inserted
    :type parent: PyQt4.QtGui.QWidget
    :param addSpace: the amount of space to add after the widget
    :type addSpace: bool or int
    :param disabled: If set to `True`, the widget is initially disabled
    :type disabled: bool
    :param addToLayout: If set to `False` the widget is not added to the layout
    :type addToLayout: bool
    :param stretch: the stretch factor for this widget, used when adding to
        the layout (default: 0)
    :type stretch: int
    :param tooltip: tooltip that is attached to the widget
    :type tooltip: str or None
    :param sizePolicy: the size policy for the box or the control
    :type sizePolicy: PyQt4.QtQui.QSizePolicy
    """
    if disabled:
        # if disabled==False, do nothing; it can be already disabled
        control.setDisabled(disabled)
    if tooltip is not None:
        control.setToolTip(tooltip)
    if box is parent:
        # Convenience: a box identical to the parent counts as "no box".
        box = None
    elif box and box is not control and not hasattr(control, "box"):
        control.box = box
    # Insert the control into the box's layout if not already there.
    if box and box.layout() is not None and \
            isinstance(control, QtGui.QWidget) and \
            box.layout().indexOf(control) == -1:
        box.layout().addWidget(control)
    if sizePolicy is not None:
        (box or control).setSizePolicy(sizePolicy)
    if addToLayout and parent and parent.layout() is not None:
        parent.layout().addWidget(box or control, stretch)
        _addSpace(parent, addSpace)
def setLayout(widget, orientation):
    """
    Set the layout of the widget according to orientation. Argument
    `orientation` can be an instance of :obj:`~PyQt4.QtGui.QLayout`, in which
    case is it used as it is. If `orientation` is `'vertical'` or `True`,
    the layout is set to :obj:`~PyQt4.QtGui.QVBoxLayout`. If it is
    `'horizontal'` or `False`, it is set to :obj:`~PyQt4.QtGui.QHBoxLayout`.
    :param widget: the widget for which the layout is being set
    :type widget: PyQt4.QtGui.QWidget
    :param orientation: orientation for the layout
    :type orientation: str or bool or PyQt4.QtGui.QLayout
    """
    if isinstance(orientation, QtGui.QLayout):
        widget.setLayout(orientation)
    elif orientation == 'horizontal' or not orientation:
        widget.setLayout(QtGui.QHBoxLayout())
    else:
        widget.setLayout(QtGui.QVBoxLayout())
def _enterButton(parent, control, placeholder=True):
    """
    Utility function that returns a button with a symbol for "Enter" and
    optionally a placeholder to show when the enter button is hidden. Both
    are inserted into the parent's layout, if it has one. If placeholder is
    constructed it is shown and the button is hidden.
    The height of the button is the same as the height of the widget passed
    as argument `control`.
    :param parent: parent widget into which the button is inserted
    :type parent: PyQt4.QtGui.QWidget
    :param control: a widget for determining the height of the button
    :type control: PyQt4.QtGui.QWidget
    :param placeholder: a flag telling whether to construct a placeholder
        (default: True)
    :type placeholder: bool
    :return: a tuple with a button and a place holder (or `None`)
    :rtype: PyQt4.QtGui.QToolButton or tuple
    """
    global _enter_icon
    if not _enter_icon:
        # Load the icon once and cache it at module level.
        _enter_icon = QtGui.QIcon(
            os.path.dirname(__file__) + "/icons/Dlg_enter.png")
    button = QtGui.QToolButton(parent)
    height = control.sizeHint().height()
    button.setFixedSize(height, height)
    button.setIcon(_enter_icon)
    if parent.layout() is not None:
        parent.layout().addWidget(button)
    if placeholder:
        # Start hidden; the placeholder occupies the button's slot.
        button.hide()
        holder = QtGui.QWidget(parent)
        holder.setFixedSize(height, height)
        if parent.layout() is not None:
            parent.layout().addWidget(holder)
    else:
        holder = None
    return button, holder
def _addSpace(widget, space):
"""
A helper function that adds space into the widget, if requested.
The function is called by functions that have the `addSpace` argument.
:param widget: Widget into which to insert the space
:type widget: PyQt4.QtGui.QWidget
:param space: Amount of space to insert. If False, the function does
nothing. If the argument is an `int`, the specified space is inserted.
Otherwise, the default space is inserted by calling a :obj:`separator`.
:type space: bool or int
"""
if space:
if type(space) == int: # distinguish between int and bool!
separator(widget, space, space)
else:
separator(widget)
def separator(widget, width=4, height=4):
    """
    Add a separator of the given size into the widget.
    :param widget: the widget into whose layout the separator is added
    :type widget: PyQt4.QtGui.QWidget
    :param width: width of the separator
    :type width: int
    :param height: height of the separator
    :type height: int
    :return: separator
    :rtype: PyQt4.QtGui.QWidget
    """
    # A fixed-size, empty QWidget acts as the spacer.
    sep = QtGui.QWidget(widget)
    if widget.layout() is not None:
        widget.layout().addWidget(sep)
    sep.setFixedSize(width, height)
    return sep
def rubber(widget):
    """
    Insert a stretch 100 into the widget's layout

    :param widget: the widget whose layout receives the stretch
    :type widget: PyQt4.QtGui.QWidget
    """
    widget.layout().addStretch(100)
def widgetBox(widget, box=None, orientation='vertical', margin=None, spacing=4,
              **misc):
    """
    Construct a box with vertical or horizontal layout, and optionally,
    a border with an optional label.
    If the widget has a frame, the space after the widget is added unless
    explicitly disabled.
    :param widget: the widget into which the box is inserted
    :type widget: PyQt4.QtGui.QWidget or None
    :param box: tells whether the widget has a border, and its label
    :type box: int or str or None
    :param orientation: orientation for the layout. If the argument is an
        instance of :obj:`~PyQt4.QtGui.QLayout`, it is used as a layout. If
        "horizontal" or false-ish, the layout is horizontal
        (:obj:`~PyQt4.QtGui.QHBoxLayout`), otherwise vertical
        (:obj:`~PyQt4.QtGui.QHBoxLayout`).
    :type orientation: str, int or :obj:`PyQt4.QtGui.QLayout`
    :param sizePolicy: The size policy for the widget (default: None)
    :type sizePolicy: :obj:`~PyQt4.QtGui.QSizePolicy`
    :param margin: The margin for the layout. Default is 7 if the widget has
        a border, and 0 if not.
    :type margin: int
    :param spacing: Spacing within the layout (default: 4)
    :type spacing: int
    :return: Constructed box
    :rtype: PyQt4.QtGui.QGroupBox or PyQt4.QtGui.QWidget
    """
    if box:
        # Bordered variant: a group box, optionally titled.
        b = QtGui.QGroupBox(widget)
        if isinstance(box, str):
            b.setTitle(" " + box.strip() + " ")
        if margin is None:
            margin = 7
    else:
        # Borderless variant: a plain widget with no margins.
        b = QtGui.QWidget(widget)
        b.setContentsMargins(0, 0, 0, 0)
        if margin is None:
            margin = 0
    setLayout(b, orientation)
    b.layout().setSpacing(spacing)
    b.layout().setMargin(margin)
    # Bordered boxes get trailing space by default.
    misc.setdefault('addSpace', bool(box))
    miscellanea(b, None, widget, **misc)
    return b
def indentedBox(widget, sep=20, orientation="vertical", **misc):
    """
    Creates an indented box. The function can also be used "on the fly"::
        gui.checkBox(gui.indentedBox(box), self, "spam", "Enable spam")
    To align the control with a check box, use :obj:`checkButtonOffsetHint`::
        gui.hSlider(gui.indentedBox(self.interBox), self, "intervals")
    :param widget: the widget into which the box is inserted
    :type widget: PyQt4.QtGui.QWidget
    :param sep: Indent size (default: 20)
    :type sep: int
    :param orientation: layout of the inserted box; see :obj:`widgetBox` for
        details
    :type orientation: str, int or PyQt4.QtGui.QLayout
    :return: Constructed box
    :rtype: PyQt4.QtGui.QGroupBox or PyQt4.QtGui.QWidget
    """
    # Horizontal outer box: [ indent spacer | indented content box ]
    outer = widgetBox(widget, orientation=False, spacing=0)
    separator(outer, sep, 0)
    indented = widgetBox(outer, orientation=orientation)
    miscellanea(indented, outer, widget, **misc)
    return indented
def widgetLabel(widget, label="", labelWidth=None, **misc):
    """
    Construct a simple, constant label.
    :param widget: the widget into which the box is inserted
    :type widget: PyQt4.QtGui.QWidget or None
    :param label: The text of the label (default: None)
    :type label: str
    :param labelWidth: The width of the label (default: None)
    :type labelWidth: int
    :return: Constructed label
    :rtype: PyQt4.QtGui.QLabel
    """
    lbl = QtGui.QLabel(label, widget)
    if labelWidth:
        lbl.setFixedSize(labelWidth, lbl.sizeHint().height())
    miscellanea(lbl, None, widget, **misc)
    return lbl
def label(widget, master, label, labelWidth=None, box=None,
          orientation="vertical", **misc):
    """
    Construct a label that contains references to the master widget's
    attributes; when their values change, the label is updated.
    Argument :obj:`label` is a format string following Python's syntax
    (see the corresponding Python documentation): the label's content is
    rendered as `label % master.__dict__`. For instance, if the
    :obj:`label` is given as "There are %(mm)i monkeys", the value of
    `master.mm` (which must be an integer) will be inserted in place of
    `%(mm)i`.
    :param widget: the widget into which the box is inserted
    :type widget: PyQt4.QtGui.QWidget or None
    :param master: master widget
    :type master: OWWidget or OWComponent
    :param label: The text of the label, including attribute names
    :type label: str
    :param labelWidth: The width of the label (default: None)
    :type labelWidth: int
    :return: label
    :rtype: PyQt4.QtGui.QLabel
    """
    if box:
        b = widgetBox(widget, box, orientation=None, addToLayout=False)
    else:
        b = widget
    lbl = QtGui.QLabel("", b)
    reprint = CallFrontLabel(lbl, label, master)
    # Re-render the label whenever any referenced master attribute changes.
    for mo in __re_label.finditer(label):
        getattr(master, CONTROLLED_ATTRIBUTES)[mo.group("value")] = reprint
    reprint()
    if labelWidth:
        lbl.setFixedSize(labelWidth, lbl.sizeHint().height())
    miscellanea(lbl, b, widget, **misc)
    return lbl
class SpinBoxWFocusOut(QtGui.QSpinBox):
    """
    A class derived from QtGui.QSpinBox, which postpones the synchronization
    of the control's value with the master's attribute until the user presses
    Enter or clicks an icon that appears beside the spin box when the value
    is changed.
    The class overloads :obj:`onChange` event handler to show the commit button,
    and :obj:`onEnter` to commit the change when enter is pressed.
    .. attribute:: enterButton
        A widget (usually an icon) that is shown when the value is changed.
    .. attribute:: placeHolder
        A placeholder which is shown when the button is hidden
    .. attribute:: inSetValue
        A flag that is set when the value is being changed through
        :obj:`setValue` to prevent the programmatic changes from showing the
        commit button.
    """
    def __init__(self, minv, maxv, step, parent=None):
        """
        Construct the object and set the range (`minv`, `maxv`) and the step.
        :param minv: Minimal value
        :type minv: int
        :param maxv: Maximal value
        :type maxv: int
        :param step: Step
        :type step: int
        :param parent: Parent widget
        :type parent: PyQt4.QtGui.QWidget
        """
        super().__init__(parent)
        self.setRange(minv, maxv)
        self.setSingleStep(step)
        self.inSetValue = False
        # enterButton/placeHolder (and cback/cfunc used in onEnter) are
        # attached later by gui.spin() when callbackOnReturn is requested.
        self.enterButton = None
        self.placeHolder = None
    def onChange(self, _):
        """
        Hides the place holder and shows the commit button unless
        :obj:`inSetValue` is set.
        """
        if not self.inSetValue:
            self.placeHolder.hide()
            self.enterButton.show()
    def onEnter(self):
        """
        If the commit button is visible, the overload event handler commits
        the change by calling the appropriate callbacks. It also hides the
        commit button and shows the placeHolder.
        """
        if self.enterButton.isVisible():
            self.enterButton.hide()
            self.placeHolder.show()
            if self.cback:
                self.cback(int(str(self.text())))
            if self.cfunc:
                self.cfunc()
    # doesn't work: it's probably LineEdit's focusOut that we should
    # (but can't) catch
    def focusOutEvent(self, *e):
        """
        This handler was intended to catch the focus out event and reintepret
        it as if enter was pressed. It does not work, though.
        """
        super().focusOutEvent(*e)
        if self.enterButton and self.enterButton.isVisible():
            self.onEnter()
    def setValue(self, value):
        """
        Set the :obj:`inSetValue` flag and call the inherited method.
        """
        self.inSetValue = True
        super().setValue(value)
        self.inSetValue = False
class DoubleSpinBoxWFocusOut(QtGui.QDoubleSpinBox):
    """
    Same as :obj:`SpinBoxWFocusOut`, except that it is derived from
    :obj:`~PyQt4.QtGui.QDoubleSpinBox`"""
    def __init__(self, minv, maxv, step, parent):
        super().__init__(parent)
        # Infer the decimal count from the step size (e.g. step 0.01 -> 2).
        self.setDecimals(math.ceil(-math.log10(step)))
        self.setRange(minv, maxv)
        self.setSingleStep(step)
        self.inSetValue = False
        # enterButton/placeHolder (and cback/cfunc used in onEnter) are
        # attached later by gui.spin() when callbackOnReturn is requested.
        self.enterButton = None
        self.placeHolder = None
    def onChange(self, _):
        # Show the commit button on user edits (not programmatic setValue).
        if not self.inSetValue:
            self.placeHolder.hide()
            self.enterButton.show()
    def onEnter(self):
        # Commit the edited value through the attached callbacks.
        if self.enterButton.isVisible():
            self.enterButton.hide()
            self.placeHolder.show()
            if self.cback:
                # Accept both "." and "," as the decimal separator.
                self.cback(float(str(self.text()).replace(",", ".")))
            if self.cfunc:
                self.cfunc()
    # doesn't work: it's probably LineEdit's focusOut that we should
    # (and can't) catch
    def focusOutEvent(self, *e):
        super().focusOutEvent(*e)
        if self.enterButton and self.enterButton.isVisible():
            self.onEnter()
    def setValue(self, value):
        # Guard flag so programmatic changes don't show the commit button.
        self.inSetValue = True
        super().setValue(value)
        self.inSetValue = False
def spin(widget, master, value, minv, maxv, step=1, box=None, label=None,
         labelWidth=None, orientation=None, callback=None,
         controlWidth=None, callbackOnReturn=False, checked=None,
         checkCallback=None, posttext=None, disabled=False,
         alignment=Qt.AlignLeft, keyboardTracking=True,
         decimals=None, spinType=int, **misc):
    """
    A spinbox with lots of bells and whistles, such as a checkbox and various
    callbacks. It constructs a control of type :obj:`SpinBoxWFocusOut` or
    :obj:`DoubleSpinBoxWFocusOut`.
    :param widget: the widget into which the box is inserted
    :type widget: PyQt4.QtGui.QWidget or None
    :param master: master widget
    :type master: OWWidget or OWComponent
    :param value: the master's attribute with which the value is synchronized
    :type value: str
    :param minv: minimal value
    :type minv: int
    :param maxv: maximal value
    :type maxv: int
    :param step: step (default: 1)
    :type step: int
    :param box: tells whether the widget has a border, and its label
    :type box: int or str or None
    :param label: label that is put in above or to the left of the spin box
    :type label: str
    :param labelWidth: optional label width (default: None)
    :type labelWidth: int
    :param orientation: tells whether to put the label above (`"vertical"` or
        `True`) or to the left (`"horizontal"` or `False`)
    :type orientation: int or bool or str
    :param callback: a function that is called when the value is entered; if
        :obj:`callbackOnReturn` is `True`, the function is called when the
        user commits the value by pressing Enter or clicking the icon
    :type callback: function
    :param controlWidth: the width of the spin box
    :type controlWidth: int
    :param callbackOnReturn: if `True`, the spin box has an associated icon
        that must be clicked to confirm the value (default: False)
    :type callbackOnReturn: bool
    :param checked: if not None, a check box is put in front of the spin box;
        when unchecked, the spin box is disabled. Argument `checked` gives the
        name of the master's attribute given whose value is synchronized with
        the check box's state (default: None).
    :type checked: str
    :param checkCallback: a callback function that is called when the check
        box's state is changed
    :type checkCallback: function
    :param posttext: a text that is put to the right of the spin box
    :type posttext: str
    :param alignment: alignment of the spin box (e.g. `QtCore.Qt.AlignLeft`)
    :type alignment: PyQt4.QtCore.Qt.Alignment
    :param keyboardTracking: If `True`, the valueChanged signal is emitted
        when the user is typing (default: True)
    :type keyboardTracking: bool
    :param spinType: determines whether to use QSpinBox (int) or
        QDoubleSpinBox (float)
    :type spinType: type
    :param decimals: number of decimals (if `spinType` is `float`)
    :type decimals: int
    :return: Tuple `(spin box, check box) if `checked` is `True`, otherwise
        the spin box
    :rtype: tuple or gui.SpinBoxWFocusOut
    """
    # b is the outermost box or the widget if there are no boxes;
    # b is the widget that is inserted into the layout
    # bi is the box that contains the control or the checkbox and the control;
    # bi can be the widget itself, if there are no boxes
    # cbox is the checkbox (or None)
    # sbox is the spinbox itself
    # NOTE: the condition parses as `box or (label and not checked)` --
    # a label alone creates a box only when there is no checkbox.
    if box or label and not checked:
        b = widgetBox(widget, box, orientation, addToLayout=False)
        hasHBox = orientation == 'horizontal' or not orientation
    else:
        b = widget
        hasHBox = False
    if not hasHBox and (checked or callback and callbackOnReturn or posttext):
        bi = widgetBox(b, orientation=0, addToLayout=False)
    else:
        bi = b
    cbox = None
    if checked is not None:
        # The checkbox takes over the label when present.
        cbox = checkBox(bi, master, checked, label, labelWidth=labelWidth,
                        callback=checkCallback)
    elif label:
        b.label = widgetLabel(b, label, labelWidth)
    if posttext:
        widgetLabel(bi, posttext)
    isDouble = spinType == float
    sbox = bi.control = \
        (SpinBoxWFocusOut, DoubleSpinBoxWFocusOut)[isDouble](minv, maxv,
                                                             step, bi)
    if bi is not widget:
        bi.setDisabled(disabled)
    else:
        sbox.setDisabled(disabled)
    if decimals is not None:
        sbox.setDecimals(decimals)
    sbox.setAlignment(alignment)
    sbox.setKeyboardTracking(keyboardTracking)
    if controlWidth:
        sbox.setFixedWidth(controlWidth)
    if value:
        sbox.setValue(getdeepattr(master, value))
    # When committing on Return, no immediate valueChanged wiring is made
    # (the third argument evaluates to False); cback/cfunc are stored on
    # the spin box and fired from onEnter instead.
    cfront, sbox.cback, sbox.cfunc = connectControl(
        master, value, callback,
        not (callback and callbackOnReturn) and
        sbox.valueChanged[(int, float)[isDouble]],
        (CallFrontSpin, CallFrontDoubleSpin)[isDouble](sbox))
    if checked:
        cbox.disables = [sbox]
        cbox.makeConsistent()
    if callback and callbackOnReturn:
        sbox.enterButton, sbox.placeHolder = _enterButton(bi, sbox)
        sbox.valueChanged[str].connect(sbox.onChange)
        sbox.editingFinished.connect(sbox.onEnter)
        sbox.enterButton.clicked.connect(sbox.onEnter)
        if hasattr(sbox, "upButton"):
            sbox.upButton().clicked.connect(
                lambda c=sbox.editor(): c.setFocus())
            sbox.downButton().clicked.connect(
                lambda c=sbox.editor(): c.setFocus())
    miscellanea(sbox, b if b is not widget else bi, widget, **misc)
    if checked:
        # NOTE(review): this special case returns only the spin box (not
        # the (cbox, sbox) tuple) for boxless double spins -- looks
        # intentional for backward compatibility; see TODO below.
        if isDouble and b == widget:
            # TODO Backward compatilibity; try to find and eliminate
            sbox.control = b.control
            return sbox
        return cbox, sbox
    else:
        return sbox
def doubleSpin(widget, master, value, minv, maxv, step=1, box=None, label=None,
               labelWidth=None, orientation=None, callback=None,
               controlWidth=None, callbackOnReturn=False, checked=None,
               checkCallback=None, posttext=None,
               alignment=Qt.AlignLeft, keyboardTracking=True,
               decimals=None, **misc):
    """
    Backward compatilibity function: calls :obj:`spin` with `spinType=float`.

    All arguments are forwarded unchanged; see :obj:`spin` for their
    meaning.
    """
    return spin(widget, master, value, minv, maxv, step, box=box, label=label,
                labelWidth=labelWidth, orientation=orientation,
                callback=callback, controlWidth=controlWidth,
                callbackOnReturn=callbackOnReturn, checked=checked,
                checkCallback=checkCallback, posttext=posttext,
                alignment=alignment, keyboardTracking=keyboardTracking,
                decimals=decimals, spinType=float, **misc)
def checkBox(widget, master, value, label, box=None,
             callback=None, getwidget=False, id_=None, labelWidth=None,
             disables=None, **misc):
    """
    A simple checkbox.
    :param widget: the widget into which the box is inserted
    :type widget: PyQt4.QtGui.QWidget or None
    :param master: master widget
    :type master: OWWidget or OWComponent
    :param value: the master's attribute with which the value is synchronized
    :type value: str
    :param label: label
    :type label: str
    :param box: tells whether the widget has a border, and its label
    :type box: int or str or None
    :param callback: a function that is called when the check box state is
        changed
    :type callback: function
    :param getwidget: If set `True`, the callback function will get a keyword
        argument `widget` referencing the check box
    :type getwidget: bool
    :param id_: If present, the callback function will get a keyword argument
        `id` with this value
    :type id_: any
    :param labelWidth: the width of the label
    :type labelWidth: int
    :param disables: a list of widgets that are disabled if the check box is
        unchecked
    :type disables: list or PyQt4.QtGui.QWidget or None
    :return: constructed check box; if it is placed within a box, the box is
        returned in the attribute `box`
    :rtype: PyQt4.QtGui.QCheckBox
    """
    if box:
        b = widgetBox(widget, box, orientation=None, addToLayout=False)
    else:
        b = widget
    cbox = QtGui.QCheckBox(label, b)
    if labelWidth:
        cbox.setFixedSize(labelWidth, cbox.sizeHint().height())
    # Initialize the check state from the master's attribute.
    cbox.setChecked(getdeepattr(master, value))
    # Two-way binding: GUI -> master via the toggled signal, master -> GUI
    # via CallFrontCheckBox; the user callback (if any) is wrapped so it can
    # optionally receive `widget` and `id` keyword arguments.
    connectControl(master, value, None, cbox.toggled[bool],
                   CallFrontCheckBox(cbox),
                   cfunc=callback and FunctionCallback(
                       master, callback, widget=cbox, getwidget=getwidget,
                       id=id_))
    # Accept a single widget as a convenience; normalize to a list.
    if isinstance(disables, QtGui.QWidget):
        disables = [disables]
    cbox.disables = disables or []
    cbox.makeConsistent = Disabler(cbox, master, value)
    cbox.toggled[bool].connect(cbox.makeConsistent)
    # Apply the initial enabled/disabled state to the `disables` widgets.
    cbox.makeConsistent(value)
    miscellanea(cbox, b, widget, **misc)
    return cbox
class LineEditWFocusOut(QtGui.QLineEdit):
    """
    A class derived from QtGui.QLineEdit, which postpones the synchronization
    of the control's value with the master's attribute until the user leaves
    the line edit, presses Enter or clicks an icon that appears beside the
    line edit when the value is changed.
    The class also allows specifying a callback function for focus-in event.
    .. attribute:: enterButton
        A widget (usually an icon) that is shown when the value is changed.
    .. attribute:: placeHolder
        A placeholder which is shown when the button is hidden
    .. attribute:: inSetValue
        A flag that is set when the value is being changed through
        :obj:`setValue` to prevent the programmatic changes from showing the
        commit button.
    .. attribute:: callback
        Callback that is called when the change is confirmed
    .. attribute:: focusInCallback
        Callback that is called on the focus-in event
    """
    def __init__(self, parent, callback, focusInCallback=None,
                 placeholder=False):
        """Create the line edit and wire up the commit button/signals."""
        super().__init__(parent)
        if parent.layout() is not None:
            parent.layout().addWidget(self)
        self.callback = callback
        self.focusInCallback = focusInCallback
        # _enterButton creates the commit icon (and optional placeholder)
        # next to this line edit inside `parent`.
        self.enterButton, self.placeHolder = \
            _enterButton(parent, self, placeholder)
        self.enterButton.clicked.connect(self.returnPressedHandler)
        # Any edit shows the commit button; Enter (or the button) commits.
        self.textChanged[str].connect(self.markChanged)
        self.returnPressed.connect(self.returnPressedHandler)
    def markChanged(self, *_):
        # Show the commit button (hiding the placeholder, if present).
        if self.placeHolder:
            self.placeHolder.hide()
        self.enterButton.show()
    def markUnchanged(self, *_):
        # Hide the commit button (showing the placeholder, if present).
        self.enterButton.hide()
        if self.placeHolder:
            self.placeHolder.show()
    def returnPressedHandler(self):
        # Commit only when there is a pending (uncommitted) change, as
        # indicated by the visible commit button.
        if self.enterButton.isVisible():
            self.markUnchanged()
            # cback is attached by lineEdit()/connectControl and pushes the
            # text into the master's attribute.
            if hasattr(self, "cback") and self.cback:
                self.cback(self.text())
            if self.callback:
                self.callback()
    def setText(self, t):
        # Programmatic changes must not show the commit button.
        super().setText(t)
        if self.enterButton:
            self.markUnchanged()
    def focusOutEvent(self, *e):
        # Leaving the widget commits any pending change.
        super().focusOutEvent(*e)
        self.returnPressedHandler()
    def focusInEvent(self, *e):
        if self.focusInCallback:
            self.focusInCallback()
        return super().focusInEvent(*e)
def lineEdit(widget, master, value, label=None, labelWidth=None,
             orientation='vertical', box=None, callback=None,
             valueType=str, validator=None, controlWidth=None,
             callbackOnType=False, focusInCallback=None,
             enterPlaceholder=False, **misc):
    """
    Insert a line edit.
    :param widget: the widget into which the box is inserted
    :type widget: PyQt4.QtGui.QWidget or None
    :param master: master widget
    :type master: OWWidget or OWComponent
    :param value: the master's attribute with which the value is synchronized
    :type value: str
    :param label: label
    :type label: str
    :param labelWidth: the width of the label
    :type labelWidth: int
    :param orientation: tells whether to put the label above (`"vertical"` or
        `True`) or to the left (`"horizontal"` or `False`)
    :type orientation: int or bool or str
    :param box: tells whether the widget has a border, and its label
    :type box: int or str or None
    :param callback: a function that is called when the text is changed or
        committed (depending on `callbackOnType`)
    :type callback: function
    :param valueType: the type into which the entered string is converted
        when synchronizing to `value`
    :type valueType: type
    :param validator: the validator for the input
    :type validator: PyQt4.QtGui.QValidator
    :param controlWidth: the width of the line edit
    :type controlWidth: int
    :param callbackOnType: if set to `True`, the callback is called at each
        key press (default: `False`)
    :type callbackOnType: bool
    :param focusInCallback: a function that is called when the line edit
        receives focus
    :type focusInCallback: function
    :param enterPlaceholder: if set to `True`, space of appropriate width is
        left empty to the right for the icon that shows that the value is
        changed but has not been committed yet
    :type enterPlaceholder: bool
    :rtype: PyQt4.QtGui.QLineEdit or a box
    """
    if box or label:
        b = widgetBox(widget, box, orientation, addToLayout=False)
        if label is not None:
            widgetLabel(b, label, labelWidth)
        hasHBox = orientation == 'horizontal' or not orientation
    else:
        b = widget
        hasHBox = False
    # An alternative widget class may be smuggled in through `misc`.
    baseClass = misc.pop("baseClass", None)
    if baseClass:
        ledit = baseClass(b)
        ledit.enterButton = None
        if b is not widget:
            b.layout().addWidget(ledit)
    elif focusInCallback or callback and not callbackOnType:
        # Deferred-commit variant: LineEditWFocusOut needs a horizontal
        # container for its commit button, so create one unless we already
        # have a horizontal box.
        if not hasHBox:
            outer = widgetBox(b, "", 0, addToLayout=(b is not widget))
        else:
            outer = b
        ledit = LineEditWFocusOut(outer, callback, focusInCallback,
                                  enterPlaceholder)
    else:
        ledit = QtGui.QLineEdit(b)
        ledit.enterButton = None
        if b is not widget:
            b.layout().addWidget(ledit)
    if value:
        ledit.setText(str(getdeepattr(master, value)))
    if controlWidth:
        ledit.setFixedWidth(controlWidth)
    if validator:
        ledit.setValidator(validator)
    if value:
        # connectControl returns a tuple; element [1] is the value callback
        # that LineEditWFocusOut.returnPressedHandler invokes as `cback`.
        ledit.cback = connectControl(
            master, value,
            callbackOnType and callback, ledit.textChanged[str],
            CallFrontLineEdit(ledit), fvcb=value and valueType)[1]
    miscellanea(ledit, b, widget, **misc)
    return ledit
def button(widget, master, label, callback=None, width=None, height=None,
           toggleButton=False, value="", default=False, autoDefault=True,
           buttonType=QtGui.QPushButton, **misc):
    """
    Insert a button (QPushButton, by default)
    :param widget: the widget into which the button is inserted
    :type widget: PyQt4.QtGui.QWidget or None
    :param master: master widget
    :type master: OWWidget or OWComponent
    :param label: label
    :type label: str
    :param callback: a function that is called when the button is pressed
    :type callback: function
    :param width: the width of the button
    :type width: int
    :param height: the height of the button
    :type height: int
    :param toggleButton: if set to `True`, the button is checkable, but it is
        not synchronized with any attribute unless the `value` is given
    :type toggleButton: bool
    :param value: the master's attribute with which the value is synchronized
        (the argument is optional; if present, it makes the button
        "checkable", even if `toggleButton` is not set)
    :type value: str
    :param default: if `True` it makes the button the default button; this is
        the button that is activated when the user presses Enter unless some
        auto default button has current focus
    :type default: bool
    :param autoDefault: all buttons are auto default: they are activated if
        they have focus (or are the next in the focus chain) when the user
        presses enter. By setting `autoDefault` to `False`, the button is not
        activated on pressing Return.
    :type autoDefault: bool
    :param buttonType: the button type (default: `QPushButton`)
    :type buttonType: PyQt4.QtGui.QAbstractButton
    :rtype: PyQt4.QtGui.QAbstractButton
    """
    # Use a local name distinct from this function's name.
    btn = buttonType(widget)
    if label:
        btn.setText(label)
    if width:
        btn.setFixedWidth(width)
    if height:
        btn.setFixedHeight(height)
    if toggleButton or value:
        btn.setCheckable(True)
    # Default/auto-default only make sense for plain push buttons.
    if buttonType == QtGui.QPushButton:
        btn.setDefault(default)
        btn.setAutoDefault(autoDefault)
    if value:
        # Two-way binding between the checked state and `master.<value>`.
        btn.setChecked(getdeepattr(master, value))
        cfunc = callback and FunctionCallback(master, callback, widget=btn)
        connectControl(master, value, None, btn.toggled[bool],
                       CallFrontButton(btn), cfunc=cfunc)
    elif callback:
        btn.clicked.connect(callback)
    miscellanea(btn, None, widget, **misc)
    return btn
def toolButton(widget, master, label="", callback=None,
               width=None, height=None, tooltip=None):
    """
    Insert a tool button; a thin convenience wrapper around :obj:`button`
    that fixes `buttonType` to `QToolButton`.
    :param widget: the widget into which the button is inserted
    :type widget: PyQt4.QtGui.QWidget or None
    :param master: master widget
    :type master: OWWidget or OWComponent
    :param label: label
    :type label: str
    :param callback: a function that is called when the button is pressed
    :type callback: function
    :param width: the width of the button
    :type width: int
    :param height: the height of the button
    :type height: int
    :rtype: PyQt4.QtGui.QToolButton
    """
    return button(widget, master, label, callback,
                  width=width, height=height,
                  buttonType=QtGui.QToolButton, tooltip=tooltip)
def createAttributePixmap(char, background=Qt.black, color=Qt.white):
    """
    Create a QIcon with a given character. The icon is 13 pixels high and wide.
    :param char: The character that is printed in the icon
    :type char: str
    :param background: the background color (default: black)
    :type background: PyQt4.QtGui.QColor
    :param color: the character color (default: white)
    :type color: PyQt4.QtGui.QColor
    :rtype: PyQt4.QtGui.QIcon
    """
    # Start from a fully transparent 13x13 canvas.
    pix = QtGui.QPixmap(13, 13)
    pix.fill(QtGui.QColor(0, 0, 0, 0))
    p = QtGui.QPainter()
    p.begin(pix)
    p.setRenderHints(p.Antialiasing | p.TextAntialiasing |
                     p.SmoothPixmapTransform)
    # Draw a rounded rectangle in the background color...
    p.setPen(background)
    p.setBrush(background)
    p.drawRoundedRect(QtCore.QRectF(0, 0, 13, 13), 4, 4)
    # ...and print the character over it in the foreground color.
    p.setPen(color)
    p.drawText(2, 11, char)
    p.end()
    return QtGui.QIcon(pix)
class __AttributeIconDict(dict):
    """
    A dict mapping variable-type codes to their icons, populated lazily on
    first access (icons cannot be created before the QApplication exists).
    """
    def __getitem__(self, key):
        if not self:
            # First access: build the standard icons — C(ontinuous),
            # D(iscrete), S(tring), and "?" for anything unrecognized (-1).
            defaults = (
                (vartype(ContinuousVariable()), "C", (202, 0, 32)),
                (vartype(DiscreteVariable()), "D", (26, 150, 65)),
                (vartype(StringVariable()), "S", (0, 0, 0)),
                (-1, "?", (128, 128, 128)),
            )
            for tpe, char, col in defaults:
                self[tpe] = createAttributePixmap(char, QtGui.QColor(*col))
        # Allow lookup by Variable instance; unknown keys fall back to "?".
        if key not in self:
            key = vartype(key) if isinstance(key, Variable) else -1
        return super().__getitem__(key)
# Module-level, lazily populated mapping from variable types (or Variable
# instances) to their icons.
attributeIconDict = __AttributeIconDict()
def attributeItem(var):
    """
    Construct a pair (icon, name) for inserting a variable into a combo or
    list box
    :param var: variable
    :type var: Orange.data.Variable
    :rtype: tuple with PyQt4.QtGui.QIcon and str
    """
    icon = attributeIconDict[var]
    return icon, var.name
def listBox(widget, master, value=None, labels=None, box=None, callback=None,
            selectionMode=QtGui.QListWidget.SingleSelection,
            enableDragDrop=False, dragDropCallback=None,
            dataValidityCallback=None, sizeHint=None, **misc):
    """
    Insert a list box.
    The value with which the box's value synchronizes (`master.<value>`)
    is a list of indices of selected items.
    :param widget: the widget into which the box is inserted
    :type widget: PyQt4.QtGui.QWidget or None
    :param master: master widget
    :type master: OWWidget or OWComponent
    :param value: the name of the master's attribute with which the value is
        synchronized (list of ints - indices of selected items)
    :type value: str
    :param labels: the name of the master's attribute with the list of items
        (as strings or tuples with icon and string)
    :type labels: str
    :param box: tells whether the widget has a border, and its label
    :type box: int or str or None
    :param callback: a function that is called when the selection state is
        changed
    :type callback: function
    :param selectionMode: selection mode - single, multiple etc
    :type selectionMode: PyQt4.QtGui.QAbstractItemView.SelectionMode
    :param enableDragDrop: flag telling whether drag and drop is available
    :type enableDragDrop: bool
    :param dragDropCallback: callback function on drop event
    :type dragDropCallback: function
    :param dataValidityCallback: function that check the validity on enter
        and move event; it should return either `ev.accept()` or `ev.ignore()`.
    :type dataValidityCallback: function
    :param sizeHint: size hint
    :type sizeHint: PyQt4.QtGui.QSize
    :rtype: OrangeListBox
    """
    if box:
        bg = widgetBox(widget, box,
                       orientation="horizontal", addToLayout=False)
    else:
        bg = widget
    lb = OrangeListBox(master, enableDragDrop, dragDropCallback,
                       dataValidityCallback, sizeHint, bg)
    lb.setSelectionMode(selectionMode)
    # Remember the names of the synchronized attributes on the widget itself;
    # OrangeListBox.update_master and the CallBack/CallFront helpers use them.
    lb.ogValue = value
    lb.ogLabels = labels
    lb.ogMaster = master
    if value is not None:
        # Wrap the selection list in a ControlledList so that changes made
        # through the master's attribute are mirrored in the list box.
        clist = getdeepattr(master, value)
        if not isinstance(clist, ControlledList):
            clist = ControlledList(clist, lb)
            master.__setattr__(value, clist)
    if labels is not None:
        # Re-assigning triggers the controlled-attribute machinery, which
        # populates the list box from the labels attribute.
        setattr(master, labels, getdeepattr(master, labels))
        if hasattr(master, CONTROLLED_ATTRIBUTES):
            getattr(master, CONTROLLED_ATTRIBUTES)[labels] = CallFrontListBoxLabels(lb)
    if value is not None:
        # Same trick for the selection: push the current value into the view.
        setattr(master, value, getdeepattr(master, value))
    connectControl(master, value, callback, lb.itemSelectionChanged,
                   CallFrontListBox(lb), CallBackListBox(lb, master))
    misc.setdefault('addSpace', True)
    miscellanea(lb, bg, widget, **misc)
    return lb
def radioButtons(widget, master, value, btnLabels=(), tooltips=None,
                 box=None, label=None, orientation='vertical',
                 callback=None, **misc):
    """
    Construct a button group and add radio buttons, if they are given.
    The value with which the buttons synchronize is the index of selected
    button.
    :param widget: the widget into which the box is inserted
    :type widget: PyQt4.QtGui.QWidget or None
    :param master: master widget
    :type master: OWWidget or OWComponent
    :param value: the master's attribute with which the value is synchronized
    :type value: str
    :param btnLabels: a list of labels or icons for radio buttons
    :type btnLabels: list of str or pixmaps
    :param tooltips: a list of tool tips of the same length as btnLabels
    :type tooltips: list of str
    :param box: tells whether the widget has a border, and its label
    :type box: int or str or None
    :param label: a label that is inserted into the box
    :type label: str
    :param callback: a function that is called when the selection is changed
    :type callback: function
    :param orientation: orientation of the layout in the box
    :type orientation: int or str or QLayout
    :rtype: PyQt4.QtQui.QButtonGroup
    """
    bg = widgetBox(widget, box, orientation, addToLayout=False)
    # Idiomatic identity test (was `if not label is None`).
    if label is not None:
        widgetLabel(bg, label)
    rb = QtGui.QButtonGroup(bg)
    # NOTE(review): `bg.group` is only set when widgetBox created a new box,
    # yet it is read unconditionally below; this relies on widgetBox always
    # returning a new container — confirm before changing.
    if bg is not widget:
        bg.group = rb
    # Bookkeeping used by appendRadioButton and the callback helpers.
    bg.buttons = []
    bg.ogValue = value
    bg.ogMaster = master
    for i, lab in enumerate(btnLabels):
        appendRadioButton(bg, lab, tooltip=tooltips and tooltips[i])
    connectControl(master, value, callback, bg.group.buttonClicked[int],
                   CallFrontRadioButtons(bg), CallBackRadioButton(bg, master))
    misc.setdefault('addSpace', bool(box))
    miscellanea(bg.group, bg, widget, **misc)
    return bg
# Backward-compatible alias: older code used the name `radioButtonsInBox`.
radioButtonsInBox = radioButtons
def appendRadioButton(group, label, insertInto=None,
                      disabled=False, tooltip=None, sizePolicy=None,
                      addToLayout=True, stretch=0, addSpace=False):
    """
    Construct a radio button and add it to the group. The group must be
    constructed with :obj:`radioButtonsInBox` since it adds additional
    attributes need for the call backs.
    The radio button is inserted into `insertInto` or, if omitted, into the
    button group. This is useful for more complex groups, like those that have
    radio buttons in several groups, divided by labels and inside indented
    boxes.
    :param group: the button group
    :type group: PyQt4.QtCore.QButtonGroup
    :param label: string label or a pixmap for the button
    :type label: str or PyQt4.QtGui.QPixmap
    :param insertInto: the widget into which the radio button is inserted
    :type insertInto: PyQt4.QtGui.QWidget
    :param disabled: if `True`, the button is created disabled
    :type disabled: bool
    :param tooltip: the button's tool tip
    :type tooltip: str
    :param sizePolicy: the size policy for the button
    :type sizePolicy: PyQt4.QtGui.QSizePolicy
    :param addToLayout: if `True` (default), the button is added to the
        layout of `insertInto` (or of the group)
    :type addToLayout: bool
    :param stretch: stretch factor passed to the layout
    :type stretch: int
    :param addSpace: spacing added after the button (see `_addSpace`)
    :type addSpace: bool or int
    :rtype: PyQt4.QtGui.QRadioButton
    """
    # The new button's index within the group.
    i = len(group.buttons)
    if isinstance(label, str):
        w = QtGui.QRadioButton(label)
    else:
        # Non-string labels are treated as icons; the text is the index.
        w = QtGui.QRadioButton(str(i))
        w.setIcon(QtGui.QIcon(label))
    if not hasattr(group, "buttons"):
        group.buttons = []
    group.buttons.append(w)
    group.group.addButton(w)
    # Check the button if it corresponds to the master's current value.
    w.setChecked(getdeepattr(group.ogMaster, group.ogValue) == i)
    # miscellanea for this case is weird, so we do it here
    if disabled:
        w.setDisabled(disabled)
    if tooltip is not None:
        w.setToolTip(tooltip)
    if sizePolicy:
        w.setSizePolicy(sizePolicy)
    if addToLayout:
        dest = insertInto or group
        dest.layout().addWidget(w, stretch)
        _addSpace(dest, addSpace)
    return w
def hSlider(widget, master, value, box=None, minValue=0, maxValue=10, step=1,
            callback=None, label=None, labelFormat=" %d", ticks=False,
            divideFactor=1.0, vertical=False, createLabel=True, width=None,
            intOnly=True, **misc):
    """
    Construct a slider.
    :param widget: the widget into which the box is inserted
    :type widget: PyQt4.QtGui.QWidget or None
    :param master: master widget
    :type master: OWWidget or OWComponent
    :param value: the master's attribute with which the value is synchronized
    :type value: str
    :param box: tells whether the widget has a border, and its label
    :type box: int or str or None
    :param label: a label that is inserted into the box
    :type label: str
    :param callback: a function that is called when the value is changed
    :type callback: function
    :param minValue: minimal value
    :type minValue: int or float
    :param maxValue: maximal value
    :type maxValue: int or float
    :param step: step size
    :type step: int or float
    :param labelFormat: the label format; default is `" %d"`
    :type labelFormat: str
    :param ticks: if set to `True`, ticks are added below the slider
    :type ticks: bool
    :param divideFactor: a factor with which the displayed value is divided
    :type divideFactor: float
    :param vertical: if set to `True`, the slider is vertical
    :type vertical: bool
    :param createLabel: unless set to `False`, labels for minimal, maximal
        and the current value are added to the widget
    :type createLabel: bool
    :param width: the width of the slider
    :type width: int
    :param intOnly: if `True`, the slider value is integer (the slider is
        of type :obj:`PyQt4.QtGui.QSlider`) otherwise it is float
        (:obj:`FloatSlider`, derived in turn from :obj:`PyQt4.QtQui.QSlider`).
    :type intOnly: bool
    :rtype: :obj:`PyQt4.QtGui.QSlider` or :obj:`FloatSlider`
    """
    hbox = widgetBox(widget, box, orientation="horizontal",
                     addToLayout=False)
    if label:
        widgetLabel(hbox, label)
    orient = Qt.Vertical if vertical else Qt.Horizontal
    if intOnly:
        slider = QtGui.QSlider(orient, hbox)
        slider.setRange(minValue, maxValue)
        if step:
            slider.setSingleStep(step)
            slider.setPageStep(step)
            slider.setTickInterval(step)
        signal = slider.valueChanged[int]
    else:
        # Float-valued variant emits its own signal.
        slider = FloatSlider(orient, minValue, maxValue, step)
        signal = slider.valueChangedFloat[float]
    hbox.layout().addWidget(slider)
    slider.setValue(getdeepattr(master, value))
    if width:
        slider.setFixedWidth(width)
    if ticks:
        slider.setTickPosition(QtGui.QSlider.TicksBelow)
        slider.setTickInterval(ticks)
    if createLabel:
        # Value readout next to the slider; fix its size to the widest of
        # the two extreme values so the layout does not jump around.
        val_label = QtGui.QLabel(hbox)
        hbox.layout().addWidget(val_label)
        val_label.setText(labelFormat % minValue)
        width_lo = val_label.sizeHint().width()
        val_label.setText(labelFormat % maxValue)
        width_hi = val_label.sizeHint().width()
        val_label.setFixedSize(max(width_lo, width_hi),
                               val_label.sizeHint().height())
        val_label.setText(
            labelFormat % (getdeepattr(master, value) / divideFactor))
        val_label.setLbl = lambda x: \
            val_label.setText(labelFormat % (x / divideFactor))
        signal.connect(val_label.setLbl)
    connectControl(master, value, callback, signal, CallFrontHSlider(slider))
    miscellanea(slider, hbox, widget, **misc)
    return slider
def labeledSlider(widget, master, value, box=None,
                  label=None, labels=(), labelFormat=" %d", ticks=False,
                  callback=None, vertical=False, width=None, **misc):
    """
    Construct a slider with labels instead of numbers.
    :param widget: the widget into which the box is inserted
    :type widget: PyQt4.QtGui.QWidget or None
    :param master: master widget
    :type master: OWWidget or OWComponent
    :param value: the master's attribute with which the value is synchronized
    :type value: str
    :param box: tells whether the widget has a border, and its label
    :type box: int or str or None
    :param label: a label that is inserted into the box
    :type label: str
    :param labels: labels shown at different slider positions
    :type labels: tuple of str
    :param callback: a function that is called when the value is changed
    :type callback: function
    :param ticks: if set to `True`, ticks are added below the slider
    :type ticks: bool
    :param vertical: if set to `True`, the slider is vertical
    :type vertical: bool
    :param width: the width of the slider
    :type width: int
    :rtype: :obj:`PyQt4.QtGui.QSlider`
    """
    sliderBox = widgetBox(widget, box, orientation="horizontal",
                          addToLayout=False)
    if label:
        widgetLabel(sliderBox, label)
    sliderOrient = Qt.Vertical if vertical else Qt.Horizontal
    slider = QtGui.QSlider(sliderOrient, sliderBox)
    slider.ogValue = value
    # Positions map 1:1 onto the `labels` sequence.
    slider.setRange(0, len(labels) - 1)
    slider.setSingleStep(1)
    slider.setPageStep(1)
    slider.setTickInterval(1)
    sliderBox.layout().addWidget(slider)
    # The master's value is one of the labels; its index is the position.
    slider.setValue(labels.index(getdeepattr(master, value)))
    if width:
        slider.setFixedWidth(width)
    if ticks:
        slider.setTickPosition(QtGui.QSlider.TicksBelow)
        slider.setTickInterval(ticks)
    max_label_size = 0
    slider.value_label = value_label = QtGui.QLabel(sliderBox)
    value_label.setAlignment(Qt.AlignRight)
    sliderBox.layout().addWidget(value_label)
    # Size the readout label to fit the widest formatted label.
    # NOTE(review): with the default labelFormat " %d" and string labels
    # this %-formatting would raise TypeError; presumably callers pass a
    # compatible format (or callable) — confirm before relying on defaults.
    for lb in labels:
        value_label.setText(labelFormat % lb)
        max_label_size = max(max_label_size, value_label.sizeHint().width())
    value_label.setFixedSize(max_label_size, value_label.sizeHint().height())
    # NOTE(review): the initial text is the raw value, not formatted with
    # labelFormat like subsequent updates — verify this asymmetry is intended.
    value_label.setText(getdeepattr(master, value))
    if isinstance(labelFormat, str):
        value_label.set_label = lambda x: \
            value_label.setText(labelFormat % x)
    else:
        value_label.set_label = lambda x: value_label.setText(labelFormat(x))
    slider.valueChanged[int].connect(value_label.set_label)
    connectControl(master, value, callback, slider.valueChanged[int],
                   CallFrontLabeledSlider(slider, labels),
                   CallBackLabeledSlider(slider, master, labels))
    miscellanea(slider, sliderBox, widget, **misc)
    return slider
def valueSlider(widget, master, value, box=None, label=None,
                values=(), labelFormat=" %d", ticks=False,
                callback=None, vertical=False, width=None, **misc):
    """
    Construct a slider with different values.
    :param widget: the widget into which the box is inserted
    :type widget: PyQt4.QtGui.QWidget or None
    :param master: master widget
    :type master: OWWidget or OWComponent
    :param value: the master's attribute with which the value is synchronized
    :type value: str
    :param box: tells whether the widget has a border, and its label
    :type box: int or str or None
    :param label: a label that is inserted into the box
    :type label: str
    :param values: values at different slider positions
    :type values: list of int
    :param labelFormat: label format; default is `" %d"`; can also be a function
    :type labelFormat: str or func
    :param callback: a function that is called when the value is changed
    :type callback: function
    :param ticks: if set to `True`, ticks are added below the slider
    :type ticks: bool
    :param vertical: if set to `True`, the slider is vertical
    :type vertical: bool
    :param width: the width of the slider
    :type width: int
    :rtype: :obj:`PyQt4.QtGui.QSlider`
    """
    # Normalize a %-style format string into a formatting callable.
    # BUG FIX: the previous code did `f(x)`, which *called* the format
    # string and raised TypeError on the first use; `f % x` applies it.
    if isinstance(labelFormat, str):
        labelFormat = lambda x, f=labelFormat: f % x
    sliderBox = widgetBox(widget, box, orientation="horizontal",
                          addToLayout=False)
    if label:
        widgetLabel(sliderBox, label)
    slider_orient = Qt.Vertical if vertical else Qt.Horizontal
    slider = QtGui.QSlider(slider_orient, sliderBox)
    slider.ogValue = value
    # Positions map 1:1 onto the `values` sequence.
    slider.setRange(0, len(values) - 1)
    slider.setSingleStep(1)
    slider.setPageStep(1)
    slider.setTickInterval(1)
    sliderBox.layout().addWidget(slider)
    # The master's value is one of `values`; its index is the position.
    slider.setValue(values.index(getdeepattr(master, value)))
    if width:
        slider.setFixedWidth(width)
    if ticks:
        slider.setTickPosition(QtGui.QSlider.TicksBelow)
        slider.setTickInterval(ticks)
    # Size the readout label to fit the widest formatted value.
    max_label_size = 0
    slider.value_label = value_label = QtGui.QLabel(sliderBox)
    value_label.setAlignment(Qt.AlignRight)
    sliderBox.layout().addWidget(value_label)
    for lb in values:
        value_label.setText(labelFormat(lb))
        max_label_size = max(max_label_size, value_label.sizeHint().width())
    value_label.setFixedSize(max_label_size, value_label.sizeHint().height())
    value_label.setText(labelFormat(getdeepattr(master, value)))
    # Slider positions are indices; translate back to the actual value.
    value_label.set_label = lambda x: value_label.setText(labelFormat(values[x]))
    slider.valueChanged[int].connect(value_label.set_label)
    connectControl(master, value, callback, slider.valueChanged[int],
                   CallFrontLabeledSlider(slider, values),
                   CallBackLabeledSlider(slider, master, values))
    miscellanea(slider, sliderBox, widget, **misc)
    return slider
class OrangeComboBox(QtGui.QComboBox):
    """
    A QtGui.QComboBox subclass extended to support a bounded contents
    width hint.
    """
    def __init__(self, parent=None, maximumContentsLength=-1, **kwargs):
        super().__init__(parent, **kwargs)
        self.__maximumContentsLength = maximumContentsLength

    def setMaximumContentsLength(self, length):
        """
        Set the maximum contents length hint.

        The hint specifies the upper bound on the `sizeHint` and
        `minimumSizeHint` width specified in character length.
        Set to 0 or a negative value to disable it.

        .. note::
             This property does not affect the widget's `maximumSize`.
             The widget can still grow depending on its `sizePolicy`.

        Parameters
        ----------
        length : int
            Maximum contents length hint.
        """
        if self.__maximumContentsLength != length:
            self.__maximumContentsLength = length
            self.updateGeometry()

    def maximumContentsLength(self):
        """
        Return the maximum contents length hint.
        """
        return self.__maximumContentsLength

    def __boundHint(self, sh):
        # Shared clamping logic for sizeHint/minimumSizeHint (previously
        # duplicated): limit the hint's width to `maximumContentsLength`
        # characters plus the icon width and a small margin.
        if self.__maximumContentsLength > 0:
            width = (self.fontMetrics().width("X")
                     * self.__maximumContentsLength
                     + self.iconSize().width() + 4)
            sh = sh.boundedTo(QtCore.QSize(width, sh.height()))
        return sh

    def sizeHint(self):
        # reimplemented
        return self.__boundHint(super().sizeHint())

    def minimumSizeHint(self):
        # reimplemented
        return self.__boundHint(super().minimumSizeHint())
def comboBox(widget, master, value, box=None, label=None, labelWidth=None,
             orientation='vertical', items=(), callback=None,
             sendSelectedValue=False, valueType=str,
             control2attributeDict=None, emptyString=None, editable=False,
             contentsLength=None, maximumContentsLength=25,
             **misc):
    """
    Construct a combo box.
    The `value` attribute of the `master` contains either the index of the
    selected row (if `sendSelected` is left at default, `False`) or a value
    converted to `valueType` (`str` by default).
    Furthermore, the value is converted by looking up into dictionary
    `control2attributeDict`.
    :param widget: the widget into which the box is inserted
    :type widget: PyQt4.QtGui.QWidget or None
    :param master: master widget
    :type master: OWWidget or OWComponent
    :param value: the master's attribute with which the value is synchronized
    :type value: str
    :param box: tells whether the widget has a border, and its label
    :type box: int or str or None
    :param orientation: orientation of the layout in the box
    :type orientation: str or int or bool
    :param label: a label that is inserted into the box
    :type label: str
    :param labelWidth: the width of the label
    :type labelWidth: int
    :param callback: a function that is called when the value is changed
    :type callback: function
    :param items: items (optionally with data) that are put into the box
    :type items: tuple of str or tuples
    :param sendSelectedValue: flag telling whether to store/retrieve indices
        or string values from `value`
    :type sendSelectedValue: bool
    :param valueType: the type into which the selected value is converted
        if sentSelectedValue is `False`
    :type valueType: type
    :param control2attributeDict: a dictionary through which the value is
        converted
    :type control2attributeDict: dict or None
    :param emptyString: the string value in the combo box that gets stored as
        an empty string in `value`
    :type emptyString: str
    :param editable: a flag telling whether the combo is editable
    :type editable: bool
    :param int contentsLength: Contents character length to use as a
        fixed size hint. When not None, equivalent to::
            combo.setSizeAdjustPolicy(
                QComboBox.AdjustToMinimumContentsLengthWithIcon)
            combo.setMinimumContentsLength(contentsLength)
    :param int maximumContentsLength: Specifies the upper bound on the
        `sizeHint` and `minimumSizeHint` width specified in character
        length (default: 25, use 0 to disable)
    :rtype: PyQt4.QtGui.QComboBox
    """
    if box or label:
        hb = widgetBox(widget, box, orientation, addToLayout=False)
        if label is not None:
            widgetLabel(hb, label, labelWidth)
    else:
        hb = widget
    combo = OrangeComboBox(
        hb, maximumContentsLength=maximumContentsLength,
        editable=editable)
    if contentsLength is not None:
        combo.setSizeAdjustPolicy(
            QtGui.QComboBox.AdjustToMinimumContentsLengthWithIcon)
        combo.setMinimumContentsLength(contentsLength)
    combo.box = hb
    # Items may be plain strings or (icon, text)-style tuples.
    for item in items:
        if isinstance(item, (tuple, list)):
            combo.addItem(*item)
        else:
            combo.addItem(str(item))
    if value:
        cindex = getdeepattr(master, value)
        if isinstance(cindex, str):
            # A string value is mapped to its index among `items`;
            # unknown values fall back to index 0.
            if items and cindex in items:
                cindex = items.index(getdeepattr(master, value))
            else:
                cindex = 0
        # Clamp out-of-range indices to the first item.
        if cindex > combo.count() - 1:
            cindex = 0
        combo.setCurrentIndex(cindex)
        if sendSelectedValue:
            # Synchronize by string value, optionally translated through
            # control2attributeDict (emptyString maps to "").
            if control2attributeDict is None:
                control2attributeDict = {}
            if emptyString:
                control2attributeDict[emptyString] = ""
            connectControl(
                master, value, callback, combo.activated[str],
                CallFrontComboBox(combo, valueType, control2attributeDict),
                ValueCallbackCombo(master, value, valueType,
                                   control2attributeDict))
        else:
            # Synchronize by row index.
            connectControl(
                master, value, callback, combo.activated[int],
                CallFrontComboBox(combo, None, control2attributeDict))
    miscellanea(combo, hb, widget, **misc)
    return combo
class OrangeListBox(QtGui.QListWidget):
    """
    List box with drag and drop functionality. Function :obj:`listBox`
    constructs instances of this class; do not use the class directly.
    .. attribute:: master
        The widget into which the listbox is inserted.
    .. attribute:: ogLabels
        The name of the master's attribute that holds the strings with items
        in the list box.
    .. attribute:: ogValue
        The name of the master's attribute that holds the indices of selected
        items.
    .. attribute:: enableDragDrop
        A flag telling whether drag-and-drop is enabled.
    .. attribute:: dragDropCallback
        A callback that is called at the end of drop event.
    .. attribute:: dataValidityCallback
        A callback that is called on dragEnter and dragMove events and returns
        either `ev.accept()` or `ev.ignore()`.
    .. attribute:: defaultSizeHint
        The size returned by the `sizeHint` method.
    """
    def __init__(self, master, enableDragDrop=False, dragDropCallback=None,
                 dataValidityCallback=None, sizeHint=None, *args):
        """
        :param master: the master widget
        :type master: OWWidget or OWComponent
        :param enableDragDrop: flag telling whether drag and drop is enabled
        :type enableDragDrop: bool
        :param dragDropCallback: callback for the end of drop event
        :type dragDropCallback: function
        :param dataValidityCallback: callback that accepts or ignores dragEnter
            and dragMove events
        :type dataValidityCallback: function with one argument (event)
        :param sizeHint: size hint
        :type sizeHint: PyQt4.QtGui.QSize
        :param args: optional arguments for the inherited constructor
        """
        # NOTE(review): `master` is assigned before super().__init__ —
        # presumably deliberate so it is available during construction;
        # confirm before reordering.
        self.master = master
        super().__init__(*args)
        self.drop_callback = dragDropCallback
        self.valid_data_callback = dataValidityCallback
        if not sizeHint:
            self.size_hint = QtCore.QSize(150, 100)
        else:
            self.size_hint = sizeHint
        if enableDragDrop:
            self.setDragEnabled(True)
            self.setAcceptDrops(True)
            self.setDropIndicatorShown(True)
    def sizeHint(self):
        # Fixed hint chosen at construction time.
        return self.size_hint
    def dragEnterEvent(self, ev):
        super().dragEnterEvent(ev)
        # Custom validity callback wins; otherwise only accept drags that
        # originate from another OrangeListBox (as a move).
        if self.valid_data_callback:
            self.valid_data_callback(ev)
        elif isinstance(ev.source(), OrangeListBox):
            ev.setDropAction(Qt.MoveAction)
            ev.accept()
        else:
            ev.ignore()
    def dropEvent(self, ev):
        ev.setDropAction(Qt.MoveAction)
        super().dropEvent(ev)
        # Sync this box's items back to the master, then remove the moved
        # items from the source box's master attribute.
        items = self.update_master()
        if ev.source() is not self:
            ev.source().update_master(exclude=items)
        if self.drop_callback:
            self.drop_callback()
    def update_master(self, exclude=()):
        # Rebuild the master's labels attribute from the current item data,
        # skipping anything listed in `exclude`.
        control_list = [self.item(i).data(Qt.UserRole) for i in range(self.count()) if self.item(i).data(Qt.UserRole) not in exclude]
        if self.ogLabels:
            master_list = getattr(self.master, self.ogLabels)
            if master_list != control_list:
                setattr(self.master, self.ogLabels, control_list)
        return control_list
    def updateGeometries(self):
        # A workaround for a bug in Qt
        # (see: http://bugreports.qt.nokia.com/browse/QTBUG-14412)
        if getattr(self, "_updatingGeometriesNow", False):
            return
        self._updatingGeometriesNow = True
        try:
            return super().updateGeometries()
        finally:
            self._updatingGeometriesNow = False
class SmallWidgetButton(QtGui.QPushButton):
    """Push button (optionally with an icon) that toggles a popup
    :obj:`AutoHideWidget` positioned just below the button."""

    def __init__(self, widget, text="", pixmap=None, box=None,
                 orientation='vertical', autoHideWidget=None, **misc):
        #self.parent = parent
        if pixmap is not None:
            # Resolve `pixmap` to an icon: an existing path (absolute or
            # relative to the local "icons" directory), a QPixmap/QIcon
            # instance, or the fall-back "arrow_down.png" icon.
            iconDir = os.path.join(os.path.dirname(__file__), "icons")
            name = ""
            if isinstance(pixmap, str):
                if os.path.exists(pixmap):
                    name = pixmap
                elif os.path.exists(os.path.join(iconDir, pixmap)):
                    name = os.path.join(iconDir, pixmap)
            elif isinstance(pixmap, (QtGui.QPixmap, QtGui.QIcon)):
                name = pixmap
            name = name or os.path.join(iconDir, "arrow_down.png")
            super().__init__(QtGui.QIcon(name), text, widget)
        else:
            super().__init__(text, widget)
        if widget.layout() is not None:
            widget.layout().addWidget(self)
        # create autohide widget and set a layout
        self.widget = self.autohideWidget = \
            (autoHideWidget or AutoHideWidget)(None, Qt.Popup)
        setLayout(self.widget, orientation)
        if box:
            # optionally wrap the popup contents in a (labelled) box
            self.widget = widgetBox(self.widget, box, orientation)
        self.autohideWidget.hide()
        miscellanea(self, self.widget, widget, **misc)

    def mousePressEvent(self, ev):
        # Toggle: hide the popup if visible, otherwise show it right
        # below the button (in global coordinates).
        super().mousePressEvent(ev)
        if self.autohideWidget.isVisible():
            self.autohideWidget.hide()
        else:
            self.autohideWidget.move(
                self.mapToGlobal(QtCore.QPoint(0, self.height())))
            self.autohideWidget.show()
class SmallWidgetLabel(QtGui.QLabel):
    """Label (text or icon) that toggles a popup :obj:`AutoHideWidget`
    positioned just below the label, like :obj:`SmallWidgetButton`."""

    def __init__(self, widget, text="", pixmap=None, box=None,
                 orientation='vertical', **misc):
        super().__init__(widget)
        if text:
            self.setText("<font color=\"#C10004\">" + text + "</font>")
        elif pixmap is not None:
            # Resolve `pixmap` the same way as SmallWidgetButton does.
            iconDir = os.path.join(os.path.dirname(__file__), "icons")
            name = ""
            if isinstance(pixmap, str):
                if os.path.exists(pixmap):
                    name = pixmap
                elif os.path.exists(os.path.join(iconDir, pixmap)):
                    name = os.path.join(iconDir, pixmap)
            elif isinstance(pixmap, (QtGui.QPixmap, QtGui.QIcon)):
                name = pixmap
            name = name or os.path.join(iconDir, "arrow_down.png")
            self.setPixmap(QtGui.QPixmap(name))
        # popup container that hides itself when the mouse leaves it
        self.autohideWidget = self.widget = AutoHideWidget(None, Qt.Popup)
        setLayout(self.widget, orientation)
        if box:
            self.widget = widgetBox(self.widget, box, orientation)
        self.autohideWidget.hide()
        miscellanea(self, self.widget, widget, **misc)

    def mousePressEvent(self, ev):
        # Toggle the popup below the label, as in SmallWidgetButton.
        super().mousePressEvent(ev)
        if self.autohideWidget.isVisible():
            self.autohideWidget.hide()
        else:
            self.autohideWidget.move(
                self.mapToGlobal(QtCore.QPoint(0, self.height())))
            self.autohideWidget.show()
class AutoHideWidget(QtGui.QWidget):
    """Popup widget that hides itself as soon as the mouse leaves it."""

    def leaveEvent(self, _):
        self.hide()
class SearchLineEdit(QtGui.QLineEdit):
    """
    QLineEdit for quick searches; cooperates with a :obj:`Searcher` that
    owns the list box (``searcher.lb``) with the filtered items.
    """
    def __init__(self, t, searcher):
        """
        :param t: parent widget (the searcher's popup frame)
        :param searcher: the Searcher instance driving this line edit
        """
        # BUG FIX: the original called super().__init__(self, t), which
        # passes `self` to QLineEdit.__init__ as an extra argument and
        # raises a TypeError at construction time.
        super().__init__(t)
        self.searcher = searcher

    def keyPressEvent(self, e):
        """
        Handles keys up and down by selecting the previous and the next item
        in the list, and the escape key, which hides the searcher.
        """
        k = e.key()
        if k == Qt.Key_Down:
            # BUG FIX: QListWidget.currentItem() returns a QListWidgetItem,
            # which cannot take part in integer arithmetic as the original
            # code attempted; rows are addressed via currentRow()/
            # setCurrentRow().
            row = self.searcher.lb.currentRow()
            if row + 1 < self.searcher.lb.count():
                self.searcher.lb.setCurrentRow(row + 1)
        elif k == Qt.Key_Up:
            row = self.searcher.lb.currentRow()
            if row:
                self.searcher.lb.setCurrentRow(row - 1)
        elif k == Qt.Key_Escape:
            self.searcher.window.hide()
        else:
            # any other key: normal line-edit behaviour
            return super().keyPressEvent(e)
class Searcher:
    """
    The searcher class for :obj:`SearchLineEdit`.

    NOTE(review): the popup is built with Qt3-style APIs
    (``QStyle.WStyle_*`` flags, ``QVBoxLayout.setAutoAdd``,
    ``QListWidget.insertItem(str)``, ``lb.text(i)``,
    ``control.setCurrentItem(index)``) that do not exist in PyQt4 —
    this path appears to be legacy/unused; confirm before relying on it.
    """
    def __init__(self, control, master):
        # `control` is a combo-box-like widget; `master` the owning widget
        self.control = control
        self.master = master

    def __call__(self):
        # Build and show the search popup over the control.
        _s = QtGui.QStyle
        self.window = t = QtGui.QFrame(
            self.master,
            _s.WStyle_Dialog + _s.WStyle_Tool + _s.WStyle_Customize +
            _s.WStyle_NormalBorder)
        QtGui.QVBoxLayout(t).setAutoAdd(1)
        # position the popup at the control, in master-relative coordinates
        gs = self.master.mapToGlobal(QtCore.QPoint(0, 0))
        gl = self.control.mapToGlobal(QtCore.QPoint(0, 0))
        t.move(gl.x() - gs.x(), gl.y() - gs.y())
        self.allItems = [self.control.text(i)
                         for i in range(self.control.count())]
        le = SearchLineEdit(t, self)
        self.lb = QtGui.QListWidget(t)
        for i in self.allItems:
            self.lb.insertItem(i)
        t.setFixedSize(self.control.width(), 200)
        t.show()
        le.setFocus()
        le.textChanged.connect(self.textChanged)
        le.returnPressed.connect(self.returnPressed)
        self.lb.itemClicked.connect(self.mouseClicked)

    def textChanged(self, s):
        # Refilter: show only items containing the typed text
        # (case-insensitive substring match).
        s = str(s)
        self.lb.clear()
        for i in self.allItems:
            if s.lower() in i.lower():
                self.lb.insertItem(i)

    def returnPressed(self):
        # Accept the current (or first) visible item, if any.
        if self.lb.count():
            self.conclude(self.lb.text(max(0, self.lb.currentItem())))
        else:
            self.window.hide()

    def mouseClicked(self, item):
        self.conclude(item.text())

    def conclude(self, value):
        # Write the chosen value back into the control and fire its
        # value callback (cback) and function callback (cfunc).
        index = self.allItems.index(value)
        self.control.setCurrentItem(index)
        if self.control.cback:
            if self.control.sendSelectedValue:
                self.control.cback(value)
            else:
                self.control.cback(index)
        if self.control.cfunc:
            self.control.cfunc()
        self.window.hide()
class collapsableWidgetBox(QtGui.QGroupBox):
    """Checkable group box that collapses (hides its children) when
    unchecked; the checked state can mirror a boolean master attribute."""

    def __init__(self, widget, box="", master=None, value="",
                 orientation="vertical", callback=None):
        """
        :param widget: the parent into which the box is inserted
        :param box: the box title (when a string)
        :param master: widget whose attribute mirrors the checked state
        :param value: name of the mirrored attribute on `master`
        :param orientation: layout direction of the box contents
        :param callback: called after every toggle
        """
        super().__init__(widget)
        self.setFlat(1)
        setLayout(self, orientation)
        if widget.layout() is not None:
            widget.layout().addWidget(self)
        if isinstance(box, str):
            self.setTitle(" " + box.strip() + " ")
        self.setCheckable(True)
        self.master = master
        self.value = value
        self.callback = callback
        self.clicked.connect(self.toggled)

    def toggled(self, _=0):
        # Store the new state on the master (if bound) and resync the
        # children's visibility; always fire the user callback.
        if self.value:
            self.master.__setattr__(self.value, self.isChecked())
            self.updateControls()
        if self.callback is not None:
            self.callback()

    def updateControls(self):
        # Sync checked/flat state and child visibility with the attribute.
        val = getdeepattr(self.master, self.value)
        width = self.width()
        self.setChecked(val)
        self.setFlat(not val)
        # keep the collapsed box from shrinking horizontally
        self.setMinimumSize(QtCore.QSize(width if not val else 0, 0))
        for c in self.children():
            if isinstance(c, QtGui.QLayout):
                continue
            if val:
                c.show()
            else:
                c.hide()
class widgetHider(QtGui.QWidget):
    """Clickable arrow icon that shows/hides a set of dependent widgets,
    mirroring a boolean master attribute via a :obj:`Disabler` in HIDER
    mode."""

    def __init__(self, widget, master, value, _=(19, 19), widgets=None,
                 tooltip=None):
        super().__init__(widget)
        if widget.layout() is not None:
            widget.layout().addWidget(self)
        self.value = value
        self.master = master
        if tooltip:
            self.setToolTip(tooltip)
        # pixmaps[0]/pixmaps[1] are drawn for the False/True attribute value
        iconDir = os.path.join(os.path.dirname(__file__), "icons")
        icon1 = os.path.join(iconDir, "arrow_down.png")
        icon2 = os.path.join(iconDir, "arrow_up.png")
        self.pixmaps = [QtGui.QPixmap(icon1), QtGui.QPixmap(icon2)]
        self.setFixedSize(self.pixmaps[0].size())
        self.disables = list(widgets or [])
        self.makeConsistent = Disabler(self, master, value, type=HIDER)
        if widgets:
            self.setWidgets(widgets)

    def mousePressEvent(self, ev):
        # Toggle the attribute, then show/hide the dependent widgets.
        self.master.__setattr__(self.value,
                                not getdeepattr(self.master, self.value))
        self.makeConsistent()

    def setWidgets(self, widgets):
        """Replace the set of dependent widgets and resync them."""
        self.disables = list(widgets)
        self.makeConsistent()

    def paintEvent(self, ev):
        super().paintEvent(ev)
        if self.pixmaps:
            # draw the up/down arrow for the current attribute value
            pix = self.pixmaps[getdeepattr(self.master, self.value)]
            painter = QtGui.QPainter(self)
            painter.drawPixmap(0, 0, pix)
def auto_commit(widget, master, value, label, auto_label=None, box=True,
                checkbox_label=None, orientation=None, commit=None,
                callback=None, **misc):
    """
    Add a commit button with auto-commit check box.

    The widget must have a commit method and a setting that stores whether
    auto-commit is on.

    The function replaces the commit method with a new commit method that
    checks whether auto-commit is on. If it is, it passes the call to the
    original commit, otherwise it sets the dirty flag.

    The checkbox controls the auto-commit. When auto-commit is switched on, the
    checkbox callback checks whether the dirty flag is on and calls the original
    commit.

    Important! Do not connect any signals to the commit before calling
    auto_commit.

    :param widget: the widget into which the box with the button is inserted
    :type widget: PyQt4.QtGui.QWidget or None
    :param value: the master's attribute which stores whether the auto-commit
        is on
    :type value: str
    :param master: master widget
    :type master: OWWidget or OWComponent
    :param label: The button label
    :type label: str
    :param auto_label: The label used when auto-commit is on; default is
        `"Auto " + label`
    :type auto_label: str
    :param commit: master's method to override ('commit' by default)
    :type commit: function
    :param callback: function to call whenever the checkbox's statechanged
    :type callback: function
    :param box: tells whether the widget has a border, and its label
    :type box: int or str or None
    :return: the box
    """
    def checkbox_toggled():
        # When auto-commit turns on: disable the button and flush any
        # pending (dirty) commit; when off, re-enable manual committing.
        if getattr(master, value):
            btn.setText(auto_label)
            btn.setEnabled(False)
            if dirty:
                do_commit()
        else:
            btn.setText(label)
            btn.setEnabled(True)
        if callback:
            callback()

    def unconditional_commit(*args, **kwargs):
        # Replacement for master.<commit>: commit now if auto-commit is
        # on, otherwise just remember that a commit is pending.
        nonlocal dirty
        if getattr(master, value):
            do_commit(*args, **kwargs)
        else:
            dirty = True

    def do_commit(*args, **kwargs):
        # Run the original commit with a wait cursor; clears the dirty flag.
        nonlocal dirty
        QApplication.setOverrideCursor(QCursor(Qt.WaitCursor))
        commit(*args, **kwargs)
        QApplication.restoreOverrideCursor()
        dirty = False

    dirty = False
    commit = commit or getattr(master, 'commit')
    # lambdas have no usable __name__; generate one so the
    # 'unconditional_<name>' alias below stays unique
    commit_name = next(LAMBDA_NAME) if isinstance(commit, LambdaType) else commit.__name__
    # keep the original commit reachable as master.unconditional_<name>
    setattr(master, 'unconditional_' + commit_name, commit)
    if not auto_label:
        if checkbox_label:
            auto_label = label
        else:
            auto_label = "Auto " + label.lower() + " is on"
    if isinstance(box, QtGui.QWidget):
        b = box
    else:
        if orientation is None:
            orientation = bool(checkbox_label)
        b = widgetBox(widget, box=box, orientation=orientation,
                      addToLayout=False)
        b.setSizePolicy(QtGui.QSizePolicy.Preferred, QtGui.QSizePolicy.Maximum)
    b.checkbox = cb = checkBox(b, master, value, checkbox_label,
                               callback=checkbox_toggled, tooltip=auto_label)
    # NOTE(review): `and` binds tighter than `or` here, i.e. this reads
    # (checkbox_label and horizontal) or (not orientation) — presumably
    # intentional; confirm before "fixing" the parenthesisation.
    if checkbox_label and orientation == 'horizontal' or not orientation:
        b.layout().insertSpacing(-1, 10)
    cb.setSizePolicy(QtGui.QSizePolicy.Preferred, QtGui.QSizePolicy.Preferred)
    b.button = btn = button(b, master, label, callback=lambda: do_commit())
    if not checkbox_label:
        btn.setSizePolicy(QtGui.QSizePolicy.Expanding,
                          QtGui.QSizePolicy.Preferred)
    checkbox_toggled()
    # install the auto-commit-aware wrapper in place of master.<commit>
    setattr(master, commit_name, unconditional_commit)
    miscellanea(b, widget, widget,
                addToLayout=not isinstance(box, QtGui.QWidget), **misc)
    return b
class ControlledList(list):
    """
    A class derived from a list that is connected to a
    :obj:`PyQt4.QtGui.QListBox`: the list contains indices of items that are
    selected in the list box. Changing the list content changes the
    selection in the list box.
    """
    def __init__(self, content, listBox=None):
        super().__init__(content)
        self.listBox = listBox

    def __reduce__(self):
        # cannot pickle self.listBox, but can't discard it
        # (ControlledList may live on)
        import copyreg
        return copyreg._reconstructor, (list, list, ()), None, self.__iter__()

    # TODO ControlledList.item2name is probably never used
    def item2name(self, item):
        item = self.listBox.labels[item]
        if type(item) is tuple:
            return item[1]
        else:
            return item

    def __setitem__(self, index, item):
        # Deselect the replaced index/indices, select the new one(s).
        if isinstance(index, int):
            self.listBox.item(self[index]).setSelected(0)
            # BUG FIX: the list stores *indices* (see extend/__delitem__),
            # so the new value must be resolved through the list box; the
            # original called setSelected() on the int itself.
            self.listBox.item(item).setSelected(1)
        else:
            for i in self[index]:
                self.listBox.item(i).setSelected(0)
            for i in item:
                self.listBox.item(i).setSelected(1)
        super().__setitem__(index, item)

    def __delitem__(self, index):
        # Deselect the removed index/indices before dropping them.
        if isinstance(index, int):
            self.listBox.item(self[index]).setSelected(0)
        else:
            for i in self[index]:
                self.listBox.item(i).setSelected(0)
        super().__delitem__(index)

    def append(self, item):
        super().append(item)
        # BUG FIX: select through the list box (item is an index), as in
        # extend(); the original called setSelected() on the int.
        self.listBox.item(item).setSelected(1)

    def extend(self, items):
        super().extend(items)
        for i in items:
            self.listBox.item(i).setSelected(1)

    def insert(self, index, item):
        # BUG FIX: select through the list box, consistent with extend().
        self.listBox.item(item).setSelected(1)
        super().insert(index, item)

    def pop(self, index=-1):
        i = super().pop(index)
        self.listBox.item(i).setSelected(0)
        # BUG FIX: list.pop returns the removed element; the original
        # silently returned None.
        return i

    def remove(self, item):
        # BUG FIX: deselect through the list box, consistent with
        # __delitem__; the original called setSelected() on the int.
        self.listBox.item(item).setSelected(0)
        super().remove(item)
def connectControl(master, value, f, signal,
                   cfront, cback=None, cfunc=None, fvcb=None):
    """
    Wire a Qt control to a master attribute: connect *signal* to a
    "back" callback (which stores the control's value into
    ``master.<value>``) and to a user-function callback, and cross-link
    them with the "front" callback *cfront* (which pushes attribute
    changes into the control) so updates do not bounce back and forth.

    :param master: widget that owns the attribute
    :param value: name of the attribute, or a false value for none
    :param f: user callback(s) wrapped into a FunctionCallback
    :param signal: Qt signal of the control, or None
    :param cfront: ControlledCallFront updating the control
    :param cback: explicit value callback (default: ValueCallback)
    :param cfunc: explicit function callback (default: FunctionCallback)
    :param fvcb: value-conversion function passed to ValueCallback
    :return: the tuple (cfront, cback, cfunc)
    """
    # default "back" callback: store the control's value on the master
    cback = cback or value and ValueCallback(master, value, fvcb)
    if cback:
        if signal:
            signal.connect(cback)
        cback.opposite = cfront
        if value and cfront and hasattr(master, CONTROLLED_ATTRIBUTES):
            # register cfront so assigning master.<value> updates the control
            getattr(master, CONTROLLED_ATTRIBUTES)[value] = cfront
    cfunc = cfunc or f and FunctionCallback(master, f)
    if cfunc:
        if signal:
            signal.connect(cfunc)
        # cfront must suppress both callbacks while it updates the control
        cfront.opposite = tuple(filter(None, (cback, cfunc)))
    return cfront, cback, cfunc
class ControlledCallback:
    """Base class for "back" callbacks that store a control's value into
    an attribute of a master widget (or into a key of a dict)."""

    def __init__(self, widget, attribute, f=None):
        """
        :param widget: target object (widget-like) or a plain dict
        :param attribute: attribute name (or dict key) to write to
        :param f: optional conversion applied to the value before storing
        """
        self.widget = widget
        self.attribute = attribute
        self.f = f
        self.disabled = 0
        if isinstance(widget, dict):
            return  # we can't assign attributes to dict
        # register on the widget so the callback object stays alive
        if not hasattr(widget, "callbackDeposit"):
            widget.callbackDeposit = []
        widget.callbackDeposit.append(self)

    def acyclic_setattr(self, value):
        """Convert *value* (if a converter was given) and store it on the
        target, temporarily disabling the opposite ("front") callback so
        the update does not echo back into the control."""
        if self.disabled:
            return
        convert = self.f
        if convert:
            # empty strings and lone signs would crash int()/float();
            # treat them as zero
            blank = not value or isinstance(value, str) and value in "+-"
            if convert in (int, float) and blank:
                value = convert(0)
            else:
                value = convert(value)

        def _store():
            if isinstance(self.widget, dict):
                self.widget[self.attribute] = value
            else:
                setattr(self.widget, self.attribute, value)

        opposite = getattr(self, "opposite", None)
        if not opposite:
            _store()
            return
        try:
            opposite.disabled += 1
            _store()
        finally:
            opposite.disabled -= 1
class ValueCallback(ControlledCallback):
    """Callback that writes a control's value into the master's attribute
    (via :obj:`ControlledCallback.acyclic_setattr`); errors are reported
    but never propagated into Qt's signal dispatch."""

    # noinspection PyBroadException
    def __call__(self, value):
        if value is None:
            return
        try:
            self.acyclic_setattr(value)
        except Exception:
            # BUG FIX: was a bare `except:`, which would also swallow
            # KeyboardInterrupt/SystemExit.
            print("gui.ValueCallback: %s" % value)
            import traceback
            import sys
            traceback.print_exception(*sys.exc_info())
class ValueCallbackCombo(ValueCallback):
    """Combo-box variant of :obj:`ValueCallback`: maps the displayed text
    through ``control2attributeDict`` before storing it."""

    def __init__(self, widget, attribute, f=None, control2attributeDict=None):
        super().__init__(widget, attribute, f)
        self.control2attributeDict = control2attributeDict or {}

    def __call__(self, value):
        key = str(value)
        # unmapped texts are stored verbatim
        mapped = self.control2attributeDict.get(key, key)
        return super().__call__(mapped)
class ValueCallbackLineEdit(ControlledCallback):
    """Line-edit variant of the value callback: stores the value while
    preserving the caret position of the edited control."""

    def __init__(self, control, widget, attribute, f=None):
        # modernized: super() instead of explicit base-class call,
        # consistent with the rest of the module
        super().__init__(widget, attribute, f)
        self.control = control

    # noinspection PyBroadException
    def __call__(self, value):
        if value is None:
            return
        try:
            # setting the attribute may rewrite the line edit's text,
            # which would reset the cursor; save and restore it
            pos = self.control.cursorPosition()
            self.acyclic_setattr(value)
            self.control.setCursorPosition(pos)
        except Exception:
            # BUG FIX: was a bare `except:`, which would also swallow
            # KeyboardInterrupt/SystemExit.
            print("invalid value ", value, type(value))
            import traceback
            import sys
            traceback.print_exception(*sys.exc_info())
class SetLabelCallback:
    """Callback that renders a value into a label with a %-style format,
    optionally transforming it first."""

    def __init__(self, widget, label, format="%5.2f", f=None):
        """
        :param widget: owner; the callback registers itself in the
            owner's ``callbackDeposit`` if it has one
        :param label: label-like object with a ``setText`` method
        :param format: %-style format string applied to the value
        :param f: optional transformation applied before formatting
        """
        self.widget = widget
        self.label = label
        self.format = format
        self.f = f
        self.disabled = 0
        if hasattr(widget, "callbackDeposit"):
            widget.callbackDeposit.append(self)

    def __call__(self, value):
        if self.disabled or value is None:
            return
        shown = self.f(value) if self.f else value
        self.label.setText(self.format % shown)
class FunctionCallback:
    """Callback that invokes one user function or a list of them,
    optionally passing an ``id`` and/or the originating ``widget`` as
    keyword arguments."""

    def __init__(self, master, f, widget=None, id=None, getwidget=False):
        self.master = master
        self.widget = widget
        self.f = f
        self.id = id
        self.getwidget = getwidget
        self.disabled = 0
        # register on the master so the callback object stays alive
        if hasattr(master, "callbackDeposit"):
            master.callbackDeposit.append(self)

    def __call__(self, *value):
        if self.disabled or value is None:
            return
        kwds = {}
        if self.id is not None:
            kwds['id'] = self.id
        if self.getwidget:
            kwds['widget'] = self.widget
        targets = self.f if isinstance(self.f, list) else [self.f]
        for func in targets:
            func(**kwds)
class CallBackListBox:
    """Callback that mirrors the list box's selection into the master's
    attribute named by ``control.ogValue`` (a ControlledList of selected
    indices)."""

    def __init__(self, control, widget):
        self.control = control
        self.widget = widget
        # counter; when non-zero the callback is suppressed
        self.disabled = 0

    def __call__(self, *_):  # triggered by selectionChange()
        if not self.disabled and self.control.ogValue is not None:
            clist = getdeepattr(self.widget, self.control.ogValue)
            # skip the overloaded method to avoid a cycle
            list.__delitem__(clist, slice(0, len(clist)))
            control = self.control
            # rebuild the list of selected indices in place
            for i in range(control.count()):
                if control.item(i).isSelected():
                    list.append(clist, i)
            # reassign so the master's __setattr__ machinery fires
            self.widget.__setattr__(self.control.ogValue, clist)
class CallBackRadioButton:
    """Callback that stores the index of the checked radio button into
    the master's attribute named by ``control.ogValue``."""

    def __init__(self, control, widget):
        self.control = control
        self.widget = widget
        self.disabled = False

    def __call__(self, *_):  # triggered by toggled()
        if self.disabled or self.control.ogValue is None:
            return
        checked = [button.isChecked() for button in self.control.buttons]
        # index of the first checked button (True == 1)
        setattr(self.widget, self.control.ogValue, checked.index(1))
class CallBackLabeledSlider:
    """Callback that maps the slider's position through *lookup* and
    stores the result into the master's attribute (``control.ogValue``)."""

    def __init__(self, control, widget, lookup):
        self.control = control
        self.widget = widget
        self.lookup = lookup
        self.disabled = False

    def __call__(self, *_):
        if self.disabled or self.control.ogValue is None:
            return
        setattr(self.widget, self.control.ogValue,
                self.lookup[self.control.value()])
class ControlledCallFront:
    """Base class for "front" callbacks that push an attribute value into
    a Qt control; subclasses implement :obj:`action`."""

    def __init__(self, control):
        self.control = control
        self.disabled = 0

    def action(self, *_):
        """Update the control; overridden in subclasses."""

    def __call__(self, *args):
        if self.disabled:
            return
        opposite = getattr(self, "opposite", None)
        if not opposite:
            self.action(*args)
            return
        # suppress the "back" callbacks while the control is updated,
        # so the change does not echo back into the attribute
        try:
            for op in opposite:
                op.disabled += 1
            self.action(*args)
        finally:
            for op in opposite:
                op.disabled -= 1
class CallFrontSpin(ControlledCallFront):
    """Pushes an integer attribute value into a spin box."""

    def action(self, value):
        if value is None:
            return
        self.control.setValue(value)
class CallFrontDoubleSpin(ControlledCallFront):
    """Pushes a float attribute value into a double spin box."""

    def action(self, value):
        if value is None:
            return
        self.control.setValue(value)
class CallFrontCheckBox(ControlledCallFront):
    """Pushes a 0/1/2 attribute value into a check box as
    Unchecked/Checked/PartiallyChecked."""

    def action(self, value):
        if value is None:
            return
        states = [Qt.Unchecked, Qt.Checked, Qt.PartiallyChecked]
        self.control.setCheckState(states[value])
class CallFrontButton(ControlledCallFront):
    """Pushes a truthy attribute value into a checkable button."""

    def action(self, value):
        if value is None:
            return
        self.control.setChecked(bool(value))
class CallFrontComboBox(ControlledCallFront):
    """Pushes an attribute value into a combo box: either by matching the
    item texts (when ``valType`` is given) or by index."""

    def __init__(self, control, valType=None, control2attributeDict=None):
        """
        :param control: the combo box
        :param valType: conversion applied to item texts before comparing
            with the value; when None, the value is treated as an index
        :param control2attributeDict: text->attribute mapping; inverted
            here so attribute values can be mapped back to item texts
        """
        super().__init__(control)
        self.valType = valType
        if control2attributeDict is None:
            self.attribute2controlDict = {}
        else:
            self.attribute2controlDict = \
                {y: x for x, y in control2attributeDict.items()}

    def action(self, value):
        if value is not None:
            value = self.attribute2controlDict.get(value, value)
            if self.valType:
                # find the item whose (converted) text equals the value
                for i in range(self.control.count()):
                    if self.valType(str(self.control.itemText(i))) == value:
                        self.control.setCurrentIndex(i)
                        return
                # no match: report the value and the available items
                # (comma-separated, last item followed by a period)
                values = ""
                for i in range(self.control.count()):
                    values += str(self.control.itemText(i)) + \
                              (i < self.control.count() - 1 and ", " or ".")
                print("unable to set %s to value '%s'. Possible values are %s"
                      % (self.control, value, values))
            else:
                # value is a plain index; out-of-range values are ignored
                if value < self.control.count():
                    self.control.setCurrentIndex(value)
class CallFrontHSlider(ControlledCallFront):
    """Pushes an attribute value into a horizontal slider."""

    def action(self, value):
        if value is None:
            return
        self.control.setValue(value)
class CallFrontLabeledSlider(ControlledCallFront):
    """Pushes an attribute value into a slider whose positions correspond
    to the entries of *lookup*."""

    def __init__(self, control, lookup):
        super().__init__(control)
        self.lookup = lookup

    def action(self, value):
        if value is None:
            return
        # the slider position is the value's index in the lookup table
        self.control.setValue(self.lookup.index(value))
class CallFrontLogSlider(ControlledCallFront):
    """Pushes an attribute value into a slider on a log10 scale."""

    def action(self, value):
        if value is None:
            return
        if value < 1e-30:
            # log10 would blow up (or fail) near/below zero
            print("unable to set %s to %s (value too small)" %
                  (self.control, value))
            return
        self.control.setValue(math.log10(value))
class CallFrontLineEdit(ControlledCallFront):
    """Pushes an attribute value into a line edit as its text."""

    def action(self, value):
        text = str(value)
        self.control.setText(text)
class CallFrontRadioButtons(ControlledCallFront):
    """Checks the radio button at the given index; out-of-range indices
    fall back to the first button."""

    def action(self, value):
        if not 0 <= value < len(self.control.buttons):
            value = 0
        self.control.buttons[value].setChecked(1)
class CallFrontListBox(ControlledCallFront):
    """Pushes a list of selected indices into the connected list box."""

    def action(self, value):
        if value is not None:
            if not isinstance(value, ControlledList):
                # replace a plain list with a ControlledList bound to the
                # control, so later mutations keep the selection in sync
                setattr(self.control.ogMaster, self.control.ogValue,
                        ControlledList(value, self.control))
            # select exactly the indices contained in `value`
            for i in range(self.control.count()):
                shouldBe = i in value
                if shouldBe != self.control.item(i).isSelected():
                    self.control.item(i).setSelected(shouldBe)
class CallFrontListBoxLabels(ControlledCallFront):
    """Rebuilds the list box's items from a list of labels; each label
    may be a plain string, a (text, icon) tuple, or a Variable."""

    unknownType = None

    def action(self, values):
        self.control.clear()
        if values:
            for value in values:
                if isinstance(value, tuple):
                    text, icon = value
                    if isinstance(icon, int):
                        # integer icons index the shared attribute icon dict
                        item = QtGui.QListWidgetItem(attributeIconDict[icon], text)
                    else:
                        item = QtGui.QListWidgetItem(icon, text)
                elif isinstance(value, Variable):
                    item = QtGui.QListWidgetItem(*attributeItem(value))
                else:
                    item = QtGui.QListWidgetItem(value)
                # keep the original label reachable from the item
                item.setData(Qt.UserRole, value)
                self.control.addItem(item)
class CallFrontLabel:
    """Re-renders a label whose text is a %-format interpolated over the
    master's instance dictionary."""

    def __init__(self, control, label, master):
        self.control = control
        self.label = label
        self.master = master

    def __call__(self, *_):
        text = self.label % self.master.__dict__
        self.control.setText(text)
# Modes for Disabler: either enable/disable the dependent widgets
# (DISABLER) or show/hide them (HIDER); see Disabler.__call__.
DISABLER = 1
HIDER = 2
class Disabler:
    """Callable that enables/disables (or shows/hides, depending on
    *type*) the widgets listed in ``widget.disables`` according to a
    boolean master attribute or an explicitly passed value."""

    def __init__(self, widget, master, valueName, propagateState=True,
                 type=DISABLER):
        """
        :param widget: owner; its ``disables`` list names the dependents
        :param master: widget holding the controlling attribute
        :param valueName: name of the controlling attribute
        :param propagateState: when True and the owner itself is
            disabled, all dependents are forced disabled/hidden
        :param type: DISABLER (enable/disable) or HIDER (show/hide)
        """
        self.widget = widget
        self.master = master
        self.valueName = valueName
        self.propagateState = propagateState
        self.type = type

    def __call__(self, *value):
        currState = self.widget.isEnabled()
        if currState or not self.propagateState:
            # use the explicit value when given, the attribute otherwise
            if len(value):
                disabled = not value[0]
            else:
                disabled = not getdeepattr(self.master, self.valueName)
        else:
            disabled = 1
        for w in self.widget.disables:
            if type(w) is tuple:
                # (flag, widget) tuples: an int flag selects w[1]; a flag
                # of -1 inverts the state.
                # NOTE(review): the inversion mutates `disabled` and thus
                # also affects all *subsequent* entries — confirm this is
                # intended before changing it.
                if isinstance(w[0], int):
                    i = 1
                    if w[0] == -1:
                        disabled = not disabled
                else:
                    i = 0
                if self.type == DISABLER:
                    w[i].setDisabled(disabled)
                elif self.type == HIDER:
                    if disabled:
                        w[i].hide()
                    else:
                        w[i].show()
                # cascade into nested hiders/disablers
                if hasattr(w[i], "makeConsistent"):
                    w[i].makeConsistent()
            else:
                if self.type == DISABLER:
                    w.setDisabled(disabled)
                elif self.type == HIDER:
                    if disabled:
                        w.hide()
                    else:
                        w.show()
class tableItem(QtGui.QTableWidgetItem):
    """QTableWidgetItem that configures itself (icon, flags, background,
    data) and inserts itself into *table* at (x, y) on construction."""

    def __init__(self, table, x, y, text, editType=None, backColor=None,
                 icon=None, type=QtGui.QTableWidgetItem.Type):
        super().__init__(type)
        if icon:
            self.setIcon(QtGui.QIcon(icon))
        if editType is not None:
            self.setFlags(editType)
        else:
            # default: enabled, checkable, selectable — but not editable
            self.setFlags(Qt.ItemIsEnabled | Qt.ItemIsUserCheckable |
                          Qt.ItemIsSelectable)
        if backColor is not None:
            self.setBackground(QtGui.QBrush(backColor))
        # we add it this way so that text can also be int and sorting will be
        # done properly (as integers and not as text)
        self.setData(Qt.DisplayRole, text)
        table.setItem(x, y, self)
# Item-data roles used by the item delegates and models below.
TableValueRole = next(OrangeUserRole)  # Role to retrieve orange.Value
TableClassValueRole = next(OrangeUserRole)  # Retrieve class value for the row
TableDistribution = next(OrangeUserRole)  # Retrieve distribution of the column
TableVariable = next(OrangeUserRole)  # Role to retrieve the column's variable
BarRatioRole = next(OrangeUserRole)  # Ratio for drawing distribution bars
BarBrushRole = next(OrangeUserRole)  # Brush for distribution bar
SortOrderRole = next(OrangeUserRole)  # Used for sorting
class TableBarItem(QtGui.QItemDelegate):
    """Item delegate that draws the cell text with a horizontal
    distribution bar (ratio taken from ``BarRole``) under it."""

    BarRole = next(OrangeUserRole)
    ColorRole = next(OrangeUserRole)

    def __init__(self, parent=None, color=QtGui.QColor(255, 170, 127),
                 color_schema=None):
        """
        :param QObject parent: Parent object.
        :param QColor color: Default color of the distribution bar.
        :param color_schema:
            If not None it must be an instance of
            :class:`OWColorPalette.ColorPaletteGenerator` (note: this
            parameter, if set, overrides the ``color``)
        :type color_schema: :class:`OWColorPalette.ColorPaletteGenerator`
        """
        super().__init__(parent)
        self.color = color
        self.color_schema = color_schema

    def paint(self, painter, option, index):
        painter.save()
        self.drawBackground(painter, option, index)
        ratio = index.data(TableBarItem.BarRole)
        if isinstance(ratio, float):
            # NaN means "no bar for this cell"
            if math.isnan(ratio):
                ratio = None
        color = self.color
        if self.color_schema is not None and ratio is not None:
            # color the bar by the row's (discrete) class value
            class_ = index.data(TableClassValueRole)
            if isinstance(class_, Orange.data.Value) and \
                    class_.variable.is_discrete and \
                    not math.isnan(class_):
                color = self.color_schema[int(class_)]
        if ratio is not None:
            # draw the bar as a thick line along the cell's bottom edge
            painter.save()
            painter.setPen(QtGui.QPen(QtGui.QBrush(color), 5,
                                      Qt.SolidLine, Qt.RoundCap))
            rect = option.rect.adjusted(3, 0, -3, -5)
            x, y = rect.x(), rect.y() + rect.height()
            painter.drawLine(x, y, x + rect.width() * ratio, y)
            painter.restore()
            # raise the text slightly above the bar
            text_rect = option.rect.adjusted(0, 0, 0, -3)
        else:
            text_rect = option.rect
        text = index.data(Qt.DisplayRole)
        self.drawDisplay(painter, option, text_rect, text)
        painter.restore()
class BarItemDelegate(QtGui.QStyledItemDelegate):
    """Item delegate that renders a float cell value as a filled bar,
    scaled from ``scale`` = (min, max) to the cell width."""

    def __init__(self, parent, brush=QtGui.QBrush(QtGui.QColor(255, 170, 127)),
                 scale=(0.0, 1.0)):
        super().__init__(parent)
        self.brush = brush
        self.scale = scale

    def paint(self, painter, option, index):
        if option.widget is not None:
            style = option.widget.style()
        else:
            style = QtGui.QApplication.style()
        # draw the standard row/item background first
        style.drawPrimitive(
            QtGui.QStyle.PE_PanelItemViewRow, option, painter,
            option.widget)
        style.drawPrimitive(
            QtGui.QStyle.PE_PanelItemViewItem, option, painter,
            option.widget)
        rect = option.rect
        val = index.data(Qt.DisplayRole)
        if isinstance(val, float):
            # normalize into [0, 1] by the configured scale
            minv, maxv = self.scale
            val = (val - minv) / (maxv - minv)
            painter.save()
            if option.state & QtGui.QStyle.State_Selected:
                painter.setOpacity(0.75)
            painter.setBrush(self.brush)
            # bar width is proportional to the normalized value
            painter.drawRect(
                rect.adjusted(1, 1, - rect.width() * (1.0 - val) - 2, -2))
            painter.restore()
class IndicatorItemDelegate(QtGui.QStyledItemDelegate):
    """Item delegate that paints a small black dot in the cell's center
    when the item's data under ``role`` is truthy."""

    IndicatorRole = next(OrangeUserRole)

    def __init__(self, parent, role=IndicatorRole, indicatorSize=2):
        super().__init__(parent)
        self.role = role
        self.indicatorSize = indicatorSize  # ellipse radius in pixels

    def paint(self, painter, option, index):
        super().paint(painter, option, index)
        rect = option.rect
        indicator = index.data(self.role)
        if indicator:
            painter.save()
            painter.setRenderHints(QtGui.QPainter.Antialiasing)
            painter.setBrush(QtGui.QBrush(Qt.black))
            painter.drawEllipse(rect.center(),
                                self.indicatorSize, self.indicatorSize)
            painter.restore()
class LinkStyledItemDelegate(QtGui.QStyledItemDelegate):
    """Item delegate that renders items carrying a string under
    ``LinkRole`` as clickable hyperlinks (blue text, pointing-hand
    cursor, opens the link in a web browser on click)."""

    LinkRole = next(OrangeUserRole)

    def __init__(self, parent):
        super().__init__(parent)
        # (index, position) of the last mouse press, used to distinguish
        # a click from a drag on release
        self.mousePressState = QtCore.QModelIndex(), QtCore.QPoint()
        parent.entered.connect(self.onEntered)

    def sizeHint(self, option, index):
        size = super().sizeHint(option, index)
        # enforce a minimum row height of 20 px
        return QtCore.QSize(size.width(), max(size.height(), 20))

    def linkRect(self, option, index):
        """Return the bounding rectangle of the (elided) link text within
        the cell, used for hit testing."""
        if option.widget is not None:
            style = option.widget.style()
        else:
            style = QtGui.QApplication.style()
        text = self.displayText(index.data(Qt.DisplayRole),
                                QtCore.QLocale.system())
        self.initStyleOption(option, index)
        textRect = style.subElementRect(
            QtGui.QStyle.SE_ItemViewItemText, option, option.widget)
        if not textRect.isValid():
            textRect = option.rect
        margin = style.pixelMetric(
            QtGui.QStyle.PM_FocusFrameHMargin, option, option.widget) + 1
        textRect = textRect.adjusted(margin, 0, -margin, 0)
        font = index.data(Qt.FontRole)
        if not isinstance(font, QtGui.QFont):
            font = option.font
        metrics = QtGui.QFontMetrics(font)
        elideText = metrics.elidedText(text, option.textElideMode,
                                       textRect.width())
        return metrics.boundingRect(textRect, option.displayAlignment,
                                    elideText)

    def editorEvent(self, event, model, option, index):
        # Track press/release pairs over the link text; treat a release
        # within 5 px of the press on the same index as a click.
        if event.type() == QtCore.QEvent.MouseButtonPress and \
                self.linkRect(option, index).contains(event.pos()):
            self.mousePressState = (QtCore.QPersistentModelIndex(index),
                                    QtCore.QPoint(event.pos()))
        elif event.type() == QtCore.QEvent.MouseButtonRelease:
            link = index.data(LinkRole)
            if not isinstance(link, str):
                link = None
            pressedIndex, pressPos = self.mousePressState
            if pressedIndex == index and \
                    (pressPos - event.pos()).manhattanLength() < 5 and \
                    link is not None:
                import webbrowser
                webbrowser.open(link)
            # reset the press state either way
            self.mousePressState = QtCore.QModelIndex(), event.pos()
        elif event.type() == QtCore.QEvent.MouseMove:
            # show a pointing hand while hovering over the link text
            link = index.data(LinkRole)
            if not isinstance(link, str):
                link = None
            if link is not None and \
                    self.linkRect(option, index).contains(event.pos()):
                self.parent().viewport().setCursor(Qt.PointingHandCursor)
            else:
                self.parent().viewport().setCursor(Qt.ArrowCursor)
        return super().editorEvent(event, model, option, index)

    def onEntered(self, index):
        # restore the normal cursor when entering a non-link item
        link = index.data(LinkRole)
        if not isinstance(link, str):
            link = None
        if link is None:
            self.parent().viewport().setCursor(Qt.ArrowCursor)

    def paint(self, painter, option, index):
        link = index.data(LinkRole)
        if not isinstance(link, str):
            link = None
        if link is not None:
            # custom painting: standard background, then blue link text
            if option.widget is not None:
                style = option.widget.style()
            else:
                style = QtGui.QApplication.style()
            style.drawPrimitive(
                QtGui.QStyle.PE_PanelItemViewRow, option, painter,
                option.widget)
            style.drawPrimitive(
                QtGui.QStyle.PE_PanelItemViewItem, option, painter,
                option.widget)
            text = self.displayText(index.data(Qt.DisplayRole),
                                    QtCore.QLocale.system())
            textRect = style.subElementRect(
                QtGui.QStyle.SE_ItemViewItemText, option, option.widget)
            if not textRect.isValid():
                textRect = option.rect
            margin = style.pixelMetric(
                QtGui.QStyle.PM_FocusFrameHMargin, option, option.widget) + 1
            textRect = textRect.adjusted(margin, 0, -margin, 0)
            elideText = QtGui.QFontMetrics(option.font).elidedText(
                text, option.textElideMode, textRect.width())
            painter.save()
            font = index.data(Qt.FontRole)
            if not isinstance(font, QtGui.QFont):
                font = option.font
            painter.setFont(font)
            painter.setPen(QtGui.QPen(Qt.blue))
            painter.drawText(textRect, option.displayAlignment, elideText)
            painter.restore()
        else:
            super().paint(painter, option, index)
# Module-level alias for the role under which link delegates store URLs.
LinkRole = LinkStyledItemDelegate.LinkRole
class ColoredBarItemDelegate(QtGui.QStyledItemDelegate):
""" Item delegate that can also draws a distribution bar
"""
def __init__(self, parent=None, decimals=3, color=Qt.red):
super().__init__(parent)
self.decimals = decimals
self.float_fmt = "%%.%if" % decimals
self.color = QtGui.QColor(color)
def displayText(self, value, locale):
if isinstance(value, float):
return self.float_fmt % value
elif isinstance(value, str):
return value
elif value is None:
return "NA"
else:
return str(value)
def sizeHint(self, option, index):
font = self.get_font(option, index)
metrics = QtGui.QFontMetrics(font)
height = metrics.lineSpacing() + 8 # 4 pixel margin
width = metrics.width(self.displayText(index.data(Qt.DisplayRole),
QtCore.QLocale())) + 8
return QtCore.QSize(width, height)
def paint(self, painter, option, index):
self.initStyleOption(option, index)
text = self.displayText(index.data(Qt.DisplayRole), QtCore.QLocale())
ratio, have_ratio = self.get_bar_ratio(option, index)
rect = option.rect
if have_ratio:
# The text is raised 3 pixels above the bar.
# TODO: Style dependent margins?
text_rect = rect.adjusted(4, 1, -4, -4)
else:
text_rect = rect.adjusted(4, 4, -4, -4)
painter.save()
font = self.get_font(option, index)
painter.setFont(font)
if option.widget is not None:
style = option.widget.style()
else:
style = QtGui.QApplication.style()
style.drawPrimitive(
QtGui.QStyle.PE_PanelItemViewRow, option, painter,
option.widget)
style.drawPrimitive(
QtGui.QStyle.PE_PanelItemViewItem, option, painter,
option.widget)
# TODO: Check ForegroundRole.
if option.state & QtGui.QStyle.State_Selected:
color = option.palette.highlightedText().color()
else:
color = option.palette.text().color()
painter.setPen(QtGui.QPen(color))
align = self.get_text_align(option, index)
metrics = QtGui.QFontMetrics(font)
elide_text = metrics.elidedText(
text, option.textElideMode, text_rect.width())
painter.drawText(text_rect, align, elide_text)
painter.setRenderHint(QtGui.QPainter.Antialiasing, True)
if have_ratio:
brush = self.get_bar_brush(option, index)
painter.setBrush(brush)
painter.setPen(QtGui.QPen(brush, 1))
bar_rect = QtCore.QRect(text_rect)
bar_rect.setTop(bar_rect.bottom() - 1)
bar_rect.setBottom(bar_rect.bottom() + 1)
w = text_rect.width()
bar_rect.setWidth(max(0, min(w * ratio, w)))
painter.drawRoundedRect(bar_rect, 2, 2)
painter.restore()
def get_font(self, option, index):
font = index.data(Qt.FontRole)
if not isinstance(font, QtGui.QFont):
font = option.font
return font
def get_text_align(self, _, index):
align = index.data(Qt.TextAlignmentRole)
if not isinstance(align, int):
align = Qt.AlignLeft | Qt.AlignVCenter
return align
def get_bar_ratio(self, _, index):
    """Return (ratio, valid): valid is True when BarRatioRole is a float."""
    value = index.data(BarRatioRole)
    have_ratio = isinstance(value, float)
    return value, have_ratio
def get_bar_brush(self, _, index):
    """Return a QBrush for the bar from BarBrushRole, else self.color."""
    value = index.data(BarBrushRole)
    if not isinstance(value, (QtGui.QColor, QtGui.QBrush)):
        value = self.color
    return QtGui.QBrush(value)
class VerticalLabel(QtGui.QLabel):
    """QLabel that draws its text rotated 90 degrees counter-clockwise
    (reading bottom-to-top), e.g. for a vertical side label."""

    def __init__(self, text, parent=None):
        super().__init__(text, parent)
        self.setSizePolicy(QtGui.QSizePolicy.Preferred,
                           QtGui.QSizePolicy.MinimumExpanding)
        self.setMaximumWidth(self.sizeHint().width() + 2)
        self.setMargin(4)

    def sizeHint(self):
        """Bounding size of the text with width/height swapped to account
        for the rotation; margin added to both dimensions."""
        metrics = QtGui.QFontMetrics(self.font())
        rect = metrics.boundingRect(self.text())
        size = QtCore.QSize(rect.height() + self.margin(),
                            rect.width() + self.margin())
        return size

    def setGeometry(self, rect):
        # No extra behavior; kept for symmetry / future adjustment.
        super().setGeometry(rect)

    def paintEvent(self, event):
        """Rotate the painter about the widget's bottom-left corner and
        draw the text into a (height, width)-swapped rectangle."""
        painter = QtGui.QPainter(self)
        rect = self.geometry()
        text_rect = QtCore.QRect(0, 0, rect.width(), rect.height())
        painter.translate(text_rect.bottomLeft())
        painter.rotate(-90)
        painter.drawText(
            QtCore.QRect(QtCore.QPoint(0, 0),
                         QtCore.QSize(rect.height(), rect.width())),
            Qt.AlignCenter, self.text())
        painter.end()
class VerticalItemDelegate(QtGui.QStyledItemDelegate):
    """Item delegate that renders items rotated 90 degrees
    counter-clockwise (vertical text), e.g. for vertical headers."""

    # Extra text top/bottom margin.
    Margin = 6

    def sizeHint(self, option, index):
        # Swap width/height of the horizontal hint and add the margins.
        sh = super().sizeHint(option, index)
        return QtCore.QSize(sh.height() + self.Margin * 2, sh.width())

    def paint(self, painter, option, index):
        """Paint the row background, then draw the item text rotated by
        -90 degrees inside a width/height-swapped rectangle."""
        option = QtGui.QStyleOptionViewItemV4(option)
        self.initStyleOption(option, index)
        if not option.text:
            return
        if option.widget is not None:
            style = option.widget.style()
        else:
            style = QtGui.QApplication.style()
        style.drawPrimitive(
            QtGui.QStyle.PE_PanelItemViewRow, option, painter,
            option.widget)
        cell_rect = option.rect
        # Work in the rotated coordinate system: width/height swapped.
        itemrect = QtCore.QRect(0, 0, cell_rect.height(), cell_rect.width())
        opt = QtGui.QStyleOptionViewItemV4(option)
        opt.rect = itemrect
        textrect = style.subElementRect(
            QtGui.QStyle.SE_ItemViewItemText, opt, opt.widget)
        painter.save()
        painter.setFont(option.font)
        if option.displayAlignment & (Qt.AlignTop | Qt.AlignBottom):
            # Nudge top/bottom-aligned text toward the edge by at most
            # Margin pixels.
            brect = painter.boundingRect(
                textrect, option.displayAlignment, option.text)
            diff = textrect.height() - brect.height()
            # NOTE(review): under Python 3 `diff / 2` is a float — confirm
            # the bound PyQt accepts a float in QRect.translate.
            offset = max(min(diff / 2, self.Margin), 0)
            if option.displayAlignment & Qt.AlignBottom:
                offset = -offset
            textrect.translate(0, offset)
        painter.translate(option.rect.x(), option.rect.bottom())
        painter.rotate(-90)
        painter.drawText(textrect, option.displayAlignment, option.text)
        painter.restore()
class ProgressBar:
    """Drive a widget's progress-bar protocol over a known number of
    iterations (progressBarInit/progressBarSet/progressBarFinished)."""

    def __init__(self, widget, iterations):
        self.iter = iterations
        self.widget = widget
        self.count = 0
        self.widget.progressBarInit()

    def advance(self, count=1):
        """Advance by *count* steps and push the percentage to the widget."""
        self.count += count
        # max(1, ...) guards against division by zero for empty tasks.
        percent = int(self.count * 100 / max(1, self.iter))
        self.widget.progressBarSet(percent)

    def finish(self):
        """Tell the widget the task has completed."""
        self.widget.progressBarFinished()
def tabWidget(widget):
    """Create a QTabWidget as a child of *widget*, adding it to the
    widget's layout when one exists.  Returns the tab widget."""
    tabs = QtGui.QTabWidget(widget)
    layout = widget.layout()
    if layout is not None:
        layout.addWidget(tabs)
    return tabs
def createTabPage(tabWidget, name, widgetToAdd=None, canScroll=False):
    """Append a page titled *name* to *tabWidget*.

    When *widgetToAdd* is None a fresh widget box is created for the
    page; with *canScroll* the page content is wrapped in a QScrollArea.
    Returns the widget placed on the page.
    """
    if widgetToAdd is None:
        widgetToAdd = widgetBox(tabWidget, addToLayout=0, margin=4)
    if not canScroll:
        tabWidget.addTab(widgetToAdd, name)
        return widgetToAdd
    scroll = QtGui.QScrollArea()
    tabWidget.addTab(scroll, name)
    scroll.setWidget(widgetToAdd)
    scroll.setWidgetResizable(1)
    scroll.setHorizontalScrollBarPolicy(Qt.ScrollBarAlwaysOff)
    scroll.setVerticalScrollBarPolicy(Qt.ScrollBarAlwaysOn)
    return widgetToAdd
def table(widget, rows=0, columns=0, selectionMode=-1, addToLayout=True):
    """Create a QTableWidget inside *widget* with per-pixel horizontal
    scrolling and movable header sections.  Returns the table."""
    view = QtGui.QTableWidget(rows, columns, widget)
    if widget and addToLayout and widget.layout() is not None:
        widget.layout().addWidget(view)
    if selectionMode != -1:
        view.setSelectionMode(selectionMode)
    view.setHorizontalScrollMode(QtGui.QTableWidget.ScrollPerPixel)
    view.horizontalHeader().setMovable(True)
    return view
class VisibleHeaderSectionContextEventFilter(QtCore.QObject):
    """Event filter that pops up a checkable context menu for toggling
    the visibility of a header view's sections."""

    def __init__(self, parent, itemView=None):
        super().__init__(parent)
        self.itemView = itemView

    def eventFilter(self, view, event):
        """Show a 'Visible headers' menu on context-menu events; other
        events are passed through (returns False)."""
        if not isinstance(event, QtGui.QContextMenuEvent):
            return False
        model = view.model()
        menu = QtGui.QMenu("Visible headers", view)
        for section in range(view.count()):
            hidden = view.isSectionHidden(section)
            title = model.headerData(section, view.orientation(),
                                     Qt.DisplayRole)
            action = QtGui.QAction(title, menu)
            action.setCheckable(True)
            action.setChecked(not hidden)
            menu.addAction(action)

            # Bind the section index as a default argument so each
            # action toggles its own section.
            def toggle(visible, section=section):
                view.setSectionHidden(section, not visible)
                if not visible:
                    return
                if self.itemView:
                    self.itemView.resizeColumnToContents(section)
                else:
                    view.resizeSection(
                        section, max(view.sectionSizeHint(section), 10))

            action.toggled.connect(toggle)
        menu.exec_(event.globalPos())
        return True
def checkButtonOffsetHint(button, style=None):
    """Pixel offset of a check/radio button's label from its left edge:
    indicator width plus label spacing, with per-style corrections."""
    opt = QtGui.QStyleOptionButton()
    opt.initFrom(button)
    if style is None:
        style = button.style()
    if isinstance(button, QtGui.QCheckBox):
        metrics = (QtGui.QStyle.PM_CheckBoxLabelSpacing,
                   QtGui.QStyle.PM_IndicatorWidth)
    else:
        metrics = (QtGui.QStyle.PM_RadioButtonLabelSpacing,
                   QtGui.QStyle.PM_ExclusiveIndicatorWidth)
    total = sum(style.pixelMetric(pm, opt, button) for pm in metrics)
    # TODO: add other styles (Maybe load corrections from .cfg file?)
    corrections = {"macintosh (aqua)": -2, "macintosh(aqua)": -2,
                   "plastique": 1, "cde": 1, "motif": 1}
    app_style_name = QtGui.qApp.style().objectName().lower()
    return total + corrections.get(app_style_name, 0)
def toolButtonSizeHint(button=None, style=None):
    """Preferred size (one side) for a small tool button: small icon
    size plus the button margin, taken from *style*, the button's style,
    or the application style."""
    if style is None:
        style = QtGui.qApp.style() if button is None else button.style()
    return (style.pixelMetric(QtGui.QStyle.PM_SmallIconSize) +
            style.pixelMetric(QtGui.QStyle.PM_ButtonMargin))
class FloatSlider(QtGui.QSlider):
    """QSlider over a float range: float values are mapped to integer
    slider positions in units of ``step``."""

    # Emitted with the float value whenever the integer position changes.
    valueChangedFloat = Signal(float)

    def __init__(self, orientation, min_value, max_value, step, parent=None):
        super().__init__(orientation, parent)
        self.setScale(min_value, max_value, step)
        self.valueChanged[int].connect(self.sendValue)

    def update(self):
        """Recompute the integer min/max positions from the float scale.

        NOTE(review): this shadows QWidget.update(); confirm the repaint
        entry point is not needed before relying on that name.
        """
        self.setSingleStep(1)
        if self.min_value != self.max_value:
            self.setEnabled(True)
            self.setMinimum(int(self.min_value / self.step))
            self.setMaximum(int(self.max_value / self.step))
        else:
            self.setEnabled(False)

    def sendValue(self, slider_value):
        """Re-emit an integer position change as a clamped float value."""
        value = min(max(slider_value * self.step, self.min_value),
                    self.max_value)
        self.valueChangedFloat.emit(value)

    def setValue(self, value):
        # Float value -> integer position via floor division.
        # NOTE(review): `value // self.step` is a float when step is a
        # float — confirm the bound PyQt accepts it in QSlider.setValue.
        super().setValue(value // self.step)

    def setScale(self, minValue, maxValue, step=0):
        """Set the float range and step; an empty/inverted range is a no-op."""
        if minValue >= maxValue:
            ## It would be more logical to disable the slider in this case
            ## (self.setEnabled(False))
            ## However, we do nothing to keep consistency with Qwt
            # TODO If it's related to Qwt, remove it
            return
        if step <= 0 or step > (maxValue - minValue):
            if isinstance(maxValue, int) and isinstance(minValue, int):
                step = 1
            else:
                # NOTE(review): minValue - maxValue is negative here, so
                # this yields a NEGATIVE step for float ranges — looks
                # like it should be (maxValue - minValue) / 100; verify.
                step = float(minValue - maxValue) / 100.0
        self.min_value = float(minValue)
        self.max_value = float(maxValue)
        self.step = step
        self.update()

    def setRange(self, minValue, maxValue, step=1.0):
        # For compatibility with qwtSlider
        # TODO If it's related to Qwt, remove it
        self.setScale(minValue, maxValue, step)
|
import sys, os
sys.path.insert(0, os.path.abspath('..'))
extensions = ['sphinx.ext.autodoc', 'sphinx.ext.intersphinx', 'sphinx.ext.todo', 'sphinx.ext.coverage', 'sphinx.ext.pngmath']
templates_path = ['_templates']
source_suffix = '.rst'
master_doc = 'index'
project = 'CodeBug Tether'
copyright = '2015, OpenLX'
version = '0.8.5'
release = '0.8.5'
exclude_patterns = ['_build']
pygments_style = 'sphinx'
html_theme = 'default'
html_static_path = ['_static']
htmlhelp_basename = 'codebug_tetherdoc'
latex_elements = {
}
latex_documents = [
('index', 'codebug_tether.tex', 'CodeBug Tether Documentation',
'Thomas Preston', 'manual'),
]
man_pages = [
('index', 'codebug_tether', 'CodeBug Tether Documentation',
['Thomas Preston'], 1)
]
texinfo_documents = [
('index', 'codebug_tether', 'CodeBug Tether Documentation',
'Thomas Preston', 'codebug_tether', 'One line description of project.',
'Miscellaneous'),
]
intersphinx_mapping = {'http://docs.python.org/': None}
todo_include_todos = True
rst_epilog = """
.. |firmwaredownload| raw:: html
<a href="https://github.com/codebugtools/codebug_tether/blob/master/firmware/codebug_tether_v{version}.cbg?raw=true">download</a>
""".format(version=version)
|
'''
Nibblegen: A script to convert LaTex text to html usable in Nibbleblog Forked from the latex2wp project (the licenceing for which is below).
Copyright (C) 2014 Theodore Jones
This program is free software: you can redistribute it and/or modify
it under the terms of the GNU General Public License as published by
the Free Software Foundation, either version 3 of the License, or
(at your option) any later version.
This program is distributed in the hope that it will be useful,
but WITHOUT ANY WARRANTY; without even the implied warranty of
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
GNU General Public License for more details.
You should have received a copy of the GNU General Public License
along with this program. If not, see <http://www.gnu.org/licenses/>.
'''
"""
Copyright 2009 Luca Trevisan
Additional contributors: Radu Grigore
LaTeX2WP version 0.6.2
This file is part of LaTeX2WP, a program that converts
a LaTeX document into a format that is ready to be
copied and pasted into WordPress.
You are free to redistribute and/or modify LaTeX2WP under the
terms of the GNU General Public License (GPL), version 3
or (at your option) any later version.
I hope you will find LaTeX2WP useful, but be advised that
it comes WITHOUT ANY WARRANTY; without even the implied warranty
of MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
GPL for more details.
You should have received a copy of the GNU General Public
License along with LaTeX2WP. If you can't find it,
see <http://www.gnu.org/licenses/>.
"""
import re
from sys import argv
from latex2wpstyle import *
# Counters for theorem-like environments (keyed by the printable type
# name from latex2wpstyle's T table) plus sections/subsections/equations.
count = dict()
for thm in ThmEnvs:
    count[T[thm]] = 0
count["section"] = count["subsection"] = count["equation"] = 0
# Maps \label names to the section/equation/theorem number they refer to.
ref = {}
# Suffix appended to every formula (WordPress "foreground color" syntax).
endlatex = "&fg=" + textcolor
# In HTML mode the end-of-proof marker is suppressed.
if HTML: endproof = ""
# Name of the theorem-like environment currently open ("" when outside).
inthm = ""
"""
At the beginning, the commands \$, \% and \& are temporarily
replaced by placeholders (the second entry in each 4-tuple).
At the end, The placeholders in text mode are replaced by
the third entry, and the placeholders in math mode are
replaced by the fourth entry.
"""
esc = [["\\$","_dollar_","$","\\$"],
["\\%","_percent_","%","\\%"],
["\\&","_amp_","&","\\&"],
[">","_greater_",">",">"],
["<","_lesser_","<","<"]]
M = M + [ ["\\more","<!--more-->"],
["\\newblock","\\\\"],
["\\sloppy",""],
["\\S","§"]]
Mnomath =[["\\\\","<br/>\n"],
["\\ "," "],
["\\`a","à"],
["\\'a","á"],
["\\\"a","ä"],
["\\aa ","å"],
["{\\aa}","å"],
["\\`e","è"],
["\\'e","é"],
["\\\"e","ë"],
["\\`i","ì"],
["\\'i","í"],
["\\\"i","ï"],
["\\`o","ò"],
["\\'o","ó"],
["\\\"o","ö"],
["\\`o","ò"],
["\\'o","ó"],
["\\\"o","ö"],
["\\H o","ö"],
["\\`u","ù"],
["\\'u","ú"],
["\\\"u","ü"],
["\\`u","ù"],
["\\'u","ú"],
["\\\"u","ü"],
["\\v{C}","Č"]]
cb = re.compile("\\{|}")
def extractbody(m):
    """Return the document body of LaTeX source *m*, normalized for
    the later conversion passes.

    Keeps only the text between \begin{document} and \end{document}
    (the whole input when absent), protects escaped characters via the
    ``esc`` placeholder table, strips % comments, turns blank lines into
    <p>, collapses whitespace, resolves \iffalse/\ifblog/\iftex blocks,
    converts $$...$$ into \[...\] and rewrites eqnarray* environments as
    array environments.

    :param m: full LaTeX source as one string.
    :return: normalized body string.
    """
    begin = re.compile("\\\\begin\s*")
    m = begin.sub("\\\\begin", m)
    end = re.compile("\\\\end\s*")
    m = end.sub("\\\\end", m)
    beginenddoc = re.compile("\\\\begin\\{document}"
                             "|\\\\end\\{document}")
    parse = beginenddoc.split(m)
    if len(parse) == 1:
        m = parse[0]
    else:
        m = parse[1]
    # Protect \$, \%, \&, <, > with placeholders (see the esc table).
    for e in esc:
        m = m.replace(e[0], e[1])
    comments = re.compile("%.*?\n")
    m = comments.sub(" ", m)
    multiplereturns = re.compile("\n\n+")
    m = multiplereturns.sub("<p>", m)
    spaces = re.compile("(\n|[ ])+")
    m = spaces.sub(" ", m)
    # Drop text in \iffalse...\fi and \iftex...\fi; keep \ifblog...\fi.
    ifcommands = re.compile("\\\\iffalse|\\\\ifblog|\\\\iftex|\\\\fi")
    L = ifcommands.split(m)
    I = ifcommands.findall(m)
    m = L[0]
    # BUGFIX: use // so the bound stays an int under Python 3 (plain /
    # yields a float there and range() raises TypeError); // gives the
    # same result under Python 2.
    for i in range(1, (len(L) + 1) // 2):
        if I[2*i-2] == "\\ifblog":
            m = m + L[2*i-1]
        m = m + L[2*i]
    # Change $$ ... $$ into \[ ... \].
    doubledollar = re.compile("\\$\\$")
    L = doubledollar.split(m)
    m = L[0]
    for i in range(1, (len(L) + 1) // 2):
        m = m + "\\[" + L[2*i-1] + "\\]" + L[2*i]
    # Reformat eqnarray* environments as regular array environments.
    m = m.replace("\\begin{eqnarray*}", "\\[ \\begin{array}{rcl} ")
    m = m.replace("\\end{eqnarray*}", "\\end{array} \\]")
    return m
def convertsqb(m):
    """Rewrite optional square-bracket arguments as brace groups.

    \item[...] becomes \nitem{...} and \begin{env}[...] becomes
    \nbegin{env}{...}, so later passes can parse the option with the
    ordinary brace-splitting machinery.
    """
    def _bracketize(pattern, old, new, text):
        # Replace each match's command name and turn its [...] into {...}.
        matches = pattern.findall(text)
        pieces = pattern.split(text)
        out = pieces[0]
        for token, rest in zip(matches, pieces[1:]):
            token = token.replace(old, new)
            token = token.replace("[", "{").replace("]", "}")
            out += token + rest
        return out

    m = _bracketize(re.compile("\\\\item\\s*\\[.*?\\]"),
                    "\\item", "\\nitem", m)
    m = _bracketize(re.compile("\\\\begin\\s*\\{\\w+}\\s*\\[.*?\\]"),
                    "\\begin", "\\nbegin", m)
    return m
def converttables(m):
    """Replace every tabular/btabular environment in *m* with an HTML
    table; btabular environments get a border."""
    retable = re.compile("\\\\begin\s*\\{tabular}.*?\\\\end\s*\\{tabular}"
                         "|\\\\begin\s*\\{btabular}.*?\\\\end\s*\\{btabular}")
    pieces = retable.split(m)
    out = pieces[0]
    for env, rest in zip(retable.findall(m), pieces[1:]):
        bordered = env.find("{btabular}") != -1
        out = out + convertonetable(env, bordered) + rest
    return out
def convertmacros(m):
    """Expand the simple text-substitution macros listed in the module
    table ``M`` (each entry maps a \\command to replacement text)."""
    comm = re.compile("\\\\[a-zA-Z]*")
    found = comm.findall(m)
    pieces = comm.split(m)
    out = pieces[0]
    for cmd, rest in zip(found, pieces[1:]):
        # Keep scanning the whole table so a replacement can itself be
        # rewritten by a later rule (matches the original behavior).
        for src, dst in M:
            if src == cmd:
                cmd = dst
        out = out + cmd + rest
    return out
def convertonetable(m, border):
    """Convert a single tabular/btabular environment into an HTML table.

    :param m: full LaTeX source of the environment.
    :param border: True for btabular (render with a border).
    """
    tokens = re.compile("\\\\begin\\{tabular}\s*\\{.*?}"
                        "|\\\\end\\{tabular}"
                        "|\\\\begin\\{btabular}\s*\\{.*?}"
                        "|\\\\end\\{btabular}"
                        "|&|\\\\\\\\")
    align = {"c": "center", "l": "left", "r": "right"}
    T = tokens.findall(m)
    C = tokens.split(m)
    # The column format string (e.g. "lcr") sits inside the \begin token.
    L = cb.split(T[0])
    format = L[3]
    columns = len(format)
    if border:
        m = "<table border=\"1\" align=center>"
    else:
        m = "<table align = center><tr>"
    p = 1   # index of the next token/cell
    i = 0   # current column within the row
    while T[p-1] != "\\end{tabular}" and T[p-1] != "\\end{btabular}":
        m = m + "<td align=" + align[format[i]] + ">" + C[p] + "</td>"
        p = p + 1
        i = i + 1
        if T[p-1] == "\\\\":
            # Row break: pad missing cells, then open a new row.
            # NOTE(review): padding iterates range(p, columns) using the
            # token index p rather than the column index i — verify.
            for i in range(p, columns):
                m = m + "<td></td>"
            m = m + "</tr><tr>"
            i = 0
    m = m + "</tr></table>"
    return (m)
def separatemath(m):
    """Split *m* into (math, text): the list of math fragments ($...$,
    equation environments, \[...\]) and the interleaved text pieces."""
    mathre = re.compile("\\$.*?\\$"
                        "|\\\\begin\\{equation}.*?\\\\end\\{equation}"
                        "|\\\\\\[.*?\\\\\\]")
    return mathre.findall(m), mathre.split(m)
def processmath(M):
    """Format each math fragment in list *M* for WordPress/HTML output.

    Inline $...$ stays inline; displayed equations (\begin{equation} or
    \[) are centered, and equation environments are numbered.  \label
    commands inside equations are recorded in ``ref`` and replaced by
    HTML anchors.  Returns the list of formatted fragments.
    """
    R = []
    counteq = 0  # NOTE(review): unused; kept for fidelity.
    global ref
    mathdelim = re.compile("\\$"
                           "|\\\\begin\\{equation}"
                           "|\\\\end\\{equation}"
                           "|\\\\\\[|\\\\\\]")
    label = re.compile("\\\\label\\{.*?}")
    for m in M:
        md = mathdelim.findall(m)
        mb = mathdelim.split(m)
        """
        In what follows, md[0] contains the initial delimiter,
        which is either \begin{equation}, or $, or \[, and
        mb[1] contains the actual mathematical equation
        """
        if md[0] == "$":
            if HTML:
                m = m.replace("$", "")
                m = "$$" + m + "" + endlatex + "$$"
            else:
                m = "$$ {" + mb[1] + "}" + endlatex + "$$"
        else:
            if md[0].find("\\begin") != -1:
                # Numbered equation: append "(n)" to the display.
                count["equation"] += 1
                mb[1] = mb[1] + "\\ \\ \\ \\ \\ (" + str(count["equation"]) + ")"
            if HTML:
                m = "<p align=center>$$" + mb[1] + endlatex + "$$" + "</p>\n"
            else:
                m = "<p align=center>$$ " + mb[1] + endlatex + "$$</p>\n"
        if m.find("\\label") != -1:
            mnolab = label.split(m)
            mlab = label.findall(m)
            """
            Now the mathematical equation, which has already
            been formatted for WordPress, is the union of
            the strings mnolab[0] and mnolab[1]. The content
            of the \label{...} command is in mlab[0]
            """
            lab = mlab[0]
            lab = cb.split(lab)[1]
            lab = lab.replace(":", "")
            ref[lab] = count["equation"]
            m = "<a name=\"" + lab + "\">" + mnolab[0] + mnolab[1] + "</a>"
        R = R + [m]
    return R
def convertcolors(m, c):
    """Open or close a colored <span> for \begin/\end of color env *c*
    (color hex codes come from the module table ``colors``)."""
    if m.find("begin") != -1:
        return "<span style=\"color:#" + colors[c] + ";\">"
    return "</span>"
def convertitm(m):
    """Map \begin{itemize} to an opening <ul>, \end{itemize} to </ul>."""
    opening = m.find("begin") != -1
    return "\n\n<ul>" if opening else "\n</ul>\n\n"
def convertenum(m):
    """Map \begin{enumerate} to an opening <ol>, \end{enumerate} to </ol>."""
    opening = m.find("begin") != -1
    return "\n\n<ol>" if opening else "\n</ol>\n\n"
def convertbeginnamedthm(thname, thm):
    """Open a named theorem-like environment *thm* titled *thname*,
    bumping its counter and recording it as the current environment."""
    global inthm
    count[T[thm]] += 1
    inthm = thm
    out = beginnamedthm.replace("_ThmType_", thm.capitalize())
    out = out.replace("_ThmNumb_", str(count[T[thm]]))
    return out.replace("_ThmName_", thname)
def convertbeginthm(thm):
    """Open an (unnamed) theorem-like environment *thm*, bumping its
    counter and recording it as the current environment."""
    global inthm
    count[T[thm]] += 1
    inthm = thm
    out = beginthm.replace("_ThmType_", thm.capitalize())
    return out.replace("_ThmNumb_", str(count[T[thm]]))
def convertendthm(thm):
    """Close the current theorem-like environment."""
    global inthm
    inthm = ""
    return endthm
def convertlab(m):
    """Record a \label target in ``ref`` (pointing at the current
    theorem number or section number) and emit an HTML anchor for it."""
    global inthm
    global ref
    name = cb.split(m)[1].replace(":", "")
    if inthm != "":
        ref[name] = count[T[inthm]]
    else:
        ref[name] = count["section"]
    return "<a name=\"" + name + "\"></a>"
def convertproof(m):
    """Open or close a proof environment."""
    return beginproof if m.find("begin") != -1 else endproof
def convertsection(m):
    """Convert \section{...} or \section*{...} to HTML via the style
    templates; numbered sections bump the section counter and reset the
    subsection counter."""
    parts = cb.split(m)
    # parts[0] is the \section command itself, parts[1] the title.
    if parts[0].find("*") == -1:
        template = section
        count["section"] += 1
        count["subsection"] = 0
    else:
        template = sectionstar
    template = template.replace("_SecNumb_", str(count["section"]))
    return template.replace("_SecName_", parts[1])
def convertsubsection(m):
    """Convert \subsection{...} or \subsection*{...} to HTML via the
    style templates.  Note the subsection counter is bumped in both the
    starred and unstarred cases (matching the original behavior)."""
    parts = cb.split(m)
    if parts[0].find("*") == -1:
        template = subsection
    else:
        template = subsectionstar
    count["subsection"] += 1
    template = template.replace("_SecNumb_", str(count["section"]))
    template = template.replace("_SubSecNumb_", str(count["subsection"]))
    return template.replace("_SecName_", parts[1])
def converturl(m):
    """Turn \href{url}{text} into an HTML link."""
    parts = cb.split(m)
    return "<a href=\"" + parts[1] + "\">" + parts[3] + "</a>"
def converturlnosnap(m):
    """Turn \hrefnosnap{url}{text} into a link that suppresses Snap
    previews (class snap_noshots)."""
    parts = cb.split(m)
    return ("<a class=\"snap_noshots\" href=\"" + parts[1] + "\">"
            + parts[3] + "</a>")
def convertimage(m):
    """Turn \image{attrs}{...}{url} into a centered <img> tag."""
    parts = cb.split(m)
    return ("<p align=center><img " + parts[1] + " src=\"" + parts[3]
            + "\"></p>")
def convertstrike(m):
    """Turn \sout{text} into struck-through HTML."""
    parts = cb.split(m)
    return "<s>" + parts[1] + "</s>"
def processtext(t):
    """Translate the text (non-math) part *t* into HTML.

    Applies the Mnomath character substitutions, then splits the input on
    the recognized LaTeX commands/environments and dispatches each one:
    lists, links, sections, labels, proofs, images, strikeout, centering,
    colors and theorem-like environments.  Finishes by converting font
    styles via processfontstyle().
    """
    p = re.compile("\\\\begin\\{\\w+}"
                   "|\\\\nbegin\\{\\w+}\\s*\\{.*?}"
                   "|\\\\end\\{\\w+}"
                   "|\\\\item"
                   "|\\\\nitem\\s*\\{.*?}"
                   "|\\\\label\\s*\\{.*?}"
                   "|\\\\section\\s*\\{.*?}"
                   "|\\\\section\\*\\s*\\{.*?}"
                   "|\\\\subsection\\s*\\{.*?}"
                   "|\\\\subsection\\*\\s*\\{.*?}"
                   "|\\\\href\\s*\\{.*?}\\s*\\{.*?}"
                   "|\\\\hrefnosnap\\s*\\{.*?}\\s*\\{.*?}"
                   "|\\\\image\\s*\\{.*?}\\s*\\{.*?}\\s*\\{.*?}"
                   "|\\\\sout\\s*\\{.*?}")
    for s1, s2 in Mnomath:
        t = t.replace(s1, s2)
    ttext = p.split(t)
    tcontrol = p.findall(t)
    w = ttext[0]
    i = 0
    # Interleave converted commands (tcontrol) with plain text (ttext).
    while i < len(tcontrol):
        if tcontrol[i].find("{itemize}") != -1:
            w = w + convertitm(tcontrol[i])
        elif tcontrol[i].find("{enumerate}") != -1:
            w = w + convertenum(tcontrol[i])
        elif tcontrol[i][0:5] == "\\item":
            w = w + "<li>"
        elif tcontrol[i][0:6] == "\\nitem":
            # \nitem{...} carries the (converted) optional label.
            lb = tcontrol[i][7:].replace("{", "")
            lb = lb.replace("}", "")
            w = w + "<li>" + lb
        # \hrefnosnap must be tested before \href (it contains it).
        elif tcontrol[i].find("\\hrefnosnap") != -1:
            w = w + converturlnosnap(tcontrol[i])
        elif tcontrol[i].find("\\href") != -1:
            w = w + converturl(tcontrol[i])
        elif tcontrol[i].find("{proof}") != -1:
            w = w + convertproof(tcontrol[i])
        # \subsection must be tested before \section (it contains it).
        elif tcontrol[i].find("\\subsection") != -1:
            w = w + convertsubsection(tcontrol[i])
        elif tcontrol[i].find("\\section") != -1:
            w = w + convertsection(tcontrol[i])
        elif tcontrol[i].find("\\label") != -1:
            w = w + convertlab(tcontrol[i])
        elif tcontrol[i].find("\\image") != -1:
            w = w + convertimage(tcontrol[i])
        elif tcontrol[i].find("\\sout") != -1:
            w = w + convertstrike(tcontrol[i])
        elif tcontrol[i].find("\\begin") != -1 and tcontrol[i].find("{center}") != -1:
            w = w + "<p align=center>"
        elif tcontrol[i].find("\\end") != -1 and tcontrol[i].find("{center}") != -1:
            w = w + "</p>"
        else:
            # Fallback: color environments and theorem-like environments.
            for clr in colorchoice:
                if tcontrol[i].find("{" + clr + "}") != -1:
                    w = w + convertcolors(tcontrol[i], clr)
            for thm in ThmEnvs:
                if tcontrol[i] == "\\end{" + thm + "}":
                    w = w + convertendthm(thm)
                elif tcontrol[i] == "\\begin{" + thm + "}":
                    w = w + convertbeginthm(thm)
                elif tcontrol[i].find("\\nbegin{" + thm + "}") != -1:
                    L = cb.split(tcontrol[i])
                    thname = L[3]
                    w = w + convertbeginnamedthm(thname, thm)
        w += ttext[i+1]
        i += 1
    return processfontstyle(w)
def processfontstyle(w):
    """Convert font-style commands (per the module ``fontstyle`` table)
    into HTML tags, tracking brace nesting so that each opened tag is
    closed at its matching '}'.

    ``close`` maps a nesting level to the token that closes it: either a
    literal '}' for a plain brace group, or a closing HTML tag for a
    group opened by a font-style command.
    """
    close = dict()
    ww = ""
    level = i = 0
    while i < len(w):
        special = False
        for k, v in fontstyle.items():
            l = len(k)
            if w[i:i+l] == k:
                # Font-style opener: emit the HTML tag and remember how
                # to close this level.
                level += 1
                ww += '<' + v + '>'
                close[level] = '</' + v + '>'
                i += l
                special = True
        if not special:
            if w[i] == '{':
                # Plain brace group: closes with a literal '}'.
                ww += '{'
                level += 1
                close[level] = '}'
            elif w[i] == '}' and level > 0:
                ww += close[level]
                level -= 1
            else:
                ww += w[i]
            i += 1
    return ww
def convertref(m):
    """Replace every \ref/\eqref command in *m* with a clickable link to
    the number previously recorded in ``ref`` (\eqref adds parentheses)."""
    global ref
    p = re.compile("\\\\ref\s*\\{.*?}|\\\\eqref\s*\\{.*?}")
    pieces = p.split(m)
    out = pieces[0]
    for token, rest in zip(p.findall(m), pieces[1:]):
        lab = cb.split(token)[1].replace(":", "")
        if token.find("\\eqref") != -1:
            out += "<a href=\"#" + lab + "\">(" + str(ref[lab]) + ")</a>"
        else:
            out += "<a href=\"#" + lab + "\">" + str(ref[lab]) + "</a>"
        out += rest
    return out
"""
The program makes several passes through the input.
In a first clean-up, all text before \begin{document}
and after \end{document}, if present, is removed,
all double-returns are converted
to <p>, and all remaining returns are converted to
spaces.
The second step implements a few simple macros. The user can
add support for more macros if desired by editing the
convertmacros() procedure.
Then the program separates the mathematical
from the text parts. (It assumes that the document does
not start with a mathematical expression.)
It makes one pass through the text part, translating
environments such as theorem, lemma, proof, enumerate, itemize,
\em, and \bf. Along the way, it keeps counters for the current
section and subsection and for the current numbered theorem-like
environment, as well as a flag that tells whether one is
inside a theorem-like environment or not. Every time a \label{xx}
command is encountered, we give ref[xx] the value of the section
in which the command appears, or the number of the theorem-like
environment in which it appears (if applicable). Each appearence
of \label is replace by an html "name" tag, so that later we can
replace \ref commands by clickable html links.
The next step is to make a pass through the mathematical environments.
Displayed equations are numbered and centered, and when a \label{xx}
command is encountered we give ref[xx] the number of the current
equation.
A final pass replaces \ref{xx} commands by the number in ref[xx],
and a clickable link to the referenced location.
"""
import sys
s = ""
while True:
char = sys.stdin.read(1)
if not char:
break
if char:
s = s + char
"""
extractbody() takes the text between a \begin{document}
and \end{document}, if present, (otherwise it keeps the
whole document), normalizes the spacing, and removes comments
"""
s=extractbody(s)
s=converttables(s)
s=convertsqb(s)
s=convertmacros(s)
(math,text) = separatemath(s)
s=text[0]
for i in range(len(math)) :
s=s+"__math"+str(i)+"__"+text[i+1]
s = processtext ( s )
math = processmath ( math )
for e in esc :
s=s.replace(e[1],e[2])
for i in range ( len ( math ) ) :
math[i] = math[i].replace(e[1],e[3])
for i in range(len(math)) :
s=s.replace("__math"+str(i)+"__",math[i])
s=convertref(s)
if HTML :
s="<head><style>body{max-width:55em;}a:link{color:#4444aa;}a:visited{color:#4444aa;}a:hover{background-color:#aaaaFF;}</style></head><body>"+s+"</body></html>"
s = s.replace("<p>","\n<p>\n")
print s
|
import sys
# Module-level working buffer: the list of wiki-source lines being
# rewritten in place into roff.  (Re)initialized by wiki2man().
WorkList = None
def SH(i):
    """Rewrite a '== Title ==' wiki heading at WorkList[i] into a roff
    heading: .SH for exactly two '=' layers, .SS otherwise."""
    global WorkList
    string = WorkList[i]
    l = len(string) - 2
    r = 0
    # Strip one layer of '=' per pass; r counts the layers removed.
    # NOTE(review): l starts at len-2 on the first pass but len-1 on
    # later passes — presumably to skip a trailing newline on the raw
    # line; verify this asymmetry is intended.
    while string[0] == '=' and string[l] == '=':
        WorkList[i] = string[1:l]
        string = WorkList[i]
        l = len(string) - 1
        r = r + 1
    if r == 2:
        WorkList[i] = '\n.SH "' + string + '"\n.PP\n'
    else:
        WorkList[i] = '\n.SS "' + string + '"\n.PP\n'
def TP(i):
    """Rewrite a wiki definition item (';term: description') starting at
    WorkList[i] into a roff .TP block, skipping ':' separators that fall
    inside <nowiki> or <!-- --> spans.  Python 2 source (uses '<>')."""
    global WorkList
    string = WorkList[i]
    l = 0
    string1 = WorkList[i + l]
    while string1 != '' and string1[0] == ';':
        j = 0
        finish = 0
        nexcl = 1   # 1 while outside <nowiki>/<!-- --> spans
        s = 0
        # Scan for the first ':' not inside an excluded span.
        while len(string) > j and finish == 0:
            if string[j:j+8] == '<nowiki>':
                nexcl = 0
                j = j + 7
            elif string[j:j+9] == '</nowiki>':
                nexcl = 1
                j = j + 8
            elif string[j:j+4] == '<!--':
                nexcl = 0
                j = j + 3
            elif string[j:j+3] == '-->':
                nexcl = 1
                j = j + 2
            if string[j] == ':':
                s = 1
            finish = nexcl * s
            s = 0
            j = j + 1
        if len(string) == j:
            # No separator found: whole line is the bold term.
            WorkList[i] = '.TP\n.B ' + string[1:]
        elif string[j-1] == ':':
            WorkList[i] = '.TP\n.B ' + string[1:j-1] + '\n' + string[j:]
        l = l + 1
        string1 = WorkList[i+l]
        # Continuation lines ':text' become .br-separated lines.
        while string1 != '' and string1[0] == ':' and string1[1] <> ':' and string1[1] <> ';':
            WorkList[i + l] = '.br\n' + string1[1:]
            l = l + 1
            string1 = WorkList[i + l]
def wiki2man(content):
    """Convert a list of MediaWiki source lines into one roff (man page)
    string.

    Works line by line (headings, definition lists, tables, indents),
    then character by character within each line (inline markup, HTML
    tags, templates, links, symbol entities).  Python 2 source: uses
    ``unicode()`` and the ``<>`` operator.

    NOTE(review): the indentation of this function was reconstructed
    from a whitespace-mangled source — verify the nesting against the
    original before relying on subtle state interactions.
    NOTE(review): several quoted literals below (e.g. '―', '«', '"')
    appear entity/encoding-mangled by extraction; the compared slice
    lengths (j+8, j+7, j+6, ...) suggest the originals were HTML
    entities such as &mdash; — verify against the original file.
    """
    global WorkList
    string = '\n'
    string = unicode(string, 'utf-8')
    WorkList = [string]
    cpt = 0
    # Copy the input lines into the module-level working list.
    while string != '' and cpt < len(content):
        string = content[cpt]
        cpt += 1
        WorkList.append(string)
    path = sys.argv[0]
    n = len(path)
    n = n - 11
    path = path[:n]
    ########## Reformatting from wiki to roff ##########
    # TH: drop a leading non-heading line (used as the page title).
    string = WorkList[1];
    if len(string) > 2 and string[0] != '=' and string[:4] != '<!--' and string[:2] != '{{':
        i = 0
        while len(string) > i and string[i] != '(':
            i = i + 1
        WorkList.pop(1)
        WorkList.pop(0)
    i = 0
    tabacc = -1   # <td> counter within a <tr>; -1 = outside a table row
    tab = 0       # inside an HTML <table>
    tab2 = 0      # nesting depth of {| ... |} wiki tables
    col = 0       # current column within a wiki table row
    nf = 0        # no-fill state: 1 = leading-space block, 2 = <pre>/<tt>/<code>
    nr = 0        # pending '}}' closer: 1 = font reset, 2 = .RE
    excl = 0      # inside a <!-- --> comment
    nowiki = 0    # inside a <nowiki> span
    RS = 0        # current .RS indent depth from ':' lines
    strng = unicode('{{MAN индекс}}', 'utf-8')
    while len(WorkList) > i:
        string = WorkList[i]
        if len(string) > 1:
            # reformatting "nowiki"
            if string[:9] == '</nowiki>':
                WorkList[i] = string[9:]
                nowiki = 0
            if nowiki == 0:
                # reformatting "pre"
                if string[:6] == '</pre>':
                    WorkList[i] = '\n.fi\n.RE\n' + string[6:]
                    nf = 0
                # reformatting "tt"
                elif string[:5] == '</tt>':
                    if string[6:7] == '. ':
                        WorkList[i] = '\n.fi\n.RE\n' + string[7:]
                    elif len(string) > 6 and string[6] == '.':
                        WorkList[i] = '\n.fi\n.RE\n' + string[6:]
                    else:
                        WorkList[i] = '\n.fi\n.RE\n' + string[5:]
                    nf = 0
                # reformatting " " (leading-space no-fill blocks)
                if string[0] == ' ':
                    if nf == 0:
                        nf = 1
                        WorkList[i] = '\n.RS\n.nf\n' + string
                    elif nf == 1:
                        WorkList[i] = string
                else:
                    if nf == 1:
                        # Leaving the no-fill block: close it and re-queue
                        # the current line after the closer.
                        nf = 0
                        WorkList[i] = '\n.fi\n.RE\n'
                        WorkList.insert(i+1, string)
            string = WorkList[i]
            if nf != 2 and nowiki == 0:
                # reformatting excluded text <!-- * -->
                if excl == 1:
                    WorkList[i] = '.\" ' + string[0:]
                    string = WorkList[i]
                if nf == 0:
                    # format titles
                    if string[0] == '=' and string[len(string)-2] == '=':
                        SH(i)
                    # format ";"
                    elif string[0] == ';':
                        TP(i)
                    # format ":..." (indent via .RS/.RE)
                    elif string[0] == ':':
                        l = 1
                        s = ''
                        while string[l] == ':':
                            l = l + 1;
                        if RS == l:
                            s = '\n.br\n'
                        elif RS < l:
                            while RS < l:
                                s = s + '.RS\n'
                                RS = RS + 1
                        if string[RS] == ';':
                            WorkList[i] = s + '.TP\n.B ' + string[RS+1:]
                        else:
                            WorkList[i] = s + string[RS:]
                        string = WorkList[i]
                        stri = WorkList[i+1]
                        # Close .RS levels when the next line dedents.
                        if RS > 0 and stri[0] <> ':':
                            while RS > 0:
                                WorkList[i] = string + '\n.RE\n'
                                RS = RS - 1
                                string = WorkList[i]
                        else:
                            while RS > 0 and len(stri) > RS-1 and stri[RS-1] <> ':':
                                RS = RS - 1
                                WorkList[i] = string + '\n.RE\n'
                                string = WorkList[i]
                    # format "*..."
                    elif string[0] == '*':
                        WorkList[i] = '.br\n * ' + string[1:]
                    # format tables 2
                    elif string[:2] == '{|':
                        if tab2 > 0:
                            WorkList[i] = '.RS\n'
                            tab2 = tab2 + 1
                            col = 0
                        else:
                            WorkList[i] = ''
                            tab2 = 1
                    elif string[:2] == '|-' and tab2 > 0:
                        WorkList[i] = ''
                        col = 0
                    elif string[:2] == '|}':
                        if tab2 == 1:
                            WorkList[i] = ''
                            col = 0
                            tab2 = 0
                        elif tab2 > 1:
                            WorkList[i] = '\n.RE\n'
                            col = 0
                            tab2 = tab2 - 1
                    elif string[:8] == '|valign=' and tab2 > 0:
                        j = 9
                        while len(string) > j and string[j] != '|':
                            j = j + 1
                        if string[j] == '|':
                            if col == 0:
                                WorkList[i] = '\n.TP\n' + string[j+1:]
                                col = 1
                            elif col > 0:
                                WorkList[i] = string[j+1:]
                                col = 2
                            # NOTE(review): unreachable after `col > 0`
                            # above — verify intended branch order.
                            elif col > 1:
                                WorkList[i] = '.PP\n' + string[j+1:]
                                col = col + 1
                    elif string[:1] == '|' and tab2 > 0:
                        if col == 0:
                            WorkList[i] = '\n.TP\n' + string[1:]
                            col = 1
                        elif col == 1:
                            WorkList[i] = string[1:]
                            col = col + 1
                        elif col > 1:
                            WorkList[i] = '\n' + string[1:]
                            col = col + 1
                    # delete wiki "Category:"
                    elif string[:11] == '[[Category:':
                        WorkList[i] = ''
                    # delete wiki {{MAN индекс}}
                    elif string[:14] == strng:
                        WorkList[i] = ''
                    # delete wiki [[en:Man ...]]
                    elif string[:9] == '[[en:Man ':
                        WorkList[i] = ''
            # Character-level pass over the (possibly rewritten) line.
            string = WorkList[i]
            j = 0
            B = -1    # start of a pending ''' (bold) span
            I = -1    # start of a pending '' (italic) span
            U = -1    # start of a pending <u> span
            K = -1    # start of a pending [[ link
            K1 = -1   # position of the '|' inside a pending [[ link
            while len(string) > j:
                # reformatting excluded text <!-- * -->
                if string[j:j+4] == '<!--':
                    string = string[:j] + '\n.\"' + string[j+4:]
                    excl = 1
                    j = j + 1
                elif string[j:j+3] == '-->':
                    string = string[:j] + '\n' + string[j+3:]
                    excl = 0
                    j = j - 1
                if excl == 0:
                    # Change some symbols: — « » — © " & < >
                    if string[j:j+8] == '―':
                        string = string[:j] + unicode('—', 'utf-8') + string[j+8:]
                    elif string[j:j+7] == '«':
                        string = string[:j] + unicode('«', 'utf-8') + string[j+7:]
                    elif string[j:j+7] == '»':
                        string = string[:j] + unicode('»', 'utf-8') + string[j+7:]
                    elif string[j:j+7] == '—':
                        string = string[:j] + unicode('—', 'utf-8') + string[j+7:]
                    elif string[j:j+6] == '©':
                        string = string[:j] + unicode('©', 'utf-8') + string[j+6:]
                    elif string[j:j+6] == '"':
                        string = string[:j] + unicode('"', 'utf-8') + string[j+6:]
                    elif string[j:j+6] == ' ':
                        string = string[:j] + unicode(' ', 'utf-8') + string[j+6:]
                    elif string[j:j+5] == '&':
                        string = string[:j] + unicode('&', 'utf-8') + string[j+5:]
                    elif string[j:j+4] == '<':
                        string = string[:j] + unicode('<', 'utf-8') + string[j+4:]
                    elif string[j:j+4] == '>':
                        string = string[:j] + unicode('>', 'utf-8') + string[j+4:]
                    # reformatting "-" or "\" (escape for roff)
                    elif string[j:j+1] == '-':
                        string = string[0:j] + '\\' + string[j:]
                        j = j + 1
                    elif string[j:j+1] == '\\':
                        string = string[0:j] + '\e' + string[j+1:]
                        j = j + 1
                    # reformatting "nowiki"
                    elif string[j:j+8] == '<nowiki>':
                        nowiki = 1
                        if nf != 2:
                            string = string[:j] + string[j+8:]
                            j = j
                    elif string[j:j+9] == '</nowiki>':
                        nowiki = 0
                        if nf != 2:
                            string = string[:j] + string[j+9:]
                            j = j
                    if nowiki == 0:
                        # ''''' closes bold and/or italic at once.
                        if string[j:j+5] == "'''''":
                            if B != -1 and I == -1 :
                                if tabacc == 1:
                                    string = string[:B] + '"' + string[B+3:j] + '"' + string[j+3:]
                                    j = j - 4
                                    B = -1
                                else:
                                    string = string[:B] + '\\fB' + string[B+3:j] + '\\fR' + string[j+3:]
                                    j = j + 1
                                    B = -1
                            if I != -1 and B == -1:
                                string = string[:I] + '\\fI' + string[I+2:j] + '\\fR' + string[j+2:]
                                j = j + 2
                                I = -1
                        # reformatting boolean text 1
                        elif string[j:j+3] == "'''":
                            if B == -1:
                                B = j
                            else:
                                if tabacc == 1:
                                    string = string[:B] + '"' + string[B+3:j] + '"' + string[j+3:]
                                    j = j - 4
                                    B = -1
                                elif j+3-B > 5:
                                    string = string[:B] + '\\fB' + string[B+3:j] + '\\fR' + string[j+3:]
                                    j = j + 1
                                    B = -1
                        # reformatting italic text 1
                        elif string[j:j+2] == "''" and B == -1:
                            if I == -1:
                                I = j
                            else:
                                if j+3-I > 2:
                                    string = string[:I] + '\\fI' + string[I+2:j] + '\\fR' + string[j+2:]
                                    j = j + 2
                                    I = -1
                        # reformatting "pre"
                        elif string[j:j+5] == '<pre>':
                            string = string[:j] + '\n.RS\n.nf\n' + string[j+5:]
                            nf = 2
                            j = j + 3
                        elif string[j:j+6] == '</pre>':
                            string = string[:j] + '\n.fi\n.RE\n' + string[j+6:]
                            nf = 0
                            j = j + 3
                        # reformatting "code"
                        elif string[j:j+6] == '<code>':
                            string = string[:j] + '\n.nf\n' + string[j+6:]
                            nf = 2
                            j = j + 3
                        elif string[j:j+7] == '</code>':
                            string = string[:j] + '\n.fi\n' + string[j+7:]
                            nf = 0
                            j = j + 3
                        # reformatting "tt"
                        elif string[j:j+4] == '<tt>':
                            string = string[:j] + '\n.RS\n.nf\n' + string[j+4:]
                            nf = 2
                            j = j + 3
                        elif string[j:j+5] == '</tt>':
                            if string[j+5] == '.':
                                string = string[:j] + '\n.fi\n.RE\n' + string[j+6:]
                            else:
                                string = string[:j] + '\n.fi\n.RE\n' + string[j+5:]
                            nf = 0
                            j = j + 3
                        # reformatting "...}}" (closes a {{Template|...}})
                        elif string[j:j+2] == '}}':
                            if nr == 1:
                                string = string[:j] + '\\fR' + string[j+2:]
                                nr = 0
                                j = j + 2
                            elif nr == 2:
                                string = string[:j] + '\n.RE\n' + string[j+2:]
                                nr = 0
                                j = j + 3
                        # reformatting "{{Codeline|...}}"
                        elif string[j:j+11] == '{{Codeline|':
                            string = string[:j] + '\\fB' + string[j+11:]
                            nr = 1
                            j = j + 2
                        # reformatting "{{Warning|...}}"
                        elif string[j:j+10] == '{{Warning|':
                            string = string[:j] + '\\fB' + string[j+10:]
                            nr = 1
                            j = j + 2
                        # reformatting "{{Note|...}}"
                        elif string[j:j+7] == '{{Note|':
                            string = string[:j] + '\\fI' + string[j+7:]
                            nr = 1
                            j = j + 2
                        # reformatting "{{Discussion|...}}"
                        elif string[j:j+13] == '{{Discussion|':
                            string = string[:j] + '\\fI' + string[j+13:]
                            nr = 1
                            j = j + 2
                        # reformatting "{{Filename|...}}"
                        elif string[j:j+11] == '{{Filename|':
                            string = string[:j] + '\\fI' + string[j+11:]
                            nr = 1
                            j = j + 2
                        # reformatting "[mailto:addr text]" -> "text <addr>"
                        elif string[j:j+8] == '[mailto:':
                            a = j + 8
                            while string[a] <> ' ':
                                a = a + 1
                            b = a + 1
                            while string[b] <> ']':
                                b = b + 1
                            string = string[:j] + string[a+1:b] + ' <' + string[j+8:a] + '>'
                        # reformatting "{{Box File|...|...}}"
                        elif string[j:j+11] == '{{Box File|':
                            a = j + 11
                            while string[a] <> '|':
                                a = a + 1
                            string = string[:j] + '\n.TP\n.B ' + string[j+11:a] + '\n.RS\n' + string[a+1:]
                            nr = 2
                        if nf == 0:
                            # reformatting boolean text 2
                            if string[j:j+3] == '<b>':
                                string = string[:j] + '\\fB' + string[j+3:]
                                j = j + 2
                            elif string[j:j+4] == '</b>':
                                string = string[:j] + '\\fR' + string[j+4:]
                                j = j + 2
                            # reformatting italic text 2
                            elif string[j:j+3] == '<i>':
                                string = string[:j] + '\\fI' + string[j+3:]
                                j = j + 2
                            elif string[j:j+4] == '</i>':
                                string = string[:j] + '\\fR' + string[j+4:]
                                j = j + 2
                            # format underlined text
                            elif string[j:j+3] == '<u>':
                                U = j
                            elif string[j:j+4] == '</u>' and U != -1:
                                string = string[:U] + '\\fB\\fI' + string[U+3:j] + '\\fB\\fR' + string[j+4:]
                                j = j + 7
                                U = -1
                            # brake line 1
                            elif string[j:j+4] == '<br>':
                                string = string[0:j] + '\n.br\n' + string[j+4:]
                                j = j + 2
                            # brake line 2
                            elif string[j:j+6] == '<br />':
                                string = string[0:j] + '\n.PP\n' + string[j+6:]
                                j = j + 2
                            # format tables 1 (HTML tables)
                            elif string[j:j+6] == '<table':
                                tab = j
                                while len(string) > j and string[j] != '>':
                                    j = j + 1
                                if string[j] == '>':
                                    string = string[:tab] + string[j+1:]
                                    j = tab - 1
                                    tab = 1
                                else:
                                    j = tab
                                    tab = 0
                            elif string[j:j+3] == '<tr':
                                Ktab = j
                                while len(string) > j and string[j] != '>':
                                    j = j + 1
                                if string[j] == '>':
                                    tabacc = 0
                                    string = string[:Ktab] + '\n.SS ' + string[j+1:]
                                    j = Ktab + 4
                                else:
                                    j = Ktab
                            elif string[j:j+4] == '</tr':
                                Ktab = j
                                while len(string) > j and string[j] != '>':
                                    j = j + 1
                                if string[j] == '>':
                                    tabacc = -1
                                    string = string[:Ktab] + string[j+1:]
                                    j = Ktab - 1
                                else:
                                    j = Ktab
                            elif string[j:j+3] == '<td':
                                Ktab = j
                                while len(string) > j and string[j] != '>':
                                    j = j + 1
                                if string[j] == '>':
                                    tabacc = tabacc + 1
                                    if tabacc == 1:
                                        string = string[:Ktab] + string[j+1:]
                                        j = Ktab - 1
                                    else:
                                        string = string[:Ktab] + '\n.PP\n' + string[j+1:]
                                        j = Ktab + 3
                                else:
                                    j = Ktab
                            elif string[j:j+4] == '</td':
                                Ktab = j
                                while len(string) > j and string[j] != '>':
                                    j = j + 1
                                if string[j] == '>':
                                    string = string[:Ktab] + string[j+1:]
                                    j = Ktab - 1
                                else:
                                    j = Ktab
                            elif string[j:j+7] == '</table':
                                tab = j
                                while len(string) > j and string[j] != '>':
                                    j = j + 1
                                if string[j] == '>':
                                    string = string[:tab] + string[j+1:]
                                    j = tab - 1
                                    tab = 0
                                else:
                                    j = tab
                                    tab = 1
                            # format table 2 {| |- | || |}
                            elif string[j:j+2] == '||' and tab2 > 0 and col > 0:
                                string = string[:j] + '\n' + string[j+2:]
                                col = col + 1
                            # format div????
                            elif string[j:j+4] == '<div':
                                div = j
                                while len(string) > j and string[j] != '>':
                                    j = j + 1
                                if string[j] == '>':
                                    string = string[:div] + string[j+1:]
                                    j = div - 1
                                else:
                                    j = div
                            elif string[j:j+5] == '</div':
                                div = j
                                while len(string) > j and string[j] != '>':
                                    j = j + 1
                                if string[j] == '>':
                                    string = string[:div] + string[j+1:]
                                    j = div - 1
                                else:
                                    j = div
                            # format internal links
                            elif string[j:j+2] == '[[':
                                K = j
                            elif string[j] == '|':
                                if K != -1:
                                    K1 = j
                            elif string[j:j+2] == ']]':
                                if K != -1 and K1 != -1:
                                    # [[target|text]] -> text
                                    string = string[:K] + string[K1+1:j] + string[j+2:]
                                    j = j - K1 + K - 2
                                    K = -1
                                    K1 = -1
                                elif K != -1 and K1 == -1:
                                    # [[target]] -> target
                                    string = string[:K] + string[K+2:j] + string[j+2:]
                                    j = j - 4
                                    K = -1
                j = j + 1
            WorkList[i] = string
        i = i + 1
    # Make title .TH
    string = '\n'
    string = string.encode('utf-8')
    string = unicode(string, 'utf-8')
    WorkList.insert(0, string)
    ########## Output roff formatted file ##########
    # Output encoded symbols:
    string = ''
    for i in range(len(WorkList)):
        string = string + WorkList[i]
    # Delete empty lines and some think else..., just for making roff code better:
    i = 0
    while len(string) > i:
        if string[i:i+8] == '.RE\n\n.RS':
            string = string[:i+3] + string[i+4:]
        if string[i:i+8] == '.RE\n\n.br':
            string = string[:i+3] + string[i+4:]
        if string[i:i+6] == '\n.SS\n':
            string = string[:i+5] + string[i+6:]
        if string[i:i+5] == '\n\n.RE':
            string = string[:i+1] + string[i+2:]
        if string[i:i+5] == '\n\n\n\n\n':
            string = string[:i] + string[i+3:]
        if string[i:i+4] == '\n\n\n\n':
            string = string[:i] + string[i+2:]
        if string[i:i+3] == '\n\n\n':
            string = string[:i] + string[i+1:]
        i = i + 1
    return string
|
try:
from process.sequence import Alignment
from base.plotter import bar_plot, multi_bar_plot
from process.error_handling import KillByUser
except ImportError:
from trifusion.process.sequence import Alignment
from trifusion.base.plotter import bar_plot, multi_bar_plot
from trifusion.process.error_handling import KillByUser
from collections import OrderedDict, Counter
import pickle
import os
import sqlite3
from os.path import join
import random
import string
import copy
class Cluster(object):
    """ Object for clusters of the OrthoMCL groups file. It is useful to set a
     number of attributes that will make subsequent filtration and
     processing much easier """

    def __init__(self, line_string):
        """
        To initialize a Cluster object, only a string compliant with the
        format of a cluster in an OrthoMCL groups file has to be provided.
        This line should contain the name of the group, a colon, and the
        sequences belonging to that group separated by whitespace
        :param line_string: String of a cluster
        """

        # Initializing attributes for parse_string
        self.name = None
        self.sequences = None
        self.species_frequency = {}

        # Initializing attributes for apply filter
        # If the value is different than None, this will inform downstream
        # objects of whether this cluster is compliant with the specified
        # gene_threshold
        self.gene_compliant = None
        # If the value is different than None, this will inform downstream
        # objects of whether this cluster is compliant with the specified
        # species_threshold
        self.species_compliant = None

        self.parse_string(line_string)

    @property
    def iter_sequences(self):
        """
        Alias for self.sequences. Downstream code (e.g. the Group class)
        accesses clusters through this name, so it is kept in sync with
        the sequences attribute.
        """
        return self.sequences

    def parse_string(self, cluster_string):
        """
        Parses the string and sets the group name and sequence list attributes
        """

        fields = cluster_string.split(":")
        # Setting the name and sequence list of the clusters
        self.name = fields[0].strip()
        self.sequences = fields[1].strip().split()

        # Setting the gene frequency for each species in the cluster
        self.species_frequency = Counter([field.split("|")[0] for field in
                                          self.sequences])

    def remove_taxa(self, taxa_list):
        """
        Removes the taxa contained in taxa_list from self.sequences and
        self.species_frequency
        :param taxa_list: list, each element should be a taxon name
        """

        self.sequences = [x for x in self.sequences if x.split("|")[0]
                          not in taxa_list]

        self.species_frequency = dict((taxon, val) for taxon, val in
                                      self.species_frequency.items()
                                      if taxon not in taxa_list)

    def apply_filter(self, gene_threshold, species_threshold):
        """
        This method will update two Cluster attributes, self.gene_flag and
        self.species_flag, which will inform downstream objects if this
        cluster respects the gene and species threshold
        :param gene_threshold: Integer for the maximum number of gene copies
        per species
        :param species_threshold: Integer for the minimum number of species
        present
        """

        # Check whether cluster is compliant with species_threshold.
        # The threshold's truthiness is tested first so that a None/0
        # threshold short-circuits instead of being compared.
        if species_threshold and \
                len(self.species_frequency) >= species_threshold:
            self.species_compliant = True
        else:
            self.species_compliant = False

        # Check whether cluster is compliant with gene_threshold. The
        # species_frequency guard prevents max() from raising on a cluster
        # emptied by remove_taxa.
        if gene_threshold and self.species_frequency and \
                max(self.species_frequency.values()) <= gene_threshold:
            self.gene_compliant = True
        else:
            self.gene_compliant = False
class OrthoGroupException(Exception):
    """Raised when an operation on OrthoMCL group data cannot proceed,
    e.g. exporting filtered groups before any filter has been applied or
    providing an unsupported database type to retrieve_sequences."""
    pass
class GroupLight(object):
    """
    Analogous to Group object but with several changes to reduce memory usage
    """

    def __init__(self, groups_file, gene_threshold=None,
                 species_threshold=None, ns=None):
        """
        :param groups_file: string. Path to an OrthoMCL groups file
        :param gene_threshold: int. Maximum number of gene copies per
        species allowed in an ortholog cluster
        :param species_threshold: int/float. Minimum number (int) or
        proportion (float between 0 and 1) of species per cluster
        :param ns: Namespace object used to communicate with TriFusion's
        main process (provides the kill switch)
        """

        self.gene_threshold = gene_threshold if gene_threshold else None
        self.species_threshold = species_threshold if species_threshold \
            else None

        # Attribute containing the list of included species
        self.species_list = []
        # Attribute that will contain taxa to be excluded from analyses
        self.excluded_taxa = []
        self.species_frequency = []

        # Attributes that will store the number (int) of cluster after gene and
        # species filter
        self.all_clusters = 0
        self.num_gene_compliant = 0
        self.num_species_compliant = 0
        self.all_compliant = 0

        # Attribute containing the total number of sequences
        self.total_seqs = 0

        # Attribute containing the maximum number of extra copies found in the
        # clusters
        self.max_extra_copy = 0

        # Attribute with name of the group file, which will be an ID
        self.name = os.path.abspath(groups_file)
        self.table = groups_file.split(os.sep)[-1].split(".")[0]

        # Initialize attribute containing the groups filtered using the gene
        # and species threshold. This attribute can be updated at any time
        # using the update_filtered_group method
        self.filtered_groups = []

        self._parse_groups(ns)

        # A float species_threshold is a proportion of the total number of
        # species and must be converted into an absolute count after parsing
        if type(self.species_threshold) is float:
            self._get_sp_proportion()

    def groups(self):
        """
        Generator for group file. This replaces the self.groups attribute of
        the original Group Object. Instead of loading the whole file into
        memory, a generator is created to iterate over its contents. It may
        run a bit slower but its a lot more memory efficient.
        :return: generator yielding non-empty, stripped lines of the file
        """

        file_handle = open(self.name)
        for line in file_handle:
            if line.strip() != "":
                yield line.strip()

    def iter_species_frequency(self):
        """
        In order to prevent permanent changes to the species_frequency
        attribute due to the filtering of taxa, this iterable should be used
        instead of the said variable. This creates a temporary deepcopy of
        species_frequency which will be iterated over and eventually modified.
        """

        # Since the items of species_frequency are mutable, this ensures
        # that even those objects are correctly cloned
        sp_freq = copy.deepcopy(self.species_frequency)

        for cl in sp_freq:
            yield cl

    def _remove_tx(self, line):
        """
        Given a group line, remove all references to the excluded taxa
        :param line: raw group file line
        :return: string. The group line without sequences of excluded taxa
        """

        new_line = "{}:".format(line.split(":")[0])
        tx_str = "\t".join([x for x in line.split(":")[1].split() if
                            x.split("|")[0] not in self.excluded_taxa])

        return new_line + tx_str

    def _apply_filter(self, cl):
        """
        Sets or updates the basic group statistics, such as the number of
        orthologs compliant with the gene copy and minimum taxa filters.
        :param cl: dictionary. Contains the number of occurrences for each
        taxon present in the ortholog cluster
        (e.g. {"taxonA": 2, "taxonB": 1).
        """

        # First, remove excluded taxa from the cl object since this will
        # impact all other filters
        for tx in self.excluded_taxa:
            cl.pop(tx, None)

        if cl:

            self.all_clusters += 1

            extra_copies = max(cl.values())
            if extra_copies > self.max_extra_copy:
                self.max_extra_copy = extra_copies

            if extra_copies <= self.gene_threshold and self.gene_threshold and\
                    len(cl) >= self.species_threshold and \
                    self.species_threshold:

                self.num_gene_compliant += 1
                self.num_species_compliant += 1
                self.all_compliant += 1

            elif (extra_copies <= self.gene_threshold and
                    self.gene_threshold) or self.gene_threshold == 0:
                self.num_gene_compliant += 1

            elif len(cl) >= self.species_threshold and \
                    self.species_threshold:
                self.num_species_compliant += 1

    def _get_compliance(self, cl):
        """
        Determines whether an ortholog cluster is compliant with the specified
        ortholog filters.
        :param cl: dictionary. Contains the number of occurrences for each
        taxon present in the ortholog cluster
        (e.g. {"taxonA": 2, "taxonB": 1).

        :return: tuple. The first element refers to the gene copy filter
        while the second refers to the minimum taxa filter. Values of 1
        indicate that the ortholog cluster is compliant.
        """

        for tx in self.excluded_taxa:
            cl.pop(tx, None)

        if cl:

            cp = max(cl.values())

            if not self.gene_threshold and not self.species_threshold:
                return 1, 1

            if cp <= self.gene_threshold and self.gene_threshold and\
                    len(cl) >= self.species_threshold and \
                    self.species_threshold:
                return 1, 1

            elif (cp <= self.gene_threshold and self.gene_threshold) or \
                    not self.gene_threshold:
                return 1, 0

            elif (len(cl) >= self.species_threshold and
                    self.species_threshold) or not self.species_threshold:
                return 0, 1

            else:
                return 0, 0

    def _reset_counter(self):
        """
        Resets the summary-statistic counters before a fresh pass of
        _apply_filter over the clusters.
        """

        self.all_clusters = 0
        self.num_gene_compliant = 0
        self.num_species_compliant = 0
        self.all_compliant = 0

    def _parse_groups(self, ns=None):
        """
        Parses the groups file and populates the species_frequency,
        species_list, total_seqs and max_extra_copy attributes. When both
        thresholds are set, also updates the filter statistics.
        :param ns: Namespace object providing the kill switch
        """

        for cl in self.groups():

            if ns:
                if ns.stop:
                    raise KillByUser("")

            # Retrieve the field containing the ortholog sequences
            sequence_field = cl.split(":")[1]

            # Update species frequency list
            sp_freq = Counter((x.split("|")[0] for x in
                               sequence_field.split()))

            self.species_frequency.append(sp_freq)

            # Update number of sequences. The field must be split into
            # individual sequences; len() on the raw string would count
            # characters instead of sequences.
            self.total_seqs += len(sequence_field.split())

            # Update max number of extra copies
            extra_copies = max(sp_freq.values())
            if extra_copies > self.max_extra_copy:
                self.max_extra_copy = max(sp_freq.values())

            self.species_list.extend([x for x in sp_freq if x not in
                                      self.species_list])

            # Apply filters, if any
            # gene filter
            if self.species_threshold and self.gene_threshold:
                self._apply_filter(sp_freq)

    def exclude_taxa(self, taxa_list, update_stats=False):
        """
        Updates the excluded_taxa attribute and updates group statistics if
        update_stats is True. This does not change the Group object data
        permanently, only sets an attribute that will be taken into account
        when plotting and exporting data.
        :param taxa_list: list. List of taxa that should be excluded from
        downstream operations
        :param update_stats: boolean. If True, it will update the group
        statistics
        """

        # IF the taxa_list is the same as the excluded_taxa attribute,
        # there is nothing to do
        if sorted(taxa_list) == sorted(self.excluded_taxa):
            return

        self.species_list = [x for x in self.species_list + self.excluded_taxa
                             if x not in taxa_list]

        self.excluded_taxa = taxa_list

        if update_stats:
            self._reset_counter()

        for cl in self.iter_species_frequency():
            self._apply_filter(cl)

    def basic_group_statistics(self, update_stats=True):
        """
        :param update_stats: boolean. If True, the summary statistics are
        recomputed before being returned.
        :return: tuple of (total clusters, total sequences, gene compliant
        clusters, species compliant clusters, clusters compliant with both)
        """

        if update_stats:
            self._reset_counter()
            for cl in self.iter_species_frequency():
                self._apply_filter(cl)

        return len(self.species_frequency), self.total_seqs, \
            self.num_gene_compliant, self.num_species_compliant, \
            self.all_compliant

    def _get_sp_proportion(self):
        """
        When the species filter is a float value between 0 and 1, convert
        this proportion into absolute values (rounded up), since filters were
        already designed for absolutes.
        """

        self.species_threshold = int(self.species_threshold *
                                     len(self.species_list))

    def update_filters(self, gn_filter, sp_filter, update_stats=False):
        """
        Updates the group filter attributes and group summary stats if
        update_stats is True. This method does not change the
        data of the Group object, only sets attributes that will be taken into
        account when plotting or exporting data
        :param gn_filter: integer. Maximum number of gene copies allowed in an
        ortholog cluster
        :param sp_filter: integer/float. Minimum number/proportion of taxa
        representation
        :param update_stats: boolean. If True it will update the group summary
        statistics
        """

        # If the provided filters are the same as the current group attributes
        # there is nothing to do
        if (gn_filter, sp_filter) == (self.gene_threshold,
                                      self.species_threshold):
            return

        self.gene_threshold = gn_filter
        self.species_threshold = sp_filter

        if type(self.species_threshold) is float:
            self._get_sp_proportion()

        if update_stats:
            self._reset_counter()
            for cl in self.iter_species_frequency():
                self._apply_filter(cl)

    def retrieve_sequences(self, sqldb, protein_db, dest="./",
                           shared_namespace=None, outfile=None):
        """
        :param sqldb: string. Path to sqlite database file
        :param protein_db: string. Path to protein database file
        :param dest: string. Directory where sequences will be exported
        :param shared_namespace: Namespace object to communicate with
        TriFusion's main process
        :param outfile: If set, all sequences will be instead saved in a
        single output file. This is used for the nucleotide sequence export
        :return:
        """

        if not os.path.exists(dest) and not outfile:
            os.makedirs(dest)
            if not os.path.exists(join(dest, "header_correspondance")):
                os.makedirs(join(dest, "header_correspondance"))

        if shared_namespace:
            shared_namespace.act = shared_namespace.msg = "Creating database"
            # Stores sequences that could not be retrieved
            shared_namespace.missed = shared_namespace.counter = 0
            shared_namespace.progress = 0

            # Get number of lines of protein database
            p = 0
            with open(protein_db) as fh:
                for p, _ in enumerate(fh):
                    pass

            shared_namespace.max_pb = shared_namespace.total = p + 1

        # Connect to database
        con = sqlite3.connect(sqldb)
        c = con.cursor()

        table_name = "".join([x for x in protein_db if x.isalnum()]).encode(
            "utf8")

        # Create table if it does not exist
        if not c.execute("SELECT name FROM sqlite_master WHERE type='table' "
                         "AND name='{}'".format(table_name)).fetchall():

            c.execute("CREATE TABLE [{}] (seq_id text PRIMARY KEY, seq text)".
                      format(table_name))

            # Populate database
            with open(protein_db) as ph:
                seq = ""
                for line in ph:

                    # Kill switch
                    if shared_namespace:
                        if shared_namespace.stop:
                            con.close()
                            raise KillByUser("")
                        shared_namespace.progress += 1
                        shared_namespace.counter += 1

                    if line.startswith(">"):
                        if seq != "":
                            c.execute("INSERT INTO [{}] VALUES (?, ?)".
                                      format(table_name), (seq_id, seq))
                        seq_id = line.strip()[1:]
                        seq = ""
                    else:
                        seq += line.strip()

            con.commit()

        if shared_namespace:
            shared_namespace.act = shared_namespace.msg = "Fetching sequences"
            shared_namespace.good = shared_namespace.counter = 0
            shared_namespace.progress = 0
            shared_namespace.max_pb = shared_namespace.total = \
                self.all_compliant

        # Set single output file, if option is set
        if outfile:
            output_handle = open(join(dest, outfile), "w")

        # Fetching sequences
        for line, cl in zip(self.groups(), self.iter_species_frequency()):

            # Kill switch
            if shared_namespace:
                if shared_namespace.stop:
                    con.close()
                    raise KillByUser("")

            # Filter sequences
            if self._get_compliance(cl) == (1, 1):

                if shared_namespace:
                    shared_namespace.good += 1
                    shared_namespace.progress += 1
                    shared_namespace.counter += 1

                # Retrieve sequences from current cluster
                if self.excluded_taxa:
                    line = self._remove_tx(line)

                fields = line.split(":")

                # Open file
                if not outfile:
                    cl_name = fields[0]
                    oname = join(dest, cl_name)
                    mname = join(dest, "header_correspondance", cl_name)
                    output_handle = open(oname + ".fas", "w")
                    map_handle = open(mname + "_headerMap.csv", "w")

                seqs = fields[-1].split()
                for i in seqs:
                    # Query database
                    c.execute("SELECT * FROM [{}] WHERE seq_id = ?".
                              format(table_name), (i,))
                    vals = c.fetchone()
                    # Handles cases where the sequence could not be retrieved
                    # If outfile is set, output_handle will be a single file
                    # for all groups. If not, it will represent an individual
                    # group file
                    try:
                        if not outfile:
                            tx_name = vals[0].split("|")[0]
                            output_handle.write(">{}\n{}\n".format(tx_name,
                                                                   vals[1]))
                            map_handle.write("{}; {}\n".format(vals[0],
                                                               tx_name))
                        else:
                            output_handle.write(">{}\n{}\n".format(vals[0],
                                                                   vals[1]))
                    except TypeError:
                        pass

                if not outfile:
                    output_handle.close()

        if outfile:
            output_handle.close()

        con.close()

    def export_filtered_group(self, output_file_name="filtered_groups",
                              dest="./", shared_namespace=None):
        """
        Writes the clusters that pass both filters to a new groups file,
        with excluded taxa removed from each line.
        :param output_file_name: string. Name of the output groups file
        :param dest: string. Directory where the file will be written
        :param shared_namespace: Namespace object to communicate with
        TriFusion's main process
        """

        if shared_namespace:
            shared_namespace.act = "Exporting filtered orthologs"
            shared_namespace.missed = 0
            shared_namespace.good = 0

        output_handle = open(os.path.join(dest, output_file_name), "w")

        for p, (line, cl) in enumerate(zip(self.groups(),
                                           self.iter_species_frequency())):

            if shared_namespace:
                if shared_namespace.stop:
                    raise KillByUser("")

            if shared_namespace:
                shared_namespace.progress = p

            if self._get_compliance(cl) == (1, 1):

                if shared_namespace:
                    shared_namespace.good += 1

                if self.excluded_taxa:
                    l = self._remove_tx(line)
                else:
                    l = line
                output_handle.write("{}\n".format(l))

        output_handle.close()

    def bar_species_distribution(self, filt=False):
        """
        Returns plot data for the distribution of the number of taxa
        across clusters.
        :param filt: Boolean, whether or not to use the filtered groups.
        """

        if filt:
            data = Counter((len(cl) for cl in self.iter_species_frequency() if
                            self._get_compliance(cl) == (1, 1)))
        else:
            data = Counter((len(cl) for cl in self.species_frequency))

        x_labels = [x for x in list(data)]
        data = list(data.values())

        # When data is empty, return an exception
        if not data:
            return {"data": None}

        # Sort lists
        x_labels = [list(x) for x in zip(*sorted(zip(x_labels, data)))][0]

        # Convert label to strings
        x_labels = [str(x) for x in x_labels]

        title = "Taxa frequency distribution"
        ax_names = ["Number of taxa", "Ortholog frequency"]

        return {"data": [data],
                "title": title,
                "ax_names": ax_names,
                "labels": x_labels,
                "table_header": ["Number of species",
                                 "Ortholog frequency"]}

    def bar_genecopy_distribution(self, filt=False):
        """
        Creates a bar plot with the distribution of gene copies across
        clusters
        :param filt: Boolean, whether or not to use the filtered groups.
        """

        if filt:
            data = Counter((max(cl.values()) for cl in
                            self.iter_species_frequency() if
                            self._get_compliance(cl) == (1, 1)))
        else:
            data = Counter((max(cl.values()) for cl in self.species_frequency
                            if cl))

        x_labels = [x for x in list(data)]
        data = list(data.values())

        # When data is empty, return an exception
        if not data:
            return {"data": None}

        x_labels, data = (list(x) for x in zip(*sorted(zip(x_labels, data))))

        # Convert label to strings
        x_labels = [str(x) for x in x_labels]

        title = "Gene copy distribution"
        ax_names = ["Number of gene copies", "Ortholog frequency"]

        return {"data": [data],
                "labels": x_labels,
                "title": title,
                "ax_names": ax_names,
                "table_header": ["Number of gene copies",
                                 "Ortholog frequency"]}

    def bar_species_coverage(self, filt=False):
        """
        Creates a stacked bar plot with the proportion of
        :return:
        """

        data = Counter(dict((x, 0) for x in self.species_list))

        # NOTE(review): _apply_filter compares against the thresholds, so
        # this method presumes both thresholds are set — confirm callers.
        self._reset_counter()

        for cl in self.iter_species_frequency():
            self._apply_filter(cl)
            if filt:
                data += Counter(dict((x, 1) for x, y in cl.items() if y > 0 and
                                self._get_compliance(cl) == (1, 1)))
            else:
                data += Counter(dict((x, 1) for x, y in cl.items() if y > 0))

        data = data.most_common()

        # When data is empty, return an exception
        if not data:
            return {"data": None}

        x_labels = [str(x[0]) for x in data]

        data = [[x[1] for x in data], [self.all_clusters - x[1] if not
                                       filt else self.all_compliant - x[1]
                                       for x in data]]

        lgd_list = ["Available data", "Missing data"]
        ax_names = [None, "Ortholog frequency"]

        return {"data": data,
                "labels": x_labels,
                "lgd_list": lgd_list,
                "ax_names": ax_names}

    def bar_genecopy_per_species(self, filt=False):
        """
        Returns plot data with the number of gene copies (paralogs) per
        species, optionally restricted to filter-compliant clusters.
        :param filt: Boolean, whether or not to use the filtered groups.
        """

        data = Counter(dict((x, 0) for x in self.species_list))

        self._reset_counter()

        for cl in self.iter_species_frequency():
            self._apply_filter(cl)
            if filt:
                data += Counter(dict((x, y) for x, y in cl.items() if y > 1 and
                                self._get_compliance(cl) == (1, 1)))
            else:
                data += Counter(dict((x, y) for x, y in cl.items() if y > 1))

        data = data.most_common()

        # When data is empty, return an exception
        if not data:
            return {"data": None}

        x_labels = [str(x[0]) for x in data]
        data = [[x[1] for x in data]]

        ax_names = [None, "Gene copies"]

        return {"data": data,
                "labels": x_labels,
                "ax_names": ax_names}
class Group(object):
    """ This represents the main object of the orthomcl toolbox module. It is
    initialized with a file name of a orthomcl groups file and provides
    several methods that act on that group file. To process multiple Group
    objects, see MultiGroups object """

    def __init__(self, groups_file, gene_threshold=None,
                 species_threshold=None, project_prefix="MyGroups"):
        """
        :param groups_file: string. Path to an OrthoMCL groups file
        :param gene_threshold: int. Maximum number of gene copies per species
        :param species_threshold: int. Minimum number of species per cluster
        :param project_prefix: string. Prefix for output files
        """

        # Initializing thresholds. These may be set from the start, or using
        # some method that uses them as arguments
        self.gene_threshold = gene_threshold
        self.species_threshold = species_threshold

        # Attribute containing the list of included species
        self.species_list = []
        # Attribute that will contain taxa to be excluded from analyses
        self.excluded_taxa = []

        # Attributes that will store the number (int) of cluster after gene and
        # species filter
        self.all_compliant = 0
        self.num_gene_compliant = 0
        self.num_species_compliant = 0

        # Attribute containing the total number of sequences
        self.total_seqs = 0

        # Attribute containing the maximum number of extra copies found in the
        # clusters
        self.max_extra_copy = 0

        # Attribute with name of the group file, which will be an ID
        self.group_name = groups_file

        # Initialize the project prefix for possible output files
        self.prefix = project_prefix

        # Initialize attribute containing the original groups
        self.groups = []

        # Initialize atribute containing the groups filtered using the gene and
        # species threshold. This attribute can be updated at any time using
        # the update_filtered_group method
        self.filtered_groups = []
        self.name = None

        # Parse groups file and populate groups attribute
        self.__parse_groups(groups_file)

    def __parse_groups(self, groups_file):
        """
        Parses the ortholog clusters in the groups file and populates the
        self.groups list with Cluster objects for each line in the groups file.
        :param groups_file: File name for the orthomcl groups file
        :return: populates the groups attribute
        """

        self.name = groups_file
        self.species_list = []

        groups_file_handle = open(groups_file)

        for line in groups_file_handle:
            cluster_object = Cluster(line)

            # Add cluster to general group list
            self.groups.append(cluster_object)

            # Update total sequence counter
            self.total_seqs += len(cluster_object.sequences)

            # Update maximum number of extra copies, if needed
            if max(cluster_object.species_frequency.values()) > \
                    self.max_extra_copy:
                self.max_extra_copy = \
                    max(cluster_object.species_frequency.values())

            # Update species_list attribute
            self.species_list = list(set(self.species_list).union(
                set(cluster_object.species_frequency.keys())))

            # If thresholds have been specified, update self.filtered_groups
            # attribute
            if self.species_threshold and self.gene_threshold:
                cluster_object.apply_filter(self.gene_threshold,
                                            self.species_threshold)
                if cluster_object.species_compliant and \
                        cluster_object.gene_compliant:
                    # Add cluster to the filtered group list
                    self.filtered_groups.append(cluster_object)
                    self.all_compliant += 1

                # Update num_species_compliant attribute
                if cluster_object.species_compliant:
                    self.num_species_compliant += 1
                # Update num_gene_compliant attribute
                if cluster_object.gene_compliant:
                    self.num_gene_compliant += 1

    def exclude_taxa(self, taxa_list):
        """
        Adds a taxon_name to the excluded_taxa list and updates the
        filtered_groups list
        """

        self.excluded_taxa.extend(taxa_list)

        # Storage variable for new filtered groups
        filtered_groups = []

        # Reset max_extra_copy attribute
        self.max_extra_copy = 0

        for cl in self.groups:
            cl.remove_taxa(taxa_list)
            # Only keep clusters that still contain sequences. Note: the
            # Cluster class stores its members in the sequences attribute.
            if cl.sequences and cl.species_frequency:
                filtered_groups.append(cl)

                # Update maximum number of extra copies, if needed
                if max(cl.species_frequency.values()) > self.max_extra_copy:
                    self.max_extra_copy = max(cl.species_frequency.values())

        # Update species_list
        self.species_list = sorted(list(set(self.species_list) -
                                        set(taxa_list)))

        self.filtered_groups = self.groups = filtered_groups

    def get_filters(self):
        """
        Returns a tuple with the thresholds for max gene copies and min species
        """

        return self.gene_threshold, self.species_threshold

    def basic_group_statistics(self):
        """
        This method creates a basic table in list format containing basic
        information of the groups file (total number of clusters, total number
        of sequences, number of clusters below the gene threshold, number of
        clusters below the species threshold and number of clusters below the
        gene AND species threshold)
        :return: List containing number of

        [total clusters,
        total sequences,
        clusters above gene threshold,
        clusters above species threshold,
        clusters above gene and species threshold]
        """

        # Total number of clusters
        total_cluster_num = len(self.groups)

        # Total number of sequenes
        total_sequence_num = self.total_seqs

        # Gene compliant clusters
        clusters_gene_threshold = self.num_gene_compliant

        # Species compliant clusters
        clusters_species_threshold = self.num_species_compliant

        clusters_all_threshold = len(self.filtered_groups)

        statistics = [total_cluster_num, total_sequence_num,
                      clusters_gene_threshold, clusters_species_threshold,
                      clusters_all_threshold]

        return statistics

    def paralog_per_species_statistic(self, output_file_name=
                                      "Paralog_per_species.csv", filt=True):
        """
        This method creates a CSV table with information on the number of
        paralog clusters per species
        :param output_file_name: string. Name of the output csv file
        :param filt: Boolean. Whether to use the filtered groups (True) or
        total groups (False)
        """

        # Setting which clusters to use
        if filt:
            groups = self.filtered_groups
        else:
            groups = self.groups

        paralog_count = dict((species, 0) for species in self.species_list)

        for cluster in groups:
            for species in paralog_count:
                if cluster.species_frequency[species] > 1:
                    paralog_count[species] += 1

        # Writing table
        output_handle = open(output_file_name, "w")
        output_handle.write("Species; Clusters with paralogs\n")

        for species, val in paralog_count.items():
            output_handle.write("%s; %s\n" % (species, val))

        output_handle.close()

    def export_filtered_group(self, output_file_name="filtered_groups",
                              dest="./", get_stats=False,
                              shared_namespace=None):
        """
        Export the filtered groups into a new file.
        :param output_file_name: string, name of the filtered groups file
        :param dest: string, path to directory where the filtered groups file
        will be created
        :param get_stats: Boolean, whether to return the basic count stats or
        not
        :param shared_namespace: Namespace object, for communicating with
        main process.
        """

        if self.filtered_groups:

            if shared_namespace:
                shared_namespace.act = "Exporting filtered orthologs"

            output_handle = open(os.path.join(dest, output_file_name), "w")

            if get_stats:
                all_orthologs = len(self.groups)
                sp_compliant = 0
                gene_compliant = 0
                final_orthologs = 0

            for cluster in self.filtered_groups:

                if shared_namespace:
                    shared_namespace.progress = \
                        self.filtered_groups.index(cluster)

                if cluster.species_compliant and cluster.gene_compliant:
                    output_handle.write("%s: %s\n" % (
                        cluster.name, " ".join(cluster.sequences)))

                    if get_stats:
                        final_orthologs += 1

                if get_stats:
                    if cluster.species_compliant:
                        sp_compliant += 1
                    if cluster.gene_compliant:
                        gene_compliant += 1

            output_handle.close()

            if get_stats:
                return all_orthologs, sp_compliant, gene_compliant,\
                    final_orthologs

        else:
            raise OrthoGroupException("The groups object must be filtered "
                                      "before using the export_filtered_group"
                                      "method")

    def update_filters(self, gn_filter, sp_filter):
        """
        Sets new values for the self.species_threshold and self.gene_threshold
        and updates the filtered_group
        :param gn_filter: int. Maximum value for gene copies in cluster
        :param sp_filter: int. Minimum value for species in cluster
        """

        self.species_threshold = int(sp_filter)
        self.gene_threshold = int(gn_filter)

        self.update_filtered_group()

    def update_filtered_group(self):
        """
        This method creates a new filtered group variable, like
        export_filtered_group, but instead of writing into a new file, it
        replaces the self.filtered_groups variable
        """

        self.filtered_groups = []

        # Reset gene and species compliant counters
        self.num_gene_compliant = 0
        self.num_species_compliant = 0

        for cluster in self.groups:
            cluster.apply_filter(self.gene_threshold, self.species_threshold)
            if cluster.species_compliant and cluster.gene_compliant:
                self.filtered_groups.append(cluster)

            # Update num_species_compliant attribute
            if cluster.species_compliant:
                self.num_species_compliant += 1
            # Update num_gene_compliant attribute
            if cluster.gene_compliant:
                self.num_gene_compliant += 1

    def retrieve_sequences(self, database, dest="./", mode="fasta",
                           filt=True, shared_namespace=None):
        """
        When provided with a database in Fasta format, this will use the
        Alignment object to retrieve sequences
        :param database: String. Fasta file
        :param dest: directory where files will be save
        :param mode: string, whether to retrieve sequences to a file ('fasta'),
        or a dictionary ('dict')
        :param filt: Boolean. Whether to use the filtered groups (True) or
        total groups (False)
        :param shared_namespace: Namespace object. This argument is meant for
        when fast are retrieved in a background process, where there is a need
        to update the main process of the changes in this method
        :param dest: string. Path to directory where the retrieved sequences
        will be created.
        """

        if mode == "dict":
            seq_storage = {}

        if filt:
            groups = self.filtered_groups
        else:
            groups = self.groups

        if not os.path.exists("Orthologs"):
            os.makedirs("Orthologs")

        # Update method progress
        if shared_namespace:
            shared_namespace.act = "Creating database"
            shared_namespace.progress = 0

        print("Creating db")

        # Check what type of database was provided
        #TODO: Add exception handling if file is not parsed with Aligment
        if isinstance(database, str):
            try:
                db_aln = pickle.load(open(database, "rb"))
            except (EnvironmentError, pickle.UnpicklingError):
                db_aln = Alignment(database)
                db_aln = db_aln.alignment
        elif isinstance(database, dict):
            db_aln = database
        else:
            raise OrthoGroupException("The input database is neither a string"
                                      "nor a dictionary object")

        print("Retrieving seqs")

        # Update method progress
        if shared_namespace:
            shared_namespace.act = "Retrieving sequences"

        for cluster in groups:

            if shared_namespace:
                shared_namespace.progress += 1

            if mode == "dict":
                seq_storage[cluster.name] = []

            output_handle = open(join(dest, cluster.name + ".fas"), "w")
            # Iterate over the cluster's member sequences (stored in the
            # Cluster.sequences attribute)
            for sequence_id in cluster.sequences:
                seq = db_aln[sequence_id]
                if mode == "fasta":
                    output_handle.write(">%s\n%s\n" % (sequence_id, seq))
                elif mode == "dict":
                    seq_storage[cluster.name].append([sequence_id.split("|")[0],
                                                     seq])
            output_handle.close()

        if mode == "dict":
            return seq_storage

    def bar_species_distribution(self, dest="./", filt=False, ns=None,
                                 output_file_name="Species_distribution"):
        """
        Creates a bar plot with the distribution of species numbers across
        clusters
        :param dest: string, destination directory
        :param filt: Boolean, whether or not to use the filtered groups.
        :param output_file_name: string, name of the output file
        """

        data = []

        # Determine which groups to use
        if filt:
            groups = self.filtered_groups
        else:
            groups = self.groups

        for i in groups:

            if ns:
                if ns.stop:
                    raise KillByUser("")

            data.append(len([x for x, y in i.species_frequency.items()
                             if y > 0]))

        # Transform data into histogram-like
        transform_data = Counter(data)
        x_labels = [x for x in list(transform_data)]
        y_vals = list(transform_data.values())

        # Sort lists
        x_labels, y_vals = (list(x) for x in zip(*sorted(zip(x_labels,
                                                             y_vals))))

        # Convert label to strings
        x_labels = [str(x) for x in x_labels]

        if ns:
            if ns.stop:
                raise KillByUser("")

        # Create plot
        b_plt, lgd, _ = bar_plot([y_vals], x_labels,
                                 title="Taxa frequency distribution",
                                 ax_names=["Number of taxa", "Ortholog frequency"])
        b_plt.savefig(os.path.join(dest, output_file_name), bbox_inches="tight",
                      dpi=400)

        # Create table
        table_list = [["Number of species", "Ortholog frequency"]]
        for x, y in zip(x_labels, y_vals):
            table_list.append([x, y])

        return b_plt, lgd, table_list

    def bar_genecopy_distribution(self, dest="./", filt=False,
                                  output_file_name="Gene_copy_distribution.png"):
        """
        Creates a bar plot with the distribution of gene copies across
        clusters
        :param dest: string, destination directory
        :param filt: Boolean, whether or not to use the filtered groups.
        :param output_file_name: string, name of the output file
        """

        data = []

        # Determin which groups to use
        if filt:
            groups = self.filtered_groups
        else:
            groups = self.groups

        for cl in groups:
            # Get max number of copies
            max_copies = max(cl.species_frequency.values())

            data.append(max_copies)

        # Transform data into histogram-like
        transform_data = Counter(data)
        x_labels = [x for x in list(transform_data)]
        y_vals = list(transform_data.values())

        # Sort lists
        x_labels, y_vals = (list(x) for x in zip(*sorted(zip(x_labels,
                                                             y_vals))))

        # Convert label to strings
        x_labels = [str(x) for x in x_labels]

        # Create plot
        b_plt, lgd, _ = bar_plot([y_vals], x_labels,
                                 title="Gene copy distribution",
                                 ax_names=["Number of gene copies", "Ortholog frequency"],
                                 reverse_x=False)
        # NOTE(review): figsize is not a documented savefig argument —
        # confirm whether it has any effect here or should be set on the
        # figure instead.
        b_plt.savefig(os.path.join(dest, output_file_name), bbox_inches="tight",
                      figsize=(8 * len(x_labels) / 4, 6), dpi=200)

        # Create table
        table_list = [["Number of gene copies", "Ortholog frequency"]]
        for x, y in zip(x_labels, y_vals):
            table_list.append([x, y])

        return b_plt, lgd, table_list

    def bar_species_coverage(self, dest="./", filt=False, ns=None,
                             output_file_name="Species_coverage"):
        """
        Creates a stacked bar plot with the proportion of
        :return:
        """

        # Determine which groups to use
        if filt:
            groups = self.filtered_groups
        else:
            groups = self.groups

        data = Counter(dict((x, 0) for x in self.species_list))

        for cl in groups:

            if ns:
                if ns.stop:
                    raise KillByUser("")

            data += Counter(dict((x, 1) for x, y in cl.species_frequency.items()
                            if y > 0))

        xlabels = [str(x) for x in list(data.keys())]
        data = [list(data.values()), [len(groups) - x for x in
                                      data.values()]]

        lgd_list = ["Available data", "Missing data"]

        if ns:
            if ns.stop:
                raise KillByUser("")

        b_plt, lgd, _ = bar_plot(data, xlabels, lgd_list=lgd_list,
                                 ax_names=[None, "Ortholog frequency"])
        b_plt.savefig(os.path.join(dest, output_file_name), bbox_inches="tight",
                      dpi=200)

        return b_plt, lgd, ""
class MultiGroups(object):
    """ Creates an object composed of multiple Group objects """

    def __init__(self, groups_files=None, gene_threshold=None,
                 species_threshold=None, project_prefix="MyGroups"):
        """
        :param groups_files: A list containing the file names of the multiple
        group files (or already parsed Group objects)
        :param gene_threshold: int, filter for max gene copies
        :param species_threshold: int, filter for min species
        :param project_prefix: string, prefix used for output file names
        :return: Populates the self.multiple_groups attribute
        """

        # If a MultiGroups is initialized with duplicate Group objects, these
        # will be stored in a list. If all Group objects are unique, the list
        # will remain empty
        self.duplicate_groups = []

        # Initializing thresholds. These may be set from the start, or using
        # some method that uses them as arguments
        self.gene_threshold = gene_threshold
        self.species_threshold = species_threshold

        self.prefix = project_prefix

        # Maps group name -> Group object
        self.multiple_groups = {}
        # Maps group name -> (gene_filter, species_filter)
        self.filters = {}

        if groups_files:
            for group_file in groups_files:
                # If group_file is already a Group object, just add it;
                # otherwise parse the file into a new Group object
                if not isinstance(group_file, Group):
                    group_object = Group(group_file, self.gene_threshold,
                                         self.species_threshold)
                else:
                    group_object = group_file

                # Check for duplicate group files
                if group_object.name in self.multiple_groups:
                    self.duplicate_groups.append(group_object.name)
                else:
                    self.multiple_groups[group_object.name] = group_object
                    self.filters[group_object.name] = (
                        1, len(group_object.species_list))

    def __iter__(self):
        # Bug fix: iterate over the stored Group objects, not the dict keys.
        # Consumers such as add_multigroups expect Group objects here.
        return iter(self.multiple_groups.values())

    def iter_gnames(self):
        """Returns a generator over the names of the stored groups."""
        # Bug fix: multiple_groups is a dict; iterate its values to reach
        # the Group objects and their .name attribute
        return (x.name for x in self.multiple_groups.values())

    def get_gnames(self):
        """Returns a list with the names of the stored groups."""
        return [x.name for x in self.multiple_groups.values()]

    def add_group(self, group_obj):
        """
        Adds a group object
        :param group_obj: Group object
        """
        # Check for duplicate groups
        if group_obj.name in self.multiple_groups:
            self.duplicate_groups.append(group_obj.name)
        else:
            self.multiple_groups[group_obj.name] = group_obj

    def remove_group(self, group_id):
        """
        Removes a group object according to its name
        :param group_id: string, name matching a Group object name attribute
        """
        if group_id in self.multiple_groups:
            del self.multiple_groups[group_id]

    def get_group(self, group_id):
        """
        Returns a group object based on its name. If the name does not match
        any group object, returns None
        :param group_id: string. Name of group object
        """
        try:
            return self.multiple_groups[group_id]
        except KeyError:
            return

    def add_multigroups(self, multigroup_obj):
        """
        Merges a MultiGroup object
        :param multigroup_obj: MultiGroup object
        """
        for group_obj in multigroup_obj:
            self.add_group(group_obj)

    def update_filters(self, gn_filter, sp_filter, group_names=None,
                       default=False):
        """
        This will not change the Group object themselves, only the filter
        mapping. The filter is only applied when the Group object is retrieved
        to reduce computations
        :param gn_filter: int, filter for max gene copies
        :param sp_filter: int, filter for min species
        :param group_names: list, with names of group objects. When None,
        all stored groups are updated
        :param default: bool, when True reset filters to their defaults
        """
        # Bug fix: previously the "all groups" loop ran even when
        # group_names was given, clobbering the targeted update.
        if group_names:
            targets = group_names
        else:
            targets = list(self.multiple_groups)

        for group_name in targets:
            # Get group object
            group_obj = self.multiple_groups[group_name]
            # Define filters (defaults: max 1 gene copy, all species)
            gn = gn_filter if not default else 1
            sp = sp_filter if not default else len(group_obj.species_list)
            # Update Group object with new filters
            group_obj.update_filters(gn, sp)
            # Update filter map
            self.filters[group_name] = (gn, sp)

    def basic_multigroup_statistics(self, output_file_name=
                                    "multigroup_base_statistics.csv"):
        """
        Writes a csv file with the basic statistics of every stored group.
        :param output_file_name: string, suffix of the output csv file
        :return: None; writes "<prefix>.<output_file_name>" to disk
        """
        # Creates the storage for the statistics of the several files.
        # Bug fix: iterate over the Group objects (dict values), not keys.
        statistics_storage = OrderedDict()
        for group in self.multiple_groups.values():
            statistics_storage[group.name] = group.basic_group_statistics()

        with open(self.prefix + "." + output_file_name, "w") as output_handle:
            output_handle.write("Group file; Total clusters; Total sequences; "
                                "Clusters below gene threshold; Clusters above "
                                "species threshold; Clusters below gene and above"
                                " species thresholds\n")

            for group, vals in statistics_storage.items():
                output_handle.write("%s; %s\n" % (
                    group, ";".join([str(x) for x in vals])))

    def bar_orthologs(self, output_file_name="Final_orthologs",
                      dest="./", stats="total"):
        """
        Creates a bar plot with the final ortholog values for each group file
        :param output_file_name: string. Name of output file
        :param dest: string. output directory
        :param stats: string. The statistics that should be used to generate
        the bar plot. Options are:
            ..: "1": Total orthologs
            ..: "2": Species compliant orthologs
            ..: "3": Gene compliant orthologs
            ..: "4": Final orthologs
            ..: "all": All of the above
        Multiple combinations can be provided, for instance: "123" will
        display bars for total, species compliant and gene compliant stats.
        NOTE(review): the default "total" matches none of the options above
        and yields an empty plot -- callers presumably always pass explicit
        codes; confirm.
        """
        # Stores the x-axis labels
        x_labels = []
        # Stores final ortholog values for all 4 possible data sets
        vals = [[], [], [], []]
        lgd = ["Total orthologs", "After species filter", "After gene filter",
               "Final orthologs"]

        # Get final ortholog values.
        # Bug fix: iterate over the Group objects (dict values), not keys.
        for g_obj in self.multiple_groups.values():
            x_labels.append(g_obj.name.split(os.sep)[-1])
            # Populate total orthologs
            if "1" in stats or stats == "all":
                vals[0].append(len(g_obj.groups))
            # Populate species compliant orthologs
            if "2" in stats or stats == "all":
                vals[1].append(g_obj.num_species_compliant)
            # Populate gene compliant orthologs
            if "3" in stats or stats == "all":
                vals[2].append(g_obj.num_gene_compliant)
            # Populate final orthologs
            if "4" in stats or stats == "all":
                vals[3].append(len(g_obj.filtered_groups))

        # Filter valid data sets
        lgd_list = [x for x in lgd if vals[lgd.index(x)]]
        vals = [l for l in vals if l]

        # Create plot
        b_plt, lgd = multi_bar_plot(vals, x_labels, lgd_list=lgd_list)
        b_plt.savefig(os.path.join(dest, output_file_name),
                      bbox_extra_artists=(lgd,), bbox_inches="tight")

        # Create table list object with a header row followed by one row
        # per plotted statistic
        table_list = [[""] + x_labels]
        for i in range(len(vals)):
            table_list.append([lgd_list[i]] + vals[i])

        return b_plt, lgd, table_list

    def group_overlap(self):
        """
        This will find the overlap of orthologs between two group files.
        THIS METHOD IS TEMPORARY AND EXPERIMENTAL
        :return: int, number of clusters of the first group also present in
        the second group
        """

        def parse_groups(group_obj):
            """
            Returns a list with the sorted ortholog clusters
            """
            storage = []
            for cluster in group_obj.groups:
                # NOTE(review): iter_sequences is accessed without calling;
                # presumably a property or plain iterable -- confirm
                storage.append(set(cluster.iter_sequences))
            return storage

        if len(self.multiple_groups) != 2:
            raise SystemExit("This method can only be used with two group "
                             "files")

        # Bug fix: multiple_groups is a dict keyed by name; integer
        # indexing ([0]/[1]) raised KeyError. Unpack the two values instead.
        group1, group2 = self.multiple_groups.values()

        group1_list = parse_groups(group1)
        group2_list = parse_groups(group2)

        counter = 0
        for cluster_set in group1_list:
            if cluster_set in group2_list:
                counter += 1
        # Bug fix: the computed overlap was previously discarded
        return counter
class MultiGroupsLight(object):
    """
    Creates an object composed of multiple Group objects like MultiGroups.
    However, instead of storing the groups in memory, these are pickled to
    files on disk (under db_path) and loaded on demand.
    """

    # The report calls available
    calls = ['bar_genecopy_distribution',
             'bar_species_distribution',
             'bar_species_coverage',
             'bar_genecopy_per_species']

    def __init__(self, db_path, groups=None, gene_threshold=None,
                 species_threshold=None, project_prefix="MyGroups",
                 ns=None):
        """
        :param db_path: string, directory where pickled group objects are
        written
        :param groups: A list containing the file names of the multiple
        group files (or already parsed GroupLight objects)
        :param gene_threshold: int, filter for max gene copies
        :param species_threshold: int, filter for min species
        :param project_prefix: string, prefix used for output file names
        :param ns: optional namespace used for progress reporting and to
        signal a user abort via ns.stop
        :return: Populates the self.groups attribute
        """
        self.db_path = db_path

        # If a MultiGroups is initialized with duplicate Group objects, their
        # names will be stored in a list. If all Group objects are unique, the
        # list will remain empty
        self.duplicate_groups = []

        # Maps group name -> path of its pickled GroupLight object
        self.groups = {}
        # Maps group name -> statistics dict cached by
        # get_multigroup_statistics
        self.groups_stats = {}

        # Attribute that will store the paths of badly formated group files
        self.bad_groups = []

        # Initializing thresholds. These may be set from the start, or using
        # some method that uses them as arguments
        self.gene_threshold = gene_threshold
        self.species_threshold = species_threshold

        # Initializing mapping of group filters to their names. Should be
        # something like {"groupA": (1, 10)}
        self.filters = {}

        self.taxa_list = {}
        self.excluded_taxa = {}

        # This attribute will contain a dictionary with the maximum extra copies
        # for each group object
        self.max_extra_copy = {}

        # This attribute will contain a list with the number of species for
        # each group object, excluding replicates. If a MultiGroupLight object
        # contains Group objects with different taxa numbers, this attribute
        # can be used to issue a warning
        self.species_number = []

        self.prefix = project_prefix

        # NOTE(review): this assumes groups is not None whenever ns is
        # provided -- confirm callers guarantee that
        if ns:
            ns.files = len(groups)

        if groups:
            for group_file in groups:
                # If group_file is already a Group object, just add it
                if not isinstance(group_file, GroupLight):
                    try:
                        if ns:
                            # Abort parsing if the user cancelled
                            if ns.stop:
                                raise KillByUser("")
                            ns.counter += 1
                        group_object = GroupLight(group_file,
                                                  self.gene_threshold,
                                                  self.species_threshold,
                                                  ns=ns)
                    except Exception as e:
                        # Badly formatted files are recorded and skipped.
                        # NOTE(review): e.message only exists on Python 2
                        # exceptions -- confirm target interpreter
                        print(e.message)
                        self.bad_groups.append(group_file)
                        continue
                else:
                    group_object = group_file

                # Check for duplicate group files
                if group_object.name in self.groups:
                    self.duplicate_groups.append(group_file.name)
                else:
                    self.add_group(group_object)

    def __iter__(self):
        # Yields (name, GroupLight) tuples, unpickling each group from disk
        # on demand
        for k, val in self.groups.items():
            yield k, pickle.load(open(val, "rb"))

    def clear_groups(self):
        """
        Clears the current MultiGroupsLight object, removing the pickled
        group files from disk and resetting all bookkeeping attributes.
        """
        for f in self.groups.values():
            os.remove(f)

        self.duplicate_groups = []
        self.groups = {}
        self.groups_stats = {}
        self.filters = {}
        self.max_extra_copy = {}
        self.species_number = []
        self.gene_threshold = self.species_threshold = 0

    def add_group(self, group_obj):
        """
        Adds a group object, pickling it to a randomly named file under
        db_path and registering its filters and metadata.
        :param group_obj: Group object
        """
        # Check for duplicate groups
        if group_obj.name not in self.groups:
            # Shelve the group under a random 15-character file name
            gpath = os.path.join(self.db_path,
                             "".join(random.choice(string.ascii_uppercase) for _ in
                                     range(15)))
            pickle.dump(group_obj, open(gpath, "wb"))

            self.groups[group_obj.name] = gpath
            # Default filters: max 1 gene copy, all species, no exclusions
            self.filters[group_obj.name] = (1, len(group_obj.species_list), [])

            self.max_extra_copy[group_obj.name] = group_obj.max_extra_copy
            if len(group_obj.species_list) not in self.species_number:
                self.species_number.append(len(group_obj.species_list))
        else:
            self.duplicate_groups.append(group_obj.name)

    def remove_group(self, group_id):
        """
        Removes a group object according to its name, also deleting its
        pickled file from disk.
        :param group_id: string, name matching a Group object name attribute
        """
        if group_id in self.groups:
            os.remove(self.groups[group_id])
            del self.groups[group_id]

    def get_group(self, group_id):
        """
        Returns a group object based on its name. If the name does not match
        any group object, returns None
        :param group_id: string. Name of group object
        """
        try:
            # NOTE(review): unicode() only exists on Python 2 -- confirm
            # target interpreter
            return pickle.load(open(self.groups[unicode(group_id)], "rb"))
        except KeyError:
            return

    def add_multigroups(self, multigroup_obj):
        """
        Merges a MultiGroup object
        :param multigroup_obj: MultiGroup object
        """
        # Iterating this class yields (name, group) tuples; only the group
        # object is needed here
        for _, group_obj in multigroup_obj:
            self.add_group(group_obj)

    def update_filters(self, gn_filter, sp_filter, excluded_taxa,
                       group_names=None, default=False):
        """
        This will not change the Group object themselves, only the filter
        mapping. The filter is only applied when the Group object is retrieved
        to reduce computations
        :param gn_filter: int, filter for max gene copies
        :param sp_filter: int, filter for min species
        :param excluded_taxa: list, taxa names to exclude from each group
        :param group_names: list, with names of group objects
        :param default: bool, when True reset filters to their defaults
        """
        # There are no groups to update
        if group_names == []:
            return

        if group_names:
            glist = group_names
        else:
            glist = self.groups

        for group_name in glist:
            # Get group object (unpickled from disk)
            group_obj = pickle.load(open(self.groups[group_name], "rb"))
            # Define excluded taxa
            group_obj.exclude_taxa(excluded_taxa, True)
            # Define filters (defaults: max 1 gene copy, all species)
            gn_filter = gn_filter if not default else 1
            sp_filter = sp_filter if not default else \
                len(group_obj.species_list)
            # Correct maximum filter values after excluding taxa
            gn_filter = gn_filter if gn_filter <= group_obj.max_extra_copy \
                else group_obj.max_extra_copy
            sp_filter = sp_filter if sp_filter <= len(group_obj.species_list) \
                else len(group_obj.species_list)
            # Update Group object with new filters
            group_obj.update_filters(gn_filter, sp_filter)
            # Update group stats
            self.get_multigroup_statistics(group_obj)
            # Persist the updated group back to disk
            pickle.dump(group_obj, open(self.groups[group_name], "wb"))
            # Update filter map
            self.filters[group_name] = (gn_filter, group_obj.species_threshold)
            self.taxa_list[group_name] = group_obj.species_list
            self.excluded_taxa[group_name] = group_obj.excluded_taxa

    def get_multigroup_statistics(self, group_obj):
        """
        Caches the basic statistics, species list and maximum gene copy
        number of a group under self.groups_stats, keyed by group name.
        :param group_obj: GroupLight object
        """
        stats = group_obj.basic_group_statistics()

        self.groups_stats[group_obj.name] = {"stats": stats,
                                        "species": group_obj.species_list,
                                        "max_copies": group_obj.max_extra_copy}

    def bar_orthologs(self, group_names=None, output_file_name="Final_orthologs",
                      dest="./", stats="all"):
        """
        Creates a bar plot with the final ortholog values for each group file
        :param group_names: list. If None, all groups in self.group_stats will
        be used to generate the plot. Else, only the groups with the names in
        the list will be plotted.
        :param output_file_name: string. Name of output file
        :param dest: string. output directory
        :param stats: string. The statistics that should be used to generate
        the bar plot. Options are:
            ..: "1": Total orthologs
            ..: "2": Species compliant orthologs
            ..: "3": Gene compliant orthologs
            ..: "4": Final orthologs
            ..: "all": All of the above
        Multiple combinations can be provided, for instance: "123" will
        display bars for total, species compliant and gene compliant stats
        """
        # Stores the x-axis labels
        x_labels = []
        # Stores final ortholog values for all 4 possible data sets
        vals = [[], [], [], []]
        lgd = ["Total orthologs", "After species filter", "After gene filter",
               "Final orthologs"]

        # Determine which groups will be plotted
        if group_names:
            groups_lst = group_names
        else:
            groups_lst = self.groups_stats.keys()

        for gname in groups_lst:

            gstats = self.groups_stats[gname]

            x_labels.append(gname.split(os.sep)[-1])
            # Populate total orthologs
            if "1" in stats or stats == "all":
                vals[0].append(gstats["stats"][0])
            # Populate species compliant orthologs
            if "2" in stats or stats == "all":
                vals[1].append(gstats["stats"][3])
            # Populate gene compliant orthologs
            if "3" in stats or stats == "all":
                vals[2].append(gstats["stats"][2])
            # Populate final orthologs
            if "4" in stats or stats == "all":
                vals[3].append(gstats["stats"][4])

        # Filter valid data sets (legends keep their matching series)
        lgd_list = [x for x in lgd if vals[lgd.index(x)]]
        vals = [l for l in vals if l]

        # Create plot
        b_plt, lgd = multi_bar_plot(vals, x_labels, lgd_list=lgd_list)
        b_plt.savefig(os.path.join(dest, output_file_name),
                      bbox_extra_artists=(lgd,), bbox_inches="tight", dpi=200)

        # Create table list object
        table_list = []
        # Create header
        table_list.append([""] + x_labels)
        # Create content
        for i in range(len(vals)):
            table_list += [x for x in [[lgd_list[i]] + vals[i]]]

        return b_plt, lgd, table_list
__author__ = "Diogo N. Silva"
|
from logger import *
"""
log_functions = [('no_negative_ret', 'no_negatives_log')]
log_function_args = []
def query():
def sqrt_filter(x):
return x[0] < 0
get_log('no_negatives_log').filter(sqrt_filter).print_log()
"""
"""
log_functions = [('add', 'add_log')]
log_function_args = [('mult', 'mult_log')]
def query():
print 'add log:'
get_log('add_log').print_log()
print 'mult log:'
get_log('mult_log').print_log()
"""
"""
log_functions = []
log_function_args = [('process', 'url_log')]
def query():
import re
regex = re.compile(
r'^(?:http|ftp)s?://' # http:// or https://
r'(?:(?:[A-Z0-9](?:[A-Z0-9-]{0,61}[A-Z0-9])?\.)+(?:[A-Z]{2,6}\.?|[A-Z0-9-]{2,}\.?)|' #domain...
r'localhost|' #localhost...
r'\d{1,3}\.\d{1,3}\.\d{1,3}\.\d{1,3})' # ...or ip
r'(?::\d+)?' # optional port
r'(?:/?|[/?]\S+)$', re.IGNORECASE)
def handle_url(urls):
for url in urls[0]:
if regex.match(url) is not None:
log('valid_url', url)
else:
log('invalid_url', url)
get_log('url_log').map(handle_url)
print 'Valid URLs:'
get_log('valid_url').print_log()
print 'Invalid URLs:'
get_log('invalid_url').print_log()
"""
|
from marshmallow import EXCLUDE, Schema

from ..fields.objectid import ID


class BaseSchema(Schema):
    """Common base for all API schemas: adds a read-only ``id`` field and
    shared Meta options (ordered output, unknown fields dropped on load)."""

    # Read-only document identifier (serialized ObjectId); never loaded
    # from input because of dump_only=True.
    id = ID(description='ID', dump_only=True)

    class Meta:
        # NOTE(review): ``strict`` was a marshmallow 2 option and is ignored
        # by marshmallow 3 (which ``unknown=EXCLUDE`` below implies this
        # file targets) -- confirm the intended library version.
        strict = True
        ordered = True
        # Silently drop unknown input fields instead of raising.
        unknown = EXCLUDE
|
"""
| *** ATTENTION: This is early work in progress. Interfaces are subject to change. ***
| *** DO NOT USE IN PRODUCTION until you know what you are doing ***
|
This library contains the future network classes for SmartHomeNG.
New network functions and utilities are going to be implemented in this library.
These classes, functions and methods are mainly meant to be used by plugin developers
"""
import logging
import re
import ipaddress
import requests
import select
import socket
import threading
import time
import queue
class Network(object):
    """ This Class has some useful static methods that you can use in your projects """

    @staticmethod
    def is_mac(mac):
        """
        Validates a MAC address.

        Accepted formats are 12 contiguous hex digits or six hex octets
        separated by ':', '-' or ' '.

        :param mac: MAC address
        :type mac: str

        :return: True if value is a MAC
        :rtype: bool
        """
        mac = str(mac)
        if len(mac) == 12:
            # Bare format: every single character must be a hex digit.
            # (The old ``int(c, 16) > 15`` check was dead code: one hex
            # digit can never exceed 15.)
            for char in mac:
                try:
                    int(char, 16)
                except ValueError:
                    return False
            return True

        # Separated format: exactly six octets, each a hex value <= 0xFF
        octets = re.split(r'[:\- ]', mac)
        if len(octets) != 6:
            return False
        for octet in octets:
            try:
                if int(octet, 16) > 255:
                    return False
            except ValueError:
                return False
        return True

    @staticmethod
    def is_ip(string):
        """
        Checks if a string is a valid ip-address (v4 or v6)

        :param string: String to check
        :type string: str

        :return: True if an ip, false otherwise.
        :rtype: bool
        """
        return (Network.is_ipv4(string) or Network.is_ipv6(string))

    @staticmethod
    def is_ipv4(string):
        """
        Checks if a string is a valid ip-address (v4)

        :param string: String to check
        :type string: str

        :return: True if an ip, false otherwise.
        :rtype: bool
        """
        try:
            ipaddress.IPv4Address(string)
            return True
        except ipaddress.AddressValueError:
            return False

    @staticmethod
    def is_ipv6(string):
        """
        Checks if a string is a valid ip-address (v6)

        :param string: String to check
        :type string: str

        :return: True if an ipv6, false otherwise.
        :rtype: bool
        """
        try:
            ipaddress.IPv6Address(string)
            return True
        except ipaddress.AddressValueError:
            return False

    @staticmethod
    def is_hostname(string):
        """
        Checks if a string is a valid hostname

        The hostname is checked to have a valid format

        :param string: String to check
        :type string: str

        :return: True if a hostname, false otherwise.
        :rtype: bool
        """
        try:
            # Raw string avoids invalid escape sequence warnings
            return bool(re.match(r"^(([a-zA-Z]|[a-zA-Z][a-zA-Z0-9\-]*[a-zA-Z0-9])\.)*([A-Za-z]|[A-Za-z][A-Za-z0-9\-]*[A-Za-z0-9])$", string))
        except TypeError:
            # Non-string input is not a hostname
            return False

    @staticmethod
    def get_local_ipv4_address():
        """
        Gets the local ipv4 address of the interface with the default gateway.
        Returns '127.0.0.1' if no suitable interface is found

        :return: IPv4 address as a string
        :rtype: string
        """
        s = socket.socket(socket.AF_INET, socket.SOCK_DGRAM)
        try:
            # UDP "connect" sends no packets; it only selects the
            # outgoing interface
            s.connect(('8.8.8.8', 1))
            IP = s.getsockname()[0]
        except OSError:
            IP = '127.0.0.1'
        finally:
            s.close()
        return IP

    @staticmethod
    def get_local_ipv6_address():
        """
        Gets the local ipv6 address of the interface with the default gateway.
        Returns '::1' if no suitable interface is found

        :return: IPv6 address as a string
        :rtype: string
        """
        s = socket.socket(socket.AF_INET6, socket.SOCK_DGRAM)
        try:
            s.connect(('2001:4860:4860::8888', 1))
            IP = s.getsockname()[0]
        except OSError:
            IP = '::1'
        finally:
            s.close()
        return IP

    @staticmethod
    def ip_port_to_socket(ip, port):
        """
        Returns an ip address plus port to a socket string.
        Format is 'ip:port' for IPv4 or '[ip]:port' for IPv6

        :return: Socket address / IPEndPoint as string
        :rtype: string
        """
        if Network.is_ipv6(ip):
            ip = '[{}]'.format(ip)
        return '{}:{}'.format(ip, port)

    @staticmethod
    def ipver_to_string(ipver):
        """
        Converts a socket address family to an ip version string 'IPv4' or 'IPv6'

        :param ipver: Socket family
        :type ipver: socket.AF_INET or socket.AF_INET6

        :return: 'IPv4' or 'IPv6'
        :rtype: string
        """
        return 'IPv6' if ipver == socket.AF_INET6 else 'IPv4'
class Http(object):
    """
    Creates an instance of the Http class.

    :param baseurl: base URL used everywhere in this instance (example: http://www.myserver.tld)
    :type baseurl: str
    """
    def __init__(self, baseurl=None):
        self.logger = logging.getLogger(__name__)
        self.baseurl = baseurl
        # Holds the requests.Response of the last executed request
        # (None before the first request)
        self._response = None
        # Default timeout in seconds applied to every request
        self.timeout = 10

    def get_json(self, url=None, params=None):
        """
        Launches a GET request and returns JSON answer as a dict or None on error.

        :param url: Optional URL to fetch from. If None (default) use baseurl given on init.
        :param params: Optional dict of parameters to add to URL query string.
        :type url: str
        :type params: dict

        :return: JSON answer decoded into a dict or None on whatever error occured
        :rtype: dict | None
        """
        # Bug fix: bail out if the GET itself failed instead of decoding a
        # stale (or missing) response from an earlier call
        if not self.__get(url=url, params=params):
            return None
        try:
            return self._response.json()
        except Exception:
            self.logger.warning("Invalid JSON received from {} !".format(url if url else self.baseurl))
            return None

    def get_text(self, url=None, params=None, encoding=None, timeout=None):
        """
        Launches a GET request and returns answer as string or None on error.

        :param url: Optional URL to fetch from. Default is to use baseurl given to constructor.
        :param params: Optional dict of parameters to add to URL query string.
        :param encoding: Optional encoding of the received text. Default is to let the lib try to figure out the right encoding.
        :param timeout: Optional timeout in seconds; defaults to self.timeout.
        :type url: str
        :type params: dict
        :type encoding: str

        :return: Answer decoded into a string or None on whatever error occured
        :rtype: str | None
        """
        _text = None
        if self.__get(url=url, params=params, timeout=timeout):
            try:
                if encoding:
                    self._response.encoding = encoding
                _text = self._response.text
            except Exception:
                self.logger.error("Successfull GET, but decoding response failed. This should never happen !")
        return _text

    def get_binary(self, url=None, params=None):
        """
        Launches a GET request and returns answer as raw binary data or None on error.
        This is usefull for downloading binary objects / files.

        :param url: Optional URL to fetch from. Default is to use baseurl given to constructor.
        :param params: Optional dict of parameters to add to URL query string.
        :type url: str
        :type params: dict

        :return: Answer as raw binary object or None on whatever error occured
        :rtype: bytes | None
        """
        # Bug fix: previously this dereferenced self._response even when the
        # GET failed, which could raise or return stale data
        if not self.__get(url=url, params=params):
            return None
        return self._response.content

    def response_status(self):
        """
        Returns the status code (200, 404, ...) of the last executed request.
        If GET request was not possible and thus no HTTP statuscode is available the returned status code = 0.

        :return: Status code and text of last request
        :rtype: (int, str)
        """
        try:
            (code, reason) = (self._response.status_code, self._response.reason)
        except Exception:
            # No (successful) request performed yet
            code = 0
            reason = 'Unable to complete GET request'
        return (code, reason)

    def response_headers(self):
        """
        Returns a dictionary with the server return headers of the last executed request

        :return: Headers returned by server
        :rtype: dict
        """
        return self._response.headers

    def response_cookies(self):
        """
        Returns a dictionary with the cookies the server may have sent on the last executed request

        :return: Cookies returned by server
        :rtype: dict
        """
        return self._response.cookies

    def response_object(self):
        """
        Returns the raw response object for advanced ussage. Use if you know what you are doing.
        Maybe this lib can be extented to your needs instead ?

        :return: Reponse object as returned by underlying requests library
        :rtype: `requests.Response <http://docs.python-requests.org/en/master/user/quickstart/#response-content>`_
        """
        return self._response

    def __get(self, url=None, params=None, timeout=None):
        """Internal GET helper; stores the response and returns True on
        success, False on any request error."""
        url = url if url else self.baseurl
        timeout = timeout if timeout else self.timeout
        self.logger.info("Sending GET request to {}".format(url))
        try:
            self._response = requests.get(url, params=params, timeout=timeout)
            self.logger.debug("{} Fetched URL {}".format(self.response_status(), self._response.url))
        except Exception as e:
            self.logger.warning("Error sending GET request to {}: {}".format(url, e))
            return False
        return True
class Tcp_client(object):
""" Creates a new instance of the Tcp_client class
:param host: Remote host name or ip address (v4 or v6)
:param port: Remote host port to connect to
:param name: Name of this connection (mainly for logging purposes). Try to keep the name short.
:param autoreconnect: Should the socket try to reconnect on lost connection (or finished connect cycle)
:param connect_retries: Number of connect retries per cycle
:param connect_cycle: Time between retries inside a connect cycle
:param retry_cycle: Time between cycles if :param:autoreconnect is True
:param binary: Switch between binary and text mode. Text will be encoded / decoded using encoding parameter.
:param terminator: Terminator to use to split received data into chunks (split lines <cr> for example). If integer then split into n bytes. Default is None means process chunks as received.
:type host: str
:type port: int
:type name: str
:type autoreconnect: bool
:type connect_retries: int
:type connect_cycle: int
:type retry_cycle: int
:type binary: bool
:type terminator: int | bytes | str
"""
def __init__(self, host, port, name=None, autoreconnect=True, connect_retries=5, connect_cycle=5, retry_cycle=30, binary=False, terminator=False):
self.logger = logging.getLogger(__name__)
# Public properties
self.name = name
self.terminator = None
# "Private" properties
self._host = host
self._port = port
self._autoreconnect = autoreconnect
self._is_connected = False
self._connect_retries = connect_retries
self._connect_cycle = connect_cycle
self._retry_cycle = retry_cycle
self._timeout = 1
self._hostip = None
self._ipver = socket.AF_INET
self._socket = None
self._connect_counter = 0
self._binary = binary
self._connected_callback = None
self._disconnected_callback = None
self._data_received_callback = None
# "Secret" properties
self.__connect_thread = None
self.__connect_threadlock = threading.Lock()
self.__receive_thread = None
self.__receive_threadlock = threading.Lock()
self.__running = True
self.logger.setLevel(logging.DEBUG)
self.logger.info("Initializing a connection to {} on TCP port {} {} autoreconnect".format(self._host, self._port, ('with' if self._autoreconnect else 'without')))
# Test if host is an ip address or a host name
if Network.is_ip(self._host):
# host is a valid ip address (v4 or v6)
self.logger.debug("{} is a valid IP address".format(host))
self._hostip = self._host
if Network.is_ipv6(self._host):
self._ipver = socket.AF_INET6
else:
self._ipver = socket.AF_INET
else:
# host is a hostname, trying to resolve to an ip address (v4 or v6)
self.logger.debug("{} is not a valid IP address, trying to resolve it as hostname".format(host))
try:
self._ipver, sockettype, proto, canonname, socketaddr = socket.getaddrinfo(host, None)[0]
# Check if resolved address is IPv4 or IPv6
if self._ipver == socket.AF_INET: # is IPv4
self._hostip, port = socketaddr
elif self._ipver == socket.AF_INET6: # is IPv6
self._hostip, port, flow_info, scope_id = socketaddr
else:
# This should never happen
self.logger.error("Unknown ip address family {}".format(self._ipver))
self._hostip = None
# Print ip address on successfull resolve
if self._hostip is not None:
self.logger.info("Resolved {} to {} address {}".format(self._host, 'IPv6' if self._ipver == socket.AF_INET6 else 'IPv4', self._hostip))
except:
# Unable to resolve hostname
self.logger.error("Cannot resolve {} to a valid ip address (v4 or v6)".format(self._host))
self._hostip = None
def set_callbacks(self, connected=None, data_received=None, disconnected=None):
""" Set callbacks to caller for different socket events
:param connected: Called whenever a connection is established successfully
:param data_received: Called when data is received
:param disconnected: Called when a connection has been dropped for whatever reason
:type connected: function
:type data_received: function
:type disconnected: function
"""
self._connected_callback = connected
self._disconnected_callback = disconnected
self._data_received_callback = data_received
def connect(self):
""" Connects the socket
:return: False if an error prevented us from launching a connection thread. True if a connection thread has been started.
:rtype: bool
"""
if self._hostip is None: # return False if no valid ip to connect to
self.logger.error("No valid IP address to connect to {}".format(self._host))
self._is_connected = False
return False
if self._is_connected: # return false if already connected
self.logger.error("Already connected to {}, ignoring new request".format(self._host))
return False
self.__connect_thread = threading.Thread(target=self._connect_thread_worker, name='TCP_Connect')
self.__connect_thread.daemon = True
self.__connect_thread.start()
return True
def connected(self):
""" Returns the current connection state
:return: True if an active connection exists,else False.
:rtype: bool
"""
return self._is_connected
def send(self, message):
""" Sends a message to the server. Can be a string, bytes or a bytes array.
:return: True if message has been successfully sent, else False.
:rtype: bool
"""
if not isinstance(message, (bytes, bytearray)):
try:
message = message.encode('utf-8')
except:
self.logger.warning("Error encoding message for client {}".format(self.name))
return False
try:
if self._is_connected:
self._socket.send(message)
else:
return False
except:
self.logger.warning("No connection to {}, cannot send data {}".format(self._host, msg))
return False
return True
def _connect_thread_worker(self):
if not self.__connect_threadlock.acquire(blocking=False):
self.logger.warning("Connection attempt already in progress for {}, ignoring new request".format(self._host))
return
if self._is_connected:
self.logger.error("Already connected to {}, ignoring new request".format(self._host))
return
self.logger.debug("Starting connection cycle for {}".format(self._host))
self._connect_counter = 0
while self.__running and not self._is_connected:
# Try a full connect cycle
while not self._is_connected and self._connect_counter < self._connect_retries and self.__running:
self._connect()
if self._is_connected:
try:
self.__connect_threadlock.release()
self._connected_callback and self._connected_callback(self)
self.__receive_thread = threading.Thread(target=self.__receive_thread_worker, name='TCP_Receive')
self.__receive_thread.daemon = True
self.__receive_thread.start()
except:
raise
return True
self._sleep(self._connect_cycle)
if self._autoreconnect:
self._sleep(self._retry_cycle)
self._connect_counter = 0
else:
break
try:
self.__connect_threadlock.release()
except:
pass
def _connect(self):
self.logger.debug("Connecting to {} using {} {} on TCP port {} {} autoreconnect".format(self._host, 'IPv6' if self._ipver == socket.AF_INET6 else 'IPv4', self._hostip, self._port, ('with' if self._autoreconnect else 'without')))
# Try to connect to remote host using ip (v4 or v6)
try:
self._socket = socket.socket(self._ipver, socket.SOCK_STREAM)
self._socket.setsockopt(socket.SOL_SOCKET, socket.SO_KEEPALIVE, 1)
self._socket.settimeout(5)
self._socket.connect(('{}'.format(self._hostip), int(self._port)))
self._socket.settimeout(self._timeout)
self._is_connected = True
self.logger.info("Connected to {} on TCP port {}".format(self._host, self._port))
# Connection error
except Exception as err:
self._is_connected = False
self._connect_counter += 1
self.logger.warning("TCP connection to {}:{} failed with error {}. Counter: {}/{}".format(self._host, self._port, err, self._connect_counter, self._connect_retries))
    def __receive_thread_worker(self):
        """Receive loop run on the dedicated TCP_Receive thread.

        Polls the connected socket, decodes incoming data (text mode) and
        dispatches it to ``self._data_received_callback`` either chunked by
        ``self.terminator`` (int = fixed-size chunks, str/bytes = delimiter)
        or as-is. On peer disconnect, fires the disconnected callback and
        optionally reconnects.

        NOTE(review): in text mode with a terminator, ``msg`` is decoded to
        ``str`` but ``__buffer`` starts as ``b''`` — ``bytes + str`` would
        raise TypeError; presumably terminator mode is only used with
        ``self._binary`` set, or the terminator/buffer types are aligned by
        the caller — confirm.
        """
        poller = select.poll()
        poller.register(self._socket, select.POLLIN | select.POLLPRI | select.POLLHUP | select.POLLERR)
        __buffer = b''
        # Loop until either the connection drops or close() clears __running.
        while self._is_connected and self.__running:
            # 1s poll timeout so the __running flag is re-checked regularly.
            events = poller.poll(1000)
            for fd, event in events:
                if event & select.POLLHUP:
                    # Hang-up is only logged here; the actual teardown happens
                    # below when recv() returns an empty message.
                    self.logger.warning("Client socket closed")
                # Check if POLLIN event triggered
                if event & (select.POLLIN | select.POLLPRI):
                    msg = self._socket.recv(4096)
                    # Check if incoming message is not empty
                    if msg:
                        # If we transfer in text mode decode message to string
                        if not self._binary:
                            msg = str.rstrip(str(msg, 'utf-8'))
                        # If we work in line mode (with a terminator) slice buffer into single chunks based on terminator
                        if self.terminator:
                            __buffer += msg
                            while True:
                                # terminator = int means fixed size chunks
                                if isinstance(self.terminator, int):
                                    i = self.terminator
                                    if i > len(__buffer):
                                        break
                                # terminator is str or bytes means search for it
                                else:
                                    i = __buffer.find(self.terminator)
                                    if i == -1:
                                        break
                                    # Include the terminator itself in the chunk.
                                    i += len(self.terminator)
                                line = __buffer[:i]
                                __buffer = __buffer[i:]
                                if self._data_received_callback is not None:
                                    self._data_received_callback(self, line)
                            # If not in terminator mode just forward what we received
                        else:
                            if self._data_received_callback is not None:
                                self._data_received_callback(self, msg)
                    # If empty peer has closed the connection
                    else:
                        # Peer connection closed
                        self.logger.warning("Connection closed by peer {}".format(self._host))
                        self._is_connected = False
                        poller.unregister(self._socket)
                        self._disconnected_callback and self._disconnected_callback(self)
                        if self._autoreconnect:
                            # NOTE(review): connect() is invoked from this
                            # receive thread; it spawns a fresh receive thread
                            # once reconnected, after which this one exits.
                            self.logger.debug("Autoreconnect enabled for {}".format(self._host))
                            self.connect()
def _sleep(self, time_lapse):
time_start = time.time()
time_end = (time_start + time_lapse)
while self.__running and time_end > time.time():
pass
def close(self):
""" Closes the current client socket """
self.logger.info("Closing connection to {} on TCP port {}".format(self._host, self._port))
self.__running = False
if self.__connect_thread is not None and self.__connect_thread.isAlive():
self.__connect_thread.join()
if self.__receive_thread is not None and self.__receive_thread.isAlive():
self.__receive_thread.join()
class _Client(object):
    """ Client object that represents a connected client of tcp_server.

    Outgoing messages are queued by :meth:`send` and flushed to the socket
    by the server's connection thread via :meth:`_process_queue`.

    :param server: The tcp_server passes a reference to itself to access parent methods
    :param socket: socket.Socket class used by the Client object
    :param fd: File descriptor of socket used by the Client object

    :type server: tcp_server
    :type socket: function
    :type fd: int
    """

    def __init__(self, server=None, socket=None, fd=None):
        self.logger = logging.getLogger(__name__)
        # Public peer info; filled in by the server right after accept().
        self.name = None
        self.ip = None
        self.port = None
        self.ipver = None
        self._message_queue = queue.Queue()
        self._data_received_callback = None
        self._will_close_callback = None
        self._fd = fd
        self.__server = server
        self.__socket = socket

    @property
    def socket(self):
        """The underlying connected socket object."""
        return self.__socket

    @property
    def fd(self):
        """File descriptor of the underlying socket."""
        return self._fd

    def set_callbacks(self, data_received=None, will_close=None):
        """ Set callbacks for different socket events (client based)

        :param data_received: Called when data is received
        :type data_received: function
        :param will_close: Called just before this client closes its socket
        :type will_close: function
        """
        self._data_received_callback = data_received
        self._will_close_callback = will_close

    def send(self, message):
        """ Send a string to connected client

        :param message: Message to send
        :type message: string | bytes | bytearray
        :return: True if message has been queued successfully.
        :rtype: bool
        """
        if not isinstance(message, (bytes, bytearray)):
            try:
                message = message.encode('utf-8')
            except Exception:
                self.logger.warning("Error encoding message for client {}".format(self.name))
                return False
        try:
            self._message_queue.put_nowait(message)
        except Exception:
            self.logger.warning("Error queueing message for client {}".format(self.name))
            return False
        return True

    def send_echo_off(self):
        """ Sends an IAC telnet command to ask client to turn it's echo off """
        command = bytearray([0xFF, 0xFB, 0x01])
        string = self._iac_to_string(command)
        self.logger.debug("Sending IAC telnet command: '{}'".format(string))
        self.send(command)

    def send_echo_on(self):
        """ Sends an IAC telnet command to ask client to turn it's echo on again """
        command = bytearray([0xFF, 0xFC, 0x01])
        string = self._iac_to_string(command)
        self.logger.debug("Sending IAC telnet command: '{}'".format(string))
        self.send(command)

    def process_IAC(self, msg):
        """ Processes incomming IAC messages. Does nothing for now except logging them in clear text """
        string = self._iac_to_string(msg)
        self.logger.debug("Received IAC telnet command: '{}'".format(string))

    def close(self):
        """ Client socket closes itself """
        self._process_queue()  # Be sure that possible remaining messages will be processed
        self.__socket.shutdown(socket.SHUT_RDWR)
        self.logger.info("Closing connection for client {}".format(self.name))
        self._will_close_callback and self._will_close_callback(self)
        self.set_callbacks(data_received=None, will_close=None)
        # Bug fix: the attribute is ``_message_queue`` (single underscore).
        # ``self.__message_queue`` name-mangled to ``_Client__message_queue``,
        # which never exists, so every close() raised AttributeError.
        del self._message_queue
        self.__socket.close()
        return True

    def _iac_to_string(self, msg):
        """Render an IAC byte sequence as readable telnet command names."""
        iac = {1: 'ECHO', 251: 'WILL', 252: 'WON\'T', 253: 'DO', 254: 'DON\'T', 255: 'IAC'}
        string = ''
        for char in msg:
            if char in iac:
                string += iac[char] + ' '
            else:
                string += '<UNKNOWN> '
        return string.rstrip()

    def _process_queue(self):
        """Drain the message queue, writing each queued message to the socket."""
        while not self._message_queue.empty():
            msg = self._message_queue.get_nowait()
            try:
                # Bug fix: a stray trailing comma here previously made
                # ``string`` a 1-tuple, garbling the debug output.
                string = str(msg, 'utf-8')
                self.logger.debug("Sending '{}' to {}".format(string, self.name))
            except Exception:
                self.logger.debug("Sending undecodable bytes to {}".format(self.name))
            self.__socket.send(msg)
            self._message_queue.task_done()
        return True
class Tcp_server(object):
    """ Creates a new instance of the Tcp_server class.

    Threaded TCP listener that accepts multiple clients; one thread accepts
    connections, a second one services all connected clients via poll().

    :param interface: Local interface name or ip address (v4 or v6) to bind to. Default is '::' which listens on all IPv4 and all IPv6 addresses available.
    :param port: Local port to listen on
    :param name: Name of this connection (mainly for logging purposes)

    :type interface: str
    :type port: int
    :type name: str
    """

    def __init__(self, port, interface='::', name=None):
        self.logger = logging.getLogger(__name__)

        # Public properties
        self.name = name

        # "Private" properties
        self._interface = interface
        self._port = port
        self._is_listening = False
        self._timeout = 1

        self._interfaceip = None
        self._ipver = socket.AF_INET
        self._socket = None

        self._listening_callback = None
        self._incoming_connection_callback = None
        self._data_received_callback = None
        # Bug fix: previously only set by set_callbacks(); _remove_client
        # raised AttributeError if start() was used without callbacks.
        self._disconnected_callback = None

        # "Secret" properties
        self.__listening_thread = None
        self.__listening_threadlock = threading.Lock()
        self.__connection_thread = None
        self.__connection_threadlock = threading.Lock()
        self.__connection_poller = None
        self.__message_queues = {}  # NOTE(review): unused; queues live on each _Client
        self.__connection_map = {}  # fd -> _Client for every open connection
        self.__running = True

        # Test if host is an ip address or a host name
        if Network.is_ip(self._interface):
            # host is a valid ip address (v4 or v6)
            self.logger.debug("{} is a valid IP address".format(self._interface))
            self._interfaceip = self._interface
            if Network.is_ipv6(self._interfaceip):
                self._ipver = socket.AF_INET6
            else:
                self._ipver = socket.AF_INET
        else:
            # host is a hostname, trying to resolve to an ip address (v4 or v6)
            self.logger.debug("{} is not a valid IP address, trying to resolve it as hostname".format(self._interface))
            try:
                self._ipver, sockettype, proto, canonname, socketaddr = socket.getaddrinfo(self._interface, None)[0]
                # Check if resolved address is IPv4 or IPv6
                if self._ipver == socket.AF_INET:
                    self._interfaceip, port = socketaddr
                elif self._ipver == socket.AF_INET6:
                    self._interfaceip, port, flow_info, scope_id = socketaddr
                else:
                    self.logger.error("Unknown ip address family {}".format(self._ipver))
                    self._interfaceip = None
                if self._interfaceip is not None:
                    # Bug fix: this logged ``self._hostip``, an attribute of
                    # the client class that never exists on the server, and so
                    # raised AttributeError whenever a hostname was resolved.
                    self.logger.info("Resolved {} to {} address {}".format(self._interface, ipver_to_string(self._ipver), self._interfaceip))
            except Exception:
                # Unable to resolve hostname
                self.logger.error("Cannot resolve {} to a valid ip address (v4 or v6)".format(self._interface))
                self._interfaceip = None

        self.__our_socket = Network.ip_port_to_socket(self._interfaceip, self._port)
        if not self.name:
            self.name = self.__our_socket
        self.logger.info("Initializing TCP server socket {}".format(self.__our_socket))

    def set_callbacks(self, listening=None, incoming_connection=None, disconnected=None, data_received=None):
        """ Set callbacks to caller for different socket events

        :param listening: Called when the server socket starts listening
        :param incoming_connection: Called whenever a new client connects
        :param data_received: Called when data is received
        :param disconnected: Called when a connection has been dropped for whatever reason

        :type listening: function
        :type incoming_connection: function
        :type data_received: function
        :type disconnected: function
        """
        self._listening_callback = listening
        self._incoming_connection_callback = incoming_connection
        self._data_received_callback = data_received
        self._disconnected_callback = disconnected

    def start(self):
        """ Start the server socket

        :return: False if an error prevented us from launching a connection thread. True if a connection thread has been started.
        :rtype: bool
        """
        if self._is_listening:
            return
        try:
            self._socket = socket.socket(self._ipver, socket.SOCK_STREAM)
            self._socket.setsockopt(socket.SOL_SOCKET, socket.SO_REUSEADDR, 1)
            self._socket.bind((self._interfaceip, self._port))
        except Exception as e:
            self.logger.error("Problem binding to interface {} on port {}: {}".format(self._interfaceip, self._port, e))
            self._is_listening = False
            return False
        else:
            self.logger.debug("Bound listening socket to interface {} on port {}".format(self._interfaceip, self._port))
        try:
            self._socket.listen(5)
            # Non-blocking so the listener thread can multiplex via poll().
            self._socket.setblocking(0)
            self.logger.info("Listening on socket {}".format(self.__our_socket))
        except Exception as e:
            self.logger.error("Problem starting listening socket on interface {} port {}: {}".format(self._interfaceip, self._port, e))
            self._is_listening = False
            return False
        self._is_listening = True
        self._listening_callback and self._listening_callback(self)
        self.__listening_thread = threading.Thread(target=self.__listening_thread_worker, name='TCP_Listener')
        self.__listening_thread.daemon = True
        self.__listening_thread.start()
        return True

    def __listening_thread_worker(self):
        """Accept loop run on the TCP_Listener thread.

        Registers every accepted connection with the shared connection
        poller and (re)starts the connection thread when necessary.
        """
        poller = select.poll()
        poller.register(self._socket, select.POLLIN | select.POLLPRI | select.POLLHUP | select.POLLERR)
        self.logger.debug("Waiting for incoming connections on socket {}".format(self.__our_socket))
        while self.__running:
            events = poller.poll(1000)
            for fd, event in events:
                if event & select.POLLERR:
                    self.logger.debug("Listening thread POLLERR")
                if event & select.POLLHUP:
                    self.logger.debug("Listening thread POLLHUP")
                if event & (select.POLLIN | select.POLLPRI):
                    connection, peer = self._socket.accept()
                    connection.setblocking(0)
                    fd = connection.fileno()
                    peer_socket = Network.ip_port_to_socket(peer[0], peer[1])
                    client = _Client(server=self, socket=connection, fd=fd)
                    client.ip = peer[0]
                    client.ipver = socket.AF_INET6 if Network.is_ipv6(client.ip) else socket.AF_INET
                    client.port = peer[1]
                    client.name = Network.ip_port_to_socket(client.ip, client.port)
                    self.logger.info("Incoming connection from {} on socket {}".format(peer_socket, self.__our_socket))
                    self.__connection_map[fd] = client
                    self._incoming_connection_callback and self._incoming_connection_callback(self, client)
                    if self.__connection_thread is None:
                        self.logger.debug("Connection thread not running yet, firing it up ...")
                        self.__connection_thread = threading.Thread(target=self.__connection_thread_worker, name='TCP_Server')
                    if self.__connection_poller is None:
                        self.__connection_poller = select.poll()
                    # Every new connection is registered; POLLOUT drives queue
                    # flushing, POLLIN/POLLPRI drive receives.
                    self.__connection_poller.register(connection, select.POLLOUT | select.POLLIN | select.POLLPRI | select.POLLHUP | select.POLLERR)
                    if not self.__connection_thread.is_alive():
                        self.__connection_thread.daemon = True
                        self.__connection_thread.start()

    def __connection_thread_worker(self):
        """ This thread handles the send & receive tasks of connected clients. """
        self.logger.debug("Connection thread on socket {} starting up".format(self.__our_socket))
        # Exit once the last client has been removed; the listener thread
        # starts a fresh connection thread for the next client.
        while self.__running and len(self.__connection_map) > 0:
            events = self.__connection_poller.poll(1000)
            for fd, event in events:
                client = self.__connection_map[fd]
                sock = client.socket
                if event & select.POLLERR:
                    self.logger.debug("Connection thread POLLERR")
                if event & select.POLLHUP:
                    self.logger.debug("Connection thread POLLHUP")
                if event & select.POLLOUT:
                    if not client._message_queue.empty():
                        client._process_queue()
                if event & (select.POLLIN | select.POLLPRI):
                    msg = sock.recv(4096)
                    if msg:
                        try:
                            string = str.rstrip(str(msg, 'utf-8'))
                            self.logger.debug("Received '{}' from {}".format(string, client.name))
                            self._data_received_callback and self._data_received_callback(self, client, string)
                            client._data_received_callback and client._data_received_callback(self, client, string)
                        except Exception:
                            # Undecodable input: only telnet IAC sequences
                            # (leading 0xFF) get special treatment.
                            self.logger.debug("Received undecodable bytes from {}".format(client.name))
                            if msg[0] == 0xFF:
                                client.process_IAC(msg)
                    else:
                        # Empty read means the peer closed the connection.
                        self._remove_client(client)
        self.__connection_poller = None
        self.__connection_thread = None
        self.logger.debug("Last connection closed for socket {}, stopping connection thread".format(self.__our_socket))

    def listening(self):
        """ Returns the current listening state

        :return: True if the server socket is actually listening, else False.
        :rtype: bool
        """
        return self._is_listening

    def send(self, client, msg):
        """ Send a string to connected client

        :param client: Client Object to send message to
        :param msg: Message to send

        :type client: network.Client
        :type msg: string | bytes | bytearray

        :return: True if message has been queued successfully.
        :rtype: bool
        """
        return client.send(msg)

    def disconnect(self, client):
        """ Disconnects a specific client

        :param client: Client Object to disconnect
        :type client: network.Client
        """
        client.close()
        return True

    def _remove_client(self, client):
        """Drop a disconnected client from the poller and connection map."""
        self.logger.info("Lost connection to client {}, removing it".format(client.name))
        self._disconnected_callback and self._disconnected_callback(self, client)
        self.__connection_poller.unregister(client.fd)
        del self.__connection_map[client.fd]
        return True

    def _sleep(self, time_lapse):
        """ Non blocking sleep. Does return when self.close is called and running set to False.

        :param time_lapse: Time in seconds to sleep
        :type time_lapse: float
        """
        time_end = time.time() + time_lapse
        while self.__running and time_end > time.time():
            # Yield the CPU; the previous implementation busy-waited at 100%.
            time.sleep(0.01)

    def close(self):
        """ Closes running listening socket """
        self.logger.info("Shutting down listening socket on interface {} port {}".format(self._interface, self._port))
        self.__running = False
        # Thread.isAlive() was removed in Python 3.9; is_alive() is the
        # long-standing spelling and also works on Python 2.6+.
        if self.__listening_thread is not None and self.__listening_thread.is_alive():
            self.__listening_thread.join()
        if self.__connection_thread is not None and self.__connection_thread.is_alive():
            self.__connection_thread.join()
|
"""
Site
====
Site class. Create one for each independent site with its own configuration.
"""
import logging
from .request import normalize, make_request, And, Condition, Or, Not
from .query import QueryFilter, QuerySelect, QueryChain, QueryOrder, QueryRange,\
QueryDistinct, QueryAggregate
from .access_point import DEFAULT_PARAMETER
def _translate_request(request, aliases):
    """Translate high-level ``request`` to low-level using ``aliases``.

    Composite requests (And/Or/Not) are rebuilt with translated children;
    Condition requests are rewritten in place so their property refers to a
    selected alias, registering a new ``__``-prefixed alias when needed.
    """
    if isinstance(request, And):
        return And(*(_translate_request(sub, aliases)
                     for sub in request.sub_requests))
    if isinstance(request, Or):
        return Or(*(_translate_request(sub, aliases)
                    for sub in request.sub_requests))
    if isinstance(request, Not):
        return Not(_translate_request(request.sub_request, aliases))
    if isinstance(request, Condition):
        path = repr(request.property)
        if path in aliases:
            # The complete path has already been selected,
            # Let's use the alias instead !
            request.property.name = aliases.get(path, path)
            request.property.child_property = None
        elif path in aliases.values():
            pass  # Already a low-level alias: nothing to rewrite.
        elif ".".join(path.split(".")[:-1] + ["*"]) in aliases:
            pass  # Covered by a wildcard selection on the parent path.
        else:
            # Register a dummy "__"-prefixed select for this path so that
            # filtering on it becomes possible.
            alias = "__%s" % path.replace(".", "_")
            aliases[path] = alias
            request.property.name = alias
            request.property.child_property = None
        return request
def _delegate_to_acces_point(method_name, first_arg_is_a_request=False):
    """Create a function delegating ``method_name`` to an access point."""
    if first_arg_is_a_request:
        def wrapper(self, access_point_name, request=None, *args, **kwargs):
            """Call ``access_point.method_name(request, *args, **kwargs)``."""
            point = self.access_points[access_point_name]
            low_level_request = normalize(point.properties, request)
            return getattr(point, method_name)(low_level_request, *args, **kwargs)
    else:
        def wrapper(self, access_point_name, *args, **kwargs):
            """Call ``access_point.method_name(*args, **kwargs)``."""
            point = self.access_points[access_point_name]
            return getattr(point, method_name)(*args, **kwargs)
    # Redefining documentation and name of the wrappers
    # pylint: disable=W0622
    wrapper.__name__ = method_name
    wrapper.__doc__ = (
        "Call :meth:`kalamar.access_point.AccessPoint.%s`." % method_name)
    # pylint: enable=W0622
    return wrapper
class Site(object):
    """Kalamar site.

    Central registry mapping access point names to access point instances;
    all CRUD and query operations are dispatched through it.
    """
    def __init__(self):
        # name -> AccessPoint registry, filled by register().
        self.access_points = {}
        self.logger = logging.getLogger("dyko")
        # ``logging.NullHandler`` only exists from Python 2.7/3.1 on; fall
        # back to an equivalent no-op handler so importing the library never
        # emits "no handlers could be found" warnings.
        try:
            from logging import NullHandler
        except ImportError:
            class NullHandler(logging.Handler):
                def emit(self, record):
                    pass
        self.logger.addHandler(NullHandler())

    def register(self, name, access_point):
        """Add an access point to this site.

        :param name: Identifier string of the added access point.
        :param access_point: Concrete subclass of :class:`AccessPoint`.
        :raises RuntimeError: if ``name`` is already registered.
        """
        if name in self.access_points:
            raise RuntimeError(
                "Site already has an access point named %r." % name)
        self.access_points[name] = access_point
        access_point.bind(self, name)

    def view(self, access_point_name, aliases=None, request=None, order_by=None,
             select_range=None, distinct=False, aggregate=None, query=None):
        """Call :meth:`kalamar.access_point.AccessPoint.view`.

        If ``aliases`` and ``request`` are given, a query is created from
        them. The query is then validated and then passed to the ``view``
        method of the access point called ``access_point_name``.

        Yields result lines with the internal ``__``-prefixed helper
        properties stripped out.
        """
        access_point = self.access_points[access_point_name]
        if aliases is None:
            aliases = {"": "*"}
        if query is None:
            # Add dummy selects to be able to filter on those
            chain = []
            # Invert to {path: alias} so _translate_request can look paths up;
            # it may add "__" helper aliases while rewriting the request.
            aliases = dict(((value, key) for key, value in aliases.items()))
            request = make_request(request)
            request = _translate_request(request, aliases)
            # Invert back to {alias: path} for the QuerySelect.
            aliases = dict(((value, key) for key, value in aliases.items()))
            chain.append(QuerySelect(aliases))
            chain.append(QueryFilter(request))
            if distinct:
                chain.append(QueryDistinct())
            if order_by is not None:
                chain.append(QueryOrder(order_by))
            if aggregate is not None:
                chain.append(QueryAggregate(aggregate))
            if select_range is not None:
                # An iterable becomes slice(start, stop); a scalar slice(stop).
                if hasattr(select_range, "__iter__"):
                    select_range = slice(*select_range)
                else:
                    select_range = slice(select_range)
                chain.append(QueryRange(select_range))
            query = QueryChain(chain)
        query.validate(access_point.properties)
        for line in access_point.view(query):
            # Strip the dummy "__" selects added above before yielding.
            for prop_name in [name for name in line if name.startswith("__")]:
                line.pop(prop_name)
            yield line

    def from_repr(self, access_point_name, repr, default=DEFAULT_PARAMETER):
        """
        Return an item of ``access_point_name`` from the ``repr`` string.

        ``repr`` should have been generated with item.__repr__()
        """
        # NOTE(review): ``default`` is currently unused — confirm whether it
        # should be forwarded to the loader.
        access_point = self.access_points[access_point_name]
        return access_point.loader_from_reference_repr(repr)(None)[0]

    # CRUD helpers generated by _delegate_to_acces_point; the ones created
    # with ``True`` take a request as their first positional argument.
    create = _delegate_to_acces_point("create")
    delete = _delegate_to_acces_point("delete")
    delete_many = _delegate_to_acces_point("delete_many", True)
    open = _delegate_to_acces_point("open", True)
    search = _delegate_to_acces_point("search", True)
    save = _delegate_to_acces_point("save")
|
from logging import Logger
from requests.sessions import Session
def getpocket_download(session: Session, _logger: Logger):
    """Fetch the caller's saved items from the Pocket ``/v3/get`` API.

    :param session: authenticated ``requests`` session carrying the Pocket
        credentials/cookies.
    :param _logger: logger used to report the downloaded payload.
    :return: the decoded JSON response.
    :raises requests.HTTPError: if Pocket answers with an error status.
    """
    headers = {
        "Origin": "https://app.getpocket.com",  # checked that this is needed
    }
    params = {
        "enable_cors": "1",  # checked that this is needed
        # NOTE(review): hard-coded consumer key shipped with the client —
        # confirm this is the intended public app key.
        "consumer_key": "78809-9423d8c743a58f62b23ee85c",  # checked that this is needed
    }
    url = "https://getpocket.com/v3/get"
    response = session.post(url=url, headers=headers, params=params)
    response.raise_for_status()
    obj = response.json()
    # Fix: report through the injected logger instead of print(), and hand
    # the payload back to the caller (it was previously printed + discarded).
    _logger.debug("Pocket /v3/get returned: %s", obj)
    return obj
|
"""Packaging script for bench.

Reads the package version from ``bench/__init__.py`` and the dependency
list from ``requirements.txt``. The previous revision imported pip's
private ``parse_requirements`` API, whose location and attributes
(``.req``/``._link``) changed incompatibly across pip releases and were
removed in pip 20; parsing the requirements file directly removes the
dependency on pip internals.
"""
from setuptools import setup, find_packages
import ast
import re


def _parse_requirement_lines(text):
    """Return the non-empty, non-comment requirement lines of *text*."""
    lines = []
    for raw in text.splitlines():
        line = raw.strip()
        if line and not line.startswith('#'):
            lines.append(line)
    return lines


# Extract __version__ from the package without importing it.
_version_re = re.compile(r'__version__\s+=\s+(.*)')

with open('bench/__init__.py', 'rb') as f:
    version = str(ast.literal_eval(_version_re.search(
        f.read().decode('utf-8')).group(1)))

with open('requirements.txt') as f:
    requirements = _parse_requirement_lines(f.read())

setup(
    name='bench',
    description='Metadata driven, full-stack web framework',
    author='Frappe Technologies',
    author_email='info@frappe.io',
    version=version,
    packages=find_packages(),
    zip_safe=False,
    include_package_data=True,
    # Lines carrying a URL scheme are direct links (the old ``_link`` case);
    # everything else is a plain requirement specifier.
    install_requires=[r for r in requirements if '://' not in r],
    dependency_links=[r for r in requirements if '://' in r],
    entry_points='''
[console_scripts]
bench=bench.cli:cli
''',
)
|
from os import path
from collections import defaultdict
import math
root = path.dirname(path.dirname(path.dirname(__file__)))
result_dir = path.join(root, 'results')
def get_file_name(test):
    """Return the absolute path of the result file for *test*."""
    return path.join(result_dir, '%s_result' % test)
def mean(l):
    """Return the arithmetic mean of *l*, or NaN for an empty sequence."""
    return float(sum(l)) / len(l) if len(l) > 0 else float('nan')

def std_dev(l):
    """Return the population standard deviation of *l*.

    Returns NaN for an empty sequence; the previous version raised
    ZeroDivisionError there, even though mean() deliberately degrades
    to NaN on empty input.
    """
    if not l:
        return float('nan')
    m = mean(l)
    return math.sqrt(sum((x - m) ** 2 for x in l) / len(l))
def run_timing_overhead_ana():
    """Report mean/std-dev of the timing-overhead samples (one int per line).

    Fix: the Python-2-only ``print`` statements are replaced with the
    parenthesized single-argument form, which behaves identically on
    Python 2 and is valid Python 3.
    """
    test_name = 'timing_overhead'
    file_name = get_file_name(test_name)
    datas = []
    with open(file_name) as f:
        for l in f:
            datas.append(int(l))
    # Cap at the first 10000 samples (previously copied element-by-element).
    datas = datas[:10000]
    print("%s mean: %f" % (test_name, mean(datas)))
    print("%s std dev: %f" % (test_name, std_dev(datas)))
def run_loop_overhead_ana():
    """Report mean/std-dev of the loop-overhead samples.

    Each line holds space-separated fields; only the first (a float) is
    used. Fix: Python-2-only ``print`` statements replaced with the
    parenthesized form that works on both Python 2 and 3.
    """
    test_name = 'loop_overhead'
    file_name = get_file_name(test_name)
    datas = []
    with open(file_name) as f:
        for l in f:
            datas.append(float(l.split(' ')[0]))
    # Cap at the first 10000 samples (previously copied element-by-element).
    datas = datas[:10000]
    print("%s mean: %f" % (test_name, mean(datas)))
    print("%s std dev: %f" % (test_name, std_dev(datas)))
def run_proc_call_overhead_ana():
    """Print one ``mean<TAB>std_dev`` line per measurement group.

    Groups are separated by lines starting with '-'; each sample line's
    first field is a total over 10 calls, hence the division by 10.
    NOTE(review): assumes the file begins with a '-' separator line,
    otherwise the first append would hit an empty ``datas`` list — confirm.

    Fix: Python-2-only ``print`` statements replaced with the parenthesized
    form that works on both Python 2 and 3; commented-out code removed.
    """
    test_name = 'proc_call_overhead'
    file_name = get_file_name(test_name)
    datas = []
    with open(file_name) as f:
        for l in f:
            if l.startswith('-'):
                datas.append([])
                continue
            datas[-1].append(int(l.split(' ')[0]) * 1.0 / 10)
    print("%s result:" % test_name)
    for data in datas:
        print("%f\t%f" % (mean(data), std_dev(data)))
def run_process_context_switch_ana():
    """Report mean/std-dev of the process context-switch samples.

    Malformed lines (missing second field or non-integer value) are
    skipped. Fixes: Python-2-only ``print`` statements replaced with the
    parenthesized form valid on both Python 2 and 3, and the bare
    ``except`` narrowed so it no longer swallows KeyboardInterrupt.
    """
    test_name = 'process_context_switch'
    file_name = get_file_name(test_name)
    datas = []
    with open(file_name) as f:
        for l in f:
            try:
                datas.append(int(l.split(' ')[1]))
            except (IndexError, ValueError):
                pass
    # Cap at the first 100 samples (previously copied element-by-element).
    datas = datas[:100]
    print("%s mean: %f" % (test_name, mean(datas)))
    print("%s std dev: %f" % (test_name, std_dev(datas)))
def run_thread_context_switch_ana():
    """Report mean/std-dev of the thread context-switch samples.

    Fix: Python-2-only ``print`` statements replaced with the parenthesized
    form that behaves identically on Python 2 and is valid Python 3.
    """
    test_name = 'thread_context_switch'
    file_name = get_file_name(test_name)
    datas = []
    with open(file_name) as f:
        for l in f:
            datas.append(int(l.split(' ')[1]))
    # Cap at the first 100 samples (previously copied element-by-element).
    datas = datas[:100]
    print("%s mean: %f" % (test_name, mean(datas)))
    print("%s std dev: %f" % (test_name, std_dev(datas)))
def run_mem_acc_ana():
    """Print a step x offset table of mean memory-access cycle counts.

    Input lines carry space-separated fields; fields 1, 3 and 7 are the
    offset, cycle count and step respectively.
    Fix: Python-2-only ``print`` statements replaced with the parenthesized
    form that works on both Python 2 and 3.
    """
    test_name = 'mem_acc'
    filename = get_file_name(test_name)
    # datas[step][offset] -> list of cycle measurements
    datas = defaultdict(lambda: defaultdict(list))
    with open(filename) as f:
        for l in f:
            ll = l.split(' ')
            step = int(ll[7])
            offset = int(ll[1])
            cycle = float(ll[3])
            datas[step][offset].append(cycle)
    results = {}
    offsets = set()
    for step, v in sorted(datas.items()):
        result = []
        for offset, cycles in sorted(v.items()):
            offsets.add(offset)
            result.append(mean(cycles))
        results[step] = result
    print("mem access time result")
    # Header row: every offset seen, sorted, tab-separated.
    print("step/offset\t%s" % "\t".join(str(i) for i in sorted(offsets)))
    for step, means in sorted(results.items()):
        line = "\t".join(str(i) for i in means)
        print("%s\t%s" % (str(step), line))
if __name__ == '__main__':
    # Run every analysis in sequence; each reads its own file from
    # result_dir and prints mean/std-dev tables to stdout.
    run_timing_overhead_ana()
    run_loop_overhead_ana()
    run_proc_call_overhead_ana()
    run_process_context_switch_ana()
    run_thread_context_switch_ana()
    run_mem_acc_ana()
|
from decimal import Decimal
import json
class InvalidDimension(ValueError):
    """Raised when a sheet specification's dimensions are inconsistent,
    e.g. the labels cannot physically fit on the sheet."""
class Specification(object):
"""Specification for a sheet of labels.
All dimensions are given in millimetres. If any of the margins are not
given, then any remaining space is divided equally amongst them. If all the
width or all the height margins are given, they must exactly use up all
non-label space on the sheet.
"""
def __init__(self, sheet_width, sheet_height, columns, rows, label_width, label_height, **kwargs):
"""
Required parameters
-------------------
sheet_width, sheet_height: positive dimension
The size of the sheet.
columns, rows: positive integer
The number of labels on the sheet.
label_width, label_size: positive dimension
The size of each label.
Margins and gaps
----------------
left_margin: positive dimension
The gap between the left edge of the sheet and the first column.
column_gap: positive dimension
The internal gap between columns.
right_margin: positive dimension
The gap between the right edge of the sheet and the last column.
top_margin: positive dimension
The gap between the top edge of the sheet and the first row.
row_gap: positive dimension
The internal gap between rows.
bottom_margin: positive dimension
The gap between the bottom edge of the sheet and the last row.
Padding
-------
left_padding, right_padding, top_padding, bottom_padding: positive dimensions, default 0
The padding between the edges of the label and the area available
to draw on.
Corners
---------------------
corner_radius: positive dimension, default 0
Gives the labels rounded corners with the given radius.
padding_radius: positive dimension, default 0
Give the drawing area rounded corners. If there is no padding, this
must be set to zero.
Background
----------
background_image: reportlab.graphics.shape.Image
An image to use as the background to the page. This will be
automatically sized to fit the page; make sure it has the correct
aspect ratio.
background_filename: string
Filename of an image to use as a background to the page. If both
this and background_image are given, then background_image will
take precedence.
Raises
------
InvalidDimension
If any given dimension is invalid (i.e., the labels cannot fit on
the sheet).
"""
# Compulsory arguments.
self._sheet_width = Decimal(sheet_width)
self._sheet_height = Decimal(sheet_height)
self._columns = int(columns)
self._rows = int(rows)
self._label_width = Decimal(label_width)
self._label_height = Decimal(label_height)
# Optional arguments; missing ones will be computed later.
self._left_margin = kwargs.pop('left_margin', None)
self._column_gap = kwargs.pop('column_gap', None)
self._right_margin = kwargs.pop('right_margin', None)
self._top_margin = kwargs.pop('top_margin', None)
self._row_gap = kwargs.pop('row_gap', None)
self._bottom_margin = kwargs.pop('bottom_margin', None)
# Optional arguments with default values.
self._left_padding = kwargs.pop('left_padding', 0)
self._right_padding = kwargs.pop('right_padding', 0)
self._top_padding = kwargs.pop('top_padding', 0)
self._bottom_padding = kwargs.pop('bottom_padding', 0)
self._corner_radius = Decimal(kwargs.pop('corner_radius', 0))
self._padding_radius = Decimal(kwargs.pop('padding_radius', 0))
self._background_image = kwargs.pop('background_image', None)
self._background_filename = kwargs.pop('background_filename', None)
# Leftover arguments.
if kwargs:
args = kwargs.keys()
if len(args) == 1:
raise TypeError("Unknown keyword argument {}.".format(args[0]))
else:
raise TypeError("Unknown keyword arguments: {}.".format(', '.join(args)))
# Track which attributes have been automatically set.
self._autoset = set()
# Check all the dimensions etc are valid.
self._calculate()
    def _calculate(self):
        """Checks the dimensions of the sheet are valid and consistent.

        Converts all given margins/gaps to Decimal, validates that the label
        grid fits the sheet, and fills in any missing margins/gaps by
        splitting the leftover space equally among them (tracked via
        ``self._autoset`` so they are recomputed on subsequent calls).

        NB: this is called internally when needed; there should be no need for
        user code to call it.
        """
        # Check the dimensions are larger than zero.
        for dimension in ('_sheet_width', '_sheet_height', '_columns', '_rows', '_label_width', '_label_height'):
            if getattr(self, dimension) <= 0:
                name = dimension.replace('_', ' ').strip().capitalize()
                raise InvalidDimension("{0:s} must be greater than zero.".format(name))

        # Check margins / gaps are not smaller than zero if given.
        # At the same time, force the values to decimals.
        for margin in ('_left_margin', '_column_gap', '_right_margin', '_top_margin', '_row_gap', '_bottom_margin',
                       '_left_padding', '_right_padding', '_top_padding', '_bottom_padding'):
            val = getattr(self, margin)
            if val is not None:
                # Values auto-computed by a previous run are reset to None so
                # they get recomputed from scratch below.
                if margin in self._autoset:
                    val = None
                else:
                    val = Decimal(val)
                    if val < 0:
                        name = margin.replace('_', ' ').strip().capitalize()
                        raise InvalidDimension("{0:s} cannot be less than zero.".format(name))
                setattr(self, margin, val)
            else:
                self._autoset.add(margin)

        # Check the corner radius.
        if self._corner_radius < 0:
            raise InvalidDimension("Corner radius cannot be less than zero.")
        if self._corner_radius > (self._label_width / 2):
            raise InvalidDimension("Corner radius cannot be more than half the label width.")
        if self._corner_radius > (self._label_height / 2):
            raise InvalidDimension("Corner radius cannot be more than half the label height.")

        # If there is no padding, we don't need the padding radius.
        if (self._left_padding + self._right_padding + self._top_padding + self._bottom_padding) == 0:
            if self._padding_radius != 0:
                raise InvalidDimension("Padding radius must be zero if there is no padding.")
        else:
            if (self._left_padding + self._right_padding) >= self._label_width:
                raise InvalidDimension("Sum of horizontal padding must be less than the label width.")
            if (self._top_padding + self._bottom_padding) >= self._label_height:
                raise InvalidDimension("Sum of vertical padding must be less than the label height.")
            if self._padding_radius < 0:
                raise InvalidDimension("Padding radius cannot be less than zero.")

        # Calculate the amount of spare space.
        hspace = self._sheet_width - (self._label_width * self._columns)
        vspace = self._sheet_height - (self._label_height * self._rows)

        # Cannot fit.
        if hspace < 0:
            raise InvalidDimension("Labels are too wide to fit on the sheet.")
        if vspace < 0:
            raise InvalidDimension("Labels are too tall to fit on the sheet.")

        # Process the horizontal margins / gaps.
        # hcount counts how many horizontal margins/gaps still need a value;
        # it is decremented for every explicitly-given one.
        hcount = 1 + self._columns
        if self._left_margin is not None:
            hspace -= self._left_margin
            if hspace < 0:
                raise InvalidDimension("Left margin is too wide for the labels to fit on the sheet.")
            hcount -= 1
        if self._column_gap is not None:
            hspace -= ((self._columns - 1) * self._column_gap)
            if hspace < 0:
                raise InvalidDimension("Column gap is too wide for the labels to fit on the sheet.")
            hcount -= (self._columns - 1)
        if self._right_margin is not None:
            hspace -= self._right_margin
            # Snap tiny (< 0.01) rounding residues into the right margin so a
            # fully-specified sheet with float-precision input still validates.
            if hspace < 0.01 and hspace > -0.01:
                self._right_margin += hspace
                hspace = 0
            if hspace < 0:
                raise InvalidDimension("Right margin is too wide for the labels to fit on the sheet.")
            hcount -= 1

        # Process the vertical margins / gaps.
        vcount = 1 + self._rows
        if self._top_margin is not None:
            vspace -= self._top_margin
            if vspace < 0:
                raise InvalidDimension("Top margin is too tall for the labels to fit on the sheet.")
            vcount -= 1
        if self._row_gap is not None:
            vspace -= ((self._rows - 1) * self._row_gap)
            if vspace < 0:
                raise InvalidDimension("Row gap is too tall for the labels to fit on the sheet.")
            vcount -= (self._rows - 1)
        if self._bottom_margin is not None:
            vspace -= self._bottom_margin
            # Same rounding-residue snap as for the right margin above.
            if vspace < 0.01 and vspace > -0.01:
                self._bottom_margin += vspace
                vspace = 0
            if vspace < 0:
                raise InvalidDimension("Bottom margin is too tall for the labels to fit on the sheet.")
            vcount -= 1

        # If all the margins are specified, they must use up all available space.
        if hcount == 0 and hspace != 0:
            raise InvalidDimension("Not all width used by manually specified margins/gaps; {}mm left.".format(hspace))
        if vcount == 0 and vspace != 0:
            raise InvalidDimension("Not all height used by manually specified margins/gaps; {}mm left.".format(vspace))

        # Split any extra horizontal space and allocate it.
        if hcount:
            auto_margin = hspace / hcount
            for margin in ('_left_margin', '_column_gap', '_right_margin'):
                if getattr(self, margin) is None:
                    setattr(self, margin, auto_margin)

        # And allocate any extra vertical space.
        if vcount:
            auto_margin = vspace / vcount
            for margin in ('_top_margin', '_row_gap', '_bottom_margin'):
                if getattr(self, margin) is None:
                    setattr(self, margin, auto_margin)
def bounding_boxes(self, mode='fraction', output='dict'):
"""Get the bounding boxes of the labels on a page.
Parameters
----------
mode: 'fraction', 'actual'
If 'fraction', the bounding boxes are expressed as a fraction of the
height and width of the sheet. If 'actual', they are the actual
position of the labels in millimetres from the top-left of the
sheet.
output: 'dict', 'json'
If 'dict', a dictionary with label identifier tuples (row, column)
as keys and a dictionary with 'left', 'right', 'top', and 'bottom'
entries as the values.
If 'json', a JSON encoded string which represents a dictionary with
keys of the string format 'rowxcolumn' and each value being a
bounding box dictionary with 'left', 'right', 'top', and 'bottom'
entries.
Returns
-------
The bounding boxes in the format set by the output parameter.
"""
boxes = {}
# Check the parameters.
if mode not in ('fraction', 'actual'):
raise ValueError("Unknown mode {0}.".format(mode))
if output not in ('dict', 'json'):
raise ValueError("Unknown output {0}.".format(output))
# Iterate over the rows.
for row in range(1, self.rows + 1):
# Top and bottom of all labels in the row.
top = self.top_margin + ((row - 1) * (self.label_height + self.row_gap))
bottom = top + self.label_height
# Now iterate over all columns in this row.
for column in range(1, self.columns + 1):
# Left and right position of this column.
left = self.left_margin + ((column - 1) * (self.label_width + self.column_gap))
right = left + self.label_width
# Output in the appropriate mode format.
if mode == 'fraction':
box = {
'top': top / self.sheet_height,
'bottom': bottom / self.sheet_height,
'left': left / self.sheet_width,
'right': right / self.sheet_width,
}
elif mode == 'actual':
box = {'top': top, 'bottom': bottom, 'left': left, 'right': right}
# Add to the collection.
if output == 'json':
boxes['{0:d}x{1:d}'.format(row, column)] = box
box['top'] = float(box['top'])
box['bottom'] = float(box['bottom'])
box['left'] = float(box['left'])
box['right'] = float(box['right'])
else:
boxes[(row, column)] = box
# Done.
if output == 'json':
return json.dumps(boxes)
return boxes
    # Helper function to create an accessor for one of the properties.
    # attr is the 'internal' attribute e.g., _sheet_width.
    # Runs at class-definition time only; it is deleted from the class body
    # afterwards (see `del create_accessor` below). The returned property
    # revalidates the whole sheet layout on every assignment and rolls the
    # attribute back if validation fails.
    def create_accessor(attr, deletable=False):
        # Getter is simple; no processing needed.
        @property
        def accessor(self):
            return getattr(self, attr)
        # Setter is more complicated.
        @accessor.setter
        def accessor(self, value):
            # Store the original value in case we need to reset.
            original = getattr(self, attr)
            # If this was originally autoset or not.
            was_autoset = attr in self._autoset
            # Discard this attribute from the autoset list.
            self._autoset.discard(attr)
            # Set the value and see if it is valid.
            setattr(self, attr, value)
            try:
                # Recompute the layout; raises if the new value conflicts
                # with the other sheet dimensions.
                self._calculate()
            except:
                # Reset to the original state.
                setattr(self, attr, original)
                if was_autoset:
                    self._autoset.add(attr)
                # Let the error propogate up.
                raise
        # Create a deleter if allowable.
        if deletable:
            @accessor.deleter
            def accessor(self):
                # Deleting hands the attribute back to automatic calculation.
                self._autoset.add(attr)
                setattr(self, attr, None)
                self._calculate()
        # And we now have our accessor.
        return accessor
# Create accessors for all our properties.
sheet_width = create_accessor('_sheet_width')
sheet_height = create_accessor('_sheet_height')
label_width = create_accessor('_label_width')
label_height = create_accessor('_label_height')
columns = create_accessor('_columns')
rows = create_accessor('_rows')
left_margin = create_accessor('_left_margin', deletable=True)
column_gap = create_accessor('_column_gap', deletable=True)
right_margin = create_accessor('_right_margin', deletable=True)
top_margin = create_accessor('_top_margin', deletable=True)
row_gap = create_accessor('_row_gap', deletable=True)
bottom_margin = create_accessor('_bottom_margin', deletable=True)
corner_radius = create_accessor('_corner_radius')
padding_radius = create_accessor('_padding_radius')
background_image = create_accessor('_background_image', deletable=True)
background_filename = create_accessor('_background_filename', deletable=True)
left_padding = create_accessor('_left_padding', deletable=True)
right_padding = create_accessor('_right_padding', deletable=True)
top_padding = create_accessor('_top_padding', deletable=True)
bottom_padding = create_accessor('_bottom_padding', deletable=True)
# Don't need the helper function any more.
del create_accessor
|
import os.path
import unittest
from unittest.mock import patch
import libpipe
from libpipe.cmds.align import HisatCmd
import logging
log = logging.getLogger(__name__)
class TestHistatCmd(unittest.TestCase):
    """Unit tests for libpipe.cmds.align.HisatCmd.

    NOTE(review): the class name looks like a typo for "TestHisatCmd";
    left unchanged because test runners may select tests by name.
    """
    def setUp(self):
        """Silence error logging and stub out BaseCmd._cmd for every test."""
        # prevent error logs from occuring during testing
        patcher = patch.object(libpipe.cmds.base.log, 'error')
        patcher.start()
        self.addCleanup(patcher.stop)
        # override base cmd method
        patcher = patch.object(libpipe.cmds.base.BaseCmd, '_cmd')
        patcher.start()
        self.addCleanup(patcher.stop)
    def sample_cmd(self):
        """Build a HisatCmd with a minimal, representative set of kwargs."""
        kw = {
            '-U': 'upath/seq.fa',
            '-x': 'gpath/gen',
            'timestamp': '000',
            '-S': 'path/al.sam',
        }
        return HisatCmd(**kw)
    #
    # Test _prepcmd
    #
    def test_prepcmd_sets_S_if_not_given(self):
        """-S defaults to <input stem>_<genome stem>.sam when omitted."""
        hc = self.sample_cmd()
        del hc.kwargs['-S']
        hc._prepcmd()
        self.assertEqual(
            hc.kwargs['-S'],
            'upath/seq_gen.sam',
        )
    def test_prepcmd_sets_redirect_to_log_file(self):
        """The redirect target is a log file derived from -S and timestamp."""
        hc = self.sample_cmd()
        hc._prepcmd()
        self.assertTrue(
            hc.redirect.endswith('path/al_gen_000_hisat.log'),
            'Redirect not set to expected log file ({})'.format(hc.redirect),
        )
    def test_prepcmd_sets_redirect_for_stdout_and_stderr_to_tee(self):
        """Both stdout and stderr are piped through `tee -a`."""
        hc = self.sample_cmd()
        hc._prepcmd()
        self.assertTrue(
            hc.redirect.startswith('2>&1 | tee -a'),
            'Redirect not set properly: {}'.format(hc.redirect),
        )
    def test_prepcmd_sets_unal_based_on_given_samfile_name(self):
        """--un (unaligned reads file) follows the -S name with .unal.fastq."""
        hc = self.sample_cmd()
        hc._prepcmd()
        expected_file = os.path.splitext(hc.kwargs['-S'])[0] + '.unal.fastq'
        self.assertIn('--un', hc.kwargs)
        self.assertEqual(hc.kwargs['--un'], expected_file)
    #
    # Test cmd
    #
    def test_cmd_raises_AttributeError_if_only_one_ppe_given(self):
        """Giving -1 without -2 (half a paired-end pair) is an error."""
        hc = self.sample_cmd()
        hc.kwargs['-1'] = hc.kwargs['-U']
        del hc.kwargs['-U']
        with self.assertRaises(AttributeError):
            hc.cmd()
    def test_addreq_raises_FileNotFoundError_if_n_idx_ne_expected(self):
        """An unexpected number of index files (0 or 100) must be rejected."""
        with patch('remsci.lib.utility.path.walk_file') as m:
            for i in [0, 100]:
                with self.subTest(n_indx=i):
                    m.return_value = [0] * i
                    hc = self.sample_cmd()
                    with self.assertRaises(FileNotFoundError):
                        hc._additional_requirements()
    #
    # Test _prepreq
    #
    def test_prepreq_raises_TypeError_if_linked_input_not_used(self):
        """Linked output with an unusable extension (.txt) raises TypeError."""
        with patch.object(
                HisatCmd, 'output', autospec=True, return_value=['seq.txt']):
            ohc = self.sample_cmd()
            ihc = self.sample_cmd()
            ohc.link(ihc)
            with self.assertRaises(TypeError):
                ihc._prepreq()
    def test_prepreq_sets_single_link_input_to_U_kwarg(self):
        """A single linked fastq becomes the unpaired (-U) input."""
        with patch.object(HisatCmd, 'output', return_value=['seq.fq']):
            ohc = self.sample_cmd()
            ihc = self.sample_cmd()
            ohc.link(ihc)
            ihc._prepreq()
            self.assertEqual(ihc.kwargs['-U'], 'seq.fq')
    def test_prepreq_sets_double_link_input_to_1_and_2_kwarg(self):
        """Two linked fastqs become the paired-end -1/-2 inputs."""
        args = ['seq.1.fq', 'seq.2.fq']
        with patch.object(HisatCmd, 'output', return_value=args):
            ohc = self.sample_cmd()
            ihc = self.sample_cmd()
            ohc.link(ihc)
            ihc._prepreq()
            self.assertEqual(ihc.kwargs['-1'], 'seq.1.fq')
            self.assertEqual(ihc.kwargs['-2'], 'seq.2.fq')
    def test_prepreq_preserves_kwargs_if_no_input_given(self):
        """Without a link, _prepreq leaves the original kwargs untouched."""
        ihc = self.sample_cmd()
        ihc._prepreq()
        self.assertEqual(ihc.kwargs['-U'], 'upath/seq.fa')
# Allow running this test module directly.
if __name__ == '__main__':
    unittest.main()
|
import os
import sys
import re
if len(sys.argv)<4:
print "Usage: make-sim-options.py <decay_file> <output_prefix> <event_number>"
exit(1)
HOME_DIR = os.environ['HOME']
JPSIKKROOT_DIR = os.environ['JPSIKKROOT']
SHARE_DIR = os.path.join(JPSIKKROOT_DIR, "share")
TEMPLATE_DIR = os.path.join(JPSIKKROOT_DIR, "share/template")
TEMPLATE_SIM_FILE = os.path.joint(TEMPLATE_DIR, "simulation.cfg")
print HOMEDIR, JPSIKKROOT_DIR, TE
DECAY_FILE = os.path.abspath(os.path.join(SHARE_DIR,sys.argv[1]))
PREFIX = sys.argv[2]
RTRAW_FILE = os.path.abspath(PREFIX+".rtraw")
DST_FILE = os.path.abspath(PREFIX+".dst")
ROOT_FILE = os.path.abspath(PREFIX+".root")
|
'''Defines the Special class for theia.'''
import numpy as np
from ..helpers import geometry, settings
from ..helpers.units import deg, cm, pi
from .optic import Optic
class Special(Optic):
    '''
    Special class.
    This class represents general optics, as their actions on R and T are left
    to the user to input. They are useful for special optics which are neither
    reflective nor transmissive.
    Actions:
    * T on HR: user input
    * R on HR: user input
    * T on AR: user input
    * R on AR: user input
    **Note**: by default the actions of these objects are those of
    beamsplitters (0, 0, 0, 0)
    *=== Additional attributes with respect to the Optic class ===*
    None
    *=== Name ===*
    Special
    **Note**: the curvature of any surface is positive for a concave surface
    (coating inside the sphere).
    Thus kurv*HRNorm/|kurv| always points to the center
    of the sphere of the surface, as is the convention for the lineSurfInter of
    geometry module. Same for AR.
    ******* HRK > 0 and ARK > 0 ******* HRK > 0 and ARK < 0
    ***** ******** and |ARK| > |HRK|
    H***A H*********A
    ***** ********
    ******* *******
    '''
    Name = "Special"
    def __init__(self, Wedge = 0., Alpha = 0., X = 0., Y = 0., Z = 0.,
                Theta = pi/2., Phi = 0., Diameter = 10.e-2,
                HRr = .99, HRt = .01, ARr = .1, ARt = .9,
                HRK = 0.01, ARK = 0, Thickness = 2.e-2,
                N = 1.4585, KeepI = False,
                RonHR = 0, TonHR = 0, RonAR = 0, TonAR = 0,
                Ref = None):
        '''Special optic initializer.
        Parameters are the attributes.
        Returns a special optic.
        '''
        # actions
        # User-supplied action orders are normalized to ints.
        TonHR = int(TonHR)
        RonHR = int(RonHR)
        TonAR = int(TonAR)
        RonAR = int(RonAR)
        # Initialize input data
        # Geometric and optical quantities are normalized to floats.
        N = float(N)
        Wedge = float(Wedge)
        Alpha = float(Alpha)
        Theta = float(Theta)
        Phi = float(Phi)
        Diameter = float(Diameter)
        Thickness = float(Thickness)
        HRK = float(HRK)
        ARK = float(ARK)
        HRt = float(HRt)
        HRr = float(HRr)
        ARt = float(ARt)
        ARr = float(ARr)
        #prepare for mother initializer
        # HR normal vector from the spherical angles Theta (polar) and
        # Phi (azimuth); HR center is the (X, Y, Z) position.
        HRNorm = np.array([np.sin(Theta)*np.cos(Phi),
                        np.sin(Theta) * np.sin(Phi),
                        np.cos(Theta)], dtype = np.float64)
        HRCenter = np.array([X, Y, Z], dtype = np.float64)
        #Calculate ARCenter and ARNorm with wedge and alpha and thickness:
        # AR sits behind HR by the thickness plus the extra depth the wedge
        # adds over half the diameter; the AR normal is the HR normal tilted
        # by Wedge about an axis set by Alpha in the (a, b) basis.
        ARCenter = HRCenter\
            - (Thickness + .5 * np.tan(Wedge) * Diameter) * HRNorm
        a,b = geometry.basis(HRNorm)
        ARNorm = -np.cos(Wedge) * HRNorm\
                + np.sin(Wedge) * (np.cos(Alpha) * a\
                                + np.sin(Alpha) * b)
        super(Special, self).__init__(ARCenter = ARCenter, ARNorm = ARNorm,
            N = N, HRK = HRK, ARK = ARK, ARr = ARr, ARt = ARt, HRr = HRr, HRt = HRt,
            KeepI = KeepI, HRCenter = HRCenter, HRNorm = HRNorm,
            Thickness = Thickness, Diameter = Diameter,
            Wedge = Wedge, Alpha = Alpha,
            TonHR = TonHR, RonHR = RonHR, TonAR = TonAR, RonAR = RonAR,
            Ref = Ref)
        #Warnings for console output
        if settings.warning:
            self.geoCheck("mirror")
    def lines(self):
        '''Returns the list of lines necessary to print the object.'''
        # Convert the HR normal back to spherical angles for display.
        sph = geometry.rectToSph(self.HRNorm)
        return ["Special: %s {" % str(self.Ref),
            "TonHR, RonHR: %s, %s" % (str(self.TonHR), str(self.RonHR)),
            "TonAR, RonAR: %s, %s" % (str(self.TonAR), str(self.RonAR)),
            "Thick: %scm" % str(self.Thick/cm),
            "Diameter: %scm" % str(self.Dia/cm),
            "Wedge: %sdeg" % str(self.Wedge/deg),
            "Alpha: %sdeg" % str(self.Alpha/deg),
            "HRCenter: %s" % str(self.HRCenter),
            "HRNorm: (%s, %s)deg" % (str(sph[0]/deg), str(sph[1]/deg)),
            "Index: %s" %str(self.N),
            "HRKurv, ARKurv: %s, %s" % (str(self.HRK), str(self.ARK)),
            "HRr, HRt, ARr, ARt: %s, %s, %s, %s" \
            % (str(self.HRr), str(self.HRt), str(self.ARr), str(self.ARt)),
            "}"]
|
import sys, shutil
try:
from gi.repository import Gtk, Gdk, Vte, GLib, Pango, GConf, GdkPixbuf
import json, os, getpass
from pycm.pycm_globals import *
except ImportError as e:
print "Error during importing of necessaries modules.\nError is '%s'" % e
sys.exit()
python_path = "/usr/lib/python2.7/dist-packages/"
module_path = python_path + 'pycm'
bin_exe = '/usr/bin/pycm.py'
launcher = '/usr/share/applications/pyconnection-manager.desktop'
uid = os.getuid()
def __init__():
if uid > 0:
print "You need to be root to install pyConnection Manager"
sys.exit()
try:
remove_old()
except OSError, IOError:
print "ERROR removing old stuff"
sys.exit()
try:
create_new()
except OSError, IOError:
print "ERROR installing pyConnection Manager"
sys.exit()
ok = "\n\tpyConnection Manager succesfully installed\n"
print ok
def remove_old():
    """Delete any previously installed module tree, resource directories,
    executable and desktop launcher, skipping entries that are absent."""
    for directory in (module_path, GLADE_DIR, IMAGE_DIR):
        if os.path.exists(directory):
            shutil.rmtree(directory)
    for path in (bin_exe, launcher):
        if os.path.exists(path):
            os.remove(path)
def create_new():
    """Copy the package, glade/image resources, executable and launcher into
    the system paths, then fix up directory permissions."""
    shutil.copytree('pycm', module_path)
    shutil.copytree('glade', GLADE_DIR)
    shutil.copytree('img', IMAGE_DIR)
    # NOTE(review): this installs to '/usr/bin/pycm' while remove_old()
    # cleans up bin_exe ('/usr/bin/pycm.py') -- one of the two paths is
    # stale; confirm which one the .desktop launcher expects.
    shutil.copyfile('pycm.py', '/usr/bin/pycm')
    shutil.copyfile('pyconnection-manager.desktop', launcher)
    dir_list = [module_path, GLADE_DIR, IMAGE_DIR]
    for i in dir_list:
        # BUG FIX: os.chmod was called with decimal 655 (i.e. mode 01217,
        # a nonsense permission); use octal 0o755 (rwxr-xr-x), presumably
        # what was intended for world-readable directories.
        os.chmod(i, 0o755)
# Kick off the installation when the script is executed.
__init__()
|
"""
Message Queue wrapper
"""
__RCSID__ = "$Id$"
from DIRAC.FrameworkSystem.private.standardLogging.Handler.MessageQueueHandler import MessageQueueHandler
from DIRAC.Resources.LogBackends.AbstractBackend import AbstractBackend
from DIRAC.FrameworkSystem.private.standardLogging.Formatter.JsonFormatter import JsonFormatter
class MessageQueueBackend(AbstractBackend):
  """
  Backend shipping log records to a Message Queue server.

  It pairs the DIRAC MessageQueueHandler (a custom logging handler that
  forwards records to an MQ server, found in
  FrameworkSystem/private/standardLogging/Handler) with the JsonFormatter
  (FrameworkSystem/private/standardLogging/Formatter) so records arrive
  as JSON.
  """
  def __init__(self):
    """Start with no handler and an empty queue name; format as JSON."""
    super(MessageQueueBackend, self).__init__(None, JsonFormatter)
    self.__queue = ''
  def createHandler(self, parameters=None):
    """
    Build the MessageQueueHandler from the backend parameters.

    :params parameters: dictionary of parameters. ex: {'FileName': file.log}
    """
    queue_name = self.__queue
    if parameters is not None:
      queue_name = parameters.get("MsgQueue", queue_name)
      self.__queue = queue_name
    self._handler = MessageQueueHandler(queue_name)
  def setLevel(self, level):
    """
    Deliberately a no-op: the MQ handler carries no level, so log records
    of every severity are forwarded to the Message Queue.
    """
    pass
|
import math
import random, datetime
from gettext import gettext as _
import gi
gi.require_version('Gtk', '3.0')
gi.require_version('PangoCairo', '1.0')
from gi.repository import Gtk
from gi.repository import Gdk
from gi.repository import GObject
from gi.repository import Pango
from gi.repository import PangoCairo
import medalscreen
BALLOON_COLORS = [
    (65535, 0, 0),
    (0, 0, 65535),
    (65535, 32768, 0),
    (0, 32768, 65535),
]
class Balloon:
    """A single rising balloon carrying the word the player must type."""
    def __init__(self, x, y, vx, vy, word):
        # Position and per-tick velocity, in pixels.
        self.x = x
        self.y = y
        self.vx = vx
        self.vy = vy
        # Remaining text to type; shrinks as the player hits matching keys.
        self.word = word
        # Diameter grows with the word length, but never drops below 100 px.
        self.size = max(100, 20 * len(word) + 50)
        self.color = random.choice(BALLOON_COLORS)
class BalloonGame(Gtk.VBox):
    """Typing-game screen: pop rising balloons by typing their words.

    A title bar with a Back button sits above a DrawingArea; a 20 ms
    GObject timeout animates the balloons until the lesson's word budget
    is exhausted, after which a results panel (and possibly a medal
    screen) is shown.
    """
    def __init__(self, lesson, activity):
        """lesson: dict with 'name', 'words', 'medals' and optional 'length'.
        activity: owning activity, used for screen stack, history and keys."""
        GObject.GObject.__init__(self)
        self.lesson = lesson
        self.activity = activity
        # Build title bar.
        title = Gtk.Label()
        title.set_markup("<span size='20000'><b>" + lesson['name'] + "</b></span>")
        title.set_alignment(1.0, 0.0)
        stoplabel = Gtk.Label(label=_('Go Back'))
        stopbtn = Gtk.Button()
        stopbtn.add(stoplabel)
        stopbtn.connect('clicked', self.stop_cb)
        hbox = Gtk.HBox()
        hbox.pack_start(stopbtn, False, False, 10)
        hbox.pack_end(title, False, False, 10)
        # Build the game drawing area.
        self.area = Gtk.DrawingArea()
        self.draw_cb_id = self.area.connect("draw", self.draw_cb)
        # Connect keyboard grabbing and releasing callbacks.
        self.area.connect('realize', self.realize_cb)
        self.area.connect('unrealize', self.unrealize_cb)
        self.pack_start(hbox, False, False, 10)
        self.pack_start(self.area, True, True, 0)
        self.show_all()
        # Initialize the game data.
        self.balloons = []
        self.score = 0
        self.spawn_delay = 10
        self.count = 0
        self.count_left = self.lesson.get('length', 60)
        self.medal = None
        self.finished = False
        # Start the animation loop running.
        self.update_timer = GObject.timeout_add(20, self.tick, priority=GObject.PRIORITY_HIGH_IDLE+30)
    def realize_cb(self, widget):
        """Start listening for key presses while the game is on screen."""
        self.activity.add_events(Gdk.EventMask.KEY_PRESS_MASK)
        self.key_press_cb_id = self.activity.connect('key-press-event', self.key_cb)
        # Clear the mouse cursor.
        #pixmap = Gdk.Pixmap(widget.window, 10, 10)
        #color = Gdk.Color()
        #cursor = Gdk.Cursor.new(pixmap, pixmap, color, color, 5, 5)
        #widget.window.set_cursor(cursor)
    def unrealize_cb(self, widget):
        """Stop listening for key presses when the widget leaves the screen."""
        self.activity.disconnect(self.key_press_cb_id)
    def stop_cb(self, widget):
        """Back button handler: cancel the timer and leave this screen."""
        # Stop the animation loop.
        if self.update_timer:
            try:
                GObject.source_remove(self.update_timer)
            # BUG FIX: was a bare `except:`, which also swallows
            # KeyboardInterrupt/SystemExit; only guard against the timer
            # source having already been removed.
            except Exception:
                pass  # Try remove instance, if not found, just pass
        self.activity.pop_screen()
    def key_cb(self, widget, event):
        """Key handler: advance a matching balloon, or dismiss the results."""
        # Ignore hotkeys.
        if event.get_state() & (Gdk.ModifierType.CONTROL_MASK | Gdk.ModifierType.MOD1_MASK):
            return False
        # Extract information about the key pressed.
        key = Gdk.keyval_to_unicode(event.keyval)
        if key != 0: key = chr(key)
        if self.finished:
            # On the results panel, Return leaves the game.
            key_name = Gdk.keyval_name(event.keyval)
            if key_name == 'Return':
                self.activity.pop_screen()
                # Show the new medal if there was one.
                if self.medal:
                    self.activity.push_screen(medalscreen.MedalScreen(self.medal, self.activity))
        else:
            # Advance the first balloon whose next letter matches the key.
            for b in self.balloons:
                if b.word[0] == key:
                    b.word = b.word[1:]
                    self.add_score(1)
                    # Pop the balloon if it's been typed.
                    if len(b.word) == 0:
                        self.balloons.remove(b)
                        self.add_score(100)
                    self.queue_draw_balloon(b)
                    break
        return False
    def update_balloon(self, b):
        """Move balloon b one step; bounce off the side walls, cull above
        the top edge. May remove b from self.balloons."""
        b.x += b.vx
        b.y += b.vy
        if b.x < 100 or b.x >= self.bounds.width - 100:
            b.vx = -b.vx
        if b.y < -100:
            self.balloons.remove(b)
        self.queue_draw_balloon(b)
    def tick(self):
        """Timer callback: move balloons, spawn new ones, detect game end.
        Returns True to keep the GObject timeout alive."""
        if self.finished:
            return False
        self.bounds = self.area.get_allocation()
        # BUG FIX: update_balloon() may remove the balloon from
        # self.balloons, and removing while iterating skips the next
        # element -- iterate over a copy of the list.
        for b in self.balloons[:]:
            self.update_balloon(b)
        self.spawn_delay -= 1
        if self.count_left >= 0 and self.spawn_delay <= 0:
            self.count += 1
            self.count_left -= 1
            word = random.choice(self.lesson['words'])
            x = random.randint(100, self.bounds.width - 100)
            y = self.bounds.height + 100
            vx = random.uniform(-2, 2)
            vy = -2 #random.uniform(-5, -3)
            b = Balloon(x, y, vx, vy, word)
            self.balloons.append(b)
            # Spawn faster as the game progresses.
            if self.count < 10:
                delay = 200
            elif self.count < 20:
                delay = 150
            else:
                delay = 100
            self.spawn_delay = random.randint(delay-20, delay+20)
        if self.count_left <= 0 and len(self.balloons) == 0:
            self.finish_game()
        return True
    def draw_results(self, cr):
        """Draw the end-of-game panel: title, score and medal summary."""
        # Draw background.
        w = self.bounds.width - 400
        h = self.bounds.height - 200
        x = self.bounds.width/2 - w/2
        y = self.bounds.height/2 - h/2
        cr.set_source_rgb(0.762, 0.762, 0.762)
        cr.rectangle(x, y, w, h)
        cr.fill()
        cr.set_source_rgb(0, 0, 0)
        cr.rectangle(x, y, w, h)
        cr.stroke()
        # Draw text
        title = _('You finished!') + '\n'
        cr.set_source_rgb(0, 0, 0)
        pango_layout = PangoCairo.create_layout(cr)
        fd = Pango.FontDescription('Serif Bold')
        fd.set_size(16 * Pango.SCALE)
        pango_layout.set_font_description(fd)
        pango_layout.set_text(title,
                              len(title))
        size = pango_layout.get_size()
        tx = x + (w / 2) - (size[0] / Pango.SCALE) / 2
        ty = y + 100
        cr.move_to(tx, ty)
        PangoCairo.update_layout(cr, pango_layout)
        PangoCairo.show_layout(cr, pango_layout)
        report = ''
        report += _('Your score was %(score)d.') % { 'score': self.score } + '\n'
        if self.medal:
            report += _('You earned a %(type)s medal!') % self.medal + '\n'
        report += '\n'
        report += _('Press the ENTER key to continue.')
        cr.set_source_rgb(0, 0, 0)
        pango_layout = PangoCairo.create_layout(cr)
        fd = Pango.FontDescription('Times')
        fd.set_size(12 * Pango.SCALE)
        pango_layout.set_font_description(fd)
        pango_layout.set_text(report, len(report))
        size = pango_layout.get_size()
        sx = x + w / 2 - (size[0] / Pango.SCALE) / 2
        sy = y + 200
        cr.move_to(sx, sy)
        PangoCairo.update_layout(cr, pango_layout)
        PangoCairo.show_layout(cr, pango_layout)
    def finish_game(self):
        """Record the lesson result, award the best medal earned (if any),
        and switch the display to the results panel."""
        self.finished = True
        # Add to the lesson history.
        report = {
            'lesson': self.lesson['name'],
            'score': self.score,
        }
        self.activity.add_history(report)
        # Show the medal screen, if one should be given.
        got_medal = None
        medals = self.lesson['medals']
        for medal in medals:
            if self.score >= medal['score']:
                got_medal = medal['name']
        if got_medal:
            # Award the medal.
            medal = {
                'lesson': self.lesson['name'],
                'type': got_medal,
                'date': datetime.date.today().strftime('%B %d, %Y'),
                'nick': self.activity.nick,
                'score': self.score
            }
            self.medal = medal
            # Compare this medal with any existing medals for this lesson.
            # Only record the best one.
            add_medal = True
            if self.lesson['name'] in self.activity.data['medals']:
                old_medal = self.activity.data['medals'][self.lesson['name']]
                order = ' '.join([m['name'] for m in medals])
                add_idx = order.index(medal['type'])
                old_idx = order.index(old_medal['type'])
                if add_idx < old_idx:
                    add_medal = False
                elif add_idx == old_idx:
                    if medal['score'] < old_medal['score']:
                        add_medal = False
            if add_medal:
                self.activity.data['motd'] = 'newmedal'
                self.activity.data['medals'][self.lesson['name']] = medal
                # Refresh the main screen given the new medal.
                self.activity.mainscreen.show_lesson(self.activity.mainscreen.lesson_index)
        self.queue_draw()
    def queue_draw_balloon(self, b):
        """Invalidate a rectangle generously covering balloon b and its
        word label and string."""
        x = int(b.x - b.size/2) - 5
        y = int(b.y - b.size/2) - 5
        w = int(b.size + 100)
        h = int(b.size*1.5 + 10)
        self.area.queue_draw_area(x, y, w, h)
    def draw_balloon(self, cr, b):
        """Render one balloon: string, colored circle, and centered word."""
        x = int(b.x)
        y = int(b.y)
        # Draw the string.
        cr.set_source_rgb(0, 0, 0)
        cr.move_to(int(b.x), int(b.y + b.size / 2))
        cr.line_to(int(b.x), int(b.y + b.size))
        cr.stroke()
        # Draw the balloon.
        cr.save()
        cr.set_source_rgb(b.color[0], b.color[1], b.color[2])
        cr.arc(b.x, b.y, b.size / 2, 0, 2 * math.pi)
        cr.fill()
        cr.restore()
        cr.set_source_rgb(0, 0, 0)
        pango_layout = PangoCairo.create_layout(cr)
        fd = Pango.FontDescription('Sans')
        fd.set_size(12 * Pango.SCALE)
        pango_layout.set_font_description(fd)
        pango_layout.set_text(b.word, len(b.word))
        size = pango_layout.get_size()
        x = x - (size[0] / Pango.SCALE) / 2
        y = y - (size[1] / Pango.SCALE) / 2
        cr.move_to(x, y)
        PangoCairo.update_layout(cr, pango_layout)
        PangoCairo.show_layout(cr, pango_layout)
    def add_score(self, num):
        """Add num points and request a redraw of the score display."""
        self.score += num
        self.queue_draw_score()
    def queue_draw_score(self):
        """Invalidate the score text's corner of the drawing area."""
        layout = self.area.create_pango_layout(_('SCORE: %d') % self.score)
        layout.set_font_description(Pango.FontDescription('Times 14'))
        size = layout.get_size()
        x = self.bounds.width-20-size[0]/Pango.SCALE
        y = 20
        # BUG FIX: queue_draw_area takes (x, y, width, height) but was
        # passed (x, y, x+size[0], y+size[1]) with the extents still in
        # Pango units, and was invoked on the VBox although x/y are
        # computed relative to the drawing area. Invalidate the correct
        # rectangle on self.area instead.
        self.area.queue_draw_area(x, y, size[0] / Pango.SCALE, size[1] / Pango.SCALE)
    def draw_score(self, cr):
        """Render the score in the top-right corner of the area."""
        cr.set_source_rgb(0, 0, 0)
        pango_layout = PangoCairo.create_layout(cr)
        fd = Pango.FontDescription('Times')
        fd.set_size(14 * Pango.SCALE)
        pango_layout.set_font_description(fd)
        text = _('SCORE: %d') % self.score
        pango_layout.set_text(text, len(text))
        size = pango_layout.get_size()
        x = self.bounds.width - 20 - size[0] / Pango.SCALE
        y = 20
        cr.move_to(x, y)
        PangoCairo.update_layout(cr, pango_layout)
        PangoCairo.show_layout(cr, pango_layout)
    def draw_instructions(self, cr):
        # Draw instructions.
        cr.set_source_rgb(0, 0, 0)
        pango_layout = PangoCairo.create_layout(cr)
        pango_layout.set_font_description(Pango.FontDescription('Times 14'))
        text = _('Type the words to pop the balloons!')
        pango_layout.set_text(text, len(text))
        size = pango_layout.get_size()
        x = (self.bounds.width - size[0] / Pango.SCALE) / 2
        y = self.bounds.height - 20 - size[1] / Pango.SCALE
        cr.move_to(x, y)
        PangoCairo.update_layout(cr, pango_layout)
        PangoCairo.show_layout(cr, pango_layout)
    def draw(self, cr):
        """Render a full frame: background, balloons, then either the
        results panel or the in-game instructions and score."""
        self.bounds = self.area.get_allocation()
        # Draw background.
        cr.set_source_rgb(0.915, 0.915, 1)
        cr.rectangle(0, 0, self.bounds.width, self.bounds.height)
        cr.fill()
        # Draw the balloons.
        for b in self.balloons:
            self.draw_balloon(cr, b)
        if self.finished:
            self.draw_results(cr)
        else:
            self.draw_instructions(cr)
            self.draw_score(cr)
    def draw_cb(self, area, cr):
        """GTK 'draw' signal adapter."""
        self.draw(cr)
|
"""
Created on Thu Jan 31 2018
Unit tests for the Balance game
@author: IvanPopov
"""
import unittest
from game import Game
class GameTest(unittest.TestCase):
    """Smoke test: the Balance game can be constructed."""
    def test_game_loads(self):
        # Game() builds the game; g.c is presumably its canvas/window whose
        # title should read "Balance" -- TODO confirm what attribute c is.
        g=Game()
        self.assertEqual(g.c.title(), "Balance")
|
import os, sys, re, argparse, time, json, logging
import requests
from glob import glob
from urlparse import urlsplit
from getpass import getpass
from mastodon import Mastodon
from markdown import markdown
from html_text import extract_text
from flask import (Flask, render_template, abort,
request, redirect, jsonify)
# Feature switches for development.
DEBUG = False # If it ain't broke, don't debug it.
NO_TOOTING = False # Handy during debug: create gist, but don't toot.
# A hashtag / mention must be at the start of the text or preceded by
# whitespace.
RE_HASHTAG = re.compile(u'(?:^|(?<=\s))#(\\w+)')
RE_MENTION = re.compile(u'(?:^|(?<=\s))@(\\w+)@([\\w.]+)')
def get_hashtags(s, ignore=None):
    """Return the set of lowercased '#tag' strings found in *s*.

    Tags that also appear in *ignore* (another piece of text) are excluded.
    """
    found = set(u'#' + word.lower() for word in RE_HASHTAG.findall(s))
    if ignore:
        found -= get_hashtags(ignore)
    return found
def linkify_hashtags(s, instance):
    """Rewrite each '#tag' in *s* as a markdown link to the tag page on
    *instance* (e.g. [#tag](https://instance/tags/tag))."""
    def to_link(match):
        return u"[#{tag}](https://{instance}/tags/{tag})".format(
            tag=match.group(1), instance=instance)
    return RE_HASHTAG.sub(to_link, s)
def get_mentions(s, ignore=None):
    """Return the set of '@user@instance' mentions found in *s*.

    Mentions that also appear in *ignore* are excluded.
    """
    found = set(u"@{}@{}".format(user, host)
                for user, host in RE_MENTION.findall(s))
    if ignore:
        found -= get_mentions(ignore)
    return found
def linkify_mentions(s):
    """Rewrite each '@user@instance' mention in *s* as a markdown link to
    the user's profile (e.g. [@user](https://instance/@user))."""
    def to_link(match):
        return u"[@{user}](https://{instance}/@{user})".format(
            user=match.group(1), instance=match.group(2))
    return RE_MENTION.sub(to_link, s)
def url2toot(masto, url):
    """Resolve *url* to a toot (status dict) via the instance's search API.

    Returns None when the url is not a plausible https toot url or when
    the search yields no status.
    """
    parts = urlsplit(url)
    # Cheap sanity check before bothering the instance's search endpoint.
    if parts.scheme != 'https' or not parts.netloc or not parts.path:
        return None # Don't bother the instance
    statuses = masto.search(url, True).get('statuses', [])
    first = statuses[0] if statuses else None
    return first or None
def make_gist(title, body):
    """Create a public GitHub gist named *title* containing *body* in a
    TOOT.md file, and return its html url anchored at that file."""
    payload = {
        "description": title,
        "public": True,
        "files": {
            "TOOT.md": {
                "content": u"### {}\n\n{}".format(title, body)
            }
        }
    }
    response = requests.post("https://api.github.com/gists", json=payload)
    return response.json()['html_url'] + "#file-toot-md"
def post(masto, body, instance, title=None,
        direction='ltr', in_reply_to=None):
    """Publish *body* (markdown) as a public gist and toot its summary.

    The toot carries the first 140 chars of the rendered text, the gist
    link, and any hashtags/mentions not already in the summary. Returns
    the posted toot's url (or just the gist url when NO_TOOTING is set).
    in_reply_to, if given, is a status dict as returned by url2toot().
    """
    # Markdown more than we need, to [hopefully] discard chopped markup.
    summary = extract_text(markdown(body.strip()))[:140]
    hashtags = get_hashtags(body, ignore=summary)
    mentions = get_mentions(body, ignore=summary)
    irt_id = in_reply_to and in_reply_to.get('id') or None
    body = linkify_hashtags(linkify_mentions(body), instance)
    if direction=='rtl':
        body = u"""<div dir="rtl">
{}
</div>""".format(markdown(body))
    if in_reply_to:
        body = u"""#### In reply to [@{}]({}):
{}""".format(
            in_reply_to['account']['username'],
            in_reply_to['url'], body)
    # BUG FIX: the original read `body+u"""` and never closed that string
    # literal, swallowing the rest of the function (SyntaxError). The gist
    # body is now just the formatted markdown; if a footer was intended
    # here, it was lost -- TODO restore it from upstream history.
    gist = make_gist(
        title or u"A gistodon toot, {} GMT".format(
            time.asctime(time.gmtime())),
        body)
    if NO_TOOTING:
        return gist
    status = u'{}... {}'.format(summary, gist)
    if hashtags or mentions:
        status += u'\n'+u' '.join(hashtags.union(mentions))
    return masto.status_post(
        status, spoiler_text=title, in_reply_to_id=irt_id)['url']
def webserver(masto, instance, account):
    """Serve a small localhost web UI (port 8008) for composing toots.

    masto: authenticated Mastodon client; instance: instance hostname used
    when linkifying hashtags; account: verified account dict shown in the
    UI. Blocks in Flask's app.run() until interrupted.
    """
    app = Flask(__name__, static_url_path='')
    @app.route('/')
    def index():
        # Optional ?re=<toot url> pre-fills the "in reply to" field.
        # NOTE(review): local `re` shadows the imported regex module here.
        re = request.args.get('re','')
        return render_template('index.html', account=account,
            re=re)
    @app.route('/toot', methods=['POST'])
    def tootit():
        # Validate and resolve the optional "in reply to" url, then post
        # the markdown and redirect the browser to the resulting toot.
        if not request.form['markdown'].strip():
            return "Nothing to toot"
        in_reply_to=request.form.get('re')
        if in_reply_to:
            in_reply_to = url2toot(masto, in_reply_to)
            if not in_reply_to:
                abort(500, 'The "in reply to" url is not a toot.')
        return redirect(post(
            masto, request.form['markdown'], instance,
            title=request.form['title'],
            in_reply_to=in_reply_to,
            direction=request.form['direction']))
    @app.route('/re', methods=['GET', 'POST'])
    def tootsearch():
        # Resolve a toot url (form or query parameter 'q') to its status.
        return jsonify(url2toot(masto,
            request.form.get('q', request.args.get('q',''))))
    @app.route('/search', methods=['GET', 'POST'])
    def search():
        # Autocomplete endpoint: accounts and hashtags matching 'q'.
        q = request.form.get(
            'q', request.args.get('q','')).strip()
        if not q:
            return jsonify([])
        res = masto.search(q, True)
        return jsonify(sorted(
            [
                {
                    # This trick makes sure both local and external
                    # accounts get a @hostname suffix.
                    "value": "@{}@{}".format(
                        a["username"], urlsplit(a["url"]).netloc),
                    "title": a.get("display_name")
                } for a in res.get('accounts',[])]+ \
            [{"value": '#'+a} for a in res.get('hashtags',[])],
            key=lambda s: s['value'].lower()))
    app.run(host='localhost', port=8008, debug=DEBUG)
def main():
    """Command-line entry point: toot stdin as a gist, or run the web UI.

    Credentials are read from <app>.<instance>.client.secret and
    <app>.<instance>.<email>.user.secret files produced by register.sh.
    NOTE(review): this module is Python 2 code (urlparse import, `unicode`,
    truthy `filter` result below); run it under Python 2.
    """
    logging.basicConfig(level=logging.INFO,
        format='%(asctime)s - %(message)s',
        datefmt='%Y-%m-%d %H:%M:%S')
    parser = argparse.ArgumentParser(
        description=("Toot stdin as a gist [markdown is supported],"
        " or launch a localhost web interface."))
    parser.add_argument('-i', '--instance',
        help='Your mastodon instance (e.g. mastodon.social).')
    parser.add_argument('-e', '--email',
        help='The email address you login to that instance with.')
    parser.add_argument('-a', '--app_name', default='Gistodon',
        help=('Name for the app (default is Gistodon).'
        ' Appears below the toot, near the date.'))
    parser.add_argument('-w', '--web', action="store_true",
        help=("Run as a web server on localhost"
        " (toot-specific --title, --re, and --rtl"
        " are ignored)."))
    parser.add_argument('-t', '--title',
        help="Optional: gist's title, and the toot's content warning (CW).")
    parser.add_argument('-r', '--re',
        help="Optional: url of the toot you're replying to.")
    parser.add_argument('--rtl', dest='direction', action='store_const',
        const='rtl', default='ltr',
        help=("Format the gist as right-to-left text."))
    args = parser.parse_args()
    # Locate the client (app) credential file: either from the explicit
    # --instance, or by globbing for any previously registered app.
    instance = args.instance
    if instance:
        client_cred_filename = '{}.{}.client.secret'.format(args.app_name, args.instance)
    else:
        candidates = glob('{}.*.client.secret'.format(args.app_name))
        assert candidates, "No app/user registered. Please run register.sh first."
        client_cred_filename = candidates[0]
        # Recover the instance hostname from the credential file name.
        instance = client_cred_filename[len(args.app_name)+1:-len('.client.secret')]
    # Locate the user credential file the same way, keyed by --email.
    email = args.email
    if email:
        user_cred_filename = '{}.{}.{}.user.secret'.format(
            args.app_name, instance, email.replace('@','.'))
    else:
        candidates = glob('{}.{}.*.user.secret'.format(
            args.app_name, instance))
        assert len(candidates), \
            "No user registered for {} at {}. Please run register.sh first.".format(
                args.app_name, instance)
        user_cred_filename = candidates[0]
    assert \
        os.path.exists(client_cred_filename) and \
        os.path.exists(user_cred_filename), \
        "App/user not registered. Please run register.sh"
    logging.info("Connecting to {}...".format(instance))
    masto = Mastodon(
        client_id = client_cred_filename,
        access_token = user_cred_filename,
        api_base_url = 'https://'+instance)
    if args.web:
        # Web mode: verify the login and serve the composer UI.
        account = masto.account_verify_credentials()
        webserver(masto, instance, account)
    else:
        # CLI mode: read markdown from stdin and post it.
        logging.info("Reading markdown from standard input...")
        lines = [unicode(l,'utf-8') for l in sys.stdin.readlines()]
        assert len(filter(lambda l: l.strip(), lines)), \
            "Empty toot."
        body = u'\n'.join(lines)
        assert not args.title or len(args.title)<=80, "Title exceeds 80 characters"
        if args.re:
            irt = url2toot(masto, args.re)
            assert irt, "not a toot's url: {}".format(args.re)
        else:
            irt = None
        title = args.title
        try:
            title = unicode(title,'utf-8')
        except TypeError:
            pass # Either Null, or already unicode(?!?)
        logging.info("Posted {}.".format(post(
            masto, body, instance,
            title=title, direction=args.direction, in_reply_to=irt)))
# Script entry point.
if __name__=='__main__':
    main()
|
import time
import os
import json
import requests
|
from migrate.versioning import api
from config import SQLALCHEMY_DATABASE_URI
from config import SQLALCHEMY_MIGRATE_REPO
from app import db
import os.path
# Create all tables declared on the SQLAlchemy metadata (no-op for tables
# that already exist).
db.create_all()
if not os.path.exists(SQLALCHEMY_MIGRATE_REPO):
    # First run: create the sqlalchemy-migrate repository and put the
    # database under version control at version 0.
    api.create(SQLALCHEMY_MIGRATE_REPO, 'database_repository')
    api.version_control(SQLALCHEMY_DATABASE_URI, SQLALCHEMY_MIGRATE_REPO)
else:
    # Repository already exists: register the database at the repository's
    # current (latest) version.
    api.version_control(SQLALCHEMY_DATABASE_URI, SQLALCHEMY_MIGRATE_REPO, api.version(SQLALCHEMY_MIGRATE_REPO))
|
from troposphere import Tags,FindInMap, Ref, Template, Parameter,ImportValue, Ref, Output
from troposphere.efs import FileSystem, MountTarget
from troposphere.ec2 import SecurityGroup, SecurityGroupRule, Instance, Subnet
from create import export_ref, import_ref
from create.network import AclFactory, assoc_nacl_subnet
def efs_setup(template, ops, app_cfn_options, stack_name, stack_setup):
    """Add an EFS file system and its supporting network resources to *template*.

    Creates: the FileSystem itself, a SecurityGroup, a network ACL (via
    AclFactory), and one Subnet + MountTarget per entry in
    ops['tcpstacks']['EFS']['networks'].  Exports the file-system ref, the
    security-group ref, and the ACL ref for use by other stacks.

    @param template - troposphere Template the resources are added to
    @param ops - project options object (supports both attribute and
                 dict-style access, plus .get())
    @param app_cfn_options - holds pre-generated network names per tcp stack
    @param stack_name - name of the stack being built
    @param stack_setup - per-stack config; only stack_setup['ports'] is read here
    """
    # Variable Declarations
    vpc_id=ops.get('vpc_id')
    # Pre-generated names for this stack's security group and network ACL.
    efs_sg = app_cfn_options.network_names['tcpstacks'][stack_name]['sg_name']
    efs_acl = app_cfn_options.network_names['tcpstacks'][stack_name]['nacl_name']
    # Create EFS FileSystem
    efs_fs=FileSystem(
        title='{}{}'.format(ops.app_name, stack_name),
        FileSystemTags=Tags(Name='{}-{}'.format(ops.app_name, stack_name))
    )
    template.add_resource(efs_fs)
    # Export the file system's ref so other stacks can import the endpoint.
    export_ref(template, '{}{}{}'.format(ops.app_name,stack_name,"Endpoint"), value=Ref(efs_fs), desc="Endpoint for EFS FileSystem")
    # EFS FS Security Groups
    efs_security_group=SecurityGroup(
        title=efs_sg,
        GroupDescription='Allow Access',
        VpcId=vpc_id,
        Tags=Tags(Name=efs_sg)
    )
    template.add_resource(efs_security_group)
    # NOTE(review): Ref() is given the title string efs_sg rather than the
    # resource object efs_security_group; both resolve to {"Ref": <title>}
    # since the title equals efs_sg -- confirm this is intentional.
    export_ref(template, efs_sg, value=Ref(efs_sg), desc="Export for EFS Security Group")
    # Create Network ACL for EFS Stack
    # in/out networks are the sorted app networks; inbound ports come from
    # this stack's config, outbound ports from the global options.
    efs_nacl = AclFactory(
        template,
        name=efs_acl,
        vpc_id=ops.vpc_id,
        in_networks=[val for key, val in sorted(ops.app_networks.items())],
        in_ports=stack_setup['ports'],
        out_ports=ops.out_ports,
        out_networks=[val for key, val in sorted(ops.app_networks.items())],
        ssh_hosts=ops.get("deploy_hosts"),
    )
    export_ref(
        template,
        export_name=efs_acl,
        value=Ref(efs_acl),
        desc="{}{} stack".format("NetACL for", stack_name)
    )
    # Create Subnets for Mount Targets
    # One subnet, ACL association, and mount target per network entry.
    # Keys appear to be AZ names and values CIDR blocks (k.split("-")[-1]
    # is used as a short AZ suffix) -- TODO confirm against the config schema.
    for k, v in ops['tcpstacks']['EFS']['networks'].items():
        efs_subnet=Subnet(
            title='{}{}{}{}'.format(ops.app_name, stack_name, "MountTargetSubnet", k.split("-")[-1]),
            AvailabilityZone=k,
            CidrBlock=v,
            VpcId=vpc_id,
            Tags=Tags(Name='{}-{}-{}-{}'.format(ops.app_name, stack_name, "MountTargetSubnet", k.split("-")[-1]))
        )
        template.add_resource(efs_subnet)
        # Associate the stack's network ACL with the new subnet.
        assoc_name = '{}{}{}'.format(stack_name,"AclAssoc",k.split("-")[-1])
        assoc_nacl_subnet(template, assoc_name, Ref(efs_acl), Ref(efs_subnet))
        # Mount target exposing the file system inside this subnet/AZ.
        efs_mount_target=MountTarget(
            title='{}{}{}'.format(ops.app_name, "EFSMountTarget", k.split("-")[-1]),
            FileSystemId=Ref(efs_fs),
            SecurityGroups=[Ref(efs_security_group)],
            SubnetId=Ref(efs_subnet)
        )
        template.add_resource(efs_mount_target)
|
import os
import os.path
import optparse, ConfigParser
import snap
from snap.options import *
from snap.snapshottarget import SnapshotTarget
from snap.exceptions import ArgError
class ConfigOptions:
    """Container holding all the configuration options available
    to the Snap system"""

    # modes of operation
    RESTORE = 0
    BACKUP = 1

    def __init__(self):
        '''initialize configuration to its defaults'''
        backends = SnapshotTarget.BACKENDS
        # mode of operation (RESTORE or BACKUP); None until configured
        self.mode = None
        # per-backend enable flags plus include/exclude entity lists,
        # keyed by backend name
        self.target_backends = dict((b, False) for b in backends)
        self.target_includes = dict((b, []) for b in backends)
        self.target_excludes = dict((b, []) for b in backends)
        # output log level: 'quiet', 'normal', 'verbose' or 'debug'
        self.log_level = 'normal'
        # output format to backup to / restore from
        self.outputformat = 'snapfile'
        # location of the snapfile to backup to / restore from
        self.snapfile = None
        # encryption/decryption password; None leaves encryption disabled
        self.encryption_password = None
        # hash of key/value pairs of service-specific options
        self.service_options = {}

    def log_level_at_least(self, comparison):
        '''Return True when the configured log level is at least as
        verbose as *comparison*.'''
        ranks = {'quiet': 0, 'normal': 1, 'verbose': 2, 'debug': 3}
        # An unrecognized configured level behaves like 'normal'; an
        # unrecognized comparison can never be satisfied (this matches the
        # original boolean chain exactly).
        return ranks.get(self.log_level, 1) >= ranks.get(comparison, 4)
class ConfigFile:
    """Represents the snap config file to be read and parsed.

    Parsing happens at construction time and writes the discovered values
    directly into the global snap.config.options."""

    # ConfigParser instance; remains None when the config file is missing
    parser = None

    def __init__(self, config_file):
        '''
        Initialize the config file, specifying its path

        @param config_file - the path to the file to load
        '''
        # if config file doesn't exist, just ignore
        if not os.path.exists(config_file):
            if snap.config.options.log_level_at_least("verbose"):
                snap.callback.snapcallback.warn("Config file " + config_file + " not found")
        else:
            self.parser = ConfigParser.ConfigParser()
            self.parser.read(config_file)
            self.__parse()

    @staticmethod
    def string_to_bool(string):
        '''Static helper to convert a string to a boolean value

        @returns - True/False for recognized spellings, None otherwise
        '''
        if string == 'True' or string == 'true' or string == '1':
            return True
        elif string == 'False' or string == 'false' or string == '0':
            return False
        return None

    @staticmethod
    def string_to_array(string):
        '''Static helper to convert a colon delimited string to an array of strings'''
        return string.split(':')

    def __get_bool(self, key, section='main'):
        '''
        Retrieve the indicated boolean value from the config file

        @param key - the string key corresponding to the boolean value to retrieve
        @param section - the section to retrieve the value from
        @returns - the value, or None if not found or not a recognized boolean
        '''
        try:
            return ConfigFile.string_to_bool(self.parser.get(section, key))
        # narrowed from a bare except so SystemExit/KeyboardInterrupt propagate
        except Exception:
            return None

    def __get_string(self, key, section='main'):
        '''
        Retrieve the indicated string value from the config file

        @param key - the string key corresponding to the string value to retrieve
        @param section - the section to retrieve the value from
        @returns - the value or None if not found
        '''
        try:
            return self.parser.get(section, key)
        except Exception:
            return None

    def __get_array(self, section='main'):
        '''return array of key/value pairs from the config file section

        @param section - the section which to retrieve the key / values from
        @returns - the array of key / value pairs or None if not found
        '''
        try:
            return self.parser.items(section)
        except Exception:
            return None

    def __parse(self):
        '''parse configuration out of the config file into snap.config.options'''
        for backend in SnapshotTarget.BACKENDS:
            # a backend entry may be a plain boolean ...
            val = self.__get_bool(backend)
            if val is not None:
                snap.config.options.target_backends[backend] = val
            else:
                # ... or a colon-delimited entity list; entries prefixed with
                # '!' are excludes, the rest are includes, and the backend is
                # implicitly enabled
                val = self.__get_string(backend)
                if val:
                    snap.config.options.target_backends[backend] = True
                    val = ConfigFile.string_to_array(val)
                    for include in val:
                        if include[0] == '!':
                            snap.config.options.target_excludes[backend].append(include[1:])
                        else:
                            snap.config.options.target_includes[backend].append(include)
                else:
                    # finally, a 'no<backend>' key explicitly disables it
                    val = self.__get_bool('no' + backend)
                    if val:
                        snap.config.options.target_backends[backend] = False
        # simple scalar options; only overwrite defaults when present
        of = self.__get_string('outputformat')
        sf = self.__get_string('snapfile')
        ll = self.__get_string('loglevel')
        enp = self.__get_string('encryption_password')
        if of != None:
            snap.config.options.outputformat = of
        if sf != None:
            snap.config.options.snapfile = sf
        if ll != None:
            snap.config.options.log_level = ll
        if enp != None:
            snap.config.options.encryption_password = enp
        # arbitrary key/value pairs from the [services] section
        services = self.__get_array('services')
        if services:
            for k, v in services:
                snap.config.options.service_options[k] = v
class Config:
    """The configuration manager, used to set and verify snap config values
    from the config file and command line. Primary interface to the
    Configuration System"""

    configoptions = None
    # optparse parser, created by parse_cli()
    parser = None

    def read_config(self):
        '''read values from the config files and set them in the target ConfigOptions'''
        # add conf stored in resources if running from a local checkout.
        # CONFIG_FILES is module-level state (imported from snap.options),
        # so guard against appending a duplicate entry when read_config()
        # is invoked more than once.
        local_conf = os.path.join(os.path.dirname(__file__), "..", "resources", "snap.conf")
        if local_conf not in CONFIG_FILES:
            CONFIG_FILES.append(local_conf)
        for config_file in CONFIG_FILES:
            # each ConfigFile parses itself into snap.config.options
            ConfigFile(config_file)

    def parse_cli(self):
        '''
        parse the command line and set the values in the target ConfigOptions
        '''
        usage = "usage: %prog [options] arg"
        self.parser = optparse.OptionParser(usage, version=SNAP_VERSION)
        self.parser.add_option('', '--restore', dest='restore', action='store_true', default=False, help='Restore snapshot')
        self.parser.add_option('', '--backup', dest='backup', action='store_true', default=False, help='Take snapshot')
        # NOTE(review): default="normal" means the CLI value always overwrites
        # any loglevel read from the config file, even when -l was not given --
        # confirm intended precedence before changing.
        self.parser.add_option('-l', '--log-level', dest='log_level', action='store', default="normal", help='Log level (quiet, normal, verbose, debug)')
        self.parser.add_option('-o', '--outputformat', dest='outputformat', action='store', default=None, help='Output file format')
        self.parser.add_option('-f', '--snapfile', dest='snapfile', action='store', default=None, help='Snapshot file, use - for stdout')
        self.parser.add_option('-p', '--password', dest='encryption_password', action='store', default=None, help='Snapshot File Encryption/Decryption Password')
        # FIXME how to permit parameter lists for some of these
        for backend in SnapshotTarget.BACKENDS:
            self.parser.add_option('', '--' + backend, dest=backend, action='store_true', help='Enable ' + backend + ' snapshots/restoration')
            self.parser.add_option('', '--no' + backend, dest=backend, action='store_false', help='Disable ' + backend + ' snapshots/restoration')
        (options, args) = self.parser.parse_args()
        if options.restore != False:
            snap.config.options.mode = ConfigOptions.RESTORE
        if options.backup != False:
            snap.config.options.mode = ConfigOptions.BACKUP
        if options.log_level:
            snap.config.options.log_level = options.log_level
        if options.outputformat != None:
            snap.config.options.outputformat = options.outputformat
        if options.snapfile != None:
            snap.config.options.snapfile = options.snapfile
        if options.encryption_password != None:
            snap.config.options.encryption_password = options.encryption_password
        for backend in SnapshotTarget.BACKENDS:
            val = getattr(options, backend)
            if val != None:
                if type(val) == str:
                    # string-valued flag: enable the backend and split the
                    # colon-delimited list into includes / excludes ('!' prefix)
                    snap.config.options.target_backends[backend] = True
                    val = ConfigFile.string_to_array(val)
                    for include in val:
                        if include[0] == '!':
                            snap.config.options.target_excludes[backend].append(include[1:])
                        else:
                            snap.config.options.target_includes[backend].append(include)
                else:
                    # plain boolean from --backend / --nobackend
                    snap.config.options.target_backends[backend] = val

    def verify_integrity(self):
        '''
        verify the integrity of the current option set
        @raises - ArgError if the options are invalid
        '''
        if snap.config.options.mode == None: # mode not specified
            raise snap.exceptions.ArgError("Must specify backup or restore")
        if snap.config.options.snapfile == None: # need to specify snapfile location
            raise snap.exceptions.ArgError("Must specify snapfile")
        # TODO verify output format is one of permitted types
        if snap.config.options.outputformat == None: # need to specify output format
            raise snap.exceptions.ArgError("Must specify valid output format")
options = ConfigOptions()
|
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.