import ui.gui
import gettext
if __name__ == '__main__':
gettext.install('ppam')
ui.gui.main()
|
import sys
import os
from os.path import join
import re
import time
import shutil
import collections
stat_names = ['pct-lat', 'avg-lat', 'min-lat', 'max-lat', 'call-rate']
directions = ['MBps-read', 'MBps-written']
min_lat_infinity = 1.0e24
pbench_graphs = True
if os.getenv('SKIP_PBENCH_GRAPHING'): pbench_graphs = False
graph_csvs = [
('MBps-written', 'MB/sec written to Gluster volume'),
('MBps-read', 'MB/sec read from Gluster volume'),
('call-rate', 'FOP call rates'),
('pct-lat', 'percentage latency by FOP')
]
start_time = None
expected_sample_interval = None
expected_sample_count = None
sorted_fop_names = None
intervals = None
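# FopProfile holds per-FOP latency statistics for one sample interval.
# accumulate() builds a calls-weighted sum of avg_lat across profiles and
# normalize_sum() divides by the total call count to recover the average.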
class FopProfile:
def __init__(self, avg_lat, min_lat, max_lat, calls):
self.avg_lat = avg_lat
self.min_lat = min_lat
self.max_lat = max_lat
self.calls = calls
self.pct_lat = 0.0 # will compute later
def __str__(self):
return '%6.2f, %8.0f, %8.0f, %8.0f, %d' % (
self.pct_lat, self.avg_lat, self.min_lat, self.max_lat, self.calls)
# append a single field to .csv record based on statistic type
    # use "%-6.2f" instead of "%6.2f" so there are no leading spaces in the
    # record, otherwise the spreadsheet inserts columns at col. B
def field2str(self, stat, duration):
if stat == stat_names[0]:
return '%-6.2f' % self.pct_lat
elif stat == stat_names[1]:
return '%8.0f' % self.avg_lat
elif stat == stat_names[2]:
if self.min_lat == min_lat_infinity:
return '' # don't confuse spreadsheet/user
else:
return '%8.0f' % self.min_lat
elif stat == stat_names[3]:
if self.max_lat == 0:
return ''
else:
return '%8.0f' % self.max_lat
elif stat == stat_names[4]:
call_rate = self.calls / float(duration)
return '%10.3f' % call_rate
# accumulate weighted sum of component profiles, will normalize them later
def accumulate(self, addend):
self.avg_lat += (addend.avg_lat * addend.calls)
self.calls += addend.calls
if addend.calls > 0:
self.max_lat = max(self.max_lat, addend.max_lat)
self.min_lat = min(self.min_lat, addend.min_lat)
# normalize weighted sum to get averages
def normalize_sum(self):
try:
# totals will become averages
self.avg_lat /= self.calls
except ZeroDivisionError: # if no samples, set these stats to zero
self.pct_lat = 0.0
self.avg_lat = 0.0
# compute % latency for this FOP given total latency of all FOPs
def get_pct_lat(self, total_lat):
try:
self.pct_lat = 100.0 * (self.avg_lat * self.calls) / total_lat
except ZeroDivisionError: # if no samples, set these stats to zero
self.pct_lat = 0.0
class ProfileInterval:
def __init__(self):
self.bytes_read = None
self.bytes_written = None
self.duration = None
self.fop_profiles = {}
def __str__(self):
return '%d, %d, %s, %s'%(
self.bytes_read, self.bytes_written,
str(self.duration), [ str(f) + ' : ' + str(self.fop_profiles[f]) for f in self.fop_profiles ])
def usage(msg):
print('ERROR: %s' % msg)
print('usage: extract-gl-client-prof.py your-gluster-client-profile.log')
sys.exit(1)
def make_out_dir(path):
dir_path = path + '_csvdir'
try:
if os.path.exists(dir_path):
shutil.rmtree(dir_path)
os.mkdir(dir_path)
except IOError:
usage('could not (re-)create directory ' + dir_path)
return dir_path
def parse_input(input_pathname):
global start_time
global expected_sample_interval
global expected_sample_count
global sorted_fop_names
global intervals
try:
with open(input_pathname, 'r') as file_handle:
lines = [ l.strip() for l in file_handle.readlines() ]
except IOError:
usage('could not read ' + input_pathname)
tokens = lines[0].split()
expected_sample_count = int(tokens[0])
expected_sample_interval = int(tokens[1])
start_time = time.mktime(
time.strptime(
lines[1], '%a %b %d %H:%M:%S %Z %Y')) * 1000
print('collection started at %s' % lines[1])
print('sampling interval is %d seconds' % expected_sample_interval)
print('expected sample count is %d samples' % expected_sample_count)
# parse the file and record each cell of output in a way that lets you
# aggregate across bricks later
found_cumulative_output = False
found_interval_output = False
all_caps_name = re.compile('^[A-Z]{3,15}')
fop_names = set()
last_intvl = -2
intvl = -1
per_op_table = {}
sample = -1
intervals = []
for ln in lines[2:]:
tokens = ln.split()
        if 'Interval' in ln and 'stats' in ln:
interval_number = int(tokens[2])
assert intvl == last_intvl + 1
last_intvl = intvl
intvl += 1
intvl_profile = ProfileInterval()
intervals.append(intvl_profile)
found_interval_output = True
        elif 'Cumulative Stats' in ln:
found_cumulative_output = True
        elif 'Duration :' in ln:
# we are at end of output for this brick and interval
assert found_cumulative_output ^ found_interval_output
duration = int(tokens[2])
diff_from_expected = abs(duration - expected_sample_interval)
if found_interval_output:
if diff_from_expected > 1:
print(('WARNING: in sample %d the sample ' +
'interval %d deviates from expected value %d') %
(sample, duration, expected_sample_interval))
fops_in_interval = intervals[intvl]
fops_in_interval.duration = duration
        elif 'BytesRead' in ln:
if found_interval_output:
intvl_profile = intervals[intvl]
intvl_profile.bytes_read = int(tokens[2])
        elif 'BytesWritten' in ln:
if found_interval_output:
intvl_profile = intervals[intvl]
intvl_profile.bytes_written = int(tokens[2])
        elif 'Cumulative stats' in ln:
# this is the end of this sample
found_interval_output = False
found_cumulative_output = True
        elif 'Current open fd' in ln:
found_cumulative_output = False
elif found_interval_output and all_caps_name.match(ln):
# we found a record we're interested in,
# accumulate table of data for each gluster function
sample += 1
intvl_profile = intervals[intvl]
fop_name = tokens[0]
fop_names.add(fop_name)
new_fop_profile = FopProfile(
float(tokens[2]), float(tokens[4]), float(tokens[6]),
float(tokens[1]))
try:
fop_stats = intvl_profile.fop_profiles[fop_name]
raise Exception('did not expect fop already defined: %s' %
str(intvl_profile))
except KeyError:
intvl_profile.fop_profiles[fop_name] = new_fop_profile
sorted_fop_names = sorted(fop_names)
def gen_timestamp_ms(sample_index):
return start_time + ((expected_sample_interval * sample_index) * 1000)
def get_interval(interval_index, duration_type = 'interval'):
if duration_type == 'cumulative':
return interval_index * float(expected_sample_interval)
else:
return float(expected_sample_interval)
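# Each row of a per-direction .csv is (optional timestamp_ms, MB/s), where
# MB/s = bytes moved in the interval / interval duration / bytes_per_MB.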
def gen_output_bytes(out_dir_path):
bytes_per_MB = 1000000.0
for direction in directions:
# when we support cumulative data, then we can name files this way
#direction_filename = duration_type + '_' + direction + '.csv'
direction_filename = direction + '.csv'
direction_pathname = join(out_dir_path, direction_filename)
with open(direction_pathname, 'w') as transfer_fh:
if pbench_graphs:
transfer_fh.write('timestamp_ms, ')
transfer_fh.write('MB/s\n')
for j in range(0, len(intervals)):
if pbench_graphs:
transfer_fh.write('%d, ' % gen_timestamp_ms(j))
rate_interval = get_interval(j)
interval_profile = intervals[j]
                if 'read' in direction:
transfer = interval_profile.bytes_read
else:
transfer = interval_profile.bytes_written
transfer_fh.write('%-8.3f\n' %
((transfer/rate_interval)/bytes_per_MB))
def gen_per_fop_stats(out_dir_path, stat, duration_type='interval'):
per_fop_filename = stat + '.csv'
per_fop_path = join(out_dir_path, per_fop_filename)
with open(per_fop_path, 'a') as fop_fh:
hdr = ''
if pbench_graphs:
hdr += 'timestamp_ms, '
hdr += ','.join(sorted_fop_names)
hdr += '\n'
fop_fh.write(hdr)
for i in range(0, len(intervals)):
interval_profile = intervals[i]
fops_in_interval = interval_profile.fop_profiles
all_fop_profile = FopProfile(0, 0, 0, 0)
            for fop in sorted_fop_names:
                # a FOP may not appear in every interval
                fop_stats = fops_in_interval.get(fop)
                if fop_stats is not None:
                    all_fop_profile.accumulate(fop_stats)
all_fop_profile.normalize_sum()
#print('intvl: %d' % i)
#print('ALL FOPs: %s' % all_fop_profile)
if pbench_graphs:
fop_fh.write('%d, ' % gen_timestamp_ms(i))
columns = []
for fop in sorted_fop_names:
                try:
                    fop_stats = fops_in_interval[fop]
                except KeyError:
                    # FOP absent from this interval: substitute an empty profile
                    fop_stats = FopProfile(0, min_lat_infinity, 0, 0)
                    fops_in_interval[fop] = fop_stats
                fop_stats.get_pct_lat(
                    all_fop_profile.avg_lat * all_fop_profile.calls)
                columns.append(
                    fop_stats.field2str(
                        stat, interval_profile.duration))
fop_fh.write(','.join(columns) + '\n')
next_graph_template='''
<div class="chart">
<h3 class="chart-header">%s
<button id="save1">Save as Image</button>
<div id="svgdataurl1"></div>
</h3>
<svg id="chart%d"></svg>
<canvas id="canvas1" style="display:none"></canvas>
<script>
constructChart("lineChart", %d, "%s", 0.00);
</script>
</div>
'''
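# constructChart() is presumably defined by the pbench_utils.js bundle that the
# HTML header below loads; each chart div pairs an <svg> target with the name
# of the .csv file it should plot.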
def output_next_graph(graph_fh, gr_index):
(csv_filename, graph_description) = graph_csvs[gr_index]
gr_index += 1 # graph numbers start at 1
graph_fh.write( next_graph_template % (
graph_description, gr_index, gr_index, csv_filename))
header='''
<!DOCTYPE HTML>
<html>
<head>
<meta charset="utf-8">
<link href="static/css/v0.2/nv.d3.css" rel="stylesheet" type="text/css" media="all">
<link href="static/css/v0.2/pbench_utils.css" rel="stylesheet" type="text/css" media="all">
<script src="static/js/v0.2/function-bind.js"></script>
<script src="static/js/v0.2/fastdom.js"></script>
<script src="static/js/v0.2/d3.js"></script>
<script src="static/js/v0.2/nv.d3.js"></script>
<script src="static/js/v0.2/saveSvgAsPng.js"></script>
<script src="static/js/v0.2/pbench_utils.js"></script>
</head>
<body class="with-3d-shadow with-transitions">
<h2 class="page-header">summary profile of application activity on one client</h2>
'''
trailer='''
</body>
</html>
'''
def gen_graphs(out_dir_path):
graph_path = join(out_dir_path, 'gvp-client-graphs.html')
with open(graph_path, 'w') as graph_fh:
graph_fh.write(header)
for j in range(0, len(graph_csvs)):
output_next_graph(graph_fh, j)
graph_fh.write(trailer)
return graph_path
def gen_static_softlink(out_dir_path):
saved_cwd = os.getcwd()
static_dir = join(saved_cwd, 'static')
if not os.path.exists(static_dir):
        print('ERROR: sorry, the javascript directory "static" ' +
              'needs to be in the same directory as this script; trying anyway...')
os.chdir(out_dir_path)
os.symlink(join('..', 'static'), 'static')
os.chdir(saved_cwd)
def generate_output(out_dir_path):
gen_output_bytes(out_dir_path)
for s in stat_names:
gen_per_fop_stats(out_dir_path, s)
graph_path = gen_graphs(out_dir_path)
gen_static_softlink(out_dir_path)
sys.stdout.write('Gluster FOP types seen: ')
for fop_name in sorted_fop_names:
sys.stdout.write(' ' + fop_name)
sys.stdout.write('\n')
print('created Gluster statistics files in directory %s' % out_dir_path)
if not os.path.isabs(graph_path):
graph_path = join(os.getcwd(), graph_path)
print('graphs now available at browser URL file://%s' % graph_path)
def main():
if len(sys.argv) < 2:
        usage('missing gluster volume profile output log filename parameter')
fn = sys.argv[1]
parse_input(fn)
outdir = make_out_dir(fn)
generate_output(outdir)
main()
|
my_list = ['Pizza', 'index_1', 'index_2', 'index_3', 'index_4', 'index_5',
           '1', '2', '3', 'Banana']
squares = [1, 4, 9, 16]
print('a: ' + str(my_list))
for element in my_list:
print(element)
total = 0
for num in squares:
    total += num  # same as total = total + num
print('sum: ' + str(total))
if 'Pizza' in my_list:
print('Yeah, yeah, yeay! I love Pizza!')
print('Burger in my list? --> ' + str('Burger' in my_list))
for i in range(11):
print(i)
i = 0
while i < len(my_list):
print(str(i) + '. ' + str(my_list[i]))
i = i + 2
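# The same every-other-element walk can be written with a stride slice, e.g.:
# for i, element in enumerate(my_list[::2]):
#     print(str(i * 2) + '. ' + element)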
lines = '\n'.join(my_list)
titel = '\nmy_list line by line:'
print(titel)
print('-' * len(titel))
print(lines)
new_list = lines.split('\n')
titel = '\nstrings back to list:'
print(titel)
print('-' * len(titel))
print(new_list)
|
from __future__ import division
def intron_length(region1,region2,pos1,pos2):
c1,st1_sp1,sd1 = region1.split(':')
c2,st2_sp2,sd2 = region2.split(':')
st1,sp1 = map(int,st1_sp1.split('-'))
st2,sp2 = map(int,st2_sp2.split('-'))
if c1 != c2: raise ValueError("Conflict in chromosome names")
if sd1 != sd2: raise ValueError("Conflict in strands")
if sd1 == '+': return str(abs((st1+pos1) - (sp2-pos2))+1)
elif sd1 == '-': return str(abs((st2+pos2) - (sp1-pos1))+1)
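# Example region string: "chr1:1000-2000:+" splits into chromosome "chr1",
# start-stop "1000-2000" and strand "+", matching the parsing above.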
files = ["up5.score","up3.score","down5.score","down3.score"]
data = dict()
for fn in files:
i = 0
f = open(fn)
for row in f:
l = row.strip().split('\t')
if 'row_%s' % i not in data: data['row_%s' % i] = [l[0],l[2]]
else: data['row_%s' % i] += [l[0],l[2]]
i += 1
f.close()
print "up5SS\tup3SS\tup_len\tup5_score\tup3_score\tdown5SS\tdown3SS\tdown_len\tdown5_score\tdown3_score"
c = 0
for d in data:
if c > 20: break
v = data[d]
print "\t".join([v[0],v[2],intron_length(v[0],v[2],3,3),v[1],v[3],v[4],v[6],intron_length(v[4],v[6],3,3),v[5],v[7]])
    c += 1
|
"""
* Copyright (C) 2010-2014 Loïc BLOT, CNRS <http://www.unix-experience.fr/>
*
* This program is free software; you can redistribute it and/or modify
* it under the terms of the GNU General Public License as published by
* the Free Software Foundation; either version 2 of the License, or
* (at your option) any later version.
*
* This program is distributed in the hope that it will be useful,
* but WITHOUT ANY WARRANTY; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
* GNU General Public License for more details.
*
* You should have received a copy of the GNU General Public License
* along with this program; if not, write to the Free Software
* Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA
"""
import MySQLdb
from pyPgSQL import PgSQL
import datetime
import sys
import thread
import time
from threading import Lock
tc_mutex = Lock()
threadCounter = 0
import netdiscoCfg
max_threads = 30
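# threadCounter is shared by every cleaner thread; tc_mutex guards each
# increment and decrement so the main loop below can throttle at max_threads.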
def zeye_log(text):
    print "%s\n" % text
    with open("/usr/local/www/z-eye/datas/logs/z_eye_radius_cleaner.log", "a") as logfile:
        logfile.write("%s\n" % text)
def cleanRadius(dbhost,dbport,dbname):
global threadCounter
try:
tc_mutex.acquire()
threadCounter += 1
tc_mutex.release()
pgsqlCon = PgSQL.connect(host=netdiscoCfg.pgHost,user=netdiscoCfg.pgUser,password=netdiscoCfg.pgPwd,database=netdiscoCfg.pgDB)
pgcursor = pgsqlCon.cursor()
pgcursor.execute("SELECT login,pwd FROM z_eye_radius_db_list where addr='%s' and port='%s' and dbname='%s'" % (dbhost,dbport,dbname))
pgres2 = pgcursor.fetchone()
if(pgres2):
try:
mysqlconn = MySQLdb.connect(host=dbhost,user=pgres2[0],passwd=pgres2[1],port=dbport,db=dbname)
zeye_log("[Z-Eye][Radius-Cleaner] Connect to MySQL DB %s@%s:%s (user %s)" % (dbname,dbhost,dbport,pgres2[0]))
mysqlcur = mysqlconn.cursor()
mysqlcur.execute("SELECT username from z_eye_radusers WHERE expiration < NOW()")
mysqlres = mysqlcur.fetchall()
for idx in mysqlres:
mysqlcur.execute("DELETE FROM radcheck WHERE username = '%s'" % idx[0])
mysqlcur.execute("DELETE FROM radreply WHERE username = '%s'" % idx[0])
mysqlcur.execute("DELETE FROM radusergroup WHERE username = '%s'" % idx[0])
mysqlconn.commit()
mysqlcur.close()
mysqlconn.close()
except MySQLdb.Error, e:
zeye_log("[Z-Eye][Radius-Cleaner] MySQL Error %s" % e)
sys.exit(1)
tc_mutex.acquire()
threadCounter = threadCounter - 1
tc_mutex.release()
pgsqlCon.close()
except PgSQL.Error, e:
zeye_log("[Z-Eye][Radius-Cleaner] Pgsql Error %s" % e)
tc_mutex.acquire()
threadCounter = threadCounter - 1
tc_mutex.release()
sys.exit(1)
now = datetime.datetime.now()
zeye_log("[Z-Eye][Radius-Cleaner] Start at: %s" % now.strftime("%Y-%m-%d %H:%M"))
try:
pgsqlCon = PgSQL.connect(host=netdiscoCfg.pgHost,user=netdiscoCfg.pgUser,password=netdiscoCfg.pgPwd,database=netdiscoCfg.pgDB)
pgcursor = pgsqlCon.cursor()
pgcursor.execute("SELECT addr,port,dbname FROM z_eye_radius_options GROUP BY addr,port,dbname")
try:
pgres = pgcursor.fetchall()
for idx in pgres:
while threadCounter >= max_threads:
print "Waiting for %d threads..." % threadCounter
time.sleep(1)
thread.start_new_thread(cleanRadius,(idx[0],idx[1],idx[2]))
except StandardError, e:
zeye_log("[Z-Eye][Radius-Cleaner] Fatal Error: %s" % e)
except PgSQL.Error, e:
zeye_log("[Z-Eye][Radius-Cleaner] Pgsql Error %s" % e)
    sys.exit(1)
finally:
if pgsqlCon:
pgsqlCon.close()
while threadCounter > 0:
print "Waiting for %d threads..." % threadCounter
time.sleep(1)
totaltime = datetime.datetime.now() - now
now = datetime.datetime.now()
zeye_log("[Z-Eye][Radius-Cleaner] End at: %s (Total time %s)" % (now.strftime("%Y-%m-%d %H:%M"), totaltime))
|
import sys
try:
import os
import socket
import threading
except ImportError as err:
print("[!] Something has gone wrong while trying to import necessary libraries.")
print("[!]", err)
sys.exit(1)
except Exception as e:
    print("[!] An unexpected error has occurred.")
print("[!]", e)
sys.exit(1)
RECV_MAX = 4096
BINARY_DECODER = "utf-8"
CMD_WIDTH = 75
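# ClientHandler services one accepted connection per thread, printing each
# received chunk (up to RECV_MAX bytes, decoded with BINARY_DECODER) to stdout.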
class ClientHandler(threading.Thread):
def __init__(self, clientConnection, clientAddress):
threading.Thread.__init__(self)
self.clientConnection = clientConnection
self.clientAddress = clientAddress
def run(self):
        while True:
            self.request = self.clientConnection.recv(RECV_MAX)
            if not self.request:
                # An empty read means the client closed the connection.
                break
            self.request = self.request.decode(BINARY_DECODER)
            print(self.clientAddress[0] + "> " + self.request)
class TCPServer:
def __init__(self, serverAddress, serverPort, clientBufferSize):
self._addr = serverAddress
self._port = serverPort
self._cliBufSiz = clientBufferSize
self.activeConnections = 0
self._connectionList = []
def setup(self):
        print("[*] Setting up server at {0}:{1} ...".format(self._addr, self._port), end=" ")
try:
self.__serverSock = socket.socket(socket.AF_INET, socket.SOCK_STREAM)
self.__serverSock.bind((self._addr, self._port))
except socket.error as err:
raise RuntimeError(err)
except Exception as e:
            print("[!] An unexpected error has occurred.")
raise RuntimeError(e)
print("done.")
def activate(self):
try:
self.__serverSock.listen(self._cliBufSiz)
except socket.error as err:
raise RuntimeError(err)
except Exception as e:
            print("[!] An unexpected error has occurred.")
raise RuntimeError(e)
print("[*] Server is running at {0}:{1}".format(self._addr, self._port))
self.printStatus()
while True:
self.conn, self.cli_addr = self.__serverSock.accept()
self.client_thread = ClientHandler(self.conn, self.cli_addr)
            # Calling run() directly would handle the client on this thread and
            # block the accept loop, so start() spawns a separate thread instead.
            self.client_thread.start()
self._connectionList.append((self.cli_addr[0], self.cli_addr[1], "*", self.conn))
self.activeConnections += 1
print("[*] Got a connection from {0}:{1}".format(self.cli_addr[0], self.cli_addr[1]))
self.printStatus()
def printStatus(self):
print("[*] Waiting for connection ...")
print("\t=> Server : {0}:{1}".format(self._addr, self._port))
print("\t=> Queue : {0} connected, {1} at all.".format(self.activeConnections, self._cliBufSiz))
if self.activeConnections:
for conn in self._connectionList:
print("\n\tClient\t: {0}:{1}".format(conn[0], conn[1]))
print("\tStatus\t: " + conn[2])
print("\n")
if __name__ == "__main__":
server = TCPServer("localhost", 3333, 5)
server.setup()
server.activate()
|
import contextlib
from typing import Any, Callable, Iterator, Optional, Union
import astroid
from astroid import nodes
from astroid.manager import AstroidManager
from astroid.nodes.node_classes import AssignAttr, Name
from pylint.checkers import stdlib
from pylint.testutils import CheckerTestCase
@contextlib.contextmanager
def _add_transform(
manager: AstroidManager,
node: type,
transform: Callable,
predicate: Optional[Any] = None,
) -> Iterator:
manager.register_transform(node, transform, predicate)
try:
yield
finally:
manager.unregister_transform(node, transform, predicate)
class TestStdlibChecker(CheckerTestCase):
CHECKER_CLASS = stdlib.StdlibChecker
def test_deprecated_no_qname_on_unexpected_nodes(self) -> None:
"""Test that we don't crash on nodes which don't have a qname method.
While this test might seem weird since it uses a transform, it's actually testing a crash
that happened in production, but there was no way to retrieve the code for which this
occurred (how an AssignAttr got to be the result of a function inference beats me...)
"""
def infer_func(
node: Name, context: Optional[Any] = None
) -> Iterator[
Union[Iterator, Iterator[AssignAttr]]
]: # pylint: disable=unused-argument
new_node = nodes.AssignAttr(attrname="alpha", parent=node)
yield new_node
manager = astroid.MANAGER
transform = astroid.inference_tip(infer_func)
with _add_transform(manager, nodes.Name, transform):
node = astroid.extract_node(
"""
call_something()
"""
)
with self.assertNoMessages():
self.checker.visit_call(node)
|
"""Basic core types and utilities."""
import os
import time
import functools
import pathlib
import dataclasses
from collections import namedtuple
from typing import Optional
from . import LOCAL_FS_ENCODING
from .utils.log import getLogger
log = getLogger(__name__)
AUDIO_NONE = 0
AUDIO_MP3 = 1
AUDIO_TYPES = (AUDIO_NONE, AUDIO_MP3)
LP_TYPE = "lp"
EP_TYPE = "ep"
EP_MAX_SIZE_HINT = 6
COMP_TYPE = "compilation"
LIVE_TYPE = "live"
VARIOUS_TYPE = "various"
DEMO_TYPE = "demo"
SINGLE_TYPE = "single"
ALBUM_TYPE_IDS = [LP_TYPE, EP_TYPE, COMP_TYPE, LIVE_TYPE, VARIOUS_TYPE,
DEMO_TYPE, SINGLE_TYPE]
VARIOUS_ARTISTS = "Various Artists"
TXXX_ALBUM_TYPE = "eyeD3#album_type"
TXXX_ARTIST_ORIGIN = "eyeD3#artist_origin"
CountAndTotalTuple = namedtuple("CountAndTotalTuple", "count, total")
@dataclasses.dataclass
class ArtistOrigin:
city: str
state: str
country: str
def __bool__(self):
return bool(self.city or self.state or self.country)
def id3Encode(self):
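        # e.g. ArtistOrigin("Seattle", "WA", "USA").id3Encode() -> "Seattle\tWA\tUSA"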
return "\t".join([(o if o else "") for o in dataclasses.astuple(self)])
@dataclasses.dataclass
class AudioInfo:
"""A base container for common audio details."""
# The number of seconds of audio data (i.e., the playtime)
time_secs: float
# The number of bytes of audio data.
size_bytes: int
def __post_init__(self):
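        # Truncate (not round) the playtime to two decimal places.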
self.time_secs = int(self.time_secs * 100.0) / 100.0
class Tag:
"""An abstract interface for audio tag (meta) data (e.g. artist, title,
etc.)
"""
read_only: bool = False
def _setArtist(self, val):
raise NotImplementedError() # pragma: nocover
def _getArtist(self):
raise NotImplementedError() # pragma: nocover
def _getAlbumArtist(self):
raise NotImplementedError() # pragma: nocover
def _setAlbumArtist(self, val):
raise NotImplementedError() # pragma: nocover
def _setAlbum(self, val):
raise NotImplementedError() # pragma: nocover
def _getAlbum(self):
raise NotImplementedError() # pragma: nocover
def _setTitle(self, val):
raise NotImplementedError() # pragma: nocover
def _getTitle(self):
raise NotImplementedError() # pragma: nocover
def _setTrackNum(self, val):
raise NotImplementedError() # pragma: nocover
def _getTrackNum(self) -> CountAndTotalTuple:
raise NotImplementedError() # pragma: nocover
@property
def artist(self):
return self._getArtist()
@artist.setter
def artist(self, v):
self._setArtist(v)
@property
def album_artist(self):
return self._getAlbumArtist()
@album_artist.setter
def album_artist(self, v):
self._setAlbumArtist(v)
@property
def album(self):
return self._getAlbum()
@album.setter
def album(self, v):
self._setAlbum(v)
@property
def title(self):
return self._getTitle()
@title.setter
def title(self, v):
self._setTitle(v)
@property
def track_num(self) -> CountAndTotalTuple:
"""Track number property.
Must return a 2-tuple of (track-number, total-number-of-tracks).
Either tuple value may be ``None``.
"""
return self._getTrackNum()
@track_num.setter
def track_num(self, v):
self._setTrackNum(v)
def __init__(self, title=None, artist=None, album=None, album_artist=None, track_num=None):
self.title = title
self.artist = artist
self.album = album
self.album_artist = album_artist
self.track_num = track_num
class AudioFile:
"""Abstract base class for audio file types (AudioInfo + Tag)"""
tag: Tag = None
def _read(self):
"""Subclasses MUST override this method and set ``self._info``,
``self._tag`` and ``self.type``.
"""
raise NotImplementedError()
def initTag(self, version=None):
raise NotImplementedError()
def rename(self, name, fsencoding=LOCAL_FS_ENCODING,
preserve_file_time=False):
"""Rename the file to ``name``.
The encoding used for the file name is :attr:`eyed3.LOCAL_FS_ENCODING`
unless overridden by ``fsencoding``. Note, if the target file already
exists, or the full path contains non-existent directories the
operation will fail with :class:`IOError`.
File times are not modified when ``preserve_file_time`` is ``True``,
``False`` is the default.
"""
curr_path = pathlib.Path(self.path)
ext = curr_path.suffix
new_path = curr_path.parent / "{name}{ext}".format(**locals())
if new_path.exists():
raise IOError(f"File '{new_path}' exists, will not overwrite")
elif not new_path.parent.exists():
raise IOError("Target directory '%s' does not exists, will not "
"create" % new_path.parent)
os.rename(self.path, str(new_path))
if self.tag:
self.tag.file_info.name = str(new_path)
if preserve_file_time:
self.tag.file_info.touch((self.tag.file_info.atime,
self.tag.file_info.mtime))
self.path = str(new_path)
@property
def path(self):
"""The absolute path of this file."""
return self._path
@path.setter
def path(self, path):
"""Set the path"""
if isinstance(path, pathlib.Path):
path = str(path)
self._path = path
@property
def info(self) -> AudioInfo:
"""Returns a concrete implemenation of :class:`eyed3.core.AudioInfo`"""
return self._info
@property
def tag(self):
"""Returns a concrete implemenation of :class:`eyed3.core.Tag`"""
return self._tag
@tag.setter
def tag(self, t):
self._tag = t
def __init__(self, path):
"""Construct with a path and invoke ``_read``.
All other members are set to None."""
if isinstance(path, pathlib.Path):
path = str(path)
self.path = path
self.type = None
self._info = None
self._tag = None
self._read()
def __str__(self):
return str(self.path)
@functools.total_ordering
class Date:
"""
A class for representing a date and time (optional). This class differs
from ``datetime.datetime`` in that the default values for month, day,
    hour, minute, and second are ``None`` and not 'January 1, 00:00:00'.
This allows for an object that is simply 1987, and not January 1 12AM,
    for example. But when more resolution is required those values can be set
as well.
"""
TIME_STAMP_FORMATS = ["%Y",
"%Y-%m",
"%Y-%m-%d",
"%Y-%m-%dT%H",
"%Y-%m-%dT%H:%M",
"%Y-%m-%dT%H:%M:%S",
                          # The following end with 'Z', signaling the time is UTC
"%Y-%m-%dT%HZ",
"%Y-%m-%dT%H:%MZ",
"%Y-%m-%dT%H:%M:%SZ",
# The following are wrong per the specs, but ...
"%Y-%m-%d %H:%M:%S",
"%Y-00-00",
"%Y%m%d",
]
"""Valid time stamp formats per ISO 8601 and used by `strptime`."""
def __init__(self, year, month=None, day=None,
hour=None, minute=None, second=None):
# Validate with datetime
from datetime import datetime
_ = datetime(year, month if month is not None else 1,
day if day is not None else 1,
hour if hour is not None else 0,
minute if minute is not None else 0,
second if second is not None else 0)
self._year = year
self._month = month
self._day = day
self._hour = hour
self._minute = minute
self._second = second
        # Python's date classes do a lot more date validation that does not
        # need to be duplicated here, so validate by round-tripping the string.
_ = Date._validateFormat(str(self)) # noqa
@property
def year(self):
return self._year
@property
def month(self):
return self._month
@property
def day(self):
return self._day
@property
def hour(self):
return self._hour
@property
def minute(self):
return self._minute
@property
def second(self):
return self._second
def __eq__(self, rhs) -> bool:
if not rhs:
return False
return (self.year == rhs.year and
self.month == rhs.month and
self.day == rhs.day and
self.hour == rhs.hour and
self.minute == rhs.minute and
self.second == rhs.second)
def __ne__(self, rhs) -> bool:
        return not (self == rhs)
def __lt__(self, rhs) -> bool:
if not rhs:
return False
for left, right in ((self.year, rhs.year),
(self.month, rhs.month),
(self.day, rhs.day),
(self.hour, rhs.hour),
(self.minute, rhs.minute),
(self.second, rhs.second)):
left = left if left is not None else -1
right = right if right is not None else -1
if left < right:
return True
elif left > right:
return False
return False
def __hash__(self) -> int:
return hash(str(self))
@staticmethod
def _validateFormat(s):
pdate, fmt = None, None
for fmt in Date.TIME_STAMP_FORMATS:
try:
pdate = time.strptime(s, fmt)
break
except ValueError:
# date string did not match format.
continue
if pdate is None:
raise ValueError(f"Invalid date string: {s}")
return pdate, fmt
@staticmethod
def parse(s):
"""Parses date strings that conform to ISO-8601."""
if not isinstance(s, str):
s = s.decode("ascii")
s = s.strip('\x00')
pdate, fmt = Date._validateFormat(s)
# Here is the difference with Python date/datetime objects, some
# of the members can be None
kwargs = {}
if "%m" in fmt:
kwargs["month"] = pdate.tm_mon
if "%d" in fmt:
kwargs["day"] = pdate.tm_mday
if "%H" in fmt:
kwargs["hour"] = pdate.tm_hour
if "%M" in fmt:
kwargs["minute"] = pdate.tm_min
if "%S" in fmt:
kwargs["second"] = pdate.tm_sec
return Date(pdate.tm_year, **kwargs)
def __str__(self) -> str:
"""Returns date strings that conform to ISO-8601.
The returned string will be no larger than 17 characters."""
s = "%d" % self.year
if self.month:
s += "-%s" % str(self.month).rjust(2, '0')
if self.day:
s += "-%s" % str(self.day).rjust(2, '0')
if self.hour is not None:
s += "T%s" % str(self.hour).rjust(2, '0')
if self.minute is not None:
s += ":%s" % str(self.minute).rjust(2, '0')
if self.second is not None:
s += ":%s" % str(self.second).rjust(2, '0')
return s
def parseError(ex) -> None:
"""A function that is invoked when non-fatal parse, format, etc. errors
occur. In most cases the invalid values will be ignored or possibly fixed.
This function simply logs the error."""
log.warning(ex)
def load(path, tag_version=None) -> Optional[AudioFile]:
"""Loads the file identified by ``path`` and returns a concrete type of
:class:`eyed3.core.AudioFile`. If ``path`` is not a file an ``IOError`` is
raised. ``None`` is returned when the file type (i.e. mime-type) is not
recognized.
The following AudioFile types are supported:
* :class:`eyed3.mp3.Mp3AudioFile` - For mp3 audio files.
* :class:`eyed3.id3.TagFile` - For raw ID3 data files.
If ``tag_version`` is not None (the default) only a specific version of
metadata is loaded. This value must be a version constant specific to the
eventual format of the metadata.
"""
from . import mimetype, mp3, id3
if not isinstance(path, pathlib.Path):
path = pathlib.Path(path)
log.debug(f"Loading file: {path}")
if path.exists():
if not path.is_file():
raise IOError(f"not a file: {path}")
else:
raise IOError(f"file not found: {path}")
mtype = mimetype.guessMimetype(path)
log.debug(f"File mime-type: {mtype}")
if mtype in mp3.MIME_TYPES:
return mp3.Mp3AudioFile(path, tag_version)
elif mtype == id3.ID3_MIME_TYPE:
return id3.TagFile(path, tag_version)
else:
return None
|
from PyQt4 import QtDesigner
from camelot.view.plugins import CamelotEditorPlugin
class DateEditorPlugin(QtDesigner.QPyDesignerCustomWidgetPlugin, CamelotEditorPlugin):
def __init__(self, parent = None):
QtDesigner.QPyDesignerCustomWidgetPlugin.__init__(self)
        from camelot.view.controls.editors import DateEditor
        CamelotEditorPlugin.__init__(self)
        self._widget = DateEditor
|
from PyQt4.QtCore import *
from PyQt4.QtGui import *
from qgis.core import *
import sys, os, httplib, json, tempfile, urllib
from Utils import *
from resources import *
from numericmarkers import *
from ImageDialog import *
class Elevation:
def __init__(self, iface):
# Save reference to the QGIS interface
self.iface = iface
self.canvas = iface.mapCanvas()
# store layer id
self.layerid = ''
def initGui(self):
self.obtainAction = QAction(QIcon(":/plugins/elevation/elevation_icon.png"), QCoreApplication.translate('Elevation', "&Obtain Elevation"), self.iface.mainWindow())
self.aboutAction = QAction(QIcon(":/plugins/elevation/about_icon.png"), QCoreApplication.translate('Elevation', "&About"), self.iface.mainWindow())
self.iface.addPluginToMenu("Elevation", self.obtainAction)
self.iface.addPluginToMenu("Elevation", self.aboutAction)
self.iface.addToolBarIcon(self.obtainAction)
QObject.connect(self.obtainAction, SIGNAL("triggered()"), self.obtain)
QObject.connect(self.aboutAction, SIGNAL("triggered()"), self.about)
def unload(self):
# Remove the plugin menu item and icon
self.iface.removePluginMenu("Elevation", self.obtainAction)
self.iface.removePluginMenu("Elevation", self.aboutAction)
self.iface.removeToolBarIcon(self.obtainAction)
def about(self):
        infoString = QCoreApplication.translate('Elevation', "QGIS Elevation Plugin 0.4.0<br />This plugin allows you to mark point elevations in Google Maps.<br />Copyright (c) 2010, 2013 Steffen Macke<br /><a href=\"http://polylinie.de/elevation\">polylinie.de/elevation</a><br/>You have to accept the<br/><a href=\"http://code.google.com/intl/en/apis/maps/terms.html\">Google Maps APIs Terms of Service</a>\n")
QMessageBox.information(self.iface.mainWindow(), "About Elevation", infoString)
# Obtain elevation
def obtain(self):
chk = self.check_settings()
        if chk:
QMessageBox.information(self.iface.mainWindow(), QCoreApplication.translate('Elevation', "Elevation plugin error"), chk)
return
sb = self.iface.mainWindow().statusBar()
sb.showMessage(QCoreApplication.translate('Elevation', "Click on the map to obtain the elevation"))
        ct = ClickTool(self.iface, self.obtain_action)
self.iface.mapCanvas().setMapTool(ct)
def get_elevation(self,point):
epsg4326 = QgsCoordinateReferenceSystem(4326, QgsCoordinateReferenceSystem.EpsgCrsId)
self.reprojectgeographic = QgsCoordinateTransform(self.iface.mapCanvas().mapRenderer().destinationCrs(), epsg4326)
pt = self.reprojectgeographic.transform(point)
conn = httplib.HTTPConnection("maps.googleapis.com")
QgsMessageLog.instance().logMessage( "http://maps.googleapis.com/maps/api/elevation/json?locations=" + str(pt[1])+","+str(pt[0])+"&sensor=false", "Elevation")
conn.request("GET", "/maps/api/elevation/json?locations=" + str(pt[1])+","+str(pt[0])+"&sensor=false")
response = conn.getresponse()
jsonresult = response.read()
try:
results = json.loads(jsonresult).get('results')
            if results:
return int(round(results[0].get('elevation')))
else:
QMessageBox.warning(self.iface.mainWindow(), 'Elevation', 'HTTP GET Request failed.', QMessageBox.Ok, QMessageBox.Ok)
except ValueError, e:
QMessageBox.warning(self.iface.mainWindow(), 'Elevation', 'JSON decode failed: '+str(jsonresult), QMessageBox.Ok, QMessageBox.Ok)
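    # A successful Elevation API response looks roughly like:
    # {"results": [{"elevation": 8815.7, "location": {...}}], "status": "OK"}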
    def obtain_action(self, point):
epsg4326 = QgsCoordinateReferenceSystem(4326, QgsCoordinateReferenceSystem.EpsgCrsId)
self.reprojectgeographic = QgsCoordinateTransform(self.iface.mapCanvas().mapRenderer().destinationCrs(), epsg4326)
pt = self.reprojectgeographic.transform(point)
elevation = self.get_elevation(point)
        if elevation is None:
QMessageBox.warning(self.iface.mainWindow(), 'Elevation', 'Failed to get elevation.', QMessageBox.Ok, QMessageBox.Ok)
else:
# save point
self.save_point(point, elevation)
#find marker
marker = 'http://bit.ly/aUwrKs'
for x in range(0, 1000):
                if (elevation + x) in numericmarkers:
                    marker = numericmarkers.get(elevation + x)
                    break
                if (elevation - x) in numericmarkers:
                    marker = numericmarkers.get(elevation - x)
                    break
# create map
image = tempfile.mkstemp(suffix='png')
os.close(image[0])
            urllib.urlretrieve('http://maps.google.com/maps/api/staticmap?size=640x480&maptype=terrain&markers=icon:'+marker+'|'+str(pt[1])+','+str(pt[0])+'&mobile=true&sensor=false', image[1])
            QgsMessageLog.instance().logMessage('http://maps.google.com/maps/api/staticmap?size=640x480&maptype=terrain&markers=icon:'+marker+'|'+str(pt[1])+','+str(pt[0])+'&mobile=true&sensor=false')
dlg = ImageDialog()
dlg.image.setPixmap(QPixmap(image[1]))
dlg.show()
dlg.exec_()
if os.path.exists(image[1]):
os.unlink(image[1])
# save point to file, point is in project's crs
def save_point(self, point, elevation):
# create and add the point layer if not exists or not set
if not QgsMapLayerRegistry.instance().mapLayer(self.layerid) :
# create layer with same CRS as project
self.layer = QgsVectorLayer("Point?crs=epsg:4326", "Elevation Plugin Results", "memory")
self.provider = self.layer.dataProvider()
# add fields
self.provider.addAttributes( [QgsField("elevation", QVariant.Double)] )
self.layer.updateFields()
# Labels on
label = self.layer.label()
label.setLabelField(QgsLabel.Text, 0)
self.layer.enableLabels(True)
# add layer if not already
QgsMapLayerRegistry.instance().addMapLayer(self.layer)
# store layer id
self.layerid = QgsMapLayerRegistry.instance().mapLayers().keys()[-1]
# add a feature
fet = QgsFeature()
fet.initAttributes(1)
fet.setGeometry(QgsGeometry.fromPoint(self.reprojectgeographic.transform(point)))
fet.setAttribute(0, elevation)
self.provider.addFeatures( [ fet ] )
# update layer's extent when new features have been added
# because change of extent in provider is not propagated to the layer
self.layer.updateExtents()
self.canvas.refresh()
# check project settings before obtaining elevations
# return an error string
    def check_settings(self):
p = QgsProject.instance()
error = ''
proj4string = self.iface.mapCanvas().mapRenderer().destinationCrs().toProj4()
return error
if __name__ == "__main__":
pass
|
import datetime
from south.db import db
from south.v2 import SchemaMigration
from django.db import models
class Migration(SchemaMigration):
def forwards(self, orm):
# Adding M2M table for field maintainers on 'Herd'
db.create_table('djeuscan_herd_maintainers', (
('id', models.AutoField(verbose_name='ID', primary_key=True, auto_created=True)),
('herd', models.ForeignKey(orm['djeuscan.herd'], null=False)),
('maintainer', models.ForeignKey(orm['djeuscan.maintainer'], null=False))
))
db.create_unique('djeuscan_herd_maintainers', ['herd_id', 'maintainer_id'])
def backwards(self, orm):
# Removing M2M table for field maintainers on 'Herd'
db.delete_table('djeuscan_herd_maintainers')
models = {
'auth.group': {
'Meta': {'object_name': 'Group'},
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'name': ('django.db.models.fields.CharField', [], {'unique': 'True', 'max_length': '80'}),
'permissions': ('django.db.models.fields.related.ManyToManyField', [], {'to': "orm['auth.Permission']", 'symmetrical': 'False', 'blank': 'True'})
},
'auth.permission': {
'Meta': {'ordering': "('content_type__app_label', 'content_type__model', 'codename')", 'unique_together': "(('content_type', 'codename'),)", 'object_name': 'Permission'},
'codename': ('django.db.models.fields.CharField', [], {'max_length': '100'}),
'content_type': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['contenttypes.ContentType']"}),
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'name': ('django.db.models.fields.CharField', [], {'max_length': '50'})
},
'auth.user': {
'Meta': {'object_name': 'User'},
'date_joined': ('django.db.models.fields.DateTimeField', [], {'default': 'datetime.datetime.now'}),
'email': ('django.db.models.fields.EmailField', [], {'max_length': '75', 'blank': 'True'}),
'first_name': ('django.db.models.fields.CharField', [], {'max_length': '30', 'blank': 'True'}),
'groups': ('django.db.models.fields.related.ManyToManyField', [], {'to': "orm['auth.Group']", 'symmetrical': 'False', 'blank': 'True'}),
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'is_active': ('django.db.models.fields.BooleanField', [], {'default': 'True'}),
'is_staff': ('django.db.models.fields.BooleanField', [], {'default': 'False'}),
'is_superuser': ('django.db.models.fields.BooleanField', [], {'default': 'False'}),
'last_login': ('django.db.models.fields.DateTimeField', [], {'default': 'datetime.datetime.now'}),
'last_name': ('django.db.models.fields.CharField', [], {'max_length': '30', 'blank': 'True'}),
'password': ('django.db.models.fields.CharField', [], {'max_length': '128'}),
'user_permissions': ('django.db.models.fields.related.ManyToManyField', [], {'to': "orm['auth.Permission']", 'symmetrical': 'False', 'blank': 'True'}),
'username': ('django.db.models.fields.CharField', [], {'unique': 'True', 'max_length': '30'})
},
'contenttypes.contenttype': {
'Meta': {'ordering': "('name',)", 'unique_together': "(('app_label', 'model'),)", 'object_name': 'ContentType', 'db_table': "'django_content_type'"},
'app_label': ('django.db.models.fields.CharField', [], {'max_length': '100'}),
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'model': ('django.db.models.fields.CharField', [], {'max_length': '100'}),
'name': ('django.db.models.fields.CharField', [], {'max_length': '100'})
},
'djeuscan.categoryassociation': {
'Meta': {'unique_together': "(['user', 'category'],)", 'object_name': 'CategoryAssociation'},
'category': ('django.db.models.fields.CharField', [], {'max_length': '128'}),
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'user': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['auth.User']"})
},
'djeuscan.categorylog': {
'Meta': {'object_name': 'CategoryLog', '_ormbases': ['djeuscan.Log']},
'category': ('django.db.models.fields.CharField', [], {'max_length': '128'}),
'log_ptr': ('django.db.models.fields.related.OneToOneField', [], {'to': "orm['djeuscan.Log']", 'unique': 'True', 'primary_key': 'True'})
},
'djeuscan.euscanresult': {
'Meta': {'object_name': 'EuscanResult'},
'datetime': ('django.db.models.fields.DateTimeField', [], {}),
'ebuild': ('django.db.models.fields.CharField', [], {'max_length': '256', 'blank': 'True'}),
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'package': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['djeuscan.Package']"}),
'result': ('django.db.models.fields.TextField', [], {'blank': 'True'}),
'scan_time': ('django.db.models.fields.FloatField', [], {'null': 'True', 'blank': 'True'})
},
'djeuscan.herd': {
'Meta': {'object_name': 'Herd'},
'email': ('django.db.models.fields.CharField', [], {'max_length': '128', 'null': 'True', 'blank': 'True'}),
'herd': ('django.db.models.fields.CharField', [], {'unique': 'True', 'max_length': '128'}),
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'maintainers': ('django.db.models.fields.related.ManyToManyField', [], {'to': "orm['djeuscan.Maintainer']", 'symmetrical': 'False'})
},
'djeuscan.herdassociation': {
'Meta': {'unique_together': "(['user', 'herd'],)", 'object_name': 'HerdAssociation'},
'herd': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['djeuscan.Herd']"}),
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'user': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['auth.User']"})
},
'djeuscan.herdlog': {
'Meta': {'object_name': 'HerdLog', '_ormbases': ['djeuscan.Log']},
'herd': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['djeuscan.Herd']"}),
'log_ptr': ('django.db.models.fields.related.OneToOneField', [], {'to': "orm['djeuscan.Log']", 'unique': 'True', 'primary_key': 'True'})
},
'djeuscan.log': {
'Meta': {'object_name': 'Log'},
'datetime': ('django.db.models.fields.DateTimeField', [], {}),
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'n_packages_gentoo': ('django.db.models.fields.IntegerField', [], {'default': '0'}),
'n_packages_outdated': ('django.db.models.fields.IntegerField', [], {'default': '0'}),
'n_packages_overlay': ('django.db.models.fields.IntegerField', [], {'default': '0'}),
'n_versions_gentoo': ('django.db.models.fields.IntegerField', [], {'default': '0'}),
'n_versions_overlay': ('django.db.models.fields.IntegerField', [], {'default': '0'}),
'n_versions_upstream': ('django.db.models.fields.IntegerField', [], {'default': '0'})
},
'djeuscan.maintainer': {
'Meta': {'object_name': 'Maintainer'},
'email': ('django.db.models.fields.CharField', [], {'unique': 'True', 'max_length': '128'}),
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'name': ('django.db.models.fields.CharField', [], {'max_length': '128'})
},
'djeuscan.maintainerassociation': {
'Meta': {'unique_together': "(['user', 'maintainer'],)", 'object_name': 'MaintainerAssociation'},
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'maintainer': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['djeuscan.Maintainer']"}),
'user': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['auth.User']"})
},
'djeuscan.maintainerlog': {
'Meta': {'object_name': 'MaintainerLog', '_ormbases': ['djeuscan.Log']},
'log_ptr': ('django.db.models.fields.related.OneToOneField', [], {'to': "orm['djeuscan.Log']", 'unique': 'True', 'primary_key': 'True'}),
'maintainer': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['djeuscan.Maintainer']"})
},
'djeuscan.overlayassociation': {
'Meta': {'unique_together': "(['user', 'overlay'],)", 'object_name': 'OverlayAssociation'},
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'overlay': ('django.db.models.fields.CharField', [], {'max_length': '128'}),
'user': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['auth.User']"})
},
'djeuscan.package': {
'Meta': {'unique_together': "(['category', 'name'],)", 'object_name': 'Package'},
'category': ('django.db.models.fields.CharField', [], {'max_length': '128'}),
'description': ('django.db.models.fields.TextField', [], {'blank': 'True'}),
'herds': ('django.db.models.fields.related.ManyToManyField', [], {'to': "orm['djeuscan.Herd']", 'symmetrical': 'False', 'blank': 'True'}),
'homepage': ('django.db.models.fields.TextField', [], {'blank': 'True'}),
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'last_version_gentoo': ('django.db.models.fields.related.ForeignKey', [], {'blank': 'True', 'related_name': "'last_version_gentoo'", 'null': 'True', 'on_delete': 'models.SET_NULL', 'to': "orm['djeuscan.Version']"}),
'last_version_overlay': ('django.db.models.fields.related.ForeignKey', [], {'blank': 'True', 'related_name': "'last_version_overlay'", 'null': 'True', 'on_delete': 'models.SET_NULL', 'to': "orm['djeuscan.Version']"}),
'last_version_upstream': ('django.db.models.fields.related.ForeignKey', [], {'blank': 'True', 'related_name': "'last_version_upstream'", 'null': 'True', 'on_delete': 'models.SET_NULL', 'to': "orm['djeuscan.Version']"}),
'maintainers': ('django.db.models.fields.related.ManyToManyField', [], {'to': "orm['djeuscan.Maintainer']", 'symmetrical': 'False', 'blank': 'True'}),
'n_overlay': ('django.db.models.fields.IntegerField', [], {'default': '0'}),
'n_packaged': ('django.db.models.fields.IntegerField', [], {'default': '0'}),
'n_versions': ('django.db.models.fields.IntegerField', [], {'default': '0'}),
'name': ('django.db.models.fields.CharField', [], {'max_length': '128'})
},
'djeuscan.packageassociation': {
'Meta': {'unique_together': "(['user', 'package'],)", 'object_name': 'PackageAssociation'},
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'package': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['djeuscan.Package']"}),
'user': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['auth.User']"})
},
'djeuscan.problemreport': {
'Meta': {'object_name': 'ProblemReport'},
'datetime': ('django.db.models.fields.DateTimeField', [], {'auto_now_add': 'True', 'blank': 'True'}),
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'message': ('django.db.models.fields.TextField', [], {}),
'package': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['djeuscan.Package']"}),
'subject': ('django.db.models.fields.CharField', [], {'max_length': '128'}),
'version': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['djeuscan.Version']", 'null': 'True', 'blank': 'True'})
},
'djeuscan.refreshpackagequery': {
'Meta': {'object_name': 'RefreshPackageQuery'},
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'package': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['djeuscan.Package']"}),
'priority': ('django.db.models.fields.IntegerField', [], {'default': '0'})
},
'djeuscan.version': {
'Meta': {'unique_together': "(['package', 'slot', 'revision', 'version', 'overlay'],)", 'object_name': 'Version'},
'alive': ('django.db.models.fields.BooleanField', [], {'default': 'True', 'db_index': 'True'}),
'confidence': ('django.db.models.fields.IntegerField', [], {'default': '0'}),
'ebuild_path': ('django.db.models.fields.CharField', [], {'max_length': '256', 'blank': 'True'}),
'handler': ('django.db.models.fields.CharField', [], {'max_length': '128', 'blank': 'True'}),
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'metadata_path': ('django.db.models.fields.CharField', [], {'max_length': '256', 'blank': 'True'}),
'overlay': ('django.db.models.fields.CharField', [], {'default': "'gentoo'", 'max_length': '128', 'db_index': 'True', 'blank': 'True'}),
'package': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['djeuscan.Package']"}),
'packaged': ('django.db.models.fields.BooleanField', [], {'default': 'False'}),
'revision': ('django.db.models.fields.CharField', [], {'max_length': '128'}),
'slot': ('django.db.models.fields.CharField', [], {'default': "''", 'max_length': '128', 'blank': 'True'}),
'urls': ('django.db.models.fields.TextField', [], {'blank': 'True'}),
'version': ('django.db.models.fields.CharField', [], {'max_length': '128'}),
'vtype': ('django.db.models.fields.CharField', [], {'max_length': '128', 'blank': 'True'})
},
'djeuscan.versionlog': {
'Meta': {'object_name': 'VersionLog'},
'action': ('django.db.models.fields.IntegerField', [], {}),
'datetime': ('django.db.models.fields.DateTimeField', [], {'auto_now_add': 'True', 'blank': 'True'}),
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'overlay': ('django.db.models.fields.CharField', [], {'default': "'gentoo'", 'max_length': '128', 'blank': 'True'}),
'package': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['djeuscan.Package']"}),
'packaged': ('django.db.models.fields.BooleanField', [], {'default': 'False'}),
'revision': ('django.db.models.fields.CharField', [], {'max_length': '128'}),
'slot': ('django.db.models.fields.CharField', [], {'default': "''", 'max_length': '128', 'blank': 'True'}),
'version': ('django.db.models.fields.CharField', [], {'max_length': '128'}),
'vtype': ('django.db.models.fields.CharField', [], {'max_length': '128', 'blank': 'True'})
},
'djeuscan.worldlog': {
'Meta': {'object_name': 'WorldLog', '_ormbases': ['djeuscan.Log']},
'log_ptr': ('django.db.models.fields.related.OneToOneField', [], {'to': "orm['djeuscan.Log']", 'unique': 'True', 'primary_key': 'True'})
}
}
complete_apps = ['djeuscan']
|
from django.contrib import admin
from django.db.models import Count
from models import EbuildModel, PackageModel, LicenseModel, CategoryModel, \
UseFlagModel, RepositoryModel, HomepageModel, MaintainerModel, \
Keyword, ArchesModel, UseFlagDescriptionModel, HerdsModel, \
VirtualPackageModel, RepositoryFeedModel, \
RepositorySourceModel, LicenseGroupModel, PortageNewsModel
class KeywordAdmin(admin.TabularInline):
model = Keyword
class ArchesAdmin(admin.ModelAdmin):
list_display = ('name',)
search_fields = ('name',)
class EbuildAdmin(admin.ModelAdmin):
list_display = ('__unicode__', 'is_hard_masked', )
list_filter = ('created_datetime','updated_datetime', 'is_hard_masked', 'licenses')
filter_horizontal = ('licenses', 'use_flags', 'homepages')
date_hierarchy = 'updated_datetime'
list_select_related = True
inlines = (KeywordAdmin,)
class VirtualPackageAdmin(admin.ModelAdmin):
list_display = ('__unicode__',)
search_fields = ('name','category__category')
list_select_related = True
class PackageAdmin(admin.ModelAdmin):
list_display = ('__unicode__', 'ebuilds_count')
list_filter = ('created_datetime', 'updated_datetime', 'herds')
list_select_related = True
class HerdsAdmin(admin.ModelAdmin):
list_display = ('name', 'email', 'description', 'packages_count',
'ebuilds_count', 'maintainers_count',)
# 'repositories_count')
search_fields = ('name', 'email')
class MaintainerAdmin(admin.ModelAdmin):
list_display = ('name', 'email', 'is_dev', 'packages_count',
'ebuilds_count', 'herds_count')
list_filter = ('is_dev',)
search_fields = ('name', 'email')
class UseFlagAdmin(admin.ModelAdmin):
list_display = ('name', 'description', 'ebuilds_count')
search_fields = ('name', 'description')
class UseFlagDescriptionAdmin(admin.ModelAdmin):
list_display = ('use_flag', 'package', 'description')
list_select_related = True
class HomepageAdmin(admin.ModelAdmin):
list_display = ('url',)
search_fields = ('url',)
class LicenseAdmin(admin.ModelAdmin):
list_display = ('name', 'ebuilds_count')
search_fields = ('name',)
class RepositoryAdmin(admin.ModelAdmin):
list_display = ('name', 'updated_datetime', 'official', 'homepage',
'quality', 'packages_count', 'ebuilds_count')
search_fields = ('name', 'description', 'owner_name', 'owner_email')
list_filter = ('created_datetime', 'updated_datetime', 'official', 'quality')
date_hierarchy = 'updated_datetime'
class RepositoryFeedAdmin(admin.ModelAdmin):
list_display = ('repository', 'feed')
search_fields = ('repository__name', 'feed')
list_filter = ('repository', )
list_select_related = True
class RepositorySourceAdmin(admin.ModelAdmin):
list_display = ('repository', 'repo_type', 'url', 'subpath')
search_fields = ('repository__name', 'url')
list_filter = ('repo_type', )
list_select_related = True
class PortageNewsAdmin(admin.ModelAdmin):
list_display = ('name', 'lang', 'date')
list_filter = ('lang',)
search_fields = ('name', 'title', 'message')
date_hierarchy = 'date'
class CategoryAdmin(admin.ModelAdmin):
list_display = ('category', 'virtual_packages_count', 'packages_count',
'ebuilds_count', 'repositories_count')
search_fields = ('category', 'description')
admin.site.register(EbuildModel, EbuildAdmin)
admin.site.register(VirtualPackageModel, VirtualPackageAdmin)
admin.site.register(PackageModel, PackageAdmin)
admin.site.register(LicenseModel, LicenseAdmin)
admin.site.register(LicenseGroupModel)
admin.site.register(CategoryModel, CategoryAdmin)
admin.site.register(UseFlagModel, UseFlagAdmin)
admin.site.register(UseFlagDescriptionModel, UseFlagDescriptionAdmin)
admin.site.register(RepositoryModel, RepositoryAdmin)
admin.site.register(RepositoryFeedModel, RepositoryFeedAdmin)
admin.site.register(RepositorySourceModel, RepositorySourceAdmin)
admin.site.register(HomepageModel, HomepageAdmin)
admin.site.register(HerdsModel, HerdsAdmin)
admin.site.register(MaintainerModel, MaintainerAdmin)
admin.site.register(ArchesModel, ArchesAdmin)
admin.site.register(PortageNewsModel, PortageNewsAdmin)
|
GRID_SIZE = (160, 120)
GRID_SQUARE_SIZE = (4, 4)
ant_image_filename = "ant.png"
ITERATIONS = 10
import pygame
from pygame.locals import *
class AntGrid(object):
def __init__(self, width, height):
self.width = width
self.height = height
self.clear()
def clear(self):
self.rows = []
for col_no in xrange(self.height):
new_row = []
self.rows.append(new_row)
for row_no in xrange(self.width):
new_row.append(False)
def swap(self, x, y):
self.rows[y][x] = not self.rows[y][x]
def get(self, x, y):
return self.rows[y][x]
def render(self, surface, colors, square_size):
w, h = square_size
surface.fill(colors[0])
for y, row in enumerate(self.rows):
rect_y = y * h
for x, state in enumerate(row):
if state:
surface.fill(colors[1], (x * w, rect_y, w, h))
class Ant(object):
directions = ( (0,-1), (+1,0), (0,+1), (-1,0) )
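    # Langton's-ant-style rule (see move below): flip the current cell, step
    # forward with wraparound, then turn left if the destination cell is filled
    # and right otherwise. Direction indices: 0=up, 1=right, 2=down, 3=left.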
def __init__(self, grid, x, y, image, direction=1):
self.grid = grid
self.x = x
self.y = y
self.image = image
self.direction = direction
def move(self):
self.grid.swap(self.x, self.y)
self.x = ( self.x + Ant.directions[self.direction][0] ) % self.grid.width
self.y = ( self.y + Ant.directions[self.direction][1] ) % self.grid.height
if self.grid.get(self.x, self.y):
self.direction = (self.direction-1) % 4
else:
self.direction = (self.direction+1) % 4
def render(self, surface, grid_size):
grid_w, grid_h = grid_size
ant_w, ant_h = self.image.get_size()
render_x = self.x * grid_w - ant_w / 2
render_y = self.y * grid_h - ant_h / 2
surface.blit(self.image, (render_x, render_y))
def run():
pygame.init()
w = GRID_SIZE[0] * GRID_SQUARE_SIZE[0]
h = GRID_SIZE[1] * GRID_SQUARE_SIZE[1]
screen = pygame.display.set_mode((w, h), 0, 32)
ant_image = pygame.image.load(ant_image_filename).convert_alpha()
default_font = pygame.font.get_default_font()
font = pygame.font.SysFont(default_font, 22)
ants = []
grid = AntGrid(*GRID_SIZE)
running = False
total_iterations = 0
while True:
for event in pygame.event.get():
if event.type == QUIT:
return
if event.type == MOUSEBUTTONDOWN:
x, y = event.pos
x /= GRID_SQUARE_SIZE[0]
y /= GRID_SQUARE_SIZE[1]
ant = Ant(grid, int(x), int(y), ant_image)
ants.append(ant)
if event.type == KEYDOWN:
if event.key == K_SPACE:
running = not running
if event.key == K_c:
grid.clear()
total_iterations = 0
del ants[:]
grid.render(screen, ((255, 255, 255), (0, 128, 0)), GRID_SQUARE_SIZE)
if running:
for iteration_no in xrange(ITERATIONS):
for ant in ants:
ant.move()
total_iterations += ITERATIONS
txt = "%i iterations"%total_iterations
txt_surface = font.render(txt, True, (0, 0, 0))
screen.blit(txt_surface, (0, 0))
for ant in ants:
ant.render(screen, GRID_SQUARE_SIZE)
pygame.display.update()
if __name__ == "__main__":
run()
|
"""
***************************************************************************
ProcessingToolbox.py
---------------------
Date : August 2012
Copyright : (C) 2012 by Victor Olaya
Email : volayaf at gmail dot com
***************************************************************************
* *
* This program is free software; you can redistribute it and/or modify *
* it under the terms of the GNU General Public License as published by *
* the Free Software Foundation; either version 2 of the License, or *
* (at your option) any later version. *
* *
***************************************************************************
"""
__author__ = 'Victor Olaya'
__date__ = 'August 2012'
__copyright__ = '(C) 2012, Victor Olaya'
__revision__ = '$Format:%H$'
import os
from PyQt4 import uic
from PyQt4.QtCore import Qt, QSettings, QCoreApplication
from PyQt4.QtGui import QMenu, QAction, QTreeWidgetItem
from qgis.utils import iface
from processing.modeler.ModelerUtils import ModelerUtils
from processing.core.Processing import Processing
from processing.core.ProcessingLog import ProcessingLog
from processing.core.ProcessingConfig import ProcessingConfig
from processing.core.GeoAlgorithm import GeoAlgorithm
from processing.gui.MessageDialog import MessageDialog
from processing.gui import AlgorithmClassification
from processing.gui.AlgorithmDialog import AlgorithmDialog
from processing.gui.BatchAlgorithmDialog import BatchAlgorithmDialog
from processing.gui.EditRenderingStylesDialog import EditRenderingStylesDialog
pluginPath = os.path.split(os.path.dirname(__file__))[0]
WIDGET, BASE = uic.loadUiType(
os.path.join(pluginPath, 'ui', 'ProcessingToolbox.ui'))
class ProcessingToolbox(BASE, WIDGET):
USE_CATEGORIES = '/Processing/UseSimplifiedInterface'
updateAlgList = True
def __init__(self):
super(ProcessingToolbox, self).__init__(None)
self.setupUi(self)
self.setAllowedAreas(Qt.LeftDockWidgetArea | Qt.RightDockWidgetArea)
self.modeComboBox.clear()
self.modeComboBox.addItems([self.tr('Simplified interface'),
self.tr('Advanced interface')])
settings = QSettings()
if not settings.contains(self.USE_CATEGORIES):
settings.setValue(self.USE_CATEGORIES, True)
useCategories = settings.value(self.USE_CATEGORIES, type=bool)
if useCategories:
self.modeComboBox.setCurrentIndex(0)
else:
self.modeComboBox.setCurrentIndex(1)
self.modeComboBox.currentIndexChanged.connect(self.modeHasChanged)
self.searchBox.textChanged.connect(self.textChanged)
self.algorithmTree.customContextMenuRequested.connect(
self.showPopupMenu)
self.algorithmTree.doubleClicked.connect(self.executeAlgorithm)
if hasattr(self.searchBox, 'setPlaceholderText'):
self.searchBox.setPlaceholderText(self.tr('Search...'))
self.fillTree()
def textChanged(self):
text = self.searchBox.text().strip(' ').lower()
self._filterItem(self.algorithmTree.invisibleRootItem(), text)
if text:
self.algorithmTree.expandAll()
else:
self.algorithmTree.collapseAll()
self.algorithmTree.invisibleRootItem().child(0).setExpanded(True)
def _filterItem(self, item, text):
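        # Recursively apply the search filter: a branch stays visible if any
        # descendant matches, leaf items match on their display text, and
        # anything else is hidden.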
if (item.childCount() > 0):
show = False
for i in xrange(item.childCount()):
child = item.child(i)
showChild = self._filterItem(child, text)
show = showChild or show
item.setHidden(not show)
return show
elif isinstance(item, (TreeAlgorithmItem, TreeActionItem)):
hide = bool(text) and (text not in item.text(0).lower())
item.setHidden(hide)
return not hide
else:
item.setHidden(True)
return False
def modeHasChanged(self):
idx = self.modeComboBox.currentIndex()
settings = QSettings()
if idx == 0:
# Simplified
settings.setValue(self.USE_CATEGORIES, True)
else:
settings.setValue(self.USE_CATEGORIES, False)
self.fillTree()
def algsListHasChanged(self):
if self.updateAlgList:
self.fillTree()
def updateProvider(self, providerName, updateAlgsList=True):
if updateAlgsList:
self.updateAlgList = False
Processing.updateAlgsList()
self.updateAlgList = True
for i in xrange(self.algorithmTree.invisibleRootItem().childCount()):
child = self.algorithmTree.invisibleRootItem().child(i)
if isinstance(child, TreeProviderItem):
if child.providerName == providerName:
child.refresh()
# sort categories and items in categories
child.sortChildren(0, Qt.AscendingOrder)
for i in xrange(child.childCount()):
child.child(i).sortChildren(0, Qt.AscendingOrder)
break
self.addRecentAlgorithms(True)
def showPopupMenu(self, point):
item = self.algorithmTree.itemAt(point)
if isinstance(item, TreeAlgorithmItem):
alg = item.alg
popupmenu = QMenu()
executeAction = QAction(self.tr('Execute'), self.algorithmTree)
executeAction.triggered.connect(self.executeAlgorithm)
popupmenu.addAction(executeAction)
if alg.canRunInBatchMode and not alg.allowOnlyOpenedLayers:
executeBatchAction = QAction(
self.tr('Execute as batch process'),
self.algorithmTree)
executeBatchAction.triggered.connect(
self.executeAlgorithmAsBatchProcess)
popupmenu.addAction(executeBatchAction)
popupmenu.addSeparator()
editRenderingStylesAction = QAction(
self.tr('Edit rendering styles for outputs'),
self.algorithmTree)
editRenderingStylesAction.triggered.connect(
self.editRenderingStyles)
popupmenu.addAction(editRenderingStylesAction)
actions = Processing.contextMenuActions
if len(actions) > 0:
popupmenu.addSeparator()
for action in actions:
action.setData(alg, self)
if action.isEnabled():
contextMenuAction = QAction(action.name,
self.algorithmTree)
contextMenuAction.triggered.connect(action.execute)
popupmenu.addAction(contextMenuAction)
popupmenu.exec_(self.algorithmTree.mapToGlobal(point))
def editRenderingStyles(self):
item = self.algorithmTree.currentItem()
if isinstance(item, TreeAlgorithmItem):
alg = Processing.getAlgorithm(item.alg.commandLineName())
dlg = EditRenderingStylesDialog(alg)
dlg.exec_()
def executeAlgorithmAsBatchProcess(self):
item = self.algorithmTree.currentItem()
if isinstance(item, TreeAlgorithmItem):
alg = Processing.getAlgorithm(item.alg.commandLineName())
alg = alg.getCopy()
dlg = BatchAlgorithmDialog(alg)
dlg.show()
dlg.exec_()
def executeAlgorithm(self):
item = self.algorithmTree.currentItem()
if isinstance(item, TreeAlgorithmItem):
alg = Processing.getAlgorithm(item.alg.commandLineName())
message = alg.checkBeforeOpeningParametersDialog()
if message:
dlg = MessageDialog()
dlg.setTitle(self.tr('Missing dependency'))
dlg.setMessage(
self.tr('<h3>Missing dependency. This algorithm cannot '
'be run :-( </h3>\n%s') % message)
dlg.exec_()
return
alg = alg.getCopy()
dlg = alg.getCustomParametersDialog()
if not dlg:
dlg = AlgorithmDialog(alg)
canvas = iface.mapCanvas()
prevMapTool = canvas.mapTool()
dlg.show()
dlg.exec_()
if canvas.mapTool() != prevMapTool:
try:
canvas.mapTool().reset()
except:
pass
canvas.setMapTool(prevMapTool)
if dlg.executed:
showRecent = ProcessingConfig.getSetting(
ProcessingConfig.SHOW_RECENT_ALGORITHMS)
if showRecent:
self.addRecentAlgorithms(True)
if isinstance(item, TreeActionItem):
action = item.action
action.setData(self)
action.execute()
def fillTree(self):
settings = QSettings()
useCategories = settings.value(self.USE_CATEGORIES, type=bool)
if useCategories:
self.fillTreeUsingCategories()
else:
self.fillTreeUsingProviders()
self.algorithmTree.sortItems(0, Qt.AscendingOrder)
self.addRecentAlgorithms(False)
def addRecentAlgorithms(self, updating):
showRecent = ProcessingConfig.getSetting(
ProcessingConfig.SHOW_RECENT_ALGORITHMS)
if showRecent:
recent = ProcessingLog.getRecentAlgorithms()
if len(recent) != 0:
found = False
if updating:
recentItem = self.algorithmTree.topLevelItem(0)
treeWidget = recentItem.treeWidget()
treeWidget.takeTopLevelItem(
treeWidget.indexOfTopLevelItem(recentItem))
recentItem = QTreeWidgetItem()
recentItem.setText(0, self.tr('Recently used algorithms'))
for algname in recent:
alg = Processing.getAlgorithm(algname)
if alg is not None:
algItem = TreeAlgorithmItem(alg)
recentItem.addChild(algItem)
found = True
if found:
self.algorithmTree.insertTopLevelItem(0, recentItem)
recentItem.setExpanded(True)
self.algorithmTree.setWordWrap(True)
def fillTreeUsingCategories(self):
providersToExclude = ['model', 'script']
self.algorithmTree.clear()
text = unicode(self.searchBox.text())
groups = {}
for providerName in Processing.algs.keys():
provider = Processing.algs[providerName]
name = 'ACTIVATE_' + providerName.upper().replace(' ', '_')
if not ProcessingConfig.getSetting(name):
continue
if providerName in providersToExclude or \
len(ModelerUtils.providers[providerName].actions) != 0:
continue
algs = provider.values()
# add algorithms
for alg in algs:
if not alg.showInToolbox:
continue
altgroup, altsubgroup = AlgorithmClassification.getClassification(alg)
if altgroup is None:
continue
algName = AlgorithmClassification.getDisplayName(alg)
if text == '' or text.lower() in algName.lower():
if altgroup not in groups:
groups[altgroup] = {}
group = groups[altgroup]
if altsubgroup not in group:
groups[altgroup][altsubgroup] = []
subgroup = groups[altgroup][altsubgroup]
subgroup.append(alg)
if len(groups) > 0:
mainItem = QTreeWidgetItem()
mainItem.setText(0, self.tr('Geoalgorithms'))
mainItem.setIcon(0, GeoAlgorithm.getDefaultIcon())
mainItem.setToolTip(0, mainItem.text(0))
for (groupname, group) in groups.items():
groupItem = QTreeWidgetItem()
groupItem.setText(0, groupname)
groupItem.setIcon(0, GeoAlgorithm.getDefaultIcon())
groupItem.setToolTip(0, groupItem.text(0))
mainItem.addChild(groupItem)
for (subgroupname, subgroup) in group.items():
subgroupItem = QTreeWidgetItem()
subgroupItem.setText(0, subgroupname)
subgroupItem.setIcon(0, GeoAlgorithm.getDefaultIcon())
subgroupItem.setToolTip(0, subgroupItem.text(0))
groupItem.addChild(subgroupItem)
for alg in subgroup:
algItem = TreeAlgorithmItem(alg)
subgroupItem.addChild(algItem)
self.algorithmTree.addTopLevelItem(mainItem)
for providerName in Processing.algs.keys():
if providerName not in providersToExclude:
continue
name = 'ACTIVATE_' + providerName.upper().replace(' ', '_')
if not ProcessingConfig.getSetting(name):
continue
providerItem = TreeProviderItem(providerName)
self.algorithmTree.addTopLevelItem(providerItem)
def fillTreeUsingProviders(self):
self.algorithmTree.clear()
for providerName in Processing.algs.keys():
name = 'ACTIVATE_' + providerName.upper().replace(' ', '_')
if not ProcessingConfig.getSetting(name):
continue
providerItem = TreeProviderItem(providerName)
self.algorithmTree.addTopLevelItem(providerItem)
providerItem.setHidden(providerItem.childCount() == 0)
class TreeAlgorithmItem(QTreeWidgetItem):
def __init__(self, alg):
settings = QSettings()
useCategories = settings.value(ProcessingToolbox.USE_CATEGORIES,
type=bool)
QTreeWidgetItem.__init__(self)
self.alg = alg
icon = alg.getIcon()
if useCategories:
icon = GeoAlgorithm.getDefaultIcon()
name = AlgorithmClassification.getDisplayName(alg)
self.setIcon(0, icon)
self.setToolTip(0, name)
self.setText(0, name)
class TreeActionItem(QTreeWidgetItem):
def __init__(self, action):
QTreeWidgetItem.__init__(self)
self.action = action
self.setText(0, action.name)
self.setIcon(0, action.getIcon())
class TreeProviderItem(QTreeWidgetItem):
def __init__(self, providerName):
QTreeWidgetItem.__init__(self)
self.providerName = providerName
self.provider = Processing.getProviderFromName(providerName)
self.setIcon(0, self.provider.getIcon())
self.populate()
def refresh(self):
self.takeChildren()
self.populate()
def populate(self):
groups = {}
count = 0
provider = Processing.algs[self.providerName]
algs = provider.values()
# Add algorithms
for alg in algs:
if not alg.showInToolbox:
continue
if alg.group in groups:
groupItem = groups[alg.group]
else:
groupItem = QTreeWidgetItem()
name = alg.i18n_group or alg.group
groupItem.setText(0, name)
groupItem.setToolTip(0, name)
groups[alg.group] = groupItem
algItem = TreeAlgorithmItem(alg)
groupItem.addChild(algItem)
count += 1
actions = Processing.actions[self.providerName]
for action in actions:
if action.group in groups:
groupItem = groups[action.group]
else:
groupItem = QTreeWidgetItem()
groupItem.setText(0, action.group)
groups[action.group] = groupItem
algItem = TreeActionItem(action)
groupItem.addChild(algItem)
self.setText(0, self.provider.getDescription()
+ QCoreApplication.translate("TreeProviderItem", " [{0} geoalgorithms]").format(count))
self.setToolTip(0, self.text(0))
for groupItem in groups.values():
self.addChild(groupItem)
|
"""
test_broot
----------------------------------
Tests for `broot` module.
"""
import unittest
from broot import broot
class TestBroot(unittest.TestCase):
def setUp(self):
pass
def test_something(self):
pass
def tearDown(self):
pass
if __name__ == '__main__':
unittest.main()
|
"""
Class for IQ Data
GNU Radio simple binary format reader
Xaratustrah Aug-2018
"""
import numpy as np
import time
import os
from iqtools.iqbase import IQBase
class GRData(IQBase):
def __init__(self, filename, fs, center=0, date_time=""):
super().__init__(filename)
# Additional fields in this subclass
self.date_time = date_time
self.center = center
        # each complex64 sample is 8 bytes on disk; use integer division
        self.nsamples_total = os.path.getsize(filename) // 8
def read(self, nframes=10, lframes=1024, sframes=0):
self.read_samples(nframes * lframes, offset=sframes * lframes)
    def read_samples(self, nsamples, offset=0):
        # Sketch assuming IQBase exposes self.filename / self.data_array:
        # GNU Radio sinks write raw interleaved complex64, 8 bytes a sample.
        with open(self.filename, 'rb') as f:
            f.seek(offset * 8)
            self.data_array = np.fromfile(f, dtype=np.complex64, count=nsamples)
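# Minimal usage sketch (hypothetical file name and rates): read the first
# 10 frames of 1024 samples from a GNU Radio capture.
#
#     iq = GRData('capture.bin', fs=1e6, center=100e6)
#     iq.read(nframes=10, lframes=1024)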
|
from __future__ import unicode_literals
from django.db import models, migrations
class Migration(migrations.Migration):
dependencies = [
]
operations = [
migrations.CreateModel(
name='Software',
fields=[
('ID', models.CharField(max_length=50, serialize=False, primary_key=True)),
('Name', models.CharField(max_length=255, null=True, blank=True)),
('Category', models.CharField(max_length=255, null=True, blank=True)),
('Language', models.CharField(max_length=50, null=True, blank=True)),
('License', models.CharField(max_length=50, null=True, blank=True)),
('Platform', models.CharField(max_length=50, null=True, blank=True)),
('Size', models.CharField(max_length=255, null=True, blank=True)),
('Initial_Date', models.CharField(max_length=50, null=True, blank=True)),
('Final_Date', models.CharField(max_length=50, null=True, blank=True)),
('Value', models.CharField(max_length=50, null=True, blank=True)),
],
),
]
|
from pyqtgraph import functions as fn
from lib.flowchart.nodes.generalNode import NodeWithCtrlWidget, NodeCtrlWidget
class pipeNode(NodeWithCtrlWidget):
"""Transmits the data further without processing"""
nodeName = "Pipe"
uiTemplate = [
{'title': 'Close Pipe', 'name': 'closed', 'type': 'bool', 'value': False, 'tip': 'If Checked -- close pipe, and do not transmit data further'}]
def __init__(self, name, parent=None):
terms = {'In': {'io': 'in'}, 'Out': {'io': 'out'}}
super(pipeNode, self).__init__(name, parent=parent, terminals=terms, color=(95, 66, 94, 100))
self.opened_color = (95, 66, 94, 100)
self.closed_color = (255, 0, 0, 100)
def _createCtrlWidget(self, **kwargs):
return pipeNodeCtrlWidget(**kwargs)
def process(self, In):
kwargs = self.CW().prepareInputArguments()
if kwargs['closed']:
self.graphicsItem().setBrush(fn.mkBrush(self.closed_color))
return {'Out': None}
else:
self.graphicsItem().setBrush(fn.mkBrush(self.opened_color))
return {'Out': In}
class pipeNodeCtrlWidget(NodeCtrlWidget):
def __init__(self, **kwargs):
super(pipeNodeCtrlWidget, self).__init__(update_on_statechange=True, **kwargs)
def prepareInputArguments(self):
kwargs = dict()
kwargs['closed'] = self.p['closed']
return kwargs
|
from __future__ import absolute_import
import ctypes
import getpass
import os
import sys
from bindings import tracing
from edenscm.mercurial import blackbox, encoding, json, progress, pycompat, util
from edenscm.mercurial.node import hex
from .. import pywatchman
from ..pywatchman import compat
def createclientforrepo(repo):
"""Creates a Watchman client and associates it with the repo if it does
not already have one. Note that creating the client may raise an exception.
To get the client associated with the repo, use getclientforrepo()."""
if not util.safehasattr(repo, "_watchmanclient"):
repo._watchmanclient = client(repo)
def getclientforrepo(repo):
"""Returns the Watchman client associated with the repo or None.
    createclientforrepo() must have been called previously to create the
client."""
if util.safehasattr(repo, "_watchmanclient"):
return repo._watchmanclient
else:
return None
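# Usage sketch (hypothetical call site): create the client once per repo,
# then fetch it wherever it is needed.
#
#     createclientforrepo(repo)
#     c = getclientforrepo(repo)
#     if c is not None:
#         c.command("clock")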
class Unavailable(Exception):
def __init__(self, msg, warn=True, invalidate=False):
self.msg = msg
self.warn = warn
if self.msg == "timed out waiting for response":
self.warn = False
self.invalidate = invalidate
def __str__(self):
if self.warn:
return "warning: Watchman unavailable: %s" % self.msg
else:
return "Watchman unavailable: %s" % self.msg
class WatchmanNoRoot(Unavailable):
def __init__(self, root, msg):
self.root = root
super(WatchmanNoRoot, self).__init__(msg)
class client(object):
def __init__(self, repo, timeout=1.0):
err = None
if not self._user:
err = "couldn't get user"
warn = True
if self._user in repo.ui.configlist("fsmonitor", "blacklistusers"):
err = "user %s in blacklist" % self._user
warn = False
if err:
raise Unavailable(err, warn)
self._sockpath = None
# When spawned indirectly by watchman, or the watchman/eden integration
# tests, the appropriate sockpath is passed down to us via the environment
# and must take precedence over other configuration
sockpath = encoding.environ.get("WATCHMAN_SOCK", None)
if sockpath is None:
sockpath = repo.ui.config("fsmonitor", "sockpath")
if sockpath and self._user:
sockpath = sockpath.replace("%i", self._user)
repo.ui.debug("watchman sockpath is set as %s\n" % sockpath)
if sockpath:
if os.path.exists(sockpath):
self._sockpath = sockpath
self._transport = None
if repo.ui.configbool("fsmonitor", "tcp", False):
self._transport = "tcp"
self._tcp_host = repo.ui.config("fsmonitor", "tcp-host", "::1")
self._tcp_port = repo.ui.configint("fsmonitor", "tcp-port", 12300)
self._timeout = timeout
self._watchmanclient = None
self._root = repo.root
self._resolved_root = getcanonicalpath(self._root)
self._ui = repo.ui
self._firsttime = True
def settimeout(self, timeout):
self._timeout = timeout
if self._watchmanclient is not None:
self._watchmanclient.setTimeout(timeout)
def getcurrentclock(self):
result = self.command("clock")
if not util.safehasattr(result, "clock"):
raise Unavailable("clock result is missing clock value", invalidate=True)
return result.clock
def clearconnection(self):
self._watchmanclient = None
def available(self):
return self._watchmanclient is not None or self._firsttime
@util.propertycache
def _user(self):
try:
return getpass.getuser()
except KeyError:
# couldn't figure out our user
return None
def _command(self, *args):
with util.traced("watchman-command", args=json.dumps(args[1:])) as span:
return self._retrycommand(span, 0, *args)
def _retrycommand(self, span, retry, *args):
if retry > 0:
span.record(retry=retry)
watchmanargs = (args[0], self._resolved_root) + args[1:]
error = None
needretry = False
starttime = util.timer()
try:
if self._watchmanclient is None:
                if compat.PYTHON3:
                    bser_encoding = "bser"
                else:
                    bser_encoding = "bser-v1"
self._firsttime = False
self._watchmanclient = pywatchman.client(
sockpath=self._sockpath,
transport=self._transport,
tcpAddress=(self._tcp_host, self._tcp_port),
timeout=self._timeout,
                    recvEncoding=bser_encoding,
                    sendEncoding=bser_encoding,
useImmutableBser=True,
)
return self._watchmanclient.query(*watchmanargs)
except pywatchman.CommandError as ex:
error = ex.msg
span.record(error=ex.msg)
if "unable to resolve root" in ex.msg:
raise WatchmanNoRoot(self._resolved_root, ex.msg)
raise Unavailable(ex.msg)
except pywatchman.SocketConnectError as ex:
error = str(ex)
# If fsmonitor.sockpath was specified in the configuration, we will
# have skipped running `watchman get-sockname` which has the
# consequence of not starting the watchman server up if it happens
# to have been stopped.
# Rather than just throwing up our hands in that situation, let's
# clear the pre-configured sockpath so that the client will probe
# and start it up.
if not self._ui.config("fsmonitor", "sockpath") or self._sockpath is None:
span.record(error=error)
# Either sockpath wasn't configured, or we already tried clearing
# it out, so let's propagate this error.
raise Unavailable(str(ex))
# Recurse and retry the command, and hopefully it will
# start the server this time.
self._sockpath = None
self._watchmanclient = None
needretry = True
except pywatchman.WatchmanError as ex:
error = str(ex)
span.record(error=error)
raise Unavailable(str(ex))
finally:
event = {
"watchman": {
"args": args,
"duration_ms": int((util.timer() - starttime) * 1000),
}
}
if error is not None:
event["watchman"]["result"] = {"error": error}
blackbox.log(event)
if needretry:
return self._retrycommand(span, retry + 1, *args)
@util.timefunction("watchmanquery", 0, "_ui")
def command(self, *args, **kwargs):
ignoreerrors = kwargs.get("ignoreerrors", False)
with progress.spinner(self._ui, "querying watchman"):
try:
try:
return self._command(*args)
except pywatchman.UseAfterFork:
# Ideally we wouldn't let this happen, but if it does happen,
# record it in the log and retry the command.
blackbox.log(
{
"debug": {
"value": "fork detected. re-connect to watchman socket"
}
}
)
self._watchmanclient = None
return self._command(*args)
except WatchmanNoRoot:
# this 'watch' command can also raise a WatchmanNoRoot if
# watchman refuses to accept this root
self._command("watch")
return self._command(*args)
except Unavailable:
                # this is in an outer scope to catch Unavailable from any of
                # the above _command calls
if not ignoreerrors:
self._watchmanclient = None
raise
def calcdistance(repo, oldnode, newnode):
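    """Return the number of commits separating oldnode and newnode,
    measured via their common ancestor."""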
anc = repo.changelog.ancestor(oldnode, newnode)
ancrev = repo[anc].rev()
distance = abs(repo[oldnode].rev() - ancrev) + abs(repo[newnode].rev() - ancrev)
return distance
class state_update(object):
"""This context manager is responsible for dispatching the state-enter
and state-leave signals to the watchman service. The enter and leave
methods can be invoked manually (for scenarios where context manager
semantics are not possible). If parameters oldnode and newnode are None,
they will be populated based on current working copy in enter and
    leave, respectively. Similarly, if the distance is None, it will be
calculated based on the oldnode and newnode in the leave method."""
def __init__(
self,
repo,
name,
oldnode=None,
newnode=None,
distance=None,
partial=False,
metadata=None,
):
self.repo = repo
self.name = name
self.oldnode = oldnode
self.newnode = newnode
self.distance = distance
self.partial = partial
self._lock = None
self.need_leave = False
self.metadata = metadata or {}
def __enter__(self):
self.enter()
def enter(self):
# Make sure we have a wlock prior to sending notifications to watchman.
# We don't want to race with other actors. In the update case,
# merge.update is going to take the wlock almost immediately. We are
# effectively extending the lock around several short sanity checks.
if self.oldnode is None:
self.oldnode = self.repo["."].node()
if self.repo.currentwlock() is None:
if util.safehasattr(self.repo, "wlocknostateupdate"):
self._lock = self.repo.wlocknostateupdate()
else:
self._lock = self.repo.wlock()
self.need_leave = self._state("state-enter", hex(self.oldnode))
return self
def __exit__(self, type_, value, tb):
abort = True if type_ else False
self.exit(abort=abort)
def exit(self, abort=False):
try:
if self.need_leave:
status = "failed" if abort else "ok"
if self.newnode is None:
self.newnode = self.repo["."].node()
if self.distance is None:
try:
self.distance = calcdistance(
self.repo, self.oldnode, self.newnode
)
except Exception:
# this happens in complex cases where oldnode
# or newnode might become unavailable.
pass
self._state("state-leave", hex(self.newnode), status=status)
finally:
self.need_leave = False
if self._lock:
self._lock.release()
def _state(self, cmd, commithash, status="ok"):
client = getclientforrepo(self.repo)
if not client:
return False
try:
metadata = {
# the target revision
"rev": commithash,
# approximate number of commits between current and target
"distance": self.distance if self.distance else 0,
# success/failure (only really meaningful for state-leave)
"status": status,
# whether the working copy parent is changing
"partial": self.partial,
}
metadata.update(self.metadata)
client.command(
                # ignoreerrors=True prevents the client from invalidating its
# watchman client in the event of an error. It will still throw
# an exception though.
cmd,
{"name": self.name, "metadata": metadata},
ignoreerrors=True,
)
return True
except Exception as ex:
# Swallow any errors; fire and forget
exctype = sys.exc_info()[0]
exctypename = "None" if exctype is None else exctype.__name__
self.repo.ui.log(
"hgerrors",
"watchman '%s' event has failed: %s",
cmd,
str(ex),
exception_type=exctypename,
)
return False
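# Usage sketch (hypothetical node variables): wrap a working-copy mutation
# so watchman receives matching state-enter/state-leave events.
#
#     with state_update(repo, "hg.update", oldnode=old, newnode=new):
#         ...  # change the working copy here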
if pycompat.iswindows:
from ctypes.wintypes import HANDLE, DWORD
def openfilewin(path):
createfile = ctypes.windll.kernel32.CreateFileW
cpath = ctypes.create_unicode_buffer(path)
access = 0
mode = 7 # FILE_SHARE_DELETE | FILE_SHARE_READ | FILE_SHARE_WRITE
disposition = 3 # OPEN_EXISTING
flags = 33554432 # FILE_FLAG_BACKUP_SEMANTICS
createfile.restype = HANDLE
h = createfile(
cpath,
DWORD(access),
DWORD(mode),
None,
DWORD(disposition),
DWORD(flags),
HANDLE(0),
)
if h == HANDLE(-1).value:
raise WindowsError("Failed to open file: " + path)
return HANDLE(h)
def getcanonicalpath(name):
gfpnbh = ctypes.windll.kernel32.GetFinalPathNameByHandleW
closehandler = ctypes.windll.kernel32.CloseHandle
h = openfilewin(name)
try:
numwchars = 1024
while True:
buf = ctypes.create_unicode_buffer(numwchars)
result = gfpnbh(h, buf, DWORD(numwchars), DWORD(0))
if result == 0:
raise IOError("unknown error while normalizing path")
                # Strip the leading \\?\ prefix (four chars) and use
                # forward slashes throughout
if result <= numwchars:
path = buf.value[4:].replace("\\", "/")
if compat.PYTHON2:
path = path.encode("utf-8")
return path
# Not big enough; the result is the amount we need
numwchars = result + 1
finally:
closehandler(h)
elif pycompat.isdarwin:
import ctypes.util
F_GETPATH = 50
libc = ctypes.CDLL(ctypes.util.find_library("c"), use_errno=True)
getpathfcntl = libc.fcntl
getpathfcntl.argtypes = [ctypes.c_int, ctypes.c_int, ctypes.c_char_p]
getpathfcntl.restype = ctypes.c_int
def getcanonicalpath(name):
fd = os.open(name, os.O_RDONLY, 0)
try:
numchars = 1024 # MAXPATHLEN
# The kernel caps this routine to MAXPATHLEN, so there is no
# point in over-allocating or trying again with a larger buffer
buf = ctypes.create_string_buffer(numchars)
ctypes.set_errno(0)
result = getpathfcntl(fd, F_GETPATH, buf)
if result != 0:
raise OSError(ctypes.get_errno())
# buf is a bytes buffer, so normalize it if necessary
ret = buf.value
if isinstance(name, compat.UNICODE):
ret = os.fsdecode(ret)
return ret
finally:
os.close(fd)
else:
def getcanonicalpath(name):
return os.path.normpath(name)
|
"""The application toolbar, and its specialised widgets"""
from __future__ import division, print_function
import os
from gettext import gettext as _
from lib.gibindings import Gtk
from . import widgets
FRAMEWORK_XML = 'toolbar.xml'
MERGEABLE_XML = [
("toolbar1_file", 'toolbar-file.xml', _("File handling")),
("toolbar1_scrap", 'toolbar-scrap.xml', _("Scraps switcher")),
("toolbar1_edit", 'toolbar-edit.xml', _("Undo and Redo")),
("toolbar1_blendmodes", 'toolbar-blendmodes.xml', _("Blend Modes")),
("toolbar1_linemodes", 'toolbar-linemodes.xml', _("Line Modes")),
("toolbar1_view_modes", 'toolbar-view-modes.xml', _("View (Main)")),
("toolbar1_view_manips", 'toolbar-view-manips.xml',
_("View (Alternative/Secondary)")),
("toolbar1_view_resets", 'toolbar-view-resets.xml',
_("View (Resetting)")),
]
class ToolbarManager (object):
"""Manager for toolbars, currently just the main one.
The main toolbar, /toolbar1, contains a menu button and quick
access to the painting tools.
"""
def __init__(self, draw_window):
super(ToolbarManager, self).__init__()
self.draw_window = draw_window
self.app = draw_window.app
self.toolbar1_ui_loaded = {} # {name: merge_id, ...}
self.init_actions()
ui_dir = os.path.dirname(os.path.abspath(__file__))
toolbarpath = os.path.join(ui_dir, FRAMEWORK_XML)
self.app.ui_manager.add_ui_from_file(toolbarpath)
self.toolbar1 = self.app.ui_manager.get_widget('/toolbar1')
self.toolbar1.set_style(Gtk.ToolbarStyle.ICONS)
self.toolbar1.set_icon_size(widgets.get_toolbar_icon_size())
self.toolbar1.set_border_width(0)
self.toolbar1.set_show_arrow(True)
self.toolbar1.connect(
"popup-context-menu",
self.on_toolbar1_popup_context_menu
)
self.toolbar1_popup = self.app.ui_manager\
.get_widget('/toolbar1-settings-menu')
for item in self.toolbar1:
if isinstance(item, Gtk.SeparatorToolItem):
item.set_draw(False)
self.toolbar2 = self.app.ui_manager.get_widget('/toolbar2')
self.toolbar2.set_style(Gtk.ToolbarStyle.ICONS)
self.toolbar2.set_icon_size(widgets.get_toolbar_icon_size())
self.toolbar2.set_border_width(0)
self.toolbar2.set_show_arrow(False)
for toolbar in (self.toolbar1, self.toolbar2):
styles = toolbar.get_style_context()
styles.add_class(Gtk.STYLE_CLASS_PRIMARY_TOOLBAR)
# Merge in UI pieces based on the user's saved preferences
for action in self.settings_actions:
name = action.get_property("name")
active = self.app.preferences["ui.toolbar_items"].get(name, False)
action.set_active(active)
action.toggled()
def init_actions(self):
ag = self.draw_window.action_group
actions = []
self.settings_actions = []
for name, ui_xml, label in MERGEABLE_XML:
action = Gtk.ToggleAction.new(name, label, None, None)
action.connect("toggled", self.on_settings_toggle, ui_xml)
self.settings_actions.append(action)
actions += self.settings_actions
for action in actions:
ag.add_action(action)
def on_toolbar1_popup_context_menu(self, toolbar, x, y, button):
menu = self.toolbar1_popup
def _posfunc(*a):
return x, y, True
time = Gtk.get_current_event_time()
menu.popup(None, None, _posfunc, None, button, time)
def on_settings_toggle(self, toggleaction, ui_xml_file):
name = toggleaction.get_property("name")
merge_id = self.toolbar1_ui_loaded.get(name, None)
if toggleaction.get_active():
self.app.preferences["ui.toolbar_items"][name] = True
if merge_id is not None:
return
ui_dir = os.path.dirname(os.path.abspath(__file__))
ui_xml_path = os.path.join(ui_dir, ui_xml_file)
merge_id = self.app.ui_manager.add_ui_from_file(ui_xml_path)
self.toolbar1_ui_loaded[name] = merge_id
else:
self.app.preferences["ui.toolbar_items"][name] = False
if merge_id is None:
return
self.app.ui_manager.remove_ui(merge_id)
self.toolbar1_ui_loaded.pop(name)
|
from django.db import migrations, models
class Migration(migrations.Migration):
dependencies = [
('releng', '0001_squashed_0005_auto_20180616_0947'),
]
operations = [
migrations.AlterField(
model_name='release',
name='last_modified',
field=models.DateTimeField(editable=False),
),
]
|
import paho.mqtt.client as mqtt
MQTT_SERVER = "iot.eclipse.org"
MQTT_PORT = 1883
MQTT_NAME_TOPIC = "spooplights"
MQTT_HEX_TOPIC = MQTT_NAME_TOPIC + "RGB"
# paho-mqtt 1.x passes the connection flags dict before the result code
def on_connect(client, userdata, flags, rc):
print("Connected with result code "+str(rc))
client.subscribe(MQTT_NAME_TOPIC)
client.subscribe(MQTT_HEX_TOPIC)
def on_message(client, userdata, msg):
print(msg.topic+" "+msg.payload.decode(encoding='UTF-8'))
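# Publishing sketch (hypothetical payload): another client can drive the
# lights by sending a hex colour string to the RGB topic, e.g.
#     client.publish(MQTT_HEX_TOPIC, "FF8800")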
def main():
client = mqtt.Client()
client.on_connect = on_connect
client.on_message = on_message
client.connect(MQTT_SERVER, MQTT_PORT, 60)
client.loop_forever()
if __name__ == '__main__':
main()
|
'''
Python mapping for the InputMethodKit framework.
This module does not contain docstrings for the wrapped code, check Apple's
documentation for details on how to use these functions and classes.
'''
import sys
import objc
import Foundation
from InputMethodKit import _metadata
from InputMethodKit._InputMethodKit import *
sys.modules['InputMethodKit'] = mod = objc.ObjCLazyModule('InputMethodKit',
"com.apple.InputMethodKit",
objc.pathForFramework("/System/Library/Frameworks/InputMethodKit.framework"),
_metadata.__dict__, None, {
'__doc__': __doc__,
'__path__': __path__,
'objc': objc,
}, ( Foundation,))
|
import os
import re
from MenuList import MenuList
from Components.Harddisk import harddiskmanager
from Tools.Directories import SCOPE_ACTIVE_SKIN, resolveFilename, fileExists, pathExists
from enigma import RT_HALIGN_LEFT, eListboxPythonMultiContent, \
eServiceReference, eServiceCenter, gFont, getDesktop
from Tools.LoadPixmap import LoadPixmap
EXTENSIONS = {
"m4a": "music",
"mp2": "music",
"mp3": "music",
"wav": "music",
"ogg": "music",
"wma": "music",
"flac": "music",
"jpg": "picture",
"jpeg": "picture",
"png": "picture",
"bmp": "picture",
"ts": "movie",
"avi": "movie",
"divx": "movie",
"m4v": "movie",
"mpg": "movie",
"mpeg": "movie",
"mkv": "movie",
"mp4": "movie",
"mov": "movie",
"m2ts": "movie",
"3gp": "movie",
"3g2": "movie",
"asf": "movie",
"wmv": "movie",
}
def FileEntryComponent(name, absolute = None, isDir = False):
screenwidth = getDesktop(0).size().width()
if screenwidth and screenwidth == 1920:
res = [(absolute, isDir), (eListboxPythonMultiContent.TYPE_TEXT, 90, 3, 470, 60, 1, RT_HALIGN_LEFT, name)]
else:
res = [(absolute, isDir), (eListboxPythonMultiContent.TYPE_TEXT, 35, 1, 470, 20, 0, RT_HALIGN_LEFT, name)]
if isDir:
png = LoadPixmap(cached=True, path=resolveFilename(SCOPE_ACTIVE_SKIN, "extensions/directory.png"))
else:
extension = name.split('.')
extension = extension[-1].lower()
        if extension in EXTENSIONS:
png = LoadPixmap(resolveFilename(SCOPE_ACTIVE_SKIN, "extensions/" + EXTENSIONS[extension] + ".png"))
else:
png = None
if png is not None:
if screenwidth and screenwidth == 1920:
res.append((eListboxPythonMultiContent.TYPE_PIXMAP_ALPHABLEND, 10, 5, 50, 50, png))
else:
res.append((eListboxPythonMultiContent.TYPE_PIXMAP_ALPHABLEND, 10, 2, 20, 20, png))
return res
class FileList(MenuList):
def __init__(self, directory, showDirectories = True, showFiles = True, showMountpoints = True, matchingPattern = None, useServiceRef = False, inhibitDirs = False, inhibitMounts = False, isTop = False, enableWrapAround = False, additionalExtensions = None):
MenuList.__init__(self, list, enableWrapAround, eListboxPythonMultiContent)
self.additional_extensions = additionalExtensions
self.mountpoints = []
self.current_directory = None
self.current_mountpoint = None
self.useServiceRef = useServiceRef
self.showDirectories = showDirectories
self.showMountpoints = showMountpoints
self.showFiles = showFiles
if isTop:
self.topDirectory = directory
else:
self.topDirectory = "/"
# example: matching .nfi and .ts files: "^.*\.(nfi|ts)"
if matchingPattern:
self.matchingPattern = re.compile(matchingPattern)
else:
self.matchingPattern = None
self.inhibitDirs = inhibitDirs or []
self.inhibitMounts = inhibitMounts or []
self.refreshMountpoints()
self.changeDir(directory)
self.l.setFont(0, gFont("Regular", 18))
self.l.setFont(1, gFont("Regular", 32))
self.l.setItemHeight(23)
self.serviceHandler = eServiceCenter.getInstance()
def refreshMountpoints(self):
self.mountpoints = [os.path.join(p.mountpoint, "") for p in harddiskmanager.getMountedPartitions()]
self.mountpoints.sort(reverse = True)
def getMountpoint(self, file):
file = os.path.join(os.path.realpath(file), "")
for m in self.mountpoints:
if file.startswith(m):
return m
return False
def getMountpointLink(self, file):
if os.path.realpath(file) == file:
return self.getMountpoint(file)
else:
if file[-1] == "/":
file = file[:-1]
mp = self.getMountpoint(file)
last = file
file = os.path.dirname(file)
while last != "/" and mp == self.getMountpoint(file):
last = file
file = os.path.dirname(file)
return os.path.join(last, "")
def getSelection(self):
if self.l.getCurrentSelection() is None:
return None
return self.l.getCurrentSelection()[0]
def getCurrentEvent(self):
l = self.l.getCurrentSelection()
        if not l or l[0][1]:
return None
else:
return self.serviceHandler.info(l[0][0]).getEvent(l[0][0])
def getFileList(self):
return self.list
def inParentDirs(self, dir, parents):
dir = os.path.realpath(dir)
for p in parents:
if dir.startswith(p):
return True
return False
def changeDir(self, directory, select = None):
self.list = []
# if we are just entering from the list of mount points:
if self.current_directory is None:
if directory and self.showMountpoints:
self.current_mountpoint = self.getMountpointLink(directory)
else:
self.current_mountpoint = None
self.current_directory = directory
directories = []
files = []
if directory is None and self.showMountpoints: # present available mountpoints
for p in harddiskmanager.getMountedPartitions():
path = os.path.join(p.mountpoint, "")
if path not in self.inhibitMounts and not self.inParentDirs(path, self.inhibitDirs):
self.list.append(FileEntryComponent(name = p.description, absolute = path, isDir = True))
files = [ ]
directories = [ ]
elif directory is None:
files = [ ]
directories = [ ]
elif self.useServiceRef:
# we should not use the 'eServiceReference(string)' constructor, because it doesn't allow ':' in the directoryname
root = eServiceReference(2, 0, directory)
if self.additional_extensions:
root.setName(self.additional_extensions)
serviceHandler = eServiceCenter.getInstance()
list = serviceHandler.list(root)
while 1:
s = list.getNext()
if not s.valid():
del list
break
if s.flags & s.mustDescent:
directories.append(s.getPath())
else:
files.append(s)
directories.sort()
files.sort()
else:
if fileExists(directory):
                try:
                    files = os.listdir(directory)
                except OSError:
                    files = []
files.sort()
tmpfiles = files[:]
for x in tmpfiles:
if os.path.isdir(directory + x):
directories.append(directory + x + "/")
files.remove(x)
if self.showDirectories:
if directory:
if self.showMountpoints and directory == self.current_mountpoint:
self.list.append(FileEntryComponent(name = "<" +_("List of storage devices") + ">", absolute = None, isDir = True))
elif (directory != self.topDirectory) and not (self.inhibitMounts and self.getMountpoint(directory) in self.inhibitMounts):
self.list.append(FileEntryComponent(name = "<" +_("Parent directory") + ">", absolute = '/'.join(directory.split('/')[:-2]) + '/', isDir = True))
for x in directories:
if not (self.inhibitMounts and self.getMountpoint(x) in self.inhibitMounts) and not self.inParentDirs(x, self.inhibitDirs):
name = x.split('/')[-2]
self.list.append(FileEntryComponent(name = name, absolute = x, isDir = True))
if self.showFiles:
for x in files:
if self.useServiceRef:
path = x.getPath()
name = path.split('/')[-1]
else:
path = directory + x
name = x
if (self.matchingPattern is None) or self.matchingPattern.search(path):
self.list.append(FileEntryComponent(name = name, absolute = x , isDir = False))
if self.showMountpoints and len(self.list) == 0:
self.list.append(FileEntryComponent(name = _("nothing connected"), absolute = None, isDir = False))
self.l.setList(self.list)
if select is not None:
i = 0
self.moveToIndex(0)
for x in self.list:
p = x[0][0]
if isinstance(p, eServiceReference):
p = p.getPath()
if p == select:
self.moveToIndex(i)
i += 1
def getCurrentDirectory(self):
return self.current_directory
def canDescent(self):
if self.getSelection() is None:
return False
return self.getSelection()[1]
def descent(self):
if self.getSelection() is None:
return
self.changeDir(self.getSelection()[0], select = self.current_directory)
def getFilename(self):
if self.getSelection() is None:
return None
x = self.getSelection()[0]
if isinstance(x, eServiceReference):
x = x.getPath()
return x
def getServiceRef(self):
if self.getSelection() is None:
return None
x = self.getSelection()[0]
if isinstance(x, eServiceReference):
return x
return None
def execBegin(self):
harddiskmanager.on_partition_list_change.append(self.partitionListChanged)
def execEnd(self):
harddiskmanager.on_partition_list_change.remove(self.partitionListChanged)
def refresh(self):
self.changeDir(self.current_directory, self.getFilename())
def partitionListChanged(self, action, device):
self.refreshMountpoints()
if self.current_directory is None:
self.refresh()
def MultiFileSelectEntryComponent(name, absolute = None, isDir = False, selected = False):
screenwidth = getDesktop(0).size().width()
if screenwidth and screenwidth == 1920:
res = [(absolute, isDir, selected, name), (eListboxPythonMultiContent.TYPE_TEXT, 150, 3, 470, 60, 1, RT_HALIGN_LEFT, name)]
else:
res = [(absolute, isDir, selected, name), (eListboxPythonMultiContent.TYPE_TEXT, 55, 1, 470, 20, 0, RT_HALIGN_LEFT, name)]
if isDir:
png = LoadPixmap(cached=True, path=resolveFilename(SCOPE_ACTIVE_SKIN, "extensions/directory.png"))
else:
extension = name.split('.')
extension = extension[-1].lower()
        if extension in EXTENSIONS:
png = LoadPixmap(resolveFilename(SCOPE_ACTIVE_SKIN, "extensions/" + EXTENSIONS[extension] + ".png"))
else:
png = None
if png is not None:
if screenwidth and screenwidth == 1920:
res.append((eListboxPythonMultiContent.TYPE_PIXMAP_ALPHABLEND, 80, 5, 50, 50, png))
else:
res.append((eListboxPythonMultiContent.TYPE_PIXMAP_ALPHABLEND, 30, 2, 20, 20, png))
if not name.startswith('<'):
if selected:
icon = LoadPixmap(cached=True, path=resolveFilename(SCOPE_ACTIVE_SKIN, "icons/lock_on.png"))
else:
icon = LoadPixmap(cached=True, path=resolveFilename(SCOPE_ACTIVE_SKIN, "icons/lock_off.png"))
if screenwidth and screenwidth == 1920:
res.append((eListboxPythonMultiContent.TYPE_PIXMAP_ALPHABLEND, 10, 5, 50, 50, icon))
else:
res.append((eListboxPythonMultiContent.TYPE_PIXMAP_ALPHABLEND, 2, 0, 25, 25, icon))
return res
class MultiFileSelectList(FileList):
def __init__(self, preselectedFiles, directory, showMountpoints = False, matchingPattern = None, showDirectories = True, showFiles = True, useServiceRef = False, inhibitDirs = False, inhibitMounts = False, isTop = False, enableWrapAround = False, additionalExtensions = None):
if preselectedFiles is None:
self.selectedFiles = []
else:
self.selectedFiles = preselectedFiles
FileList.__init__(self, directory, showMountpoints = showMountpoints, matchingPattern = matchingPattern, showDirectories = showDirectories, showFiles = showFiles, useServiceRef = useServiceRef, inhibitDirs = inhibitDirs, inhibitMounts = inhibitMounts, isTop = isTop, enableWrapAround = enableWrapAround, additionalExtensions = additionalExtensions)
self.changeDir(directory)
self.l.setItemHeight(25)
self.l.setFont(0, gFont("Regular", 20))
self.l.setFont(1, gFont("Regular", 32))
self.onSelectionChanged = [ ]
def selectionChanged(self):
for f in self.onSelectionChanged:
f()
def changeSelectionState(self):
if len(self.list):
idx = self.l.getCurrentSelectionIndex()
newList = self.list[:]
x = self.list[idx]
if not x[0][3].startswith('<'):
if x[0][1] is True:
realPathname = x[0][0]
else:
realPathname = self.current_directory + x[0][0]
if x[0][2]:
SelectState = False
                    try:
                        self.selectedFiles.remove(realPathname)
                    except ValueError:
                        try:
                            self.selectedFiles.remove(os.path.normpath(realPathname))
                        except ValueError:
                            print "Couldn't remove:", realPathname
else:
SelectState = True
if (realPathname not in self.selectedFiles) and (os.path.normpath(realPathname) not in self.selectedFiles):
self.selectedFiles.append(realPathname)
newList[idx] = MultiFileSelectEntryComponent(name = x[0][3], absolute = x[0][0], isDir = x[0][1], selected = SelectState)
self.list = newList
self.l.setList(self.list)
def getSelectedList(self):
selectedFilesExist = []
for x in self.selectedFiles:
if pathExists(x):
selectedFilesExist.append(x)
return selectedFilesExist
def changeDir(self, directory, select = None):
self.list = []
# if we are just entering from the list of mount points:
if self.current_directory is None:
if directory and self.showMountpoints:
self.current_mountpoint = self.getMountpointLink(directory)
else:
self.current_mountpoint = None
self.current_directory = directory
directories = []
files = []
if directory is None and self.showMountpoints: # present available mountpoints
for p in harddiskmanager.getMountedPartitions():
path = os.path.join(p.mountpoint, "")
if path not in self.inhibitMounts and not self.inParentDirs(path, self.inhibitDirs):
self.list.append(MultiFileSelectEntryComponent(name = p.description, absolute = path, isDir = True))
files = [ ]
directories = [ ]
elif directory is None:
files = [ ]
directories = [ ]
elif self.useServiceRef:
root = eServiceReference("2:0:1:0:0:0:0:0:0:0:" + directory)
if self.additional_extensions:
root.setName(self.additional_extensions)
serviceHandler = eServiceCenter.getInstance()
list = serviceHandler.list(root)
while 1:
s = list.getNext()
if not s.valid():
del list
break
if s.flags & s.mustDescent:
directories.append(s.getPath())
else:
files.append(s)
directories.sort()
files.sort()
else:
if fileExists(directory):
            try:
                files = os.listdir(directory)
            except OSError:
                files = []
files.sort()
tmpfiles = files[:]
for x in tmpfiles:
if os.path.isdir(directory + x):
directories.append(directory + x + "/")
files.remove(x)
if self.showDirectories:
if directory:
if self.showMountpoints and directory == self.current_mountpoint:
self.list.append(FileEntryComponent(name = "<" +_("List of storage devices") + ">", absolute = None, isDir = True))
elif (directory != self.topDirectory) and not (self.inhibitMounts and self.getMountpoint(directory) in self.inhibitMounts):
self.list.append(FileEntryComponent(name = "<" +_("Parent directory") + ">", absolute = '/'.join(directory.split('/')[:-2]) + '/', isDir = True))
for x in directories:
if not (self.inhibitMounts and self.getMountpoint(x) in self.inhibitMounts) and not self.inParentDirs(x, self.inhibitDirs):
name = x.split('/')[-2]
alreadySelected = (x in self.selectedFiles) or (os.path.normpath(x) in self.selectedFiles)
self.list.append(MultiFileSelectEntryComponent(name = name, absolute = x, isDir = True, selected = alreadySelected))
if self.showFiles:
for x in files:
if self.useServiceRef:
path = x.getPath()
name = path.split('/')[-1]
else:
path = directory + x
name = x
if (self.matchingPattern is None) or self.matchingPattern.search(path):
alreadySelected = False
for entry in self.selectedFiles:
#if os.path.basename(entry) == x:
if entry == path:
alreadySelected = True
self.list.append(MultiFileSelectEntryComponent(name = name, absolute = x , isDir = False, selected = alreadySelected))
self.l.setList(self.list)
if select is not None:
i = 0
self.moveToIndex(0)
for x in self.list:
p = x[0][0]
if isinstance(p, eServiceReference):
p = p.getPath()
if p == select:
self.moveToIndex(i)
i += 1
|
import os
import ycm_core
flags = [
'-Wall',
'-Wextra',
'-std=c99',
'-x',
'c',
'-DVGO_linux',
'-DVGA_amd64',
'-isystem',
'../BoostParts',
'-isystem',
'/System/Library/Frameworks/Python.framework/Headers',
'-isystem',
'../llvm/include',
'-isystem',
'../llvm/tools/clang/include',
'-I','.',
'-I','..',
'-I','../include',
'-I','../coregrind',
'-I','../VEX/pub',
'-I','../VEX/priv',
'-I','../inst/include/valgrind',
'-I','../../',
'-I','../../../../',
'-I','../../../../../'
]
compilation_database_folder = ''
if os.path.exists( compilation_database_folder ):
database = ycm_core.CompilationDatabase( compilation_database_folder )
else:
database = None
SOURCE_EXTENSIONS = [ '.cpp', '.cxx', '.cc', '.c', '.m', '.mm' ]
def DirectoryOfThisScript():
return os.path.dirname( os.path.abspath( __file__ ) )
def MakeRelativePathsInFlagsAbsolute( flags, working_directory ):
if not working_directory:
return list( flags )
new_flags = []
make_next_absolute = False
path_flags = [ '-isystem', '-I', '-iquote', '--sysroot=' ]
for flag in flags:
new_flag = flag
if make_next_absolute:
make_next_absolute = False
if not flag.startswith( '/' ):
new_flag = os.path.join( working_directory, flag )
for path_flag in path_flags:
if flag == path_flag:
make_next_absolute = True
break
if flag.startswith( path_flag ):
path = flag[ len( path_flag ): ]
new_flag = path_flag + os.path.join( working_directory, path )
break
if new_flag:
new_flags.append( new_flag )
return new_flags
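# For example, MakeRelativePathsInFlagsAbsolute( [ '-I', 'include' ], '/src' )
# returns [ '-I', '/src/include' ].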
def IsHeaderFile( filename ):
extension = os.path.splitext( filename )[ 1 ]
return extension in [ '.h', '.hxx', '.hpp', '.hh' ]
def GetCompilationInfoForFile( filename ):
# The compilation_commands.json file generated by CMake does not have entries
# for header files. So we do our best by asking the db for flags for a
# corresponding source file, if any. If one exists, the flags for that file
# should be good enough.
if IsHeaderFile( filename ):
basename = os.path.splitext( filename )[ 0 ]
for extension in SOURCE_EXTENSIONS:
replacement_file = basename + extension
if os.path.exists( replacement_file ):
compilation_info = database.GetCompilationInfoForFile(
replacement_file )
if compilation_info.compiler_flags_:
return compilation_info
return None
return database.GetCompilationInfoForFile( filename )
def FlagsForFile( filename, **kwargs ):
if database:
# Bear in mind that compilation_info.compiler_flags_ does NOT return a
# python list, but a "list-like" StringVec object
compilation_info = GetCompilationInfoForFile( filename )
if not compilation_info:
return None
final_flags = MakeRelativePathsInFlagsAbsolute(
compilation_info.compiler_flags_,
compilation_info.compiler_working_dir_ )
else:
relative_to = DirectoryOfThisScript()
final_flags = MakeRelativePathsInFlagsAbsolute( flags, relative_to )
return {
'flags': final_flags,
'do_cache': True
}
|
import random as rand
import rts_rm as rm
import rts_edf as edf
def rts_gen_task_set(n, U):
"""
Use the unifast algo to generate the task set given the total
number of tasks and total utilization
"""
task_set = [] # The task set will be populated by the function
time_period_min = 2
time_period_max = 15
    sumU = U  # the running sum of uniform random variables
    for i in range(1, n):  # i = 1, 2, ..., n-1
        # peel off one task's utilization from the remaining sum
        nextSumU = sumU * (rand.random() ** (1.0 / (n - i)))
        time_period = rand.randrange(time_period_min, time_period_max)
        task_util = sumU - nextSumU
        task_set.append((task_util, (task_util * time_period) / 100, time_period))
        sumU = nextSumU
    time_period = rand.randrange(time_period_min, time_period_max)
    task_util = sumU
    task_set.append((task_util, (task_util * time_period) / 100, time_period))
return task_set
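# Example sketch: rts_gen_task_set(5, 80) yields five (utilization,
# computation_time, period) tuples whose utilizations sum to 80; periods are
# drawn from [2, 15) and values vary per call.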
def rts_rm_stat():
#task_set = [(0,3,6), (0,2,8), (0,3,12)]
for task_util in range(70, 101, 1):
rm_feasible_tasks = 0
rm_pp_feasible_tasks = 0
rm_pp_only_feasible_tasks = 0
rm_pp_total_promotions = 0
total_tasks = 0
for i in range(0,100):
task_no = rand.randrange(5,21)
task_set = rts_gen_task_set(task_no, task_util)
if(rm.rts_rm_time_analysis(task_set)):
rm_feasible_tasks += 1
rm_pp_feasible_tasks += 1
else:
if(rm.rts_rm_pp_schedule(task_set, check_feasible=True, show=False)[0]):
                    rm_pp_total_promotions += rm.rts_rm_pp_schedule(task_set, show=False)[1]
rm_pp_feasible_tasks += 1
rm_pp_only_feasible_tasks += 1
total_tasks += 1
print "% =",task_util,",","RM SR =",(float(rm_feasible_tasks)/total_tasks)*100,"RM-PP SR =", (float(rm_pp_feasible_tasks)/total_tasks)*100, "RM-PP Superiority=", (float(rm_pp_only_feasible_tasks)/total_tasks)*100,"RM-PP Promotions=",float(rm_pp_total_promotions)/total_tasks
def show_menu():
print "\t\tRTS Simulator Menu (Choose from options below)"
print "\t\tTasks to be simulated (1)"
print "\t\tSpecific task set (2)"
print "\t\tGenerate Stats for RM and RM-PP (3)"
print "\t\tGenerate comparision between EDF, Enhanced-EDF and RM (6)"
print "\t\tGenerate comparision between EDF and RM with 50% mandatory task time (7)"
print "\t\tShow Help (4)"
print "\t\tExit (5)"
if __name__ == "__main__":
#task_no = 10
#task_util = 100
#task_set = rts_gen_task_set(task_no, task_util)
#rts_rm_stat();
#task_set = [(0,3,6), (0,2,8), (0,3,12)]
show_menu()
while 1:
try:
#x = int(raw_input())
op = int(input())
except:
continue
if(type(op) == int):
if (op == 1):
print "Enter number of tasks:"
task_no = input()
task_util = rand.randrange(70,101)
task_set = rts_gen_task_set(task_no, task_util)
if (rm.rts_rm_time_analysis(task_set)):
print "RM Schedule"
print "Utilization:", task_util, ",", "No of Tasks:", task_no
print "Task Set:", task_set
print task_set
rm.rts_rm_schedule(task_set)
elif(rm.rts_rm_pp_schedule(task_set, check_feasible=True)[0]):
print "RM-PP Schedule"
print "Utilization:", task_util, ",", "No of Tasks:", task_no
print "Task Set:", task_set
rm.rts_rm_pp_schedule(task_set)
else:
print "Task set is not schedulable by RM or RM-PP"
elif (op == 2):
print "Enter task set"
task_set = input()
for task_no in range(0, len(task_set)):
task_set[task_no] = list(task_set[task_no])
task_set[task_no].insert(0, 0)
print task_set
if (rm.rts_rm_time_analysis(task_set)):
print "RM Schedule"
rm.rts_rm_schedule(task_set)
else:
print "RM-PP Schedule"
rm.rts_rm_pp_schedule(task_set)
elif (op == 3):
rts_rm_stat()
elif (op == 4):
show_menu()
elif (op == 5):
break
elif (op == 6):
print "Enter number of tasks:"
task_no = input()
task_util = rand.randrange(100, 101)
task_util = 999
rounds = 0
while rounds < 30:
task_set = rts_gen_task_set(task_no, task_util)
#print "EDF Schedule"
#task_set = list(task_set)
#for task_no in range(0, len(task_set)):
# task_set[task_no] = list(task_set[task_no])
# task_set[task_no][1] = task_set[task_no][2]
print "======================================"
ret, misses = edf.rts_edf_schedule(task_set, check_feasible=True)
ret1, misses1 = rm.rts_rm_schedule(task_set, check_feasible=True)
ret2, misses2 = edf.rts_edf_schedule(task_set, check_feasible=True, enhanced_edf=True)
print ("%d: %s (%s, %s) %s (%s, %s) %s (%s, %s)") %(rounds, "EDF:", ret, misses, "Enhanced EDF:", ret2, misses2, "RM:", ret1, misses1)
#task_set = list(task_set)
#for task_no in range(0, len(task_set)):
# task_set[task_no] = list(task_set[task_no])
# task_set[task_no][1] = 0.5 * task_set[task_no][2]
#ret, misses = edf.rts_edf_schedule(task_set, check_feasible=True)
#ret1, misses1 = rm.rts_rm_schedule(task_set, check_feasible=True)
#print ("%d: %s (%s, %s) %s (%s, %s)") %(rounds, "EDF:",ret, misses, "RM:", ret1, misses1)
print "======================================"
rounds += 1
elif (op == 7):
print "Enter number of tasks:"
task_no = input()
task_util = rand.randrange(100, 101)
task_util = 999
rounds = 0
while rounds < 30:
task_set = rts_gen_task_set(task_no, task_util)
task_set = list(task_set)
for task_no in range(0, len(task_set)):
task_set[task_no] = list(task_set[task_no])
task_set[task_no][1] = 0.5 * task_set[task_no][2]
ret, misses = edf.rts_edf_schedule(task_set, check_feasible=True)
ret1, misses1 = rm.rts_rm_schedule(task_set, check_feasible=True)
print ("%d: %s (%s, %s) %s (%s, %s)") %(rounds, "EDF:",ret, misses, "RM:", ret1, misses1)
print "======================================"
rounds += 1
else:
show_menu()
else:
continue
|
from Components.MenuList import MenuList
from Components.Label import Label
from Screens.Screen import Screen
from Screens.MessageBox import MessageBox
from Components.ActionMap import NumberActionMap
from Components.Pixmap import Pixmap
from Components.FileList import FileList
from Screens.ChoiceBox import ChoiceBox
from Components.ActionMap import ActionMap
from Components.config import config
from Screens.Standby import TryQuitMainloop
from Screens.Downloads import Getipklist
from Screens.Ipkremove import Ipkremove
class SkinSetup(Screen):
def __init__(self, session):
Screen.__init__(self, session)
self.skinName = "Settings"
title = "Setup Skin"
self.setTitle(title)
self["list"] = MenuList([])
self["info"] = Label()
self["actions"] = ActionMap(["OkCancelActions"], {"ok": self.okClicked, "cancel": self.close}, -1)
txt = _("Here you can change skin, configure mainmenu, configure second-infobar or configure TechniHD skin.")
self["info"].setText(txt)
self.onShown.append(self.startSession)
def startSession(self):
self.res = []
self.res.append(_("Skin Manager"))
self.res.append(_("Configure mainmenu"))
self.res.append(_("Configure second-infobar"))
self.res.append(_("Show Picons in Channel List"))
self.res.append(_("TechniHD Setup"))
self.res.append(_("Exit"))
self["list"].setList(self.res)
def okClicked(self):
ires = self["list"].getSelectionIndex()
if ires == 0:
self.session.open(SettingsA)
elif ires == 1:
self.session.open(SettingsB)
elif ires == 2:
self.session.open(SettingsC)
elif ires == 3:
self.session.open(SettingsD)
elif ires == 4:
self.xtaskin()
else:
self.close()
def xtaskin(self):
try:
from Plugins.Extensions.TechniHDSetup.plugin import TechniHDSetup
self.session.open(TechniHDSetup)
except:
self.session.open(MessageBox, _("TechniHD is not installed on your Xtrend box !"), MessageBox.TYPE_ERROR, timeout = 10)
self.close()
class SettingsA(Screen):
def __init__(self, session):
Screen.__init__(self, session)
self.skinName = "Settings"
title = _("Skin Manager")
self.setTitle(title)
self["list"] = MenuList([])
self["info"] = Label()
self["actions"] = ActionMap(["OkCancelActions"], {"ok": self.okClicked, "cancel": self.close}, -1)
txt = _("Here you can download, install or remove skins")
self["info"].setText(txt)
self.onLayoutFinish.append(self.startSession)
def startSession(self):
self.res = []
self.res.append(_("Download skin"))
self.res.append(_("Install skin"))
self.res.append(_("Remove skin"))
self.res.append(_("Exit"))
self["list"].setList(self.res)
def okClicked(self):
ires = self["list"].getSelectionIndex()
if ires == 0:
self.session.open(Getipklist)
elif ires == 1:
self.startskin()
elif ires == 2:
self.session.open(Ipkremove)
else:
self.close()
def startskin(self):
try:
from Plugins.SystemPlugins.SkinSelector.plugin import SkinSelector
self.session.open(SkinSelector)
except ImportError:
    self.session.open(MessageBox, _("SystemPlugin SkinSelector is not installed!"), MessageBox.TYPE_ERROR, timeout = 10)
self.close()
class SettingsB(Screen):
def __init__(self, session):
Screen.__init__(self, session)
self.skinName = "Settings"
title = _("Configure Mainmenu")
self.setTitle(title)
self["list"] = MenuList([])
self["info"] = Label()
self["actions"] = ActionMap(["OkCancelActions"], {"ok": self.okClicked, "cancel": self.close}, -1)
self.cur = config.usage.mainmenu_mode.value
if self.cur == "horzicon":
txt = _("Current Mainmenu list setting is Horizontal-icon.\nHere you can change it.\nAfter select enigma will restart.")
elif self.cur == "vert":
txt = _("Current Mainmenu list setting is Vertical.\nHere you can change it.\nAfter select enigma will restart.")
elif self.cur == "horzanim":
txt = _("Current Mainmenu list setting is Horizontal-Animated.\nHere you can change it.\nAfter select enigma will restart.")
self["info"].setText(txt)
self.onLayoutFinish.append(self.startSession)
def startSession(self):
self.res = []
self.res.append(_("Horizontal icon list"))
self.res.append(_("Vertical list"))
self.res.append(_("Horizontal animated list"))
self.res.append(_("Exit"))
self["list"].setList(self.res)
def okClicked(self):
ires = self["list"].getSelectionIndex()
if ires == 0:
self.Icon()
elif ires == 1:
self.Vert()
elif ires == 2:
self.Anim()
else:
self.close()
def Icon(self):
config.usage.mainmenu_mode.value = "horzicon"
config.usage.mainmenu_mode.save()
self.session.open(TryQuitMainloop, 3)
def Vert(self):
config.usage.mainmenu_mode.value = "vert"
config.usage.mainmenu_mode.save()
self.session.open(TryQuitMainloop, 3)
def Anim(self):
config.usage.mainmenu_mode.value = "horzanim"
config.usage.mainmenu_mode.save()
self.session.open(TryQuitMainloop, 3)
class SettingsC(Screen):
def __init__(self, session):
Screen.__init__(self, session)
self.skinName = "Settings"
title = _("Configure second-infobar")
self.setTitle(title)
self["list"] = MenuList([])
self["info"] = Label()
self["actions"] = ActionMap(["OkCancelActions"], {"ok": self.okClicked, "cancel": self.close}, -1)
self.cur = config.usage.show_second_infobar.value
txt = " "
if self.cur is None:
txt = _("Current Second-infobar setting is None.\nHere you can change it.")
else:
txt = _("Current Second-infobar setting is Show.\nHere you can change it.")
self["info"].setText(txt)
self.onLayoutFinish.append(self.startSession)
def startSession(self):
self.res = []
self.res.append(_("Show second-infobar"))
self.res.append(_("Remove second-infobar"))
self.res.append(_("Exit"))
self["list"].setList(self.res)
def okClicked(self):
ires = self["list"].getSelectionIndex()
if ires == 0:
self.ShowSI()
elif ires == 1:
self.RemSI()
else:
self.close()
def ShowSI(self):
if self.cur is not None:
self.session.open(MessageBox, _("Current Second-infobar setting is Show !"), type = MessageBox.TYPE_INFO,timeout = 10 )
else:
config.usage.show_second_infobar.value = "0"
config.usage.show_second_infobar.save()
def RemSI(self):
if self.cur is None:
self.session.open(MessageBox, _("Current Second-infobar setting is None !"), type = MessageBox.TYPE_INFO,timeout = 10 )
else:
config.usage.show_second_infobar.value = None
config.usage.show_second_infobar.save()
class SettingsD(Screen):
def __init__(self, session):
Screen.__init__(self, session)
self.skinName = "Settings"
title = _("Show Picons in Channel List")
self.setTitle(title)
self["list"] = MenuList([])
self["info"] = Label()
self["actions"] = ActionMap(["OkCancelActions"], {"ok": self.okClicked, "cancel": self.close}, -1)
self.cur = config.usage.service_icon_enable.value
txt = " "
if self.cur is True:
txt = _("Current picon setting is True.\nHere you can change it.")
else:
txt = _("Current picon setting is False.\nHere you can change it.")
self["info"].setText(txt)
self.onLayoutFinish.append(self.startSession)
def startSession(self):
self.res = []
self.res.append(_("Show Picons in Channel List"))
self.res.append(_("Remove Picons in Channel List"))
self.res.append(_("Exit"))
self["list"].setList(self.res)
def okClicked(self):
ires = self["list"].getSelectionIndex()
if ires == 0:
self.ShowSp()
elif ires == 1:
self.RemSp()
else:
self.close()
def ShowSp(self):
if self.cur is True:
self.session.open(MessageBox, _("Current picon setting is True !"), type = MessageBox.TYPE_INFO,timeout = 10 )
else:
config.usage.service_icon_enable.value = True
config.usage.service_icon_enable.save()
def RemSp(self):
if self.cur is False:
self.session.open(MessageBox, _("Current picon setting is False !"), type = MessageBox.TYPE_INFO,timeout = 10 )
else:
config.usage.service_icon_enable.value = False
config.usage.service_icon_enable.save()
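# Illustrative sketch only (not part of the original file): one way these screens
# are typically launched from an Enigma2 plugin entry point. The descriptor name
# and description below are made-up placeholders.
#
#   from Plugins.Plugin import PluginDescriptor
#
#   def main(session, **kwargs):
#       session.open(SkinSetup)
#
#   def Plugins(**kwargs):
#       return PluginDescriptor(name = "Skin Setup", description = "Configure skins",
#           where = PluginDescriptor.WHERE_PLUGINMENU, fnc = main)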
|
from django.dispatch import Signal, receiver
from misago.core import serializer
from misago.core.signals import secret_key_changed
from misago.users.signals import username_changed
from .models import Category, CategoryRole
delete_category_content = Signal()
move_category_content = Signal(providing_args=["new_category"])
"""
Signal handlers
"""
@receiver(secret_key_changed)
def update_roles_pickles(sender, **kwargs):
for role in CategoryRole.objects.iterator():
if role.pickled_permissions:
role.pickled_permissions = serializer.regenerate_checksum(
role.pickled_permissions)
role.save(update_fields=['pickled_permissions'])
@receiver(username_changed)
def update_usernames(sender, **kwargs):
Category.objects.filter(last_poster=sender).update(
last_poster_name=sender.username,
last_poster_slug=sender.slug
)
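# Usage sketch (illustrative, not part of this module): senders fire
# move_category_content before deleting a category so receivers can rehome its
# content; "old_category" and "new_category" are hypothetical objects.
#
#   move_category_content.send(sender=old_category, new_category=new_category)
#   old_category.delete()
#
# Handlers subscribe with @receiver(move_category_content), just like the
# receivers defined above.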
|
register_rulegroup("activechecks",
_("Active checks (HTTP, TCP, etc.)"),
_("Configure active networking checks like HTTP and TCP"))
group = "activechecks"
register_rule(group,
"active_checks:dns",
Tuple(
title = _("Check DNS service"),
help = _("Check optain an IP address for a host or domain"
"It uses <tt>check_dns</tt> from standard plugins."),
elements = [
TextAscii(title = _("Hostname"), allow_empty = False,
help = _('The name or address you want to query')),
Dictionary(
title = _("Optional parameters"),
elements = [
( "server",
TextAscii(
title = _("DNS Server"),
allow_empty = False,
help = _("Optional DNS server you want to use for the lookup"))),
( "expected_address",
TextAscii(
title = _("Expected Address"),
allow_empty = False,
help = _("Optional IP-ADDRESS you expect the DNS server to return. HOST"
"must end with a dot (.) " )),
),
( "expected_authority",
TextAscii(
title = _("Expected Authority"),
allow_empty = False,
help = _("Optional expect the DNS server to be authoriative"
"for the lookup ")),
),
( "response_time",
Tuple(
title = _("Expected response time"),
elements = [
Float(
title = _("Warning at"),
unit = "sec",
default_value = 1),
Float(
title = _("Critical at"),
unit = "sec",
default_value = 2),
])
),
( "timeout",
Integer(
title = _("Seconds before connection times out"),
unit = _("sec"),
default_value = 10,
)
),
]),
]
),
match = 'all')
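# Illustrative example, not part of the original registration: a value produced
# by the Tuple above is (hostname, options-dict). All concrete values below are
# made-up placeholders.
#
#   ("www.example.com", {
#       "server": "192.0.2.53",
#       "expected_address": "192.0.2.80",
#       "response_time": (1.0, 2.0),   # warn/crit thresholds in seconds
#       "timeout": 10,
#   })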
register_rule(group,
"active_checks:tcp",
Tuple(
title = _("Check connecting to a TCP port"),
help = _("This check test the connection to a TCP port. It uses "
"<tt>check_tcp</tt> from the standard Nagios plugins."),
elements = [
Integer(title = _("TCP Port"), minvalue=1, maxvalue=65535),
Dictionary(
title = _("Optional parameters"),
elements = [
( "hostname",
TextAscii(
title = _("DNS Hostname"),
allow_empty = False,
help = _("If you specify a hostname here, then a dynamic DNS lookup "
"will be done instead of using the IP address of the host "
"as configured in your host properties."))),
( "response_time",
Tuple(
title = _("Expected response time"),
elements = [
Float(
title = _("Warning at"),
unit = "ms",
default_value = 100.0),
Float(
title = _("Critical at"),
unit = "ms",
default_value = 200.0),
])
),
( "timeout",
Integer(
title = _("Seconds before connection times out"),
unit = _("sec"),
default_value = 10,
)
),
( "refuse_state",
DropdownChoice(
title = _("State for connection refusal"),
choices = [ ('crit', _("CRITICAL")),
('warn', _("WARNING")),
('ok', _("OK")),
])
),
( "send_string",
TextAscii(
title = _("String to send"),
size = 30)
),
( "escape_send_string",
FixedValue(
value = True,
title = _("Expand <tt>\\n</tt>, <tt>\\r</tt> and <tt>\\t</tt> in the sent string"),
totext = _("expand escapes"))
),
( "expect",
ListOfStrings(
title = _("Strings to expect in response"),
orientation = "horizontal",
valuespec = TextAscii(size = 30),
)
),
( "expect_all",
FixedValue(
value = True,
totext = _("expect all"),
title = _("Expect <b>all</b> of those strings in the response"))
),
( "jail",
FixedValue(
value = True,
title = _("Hide response from socket"),
help = _("As soon as you configure expected strings in "
"the response the check will output the response - "
"as long as you do not hide it with this option"),
totext = _("hide response"))
),
( "mismatch_state",
DropdownChoice(
title = _("State for expected string mismatch"),
choices = [ ('crit', _("CRITICAL")),
('warn', _("WARNING")),
('ok', _("OK")),
])
),
( "delay",
Integer(
title = _("Seconds to wait before polling"),
help = _("Seconds to wait between sending string and polling for response"),
unit = _("sec"),
default_value = 0,
)
),
( "maxbytes",
Integer(
title = _("Maximum number of bytes to receive"),
help = _("Close connection once more than this number of "
"bytes are received. Per default the number of "
"read bytes is not limited. This setting is only "
"used if you expect strings in the response."),
default_value = 1024,
),
),
( "ssl",
FixedValue(
value = True,
totext = _("use SSL"),
title = _("Use SSL for the connection."))
),
( "cert_days",
Integer(
title = _("SSL certificate validation"),
help = _("Minimum number of days a certificate has to be valid"),
unit = _("days"),
default_value = 30)
),
( "quit_string",
TextAscii(
title = _("Final string to send"),
help = _("String to send server to initiate a clean close of "
"the connection"),
size = 30)
),
]),
]
),
match = 'all')
register_rule(group,
"active_checks:http",
Tuple(
title = _("Check HTTP service"),
help = _("Check HTTP/HTTPS service using the plugin <tt>check_http</tt> "
"from the standard Nagios Plugins. "
"This plugin tests the HTTP service on the specified host. "
"It can test normal (HTTP) and secure (HTTPS) servers, follow "
"redirects, search for strings and regular expressions, check "
"connection times, and report on certificate expiration times. "),
elements = [
TextUnicode(
title = _("Name"),
help = _("Will be used in the service description"),
allow_empty = False),
Dictionary(
title = _("Optional settings"),
elements = [
( "virthost",
Tuple(
title = _("Virtual host"),
elements = [
TextAscii(
title = _("Name of the virtual host"),
help = _("Set this in order to specify the name of the "
"virtual host for the query (using HTTP/1.1). When you "
"leave this empty, then the IP address of the host "
"will be used instead."),
allow_empty = False),
Checkbox(
label = _("Omit specifying an IP address"),
help = _("Usually Check_MK will nail this check to the "
"IP address of the host it is attached to. With this "
"option you can have the check use the name of the "
"virtual host instead and do a dynamic DNS lookup."),
true_label = _("omit IP address"),
false_label = _("specify IP address"),
),
]
)
),
( "uri",
TextAscii(
title = _("URI to fetch (default is <tt>/</tt>)"),
allow_empty = False,
default_value = "/")
),
( "port",
Integer(
title = _("TCP Port"),
minvalue = 1,
maxvalue = 65535,
default_value = 80)
),
( "ssl",
FixedValue(
value = True,
totext = _("use SSL/HTTPS"),
title = _("Use SSL/HTTPS for the connection."))
),
( "cert_days",
Integer(
title = _("Maximum certificate age"),
help = _("Minimum number of days a certificate has to be valid. "
"Port defaults to 443. When this option is used the URL "
"is not checked."),
unit = _("days"),
)
),
( "sni",
FixedValue(
value = True,
totext = _("enable SNI"),
title = _("Enable SSL/TLS hostname extension support (SNI)"),
)
),
( "response_time",
Tuple(
title = _("Expected response time"),
elements = [
Float(
title = _("Warning at"),
unit = "ms",
default_value = 100.0),
Float(
title = _("Critical at"),
unit = "ms",
default_value = 200.0),
])
),
( "timeout",
Integer(
title = _("Seconds before connection times out"),
unit = _("sec"),
default_value = 10,
)
),
( "user_agent",
TextAscii(
title = _("User Agent"),
help = _("String to be sent in http header as \"User Agent\""),
allow_empty = False,
),
),
( "add_headers",
ListOfStrings(
title = _("Additional header lines"),
orientation = "vertical",
valuespec = TextAscii(size = 40),
),
),
( "auth",
Tuple(
title = _("Authorization"),
help = _("Credentials for HTTP Basic Authentication"),
elements = [
TextAscii(
title = _("Username"),
size = 12,
allow_empty = False),
TextAscii(
title = _("Password"),
size = 12,
allow_empty = False),
])
),
( "proxy_auth",
Tuple(
title = _("Proxy-Authorization"),
help = _("Credentials for HTTP Proxy with basic authentication"),
elements = [
TextAscii(
title = _("Username"),
size = 12,
allow_empty = False),
TextAscii(
title = _("Password"),
size = 12,
allow_empty = False),
])
),
( "onredirect",
DropdownChoice(
title = _("How to handle redirect"),
choices = [
( 'ok', _("Make check OK") ),
( 'warning', _("Make check WARNING") ),
( 'critical', _("Make check CRITICAL") ),
( 'follow', _("Follow the redirection") ),
( 'sticky', _("Follow, but stay to same IP address") ),
( 'stickyport', _("Follow, but stay to same IP-address and port") ),
],
default_value = 'follow'),
),
( "expect_response",
ListOfStrings(
title = _("Strings to expect in server response"),
help = _("At least one of these strings is expected in "
"the first (status) line of the server response "
"(default: <tt>HTTP/1.</tt>). If specified skips "
"all other status line logic (ex: 3xx, 4xx, 5xx "
"processing)"),
)
),
( "expect_string",
TextAscii(
title = _("Fixed string to expect in the content"),
allow_empty = False,
)
),
( "expect_regex",
Tuple(
title = _("Regular expression to expect in content"),
orientation = "vertical",
show_titles = False,
elements = [
RegExp(label = _("Regular expression: ")),
Checkbox(label = _("Case insensitive")),
Checkbox(label = _("return CRITICAL if found, OK if not")),
])
),
( "post_data",
Tuple(
title = _("Send HTTP POST data"),
elements = [
TextAscii(
title = _("HTTP POST data"),
help = _("Data to send via HTTP POST method. "
"Please make sure, that the data is URL-encoded."),
size = 40,
),
TextAscii(
title = _("Content-Type"),
default_value = "text/html"),
])
),
( "method",
DropdownChoice(
title = _("HTTP Method"),
default_value = "GET",
choices = [
( "GET", "GET" ),
( "POST", "POST" ),
( "OPTIONS", "OPTIONS" ),
( "TRACE", "TRACE" ),
( "PUT", "PUT" ),
( "DELETE", "DELETE" ),
( "HEAD", "HEAD" ),
( "CONNECT", "CONNECT" ),
])
),
( "no_body",
FixedValue(
value = True,
title = _("Don't wait for document body"),
help = _("Note: this still does an HTTP GET or POST, not a HEAD."),
totext = _("dont wait for body"))
),
( "page_size",
Tuple(
title = _("Page size to expect"),
elements = [
Integer(title = _("Minimum"), unit=_("Bytes")),
Integer(title = _("Maximum"), unit=_("Bytes")),
])
),
( "max_age",
Age(
title = _("Maximum age"),
help = _("Warn, if the age of the page is older than this"),
default_value = 3600 * 24,
)
),
]),
]
),
match = 'all')
register_rule(group,
"active_checks:ldap",
Tuple(
title = _("Check access to LDAP service"),
help = _("This check uses <tt>check_ldap</tt> from the standard "
"Nagios plugins in order to try the response of an LDAP "
"server."),
elements = [
TextUnicode(
title = _("Name"),
help = _("The service description will be <b>LDAP</b> plus this name"),
allow_empty = False),
TextAscii(
title = _("Base DN"),
help = _("LDAP base, e.g. ou=Development, o=Mathias Kettner GmbH, c=de"),
allow_empty = False,
size = 60),
Dictionary(
title = _("Optional parameters"),
elements = [
( "attribute",
TextAscii(
title = _("Attribute to search"),
help = _("LDAP attribute to search, "
"The default is <tt>(objectclass=*)</tt>."),
size = 40,
allow_empty = False,
default_value = "(objectclass=*)",
)
),
( "authentication",
Tuple(
title = _("Authentication"),
elements = [
TextAscii(
title = _("Bind DN"),
help = _("Distinguished name for binding"),
allow_empty = False,
size = 60,
),
TextAscii(
title = _("Password"),
help = _("Password for binding, if you server requires an authentication"),
allow_empty = False,
size = 20,
)
]
)
),
( "port",
Integer(
title = _("TCP Port"),
help = _("Default is 389 for normal connetions and 636 for SSL connections."),
minvalue = 1,
maxvalue = 65535,
default_value = 389)
),
( "ssl",
FixedValue(
value = True,
totext = _("Use SSL"),
title = _("Use LDAPS (SSL)"),
help = _("Use LDAPS (LDAP SSLv2 method). This sets the default port number to 636"))
),
( "version",
DropdownChoice(
title = _("LDAP Version"),
help = _("The default is to use version 2"),
choices = [
( "v2", _("Version 2") ),
( "v3", _("Version 3") ),
( "v3tls", _("Version 3 and TLS") ),
],
default_value = "v2",
)
),
( "response_time",
Tuple(
title = _("Expected response time"),
elements = [
Float(
title = _("Warning at"),
unit = "ms",
default_value = 1000.0),
Float(
title = _("Critical at"),
unit = "ms",
default_value = 2000.0),
])
),
( "timeout",
Integer(
title = _("Seconds before connection times out"),
unit = _("sec"),
default_value = 10,
)
),
])
]),
match = 'all'
)
register_rule(group,
"custom_checks",
Dictionary(
title = _("Classical active and passive Nagios checks"),
help = _("With this ruleset you can configure "classical Nagios checks" "
"to be executed directly on your monitoring server. These checks "
"will not use Check_MK. It is also possible to configure passive "
"checks that are fed with data from external sources via the Nagios "
"command pipe."),
elements = [
( "service_description",
TextUnicode(
title = _("Service description"),
help = _("Please make sure that this is unique per host "
"and does not collide with other services."),
allow_empty = False,
default_value = _("Customcheck"))
),
( "command_line",
TextAscii(
title = _("Command line"),
help = _("Please enter the complete shell command including "
"path name and arguments to execute. You can use Nagios "
"macros here. The most important are:<ul>"
"<li><tt>$HOSTADDRESS$</tt>: The IP address of the host</li>"
"<li><tt>$HOSTNAME$</tt>: The name of the host</li>"
"<li><tt>$USER1$</tt>: user macro 1 (usually path to shipped plugins)</li>"
"<li><tt>$USER2$</tt>: user marco 2 (usually path to your own plugins)</li>"
"</ul>"
"If you are using OMD, then you can omit the path and just specify "
"the command (e.g. <tt>check_foobar</tt>). This command will be "
"searched first in the local plugins directory "
"(<tt>~/local/lib/nagios/plugins</tt>) and then in the shipped plugins "
"directory (<tt>~/lib/nagios/plugins</tt>) within your site directory.<br><br>"
"<b>Passive checks</b>: Do no specify a command line if you want "
"to define passive checks."),
size = 80,
)
),
( "command_name",
TextAscii(
title = _("Internal command name"),
help = _("If you want, then you can specify a name that will be used "
"in the <tt>define command</tt> section for these checks. This "
"allows you to a assign a customer PNP template for the performance "
"data of the checks. If you omit this, then <tt>check-mk-custom</tt> "
"will be used."),
size = 32)
),
( "has_perfdata",
FixedValue(
title = _("Performance data"),
value = True,
totext = _("process performance data"),
)
),
],
required_keys = [ "service_description" ],
),
match = 'all'
)
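# Illustrative example, not part of the original registration: a value matching
# the "custom_checks" Dictionary above (command name and arguments are made up).
#
#   {
#       "service_description": u"Customcheck",
#       "command_line": "check_foobar -H $HOSTADDRESS$",
#       "command_name": "check-mk-custom-foobar",
#       "has_perfdata": True,
#   }
#
# Leaving out "command_line" defines a passive check, as the help text explains.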
|
import sys
print "Argumento 0: "+sys.argv[0]
print "Argumento 1: "+sys.argv[1]
|
""" This module contains all context menus needed to be displayed in different sections. Basically any menu that is bigger than 2 menu items should be here."""
from __future__ import unicode_literals
import wx
class postMenu(wx.Menu):
""" Display a menu with actions related to posts in the news feed or walls. """
def __init__(self, can_delete=False, *args, **kwargs):
super(postMenu, self).__init__(*args, **kwargs)
self.open = self.Append(wx.NewId(), _("Open"))
self.like = self.Append(wx.NewId(), _("Like"))
self.dislike = self.Append(wx.NewId(), _("Dislike"))
self.dislike.Enable(False)
self.comment = self.Append(wx.NewId(), _("Add comment"))
self.fav = self.Append(wx.NewId(), _("Add to favorites"))
self.fav.Enable(False)
if can_delete:
self.delete = self.Append(wx.NewId(), _("Delete"))
else:
self.post_in_wall = self.Append(wx.NewId(), _("Post to this profile"))
self.post_in_wall.Enable(False)
self.view_profile = self.Append(wx.NewId(), _("View user profile"))
self.open_in_browser = self.Append(wx.NewId(), _("Open in vk.com"))
class audioMenu(wx.Menu):
def __init__(self, *args, **kwargs):
super(audioMenu, self).__init__(*args, **kwargs)
self.open = self.Append(wx.NewId(), _("&Open"))
self.play = self.Append(wx.NewId(), _("&Play"))
self.library = self.Append(wx.NewId(), _("&Add to library"))
self.move = self.Append(wx.NewId(), _("Move to album"))
self.download = self.Append(wx.NewId(), _("Download"))
self.select = self.Append(wx.NewId(), _("Select all"))
self.deselect = self.Append(wx.NewId(), _("Unselect all"))
class peopleMenu(wx.Menu):
def __init__(self, is_request=False, is_subscriber=False, not_friend=False, *args, **kwargs):
super(peopleMenu, self).__init__(*args, **kwargs)
if is_request:
self.create_request_items()
elif is_subscriber:
self.create_subscriber_items()
self.view_profile = self.Append(wx.NewId(), _("View profile"))
self.message = self.Append(wx.NewId(), _("Send a message"))
self.timeline = self.Append(wx.NewId(), _("Open timeline"))
self.fav = self.Append(wx.NewId(), _("Add to favorites"))
self.fav.Enable(False)
if not_friend == False:
self.common_friends = self.Append(wx.NewId(), _("View friends in common"))
if is_request == False and is_subscriber == False and not_friend == False:
self.decline = self.Append(wx.NewId(), _("Remove from friends"))
self.block = self.Append(wx.NewId(), _("Block"))
self.open_in_browser = self.Append(wx.NewId(), _("Open in vk.com"))
def create_request_items(self):
self.accept = self.Append(wx.NewId(), _("Accept"))
self.decline = self.Append(wx.NewId(), _("Decline"))
self.keep_as_follower = self.Append(wx.NewId(), _("Keep as follower"))
self.block = self.Append(wx.NewId(), _("Block"))
def create_subscriber_items(self):
self.add = self.Append(wx.NewId(), _("Add to friends"))
self.block = self.Append(wx.NewId(), _("Block"))
class documentMenu(wx.Menu):
def __init__(self, added=False, *args, **kwargs):
super(documentMenu, self).__init__(*args, **kwargs)
self.download = self.Append(wx.NewId(), _("Download document"))
if added == True:
self.action = self.Append(wx.NewId(), _("Remove from my documents"))
else:
self.action = self.Append(wx.NewId(), _("Add to my documents"))
self.open_in_browser = self.Append(wx.NewId(), _("Open in vk.com"))
self.fav = self.Append(wx.NewId(), _("Add to favorites"))
self.fav.Enable(False)
class commentMenu(wx.Menu):
def __init__(self, *args, **kwargs):
super(commentMenu, self).__init__(*args, **kwargs)
self.open = self.Append(wx.NewId(), _("Open"))
self.like = self.Append(wx.NewId(), _("Like"))
self.dislike = self.Append(wx.NewId(), _("Dislike"))
self.open_in_browser = self.Append(wx.NewId(), _("Open in vk.com"))
class attachMenu(wx.Menu):
def __init__(self):
super(attachMenu, self).__init__()
self.upload = self.Append(wx.NewId(), _("Upload from computer"))
self.add = self.Append(wx.NewId(), _("Add from VK"))
class communityBufferMenu(wx.Menu):
def __init__(self):
super(communityBufferMenu, self).__init__()
load = wx.Menu()
self.load_posts = load.Append(wx.NewId(), _("Load posts"))
self.load_topics = load.Append(wx.NewId(), _("Load topics"))
self.load_members = load.Append(wx.NewId(), _("Load members"))
self.load_audios = load.Append(wx.NewId(), _("Load audios"))
self.load_videos = load.Append(wx.NewId(), _("Load videos"))
self.load_documents = load.Append(wx.NewId(), _("Load documents"))
self.Append(wx.NewId(), _("Load"), load)
self.open_in_browser = self.Append(wx.NewId(), _("Open in vk.com"))
class conversationBufferMenu(wx.Menu):
def __init__(self):
super(conversationBufferMenu, self).__init__()
self.delete = self.Append(wx.NewId(), _("Delete conversation"))
self.open_in_browser = self.Append(wx.NewId(), _("Open in vk.com"))
class toolsMenu(wx.Menu):
def __init__(self, *args, **kwargs):
super(toolsMenu, self).__init__(*args, **kwargs)
self.translate = self.Append(wx.NewId(), _("&Translate message"))
self.spellcheck = self.Append(wx.NewId(), _("Spelling &correction"))
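# Usage sketch (illustrative, not part of this module): popping up a postMenu
# from a frame and handling its "Open" item; "frame" and on_open are hypothetical.
#
#   def on_open(event):
#       pass  # open the selected post here
#
#   menu = postMenu(can_delete=True)
#   frame.Bind(wx.EVT_MENU, on_open, menu.open)
#   frame.PopupMenu(menu)
#   menu.Destroy()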
|
"""
@summary: Module containing information about providers
@author: CJ Grady
@version: 1.0
@status: alpha
@license: gpl2
@copyright: Copyright (C) 2014, University of Kansas Center for Research
Lifemapper Project, lifemapper [at] ku [dot] edu,
Biodiversity Institute,
1345 Jayhawk Boulevard, Lawrence, Kansas, 66045, USA
This program is free software; you can redistribute it and/or modify
it under the terms of the GNU General Public License as published by
the Free Software Foundation; either version 2 of the License, or (at
your option) any later version.
This program is distributed in the hope that it will be useful, but
WITHOUT ANY WARRANTY; without even the implied warranty of
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
General Public License for more details.
You should have received a copy of the GNU General Public License
along with this program; if not, write to the Free Software
Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA
02110-1301, USA.
"""
PROVIDERS = {
1 : "The Global Biodiversity Information Facility",
2 : "The Species 2000 and ITIS Catalogue of Life",
3 : "The International Plant Names Index",
4 : "Academy of Natural Sciences",
5 : "University of Arkansas",
6 : "Australian Antarctic Data Centre",
7 : "Australian Biological Resources Study",
8 : "Australian National Herbarium (CANB)",
9 : "Australian National Insect Collection, CSIRO Entomology",
10 : "Avian Knowledge Network",
11 : "Banc de dades de biodiversitat de Catalunya",
12 : "BeBIF Provider",
13 : "Berkeley Natural History Museums",
14 : "Bernice Pauahi Bishop Museum",
15 : "University of Warsaw, Białowieża Geobotanical Station of the Biological Faculty",
16 : "Biologiezentrum Linz Oberoesterreich",
17 : "Biologische Anstalt Helgoland in the Foundation Alfred-Wegener Institute for Polar and Marine Research",
18 : "Bird Studies Canada",
19 : "Bodensee-Naturmuseum Konstanz",
20 : "Botanical Research Institute of Texas",
21 : "Botanic Garden and Botanical Museum Berlin-Dahlem",
22 : "University of Warsaw, Botanic Garden",
23 : "British Antarctic Survey",
24 : "Bundesamt für Naturschutz / Netzwerk Phytodiversität Deutschland",
25 : "University of Washington Burke Museum",
26 : "CABIDiGIR Provider",
27 : "California Academy of Sciences",
28 : "Cameroon National Herbarium",
29 : "Nearctic Spider Database",
30 : "Canadian Biodiversity Information Facility",
31 : "Canadian Museum of Nature",
32 : "CBS (NLBIF)",
33 : "Korea Institute of Science and Technology Information",
34 : "Centre for Genetic Resources, The Netherlands",
35 : "Centro de Ecologia Aplicada",
36 : "Centro Nacional Patagonico - CONICET",
37 : "Colección Herpetológica de la UNNE",
38 : "Conservatoire botanique national du Bassin parisien",
39 : "Cornell University Museum of Vertebrates",
40 : "CRICYT - CONICET",
41 : "Department of Plant Sciences, University of Oxford",
42 : "Dept. Of Biology, University of Trieste",
43 : "Leibniz Institute DSMZ - German Collection of Microorganisms and Cell Cultures",
44 : "EMAN Provider",
45 : "European Environment Agency",
46 : "Ewha Womans University Natural History Museum",
47 : "Facultad de Ciencias Naturales y Museo - U.N.L.P.",
48 : "Fairchild Tropical Botanic Garden",
49 : "Field Museum",
50 : "Finnish Museum of Natural History",
51 : "FishBase",
52 : "Florida Museum of Natural History",
53 : "Folklore and Natural History Museum",
54 : "Forest Research Institute, European Centre for Natural Forests",
55 : "Fundacion Miguel Lillo Provider",
56 : "GBIF New Zealand",
57 : "GBIF-Spain",
58 : "Herbario de la Universidad de Granada",
59 : "GBIF Swiss Node",
60 : "GTI Japan",
61 : "Gyeryonsan Natural History Museum",
62 : "Haus der Natur Salzburg",
63 : "Herbaria of the University and ETH Zürich (Z+ZT)",
64 : "Herbario de la Universidad de Leon",
65 : "Instituto de Ciencias Naturales",
66 : "Herbario SANT, Universidade de Santiago de Compostela",
67 : "HERBARIUM AMAZONENSE – AMAZ",
68 : "Herbarium Hamburgense",
69 : "Herbarium of the University of Aarhus",
70 : "Herbarium RNG, School of Plant Sciences, The University of Reading",
71 : "Herbier de la Guyane",
72 : "Humboldt-Universität Berlin",
73 : "Icelandic Institute of Natural History",
74 : "Plant Breeding and Acclimatization Institute (IHAR) - National Research Institute",
75 : "Illinois Natural History Survey",
76 : "inatura - Erlebnis Naturschau Dornbirn",
77 : "Institute of Agricultural and Food Biotechnology, Dept. of Microbiology",
78 : "Institute of Biochemistry and Biophysics, Polish Academy of Sciences",
79 : "INSTITUTE OF BOTANY S.A.S.",
80 : "Institute of Dendrology, Polish Academy of Sciences",
81 : "Jagiellonian University, Institute of Environmental Sciences",
82 : "Ocean Biogeographic Information System",
83 : "New Mexico Biodiversity Collections Consortium",
84 : "Institute of Nature Conservation, Polish Academy of Sciences",
85 : "Institute of Systematics and Evolution of Animals, Polish Academy of Sciences",
86 : "Institute of Zoology and Botany of EAU",
87 : "Jagiellonian University, Institute of Zoology",
88 : "Instituto de Botánica Darwinion - CONICET",
89 : "Instituto de Investigaciones de la Amazonía Peruana",
90 : "Instituto Nacional de Biodiversidad (INBio), Costa Rica",
91 : "Marine Biology Laboratory",
92 : "Bioversity International",
94 : "Israel Nature and Parks Authority",
95 : "James R. Slater Museum of Natural History",
96 : "JEOLLANAMDO MARITIME & FISHERIES SCIENCE MUSEUM",
97 : "Jyvaskyla University Museum - The Section of Natural Sciences",
98 : "Karl Franzens University of Graz, Insitute for Botany - Herbarium GZU",
99 : "KBRC (Korean Biological Resource Center) - KBIF Node",
100 : "KBIF Data Repository",
101 : "Korea National Arboretum (Korea Forest Service)",
102 : "Kyung Hee University Natural History Museum",
103 : "Laboratoire de Paléobotanique et Paléoécologie",
104 : "Natural History Museum of Los Angeles County",
105 : "Louisiana State University Museum of Natural Science",
106 : "Mammal Research Institute, Polish Academy of Sciences",
107 : "Marine Biology Section Ugent",
108 : "Marine Science Institute, UCSB",
109 : "Michigan State University Herbarium",
110 : "Michigan State University Museum",
111 : "Missouri Botanical Garden",
112 : "Mokpo Museum of Natural History",
113 : "Monty L. Bean Museum, Brigham Young University",
114 : "Museo Argentino de Ciencias Naturales",
115 : "Museo de Zoología de la Facultad de Ciencias, UNAM",
116 : "Museo Nacional de Costa Rica",
117 : "Museum and Institute of Zoology, Polish Academy of Sciences",
118 : "Museum für Naturkunde Berlin",
119 : "Museum national d'histoire naturelle et Reseau des Herbiers de France",
120 : "Museum of Comparative Zoology, Harvard University",
121 : "Wrocław University, Museum of Natural History",
122 : "Museum of Southwestern Biology",
123 : "Museum of Texas Tech University (TTU)",
124 : "Museum of Vertebrate Zoology",
125 : "National Chemical Laboratory",
126 : "National Herbarium of New South Wales",
127 : "National Institute of Genetics, ROIS",
128 : "National Medicines Institute, Poland",
130 : "South African National Biodiversity Institute",
131 : "National Museum of Nature and Science, Japan",
132 : "National Science Museum of Korea",
133 : "Natural History Museum",
134 : "Natural History Museum, University of Oslo",
135 : "Natural History Museum, Vienna - Herbarium W",
136 : "NatureServe",
137 : "New Brunswick Museum",
138 : "Nicolaus Copernicus University of Toruń",
139 : "Netherlands Biodiversity Information Facility (NLBIF)",
140 : "Ohio State University Insect Collection",
141 : "Oregon State University",
142 : "Österreichische Mykologische Gesellschaft",
143 : "CSIRO",
144 : "PAKISTAN MUSEUM OF NATURAL HISTORY Provider",
145 : "PANGAEA - Publishing Network for Geoscientific and Environmental Data",
146 : "Prirodoslovni muzej Slovenije",
147 : "Royal Museum for Central Africa, Belgium",
148 : "Royal Botanic Gardens, Kew",
149 : "Royal Ontario Museum",
150 : "Sam Noble Oklahoma Museum of Natural History",
151 : "San Diego Natural History Museum",
152 : "Santa Barbara Museum of Natural History",
153 : "Alaska Ocean Observing System",
154 : "Arizona State University, International Institute for Species Exploration",
155 : "Senckenberg",
156 : "Seodaemun Museum of Natural History",
157 : "Siamazonia Provider",
158 : "Staatliche Naturwissenschaftliche Sammlungen Bayerns",
159 : "Steiermärkisches Landesmuseum Joanneum - Herbarium GJO",
160 : "Sternberg Museum of Natural History",
161 : "SysTax",
162 : "Taiwan Biodiversity Information Facility (TaiBIF)",
163 : "Texas Natural Science Center",
164 : "Danish Biodiversity Information Facility",
165 : "The Hong Kong University Herbarium",
166 : "The Museum of Biology, Sun Yatsen Univ",
167 : "The New York Botanical Garden",
168 : "Nordic Genetic Resource Center (NORDGEN)",
169 : "GBIF-Sweden",
170 : "Tiroler Landesmuseum Ferdinandeum",
171 : "University of California, Davis",
172 : "UK National Biodiversity Network",
173 : "UNIBIO, IBUNAM",
175 : "Georg-August-Universität Göttingen, Albrecht-von-Haller-Institut für Pflanzenwissenschaften, Abteilung Systematische Botanik",
176 : "University of Alabama Biodiversity and Systematics",
177 : "Arctos",
178 : "University of Alberta Museums",
179 : "University of Colorado Museum of Natural History",
180 : "University of Gdańsk, Bird Migration Research Station",
181 : "University of Gdańsk, Dept. of Plant Taxonomy and Nature Conservation",
182 : "University of Helsinki, Department of Applied Biology",
183 : "University of Michigan Museum of Zoology",
184 : "University of Minnesota Bell Museum of Natural History",
185 : "University of Navarra, Museum of Zoology",
186 : "University of Nebraska State Museum",
187 : "University of Silesia, Laboratory of Botanical Documentation - Herbarium KTU",
188 : "University of Tennessee - Chattanooga (UTC)",
189 : "University of Tennessee, Knoxville",
190 : "University of Texas-Arlington",
191 : "University of Texas at El Paso",
192 : "University of Texas-Austin",
193 : "University of Turku provider",
194 : "University of Vienna, Institute for Botany - Herbarium WU",
195 : "University of Vienna, Institute for Botany, Research Group for Plant Biogeography",
196 : "University of Kansas Biodiversity Institute",
197 : "Upper Silesian Museum, Bytom",
198 : "USDA PLANTS",
199 : "US National Plant Germplasm System",
200 : "Utah Museum of Natural History",
201 : "Utah State University",
202 : "Utah Valley State College (UVSC)",
203 : "Warsaw University of Life Sciences, Fac. Horticulture and Landscape Architecture, Dept. of Applied Entomology",
204 : "University of Warsaw, Dept. of Ecology",
205 : "Western Foundation of Vertebrate Zoology",
206 : "WFCC-MIRCEN World Data Centre for Microorganisms (WDCM)",
207 : "Yale University Peabody Museum",
208 : "Zoologisches Forschungsinstitut und Museum Alexander Koenig",
209 : "Institute of Botany, University of Hohenheim",
210 : "Korean Bioinformation Center",
211 : "Wildscreen",
212 : "Botanic Garden, National Museum of Natural History, University of Lisbon",
213 : "Comisión nacional para el conocimiento y uso de la biodiversidad",
214 : "Harvard University Herbaria",
215 : "Université de Strasbourg",
219 : "Białowieża National Park",
220 : "National Museum of Natural History, Smithsonian Institution",
221 : "Borror Laboratory of Bioacoustics",
222 : "Leibniz Institute of Plant Genetics and Crop Plant Research (IPK)",
223 : "Index Fungorum",
224 : "Centre de Ressources Biologiques de l Institut Pasteur",
225 : "UCLA-Dickey Collection (UCLA-Dickey)",
226 : "ECAT Checklists",
229 : "Carnegie Museums",
230 : "ACOI - Coimbra Collection of Algae - University of Coimbra",
231 : "Georgia Museum of Natural History (GMNH)",
232 : "USDA-ARS Bee Biology and Systematics Laboratory",
233 : "Hungarian Natural History Museum",
234 : "Delaware Museum of Natural History",
235 : "Raffles Museum of Biodiversity Research",
236 : "Staatliches Museum für Naturkunde Stuttgart",
237 : "UC Riverside Entomology Collection",
238 : "York University",
239 : "Hannam University Natural History Museum",
240 : "University Museums of Norway (MUSIT)",
241 : "Plazi.org taxonomic treatments database",
242 : "Centro de Referência em Informação Ambiental",
243 : "University of Białystok, Institute of Biology",
244 : "University of Opole, Dept. of Biosystematics",
245 : "University of Oxford",
246 : "Association for Nature WOLF",
247 : "Wrocław University, Fac. Natural Sciences",
248 : "Korean Aquatic Plant Resources and Information Bank",
249 : "Chengdu Institute of Biology, Chinese Academy of Science",
250 : "Walter Reed Biosystematics Unit, Smithsonian Institution",
251 : "Chungnam University Natural History Museum",
252 : "Facultad de Ciencias, UNAM",
253 : "Oklahoma Vascular Plants Database Provider",
254 : "Scientific Committee on Antarctic Research - Marine Biodiversity Information Network (SCAR-MarBIN)",
255 : "SPN - Service du Patrimoine naturel, Muséum national d'Histoire naturelle, Paris",
256 : "International Centre for Insect Physiology and Ecology",
257 : "Scientific Research Centre of the Slovenian Academy of Sciences and Arts, Institute of Biology",
258 : "Slovenian Forestry Institute",
259 : "Micoteca da Universidade do Minho",
260 : "University of Mons-Hainaut",
261 : "ECOCEAN",
262 : "Instituto de Investigación de Recursos Biológicos Alexander von Humboldt",
263 : "Collection Française de Bactéries Phytopathogènes - CFBP",
264 : "Centre International de Ressources Microbiennes-Levures",
265 : "GEO-Tag der Artenvielfalt",
266 : "University of Connecticut",
267 : "TanBIF",
268 : "Colorado State University Herbarium (CSU)",
269 : "University of Arizona Herbarium",
270 : "University of Nebraska - Lincoln",
271 : "Musée Zoologique de la Ville de Strasbourg",
272 : "Société Botanique de Franche-Comté/Conservatoire Botanique National de Franche-Comté",
273 : "Norwegian Institute for Nature Research",
274 : "Discover Life",
275 : "Charles R. Conner Museum",
276 : "Floraine",
277 : "Senckenberg - CeDAMar Provider",
278 : "Instituto de Investigação Científica Tropical",
279 : "Warsaw University of Life Sciences, Fac. Forestry, Dept. of Forest Protection and Ecology",
280 : "Geocollections of Estonia",
281 : "Louisiana State University Herbarium",
282 : "Arbeitsgemeinschaft Heuschrecken Österreichs",
283 : "TELDAP",
284 : "Texas A&M University Insect Collection",
285 : "Conservation International",
286 : "Tulane University Museum of Natural History",
287 : "Halla Arboretum",
288 : "Korea Institutie of Water and Enviroment",
289 : "Kunsan Passage Bird Research Institute",
290 : "Wooseokheon Natural History Museum",
291 : "North Carolina State Museum of Natural Sciences",
292 : "Duke University Herbarium",
293 : "Musée national d'histoire naturelle Luxembourg",
294 : "University of Malaga",
295 : "IFREMER - French Research Institute for Exploitation of the Sea",
296 : "Gyeongsangnam-do forest environment Research Institute",
297 : "Ohio State University Acarology Collection",
298 : "Queen Mary University of London",
299 : "University of Hawaii",
300 : "ZooKeys",
301 : "Endangered Wildlife Trust",
302 : "Bioresource Collection and Research Center (BCRC)",
303 : "Royal Botanic Garden Edinburgh",
304 : "University of Arizona Museum of Natural History",
305 : "American Museum of Natural History",
306 : "National Inventory of Swiss bryophytes",
307 : "Observatoire Océanologique de Banyuls sur mer",
308 : "Alabama Museum of Natural History",
309 : "Auburn University Museum DiGIR Provider",
310 : "University of Louisiana at Monroe",
311 : "Milwaukee Public Museum",
312 : "New Mexico Museum of Natural History and Science",
313 : "Redpath Museum, McGill University",
314 : "Texas A&M University Biodiversity Research and Teaching Collections",
315 : "University of Nevada, Reno",
316 : "MACOI - Portuguese Seaweeds",
317 : "University Museum of Zoology Cambridge",
318 : "Arizona State University, Global Institute for Sustainability",
319 : "Consortium of California Herbaria",
320 : "Ireland",
321 : "Ireland",
322 : "National Biodiversity Data Centre",
324 : "Wildlife Conservation Society - Madagascar Program (WCS - Mad)",
325 : "Taiwan Forestry Research Institute",
326 : "Administración de Parques Nacionales, Argentina",
327 : "Herbier National de Mauritanie",
328 : "Wildlife Institute of India",
329 : "INRA Antilles-Guyane",
330 : "Tela-Botanica",
331 : "Netherlands Centre for Biodiversity Naturalis, section National Herbarium of the Netherlands",
332 : "BeBIF Provider",
333 : "Birdlife Finland",
334 : "The Norwegian Biodiversity Information Centre (NBIC)",
335 : "Birdlife Finland",
336 : "Illinois State University",
337 : "Birdlife Finland",
338 : "Birdlife Finland",
339 : "Crop Research Institute (CRI)",
340 : "GBIF-Sweden",
341 : "Department of Natural Resources, Environment, The Arts and Sport, Northern Territory of Australia",
342 : "Birds Australia",
343 : "Eremaea",
344 : "South Australia, Department of Environment and Natural Resources",
345 : "South Australia, Department of Environment and Natural Resources",
346 : "NSW Department of Environment, Climate Change, and Water representing the State of New South Wales",
347 : "Centre d'estudis de la neu i de la muntanya d'Andorra (CENMA), Institut d'Estudis Andorrans",
348 : "IRD - Institute of Research for Development",
349 : "University of Eastern Finland",
350 : "Université de Lomé",
351 : "Centre National pour le Développement Rural - Madagascar (FOFIFA)",
352 : "Parc Botanique et Zoologique de Tsimbazaza (P.B.Z.T.)",
353 : "Zoological Institute, Russian Academy of Sciences, St. Petersburg",
354 : "University of Oulu",
355 : "Ecole de Faune de Garoua",
356 : "ArtDatabanken",
357 : "IRAD Cameroun (Institut de Recherche Agronomique pour le Développement Cameroun)",
358 : "Universidad Nacional de San Agustín (Herbarium Areqvipense HUSA)",
359 : "University of Arkansas Collections Facility, UAFMC",
360 : "Sir Alister Hardy Foundation for Ocean Science (SAHFOS)",
361 : "Kokkola Nature Collection Kieppi",
362 : "United States Geological Survey",
363 : "Centre d'Observation de Surveillance et d'Information Environnementales (COSIE)",
364 : "Research Institute for Nature and Forest (INBO)",
365 : "Université d'Abomey-Calavi, Faculté des Sciences Agronomiques",
366 : "Museum of Natural and Cultural History - University of Oregon",
488 : "Department of Bioscience, Aarhus University",
489 : "Department of Veterinary Disease Biology, University of Copenhagen",
490 : "Forest & Landscape, University of Copenhagen",
491 : "Danske-Dyr, Condidact",
492 : "Zoological Museum, Natural History Museum of Denmark",
493 : "HabitatVision",
494 : "MycoKey, Denmark",
495 : "Copenhagen Bird Ringing Centre, Zoological Museum, Natural History Museum of Denmark",
496 : "Danish Ornithological Society",
497 : "Danish Mycological Society",
498 : "Danish Nature Agency",
499 : "Lepidopterological Society, Denmark",
500 : "Danish Centre for Environment and Energy, Aarhus University",
501 : "Ocean Biogeographic Information System (OBIS Australia)",
502 : "Emporia State University",
503 : "Universidad del Valle",
504 : "Musee de la Valle",
505 : "McGill University",
506 : "Universidad de La Salle",
507 : "Fraunhofer-Institute for Biomedical Engineering IBMT - Group Extremophile Research and Biobank CCCryo",
508 : "Universidad de Antioquia",
509 : "Universidad de los Andes",
510 : "Universidad de Nariño",
511 : "Museo Entomológico de León – MEL",
512 : "Ernst-Moritz-Arndt-Universitaet",
513 : "Naturhistorisches Museum Mainz",
514 : "Staatliches Museum für Naturkunde Karlsruhe",
515 : "Asilweb - Atlas der Raubfliegen Deutschlands",
516 : "Freie Universität Berlin",
517 : "Instituto de Investigaciones Ambientales del Pacifico John Von Neumann (IIAP)",
518 : "Instituto Amazónico de Investigaciones Científicas Sinchi",
519 : "Universidad del Magdalena",
520 : "University of Amsterdam / IBED",
521 : "Fundación Estación Biológica Guayacanal",
522 : "University of Alaska Museum of the North",
523 : "Herbarium de la Réserve de l’INERA-Luki",
524 : "Herbarium du CRSN-Lwiro",
525 : "Jardín Botánico del Quindío",
526 : "University of Wyoming Museum of Vertebrates",
527 : "Moore Laboratory of Zoology",
528 : "Denver Museum of Nature & Science",
529 : "Perot Museum of Nature and Science",
530 : "Western New Mexico University",
531 : "Georgia Southern University",
532 : "Universidad Central de Venezuela",
533 : "Reserva Natural de la Sociedad Civil Tenasucá de Pedro Palo",
534 : "Oleoducto Bicentenario",
535 : "University of California Museum of Paleontology",
536 : "Asociación para el estudio y conservación de las aves acuáticas en Colombia",
537 : "Corporación Autónoma Regional Para la Defensa de la Meseta de Bucaramanga",
538 : "Lund Botanical Museum (LD)",
539 : "International Fossil Shell Museum",
540 : "Reptile, Amphibian and Fish Conservation Netherlands (RAVON)",
541 : "Aranzadi Science Society",
542 : "Institute of Marine Sciences (ICM-CSIC)",
543 : "School of Forestry Engineering. Technical University of Madrid",
544 : "CeDoc of Plant Biodiversity (CeDocBIV), Univ. Barcelona",
545 : "Spanish National Museum of Natural Sciences (CSIC)",
546 : "Real Jardín Botánico (CSIC)",
547 : "Dep. of Plant Biology (Botany), Faculty of Pharmacy, Univ. Salamanca",
548 : "Department of Plant Biology II. Fac. of Pharmacy. Univ. Complutense de Madrid",
549 : "Department of Organisms and Systems Biology. University of Oviedo",
550 : "Dep. Zoology and Animal Cellular Biology, Fac. Sciences, Univ. País Vasco",
551 : "Zoologic Audio, National Museum of Natural Science",
552 : "Pyrinean Institute of Ecology (CSIC)",
553 : "Mediterranean Institute for Advanced Studies (CSIC-UIB)",
554 : "Botany laboratory. Plant Biology and Ecology Department. Univ. País Vasco",
555 : "Centre for Advanced Studies of Blanes, CSIC",
556 : "Department of Animal Biology, Faculty of Biology, University of La Laguna",
557 : "Zoological collections of the University of Leon (CZULE), University of Leon",
558 : "Experimental Station of Arid Areas (CSIC)",
559 : "Dep. of Plant Biology (Botany), Fac. of Pharmacy, Univ. La Laguna",
560 : "Public University of Navarre Herbarium service, Dept. of Natural Environment Sciences, University of Navarre",
561 : "Biological Station of Doñana (CSIC)",
562 : "Natural Sciences Museum of Barcelona",
563 : "CIBIO (Institute of Biodiversity), Univ. Alicante",
564 : "Ministry of Agriculture, Food and Environment",
565 : "Athlantic Botanical Garden",
566 : "Sóller Botanical Garden Foundation",
567 : "Department of Botany, Faculty of Biological Sciences, University of Valencia",
568 : "Dept. of Botany, Ecology and Plant physiology, University of Cordoba",
569 : "Directorate General of Environment and Water, Dept. of Rural Development and Environment, Government of Navarre",
570 : "Botanical Garden, University of Valencia",
571 : "Cavanilles Institute of Biodiversity and Evolutionary Biology, University of Valencia",
572 : "Environmental Information Network of Andalusia (REDIAM), Regional Ministry for the Environment, Regional Government of Andalusia",
573 : "Botanical Institute of Barcelona (CSIC - Ayuntamiento de Barcelona)",
574 : "Carl Faust Foundation",
575 : "Natural History Museum of Valencia",
576 : "Natural Sciences Museum of Tenerife",
577 : "Faculty of Biology, University of Barcelona",
578 : "Dep. Plant Biology, E.T.S.I. Agronomy, Univ. Politécnica de Madrid",
579 : "Animal Biology Section, Zoology and Phisic Antropology Department, Murcia University",
580 : "AZTI-Tecnalia",
581 : "Flora Mycologica Iberica Project (FMI)",
582 : "Dep. Biology, Univ. Autónoma de Madrid",
583 : "La Orden Estate, Dep. Forest Production and Grasslands, Agriculture and Environment Office, Regional Government of Extremadura",
584 : "Dep. of Plant Biology and Ecology, Univ. Seville",
585 : "Botanical Garden of Córdoba",
586 : "Association for the Integral Development of the Manchuela Conquense area",
587 : "Herbarium LEB Jaime Andrés Rodríguez. Faculty of Sciences, University of León",
588 : "Dept. of Zoology, Faculty of Science, University of Granada",
589 : "Department of Plant Biology. Faculty of Biological Sciences. Univ. Murcia",
590 : "Dep. Environmental Sciences, Faculty of Sciences, Univ. Girona",
591 : "Institution for the Study, Management and Recovery of the Ecosystems of Lerida (EGRELL)",
592 : "Dept. Research and Forest Experiences of Valonsadero, Regional Government of Castile and Leon",
593 : "Biodiversity data bank of Generalitat Valenciana",
594 : "Dept. of Vegetal Biology and Ecology, Faculty of Experimental Science, University of Almeria",
595 : "National Institute for Public Health and the Environment (RIVM)",
596 : "Instituto de Pesquisas Jardim Botanico do Rio de Janeiro",
597 : "Nomok Nature Observatory",
598 : "Korea Institute of Environmental Ecology",
599 : "Upo Wetland Ecological Park",
600 : "Korean Wild Bird Society",
601 : "Mongolia Natural History Museum",
602 : "Cornell Lab of Ornithology",
603 : "Oceanographic Center of Malaga, Spanish Institute of Oceanography (IEO)",
604 : "Essig Museum of Entomology",
605 : "Royal Society for the Protection of Birds",
606 : "HanBat Botanical Garden",
607 : "Korea Botanic Garden",
608 : "Yeongwol Cave Ecological Museum",
609 : "Zoologische Staatssammlung München/Staatliche Naturwissenschaftliche Sammlungen Bayerns",
367 : "ECOCEAN USA",
368 : "ECAT development publisher",
369 : "Enciclopedia de la Flora Chilena",
370 : "Innvista",
371 : "International Compositae Alliance",
372 : "International Union for Conservation of Nature",
373 : "Royal Belgian Institute of Natural Sciences",
374 : "Species File Group",
375 : "Université de Montréal Biodiversity Centre",
376 : "New York State Museum (NYSM)",
377 : "MNHN - Museum national d'Histoire naturelle",
378 : "Association des Naturalistes de la Vallée du Loing et du massif de Fontainebleau",
379 : "Herbier de l'université de Limoges",
380 : "Natural History Museum, University of Tartu",
381 : "Western Australian Herbarium",
382 : "Museum d'Histoire Naturelle of Aix-en-Provence, Herbarium",
383 : "Institut Scientifique, Mohamed V University",
384 : "Lycée Félix Esclangon. Comité du Patrimoine Manosquin. Herbier G. Fenoul",
385 : "Société des Sciences Naturelles et Mathématiques de Cherbourg",
386 : "Herbiers Universitaires de Clermont-Ferrand",
387 : "Herbarium specimens of Bamboo collection Prafrance Générargue (BAMBO)",
388 : "Conservatoire Botanique National Alpin",
389 : "Herbier des conservatoires et jardins botaniques de Nancy",
390 : "Herbarium of Université de Montpellier 2, Institut de Botanique",
391 : "Muséum d'Histoire Naturelle d'Autun, Herbarium",
392 : "Kuopio Natural History Museum",
393 : "Wildlife Sightings",
394 : "Muséum d'Histoire Naturelle de Bourges",
395 : "Cincinnati Museum Center",
396 : "Musée cantonal de zoologie, Lausanne - MZL",
397 : "Ohio State University Fish Division",
398 : "Lund Museum of Zoology",
399 : "Isagen",
400 : "International Centre for Integrated Mountain Development",
401 : "Muséum d'Histoire Naturelle de Nice",
402 : "Naturkundemuseum im Ottoneum Kassel",
403 : "Universität Salzburg",
404 : "Department of National Parks, Wildlife and Plant Conservation",
405 : "Corantioquia",
406 : "Red Nacional de Observadores de Aves de Colombia",
407 : "iNaturalist.org",
408 : "Acadia University",
409 : "Organization for Tropical Studies",
410 : "Humboldt State University",
411 : "Museum Henri-Lecoq de Clermont-Ferrand",
412 : "University of British Columbia",
413 : "BRC Forage and turf, INRA Lusignan",
414 : "Sistema de Información sobre Biodiversidad de Colombia - SiB",
415 : "Instituto Nacional de Tecnologia Agropecuaria - Instituto de Recursos Biológicos",
416 : "Antarctic Biodiversity Information Facility (ANTABIF)",
417 : "PhytoKeys",
418 : "European Forest Institute",
419 : "University of Puerto Rico - Invertebrate Collection",
420 : "University of Toronto Mississauga",
421 : "Queensland Museum",
422 : "Queensland Department of Environment and Resource Management",
423 : "Australian Institute of Marine Science",
424 : "European Molecular Biology Laboratory Australia",
425 : "Department of Environment and Conservation, Western Australia",
426 : "Chevron Australia",
427 : "Australian Museum",
428 : "Office of Environment and Heritage, Department of Premier and Cabinet representing the State of New South Wales",
429 : "Canberra Ornithologists Group",
430 : "Museum and Art Gallery of the Northern Territory",
431 : "Kangawalla Pastoral Co Pty Ltd",
432 : "Museum Victoria",
433 : "Citizen Science - ALA Website",
434 : "Queen Victoria Museum and Art Gallery",
435 : "South Australian Museum",
436 : "Tasmanian Museum and Art Gallery",
437 : "Western Australian Museum",
438 : "University of Dar es Salaam, Department of Zoology",
439 : "Tall Timbers Research Station and Land Conservancy",
440 : "Field Study Group of the Dutch Mammal Society",
441 : "Dutch Mammal Society",
442 : "Society for the Management of Electronic Biodiversity Data",
443 : "Senckenberg Museum für Naturkunde Görlitz",
444 : "Global Mountain Biodiversity Assessment - GMBA",
445 : "Muséum d'histoire naturelle de la Ville de Genève - MHNG",
446 : "Jardin Botanique de Montréal",
447 : "Mountain Invasion Research Network - MIREN_ETH",
448 : "Biodiversitäts-Monitoring Schweiz - BDMCH",
449 : "Naturhistorisches Museum Basel - NMB",
450 : "Centre Suisse de Cartographie de la Faune - CSCF",
451 : "Naturhistorisches Museum Bern - NMBE",
452 : "Natural History Museum Maastricht",
453 : "Conservatoire et Jardin botaniques de la Ville de Genève - G",
454 : "Ghana Biodiversity Information Facility (GhaBIF)",
455 : "Museum George Sand et de la Vallee Noire",
456 : "Agricultural Research Council",
457 : "Australian Coral Ecoregions",
458 : "Bioinformatics Research Group, Biological Science Department, Faculty of Bioscience & Bioengineering",
459 : "University of Manitoba",
460 : "Université Laval",
461 : "Gothenburg Natural History Museum (GNM)",
462 : "Swedish Museum of Natural History",
463 : "Ministerio de Ciencia, Tecnología e Innovación Productiva",
464 : "Natural History Museum Rotterdam",
465 : "AIT Austrian Institute of Technology GmbH",
466 : "Encyclopedia of Life (EOL)",
467 : "Landcare Research",
468 : "The National Institute of Water and Atmospheric Research (NIWA)",
469 : "University of Dar es Salaam, Department of Botany",
470 : "Fundación Alma",
471 : "Asociación de Becarios del Casanare - ABC",
472 : "Dutch Foundation for Applied Water Research",
473 : "Agoralogie",
474 : "Royal Dutch Angling Association",
475 : "Naturalis Biodiversity Center",
476 : "Estonian Environment Information Centre",
477 : "Asociación Selva",
478 : "Musee Vert, Museum d'Histoire Naturelle du Mans",
479 : "Nijmegen Natural History Museum",
480 : "Botanical Garden & Museum, Natural History Museum of Denmark",
481 : "Digitarium",
482 : "Federación Nacional de Cafeteros de Colombia",
483 : "Fundación Pro-Sierra Nevada de Santa Marta",
484 : "Arboretum, University of Copenhagen",
485 : "Atlas survey of the Butterflies of Denmark",
486 : "Department of Biology, University of Copenhagen",
487 : "Center for Macroecology, Evolution and Climate, University of Copenhagen",
610 : "Reserva Natural de la Sociedad Civil Nuestro Sueño",
611 : "Universidad Nacional de Colombia",
612 : "YOLUKA ONG, Fundación de Investigación en Biodiversidad y Conservación"
}
PROVIDER_MAPPINGS = {
"e196c8d6-f795-463c-80c4-310dd14ee50b" : 1,
"f4ce3c03-7b38-445e-86e6-5f6b04b649d4" : 2,
"72da316f-62b4-451e-9238-cf72289e6372" : 3,
"f9b67ad0-9c9b-11d9-b9db-b8a03c50a862" : 4,
"af00f590-cb78-11d9-b772-b8a03c50a862" : 5,
"3693ff90-4c16-11d8-b290-b8a03c50a862" : 6,
"8692e460-6783-11d8-b9b2-b8a03c50a862" : 7,
"14c5f0f0-1dab-11d9-8435-b8a03c50a862" : 8,
"06bbd2a0-671b-11d8-b9b2-b8a03c50a862" : 9,
"95f1c5d0-8996-11d9-a962-b8a03c50a862" : 10,
"7b4f2f30-a456-11d9-8049-b8a03c50a862" : 11,
"37E82B90-1E21-11DE-AB90-F72009D2669B" : 12,
"0dc9c7a0-b5c6-11d9-a0b8-b8a03c50a862" : 13,
"90cc71b0-055b-11d8-b84e-b8a03c50a862" : 14,
"85f9aba0-af9d-11db-ad75-b8a03c50a862" : 15,
"2d6e0e40-2aee-11d8-aa2d-b8a03c50a862" : 16,
"497688a0-59d6-11db-893e-b8a03c50a862" : 17,
"7b3aa470-605f-11d8-b9b2-b8a03c50a862" : 18,
"bfb257e0-b415-11da-967e-b8a03c50a862" : 19,
"c4cbb860-e9c7-11da-8113-b8a03c50a862" : 20,
"57254bd0-8256-11d8-b7ed-b8a03c50a862" : 21,
"e2475770-5046-11da-8af1-b8a03c50a862" : 22,
"94458240-4e38-11db-985b-b8a03c50a862" : 23,
"43cab2b0-0653-11d9-acb2-b8a03c50a862" : 24,
"06fcbbf0-0562-11d8-b851-b8a03c50a862" : 25,
"29817330-5d61-11d8-b9b2-b8a03c50a862" : 26,
"66522820-055c-11d8-b84e-b8a03c50a862" : 27,
"24266ce0-38be-11d9-8439-b8a03c50a862" : 28,
"1b02b980-027a-11da-87cd-b8a03c50a862" : 29,
"09871350-0a23-11d8-b854-b8a03c50a862" : 30,
"a41250f0-7c3e-11d8-a19c-b8a03c50a862" : 31,
"6a430540-7f09-11d8-a1a0-b8a03c50a862" : 32,
"657b7bf0-766b-11d9-9f77-b8a03c50a862" : 33,
"efc2ae40-9880-11da-89b4-b8a03c50a862" : 34,
"229d4040-7e98-11d9-add9-b8a03c50a862" : 35,
"b62390e0-0e35-11d9-8431-b8a03c50a862" : 36,
"0f56ac70-81c1-11d9-b6d0-b8a03c50a862" : 37,
"6bcc0290-6e76-11db-bcd5-b8a03c50a862" : 38,
"cf9ceb80-9f3d-11da-b791-b8a03c50a862" : 39,
"778b4aa0-7067-11d9-b13a-b8a03c50a862" : 40,
"aeb25010-c36e-11d8-84ea-b8a03c50a862" : 41,
"d48116e0-281e-11d9-8436-b8a03c50a862" : 42,
"463555b0-d081-11da-ae8f-b8a03c50a862" : 43,
"ca2d0ad0-45e0-11d8-aa2e-b8a03c50a862" : 44,
"ec469dc0-9530-11d9-8902-b8a03c50a862" : 45,
"79e0b850-b280-11db-b710-b8a03c50a862" : 46,
"610414e0-8040-11d9-8294-b8a03c50a862" : 47,
"bd561840-efdc-11d9-a329-b8a03c50a862" : 48,
"7b8aff00-a9f8-11d8-944b-b8a03c50a862" : 49,
"e5585950-488e-11db-a1c2-b8a03c50a862" : 50,
"192a9ab0-5565-11d8-b290-b8a03c50a862" : 51,
"8483a1f0-1032-11db-ae00-b8a03c50a862" : 52,
"21074610-6c06-11db-a819-b8a03c50a862" : 53,
"63c6de50-9274-11da-8901-b8a03c50a862" : 54,
"afa44940-5a60-11d9-bbdd-b8a03c50a862" : 55,
"2760d7b0-767c-11d8-a198-b8a03c50a862" : 56,
"6c4a0bb0-2a4d-11d8-aa2d-b8a03c50a862" : 57,
"d68c4230-a271-11d8-a9b3-b8a03c50a862" : 58,
"23e067c0-a255-11da-beae-b8a03c50a862" : 59,
"29e30490-219f-11d8-a2da-b8a03c50a862" : 60,
"d8bca410-a773-11da-95cd-b8a03c50a862" : 61,
"7a070ba0-bafb-11d9-8e53-b8a03c50a862" : 62,
"605e7170-1123-11d9-8433-b8a03c50a862" : 63,
"db7c0100-1a6f-11da-88d4-b8a03c50a862" : 64,
"23b946e0-4e2d-11d9-bbdd-b8a03c50a862" : 65,
"def87a70-0837-11d9-acb2-b8a03c50a862" : 66,
"2783f1f0-8ef2-11db-af3b-b8a03c50a862" : 67,
"f739aef0-8a5b-11d9-bc8d-b8a03c50a862" : 68,
"8e1a97a0-3ca8-11d9-8439-b8a03c50a862" : 69,
"da182250-8257-11d8-b7ed-b8a03c50a862" : 70,
"5302de30-ac23-11d9-bc6f-b8a03c50a862" : 71,
"4c4920d0-61a5-11da-9136-b8a03c50a862" : 72,
"12308e00-4b80-11da-8809-b8a03c50a862" : 73,
"24c3dc90-822b-11d8-b7ed-b8a03c50a862" : 74,
"fd0038d0-3e7b-11d9-8439-b8a03c50a862" : 75,
"fccc6740-bbb1-11d9-839f-b8a03c50a862" : 76,
"81b4aef0-af9d-11db-ad75-b8a03c50a862" : 77,
"974a24e0-4f8e-11da-a57c-b8a03c50a862" : 78,
"e2ce7dc0-a8b5-11d8-9534-b8a03c50a862" : 79,
"f7259790-9300-11da-8596-b8a03c50a862" : 80,
"8f01b340-b2f3-11d8-a8af-b8a03c50a862" : 81,
"0e0fc0f0-828e-11d8-b7ed-b8a03c50a862" : 82,
"0fbd3740-5703-11dd-b927-b8a03c50a862" : 83,
"f121e450-78ba-11d8-a19c-b8a03c50a862" : 84,
"a8ab3400-6739-11da-8f8a-b8a03c50a862" : 85,
"3fa69630-8256-11d8-b7ed-b8a03c50a862" : 86,
"97a3ce10-3e54-11d9-8439-b8a03c50a862" : 87,
"b6f05cf0-43ce-11d9-8439-b8a03c50a862" : 88,
"3c1d3550-c688-11da-bc3f-b8a03c50a862" : 89,
"5c7a5c20-1bd0-11d8-a2da-b8a03c50a862" : 90,
"e3e307d0-1637-11da-a5ec-b8a03c50a862" : 91,
"a3c228d0-3110-11db-abb8-b8a03c50a862" : 92,
"4f160e00-329d-11d9-8439-b8a03c50a862" : 94,
"f56c5f80-055c-11d8-b84e-b8a03c50a862" : 95,
"01a3b990-c4b6-11da-98ad-b8a03c50a862" : 96,
"ff3a2000-b2ac-11db-b710-b8a03c50a862" : 97,
"43587ad0-bafb-11d9-8e53-b8a03c50a862" : 98,
"569a9110-cb1d-11d8-bf68-b8a03c50a862" : 99,
"a98e4f40-fa3b-11d8-a1a1-b8a03c50a862" : 100,
"ce3cfcc0-6d56-11db-a819-b8a03c50a862" : 101,
"0ee09980-b280-11db-b70f-b8a03c50a862" : 102,
"9c6afd40-4d63-11db-9eae-b8a03c50a862" : 103,
"493fe050-055d-11d8-b84f-b8a03c50a862" : 104,
"c361e830-181e-11da-9544-b8a03c50a862" : 105,
"2e66c910-6bbd-11da-8cf2-b8a03c50a862" : 106,
"05c249d0-dfa0-11d8-b22e-b8a03c50a862" : 107,
"c96dd940-165d-11da-a5ec-b8a03c50a862" : 108,
"41d6acb0-48b5-11db-a1c2-b8a03c50a862" : 109,
"311e2740-f15a-11d8-b22f-b8a03c50a862" : 110,
"90fd6680-349f-11d8-aa2d-b8a03c50a862" : 111,
"422412e0-b280-11db-b710-b8a03c50a862" : 112,
"fd70f580-4ce2-11db-b80e-b8a03c50a862" : 113,
"2e4af590-37d0-11d9-8439-b8a03c50a862" : 114,
"4f48f140-38a5-11d9-8439-b8a03c50a862" : 115,
"036cdc50-e7f5-11d9-9a0e-b8a03c50a862" : 116,
"e147be40-75eb-11d9-b257-b8a03c50a862" : 117,
"10980920-6dad-11da-ad13-b8a03c50a862" : 118,
"4bfa9210-a4ec-11d9-8049-b8a03c50a862" : 119,
"b4640710-8e03-11d8-b956-b8a03c50a862" : 120,
"218534d0-925e-11da-8900-b8a03c50a862" : 121,
"939ed180-aa7b-11db-8edf-b8a03c50a862" : 122,
"acac73b0-055d-11d8-b84f-b8a03c50a862" : 123,
"8edbbde0-055e-11d8-b850-b8a03c50a862" : 124,
"ff6a9550-c5c3-11d9-b4b2-b8a03c50a862" : 125,
"c4254a00-cb2b-11d8-bf68-b8a03c50a862" : 126,
"c3367060-856b-11d9-8486-b8a03c50a862" : 127,
"0f727d00-b29a-11da-94a7-b8a03c50a862" : 128,
"c5f7ef70-e233-11d9-a4d6-b8a03c50a862" : 130,
"4fd82480-ea1c-11da-8db4-b8a03c50a862" : 131,
"eb07eeb0-3db9-11d9-8439-b8a03c50a862" : 132,
"19456090-b49a-11d8-abeb-b8a03c50a862" : 133,
"f314b0b0-e3dc-11d9-8d81-b8a03c50a862" : 134,
"4e11d750-bafb-11d9-8e53-b8a03c50a862" : 135,
"aa40a1e0-818b-11d9-b6d0-b8a03c50a862" : 136,
"9c7737c0-359c-11da-9a82-b8a03c50a862" : 137,
"6ea466a0-925e-11da-8900-b8a03c50a862" : 138,
"cace8d10-2646-11d8-a2da-b8a03c50a862" : 139,
"038205f0-dcaf-11d8-b22e-b8a03c50a862" : 140,
"ac5e8480-3714-11da-bc2e-b8a03c50a862" : 141,
"839f8ed0-bafb-11d9-8e53-b8a03c50a862" : 142,
"602f0aa0-1f3e-11da-9ae9-b8a03c50a862" : 143,
"2c7d9b50-a845-11da-ae09-b8a03c50a862" : 144,
"d5778510-eb28-11da-8629-b8a03c50a862" : 145,
"3b331240-abea-11d9-a96c-b8a03c50a862" : 146,
"575c52b0-a742-11db-a6ff-b8a03c50a862" : 147,
"061b4f20-f241-11da-a328-b8a03c50a862" : 148,
"ff418020-1d67-11d9-8435-b8a03c50a862" : 149,
"609765e0-4ce8-11db-b80e-b8a03c50a862" : 150,
"8a471700-4ce8-11db-b80e-b8a03c50a862" : 151,
"78678140-1815-11da-9544-b8a03c50a862" : 152,
"18377e40-63a3-11da-b5f3-b8a03c50a862" : 153,
"e73a3ad0-98e7-11d8-a9b2-b8a03c50a862" : 154,
"c76cf030-2a95-11da-9cc1-b8a03c50a862" : 155,
"a2936bd0-b280-11db-b710-b8a03c50a862" : 156,
"79914a60-cb99-11d9-b772-b8a03c50a862" : 157,
"0674aea0-a7e1-11d8-9534-b8a03c50a862" : 158,
"32ceedc0-bafb-11d9-8e53-b8a03c50a862" : 159,
"976af1e0-ca25-11da-bfed-b8a03c50a862" : 160,
"021121c0-f040-11d8-b22f-b8a03c50a862" : 161,
"12b1df00-3f75-11d8-aa2d-b8a03c50a862" : 162,
"3c6ad080-bdbe-11da-ac59-b8a03c50a862" : 163,
"299958e0-4c06-11d8-b290-b8a03c50a862" : 164,
"988a0d00-34b5-11da-9a7b-b8a03c50a862" : 165,
"c304b410-dee6-11d9-b1d4-b8a03c50a862" : 166,
"ae447c50-b8a8-11d8-92a4-b8a03c50a862" : 167,
"B9C5F740-34D9-11DE-BAF5-E00D96B185EF" : 168,
"4C415E40-1E21-11DE-9E40-A0D6ECEBB8BF" : 169,
"f9912a00-84a9-11d9-8952-b8a03c50a862" : 170,
"f0b6a090-a4c6-11d8-9534-b8a03c50a862" : 171,
"07f617d0-c688-11d8-bf62-b8a03c50a862" : 172,
"b459e790-0d3c-11d9-8431-b8a03c50a862" : 173,
"59c81290-df0e-11d8-b22e-b8a03c50a862" : 175,
"79324e30-d4c7-11d8-bf69-b8a03c50a862" : 176,
"3988de20-0560-11d8-b851-b8a03c50a862" : 177,
"03cb9700-6cee-11d8-922d-b8a03c50a862" : 178,
"4cadac10-3e7b-11d9-8439-b8a03c50a862" : 179,
"814254c0-925e-11da-8900-b8a03c50a862" : 180,
"e3f07310-a215-11da-beae-b8a03c50a862" : 181,
"2f462930-4d96-11d8-b290-b8a03c50a862" : 182,
"6ea87510-0561-11d8-b851-b8a03c50a862" : 183,
"927f1e70-cf74-11d8-bf68-b8a03c50a862" : 184,
"2f390a80-56f3-11da-880b-b8a03c50a862" : 185,
"9b049850-d958-11da-9047-b8a03c50a862" : 186,
"4668d6a0-130d-11db-a7d3-b8a03c50a862" : 187,
"efcf5460-a06a-11d9-8ec2-b8a03c50a862" : 188,
"c654fe80-188b-11d9-8435-b8a03c50a862" : 189,
"d003dd60-94fa-11da-a99f-b8a03c50a862" : 190,
"52c8f620-1816-11da-9544-b8a03c50a862" : 191,
"5553cf70-8fa6-11da-956e-b8a03c50a862" : 192,
"3ee55840-1f1d-11d8-a2da-b8a03c50a862" : 193,
"58520c30-bafb-11d9-8e53-b8a03c50a862" : 194,
"64995bb0-bafb-11d9-8e53-b8a03c50a862" : 195,
"b554c320-0560-11d8-b851-b8a03c50a862" : 196,
"41cce8a0-925e-11da-8900-b8a03c50a862" : 197,
"58b5fc00-5ba3-11db-8a89-b8a03c50a862" : 198,
"83E105D0-1E21-11DE-85D0-FE7FA1DF8EE2" : 199,
"2d6267a0-0561-11d8-b851-b8a03c50a862" : 200,
"4c0e9f60-c489-11d8-bf60-b8a03c50a862" : 201,
"4b37eb40-10e6-11db-939a-b8a03c50a862" : 202,
"ac2273c0-0516-11db-a8cf-b8a03c50a862" : 203,
"04665d80-7e5f-11d8-a1a0-b8a03c50a862" : 204,
"80960f60-ad5e-11da-9c37-b8a03c50a862" : 205,
"8273e420-46f3-11d8-aa2e-b8a03c50a862" : 206,
"2e167bb0-4441-11db-9ba2-b8a03c50a862" : 207,
"6e1cad80-bdf5-11d8-84ea-b8a03c50a862" : 208,
"5353b120-c0bc-11db-b2dd-b8a03c50a862" : 209,
"7acb3000-c181-11db-adfe-b8a03c50a862" : 210,
"07e58d67-c090-4703-aada-3b88b10355f7" : 211,
"315127e0-d7bb-11db-9885-b8a03c50a862" : 212,
"ff90b050-c256-11db-b71b-b8a03c50a862" : 213,
"485ff490-e3b7-11db-9acc-b8a03c50a862" : 214,
"a6de8f30-d931-11db-973c-b8a03c50a862" : 215,
"8face560-fd52-11db-ace1-b8a03c50a862" : 219,
"bc092ff0-02e4-11dc-991f-b8a03c50a862" : 220,
"9b2452f0-1b68-11dc-8e60-b8a03c50a862" : 221,
"19a3a4e0-1908-11dc-a07b-b8a03c50a862" : 222,
"1ce482ab-14e3-48f0-8b70-b58b14625902" : 223,
"30d2bd90-12aa-11dc-80c8-b8a03c50a862" : 224,
"c0dc3c80-23f9-11dc-98d1-b8a03c50a862" : 225,
"\N" : 226,
"c1da06f0-580e-11dc-8a9e-b8a03c50a862" : 229,
"e92e0710-24c4-11dc-a625-b8a03c50a862" : 230,
"df5031f0-5813-11dc-8a9e-b8a03c50a862" : 231,
"1e26a630-7203-11dc-a0d8-b8a03c50a862" : 232,
"be11c6a0-7cf5-11dc-92cb-b8a03c50a862" : 233,
"a42867f0-813a-11dc-82e0-b8a03c50a862" : 234,
"f53a51c0-88cf-11dc-a32a-b8a03c50a862" : 235,
"99ea0c90-61e5-11dc-a64c-b8a03c50a862" : 236,
"2b831170-8265-11dc-9e45-b8a03c50a862" : 237,
"719e2ab0-91c8-11dc-8602-b8a03c50a862" : 238,
"02b15320-934e-11dc-8e7d-b8a03c50a862" : 239,
"da879c20-98e7-11dc-80b1-b8a03c50a862" : 240,
"7ce8aef0-9e92-11dc-8738-b8a03c50a862" : 241,
"8595cd50-87c0-11dc-bb35-b8a03c50a862" : 242,
"83af6510-a704-11dc-80be-b8a03c50a862" : 243,
"08106730-ae7d-11dc-b3c6-b8a03c50a862" : 244,
"bf722320-bea2-11dc-9113-b8a03c50a862" : 245,
"787e5bf0-bdcb-11dc-9a6c-b8a03c50a862" : 246,
"3890bfb0-b461-11dc-bc42-b8a03c50a862" : 247,
"822ae110-d90e-11dc-8e3b-b8a03c50a862" : 248,
"cb1987f0-8661-11dc-ace4-b8a03c50a862" : 249,
"5ab24120-de25-11dc-9a41-b8a03c50a862" : 250,
"7d1bc440-e60c-11dc-af9e-b8a03c50a862" : 251,
"49355de0-eb1f-11dc-ad06-b8a03c50a862" : 252,
"8ef49720-e6dc-11dc-8b77-b8a03c50a862" : 253,
"d0080730-e6ad-11dc-8b77-b8a03c50a862" : 254,
"1928bdf0-f5d2-11dc-8c12-b8a03c50a862" : 255,
"43c1d5e0-0a24-11dd-953d-b8a03c50a862" : 256,
"a5586d20-0f7c-11dd-9d45-b8a03c50a862" : 257,
"5796fe40-1516-11dd-9fa4-b8a03c50a862" : 258,
"30b6eaa0-21c9-11dd-9858-b8a03c50a862" : 259,
"d02b7f80-1b78-11dd-be3c-b8a03c50a862" : 260,
"5daa3840-20bc-11dd-bc28-b8a03c50a862" : 261,
"2a7e3080-28a9-11dd-97cd-b8a03c50a862" : 262,
"cbe8b310-360e-11dd-a88d-b8a03c50a862" : 263,
"8db8c070-3628-11dd-a88d-b8a03c50a862" : 264,
"ef69a030-3940-11dd-b168-b8a03c50a862" : 265,
"1ef55060-54da-11dd-9d47-b8a03c50a862" : 266,
"76905CA0-1E21-11DE-9CA0-CD203B273F5D" : 267,
"0da0bbb0-633f-11dd-9199-b8a03c50a862" : 268,
"60d24660-7054-11dd-b804-b8a03c50a862" : 269,
"1afe9ab0-5a80-11dd-aac6-b8a03c50a862" : 270,
"609d49d0-78fe-11dd-b871-b8a03c50a862" : 271,
"8fd192b0-734f-11dd-8fc0-b8a03c50a862" : 272,
"46fec380-8e1d-11dd-8679-b8a03c50a862" : 273,
"7ff14f90-946d-11dd-a03f-b8a03c50a862" : 274,
"7c7b6440-94d1-11dd-bdb1-b8a03c50a862" : 275,
"38222b00-9911-11dd-9689-b8a03c50a862" : 276,
"48490260-8fc0-11dd-be72-b8a03c50a862" : 277,
"9fdb5a90-a5a6-11dd-8d5f-b8a03c50a862" : 278,
"5bbd4d00-a75e-11dd-b1b2-b8a03c50a862" : 279,
"70488160-b003-11d8-a8af-b8a03c50a862" : 280,
"38a69420-bd2a-11dd-b15e-b8a03c50a862" : 281,
"723e80e0-bd38-11dd-b15e-b8a03c50a862" : 282,
"7050ce20-c75e-11dd-87ef-b8a03c50a862" : 283,
"352bdfe0-c57b-11dd-a8eb-b8a03c50a862" : 284,
"335880a0-e2e1-11dd-8102-b8a03c50a862" : 285,
"039f44e0-e712-11dd-9cd2-b8a03c50a862" : 286,
"193bb600-f1d3-11dd-b2d5-b8a03c50a862" : 287,
"4e0835d0-f1d2-11dd-b2d5-b8a03c50a862" : 288,
"34829730-f1d3-11dd-b2d5-b8a03c50a862" : 289,
"f5c46d20-f1d2-11dd-b2d5-b8a03c50a862" : 290,
"69e7f900-ed5b-11dd-ac7e-b8a03c50a862" : 291,
"ead371f0-f93d-11dd-af50-b8a03c50a862" : 292,
"75642970-f855-11dd-8235-b8a03c50a862" : 293,
"7fadb370-f921-11dd-af50-b8a03c50a862" : 294,
"61e40671-1a75-4bcd-9a0d-63515057d39d" : 295,
"f7023470-0211-11de-8f36-b8a03c50a862" : 296,
"0242b530-1a22-11de-ab70-b8a03c50a862" : 297,
"3c552c40-1fda-11de-a7f3-b8a03c50a862" : 298,
"58172c10-1dc5-11de-be11-b8a03c50a862" : 299,
"BB922300-7DDB-11DE-A300-90AC77AA923F" : 300,
"40ECE160-5362-11DE-A160-D8AA0EE066B0" : 301,
"6b4fc310-4452-11de-87a1-b8a03c50a862" : 302,
"98e934b0-5f31-11de-b67e-b8a03c50a862" : 303,
"84d451a0-644d-11de-b910-b8a03c50a862" : 304,
"ccd1ddc0-6c21-11de-8224-b8a03c50a862" : 305,
"6d8dec90-656a-11de-85b2-b8a03c50a862" : 306,
"3857ca00-7aaa-11de-927e-b8a03c50a862" : 307,
"045f6b80-812c-11de-86fd-b8a03c50a862" : 308,
"34cc6c50-812c-11de-86fe-b8a03c50a862" : 309,
"57324530-812c-11de-86fe-b8a03c50a862" : 310,
"845f3630-812c-11de-86fe-b8a03c50a862" : 311,
"ac1a4340-812c-11de-86fe-b8a03c50a862" : 312,
"d9207260-812c-11de-86fe-b8a03c50a862" : 313,
"2b8e48b0-812d-11de-86fe-b8a03c50a862" : 314,
"470365d0-812d-11de-86fe-b8a03c50a862" : 315,
"03e90890-8b28-11de-b647-b8a03c50a862" : 316,
"d9ccac00-9bc7-11de-a329-b8a03c50a862" : 317,
"edd92410-ac77-11de-90e0-b8a03c50a862" : 318,
"fb702600-a84e-11de-978d-b8a03c50a862" : 319,
"not-in-uddi-3027583" : 320,
"not-in-uddi-7424883" : 321,
"D2B97690-BFD6-11DE-B279-D52977ACE833" : 322,
"643FE8A0-BD8A-11DE-B279-8C1F280C453E" : 324,
"898ba450-1627-11df-bd84-b8a03c50a862" : 325,
"979dd240-16f7-11df-b5b3-b8a03c50a862" : 326,
"247e7360-5f30-11df-9ae1-b8a03c50a862" : 327,
"4a19e429-8f13-478e-86d7-93adb7534524" : 328,
"4bce8f23-20a5-48a7-b25a-83700caad2db" : 329,
"b2dbd210-90c2-11df-86a3-b8a03c50a862" : 330,
"ade229f5-9bd5-471d-946f-f34723b9222c" : 331,
"3d21bfa0-71b9-11d8-922f-b8a03c50a862" : 332,
"8f152af7-8ecb-42b0-8471-32841a75cb2d" : 333,
"d3978a37-635a-4ae3-bb85-7b4d41bc0b88" : 334,
"8f152af7-8ecb-42b0-8471-32841a75cb2d" : 335,
"2e4967ed-fd35-4d34-ae4d-e8731d366e97" : 336,
"8f152af7-8ecb-42b0-8471-32841a75cb2d" : 337,
"8f152af7-8ecb-42b0-8471-32841a75cb2d" : 338,
"417c3159-419c-4325-a257-c482371860c6" : 339,
"4e36d3e0-fd86-11d7-b84d-b8a03c50a862" : 340,
"24f26321-3960-4a16-a8e9-b3dfb401a9a4" : 341,
"75ed4c27-9997-41c4-81a4-675b4282ffad" : 342,
"633f217c-c007-48dc-86ed-f8fdae6fd0d8" : 343,
"526a57bd-769c-4cc9-94ae-1eb9bdfa5d6c" : 344,
"526a57bd-769c-4cc9-94ae-1eb9bdfa5d6c" : 345,
"09644437-7734-42d9-b759-c40c03d953f8" : 346,
"b4370195-ab86-42a1-822a-96809e3d0384" : 347,
"3b5161b7-9831-4444-9822-a60df4df107d" : 348,
"42fdc1e2-f7be-454f-9672-b8bd44e1ba31" : 349,
"d2583793-e5c1-41ae-83bd-86300d34e2cb" : 350,
"c0e25223-5b55-4e02-8556-178781709720" : 351,
"7bec6457-78d6-416f-a185-00ae45e1d9a0" : 352,
"c14b9ce2-9545-4376-8a3b-6741558c256a" : 353,
"d1146fd8-35d2-4264-a569-8f88cc29d0c8" : 354,
"01ea5fd7-42b3-42a0-aa34-2fe0a50167ef" : 355,
"b8323864-602a-4a7d-9127-bb903054e97d" : 356,
"a4e7734f-58a3-4833-b7bd-20bb470bce0f" : 357,
"28d17f8f-39f3-4041-9734-67c4f1e7693e" : 358,
"9a367b8c-22dd-402d-9161-d3c64c6d6a94" : 359,
"09b17aee-d3fb-48ca-a30b-303d671a8155" : 360,
"5d18805e-7810-4aa4-aad8-8447f236dcc5" : 361,
"c3ad790a-d426-4ac1-8e32-da61f81f0117" : 362,
"1c905e09-830d-43d6-bea7-32b9275364bf" : 363,
"1CD669D0-80EA-11DE-A9D0-F1765F95F18B" : 364,
"2afeb273-cbbc-4fcf-a10f-7baf271b62c5" : 365,
"0ba220e1-77cf-487a-8d91-494167240f5d" : 366,
"8e1a7b73-e746-4d20-a0ba-e647ce0f9784" : 488,
"a9e1c0c6-efd6-40b8-b03e-8919483704c2" : 489,
"6a87f324-407f-4f34-babf-2c717523b8af" : 490,
"f1f8a1e3-d00a-4f73-a9c0-afe7674ff948" : 491,
"456058db-f70b-4005-97ad-e08570cf0c56" : 492,
"f50d9732-d5f4-4573-9d04-8eba43d2c272" : 493,
"d8d42b62-311c-4841-b93a-009b0f729a50" : 494,
"d779fbe1-5e62-48cc-8de4-098fddf50cb1" : 495,
"8b734449-479a-4924-8f7a-9a2a64112f8f" : 496,
"ccc2e3ec-98ba-4e74-878d-7dcf0f57baba" : 497,
"977178c7-76fe-4c86-94f3-cf039ede5c3c" : 498,
"ab76e54b-a6cf-4024-9e29-30df1e2f6383" : 499,
"1a127d84-5508-4307-be4b-04e0b71eb113" : 500,
"2174414a-9b2a-4774-85f1-2a9c54c28ca9" : 501,
"9bcba4e4-a10d-4484-9c5e-36a059a0c9cd" : 502,
"85be57ed-f187-49c9-b7ff-eaa622e06217" : 503,
"95e0556d-0271-4ded-a317-58b2236afc22" : 504,
"182287da-fe0f-457f-ac06-2b8f4c7680fd" : 505,
"478a9e81-e716-42dc-a68d-03487953a32e" : 506,
"c809a470-090e-498b-abc9-cfd65e90b698" : 507,
"cccff716-2694-4209-9f9e-2f7a484465a0" : 508,
"77c64839-4c99-4a40-beb3-cd16afc23540" : 509,
"58c7e325-82fc-446d-9406-851b4d357db7" : 510,
"60f8ce17-065e-40ff-bac6-4f85f5edaedd" : 511,
"7ac19c7e-3e49-4672-ac94-76126d676f40" : 512,
"33aecde5-7e13-4272-9cb7-f4f3b0eb820c" : 513,
"b3fbef09-563b-4ee2-96c1-4a449def8992" : 514,
"399f50f1-05cd-4f8f-aa42-4b27f9d8a047" : 515,
"be1a8b33-d07a-4190-a219-2daf4985ecfc" : 516,
"e1050db2-9faf-4d72-b860-295debaf9d2a" : 517,
"9d77fdeb-100f-4b29-98ad-4effdd824457" : 518,
"30ff48bd-4dd1-429d-a5a5-348c8e5fbfb1" : 519,
"1f55bd0a-6ebe-49a1-8a9c-9258702b6f46" : 520,
"00a915e7-b4e2-4795-bcbf-45e4dda0e927" : 521,
"d9699a79-4736-45c2-925c-f4033ac67d52" : 522,
"eb13847f-3699-4a6a-a7ab-65c133e6185b" : 523,
"45a451cf-a94a-4df0-9511-1dca805a97db" : 524,
"e144e6ee-503e-4bd9-9eff-f2fc77473642" : 525,
"50576fe5-7883-495d-9089-b5dcf5e56098" : 526,
"247b85e0-4e6b-46d1-b1c8-47b00bb8ced7" : 527,
"a2ef6dd1-8886-48c9-8025-c62bac973cc7" : 528,
"410d186f-903a-44fd-b84f-61c47c88bcaf" : 529,
"4f1bbd8f-a4ad-40dd-b098-283792d3679b" : 530,
"2c6e6b89-8473-44ae-803f-60267d9bf6de" : 531,
"3eeffcbd-0e84-4223-ac65-3d32e7cc8511" : 532,
"65740d6c-d782-442e-9e0c-ebb9493ef22a" : 533,
"c3da1f49-b2c8-4751-b72f-28855546ec4c" : 534,
"c762645a-4638-40f5-b129-5024f8d66b51" : 535,
"a2f1c6f5-88de-4fc5-891a-336259f32f4e" : 536,
"7a079928-aee9-418a-b083-6152d01c78d6" : 537,
"883ee5d0-9177-423d-a636-0284e8a4de46" : 538,
"ca6c2ee2-661b-4776-acd1-8603d55d5057" : 539,
"8629d002-c1cf-4c61-9895-55ca7cd0e149" : 540,
"823818f6-0696-4e29-bc4c-b6f3817535a7" : 541,
"ad2c7028-26fd-49c6-88d2-457c207c387b" : 542,
"46fca4d7-47a9-465f-97c8-161e209e624e" : 543,
"cb202839-87c6-456a-821c-dc5b268a200f" : 544,
"3059a14b-7b43-41ce-beef-873999911dfe" : 545,
"0363cbd4-f666-455e-8e86-0bbddcf51950" : 546,
"a6deb94a-2c40-47ec-a678-3d502cd42bc2" : 547,
"a7ae8a07-cb3f-42e6-8f4e-22868f8cdb27" : 548,
"95cb537c-74c5-4c1e-ae24-32e7ea08f380" : 549,
"0f797329-9a45-44fe-a3f5-358638ccf62b" : 550,
"68209dd2-7f9c-4176-8135-d93f3321d3d3" : 551,
"dfa60bf9-be17-4b11-bdd1-23734e6dbfbd" : 552,
"64d0ae66-2e97-4682-aedd-67a45017e0be" : 553,
"e8fb330c-5750-4698-afba-5438618af521" : 554,
"cf88ad28-d04c-4dd6-9d4f-7b137fbecd15" : 555,
"60674aba-1fde-4903-9dc7-a99104e2851e" : 556,
"984745f9-26f4-4e08-8b63-f8ed970a29f0" : 557,
"ce5a9e85-93f6-43fd-b993-b76064456b1b" : 558,
"98f488ec-114d-433b-843d-dea4767b0a8a" : 559,
"fbcad409-9d05-41d7-a869-1ddcab51e091" : 560,
"6b8da9ca-0648-4df3-9f0a-d43ab20a9412" : 561,
"e8eada63-4a33-44aa-b2fd-4f71efb222a0" : 562,
"9c56586d-d2c7-4eac-9cc9-436e6b8b1a54" : 563,
"7338b4d9-d75b-4edb-b238-b09f52397f49" : 564,
"797b2594-278e-421d-88c1-0925d122d611" : 565,
"e287af9f-768b-410d-a232-84522b1968b0" : 566,
"306334d7-dba4-4ac0-9b18-be2cb49bff35" : 567,
"5a21e58d-5a4c-4ee4-b2e0-b67f5ce28720" : 568,
"487a39c4-f5c0-4dbe-8e11-54b743d6f735" : 569,
"8209258a-80e8-4094-aba2-a3febe5ed45b" : 570,
"e07be3bf-c9c6-4d05-a7f8-db2d6f0b6ac5" : 571,
"799acb53-2764-4e89-9792-5fdb6f7d7d29" : 572,
"6f3ccc65-0a54-4e65-ae7e-13b92d618e7c" : 573,
"b98c5e71-1874-489e-b704-a3f76549e72d" : 574,
"4e1a0487-456b-4620-bab9-fa0e44ece156" : 575,
"9c308e53-f7f1-4f19-92c5-03df23b3fe74" : 576,
"4baf4e1d-f960-49d1-b09a-8012bd38eb20" : 577,
"367822e7-0973-4725-986a-979ec2ac0704" : 578,
"9cea954b-cd33-4c43-8622-6bc68c3e9379" : 579,
"33867370-b176-4f56-81ed-f7e60be9668e" : 580,
"5c507727-6dec-41d5-82c3-c67398930daf" : 581,
"f7bf768e-d88d-489d-a263-dd07232bd9e4" : 582,
"bafca313-f02c-4ce7-9cba-760770a0e4b9" : 583,
"520e3c81-7de2-4ef3-9b36-11f3bdad76a9" : 584,
"857b3f90-230f-474d-9559-6f740efadbba" : 585,
"ea28f930-aa50-4bee-b702-76ead24cabbb" : 586,
"730cd32c-627f-44a8-9d93-fcfb23ade969" : 587,
"cc69bc8a-3015-45af-971b-af69fe82ab2f" : 588,
"eba6daa8-0281-479e-aa2d-d78a2614db93" : 589,
"32498aba-d8d4-4202-ae39-d795577cdc4a" : 590,
"71ddfe28-d86c-4238-b514-3601990f488b" : 591,
"4655782c-7272-4bab-96b4-b3fd8a202d43" : 592,
"ebda1cab-dda8-418b-8f25-df1e71ff5bc2" : 593,
"c1564d80-1b9d-471c-b84d-8d5dd2efbf0a" : 594,
"e116f5a9-c732-4d7d-98e7-af30725adf59" : 595,
"838537fc-4aeb-4980-8b58-97fbdcd1d91a" : 596,
"2cfc0d25-27a3-47d9-8cd0-47c84f58ffd5" : 597,
"e1e18384-5355-4f36-a7a1-ce65c2e70cf5" : 598,
"19f696a3-ea19-464b-9d01-e0f259568d3e" : 599,
"2829e29f-178b-4428-995c-0de6d53a0f6c" : 600,
"79acbdd0-f405-11dc-ae6d-b8a03c50a862" : 601,
"e2e717bf-551a-4917-bdc9-4fa0f342c530" : 602,
"8700fabb-30e5-44b1-9daa-67b5a3cb22d2" : 603,
"b126d06c-83d3-491b-993b-daa1dc19390b" : 604,
"c71f0513-c95a-4e98-abad-0c222b87ec66" : 605,
"3e51406d-5cb9-45f9-8960-77fdfd4eb69f" : 606,
"f7a5ad56-5c1e-4d50-8be4-10b460c48c00" : 607,
"920ac962-fa45-40b1-9c08-bad4f0745698" : 608,
"0e2ae538-4332-4f41-bfee-8aa2c693d2a9" : 609,
"39b7b3bc-3dc2-4c64-8de6-aa6ece25c1d9" : 367,
"fbca90e3-8aed-48b1-84e3-369afbd000ce" : 368,
"1a3d9bef-baf1-4eb3-814e-f6e9ba670c04" : 369,
"772eb820-a0de-402a-8c5e-ac9bdb33f2c4" : 370,
"a9485c7e-f399-47c8-9c8c-f7a03a4e94b0" : 371,
"d44af9a3-e779-40c0-a186-79e7717c6d2b" : 372,
"e88c96a3-5884-4e51-a580-e417ca4c9eed" : 373,
"47a779a6-a230-4edd-b787-19c3d2c80ab5" : 374,
"ff7bd5db-b724-4932-b496-10ea298851f9" : 375,
"baa8ccb1-8d9d-4940-93a6-2241c8a04ba5" : 376,
"2cd829bb-b713-433d-99cf-64bef11e5b3e" : 377,
"79e9459f-bf2f-408c-acf5-d69766a39e06" : 378,
"514d4c44-45f9-4f49-83d1-aff5f973d880" : 379,
"0870a77b-587c-4369-a8ed-bc3d347b8e1c" : 380,
"9d23a1e0-5388-11df-a59b-b8a03c50a862" : 381,
"c2c3be16-01fe-4255-a8dd-cfcdf285d721" : 382,
"899dccde-1e1b-4da0-953d-3be7d31f2da7" : 383,
"196893ee-8103-4ac7-9977-cb136b253a7f" : 384,
"dca4d666-c9e0-4712-b658-0d1bc3f39d48" : 385,
"c4b8c992-bc88-4b19-87c3-f89a19604e25" : 386,
"cc364fce-e436-4c77-8b37-289a0e837681" : 387,
"8d8c424d-9ab0-4b82-811e-900577f93f66" : 388,
"131217d6-04ff-4682-9bc2-e418f961fc0d" : 389,
"2853c0e7-c758-4c9c-83a2-7c2d18967fb9" : 390,
"743b654c-be54-4ba1-94ef-2b874f160395" : 391,
"7f65a925-b9ce-4c1c-9d9f-5d32c4e76ae1" : 392,
"dce7a3c9-ea78-4be7-9abc-e3838de70dc5" : 393,
"32b45fb7-1992-4f30-95c8-1604a38a57fa" : 394,
"1afc3a83-38f8-4282-90ae-d377ded4b628" : 395,
"1354d651-e529-4a8e-95be-faa807639461" : 396,
"1d9ee036-c4e8-4c29-a33a-608da32db564" : 397,
"ee6a779a-dce6-4f16-ae3d-84f495def86e" : 398,
"04ce62dd-30ec-4d98-8b30-b09cafc3ac38" : 399,
"d33a9c4e-e33c-4311-a2fe-88dbdff7b4b9" : 400,
"3fed8a70-5eb1-4d47-a580-92ce393a4a3e" : 401,
"fe9d23a5-5514-4d6e-b05b-d115425cd159" : 402,
"b0a2cd60-4059-11de-9f59-b8a03c50a862" : 403,
"d5ddb6a4-2ad0-4928-8682-f9d5d12be1eb" : 404,
"15b278a8-1356-4f7b-ba32-3c733c3d0aac" : 405,
"dc282b9d-8f3b-4197-b174-ba5272721a6f" : 406,
"28eb1a3f-1c15-4a95-931a-4af90ecb574d" : 407,
"6491ad8c-8e49-4bd4-a089-0ddb3bbe0890" : 408,
"a4d6cc6b-4c62-4015-ad7b-1ba69847a616" : 409,
"c10e9f64-38b0-4cb9-98d3-92b82cb7256a" : 410,
"ef05e4c5-43a3-4686-af2a-528684b228ae" : 411,
"b542788f-0dc2-4a2b-b652-fceced449591" : 412,
"0b5846f7-20b5-410a-93c5-5de83b522deb" : 413,
"c4f66525-4d36-4c18-82ac-98e088f54db4" : 414,
"4c43ef1b-b005-4ed5-840b-7886d53fb069" : 415,
"fb10a11f-4417-41c8-be6a-13a5c8535122" : 416,
"fc871c4a-bb5e-4db6-b332-487bc23797f1" : 417,
"634deee3-2c0b-4698-b634-23096802abe6" : 418,
"45346344-bad3-4a45-8563-98955464b3ca" : 419,
"95aba44b-121f-41fe-9f16-4515b826ce54" : 420,
"f9d3db7d-6ee9-4fbf-bd4c-e5d0f0bd26c6" : 421,
"ee9a9cbf-d62d-4d6d-a46a-769e0b1714d1" : 422,
"ee87a3ad-dd8d-4d3c-9549-bc6589d2406a" : 423,
"9ae14314-8a89-499b-97b9-f5e4585f69ce" : 424,
"e6e855d7-9775-400f-883b-c4e04e517d79" : 425,
"d49251f5-379f-43b4-b747-9d8240334fa5" : 426,
"770c30d2-c2a8-4bb2-8056-6167297cddae" : 427,
"25fe2ec4-4405-4b37-9386-8cc80e337abb" : 428,
"4e1336c8-6101-44ef-8dc1-8af45ba7178a" : 429,
"8cdd9232-84eb-4bad-9d9b-a36910cd524b" : 430,
"3f8e73d6-5f4c-4e42-98b6-8487b890dccf" : 431,
"4cff2aca-5ce5-4028-88e6-d79b6449173a" : 432,
"adc174cd-c752-4eee-9630-7c1209eb1c4a" : 433,
"14209842-b56e-40ae-b95d-b13224d0411c" : 434,
"4d0539cd-a2e0-4a2c-8b7d-ec6718eef764" : 435,
"8a226609-56b9-4b87-a0a2-767c38edd572" : 436,
"b36f3505-f2d9-499f-a669-fa418a127b9d" : 437,
"00aa4e95-ff37-445f-99a8-636afe047a59" : 438,
"132fe0d8-aa85-40a0-af20-52f48fae7bd6" : 439,
"baa855b0-a010-4240-9adf-98c810a6ccf5" : 440,
"8d5f7d67-9132-41e9-a068-a907aa03b907" : 441,
"6c821c6d-1936-41a0-8dd9-74e2c4ab38bd" : 442,
"98dbab03-09e5-4ceb-988e-04f3e803decb" : 443,
"dc3f994e-bd32-4714-8d88-6fb895fb48cc" : 444,
"23539a57-1347-4dd3-af2a-cf9478563baa" : 445,
"38c97c54-2ff1-42f1-aeff-0d40c8d16cdb" : 446,
"76388ab6-61ca-439a-ab09-e1fe73eb224a" : 447,
"7aec3384-574d-4058-ab36-337cad184999" : 448,
"6505c875-003c-472d-879f-2624c00070e5" : 449,
"dc8aab12-94a0-47ce-b188-8fe30b91bb23" : 450,
"593527a0-d2e9-4365-a583-cc683125dbb7" : 451,
"01d38e13-944c-4f4b-96a2-25ecf1751140" : 452,
"43a26bbf-466a-4335-96cc-01d0656c614a" : 453,
"4bc4943a-c94f-4bd4-9aa3-2a9cae94398a" : 454,
"6f0517c4-913a-401a-95f1-144f724f424e" : 455,
"149b9411-e994-4416-a218-f2ef21173e44" : 456,
"1bce0a01-a3d8-4ee3-825a-d1be104caa82" : 457,
"63a08e67-8d4e-4653-82d7-c133b136527f" : 458,
"38105fdf-4886-4c2e-9c7c-2bcb091411a5" : 459,
"b52e9a80-8cb7-4d37-9324-46032fd69237" : 460,
"45e5c621-df96-4ea7-bbc6-aa3e53b90bb8" : 461,
"6ba9a8cc-513a-4a51-bf93-6f5de8040a96" : 462,
"497c081b-8157-4287-bb5c-291a4c71439c" : 463,
"3ead5cf3-2840-482e-bbd9-76380825be63" : 464,
"81bfa2a5-22a8-4bea-b91c-d54bda1365b9" : 465,
"d4beb51b-b7cc-419e-b943-0e5254aac3ef" : 466,
"6b2f029b-7823-4b84-9c30-31ff364238fe" : 467,
"c58ac019-b413-4613-8772-39e57acbeb8e" : 468,
"3c1ff1ce-cc3b-4637-b86f-2766ca452c89" : 469,
"05827c69-a802-472f-bbe3-76629dfd57a7" : 470,
"c803f6f5-2c6a-4b41-8c15-768d48ef1c8c" : 471,
"68a216ee-f721-40cd-932c-84153afd9451" : 472,
"13e05353-53b2-47e6-ae01-7e45eedc84fc" : 473,
"224c2985-c87e-4330-bc0d-da0c8067fac4" : 474,
"396d5f30-dea9-11db-8ab4-b8a03c50a862" : 475,
"6d2a7654-1ed0-4924-b22e-9bc221ab2124" : 476,
"567cdb5e-5bb3-42e1-ae07-ffa6fc60b56e" : 477,
"34ad60e6-3bfb-4bdd-acd6-420523a38d09" : 478,
"16fd28c4-e4be-40e5-a551-139e573cfe68" : 479,
"ba482b53-07ed-4ca4-8981-5396d1a8a6fc" : 480,
"1eacbaf3-ce23-4cfb-98d5-a51a7a574562" : 481,
"fe602f47-b553-4291-b6e5-197b9837e167" : 482,
"6fd43b88-b13f-4547-94ab-1720f2a3b7ed" : 483,
"f63e13b5-584a-42af-9d68-e21e0f24b073" : 484,
"7bd9e639-1f72-44ea-9c5e-c542cee18d97" : 485,
"1649bf8c-d7e0-482a-9f1a-8292cd418c62" : 486,
"3f72d721-27e1-4b80-9038-20430911ba0c" : 487,
"a665e604-14e4-4aba-b93d-53755968eb14" : 610,
"eac88d99-9f6c-4031-8fc4-8088f0e0dfe7" : 611,
"c0252e99-c6d0-449e-8106-508be14c34fa" : 612
}
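# Provider ids treated as aggregators; each id below also appears as a value in
# PROVIDER_MAPPINGS above.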
AGGREGATORS = [
1, 2, 3, 6, 10, 12, 18, 23, 26, 32, 44, 45, 51, 56, 57, 59, 60, 82, 92, 99,
100, 136, 139, 143, 145, 161, 162, 169, 173, 177, 198, 206, 210, 223, 226,
246, 254, 256, 261, 267, 274, 283, 295, 300, 301, 332, 333, 335, 337, 338,
339, 340, 342, 343, 362, 496, 501, 515, 539, 580]
|
"""
Copyright (C) 2008 Krzysztof Kosyl <krzysztof.kosyl@gmail.com>
This program is free software; you can redistribute it and/or modify
it under the terms of the GNU General Public License as published by
the Free Software Foundation; either version 2 of the License, or
(at your option) any later version.
This program is distributed in the hope that it will be useful,
but WITHOUT ANY WARRANTY; without even the implied warranty of
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
GNU General Public License for more details.
You should have received a copy of the GNU General Public License
along with this program; if not, write to the Free Software
Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA
"""
import lilyplayer.utils.utils as utils
from lilyplayer.utils.play_time import Time
from lilyplayer.playlist.item import PlaylistItem, PlaylistError
class PlaylistFormat(object):
def __init__(self):
super(PlaylistFormat, self).__init__()
if not hasattr(self, 'name'):
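            # Only the base class lacks `name`: it scans module globals for
            # PlaylistFormat subclasses and builds name/extension/MIME lookup tables.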
glob = globals()
self._formats = {}
self._extensions = {}
self._mimes = {}
for i in glob:
obj = glob[i]
                if isinstance(obj, type) and issubclass(obj, PlaylistFormat) and obj is not PlaylistFormat:
self._formats[obj.name] = obj()
for e in obj.extensions:
self._extensions[e] = obj.name
for m in obj.mimes:
self._mimes[m] = obj.name
            print(self._formats, self._extensions, self._mimes)
def __getitem__(self, key):
if key in self._formats:
return self._formats[key]
if key in self._extensions:
return self._formats[self._extensions[key]]
if key in self._mimes:
return self._formats[self._mimes[key]]
        raise PlaylistError('Playlist format %r unknown' % key)
def __iter__(self):
return iter(self._formats)
def load(self, path):
data = utils.File(path).read()
return self.loads(data)
def dump(self, data, path):
data = self.dumps(data)
utils.File(path).write(data)
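# Illustrative usage (paths hypothetical): the base class doubles as a registry,
# so a concrete format can be looked up by name, file extension or MIME type:
#     formats = PlaylistFormat()
#     items = formats['m3u'].load('/tmp/list.m3u')
#     formats['pls'].dump(items, '/tmp/list.pls')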
class M3uPlaylist(PlaylistFormat):
name = 'm3u'
label = 'MP3 audio (streamed)'
extensions = ['m3u', 'vlc']
mimes = ['audio/x-mpegurl']
def loads(self, data):
lines = [l.strip() for l in data.splitlines()]
lines = [l for l in lines if len(l) > 0]
extended = False
if lines[0] == '#EXTM3U':
extended = True
lines = lines[1:]
result = []
lasttime, lastname = None, None
for line in lines:
if line.startswith('#'):
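                # Extended-M3U entry: "#EXTINF:<seconds>,<display name>";
                # a negative duration means unknown.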
if line.startswith('#EXTINF:'):
try:
a = line.split(':', 1)[1].split(',', 1)
lasttime = int(a[0])
lastname = a[1]
if lasttime < 0:
lasttime = None
else:
lasttime = Time(s=lasttime)
except:
lasttime, lastname = None, None
else:
result.append(PlaylistItem(
filename=line, name=lastname, duration=lasttime))
lasttime, lastname = None, None
return result
def dumps(self, data):
result = ['#EXTM3U']
for i in data:
if i.duration:
result.append('#EXTINF:%d,%s' % (i.duration.total_seconds(), i.name))
else:
result.append('#EXTINF:%d,%s' % (-1, i.name))
result.append(i.filename)
return '\n'.join(result)
class PlsPlaylist(PlaylistFormat):
name = 'pls'
label = 'MP3 ShoutCast playlist'
extensions = ['pls']
mimes = ['audio/x-scpls']
def loads(self, data):
def add(d, key):
            if key not in d:
d[key] = ['', '', Time()]
return d[key]
lines = [l.strip() for l in data.splitlines()]
lines = [l for l in lines if len(l) > 0]
result = []
tmp = {}
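        # A PLS file is INI-like: a "[playlist]" header followed by numbered
        # "FileN=", "TitleN=" and "LengthN=" entries (a nonpositive length means unknown).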
if lines[0].lower() == '[playlist]':
lines = lines[1:]
for line in lines:
try:
a = line.split('=', 1)
cmd = a[0].strip().lower()
arg = a[1].strip()
if cmd.startswith('file'):
i = int(cmd[4:])
add(tmp, i)[0] = arg
elif cmd.startswith('title'):
i = int(cmd[5:])
add(tmp, i)[1] = arg
elif cmd.startswith('length'):
i = int(cmd[6:])
time = int(arg)
if time > 0:
add(tmp, i)[2] = Time(s = int(arg))
except:
pass
        for k in sorted(tmp):
a = tmp[k]
result.append(PlaylistItem(
filename=a[0], name=a[1], duration=a[2]))
return result
def dumps(self, data):
result = ['[playlist]', 'NumberOfEntries=%d' % len(data), '']
for i, d in enumerate(data):
result.append('File%d=%s' % (i + 1, d.filename))
if d.name:
result.append('Title%d=%s' % (i + 1, d.name))
if d.duration:
                result.append('Length%d=%s' % (i + 1, d.duration.total_seconds()))
result.append('')
result.append('Version=2')
return '\n'.join(result)
|
from django.db import models
from django.utils.translation import gettext_lazy as _
from japos.discounts.models import Discount
class Group(models.Model):
sku = models.CharField(max_length = 15, unique = True, null = True, verbose_name = _("SKU"))
name = models.CharField(max_length = 45, null = True, unique = True, verbose_name = _("Group Name"))
date_created = models.DateTimeField(auto_now_add = True)
date_modified = models.DateTimeField(auto_now = True)
def __unicode__(self):
return self.name
class Product(models.Model):
sku = models.CharField(max_length = 15, unique = True, null = True, verbose_name = _("SKU"))
barcode = models.CharField(max_length = 45, unique = True, verbose_name = _("Barcode"))
name = models.CharField(max_length = 45, unique = True, verbose_name = _("Name"))
description = models.TextField(blank = True, null = True, verbose_name = _("Description"))
stock = models.IntegerField(null = True, verbose_name = _('Stock'))
group = models.ForeignKey(Group, verbose_name = _("Group"))
purchase_price = models.DecimalField(max_digits = 9, decimal_places = 3, verbose_name = _("Purchase price"))
discount = models.ForeignKey(Discount, blank = True, null=True, verbose_name = _("Discount"))
date_created = models.DateTimeField(auto_now_add = True)
date_modified = models.DateTimeField(auto_now = True)
def __unicode__(self):
        return '(%s) %s - %s' % (self.sku, self.name, self.stock)
|
from kivy.core.window import Window
from kivy.animation import Animation
from kivy.uix.floatlayout import FloatLayout
from kivy.uix.boxlayout import BoxLayout
from kivy.uix.popup import Popup
from kivy.uix.button import Button
from kivy.uix.textinput import TextInput
from kivy.uix.label import Label
from kivy.graphics import *
from widget3D import Widget3D
from nat import NatLogo
class Login(BoxLayout):
def __init__(self, **kwargs):
super(Login, self).__init__(orientation='vertical', size_hint=(None,None), size=(300,140), center=Window.center, **kwargs)
        # workaround: re-apply centering after the initial layout pass
self.center = Window.center
self.txt_user = TextInput(text='Usuario')
self.add_widget(self.txt_user)
self.txt_pass = TextInput(text='Password')
self.add_widget(self.txt_pass)
self.btn_iniciar_sesion = Button(text='Iniciar sesión')
self.add_widget(self.btn_iniciar_sesion)
class Netget(FloatLayout):
def __init__(self, **kwargs):
super(Netget, self).__init__(**kwargs)
with self.canvas.before:
Color(.9,.9,.9,1)
Rectangle(size=Window.size)
self.natlogo = NatLogo(pos_x=-6, pos_y=10, size_logo=(25,10) )
self.add_widget(self.natlogo )
self.login = Login()
self.add_widget(self.login)
#login bind
self.login.btn_iniciar_sesion.bind(on_press=self.on_login)
def on_login(self, w):
Animation(opacity=0, duration=.3).start(self.login)
anim_nat = Animation(pos_y=0, duration=.3)
anim_nat.bind(on_complete=self.init_nat_animation)
anim_nat.start(self.natlogo)
    def init_nat_animation(self, anim, widget):  # Animation on_complete passes (animation, widget)
pass
#self.natlogo.animate()
if __name__ == '__main__':
from kivy.base import runTouchApp
runTouchApp(Netget() )
|
'''
Covenant Add-on
This program is free software: you can redistribute it and/or modify
it under the terms of the GNU General Public License as published by
the Free Software Foundation, either version 3 of the License, or
(at your option) any later version.
This program is distributed in the hope that it will be useful,
but WITHOUT ANY WARRANTY; without even the implied warranty of
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
GNU General Public License for more details.
You should have received a copy of the GNU General Public License
along with this program. If not, see <http://www.gnu.org/licenses/>.
'''
import re,urllib,urlparse
from resources.lib.modules import cleantitle
from resources.lib.modules import client
from resources.lib.modules import debrid
class source:
def __init__(self):
self.priority = 1
self.language = ['en']
self.domains = ['300mbmoviesdl.com', 'hevcbluray.com']
self.base_link = 'http://www.300mbmoviesdl.com'
self.search_link = '/search/%s/feed/rss2/'
def movie(self, imdb, title, localtitle, aliases, year):
try:
url = {'imdb': imdb, 'title': title, 'year': year}
url = urllib.urlencode(url)
return url
except:
return
def sources(self, url, hostDict, hostprDict):
try:
sources = []
            if url is None: return sources
            if debrid.status() is False: raise Exception()
data = urlparse.parse_qs(url)
data = dict([(i, data[i][0]) if data[i] else (i, '') for i in data])
title = data['tvshowtitle'] if 'tvshowtitle' in data else data['title']
hdlr = 'S%02dE%02d' % (int(data['season']), int(data['episode'])) if 'tvshowtitle' in data else data['year']
query = '%s S%02dE%02d' % (data['tvshowtitle'], int(data['season']), int(data['episode'])) if 'tvshowtitle' in data else '%s %s' % (data['title'], data['year'])
query = re.sub('(\\\|/| -|:|;|\*|\?|"|\'|<|>|\|)', ' ', query)
url = self.search_link % urllib.quote_plus(query)
url = urlparse.urljoin(self.base_link, url)
r = client.request(url)
posts = client.parseDOM(r, 'item')
hostDict = hostprDict + hostDict
items = []
for post in posts:
try:
t = client.parseDOM(post, 'title')[0]
c = client.parseDOM(post, 'content.+?')
u = c[0].split('<h1 ')
u = [i for i in u if 'Download Links' in i]
u = client.parseDOM(u, 'a', ret='href')
try: s = re.findall('((?:\d+\.\d+|\d+\,\d+|\d+) (?:GB|GiB|MB|MiB))', c[0])[0]
except: s = '0'
items += [(t, i, s) for i in u]
except:
pass
for item in items:
try:
name = item[0]
name = client.replaceHTMLCodes(name)
t = re.sub('(\.|\(|\[|\s)(\d{4}|S\d*E\d*|S\d*|3D)(\.|\)|\]|\s|)(.+|)', '', name)
if not cleantitle.get(t) == cleantitle.get(title): raise Exception()
y = re.findall('[\.|\(|\[|\s](\d{4}|S\d*E\d*|S\d*)[\.|\)|\]|\s]', name)[-1].upper()
if not y == hdlr: raise Exception()
fmt = re.sub('(.+)(\.|\(|\[|\s)(\d{4}|S\d*E\d*|S\d*)(\.|\)|\]|\s)', '', name.upper())
fmt = re.split('\.|\(|\)|\[|\]|\s|\-', fmt)
fmt = [i.lower() for i in fmt]
if any(i.endswith(('subs', 'sub', 'dubbed', 'dub')) for i in fmt): raise Exception()
if any(i in ['extras'] for i in fmt): raise Exception()
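                    # Infer quality from release-name tokens; screener/cam markers
                    # override the resolution-based guess.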
if '1080p' in fmt: quality = '1080p'
elif '720p' in fmt: quality = 'HD'
else: quality = 'SD'
if any(i in ['dvdscr', 'r5', 'r6'] for i in fmt): quality = 'SCR'
elif any(i in ['camrip', 'tsrip', 'hdcam', 'hdts', 'dvdcam', 'dvdts', 'cam', 'telesync', 'ts'] for i in fmt): quality = 'CAM'
info = []
if '3d' in fmt: info.append('3D')
try:
size = re.findall('((?:\d+\.\d+|\d+\,\d+|\d+) (?:GB|GiB|MB|MiB))', item[2])[-1]
div = 1 if size.endswith(('GB', 'GiB')) else 1024
size = float(re.sub('[^0-9|/.|/,]', '', size))/div
size = '%.2f GB' % size
info.append(size)
except:
pass
if any(i in ['hevc', 'h265', 'x265'] for i in fmt): info.append('HEVC')
info = ' | '.join(info)
url = item[1]
if any(x in url for x in ['.rar', '.zip', '.iso']): raise Exception()
url = client.replaceHTMLCodes(url)
url = url.encode('utf-8')
host = re.findall('([\w]+[.][\w]+)$', urlparse.urlparse(url.strip().lower()).netloc)[0]
if not host in hostDict: raise Exception()
host = client.replaceHTMLCodes(host)
host = host.encode('utf-8')
                    sources.append({'source': host, 'quality': quality, 'language': 'en', 'url': url, 'info': info, 'direct': False, 'debridonly': True})
except:
pass
check = [i for i in sources if not i['quality'] == 'CAM']
if check: sources = check
return sources
except:
return sources
def resolve(self, url):
return url
|
"""
This module provides access to the Unix shadow password database.
It is available on various Unix versions.
Shadow password database entries are reported as 9-tuples of type struct_spwd,
containing the following items from the password database (see `<shadow.h>'):
sp_namp, sp_pwdp, sp_lstchg, sp_min, sp_max, sp_warn, sp_inact, sp_expire, sp_flag.
The sp_namp and sp_pwdp are strings, the rest are integers.
An exception is raised if the entry asked for cannot be found.
You have to be root to be able to use this module.
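Example (illustrative; must run as root):
    import spwd
    entry = spwd.getspnam('root')
    print(entry.sp_namp, entry.sp_lstchg)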
"""
def getspall(): # real signature unknown; restored from __doc__
"""
getspall() -> list_of_entries
Return a list of all available shadow password database entries, in arbitrary order.
See spwd.__doc__ for more on shadow password database entries.
"""
pass
def getspnam(name): # real signature unknown; restored from __doc__
"""
getspnam(name) -> (sp_namp, sp_pwdp, sp_lstchg, sp_min, sp_max,
sp_warn, sp_inact, sp_expire, sp_flag)
Return the shadow password database entry for the given user name.
See spwd.__doc__ for more on shadow password database entries.
"""
pass
from .tuple import tuple
class struct_spwd(tuple):
"""
spwd.struct_spwd: Results from getsp*() routines.
This object may be accessed either as a 9-tuple of
(sp_namp,sp_pwdp,sp_lstchg,sp_min,sp_max,sp_warn,sp_inact,sp_expire,sp_flag)
or via the object attributes as named in the above tuple.
"""
def __init__(self, *args, **kwargs): # real signature unknown
pass
@staticmethod # known case of __new__
def __new__(*args, **kwargs): # real signature unknown
""" Create and return a new object. See help(type) for accurate signature. """
pass
def __reduce__(self, *args, **kwargs): # real signature unknown
pass
def __repr__(self, *args, **kwargs): # real signature unknown
""" Return repr(self). """
pass
sp_expire = property(lambda self: object(), lambda self, v: None, lambda self: None) # default
"""#days since 1970-01-01 when account expires"""
sp_flag = property(lambda self: object(), lambda self, v: None, lambda self: None) # default
"""reserved"""
sp_inact = property(lambda self: object(), lambda self, v: None, lambda self: None) # default
"""#days after pw expires until account is disabled"""
sp_lstchg = property(lambda self: object(), lambda self, v: None, lambda self: None) # default
"""date of last change"""
sp_max = property(lambda self: object(), lambda self, v: None, lambda self: None) # default
"""max #days between changes"""
sp_min = property(lambda self: object(), lambda self, v: None, lambda self: None) # default
"""min #days between changes"""
sp_nam = property(lambda self: object(), lambda self, v: None, lambda self: None) # default
"""login name; deprecated"""
sp_namp = property(lambda self: object(), lambda self, v: None, lambda self: None) # default
"""login name"""
sp_pwd = property(lambda self: object(), lambda self, v: None, lambda self: None) # default
"""encrypted password; deprecated"""
sp_pwdp = property(lambda self: object(), lambda self, v: None, lambda self: None) # default
"""encrypted password"""
sp_warn = property(lambda self: object(), lambda self, v: None, lambda self: None) # default
"""#days before pw expires to warn user about it"""
n_fields = 11
n_sequence_fields = 9
n_unnamed_fields = 0
from .object import object
class __loader__(object):
"""
Meta path import for built-in modules.
All methods are either class or static methods to avoid the need to
instantiate the class.
"""
@classmethod
def find_module(cls, *args, **kwargs): # real signature unknown
"""
Find the built-in module.
If 'path' is ever specified then the search is considered a failure.
This method is deprecated. Use find_spec() instead.
"""
pass
@classmethod
def find_spec(cls, *args, **kwargs): # real signature unknown
pass
@classmethod
def get_code(cls, *args, **kwargs): # real signature unknown
""" Return None as built-in modules do not have code objects. """
pass
@classmethod
def get_source(cls, *args, **kwargs): # real signature unknown
""" Return None as built-in modules do not have source code. """
pass
@classmethod
def is_package(cls, *args, **kwargs): # real signature unknown
""" Return False as built-in modules are never packages. """
pass
@classmethod
def load_module(cls, *args, **kwargs): # real signature unknown
""" Load a built-in module. """
pass
def module_repr(module): # reliably restored by inspect
"""
Return repr for the module.
The method is deprecated. The import machinery does the job itself.
"""
pass
def __init__(self, *args, **kwargs): # real signature unknown
pass
__weakref__ = property(lambda self: object(), lambda self, v: None, lambda self: None) # default
"""list of weak references to the object (if defined)"""
__dict__ = None # (!) real value is ''
__spec__ = None # (!) real value is ''
|
from django.conf.urls import patterns, include, url
from django.conf import settings
from django.contrib import admin
from filebrowser.sites import site
from related.api.resources import RelatedResource
from related.api.resources import PostsResource
related_resource = RelatedResource()
posts_resource = PostsResource()
admin.autodiscover()
urlpatterns = patterns('',
    url(r'^$', 'project.views.first_page', name='index'),
    url(r'^posts/', include('posts.urls')),
    url(r'^project/', include('project.urls')),
    url(r'^admin/', include(admin.site.urls)),
    url(r'^grappelli/', include('grappelli.urls')),  # grappelli URLs
    url(r'^admin/filebrowser/', include(site.urls)),
    url(r'^tinymce/', include('tinymce.urls')),
    url(r'^search/', include('haystack.urls')),
)
urlpatterns += patterns('',
    url(r'^media/(?P<path>.*)$', 'django.views.static.serve', {'document_root': settings.MEDIA_ROOT}),
    # document_root must be a filesystem path, so STATIC_ROOT (not the STATIC_URL prefix)
    url(r'^theme/static/(?P<path>.*)$', 'django.views.static.serve', {'document_root': settings.STATIC_ROOT}),
    url(r'^related/', include('related.urls')),
    url(r'^images/', include('images.urls')),
    url(r'^api/', include(related_resource.urls)),
    url(r'^api/', include(posts_resource.urls)),
    url(r'^post/comments/', include('django.contrib.comments.urls')),
)
|
from abc import ABCMeta, abstractmethod
from collections import defaultdict
from datetime import datetime, timedelta
from traceback import format_tb
import logging
import sys
from lib.pytz import utc
from lib.six import six
from lib.apscheduler.events import JobExecutionEvent, EVENT_JOB_MISSED, EVENT_JOB_ERROR, EVENT_JOB_EXECUTED
class MaxInstancesReachedError(Exception):
def __init__(self, job):
super(MaxInstancesReachedError, self).__init__(
'Job "%s" has already reached its maximum number of instances (%d)' % (job.id, job.max_instances))
class BaseExecutor(six.with_metaclass(ABCMeta, object)):
"""Abstract base class that defines the interface that every executor must implement."""
_scheduler = None
_lock = None
_logger = logging.getLogger('apscheduler.executors')
def __init__(self):
super(BaseExecutor, self).__init__()
self._instances = defaultdict(lambda: 0)
def start(self, scheduler, alias):
"""
Called by the scheduler when the scheduler is being started or when the executor is being added to an already
running scheduler.
:param apscheduler.schedulers.base.BaseScheduler scheduler: the scheduler that is starting this executor
:param str|unicode alias: alias of this executor as it was assigned to the scheduler
"""
self._scheduler = scheduler
self._lock = scheduler._create_lock()
self._logger = logging.getLogger('apscheduler.executors.%s' % alias)
def shutdown(self, wait=True):
"""
Shuts down this executor.
:param bool wait: ``True`` to wait until all submitted jobs have been executed
"""
def submit_job(self, job, run_times):
"""
Submits job for execution.
:param Job job: job to execute
:param list[datetime] run_times: list of datetimes specifying when the job should have been run
:raises MaxInstancesReachedError: if the maximum number of allowed instances for this job has been reached
"""
assert self._lock is not None, 'This executor has not been started yet'
with self._lock:
if self._instances[job.id] >= job.max_instances:
raise MaxInstancesReachedError(job)
self._do_submit_job(job, run_times)
self._instances[job.id] += 1
@abstractmethod
def _do_submit_job(self, job, run_times):
"""Performs the actual task of scheduling `run_job` to be called."""
def _run_job_success(self, job_id, events):
"""Called by the executor with the list of generated events when `run_job` has been successfully called."""
with self._lock:
self._instances[job_id] -= 1
if self._instances[job_id] == 0:
del self._instances[job_id]
for event in events:
self._scheduler._dispatch_event(event)
def _run_job_error(self, job_id, exc, traceback=None):
"""Called by the executor with the exception if there is an error calling `run_job`."""
with self._lock:
self._instances[job_id] -= 1
if self._instances[job_id] == 0:
del self._instances[job_id]
exc_info = (exc.__class__, exc, traceback)
self._logger.error('Error running job %s', job_id, exc_info=exc_info)
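# Minimal illustrative subclass (a sketch, not part of APScheduler itself): runs
# jobs inline on the calling thread. Assumes jobs carry a `_jobstore_alias`
# attribute, as the bundled pool executors expect.
class InlineExecutor(BaseExecutor):
    def _do_submit_job(self, job, run_times):
        try:
            events = run_job(job, job._jobstore_alias, run_times, self._logger.name)
        except:
            self._run_job_error(job.id, *sys.exc_info()[1:])
        else:
            self._run_job_success(job.id, events)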
def run_job(job, jobstore_alias, run_times, logger_name):
"""Called by executors to run the job. Returns a list of scheduler events to be dispatched by the scheduler."""
events = []
logger = logging.getLogger(logger_name)
for run_time in run_times:
# See if the job missed its run time window, and handle possible misfires accordingly
if job.misfire_grace_time is not None:
difference = datetime.now(utc) - run_time
grace_time = timedelta(seconds=job.misfire_grace_time)
if difference > grace_time:
events.append(JobExecutionEvent(EVENT_JOB_MISSED, job.id, jobstore_alias, run_time))
logger.warning('Run time of job "%s" was missed by %s', job, difference)
continue
logger.info('Running job "%s" (scheduled at %s)', job, run_time)
try:
retval = job.func(*job.args, **job.kwargs)
except:
exc, tb = sys.exc_info()[1:]
formatted_tb = ''.join(format_tb(tb))
events.append(JobExecutionEvent(EVENT_JOB_ERROR, job.id, jobstore_alias, run_time, exception=exc,
traceback=formatted_tb))
logger.exception('Job "%s" raised an exception', job)
else:
events.append(JobExecutionEvent(EVENT_JOB_EXECUTED, job.id, jobstore_alias, run_time, retval=retval))
logger.info('Job "%s" executed successfully', job)
return events
|
""" Todo - Docstring
"""
|
"""Base class for directed graphs."""
from copy import deepcopy
import networkx as nx
from networkx.classes.graph import Graph
from networkx.exception import NetworkXError
import networkx.convert as convert
__author__ = """\n""".join(['Aric Hagberg (hagberg@lanl.gov)',
'Pieter Swart (swart@lanl.gov)',
'Dan Schult(dschult@colgate.edu)'])
class DiGraph(Graph):
"""
Base class for directed graphs.
A DiGraph stores nodes and edges with optional data, or attributes.
DiGraphs hold directed edges. Self loops are allowed but multiple
(parallel) edges are not.
Nodes can be arbitrary (hashable) Python objects with optional
key/value attributes.
Edges are represented as links between nodes with optional
key/value attributes.
Parameters
----------
data : input graph
Data to initialize graph. If data=None (default) an empty
graph is created. The data can be an edge list, or any
NetworkX graph object. If the corresponding optional Python
packages are installed the data can also be a NumPy matrix
or 2d ndarray, a SciPy sparse matrix, or a PyGraphviz graph.
attr : keyword arguments, optional (default= no attributes)
Attributes to add to graph as key=value pairs.
See Also
--------
Graph
MultiGraph
MultiDiGraph
Examples
--------
Create an empty graph structure (a "null graph") with no nodes and
no edges.
>>> G = nx.DiGraph()
G can be grown in several ways.
**Nodes:**
Add one node at a time:
>>> G.add_node(1)
Add the nodes from any container (a list, dict, set or
even the lines from a file or the nodes from another graph).
>>> G.add_nodes_from([2,3])
>>> G.add_nodes_from(range(100,110))
>>> H=nx.Graph()
>>> H.add_path([0,1,2,3,4,5,6,7,8,9])
>>> G.add_nodes_from(H)
In addition to strings and integers any hashable Python object
(except None) can represent a node, e.g. a customized node object,
or even another Graph.
>>> G.add_node(H)
**Edges:**
G can also be grown by adding edges.
Add one edge,
>>> G.add_edge(1, 2)
a list of edges,
>>> G.add_edges_from([(1,2),(1,3)])
or a collection of edges,
>>> G.add_edges_from(H.edges())
If some edges connect nodes not yet in the graph, the nodes
are added automatically. There are no errors when adding
nodes or edges that already exist.
**Attributes:**
Each graph, node, and edge can hold key/value attribute pairs
in an associated attribute dictionary (the keys must be hashable).
By default these are empty, but can be added or changed using
add_edge, add_node or direct manipulation of the attribute
dictionaries named graph, node and edge respectively.
>>> G = nx.DiGraph(day="Friday")
>>> G.graph
{'day': 'Friday'}
Add node attributes using add_node(), add_nodes_from() or G.node
>>> G.add_node(1, time='5pm')
>>> G.add_nodes_from([3], time='2pm')
>>> G.node[1]
{'time': '5pm'}
>>> G.node[1]['room'] = 714
>>> del G.node[1]['room'] # remove attribute
>>> G.nodes(data=True)
[(1, {'time': '5pm'}), (3, {'time': '2pm'})]
Warning: adding a node to G.node does not add it to the graph.
Add edge attributes using add_edge(), add_edges_from(), subscript
notation, or G.edge.
>>> G.add_edge(1, 2, weight=4.7 )
>>> G.add_edges_from([(3,4),(4,5)], color='red')
>>> G.add_edges_from([(1,2,{'color':'blue'}), (2,3,{'weight':8})])
>>> G[1][2]['weight'] = 4.7
>>> G.edge[1][2]['weight'] = 4
**Shortcuts:**
Many common graph features allow python syntax to speed reporting.
>>> 1 in G # check if node in graph
True
>>> [n for n in G if n<3] # iterate through nodes
[1, 2]
>>> len(G) # number of nodes in graph
5
>>> G[1] # adjacency dict keyed by neighbor to edge attributes
... # Note: you should not change this dict manually!
{2: {'color': 'blue', 'weight': 4}}
The fastest way to traverse all edges of a graph is via
adjacency_iter(), but the edges() method is often more convenient.
>>> for n,nbrsdict in G.adjacency_iter():
... for nbr,eattr in nbrsdict.items():
... if 'weight' in eattr:
... (n,nbr,eattr['weight'])
(1, 2, 4)
(2, 3, 8)
>>> [ (u,v,edata['weight']) for u,v,edata in G.edges(data=True) if 'weight' in edata ]
[(1, 2, 4), (2, 3, 8)]
**Reporting:**
Simple graph information is obtained using methods.
Iterator versions of many reporting methods exist for efficiency.
Methods exist for reporting nodes(), edges(), neighbors() and degree()
as well as the number of nodes and edges.
For details on these and other miscellaneous methods, see below.
"""
def __init__(self, data=None, **attr):
"""Initialize a graph with edges, name, graph attributes.
Parameters
----------
data : input graph
Data to initialize graph. If data=None (default) an empty
graph is created. The data can be an edge list, or any
NetworkX graph object. If the corresponding optional Python
packages are installed the data can also be a NumPy matrix
or 2d ndarray, a SciPy sparse matrix, or a PyGraphviz graph.
name : string, optional (default='')
An optional name for the graph.
attr : keyword arguments, optional (default= no attributes)
Attributes to add to graph as key=value pairs.
See Also
--------
convert
Examples
--------
>>> G = nx.Graph() # or DiGraph, MultiGraph, MultiDiGraph, etc
>>> G = nx.Graph(name='my graph')
>>> e = [(1,2),(2,3),(3,4)] # list of edges
>>> G = nx.Graph(e)
Arbitrary graph attribute pairs (key=value) may be assigned
>>> G=nx.Graph(e, day="Friday")
>>> G.graph
{'day': 'Friday'}
"""
self.graph = {} # dictionary for graph attributes
self.node = {} # dictionary for node attributes
# We store two adjacency lists:
# the predecessors of node n are stored in the dict self.pred
# the successors of node n are stored in the dict self.succ=self.adj
self.adj = {} # empty adjacency dictionary
self.pred = {} # predecessor
self.succ = self.adj # successor
# attempt to load graph with data
if data is not None:
convert.to_networkx_graph(data,create_using=self)
# load graph attributes (must be after convert)
self.graph.update(attr)
self.edge=self.adj
def add_node(self, n, attr_dict=None, **attr):
"""Add a single node n and update node attributes.
Parameters
----------
n : node
A node can be any hashable Python object except None.
attr_dict : dictionary, optional (default= no attributes)
Dictionary of node attributes. Key/value pairs will
update existing data associated with the node.
attr : keyword arguments, optional
Set or change attributes using key=value.
See Also
--------
add_nodes_from
Examples
--------
>>> G = nx.Graph() # or DiGraph, MultiGraph, MultiDiGraph, etc
>>> G.add_node(1)
>>> G.add_node('Hello')
>>> K3 = nx.Graph([(0,1),(1,2),(2,0)])
>>> G.add_node(K3)
>>> G.number_of_nodes()
3
Use keywords set/change node attributes:
>>> G.add_node(1,size=10)
>>> G.add_node(3,weight=0.4,UTM=('13S',382871,3972649))
Notes
-----
A hashable object is one that can be used as a key in a Python
dictionary. This includes strings, numbers, tuples of strings
and numbers, etc.
On many platforms hashable items also include mutables such as
NetworkX Graphs, though one should be careful that the hash
doesn't change on mutables.
"""
# set up attribute dict
if attr_dict is None:
attr_dict=attr
else:
try:
attr_dict.update(attr)
except AttributeError:
raise NetworkXError(\
"The attr_dict argument must be a dictionary.")
if n not in self.succ:
self.succ[n] = {}
self.pred[n] = {}
self.node[n] = attr_dict
else: # update attr even if node already exists
self.node[n].update(attr_dict)
def add_nodes_from(self, nodes, **attr):
"""Add multiple nodes.
Parameters
----------
nodes : iterable container
A container of nodes (list, dict, set, etc.).
OR
A container of (node, attribute dict) tuples.
Node attributes are updated using the attribute dict.
attr : keyword arguments, optional (default= no attributes)
Update attributes for all nodes in nodes.
Node attributes specified in nodes as a tuple
take precedence over attributes specified generally.
See Also
--------
add_node
Examples
--------
>>> G = nx.Graph() # or DiGraph, MultiGraph, MultiDiGraph, etc
>>> G.add_nodes_from('Hello')
>>> K3 = nx.Graph([(0,1),(1,2),(2,0)])
>>> G.add_nodes_from(K3)
>>> sorted(G.nodes(),key=str)
[0, 1, 2, 'H', 'e', 'l', 'o']
Use keywords to update specific node attributes for every node.
>>> G.add_nodes_from([1,2], size=10)
>>> G.add_nodes_from([3,4], weight=0.4)
Use (node, attrdict) tuples to update attributes for specific
nodes.
>>> G.add_nodes_from([(1,dict(size=11)), (2,{'color':'blue'})])
>>> G.node[1]['size']
11
>>> H = nx.Graph()
>>> H.add_nodes_from(G.nodes(data=True))
>>> H.node[1]['size']
11
"""
for n in nodes:
try:
newnode=n not in self.succ
except TypeError:
nn,ndict = n
if nn not in self.succ:
self.succ[nn] = {}
self.pred[nn] = {}
newdict = attr.copy()
newdict.update(ndict)
self.node[nn] = newdict
else:
olddict = self.node[nn]
olddict.update(attr)
olddict.update(ndict)
continue
if newnode:
self.succ[n] = {}
self.pred[n] = {}
self.node[n] = attr.copy()
else:
self.node[n].update(attr)
def remove_node(self, n):
"""Remove node n.
Removes the node n and all adjacent edges.
Attempting to remove a non-existent node will raise an exception.
Parameters
----------
n : node
A node in the graph
Raises
        ------
NetworkXError
If n is not in the graph.
See Also
--------
remove_nodes_from
Examples
--------
>>> G = nx.Graph() # or DiGraph, MultiGraph, MultiDiGraph, etc
>>> G.add_path([0,1,2])
>>> G.edges()
[(0, 1), (1, 2)]
>>> G.remove_node(1)
>>> G.edges()
[]
"""
try:
nbrs=self.succ[n]
del self.node[n]
except KeyError: # NetworkXError if n not in self
raise NetworkXError("The node %s is not in the digraph."%(n,))
for u in nbrs:
del self.pred[u][n] # remove all edges n-u in digraph
del self.succ[n] # remove node from succ
for u in self.pred[n]:
del self.succ[u][n] # remove all edges n-u in digraph
del self.pred[n] # remove node from pred
def remove_nodes_from(self, nbunch):
"""Remove multiple nodes.
Parameters
----------
nodes : iterable container
A container of nodes (list, dict, set, etc.). If a node
in the container is not in the graph it is silently
ignored.
See Also
--------
remove_node
Examples
--------
>>> G = nx.Graph() # or DiGraph, MultiGraph, MultiDiGraph, etc
>>> G.add_path([0,1,2])
>>> e = G.nodes()
>>> e
[0, 1, 2]
>>> G.remove_nodes_from(e)
>>> G.nodes()
[]
"""
for n in nbunch:
try:
succs=self.succ[n]
del self.node[n]
for u in succs:
del self.pred[u][n] # remove all edges n-u in digraph
del self.succ[n] # now remove node
for u in self.pred[n]:
del self.succ[u][n] # remove all edges n-u in digraph
del self.pred[n] # now remove node
except KeyError:
pass # silent failure on remove
def add_edge(self, u, v, attr_dict=None, **attr):
"""Add an edge between u and v.
The nodes u and v will be automatically added if they are
not already in the graph.
Edge attributes can be specified with keywords or by providing
a dictionary with key/value pairs. See examples below.
Parameters
----------
u,v : nodes
Nodes can be, for example, strings or numbers.
Nodes must be hashable (and not None) Python objects.
attr_dict : dictionary, optional (default= no attributes)
Dictionary of edge attributes. Key/value pairs will
update existing data associated with the edge.
attr : keyword arguments, optional
Edge data (or labels or objects) can be assigned using
keyword arguments.
See Also
--------
add_edges_from : add a collection of edges
Notes
-----
Adding an edge that already exists updates the edge data.
Many NetworkX algorithms designed for weighted graphs use as
the edge weight a numerical value assigned to a keyword
which by default is 'weight'.
Examples
--------
The following all add the edge e=(1,2) to graph G:
>>> G = nx.Graph() # or DiGraph, MultiGraph, MultiDiGraph, etc
>>> e = (1,2)
>>> G.add_edge(1, 2) # explicit two-node form
>>> G.add_edge(*e) # single edge as tuple of two nodes
>>> G.add_edges_from( [(1,2)] ) # add edges from iterable container
Associate data to edges using keywords:
>>> G.add_edge(1, 2, weight=3)
>>> G.add_edge(1, 3, weight=7, capacity=15, length=342.7)
"""
# set up attribute dict
if attr_dict is None:
attr_dict=attr
else:
try:
attr_dict.update(attr)
except AttributeError:
raise NetworkXError(\
"The attr_dict argument must be a dictionary.")
# add nodes
if u not in self.succ:
self.succ[u]={}
self.pred[u]={}
self.node[u] = {}
if v not in self.succ:
self.succ[v]={}
self.pred[v]={}
self.node[v] = {}
# add the edge
datadict=self.adj[u].get(v,{})
datadict.update(attr_dict)
self.succ[u][v]=datadict
self.pred[v][u]=datadict
def add_edges_from(self, ebunch, attr_dict=None, **attr):
"""Add all the edges in ebunch.
Parameters
----------
ebunch : container of edges
Each edge given in the container will be added to the
            graph. The edges must be given as 2-tuples (u,v) or
3-tuples (u,v,d) where d is a dictionary containing edge
data.
attr_dict : dictionary, optional (default= no attributes)
Dictionary of edge attributes. Key/value pairs will
update existing data associated with each edge.
attr : keyword arguments, optional
Edge data (or labels or objects) can be assigned using
keyword arguments.
See Also
--------
add_edge : add a single edge
add_weighted_edges_from : convenient way to add weighted edges
Notes
-----
Adding the same edge twice has no effect but any edge data
will be updated when each duplicate edge is added.
Examples
--------
>>> G = nx.Graph() # or DiGraph, MultiGraph, MultiDiGraph, etc
>>> G.add_edges_from([(0,1),(1,2)]) # using a list of edge tuples
>>> e = zip(range(0,3),range(1,4))
>>> G.add_edges_from(e) # Add the path graph 0-1-2-3
Associate data to edges
>>> G.add_edges_from([(1,2),(2,3)], weight=3)
>>> G.add_edges_from([(3,4),(1,4)], label='WN2898')
"""
# set up attribute dict
if attr_dict is None:
attr_dict=attr
else:
try:
attr_dict.update(attr)
except AttributeError:
raise NetworkXError(\
"The attr_dict argument must be a dict.")
# process ebunch
for e in ebunch:
ne = len(e)
if ne==3:
u,v,dd = e
assert hasattr(dd,"update")
elif ne==2:
u,v = e
dd = {}
else:
raise NetworkXError(\
"Edge tuple %s must be a 2-tuple or 3-tuple."%(e,))
if u not in self.succ:
self.succ[u] = {}
self.pred[u] = {}
self.node[u] = {}
if v not in self.succ:
self.succ[v] = {}
self.pred[v] = {}
self.node[v] = {}
datadict=self.adj[u].get(v,{})
datadict.update(attr_dict)
datadict.update(dd)
self.succ[u][v] = datadict
self.pred[v][u] = datadict
def remove_edge(self, u, v):
"""Remove the edge between u and v.
Parameters
----------
u,v: nodes
Remove the edge between nodes u and v.
Raises
------
NetworkXError
If there is not an edge between u and v.
See Also
--------
remove_edges_from : remove a collection of edges
Examples
--------
>>> G = nx.Graph() # or DiGraph, etc
>>> G.add_path([0,1,2,3])
>>> G.remove_edge(0,1)
>>> e = (1,2)
>>> G.remove_edge(*e) # unpacks e from an edge tuple
>>> e = (2,3,{'weight':7}) # an edge with attribute data
>>> G.remove_edge(*e[:2]) # select first part of edge tuple
"""
try:
del self.succ[u][v]
del self.pred[v][u]
except KeyError:
raise NetworkXError("The edge %s-%s not in graph."%(u,v))
def remove_edges_from(self, ebunch):
"""Remove all edges specified in ebunch.
Parameters
----------
ebunch: list or container of edge tuples
Each edge given in the list or container will be removed
from the graph. The edges can be:
- 2-tuples (u,v) edge between u and v.
- 3-tuples (u,v,k) where k is ignored.
See Also
--------
remove_edge : remove a single edge
Notes
-----
Will fail silently if an edge in ebunch is not in the graph.
Examples
--------
>>> G = nx.Graph() # or DiGraph, MultiGraph, MultiDiGraph, etc
>>> G.add_path([0,1,2,3])
>>> ebunch=[(1,2),(2,3)]
>>> G.remove_edges_from(ebunch)
"""
for e in ebunch:
(u,v)=e[:2] # ignore edge data
if u in self.succ and v in self.succ[u]:
del self.succ[u][v]
del self.pred[v][u]
def has_successor(self, u, v):
"""Return True if node u has successor v.
This is true if graph has the edge u->v.
"""
return (u in self.succ and v in self.succ[u])
def has_predecessor(self, u, v):
"""Return True if node u has predecessor v.
This is true if graph has the edge u<-v.
"""
return (u in self.pred and v in self.pred[u])
def successors_iter(self,n):
"""Return an iterator over successor nodes of n.
neighbors_iter() and successors_iter() are the same.
"""
try:
return iter(self.succ[n])
except KeyError:
raise NetworkXError("The node %s is not in the digraph."%(n,))
def predecessors_iter(self,n):
"""Return an iterator over predecessor nodes of n."""
try:
return iter(self.pred[n])
except KeyError:
raise NetworkXError("The node %s is not in the digraph."%(n,))
def successors(self, n):
"""Return a list of successor nodes of n.
neighbors() and successors() are the same function.
"""
return list(self.successors_iter(n))
def predecessors(self, n):
"""Return a list of predecessor nodes of n."""
return list(self.predecessors_iter(n))
# digraph definitions
neighbors = successors
neighbors_iter = successors_iter
def edges_iter(self, nbunch=None, data=False):
"""Return an iterator over the edges.
Edges are returned as tuples with optional data
in the order (node, neighbor, data).
Parameters
----------
nbunch : iterable container, optional (default= all nodes)
A container of nodes. The container will be iterated
through once.
data : bool, optional (default=False)
If True, return edge attribute dict in 3-tuple (u,v,data).
Returns
-------
edge_iter : iterator
An iterator of (u,v) or (u,v,d) tuples of edges.
See Also
--------
edges : return a list of edges
Notes
-----
Nodes in nbunch that are not in the graph will be (quietly) ignored.
For directed graphs this returns the out-edges.
Examples
--------
>>> G = nx.DiGraph() # or MultiDiGraph, etc
>>> G.add_path([0,1,2,3])
>>> [e for e in G.edges_iter()]
[(0, 1), (1, 2), (2, 3)]
>>> list(G.edges_iter(data=True)) # default data is {} (empty dict)
[(0, 1, {}), (1, 2, {}), (2, 3, {})]
>>> list(G.edges_iter([0,2]))
[(0, 1), (2, 3)]
>>> list(G.edges_iter(0))
[(0, 1)]
"""
if nbunch is None:
nodes_nbrs=self.adj.items()
else:
nodes_nbrs=((n,self.adj[n]) for n in self.nbunch_iter(nbunch))
if data:
for n,nbrs in nodes_nbrs:
for nbr,data in nbrs.items():
yield (n,nbr,data)
else:
for n,nbrs in nodes_nbrs:
for nbr in nbrs:
yield (n,nbr)
# alias out_edges to edges
out_edges_iter=edges_iter
out_edges=Graph.edges
def in_edges_iter(self, nbunch=None, data=False):
"""Return an iterator over the incoming edges.
Parameters
----------
nbunch : iterable container, optional (default= all nodes)
A container of nodes. The container will be iterated
through once.
data : bool, optional (default=False)
If True, return edge attribute dict in 3-tuple (u,v,data).
Returns
-------
in_edge_iter : iterator
An iterator of (u,v) or (u,v,d) tuples of incoming edges.
See Also
--------
edges_iter : return an iterator of edges
"""
if nbunch is None:
nodes_nbrs=self.pred.items()
else:
nodes_nbrs=((n,self.pred[n]) for n in self.nbunch_iter(nbunch))
if data:
for n,nbrs in nodes_nbrs:
for nbr,data in nbrs.items():
yield (nbr,n,data)
else:
for n,nbrs in nodes_nbrs:
for nbr in nbrs:
yield (nbr,n)
def in_edges(self, nbunch=None, data=False):
"""Return a list of the incoming edges.
See Also
--------
edges : return a list of edges
"""
return list(self.in_edges_iter(nbunch, data))
def degree_iter(self, nbunch=None, weight=None):
"""Return an iterator for (node, degree).
The node degree is the number of edges adjacent to the node.
Parameters
----------
nbunch : iterable container, optional (default=all nodes)
A container of nodes. The container will be iterated
through once.
weight : string or None, optional (default=None)
The edge attribute that holds the numerical value used
as a weight. If None, then each edge has weight 1.
The degree is the sum of the edge weights adjacent to the node.
Returns
-------
nd_iter : an iterator
The iterator returns two-tuples of (node, degree).
See Also
--------
degree, in_degree, out_degree, in_degree_iter, out_degree_iter
Examples
--------
>>> G = nx.DiGraph() # or MultiDiGraph
>>> G.add_path([0,1,2,3])
>>> list(G.degree_iter(0)) # node 0 with degree 1
[(0, 1)]
>>> list(G.degree_iter([0,1]))
[(0, 1), (1, 2)]
"""
if nbunch is None:
nodes_nbrs=zip(iter(self.succ.items()),iter(self.pred.items()))
else:
nodes_nbrs=zip(
((n,self.succ[n]) for n in self.nbunch_iter(nbunch)),
((n,self.pred[n]) for n in self.nbunch_iter(nbunch)))
if weight is None:
for (n,succ),(n2,pred) in nodes_nbrs:
yield (n,len(succ)+len(pred))
else:
# edge weighted graph - degree is sum of edge weights
for (n,succ),(n2,pred) in nodes_nbrs:
yield (n,
sum((succ[nbr].get(weight,1) for nbr in succ))+
sum((pred[nbr].get(weight,1) for nbr in pred)))
def in_degree_iter(self, nbunch=None, weight=None):
"""Return an iterator for (node, in-degree).
The node in-degree is the number of edges pointing in to the node.
Parameters
----------
nbunch : iterable container, optional (default=all nodes)
A container of nodes. The container will be iterated
through once.
weight : string or None, optional (default=None)
The edge attribute that holds the numerical value used
as a weight. If None, then each edge has weight 1.
The degree is the sum of the edge weights adjacent to the node.
Returns
-------
nd_iter : an iterator
The iterator returns two-tuples of (node, in-degree).
See Also
--------
degree, in_degree, out_degree, out_degree_iter
Examples
--------
>>> G = nx.DiGraph()
>>> G.add_path([0,1,2,3])
>>> list(G.in_degree_iter(0)) # node 0 with degree 0
[(0, 0)]
>>> list(G.in_degree_iter([0,1]))
[(0, 0), (1, 1)]
"""
if nbunch is None:
nodes_nbrs=self.pred.items()
else:
nodes_nbrs=((n,self.pred[n]) for n in self.nbunch_iter(nbunch))
if weight is None:
for n,nbrs in nodes_nbrs:
yield (n,len(nbrs))
else:
# edge weighted graph - degree is sum of edge weights
for n,nbrs in nodes_nbrs:
yield (n, sum(data.get(weight,1) for data in nbrs.values()))
def out_degree_iter(self, nbunch=None, weight=None):
"""Return an iterator for (node, out-degree).
The node out-degree is the number of edges pointing out of the node.
Parameters
----------
nbunch : iterable container, optional (default=all nodes)
A container of nodes. The container will be iterated
through once.
weight : string or None, optional (default=None)
The edge attribute that holds the numerical value used
as a weight. If None, then each edge has weight 1.
The degree is the sum of the edge weights adjacent to the node.
Returns
-------
nd_iter : an iterator
The iterator returns two-tuples of (node, out-degree).
See Also
--------
degree, in_degree, out_degree, in_degree_iter
Examples
--------
>>> G = nx.DiGraph()
>>> G.add_path([0,1,2,3])
>>> list(G.out_degree_iter(0)) # node 0 with degree 1
[(0, 1)]
>>> list(G.out_degree_iter([0,1]))
[(0, 1), (1, 1)]
"""
if nbunch is None:
nodes_nbrs=self.succ.items()
else:
nodes_nbrs=((n,self.succ[n]) for n in self.nbunch_iter(nbunch))
if weight is None:
for n,nbrs in nodes_nbrs:
yield (n,len(nbrs))
else:
# edge weighted graph - degree is sum of edge weights
for n,nbrs in nodes_nbrs:
yield (n, sum(data.get(weight,1) for data in nbrs.values()))
def in_degree(self, nbunch=None, weight=None):
"""Return the in-degree of a node or nodes.
The node in-degree is the number of edges pointing in to the node.
Parameters
----------
nbunch : iterable container, optional (default=all nodes)
A container of nodes. The container will be iterated
through once.
weight : string or None, optional (default=None)
The edge attribute that holds the numerical value used
as a weight. If None, then each edge has weight 1.
The degree is the sum of the edge weights adjacent to the node.
Returns
-------
nd : dictionary, or number
A dictionary with nodes as keys and in-degree as values or
a number if a single node is specified.
See Also
--------
degree, out_degree, in_degree_iter
Examples
--------
>>> G = nx.DiGraph() # or MultiDiGraph
>>> G.add_path([0,1,2,3])
>>> G.in_degree(0)
0
>>> G.in_degree([0,1])
{0: 0, 1: 1}
>>> list(G.in_degree([0,1]).values())
[0, 1]
"""
if nbunch in self: # return a single node
return next(self.in_degree_iter(nbunch,weight))[1]
else: # return a dict
return dict(self.in_degree_iter(nbunch,weight))
def out_degree(self, nbunch=None, weight=None):
"""Return the out-degree of a node or nodes.
The node out-degree is the number of edges pointing out of the node.
Parameters
----------
nbunch : iterable container, optional (default=all nodes)
A container of nodes. The container will be iterated
through once.
weight : string or None, optional (default=None)
The edge attribute that holds the numerical value used
as a weight. If None, then each edge has weight 1.
The degree is the sum of the edge weights adjacent to the node.
Returns
-------
nd : dictionary, or number
A dictionary with nodes as keys and out-degree as values or
a number if a single node is specified.
Examples
--------
>>> G = nx.DiGraph() # or MultiDiGraph
>>> G.add_path([0,1,2,3])
>>> G.out_degree(0)
1
>>> G.out_degree([0,1])
{0: 1, 1: 1}
>>> list(G.out_degree([0,1]).values())
[1, 1]
"""
if nbunch in self: # return a single node
return next(self.out_degree_iter(nbunch,weight))[1]
else: # return a dict
return dict(self.out_degree_iter(nbunch,weight))
def clear(self):
"""Remove all nodes and edges from the graph.
This also removes the name, and all graph, node, and edge attributes.
Examples
--------
>>> G = nx.Graph() # or DiGraph, MultiGraph, MultiDiGraph, etc
>>> G.add_path([0,1,2,3])
>>> G.clear()
>>> G.nodes()
[]
>>> G.edges()
[]
"""
self.succ.clear()
self.pred.clear()
self.node.clear()
self.graph.clear()
def is_multigraph(self):
"""Return True if graph is a multigraph, False otherwise."""
return False
def is_directed(self):
"""Return True if graph is directed, False otherwise."""
return True
def to_directed(self):
"""Return a directed copy of the graph.
Returns
-------
G : DiGraph
A deepcopy of the graph.
Notes
-----
This returns a "deepcopy" of the edge, node, and
graph attributes which attempts to completely copy
all of the data and references.
This is in contrast to the similar D=DiGraph(G) which returns a
shallow copy of the data.
See the Python copy module for more information on shallow
and deep copies, http://docs.python.org/library/copy.html.
Examples
--------
>>> G = nx.Graph() # or MultiGraph, etc
>>> G.add_path([0,1])
>>> H = G.to_directed()
>>> H.edges()
[(0, 1), (1, 0)]
If already directed, return a (deep) copy
>>> G = nx.DiGraph() # or MultiDiGraph, etc
>>> G.add_path([0,1])
>>> H = G.to_directed()
>>> H.edges()
[(0, 1)]
"""
return deepcopy(self)
def to_undirected(self, reciprocal=False):
"""Return an undirected representation of the digraph.
Parameters
----------
reciprocal : bool (optional)
If True only keep edges that appear in both directions
in the original digraph.
Returns
-------
G : Graph
An undirected graph with the same name and nodes and
with edge (u,v,data) if either (u,v,data) or (v,u,data)
is in the digraph. If both edges exist in digraph and
their edge data is different, only one edge is created
with an arbitrary choice of which edge data to use.
You must check and correct for this manually if desired.
Notes
-----
If edges in both directions (u,v) and (v,u) exist in the
graph, attributes for the new undirected edge will be a combination of
the attributes of the directed edges. The edge data is updated
in the (arbitrary) order that the edges are encountered. For
more customized control of the edge attributes use add_edge().
This returns a "deepcopy" of the edge, node, and
graph attributes which attempts to completely copy
all of the data and references.
This is in contrast to the similar G=DiGraph(D) which returns a
shallow copy of the data.
See the Python copy module for more information on shallow
and deep copies, http://docs.python.org/library/copy.html.
"""
H=Graph()
H.name=self.name
H.add_nodes_from(self)
if reciprocal is True:
H.add_edges_from( (u,v,deepcopy(d))
for u,nbrs in self.adjacency_iter()
for v,d in nbrs.items()
if v in self.pred[u])
else:
H.add_edges_from( (u,v,deepcopy(d))
for u,nbrs in self.adjacency_iter()
for v,d in nbrs.items() )
H.graph=deepcopy(self.graph)
H.node=deepcopy(self.node)
return H
def reverse(self, copy=True):
"""Return the reverse of the graph.
The reverse is a graph with the same nodes and edges
but with the directions of the edges reversed.
Parameters
----------
copy : bool optional (default=True)
If True, return a new DiGraph holding the reversed edges.
            If False, the reverse graph is created in place by
            reversing the edges of the original graph (this modifies
            the original graph).
"""
if copy:
H = self.__class__(name="Reverse of (%s)"%self.name)
H.add_nodes_from(self)
H.add_edges_from( (v,u,deepcopy(d)) for u,v,d
in self.edges(data=True) )
H.graph=deepcopy(self.graph)
H.node=deepcopy(self.node)
else:
self.pred,self.succ=self.succ,self.pred
self.adj=self.succ
H=self
return H
def subgraph(self, nbunch):
"""Return the subgraph induced on nodes in nbunch.
The induced subgraph of the graph contains the nodes in nbunch
and the edges between those nodes.
Parameters
----------
nbunch : list, iterable
A container of nodes which will be iterated through once.
Returns
-------
G : Graph
A subgraph of the graph with the same edge attributes.
Notes
-----
The graph, edge or node attributes just point to the original graph.
So changes to the node or edge structure will not be reflected in
the original graph while changes to the attributes will.
To create a subgraph with its own copy of the edge/node attributes use:
nx.Graph(G.subgraph(nbunch))
If edge attributes are containers, a deep copy can be obtained using:
G.subgraph(nbunch).copy()
For an inplace reduction of a graph to a subgraph you can remove nodes:
        G.remove_nodes_from([n for n in G if n not in set(nbunch)])
Examples
--------
>>> G = nx.Graph() # or DiGraph, MultiGraph, MultiDiGraph, etc
>>> G.add_path([0,1,2,3])
>>> H = G.subgraph([0,1,2])
>>> H.edges()
[(0, 1), (1, 2)]
"""
bunch = self.nbunch_iter(nbunch)
# create new graph and copy subgraph into it
H = self.__class__()
# copy node and attribute dictionaries
for n in bunch:
H.node[n]=self.node[n]
# namespace shortcuts for speed
H_succ=H.succ
H_pred=H.pred
self_succ=self.succ
# add nodes
for n in H:
H_succ[n]={}
H_pred[n]={}
# add edges
for u in H_succ:
Hnbrs=H_succ[u]
for v,datadict in self_succ[u].items():
if v in H_succ:
# add both representations of edge: u-v and v-u
Hnbrs[v]=datadict
H_pred[v][u]=datadict
H.graph=self.graph
return H
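# Usage sketch (illustrative only, not part of the class above; assumes the
# class is exposed as nx.DiGraph):
#   >>> G = nx.DiGraph()
#   >>> G.add_edges_from([(1, 2), (2, 3)])
#   >>> G.in_degree(2), G.out_degree(2)
#   (1, 1)
#   >>> sorted(G.reverse().edges())
#   [(2, 1), (3, 2)]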
|
from wxgeometrie.geolib import Point, Fonction, Interpolation_polynomiale_par_morceaux, \
Glisseur_courbe, Interpolation_lineaire, Courbe
def test_Courbe():
f = Fonction('1/(x+3)')
c1 = Courbe(f)
assert isinstance(c1, Courbe)
A = Point(0, 0)
B = Point(-1, 2)
C = Point(4, 3)
D = Point(-3, 1)
E = Point(4, 5)
c2 = Courbe(A, B, C, D, E)
try:
import scipy
assert isinstance(c2, Interpolation_polynomiale_par_morceaux), type(c2)
except ImportError:
assert isinstance(c2, Interpolation_lineaire)
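# Note: Courbe(...) dispatches on scipy availability -- piecewise polynomial
# interpolation when scipy is importable, plain linear interpolation
# otherwise -- which is exactly what the try/except above asserts.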
|
"""QGIS Unit tests for QgsActionManager.
.. note:: This program is free software; you can redistribute it and/or modify
it under the terms of the GNU General Public License as published by
the Free Software Foundation; either version 2 of the License, or
(at your option) any later version.
"""
__author__ = 'Nyall Dawson'
__date__ = '28/05/2016'
__copyright__ = 'Copyright 2016, The QGIS Project'
__revision__ = '$Format:%H$'
import qgis # NOQA switch sip api
from qgis.core import (QgsVectorLayer,
QgsFeature,
QgsActionManager,
QgsAction,
QgsExpressionContext,
QgsField,
QgsFields
)
from qgis.PyQt.QtCore import QDir, QTemporaryFile, QUuid
from qgis.testing import start_app, unittest
import os
import time
import platform
start_app()
class TestQgsActionManager(unittest.TestCase):
def __init__(self, methodName):
"""Run once on class initialization."""
unittest.TestCase.__init__(self, methodName)
self.layer = QgsVectorLayer("Point?field=fldtxt:string&field=fldint:integer&field=flddate:datetime",
"test_layer", "memory")
self.manager = QgsActionManager(self.layer)
# make a little script to aid in recording action outputs
# this is just a little python file which writes out its arguments to a text file
self.run_script_file = os.path.join(QDir.tempPath(), 'run_action.py')
with open(self.run_script_file, 'w') as s:
s.write('import sys\n')
s.write('open(sys.argv[1], "w").write(" ".join(sys.argv[2:]))\n')
def get_temp_filename(self):
tmpFile = QTemporaryFile()
        tmpFile.open()  # fileName is not available until open() is called
tmpName = tmpFile.fileName()
tmpFile.close()
tmpFile.setAutoRemove(False)
return tmpName
def create_action(self, dest_file, text_to_write):
""" returns an action which writes some output to a file """
return 'python {} {} {}'.format(self.run_script_file, dest_file, text_to_write)
def testLayer(self):
self.assertEqual(self.manager.layer(), self.layer)
def testAddAction(self):
""" Test adding actions """
# should be empty to start with
self.assertEqual(self.manager.actions(), [])
# add an action
action1 = QgsAction(QgsAction.GenericPython, 'Test Action', 'i=1')
self.manager.addAction(action1)
self.assertEqual(len(self.manager.actions()), 1)
self.assertEqual(self.manager.actions()[0].type(), QgsAction.GenericPython)
self.assertEqual(self.manager.actions()[0].name(), 'Test Action')
self.assertEqual(self.manager.actions()[0].command(), 'i=1')
# add another action
action2 = QgsAction(QgsAction.Windows, 'Test Action2', 'i=2')
self.manager.addAction(action2)
self.assertEqual(len(self.manager.actions()), 2)
self.assertEqual(self.manager.action(action2.id()).type(), QgsAction.Windows)
self.assertEqual(self.manager.action(action2.id()).name(), 'Test Action2')
self.assertEqual(self.manager.action(action2.id()).command(), 'i=2')
id3 = self.manager.addAction(QgsAction.Generic, 'Test Action3', 'i=3')
self.assertEqual(len(self.manager.actions()), 3)
self.assertEqual(self.manager.action(id3).type(), QgsAction.Generic)
self.assertEqual(self.manager.action(id3).name(), 'Test Action3')
self.assertEqual(self.manager.action(id3).command(), 'i=3')
def testRemoveActions(self):
""" test removing actions """
# add an action
self.manager.addAction(QgsAction.GenericPython, 'test_action', 'i=1')
# clear the manager and check that it's empty
self.manager.clearActions()
self.assertEqual(self.manager.actions(), [])
# add some actions
id1 = self.manager.addAction(QgsAction.GenericPython, 'test_action', 'i=1')
id2 = self.manager.addAction(QgsAction.GenericPython, 'test_action2', 'i=2')
id3 = self.manager.addAction(QgsAction.GenericPython, 'test_action3', 'i=3')
# remove non-existent action
self.manager.removeAction(QUuid.createUuid())
# remove them one by one
self.manager.removeAction(id2)
self.assertEqual(len(self.manager.actions()), 2)
self.assertEqual(self.manager.action(id1).name(), 'test_action')
self.assertEqual(self.manager.action(id3).name(), 'test_action3')
self.manager.removeAction(id1)
self.assertEqual(len(self.manager.actions()), 1)
self.assertEqual(self.manager.action(id3).name(), 'test_action3')
self.manager.removeAction(id3)
self.assertEqual(len(self.manager.actions()), 0)
def testDefaultAction(self):
""" test default action for layer"""
self.manager.clearActions()
action1 = QgsAction(QgsAction.GenericPython, 'test_action', '', 'i=1', False, actionScopes={'Feature'})
self.manager.addAction(action1)
action2 = QgsAction(QgsAction.GenericPython, 'test_action2', 'i=2')
self.manager.addAction(action2)
        # initially should not be set
self.assertFalse(self.manager.defaultAction('Feature').isValid())
# set bad default action
self.manager.setDefaultAction('Feature', QUuid.createUuid())
self.assertFalse(self.manager.defaultAction('Feature').isValid())
# set good default action
self.manager.setDefaultAction('Feature', action1.id())
self.assertTrue(self.manager.defaultAction('Feature').isValid())
        self.assertEqual(self.manager.defaultAction('Feature').id(), action1.id())
        self.assertNotEqual(self.manager.defaultAction('Feature').id(), action2.id())
# if default action is removed, should be reset to -1
self.manager.clearActions()
self.assertFalse(self.manager.defaultAction('Feature').isValid())
def check_action_result(self, temp_file):
with open(temp_file, 'r') as result:
output = result.read()
return output
    # unittest.expectedFailure takes no arguments, so it cannot carry a
    # platform condition; skipIf expresses the intended guard (the scripted
    # actions below rely on a Unix-style environment)
    @unittest.skipIf(platform.system() != 'Linux', 'requires Linux')
def testDoAction(self):
""" test running action """
self.manager.clearActions()
# simple action
temp_file = self.get_temp_filename()
id1 = self.manager.addAction(QgsAction.Unix, 'test_action', self.create_action(temp_file, 'test output'))
fields = QgsFields()
fields.append(QgsField('my_field'))
fields.append(QgsField('my_other_field'))
f = QgsFeature(fields, 1)
f.setAttributes([5, 'val'])
c = QgsExpressionContext()
self.manager.doAction(id1, f, c)
time.sleep(0.5)
        self.assertEqual(self.check_action_result(temp_file), 'test output')
# action with substitutions
temp_file = self.get_temp_filename()
id2 = self.manager.addAction(QgsAction.Unix, 'test_action', self.create_action(temp_file, 'test [% $id %] output [% @layer_name %]'))
self.manager.doAction(id2, f, c)
time.sleep(0.5)
        self.assertEqual(self.check_action_result(temp_file), 'test 1 output test_layer')
# test doAction using field variant
temp_file = self.get_temp_filename()
id3 = self.manager.addAction(QgsAction.Unix, 'test_action',
self.create_action(temp_file, 'test : [% @field_index %] : [% @field_name %] : [% @field_value%]'))
self.manager.doActionFeature(id3, f, 0)
time.sleep(0.5)
        self.assertEqual(self.check_action_result(temp_file), 'test : 0 : my_field : 5')
self.manager.doActionFeature(id3, f, 1)
time.sleep(0.5)
        self.assertEqual(self.check_action_result(temp_file), 'test : 1 : my_other_field : val')
if __name__ == '__main__':
unittest.main()
|
__all__ = ["_", "N_"]
import gettext
_ = lambda x: gettext.ldgettext("kdump-anaconda-addon", x)
N_ = lambda x: x
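# Usage sketch (standard gettext conventions): _("msg") returns the
# translation of "msg" from the "kdump-anaconda-addon" domain at runtime,
# while N_("msg") returns "msg" unchanged and only marks it for extraction
# by tools such as xgettext.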
|
import sys
import os
import seq
import math
import networkx as nx
from clint.textui import colored
from shutil import copyfile,move
import platform
plat = platform.platform()
from logger import Logger
from conf import tempname
from conf import dosamp
from conf import nthread
from conf import length_limit,evalue_limit,perc_identity
import filter_blast
import emoticons
nthread = str(nthread)
mac = False
if "Darwin" in plat:
mac = True
use_merge = True
def make_blast_db_from_cluster(indir,tempdir="./"):
outf = open(tempdir+tempname,"w")
for i in os.listdir(indir):
        if i[-3:] != ".fa" and i[-4:] != ".fas" and i[-6:] != ".fasta":
continue
fn = i
for j in seq.read_fasta_file_iter(indir+"/"+i):
j.name = fn+"___"+j.name
outf.write(j.get_fasta())
outf.close()
cmd = "makeblastdb -in "+tempdir+tempname+" -out "+tempdir+tempname+".db -dbtype nucl > /dev/null 2>&1"
os.system(cmd)
def make_blast_db_from_cluster_samp(indir,tempdir="./"):
outf = open(tempdir+tempname,"w")
for i in os.listdir(indir):
if i[-3:] != ".fa":
continue
fn = i
if os.path.isfile(indir+"/"+i.replace(".fa",".samp")):
for j in seq.read_fasta_file_iter(indir+"/"+i.replace(".fa",".samp")):
j.name = fn+"___"+j.name
outf.write(j.get_fasta())
else:
for j in seq.read_fasta_file_iter(indir+"/"+i):
j.name = fn+"___"+j.name
outf.write(j.get_fasta())
outf.close()
cmd = "makeblastdb -in "+tempdir+tempname+" -out "+tempdir+tempname+".db -dbtype nucl > /dev/null 2>&1"
os.system(cmd)
def blast_file_against_db(indir,filename,tempdir="./"):
cmd = "blastn -task blastn -db "+tempdir+tempname+".db -query "+indir+"/"+filename+" -perc_identity "+str(perc_identity)+" -evalue "+str(evalue_limit)+" -num_threads "+nthread+" -max_target_seqs 10000000 -out "+tempdir+tempname+".rawblastn -outfmt '6 qseqid qlen sseqid slen frames pident nident length mismatch gapopen qstart qend sstart send evalue bitscore'"
os.system(cmd)
def add_file(fromfile,tofile):
tf = open(tofile,"a")
ff = open(fromfile,"r")
for i in ff:
tf.write(i)
ff.close()
tf.close()
def write_merge_table_and_temp_aln_file(filelist,tempdir="./"):
tf = open(tempdir+"subMSAtable","w")
tf2 = open(tempdir+"temp.mergealn","w")
count = 1
addlater = []
for i in filelist:
flcount = 0
for j in seq.read_fasta_file_iter(i):
flcount += 1
if flcount > 1:
for j in seq.read_fasta_file_iter(i):
tf.write(str(count)+" ")
count += 1
tf2.write(j.get_fasta())
tf.write("# "+i)
tf.write("\n")
else:
for j in seq.read_fasta_file_iter(i):
addlater.append(j.get_fasta())
for i in addlater:
tf2.write(i)
tf.close()
tf2.close()
#x = [j.split("/")[-1] for j in filelist]
#copyfile("subMSAtable","subMSAtable_"+".".join(x))
#copyfile("temp.mergealn","temp.mergealn_"+".".join(x))
def check_unaligned(infile):
clen = None
count = 0
for i in seq.read_fasta_file_iter(infile):
count += 1
        if clen is None:
clen = len(i.seq)
else:
if len(i.seq) != clen:
return False
if count == 0:
return False
return True
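# Note: check_unaligned() returns True only for a non-empty FASTA file whose
# records all share one sequence length (i.e. an alignment); an empty or
# ragged file returns False so merge_alignments() below can fall back to
# realigning from scratch.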
def merge_alignments(outfile,tempdir="./"):
cmd = "mafft --thread "+nthread+" --quiet --adjustdirection --merge "+tempdir+"subMSAtable "+tempdir+"temp.mergealn 2> "+tempdir+"mafft.out > "+outfile
os.system(cmd)
    if not os.path.exists(outfile):
        print(colored.red("ALIGNMENT DOESN'T EXIST"+" "+emoticons.get_ran_emot("sad")))
        sys.exit(1)
    # for some buggy reason these can be unaligned, so realigning here
    if not check_unaligned(outfile):
        print(colored.red("PROBLEM REDOING ALIGNMENT ("+outfile+")"+" "+emoticons.get_ran_emot("sad")))
#log.w("PROBLEM REDOING ALIGNMENT")
copyfile(tempdir+"subMSAtable","problem_subMSAtable")
copyfile(tempdir+"temp.mergealn","problem_temp.mergealn")
cmd = "mafft --quiet --adjustdirection "+tempdir+"temp.mergealn > "+outfile
os.system(cmd)
    if not mac:
os.system("sed -i 's/_R_//g' "+outfile)
else:
os.system("sed -i '' 's/_R_//g' "+outfile)
#os.remove("subMSAtable")
#os.remove("temp.mergealn")
if __name__ == "__main__":
if len(sys.argv) != 4 and len(sys.argv) != 5:
print("python "+sys.argv[0]+" fromclusterdir tooutdir logfile [TEMPDIR]")
sys.exit(0)
dir1 = sys.argv[1]
diro = sys.argv[2]
logfile = sys.argv[3]
log = Logger(logfile)
log.a()
tempdir = "./"
if len(sys.argv) == 5:
tempdir = sys.argv[4]
if tempdir[-1] != "/":
tempdir += "/"
curcount = 0
if len(os.listdir(diro)) == 0:
for i in os.listdir(dir1):
log.w(" ".join(["INITIAL CLUSTER POPULATION COPY TO",diro+"/"+i,"FROM",dir1+"/"+i]))
copyfile(dir1+"/"+i,diro+"/"+i)
else:
for i in os.listdir(diro):
if ".fa" not in i:
continue
x = int(i.replace("cluster","").replace(".fa",""))
if x > curcount:
curcount = x
curcount += 1
#make blast dir of the out
if dosamp:
make_blast_db_from_cluster_samp(diro, tempdir)
else:
make_blast_db_from_cluster(diro, tempdir)
count = 1
G = nx.Graph()
for i in os.listdir(dir1):
if i[-3:] != ".fa":
continue
# doing just the sample for speed
if dosamp:
if os.path.isfile(dir1+"/"+i.replace(".fa",".samp")):
blast_file_against_db(dir1,i.replace(".fa",".samp"),tempdir)
else:
blast_file_against_db(dir1,i,tempdir)
else:
blast_file_against_db(dir1,i,tempdir)
dclus,clus = filter_blast.process_blast_out(tempdir+tempname+".rawblastn")
if len(clus) > 0:
for j in clus:
G.add_edge(dir1+"/"+i,diro+"/"+j)
else:
G.add_node(dir1+"/"+i)
# need to log these operations
origcurcount = curcount
for i in nx.connected_components(G):
tf = open(diro+"/"+"cluster"+str(curcount)+".fa","w")
log.w(" ".join(["MERGING FASTA TO",diro+"/cluster"+str(curcount)+".fa","FROM"," ".join(list(i))]))
curcount += 1
for j in i:
for k in seq.read_fasta_file_iter(j):
tf.write(k.get_fasta())
tf.close()
    if use_merge:
for i in nx.connected_components(G):
if len(i) > 1:
x = [j.replace(".fa",".aln") for j in i]
log.w(" ".join(["MERGING ALIGNMENTS FROM"," ".join(x)]))
write_merge_table_and_temp_aln_file(x,tempdir)
outfile = diro+"/"+"cluster"+str(origcurcount)+".aln"
merge_alignments(outfile,tempdir)
log.w(" ".join(["CREATED FROM MERGE",diro+"/cluster"+str(origcurcount)+".aln"]))
for j in i:
if diro in j:
log.w(" ".join(["REMOVING ALIGNMENTS",j,j.replace(".fa",".aln")]))
os.remove(j)
os.remove(j.replace(".fa",".aln"))
if os.path.isfile(j.replace(".fa",".tre")):
os.remove(j.replace(".fa",".tre"))
if os.path.isfile(j.replace(".fa",".samp")):
os.remove(j.replace(".fa",".samp"))
else:
tf = open(diro+"/"+"cluster"+str(origcurcount)+".aln","w")
for j in i:
#copyfile(j,diro+"/cluster"+str(origcurcount)+".fa")
log.w(" ".join(["CREATING SINGLE ALIGNMENT",diro+"/cluster"+str(origcurcount)+".aln","FROM",j.replace(".fa",".aln")]))
numseq = 0
for k in seq.read_fasta_file_iter(j.replace(".fa",".aln")):
tf.write(k.get_fasta())
numseq += 1
if diro in j:
log.w(" ".join(["REMOVING ALIGNMENTS"+j,j.replace(".fa",".aln")]))
os.remove(j)
os.remove(j.replace(".fa",".aln"))
if os.path.isfile(j.replace(".fa",".tre")):
os.remove(j.replace(".fa",".tre"))
if os.path.isfile(j.replace(".fa",".samp")):
os.remove(j.replace(".fa",".samp"))
tf.close()
origcurcount += 1
log.c()
|
"""Check format
"""
__revision__ = ''
notpreceded= 1
notfollowed =1
notfollowed <=1
correct = 1
correct >= 1
def func(arg, arg2):
"""test named argument
"""
func(arg=arg+1,
arg2=arg2-arg)
aaaa,bbbb = 1,2
aaaa |= bbbb
aaaa &= bbbb
if aaaa: pass
else:
aaaa,bbbb = 1,2
aaaa,bbbb = bbbb,aaaa
bbbb = (1,2,3)
aaaa = bbbb[1:]
aaaa = bbbb[:1]
aaaa = bbbb[:]
aaaa = {aaaa:bbbb}
"""docstring,should not match
isn't it:yes!
a=b
"""
aaaa = 'multiple lines\
string,hehehe'
boo = 2 # allclose(x,y) uses |x-y|<ATOL+RTOL*|y|
def other(funky):
"""yo, test formatted result with indentation"""
funky= funky+2
html = """<option value="=">ist genau gleich</option>
yo+=4
"""
html2 = """<option value='='>ist genau gleich</option>
yo+=4
"""
func('''<body>Hello
</body>''', 0)
assert boo <= 10, "Note is %.2f. Either you cheated, or pylint's \
broken!" % boo
def _gc_debug(gcc):
"""bad format undetected w/ py2.5"""
ocount = {}
for obj in gcc.get_objects():
try:
ocount[obj.__class__]+= 1
except KeyError:
ocount[obj.__class__]=1
except AttributeError:
pass
def hop(context):
"""multi-lines string"""
return ['''<a id="sendbutton" href="javascript: $('%(domid)s').submit()">
<img src="%(sendimgpath)s" alt="%(send)s"/>%(send)s</a>''' % context,
'''<a id="cancelbutton" href="javascript: history.back()">
<img src="%(cancelimgpath)s" alt="%(cancel)s"/>%(cancel)s</a>''' % context,
]
titreprojet = '<tr><td colspan="10">\
<img src="images/drapeau_vert.png" alt="Drapeau vert" />\
<strong>%s</strong></td></tr>' % aaaa
with open('a') as a, open('b') as b:
pass
with open('a') as a, open('b') as b: pass # multiple-statements
try:
pass
except IOError, e:
print e
finally:
pass
try:
pass
except IOError, e:
print e
finally: pass # multiple-statements
if True: print False
|
import urllib
import re
from bs4 import BeautifulSoup
class LeaderSkill(object):
_name = ""
_desc = ""
_subDesc = ""
_monsterList = []
_monsterNum = 0
    def __init__(self, _name="", _desc="", _subDesc="", _monsterList=None, _monsterNum=0):
        self._name = _name
        self._desc = _desc
        self._subDesc = _subDesc
        # default to None rather than [] so one mutable list is not shared
        # across every instance created with the default argument
        self._monsterList = _monsterList if _monsterList is not None else []
        self._monsterNum = _monsterNum
def showSkill(self):
print "name: %s"%self._name
print "_desc %s"% self._desc
print "_subDesc %s"% self._subDesc
print "_monsterNum: %d"% self._monsterNum
print ""
def getLeaderSkillListFromUrl():
url = 'http://www.thisisgame.com/pad/info/skill/list.php?class1=2' # all skill List site
u = urllib.urlopen(url)
c = u.read()
u.close()
    soup = BeautifulSoup(c, "html.parser")  # name the parser explicitly
rst = soup.find_all("tbody")
c = rst[0]
skill_list = c.find_all('tr')
    _skillList = []
for _skill in skill_list:
# 1st get name
name = _skill.contents[0].contents[1].encode('utf-8').strip()
desc = _skill.contents[1].contents[0].encode('utf-8').strip()
        try:
            subDesc = _skill.contents[1].contents[1].contents[0].encode('utf-8').strip()
        except (IndexError, AttributeError):
            subDesc = ""
rgx_mon_num = re.compile("icon_([\d]+)")
        try:
            monsterList = rgx_mon_num.findall(_skill.contents[2].contents[1].contents[0].encode('utf-8'))
            monsterNum = len(monsterList)
        except (IndexError, AttributeError):
            monsterList = []
            monsterNum = 0
_s = LeaderSkill(name, desc, subDesc, monsterList, monsterNum)
_s.showSkill()
        _skillList.append(_s)
return _skillList
if __name__ == "__main__":
    getLeaderSkillListFromUrl()
|
import os
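# Generate two 16-frame solid-red test clips, one at 24 fps and one at 30 fps.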
os.system('ffmpeg -f lavfi -i color=red -frames:v 16 -r 24 red_24.mp4')
os.system('ffmpeg -f lavfi -i color=red -frames:v 16 -r 30 red_30.mp4')
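# An equivalent sketch using subprocess instead of os.system (assumes ffmpeg
# is on PATH; shown for illustration only):
#
# import subprocess
# for rate in (24, 30):
#     subprocess.call(['ffmpeg', '-f', 'lavfi', '-i', 'color=red',
#                      '-frames:v', '16', '-r', str(rate),
#                      'red_%d.mp4' % rate])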
|
from django.test import TestCase
from .models import Route
class RouteModelTestCase(TestCase):
def create_objects(self, objects):
"""
Creates list of dict()s as Route objects
Assumes Route.order as Route.pk
"""
for route in objects:
route['order'] = route['pk']
Route.objects.create(**route)
def test_url_path_is_saved(self):
"""
        Tests that the full relative URL path is saved in the database
"""
self.create_objects([
{'pk': 1, 'name': 'Forums', 'slug': 'forums/test/'},
{'pk': 2, 'name': 'Forum 1', 'slug': 'forum-1', 'parent_id': 1},
])
for route in Route.objects.all():
self.assertEqual(route.cached_pattern,
route.get_relative_url())
# Test recursive routes
route = Route.objects.get(pk=2)
self.assertTrue(route.parent.cached_pattern in route.cached_pattern)
def test_slug_is_cleaned(self):
"""
Check if clean function removes trailing slash but allows slashes
to be in the middle of slugs
"""
self.create_objects([
{'pk': 99, 'name': 'Bogus slug', 'slug': 'bogus/'},
{'pk': 98, 'name': 'Bogus slug 2', 'slug': 'bogus/test'},
])
self.assertFalse(Route.objects.get(pk=99).slug[-1] == '/')
self.assertTrue('/' in Route.objects.get(pk=98).slug)
def test_create_without_slug(self):
"""
Test if we broke something when creating a route without a slug
"""
self.create_objects([
{'pk': 3, 'name': 'No slug', 'slug': '', 'order': 2},
])
obj = Route.objects.get(pk=3)
self.assertTrue(obj.slug == '')
class RouteApphooksTestCase(TestCase):
def setUp(self):
pass
def test_choices_loaded_from_pool(self):
pass
|
__author__ = "Martin Blais <blais@furius.ca>"
import unittest
from beancount.core import flags
class TestFlags(unittest.TestCase):
ALLOW_NOT_UNIQUE = {'FLAG_IMPORT'}
def test_unique_flags(self):
names = set()
values = set()
for name, value in flags.__dict__.items():
# pylint: disable=bad-continuation
if (not name.startswith("FLAG_") or
name in self.ALLOW_NOT_UNIQUE):
continue
names.add(name)
values.add(value)
self.assertEqual(len(names), len(values))
|
from askapdev.rbuild.builders import Setuptools as Builder
builder = Builder()
builder.remote_archive = "APLpy-0.9.5.tar.gz"
builder.build()
|
"""
@org: GAE-CMS.COM
@description: Python-based CMS designed for Google App Engine
@(c): gae-cms.com 2012
@author: Imran Somji
@license: GNU GPL v2
This program is free software; you can redistribute it and/or
modify it under the terms of the GNU General Public License
as published by the Free Software Foundation; either version 2
of the License, or (at your option) any later version.
This program is distributed in the hope that it will be useful,
but WITHOUT ANY WARRANTY; without even the implied warranty of
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
GNU General Public License for more details.
You should have received a copy of the GNU General Public License
along with this program; if not, write to the Free Software Foundation,
Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301, USA.
"""
import random
from google.appengine.ext import db
from django.utils.html import strip_tags
from framework import content
from framework.subsystems import template
from framework.subsystems.forms import form, control, selectcontrol, textareacontrol
class Text(content.Content):
titles = db.StringListProperty()
bodies = db.ListProperty(item_type=db.Text)
name = 'Text'
author = 'Imran Somji'
actions = [
['add', 'Add', False, False],
['edit', 'Edit', True, False],
['reorder', 'Reorder', False, False],
['delete', 'Delete', False, False],
]
views = [
['default', 'Default - Multiple items in tabs', True],
['random', 'Random', True],
]
def action_add(self):
rank = int(self.section.path_params[0]) if self.section.path_params else 0
if rank > len(self.titles) or rank < 0:
raise Exception('BadRequest', 'Text item out of range')
elif self.section.handler.request.get('submit'):
self.titles.insert(rank, self.section.handler.request.get('title'))
self.bodies.insert(rank, db.Text(self.section.handler.request.get('body')))
self.update()
raise Exception('Redirect', self.section.action_redirect_path)
return '<h2>Add text</h2>' + get_form(self.section, '', '')
def action_edit(self):
if not self.titles: return self.action_add()
rank = int(self.section.path_params[0]) if self.section.path_params else 0
if rank > len(self.titles) - 1 or rank < 0:
raise Exception('BadRequest', 'Text item out of range')
elif self.section.handler.request.get('submit'):
self.titles[rank] = self.section.handler.request.get('title')
self.bodies[rank] = db.Text(self.section.handler.request.get('body'))
self.update()
raise Exception('Redirect', self.section.action_redirect_path)
elif not self.section.path_params and self.titles:
self.items = []
for i in range(len(self.titles)):
self.items.append([self.titles[i], self.bodies[i]])
return template.snippet('text-edit-multiple', { 'content': self })
return '<h2>Edit text</h2>' + get_form(self.section, self.titles[rank], self.bodies[rank])
def action_reorder(self):
rank = int(self.section.path_params[0]) if self.section.path_params else 0
if rank > len(self.titles) - 1 or rank < 0:
raise Exception('BadRequest', 'Text item out of range')
if self.section.handler.request.get('submit'):
new_rank = int(self.section.handler.request.get('new_rank'))
            if new_rank > len(self.titles) - 1 or new_rank < 0:
raise Exception('BadRequest', 'Reorder rank out of range')
self.titles.insert(new_rank, self.titles.pop(rank))
self.bodies.insert(new_rank, self.bodies.pop(rank))
self.update()
raise Exception('Redirect', self.section.action_redirect_path)
f = form(self.section, self.section.full_path)
ranks = []
for i in range(len(self.titles)):
ranks.append([i, i])
f.add_control(selectcontrol(self.section, 'new_rank', ranks, rank, 'Rank'))
f.add_control(control(self.section, 'submit', 'submit', 'Submit'))
return '<h2>Reorder text</h2>' + unicode(f)
def action_delete(self):
rank = int(self.section.path_params[0]) if self.section.path_params else 0
if rank > len(self.titles) - 1 or rank < 0:
raise Exception('BadRequest', 'Text item out of range')
if self.section.handler.request.get('submit'):
self.titles.pop(rank)
self.bodies.pop(rank)
self.update()
raise Exception('Redirect', self.section.action_redirect_path)
f = form(self.section, self.section.full_path)
f.add_control(control(self.section, 'submit', 'submit', 'Confirm'))
return '<div class="status warning">Are you sure you wish to delete item %d?</div>%s' % (rank + 1, unicode(f))
def view_default(self, params):
self.items = []
for i in range(len(self.titles)):
self.items.append([self.titles[i], self.bodies[i]])
return template.snippet('text-default', { 'content': self }) if self.items else ''
def view_random(self, params):
if not self.titles: return ''
i = random.randint(0, len(self.titles) - 1)
ret = '<h2>%s</h2>' % self.titles[i] if self.titles[i] else ''
ret += self.bodies[i]
return '<div class="content text single random">%s</div>' % ret
def get_form(section, title, body):
f = form(section, section.full_path)
f.add_control(control(section, 'text', 'title', title, 'Title', 60))
f.add_control(textareacontrol(section, 'body', body, 'Body', 100, 10, html=True))
f.add_control(control(section, 'submit', 'submit'))
return unicode(f)
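# Reordering note: action_reorder moves one item with pop()+insert(), keeping
# titles and bodies in step; e.g. with titles == ['a', 'b', 'c'],
# titles.insert(0, titles.pop(2)) yields ['c', 'a', 'b'].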
|
'''
Generate a random number between 1 and 9 (including 1 and 9).
Ask the user to guess the number, then tell them whether they guessed too low, too high, or exactly right. (Hint: remember to use the user input lessons from the very first exercise)
Extras:
Keep the game going until the user types "exit"
Keep track of how many guesses the user has taken, and when the game ends, print this out.
'''
import random
quit = ""
num = random.randint(1, 10)
tries = 0
while quit != "exit":
try:
usr_response = (raw_input("I have generated a number, can you guess what it is? "))
if usr_response == "exit":
break
usr_response = int(usr_response)
if usr_response > num:
print "Your guess is too high. Let's try this again. Type 'exit' to quit"
tries += 1
elif usr_response < num:
print "Your guess is too low. Let's try this again. Type 'exit' to quit"
tries += 1
elif usr_response == num:
tries += 1
print "Your guess is just right. Goodbye. It took you %d attempt(s) to get it right" % tries
break
except ValueError:
print "You need to enter a number or type 'exit' to quit."
|
"""Burnin program
"""
import sys
import optparse
import time
import socket
import urllib
from itertools import izip, islice, cycle
from cStringIO import StringIO
from ganeti import opcodes
from ganeti import constants
from ganeti import cli
from ganeti import errors
from ganeti import utils
from ganeti import hypervisor
from ganeti import compat
from ganeti import pathutils
from ganeti.confd import client as confd_client
from ganeti.runtime import (GetClient)
USAGE = ("\tburnin -o OS_NAME [options...] instance_name ...")
MAX_RETRIES = 3
LOG_HEADERS = {
0: "- ",
1: "* ",
2: "",
}
_SINGLE_NODE_DISK_TEMPLATES = compat.UniqueFrozenset([
constants.DT_DISKLESS,
constants.DT_PLAIN,
constants.DT_FILE,
constants.DT_SHARED_FILE,
constants.DT_EXT,
constants.DT_RBD,
constants.DT_GLUSTER
])
_SUPPORTED_DISK_TEMPLATES = compat.UniqueFrozenset([
constants.DT_DISKLESS,
constants.DT_DRBD8,
constants.DT_EXT,
constants.DT_FILE,
constants.DT_PLAIN,
constants.DT_RBD,
constants.DT_SHARED_FILE,
constants.DT_GLUSTER
])
_IMPEXP_DISK_TEMPLATES = (_SUPPORTED_DISK_TEMPLATES - frozenset([
constants.DT_DISKLESS,
constants.DT_FILE,
constants.DT_SHARED_FILE,
constants.DT_GLUSTER
]))
class InstanceDown(Exception):
"""The checked instance was not up"""
class BurninFailure(Exception):
"""Failure detected during burning"""
def Usage():
"""Shows program usage information and exits the program."""
print >> sys.stderr, "Usage:"
print >> sys.stderr, USAGE
sys.exit(2)
def Log(msg, *args, **kwargs):
"""Simple function that prints out its argument.
"""
if args:
msg = msg % args
indent = kwargs.get("indent", 0)
sys.stdout.write("%*s%s%s\n" % (2 * indent, "",
LOG_HEADERS.get(indent, " "), msg))
sys.stdout.flush()
def Err(msg, exit_code=1):
"""Simple error logging that prints to stderr.
"""
sys.stderr.write(msg + "\n")
sys.stderr.flush()
sys.exit(exit_code)
class SimpleOpener(urllib.FancyURLopener):
"""A simple url opener"""
# pylint: disable=W0221
def prompt_user_passwd(self, host, realm, clear_cache=0):
"""No-interaction version of prompt_user_passwd."""
# we follow parent class' API
# pylint: disable=W0613
return None, None
def http_error_default(self, url, fp, errcode, errmsg, headers):
"""Custom error handling"""
# make sure sockets are not left in CLOSE_WAIT, this is similar
# but with a different exception to the BasicURLOpener class
_ = fp.read() # throw away data
fp.close()
raise InstanceDown("HTTP error returned: code %s, msg %s" %
(errcode, errmsg))
OPTIONS = [
cli.cli_option("-o", "--os", dest="os", default=None,
help="OS to use during burnin",
metavar="<OS>",
completion_suggest=cli.OPT_COMPL_ONE_OS),
cli.HYPERVISOR_OPT,
cli.OSPARAMS_OPT,
cli.cli_option("--disk-size", dest="disk_size",
help="Disk size (determines disk count)",
default="128m", type="string", metavar="<size,size,...>",
completion_suggest=("128M 512M 1G 4G 1G,256M"
" 4G,1G,1G 10G").split()),
cli.cli_option("--disk-growth", dest="disk_growth", help="Disk growth",
default="128m", type="string", metavar="<size,size,...>"),
cli.cli_option("--mem-size", dest="mem_size", help="Memory size",
default=None, type="unit", metavar="<size>",
completion_suggest=("128M 256M 512M 1G 4G 8G"
" 12G 16G").split()),
cli.cli_option("--maxmem-size", dest="maxmem_size", help="Max Memory size",
default=256, type="unit", metavar="<size>",
completion_suggest=("128M 256M 512M 1G 4G 8G"
" 12G 16G").split()),
cli.cli_option("--minmem-size", dest="minmem_size", help="Min Memory size",
default=128, type="unit", metavar="<size>",
completion_suggest=("128M 256M 512M 1G 4G 8G"
" 12G 16G").split()),
cli.cli_option("--vcpu-count", dest="vcpu_count", help="VCPU count",
default=3, type="unit", metavar="<count>",
completion_suggest=("1 2 3 4").split()),
cli.DEBUG_OPT,
cli.VERBOSE_OPT,
cli.NOIPCHECK_OPT,
cli.NONAMECHECK_OPT,
cli.EARLY_RELEASE_OPT,
cli.cli_option("--no-replace1", dest="do_replace1",
help="Skip disk replacement with the same secondary",
action="store_false", default=True),
cli.cli_option("--no-replace2", dest="do_replace2",
help="Skip disk replacement with a different secondary",
action="store_false", default=True),
cli.cli_option("--no-failover", dest="do_failover",
help="Skip instance failovers", action="store_false",
default=True),
cli.cli_option("--no-migrate", dest="do_migrate",
help="Skip instance live migration",
action="store_false", default=True),
cli.cli_option("--no-move", dest="do_move",
help="Skip instance moves", action="store_false",
default=True),
cli.cli_option("--no-importexport", dest="do_importexport",
help="Skip instance export/import", action="store_false",
default=True),
cli.cli_option("--no-startstop", dest="do_startstop",
help="Skip instance stop/start", action="store_false",
default=True),
cli.cli_option("--no-reinstall", dest="do_reinstall",
help="Skip instance reinstall", action="store_false",
default=True),
cli.cli_option("--no-reboot", dest="do_reboot",
help="Skip instance reboot", action="store_false",
default=True),
cli.cli_option("--no-renamesame", dest="do_renamesame",
help="Skip instance rename to same name", action="store_false",
default=True),
cli.cli_option("--reboot-types", dest="reboot_types",
help="Specify the reboot types", default=None),
cli.cli_option("--no-activate-disks", dest="do_activate_disks",
help="Skip disk activation/deactivation",
action="store_false", default=True),
cli.cli_option("--no-add-disks", dest="do_addremove_disks",
help="Skip disk addition/removal",
action="store_false", default=True),
cli.cli_option("--no-add-nics", dest="do_addremove_nics",
help="Skip NIC addition/removal",
action="store_false", default=True),
cli.cli_option("--no-nics", dest="nics",
help="No network interfaces", action="store_const",
const=[], default=[{}]),
cli.cli_option("--no-confd", dest="do_confd_tests",
help="Skip confd queries",
action="store_false", default=constants.ENABLE_CONFD),
cli.cli_option("--rename", dest="rename", default=None,
help=("Give one unused instance name which is taken"
" to start the renaming sequence"),
metavar="<instance_name>"),
cli.cli_option("-t", "--disk-template", dest="disk_template",
choices=list(_SUPPORTED_DISK_TEMPLATES),
default=constants.DT_DRBD8,
help=("Disk template (default %s, otherwise one of %s)" %
(constants.DT_DRBD8,
utils.CommaJoin(_SUPPORTED_DISK_TEMPLATES)))),
cli.cli_option("-n", "--nodes", dest="nodes", default="",
help=("Comma separated list of nodes to perform"
" the burnin on (defaults to all nodes)"),
completion_suggest=cli.OPT_COMPL_MANY_NODES),
cli.cli_option("-I", "--iallocator", dest="iallocator",
default=None, type="string",
help=("Perform the allocation using an iallocator"
" instead of fixed node spread (node restrictions no"
" longer apply, therefore -n/--nodes must not be"
" used"),
completion_suggest=cli.OPT_COMPL_ONE_IALLOCATOR),
cli.cli_option("-p", "--parallel", default=False, action="store_true",
dest="parallel",
help=("Enable parallelization of some operations in"
" order to speed burnin or to test granular locking")),
cli.cli_option("--net-timeout", default=15, type="int",
dest="net_timeout",
help=("The instance check network timeout in seconds"
" (defaults to 15 seconds)"),
completion_suggest="15 60 300 900".split()),
cli.cli_option("-C", "--http-check", default=False, action="store_true",
dest="http_check",
help=("Enable checking of instance status via http,"
" looking for /hostname.txt that should contain the"
" name of the instance")),
cli.cli_option("-K", "--keep-instances", default=False,
action="store_true",
dest="keep_instances",
help=("Leave instances on the cluster after burnin,"
" for investigation in case of errors or simply"
" to use them")),
cli.REASON_OPT,
]
ARGUMENTS = [cli.ArgInstance(min=1)]
def _DoCheckInstances(fn):
"""Decorator for checking instances.
"""
def wrapper(self, *args, **kwargs):
val = fn(self, *args, **kwargs)
for instance in self.instances:
self._CheckInstanceAlive(instance) # pylint: disable=W0212
return val
return wrapper
def _DoBatch(retry):
"""Decorator for possible batch operations.
Must come after the _DoCheckInstances decorator (if any).
@param retry: whether this is a retryable batch, will be
passed to StartBatch
"""
def wrap(fn):
def batched(self, *args, **kwargs):
self.StartBatch(retry)
val = fn(self, *args, **kwargs)
self.CommitQueue()
return val
return batched
return wrap
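# Usage sketch (hypothetical method name): when both decorators are applied,
# _DoCheckInstances goes on the outside so the instance liveness check runs
# after the batch has been committed:
#
#   @_DoCheckInstances
#   @_DoBatch(False)
#   def BurnSomething(self):
#     ...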
class Burner(object):
"""Burner class."""
def __init__(self):
"""Constructor."""
self.url_opener = SimpleOpener()
self._feed_buf = StringIO()
self.nodes = []
self.instances = []
self.to_rem = []
self.queued_ops = []
self.opts = None
self.queue_retry = False
self.disk_count = self.disk_growth = self.disk_size = None
self.hvp = self.bep = None
self.ParseOptions()
self.cl = cli.GetClient()
self.GetState()
def ClearFeedbackBuf(self):
"""Clear the feedback buffer."""
self._feed_buf.truncate(0)
def GetFeedbackBuf(self):
"""Return the contents of the buffer."""
return self._feed_buf.getvalue()
def Feedback(self, msg):
"""Acumulate feedback in our buffer."""
formatted_msg = "%s %s" % (time.ctime(utils.MergeTime(msg[0])), msg[2])
self._feed_buf.write(formatted_msg + "\n")
if self.opts.verbose:
Log(formatted_msg, indent=3)
def MaybeRetry(self, retry_count, msg, fn, *args):
"""Possibly retry a given function execution.
@type retry_count: int
@param retry_count: retry counter:
- 0: non-retryable action
- 1: last retry for a retryable action
- MAX_RETRIES: original try for a retryable action
@type msg: str
@param msg: the kind of the operation
@type fn: callable
@param fn: the function to be called
"""
try:
val = fn(*args)
if retry_count > 0 and retry_count < MAX_RETRIES:
Log("Idempotent %s succeeded after %d retries",
msg, MAX_RETRIES - retry_count)
return val
except Exception, err: # pylint: disable=W0703
if retry_count == 0:
Log("Non-idempotent %s failed, aborting", msg)
raise
elif retry_count == 1:
Log("Idempotent %s repeated failure, aborting", msg)
raise
else:
Log("Idempotent %s failed, retry #%d/%d: %s",
msg, MAX_RETRIES - retry_count + 1, MAX_RETRIES, err)
        return self.MaybeRetry(retry_count - 1, msg, fn, *args)
def _ExecOp(self, *ops):
"""Execute one or more opcodes and manage the exec buffer.
    @return: if only one opcode has been passed, we return its result;
otherwise we return the list of results
"""
job_id = cli.SendJob(ops, cl=self.cl)
results = cli.PollJob(job_id, cl=self.cl, feedback_fn=self.Feedback)
if len(ops) == 1:
return results[0]
else:
return results
def ExecOp(self, retry, *ops):
"""Execute one or more opcodes and manage the exec buffer.
    @return: if only one opcode has been passed, we return its result;
otherwise we return the list of results
"""
if retry:
rval = MAX_RETRIES
else:
rval = 0
cli.SetGenericOpcodeOpts(ops, self.opts)
return self.MaybeRetry(rval, "opcode", self._ExecOp, *ops)
def ExecOrQueue(self, name, ops, post_process=None):
"""Execute an opcode and manage the exec buffer."""
if self.opts.parallel:
cli.SetGenericOpcodeOpts(ops, self.opts)
self.queued_ops.append((ops, name, post_process))
else:
val = self.ExecOp(self.queue_retry, *ops) # pylint: disable=W0142
if post_process is not None:
post_process()
return val
def StartBatch(self, retry):
"""Start a new batch of jobs.
@param retry: whether this is a retryable batch
"""
self.queued_ops = []
self.queue_retry = retry
def CommitQueue(self):
"""Execute all submitted opcodes in case of parallel burnin"""
if not self.opts.parallel or not self.queued_ops:
return
if self.queue_retry:
rval = MAX_RETRIES
else:
rval = 0
try:
results = self.MaybeRetry(rval, "jobset", self.ExecJobSet,
self.queued_ops)
finally:
self.queued_ops = []
return results
def ExecJobSet(self, jobs):
"""Execute a set of jobs and return once all are done.
The method will return the list of results, if all jobs are
successful. Otherwise, OpExecError will be raised from within
cli.py.
"""
self.ClearFeedbackBuf()
jex = cli.JobExecutor(cl=self.cl, feedback_fn=self.Feedback)
for ops, name, _ in jobs:
jex.QueueJob(name, *ops) # pylint: disable=W0142
try:
results = jex.GetResults()
except Exception, err: # pylint: disable=W0703
Log("Jobs failed: %s", err)
raise BurninFailure()
fail = False
val = []
for (_, name, post_process), (success, result) in zip(jobs, results):
if success:
if post_process:
try:
post_process()
except Exception, err: # pylint: disable=W0703
Log("Post process call for job %s failed: %s", name, err)
fail = True
val.append(result)
else:
fail = True
if fail:
raise BurninFailure()
return val
def ParseOptions(self):
"""Parses the command line options.
In case of command line errors, it will show the usage and exit the
program.
"""
parser = optparse.OptionParser(usage="\n%s" % USAGE,
version=("%%prog (ganeti) %s" %
constants.RELEASE_VERSION),
option_list=OPTIONS)
options, args = parser.parse_args()
if len(args) < 1 or options.os is None:
Usage()
if options.mem_size:
options.maxmem_size = options.mem_size
options.minmem_size = options.mem_size
elif options.minmem_size > options.maxmem_size:
Err("Maximum memory lower than minimum memory")
if options.disk_template not in _SUPPORTED_DISK_TEMPLATES:
Err("Unknown or unsupported disk template '%s'" % options.disk_template)
if options.disk_template == constants.DT_DISKLESS:
disk_size = disk_growth = []
options.do_addremove_disks = False
else:
disk_size = [utils.ParseUnit(v) for v in options.disk_size.split(",")]
disk_growth = [utils.ParseUnit(v)
for v in options.disk_growth.split(",")]
if len(disk_growth) != len(disk_size):
Err("Wrong disk sizes/growth combination")
if ((disk_size and options.disk_template == constants.DT_DISKLESS) or
(not disk_size and options.disk_template != constants.DT_DISKLESS)):
Err("Wrong disk count/disk template combination")
self.disk_size = disk_size
self.disk_growth = disk_growth
self.disk_count = len(disk_size)
if options.nodes and options.iallocator:
Err("Give either the nodes option or the iallocator option, not both")
if options.http_check and not options.name_check:
Err("Can't enable HTTP checks without name checks")
self.opts = options
self.instances = args
self.bep = {
constants.BE_MINMEM: options.minmem_size,
constants.BE_MAXMEM: options.maxmem_size,
constants.BE_VCPUS: options.vcpu_count,
}
self.hypervisor = None
self.hvp = {}
if options.hypervisor:
self.hypervisor, self.hvp = options.hypervisor
if options.reboot_types is None:
options.reboot_types = constants.REBOOT_TYPES
else:
options.reboot_types = options.reboot_types.split(",")
rt_diff = set(options.reboot_types).difference(constants.REBOOT_TYPES)
if rt_diff:
Err("Invalid reboot types specified: %s" % utils.CommaJoin(rt_diff))
socket.setdefaulttimeout(options.net_timeout)
def GetState(self):
"""Read the cluster state from the master daemon."""
if self.opts.nodes:
names = self.opts.nodes.split(",")
else:
names = []
try:
qcl = GetClient(query=True)
result = qcl.QueryNodes(names, ["name", "offline", "drained"], False)
except errors.GenericError, err:
err_code, msg = cli.FormatError(err)
Err(msg, exit_code=err_code)
finally:
qcl.Close()
self.nodes = [data[0] for data in result if not (data[1] or data[2])]
op_diagnose = opcodes.OpOsDiagnose(output_fields=["name",
"variants",
"hidden"],
names=[])
result = self.ExecOp(True, op_diagnose)
if not result:
Err("Can't get the OS list")
found = False
for (name, variants, _) in result:
if self.opts.os in cli.CalculateOSNames(name, variants):
found = True
break
if not found:
Err("OS '%s' not found" % self.opts.os)
cluster_info = self.cl.QueryClusterInfo()
self.cluster_info = cluster_info
if not self.cluster_info:
Err("Can't get cluster info")
default_nic_params = self.cluster_info["nicparams"][constants.PP_DEFAULT]
self.cluster_default_nicparams = default_nic_params
if self.hypervisor is None:
self.hypervisor = self.cluster_info["default_hypervisor"]
self.hv_can_migrate = \
hypervisor.GetHypervisorClass(self.hypervisor).CAN_MIGRATE
@_DoCheckInstances
@_DoBatch(False)
def BurnCreateInstances(self):
"""Create the given instances.
"""
self.to_rem = []
mytor = izip(cycle(self.nodes),
islice(cycle(self.nodes), 1, None),
self.instances)
Log("Creating instances")
for pnode, snode, instance in mytor:
Log("instance %s", instance, indent=1)
if self.opts.iallocator:
pnode = snode = None
msg = "with iallocator %s" % self.opts.iallocator
elif self.opts.disk_template not in constants.DTS_INT_MIRROR:
snode = None
msg = "on %s" % pnode
else:
msg = "on %s, %s" % (pnode, snode)
Log(msg, indent=2)
op = opcodes.OpInstanceCreate(instance_name=instance,
disks=[{"size": size}
for size in self.disk_size],
disk_template=self.opts.disk_template,
nics=self.opts.nics,
mode=constants.INSTANCE_CREATE,
os_type=self.opts.os,
pnode=pnode,
snode=snode,
start=True,
ip_check=self.opts.ip_check,
name_check=self.opts.name_check,
wait_for_sync=True,
file_driver="loop",
file_storage_dir=None,
iallocator=self.opts.iallocator,
beparams=self.bep,
hvparams=self.hvp,
hypervisor=self.hypervisor,
osparams=self.opts.osparams,
)
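      # Nested lambda so each post_process closure captures its own
      # instance name instead of the shared loop variable.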
remove_instance = lambda name: lambda: self.to_rem.append(name)
self.ExecOrQueue(instance, [op], post_process=remove_instance(instance))
@_DoBatch(False)
def BurnModifyRuntimeMemory(self):
"""Alter the runtime memory."""
Log("Setting instance runtime memory")
for instance in self.instances:
Log("instance %s", instance, indent=1)
tgt_mem = self.bep[constants.BE_MINMEM]
op = opcodes.OpInstanceSetParams(instance_name=instance,
runtime_mem=tgt_mem)
Log("Set memory to %s MB", tgt_mem, indent=2)
self.ExecOrQueue(instance, [op])
@_DoBatch(False)
def BurnGrowDisks(self):
"""Grow both the os and the swap disks by the requested amount, if any."""
Log("Growing disks")
for instance in self.instances:
Log("instance %s", instance, indent=1)
for idx, growth in enumerate(self.disk_growth):
if growth > 0:
op = opcodes.OpInstanceGrowDisk(instance_name=instance, disk=idx,
amount=growth, wait_for_sync=True)
Log("increase disk/%s by %s MB", idx, growth, indent=2)
self.ExecOrQueue(instance, [op])
@_DoBatch(True)
def BurnReplaceDisks1D8(self):
"""Replace disks on primary and secondary for drbd8."""
Log("Replacing disks on the same nodes")
early_release = self.opts.early_release
for instance in self.instances:
Log("instance %s", instance, indent=1)
ops = []
for mode in constants.REPLACE_DISK_SEC, constants.REPLACE_DISK_PRI:
op = opcodes.OpInstanceReplaceDisks(instance_name=instance,
mode=mode,
disks=list(range(self.disk_count)),
early_release=early_release)
Log("run %s", mode, indent=2)
ops.append(op)
self.ExecOrQueue(instance, ops)
@_DoBatch(True)
def BurnReplaceDisks2(self):
"""Replace secondary node."""
Log("Changing the secondary node")
mode = constants.REPLACE_DISK_CHG
mytor = izip(islice(cycle(self.nodes), 2, None),
self.instances)
for tnode, instance in mytor:
Log("instance %s", instance, indent=1)
if self.opts.iallocator:
tnode = None
msg = "with iallocator %s" % self.opts.iallocator
else:
msg = tnode
op = opcodes.OpInstanceReplaceDisks(instance_name=instance,
mode=mode,
remote_node=tnode,
iallocator=self.opts.iallocator,
disks=[],
early_release=self.opts.early_release)
Log("run %s %s", mode, msg, indent=2)
self.ExecOrQueue(instance, [op])
@_DoCheckInstances
@_DoBatch(False)
def BurnFailover(self):
"""Failover the instances."""
Log("Failing over instances")
for instance in self.instances:
Log("instance %s", instance, indent=1)
op = opcodes.OpInstanceFailover(instance_name=instance,
ignore_consistency=False)
self.ExecOrQueue(instance, [op])
@_DoCheckInstances
@_DoBatch(False)
def BurnMove(self):
"""Move the instances."""
Log("Moving instances")
mytor = izip(islice(cycle(self.nodes), 1, None),
self.instances)
for tnode, instance in mytor:
Log("instance %s", instance, indent=1)
op = opcodes.OpInstanceMove(instance_name=instance,
target_node=tnode)
self.ExecOrQueue(instance, [op])
@_DoBatch(False)
def BurnMigrate(self):
"""Migrate the instances."""
Log("Migrating instances")
for instance in self.instances:
Log("instance %s", instance, indent=1)
op1 = opcodes.OpInstanceMigrate(instance_name=instance, mode=None,
cleanup=False)
op2 = opcodes.OpInstanceMigrate(instance_name=instance, mode=None,
cleanup=True)
Log("migration and migration cleanup", indent=2)
self.ExecOrQueue(instance, [op1, op2])
@_DoCheckInstances
@_DoBatch(False)
def BurnImportExport(self):
"""Export the instance, delete it, and import it back.
"""
Log("Exporting and re-importing instances")
mytor = izip(cycle(self.nodes),
islice(cycle(self.nodes), 1, None),
islice(cycle(self.nodes), 2, None),
self.instances)
qcl = GetClient(query=True)
for pnode, snode, enode, instance in mytor:
Log("instance %s", instance, indent=1)
# read the full name of the instance
((full_name, ), ) = qcl.QueryInstances([instance], ["name"], False)
if self.opts.iallocator:
pnode = snode = None
import_log_msg = ("import from %s"
" with iallocator %s" %
(enode, self.opts.iallocator))
elif self.opts.disk_template not in constants.DTS_INT_MIRROR:
snode = None
import_log_msg = ("import from %s to %s" %
(enode, pnode))
else:
import_log_msg = ("import from %s to %s, %s" %
(enode, pnode, snode))
exp_op = opcodes.OpBackupExport(instance_name=instance,
target_node=enode,
mode=constants.EXPORT_MODE_LOCAL,
shutdown=True)
rem_op = opcodes.OpInstanceRemove(instance_name=instance,
ignore_failures=True)
imp_dir = utils.PathJoin(pathutils.EXPORT_DIR, full_name)
imp_op = opcodes.OpInstanceCreate(instance_name=instance,
disks=[{"size": size}
for size in self.disk_size],
disk_template=self.opts.disk_template,
nics=self.opts.nics,
mode=constants.INSTANCE_IMPORT,
src_node=enode,
src_path=imp_dir,
pnode=pnode,
snode=snode,
start=True,
ip_check=self.opts.ip_check,
name_check=self.opts.name_check,
wait_for_sync=True,
file_storage_dir=None,
file_driver="loop",
iallocator=self.opts.iallocator,
beparams=self.bep,
hvparams=self.hvp,
osparams=self.opts.osparams,
)
erem_op = opcodes.OpBackupRemove(instance_name=instance)
Log("export to node %s", enode, indent=2)
Log("remove instance", indent=2)
Log(import_log_msg, indent=2)
Log("remove export", indent=2)
self.ExecOrQueue(instance, [exp_op, rem_op, imp_op, erem_op])
qcl.Close()
@staticmethod
def StopInstanceOp(instance):
"""Stop given instance."""
return opcodes.OpInstanceShutdown(instance_name=instance)
@staticmethod
def StartInstanceOp(instance):
"""Start given instance."""
return opcodes.OpInstanceStartup(instance_name=instance, force=False)
@staticmethod
def RenameInstanceOp(instance, instance_new):
"""Rename instance."""
return opcodes.OpInstanceRename(instance_name=instance,
new_name=instance_new)
@_DoCheckInstances
@_DoBatch(True)
def BurnStopStart(self):
"""Stop/start the instances."""
Log("Stopping and starting instances")
for instance in self.instances:
Log("instance %s", instance, indent=1)
op1 = self.StopInstanceOp(instance)
op2 = self.StartInstanceOp(instance)
self.ExecOrQueue(instance, [op1, op2])
@_DoBatch(False)
def BurnRemove(self):
"""Remove the instances."""
Log("Removing instances")
for instance in self.to_rem:
Log("instance %s", instance, indent=1)
op = opcodes.OpInstanceRemove(instance_name=instance,
ignore_failures=True)
self.ExecOrQueue(instance, [op])
def BurnRename(self):
"""Rename the instances.
Note that this function will not execute in parallel, since we
only have one target for rename.
"""
Log("Renaming instances")
rename = self.opts.rename
for instance in self.instances:
Log("instance %s", instance, indent=1)
op_stop1 = self.StopInstanceOp(instance)
op_stop2 = self.StopInstanceOp(rename)
op_rename1 = self.RenameInstanceOp(instance, rename)
op_rename2 = self.RenameInstanceOp(rename, instance)
op_start1 = self.StartInstanceOp(rename)
op_start2 = self.StartInstanceOp(instance)
self.ExecOp(False, op_stop1, op_rename1, op_start1)
self._CheckInstanceAlive(rename)
self.ExecOp(False, op_stop2, op_rename2, op_start2)
self._CheckInstanceAlive(instance)
@_DoCheckInstances
@_DoBatch(True)
def BurnReinstall(self):
"""Reinstall the instances."""
Log("Reinstalling instances")
for instance in self.instances:
Log("instance %s", instance, indent=1)
op1 = self.StopInstanceOp(instance)
op2 = opcodes.OpInstanceReinstall(instance_name=instance)
Log("reinstall without passing the OS", indent=2)
op3 = opcodes.OpInstanceReinstall(instance_name=instance,
os_type=self.opts.os)
Log("reinstall specifying the OS", indent=2)
op4 = self.StartInstanceOp(instance)
self.ExecOrQueue(instance, [op1, op2, op3, op4])
@_DoCheckInstances
@_DoBatch(True)
def BurnReboot(self):
"""Reboot the instances."""
Log("Rebooting instances")
for instance in self.instances:
Log("instance %s", instance, indent=1)
ops = []
for reboot_type in self.opts.reboot_types:
op = opcodes.OpInstanceReboot(instance_name=instance,
reboot_type=reboot_type,
ignore_secondaries=False)
Log("reboot with type '%s'", reboot_type, indent=2)
ops.append(op)
self.ExecOrQueue(instance, ops)
@_DoCheckInstances
@_DoBatch(True)
def BurnRenameSame(self):
"""Rename the instances to their own name."""
Log("Renaming the instances to their own name")
for instance in self.instances:
Log("instance %s", instance, indent=1)
op1 = self.StopInstanceOp(instance)
op2 = self.RenameInstanceOp(instance, instance)
Log("rename to the same name", indent=2)
op4 = self.StartInstanceOp(instance)
self.ExecOrQueue(instance, [op1, op2, op4])
@_DoCheckInstances
@_DoBatch(True)
def BurnActivateDisks(self):
"""Activate and deactivate disks of the instances."""
Log("Activating/deactivating disks")
for instance in self.instances:
Log("instance %s", instance, indent=1)
op_start = self.StartInstanceOp(instance)
op_act = opcodes.OpInstanceActivateDisks(instance_name=instance)
op_deact = opcodes.OpInstanceDeactivateDisks(instance_name=instance)
op_stop = self.StopInstanceOp(instance)
Log("activate disks when online", indent=2)
Log("activate disks when offline", indent=2)
Log("deactivate disks (when offline)", indent=2)
self.ExecOrQueue(instance, [op_act, op_stop, op_act, op_deact, op_start])
@_DoCheckInstances
@_DoBatch(False)
def BurnAddRemoveDisks(self):
"""Add and remove an extra disk for the instances."""
Log("Adding and removing disks")
for instance in self.instances:
Log("instance %s", instance, indent=1)
op_add = opcodes.OpInstanceSetParams(
instance_name=instance,
disks=[(constants.DDM_ADD, {"size": self.disk_size[0]})])
op_rem = opcodes.OpInstanceSetParams(
instance_name=instance, disks=[(constants.DDM_REMOVE, {})])
op_stop = self.StopInstanceOp(instance)
op_start = self.StartInstanceOp(instance)
Log("adding a disk", indent=2)
Log("removing last disk", indent=2)
self.ExecOrQueue(instance, [op_add, op_stop, op_rem, op_start])
@_DoBatch(False)
def BurnAddRemoveNICs(self):
"""Add, change and remove an extra NIC for the instances."""
Log("Adding and removing NICs")
for instance in self.instances:
Log("instance %s", instance, indent=1)
op_add = opcodes.OpInstanceSetParams(
instance_name=instance, nics=[(constants.DDM_ADD, {})])
op_chg = opcodes.OpInstanceSetParams(
instance_name=instance, nics=[(constants.DDM_MODIFY,
-1, {"mac": constants.VALUE_GENERATE})])
op_rem = opcodes.OpInstanceSetParams(
instance_name=instance, nics=[(constants.DDM_REMOVE, {})])
Log("adding a NIC", indent=2)
Log("changing a NIC", indent=2)
Log("removing last NIC", indent=2)
self.ExecOrQueue(instance, [op_add, op_chg, op_rem])
def ConfdCallback(self, reply):
"""Callback for confd queries"""
if reply.type == confd_client.UPCALL_REPLY:
if reply.server_reply.status != constants.CONFD_REPL_STATUS_OK:
Err("Query %s gave non-ok status %s: %s" % (reply.orig_request,
reply.server_reply.status,
reply.server_reply))
if reply.orig_request.type == constants.CONFD_REQ_PING:
Log("Ping: OK", indent=1)
elif reply.orig_request.type == constants.CONFD_REQ_CLUSTER_MASTER:
if reply.server_reply.answer == self.cluster_info["master"]:
Log("Master: OK", indent=1)
else:
Err("Master: wrong: %s" % reply.server_reply.answer)
elif reply.orig_request.type == constants.CONFD_REQ_NODE_ROLE_BYNAME:
if reply.server_reply.answer == constants.CONFD_NODE_ROLE_MASTER:
Log("Node role for master: OK", indent=1)
else:
Err("Node role for master: wrong: %s" % reply.server_reply.answer)
def DoConfdRequestReply(self, req):
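    """Send a confd request and wait for all expected replies."""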
self.confd_counting_callback.RegisterQuery(req.rsalt)
self.confd_client.SendRequest(req, async=False)
while not self.confd_counting_callback.AllAnswered():
if not self.confd_client.ReceiveReply():
Err("Did not receive all expected confd replies")
break
def BurnConfd(self):
"""Run confd queries for our instances.
The following confd queries are tested:
- CONFD_REQ_PING: simple ping
- CONFD_REQ_CLUSTER_MASTER: cluster master
- CONFD_REQ_NODE_ROLE_BYNAME: node role, for the master
"""
Log("Checking confd results")
filter_callback = confd_client.ConfdFilterCallback(self.ConfdCallback)
counting_callback = confd_client.ConfdCountingCallback(filter_callback)
self.confd_counting_callback = counting_callback
self.confd_client = confd_client.GetConfdClient(counting_callback)
req = confd_client.ConfdClientRequest(type=constants.CONFD_REQ_PING)
self.DoConfdRequestReply(req)
req = confd_client.ConfdClientRequest(
type=constants.CONFD_REQ_CLUSTER_MASTER)
self.DoConfdRequestReply(req)
req = confd_client.ConfdClientRequest(
type=constants.CONFD_REQ_NODE_ROLE_BYNAME,
query=self.cluster_info["master"])
self.DoConfdRequestReply(req)
def _CheckInstanceAlive(self, instance):
"""Check if an instance is alive by doing http checks.
This will try to retrieve the url on the instance /hostname.txt
and check that it contains the hostname of the instance. In case
we get ECONNREFUSED, we retry up to the net timeout seconds, for
any other error we abort.
"""
if not self.opts.http_check:
return
end_time = time.time() + self.opts.net_timeout
url = None
while time.time() < end_time and url is None:
try:
url = self.url_opener.open("http://%s/hostname.txt" % instance)
except IOError:
# here we can have connection refused, no route to host, etc.
time.sleep(1)
if url is None:
raise InstanceDown(instance, "Cannot contact instance")
hostname = url.read().strip()
url.close()
if hostname != instance:
raise InstanceDown(instance, ("Hostname mismatch, expected %s, got %s" %
(instance, hostname)))
def BurninCluster(self):
"""Test a cluster intensively.
This will create instances and then start/stop/failover them.
It is safe for existing instances but could impact performance.
"""
Log("Testing global parameters")
if (len(self.nodes) == 1 and
self.opts.disk_template not in _SINGLE_NODE_DISK_TEMPLATES):
Err("When one node is available/selected the disk template must"
" be one of %s" % utils.CommaJoin(_SINGLE_NODE_DISK_TEMPLATES))
if self.opts.do_confd_tests and not constants.ENABLE_CONFD:
Err("You selected confd tests but confd was disabled at configure time")
has_err = True
try:
self.BurnCreateInstances()
if self.bep[constants.BE_MINMEM] < self.bep[constants.BE_MAXMEM]:
self.BurnModifyRuntimeMemory()
if self.opts.do_replace1 and \
self.opts.disk_template in constants.DTS_INT_MIRROR:
self.BurnReplaceDisks1D8()
if (self.opts.do_replace2 and len(self.nodes) > 2 and
self.opts.disk_template in constants.DTS_INT_MIRROR):
self.BurnReplaceDisks2()
if (self.opts.disk_template in constants.DTS_GROWABLE and
compat.any(n > 0 for n in self.disk_growth)):
self.BurnGrowDisks()
if self.opts.do_failover and \
self.opts.disk_template in constants.DTS_MIRRORED:
self.BurnFailover()
if self.opts.do_migrate:
if self.opts.disk_template not in constants.DTS_MIRRORED:
Log("Skipping migration (disk template %s does not support it)",
self.opts.disk_template)
elif not self.hv_can_migrate:
Log("Skipping migration (hypervisor %s does not support it)",
self.hypervisor)
else:
self.BurnMigrate()
if (self.opts.do_move and len(self.nodes) > 1 and
self.opts.disk_template in [constants.DT_PLAIN, constants.DT_FILE]):
self.BurnMove()
if (self.opts.do_importexport and
self.opts.disk_template in _IMPEXP_DISK_TEMPLATES):
self.BurnImportExport()
if self.opts.do_reinstall:
self.BurnReinstall()
if self.opts.do_reboot:
self.BurnReboot()
if self.opts.do_renamesame:
self.BurnRenameSame()
if self.opts.do_addremove_disks:
self.BurnAddRemoveDisks()
default_nic_mode = self.cluster_default_nicparams[constants.NIC_MODE]
      # Don't add/remove NICs in routed mode, as we would need an IP
      # address to assign to them
if self.opts.do_addremove_nics:
if default_nic_mode == constants.NIC_MODE_BRIDGED:
self.BurnAddRemoveNICs()
else:
Log("Skipping nic add/remove as the cluster is not in bridged mode")
if self.opts.do_activate_disks:
self.BurnActivateDisks()
if self.opts.rename:
self.BurnRename()
if self.opts.do_confd_tests:
self.BurnConfd()
if self.opts.do_startstop:
self.BurnStopStart()
has_err = False
finally:
if has_err:
Log("Error detected: opcode buffer follows:\n\n")
Log(self.GetFeedbackBuf())
Log("\n\n")
if not self.opts.keep_instances:
try:
self.BurnRemove()
except Exception, err: # pylint: disable=W0703
if has_err: # already detected errors, so errors in removal
# are quite expected
Log("Note: error detected during instance remove: %s", err)
else: # non-expected error
raise
return constants.EXIT_SUCCESS
def Main():
"""Main function.
"""
utils.SetupLogging(pathutils.LOG_BURNIN, sys.argv[0],
debug=False, stderr_logging=True)
return Burner().BurninCluster()
|
"""Test connection to weather station.
This is a simple utility to test communication with the weather
station. If this doesn't work, then there's a problem that needs to be
sorted out before trying any of the other programs. Likely problems
include not properly installing `libusb
<http://libusb.wiki.sourceforge.net/>`_ or `PyUSB
<http://pyusb.berlios.de/>`_. Less likely problems include an
incompatibility between libusb and some operating systems. The most
unlikely problem is that you forgot to connect the weather station to
your computer! ::
%s
"""
__usage__ = """
usage: python TestWeatherStation.py [options]
options are:
--help display this help
-d | --decode display meaningful values instead of raw data
-h | --history count display the last "count" readings
-l | --live display 'live' data
-m | --logged display 'logged' data
-u | --unknown display unknown fixed block values
-v | --verbose increase amount of reassuring messages
(repeat for even more messages e.g. -vvv)
"""
__doc__ %= __usage__
__usage__ = __doc__.split('\n')[0] + __usage__
import datetime
import getopt
import sys
from pywws.DataStore import safestrptime
from pywws.Logger import ApplicationLogger
from pywws import WeatherStation
def raw_dump(pos, data):
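    """Print the buffer offset as a 4-digit hex address, then each byte in hex."""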
print "%04x" % pos,
for item in data:
print "%02x" % item,
print
def main(argv=None):
if argv is None:
argv = sys.argv
try:
opts, args = getopt.getopt(
argv[1:], "dh:lmuv",
('help', 'decode', 'history=', 'live', 'logged', 'unknown', 'verbose'))
except getopt.error, msg:
print >>sys.stderr, 'Error: %s\n' % msg
print >>sys.stderr, __usage__.strip()
return 1
# check arguments
if len(args) != 0:
print >>sys.stderr, 'Error: no arguments allowed\n'
print >>sys.stderr, __usage__.strip()
return 2
# process options
history_count = 0
decode = False
live = False
logged = False
unknown = False
verbose = 0
for o, a in opts:
if o == '--help':
print __usage__.strip()
return 0
elif o in ('-d', '--decode'):
decode = True
elif o in ('-h', '--history'):
history_count = int(a)
elif o in ('-l', '--live'):
live = True
logged = False
elif o in ('-m', '--logged'):
live = False
logged = True
elif o in ('-u', '--unknown'):
unknown = True
elif o in ('-v', '--verbose'):
verbose += 1
# do it!
logger = ApplicationLogger(verbose)
ws = WeatherStation.weather_station()
raw_fixed = ws.get_raw_fixed_block()
if not raw_fixed:
print "No valid data block found"
return 3
if decode:
# dump entire fixed block
print ws.get_fixed_block()
# dump a few selected items
print "min -> temp_out ->", ws.get_fixed_block(['min', 'temp_out'])
print "alarm -> hum_out ->", ws.get_fixed_block(['alarm', 'hum_out'])
print "rel_pressure ->", ws.get_fixed_block(['rel_pressure'])
print "abs_pressure ->", ws.get_fixed_block(['abs_pressure'])
else:
for ptr in range(0x0000, 0x0100, 0x20):
raw_dump(ptr, raw_fixed[ptr:ptr+0x20])
if unknown:
for k in sorted(ws.fixed_format):
if 'unk' in k:
print k, ws.get_fixed_block([k])
for k in sorted(ws.fixed_format):
if 'settings' in k or 'display' in k or 'alarm' in k:
bits = ws.get_fixed_block([k])
for b in sorted(bits):
if 'bit' in b:
print k, b, bits[b]
if history_count > 0:
fixed_block = ws.get_fixed_block()
print "Recent history"
ptr = fixed_block['current_pos']
date = safestrptime(fixed_block['date_time'], '%Y-%m-%d %H:%M')
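        # Walk the circular buffer backwards: each record's 'delay' field
        # holds the minutes since the previous stored reading, so
        # subtracting it gives the previous record's timestamp.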
for i in range(history_count):
if decode:
data = ws.get_data(ptr)
print date, data
date = date - datetime.timedelta(minutes=data['delay'])
else:
raw_dump(ptr, ws.get_raw_data(ptr))
ptr = ws.dec_ptr(ptr)
if live:
for data, ptr, logged in ws.live_data():
print "%04x" % ptr,
print data['idx'].strftime('%H:%M:%S'),
del data['idx']
print data
if logged:
for data, ptr, logged in ws.live_data(logged_only=True):
print "%04x" % ptr,
print data['idx'].strftime('%H:%M:%S'),
del data['idx']
print data
del ws
return 0
if __name__ == "__main__":
try:
sys.exit(main())
except KeyboardInterrupt:
pass
|
import subprocess
import hashlib, os, re
import datetime
import licensesapi
from xml.dom.minidom import parseString
CACHING = False
TIMESTAMP = re.compile(r"Date:\s+([0-9]+)\s")
def get(cmd, cacheable=True):
if cacheable and CACHING:
h = hashlib.md5(cmd).hexdigest()
if h in os.listdir('licenses/cache'):
print ' getting from cache: ', cmd
print ' ', h
return open('licenses/cache/' + h).read()
p = subprocess.Popen(cmd, shell=True, stdout=subprocess.PIPE)
data = p.stdout.read()
if cacheable and CACHING:
open('licenses/cache/' + h, 'w').write(data)
print ' written to cache: ', cmd
return data
def get_used_tex(m):
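    # Assumed Quake-style .map brush plane lines, e.g.
    #   ( x y z ) ( x y z ) ( x y z ) tex/name xoff yoff rot xs ys
    # The texture reference is the first token after the third ')'.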
used = []
for i in open(m):
i = i.split(')')
if len(i) < 4:
continue
tex = i[3].strip().split(' ')[0]
if tex not in used:
used.append(tex)
return used
def get_used_resources_from_ufo_script(scriptFileName):
"Return file name from base/ used by the script"
return set([])
class Resource(object):
"Identify a file resource and links with other resources"
def __init__(self, fileName):
self.fileName = fileName
self.license = None
self.copyright = None
self.source = None
self.revision = None
self.usedByMaps = set([])
self.useTextures = set([])
self.usedByScripts = set([])
self.useResources = set([])
def isImage(self):
return self.fileName.endswith('.jpg') or self.fileName.endswith('.tga') or self.fileName.endswith('.png')
def __repr__(self):
return str((self.license, self.copyright, self.source, self.revision))
class Resources(object):
"""
Manage a set of resources
TODO compute a map with 'short' name
"""
def __init__(self):
self.resources = {}
self.computedBaseDir = set()
self.timestamp = None
def computeResources(self, dir):
self.computeResourcesFromGit(dir)
def getRevisionFromDate(self):
"""
        Derive a pseudo-revision from the timestamp of the latest git commit.
"""
        if self.timestamp is None:
content = get("git log -1 --date=raw")
timestamps = TIMESTAMP.findall(content)
assert(len(timestamps) == 1)
self.timestamp = int(timestamps[0])
return self.timestamp
def computeResourcesFromGit(self, dir):
"""
        UFO:AI stores file properties in a LICENSES file
"""
if "ok" in self.computedBaseDir:
return
self.computedBaseDir.add("ok")
# generate a revision from date
revision = self.getRevisionFromDate()
# read all licenses entries
licenses = licensesapi.LicenseSet("LICENSES")
entries = licenses.get_entries()
for entry in entries:
if entry.filename in self.resources:
resource = self.resources[entry.filename]
else:
resource = Resource(entry.filename)
self.resources[entry.filename] = resource
resource.revision = revision
resource.copyright = entry.author
resource.license = entry.license
resource.source = entry.source
def getResource(self, fileName):
"Get a resource from a name without extension"
# Must we load the basedir?
if not (fileName in self.resources):
basedir = fileName.split('/')
basedir = basedir[0]
self.computeResources(basedir)
# Get metadata from cache
if fileName in self.resources:
resource = self.resources[fileName]
# Gen object for non versioned file
else:
resource = Resource(fileName)
self.resources[fileName] = resource
# FIXME A little hackish to know if it is versioned
if os.path.isdir(fileName):
content = get("git log -n1 %s" % fileName)
if "Date:" in content:
resource.revision = self.getRevisionFromDate()
else:
resource.revision = self.getRevisionFromDate()
return resource
def getResourceByShortImageName(self, fileName):
"Get a resource from a name without extension"
if os.path.exists(fileName + ".png"):
return self.getResource(fileName + ".png")
if os.path.exists(fileName + ".jpg"):
return self.getResource(fileName + ".jpg")
if os.path.exists(fileName + ".tga"):
return self.getResource(fileName + ".tga")
return None
def computeResourceUsageInUFOScripts(self):
"Read UFO scripts and create relations with other resources"
for i in os.walk('base/ufos'):
for ufoname in i[2]:
if not ufoname.endswith('.ufo'):
continue
ufoname = i[0] + '/' + ufoname
uforesource = self.getResource(ufoname)
for name in get_used_resources_from_ufo_script(ufoname):
resource = self.getResource(name)
resource.usedByScripts.add(uforesource)
uforesource.useResources.add(resource)
def computeTextureUsageInMaps(self):
"Read maps and create relations with other resources"
print 'Parse texture usage in maps...'
files = set([])
for i in os.walk('base/maps'):
for mapname in i[2]:
if not mapname.endswith('.map'):
continue
mapname = i[0] + '/' + mapname
files.add(mapname)
for i in os.walk('radiant/prefabs'):
for mapname in i[2]:
if not mapname.endswith('.map'):
continue
mapname = i[0] + '/' + mapname
files.add(mapname)
for mapname in files:
mapmeta = self.getResource(mapname)
for tex in get_used_tex(mapname):
texname = "base/textures/" + tex
texmeta = self.getResourceByShortImageName(texname)
# texture missing (or wrong python parsing)
                if texmeta is None:
print "Warning: \"" + texname + "\" from map \"" + mapname + "\" does not exist"
continue
texmeta.usedByMaps.add(mapmeta)
mapmeta.useTextures.add(texmeta)
|
import pygame
import daemon
import time
import pynotify
import os
import argparse
def main():
parser = argparse.ArgumentParser(description='JoyVol: control audio (or anything else really) based on joystick buttons')
    parser.add_argument('--js', dest='joystick_id', type=int, default=0,
                        help='the pygame ID of the joystick')
    parser.add_argument('--button', dest='button_id', type=int, default=0,
                        help='the pygame ID of the button')
parser.add_argument('--libnotify', dest='libnotify', action='store_true')
parser.add_argument('--no-libnotify', dest='libnotify', action='store_false')
parser.set_defaults(libnotify=True)
args = vars(parser.parse_args())
pygame.init()
pygame.joystick.init()
    js = pygame.joystick.Joystick(args['joystick_id'])
js.init()
pynotify.init("Joyvol")
while True:
for event in pygame.event.get():
if (event.type == pygame.JOYBUTTONDOWN and
event.dict['joy'] == args['joystick_id'] and
event.dict['button'] == args['button_id']):
os.system("amixer -q set 'Surround',0 100")
if args['libnotify']:
pynotify.Notification("UNMUTE", "Headphones stowed, unmuting speakers" ).show()
if (event.type == pygame.JOYBUTTONUP and
event.dict['joy'] == args['joystick_id'] and
event.dict['button'] == args['button_id']):
os.system("amixer -q set 'Surround',0 0")
if args['libnotify']:
pynotify.Notification("MUTE", "Headphones in use, muting speakers" ).show()
time.sleep(.5)
if __name__ == '__main__':
    main()
|
import os,sys
if not os.environ.has_key('AUTOOAM_HOME'):
os.environ['AUTOOAM_HOME'] = os.getcwd()
import autooam.testlib.vagboxes as vagboxes
import emtools.common.logutils as logutils
Log = logutils.getLogger(__name__)
def list_boxes():
boxes = []
h = os.popen('vagrant box list')
for line in h.readlines():
        b = line.strip()
        if b:
            boxes.append(b.split()[0])
return boxes
if __name__ == "__main__":
autooamdir = os.getcwd()
homedir = os.environ['HOME']
print 'This script will assist you in setting up your environment to run autooam.'
print ''
if not os.path.exists('setenv'):
print ''
print 'You don\'t have a setenv script, I will create one for you...'
print 'This script sets several environment variables including:'
print ' AUTOOAM_HOME'
print ' PYTHONPATH'
print ' PATH'
        print 'You will need to source this file each time you login. We don\'t'
print 'set these globally in .bashrc so that we can share one account on an'
print 'autooam machine with different users. The command will look like this:'
print ' . ./setenv'
print ''
f = open('setenv','w')
f.write('''#!/bin/bash
ANSIBLE_HOME=%s/ansible
export AUTOOAM_HOME=%s
export INFINIDB_EM_TOOLS_HOME=%s/infinidb-em-tools
export ANSIBLE_LIBRARY=$INFINIDB_EM_TOOLS_HOME/share/infinidb:$ANSIBLE_HOME/library
export PYTHONPATH=$AUTOOAM_HOME:$INFINIDB_EM_TOOLS_HOME:$ANSIBLE_HOME/lib:$PYTHONPATH
export PATH=$PATH:/opt/vagrant/bin:$ANSIBLE_HOME/bin
''' % (homedir,autooamdir,homedir))
f.close()
os.chmod('setenv',0755)
else:
print 'You already have a setenv script. Skipping this step.'
print ''
print 'The rest of the setup steps are only applicable if you are on a machine'
print 'where you intend to actually run the framework (as opposed to develop and'
    print 'unit test).'
print ''
print 'Do you want to continue? (y) ',
ans = sys.stdin.readline().strip().lower()
if ans != 'n':
print ''
print 'NOTE: you will need sudo access and may be prompted for your sudo password.'
print ''
print 'First we will check to see if you need to install the /opt/autooam area'
if not os.path.exists('/opt/autooam'):
os.system('sudo mkdir /opt/autooam')
if not os.path.exists('/net/srvengcm1/Calpont/exports/autooam'):
print 'ERROR - it looks like automount of /net/srvengcm1 is not setup. Contact your admin.'
else:
            print 'Copying files from /net/srvengcm1/Calpont/exports/autooam. This may take a while...'
os.system('sudo rsync -av /net/srvengcm1/Calpont/exports/autooam /opt')
print 'Next we will check your NFS export of this autooam directory.'
if not os.path.exists('/etc/exports'):
print 'ERROR - it looks like NFS is not setup on this machine. Contact your admin'
else:
ret = os.system('grep %s /etc/exports > /dev/null 2>&1' % autooamdir)
ret = ret >> 8
if ret != 0:
ret1 = os.system('sudo bash -c "echo \'%s 192.168.0.0/255.255.0.0(rw,sync,all_squash,no_subtree_check,anonuid=%d,anongid=%d)\' >> /etc/exports"' % (autooamdir,os.getuid(),os.getgid())) >> 8
ret2 = os.system('sudo exportfs -a') >> 8
if ret1 or ret2:
print 'ERROR - unable to add AUTOOAM_HOME NFS export'
else:
print 'Your AUTOOAM_HOME NFS export is now configured.'
else:
print 'Your AUTOOAM_HOME NFS export is already configured.'
ret = os.system('grep /opt/autooam /etc/exports > /dev/null 2>&1')
ret = ret >> 8
if ret != 0:
ret1 = os.system('sudo bash -c "echo \'/opt/autooam 192.168.0.0/255.255.0.0(ro,sync,all_squash,no_subtree_check,anonuid=%d,anongid=%d)\' >> /etc/exports"' % (os.getuid(),os.getgid())) >> 8
ret2 = os.system('sudo exportfs -a') >> 8
if ret1 or ret2:
print 'ERROR - unable to add /opt/autooam NFS export'
else:
print 'Your /opt/autooam NFS export is now configured.'
else:
print 'Your /opt/autooam NFS export is already configured.'
print ''
print 'Next we will check to see if you need to install any vagrant boxes'
rc = os.system('which vagrant') >> 8
if rc != 0:
print 'ERROR - it looks like vagrant is not setup on this machine. Contact your admin'
else:
these_boxes = list_boxes()
need_to_update = []
for b in vagboxes.list_all():
if not b in these_boxes:
need_to_update.append(b)
if len(need_to_update):
print "You are missing one or more vagrant boxes, would you like to install(this may take awhile...)? (y) ",
ans = sys.stdin.readline().strip().lower()
if ans != 'n':
for b in vagboxes.list_all():
try:
these_boxes.index(b)
print 'Skipping %s, already installed.' % (b)
                        except ValueError:
print 'Will install box %s.' % (b)
boxrepo = '/net/srvengcm1/Calpont/exports/vagrant_boxes'
if not os.path.exists(boxrepo):
Log.error("Cannot access the box repository at %s, make sure automount is set up!" % boxrepo)
sys.exit(-1)
cmd = "vagrant box add %s %s/%s.box" % (b, boxrepo, b)
Log.debug(cmd)
os.system( cmd )
else:
print "All vagrant boxes are installed"
print ''
print 'Setup Finished!'
|
from django.apps import apps
from django.test import TestCase
from misago.core import threadstore
from .. import migrationutils
from ..models import SettingsGroup
class DBConfMigrationUtilsTests(TestCase):
def setUp(self):
self.test_group = {
'key': 'test_group',
'name': "Test settings",
'description': "Those are test settings.",
'settings': (
{
'setting': 'fish_name',
'name': "Fish's name",
'value': "Eric",
'field_extra': {
'min_length': 2,
'max_length': 255
},
},
{
'setting': 'fish_license_no',
'name': "Fish's license number",
'default_value': '123-456',
'field_extra': {
'max_length': 255
},
},
)
}
migrationutils.migrate_settings_group(apps, self.test_group)
self.groups_count = SettingsGroup.objects.count()
def tearDown(self):
threadstore.clear()
def test_get_custom_group_and_settings(self):
"""tests setup created settings group"""
custom_group = migrationutils.get_group(
apps.get_model('misago_conf', 'SettingsGroup'),
self.test_group['key'])
self.assertEqual(custom_group.key, self.test_group['key'])
self.assertEqual(custom_group.name, self.test_group['name'])
self.assertEqual(custom_group.description,
self.test_group['description'])
custom_settings = migrationutils.get_custom_settings_values(
custom_group)
self.assertEqual(custom_settings['fish_name'], 'Eric')
self.assertTrue('fish_license_no' not in custom_settings)
def test_change_group_key(self):
"""migrate_settings_group changed group key"""
new_group = {
'key': 'new_test_group',
'name': "New test settings",
'description': "Those are updated test settings.",
'settings': (
{
'setting': 'fish_new_name',
'name': "Fish's new name",
'value': "Eric",
'field_extra': {
'min_length': 2,
'max_length': 255
},
},
{
'setting': 'fish_new_license_no',
'name': "Fish's changed license number",
'default_value': '123-456',
'field_extra': {
'max_length': 255
},
},
)
}
migrationutils.migrate_settings_group(
apps, new_group, old_group_key=self.test_group['key'])
db_group = migrationutils.get_group(
apps.get_model('misago_conf', 'SettingsGroup'), new_group['key'])
self.assertEqual(SettingsGroup.objects.count(), self.groups_count)
self.assertEqual(db_group.key, new_group['key'])
self.assertEqual(db_group.name, new_group['name'])
self.assertEqual(db_group.description,
new_group['description'])
for setting in new_group['settings']:
db_setting = db_group.setting_set.get(setting=setting['setting'])
self.assertEqual(db_setting.name, setting['name'])
|
import MFI_Getter
import pandas as pd
import timeit
start = timeit.default_timer()
symbolfile = open("symbols.txt")
symbolslistR = symbolfile.read()
symbolfile.close()
symbolslist = symbolslistR.split('\n')
dfF = MFI_Getter.getMFI('KING')
for s in symbolslist:
try:
df = MFI_Getter.getMFI(s)
dfF = dfF.append(df)
except Exception as ex:
template = "An exception of type {0} occured. Arguments:\n{1!r}"
message = template.format(type(ex).__name__, ex.args)
print message, s
dfF.to_csv('MFI'+'.csv')
stop = timeit.default_timer()
print "start= ",start,"stop= ",stop
|
"""
WSGI config for opcon project.
It exposes the WSGI callable as a module-level variable named ``application``.
For more information on this file, see
https://docs.djangoproject.com/en/1.7/howto/deployment/wsgi/
"""
import os
os.environ.setdefault("DJANGO_SETTINGS_MODULE", "opcon.settings")
from django.core.wsgi import get_wsgi_application
application = get_wsgi_application()
|
from django.db import models
from cardndice.games.models import Game
from cardndice.players.models import Player
class Match(models.Model):
game = models.ForeignKey(Game, core=True)
def __str__(self):
return self.game.name
class Meta:
ordering = ["id"]
class Admin:
pass
class Result(models.Model):
match = models.ForeignKey(Match)
player = models.ForeignKey(Player, edit_inline=models.TABULAR,
num_in_admin=3)
result = models.IntegerField(default=0, core=True)
def __str__(self):
return self.player.name
class Meta:
ordering = ["-result"]
class Admin:
pass
|
import math
from colorsys import hsv_to_rgb
from animations.AbstractAnimation import AbstractAnimation
from animations.inputs.AudioInput import AudioInput
SIN_CHANGE_PER_TIME = 0.5
SIN_CHANGE_PER_PX = 3.0
SIN_SIZE_PER_STRIP = 20.0
EVENT_THRESHOLD = 1.5
EVENT_BRIGHTNESS = 1.33
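# Tuning notes: bins whose intensity exceeds EVENT_THRESHOLD get a
# brightness boost scaled by EVENT_BRIGHTNESS; the resulting per-LED
# brightness byte is clamped to 31, a 5-bit hardware maximum.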
class AudioRainbow(AbstractAnimation):
def __init__(self, config):
super().__init__(config)
def start(self):
super().start()
self.audio = AudioInput()
def stop(self):
super().stop()
self.audio.stop()
self.audio = None
def render(self, bulbs):
nbrBulbs = len(bulbs)
intensities = self.audio.getBinsIntensity(nbrBulbs)
for idx in range(0, nbrBulbs):
brightness = self.config.brightness
intensity = intensities[idx]
if intensity >= EVENT_THRESHOLD:
eventIntensity = (intensity - EVENT_THRESHOLD) / 2.0
intensityModifier = brightness * EVENT_BRIGHTNESS * eventIntensity
brightness = int(brightness * intensityModifier)
if brightness > 31:
brightness = 31
self.renderBulb(bulbs[idx], brightness)
def renderBulb(self, bulb, brightness):
i = bulb.counter
for y in range(0, bulb.strands):
for x in range(0, bulb.pixels):
hue = math.sin((i*SIN_CHANGE_PER_TIME + x*SIN_CHANGE_PER_PX) / SIN_SIZE_PER_STRIP)
normalized_hue = (hue + 1.0) / 2 # Normalize to 0..1
r,g,b = hsv_to_rgb(normalized_hue, 1.0, 1.0)
r = int(r * 255)
g = int(g * 255)
b = int(b * 255)
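                # Buffer layout (bpp == 4): brightness byte first, then
                # blue, green, red. This matches APA102-style strips (an
                # assumption; the exact hardware is not named here).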
bulb.pixelBuffer[(x+y*bulb.pixels)*bulb.bpp + 0] = brightness
bulb.pixelBuffer[(x+y*bulb.pixels)*bulb.bpp + 1] = b
bulb.pixelBuffer[(x+y*bulb.pixels)*bulb.bpp + 2] = g
bulb.pixelBuffer[(x+y*bulb.pixels)*bulb.bpp + 3] = r
|
import sys
import os
import shutil
sys.path.append( '../pymod' )
sys.path.append( '../gcore' )
from osgeo import gdal
import gdaltest
import test_cli_utilities
import tiff_ovr
def test_gdaladdo_1():
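    # Build levels 2 and 4 on a VRT copy: expect a clean stderr and
    # overviews that pass the shared tiff_ovr checker.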
if test_cli_utilities.get_gdaladdo_path() is None:
return 'skip'
shutil.copy('../gcore/data/mfloat32.vrt', 'tmp/mfloat32.vrt')
shutil.copy('../gcore/data/float32.tif', 'tmp/float32.tif')
(out, err) = gdaltest.runexternal_out_and_err(test_cli_utilities.get_gdaladdo_path() + ' tmp/mfloat32.vrt 2 4')
    if not (err is None or err == ''):
gdaltest.post_reason('got error/warning')
print(err)
return 'fail'
ds = gdal.Open('tmp/mfloat32.vrt')
ret = tiff_ovr.tiff_ovr_check(ds)
ds = None
os.remove('tmp/mfloat32.vrt')
os.remove('tmp/mfloat32.vrt.ovr')
os.remove('tmp/float32.tif')
return ret
def test_gdaladdo_2():
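    # '-r average' resampling: the level-2 overview must match the
    # expected checksum.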
if test_cli_utilities.get_gdaladdo_path() is None:
return 'skip'
shutil.copyfile( '../gcore/data/nodata_byte.tif', 'tmp/ovr5.tif' )
gdaltest.runexternal(test_cli_utilities.get_gdaladdo_path() + ' -r average tmp/ovr5.tif 2')
ds = gdal.Open('tmp/ovr5.tif')
cs = ds.GetRasterBand(1).GetOverview(0).Checksum()
exp_cs = 1130
if cs != exp_cs:
gdaltest.post_reason( 'got wrong overview checksum.' )
print(exp_cs, cs)
return 'fail'
ds = None
os.remove('tmp/ovr5.tif')
return 'success'
def test_gdaladdo_3():
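    # '-ro' must leave the source TIFF untouched and write the overviews
    # to an external .ovr file.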
if test_cli_utilities.get_gdaladdo_path() is None:
return 'skip'
shutil.copyfile( '../gcore/data/nodata_byte.tif', 'tmp/test_gdaladdo_3.tif' )
gdaltest.runexternal(test_cli_utilities.get_gdaladdo_path() + ' -ro tmp/test_gdaladdo_3.tif 2')
ds = gdal.Open('tmp/test_gdaladdo_3.tif')
cs = ds.GetRasterBand(1).GetOverview(0).Checksum()
exp_cs = 1152
if cs != exp_cs:
gdaltest.post_reason( 'got wrong overview checksum.' )
print(exp_cs, cs)
return 'fail'
ds = None
try:
os.stat('tmp/test_gdaladdo_3.tif.ovr')
    except OSError:
gdaltest.post_reason( 'no external overview.' )
return 'fail'
return 'success'
def test_gdaladdo_4():
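    # '-clean' must drop all overviews, including the external .ovr file
    # created by the previous test.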
if test_cli_utilities.get_gdaladdo_path() is None:
return 'skip'
gdaltest.runexternal(test_cli_utilities.get_gdaladdo_path() + ' -clean tmp/test_gdaladdo_3.tif')
ds = gdal.Open('tmp/test_gdaladdo_3.tif')
cnt = ds.GetRasterBand(1).GetOverviewCount()
ds = None
if cnt != 0:
gdaltest.post_reason( 'did not clean overviews.' )
return 'fail'
try:
os.stat('tmp/test_gdaladdo_3.tif.ovr')
gdaltest.post_reason( '.ovr file still exists' )
return 'fail'
    except OSError:
pass
os.remove('tmp/test_gdaladdo_3.tif')
return 'success'
gdaltest_list = [
test_gdaladdo_1,
test_gdaladdo_2,
test_gdaladdo_3,
test_gdaladdo_4
]
if __name__ == '__main__':
gdaltest.setup_run( 'test_gdaladdo' )
gdaltest.run_tests( gdaltest_list )
gdaltest.summarize()
|
import sys
import os
import os.path
from .sixext import PY3
from .sixext.moves import configparser
import locale
import pwd
import stat
import re
from .codes import *
from . import logger
from . import os_utils
from .sixext import to_unicode
if PY3:
QString = type("")
def cmp(a, b):
return (a > b) - (a < b)
log = logger.Logger('', logger.Logger.LOG_LEVEL_INFO, logger.Logger.LOG_TO_CONSOLE)
log.set_level('info')
MINIMUM_PYQT_MAJOR_VER = 3
MINIMUM_PYQT_MINOR_VER = 14
MINIMUM_QT_MAJOR_VER = 3
MINIMUM_QT_MINOR_VER = 0
def to_bool(s, default=False):
if isinstance(s, str) and s:
if s[0].lower() in ['1', 't', 'y']:
return True
elif s[0].lower() in ['0', 'f', 'n']:
return False
elif isinstance(s, bool):
return s
return default
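# Examples: to_bool('yes') -> True, to_bool('0') -> False,
# to_bool(None) -> default (False unless overridden).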
class Properties(dict):
def __getattr__(self, attr):
if attr in list(self.keys()):
return self.__getitem__(attr)
else:
return ""
def __setattr__(self, attr, val):
self.__setitem__(attr, val)
prop = Properties()
class ConfigBase(object):
def __init__(self, filename):
self.filename = filename
self.conf = configparser.ConfigParser()
self.read()
def get(self, section, key, default=to_unicode('')):
try:
return self.conf.get(section, key)
except (configparser.NoOptionError, configparser.NoSectionError):
return default
def set(self, section, key, value):
if not self.conf.has_section(section):
self.conf.add_section(section)
self.conf.set(section, key, value)
self.write()
def sections(self):
return self.conf.sections()
def has_section(self, section):
return self.conf.has_section(section)
def options(self, section):
return self.conf.options(section)
keys = options
def read(self):
if self.filename is not None:
filename = self.filename
if filename.startswith("/root/"):
# Don't try opening a file in root's home directory.
log.error("attempted to read from '%s'" % self.filename)
return
try:
fp = open(self.filename, "r")
try:
self.conf.readfp(fp)
except (configparser.DuplicateOptionError):
log.warn("Found Duplicate Entery in %s" % self.filename)
self.CheckDuplicateEntries()
fp.close()
except (OSError, IOError, configparser.MissingSectionHeaderError):
log.debug("Unable to open file %s for reading." % self.filename)
def write(self):
if self.filename is not None:
filename = self.filename
if filename.startswith("/root/") or filename.startswith("/etc/"):
# Don't try writing a file in root's home directory or
# the system-wide config file.
# See bug #479178.
log.error("attempted to write to '%s'" % self.filename)
return
try:
fp = open(self.filename, "w")
self.conf.write(fp)
fp.close()
except (OSError, IOError):
log.debug("Unable to open file %s for writing." % self.filename)
def CheckDuplicateEntries(self):
try:
f = open(self.filename,'r')
data = f.read()
f.close()
except IOError:
data =""
final_data =''
for a in data.splitlines():
if not a or a not in final_data:
final_data = final_data +'\n' +a
import tempfile
fd, self.filename = tempfile.mkstemp()
        f = os.fdopen(fd, 'w')
f.write(final_data)
f.close()
self.read()
os.unlink(self.filename)
class SysConfig(ConfigBase):
def __init__(self):
ConfigBase.__init__(self, '/etc/hp/hplip.conf')
class State(ConfigBase):
def __init__(self):
if not os.path.exists('/var/lib/hp/') and os.geteuid() == 0:
os.makedirs('/var/lib/hp/')
cmd = 'chmod 755 /var/lib/hp/'
os_utils.execute(cmd)
ConfigBase.__init__(self, '/var/lib/hp/hplip.state')
class UserConfig(ConfigBase):
def __init__(self):
sts, prop.user_dir = os_utils.getHPLIPDir()
if not os.geteuid() == 0:
prop.user_config_file = os.path.join(prop.user_dir, 'hplip.conf')
if not os.path.exists(prop.user_config_file):
try:
open(prop.user_config_file, 'w').close()
s = os.stat(os.path.dirname(prop.user_config_file))
os.chown(prop.user_config_file, s[stat.ST_UID], s[stat.ST_GID])
except IOError:
pass
ConfigBase.__init__(self, prop.user_config_file)
else:
# If running as root, conf file is None
prop.user_config_file = None
ConfigBase.__init__(self, None)
def workingDirectory(self):
t = self.get('last_used', 'working_dir', os.path.expanduser("~"))
try:
t = t.decode('utf-8')
except UnicodeError:
log.error("Invalid unicode: %s" % t)
log.debug("working directory: %s" % t)
return t
def setWorkingDirectory(self, t):
self.set('last_used', 'working_dir', t.encode('utf-8'))
log.debug("working directory: %s" % t.encode('utf-8'))
os.umask(0o037)
sys_conf = SysConfig()
sys_state = State()
user_conf = UserConfig()
try:
prop.locale, prop.encoding = locale.getdefaultlocale()
except ValueError:
prop.locale = 'en_US'
prop.encoding = 'UTF8'
prop.version = sys_conf.get('hplip', 'version', '0.0.0') # e.g., 3.9.2b.10
_p, _x = re.compile(r'(\d\w*)', re.I), []
for _y in prop.version.split('.')[:3]:
_z = _p.match(_y)
if _z is not None:
_x.append(_z.group(1))
prop.installed_version = '.'.join(_x) # e.g., '3.9.2'
try:
prop.installed_version_int = int(''.join(['%02x' % int(_y) for _y in _x]), 16) # e.g., 0x030902 -> 198914
except ValueError:
prop.installed_version_int = 0
prop.home_dir = sys_conf.get('dirs', 'home', os.path.realpath(os.path.normpath(os.getcwd())))
prop.username = pwd.getpwuid(os.getuid())[0]
pdb = pwd.getpwnam(prop.username)
prop.userhome = pdb[5]
prop.history_size = 50
prop.data_dir = os.path.join(prop.home_dir, 'data')
prop.image_dir = os.path.join(prop.home_dir, 'data', 'images')
prop.xml_dir = os.path.join(prop.home_dir, 'data', 'xml')
prop.models_dir = os.path.join(prop.home_dir, 'data', 'models')
prop.localization_dir = os.path.join(prop.home_dir, 'data', 'localization')
prop.max_message_len = 8192
prop.max_message_read = 65536
prop.read_timeout = 90
prop.ppd_search_path = '/usr/share;/usr/local/share;/usr/lib;/usr/local/lib;/usr/libexec;/opt;/usr/lib64'
prop.ppd_search_pattern = 'HP-*.ppd.*'
prop.ppd_download_url = 'http://www.linuxprinting.org/ppd-o-matic.cgi'
prop.ppd_file_suffix = '-hpijs.ppd'
prop.gui_build = to_bool(sys_conf.get('configure', 'gui-build', '0'))
prop.net_build = to_bool(sys_conf.get('configure', 'network-build', '0'))
prop.par_build = to_bool(sys_conf.get('configure', 'pp-build', '0'))
prop.usb_build = True
prop.scan_build = to_bool(sys_conf.get('configure', 'scanner-build', '0'))
prop.fax_build = to_bool(sys_conf.get('configure', 'fax-build', '0'))
prop.doc_build = to_bool(sys_conf.get('configure', 'doc-build', '0'))
prop.foomatic_xml_install = to_bool(sys_conf.get('configure', 'foomatic-xml-install', '0'))
prop.foomatic_ppd_install = to_bool(sys_conf.get('configure', 'foomatic-ppd-install', '0'))
prop.hpcups_build = to_bool(sys_conf.get('configure', 'hpcups-install', '0'))
prop.hpijs_build = to_bool(sys_conf.get('configure', 'hpijs-install', '0'))
spinner = "\|/-\|/-"
spinpos = 0
enable_spinner = True
def change_spinner_state(enable =True):
global enable_spinner
enable_spinner = enable
def update_spinner():
global spinner, spinpos, enable_spinner
if enable_spinner and not log.is_debug() and sys.stdout.isatty():
sys.stdout.write("\b" + spinner[spinpos])
spinpos=(spinpos + 1) % 8
sys.stdout.flush()
def cleanup_spinner():
global enable_spinner
if enable_spinner and not log.is_debug() and sys.stdout.isatty():
sys.stdout.write("\b \b")
sys.stdout.flush()
ERROR_STRINGS = {
ERROR_SUCCESS : 'No error',
ERROR_UNKNOWN_ERROR : 'Unknown error',
ERROR_DEVICE_NOT_FOUND : 'Device not found',
ERROR_INVALID_DEVICE_ID : 'Unknown/invalid device-id field',
ERROR_INVALID_DEVICE_URI : 'Unknown/invalid device-uri field',
ERROR_DATA_LENGTH_EXCEEDS_MAX : 'Data length exceeds maximum',
ERROR_DEVICE_IO_ERROR : 'Device I/O error',
ERROR_NO_PROBED_DEVICES_FOUND : 'No probed devices found',
ERROR_DEVICE_BUSY : 'Device busy',
ERROR_DEVICE_STATUS_NOT_AVAILABLE : 'DeviceStatus not available',
ERROR_INVALID_SERVICE_NAME : 'Invalid service name',
ERROR_ERROR_INVALID_CHANNEL_ID : 'Invalid channel-id (service name)',
ERROR_CHANNEL_BUSY : 'Channel busy',
ERROR_DEVICE_DOES_NOT_SUPPORT_OPERATION : 'Device does not support operation',
ERROR_DEVICEOPEN_FAILED : 'Device open failed',
ERROR_INVALID_DEVNODE : 'Invalid device node',
ERROR_INVALID_HOSTNAME : "Invalid hostname ip address",
ERROR_INVALID_PORT_NUMBER : "Invalid JetDirect port number",
ERROR_NO_CUPS_QUEUE_FOUND_FOR_DEVICE : "No CUPS queue found for device.",
ERROR_DATFILE_ERROR: "DAT file error",
ERROR_INVALID_TIMEOUT: "Invalid timeout",
ERROR_IO_TIMEOUT: "I/O timeout",
ERROR_FAX_INCOMPATIBLE_OPTIONS: "Incompatible fax options",
ERROR_FAX_INVALID_FAX_FILE: "Invalid fax file",
ERROR_FAX_FILE_NOT_FOUND: "Fax file not found",
ERROR_INTERNAL : 'Unknown internal error',
}
class Error(Exception):
def __init__(self, opt=ERROR_INTERNAL):
self.opt = opt
self.msg = ERROR_STRINGS.get(opt, ERROR_STRINGS[ERROR_INTERNAL])
log.debug("Exception: %d (%s)" % (opt, self.msg))
Exception.__init__(self, self.msg, opt)
supported_locales = { 'en_US': ('us', 'en', 'en_us', 'american', 'america', 'usa', 'english'),}
#'zh_CN': ('zh', 'cn', 'zh_cn' , 'china', 'chinese', 'prc'),
#'de_DE': ('de', 'de_de', 'german', 'deutsche'),
#'fr_FR': ('fr', 'fr_fr', 'france', 'french', 'français'),
#'it_IT': ('it', 'it_it', 'italy', 'italian', 'italiano'),
#'ru_RU': ('ru', 'ru_ru', 'russian'),
#'pt_BR': ('pt', 'br', 'pt_br', 'brazil', 'brazilian', 'portuguese', 'brasil', 'portuguesa'),
#'es_MX': ('es', 'mx', 'es_mx', 'mexico', 'spain', 'spanish', 'espanol', 'español'),
#}
|
def incOrSet(dicName,keyName,incBy,initVal):
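    # Increment dicName[keyName] by incBy if present, otherwise seed it
    # with initVal (note: incBy is ignored on the first call).
    # Example: d = {}; incOrSet(d, 'n', 2, 1) -> {'n': 1};
    # a second identical call gives {'n': 3}.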
if keyName in dicName:
dicName[keyName]+=incBy
else:
dicName[keyName]=initVal
|
"""
Slovenian-specific definitions of relationships
"""
from gramps.gen.lib import Person
import gramps.gen.relationship
_ancestors = [ u"", u"starš", u"stari starš", u"prastari starš" ]
_fathers = [ u"", u"oče", u"ded", u"praded", u"prapraded" ]
_mothers = [ u"", u"mati", u"babica", u"prababica", u"praprababica" ]
_descendants = [
u"", u"otrok", u"vnuk(inja)", u"pravnuk(inja)", u"prapravnuk(inja)" ]
_sons = [ u"", u"sin", u"vnuk", u"pravnuk", u"prapravnuk" ]
_daughters = [ u"", u"hči", u"vnukinja", u"pravnukinja", u"prapravnukinja" ]
_maleCousins = [ u"", u"brat", u"bratranec", u"mali bratranec" ]
_femaleCousins = [ u"", u"sestra", u"sestrična", u"mala sestrična" ]
_someCousins = [ u"", u"brat ali sestra", u"bratranec ali sestrična",
u"mali bratranec ali mala sestrična" ]
_aunts = [ u"", u"teta", u"stara teta", u"prateta", u"praprateta" ]
_uncles = [ u"", u"stric", u"stari stric", u"prastric", u"praprastric" ]
_nieces = [ u"", u"nečakinja", u"pranečakinja", u"prapranečakinja" ]
_nephews = [ u"", u"nečak", u"pranečak", u"prapranečak" ]
_children = [ u"", u"otroci", u"vnuki", u"pravnuki", u"prapravnuki" ]
_parents = [ u"", u"starši", u"stari starši", u"prastarši", u"praprastarši" ]
_siblings = [ u"", u"sorojenci", u"strici", u"stari strici", u"prastrici",
u"praprastrici" ]
_neph_niec = [ u"", u"nečaki", u"pranečaki", u"prapranečaki" ]
class RelationshipCalculator(gramps.gen.relationship.RelationshipCalculator):
"""
RelationshipCalculator Class
"""
def __init__(self):
gramps.gen.relationship.RelationshipCalculator.__init__(self)
def getAncestor(self, level):
if level > len(_ancestors)-1:
return u"%s-krat-pra-prednik" % (level-2)
else:
return _ancestors[level]
def getFather(self, level):
if level > len(_fathers)-1:
return u"%s-krat-pra-ded" % (level-2)
else:
return _fathers[level]
def getMother(self, level):
if level > len(_mothers)-1:
return u"%s-krat-pra-babica" % (level-2)
else:
return _mothers[level]
def getSon(self, level):
if level > len(_sons)-1:
return u"%s-krat-pra-vnuk" % (level-2)
else:
return _sons[level]
def getDaughter(self, level):
if level > len(_daughters)-1:
return u"%s-krat-pra-vnukinja" % (level-2)
else:
return _daughters[level]
def getDescendant(self, level):
if level > len(_descendants)-1:
return u"%s-krat-pra-vnuk(inja)" % (level-2)
else:
return _descendants[level]
def getMaleCousin(self, level):
if level > len(_maleCousins)-1:
return u"bratranec v %s. kolenu" % (level*2)
else:
return _maleCousins[level]
def getFemaleCousin(self, level):
if level > len(_femaleCousins)-1:
return u"sestrična v %s. kolenu" % (level*2)
else:
return _femaleCousins[level]
def getSomeCousin(self, level):
if level > len(_someCousins)-1:
return u"bratranec ali sestrična v %s. kolenu" % (level*2)
else:
return _someCousins[level]
def getSuffix(self, distance, level):
        # distance - level == 2*Gb, so it equals 2 exactly when Gb == 1
if distance-level == 2 or distance < 6:
return u""
else:
return u" v %s. kolenu" % (distance)
def getAunt(self, distance, level):
if distance == 5 and level == 1:
return u"mala teta"
elif level > len(_aunts)-1:
return u"%s-krat-pra-teta%s" % (level-2, self.getSuffix(distance, level))
else:
return u"%s%s" % (_aunts[level], self.getSuffix(distance, level))
def getUncle(self, distance, level):
if distance == 5 and level == 1:
return u"mali stric"
elif level > len(_uncles)-1:
return u"%s-krat-pra-stric%s" % (level-2, self.getSuffix(distance, level))
else:
return u"%s%s" % (_uncles[level], self.getSuffix(distance, level))
def getNiece(self, distance, level):
if distance == 5 and level == 1:
return u"mala nečakinja"
elif level > len(_nieces)-1:
return u"%s-krat-pra-nečakinja%s" % (level-1, self.getSuffix(distance, level))
else:
return u"%s%s" % (_nieces[level], self.getSuffix(distance, level))
def getNephew(self, distance, level):
if distance == 5 and level == 1:
return u"mali nečak"
elif level > len(_nephews)-1:
return u"%s-krat-pra-nečak%s" % (level-1, self.getSuffix(distance, level))
else:
return u"%s%s" % (_nephews[level], self.getSuffix(distance, level))
def get_single_relationship_string(
self, Ga, Gb, gender_a, gender_b, reltocommon_a, reltocommon_b,
only_birth=True, in_law_a=False, in_law_b=False):
"""
        Provide a string that describes the relationship between a person
        and another person. E.g. "grandparent" or "child".
To be used as: 'person b is the grandparent of a', this will
be in translation string :
'person b is the %(relation)s of a'
Note that languages with gender should add 'the' inside the
translation, so eg in french:
'person b est %(relation)s de a'
where relation will be here: le grandparent
Ga and Gb can be used to mathematically calculate the relationship.
See the Wikipedia entry for more information:
http://en.wikipedia.org/wiki/Cousin#Mathematical_definitions
"""
if Gb == 0:
if Ga == 0: rel_str = "ista oseba"
elif gender_b == Person.MALE:
rel_str = (self.getFather(Ga))
elif gender_b == Person.FEMALE:
rel_str = (self.getMother(Ga))
else:
rel_str = (self.getAncestor(Ga))
elif Ga == 0:
if gender_b == Person.MALE:
rel_str = (self.getSon(Gb))
elif gender_b == Person.FEMALE:
rel_str = (self.getDaughter(Gb))
else:
rel_str = (self.getDescendant(Gb))
elif Ga == Gb:
if gender_b == Person.MALE:
rel_str = (self.getMaleCousin(Gb))
elif gender_b == Person.FEMALE:
rel_str = (self.getFemaleCousin(Gb))
else:
rel_str = (self.getSomeCousin(Gb))
elif Ga > Gb:
if gender_b == Person.FEMALE:
rel_str = (self.getAunt(Ga+Gb, Ga-Gb))
else:
rel_str = (self.getUncle(Ga+Gb, Ga-Gb)) # we'll use male for unknown sex
else: #Ga < Gb
if gender_b == Person.FEMALE:
rel_str = (self.getNiece(Ga+Gb, Gb-Ga))
else:
rel_str = (self.getNephew(Ga+Gb, Gb-Ga)) # we'll use male for unknown sex
return rel_str
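    # Worked example (illustrative, following the Wikipedia cousin definitions
    # referenced above): Ga and Gb count generations from each person up to
    # the common ancestor. Ga == Gb == 1 gives siblings, Ga == Gb == 2 gives
    # first cousins ("bratranec"/"sestrična"), Ga == 2 with Gb == 0 makes
    # person b a grandparent ("ded"/"babica"), and Ga == 0 with Gb == 2 a
    # grandchild ("vnuk"/"vnukinja").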
def get_sibling_relationship_string(self, sib_type, gender_a, gender_b,
in_law_a=False, in_law_b=False):
""" Determine the string giving the relation between two siblings of
type sib_type.
Eg: b is the brother of a
Here 'brother' is the string we need to determine
This method gives more details about siblings than
get_single_relationship_string can do.
DON'T TRANSLATE THIS PROCEDURE IF LOGIC IS EQUAL IN YOUR LANGUAGE,
        AND SAME METHODS EXIST (get_uncle, get_aunt, get_sibling)
"""
gender = gender_b #we don't need gender_a
inlaw = in_law_a or in_law_b
if sib_type == self.HALF_SIB_MOTHER or sib_type == self.HALF_SIB_FATHER:
prefix = u"pol"
else:
prefix = u""
if sib_type < self.STEP_SIB:
# ie. NORM_SIB or one of HALF_SIBs
if not inlaw:
if gender == Person.MALE:
rel_str = u"%sbrat" % (prefix)
elif gender == Person.FEMALE:
rel_str = u"%ssestra" % (prefix)
else:
rel_str = u"%sbrat ali %ssestra" % (prefix, prefix)
else:
if gender == Person.MALE:
rel_str = u"%ssvak" % (prefix)
elif gender == Person.FEMALE:
rel_str = u"%ssvakinja" % (prefix)
else:
rel_str = u"%ssvak ali %ssvakinja" % (prefix, prefix)
else:
rel_str = u""
return rel_str
def get_plural_relationship_string(
self, Ga, Gb, reltocommon_a='', reltocommon_b='', only_birth=True,
in_law_a=False, in_law_b=False):
distance = Ga+Gb
rel_str = u"sorodniki v %s. kolenu" % (distance)
if Ga == 0:
# These are descendants
if Gb < len(_children):
rel_str = _children[Gb]
else:
rel_str = u"%s-krat-pra-vnuki" % (Gb-2)
elif Gb == 0:
# These are parents/grand parents
if Ga < len(_parents):
rel_str = _parents[Ga]
else:
rel_str = u"%s-krat-pra-starši" % (Ga-2)
elif Gb == 1:
# These are siblings/aunts/uncles
if Ga < len(_siblings):
rel_str = _siblings[Ga]
else:
rel_str = u"%s-krat-pra-strici" % (Ga-2)
elif Ga == 1:
# These are nieces/nephews
if Gb < len(_neph_niec):
rel_str = _neph_niec[Gb]
else:
rel_str = u"%s-krat-pra-nečaki" % (Gb-1)
elif Ga == Gb:
# These are cousins in the same generation
if Ga == 2:
rel_str = u"bratranci"
elif Ga == 3:
rel_str = u"mali bratranci"
else:
rel_str = u"bratranci v %s. kolenu" % (distance)
elif Ga > Gb:
# These are cousins in different generations with the second person
# being in a higher generation from the common ancestor than the
# first person.
level = Ga - Gb
if distance == 5:
rel_str = u"mali strici"
elif level < len(_siblings)-1:
# len-1 and level+1 to skip the siblings in uncles' levels
rel_str = u"%s v %s. kolenu" % (_siblings[level+1], distance)
else:
rel_str = u"%s-krat-pra-strici v %s. kolenu" % (level-2, distance)
else: #Gb > Ga:
# These are cousins in different generations with the second person
# being in a lower generation from the common ancestor than the
# first person.
level = Gb - Ga
if distance == 5:
rel_str = u"mali nečaki"
elif level < len(_neph_niec):
rel_str = u"%s v %s. kolenu" % (_neph_niec[level], distance)
else:
rel_str = u"%s-krat-pra-nečaki v %s. kolenu" % (level-1, distance)
if in_law_b == True:
rel_str = "zakonci, ki jih imajo %s" % rel_str
return rel_str
if __name__ == "__main__":
"""TRANSLATORS, copy this if statement at the bottom of your
rel_xx.py module, and test your work with:
python src/plugins/rel/rel_xx.py
"""
from gramps.gen.relationship import test
RC = RelationshipCalculator()
test(RC, True)
|
import json
from django.http import HttpResponse
from django.shortcuts import render_to_response
from django.template import RequestContext
import traceback
import subprocess
import os
from itertools import groupby
from motion_app.process import get_motion_config
from motion_app.models import EventFile
from bson import json_util
import datetime
import pytz
import base64
info = {
"label": "MOTION",
"desc": "Record videos, take pictures, create timelapse files and stream videos using a camera"
}
lis_signals = [
{"name": "SIGUSR1", "btn": "create movie", "desc": "Motion will create an mpeg file of the current event"}
]
def _get_pid():
s = subprocess.Popen("ps aux", shell=True, stdout=subprocess.PIPE, stderr=subprocess.STDOUT).stdout.read()
lis = [line for line in s.split("\n") if line.split()[-2:]==['sudo', 'motion'] and not('bin/sh' in line)]
if len(lis) != 1:
print 'could not get pid, len(lis) = %s' %len(lis)
for l in lis:
print l
return None
pid = lis[0].split()[1]
return pid
def home(request, template_name='motion_app/home.html'):
conf = get_motion_config()
    # get_host() returns "host[:port]" with no scheme; lstrip('http://') would
    # strip characters rather than a prefix, so just drop the optional port
    host = request.get_host().split(':')[0]
#camera is accessed from lan, the host is the ip of the rpi
if host.startswith('192.168.1'):
cameraport = str(conf["webcam_port"])
#camera is accessed from wan, port needs to be calculated
else:
#try get lan ip, because based on it we will find the camera ip
try:
fp = '/home/pi/data/status'
f = file(fp, "r")
s = f.read()
f.close()
ip_lan = json.loads(s)['ip_lan']
if not ip_lan.startswith('192.168.1'):
raise BaseException('')
cameraport = '9' + ip_lan.split('.')[-1][1:3] + '2'
except:
cameraport = conf["webcam_port"]
print ">> status: previous status not loadable"
motion_conf = {k: {"h" : "<code>%s</code> (currently %s)" %(k, v), "v": v} for (k,v) in conf.iteritems()}
d = {"motion_conf": motion_conf, "lis_signals": lis_signals, "info": info}
d["stream_address"] = 'http://' + host + ":" + cameraport
return render_to_response(template_name, d, context_instance=RequestContext(request))
def send_signal(request):
d = {}
try:
signal = request.GET['cmd']
pid = _get_pid()
if pid:
s = subprocess.Popen("sudo kill -s %s %s" %(signal, pid), shell=True, stdout=subprocess.PIPE, stderr=subprocess.STDOUT).stdout.read()
d["msg"] = "%done"
else:
d["msg"] = "could not find pid"
print request
return HttpResponse(json.dumps(d), content_type='application/json')
except:
d["error"] = traceback.format_exc()
print d["error"]
return HttpResponse(json.dumps(d), content_type='application/json')
def du(request):
d = {}
try:
d['data'] = subprocess.Popen("cd /home/pi/data/motion_app&&du -h", shell=True, stdout=subprocess.PIPE, stderr=subprocess.STDOUT).stdout.read().split("\n")
except:
d['error']=traceback.format_exc()
return HttpResponse(json.dumps(d), content_type='application/json')
def get_files(request):
d = {}
try:
folder = get_motion_config()["target_dir"]
if not os.path.isdir(folder):
d["error"] = "the directory %s does not exist, please start the motion app first" %folder
else:
files = os.listdir(folder)
fun = lambda x: x.split('.')[-1]
for key, group in groupby(sorted(files, key=fun),fun ):
fl = sorted(list(group))
d2 = {"count": len(fl)}
if fl:
d2.update({'first': fl[0], 'last': fl[-1]})
else:
d2.update({'first': '-', 'last': "-"})
if key != 'jpg':
d2.update({"files": fl})
d[key] = d2
return HttpResponse(json.dumps(d), content_type='application/json')
except:
d["error"] = traceback.format_exc()
print d["error"]
return HttpResponse(json.dumps(d), content_type='application/json')
def register_event(request):
d = {}
try:
data = {}
data["dt"] = datetime.datetime.utcnow()
data["label"] = request.GET['label']
data["path"] = request.GET['path']
ef = EventFile(data=json_util.dumps(data))
ef.save()
d["msg"] = "done"
return HttpResponse(json.dumps(d), content_type='application/json')
except:
d["error"] = traceback.format_exc()
print d["error"]
return HttpResponse(json.dumps(d), content_type='application/json')
def recent_events(request):
lis = [
"motion_detected"
,"event_start"
,"event_end"
,"picture_save"
,"motion_detected"
,"area_detected"
,"movie_start"
,"movie_end"
,"camera_lost"
]
d = {}
folder = "/home/pi/data/motion_app"
try:
if "name" in request.GET:
name = request.GET["name"]
if name == "all":
for name in lis:
if os.path.isfile(os.path.join(folder, name)):
f = file(os.path.join(folder, name), "r")
d[name] = f.read()
f.close()
else:
d['msg'] = d.get('msg', "") + "%s not found, " %name
elif name in lis:
if os.path.isfile(os.path.join(folder, name)):
f = file(os.path.join(folder, name), "r")
d[name] = f.read()
f.close()
else:
                    raise BaseException('%s log file not found' % name)
else:
raise BaseException('%s is not a valid variable to ask for' %name)
else:
raise BaseException('name parameter is required')
except:
d["error"] = traceback.format_exc()
return HttpResponse(json.dumps(d), content_type='application/json')
def gantt_data(request):
d={}
try:
#dbdata = [{'dt':datetime.datetime(2010,1,1) + datetime.timedelta(hours=7 * i+1), "event":["movie_start", "movie_end"][i%2]}for i in range(100)]
        dbdata = [ev for ev in [json_util.loads(e.data) for e in EventFile.objects.all()] if ev['label'] in ["movie_start", "movie_end"]]
dbdata = sorted(dbdata, key=lambda x:x['dt'])
#print dbdata
lis=[]
d = {}
for p in dbdata:
print p
day = datetime.datetime(p['dt'].year, p['dt'].month, p['dt'].day, tzinfo=pytz.utc)
if p["label"] == "movie_start":
d = {'start': p['dt'], 'date': p['dt'].strftime('%Y%m%d'), 'startHour':((p['dt']-day).total_seconds()/3600.)}
elif p["label"] == "movie_end":
if "startHour" in d:
if p['dt'].date() == d['start'].date():
d['endHour'] = ((p['dt'] - day).total_seconds()/3600.)
d["status"] = "SUCCEEDED"
d.pop('start')
lis.append(d)
elif p['dt'].date() > d['start'].date():
d['endHour'] = 24
d.pop('start')
lis.append(d)
lis.append({'date': p['dt'].strftime('%Y%m%d'), 'startHour': 0, 'endHour': p['dt'].hour})
else:
                        print 'movie_end at %s precedes its movie_start at %s' % (p['dt'], d['start'])
                        raise BaseException('inconsistent movie_start/movie_end ordering')
d = {}
else:
continue
now = datetime.datetime.utcnow()
nowday = datetime.datetime(now.year, now.month, now.day)
d = {'date': now.strftime('%Y%m%d'), 'startHour':((now - nowday).total_seconds()/3600.)}
d['endHour'] = 24.
d["status"] = "RUNNING"
lis.append(d)
dic = {}
dic['data'] = lis
return HttpResponse(json.dumps(dic), content_type='application/json')
except:
d = {}
d["error"] = str(traceback.format_exc())
print d["error"]
return HttpResponse(json.dumps(d), content_type='application/json')
def getfile(request):
try:
path = request.GET["path"]
f = file(path, "rb")
s = f.read()
b64 = base64.b64encode(s)
f.close()
return HttpResponse(json.dumps({'data': b64}), content_type='application/json')
except:
d = {}
d["error"] = str(traceback.format_exc())
print d["error"]
return HttpResponse(json.dumps(d), content_type='application/json')
|
"""Common initialization core for woo.
This file is executed when anything is imported from woo for the first time.
It loads woo plugins and injects c++ class constructors to the __builtins__
(that might change in the future, though) namespace, making them available
everywhere.
"""
from wooMain import options as wooOptions
import warnings,traceback
import sys,os,os.path,re,string
WIN=sys.platform=='win32'
PY3K=(sys.version_info[0]==3)
if WIN:
class WooOsEnviron:
'''Class setting env vars via both CRT and win32 API, so that values can be read back
with getenv. This is needed for proper setup of OpenMP (which read OMP_NUM_THREADS).'''
def __setitem__(self,name,val):
import ctypes
# in windows set, value in CRT in addition to the one manipulated via win32 api
# http://msmvps.com/blogs/senthil/archive/2009/10/413/when-what-you-set-is-not-what-you-get-setenvironmentvariable-and-getenv.aspx
## use python's runtime
##ctypes.cdll[ctypes.util.find_msvcrt()]._putenv("%s=%s"%(name,value))
# call MSVCRT (unversioned) as well
ctypes.cdll.msvcrt._putenv("%s=%s"%(name,val))
os.environ[name]=val
def __getitem__(self,name): return os.environ[name]
wooOsEnviron=WooOsEnviron()
# this was set in wooMain (with -D, -vv etc), set again so that c++ sees it
    if 'WOO_DEBUG' in os.environ: wooOsEnviron['WOO_DEBUG']=os.environ['WOO_DEBUG']
else:
wooOsEnviron=os.environ
wooOsEnviron['LC_NUMERIC']='C'
if wooOptions.ompCores:
cc=wooOptions.ompCores
if wooOptions.ompThreads!=len(cc) and wooOptions.ompThreads>0:
print 'wooOptions.ompThreads =',str(wooOptions.ompThreads)
warnings.warn('ompThreads==%d ignored, using %d since ompCores are specified.'%(wooOptions.ompThreads,len(cc)))
wooOptions.ompThreads=len(cc)
wooOsEnviron['GOMP_CPU_AFFINITY']=' '.join([str(cc[0])]+[str(c) for c in cc])
wooOsEnviron['OMP_NUM_THREADS']=str(len(cc))
elif wooOptions.ompThreads:
wooOsEnviron['OMP_NUM_THREADS']=str(wooOptions.ompThreads)
elif 'OMP_NUM_THREADS' not in os.environ:
import multiprocessing
wooOsEnviron['OMP_NUM_THREADS']=str(multiprocessing.cpu_count())
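# Worked example (illustrative): with wooOptions.ompCores == [0, 2, 3] the
# branch above sets GOMP_CPU_AFFINITY to '0 0 2 3' (the first core appears
# twice because the list is prefixed with cc[0]) and OMP_NUM_THREADS to '3'.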
import distutils.sysconfig
soSuffix=distutils.sysconfig.get_config_vars()['SO']
if not WIN and (wooOptions.quirks & wooOptions.quirkIntel) and 'DISPLAY' in os.environ:
import os,subprocess
try:
vgas=subprocess.check_output("LC_ALL=C lspci | grep VGA",shell=True,stderr=subprocess.STDOUT,universal_newlines=True).split('\n')
if len(vgas)==1 and 'Intel' in vgas[0]:
# popen does not raise exception if it fails
try:
glx=subprocess.check_output("LC_ALL=C glxinfo | grep 'OpenGL version string:'",shell=True,stderr=subprocess.STDOUT,universal_newlines=True).split('\n')
# this should cover broken drivers down to Ubuntu 12.04 which shipped Mesa 8.0
if len(glx)==1 and re.match('.* Mesa (9\.[01]|8\.0)\..*',glx[0]):
print 'Intel GPU + Mesa < 9.2 detected, setting LIBGL_ALWAYS_SOFTWARE=1\n\t(use --quirks=0 to disable)'
os.environ['LIBGL_ALWAYS_SOFTWARE']='1'
except subprocess.CalledProcessError: pass # failed glxinfo call, such as when not installed
except subprocess.CalledProcessError: pass # failed lspci call...?!
if WIN:
# http://stackoverflow.com/questions/1447575/symlinks-on-windows/4388195#4388195
#
    # unfortunately symlinks are problematic under windows: creating them is a privilege which must be granted
# BUT the user must NOT be in the Administrators group?!
# http://superuser.com/questions/124679/how-do-i-create-an-mklink-in-windows-7-home-premium-as-a-regular-user
#
# for that reason, we use hardlinks (below), which are allowed to everybody
# Since this would break if files were not on the same partition, we copy _cxxInternal*.pyd
# to a tempdir first (see below). It will still fail on filesystems not supporting hardlinks
# (FAT probably)
def win_symlink(source,link_name):
import ctypes, os.path
csl=ctypes.windll.kernel32.CreateSymbolicLinkW
csl.argtypes=(ctypes.c_wchar_p, ctypes.c_wchar_p, ctypes.c_uint32)
csl.restype=ctypes.c_ubyte
flags=0
if source is not None and os.path.isdir(source): flags=1
if csl(link_name,source,flags)==0: raise ctypes.WinError()
def win_hardlink(source,link_name):
import ctypes
csl=ctypes.windll.kernel32.CreateHardLinkW
csl.argtypes=(ctypes.c_wchar_p, ctypes.c_wchar_p, ctypes.c_void_p)
csl.restype=ctypes.c_ubyte
if csl(link_name,source,None)==0: raise IOError('Hardlinking failed (files not on the same partition?)')
warnings.simplefilter('default')
if PY3K: warnings.simplefilter('ignore',ResourceWarning)
import minieigen
cxxInternalName='_cxxInternal'
if wooOptions.flavor: cxxInternalName+='_'+re.sub('[^a-zA-Z0-9_]','_',wooOptions.flavor)
if wooOptions.debug: cxxInternalName+='_debug'
try:
if not WIN:
_cxxInternal=__import__('woo.'+cxxInternalName,fromlist='woo')
else:
## on windows, copy _cxxInternal*.pyd to the tempdir first, so that we can hardlink to it later
## symlinks are unusable, as they require elevated process (??)
## it must be copied before it gets imported, so we create tempdir ourselves
## and pass it via WOO_TEMP to woo::Master ctor, which will just use it
import tempfile, pkgutil, imp, shutil
tmpdir=wooOsEnviron['WOO_TEMP']=tempfile.mkdtemp(prefix='woo-tmp-')
if not hasattr(sys,'frozen'):
loader=pkgutil.get_loader('woo.'+cxxInternalName)
if not loader: raise ImportError("Unable to get loader for module woo.%s"%cxxInternalName)
pydFile=loader.filename
else:
# frozen install should have full path in sys.argv[0]
pydFile=os.path.dirname(sys.argv[0])+'/woo.'+cxxInternalName+soSuffix
if not os.path.exists(pydFile): raise ImportError("Unable to locate loadable module for woo._cxxInternal in frozen installation: the file %s does not exist"%pydFile)
f=tmpdir+'/'+cxxInternalName+soSuffix
shutil.copy2(pydFile,f)
_cxxInternal=imp.load_dynamic('woo._cxxInternal',f)
pidfile=tmpdir+'/'+'pid'
except ImportError:
print 'Error importing woo.%s (--flavor=%s).'%(cxxInternalName,wooOptions.flavor if wooOptions.flavor else ' ')
#traceback.print_exc()
import glob
sos=glob.glob(re.sub('__init__.py$','',__file__)+'/_cxxInternal_*'+soSuffix)
flavs=[re.sub('(^.*/_cxxInternal_)(.*)(\\'+soSuffix+'$)',r'\2',so) for so in sos]
if sos:
maxFlav=max([len(flav) for flav in flavs])
print 'Available flavors are:'
for so,flav in zip(sos,flavs):
print '\t{0: <{1}}\t{2}'.format(flav,maxFlav,so)
raise
sys.modules['woo._cxxInternal']=_cxxInternal
cxxInternalFile=_cxxInternal.__file__
from . import core
master=core.Master.instance
from . import apiversion
if PY3K:
if sys.version_info<(3,4):
        print 'WARNING: in Python 3.x, importing only works properly in Python >= 3.4. Your version %s will most likely break right here.'%(sys.version)
# will only work when http://bugs.python.org/issue16421 is fixed (python 3.4??)
allSubmodules=set()
import imp
for mod in master.compiledPyModules:
if 'WOO_DEBUG' in os.environ: print 'Loading compiled module',mod,'from',cxxInternalFile
# this inserts the module to sys.modules automatically
m=imp.load_dynamic(mod,cxxInternalFile)
# now put the module where it belongs
mm=mod.split('.')
if mm[0]!='woo': print 'ERROR: non-woo module %s imported from the shared lib? Expect troubles.'%mod
elif len(mm)==2:
allSubmodules.add(mm[1])
globals()[mm[1]]=m
        else: setattr(eval('.'.join(mm[1:-1])),mm[-1],m)
else:
# WORKAROUND: create temporary symlinks
def hack_loadCompiledModulesViaLinks(compiledModDir,tryInAnotherTempdir=True):
allSubmodules=set()
import os,sys
if not os.path.exists(compiledModDir): os.mkdir(compiledModDir)
sys.path=[compiledModDir]+sys.path
        # move _customConverters to the start, so that imports relying on respective converters don't fail
# remove woo._cxxInternal since it is imported already
cpm=master.compiledPyModules
cc='woo._customConverters'
#assert cc in cpm # FIXME: temporarily disabled
## HACK: import _gts this way until it gets separated
cpm=[cc]+[m for m in cpm if m!=cc and m!='woo._cxxInternal']
# run imports now
for iMod,mod in enumerate(cpm):
modpath=mod.split('.')
linkName=os.path.join(compiledModDir,modpath[-1])+soSuffix # use just the last part to avoid hierarchy
if WIN:
try:
win_hardlink(os.path.abspath(cxxInternalFile),linkName)
except IOError:
sys.stderr.write('Creating hardlink failed - on Windows, _cxxInternal.pyd is copied to tempdir before being imported, so that hardlinks should be on the same partition. What\'s happening here? If you are using FAT filesystem, you are out of luck. With NTFS, hardlinks should work. Please report this error so that it can be fixed or worked around.\n')
raise
else: os.symlink(os.path.abspath(cxxInternalFile),linkName)
if 'WOO_DEBUG' in os.environ:
print 'Loading compiled module',mod,'from symlink',linkName
print 'modpath =',modpath
sys.stdout.flush()
try: sys.modules[mod]=__import__(modpath[-1])
except ImportError:
# compiled without GTS
if mod=='_gts' and False:
if 'WOO_DEBUG' in os.environ: print 'Loading compiled module _gts: _gts module probably not compiled-in (ImportError)'
pass
else: raise # otherwise it is serious
if len(modpath)==1: pass # nothing to do, adding to sys.modules is enough
if len(modpath)>=2: # subdmodule must be added to module
globals()[modpath[1]]=sys.modules['.'.join(modpath[:2])]
allSubmodules.add(modpath[1])
if len(modpath)>=3: # must be added to module and submodule
setattr(globals()[modpath[1]],modpath[2],sys.modules[mod])
if len(modpath)>=4:
raise RuntimeError('Module %s does not have 2 or 3 path items and will not be imported properly.'%mod)
sys.path=sys.path[1:] # remove temp dir from the path again
return allSubmodules
allSubmodules=hack_loadCompiledModulesViaLinks(master.tmpFilename()) # this will be a directory
config=sys.modules['woo.config']
if 'gts' in config.features:
if 'gts' in sys.modules: raise RuntimeError("Woo was compiled with woo.gts; do not import external gts module, they clash with each other.")
from . import gts
# so that it does not get imported twice
sys.modules['gts']=gts
import atexit, shutil, threading, glob
def exitCleanup(path):
#print 'Purging',path
shutil.rmtree(path)
if not WIN:
atexit.register(exitCleanup,master.tmpFileDir)
def cleanOldTemps(prefix,keep):
try: import psutil
except ImportError:
sys.stderr.write('Not cleaning old temps, since the psutil module is missing.\n')
return
for d in glob.glob(prefix+'/woo-tmp-*'):
if d==keep: continue
pidfile=d+'/pid'
if not os.path.exists(pidfile): continue
try:
with open(pidfile) as f: pid=int(f.readlines()[0][:-1])
if not psutil.pid_exists(pid):
sys.stderr.write('Purging old %s (pid=%d)\n'%(d,pid))
shutil.rmtree(d)
except: pass
threading.Thread(target=cleanOldTemps,args=(os.path.dirname(master.tmpFileDir),master.tmpFileDir)).start()
if wooOptions.ompThreads>1 or wooOptions.ompCores:
if 'openmp' not in config.features:
warnings.warn('--threads and --cores ignored, since compiled without OpenMP.')
elif master.numThreads!=wooOptions.ompThreads:
warnings.warn('--threads/--cores did not set number of OpenMP threads correctly (requested %d, current %d). Was OpenMP initialized in this process already?'%(wooOptions.ompThreads,master.numThreads))
elif master.numThreads>1:
if 'OMP_NUM_THREADS' in os.environ:
if master.numThreads!=int(os.environ['OMP_NUM_THREADS']): warnings.warn('OMP_NUM_THREADS==%s, but woo.master.numThreads==%d'%(os.environ['OMP_NUM_THREADS'],master.numThreads))
else:
warnings.warn('OpenMP is using %d cores without --threads/--cores being used - the default should be 1'%master.numThreads)
if wooOptions.clDev:
if 'opencl' in config.features:
if wooOptions.clDev:
try:
clDev=[int(a) for a in wooOptions.clDev.split(',')]
if len(clDev)==1: clDev.append(-1) # default device
if not len(clDev) in (1,2): raise ValueError()
except (IndexError, ValueError, AssertionError):
                raise ValueError('Invalid --cl-dev specification %s, should be an integer (platform) or a comma-separated couple (platform,device) of integers'%wooOptions.clDev)
master.defaultClDev=clDev
else: warnings.warn("--cl-dev ignored, since compiled without OpenCL.")
if not PY3K:
from . import _customConverters
__all__=['master']+list(allSubmodules)
from . import system
system.setExitHandlers()
from minieigen import *
from . import _monkey
from . import _units
unit=_units.unit # allow woo.unit['mm']
from . import pyderived
from . import apiversion
try:
# don't import at all if rebuilding (rebuild might fail)
if wooOptions.rebuilding: raise ImportError
import wooExtra
import pkgutil, zipimport
extrasLoaded=[]
for importer, modname, ispkg in pkgutil.iter_modules(wooExtra.__path__):
try:
m=__import__('wooExtra.'+modname,fromlist='wooExtra')
extrasLoaded.append(modname)
if hasattr(sys,'frozen') and not hasattr(m,'__loader__') and len(m.__path__)==1:
zip=m.__path__[0].split('/wooExtra/')[0].split('\\wooExtra\\')[0]
if not (zip.endswith('.zip') or zip.endswith('.egg')):
print 'wooExtra.%s: not a .zip or .egg, no __loader__ set (%s)'%(modname,zip)
else:
print 'wooExtra.%s: setting __loader__ and __file__'%modname
m.__loader__=zipimport.zipimporter(zip)
m.__file__=os.path.join(m.__path__[0],os.path.basename(m.__file__))
except ImportError:
sys.stderr.write('ERROR importing wooExtra.%s:'%modname)
raise
# disable informative message if plain import into python script
if sys.argv[0].split('/')[-1].startswith('woo'): sys.stderr.write('wooExtra modules loaded: %s.\n'%(', '.join(extrasLoaded)))
except ImportError:
# no wooExtra packages are installed
pass
|
import os
import numpy as np
import matplotlib.pyplot as plt
import pylibconfig2
from ergoPack import ergoPlot
configFile = '../cfg/transferCZ.cfg'
cfg = pylibconfig2.Config()
cfg.read_file(configFile)
fileFormat = cfg.general.fileFormat
second_to_year = 1 / (60 * 60 * 24 * 365)
time_dim = cfg.units.L / cfg.units.c0 * second_to_year
cases = {'Deterministic': 0., 'Stochastic': 0.01}
field_h = (1, 'H', 'h', 'm')
field_T = (2, 'SST', 'T', r'$^\circ C$')
field_u_A = (3, 'wind stress due to coupling', 'u_A', 'm/s')
field_taux = (4, 'external wind-stress', 'taux', 'm/s')
nino3 = ('E', 'nino3')
nino4 = ('W', 'nino4')
indicesName = []
fieldsDef = []
dimObs = len(cfg.caseDef.indicesName)
for d in np.arange(dimObs):
if cfg.caseDef.indicesName[d] == 'nino3':
indicesName.append(nino3)
elif cfg.caseDef.indicesName[d] == 'nino4':
indicesName.append(nino4)
if cfg.caseDef.fieldsName[d] == 'T':
fieldsDef.append(field_T)
if cfg.caseDef.fieldsName[d] == 'h':
fieldsDef.append(field_h)
compName0 = '%s%s' % (indicesName[0][0], fieldsDef[0][1])
ev_xlabel = '%s (%s)' % (compName0, fieldsDef[0][3])
compName1 = '%s%s' % (indicesName[1][0], fieldsDef[1][1])
ev_ylabel = '%s (%s)' % (compName1, fieldsDef[1][3])
units = {'T': cfg.units.delta_T, 'h': cfg.units.H}
tss = {}
times = {}
for case, eps in cases.items():
dataDir = 'zc_1eof_mu{:04d}_eps{:04d}_seed0'.format(
int(cfg.caseDef.mu * 1000 + 0.1), int(eps * 1000 + 0.1))
indicesDir = cfg.general.indicesDir
xorbit = []
t = []
for idxName, fieldName in zip(
cfg.caseDef.indicesName, cfg.caseDef.fieldsName):
filePath = os.path.join(indicesDir, dataDir, idxName + '.txt')
data = np.loadtxt(filePath)
xorbit.append(np.expand_dims(data[:, 1] * units[fieldName], axis=1))
nt = np.min([xo.shape[0] for xo in xorbit])
tss[case] = np.concatenate([xo[:nt] for xo in xorbit], axis=1)
times[case] = data[:nt, 0] * time_dim
srcPostfix = "%s%s_mu%04d_eps%04d" % (cfg.caseDef.prefix, cfg.caseDef.simType,
np.round(cfg.caseDef.mu * 1000, 1),
np.round(cfg.caseDef.eps * 1000, 1))
obsName = ''
for d in np.arange(dimObs):
obsName += '_%s_%s' % (fieldsDef[d][2], indicesName[d][1])
dstPostfix = '_%s%s' % (srcPostfix, obsName)
fig = plt.figure()
ax = fig.add_subplot(111)
ax2 = ax.twinx()
itmax = 10000
lss = ['-', '--']
colors = [c['color'] for c in plt.rcParams['axes.prop_cycle']]
for k, case in enumerate(tss):
time, ts = times[case], tss[case]
ax.plot(time[:itmax], ts[:itmax, 0], linestyle=lss[k],
color=colors[0], label=case)
ax2.plot(time[:itmax], ts[:itmax, 1], linestyle=lss[k],
color=colors[1], label=case)
ax.set_xlabel('time (y)')
ax.set_ylabel(ev_xlabel)
ax2.set_ylabel(ev_ylabel)
ax.legend(loc='best')
if cfg.caseDef.mu >= 2.9:
xticks = np.arange(25., 28.6, 0.5)
time_slices = [slice(nt - 1000, nt), slice(nt-1000, nt)]
lws = [2, 2]
else:
xticks = np.arange(25., 27.1, 0.5)
time_slices = [slice(1000, nt), slice(nt-2000, nt)]
lws = [1, 2]
fig = plt.figure()
ax = fig.add_subplot(111)
lss = ['-', '--']
colors = [c['color'] for c in plt.rcParams['axes.prop_cycle']]
for k, case in enumerate(tss):
time, ts = times[case], tss[case]
time_slice = time_slices[k]
ax.plot(ts[time_slice, 0], ts[time_slice, 1], linestyle=lss[k],
linewidth=lws[k], color=colors[k], label=case)
ax.set_xticks(xticks)
ax.set_xlabel(ev_xlabel, fontsize=ergoPlot.fs_xlabel)
ax.set_ylabel(ev_ylabel, fontsize=ergoPlot.fs_ylabel)
plt.setp(ax.get_xticklabels(), fontsize=ergoPlot.fs_xticklabels)
plt.setp(ax.get_yticklabels(), fontsize=ergoPlot.fs_yticklabels)
ax.legend(loc='upper left', fontsize=ergoPlot.fs_legend_labels)
series_dir = os.path.join(cfg.general.plotDir, 'series')
os.makedirs(series_dir, exist_ok=True)
dstFile = os.path.join(
series_dir, 'series{}.{}'.format(dstPostfix, ergoPlot.figFormat))
fig.savefig(dstFile, bbox_inches=ergoPlot.bbox_inches,
dpi=ergoPlot.dpi)
|
"""`main` is the top level module for the Flask application."""
import json
from experiment_datastore_google import AdminDatastore, ClientDatastore, IteratedClientDatastore
from custom_exceptions import DuplicateEntryError, ResourceError, DataFormatError
cfg_file = open('server.cfg')
cfg = json.load(cfg_file)
cfg_file.close()
admin_datastore = AdminDatastore()
client_datastore = ClientDatastore()
iterated_client_datastore = IteratedClientDatastore()
from flask import Flask, jsonify, abort, request, make_response, url_for, render_template
from flask.ext.httpauth import HTTPBasicAuth
app = Flask(__name__, static_url_path = "")
auth = HTTPBasicAuth()
dashauth = HTTPBasicAuth()
@app.route('/psycloud/admin/api/experiments/<experiment_id>',
methods=['DELETE'])
@auth.login_required
def remove_experiment(experiment_id):
try:
admin_datastore.remove_experiment(experiment_id)
return valid_request('deleted experiment', experiment_id)
except Exception, e:
raise
return bad_request(str(e))
@app.route('/psycloud/admin/api/experiments/<experiment_id>/data',
methods=['GET'])
@auth.login_required
def get_experiment_data(experiment_id):
status_filter = None
args = request.args
if 'status' in args:
status_filter = args['status']
try:
participant_list = admin_datastore.get_data(experiment_id, status_filter=status_filter)
return valid_request('participants', participant_list)
except Exception, e:
raise
return bad_request(str(e))
def create_iterated_experiment(data):
experiment_name = data['experiment_name']
num_participants = data['num_participants']
config = data['config']
try:
experiment_key = admin_datastore.create_iterated_experiment(experiment_name,
num_participants, config)
return valid_request('experiment_id', experiment_key.urlsafe())
except Exception, e:
raise
return bad_request(str(e))
@app.route('/psycloud/admin/api/experiments',
methods=['POST'])
@auth.login_required
def create_experiment():
data = request.get_json()
if 'experiment_type' in data:
if data['experiment_type'] == 'iterated':
# This is an iterated experiment
return create_iterated_experiment(data)
experiment_name = data['experiment_name']
try:
if 'participants' in data:
# the data contains a participant list
participant_list = data['participants']
experiment_key = admin_datastore.create_experiment_with_participants(experiment_name, participant_list)
else:
# the data does not contain a participant list
# Creating an empty experiment where participants have no stimuli
num_participants = data['num_participants']
max_number_stimuli = data['max_number_stimuli']
experiment_key = admin_datastore.create_experiment(experiment_name,
num_participants, max_number_stimuli)
except Exception, e:
raise
return bad_request(str(e))
else:
return valid_request('experiment_id', experiment_key.urlsafe())
@app.route('/psycloud/admin/api/experiments',
methods=['GET'])
@auth.login_required
def get_experiment_list():
try:
experiment_list = admin_datastore.get_experiments()
return valid_request('experiments', experiment_list)
except Exception, e:
raise
return bad_request(str(e))
@app.route('/psycloud/admin/api/experiments/<experiment_id>',
methods=['GET'])
@auth.login_required
def get_experiment(experiment_id):
try:
        experiment = admin_datastore.get_experiments(experiment_id=experiment_id)
return valid_request('experiment', experiment)
except Exception, e:
raise
return bad_request(str(e))
@app.route('/psycloud/admin/api/experiments/<experiment_id>',
methods=['PUT'])
@auth.login_required
def modify_experiment(experiment_id):
pass
@app.route('/psycloud/admin/api/experiments/<experiment_id>/participants',
methods=['GET'])
@auth.login_required
def get_participant_list(experiment_id):
keys_only = False
status_filter = None
args = request.args
if 'keys_only' in args:
        # bool('false') would still be True, so parse the flag explicitly
        keys_only = args['keys_only'].lower() in ('1', 'true', 'yes')
if 'status' in args:
status_filter = args['status']
try:
participant_list = admin_datastore.get_participants(experiment_id, keys_only=keys_only, status_filter=status_filter)
return valid_request('participants', participant_list)
except Exception, e:
raise
return bad_request(str(e))
@app.route('/psycloud/admin/api/experiments/<experiment_id>/participants',
methods=['POST'])
@auth.login_required
def save_participant_list(experiment_id):
pass
@app.route('/psycloud/admin/api/experiments/<experiment_id>/participants/<participant_index>',
methods=['POST'])
@auth.login_required
def save_participant(experiment_id, participant_index):
pass
@app.route('/psycloud/admin/api/experiments/<experiment_id>/participants/<participant_id>',
methods=['PUT'])
@auth.login_required
def modify_participant(experiment_id, participant_id):
pass
@app.route('/psycloud/admin/api/experiments/<experiment_id>/coupons',
methods=['POST'])
@auth.login_required
def save_coupons(experiment_id):
data = request.get_json()
try:
coupon_list = admin_datastore.save_coupons(experiment_id, data)
return valid_request('coupons', coupon_list)
except Exception, e:
raise
return bad_request(str(e))
@app.route('/psycloud/admin/api/experiments/<experiment_id>/coupons',
methods=['GET'])
@auth.login_required
def get_coupons(experiment_id):
try:
coupon_list = admin_datastore.get_coupons(experiment_id)
return valid_request('coupons', coupon_list)
except Exception, e:
raise
return bad_request(str(e))
@app.route('/psycloud/admin/dashboard', methods=['GET'])
@dashauth.login_required
def dashboard_main():
try:
experiment_list = admin_datastore.get_experiments(include_participant_counts=True)
exps = []
for exp in experiment_list:
exps.append({'name': exp['experiment_name'], 'id':exp['id'], 'type':exp['experiment_type'],
'num_available': exp['num_available'],
'num_active': exp['num_active'],
'num_completed': exp['num_completed'],
'num_participants': exp['num_participants']})
return render_template('main_dashboard.html', params=exps)
except Exception, e:
raise
return bad_request(str(e))
@app.route('/psycloud/admin/dashboard/experiment/<exp_id>', methods=['GET'])
@dashauth.login_required
def dashboard_view_experiment(exp_id):
try:
experiment = admin_datastore.get_experiments(experiment_id=exp_id)
return jsonify(experiment)
except Exception, e:
raise
return bad_request(str(e))
def dashboard_view_participant_list(exp_id, status='COMPLETED'):
templates = {'ACTIVE': 'active_dash.html', 'COMPLETED': 'completed_dash.html'}
try:
participant_list = admin_datastore.get_participants(exp_id, keys_only=False, status_filter=status)
return render_template(templates[status], exp_id=exp_id, subs=participant_list)
except Exception, e:
raise
return bad_request(str(e))
@app.route('/psycloud/admin/dashboard/experiment/<exp_id>/active', methods=['GET'])
@dashauth.login_required
def dashboard_view_active(exp_id):
return dashboard_view_participant_list(exp_id, status='ACTIVE')
@app.route('/psycloud/admin/dashboard/experiment/<exp_id>/completed', methods=['GET'])
@dashauth.login_required
def dashboard_view_completed(exp_id):
return dashboard_view_participant_list(exp_id, status='COMPLETED')
@app.route('/psycloud/admin/dashboard/participant/<uid>', methods=['GET'])
@dashauth.login_required
def dashboard_view_participant(uid):
try:
participant = client_datastore.get_participant(uid)
participant['stimuli'] = client_datastore.get_stimuli(uid)
participant['responses'] = client_datastore.get_responses(uid)
return jsonify(participant)
except Exception, e:
raise
return bad_request(str(e))
@app.route('/psycloud/admin/dashboard/experiment/<exp_id>/completed/download_data', methods=['GET'])
@dashauth.login_required
def dashboard_download_completed_participant_data(exp_id):
try:
completed_participant_list = admin_datastore.get_data(exp_id, status_filter='COMPLETED')
return jsonify({'participants': completed_participant_list})
except Exception, e:
raise
return bad_request(str(e))
@app.route('/psycloud/api/participant/',
methods=['POST'])
def register_participant():
    '''
    Register a new participant.
    Assumes the json input contains an experiment_id and an optional registration_coupon.
    Returns a participant_short_id if successful.
    Returns an error if the registration coupon is already registered or the experiment_id is not found.
    The registration coupon might be a mechanical turk id, for example.
    '''
data = request.get_json()
experiment_id = data['experiment_id']
if 'registration_coupon' in data:
registration_coupon = data['registration_coupon']
else:
registration_coupon = None
try:
short_id = client_datastore.register(experiment_id, registration_coupon=registration_coupon)
return valid_request('participant_id', short_id)
except DuplicateEntryError:
abort(409)
except ResourceError:
abort(410)
except Exception, e:
raise # this is for debugging purposes
return bad_request(str(e))
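# Illustrative client-side sketch (hypothetical host and values; uses the
# third-party 'requests' package, which this server does not itself need):
#
#   import requests
#   r = requests.post('http://HOST/psycloud/api/participant/',
#                     json={'experiment_id': 'abc123',
#                           'registration_coupon': 'MTURK_WORKER_ID'})
#   # 200 -> {'result': {'participant_id': ...}}, 409 -> duplicate coupon,
#   # 410 -> experiment full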
@app.route('/psycloud/api/participant/<participant_id>/stimuli/',
methods=['GET'])
def get_stimuli_list(participant_id):
'''Retrieves a list of stimuli.'''
try:
stimulus_list = client_datastore.get_stimuli(participant_id)
return valid_request('stimuli', stimulus_list)
except Exception, e:
raise
return bad_request(str(e))
@app.route('/psycloud/api/participant/<participant_id>/stimuli/',
methods=['POST'])
def save_stimuli_list(participant_id):
'''Saves a list of stimuli.'''
stimuli_to_save = request.get_json()
try:
saved_stimuli = client_datastore.save_stimuli(participant_id, stimuli_to_save)
return valid_request('stimuli', saved_stimuli)
except Exception, e:
raise
return bad_request(str(e))
@app.route('/psycloud/api/participant/<participant_id>/stimuli/<int:stimulus_number>',
methods=['GET'])
def get_stimulus_by_number(participant_id, stimulus_number):
'''Retrieve a specific stimulus'''
try:
stimulus_list = client_datastore.get_stimuli(participant_id, stimulus_number=stimulus_number)
return valid_request('stimulus', stimulus_list[0])
except Exception, e:
raise
return bad_request(str(e))
@app.route('/psycloud/api/participant/<participant_id>/stimuli/<int:stimulus_number>',
methods=['POST'])
def save_stimulus_by_number(participant_id, stimulus_number):
'''Save a specific stimulus'''
stimulus_to_save = request.get_json()
stimulus_to_save['stimulus_index'] = stimulus_number
try:
saved_stimuli = client_datastore.save_stimuli(participant_id, [stimulus_to_save])
return valid_request('stimulus', saved_stimuli[0])
except Exception, e:
raise
return bad_request(str(e))
@app.route('/psycloud/api/participant/<participant_id>/stimuli/max_count/',
methods=['GET'])
def get_stimuli_max_count(participant_id):
'''Returns the maximum number of stimuli that are allowed.'''
try:
max_count = client_datastore.get_max_number_stimuli(participant_id)
return valid_request('max_count', max_count)
except Exception, e:
raise
return bad_request(str(e))
@app.route('/psycloud/api/participant/<participant_id>/responses/',
methods=['GET'])
def get_response_list(participant_id):
'''Retrieve a list of responses'''
try:
response_list = client_datastore.get_responses(participant_id)
return valid_request('responses', response_list)
except Exception, e:
raise
return bad_request(str(e))
@app.route('/psycloud/api/participant/<participant_id>/responses/',
methods=['POST'])
def save_response_list(participant_id):
'''Save a list of responses'''
responses_to_save = request.get_json()
try:
saved_responses = client_datastore.save_responses(participant_id, responses_to_save)
return valid_request('responses', saved_responses)
except Exception, e:
raise
return bad_request(str(e))
@app.route('/psycloud/api/participant/<participant_id>/responses/<int:stimulus_number>',
methods=['GET'])
def get_response(participant_id, stimulus_number):
'''Retrieve a specific response'''
try:
response_list = client_datastore.get_responses(participant_id, stimulus_number=stimulus_number)
return valid_request('response', response_list[0])
except Exception, e:
raise
return bad_request(str(e))
@app.route('/psycloud/api/participant/<participant_id>/responses/<int:stimulus_number>',
methods=['POST'])
def save_response(participant_id, stimulus_number):
'''Save a specific response'''
response_to_save = request.get_json()
response_to_save['stimulus_index'] = stimulus_number
try:
saved_responses = client_datastore.save_responses(participant_id, [response_to_save])
return valid_request('response', saved_responses[0])
except Exception, e:
raise
return bad_request(str(e))
@app.route('/psycloud/api/participant/<participant_id>/stimuli/current/',
methods=['GET'])
def get_current_stimulus_index(participant_id):
'''Returns the current stimulus number.'''
try:
stimulus_index = client_datastore.get_current_stimulus(participant_id)
return valid_request('stimulus_index', stimulus_index)
except Exception, e:
raise
return bad_request(str(e))
@app.route('/psycloud/api/participant/<participant_id>/stimuli/current/',
methods=['PUT'])
def set_current_stimulus_index(participant_id):
'''Sets the current stimulus number.'''
data = request.get_json()
stimulus_index = data['stimulus_index']
try:
client_datastore.set_current_stimulus(participant_id, stimulus_index)
return valid_request('stimulus_index', stimulus_index)
except Exception, e:
raise
return bad_request(str(e))
@app.route('/psycloud/api/participant/<participant_id>/current_status/',
methods=['GET'])
def get_participant_status(participant_id):
'''Returns the participant status.'''
try:
current_status = client_datastore.get_status(participant_id)
return valid_request('current_status', current_status)
except Exception, e:
raise
return bad_request(str(e))
@app.route('/psycloud/api/participant/<participant_id>/current_status/',
methods=['PUT'])
def set_participant_status(participant_id):
'''Sets the participant status.'''
data = request.get_json()
current_status = data['current_status']
try:
client_datastore.set_status(participant_id, current_status)
return valid_request('current_status', current_status)
except Exception, e:
raise
return bad_request(str(e))
@app.route('/psycloud/api/participant/<participant_id>/details/',
methods=['GET'])
def get_participant_details(participant_id):
'''Returns the participant details.'''
try:
details = client_datastore.get_details(participant_id)
return valid_request('details', details)
except Exception, e:
raise
return bad_request(str(e))
@app.route('/psycloud/api/participant/<participant_id>/details/',
methods=['PUT'])
def set_participant_details(participant_id):
'''Sets the participant details.'''
data = request.get_json()
details = data['details']
try:
client_datastore.set_details(participant_id, details)
return valid_request('details', details)
except Exception, e:
raise
return bad_request(str(e))
@app.route('/psycloud/api/participant/<participant_id>/confirmation_code/',
methods=['GET'])
def get_confirmation_code(participant_id):
'''Returns the participant confirmation code.'''
try:
confirmation_code = client_datastore.get_confirmation_code(participant_id)
return valid_request('confirmation_code', confirmation_code)
except Exception, e:
raise
return bad_request(str(e))
@app.route('/psycloud/api/participant/<participant_id>/chain_types/',
methods=['GET'])
def get_iterated_chain_types(participant_id):
'''Returns the available iterated chain types for the participant'''
try:
chain_types = iterated_client_datastore.get_chain_types(participant_id)
return valid_request('chain_types', chain_types)
except Exception, e:
raise
return bad_request(str(e))
@app.route('/psycloud/api/participant/<participant_id>/chain/<chain_type>/',
methods=['GET'])
def get_sample_from_chain(participant_id, chain_type):
'''Returns a sample from the chain specified by chain_type'''
try:
sample = iterated_client_datastore.get_sample_from_chain(participant_id, chain_type)
return valid_request('sample', sample)
except Exception, e:
raise
return bad_request(str(e))
@app.route('/psycloud/api/participant/<participant_id>/chain/<chain_type>/',
methods=['POST'])
def save_sample_to_chain(participant_id, chain_type):
'''Saves a new sample from the chain specified by chain_type'''
new_sample = request.get_json()
try:
saved_sample = iterated_client_datastore.save_sample_to_chain(participant_id,
chain_type, new_sample)
return valid_request('sample', saved_sample)
except Exception, e:
raise
return bad_request(str(e))
@app.route('/experiment/<exp_kind>/<exp_short_id>', methods=['GET'])
def experiment_start(exp_kind, exp_short_id):
'''Serves up an experiment'''
return render_template('experiments/%s/index.html' % exp_kind, expId=exp_short_id, expKind=exp_kind)
@auth.get_password
def get_password(username):
if username == cfg['adminuser']:
return cfg['adminpass']
return None
@dashauth.get_password
# use a distinct name so this does not shadow the admin handler above
def get_dash_password(username):
if username == cfg['dashuser']:
return cfg['dashpass']
return None
@auth.error_handler
def unauthorized():
return make_response(jsonify( { 'status':403, 'message': 'Unauthorized' } ), 403)
# return 403 instead of 401 to prevent browsers from displaying the default auth dialog
def valid_request(kind_of_data, data):
return jsonify({'status':200, 'message':'OK', 'result':{kind_of_data:data}}), 200
def bad_request(e):
return jsonify( {'status':400, 'message':'Bad Request', 'result':e}), 400
@app.errorhandler(404)
def page_not_found(e):
return jsonify( {'status':404, 'message':'Not Found'}), 404
@app.errorhandler(409)
def duplicate_registration(e):
    return jsonify( {'status':409, 'message':'Duplicate registration.'}), 409
@app.errorhandler(410)
def experiment_full(e):
return jsonify( {'status':410, 'message':'Experiment full.'}), 410
@app.errorhandler(500)
def unexpected_error(e):
# return 'Sorry, unexpected error: {}'.format(e), 500
return jsonify( {'status':500, 'message':'Unexpected Error', 'result':e}), 500
|
'''Pychemqt, Chemical Engineering Process simulator
Copyright (C) 2009-2017, Juan José Gómez Romera <jjgomera@gmail.com>
This program is free software: you can redistribute it and/or modify
it under the terms of the GNU General Public License as published by
the Free Software Foundation, either version 3 of the License, or
(at your option) any later version.
This program is distributed in the hope that it will be useful,
but WITHOUT ANY WARRANTY; without even the implied warranty of
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
GNU General Public License for more details.
You should have received a copy of the GNU General Public License
along with this program. If not, see <http://www.gnu.org/licenses/>.'''
from lib import unidades
from lib.meos import MEoS
class D5(MEoS):
"""Multiparameter equation of state for decamethylcyclopentasiloxane"""
name = "decamethylcyclopentasiloxane"
CASNumber = "541-02-6"
formula = "C10H30O5Si5"
synonym = "D5"
_refPropName = "D5"
_coolPropName = "D5"
rhoc = unidades.Density(292.570762680819)
Tc = unidades.Temperature(619.23462341)
Pc = unidades.Pressure(1161.46, "kPa")
M = 370.7697 # g/mol
Tt = unidades.Temperature(226.0)
Tb = unidades.Temperature(484.05)
f_acent = 0.658
momentoDipolar = unidades.DipoleMoment(1.349, "Debye")
# id=1671
f = 8.314472
CP1 = {"ao": -34.898/f,
"an": [1861.5e-3/f, -1403.4e-6/f, 500.0e-9/f],
"pow": [1, 2, 3]}
colonna = {
"__type__": "Helmholtz",
"__name__": "Helmholtz equation of state for hexamethyldisiloxane of "
"Colonna (2006).",
"__doi__": {"autor": "Colonna, P., Nannan, N.R., Guardone, A., "
"Lemmon, E.W.",
"title": "Multiparameter Equations of State for Selected "
"Siloxanes",
"ref": "Fluid Phase Equilibria, 244:193-211, 2006.",
"doi": "10.1016/j.fluid.2006.04.015"},
"R": 8.314472,
"cp": CP1,
"ref": "NBP",
"Tmin": 300, "Tmax": 673.0, "Pmax": 30000.0, "rhomax": 2.83,
"nr1": [1.40844725, -2.29248044, 0.42851607, -0.73506382, 0.16103808,
0.29643278e-3],
"d1": [1, 1, 1, 2, 3, 7],
"t1": [0.25, 1.125, 1.5, 1.375, 0.25, 0.875],
"nr2": [0.82412481, 0.15214274, -0.68495890, -0.55703624e-1,
0.13055391e-1, -0.31853761e-1],
"d2": [2, 5, 1, 4, 3, 4],
"t2": [0.625, 1.75, 3.625, 3.625, 14.5, 12.0],
"c2": [1, 1, 2, 2, 3, 3],
"gamma2": [1]*6}
eq = colonna,
_vapor_Pressure = {
"eq": 3,
"n": [-0.99967e1, 0.70091e1, -0.72265e1, -0.62938e1],
"t": [1.0, 1.5, 1.87, 3.8]}
_liquid_Density = {
"eq": 1,
"n": [0.303988e3, -0.110342e4, 0.134359e4, -0.705243e3, 0.164540e3],
"t": [0.57, 0.65, 0.73, 0.84, 0.96]}
_vapor_Density = {
"eq": 2,
"n": [-0.37577e1, -0.47669e1, -0.24233e2, -0.29872e3, 0.34441e3,
-0.32498e3],
"t": [0.459, 1.02, 2.6, 6.7, 7.7, 11.0]}
|
import pytest
import sys
import os
DOSSIER_COURANT = os.path.dirname(os.path.abspath(__file__))
DOSSIER_PARENT = os.path.dirname(DOSSIER_COURANT)
sys.path.append(DOSSIER_PARENT)
from vespa.ho import HO
from vespa.ho_ph import HO_PH
@pytest.fixture(scope='module')
def ho_ph_instance():
v = HO_PH('testnode', "127.0.0.1", 1337, None, run=False)
return v
def test_instance_ho_ph(ho_ph_instance):
assert isinstance(ho_ph_instance, HO)
def test_ho_ph_attributes(ho_ph_instance):
    assert ho_ph_instance.have_backend is False
def test_ho_ph_send(ho_ph_instance):
assert ho_ph_instance.send("test") == ['help#']
def test_ninja_method(ho_ph_instance):
    # smoke test: the call should simply not raise
    ho_ph_instance.ninjaMethod()
|
"""
WSGI config for buttsworth project.
It exposes the WSGI callable as a module-level variable named ``application``.
For more information on this file, see
https://docs.djangoproject.com/en/1.8/howto/deployment/wsgi/
buttsworth : a django chatterbot for David Buttsworth
Copyright (C) 2015 Gregory Martin
@yro 12.2015
"""
import os
from django.core.wsgi import get_wsgi_application
os.environ.setdefault("DJANGO_SETTINGS_MODULE", "buttsworth.settings")
application = get_wsgi_application()
|
from pandas import DataFrame, read_csv
from sklearn import linear_model
from math import exp
data = read_csv('Illumina_normalized_scaled.txt', header = 0, index_col = 0, sep = '\t')
data = data.T
oligos = ['ILMN_2154115', 'ILMN_1755115', 'ILMN_1661537', 'ILMN_1711516', 'ILMN_1767281', 'ILMN_1779813', 'ILMN_1699100', 'ILMN_1715393', 'ILMN_1789387', 'ILMN_1812877']
data = data[oligos]
model = read_csv('model_parameters_illumina.txt', header = 0, index_col = 0, sep = '\t')
patients = data.index.values
patients = patients.tolist()
predictions = []
intercept = model.ix['Intercept:']
# the model coefficients do not depend on the patient, so look them up once
# instead of re-appending them to an ever-growing list on every iteration
coefs = [model.ix[i] for i in oligos]
for patient in patients:
    patientvalues = data.ix[patient]
    values = [patientvalues[i] for i in oligos]
    # linear predictor: intercept + sum of coefficient * expression value
    value = 0
    for coef, val in zip(coefs, values):
        value += coef * val
    # logistic link turns the linear predictor into a probability
    prediction = 1 / (1 + exp(-(intercept + value)))
    print('Patient:', patient, 'Prediction:', prediction)
    predictions.append(str(prediction))
predictions = ','.join(predictions)
f = open('predictions_list_illumina.txt', 'w')
f.write(predictions)
f.close()
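# Equivalent vectorized computation (illustrative sketch, assuming the same
# 'data' and 'model' frames as above and a single coefficient column):
#
#   import numpy as np
#   beta = model.loc[oligos].squeeze()
#   logit = float(model.loc['Intercept:']) + data[oligos].dot(beta)
#   probs = 1.0 / (1.0 + np.exp(-logit))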
|
"""
.. inheritance-diagram:: pyopus.parallel.base
:parts: 1
**Base classes for virtual machines (PyOPUS subsystem name: VM)**
A **spawner task** is a task that can spawn new tasks on hosts in the virtual
machine. All other tasks are **worker tasks**.
**Mirroring and local storage**
Often tasks in the virtual machine require the use of additional storage (like
a harddisk) for storing the parallel algorithm's input files which are
identical for all hosts. These files must therefore be distributed to all
hosts in a virtual machine before the computation can begin.
One way to make all virtual machines see the same directory structure is to
use network filesystems like NFS or SMBFS. In this approach the storage
physically resides one a computer in the network (file server). This storage
is exported and mounted by all hosts in the virtual machine.
Suppose, for instance, that the folder ``/shareme/foo`` on the file server is exported.
Hosts in the virtual machine mount this folder under ``/home``. This way all
hosts see the ``/shareme/foo`` folder located on the physical storage of the
server as their own folder ``/home``.
This approach is very simple and ensures that all hosts see the same input
files in the same place. Unfortunately, as the number of hosts and the volume
of read operations on the shared folder grow, the network quickly
becomes saturated and the computational performance of the virtual machine
dramatically decreases because tasks must wait on slow data read operations.
A more scalable solution is to use the local storage of every host to store
the algorithm's input files. This has the downside that before the computation
begins these files must be distributed to all hosts.
On the other hand parallel algorithms often require some local storage for
storing various intermediate files. If this local storage is in the form of a
shared folder an additional problem occurs when multiple hosts try to write a
file with the same name to a physically same place, but with different content.
A solution to the intermediate file problem is to use every host's physically
local storage (e.g. its local harddisk) for storing the intermediate files.
The solution to these problems in PyOPUS is to use mirroring. Mirroring is
configured through two environmental variables. ``PARALLEL_LOCAL_STORAGE``
specifies the path to the folder where the folders for the local storage of
input and intermediate files will be created. The ``PARALLEL_MIRRORED_STORAGE``
environmental variable specifies a colon (``:``) separated list of paths to the
directories that are mounted from a common file server. The first such path in
``PARALLEL_MIRRORED_STORAGE`` on all hosts corresponds to the same directory
on the file server. The same goes for the second, the third, etc. paths listed
in ``PARALLEL_MIRRORED_STORAGE``.
The ``PARALLEL_LOCAL_STORAGE`` and the ``PARALLEL_MIRRORED_STORAGE``
environmental variables must be set on all hosts in a virtual machine.
``PARALLEL_LOCAL_STORAGE`` is usually set to ``/localhome/USERNAME``, which
must physically reside on the local machine and should be writeable by the
user that is running the spawned processes.
Both ``PARALLEL_LOCAL_STORAGE`` and ``PARALLEL_MIRRORED_STORAGE`` can use UNIX
style user home directory expansion (e.g. ``~user`` expands to ``/home/user``).
**Path translation in mirroring operations**
Suppose the ``PARALLEL_MIRRORED_STORAGE`` environmental variable is set to
``/foo:/bar`` on host1 and ``/d1:/d2`` on host2. This means that the following
directories are equivalent (mounted from the same exported folder on a file
server)
======= =======
host1 host2
======= =======
/foo /d1
/bar /d2
======= =======
So ``/foo`` on host1 represents the same physical storage as ``/d1`` on host2.
Similarly ``/bar`` on host1 represents the same physical storage as ``/d2`` on
host2. Usually only ``/home`` is common to all hosts in a virtual machine so
``PARALLEL_MIRRORED_STORAGE`` is set to ``/home`` on all hosts.
Path translation converts a path on host1 into a path to the physically
same object (mounted from the same exported directory on the file
server) on host2. The following is an example of path translation.
============= =============
Path on host1 Path on host2
============= =============
/foo/a /d1/a
/bar/b /d2/b
============= =============
"""
from inspect import ismethod, isclass
from ..misc.debug import DbgMsg, DbgMsgOut
from ..misc.env import environ
from glob import iglob
import os, sys, shutil, time
__all__ = [ 'TaskID', 'HostID', 'Msg', 'MsgTaskExit', 'MsgHostDelete', 'MsgHostAdd',
'MsgTaskResult', 'VirtualMachine', 'getNumberOfCores' ]
firstVM=None
class TaskID(object):
"""
Basic task identifier class that is used for identifying a task in a
virtual machine. :class:`TaskID` objects must support comparison
(:func:`__cmp__`), hashing (:func:`__hash__`), and conversion to a string
(:func:`__str__`).
"""
@staticmethod
def bad():
"""
A static member function. Called with ``TaskID.bad()``.
Returns an invalid task ID.
"""
return TaskID()
def __init__(self):
pass
def __cmp__(self, other):
if type(self)==type(other):
return 0
elif type(self)<type(other):
return -1
else:
return 1
def __hash__(self):
return 0
def __str__(self):
return "NOTASK"
def valid(self):
"""
Returns ``True`` if this :class:`TaskID` object is valid.
"""
return False
class HostID(object):
"""
Basic host identifier class that is used for identifying a host in a
virtual machine. :class:`HostID` objects must support comparison
(:func:`__cmp__`), hashing (:func:`__hash__`), and conversion to a string
(:func:`__str__`).
"""
@staticmethod
def bad():
"""
A static member function. Called with ``HostID.bad()``.
Returns an invalid host ID.
"""
		return HostID()
def __init__(self):
pass
def __cmp__(self, other):
if type(self)==type(other):
return 0
elif type(self)<type(other):
return -1
else:
return 1
def __hash__(self):
return 0
def __str__(self):
return "NOHOST"
def valid(self):
"""
Returns ``True`` if this :class:`HostID` object is valid.
"""
return False
class Msg(object):
"""
Base class for a message used in task-to-task communication.
"""
pass
class MsgTaskExit(Msg):
"""
	This message signals that a task has exited.
The :class:`TaskID` object corresponding to the task is stored in the
:attr:`taskID` member.
"""
def __init__(self, taskID):
Msg.__init__(self)
self.taskID=taskID
class MsgHostDelete(Msg):
"""
	This message signals that a host has left the virtual machine.
The :class:`HostID` object corresponding to the host is stored in the
:attr:`hostID` member.
"""
def __init__(self, hostID):
Msg.__init__(self)
self.hostID=hostID
class MsgHostAdd(Msg):
"""
	This message signals that new hosts were added to the virtual machine.
The list of :class:`HostID` objects corresponding to the added hosts is
stored in the :attr:`hostIDs` member.
"""
def __init__(self, hostIDs):
Msg.__init__(self)
self.hostIDs=hostIDs
class MsgTaskResult(Msg):
"""
	This message is sent to the process that spawned a Python function
	in a virtual machine. It holds a boolean flag that tells whether the
	function ran successfully, along with the function's return value.
The boolean flag and the return value can be found in :attr:`success` and
:attr:`returnValue` members.
"""
def __init__(self, success, returnValue):
Msg.__init__(self)
self.success=success
self.returnValue=returnValue
class VirtualMachine(object):
"""
The base class for accessing hosts working in parallel.
*debug* specifies the debug level. If it is greater than 0 debug messages
are printed on the standard output.
*startupDir* specifies the working directory where the spawned
functions will wake up. If set to ``None``, the underlying virtual machine
default is used.
	If *translateStartupDir* is ``True``, path translation is applied to
	*startupDir* just as it is to *mirrorMap* keys. If mirroring is defined
	(*mirrorMap* is given) then no translation is performed on *startupDir*.
	In this case *startupDir* is treated as a relative path with respect to
	the local storage.
*mirrorMap* is a dictionary specifying filesystem objects (files and
directories) on the spawner which are to be mirrored (copied) to local
storage on the host where the task is spawned. The keys represent
paths on the spawner while the values represent the corresponding
	paths (relative to the local storage directory) on the host where the
task will be spawned. Keys can use UNIX style globbing (anything the
:func:`glob.glob` function can handle is OK). If *mirrorMap* is set to
``None``, no mirroring is performed.
For the mirroring to work the filesystem objects on the spawner must be
in the folders specified in the ``PARALLEL_MIRRORED_STORAGE``
environmental variable. This is because mirroring is performed by local
copy operations which require the source to be on a mounted network
filesystem.
To find out more about setting the working directory and mirroring see
the :meth:`prepareEnvironment` method.
"""
def __init__(self, debug=0, startupDir=None, translateStartupDir=True, mirrorMap=None):
# Debug level
self.debug=debug
# Process startupDir
		# Translate startupDir only when mirroring is not used; with
		# mirroring, startupDir is relative to the local storage directory
		# (see the class docstring).
		if translateStartupDir and mirrorMap is None and startupDir is not None:
(self.startupDirIndex, self.startupDirSuffix)=self.translateToAbstractPath(startupDir)
else:
self.startupDirIndex=None
self.startupDirSuffix=startupDir
# Process local storage map
if mirrorMap is None:
self.mirrorList=None
else:
# Build mirror list
# Start with empty list
self.mirrorList=[]
# Go through all entries in mirrorMap
for (masterObject, target) in mirrorMap.iteritems():
(index, suffix)=self.translateToAbstractPath(masterObject)
# Append to self.mirrorList
self.mirrorList.append((index, suffix, target))
if self.debug:
DbgMsgOut("VM", "Mirroring '"+masterObject+"' from '"+suffix+"' in ("+str(index)+") to '"+target+"' in local storage.")
@staticmethod
def slots():
"""
Returns the number of slots for tasks in a virtual machine.
Every processor represents a slot for one task.
"""
return 0
@staticmethod
def freeSlots():
"""
Returns the number of free slots for tasks in the virtual machine.
"""
return 0
@staticmethod
def hosts():
"""
		Returns the list of :class:`HostID` objects representing the hosts in
the virtual machine. Works only for hosts that are spawners.
"""
return []
@staticmethod
def taskID():
"""
Returns the :class:`TaskID` object corresponding to the calling task.
"""
return None
@staticmethod
def hostID():
"""
Returns the :class:`HostID` object corresponding to the host on which
the caller task runs.
"""
return None
@staticmethod
def parentTaskID():
"""
Returns the :class:`TaskID` object corresponding to the task that
spawned the caller task.
"""
return None
@staticmethod
def formatSpawnerConfig():
"""
Formats the configuration information gathered by a spawner task as a
string. Works only if called by a spawner task.
"""
return ""
@classmethod
def dummy(cls):
pass
# This function prepares a function descriptor for spawning so that member functions can also be spawned
@classmethod
def func2desc(cls, func):
if ismethod(func):
# Handles instance methods and classmethods
# Get self, pack it together with method name
return (func.im_self, func.__name__)
else:
# Plain function, just return the function
return func
# This function reconstructs a function from a function descriptor
@classmethod
def desc2func(cls, desc):
if type(desc) is tuple:
# Instance methods and classmethods
s,n=desc
if isclass(s):
# Class, look in __dict__
return getattr(s, n)
else:
# Instance, use __getattribute__
return s.__getattribute__(n)
else:
# Plain function
return desc
	# This must be overridden by every derived class.
def spawnFunction(self, function, args=(), kwargs={}, count=-1, targetList=None, sendBack=True):
"""
		Spawns *count* instances of a Python *function* on remote hosts and
passes *args* and *kwargs* to the function. Spawning a function
actually means to start a Python interpreter, import the function, and
call it with *args* and *kwargs*.
		If *count* is ``None`` the number of tasks is selected in such a way
		that all available slots are filled.
*function*, *args*, and *kwargs* must be pickleable.
*targetList* specifies a list of hosts on which the function instances
will be spawned. If it is ``None`` all hosts in the virtual machine are
candidates for the spawned instances of the function.
If *sendBack* is ``True`` the spawned tasks return the status and the
return value of the function back to the spawner after the function
exits. The return value must be pickleable.
Returns a list of :class:`TaskID` objects representing the spawned tasks.
Works only if called by a spawner task.
"""
raise Exception, DbgMsg("VM", "Spawning not implemented.")
def checkForIncoming(self):
"""
Returns ``True`` if there is a message waiting to be received.
"""
raise Exception, DbgMsg("VM", "Message check not implemented.")
def receiveMessage(self, timeout=-1.0):
"""
Receives a *message* (a Python object) and returns a tuple
(*senderTaskId*, *message*)
The sender of the *message* can be identified through the
*senderTaskId* object of class :class:`TaskID`.
If *timeout* is negative the function waits (blocks) until some message
arrives. If *timeout*>0 seconds pass without receiving a message, an
empty tuple is returned. Zero *timeout* performs a nonblocking receive
which returns an empty tuple if no message is received.
In case of an error the return value is ``None``.
"""
raise Exception, DbgMsg("VM", "Reception of messages not implemented.")
def sendMessage(self, destination, message):
"""
Sends *message* (a Python object) to a task with :class:`TaskID`
*destination*. Returns ``True`` on success.
"""
raise Exception, DbgMsg("VM", "Sending of messages not implemented.")
@staticmethod
def finalize():
"""
Cleans up after a parallel program.
Should be called before exit.
"""
pass
def clearLocalStorage(self, timeout=-1.0):
"""
This function spawns the :func:`localStorageCleaner` function on all
slots in the virtual machine. The spawned instances remove the local
storage that was created for the slot.
*timeout* is applied where needed. Negative values stand for infinite
*timeout*.
This function should never be called if there are tasks running in the
virtual machine because it will remove their local storage.
This function should be called only by the spawner.
"""
# Get hosts
hostIDs=self.hosts()
# Spawn a cleaner on every host
taskIDs=[]
for hostID in hostIDs:
taskID=self.spawnFunction(localStorageCleaner, kwargs={'vm': self}, count=1, targetList=[hostID])
taskIDs.extend(taskID)
# Collect return values and task exit messages from all hosts confirming that cleanup is finished.
taskIDs=set(taskIDs)
mark=time.time()
while len(taskIDs)>0:
remains=timeout
if timeout>=0:
remains=timeout-(time.time()-mark)
if remains<=0:
break
recv=self.receiveMessage(remains)
if recv is not None and len(recv)==2:
(sourceID, msg)=recv
if type(msg) is MsgTaskExit:
# Remove taskID from set of spawned task IDs.
if sourceID in taskIDs:
taskIDs.remove(sourceID)
else:
# Throw away other messages.
pass
# These are helper methods
def translateToAbstractPath(self, path):
"""
Translates a *path* on the local machine to a tuple (*index*, *suffix*)
where *index* denotes the index of the path entry in the
``PARALLEL_MIRRORED_STORAGE`` environmental variable and *suffix* is
the path relative to that entry.
		Note that *suffix* is a relative path with respect to that entry
		(it never begins with ``/``).
"""
# Make path canonical
canonical=os.path.realpath(path)
		# Look for the path under one of the PARALLEL_MIRRORED_STORAGE entries
		for index in range(len(ParallelMirroredStorage)):
			masterMirroredDir=ParallelMirroredStorage[index]
			if canonical.find(masterMirroredDir)==0:
				# We have a mirrored directory prefix, get relative path.
suffix=os.path.relpath(canonical, masterMirroredDir)
return (index, suffix)
# Failed to translate
raise Exception, DbgMsg("VM", "'"+path+"' not in PARALLEL_MIRRORED_STORAGE.")
def translateToActualPath(self, index, relPath):
"""
Translates a *index* and *relPath* to an actual path on the local
machine.
This is the inverse of the :meth:`translateToAbstractPath` method.
"""
		if index<0 or index>=len(ParallelMirroredStorage):
			raise Exception(DbgMsg("VM", "PARALLEL_MIRRORED_STORAGE should have at least "+str(index+1)+" members."))
return os.path.join(ParallelMirroredStorage[index], relPath)
def createLocalStorage(self, subpath):
"""
Creates a local storage directory subtree given by *subpath* under
``PARALLEL_LOCAL_STORAGE``.
If a local storage directory with the same name already exists it is
suffixed by an underscore and a hexadecimal number.
Returns the path to the created local storage directory.
"""
if ParallelLocalStorage is None:
raise Exception, DbgMsg("VM", "PARALLEL_LOCAL_STORAGE is not set.")
# Build storage directory name
taskStorage=os.path.join(ParallelLocalStorage, subpath)
# Change name if it already exists, add numeric suffix (hex).
counter=1
while os.path.lexists(taskStorage):
# Directory exists, try another one (add numeric suffix)
taskStorage=os.path.join(ParallelLocalStorage, subpath+("_%x" % counter))
counter+=1
try:
os.makedirs(taskStorage)
except:
raise Exception, DbgMsg("VM", "Failed to create local storage in '"+taskStorage+"'")
return taskStorage
# Create local storage, mirror, and set working directory.
# Return local storage path. If no mirroring was performed, return None.
def prepareEnvironment(self):
"""
Prepares the working environment (working directory and local storage)
for a spawned function. This method is called by a spawned task. The
mirroring information is received from the spawner (spawner's
*mirrorList*) at function spawn time. Spawned task's virtual machine
object is namely the spawner's virtual machine object sent to the
spawned task.
		* If mirroring is not configured (the *mirrorMap* constructor
		  argument was ``None``), the working directory is the one specified
		  as *startupDir* with an appropriate path translation applied to
		  it. If it is ``None`` the working directory is determined by the
		  underlying virtual machine library (e.g. MPI).
		* If mirroring is configured (the processed *mirrorList* is not
``None``), a local storage directory is created by calling
:meth:`createLocalStorage` with *subpath* set to the PID of the
calling process in hexadecimal notation.
		  Next, mirroring is performed by traversing the processed
		  *mirrorList* received from the spawner, which is a list of
		  tuples of the form (*index*, *suffix*, *target*) where
*index* and *suffix* specify the source filesystem object to mirror
(see the :meth:`translateToAbstractPath` method for the explanation
of *index* and *suffix*) while *target* is the destination where the
object will be copied.
The source can be specified with globbing characters (anything the
:func:`glob.glob` function can handle is OK).
*target* is the path relative to the local storage directory where
the source will be copied. Renaming of the source is not possible.
*target* always specifies the destination directory.
If source is a directory, symbolic links within it are copied as
symbolic links which means that they should be relative and point to
mirrored filesystem objects in order to remain valid after mirroring.
If *startupDir* was given and it is not ``None`` the working
directory is set to path given by *startupDir* that is relative to
the local storage directory.
If *startupDir* is ``None`` the working directory is set to the
local storage directory.
Returns the path to the local storage directory.
"""
if self.mirrorList is None:
# No mirroring. Change working directory and return.
if self.startupDirSuffix is not None:
if self.startupDirIndex is None:
startupPath=self.startupDirSuffix
else:
startupPath=self.translateToActualPath(self.startupDirIndex, self.startupDirSuffix)
if self.debug:
DbgMsgOut("VM", "Changing working directory to '"+startupPath+"'.")
os.chdir(startupPath)
return None
# Have mirroring.
# Create local storage directory
if self.debug:
DbgMsgOut("VM", "Creating local storage.")
taskStorage=self.createLocalStorage("%x" % os.getpid())
if self.debug:
DbgMsgOut("VM", "Local storage created in '"+taskStorage+"'.")
# Change working directory to local storage
os.chdir(taskStorage)
# Copy local storage subdirs (master must do some preparation first)
		# Worker instructions for directory copying are in the vm object received from parent
for mirrorDirective in self.mirrorList:
(index, suffix, target)=mirrorDirective
# Translate
sourcePath=self.translateToActualPath(index, suffix)
# Debug
if self.debug:
DbgMsgOut("VM", "Mirroring '"+sourcePath+"' to '"+target+"'.")
sys.stdout.flush()
# Do the globbing
for source in iglob(sourcePath):
if os.path.isdir(source):
# Copying a directory. Destination is the directory where the copy of the tree will be created.
# Renaming is not possible. Copy symlinks as symlinks.
# Get the name of the copied object (copytree wants a destination object name).
(srcPath, srcName)=os.path.split(source)
# Copy tree, create destination directory if it does not exist.
shutil.copytree(source, os.path.join(target, srcName), symlinks=True)
else:
# Copying a file. Destination is the directory where the copy of the file will be created.
# Renaming is not possible. Copy file itself, not symlink
# Get the name of the copied object.
(srcPath, srcName)=os.path.split(source)
# Create destination directory if it does not exist.
if not os.path.exists(target):
os.makedirs(target)
if not os.path.isdir(target):
						raise Exception(DbgMsg("VM", "Mirroring destination exists, but is not a directory."))
# Copy file.
shutil.copy(source, target)
# See if workDir is given
if self.startupDirSuffix is not None:
# startupDir is relative to local storage directory.
tmpDir=os.path.join(taskStorage, self.startupDirSuffix)
if self.debug:
DbgMsgOut("VM", "Changing working directory to '"+tmpDir+"'.")
os.chdir(tmpDir)
return taskStorage
def cleanupEnvironment(self, taskStorage):
"""
Removes the working environment prepared for a remote task.
*taskStorage* is the path to the local storage directory.
This directory is removed.
If *taskStorage* is ``None`` nothing is removed.
"""
if taskStorage is None:
return
if self.debug:
DbgMsgOut("VM", "Removing '"+taskStorage+"'.")
# Go to ParallelLocalStorage so we are not in our own way.
os.chdir(ParallelLocalStorage)
# Remove task storage folder
shutil.rmtree(taskStorage, True)
def localStorageCleaner(vm=None):
"""
This is the function spawned by the
:meth:`VirtualMachine.clearLocalStorage` method.
The function looks in the directory given by the ``PARALLEL_LOCAL_STORAGE``
environmental variable and removes everything in that directory.
"""
# Traverse storageRoot entries.
entries=os.listdir(ParallelLocalStorage)
# Go to ParallelLocalStorage so we are not in our own way.
os.chdir(ParallelLocalStorage)
# Go through entries.
for entry in entries:
completeEntry=os.path.join(ParallelLocalStorage, entry)
		# Ignore errors (delete everything we can delete).
		try:
			if os.path.isdir(completeEntry):
				shutil.rmtree(completeEntry, True)
			else:
				os.remove(completeEntry)
			if vm is not None and vm.debug:
				DbgMsgOut("VM", "Removing '"+completeEntry+"'.")
		except:
			if vm is not None and vm.debug:
				DbgMsgOut("VM", "Failed to remove '"+completeEntry+"'.")
def getNumberOfCores():
"""
Returns the number of available CPU cores.
Works for Linux, Unix, MacOS, and Windows.
Uses code from Parallel Python (http://www.parallelpython.com).
"""
# Taken from Parallel Python. Thanks.
# For Linux, Unix and MacOS
if hasattr(os, "sysconf"):
if "SC_NPROCESSORS_ONLN" in os.sysconf_names:
# Linux and Unix
ncpus = os.sysconf("SC_NPROCESSORS_ONLN")
if isinstance(ncpus, int) and ncpus > 0:
return ncpus
else:
# MacOS X
return int(os.popen2("sysctl -n hw.ncpu")[1].read())
# For Windows
if "NUMBER_OF_PROCESSORS" in environ:
ncpus = int(environ["NUMBER_OF_PROCESSORS"])
if ncpus > 0:
return ncpus
# Default
return 1
if 'PARALLEL_LOCAL_STORAGE' in environ:
ParallelLocalStorage=environ['PARALLEL_LOCAL_STORAGE']
try:
# Expand user home (~, ~name (unix only))
ParallelLocalStorage=os.path.expanduser(ParallelLocalStorage)
# Normalize path, get canonical path (full path, eliminate symlinks)
ParallelLocalStorage=os.path.realpath(ParallelLocalStorage)
# Create if not there yet
if not os.path.exists(ParallelLocalStorage):
# Not there, create it (along with all missing directories in the path)
os.makedirs(ParallelLocalStorage)
except KeyboardInterrupt:
DbgMsgOut("VM", "Keyboard interrupt.")
raise
except:
DbgMsgOut("VM", "Failed to process local storage dir "+ParallelLocalStorage+".")
raise
else:
ParallelLocalStorage=None
if 'PARALLEL_MIRRORED_STORAGE' in environ:
ParallelMirroredStorage=environ['PARALLEL_MIRRORED_STORAGE']
# Split list in directories
ParallelMirroredStorage=ParallelMirroredStorage.split(':')
# Process list
for i in range(0, len(ParallelMirroredStorage)):
mirrored=ParallelMirroredStorage[i]
try:
# Expand user home (~, ~name (unix only))
mirrored=os.path.expanduser(mirrored)
# Normalize path, get canonical path (full path, eliminate symlinks)
mirrored=os.path.realpath(mirrored)
except KeyboardInterrupt:
print "VM: keyboard interrupt"
raise
except:
print "VM: failed to process mirrored storage dir", mirrored
ParallelMirroredStorage[i]=mirrored
del mirrored
else:
ParallelMirroredStorage=[]
|
from base import BaseParser
from libs.torrent import Torrent
from defusedxml import lxml
from lxml import html
from StringIO import StringIO
import datetime
import requests
class Parser(BaseParser):
"""Parser for HDCity torrent list"""
def __init__(self, config=None, logger=None, name=""):
super(Parser, self).__init__(logger, name)
self.config = config
self.baseUrl = 'https://www.hdcity.li/'
self.headers = {
'User-Agent': 'Mozilla/5.0 (Windows NT 10.0; Win64; x64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/60.0.3112.90 Safari/537.36',
'Accept': 'text/html,application/xhtml+xml,application/xml',
'Host': 'hdcity.li',
'Accept-Encoding': 'gzip, deflate, br',
'Accept-Language': 'es-ES,es;q=0.8',
'Referer': 'https://hdcity.li/',
'Cookie': config["cookie"]
}
self.logger = logger
def parse(self):
self.logger.i("Checking HDCity torrents")
url = '{baseUrl}index.php?page=torrents'.format(baseUrl=self.baseUrl)
r = requests.get(url, headers=self.headers)
if not r.status_code == 200:
return []
tree = lxml.parse(StringIO(r.content), parser=html.HTMLParser())
results = []
for index, row in enumerate(tree.xpath('// form[@name="deltorrent"]/tr/td/table/tr')):
# Ignore first row (Headers) and last (Empty row)
if index == 0 or index == 36:
continue
title = row.xpath('./td[2]/a/text()')[0].lstrip()
            freeleech = bool(row.xpath('./td[2]/img[@title="Gold 100% Free"]'))
seeders = self.checkOrZero(row.xpath('./td[6]/a/text()'))
leechers = self.checkOrZero(row.xpath('./td[7]/a/text()'))
completed = self.checkOrZero(row.xpath('./td[8]/a/text()'))
link = row.xpath('./td[3]/a/@href')[0]
uploadedAt = row.xpath('./td[5]/text()')
date = datetime.datetime.strptime(
uploadedAt[0].encode('utf-8'), '%d/%m/%Y')
results.append(Torrent(title=title, link=link, freeleech=freeleech, seeders=seeders, leechers=leechers, completed=completed, date=date))
return results
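# Illustrative usage (hypothetical config and logger objects; the cookie
# value must come from an authenticated HDCity session, and Torrent is
# assumed to expose its constructor arguments as attributes):
#
#   parser = Parser(config={'cookie': 'uid=...; pass=...'}, logger=log)
#   for torrent in parser.parse():
#       log.i('%s seeders=%s' % (torrent.title, torrent.seeders))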
|
import getopt, datetime, os, subprocess, sys
os.chdir('../')
def main(argv):
    message = ""
    try:
        opts, args = getopt.getopt(argv, "m:", ["message="])
    except getopt.GetoptError:
        sys.exit(2)
    for opt, arg in opts:
        if opt in ("-m", "--message"):
            message = arg
    if message == "":
        # without a commit message the git commit at the end would fail
        print("A commit message must be given with -m/--message")
        sys.exit(2)
major_v = 0
minor_v = 2
#read minor minor release number
f = open('prep-release/minor_minor_number.txt', 'r')
ln = f.readlines()
f.close()
minor_minor_v = int(ln[0].strip()) + 1
#write incremented minor minor release number
f = open('prep-release/minor_minor_number.txt', 'w')
f.write(str(minor_minor_v))
f.close()
builddate = datetime.datetime.now().strftime("%d-%b-%Y %H:%M")
#set git tag
gittag = str(major_v) + '.' + str(minor_v) + '.' + str(minor_minor_v)
    # setup.py, setup-pyqt4.py and setup-pyside.py all carry the same
    # version= line, so update them in one loop instead of three copies
    for fname in ['setup.py', 'setup-pyqt4.py', 'setup-pyside.py']:
        f = open(fname, 'r')
        ln = f.readlines()
        f.close()
        for i in range(len(ln)):
            if ln[i].strip().split('=')[0].strip() == "version":
                ln[i] = ' version="' + gittag + '",\n'
        f = open(fname, 'w')
        f.writelines(ln)
        f.close()
f = open('pysoundanalyser/_version_info.py', 'r')
ln = f.readlines()
f.close()
for i in range(len(ln)):
if ln[i].strip().split('=')[0].strip() == "pysoundanalyser_version":
ln[i] = 'pysoundanalyser_version = "' + gittag +'"\n'
if ln[i].strip().split('=')[0].strip() == "pysoundanalyser_builddate":
ln[i] = 'pysoundanalyser_builddate = "' + builddate +'"\n'
f = open('pysoundanalyser/_version_info.py', 'w')
f.writelines(ln)
f.close()
f = open('pysoundanalyser/doc/conf.py', 'r')
ln = f.readlines()
f.close()
    for i in range(len(ln)):
        if ln[i].strip().split('=')[0].strip() == "version":
            # conf.py uses plain assignments; a trailing comma would turn
            # the value into a tuple
            ln[i] = 'version = "' + gittag + '"\n'
        if ln[i].strip().split('=')[0].strip() == "release":
            ln[i] = 'release = "' + gittag + '"\n'
f = open('pysoundanalyser/doc/conf.py', 'w')
f.writelines(ln)
f.close()
f = open('pysoundanalyser.desktop', 'r')
ln = f.readlines()
f.close()
    for i in range(len(ln)):
        if ln[i].strip().split('=')[0].strip() == "Version":
            # desktop entry values take no spaces around '=' and no comma
            ln[i] = 'Version=' + gittag + '\n'
f = open('pysoundanalyser.desktop', 'w')
f.writelines(ln)
f.close()
subprocess.call('git commit -a -m"' + message+'"', shell=True)
#tag the commit so that it can be easily retrieved
subprocess.call('git tag -a "' + gittag +'"' + ' -m "' + gittag +'"', shell=True)
if __name__ == "__main__":
main(sys.argv[1:])
|
"""
This program is free software: you can redistribute it and/or modify
it under the terms of the GNU General Public License as published by
the Free Software Foundation, either version 3 of the License, or
(at your option) any later version.
This program is distributed in the hope that it will be useful,
but WITHOUT ANY WARRANTY; without even the implied warranty of
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
GNU General Public License for more details.
You should have received a copy of the GNU General Public License
along with this program. If not, see <http://www.gnu.org/licenses/>.
"""
from django.shortcuts import render, get_object_or_404
from eventex.core.models import Speaker, Talk
def home(request):
"""
View to render the home page.
"""
return render(request, 'index.html')
def speaker_detail(request, slug):
"""
View to render the speaker detail page.
"""
speaker = get_object_or_404(Speaker, slug=slug)
context = {'speaker': speaker}
return render(
request,
'core/speaker_detail.html',
context
)
def talk_list(request):
"""
View to render the talk list page.
"""
context = {
'morning_talks': Talk.objects.at_morning(),
'afternoon_talks': Talk.objects.at_afternoon()
}
return render(request, 'core/talk_list.html', context)
def talk_detail(request, pk):
"""
View to render the talk detail page.
"""
talk = get_object_or_404(Talk, pk=pk)
context = {
'talk': talk,
}
return render(request, 'core/talk_detail.html', context)
|
__author__ = 'davide'
import numpy as np
from numpy.lib.stride_tricks import as_strided
def _sum_sq_diff(input_image, template, valid_mask):
"""This function performs template matching. The metric used is Sum of
Squared Difference (SSD). The input taken is the template who's match is
to be found in image.
    Parameters
    ----------
input_image : array, np.float
Input image of shape (M, N)
template : array, np.float
        (window, window) Template whose match is to be found in input_image.
valid_mask : array, np.float
(window, window), governs differences which are to be considered for
SSD computation. Masks out the unknown or unfilled pixels and gives a
higher weightage to the center pixel, decreasing as the distance from
center pixel increases.
    Returns
    -------
    min_template : tuple of int
        (row, col) index of the window with the minimum SSD among all
        (M - window + 1, N - window + 1) positions in the input_image.
    """
total_weight = valid_mask.sum()
window_size = template.shape
y = as_strided(input_image,
shape=((input_image.shape[0] - window_size[0] + 1,
input_image.shape[1] - window_size[1] + 1,) +
window_size),
strides=input_image.strides * 2)
ssd = np.einsum('ijkl, kl, kl->ij', y, template, valid_mask,
                    dtype=np.float64)
    # Expand the weighted SSD: sum(w*(y-t)^2) = sum(w*y*y) - 2*sum(w*y*t)
    # + sum(w*t*t). The einsum above computed sum(w*y*t) per window, so
    # scale it by -2 and add the remaining two terms below.
    ssd *= -2
ssd += np.einsum('ijkl, ijkl, kl->ij', y, y, valid_mask)
ssd += np.einsum('ij, ij, ij', template, template, valid_mask)
ssd /= total_weight
min_template = np.unravel_index(ssd.argmin(), ssd.shape)
return min_template
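# Illustrative cross-check (not part of the original script): a naive
# double-loop version of the weighted SSD that the einsum expansion in
# _sum_sq_diff is algebraically equivalent to. Useful for validating the
# vectorized code on small inputs.
def _naive_ssd_argmin(input_image, template, valid_mask):
    h, w = template.shape
    rows = input_image.shape[0] - h + 1
    cols = input_image.shape[1] - w + 1
    ssd = np.empty((rows, cols))
    for i in range(rows):
        for j in range(cols):
            # weighted sum of squared differences for this window position
            diff = input_image[i:i + h, j:j + w] - template
            ssd[i, j] = (valid_mask * diff * diff).sum()
    ssd /= valid_mask.sum()
    return np.unravel_index(ssd.argmin(), ssd.shape)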
if __name__ == "__main__":
    # random_integers is deprecated; randint's upper bound is exclusive
    img = np.random.randint(0, 4, (4, 4))
print(img)
template = np.ones(9).reshape(3, 3)
valid_mask = template.copy()
valid_mask[1, 1] = 10
s = _sum_sq_diff(img, template, valid_mask)
print(s)
|
'''
setup board.h for chibios
'''
import argparse, sys, fnmatch, os, dma_resolver, shlex, pickle, re
import shutil
parser = argparse.ArgumentParser("chibios_pins.py")
parser.add_argument(
'-D', '--outdir', type=str, default=None, help='Output directory')
parser.add_argument(
'--bootloader', action='store_true', default=False, help='configure for bootloader')
parser.add_argument(
'hwdef', type=str, default=None, help='hardware definition file')
args = parser.parse_args()
f4f7_vtypes = ['MODER', 'OTYPER', 'OSPEEDR', 'PUPDR', 'ODR', 'AFRL', 'AFRH']
f1_vtypes = ['CRL', 'CRH', 'ODR']
f1_input_sigs = ['RX', 'MISO', 'CTS']
f1_output_sigs = ['TX', 'MOSI', 'SCK', 'RTS', 'CH1', 'CH2', 'CH3', 'CH4']
af_labels = ['USART', 'UART', 'SPI', 'I2C', 'SDIO', 'SDMMC', 'OTG', 'JT', 'TIM', 'CAN']
vtypes = []
pincount = {
'A': 16,
'B': 16,
'C': 16,
'D': 16,
'E': 16,
'F': 16,
'G': 16,
'H': 2,
'I': 0,
'J': 0,
'K': 0
}
ports = pincount.keys()
portmap = {}
config = {}
allpins = []
bytype = {}
bylabel = {}
spidev = []
romfs = {}
spi_list = []
alllines = []
env_vars = {}
build_flags = []
imu_list = []
compass_list = []
baro_list = []
mcu_type = None
dual_USB_enabled = False
def is_int(str):
'''check if a string is an integer'''
try:
int(str)
except Exception:
return False
return True
def error(str):
'''show an error and exit'''
print("Error: " + str)
sys.exit(1)
def get_mcu_lib(mcu):
'''get library file for the chosen MCU'''
import importlib
try:
return importlib.import_module(mcu)
except ImportError:
error("Unable to find module for MCU %s" % mcu)
def setup_mcu_type_defaults():
'''setup defaults for given mcu type'''
global pincount, ports, portmap, vtypes
lib = get_mcu_lib(mcu_type)
if hasattr(lib, 'pincount'):
pincount = lib.pincount
if mcu_series.startswith("STM32F1"):
vtypes = f1_vtypes
else:
vtypes = f4f7_vtypes
ports = pincount.keys()
# setup default as input pins
for port in ports:
portmap[port] = []
for pin in range(pincount[port]):
portmap[port].append(generic_pin(port, pin, None, 'INPUT', []))
def get_alt_function(mcu, pin, function):
'''return alternative function number for a pin'''
lib = get_mcu_lib(mcu)
if function.endswith('_TXINV') or function.endswith('_RXINV'):
# RXINV and TXINV are special labels for inversion pins, not alt-functions
return None
if hasattr(lib, "AltFunction_map"):
alt_map = lib.AltFunction_map
else:
# just check if Alt Func is available or not
for l in af_labels:
if function.startswith(l):
return 0
return None
if function and function.endswith("_RTS") and (
function.startswith('USART') or function.startswith('UART')):
# we do software RTS
return None
for l in af_labels:
if function.startswith(l):
s = pin + ":" + function
if not s in alt_map:
error("Unknown pin function %s for MCU %s" % (s, mcu))
return alt_map[s]
return None
def have_type_prefix(ptype):
'''return True if we have a peripheral starting with the given peripheral type'''
for t in bytype.keys():
if t.startswith(ptype):
return True
return False
def get_ADC1_chan(mcu, pin):
'''return ADC1 channel for an analog pin'''
import importlib
try:
lib = importlib.import_module(mcu)
ADC1_map = lib.ADC1_map
except ImportError:
error("Unable to find ADC1_Map for MCU %s" % mcu)
if not pin in ADC1_map:
error("Unable to find ADC1 channel for pin %s" % pin)
return ADC1_map[pin]
class generic_pin(object):
'''class to hold pin definition'''
def __init__(self, port, pin, label, type, extra):
global mcu_series
self.portpin = "P%s%u" % (port, pin)
self.port = port
self.pin = pin
self.label = label
self.type = type
self.extra = extra
self.af = None
if type == 'OUTPUT':
self.sig_dir = 'OUTPUT'
else:
self.sig_dir = 'INPUT'
if mcu_series.startswith("STM32F1") and self.label is not None:
self.f1_pin_setup()
# check that labels and pin types are consistent
for prefix in ['USART', 'UART', 'TIM']:
if label is None or type is None:
continue
if type.startswith(prefix):
a1 = label.split('_')
a2 = type.split('_')
if a1[0] != a2[0]:
error("Peripheral prefix mismatch for %s %s %s" % (self.portpin, label, type))
def f1_pin_setup(self):
for l in af_labels:
if self.label.startswith(l):
if self.label.endswith(tuple(f1_input_sigs)):
self.sig_dir = 'INPUT'
self.extra.append('FLOATING')
elif self.label.endswith(tuple(f1_output_sigs)):
self.sig_dir = 'OUTPUT'
elif l == 'I2C':
self.sig_dir = 'OUTPUT'
elif l == 'OTG':
self.sig_dir = 'OUTPUT'
else:
error("Unknown signal type %s:%s for %s!" % (self.portpin, self.label, mcu_type))
def has_extra(self, v):
'''return true if we have the given extra token'''
return v in self.extra
def extra_prefix(self, prefix):
'''find an extra token starting with the given prefix'''
for e in self.extra:
if e.startswith(prefix):
return e
return None
def extra_value(self, name, type=None, default=None):
'''find an extra value of given type'''
v = self.extra_prefix(name)
if v is None:
return default
if v[len(name)] != '(' or v[-1] != ')':
error("Badly formed value for %s: %s\n" % (name, v))
ret = v[len(name) + 1:-1]
if type is not None:
try:
ret = type(ret)
except Exception:
error("Badly formed value for %s: %s\n" % (name, ret))
return ret
def is_RTS(self):
'''return true if this is a RTS pin'''
if self.label and self.label.endswith("_RTS") and (
self.type.startswith('USART') or self.type.startswith('UART')):
return True
return False
def is_CS(self):
'''return true if this is a CS pin'''
return self.has_extra("CS") or self.type == "CS"
def get_MODER(self):
'''return one of ALTERNATE, OUTPUT, ANALOG, INPUT'''
if self.af is not None:
v = "ALTERNATE"
elif self.type == 'OUTPUT':
v = "OUTPUT"
elif self.type.startswith('ADC'):
v = "ANALOG"
elif self.is_CS():
v = "OUTPUT"
elif self.is_RTS():
v = "OUTPUT"
else:
v = "INPUT"
return "PIN_MODE_%s(%uU)" % (v, self.pin)
def get_OTYPER(self):
'''return one of PUSHPULL, OPENDRAIN'''
v = 'PUSHPULL'
if self.type.startswith('I2C'):
# default I2C to OPENDRAIN
v = 'OPENDRAIN'
values = ['PUSHPULL', 'OPENDRAIN']
for e in self.extra:
if e in values:
v = e
return "PIN_OTYPE_%s(%uU)" % (v, self.pin)
def get_OSPEEDR(self):
'''return one of SPEED_VERYLOW, SPEED_LOW, SPEED_MEDIUM, SPEED_HIGH'''
# on STM32F4 these speeds correspond to 2MHz, 25MHz, 50MHz and 100MHz
values = ['SPEED_VERYLOW', 'SPEED_LOW', 'SPEED_MEDIUM', 'SPEED_HIGH']
v = 'SPEED_MEDIUM'
for e in self.extra:
if e in values:
v = e
return "PIN_O%s(%uU)" % (v, self.pin)
def get_PUPDR(self):
'''return one of FLOATING, PULLUP, PULLDOWN'''
values = ['FLOATING', 'PULLUP', 'PULLDOWN']
v = 'FLOATING'
if self.is_CS():
v = "PULLUP"
# generate pullups for UARTs
if (self.type.startswith('USART') or
self.type.startswith('UART')) and (
(self.label.endswith('_TX') or
self.label.endswith('_RX') or
self.label.endswith('_CTS') or
self.label.endswith('_RTS'))):
v = "PULLUP"
# generate pullups for SDIO and SDMMC
if (self.type.startswith('SDIO') or
self.type.startswith('SDMMC')) and (
(self.label.endswith('_D0') or
self.label.endswith('_D1') or
self.label.endswith('_D2') or
self.label.endswith('_D3') or
self.label.endswith('_CMD'))):
v = "PULLUP"
for e in self.extra:
if e in values:
v = e
return "PIN_PUPDR_%s(%uU)" % (v, self.pin)
def get_ODR_F1(self):
'''return one of LOW, HIGH'''
values = ['LOW', 'HIGH']
v = 'HIGH'
if self.type == 'OUTPUT':
v = 'LOW'
elif self.label is not None and self.label.startswith('I2C'):
v = 'LOW'
for e in self.extra:
if e in values:
v = e
#for some controllers input pull up down is selected by ODR
if self.type == "INPUT":
v = 'LOW'
if 'PULLUP' in self.extra:
v = "HIGH"
return "PIN_ODR_%s(%uU)" % (v, self.pin)
def get_ODR(self):
'''return one of LOW, HIGH'''
if mcu_series.startswith("STM32F1"):
return self.get_ODR_F1()
values = ['LOW', 'HIGH']
v = 'HIGH'
for e in self.extra:
if e in values:
v = e
return "PIN_ODR_%s(%uU)" % (v, self.pin)
def get_AFIO(self):
'''return AFIO'''
af = self.af
if af is None:
af = 0
return "PIN_AFIO_AF(%uU, %uU)" % (self.pin, af)
def get_AFRL(self):
'''return AFIO low 8'''
if self.pin >= 8:
return None
return self.get_AFIO()
def get_AFRH(self):
'''return AFIO high 8'''
if self.pin < 8:
return None
return self.get_AFIO()
def get_CR_F1(self):
'''return CR FLAGS for STM32F1xx'''
#Check Speed
if self.sig_dir != "INPUT" or self.af is not None:
speed_values = ['SPEED_LOW', 'SPEED_MEDIUM', 'SPEED_HIGH']
v = 'SPEED_MEDIUM'
for e in self.extra:
if e in speed_values:
v = e
speed_str = "PIN_%s(%uU) |" % (v, self.pin)
elif self.is_CS():
speed_str = "PIN_SPEED_LOW(%uU) |" % (self.pin)
else:
speed_str = ""
if self.af is not None:
if self.label.endswith('_RX'):
            # uart RX is configured as an input, and can be pullup, pulldown or float
if 'PULLUP' in self.extra or 'PULLDOWN' in self.extra:
v = 'PUD'
else:
v = "NOPULL"
elif self.label.startswith('I2C'):
v = "AF_OD"
else:
v = "AF_PP"
elif self.is_CS():
v = "OUTPUT_PP"
elif self.sig_dir == 'OUTPUT':
if 'OPENDRAIN' in self.extra:
v = 'OUTPUT_OD'
else:
v = "OUTPUT_PP"
elif self.type.startswith('ADC'):
v = "ANALOG"
else:
v = "PUD"
if 'FLOATING' in self.extra:
v = "NOPULL"
mode_str = "PIN_MODE_%s(%uU)" % (v, self.pin)
return "%s %s" % (speed_str, mode_str)
def get_CR(self):
'''return CR FLAGS'''
if mcu_series.startswith("STM32F1"):
return self.get_CR_F1()
if self.sig_dir != "INPUT":
speed_values = ['SPEED_LOW', 'SPEED_MEDIUM', 'SPEED_HIGH']
v = 'SPEED_MEDIUM'
for e in self.extra:
if e in speed_values:
v = e
speed_str = "PIN_%s(%uU) |" % (v, self.pin)
else:
speed_str = ""
#Check Alternate function
if self.type.startswith('I2C'):
v = "AF_OD"
elif self.sig_dir == 'OUTPUT':
if self.af is not None:
v = "AF_PP"
else:
v = "OUTPUT_PP"
elif self.type.startswith('ADC'):
v = "ANALOG"
elif self.is_CS():
v = "OUTPUT_PP"
elif self.is_RTS():
v = "OUTPUT_PP"
else:
v = "PUD"
if 'FLOATING' in self.extra:
v = "NOPULL"
mode_str = "PIN_MODE_%s(%uU)" % (v, self.pin)
return "%s %s" % (speed_str, mode_str)
def get_CRH(self):
if self.pin < 8:
return None
return self.get_CR()
def get_CRL(self):
if self.pin >= 8:
return None
return self.get_CR()
def __str__(self):
str = ''
if self.af is not None:
str += " AF%u" % self.af
if self.type.startswith('ADC1'):
str += " ADC1_IN%u" % get_ADC1_chan(mcu_type, self.portpin)
if self.extra_value('PWM', type=int):
str += " PWM%u" % self.extra_value('PWM', type=int)
return "P%s%u %s %s%s" % (self.port, self.pin, self.label, self.type,
str)
def get_config(name, column=0, required=True, default=None, type=None, spaces=False):
'''get a value from config dictionary'''
if not name in config:
if required and default is None:
error("missing required value %s in hwdef.dat" % name)
return default
if len(config[name]) < column + 1:
if not required:
return None
error("missing required value %s in hwdef.dat (column %u)" % (name,
column))
if spaces:
ret = ' '.join(config[name][column:])
else:
ret = config[name][column]
if type is not None:
if type == int and ret.startswith('0x'):
try:
ret = int(ret,16)
except Exception:
error("Badly formed config value %s (got %s)" % (name, ret))
else:
try:
ret = type(ret)
except Exception:
error("Badly formed config value %s (got %s)" % (name, ret))
return ret
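# Example (hypothetical hwdef.dat entries): a line "FLASH_SIZE_KB 2048"
# makes get_config('FLASH_SIZE_KB', type=int) return 2048, while for
# "MCU STM32F4xx STM32F427xx" get_config('MCU', 1) returns the second
# column, 'STM32F427xx'.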
def get_mcu_config(name, required=False):
'''get a value from the mcu dictionary'''
lib = get_mcu_lib(mcu_type)
if not hasattr(lib, 'mcu'):
error("Missing mcu config for %s" % mcu_type)
if not name in lib.mcu:
if required:
error("Missing required mcu config %s for %s" % (name, mcu_type))
return None
return lib.mcu[name]
def enable_can(f):
'''setup for a CAN enabled board'''
f.write('#define HAL_WITH_UAVCAN 1\n')
env_vars['HAL_WITH_UAVCAN'] = '1'
def has_sdcard_spi():
'''check for sdcard connected to spi bus'''
for dev in spidev:
        if dev[0] == 'sdcard':
return True
return False
def write_mcu_config(f):
'''write MCU config defines'''
f.write('// MCU type (ChibiOS define)\n')
f.write('#define %s_MCUCONF\n' % get_config('MCU'))
mcu_subtype = get_config('MCU', 1)
if mcu_subtype.endswith('xx'):
f.write('#define %s_MCUCONF\n\n' % mcu_subtype[:-2])
f.write('#define %s\n\n' % mcu_subtype)
f.write('// crystal frequency\n')
f.write('#define STM32_HSECLK %sU\n\n' % get_config('OSCILLATOR_HZ'))
f.write('// UART used for stdout (printf)\n')
if get_config('STDOUT_SERIAL', required=False):
f.write('#define HAL_STDOUT_SERIAL %s\n\n' % get_config('STDOUT_SERIAL'))
f.write('// baudrate used for stdout (printf)\n')
f.write('#define HAL_STDOUT_BAUDRATE %u\n\n' % get_config('STDOUT_BAUDRATE', type=int))
if have_type_prefix('SDIO'):
f.write('// SDIO available, enable POSIX filesystem support\n')
f.write('#define USE_POSIX\n\n')
f.write('#define HAL_USE_SDC TRUE\n')
build_flags.append('USE_FATFS=yes')
elif have_type_prefix('SDMMC'):
f.write('// SDMMC available, enable POSIX filesystem support\n')
f.write('#define USE_POSIX\n\n')
f.write('#define HAL_USE_SDC TRUE\n')
f.write('#define STM32_SDC_USE_SDMMC1 TRUE\n')
build_flags.append('USE_FATFS=yes')
elif has_sdcard_spi():
f.write('// MMC via SPI available, enable POSIX filesystem support\n')
f.write('#define USE_POSIX\n\n')
f.write('#define HAL_USE_MMC_SPI TRUE\n')
f.write('#define HAL_USE_SDC FALSE\n')
f.write('#define HAL_SDCARD_SPI_HOOK TRUE\n')
build_flags.append('USE_FATFS=yes')
else:
f.write('#define HAL_USE_SDC FALSE\n')
build_flags.append('USE_FATFS=no')
env_vars['DISABLE_SCRIPTING'] = True
if 'OTG1' in bytype:
f.write('#define STM32_USB_USE_OTG1 TRUE\n')
f.write('#define HAL_USE_USB TRUE\n')
f.write('#define HAL_USE_SERIAL_USB TRUE\n')
if 'OTG2' in bytype:
f.write('#define STM32_USB_USE_OTG2 TRUE\n')
if have_type_prefix('CAN') and not 'AP_PERIPH' in env_vars:
enable_can(f)
if get_config('PROCESS_STACK', required=False):
env_vars['PROCESS_STACK'] = get_config('PROCESS_STACK')
else:
env_vars['PROCESS_STACK'] = "0x2000"
if get_config('MAIN_STACK', required=False):
env_vars['MAIN_STACK'] = get_config('MAIN_STACK')
else:
env_vars['MAIN_STACK'] = "0x400"
if get_config('IOMCU_FW', required=False):
env_vars['IOMCU_FW'] = get_config('IOMCU_FW')
else:
env_vars['IOMCU_FW'] = 0
if get_config('PERIPH_FW', required=False):
env_vars['PERIPH_FW'] = get_config('PERIPH_FW')
else:
env_vars['PERIPH_FW'] = 0
# write any custom STM32 defines
for d in alllines:
if d.startswith('STM32_'):
f.write('#define %s\n' % d)
if d.startswith('define '):
f.write('#define %s\n' % d[7:])
flash_size = get_config('FLASH_SIZE_KB', type=int)
f.write('#define BOARD_FLASH_SIZE %u\n' % flash_size)
env_vars['BOARD_FLASH_SIZE'] = flash_size
f.write('#define CRT1_AREAS_NUMBER 1\n')
flash_reserve_start = get_config(
'FLASH_RESERVE_START_KB', default=16, type=int)
f.write('\n// location of loaded firmware\n')
f.write('#define FLASH_LOAD_ADDRESS 0x%08x\n' % (0x08000000 + flash_reserve_start*1024))
if args.bootloader:
f.write('#define FLASH_BOOTLOADER_LOAD_KB %u\n' % get_config('FLASH_BOOTLOADER_LOAD_KB', type=int))
f.write('\n')
ram_map = get_mcu_config('RAM_MAP', True)
f.write('// memory regions\n')
regions = []
total_memory = 0
for (address, size, flags) in ram_map:
regions.append('{(void*)0x%08x, 0x%08x, 0x%02x }' % (address, size*1024, flags))
total_memory += size
f.write('#define HAL_MEMORY_REGIONS %s\n' % ', '.join(regions))
f.write('#define HAL_MEMORY_TOTAL_KB %u\n' % total_memory)
f.write('#define HAL_RAM0_START 0x%08x\n' % ram_map[0][0])
ram_reserve_start = get_config('RAM_RESERVE_START', default=0, type=int)
if ram_reserve_start > 0:
f.write('#define HAL_RAM_RESERVE_START 0x%08x\n' % ram_reserve_start)
f.write('\n// CPU serial number (12 bytes)\n')
f.write('#define UDID_START 0x%08x\n\n' % get_mcu_config('UDID_START', True))
f.write('\n// APJ board ID (for bootloaders)\n')
f.write('#define APJ_BOARD_ID %s\n' % get_config('APJ_BOARD_ID'))
lib = get_mcu_lib(mcu_type)
build_info = lib.build
if mcu_series.startswith("STM32F1"):
cortex = "cortex-m3"
env_vars['CPU_FLAGS'] = ["-mcpu=%s" % cortex]
build_info['MCU'] = cortex
else:
cortex = "cortex-m4"
env_vars['CPU_FLAGS'] = [ "-mcpu=%s" % cortex, "-mfpu=fpv4-sp-d16", "-mfloat-abi=hard"]
build_info['MCU'] = cortex
if not args.bootloader:
env_vars['CPU_FLAGS'].append('-u_printf_float')
build_info['ENV_UDEFS'] = "-DCHPRINTF_USE_FLOAT=1"
# setup build variables
for v in build_info.keys():
build_flags.append('%s=%s' % (v, build_info[v]))
# setup for bootloader build
if args.bootloader:
f.write('''
''')
if env_vars.get('ROMFS_UNCOMPRESSED', False):
f.write('#define HAL_ROMFS_UNCOMPRESSED\n')
def write_ldscript(fname):
'''write ldscript.ld for this board'''
flash_size = get_config('FLASH_USE_MAX_KB', type=int, default=0)
if flash_size == 0:
flash_size = get_config('FLASH_SIZE_KB', type=int)
# space to reserve for bootloader and storage at start of flash
flash_reserve_start = get_config(
'FLASH_RESERVE_START_KB', default=16, type=int)
env_vars['FLASH_RESERVE_START_KB'] = str(flash_reserve_start)
# space to reserve for storage at end of flash
flash_reserve_end = get_config('FLASH_RESERVE_END_KB', default=0, type=int)
# ram layout
ram_map = get_mcu_config('RAM_MAP', True)
flash_base = 0x08000000 + flash_reserve_start * 1024
if not args.bootloader:
flash_length = flash_size - (flash_reserve_start + flash_reserve_end)
else:
flash_length = get_config('FLASH_BOOTLOADER_LOAD_KB', type=int)
print("Generating ldscript.ld")
f = open(fname, 'w')
ram0_start = ram_map[0][0]
ram0_len = ram_map[0][1] * 1024
# possibly reserve some memory for app/bootloader comms
ram_reserve_start = get_config('RAM_RESERVE_START', default=0, type=int)
ram0_start += ram_reserve_start
ram0_len -= ram_reserve_start
f.write('''/* generated ldscript.ld */
MEMORY
{
flash : org = 0x%08x, len = %uK
ram0 : org = 0x%08x, len = %u
}
INCLUDE common.ld
''' % (flash_base, flash_length, ram0_start, ram0_len))
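# Worked example (hypothetical hwdef values): with FLASH_SIZE_KB 1024,
# FLASH_RESERVE_START_KB 16 and FLASH_RESERVE_END_KB 0, write_ldscript
# emits flash : org = 0x08004000 (0x08000000 + 16*1024), len = 1008K.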
def copy_common_linkerscript(outdir, hwdef):
dirpath = os.path.dirname(hwdef)
shutil.copy(os.path.join(dirpath, "../common/common.ld"),
os.path.join(outdir, "common.ld"))
def get_USB_IDs():
'''return tuple of USB VID/PID'''
global dual_USB_enabled
if dual_USB_enabled:
# use pidcodes allocated ID
default_vid = 0x1209
default_pid = 0x5740
else:
default_vid = 0x0483
default_pid = 0x5740
return (get_config('USB_VENDOR', type=int, default=default_vid), get_config('USB_PRODUCT', type=int, default=default_pid))
def write_USB_config(f):
'''write USB config defines'''
if not have_type_prefix('OTG'):
return
f.write('// USB configuration\n')
(USB_VID, USB_PID) = get_USB_IDs()
f.write('#define HAL_USB_VENDOR_ID 0x%04x\n' % int(USB_VID))
f.write('#define HAL_USB_PRODUCT_ID 0x%04x\n' % int(USB_PID))
f.write('#define HAL_USB_STRING_MANUFACTURER "%s"\n' % get_config("USB_STRING_MANUFACTURER", default="ArduPilot"))
default_product = "%BOARD%"
if args.bootloader:
default_product += "-BL"
f.write('#define HAL_USB_STRING_PRODUCT "%s"\n' % get_config("USB_STRING_PRODUCT", default=default_product))
f.write('#define HAL_USB_STRING_SERIAL "%s"\n' % get_config("USB_STRING_SERIAL", default="%SERIAL%"))
f.write('\n\n')
def write_SPI_table(f):
'''write SPI device table'''
f.write('\n// SPI device table\n')
devlist = []
for dev in spidev:
if len(dev) != 7:
print("Badly formed SPIDEV line %s" % dev)
name = '"' + dev[0] + '"'
bus = dev[1]
devid = dev[2]
cs = dev[3]
mode = dev[4]
lowspeed = dev[5]
highspeed = dev[6]
if not bus.startswith('SPI') or not bus in spi_list:
error("Bad SPI bus in SPIDEV line %s" % dev)
if not devid.startswith('DEVID') or not is_int(devid[5:]):
error("Bad DEVID in SPIDEV line %s" % dev)
if not cs in bylabel or not bylabel[cs].is_CS():
error("Bad CS pin in SPIDEV line %s" % dev)
if not mode in ['MODE0', 'MODE1', 'MODE2', 'MODE3']:
error("Bad MODE in SPIDEV line %s" % dev)
if not lowspeed.endswith('*MHZ') and not lowspeed.endswith('*KHZ'):
error("Bad lowspeed value %s in SPIDEV line %s" % (lowspeed, dev))
if not highspeed.endswith('*MHZ') and not highspeed.endswith('*KHZ'):
error("Bad highspeed value %s in SPIDEV line %s" % (highspeed,
dev))
cs_pin = bylabel[cs]
pal_line = 'PAL_LINE(GPIO%s,%uU)' % (cs_pin.port, cs_pin.pin)
devidx = len(devlist)
f.write(
'#define HAL_SPI_DEVICE%-2u SPIDesc(%-17s, %2u, %2u, %-19s, SPIDEV_%s, %7s, %7s)\n'
% (devidx, name, spi_list.index(bus), int(devid[5:]), pal_line,
mode, lowspeed, highspeed))
devlist.append('HAL_SPI_DEVICE%u' % devidx)
f.write('#define HAL_SPI_DEVICE_LIST %s\n\n' % ','.join(devlist))
def write_SPI_config(f):
'''write SPI config defines'''
global spi_list
for t in bytype.keys():
if t.startswith('SPI'):
spi_list.append(t)
spi_list = sorted(spi_list)
if len(spi_list) == 0:
f.write('#define HAL_USE_SPI FALSE\n')
return
devlist = []
for dev in spi_list:
n = int(dev[3:])
devlist.append('HAL_SPI%u_CONFIG' % n)
f.write(
'#define HAL_SPI%u_CONFIG { &SPID%u, %u, STM32_SPI_SPI%u_DMA_STREAMS }\n'
% (n, n, n, n))
f.write('#define HAL_SPI_BUS_LIST %s\n\n' % ','.join(devlist))
write_SPI_table(f)
def parse_spi_device(dev):
'''parse a SPI:xxx device item'''
a = dev.split(':')
if len(a) != 2:
error("Bad SPI device: %s" % dev)
return 'hal.spi->get_device("%s")' % a[1]
def parse_i2c_device(dev):
'''parse a I2C:xxx:xxx device item'''
a = dev.split(':')
if len(a) != 3:
error("Bad I2C device: %s" % dev)
busaddr = int(a[2],base=0)
if a[1] == 'ALL_EXTERNAL':
return ('FOREACH_I2C_EXTERNAL(b)', 'GET_I2C_DEVICE(b,0x%02x)' % (busaddr))
elif a[1] == 'ALL_INTERNAL':
return ('FOREACH_I2C_INTERNAL(b)', 'GET_I2C_DEVICE(b,0x%02x)' % (busaddr))
elif a[1] == 'ALL':
return ('FOREACH_I2C(b)', 'GET_I2C_DEVICE(b,0x%02x)' % (busaddr))
busnum = int(a[1])
return ('', 'GET_I2C_DEVICE(%u,0x%02x)' % (busnum, busaddr))
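# Example: parse_i2c_device('I2C:ALL_EXTERNAL:0x77') returns
# ('FOREACH_I2C_EXTERNAL(b)', 'GET_I2C_DEVICE(b,0x77)'), while
# parse_i2c_device('I2C:1:0x77') returns ('', 'GET_I2C_DEVICE(1,0x77)').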
def seen_str(dev):
'''return string representation of device for checking for duplicates'''
return str(dev[:2])
def write_IMU_config(f):
'''write IMU config defines'''
global imu_list
devlist = []
wrapper = ''
seen = set()
for dev in imu_list:
if seen_str(dev) in seen:
error("Duplicate IMU: %s" % seen_str(dev))
seen.add(seen_str(dev))
driver = dev[0]
for i in range(1,len(dev)):
if dev[i].startswith("SPI:"):
dev[i] = parse_spi_device(dev[i])
elif dev[i].startswith("I2C:"):
(wrapper, dev[i]) = parse_i2c_device(dev[i])
n = len(devlist)+1
devlist.append('HAL_INS_PROBE%u' % n)
f.write(
'#define HAL_INS_PROBE%u %s ADD_BACKEND(AP_InertialSensor_%s::probe(*this,%s))\n'
% (n, wrapper, driver, ','.join(dev[1:])))
if len(devlist) > 0:
f.write('#define HAL_INS_PROBE_LIST %s\n\n' % ';'.join(devlist))
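# Example (hypothetical hwdef.dat entry): an IMU line such as
#   IMU Invensense SPI:mpu6000 ROTATION_YAW_180
# produces the imu_list entry ['Invensense', 'SPI:mpu6000', 'ROTATION_YAW_180']
# and emits
#   #define HAL_INS_PROBE1  ADD_BACKEND(AP_InertialSensor_Invensense::probe(*this,hal.spi->get_device("mpu6000"),ROTATION_YAW_180))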
def write_MAG_config(f):
'''write MAG config defines'''
global compass_list
devlist = []
seen = set()
for dev in compass_list:
if seen_str(dev) in seen:
error("Duplicate MAG: %s" % seen_str(dev))
seen.add(seen_str(dev))
driver = dev[0]
probe = 'probe'
wrapper = ''
a = driver.split(':')
driver = a[0]
if len(a) > 1 and a[1].startswith('probe'):
probe = a[1]
for i in range(1,len(dev)):
if dev[i].startswith("SPI:"):
dev[i] = parse_spi_device(dev[i])
elif dev[i].startswith("I2C:"):
(wrapper, dev[i]) = parse_i2c_device(dev[i])
n = len(devlist)+1
devlist.append('HAL_MAG_PROBE%u' % n)
f.write(
'#define HAL_MAG_PROBE%u %s ADD_BACKEND(DRIVER_%s, AP_Compass_%s::%s(%s))\n'
% (n, wrapper, driver, driver, probe, ','.join(dev[1:])))
if len(devlist) > 0:
f.write('#define HAL_MAG_PROBE_LIST %s\n\n' % ';'.join(devlist))
def write_BARO_config(f):
'''write barometer config defines'''
global baro_list
devlist = []
seen = set()
for dev in baro_list:
if seen_str(dev) in seen:
error("Duplicate BARO: %s" % seen_str(dev))
seen.add(seen_str(dev))
driver = dev[0]
probe = 'probe'
wrapper = ''
a = driver.split(':')
driver = a[0]
if len(a) > 1 and a[1].startswith('probe'):
probe = a[1]
for i in range(1,len(dev)):
if dev[i].startswith("SPI:"):
dev[i] = parse_spi_device(dev[i])
elif dev[i].startswith("I2C:"):
(wrapper, dev[i]) = parse_i2c_device(dev[i])
if dev[i].startswith('hal.i2c_mgr'):
dev[i] = 'std::move(%s)' % dev[i]
n = len(devlist)+1
devlist.append('HAL_BARO_PROBE%u' % n)
f.write(
'#define HAL_BARO_PROBE%u %s ADD_BACKEND(AP_Baro_%s::%s(*this,%s))\n'
% (n, wrapper, driver, probe, ','.join(dev[1:])))
if len(devlist) > 0:
f.write('#define HAL_BARO_PROBE_LIST %s\n\n' % ';'.join(devlist))
def get_gpio_bylabel(label):
'''get GPIO(n) setting on a pin label, or -1'''
p = bylabel.get(label)
if p is None:
return -1
return p.extra_value('GPIO', type=int, default=-1)
def get_extra_bylabel(label, name, default=None):
'''get extra setting for a label by name'''
p = bylabel.get(label)
if p is None:
return default
return p.extra_value(name, type=str, default=default)
def write_UART_config(f):
'''write UART config defines'''
global dual_USB_enabled
if get_config('UART_ORDER', required=False) is None:
return
uart_list = config['UART_ORDER']
f.write('\n// UART configuration\n')
# write out driver declarations for HAL_ChibOS_Class.cpp
devnames = "ABCDEFGH"
sdev = 0
idx = 0
num_empty_uarts = 0
for dev in uart_list:
if dev == 'EMPTY':
f.write('#define HAL_UART%s_DRIVER Empty::UARTDriver uart%sDriver\n' %
(devnames[idx], devnames[idx]))
num_empty_uarts += 1
else:
f.write(
'#define HAL_UART%s_DRIVER ChibiOS::UARTDriver uart%sDriver(%u)\n'
% (devnames[idx], devnames[idx], sdev))
sdev += 1
idx += 1
for idx in range(len(uart_list), len(devnames)):
f.write('#define HAL_UART%s_DRIVER Empty::UARTDriver uart%sDriver\n' %
(devnames[idx], devnames[idx]))
if 'IOMCU_UART' in config:
f.write('#define HAL_WITH_IO_MCU 1\n')
idx = len(uart_list)
f.write('#define HAL_UART_IOMCU_IDX %u\n' % idx)
f.write(
'#define HAL_UART_IO_DRIVER ChibiOS::UARTDriver uart_io(HAL_UART_IOMCU_IDX)\n'
)
uart_list.append(config['IOMCU_UART'][0])
        f.write('#define HAL_HAVE_SERVO_VOLTAGE 1\n') # make the assumption that IO guarantees servo monitoring
# all IOMCU capable boards have SBUS out
f.write('#define AP_FEATURE_SBUS_OUT 1\n')
else:
f.write('#define HAL_WITH_IO_MCU 0\n')
f.write('\n')
need_uart_driver = False
OTG2_index = None
devlist = []
have_rts_cts = False
for dev in uart_list:
if dev.startswith('UART'):
n = int(dev[4:])
elif dev.startswith('USART'):
n = int(dev[5:])
elif dev.startswith('OTG'):
n = int(dev[3:])
elif dev.startswith('EMPTY'):
continue
else:
error("Invalid element %s in UART_ORDER" % dev)
devlist.append('HAL_%s_CONFIG' % dev)
if dev + "_RTS" in bylabel:
p = bylabel[dev + '_RTS']
rts_line = 'PAL_LINE(GPIO%s,%uU)' % (p.port, p.pin)
have_rts_cts = True
else:
rts_line = "0"
if dev.startswith('OTG2'):
f.write(
'#define HAL_%s_CONFIG {(BaseSequentialStream*) &SDU2, true, false, 0, 0, false, 0, 0}\n'
% dev)
OTG2_index = uart_list.index(dev)
dual_USB_enabled = True
elif dev.startswith('OTG'):
f.write(
'#define HAL_%s_CONFIG {(BaseSequentialStream*) &SDU1, true, false, 0, 0, false, 0, 0}\n'
% dev)
else:
need_uart_driver = True
f.write(
"#define HAL_%s_CONFIG { (BaseSequentialStream*) &SD%u, false, "
% (dev, n))
if mcu_series.startswith("STM32F1"):
f.write("%s, " % rts_line)
else:
f.write("STM32_%s_RX_DMA_CONFIG, STM32_%s_TX_DMA_CONFIG, %s, " %
(dev, dev, rts_line))
# add inversion pins, if any
f.write("%d, " % get_gpio_bylabel(dev + "_RXINV"))
f.write("%s, " % get_extra_bylabel(dev + "_RXINV", "POL", "0"))
f.write("%d, " % get_gpio_bylabel(dev + "_TXINV"))
f.write("%s}\n" % get_extra_bylabel(dev + "_TXINV", "POL", "0"))
if have_rts_cts:
f.write('#define AP_FEATURE_RTSCTS 1\n')
if OTG2_index is not None:
f.write('#define HAL_OTG2_UART_INDEX %d\n' % OTG2_index)
f.write('''
''' % (OTG2_index, OTG2_index))
f.write('#define HAL_HAVE_DUAL_USB_CDC 1\n')
f.write('#define HAL_UART_DEVICE_LIST %s\n\n' % ','.join(devlist))
if not need_uart_driver and not args.bootloader:
f.write('''
''')
num_uarts = len(devlist)
if 'IOMCU_UART' in config:
num_uarts -= 1
f.write('#define HAL_UART_NUM_SERIAL_PORTS %u\n' % (num_uarts+num_empty_uarts))
def write_UART_config_bootloader(f):
'''write UART config defines for the bootloader'''
if get_config('UART_ORDER', required=False) is None:
return
uart_list = config['UART_ORDER']
f.write('\n// UART configuration\n')
devlist = []
have_uart = False
OTG2_index = None
for u in uart_list:
if u.startswith('OTG2'):
devlist.append('(BaseChannel *)&SDU2')
OTG2_index = uart_list.index(u)
elif u.startswith('OTG'):
devlist.append('(BaseChannel *)&SDU1')
else:
unum = int(u[-1])
devlist.append('(BaseChannel *)&SD%u' % unum)
have_uart = True
f.write('#define BOOTLOADER_DEV_LIST %s\n' % ','.join(devlist))
if OTG2_index is not None:
f.write('#define HAL_OTG2_UART_INDEX %d\n' % OTG2_index)
if not have_uart:
f.write('''
''')
def write_I2C_config(f):
'''write I2C config defines'''
if not have_type_prefix('I2C'):
print("No I2C peripherals")
f.write('''
''')
return
if not 'I2C_ORDER' in config:
print("Missing I2C_ORDER config")
return
i2c_list = config['I2C_ORDER']
f.write('// I2C configuration\n')
if len(i2c_list) == 0:
error("I2C_ORDER invalid")
devlist = []
# write out config structures
for dev in i2c_list:
if not dev.startswith('I2C') or dev[3] not in "1234":
error("Bad I2C_ORDER element %s" % dev)
n = int(dev[3:])
devlist.append('HAL_I2C%u_CONFIG' % n)
f.write('''
'''
% (n, n, n, n, n, n, n, n, n, n, n, n))
f.write('\n#define HAL_I2C_DEVICE_LIST %s\n\n' % ','.join(devlist))
def parse_timer(str):
'''parse a timer channel string, e.g. TIM8_CH2N'''
result = re.match(r'TIM([0-9]*)_CH([1234])(N?)', str)
if result:
tim = int(result.group(1))
chan = int(result.group(2))
compl = result.group(3) == 'N'
if tim < 1 or tim > 17:
error("Bad timer number %s in %s" % (tim, str))
return (tim, chan, compl)
else:
error("Bad timer definition %s" % str)
def write_PWM_config(f):
'''write PWM config defines'''
rc_in = None
rc_in_int = None
alarm = None
pwm_out = []
pwm_timers = []
for l in bylabel.keys():
p = bylabel[l]
if p.type.startswith('TIM'):
if p.has_extra('RCIN'):
rc_in = p
elif p.has_extra('RCININT'):
rc_in_int = p
elif p.has_extra('ALARM'):
alarm = p
else:
if p.extra_value('PWM', type=int) is not None:
pwm_out.append(p)
if p.type not in pwm_timers:
pwm_timers.append(p.type)
if not pwm_out and not alarm:
print("No PWM output defined")
f.write('''
''')
if rc_in is not None:
(n, chan, compl) = parse_timer(rc_in.label)
if compl:
# it is an inverted channel
f.write('#define HAL_RCIN_IS_INVERTED\n')
if chan not in [1, 2]:
error(
"Bad channel number, only channel 1 and 2 supported for RCIN")
f.write('// RC input config\n')
f.write('#define HAL_USE_ICU TRUE\n')
f.write('#define STM32_ICU_USE_TIM%u TRUE\n' % n)
f.write('#define RCIN_ICU_TIMER ICUD%u\n' % n)
f.write('#define RCIN_ICU_CHANNEL ICU_CHANNEL_%u\n' % chan)
f.write('#define STM32_RCIN_DMA_STREAM STM32_TIM_TIM%u_CH%u_DMA_STREAM\n' % (n, chan))
f.write('#define STM32_RCIN_DMA_CHANNEL STM32_TIM_TIM%u_CH%u_DMA_CHAN\n' % (n, chan))
f.write('\n')
if rc_in_int is not None:
(n, chan, compl) = parse_timer(rc_in_int.label)
if compl:
error('Complementary channel is not supported for RCININT %s' % rc_in_int.label)
f.write('// RC input config\n')
f.write('#define HAL_USE_EICU TRUE\n')
f.write('#define STM32_EICU_USE_TIM%u TRUE\n' % n)
f.write('#define RCININT_EICU_TIMER EICUD%u\n' % n)
f.write('#define RCININT_EICU_CHANNEL EICU_CHANNEL_%u\n' % chan)
f.write('\n')
if alarm is not None:
(n, chan, compl) = parse_timer(alarm.label)
if compl:
error("Complementary channel is not supported for ALARM %s" % alarm.label)
f.write('\n')
f.write('// Alarm PWM output config\n')
f.write('#define STM32_PWM_USE_TIM%u TRUE\n' % n)
f.write('#define STM32_TIM%u_SUPPRESS_ISR\n' % n)
chan_mode = [
'PWM_OUTPUT_DISABLED', 'PWM_OUTPUT_DISABLED',
'PWM_OUTPUT_DISABLED', 'PWM_OUTPUT_DISABLED'
]
chan_mode[chan - 1] = 'PWM_OUTPUT_ACTIVE_HIGH'
pwm_clock = 1000000
period = 1000
f.write('''#define HAL_PWM_ALARM \\
{ /* pwmGroup */ \\
%u, /* Timer channel */ \\
{ /* PWMConfig */ \\
%u, /* PWM clock frequency. */ \\
%u, /* Initial PWM period 1ms. */ \\
NULL, /* no callback */ \\
{ /* Channel Config */ \\
{%s, NULL}, \\
{%s, NULL}, \\
{%s, NULL}, \\
{%s, NULL} \\
}, \\
0, 0 \\
}, \\
&PWMD%u /* PWMDriver* */ \\
}\n''' %
(chan-1, pwm_clock, period, chan_mode[0],
chan_mode[1], chan_mode[2], chan_mode[3], n))
else:
f.write('\n')
f.write('// No Alarm output pin defined\n')
f.write('#undef HAL_PWM_ALARM\n')
f.write('\n')
f.write('// PWM timer config\n')
for t in sorted(pwm_timers):
n = int(t[3:])
f.write('#define STM32_PWM_USE_TIM%u TRUE\n' % n)
f.write('#define STM32_TIM%u_SUPPRESS_ISR\n' % n)
f.write('\n')
f.write('// PWM output config\n')
groups = []
have_complementary = False
for t in sorted(pwm_timers):
group = len(groups) + 1
n = int(t[3:])
chan_list = [255, 255, 255, 255]
chan_mode = [
'PWM_OUTPUT_DISABLED', 'PWM_OUTPUT_DISABLED',
'PWM_OUTPUT_DISABLED', 'PWM_OUTPUT_DISABLED'
]
alt_functions = [ 0, 0, 0, 0 ]
pal_lines = [ '0', '0', '0', '0' ]
for p in pwm_out:
if p.type != t:
continue
(n, chan, compl) = parse_timer(p.label)
pwm = p.extra_value('PWM', type=int)
chan_list[chan - 1] = pwm - 1
if compl:
chan_mode[chan - 1] = 'PWM_COMPLEMENTARY_OUTPUT_ACTIVE_HIGH'
have_complementary = True
else:
chan_mode[chan - 1] = 'PWM_OUTPUT_ACTIVE_HIGH'
alt_functions[chan - 1] = p.af
pal_lines[chan - 1] = 'PAL_LINE(GPIO%s, %uU)' % (p.port, p.pin)
groups.append('HAL_PWM_GROUP%u' % group)
if n in [1, 8]:
# only the advanced timers do 8MHz clocks
advanced_timer = 'true'
else:
advanced_timer = 'false'
pwm_clock = 1000000
period = 20000 * pwm_clock / 1000000
f.write('''#if defined(STM32_TIM_TIM%u_UP_DMA_STREAM) && defined(STM32_TIM_TIM%u_UP_DMA_CHAN)
# define HAL_PWM%u_DMA_CONFIG true, STM32_TIM_TIM%u_UP_DMA_STREAM, STM32_TIM_TIM%u_UP_DMA_CHAN
#else
# define HAL_PWM%u_DMA_CONFIG false, 0, 0
#endif\n''' % (n, n, n, n, n, n))
f.write('''#define HAL_PWM_GROUP%u { %s, \\
{%u, %u, %u, %u}, \\
/* Group Initial Config */ \\
{ \\
%u, /* PWM clock frequency. */ \\
%u, /* Initial PWM period 20ms. */ \\
NULL, /* no callback */ \\
{ \\
/* Channel Config */ \\
{%s, NULL}, \\
{%s, NULL}, \\
{%s, NULL}, \\
{%s, NULL} \\
}, 0, 0}, &PWMD%u, \\
HAL_PWM%u_DMA_CONFIG, \\
{ %u, %u, %u, %u }, \\
{ %s, %s, %s, %s }}\n''' %
(group, advanced_timer,
chan_list[0], chan_list[1], chan_list[2], chan_list[3],
pwm_clock, period,
chan_mode[0], chan_mode[1], chan_mode[2], chan_mode[3],
n, n,
alt_functions[0], alt_functions[1], alt_functions[2], alt_functions[3],
pal_lines[0], pal_lines[1], pal_lines[2], pal_lines[3]))
f.write('#define HAL_PWM_GROUPS %s\n\n' % ','.join(groups))
if have_complementary:
f.write('#define STM32_PWM_USE_ADVANCED TRUE\n')
def write_ADC_config(f):
'''write ADC config defines'''
f.write('// ADC config\n')
adc_chans = []
for l in bylabel:
p = bylabel[l]
if not p.type.startswith('ADC'):
continue
chan = get_ADC1_chan(mcu_type, p.portpin)
scale = p.extra_value('SCALE', default=None)
if p.label == 'VDD_5V_SENS':
f.write('#define ANALOG_VCC_5V_PIN %u\n' % chan)
f.write('#define HAL_HAVE_BOARD_VOLTAGE 1\n')
if p.label == 'FMU_SERVORAIL_VCC_SENS':
f.write('#define FMU_SERVORAIL_ADC_CHAN %u\n' % chan)
f.write('#define HAL_HAVE_SERVO_VOLTAGE 1\n')
adc_chans.append((chan, scale, p.label, p.portpin))
adc_chans = sorted(adc_chans)
vdd = get_config('STM32_VDD')
if vdd[-1] == 'U':
vdd = vdd[:-1]
vdd = float(vdd) * 0.01
f.write('#define HAL_ANALOG_PINS { \\\n')
for (chan, scale, label, portpin) in adc_chans:
scale_str = '%.2f/4096' % vdd
if scale is not None and scale != '1':
scale_str = scale + '*' + scale_str
f.write('{ %2u, %12s }, /* %s %s */ \\\n' % (chan, scale_str, portpin,
label))
f.write('}\n\n')
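# Worked example (illustrative): an STM32_VDD config value of "330U" parses
# to vdd = 3.3, so an unscaled channel gets the scale string "3.30/4096",
# i.e. volts per count of the 12-bit ADC.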
def write_GPIO_config(f):
'''write GPIO config defines'''
f.write('// GPIO config\n')
gpios = []
gpioset = set()
for l in bylabel:
p = bylabel[l]
gpio = p.extra_value('GPIO', type=int)
if gpio is None:
continue
if gpio in gpioset:
error("Duplicate GPIO value %u" % gpio)
gpioset.add(gpio)
# see if it is also a PWM pin
pwm = p.extra_value('PWM', type=int, default=0)
port = p.port
pin = p.pin
gpios.append((gpio, pwm, port, pin, p))
gpios = sorted(gpios)
for (gpio, pwm, port, pin, p) in gpios:
f.write('#define HAL_GPIO_LINE_GPIO%u PAL_LINE(GPIO%s, %2uU)\n' % (gpio, port, pin))
f.write('#define HAL_GPIO_PINS { \\\n')
for (gpio, pwm, port, pin, p) in gpios:
f.write('{ %3u, true, %2u, PAL_LINE(GPIO%s, %2uU)}, /* %s */ \\\n' %
(gpio, pwm, port, pin, p))
# and write #defines for use by config code
f.write('}\n\n')
f.write('// full pin define list\n')
last_label = None
for l in sorted(list(set(bylabel.keys()))):
p = bylabel[l]
label = p.label
label = label.replace('-', '_')
if label == last_label:
continue
last_label = label
f.write('#define HAL_GPIO_PIN_%-20s PAL_LINE(GPIO%s,%uU)\n' %
(label, p.port, p.pin))
f.write('\n')
def bootloader_path():
# always embed a bootloader if it is available
this_dir = os.path.realpath(__file__)
rootdir = os.path.relpath(os.path.join(this_dir, "../../../../.."))
hwdef_dirname = os.path.basename(os.path.dirname(args.hwdef))
bootloader_filename = "%s_bl.bin" % (hwdef_dirname,)
bootloader_path = os.path.join(rootdir,
"Tools",
"bootloaders",
bootloader_filename)
if os.path.exists(bootloader_path):
return os.path.realpath(bootloader_path)
return None
def add_bootloader():
'''add the bootloader to ROMFS'''
bp = bootloader_path()
if bp is not None:
romfs["bootloader.bin"] = bp
def write_ROMFS(outdir):
'''create ROMFS embedded header'''
romfs_list = []
for k in romfs.keys():
romfs_list.append((k, romfs[k]))
env_vars['ROMFS_FILES'] = romfs_list
def setup_apj_IDs():
'''setup the APJ board IDs'''
env_vars['APJ_BOARD_ID'] = get_config('APJ_BOARD_ID')
env_vars['APJ_BOARD_TYPE'] = get_config('APJ_BOARD_TYPE', default=mcu_type)
(USB_VID, USB_PID) = get_USB_IDs()
env_vars['USBID'] = '0x%04x/0x%04x' % (USB_VID, USB_PID)
def write_peripheral_enable(f):
'''write peripheral enable lines'''
f.write('// peripherals enabled\n')
for type in sorted(bytype.keys()):
if type.startswith('USART') or type.startswith('UART'):
dstr = 'STM32_SERIAL_USE_%-6s' % type
f.write('#ifndef %s\n' % dstr)
f.write('#define %s TRUE\n' % dstr)
f.write('#endif\n')
if type.startswith('SPI'):
f.write('#define STM32_SPI_USE_%s TRUE\n' % type)
if type.startswith('OTG'):
f.write('#define STM32_USB_USE_%s TRUE\n' % type)
if type.startswith('I2C'):
f.write('#define STM32_I2C_USE_%s TRUE\n' % type)
def get_dma_exclude(periph_list):
'''return list of DMA devices to exclude from DMA'''
dma_exclude = []
for periph in periph_list:
if periph not in bylabel:
continue
p = bylabel[periph]
if p.has_extra('NODMA'):
dma_exclude.append(periph)
return dma_exclude
def write_hwdef_header(outfilename):
'''write hwdef header file'''
print("Writing hwdef setup in %s" % outfilename)
f = open(outfilename, 'w')
f.write('''/*
generated hardware definitions from hwdef.dat - DO NOT EDIT
*/
''')
write_mcu_config(f)
write_SPI_config(f)
write_ADC_config(f)
write_GPIO_config(f)
write_IMU_config(f)
write_MAG_config(f)
write_BARO_config(f)
write_peripheral_enable(f)
dma_resolver.write_dma_header(f, periph_list, mcu_type,
dma_exclude=get_dma_exclude(periph_list),
dma_priority=get_config('DMA_PRIORITY',default='TIM* SPI*', spaces=True),
dma_noshare=get_config('DMA_NOSHARE',default='', spaces=True))
if not args.bootloader:
write_PWM_config(f)
write_I2C_config(f)
write_UART_config(f)
else:
write_UART_config_bootloader(f)
setup_apj_IDs()
write_USB_config(f)
add_bootloader()
if len(romfs) > 0:
f.write('#define HAL_HAVE_AP_ROMFS_EMBEDDED_H 1\n')
if mcu_series.startswith('STM32F1'):
f.write('''
/*
* I/O ports initial setup, this configuration is established soon after reset
* in the initialization code.
* Please refer to the STM32 Reference Manual for details.
*/
''')
else:
f.write('''
/*
* I/O ports initial setup, this configuration is established soon after reset
* in the initialization code.
* Please refer to the STM32 Reference Manual for details.
*/
''')
for port in sorted(ports):
f.write("/* PORT%s:\n" % port)
for pin in range(pincount[port]):
p = portmap[port][pin]
if p.label is not None:
f.write(" %s\n" % p)
f.write("*/\n\n")
if pincount[port] == 0:
# handle blank ports
for vtype in vtypes:
f.write("#define VAL_GPIO%s_%-7s 0x0\n" % (port,
vtype))
f.write("\n\n\n")
continue
for vtype in vtypes:
f.write("#define VAL_GPIO%s_%-7s (" % (p.port, vtype))
first = True
for pin in range(pincount[port]):
p = portmap[port][pin]
modefunc = getattr(p, "get_" + vtype)
v = modefunc()
if v is None:
continue
if not first:
f.write(" | \\\n ")
f.write(v)
first = False
if first:
# there were no pin definitions, use 0
f.write("0")
f.write(")\n\n")
def build_peripheral_list():
'''build a list of peripherals for DMA resolver to work on'''
peripherals = []
done = set()
prefixes = ['SPI', 'USART', 'UART', 'I2C']
for p in allpins:
type = p.type
if type in done:
continue
for prefix in prefixes:
if type.startswith(prefix):
ptx = type + "_TX"
prx = type + "_RX"
peripherals.append(ptx)
peripherals.append(prx)
if not ptx in bylabel:
bylabel[ptx] = p
if not prx in bylabel:
bylabel[prx] = p
if type.startswith('ADC'):
peripherals.append(type)
if type.startswith('SDIO') or type.startswith('SDMMC'):
if not mcu_series.startswith("STM32H7"):
peripherals.append(type)
if type.startswith('TIM'):
if p.has_extra('RCIN'):
label = p.label
if label[-1] == 'N':
label = label[:-1]
peripherals.append(label)
elif not p.has_extra('ALARM') and not p.has_extra('RCININT'):
# get the TIMn_UP DMA channels for DShot
label = type + '_UP'
if not label in peripherals and not p.has_extra('NODMA'):
peripherals.append(label)
done.add(type)
return peripherals
def write_env_py(filename):
'''write out env.py for environment variables to control the build process'''
# see if board has a defaults.parm file
defaults_filename = os.path.join(os.path.dirname(args.hwdef), 'defaults.parm')
if os.path.exists(defaults_filename) and not args.bootloader:
print("Adding defaults.parm")
env_vars['DEFAULT_PARAMETERS'] = os.path.abspath(defaults_filename)
# CHIBIOS_BUILD_FLAGS is passed to the ChibiOS makefile
env_vars['CHIBIOS_BUILD_FLAGS'] = ' '.join(build_flags)
pickle.dump(env_vars, open(filename, "wb"))
def romfs_add(romfs_filename, filename):
'''add a file to ROMFS'''
romfs[romfs_filename] = filename
def romfs_wildcard(pattern):
'''add a set of files to ROMFS by wildcard'''
base_path = os.path.join(os.path.dirname(__file__), '..', '..', '..', '..')
(pattern_dir, pattern) = os.path.split(pattern)
for f in os.listdir(os.path.join(base_path, pattern_dir)):
if fnmatch.fnmatch(f, pattern):
romfs[f] = os.path.join(pattern_dir, f)
def process_line(line):
'''process one line of pin definition file'''
global allpins, imu_list, compass_list, baro_list
a = shlex.split(line)
# keep all config lines for later use
alllines.append(line)
if a[0].startswith('P') and a[0][1] in ports and a[0] in config:
error("Pin %s redefined" % a[0])
config[a[0]] = a[1:]
if a[0] == 'MCU':
global mcu_type, mcu_series
mcu_type = a[2]
mcu_series = a[1]
setup_mcu_type_defaults()
if a[0].startswith('P') and a[0][1] in ports:
# it is a port/pin definition
try:
port = a[0][1]
pin = int(a[0][2:])
label = a[1]
type = a[2]
extra = a[3:]
except Exception:
error("Bad pin line: %s" % a)
return
p = generic_pin(port, pin, label, type, extra)
portmap[port][pin] = p
allpins.append(p)
if not type in bytype:
bytype[type] = []
bytype[type].append(p)
bylabel[label] = p
af = get_alt_function(mcu_type, a[0], label)
if af is not None:
p.af = af
if a[0] == 'SPIDEV':
spidev.append(a[1:])
if a[0] == 'IMU':
imu_list.append(a[1:])
if a[0] == 'COMPASS':
compass_list.append(a[1:])
if a[0] == 'BARO':
baro_list.append(a[1:])
if a[0] == 'ROMFS':
romfs_add(a[1],a[2])
if a[0] == 'ROMFS_WILDCARD':
romfs_wildcard(a[1])
if a[0] == 'undef':
print("Removing %s" % a[1])
config.pop(a[1], '')
bytype.pop(a[1],'')
bylabel.pop(a[1],'')
# also remove all occurrences of defines in previous lines, if any
for line in alllines[:]:
if line.startswith('define') and a[1] == line.split()[1]:
alllines.remove(line)
newpins = []
for pin in allpins:
if pin.type == a[1]:
continue
if pin.label == a[1]:
continue
if pin.portpin == a[1]:
continue
newpins.append(pin)
allpins = newpins
if a[1] == 'IMU':
imu_list = []
if a[1] == 'COMPASS':
compass_list = []
if a[1] == 'BARO':
baro_list = []
if a[0] == 'env':
print("Adding environment %s" % ' '.join(a[1:]))
if len(a[1:]) < 2:
error("Bad env line for %s" % a[0])
env_vars[a[1]] = ' '.join(a[2:])
def process_file(filename):
'''process a hwdef.dat file'''
try:
f = open(filename, "r")
except Exception:
error("Unable to open file %s" % filename)
for line in f.readlines():
line = line.strip()
if len(line) == 0 or line[0] == '#':
continue
a = shlex.split(line)
if a[0] == "include" and len(a) > 1:
include_file = a[1]
if include_file[0] != '/':
dir = os.path.dirname(filename)
include_file = os.path.normpath(
os.path.join(dir, include_file))
print("Including %s" % include_file)
process_file(include_file)
else:
process_line(line)
process_file(args.hwdef)
outdir = args.outdir
if outdir is None:
outdir = '/tmp'
if not "MCU" in config:
error("Missing MCU type in config")
mcu_type = get_config('MCU', 1)
print("Setup for MCU %s" % mcu_type)
periph_list = build_peripheral_list()
write_hwdef_header(os.path.join(outdir, "hwdef.h"))
write_ldscript(os.path.join(outdir, "ldscript.ld"))
write_ROMFS(outdir)
copy_common_linkerscript(outdir, args.hwdef)
write_env_py(os.path.join(outdir, "env.py"))
|
'''
Name of the Task : Messy Folder
KIITFEST ID : KF36723
Operating System : MacOS Sierra
Programming Language used: Python
External modules used (if any) : os,sys
Additional instructions to use the program (if any) : The files to be organized must exist in the current working directory.
'''
import os
import sys
path1 = os.getcwd()
print(path1)
if __name__ == '__main__':
# Define the folder names for each extension type here
folderNames = {".txt" : "txt",
".csv" : "csv",
".mp3" : "mp3"}
# NOTE: Assuming files in current folder
directory = "./"
files = os.listdir(directory)
# Traverse all of the files
for filename in files:
# Grab the extension of the file
extension = os.path.splitext(filename)[1]
if extension in folderNames:
# Grab the folder where this file should be stored
folder = os.path.join(directory, folderNames[extension])
# Create the folder if it does not already exist
if not os.path.exists(folder):
os.mkdir(folder)
# Move the file into the folder
os.rename(os.path.join(directory, filename),
os.path.join(folder, filename))
|
import sys, os
extensions = ['sphinx.ext.pngmath', 'sphinx.ext.ifconfig']
templates_path = ['_templates']
source_suffix = '.rst'
master_doc = 'index'
project = u'Ginkgo'
copyright = u'2010, Jeet Sukumaran and Mark T. Holder'
version = '3.9'
release = '3.9.0'
exclude_trees = []
pygments_style = 'sphinx'
html_theme = 'default'
html_static_path = ['_static']
html_show_sourcelink = False
htmlhelp_basename = 'Ginkgodoc'
latex_documents = [
('index', 'Ginkgo.tex', u'Ginkgo Documentation',
u'Jeet Sukumaran and Mark T. Holder', 'manual'),
]
|
"""twistd plugin for XMPP net."""
"""
Kontalk XMPP server
Copyright (C) 2014 Kontalk Devteam <devteam@kontalk.org>
This program is free software: you can redistribute it and/or modify
it under the terms of the GNU General Public License as published by
the Free Software Foundation, either version 3 of the License, or
(at your option) any later version.
This program is distributed in the hope that it will be useful,
but WITHOUT ANY WARRANTY; without even the implied warranty of
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
GNU General Public License for more details.
You should have received a copy of the GNU General Public License
along with this program. If not, see <http://www.gnu.org/licenses/>.
"""
import demjson
from zope.interface import implements
from twisted.python import usage
from twisted.plugin import IPlugin
from twisted.application.service import IServiceMaker, MultiService
class Options(usage.Options):
optParameters = [["config", "c", "net.conf", "Configuration file."]]
class KontalkNetServiceMaker(object):
implements(IServiceMaker, IPlugin)
tapname = "kontalk-net"
description = "Kontalk XMPP Net component."
options = Options
def makeService(self, options):
from kontalk.xmppserver.component.net import NetComponent
from kontalk.xmppserver import log
# load configuration
with open(options['config'], 'r') as fp:
    config = demjson.decode(fp.read(), allow_comments=True)
log.init(config)
appl = MultiService()
comp = NetComponent(config)
comp.setServiceParent(appl)
comp.setup().setServiceParent(appl)
return appl
serviceMaker = KontalkNetServiceMaker()
|
import abc
import datetime
import re
from itertools import zip_longest
from .utils import (DoesntMatchException, EMPTY_CELL, ConfigurationError,
instantiate_if_class_lst)
class ResultContext(object):
'''An object that is passed through match methods to store the
result. Implement emit in a concrete subclass'''
def __init__(self):
self.root = None
self.stack = []
def push(self, level):
if not self.stack:
self.root = level
self.stack.append(level)
return self
@property
def current(self):
return self.stack[-1]
def pop(self):
self.stack.pop()
def emit(self, name, o):
raise NotImplementedError()
def commit(self, o1, o2):
raise NotImplementedError()
def debug(self, *args):
pass
def __enter__(self):
return self
def __exit__(self, etype, evalue, tb):
if etype is None and len(self.stack) >= 2:
self.commit(self.stack[-2], self.stack[-1])
self.stack.pop()
else:
self.pop()
return False
class AbstractVisitor(abc.ABC):
def __call__(self, o):
type_name = type(o).__name__
if type_name in self.dispatch:
return getattr(self, self.dispatch[type_name])(o)
elif hasattr(o, 'visit'):
return o.visit(self)
elif isinstance(o, dict):
return {k: self(v) for k, v in o.items()}
elif isinstance(o, (list, tuple)):
return [self(i) for i in o]
else:
return o
def table_str(table):
return '\n'.join(', '.join("%s" % i for i in l) for l in table)
class QuickPrint(AbstractVisitor):
dispatch = {
'ResultTable': 'visit_table_with_header'
}
def __init__(self, *show):
if not show:
show = None
elif show[0] is None:
show = ()
self.show = show
def visit_table(self, o):
return str(o)
def visit_line(self, o):
return str(o)
def visit_table_with_header(self, o):
data = {'_header': getattr(o, 'top_headers', ''),
'_column': getattr(o, 'left_headers', ''),
'_top_left': getattr(o, 'top_left', ''),
'_data': (len(o.data), max(len(i) for i in o.data)
if o.data else 0)
}
if self.show is None:
return data
return {k: data[k] for k in self.show}
class ResultObject(object):
def __init__(self, name, *args, **kwargs):
self.name = name
super(ResultObject, self).__init__(*args, **kwargs)
def set_args(self, *args):
pass
class ResultDict(ResultObject, dict):
def visit(self, visitor):
return {k: visitor(v) for (k, v) in self.items()}
def add(self, name, value):
suffix = None
while True:
n = (name if suffix is None
else (name + '_%d' % suffix))
if n not in self:
break
suffix = (suffix or 0) + 1
self[n] = value
def __getattr__(self, name):
if name.startswith('_'):
raise AttributeError(name)
try:
return self[name]
except KeyError:
raise AttributeError(name)
def __repr__(self):
return "Dict %s (%s)" % (
self.name, list(self.keys()))
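def _result_dict_demo():
    # Hedged usage sketch (assumed data, not from the original tests):
    # duplicate names get numeric suffixes rather than overwriting.
    d = ResultDict('demo')
    d.add('x', 1)
    d.add('x', 2)
    assert d == {'x': 1, 'x_1': 2}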
class ResultList(ResultObject, list):
def visit(self, visitor):
method = getattr(visitor, 'visit_list', None)
if method is None:
return [visitor(x) for x in self]
else:
return method(self)
def add(self, name, value):
self.append(value)
def __repr__(self):
return ("List %s (%s)" %
(self.name,
', '.join(str(i) for i in self)))
def _rindex(lst, x):
"""reverse index (index of first element from the end)"""
return len(lst) - 1 - lst[::-1].index(x)
class StripCellLine(object):
'''A transformer used by Lines to remove leading and trailing empty
cells
'''
def __init__(self, left=True, right=True):
self.left = left
self.right = right
def get_mask(self, line):
return [cell.is_empty for cell in line]
def __call__(self, line):
empties = self.get_mask(line)
if all(empties):
return []
if self.right:
line = line[:_rindex(empties, 0) + 1]
if self.left:
line = line[empties.index(0):]
return line
class StripLine(StripCellLine):
def get_mask(self, line):
return [value == EMPTY_CELL for value in line]
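def _strip_line_demo():
    # Hedged usage sketch (assumed data): leading and trailing EMPTY_CELL
    # values are dropped while inner gaps are kept.
    line = [EMPTY_CELL, 'a', EMPTY_CELL, 'b', EMPTY_CELL]
    assert StripLine()(line) == ['a', EMPTY_CELL, 'b']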
def non_empty(line):
'''A transformer that matches only non-empty lines. Others will
raise a DoesntMatchException'''
if not line:
raise DoesntMatchException('Empty line')
return line
def _array_access(array, positions):
if isinstance(positions, slice):
return array[positions]
else:
return [array[i] for i in positions]
class Match(object):
'''A transformer that matches lines that contain the given
regex. Use combine to decide if all or any item should match
:param regex regex: a regular expression
:param list position: a list of positions or a slice
:param function combine: function that decides if the whole line
matches
'''
def __init__(self, regex, position=None, combine=None):
if isinstance(regex, str):
regex = re.compile(regex)
self.regex = regex
self.combine = combine or any
if isinstance(position, int):
self.position = [position]
elif position is None:
self.position = slice(None, None)
else:
self.position = position
def __call__(self, line):
sline = line
if sline and hasattr(sline[0], 'value'):
sline = [cell.value for cell in line]
sline = [str(i) for i in sline]
if not self.combine([self.regex.match(p) for p in _array_access(sline, self.position)]):
raise DoesntMatchException("%s doesn't match %s" %
(sline[self.position],
self.regex.pattern))
return line
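def _match_demo():
    # Hedged usage sketch (assumed data): keep lines whose first cell
    # starts with 'Total'; anything else raises DoesntMatchException.
    is_total = Match(r'Total', position=0)
    assert is_total(['Total', 42]) == ['Total', 42]
    try:
        is_total(['Subtotal', 42])
        raise AssertionError('expected DoesntMatchException')
    except DoesntMatchException:
        pass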
def get_value(line):
'''A transformer that converts a list of cells to a list of values'''
return [c.value if not c.is_merged else EMPTY_CELL for c in line]
def table_transform(**kwargs):
dct = {}
if 'wrap' in kwargs:
dct['wrap'] = lambda self, table, fun=kwargs['wrap']: fun(table)
if 'process_line' in kwargs:
dct['process_line'] = lambda self, table, line, fun=kwargs['process_line']: fun(table, line)
return type('table_transform', (TableTransform,), dct)
def match_if(fun):
def __match(line, fun=fun):
if not fun(line):
raise DoesntMatchException
return line
return __match
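# Hedged usage sketch: match_if turns any boolean predicate into a
# transformer, e.g. one that only accepts lines with three or more cells.
def _match_if_demo():
    at_least_three = match_if(lambda line: len(line) >= 3)
    assert at_least_three([1, 2, 3]) == [1, 2, 3]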
class ResultLine(ResultObject, list):
def set_args(self, transforms=None):
self._transforms = transforms or [StripCellLine(), non_empty, get_value]
def visit(self, visitor):
visitor.visit_line(self)
def set_value(self, line):
line = list(line)
for t in self._transforms:
line = t(line)
try:
self[:] = line
except TypeError as e:
raise TypeError(
'One of the transforms '
'did not return a list, check line_args'
) from e
class ResultTable(ResultObject):
'''An object to store the content of a matched Table.'''
def __init__(self, name, transforms=None, iffail='no match'):
self.name = name
self.data = []
self.count = 0
self.set_args(transforms, iffail)
def set_args(self, transforms=None, iffail='no match'):
self.transforms = instantiate_if_class_lst(transforms or [],
TableTransform)
self.iffail = {'no match': DoesntMatchException,
'fail': None}[iffail]
for transform in self.transforms:
transform.init(self)
def append_table(self, line):
for transform in self.transforms:
line = transform.process_line(self, line)
if line is None:
break
self.count += 1
def wrap(self):
for transform in self.transforms:
try:
transform.wrap(self)
except Exception as e:
if self.iffail is not None:
raise DoesntMatchException() from e
else:
raise
def __repr__(self):
return "Table %s (%s)" % (self.name, self.data)
class PythonObjectContext(ResultContext):
"""Store the results are a hierarchy of objects that mimics the
initial hierarchy of patterns"""
types = {'list': ResultList,
'dict': ResultDict,
'line': ResultLine,
'table': ResultTable}
def __init__(self):
super(PythonObjectContext, self).__init__()
def push_named(self, name, type_):
if type_ is None:
o = self.current # !!! won't work with rollback
else:
o = self.types[type_](name=name)
return super(PythonObjectContext, self).push(o)
def emit(self, name, o):
self.current.add(name, o)
def commit(self, o1, o2):
o1.add(o2.name, o2)
def __getattr__(self, name):
return getattr(self.root, name)
def __getitem__(self, name):
return self.root[name]
def __repr__(self):
return "<%s %s>" % (self.__class__.__name__, self.root)
__str__ = __repr__
class ListContext(PythonObjectContext):
'''a context that returns a dictionary where the key is the name
of the pattern'''
class DefaultResult(dict):
def __init__(self, name):
self.name = name
dict.__init__(self)
def append(self, arg):
name, value = arg
self.setdefault(name, []).append(value)
def update(self, o):
for k, v in o.items():
self.setdefault(k, []).extend(v)
def __getattr__(self, key):
try:
return self[key]
except KeyError:
raise AttributeError(key)
types = {'list': DefaultResult,
'dict': DefaultResult,
'line': ResultLine,
'table': ResultTable}
def emit(self, name, o):
self.current.append((name, o))
def commit(self, o1, o2):
if isinstance(o2, ListContext.DefaultResult):
o1.update(o2)
else:
o1.append((o2.name, o2))
class DebugContext(ListContext):
'''A result context that implements the debug function'''
def debug(self, *args):
print(' ' * len(self.stack), *args)
def pop(self):
self.debug(' ' * len(self.stack), '--')
super(DebugContext, self).pop()
def commit(self, *args):
self.debug(' ' * len(self.stack), '++')
super(DebugContext, self).commit(*args)
class TableTransform(object):
def init(self, table):
pass
def wrap(self, table):
pass
def process_line(self, table, line):
return line
class TableNotEmpty(TableTransform):
def process_line(self, table, line):
if not any(line):
return None
return line
def wrap(self, table):
if not table.data:
raise DoesntMatchException('TableNotEmpty failed: No data in table')
class GetValue(TableTransform):
"""Transforms a list of cells into a list of strings. All built in
processors expect GetValue to be included as the first
transformation."""
def __init__(self, include_merged=True):
self.include_merged = include_merged
def process_line(self, table, line):
if self.include_merged:
return [x.value for x in line]
else:
return [x.value for x in line if not x.is_merged]
class IgnoreIf(TableTransform):
def __init__(self, test):
self.test = test
def process_line(self, table, line):
if not self.test(line):
return line
return None
class FillData(TableTransform):
"""Adds the line to the table data"""
def process_line(self, table, line):
table.data.append(line)
class HeaderTableTransform(TableTransform):
"""Extract the first lines and first columns
as the top and left headers
:param int top_header: number of lines, 1 by default
:param int left_column: number of columns, 1 by default
"""
def __init__(self, top_header=1, left_column=1):
self.top_header = top_header
self.left_column = left_column
def init(self, table):
table.left_headers = [[] for _ in range(self.left_column)]
table.top_left = [[] for _ in range(self.left_column)]
table.top_headers = []
def _append_to_cols(self, columns, line):
for h, c in zip_longest(columns, line):
h.append(c)
def process_line(self, table, line):
if not line:
return
if self.left_column:
left = line[:self.left_column]
line = line[self.left_column:]
if table.count >= self.top_header:
self._append_to_cols(table.left_headers, left)
else:
self._append_to_cols(table.top_left, left)
if self.top_header and table.count < self.top_header:
table.top_headers.append(line)
else:
return line
return None
def wrap(self, table):
if len(getattr(table, 'top_headers', [])) < self.top_header:
raise DoesntMatchException
if len(getattr(table, 'left_headers', [])) < self.left_column:
raise DoesntMatchException
class KeepOnly(TableTransform):
def __init__(self, left_header=None, top_header=None, data=None):
self.left_header = left_header
self.top_header = top_header
self.data = data
def wrap(self, table):
if self.top_header:
table.top_headers = _array_access(table.top_headers, self.top_header)
if self.left_header:
table.left_headers = _array_access(table.left_headers, self.left_header)
if self.data:
table.data = _array_access(table.data, self.data)
class FillHeaderBlanks(TableTransform):
'''Replaces empty strings with previous data'''
def __init__(self, *indexes, **kwargs):
if not indexes:
raise ConfigurationError('No indexes in FillHeaderBlanks')
self.which = kwargs.get('which', 'top_headers')
if self.which not in ['top_headers', 'left_headers']:
raise ConfigurationError('"which" must be top_headers or left_headers')
self.indexes = indexes
def wrap(self, table):
result = []
indexes = self.indexes
for i, header in enumerate(getattr(table, self.which)):
result.append(_repeat_existing(header) if i in indexes else header)
setattr(table, self.which, result)
def RepeatExisting(*rows):
return FillHeaderBlanks(*rows, which='top_headers')
def _find_non_empty_rows(list_of_lists):
return [i for i, line in enumerate(list_of_lists)
if any(x != EMPTY_CELL for x in line)]
class RemoveEmptyLines(TableTransform):
'''Remove empty lines or empty columns in the table. Note: could
be greatly simplified with numpy'''
def __init__(self, line_type='rows'):
if line_type not in ['rows', 'columns']:
raise ConfigurationError(
"line_type must be 'rows' or 'columns' - got %s"
% repr(line_type))
self.line_type = line_type
def wrap(self, table):
if self.line_type == 'columns':
Transpose().wrap(table)
data_rows = _find_non_empty_rows(table.data)
table.data = [table.data[i] for i in data_rows]
if hasattr(table, 'left_headers'):
tlf = transpose(table.left_headers)
table.left_headers = transpose(tlf[i] for i in data_rows)
if self.line_type == 'columns':
Transpose().wrap(table)
class ToMap(TableTransform):
"""Transforms the data from a list of lists to a map. The keys are
the combination of terms in the headers (top and left) and the
values are the table data"""
def wrap(self, table):
result = {}
for lefts, row in zip_longest(zip_longest(
*table.left_headers), table.data):
for tops, cell in zip_longest(zip_longest(*table.top_headers), row):
key = tuple(lefts) + tuple(tops)
result[key] = cell
table.data = result
def _join_header(lines, char):
return [char.join("%s" % s for s in u) for u in zip_longest(*lines)]
class MergeHeader(TableTransform):
"""merges several lines in the header into one"""
def __init__(self, join_top=(), join_left=(), ch='.'):
if not all(isinstance(i, int) for i in join_top):
raise ConfigurationError('ids must be ints, got %s' % join_top)
if not all(isinstance(i, int) for i in join_left):
raise ConfigurationError('ids must be ints, got %s' % join_left)
self.join_char = ch
self.join_left = join_left
self.join_top = join_top
def merge(self, header, ids):
to_merge = [header[i] for i in ids]
not_merge = [h for i, h in enumerate(header) if i not in ids]
to_merge = [_join_header(to_merge, self.join_char)]
return to_merge + not_merge
def wrap(self, table):
if self.join_top:
table.top_headers = self.merge(table.top_headers, self.join_top)
if self.join_left:
table.left_headers = self.merge(table.left_headers, self.join_left)
def transpose(list_of_lists):
return list(list(r) for r in zip_longest(*list_of_lists))
class Transpose(TableTransform):
"""Transforms lines into columns and columns to lines"""
def wrap(self, table):
if hasattr(table, 'top_headers') and hasattr(table, 'left_headers'):
table.top_headers, table.left_headers = (
table.left_headers, table.top_headers)
table.data = transpose(table.data)
def parse_time_func(*formats):
if not formats:
raise ConfigurationError('Expect to have at least one date format to parse')
def parse_(s, formats=formats):
e = None
for format in formats:
try:
return datetime.datetime.strptime(s, format)
except ValueError as e_:
e = e_
continue
raise e # there has to be an error
return parse_
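def _parse_time_demo():
    # Hedged usage sketch (assumed formats): each format is tried in turn
    # and the last ValueError is re-raised if none of them matches.
    parse = parse_time_func('%Y-%m-%d', '%d/%m/%Y')
    assert parse('14/08/2016') == datetime.datetime(2016, 8, 14)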
class ToDate(TableTransform):
"""Transforms strings into dates in the header. Use merge if the
date is spread over several lines"""
def __init__(self, header_id, strftime, is_top=True, join='/'):
self.header_id = header_id
self.is_top = is_top
if isinstance(strftime, str):
self.strftime = parse_time_func(strftime)
elif isinstance(strftime, (tuple, list)):
self.strftime = parse_time_func(*strftime)
else:
self.strftime = strftime
self.join = join
def wrap(self, table):
headers = table.top_headers if self.is_top else table.left_headers
dates_str = headers.pop(self.header_id)
result = []
for d in dates_str:
try:
result.append(self.strftime(d))
except ValueError:
result.append(d)
headers.append(result)
def _repeat_existing(line):
current = None
result = []
for i in line:
if i != EMPTY_CELL:
current = i
result.append(current)
return result
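def _repeat_existing_demo():
    # Hedged usage sketch (assumed data): blanks inherit the last
    # non-empty value to their left.
    assert _repeat_existing(['a', EMPTY_CELL, 'b', EMPTY_CELL]) == ['a', 'a', 'b', 'b']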
DEFAULT_TRANSFORMS = [GetValue, HeaderTableTransform, FillData, TableNotEmpty]
|
from .main.main import main
main()
|
"""
Created on Sun Aug 14 14:53:28 2016
@author: Luciano Masullo, Federico Barabas
"""
import os
import numpy as np
import math
import configparser
from scipy.ndimage.measurements import center_of_mass
from skimage.feature import peak_local_max
try:
import skimage.filters as filters
except ImportError:
import skimage.filter as filters
from skimage.transform import probabilistic_hough_line
from ringfinder.neurosimulations import simAxon
def saveConfig(main, filename=None):
if filename is None:
filename = os.path.join(os.getcwd(), 'config')
config = configparser.ConfigParser()
config['Loading'] = {
'STORM px nm': main.STORMPxEdit.text(),
'STORM magnification': main.magnificationEdit.text(),
'STED px nm': main.STEDPxEdit.text()}
config['Analysis'] = {
'ROI size nm': main.roiSizeEdit.text(),
'Gaussian sigma filter nm': main.sigmaEdit.text(),
'nsigmas threshold': main.intThresEdit.text(),
'Lines min length nm': main.lineLengthEdit.text(),
'Ring periodicity nm': main.wvlenEdit.text(),
'Sinusoidal pattern power': main.sinPowerEdit.text(),
'Angular step deg': main.thetaStepEdit.text(),
'Delta angle deg': main.deltaThEdit.text(),
'Discrimination threshold': main.corrThresEdit.text(),
'Area threshold %': main.minAreaEdit.text()}
with open(filename, 'w') as configfile:
config.write(configfile)
def saveDefaultConfig(filename=None):
if filename is None:
filename = os.path.join(os.getcwd(), 'config')
config = configparser.ConfigParser()
config['Loading'] = {
'STORM px nm': '13.3', 'STORM magnification': '10', 'STED px nm': '20'}
config['Analysis'] = {
'ROI size nm': '1000', 'Gaussian sigma filter nm': '100',
'nsigmas threshold': '0.5', 'Lines min length nm': '300',
'Ring periodicity nm': '180', 'Sinusoidal pattern power': '6',
'Angular step deg': '3', 'Delta angle deg': '20',
'Discrimination threshold': '0.2', 'Area threshold %': '20'}
with open(filename, 'w') as configfile:
config.write(configfile)
def loadConfig(main, filename=None):
if filename is None:
filename = os.path.join(os.getcwd(), 'config')
config = configparser.ConfigParser()
config.read(filename)
loadConfig = config['Loading']
main.STORMPxEdit.setText(loadConfig['STORM px nm'])
main.magnificationEdit.setText(loadConfig['STORM magnification'])
main.STEDPxEdit.setText(loadConfig['STED px nm'])
analysisConfig = config['Analysis']
main.roiSizeEdit.setText(analysisConfig['ROI size nm'])
main.sigmaEdit.setText(analysisConfig['Gaussian sigma filter nm'])
main.intThresEdit.setText(analysisConfig['nsigmas threshold'])
main.lineLengthEdit.setText(analysisConfig['Lines min length nm'])
main.wvlenEdit.setText(analysisConfig['Ring periodicity nm'])
main.sinPowerEdit.setText(analysisConfig['Sinusoidal pattern power'])
main.thetaStepEdit.setText(analysisConfig['Angular step deg'])
main.deltaThEdit.setText(analysisConfig['Delta angle deg'])
main.corrThresEdit.setText(analysisConfig['Discrimination threshold'])
main.minAreaEdit.setText(analysisConfig['Area threshold %'])
def pearson(a, b):
"""2D pearson coefficient of two matrixes a and b"""
# Subtracting mean values
an = a - np.mean(a)
bn = b - np.mean(b)
# Vectorized versions of c, d, e
c_vect = an*bn
d_vect = an*an
e_vect = bn*bn
# Finally get r using those vectorized versions
r_out = np.sum(c_vect)/math.sqrt(np.sum(d_vect)*np.sum(e_vect))
return r_out
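def _pearson_demo():
    # Hedged sketch (not from the original module): perfectly correlated
    # matrices give r = 1 up to floating point error.
    a = np.arange(9).reshape(3, 3)
    assert abs(pearson(a, 2 * a) - 1.0) < 1e-12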
def cosTheta(a, b):
"""Angle between two vectors a and b"""
if np.linalg.norm(a) == 0 or np.linalg.norm(b) == 0:
return 0
cosTheta = np.dot(a, b)/(np.linalg.norm(a)*np.linalg.norm(b))
return cosTheta
def blockshaped(arr, nrows, ncols):
"""
Return an array of shape (n, nrows, ncols) where
n * nrows * ncols = arr.size
If arr is a 2D array, the returned array should look like n subblocks with
each subblock preserving the "physical" layout of arr.
"""
h, w = arr.shape
nrows = int(nrows)
ncols = int(ncols)
return (arr.reshape(h//nrows, nrows, -1, ncols)
.swapaxes(1, 2)
.reshape(-1, nrows, ncols))
def unblockshaped(arr, h, w):
"""
Return an array of shape (h, w) where
h * w = arr.size
If arr is of shape (n, nrows, ncols), n sublocks of shape (nrows, ncols),
then the returned array preserves the "physical" layout of the sublocks.
"""
n, nrows, ncols = arr.shape
return (arr.reshape(h//nrows, -1, nrows, ncols)
.swapaxes(1, 2)
.reshape(h, w))
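def _blockshaped_demo():
    # Hedged usage sketch (assumed shapes): a 4x6 array splits into four
    # 2x3 blocks and unblockshaped reassembles it unchanged.
    a = np.arange(24).reshape(4, 6)
    blocks = blockshaped(a, 2, 3)  # shape (4, 2, 3)
    assert np.array_equal(unblockshaped(blocks, 4, 6), a)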
def firstNmax(coord, image, N):
"""Returns the first N max in an image from an array of coord of the max
in the image"""
if np.shape(coord)[0] < N:
return []
else:
aux = np.zeros(np.shape(coord)[0])
for i in np.arange(np.shape(coord)[0]):
aux[i] = image[coord[i, 0], coord[i, 1]]
auxmax = aux.argsort()[-N:][::-1]
coordinates3 = []
for i in np.arange(0, N):
coordinates3.append(coord[auxmax[i]])
coord3 = np.asarray(coordinates3)
return coord3
def arrayExt(array):
"""Extends an array in a specific way"""
y = array[::-1]
z = []
z.append(y)
z.append(array)
z.append(y)
z = np.array(z)
z = np.reshape(z, 3*np.size(array))
return z
def getDirection(data, binary, minLen, debug=False):
"""Returns the direction (angle) of the neurite in the image data.
binary: boolean array with same shape as data. True means that the pixel
belongs to a neuron and False means background.
minLen: minimum line length in px."""
th0, sigmaTh, lines = linesFromBinary(binary, minLen, debug)
if debug:
text = 'Angle = {0:.1f} +- {1:.1f} from {2:.0f} lines'
print(text.format(th0, sigmaTh, len(lines)))
try:
if len(lines) > 1:
# TO DO: find optimal threshold; 15 is arbitrary
if sigmaTh < 15:
return th0, lines
else:
if debug:
print('sigmaTh too high')
return None, lines
else:
if debug:
print('Only one line found')
return None, lines
except TypeError:
# if sigmaTh is None (no lines), this happens
if debug:
print('No lines were found')
return None, lines
def linesFromBinary(binaryData, minLen, debug=False):
# find edges
edges = filters.sobel(binaryData)
# get directions
lines = probabilistic_hough_line(edges, threshold=10, line_length=minLen,
line_gap=3)
if lines == []:
if debug:
print('No lines detected with Hough line algorithm')
return None, None, lines
else:
angleArr = np.zeros(len(lines))
for l in np.arange(len(lines)):
p0, p1 = lines[l]
# get the m coefficient of the lines and the angle
try:
m = (p1[0] - p0[0])/(p1[1] - p0[1])
angle = (180/np.pi)*np.arctan(m)
except ZeroDivisionError:
angle = 90
angleArr[l] = angle
# Before calculating the mean angle, we have to make sure we're using
# the same quadrant for all the angles. We refer all the angles to the
# first one
opt = np.array([180, 0, -180])
for i in np.arange(1, len(angleArr)):
dists = np.abs(angleArr[0] - (opt + angleArr[i]))
angleArr[i] += opt[np.argmin(dists)]
mean, std = np.mean(angleArr), np.std(angleArr)
# We like angles in [0, 180)
if mean < 0:
mean += 180
# histogram method for getDirection
hrange = (-180, 180)
arr = np.histogram(angleArr, bins=45, range=hrange)
dig = (arr[0] != 0).astype(int)
angleGroups = [np.split(arr[0], np.where(np.diff(dig) != 0)[0] + 1),
np.split(arr[1], np.where(np.diff(dig) != 0)[0] + 1)]
angleGroupsSum = [np.sum(np.array(b)) for b in angleGroups[0]]
biggerAngleGroup = angleGroups[1][np.argmax(angleGroupsSum)]
if debug:
print('Total number of lines: {}, Number of lines in biggest group: {}'.format(np.sum(angleGroupsSum), np.max(angleGroupsSum)))
if np.max(angleGroupsSum)/np.sum(angleGroupsSum) > 0.49:
mean = np.mean(biggerAngleGroup)
std = np.std(biggerAngleGroup)
# We like angles in [0, 180)
if mean < 0:
mean += 180
else:
mean = None
std = None
return mean, std, lines
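def _angle_alignment_demo():
    # Hedged sketch (assumed data): each angle is shifted by whichever of
    # {180, 0, -180} brings it closest to the first one, so 170 deg and
    # -8 deg end up in the same quadrant before averaging.
    angleArr = np.array([170.0, -8.0])
    opt = np.array([180, 0, -180])
    dists = np.abs(angleArr[0] - (opt + angleArr[1]))
    angleArr[1] += opt[np.argmin(dists)]
    assert angleArr[1] == 172.0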
def fitMethod(data, mask, thres, minLen, thStep, deltaTh, wvlen, sinPow,
developer=False):
y0, x0 = np.array(center_of_mass(~mask), dtype=int)
yMax, xMax = mask.shape
if ~mask[y0, x0]:
theta = np.arange(0, math.pi, thStep*math.pi/180)
m = np.tan(theta)
# Taking care of infinite slope case
theta = theta[np.abs(m) < 1000]
m = m[np.abs(m) < 1000]
lineLength = 0
for i in np.arange(len(theta)):
# Find right intersection between line and neuron edge
x, y = x0, y0
while ~mask[y, x] and x < xMax:
x += 1
y = int(m[i]*(x - x0) + y0)
# If it gets out of the image
if abs(y) >= mask.shape[0] or y < 0:
x -= 1
y = int(m[i]*(x - x0) + y0)
break
yq, xq = y, x
# Find left intersection between line and neuron edge
y, x = y0, x0
while ~mask[y, x] and x > 0:
x -= 1
y = int(m[i]*(x - x0) + y0)
# If it gets out of the image
if abs(y) >= mask.shape[0] or y < 0:
x += 1
y = int(m[i]*(x - x0) + y0)
break
yp, xp = y, x
# vertical line case
# We keep coordinates of longest line
newLength = math.sqrt((xq - xp)**2 + (yq - yp)**2)
if lineLength < newLength:
lineLength = newLength
x2, y2 = xq, yq
x1, y1 = xp, yp
def corrMethod(data, mask, minLen, thStep, deltaTh, wvlen, sinPow,
developer=False):
"""Searches for rings by correlating the image data with a given
sinusoidal pattern
data: 2D image data
minLen: minimum line length in px.
thStep: angular step size
deltaTh: maximum pattern rotation angle for correlation matching
wvlen: wavelength of the ring pattern, in px
sinPow: power of the pattern function
developer (bool): enables additional output of algorithms
returns:
corrMax: the maximum (in function of the rotated angle) correlation value
at the image data
thetaMax: simulated axon's rotation angle with maximum correlation value
phaseMax: simulated axon's phase with maximum correlation value at thetaMax
rings (bool): ring presence"""
# phase steps are set to 20, TO DO: explore this parameter
phase = np.arange(0, 21, 1)
corrPhase = np.zeros(np.size(phase))
# line angle calculated
th0, lines = getDirection(data, np.invert(mask), minLen, developer)
if th0 is None:
theta = np.arange(0, 180, thStep)
# result = np.nan means there's no neuron in the block
corrPhaseArg = np.zeros(np.size(theta))
corrPhaseArg[:] = np.nan
corrTheta = np.zeros(np.size(theta))
corrTheta[:] = np.nan
corrMax = np.nan
thetaMax = np.nan
phaseMax = np.nan
else:
try:
if developer:
theta = np.arange(np.min([th0 - deltaTh, 0]), 180, thStep)
else:
theta = np.arange(th0 - deltaTh, th0 + deltaTh, thStep)
except TypeError:
th0 = 90
deltaTh = 90
theta = np.arange(0, 180, thStep)
corrPhaseArg = np.zeros(np.size(theta))
corrTheta = np.zeros(np.size(theta))
subImgSize = np.shape(data)[0]
# for now we correlate with the full sin2D pattern
for t in np.arange(len(theta)):
for p in phase:
# creates simulated axon
axonTheta = simAxon(subImgSize, wvlen, theta[t], p*.025, a=0,
b=sinPow).data
axonThetaMasked = np.ma.array(axonTheta, mask=mask)
dataMasked = np.ma.array(data, mask=mask)
# saves correlation for the given phase p
corrPhase[p] = pearson(dataMasked, axonThetaMasked)
# saves the best correlation over phases for the given angle theta[t]
corrTheta[t] = np.max(corrPhase)
corrPhaseArg[t] = .025*np.argmax(corrPhase)
# get theta, phase and correlation with greatest correlation value
# Find indices within (th0 - deltaTh, th0 + deltaTh)
ix = np.where(np.logical_and(th0 - deltaTh <= theta,
theta <= th0 + deltaTh))
i = np.argmax(corrTheta[ix])
thetaMax = theta[ix][i]
phaseMax = corrPhaseArg[ix][i]
corrMax = np.max(corrTheta[ix])
return th0, corrTheta, corrMax, thetaMax, phaseMax
def FFTMethod(data, thres=0.4):
"""A method for actin/spectrin ring finding. It performs FFT 2D analysis
and looks for maxima at 180 nm in the frequency spectrum."""
# calculate new fft2
fft2output = np.real(np.fft.fftshift(np.fft.fft2(data)))
# take abs value and log10 for better visualization
fft2output = np.abs(np.log10(fft2output))
# calculate local intensity maxima
coord = peak_local_max(fft2output, min_distance=2, threshold_rel=thres)
# take first 3 max
coord = firstNmax(coord, fft2output, N=3)
# size of the subimqge of interest
A = np.shape(data)[0]
# max and min radius in pixels, 9 -> 220 nm, 12 -> 167 nm
rmin, rmax = (9, 12)
# auxarrays: ringBool, D
# ringBool is checked to define whether there are rings or not
ringBool = []
# D saves the distances of local maxima from the centre of the fft2
D = []
# loop for calculating all the distances d, elements of array D
for i in np.arange(0, np.shape(coord)[0]):
d = np.linalg.norm(np.array([A/2, A/2]) - coord[i])
D.append(d)
if A*(rmin/100) < d < A*(rmax/100):
ringBool.append(1)
# condition for ringBool: all elements d must correspond to
# periods between 170 and 220 nm
rings = np.sum(ringBool) == np.shape(coord)[0]-1 and np.sum(ringBool) > 0
return fft2output, coord, (rmin, rmax), rings
def pointsMethod(self, data, thres=.3):
"""A method for actin/spectrin ring finding. It finds local maxima in the
image (points) and then if there are three or more in a row considers that
to be rings."""
points = peak_local_max(data, min_distance=6, threshold_rel=thres)
points = firstNmax(points, data, N=7)
D = []
if len(points) == 0:
rings = False
else:
dmin = 8
dmax = 11
# look up every point
for i in np.arange(0, np.shape(points)[0]-1):
# calculate the distance of every point to the others
for j in np.arange(i + 1, np.shape(points)[0]):
d1 = np.linalg.norm(points[i] - points[j])
# if there are two points at the right distance then
if dmin < d1 < dmax:
for k in np.arange(0, np.shape(points)[0]-1):
# check the distance between the last point
# and the other points in the list
if k != i & k != j:
d2 = np.linalg.norm(points[j] - points[k])
else:
d2 = 0
# calculate the angle between vector i-j
# and j-k with i, j, k points
v1 = points[i]-points[j]
v2 = points[j]-points[k]
t = cosTheta(v1, v2)
# if point k is at right distance from point j and
# the angle is flat enough
if dmin < d2 < dmax and np.abs(t) > 0.8:
# save the three points and plot the connections
D.append([points[i], points[j], points[k]])
else:
pass
rings = len(D) > 0
return points, D, rings
|
# Lemiere Yves
# July 2017
import matplotlib.pyplot as plt
import random
def bunch_of_random_real(param_min,param_max,number_of_sample):
tmp_list = []
for i in range(number_of_sample):
tmp_list.append(random.uniform(param_min,param_max))
return tmp_list
def bunch_of_gauss_random(param_mu,param_sigma,number_of_sample):
tmp_list = []
for i in range(number_of_sample):
tmp_list.append(random.gauss(param_mu,param_sigma))
return tmp_list
if __name__ == "__main__":
debug = True
if debug:
print("************************")
print("* Welcome in random_0 *")
print("************************\n")
random.seed(1)
print("Define the expected random distribution")
input_N = input("Number of values to generate ? ")
input_min_value = input("Minimum value from range ? ")
input_max_value = input("Maximum value from range ? ")
N = int(input_N)
min_value = int(input_min_value)
max_value = int(input_max_value)
x = bunch_of_random_real(int(min_value),int(max_value),int(N))
y = bunch_of_random_real(int(min_value),int(max_value),int(N))
number_of_bar = (max_value-min_value)
# Display
plt.plot(x,y,'*')
plt.show()
plt.hist2d(x,y,bins=40)
plt.colorbar()
plt.show()
input_mean_value = input("Mean position of gauss distribution ? ")
input_sigma_value = input("Width of gauss distribution ? ")
g_x = bunch_of_gauss_random(int(input_mean_value),int(input_sigma_value),int(N))
g_y = bunch_of_gauss_random(int(input_mean_value),int(input_sigma_value),int(N))
plt.plot(g_x,g_y,'*')
plt.show()
plt.hist2d(g_x,g_y,bins=40)
plt.colorbar()
plt.show()
|
from __future__ import unicode_literals
import locale
import logging
import requests
from photini.configstore import key_store
from photini.photinimap import GeocoderBase, PhotiniMap
from photini.pyqt import Busy, catch_all, Qt, QtCore, QtWidgets, scale_font
logger = logging.getLogger(__name__)
translate = QtCore.QCoreApplication.translate
class BingGeocoder(GeocoderBase):
interval = 50
def query(self, params, url):
params['key'] = self.api_key
with Busy():
self.rate_limit()
try:
rsp = requests.get(url, params=params, timeout=5)
except Exception as ex:
logger.error(str(ex))
return []
if rsp.status_code >= 400:
logger.error('Search error %d', rsp.status_code)
return []
if rsp.headers['X-MS-BM-WS-INFO'] == '1':
logger.error(translate(
'MapTabBing', 'Server overload, please try again'))
self.block_timer.start(5000)
rsp = rsp.json()
if rsp['statusCode'] != 200:
logger.error('Search error %d: %s',
rsp['statusCode'], rsp['statusDescription'])
return []
resource_sets = rsp['resourceSets']
if not resource_sets:
logger.error('No results found')
return []
return resource_sets
def get_altitude(self, coords):
coords = [float(x) for x in coords.split(',')]
params = {
'points' : '{:.5f},{:.5f}'.format(*coords),
'heights': 'sealevel',
}
resource_sets = self.cached_query(
params, 'http://dev.virtualearth.net/REST/v1/Elevation/List')
if resource_sets:
return resource_sets[0]['resources'][0]['elevations'][0]
return None
def search(self, search_string, bounds=None):
params = {
'query' : search_string,
'maxRes': '20',
}
lang, encoding = locale.getdefaultlocale()
if lang:
params['culture'] = lang.replace('_', '-')
if bounds:
north, east, south, west = bounds
params['userMapView'] = '{:.4f},{:.4f},{:.4f},{:.4f}'.format(
south, west, north, east)
for resource_set in self.cached_query(
params, 'http://dev.virtualearth.net/REST/v1/Locations'):
for resource in resource_set['resources']:
south, west, north, east = resource['bbox']
yield north, east, south, west, resource['name']
def search_terms(self):
widget = QtWidgets.QLabel(translate(
'MapTabBing', 'Search and altitude lookup\nprovided by Bing'))
widget.setAlignment(Qt.AlignRight)
scale_font(widget, 80)
return [widget]
class TabWidget(PhotiniMap):
api_key = key_store.get('bingmap', 'api_key')
@staticmethod
def tab_name():
return translate('MapTabBing', 'Map (&Bing)')
def get_geocoder(self):
return BingGeocoder(parent=self)
def get_head(self):
url = 'http://www.bing.com/api/maps/mapcontrol?callback=initialize'
url += '&key=' + self.api_key
lang, encoding = locale.getdefaultlocale()
if lang:
culture = lang.replace('_', '-')
url += '&setMkt=' + culture
language, sep, region = culture.partition('-')
url += '&setLang=' + language
if self.app.options.test:
url += '&branch=experimental'
return ''' <script type="text/javascript"
src="{}" async>
</script>'''.format(url)
@catch_all
def new_status(self, status):
super(TabWidget, self).new_status(status)
if 'session_id' in status:
# use map session key to make API calls non-billable
self.geocoder.api_key = status['session_id']
|
from phystricks import *
def PVRFoobvAzpZTq():
pspict,fig = SinglePicture("PVRFoobvAzpZTq")
pspict.dilatation(0.5)
D=Point(0,0)
E=Point(8,0)
c1=Circle(D,12)
c2=Circle(E,6)
F=Intersection(c1,c2)[1]
triangle=Polygon(D,E,F)
S=triangle.edges[2].midpoint()
T=triangle.edges[1].midpoint()
mes1=Segment(D,S).get_measure(-0.3,0.1,None,"\( 6\)",pspict=pspict,position="corner")
mes4=Segment(T,F).get_measure(0.3,-0.1,None,"\( 7\)",pspict=pspict,position="corner")
pspict.DrawGraphs(mes1,mes4,triangle)
pspict.comment="The marks 6 and 7 are well positioned"
fig.no_figure()
fig.conclude()
fig.write_the_file()
|
from __future__ import unicode_literals
import frappe
from frappe import _
from frappe.model.document import Document
from frappe.utils import flt, nowdate, get_url
from erpnext.accounts.party import get_party_account, get_party_bank_account
from erpnext.accounts.utils import get_account_currency
from erpnext.accounts.doctype.payment_entry.payment_entry import get_payment_entry, get_company_defaults
from frappe.integrations.utils import get_payment_gateway_controller
from frappe.utils.background_jobs import enqueue
from erpnext.erpnext_integrations.stripe_integration import create_stripe_subscription
from erpnext.accounts.doctype.subscription_plan.subscription_plan import get_plan_rate
class PaymentRequest(Document):
def validate(self):
if self.get("__islocal"):
self.status = 'Draft'
self.validate_reference_document()
self.validate_payment_request_amount()
self.validate_currency()
self.validate_subscription_details()
def validate_reference_document(self):
if not self.reference_doctype or not self.reference_name:
frappe.throw(_("To create a Payment Request reference document is required"))
def validate_payment_request_amount(self):
existing_payment_request_amount = \
get_existing_payment_request_amount(self.reference_doctype, self.reference_name)
if existing_payment_request_amount:
ref_doc = frappe.get_doc(self.reference_doctype, self.reference_name)
if (hasattr(ref_doc, "order_type") \
and getattr(ref_doc, "order_type") != "Shopping Cart"):
ref_amount = get_amount(ref_doc)
				if existing_payment_request_amount + flt(self.grand_total) > ref_amount:
frappe.throw(_("Total Payment Request amount cannot be greater than {0} amount")
.format(self.reference_doctype))
def validate_currency(self):
ref_doc = frappe.get_doc(self.reference_doctype, self.reference_name)
if self.payment_account and ref_doc.currency != frappe.db.get_value("Account", self.payment_account, "account_currency"):
frappe.throw(_("Transaction currency must be same as Payment Gateway currency"))
def validate_subscription_details(self):
if self.is_a_subscription:
amount = 0
for subscription_plan in self.subscription_plans:
payment_gateway = frappe.db.get_value("Subscription Plan", subscription_plan.plan, "payment_gateway")
if payment_gateway != self.payment_gateway_account:
frappe.throw(_('The payment gateway account in plan {0} is different from the payment gateway account in this payment request').format(subscription_plan.name))
rate = get_plan_rate(subscription_plan.plan, quantity=subscription_plan.qty)
amount += rate
if amount != self.grand_total:
frappe.msgprint(_("The amount of {0} set in this payment request is different from the calculated amount of all payment plans: {1}. Make sure this is correct before submitting the document.").format(self.grand_total, amount))
def on_submit(self):
if self.payment_request_type == 'Outward':
self.db_set('status', 'Initiated')
return
send_mail = self.payment_gateway_validation()
ref_doc = frappe.get_doc(self.reference_doctype, self.reference_name)
if (hasattr(ref_doc, "order_type") and getattr(ref_doc, "order_type") == "Shopping Cart") \
or self.flags.mute_email:
send_mail = False
if send_mail:
self.set_payment_request_url()
self.send_email()
self.make_communication_entry()
def on_cancel(self):
self.check_if_payment_entry_exists()
self.set_as_cancelled()
def make_invoice(self):
ref_doc = frappe.get_doc(self.reference_doctype, self.reference_name)
if (hasattr(ref_doc, "order_type") and getattr(ref_doc, "order_type") == "Shopping Cart"):
from erpnext.selling.doctype.sales_order.sales_order import make_sales_invoice
si = make_sales_invoice(self.reference_name, ignore_permissions=True)
si = si.insert(ignore_permissions=True)
si.submit()
def payment_gateway_validation(self):
try:
controller = get_payment_gateway_controller(self.payment_gateway)
if hasattr(controller, 'on_payment_request_submission'):
return controller.on_payment_request_submission(self)
else:
return True
except Exception:
return False
def set_payment_request_url(self):
if self.payment_account:
self.payment_url = self.get_payment_url()
if self.payment_url:
self.db_set('payment_url', self.payment_url)
if self.payment_url or not self.payment_gateway_account:
self.db_set('status', 'Initiated')
def get_payment_url(self):
if self.reference_doctype != "Fees":
data = frappe.db.get_value(self.reference_doctype, self.reference_name, ["company", "customer_name"], as_dict=1)
else:
data = frappe.db.get_value(self.reference_doctype, self.reference_name, ["student_name"], as_dict=1)
data.update({"company": frappe.defaults.get_defaults().company})
controller = get_payment_gateway_controller(self.payment_gateway)
controller.validate_transaction_currency(self.currency)
if hasattr(controller, 'validate_minimum_transaction_amount'):
controller.validate_minimum_transaction_amount(self.currency, self.grand_total)
return controller.get_payment_url(**{
"amount": flt(self.grand_total, self.precision("grand_total")),
"title": data.company.encode("utf-8"),
"description": self.subject.encode("utf-8"),
"reference_doctype": "Payment Request",
"reference_docname": self.name,
"payer_email": self.email_to or frappe.session.user,
"payer_name": frappe.safe_encode(data.customer_name),
"order_id": self.name,
"currency": self.currency
})
def set_as_paid(self):
if frappe.session.user == "Guest":
frappe.set_user("Administrator")
payment_entry = self.create_payment_entry()
self.make_invoice()
return payment_entry
def create_payment_entry(self, submit=True):
"""create entry"""
frappe.flags.ignore_account_permission = True
ref_doc = frappe.get_doc(self.reference_doctype, self.reference_name)
if self.reference_doctype == "Sales Invoice":
party_account = ref_doc.debit_to
elif self.reference_doctype == "Purchase Invoice":
party_account = ref_doc.credit_to
else:
party_account = get_party_account("Customer", ref_doc.get("customer"), ref_doc.company)
party_account_currency = ref_doc.get("party_account_currency") or get_account_currency(party_account)
bank_amount = self.grand_total
if party_account_currency == ref_doc.company_currency and party_account_currency != self.currency:
party_amount = ref_doc.base_grand_total
else:
party_amount = self.grand_total
payment_entry = get_payment_entry(self.reference_doctype, self.reference_name,
party_amount=party_amount, bank_account=self.payment_account, bank_amount=bank_amount)
payment_entry.update({
"reference_no": self.name,
"reference_date": nowdate(),
"remarks": "Payment Entry against {0} {1} via Payment Request {2}".format(self.reference_doctype,
self.reference_name, self.name)
})
if payment_entry.difference_amount:
company_details = get_company_defaults(ref_doc.company)
payment_entry.append("deductions", {
"account": company_details.exchange_gain_loss_account,
"cost_center": company_details.cost_center,
"amount": payment_entry.difference_amount
})
if submit:
payment_entry.insert(ignore_permissions=True)
payment_entry.submit()
return payment_entry
def send_email(self):
"""send email with payment link"""
email_args = {
"recipients": self.email_to,
"sender": None,
"subject": self.subject,
"message": self.get_message(),
"now": True,
"attachments": [frappe.attach_print(self.reference_doctype, self.reference_name,
file_name=self.reference_name, print_format=self.print_format)]}
enqueue(method=frappe.sendmail, queue='short', timeout=300, is_async=True, **email_args)
def get_message(self):
"""return message with payment gateway link"""
context = {
"doc": frappe.get_doc(self.reference_doctype, self.reference_name),
"payment_url": self.payment_url
}
if self.message:
return frappe.render_template(self.message, context)
def set_failed(self):
pass
def set_as_cancelled(self):
self.db_set("status", "Cancelled")
def check_if_payment_entry_exists(self):
if self.status == "Paid":
if frappe.get_all("Payment Entry Reference",
filters={"reference_name": self.reference_name, "docstatus": ["<", 2]},
fields=["parent"],
limit=1):
frappe.throw(_("Payment Entry already exists"), title=_('Error'))
def make_communication_entry(self):
"""Make communication entry"""
comm = frappe.get_doc({
"doctype":"Communication",
"subject": self.subject,
"content": self.get_message(),
"sent_or_received": "Sent",
"reference_doctype": self.reference_doctype,
"reference_name": self.reference_name
})
comm.insert(ignore_permissions=True)
def get_payment_success_url(self):
return self.payment_success_url
def on_payment_authorized(self, status=None):
if not status:
return
shopping_cart_settings = frappe.get_doc("Shopping Cart Settings")
if status in ["Authorized", "Completed"]:
redirect_to = None
self.run_method("set_as_paid")
# if shopping cart enabled and in session
if (shopping_cart_settings.enabled and hasattr(frappe.local, "session")
and frappe.local.session.user != "Guest"):
success_url = shopping_cart_settings.payment_success_url
if success_url:
redirect_to = ({
"Orders": "/orders",
"Invoices": "/invoices",
"My Account": "/me"
}).get(success_url, "/me")
else:
redirect_to = get_url("/orders/{0}".format(self.reference_name))
return redirect_to
def create_subscription(self, payment_provider, gateway_controller, data):
if payment_provider == "stripe":
return create_stripe_subscription(gateway_controller, data)
@frappe.whitelist(allow_guest=True)
def make_payment_request(**args):
"""Make payment request"""
args = frappe._dict(args)
ref_doc = frappe.get_doc(args.dt, args.dn)
grand_total = get_amount(ref_doc)
if args.loyalty_points and args.dt == "Sales Order":
from erpnext.accounts.doctype.loyalty_program.loyalty_program import validate_loyalty_points
loyalty_amount = validate_loyalty_points(ref_doc, int(args.loyalty_points))
frappe.db.set_value("Sales Order", args.dn, "loyalty_points", int(args.loyalty_points), update_modified=False)
frappe.db.set_value("Sales Order", args.dn, "loyalty_amount", loyalty_amount, update_modified=False)
grand_total = grand_total - loyalty_amount
gateway_account = get_gateway_details(args) or frappe._dict()
bank_account = (get_party_bank_account(args.get('party_type'), args.get('party'))
if args.get('party_type') else '')
existing_payment_request = None
if args.order_type == "Shopping Cart":
existing_payment_request = frappe.db.get_value("Payment Request",
{"reference_doctype": args.dt, "reference_name": args.dn, "docstatus": ("!=", 2)})
if existing_payment_request:
frappe.db.set_value("Payment Request", existing_payment_request, "grand_total", grand_total, update_modified=False)
pr = frappe.get_doc("Payment Request", existing_payment_request)
else:
if args.order_type != "Shopping Cart":
existing_payment_request_amount = \
get_existing_payment_request_amount(args.dt, args.dn)
if existing_payment_request_amount:
grand_total -= existing_payment_request_amount
pr = frappe.new_doc("Payment Request")
pr.update({
"payment_gateway_account": gateway_account.get("name"),
"payment_gateway": gateway_account.get("payment_gateway"),
"payment_account": gateway_account.get("payment_account"),
"payment_request_type": args.get("payment_request_type"),
"currency": ref_doc.currency,
"grand_total": grand_total,
"email_to": args.recipient_id or "",
"subject": _("Payment Request for {0}").format(args.dn),
"message": gateway_account.get("message") or get_dummy_message(ref_doc),
"reference_doctype": args.dt,
"reference_name": args.dn,
"party_type": args.get("party_type"),
"party": args.get("party"),
"bank_account": bank_account
})
if args.order_type == "Shopping Cart" or args.mute_email:
pr.flags.mute_email = True
if args.submit_doc:
pr.insert(ignore_permissions=True)
pr.submit()
if args.order_type == "Shopping Cart":
frappe.db.commit()
frappe.local.response["type"] = "redirect"
frappe.local.response["location"] = pr.get_payment_url()
if args.return_doc:
return pr
return pr.as_dict()
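# Usage sketch (hypothetical document names; mirrors the whitelisted signature
# above): create and submit a Payment Request for a Sales Order without
# sending the notification email.
#   pr = make_payment_request(dt="Sales Order", dn="SO-00042",
#       recipient_id="customer@example.com", submit_doc=True,
#       return_doc=True, mute_email=True)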
def get_amount(ref_doc):
"""get amount based on doctype"""
dt = ref_doc.doctype
if dt in ["Sales Order", "Purchase Order"]:
grand_total = flt(ref_doc.grand_total) - flt(ref_doc.advance_paid)
elif dt in ["Sales Invoice", "Purchase Invoice"]:
if ref_doc.party_account_currency == ref_doc.currency:
grand_total = flt(ref_doc.outstanding_amount)
else:
grand_total = flt(ref_doc.outstanding_amount) / ref_doc.conversion_rate
elif dt == "Fees":
grand_total = ref_doc.outstanding_amount
if grand_total > 0 :
return grand_total
else:
frappe.throw(_("Payment Entry is already created"))
def get_existing_payment_request_amount(ref_dt, ref_dn):
existing_payment_request_amount = frappe.db.sql("""
select sum(grand_total)
from `tabPayment Request`
where
reference_doctype = %s
and reference_name = %s
and docstatus = 1
and status != 'Paid'
""", (ref_dt, ref_dn))
return flt(existing_payment_request_amount[0][0]) if existing_payment_request_amount else 0
def get_gateway_details(args):
"""return gateway and payment account of default payment gateway"""
if args.get("payment_gateway"):
return get_payment_gateway_account(args.get("payment_gateway"))
if args.order_type == "Shopping Cart":
payment_gateway_account = frappe.get_doc("Shopping Cart Settings").payment_gateway_account
return get_payment_gateway_account(payment_gateway_account)
gateway_account = get_payment_gateway_account({"is_default": 1})
return gateway_account
def get_payment_gateway_account(args):
return frappe.db.get_value("Payment Gateway Account", args,
["name", "payment_gateway", "payment_account", "message"],
as_dict=1)
@frappe.whitelist()
def get_print_format_list(ref_doctype):
print_format_list = ["Standard"]
print_format_list.extend([p.name for p in frappe.get_all("Print Format",
filters={"doc_type": ref_doctype})])
return {
"print_format": print_format_list
}
@frappe.whitelist(allow_guest=True)
def resend_payment_email(docname):
return frappe.get_doc("Payment Request", docname).send_email()
@frappe.whitelist()
def make_payment_entry(docname):
doc = frappe.get_doc("Payment Request", docname)
return doc.create_payment_entry(submit=False).as_dict()
def make_status_as_paid(doc, method):
	for ref in doc.references:
		payment_request_name = frappe.db.get_value("Payment Request",
			{"reference_doctype": ref.reference_doctype, "reference_name": ref.reference_name,
			"docstatus": 1})
		if payment_request_name:
			# use a distinct name to avoid shadowing the payment-entry `doc` argument
			payment_request = frappe.get_doc("Payment Request", payment_request_name)
			if payment_request.status != "Paid":
				payment_request.db_set('status', 'Paid')
				frappe.db.commit()
def get_dummy_message(doc):
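	# The template is rendered once here with payment_url bound to the literal
	# string '{{ payment_url }}', so that placeholder survives this pass and is
	# filled in later, when the gateway URL is known (see get_message above).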
return frappe.render_template("""{% if doc.contact_person -%}
<p>Dear {{ doc.contact_person }},</p>
{%- else %}<p>Hello,</p>{% endif %}
<p>{{ _("Requesting payment against {0} {1} for amount {2}").format(doc.doctype,
doc.name, doc.get_formatted("grand_total")) }}</p>
<a href="{{ payment_url }}">{{ _("Make Payment") }}</a>
<p>{{ _("If you have any questions, please get back to us.") }}</p>
<p>{{ _("Thank you for your business!") }}</p>
""", dict(doc=doc, payment_url = '{{ payment_url }}'))
@frappe.whitelist()
def get_subscription_details(reference_doctype, reference_name):
if reference_doctype == "Sales Invoice":
		subscriptions = frappe.db.sql("""SELECT parent as sub_name FROM `tabSubscription Invoice` WHERE invoice=%s""", reference_name, as_dict=1)
subscription_plans = []
for subscription in subscriptions:
plans = frappe.get_doc("Subscription", subscription.sub_name).plans
for plan in plans:
subscription_plans.append(plan)
return subscription_plans
@frappe.whitelist()
def make_payment_order(source_name, target_doc=None):
from frappe.model.mapper import get_mapped_doc
def set_missing_values(source, target):
target.payment_order_type = "Payment Request"
target.append('references', {
'reference_doctype': source.reference_doctype,
'reference_name': source.reference_name,
'amount': source.grand_total,
'supplier': source.party,
'payment_request': source_name,
'mode_of_payment': source.mode_of_payment,
'bank_account': source.bank_account,
'account': source.account
})
doclist = get_mapped_doc("Payment Request", source_name, {
"Payment Request": {
"doctype": "Payment Order",
}
}, target_doc, set_missing_values)
return doclist
|
import unittest as ut
import unittest_decorators as utx
import espressomd
import numpy as np
@utx.skipIfMissingFeatures("ROTATION")
class Rotation(ut.TestCase):
s = espressomd.System(box_l=[1.0, 1.0, 1.0])
s.cell_system.skin = 0
s.time_step = 0.01
def test_langevin(self):
"""Applies langevin thermostat and checks that correct axes get
thermalized"""
s = self.s
s.thermostat.set_langevin(gamma=1, kT=1, seed=42)
for x in (0, 1):
for y in (0, 1):
for z in (0, 1):
s.part.clear()
s.part.add(id=0, pos=(0, 0, 0), rotation=(x, y, z),
quat=(1, 0, 0, 0), omega_body=(0, 0, 0),
torque_lab=(0, 0, 0))
s.integrator.run(500)
self.validate(x, 0)
self.validate(y, 1)
self.validate(z, 2)
def validate(self, rotate, coord):
if rotate:
# self.assertNotEqual(self.s.part[0].torque_body[coord],0)
self.assertNotEqual(self.s.part[0].omega_body[coord], 0)
else:
# self.assertEqual(self.s.part[0].torque_body[coord],0)
self.assertEqual(self.s.part[0].omega_body[coord], 0)
@utx.skipIfMissingFeatures("EXTERNAL_FORCES")
def test_axes_changes(self):
"""Verifies that rotation axes in body and space frame stay the same
and other axes don't"""
s = self.s
s.part.clear()
s.part.add(id=0, pos=(0.9, 0.9, 0.9), ext_torque=(1, 1, 1))
s.thermostat.turn_off()
        for direction in (0, 1, 2):
            # Reset orientation
            s.part[0].quat = [1, 0, 0, 0]
            # Enable rotation in a single direction
            rot = [0, 0, 0]
            rot[direction] = 1
            s.part[0].rotation = rot
s.integrator.run(30)
s.integrator.run(100)
# Check other axes:
for axis in [1, 0, 0], [0, 1, 0], [0, 0, 1]:
if rot == axis:
# The axis for which rotation is on should coincide in body
# and space frame
self.assertAlmostEqual(
np.dot(rot, s.part[0].convert_vector_body_to_space(rot)), 1, places=8)
else:
# For non-rotation axis, body and space frame should differ
self.assertLess(
np.dot(axis, s.part[0].convert_vector_body_to_space(axis)), 0.95)
def test_frame_conversion_and_rotation(self):
s = self.s
s.part.clear()
p = s.part.add(pos=np.random.random(3), rotation=(1, 1, 1))
        # Space and body frame coincide?
np.testing.assert_allclose(
np.copy(p.director), p.convert_vector_body_to_space((0, 0, 1)), atol=1E-10)
        # Random vector should still coincide
v = (1., 5.5, 17)
np.testing.assert_allclose(
v, p.convert_vector_space_to_body(v), atol=1E-10)
np.testing.assert_allclose(
v, p.convert_vector_body_to_space(v), atol=1E-10)
# Particle rotation
p.rotate((1, 2, 0), np.pi / 4)
# Check angle for director
self.assertAlmostEqual(
np.arccos(np.dot(p.director, (0, 0, 1))), np.pi / 4, delta=1E-10)
# Check other vector
v = (5, -7, 3)
v_r = p.convert_vector_body_to_space(v)
self.assertAlmostEqual(np.dot(v, v), np.dot(v_r, v_r), delta=1e-10)
np.testing.assert_allclose(
p.convert_vector_space_to_body(v_r), v, atol=1E-10)
        # Rotation axis should coincide
np.testing.assert_allclose(
(1, 2, 0), p.convert_vector_body_to_space((1, 2, 0)))
# Check rotation axis with all elements set
p.rotate(axis=(-5, 2, 17), angle=1.)
v = (5, -7, 3)
v_r = p.convert_vector_body_to_space(v)
self.assertAlmostEqual(np.dot(v, v), np.dot(v_r, v_r), delta=1e-10)
np.testing.assert_allclose(
p.convert_vector_space_to_body(v_r), v, atol=1E-10)
def test_rotation_mpi_communication(self):
s = self.s
s.part.clear()
# place particle in cell with MPI rank 0
p = s.part.add(pos=0.01 * self.s.box_l, rotation=(1, 1, 1))
p.rotate((1, 0, 0), -np.pi / 2)
np.testing.assert_array_almost_equal(
np.copy(p.director), [0, 1, 0], decimal=10)
# place particle in cell with MPI rank N-1
p = s.part.add(pos=0.99 * self.s.box_l, rotation=(1, 1, 1))
p.rotate((1, 0, 0), -np.pi / 2)
np.testing.assert_array_almost_equal(
np.copy(p.director), [0, 1, 0], decimal=10)
if __name__ == "__main__":
ut.main()
|
"""Functions for editing general objects in seeddb.
(Not netboxes and services).
"""
import logging
from socket import gethostbyaddr, gethostbyname, error as SocketError
from IPy import IP
from django.shortcuts import render, get_object_or_404
from django.http import HttpResponseRedirect, Http404
from django.db.models import Q
from django.urls import reverse, NoReverseMatch
from django.utils import six
from nav.web.message import new_message, Messages
from nav.models.manage import Netbox, NetboxCategory, NetboxGroup
_logger = logging.getLogger(__name__)
def render_edit(
request,
model,
form_model,
object_id,
redirect,
template='seeddb/edit.html',
lon=None,
lat=None,
extra_context=None,
action='edit',
):
"""Handles editing for objects in seeddb."""
if not extra_context:
extra_context = {}
obj = _get_object(model, object_id)
verbose_name = model._meta.verbose_name
if not obj and (lat and lon):
obj = model(position='({0},{1})'.format(lat, lon))
original_pk = getattr(obj, 'pk', None)
if action == 'copy' and original_pk:
obj.pk = None
if request.method == 'POST':
form = form_model(request.POST, instance=obj)
if form.is_valid():
# TODO: It's kinda hackish to put this here. Discuss during review
# Store devices in group when editing a device group (which we
# have no idea if we are doing or not)
if model == NetboxGroup:
netboxes = request.POST.getlist('netboxes')
_logger.debug('netboxes in group: %s', netboxes)
# Save model but make sure m2m is not saved. See
# https://docs.djangoproject.com/en/1.4/topics/db/models
# /#extra-fields-on-many-to-many-relationships
obj = form.save(commit=False)
obj.save()
_connect_group_to_devices(obj, netboxes)
else:
obj = form.save()
new_message(request, "Saved %s %s" % (verbose_name, obj), Messages.SUCCESS)
try:
return HttpResponseRedirect(reverse(redirect, args=(obj.pk,)))
except NoReverseMatch:
return HttpResponseRedirect(reverse(redirect))
else:
form = form_model(instance=obj)
context = {
'object': obj,
'form': form,
'title': 'Add new %s' % verbose_name,
'verbose_name': verbose_name,
'sub_active': {'add': True},
}
if obj:
if obj.pk:
context.update(
{
'title': 'Edit %s "%s"' % (verbose_name, obj),
'sub_active': {'edit': True},
}
)
else:
context.update(
{
'title': 'Copy %s "%s"' % (verbose_name, original_pk),
'sub_active': {'edit': True},
}
)
extra_context.update(context)
return render(request, template, extra_context)
def _get_object(model, object_id, identifier_attr='pk'):
"""Get object if it exists, else raise 404"""
try:
return get_object_or_404(model, **{identifier_attr: object_id})
except Http404:
pass
def resolve_ip_and_sysname(name):
"""Given a name that can be either an ip or a hostname/domain name, this
function looks up IP and hostname.
name - ip or hostname
Returns:
    - tuple with IP address and sysname
"""
try:
ip_addr = IP(name)
except ValueError:
ip_addr = IP(gethostbyname(name))
try:
sysname = gethostbyaddr(six.text_type(ip_addr))[0]
except SocketError:
sysname = six.text_type(ip_addr)
return (ip_addr, sysname)
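# Sketch (hypothetical host): resolve_ip_and_sysname('example.org') would
# return something like (IP('93.184.216.34'), 'example.org'), while an input
# the resolver cannot reverse falls back to the IP string as the sysname.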
def does_ip_exist(ip_addr, netbox_id=None):
"""Checks if the given IP already exist in database.
Parameters:
    * ip_addr - the IP address to look for.
* netbox_id - a netbox primary key that can have the given ip_addr, and
the function will still return False.
Returns:
- True if the IP already exists in the database (and the netbox with the
IP is not the same as the given netbox_id).
- False if not.
"""
if netbox_id:
ip_qs = Netbox.objects.filter(Q(ip=six.text_type(ip_addr)), ~Q(id=netbox_id))
else:
ip_qs = Netbox.objects.filter(ip=six.text_type(ip_addr))
    return ip_qs.exists()
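# Sketch: does_ip_exist(IP('10.0.0.1')) is True when some Netbox already has
# that IP; passing that netbox's own pk as netbox_id excludes it, so editing
# a box does not collide with itself.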
def does_sysname_exist(sysname, netbox_id=None):
"""Checks if given sysname exists in database.
Parameters:
    * sysname - the sysname to look for.
    * netbox_id - a netbox primary key that can have the given sysname, and
the function will still return False.
Returns:
- True if the sysname already exists in the database (and the netbox with
the sysname is not the same as the given netbox_id).
- False if not.
"""
if netbox_id:
sysname_qs = Netbox.objects.filter(Q(sysname=sysname), ~Q(id=netbox_id))
else:
sysname_qs = Netbox.objects.filter(sysname=sysname)
    return sysname_qs.exists()
def _connect_group_to_devices(group, netbox_ids):
"""
Connect a NetboxGroup and Netboxes by creating instances of
NetboxCategories
:param nav.models.manage.NetboxGroup group: A netboxgroup
:param list[str] netbox_ids: a result from a request.POST.getlist that
should contain netbox id's as strings
"""
netboxids = [int(x) for x in netbox_ids]
# Delete existing netboxcategories that are not in request
NetboxCategory.objects.filter(category=group).exclude(
netbox__pk__in=netboxids
).delete()
# Add new netboxcategories that are in request
for netboxid in netboxids:
try:
NetboxCategory.objects.get(category=group, netbox__pk=netboxid)
except NetboxCategory.DoesNotExist:
netbox = Netbox.objects.get(pk=netboxid)
NetboxCategory.objects.create(category=group, netbox=netbox)
|
from __future__ import unicode_literals
import django.contrib.postgres.fields.jsonb
import django.core.serializers.json
from django.db import migrations, models
class Migration(migrations.Migration):
dependencies = [
('materia', '0072_auto_20200427_1021'),
]
operations = [
migrations.AddField(
model_name='documentoacessorio',
name='metadata',
field=django.contrib.postgres.fields.jsonb.JSONField(blank=True, default=None, encoder=django.core.serializers.json.DjangoJSONEncoder, null=True, verbose_name='Metadados'),
),
migrations.AlterField(
model_name='documentoacessorio',
name='nome',
field=models.CharField(max_length=50, verbose_name='Título do Documento'),
),
]
|
from pyspace.planet import PlanetArray
from pyspace.simulator import BarnesSimulator
import numpy
x, y, z = numpy.mgrid[0:500:5j, 0:500:5j, 0:500:5j]
x = x.ravel(); y = y.ravel(); z = z.ravel()
pa = PlanetArray(x, y, z)
sim = BarnesSimulator(pa, 1, 1, 0, sim_name="square_grid")
sim.simulate(1000, dump_output = True)
|
from datetime import datetime, timedelta
from django.contrib.auth.models import User
from django.db.models import Sum
from django.utils import timezone
from django.db import models
class Category(models.Model):
title = models.CharField(max_length=55, default="Unknown")
level = models.IntegerField(default=0)
parent = models.ForeignKey('self', blank=True, null=True)
def __unicode__(self):
return self.title
def no_of_podcasts(self):
return self.podcastcategories_set.count()
class Podcast(models.Model):
title = models.CharField(max_length=100, default="Unknown")
link = models.CharField(max_length=255)
copyright = models.CharField(max_length=100, null=True)
description = models.TextField(null=True)
language = models.CharField(max_length=100, null=True)
itunes_author = models.CharField(max_length=100, null=True)
itunes_block = models.BooleanField(default=False)
itunes_complete = models.BooleanField(default=False)
itunes_explicit = models.BooleanField(default=False)
itunes_image = models.CharField(max_length=255, null=True)
itunes_owner_name = models.CharField(max_length=100, null=True)
itunes_owner_email = models.CharField(max_length=100, null=True)
itunes_subtitle = models.TextField(null=True)
itunes_summary = models.TextField(null=True)
    last_updated = models.DateTimeField('Last updated', default=timezone.now)  # callable: evaluated per save, not once at import
def __unicode__(self):
return self.title
@property
def sorted_episode_set(self):
return self.episode_set.order_by('-pub_date')
def total_rating(self):
return self.userpodcast_set.aggregate(Sum('rating'))
class PodcastCategories(models.Model):
category = models.ForeignKey(Category)
podcast = models.ForeignKey(Podcast)
def __unicode__(self):
return u'%s - %s' % (self.category.title, self.podcast.title)
class UserPodcast(models.Model):
user = models.ForeignKey(User)
podcast = models.ForeignKey(Podcast)
following = models.BooleanField(default=False)
rating = models.IntegerField(default=0)
    last_updated = models.DateTimeField('Last Updated', default=timezone.now)  # callable, not datetime.now()
def __unicode__(self):
return '%s - %s' % (self.user.username, self.podcast.title)
def no_of_unarchived(self):
return UserEpisode.objects.filter(user=self.user, episode__podcast=self.podcast, archived=False).count()
class UserProfile(models.Model):
user = models.ForeignKey(User)
public_profile = models.BooleanField(default=False)
share_episodes = models.BooleanField(default=True)
share_podcasts = models.BooleanField(default=True)
def __unicode__(self):
if self.user.first_name or self.user.last_name:
return u'%s %s' % (self.user.first_name, self.user.last_name)
else:
return self.user.username
class Episode(models.Model):
podcast = models.ForeignKey(Podcast)
title = models.CharField(max_length=100)
guid = models.CharField(max_length=255, unique=True)
enclosureUrl = models.CharField(max_length=255, null=True)
enclosureLength = models.IntegerField(default=-1)
enclosureType = models.CharField(max_length=30, null=True)
itunes_author = models.CharField(max_length=100, null=True)
itunes_block = models.BooleanField(default=False)
itunes_duration = models.IntegerField(default=-1)
itunes_itunesIsClosedCaption = models.BooleanField(default=False)
itunes_image = models.CharField(max_length=255, null=True)
itunes_explicit = models.BooleanField(default=False)
itunes_subtitle = models.TextField(null=True)
itunes_summary = models.TextField(null=True)
pub_date = models.DateTimeField('Date published')
def __unicode__(self):
return self.title
def was_published_recently(self):
return self.pub_date >= timezone.now() - timedelta(days=7)
was_published_recently.admin_order_field = 'pub_date'
was_published_recently.boolean = True
was_published_recently.short_description = 'Published recently?'
def itunes_duration_as_string(self):
return str(timedelta(seconds=self.itunes_duration))
def total_rating(self):
return self.userepisode_set.aggregate(Sum('rating'))
class UserEpisode(models.Model):
PLAYING_UNPLAYED, PLAYING_PARTIALLY, PLAYING_FINISHED = range(0, 3)
PlayingStatus = (
(PLAYING_UNPLAYED, 'Unplayed'),
(PLAYING_PARTIALLY, 'Partially played'),
(PLAYING_FINISHED, 'Finished'),
)
user = models.ForeignKey(User)
episode = models.ForeignKey(Episode)
archived = models.BooleanField(default=False)
starred = models.BooleanField(default=False)
rating = models.IntegerField(default=0)
playing_status = models.IntegerField(choices=PlayingStatus, default=PLAYING_UNPLAYED)
playing_current_time = models.IntegerField(default=0)
    last_updated = models.DateTimeField('Last Updated', default=timezone.now)  # callable, not datetime.now()
def __unicode__(self):
return '%s - %s' % (self.user.username, self.episode.title)
class UserUser(models.Model):
source = models.ForeignKey(User, related_name='useruser_source')
target = models.ForeignKey(User, related_name='useruser_target')
following = models.BooleanField(default=False)
    last_updated = models.DateTimeField('Last Updated', default=timezone.now)  # callable, not datetime.now()
def __unicode__(self):
return '%s - %s' % (self.source.username, self.target.username)
|
import numpy.testing as npt
import numpy as np
from pyhdf.SD import SD, SDC
from utilities import learning_data as ld
class Test:
def test_get_unique_variable_prefixes(self):
variable_names = ['penguin0', 'penguin1']
npt.assert_array_equal(ld.get_unique_variable_prefixes(variable_names), ['penguin'])
variable_names.append('ferret0')
npt.assert_array_equal(ld.get_unique_variable_prefixes(variable_names), ['penguin', 'ferret'])
def test_get_variable_groups(self):
variable_names = ['penguin0', 'penguin1']
unique_prefixes = ld.get_unique_variable_prefixes(variable_names)
npt.assert_array_equal(ld.get_variable_groups(variable_names, unique_prefixes),
[['penguin0', 'penguin1']])
variable_names.append('ferret0')
unique_prefixes = ld.get_unique_variable_prefixes(variable_names)
npt.assert_array_equal(ld.get_variable_groups(variable_names, unique_prefixes),
[['penguin0', 'penguin1'], ['ferret0']])
def test_get_variable_type_indices(self):
npt.assert_array_equal(ld.get_variable_type_indices([[]]), [])
variable_groups = [['penguin0', 'penguin1']]
npt.assert_array_equal(ld.get_variable_type_indices(variable_groups), [1])
variable_groups.append([])
npt.assert_array_equal(ld.get_variable_type_indices(variable_groups), [1])
variable_groups.append(['ferret0'])
npt.assert_array_equal(ld.get_variable_type_indices(variable_groups), [1, 2])
def test_to_from_hdf(self, tmpdir):
file_name = str(tmpdir) + "/penguin.hdf"
variable_names = ['lst1', 'lst2', 'lst3', 'snow1', 'snow2', 'ndvi1', 'ndvi2', 'ndvi3', 'ndvi4', 'ndvi5']
dat = np.random.rand(10, 11)
years = range(10)
learning_data = ld.LearningData()
learning_data.from_data(dat, variable_names, 'penguin')
learning_data.meta_layers['years'] = np.array(years)
learning_data.attributes = {'penguin': 'yes', 'tophat': 'no'}
learning_data.to_hdf(file_name)
training_data = ld.LearningData()
training_data.from_hdf(file_name)
assert training_data.num_variables == 10
assert training_data.num_observations == 10
npt.assert_array_equal(training_data.variable_names, ['lst1', 'lst2', 'lst3', 'snow1', 'snow2', 'ndvi1',
'ndvi2', 'ndvi3', 'ndvi4', 'ndvi5'])
npt.assert_array_equal(training_data.unique_variable_prefixes, ['lst', 'snow', 'ndvi'])
npt.assert_array_equal(training_data.variable_type_indices, [2, 4, 9])
assert training_data.name == 'penguin'
npt.assert_array_equal(training_data.meta_layers['years'], np.array(years))
npt.assert_array_equal(training_data.predictors, dat[:, :-1])
npt.assert_array_equal(training_data.response, dat[:, -1])
npt.assert_array_equal(training_data.design_matrix.dat, dat)
assert training_data.attributes['penguin'] == 'yes'
assert training_data.attributes['tophat'] == 'no'
def test_good_values(self):
dat = np.random.rand(10, 11)
dat[1, 2] = np.inf
dat[5, 3] = -np.inf
dat[7, 9] = np.nan
dat[9, 3] = np.inf
learning_data = ld.LearningData()
learning_data.from_data(dat, None, 'penguin')
        assert not np.isnan(learning_data.predictors).any()
        assert np.isfinite(learning_data.predictors).all()
|
import random
from logs import *
class Card(object):
"""Creates the card objects used in game"""
def __init__(self, name, attack, money, cost, name_padding=15, num_padding=2):
self.name = name
self.cost = cost
self.attack = attack
self.money = money
self.name_padding = name_padding
self.num_padding = num_padding
self.padded_vals = (
str(self.cost).ljust(self.num_padding),
self.name.ljust(self.name_padding),
str(self.attack).ljust(self.num_padding),
str(self.money).ljust(self.num_padding),
)
def __str__(self):
"""outputs string of the card details when called as print Card()"""
s_out = "Cost: {0} ~ {1} ~ Stats ... Attack: {2}, Money: {3}".format(
*self.padded_vals)
return s_out
def get_attack(self):
return self.attack
def get_money(self):
return self.money
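# Usage sketch: cards print via __str__ with padded columns, e.g.
#   archer = Card('Archer', attack=3, money=0, cost=2)
#   print archer   # -> "Cost: 2  ~ Archer ... Attack: 3 , Money: 0 "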
@wrap_all(log_me)
class CommonActions(object):
"""Contains the common actions
used by all game classes
"""
def __init__(self):
# self.art = Art()
pass
def deck_to_hand(self):
"""
Move cards from central.central deck
to active central.central deck
Container is the dictionary within the
class that need to be called with the
getattr()
"""
# For each index in player hand
# Refills player hand from player deck.
# If deck is empty, discard pile is shuffled
# and becomes deck
for i in xrange(0, self.hand_size):
            # If the deck is empty, reshuffle the discard pile into
            # the deck; this happens at most once per refill
if len(self.deck) == 0:
self.logger.debug("Deck length is zero!")
if len(self.discard) == 0:
self.logger.debug("Discard length is also zero!")
self.logger.debug("Exiting the deck_to_hand routine as no more cards.")
return
random.shuffle(self.discard) # Shuffle discard pile
self.logger.debug("shuffled deck")
self.deck = self.discard # Make deck the discard
self.discard = [] # empty the discard pile
self.logger.debug("Moved discard pile to deck. Discard pile set to empty.")
card = self.deck.pop()
self.hand.append(card)
self.logger.debug("Iteration #{}: Drawn {} from deck and added to hand".format(i,card.name))
pass
def print_active_cards(self, title=None, index=False):
"""Display cards in active"""
if title is None: title = "Your Played Cards"
# switch depending on player type
self.logger.debug("Actor is: {}".format(type(self).__name__))
title = self.art.make_title(title)
self.player_logger(title)
self._print_cards(self.active, index=index)
self.player_logger(self.art.underline)
pass
    def deck_creator(self, deck_list):
        """Creates the deck from a list of dictionaries
        _Input_
        list of dicts.
        dict contents:
        "params" : dict containing all **kwargs for Card()
        "count" : number of cards with these settings to create
        _Output_
        list of Card() types
        Expected input example:
        [{"count":1, "params":{"name":'Archer', "attack":3, "money":0, "cost":2}},
        {"count":2, "params":{"name":'Baker', "attack":0, "money":0, "cost":2}}]
        Expected Output example:
        [Card('Archer', 3,0,2), Card('Baker', 0,0,2), Card('Baker', 0,0,2)]
        """
deck = [] # get deck ready
for card in deck_list:
for _ in xrange(card["count"]):
# passes the dictionary as a keyword arg (**kwarg)
deck.append(Card(
name_padding=self.parent.max_card_name_len,
num_padding=2,
**card["params"]
))
self.logger.debug("Created {}x{}".format(card["count"], card["params"]["name"]))
return deck
def _print_cards(self, cards, index=False):
"""Prints out the cards provided"""
# max card name length
if len(cards) == 0:
self.logger.game(self.art.index_buffer+ \
"Nothing interesting to see here...")
else:
for i, card in enumerate(cards):
num_str = "[{}] ".format(i) if index else self.art.index_buffer
self.logger.game(num_str + "{}".format(card))
pass
@wrap_all(log_me)
class CommonUserActions(object):
"""Contains actions for user and computer"""
def __init__(self):
pass
def newgame(self):
# revert to initial state
for attr, val in self.init.iteritems():
setattr(self, attr, val)
self.active = []
self.hand = []
self.discard = []
self.deck = self.deck_creator(self.deck_settings)
pass
def end_turn(self):
"""Ends the turn of the user"""
self.logger.debug("Ending Turn: {}".format(self.name))
# If player has cards in the hand add to discard pile
self.discard_hand()
# If there cards in active deck
# then move all cards from active to discard
self.discard_active_cards()
# Move cards from deck to hand
self.deck_to_hand()
pass
def play_all_cards(self):
"""transfer all cards from hand to active
add values in hand to current totals
should only be used by User and Computer
"""
for i in xrange(0, len(self.hand)):
card = self.hand.pop()
self.active.append(card)
self.logger.debug("Iteration #{}: Drawn {} from deck and added to active deck".format(i,card.name))
self.__add_values_to_total(card)
pass
def play_a_card(self, card_number):
"""plays a specific card...
Transfer card to active
add values in hand to current totals
"""
i=0
card_number = int(card_number)
# Transfer card to active
# add values in hand to current totals
card = self.hand.pop(card_number)
self.active.append(card)
self.logger.debug("Iteration #{}: Drawn {} from deck and added to active deck".format(i,card.name))
self.__add_values_to_total(card)
pass
def __add_values_to_total(self, card):
"""Adds money and attack to total"""
money_i = card.get_money()
attack_i = card.get_attack()
self.logger.debug("Money:{}+{} Attack:{}+{}".format(self.money, money_i, self.attack, attack_i))
self.money += money_i
self.attack += attack_i
pass
def discard_hand(self):
"""If there are cards in the hand add to discard pile"""
        if len(self.hand) > 0:
# Iterate through all cards in player hand
for i in xrange(0, len(self.hand)):
card = self.hand.pop()
self.logger.debug("Iteration #{}: Moving {} from hand and added to discard pile".format(i, card.name))
self.discard.append(card)
else:
self.logger.debug("Hand length is zero. No cards to discard.")
pass
def discard_active_cards(self):
"""If there cards in PC active deck
then move all cards from active to discard"""
        if len(self.active) > 0:
for i in xrange(0, len(self.active)):
card = self.active.pop()
self.logger.debug("Iteration #{}: Moving {} from hand and added to discard pile".format(i, card.name))
self.discard.append(card)
else:
self.logger.debug("Active Deck length is zero. No cards to discard.")
pass
def display_values(self, attack=None, money=None):
""" Display player values"""
# allows forced values
if attack is None: attack = self.attack
if money is None: money = self.money
padded_name = self.name.ljust(self.parent.max_player_name_len)
out_str = "{} Values :: ".format(padded_name)
out_str += " Attack: {} Money: {}".format(
attack, money)
self.player_logger("")
self.player_logger(out_str)
self.player_logger("")
pass
def show_health(self):
"""Shows players' health"""
# creates an attribute based on the class
padded_name = self.name.ljust(self.parent.max_player_name_len)
out_str = "{} Health : ".format(padded_name)
out_str += "{}".format(self.health)
self.player_logger(out_str)
pass
def attack_player(self, other_player):
""" Attack another player
other_player expected input is a class
that corresponds to another sibling player
an example of this from self = game.User() would be:
self.attack(self.parent.computer)
which would attack the computer form the player
"""
self.logger.debug("{0} Attacking {1} with strength {2}".format(self.name, other_player.name, self.attack))
self.logger.debug("{0} Health before attack: {1}".format(other_player.name, other_player.health))
other_player.health -= self.attack
self.attack = 0
self.logger.debug("{0} Attack: {1}".format(self.name, self.attack))
pass
def reset_vals(self):
"""resets money and attack"""
self.logger.debug("Money and Attack set to 0 for {}".format(self.name))
self.money = 0
self.attack = 0
pass
|
from pyramid.request import Request
from pyramid.interfaces import IRequestExtensions
from pytest import fixture, mark
import pylf.icons
MIMETYPES = [
("text", "plain", "text-plain.png"),
("image", "jpeg", "image-jpeg.png"),
("inode", "directory", "inode-directory.png"),
("audio", "x-generic", "audio-x-generic.png"),
("unknown", None, "unknown.png"),
]
@fixture
def icons_dir(tmpdir):
for mtype, subtype, fname in MIMETYPES:
with tmpdir.join(fname).open("w"):
pass
return tmpdir
def test_load_mimetypes(icons_dir):
res = pylf.icons.load_mimetypes(str(icons_dir))
for mtype, subtype, fname in MIMETYPES:
assert (mtype, subtype) in res
assert res[(mtype, subtype)] == fname
assert len(res) == len(MIMETYPES)
def test_from_settings(icons_dir):
settings = {
"icons_path": str(icons_dir),
}
cut = pylf.icons.Icons.from_settings(settings)
assert cut.path == str(icons_dir)
@mark.parametrize(
("mtype", "expected"),
[
("text/plain", "text-plain.png"),
("audio/vorbis", "audio-x-generic.png"),
("video/ogg", "unknown.png"),
],
)
def test_for_mimetype(icons_dir, mtype, expected):
cut = pylf.icons.Icons(str(icons_dir))
assert cut.for_mimetype(mtype) == expected
def test_includeme(testconfig, icons_dir):
testconfig.get_settings()["icons_path"] = str(icons_dir)
testconfig.include(pylf.icons)
testconfig.commit()
req_exts = testconfig.registry.queryUtility(IRequestExtensions)
req = Request({})
req._set_extensions(req_exts)
assert hasattr(req, "icon_path")
icon_path = req.icon_path
assert icon_path((None, None)) == "/icons/unknown.png"
assert icon_path(("text/plain", None)) == "/icons/text-plain.png"
|
from base import Encoder
import datetime
from scalar import ScalarEncoder
import numpy
from nupic.data import SENTINEL_VALUE_FOR_MISSING_DATA
class DateEncoder(Encoder):
"""A date encoder encodes a date according to encoding parameters
specified in its constructor.
The input to a date encoder is a datetime.datetime object. The output
is the concatenation of several sub-encodings, each of which encodes
a different aspect of the date. Which sub-encodings are present, and
details of those sub-encodings, are specified in the DateEncoder
constructor.
Each parameter describes one attribute to encode. By default, the attribute
is not encoded.
season (season of the year; units = day):
(int) width of attribute; default radius = 91.5 days (1 season)
(tuple) season[0] = width; season[1] = radius
dayOfWeek (monday = 0; units = day)
(int) width of attribute; default radius = 1 day
(tuple) dayOfWeek[0] = width; dayOfWeek[1] = radius
weekend (boolean: 0, 1)
(int) width of attribute
holiday (boolean: 0, 1)
(int) width of attribute
  timeOfDay (midnight = 0; units = hour)
(int) width of attribute: default radius = 4 hours
(tuple) timeOfDay[0] = width; timeOfDay[1] = radius
"""
############################################################################
  def __init__(self, season=0, dayOfWeek=0, weekend=0, holiday=0, timeOfDay=0,
               customDays=0, name=''):
self.width = 0
self.description = []
self.name = name
# This will contain a list of (name, encoder, offset) tuples for use by
# the decode() method
self.encoders = []
self.seasonEncoder = None
if season != 0:
      # Ignore leap year differences -- assume 366 days in a year
# Radius = 91.5 days = length of season
# Value is number of days since beginning of year (0 - 355)
if hasattr(season, "__getitem__"):
w = season[0]
radius = season[1]
else:
w = season
radius = 91.5
self.seasonEncoder = ScalarEncoder(w = w, minval=0, maxval=366,
radius=radius, periodic=True,
name="season")
self.seasonOffset = self.width
self.width += self.seasonEncoder.getWidth()
self.description.append(("season", self.seasonOffset))
self.encoders.append(("season", self.seasonEncoder, self.seasonOffset))
self.dayOfWeekEncoder = None
if dayOfWeek != 0:
# Value is day of week (floating point)
# Radius is 1 day
if hasattr(dayOfWeek, "__getitem__"):
w = dayOfWeek[0]
radius = dayOfWeek[1]
else:
w = dayOfWeek
radius = 1
self.dayOfWeekEncoder = ScalarEncoder(w = w, minval=0, maxval=7,
radius=radius, periodic=True,
name="day of week")
self.dayOfWeekOffset = self.width
self.width += self.dayOfWeekEncoder.getWidth()
self.description.append(("day of week", self.dayOfWeekOffset))
self.encoders.append(("day of week", self.dayOfWeekEncoder, self.dayOfWeekOffset))
self.weekendEncoder = None
if weekend != 0:
# Binary value. Not sure if this makes sense. Also is somewhat redundant
# with dayOfWeek
#Append radius if it was not provided
if not hasattr(weekend, "__getitem__"):
weekend = (weekend,1)
self.weekendEncoder = ScalarEncoder(w = weekend[0], minval = 0, maxval=1,
periodic=False, radius=weekend[1],
name="weekend")
self.weekendOffset = self.width
self.width += self.weekendEncoder.getWidth()
self.description.append(("weekend", self.weekendOffset))
self.encoders.append(("weekend", self.weekendEncoder, self.weekendOffset))
#Set up custom days encoder, first argument in tuple is width
#second is either a single day of the week or a list of the days
#you want encoded as ones.
self.customDaysEncoder = None
    if customDays != 0:
      customDayEncoderName = ""
      daysToParse = []
      assert len(customDays) == 2, "Please provide a w and the desired days"
if isinstance(customDays[1], list):
for day in customDays[1]:
customDayEncoderName+=str(day)+" "
daysToParse=customDays[1]
elif isinstance(customDays[1], str):
customDayEncoderName+=customDays[1]
daysToParse = [customDays[1]]
else:
assert False, "You must provide either a list of days or a single day"
#Parse days
self.customDays = []
for day in daysToParse:
if(day.lower() in ["mon","monday"]):
self.customDays+=[0]
elif day.lower() in ["tue","tuesday"]:
self.customDays+=[1]
elif day.lower() in ["wed","wednesday"]:
self.customDays+=[2]
elif day.lower() in ["thu","thursday"]:
self.customDays+=[3]
elif day.lower() in ["fri","friday"]:
self.customDays+=[4]
elif day.lower() in ["sat","saturday"]:
self.customDays+=[5]
elif day.lower() in ["sun","sunday"]:
self.customDays+=[6]
else:
assert False, "Unable to understand %s as a day of week" % str(day)
self.customDaysEncoder = ScalarEncoder(w=customDays[0], minval = 0, maxval=1,
periodic=False, radius=1,
name=customDayEncoderName)
self.customDaysOffset = self.width
self.width += self.customDaysEncoder.getWidth()
self.description.append(("customdays", self.customDaysOffset))
self.encoders.append(("customdays", self.customDaysEncoder, self.customDaysOffset))
self.holidayEncoder = None
if holiday != 0:
# A "continuous" binary value. = 1 on the holiday itself and smooth ramp
# 0->1 on the day before the holiday and 1->0 on the day after the holiday.
self.holidayEncoder = ScalarEncoder(w = holiday, minval = 0, maxval=1,
periodic=False, radius=1,
name="holiday")
self.holidayOffset = self.width
self.width += self.holidayEncoder.getWidth()
self.description.append(("holiday", self.holidayOffset))
self.encoders.append(("holiday", self.holidayEncoder, self.holidayOffset))
self.timeOfDayEncoder = None
if timeOfDay != 0:
# Value is time of day in hours
# Radius = 4 hours, e.g. morning, afternoon, evening, early night,
# late night, etc.
if hasattr(timeOfDay, "__getitem__"):
w = timeOfDay[0]
radius = timeOfDay[1]
else:
w = timeOfDay
radius = 4
self.timeOfDayEncoder = ScalarEncoder(w = w, minval=0, maxval=24,
periodic=True, radius=radius, name="time of day")
self.timeOfDayOffset = self.width
self.width += self.timeOfDayEncoder.getWidth()
self.description.append(("time of day", self.timeOfDayOffset))
self.encoders.append(("time of day", self.timeOfDayEncoder, self.timeOfDayOffset))
############################################################################
def getWidth(self):
return self.width
############################################################################
def getScalarNames(self, parentFieldName=''):
""" See method description in base.py """
names = []
# This forms a name which is the concatenation of the parentFieldName
# passed in and the encoder's own name.
def _formFieldName(encoder):
if parentFieldName == '':
return encoder.name
else:
return '%s.%s' % (parentFieldName, encoder.name)
# -------------------------------------------------------------------------
# Get the scalar values for each sub-field
if self.seasonEncoder is not None:
names.append(_formFieldName(self.seasonEncoder))
if self.dayOfWeekEncoder is not None:
names.append(_formFieldName(self.dayOfWeekEncoder))
if self.customDaysEncoder is not None:
names.append(_formFieldName(self.customDaysEncoder))
if self.weekendEncoder is not None:
names.append(_formFieldName(self.weekendEncoder))
if self.holidayEncoder is not None:
names.append(_formFieldName(self.holidayEncoder))
if self.timeOfDayEncoder is not None:
names.append(_formFieldName(self.timeOfDayEncoder))
return names
############################################################################
def getEncodedValues(self, input):
""" See method description in base.py """
if input == SENTINEL_VALUE_FOR_MISSING_DATA:
return numpy.array([None])
assert isinstance(input, datetime.datetime)
values = []
# -------------------------------------------------------------------------
# Get the scalar values for each sub-field
timetuple = input.timetuple()
timeOfDay = timetuple.tm_hour + float(timetuple.tm_min)/60.0
if self.seasonEncoder is not None:
dayOfYear = timetuple.tm_yday
# input.timetuple() computes the day of year 1 based, so convert to 0 based
values.append(dayOfYear-1)
if self.dayOfWeekEncoder is not None:
dayOfWeek = timetuple.tm_wday #+ timeOfDay / 24.0
values.append(dayOfWeek)
if self.weekendEncoder is not None:
# saturday, sunday or friday evening
if timetuple.tm_wday == 6 or timetuple.tm_wday == 5 \
or (timetuple.tm_wday == 4 and timeOfDay > 18):
weekend = 1
else:
weekend = 0
values.append(weekend)
if self.customDaysEncoder is not None:
if timetuple.tm_wday in self.customDays:
customDay = 1
else:
customDay = 0
values.append(customDay)
if self.holidayEncoder is not None:
# A "continuous" binary value. = 1 on the holiday itself and smooth ramp
# 0->1 on the day before the holiday and 1->0 on the day after the holiday.
# Currently the only holiday we know about is December 25
# holidays is a list of holidays that occur on a fixed date every year
holidays = [(12, 25)]
val = 0
for h in holidays:
# hdate is midnight on the holiday
hdate = datetime.datetime(timetuple.tm_year, h[0], h[1], 0, 0, 0)
if input > hdate:
diff = input - hdate
if diff.days == 0:
# return 1 on the holiday itself
val = 1
break
elif diff.days == 1:
# ramp smoothly from 1 -> 0 on the next day
val = 1.0 - (float(diff.seconds) / (86400))
break
else:
diff = hdate - input
if diff.days == 0:
# ramp smoothly from 0 -> 1 on the previous day
val = 1.0 - (float(diff.seconds) / 86400)
values.append(val)
if self.timeOfDayEncoder is not None:
values.append(timeOfDay)
return values
############################################################################
def getScalars(self, input):
""" See method description in base.py
Parameters:
-----------------------------------------------------------------------
input: A datetime object representing the time being encoded
Returns: A numpy array of the corresponding scalar values in
the following order:
                  [season, dayOfWeek, weekend, customDays, holiday, timeOfDay]
Note: some of these fields might be omitted if they were not
specified in the encoder
"""
return numpy.array(self.getEncodedValues(input))
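  # Sketch: with season, dayOfWeek, weekend and timeOfDay enabled (and the
  # other fields off), a Thursday 2010-11-04 14:55 input yields roughly
  # [307, 3, 0, 14.92]: day of year (0 based), weekday, weekend flag,
  # fractional hour of day.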
############################################################################
def getBucketIndices(self, input):
""" See method description in base.py """
if input == SENTINEL_VALUE_FOR_MISSING_DATA:
      # return one None per sub-encoder
return [None] * len(self.encoders)
else:
assert isinstance(input, datetime.datetime)
# Get the scalar values for each sub-field
scalars = self.getScalars(input)
      # Encode each sub-field
result = []
for i in xrange(len(self.encoders)):
(name, encoder, offset) = self.encoders[i]
result.extend(encoder.getBucketIndices(scalars[i]))
return result
############################################################################
def encodeIntoArray(self, input, output):
""" See method description in base.py """
if input == SENTINEL_VALUE_FOR_MISSING_DATA:
output[0:] = 0
else:
if not isinstance(input, datetime.datetime):
raise ValueError("Input is type %s, expected datetime. Value: %s" % (
type(input), str(input)))
# Get the scalar values for each sub-field
scalars = self.getScalars(input)
      # Encode each sub-field
for i in xrange(len(self.encoders)):
(name, encoder, offset) = self.encoders[i]
encoder.encodeIntoArray(scalars[i], output[offset:])
############################################################################
def getDescription(self):
return self.description
|