text
stringlengths 12
1.05M
| repo_name
stringlengths 5
86
| path
stringlengths 4
191
| language
stringclasses 1
value | license
stringclasses 15
values | size
int32 12
1.05M
| keyword
listlengths 1
23
| text_hash
stringlengths 64
64
|
|---|---|---|---|---|---|---|---|
# -*- coding: utf-8 -*-
# crossComptSimpleReacGSSA.py
#
# Filename:crossComptSimpleReacGSSA.py
# Author: Upinder S. Bhalla
# Maintainer:
# Created: Oct 12 16:26:05 2014 (+0530)
# Version:
# Last-Updated: May 16 2017
# By: Upinder S. Bhalla
# Update #: Dec 14 2021
# By: HarshaRani .G.V
# URL:
# Keywords:
# Compatibility:
#
# Commentary:
# set solver over cross compartment has updated
#
# Change log:
# This program is part of 'MOOSE', the
# Messaging Object Oriented Simulation Environment.
# Copyright (C) 2013 Upinder S. Bhalla. and NCBS
# It is made available under the terms of the
# GNU Lesser General Public License version 2.1
# See the file COPYING.LIB for the full notice.
import math
import pylab
import numpy
import moose
from moose.fixXreacs import fixXreacs
def makeModel():
    """Build the three-compartment reaction model a <=> b <=> c.

    Three cubic compartments of decreasing volume (1e-18, 1e-19, 1e-20 m^3)
    are positioned to abut each other, with compt1 in the middle.  Pools
    a, b and c live in compt0, compt1 and compt2 respectively; both Reac
    objects live in compt1.  Table2 recorders track the concentrations,
    and one Gsolve/Stoich (stochastic GSSA) solver pair is configured per
    compartment after fixXreacs() rewrites the cross-compartment reactions.
    """
    # create container for model
    model = moose.Neutral( 'model' )
    compt0 = moose.CubeMesh( '/model/compt0' )
    compt0.volume = 1e-18
    compt1 = moose.CubeMesh( '/model/compt1' )
    compt1.volume = 1e-19
    compt2 = moose.CubeMesh( '/model/compt2' )
    compt2.volume = 1e-20
    # Position containers so that they abut each other, with
    # compt1 in the middle.
    side = compt1.dy
    compt0.y1 += side
    compt0.y0 += side
    compt2.x1 += side
    compt2.x0 += side
    print(('Volumes = ', compt0.volume, compt1.volume, compt2.volume))
    # create molecules and reactions
    a = moose.Pool( '/model/compt0/a' )
    b = moose.Pool( '/model/compt1/b' )
    c = moose.Pool( '/model/compt2/c' )
    # Both reactions are deliberately placed in the middle compartment;
    # see main()'s docstring for why this placement is the cleanest.
    reac0 = moose.Reac( '/model/compt1/reac0' )
    reac1 = moose.Reac( '/model/compt1/reac1' )
    # connect them up for reactions
    moose.connect( reac0, 'sub', a, 'reac' )
    moose.connect( reac0, 'prd', b, 'reac' )
    moose.connect( reac1, 'sub', b, 'reac' )
    moose.connect( reac1, 'prd', c, 'reac' )
    # Assign parameters
    a.concInit = 1
    b.concInit = 12.1
    c.concInit = 1
    reac0.Kf = 0.1
    reac0.Kb = 0.1
    reac1.Kf = 0.1
    reac1.Kb = 0.1
    # Create the output tables
    graphs = moose.Neutral( '/model/graphs' )
    outputA = moose.Table2 ( '/model/graphs/concA' )
    outputB = moose.Table2 ( '/model/graphs/concB' )
    outputC = moose.Table2 ( '/model/graphs/concC' )
    # connect up the tables
    moose.connect( outputA, 'requestOut', a, 'getConc' );
    moose.connect( outputB, 'requestOut', b, 'getConc' );
    moose.connect( outputC, 'requestOut', c, 'getConc' );
    # Rewire the cross-compartment reactions before attaching solvers.
    fixXreacs('/model')
    # Build the solvers. No need for diffusion in this version.
    ksolve0 = moose.Gsolve( '/model/compt0/ksolve0' )
    ksolve1 = moose.Gsolve( '/model/compt1/ksolve1' )
    ksolve2 = moose.Gsolve( '/model/compt2/ksolve2' )
    stoich0 = moose.Stoich( '/model/compt0/stoich0' )
    stoich1 = moose.Stoich( '/model/compt1/stoich1' )
    stoich2 = moose.Stoich( '/model/compt2/stoich2' )
    # Configure solvers
    stoich0.compartment = compt0
    stoich1.compartment = compt1
    stoich2.compartment = compt2
    stoich0.ksolve = ksolve0
    stoich1.ksolve = ksolve1
    stoich2.ksolve = ksolve2
    stoich0.reacSystemPath = '/model/compt0/#'
    stoich1.reacSystemPath = '/model/compt1/#'
    stoich2.reacSystemPath = '/model/compt2/#'
    # NOTE(review): the explicit buildXreacs/filterXreacs calls below were
    # superseded by fixXreacs() above; kept (as an inert string literal)
    # for reference only.
    '''
    stoich1.buildXreacs( stoich0 )
    stoich1.buildXreacs( stoich2 )
    stoich0.filterXreacs()
    stoich1.filterXreacs()
    stoich2.filterXreacs()
    '''
def main():
    """
    This example illustrates a simple cross compartment reaction::

        a <===> b <===> c

    Here each molecule is in a different compartment.
    The initial conditions are such that the end conc on all compartments
    should be 2.0.
    The time course depends on which compartment the Reac object is
    embedded in.
    The cleanest thing numerically and also conceptually is to have both
    reactions in the same compartment, in this case the middle one
    (**compt1**).
    The initial conditions have a lot of **B**. The equilibrium with
    **C** is fast and so **C** shoots up and passes **B**, peaking at
    about (2.5,9). This is also just about the crossover point.
    **A** starts low and slowly climbs up to equilibrate.

    If we put **reac0** in **compt0** and **reac1** in **compt1**,
    it behaves the same qualitatively but now the peak is at around (1, 5.2).
    This configuration of reactions makes sense from the viewpoint of
    having the reactions always in the compartment with the smaller volume,
    which is important if we need to have junctions where many small voxels
    talk to one big voxel in another compartment.

    Note that putting the reacs in other compartments doesn't work and in
    some combinations (e.g., **reac0** in **compt0** and **reac1** in
    **compt2**) give numerical instability.
    """
    # Simulation and plot timesteps are left at MOOSE's autoscheduled
    # defaults; the previous simdt/plotdt locals were never applied.
    runtime = 100.0
    makeModel()
    # MOOSE autoschedules everything.
    moose.reinit()
    moose.start( runtime )  # Run the model for 100 seconds.
    print("All concs should converge to 2.0 even though vols differ:")
    for x in moose.wildcardFind( '/model/compt#/#[ISA=PoolBase]' ):
        print((x.name, x.conc))
    # Iterate through all plots and display them.
    for x in moose.wildcardFind( '/model/graphs/conc#' ):
        t = numpy.linspace( 0, runtime, x.vector.size )  # sec
        pylab.plot( t, x.vector, label=x.name )
    pylab.legend()
    pylab.show()
    # NOTE: the trailing quit() was removed -- it is the interactive-shell
    # helper and prevented importing/reusing this module cleanly; the
    # script exits normally once main() returns.

# Run the 'main' if this script is executed standalone.
if __name__ == '__main__':
    main()
|
BhallaLab/moose-examples
|
snippets/crossComptSimpleReacGSSA.py
|
Python
|
gpl-2.0
| 5,545
|
[
"MOOSE"
] |
86abfbb39aa514009e0aec7e514b520140c3814665f2e19de64400e6833bc3c0
|
# vim: set encoding=utf-8
# Copyright (c) 2016 Intel Corporation
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
from sparktk.loggers import log_load; log_load(__name__); del log_load
from sparktk.propobj import PropertiesObject
from sparktk import TkContext
import os
__all__ = ["train", "load", "GaussianMixtureModel"]
def train(frame,
          observation_columns,
          column_scalings,
          k=2,
          max_iterations=20,
          convergence_tol=0.01,
          seed=None):
    """
    Creates a GaussianMixtureModel by training on the given frame

    :param frame: (Frame) frame of training data
    :param observation_columns: (List(str)) names of columns containing the observations for training
    :param column_scalings: (List(float)) column scalings for each of the observation columns. The scaling
        value is multiplied by the corresponding value in the observation column
    :param k: (Optional(int)) number of clusters
    :param max_iterations: (Optional(int)) number of iterations for which the algorithm should run
    :param convergence_tol: (Optional(float)) Largest change in log-likelihood at which convergence is considered to have occurred
    :param seed: (Optional(int)) seed for randomness
    :return: GaussianMixtureModel
    """
    if frame is None:
        raise ValueError("frame cannot be None")

    tc = frame._tc
    scala_obj = get_scala_obj(tc)
    if seed is None:
        # Draw a random 16-bit seed from the OS (Python 2 'hex' codec).
        seed = int(os.urandom(2).encode('hex'), 16)
    obs_cols = tc.jutils.convert.to_scala_vector_string(observation_columns)
    scalings = tc.jutils.convert.to_scala_vector_double(column_scalings)
    trained = scala_obj.train(frame._scala,
                              obs_cols,
                              scalings,
                              k,
                              max_iterations,
                              convergence_tol,
                              seed)
    return GaussianMixtureModel(tc, trained)
def load(path, tc=TkContext.implicit):
    """load GaussianMixtureModel from given path"""
    # Resolve/validate the (possibly implicit) TkContext before delegating.
    TkContext.validate(tc)
    return tc.load(path, GaussianMixtureModel)
def get_scala_obj(tc):
    """Gets reference to the scala object"""
    # Walk the py4j gateway down to the sparktk scala companion object.
    jvm = tc.sc._jvm
    return jvm.org.trustedanalytics.sparktk.models.clustering.gmm.GaussianMixtureModel
class Gaussian(PropertiesObject):
    """
    Gaussian sigma and mu values for a trained GaussianMixtureModel
    """

    def __init__(self, tc, scala_result):
        self._tc = tc
        # Cache the raw scala sequences; conversion happens lazily in the
        # properties below.
        self._mu = scala_result.mu()
        self._sigma = scala_result.sigma()

    @property
    def mu(self):
        """ (list[float]) The mean vector of the distribution """
        convert = self._tc.jutils.convert
        return list(convert.from_scala_seq(self._mu))

    @property
    def sigma(self):
        """ (list[list[float]]) The covariance matrix of the distribution """
        convert = self._tc.jutils.convert
        rows = convert.from_scala_seq(self._sigma)
        return [list(convert.from_scala_seq(row)) for row in rows]
class GaussianMixtureModel(PropertiesObject):
    """
    A trained GaussianMixtureModel model

    Example
    -------

        >>> import numpy as np
        >>> frame = tc.frame.create([[2, "ab"],
        ...                          [1,"cd"],
        ...                          [7,"ef"],
        ...                          [1,"gh"],
        ...                          [9,"ij"],
        ...                          [2,"kl"],
        ...                          [0,"mn"],
        ...                          [6,"op"],
        ...                          [5,"qr"]],
        ...                         [("data", float), ("name", str)])

        >>> frame.inspect()
        [#]  data  name
        ===============
        [0]     2  ab
        [1]     1  cd
        [2]     7  ef
        [3]     1  gh
        [4]     9  ij
        [5]     2  kl
        [6]     0  mn
        [7]     6  op
        [8]     5  qr

        >>> model = tc.models.clustering.gmm.train(frame, ["data"], [1.0], 3, seed=1)

        >>> model.k
        3

        <skip>
        >>> for g in model.gaussians:
        ...     print g
        mu = [1.1984786097160265]
        sigma = [[0.5599222134199012]]
        mu = [6.643997733061858]
        sigma = [[2.19222016401446]]
        mu = [6.79435719737145]
        sigma = [[2.2637494400157774]]
        </skip>

        <hide>
        >>> expected_mu = [1.1984454608177824,6.6173304476544335,6.79969916638852]
        >>> expected_sigma = [[0.5599200477022921],[2.1848346923369246],[2.2623755196701305]]
        >>> actual_mu = [g.mu[0] for g in model.gaussians]
        >>> actual_sigma = [g.sigma[0] for g in model.gaussians]
        >>> assert(np.allclose(expected_mu, actual_mu, atol=1e+01))
        >>> assert(np.allclose(expected_sigma, actual_sigma, atol=1e+01))
        </hide>

        >>> predicted_frame = model.predict(frame)

        <skip>
        >>> predicted_frame.inspect()
        [#]  data  name  predicted_cluster
        ==================================
        [0]   9.0  ij                    0
        [1]   2.0  ab                    1
        [2]   0.0  mn                    1
        [3]   5.0  qr                    0
        [4]   7.0  ef                    0
        [5]   1.0  cd                    1
        [6]   1.0  gh                    1
        [7]   6.0  op                    0
        [8]   2.0  kl                    1
        </skip>

        <hide>
        >>> x = predicted_frame.take(9)
        >>> val = set(map(lambda y : y[2], x))
        >>> newlist = [[z[1] for z in x if z[2]==a]for a in val]
        >>> act_out = [[s.encode('ascii') for s in list] for list in newlist]
        >>> act_out.sort(key=lambda x: x[0])
        >>> act_out
        [['ab', 'mn', 'cd', 'gh', 'kl'], ['ij', 'qr', 'ef', 'op']]
        >>> exp_out = [['ij','qr','ef','op'], ['ab','mn','cd','gh','kl']]
        >>> result = False
        >>> for list in act_out:
        ...     if list not in exp_out:
        ...         result = False
        ...     else:
        ...         result = True
        >>> result
        True
        </hide>

        >>> model.observation_columns
        [u'data']

        >>> model.column_scalings
        [1.0]

        >>> model.save("sandbox/gmm")

        >>> restored = tc.load("sandbox/gmm")

        >>> model.cluster_sizes(frame) == restored.cluster_sizes(frame)
        True
    """

    def __init__(self, tc, scala_model):
        self._tc = tc
        # Fail fast if the JVM handle is not actually the scala
        # GaussianMixtureModel companion's model type.
        tc.jutils.validate_is_jvm_instance_of(scala_model, get_scala_obj(tc))
        self._scala = scala_model

    @staticmethod
    def _from_scala(tc, scala_model):
        """Loads a trained gaussian mixture model from a scala model"""
        return GaussianMixtureModel(tc, scala_model)

    @property
    def observation_columns(self):
        """observation columns used for model training"""
        return list(self._tc.jutils.convert.from_scala_seq(self._scala.observationColumns()))

    @property
    def column_scalings(self):
        """column containing the scalings used for model training"""
        return list(self._tc.jutils.convert.from_scala_seq(self._scala.columnScalings()))

    @property
    def k(self):
        """maximum limit for number of resulting clusters"""
        return self._scala.k()

    @property
    def max_iterations(self):
        """maximum number of iterations"""
        return self._scala.maxIterations()

    @property
    def convergence_tol(self):
        """convergence tolerance"""
        return self._scala.convergenceTol()

    @property
    def seed(self):
        """seed used during training of the model"""
        return self._scala.seed()

    @property
    def gaussians(self):
        """list of Gaussian objects, each holding the mu and sigma values of one component"""
        g = self._tc.jutils.convert.from_scala_seq(self._scala.gaussians())
        results = []
        for i in g:
            results.append(Gaussian(self._tc, i))
        return results

    def cluster_sizes(self, frame):
        """a map of clusters and their sizes"""
        cs = self._scala.computeGmmClusterSize(frame._scala)
        return self._tc.jutils.convert.scala_map_to_python(cs)

    def predict(self, frame, columns=None):
        """
        Predicts the labels for the observation columns in the given input frame. Creates a new frame
        with the existing columns and a new predicted column.

        Parameters
        ----------

        :param frame: (Frame) Frame used for predicting the values
        :param columns: (Optional(List[str])) Names of the observation columns.
            When None, presumably the columns used during training are used --
            confirm against the scala predict implementation.
        :return: (Frame) A new frame containing the original frame's columns and a prediction column
        """
        # Imported here to avoid a circular import at module load time.
        from sparktk.frame.frame import Frame
        c = self.__columns_to_option(columns)
        return Frame(self._tc, self._scala.predict(frame._scala, c))

    def __columns_to_option(self, c):
        # Wrap an optional python list of column names as a scala Option[Vector[String]].
        if c is not None:
            c = self._tc.jutils.convert.to_scala_vector_string(c)
        return self._tc.jutils.convert.to_scala_option(c)

    def save(self, path):
        """save the trained model to the given path"""
        self._scala.save(self._tc._scala_sc, path)

    def export_to_mar(self, path):
        """
        Exports the trained model as a model archive (.mar) to the specified path

        Parameters
        ----------

        :param path: (str) Path to save the trained model
        :return: (str) Full path to the saved .mar file
        """
        # NOTE(review): silently returns None when path is not a basestring;
        # consider raising ValueError instead.
        if isinstance(path, basestring):
            return self._scala.exportToMar(self._tc._scala_sc, path)

# Remove the imported base class from the module namespace so it is not
# exposed as part of this module's public API.
del PropertiesObject
|
karthikvadla16/spark-tk
|
python/sparktk/models/clustering/gmm.py
|
Python
|
apache-2.0
| 10,237
|
[
"Gaussian"
] |
73346a2f6fb87e4da20f91082bef839d1b8324d0f7c293793576424abaa4936f
|
from __future__ import (absolute_import, division, print_function, unicode_literals)
import datetime
import os
import re
import sys
#======================================================================================
def find_basedir(project, subproject):
    """ Returns the base directory. If the subproject is known to be in MantidQt or Vates, it uses that.
    The default is current dir + Framework

    Parameters
    ----------
    project : the project, Framework, MantidQt, etc.
    subproject : the subproject, Kernel, API, etc.

    Returns
    -------
    basedir = base directory
    header_folder = the folder name under the inc/ subfolder.
    """
    # MantidQt keeps its own prefix; everything else uses plain "Mantid".
    if project == "MantidQt":
        header_folder = "MantidQt" + subproject
    else:
        header_folder = "Mantid" + subproject
    script_dir = os.path.split(__file__)[0]   # folder of Code/Build
    code_dir = os.path.split(script_dir)[0]   # folder of Code/
    base_dir = os.path.join(code_dir, project, subproject)
    return (base_dir, header_folder)
#======================================================================================
def redo_cmake_section(lines, cmake_tag, add_this_line, remove_this_line=""):
    """ Read the LINES of a file. Find the first "set ( cmake_tag" section,
    collect all the file entries in it, optionally add and/or remove an
    entry, de-duplicate, sort alphabetically, and rewrite the section.
    Only the first matching section is touched, to avoid messing up any
    other set sections in the rest of the file.

    Parameters
    ----------
    lines : list of str, the file split into lines (not mutated)
    cmake_tag : e.g. "SRC_FILES"
    add_this_line : entry to add to the section ("" adds nothing)
    remove_this_line : entry to remove from the section ("" removes nothing;
        a missing entry is silently ignored)

    Returns
    -------
    The new list of lines.
    """
    search_for1 = "set ( " + cmake_tag
    search_for2 = "set (" + cmake_tag

    files = []            # file entries collected from the section
    lines_before = []
    lines_after = []
    section_num = 0       # 0 = before the section, 1 = inside, 2 = after
    section_processed = False
    for line in lines:
        stripped = line.strip()
        if stripped.startswith(search_for1) or stripped.startswith(search_for2):
            section_num = 1
        if section_num == 0:
            # These are the lines before the section
            lines_before.append(line)
        elif not section_processed and section_num == 1:
            # This is a line with the name of a file; take off the tag first
            line = stripped
            if line.startswith(search_for1):
                line = line[len(search_for1):].strip()
            if line.startswith(search_for2):
                line = line[len(search_for2):].strip()
            # Did we reach the last one?
            if line.endswith(")"):
                section_num = 2
                section_processed = True
                line = line[:-1].strip()
            if len(line) > 0:
                files.append(line)
        else:
            # These are lines after the section
            lines_after.append(line)

    # Add the new file to the list of files
    if len(add_this_line) > 0:
        files.append(add_this_line)
    # Use a set to keep only unique lines
    files = set(files)
    # Remove an entry from the cmake list; discard() is a no-op when the
    # entry is absent (was a bare except that swallowed every exception).
    if len(remove_this_line) > 0:
        files.discard(remove_this_line)
    # Sort 'em alphabetically
    files = sorted(files)

    result = lines_before
    result.append("set ( " + cmake_tag)
    for entry in files:
        result.append("\t" + entry)
    result.append(")")  # close the parentheses
    result += lines_after
    return result
#======================================================================
def fix_cmake_format(subproject):
    """ Just fix the CMAKE format: re-sort and de-duplicate the SRC_FILES,
    INC_FILES and TEST_FILES sections of one Framework subproject's
    CMakeLists.txt, rewriting the file in place.

    Parameters
    ----------
    subproject : subproject folder name under Framework/, e.g. "Kernel"
    """
    cmake_path = os.path.join(os.path.curdir, "Framework/" + subproject + "/CMakeLists.txt")
    # Use context managers so the handles are closed even on error
    # (the originals were opened and never explicitly closed).
    with open(cmake_path) as cmake_file:
        lines = cmake_file.read().split("\n")
    lines = redo_cmake_section(lines, "SRC_FILES", "")
    lines = redo_cmake_section(lines, "INC_FILES", "")
    lines = redo_cmake_section(lines, "TEST_FILES", "")
    with open(cmake_path, 'w') as cmake_file:
        cmake_file.write("\n".join(lines))
#======================================================================
def fix_all_cmakes():
    """ Fix all cmake files """
    # Every Framework subproject whose CMakeLists.txt gets normalised.
    for proj in ("Algorithms", "DataObjects", "MDAlgorithms", "API",
                 "Geometry", "CurveFitting", "ICat", "MDEvents",
                 "DataHandling", "Kernel", "Nexus", "Crystal"):
        fix_cmake_format(proj)
#======================================================================
def add_to_cmake(subproject, classname, args, subfolder):
    """ Add the class to the cmake list of the given subproject.

    Parameters
    ----------
    subproject : API, Kernel
    classname : name of the class
    args : argparse args (uses .project and the .header/.cpp/.test flags)
    subfolder : subfolder under inc and src
    """
    basedir, header_folder = find_basedir(args.project, subproject)
    cmake_path = os.path.join(basedir, "CMakeLists.txt")
    # Context managers close the file even on error (the originals leaked
    # the read handle and relied on implicit close for the write handle).
    with open(cmake_path) as cmake_file:
        lines = cmake_file.read().split("\n")
    if args.header:
        lines = redo_cmake_section(lines, "INC_FILES",
                                   "inc/" + header_folder + "/" + subfolder + classname + ".h")
    if args.cpp:
        lines = redo_cmake_section(lines, "SRC_FILES", "src/" + subfolder + classname + ".cpp")
    if args.test:
        lines = redo_cmake_section(lines, "TEST_FILES", classname + "Test.h")
    with open(cmake_path, 'w') as cmake_file:
        cmake_file.write("\n".join(lines))
#======================================================================
def remove_from_cmake(subproject, classname, args, subfolder):
    """ Removes the class from the cmake list of the given subproject.

    Parameters
    ----------
    subproject : API, Kernel
    classname : name of the class
    args : argparse args (uses .project and the .header/.cpp/.test flags)
    subfolder : subfolder under inc and src
    """
    basedir, header_folder = find_basedir(args.project, subproject)
    cmake_path = os.path.join(basedir, "CMakeLists.txt")
    # Context managers close the file even on error (the originals leaked
    # the read handle and relied on implicit close for the write handle).
    with open(cmake_path) as cmake_file:
        lines = cmake_file.read().split("\n")
    if args.header:
        lines = redo_cmake_section(lines, "INC_FILES", "",
                                   "inc/" + header_folder + "/" + subfolder + classname + ".h")
    if args.cpp:
        lines = redo_cmake_section(lines, "SRC_FILES", "", "src/" + subfolder + classname + ".cpp")
    if args.test:
        lines = redo_cmake_section(lines, "TEST_FILES", "", classname + "Test.h")
    with open(cmake_path, 'w') as cmake_file:
        cmake_file.write("\n".join(lines))
|
dymkowsk/mantid
|
buildconfig/cmakelists_utils.py
|
Python
|
gpl-3.0
| 6,084
|
[
"CRYSTAL"
] |
2312082d166ad6f37e819d9fb3b3e0f9f3e54eb6626f7a4af13bc2d94c30056e
|
#
# Copyright (c) 2016 nexB Inc. and others. All rights reserved.
# http://nexb.com and https://github.com/nexB/scancode-toolkit/
# The ScanCode software is licensed under the Apache License version 2.0.
# Data generated with ScanCode require an acknowledgment.
# ScanCode is a trademark of nexB Inc.
#
# You may not use this software except in compliance with the License.
# You may obtain a copy of the License at: http://apache.org/licenses/LICENSE-2.0
# Unless required by applicable law or agreed to in writing, software distributed
# under the License is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR
# CONDITIONS OF ANY KIND, either express or implied. See the License for the
# specific language governing permissions and limitations under the License.
#
# When you publish or redistribute any data created with ScanCode or any ScanCode
# derivative work, you must accompany this data with the following acknowledgment:
#
# Generated with ScanCode and provided on an "AS IS" BASIS, WITHOUT WARRANTIES
# OR CONDITIONS OF ANY KIND, either express or implied. No content created from
# ScanCode should be considered or used as legal advice. Consult an Attorney
# for any legal advice.
# ScanCode is a free software code scanning tool from nexB Inc. and others.
# Visit https://github.com/nexB/scancode-toolkit/ for support and download.
from __future__ import absolute_import, print_function
import os
import unittest
from commoncode import text
from licensedcode import models
from license_test_utils import make_license_test_function
"""
Validate that each license text and each rule is properly detected.
"""
def build_license_validation_tests(licenses_by_key, cls):
    """
    Dynamically build an individual test method for each license text in a
    licenses `data_set`, attaching each test method to the `cls` test class.
    """
    for license_key, license_obj in licenses_by_key.items():
        # Skip licenses that have no text file on disk.
        if not license_obj.text_file or not os.path.exists(license_obj.text_file):
            continue
        test_name = ('test_validate_self_detection_of_text_for_'
                     + text.python_safe_name(license_key))
        # also verify that we are detecting exactly with the license rule itself
        method = make_license_test_function(
            license_key, license_obj.text_file, license_obj.data_file,
            test_name, detect_negative=True, trace_text=True)
        setattr(cls, test_name, method)
class TestValidateLicenseTextDetection(unittest.TestCase):
    # Empty shell: test functions are attached to this class at import time
    # by build_license_validation_tests() below.
    pass

# Attach one self-detection test per known license (runs at import time).
build_license_validation_tests(models.get_licenses(), TestValidateLicenseTextDetection)
def build_rule_validation_tests(rules, cls):
    """
    Dynamically build an individual test method for each rule text in a rules
    `data_set`, attaching each test method to the `cls` test class.
    """
    for rule in rules:
        rule_id = rule.identifier
        test_name = ('test_validate_self_detection_of_rule_for_'
                     + text.python_safe_name(rule_id))
        method = make_license_test_function(
            rule.licenses, rule.text_file, rule.data_file, test_name,
            detect_negative=not rule.negative(), trace_text=True)
        setattr(cls, test_name, method)
class TestValidateLicenseRuleSelfDetection(unittest.TestCase):
    # Empty shell: test functions are attached to this class at import time
    # by build_rule_validation_tests() below.
    pass

# Attach one self-detection test per rule (runs at import time).
build_rule_validation_tests(models.load_rules(), TestValidateLicenseRuleSelfDetection)
|
yasharmaster/scancode-toolkit
|
tests/licensedcode/test_detection_validate.py
|
Python
|
apache-2.0
| 3,512
|
[
"VisIt"
] |
14291c0eeb24d9d4dbc22d9eba5e059a468876bd67362029d24585d43a922053
|
# -*- coding: utf-8 -*-
# vi:si:et:sw=4:sts=4:ts=4
##
## Copyright (C) 2006-2008 Async Open Source <http://www.async.com.br>
## All rights reserved
##
## This program is free software; you can redistribute it and/or modify
## it under the terms of the GNU Lesser General Public License as published by
## the Free Software Foundation; either version 2 of the License, or
## (at your option) any later version.
##
## This program is distributed in the hope that it will be useful,
## but WITHOUT ANY WARRANTY; without even the implied warranty of
## MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
## GNU Lesser General Public License for more details.
##
## You should have received a copy of the GNU Lesser General Public License
## along with this program; if not, write to the Free Software
## Foundation, Inc., or visit: http://www.gnu.org/.
##
## Author(s): Stoq Team <stoq-devel@async.com.br>
##
##
""" Dialogs for payment method management"""
import gtk
from kiwi.ui.objectlist import ObjectList, Column
from stoqlib.api import api
from stoqlib.domain.payment.method import PaymentMethod
from stoqlib.gui.base.dialogs import BasicDialog, run_dialog
from stoqlib.gui.editors.paymentmethodeditor import (PaymentMethodEditor,
CardPaymentMethodEditor)
from stoqlib.gui.search.searcheditor import SearchEditorToolBar
from stoqlib.lib.translation import stoqlib_gettext
_ = stoqlib_gettext
class PaymentMethodsDialog(BasicDialog):
    """Dialog listing the editable payment methods, allowing each one to be
    (de)activated via an editable column and configured via a per-method
    editor launched from the toolbar or by row activation."""
    # TODO Bug 2406 will avoid duplicating code here
    size = (400, 400)
    title = _("Payment Method Settings")

    # Maps method_name -> editor dialog class used by _edit_item().
    # TODO: implement editor for 'multiple' payment method.
    METHOD_EDITORS = {u'card': CardPaymentMethodEditor,
                      u'money': PaymentMethodEditor,
                      u'check': PaymentMethodEditor,
                      u'credit': PaymentMethodEditor,
                      u'bill': PaymentMethodEditor,
                      u'deposit': PaymentMethodEditor,
                      u'store_credit': PaymentMethodEditor}

    def __init__(self, store):
        # store: database store/connection used to fetch the methods.
        BasicDialog.__init__(self,
                             hide_footer=True, size=PaymentMethodsDialog.size,
                             title=PaymentMethodsDialog.title)
        self._can_edit = False
        self.store = store
        self._setup_list()
        self._setup_slaves()

    def _setup_slaves(self):
        # Toolbar with an edit button; "new" is hidden since methods are a
        # fixed set, and "edit" starts disabled until a row is selected.
        self._toolbar_slave = SearchEditorToolBar()
        self._toolbar_slave.connect("edit", self._on_edit_button__clicked)
        self._toolbar_slave.new_button.hide()
        self._toolbar_slave.edit_button.set_sensitive(False)
        self.attach_slave("extra_holder", self._toolbar_slave)

    def _setup_list(self):
        # Replace the dialog's main child with the ObjectList of methods.
        methods = PaymentMethod.get_editable_methods(self.store)
        self.klist = ObjectList(self._get_columns(), methods,
                                gtk.SELECTION_BROWSE)
        self.klist.connect("selection-changed",
                           self._on_klist__selection_changed)
        self.klist.connect("row-activated", self._on_klist__row_activated)
        self.klist.connect("cell-edited", self.on_cell_edited)
        self.main.remove(self.main.get_child())
        self.main.add(self.klist)
        self.klist.show()

    def _get_columns(self):
        # 'is_active' is editable in-place; edits land in on_cell_edited().
        return [Column('description', title=_('Payment Method'), data_type=str,
                       expand=True),
                Column('is_active', title=_('Active'), data_type=bool,
                       editable=True)]

    def _edit_item(self, item):
        # Open the method-specific editor in a fresh store transaction and
        # commit it only if the editor was confirmed.
        editor = self.METHOD_EDITORS.get(item.method_name, None)
        if not editor:
            raise TypeError('Invalid payment method adapter: %s'
                            % item.method_name)
        store = api.new_store()
        item = store.fetch(item)
        retval = run_dialog(editor, self, store, item)
        store.confirm(retval)
        store.close()

    #
    # Callbacks
    #

    def on_cell_edited(self, klist, obj, attr):
        # All the payment methods could be (de)activated, except the 'money'
        # payment method, which is forced back to active.
        if obj.method_name != u'money':
            store = obj.store
            store.commit()
        else:
            obj.is_active = True

    def _on_klist__selection_changed(self, list, data):
        # Enable the edit button only for methods that have an editor.
        self._can_edit = (data and
                          data.method_name in self.METHOD_EDITORS.keys())
        self._toolbar_slave.edit_button.set_sensitive(self._can_edit)

    def _on_edit_button__clicked(self, toolbar_slave):
        assert self._can_edit
        self._edit_item(self.klist.get_selected())

    def _on_klist__row_activated(self, list, data):
        # Double-click/Enter on a row opens the editor, if available.
        if not self._can_edit:
            return
        self._edit_item(data)
|
andrebellafronte/stoq
|
stoqlib/gui/dialogs/paymentmethod.py
|
Python
|
gpl-2.0
| 4,775
|
[
"VisIt"
] |
e6b990240dca4f9d8e8021ef967782fdbfc09b9e854ad0e7a707f07f458e85fe
|
# -*- coding: utf-8 -*-
# Form implementation generated from reading ui file 'GUI.ui'
#
# Created by: PyQt5 UI code generator 5.10.1
#
# WARNING! All changes made in this file will be lost!
from PyQt5 import QtCore, QtGui, QtWidgets
class Ui_MainWindow(object):
def setupUi(self, MainWindow):
MainWindow.setObjectName("MainWindow")
MainWindow.resize(1414, 1137)
font = QtGui.QFont()
MainWindow.setFont(font)
icon = QtGui.QIcon()
icon.addPixmap(QtGui.QPixmap(":/icons/icons/human-brain.png"), QtGui.QIcon.Normal, QtGui.QIcon.Off)
MainWindow.setWindowIcon(icon)
MainWindow.setStyleSheet("")
MainWindow.setIconSize(QtCore.QSize(1024, 1024))
self.centralwidget = QtWidgets.QWidget(MainWindow)
font = QtGui.QFont()
self.centralwidget.setFont(font)
self.centralwidget.setStyleSheet("")
self.centralwidget.setObjectName("centralwidget")
self.gridLayout = QtWidgets.QGridLayout(self.centralwidget)
self.gridLayout.setObjectName("gridLayout")
self.widget_Run_Close = QtWidgets.QWidget(self.centralwidget)
font = QtGui.QFont()
self.widget_Run_Close.setFont(font)
self.widget_Run_Close.setObjectName("widget_Run_Close")
self.horizontalLayout_57 = QtWidgets.QHBoxLayout(self.widget_Run_Close)
self.horizontalLayout_57.setObjectName("horizontalLayout_57")
self.label_7 = QtWidgets.QLabel(self.widget_Run_Close)
font = QtGui.QFont()
self.label_7.setFont(font)
self.label_7.setOpenExternalLinks(True)
self.label_7.setObjectName("label_7")
self.horizontalLayout_57.addWidget(self.label_7)
spacerItem = QtWidgets.QSpacerItem(40, 20, QtWidgets.QSizePolicy.Expanding, QtWidgets.QSizePolicy.Minimum)
self.horizontalLayout_57.addItem(spacerItem)
self.pushButton_Load = QtWidgets.QPushButton(self.widget_Run_Close)
font = QtGui.QFont()
self.pushButton_Load.setFont(font)
self.pushButton_Load.setStyleSheet("")
icon1 = QtGui.QIcon()
icon1.addPixmap(QtGui.QPixmap(":/icons/icons/folder.png"), QtGui.QIcon.Normal, QtGui.QIcon.Off)
self.pushButton_Load.setIcon(icon1)
self.pushButton_Load.setIconSize(QtCore.QSize(24, 24))
self.pushButton_Load.setObjectName("pushButton_Load")
self.horizontalLayout_57.addWidget(self.pushButton_Load)
self.pushButton_SaveAs = QtWidgets.QPushButton(self.widget_Run_Close)
font = QtGui.QFont()
self.pushButton_SaveAs.setFont(font)
self.pushButton_SaveAs.setStyleSheet("")
icon2 = QtGui.QIcon()
icon2.addPixmap(QtGui.QPixmap(":/icons/icons/save.png"), QtGui.QIcon.Normal, QtGui.QIcon.Off)
self.pushButton_SaveAs.setIcon(icon2)
self.pushButton_SaveAs.setIconSize(QtCore.QSize(24, 24))
self.pushButton_SaveAs.setObjectName("pushButton_SaveAs")
self.horizontalLayout_57.addWidget(self.pushButton_SaveAs)
self.pushButton_Reset = QtWidgets.QPushButton(self.widget_Run_Close)
font = QtGui.QFont()
self.pushButton_Reset.setFont(font)
self.pushButton_Reset.setStyleSheet("")
icon3 = QtGui.QIcon()
icon3.addPixmap(QtGui.QPixmap(":/icons/icons/reset-password.png"), QtGui.QIcon.Normal, QtGui.QIcon.Off)
self.pushButton_Reset.setIcon(icon3)
self.pushButton_Reset.setIconSize(QtCore.QSize(24, 24))
self.pushButton_Reset.setObjectName("pushButton_Reset")
self.horizontalLayout_57.addWidget(self.pushButton_Reset)
self.pushButton_Run = QtWidgets.QPushButton(self.widget_Run_Close)
font = QtGui.QFont()
self.pushButton_Run.setFont(font)
self.pushButton_Run.setLayoutDirection(QtCore.Qt.LeftToRight)
self.pushButton_Run.setStyleSheet("")
icon4 = QtGui.QIcon()
icon4.addPixmap(QtGui.QPixmap(":/icons/icons/confirm.png"), QtGui.QIcon.Normal, QtGui.QIcon.Off)
self.pushButton_Run.setIcon(icon4)
self.pushButton_Run.setIconSize(QtCore.QSize(24, 24))
self.pushButton_Run.setObjectName("pushButton_Run")
self.horizontalLayout_57.addWidget(self.pushButton_Run)
spacerItem1 = QtWidgets.QSpacerItem(40, 20, QtWidgets.QSizePolicy.Expanding, QtWidgets.QSizePolicy.Minimum)
self.horizontalLayout_57.addItem(spacerItem1)
self.gridLayout.addWidget(self.widget_Run_Close, 2, 1, 1, 1)
self.widget_Project_Tree = QtWidgets.QWidget(self.centralwidget)
sizePolicy = QtWidgets.QSizePolicy(QtWidgets.QSizePolicy.Fixed, QtWidgets.QSizePolicy.Minimum)
sizePolicy.setHorizontalStretch(0)
sizePolicy.setVerticalStretch(0)
sizePolicy.setHeightForWidth(self.widget_Project_Tree.sizePolicy().hasHeightForWidth())
self.widget_Project_Tree.setSizePolicy(sizePolicy)
self.widget_Project_Tree.setStyleSheet("")
self.widget_Project_Tree.setObjectName("widget_Project_Tree")
self.verticalLayout_11 = QtWidgets.QVBoxLayout(self.widget_Project_Tree)
self.verticalLayout_11.setObjectName("verticalLayout_11")
self.treeWidget_Project_Browser = QtWidgets.QTreeWidget(self.widget_Project_Tree)
self.treeWidget_Project_Browser.setMinimumSize(QtCore.QSize(380, 450))
self.treeWidget_Project_Browser.setMaximumSize(QtCore.QSize(370, 16777215))
self.treeWidget_Project_Browser.setStyleSheet("")
self.treeWidget_Project_Browser.setFrameShape(QtWidgets.QFrame.NoFrame)
self.treeWidget_Project_Browser.setFrameShadow(QtWidgets.QFrame.Plain)
self.treeWidget_Project_Browser.setIconSize(QtCore.QSize(48, 48))
self.treeWidget_Project_Browser.setIndentation(0)
self.treeWidget_Project_Browser.setAnimated(True)
self.treeWidget_Project_Browser.setObjectName("treeWidget_Project_Browser")
font = QtGui.QFont()
font.setPointSize(14)
self.treeWidget_Project_Browser.headerItem().setFont(0, font)
item_0 = QtWidgets.QTreeWidgetItem(self.treeWidget_Project_Browser)
font = QtGui.QFont()
font.setPointSize(13)
item_0.setFont(0, font)
icon5 = QtGui.QIcon()
icon5.addPixmap(QtGui.QPixmap(":/icons/icons/computer-system.png"), QtGui.QIcon.Normal, QtGui.QIcon.Off)
item_0.setIcon(0, icon5)
item_0 = QtWidgets.QTreeWidgetItem(self.treeWidget_Project_Browser)
font = QtGui.QFont()
font.setPointSize(13)
item_0.setFont(0, font)
icon6 = QtGui.QIcon()
icon6.addPixmap(QtGui.QPixmap(":/icons/icons/cog.png"), QtGui.QIcon.Normal, QtGui.QIcon.Off)
item_0.setIcon(0, icon6)
item_0 = QtWidgets.QTreeWidgetItem(self.treeWidget_Project_Browser)
font = QtGui.QFont()
font.setPointSize(13)
item_0.setFont(0, font)
icon7 = QtGui.QIcon()
icon7.addPixmap(QtGui.QPixmap(":/icons/icons/portfolio.png"), QtGui.QIcon.Normal, QtGui.QIcon.Off)
item_0.setIcon(0, icon7)
item_0 = QtWidgets.QTreeWidgetItem(self.treeWidget_Project_Browser)
font = QtGui.QFont()
font.setPointSize(20)
item_0.setFont(0, font)
item_0.setFlags(QtCore.Qt.NoItemFlags)
item_0 = QtWidgets.QTreeWidgetItem(self.treeWidget_Project_Browser)
font = QtGui.QFont()
font.setPointSize(20)
item_0.setFont(0, font)
item_0.setFlags(QtCore.Qt.NoItemFlags)
item_0 = QtWidgets.QTreeWidgetItem(self.treeWidget_Project_Browser)
font = QtGui.QFont()
font.setPointSize(13)
item_0.setFont(0, font)
brush = QtGui.QBrush(QtGui.QColor(211, 215, 207))
brush.setStyle(QtCore.Qt.NoBrush)
item_0.setForeground(0, brush)
icon8 = QtGui.QIcon()
icon8.addPixmap(QtGui.QPixmap(":/icons/icons/gears.png"), QtGui.QIcon.Normal, QtGui.QIcon.Off)
item_0.setIcon(0, icon8)
self.treeWidget_Project_Browser.header().setVisible(False)
self.treeWidget_Project_Browser.header().setDefaultSectionSize(70)
self.treeWidget_Project_Browser.header().setMinimumSectionSize(48)
self.verticalLayout_11.addWidget(self.treeWidget_Project_Browser)
self.widget = QtWidgets.QWidget(self.widget_Project_Tree)
self.widget.setObjectName("widget")
self.gridLayout_2 = QtWidgets.QGridLayout(self.widget)
self.gridLayout_2.setObjectName("gridLayout_2")
self.label_OSS_Logo = QtWidgets.QLabel(self.widget)
sizePolicy = QtWidgets.QSizePolicy(QtWidgets.QSizePolicy.Expanding, QtWidgets.QSizePolicy.Expanding)
sizePolicy.setHorizontalStretch(0)
sizePolicy.setVerticalStretch(0)
sizePolicy.setHeightForWidth(self.label_OSS_Logo.sizePolicy().hasHeightForWidth())
self.label_OSS_Logo.setSizePolicy(sizePolicy)
self.label_OSS_Logo.setMaximumSize(QtCore.QSize(270, 290))
self.label_OSS_Logo.setText("")
self.label_OSS_Logo.setPixmap(QtGui.QPixmap(":/icons/icons/Updated_OSS_DBS_logo.jpeg"))
self.label_OSS_Logo.setScaledContents(True)
self.label_OSS_Logo.setAlignment(QtCore.Qt.AlignCenter)
self.label_OSS_Logo.setObjectName("label_OSS_Logo")
self.gridLayout_2.addWidget(self.label_OSS_Logo, 0, 0, 1, 1)
self.verticalLayout_11.addWidget(self.widget)
self.gridLayout.addWidget(self.widget_Project_Tree, 0, 0, 3, 1)
self.stackedWidget = QtWidgets.QStackedWidget(self.centralwidget)
font = QtGui.QFont()
self.stackedWidget.setFont(font)
self.stackedWidget.setToolTip("")
self.stackedWidget.setStyleSheet("")
self.stackedWidget.setObjectName("stackedWidget")
self.pageSimulationState = QtWidgets.QWidget()
self.pageSimulationState.setObjectName("pageSimulationState")
self.gridLayout_16 = QtWidgets.QGridLayout(self.pageSimulationState)
self.gridLayout_16.setContentsMargins(9, -1, -1, -1)
self.gridLayout_16.setObjectName("gridLayout_16")
self.scrollArea_4 = QtWidgets.QScrollArea(self.pageSimulationState)
self.scrollArea_4.setStyleSheet("")
self.scrollArea_4.setFrameShape(QtWidgets.QFrame.NoFrame)
self.scrollArea_4.setWidgetResizable(True)
self.scrollArea_4.setObjectName("scrollArea_4")
self.scrollAreaWidgetContents = QtWidgets.QWidget()
self.scrollAreaWidgetContents.setGeometry(QtCore.QRect(0, 0, 974, 1047))
self.scrollAreaWidgetContents.setObjectName("scrollAreaWidgetContents")
self.gridLayout_17 = QtWidgets.QGridLayout(self.scrollAreaWidgetContents)
self.gridLayout_17.setObjectName("gridLayout_17")
spacerItem2 = QtWidgets.QSpacerItem(40, 20, QtWidgets.QSizePolicy.Expanding, QtWidgets.QSizePolicy.Minimum)
self.gridLayout_17.addItem(spacerItem2, 1, 1, 1, 1)
spacerItem3 = QtWidgets.QSpacerItem(20, 40, QtWidgets.QSizePolicy.Minimum, QtWidgets.QSizePolicy.Expanding)
self.gridLayout_17.addItem(spacerItem3, 3, 0, 1, 1)
self.groupBox_4 = QtWidgets.QGroupBox(self.scrollAreaWidgetContents)
self.groupBox_4.setTitle("")
self.groupBox_4.setObjectName("groupBox_4")
self.gridLayout_44 = QtWidgets.QGridLayout(self.groupBox_4)
self.gridLayout_44.setObjectName("gridLayout_44")
self.gridLayout_18 = QtWidgets.QGridLayout()
self.gridLayout_18.setContentsMargins(-1, 0, -1, -1)
self.gridLayout_18.setObjectName("gridLayout_18")
self.checkBox_IFFT_Ready = QtWidgets.QCheckBox(self.groupBox_4)
font = QtGui.QFont()
self.checkBox_IFFT_Ready.setFont(font)
self.checkBox_IFFT_Ready.setText("")
self.checkBox_IFFT_Ready.setObjectName("checkBox_IFFT_Ready")
self.gridLayout_18.addWidget(self.checkBox_IFFT_Ready, 10, 1, 1, 1)
self.checkBox_Voxel_orr_DTI = QtWidgets.QCheckBox(self.groupBox_4)
font = QtGui.QFont()
self.checkBox_Voxel_orr_DTI.setFont(font)
self.checkBox_Voxel_orr_DTI.setToolTip("")
self.checkBox_Voxel_orr_DTI.setText("")
self.checkBox_Voxel_orr_DTI.setObjectName("checkBox_Voxel_orr_DTI")
self.gridLayout_18.addWidget(self.checkBox_Voxel_orr_DTI, 1, 1, 1, 1)
self.checkBox_Init_Neuron_Model_Ready = QtWidgets.QCheckBox(self.groupBox_4)
font = QtGui.QFont()
self.checkBox_Init_Neuron_Model_Ready.setFont(font)
self.checkBox_Init_Neuron_Model_Ready.setText("")
self.checkBox_Init_Neuron_Model_Ready.setObjectName("checkBox_Init_Neuron_Model_Ready")
self.gridLayout_18.addWidget(self.checkBox_Init_Neuron_Model_Ready, 2, 1, 1, 1)
self.label_47 = QtWidgets.QLabel(self.groupBox_4)
font = QtGui.QFont()
self.label_47.setFont(font)
self.label_47.setObjectName("label_47")
self.gridLayout_18.addWidget(self.label_47, 6, 0, 1, 1)
self.label_45 = QtWidgets.QLabel(self.groupBox_4)
font = QtGui.QFont()
self.label_45.setFont(font)
self.label_45.setObjectName("label_45")
self.gridLayout_18.addWidget(self.label_45, 4, 0, 1, 1)
self.pushButton_Neuron_Model_Ready = QtWidgets.QPushButton(self.groupBox_4)
self.pushButton_Neuron_Model_Ready.setText("")
icon9 = QtGui.QIcon()
icon9.addPixmap(QtGui.QPixmap(":/icons/icons/info.png"), QtGui.QIcon.Normal, QtGui.QIcon.Off)
self.pushButton_Neuron_Model_Ready.setIcon(icon9)
self.pushButton_Neuron_Model_Ready.setIconSize(QtCore.QSize(24, 24))
self.pushButton_Neuron_Model_Ready.setFlat(True)
self.pushButton_Neuron_Model_Ready.setObjectName("pushButton_Neuron_Model_Ready")
self.gridLayout_18.addWidget(self.pushButton_Neuron_Model_Ready, 4, 2, 1, 1)
self.pushButton_Init_Neuron_Model_Ready = QtWidgets.QPushButton(self.groupBox_4)
self.pushButton_Init_Neuron_Model_Ready.setText("")
self.pushButton_Init_Neuron_Model_Ready.setIcon(icon9)
self.pushButton_Init_Neuron_Model_Ready.setIconSize(QtCore.QSize(24, 24))
self.pushButton_Init_Neuron_Model_Ready.setFlat(True)
self.pushButton_Init_Neuron_Model_Ready.setObjectName("pushButton_Init_Neuron_Model_Ready")
self.gridLayout_18.addWidget(self.pushButton_Init_Neuron_Model_Ready, 2, 2, 1, 1)
self.checkBox_Parallel_Computing_Interrupted = QtWidgets.QCheckBox(self.groupBox_4)
font = QtGui.QFont()
self.checkBox_Parallel_Computing_Interrupted.setFont(font)
self.checkBox_Parallel_Computing_Interrupted.setText("")
self.checkBox_Parallel_Computing_Interrupted.setObjectName("checkBox_Parallel_Computing_Interrupted")
self.gridLayout_18.addWidget(self.checkBox_Parallel_Computing_Interrupted, 9, 1, 1, 1)
self.label_48 = QtWidgets.QLabel(self.groupBox_4)
font = QtGui.QFont()
self.label_48.setFont(font)
self.label_48.setObjectName("label_48")
self.gridLayout_18.addWidget(self.label_48, 7, 0, 1, 1)
self.pushButton_Init_Mesh_Ready = QtWidgets.QPushButton(self.groupBox_4)
self.pushButton_Init_Mesh_Ready.setText("")
self.pushButton_Init_Mesh_Ready.setIcon(icon9)
self.pushButton_Init_Mesh_Ready.setIconSize(QtCore.QSize(24, 24))
self.pushButton_Init_Mesh_Ready.setFlat(True)
self.pushButton_Init_Mesh_Ready.setObjectName("pushButton_Init_Mesh_Ready")
self.gridLayout_18.addWidget(self.pushButton_Init_Mesh_Ready, 3, 2, 1, 1)
self.label_50 = QtWidgets.QLabel(self.groupBox_4)
font = QtGui.QFont()
self.label_50.setFont(font)
self.label_50.setObjectName("label_50")
self.gridLayout_18.addWidget(self.label_50, 9, 0, 1, 1)
self.label_42 = QtWidgets.QLabel(self.groupBox_4)
font = QtGui.QFont()
self.label_42.setFont(font)
self.label_42.setObjectName("label_42")
self.gridLayout_18.addWidget(self.label_42, 1, 0, 1, 1)
self.checkBox_Parallel_Computing_Ready = QtWidgets.QCheckBox(self.groupBox_4)
font = QtGui.QFont()
self.checkBox_Parallel_Computing_Ready.setFont(font)
self.checkBox_Parallel_Computing_Ready.setText("")
self.checkBox_Parallel_Computing_Ready.setObjectName("checkBox_Parallel_Computing_Ready")
self.gridLayout_18.addWidget(self.checkBox_Parallel_Computing_Ready, 8, 1, 1, 1)
self.label_41 = QtWidgets.QLabel(self.groupBox_4)
font = QtGui.QFont()
self.label_41.setFont(font)
self.label_41.setObjectName("label_41")
self.gridLayout_18.addWidget(self.label_41, 0, 0, 1, 1)
self.checkBox_Voxel_orr_MRI = QtWidgets.QCheckBox(self.groupBox_4)
font = QtGui.QFont()
self.checkBox_Voxel_orr_MRI.setFont(font)
self.checkBox_Voxel_orr_MRI.setToolTip("")
self.checkBox_Voxel_orr_MRI.setText("")
self.checkBox_Voxel_orr_MRI.setObjectName("checkBox_Voxel_orr_MRI")
self.gridLayout_18.addWidget(self.checkBox_Voxel_orr_MRI, 0, 1, 1, 1)
self.label_43 = QtWidgets.QLabel(self.groupBox_4)
font = QtGui.QFont()
self.label_43.setFont(font)
self.label_43.setObjectName("label_43")
self.gridLayout_18.addWidget(self.label_43, 2, 0, 1, 1)
self.label_49 = QtWidgets.QLabel(self.groupBox_4)
font = QtGui.QFont()
self.label_49.setFont(font)
self.label_49.setObjectName("label_49")
self.gridLayout_18.addWidget(self.label_49, 8, 0, 1, 1)
self.label_44 = QtWidgets.QLabel(self.groupBox_4)
font = QtGui.QFont()
self.label_44.setFont(font)
self.label_44.setObjectName("label_44")
self.gridLayout_18.addWidget(self.label_44, 3, 0, 1, 1)
self.checkBox_Adapted_Mesh_Ready = QtWidgets.QCheckBox(self.groupBox_4)
font = QtGui.QFont()
self.checkBox_Adapted_Mesh_Ready.setFont(font)
self.checkBox_Adapted_Mesh_Ready.setText("")
self.checkBox_Adapted_Mesh_Ready.setObjectName("checkBox_Adapted_Mesh_Ready")
self.gridLayout_18.addWidget(self.checkBox_Adapted_Mesh_Ready, 7, 1, 1, 1)
self.pushButton_CSF_Mesh_Ready = QtWidgets.QPushButton(self.groupBox_4)
self.pushButton_CSF_Mesh_Ready.setText("")
self.pushButton_CSF_Mesh_Ready.setIcon(icon9)
self.pushButton_CSF_Mesh_Ready.setIconSize(QtCore.QSize(24, 24))
self.pushButton_CSF_Mesh_Ready.setFlat(True)
self.pushButton_CSF_Mesh_Ready.setObjectName("pushButton_CSF_Mesh_Ready")
self.gridLayout_18.addWidget(self.pushButton_CSF_Mesh_Ready, 6, 2, 1, 1)
self.label_46 = QtWidgets.QLabel(self.groupBox_4)
font = QtGui.QFont()
self.label_46.setFont(font)
self.label_46.setObjectName("label_46")
self.gridLayout_18.addWidget(self.label_46, 5, 0, 1, 1)
self.pushButton_Voxel_orr_MRI = QtWidgets.QPushButton(self.groupBox_4)
self.pushButton_Voxel_orr_MRI.setText("")
self.pushButton_Voxel_orr_MRI.setIcon(icon9)
self.pushButton_Voxel_orr_MRI.setIconSize(QtCore.QSize(24, 24))
self.pushButton_Voxel_orr_MRI.setFlat(True)
self.pushButton_Voxel_orr_MRI.setObjectName("pushButton_Voxel_orr_MRI")
self.gridLayout_18.addWidget(self.pushButton_Voxel_orr_MRI, 0, 2, 1, 1)
self.checkBox_Init_Mesh_Ready = QtWidgets.QCheckBox(self.groupBox_4)
font = QtGui.QFont()
self.checkBox_Init_Mesh_Ready.setFont(font)
self.checkBox_Init_Mesh_Ready.setText("")
self.checkBox_Init_Mesh_Ready.setObjectName("checkBox_Init_Mesh_Ready")
self.gridLayout_18.addWidget(self.checkBox_Init_Mesh_Ready, 3, 1, 1, 1)
self.pushButton_Parallel_Computing_Interrupted = QtWidgets.QPushButton(self.groupBox_4)
self.pushButton_Parallel_Computing_Interrupted.setText("")
self.pushButton_Parallel_Computing_Interrupted.setIcon(icon9)
self.pushButton_Parallel_Computing_Interrupted.setIconSize(QtCore.QSize(24, 24))
self.pushButton_Parallel_Computing_Interrupted.setFlat(True)
self.pushButton_Parallel_Computing_Interrupted.setObjectName("pushButton_Parallel_Computing_Interrupted")
self.gridLayout_18.addWidget(self.pushButton_Parallel_Computing_Interrupted, 9, 2, 1, 1)
self.label_51 = QtWidgets.QLabel(self.groupBox_4)
font = QtGui.QFont()
self.label_51.setFont(font)
self.label_51.setObjectName("label_51")
self.gridLayout_18.addWidget(self.label_51, 10, 0, 1, 1)
self.checkBox_CSF_Mesh_Ready = QtWidgets.QCheckBox(self.groupBox_4)
font = QtGui.QFont()
self.checkBox_CSF_Mesh_Ready.setFont(font)
self.checkBox_CSF_Mesh_Ready.setText("")
self.checkBox_CSF_Mesh_Ready.setObjectName("checkBox_CSF_Mesh_Ready")
self.gridLayout_18.addWidget(self.checkBox_CSF_Mesh_Ready, 6, 1, 1, 1)
self.pushButton_Adapted_Mesh_Ready = QtWidgets.QPushButton(self.groupBox_4)
self.pushButton_Adapted_Mesh_Ready.setText("")
self.pushButton_Adapted_Mesh_Ready.setIcon(icon9)
self.pushButton_Adapted_Mesh_Ready.setIconSize(QtCore.QSize(24, 24))
self.pushButton_Adapted_Mesh_Ready.setFlat(True)
self.pushButton_Adapted_Mesh_Ready.setObjectName("pushButton_Adapted_Mesh_Ready")
self.gridLayout_18.addWidget(self.pushButton_Adapted_Mesh_Ready, 7, 2, 1, 1)
self.checkBox_Adjusted_Neuron_Model_Ready = QtWidgets.QCheckBox(self.groupBox_4)
font = QtGui.QFont()
self.checkBox_Adjusted_Neuron_Model_Ready.setFont(font)
self.checkBox_Adjusted_Neuron_Model_Ready.setText("")
self.checkBox_Adjusted_Neuron_Model_Ready.setObjectName("checkBox_Adjusted_Neuron_Model_Ready")
self.gridLayout_18.addWidget(self.checkBox_Adjusted_Neuron_Model_Ready, 4, 1, 1, 1)
self.pushButton_Signl_Generation_Ready = QtWidgets.QPushButton(self.groupBox_4)
self.pushButton_Signl_Generation_Ready.setText("")
self.pushButton_Signl_Generation_Ready.setIcon(icon9)
self.pushButton_Signl_Generation_Ready.setIconSize(QtCore.QSize(24, 24))
self.pushButton_Signl_Generation_Ready.setFlat(True)
self.pushButton_Signl_Generation_Ready.setObjectName("pushButton_Signl_Generation_Ready")
self.gridLayout_18.addWidget(self.pushButton_Signl_Generation_Ready, 5, 2, 1, 1)
self.pushButton_IFFT_Ready = QtWidgets.QPushButton(self.groupBox_4)
self.pushButton_IFFT_Ready.setText("")
self.pushButton_IFFT_Ready.setIcon(icon9)
self.pushButton_IFFT_Ready.setIconSize(QtCore.QSize(24, 24))
self.pushButton_IFFT_Ready.setFlat(True)
self.pushButton_IFFT_Ready.setObjectName("pushButton_IFFT_Ready")
self.gridLayout_18.addWidget(self.pushButton_IFFT_Ready, 10, 2, 1, 1)
self.checkBox_Signal_Generation_Ready = QtWidgets.QCheckBox(self.groupBox_4)
font = QtGui.QFont()
self.checkBox_Signal_Generation_Ready.setFont(font)
self.checkBox_Signal_Generation_Ready.setText("")
self.checkBox_Signal_Generation_Ready.setObjectName("checkBox_Signal_Generation_Ready")
self.gridLayout_18.addWidget(self.checkBox_Signal_Generation_Ready, 5, 1, 1, 1)
self.pushButton_Voxel_orr_DTI = QtWidgets.QPushButton(self.groupBox_4)
self.pushButton_Voxel_orr_DTI.setText("")
self.pushButton_Voxel_orr_DTI.setIcon(icon9)
self.pushButton_Voxel_orr_DTI.setIconSize(QtCore.QSize(24, 24))
self.pushButton_Voxel_orr_DTI.setFlat(True)
self.pushButton_Voxel_orr_DTI.setObjectName("pushButton_Voxel_orr_DTI")
self.gridLayout_18.addWidget(self.pushButton_Voxel_orr_DTI, 1, 2, 1, 1)
self.pushButton_Parallel_Computing_Ready = QtWidgets.QPushButton(self.groupBox_4)
self.pushButton_Parallel_Computing_Ready.setText("")
self.pushButton_Parallel_Computing_Ready.setIcon(icon9)
self.pushButton_Parallel_Computing_Ready.setIconSize(QtCore.QSize(24, 24))
self.pushButton_Parallel_Computing_Ready.setFlat(True)
self.pushButton_Parallel_Computing_Ready.setObjectName("pushButton_Parallel_Computing_Ready")
self.gridLayout_18.addWidget(self.pushButton_Parallel_Computing_Ready, 8, 2, 1, 1)
self.gridLayout_44.addLayout(self.gridLayout_18, 0, 0, 1, 1)
self.gridLayout_17.addWidget(self.groupBox_4, 0, 0, 1, 1)
self.scrollArea_4.setWidget(self.scrollAreaWidgetContents)
self.gridLayout_16.addWidget(self.scrollArea_4, 0, 0, 1, 1)
self.stackedWidget.addWidget(self.pageSimulationState)
self.page = QtWidgets.QWidget()
self.page.setObjectName("page")
self.gridLayout_28 = QtWidgets.QGridLayout(self.page)
self.gridLayout_28.setObjectName("gridLayout_28")
spacerItem4 = QtWidgets.QSpacerItem(20, 40, QtWidgets.QSizePolicy.Minimum, QtWidgets.QSizePolicy.Expanding)
self.gridLayout_28.addItem(spacerItem4, 1, 0, 1, 1)
self.toolBox = QtWidgets.QToolBox(self.page)
self.toolBox.setMinimumSize(QtCore.QSize(0, 700))
self.toolBox.setObjectName("toolBox")
self.page_4 = QtWidgets.QWidget()
self.page_4.setGeometry(QtCore.QRect(0, 0, 974, 540))
self.page_4.setObjectName("page_4")
self.gridLayout_36 = QtWidgets.QGridLayout(self.page_4)
self.gridLayout_36.setObjectName("gridLayout_36")
self.groupBoxBrainGeometry = QtWidgets.QGroupBox(self.page_4)
font = QtGui.QFont()
self.groupBoxBrainGeometry.setFont(font)
self.groupBoxBrainGeometry.setStyleSheet("")
self.groupBoxBrainGeometry.setTitle("")
self.groupBoxBrainGeometry.setObjectName("groupBoxBrainGeometry")
self.gridLayout_3 = QtWidgets.QGridLayout(self.groupBoxBrainGeometry)
self.gridLayout_3.setObjectName("gridLayout_3")
self.gridLayout_19 = QtWidgets.QGridLayout()
self.gridLayout_19.setContentsMargins(0, -1, -1, -1)
self.gridLayout_19.setObjectName("gridLayout_19")
self.label_Approx_Geom_Centered_On_MRI = QtWidgets.QLabel(self.groupBoxBrainGeometry)
font = QtGui.QFont()
self.label_Approx_Geom_Centered_On_MRI.setFont(font)
self.label_Approx_Geom_Centered_On_MRI.setStyleSheet("")
self.label_Approx_Geom_Centered_On_MRI.setObjectName("label_Approx_Geom_Centered_On_MRI")
self.gridLayout_19.addWidget(self.label_Approx_Geom_Centered_On_MRI, 3, 0, 1, 1)
self.widget_Approximating_Dimensions = QtWidgets.QWidget(self.groupBoxBrainGeometry)
self.widget_Approximating_Dimensions.setObjectName("widget_Approximating_Dimensions")
self.horizontalLayout_12 = QtWidgets.QHBoxLayout(self.widget_Approximating_Dimensions)
self.horizontalLayout_12.setContentsMargins(30, 0, 0, 0)
self.horizontalLayout_12.setObjectName("horizontalLayout_12")
self.verticalLayout_7 = QtWidgets.QVBoxLayout()
self.verticalLayout_7.setContentsMargins(0, -1, -1, -1)
self.verticalLayout_7.setObjectName("verticalLayout_7")
self.label_X_Length_2 = QtWidgets.QLabel(self.widget_Approximating_Dimensions)
font = QtGui.QFont()
self.label_X_Length_2.setFont(font)
self.label_X_Length_2.setObjectName("label_X_Length_2")
self.verticalLayout_7.addWidget(self.label_X_Length_2)
self.horizontalLayout_12.addLayout(self.verticalLayout_7)
self.gridLayout_19.addWidget(self.widget_Approximating_Dimensions, 2, 0, 1, 1)
self.pushButton_Brain_Shape = QtWidgets.QPushButton(self.groupBoxBrainGeometry)
self.pushButton_Brain_Shape.setText("")
self.pushButton_Brain_Shape.setIcon(icon9)
self.pushButton_Brain_Shape.setIconSize(QtCore.QSize(24, 24))
self.pushButton_Brain_Shape.setFlat(True)
self.pushButton_Brain_Shape.setObjectName("pushButton_Brain_Shape")
self.gridLayout_19.addWidget(self.pushButton_Brain_Shape, 0, 2, 1, 1)
self.lineEdit_Brain_Shape = QtWidgets.QLineEdit(self.groupBoxBrainGeometry)
font = QtGui.QFont()
self.lineEdit_Brain_Shape.setFont(font)
self.lineEdit_Brain_Shape.setText("")
self.lineEdit_Brain_Shape.setObjectName("lineEdit_Brain_Shape")
self.gridLayout_19.addWidget(self.lineEdit_Brain_Shape, 0, 1, 1, 1)
self.label_Brain_Shape = QtWidgets.QLabel(self.groupBoxBrainGeometry)
font = QtGui.QFont()
self.label_Brain_Shape.setFont(font)
self.label_Brain_Shape.setStyleSheet("")
self.label_Brain_Shape.setObjectName("label_Brain_Shape")
self.gridLayout_19.addWidget(self.label_Brain_Shape, 0, 0, 1, 1)
self.label_Dimensions_From_MRI = QtWidgets.QLabel(self.groupBoxBrainGeometry)
font = QtGui.QFont()
self.label_Dimensions_From_MRI.setFont(font)
self.label_Dimensions_From_MRI.setStyleSheet("")
self.label_Dimensions_From_MRI.setObjectName("label_Dimensions_From_MRI")
self.gridLayout_19.addWidget(self.label_Dimensions_From_MRI, 1, 0, 1, 1)
self.widget_Approx_Geometry_Center = QtWidgets.QWidget(self.groupBoxBrainGeometry)
self.widget_Approx_Geometry_Center.setMinimumSize(QtCore.QSize(0, 0))
self.widget_Approx_Geometry_Center.setObjectName("widget_Approx_Geometry_Center")
self.gridLayout_22 = QtWidgets.QGridLayout(self.widget_Approx_Geometry_Center)
self.gridLayout_22.setContentsMargins(30, 0, 0, 0)
self.gridLayout_22.setObjectName("gridLayout_22")
self.horizontalLayout_121 = QtWidgets.QHBoxLayout()
self.horizontalLayout_121.setContentsMargins(-1, 0, 0, 0)
self.horizontalLayout_121.setObjectName("horizontalLayout_121")
self.label_Approx_Geometry_Center = QtWidgets.QLabel(self.widget_Approx_Geometry_Center)
font = QtGui.QFont()
self.label_Approx_Geometry_Center.setFont(font)
self.label_Approx_Geometry_Center.setObjectName("label_Approx_Geometry_Center")
self.horizontalLayout_121.addWidget(self.label_Approx_Geometry_Center)
self.gridLayout_22.addLayout(self.horizontalLayout_121, 0, 0, 1, 1)
self.gridLayout_19.addWidget(self.widget_Approx_Geometry_Center, 4, 0, 1, 1)
self.horizontalLayout_73 = QtWidgets.QHBoxLayout()
self.horizontalLayout_73.setObjectName("horizontalLayout_73")
self.checkBox_Dimensions_From_MRI = QtWidgets.QCheckBox(self.groupBoxBrainGeometry)
font = QtGui.QFont()
self.checkBox_Dimensions_From_MRI.setFont(font)
self.checkBox_Dimensions_From_MRI.setLayoutDirection(QtCore.Qt.LeftToRight)
self.checkBox_Dimensions_From_MRI.setText("")
self.checkBox_Dimensions_From_MRI.setIconSize(QtCore.QSize(16, 16))
self.checkBox_Dimensions_From_MRI.setObjectName("checkBox_Dimensions_From_MRI")
self.horizontalLayout_73.addWidget(self.checkBox_Dimensions_From_MRI)
self.gridLayout_19.addLayout(self.horizontalLayout_73, 1, 1, 1, 1)
self.widget_Approximating_Dimensions_2 = QtWidgets.QWidget(self.groupBoxBrainGeometry)
self.widget_Approximating_Dimensions_2.setObjectName("widget_Approximating_Dimensions_2")
self.horizontalLayout_13 = QtWidgets.QHBoxLayout(self.widget_Approximating_Dimensions_2)
self.horizontalLayout_13.setContentsMargins(0, 0, 0, 0)
self.horizontalLayout_13.setObjectName("horizontalLayout_13")
self.verticalLayout_16 = QtWidgets.QVBoxLayout()
self.verticalLayout_16.setObjectName("verticalLayout_16")
self.lineEdit_Approximating_Dimensions = QtWidgets.QLineEdit(self.widget_Approximating_Dimensions_2)
self.lineEdit_Approximating_Dimensions.setObjectName("lineEdit_Approximating_Dimensions")
self.verticalLayout_16.addWidget(self.lineEdit_Approximating_Dimensions)
self.horizontalLayout_13.addLayout(self.verticalLayout_16)
self.gridLayout_19.addWidget(self.widget_Approximating_Dimensions_2, 2, 1, 1, 1)
self.checkBox_Approx_Geom_Centered_On_MRI = QtWidgets.QCheckBox(self.groupBoxBrainGeometry)
font = QtGui.QFont()
self.checkBox_Approx_Geom_Centered_On_MRI.setFont(font)
self.checkBox_Approx_Geom_Centered_On_MRI.setLayoutDirection(QtCore.Qt.LeftToRight)
self.checkBox_Approx_Geom_Centered_On_MRI.setText("")
self.checkBox_Approx_Geom_Centered_On_MRI.setObjectName("checkBox_Approx_Geom_Centered_On_MRI")
self.gridLayout_19.addWidget(self.checkBox_Approx_Geom_Centered_On_MRI, 3, 1, 1, 1)
self.widget_Approx_Geometry_Center_2 = QtWidgets.QWidget(self.groupBoxBrainGeometry)
self.widget_Approx_Geometry_Center_2.setObjectName("widget_Approx_Geometry_Center_2")
self.gridLayout_32 = QtWidgets.QGridLayout(self.widget_Approx_Geometry_Center_2)
self.gridLayout_32.setContentsMargins(0, 0, 0, 0)
self.gridLayout_32.setSpacing(0)
self.gridLayout_32.setObjectName("gridLayout_32")
self.horizontalLayout_79 = QtWidgets.QHBoxLayout()
self.horizontalLayout_79.setObjectName("horizontalLayout_79")
self.lineEdit_Approx_Geometry_Center = QtWidgets.QLineEdit(self.widget_Approx_Geometry_Center_2)
font = QtGui.QFont()
self.lineEdit_Approx_Geometry_Center.setFont(font)
self.lineEdit_Approx_Geometry_Center.setObjectName("lineEdit_Approx_Geometry_Center")
self.horizontalLayout_79.addWidget(self.lineEdit_Approx_Geometry_Center)
self.gridLayout_32.addLayout(self.horizontalLayout_79, 0, 0, 1, 1)
self.gridLayout_19.addWidget(self.widget_Approx_Geometry_Center_2, 4, 1, 1, 1)
self.pushButton_Dimensions_From_MRI = QtWidgets.QPushButton(self.groupBoxBrainGeometry)
self.pushButton_Dimensions_From_MRI.setText("")
self.pushButton_Dimensions_From_MRI.setIcon(icon9)
self.pushButton_Dimensions_From_MRI.setIconSize(QtCore.QSize(24, 24))
self.pushButton_Dimensions_From_MRI.setFlat(True)
self.pushButton_Dimensions_From_MRI.setObjectName("pushButton_Dimensions_From_MRI")
self.gridLayout_19.addWidget(self.pushButton_Dimensions_From_MRI, 1, 2, 1, 1)
self.widget_Approximating_Dimensions_3 = QtWidgets.QWidget(self.groupBoxBrainGeometry)
self.widget_Approximating_Dimensions_3.setObjectName("widget_Approximating_Dimensions_3")
self.verticalLayout = QtWidgets.QVBoxLayout(self.widget_Approximating_Dimensions_3)
self.verticalLayout.setContentsMargins(0, 0, 0, 0)
self.verticalLayout.setSpacing(0)
self.verticalLayout.setObjectName("verticalLayout")
self.pushButton_Approx_Geometry_Center = QtWidgets.QPushButton(self.widget_Approximating_Dimensions_3)
self.pushButton_Approx_Geometry_Center.setText("")
self.pushButton_Approx_Geometry_Center.setIcon(icon9)
self.pushButton_Approx_Geometry_Center.setIconSize(QtCore.QSize(24, 24))
self.pushButton_Approx_Geometry_Center.setFlat(True)
self.pushButton_Approx_Geometry_Center.setObjectName("pushButton_Approx_Geometry_Center")
self.verticalLayout.addWidget(self.pushButton_Approx_Geometry_Center)
self.gridLayout_19.addWidget(self.widget_Approximating_Dimensions_3, 2, 2, 1, 1)
self.widget_Approx_Geometry_Center_3 = QtWidgets.QWidget(self.groupBoxBrainGeometry)
self.widget_Approx_Geometry_Center_3.setObjectName("widget_Approx_Geometry_Center_3")
self.verticalLayout_2 = QtWidgets.QVBoxLayout(self.widget_Approx_Geometry_Center_3)
self.verticalLayout_2.setContentsMargins(0, 0, 0, 0)
self.verticalLayout_2.setSpacing(0)
self.verticalLayout_2.setObjectName("verticalLayout_2")
self.pushButton_X_Length = QtWidgets.QPushButton(self.widget_Approx_Geometry_Center_3)
self.pushButton_X_Length.setText("")
self.pushButton_X_Length.setIcon(icon9)
self.pushButton_X_Length.setIconSize(QtCore.QSize(24, 24))
self.pushButton_X_Length.setFlat(True)
self.pushButton_X_Length.setObjectName("pushButton_X_Length")
self.verticalLayout_2.addWidget(self.pushButton_X_Length)
self.gridLayout_19.addWidget(self.widget_Approx_Geometry_Center_3, 4, 2, 1, 1)
self.pushButton_Approx_Geometry_Center_2 = QtWidgets.QPushButton(self.groupBoxBrainGeometry)
self.pushButton_Approx_Geometry_Center_2.setText("")
self.pushButton_Approx_Geometry_Center_2.setIcon(icon9)
self.pushButton_Approx_Geometry_Center_2.setIconSize(QtCore.QSize(24, 24))
self.pushButton_Approx_Geometry_Center_2.setFlat(True)
self.pushButton_Approx_Geometry_Center_2.setObjectName("pushButton_Approx_Geometry_Center_2")
self.gridLayout_19.addWidget(self.pushButton_Approx_Geometry_Center_2, 3, 2, 1, 1)
self.gridLayout_3.addLayout(self.gridLayout_19, 0, 0, 1, 1)
self.gridLayout_36.addWidget(self.groupBoxBrainGeometry, 0, 0, 1, 1)
spacerItem5 = QtWidgets.QSpacerItem(40, 20, QtWidgets.QSizePolicy.Expanding, QtWidgets.QSizePolicy.Minimum)
self.gridLayout_36.addItem(spacerItem5, 0, 1, 1, 1)
spacerItem6 = QtWidgets.QSpacerItem(20, 40, QtWidgets.QSizePolicy.Minimum, QtWidgets.QSizePolicy.Expanding)
self.gridLayout_36.addItem(spacerItem6, 1, 0, 1, 1)
self.toolBox.addItem(self.page_4, "")
self.page_5 = QtWidgets.QWidget()
self.page_5.setGeometry(QtCore.QRect(0, 0, 974, 540))
self.page_5.setObjectName("page_5")
self.gridLayout_37 = QtWidgets.QGridLayout(self.page_5)
self.gridLayout_37.setObjectName("gridLayout_37")
spacerItem7 = QtWidgets.QSpacerItem(923, 17, QtWidgets.QSizePolicy.Expanding, QtWidgets.QSizePolicy.Minimum)
self.gridLayout_37.addItem(spacerItem7, 0, 1, 1, 1)
self.groupBox = QtWidgets.QGroupBox(self.page_5)
self.groupBox.setTitle("")
self.groupBox.setObjectName("groupBox")
self.gridLayout_9 = QtWidgets.QGridLayout(self.groupBox)
self.gridLayout_9.setObjectName("gridLayout_9")
self.gridLayout_21 = QtWidgets.QGridLayout()
self.gridLayout_21.setObjectName("gridLayout_21")
self.label_26 = QtWidgets.QLabel(self.groupBox)
font = QtGui.QFont()
self.label_26.setFont(font)
self.label_26.setObjectName("label_26")
self.gridLayout_21.addWidget(self.label_26, 2, 2, 1, 1)
self.doubleSpinBox_Encapsulation_Thickness = QtWidgets.QDoubleSpinBox(self.groupBox)
font = QtGui.QFont()
self.doubleSpinBox_Encapsulation_Thickness.setFont(font)
self.doubleSpinBox_Encapsulation_Thickness.setLocale(QtCore.QLocale(QtCore.QLocale.English, QtCore.QLocale.UnitedKingdom))
self.doubleSpinBox_Encapsulation_Thickness.setDecimals(4)
self.doubleSpinBox_Encapsulation_Thickness.setMinimum(0.0)
self.doubleSpinBox_Encapsulation_Thickness.setMaximum(1e+54)
self.doubleSpinBox_Encapsulation_Thickness.setSingleStep(0.1)
self.doubleSpinBox_Encapsulation_Thickness.setProperty("value", 0.5)
self.doubleSpinBox_Encapsulation_Thickness.setObjectName("doubleSpinBox_Encapsulation_Thickness")
self.gridLayout_21.addWidget(self.doubleSpinBox_Encapsulation_Thickness, 2, 1, 1, 1)
self.label_Conductivity_Scaling = QtWidgets.QLabel(self.groupBox)
font = QtGui.QFont()
self.label_Conductivity_Scaling.setFont(font)
self.label_Conductivity_Scaling.setObjectName("label_Conductivity_Scaling")
self.gridLayout_21.addWidget(self.label_Conductivity_Scaling, 4, 0, 1, 1)
self.label_19 = QtWidgets.QLabel(self.groupBox)
font = QtGui.QFont()
self.label_19.setFont(font)
self.label_19.setObjectName("label_19")
self.gridLayout_21.addWidget(self.label_19, 5, 0, 1, 1)
self.label_Encapsulation_Conductivity = QtWidgets.QLabel(self.groupBox)
font = QtGui.QFont()
self.label_Encapsulation_Conductivity.setFont(font)
self.label_Encapsulation_Conductivity.setObjectName("label_Encapsulation_Conductivity")
self.gridLayout_21.addWidget(self.label_Encapsulation_Conductivity, 3, 0, 1, 1)
self.checkBox_CPE_Active = QtWidgets.QCheckBox(self.groupBox)
font = QtGui.QFont()
self.checkBox_CPE_Active.setFont(font)
self.checkBox_CPE_Active.setText("")
self.checkBox_CPE_Active.setObjectName("checkBox_CPE_Active")
self.gridLayout_21.addWidget(self.checkBox_CPE_Active, 0, 1, 1, 1)
self.label_CPE_Active = QtWidgets.QLabel(self.groupBox)
font = QtGui.QFont()
self.label_CPE_Active.setFont(font)
self.label_CPE_Active.setObjectName("label_CPE_Active")
self.gridLayout_21.addWidget(self.label_CPE_Active, 0, 0, 1, 1)
self.label_Encapsulation_Thickness = QtWidgets.QLabel(self.groupBox)
font = QtGui.QFont()
self.label_Encapsulation_Thickness.setFont(font)
self.label_Encapsulation_Thickness.setObjectName("label_Encapsulation_Thickness")
self.gridLayout_21.addWidget(self.label_Encapsulation_Thickness, 2, 0, 1, 1)
self.pushButton_Encapsulation_Thickness = QtWidgets.QPushButton(self.groupBox)
self.pushButton_Encapsulation_Thickness.setText("")
self.pushButton_Encapsulation_Thickness.setIcon(icon9)
self.pushButton_Encapsulation_Thickness.setIconSize(QtCore.QSize(24, 24))
self.pushButton_Encapsulation_Thickness.setFlat(True)
self.pushButton_Encapsulation_Thickness.setObjectName("pushButton_Encapsulation_Thickness")
self.gridLayout_21.addWidget(self.pushButton_Encapsulation_Thickness, 2, 3, 1, 1)
self.pushButton_CPE_Active = QtWidgets.QPushButton(self.groupBox)
self.pushButton_CPE_Active.setText("")
self.pushButton_CPE_Active.setIcon(icon9)
self.pushButton_CPE_Active.setIconSize(QtCore.QSize(24, 24))
self.pushButton_CPE_Active.setFlat(True)
self.pushButton_CPE_Active.setObjectName("pushButton_CPE_Active")
self.gridLayout_21.addWidget(self.pushButton_CPE_Active, 0, 3, 1, 1)
self.pushButton_Encapsulation_Conductivity = QtWidgets.QPushButton(self.groupBox)
self.pushButton_Encapsulation_Conductivity.setText("")
self.pushButton_Encapsulation_Conductivity.setIcon(icon9)
self.pushButton_Encapsulation_Conductivity.setIconSize(QtCore.QSize(24, 24))
self.pushButton_Encapsulation_Conductivity.setFlat(True)
self.pushButton_Encapsulation_Conductivity.setObjectName("pushButton_Encapsulation_Conductivity")
self.gridLayout_21.addWidget(self.pushButton_Encapsulation_Conductivity, 3, 3, 1, 1)
self.pushButton_Conductivity_Scaling = QtWidgets.QPushButton(self.groupBox)
font = QtGui.QFont()
self.pushButton_Conductivity_Scaling.setFont(font)
self.pushButton_Conductivity_Scaling.setText("")
self.pushButton_Conductivity_Scaling.setIcon(icon9)
self.pushButton_Conductivity_Scaling.setIconSize(QtCore.QSize(24, 24))
self.pushButton_Conductivity_Scaling.setFlat(True)
self.pushButton_Conductivity_Scaling.setObjectName("pushButton_Conductivity_Scaling")
self.gridLayout_21.addWidget(self.pushButton_Conductivity_Scaling, 4, 3, 1, 1)
self.pushButton_Encapsulation_Scaling = QtWidgets.QPushButton(self.groupBox)
self.pushButton_Encapsulation_Scaling.setText("")
self.pushButton_Encapsulation_Scaling.setIcon(icon9)
self.pushButton_Encapsulation_Scaling.setIconSize(QtCore.QSize(24, 24))
self.pushButton_Encapsulation_Scaling.setFlat(True)
self.pushButton_Encapsulation_Scaling.setObjectName("pushButton_Encapsulation_Scaling")
self.gridLayout_21.addWidget(self.pushButton_Encapsulation_Scaling, 5, 3, 1, 1)
self.comboBox_Encapsulation_Tissue_Type = QtWidgets.QComboBox(self.groupBox)
font = QtGui.QFont()
self.comboBox_Encapsulation_Tissue_Type.setFont(font)
self.comboBox_Encapsulation_Tissue_Type.setObjectName("comboBox_Encapsulation_Tissue_Type")
self.comboBox_Encapsulation_Tissue_Type.addItem("")
self.comboBox_Encapsulation_Tissue_Type.addItem("")
self.comboBox_Encapsulation_Tissue_Type.addItem("")
self.gridLayout_21.addWidget(self.comboBox_Encapsulation_Tissue_Type, 3, 1, 1, 1)
self.doubleSpinBox_Conductivity_Scaling = QtWidgets.QDoubleSpinBox(self.groupBox)
font = QtGui.QFont()
self.doubleSpinBox_Conductivity_Scaling.setFont(font)
self.doubleSpinBox_Conductivity_Scaling.setToolTip("")
self.doubleSpinBox_Conductivity_Scaling.setLocale(QtCore.QLocale(QtCore.QLocale.English, QtCore.QLocale.UnitedKingdom))
self.doubleSpinBox_Conductivity_Scaling.setDecimals(4)
self.doubleSpinBox_Conductivity_Scaling.setMaximum(1e+62)
self.doubleSpinBox_Conductivity_Scaling.setSingleStep(0.001)
self.doubleSpinBox_Conductivity_Scaling.setProperty("value", 1.0)
self.doubleSpinBox_Conductivity_Scaling.setObjectName("doubleSpinBox_Conductivity_Scaling")
self.gridLayout_21.addWidget(self.doubleSpinBox_Conductivity_Scaling, 4, 1, 1, 1)
self.doubleSpinBox_Permittivity_Scaling = QtWidgets.QDoubleSpinBox(self.groupBox)
font = QtGui.QFont()
self.doubleSpinBox_Permittivity_Scaling.setFont(font)
self.doubleSpinBox_Permittivity_Scaling.setToolTip("")
self.doubleSpinBox_Permittivity_Scaling.setLocale(QtCore.QLocale(QtCore.QLocale.English, QtCore.QLocale.UnitedKingdom))
self.doubleSpinBox_Permittivity_Scaling.setDecimals(4)
self.doubleSpinBox_Permittivity_Scaling.setMaximum(1e+62)
self.doubleSpinBox_Permittivity_Scaling.setSingleStep(0.001)
self.doubleSpinBox_Permittivity_Scaling.setProperty("value", 1.0)
self.doubleSpinBox_Permittivity_Scaling.setObjectName("doubleSpinBox_Permittivity_Scaling")
self.gridLayout_21.addWidget(self.doubleSpinBox_Permittivity_Scaling, 5, 1, 1, 1)
self.gridLayout_9.addLayout(self.gridLayout_21, 0, 0, 1, 1)
self.gridLayout_37.addWidget(self.groupBox, 0, 0, 1, 1)
spacerItem8 = QtWidgets.QSpacerItem(20, 40, QtWidgets.QSizePolicy.Minimum, QtWidgets.QSizePolicy.Expanding)
self.gridLayout_37.addItem(spacerItem8, 1, 1, 1, 1)
self.toolBox.addItem(self.page_5, "")
self.page_6 = QtWidgets.QWidget()
self.page_6.setGeometry(QtCore.QRect(0, 0, 974, 540))
self.page_6.setObjectName("page_6")
self.gridLayout_38 = QtWidgets.QGridLayout(self.page_6)
self.gridLayout_38.setObjectName("gridLayout_38")
spacerItem9 = QtWidgets.QSpacerItem(40, 20, QtWidgets.QSizePolicy.Expanding, QtWidgets.QSizePolicy.Minimum)
self.gridLayout_38.addItem(spacerItem9, 0, 1, 1, 1)
self.groupBoxNeuronModel = QtWidgets.QGroupBox(self.page_6)
font = QtGui.QFont()
self.groupBoxNeuronModel.setFont(font)
self.groupBoxNeuronModel.setStyleSheet("")
self.groupBoxNeuronModel.setTitle("")
self.groupBoxNeuronModel.setObjectName("groupBoxNeuronModel")
self.gridLayout_29 = QtWidgets.QGridLayout(self.groupBoxNeuronModel)
self.gridLayout_29.setObjectName("gridLayout_29")
self.gridLayout_24 = QtWidgets.QGridLayout()
self.gridLayout_24.setObjectName("gridLayout_24")
self.pushButton_Neuron_Model_Array_Prepared_2 = QtWidgets.QPushButton(self.groupBoxNeuronModel)
self.pushButton_Neuron_Model_Array_Prepared_2.setText("")
self.pushButton_Neuron_Model_Array_Prepared_2.setIcon(icon9)
self.pushButton_Neuron_Model_Array_Prepared_2.setIconSize(QtCore.QSize(24, 24))
self.pushButton_Neuron_Model_Array_Prepared_2.setFlat(True)
self.pushButton_Neuron_Model_Array_Prepared_2.setObjectName("pushButton_Neuron_Model_Array_Prepared_2")
self.gridLayout_24.addWidget(self.pushButton_Neuron_Model_Array_Prepared_2, 5, 3, 1, 1)
self.label_V_Init_3 = QtWidgets.QLabel(self.groupBoxNeuronModel)
font = QtGui.QFont()
self.label_V_Init_3.setFont(font)
self.label_V_Init_3.setObjectName("label_V_Init_3")
self.gridLayout_24.addWidget(self.label_V_Init_3, 5, 0, 1, 1)
self.label_40 = QtWidgets.QLabel(self.groupBoxNeuronModel)
font = QtGui.QFont()
self.label_40.setFont(font)
self.label_40.setObjectName("label_40")
self.gridLayout_24.addWidget(self.label_40, 2, 2, 1, 1)
self.label_Diam_Fib_2 = QtWidgets.QLabel(self.groupBoxNeuronModel)
font = QtGui.QFont()
self.label_Diam_Fib_2.setFont(font)
self.label_Diam_Fib_2.setObjectName("label_Diam_Fib_2")
self.gridLayout_24.addWidget(self.label_Diam_Fib_2, 2, 0, 1, 1)
self.label_V_Init_2 = QtWidgets.QLabel(self.groupBoxNeuronModel)
font = QtGui.QFont()
self.label_V_Init_2.setFont(font)
self.label_V_Init_2.setObjectName("label_V_Init_2")
self.gridLayout_24.addWidget(self.label_V_Init_2, 4, 0, 1, 1)
self.label_Pattern_Model_Name_2 = QtWidgets.QLabel(self.groupBoxNeuronModel)
font = QtGui.QFont()
self.label_Pattern_Model_Name_2.setFont(font)
self.label_Pattern_Model_Name_2.setAlignment(QtCore.Qt.AlignLeading|QtCore.Qt.AlignLeft|QtCore.Qt.AlignVCenter)
self.label_Pattern_Model_Name_2.setObjectName("label_Pattern_Model_Name_2")
self.gridLayout_24.addWidget(self.label_Pattern_Model_Name_2, 1, 0, 1, 1)
self.lineEdit_Pattern_Model_Name = QtWidgets.QLineEdit(self.groupBoxNeuronModel)
font = QtGui.QFont()
self.lineEdit_Pattern_Model_Name.setFont(font)
self.lineEdit_Pattern_Model_Name.setObjectName("lineEdit_Pattern_Model_Name")
self.gridLayout_24.addWidget(self.lineEdit_Pattern_Model_Name, 1, 1, 1, 1)
self.lineEdit_Fiber_Diameter = QtWidgets.QLineEdit(self.groupBoxNeuronModel)
font = QtGui.QFont()
self.lineEdit_Fiber_Diameter.setFont(font)
self.lineEdit_Fiber_Diameter.setObjectName("lineEdit_Fiber_Diameter")
self.gridLayout_24.addWidget(self.lineEdit_Fiber_Diameter, 2, 1, 1, 1)
self.label_N_Ranvier_2 = QtWidgets.QLabel(self.groupBoxNeuronModel)
font = QtGui.QFont()
self.label_N_Ranvier_2.setFont(font)
self.label_N_Ranvier_2.setObjectName("label_N_Ranvier_2")
self.gridLayout_24.addWidget(self.label_N_Ranvier_2, 3, 0, 1, 1)
self.comboBox_Axon_Model_Type = QtWidgets.QComboBox(self.groupBoxNeuronModel)
self.comboBox_Axon_Model_Type.setObjectName("comboBox_Axon_Model_Type")
self.comboBox_Axon_Model_Type.addItem("")
self.comboBox_Axon_Model_Type.addItem("")
self.gridLayout_24.addWidget(self.comboBox_Axon_Model_Type, 0, 1, 1, 1)
self.label_9 = QtWidgets.QLabel(self.groupBoxNeuronModel)
self.label_9.setObjectName("label_9")
self.gridLayout_24.addWidget(self.label_9, 0, 0, 1, 1)
self.pushButton_Diam_Fib_2 = QtWidgets.QPushButton(self.groupBoxNeuronModel)
self.pushButton_Diam_Fib_2.setText("")
self.pushButton_Diam_Fib_2.setIcon(icon9)
self.pushButton_Diam_Fib_2.setIconSize(QtCore.QSize(24, 24))
self.pushButton_Diam_Fib_2.setFlat(True)
self.pushButton_Diam_Fib_2.setObjectName("pushButton_Diam_Fib_2")
self.gridLayout_24.addWidget(self.pushButton_Diam_Fib_2, 2, 3, 1, 1)
self.pushButton_Axon_Model_Type = QtWidgets.QPushButton(self.groupBoxNeuronModel)
self.pushButton_Axon_Model_Type.setText("")
self.pushButton_Axon_Model_Type.setIcon(icon9)
self.pushButton_Axon_Model_Type.setIconSize(QtCore.QSize(24, 24))
self.pushButton_Axon_Model_Type.setFlat(True)
self.pushButton_Axon_Model_Type.setObjectName("pushButton_Axon_Model_Type")
self.gridLayout_24.addWidget(self.pushButton_Axon_Model_Type, 0, 3, 1, 1)
self.pushButton_Pattern_Model_Name_2 = QtWidgets.QPushButton(self.groupBoxNeuronModel)
self.pushButton_Pattern_Model_Name_2.setText("")
self.pushButton_Pattern_Model_Name_2.setIcon(icon9)
self.pushButton_Pattern_Model_Name_2.setIconSize(QtCore.QSize(24, 24))
self.pushButton_Pattern_Model_Name_2.setFlat(True)
self.pushButton_Pattern_Model_Name_2.setObjectName("pushButton_Pattern_Model_Name_2")
self.gridLayout_24.addWidget(self.pushButton_Pattern_Model_Name_2, 1, 3, 1, 1)
self.lineEdit_N_Ranvier = QtWidgets.QLineEdit(self.groupBoxNeuronModel)
font = QtGui.QFont()
self.lineEdit_N_Ranvier.setFont(font)
self.lineEdit_N_Ranvier.setObjectName("lineEdit_N_Ranvier")
self.gridLayout_24.addWidget(self.lineEdit_N_Ranvier, 3, 1, 1, 1)
self.doubleSpinBox_V_Init = QtWidgets.QDoubleSpinBox(self.groupBoxNeuronModel)
font = QtGui.QFont()
self.doubleSpinBox_V_Init.setFont(font)
self.doubleSpinBox_V_Init.setLocale(QtCore.QLocale(QtCore.QLocale.English, QtCore.QLocale.UnitedKingdom))
self.doubleSpinBox_V_Init.setDecimals(4)
self.doubleSpinBox_V_Init.setMinimum(-100000000.0)
self.doubleSpinBox_V_Init.setMaximum(100000000000.0)
self.doubleSpinBox_V_Init.setSingleStep(10.0)
self.doubleSpinBox_V_Init.setProperty("value", -80.0)
self.doubleSpinBox_V_Init.setObjectName("doubleSpinBox_V_Init")
self.gridLayout_24.addWidget(self.doubleSpinBox_V_Init, 4, 1, 1, 1)
self.checkBox_Neuron_Model_Array_Prepared = QtWidgets.QCheckBox(self.groupBoxNeuronModel)
font = QtGui.QFont()
self.checkBox_Neuron_Model_Array_Prepared.setFont(font)
self.checkBox_Neuron_Model_Array_Prepared.setStyleSheet("")
self.checkBox_Neuron_Model_Array_Prepared.setText("")
self.checkBox_Neuron_Model_Array_Prepared.setIconSize(QtCore.QSize(24, 24))
self.checkBox_Neuron_Model_Array_Prepared.setTristate(False)
self.checkBox_Neuron_Model_Array_Prepared.setObjectName("checkBox_Neuron_Model_Array_Prepared")
self.gridLayout_24.addWidget(self.checkBox_Neuron_Model_Array_Prepared, 5, 1, 1, 1)
self.label_38 = QtWidgets.QLabel(self.groupBoxNeuronModel)
font = QtGui.QFont()
self.label_38.setFont(font)
self.label_38.setObjectName("label_38")
self.gridLayout_24.addWidget(self.label_38, 4, 2, 1, 1)
self.pushButton_N_Ranvier_2 = QtWidgets.QPushButton(self.groupBoxNeuronModel)
self.pushButton_N_Ranvier_2.setText("")
self.pushButton_N_Ranvier_2.setIcon(icon9)
self.pushButton_N_Ranvier_2.setIconSize(QtCore.QSize(24, 24))
self.pushButton_N_Ranvier_2.setFlat(True)
self.pushButton_N_Ranvier_2.setObjectName("pushButton_N_Ranvier_2")
self.gridLayout_24.addWidget(self.pushButton_N_Ranvier_2, 3, 3, 1, 1)
self.pushButton_V_Init_2 = QtWidgets.QPushButton(self.groupBoxNeuronModel)
self.pushButton_V_Init_2.setText("")
self.pushButton_V_Init_2.setIcon(icon9)
self.pushButton_V_Init_2.setIconSize(QtCore.QSize(24, 24))
self.pushButton_V_Init_2.setFlat(True)
self.pushButton_V_Init_2.setObjectName("pushButton_V_Init_2")
self.gridLayout_24.addWidget(self.pushButton_V_Init_2, 4, 3, 1, 1)
self.widget_Name_Of_External_Neuron_Array_2 = QtWidgets.QWidget(self.groupBoxNeuronModel)
self.widget_Name_Of_External_Neuron_Array_2.setObjectName("widget_Name_Of_External_Neuron_Array_2")
self.gridLayout_42 = QtWidgets.QGridLayout(self.widget_Name_Of_External_Neuron_Array_2)
self.gridLayout_42.setContentsMargins(0, 0, 0, 0)
self.gridLayout_42.setSpacing(0)
self.gridLayout_42.setObjectName("gridLayout_42")
self.lineEdit_Name_Prepared_Neuron_Array = QtWidgets.QLineEdit(self.widget_Name_Of_External_Neuron_Array_2)
font = QtGui.QFont()
self.lineEdit_Name_Prepared_Neuron_Array.setFont(font)
self.lineEdit_Name_Prepared_Neuron_Array.setObjectName("lineEdit_Name_Prepared_Neuron_Array")
self.gridLayout_42.addWidget(self.lineEdit_Name_Prepared_Neuron_Array, 0, 0, 1, 1)
self.gridLayout_24.addWidget(self.widget_Name_Of_External_Neuron_Array_2, 6, 1, 1, 1)
self.widget_Name_Of_External_Neuron_Array = QtWidgets.QWidget(self.groupBoxNeuronModel)
self.widget_Name_Of_External_Neuron_Array.setMinimumSize(QtCore.QSize(0, 29))
self.widget_Name_Of_External_Neuron_Array.setObjectName("widget_Name_Of_External_Neuron_Array")
self.gridLayout_41 = QtWidgets.QGridLayout(self.widget_Name_Of_External_Neuron_Array)
self.gridLayout_41.setContentsMargins(0, 0, 0, 0)
self.gridLayout_41.setSpacing(0)
self.gridLayout_41.setObjectName("gridLayout_41")
self.label_Name_Prepared_Neuron_Array_2 = QtWidgets.QLabel(self.widget_Name_Of_External_Neuron_Array)
font = QtGui.QFont()
self.label_Name_Prepared_Neuron_Array_2.setFont(font)
self.label_Name_Prepared_Neuron_Array_2.setObjectName("label_Name_Prepared_Neuron_Array_2")
self.gridLayout_41.addWidget(self.label_Name_Prepared_Neuron_Array_2, 0, 0, 1, 1)
self.gridLayout_24.addWidget(self.widget_Name_Of_External_Neuron_Array, 6, 0, 1, 1)
self.widget_Name_Of_External_Neuron_Array_3 = QtWidgets.QWidget(self.groupBoxNeuronModel)
self.widget_Name_Of_External_Neuron_Array_3.setObjectName("widget_Name_Of_External_Neuron_Array_3")
self.gridLayout_43 = QtWidgets.QGridLayout(self.widget_Name_Of_External_Neuron_Array_3)
self.gridLayout_43.setContentsMargins(0, 0, 0, 0)
self.gridLayout_43.setSpacing(0)
self.gridLayout_43.setObjectName("gridLayout_43")
self.pushButton_Name_Prepared_Neuron_Array_2 = QtWidgets.QPushButton(self.widget_Name_Of_External_Neuron_Array_3)
self.pushButton_Name_Prepared_Neuron_Array_2.setText("")
self.pushButton_Name_Prepared_Neuron_Array_2.setIcon(icon9)
self.pushButton_Name_Prepared_Neuron_Array_2.setIconSize(QtCore.QSize(24, 24))
self.pushButton_Name_Prepared_Neuron_Array_2.setFlat(True)
self.pushButton_Name_Prepared_Neuron_Array_2.setObjectName("pushButton_Name_Prepared_Neuron_Array_2")
self.gridLayout_43.addWidget(self.pushButton_Name_Prepared_Neuron_Array_2, 0, 0, 1, 1)
self.gridLayout_24.addWidget(self.widget_Name_Of_External_Neuron_Array_3, 6, 3, 1, 1)
self.gridLayout_29.addLayout(self.gridLayout_24, 0, 0, 1, 1)
self.gridLayout_38.addWidget(self.groupBoxNeuronModel, 0, 0, 1, 1)
spacerItem10 = QtWidgets.QSpacerItem(20, 40, QtWidgets.QSizePolicy.Minimum, QtWidgets.QSizePolicy.Expanding)
self.gridLayout_38.addItem(spacerItem10, 1, 0, 1, 1)
self.toolBox.addItem(self.page_6, "")
self.page_7 = QtWidgets.QWidget()
self.page_7.setGeometry(QtCore.QRect(0, 0, 974, 540))
self.page_7.setObjectName("page_7")
self.gridLayout_39 = QtWidgets.QGridLayout(self.page_7)
self.gridLayout_39.setObjectName("gridLayout_39")
spacerItem11 = QtWidgets.QSpacerItem(40, 20, QtWidgets.QSizePolicy.Expanding, QtWidgets.QSizePolicy.Minimum)
self.gridLayout_39.addItem(spacerItem11, 0, 1, 1, 1)
self.groupBoxComputationalParameters = QtWidgets.QGroupBox(self.page_7)
font = QtGui.QFont()
self.groupBoxComputationalParameters.setFont(font)
self.groupBoxComputationalParameters.setTitle("")
self.groupBoxComputationalParameters.setObjectName("groupBoxComputationalParameters")
self.gridLayout_4 = QtWidgets.QGridLayout(self.groupBoxComputationalParameters)
self.gridLayout_4.setObjectName("gridLayout_4")
self.widget_16 = QtWidgets.QWidget(self.groupBoxComputationalParameters)
self.widget_16.setObjectName("widget_16")
self.gridLayout_11 = QtWidgets.QGridLayout(self.widget_16)
self.gridLayout_11.setContentsMargins(0, 0, 0, 0)
self.gridLayout_11.setObjectName("gridLayout_11")
self.gridLayout_4.addWidget(self.widget_16, 1, 0, 1, 1)
self.gridLayout_25 = QtWidgets.QGridLayout()
self.gridLayout_25.setObjectName("gridLayout_25")
self.label_2 = QtWidgets.QLabel(self.groupBoxComputationalParameters)
font = QtGui.QFont()
self.label_2.setFont(font)
self.label_2.setObjectName("label_2")
self.gridLayout_25.addWidget(self.label_2, 1, 2, 1, 1)
self.label_Signal_Type = QtWidgets.QLabel(self.groupBoxComputationalParameters)
font = QtGui.QFont()
self.label_Signal_Type.setFont(font)
self.label_Signal_Type.setObjectName("label_Signal_Type")
self.gridLayout_25.addWidget(self.label_Signal_Type, 0, 0, 1, 1)
self.label_Frequency = QtWidgets.QLabel(self.groupBoxComputationalParameters)
font = QtGui.QFont()
self.label_Frequency.setFont(font)
self.label_Frequency.setObjectName("label_Frequency")
self.gridLayout_25.addWidget(self.label_Frequency, 1, 0, 1, 1)
self.label_T_Step = QtWidgets.QLabel(self.groupBoxComputationalParameters)
font = QtGui.QFont()
self.label_T_Step.setFont(font)
self.label_T_Step.setObjectName("label_T_Step")
self.gridLayout_25.addWidget(self.label_T_Step, 3, 0, 1, 1)
self.label_Phi = QtWidgets.QLabel(self.groupBoxComputationalParameters)
font = QtGui.QFont()
self.label_Phi.setFont(font)
self.label_Phi.setObjectName("label_Phi")
self.gridLayout_25.addWidget(self.label_Phi, 4, 0, 1, 1)
self.label_T = QtWidgets.QLabel(self.groupBoxComputationalParameters)
font = QtGui.QFont()
self.label_T.setFont(font)
self.label_T.setObjectName("label_T")
self.gridLayout_25.addWidget(self.label_T, 2, 0, 1, 1)
self.comboBox_Signal_Type = QtWidgets.QComboBox(self.groupBoxComputationalParameters)
font = QtGui.QFont()
self.comboBox_Signal_Type.setFont(font)
self.comboBox_Signal_Type.setObjectName("comboBox_Signal_Type")
self.comboBox_Signal_Type.addItem("")
self.comboBox_Signal_Type.addItem("")
self.comboBox_Signal_Type.addItem("")
self.comboBox_Signal_Type.addItem("")
self.gridLayout_25.addWidget(self.comboBox_Signal_Type, 0, 1, 1, 1)
self.label_Amplitude_Scale = QtWidgets.QLabel(self.groupBoxComputationalParameters)
font = QtGui.QFont()
self.label_Amplitude_Scale.setFont(font)
self.label_Amplitude_Scale.setObjectName("label_Amplitude_Scale")
self.gridLayout_25.addWidget(self.label_Amplitude_Scale, 5, 0, 1, 1)
self.doubleSpinBox_Frequency = QtWidgets.QDoubleSpinBox(self.groupBoxComputationalParameters)
font = QtGui.QFont()
self.doubleSpinBox_Frequency.setFont(font)
self.doubleSpinBox_Frequency.setLocale(QtCore.QLocale(QtCore.QLocale.English, QtCore.QLocale.UnitedKingdom))
self.doubleSpinBox_Frequency.setDecimals(4)
self.doubleSpinBox_Frequency.setMinimum(-100000000000.0)
self.doubleSpinBox_Frequency.setMaximum(1000000000000.0)
self.doubleSpinBox_Frequency.setSingleStep(10.0)
self.doubleSpinBox_Frequency.setProperty("value", 130.0)
self.doubleSpinBox_Frequency.setObjectName("doubleSpinBox_Frequency")
self.gridLayout_25.addWidget(self.doubleSpinBox_Frequency, 1, 1, 1, 1)
self.pushButton_Frequency = QtWidgets.QPushButton(self.groupBoxComputationalParameters)
self.pushButton_Frequency.setText("")
self.pushButton_Frequency.setIcon(icon9)
self.pushButton_Frequency.setIconSize(QtCore.QSize(24, 24))
self.pushButton_Frequency.setFlat(True)
self.pushButton_Frequency.setObjectName("pushButton_Frequency")
self.gridLayout_25.addWidget(self.pushButton_Frequency, 1, 3, 1, 1)
self.pushButton_Signal_Type = QtWidgets.QPushButton(self.groupBoxComputationalParameters)
self.pushButton_Signal_Type.setText("")
self.pushButton_Signal_Type.setIcon(icon9)
self.pushButton_Signal_Type.setIconSize(QtCore.QSize(24, 24))
self.pushButton_Signal_Type.setFlat(True)
self.pushButton_Signal_Type.setObjectName("pushButton_Signal_Type")
self.gridLayout_25.addWidget(self.pushButton_Signal_Type, 0, 3, 1, 1)
self.doubleSpinBox_T = QtWidgets.QDoubleSpinBox(self.groupBoxComputationalParameters)
font = QtGui.QFont()
self.doubleSpinBox_T.setFont(font)
self.doubleSpinBox_T.setLocale(QtCore.QLocale(QtCore.QLocale.English, QtCore.QLocale.UnitedKingdom))
self.doubleSpinBox_T.setDecimals(4)
self.doubleSpinBox_T.setMinimum(-1e+19)
self.doubleSpinBox_T.setMaximum(1e+19)
self.doubleSpinBox_T.setProperty("value", 60.0)
self.doubleSpinBox_T.setObjectName("doubleSpinBox_T")
self.gridLayout_25.addWidget(self.doubleSpinBox_T, 2, 1, 1, 1)
self.label = QtWidgets.QLabel(self.groupBoxComputationalParameters)
font = QtGui.QFont()
self.label.setFont(font)
self.label.setObjectName("label")
self.gridLayout_25.addWidget(self.label, 2, 2, 1, 1)
self.doubleSpinBox_T_Step = QtWidgets.QDoubleSpinBox(self.groupBoxComputationalParameters)
font = QtGui.QFont()
self.doubleSpinBox_T_Step.setFont(font)
self.doubleSpinBox_T_Step.setLocale(QtCore.QLocale(QtCore.QLocale.English, QtCore.QLocale.UnitedKingdom))
self.doubleSpinBox_T_Step.setDecimals(6)
self.doubleSpinBox_T_Step.setMaximum(1e+59)
self.doubleSpinBox_T_Step.setSingleStep(1.0)
self.doubleSpinBox_T_Step.setProperty("value", 1.0)
self.doubleSpinBox_T_Step.setObjectName("doubleSpinBox_T_Step")
self.gridLayout_25.addWidget(self.doubleSpinBox_T_Step, 3, 1, 1, 1)
self.label_3 = QtWidgets.QLabel(self.groupBoxComputationalParameters)
font = QtGui.QFont()
self.label_3.setFont(font)
self.label_3.setObjectName("label_3")
self.gridLayout_25.addWidget(self.label_3, 3, 2, 1, 1)
self.doubleSpinBox_Signal_Shift = QtWidgets.QDoubleSpinBox(self.groupBoxComputationalParameters)
font = QtGui.QFont()
self.doubleSpinBox_Signal_Shift.setFont(font)
self.doubleSpinBox_Signal_Shift.setLocale(QtCore.QLocale(QtCore.QLocale.English, QtCore.QLocale.UnitedKingdom))
self.doubleSpinBox_Signal_Shift.setDecimals(4)
self.doubleSpinBox_Signal_Shift.setMinimum(-1e+38)
self.doubleSpinBox_Signal_Shift.setMaximum(1e+36)
self.doubleSpinBox_Signal_Shift.setObjectName("doubleSpinBox_Signal_Shift")
self.gridLayout_25.addWidget(self.doubleSpinBox_Signal_Shift, 4, 1, 1, 1)
self.label_4 = QtWidgets.QLabel(self.groupBoxComputationalParameters)
font = QtGui.QFont()
self.label_4.setFont(font)
self.label_4.setObjectName("label_4")
self.gridLayout_25.addWidget(self.label_4, 4, 2, 1, 1)
self.doubleSpinBox_Amplitude_Scale = QtWidgets.QDoubleSpinBox(self.groupBoxComputationalParameters)
font = QtGui.QFont()
self.doubleSpinBox_Amplitude_Scale.setFont(font)
self.doubleSpinBox_Amplitude_Scale.setLocale(QtCore.QLocale(QtCore.QLocale.English, QtCore.QLocale.UnitedKingdom))
self.doubleSpinBox_Amplitude_Scale.setDecimals(4)
self.doubleSpinBox_Amplitude_Scale.setMinimum(0.0)
self.doubleSpinBox_Amplitude_Scale.setMaximum(1e+53)
self.doubleSpinBox_Amplitude_Scale.setProperty("value", 1.0)
self.doubleSpinBox_Amplitude_Scale.setObjectName("doubleSpinBox_Amplitude_Scale")
self.gridLayout_25.addWidget(self.doubleSpinBox_Amplitude_Scale, 5, 1, 1, 1)
self.pushButton_T = QtWidgets.QPushButton(self.groupBoxComputationalParameters)
self.pushButton_T.setText("")
self.pushButton_T.setIcon(icon9)
self.pushButton_T.setIconSize(QtCore.QSize(24, 24))
self.pushButton_T.setFlat(True)
self.pushButton_T.setObjectName("pushButton_T")
self.gridLayout_25.addWidget(self.pushButton_T, 2, 3, 1, 1)
self.pushButton_T_Step = QtWidgets.QPushButton(self.groupBoxComputationalParameters)
self.pushButton_T_Step.setText("")
self.pushButton_T_Step.setIcon(icon9)
self.pushButton_T_Step.setIconSize(QtCore.QSize(24, 24))
self.pushButton_T_Step.setFlat(True)
self.pushButton_T_Step.setObjectName("pushButton_T_Step")
self.gridLayout_25.addWidget(self.pushButton_T_Step, 3, 3, 1, 1)
self.pushButton_Phi = QtWidgets.QPushButton(self.groupBoxComputationalParameters)
self.pushButton_Phi.setText("")
self.pushButton_Phi.setIcon(icon9)
self.pushButton_Phi.setIconSize(QtCore.QSize(24, 24))
self.pushButton_Phi.setFlat(True)
self.pushButton_Phi.setObjectName("pushButton_Phi")
self.gridLayout_25.addWidget(self.pushButton_Phi, 4, 3, 1, 1)
self.pushButton_Amplitude_Scale = QtWidgets.QPushButton(self.groupBoxComputationalParameters)
self.pushButton_Amplitude_Scale.setText("")
self.pushButton_Amplitude_Scale.setIcon(icon9)
self.pushButton_Amplitude_Scale.setIconSize(QtCore.QSize(24, 24))
self.pushButton_Amplitude_Scale.setFlat(True)
self.pushButton_Amplitude_Scale.setObjectName("pushButton_Amplitude_Scale")
self.gridLayout_25.addWidget(self.pushButton_Amplitude_Scale, 5, 3, 1, 1)
self.gridLayout_4.addLayout(self.gridLayout_25, 0, 0, 1, 1)
self.gridLayout_39.addWidget(self.groupBoxComputationalParameters, 0, 0, 1, 1)
spacerItem12 = QtWidgets.QSpacerItem(20, 40, QtWidgets.QSizePolicy.Minimum, QtWidgets.QSizePolicy.Expanding)
self.gridLayout_39.addItem(spacerItem12, 1, 0, 1, 1)
self.toolBox.addItem(self.page_7, "")
self.page_8 = QtWidgets.QWidget()
self.page_8.setGeometry(QtCore.QRect(0, 0, 974, 540))
self.page_8.setObjectName("page_8")
self.gridLayout_40 = QtWidgets.QGridLayout(self.page_8)
self.gridLayout_40.setObjectName("gridLayout_40")
spacerItem13 = QtWidgets.QSpacerItem(40, 20, QtWidgets.QSizePolicy.Expanding, QtWidgets.QSizePolicy.Minimum)
self.gridLayout_40.addItem(spacerItem13, 0, 1, 1, 1)
self.groupBoxXXX = QtWidgets.QGroupBox(self.page_8)
font = QtGui.QFont()
self.groupBoxXXX.setFont(font)
self.groupBoxXXX.setStyleSheet("")
self.groupBoxXXX.setTitle("")
self.groupBoxXXX.setObjectName("groupBoxXXX")
self.gridLayout_30 = QtWidgets.QGridLayout(self.groupBoxXXX)
self.gridLayout_30.setObjectName("gridLayout_30")
self.widget_Spectrum_Truncation_Method = QtWidgets.QWidget(self.groupBoxXXX)
self.widget_Spectrum_Truncation_Method.setObjectName("widget_Spectrum_Truncation_Method")
self.gridLayout_31 = QtWidgets.QGridLayout(self.widget_Spectrum_Truncation_Method)
self.gridLayout_31.setContentsMargins(16, 0, 0, 0)
self.gridLayout_31.setObjectName("gridLayout_31")
self.widget_Truncate_Already_Obtained_Full_Solution = QtWidgets.QWidget(self.widget_Spectrum_Truncation_Method)
self.widget_Truncate_Already_Obtained_Full_Solution.setObjectName("widget_Truncate_Already_Obtained_Full_Solution")
self.gridLayout_15 = QtWidgets.QGridLayout(self.widget_Truncate_Already_Obtained_Full_Solution)
self.gridLayout_15.setContentsMargins(0, 0, 0, 0)
self.gridLayout_15.setSpacing(0)
self.gridLayout_15.setObjectName("gridLayout_15")
self.label_Truncate_Already_Obtained_Full_Solution = QtWidgets.QLabel(self.widget_Truncate_Already_Obtained_Full_Solution)
font = QtGui.QFont()
self.label_Truncate_Already_Obtained_Full_Solution.setFont(font)
self.label_Truncate_Already_Obtained_Full_Solution.setObjectName("label_Truncate_Already_Obtained_Full_Solution")
self.gridLayout_15.addWidget(self.label_Truncate_Already_Obtained_Full_Solution, 0, 0, 1, 1)
self.gridLayout_31.addWidget(self.widget_Truncate_Already_Obtained_Full_Solution, 2, 1, 1, 1)
self.widget_STM_6 = QtWidgets.QWidget(self.widget_Spectrum_Truncation_Method)
self.widget_STM_6.setObjectName("widget_STM_6")
self.verticalLayout_54 = QtWidgets.QVBoxLayout(self.widget_STM_6)
self.verticalLayout_54.setContentsMargins(0, 0, 0, 0)
self.verticalLayout_54.setSpacing(0)
self.verticalLayout_54.setObjectName("verticalLayout_54")
self.pushButton_Truncation_Parameter_2 = QtWidgets.QPushButton(self.widget_STM_6)
self.pushButton_Truncation_Parameter_2.setText("")
self.pushButton_Truncation_Parameter_2.setIcon(icon9)
self.pushButton_Truncation_Parameter_2.setIconSize(QtCore.QSize(24, 24))
self.pushButton_Truncation_Parameter_2.setFlat(True)
self.pushButton_Truncation_Parameter_2.setObjectName("pushButton_Truncation_Parameter_2")
self.verticalLayout_54.addWidget(self.pushButton_Truncation_Parameter_2)
self.gridLayout_31.addWidget(self.widget_STM_6, 1, 5, 1, 1)
self.widget_STM_5 = QtWidgets.QWidget(self.widget_Spectrum_Truncation_Method)
self.widget_STM_5.setObjectName("widget_STM_5")
self.verticalLayout_49 = QtWidgets.QVBoxLayout(self.widget_STM_5)
self.verticalLayout_49.setContentsMargins(0, 0, 0, 0)
self.verticalLayout_49.setSpacing(0)
self.verticalLayout_49.setObjectName("verticalLayout_49")
self.spinBox_Truncation_Parameter = QtWidgets.QSpinBox(self.widget_STM_5)
sizePolicy = QtWidgets.QSizePolicy(QtWidgets.QSizePolicy.MinimumExpanding, QtWidgets.QSizePolicy.Fixed)
sizePolicy.setHorizontalStretch(0)
sizePolicy.setVerticalStretch(0)
sizePolicy.setHeightForWidth(self.spinBox_Truncation_Parameter.sizePolicy().hasHeightForWidth())
self.spinBox_Truncation_Parameter.setSizePolicy(sizePolicy)
self.spinBox_Truncation_Parameter.setMaximum(999999999)
self.spinBox_Truncation_Parameter.setProperty("value", 100)
self.spinBox_Truncation_Parameter.setObjectName("spinBox_Truncation_Parameter")
self.verticalLayout_49.addWidget(self.spinBox_Truncation_Parameter)
self.gridLayout_31.addWidget(self.widget_STM_5, 1, 3, 1, 1)
self.widget_STM_1 = QtWidgets.QWidget(self.widget_Spectrum_Truncation_Method)
self.widget_STM_1.setObjectName("widget_STM_1")
self.verticalLayout_51 = QtWidgets.QVBoxLayout(self.widget_STM_1)
self.verticalLayout_51.setContentsMargins(20, 0, 0, 0)
self.verticalLayout_51.setSpacing(0)
self.verticalLayout_51.setObjectName("verticalLayout_51")
self.label_Truncation_Parameter = QtWidgets.QLabel(self.widget_STM_1)
font = QtGui.QFont()
self.label_Truncation_Parameter.setFont(font)
self.label_Truncation_Parameter.setObjectName("label_Truncation_Parameter")
self.verticalLayout_51.addWidget(self.label_Truncation_Parameter)
self.gridLayout_31.addWidget(self.widget_STM_1, 0, 1, 1, 1)
self.widget_STM_3 = QtWidgets.QWidget(self.widget_Spectrum_Truncation_Method)
self.widget_STM_3.setObjectName("widget_STM_3")
self.verticalLayout_53 = QtWidgets.QVBoxLayout(self.widget_STM_3)
self.verticalLayout_53.setContentsMargins(0, 0, 0, 0)
self.verticalLayout_53.setSpacing(0)
self.verticalLayout_53.setObjectName("verticalLayout_53")
self.pushButton_Truncation_Parameter = QtWidgets.QPushButton(self.widget_STM_3)
self.pushButton_Truncation_Parameter.setText("")
self.pushButton_Truncation_Parameter.setIcon(icon9)
self.pushButton_Truncation_Parameter.setIconSize(QtCore.QSize(24, 24))
self.pushButton_Truncation_Parameter.setFlat(True)
self.pushButton_Truncation_Parameter.setObjectName("pushButton_Truncation_Parameter")
self.verticalLayout_53.addWidget(self.pushButton_Truncation_Parameter)
self.gridLayout_31.addWidget(self.widget_STM_3, 0, 5, 1, 1)
self.widget_STM_4 = QtWidgets.QWidget(self.widget_Spectrum_Truncation_Method)
self.widget_STM_4.setObjectName("widget_STM_4")
self.verticalLayout_50 = QtWidgets.QVBoxLayout(self.widget_STM_4)
self.verticalLayout_50.setContentsMargins(20, 0, 0, 0)
self.verticalLayout_50.setSpacing(0)
self.verticalLayout_50.setObjectName("verticalLayout_50")
self.label_17 = QtWidgets.QLabel(self.widget_STM_4)
self.label_17.setObjectName("label_17")
self.verticalLayout_50.addWidget(self.label_17)
self.gridLayout_31.addWidget(self.widget_STM_4, 1, 1, 1, 1)
self.widget_STM_2 = QtWidgets.QWidget(self.widget_Spectrum_Truncation_Method)
self.widget_STM_2.setObjectName("widget_STM_2")
self.horizontalLayout_4 = QtWidgets.QHBoxLayout(self.widget_STM_2)
self.horizontalLayout_4.setContentsMargins(0, 0, 0, 0)
self.horizontalLayout_4.setSpacing(0)
self.horizontalLayout_4.setObjectName("horizontalLayout_4")
self.lineEdit_Truncation_Parameter = QtWidgets.QLineEdit(self.widget_STM_2)
font = QtGui.QFont()
self.lineEdit_Truncation_Parameter.setFont(font)
self.lineEdit_Truncation_Parameter.setObjectName("lineEdit_Truncation_Parameter")
self.horizontalLayout_4.addWidget(self.lineEdit_Truncation_Parameter)
self.label_21 = QtWidgets.QLabel(self.widget_STM_2)
font = QtGui.QFont()
self.label_21.setFont(font)
self.label_21.setObjectName("label_21")
self.horizontalLayout_4.addWidget(self.label_21)
self.gridLayout_31.addWidget(self.widget_STM_2, 0, 3, 1, 1)
self.widget_Truncate_Already_Obtained_Full_Solution_2 = QtWidgets.QWidget(self.widget_Spectrum_Truncation_Method)
self.widget_Truncate_Already_Obtained_Full_Solution_2.setObjectName("widget_Truncate_Already_Obtained_Full_Solution_2")
self.gridLayout_20 = QtWidgets.QGridLayout(self.widget_Truncate_Already_Obtained_Full_Solution_2)
self.gridLayout_20.setContentsMargins(0, 0, 0, 0)
self.gridLayout_20.setSpacing(0)
self.gridLayout_20.setObjectName("gridLayout_20")
self.checkBox_Truncate_The_Obtained_Full_Solution = QtWidgets.QCheckBox(self.widget_Truncate_Already_Obtained_Full_Solution_2)
font = QtGui.QFont()
self.checkBox_Truncate_The_Obtained_Full_Solution.setFont(font)
self.checkBox_Truncate_The_Obtained_Full_Solution.setLayoutDirection(QtCore.Qt.LeftToRight)
self.checkBox_Truncate_The_Obtained_Full_Solution.setText("")
self.checkBox_Truncate_The_Obtained_Full_Solution.setObjectName("checkBox_Truncate_The_Obtained_Full_Solution")
self.gridLayout_20.addWidget(self.checkBox_Truncate_The_Obtained_Full_Solution, 0, 0, 1, 1)
self.gridLayout_31.addWidget(self.widget_Truncate_Already_Obtained_Full_Solution_2, 2, 3, 1, 1)
self.widget_Truncate_Already_Obtained_Full_Solution_3 = QtWidgets.QWidget(self.widget_Spectrum_Truncation_Method)
self.widget_Truncate_Already_Obtained_Full_Solution_3.setObjectName("widget_Truncate_Already_Obtained_Full_Solution_3")
self.gridLayout_23 = QtWidgets.QGridLayout(self.widget_Truncate_Already_Obtained_Full_Solution_3)
self.gridLayout_23.setContentsMargins(0, 0, 0, 0)
self.gridLayout_23.setSpacing(0)
self.gridLayout_23.setObjectName("gridLayout_23")
self.pushButton_Truncate_The_Obtained_Full_Solution = QtWidgets.QPushButton(self.widget_Truncate_Already_Obtained_Full_Solution_3)
self.pushButton_Truncate_The_Obtained_Full_Solution.setText("")
self.pushButton_Truncate_The_Obtained_Full_Solution.setIcon(icon9)
self.pushButton_Truncate_The_Obtained_Full_Solution.setIconSize(QtCore.QSize(24, 24))
self.pushButton_Truncate_The_Obtained_Full_Solution.setFlat(True)
self.pushButton_Truncate_The_Obtained_Full_Solution.setObjectName("pushButton_Truncate_The_Obtained_Full_Solution")
self.gridLayout_23.addWidget(self.pushButton_Truncate_The_Obtained_Full_Solution, 0, 0, 1, 1)
self.gridLayout_31.addWidget(self.widget_Truncate_Already_Obtained_Full_Solution_3, 2, 5, 1, 1)
self.gridLayout_30.addWidget(self.widget_Spectrum_Truncation_Method, 4, 0, 1, 1)
self.gridLayout_33 = QtWidgets.QGridLayout()
self.gridLayout_33.setObjectName("gridLayout_33")
self.label_6 = QtWidgets.QLabel(self.groupBoxXXX)
font = QtGui.QFont()
self.label_6.setFont(font)
self.label_6.setObjectName("label_6")
self.gridLayout_33.addWidget(self.label_6, 2, 0, 1, 1)
self.checkBox_external_grounding = QtWidgets.QCheckBox(self.groupBoxXXX)
#self.checkBox_Show_Paraview_Screenshots = QtWidgets.QCheckBox(self.groupBoxXXX)
font = QtGui.QFont()
self.checkBox_external_grounding.setFont(font)
self.checkBox_external_grounding.setLayoutDirection(QtCore.Qt.LeftToRight)
self.checkBox_external_grounding.setText("")
self.checkBox_external_grounding.setObjectName("checkBox_external_grounding")
self.gridLayout_33.addWidget(self.checkBox_external_grounding, 2, 1, 1, 1)
self.pushButton_external_grounding = QtWidgets.QPushButton(self.groupBoxXXX)
self.pushButton_external_grounding.setText("")
self.pushButton_external_grounding.setIcon(icon9)
self.pushButton_external_grounding.setIconSize(QtCore.QSize(24, 24))
self.pushButton_external_grounding.setFlat(True)
self.pushButton_external_grounding.setObjectName("pushButton_external_grounding")
self.gridLayout_33.addWidget(self.pushButton_external_grounding, 2, 3, 1, 1)
#self.checkBox_external_grounding = QtWidgets.QCheckBox(self.groupBoxXXX)
spacerItem14 = QtWidgets.QSpacerItem(40, 20, QtWidgets.QSizePolicy.Expanding, QtWidgets.QSizePolicy.Minimum)
self.gridLayout_33.addItem(spacerItem14, 2, 2, 1, 1)
self.gridLayout_30.addLayout(self.gridLayout_33, 5, 0, 1, 1)
self.gridLayout_26 = QtWidgets.QGridLayout()
self.gridLayout_26.setObjectName("gridLayout_26")
self.pushButton_Full_Field_IFFT = QtWidgets.QPushButton(self.groupBoxXXX)
font = QtGui.QFont()
self.pushButton_Full_Field_IFFT.setFont(font)
self.pushButton_Full_Field_IFFT.setText("")
self.pushButton_Full_Field_IFFT.setIcon(icon9)
self.pushButton_Full_Field_IFFT.setIconSize(QtCore.QSize(24, 24))
self.pushButton_Full_Field_IFFT.setFlat(True)
self.pushButton_Full_Field_IFFT.setObjectName("pushButton_Full_Field_IFFT")
self.gridLayout_26.addWidget(self.pushButton_Full_Field_IFFT, 5, 3, 1, 1)
self.spinBox_Number_Of_Processors = QtWidgets.QSpinBox(self.groupBoxXXX)
font = QtGui.QFont()
self.spinBox_Number_Of_Processors.setFont(font)
self.spinBox_Number_Of_Processors.setLocale(QtCore.QLocale(QtCore.QLocale.English, QtCore.QLocale.UnitedKingdom))
self.spinBox_Number_Of_Processors.setMinimum(0)
self.spinBox_Number_Of_Processors.setMaximum(1000000000)
self.spinBox_Number_Of_Processors.setObjectName("spinBox_Number_Of_Processors")
self.gridLayout_26.addWidget(self.spinBox_Number_Of_Processors, 2, 1, 1, 1)
self.pushButton_El_Order_2 = QtWidgets.QPushButton(self.groupBoxXXX)
self.pushButton_El_Order_2.setText("")
self.pushButton_El_Order_2.setIcon(icon9)
self.pushButton_El_Order_2.setIconSize(QtCore.QSize(24, 24))
self.pushButton_El_Order_2.setFlat(True)
self.pushButton_El_Order_2.setObjectName("pushButton_El_Order_2")
self.gridLayout_26.addWidget(self.pushButton_El_Order_2, 1, 3, 1, 1)
self.pushButton_Y_Length_2 = QtWidgets.QPushButton(self.groupBoxXXX)
self.pushButton_Y_Length_2.setText("")
self.pushButton_Y_Length_2.setIcon(icon9)
self.pushButton_Y_Length_2.setIconSize(QtCore.QSize(24, 24))
self.pushButton_Y_Length_2.setFlat(True)
self.pushButton_Y_Length_2.setObjectName("pushButton_Y_Length_2")
self.gridLayout_26.addWidget(self.pushButton_Y_Length_2, 3, 3, 1, 1)
self.label_Number_Of_Processors = QtWidgets.QLabel(self.groupBoxXXX)
font = QtGui.QFont()
self.label_Number_Of_Processors.setFont(font)
self.label_Number_Of_Processors.setObjectName("label_Number_Of_Processors")
self.gridLayout_26.addWidget(self.label_Number_Of_Processors, 2, 0, 1, 1)
self.label_Skip_Mesh_Refinement = QtWidgets.QLabel(self.groupBoxXXX)
font = QtGui.QFont()
self.label_Skip_Mesh_Refinement.setFont(font)
self.label_Skip_Mesh_Refinement.setObjectName("label_Skip_Mesh_Refinement")
self.gridLayout_26.addWidget(self.label_Skip_Mesh_Refinement, 4, 0, 1, 1)
self.checkBox_Skip_Mesh_Refinement = QtWidgets.QCheckBox(self.groupBoxXXX)
font = QtGui.QFont()
self.checkBox_Skip_Mesh_Refinement.setFont(font)
self.checkBox_Skip_Mesh_Refinement.setText("")
self.checkBox_Skip_Mesh_Refinement.setObjectName("checkBox_Skip_Mesh_Refinement")
self.gridLayout_26.addWidget(self.checkBox_Skip_Mesh_Refinement, 4, 1, 1, 1)
self.checkBox_FEniCS_MPI = QtWidgets.QCheckBox(self.groupBoxXXX)
self.checkBox_FEniCS_MPI.setText("")
self.checkBox_FEniCS_MPI.setObjectName("checkBox_FEniCS_MPI")
self.gridLayout_26.addWidget(self.checkBox_FEniCS_MPI, 3, 1, 1, 1)
self.label_20 = QtWidgets.QLabel(self.groupBoxXXX)
self.label_20.setObjectName("label_20")
self.gridLayout_26.addWidget(self.label_20, 3, 0, 1, 1)
self.pushButton_Number_Of_Processors_2 = QtWidgets.QPushButton(self.groupBoxXXX)
self.pushButton_Number_Of_Processors_2.setText("")
self.pushButton_Number_Of_Processors_2.setIcon(icon9)
self.pushButton_Number_Of_Processors_2.setIconSize(QtCore.QSize(24, 24))
self.pushButton_Number_Of_Processors_2.setFlat(True)
self.pushButton_Number_Of_Processors_2.setObjectName("pushButton_Number_Of_Processors_2")
self.gridLayout_26.addWidget(self.pushButton_Number_Of_Processors_2, 2, 3, 1, 1)
self.pushButton_Skip_Mesh_Refinement_2 = QtWidgets.QPushButton(self.groupBoxXXX)
self.pushButton_Skip_Mesh_Refinement_2.setText("")
self.pushButton_Skip_Mesh_Refinement_2.setIcon(icon9)
self.pushButton_Skip_Mesh_Refinement_2.setIconSize(QtCore.QSize(24, 24))
self.pushButton_Skip_Mesh_Refinement_2.setFlat(True)
self.pushButton_Skip_Mesh_Refinement_2.setObjectName("pushButton_Skip_Mesh_Refinement_2")
self.gridLayout_26.addWidget(self.pushButton_Skip_Mesh_Refinement_2, 4, 3, 1, 1)
self.spinBox_El_Order = QtWidgets.QSpinBox(self.groupBoxXXX)
font = QtGui.QFont()
self.spinBox_El_Order.setFont(font)
self.spinBox_El_Order.setLocale(QtCore.QLocale(QtCore.QLocale.English, QtCore.QLocale.UnitedKingdom))
self.spinBox_El_Order.setMaximum(1000000000)
self.spinBox_El_Order.setProperty("value", 2)
self.spinBox_El_Order.setObjectName("spinBox_El_Order")
self.gridLayout_26.addWidget(self.spinBox_El_Order, 1, 1, 1, 1)
self.checkBox_Full_Field_IFFT = QtWidgets.QCheckBox(self.groupBoxXXX)
font = QtGui.QFont()
self.checkBox_Full_Field_IFFT.setFont(font)
self.checkBox_Full_Field_IFFT.setText("")
self.checkBox_Full_Field_IFFT.setObjectName("checkBox_Full_Field_IFFT")
self.gridLayout_26.addWidget(self.checkBox_Full_Field_IFFT, 5, 1, 1, 1)
self.label_Laplace_Transformation = QtWidgets.QLabel(self.groupBoxXXX)
font = QtGui.QFont()
self.label_Laplace_Transformation.setFont(font)
self.label_Laplace_Transformation.setObjectName("label_Laplace_Transformation")
self.gridLayout_26.addWidget(self.label_Laplace_Transformation, 0, 0, 1, 1)
self.label_Full_Field_IFFT = QtWidgets.QLabel(self.groupBoxXXX)
font = QtGui.QFont()
self.label_Full_Field_IFFT.setFont(font)
self.label_Full_Field_IFFT.setObjectName("label_Full_Field_IFFT")
self.gridLayout_26.addWidget(self.label_Full_Field_IFFT, 5, 0, 1, 1)
self.label_El_Order = QtWidgets.QLabel(self.groupBoxXXX)
font = QtGui.QFont()
self.label_El_Order.setFont(font)
self.label_El_Order.setObjectName("label_El_Order")
self.gridLayout_26.addWidget(self.label_El_Order, 1, 0, 1, 1)
self.comboBox_Laplace_Formulation = QtWidgets.QComboBox(self.groupBoxXXX)
font = QtGui.QFont()
self.comboBox_Laplace_Formulation.setFont(font)
self.comboBox_Laplace_Formulation.setStyleSheet("")
self.comboBox_Laplace_Formulation.setObjectName("comboBox_Laplace_Formulation")
self.comboBox_Laplace_Formulation.addItem("")
self.comboBox_Laplace_Formulation.addItem("")
self.gridLayout_26.addWidget(self.comboBox_Laplace_Formulation, 0, 1, 1, 1)
self.pushButton_Laplace_Formulation_2 = QtWidgets.QPushButton(self.groupBoxXXX)
self.pushButton_Laplace_Formulation_2.setText("")
self.pushButton_Laplace_Formulation_2.setIcon(icon9)
self.pushButton_Laplace_Formulation_2.setIconSize(QtCore.QSize(24, 24))
self.pushButton_Laplace_Formulation_2.setFlat(True)
self.pushButton_Laplace_Formulation_2.setObjectName("pushButton_Laplace_Formulation_2")
self.gridLayout_26.addWidget(self.pushButton_Laplace_Formulation_2, 0, 3, 1, 1)
spacerItem15 = QtWidgets.QSpacerItem(40, 20, QtWidgets.QSizePolicy.Expanding, QtWidgets.QSizePolicy.Minimum)
self.gridLayout_26.addItem(spacerItem15, 2, 4, 1, 1)
self.gridLayout_30.addLayout(self.gridLayout_26, 0, 0, 1, 1)
self.gridLayout_27 = QtWidgets.QGridLayout()
self.gridLayout_27.setObjectName("gridLayout_27")
self.pushButton_Sprectrum_Truncation_Method = QtWidgets.QPushButton(self.groupBoxXXX)
self.pushButton_Sprectrum_Truncation_Method.setText("")
self.pushButton_Sprectrum_Truncation_Method.setIcon(icon9)
self.pushButton_Sprectrum_Truncation_Method.setIconSize(QtCore.QSize(24, 24))
self.pushButton_Sprectrum_Truncation_Method.setFlat(True)
self.pushButton_Sprectrum_Truncation_Method.setObjectName("pushButton_Sprectrum_Truncation_Method")
self.gridLayout_27.addWidget(self.pushButton_Sprectrum_Truncation_Method, 0, 3, 1, 1)
self.comboBox_Spectrum_Truncation_Method = QtWidgets.QComboBox(self.groupBoxXXX)
font = QtGui.QFont()
self.comboBox_Spectrum_Truncation_Method.setFont(font)
self.comboBox_Spectrum_Truncation_Method.setObjectName("comboBox_Spectrum_Truncation_Method")
self.comboBox_Spectrum_Truncation_Method.addItem("")
self.comboBox_Spectrum_Truncation_Method.addItem("")
self.comboBox_Spectrum_Truncation_Method.addItem("")
self.comboBox_Spectrum_Truncation_Method.addItem("")
self.gridLayout_27.addWidget(self.comboBox_Spectrum_Truncation_Method, 0, 1, 1, 1)
self.label_Sprectrum_Truncation_Method = QtWidgets.QLabel(self.groupBoxXXX)
font = QtGui.QFont()
self.label_Sprectrum_Truncation_Method.setFont(font)
self.label_Sprectrum_Truncation_Method.setObjectName("label_Sprectrum_Truncation_Method")
self.gridLayout_27.addWidget(self.label_Sprectrum_Truncation_Method, 0, 0, 1, 1)
spacerItem16 = QtWidgets.QSpacerItem(40, 20, QtWidgets.QSizePolicy.Expanding, QtWidgets.QSizePolicy.Minimum)
self.gridLayout_27.addItem(spacerItem16, 0, 2, 1, 1)
self.gridLayout_30.addLayout(self.gridLayout_27, 3, 0, 1, 1)
self.line = QtWidgets.QFrame(self.groupBoxXXX)
self.line.setFrameShape(QtWidgets.QFrame.HLine)
self.line.setFrameShadow(QtWidgets.QFrame.Sunken)
self.line.setObjectName("line")
self.gridLayout_30.addWidget(self.line, 2, 0, 1, 1)
self.gridLayout_40.addWidget(self.groupBoxXXX, 0, 0, 1, 1)
spacerItem17 = QtWidgets.QSpacerItem(20, 40, QtWidgets.QSizePolicy.Minimum, QtWidgets.QSizePolicy.Expanding)
self.gridLayout_40.addItem(spacerItem17, 1, 0, 1, 1)
self.toolBox.addItem(self.page_8, "")
self.gridLayout_28.addWidget(self.toolBox, 0, 0, 1, 1)
self.stackedWidget.addWidget(self.page)
self.page_9 = QtWidgets.QWidget()
self.page_9.setObjectName("page_9")
self.gridLayout_51 = QtWidgets.QGridLayout(self.page_9)
self.gridLayout_51.setObjectName("gridLayout_51")
self.scrollArea = QtWidgets.QScrollArea(self.page_9)
self.scrollArea.setWidgetResizable(True)
self.scrollArea.setObjectName("scrollArea")
self.scrollAreaWidgetContents_4 = QtWidgets.QWidget()
self.scrollAreaWidgetContents_4.setGeometry(QtCore.QRect(0, 0, 958, 1065))
self.scrollAreaWidgetContents_4.setObjectName("scrollAreaWidgetContents_4")
self.gridLayout_52 = QtWidgets.QGridLayout(self.scrollAreaWidgetContents_4)
self.gridLayout_52.setObjectName("gridLayout_52")
self.groupBox_5 = QtWidgets.QGroupBox(self.scrollAreaWidgetContents_4)
self.groupBox_5.setTitle("")
self.groupBox_5.setObjectName("groupBox_5")
self.gridLayout_45 = QtWidgets.QGridLayout(self.groupBox_5)
self.gridLayout_45.setObjectName("gridLayout_45")
self.gridLayout_Images = QtWidgets.QGridLayout()
self.gridLayout_Images.setObjectName("gridLayout_Images")
self.label_Image_Placed_Neurons = QtWidgets.QLabel(self.groupBox_5)
self.label_Image_Placed_Neurons.setMaximumSize(QtCore.QSize(16777215, 640))
self.label_Image_Placed_Neurons.setText("")
self.label_Image_Placed_Neurons.setPixmap(QtGui.QPixmap(":/icons/icons/image_placeholder.png"))
self.label_Image_Placed_Neurons.setAlignment(QtCore.Qt.AlignCenter)
self.label_Image_Placed_Neurons.setObjectName("label_Image_Placed_Neurons")
self.gridLayout_Images.addWidget(self.label_Image_Placed_Neurons, 0, 0, 1, 1)
self.pushButton_Placed_Neurons = QtWidgets.QPushButton(self.groupBox_5)
self.pushButton_Placed_Neurons.setObjectName("pushButton_Placed_Neurons")
self.gridLayout_Images.addWidget(self.pushButton_Placed_Neurons, 1, 0, 1, 1)
self.gridLayout_45.addLayout(self.gridLayout_Images, 1, 0, 1, 1)
self.gridLayout_49 = QtWidgets.QGridLayout()
self.gridLayout_49.setObjectName("gridLayout_49")
self.label_Image_Signal_Recovered = QtWidgets.QLabel(self.groupBox_5)
self.label_Image_Signal_Recovered.setMaximumSize(QtCore.QSize(16777215, 640))
self.label_Image_Signal_Recovered.setText("")
self.label_Image_Signal_Recovered.setPixmap(QtGui.QPixmap(":/icons/icons/image_placeholder.png"))
self.label_Image_Signal_Recovered.setAlignment(QtCore.Qt.AlignCenter)
self.label_Image_Signal_Recovered.setObjectName("label_Image_Signal_Recovered")
self.gridLayout_49.addWidget(self.label_Image_Signal_Recovered, 0, 0, 1, 1)
self.pushButton_Signal_Recovered = QtWidgets.QPushButton(self.groupBox_5)
self.pushButton_Signal_Recovered.setObjectName("pushButton_Signal_Recovered")
self.gridLayout_49.addWidget(self.pushButton_Signal_Recovered, 1, 0, 1, 1)
self.gridLayout_45.addLayout(self.gridLayout_49, 2, 0, 1, 1)
self.gridLayout_46 = QtWidgets.QGridLayout()
self.gridLayout_46.setObjectName("gridLayout_46")
self.label_Image_Adapted_Mesh = QtWidgets.QLabel(self.groupBox_5)
self.label_Image_Adapted_Mesh.setMaximumSize(QtCore.QSize(16777215, 640))
self.label_Image_Adapted_Mesh.setText("")
self.label_Image_Adapted_Mesh.setPixmap(QtGui.QPixmap(":/icons/icons/image_placeholder.png"))
self.label_Image_Adapted_Mesh.setAlignment(QtCore.Qt.AlignCenter)
self.label_Image_Adapted_Mesh.setObjectName("label_Image_Adapted_Mesh")
self.gridLayout_46.addWidget(self.label_Image_Adapted_Mesh, 0, 0, 1, 1)
self.pushButton_Adapted_Mesh = QtWidgets.QPushButton(self.groupBox_5)
self.pushButton_Adapted_Mesh.setObjectName("pushButton_Adapted_Mesh")
self.gridLayout_46.addWidget(self.pushButton_Adapted_Mesh, 1, 0, 1, 1)
self.gridLayout_45.addLayout(self.gridLayout_46, 4, 0, 1, 1)
self.gridLayout_48 = QtWidgets.QGridLayout()
self.gridLayout_48.setObjectName("gridLayout_48")
self.label_Image_CSF_Full_Refinement = QtWidgets.QLabel(self.groupBox_5)
self.label_Image_CSF_Full_Refinement.setMaximumSize(QtCore.QSize(16777215, 640))
self.label_Image_CSF_Full_Refinement.setText("")
self.label_Image_CSF_Full_Refinement.setPixmap(QtGui.QPixmap(":/icons/icons/image_placeholder.png"))
self.label_Image_CSF_Full_Refinement.setAlignment(QtCore.Qt.AlignCenter)
self.label_Image_CSF_Full_Refinement.setObjectName("label_Image_CSF_Full_Refinement")
self.gridLayout_48.addWidget(self.label_Image_CSF_Full_Refinement, 0, 0, 1, 1)
self.pushButton_CSF_Full_Refinement = QtWidgets.QPushButton(self.groupBox_5)
self.pushButton_CSF_Full_Refinement.setObjectName("pushButton_CSF_Full_Refinement")
self.gridLayout_48.addWidget(self.pushButton_CSF_Full_Refinement, 1, 0, 1, 1)
self.gridLayout_45.addLayout(self.gridLayout_48, 3, 0, 1, 1)
self.gridLayout_47 = QtWidgets.QGridLayout()
self.gridLayout_47.setObjectName("gridLayout_47")
self.label_Image_Signal_Convoluted_1st_Point = QtWidgets.QLabel(self.groupBox_5)
self.label_Image_Signal_Convoluted_1st_Point.setMaximumSize(QtCore.QSize(16777215, 640))
self.label_Image_Signal_Convoluted_1st_Point.setText("")
self.label_Image_Signal_Convoluted_1st_Point.setPixmap(QtGui.QPixmap(":/icons/icons/image_placeholder.png"))
self.label_Image_Signal_Convoluted_1st_Point.setAlignment(QtCore.Qt.AlignCenter)
self.label_Image_Signal_Convoluted_1st_Point.setObjectName("label_Image_Signal_Convoluted_1st_Point")
self.gridLayout_47.addWidget(self.label_Image_Signal_Convoluted_1st_Point, 0, 0, 1, 1)
self.pushButton_Signal_Convoluted_1st_Point = QtWidgets.QPushButton(self.groupBox_5)
self.pushButton_Signal_Convoluted_1st_Point.setObjectName("pushButton_Signal_Convoluted_1st_Point")
self.gridLayout_47.addWidget(self.pushButton_Signal_Convoluted_1st_Point, 1, 0, 1, 1)
self.gridLayout_45.addLayout(self.gridLayout_47, 6, 0, 1, 1)
self.gridLayout_50 = QtWidgets.QGridLayout()
self.gridLayout_50.setObjectName("gridLayout_50")
self.label_Image_Axon_Activation = QtWidgets.QLabel(self.groupBox_5)
self.label_Image_Axon_Activation.setMaximumSize(QtCore.QSize(16777215, 640))
self.label_Image_Axon_Activation.setText("")
self.label_Image_Axon_Activation.setPixmap(QtGui.QPixmap(":/icons/icons/image_placeholder.png"))
self.label_Image_Axon_Activation.setAlignment(QtCore.Qt.AlignCenter)
self.label_Image_Axon_Activation.setObjectName("label_Image_Axon_Activation")
self.gridLayout_50.addWidget(self.label_Image_Axon_Activation, 0, 0, 1, 1)
self.pushButton_Axon_Activation = QtWidgets.QPushButton(self.groupBox_5)
self.pushButton_Axon_Activation.setObjectName("pushButton_Axon_Activation")
self.gridLayout_50.addWidget(self.pushButton_Axon_Activation, 1, 0, 1, 1)
self.gridLayout_45.addLayout(self.gridLayout_50, 7, 0, 1, 1)
self.gridLayout_52.addWidget(self.groupBox_5, 0, 0, 1, 1)
self.scrollArea.setWidget(self.scrollAreaWidgetContents_4)
self.gridLayout_51.addWidget(self.scrollArea, 0, 0, 1, 1)
self.stackedWidget.addWidget(self.page_9)
self.Simulation_Parameters = QtWidgets.QWidget()
self.Simulation_Parameters.setObjectName("Simulation_Parameters")
self.gridLayout_6 = QtWidgets.QGridLayout(self.Simulation_Parameters)
self.gridLayout_6.setObjectName("gridLayout_6")
self.scrollArea_2 = QtWidgets.QScrollArea(self.Simulation_Parameters)
font = QtGui.QFont()
self.scrollArea_2.setFont(font)
self.scrollArea_2.setStyleSheet("")
self.scrollArea_2.setFrameShape(QtWidgets.QFrame.NoFrame)
self.scrollArea_2.setFrameShadow(QtWidgets.QFrame.Sunken)
self.scrollArea_2.setLineWidth(3)
self.scrollArea_2.setWidgetResizable(True)
self.scrollArea_2.setObjectName("scrollArea_2")
self.scrollAreaWidgetContents_3 = QtWidgets.QWidget()
self.scrollAreaWidgetContents_3.setGeometry(QtCore.QRect(0, 0, 974, 516))
self.scrollAreaWidgetContents_3.setObjectName("scrollAreaWidgetContents_3")
self.gridLayout_10 = QtWidgets.QGridLayout(self.scrollAreaWidgetContents_3)
self.gridLayout_10.setObjectName("gridLayout_10")
self.widget_CPE_Active = QtWidgets.QWidget(self.scrollAreaWidgetContents_3)
self.widget_CPE_Active.setObjectName("widget_CPE_Active")
self.gridLayout_5 = QtWidgets.QGridLayout(self.widget_CPE_Active)
self.gridLayout_5.setContentsMargins(0, 0, 0, 0)
self.gridLayout_5.setObjectName("gridLayout_5")
self.gridLayout_10.addWidget(self.widget_CPE_Active, 0, 0, 1, 1)
self.widget_18 = QtWidgets.QWidget(self.scrollAreaWidgetContents_3)
self.widget_18.setObjectName("widget_18")
self.gridLayout_13 = QtWidgets.QGridLayout(self.widget_18)
self.gridLayout_13.setContentsMargins(0, 0, 0, 0)
self.gridLayout_13.setObjectName("gridLayout_13")
self.verticalLayout_41 = QtWidgets.QVBoxLayout()
self.verticalLayout_41.setObjectName("verticalLayout_41")
self.horizontalLayout_192 = QtWidgets.QHBoxLayout()
self.horizontalLayout_192.setSpacing(6)
self.horizontalLayout_192.setObjectName("horizontalLayout_192")
self.label_10 = QtWidgets.QLabel(self.widget_18)
self.label_10.setObjectName("label_10")
self.horizontalLayout_192.addWidget(self.label_10)
self.verticalLayout_41.addLayout(self.horizontalLayout_192)
self.horizontalLayout_193 = QtWidgets.QHBoxLayout()
self.horizontalLayout_193.setObjectName("horizontalLayout_193")
self.label_Phi_Vector = QtWidgets.QLabel(self.widget_18)
font = QtGui.QFont()
self.label_Phi_Vector.setFont(font)
self.label_Phi_Vector.setObjectName("label_Phi_Vector")
self.horizontalLayout_193.addWidget(self.label_Phi_Vector)
self.verticalLayout_41.addLayout(self.horizontalLayout_193)
self.horizontalLayout_11 = QtWidgets.QHBoxLayout()
self.horizontalLayout_11.setObjectName("horizontalLayout_11")
self.label_5 = QtWidgets.QLabel(self.widget_18)
self.label_5.setObjectName("label_5")
self.horizontalLayout_11.addWidget(self.label_5)
self.verticalLayout_41.addLayout(self.horizontalLayout_11)
self.gridLayout_13.addLayout(self.verticalLayout_41, 0, 0, 1, 1)
self.verticalLayout_42 = QtWidgets.QVBoxLayout()
self.verticalLayout_42.setObjectName("verticalLayout_42")
self.horizontalLayout_200 = QtWidgets.QHBoxLayout()
self.horizontalLayout_200.setObjectName("horizontalLayout_200")
self.checkBox_Current_Control = QtWidgets.QCheckBox(self.widget_18)
font = QtGui.QFont()
self.checkBox_Current_Control.setFont(font)
self.checkBox_Current_Control.setLayoutDirection(QtCore.Qt.LeftToRight)
self.checkBox_Current_Control.setText("")
self.checkBox_Current_Control.setObjectName("checkBox_Current_Control")
self.horizontalLayout_200.addWidget(self.checkBox_Current_Control)
self.verticalLayout_42.addLayout(self.horizontalLayout_200)
self.horizontalLayout_201 = QtWidgets.QHBoxLayout()
self.horizontalLayout_201.setObjectName("horizontalLayout_201")
self.lineEdit_Phi_Vector = QtWidgets.QLineEdit(self.widget_18)
font = QtGui.QFont()
self.lineEdit_Phi_Vector.setFont(font)
self.lineEdit_Phi_Vector.setObjectName("lineEdit_Phi_Vector")
self.horizontalLayout_201.addWidget(self.lineEdit_Phi_Vector)
self.verticalLayout_42.addLayout(self.horizontalLayout_201)
self.horizontalLayout_59 = QtWidgets.QHBoxLayout()
self.horizontalLayout_59.setObjectName("horizontalLayout_59")
self.comboBox_Solver_Type = QtWidgets.QComboBox(self.widget_18)
self.comboBox_Solver_Type.setObjectName("comboBox_Solver_Type")
self.comboBox_Solver_Type.addItem("")
self.comboBox_Solver_Type.addItem("")
self.comboBox_Solver_Type.addItem("")
self.comboBox_Solver_Type.addItem("")
self.horizontalLayout_59.addWidget(self.comboBox_Solver_Type)
self.verticalLayout_42.addLayout(self.horizontalLayout_59)
self.gridLayout_13.addLayout(self.verticalLayout_42, 0, 1, 1, 1)
self.verticalLayout_43 = QtWidgets.QVBoxLayout()
self.verticalLayout_43.setObjectName("verticalLayout_43")
self.horizontalLayout_208 = QtWidgets.QHBoxLayout()
self.horizontalLayout_208.setObjectName("horizontalLayout_208")
self.pushButton_Current_Control = QtWidgets.QPushButton(self.widget_18)
self.pushButton_Current_Control.setText("")
self.pushButton_Current_Control.setIcon(icon9)
self.pushButton_Current_Control.setIconSize(QtCore.QSize(24, 24))
self.pushButton_Current_Control.setFlat(True)
self.pushButton_Current_Control.setObjectName("pushButton_Current_Control")
self.horizontalLayout_208.addWidget(self.pushButton_Current_Control)
self.verticalLayout_43.addLayout(self.horizontalLayout_208)
self.horizontalLayout_209 = QtWidgets.QHBoxLayout()
self.horizontalLayout_209.setObjectName("horizontalLayout_209")
self.pushButton_Phi_Vector = QtWidgets.QPushButton(self.widget_18)
self.pushButton_Phi_Vector.setText("")
self.pushButton_Phi_Vector.setIcon(icon9)
self.pushButton_Phi_Vector.setIconSize(QtCore.QSize(24, 24))
self.pushButton_Phi_Vector.setFlat(True)
self.pushButton_Phi_Vector.setObjectName("pushButton_Phi_Vector")
self.horizontalLayout_209.addWidget(self.pushButton_Phi_Vector)
self.verticalLayout_43.addLayout(self.horizontalLayout_209)
self.verticalLayout_48 = QtWidgets.QVBoxLayout()
self.verticalLayout_48.setContentsMargins(0, 0, -1, 0)
self.verticalLayout_48.setSpacing(0)
self.verticalLayout_48.setObjectName("verticalLayout_48")
self.pushButton_Solver_Type = QtWidgets.QPushButton(self.widget_18)
self.pushButton_Solver_Type.setText("")
self.pushButton_Solver_Type.setIcon(icon9)
self.pushButton_Solver_Type.setIconSize(QtCore.QSize(24, 24))
self.pushButton_Solver_Type.setFlat(True)
self.pushButton_Solver_Type.setObjectName("pushButton_Solver_Type")
self.verticalLayout_48.addWidget(self.pushButton_Solver_Type)
self.verticalLayout_43.addLayout(self.verticalLayout_48)
self.gridLayout_13.addLayout(self.verticalLayout_43, 0, 2, 1, 1)
self.gridLayout_10.addWidget(self.widget_18, 1, 1, 1, 1)
spacerItem18 = QtWidgets.QSpacerItem(40, 20, QtWidgets.QSizePolicy.Expanding, QtWidgets.QSizePolicy.Minimum)
self.gridLayout_10.addItem(spacerItem18, 1, 2, 1, 1)
spacerItem19 = QtWidgets.QSpacerItem(20, 40, QtWidgets.QSizePolicy.Minimum, QtWidgets.QSizePolicy.Expanding)
self.gridLayout_10.addItem(spacerItem19, 2, 1, 1, 1)
self.scrollArea_2.setWidget(self.scrollAreaWidgetContents_3)
self.gridLayout_6.addWidget(self.scrollArea_2, 2, 0, 1, 1)
self.toolBox_2 = QtWidgets.QToolBox(self.Simulation_Parameters)
self.toolBox_2.setObjectName("toolBox_2")
self.page_2 = QtWidgets.QWidget()
self.page_2.setGeometry(QtCore.QRect(0, 0, 974, 452))
self.page_2.setObjectName("page_2")
self.gridLayout_7 = QtWidgets.QGridLayout(self.page_2)
self.gridLayout_7.setObjectName("gridLayout_7")
spacerItem20 = QtWidgets.QSpacerItem(40, 20, QtWidgets.QSizePolicy.Expanding, QtWidgets.QSizePolicy.Minimum)
self.gridLayout_7.addItem(spacerItem20, 0, 1, 1, 1)
self.groupBox_2 = QtWidgets.QGroupBox(self.page_2)
self.groupBox_2.setTitle("")
self.groupBox_2.setObjectName("groupBox_2")
self.gridLayout_8 = QtWidgets.QGridLayout(self.groupBox_2)
self.gridLayout_8.setObjectName("gridLayout_8")
self.gridLayout_34 = QtWidgets.QGridLayout()
self.gridLayout_34.setObjectName("gridLayout_34")
self.pushButton_MRI_Data_Name = QtWidgets.QPushButton(self.groupBox_2)
self.pushButton_MRI_Data_Name.setText("")
self.pushButton_MRI_Data_Name.setIcon(icon9)
self.pushButton_MRI_Data_Name.setIconSize(QtCore.QSize(24, 24))
self.pushButton_MRI_Data_Name.setFlat(True)
self.pushButton_MRI_Data_Name.setObjectName("pushButton_MRI_Data_Name")
self.gridLayout_34.addWidget(self.pushButton_MRI_Data_Name, 0, 2, 1, 1)
self.label_CSF_Index = QtWidgets.QLabel(self.groupBox_2)
font = QtGui.QFont()
self.label_CSF_Index.setFont(font)
self.label_CSF_Index.setObjectName("label_CSF_Index")
self.gridLayout_34.addWidget(self.label_CSF_Index, 4, 0, 1, 1)
self.label_WM_Index = QtWidgets.QLabel(self.groupBox_2)
font = QtGui.QFont()
self.label_WM_Index.setFont(font)
self.label_WM_Index.setObjectName("label_WM_Index")
self.gridLayout_34.addWidget(self.label_WM_Index, 5, 0, 1, 1)
self.label_DTI_Data_Name = QtWidgets.QLabel(self.groupBox_2)
font = QtGui.QFont()
self.label_DTI_Data_Name.setFont(font)
self.label_DTI_Data_Name.setObjectName("label_DTI_Data_Name")
self.gridLayout_34.addWidget(self.label_DTI_Data_Name, 2, 0, 1, 1)
self.label_MRI_Data_Name = QtWidgets.QLabel(self.groupBox_2)
font = QtGui.QFont()
self.label_MRI_Data_Name.setFont(font)
self.label_MRI_Data_Name.setObjectName("label_MRI_Data_Name")
self.gridLayout_34.addWidget(self.label_MRI_Data_Name, 0, 0, 1, 1)
self.label_GM_Index = QtWidgets.QLabel(self.groupBox_2)
font = QtGui.QFont()
self.label_GM_Index.setFont(font)
self.label_GM_Index.setObjectName("label_GM_Index")
self.gridLayout_34.addWidget(self.label_GM_Index, 6, 0, 1, 1)
self.label_MRI_Data_Name_2 = QtWidgets.QLabel(self.groupBox_2)
font = QtGui.QFont()
self.label_MRI_Data_Name_2.setFont(font)
self.label_MRI_Data_Name_2.setObjectName("label_MRI_Data_Name_2")
self.gridLayout_34.addWidget(self.label_MRI_Data_Name_2, 1, 0, 1, 1)
self.label_MRI_Data_Name_3 = QtWidgets.QLabel(self.groupBox_2)
font = QtGui.QFont()
self.label_MRI_Data_Name_3.setFont(font)
self.label_MRI_Data_Name_3.setObjectName("label_MRI_Data_Name_3")
self.gridLayout_34.addWidget(self.label_MRI_Data_Name_3, 3, 0, 1, 1)
self.lineEdit_MRI_Data_Name = QtWidgets.QLineEdit(self.groupBox_2)
font = QtGui.QFont()
self.lineEdit_MRI_Data_Name.setFont(font)
self.lineEdit_MRI_Data_Name.setInputMask("")
self.lineEdit_MRI_Data_Name.setObjectName("lineEdit_MRI_Data_Name")
self.gridLayout_34.addWidget(self.lineEdit_MRI_Data_Name, 0, 1, 1, 1)
self.label_Default_Material = QtWidgets.QLabel(self.groupBox_2)
font = QtGui.QFont()
self.label_Default_Material.setFont(font)
self.label_Default_Material.setObjectName("label_Default_Material")
self.gridLayout_34.addWidget(self.label_Default_Material, 7, 0, 1, 1)
self.checkBox_MRI_m = QtWidgets.QCheckBox(self.groupBox_2)
font = QtGui.QFont()
self.checkBox_MRI_m.setFont(font)
self.checkBox_MRI_m.setToolTip("")
self.checkBox_MRI_m.setText("")
self.checkBox_MRI_m.setObjectName("checkBox_MRI_m")
self.gridLayout_34.addWidget(self.checkBox_MRI_m, 1, 1, 1, 1)
self.lineEdit_DTI_Data_Name = QtWidgets.QLineEdit(self.groupBox_2)
font = QtGui.QFont()
self.lineEdit_DTI_Data_Name.setFont(font)
self.lineEdit_DTI_Data_Name.setObjectName("lineEdit_DTI_Data_Name")
self.gridLayout_34.addWidget(self.lineEdit_DTI_Data_Name, 2, 1, 1, 1)
self.checkBox_DTI_m = QtWidgets.QCheckBox(self.groupBox_2)
font = QtGui.QFont()
self.checkBox_DTI_m.setFont(font)
self.checkBox_DTI_m.setText("")
self.checkBox_DTI_m.setObjectName("checkBox_DTI_m")
self.gridLayout_34.addWidget(self.checkBox_DTI_m, 3, 1, 1, 1)
self.doubleSpinBox_CSF_Index = QtWidgets.QDoubleSpinBox(self.groupBox_2)
font = QtGui.QFont()
self.doubleSpinBox_CSF_Index.setFont(font)
self.doubleSpinBox_CSF_Index.setLocale(QtCore.QLocale(QtCore.QLocale.English, QtCore.QLocale.UnitedKingdom))
self.doubleSpinBox_CSF_Index.setDecimals(4)
self.doubleSpinBox_CSF_Index.setMinimum(0.0)
self.doubleSpinBox_CSF_Index.setMaximum(1e+50)
self.doubleSpinBox_CSF_Index.setObjectName("doubleSpinBox_CSF_Index")
self.gridLayout_34.addWidget(self.doubleSpinBox_CSF_Index, 4, 1, 1, 1)
self.doubleSpinBox_WM_Index = QtWidgets.QDoubleSpinBox(self.groupBox_2)
font = QtGui.QFont()
self.doubleSpinBox_WM_Index.setFont(font)
self.doubleSpinBox_WM_Index.setLocale(QtCore.QLocale(QtCore.QLocale.English, QtCore.QLocale.UnitedKingdom))
self.doubleSpinBox_WM_Index.setDecimals(4)
self.doubleSpinBox_WM_Index.setMinimum(0.0)
self.doubleSpinBox_WM_Index.setMaximum(1e+72)
self.doubleSpinBox_WM_Index.setObjectName("doubleSpinBox_WM_Index")
self.gridLayout_34.addWidget(self.doubleSpinBox_WM_Index, 5, 1, 1, 1)
self.doubleSpinBox_GM_Index = QtWidgets.QDoubleSpinBox(self.groupBox_2)
font = QtGui.QFont()
self.doubleSpinBox_GM_Index.setFont(font)
self.doubleSpinBox_GM_Index.setLocale(QtCore.QLocale(QtCore.QLocale.English, QtCore.QLocale.UnitedKingdom))
self.doubleSpinBox_GM_Index.setInputMethodHints(QtCore.Qt.ImhFormattedNumbersOnly)
self.doubleSpinBox_GM_Index.setDecimals(4)
self.doubleSpinBox_GM_Index.setMinimum(0.0)
self.doubleSpinBox_GM_Index.setMaximum(1e+75)
self.doubleSpinBox_GM_Index.setObjectName("doubleSpinBox_GM_Index")
self.gridLayout_34.addWidget(self.doubleSpinBox_GM_Index, 6, 1, 1, 1)
self.comboBox_Default_Material = QtWidgets.QComboBox(self.groupBox_2)
font = QtGui.QFont()
self.comboBox_Default_Material.setFont(font)
self.comboBox_Default_Material.setObjectName("comboBox_Default_Material")
self.comboBox_Default_Material.addItem("")
self.comboBox_Default_Material.addItem("")
self.comboBox_Default_Material.addItem("")
self.gridLayout_34.addWidget(self.comboBox_Default_Material, 7, 1, 1, 1)
self.pushButton_MRI_mm = QtWidgets.QPushButton(self.groupBox_2)
self.pushButton_MRI_mm.setText("")
self.pushButton_MRI_mm.setIcon(icon9)
self.pushButton_MRI_mm.setIconSize(QtCore.QSize(24, 24))
self.pushButton_MRI_mm.setFlat(True)
self.pushButton_MRI_mm.setObjectName("pushButton_MRI_mm")
self.gridLayout_34.addWidget(self.pushButton_MRI_mm, 1, 2, 1, 1)
self.pushButton_DTI_Data_Name = QtWidgets.QPushButton(self.groupBox_2)
self.pushButton_DTI_Data_Name.setText("")
self.pushButton_DTI_Data_Name.setIcon(icon9)
self.pushButton_DTI_Data_Name.setIconSize(QtCore.QSize(24, 24))
self.pushButton_DTI_Data_Name.setFlat(True)
self.pushButton_DTI_Data_Name.setObjectName("pushButton_DTI_Data_Name")
self.gridLayout_34.addWidget(self.pushButton_DTI_Data_Name, 2, 2, 1, 1)
self.pushButton_DTI_mm = QtWidgets.QPushButton(self.groupBox_2)
self.pushButton_DTI_mm.setText("")
self.pushButton_DTI_mm.setIcon(icon9)
self.pushButton_DTI_mm.setIconSize(QtCore.QSize(24, 24))
self.pushButton_DTI_mm.setFlat(True)
self.pushButton_DTI_mm.setObjectName("pushButton_DTI_mm")
self.gridLayout_34.addWidget(self.pushButton_DTI_mm, 3, 2, 1, 1)
self.pushButton_CSF_index = QtWidgets.QPushButton(self.groupBox_2)
self.pushButton_CSF_index.setText("")
self.pushButton_CSF_index.setIcon(icon9)
self.pushButton_CSF_index.setIconSize(QtCore.QSize(24, 24))
self.pushButton_CSF_index.setFlat(True)
self.pushButton_CSF_index.setObjectName("pushButton_CSF_index")
self.gridLayout_34.addWidget(self.pushButton_CSF_index, 4, 2, 1, 1)
self.pushButton_WM_Index = QtWidgets.QPushButton(self.groupBox_2)
self.pushButton_WM_Index.setText("")
self.pushButton_WM_Index.setIcon(icon9)
self.pushButton_WM_Index.setIconSize(QtCore.QSize(24, 24))
self.pushButton_WM_Index.setFlat(True)
self.pushButton_WM_Index.setObjectName("pushButton_WM_Index")
self.gridLayout_34.addWidget(self.pushButton_WM_Index, 5, 2, 1, 1)
self.pushButton_WM_Index_2 = QtWidgets.QPushButton(self.groupBox_2)
self.pushButton_WM_Index_2.setText("")
self.pushButton_WM_Index_2.setIcon(icon9)
self.pushButton_WM_Index_2.setIconSize(QtCore.QSize(24, 24))
self.pushButton_WM_Index_2.setFlat(True)
self.pushButton_WM_Index_2.setObjectName("pushButton_WM_Index_2")
self.gridLayout_34.addWidget(self.pushButton_WM_Index_2, 6, 2, 1, 1)
self.pushButton_Default_Material = QtWidgets.QPushButton(self.groupBox_2)
self.pushButton_Default_Material.setText("")
self.pushButton_Default_Material.setIcon(icon9)
self.pushButton_Default_Material.setIconSize(QtCore.QSize(24, 24))
self.pushButton_Default_Material.setFlat(True)
self.pushButton_Default_Material.setObjectName("pushButton_Default_Material")
self.gridLayout_34.addWidget(self.pushButton_Default_Material, 7, 2, 1, 1)
self.gridLayout_8.addLayout(self.gridLayout_34, 0, 0, 1, 1)
self.gridLayout_7.addWidget(self.groupBox_2, 0, 0, 1, 1)
spacerItem21 = QtWidgets.QSpacerItem(20, 40, QtWidgets.QSizePolicy.Minimum, QtWidgets.QSizePolicy.Expanding)
self.gridLayout_7.addItem(spacerItem21, 1, 0, 1, 1)
self.toolBox_2.addItem(self.page_2, "")
self.page_3 = QtWidgets.QWidget()
self.page_3.setGeometry(QtCore.QRect(0, 0, 536, 349))
self.page_3.setObjectName("page_3")
self.gridLayout_12 = QtWidgets.QGridLayout(self.page_3)
self.gridLayout_12.setObjectName("gridLayout_12")
self.groupBox_3 = QtWidgets.QGroupBox(self.page_3)
self.groupBox_3.setTitle("")
self.groupBox_3.setObjectName("groupBox_3")
self.gridLayout_14 = QtWidgets.QGridLayout(self.groupBox_3)
self.gridLayout_14.setObjectName("gridLayout_14")
self.gridLayout_35 = QtWidgets.QGridLayout()
self.gridLayout_35.setObjectName("gridLayout_35")
self.pushButton_Implantation_Coordinate_X = QtWidgets.QPushButton(self.groupBox_3)
self.pushButton_Implantation_Coordinate_X.setText("")
self.pushButton_Implantation_Coordinate_X.setIcon(icon9)
self.pushButton_Implantation_Coordinate_X.setIconSize(QtCore.QSize(24, 24))
self.pushButton_Implantation_Coordinate_X.setFlat(True)
self.pushButton_Implantation_Coordinate_X.setObjectName("pushButton_Implantation_Coordinate_X")
self.gridLayout_35.addWidget(self.pushButton_Implantation_Coordinate_X, 1, 3, 1, 1)
self.label_2nd_Point_On_Lead_Y = QtWidgets.QLabel(self.groupBox_3)
font = QtGui.QFont()
self.label_2nd_Point_On_Lead_Y.setFont(font)
self.label_2nd_Point_On_Lead_Y.setObjectName("label_2nd_Point_On_Lead_Y")
self.gridLayout_35.addWidget(self.label_2nd_Point_On_Lead_Y, 5, 0, 1, 1)
self.label_Implantation_Coordinate_X = QtWidgets.QLabel(self.groupBox_3)
font = QtGui.QFont()
self.label_Implantation_Coordinate_X.setFont(font)
self.label_Implantation_Coordinate_X.setObjectName("label_Implantation_Coordinate_X")
self.gridLayout_35.addWidget(self.label_Implantation_Coordinate_X, 1, 0, 1, 1)
self.label_12 = QtWidgets.QLabel(self.groupBox_3)
font = QtGui.QFont()
self.label_12.setFont(font)
self.label_12.setObjectName("label_12")
self.gridLayout_35.addWidget(self.label_12, 2, 2, 1, 1)
self.label_Implantation_Coordinate_Y = QtWidgets.QLabel(self.groupBox_3)
font = QtGui.QFont()
self.label_Implantation_Coordinate_Y.setFont(font)
self.label_Implantation_Coordinate_Y.setObjectName("label_Implantation_Coordinate_Y")
self.gridLayout_35.addWidget(self.label_Implantation_Coordinate_Y, 2, 0, 1, 1)
self.label_2nd_Point_On_Lead_X = QtWidgets.QLabel(self.groupBox_3)
font = QtGui.QFont()
self.label_2nd_Point_On_Lead_X.setFont(font)
self.label_2nd_Point_On_Lead_X.setObjectName("label_2nd_Point_On_Lead_X")
self.gridLayout_35.addWidget(self.label_2nd_Point_On_Lead_X, 4, 0, 1, 1)
self.pushButton_Electrode_Type = QtWidgets.QPushButton(self.groupBox_3)
self.pushButton_Electrode_Type.setMinimumSize(QtCore.QSize(0, 16))
font = QtGui.QFont()
self.pushButton_Electrode_Type.setFont(font)
self.pushButton_Electrode_Type.setText("")
self.pushButton_Electrode_Type.setIcon(icon9)
self.pushButton_Electrode_Type.setIconSize(QtCore.QSize(24, 24))
self.pushButton_Electrode_Type.setFlat(True)
self.pushButton_Electrode_Type.setObjectName("pushButton_Electrode_Type")
self.gridLayout_35.addWidget(self.pushButton_Electrode_Type, 0, 3, 1, 1)
self.pushButton_Implantation_Coordinate_Y = QtWidgets.QPushButton(self.groupBox_3)
self.pushButton_Implantation_Coordinate_Y.setText("")
self.pushButton_Implantation_Coordinate_Y.setIcon(icon9)
self.pushButton_Implantation_Coordinate_Y.setIconSize(QtCore.QSize(24, 24))
self.pushButton_Implantation_Coordinate_Y.setFlat(True)
self.pushButton_Implantation_Coordinate_Y.setObjectName("pushButton_Implantation_Coordinate_Y")
self.gridLayout_35.addWidget(self.pushButton_Implantation_Coordinate_Y, 2, 3, 1, 1)
self.comboBox_Electrode_Type = QtWidgets.QComboBox(self.groupBox_3)
font = QtGui.QFont()
self.comboBox_Electrode_Type.setFont(font)
self.comboBox_Electrode_Type.setObjectName("comboBox_Electrode_Type")
self.comboBox_Electrode_Type.addItem("")
self.comboBox_Electrode_Type.addItem("")
self.comboBox_Electrode_Type.addItem("")
self.comboBox_Electrode_Type.addItem("")
self.comboBox_Electrode_Type.addItem("")
self.comboBox_Electrode_Type.addItem("")
self.comboBox_Electrode_Type.addItem("")
self.comboBox_Electrode_Type.addItem("")
self.comboBox_Electrode_Type.addItem("")
self.comboBox_Electrode_Type.addItem("")
self.comboBox_Electrode_Type.addItem("")
self.comboBox_Electrode_Type.addItem("")
self.comboBox_Electrode_Type.addItem("")
self.gridLayout_35.addWidget(self.comboBox_Electrode_Type, 0, 1, 1, 1)
self.label_Turn_Around_Lead_Axis = QtWidgets.QLabel(self.groupBox_3)
font = QtGui.QFont()
self.label_Turn_Around_Lead_Axis.setFont(font)
self.label_Turn_Around_Lead_Axis.setObjectName("label_Turn_Around_Lead_Axis")
self.gridLayout_35.addWidget(self.label_Turn_Around_Lead_Axis, 7, 0, 1, 1)
self.label_Electrode_Type = QtWidgets.QLabel(self.groupBox_3)
font = QtGui.QFont()
self.label_Electrode_Type.setFont(font)
self.label_Electrode_Type.setObjectName("label_Electrode_Type")
self.gridLayout_35.addWidget(self.label_Electrode_Type, 0, 0, 1, 1)
self.doubleSpinBox_Implantation_Coordinate_Y = QtWidgets.QDoubleSpinBox(self.groupBox_3)
font = QtGui.QFont()
self.doubleSpinBox_Implantation_Coordinate_Y.setFont(font)
self.doubleSpinBox_Implantation_Coordinate_Y.setLocale(QtCore.QLocale(QtCore.QLocale.English, QtCore.QLocale.UnitedKingdom))
self.doubleSpinBox_Implantation_Coordinate_Y.setDecimals(4)
self.doubleSpinBox_Implantation_Coordinate_Y.setMinimum(-1e+61)
self.doubleSpinBox_Implantation_Coordinate_Y.setMaximum(1e+77)
self.doubleSpinBox_Implantation_Coordinate_Y.setObjectName("doubleSpinBox_Implantation_Coordinate_Y")
self.gridLayout_35.addWidget(self.doubleSpinBox_Implantation_Coordinate_Y, 2, 1, 1, 1)
self.label_11 = QtWidgets.QLabel(self.groupBox_3)
font = QtGui.QFont()
self.label_11.setFont(font)
self.label_11.setObjectName("label_11")
self.gridLayout_35.addWidget(self.label_11, 1, 2, 1, 1)
self.doubleSpinBox_Implantation_Coordinate_X = QtWidgets.QDoubleSpinBox(self.groupBox_3)
font = QtGui.QFont()
self.doubleSpinBox_Implantation_Coordinate_X.setFont(font)
self.doubleSpinBox_Implantation_Coordinate_X.setLocale(QtCore.QLocale(QtCore.QLocale.English, QtCore.QLocale.UnitedKingdom))
self.doubleSpinBox_Implantation_Coordinate_X.setDecimals(4)
self.doubleSpinBox_Implantation_Coordinate_X.setMinimum(-1e+57)
self.doubleSpinBox_Implantation_Coordinate_X.setMaximum(1e+76)
self.doubleSpinBox_Implantation_Coordinate_X.setObjectName("doubleSpinBox_Implantation_Coordinate_X")
self.gridLayout_35.addWidget(self.doubleSpinBox_Implantation_Coordinate_X, 1, 1, 1, 1)
self.doubleSpinBox_Implantation_Coordinate_Z = QtWidgets.QDoubleSpinBox(self.groupBox_3)
font = QtGui.QFont()
self.doubleSpinBox_Implantation_Coordinate_Z.setFont(font)
self.doubleSpinBox_Implantation_Coordinate_Z.setLocale(QtCore.QLocale(QtCore.QLocale.English, QtCore.QLocale.UnitedKingdom))
self.doubleSpinBox_Implantation_Coordinate_Z.setDecimals(4)
self.doubleSpinBox_Implantation_Coordinate_Z.setMinimum(-1e+64)
self.doubleSpinBox_Implantation_Coordinate_Z.setMaximum(1e+63)
self.doubleSpinBox_Implantation_Coordinate_Z.setObjectName("doubleSpinBox_Implantation_Coordinate_Z")
self.gridLayout_35.addWidget(self.doubleSpinBox_Implantation_Coordinate_Z, 3, 1, 1, 1)
self.label_Implantation_Coordinate_Z = QtWidgets.QLabel(self.groupBox_3)
font = QtGui.QFont()
self.label_Implantation_Coordinate_Z.setFont(font)
self.label_Implantation_Coordinate_Z.setObjectName("label_Implantation_Coordinate_Z")
self.gridLayout_35.addWidget(self.label_Implantation_Coordinate_Z, 3, 0, 1, 1)
self.label_2nd_Point_On_Lead_Z = QtWidgets.QLabel(self.groupBox_3)
font = QtGui.QFont()
self.label_2nd_Point_On_Lead_Z.setFont(font)
self.label_2nd_Point_On_Lead_Z.setObjectName("label_2nd_Point_On_Lead_Z")
self.gridLayout_35.addWidget(self.label_2nd_Point_On_Lead_Z, 6, 0, 1, 1)
self.label_13 = QtWidgets.QLabel(self.groupBox_3)
font = QtGui.QFont()
self.label_13.setFont(font)
self.label_13.setObjectName("label_13")
self.gridLayout_35.addWidget(self.label_13, 3, 2, 1, 1)
self.doubleSpinBox_2nd_Point_On_Lead_X = QtWidgets.QDoubleSpinBox(self.groupBox_3)
font = QtGui.QFont()
self.doubleSpinBox_2nd_Point_On_Lead_X.setFont(font)
self.doubleSpinBox_2nd_Point_On_Lead_X.setLocale(QtCore.QLocale(QtCore.QLocale.English, QtCore.QLocale.UnitedKingdom))
self.doubleSpinBox_2nd_Point_On_Lead_X.setDecimals(4)
self.doubleSpinBox_2nd_Point_On_Lead_X.setMinimum(-1e+73)
self.doubleSpinBox_2nd_Point_On_Lead_X.setMaximum(1e+66)
self.doubleSpinBox_2nd_Point_On_Lead_X.setObjectName("doubleSpinBox_2nd_Point_On_Lead_X")
self.gridLayout_35.addWidget(self.doubleSpinBox_2nd_Point_On_Lead_X, 4, 1, 1, 1)
self.label_14 = QtWidgets.QLabel(self.groupBox_3)
font = QtGui.QFont()
self.label_14.setFont(font)
self.label_14.setObjectName("label_14")
self.gridLayout_35.addWidget(self.label_14, 4, 2, 1, 1)
self.doubleSpinBox_2nd_Point_On_Lead_Y = QtWidgets.QDoubleSpinBox(self.groupBox_3)
font = QtGui.QFont()
self.doubleSpinBox_2nd_Point_On_Lead_Y.setFont(font)
self.doubleSpinBox_2nd_Point_On_Lead_Y.setLocale(QtCore.QLocale(QtCore.QLocale.English, QtCore.QLocale.UnitedKingdom))
self.doubleSpinBox_2nd_Point_On_Lead_Y.setInputMethodHints(QtCore.Qt.ImhFormattedNumbersOnly)
self.doubleSpinBox_2nd_Point_On_Lead_Y.setDecimals(4)
self.doubleSpinBox_2nd_Point_On_Lead_Y.setMinimum(-1e+61)
self.doubleSpinBox_2nd_Point_On_Lead_Y.setMaximum(1e+67)
self.doubleSpinBox_2nd_Point_On_Lead_Y.setObjectName("doubleSpinBox_2nd_Point_On_Lead_Y")
self.gridLayout_35.addWidget(self.doubleSpinBox_2nd_Point_On_Lead_Y, 5, 1, 1, 1)
self.label_15 = QtWidgets.QLabel(self.groupBox_3)
font = QtGui.QFont()
self.label_15.setFont(font)
self.label_15.setObjectName("label_15")
self.gridLayout_35.addWidget(self.label_15, 5, 2, 1, 1)
self.doubleSpinBox_2nd_Point_On_Lead_Z = QtWidgets.QDoubleSpinBox(self.groupBox_3)
font = QtGui.QFont()
self.doubleSpinBox_2nd_Point_On_Lead_Z.setFont(font)
self.doubleSpinBox_2nd_Point_On_Lead_Z.setLocale(QtCore.QLocale(QtCore.QLocale.English, QtCore.QLocale.UnitedKingdom))
self.doubleSpinBox_2nd_Point_On_Lead_Z.setDecimals(4)
self.doubleSpinBox_2nd_Point_On_Lead_Z.setMinimum(-1e+68)
self.doubleSpinBox_2nd_Point_On_Lead_Z.setMaximum(1e+61)
self.doubleSpinBox_2nd_Point_On_Lead_Z.setObjectName("doubleSpinBox_2nd_Point_On_Lead_Z")
self.gridLayout_35.addWidget(self.doubleSpinBox_2nd_Point_On_Lead_Z, 6, 1, 1, 1)
self.label_16 = QtWidgets.QLabel(self.groupBox_3)
font = QtGui.QFont()
self.label_16.setFont(font)
self.label_16.setObjectName("label_16")
self.gridLayout_35.addWidget(self.label_16, 6, 2, 1, 1)
self.doubleSpinBox_Turn_Around_Lead_Axis = QtWidgets.QDoubleSpinBox(self.groupBox_3)
font = QtGui.QFont()
self.doubleSpinBox_Turn_Around_Lead_Axis.setFont(font)
self.doubleSpinBox_Turn_Around_Lead_Axis.setLocale(QtCore.QLocale(QtCore.QLocale.English, QtCore.QLocale.UnitedKingdom))
self.doubleSpinBox_Turn_Around_Lead_Axis.setDecimals(2)
self.doubleSpinBox_Turn_Around_Lead_Axis.setMinimum(-1222222222222222.0)
self.doubleSpinBox_Turn_Around_Lead_Axis.setMaximum(1e+39)
self.doubleSpinBox_Turn_Around_Lead_Axis.setSingleStep(5.0)
self.doubleSpinBox_Turn_Around_Lead_Axis.setProperty("value", 20.0)
self.doubleSpinBox_Turn_Around_Lead_Axis.setObjectName("doubleSpinBox_Turn_Around_Lead_Axis")
self.gridLayout_35.addWidget(self.doubleSpinBox_Turn_Around_Lead_Axis, 7, 1, 1, 1)
self.label_39 = QtWidgets.QLabel(self.groupBox_3)
font = QtGui.QFont()
self.label_39.setFont(font)
self.label_39.setObjectName("label_39")
self.gridLayout_35.addWidget(self.label_39, 7, 2, 1, 1)
self.pushButton_Implantation_Coordinate_Z = QtWidgets.QPushButton(self.groupBox_3)
self.pushButton_Implantation_Coordinate_Z.setText("")
self.pushButton_Implantation_Coordinate_Z.setIcon(icon9)
self.pushButton_Implantation_Coordinate_Z.setIconSize(QtCore.QSize(24, 24))
self.pushButton_Implantation_Coordinate_Z.setFlat(True)
self.pushButton_Implantation_Coordinate_Z.setObjectName("pushButton_Implantation_Coordinate_Z")
self.gridLayout_35.addWidget(self.pushButton_Implantation_Coordinate_Z, 3, 3, 1, 1)
self.pushButton_2nd_Point_On_Lead_X = QtWidgets.QPushButton(self.groupBox_3)
self.pushButton_2nd_Point_On_Lead_X.setText("")
self.pushButton_2nd_Point_On_Lead_X.setIcon(icon9)
self.pushButton_2nd_Point_On_Lead_X.setIconSize(QtCore.QSize(24, 24))
self.pushButton_2nd_Point_On_Lead_X.setFlat(True)
self.pushButton_2nd_Point_On_Lead_X.setObjectName("pushButton_2nd_Point_On_Lead_X")
self.gridLayout_35.addWidget(self.pushButton_2nd_Point_On_Lead_X, 4, 3, 1, 1)
self.pushButton_2nd_Point_On_Lead_Y = QtWidgets.QPushButton(self.groupBox_3)
self.pushButton_2nd_Point_On_Lead_Y.setText("")
self.pushButton_2nd_Point_On_Lead_Y.setIcon(icon9)
self.pushButton_2nd_Point_On_Lead_Y.setIconSize(QtCore.QSize(24, 24))
self.pushButton_2nd_Point_On_Lead_Y.setFlat(True)
self.pushButton_2nd_Point_On_Lead_Y.setObjectName("pushButton_2nd_Point_On_Lead_Y")
self.gridLayout_35.addWidget(self.pushButton_2nd_Point_On_Lead_Y, 5, 3, 1, 1)
self.pushButton_2nd_Point_On_Lead_Z = QtWidgets.QPushButton(self.groupBox_3)
self.pushButton_2nd_Point_On_Lead_Z.setText("")
self.pushButton_2nd_Point_On_Lead_Z.setIcon(icon9)
self.pushButton_2nd_Point_On_Lead_Z.setIconSize(QtCore.QSize(24, 24))
self.pushButton_2nd_Point_On_Lead_Z.setFlat(True)
self.pushButton_2nd_Point_On_Lead_Z.setObjectName("pushButton_2nd_Point_On_Lead_Z")
self.gridLayout_35.addWidget(self.pushButton_2nd_Point_On_Lead_Z, 6, 3, 1, 1)
self.pushButton_2nd_Point_On_Lead_Z_2 = QtWidgets.QPushButton(self.groupBox_3)
self.pushButton_2nd_Point_On_Lead_Z_2.setText("")
self.pushButton_2nd_Point_On_Lead_Z_2.setIcon(icon9)
self.pushButton_2nd_Point_On_Lead_Z_2.setIconSize(QtCore.QSize(24, 24))
self.pushButton_2nd_Point_On_Lead_Z_2.setFlat(True)
self.pushButton_2nd_Point_On_Lead_Z_2.setObjectName("pushButton_2nd_Point_On_Lead_Z_2")
self.gridLayout_35.addWidget(self.pushButton_2nd_Point_On_Lead_Z_2, 7, 3, 1, 1)
self.gridLayout_14.addLayout(self.gridLayout_35, 0, 0, 1, 1)
self.gridLayout_12.addWidget(self.groupBox_3, 0, 0, 1, 1)
spacerItem22 = QtWidgets.QSpacerItem(40, 20, QtWidgets.QSizePolicy.Expanding, QtWidgets.QSizePolicy.Minimum)
self.gridLayout_12.addItem(spacerItem22, 0, 1, 1, 1)
spacerItem23 = QtWidgets.QSpacerItem(20, 40, QtWidgets.QSizePolicy.Minimum, QtWidgets.QSizePolicy.Expanding)
self.gridLayout_12.addItem(spacerItem23, 1, 0, 1, 1)
self.toolBox_2.addItem(self.page_3, "")
self.gridLayout_6.addWidget(self.toolBox_2, 0, 0, 1, 1)
self.line_2 = QtWidgets.QFrame(self.Simulation_Parameters)
self.line_2.setFrameShape(QtWidgets.QFrame.HLine)
self.line_2.setFrameShadow(QtWidgets.QFrame.Sunken)
self.line_2.setObjectName("line_2")
self.gridLayout_6.addWidget(self.line_2, 1, 0, 1, 1)
self.stackedWidget.addWidget(self.Simulation_Parameters)
self.gridLayout.addWidget(self.stackedWidget, 0, 1, 1, 1)
MainWindow.setCentralWidget(self.centralwidget)
self.retranslateUi(MainWindow)
self.stackedWidget.setCurrentIndex(0)
self.toolBox.setCurrentIndex(0)
self.toolBox_2.setCurrentIndex(0)
QtCore.QMetaObject.connectSlotsByName(MainWindow)
def retranslateUi(self, MainWindow):
_translate = QtCore.QCoreApplication.translate
MainWindow.setWindowTitle(_translate("MainWindow", "OSS-DBS v0.3 for Lead-DBS"))
self.label_7.setText(_translate("MainWindow", "Icons by <a href=\"https://adioma.com\">Adioma</a> "))
self.pushButton_Load.setText(_translate("MainWindow", " Load "))
self.pushButton_SaveAs.setText(_translate("MainWindow", " Save As "))
self.pushButton_Reset.setText(_translate("MainWindow", " Reset "))
self.pushButton_Run.setText(_translate("MainWindow", " Run "))
self.treeWidget_Project_Browser.headerItem().setText(0, _translate("MainWindow", "Project Browser"))
__sortingEnabled = self.treeWidget_Project_Browser.isSortingEnabled()
self.treeWidget_Project_Browser.setSortingEnabled(False)
self.treeWidget_Project_Browser.topLevelItem(0).setText(0, _translate("MainWindow", "Simulation State"))
self.treeWidget_Project_Browser.topLevelItem(1).setText(0, _translate("MainWindow", "Simulation Setup"))
self.treeWidget_Project_Browser.topLevelItem(2).setText(0, _translate("MainWindow", "Visualization"))
self.treeWidget_Project_Browser.topLevelItem(5).setText(0, _translate("MainWindow", "Advanced Simulation Parameters"))
self.treeWidget_Project_Browser.setSortingEnabled(__sortingEnabled)
self.pageSimulationState.setAccessibleName(_translate("MainWindow", "Page Simulation"))
self.label_47.setText(_translate("MainWindow", "CSF Refinement done"))
self.label_45.setText(_translate("MainWindow", "Adjusted Neuron Array ready"))
self.pushButton_Neuron_Model_Ready.setToolTip(_translate("MainWindow", "<html><head/><body><p><span style=\" font-family:\'wf_segoe-ui_normal,Segoe UI,Segoe WP,Tahoma,Arial,sans-serif,serif,EmojiFont\'; font-size:14pt; color:#212121; background-color:#ffffff;\">Check if Neuron array was already adjusted and Vert_of_Neural_model_NEURON.csv was created in Neuron_model_arrays/.</span></p></body></html>"))
self.pushButton_Init_Neuron_Model_Ready.setToolTip(_translate("MainWindow", "<html><head/><body><p><span style=\" font-size:14pt;\">Check if neuron array was not modified, and if All_neuron_models.csv and Neuron_model_misc.csv in Neuron_model_arrays/ are already created.</span></p><p><span style=\" font-size:14pt; font-weight:600;\">Important</span><span style=\" font-size:14pt;\">: This step is also required for external neuron arrays to create meta data.</span></p></body></html>"))
self.label_48.setText(_translate("MainWindow", "Adaptive Refinement done"))
self.pushButton_Init_Mesh_Ready.setToolTip(_translate("MainWindow", "<html><head/><body><p><span style=\" font-size:14pt;\">Check if geometry, initial mesh requirements, electrode and signal vector were not modifed, and if Mesh_unref.xml, Mesh_unref_facet_region.xml and Mesh_unref_physical_region.xml in Meshes/ are already created.</span></p></body></html>"))
self.label_50.setText(_translate("MainWindow", "Continue Interrupted Computations"))
self.label_42.setText(_translate("MainWindow", "DTI Data ready"))
self.label_41.setText(_translate("MainWindow", "MRI Data ready"))
self.label_43.setText(_translate("MainWindow", "Initial Neuron Array ready"))
self.label_49.setText(_translate("MainWindow", "Computations in Spectrum done"))
self.label_44.setText(_translate("MainWindow", "Geometry and Initial Mesh ready"))
self.pushButton_CSF_Mesh_Ready.setToolTip(_translate("MainWindow", "<html><head/><body><p><span style=\" font-size:14pt;\">Check if the mesh has already undergone CSF refinement, and CSF_ref/mesh_adapt_CSF(Scaling_number).xml.gz was saved in CSF_ref/. Not relevant if mesh refinement is skipped.</span></p></body></html>"))
self.label_46.setText(_translate("MainWindow", "Signal Generation done"))
self.pushButton_Voxel_orr_MRI.setToolTip(_translate("MainWindow", "<html><head/><body><p><span style=\" font-size:14pt;\">Check if provided MRI data was already processed by the platform and corresponding files were saved in MRI_DTI_derived/.</span></p></body></html>"))
self.checkBox_Init_Mesh_Ready.setToolTip(_translate("MainWindow", "If initial mesh files were already prepared, folder \"Meshes\" will be preserved"))
self.pushButton_Parallel_Computing_Interrupted.setToolTip(_translate("MainWindow", "<html><head/><body><p><span style=\" font-size:14pt;\">Check if parallel computations in the frequency spectrum were interrupted. OSS Platform will continue from the last frequency pack.</span></p><p><span style=\" font-size:14pt; font-weight:600;\">Important</span><span style=\" font-size:14pt;\">: Currently, you can\'t change the size of frequency pack, i.e. number of processors.</span></p></body></html>"))
self.label_51.setText(_translate("MainWindow", "Scaling and IFFT done"))
self.pushButton_Adapted_Mesh_Ready.setToolTip(_translate("MainWindow", "<html><head/><body><p><span style=\" font-size:14pt;\">Check if the mesh has undergone adaptive refinement, and mesh_adapt.xml.gz was saved in Results_adaptive/. Not relevant if mesh refinement is skipped.</span></p></body></html>"))
self.pushButton_Signl_Generation_Ready.setToolTip(_translate("MainWindow", "<html><head/><body><p><span style=\" font-size:14pt;\">Check if the DBS signal was already created. Important: If the amplitude of the signal on contacts is changed proportionally (so that the field shape is preserved), you can exploit linear properties of the system and change the amplitude with \'Signal Scaling\' instead.</span></p><p><span style=\" font-size:14pt; font-weight:600;\">Important</span><span style=\" font-size:14pt;\">: The Signal Scaling can be applied in voltage-controlled stimulation or current-controlled stimulation with two sources (including the ground).</span></p></body></html>"))
self.pushButton_IFFT_Ready.setToolTip(_translate("MainWindow", "<html><head/><body><p><span style=\" font-size:14pt;\">Check if Field scaling and IFFT were already completed.</span></p></body></html>"))
self.pushButton_Voxel_orr_DTI.setToolTip(_translate("MainWindow", "<html><head/><body><p><span style=\" font-size:14pt;\">Check if provided DTI data was already processed by the platform and corresponding files were saved in MRI_DTI_derived/.</span></p><p><span style=\" font-size:14pt; font-weight:600;\">Important</span><span style=\" font-size:14pt;\">: Meta data of DTI is dependent on MRI data, so if MRI data was reprocessed, DTI data should be also reprocessed.</span></p></body></html>"))
self.pushButton_Parallel_Computing_Ready.setToolTip(_translate("MainWindow", "<html><head/><body><p><span style=\" font-size:14pt;\">Check if parallel computations in the frequency spectrum were already fully completed. </span></p></body></html>"))
self.label_Approx_Geom_Centered_On_MRI.setText(_translate("MainWindow", "Approx. Geometry Centered on MRI"))
self.label_X_Length_2.setText(_translate("MainWindow", "Approximating Dimension"))
self.pushButton_Brain_Shape.setToolTip(_translate("MainWindow", "<html><head/><body><p><span style=\" font-size:14pt;\">Provide name of a file with the brain geometry or enter 0 to create an approximation with ellipsoid. The geometry file should be copied to /oss_platform/OSS_platform directory.</span></p><p><span style=\" font-size:14pt; font-weight:600;\">Supported formats</span><span style=\" font-size:14pt;\">: brep/step/iges.</span></p><p><span style=\" font-size:14pt; font-weight:600;\">Important:</span><span style=\" font-size:14pt;\"> Brain geometry should be in the same coordinates as the provided MRI data and in mm!</span></p></body></html>"))
self.lineEdit_Brain_Shape.setPlaceholderText(_translate("MainWindow", "example.step"))
self.label_Brain_Shape.setText(_translate("MainWindow", "Brain geometry file"))
self.label_Dimensions_From_MRI.setText(_translate("MainWindow", "Dimensions from MRI"))
self.label_Approx_Geometry_Center.setText(_translate("MainWindow", "Approx. Geometry Center"))
self.lineEdit_Approximating_Dimensions.setPlaceholderText(_translate("MainWindow", "[0.0, 0.0, 0.0]"))
self.lineEdit_Approx_Geometry_Center.setPlaceholderText(_translate("MainWindow", "[0.0, 0.0, 0.0]"))
self.pushButton_Dimensions_From_MRI.setToolTip(_translate("MainWindow", "<html><head/><body><p><span style=\" font-size:14pt;\">Check if dimensions of the approximating ellipspoid should be defined by inscribing it to the MRI data box.</span></p></body></html>"))
self.pushButton_Approx_Geometry_Center.setToolTip(_translate("MainWindow", "<html><head/><body><p><span style=\" font-size:14pt;\">Provide dimensions (lengths in mm) of the approximating ellipsoid as [x,y,z]. For example, [10.0,12.0,11.0].</span></p></body></html>"))
self.pushButton_X_Length.setToolTip(_translate("MainWindow", "<html><head/><body><p><span style=\" font-size:14pt;\">Provide coordinates of the center point of the approximating ellipsoid as [x,y,z]. For example, [10.0,-0.2,44.6].</span></p><p><span style=\" font-size:14pt; font-weight:600;\">Important</span><span style=\" font-size:14pt;\">: The coordinates should be given in the MRI data space.</span></p></body></html>"))
self.pushButton_Approx_Geometry_Center_2.setToolTip(_translate("MainWindow", "<html><head/><body><p><span style=\" font-size:14pt;\">Check if the approximating ellispoid should be centered on the MRI data box.</span></p></body></html>"))
self.toolBox.setItemText(self.toolBox.indexOf(self.page_4), _translate("MainWindow", "Brain Geometry"))
self.label_26.setText(_translate("MainWindow", "<html><head/><body><p><span style=\" font-style:italic;\">mm</span></p></body></html>"))
self.label_Conductivity_Scaling.setText(_translate("MainWindow", "Conductivity Scaling"))
self.label_19.setText(_translate("MainWindow", "Permittivity Scaling"))
self.label_Encapsulation_Conductivity.setText(_translate("MainWindow", "Encapsulation Tissue Type"))
self.label_CPE_Active.setText(_translate("MainWindow", "CPE Active"))
self.label_Encapsulation_Thickness.setText(_translate("MainWindow", "Encapsulation Thickness"))
self.pushButton_Encapsulation_Thickness.setToolTip(_translate("MainWindow", "<html><head/><body><p><span style=\" font-size:14pt;\">Provide thickness of the encapsulation layer around the electrode lead</span></p></body></html>"))
self.pushButton_CPE_Active.setToolTip(_translate("MainWindow", "<html><head/><body><p><span style=\" font-size:14pt;\">Check if constant phase element should be applied on the active contacts.</span></p><p><span style=\" font-size:14pt; font-weight:600;\">Important</span><span style=\" font-size:14pt;\">: Currently, works only for voltage-controlled mode with two sources (including the ground).</span></p></body></html>"))
self.pushButton_Encapsulation_Conductivity.setToolTip(_translate("MainWindow", "<html><head/><body><p><span style=\" font-size:14pt;\">Choose base material for the encapsulation layer. Base material defines its dispersiveness.</span></p></body></html>"))
self.pushButton_Conductivity_Scaling.setToolTip(_translate("MainWindow", "<html><head/><body><p><span style=\" font-size:14pt;\">Choose scaling of conductivity for the base material.</span></p></body></html>"))
self.pushButton_Encapsulation_Scaling.setToolTip(_translate("MainWindow", "<html><head/><body><p><span style=\" font-size:14pt;\">Choose scaling of permittivity for the base material.</span></p></body></html>"))
self.comboBox_Encapsulation_Tissue_Type.setItemText(0, _translate("MainWindow", "CSF"))
self.comboBox_Encapsulation_Tissue_Type.setItemText(1, _translate("MainWindow", "White Matter"))
self.comboBox_Encapsulation_Tissue_Type.setItemText(2, _translate("MainWindow", "Grey Matter"))
self.toolBox.setItemText(self.toolBox.indexOf(self.page_5), _translate("MainWindow", "Electrode-Tissue Interface"))
self.pushButton_Neuron_Model_Array_Prepared_2.setToolTip(_translate("MainWindow", "<html><head/><body><p><span style=\" font-size:14pt;\">Check if a neuron array was prepared outside of OSS Platform (recommended option for simulations with pathway activation)</span></p></body></html>"))
self.label_V_Init_3.setText(_translate("MainWindow", "External Neuron Array"))
self.label_40.setText(_translate("MainWindow", "<html><head/><body><p><span style=\" font-style:italic;\">μm</span></p></body></html>"))
self.label_Diam_Fib_2.setText(_translate("MainWindow", "Fiber Diameter"))
self.label_V_Init_2.setText(_translate("MainWindow", "Initial Membrane Potential"))
self.label_Pattern_Model_Name_2.setText(_translate("MainWindow", "Pattern Model Name"))
self.lineEdit_Pattern_Model_Name.setPlaceholderText(_translate("MainWindow", "Pattern_model.csv"))
self.lineEdit_Fiber_Diameter.setPlaceholderText(_translate("MainWindow", "[0.0]"))
self.label_N_Ranvier_2.setText(_translate("MainWindow", "Number of Ranvier Nodes"))
self.comboBox_Axon_Model_Type.setItemText(0, _translate("MainWindow", "McIntyre2002"))
self.comboBox_Axon_Model_Type.setItemText(1, _translate("MainWindow", "Reilly2016"))
self.label_9.setText(_translate("MainWindow", "Axon Model Type"))
self.pushButton_Diam_Fib_2.setToolTip(_translate("MainWindow", "<html><head/><body><p><span style=\" font-size:14pt;\">Provide fiber diameter of myelinated axon. Currently acceptable values: 2.0, 3.0, 5.7, 7.3, 8.7, 10.0, 11.5, 12.8, 14.0, 15.0, 16.0. If External neuron array is provided in .h5 format, number of given fiber diameters should equal number of data sets(populations). For example, in STN_pathways.h5 we might have 3 data sets (direct, indirect and hyperdirect), and parameter Fiber Diameter can be given as [5.7,7.3,5.7].</span></p><p><br/></p></body></html>"))
self.pushButton_Axon_Model_Type.setToolTip(_translate("MainWindow", "<html><head/><body><p><span style=\" font-size:14pt;\">Choose a computational model for axon that will be simulated in NEURON. The models have different morphologies: McIntyre2002 explicitely defines different internodal segments, while Reilly2016 lumps it to one internodal compartment. That must be taken into account when an external neuron array is provided.</span></p></body></html>"))
self.pushButton_Pattern_Model_Name_2.setToolTip(_translate("MainWindow", "<html><head/><body><p><span style=\" font-size:14pt;\">Optional. Provide name of a file with pattern for neuron model (not neuron array!). Coordinates of pattern should be in mm, file should be copied to /oss_platform/OSS_platform directory in .csv format.</span></p></body></html>"))
self.lineEdit_N_Ranvier.setPlaceholderText(_translate("MainWindow", "[0]"))
self.label_38.setText(_translate("MainWindow", "mV"))
self.pushButton_N_Ranvier_2.setToolTip(_translate("MainWindow", "<html><head/><body><p><span style=\" font-family:\'wf_segoe-ui_normal,Segoe UI,Segoe WP,Tahoma,Arial,sans-serif,serif,EmojiFont\'; font-size:14pt; color:#212121; background-color:#ffffff;\">Provide number of Ranvier nodes on a single neuron (axon) model. If External neuron array is provided in .h5 format, number of Ranvier nodes can differ among data sets(populations), and the number of entries in \'Number of Ranvier Nodes\' should equal number of data sets (populations). For example, in Hyperdirect_pathway.h5 we might have 3 data sets of axons (STN_SN,STN_GPi,STN_Motor_cortex), and \'Number of Ranvier Nodes\' can be defined as [10,15,30]. </span><span style=\" font-size:14pt;\""))
self.pushButton_V_Init_2.setToolTip(_translate("MainWindow", "<html><head/><body><p><span style=\" font-size:14pt;\">Set up initial membrane potential.</span></p></body></html>"))
self.lineEdit_Name_Prepared_Neuron_Array.setPlaceholderText(_translate("MainWindow", "External_array.h5"))
self.label_Name_Prepared_Neuron_Array_2.setText(_translate("MainWindow", "Name of the External Array"))
self.pushButton_Name_Prepared_Neuron_Array_2.setToolTip(_translate("MainWindow", "<html><head/><body><p><span style=\" font-size:14pt;\">Provide name of the externally prepared array. The file can be in .csv (if morphology of the neurons is the same) or .h5 format, and it should be copied to /oss_platform/OSS_platform directory. Coordinates of the neuron compartments should be in the MRI data space. More in the OSS Platform tutorial.</span></p></body></html>"))
self.toolBox.setItemText(self.toolBox.indexOf(self.page_6), _translate("MainWindow", "Neuron Model"))
self.label_2.setText(_translate("MainWindow", "<html><head/><body><p><span style=\" font-style:italic;\">Hz</span></p></body></html>"))
self.label_Signal_Type.setText(_translate("MainWindow", "Signal Type"))
self.label_Frequency.setText(_translate("MainWindow", "Repetition Rate"))
self.label_T_Step.setText(_translate("MainWindow", "Time Step"))
self.label_Phi.setText(_translate("MainWindow", "Signal Shift"))
self.label_T.setText(_translate("MainWindow", "Pulse Width"))
self.comboBox_Signal_Type.setItemText(0, _translate("MainWindow", "Rectangle"))
self.comboBox_Signal_Type.setItemText(1, _translate("MainWindow", "Decreasing Ramp"))
self.comboBox_Signal_Type.setItemText(2, _translate("MainWindow", "Increasing Ramp"))
self.comboBox_Signal_Type.setItemText(3, _translate("MainWindow", "Central Triangle"))
self.label_Amplitude_Scale.setText(_translate("MainWindow", "Signal Scaling"))
self.pushButton_Frequency.setToolTip(_translate("MainWindow", "<html><head/><body><p><span style=\" font-size:14pt;\">Provide repetition rate of the DBS signal i.e. float number of DBS pulses per second. Typical values are in the range of 100 - 250 Hz.</span></p></body></html>"))
self.pushButton_Signal_Type.setToolTip(_translate("MainWindow", "<html><head/><body><p><span style=\" font-size:14pt;\">Choose type of DBS stimulation signal. The apex in case of ramps and triangular signals is defined by Signal Vector. New signal types will be added in the next version.</span></p></body></html>"))
self.label.setText(_translate("MainWindow", "<html><head/><body><p><span style=\" font-style:italic;\">μs</span></p></body></html>"))
self.label_3.setText(_translate("MainWindow", "<html><head/><body><p><span style=\" font-style:italic;\">μs</span></p></body></html>"))
self.label_4.setText(_translate("MainWindow", "<html><head/><body><p><span style=\" font-style:italic;\">μs</span></p></body></html>"))
self.pushButton_T.setToolTip(_translate("MainWindow", "<html><head/><body><p><span style=\" font-size:14pt;\">Provide width of DBS signal in time. You can check the generated signal in Images/.</span></p></body></html>"))
self.pushButton_T_Step.setToolTip(_translate("MainWindow", "<html><head/><body><p><span style=\" font-size:14pt;\">Provide time step for the signal generation. Large time steps can lead to distorted signals. You can check the accuracy of the generated signal in Images/.</span></p><p><span style=\" font-size:14pt; font-weight:600;\">Important</span><span style=\" font-size:14pt;\">: The same time step will be used for estimation of neuron model activation. We recommend to use 1-5 microsecond step.</span></p></body></html>"))
self.pushButton_Phi.setToolTip(_translate("MainWindow", "<html><head/><body><p><span style=\" font-size:14pt;\">Provide shift of DBS signal in time. You can check the generated signal in Images/.</span></p></body></html>"))
self.pushButton_Amplitude_Scale.setToolTip(_translate("MainWindow", "<html><head/><body><p><span style=\" font-size:14pt;\">Provide scaling of Signal Vector. This can be used when the whole simulation is completed, and we want to check activation with a higher/lower signals. For this, linearity of the system can be utilized and the solution is simply scaled.</span></p><p><span style=\" font-size:14pt; font-weight:600;\">Important</span><span style=\" font-size:14pt;\">: This can be applied only in voltage-controlled stimulation or current-controlled stimulation with two sources (including the ground). Also, the signal on all contacts will be changed proportionally. i.e. field shape will be preserved.</span></p></body></html>"))
self.toolBox.setItemText(self.toolBox.indexOf(self.page_7), _translate("MainWindow", "DBS Signal"))
self.label_Truncate_Already_Obtained_Full_Solution.setText(_translate("MainWindow", "Truncate already Obtained Full Solution"))
self.pushButton_Truncation_Parameter_2.setToolTip(_translate("MainWindow", "<html><head/><body><p><span style=\" font-size:14pt;\">In case of High Amplitude and Cutoff Methods, provide number of frequencies (integer) in the trucated spectrum. For Octave Band Method, provide the frequency, after which calculations in octave bands will be used.</span></p></body></html>"))
self.label_Truncation_Parameter.setText(_translate("MainWindow", "Truncation Parameter"))
self.pushButton_Truncation_Parameter.setToolTip(_translate("MainWindow", "<html><head/><body><p><span style=\" font-size:14pt;\">In case of High Amplitude and Cutoff Methods, provide number of frequencies (integer) in the trucated spectrum. For Octave Band Method, provide the frequency, after which calculations in octave bands will be used.</span></p></body></html>"))
self.label_17.setText(_translate("MainWindow", "Truncation Parameter"))
self.lineEdit_Truncation_Parameter.setPlaceholderText(_translate("MainWindow", "10000.0"))
self.label_21.setText(_translate("MainWindow", "Hz"))
self.pushButton_Truncate_The_Obtained_Full_Solution.setToolTip(_translate("MainWindow", "<html><head/><body><p><span style=\" font-size:14pt;\">Check if the Spectrum Truncation Method will be applied to the already computed full spectrum solution. Can be used only for High Amplitude and Cutoff Methods.</span></p></body></html>"))
self.label_6.setText(_translate("MainWindow", "External grounding"))
self.pushButton_external_grounding.setToolTip(_translate("MainWindow", "<html><head/><body><p><span style=\" font-size:14pt;\">Check if grounding with casing is to be applied </span></p></body></html>"))
self.pushButton_Full_Field_IFFT.setToolTip(_translate("MainWindow", "<html><head/><body><p><span style=\" font-size:14pt;\">Check if IFFT should be conducted for the whole domain. Used for VTA estimation and visualization of extracellular field in time. Recommended if the number of neuron array\'s compartments is bigger than the number of dofs of the mesh. Can take a while, if the mesh is large.</span></p></body></html>"))
self.pushButton_El_Order_2.setToolTip(_translate("MainWindow", "<html><head/><body><p><span style=\" font-size:14pt;\">Choose order of Lagrange element for potential calculations. In case of current-controlled stimulation or simulation with CPE, we recommend to use at least 2nd order elements.</span></p></body></html>"))
self.pushButton_Y_Length_2.setToolTip(_translate("MainWindow", "<html><head/><body><p><span style=\" font-size:14pt;\">Check to solve the FEM system of Laplace equation using parallelization when conducting adaptive mesh refinement.</span></p><p><span style=\" font-size:14pt; font-weight:600;\">Important:</span><span style=\" font-size:14pt;\"> Use MPI with caution. Though generally reliable, the number of processors might require adjustments to achieve a converenge of the solver.</span></p></body></html>"))
self.label_Number_Of_Processors.setText(_translate("MainWindow", "Number of Processors"))
self.label_Skip_Mesh_Refinement.setText(_translate("MainWindow", "Skip Mesh Refinement"))
self.label_20.setText(_translate("MainWindow", "FEniCS MPI"))
#self.pushButton_Number_Of_Processors_2.setToolTip(_translate("MainWindow", "<html><head/><body><p><span style=\" font-size:14pt;\">Choose number of processors that will be used for parallel field calculation in frequency spectrum, IFFT and neuron model simulations, as well as for solving the FEM system of Laplace equation (if FEniCS MPI is checked). We recommend not to choose the amount higher than number of physical cores on your computer/server.</span></p></body></html>"))
self.pushButton_Number_Of_Processors_2.setToolTip(_translate("MainWindow", "<html><head/><body><p><span style=\" font-size:14pt;\">Choose number of processors that will be used for parallel field calculation in frequency spectrum, IFFT and neuron model simulations, as well as for solving the FEM system of Laplace equation (if FEniCS MPI is checked). We recommend not to choose the amount higher than number of physical cores on your computer/server.</span></p><p><span style=\" font-size:14pt; font-weight:600;\">Important</span><span style=\" font-size:14pt;\">: On Linux, the default number is half of all available cores. On other systems, GUI shows 0: by default all cores available for Docker App will be drawn, but users can reduce the number providing it here.</span></p></body></html>"))
self.pushButton_Skip_Mesh_Refinement_2.setToolTip(_translate("MainWindow", "<html><head/><body><p><span style=\" font-size:14pt;\">Check if CSF refinement and adaptive mesh refinement should be skipped. </span></p></body></html>"))
self.label_Laplace_Transformation.setText(_translate("MainWindow", "Laplace Formulation"))
self.label_Full_Field_IFFT.setText(_translate("MainWindow", "Full Field IFFT"))
self.label_El_Order.setText(_translate("MainWindow", "FEM Element Order"))
self.comboBox_Laplace_Formulation.setItemText(0, _translate("MainWindow", "QS"))
self.comboBox_Laplace_Formulation.setItemText(1, _translate("MainWindow", "EQS"))
self.pushButton_Laplace_Formulation_2.setToolTip(_translate("MainWindow", "<html><head/><body><p><span style=\" font-size:14pt;\">Choose which formulation of Laplace equation will be used to calculate electric potential distribution (more details in the publication).</span></p><p><span style=\" font-size:14pt; font-weight:600;\">Note</span><span style=\" font-size:14pt;\">: The quasistatic (QS) formulation is suitable for problems with low dispersive effect and small frequency spectrum. We recommend to run studies with both formulations to estimate quantitative effect of QS approximation, as it is significantly reduces computational costs, which can be crucial for optimization and UQ problems.</span></p></body></html>"))
self.pushButton_Sprectrum_Truncation_Method.setToolTip(_translate("MainWindow", "<html><head/><body><p><span style=\" font-size:14pt;\">Choose truncation method for the DBS signal spectrum to reduce number of computations in frequency domain. More details in the publication.</span></p></body></html>"))
self.comboBox_Spectrum_Truncation_Method.setItemText(0, _translate("MainWindow", "No Truncation"))
self.comboBox_Spectrum_Truncation_Method.setItemText(1, _translate("MainWindow", "High Amplitude Method"))
self.comboBox_Spectrum_Truncation_Method.setItemText(2, _translate("MainWindow", "Cutoff Method"))
self.comboBox_Spectrum_Truncation_Method.setItemText(3, _translate("MainWindow", "Octave Band Method"))
self.label_Sprectrum_Truncation_Method.setText(_translate("MainWindow", "Spectrum Truncation Method"))
self.toolBox.setItemText(self.toolBox.indexOf(self.page_8), _translate("MainWindow", "Computational Parameters"))
self.pushButton_Placed_Neurons.setText(_translate("MainWindow", "Display allocated neurons"))
self.pushButton_Signal_Recovered.setText(_translate("MainWindow", "Display the DBS signal"))
self.pushButton_Adapted_Mesh.setText(_translate("MainWindow", "Display the adaptively refined mesh"))
self.pushButton_CSF_Full_Refinement.setText(_translate("MainWindow", "Display the brain tissue mapping after full CSF refinement"))
self.pushButton_Signal_Convoluted_1st_Point.setText(_translate("MainWindow", "Display the electric potential on the first compartment computed with FFEM"))
self.pushButton_Axon_Activation.setText(_translate("MainWindow", "Display activated neurons"))
self.label_10.setText(_translate("MainWindow", "Current-Controlled Mode"))
self.label_Phi_Vector.setText(_translate("MainWindow", "Signal Vector"))
self.label_5.setText(_translate("MainWindow", "Solver Type"))
self.lineEdit_Phi_Vector.setToolTip(_translate("MainWindow", "Enter Vector: Format: [0.00005,0.0]"))
self.lineEdit_Phi_Vector.setPlaceholderText(_translate("MainWindow", "[0.00005,0.0]"))
self.comboBox_Solver_Type.setItemText(0, _translate("MainWindow", "Default"))
self.comboBox_Solver_Type.setItemText(1, _translate("MainWindow", "MUMPS"))
self.comboBox_Solver_Type.setItemText(2, _translate("MainWindow", "BiCGSTAB"))
self.comboBox_Solver_Type.setItemText(3, _translate("MainWindow", "GMRES"))
self.pushButton_Current_Control.setToolTip(_translate("MainWindow", "<html><head/><body><p><span style=\" font-size:14pt;\">Check if current will be assigned on the electrode contacts.</span></p><p><span style=\" font-size:14pt; font-weight:600;\">Important</span><span style=\" font-size:14pt;\">: In this case, at least one contact should be put to 0, and it will be treated as the ground. In the next version we will add option of grounding elsewhere.</span></p></body></html>"))
self.pushButton_Phi_Vector.setToolTip(_translate("MainWindow", "<html><head/><body><p><span style=\" font-size:14pt;\">Set up voltage/current on the electrode contacts. If contact is not active, put None, then t will be treated as a floating conductor. Make sure that number of entries corresponds to the number of contacts on the chosen electrode. Value 0.0 always refers to the ground, not 0.0 A!</span></p><p><span style=\" font-size:14pt; font-weight:600;\">Important</span><span style=\" font-size:14pt;\">: If you change position of the ground or non-active contacts, the geometry needs to be regenerated.</span></p></body></html>"))
self.pushButton_Solver_Type.setToolTip(_translate("MainWindow", "<html><head/><body><p><span style=\" font-size:14pt;\">Choose a solver that will be applied to FEM system of Laplace equation. Corresponding suitable preconditioners are already defined. The choice is dependent on the stimulation mode, the Laplace formulation (QS or EQS) and the presence of floating conductors.</span></p><p><span style=\" font-size:14pt; font-weight:600;\">Important: </span><span style=\" font-size:14pt;\">I</span><span style=\" font-size:14pt;\">f you are not sure, choose "Default" option.</span></p></body></html>"))
self.pushButton_MRI_Data_Name.setToolTip(_translate("MainWindow", "<html><head/><body><p><span style=\" font-size:14pt;\">Provide name of segmented MRI data you want to use for the volume conductor model. The data should be copied to /oss_platform/OSS_platform directory. It can be in .txt,.nii and .nii.gz format. More in the OSS Platform tutorial.</span></p></body></html>"))
self.label_CSF_Index.setText(_translate("MainWindow", "CSF Index"))
self.label_WM_Index.setText(_translate("MainWindow", "WM Index"))
self.label_DTI_Data_Name.setText(_translate("MainWindow", "DTI Data Name"))
self.label_MRI_Data_Name.setText(_translate("MainWindow", "MRI Data Name"))
self.label_GM_Index.setText(_translate("MainWindow", "GM Index"))
self.label_MRI_Data_Name_2.setText(_translate("MainWindow", "MRI (m)"))
self.label_MRI_Data_Name_3.setText(_translate("MainWindow", "DTI (m)"))
self.label_Default_Material.setText(_translate("MainWindow", "Default Tissue"))
self.comboBox_Default_Material.setItemText(0, _translate("MainWindow", "CSF"))
self.comboBox_Default_Material.setItemText(1, _translate("MainWindow", "White Matter"))
self.comboBox_Default_Material.setItemText(2, _translate("MainWindow", "Grey Matter"))
self.pushButton_MRI_mm.setToolTip(_translate("MainWindow", "<html><head/><body><p><span style=\" font-size:14pt;\">Check if MRI data is in m (default in mm).</span></p></body></html>"))
self.pushButton_DTI_Data_Name.setToolTip(_translate("MainWindow", "<html><head/><body><p><span style=\" font-size:14pt;\">Optional. Provide name of DTI/DWI data you want to use to create conductivity tensors. The data should be copied to /oss_platform/OSS_platform directory. It can be in .txt,.nii and .nii.gz format.</span></p><p><span style=\" font-size:14pt; font-weight:600;\">Important</span><span style=\" font-size:14pt;\">: DTI and MRI data should be in the same coordinate space. More in the OSS Platform tutorial.</span></p></body></html>"))
self.pushButton_DTI_mm.setToolTip(_translate("MainWindow", "<html><head/><body><p><span style=\" font-size:14pt;\">Check if DTI data is in m (default in mm).</span></p></body></html>"))
self.pushButton_CSF_index.setToolTip(_translate("MainWindow", "<html><head/><body><p><span style=\" font-size:14pt;\">Provide index of cerebrospinal fluid (CSF) in the provided MRI data.</span></p></body></html>"))
self.pushButton_WM_Index.setToolTip(_translate("MainWindow", "<html><head/><body><p><span style=\" font-size:14pt;\">Provide index of white matter (WM) in the provided MRI data.</span></p></body></html>"))
self.pushButton_WM_Index_2.setToolTip(_translate("MainWindow", "<html><head/><body><p><span style=\" font-size:14pt;\">Provide index of grey matter (GM) in the provided MRI data.</span></p></body></html>"))
self.pushButton_Default_Material.setToolTip(_translate("MainWindow", "<html><head/><body><p><span style=\" font-size:14pt;\">Choose default material for tissue in the MRI data with other indices.</span></p><p><span style=\" font-size:14pt; font-weight:600;\">Note</span><span style=\" font-size:14pt;\">: CSF-like liquid surrounds brain, so it is logical to put default to CSF. On the other hand, CSF voxels might require high refinement, and thus large number of CSF might result in an unreasonably large mesh. If simulation is conducted for the basal-ganlia structures (e.g. STN, GPi), it is usually sufficient to use grey matter as a default material.</span></p></body></html>"))
self.toolBox_2.setItemText(self.toolBox_2.indexOf(self.page_2), _translate("MainWindow", "MRI and DTI Data"))
self.pushButton_Implantation_Coordinate_X.setToolTip(_translate("MainWindow", "<html><head/><body><p><span style=\" font-size:14pt;\">Provide X coordinate of the implantation site (tip of the electrode) in the MRI data space.</span></p></body></html>"))
self.label_2nd_Point_On_Lead_Y.setText(_translate("MainWindow", "2nd point on lead Y"))
self.label_Implantation_Coordinate_X.setText(_translate("MainWindow", "Implantation Coordinate X"))
self.label_12.setText(_translate("MainWindow", "<html><head/><body><p><span style=\" font-style:italic;\">mm</span></p></body></html>"))
self.label_Implantation_Coordinate_Y.setText(_translate("MainWindow", "Implantation Coordinate Y"))
self.label_2nd_Point_On_Lead_X.setText(_translate("MainWindow", "2nd point on lead X"))
self.pushButton_Electrode_Type.setToolTip(_translate("MainWindow", "<html><head/><body><p><span style=\" font-size:14pt;\">Choose electrode type</span></p></body></html>"))
self.pushButton_Implantation_Coordinate_Y.setToolTip(_translate("MainWindow", "<html><head/><body><p><span style=\" font-size:14pt;\">Provide Y coordinate of the implantation site (tip of the electrode) in the MRI data space.</span></p></body></html>"))
self.comboBox_Electrode_Type.setItemText(0, _translate("MainWindow", "SNEX100"))
self.comboBox_Electrode_Type.setItemText(1, _translate("MainWindow", "Medtronic3387"))
self.comboBox_Electrode_Type.setItemText(2, _translate("MainWindow", "Medtronic3389"))
self.comboBox_Electrode_Type.setItemText(3, _translate("MainWindow", "Medtronic3391"))
self.comboBox_Electrode_Type.setItemText(4, _translate("MainWindow", "St_Jude6142"))
self.comboBox_Electrode_Type.setItemText(5, _translate("MainWindow", "St_Jude6148"))
self.comboBox_Electrode_Type.setItemText(6, _translate("MainWindow", "St_Jude6173"))
self.comboBox_Electrode_Type.setItemText(7, _translate("MainWindow", "St_Jude6180"))
self.comboBox_Electrode_Type.setItemText(8, _translate("MainWindow", "Boston_Scientific_Vercise"))
self.comboBox_Electrode_Type.setItemText(9, _translate("MainWindow", "Boston_Scientific_Vercise_Cartesia"))
self.comboBox_Electrode_Type.setItemText(10, _translate("MainWindow", "PINS_L301"))
self.comboBox_Electrode_Type.setItemText(11, _translate("MainWindow", "PINS_L302"))
self.comboBox_Electrode_Type.setItemText(12, _translate("MainWindow", "PINS_L303"))
self.label_Turn_Around_Lead_Axis.setText(_translate("MainWindow", "<html><head/><body><p>Turn around lead\'s axis</p></body></html>"))
self.label_Electrode_Type.setText(_translate("MainWindow", "Electrode Type"))
self.label_11.setText(_translate("MainWindow", "<html><head/><body><p><span style=\" font-style:italic;\">mm</span></p></body></html>"))
self.label_Implantation_Coordinate_Z.setText(_translate("MainWindow", "Implantation Coordinate Z"))
self.label_2nd_Point_On_Lead_Z.setText(_translate("MainWindow", "2nd point on lead Z"))
self.label_13.setText(_translate("MainWindow", "<html><head/><body><p><span style=\" font-style:italic;\">mm</span></p></body></html>"))
self.label_14.setText(_translate("MainWindow", "<html><head/><body><p><span style=\" font-style:italic;\">mm</span></p></body></html>"))
self.label_15.setText(_translate("MainWindow", "<html><head/><body><p><span style=\" font-style:italic;\">mm</span></p></body></html>"))
self.label_16.setText(_translate("MainWindow", "<html><head/><body><p><span style=\" font-style:italic;\">mm</span></p></body></html>"))
self.label_39.setText(_translate("MainWindow", "<html><head/><body><p><span style=\" font-style:italic;\">deg</span></p></body></html>"))
self.pushButton_Implantation_Coordinate_Z.setToolTip(_translate("MainWindow", "<html><head/><body><p><span style=\" font-size:14pt;\">Provide Z coordinate of the implantation site (tip of the electrode) in the MRI data space.</span></p></body></html>"))
self.pushButton_2nd_Point_On_Lead_X.setToolTip(_translate("MainWindow", "<html><head/><body><p><span style=\" font-family:\'wf_segoe-ui_normal,Segoe UI,Segoe WP,Tahoma,Arial,sans-serif,serif,EmojiFont\'; font-size:14pt; color:#212121; background-color:#ffffff;\">Provide X coordinate for a second point on the electrode lead to define its tilt.</span></p></body></html>"))
self.pushButton_2nd_Point_On_Lead_Y.setToolTip(_translate("MainWindow", "<html><head/><body><p><span style=\" font-family:\'wf_segoe-ui_normal,Segoe UI,Segoe WP,Tahoma,Arial,sans-serif,serif,EmojiFont\'; font-size:14pt; color:#212121; background-color:#ffffff;\">Provide Y coordinate for a second point on the electrode lead to define its tilt.</span></p></body></html>"))
self.pushButton_2nd_Point_On_Lead_Z.setToolTip(_translate("MainWindow", "<html><head/><body><p><span style=\" font-family:\'wf_segoe-ui_normal,Segoe UI,Segoe WP,Tahoma,Arial,sans-serif,serif,EmojiFont\'; font-size:14pt; color:#212121; background-color:#ffffff;\">Provide Z coordinate for a second point on the electrode lead to define its tilt.</span></p></body></html>"))
self.pushButton_2nd_Point_On_Lead_Z_2.setToolTip(_translate("MainWindow", "<html><head/><body><p><span style=\" font-family:\'wf_segoe-ui_normal,Segoe UI,Segoe WP,Tahoma,Arial,sans-serif,serif,EmojiFont\'; font-size:14pt; color:#212121; background-color:#ffffff;\">Provide rotation around the electrode\'s longitudinal axis (important for non-axisymmetric electrodes).</span></p></body></html>"))
self.toolBox_2.setItemText(self.toolBox_2.indexOf(self.page_3), _translate("MainWindow", "Geometries"))
import icons_rc
import imgs_rc
# Standalone entry point: show the generated OSS-DBS main window for a quick
# visual check (normally this generated module is imported, not executed).
if __name__ == "__main__":
    import sys
    # Qt requires exactly one QApplication per process.
    app = QtWidgets.QApplication(sys.argv)
    MainWindow = QtWidgets.QMainWindow()
    ui = Ui_MainWindow()
    ui.setupUi(MainWindow)
    MainWindow.show()
    # Enter the Qt event loop; propagate its exit code to the shell.
    sys.exit(app.exec_())
|
andreashorn/lead_dbs
|
ext_libs/OSS-DBS/OSS_platform/GUI_tree_files/GUI.py
|
Python
|
gpl-3.0
| 176,194
|
[
"NEURON"
] |
aae9f9d68147f12118bced8406f877bf57dc56e863c80ba8def8593f5ca6f2f0
|
import numpy as np
__all__ = ["ARMA"]
def ARMA(length, phi=0.0, theta=0.0, std=1.0):
    r"""
    Generate a time-series of a predefined ARMA-process based on this equation:

    :math:`e[n] = w[n] + \sum_{j=1}^{\min(p,n)} \phi_j e[n-j] + \sum_{j=1}^{\min(q,n)} \theta_j w[n-j]`

    where w is white gaussian noise. Equation and algorithm taken from [Eichst2012]_ .

    Parameters
    ----------
    length: int
        how long the drawn sample will be
    phi: float, list or numpy.ndarray, shape (p, )
        AR-coefficients (phi[0] is :math:`\phi_1`, applied to the most recent sample)
    theta: float, list or numpy.ndarray
        MA-coefficients (theta[0] is :math:`\theta_1`)
    std: float
        std of the gaussian white noise that is fed into the ARMA-model

    Returns
    -------
    e: numpy.ndarray, shape (length, )
        time-series of the predefined ARMA-process

    References
    ----------
    * Eichstädt, Link, Harris and Elster [Eichst2012]_
    """
    # Normalize coefficient inputs (scalar, list or ndarray) to 1-D float arrays.
    phi = np.atleast_1d(np.asarray(phi, dtype=float))
    theta = np.atleast_1d(np.asarray(theta, dtype=float))
    # initialize the driving white noise w and the output series e
    w = np.random.normal(loc=0, scale=std, size=length)
    e = np.zeros_like(w)
    # define shortcuts
    p = len(phi)
    q = len(theta)
    # iterate series over time
    for n, wn in enumerate(w):
        min_pn = min(p, n)
        min_qn = min(q, n)
        # BUGFIX: phi_j must multiply e[n-j], i.e. phi[0] pairs with the MOST
        # RECENT sample. The history slice e[n-min_pn:n] is oldest-first, so it
        # has to be reversed before the dot product; previously phi[0] was
        # paired with the oldest available sample whenever p > 1 (same for theta).
        ar_part = phi[:min_pn].dot(e[n - min_pn:n][::-1])
        ma_part = theta[:min_qn].dot(w[n - min_qn:n][::-1])
        e[n] = ar_part + ma_part + wn
    return e
|
eichstaedtPTB/PyDynamic
|
PyDynamic/misc/noise.py
|
Python
|
lgpl-3.0
| 1,580
|
[
"Gaussian"
] |
d6f39eb3cc6815337db9b2a136a820f6841eb0f22fd3dbf4e957d783adcd4895
|
"""
Provide a library of processing functions.
The processing functions declare their item sources and input parameters along with a identifier, title, and
what UI sections they are likely to appear in.
The processing functions also declare how they are applied to sequences/collections and what form
their output takes (scalar or not).
"""
from __future__ import annotations
# standard libraries
import functools
import gettext
import typing
# third party libraries
import numpy
import scipy.signal
# local libraries
from nion.data import DataAndMetadata
from nion.data import xdata_1_0 as xd
from nion.swift.model import DataItem
from nion.swift.model import Symbolic
from nion.utils import Registry
if typing.TYPE_CHECKING:
from nion.swift import Facade
PersistentDictType = typing.Dict[str, typing.Any]
_ImageDataType = DataAndMetadata._ImageDataType
_ProcessingResult = typing.Union[DataAndMetadata.DataAndMetadata, DataAndMetadata.ScalarAndMetadata, None]
_ = gettext.gettext
class ProcessingComputation:
    """Bridges a ProcessingBase component into the computation framework.

    execute() runs the component (possibly once per navigation index for
    mapped/scalar components) and stores the result privately; commit() later
    publishes that result to the "target" data item on the main thread.
    """
    def __init__(self, processing_component: ProcessingBase, computation: Facade.Computation, **kwargs: typing.Any) -> None:
        self.computation = computation
        self.processing_component = processing_component
        # backing ndarray for mapped (per-index) results, filled incrementally in execute()
        self.__data: typing.Optional[_ImageDataType] = None
        # the final result published by commit()
        self.__xdata: typing.Optional[_ProcessingResult] = None
    def execute(self, **kwargs: typing.Any) -> None:
        """Run the processing component and store the result for commit()."""
        # let the processing component do the processing and store result in the xdata field.
        # TODO: handle multiple sources (broadcasting)
        # Mapped mode applies when the component is scalar, or mapping was requested.
        is_mapped = self.processing_component.is_scalar or kwargs.get("mapping", "none") != "none"
        if is_mapped and len(self.processing_component.sources) == 1 and kwargs[self.processing_component.sources[0]["name"]].xdata.is_collection:
            src_name = self.processing_component.sources[0]["name"]
            data_source = typing.cast("Facade.DataSource", kwargs[src_name])
            xdata = data_source.xdata
            assert xdata
            self.__xdata = None
            # Process each index along the navigation dimensions independently.
            indexes = numpy.ndindex(xdata.navigation_dimension_shape)  # type: ignore
            for index in indexes:
                # Build a one-item data source for this index, reusing the original
                # display channel and graphic (if any).
                index_data_source = DataItem.DataSource(data_source._display_data_channel, data_source.graphic._graphic if data_source.graphic else None, xdata[index])
                # Replace the first keyword argument (the source) with the per-index
                # source; forward the remaining kwargs unchanged.
                index_kw_args = {next(iter(kwargs.keys())): index_data_source}
                for k, v in list(kwargs.items())[1:]:
                    index_kw_args[k] = v
                processed_data = self.processing_component.process(**index_kw_args)
                if isinstance(processed_data, DataAndMetadata.DataAndMetadata):
                    # handle array data
                    index_xdata = processed_data
                    if self.__xdata is None:
                        # First result: allocate the output, taking navigation
                        # calibrations from the source and datum calibrations from
                        # the per-index result.
                        self.__data = numpy.empty(xdata.navigation_dimension_shape + index_xdata.datum_dimension_shape, dtype=index_xdata.data_dtype)
                        self.__xdata = DataAndMetadata.new_data_and_metadata(
                            self.__data, index_xdata.intensity_calibration,
                            tuple(xdata.navigation_dimensional_calibrations) + tuple(index_xdata.datum_dimensional_calibrations),
                            None, None, DataAndMetadata.DataDescriptor(xdata.is_sequence, xdata.collection_dimension_count, index_xdata.datum_dimension_count))
                    if self.__data is not None:
                        self.__data[index] = index_xdata.data
                elif isinstance(processed_data, DataAndMetadata.ScalarAndMetadata):
                    # handle scalar data
                    index_scalar = processed_data
                    if self.__xdata is None:
                        # First result: a scalar map collapses the datum dimensions,
                        # so the output shape is just the navigation shape.
                        self.__data = numpy.empty(xdata.navigation_dimension_shape, dtype=type(index_scalar.value))
                        self.__xdata = DataAndMetadata.new_data_and_metadata(
                            self.__data, index_scalar.calibration,
                            tuple(xdata.navigation_dimensional_calibrations),
                            None, None, DataAndMetadata.DataDescriptor(xdata.is_sequence, 0, xdata.collection_dimension_count))
                    if self.__data is not None:
                        self.__data[index] = index_scalar.value
        elif not self.processing_component.is_scalar:
            # Unmapped case: process the sources directly in a single call.
            self.__xdata = self.processing_component.process(**kwargs)
    def commit(self) -> None:
        """Publish the stored result to the "target" data item."""
        # store the xdata into the target. this is guaranteed to run on the main thread.
        if self.__xdata:
            self.computation.set_referenced_xdata("target", typing.cast(DataAndMetadata.DataAndMetadata, self.__xdata))
class ProcessingBase:
    """Base class describing a registerable processing component.

    Subclasses fill in the identifier, title, UI sections, item sources and
    parameters, then implement process() to produce xdata or a scalar result.
    """

    def __init__(self) -> None:
        # identification and UI placement
        self.processing_id = ""
        self.title = ""
        self.sections: typing.Set[str] = set()
        # declared inputs, parameters and extra computation attributes
        self.sources: typing.List[PersistentDictType] = []
        self.parameters: typing.List[PersistentDictType] = []
        self.attributes: PersistentDictType = {}
        # whether the component can be mapped over collections / yields a scalar
        self.is_mappable = False
        self.is_scalar = False

    def register_computation(self) -> None:
        """Register this component so computations can be created by its id."""
        make_computation = functools.partial(ProcessingComputation, self)
        Symbolic.register_computation_type(self.processing_id, make_computation)

    def process(self, *, src: DataItem.DataSource, **kwargs: typing.Any) -> _ProcessingResult:
        """Produce the processed result for the given source; subclasses override."""
        ...
class ProcessingFFT(ProcessingBase):
    """Forward FFT of the (cropped) display data; 1-D or 2-D datum only."""

    def __init__(self, **kwargs: typing.Any) -> None:
        super().__init__()
        self.processing_id = "fft"
        self.title = _("FFT")
        self.sections = {"fourier"}
        self.sources = [
            {"name": "src", "label": _("Source"), "croppable": True, "requirements": [{"type": "datum_rank", "values": (1, 2)}]},
        ]
        self.is_mappable = True

    def process(self, *, src: DataItem.DataSource, **kwargs: typing.Any) -> _ProcessingResult:
        """Return the FFT of the source's cropped display data, or None."""
        xdata = src.cropped_display_xdata
        if not xdata:
            return None
        return xd.fft(xdata)
class ProcessingIFFT(ProcessingBase):
    """Inverse FFT of the raw source data; 1-D or 2-D datum only."""

    def __init__(self, **kwargs: typing.Any) -> None:
        super().__init__()
        self.processing_id = "inverse_fft"
        self.title = _("Inverse FFT")
        self.sections = {"fourier"}
        self.sources = [
            {"name": "src", "label": _("Source"), "use_display_data": False, "requirements": [{"type": "datum_rank", "values": (1, 2)}]},
        ]
        # NOTE(review): unlike ProcessingFFT, is_mappable is not set here —
        # confirm whether that is intentional before relying on mapped execution.

    def process(self, *, src: DataItem.DataSource, **kwargs: typing.Any) -> _ProcessingResult:
        """Return the inverse FFT of the source xdata, or None."""
        xdata = src.xdata
        if not xdata:
            return None
        return xd.ifft(xdata)
class ProcessingGaussianWindow(ProcessingBase):
    """Multiply the source data by a Gaussian window (1-D or 2-D datum).

    The ``sigma`` parameter scales the window width relative to the datum size.
    """

    def __init__(self, **kwargs: typing.Any) -> None:
        super().__init__()
        self.processing_id = "gaussian_window"
        self.title = _("Gaussian Window")
        self.sections = {"windows"}
        self.sources = [
            {
                "name": "src",
                "label": _("Source"),
                "croppable": True,
                "requirements": [
                    {"type": "datum_rank", "values": (1, 2)},
                    {"type": "datum_calibrations", "units": "equal"},
                ]
            },
        ]
        self.parameters = [
            {"name": "sigma", "type": "real", "value": 1.0}
        ]
        self.is_mappable = True

    def process(self, *, src: DataItem.DataSource, **kwargs: typing.Any) -> _ProcessingResult:
        """Apply the Gaussian window to the source xdata; None if no data."""
        sigma = kwargs.get("sigma", 1.0)
        src_xdata = src.xdata
        if src_xdata and src_xdata.datum_dimension_count == 1:
            # BUGFIX: sigma was previously ignored in the 1-D branch (std was
            # hard-coded to w / 2 even though the 2-D branch scaled by sigma).
            # Scale by sigma; default sigma == 1.0 keeps the old behavior.
            w = src_xdata.datum_dimension_shape[0]
            return src_xdata * scipy.signal.gaussian(w, std=w * sigma / 2)  # type: ignore
        elif src_xdata and src_xdata.datum_dimension_count == 2:
            # uses circularly rotated approach of generating 2D filter from 1D
            h, w = src_xdata.datum_dimension_shape
            y, x = numpy.meshgrid(numpy.linspace(-h / 2, h / 2, h), numpy.linspace(-w / 2, w / 2, w))  # type: ignore
            s = 1 / (min(w, h) * sigma)
            r = numpy.sqrt(y * y + x * x) * s
            return src_xdata * numpy.exp(-0.5 * r * r)  # type: ignore
        return None
class ProcessingHammingWindow(ProcessingBase):
    """Multiply the source data by a Hamming window (1-D or 2-D datum)."""

    def __init__(self, **kwargs: typing.Any) -> None:
        super().__init__()
        self.processing_id = "hamming_window"
        self.title = _("Hamming Window")
        self.sections = {"windows"}
        self.sources = [
            {"name": "src", "label": _("Source"), "croppable": True, "requirements": [{"type": "datum_rank", "values": (1, 2)}]},
        ]
        self.is_mappable = True

    def process(self, *, src: DataItem.DataSource, **kwargs: typing.Any) -> _ProcessingResult:
        """Apply the Hamming window to the source xdata; None if no data."""
        src_xdata = src.xdata
        if not src_xdata:
            return None
        rank = src_xdata.datum_dimension_count
        if rank == 1:
            return src_xdata * scipy.signal.hamming(src_xdata.datum_dimension_shape[0])  # type: ignore
        if rank == 2:
            # 2D window from the outer product of two 1D windows (row applied
            # first, then column — same order as the original code).
            h, w = src_xdata.datum_dimension_shape
            row_window = numpy.reshape(scipy.signal.hamming(w), (1, w))  # type: ignore
            col_window = numpy.reshape(scipy.signal.hamming(h), (h, 1))  # type: ignore
            return src_xdata * row_window * col_window
        return None
class ProcessingHannWindow(ProcessingBase):
    """Multiply the source data by a Hann window (1-D or 2-D datum)."""

    def __init__(self, **kwargs: typing.Any) -> None:
        super().__init__()
        self.processing_id = "hann_window"
        self.title = _("Hann Window")
        self.sections = {"windows"}
        self.sources = [
            {"name": "src", "label": _("Source"), "croppable": True, "requirements": [{"type": "datum_rank", "values": (1, 2)}]},
        ]
        self.is_mappable = True

    def process(self, *, src: DataItem.DataSource, **kwargs: typing.Any) -> _ProcessingResult:
        """Apply the Hann window to the source xdata; None if no data."""
        src_xdata = src.xdata
        if not src_xdata:
            return None
        rank = src_xdata.datum_dimension_count
        if rank == 1:
            return src_xdata * scipy.signal.hann(src_xdata.datum_dimension_shape[0])  # type: ignore
        if rank == 2:
            # 2D window from the outer product of two 1D windows (row applied
            # first, then column — same order as the original code).
            h, w = src_xdata.datum_dimension_shape
            row_window = numpy.reshape(scipy.signal.hann(w), (1, w))  # type: ignore
            col_window = numpy.reshape(scipy.signal.hann(h), (h, 1))  # type: ignore
            return src_xdata * row_window * col_window
        return None
class ProcessingMappedSum(ProcessingBase):
    """Scalar map: sum of the (filtered) source data."""

    def __init__(self, **kwargs: typing.Any) -> None:
        super().__init__()
        self.processing_id = "mapped_sum"
        self.title = _("Mapped Sum")
        self.sections = {"scalar-maps"}
        self.sources = [
            {"name": "src", "label": _("Source"), "croppable": True, "requirements": [{"type": "datum_rank", "values": (1, 2)}]},
        ]
        self.is_mappable = True
        self.is_scalar = True
        self.attributes["connection_type"] = "map"

    def process(self, *, src: DataItem.DataSource, **kwargs: typing.Any) -> _ProcessingResult:
        """Return the sum of the filtered source data as a calibrated scalar."""
        filtered_xdata = src.filtered_xdata
        if not filtered_xdata:
            return None
        total = numpy.sum(filtered_xdata)  # type: ignore
        return DataAndMetadata.ScalarAndMetadata.from_value(total, filtered_xdata.intensity_calibration)
class ProcessingMappedAverage(ProcessingBase):
    """Scalar map: average of the (filtered) source data."""

    def __init__(self, **kwargs: typing.Any) -> None:
        super().__init__()
        self.processing_id = "mapped_average"
        self.title = _("Mapped Average")
        self.sections = {"scalar-maps"}
        self.sources = [
            {"name": "src", "label": _("Source"), "croppable": True, "requirements": [{"type": "datum_rank", "values": (1, 2)}]},
        ]
        self.is_mappable = True
        self.is_scalar = True
        self.attributes["connection_type"] = "map"

    def process(self, *, src: DataItem.DataSource, **kwargs: typing.Any) -> _ProcessingResult:
        """Return the average of the filtered source data as a calibrated scalar."""
        filtered_xdata = src.filtered_xdata
        if not filtered_xdata:
            return None
        mean_value = numpy.average(filtered_xdata)  # type: ignore
        return DataAndMetadata.ScalarAndMetadata.from_value(mean_value, filtered_xdata.intensity_calibration)
# Register the built-in processing components with the application registry at
# import time so they become available as computations.
# NOTE(review): FFT/IFFT registration is commented out — presumably provided
# through another path; confirm before enabling.
# Registry.register_component(ProcessingFFT(), {"processing-component"})
# Registry.register_component(ProcessingIFFT(), {"processing-component"})
Registry.register_component(ProcessingGaussianWindow(), {"processing-component"})
Registry.register_component(ProcessingHammingWindow(), {"processing-component"})
Registry.register_component(ProcessingHannWindow(), {"processing-component"})
Registry.register_component(ProcessingMappedSum(), {"processing-component"})
Registry.register_component(ProcessingMappedAverage(), {"processing-component"})
# No-op hook so importers can force this module to load (and thus register).
def init() -> None: pass
|
nion-software/nionswift
|
nion/swift/model/Processing.py
|
Python
|
gpl-3.0
| 12,839
|
[
"Gaussian"
] |
889b409e246ac923f08b6252f95effee904a9d96f4d8e9603401afa2d0769929
|
import os
import sys
import json
from datamanagerpkg import ProtonCommunication_data_manager
from datamanagerpkg import GalaxyCommunication_data_manager
from sequencer.models import Experiments, GalaxyUsers
from sequencer.models import GalaxyJobs, ExperimentRawData
from sequencer.models import UserCommonJobs,Supportedfiles
from sequencer.models import Workflows,WorkflowsTools
##########################
#URL SEQUENCER
##########################
from GlobalVariables import sequencer_base_url
from GlobalVariables import sequencer_user
from GlobalVariables import sequencer_password
from GlobalVariables import sequencer_severName
from GlobalVariables import sequencer_ExperimentLimit
from GlobalVariables import toolsInformation
##########################
#URL GALAXY
##########################
from GlobalVariables import galaxy_base_url
from GlobalVariables import apiKey
##########################
#NAs DIr folder
##########################
from GlobalVariables import nasInput
from GlobalVariables import CNVfolderName
from GlobalVariables import plasmaFolderName
from GlobalVariables import nasResults
from GlobalVariables import workflowPath
##########################
#SMTP folder
##########################
from GlobalVariables import smtpServerAphp
from GlobalVariables import smtpPortServer
from GlobalVariables import fromAddrOfficial
from sequencer.views import getDataPath
from pprint import pprint
import json
def uploadAWorkflowToDatabase(pathToWorkflow):
    """Import a Galaxy workflow export (.ga JSON) into the database.

    Creates (or reuses) the Workflows row named after the workflow, then for
    every step that carries a tool: creates a WorkflowsTools row, links it to
    the workflow, and records the tool's declared input and output files
    (read from ``<tool_id>_<tool_version>.json`` under ``toolsInformation``)
    as Supportedfiles on the tool.

    Parameters
    ----------
    pathToWorkflow: str
        Path to the Galaxy workflow export (.ga) file.
    """
    with open(pathToWorkflow) as data_file:
        data = json.load(data_file)
    pprint(data)
    # Step keys are stringified integers; process the steps in numeric order.
    stepkey = sorted(int(x) for x in data['steps'].keys())
    # Fetch the workflow record, creating it on first import.
    # BUGFIX: previously workflow_local was only assigned when the workflow did
    # not exist yet, so re-importing an existing workflow raised NameError
    # below. Reuse the existing record instead.
    try:
        workflow_local = Workflows.objects.get(name=str(data['name']))
    except Workflows.DoesNotExist:
        workflow_local = Workflows(name=str(data['name']), description=str(data['name']))
        workflow_local.save()
        workflow_local = Workflows.objects.get(name=str(data['name']))
    for step in stepkey:
        step_info = data['steps'][str(step)]
        if step_info['tool_id'] is not None:
            print("find a Tool to add, try to add this new tool to the database")
            print(str(step_info['tool_id']))
            tool_file = str(step_info['tool_id'] + "_" + step_info['tool_version'] + ".json")
            # NOTE(review): as in the original code, the tool row is created
            # unconditionally (the existence lookup result was never used).
            print("tool found was not added to the DB. We Add now this new tool")
            newtool = WorkflowsTools(primary_name=tool_file,
                                     name=str(step_info['tool_id']),
                                     version=str(step_info['tool_version']))
            newtool.save()
            print("Add the tool definition to the Workflow and link it to the current workflow.")
            workflow_local.tools_list.add(newtool)
            workflow_local.save()
            print("Name of the json file where the tool is define:" + tool_file)
            # Read the tool's description file to learn its declared files.
            with open(toolsInformation + tool_file) as data_file_tool:
                tool = json.load(data_file_tool)
            print("#######################input")
            for dataInput in tool['function'][0]['input']:
                newfile = Supportedfiles(dataHandle=str(dataInput['dataHandle']),
                                         dataDescription=str(dataInput['dataDescription']),
                                         dataFormatEdamOntology=str(dataInput['format'][0]['uri']))
                newfile.save()
                newtool.inputlist.add(newfile)
                newtool.save()
            print("#######################output")
            # BUGFIX: this loop previously iterated 'input' a second time, so
            # the tool's declared outputs were never recorded.
            for dataOutput in tool['function'][0]['output']:
                newfile = Supportedfiles(dataHandle=str(dataOutput['dataHandle']),
                                         dataDescription=str(dataOutput['dataDescription']),
                                         dataFormatEdamOntology=str(dataOutput['format'][0]['uri']))
                newfile.save()
                newtool.outputlist.add(newfile)
                newtool.save()
def AddaWorkflowTool(this_tool):
    """Add a single Galaxy tool and its declared files to the database.

    Creates a WorkflowsTools row for the tool and records the inputs and
    outputs declared in its description file
    (``<id>_<version>.json`` under ``toolsInformation``) as Supportedfiles.

    Parameters
    ----------
    this_tool: list
        Galaxy tool description; this_tool[0] must carry 'id' and 'version'.
    """
    tool_file = str(this_tool[0]['id'] + "_" + this_tool[0]['version'] + ".json")
    # NOTE(review): as in the original code, the tool row is created
    # unconditionally (the existence lookup result was never used).
    print("tool found was not added to the DB. We Add now this new tool")
    newtool = WorkflowsTools(primary_name=tool_file,
                             name=str(this_tool[0]['id']),
                             version=str(this_tool[0]['version']))
    newtool.save()
    print("Add the tool definition to the Workflow and link it to the current workflow.")
    print("Name of the json file where the tool is define:" + tool_file)
    # Read the tool's description file to learn its declared files.
    with open(toolsInformation + tool_file) as data_file_tool:
        tool = json.load(data_file_tool)
    print("#######################input")
    for dataInput in tool['function'][0]['input']:
        newfile = Supportedfiles(dataHandle=str(dataInput['dataHandle']),
                                 dataDescription=str(dataInput['dataDescription']),
                                 dataFormatEdamOntology=str(dataInput['format'][0]['uri']))
        newfile.save()
        newtool.inputlist.add(newfile)
        newtool.save()
    print("#######################output")
    # BUGFIX: this loop previously iterated 'input' a second time, so the
    # tool's declared outputs were never recorded.
    for dataOutput in tool['function'][0]['output']:
        newfile = Supportedfiles(dataHandle=str(dataOutput['dataHandle']),
                                 dataDescription=str(dataOutput['dataDescription']),
                                 dataFormatEdamOntology=str(dataOutput['format'][0]['uri']))
        newfile.save()
        newtool.outputlist.add(newfile)
        newtool.save()
# Manual entry point: import the two demo workflows shipped on the NAS into
# the database. Paths are hard-coded to this deployment's NAS mount.
if __name__ == "__main__":
    print("#######################")
    print("#######################")
    pathTosamtools='/nas_Dir/workflow/Galaxy-Workflow-demo_samtools.ga'
    print("Upload a specific workflow to the database : demo_samtools")
    uploadAWorkflowToDatabase(pathTosamtools)
    print("#######################")
    print("#######################")
    pathToWorkflow='/nas_Dir/workflow/Galaxy-Workflow-Plasma_mutation.ga'
    print("Upload a specific workflow to the database : Plasma_mutation")
    uploadAWorkflowToDatabase(pathToWorkflow)
    print("JOB DONE")
|
CARPEM/GalaxyDocker
|
data-manager-hegp/analysisManager/analysismanager/Add_Workflows.py
|
Python
|
mit
| 8,118
|
[
"Galaxy"
] |
0ea9ef275d483736841f365aff1ed528c1cf8178c9e44128acb2aace40abfa5c
|
import mdtraj as md
import tempfile
import logging
from .utils import getoutput
logger = logging.getLogger(__name__)
logging.basicConfig(level=logging.DEBUG, format="LOG: %(message)s")
# http://ambermd.org/tutorials/advanced/tutorial15/Tutorial2.xhtml
# Run tLEaP with input file:
# $ tleap -f commands.in
TLEAP_TEMPLATE = """
source leaprc.gaff
%(mol2_section)s
box = loadPdb %(box_filename)s
%(amberparams_section)s
setbox box centers
saveAmberParm box %(prmtop_filename)s %(inpcrd_filename)s
quit
"""
#loadmol2_section will look something like this:
#BMI = loadmol2 bmi.mol2
#BF4 = loadmol2 bf4.mol2
#ACN = loadmol2 acn.mol2
#loadamberparams_section looks like this:
#loadamberparams frcmod.bf4
#loadamberparams frcmod.bmi
#loadamberparams frcmod.acn
def build_mixture_prmtop(mol2_filenames, frcmod_filenames, box_filename, prmtop_filename, inpcrd_filename):
    """Create a prmtop and inpcrd from a collection of mol2 and frcmod files
    as well as a single box PDB. We have used this for setting up
    simulations of neat liquids or binary mixtures.

    Parameters
    ----------
    mol2_filenames : list(str)
        Filenames of GAFF flavored mol2 files. Each must contain exactly
        ONE ligand.
    frcmod_filenames : str
        Filename of input GAFF frcmod filenames.
    box_filename : str
        Filename of PDB containing an arbitrary box of the mol2 molecules.
    prmtop_filename : str
        output prmtop filename. Should have suffix .prmtop
    inpcrd_filename : str
        output inpcrd filename. Should have suffix .inpcrd

    Returns
    -------
    tleap_commands : str
        The string of commands piped to tleap for building the prmtop
        and inpcrd files. This will *already* have been run, but the
        output can be useful for debugging or archival purposes.

    Notes
    -----
    This can be easily broken if there are missing, duplicated, or
    inconsistent ligand residue names in your box, mol2, and frcmod files.
    You can use mdtraj to edit the residue names with something like
    this: trj.top.residue(0).name = "L1"
    """
    # Collect exactly one residue name per mol2 file, preserving file order so
    # each loadmol2 line pairs the right name with the right file (the previous
    # unordered set union could scramble the name/file pairing).
    ordered_names = []
    for filename in mol2_filenames:
        t = md.load(filename)
        names = set([r.name for r in t.top.residues])
        if len(names) != 1:
            raise ValueError("Must have a SINGLE residue name in each mol2 file.")
        ordered_names.append(names.pop())
    # Residue names must also be unique across all mol2 files.
    if len(set(ordered_names)) != len(mol2_filenames):
        raise ValueError("Must have UNIQUE residue names in each mol2 file.")

    mol2_section = "\n".join("%s = loadmol2 %s" % (name, filename) for name, filename in zip(ordered_names, mol2_filenames))
    amberparams_section = "\n".join("loadamberparams %s" % (filename) for filename in frcmod_filenames)

    tleap_commands = TLEAP_TEMPLATE % dict(mol2_section=mol2_section, amberparams_section=amberparams_section, box_filename=box_filename, prmtop_filename=prmtop_filename, inpcrd_filename=inpcrd_filename)
    print(tleap_commands)

    file_handle = tempfile.NamedTemporaryFile('w')  # FYI Py3K defaults to 'wb' mode, which won't work here.
    # BUGFIX: write the command string in one call; writelines() on a str
    # iterates it character by character.
    file_handle.write(tleap_commands)
    file_handle.flush()

    cmd = "tleap -f %s " % file_handle.name
    logger.debug(cmd)

    output = getoutput(cmd)
    logger.debug(output)
    file_handle.close()

    return tleap_commands
|
kyleabeauchamp/openmoltools
|
openmoltools/amber.py
|
Python
|
gpl-2.0
| 3,518
|
[
"MDTraj"
] |
1ca766adcabd3348e8e6eaa9c6d6aec271030a13417ae222f89c7d4556f7e2e3
|
#!/usr/bin/python3
#
# Polychromatic is licensed under the GPLv3.
# Copyright (C) 2017-2021 Luke Horwell <code@horwell.me>
#
"""
This module abstracts data from the OpenRazer Python library (and daemon)
and parses this for Polychromatic to use.
Project URL: https://github.com/openrazer/openrazer
"""
import os
# Imported on demand:
# import requests _get_device_image() for retrieving device image URLs
from . import _backend
from .. import fx
from openrazer import client as rclient
class Backend(_backend.Backend):
"""
Bindings for the OpenRazer 2.x Python library.
"""
    def __init__(self, dbg, common, _):
        """Set up static backend metadata, lazy OpenRazer state and client settings.

        Parameters mirror _backend.Backend: debug helper, common helpers and a
        gettext-style translation function.
        """
        super().__init__(dbg, common, _)
        self.backend_id = "openrazer"
        self.logo = "openrazer.svg"
        self.version = rclient.__version__
        self.project_url = "https://openrazer.github.io"
        self.bug_url = "https://github.com/openrazer/openrazer/issues"
        self.releases_url = "https://github.com/openrazer/openrazer/releases"
        self.license = "GPLv2"
        # directory holding per-setting override files (see load_client_overrides)
        self.config_store = self._get_config_store_path()
        # Variables for OpenRazer
        self.devman = None   # rclient.DeviceManager, created lazily by _reinit_device_manager
        self.devices = None  # cached devman.devices
        # Client Settings
        self.allow_image_download = True
        self.ripple_refresh_rate = 0.05
        self.load_client_overrides()
def load_client_overrides(self):
"""
Load any user-defined client settings that Polychromatic should use
interfacing with the daemon. These are stored as individual files inside
the ~/.config/polychromatic/backends/openrazer/ directory.
"""
def _load_override(filename, data_type, default):
path = os.path.join(self.config_store, filename)
if not os.path.exists(path):
return default
with open(path, "r") as f:
data = str(f.readline()).strip()
try:
output = data_type(data)
self.debug("Setting client setting: {0} to {1}".format(filename, output))
return output
except ValueError:
return default
self.allow_image_download = True if _load_override("allow_image_download", int, 1) == 1 else False
self.ripple_refresh_rate = _load_override("ripple_refresh_rate", float, 0.05)
    def _reinit_device_manager(self, force_refresh=False):
        """
        OpenRazer uses a "Device Manager" containing devices connected. It only
        needs to be refreshed when devices are connected/disconnected.

        The device manager will be 'cached' for the duration of the session.

        Returns True on success, or a human-readable exception string on
        failure (callers compare the result against True).
        """
        if not self.devman or force_refresh:
            try:
                self.debug("Initialising Device Manager...")
                self.devman = rclient.DeviceManager()
                # Polychromatic drives effects itself; don't let the daemon sync them.
                self.devman.sync_effects = False
                self.devices = self.devman.devices
                return True
            except Exception as e:
                return self.common.get_exception_as_string(e)
        return True
def get_device_list(self):
"""
See _backend.get_device_list()
"""
devices = []
uid = -1
if not self.devices:
success = self._reinit_device_manager(force_refresh=True)
if success != True:
return success
for rdevice in self.devices:
uid += 1
devices.append({
"backend": self.backend_id,
"uid": uid,
"name": rdevice.name,
"serial": str(rdevice.serial),
"form_factor": self._get_form_factor(rdevice),
"real_image": self._get_device_image(rdevice),
"zones": self._get_supported_zones(rdevice)
})
return devices
def get_unsupported_devices(self):
"""
See _backend.get_unsupported_devices()
Connected Razer hardware not bound to the daemon likely means the driver/daemon
isn't set up correctly or the hardware isn't supported yet.
"""
devices = []
unknown_list = self._get_unmanaged_razer_usb_pids()
form_factor = self.common.get_form_factor(self._, "unrecognised")
if not unknown_list:
return []
for pid in unknown_list:
# Ignore Kitty headphones duplicate. 1532:0521 for headset, 1532:0F19 for Chroma (#328)
if pid == "0521":
continue
devices.append({
"backend": self.backend_id,
"name": "{0}:{1}".format("1532", pid),
"form_factor": form_factor,
})
return devices
def get_device(self, uid):
"""
See _backend.get_device()
"""
try:
success = self._reinit_device_manager()
if success != True:
return success
rdevice = self.devman.devices[uid]
except IndexError:
return None
except Exception as e:
return self.common.get_exception_as_string(e)
form_factor = self._get_form_factor(rdevice)
real_image = self._get_device_image(rdevice)
_vid_pid = self._get_device_vid_pid(rdevice)
vid = _vid_pid.get("pid")
pid = _vid_pid.get("vid")
# Determine device variables
firmware_version = None
keyboard_layout = None
monochromatic = self._is_device_monochromatic(rdevice)
macros = False # Supports key rebinding
game_mode = None # Keyboards only
matrix = False # Supports custom effects (per-key lighting)
battery_charging = False
battery_level = None
matrix_rows = None
matrix_cols = None
dpi_x = None
dpi_y = None
dpi_min = None
dpi_max = None
dpi_stages = []
poll_rate = None
# Retrieve device variables
if rdevice.has("name"):
name = rdevice.name
else:
self.debug("Device {0} doesn't have a name!".format(uid))
name = "Device " + str(uid)
if rdevice.has("serial"):
serial = str(rdevice.serial)
if not type(serial) == str or len(serial) <= 2:
self.debug("Got bad serial for {0}!".format(name))
serial = "0"
else:
self.debug("Device {0} doesn't have a valid serial!".format(uid))
name = "invalid_device_" + str(uid)
if rdevice.has("firmware_version"):
firmware_version = rdevice.firmware_version
if rdevice.has("keyboard_layout"):
keyboard_layout = rdevice.keyboard_layout
if rdevice.has("lighting_led_matrix"):
matrix = True
matrix_rows = rdevice.fx.advanced.rows
matrix_cols = rdevice.fx.advanced.cols
if rdevice.name == "Razer DeathStalker Chroma":
# See DeathStalkerChromaFX() in get_device_object()
matrix_cols = 6
if rdevice.has("dpi"):
dpi_x = rdevice.dpi[0]
dpi_y = rdevice.dpi[1]
dpi_min = 100
dpi_max = rdevice.max_dpi
default_stages = {
16000: [800, 1800, 4500, 9000, 16000],
8200: [800, 1800, 4800, 6400, 8200]
}
# Generate the DPI stages if they are not known
try:
dpi_stages = default_stages[dpi_max]
except KeyError:
dpi_stages = [
int(dpi_max / 10),
int(dpi_max / 8),
int(dpi_max / 4),
int(dpi_max / 2),
int(dpi_max)
]
if rdevice.has("poll_rate"):
poll_rate = rdevice.poll_rate
if rdevice.has("battery"):
battery_level = rdevice.battery_level
battery_charging = rdevice.is_charging
# Build an index of zones, parameters and what's currently set.
_zones = self._get_supported_zones(rdevice)
zone_labels, zone_icons = self._get_zone_label_and_icons(_zones, name, self._get_form_factor(rdevice))
zone_options = {}
def _device_has_zone_capability(capability):
return self._device_has_zone_capability(rdevice, zone, capability)
for zone in _zones:
options = []
rzone = self._get_zone_as_object(rdevice, zone)
# Brightness - toggle or slider?
brightness_parent, brightness_type = self._get_device_brightness(rdevice, zone)
if brightness_type == int:
options.append({
"id": "brightness",
"label": self._("Brightness"),
"type": "slider",
"value": round(brightness_parent.brightness),
"min": 0,
"max": 100,
"step": 5,
"suffix": "%",
"colours": [] # n/a
})
elif brightness_type == bool:
options.append({
"id": "brightness",
"label": self._("Brightness"),
"type": "toggle",
"active": True if brightness_parent.active else False,
"colours": [] # n/a
})
# Hardware Effects
current_state = self._read_persistence_storage(rdevice, zone)
effect_labels = {
"none": self._("None"),
"spectrum": self._("Spectrum"),
"wave": self._("Wave"),
"reactive": self._("Reactive"),
"breath": self._("Breath"),
"ripple": self._("Ripple"),
"starlight": self._("Starlight"),
"pulsate": self._("Pulsate"),
"blinking": self._("Blink"),
"static": self._("Static")
}
for effect in ["none", "spectrum", "wave", "reactive", "ripple", "static", "pulsate", "blinking"]:
if _device_has_zone_capability(effect):
effect_option = {
"id": effect,
"type": "effect",
"parameters": [],
"colours": [],
"active": current_state["effect"].startswith(effect)
}
try:
effect_option["label"] = effect_labels[effect]
except KeyError:
self.debug("Unknown effect: " + effect)
effect_option["label"] = self._("Unknown")
# Add parameters and determine what is in use
if effect == "wave":
# Change label IDs depending on orientation.
direction_1_data = "right"
direction_1_label = self._("Right")
direction_2_data = "left"
direction_2_label = self._("Left")
if rdevice.type == "mouse":
direction_1_data = "up"
direction_1_label = self._("Up")
direction_2_data = "down"
direction_2_label = self._("Down")
elif rdevice.type == "mousemat":
direction_1_data = "anticlock"
direction_1_label = self._("Clockwise")
direction_2_data = "clock"
direction_2_label = self._("Anti-clockwise")
effect_option["parameters"] = [
{
"id": direction_2_data,
"label": direction_2_label,
"data": 2,
"active": current_state["wave_dir"] == 2,
"colours": []
},
{
"id": direction_1_data,
"label": direction_1_label,
"data": 1,
"active": current_state["wave_dir"] == 1,
"colours": []
}
]
elif effect == "ripple":
if _device_has_zone_capability("ripple_random"):
effect_option["parameters"].append({
"id": "random",
"label": self._("Random"),
"data": "random",
"active": current_state["effect"] == "rippleRandomColour",
"colours": []
})
if _device_has_zone_capability("ripple"):
effect_option["parameters"].append({
"id": "single",
"label": self._("Single"),
"data": "single",
"active": current_state["effect"] == "ripple",
"colours": [current_state["colour_1"]]
})
elif effect == "reactive":
effect_option["parameters"] = [
{
"id": "fast",
"label": self._("Fast"),
"data": 1,
"active": current_state["speed"] == 1,
"colours": [current_state["colour_1"]]
},
{
"id": "medium",
"label": self._("Medium"),
"data": 2,
"active": current_state["speed"] == 2,
"colours": [current_state["colour_1"]]
},
{
"id": "slow",
"label": self._("Slow"),
"data": 3,
"active": current_state["speed"] == 3,
"colours": [current_state["colour_1"]]
},
{
"id": "vslow",
"label": self._("Very Slow"),
"data": 4,
"active": current_state["speed"] == 4,
"colours": [current_state["colour_1"]]
}
]
elif effect in ["static", "pulsate", "blinking"]:
effect_option["colours"] = [current_state["colour_1"]]
effect_option["active"] = True if current_state["effect"].startswith(effect) else False
options.append(effect_option)
# There isn't a single 'lighting_breath' and 'lighting_starlight' in the capabilities list
# -- Breath has up to 4 parameters.
if True in [_device_has_zone_capability("breath_random"),
_device_has_zone_capability("breath_single"),
_device_has_zone_capability("breath_dual"),
_device_has_zone_capability("breath_triple")]:
effect_option = {
"id": "breath",
"label": effect_labels["breath"],
"type": "effect",
"parameters": [],
"colours": [],
"active": current_state["effect"].startswith("breath")
}
param_labels = {
"random": self._("Random"),
"single": self._("Single"),
"dual": self._("Dual"),
"triple": self._("Triple")
}
for _colour_count, param in enumerate(["random", "single", "dual", "triple"]):
if _device_has_zone_capability("breath" + "_" + param):
_colour_list = []
for c in range(1, _colour_count + 1):
_colour_list.append(current_state["colour_" + str(c)])
param_key = {
"id": param,
"label": param_labels[param],
"data": param,
"active": current_state["effect"].endswith(param.capitalize()),
"colours": _colour_list
}
effect_option["parameters"].append(param_key)
options.append(effect_option)
# -- Starlight has up to 3 parameters, plus 3 speeds each.
if True in [_device_has_zone_capability("starlight_random"),
_device_has_zone_capability("starlight_single"),
_device_has_zone_capability("starlight_dual")]:
effect_option = {
"id": "starlight",
"label": effect_labels["starlight"],
"type": "effect",
"parameters": [],
"colours": [],
"active": current_state["effect"].startswith("starlight")
}
param_labels = {
"random": self._("Random"),
"single": self._("Single"),
"dual": self._("Dual")
}
param_speeds = {
"fast": self._("Fast"),
"medium": self._("Medium"),
"slow": self._("Slow"),
}
for _colour_count, param in enumerate(["random", "single", "dual"]):
for speed_no, speed in enumerate(["fast", "medium", "slow"]):
if _device_has_zone_capability("starlight" + "_" + param):
_colour_list = []
for c in range(1, _colour_count + 1):
_colour_list.append(current_state["colour_" + str(c)])
param_key = {
"id": param,
"label": "{0} ({1})".format(param_labels[param], param_speeds[speed]),
"data": "{0}_{1}".format(param, speed),
"active": current_state["effect"].endswith(param.capitalize()) and int(current_state["speed"]) == speed_no + 1,
"colours": _colour_list
}
effect_option["parameters"].append(param_key)
options.append(effect_option)
# Finished building options list
zone_options[zone] = options
# DPI is generated by Polychromatic. See below for fixed DPI devices.
# Other hardware features
def _init_main_if_empty():
if "main" not in zone_options.keys():
zone_options["main"] = []
# -- Game Mode
if rdevice.has("game_mode_led"):
_init_main_if_empty()
zone_options["main"].append({
"id": "game_mode",
"label": self._("Game Mode"),
"type": "toggle",
"active": True if rdevice.game_mode_led else False,
"colours": [] # n/a
})
# -- Fixed DPI
if rdevice.has("available_dpi"):
# Prevent Polychromatic from creating a variable DPI control
dpi_x = None
# Create a combo box to select this device's 'hardcoded' DPI
params = []
for value in rdevice.available_dpi:
params.append({
"id": str(value),
"label": str(value),
"data": str(value),
"active": str(rdevice.dpi[0]) == str(value),
"colours": [] # n/a
})
zone_options["main"].append({
"id": "dpi",
"label": self._("DPI"),
"type": "multichoice",
"parameters": params,
"active": True, # Always a DPI value
"colours": [] # n/a
})
# -- Sync DPI to hardware
if rdevice.has("dpi_stages"):
_init_main_if_empty()
zone_options["main"].append({
"id": "sync_dpi_stages",
"label": self._("DPI Buttons"),
"type": "button",
"button_text": self._("Sync DPI to Hardware"),
"parameters": [],
"active": True,
"colours": [] # n/a
})
# -- Poll Rate
if rdevice.has("poll_rate"):
_init_main_if_empty()
params = []
# Poll rates are fixed
poll_rate_ranges = [125, 500, 1000]
ids = {
125: "poll_low",
500: "poll_mid",
1000: "poll_high"
}
labels = {
125: "125 Hz (~8 ms)",
500: "500 Hz (~2 ms)",
1000: "1000 Hz (~1 ms)"
}
for rate in poll_rate_ranges:
params.append({
"id": ids[rate],
"label": labels[rate],
"data": rate,
"active": poll_rate == rate,
"colours": [] # n/a
})
zone_options["main"].append({
"id": "poll_rate",
"label": self._("Poll Rate"),
"type": "multichoice",
"parameters": params,
"active": True, # Always a poll rate
"colours": [] # n/a
})
# Low power and sleep mode are not exposed individually, but should do when battery is present
if rdevice.has("battery"):
_init_main_if_empty()
# -- Sleep Mode (in seconds)
try:
current_idle_secs = rdevice.get_idle_time()
current_idle_mins = current_idle_secs / 60
zone_options["main"].append({
"id": "idle_time",
"label": self._("Sleep mode after"),
"type": "slider",
"value": int(current_idle_mins),
"min": 1,
"max": 15,
"step": 1,
# TODO: Needs plural support
"suffix": " " + self._("minute(s)"),
"colours": [] # n/a
})
except Exception as e:
self.debug("Could not read get_idle_time. Ignoring.")
self.debug(str(e))
# -- Low Power Mode (by percent)
try:
current_low_power = rdevice.get_low_battery_threshold()
zone_options["main"].append({
"id": "low_battery_threshold",
"label": self._("Low Power Mode"),
"type": "slider",
"value": int(current_low_power),
"min": 1,
"max": 100,
"step": 1,
"suffix": "%",
"colours": [] # n/a
})
except Exception as e:
self.debug("Could not read get_low_battery_threshold. Ignoring.")
self.debug(str(e))
# -- Macros Info
if rdevice.has("macro_mode_led_effect") and rdevice.type == "keyboard":
_init_main_if_empty()
zone_options["main"].append({
"id": "macro_info",
"label": self._("Macros"),
"type": "dialog",
"button_text": self._("About Macro Recording"),
"message": self._("The OpenRazer daemon provides a simple on-the-fly macro recording feature. To use:\n\n" + \
"1. Press FN+[M] to enter macro mode.\n" + \
"2. Press the macro key to assign to. Only M1-M5 are supported.\n" + \
"3. Press the keys in sequence to record.\n" + \
"4. Press FN+[M] to exit macro mode.\n\n" + \
"Macros are retained in memory until the daemon is stopped. The replay speed will be instantaneous.\n\n" + \
"This is not a Polychromatic feature and could disappear in future. This application intends to integrate a key rebinding feature in a future version."),
"colours": [] # n/a
})
# -- Key Mapping Info
if rdevice.type in ["keyboard", "keypad"]:
_init_main_if_empty()
zone_options["main"].append({
"id": "key_info",
"label": self._("Key Mapping"),
"type": "dialog",
"button_text": self._("About Key Mapping"),
"message": self._("Currently, OpenRazer and Polychromatic do not support a convenient key rebinding feature. " + \
"Polychromatic intends to integrate a key mapping solution in a future version.\n\n" + \
"In the meantime, there are third party projects which provide key remapping agnostic to any input device.\n\nFor more information, visit:\n" + \
"https://polychromatic.app/permalink/keymapping/"),
"colours": [] # n/a
})
# Prepare summary of device.
summary = []
_multiple_zones = len(_zones) > 1
# -- Gather current states for effects/brightness.
# -- If all zones are the same, show that status, otherwise state (Multiple)
# -- Not all statuses are shown at once since this can be crowded for some devices.
_effects = []
_effects_labels = {}
_brightness = []
for zone in zone_options:
for option in zone_options[zone]:
if option["type"] == "effect" and option["active"] == True:
_effects.append(option["id"])
_effects_labels[option["id"]] = option["label"]
if option["id"] == "brightness" and "value" in option.keys():
_brightness.append(option["value"])
if option["id"] == "brightness" and "active" in option.keys():
_brightness.append(option["active"])
def is_same(items):
return all(x == items[0] for x in items)
# -- Effects
if len(_effects) > 0:
if is_same(_effects):
summary.append({
"icon": self.common.get_icon("options", _effects[0]),
"label": _effects_labels[_effects[0]]
})
else:
summary.append({
"icon": self.common.get_icon("options", "static"),
"label": self._("(Multiple)")
})
# -- Brightness
if len(_brightness) > 0:
# Only show % suffix for integers
if not is_same(_brightness):
summary.append({
"icon": self.common.get_icon("options", "75"),
"label": self._("(Multiple)")
})
elif _brightness[0] == True:
summary.append({
"icon": self.common.get_icon("options", "100"),
"label": self._("On")
})
elif _brightness[0] in [False, 0]:
summary.append({
"icon": self.common.get_icon("options", "50"),
"label": self._("Off")
})
elif type(_brightness[0]) in [int, float]:
summary.append({
"icon": self.common.get_icon("options", "100"),
"label": "{0}%".format(_brightness[0])
})
# -- Game Mode
if game_mode:
summary.append({
"icon": self.common.get_icon("options", "game_mode"),
"label": self._("Game Mode Enabled")
})
# -- DPI
if dpi_x or dpi_y:
if dpi_x == dpi_y:
summary.append({
"icon": self.common.get_icon("general", "dpi"),
"label": "{0} DPI".format(dpi_x)
})
else:
summary.append({
"icon": self.common.get_icon("general", "dpi"),
"label": "{0}, {1} DPI".format(dpi_x, dpi_y)
})
# -- Poll Rate
if poll_rate:
summary.append({
"icon": self.common.get_icon("options", "poll_rate"),
"label": "{0} Hz".format(poll_rate)
})
# -- Battery Status
if battery_level:
if battery_charging:
icon = "battery-charging"
else:
if battery_level < 10:
icon = "battery-0"
elif battery_level < 30:
icon = "battery-25"
elif battery_level < 55:
icon = "battery-50"
elif battery_level < 90:
icon = "battery-75"
else:
icon = "battery-100"
summary.append({
"icon": self.common.get_icon("general", icon),
"label": "{0}%".format(battery_level)
})
return {
"backend": self.backend_id,
"uid": uid,
"name": name,
"form_factor": form_factor,
"real_image": real_image,
"serial": serial,
"monochromatic": monochromatic,
"vid": vid,
"pid": pid,
"firmware_version": firmware_version,
"keyboard_layout": keyboard_layout,
"summary": summary,
"dpi_x": dpi_x,
"dpi_y": dpi_y,
"dpi_stages": dpi_stages,
"dpi_min": dpi_min,
"dpi_max": dpi_max,
"matrix": matrix,
"matrix_rows": matrix_rows,
"matrix_cols": matrix_cols,
"zone_labels": zone_labels,
"zone_icons": zone_icons,
"zone_options": zone_options
}
def get_device_by_serial(self, serial):
"""
See _backend.get_device_by_serial()
"""
if not self.devices:
success = self._reinit_device_manager(force_refresh=True)
if success != True:
return success
for index, device in enumerate(self.devices):
if device.serial == serial:
return self.get_device(index)
    def set_device_state(self, uid, zone, option_id, option_data, colours=[]):
        """
        See _backend.set_device_state()

        Applies a single option (effect, brightness, DPI, poll rate, etc.)
        to the device identified by 'uid', for the given 'zone'.

        Params:
            uid         Index into the daemon's device list.
            zone        Zone ID (e.g. "main", "logo"); None is treated as "main".
            option_id   Option to change (e.g. "static", "brightness", "dpi").
            option_data Option payload; type depends on option_id.
            colours     List of hex colour strings; up to 3 used by effects.

        Returns:
            True        Success.
            False       Unknown option/parameter.
            None        Invalid uid.
            str         Exception string if the daemon raised an error.
        """
        try:
            success = self._reinit_device_manager()
            if success != True:
                return success
            rdevice = self.devman.devices[uid]
        except IndexError:
            return None
        except Exception as e:
            return self.common.get_exception_as_string(e)

        # DPI may not associated with a zone (CLI only)
        if not zone:
            zone = "main"

        rzone = self._get_zone_as_object(rdevice, zone)

        # Hardware effects require up to 3 colours. Daemon uses RGB integers (0-255)
        # colour_hex keeps the original hex strings for persistence storage.
        # NOTE(review): colour-based effects below index colour_hex[0..2]
        # unguarded — assumes callers pass enough colours; verify, as an
        # empty list would raise IndexError (caught by the outer except).
        colour_hex = colours
        colour_1 = [0, 255, 0]
        colour_2 = [255, 0, 0]
        colour_3 = [0, 0, 255]

        if colours:
            try:
                if colours[0]:
                    colour_1 = self.common.hex_to_rgb(colours[0])
                if colours[1]:
                    colour_2 = self.common.hex_to_rgb(colours[1])
                if colours[2]:
                    colour_3 = self.common.hex_to_rgb(colours[2])
            except IndexError:
                # Expected, as not all colours may be needed. Use default.
                pass

        try:
            # Brightness or active?
            brightness_parent, brightness_type = self._get_device_brightness(rdevice, zone)

            if option_id == "brightness":
                # Slider value or CLI string
                if brightness_type in [int, str]:
                    brightness_parent.brightness = int(option_data)
                elif brightness_type == bool:
                    brightness_parent.active = option_data

            # Effects and their parameters
            # Each branch applies the effect, then mirrors the state into
            # persistence storage (fallback for daemons without native support).
            elif option_id == "none":
                rzone.none()
                self._write_persistence_storage_fallback(rdevice, zone, rzone, "effect", "none")

            elif option_id == "spectrum":
                rzone.spectrum()
                self._write_persistence_storage_fallback(rdevice, zone, rzone, "effect", "spectrum")

            elif option_id == "wave":
                # Params: <direction 1-2>
                rzone.wave(int(option_data))
                self._write_persistence_storage_fallback(rdevice, zone, rzone, "effect", "wave")
                self._write_persistence_storage_fallback(rdevice, zone, rzone, "wave_dir", option_data)

            elif option_id == "reactive":
                # Params: <red> <green> <blue> <speed 1-4>
                rzone.reactive(colour_1[0], colour_1[1], colour_1[2], int(option_data))
                self._write_persistence_storage_fallback(rdevice, zone, rzone, "effect", "reactive")
                self._write_persistence_storage_fallback(rdevice, zone, rzone, "speed", option_data)
                self._write_persistence_storage_fallback(rdevice, zone, rzone, "colour_1", colour_hex[0])

            elif option_id == "blinking":
                # Params: <red> <green> <blue>
                rzone.blinking(colour_1[0], colour_1[1], colour_1[2])
                self._write_persistence_storage_fallback(rdevice, zone, rzone, "effect", "blinking")
                self._write_persistence_storage_fallback(rdevice, zone, rzone, "colour_1", colour_hex[0])

            elif option_id == "breath" and option_data == "random":
                rzone.breath_random()
                self._write_persistence_storage_fallback(rdevice, zone, rzone, "effect", "breathRandom")

            elif option_id == "breath" and option_data == "single":
                # Params: <red> <green> <blue>
                rzone.breath_single(colour_1[0], colour_1[1], colour_1[2])
                self._write_persistence_storage_fallback(rdevice, zone, rzone, "effect", "breathSingle")
                self._write_persistence_storage_fallback(rdevice, zone, rzone, "colour_1", colour_hex[0])

            elif option_id == "breath" and option_data == "dual":
                # Params: <red1> <green1> <blue1> <red2> <green2> <blue2>
                rzone.breath_dual(colour_1[0], colour_1[1], colour_1[2],
                    colour_2[0], colour_2[1], colour_2[2])
                self._write_persistence_storage_fallback(rdevice, zone, rzone, "effect", "breathDual")
                self._write_persistence_storage_fallback(rdevice, zone, rzone, "colour_1", colour_hex[0])
                self._write_persistence_storage_fallback(rdevice, zone, rzone, "colour_2", colour_hex[1])

            elif option_id == "breath" and option_data == "triple":
                # Params: <red1> <green1> <blue1> <red2> <green2> <blue2> <red3> <green3> <blue3>
                rzone.breath_triple(colour_1[0], colour_1[1], colour_1[2],
                    colour_2[0], colour_2[1], colour_2[2],
                    colour_3[0], colour_3[1], colour_3[2])
                self._write_persistence_storage_fallback(rdevice, zone, rzone, "effect", "breathTriple")
                self._write_persistence_storage_fallback(rdevice, zone, rzone, "colour_1", colour_hex[0])
                self._write_persistence_storage_fallback(rdevice, zone, rzone, "colour_2", colour_hex[1])
                self._write_persistence_storage_fallback(rdevice, zone, rzone, "colour_3", colour_hex[2])

            elif option_id == "pulsate":
                # Params: <red> <green> <blue>
                rzone.pulsate(colour_1[0], colour_1[1], colour_1[2])
                self._write_persistence_storage_fallback(rdevice, zone, rzone, "effect", "pulsate")
                self._write_persistence_storage_fallback(rdevice, zone, rzone, "colour_1", colour_hex[0])

            elif option_id == "ripple" and option_data == "single":
                # Params: <red> <green> <blue> <speed>
                rzone.ripple(colour_1[0], colour_1[1], colour_1[2], self.ripple_refresh_rate)
                self._write_persistence_storage_fallback(rdevice, zone, rzone, "effect", "ripple")
                self._write_persistence_storage_fallback(rdevice, zone, rzone, "colour_1", colour_hex[0])

            elif option_id == "ripple" and option_data == "random":
                # Params: <speed>
                rzone.ripple_random(self.ripple_refresh_rate)
                self._write_persistence_storage_fallback(rdevice, zone, rzone, "effect", "rippleRandomColour")

            elif option_id == "starlight":
                # option_data is "<type>_<speed>", e.g. "single_fast"
                starlight_type = option_data.split("_")[0]
                speed_string = option_data.split("_")[1]
                speeds = {
                    "fast": 1,
                    "medium": 2,
                    "slow": 3
                }

                try:
                    speed = speeds[speed_string]
                except KeyError:
                    self.debug("Invalid/unknown starlight speed!")
                    return False

                if starlight_type == "random":
                    # Params: <speed>
                    rzone.starlight_random(speed)
                    self._write_persistence_storage_fallback(rdevice, zone, rzone, "effect", "starlightRandom")
                    self._write_persistence_storage_fallback(rdevice, zone, rzone, "speed", speed)

                elif starlight_type == "single":
                    # Params: <red> <green> <blue> <speed>
                    rzone.starlight_single(colour_1[0], colour_1[1], colour_1[2], speed)
                    self._write_persistence_storage_fallback(rdevice, zone, rzone, "effect", "starlightSingle")
                    self._write_persistence_storage_fallback(rdevice, zone, rzone, "speed", speed)
                    self._write_persistence_storage_fallback(rdevice, zone, rzone, "colour_1", colour_hex[0])

                elif starlight_type == "dual":
                    # Params: <red1> <green1> <blue1> <red2> <green2> <blue2> <speed>
                    rzone.starlight_dual(colour_1[0], colour_1[1], colour_1[2],
                        colour_2[0], colour_2[1], colour_2[2], speed)
                    self._write_persistence_storage_fallback(rdevice, zone, rzone, "effect", "starlightDual")
                    self._write_persistence_storage_fallback(rdevice, zone, rzone, "speed", speed)
                    self._write_persistence_storage_fallback(rdevice, zone, rzone, "colour_1", colour_hex[0])
                    self._write_persistence_storage_fallback(rdevice, zone, rzone, "colour_2", colour_hex[1])

            elif option_id == "static":
                # Params: <red> <green> <blue>
                rzone.static(colour_1[0], colour_1[1], colour_1[2])
                self._write_persistence_storage_fallback(rdevice, zone, rzone, "effect", "static")
                self._write_persistence_storage_fallback(rdevice, zone, rzone, "colour_1", colour_hex[0])

            # Other
            elif option_id == "game_mode":
                # Params: <true/false>
                rdevice.game_mode_led = option_data

            elif option_id == "dpi":
                # Params: <dpi X> <dpi Y>
                if rdevice.has("available_dpi"):
                    # Device only supports fixed DPI X values, such as DeathAdder 3.5G (#209)
                    rdevice.dpi = (int(option_data), 0)
                else:
                    rdevice.dpi = (int(option_data[0]), int(option_data[1]))

            elif option_id == "sync_dpi_stages":
                # TODO: This can integrate better after the backend is refactored
                # TODO: Polychromatic does not support custom X,Y values, so it's limited here.
                # HACK: It's dirty, but directly access preferences, and use the values if enabled.
                import json
                pref_path = self.common.Paths().preferences
                with open(pref_path, "r") as f:
                    prefs = json.load(f)
                custom = prefs["custom"]
                if not custom["use_dpi_stages"]:
                    return self._("Please set up the DPI stages in Polychromatic's preferences (under Customise).")

                stages = []
                for i in range(1, 6):
                    stages.append(int(custom["dpi_stage_" + str(i)]))

                # [active_stage, [stages: (x,y), (x,y) etc]
                rdevice.dpi_stages = (1, [(stages[0], stages[0]),
                    (stages[1], stages[1]),
                    (stages[2], stages[2]),
                    (stages[3], stages[3]),
                    (stages[4], stages[4])])

            elif option_id == "poll_rate":
                # Params: (int)
                rdevice.poll_rate = int(option_data)

            elif option_id == "idle_time":
                # Params: (int) [in minutes]
                rdevice.set_idle_time(option_data * 60)

            elif option_id == "low_battery_threshold":
                # Params: (int)
                rdevice.set_low_battery_threshold(option_data)

            else:
                return False

        except Exception as e:
            return self.common.get_exception_as_string(e)

        return True
def _get_form_factor(self, rdevice):
"""
Convert the device type returned by OpenRazer to match one used within Polychromatic.
"""
device_name = rdevice.name
device_type = rdevice.type
# Some of these 'device types' originate from legacy OpenRazer versions
openrazer_to_poly = {
"firefly": "mousemat",
"tartarus": "keypad",
"core": "gpu",
"mug": "accessory"
}
if device_type in openrazer_to_poly:
form_factor_id = openrazer_to_poly[device_type]
else:
form_factor_id = device_type
if device_name.find("Base Station") != -1:
form_factor_id = "stand"
elif device_name.find("Blade") != -1:
form_factor_id = "laptop"
elif device_name.find("Core") != -1:
form_factor_id = "gpu"
elif device_name.find("Nommo") != -1:
form_factor_id = "speaker"
elif device_name.find("Raptor") != -1:
form_factor_id = "display"
return self.common.get_form_factor(self._, form_factor_id)
def _get_zone_as_object(self, rdevice, zone):
"""
Returns an object that directly references this device's "zone".
"""
zone_to_device = {
"main": rdevice.fx,
"logo": rdevice.fx.misc.logo,
"scroll": rdevice.fx.misc.scroll_wheel,
"backlight": rdevice.fx.misc.backlight
}
# Ignore missing left/right classes, most devices do not support these.
try:
zone_to_device["left"] = rdevice.fx.misc.left
zone_to_device["right"] = rdevice.fx.misc.right
except Exception:
pass
# Look for charging classes, most devices do not support these.
try:
zone_to_device["charging"] = rdevice.fx.misc.charging
zone_to_device["fully_charged"] = rdevice.fx.misc.fully_charged
zone_to_device["fast_charging"] = rdevice.fx.misc.fast_charging
except Exception:
pass
return zone_to_device[zone]
def _device_has_zone_capability(self, rdevice, zone, capability):
"""
Returns a boolean whether the capability is available for the specified zone.
For example, "active" for zone "logo" will check "lighting_logo_active"
"""
zone_to_capability = {
"main": "lighting",
"logo": "lighting_logo",
"scroll": "lighting_scroll",
"backlight": "lighting_backlight",
"left": "lighting_left",
"right": "lighting_right",
"charging": "lighting_charging",
"fast_charging": "lighting_fast_charging",
"fully_charged": "lighting_fully_charged",
}
return rdevice.has(zone_to_capability[zone] + "_" + capability)
def _get_supported_zones(self, rdevice):
"""
Returns a list of zones that are supported by the device.
"""
zones = []
if rdevice.has("lighting"):
zones.append("main")
if rdevice.has("lighting_logo") or rdevice.has("lighting_logo_active"):
zones.append("logo")
if rdevice.has("lighting_scroll") or rdevice.has("lighting_scroll_active"):
zones.append("scroll")
if rdevice.has("lighting_left"):
zones.append("left")
if rdevice.has("lighting_right"):
zones.append("right")
if rdevice.has("lighting_backlight"):
zones.append("backlight")
if rdevice.has("lighting_charging"):
zones.append("charging")
if rdevice.has("lighting_fast_charging"):
zones.append("fast_charging")
if rdevice.has("lighting_fully_charged"):
zones.append("fully_charged")
return zones
def _get_zone_label_and_icons(self, zones, device_name, form_factor):
"""
Returns the name of icons for a device's lighting areas.
For example, on a Razer Hex mouse, "logo" would be hex ring buttons.
Params:
zones (list) List of zones specified in _get_supported_zones
device_name (str) Name of device, used to determine special labels
form_factor (dict) Output of self._get_form_factor()
Returns:
zone_labels (dict)
zone_icons (dict)
"""
zone_labels = {}
zone_icons = {}
# Always include the 'main' zone metadata (e.g. mice DPI, name, serial)
zones.append("main")
labels = {
"logo": self._("Logo"),
"scroll": self._("Scroll Wheel"),
"backlight": self._("Backlight"),
"left": self._("Left"),
"right": self._("Right"),
"charging": self._("While Charging"),
"fast_charging": self._("While Fast Charging"),
"fully_charged": self._("When Fully Charged"),
}
for zone in zones:
if zone in labels:
label = labels[zone]
else:
label = self._("Unknown")
if zone == "logo" and device_name.startswith("Razer Nex"):
label = self._("Hex Ring")
icon = self.common.get_icon("zones", "naga-hex-ring")
elif zone == "logo" and device_name.startswith("Razer Blade"):
label = self._("Laptop Lid")
icon = self.common.get_icon("zones", "blade-logo")
else:
icon = self.common.get_icon("zones", zone)
# 'main' refers to the base hardware, e.g. overall mouse
if zone == "main":
label = form_factor["label"]
icon = form_factor["icon"]
zone_labels[zone] = label
zone_icons[zone] = icon
return zone_labels, zone_icons
def _get_unmanaged_razer_usb_pids(self):
"""
Returns a list of PIDs of Razer hardware that is physically plugged in,
but is inaccessible by the daemon.
This usually means the installation is incomplete or the device is not
supported by the driver.
"""
all_usb_pids = self.helpers.get_usb_pids_by_vid("1532")
reg_pids = []
unreg_pids = []
# Get VIDs and PIDs from daemon to exclude them.
if self.devices:
for device in self.devices:
vidpid = self._get_device_vid_pid(device)
reg_pids.append(vidpid.get("pid"))
# Identify Razer VIDs that are not registered in the daemon
for usb in all_usb_pids:
if usb in reg_pids:
continue
unreg_pids.append(usb)
return unreg_pids
def _get_device_vid_pid(self, rdevice):
"""
Extracts VID:PID from the daemon's device object in list format: [VID,PID]
In the event OpenRazer's _vid and _pid is inaccessible, then 0000 is returned.
Returns:
{vid, pid} Success: A dictionary consisting of the VID and PID.
"""
try:
vid = str(hex(rdevice._vid))[2:].upper().rjust(4, '0')
pid = str(hex(rdevice._pid))[2:].upper().rjust(4, '0')
except Exception:
self.debug("VID PID unavailable for " + rdevice.name + ". Using dummy ID.")
vid = "0000"
pid = "0000"
return {
"vid": vid,
"pid": pid
}
def _get_device_image(self, rdevice):
    """
    OpenRazer doesn't store device images, they are referenced by a URL.

    This function will download a copy of the image for caching purposes.

    Returns:
        (str)   Path to the cached image, or "" when no image is available
                (downloads disabled, no URL, or download failure).
    """
    if not self.allow_image_download:
        return ""

    import requests

    try:
        # OpenRazer 2.9.0 onwards (#1127)
        image_url = rdevice.device_image
    except AttributeError:
        # OpenRazer 2.8.0 but is backwards compatible.
        # FIX: the KeyError raised by this lookup was previously uncaught —
        # a sibling 'except KeyError' on the outer try never catches
        # exceptions raised inside another handler.
        try:
            image_url = rdevice.razer_urls["top_img"]
        except KeyError:
            return ""

    # Save images in Polychromatic's config directory under "device_images"
    device_images_dir = os.path.join(self.config_store, "images")

    if not os.path.exists(device_images_dir):
        self.debug("Creating folder for device images: " + device_images_dir)
        os.makedirs(device_images_dir)

    image_path = os.path.join(device_images_dir, rdevice.name + "." + image_url.split(".")[-1])

    # Image already cached? (>8 bytes filters out empty/corrupt stubs)
    if os.path.exists(image_path) and os.stat(image_path).st_size > 8:
        return image_path

    # No image?
    if not image_url:
        self.debug("No device image specified for " + rdevice.name)
        return ""

    self.debug("Retrieving device image for " + rdevice.name)
    self.debug("URL: " + image_url)

    try:
        r = requests.get(image_url)
        if r.status_code == 200:
            # Use a context manager so the handle is not leaked.
            with open(image_path, "wb") as f:
                f.write(r.content)
            self.debug("Success!")
            return image_path
        # FIX: format arguments were swapped (name was printed as the
        # status code and vice versa).
        self.debug("Error: Got status code {0} for '{1}'".format(str(r.status_code), rdevice.name))
    except Exception as e:
        self.debug("Error: Got exception while retrieving image for '{0}'...".format(rdevice.name))
        self.debug(str(e) + '\n')

    return ""
def _is_device_monochromatic(self, device):
"""
Returns a boolean to state whether the device supports per-lighting but
only works with the 'green' value from RGB.
"""
# E.g. Razer BlackWidow Ultimate 2016 only outputs "green" RGB
if str(device.name).find("Ultimate") != -1 and device.type == "keyboard":
return True
return False
def _get_device_brightness(self, rdevice, zone):
"""
Returns both the object and data type required for setting the brightness
of the specified zone.
OpenRazer has two kinds of adjusting lighting:
.brightness = a variable between 0 and 100.
.active = an on/off state.
Returns None if brightness is unsupported for the zone.
Returns a list:
(object) The parent object to reference 'brightness' or 'active'
(data type) The data type expected by this object.
Example returns:
- [a.fx, int] for main 'brightness'
- [a.fx.misc.logo, bool] for logo 'active'
"""
# -- Device uses a variable (0-100) and it's 'main' so use the root element
if rdevice.has("brightness") and zone == "main":
return [rdevice, int]
rzone = self._get_zone_as_object(rdevice, zone)
# -- Device is a 'brightness' nested under the zone object
if self._device_has_zone_capability(rdevice, zone, "brightness"):
return [rzone, int]
# -- Device uses an on/off state (zones only)
if self._device_has_zone_capability(rdevice, zone, "active"):
return [rzone, bool]
# -- Device does not support brightness/toggle options
return [None, None]
def _convert_colour_bytes(self, raw):
"""
Convert the daemon's '.colors' function to a string hex.
"""
input_hex = str(raw.hex())
primary_hex = "#000000"
secondary_hex = "#000000"
tertiary_hex = "#000000"
if len(input_hex) >= 6:
primary_hex = input_hex[:6]
if len(input_hex) >= 12:
secondary_hex = input_hex[6:12]
if len(input_hex) >= 18:
tertiary_hex = input_hex[12:18]
return {
"primary": "#" + primary_hex,
"secondary": "#" + secondary_hex,
"tertiary": "#" + tertiary_hex
}
def _read_persistence_storage(self, rdevice, zone):
    """
    OpenRazer 3.0 uses persistence storage to track the last effect,
    colours and parameters. If the daemon currently running does not have
    this feature, continue with a file-based fallback.

    Returns a dict with keys: effect, colour_1, colour_2, colour_3,
    wave_dir, speed.
    """
    rzone = self._get_zone_as_object(rdevice, zone)

    try:
        if not hasattr(rzone, "effect"):
            # Current OpenRazer version doesn't have persistence
            self.debug("Daemon persistence unavailable, falling back.")
            return self._read_persistence_storage_fallback(rdevice, zone)
    except Exception:
        # Device/zone does not need persistence, return generic data
        # https://github.com/openrazer/openrazer/issues/1380
        # NOTE(review): hasattr() only swallows AttributeError, so a D-Bus
        # error raised while probing the property lands in this handler.
        self.debug("Persistence read unnecessary for {0} (Zone: {1})".format(rdevice.name, zone))
        return {
            "effect": "",
            "colour_1": "#000000",
            "colour_2": "#000000",
            "colour_3": "#000000",
            "wave_dir": 1,
            "speed": 2
        }

    try:
        # Daemon-side persistence is available; read it directly.
        colours = self._convert_colour_bytes(rzone.colors)

        return {
            "effect": str(rzone.effect),
            "colour_1": colours["primary"],
            "colour_2": colours["secondary"],
            "colour_3": colours["tertiary"],
            "wave_dir": int(rzone.wave_dir),
            "speed": int(rzone.speed)
        }
    except Exception as e:
        # The attribute exists but reading failed; use the flat-file fallback.
        self.debug("Failed to read persistence, falling back!")
        self.debug("The exception was: " + str(e))
        return self._read_persistence_storage_fallback(rdevice, zone)
def _get_persistence_storage_fallback_path(self):
"""
Prepare the 'fallback' persistence storage if the daemon's is unavailable.
"""
storage_dir = os.path.join(self.config_store, "persistence")
if not os.path.exists(storage_dir):
os.makedirs(storage_dir)
return storage_dir
def _read_persistence_storage_fallback(self, rdevice, zone):
"""
In case the daemon's persistence storage is unavailable, use flat files
stored on the filesystem.
"""
storage_dir = self._get_persistence_storage_fallback_path()
key_name_suffix = "{0}_{1}".format(rdevice.serial, zone)
def _get_data(data_name, data_type, default_value):
file_path = os.path.join(storage_dir, key_name_suffix + "_" + data_name)
if not os.path.exists(file_path):
return default_value
with open(file_path) as f:
return data_type(f.readline())
return {
"effect": _get_data("effect", str, "spectrum"),
"colour_1": _get_data("colour_1", str, "#00FF00"),
"colour_2": _get_data("colour_2", str, "#FF0000"),
"colour_3": _get_data("colour_3", str, "#0000FF"),
"wave_dir": _get_data("wave_dir", int, 1),
"speed": _get_data("speed", int, 2)
}
def _write_persistence_storage_fallback(self, rdevice, zone, rzone, key, value):
"""
If the daemon does not support persistence storage (e.g. old version)
then write to files instead.
Setting effects also summons this function, in case the daemon version
doesn't have the persistence feature, otherwise the state would be lost.
"""
try:
if hasattr(rzone, "effect"):
# No need to write to file, daemon will have processed persistence
return
except Exception:
# Workaround API throwing a DBUS exception
# https://github.com/openrazer/openrazer/issues/1380
return
storage_dir = self._get_persistence_storage_fallback_path()
key_name_suffix = "{0}_{1}_{2}".format(rdevice.serial, zone, key)
file_path = os.path.join(storage_dir, key_name_suffix)
with open(file_path, "w") as f:
f.write(str(value))
def get_device_object(self, uid):
    """
    See _backend.get_device_object()

    Returns an fx.FX-compatible object for drawing to the device's custom
    LED matrix, None when no device exists at this index, or an error
    string describing why the object could not be created.
    """
    try:
        success = self._reinit_device_manager()
        if success != True:
            return success
        rdevice = self.devman.devices[uid]
    except IndexError:
        # No daemon device at this index.
        return None
    except Exception as e:
        return self.common.get_exception_as_string(e)

    # Custom frames require a programmable LED matrix.
    if not rdevice.has("lighting_led_matrix"):
        return "Device does not support 'lighting_led_matrix'"

    class OpenRazerCustomFX(fx.FX):
        """Bridges Polychromatic's fx API onto OpenRazer's matrix API."""

        def __init__(self, rows, cols, name, backend, form_factor, serial, rdevice):
            """
            Parameter 'rdevice' holds the OpenRazer daemon's device object.
            """
            super().__init__(rows, cols, name, backend, form_factor, serial)
            self._rdevice = rdevice

        def set(self, x, y, red, green, blue):
            # OpenRazer's matrix is indexed [row, column], i.e. [y, x].
            self._rdevice.fx.advanced.matrix[y, x] = (red, green, blue)

        def draw(self):
            self._rdevice.fx.advanced.draw()

        def clear(self):
            self._rdevice.fx.advanced.matrix.reset()

        def brightness(self, percent):
            self._rdevice.brightness = percent

    class DeathStalkerChromaFX(OpenRazerCustomFX):
        """
        This device has a matrix of 12x1, but every second LED (2,4,6,8,10,12)
        actually blends with the previous LED (1,3,5,7,9,11) (#335)
        """
        def set(self, x, y, red, green, blue):
            # Matrix is halfed. "Stretch" LEDs across two for one.
            # 0-based: Even (normal), odd (blend)
            # Example: [0] -> [0,1]    [5] -> [10,11]
            self._rdevice.fx.advanced.matrix[y, (x * 2)] = (red, green, blue)
            self._rdevice.fx.advanced.matrix[y, (x * 2) + 1] = (red, green, blue)

    # Overrides for quirky devices
    if rdevice.name == "Razer DeathStalker Chroma" and rdevice.fx.advanced.cols == 12:
        # OpenRazer changed this matrix after 3.0.1 (6 => 12)
        return DeathStalkerChromaFX(1, 6, str(rdevice.name), self.backend_id, self._get_form_factor(rdevice)["id"], str(rdevice.serial), rdevice)

    return OpenRazerCustomFX(int(rdevice.fx.advanced.rows),
                             int(rdevice.fx.advanced.cols),
                             str(rdevice.name),
                             self.backend_id,
                             self._get_form_factor(rdevice)["id"],
                             str(rdevice.serial),
                             rdevice)
def restart(self):
    """
    Immediately restart the daemon process.

    Stops the daemon (escalating to killall if needed), then starts it
    again, giving it a couple of seconds at each step.
    """
    import time

    run = os.system

    # Ask the daemon to stop itself first.
    self.debug("Running: openrazer-daemon -s")
    run("openrazer-daemon -s")

    # Give chance to stop, but kill to be sure.
    self.debug("Waiting for openrazer-daemon to stop (2s)...")
    time.sleep(2)
    run("killall openrazer-daemon")

    # Bring it back up.
    self.debug("Running: openrazer-daemon")
    run("openrazer-daemon")
    self.debug("Waiting for openrazer-daemon to start (2s)...")
    time.sleep(2)
|
lah7/polychromatic
|
pylib/backends/openrazer.py
|
Python
|
gpl-3.0
| 62,072
|
[
"Firefly",
"VisIt"
] |
f582f261a4575930a10f247285d0e48700ef81ac06b1f812abb0f4ea8792dc38
|
import os
import shutil

from ase.io import read
from ase.neb import NEB
from ase.calculators.turbomole import Turbomole
from ase.optimize import BFGS

# Read the two endpoint geometries of the path.
initial = read('initial.coord')
final = read('final.coord')

# Turbomole reads its geometry from a file named 'coord'; seed it with the
# initial image. (stdlib shutil replaces the previous shell invocation
# 'rm -f coord; cp initial.coord coord' — portable and error-checked.)
shutil.copyfile('initial.coord', 'coord')

# Make a band consisting of 5 configs:
configs = [initial]
configs += [initial.copy() for i in range(3)]
configs += [final]
band = NEB(configs, climb=True)

# Interpolate linearly the positions of the not-endpoint-configs:
band.interpolate()

# Set calculators
for config in configs:
    config.set_calculator(Turbomole())

# Optimize the Path:
relax = BFGS(band, trajectory='neb.traj')
relax.run(fmax=0.05)
|
grhawk/ASE
|
tools/doc/ase/calculators/turbomole_ex2_diffuse_usingNEB.py
|
Python
|
gpl-2.0
| 657
|
[
"ASE",
"TURBOMOLE"
] |
54529637b29908608bc16bb69c43bd3fd107d1cb8c3c30c847070a4aa675bf33
|
# Copyright (C) 2010-2019 The ESPResSo project
#
# This file is part of ESPResSo.
#
# ESPResSo is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# ESPResSo is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
#
"""Interface module for the H5md core implementation."""
import sys
from ...script_interface import PScriptInterface # pylint: disable=import
from ...code_info import features
class UnitSystem:
    """
    Data class for writing H5MD trajectories with
    `physical units <https://nongnu.org/h5md/modules/units.html>`_.

    There are four settable units: 'mass', 'length', 'time', 'charge'.
    Units should be written as strings following the specifications defined
    `here <https://nongnu.org/h5md/modules/units.html#unit-string>`_,
    e.g. ``UnitSystem(time='ps', mass='u', length='nm', charge='e')``.
    """

    def __init__(self, **kwargs):
        # The four base dimensions default to "no unit" (empty string).
        self.mass = ''
        self.time = ''
        self.length = ''
        self.charge = ''
        for key, value in kwargs.items():
            assert hasattr(self, key), 'unknown dimension ' + key
            setattr(self, key, value or '')

        # Derived units are only well-defined when every base unit they
        # depend on has been supplied.
        have_force = self.length and self.mass and self.time
        have_velocity = self.length and self.time
        self.force = f'{self.length} {self.mass} {self.time}-2' if have_force else ''
        self.velocity = f'{self.length} {self.time}-1' if have_velocity else ''
# H5md is only functional when ESPResSo was compiled with the H5MD feature;
# otherwise expose a stub that fails loudly on construction.
if 'H5MD' not in features():
    class H5md:
        def __init__(self, *args, **kwargs):
            """Stub: raise because H5MD support was not compiled in."""
            raise RuntimeError("H5md not available.")

else:
    class H5md:
        """H5md file object.

        Used for accessing the H5MD core implementation.

        .. note::
           Bonds will be written to the file automatically if they exist.

        Parameters
        ----------
        file_path : :obj:`str`
            Path to the trajectory file.
        unit_system : :obj:`UnitSystem`, optional
            Physical units for the data.

        """

        def __init__(self, file_path, unit_system=UnitSystem()):
            # NOTE(review): the default UnitSystem() is created once at class
            # definition time and shared across calls; harmless as long as it
            # is never mutated — confirm if UnitSystem ever grows setters.
            self.h5md_instance = PScriptInterface(
                "ScriptInterface::Writer::H5md", file_path=file_path, script_path=sys.argv[0],
                mass_unit=unit_system.mass, length_unit=unit_system.length,
                time_unit=unit_system.time,
                force_unit=unit_system.force,
                velocity_unit=unit_system.velocity,
                charge_unit=unit_system.charge
            )

        def get_params(self):
            """Get the parameters from the script interface."""
            return self.h5md_instance.get_params()

        def write(self):
            """Call the H5md write method."""
            self.h5md_instance.call_method("write")

        def flush(self):
            """Call the H5md flush method."""
            self.h5md_instance.call_method("flush")

        def close(self):
            """Close the H5md file."""
            self.h5md_instance.call_method("close")
|
espressomd/espresso
|
src/python/espressomd/io/writer/h5md.py
|
Python
|
gpl-3.0
| 3,488
|
[
"ESPResSo"
] |
a4e504da4ae02ab6d4150161885c0e33e9dfab8d1ec21842c056e28d8891c1c2
|
#!/usr/bin/env python
import unittest
import random
import nemo
class IzNetwork(nemo.Network):
    """Convenience wrapper around nemo.Network pre-configured with a single
    Izhikevich neuron type."""

    def __init__(self):
        nemo.Network.__init__(self)
        # All neurons added through this class share one neuron type.
        self._type = self.add_neuron_type('Izhikevich')

    def add_neuron(self, nidx, a, b, c, d, sigma, u, v):
        """Add one Izhikevich neuron with the given parameters and state."""
        parameters = {'a': a, 'b': b, 'c': c, 'd': d, 'sigma': sigma}
        state = {'u': u, 'v': v}
        nemo.Network.add_neuron(self, self._type, nidx, parameters, state)
def randomSource():
    """Random presynaptic neuron index in [0, 999]."""
    return random.randint(0, 999)

def randomTarget():
    """Random postsynaptic neuron index (same range as sources)."""
    return randomSource()

def randomDelay():
    """Random conduction delay in [1, 20] simulation steps."""
    return random.randint(1, 20)

def randomWeight():
    """Random synaptic weight drawn uniformly from [-1.0, 1.0]."""
    return random.uniform(-1.0, 1.0)

def randomPlastic():
    """Randomly flag a synapse as plastic or static."""
    return random.choice((True, False))

def randomParameterIndex():
    """Random index into the five Izhikevich parameters (a,b,c,d,sigma)."""
    return random.randint(0, 4)

def randomStateIndex():
    """Random index into the two Izhikevich state variables (u,v)."""
    return random.randint(0, 1)
def arg(vlen, gen):
    """
    Return either a fixed-length vector or a scalar, with values drawn from 'gen'

    The choice between the vector form (length 'vlen') and the scalar form
    is made uniformly at random.
    """
    if random.choice((True, False)):
        return [gen() for _ in range(vlen)]
    return gen()
class TestFunctions(unittest.TestCase):
    """Exercises the NeMo boost::python bindings: scalar vs vector argument
    forms, getters/setters, and error reporting for invalid indices."""

    def test_network_set_neuron(self):
        """ create a simple network and make sure we can get and set parameters
        and state variables """
        # Standard Izhikevich regular-spiking parameters (with 0.25 jitter).
        a = 0.02
        b = 0.2
        c = -65.0+15.0*0.25
        d = 8.0-6.0*0.25
        v = -65.0
        u = b * v
        sigma = 5.0

        net = IzNetwork()

        # This should only succeed for existing neurons
        self.assertRaises(RuntimeError, net.set_neuron, 0, a, b, c, d, sigma, u, v)

        net.add_neuron(0, a, b, c-0.1, d, sigma, u, v-1.0)

        # Getters should fail if given invalid neuron or parameter
        self.assertRaises(RuntimeError, net.get_neuron_parameter, 1, 0)  # neuron
        self.assertRaises(RuntimeError, net.get_neuron_state, 1, 0)      # neuron
        self.assertRaises(RuntimeError, net.get_neuron_parameter, 0, 5)  # parameter
        self.assertRaises(RuntimeError, net.get_neuron_state, 0, 2)      # state

        e = 0.1

        # Test setting whole neuron, reading back by parts
        net.set_neuron(0, a-e, b-e, c-e, d-e, sigma-e, u-e, v-e)

        # Since Python uses double precision and NeMo uses single precision
        # internally, the parameters may not be exactly the same after reading
        # back.
        places = 5
        self.assertAlmostEqual(net.get_neuron_parameter(0, 0), a-e, places)
        self.assertAlmostEqual(net.get_neuron_parameter(0, 1), b-e, places)
        self.assertAlmostEqual(net.get_neuron_parameter(0, 2), c-e, places)
        self.assertAlmostEqual(net.get_neuron_parameter(0, 3), d-e, places)
        self.assertAlmostEqual(net.get_neuron_parameter(0, 4), sigma-e, places)
        self.assertAlmostEqual(net.get_neuron_state(0, 0), u-e, places)
        self.assertAlmostEqual(net.get_neuron_state(0, 1), v-e, places)

        # Test setting and reading back neuron by parts
        net.set_neuron_parameter(0, 0, a)
        self.assertAlmostEqual(net.get_neuron_parameter(0, 0), a, places)

        net.set_neuron_parameter(0, 1, b)
        self.assertAlmostEqual(net.get_neuron_parameter(0, 1), b, places)

        net.set_neuron_parameter(0, 2, c)
        self.assertAlmostEqual(net.get_neuron_parameter(0, 2), c, places)

        net.set_neuron_parameter(0, 3, d)
        self.assertAlmostEqual(net.get_neuron_parameter(0, 3), d, places)

        net.set_neuron_parameter(0, 4, sigma)
        self.assertAlmostEqual(net.get_neuron_parameter(0, 4), sigma, places)

        net.set_neuron_state(0, 0, u)
        self.assertAlmostEqual(net.get_neuron_state(0, 0), u, places)

        net.set_neuron_state(0, 1, v)
        self.assertAlmostEqual(net.get_neuron_state(0, 1), v, places)

        # Individual setters should fail if given invalid neuron or parameter
        self.assertRaises(RuntimeError, net.set_neuron_parameter, 1, 0, 0.0)  # neuron
        self.assertRaises(RuntimeError, net.set_neuron_state, 1, 0, 0.0)      # neuron
        self.assertRaises(RuntimeError, net.set_neuron_parameter, 0, 5, 0.0)  # parameter
        self.assertRaises(RuntimeError, net.set_neuron_state, 0, 2, 0.0)      # state

    def check_neuron_function(self, fun, ncount):
        # Call 'fun' with a random mix of vector and scalar arguments; any
        # vector argument forces the vectorised call form.
        vlen = random.randint(2, ncount)
        a = arg(vlen, random.random)
        b = arg(vlen, random.random)
        c = arg(vlen, random.random)
        d = arg(vlen, random.random)
        u = arg(vlen, random.random)
        v = arg(vlen, random.random)
        s = arg(vlen, random.random)
        vectorized = any(isinstance(x, list) for x in [a, b, c, d, u, v, s])
        if vectorized:
            fun(range(vlen), a, b, c, d, s, u, v)
        else:
            fun(random.randint(0,1000), a, b, c, d, s, u, v)

    def test_add_neuron(self):
        """
        The add_neuron method supports either vector or scalar input. This
        test calls set_synapse in a large number of ways, checking for
        catastrophics failures in the boost::python layer
        """
        for test in range(1000):
            net = IzNetwork()
            self.check_neuron_function(net.add_neuron, ncount=1000)

    def test_set_neuron(self):
        """
        The set_neuron method supports either vector or scalar input. This
        test calls set_synapse in a large number of ways, checking for
        catastrophics failures in the boost::python layer
        """
        net = IzNetwork()
        ncount = 1000
        net.add_neuron(range(ncount), 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0)
        for test in range(1000):
            self.check_neuron_function(net.set_neuron, ncount=1000)
        sim = nemo.Simulation(net, nemo.Configuration())
        for test in range(1000):
            self.check_neuron_function(sim.set_neuron, ncount=1000)

    def check_set_neuron_vector(self, obj, pop):
        """
        Test vector/scalar forms of set_neuron for either network or simulation

        pop -- list of neuron
        """
        for test in range(1000):
            vlen = random.randint(2, 100)
            # We need uniqe neurons here, for defined behaviour
            vector = random.choice([True, False])
            if vector:
                neuron = random.sample(pop, vlen)
                value = [random.random() for n in neuron]
            else:
                neuron = random.choice(pop)
                value = random.random()

            def assertListsAlmostEqual(value, ret):
                # Element-wise comparison in vector mode, plain comparison
                # otherwise (single precision => 5 places).
                if vector:
                    self.assertEqual(vlen, len(ret))
                    self.assertEqual(vlen, len(value))
                    self.assertEqual(vlen, len(neuron))
                    [self.assertAlmostEqual(a, b, 5) for (a,b) in zip(value, ret)]
                else:
                    self.assertAlmostEqual(value, ret, 5)

            # check neuron parameter
            param = randomParameterIndex()
            obj.set_neuron_parameter(neuron, param, value)
            ret = obj.get_neuron_parameter(neuron, param)
            assertListsAlmostEqual(value, ret)

            # check neuron state
            var = randomStateIndex()
            obj.set_neuron_state(neuron, var, value)
            ret = obj.get_neuron_state(neuron, var)
            assertListsAlmostEqual(value, ret)

    def test_network_set_neuron_vector(self):
        """
        Test for failures in vector/scalar form of set_neuron

        The set_neuron_parameter methods supports either vector or scalar
        input. This test calls this function in a large number of ways,
        checking for catastrophics failures in the boost::python layer
        """
        net = IzNetwork()
        pop = range(1000)
        for n in pop:
            net.add_neuron(n, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0)
        self.check_set_neuron_vector(net, pop)

    def test_sim_set_neuron_vector(self):
        """
        Test for failures in vector/scalar form of set_neuron

        The set_neuron_parameter methods supports either vector or scalar
        input. This test calls this function in a large number of ways,
        checking for catastrophics failures in the boost::python layer
        """
        net = IzNetwork()
        conf = nemo.Configuration()
        pop = range(1000)
        for n in pop:
            net.add_neuron(n, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0)
        sim = nemo.Simulation(net, conf)
        self.check_set_neuron_vector(sim, pop)

    def simple_network(self):
        # Two neurons connected both ways; used by the scalar-form tests.
        net = IzNetwork()
        net.add_neuron(0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0)
        net.add_neuron(1, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0)
        net.add_synapse(0, 1, 1, 5.0, False)
        net.add_synapse(1, 0, 1, 5.0, False)
        return (net, nemo.Simulation(net, nemo.Configuration()))

    def test_get_neuron_scalar(self):
        """
        Test that singleton arguments to neuron getters work as either scalar
        or singleton list.
        """
        def check(x):
            x.get_neuron_state([0], 0)
            x.get_neuron_state(0, 0)
            x.get_neuron_parameter([0], 0)
            x.get_neuron_parameter(0, 0)
        (net, sim) = self.simple_network()
        check(net)
        check(sim)

    def test_set_neuron_scalar(self):
        """
        Test that singleton arguments to neuron setters work as either scalar
        or singleton list.
        """
        def check(x):
            x.set_neuron_state([0], 0, [0])
            x.set_neuron_state(0, 0, 0)
            x.set_neuron_parameter([0], 0, [0])
            x.set_neuron_parameter(0, 0, 0)
        (net, sim) = self.simple_network()
        check(net)
        check(sim)

    def test_get_synapse_scalar(self):
        """
        Test that singleton arguments to synapse getters work as either scalar
        or singleton list.
        """
        def check(x):
            x.get_synapse_source(0)
            x.get_synapse_source([0])
            x.get_synapse_target(0)
            x.get_synapse_target([0])
            x.get_synapse_delay(0)
            x.get_synapse_delay([0])
            x.get_synapse_weight(0)
            x.get_synapse_weight([0])
            x.get_synapse_plastic(0)
            x.get_synapse_plastic([0])
        (net, sim) = self.simple_network()
        check(net)
        check(sim)

    def test_add_synapse(self):
        """
        The add_synapse method supports either vector or scalar input. This
        test calls set_synapse in a large number of ways, checking for
        catastrophics failures in the boost::python layer
        """
        net = IzNetwork()
        for test in range(1000):
            vlen = random.randint(2, 500)
            source = arg(vlen, randomSource)
            target = arg(vlen, randomTarget)
            delay = arg(vlen, randomDelay)
            weight = arg(vlen, randomWeight)
            plastic = arg(vlen, randomPlastic)
            ids = net.add_synapse(source, target, delay, weight, plastic)
            # Vector in => vector of synapse ids out; scalar in => scalar out.
            vectorized = any(isinstance(n, list) for n in [source, target, delay, weight, plastic])
            if vectorized:
                self.assertTrue(isinstance(ids, list))
                self.assertEqual(len(ids), vlen)
            else:
                self.assertFalse(isinstance(ids, list))

    def test_get_synapses_from_unconnected(self):
        # A neuron with no outgoing synapses yields an empty query result.
        net = IzNetwork()
        net.add_neuron(0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0)
        self.assertEqual(len(net.get_synapses_from(0)), 0)
        sim = nemo.Simulation(net, nemo.Configuration())
        self.assertEqual(len(sim.get_synapses_from(0)), 0)

    def test_get_synapse(self):
        """
        Test scalar and vector form of synapse getters

        Synapse getters have both scalar and vector forms. To test these,
        construct a network with fixed connectivity where all synapse
        properties are functions of the source and target, then read back and
        verify that the values are as expected.
        """
        def delay(source, target):
            return 1 + ((source + target) % 20)

        def plastic(source, target):
            # NOTE(review): 'x % 1 == 0' is always true for integers, so every
            # synapse is plastic; '% 2' may have been intended. The round-trip
            # check below is self-consistent either way — confirm intent.
            return (source + target) % 1 == 0

        def weight(source, target):
            return float(source) + float(target)

        ncount = 100
        net = IzNetwork()
        for src in range(ncount):
            net.add_neuron(src, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0)
            for tgt in range(src+1):
                net.add_synapse(src, tgt, delay(src, tgt), weight(src, tgt), plastic(src, tgt))
        conf = nemo.Configuration()
        sim = nemo.Simulation(net, conf)

        def check_scalar(x, known_source, sid, source, target):
            self.assertEqual(known_source, source)
            self.assertEqual(x.get_synapse_delay(sid), delay(source, target))
            self.assertEqual(x.get_synapse_weight(sid), weight(source, target))
            self.assertEqual(x.get_synapse_plastic(sid), plastic(source, target))

        def check(x):
            for src in range(ncount):
                all_synapses = x.get_synapses_from(src)
                # read a random number of these out-of-order
                n_queried = random.randint(1, len(all_synapses))
                queried = random.sample(all_synapses, n_queried)
                if len(queried) == 1:
                    queried = queried[0]
                sources = x.get_synapse_source(queried)
                targets = x.get_synapse_target(queried)
                if n_queried == 1:
                    check_scalar(x, src, queried, sources, targets)
                else:
                    for (sid, qsrc, tgt) in zip(queried, sources, targets):
                        check_scalar(x, src, sid, qsrc, tgt)

        def check_iterator(x):
            # Make synapse getter can deal with the iterator returned by the
            # the synapse query
            for src in range(ncount):
                srcs = x.get_synapse_source(x.get_synapses_from(src))

        check(net)
        check(sim)
        check_iterator(net)
        check_iterator(sim)
# Run the full suite when executed directly as a script.
if __name__ == '__main__':
    unittest.main()
|
brainstudio-team/NeMo
|
src/api/python/test.py
|
Python
|
gpl-2.0
| 14,056
|
[
"NEURON"
] |
82e0b0af10ce90494dacfe1e69fe3550c3a4770ee6276e5864596a78e5f39e7e
|
# -*- coding: utf-8 -*-
"""
Tests the "preview" selector in the LMS that allows changing between Staff, Learner, and Content Groups.
"""
from textwrap import dedent
from nose.plugins.attrib import attr
from common.test.acceptance.fixtures.course import CourseFixture, XBlockFixtureDesc
from common.test.acceptance.pages.common.auto_auth import AutoAuthPage
from common.test.acceptance.pages.lms.courseware import CoursewarePage
from common.test.acceptance.pages.lms.instructor_dashboard import InstructorDashboardPage
from common.test.acceptance.pages.lms.staff_view import StaffCoursewarePage
from common.test.acceptance.tests.helpers import UniqueCourseTest, create_user_partition_json
from xmodule.partitions.partitions import ENROLLMENT_TRACK_PARTITION_ID, MINIMUM_STATIC_PARTITION_ID, Group
@attr(shard=20)
class StaffViewTest(UniqueCourseTest):
    """
    Tests that verify the staff view.
    """
    # Global-staff account used by every test in this hierarchy.
    USERNAME = "STAFF_TESTER"
    EMAIL = "johndoe@example.com"

    def setUp(self):
        """Install a fresh course and auto-authenticate as global staff."""
        super(StaffViewTest, self).setUp()

        self.courseware_page = CoursewarePage(self.browser, self.course_id)

        # Install a course with sections/problems, tabs, updates, and handouts
        self.course_fixture = CourseFixture(
            self.course_info['org'], self.course_info['number'],
            self.course_info['run'], self.course_info['display_name']
        )

        # Subclasses provide populate_course_fixture to add content.
        self.populate_course_fixture(self.course_fixture)  # pylint: disable=no-member

        self.course_fixture.install()

        # Auto-auth register for the course.
        # Do this as global staff so that you will see the Staff View
        AutoAuthPage(self.browser, username=self.USERNAME, email=self.EMAIL,
                     course_id=self.course_id, staff=True).visit()

    def _goto_staff_page(self):
        """
        Open staff page with assertion
        """
        self.courseware_page.visit()
        staff_page = StaffCoursewarePage(self.browser, self.course_id)
        # Sanity-check that the page opened in the 'Staff' view mode.
        self.assertEqual(staff_page.staff_view_mode, 'Staff')
        return staff_page
@attr(shard=20)
class CourseWithoutContentGroupsTest(StaffViewTest):
    """
    Setup for tests that have no content restricted to specific content groups.
    """

    def populate_course_fixture(self, course_fixture):
        """
        Populates test course with chapter, sequential, and 2 problems.
        """
        # Single checkbox problem; the correct answer is "Yes".
        problem_data = dedent("""
            <problem markdown="Simple Problem" max_attempts="" weight="">
              <p>Choose Yes.</p>
              <choiceresponse>
                <checkboxgroup>
                  <choice correct="true">Yes</choice>
                </checkboxgroup>
              </choiceresponse>
            </problem>
        """)

        course_fixture.add_children(
            XBlockFixtureDesc('chapter', 'Test Section').add_children(
                XBlockFixtureDesc('sequential', 'Test Subsection').add_children(
                    XBlockFixtureDesc('problem', 'Test Problem 1', data=problem_data),
                    XBlockFixtureDesc('problem', 'Test Problem 2', data=problem_data)
                )
            )
        )
@attr(shard=20)
class StaffViewToggleTest(CourseWithoutContentGroupsTest):
    """
    Tests for the staff view toggle button.
    """

    def test_instructor_tab_visibility(self):
        """
        Test that the instructor tab is hidden when viewing as a student.
        """
        course_page = self._goto_staff_page()
        # Staff mode: the Instructor tab must be present.
        self.assertTrue(course_page.has_tab('Instructor'))
        # Switching to Learner view hides the Instructor tab.
        course_page.set_staff_view_mode('Learner')
        self.assertEqual(course_page.staff_view_mode, 'Learner')
        self.assertFalse(course_page.has_tab('Instructor'))
@attr(shard=20)
class StaffDebugTest(CourseWithoutContentGroupsTest):
    """
    Tests that verify the staff debug info.
    """

    def _open_debug_info(self, answer=True, via_ajax=False):
        """
        Go to the staff courseware page and open the staff debug dialog.

        answer   -- answer the first problem before opening the dialog, so
                    that student state exists (default True).
        via_ajax -- load the problem via ajax first (regression coverage
                    for dynamically-loaded problems).
        """
        staff_page = self._goto_staff_page()
        if via_ajax:
            staff_page.load_problem_via_ajax()
        if answer:
            staff_page.answer_problem()
        return staff_page.open_staff_debug_info()

    def _assert_idash_msg(self, staff_debug_page, expected_msg):
        """Assert the first message shown in the debug dialog is as expected."""
        self.assertEqual(expected_msg, staff_debug_page.idash_msg[0])

    def test_reset_attempts_empty(self):
        """
        Test that we reset even when there is no student state
        """
        page = self._open_debug_info(answer=False)
        page.reset_attempts()
        self._assert_idash_msg(
            page, u'Successfully reset the attempts for user {}'.format(self.USERNAME))

    def test_delete_state_empty(self):
        """
        Test that we delete properly even when there isn't state to delete.
        """
        page = self._open_debug_info(answer=False)
        page.delete_state()
        self._assert_idash_msg(
            page, u'Successfully deleted student state for user {}'.format(self.USERNAME))

    def test_reset_attempts_state(self):
        """
        Successfully reset the student attempts
        """
        page = self._open_debug_info()
        page.reset_attempts()
        self._assert_idash_msg(
            page, u'Successfully reset the attempts for user {}'.format(self.USERNAME))

    def test_rescore_problem(self):
        """
        Rescore the student
        """
        page = self._open_debug_info()
        page.rescore()
        self._assert_idash_msg(
            page, u'Successfully rescored problem for user {}'.format(self.USERNAME))

    def test_rescore_problem_if_higher(self):
        """
        Rescore the student, keeping the score only if it improves
        """
        page = self._open_debug_info()
        page.rescore_if_higher()
        self._assert_idash_msg(
            page, u'Successfully rescored problem to improve score for user {}'.format(self.USERNAME))

    def test_student_state_delete(self):
        """
        Successfully delete the student state with an answer
        """
        page = self._open_debug_info()
        page.delete_state()
        self._assert_idash_msg(
            page, u'Successfully deleted student state for user {}'.format(self.USERNAME))

    def test_student_by_email(self):
        """
        Successfully reset the student attempts using their email address
        """
        page = self._open_debug_info()
        page.reset_attempts(self.EMAIL)
        self._assert_idash_msg(
            page, u'Successfully reset the attempts for user {}'.format(self.EMAIL))

    def test_bad_student(self):
        """
        Test negative response with invalid user
        """
        page = self._open_debug_info()
        page.delete_state('INVALIDUSER')
        self._assert_idash_msg(
            page, u'Failed to delete student state for user. User does not exist.')

    def test_reset_attempts_for_problem_loaded_via_ajax(self):
        """
        Successfully reset the student attempts for problem loaded via ajax.
        """
        page = self._open_debug_info(via_ajax=True)
        page.reset_attempts()
        self._assert_idash_msg(
            page, u'Successfully reset the attempts for user {}'.format(self.USERNAME))

    def test_rescore_state_for_problem_loaded_via_ajax(self):
        """
        Rescore the student for problem loaded via ajax.
        """
        page = self._open_debug_info(via_ajax=True)
        page.rescore()
        self._assert_idash_msg(
            page, u'Successfully rescored problem for user {}'.format(self.USERNAME))

    def test_student_state_delete_for_problem_loaded_via_ajax(self):
        """
        Successfully delete the student state for problem loaded via ajax.
        """
        page = self._open_debug_info(via_ajax=True)
        page.delete_state()
        self._assert_idash_msg(
            page, u'Successfully deleted student state for user {}'.format(self.USERNAME))
class CourseWithContentGroupsTest(StaffViewTest):
    """
    Verifies that changing the "View this course as" selector works properly for content groups.
    """
    def setUp(self):
        super(CourseWithContentGroupsTest, self).setUp()
        # pylint: disable=protected-access
        # Install a cohort-scheme user partition with 'alpha' and 'beta'
        # content groups directly on the course xblock metadata.
        self.course_fixture._update_xblock(self.course_fixture._course_location, {
            "metadata": {
                u"user_partitions": [
                    create_user_partition_json(
                        MINIMUM_STATIC_PARTITION_ID,
                        'Configuration alpha,beta',
                        'Content Group Partition',
                        [
                            # Group IDs are offset from the partition ID.
                            Group(MINIMUM_STATIC_PARTITION_ID + 1, 'alpha'),
                            Group(MINIMUM_STATIC_PARTITION_ID + 2, 'beta')
                        ],
                        scheme="cohort"
                    )
                ],
            },
        })
def populate_course_fixture(self, course_fixture):
    """
    Populates test course with chapter, sequential, and 3 problems.
    One problem is visible to all, one problem is visible only to Group "alpha", and
    one problem is visible only to Group "beta".
    """
    # Minimal single-choice problem XML reused for every problem below.
    problem_data = dedent("""
        <problem markdown="Simple Problem" max_attempts="" weight="">
            <choiceresponse>
                <label>Choose Yes.</label>
                <checkboxgroup>
                    <choice correct="true">Yes</choice>
                </checkboxgroup>
            </choiceresponse>
        </problem>
    """)
    # Display names double as visibility markers asserted on later.
    self.alpha_text = "VISIBLE TO ALPHA"
    self.beta_text = "VISIBLE TO BETA"
    self.audit_text = "VISIBLE TO AUDIT"
    self.everyone_text = "VISIBLE TO EVERYONE"
    course_fixture.add_children(
        XBlockFixtureDesc('chapter', 'Test Section').add_children(
            XBlockFixtureDesc('sequential', 'Test Subsection').add_children(
                XBlockFixtureDesc('vertical', 'Test Unit').add_children(
                    # Restricted to the 'alpha' content group.
                    XBlockFixtureDesc(
                        'problem',
                        self.alpha_text,
                        data=problem_data,
                        metadata={"group_access": {MINIMUM_STATIC_PARTITION_ID: [MINIMUM_STATIC_PARTITION_ID + 1]}}
                    ),
                    # Restricted to the 'beta' content group.
                    XBlockFixtureDesc(
                        'problem',
                        self.beta_text,
                        data=problem_data,
                        metadata={"group_access": {MINIMUM_STATIC_PARTITION_ID: [MINIMUM_STATIC_PARTITION_ID + 2]}}
                    ),
                    XBlockFixtureDesc(
                        'problem',
                        self.audit_text,
                        data=problem_data,
                        # Below 1 is the hardcoded group ID for "Audit"
                        metadata={"group_access": {ENROLLMENT_TRACK_PARTITION_ID: [1]}}
                    ),
                    # No group_access metadata: visible to everyone.
                    XBlockFixtureDesc(
                        'problem',
                        self.everyone_text,
                        data=problem_data
                    )
                )
            )
        )
    )
@attr(shard=20)
def test_staff_sees_all_problems(self):
    """
    Scenario: Staff see all problems
    Given I have a course with a cohort user partition
    And problems that are associated with specific groups in the user partition
    When I view the courseware in the LMS with staff access
    Then I see all the problems, regardless of their group_access property
    """
    course_page = self._goto_staff_page()
    # Staff view must ignore group_access restrictions entirely.
    verify_expected_problem_visibility(
        self,
        course_page,
        [self.alpha_text, self.beta_text, self.audit_text, self.everyone_text]
    )
@attr(shard=3)
def test_student_not_in_content_group(self):
    """
    Scenario: When previewing as a learner, only content visible to all is shown
    Given I have a course with a cohort user partition
    And problems that are associated with specific groups in the user partition
    When I view the courseware in the LMS with staff access
    And I change to previewing as a Learner
    Then I see only problems visible to all users
    """
    course_page = self._goto_staff_page()
    # A generic learner belongs to no content group.
    course_page.set_staff_view_mode('Learner')
    verify_expected_problem_visibility(self, course_page, [self.everyone_text])
@attr(shard=3)
def test_as_student_in_alpha(self):
    """
    Scenario: When previewing as a learner in group alpha, only content visible to alpha is shown
    Given I have a course with a cohort user partition
    And problems that are associated with specific groups in the user partition
    When I view the courseware in the LMS with staff access
    And I change to previewing as a Learner in group alpha
    Then I see only problems visible to group alpha
    """
    course_page = self._goto_staff_page()
    course_page.set_staff_view_mode('Learner in alpha')
    # The everyone-visible problem is expected in every group view.
    verify_expected_problem_visibility(self, course_page, [self.alpha_text, self.everyone_text])
@attr(shard=3)
def test_as_student_in_beta(self):
    """
    Scenario: When previewing as a learner in group beta, only content visible to beta is shown
    Given I have a course with a cohort user partition
    And problems that are associated with specific groups in the user partition
    When I view the courseware in the LMS with staff access
    And I change to previewing as a Learner in group beta
    Then I see only problems visible to group beta
    """
    course_page = self._goto_staff_page()
    course_page.set_staff_view_mode('Learner in beta')
    # The everyone-visible problem is expected in every group view.
    verify_expected_problem_visibility(self, course_page, [self.beta_text, self.everyone_text])
@attr(shard=3)
def test_as_student_in_audit(self):
    """
    Scenario: When previewing as a learner in the audit enrollment track, only content visible to audit is shown
    Given I have a course with an enrollment_track user partition
    And problems that are associated with specific groups in the user partition
    When I view the courseware in the LMS with staff access
    And I change to previewing as a Learner in audit enrollment track
    Then I see only problems visible to audit enrollment track
    """
    course_page = self._goto_staff_page()
    course_page.set_staff_view_mode('Learner in Audit')
    # The everyone-visible problem is expected in every track view.
    verify_expected_problem_visibility(self, course_page, [self.audit_text, self.everyone_text])
def create_cohorts_and_assign_students(self, student_a_username, student_b_username):
    """
    Adds 2 manual cohorts, linked to content groups, to the course.
    Each cohort is assigned one learner.
    :param student_a_username: learner placed in the alpha cohort.
    :param student_b_username: learner placed in the beta cohort.
    """
    instructor_dashboard_page = InstructorDashboardPage(self.browser, self.course_id)
    instructor_dashboard_page.visit()
    cohort_management_page = instructor_dashboard_page.select_cohort_management()
    # Cohorts must be enabled before any can be created.
    cohort_management_page.is_cohorted = True
    def add_cohort_with_student(cohort_name, content_group, student):
        """ Create cohort and assign learner to it. """
        cohort_management_page.add_cohort(cohort_name, content_group=content_group)
        cohort_management_page.add_students_to_selected_cohort([student])
    add_cohort_with_student("Cohort Alpha", "alpha", student_a_username)
    add_cohort_with_student("Cohort Beta", "beta", student_b_username)
    # Let the pending membership updates settle before the caller proceeds.
    cohort_management_page.wait_for_ajax()
@attr('a11y')
def test_course_page(self):
    """
    Run accessibility audit for course staff pages.
    """
    course_page = self._goto_staff_page()
    # Known failures are suppressed pending AC-559; remove entries from this
    # list as the underlying issues are fixed.
    course_page.a11y_audit.config.set_rules({
        'ignore': [
            'aria-allowed-attr',  # TODO: AC-559
            'aria-roles',  # TODO: AC-559,
            'aria-valid-attr',  # TODO: AC-559
            'color-contrast',  # TODO: AC-559
            'link-href',  # TODO: AC-559
            'section',  # TODO: AC-559
        ]
    })
    course_page.a11y_audit.check_for_accessibility_errors()
def verify_expected_problem_visibility(test, courseware_page, expected_problems):
    """
    Helper method that checks that the expected problems are visible on the current page.
    :param test: the running TestCase, used for its assertion methods.
    :param courseware_page: page object exposing the rendered xblocks.
    :param expected_problems: ordered list of problem display texts.
    """
    # Count must match exactly: no hidden extras, nothing missing.
    test.assertEqual(
        len(expected_problems), courseware_page.num_xblock_components, "Incorrect number of visible problems"
    )
    # Order matters: problems must appear in the same order as expected.
    for index, expected_problem in enumerate(expected_problems):
        test.assertIn(expected_problem, courseware_page.xblock_components[index].text)
|
Stanford-Online/edx-platform
|
common/test/acceptance/tests/lms/test_lms_user_preview.py
|
Python
|
agpl-3.0
| 17,924
|
[
"VisIt"
] |
a7d9786ed45301236fef27a655fc0307e96214dfc0e8a3ecceb17492dce5e074
|
"""Testing for kernels for Gaussian processes."""
# Author: Jan Hendrik Metzen <jhm@informatik.uni-bremen.de>
# License: BSD 3 clause
import pytest
import numpy as np
from inspect import signature
from sklearn.gaussian_process.kernels import _approx_fprime
from sklearn.metrics.pairwise import (
PAIRWISE_KERNEL_FUNCTIONS,
euclidean_distances,
pairwise_kernels,
)
from sklearn.gaussian_process.kernels import (
RBF,
Matern,
RationalQuadratic,
ExpSineSquared,
DotProduct,
ConstantKernel,
WhiteKernel,
PairwiseKernel,
KernelOperator,
Exponentiation,
CompoundKernel,
)
from sklearn.base import clone
from sklearn.utils._testing import (
assert_almost_equal,
assert_array_equal,
assert_array_almost_equal,
assert_allclose,
fails_if_pypy,
)
# Fixed random design matrices shared by all tests (5x2 and 6x2).
X = np.random.RandomState(0).normal(0, 1, (5, 2))
Y = np.random.RandomState(0).normal(0, 1, (6, 2))
# Named separately: several tests must skip this kernel because the white
# noise term makes kernel(X) differ from kernel(X, X) on the diagonal.
kernel_rbf_plus_white = RBF(length_scale=2.0) + WhiteKernel(noise_level=3.0)
# Representative kernels covering sums, products, anisotropy, fixed bounds,
# and exponentiation.
kernels = [
    RBF(length_scale=2.0),
    RBF(length_scale_bounds=(0.5, 2.0)),
    ConstantKernel(constant_value=10.0),
    2.0 * RBF(length_scale=0.33, length_scale_bounds="fixed"),
    2.0 * RBF(length_scale=0.5),
    kernel_rbf_plus_white,
    2.0 * RBF(length_scale=[0.5, 2.0]),
    2.0 * Matern(length_scale=0.33, length_scale_bounds="fixed"),
    2.0 * Matern(length_scale=0.5, nu=0.5),
    2.0 * Matern(length_scale=1.5, nu=1.5),
    2.0 * Matern(length_scale=2.5, nu=2.5),
    2.0 * Matern(length_scale=[0.5, 2.0], nu=0.5),
    3.0 * Matern(length_scale=[2.0, 0.5], nu=1.5),
    4.0 * Matern(length_scale=[0.5, 0.5], nu=2.5),
    RationalQuadratic(length_scale=0.5, alpha=1.5),
    ExpSineSquared(length_scale=0.5, periodicity=1.5),
    DotProduct(sigma_0=2.0),
    DotProduct(sigma_0=2.0) ** 2,
    RBF(length_scale=[2.0]),
    Matern(length_scale=[2.0]),
]
# Include every pairwise metric except the chi2 family — presumably excluded
# because those metrics do not apply to this (signed) test data; confirm.
for metric in PAIRWISE_KERNEL_FUNCTIONS:
    if metric in ["additive_chi2", "chi2"]:
        continue
    kernels.append(PairwiseKernel(gamma=1.0, metric=metric))
# Numerical precisions errors in PyPy
@fails_if_pypy
@pytest.mark.parametrize("kernel", kernels)
def test_kernel_gradient(kernel):
    # Compare analytic and numeric gradient of kernels.
    K, K_gradient = kernel(X, eval_gradient=True)
    # Gradient shape must be (n_samples, n_samples, n_hyperparameters).
    assert K_gradient.shape[0] == X.shape[0]
    assert K_gradient.shape[1] == X.shape[0]
    assert K_gradient.shape[2] == kernel.theta.shape[0]
    def eval_kernel_for_theta(theta):
        # Evaluate the kernel matrix at a perturbed parameter vector.
        kernel_clone = kernel.clone_with_theta(theta)
        K = kernel_clone(X, eval_gradient=False)
        return K
    # Finite-difference approximation of dK/dtheta with step 1e-10.
    K_gradient_approx = _approx_fprime(kernel.theta, eval_kernel_for_theta, 1e-10)
    assert_almost_equal(K_gradient, K_gradient_approx, 4)
@pytest.mark.parametrize(
    "kernel",
    [
        kernel
        for kernel in kernels
        # skip non-basic kernels
        if not (
            isinstance(kernel, KernelOperator) or isinstance(kernel, Exponentiation)
        )
    ],
)
def test_kernel_theta(kernel):
    # Check that parameter vector theta of kernel is set correctly.
    theta = kernel.theta
    _, K_gradient = kernel(X, eval_gradient=True)
    # Determine kernel parameters that contribute to theta:
    # every constructor arg named "<name>_bounds" implies a hyperparameter
    # "<name>" that appears in theta.
    init_sign = signature(kernel.__class__.__init__).parameters.values()
    args = [p.name for p in init_sign if p.name != "self"]
    theta_vars = map(
        lambda s: s[0 : -len("_bounds")], filter(lambda s: s.endswith("_bounds"), args)
    )
    assert set(hyperparameter.name for hyperparameter in kernel.hyperparameters) == set(
        theta_vars
    )
    # Check that values returned in theta are consistent with
    # hyperparameter values (being their logarithms)
    for i, hyperparameter in enumerate(kernel.hyperparameters):
        assert theta[i] == np.log(getattr(kernel, hyperparameter.name))
    # Fixed kernel parameters must be excluded from theta and gradient.
    for i, hyperparameter in enumerate(kernel.hyperparameters):
        # create copy with certain hyperparameter fixed
        params = kernel.get_params()
        params[hyperparameter.name + "_bounds"] = "fixed"
        kernel_class = kernel.__class__
        new_kernel = kernel_class(**params)
        # Check that theta and K_gradient are identical with the fixed
        # dimension left out
        _, K_gradient_new = new_kernel(X, eval_gradient=True)
        assert theta.shape[0] == new_kernel.theta.shape[0] + 1
        assert K_gradient.shape[2] == K_gradient_new.shape[2] + 1
        # Entries before and after the fixed dimension must be unchanged.
        if i > 0:
            assert theta[:i] == new_kernel.theta[:i]
            assert_array_equal(K_gradient[..., :i], K_gradient_new[..., :i])
        if i + 1 < len(kernel.hyperparameters):
            assert theta[i + 1 :] == new_kernel.theta[i:]
            assert_array_equal(K_gradient[..., i + 1 :], K_gradient_new[..., i:])
    # Check that values of theta are modified correctly
    for i, hyperparameter in enumerate(kernel.hyperparameters):
        # Setting theta updates the hyperparameter attribute (log scale)...
        theta[i] = np.log(42)
        kernel.theta = theta
        assert_almost_equal(getattr(kernel, hyperparameter.name), 42)
        # ...and setting the attribute updates theta in turn.
        setattr(kernel, hyperparameter.name, 43)
        assert_almost_equal(kernel.theta[i], np.log(43))
@pytest.mark.parametrize(
    "kernel",
    [
        kernel
        for kernel in kernels
        # Identity is not satisfied on diagonal
        if kernel != kernel_rbf_plus_white
    ],
)
def test_auto_vs_cross(kernel):
    # Auto-correlation and cross-correlation should be consistent.
    K_auto = kernel(X)
    # kernel(X, X) must equal kernel(X) for noise-free kernels.
    K_cross = kernel(X, X)
    assert_almost_equal(K_auto, K_cross, 5)
@pytest.mark.parametrize("kernel", kernels)
def test_kernel_diag(kernel):
    # Test that diag method of kernel returns consistent results.
    # diag(X) must match the diagonal of the full kernel matrix.
    K_call_diag = np.diag(kernel(X))
    K_diag = kernel.diag(X)
    assert_almost_equal(K_call_diag, K_diag, 5)
def test_kernel_operator_commutative():
    # Kernel sums and products must not depend on operand order.
    lhs_sum = (RBF(2.0) + 1.0)(X)
    rhs_sum = (1.0 + RBF(2.0))(X)
    assert_almost_equal(lhs_sum, rhs_sum)
    lhs_prod = (3.0 * RBF(2.0))(X)
    rhs_prod = (RBF(2.0) * 3.0)(X)
    assert_almost_equal(lhs_prod, rhs_prod)
def test_kernel_anisotropic():
    # Anisotropic kernel should be consistent with isotropic kernels.
    kernel = 3.0 * RBF([0.5, 2.0])
    K = kernel(X)
    # Scaling dimension 0 by 4 turns length_scale 0.5 into effective 2.0,
    # so the anisotropic kernel must match an isotropic RBF(2.0).
    X1 = np.array(X)
    X1[:, 0] *= 4
    K1 = 3.0 * RBF(2.0)(X1)
    assert_almost_equal(K, K1)
    # Conversely, shrinking dimension 1 by 4 matches RBF(0.5).
    X2 = np.array(X)
    X2[:, 1] /= 4
    K2 = 3.0 * RBF(0.5)(X2)
    assert_almost_equal(K, K2)
    # Check getting and setting via theta
    kernel.theta = kernel.theta + np.log(2)
    assert_array_equal(kernel.theta, np.log([6.0, 1.0, 4.0]))
    assert_array_equal(kernel.k2.length_scale, [1.0, 4.0])
@pytest.mark.parametrize(
    "kernel", [kernel for kernel in kernels if kernel.is_stationary()]
)
def test_kernel_stationary(kernel):
    # Test stationarity of kernels.
    # Shifting both inputs by a constant must leave the kernel unchanged,
    # so every diagonal entry equals k(x, x + 1) for any x.
    K = kernel(X, X + 1)
    assert_almost_equal(K[0, 0], np.diag(K))
@pytest.mark.parametrize("kernel", kernels)
def test_kernel_input_type(kernel):
    # Test whether kernels is for vectors or structured data
    # Composite kernels must derive requires_vector_input from their parts.
    if isinstance(kernel, Exponentiation):
        assert kernel.requires_vector_input == kernel.kernel.requires_vector_input
    if isinstance(kernel, KernelOperator):
        # An operator needs vector input if either operand does.
        assert kernel.requires_vector_input == (
            kernel.k1.requires_vector_input or kernel.k2.requires_vector_input
        )
def test_compound_kernel_input_type():
    # A compound of only WhiteKernel does not need vector input...
    kernel = CompoundKernel([WhiteKernel(noise_level=3.0)])
    assert not kernel.requires_vector_input
    # ...but adding an RBF component makes vector input required.
    kernel = CompoundKernel([WhiteKernel(noise_level=3.0), RBF(length_scale=2.0)])
    assert kernel.requires_vector_input
def check_hyperparameters_equal(kernel1, kernel2):
    # Check that hyperparameters of two kernels are equal:
    # every "hyperparameter_*" attribute present on either object must
    # compare equal across both.
    all_attrs = set(dir(kernel1)) | set(dir(kernel2))
    for attr_name in all_attrs:
        if not attr_name.startswith("hyperparameter_"):
            continue
        assert getattr(kernel1, attr_name) == getattr(kernel2, attr_name)
@pytest.mark.parametrize("kernel", kernels)
def test_kernel_clone(kernel):
    # Test that sklearn's clone works correctly on kernels.
    kernel_cloned = clone(kernel)
    # XXX: Should this be fixed?
    # This differs from the sklearn's estimators equality check.
    # Clones compare equal but are distinct objects.
    assert kernel == kernel_cloned
    assert id(kernel) != id(kernel_cloned)
    # Check that all constructor parameters are equal.
    assert kernel.get_params() == kernel_cloned.get_params()
    # Check that all hyperparameters are equal.
    check_hyperparameters_equal(kernel, kernel_cloned)
@pytest.mark.parametrize("kernel", kernels)
def test_kernel_clone_after_set_params(kernel):
    # This test is to verify that using set_params does not
    # break clone on kernels.
    # This used to break because in kernels such as the RBF, non-trivial
    # logic that modified the length scale used to be in the constructor
    # See https://github.com/scikit-learn/scikit-learn/issues/6961
    # for more details.
    bounds = (1e-5, 1e5)
    kernel_cloned = clone(kernel)
    params = kernel.get_params()
    # RationalQuadratic kernel is isotropic.
    isotropic_kernels = (ExpSineSquared, RationalQuadratic)
    if "length_scale" in params and not isinstance(kernel, isotropic_kernels):
        length_scale = params["length_scale"]
        # Toggle between scalar and per-dimension length scales so the
        # clone has to survive a structural parameter change.
        if np.iterable(length_scale):
            # XXX unreached code as of v0.22
            params["length_scale"] = length_scale[0]
            params["length_scale_bounds"] = bounds
        else:
            params["length_scale"] = [length_scale] * 2
            params["length_scale_bounds"] = bounds * 2
        kernel_cloned.set_params(**params)
        kernel_cloned_clone = clone(kernel_cloned)
        # The second clone must reflect the post-set_params state.
        assert kernel_cloned_clone.get_params() == kernel_cloned.get_params()
        assert id(kernel_cloned_clone) != id(kernel_cloned)
        check_hyperparameters_equal(kernel_cloned, kernel_cloned_clone)
def test_matern_kernel():
    # Test consistency of Matern kernel for special values of nu.
    K = Matern(nu=1.5, length_scale=1.0)(X)
    # the diagonal elements of a matern kernel are 1
    assert_array_almost_equal(np.diag(K), np.ones(X.shape[0]))
    # matern kernel for coef0==0.5 is equal to absolute exponential kernel
    K_absexp = np.exp(-euclidean_distances(X, X, squared=False))
    K = Matern(nu=0.5, length_scale=1.0)(X)
    assert_array_almost_equal(K, K_absexp)
    # matern kernel with coef0==inf is equal to RBF kernel
    K_rbf = RBF(length_scale=1.0)(X)
    K = Matern(nu=np.inf, length_scale=1.0)(X)
    assert_array_almost_equal(K, K_rbf)
    assert_allclose(K, K_rbf)
    # test that special cases of matern kernel (coef0 in [0.5, 1.5, 2.5])
    # result in nearly identical results as the general case for coef0 in
    # [0.5 + tiny, 1.5 + tiny, 2.5 + tiny]
    tiny = 1e-10
    for nu in [0.5, 1.5, 2.5]:
        K1 = Matern(nu=nu, length_scale=1.0)(X)
        K2 = Matern(nu=nu + tiny, length_scale=1.0)(X)
        assert_array_almost_equal(K1, K2)
    # test that coef0==large is close to RBF
    large = 100
    K1 = Matern(nu=large, length_scale=1.0)(X)
    K2 = RBF(length_scale=1.0)(X)
    assert_array_almost_equal(K1, K2, decimal=2)
@pytest.mark.parametrize("kernel", kernels)
def test_kernel_versus_pairwise(kernel):
    # Check that GP kernels can also be used as pairwise kernels.
    # Test auto-kernel
    if kernel != kernel_rbf_plus_white:
        # For WhiteKernel: k(X) != k(X,X). This is assumed by
        # pairwise_kernels
        K1 = kernel(X)
        K2 = pairwise_kernels(X, metric=kernel)
        assert_array_almost_equal(K1, K2)
    # Test cross-kernel
    K1 = kernel(X, Y)
    K2 = pairwise_kernels(X, Y, metric=kernel)
    assert_array_almost_equal(K1, K2)
@pytest.mark.parametrize("kernel", kernels)
def test_set_get_params(kernel):
    # Check that set_params()/get_params() is consistent with kernel.theta.
    # theta holds the log of each free (non-fixed) hyperparameter, flattened
    # in hyperparameter order; anisotropic parameters span several entries.
    # Test get_params()
    index = 0
    params = kernel.get_params()
    for hyperparameter in kernel.hyperparameters:
        # Idiom fix: the original used the reversed check
        # isinstance("string", type(bounds)); test the value directly.
        if isinstance(hyperparameter.bounds, str):
            if hyperparameter.bounds == "fixed":
                # Fixed hyperparameters are excluded from theta.
                continue
        size = hyperparameter.n_elements
        if size > 1:  # anisotropic kernels
            assert_almost_equal(
                np.exp(kernel.theta[index : index + size]), params[hyperparameter.name]
            )
            index += size
        else:
            assert_almost_equal(
                np.exp(kernel.theta[index]), params[hyperparameter.name]
            )
            index += 1
    # Test set_params()
    index = 0
    value = 10  # arbitrary value
    for hyperparameter in kernel.hyperparameters:
        if isinstance(hyperparameter.bounds, str):
            if hyperparameter.bounds == "fixed":
                continue
        size = hyperparameter.n_elements
        if size > 1:  # anisotropic kernels
            kernel.set_params(**{hyperparameter.name: [value] * size})
            assert_almost_equal(
                np.exp(kernel.theta[index : index + size]), [value] * size
            )
            index += size
        else:
            kernel.set_params(**{hyperparameter.name: value})
            assert_almost_equal(np.exp(kernel.theta[index]), value)
            index += 1
@pytest.mark.parametrize("kernel", kernels)
def test_repr_kernels(kernel):
    # Smoke-test for repr in kernels.
    # Only checks that repr() does not raise; the output is not inspected.
    repr(kernel)
def test_rational_quadratic_kernel():
    # A vector length_scale is rejected: this kernel is isotropic only.
    kernel = RationalQuadratic(length_scale=[1.0, 1.0])
    message = (
        "RationalQuadratic kernel only supports isotropic "
        "version, please use a single "
        "scalar for length_scale"
    )
    # The error surfaces on evaluation, not at construction time.
    with pytest.raises(AttributeError, match=message):
        kernel(X)
|
manhhomienbienthuy/scikit-learn
|
sklearn/gaussian_process/tests/test_kernels.py
|
Python
|
bsd-3-clause
| 13,726
|
[
"Gaussian"
] |
bed2ec0b8bd12943b1a80a2e34b141c52ea83b7bbcba515b344a949f89ab899e
|
from __future__ import print_function
try:
from future_builtins import zip
except ImportError:
pass
from . import moose as _moose
# Clock tick dedicated to the recording tables created by this module.
_tick = 8
# Hidden container under which all recording tables are parked.
_base = '/_utils'
# Path template for the n-th table ('/_utils/y0', '/_utils/y1', ...).
_path = _base + '/y{0}'
# Monotonically increasing suffix used by record() to name tables.
_counter = 0
# Registry of every _Plot created via record(); consumed by save/show helpers.
_plots = []
_moose.Neutral( _base )
# Default field recorded for each supported MOOSE type when the caller of
# record() does not name one explicitly.
_defaultFields = {
    _moose.Compartment : 'Vm',
    _moose.ZombieCompartment : 'Vm',
    _moose.HHChannel: 'Gk',
    _moose.ZombieHHChannel: 'Gk',
    _moose.HHChannel2D: 'Gk',
    _moose.SynChan: 'Gk',
    _moose.CaConc: 'Ca',
    _moose.ZombieCaConc: 'Ca',
    _moose.Pool: 'conc',
    _moose.ZombiePool: 'conc',
    _moose.ZPool: 'conc',
    _moose.BufPool: 'conc',
    _moose.ZombieBufPool: 'conc',
    _moose.ZBufPool: 'conc',
    _moose.FuncPool: 'conc',
    _moose.ZombieFuncPool: 'conc',
    _moose.ZFuncPool: 'conc',
}
def _defaultField( obj ):
    """Return the default recordable field name for a MOOSE object.
    Looks the object's exact type up in _defaultFields; raises KeyError for
    unsupported types.
    """
    objType = type( obj )
    return _defaultFields[ objType ]
def setDt( dt ):
    '''-----------
    Description
    -----------
    Sets time-step for recording values.
    ---------
    Arguments
    ---------
    dt: Time-step for recording values.
    -------
    Returns
    -------
    Nothing.'''
    # All recording tables share the module's dedicated clock tick.
    _moose.setClock( _tick, dt )
class SetupError( Exception ):
    # Raised when a helper is used before any plots have been set up.
    pass
def _time( npoints = None ):
    """Return an array of time points from 0 to the current simulation time.
    npoints defaults to the length of the first recorded plot; raises
    SetupError if no plots exist to infer the length from.
    """
    import numpy
    if npoints is None:
        try:
            npoints = len( _plots[ 0 ].vec )
        except IndexError:
            raise SetupError(
                'List of time-points cannot be constructed because '
                'no plots have been set up yet.'
            )
    begin = 0.0
    # The simulation clock's current time is the end of the recording window.
    end = _moose.Clock( '/clock' ).currentTime
    return numpy.linspace( begin, end, npoints )
class _Plot( _moose.Table ):
    """A recording table bound to one field of one MOOSE object.
    Wraps _moose.Table and remembers which object/field it records plus an
    optional display label used by the save/show helpers.
    """
    def __init__( self, path, obj, field, label ):
        _moose.Table.__init__( self, path )
        # NOTE(review): a second Table handle is created at the same path;
        # presumably this aliases the same underlying table — confirm.
        self._table = _moose.Table( path )
        self.obj = obj
        self.field = field
        self.label = label
    @property
    def values( self ):
        # Raw recorded samples.
        return self._table.vec
    @property
    def size( self ):
        # Number of recorded samples.
        return len( self.values )
    @property
    def time( self ):
        # Time axis matching the recorded samples.
        return _time( self.size )
    def __iter__( self ):
        return iter( self.values )
def record( obj, field = None, label = None ):
    '''Set up recording of *field* on *obj* and return the resulting _Plot.
    obj may be a moose element, a path string, or an iterable of either (in
    which case a list of plots is returned). field defaults to the type's
    entry in _defaultFields; label is an optional display-label format.
    '''
    global _counter
    # Checking if object is an iterable like list or a tuple, but not a string.
    if hasattr( obj, '__iter__' ):
        return [ record( o, field, label ) for o in obj ]
    if isinstance( obj, str ):
        obj = _moose.element( obj )
    if field is None:
        field = _defaultField( obj )
    # Allocate the next table path under the hidden container.
    path = _path.format( _counter )
    _counter += 1
    p = _Plot( path, obj, field, label )
    _plots.append( p )
    # Wire the table to poll the object's field on the recording tick.
    _moose.connect( p, "requestData", obj, 'get_' + field )
    _moose.useClock( _tick, path, "process" )
    return p
def _label( plot, labelFormat = '{path}.{field}' ):
# Over-ride label format if label has been given explicitly.
if plot.label:
labelFormat = plot.label
return labelFormat.format(
path = plot.obj.path,
name = plot.obj.name,
field = plot.field
)
def _selectedPlots( selected ):
    """Normalise *selected* into a list of plots.
    None selects every registered plot (as a fresh list, since callers may
    mutate it); a single _Plot is wrapped in a list; anything else is
    returned unchanged.
    """
    if selected is None:
        return list( _plots )
    if isinstance( selected, _Plot ):
        return [ selected ]
    return selected
def saveCSV(
    fileName,
    selected = None,
    delimiter = '\t',
    header = True,
    headerCommentCharacter = '#',
    labelFormat = '{path}.{field}',
    timeCol = True,
    timeHeader = 'Time',
    fileMode = 'w' ):
    '''Write the selected plots to *fileName* as delimited text.
    selected defaults to all recorded plots. When header is true a first
    row of column labels (prefixed by headerCommentCharacter) is written;
    when timeCol is true a leading time column is included.
    '''
    import csv
    plots = _selectedPlots( selected )
    if header:
        # Rebind 'header' to the actual label row.
        header = []
        if timeCol:
            header.append( timeHeader )
        for plot in plots:
            header.append( _label( plot, labelFormat ) )
        # Mark the header row as a comment line.
        header[ 0 ] = headerCommentCharacter + header[ 0 ]
    if timeCol:
        # Prepend the time axis so zip() emits it as the first column.
        plots.insert( 0, _time() )
    with open( fileName, fileMode ) as fout:
        writer = csv.writer( fout, delimiter = delimiter )
        if header:
            writer.writerow( header )
        # Transpose: one row per time point across all plots.
        writer.writerows( list(zip( *plots )) )
def saveXPLOT(
    fileName,
    selected = None,
    labelFormat = '{path}.{field}',
    fileMode = 'w' ):
    '''Write the selected plots to *fileName* in xplot format.
    Each plot becomes a "/newplot" section with a "/plotname" line followed
    by one value per line; sections are separated by a blank line.
    '''
    plots = _selectedPlots( selected )
    with open( fileName, fileMode ) as fout:
        def write( line ):
            fout.write( line + '\n' )
        for i, plot in enumerate( plots ):
            if i > 0:
                # Blank separator between consecutive plot sections.
                write( '' )
            write( '/newplot' )
            write( '/plotname ' + _label( plot, labelFormat ) )
            for value in plot:
                write( str( value ) )
def show(
    selected = None,
    combine = True,
    labelFormat = '{path}.{field}',
    xLabel = 'Time (s)',
    yLabel = '{field}' ):
    '''Display the selected plots with matplotlib.
    combine=True draws everything in a single figure; otherwise one figure
    per plot. Degrades to a warning when matplotlib is unavailable.
    '''
    try:
        from matplotlib import pyplot as plt
    except ImportError:
        print("Warning: recording.show(): Cannot find 'matplotlib'. Not showing plots.")
        return
    plots = _selectedPlots( selected )
    if combine:
        plt.figure()
    for plot in plots:
        if not combine:
            # Fresh figure per plot when not combining.
            plt.figure()
        print(_label(plot))
        plt.plot( plot.time, plot.values, label = _label( plot ) )
    plt.legend()
    plt.show()
def HDF5():
    # TODO: unimplemented stub — intended HDF5 export of recorded plots.
    pass
|
subhacom/moose-core
|
python/moose/recording.py
|
Python
|
gpl-3.0
| 4,723
|
[
"MOOSE"
] |
853de3e7cc58c0a441635b1c611f37aaae2bdf3e4ce073152b420d6784f14392
|
# Copyright (C) 2015-2021 Regents of the University of California
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
import collections
import copy
import importlib
import inspect
import itertools
import logging
import os
import pickle
import shutil
import tempfile
import time
import uuid
from abc import ABCMeta, abstractmethod
from argparse import ArgumentDefaultsHelpFormatter, ArgumentParser
from contextlib import contextmanager
from io import BytesIO
from typing import Dict, Optional, Union, Set
import dill
from toil.common import Config, Toil, addOptions, safeUnpickleFromStream
from toil.deferred import DeferredFunction
from toil.fileStores import FileID
from toil.lib.expando import Expando
from toil.lib.conversions import human2bytes
from toil.lib.resources import (get_total_cpu_time,
get_total_cpu_time_and_memory_usage)
from toil.resource import ModuleDescriptor
from toil.statsAndLogging import set_logging_from_options
logger = logging.getLogger(__name__)
class JobPromiseConstraintError(RuntimeError):
    """
    Represents a problem where a job is being asked to promise its return
    value, but it has not yet been hit in the topological order of the job
    graph.
    """
    def __init__(self, promisingJob, recipientJob=None):
        """
        :param toil.job.Job promisingJob: The job being asked for its return value.
        :param toil.job.Job recipientJob: The job receiving the return value, if any.
        """
        self.promisingJob = promisingJob
        self.recipientJob = recipientJob
        if recipientJob is not None:
            # Name the offending recipient when we know it.
            message = (f"Job {promisingJob.description} cannot promise its "
                       f"return value to non-successor {recipientJob.description}")
        else:
            # Come up with a vaguer error message.
            message = (f"Job {promisingJob.description} cannot promise its "
                       f"return value to a job that is not its successor")
        super().__init__(message)
class ConflictingPredecessorError(Exception):
    """Raised when the same predecessor/successor relationship is added twice."""
    def __init__(self, predecessor: 'Job', successor: 'Job'):
        message = (
            f'The given job: "{predecessor.description}" is already a '
            f'predecessor of job: "{successor.description}".'
        )
        super().__init__(message)
class TemporaryID:
    """
    Placeholder for a job ID used by a JobDescription that has not yet been
    registered with any JobStore.
    Needs to be held:
        * By JobDescription objects to record normal relationships.
        * By Jobs to key their connected-component registries and to record
          predecessor relationships to facilitate EncapsulatedJob adding
          itself as a child.
        * By Services to tie back to their hosting jobs, so the service
          tree can be built up from Service objects.
    """
    def __init__(self):
        """
        Assign a unique temporary ID that won't collide with anything.
        """
        # A random UUID is unique enough to never clash with a real store ID.
        self._value = uuid.uuid4()
    def __repr__(self):
        return f'TemporaryID({self._value})'
    def __str__(self):
        return repr(self)
    def __hash__(self):
        return hash(self._value)
    def __eq__(self, other):
        if not isinstance(other, TemporaryID):
            return False
        return other._value == self._value
    def __ne__(self, other):
        return not self.__eq__(other)
class Requirer:
    """
    Base class implementing the storage and presentation of requirements for
    cores, memory, disk, and preemptability as properties.
    """
    def __init__(self, requirements):
        """
        Parse and save the given requirements.
        :param dict requirements: Dict from string to number, string, or bool
            describing a set of resource requirements. 'cores', 'memory',
            'disk', and 'preemptable' fields, if set, are parsed and broken out
            into properties. If unset, the relevant property will be
            unspecified, and will be pulled from the assigned Config object if
            queried (see :meth:`toil.job.Requirer.assignConfig`). If
            unspecified and no Config object is assigned, an AttributeError
            will be raised at query time.
        """
        super().__init__()
        # We can have a toil.common.Config assigned to fill in default values
        # for e.g. job requirements not explicitly specified.
        self._config = None
        # Save requirements, parsing and validating anything that needs parsing or validating.
        # Don't save Nones.
        self._requirementOverrides = {k: self._parseResource(k, v) for (k, v) in requirements.items() if v is not None}
def assignConfig(self, config):
    """
    Assign the given config object to be used to provide default values.
    Must be called exactly once on a loaded JobDescription before any
    requirements are queried.
    :param toil.common.Config config: Config object to query
    :raises RuntimeError: if a config has already been assigned.
    """
    if self._config is not None:
        raise RuntimeError(f"Config assigned multiple times to {self}")
    self._config = config
def __getstate__(self):
    """
    Return the dict to use as the instance's __dict__ when pickling.
    The config reference is dropped (set to None); it is expected to be
    re-attached after load via assignConfig().
    """
    state = dict(self.__dict__)
    state['_config'] = None
    return state
def __copy__(self):
    """
    Return a semantically-shallow copy of the object, for :meth:`copy.copy`.
    The copy shares our config reference (if any) but drops it during the
    actual copy via the __getstate__ override.
    """
    # See https://stackoverflow.com/a/40484215 for how to do an override
    # that uses the base implementation
    # Hide this override
    # NOTE(review): copy.copy() resolves __copy__ on the *class*, so shadowing
    # it with an instance attribute may not actually bypass this method —
    # confirm this recursion guard works on the supported Python versions.
    implementation = self.__copy__
    self.__copy__ = None
    # Do the copy which omits the config via __getstate__ override
    clone = copy.copy(self)
    # Put back the override on us and the copy
    self.__copy__ = implementation
    clone.__copy__ = implementation
    if self._config is not None:
        # Share a config reference
        clone.assignConfig(self._config)
    return clone
def __deepcopy__(self, memo):
    """
    Return a semantically-deep copy of the object, for :meth:`copy.deepcopy`.
    The copy shares our config reference (if any) but drops it during the
    actual copy via the __getstate__ override.
    """
    # See https://stackoverflow.com/a/40484215 for how to do an override
    # that uses the base implementation
    # Hide this override
    # NOTE(review): copy.deepcopy() resolves __deepcopy__ on the *class*, so
    # shadowing it with an instance attribute may not actually bypass this
    # method — confirm this recursion guard works as intended.
    implementation = self.__deepcopy__
    self.__deepcopy__ = None
    # Do the deepcopy which omits the config via __getstate__ override
    clone = copy.deepcopy(self, memo)
    # Put back the override on us and the copy
    self.__deepcopy__ = implementation
    clone.__deepcopy__ = implementation
    if self._config is not None:
        # Share a config reference
        clone.assignConfig(self._config)
    return clone
@staticmethod
def _parseResource(name, value):
    """
    Parse a Toil resource requirement value and apply resource-specific type checks. If the
    value is a string, a binary or metric unit prefix in it will be evaluated and the
    corresponding integral value will be returned.
    :param str name: The name of the resource
    :param str|int|float|bool|None value: The resource value
    :rtype: int|float|bool|None
    :raises TypeError: if the value's type is not acceptable for the resource.
    :raises ValueError: if a 'preemptable' value is not a recognized truth value.
    >>> Requirer._parseResource('cores', None)
    >>> Requirer._parseResource('cores', 1), Requirer._parseResource('disk', 1), \
    Requirer._parseResource('memory', 1)
    (1, 1, 1)
    >>> Requirer._parseResource('cores', '1G'), Requirer._parseResource('disk', '1G'), \
    Requirer._parseResource('memory', '1G')
    (1073741824, 1073741824, 1073741824)
    >>> Requirer._parseResource('cores', 1.1)
    1.1
    >>> Requirer._parseResource('disk', 1.1) # doctest: +IGNORE_EXCEPTION_DETAIL
    Traceback (most recent call last):
    ...
    TypeError: The 'disk' requirement does not accept values that are of <type 'float'>
    >>> Requirer._parseResource('memory', object()) # doctest: +IGNORE_EXCEPTION_DETAIL
    Traceback (most recent call last):
    ...
    TypeError: The 'memory' requirement does not accept values that are of ...
    """
    if value is None:
        # Anything can be None.
        return value
    if name in ('memory', 'disk', 'cores'):
        # These should be numbers that accept things like "5G".
        if isinstance(value, (str, bytes)):
            value = human2bytes(value)
        if isinstance(value, int):
            return value
        elif isinstance(value, float) and name == 'cores':
            # But only cores can be fractional.
            return value
        else:
            raise TypeError(f"The '{name}' requirement does not accept values that are of type {type(value)}")
    elif name == 'preemptable':
        if isinstance(value, str):
            # BUGFIX: str has no .tolower(); the method is .lower(). The
            # original raised AttributeError for every string value here.
            if value.lower() == 'true':
                return True
            elif value.lower() == 'false':
                return False
            else:
                raise ValueError(f"The '{name}' requirement, as a string, must be 'true' or 'false' but is {value}")
        elif isinstance(value, bool):
            # Check bool before int: bool is a subclass of int.
            return value
        elif isinstance(value, int):
            if value == 1:
                return True
            elif value == 0:
                return False
            else:
                raise ValueError(f"The '{name}' requirement, as an int, must be 1 or 0 but is {value}")
        else:
            raise TypeError(f"The '{name}' requirement does not accept values that are of type {type(value)}")
    else:
        # Anything else we just pass along without opinions
        return value
def _fetchRequirement(self, requirement):
"""
Get the value of the specified requirement ('blah') by looking it up in
our requirement storage and querying 'defaultBlah' on the config if it
isn't set. If the config would be queried but isn't associated, raises
AttributeError.
:param str requirement: The name of the resource
:rtype: int|float|bool|None
"""
if requirement in self._requirementOverrides:
value = self._requirementOverrides[requirement]
if value is None:
raise AttributeError(f"Encountered explicit None for '{requirement}' requirement of {self}")
return value
elif self._config is not None:
value = getattr(self._config, 'default' + requirement.capitalize())
if value is None:
raise AttributeError(f"Encountered None for default '{requirement}' requirement in config: {self._config}")
return value
else:
raise AttributeError(f"Default value for '{requirement}' requirement of {self} cannot be determined")
@property
def requirements(self):
"""
Dict containing all non-None, non-defaulted requirements.
:rtype: dict
"""
return dict(self._requirementOverrides)
@property
def disk(self) -> int:
"""
The maximum number of bytes of disk required.
:rtype: int
"""
return self._fetchRequirement('disk')
@disk.setter
def disk(self, val):
self._requirementOverrides['disk'] = self._parseResource('disk', val)
@property
def memory(self):
"""
The maximum number of bytes of memory required.
:rtype: int
"""
return self._fetchRequirement('memory')
@memory.setter
def memory(self, val):
self._requirementOverrides['memory'] = self._parseResource('memory', val)
@property
def cores(self):
"""
The number of CPU cores required.
:rtype: int|float
"""
return self._fetchRequirement('cores')
@cores.setter
def cores(self, val):
self._requirementOverrides['cores'] = self._parseResource('cores', val)
@property
def preemptable(self):
"""
Whether a preemptable node is permitted, or a nonpreemptable one is required.
:rtype: bool
"""
return self._fetchRequirement('preemptable')
@preemptable.setter
def preemptable(self, val):
self._requirementOverrides['preemptable'] = self._parseResource('preemptable', val)
class JobDescription(Requirer):
    """
    Stores all the information that the Toil Leader ever needs to know about a
    Job: requirements information, dependency information, commands to issue,
    etc.
    Can be obtained from an actual (i.e. executable) Job object, and can be
    used to obtain the Job object from the JobStore.
    Never contains other Jobs or JobDescriptions: all reference is by ID.
    Subclassed into variants for checkpoint jobs and service jobs that have
    their specific parameters.
    """
    def __init__(self, requirements: Dict[str, Union[int, str, bool]], jobName: str, unitName: str='', displayName: str='', command: Optional[str]=None) -> None:
        """
        Create a new JobDescription.
        :param dict requirements: Dict from string to number, string, or bool
            describing the resource requirements of the job. 'cores', 'memory',
            'disk', and 'preemptable' fields, if set, are parsed and broken out
            into properties. If unset, the relevant property will be
            unspecified, and will be pulled from the assigned Config object if
            queried (see :meth:`toil.job.Requirer.assignConfig`).
        :param str jobName: Name of the kind of job this is. May be used in job
            store IDs and logging. Also used to let the cluster scaler learn a
            model for how long the job will take. Ought to be the job class's
            name if no real user-defined name is available.
        :param str unitName: Name of this instance of this kind of job. May
            appear with jobName in logging.
        :param str displayName: A human-readable name to identify this
            particular job instance. Ought to be the job class's name
            if no real user-defined name is available.
        :param str command: Encoded command string saying how to find and run
            the job body (see the note on self.command below), or None.
        """
        # Set requirements
        super().__init__(requirements)
        # Save names, making sure they are strings and not e.g. bytes.
        def makeString(x):
            return x if not isinstance(x, bytes) else x.decode('utf-8', errors='replace')
        self.jobName = makeString(jobName)
        self.unitName = makeString(unitName)
        self.displayName = makeString(displayName)
        # Set properties that are not fully filled in on creation.
        # ID of this job description in the JobStore.
        self.jobStoreID = TemporaryID()
        # Mostly fake, not-really-executable command string that encodes how to
        # find the Job body data that this JobDescription describes, and the
        # module(s) needed to unpickle it.
        #
        # Gets replaced with/rewritten into the real, executable command when
        # the leader passes the description off to the batch system to be
        # executed.
        self.command: Optional[str] = command
        # Set scheduling properties that the leader read to think about scheduling.
        # The number of times the job should be attempted. Includes the initial
        # try, plus the number of times to retry if the job fails. This number
        # is reduced each time the job is run, until it is zero, and then no
        # further attempts to run the job are made. If None, taken as the
        # default value for this workflow execution.
        self._remainingTryCount = None
        # Holds FileStore FileIDs of the files that this job has deleted. Used
        # to journal deletions of files and recover from a worker crash between
        # committing a JobDescription update and actually executing the
        # requested deletions.
        self.filesToDelete = []
        # Holds JobStore Job IDs of the jobs that have been chained into this
        # job, and which should be deleted when this job finally is deleted.
        self.jobsToDelete = []
        # The number of direct predecessors of the job. Needs to be stored at
        # the JobDescription to support dynamically-created jobs with multiple
        # predecessors. Otherwise, we could reach a job by one path down from
        # the root and decide to schedule it without knowing that it is also
        # reachable from other paths down from the root.
        self.predecessorNumber = 0
        # The IDs of predecessor jobs that have finished. Managed by the Leader
        # and ToilState, and just stored in the JobDescription. Never changed
        # after the job is scheduled, so we don't have to worry about
        # conflicting updates from workers.
        # TODO: Move into ToilState itself so leader stops mutating us so much?
        self.predecessorsFinished = set()
        # Note that we don't hold IDs of our predecessors. Predecessors know
        # about us, and not the other way around. Otherwise we wouldn't be able
        # to save ourselves to the job store until our predecessors were saved,
        # but they'd also be waiting on us.
        # The IDs of all child jobs of the described job.
        # Children which are done must be removed with filterSuccessors.
        self.childIDs = set()
        # The IDs of all follow-on jobs of the described job.
        # Follow-ons which are done must be removed with filterSuccessors.
        self.followOnIDs = set()
        # Dict from ServiceHostJob ID to list of child ServiceHostJobs that start after it.
        # All services must have an entry, if only to an empty list.
        self.serviceTree = {}
        # A jobStoreFileID of the log file for a job. This will be None unless the job failed and
        # the logging has been captured to be reported on the leader.
        self.logJobStoreFileID = None
    def serviceHostIDsInBatches(self):
        """
        Get an iterator over all batches of service host job IDs that can be
        started at the same time, in the order they need to start in.
        """
        # First start all the jobs with no parent
        roots = set(self.serviceTree.keys())
        for parent, children in self.serviceTree.items():
            for child in children:
                roots.remove(child)
        batch = list(roots)
        if len(batch) > 0:
            # If there's a first batch, yield it
            yield batch
        # Breadth-first walk down the service tree: each level can start once
        # the previous level has been started.
        while len(batch) > 0:
            nextBatch = []
            for started in batch:
                # Go find all the children that can start now that we have started.
                for child in self.serviceTree[started]:
                    nextBatch.append(child)
            batch = nextBatch
            if len(batch) > 0:
                # Emit the batch if nonempty
                yield batch
    def successorsAndServiceHosts(self):
        """
        Get an iterator over all child, follow-on, and service job IDs
        """
        return itertools.chain(self.childIDs, self.followOnIDs, self.serviceTree.keys())
    def allSuccessors(self):
        """
        Get an iterator over all child and follow-on job IDs
        """
        return itertools.chain(self.childIDs, self.followOnIDs)
    @property
    def services(self):
        """
        Get a collection of the IDs of service host jobs for this job, in arbitrary order.
        Will be empty if the job has no unfinished services.
        """
        return list(self.serviceTree.keys())
    def nextSuccessors(self):
        """
        Return the collection of job IDs for the successors of this job that,
        according to this job, are ready to run.
        If those jobs have multiple predecessor relationships, they may still
        be blocked on other jobs.
        Returns None when at the final phase (all successors done), and an
        empty collection if there are more phases but they can't be entered yet
        (e.g. because we are waiting for the job itself to run).
        """
        if self.command is not None:
            # We ourselves still need to run, so there is work remaining but
            # no successors are ready yet.
            return []
        elif len(self.childIDs) != 0:
            # Our children need to run
            return self.childIDs
        elif len(self.followOnIDs) != 0:
            # Our follow-ons need to run
            return self.followOnIDs
        else:
            # Everything is done.
            return None
    @property
    def stack(self):
        """
        Get an immutable collection of immutable collections of IDs of successors that need to run still.
        Batches of successors are in reverse order of the order they need to run in.
        Some successors in each batch may have already been finished. Batches may be empty.
        Exists so that code that used the old stack list immutably can work
        still. New development should use nextSuccessors(), and all mutations
        should use filterSuccessors() (which automatically removes completed
        phases).
        :return: Batches of successors that still need to run, in reverse
        order. An empty batch may exist under a non-empty batch, or at the top
        when the job itself is not done.
        :rtype: tuple(tuple(str))
        """
        result = []
        if self.command is not None or len(self.childIDs) != 0 or len(self.followOnIDs) != 0:
            # Follow-ons haven't all finished yet
            result.append(tuple(self.followOnIDs))
        if self.command is not None or len(self.childIDs) != 0:
            # Children haven't all finished yet
            result.append(tuple(self.childIDs))
        return tuple(result)
    def filterSuccessors(self, predicate):
        """
        Keep only successor jobs for which the given predicate function returns True when called with the job's ID.
        Treats all other successors as complete and forgets them.
        """
        self.childIDs = {x for x in self.childIDs if predicate(x)}
        self.followOnIDs = {x for x in self.followOnIDs if predicate(x)}
    def filterServiceHosts(self, predicate):
        """
        Keep only services for which the given predicate function returns True when called with the service host job's ID.
        Treats all other services as complete and forgets them.
        """
        # Get all the services we shouldn't have anymore
        toRemove = set()
        for serviceID in self.services:
            if not predicate(serviceID):
                toRemove.add(serviceID)
        # Drop everything from that set as a value and a key
        self.serviceTree = {k: [x for x in v if x not in toRemove] for k, v in self.serviceTree.items() if k not in toRemove}
    def clearSuccessorsAndServiceHosts(self):
        """
        Remove all references to child, follow-on, and service jobs associated with the described job.
        """
        self.childIDs = set()
        self.followOnIDs = set()
        self.serviceTree = {}
    def replace(self, other):
        """
        Take on the ID of another JobDescription, while retaining our own state and type.
        When updated in the JobStore, we will save over the other JobDescription.
        Useful for chaining jobs: the chained-to job can replace the parent job.
        Merges cleanup state from the job being replaced into this one.
        :param toil.job.JobDescription other: Job description to replace.
        """
        # TODO: also be able to take on the successors of the other job, under
        # ours on the stack, somehow.
        self.jobStoreID = other.jobStoreID
        # Save files and jobs to delete from the job we replaced, so we can
        # roll up a whole chain of jobs and delete them when they're all done.
        self.filesToDelete += other.filesToDelete
        self.jobsToDelete += other.jobsToDelete
    def addChild(self, childID):
        """
        Make the job with the given ID a child of the described job.
        """
        self.childIDs.add(childID)
    def addFollowOn(self, followOnID):
        """
        Make the job with the given ID a follow-on of the described job.
        """
        self.followOnIDs.add(followOnID)
    def addServiceHostJob(self, serviceID, parentServiceID=None):
        """
        Make the ServiceHostJob with the given ID a service of the described job.
        If a parent ServiceHostJob ID is given, that parent service will be started
        first, and must have already been added.
        """
        # Make sure we aren't clobbering something
        assert serviceID not in self.serviceTree
        self.serviceTree[serviceID] = []
        if parentServiceID is not None:
            self.serviceTree[parentServiceID].append(serviceID)
    def hasChild(self, childID):
        """
        Return True if the job with the given ID is a child of the described job.
        """
        return childID in self.childIDs
    def hasFollowOn(self, followOnID):
        """
        Return True if the job with the given ID is a follow-on of the described job.
        """
        return followOnID in self.followOnIDs
    def hasServiceHostJob(self, serviceID):
        """
        Return True if the ServiceHostJob with the given ID is a service of the described job.
        """
        return serviceID in self.serviceTree
    def renameReferences(self, renames):
        """
        Apply the given dict of ID renames to all references to jobs. Does not
        modify our own ID or those of finished predecessors.
        IDs not present in the renames dict are left as-is.
        :param dict(TemporaryID, str) renames: Rename operations to apply.
        """
        self.childIDs = {renames.get(old, old) for old in self.childIDs}
        self.followOnIDs = {renames.get(old, old) for old in self.followOnIDs}
        self.serviceTree = {renames.get(parent, parent): [renames.get(child, child) for child in children]
                            for parent, children in self.serviceTree.items()}
    def addPredecessor(self):
        """
        Notify the JobDescription that a predecessor has been added to its Job.
        """
        self.predecessorNumber += 1
    def onRegistration(self, jobStore):
        """
        Called by the Job saving logic when this JobDescription meets the JobStore and has its ID assigned.
        Overridden to perform setup work (like hooking up flag files for service jobs) that requires the JobStore.
        :param toil.jobStores.abstractJobStore.AbstractJobStore jobStore: The job store we are being placed into
        """
    def setupJobAfterFailure(self, exitReason=None):
        """
        Reduce the remainingTryCount if greater than zero and set the memory
        to be at least as big as the default memory (in case of exhaustion of memory,
        which is common).
        Requires a configuration to have been assigned (see :meth:`toil.job.Requirer.assignConfig`).
        :param toil.batchSystems.abstractBatchSystem.BatchJobExitReason exitReason: The reason the job stopped, if known.
        """
        # Avoid potential circular imports
        from toil.batchSystems.abstractBatchSystem import BatchJobExitReason
        # Old version of this function used to take a config. Make sure that isn't happening.
        assert not isinstance(exitReason, Config), "Passing a Config as an exit reason"
        # Make sure we have an assigned config.
        assert self._config is not None
        if self._config.enableUnlimitedPreemptableRetries and exitReason == BatchJobExitReason.LOST:
            logger.info("*Not* reducing try count (%s) of job %s with ID %s",
                        self.remainingTryCount, self, self.jobStoreID)
        else:
            self.remainingTryCount = max(0, self.remainingTryCount - 1)
            logger.warning("Due to failure we are reducing the remaining try count of job %s with ID %s to %s",
                           self, self.jobStoreID, self.remainingTryCount)
        # Set the default memory to be at least as large as the default, in
        # case this was a malloc failure (we do this because of the combined
        # batch system)
        if exitReason == BatchJobExitReason.MEMLIMIT and self._config.doubleMem:
            self.memory = self.memory * 2
            logger.warning("We have doubled the memory of the failed job %s to %s bytes due to doubleMem flag",
                           self, self.memory)
        if self.memory < self._config.defaultMemory:
            self.memory = self._config.defaultMemory
            logger.warning("We have increased the default memory of the failed job %s to %s bytes",
                           self, self.memory)
        if self.disk < self._config.defaultDisk:
            self.disk = self._config.defaultDisk
            logger.warning("We have increased the disk of the failed job %s to the default of %s bytes",
                           self, self.disk)
    def getLogFileHandle(self, jobStore):
        """
        Returns a context manager that yields a file handle to the log file.
        Assumes logJobStoreFileID is set.
        """
        return jobStore.readFileStream(self.logJobStoreFileID)
    @property
    def remainingTryCount(self):
        """
        The try count set on the JobDescription, or the default based on the
        retry count from the config if none is set.
        """
        if self._remainingTryCount is not None:
            return self._remainingTryCount
        elif self._config is not None:
            # Our try count should be the number of retries in the config, plus
            # 1 for the initial try
            return self._config.retryCount + 1
        else:
            raise AttributeError(f"Try count for {self} cannot be determined")
    @remainingTryCount.setter
    def remainingTryCount(self, val):
        self._remainingTryCount = val
    def clearRemainingTryCount(self):
        """
        Clear remainingTryCount and set it back to its default value.
        :returns: True if a modification to the JobDescription was made, and
                  False otherwise.
        :rtype: bool
        """
        if self._remainingTryCount is not None:
            # We had a value stored
            self._remainingTryCount = None
            return True
        else:
            # No change needed
            return False
    def __str__(self):
        """
        Produce a useful logging string identifying this job.
        """
        printedName = "'" + self.jobName + "'"
        if self.unitName:
            printedName += ' ' + self.unitName
        if self.jobStoreID is not None:
            printedName += ' ' + str(self.jobStoreID)
        return printedName
    # Not usable as a key (not hashable) and doesn't have any value-equality.
    # There really should only ever be one true version of a JobDescription at
    # a time, keyed by jobStoreID.
    def __repr__(self):
        return '%s( **%r )' % (self.__class__.__name__, self.__dict__)
class ServiceJobDescription(JobDescription):
    """
    A JobDescription for a job that hosts a service.
    Adds the job-store flag files used to coordinate the service's lifecycle.
    """
    def __init__(self, *args, **kwargs):
        """
        Create a ServiceJobDescription to describe a ServiceHostJob.
        """
        # Build the base JobDescription first.
        super().__init__(*args, **kwargs)
        # Flag-file IDs in the job store, assigned at registration time:
        # - terminateJobStoreID: an empty file which, when deleted, signals
        #   that the service should cease.
        # - startJobStoreID: an empty file which, when deleted, signals that
        #   the service is established.
        # - errorJobStoreID: an empty file which, when deleted, signals that
        #   the service should terminate signaling an error.
        self.terminateJobStoreID = self.startJobStoreID = self.errorJobStoreID = None
    def onRegistration(self, jobStore):
        """
        When a ServiceJobDescription first meets the JobStore, it needs to set up its flag files.
        """
        super().onRegistration(jobStore)
        self.startJobStoreID = jobStore.getEmptyFileStoreID()
        self.terminateJobStoreID = jobStore.getEmptyFileStoreID()
        self.errorJobStoreID = jobStore.getEmptyFileStoreID()
class CheckpointJobDescription(JobDescription):
    """
    A description of a job that is a checkpoint.
    """
    def __init__(self, *args, **kwargs):
        """
        Create a CheckpointJobDescription to describe a checkpoint job.
        """
        # Make the base JobDescription
        super().__init__(*args, **kwargs)
        # Set checkpoint-specific properties
        # None, or a copy of the original command string used to reestablish the job after failure.
        self.checkpoint = None
        # Files that can not be deleted until the job and its successors have completed
        self.checkpointFilesToDelete = []
        # Human-readable names of jobs that were run as part of this job's
        # invocation, starting with this job
        self.chainedJobs = []
    def restartCheckpoint(self, jobStore):
        """
        Restart a checkpoint after the total failure of jobs in its subtree.
        Writes the changes to the jobStore immediately. All the
        checkpoint's successors will be deleted, but its try count
        will *not* be decreased.
        Returns a list with the IDs of any successors deleted.
        :param toil.jobStores.abstractJobStore.AbstractJobStore jobStore: The job store to update.
        """
        assert self.checkpoint is not None
        successorsDeleted = []
        if self.childIDs or self.followOnIDs or self.serviceTree or self.command is not None:
            if self.command is not None:
                # The checkpoint's own body is still pending; it should match
                # the saved checkpoint command.
                assert self.command == self.checkpoint
                logger.debug("Checkpoint job already has command set to run")
            else:
                # Restore the saved command so the checkpoint body reruns.
                self.command = self.checkpoint
            jobStore.update(self) # Update immediately to ensure that checkpoint
            # is made before deleting any remaining successors
            if self.childIDs or self.followOnIDs or self.serviceTree:
                # If the subtree of successors is not complete restart everything
                logger.debug("Checkpoint job has unfinished successor jobs, deleting children: %s, followOns: %s, services: %s " %
                             (self.childIDs, self.followOnIDs, self.serviceTree.keys()))
                # Delete everything on the stack, as these represent successors to clean
                # up as we restart the queue
                def recursiveDelete(jobDesc):
                    # Recursive walk the stack to delete all remaining jobs
                    for otherJobID in jobDesc.successorsAndServiceHosts():
                        if jobStore.exists(otherJobID):
                            recursiveDelete(jobStore.load(otherJobID))
                        else:
                            logger.debug("Job %s has already been deleted", otherJobID)
                    if jobDesc.jobStoreID != self.jobStoreID:
                        # Delete everything under us except us.
                        logger.debug("Checkpoint is deleting old successor job: %s", jobDesc.jobStoreID)
                        jobStore.delete(jobDesc.jobStoreID)
                        successorsDeleted.append(jobDesc.jobStoreID)
                recursiveDelete(self)
                # Cut links to the jobs we deleted.
                self.clearSuccessorsAndServiceHosts()
                # Update again to commit the removal of successors.
                jobStore.update(self)
        return successorsDeleted
class Job:
"""
Class represents a unit of work in toil.
"""
    def __init__(self, memory=None, cores=None, disk=None, preemptable=None,
                 unitName='', checkpoint=False, displayName='',
                 descriptionClass=None):
        """
        This method must be called by any overriding constructor.
        :param memory: the maximum number of bytes of memory the job will require to run.
        :param cores: the number of CPU cores required.
        :param disk: the amount of local disk space required by the job, expressed in bytes.
        :param preemptable: if the job can be run on a preemptable node.
        :param unitName: Human-readable name for this instance of the job.
        :param checkpoint: if any of this job's successor jobs completely fails,
            exhausting all their retries, remove any successor jobs and rerun this job to restart the
            subtree. Job must be a leaf vertex in the job graph when initially defined, see
            :func:`toil.job.Job.checkNewCheckpointsAreCutVertices`.
        :param displayName: Human-readable job type display name.
        :param descriptionClass: Override for the JobDescription class used to describe the job.
        :type memory: int or string convertible by toil.lib.conversions.human2bytes to an int
        :type cores: float, int, or string convertible by toil.lib.conversions.human2bytes to an int
        :type disk: int or string convertible by toil.lib.conversions.human2bytes to an int
        :type preemptable: bool, int in {0, 1}, or string in {'false', 'true'} in any case
        :type unitName: str
        :type checkpoint: bool
        :type displayName: str
        :type descriptionClass: class
        """
        # Fill in our various names
        jobName = self.__class__.__name__
        displayName = displayName if displayName else jobName
        # Build a requirements dict for the description
        requirements = {'memory': memory, 'cores': cores, 'disk': disk,
                        'preemptable': preemptable}
        if descriptionClass is None:
            if checkpoint:
                # Actually describe as a checkpoint job
                descriptionClass = CheckpointJobDescription
            else:
                # Use the normal default
                descriptionClass = JobDescription
        # Create the JobDescription that owns all the scheduling information.
        # Make it with a temporary ID until we can be assigned a real one by
        # the JobStore.
        self._description = descriptionClass(requirements, jobName, unitName=unitName, displayName=displayName)
        # Private class variables needed to actually execute a job, in the worker.
        # Also needed for setting up job graph structures before saving to the JobStore.
        # This dict holds a mapping from TemporaryIDs to the job objects they represent.
        # Will be shared among all jobs in a disconnected piece of the job
        # graph that hasn't been registered with a JobStore yet.
        # Make sure to initially register ourselves.
        self._registry = {self._description.jobStoreID: self}
        # Job relationships are all stored exactly once in the JobDescription.
        # Except for predecessor relationships which are stored here, just
        # while the user is creating the job graphs, to check for duplicate
        # relationships and to let EncapsulatedJob magically add itself as a
        # child. Note that this stores actual Job objects, to call addChild on.
        self._directPredecessors = set()
        # Note that self.__module__ is not necessarily this module, i.e. job.py. It is the module
        # defining the class self is an instance of, which may be a subclass of Job that may be
        # defined in a different module.
        self.userModule = ModuleDescriptor.forModule(self.__module__).globalize()
        # Maps index paths into composite return values to lists of IDs of files containing
        # promised values for those return value items. An index path is a tuple of indices that
        # traverses a nested data structure of lists, dicts, tuples or any other type supporting
        # the __getitem__() protocol. The special key `()` (the empty tuple) represents the
        # entire return value.
        self._rvs = collections.defaultdict(list)
        # Worker-side state, filled in when the job actually runs:
        self._promiseJobStore = None
        self._fileStore = None
        self._defer = None
        self._tempDir = None
def __str__(self):
"""
Produce a useful logging string to identify this Job and distinguish it
from its JobDescription.
"""
if self.description is None:
return repr(self)
else:
return 'Job(' + str(self.description) + ')'
@property
def jobStoreID(self):
"""
Get the ID of this Job.
:rtype: str|toil.job.TemporaryID
"""
# This is managed by the JobDescription.
return self._description.jobStoreID
@property
def description(self):
"""
Expose the JobDescription that describes this job.
:rtype: toil.job.JobDescription
"""
return self._description
# Instead of being a Requirer ourselves, we pass anything about
# requirements through to the JobDescription.
@property
def disk(self) -> int:
"""
The maximum number of bytes of disk the job will require to run.
:rtype: int
"""
return self.description.disk
@disk.setter
def disk(self, val):
self.description.disk = val
@property
def memory(self):
"""
The maximum number of bytes of memory the job will require to run.
:rtype: int
"""
return self.description.memory
@memory.setter
def memory(self, val):
self.description.memory = val
@property
def cores(self):
"""
The number of CPU cores required.
:rtype: int|float
"""
return self.description.cores
@cores.setter
def cores(self, val):
self.description.cores = val
@property
def preemptable(self):
"""
Whether the job can be run on a preemptable node.
:rtype: bool
"""
return self.description.preemptable
@preemptable.setter
def preemptable(self, val):
self.description.preemptable = val
@property
def checkpoint(self):
"""
Determine if the job is a checkpoint job or not.
:rtype: bool
"""
return isinstance(self._description, CheckpointJobDescription)
def assignConfig(self, config):
"""
Assign the given config object to be used by various actions
implemented inside the Job class.
:param toil.common.Config config: Config object to query
"""
self.description.assignConfig(config)
def run(self, fileStore):
"""
Override this function to perform work and dynamically create successor jobs.
:param toil.fileStores.abstractFileStore.AbstractFileStore fileStore: Used to create local and
globally sharable temporary files and to send log messages to the leader
process.
:return: The return value of the function can be passed to other jobs by means of
:func:`toil.job.Job.rv`.
"""
    def _jobGraphsJoined(self, other):
        """
        Called whenever the job graphs of this job and the other job may have been merged into one connected component.
        Ought to be called on the bigger registry first.
        Merges TemporaryID registries if needed.
        :param toil.job.Job other: A job possibly from the other connected component
        """
        # Maintain the invariant that a whole connected component has a config
        # assigned if any job in it does. Do this BEFORE merging registries so
        # each component's membership is still distinguishable.
        if self.description._config is None and other.description._config is not None:
            # The other component has a config assigned but this component doesn't.
            for job in self._registry.values():
                job.assignConfig(other.description._config)
        elif other.description._config is None and self.description._config is not None:
            # We have a config assigned but the other component doesn't.
            for job in other._registry.values():
                job.assignConfig(self.description._config)
        if len(self._registry) < len(other._registry):
            # Merge into the other component instead
            other._jobGraphsJoined(self)
        else:
            if self._registry != other._registry:
                # We are in fact joining connected components.
                # Steal everything from the other connected component's registry
                self._registry.update(other._registry)
                for job in other._registry.values():
                    # Point all their jobs at the new combined registry
                    job._registry = self._registry
def addChild(self, childJob):
"""
Adds childJob to be run as child of this job. Child jobs will be run \
directly after this job's :func:`toil.job.Job.run` method has completed.
:param toil.job.Job childJob:
:return: childJob
:rtype: toil.job.Job
"""
assert isinstance(childJob, Job)
# Join the job graphs
self._jobGraphsJoined(childJob)
# Remember the child relationship
self._description.addChild(childJob.jobStoreID)
# Record the temporary back-reference
childJob._addPredecessor(self)
return childJob
def hasChild(self, childJob):
"""
Check if childJob is already a child of this job.
:param toil.job.Job childJob:
:return: True if childJob is a child of the job, else False.
:rtype: bool
"""
return self._description.hasChild(childJob.jobStoreID)
def addFollowOn(self, followOnJob):
"""
Adds a follow-on job, follow-on jobs will be run after the child jobs and \
their successors have been run.
:param toil.job.Job followOnJob:
:return: followOnJob
:rtype: toil.job.Job
"""
assert isinstance(followOnJob, Job)
# Join the job graphs
self._jobGraphsJoined(followOnJob)
# Remember the follow-on relationship
self._description.addFollowOn(followOnJob.jobStoreID)
# Record the temporary back-reference
followOnJob._addPredecessor(self)
return followOnJob
def hasPredecessor(self, job: 'Job') -> bool:
"""Check if a given job is already a predecessor of this job."""
return job in self._directPredecessors
def hasFollowOn(self, followOnJob):
"""
Check if given job is already a follow-on of this job.
:param toil.job.Job followOnJob:
:return: True if the followOnJob is a follow-on of this job, else False.
:rtype: bool
"""
return self._description.hasChild(followOnJob.jobStoreID)
    def addService(self, service, parentService=None):
        """
        Add a service.
        The :func:`toil.job.Job.Service.start` method of the service will be called
        after the run method has completed but before any successors are run.
        The service's :func:`toil.job.Job.Service.stop` method will be called once
        the successors of the job have been run.
        Services allow things like databases and servers to be started and accessed
        by jobs in a workflow.
        :raises toil.job.JobException: If service has already been made the child of a job or another service.
        :param toil.job.Job.Service service: Service to add.
        :param toil.job.Job.Service parentService: Service that will be started before 'service' is
            started. Allows trees of services to be established. parentService must be a service
            of this job.
        :return: a promise that will be replaced with the return value from
            :func:`toil.job.Job.Service.start` of service in any successor of the job.
        :rtype: toil.job.Promise
        """
        if parentService is not None:
            if not self.hasService(parentService):
                raise JobException("Parent service is not a service of the given job")
        if service.hostID is not None:
            raise JobException("Service has already been added to a job")
        # Create a host job for the service, and get it an ID
        hostingJob = ServiceHostJob(service)
        self._jobGraphsJoined(hostingJob)
        # Record the relationship to the hosting job, with its parent if any.
        self._description.addServiceHostJob(hostingJob.jobStoreID, parentService.hostID if parentService is not None else None)
        # For compatibility with old Cactus versions that tinker around with
        # our internals, we need to make the hosting job available as
        # self._services[-1]. TODO: Remove this when Cactus has updated.
        # NOTE(review): this overwrites rather than appends, so only the most
        # recently added service is visible here — appears intentional for the
        # [-1] access pattern, but confirm before relying on it.
        self._services = [hostingJob]
        # Return the promise for the service's startup result
        return hostingJob.rv()
def hasService(self, service):
    """
    Tell whether the given Service belongs to this job.

    A service that has not yet been attached to any job (its host ID is
    still None) is deliberately treated as belonging.
    """
    hostID = service.hostID
    if hostID is None:
        return True
    return self._description.hasServiceHostJob(hostID)
##Convenience functions for creating jobs
def addChildFn(self, fn, *args, **kwargs):
    """
    Add a plain function as a child job.

    :param fn: Function to be run as a child job with ``*args`` and ``**kwargs`` as
        arguments to this function. See toil.job.FunctionWrappingJob for reserved
        keyword arguments used to specify resource requirements.
    :return: The new child job that wraps fn.
    :rtype: toil.job.FunctionWrappingJob
    """
    # Promised resource requirements need the promise-aware wrapper type.
    if PromisedRequirement.convertPromises(kwargs):
        childJob = PromisedRequirementFunctionWrappingJob.create(fn, *args, **kwargs)
    else:
        childJob = FunctionWrappingJob(fn, *args, **kwargs)
    return self.addChild(childJob)
def addFollowOnFn(self, fn, *args, **kwargs):
    """
    Add a plain function as a follow-on job.

    :param fn: Function to be run as a follow-on job with ``*args`` and ``**kwargs`` as
        arguments to this function. See toil.job.FunctionWrappingJob for reserved
        keyword arguments used to specify resource requirements.
    :return: The new follow-on job that wraps fn.
    :rtype: toil.job.FunctionWrappingJob
    """
    # Promised resource requirements need the promise-aware wrapper type.
    if PromisedRequirement.convertPromises(kwargs):
        followOnJob = PromisedRequirementFunctionWrappingJob.create(fn, *args, **kwargs)
    else:
        followOnJob = FunctionWrappingJob(fn, *args, **kwargs)
    return self.addFollowOn(followOnJob)
def addChildJobFn(self, fn, *args, **kwargs):
    """
    Add a job function as a child job.

    See :class:`toil.job.JobFunctionWrappingJob` for a definition of a job function.

    :param fn: Job function to be run as a child job with ``*args`` and ``**kwargs`` as
        arguments to this function. See toil.job.JobFunctionWrappingJob for reserved
        keyword arguments used to specify resource requirements.
    :return: The new child job that wraps fn.
    :rtype: toil.job.JobFunctionWrappingJob
    """
    # Promised resource requirements need the promise-aware wrapper type.
    if PromisedRequirement.convertPromises(kwargs):
        childJob = PromisedRequirementJobFunctionWrappingJob.create(fn, *args, **kwargs)
    else:
        childJob = JobFunctionWrappingJob(fn, *args, **kwargs)
    return self.addChild(childJob)
def addFollowOnJobFn(self, fn, *args, **kwargs):
    """
    Add a job function as a follow-on job.

    See :class:`toil.job.JobFunctionWrappingJob` for a definition of a job function.

    :param fn: Job function to be run as a follow-on job with ``*args`` and ``**kwargs`` as
        arguments to this function. See toil.job.JobFunctionWrappingJob for reserved
        keyword arguments used to specify resource requirements.
    :return: The new follow-on job that wraps fn.
    :rtype: toil.job.JobFunctionWrappingJob
    """
    # Promised resource requirements need the promise-aware wrapper type.
    if PromisedRequirement.convertPromises(kwargs):
        followOnJob = PromisedRequirementJobFunctionWrappingJob.create(fn, *args, **kwargs)
    else:
        followOnJob = JobFunctionWrappingJob(fn, *args, **kwargs)
    return self.addFollowOn(followOnJob)
@property
def tempDir(self):
    """
    Shortcut to calling :func:`job.fileStore.getLocalTempDir`.

    The temp dir is created lazily on the first access and the same path is
    returned on every later access.

    :return: Path to tempDir. See `job.fileStore.getLocalTempDir`
    :rtype: str
    """
    cached = self._tempDir
    if cached is None:
        cached = self._fileStore.getLocalTempDir()
        self._tempDir = cached
    return cached
def log(self, text, level=logging.INFO):
    """
    Convenience wrapper around :func:`fileStore.logToMaster`.

    :param str text: message to log.
    :param int level: standard :mod:`logging` level, INFO by default.
    """
    # Delegate straight to the file store's leader-visible log.
    self._fileStore.logToMaster(text, level)
@staticmethod
def wrapFn(fn, *args, **kwargs):
    """
    Make a Job out of a plain function.

    Convenience alternative to constructing :class:`toil.job.FunctionWrappingJob`
    directly.

    :param fn: Function to be run with ``*args`` and ``**kwargs`` as arguments.
        See toil.job.JobFunctionWrappingJob for reserved keyword arguments used
        to specify resource requirements.
    :return: The new function that wraps fn.
    :rtype: toil.job.FunctionWrappingJob
    """
    # Promised resource requirements need the promise-aware wrapper type.
    usesPromises = PromisedRequirement.convertPromises(kwargs)
    if usesPromises:
        return PromisedRequirementFunctionWrappingJob.create(fn, *args, **kwargs)
    return FunctionWrappingJob(fn, *args, **kwargs)
@staticmethod
def wrapJobFn(fn, *args, **kwargs):
    """
    Make a Job out of a job function.

    Convenience alternative to constructing :class:`toil.job.JobFunctionWrappingJob`
    directly.

    :param fn: Job function to be run with ``*args`` and ``**kwargs`` as arguments.
        See toil.job.JobFunctionWrappingJob for reserved keyword arguments used
        to specify resource requirements.
    :return: The new job function that wraps fn.
    :rtype: toil.job.JobFunctionWrappingJob
    """
    # Promised resource requirements need the promise-aware wrapper type.
    usesPromises = PromisedRequirement.convertPromises(kwargs)
    if usesPromises:
        return PromisedRequirementJobFunctionWrappingJob.create(fn, *args, **kwargs)
    return JobFunctionWrappingJob(fn, *args, **kwargs)
def encapsulate(self, name=None):
    """
    Wrap this job in an :class:`toil.job.EncapsulatedJob`.

    Convenience alternative to constructing :class:`toil.job.EncapsulatedJob`
    directly.

    :param str name: Human-readable name for the encapsulated job.
    :return: an encapsulated version of this job.
    :rtype: toil.job.EncapsulatedJob
    """
    wrapped = EncapsulatedJob(self, unitName=name)
    return wrapped
####################################################
#The following function is used for passing return values between
#job run functions
####################################################
def rv(self, *path):
    """
    Creates a *promise* (:class:`toil.job.Promise`) representing a return value of the job's
    run method, or, in case of a function-wrapping job, the wrapped function's return value.

    :param (Any) path: Optional path for selecting a component of the promised return value.
        If absent or empty, the entire return value will be used. Otherwise, the first
        element of the path is used to select an individual item of the return value. For
        that to work, the return value must be a list, dictionary or of any other type
        implementing the `__getitem__()` magic method. If the selected item is yet another
        composite value, the second element of the path can be used to select an item from
        it, and so on. For example, if the return value is `[6,{'a':42}]`, `.rv(0)` would
        select `6` , `rv(1)` would select `{'a':42}` while `rv(1,'a')` would select `42`. To
        select a slice from a return value that is slicable, e.g. tuple or list, the path
        element should be a `slice` object. For example, assuming that the return value is
        `[6, 7, 8, 9]` then `.rv(slice(1, 3))` would select `[7, 8]`. Note that slicing
        really only makes sense at the end of path.
    :return: A promise representing the return value of this jobs :meth:`toil.job.Job.run`
        method.
    :rtype: toil.job.Promise
    """
    # The Promise records the job and the selection path; the actual value is
    # filled in later by _fulfillPromises.
    return Promise(self, path)
def registerPromise(self, path):
    """
    Record that a promise on component ``path`` of this job's return value has
    been handed out, and allocate a job store file where the fulfilled value
    will eventually be written (see ``_fulfillPromises``).

    :return: tuple of the promising job store's locator (``config.jobStore``)
        and the ID of the newly created promise file.
    :raises JobPromiseConstraintError: if no job store has been set via
        ``prepareForPromiseRegistration`` yet.
    """
    if self._promiseJobStore is None:
        # We haven't had a job store set to put our return value into, so
        # we must not have been hit yet in job topological order.
        raise JobPromiseConstraintError(self)
    # TODO: can we guarantee self.jobStoreID is populated and so pass that here?
    with self._promiseJobStore.writeFileStream() as (fileHandle, jobStoreFileID):
        # Write a sentinel so a reader that looks before fulfillment sees an
        # explicit "not yet fulfilled" marker instead of garbage.
        promise = UnfulfilledPromiseSentinel(str(self.description), False)
        pickle.dump(promise, fileHandle, pickle.HIGHEST_PROTOCOL)
    # Remember where the real value for this path must be written later.
    self._rvs[path].append(jobStoreFileID)
    return self._promiseJobStore.config.jobStore, jobStoreFileID
def prepareForPromiseRegistration(self, jobStore):
    """
    Ensure that a promise by this job (the promissor) can register with the
    promissor when another job referring to the promise (the promissee) is
    being serialized.

    The promissee holds the reference to the promise (usually as part of the
    job arguments) and when it is being pickled, so are the promises it refers
    to. Pickling a promise triggers it to be registered with the promissor.
    """
    # Promises created from this job will write their sentinel files here.
    self._promiseJobStore = jobStore
def _disablePromiseRegistration(self):
"""
Called when the job data is about to be saved in the JobStore.
No promises should attempt to register with the job after this has been
called, because that registration would not be persisted.
"""
self._promiseJobStore = None
####################################################
#Cycle/connectivity checking
####################################################
def checkJobGraphForDeadlocks(self):
    """
    Ensure that a graph of Jobs (that hasn't yet been saved to the JobStore)
    doesn't contain any pathological relationships between jobs that would
    result in deadlocks if we tried to run the jobs.

    See :func:`toil.job.Job.checkJobGraphConnected`,
    :func:`toil.job.Job.checkJobGraphAcylic` and
    :func:`toil.job.Job.checkNewCheckpointsAreLeafVertices` for more info.

    :raises toil.job.JobGraphDeadlockException: if the job graph is cyclic,
        contains multiple roots or contains checkpoint jobs that are not leaf
        vertices when defined.
    """
    # Run each structural check in order; any of them may raise.
    for check in (self.checkJobGraphConnected,
                  self.checkJobGraphAcylic,
                  self.checkNewCheckpointsAreLeafVertices):
        check()
def getRootJobs(self) -> Set['Job']:
    """
    Return the set of root job objects that contain this job.

    A root job is a job with no predecessors (i.e. which is not a child,
    follow-on, or service of any other job). Only deals with jobs created
    here, rather than loaded from the job store.
    """
    # Every registered job is a root candidate until someone points at it.
    candidates = set(self._registry)
    for job in self._registry.values():
        # Anything that is a successor or service of another job is not a root.
        candidates.difference_update(job.description.successorsAndServiceHosts())
    return {jobID and self._registry[jobID] for jobID in candidates}
def checkJobGraphConnected(self):
    """
    Verify the locally-created job graph has exactly one root job.

    As execution always starts from one root job, having multiple root jobs
    will cause a deadlock to occur. Only deals with jobs created here, rather
    than loaded from the job store.

    :raises toil.job.JobGraphDeadlockException: if :func:`toil.job.Job.getRootJobs`
        does not contain exactly one root job.
    """
    roots = self.getRootJobs()
    if len(roots) == 1:
        return
    raise JobGraphDeadlockException("Graph does not contain exactly one"
                                    " root job: %s" % roots)
def checkJobGraphAcylic(self):
    """
    Verify the connected component of jobs containing this job has no cycles
    of child/followOn dependencies in the *augmented job graph* (see below).
    Such cycles are not allowed in valid job graphs.

    A follow-on edge (A, B) between two jobs A and B is equivalent to adding a
    child edge to B from (1) A, (2) from each child of A, and (3) from the
    successors of each child of A. We call each such edge an "implied" edge.
    The augmented job graph is a job graph including all the implied edges.

    For a job graph G = (V, E) the algorithm is ``O(|V|^2)``. It is
    ``O(|V| + |E|)`` for a graph with no follow-ons. The former follow-on case
    could be improved! Only deals with jobs created here, rather than loaded
    from the job store.

    :raises toil.job.JobGraphDeadlockException: if a cycle is found.
    """
    roots = self.getRootJobs()
    if not roots:
        # A cycle that swallows every job leaves no root at all.
        raise JobGraphDeadlockException("Graph contains no root jobs due to cycles")
    # Follow-on relationships add "implied" child edges; compute them once.
    impliedEdges = self._getImpliedEdges(roots)
    # DFS from each root, sharing one visited set across traversals.
    seen = set()
    for root in roots:
        root._checkJobGraphAcylicDFS([], seen, impliedEdges)
def _checkJobGraphAcylicDFS(self, stack, visited, extraEdges):
    """
    DFS traversal to detect cycles in augmented job graph.

    :param list stack: jobs on the current DFS path (ancestors of self).
    :param set visited: jobs whose subtrees have already been fully explored.
    :param dict extraEdges: map from Job to list of Jobs giving the implied
        follow-on edges (see ``_getImpliedEdges``).
    :raises JobGraphDeadlockException: if self is found on the current DFS
        path, i.e. a dependency cycle exists.
    """
    if self not in visited:
        visited.add(self)
        stack.append(self)
        # Grab all the successors in the current registry (i.e. added from this node),
        # plus the implied edges, and recurse into them.
        for successor in [self._registry[jID] for jID in self.description.allSuccessors() if jID in self._registry] + extraEdges[self]:
            successor._checkJobGraphAcylicDFS(stack, visited, extraEdges)
        # We must be unwinding in LIFO order for the path stack to be valid.
        assert stack.pop() == self
    if self in stack:
        # self is its own ancestor on this path: a cycle. Re-append it so the
        # reported stack ends with the repeated job for readability.
        stack.append(self)
        raise JobGraphDeadlockException("A cycle of job dependencies has been detected '%s'" % stack)
@staticmethod
def _getImpliedEdges(roots):
"""
Gets the set of implied edges (between children and follow-ons of a common job). Used in Job.checkJobGraphAcylic.
Only deals with jobs created here, rather than loaded from the job store.
:returns: dict from Job object to list of Job objects that must be done before it can start.
"""
#Get nodes (Job objects) in job graph
nodes = set()
for root in roots:
root._collectAllSuccessors(nodes)
##For each follow-on edge calculate the extra implied edges
#Adjacency list of implied edges, i.e. map of jobs to lists of jobs
#connected by an implied edge
extraEdges = dict([(n, []) for n in nodes])
for job in nodes:
for depth in range(1, len(job.description.stack)):
# Add edges from all jobs in the earlier/upper subtrees to all
# the roots of the later/lower subtrees
upper = job.description.stack[depth]
lower = job.description.stack[depth - 1]
# Find everything in the upper subtree
reacheable = set()
for upperID in upper:
if upperID in job._registry:
# This is a locally added job, not an already-saved job
upperJob = job._registry[upperID]
upperJob._collectAllSuccessors(reacheable)
for inUpper in reacheable:
# Add extra edges to the roots of all the lower subtrees
# But skip anything in the lower subtree not in the current _registry (i.e. not created hear)
extraEdges[inUpper] += [job._registry[lowerID] for lowerID in lower if lowerID in job._registry]
return extraEdges
def checkNewCheckpointsAreLeafVertices(self):
    """
    Verify that every newly added checkpoint job is a leaf of the job graph.

    A checkpoint job is a job that is restarted if either it fails, or if any
    of its successors completely fails, exhausting their retries. A job is a
    leaf if it has no successors. A checkpoint job must be a leaf when
    initially added to the job graph; when its run method is invoked it can
    then create direct successors. This restriction is made to simplify
    implementation. Only works on connected components of jobs not yet added
    to the JobStore.

    :raises toil.job.JobGraphDeadlockException: if there exists a job being
        added to the graph for which checkpoint=True and which is not a leaf.
    """
    # Root jobs of the component; these are pre-existing jobs in the graph.
    roots = self.getRootJobs()
    # All jobs in the component of the job graph containing self.
    componentJobs = set()
    for root in roots:
        root._collectAllSuccessors(componentJobs)
    for job in componentJobs:
        if job.checkpoint and job not in roots and not Job._isLeafVertex(job):
            raise JobGraphDeadlockException("New checkpoint job %s is not a leaf in the job graph" % job)
####################################################
#Deferred function system
####################################################
def defer(self, function, *args, **kwargs):
    """
    Register a deferred function, i.e. a callable that will be invoked after
    the current attempt at running this job concludes.

    A job attempt is said to conclude when the job function (or the
    :meth:`toil.job.Job.run` method for class-based jobs) returns, raises an
    exception or after the process running it terminates abnormally. A
    deferred function will be called on the node that attempted to run the
    job, even if a subsequent attempt is made on another node. A deferred
    function should be idempotent because it may be called multiple times on
    the same node or even in the same process. More than one deferred function
    may be registered per job attempt by calling this method repeatedly with
    different arguments. If the same function is registered twice with the
    same or different arguments, it will be called twice per job attempt.

    Examples of deferred functions are ones that handle cleanup of resources
    external to Toil, like Docker containers, files outside the work
    directory, etc.

    :param callable function: The function to be called after this job concludes.
    :param list args: The arguments to the function
    :param dict kwargs: The keyword arguments to the function
    """
    registerDeferred = self._defer
    if registerDeferred is None:
        # Registration only makes sense while the job is actually running.
        raise Exception('A deferred function may only be registered with a job while that job is running.')
    registerDeferred(DeferredFunction.create(function, *args, **kwargs))
####################################################
#The following nested classes are used for
#creating jobtrees (Job.Runner),
#and defining a service (Job.Service)
####################################################
class Runner():
    """
    Helpers for configuring and launching a Toil workflow.
    """
    @staticmethod
    def getDefaultArgumentParser():
        """
        Get argument parser with added toil workflow options.

        :returns: The argument parser used by a toil workflow with added Toil options.
        :rtype: :class:`argparse.ArgumentParser`
        """
        parser = ArgumentParser(formatter_class=ArgumentDefaultsHelpFormatter)
        Job.Runner.addToilOptions(parser)
        return parser

    @staticmethod
    def getDefaultOptions(jobStore):
        """
        Get default options for a toil workflow.

        :param string jobStore: A string describing the jobStore for the workflow.
        :returns: The options used by a toil workflow.
        :rtype: argparse.ArgumentParser values object
        """
        # Parse an argv consisting of only the job store locator.
        return Job.Runner.getDefaultArgumentParser().parse_args(args=[jobStore])

    @staticmethod
    def addToilOptions(parser):
        """
        Add the default Toil options to an :mod:`optparse` or :mod:`argparse`
        parser object.

        :param parser: Options object to add toil options to.
        :type parser: optparse.OptionParser or argparse.ArgumentParser
        """
        addOptions(parser)

    @staticmethod
    def startToil(job, options):
        """
        Deprecated by toil.common.Toil.start. Runs the toil workflow using the
        given options (see Job.Runner.getDefaultOptions and
        Job.Runner.addToilOptions) starting with this job.

        :param toil.job.Job job: root job of the workflow
        :raises: toil.leader.FailedJobsException if at the end of the function
            there remain failed jobs.
        :return: The return value of the root job's run function.
        :rtype: Any
        """
        set_logging_from_options(options)
        with Toil(options) as toil:
            # Restart an existing workflow if asked, otherwise start fresh.
            return toil.restart() if options.restart else toil.start(job)
class Service(Requirer, metaclass=ABCMeta):
    """
    Abstract base class defining the interface to a service.

    Subclass this to define a service. A Service is not executed as a job
    itself; it runs within a ServiceHostJob.
    """
    def __init__(self, memory=None, cores=None, disk=None, preemptable=None, unitName=None):
        """
        Memory, core and disk requirements are specified identically to
        :func:`toil.job.Job.__init__`.
        """
        # Keep the requirements on `self` so they are visible to user code.
        requirements = {'memory': memory, 'cores': cores,
                        'disk': disk, 'preemptable': preemptable}
        super().__init__(requirements)
        # Optional human-readable unit name.
        self.unitName = unitName
        # Name used for the job that will host this service.
        self.jobName = self.__class__.__name__
        # No ServiceHostJob has been assigned yet.
        self.hostID = None

    @abstractmethod
    def start(self, job):
        """
        Start the service.

        :param toil.job.Job job: The underlying host job that the service is being run in.
            Can be used to register deferred functions, or to access
            the fileStore for creating temporary files.
        :returns: An object describing how to access the service. The object must be pickleable
            and will be used by jobs to access the service (see :func:`toil.job.Job.addService`).
        """

    @abstractmethod
    def stop(self, job):
        """
        Stop the service. May block until complete.

        :param toil.job.Job job: The underlying host job that the service is being run in.
            Can be used to register deferred functions, or to access
            the fileStore for creating temporary files.
        """

    def check(self):
        """
        Check that the service is still running.

        :raise exceptions.RuntimeError: If the service failed, this will cause the
            service job to be labeled failed.
        :returns: True if the service is still running, else False. If False then the
            service job will be terminated, and considered a success. Important point:
            if the service job exits due to a failure, it should raise a RuntimeError,
            not return False!
        """
def _addPredecessor(self, predecessorJob):
"""Adds a predecessor job to the set of predecessor jobs."""
if predecessorJob in self._directPredecessors:
raise ConflictingPredecessorError(predecessorJob, self)
self._directPredecessors.add(predecessorJob)
# Record the need for the predecessor to finish
self._description.addPredecessor()
@staticmethod
def _isLeafVertex(job):
return next(job.description.successorsAndServiceHosts(), None) is None
@classmethod
def _loadUserModule(cls, userModule):
    """
    Import and return the module object represented by the given module
    descriptor.

    :type userModule: ModuleDescriptor
    """
    loaded = userModule.load()
    return loaded
@classmethod
def _unpickle(cls, userModule, fileHandle, requireInstanceOf=None):
    """
    Unpickles an object graph from the given file handle while loading symbols
    referencing the __main__ module from the given userModule instead.

    :param userModule: module to resolve __main__ references against.
    :param fileHandle: An open, binary-mode file handle.
    :param requireInstanceOf: If set, require result to be an instance of this class.
    :returns: the unpickled object.
    """
    def filter_main(module_name, class_name):
        # Resolve a (module, name) pair, redirecting __main__ lookups to the
        # user's script module so jobs pickled from a script can be loaded
        # in a worker whose __main__ is different.
        try:
            if module_name == '__main__':
                return getattr(userModule, class_name)
            else:
                return getattr(importlib.import_module(module_name), class_name)
        except:
            # NOTE(review): bare except also catches non-Exception errors;
            # tolerable here since the error is logged and re-raised.
            if module_name == '__main__':
                logger.debug('Failed getting %s from module %s.', class_name, userModule)
            else:
                logger.debug('Failed getting %s from module %s.', class_name, module_name)
            raise

    class FilteredUnpickler(pickle.Unpickler):
        # Hook pickle's symbol resolution so every global goes through
        # filter_main above.
        def find_class(self, module, name):
            return filter_main(module, name)

    unpickler = FilteredUnpickler(fileHandle)
    runnable = unpickler.load()
    if requireInstanceOf is not None:
        assert isinstance(runnable, requireInstanceOf), "Did not find a {} when expected".format(requireInstanceOf)
    return runnable
def getUserScript(self):
    """
    Return the ModuleDescriptor for the module defining this job.

    Wrapper job types override this to return the module loaded as __main__
    (see the comment in saveBody for why that matters).
    """
    return self.userModule
def _fulfillPromises(self, returnValues, jobStore):
    """
    Sets the values for promises using the return values from this job's run() function.

    :param returnValues: the value returned by the job's run method (or the
        service's start), possibly itself a Promise.
    :param jobStore: job store holding the promise files created by
        registerPromise.
    """
    for path, promiseFileStoreIDs in self._rvs.items():
        if not path:
            # Note that its possible for returnValues to be a promise, not an actual return
            # value. This is the case if the job returns a promise from another job. In
            # either case, we just pass it on.
            promisedValue = returnValues
        else:
            # If there is a path ...
            if isinstance(returnValues, Promise):
                # ... and the value itself is a Promise, we need to create a new, narrower
                # promise and pass it on.
                promisedValue = Promise(returnValues.job, path)
            else:
                # Otherwise, we just select the desired component of the return value.
                promisedValue = returnValues
                for index in path:
                    promisedValue = promisedValue[index]
        for promiseFileStoreID in promiseFileStoreIDs:
            # File may be gone if the job is a service being re-run and the accessing job is
            # already complete.
            if jobStore.fileExists(promiseFileStoreID):
                # Overwrite the sentinel with the fulfilled value.
                with jobStore.updateFileStream(promiseFileStoreID) as fileHandle:
                    pickle.dump(promisedValue, fileHandle, pickle.HIGHEST_PROTOCOL)
# Functions associated with Job.checkJobGraphAcyclic to establish that the job graph does not
# contain any cycles of dependencies:
def _collectAllSuccessors(self, visited):
"""
Adds the job and all jobs reachable on a directed path from current node to the given set.
Only considers jobs in this job's subgraph that are newly added, not loaded from the job store.
"""
# Keep our own stack since we may have a stick in the graph long enough
# to exhaust the real stack
todo = [self]
while len(todo) > 0:
job = todo[-1]
todo.pop()
if job not in visited:
visited.add(job)
for successorID in job.description.allSuccessors():
if successorID in self._registry:
# We added this successor locally
todo.append(self._registry[successorID])
def getTopologicalOrderingOfJobs(self):
    """
    :returns: a list of jobs such that for all pairs of indices i, j for which i < j,
        the job at index i can be run before the job at index j.

    Only considers jobs in this job's subgraph that are newly added, not loaded
    from the job store. Ignores service jobs.

    :rtype: list[Job]
    """
    # List of Job objects in order.
    ordering = []
    # Set of IDs of visited jobs.
    visited = set()
    # We need to recurse and traverse the graph without exhausting Python's
    # stack, so we keep our own stack of Job objects
    todo = [self]
    while len(todo) > 0:
        job = todo[-1]
        todo.pop()
        # Do not add the job to the ordering until all its predecessors have been
        # added to the ordering. A deferred job is re-pushed later when its
        # remaining predecessor(s) process their successors below.
        outstandingPredecessor = False
        for predJob in job._directPredecessors:
            if predJob.jobStoreID not in visited:
                outstandingPredecessor = True
                break
        if outstandingPredecessor:
            continue
        if job.jobStoreID not in visited:
            visited.add(job.jobStoreID)
            ordering.append(job)
            # Push follow-ons first so children end up on top of the stack and
            # are processed before the follow-ons.
            for otherID in itertools.chain(job.description.followOnIDs, job.description.childIDs):
                if otherID in self._registry:
                    # Only locally added jobs are ordered here.
                    todo.append(self._registry[otherID])
    return ordering
####################################################
#Storing Jobs into the JobStore
####################################################
def _register(self, jobStore):
    """
    If this job lacks a JobStore-assigned ID, assign this job an ID.

    Must be called for each job before it is saved to the JobStore for the
    first time.

    :returns: A list with either one (old ID, new ID) pair, or an empty list.
    :rtype: list
    """
    # TODO: This doesn't really have much to do with the registry. Rename
    # the registry.
    if not isinstance(self.jobStoreID, TemporaryID):
        # We already have a real ID; no assignment or reference rewrite needed.
        return []
    # Remember the placeholder ID so callers can rewrite references to it.
    placeholder = self.jobStoreID
    # Have the job store hand out a real ID.
    jobStore.assignID(self.description)
    # Let the JobDescription do its JobStore-related setup.
    self.description.onRegistration(jobStore)
    # Report the placeholder-to-real mapping.
    return [(placeholder, self.description.jobStoreID)]
def _renameReferences(self, renames):
    """
    Apply the given dict of ID renames to all references to other jobs.

    Ignores the registry, which is shared and assumed to already be updated.
    IDs not present in the renames dict are left as-is.

    :param dict(TemporaryID, str) renames: Rename operations to apply.
    """
    # Do renames in the description; it holds all the inter-job references.
    self._description.renameReferences(renames)
def saveBody(self, jobStore):
    """
    Save the execution data for just this job to the JobStore, and fill in
    the JobDescription with the information needed to retrieve it.

    The Job's JobDescription must have already had a real jobStoreID assigned
    to it. Does not save the JobDescription.

    :param toil.jobStores.abstractJobStore.AbstractJobStore jobStore: The job store
        to save the job body into.
    """
    # We can't save the job in the right place for cleanup unless the
    # description has a real ID.
    assert not isinstance(self.jobStoreID, TemporaryID), "Tried to save job {} without ID assigned!".format(self)
    # Note that we can't accept any more requests for our return value
    self._disablePromiseRegistration()
    # Clear out old Cactus compatibility fields that don't need to be
    # preserved and shouldn't be serialized.
    if hasattr(self, '_services'):
        delattr(self, '_services')
    # Remember fields we will overwrite
    description = self._description
    registry = self._registry
    directPredecessors = self._directPredecessors
    try:
        try:
            # Drop out the description, which the job store will manage separately
            self._description = None
            # Fix up the registry and direct predecessors for when the job is
            # loaded to be run: the registry should contain just the job itself and
            # there should be no predecessors available when the job actually runs.
            self._registry = {description.jobStoreID: self}
            self._directPredecessors = set()
            # Save the body of the job; pickling may trigger promise
            # registration on other jobs.
            with jobStore.writeFileStream(description.jobStoreID, cleanup=True) as (fileHandle, fileStoreID):
                pickle.dump(self, fileHandle, pickle.HIGHEST_PROTOCOL)
        finally:
            # Restore important fields (before handling errors)
            self._directPredecessors = directPredecessors
            self._registry = registry
            self._description = description
    except JobPromiseConstraintError as e:
        # The user is passing promises without regard to predecessor constraints.
        if e.recipientJob is None:
            # Add ourselves as the recipient job that wanted the promise.
            e = JobPromiseConstraintError(e.promisingJob, self)
        raise e
    # Find the user script.
    # Note that getUserScript() may have been overridden. This is intended. If we used
    # self.userModule directly, we'd be getting a reference to job.py if the job was
    # specified as a function (as opposed to a class) since that is where FunctionWrappingJob
    # is defined. What we really want is the module that was loaded as __main__,
    # and FunctionWrappingJob overrides getUserScript() to give us just that. Only then can
    # filter_main() in _unpickle( ) do its job of resolving any user-defined type or function.
    userScript = self.getUserScript().globalize()
    # The command connects the body of the job to the JobDescription
    self._description.command = ' '.join(('_toil', fileStoreID) + userScript.toCommand())
def _saveJobGraph(self, jobStore, saveSelf=False, returnValues=None):
    """
    Save job data and new JobDescriptions to the given job store for this
    job and all descending jobs, including services.

    Used to save the initial job graph containing the root job of the workflow.

    :param toil.jobStores.abstractJobStore.AbstractJobStore jobStore: The job store
        to save the jobs into.
    :param bool saveSelf: Set to True to save this job along with its children,
        follow-ons, and services, or False to just save the children, follow-ons,
        and services and to populate the return value.
    :param returnValues: The collection of values returned when executing
        the job (or starting the service the job is hosting). If saveSelf
        is not set, will be used to fulfill the job's return value promises.
    """
    # Prohibit cycles and multiple roots
    self.checkJobGraphForDeadlocks()
    # Make sure everybody in the registry is registered with the job store
    # and has an ID. Also rewrite ID references.
    allJobs = list(self._registry.values())
    # We use one big dict from fake ID to corresponding real ID to rewrite references.
    fakeToReal = {}
    for job in allJobs:
        # Register the job, get the old ID to new ID pair if any, and save that in the fake to real mapping
        fakeToReal.update(job._register(jobStore))
    if len(fakeToReal) > 0:
        # Some jobs changed ID. We need to rebuild the registry and apply the reference rewrites.
        # Remake the registry in place
        self._registry.clear()
        self._registry.update({job.jobStoreID: job for job in allJobs})
        for job in allJobs:
            # Tell all the jobs (and thus their descriptions and services)
            # about the renames.
            job._renameReferences(fakeToReal)
    # Make sure the whole component is ready for promise registration
    for job in allJobs:
        job.prepareForPromiseRegistration(jobStore)
    # Get an ordering on the non-service jobs which we use for pickling the
    # jobs in the correct order to ensure the promises are properly
    # established
    ordering = self.getTopologicalOrderingOfJobs()
    # Set up to save last job first, so promises flow the right way
    ordering.reverse()
    logger.info("Saving graph of %d jobs, %d new", len(allJobs), len(fakeToReal))
    # Make sure we're the root
    assert ordering[-1] == self
    # Don't verify the ordering length: it excludes service host jobs.
    if not saveSelf:
        # Fulfil promises for return values (even if value is None)
        self._fulfillPromises(returnValues, jobStore)
    for job in ordering:
        logger.info("Processing job %s", job.description)
        for serviceBatch in reversed(list(job.description.serviceHostIDsInBatches())):
            # For each batch of service host jobs in reverse order they start
            for serviceID in serviceBatch:
                logger.info("Processing service %s", serviceID)
                if serviceID in self._registry:
                    # It's a new service
                    # Find the actual job
                    serviceJob = self._registry[serviceID]
                    logger.info("Saving service %s", serviceJob.description)
                    # Pickle the service body, which triggers all the promise stuff
                    serviceJob.saveBody(jobStore)
        if job != self or saveSelf:
            # Now pickle the job itself
            job.saveBody(jobStore)
    # Now that the job data is on disk, commit the JobDescriptions in
    # reverse execution order, in a batch if supported.
    with jobStore.batch():
        for job in ordering:
            for serviceBatch in job.description.serviceHostIDsInBatches():
                for serviceID in serviceBatch:
                    if serviceID in self._registry:
                        jobStore.create(self._registry[serviceID].description)
            if job != self or saveSelf:
                jobStore.create(job.description)
def saveAsRootJob(self, jobStore):
    """
    Save this job to the given jobStore as the root job of the workflow.

    Persists the whole job graph rooted here (including services), records
    this job as the store's root job, and hands this job's description the
    store's config, as :meth:`loadJob` would on a worker.

    :param toil.jobStores.abstractJobStore.AbstractJobStore jobStore: store
        in which to persist the graph.
    :raises JobGraphDeadlockException: if this job is a checkpoint but is not
        a leaf in the job graph.
    :return: the JobDescription describing this job.
    """
    # Check if the workflow root is a checkpoint but not a leaf vertex.
    # All other job vertices in the graph are checked by checkNewCheckpointsAreLeafVertices
    if self.checkpoint and not Job._isLeafVertex(self):
        raise JobGraphDeadlockException(
            'New checkpoint job %s is not a leaf in the job graph' % self)
    # Save the root job and all descendants and services
    self._saveJobGraph(jobStore, saveSelf=True)
    # Store the name of the first job in a file in case of restart. Up to this point the
    # root job is not recoverable. FIXME: "root job" or "first job", which one is it?
    jobStore.setRootJob(self.jobStoreID)
    # Assign the config from the JobStore as if we were loaded.
    # TODO: Find a better way to make this the JobStore's responsibility
    self.description.assignConfig(jobStore.config)
    return self.description
@classmethod
def loadJob(cls, jobStore, jobDescription):
    """
    Retrieves a :class:`toil.job.Job` instance from a JobStore.

    Parses the description's command (format: ``_toil <pickleFile> <module args...>``),
    loads the user module, downloads the pickled body to a temporary file,
    unpickles it, and wires the description and a fresh registry back in.
    The temporary file and directory are always removed, even on error.

    :param toil.jobStores.abstractJobStore.AbstractJobStore jobStore: The job store.
    :param toil.job.JobDescription jobDescription: the JobDescription of the job to retrieve.
    :returns: The job referenced by the JobDescription.
    :rtype: toil.job.Job
    """
    # Grab the command that connects the description to the job body
    command = jobDescription.command
    commandTokens = command.split()
    # First token is the "_toil" marker identifying a Toil job command.
    assert "_toil" == commandTokens[0]
    # Tokens after the pickle file describe the user module to import.
    userModule = ModuleDescriptor.fromCommand(commandTokens[2:])
    logger.debug('Loading user module %s.', userModule)
    userModule = cls._loadUserModule(userModule)
    pickleFile = commandTokens[1]
    # Get a directory to download the job in
    directory = tempfile.mkdtemp()
    # Initialize a blank filename so the finally below can't fail due to a
    # missing variable
    filename = ''
    try:
        # Get a filename to download the job to.
        # Don't use mkstemp because we would need to delete and replace the
        # file.
        # Don't use a NamedTemporaryFile context manager because its
        # context manager exit will crash if we deleted it.
        filename = os.path.join(directory, 'job')
        # Download the job
        if pickleFile == "firstJob":
            # The root job body lives in a shared file, not a regular one.
            jobStore.readSharedFile(pickleFile, filename)
        else:
            jobStore.readFile(pickleFile, filename)
        # Open and unpickle
        with open(filename, 'rb') as fileHandle:
            job = cls._unpickle(userModule, fileHandle, requireInstanceOf=Job)
            # Fill in the current description
            job._description = jobDescription
            # Set up the registry again, so children and follow-ons can be added on the worker
            job._registry = {job.jobStoreID: job}
            return job
        # TODO: We ought to just unpickle straight from a streaming read
    finally:
        # Clean up the file
        if os.path.exists(filename):
            os.unlink(filename)
        # Clean up the directory we put it in
        shutil.rmtree(directory)
def _run(self, jobGraph=None, fileStore=None, **kwargs):
    """
    Function which worker calls to ultimately invoke
    a job's Job.run method, and then handle created
    children/followOn jobs.

    May be (but currently is not) overridden by specialized Toil-internal jobs.

    Should not be overridden by non-Toil code!

    Despite this, it has been overridden by non-Toil code, so we keep it
    around and use a hardened kwargs-based interface to try and tolerate
    bad behavior by workflows (e.g. Cactus).

    When everyone has upgraded to a sufficiently new Cactus, we can remove
    this!

    :param NoneType jobGraph: Ignored. Here for compatibility with old
        Cactus versions that pass two positional arguments.
    :param toil.fileStores.abstractFileStore.AbstractFileStore fileStore: the
        FileStore to use to access files when running the job. Required.
    """
    # Simply delegate to the user-facing run() method.
    return self.run(fileStore)
@contextmanager
def _executor(self, stats, fileStore):
    """
    This is the core wrapping method for running the job within a worker. It sets up the stats
    and logging before yielding. After completion of the body, the function will finish up the
    stats and logging, and starts the async update process for the job.

    Will modify the job's description with changes that need to be committed back to the JobStore.

    :param stats: Expando-style stats accumulator, or None to skip stats collection.
    :param fileStore: file store used to delete promise files and to start the
        asynchronous commit of the job's state.
    """
    if stats is not None:
        # Record wall-clock and CPU baselines before the job body runs.
        startTime = time.time()
        startClock = get_total_cpu_time()
    # Remember the working directory so we can restore it if the job chdir()s.
    baseDir = os.getcwd()
    yield
    # If the job is not a checkpoint job, add the promise files to delete
    # to the list of jobStoreFileIDs to delete
    # TODO: why is Promise holding a global list here???
    if not self.checkpoint:
        for jobStoreFileID in Promise.filesToDelete:
            # Make sure to wrap the job store ID in a FileID object so the file store will accept it
            # TODO: talk directly to the job sotre here instead.
            fileStore.deleteGlobalFile(FileID(jobStoreFileID, 0))
    else:
        # Else copy them to the job description to delete later
        self.description.checkpointFilesToDelete = list(Promise.filesToDelete)
    Promise.filesToDelete.clear()
    # Now indicate the asynchronous update of the job can happen
    fileStore.startCommit(jobState=True)
    # Change dir back to cwd dir, if changed by job (this is a safety issue)
    if os.getcwd() != baseDir:
        os.chdir(baseDir)
    # Finish up the stats
    if stats is not None:
        totalCpuTime, totalMemoryUsage = get_total_cpu_time_and_memory_usage()
        stats.jobs.append(
            Expando(
                time=str(time.time() - startTime),
                clock=str(totalCpuTime - startClock),
                class_name=self._jobName(),
                memory=str(totalMemoryUsage)
            )
        )
def _runner(self, jobStore=None, fileStore=None, defer=None, **kwargs):
    """
    This method actually runs the job, and serialises the next jobs.

    It marks the job as completed (by clearing its command) and creates the
    successor relationships to new successors, but it doesn't actually
    commit those updates to the current job into the JobStore.

    We take all arguments as keyword arguments, and accept and ignore
    additional keyword arguments, for compatibility with workflows (*cough*
    Cactus *cough*) which are reaching in and overriding _runner (which
    they aren't supposed to do). If everything is passed as name=value it
    won't break as soon as we add or remove a parameter.

    :param class jobStore: Instance of the job store
    :param toil.fileStores.abstractFileStore.AbstractFileStore fileStore: Instance
        of a cached or uncached filestore
    :param defer: Function yielded by open() context
        manager of :class:`toil.DeferredFunctionManager`, which is called to
        register deferred functions.
    :param kwargs: Catch-all to accept superfluous arguments passed by old
        versions of Cactus. Cactus shouldn't override this method, but it does.
    :return:
    """
    # Make deferred function registration available during run().
    self._defer = defer
    # Make fileStore available as an attribute during run() ...
    self._fileStore = fileStore
    # ... but also pass it to _run() as an argument for backwards
    # compatibility with workflows that tinker around with our internals,
    # and send a fake jobGraph in case they still think jobGraph exists.
    returnValues = self._run(jobGraph=None, fileStore=fileStore)
    # Clean up state changes made for run()
    self._defer = None
    self._fileStore = None
    # Serialize the new Jobs defined by the run method to the jobStore
    self._saveJobGraph(jobStore, saveSelf=False, returnValues=returnValues)
    # Clear out the command, because the job is done.
    self.description.command = None
    # That and the new child/follow-on relationships will need to be
    # recorded later by an update() of the JobDescription.
def _jobName(self) -> str:
    """
    Human-readable name of this job, used as the identifier of the job
    class in the stats report.

    :rtype : string, used as identifier of the job class in the stats report.
    """
    return self._description.displayName
class JobException(Exception):
    """
    General job exception.

    Base class for job-related errors; more specific failures such as
    :class:`JobGraphDeadlockException` subclass it.
    """
    def __init__(self, message):
        super().__init__(message)
class JobGraphDeadlockException(JobException):
    """
    An exception raised in the event that a workflow contains an unresolvable \
    dependency, such as a cycle. See :func:`toil.job.Job.checkJobGraphForDeadlocks`.
    """
    def __init__(self, string):
        super().__init__(string)
class FunctionWrappingJob(Job):
    """
    Job used to wrap a function. In its `run` method the wrapped function is called.
    """
    def __init__(self, userFunction, *args, **kwargs):
        """
        :param callable userFunction: The function to wrap. It will be called with ``*args`` and
               ``**kwargs`` as arguments.

        The keywords ``memory``, ``cores``, ``disk``, ``preemptable`` and ``checkpoint`` are
        reserved keyword arguments that if specified will be used to determine the resources
        required for the job, as :func:`toil.job.Job.__init__`. If they are keyword arguments to
        the function they will be extracted from the function definition, but may be overridden
        by the user (as you would expect).
        """
        # Use the user-specified requirements, if specified, else grab the default argument
        # from the function, if specified, else default to None
        argSpec = inspect.getfullargspec(userFunction)
        # Map the function's keyword parameters to their default values, if any.
        if argSpec.defaults is None:
            argDict = {}
        else:
            argDict = dict(list(zip(argSpec.args[-len(argSpec.defaults):], argSpec.defaults)))

        def resolve(key, default=None, dehumanize=False):
            # Resolve one reserved requirement; NOTE: pops from kwargs so the
            # reserved keys never reach the wrapped function at call time.
            try:
                # First, try constructor arguments, ...
                value = kwargs.pop(key)
            except KeyError:
                try:
                    # ..., then try default value for function keyword arguments, ...
                    value = argDict[key]
                except KeyError:
                    # ... and finally fall back to a default value.
                    value = default
            # Optionally, convert strings with metric or binary prefixes.
            if dehumanize and isinstance(value, str):
                value = human2bytes(value)
            return value

        super().__init__(memory=resolve('memory', dehumanize=True),
                         cores=resolve('cores', dehumanize=True),
                         disk=resolve('disk', dehumanize=True),
                         preemptable=resolve('preemptable'),
                         checkpoint=resolve('checkpoint', default=False),
                         unitName=resolve('name', default=None))

        # Globalize the module descriptor so the function can be re-imported on a worker.
        self.userFunctionModule = ModuleDescriptor.forModule(userFunction.__module__).globalize()
        self.userFunctionName = str(userFunction.__name__)
        self.jobName = self.userFunctionName
        # Remaining args/kwargs (reserved keys already popped) are forwarded at run time.
        self._args = args
        self._kwargs = kwargs

    def _getUserFunction(self):
        """Re-import the user's module and return the wrapped function object."""
        logger.debug('Loading user function %s from module %s.',
                     self.userFunctionName,
                     self.userFunctionModule)
        userFunctionModule = self._loadUserModule(self.userFunctionModule)
        return getattr(userFunctionModule, self.userFunctionName)

    def run(self,fileStore):
        """Call the wrapped function with the stored arguments and return its result."""
        userFunction = self._getUserFunction( )
        return userFunction(*self._args, **self._kwargs)

    def getUserScript(self):
        """Return the module descriptor of the module defining the wrapped function."""
        return self.userFunctionModule

    def _jobName(self):
        # Qualify the stats-report name with module and function for uniqueness.
        return ".".join((self.__class__.__name__,self.userFunctionModule.name,self.userFunctionName))
class JobFunctionWrappingJob(FunctionWrappingJob):
    """
    Wraps a *job function*: a function whose first argument is the
    :class:`.Job` instance hosting it. Through that argument the function can
    add successor jobs and use everything the :class:`.Job` class provides.

    To enable the job function to get access to the
    :class:`toil.fileStores.abstractFileStore.AbstractFileStore` instance (see
    :func:`toil.job.Job.run`), it is made a variable of the wrapping job called
    fileStore.

    To specify a job's resource requirements the following default keyword arguments
    can be specified:

        - memory
        - disk
        - cores

    For example to wrap a function into a job we would call::

        Job.wrapJobFn(myJob, memory='100k', disk='1M', cores=0.1)

    """

    @property
    def fileStore(self):
        """The file store made available to the job function while it runs."""
        return self._fileStore

    def run(self, fileStore):
        """Invoke the wrapped job function with this job prepended to its arguments."""
        jobFunction = self._getUserFunction()
        # The job function's contract: it receives this Job as its first argument.
        return jobFunction(self, *self._args, **self._kwargs)
class PromisedRequirementFunctionWrappingJob(FunctionWrappingJob):
    """
    Handles dynamic resource allocation using :class:`toil.job.Promise` instances.
    Spawns child function using parent function parameters and fulfilled promised
    resource requirements.
    """

    def __init__(self, userFunction, *args, **kwargs):
        # Keep the caller's kwargs, promises and all, for the real child job.
        self._promisedKwargs = kwargs.copy()
        # The intermediate wrapper job itself only needs token resources.
        kwargs.update(disk='1M', memory='32M', cores=0.1)
        super().__init__(userFunction, *args, **kwargs)

    @classmethod
    def create(cls, userFunction, *args, **kwargs):
        """
        Creates an encapsulated Toil job function with unfulfilled promised resource
        requirements. After the promises are fulfilled, a child job function is created
        using updated resource values. The subgraph is encapsulated to ensure that this
        child job function is run before other children in the workflow. Otherwise, a
        different child may try to use an unresolved promise return value from the parent.
        """
        wrapper = cls(userFunction, *args, **kwargs)
        return EncapsulatedJob(wrapper)

    def run(self, fileStore):
        # By the time this job runs, the parent has finished, so its promises
        # can be resolved into concrete resource values.
        self.evaluatePromisedRequirements()
        return self.addChildFn(self._getUserFunction(),
                               *self._args,
                               **self._promisedKwargs).rv()

    def evaluatePromisedRequirements(self):
        """Replace any PromisedRequirement resource kwargs with their concrete values."""
        for requirement in ("disk", "memory", "cores"):
            # Missing keys simply stay missing; only promised values are resolved.
            value = self._promisedKwargs.get(requirement)
            if isinstance(value, PromisedRequirement):
                self._promisedKwargs[requirement] = value.getValue()
class PromisedRequirementJobFunctionWrappingJob(PromisedRequirementFunctionWrappingJob):
    """
    Handles dynamic resource allocation for job functions.
    See :class:`toil.job.JobFunctionWrappingJob`
    """

    def run(self, fileStore):
        # Resolve promised resource values, then hand the real work to a
        # child *job function* sized with those values.
        self.evaluatePromisedRequirements()
        jobFunction = self._getUserFunction()
        return self.addChildJobFn(jobFunction, *self._args, **self._promisedKwargs).rv()
class EncapsulatedJob(Job):
    """
    A convenience Job class used to make a job subgraph appear to be a single job.

    Let A be the root job of a job subgraph and B be another job we'd like to run after A
    and all its successors have completed, for this use encapsulate::

        #  Job A and subgraph, Job B
        A, B = A(), B()
        Aprime = A.encapsulate()
        Aprime.addChild(B)
        #  B will run after A and all its successors have completed, A and its subgraph of
        # successors in effect appear to be just one job.

    If the job being encapsulated has predecessors (e.g. is not the root job), then the encapsulated
    job will inherit these predecessors. If predecessors are added to the job being encapsulated
    after the encapsulated job is created then the encapsulating job will NOT inherit these
    predecessors automatically. Care should be exercised to ensure the encapsulated job has the
    proper set of predecessors.

    The return value of an encapsulatd job (as accessed by the :func:`toil.job.Job.rv` function)
    is the return value of the root job, e.g. A().encapsulate().rv() and A().rv() will resolve to
    the same value after A or A.encapsulate() has been run.
    """
    def __init__(self, job, unitName=None):
        """
        :param toil.job.Job job: the job to encapsulate.
        :param str unitName: human-readable name to identify this job instance.
        """
        if job is not None:
            # Initial construction, when encapsulating a job

            # Giving the root of the subgraph the same resources as the first job in the subgraph.
            super().__init__(**job.description.requirements, unitName=unitName)
            # Ensure that the encapsulated job has the same direct predecessors as the job
            # being encapsulated.
            for predJob in job._directPredecessors:
                predJob.addChild(self)
            self.encapsulatedJob = job
            Job.addChild(self, job)
            # Use small resource requirements for dummy Job instance.
            # But not too small, or the job won't have enough resources to safely start up Toil.
            self.encapsulatedFollowOn = Job(disk='100M', memory='512M', cores=0.1, unitName=None if unitName is None else unitName + '-followOn')
            Job.addFollowOn(self, self.encapsulatedFollowOn)
        else:
            # Unpickling on the worker, to be run as a no-op.
            # No need to try and hook things up, but nobody can add children or
            # follow-ons to us now either.
            super().__init__()
            self.encapsulatedJob = None
            self.encapsulatedFollowOn = None

    def addChild(self, childJob):
        # Children are attached to the follow-on so they run after the whole
        # encapsulated subgraph, not just after this wrapper job.
        assert self.encapsulatedFollowOn is not None, \
            "Children cannot be added to EncapsulatedJob while it is running"
        return Job.addChild(self.encapsulatedFollowOn, childJob)

    def addService(self, service, parentService=None):
        # Services are likewise hosted on the follow-on.
        assert self.encapsulatedFollowOn is not None, \
            "Services cannot be added to EncapsulatedJob while it is running"
        return Job.addService(self.encapsulatedFollowOn, service, parentService=parentService)

    def addFollowOn(self, followOnJob):
        # Follow-ons chain off the follow-on job as well.
        assert self.encapsulatedFollowOn is not None, \
            "Follow-ons cannot be added to EncapsulatedJob while it is running"
        return Job.addFollowOn(self.encapsulatedFollowOn, followOnJob)

    def rv(self, *path):
        # The encapsulating job's return value is the encapsulated job's.
        assert self.encapsulatedJob is not None
        return self.encapsulatedJob.rv(*path)

    def prepareForPromiseRegistration(self, jobStore):
        # This one will be called after execution when re-serializing the
        # (unchanged) graph of jobs rooted here.
        super().prepareForPromiseRegistration(jobStore)
        if self.encapsulatedJob is not None:
            # Running where the job was created.
            self.encapsulatedJob.prepareForPromiseRegistration(jobStore)

    def _disablePromiseRegistration(self):
        assert self.encapsulatedJob is not None
        super()._disablePromiseRegistration()
        self.encapsulatedJob._disablePromiseRegistration()

    def __reduce__(self):
        """
        Called during pickling to define the pickled representation of the job.

        We don't want to pickle our internal references to the job we
        encapsulate, so we elide them here. When actually run, we're just a
        no-op job that can maybe chain.
        """
        # Unpickles as EncapsulatedJob(None) — the worker-side no-op form.
        return self.__class__, (None,)

    def getUserScript(self):
        assert self.encapsulatedJob is not None
        return self.encapsulatedJob.getUserScript()
class ServiceHostJob(Job):
    """
    Job that runs a service. Used internally by Toil. Users should subclass Service instead of using this.
    """
    def __init__(self, service):
        """
        This constructor should not be called by a user.

        :param service: The service to wrap in a job.
        :type service: toil.job.Job.Service
        """
        # Make sure the service hasn't been given a host already.
        assert service.hostID is None

        # Make ourselves with name info from the Service and a
        # ServiceJobDescription that has the service control flags.
        super().__init__(**service.requirements,
                         unitName=service.unitName, descriptionClass=ServiceJobDescription)

        # Make sure the service knows it has a host now
        service.hostID = self.jobStoreID

        # service.__module__ is the module defining the class service is an instance of.
        # Will need to be loaded before unpickling the Service
        self.serviceModule = ModuleDescriptor.forModule(service.__module__).globalize()

        # The service to run, or None if it is still pickled.
        # We can't just pickle as part of ourselves because we may need to load
        # an additional module.
        self.service = service
        # The pickled service, or None if it isn't currently pickled.
        # We can't just pickle right away because we may owe promises from it.
        self.pickledService = None

        # Pick up our name from the service.
        self.jobName = service.jobName
        # This references the parent job wrapper. It is initialised just before
        # the job is run. It is used to access the start and terminate flags.
        self.jobGraph = None

    @property
    def fileStore(self):
        """
        Return the file store, which the Service may need.
        """
        return self._fileStore

    def _renameReferences(self, renames):
        # When the job store finally hands out IDs we have to fix up the
        # back-reference from our Service to us.
        super()._renameReferences(renames)
        if self.service is not None:
            self.service.hostID = renames[self.service.hostID]

    # Since the running service has us, make sure they don't try to tack more
    # stuff onto us.

    def addChild(self, child):
        raise RuntimeError("Service host jobs cannot have children, follow-ons, or services")

    def addFollowOn(self, followOn):
        raise RuntimeError("Service host jobs cannot have children, follow-ons, or services")

    def addService(self, service, parentService=None):
        raise RuntimeError("Service host jobs cannot have children, follow-ons, or services")

    def saveBody(self, jobStore):
        """
        Serialize the service itself before saving the host job's body.
        """
        # Save unpickled service
        service = self.service
        # Serialize service
        self.pickledService = pickle.dumps(self.service, protocol=pickle.HIGHEST_PROTOCOL)
        # Clear real service until we have the module to load it back
        self.service = None
        # Save body as normal
        super().saveBody(jobStore)
        # Restore unpickled service
        self.service = service
        self.pickledService = None

    def run(self, fileStore):
        # Unpickle the service
        logger.debug('Loading service module %s.', self.serviceModule)
        userModule = self._loadUserModule(self.serviceModule)
        service = self._unpickle(userModule, BytesIO(self.pickledService), requireInstanceOf=Job.Service)
        self.pickledService = None
        # Make sure it has the config, since it wasn't load()-ed via the JobStore
        service.assignConfig(fileStore.jobStore.config)
        # Start the service
        startCredentials = service.start(self)
        try:
            # The start credentials must be communicated to processes connecting to
            # the service, to do this while the run method is running we
            # cheat and set the return value promise within the run method
            self._fulfillPromises(startCredentials, fileStore.jobStore)
            self._rvs = {}  # Set this to avoid the return values being updated after the
            # run method has completed!

            # Now flag that the service is running jobs can connect to it
            logger.debug("Removing the start jobStoreID to indicate establishment of the service")
            # FIX: identity comparison with None ("is not"), not "!=".
            assert self.description.startJobStoreID is not None
            if fileStore.jobStore.fileExists(self.description.startJobStoreID):
                fileStore.jobStore.deleteFile(self.description.startJobStoreID)
            assert not fileStore.jobStore.fileExists(self.description.startJobStoreID)

            # Now block until we are told to stop, which is indicated by the removal
            # of a file
            assert self.description.terminateJobStoreID is not None
            while True:
                # Check for the terminate signal
                if not fileStore.jobStore.fileExists(self.description.terminateJobStoreID):
                    logger.debug("Detected that the terminate jobStoreID has been removed so exiting")
                    if not fileStore.jobStore.fileExists(self.description.errorJobStoreID):
                        raise RuntimeError("Detected the error jobStoreID has been removed so exiting with an error")
                    break

                # Check the service's status and exit if failed or complete
                try:
                    if not service.check():
                        logger.debug("The service has finished okay, exiting")
                        break
                except RuntimeError:
                    logger.debug("Detected termination of the service")
                    raise

                time.sleep(fileStore.jobStore.config.servicePollingInterval)  # Avoid excessive polling

            logger.debug("Service is done")
        finally:
            # The stop function is always called
            service.stop(self)

    def getUserScript(self):
        return self.serviceModule
class Promise:
    """
    References a return value from a :meth:`toil.job.Job.run` or
    :meth:`toil.job.Job.Service.start` method as a *promise* before the method itself is run.

    Let T be a job. Instances of :class:`.Promise` (termed a *promise*) are returned by T.rv(),
    which is used to reference the return value of T's run function. When the promise is passed
    to the constructor (or as an argument to a wrapped function) of a different, successor job
    the promise will be replaced by the actual referenced return value. This mechanism allows a
    return values from one job's run method to be input argument to job before the former job's
    run function has been executed.
    """
    _jobstore = None
    """
    Caches the job store instance used during unpickling to prevent it from being instantiated
    for each promise

    :type: toil.jobStores.abstractJobStore.AbstractJobStore
    """

    filesToDelete = set()
    """
    A set of IDs of files containing promised values when we know we won't need them anymore
    """
    def __init__(self, job, path):
        """
        :param Job job: the job whose return value this promise references
        :param path: see :meth:`.Job.rv`
        """
        self.job = job
        self.path = path

    def __reduce__(self):
        """
        Called during pickling when a promise (an instance of this class) is about to be be
        pickled. Returns the Promise class and construction arguments that will be evaluated
        during unpickling, namely the job store coordinates of a file that will hold the promised
        return value. By the time the promise is about to be unpickled, that file should be
        populated.
        """
        # The allocation of the file in the job store is intentionally lazy, we only allocate an
        # empty file in the job store if the promise is actually being pickled. This is done so
        # that we do not allocate files for promises that are never used.
        jobStoreLocator, jobStoreFileID = self.job.registerPromise(self.path)
        # Returning a class object here causes the pickling machinery to attempt to instantiate
        # the class. We will catch that with __new__ and return an the actual return value instead.
        return self.__class__, (jobStoreLocator, jobStoreFileID)

    @staticmethod
    def __new__(cls, *args):
        # Two positional args always: either (job, path) at creation time, or
        # (jobStoreLocator, jobStoreFileID) when unpickling via __reduce__.
        assert len(args) == 2
        if isinstance(args[0], Job):
            # Regular instantiation when promise is created, before it is being pickled
            return super().__new__(cls)
        else:
            # Attempted instantiation during unpickling, return promised value instead
            return cls._resolve(*args)

    @classmethod
    def _resolve(cls, jobStoreLocator, jobStoreFileID):
        # Initialize the cached job store if it was never initialized in the current process or
        # if it belongs to a different workflow that was run earlier in the current process.
        if cls._jobstore is None or cls._jobstore.config.jobStore != jobStoreLocator:
            cls._jobstore = Toil.resumeJobStore(jobStoreLocator)
        # Remember the file so it can be cleaned up once the value is consumed.
        cls.filesToDelete.add(jobStoreFileID)
        with cls._jobstore.readFileStream(jobStoreFileID) as fileHandle:
            # If this doesn't work then the file containing the promise may not exist or be
            # corrupted
            value = safeUnpickleFromStream(fileHandle)
            return value
class PromisedRequirement:
    """
    Class for dynamically allocating job function resource requirements involving
    :class:`toil.job.Promise` instances.

    Use when resource requirements depend on the return value of a parent function.
    PromisedRequirements can be modified by passing a function that takes the
    :class:`.Promise` as input.

    For example, let f, g, and h be functions. Then a Toil workflow can be
    defined as follows::

        A = Job.wrapFn(f)
        B = A.addChildFn(g, cores=PromisedRequirement(A.rv()))
        C = B.addChildFn(h, cores=PromisedRequirement(lambda x: 2*x, B.rv()))
    """
    def __init__(self, valueOrCallable, *args):
        """
        :param valueOrCallable: A single Promise instance or a function that
               takes args as input parameters.
        :param args: variable length argument list
        :type args: int or .Promise
        """
        # Idiomatic callable() instead of hasattr(x, '__call__').
        if callable(valueOrCallable):
            assert len(args) != 0, 'Need parameters for PromisedRequirement function.'
            func = valueOrCallable
        else:
            assert len(args) == 0, 'Define a PromisedRequirement function to handle multiple arguments.'
            # A bare value is wrapped in the identity function so getValue()
            # has a uniform code path.
            func = lambda x: x
            args = [valueOrCallable]

        # dill (not pickle) so lambdas and closures serialize correctly.
        self._func = dill.dumps(func)
        self._args = list(args)

    def getValue(self):
        """
        Returns PromisedRequirement value
        """
        func = dill.loads(self._func)
        # By the time this runs, any Promise args have been replaced by their
        # concrete values during unpickling.
        return func(*self._args)

    @staticmethod
    def convertPromises(kwargs):
        """
        Returns True if reserved resource keyword is a Promise or
        PromisedRequirement instance. Converts Promise instance
        to PromisedRequirement.

        :param kwargs: function keyword arguments
        :return: bool
        """
        for r in ("disk", "memory", "cores"):
            # Single lookup instead of two kwargs.get(r) calls.
            value = kwargs.get(r)
            if isinstance(value, Promise):
                kwargs[r] = PromisedRequirement(value)
                return True
            if isinstance(value, PromisedRequirement):
                return True
        return False
class UnfulfilledPromiseSentinel:
    """This should be overwritten by a proper promised value. Throws an
    exception when unpickled."""
    def __init__(self, fulfillingJobName, unpickled):
        # NOTE(review): `unpickled` is accepted but never stored or used —
        # presumably kept only for pickle-compatibility of the constructor
        # signature; confirm before removing.
        self.fulfillingJobName = fulfillingJobName

    @staticmethod
    def __setstate__(stateDict):
        """Only called when unpickling. This won't be unpickled unless the
        promise wasn't resolved, so we throw an exception."""
        jobName = stateDict['fulfillingJobName']
        raise RuntimeError("This job was passed a promise that wasn't yet resolved when it "
                           "ran. The job {jobName} that fulfills this promise hasn't yet "
                           "finished. This means that there aren't enough constraints to "
                           "ensure the current job always runs after {jobName}. Consider adding a "
                           "follow-on indirection between this job and its parent, or adding "
                           "this job as a child/follow-on of {jobName}.".format(jobName=jobName))
|
BD2KGenomics/slugflow
|
src/toil/job.py
|
Python
|
apache-2.0
| 122,790
|
[
"TINKER"
] |
62e15b34d181946e01732f0a34fe6e8eb8e4e467d7c168fb3b788f9713ceff44
|
from __future__ import print_function
__author__ = """Alex "O." Holcombe, Charles Ludowici, """ ## double-quotes will be silently removed, single quotes will be left, eg, O'Connor
import time, sys, platform, os
from math import atan, atan2, pi, cos, sin, sqrt, ceil, radians, degrees
import numpy as np
import psychopy, psychopy.info
import copy
from psychopy import visual, sound, monitors, logging, gui, event, core, data
# Import local helper functions; report (without crashing) if the helper file
# is missing from the working directory.
try:
    from helpersAOH import accelerateComputer, openMyStimWindow
except Exception as e:
    print(e); print('Problem loading helpersAOH. Check that the file helpersAOH.py in the same directory as this file')
    print('Current directory is ',os.getcwd())

# Eyetracking is off by default; set True only on a machine with the EyeLink
# software installed. Falls back to False if the import fails.
eyeTracking = False
if eyeTracking:
    try:
        import eyelinkEyetrackerForPsychopySUPA3
    except Exception as e:
        print(e)
        print('Problem loading eyelinkEyetrackerForPsychopySUPA3. Check that the file eyelinkEyetrackerForPsychopySUPA3.py in the same directory as this file')
        print('While a different version of pylink might make your eyetracking code work, your code appears to generally be out of date. Rewrite your eyetracker code based on the SR website examples')
        #Psychopy v1.83.01 broke this, pylink version prevents EyelinkEyetrackerForPsychopySUPA3 stuff from importing. But what really needs to be done is to change eyetracking code to more modern calls, as indicated on SR site
        eyeTracking = False
# --- Experiment-level configuration flags -----------------------------------
expname= "dot-jump"  # experiment name
demo = False; exportImages = False  # demo / image-export modes (both off)
autopilot = False  # presumably generates responses automatically when True — overridden by the dialog below
subject='test'  # default subject identifier
###############################
### Setup the screen parameters ##########
###############################
allowGUI = False
units='deg' #'cm'
waitBlank=False
refreshRate= 85 *1.0; #160 #set to the framerate of the monitor
fullscrn=True; #show in small window (0) or full screen (1)
scrn=True  # which screen to use; per the dialog below, this means "use second screen"
if True: #just so I can indent all the below
#which screen to display the stimuli. 0 is home screen, 1 is second screen
# create a dialog from dictionary
infoFirst = { 'Autopilot':autopilot, 'Check refresh etc':True, 'Use second screen':scrn, 'Fullscreen (timing errors if not)': fullscrn, 'Screen refresh rate': refreshRate }
OK = gui.DlgFromDict(dictionary=infoFirst,
title='MOT',
order=['Autopilot','Check refresh etc', 'Use second screen', 'Screen refresh rate', 'Fullscreen (timing errors if not)'],
tip={'Check refresh etc': 'To confirm refresh rate and that can keep up, at least when drawing a grating',
'Use second Screen': ''},
)
if not OK.OK:
print('User cancelled from dialog box'); logging.info('User cancelled from dialog box'); core.quit()
autopilot = infoFirst['Autopilot']
checkRefreshEtc = infoFirst['Check refresh etc']
scrn = infoFirst['Use second screen']
print('scrn = ',scrn, ' from dialog box')
fullscrn = infoFirst['Fullscreen (timing errors if not)']
refreshRate = infoFirst['Screen refresh rate']
#monitor parameters
widthPix = 1280 #1440 #monitor width in pixels
heightPix =1024 #900 #monitor height in pixels
monitorwidth = 40.5 #28.5 #monitor width in centimeters
viewdist = 55.; #cm
pixelperdegree = widthPix/ (atan(monitorwidth/viewdist) /np.pi*180)
bgColor = [-1,-1,-1] #black background
monitorname = 'testMonitor' # 'mitsubishi' #in psychopy Monitors Center
mon = monitors.Monitor(monitorname,width=monitorwidth, distance=viewdist)#fetch the most recent calib for this monitor
mon.setSizePix( (widthPix,heightPix) )
myWin = openMyStimWindow(mon,widthPix,heightPix,bgColor,allowGUI,units,fullscrn,scrn,waitBlank)
myWin.setRecordFrameIntervals(False)
trialsPerCondition = 2 #default value
refreshMsg2 = ''
if not checkRefreshEtc:
refreshMsg1 = 'REFRESH RATE WAS NOT CHECKED'
refreshRateWrong = False
else: #checkRefreshEtc
runInfo = psychopy.info.RunTimeInfo(
win=myWin, ## a psychopy.visual.Window() instance; None = default temp window used; False = no win, no win.flips()
refreshTest='grating', ## None, True, or 'grating' (eye-candy to avoid a blank screen)
verbose=True, ## True means report on everything
userProcsDetailed=True ## if verbose and userProcsDetailed, return (command, process-ID) of the user's processes
)
print('Finished runInfo- which assesses the refresh and processes of this computer')
refreshMsg1 = 'Median frames per second ='+ str( np.round(1000./runInfo["windowRefreshTimeMedian_ms"],1) )
refreshRateTolerancePct = 3
pctOff = abs( (1000./runInfo["windowRefreshTimeMedian_ms"]-refreshRate) / refreshRate)
refreshRateWrong = pctOff > (refreshRateTolerancePct/100.)
if refreshRateWrong:
refreshMsg1 += ' BUT'
refreshMsg1 += ' program assumes ' + str(refreshRate)
refreshMsg2 = 'which is off by more than' + str(round(refreshRateTolerancePct,0)) + '%!!'
else:
refreshMsg1 += ', which is close enough to desired val of ' + str( round(refreshRate,1) )
myWinRes = myWin.size
myWin.allowGUI =True
myWin.close() #have to close window to show dialog box
##
### END Setup of the screen parameters ##############################################################################################
####################################
askUserAndConfirmExpParams = True
if autopilot:
subject = 'autoTest'
###############################
### Ask user exp params ##############################################################################################
## askUserAndConfirmExpParams
if askUserAndConfirmExpParams:
dlgLabelsOrdered = list() #new dialog box
myDlg = gui.Dlg(title=expname, pos=(200,400))
if not autopilot:
myDlg.addField('Subject code :', subject)
dlgLabelsOrdered.append('subject')
else:
myDlg.addField('Subject code :', subject)
dlgLabelsOrdered.append('subject')
myDlg.addField('autoPilotTime:', 0, tip='Auto response time relative to cue')
myDlg.addField('randomTime:',False, tip = 'Add (rounded) gaussian N(0,2) error to time offset?')
myDlg.addField('autoPilotSpace:',0, tip='Auto response position relative to cue')
myDlg.addField('randomSpace:',False, tip = 'Add (rounded) gaussian N(0,2) error to space offset?')
dlgLabelsOrdered.append('autoPilotTime')
dlgLabelsOrdered.append('randomTime')
dlgLabelsOrdered.append('autoPilotSpace')
dlgLabelsOrdered.append('randomSpace')
myDlg.addField('Trials per condition (default=' + str(trialsPerCondition) + '):', trialsPerCondition, tip=str(trialsPerCondition))
dlgLabelsOrdered.append('trialsPerCondition')
myDlg.addText(refreshMsg1, color='Black')
if refreshRateWrong:
myDlg.addText(refreshMsg2, color='Red')
msgWrongResolution = ''
if checkRefreshEtc and (not demo) and (myWinRes != [widthPix,heightPix]).any():
msgWrongResolution = 'Instead of desired resolution of '+ str(widthPix)+'x'+str(heightPix)+ ' pixels, screen apparently '+ str(myWinRes[0])+ 'x'+ str(myWinRes[1])
myDlg.addText(msgWrongResolution, color='Red')
print(msgWrongResolution); logging.info(msgWrongResolution)
myDlg.addText('Note: to abort press ESC at response time', color='DimGrey') #works in PsychoPy1.84
#myDlg.addText('Note: to abort press ESC at a trials response screen', color=[-1.,1.,-1.]) #color names not working for some pre-1.84 versions
myDlg.show()
if myDlg.OK: #unpack information from dialogue box
thisInfo = myDlg.data #this will be a list of data returned from each field added in order
name=thisInfo[dlgLabelsOrdered.index('subject')]
if len(name) > 0: #if entered something
subject = name #change subject default name to what user entered
trialsPerCondition = int( thisInfo[ dlgLabelsOrdered.index('trialsPerCondition') ] ) #convert string to integer
print('trialsPerCondition=',trialsPerCondition)
logging.info('trialsPerCondition ='+str(trialsPerCondition))
if autopilot:
autoSpace = thisInfo[dlgLabelsOrdered.index('autoPilotSpace')]
autoTime = thisInfo[dlgLabelsOrdered.index('autoPilotTime')]
randomTime = thisInfo[dlgLabelsOrdered.index('randomTime')]
randomSpace = thisInfo[dlgLabelsOrdered.index('randomSpace')]
else:
print('User cancelled from dialog box.'); logging.info('User cancelled from dialog box')
logging.flush()
core.quit()
### Ask user exp params
## END askUserAndConfirmExpParams ###############################
##############################################################################################
if os.path.isdir('.'+os.sep+'dataRaw'):
dataDir='dataRaw'
else:
msg= 'dataRaw directory does not exist, so saving data in present working directory'
print(msg); logging.info(msg)
dataDir='.'
timeAndDateStr = time.strftime("%d%b%Y_%H-%M", time.localtime())
fileNameWithPath = dataDir+os.sep+subject+ '_' + expname+timeAndDateStr
if not demo and not exportImages:
saveCodeCmd = 'cp \'' + sys.argv[0] + '\' '+ fileNameWithPath + '.py'
os.system(saveCodeCmd) #save a copy of the code as it was when that subject was run
logF = logging.LogFile(fileNameWithPath+'.log',
filemode='w',#if you set this to 'a' it will append instead of overwriting
level=logging.INFO)#info, data, warnings, and errors will be sent to this logfile
if demo or exportImages:
logging.console.setLevel(logging.ERROR) #only show this level's and higher messages
logging.console.setLevel(logging.WARNING) #DEBUG means set the console to receive nearly all messges, INFO is for everything else, INFO, EXP, DATA, WARNING and ERROR
if refreshRateWrong:
logging.error(refreshMsg1+refreshMsg2)
else: logging.info(refreshMsg1+refreshMsg2)
longerThanRefreshTolerance = 0.27
longFrameLimit = round(1000./refreshRate*(1.0+longerThanRefreshTolerance),3) # round(1000/refreshRate*1.5,2)
msg = 'longFrameLimit='+ str(longFrameLimit) +' Recording trials where one or more interframe interval exceeded this figure '
logging.info(msg); print(msg)
if msgWrongResolution != '':
logging.error(msgWrongResolution)
myWin = openMyStimWindow(mon,widthPix,heightPix,bgColor,allowGUI,units,fullscrn,scrn,waitBlank)
runInfo = psychopy.info.RunTimeInfo(
win=myWin, ## a psychopy.visual.Window() instance; None = default temp window used; False = no win, no win.flips()
refreshTest='grating', ## None, True, or 'grating' (eye-candy to avoid a blank screen)
verbose=True, ## True means report on everything
userProcsDetailed=True ## if verbose and userProcsDetailed, return (command, process-ID) of the user's processes
)
msg = 'second window opening runInfo mean ms='+ str( runInfo["windowRefreshTimeAvg_ms"] )
logging.info(msg); print(msg)
logging.info(runInfo)
logging.info('gammaGrid='+str(mon.getGammaGrid()))
logging.info('linearizeMethod='+str(mon.getLinearizeMethod()))
####Functions. Save time by automating processes like stimulus creation and ordering
############################################################################
def oneFrameOfStim(n, itemFrames, SOAFrames, cueFrames, cuePos, trialObjects):
    """Draw whatever belongs on frame ``n`` of the stream.

    Each ``SOAFrames``-frame slot shows its item for the first ``itemFrames``
    frames; the slot at serial position ``cuePos`` is additionally accompanied
    by the module-level ``cue`` ring for ``cueFrames`` frames.  Always returns
    True.
    """
    firstCueFrame = cuePos * SOAFrames
    lastCueFrame = firstCueFrame + cueFrames  # exclusive upper bound
    currentItem = trialObjects[int(np.floor(n / SOAFrames))]
    withinItemWindow = (n % SOAFrames) < itemFrames
    if withinItemWindow:
        currentItem.draw()
        if firstCueFrame <= n < lastCueFrame:
            cue.draw()  # module-level cue circle
    return True
def oneTrial(stimuli):
    """Run one stream presentation: shuffle the dots into a random temporal
    order, draw every frame of the trial, and record per-frame timestamps.

    Returns (True, shuffled stimuli, shuffle order, frame timestamps).
    Relies on the module-level window, clock and timing globals.
    """
    presentationOrder = np.arange(len(stimuli))
    np.random.shuffle(presentationOrder)
    orderedStimuli = [stimuli[idx] for idx in presentationOrder]
    frameTimes = []
    # Two flips: sync to the raster (unless not in blocking mode) and give the
    # CPU a chance to finish other tasks before timing starts.
    myWin.flip(); myWin.flip()
    startTime = trialClock.getTime()
    for frameN in range(trialFrames):
        fixation.draw()
        oneFrameOfStim(frameN, itemFrames, SOAFrames, cueFrames, cuePos, orderedStimuli)
        myWin.flip()
        frameTimes.append(trialClock.getTime() - startTime)
    return True, orderedStimuli, presentationOrder, frameTimes
def getResponse(trialStimuli):
    """Collect the observer's (or autopilot's) choice of dot.

    Returns (accuracy, chosen stimulus, expStop flag, click position).
    In autopilot mode the response is derived from the cue position plus the
    configured temporal/spatial offsets (with optional N(0,2) jitter);
    otherwise we wait for a mouse click and pick the nearest dot.  Pressing
    space aborts the experiment (expStop=True).
    """
    if autopilot:
        autoResponseIdx = cuePos + autoTime #The serial position of the response in the stream
        if randomTime:
            autoResponseIdx += int(round( np.random.normal(0,2) ))
        # Keep the (possibly jittered) temporal index in range rather than
        # crashing with an IndexError for large offsets.
        autoResponseIdx %= len(trialStimuli)
        itemAtTemporalSelection = trialStimuli[autoResponseIdx]
        unshuffledPositions = [dot.pos.tolist() for dot in stimuli]
        itemSpatial = unshuffledPositions.index(itemAtTemporalSelection.pos.tolist())
        itemSpatial = itemSpatial + autoSpace
        if randomSpace:
            itemSpatial += int(round( np.random.normal(0,2) ))
        # BUGFIX: the old wraparound (`while itemSpatial>23: itemSpatial -= 23`)
        # subtracted 23 instead of nDots=24 (so position 24 mapped to 1, not 0)
        # and never handled negative offsets from the jitter.  A modulo does
        # both correctly.
        itemSpatial %= nDots
        #Once we have temporal pos of selected item relative to start of the trial
        #Need to get the serial spatial pos of this item, so that we can select items around it based on the autoSpace offset
        selectionTemporal = trialStimuli.index(stimuli[itemSpatial]) #This seems redundant, but it tests that the item we've selected in space is the cued item in time. if the temporal and spatial offsets are 0, it should be the same as cuePos.
        accuracy = cuePos == selectionTemporal
        mousePos = (stimuli[itemSpatial].pos[0],stimuli[itemSpatial].pos[1])
        expStop = False
        item = stimuli[itemSpatial]
        return accuracy, item, expStop, mousePos
    elif not autopilot:
        myMouse = event.Mouse(visible = False,win=myWin)
        responded = False
        expStop = False
        event.clearEvents()
        mousePos = (1e6,1e6)
        escape = event.getKeys()
        myMouse.setPos((0,0))
        myMouse.setVisible(True)
        while not responded:
            # Redraw the dots and the instruction each frame while waiting.
            for item in trialStimuli:
                item.draw()
            instruction.draw()
            if drawProgress: #Draw progress message
                progress.draw()
            myWin.flip()
            button = myMouse.getPressed()
            mousePos = myMouse.getPos()
            escapeKey = event.getKeys()
            if button[0]:
                print('click detected')
                responded = True
                print('getResponse mousePos:',mousePos)
            elif len(escapeKey)>0:
                if escapeKey[0] == 'space':
                    # Abort: return an arbitrary stimulus and flag the stop.
                    expStop = True
                    responded = True
                    return False, np.random.choice(trialStimuli), expStop, (0,0)
        # Score the click against the nearest dot (Euclidean distance).
        clickDistances = []
        for item in trialStimuli:
            x = mousePos[0] - item.pos[0]
            y = mousePos[1] - item.pos[1]
            distance = sqrt(x**2 + y**2)
            clickDistances.append(distance)
        if not expStop:
            minDistanceIdx = clickDistances.index(min(clickDistances))
            accuracy = minDistanceIdx == cuePos
            item = trialStimuli[minDistanceIdx]
            myMouse.setVisible(False)
            return accuracy, item, expStop, mousePos
def drawStimuli(nDots, radius, center, stimulusObject, sameEachTime = True):
    """Lay out ``nDots`` stimuli evenly spaced around a circle.

    Args:
        nDots: number of positions around the circle.
        radius: circle radius (same units as the stimuli).
        center: (x, y) centre of the circle.  This was previously validated
            but never applied; it is now added as an offset to every position
            (the existing caller passes (0, 0), so its layout is unchanged).
        stimulusObject: a single stimulus to shallow-copy into every position,
            or a list/tuple of exactly ``nDots`` distinct stimuli.
        sameEachTime: if True, place copies of the one prototype everywhere.

    Returns:
        The list of positioned stimuli, or None on argument errors.
    """
    if len(center) != 2:
        print('Center coords must be list of length 2')
        return None
    if not sameEachTime and not isinstance(stimulusObject, (list, tuple)):
        print('You want different objects in each position, but your stimuli is not a list or tuple')
        return None
    if not sameEachTime and isinstance(stimulusObject, (list, tuple)) and len(stimulusObject)!=nDots:
        print('You want different objects in each position, but the number of positions does not equal the number of items')
        return None
    spacing = 360./nDots
    stimuli = []
    for dot in range(nDots):
        angle = dot*spacing
        # Exact coordinates for multiples of 90 deg: floating point gives
        # e.g. cos(pi/2) = 6.123e-17 rather than 0, which would break the
        # exact position matching done elsewhere with list.index().
        if angle == 0:
            xpos, ypos = radius, 0
        elif angle == 90:
            xpos, ypos = 0, radius
        elif angle == 180:
            xpos, ypos = -radius, 0
        elif angle == 270:
            xpos, ypos = 0, -radius
        else:
            xpos = radius*cos(radians(angle))
            ypos = radius*sin(radians(angle))
        if sameEachTime:
            stim = copy.copy(stimulusObject)  # shallow copy of the prototype
        else:
            stim = stimulusObject[dot]
        stim.pos = (xpos + center[0], ypos + center[1])
        stimuli.append(stim)
    return stimuli
def checkTiming(ts):
    """Count frames whose interframe interval exceeded 130% of the nominal
    refresh period.

    ``ts`` holds the per-frame timestamps (seconds) returned by oneTrial.
    Uses the module-level ``refreshRate`` (and ``trialFrames`` when reporting
    misses).  Returns the number of long frames.
    """
    intervalsMs = np.diff(ts) * 1000
    tolerance = .3  # proportion longer than the refresh period that counts as a miss
    limitMs = np.round(1000/refreshRate*(1.0+tolerance), 2)
    longFrameIdxs = np.where(intervalsMs > limitMs)[0]
    nLong = len(longFrameIdxs)
    if nLong > 0:
        print(nLong,'frames of', trialFrames,'were longer than',str(1000/refreshRate*(1.0+tolerance)))
    return nLong
######Create visual objects, noise masks, response prompts etc. ###########
######Draw your stimuli here if they don't change across trials, but other parameters do (like timing or distance)
######If you want to automate your stimuli. Do it in a function below and save clutter.
######For instance, maybe you want random pairs of letters. Write a function!
###########################################################################
radius = 4 #circle radius
center = (0,0) #circle centre
fixSize = .1
fixation= visual.Circle(myWin, radius = fixSize , fillColor = (1,1,1), units=units)
cue = visual.Circle(myWin, radius = radius + 2, fillColor = None, lineColor = (1,1,1), units = units)
instruction = visual.TextStim(myWin,pos=(0, -(radius+.5)),colorSpace='rgb',color=(1,1,1),alignHoriz='center', alignVert='center',height=.75,units=units)
instructionText = 'Click the dot that was on screen with the cue.'
instruction.text = instructionText
progress = visual.TextStim(myWin,pos=(0, -(radius+1.5)),colorSpace='rgb',color=(1,1,1),alignHoriz='center', alignVert='center',height=.75,units=units)
##Set up stimuli
stimulus = visual.Circle(myWin, radius = .2, fillColor = (1,1,1) )
nDots = 24
sameEachTime = True #same item each position?
stimuli = drawStimuli(nDots, radius, center, stimulus, sameEachTime)
###Trial timing parameters (milliseconds, then converted to whole frames)
SOAMS = 66.667
itemMS = 22.222
ISIMS = SOAMS - itemMS
trialMS = SOAMS * nDots
cueMS = itemMS
SOAFrames = int(np.floor(SOAMS/(1000./refreshRate)))
itemFrames = int(np.floor(itemMS/(1000./refreshRate)))
ISIFrames = int(np.floor(ISIMS/(1000./refreshRate)))
trialFrames = int(nDots*SOAFrames)
cueFrames = int(np.floor(cueMS/(1000./refreshRate)))
print('cueFrames=',cueFrames)
print('itemFrames=',itemFrames)
print('refreshRate =', refreshRate)
print('cueMS from frames =', cueFrames*(1000./refreshRate))
print('num of SOAs in the trial:', trialFrames/SOAFrames)
###############
## Factorial design ###
###############
numResponsesPerTrial = 1 #default. Used to create headers for dataFile
stimList = []
#cuePositions = [dot for dot in range(nDots) if dot not in [0,nDots-1]]
cuePositions = [10]
print('cuePositions: ',cuePositions)
#cuePositions = cuePositions[2:(nDots-3)] #drop the first and final two dots
#Set up the factorial design (list of all conditions)
for cuePos in cuePositions:
    stimList.append({'cuePos':cuePos})
trials = data.TrialHandler(stimList, nReps = trialsPerCondition)
####Create output file###
#########################################################################
dataFile = open(fileNameWithPath + '.txt', 'w')
numResponsesPerTrial = 1
#headers for initial datafile rows, they don't get repeated. These appear in the file in the order they appear here.
oneOffHeaders = [
    'subject',
    'task',
    'staircase',
    'trialNum'
]
for header in oneOffHeaders:
    print(header, '\t', end='', file=dataFile)
#Headers for duplicated datafile rows. These are repeated using numResponsesPerTrial. For instance, we might have two responses in a trial.
duplicatedHeaders = [
    'responseSpatialPos',
    'responseX',
    'responseY',
    'correctX',
    'correctY',
    'clickX',
    'clickY',
    'accuracy',
    'responsePosInStream',
    'correctPosInStream'
]
if numResponsesPerTrial == 1:
    for header in duplicatedHeaders:
        print(header, '\t', end='', file=dataFile)
elif numResponsesPerTrial > 1:
    for response in range(numResponsesPerTrial):
        for header in duplicatedHeaders:
            print(header+str(response), '\t', end='', file=dataFile)
# One column per dot for the mixture-modelling positions.
for pos in range(nDots):
    print('position'+str(pos),'\t',end='',file=dataFile)
#Headers done. Do a new line
print('longFrames',file=dataFile)
expStop = False
drawProgress = False #draw the progress message?
trialNum=0; numTrialsCorrect=0; framesSaved=0;
print('Starting experiment of',trials.nTotal,'trials. Current trial is trial ',trialNum)
#NextRemindCountText.setText( str(trialNum) + ' of ' + str(trials.nTotal) )
#NextRemindCountText.draw()
myWin.flip()
#end of header
trialClock = core.Clock()
stimClock = core.Clock()
if eyeTracking:
    if getEyeTrackingFileFromEyetrackingMachineAtEndOfExperiment:
        eyeMoveFile=('EyeTrack_'+subject+'_'+timeAndDateStr+'.EDF')
    tracker=Tracker_EyeLink(myWin,trialClock,subject,1, 'HV5',(255,255,255),(0,0,0),False,(widthPix,heightPix))
# ---- Main trial loop ----
while trialNum < trials.nTotal and expStop==False:
    print(float(trialNum)/trials.nTotal)
    if trials.nTotal > 0 and trialNum > 0:
        # Show a progress message every quarter of the way through.
        if(float(trialNum)/trials.nTotal)%.25 == 0:
            print('setting progress text')
            progress.text = 'You have completed ' + str(trialNum) + ' of ' + str(trials.nTotal) + ' trials.'
            drawProgress = True
        else:
            drawProgress = False
    fixation.draw()
    myWin.flip()
    if not autopilot:
        core.wait(1)
    trial = trials.next()
#    print('trial idx is',trials.thisIndex)
    cuePos = trial.cuePos
#    print(cuePos)
    print("Doing trialNum",trialNum)
    trialDone, trialStimuli, trialStimuliOrder, ts = oneTrial(stimuli)
    #Shift positions so that the list starts at 1, which is positioned at (0,radius), and increases clockwise. This is what the MM code expects
    MMPositions = list() #Mixture modelling positions
    for dotPos in trialStimuliOrder:
        if dotPos < (nDots/4):
            MMPositions.append(dotPos + 19)
        elif dotPos >= (nDots/4):
            MMPositions.append(dotPos -5)
    nBlips = checkTiming(ts)
    if trialDone:
        accuracy, response, expStop, clickPos = getResponse(trialStimuli)
        responseCoord = response.pos.tolist()
        spatialCoords= [item.pos.tolist() for item in stimuli]
        try:
            responseSpatialRelativeToXAxis = spatialCoords.index(responseCoord)
        except ValueError:
            print('coord not in list')
        # Same +19/-5 renumbering as MMPositions above, applied to the response.
        if responseSpatialRelativeToXAxis < (nDots/4):
            responseSpatial = responseSpatialRelativeToXAxis + 19
        elif responseSpatialRelativeToXAxis >= (nDots/4):
            responseSpatial = responseSpatialRelativeToXAxis - 5
        trialPositions = [item.pos.tolist() for item in trialStimuli]
        responseTemporal = trialPositions.index(responseCoord)
#        print('trial positions in sequence:',trialPositions)
#        print('position of item nearest to click:',responseSpatial)
#        print('Position in sequence of item nearest to click:',responseTemporal)
        correctSpatial = trialStimuli[cuePos].pos
        correctTemporal = cuePos
        # One tab-separated data row per trial, matching the headers above.
        print(subject,'\t',
        'dot-jump','\t',
        False,'\t',
        trialNum,'\t',
        responseSpatial,'\t',
        responseCoord[0],'\t',
        responseCoord[1],'\t',
        correctSpatial[0],'\t',
        correctSpatial[1],'\t',
        clickPos[0],'\t',
        clickPos[1],'\t',
        accuracy,'\t',
        responseTemporal,'\t',
        correctTemporal,'\t',
        end='',
        file = dataFile
        )
        for dot in range(nDots):
            print(MMPositions[dot], '\t',end='', file=dataFile)
        print(nBlips, file=dataFile)
    trialNum += 1
    dataFile.flush()
if expStop:
    print('Participant cancelled experiment on trial', trialNum)
dataFile.flush()
|
alexholcombe/dot-jump
|
dataRaw/Fixed Cue/test_dot-jump25Oct2016_17-11.py
|
Python
|
gpl-3.0
| 25,090
|
[
"Gaussian"
] |
c3f4c80d87e0b9fc79d4e60b2f7d6e2bc4dda2878546181358211d06384872a7
|
import numpy as np
from scipy.stats import norm
from mushroom_rl.core import Environment, MDPInfo
from mushroom_rl.utils.spaces import *
from mushroom_rl.utils.viewer import Viewer
class PuddleWorld(Environment):
    """
    Puddle world as presented in:
    "Off-Policy Actor-Critic". Degris T. et al.. 2012.
    """
    def __init__(self, start=None, goal=None, goal_threshold=.1, noise_step=.025,
                 noise_reward=0, reward_goal=0., thrust=.05, puddle_center=None,
                 puddle_width=None, gamma=.99, horizon=5000):
        """
        Constructor.
        Args:
            start (np.array, None): starting position of the agent;
            goal (np.array, None): goal position;
            goal_threshold (float, .1): distance threshold of the agent from the
                goal to consider it reached;
            noise_step (float, .025): noise in actions;
            noise_reward (float, 0): standard deviation of gaussian noise in reward;
            reward_goal (float, 0): reward obtained reaching goal state;
            thrust (float, .05): distance walked during each action;
            puddle_center (np.array, None): center of the puddle;
            puddle_width (np.array, None): width of the puddle;
            gamma (float, .99): discount factor.
            horizon (int, 5000): horizon of the problem;
        """
        # MDP parameters
        self._start = np.array([.2, .4]) if start is None else start
        self._goal = np.array([1., 1.]) if goal is None else goal
        self._goal_threshold = goal_threshold
        self._noise_step = noise_step
        self._noise_reward = noise_reward
        self._reward_goal = reward_goal
        self._thrust = thrust
        puddle_center = [[.3, .6], [.4, .5], [.8, .9]] if puddle_center is None else puddle_center
        self._puddle_center = [np.array(center) for center in puddle_center]
        puddle_width = [[.1, .03], [.03, .1], [.03, .1]] if puddle_width is None else puddle_width
        self._puddle_width = [np.array(width) for width in puddle_width]
        # Four axis-aligned moves of length `thrust` (actions 0-3); the fifth
        # action is all-zeros, i.e. stay in place.
        self._actions = [np.zeros(2) for _ in range(5)]
        for i in range(4):
            self._actions[i][i // 2] = thrust * (i % 2 * 2 - 1)
        # MDP properties
        action_space = Discrete(5)
        observation_space = Box(0., 1., shape=(2,))
        mdp_info = MDPInfo(observation_space, action_space, gamma, horizon)
        # Visualization
        self._pixels = None
        self._viewer = Viewer(1.0, 1.0)
        super().__init__(mdp_info)
    def reset(self, state=None):
        """Reset to ``state``, or to a copy of the default start position."""
        if state is None:
            self._state = self._start.copy()
        else:
            self._state = state
        return self._state
    def step(self, action):
        """Apply the indexed action plus uniform step noise, clip to the unit
        square, and return (state, reward, absorbing, info).  The episode
        absorbs when the L1 distance to the goal drops below the threshold."""
        idx = action[0]
        self._state += self._actions[idx] + np.random.uniform(
            low=-self._noise_step, high=self._noise_step, size=(2,))
        self._state = np.clip(self._state, 0., 1.)
        absorbing = np.linalg.norm((self._state - self._goal),
                                   ord=1) < self._goal_threshold
        if not absorbing:
            # Puddle penalty plus optional gaussian reward noise.
            reward = np.random.randn() * self._noise_reward + self._get_reward(
                self._state)
        else:
            reward = self._reward_goal
        return self._state, reward, absorbing, {}
    def render(self):
        """Draw the reward landscape (computed lazily once), the agent and the
        goal region in the viewer."""
        if self._pixels is None:
            # Rasterize the reward function into a grayscale background image.
            img_size = 100
            pixels = np.zeros((img_size, img_size, 3))
            for i in range(img_size):
                for j in range(img_size):
                    x = i / img_size
                    y = j / img_size
                    pixels[i, img_size - 1 - j] = self._get_reward(
                        np.array([x, y]))
            pixels -= pixels.min()
            pixels *= 255. / pixels.max()
            self._pixels = np.floor(255 - pixels)
        self._viewer.background_image(self._pixels)
        self._viewer.circle(self._state, 0.01,
                            color=(0, 255, 0))
        # Diamond-shaped goal area matching the L1-distance threshold.
        goal_area = [
            [-self._goal_threshold, 0],
            [0, self._goal_threshold],
            [self._goal_threshold, 0],
            [0, -self._goal_threshold]
        ]
        self._viewer.polygon(self._goal, 0, goal_area,
                             color=(255, 0, 0), width=1)
        self._viewer.display(0.1)
    def stop(self):
        """Close the viewer window, if any."""
        if self._viewer is not None:
            self._viewer.close()
    def _get_reward(self, state):
        """Return -1 minus a Gaussian-shaped penalty for each puddle."""
        reward = -1.
        for cen, wid in zip(self._puddle_center, self._puddle_width):
            reward -= 2. * norm.pdf(state[0], cen[0], wid[0]) * norm.pdf(
                state[1], cen[1], wid[1])
        return reward
|
carloderamo/mushroom
|
mushroom_rl/environments/puddle_world.py
|
Python
|
mit
| 4,695
|
[
"Gaussian"
] |
32e197e690e3acf30d856e354dccd382a393619b207c784b73de2664a21fb9f7
|
###############################################################################################
# Copyright 2016, Neil Slater.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
###############################################################################################
import tfi
import os
import numpy as np
import PIL.Image
import tensorflow as tf
import math
# Each entry corresponds to 240 frames = 8 seconds of video
# (layer name, channel index) pairs consumed two-at-a-time by section_id below;
# a channel index > 1000 is a sentinel meaning "tf.square of the whole layer".
targets = [
    # Previous
    'mixed5a_3x3_bottleneck_pre_relu', 3, # Lattice with gems ++
    'mixed5a_3x3_bottleneck_pre_relu', 3, # Lattice with gems ++
    'mixed5a_3x3_bottleneck_pre_relu', 3, # Lattice with gems ++
    'mixed5a_3x3_bottleneck_pre_relu', 3, # Lattice with gems ++
    # Quieter bit
    'mixed5a_3x3_bottleneck_pre_relu', 3, # Lattice with gems ++
    'head1_bottleneck_pre_relu', 62, # Ocean pattern ++
    'head0_bottleneck_pre_relu', 116, # Pointy ++
    'mixed4b_3x3_bottleneck_pre_relu', 28, # fuzzy links ++
    # Added bass drums
    'head0_bottleneck_pre_relu', 3, # Harbour/islands ++
    'head0_bottleneck_pre_relu', 16, # Baubles ++
    'head0_bottleneck_pre_relu', 22, # Arches ++
    'head0_bottleneck_pre_relu', 67, # Crystal wings ++
    # Louder bit
    'head0_bottleneck_pre_relu', 84, # Garden ruins
    'mixed4b_3x3_bottleneck_pre_relu', 110, # patterned ++
    'mixed5a_3x3_bottleneck_pre_relu', 76, # structured swirls ++
    'head0_bottleneck_pre_relu', 0, # Domed buildings ++
    'head0_bottleneck_pre_relu', 114, # Tiger ++
    'head0_bottleneck_pre_relu', 124, # Glowing doors ++
    'mixed4b_3x3_bottleneck_pre_relu', 111, # geigery ++
    'mixed4b_3x3_bottleneck_pre_relu', 111, # geigery ++
    # Spares
    'head1_bottleneck_pre_relu', 64, # Birds
    'mixed3a_3x3_pre_relu', 9999, # tf.square( all )
    'mixed4a_3x3_bottleneck_pre_relu', 42, # worms
    'mixed4b_3x3_bottleneck_pre_relu', 68, # fur
    'head0_bottleneck_pre_relu', 18, # Trumpets
    'head0_bottleneck_pre_relu', 23, # Eye waves?
    'head0_bottleneck_pre_relu', 26, # Network
    'head0_bottleneck_pre_relu', 47, # Pyramids
    'head0_bottleneck_pre_relu', 53, # Feathers
    'head0_bottleneck_pre_relu', 127, # Bead circles
    'head0_bottleneck_pre_relu', 120, # Snakes
    'head0_bottleneck_pre_relu', 116, # Pointy
    'head1_bottleneck_pre_relu', 45, # Odd machinery
    'head1_bottleneck_pre_relu', 59, # Ocean pattern
    'head1_bottleneck_pre_relu', 65, # Turtles
    'head1_bottleneck_pre_relu', 93, # Little buildings
    'head1_bottleneck_pre_relu', 108, # Firey patches
    'head1_bottleneck_pre_relu', 125, # Appliances
]
channel_step = 240 # 16 targets
start_frame = 960
end_frame = 4800
nframes = end_frame - start_frame - 1 # We don't produce actual frame 960 in the overlap section
margin = 60 # This hides rotation artefacts in the corners
directory = 'animation_stage_02'
if not os.path.exists(directory):
    os.makedirs(directory)
# Technically this is the end frame, as we're working backwards towards it
img0 = PIL.Image.open('images/start_frame_1400x840.jpeg')
img0 = np.float32(img0)
# Colour reference images cycled per section (section_id % 4) to steer hue.
colour_guides = [
    img0,
    np.float32( PIL.Image.open('images/colour_guide_a.jpeg') ),
    np.float32( PIL.Image.open('images/colour_guide_b.jpeg') ),
    np.float32( PIL.Image.open('images/colour_guide_c.jpeg') )
]
tfi.reset_graph_and_session()
current_img = img0.copy()
cropped_img = current_img[margin:-margin, margin:-margin, :]
tfi.savejpeg( cropped_img, ('{}/frame_{}.jpeg'.format( directory, '%04d' % end_frame ) ) )
# Two zoom/rotation speeds, cross-faded by section below.
slow_zoom = 1.0/0.997
slow_rot = 0.2
fast_zoom = 1.0/0.994
fast_rot = 0.35
# Frames are rendered in reverse order, from end_frame-1 down towards start_frame.
for frame in range(nframes):
    fno = end_frame - 1 - frame
    section_id = ( fno // channel_step )
    prev_layer = targets[ (section_id-1) * 2 ]
    prev_channel = targets[ (section_id-1) * 2 + 1]
    layer = targets[ section_id * 2 ]
    channel = targets[ section_id * 2 + 1]
    print( 'Rendering frame {}, using layer {}, channel {}'.format( fno, layer, channel ) )
    if channel > 1000:
        target = tf.square( tfi.T(layer) )
    else:
        target = tfi.T(layer)[:,:,:,channel]
    # Mixed target for first half of each channel_step
    if ( fno % 240 ) < 120:
        r = (fno % 240)/120.0
        ri = 1.0 - r
        if prev_layer == layer:
            # If the layers match, then shape matches, and we can get a simpler combination
            target = tf.reduce_mean( ri * tfi.T(prev_layer)[:,:,:,prev_channel] + r * tfi.T(layer)[:,:,:,channel] )
        else:
            target = ri * tf.reduce_mean( tfi.T(prev_layer)[:,:,:,prev_channel] ) + r * tf.reduce_mean( tfi.T(layer)[:,:,:,channel] )
    else:
        target = tf.reduce_mean( tfi.T(layer)[:,:,:,channel] )
    rot = slow_rot
    zoom = slow_zoom
    # We start with section 4
    if section_id >= 8:
        rot = fast_rot
        zoom = fast_zoom
    elif section_id == 7:
        rot = 0.75 * fast_rot + 0.25 * slow_rot
        zoom = 0.75 * fast_zoom + 0.25 * slow_zoom
    elif section_id == 6:
        rot = 0.5 * fast_rot + 0.5 * slow_rot
        zoom = 0.5 * fast_zoom + 0.5 * slow_zoom
    elif section_id == 5:
        rot = 0.25 * fast_rot + 0.75 * slow_rot
        zoom = 0.25 * fast_zoom + 0.75 * slow_zoom
    # This should line up with 960 being reverse of same thing in stage_01, which we want!
    if ( fno % 480 ) < 240:
        rot = -rot
    # Ease rotation in/out over the first/last 10 frames of each section.
    if ( fno > 970 ):
        if ( fno % 240 ) < 10:
            rot *= ( fno % 240 )/10.0
        if ( fno % 240 ) > 230:
            rot *= ( 240 - ( fno % 240 ) ) / 10.0
    step_val = 1.25
    if (fno > 4600):
        # Ramp the deepdream step strength down near the final frames.
        step_val = 0.5 + 0.75 * (4800 - fno)/200.0
    current_img = tfi.mix_images( current_img, colour_guides[ section_id % 4 ], 0.997 )
    current_img = tfi.affine_zoom( current_img, zoom, rot )
    current_img = tfi.render_deepdream( target, current_img, iter_n=1, step=step_val, octave_n=4, octave_scale=1.5, direct_objective = True )
    cropped_img = current_img[margin:-margin, margin:-margin, :]
    tfi.savejpeg( cropped_img, ('{}/frame_{}.jpeg'.format( directory, '%04d' % fno ) ) )
    if ( fno < 1201 ):
        # Keep uncropped copies for blending with stage_01's overlap region.
        tfi.savejpeg( current_img, ('{}/overlap_frame_{}.jpeg'.format( directory, '%04d' % fno ) ) )
    if (frame % 5 == 0):
        # Periodically rebuild the graph/session to limit memory growth.
        tfi.reset_graph_and_session()
tfi.close_session()
|
neilslater/tabea_video_project
|
animation_stage_02.py
|
Python
|
apache-2.0
| 6,765
|
[
"CRYSTAL"
] |
4f5194b9f4b6e21aa7d376a140adb4336f5ccce4267735ee0af65b06ff760258
|
# -*- coding: utf-8 -*-
#
# File: setuphandlers.py
#
# Copyright (c) 2008 by []
# Generator: ArchGenXML Version 2.0-beta10
# http://plone.org/products/archgenxml
#
# GNU General Public License (GPL)
#
__author__ = """unknown <unknown>"""
__docformat__ = 'plaintext'
import logging
logger = logging.getLogger('UWOshMusicRecruiting: setuphandlers')
from Products.UWOshMusicRecruiting.config import PROJECTNAME
from Products.UWOshMusicRecruiting.config import DEPENDENCIES
from Products.CMFCore.utils import getToolByName
from Products.ExternalMethod.ExternalMethod import manage_addExternalMethod
import transaction
##code-section HEAD
def install_external_method(folder, id, description, module, function):
    """Add an External Method ``id`` to ``folder`` (if absent) and restrict it
    to music faculty members and managers.

    Failures are logged rather than raised, so one broken method does not
    abort the whole install step.
    """
    if not hasattr(folder, id):
        try:
            manage_addExternalMethod(folder, id, description, module, function)
            folder[id].manage_permission("View", roles=["musicFacultyMember", "Manager"], acquire=0)
            folder[id].manage_permission("Access contents information", roles=["musicFacultyMember", "Manager"], acquire=0)
        except Exception as e:
            # BUGFIX: was a bare ``except:`` (which also traps SystemExit /
            # KeyboardInterrupt) and the log line omitted the actual error.
            logger.info("Error attempting to install %s external method: %s" % (id, e))
def install_tab(existing_tabs, p_actions, all_tab_info):
    """Register each portal tab in ``all_tab_info`` that is not already present.

    Each entry is (id, title, url, condition, permission, category); entries
    whose id appears in ``existing_tabs`` are left untouched.
    """
    for entry in all_tab_info:
        if entry[0] not in existing_tabs:
            p_actions.addAction(entry[0], entry[1], entry[2], entry[3], entry[4], entry[5])
def create_folder(context, folder_type, folder_name, id):
    """Create a portal-level folder of ``folder_type`` titled ``folder_name``
    with the given ``id``, unless the catalog already has a match."""
    portal = getToolByName(context, 'portal_url').getPortalObject()
    # BUGFIX: this compared ``len(...) is 0`` — identity, not equality — which
    # only works due to CPython's small-int caching and raises a SyntaxWarning
    # on modern Pythons.  Value equality is what is meant.
    if len(portal.queryCatalog({'portal_type':folder_type, 'title':folder_name, 'id': id})) == 0:
        portal.invokeFactory(id=id, type_name=folder_type)
        portal[id].edit(title=folder_name)
def add_instrument_property(self):
    """Ensure the 'instrument' string property exists on portal_memberdata."""
    memberdata_tool = getToolByName(self, 'portal_memberdata')
    if memberdata_tool.hasProperty('instrument'):
        return
    memberdata_tool.manage_addProperty('instrument', '', 'string')
def install_external_methods_to_custom_folder(self):
    """Install the product's export/import/mail External Methods into
    portal_skins/custom (all with an empty description)."""
    portal = getToolByName(self, 'portal_url').getPortalObject()
    custom_folder = portal.portal_skins.custom
    method_specs = (
        ('export_all_visits', 'UWOshMusicRecruiting.export_visits', 'export_all_visits'),
        ('export_user_visits', 'UWOshMusicRecruiting.export_visits', 'export_user_visits'),
        ('import_visits', 'UWOshMusicRecruiting.import_visits', 'import_visits'),
        ('mail_by_instrument_type', 'UWOshMusicRecruiting.mail_by_instrument_type', 'mail_by_instrument_type'),
    )
    for method_id, module_name, function_name in method_specs:
        install_external_method(custom_folder, method_id, '', module_name, function_name)
def add_portal_tabs(self):
    """Add this product's portal tabs, skipping ids that already exist."""
    p_actions = getToolByName(self, 'portal_actions')
    portal = getToolByName(self, 'portal_url').getPortalObject()
    # assumes at least one VisitFolder is cataloged -- TODO confirm; an
    # empty result would raise IndexError here (same as the original).
    visit_folder = portal.queryCatalog({'portal_type':'VisitFolder'})[0].id
    # id, title, url, condition, permission, category
    added_buttons = [['create_new_visit_button', 'Create New Visit', 'string:${globals_view/navigationRootUrl}/' +visit_folder+ '/createObject?type_name=Visit', '', 'Add UWOshMusicRecruiting Content', 'portal_tabs'],
                     ['my_visits_button', 'My Visits', 'string:${globals_view/navigationRootUrl}/my-visits','', 'Add UWOshMusicRecruiting Content', 'portal_tabs'],
                     ['my_instruments_button', 'My Instruments', 'string:${globals_view/navigationRootUrl}/my-instruments', '', 'Add UWOshMusicRecruiting Content', 'portal_tabs'],
                     ['export_my_visits_button', 'Export My Visits', 'string:${globals_view/navigationRootUrl}/export_user_visits','', 'Add UWOshMusicRecruiting Content', 'portal_tabs']]
    # BUGFIX: the original passed map(lambda x: x.getId(), ...) -- on
    # Python 3 map() is a one-shot iterator, and install_tab() performs a
    # membership test per tab, so every tab after the first would see an
    # exhausted iterator.  A list supports repeated membership tests.
    existing_ids = [action.getId() for action in p_actions.listActions()]
    install_tab(existing_ids, p_actions, added_buttons)
def setup_site_properties(self):
    """Grant keyword-adding and product-content rights to music faculty."""
    # Allow faculty members (plus Managers/Reviewers) to add keywords.
    site_props = self.portal_properties.site_properties
    site_props.manage_changeProperties(
        {'allowRolesToAddKeywords': ['Manager', 'Reviewer', 'musicFacultyMember']})
    # Restrict adding this product's content to faculty and managers.
    self.manage_permission("Add UWOshMusicRecruiting Content",
                           roles=["musicFacultyMember", "Manager"], acquire=0)
##/code-section HEAD
def installGSDependencies(context):
    """Install dependend profiles."""
    # XXX Hacky, but works for now. has to be refactored as soon as generic
    # setup allows a more flexible way to handle dependencies.
    shortContext = context._profile_path.split('/')[-3]
    if shortContext != 'UWOshMusicRecruiting':
        # the current import step is triggered too many times, this creates infinite recursions
        # therefore, we'll only run it if it is triggered from proper context
        logger.debug("installGSDependencies will not run in context %s" % shortContext)
        return
    logger.info("installGSDependencies started")
    dependencies = []
    # NOTE(review): the dependency list is empty, so everything below this
    # early return is currently dead code; it is kept for when GS profile
    # dependencies are added to the list above.
    if not dependencies:
        return
    site = context.getSite()
    setup_tool = getToolByName(site, 'portal_setup')
    qi = getToolByName(site, 'portal_quickinstaller')
    for dependency in dependencies:
        logger.info(" installing GS dependency %s:" % dependency)
        # Dependencies may be "product" or "product:profile"; default to
        # the ":default" profile when none is given.
        if dependency.find(':') == -1:
            dependency += ':default'
        # Remember the current import context so it can be restored after
        # the dependency profile has been imported.
        old_context = setup_tool.getImportContextID()
        setup_tool.setImportContext('profile-%s' % dependency)
        importsteps = setup_tool.getImportStepRegistry().sortSteps()
        # Skip our own dependency-handling steps to avoid re-entering them.
        excludes = [
            u'UWOshMusicRecruiting-QI-dependencies',
            u'UWOshMusicRecruiting-GS-dependencies'
        ]
        importsteps = [s for s in importsteps if s not in excludes]
        for step in importsteps:
            logger.debug(" running import step %s" % step)
            setup_tool.runImportStep(step) # purging flag here?
            logger.debug(" finished import step %s" % step)
        # let's make quickinstaller aware that this product is installed now
        product_name = dependency.split(':')[0]
        qi.notifyInstalled(product_name)
        logger.debug(" notified QI that %s is installed now" % product_name)
        # maybe a savepoint is welcome here (I saw some in optilude's examples)? maybe not? well...
        transaction.savepoint()
        if old_context: # sometimes, for some unknown reason, the old_context is None, believe me
            setup_tool.setImportContext(old_context)
        logger.debug(" installed GS dependency %s:" % dependency)
    # re-run some steps to be sure the current profile applies as expected
    importsteps = setup_tool.getImportStepRegistry().sortSteps()
    # NOTE(review): `filter` shadows the builtin of the same name; harmless
    # here because the builtin is not used afterwards in this function.
    filter = [
        u'typeinfo',
        u'workflow',
        u'membranetool',
        u'factorytool',
        u'content_type_registry',
        u'membrane-sitemanager'
    ]
    importsteps = [s for s in importsteps if s in filter]
    for step in importsteps:
        setup_tool.runImportStep(step) # purging flag here?
    logger.info("installGSDependencies finished")
def installQIDependencies(context):
    """Install/reinstall old-style QuickInstaller product dependencies.

    Iterates DEPENDENCIES (module-level list): products already installed
    are reinstalled, installable ones are installed, and anything else is
    a fatal error.
    """
    shortContext = context._profile_path.split('/')[-3]
    if shortContext != 'UWOshMusicRecruiting':  # avoid infinite recursions
        logger.debug("installQIDependencies will not run in context %s" % shortContext)
        return
    logger.info("installQIDependencies starting")
    site = context.getSite()
    qi = getToolByName(site, 'portal_quickinstaller')
    for dependency in DEPENDENCIES:
        if qi.isProductInstalled(dependency):
            logger.info(" re-Installing QI dependency %s:" % dependency)
            qi.reinstallProducts([dependency])
            transaction.savepoint()  # is a savepoint really needed here?
            logger.debug(" re-Installed QI dependency %s:" % dependency)
        elif qi.isProductInstallable(dependency):
            # Flattened the original nested `else: if ...` into elif.
            logger.info(" installing QI dependency %s:" % dependency)
            qi.installProduct(dependency)
            transaction.savepoint()  # is a savepoint really needed here?
            logger.debug(" installed dependency %s:" % dependency)
        else:
            logger.info(" QI dependency %s not installable" % dependency)
            # BUGFIX: the original raised a bare string, which is itself a
            # TypeError on Python >= 2.6/3.x; raise a real exception.
            raise RuntimeError(" QI dependency %s not installable" % dependency)
    logger.info("installQIDependencies finished")
def updateRoleMappings(context):
    """Re-apply workflow security after the workflows changed.

    Equivalent to pressing the 'Update Security Settings' button in
    portal_workflow.
    """
    shortContext = context._profile_path.split('/')[-3]
    # Only act when triggered from this product's own profile; the step
    # also fires for other profiles and would recurse otherwise.
    if shortContext != 'UWOshMusicRecruiting':
        return
    workflow_tool = getToolByName(context.getSite(), 'portal_workflow')
    workflow_tool.updateRoleMappings()
def postInstall(context):
    """Final setup step: create containers, properties, methods and tabs."""
    shortContext = context._profile_path.split('/')[-3]
    if shortContext != 'UWOshMusicRecruiting':  # avoid infinite recursions
        return
    site = context.getSite()
    # (portal_type, title, id) for each top-level container the product needs.
    folder_specs = (
        ('StudentFolder', 'Students', 'StudentFolder'),
        ('SchoolFolder', 'Schools', 'SchoolFolder'),
        ('ContactFolder', 'Contacts', 'ContactFolder'),
        ('FacultyMemberFolder', 'FacultyMembers', 'FacultyMemberFolder'),
        ('VisitFolder', 'Visits', 'VisitFolder'),
    )
    for folder_type, folder_title, folder_id in folder_specs:
        create_folder(site, folder_type, folder_title, folder_id)
    add_instrument_property(site)
    install_external_methods_to_custom_folder(site)
    # Tabs last: add_portal_tabs looks up the VisitFolder created above.
    add_portal_tabs(site)
    setup_site_properties(site)
##code-section FOOT
##/code-section FOOT
|
uwosh/UWOshMusicRecruiting
|
setuphandlers.py
|
Python
|
gpl-2.0
| 9,848
|
[
"VisIt"
] |
2f174017886e046dc6f755222a39942cb110bb94704980faad787b5e59d64c13
|
import numpy
import re
from .tools import *
from orbkit.qcinfo import QCinfo
from orbkit.display import display
from orbkit.orbitals import AOClass, MOClass
def read_gamess(fname, all_mo=False, spin=None, read_properties=False,
                **kwargs):
    '''Reads all information desired from a Gamess-US output file.

    **Parameters:**

    fname : str, file descriptor
        Specifies the filename for the input file.
        fname can also be used with a file descriptor instad of a filename.
    all_mo : bool, optional
        If True, all molecular orbitals are returned.
    spin : {None, 'alpha', 'beta'}, optional
        If not None, only orbitals of the requested spin are kept
        (unrestricted calculations only).
    read_properties : bool, optional
        If True, additionally extract energies, multiplicities,
        (transition) dipole moments and Mulliken/Lowdin populations.

    **Returns:**

    qc (class QCinfo) with attributes geo_spec, geo_info, ao_spec, mo_spec, etot :
        See :ref:`Central Variables` for details.
    '''
    if isinstance(fname, str):
        filename = fname
        fname = descriptor_from_file(filename, index=0)
    else:
        filename = fname.name

    from io import TextIOWrapper
    if isinstance(fname, TextIOWrapper):
        flines = fname.readlines()  # Read the WHOLE file into RAM
    else:
        # Byte stream: decode, then split into lines while keeping the
        # trailing newline on every line (mirrors readlines()).
        magic = 'This is an Orbkit magic string'
        text = fname.read().decode("iso-8859-1").replace('\n','\n{}'.format(magic))
        flines = text.split(magic)
        flines.pop()

    # Initialize the variables
    qc = QCinfo()
    qc.ao_spec = AOClass([])
    qc.mo_spec = MOClass([])
    has_alpha = False       # Flag for alpha electron set
    has_beta = False        # Flag for beta electron set
    restricted = True       # Flag for restricted calculation
    sec_flag = None         # A Flag specifying the current section
    is_pop_ana = True       # Flag for population analysis for ground state
    keyword = [' ATOM ATOMIC COORDINATES','']
    # Keywords for single point calculation and
    # geometry optimization
    mokey = 'EIGENVECTORS'  # Keyword for MOs
    bopt = False            # Flag for geometry optimization
    sym = {}                # Symmetry of MOs
    geo_skip = 1            # Number of lines to skip in geometry section

    for il in range(len(flines)):
        line = flines[il]        # The current line as string
        thisline = line.split()  # The current line split into segments
        # Check the file for keywords
        if 'RUNTYP=OPTIMIZE' in line:
            keyword = [' COORDINATES OF ALL ATOMS ARE',
                       '***** EQUILIBRIUM GEOMETRY LOCATED *****']
            geo_skip = 2
            bopt = True
            if 'SCFTYP=UHF' in line:
                mokey = ' SET ****'
                restricted = False
            else:
                mokey = 'EIGENVECTORS'
        elif keyword[0] in line and keyword[1] in flines[il-1]:
            # The section containing information about
            # the molecular geometry begins
            sec_flag = 'geo_info'
            atom_count = 0  # Counter for Atoms
            angstrom = not '(BOHR)' in line
        elif 'ATOMIC BASIS SET' in line:
            # The section containing information about
            # the atomic orbitals begins
            sec_flag = 'ao_info'
            ao_skip = 6  # Number of lines to skip
            AO = []      # Atomic orbitals
        elif '----- ALPHA SET ' in line:
            # The section for alpha electrons
            has_alpha = True
            has_beta = False
            restricted = False
        elif '----- BETA SET ' in line:
            # The section for beta electrons
            restricted = False
            has_alpha = False
            has_beta = True
        elif mokey in line and len(thisline) < 3:
            # The section containing information about
            # the molecular orbitals begins
            sec_flag = 'mo_info'
            mo_skip = 1
            len_mo = 0       # Number of MOs
            init_mo = False  # Initialize new MO section
            info_key = None  # A Flag specifying the energy and symmetry section
            lxlylz = []
            if 'ALPHA' in line:
                has_alpha = True
                mo_skip = 0
            elif 'BETA' in line:
                has_beta = True
                has_alpha = False
                mo_skip = 0
        elif 'NATURAL ORBITALS' in line and len(thisline) <= 3:
            display('The natural orbitals are not extracted.')
        elif ' NUMBER OF OCCUPIED ORBITALS (ALPHA) =' in line:
            occ = []  # occupation number of molecular orbitals
            occ.append(int(thisline[-1]))
        elif ' NUMBER OF OCCUPIED ORBITALS (BETA ) =' in line:
            occ.append(int(thisline[-1]))
        # elif 'ECP POTENTIALS' in line:
        #     sec_flag = 'ecp_info'
        #     ecp = ''
        elif ' NUMBER OF OCCUPIED ORBITALS (ALPHA) KEPT IS =' in line:
            occ = []  # occupation number of molecular orbitals
            occ.append(int(thisline[-1]))
        elif ' NUMBER OF OCCUPIED ORBITALS (BETA ) KEPT IS =' in line:
            occ.append(int(thisline[-1]))
        elif 'NUMBER OF STATES REQUESTED' in line and read_properties:
            # get the number of excited states and initialize variables for
            # transition dipole moment and energies
            exc_states = int(line.split('=')[1])  # Number of excited states
            # Dipole moments matrix: Diagonal elements -> permanent dipole moments
            # Off-diagonal elements -> transition dipole moments
            qc.dipole_moments = numpy.zeros(((exc_states+1),(exc_states+1),3))
            # Multiplicity of ground and excited states
            qc.states['multiplicity'] = numpy.zeros(exc_states+1)
            # Energies of ground and excited states
            qc.states['energy'] = numpy.zeros(exc_states+1)
            qc.states['energy'][0] = qc.etot
            qc.states['multiplicity'][0] = gs_multi
            dm_flag = None  # Flag specifying the dipole moments section
        elif 'TRANSITION DIPOLE MOMENTS' in line and read_properties:
            # Section containing energies of excited states
            sec_flag = 'dm_info'
        # Energy and Multiplicity for ground state
        elif 'SPIN MULTIPLICITY' in line and read_properties:
            # odd way to get gound state multiplicity
            gs_multi = int(line.split()[3])
        elif 'FINAL' in line and read_properties:
            # get (last) energy
            qc.etot = float(line.split()[4])
        elif 'TOTAL MULLIKEN AND LOWDIN ATOMIC POPULATIONS' in line and is_pop_ana == True and read_properties:
            # Read Mulliken and Lowdin Atomic Populations
            sec_flag = 'pop_info'
            pop_skip = 1
            # BUGFIX: was `is_pop_ana == False` -- a no-op comparison where
            # an assignment was intended, so the flag never cleared.
            is_pop_ana = False
            qc.pop_ana['Lowdin'] = []
            qc.pop_ana['Mulliken'] = []
        else:
            # Check if we are in a specific section
            if sec_flag == 'geo_info':
                if not geo_skip:
                    if len(line) < 2:
                        sec_flag = None
                    else:
                        qc.geo_info.append([thisline[0],atom_count+1,thisline[1]])
                        qc.geo_spec.append([float(ii) for ii in thisline[2:]])
                        atom_count += 1
                elif geo_skip:
                    geo_skip -= 1
            elif sec_flag == 'ao_info':
                if not ao_skip:
                    if ' TOTAL NUMBER OF BASIS SET SHELLS' in line:
                        sec_flag = None
                    else:
                        if len(thisline) == 1:
                            # Read atom type
                            at_type = thisline[0]
                            AO.append([])
                            new_ao = False
                        elif len(thisline) == 0 and new_ao == False:
                            new_ao = True
                        else:
                            coeffs = [float(ii) for ii in thisline[3:]]
                            if new_ao:
                                # An 'L' shell is a combined s+p ('sp') shell.
                                ao_type = thisline[1].lower().replace('l','sp')
                                for i_ao,t_ao in enumerate(ao_type):
                                    AO[-1].append({'atom_type': at_type,
                                                   'type': t_ao,
                                                   'pnum': 1,
                                                   'coeffs': [[coeffs[0],coeffs[1+i_ao]]]})
                                new_ao = False
                            else:
                                for i_ao in range(len(ao_type)):
                                    AO[-1][-len(ao_type)+i_ao]['coeffs'].append([coeffs[0],
                                                                                coeffs[1+i_ao]])
                                    AO[-1][-len(ao_type)+i_ao]['pnum'] += 1
                elif ao_skip:
                    ao_skip -= 1
            elif sec_flag == 'mo_info':
                if not mo_skip:
                    if 'END OF' in line and 'CALCULATION' in line or '-----------' in line:
                        sec_flag = None
                        has_alpha = False
                        has_beta = False
                    else:
                        if thisline == []:
                            info_key = None
                            init_mo = True
                            try:
                                int(flines[il+1].split()[0])
                            except ValueError:
                                sec_flag = None
                                init_mo = False
                        elif init_mo:
                            init_len = len(thisline)
                            lxlylz = []
                            for ii in range(len(thisline)):
                                if has_alpha == True or has_beta == True:
                                    qc.mo_spec.append({'coeffs': [],
                                                       'energy': 0.0,
                                                       'occ_num': 0.0,
                                                       'sym': '',
                                                       'spin': ''
                                                       })
                                else:
                                    qc.mo_spec.append({'coeffs': [],
                                                       'energy': 0.0,
                                                       'occ_num': 0.0,
                                                       'sym': ''
                                                       })
                            init_mo = False
                            info_key = 'energy'
                        elif len(thisline) == init_len and info_key == 'energy':
                            for ii in range(init_len,0,-1):
                                qc.mo_spec[-ii]['energy'] = float(thisline[init_len-ii])
                            info_key = 'symmetry'
                        elif len(thisline) == init_len and info_key == 'symmetry':
                            for ii in range(init_len,0,-1):
                                len_mo += 1
                                a = thisline[init_len-ii]
                                if a not in sym.keys(): sym[a] = 1
                                else: sym[a] = len_mo
                                if has_alpha:
                                    qc.mo_spec[-ii]['sym'] = '%d.%s_a' % (sym[a], thisline[init_len-ii])
                                    qc.mo_spec[-ii]['spin'] = 'alpha'
                                elif has_beta:
                                    qc.mo_spec[-ii]['sym'] = '%d.%s_b' % (sym[a], thisline[init_len-ii])
                                    qc.mo_spec[-ii]['spin'] = 'beta'
                                else:
                                    qc.mo_spec[-ii]['sym'] = '%d.%s' % (sym[a], thisline[init_len-ii])
                            info_key = 'coeffs'
                        elif thisline != [] and info_key == 'coeffs':
                            # Cartesian labels (e.g. ' X ', 'XXY') live in
                            # fixed columns 11:17 of the coefficient lines.
                            lxlylz.append(line[11:17])
                            # Raw string avoids the invalid-escape warning
                            # on modern Python.
                            for ii, m in enumerate(re.finditer(r'-?\d+\.\d+', line[16:])):
                                qc.mo_spec[-init_len+ii]['coeffs'].append(float(m.group()))
                elif mo_skip:
                    mo_skip -= 1
            elif sec_flag == 'ecp_info':
                # NOTE(review): 'ecp_info' is never assigned (the trigger is
                # commented out above), so this branch is currently dead.
                if 'THE ECP RUN REMOVES' in line:
                    sec_flag = None
                elif 'PARAMETERS FOR' in line:
                    if line[17:25].split()[0] != ecp:
                        ecp = line[17:25].split()[0]
                        zcore = float(line[51:55].split()[0])
                        ii_geo = int(line[35:41].split()[0])-1
                        qc.geo_info[ii_geo][2] = str(float(qc.geo_info[ii_geo][2]) - zcore)
                    else:
                        ii_geo = int(line[35:41].split()[0])-1
                        qc.geo_info[ii_geo][2] = str(float(qc.geo_info[ii_geo][2]) - zcore)
            elif sec_flag == 'dm_info':
                # instead of giving the output in a useful human and machine readable
                # way, gamess output syntax differs for transitions involving the
                # ground state compared to transitions between excited states...
                if 'GROUND STATE (SCF) DIPOLE=' in line:
                    # ground state dipole is in debye...convert to atomic units
                    for ii in range(3):
                        qc.dipole_moments[0][0][ii] = float(thisline[ii+4])*0.393430307
                if 'EXPECTATION VALUE DIPOLE MOMENT FOR EXCITED STATE' in line:
                    state = (int(line.replace('STATE', 'STATE ').split()[7]))
                    dm_flag = 'state_info'
                if 'TRANSITION FROM THE GROUND STATE TO EXCITED STATE' in line:
                    state = [0,
                             int(line.replace('STATE', 'STATE ').split()[8])]
                    dm_flag = 'transition_info'
                if 'TRANSITION BETWEEN EXCITED STATES' in line:
                    state = [int(thisline[4]),
                             int(line.replace('AND', 'AND ').split()[6])]
                    dm_flag = 'transition_info'
                if 'NATURAL ORBITAL OCCUPATION NUMBERS FOR EXCITED STATE' in line:
                    sec_flag = None
                    dm_flag = None
                if dm_flag == 'state_info':
                    if 'STATE MULTIPLICITY' in line:
                        qc.states['multiplicity'][state] = int(line.split('=')[1])
                    if 'STATE ENERGY' in line:
                        qc.states['energy'][state] = float(line.split('=')[1])
                    # BUGFIX: was `if 'STATE DIPOLE' and 'E*BOHR' in line:`
                    # -- the first operand is a truthy constant, so only the
                    # 'E*BOHR' test took effect.  Check both substrings.
                    if 'STATE DIPOLE' in line and 'E*BOHR' in line:
                        for ii in range(3):
                            qc.dipole_moments[state][state][ii] = float(thisline[ii+3])
                elif dm_flag == 'transition_info':
                    # BUGFIX: same truthy-constant `and` pattern as above.
                    if 'TRANSITION DIPOLE' in line and 'E*BOHR' in line:
                        for ii in range(3):
                            qc.dipole_moments[state[0]][state[1]][ii] = float(thisline[ii+3])
                            qc.dipole_moments[state[1]][state[0]][ii] = float(thisline[ii+3])
            elif sec_flag == 'pop_info':
                if not pop_skip:
                    if line == '\n':
                        sec_flag = None
                    else:
                        # BUGFIX: the original re-created `qc.pop_ana = {}`
                        # for every data line, wiping the lists initialized
                        # when the section started and raising KeyError on
                        # the first append.
                        qc.pop_ana['Lowdin'].append(float(thisline[5]))
                        qc.pop_ana['Mulliken'].append(float(thisline[3]))
                elif pop_skip:
                    pop_skip -= 1

    # Check usage of same atomic basis sets
    basis_set = {}
    for ii in range(len(AO)):
        if not AO[ii][0]['atom_type'] in basis_set.keys():
            basis_set[AO[ii][0]['atom_type']] = AO[ii]
        else:
            for jj in range(len(AO[ii])):
                if AO[ii][jj]['coeffs'] != basis_set[AO[ii][0]['atom_type']][jj]['coeffs']:
                    raise IOError('Different basis sets for the same atom.')
    # Numpy array
    for ii in basis_set.keys():
        for jj in range(len(basis_set[ii])):
            basis_set[ii][jj]['coeffs'] = numpy.array(basis_set[ii][jj]['coeffs'])

    for kk in range(len(qc.mo_spec)):
        qc.mo_spec[kk]['coeffs'] = numpy.array(qc.mo_spec[kk]['coeffs'])

    # Complement atomic basis sets
    for kk in range(len(qc.geo_info)):
        for ll in range(len(basis_set[qc.geo_info[kk][0]])):
            qc.ao_spec.append({'atom': qc.geo_info[kk][1]-1,
                               'type': basis_set[qc.geo_info[kk][0]][ll]['type'],
                               'pnum': basis_set[qc.geo_info[kk][0]][ll]['pnum'],
                               'coeffs': basis_set[qc.geo_info[kk][0]][ll]['coeffs'],
                               'lxlylz': None
                               })

    # Reconstruct exponents list for ao_spec
    count = 0
    for i,j in enumerate(qc.ao_spec):
        l = l_deg(lquant[j['type']])
        j['lxlylz'] = []
        # BUGFIX: the inner loop variable was also named `i`, shadowing the
        # enumerate index above; renamed to avoid the shadowing.
        for _ in range(l):
            j['lxlylz'].append((lxlylz[count].lower().count('x'),
                                lxlylz[count].lower().count('y'),
                                lxlylz[count].lower().count('z')))
            count += 1
        j['lxlylz'] = numpy.array(j['lxlylz'],dtype=numpy.int64)

    # Assign occupation numbers: restricted -> doubly occupied until the
    # smaller of occ[0]/occ[1] runs out, then singly occupied.
    if restricted:
        for ii in range(len(qc.mo_spec)):
            if occ[0] and occ[1]:
                qc.mo_spec[ii]['occ_num'] += 2.0
                occ[0] -= 1
                occ[1] -= 1
            if not occ[0] and occ[1]:
                qc.mo_spec[ii]['occ_num'] += 1.0
                occ[1] -= 1
            if not occ[1] and occ[0]:
                qc.mo_spec[ii]['occ_num'] += 1.0
                occ[0] -= 1

    if restricted == False:
        for ii in range(len(qc.mo_spec)):
            if qc.mo_spec[ii]['spin'] == 'alpha' and occ[0] > 0:
                qc.mo_spec[ii]['occ_num'] += 1.0
                occ[0] -= 1
                has_alpha = True
            elif qc.mo_spec[ii]['spin'] == 'beta' and occ[1] > 0:
                qc.mo_spec[ii]['occ_num'] += 1.0
                occ[1] -= 1
                has_beta = True

    if spin is not None:
        if restricted:
            raise IOError('The keyword `spin` is only supported for unrestricted calculations.')
        if spin != 'alpha' and spin != 'beta':
            raise IOError('`spin=%s` is not a valid option' % spin)
        elif spin == 'alpha' and has_alpha == True:
            display('Reading only molecular orbitals of spin alpha.')
        elif spin == 'beta' and has_beta == True:
            display('Reading only molecular orbitals of spin beta.')
        elif (not has_alpha) and (not has_beta):
            raise IOError(
                'No spin molecular orbitals available')
        elif ((spin == 'alpha' and not has_alpha) or
              (spin == 'beta' and not has_beta)):
            raise IOError('You requested `%s` orbitals, but None of them are present.'
                          % spin)

    # Are all MOs requested for the calculation?
    if not all_mo:
        for i in range(len(qc.mo_spec))[::-1]:
            if qc.mo_spec[i]['occ_num'] < 0.0000001:
                del qc.mo_spec[i]

    # Only molecular orbitals of one spin requested?
    if spin is not None:
        for i in range(len(qc.mo_spec))[::-1]:
            if qc.mo_spec[i]['spin'] != spin:
                del qc.mo_spec[i]

    # Convert geo_info and geo_spec to numpy.ndarrays
    qc.format_geo(is_angstrom=angstrom)
    qc.mo_spec.update()
    qc.ao_spec.update()
    return qc
|
orbkit/orbkit
|
orbkit/read/gamess.py
|
Python
|
lgpl-3.0
| 16,936
|
[
"GAMESS"
] |
b73f16824e5b45acbe3223764b7dc015fc1aa93995402f328085f11c7ffd2c96
|
import logging
from django.core.management.base import BaseCommand
from edc_content_type_map.models import ContentTypeMapHelper
logger = logging.getLogger(__name__)
class NullHandler(logging.Handler):
    """Logging handler that silently discards every record."""

    def emit(self, record):
        # Intentionally a no-op: swallow the record without output.
        pass
# Attach the no-op handler so the logger never complains about having no
# handlers.  NOTE(review): Logger.addHandler() returns None, so the
# `nullhandler` name is always bound to None; the handler instance itself
# is still registered on `logger`.
nullhandler = logger.addHandler(NullHandler())
class Command(BaseCommand):
    """Management command: populate and sync the content type map.

    Safe to run repeatedly.
    """

    args = ()
    help = 'Populate and sync content type map with django content type. (Safe)'

    def handle(self, *args, **options):
        """Repopulate from django content types, then sync dependents."""
        out = self.stdout.write
        out('Populating / re-populating from django content type...\n')
        ContentTypeMapHelper().populate()
        out('Done.')
        out('Syncing with membership forms, visit definitions, etc...\n')
        ContentTypeMapHelper().sync()
        out('Done. You may now check /admin/bhp_content_type_map/contenttypemap/.\n')
|
botswana-harvard/edc-content-type-map
|
edc_content_type_map/management/commands/sync_content_type.py
|
Python
|
gpl-2.0
| 851
|
[
"VisIt"
] |
5f16be9298e0b79f4ebf53688ffc8bc50cc6263edfa3fde9d07d3d1f37f2f965
|
# -----BEGIN ORIGINAL LICENSE-----
# mn_wordlist.c
# Copyright (c) 2000 Oren Tirosh <oren@hishome.net>
#
# Permission is hereby granted, free of charge, to any person obtaining a copy
# of this software and associated documentation files (the "Software"), to deal
# in the Software without restriction, including without limitation the rights
# to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
# copies of the Software, and to permit persons to whom the Software is
# furnished to do so, subject to the following conditions:
#
# The above copyright notice and this permission notice shall be included in
# all copies or substantial portions of the Software.
#
# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
# IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
# FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
# AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
# LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
# OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
# THE SOFTWARE.
# -----END ORIGINAL LICENSE-----
_WORDLIST = [
"academy", "acrobat", "active", "actor", "adam", "admiral",
"adrian", "africa", "agenda", "agent", "airline", "airport",
"aladdin", "alarm", "alaska", "albert", "albino", "album",
"alcohol", "alex", "algebra", "alibi", "alice", "alien",
"alpha", "alpine", "amadeus", "amanda", "amazon", "amber",
"america", "amigo", "analog", "anatomy", "angel", "animal",
"antenna", "antonio", "apollo", "april", "archive", "arctic",
"arizona", "arnold", "aroma", "arthur", "artist", "asia",
"aspect", "aspirin", "athena", "athlete", "atlas", "audio",
"august", "austria", "axiom", "aztec", "balance", "ballad",
"banana", "bandit", "banjo", "barcode", "baron", "basic",
"battery", "belgium", "berlin", "bermuda", "bernard", "bikini",
"binary", "bingo", "biology", "block", "blonde", "bonus",
"boris", "boston", "boxer", "brandy", "bravo", "brazil",
"bronze", "brown", "bruce", "bruno", "burger", "burma",
"cabinet", "cactus", "cafe", "cairo", "cake", "calypso",
"camel", "camera", "campus", "canada", "canal", "cannon",
"canoe", "cantina", "canvas", "canyon", "capital", "caramel",
"caravan", "carbon", "cargo", "carlo", "carol", "carpet",
"cartel", "casino", "castle", "castro", "catalog", "caviar",
"cecilia", "cement", "center", "century", "ceramic", "chamber",
"chance", "change", "chaos", "charlie", "charm", "charter",
"chef", "chemist", "cherry", "chess", "chicago", "chicken",
"chief", "china", "cigar", "cinema", "circus", "citizen",
"city", "clara", "classic", "claudia", "clean", "client",
"climax", "clinic", "clock", "club", "cobra", "coconut",
"cola", "collect", "colombo", "colony", "color", "combat",
"comedy", "comet", "command", "compact", "company", "complex",
"concept", "concert", "connect", "consul", "contact", "context",
"contour", "control", "convert", "copy", "corner", "corona",
"correct", "cosmos", "couple", "courage", "cowboy", "craft",
"crash", "credit", "cricket", "critic", "crown", "crystal",
"cuba", "culture", "dallas", "dance", "daniel", "david",
"decade", "decimal", "deliver", "delta", "deluxe", "demand",
"demo", "denmark", "derby", "design", "detect", "develop",
"diagram", "dialog", "diamond", "diana", "diego", "diesel",
"diet", "digital", "dilemma", "diploma", "direct", "disco",
"disney", "distant", "doctor", "dollar", "dominic", "domino",
"donald", "dragon", "drama", "dublin", "duet", "dynamic",
"east", "ecology", "economy", "edgar", "egypt", "elastic",
"elegant", "element", "elite", "elvis", "email", "energy",
"engine", "english", "episode", "equator", "escort", "ethnic",
"europe", "everest", "evident", "exact", "example", "exit",
"exotic", "export", "express", "extra", "fabric", "factor",
"falcon", "family", "fantasy", "fashion", "fiber", "fiction",
"fidel", "fiesta", "figure", "film", "filter", "final",
"finance", "finish", "finland", "flash", "florida", "flower",
"fluid", "flute", "focus", "ford", "forest", "formal",
"format", "formula", "fortune", "forum", "fragile", "france",
"frank", "friend", "frozen", "future", "gabriel", "galaxy",
"gallery", "gamma", "garage", "garden", "garlic", "gemini",
"general", "genetic", "genius", "germany", "global", "gloria",
"golf", "gondola", "gong", "good", "gordon", "gorilla",
"grand", "granite", "graph", "green", "group", "guide",
"guitar", "guru", "hand", "happy", "harbor", "harmony",
"harvard", "havana", "hawaii", "helena", "hello", "henry",
"hilton", "history", "horizon", "hotel", "human", "humor",
"icon", "idea", "igloo", "igor", "image", "impact",
"import", "index", "india", "indigo", "input", "insect",
"instant", "iris", "italian", "jacket", "jacob", "jaguar",
"janet", "japan", "jargon", "jazz", "jeep", "john",
"joker", "jordan", "jumbo", "june", "jungle", "junior",
"jupiter", "karate", "karma", "kayak", "kermit", "kilo",
"king", "koala", "korea", "labor", "lady", "lagoon",
"laptop", "laser", "latin", "lava", "lecture", "left",
"legal", "lemon", "level", "lexicon", "liberal", "libra",
"limbo", "limit", "linda", "linear", "lion", "liquid",
"liter", "little", "llama", "lobby", "lobster", "local",
"logic", "logo", "lola", "london", "lotus", "lucas",
"lunar", "machine", "macro", "madam", "madonna", "madrid",
"maestro", "magic", "magnet", "magnum", "major", "mama",
"mambo", "manager", "mango", "manila", "marco", "marina",
"market", "mars", "martin", "marvin", "master", "matrix",
"maximum", "media", "medical", "mega", "melody", "melon",
"memo", "mental", "mentor", "menu", "mercury", "message",
"metal", "meteor", "meter", "method", "metro", "mexico",
"miami", "micro", "million", "mineral", "minimum", "minus",
"minute", "miracle", "mirage", "miranda", "mister", "mixer",
"mobile", "model", "modem", "modern", "modular", "moment",
"monaco", "monica", "monitor", "mono", "monster", "montana",
"morgan", "motel", "motif", "motor", "mozart", "multi",
"museum", "music", "mustang", "natural", "neon", "nepal",
"neptune", "nerve", "neutral", "nevada", "news", "ninja",
"nirvana", "normal", "nova", "novel", "nuclear", "numeric",
"nylon", "oasis", "object", "observe", "ocean", "octopus",
"olivia", "olympic", "omega", "opera", "optic", "optimal",
"orange", "orbit", "organic", "orient", "origin", "orlando",
"oscar", "oxford", "oxygen", "ozone", "pablo", "pacific",
"pagoda", "palace", "pamela", "panama", "panda", "panel",
"panic", "paradox", "pardon", "paris", "parker", "parking",
"parody", "partner", "passage", "passive", "pasta", "pastel",
"patent", "patriot", "patrol", "patron", "pegasus", "pelican",
"penguin", "pepper", "percent", "perfect", "perfume", "period",
"permit", "person", "peru", "phone", "photo", "piano",
"picasso", "picnic", "picture", "pigment", "pilgrim", "pilot",
"pirate", "pixel", "pizza", "planet", "plasma", "plaster",
"plastic", "plaza", "pocket", "poem", "poetic", "poker",
"polaris", "police", "politic", "polo", "polygon", "pony",
"popcorn", "popular", "postage", "postal", "precise", "prefix",
"premium", "present", "price", "prince", "printer", "prism",
"private", "product", "profile", "program", "project", "protect",
"proton", "public", "pulse", "puma", "pyramid", "queen",
"radar", "radio", "random", "rapid", "rebel", "record",
"recycle", "reflex", "reform", "regard", "regular", "relax",
"report", "reptile", "reverse", "ricardo", "ringo", "ritual",
"robert", "robot", "rocket", "rodeo", "romeo", "royal",
"russian", "safari", "salad", "salami", "salmon", "salon",
"salute", "samba", "sandra", "santana", "sardine", "school",
"screen", "script", "second", "secret", "section", "segment",
"select", "seminar", "senator", "senior", "sensor", "serial",
"service", "sheriff", "shock", "sierra", "signal", "silicon",
"silver", "similar", "simon", "single", "siren", "slogan",
"social", "soda", "solar", "solid", "solo", "sonic",
"soviet", "special", "speed", "spiral", "spirit", "sport",
"static", "station", "status", "stereo", "stone", "stop",
"street", "strong", "student", "studio", "style", "subject",
"sultan", "super", "susan", "sushi", "suzuki", "switch",
"symbol", "system", "tactic", "tahiti", "talent", "tango",
"tarzan", "taxi", "telex", "tempo", "tennis", "texas",
"textile", "theory", "thermos", "tiger", "titanic", "tokyo",
"tomato", "topic", "tornado", "toronto", "torpedo", "total",
"totem", "tourist", "tractor", "traffic", "transit", "trapeze",
"travel", "tribal", "trick", "trident", "trilogy", "tripod",
"tropic", "trumpet", "tulip", "tuna", "turbo", "twist",
"ultra", "uniform", "union", "uranium", "vacuum", "valid",
"vampire", "vanilla", "vatican", "velvet", "ventura", "venus",
"vertigo", "veteran", "victor", "video", "vienna", "viking",
"village", "vincent", "violet", "violin", "virtual", "virus",
"visa", "vision", "visitor", "visual", "vitamin", "viva",
"vocal", "vodka", "volcano", "voltage", "volume", "voyage",
"water", "weekend", "welcome", "western", "window", "winter",
"wizard", "wolf", "world", "xray", "yankee", "yoga",
"yogurt", "yoyo", "zebra", "zero", "zigzag", "zipper",
"zodiac", "zoom", "abraham", "action", "address", "alabama",
"alfred", "almond", "ammonia", "analyze", "annual", "answer",
"apple", "arena", "armada", "arsenal", "atlanta", "atomic",
"avenue", "average", "bagel", "baker", "ballet", "bambino",
"bamboo", "barbara", "basket", "bazaar", "benefit", "bicycle",
"bishop", "blitz", "bonjour", "bottle", "bridge", "british",
"brother", "brush", "budget", "cabaret", "cadet", "candle",
"capitan", "capsule", "career", "cartoon", "channel", "chapter",
"cheese", "circle", "cobalt", "cockpit", "college", "compass",
"comrade", "condor", "crimson", "cyclone", "darwin", "declare",
"degree", "delete", "delphi", "denver", "desert", "divide",
"dolby", "domain", "domingo", "double", "drink", "driver",
"eagle", "earth", "echo", "eclipse", "editor", "educate",
"edward", "effect", "electra", "emerald", "emotion", "empire",
"empty", "escape", "eternal", "evening", "exhibit", "expand",
"explore", "extreme", "ferrari", "first", "flag", "folio",
"forget", "forward", "freedom", "fresh", "friday", "fuji",
"galileo", "garcia", "genesis", "gold", "gravity", "habitat",
"hamlet", "harlem", "helium", "holiday", "house", "hunter",
"ibiza", "iceberg", "imagine", "infant", "isotope", "jackson",
"jamaica", "jasmine", "java", "jessica", "judo", "kitchen",
"lazarus", "letter", "license", "lithium", "loyal", "lucky",
"magenta", "mailbox", "manual", "marble", "mary", "maxwell",
"mayor", "milk", "monarch", "monday", "money", "morning",
"mother", "mystery", "native", "nectar", "nelson", "network",
"next", "nikita", "nobel", "nobody", "nominal", "norway",
"nothing", "number", "october", "office", "oliver", "opinion",
"option", "order", "outside", "package", "pancake", "pandora",
"panther", "papa", "patient", "pattern", "pedro", "pencil",
"people", "phantom", "philips", "pioneer", "pluto", "podium",
"portal", "potato", "prize", "process", "protein", "proxy",
"pump", "pupil", "python", "quality", "quarter", "quiet",
"rabbit", "radical", "radius", "rainbow", "ralph", "ramirez",
"ravioli", "raymond", "respect", "respond", "result", "resume",
"retro", "richard", "right", "risk", "river", "roger",
"roman", "rondo", "sabrina", "salary", "salsa", "sample",
"samuel", "saturn", "savage", "scarlet", "scoop", "scorpio",
"scratch", "scroll", "sector", "serpent", "shadow", "shampoo",
"sharon", "sharp", "short", "shrink", "silence", "silk",
"simple", "slang", "smart", "smoke", "snake", "society",
"sonar", "sonata", "soprano", "source", "sparta", "sphere",
"spider", "sponsor", "spring", "acid", "adios", "agatha",
"alamo", "alert", "almanac", "aloha", "andrea", "anita",
"arcade", "aurora", "avalon", "baby", "baggage", "balloon",
"bank", "basil", "begin", "biscuit", "blue", "bombay",
"brain", "brenda", "brigade", "cable", "carmen", "cello",
"celtic", "chariot", "chrome", "citrus", "civil", "cloud",
"common", "compare", "cool", "copper", "coral", "crater",
"cubic", "cupid", "cycle", "depend", "door", "dream",
"dynasty", "edison", "edition", "enigma", "equal", "eric",
"event", "evita", "exodus", "extend", "famous", "farmer",
"food", "fossil", "frog", "fruit", "geneva", "gentle",
"george", "giant", "gilbert", "gossip", "gram", "greek",
"grille", "hammer", "harvest", "hazard", "heaven", "herbert",
"heroic", "hexagon", "husband", "immune", "inca", "inch",
"initial", "isabel", "ivory", "jason", "jerome", "joel",
"joshua", "journal", "judge", "juliet", "jump", "justice",
"kimono", "kinetic", "leonid", "lima", "maze", "medusa",
"member", "memphis", "michael", "miguel", "milan", "mile",
"miller", "mimic", "mimosa", "mission", "monkey", "moral",
"moses", "mouse", "nancy", "natasha", "nebula", "nickel",
"nina", "noise", "orchid", "oregano", "origami", "orinoco",
"orion", "othello", "paper", "paprika", "prelude", "prepare",
"pretend", "profit", "promise", "provide", "puzzle", "remote",
"repair", "reply", "rival", "riviera", "robin", "rose",
"rover", "rudolf", "saga", "sahara", "scholar", "shelter",
"ship", "shoe", "sigma", "sister", "sleep", "smile",
"spain", "spark", "split", "spray", "square", "stadium",
"star", "storm", "story", "strange", "stretch", "stuart",
"subway", "sugar", "sulfur", "summer", "survive", "sweet",
"swim", "table", "taboo", "target", "teacher", "telecom",
"temple", "tibet", "ticket", "tina", "today", "toga",
"tommy", "tower", "trivial", "tunnel", "turtle", "twin",
"uncle", "unicorn", "unique", "update", "valery", "vega",
"version", "voodoo", "warning", "william", "wonder", "year",
"yellow", "young", "absent", "absorb", "accent", "alfonso",
"alias", "ambient", "andy", "anvil", "appear", "apropos",
"archer", "ariel", "armor", "arrow", "austin", "avatar",
"axis", "baboon", "bahama", "bali", "balsa", "bazooka",
"beach", "beast", "beatles", "beauty", "before", "benny",
"betty", "between", "beyond", "billy", "bison", "blast",
"bless", "bogart", "bonanza", "book", "border", "brave",
"bread", "break", "broken", "bucket", "buenos", "buffalo",
"bundle", "button", "buzzer", "byte", "caesar", "camilla",
"canary", "candid", "carrot", "cave", "chant", "child",
"choice", "chris", "cipher", "clarion", "clark", "clever",
"cliff", "clone", "conan", "conduct", "congo", "content",
"costume", "cotton", "cover", "crack", "current", "danube",
"data", "decide", "desire", "detail", "dexter", "dinner",
"dispute", "donor", "druid", "drum", "easy", "eddie",
"enjoy", "enrico", "epoxy", "erosion", "except", "exile",
"explain", "fame", "fast", "father", "felix", "field",
"fiona", "fire", "fish", "flame", "flex", "flipper",
"float", "flood", "floor", "forbid", "forever", "fractal",
"frame", "freddie", "front", "fuel", "gallop", "game",
"garbo", "gate", "gibson", "ginger", "giraffe", "gizmo",
"glass", "goblin", "gopher", "grace", "gray", "gregory",
"grid", "griffin", "ground", "guest", "gustav", "gyro",
"hair", "halt", "harris", "heart", "heavy", "herman",
"hippie", "hobby", "honey", "hope", "horse", "hostel",
"hydro", "imitate", "info", "ingrid", "inside", "invent",
"invest", "invite", "iron", "ivan", "james", "jester",
"jimmy", "join", "joseph", "juice", "julius", "july",
"justin", "kansas", "karl", "kevin", "kiwi", "ladder",
"lake", "laura", "learn", "legacy", "legend", "lesson",
"life", "light", "list", "locate", "lopez", "lorenzo",
"love", "lunch", "malta", "mammal", "margo", "marion",
"mask", "match", "mayday", "meaning", "mercy", "middle",
"mike", "mirror", "modest", "morph", "morris", "nadia",
"nato", "navy", "needle", "neuron", "never", "newton",
"nice", "night", "nissan", "nitro", "nixon", "north",
"oberon", "octavia", "ohio", "olga", "open", "opus",
"orca", "oval", "owner", "page", "paint", "palma",
"parade", "parent", "parole", "paul", "peace", "pearl",
"perform", "phoenix", "phrase", "pierre", "pinball", "place",
"plate", "plato", "plume", "pogo", "point", "polite",
"polka", "poncho", "powder", "prague", "press", "presto",
"pretty", "prime", "promo", "quasi", "quest", "quick",
"quiz", "quota", "race", "rachel", "raja", "ranger",
"region", "remark", "rent", "reward", "rhino", "ribbon",
"rider", "road", "rodent", "round", "rubber", "ruby",
"rufus", "sabine", "saddle", "sailor", "saint", "salt",
"satire", "scale", "scuba", "season", "secure", "shake",
"shallow", "shannon", "shave", "shelf", "sherman", "shine",
"shirt", "side", "sinatra", "sincere", "size", "slalom",
"slow", "small", "snow", "sofia", "song", "sound",
"south", "speech", "spell", "spend", "spoon", "stage",
"stamp", "stand", "state", "stella", "stick", "sting",
"stock", "store", "sunday", "sunset", "support", "sweden",
"swing", "tape", "think", "thomas", "tictac", "time",
"toast", "tobacco", "tonight", "torch", "torso", "touch",
"toyota", "trade", "tribune", "trinity", "triton", "truck",
"trust", "type", "under", "unit", "urban", "urgent",
"user", "value", "vendor", "venice", "verona", "vibrate",
"virgo", "visible", "vista", "vital", "voice", "vortex",
"waiter", "watch", "wave", "weather", "wedding", "wheel",
"whiskey", "wisdom", "deal", "null", "nurse", "quebec",
"reserve", "reunion", "roof", "singer", "verbal", "amen",
"ego", "fax", "jet", "job", "rio", "ski",
"yes"
]
# Reverse lookup table: mnemonic word -> its integer position in _WORDLIST.
_WORD_INDECES = {w: i for i, w in enumerate(_WORDLIST)}
def index_to_word(index):
    """Return the mnemonic word stored at integer position *index*."""
    word = _WORDLIST[index]
    return word
def word_to_index(word):
    """Return the integer position of *word* (raises KeyError if unknown)."""
    position = _WORD_INDECES[word]
    return position
|
Cashiuus/python-mnemonicode
|
mnemonicode/_wordlist.py
|
Python
|
bsd-3-clause
| 21,475
|
[
"Amber",
"BLAST",
"CASINO",
"CRYSTAL",
"Galaxy",
"Jaguar",
"NEURON",
"ORCA",
"Octopus"
] |
7e0c3bd44a3d3f9da6da6ee9904f1d85c2dda5cb7d2590ed67e68de04e160fe1
|
#!/usr/bin/env python
# coding: utf-8
# Project Source: https://github.com/renanvicente/zabbix-web-scenario
# Version: 0.0.1
# Author: Renan Vicente
# Mail: renanvice@gmail.com
# Website: http://www.renanvicente.com
# Github: https://www.github.com/renanvicente
# Linkedin: http://www.linkedin.com/pub/renan-silva/6a/802/59b/en
from pyzabbix import ZabbixAPI
import sys
from re import compile,IGNORECASE
reload(sys)
sys.setdefaultencoding("utf-8")
"""
This is a script to add a web scenario and create a trigger.
"""
# The hostname at which the Zabbix web interface is available
def authentication(server_url,user,password):
    """Log in to the Zabbix API and return an authenticated ZabbixAPI handle.

    Exits the process with status 1 when any argument is missing or the
    login raises, so callers may assume a usable session on return.
    """
    if server_url and user and password:
        ZABBIX_SERVER = server_url
        zapi = ZabbixAPI(ZABBIX_SERVER)
        try:
            # Login to the Zabbix API
            zapi.login(user,password)
            return zapi
        except Exception, e:  # Python 2 except syntax: this module is py2-only
            print(e)
            sys.exit(1)
    else:
        print('Zabbix Server url , user and password are required, try use --help')
        sys.exit(1)
def create_web_scenario(self,name,url,group,hostid,applicationid, url_name='Homepage',status='200'):
    """Create a Zabbix web scenario (webcheck) plus its failure trigger.

    `self` is an authenticated ZabbixAPI instance (the function is written
    in an unbound-method style and invokes ZabbixAPI.do_request on it).
    Exits with status 1 when a scenario with the same name already exists
    or when the API call raises.
    """
    request = ZabbixAPI.do_request(self, 'webcheck.get', params={ "filter": {"name": name}})
    if request['result']:
        print('Host "%s" already registered' % name)
        sys.exit(1)
    else:
        try:
            # One-step scenario: poll `url` every 60s, 3 retries, expect `status`.
            ZabbixAPI.do_request(self, 'webcheck.create',params={"name": name,"hostid": hostid,"applicationid": applicationid, "delay": '60',"retries": '3', "steps": [ { 'name': url_name, 'url': url,'status_codes': status, 'no': '1'} ] } )
            # NOTE(review): passes the module-level `auth` instead of `self` —
            # only works when invoked from __main__; confirm this is intended.
            triggers = create_trigger(auth,name,url,group)
        except Exception, e:  # Python 2 except syntax
            print(e)
            sys.exit(1)
def create_by_file(auth, group, hostid, applicationid, filename):
    """Bulk-create web scenarios from a comma-separated file.

    Each line is `name,url[,url_name]`.  A line with fewer than two fields
    aborts the whole run (exit 1); an unreadable file is reported and the
    function returns without creating anything.
    """
    try:
        file_to_parse = open(filename,'r')
        try:
            for line in file_to_parse:
                values = line.split(',')
                try:
                    name = values[0]
                    url = values[1]
                except IndexError, e:  # Python 2 except syntax
                    print('Need at minimun 2 params Traceback %s:' % e)
                    sys.exit(1)
                try:
                    # Third column (the scenario step name) is optional.
                    url_name = values[2]
                except IndexError:
                    url_name = None
                if url_name:
                    create_web_scenario(auth,name,url,group,hostid,applicationid, url_name)
                else:
                    create_web_scenario(auth,name,url,group,hostid, applicationid)
        finally:
            # Always close the handle, even when a bad line exits early.
            file_to_parse.close()
    except IOError:
        print('could not open the file %s' % filename)
def create_trigger(auth, name, url, group):
    """Register a severity-5 trigger that fires when the web scenario named
    *name* on host-group *group* has failed for at least 120 seconds.

    Returns whatever `auth.trigger.create` returns.
    """
    failure_expression = '{%s:web.test.fail[%s].sum(120)}=1' % (group, name)
    failure_comment = "The website below does not response the HTTP request ( visit website member ) at least 120 seconds, this warning means that the website is down or unstable.\n%s" % url
    result = auth.trigger.create(description=name,
                                 comments=failure_comment,
                                 expression=failure_expression,
                                 priority=5)
    return result
if __name__ == '__main__':
    from optparse import OptionParser

    # CLI: either bulk-create scenarios from a file (-f) or create a single
    # scenario from the individual options.
    parser = OptionParser()
    parser.add_option("-z", "--zabbix", dest="server_url", help='URL for Zabbix Server', metavar='ZABBIX_SERVER')
    parser.add_option('-n', '--name', dest='name', help='Name of the Host', metavar='NAME')
    parser.add_option('-w', '--url-name', dest='url_name', help='URL name', metavar='URL_NAME')
    parser.add_option('--url', dest='url', help='URL', metavar='URL')
    parser.add_option('-s', '--status', dest='status', help='Status Code', metavar='STATUS_CODE')
    parser.add_option('-u', '--user', dest='user', help='User for authentication', metavar='USER')
    parser.add_option('-p', '--password', dest='password', help='Password for authentication', metavar='PASSWORD')
    parser.add_option('-f', '--file', dest='filename', help='File with Name,URL', metavar='FILE')
    parser.add_option('-g', '--group-name', dest='group', help='Host Group Name', metavar='GROUP')
    parser.add_option('-i', '--host-id', dest='hostid', help='Host ID', metavar='HOSTID')
    parser.add_option('-a', '--application-id', dest='applicationid', help='Application ID', metavar='Application ID')
    (options, args) = parser.parse_args()

    auth = authentication(options.server_url, options.user, options.password)
    if options.filename:
        create_by_file(auth, options.group, options.hostid, options.applicationid, options.filename)
    else:
        if not options.group:
            print('Group must be required')
            sys.exit(1)
        if options.status:
            if options.url_name:
                web_scenario = create_web_scenario(auth, options.name, options.url, options.group,
                                                   options.hostid, options.applicationid,
                                                   options.url_name, options.status)
            else:
                # BUG FIX: the original call passed None as `url` and shifted
                # every later positional argument by one (group received the
                # URL, status landed in the wrong parameter).  Passing status
                # by keyword keeps url_name at its default.
                web_scenario = create_web_scenario(auth, options.name, options.url, options.group,
                                                   options.hostid, options.applicationid,
                                                   status=options.status)
        else:
            if options.url_name:
                web_scenario = create_web_scenario(auth, options.name, options.url, options.group,
                                                   options.hostid, options.applicationid, options.url_name)
            else:
                web_scenario = create_web_scenario(auth, options.name, options.url, options.group,
                                                   options.hostid, options.applicationid)
|
renanvicente/zabbix-web-scenario
|
zabbix_web_scenario.py
|
Python
|
apache-2.0
| 4,945
|
[
"VisIt"
] |
f804802024f12329aab141cb85c041b5982394f3c3f9bbeb1b5640e5beb6cfa7
|
#!/usr/bin/env python
##################################################
## DEPENDENCIES
import sys
import os
import os.path
try:
import builtins as builtin
except ImportError:
import __builtin__ as builtin
from os.path import getmtime, exists
import time
import types
from Cheetah.Version import MinCompatibleVersion as RequiredCheetahVersion
from Cheetah.Version import MinCompatibleVersionTuple as RequiredCheetahVersionTuple
from Cheetah.Template import Template
from Cheetah.DummyTransaction import *
from Cheetah.NameMapper import NotFound, valueForName, valueFromSearchList, valueFromFrameOrSearchList
from Cheetah.CacheRegion import CacheRegion
import Cheetah.Filters as Filters
import Cheetah.ErrorCatchers as ErrorCatchers
##################################################
## MODULE CONSTANTS
VFFSL=valueFromFrameOrSearchList
VFSL=valueFromSearchList
VFN=valueForName
currentTime=time.time
__CHEETAH_version__ = '2.4.4'
__CHEETAH_versionTuple__ = (2, 4, 4, 'development', 0)
__CHEETAH_genTime__ = 1406885498.538069
__CHEETAH_genTimestamp__ = 'Fri Aug 1 18:31:38 2014'
__CHEETAH_src__ = '/home/wslee2/models/5-wo/force1plus/openpli3.0/build-force1plus/tmp/work/mips32el-oe-linux/enigma2-plugin-extensions-openwebif-1+git5+3c0c4fbdb28d7153bf2140459b553b3d5cdd4149-r0/git/plugin/controllers/views/web/remotecontrol.tmpl'
__CHEETAH_srcLastModified__ = 'Fri Aug 1 18:30:05 2014'
__CHEETAH_docstring__ = 'Autogenerated by Cheetah: The Python-Powered Template Engine'
if __CHEETAH_versionTuple__ < RequiredCheetahVersionTuple:
raise AssertionError(
'This template was compiled with Cheetah version'
' %s. Templates compiled before version %s must be recompiled.'%(
__CHEETAH_version__, RequiredCheetahVersion))
##################################################
## CLASSES
class remotecontrol(Template):
    """Cheetah-generated template class that renders the <e2remotecontrol>
    XML response (auto-generated code; regenerate from the .tmpl source
    rather than editing by hand)."""

    ##################################################
    ## CHEETAH GENERATED METHODS

    def __init__(self, *args, **KWs):
        super(remotecontrol, self).__init__(*args, **KWs)
        if not self._CHEETAH__instanceInitialized:
            # Forward only the keyword arguments Cheetah's runtime understands.
            cheetahKWArgs = {}
            allowedKWs = 'searchList namespaces filter filtersLib errorCatcher'.split()
            for k,v in KWs.items():
                if k in allowedKWs: cheetahKWArgs[k] = v
            self._initCheetahInstance(**cheetahKWArgs)

    def respond(self, trans=None):
        """Render the template; returns the XML string when buffering into a
        DummyTransaction, otherwise writes through `trans` and returns ""."""

        ## CHEETAH: main method generated for this template
        if (not trans and not self._CHEETAH__isBuffering and not callable(self.transaction)):
            trans = self.transaction # is None unless self.awake() was called
        if not trans:
            trans = DummyTransaction()
            _dummyTrans = True
        else: _dummyTrans = False
        write = trans.response().write
        SL = self._CHEETAH__searchList
        _filter = self._CHEETAH__currentFilter

        ########################################
        ## START - generated method body

        # Install (and cache) the WebSafe output filter for this render.
        _orig_filter_14692602 = _filter
        filterName = u'WebSafe'
        if self._CHEETAH__filters.has_key("WebSafe"):
            _filter = self._CHEETAH__currentFilter = self._CHEETAH__filters[filterName]
        else:
            _filter = self._CHEETAH__currentFilter = \
                self._CHEETAH__filters[filterName] = getattr(self._CHEETAH__filtersLib, filterName)(self).filter
        write(u'''<?xml version="1.0" encoding="UTF-8"?>
<e2remotecontrol>
\t<e2result>''')
        _v = VFFSL(SL,"result",True) # u'$result' on line 4, col 12
        if _v is not None: write(_filter(_v, rawExpr=u'$result')) # from line 4, col 12.
        write(u'''</e2result>
\t<e2resulttext>''')
        _v = VFFSL(SL,"message",True) # u'$message' on line 5, col 16
        if _v is not None: write(_filter(_v, rawExpr=u'$message')) # from line 5, col 16.
        write(u'''</e2resulttext>
</e2remotecontrol>
''')
        _filter = self._CHEETAH__currentFilter = _orig_filter_14692602

        ########################################
        ## END - generated method body

        return _dummyTrans and trans.response().getvalue() or ""

    ##################################################
    ## CHEETAH GENERATED ATTRIBUTES

    # Bookkeeping attributes Cheetah's runtime inspects.
    _CHEETAH__instanceInitialized = False

    _CHEETAH_version = __CHEETAH_version__

    _CHEETAH_versionTuple = __CHEETAH_versionTuple__

    _CHEETAH_genTime = __CHEETAH_genTime__

    _CHEETAH_genTimestamp = __CHEETAH_genTimestamp__

    _CHEETAH_src = __CHEETAH_src__

    _CHEETAH_srcLastModified = __CHEETAH_srcLastModified__

    _mainCheetahMethod_for_remotecontrol= 'respond'
## END CLASS DEFINITION
# Cheetah plumbing: ensure the generated class carries the runtime helper
# methods/attributes Cheetah expects, even when this module is loaded
# standalone (outside a compiled-template import hook).
if not hasattr(remotecontrol, '_initCheetahAttributes'):
    templateAPIClass = getattr(remotecontrol, '_CHEETAH_templateClass', Template)
    templateAPIClass._addCheetahPlumbingCodeToClass(remotecontrol)
# CHEETAH was developed by Tavis Rudd and Mike Orr
# with code, advice and input from many other volunteers.
# For more information visit http://www.CheetahTemplate.org/
##################################################
## if run from command line:
if __name__ == '__main__':
    # Allow rendering the template directly from the command line (debugging aid).
    from Cheetah.TemplateCmdLineIface import CmdLineIface
    CmdLineIface(templateObj=remotecontrol()).run()
|
MOA-2011/enigma2-plugin-extensions-openwebif
|
plugin/controllers/views/web/remotecontrol.py
|
Python
|
gpl-2.0
| 5,252
|
[
"VisIt"
] |
81fcf5524958bc2bd25c9f781bd5dc3eb15cdc07b9dc1b9092d95a052ea5d6f1
|
#!/usr/bin/env python
# -*- coding: utf-8 -*-
""" functions that creates image from a video file
Primarily intended for plotting, returns correlation images ( local or max )
See Also:
------------
@author andrea giovannucci
"""
from builtins import range
import cv2
import logging
import numpy as np
from scipy.ndimage import convolve, generate_binary_structure
from scipy.sparse import coo_matrix
from typing import Any, List, Optional, Tuple
import caiman as cm
from caiman.source_extraction.cnmf.pre_processing import get_noise_fft
from caiman.source_extraction.cnmf.utilities import get_file_size
def max_correlation_image(Y, bin_size: int = 1000, eight_neighbours: bool = True, swap_dim: bool = True) -> np.ndarray:
    """Compute the max-correlation image: the pixel-wise maximum over the
    correlation images of consecutive temporal bins of the movie.

    Args:
        Y: np.ndarray (3D or 4D)
            Input movie data in 3D or 4D format

        bin_size: scalar (integer)
            Length of bin_size (if last bin is smaller than bin_size < 2 bin_size is increased to impose uniform bins)

        eight_neighbours: Boolean
            Use 8 neighbors if true, and 4 if false for 3D data (default = True)
            Use 6 neighbors for 4D data, irrespectively

        swap_dim: Boolean
            True indicates that time is listed in the last axis of Y (matlab format)
            and moves it in the front

    Returns:
        Cn: d1 x d2 [x d3] matrix, max correlation image
    """
    if swap_dim:
        # Move the time axis from last to first position.
        Y = np.transpose(Y, tuple(np.hstack((Y.ndim - 1, list(range(Y.ndim))[:-1]))))

    n_frames = Y.shape[0]
    if n_frames <= bin_size:
        # Whole movie fits in one bin: nothing to maximize over.
        return local_correlations_fft(Y, eight_neighbours=eight_neighbours, swap_dim=False)

    if n_frames % bin_size < bin_size / 2.:
        # A small trailing bin would be noisy: enlarge bin_size so the
        # bins tile the movie (near-)uniformly.
        bin_size = n_frames // (n_frames // bin_size)
    n_bins = n_frames // bin_size
    per_bin = []
    for k in range(n_bins):
        per_bin.append(
            local_correlations_fft(Y[k * bin_size:(k + 1) * bin_size],
                                   eight_neighbours=eight_neighbours,
                                   swap_dim=False))
        logging.debug(k * bin_size)
    return np.max(np.stack(per_bin), axis=0)
#%%
def local_correlations_fft(Y,
                           eight_neighbours: bool = True,
                           swap_dim: bool = True,
                           opencv: bool = True,
                           rolling_window=None) -> np.ndarray:
    """Computes the correlation image for the input dataset Y using a faster FFT based method

    Args:
        Y: np.ndarray (3D or 4D)
            Input movie data in 3D or 4D format

        eight_neighbours: Boolean
            Use 8 neighbors if true, and 4 if false for 3D data (default = True)
            Use 6 neighbors for 4D data, irrespectively

        swap_dim: Boolean
            True indicates that time is listed in the last axis of Y (matlab format)
            and moves it in the front

        opencv: Boolean
            If True process using open cv method

        rolling_window: (undocumented)
            If given, mean/std normalization and the final average are taken
            over a sliding window of this many frames instead of the whole movie.

    Returns:
        Cn: d1 x d2 [x d3] matrix, cross-correlation with adjacent pixels
    """
    if swap_dim:
        # Move the time axis from last to first position.
        Y = np.transpose(Y, tuple(np.hstack((Y.ndim - 1, list(range(Y.ndim))[:-1]))))

    Y = Y.astype('float32')
    if rolling_window is None:
        # Z-score each pixel trace over the whole movie (in place).
        Y -= np.mean(Y, axis=0)
        Ystd = np.std(Y, axis=0)
        Ystd[Ystd == 0] = np.inf   # constant pixels divide to 0, not NaN
        Y /= Ystd
    else:
        # Rolling z-score via cumulative sums over the sliding window.
        Ysum = np.cumsum(Y, axis=0)
        Yrm = (Ysum[rolling_window:] - Ysum[:-rolling_window]) / rolling_window
        Y[:rolling_window] -= Yrm[0]
        Y[rolling_window:] -= Yrm
        del Yrm, Ysum
        Ystd = np.cumsum(Y**2, axis=0)
        Yrst = np.sqrt((Ystd[rolling_window:] - Ystd[:-rolling_window]) / rolling_window)
        Yrst[Yrst == 0] = np.inf
        Y[:rolling_window] /= Yrst[0]
        Y[rolling_window:] /= Yrst
        del Ystd, Yrst

    # Build the neighborhood kernel (center is zero so a pixel is never
    # correlated with itself).
    if Y.ndim == 4:
        if eight_neighbours:
            sz = np.ones((3, 3, 3), dtype='float32')
            sz[1, 1, 1] = 0
        else:
            # yapf: disable
            sz = np.array([[[0, 0, 0], [0, 1, 0], [0, 0, 0]],
                           [[0, 1, 0], [1, 0, 1], [0, 1, 0]],
                           [[0, 0, 0], [0, 1, 0], [0, 0, 0]]],
                          dtype='float32')
            # yapf: enable
    else:
        if eight_neighbours:
            sz = np.ones((3, 3), dtype='float32')
            sz[1, 1] = 0
        else:
            sz = np.array([[0, 1, 0], [1, 0, 1], [0, 1, 0]], dtype='float32')

    if opencv and Y.ndim == 3:
        # Frame-by-frame 2D convolution through OpenCV.
        Yconv = np.stack([cv2.filter2D(img, -1, sz, borderType=0) for img in Y])
        MASK = cv2.filter2D(np.ones(Y.shape[1:], dtype='float32'), -1, sz, borderType=0)
    else:
        Yconv = convolve(Y, sz[np.newaxis, :], mode='constant')
        MASK = convolve(np.ones(Y.shape[1:], dtype='float32'), sz, mode='constant')

    # Average of (pixel * sum-of-neighbors), normalized by how many
    # neighbors each pixel actually has inside the field of view (MASK).
    YYconv = Yconv * Y
    del Y, Yconv
    if rolling_window is None:
        Cn = np.mean(YYconv, axis=0) / MASK
    else:
        YYconv_cs = np.cumsum(YYconv, axis=0)
        del YYconv
        YYconv_rm = (YYconv_cs[rolling_window:] - YYconv_cs[:-rolling_window]) / rolling_window
        del YYconv_cs
        Cn = YYconv_rm / MASK
    return Cn
def local_correlations_multicolor(Y, swap_dim: bool = True) -> np.ndarray:
    """Computes a color-coded correlation image: one channel per neighbor
    orientation (horizontal and the two diagonals), each halved.

    Args:
        Y: np.ndarray (3D)
            Input movie data; 4D input is not supported and raises.

        swap_dim: Boolean
            True indicates that time is listed in the last axis of Y (matlab format)
            and moves it in the front

    Returns:
        rho: (d1-1) x (d2-1) x 3 matrix, cross-correlation with adjacent pixels

    Raises:
        Exception: when Y is 4D.
    """
    if Y.ndim == 4:
        raise Exception('Not Implemented')

    if swap_dim:
        # Move the time axis from last to first position.
        Y = np.transpose(Y, tuple(np.hstack((Y.ndim - 1, list(range(Y.ndim))[:-1]))))

    # Z-score each pixel's temporal trace.
    w_mov = (Y - np.mean(Y, axis=0)) / np.std(Y, axis=0)

    # Mean product of each pixel with its neighbor in three orientations.
    rho_h = np.mean(np.multiply(w_mov[:, :-1, :], w_mov[:, 1:, :]), axis=0)
    rho_d1 = np.mean(np.multiply(w_mov[:, 1:, :-1], w_mov[:, :-1, 1:,]), axis=0)
    rho_d2 = np.mean(np.multiply(w_mov[:, :-1, :-1], w_mov[:, 1:, 1:,]), axis=0)
    # FIX: the original also computed the axis-2 correlation (rho_w) but never
    # used it in the returned stack — that full pass over the movie is dropped.
    return np.dstack([rho_h[:, 1:] / 2, rho_d1 / 2, rho_d2 / 2])
def local_correlations(Y, eight_neighbours: bool = True, swap_dim: bool = True, order_mean=1) -> np.ndarray:
    """Computes the correlation image for the input dataset Y

    Args:
        Y: np.ndarray (3D or 4D)
            Input movie data in 3D or 4D format

        eight_neighbours: Boolean
            Use 8 neighbors if true, and 4 if false for 3D data (default = True)
            Use 6 neighbors for 4D data, irrespectively

        swap_dim: Boolean
            True indicates that time is listed in the last axis of Y (matlab format)
            and moves it in the front

        order_mean: (undocumented)
            Exponent of the generalized mean over neighbor correlations;
            0 selects the geometric mean (product then 1/num_neighbors power).

    Returns:
        rho: d1 x d2 [x d3] matrix, cross-correlation with adjacent pixels
    """
    if swap_dim:
        # Move the time axis from last to first position.
        Y = np.transpose(Y, tuple(np.hstack((Y.ndim - 1, list(range(Y.ndim))[:-1]))))

    rho = np.zeros(np.shape(Y)[1:])
    # Z-score each pixel's temporal trace.
    w_mov = (Y - np.mean(Y, axis=0)) / np.std(Y, axis=0)

    # Correlation with the neighbor along axis 1 (rho_h) and axis 2 (rho_w).
    rho_h = np.mean(np.multiply(w_mov[:, :-1, :], w_mov[:, 1:, :]), axis=0)
    rho_w = np.mean(np.multiply(w_mov[:, :, :-1], w_mov[:, :, 1:]), axis=0)

    # yapf: disable
    # Accumulate each pairwise correlation into BOTH endpoints of the pair:
    # multiplicatively for the geometric mean (order_mean == 0), additively
    # (after raising to order_mean) otherwise.
    if order_mean == 0:
        rho = np.ones(np.shape(Y)[1:])
        rho_h = rho_h
        rho_w = rho_w
        rho[:-1, :] = rho[:-1, :] * rho_h
        rho[1:, :] = rho[1:, :] * rho_h
        rho[:, :-1] = rho[:, :-1] * rho_w
        rho[:, 1:] = rho[:, 1:] * rho_w
    else:
        rho[:-1, :] = rho[:-1, :] + rho_h**(order_mean)
        rho[1:, :] = rho[1:, :] + rho_h**(order_mean)
        rho[:, :-1] = rho[:, :-1] + rho_w**(order_mean)
        rho[:, 1:] = rho[:, 1:] + rho_w**(order_mean)

    if Y.ndim == 4:
        # 3D volume: add the axis-3 neighbor and count 6-connectivity,
        # decrementing the neighbor count on every boundary face.
        rho_d = np.mean(np.multiply(w_mov[:, :, :, :-1], w_mov[:, :, :, 1:]), axis=0)
        rho[:, :, :-1] = rho[:, :, :-1] + rho_d
        rho[:, :, 1:] = rho[:, :, 1:] + rho_d

        neighbors = 6 * np.ones(np.shape(Y)[1:])
        neighbors[0] = neighbors[0] - 1
        neighbors[-1] = neighbors[-1] - 1
        neighbors[:, 0] = neighbors[:, 0] - 1
        neighbors[:, -1] = neighbors[:, -1] - 1
        neighbors[:, :, 0] = neighbors[:, :, 0] - 1
        neighbors[:, :, -1] = neighbors[:, :, -1] - 1
    else:
        if eight_neighbours:
            # Diagonal neighbor correlations for 8-connectivity.
            rho_d1 = np.mean(np.multiply(w_mov[:, 1:, :-1], w_mov[:, :-1, 1:,]), axis=0)
            rho_d2 = np.mean(np.multiply(w_mov[:, :-1, :-1], w_mov[:, 1:, 1:,]), axis=0)

            if order_mean == 0:
                rho_d1 = rho_d1
                rho_d2 = rho_d2
                rho[:-1, :-1] = rho[:-1, :-1] * rho_d2
                rho[1:, 1:] = rho[1:, 1:] * rho_d1
                rho[1:, :-1] = rho[1:, :-1] * rho_d1
                rho[:-1, 1:] = rho[:-1, 1:] * rho_d2
            else:
                rho[:-1, :-1] = rho[:-1, :-1] + rho_d2**(order_mean)
                rho[1:, 1:] = rho[1:, 1:] + rho_d1**(order_mean)
                rho[1:, :-1] = rho[1:, :-1] + rho_d1**(order_mean)
                rho[:-1, 1:] = rho[:-1, 1:] + rho_d2**(order_mean)

            # Neighbor counts: 8 inside, fewer on edges/corners.
            neighbors = 8 * np.ones(np.shape(Y)[1:3])
            neighbors[0, :] = neighbors[0, :] - 3
            neighbors[-1, :] = neighbors[-1, :] - 3
            neighbors[:, 0] = neighbors[:, 0] - 3
            neighbors[:, -1] = neighbors[:, -1] - 3
            neighbors[0, 0] = neighbors[0, 0] + 1
            neighbors[-1, -1] = neighbors[-1, -1] + 1
            neighbors[-1, 0] = neighbors[-1, 0] + 1
            neighbors[0, -1] = neighbors[0, -1] + 1
        else:
            neighbors = 4 * np.ones(np.shape(Y)[1:3])
            neighbors[0, :] = neighbors[0, :] - 1
            neighbors[-1, :] = neighbors[-1, :] - 1
            neighbors[:, 0] = neighbors[:, 0] - 1
            neighbors[:, -1] = neighbors[:, -1] - 1
    # yapf: enable

    # Normalize by the per-pixel neighbor count to get the generalized mean.
    if order_mean == 0:
        rho = np.power(rho, 1. / neighbors)
    else:
        rho = np.power(np.divide(rho, neighbors), 1 / order_mean)

    return rho
def correlation_pnr(Y, gSig=None, center_psf: bool = True, swap_dim: bool = True,
                    background_filter: str = 'disk') -> Tuple[np.ndarray, np.ndarray]:
    """
    compute the correlation image and the peak-to-noise ratio (PNR) image.
    If gSig is provided, then spatially filtered the video.

    Args:
        Y:  np.ndarray (3D or 4D).
            Input movie data in 3D or 4D format

        gSig:  scalar or vector.
            gaussian width. If gSig == None, no spatial filtering

        center_psf: Boolean
            True indicates subtracting the mean of the filtering kernel

        swap_dim: Boolean
            True indicates that time is listed in the last axis of Y (matlab format)
            and moves it in the front

        background_filter: str
            (undocumented)

    Returns:
        cn: np.ndarray (2D or 3D).
            local correlation image of the spatially filtered (or not)
            data

        pnr: np.ndarray (2D or 3D).
            peak-to-noise ratios of all pixels/voxels
    """
    if swap_dim:
        # Move the time axis from last to first position.
        Y = np.transpose(Y, tuple(np.hstack((Y.ndim - 1, list(range(Y.ndim))[:-1]))))

    # parameters
    _, d1, d2 = Y.shape
    data_raw = Y.reshape(-1, d1, d2).astype('float32')

    # filter data
    data_filtered = data_raw.copy()
    if gSig:
        if not isinstance(gSig, list):
            gSig = [gSig, gSig]
        # Odd kernel size per axis, derived from the gaussian width.
        ksize = tuple([int(2 * i) * 2 + 1 for i in gSig])

        if center_psf:
            if background_filter == 'box':
                # Gaussian blur minus a box-filter background estimate.
                for idx, img in enumerate(data_filtered):
                    data_filtered[idx, ] = cv2.GaussianBlur(
                        img, ksize=ksize, sigmaX=gSig[0], sigmaY=gSig[1], borderType=1) \
                        - cv2.boxFilter(img, ddepth=-1, ksize=ksize, borderType=1)
            else:
                # Zero-mean gaussian PSF restricted to its central lobe.
                psf = cv2.getGaussianKernel(ksize[0], gSig[0],
                                            cv2.CV_32F).dot(cv2.getGaussianKernel(ksize[1], gSig[1], cv2.CV_32F).T)
                ind_nonzero = psf >= psf[0].max()
                psf -= psf[ind_nonzero].mean()
                psf[~ind_nonzero] = 0
                for idx, img in enumerate(data_filtered):
                    data_filtered[idx,] = cv2.filter2D(img, -1, psf, borderType=1)

            # data_filtered[idx, ] = cv2.filter2D(img, -1, psf, borderType=1)
        else:
            for idx, img in enumerate(data_filtered):
                data_filtered[idx,] = cv2.GaussianBlur(img, ksize=ksize, sigmaX=gSig[0], sigmaY=gSig[1], borderType=1)

    # compute peak-to-noise ratio
    data_filtered -= data_filtered.mean(axis=0)
    data_max = np.max(data_filtered, axis=0)
    data_std = get_noise_fft(data_filtered.T, noise_method='mean')[0].T
    pnr = np.divide(data_max, data_std)
    pnr[pnr < 0] = 0

    # remove small values
    tmp_data = data_filtered.copy() / data_std
    tmp_data[tmp_data < 3] = 0

    # compute correlation image
    cn = local_correlations_fft(tmp_data, swap_dim=False)

    return cn, pnr
def iter_chunk_array(arr: np.array, chunk_size: int):
    """Yield successive slices of *arr* along axis 0.

    All slices have exactly `chunk_size` rows except the last, which absorbs
    the remainder (so it may be up to ``2 * chunk_size - 1`` rows).  An array
    shorter than two chunks is yielded whole.
    """
    n_full = (arr.shape[0] // chunk_size) - 1
    if n_full <= 0:
        yield arr
        return
    start = 0
    for _ in range(n_full):
        yield arr[start:start + chunk_size]
        start += chunk_size
    yield arr[start:]
def correlation_image_ecobost(mov, chunk_size: int = 1000, dview=None):
    """ Compute correlation image as Erick. Removes the mean from each chunk
    before computing the correlation

    Args:
        mov: ndarray or list of str
            time x w x h

        chunk_size: int
            number of frames over which to compute the correlation (not working if
            passing list of string)

        dview: (undocumented)
            optional ipyparallel-style view used to map map_corr over the list
            of movie files; ignored for ndarray input.
    """
    # MAP: gather per-chunk sufficient statistics (sum, sum of squares,
    # neighbor cross-products, frame count) via map_corr.
    if type(mov) is list:
        if dview is not None:
            res = dview.map(map_corr, mov)
        else:
            res = map(map_corr, mov)
    else:
        scan = mov.astype(np.float32)
        num_frames = scan.shape[0]
        res = map(map_corr, iter_chunk_array(scan, chunk_size))

    # REDUCE: sum each statistic over all chunks.
    sum_x, sum_sqx, sum_xy, num_frames = [np.sum(np.array(a), 0) for a in zip(*res)]

    denom_factor = np.sqrt(num_frames * sum_sqx - sum_x**2)
    corrs = np.zeros(sum_xy.shape)
    # Handle the 8 neighbor orientations by rotating the arrays; writes into
    # rotated_corrs go through a np.rot90 view, so they land in `corrs`.
    for k in [0, 1, 2, 3]:
        rotated_corrs = np.rot90(corrs, k=k)
        rotated_sum_x = np.rot90(sum_x, k=k)
        rotated_dfactor = np.rot90(denom_factor, k=k)
        rotated_sum_xy = np.rot90(sum_xy, k=k)

        # Compute correlation
        rotated_corrs[1:, :, k] = (num_frames * rotated_sum_xy[1:, :, k] -
                                   rotated_sum_x[1:] * rotated_sum_x[:-1]) /\
                                  (rotated_dfactor[1:] * rotated_dfactor[:-1])
        rotated_corrs[1:, 1:, 4 + k] = (num_frames * rotated_sum_xy[1:, 1:, 4 + k]
                                        - rotated_sum_x[1:, 1:] * rotated_sum_x[:-1, : -1]) /\
                                       (rotated_dfactor[1:, 1:] * rotated_dfactor[:-1, :-1])

        # Return back to original orientation
        corrs = np.rot90(rotated_corrs, k=4 - k)
        sum_x = np.rot90(rotated_sum_x, k=4 - k)
        denom_factor = np.rot90(rotated_dfactor, k=4 - k)
        sum_xy = np.rot90(rotated_sum_xy, k=4 - k)

    correlation_image = np.sum(corrs, axis=-1)

    # Normalize by the number of neighbors each pixel actually has:
    # edges
    norm_factor = 5 * np.ones(correlation_image.shape)
    # corners
    norm_factor[[0, -1, 0, -1], [0, -1, -1, 0]] = 3
    # center
    norm_factor[1:-1, 1:-1] = 8
    correlation_image /= norm_factor

    return correlation_image
def map_corr(scan) -> Tuple[Any, Any, Any, int]:
    '''This part of the code is in a mapping function that's run over different
    movies in parallel

    Returns per-chunk sufficient statistics:
    (sum_x, sum_x^2, 8-orientation neighbor cross-product sums, num_frames).
    '''
    # TODO: Tighten prototype above
    if type(scan) is str:
        # A string argument is a path to a movie file; load it first.
        scan = cm.load(scan)

    # h x w x num_frames
    chunk = np.array(scan).transpose([1, 2, 0])

    # Subtract overall brightness per frame
    chunk -= chunk.mean(axis=(0, 1))

    # Compute sum_x and sum_x^2
    chunk_sum = np.sum(chunk, axis=-1, dtype=float)
    chunk_sqsum = np.sum(chunk**2, axis=-1, dtype=float)

    # Compute sum_xy: Multiply each pixel by its eight neighbors
    chunk_xysum = np.zeros((chunk.shape[0], chunk.shape[1], 8))
    # amount of 90 degree rotations; writes into rotated_xysum go through a
    # np.rot90 view, so they land in chunk_xysum.
    for k in [0, 1, 2, 3]:
        rotated_chunk = np.rot90(chunk, k=k)
        rotated_xysum = np.rot90(chunk_xysum, k=k)

        # Multiply each pixel by one above and by one above to the left
        rotated_xysum[1:, :, k] = np.sum(rotated_chunk[1:] * rotated_chunk[:-1], axis=-1, dtype=float)
        rotated_xysum[1:, 1:, 4 + k] = np.sum(rotated_chunk[1:, 1:] * rotated_chunk[:-1, :-1], axis=-1, dtype=float)

        # Return back to original orientation
        chunk = np.rot90(rotated_chunk, k=4 - k)
        chunk_xysum = np.rot90(rotated_xysum, k=4 - k)

    num_frames = chunk.shape[-1]

    return chunk_sum, chunk_sqsum, chunk_xysum, num_frames
def prepare_local_correlations(Y, swap_dim: bool = False,
                               eight_neighbours: bool = False) -> Tuple[Any, Any, Any, Any, Any, Any, Any, Any]:
    """Computes the correlation image and some statistics to update it online

    Args:
        Y:  np.ndarray (3D or 4D)
            Input movie data in 3D or 4D format

        swap_dim: Boolean
            True indicates that time is listed in the last axis of Y (matlab format)
            and moves it in the front

        eight_neighbours: Boolean
            Use 8 neighbors if true, and 4 if false for 3D data
            Use 18 neighbors if true, and 6 if false for 4D data

    Returns:
        (first_moment, second_moment, crosscorr, col_ind, row_ind,
         num_neigbors, M, cn) — per-pixel temporal moments, neighbor
        cross-correlations, the sparse neighbor-pair index structure, and the
        resulting correlation image cn.
    """
    # TODO: Tighten prototype above
    if swap_dim:
        Y = np.transpose(Y, (Y.ndim - 1,) + tuple(range(Y.ndim - 1)))

    T = len(Y)
    dims = Y.shape[1:]
    # Fortran-order flattening: pixel p maps to Yr[p] = its temporal trace.
    Yr = Y.T.reshape(-1, T)
    if Y.ndim == 4:
        d1, d2, d3 = dims
        # 3D structuring element (center removed below).
        sz = generate_binary_structure(3, 2 if eight_neighbours else 1)
        sz[1, 1, 1] = 0
    else:
        d1, d2 = dims
        if eight_neighbours:
            sz = np.ones((3, 3), dtype='uint8')
            sz[1, 1] = 0
        else:
            sz = np.array([[0, 1, 0], [1, 0, 1], [0, 1, 0]], dtype='uint8')

    # Per-axis offsets (-1/0/+1) of every neighbor in the structuring element.
    idx = [i - 1 for i in np.nonzero(sz)]

    def get_indices_of_neighbors(pixel):
        # Flat (Fortran-order) indices of the in-bounds neighbors of `pixel`.
        pixel = np.unravel_index(pixel, dims, order='F')
        x = pixel[0] + idx[0]
        y = pixel[1] + idx[1]
        if len(dims) == 3:
            z = pixel[2] + idx[2]
            inside = (x >= 0) * (x < d1) * (y >= 0) * (y < d2) * (z >= 0) * (z < d3)
            return np.ravel_multi_index((x[inside], y[inside], z[inside]), dims, order='F')
        else:
            inside = (x >= 0) * (x < d1) * (y >= 0) * (y < d2)
            return np.ravel_multi_index((x[inside], y[inside]), dims, order='F')

    # more compact but slower code
    # idx = np.asarray([i - 1 for i in np.nonzero(sz)])
    # def get_indices_of_neighbors(pixel):
    #     pixel = np.asarray(np.unravel_index(pixel, dims, order='F'))
    #     xyz = pixel[:, None] + idx
    #     inside = np.all([(x >= 0) * (x < d) for (x, d) in zip(xyz, dims)], 0)
    #     return np.ravel_multi_index(xyz[:, inside], dims, order='F')

    N = [get_indices_of_neighbors(p) for p in range(np.prod(dims))]

    # Flattened (pixel, neighbor) pair lists for the sparse matrix below.
    col_ind = np.concatenate(N)
    row_ind = np.concatenate([[i] * len(k) for i, k in enumerate(N)])
    num_neigbors = np.concatenate([[len(k)] * len(k) for k in N]).astype(Yr.dtype)

    # Temporal sufficient statistics per pixel / per neighbor pair.
    first_moment = Yr.mean(1)
    second_moment = (Yr**2).mean(1)
    crosscorr = np.mean(Yr[row_ind] * Yr[col_ind], 1)
    # slower for small T, less memory intensive, but memory not an issue:
    # crosscorr = np.array([Yr[r_].dot(Yr[c_])
    #                       for (r_, c_) in zip(row_ind, col_ind)]) / Yr.shape[1]
    sig = np.sqrt(second_moment - first_moment**2)

    # Sparse matrix of pairwise Pearson correlations weighted by 1/num_neigbors;
    # row sums therefore give the mean neighbor correlation per pixel.
    M = coo_matrix(
        ((crosscorr - first_moment[row_ind] * first_moment[col_ind]) / (sig[row_ind] * sig[col_ind]) / num_neigbors,
         (row_ind, col_ind)),
        dtype=Yr.dtype)
    cn = M.dot(np.ones(M.shape[1], dtype=M.dtype)).reshape(dims, order='F')

    return first_moment, second_moment, crosscorr, col_ind, row_ind, num_neigbors, M, cn
def update_local_correlations(t,
                              frames,
                              first_moment,
                              second_moment,
                              crosscorr,
                              col_ind,
                              row_ind,
                              num_neigbors,
                              M,
                              del_frames=None) -> np.ndarray:
    """Updates sufficient statistics in place and returns correlation image.

    Args:
        t: int
            Effective number of frames covered by the moving average
            (window length for a simple window, elapsed frames for a
            cumulative average).
        frames: np.ndarray (stride, d1, d2[, d3])
            New frames entering the average.
        first_moment: np.ndarray (prod(dims),)
            Running per-pixel mean; updated in place.
        second_moment: np.ndarray (prod(dims),)
            Running per-pixel mean of squares; updated in place.
        crosscorr: np.ndarray (len(row_ind),)
            Running mean of products of neighboring pixels; updated in place.
        col_ind, row_ind: np.ndarray
            Flat pixel indices of the neighbor pairs (Fortran order).
        num_neigbors: np.ndarray
            Number of neighbors for each (row, col) entry.
        M: scipy.sparse.coo_matrix
            Sparse correlation matrix; its .data is overwritten in place.
        del_frames: np.ndarray or None
            Frames leaving the window (simple moving window). None means
            the old statistics are decayed exponentially instead.

    Returns:
        np.ndarray: updated local correlation image of shape frames.shape[1:].
    """
    dims = frames.shape[1:]
    stride = len(frames)
    if stride:
        # flatten each frame in Fortran order: row t of the result is
        # frames[t].ravel(order='F')
        frames = frames.reshape((stride, -1), order='F')
        if del_frames is None:
            # exponential moving average: decay the old statistics
            tmp = 1 - float(stride) / t
            first_moment *= tmp
            second_moment *= tmp
            crosscorr *= tmp
        else:
            # simple moving window: subtract the contribution of the
            # frames that leave the window
            if stride > 10:
                del_frames = del_frames.reshape((stride, -1), order='F')
                first_moment -= del_frames.sum(0) / t
                second_moment -= (del_frames**2).sum(0) / t
                crosscorr -= np.sum(del_frames[:, row_ind] * del_frames[:, col_ind], 0) / t
            else:  # loop is faster for few frames
                for f in del_frames:
                    f = f.ravel(order='F')
                    first_moment -= f / t
                    second_moment -= (f**2) / t
                    crosscorr -= (f[row_ind] * f[col_ind]) / t
        # add the contribution of the new frames (frames is already 2D here)
        if stride > 10:
            first_moment += frames.sum(0) / t
            second_moment += (frames**2).sum(0) / t
            crosscorr += np.sum(frames[:, row_ind] * frames[:, col_ind], 0) / t
        else:  # loop is faster for few frames
            for f in frames:
                f = f.ravel(order='F')
                first_moment += f / t
                second_moment += (f**2) / t
                crosscorr += (f[row_ind] * f[col_ind]) / t
    # normalize cross-correlations into Pearson correlations and average
    # each pixel's correlations with its neighbors
    sig = np.sqrt(second_moment - first_moment**2)
    M.data = ((crosscorr - first_moment[row_ind] * first_moment[col_ind]) / (sig[row_ind] * sig[col_ind]) /
              num_neigbors)
    cn = M.dot(np.ones(M.shape[1], dtype=M.dtype)).reshape(dims, order='F')
    return cn
def local_correlations_movie(file_name,
                             tot_frames: Optional[int] = None,
                             fr: int = 30,
                             window: int = 30,
                             stride: int = 1,
                             swap_dim: bool = False,
                             eight_neighbours: bool = True,
                             mode: str = 'simple'):
    """
    Compute an online correlation image as moving average

    Args:
        file_name: string or np.ndarray (3D or 4D).
            Input movie filename or data

        tot_frames: int
            Number of frames considered

        fr: int
            Frame rate

        window: int
            Window length in frames

        stride: int
            Stride length in frames

        swap_dim: Boolean
            True indicates that time is listed in the last axis of Y (matlab format)
            and moves it in the front

        eight_neighbours: Boolean
            Use 8 neighbors if true, and 4 if false for 3D data
            Use 18 neighbors if true, and 6 if false for 4D data

        mode: 'simple', 'exponential', or 'cumulative'
            Mode of moving average

    Returns:
        corr_movie: cm.movie (3D or 4D).
            local correlation movie
    """
    Y = cm.load(file_name) if type(file_name) is str else file_name
    Y = Y[..., :tot_frames] if swap_dim else Y[:tot_frames]
    # seed the sufficient statistics with the first window of frames
    first_moment, second_moment, crosscorr, col_ind, row_ind, num_neigbors, M, cn = \
        prepare_local_correlations(Y[..., :window] if swap_dim else Y[:window],
                                   swap_dim=swap_dim, eight_neighbours=eight_neighbours)
    if swap_dim:
        Y = np.transpose(Y, (Y.ndim - 1,) + tuple(range(Y.ndim - 1)))
    T = len(Y)
    dims = Y.shape[1:]
    corr_movie = np.zeros(((T - window) // stride + 1,) + dims, dtype=Y.dtype)
    corr_movie[0] = cn
    if mode == 'simple':
        # moving window: the frames entering the window are added and the
        # frames leaving it are passed as del_frames to be subtracted.
        # (Previously cn and the leaving frames were passed as extra
        # positional arguments after M, which raised a TypeError.)
        for tt in range((T - window) // stride):
            corr_movie[tt + 1] = update_local_correlations(
                window, Y[tt * stride + window:(tt + 1) * stride + window],
                first_moment, second_moment, crosscorr, col_ind, row_ind,
                num_neigbors, M, del_frames=Y[tt * stride:(tt + 1) * stride])
    elif mode == 'exponential':
        # decay old statistics instead of subtracting exact old frames
        for tt, frames in enumerate(Y[window:window + (T - window) // stride * stride].reshape((-1, stride) + dims)):
            corr_movie[tt + 1] = update_local_correlations(window, frames, first_moment, second_moment, crosscorr,
                                                           col_ind, row_ind, num_neigbors, M)
    elif mode == 'cumulative':
        # growing window: t increases with every chunk of frames
        for tt, frames in enumerate(Y[window:window + (T - window) // stride * stride].reshape((-1, stride) + dims)):
            corr_movie[tt + 1] = update_local_correlations(tt + window + 1, frames, first_moment, second_moment,
                                                           crosscorr, col_ind, row_ind, num_neigbors, M)
    else:
        raise Exception('mode of the moving average must be simple, exponential or cumulative')
    return cm.movie(corr_movie, fr=fr)
def local_correlations_movie_offline(file_name,
                                     Tot_frames=None,
                                     fr: float = 10.,
                                     window: int = 100,
                                     stride: int = 100,
                                     swap_dim: bool = False,
                                     eight_neighbours: bool = True,
                                     order_mean: int = 1,
                                     ismulticolor: bool = False,
                                     dview=None,
                                     remove_baseline: bool = False,
                                     winSize_baseline: int = 50,
                                     quantil_min_baseline: float = 8,
                                     gaussian_blur: bool=False):
    """
    Efficient (parallel) computation of correlation image in shifting windows
    with option for prior baseline removal

    Args:
        file_name: str
            path to movie file

        Tot_frames: int
            Number of total frames considered

        fr: int (100)
            Frame rate (optional)

        window: int (100)
            Window length in frames

        stride: int (30)
            Stride length in frames

        swap_dim: bool (False)
            True indicates that time is listed in the last axis of Y (matlab format)
            and moves it in the front (default: False)

        eight_neighbours: Boolean
            Use 8 neighbors if true, and 4 if false for 3D data
            Use 18 neighbors if true, and 6 if false for 4D data

        dview: map object
            Use it for parallel computation

        remove_baseline: bool (False)
            Flag for removing baseline prior to computation of CI

        winSize_baseline: int (50)
            Running window length for computing baseline

        quantil_min_baseline: float (8)
            Percentile used for baseline computations

        gaussian_blur: bool (False)
            Gaussian smooth the signal

    Returns:
        mm: cm.movie (3D or 4D).
            local correlation movie
    """
    if Tot_frames is None:
        _, Tot_frames = get_file_size(file_name)

    # one worker argument list per window; the final window is anchored to
    # the end of the movie so that every frame is covered
    starts = list(range(0, Tot_frames - window, stride)) + [Tot_frames - window]
    params: List = [[file_name, range(start, start + window), eight_neighbours, swap_dim,
                     order_mean, ismulticolor, remove_baseline, winSize_baseline,
                     quantil_min_baseline, gaussian_blur]
                    for start in starts]

    # dispatch the windows serially or to the available parallel backend
    if dview is None:
        parallel_result = [local_correlations_movie_parallel(p) for p in params]
    elif 'multiprocessing' in str(type(dview)):
        parallel_result = dview.map_async(local_correlations_movie_parallel, params).get(4294967)
    else:
        parallel_result = dview.map_sync(local_correlations_movie_parallel, params)
        dview.results.clear()

    mm = cm.movie(np.concatenate(parallel_result, axis=0), fr=fr / len(parallel_result))
    return mm
def local_correlations_movie_parallel(params: Tuple) -> np.ndarray:
    """Worker computing the correlation image of one chunk of a movie.

    params is one argument list built by local_correlations_movie_offline:
    (movie name, frame indices, eight_neighbours, swap_dim, order_mean,
     ismulticolor, remove_baseline, winSize_baseline,
     quantil_min_baseline, gaussian_blur).

    Returns the correlation image with a leading singleton time axis,
    as float32.
    """
    (mv_name, idx, eight_neighbours, swap_dim, order_mean, ismulticolor,
     remove_baseline, winSize_baseline, quantil_min_baseline, gaussian_blur) = params
    mv = cm.load(mv_name, subindices=idx, in_memory=True)
    if gaussian_blur:
        mv = mv.gaussian_blur_2D()
    if remove_baseline:
        # subtract a running-percentile baseline before correlating
        mv.removeBL(quantilMin=quantil_min_baseline, windowSize=winSize_baseline, in_place=True)
    if ismulticolor:
        ci = local_correlations_multicolor(mv, swap_dim=swap_dim)
    else:
        ci = local_correlations(mv, eight_neighbours=eight_neighbours, swap_dim=swap_dim,
                                order_mean=order_mean)
    return ci[None, :, :].astype(np.float32)
def mean_image(file_name,
               Tot_frames=None,
               fr: float = 10.,
               window: int = 100,
               dview=None):
    """
    Efficient (parallel) computation of mean image in chunks

    Args:
        file_name: str
            path to movie file

        Tot_frames: int
            Number of total frames considered

        fr: int (100)
            Frame rate (optional)

        window: int (100)
            Window length in frames

        dview: map object
            Use it for parallel computation

    Returns:
        mm: cm.movie (2D).
            mean image
    """
    if Tot_frames is None:
        _, Tot_frames = get_file_size(file_name)

    # split the movie into full windows plus an optional shorter remainder
    num_windows = int(Tot_frames / window)
    params: List = [[file_name, range(j * window, (j + 1) * window)]
                    for j in range(num_windows)]
    remain_frames = Tot_frames - num_windows * window
    if remain_frames > 0:
        params.append([file_name, range(num_windows * window, Tot_frames)])

    # compute per-chunk means serially or via the parallel backend
    if dview is None:
        parallel_result = [mean_image_parallel(p) for p in params]
    elif 'multiprocessing' in str(type(dview)):
        parallel_result = dview.map_async(mean_image_parallel, params).get(4294967)
    else:
        parallel_result = dview.map_sync(mean_image_parallel, params)
        dview.results.clear()

    mm = cm.movie(np.concatenate(parallel_result, axis=0), fr=fr / len(parallel_result))
    if remain_frames > 0:
        # weighted average: the last chunk covers fewer frames, so weight
        # it by its relative length
        weight = remain_frames / window
        mean_image = (mm[:-1].sum(axis=0) + weight * mm[-1]) / (len(mm) - 1 + weight)
    else:
        mean_image = mm.mean(axis=0)
    return mean_image
def mean_image_parallel(params: Tuple) -> np.ndarray:
    """Worker returning the mean frame of one movie chunk, with a leading
    singleton time axis so the results can be concatenated."""
    mv_name, idx = params
    chunk = cm.load(mv_name, subindices=idx, in_memory=True)
    return chunk.mean(axis=0)[np.newaxis, :, :]
|
agiovann/Constrained_NMF
|
caiman/summary_images.py
|
Python
|
gpl-2.0
| 33,013
|
[
"Gaussian"
] |
ed0fcdd58264293ba0029da034a8041ffdc19fd001e33f8947c26837695c9f1d
|
#!/usr/bin/python
#
# THIS FILE IS PART OF THE JOKOSHER PROJECT AND LICENSED UNDER THE GPL. SEE
# THE 'COPYING' FILE FOR DETAILS
#
# Jokosher's main class. It creates the majority of the main window GUI
# and gets everything up and running.
#
#-------------------------------------------------------------------------------
import pygtk
pygtk.require("2.0")
import gtk.glade, gobject
import sys
import os.path
import pygst
pygst.require("0.10")
import gst
from subprocess import Popen, PIPE
import gettext
_ = gettext.gettext
import AddInstrumentDialog, TimeView, Workspace
import PreferencesDialog, ExtensionManagerDialog, RecordingView, NewProjectDialog
import ProjectManager, Globals, WelcomeDialog
import InstrumentConnectionsDialog
import EffectPresets, Extension, ExtensionManager
import Utils, AudioPreview, MixdownProfileDialog, MixdownActions
import PlatformUtils
import ui.StatusBar as StatusBar
#=========================================================================
class MainApp:
"""
Jokosher's main class. It creates the majority of the main window GUI and
gets everything up and running.
"""
# Class Constants
""" Constant value used to indicate Jokosher's recording mode """
MODE_RECORDING = 1
""" Constant value used to indicate Jokosher's mixing mode """
MODE_COMPACT_MIX = 2
#_____________________________________________________________________
def __init__(self, openproject=None, loadExtensions=True, startuptype=None):
    """
    Creates a new instance of MainApp.

    Builds the main window from the Glade description, connects all
    signal handlers, initializes the playback/recording state, restores
    the saved window geometry, and finally decides which project (if any)
    to open based on the command line and preferences.

    Parameters:
        openproject -- filename of the project to open at startup.
        loadExtensions -- whether the extensions should be loaded.
        startuptype -- determines the startup state of Jokosher:
                0 = Open the project referred by the openproject parameter.
                1 = Do not display the welcome dialog or open the previous project.
                2 = Display the welcome dialog.
    """
    # create tooltip messages for buttons
    self.recTipEnabled = _("Stop recording")
    self.recTipDisabled = _("Arm an instrument, then click here to begin recording")
    self.recStopTipEnabled = _("Stop recording")
    self.recStopTipDisabled = _("Stop playback")
    self.mixingViewEnabledTip = _("Hide the audio level mixers")
    self.mixingViewDisabledTip = _("Show the audio level mixers")

    # set up translation for the Glade-built UI
    gtk.glade.bindtextdomain(Globals.LOCALE_APP, Globals.LOCALE_PATH)
    gtk.glade.textdomain(Globals.LOCALE_APP)

    self.wTree = gtk.glade.XML(Globals.GLADE_PATH, "MainWindow")

    # Connect event handlers (handler names referenced here are methods
    # of this class; some are defined further down the file)
    signals = {
        "on_MainWindow_destroy" : self.OnDestroy,
        "on_MainWindow_configure_event" : self.OnResize,
        "on_AddInstrument_clicked" : self.OnShowAddInstrumentDialog,
        "on_About_activate" : self.About,
        "on_Record_toggled" : self.Record,
        "on_Play_clicked" : self.Play,
        "on_Stop_clicked" : self.Stop,
        "on_CompactMix_toggled" : self.OnCompactMixView,
        "on_export_activate" : self.OnExport,
        "on_preferences_activate" : self.OnPreferences,
        "on_open_activate" : self.OnOpenProject,
        "on_save_activate" : self.OnSaveProject,
        "on_save_as_activate" : self.OnSaveAsProject,
        "on_new_activate" : self.OnNewProject,
        "on_close_activate" : self.OnCloseProject,
        "on_show_as_bars_beats_ticks_toggled" : self.OnShowBarsBeats,
        "on_show_as_hours_minutes_seconds_toggled" : self.OnShowHoursMins,
        "on_undo_activate" : self.OnUndo,
        "on_redo_activate" : self.OnRedo,
        "on_cut_activate" : self.OnCut,
        "on_copy_activate" : self.OnCopy,
        "on_paste_activate" : self.OnPaste,
        "on_delete_activate" : self.OnDelete,
        "on_MouseDown" : self.OnMouseDown,
        "on_instrumentconnections_activate" : self.OnInstrumentConnectionsDialog,
        "on_filemenu_activate" : self.OnFileMenu,
        "on_editmenu_activate" : self.OnEditMenu,
        "on_help_contents_activate" : self.OnHelpContentsMenu,
        "on_forums_activate" : self.OnForumsMenu,
        "on_contributing_activate" : self.OnContributingDialog,
        "on_ExtensionManager_activate" : self.OnExtensionManagerDialog,
        "on_instrumentmenu_activate" : self.OnInstrumentMenu,
        "on_instrMenu_add_audio" : self.OnAddAudio,
        "on_change_instr_type_activate" : self.OnChangeInstrument,
        "on_remove_instr_activate" : self.OnRemoveInstrument,
        "on_report_bug_activate" : self.OnReportBug,
        "on_project_add_audio" : self.OnAddAudioFile,
        "on_system_information_activate" : self.OnSystemInformation,
        "on_properties_activate" : self.OnProjectProperties,
    }
    self.wTree.signal_autoconnect(signals)

    # grab some references to bits of the GUI
    self.window = self.wTree.get_widget("MainWindow")
    self.play = self.wTree.get_widget("Play")
    self.stop = self.wTree.get_widget("Stop")
    self.record = self.wTree.get_widget("Record")
    self.save = self.wTree.get_widget("save")
    self.save_as = self.wTree.get_widget("save_as")
    self.close = self.wTree.get_widget("close")
    self.reverse = self.wTree.get_widget("Rewind")
    self.forward = self.wTree.get_widget("Forward")
    self.addInstrumentButton = self.wTree.get_widget("AddInstrument")
    self.compactMixButton = self.wTree.get_widget("CompactMix")
    self.editmenu = self.wTree.get_widget("editmenu")
    self.filemenu = self.wTree.get_widget("filemenu")
    self.undo = self.wTree.get_widget("undo")
    self.redo = self.wTree.get_widget("redo")
    self.cut = self.wTree.get_widget("cut")
    self.copy = self.wTree.get_widget("copy")
    self.paste = self.wTree.get_widget("paste")
    self.delete = self.wTree.get_widget("delete")
    self.instrumentMenu = self.wTree.get_widget("instrumentmenu")
    self.export = self.wTree.get_widget("export")
    self.recentprojects = self.wTree.get_widget("recentprojects")
    self.recentprojectsmenu = self.wTree.get_widget("recentprojects_menu")
    self.menubar = self.wTree.get_widget("menubar")
    self.toolbar = self.wTree.get_widget("MainToolbar")
    self.addAudioMenuItem = self.wTree.get_widget("add_audio_file_instrument_menu")
    self.changeInstrMenuItem = self.wTree.get_widget("change_instrument_type")
    self.removeInstrMenuItem = self.wTree.get_widget("remove_selected_instrument")
    self.addAudioFileButton = self.wTree.get_widget("addAudioFileButton")
    self.addAudioFileMenuItem = self.wTree.get_widget("add_audio_file_project_menu")
    self.addInstrumentFileMenuItem = self.wTree.get_widget("add_instrument1")
    self.recordingInputsFileMenuItem = self.wTree.get_widget("instrument_connections1")
    self.timeFormatFileMenuItem = self.wTree.get_widget("time_format1")
    self.properties_menu_item = self.wTree.get_widget("project_properties")

    self.recentprojectitems = []
    self.lastopenedproject = None
    self.project = None
    self.headerhbox = None
    self.timeview = None
    self.tvtoolitem = None #wrapper for putting timeview in toolbar
    self.workspace = None
    self.instrNameEntry = None #the gtk.Entry when editing an instrument name
    self.main_vbox = self.wTree.get_widget("main_vbox")

    self.statusbar = StatusBar.StatusBar()
    self.main_vbox.pack_end(self.statusbar, False)

    # Initialise some useful vars
    self.mode = None
    # settingButtons guards signal handlers against programmatic toggles
    self.settingButtons = True
    self.compactMixButton.set_active(False)
    self.settingButtons = False
    self.isRecording = False
    self.isPlaying = False
    self.isPaused = False

    # Intialise context sensitive tooltips for workspace buttons
    self.compactMixButton.set_tooltip_text(self.mixingViewDisabledTip)

    # set the window size to the last saved value
    x = int(Globals.settings.general["windowwidth"])
    y = int(Globals.settings.general["windowheight"])
    self.window.resize(x, y)

    # set sensitivity
    self.SetGUIProjectLoaded()

    # Connect up the forward and reverse handlers. We can't use the autoconnect as we need child items
    innerbtn = self.reverse.get_children()[0]
    innerbtn.connect("pressed", self.OnRewindPressed)
    innerbtn.connect("released", self.OnRewindReleased)
    innerbtn = self.forward.get_children()[0]
    innerbtn.connect("pressed", self.OnForwardPressed)
    innerbtn.connect("released", self.OnForwardReleased)

    miximg = gtk.Image()
    miximg.set_from_file(os.path.join(Globals.IMAGE_PATH, "icon_mix.png"))
    self.compactMixButton.set_icon_widget(miximg)
    miximg.show()

    #get the audiofile image from Globals
    self.audioFilePixbuf = Globals.getCachedInstrumentPixbuf("audiofile")
    audioimg = gtk.Image()
    size = gtk.icon_size_lookup(gtk.ICON_SIZE_MENU)
    pixbuf = self.audioFilePixbuf.scale_simple(size[0], size[1], gtk.gdk.INTERP_BILINEAR)
    audioimg.set_from_pixbuf(pixbuf)
    # set the add audio menu item icon
    self.addAudioMenuItem.set_image(audioimg)

    # same icon again, scaled for the toolbar button
    size = gtk.icon_size_lookup(self.toolbar.get_icon_size())
    pixbuf = self.audioFilePixbuf.scale_simple(size[0], size[1], gtk.gdk.INTERP_BILINEAR)
    audioimg = gtk.Image()
    audioimg.set_from_pixbuf(pixbuf)
    self.addAudioFileButton.set_icon_widget(audioimg)
    audioimg.show()

    # populate the Recent Projects menu
    self.OpenRecentProjects()
    self.PopulateRecentProjects()

    # set window icon; fall back to the bundled image file if the icon
    # theme has no "jokosher" entry
    icon_theme = gtk.icon_theme_get_default()
    try:
        pixbuf = icon_theme.load_icon("jokosher", 48, 0)
        self.window.set_icon(pixbuf)
    except gobject.GError, exc:
        self.window.set_icon_from_file(os.path.join(Globals.IMAGE_PATH, "jokosher.png"))

    # make icon available to others
    self.window.realize()
    self.icon = self.window.get_icon()

    # Make sure we can import for the instruments folder
    sys.path.append("Instruments")

    self.window.add_events(gtk.gdk.KEY_PRESS_MASK)
    self.window.connect_after("key-press-event", self.OnKeyPress)
    self.window.connect("button_press_event", self.OnMouseDown)

    self.CheckGstreamerVersions()

    # set up presets registry - this should probably be removed here
    EffectPresets.EffectPresets()
    Globals.PopulateEncoders()
    Globals.PopulateAudioBackends()

    # seems like this is the best place to instantiate RegisterMixdownActionAPI
    # as extensions and the mixdown profile dialog can use it through mainapp
    self.registerMixdownActionAPI = MixdownActions.RegisterMixdownActionAPI()
    # register the default MixdownActions
    self.registerMixdownActionAPI.RegisterMixdownActions((MixdownActions.RunAScript, MixdownActions.ExportAsFileType))

    if loadExtensions:
        # Load extensions -- this should probably go somewhere more appropriate
        self.extensionManager = ExtensionManager.ExtensionManager(self)

    ## Setup is complete so start up the GUI and perhaps load a project
    ## any new setup code needs to go above here

    # Show the main window
    self.window.show_all()

    # command line options override preferences so check for them first,
    # then preferences, then default to the welcome dialog
    if startuptype == 2: # welcomedialog cmdline switch
        WelcomeDialog.WelcomeDialog(self)
        return
    elif startuptype == 1: # no-project cmdline switch
        return
    elif openproject: # a project name on the cmdline
        self.OpenProjectFromPath(openproject)
    elif Globals.settings.general["startupaction"] == PreferencesDialog.STARTUP_LAST_PROJECT:
        if self.lastopenedproject:
            self.OpenProjectFromPath(self.lastopenedproject[0])
    elif Globals.settings.general["startupaction"] == PreferencesDialog.STARTUP_NOTHING:
        return

    #if everything else bombs out resort to the welcome dialog
    if self.project == None:
        WelcomeDialog.WelcomeDialog(self)
#_____________________________________________________________________
def OnCompactMixView(self, button=None):
    """
    Switches the workspace in or out of the compact mixing view.

    Parameters:
        button -- Button object calling this method.
    """
    if not self.workspace:
        return
    self.workspace.ToggleCompactMix()
#_____________________________________________________________________
def OnF3Pressed(self):
    """
    Toggles the compact mix view button when F3 is pressed.
    """
    isActive = self.compactMixButton.get_active()
    self.compactMixButton.set_active(not isActive)
#_____________________________________________________________________
def OnResize(self, widget, event):
    """
    Remembers the window dimensions whenever the main window is resized,
    so they can be written to the preferences on exit.

    Parameters:
        widget -- GTK callback parameter.
        event -- GTK callback parameter.

    Returns:
        False -- continue GTK signal propagation.
    """
    self.width, self.height = widget.get_size()
    return False
#_____________________________________________________________________
def OnDestroy(self, widget=None, event=None):
    """
    Called when the main window is destroyed. Persists the window size,
    then quits unless the current project refuses to close.

    Parameters:
        widget -- reserved for GTK callbacks, don't use it explicitly.
        event -- reserved for GTK callbacks, don't use it explicitly.

    Returns:
        True -- the current project can't be properly closed.
                This stops signal propagation.
    """
    # save the current window size
    Globals.settings.general["windowwidth"] = self.width
    Globals.settings.general["windowheight"] = self.height
    Globals.settings.write()

    if self.CloseProject() != 0:
        # project could not be closed; stop signal propagation
        return True
    gtk.main_quit()
#_____________________________________________________________________
def OnShowAddInstrumentDialog(self, widget=None):
    """
    Opens the "Add Instrument" dialog for the current project.

    Parameters:
        widget -- reserved for GTK callbacks, don't use it explicitly.
    """
    AddInstrumentDialog.AddInstrumentDialog(self.project, self)
#_____________________________________________________________________
def OnChangeInstrument(self, widget=None):
    """
    Opens the instrument dialog for the first selected instrument so its
    type can be changed.

    Parameters:
        widget -- reserved for GTK callbacks, don't use it explicitly.
    """
    selected = [instr for instr in self.project.instruments if instr.isSelected]
    if selected:
        AddInstrumentDialog.AddInstrumentDialog(self.project, self, selected[0])
#_____________________________________________________________________
def About(self, widget=None):
    """
    Shows the "About" dialog box modally.

    Parameters:
        widget -- reserved for GTK callbacks, don't use it explicitly.
    """
    # route URL clicks in the dialog through our own handler
    gtk.about_dialog_set_url_hook(self.AboutLinkActivate)
    aboutTree = gtk.glade.XML(Globals.GLADE_PATH, "AboutDialog")
    dialog = aboutTree.get_widget("AboutDialog")
    dialog.set_transient_for(self.window)
    dialog.set_icon(self.icon)
    dialog.run()
    dialog.destroy()
#_____________________________________________________________________
def AboutLinkActivate(self, widget, link):
    """
    Opens the Jokosher website in the user's default web browser.

    Parameters:
        widget -- reserved for GTK callbacks, don't use it explicitly.
        link -- URL of the link activated in the about dialog.
    """
    Utils.OpenExternalURL(url=link, message=_("<big>Couldn't launch the jokosher website automatically.</big>\n\nPlease visit %s to access it."), parent=self.window)
#_____________________________________________________________________
def OnReportBug(self, widget):
    """
    Opens the report bug launchpad website in the user's default web browser.

    Parameters:
        widget -- reserved for GTK callbacks, don't use it explicitly.
    """
    bugURL = "https://bugs.launchpad.net/jokosher/+filebug/"
    errorMessage = _("<big>Couldn't launch the launchpad website automatically.</big>\n\nPlease visit %s to access it.")
    Utils.OpenExternalURL(url=bugURL, message=errorMessage, parent=self.window)
#_____________________________________________________________________
def Record(self, widget=None):
    """
    Toggles recording. If there's an error, a warning/error message is
    issued to the user.

    Parameters:
        widget -- reserved for GTK callbacks, don't use it explicitly.
    """
    # toggling the record button invokes this function so we use the settingButtons var to
    # indicate that we're just changing the GUI state and dont need to do anything code-wise
    if self.settingButtons:
        return

    if self.isRecording:
        # already recording, so a second toggle means "stop"
        self.project.Stop()
        return

    armedInstruments = [instr for instr in self.project.instruments if instr.isArmed]
    canRecord = bool(armedInstruments)

    # Check to see if any instruments are trying to use the same input channel
    for instrA in armedInstruments:
        for instrB in armedInstruments:
            if instrA is not instrB and instrA.input == instrB.input and instrA.inTrack == instrB.inTrack:
                string = _("The instruments '%(name1)s' and '%(name2)s' both have the same input selected. Please either disarm one, or connect it to a different input through 'Project -> Recording Inputs'")
                message = string % {"name1" : instrA.name, "name2" : instrB.name}
                dlg = gtk.MessageDialog(self.window,
                        gtk.DIALOG_MODAL | gtk.DIALOG_DESTROY_WITH_PARENT,
                        gtk.MESSAGE_INFO,
                        gtk.BUTTONS_CLOSE,
                        message)
                dlg.connect('response', lambda dlg, response: dlg.destroy())
                dlg.run()
                # untoggle the record button without re-entering this handler
                self.settingButtons = True
                widget.set_active(False)
                self.settingButtons = False
                return

    if canRecord:
        Globals.debug("can record")
        self.project.Record()
        return

    Globals.debug("can not record")
    if self.project.instruments:
        errmsg = "No instruments are armed for recording. You need to arm an instrument before you can begin recording."
    else:
        errmsg = "No instruments have been added. You must add an instrument before recording"
    dlg = gtk.MessageDialog(self.window,
            gtk.DIALOG_MODAL | gtk.DIALOG_DESTROY_WITH_PARENT,
            gtk.MESSAGE_INFO,
            gtk.BUTTONS_CLOSE,
            _(errmsg))
    dlg.connect('response', lambda dlg, response: dlg.destroy())
    dlg.run()
    # untoggle the record button without re-entering this handler
    self.settingButtons = True
    widget.set_active(False)
    self.settingButtons = False
#_____________________________________________________________________
def TogglePlayIcon(self):
    """
    Swaps the play button icon between play and pause depending on the
    current playback state.
    """
    stockID = gtk.STOCK_MEDIA_PAUSE if self.isPlaying else gtk.STOCK_MEDIA_PLAY
    self.play.set_stock_id(stockID)
    # TODO: change the tooltips in 1.0
    #self.contextTooltips.set_tip(play, tooltip)
#_____________________________________________________________________
def Play(self, widget=None):
    """
    Toggles playback: starts playing when stopped, pauses when playing.

    Parameters:
        widget -- reserved for GTK callbacks, don't use it explicitly.
    """
    # ignore programmatic toggles of the play button
    if self.settingButtons:
        return
    self.TogglePlayIcon()
    if self.isPlaying:
        self.project.Pause()
    else:
        self.project.Play()
#_____________________________________________________________________
#The stop button is really just an alias for toggling play/record to off
def Stop(self, widget=None):
    """
    Stops whichever of recording or playback is currently running.

    Parameters:
        widget -- reserved for GTK callbacks, don't use it explicitly.
    """
    self.project.Stop()
#_____________________________________________________________________
def OnRewindPressed(self, widget=None):
    """
    Begins seeking backwards through the project while the rewind
    button is held down.

    Parameters:
        widget -- reserved for GTK callbacks, don't use it explicitly.
    """
    self.project.transport.Reverse(True)
#_____________________________________________________________________
def OnRewindReleased(self, widget=None):
    """
    Ends the backwards seek when the rewind button is released.

    Parameters:
        widget -- reserved for GTK callbacks, don't use it explicitly.
    """
    self.project.transport.Reverse(False)
#_____________________________________________________________________
def OnForwardPressed(self, widget=None):
    """
    Begins seeking forwards through the project while the forward
    button is held down.

    Parameters:
        widget -- reserved for GTK callbacks, don't use it explicitly.
    """
    self.project.transport.Forward(True)
#_____________________________________________________________________
def OnForwardReleased(self, widget=None):
    """
    Ends the forwards seek when the forward button is released.

    Parameters:
        widget -- reserved for GTK callbacks, don't use it explicitly.
    """
    self.project.transport.Forward(False)
#_____________________________________________________________________
def OnExport(self, widget=None, profile=None):
    """
    Displays the Mixdown Profiles dialog, which allows the user to
    (simply) export the project as ogg or mp3 (replacing the old
    export dialog), or create a mixdown profile that does a set of
    complicated things.

    Parameters:
        widget -- reserved for GTK callbacks, don't use it explicitly.
        profile -- mixdown profile forwarded to MixdownProfileDialog;
                   semantics are defined there (presumably the profile
                   to preselect -- confirm in MixdownProfileDialog).
    """
    MixdownProfileDialog.MixdownProfileDialog(self, self.project, profile)
#_____________________________________________________________________
def OnExport_old(self, widget=None):
    """
    Creates and shows a save file dialog which allows the user to export
    the project as ogg or mp3.

    Parameters:
        widget -- reserved for GTK callbacks, don't use it explicitly.
    """
    buttons = (gtk.STOCK_CANCEL,gtk.RESPONSE_CANCEL,gtk.STOCK_SAVE,gtk.RESPONSE_OK)
    chooser = gtk.FileChooserDialog(_("Mixdown Project"), self.window, gtk.FILE_CHOOSER_ACTION_SAVE, buttons)
    # start in the last used project folder, if it still exists
    if os.path.exists(Globals.settings.general["projectfolder"]):
        chooser.set_current_folder(Globals.settings.general["projectfolder"])
    else:
        chooser.set_current_folder(os.path.expanduser("~"))

    chooser.set_do_overwrite_confirmation(True)
    chooser.set_default_response(gtk.RESPONSE_OK)
    chooser.set_current_name(self.project.name)

    # extra widget: a combo box to pick the output file format
    saveLabel = gtk.Label(_("Save as file type:"))
    typeCombo = gtk.combo_box_new_text()

    # loop variable renamed from "format", which shadowed the builtin
    for exportFormat in Globals.EXPORT_FORMATS:
        typeCombo.append_text("%s (.%s)" % (exportFormat["description"], exportFormat["extension"]))

    #Make the first item the default
    typeCombo.set_active(0)

    extraHBox = gtk.HBox()
    extraHBox.pack_start(saveLabel, False)
    extraHBox.pack_end(typeCombo, False)
    extraHBox.show_all()
    chooser.set_extra_widget(extraHBox)

    response = chooser.run()
    if response == gtk.RESPONSE_OK:
        exportFilename = chooser.get_filename()
        # remember the folder for the next export
        Globals.settings.general["projectfolder"] = os.path.dirname(exportFilename)
        Globals.settings.write()
        #If they haven't already appended the extension for the
        #chosen file type, add it to the end of the file.
        filetypeDict = Globals.EXPORT_FORMATS[typeCombo.get_active()]
        if not exportFilename.lower().endswith(filetypeDict["extension"]):
            exportFilename += "." + filetypeDict["extension"]
        chooser.destroy()
        self.project.Export(exportFilename, filetypeDict["pipeline"])
    else:
        chooser.destroy()
#_____________________________________________________________________
def UpdateExportDialog(self):
    """
    Updates the progress bar corresponding to the current export operation.

    Returns:
        True -- export still in progress; keep this callback scheduled.
        False -- export has completed; the dialog was destroyed.
    """
    progress = self.project.GetExportProgress()
    # (-1, ?) or a zero total means the export pipeline hasn't started yet
    if progress[0] == -1 or progress[1] == 0:
        self.exportprogress.set_fraction(0.0)
        self.exportprogress.set_text(_("Preparing to mixdown project"))
    elif progress[0] == progress[1] == 100:
        self.exportdlg.destroy()
        return False
    else:
        # NOTE(review): assumes GetExportProgress returns float seconds;
        # with ints this division would truncate under Python 2 -- confirm.
        self.exportprogress.set_fraction(progress[0]/progress[1])
        self.exportprogress.set_text(_("%(progress)d%% of %(total)d seconds completed") % {"progress":(progress[0]/progress[1]*100), "total":progress[1] } )

    return True
#_____________________________________________________________________
def OnExportCancel(self, widget=None):
    """
    Aborts a running export operation and closes the export progress dialog.

    Parameters:
        widget -- reserved for GTK callbacks, don't use it explicitly.
    """
    self.exportdlg.destroy()
    self.project.TerminateExport()
#_____________________________________________________________________
def OnPreferences(self, widget, destroyCallback=None):
    """
    Opens the "Jokosher Preferences" dialog.

    Parameters:
        widget -- reserved for GTK callbacks, don't use it explicitly.
        destroyCallback -- function that'll get called when the preferences
                            dialog gets destroyed.
    """
    dialog = PreferencesDialog.PreferencesDialog(self.project, self, self.icon)
    if destroyCallback:
        dialog.dlg.connect("destroy", destroyCallback)
#_____________________________________________________________________
def OnShowBarsBeats(self, widget):
    """
    Switches the timeline display to Bars, Beats and Ticks.

    Parameters:
        widget -- reserved for GTK callbacks, don't use it explicitly.
    """
    # ignore programmatic toggles of the radio menu item
    if self.settingButtons:
        return
    if not widget.get_active():
        return
    if self.project and self.project.transport:
        self.project.SetTransportMode(self.project.transport.MODE_BARS_BEATS)
#_____________________________________________________________________
def OnShowHoursMins(self, widget):
"""
Sets and updates the current timeline view to Hours, Minutes and Seconds.
Parameters:
widget -- reserved for GTK callbacks, don't use it explicitly.
"""
if self.settingButtons:
return
if widget.get_active() and self.project and self.project.transport:
self.project.SetTransportMode(self.project.transport.MODE_HOURS_MINS_SECS)
#_____________________________________________________________________
	def OnOpenProject(self, widget, destroyCallback=None):
		"""
		Creates and shows a open file dialog which allows the user to open
		an existing Jokosher project.
		Parameters:
			widget -- reserved for GTK callbacks, don't use it explicitly.
			destroyCallback -- function that'll get called when the open file
					dialog gets destroyed.
		"""
		chooser = gtk.FileChooserDialog((_('Choose a Jokosher project file')), None, gtk.FILE_CHOOSER_ACTION_OPEN, (gtk.STOCK_CANCEL, gtk.RESPONSE_CANCEL, gtk.STOCK_OPEN, gtk.RESPONSE_OK))
		# start in the last-used project folder, falling back to $HOME
		if os.path.exists(Globals.settings.general["projectfolder"]):
			chooser.set_current_folder(Globals.settings.general["projectfolder"])
		else:
			chooser.set_current_folder(os.path.expanduser("~"))
		chooser.set_default_response(gtk.RESPONSE_OK)
		chooser.set_transient_for(self.window)
		allfilter = gtk.FileFilter()
		allfilter.set_name(_("All Files"))
		allfilter.add_pattern("*")
		jokfilter = gtk.FileFilter()
		jokfilter.set_name(_("Jokosher Project File (*.jokosher)"))
		jokfilter.add_pattern("*.jokosher")
		chooser.add_filter(jokfilter)
		chooser.add_filter(allfilter)
		if destroyCallback:
			chooser.connect("destroy", destroyCallback)
		# re-run the dialog until a project opens successfully or the user
		# cancels, so a failed open lets the user pick a different file
		while True:
			response = chooser.run()
			if response == gtk.RESPONSE_OK:
				filename = chooser.get_filename()
				# remember this folder as the default for next time
				Globals.settings.general["projectfolder"] = os.path.dirname(filename)
				Globals.settings.write()
				if self.OpenProjectFromPath(filename,chooser):
					break
			elif response == gtk.RESPONSE_CANCEL or response == gtk.RESPONSE_DELETE_EVENT:
				break
		chooser.destroy()
#_____________________________________________________________________
def OnSaveProject(self, widget=None):
"""
Saves the current project file.
Parameters:
widget -- reserved for GTK callbacks, don't use it explicitly.
"""
if self.project:
self.project.SelectInstrument(None)
self.project.ClearEventSelections()
self.project.SaveProjectFile()
#_____________________________________________________________________
def OnSaveAsProject(self, widget=None):
"""
Creates and shows a save as file dialog which allows the user to save
the current project to an specific file name.
Parameters:
widget -- reserved for GTK callbacks, don't use it explicitly.
"""
buttons = (gtk.STOCK_CANCEL,gtk.RESPONSE_CANCEL,gtk.STOCK_SAVE,gtk.RESPONSE_OK)
chooser = gtk.FileChooserDialog(_("Choose a location to save the project"), self.window,
gtk.FILE_CHOOSER_ACTION_SAVE, buttons)
chooser.set_do_overwrite_confirmation(False)
chooser.set_current_name(self.project.name)
chooser.set_default_response(gtk.RESPONSE_OK)
if os.path.exists(Globals.settings.general["projectfolder"]):
chooser.set_current_folder(Globals.settings.general["projectfolder"])
else:
chooser.set_current_folder(os.path.expanduser("~"))
response = chooser.run()
if response == gtk.RESPONSE_OK:
# InitProjectLocation expects a URI
folder = PlatformUtils.pathname2url(chooser.get_current_folder())
# Save the selected folder as the default folder
Globals.settings.general["projectfolder"] = folder
Globals.settings.write()
name = os.path.basename(chooser.get_filename())
old_audio_path = self.project.audio_path
old_levels_path = self.project.levels_path
try:
ProjectManager.InitProjectLocation(folder, name, self.project)
except ProjectManager.CreateProjectError, e:
chooser.hide()
if e.errno == 2:
message = _("A file or folder with this name already exists. Please choose a different project name and try again.")
elif e.errno == 3:
message = _("The file or folder location is write-protected.")
elif e.errno == 5:
message = _("The URI scheme given is either invalid or not supported")
# show the error dialog with the relavent error message
dlg = gtk.MessageDialog(self.window,
gtk.DIALOG_MODAL | gtk.DIALOG_DESTROY_WITH_PARENT,
gtk.MESSAGE_ERROR,
gtk.BUTTONS_OK,
_("Unable to create project.\n\n%s") % message)
dlg.run()
dlg.destroy()
else:
self.project.SelectInstrument()
self.project.ClearEventSelections()
self.project.SaveProjectFile(self.project.projectfile)
Globals.CopyAllFiles(old_audio_path, self.project.audio_path, self.project.GetLocalAudioFilenames())
Globals.CopyAllFiles(old_levels_path, self.project.levels_path, self.project.GetLevelsFilenames())
chooser.destroy()
#_____________________________________________________________________
def OnNewProject(self, widget, destroyCallback=None):
"""
Creates and shows the "New Project" dialog.
Parameters:
widget -- reserved for GTK callbacks, don't use it explicitly.
destroyCallback -- function that'll get called when the new project
dialog gets destroyed.
"""
newdlg = NewProjectDialog.NewProjectDialog(self)
if destroyCallback:
newdlg.dlg.connect("destroy", destroyCallback)
#_____________________________________________________________________
def OnCloseProject(self, widget):
"""
Closes the current project by calling CloseProject().
Parameters:
widget -- reserved for GTK callbacks, don't use it explicitly.
"""
if self.CloseProject() == 0:
self.SetGUIProjectLoaded()
#_____________________________________________________________________
	def CloseProject(self):
		"""
		Closes the current project. If there's changes pending, it'll ask the user for confirmation.
		Returns:
			the status of the close operation:
			0 = there was no project open or it was closed succesfully.
			1 = cancel the operation and return to the normal program flow.
		"""
		if not self.project:
			return 0
		self.Stop()
		if self.project.CheckUnsavedChanges():
			message = _("<span size='large' weight='bold'>Save changes to project \"%s\" before closing?</span>\n\nYour changes will be lost if you don't save them.") % self.project.name
			dlg = gtk.MessageDialog(self.window,
					gtk.DIALOG_MODAL |
					gtk.DIALOG_DESTROY_WITH_PARENT,
					gtk.MESSAGE_WARNING,
					gtk.BUTTONS_NONE)
			dlg.set_markup(message)
			dlg.add_button(_("Close _Without Saving"), gtk.RESPONSE_NO)
			dlg.add_button(gtk.STOCK_CANCEL, gtk.RESPONSE_CANCEL)
			defaultAction = dlg.add_button(gtk.STOCK_SAVE, gtk.RESPONSE_YES)
			#make save the default action when enter is pressed
			dlg.set_default(defaultAction)
			dlg.set_transient_for(self.window)
			response = dlg.run()
			dlg.destroy()
			if response == gtk.RESPONSE_YES:
				self.OnSaveProject()
			elif response == gtk.RESPONSE_NO:
				# discard pending changes and continue closing
				pass
			elif response == gtk.RESPONSE_CANCEL or response == gtk.RESPONSE_DELETE_EVENT:
				# user aborted the close; keep the project loaded
				return 1
		self.project.CloseProject()
		self.project = None
		self.mode = None
		return 0
#_____________________________________________________________________
def OnUndo(self, widget):
"""
Undoes the last change made to the project and updates the displays.
Parameters:
widget -- reserved for GTK callbacks, don't use it explicitly.
"""
self.project.Undo()
#_____________________________________________________________________
def OnRedo(self, widget):
"""
Redoes the last undo operation and updates the displays.
Parameters:
widget -- reserved for GTK callbacks, don't use it explicitly.
"""
self.project.Redo()
#_____________________________________________________________________
	def OnProjectAudioState(self, project):
		"""
		Callback for when the project starts playing or recording, or when it is
		paused or stopped. Syncs the transport buttons, their sensitivity and
		their tooltips with the project's new audio state.
		Parameters:
			project -- The project instance that send the signal.
		"""
		# cache the state as booleans for the rest of the UI code
		self.isPlaying = (self.project.audioState == self.project.AUDIO_PLAYING)
		self.isPaused = (self.project.audioState == self.project.AUDIO_PAUSED)
		self.isRecording = (self.project.audioState == self.project.AUDIO_RECORDING)
		self.stop.set_sensitive(True) #stop should always be clickable
		self.record.set_sensitive(not self.isPlaying)
		# widgets that must be locked out while recording
		controls = (self.play, self.reverse, self.forward, self.editmenu, self.instrumentMenu,
			self.workspace.recordingView.timelinebar.headerhbox,
			self.addInstrumentButton, self.addAudioFileButton)
		for widget in controls:
			widget.set_sensitive(not self.isRecording)
		# settingButtons suppresses the toggled handlers while we sync the UI
		self.settingButtons = True
		self.record.set_active(self.isRecording)
		self.TogglePlayIcon()
		self.settingButtons = False
		# update the tooltips depending on the current recording state
		if self.isRecording:
			self.record.set_tooltip_text(self.recTipEnabled)
			self.stop.set_tooltip_text(self.recStopTipEnabled)
		else:
			self.record.set_tooltip_text(self.recTipDisabled)
			self.stop.set_tooltip_text(self.recStopTipDisabled)
		self.workspace.mixView.StartUpdateTimeout()
#_____________________________________________________________________
	def OnProjectExportStart(self, project):
		"""
		Callback for when the project starts exporting audio to a file.
		Builds the export progress dialog from glade and schedules
		UpdateExportDialog() to poll the export progress every 100ms.
		Parameters:
			project -- The project instance that send the signal.
		"""
		export = gtk.glade.XML (Globals.GLADE_PATH, "ProgressDialog")
		export.signal_connect("on_cancel_clicked", self.OnExportCancel)
		self.exportdlg = export.get_widget("ProgressDialog")
		self.exportdlg.set_icon(self.icon)
		self.exportdlg.set_transient_for(self.window)
		label = export.get_widget("progressLabel")
		label.set_text(_("Mixing project to file: %s") % self.project.exportFilename)
		self.exportprogress = export.get_widget("progressBar")
		# UpdateExportDialog returns False when the export finishes,
		# which removes this timeout automatically
		gobject.timeout_add(100, self.UpdateExportDialog)
#_____________________________________________________________________
def OnProjectExportStop(self, project):
"""
Callback for when the project has finished exporting audio to a file.
Parameters:
project -- The project instance that send the signal.
"""
if self.exportdlg:
self.exportdlg.destroy()
#_____________________________________________________________________
def OnProjectUndo(self, project=None):
"""
Callback for when the project's undo or redo stacks change.
Parameters:
project -- The project instance that send the signal.
"""
self.undo.set_sensitive(self.project.CanPerformUndo())
self.redo.set_sensitive(self.project.CanPerformRedo())
if self.project.CheckUnsavedChanges():
self.window.set_title(_('*%s - Jokosher') % self.project.name)
else:
self.window.set_title(_('%s - Jokosher') % self.project.name)
#_____________________________________________________________________
	def OnTransportMode(self, transportManager=None, mode=None):
		"""
		Callback for signal when the transport mode changes; keeps the two
		time-format radio menu items in sync with the transport's mode.
		Parameters:
			transportManager -- the TransportManager instance that send the signal.
			mode -- the mode type that the transport changed to.
		"""
		# guard against re-entrancy: set_active() below fires the items'
		# toggled handlers, which would call back into the transport code
		if self.settingButtons:
			return
		self.settingButtons = True
		modeBars = self.wTree.get_widget("show_as_bars_beats_ticks")
		modeHours = self.wTree.get_widget("show_as_hours_minutes_seconds")
		transport = self.project.transport
		modeBars.set_active(transport.mode == transport.MODE_BARS_BEATS)
		modeHours.set_active(transport.mode == transport.MODE_HOURS_MINS_SECS)
		self.settingButtons = False
#_____________________________________________________________________
def InsertRecentProject(self, path, name):
"""
Inserts a new project with its corresponding path to the recent project list.
Parameters:
path -- path to the project file.
name -- name of the project being added.
"""
for item in self.recentprojectitems:
if path == item[0]:
self.recentprojectitems.remove(item)
break
self.recentprojectitems.insert(0, (path, name))
self.SaveRecentProjects()
self.PopulateRecentProjects()
#_____________________________________________________________________
def OnClearRecentProjects(self, widget):
"""
Clears the recent projects list. It then updates the user interface to reflect
the changes.
Parameters:
widget -- reserved for GTK callbacks, don't use it explicitly.
"""
self.recentprojectitems = []
self.SaveRecentProjects()
self.PopulateRecentProjects()
#_____________________________________________________________________
	def PopulateRecentProjects(self):
		"""
		Populates the Recent Projects menu with items from self.recentprojectitems.
		Rebuilds the whole submenu from scratch, appending a separator and a
		"Clear" item at the end; disables the menu entirely when empty.
		"""
		# throw away the old menu contents before rebuilding
		menuitems = self.recentprojectsmenu.get_children()
		for c in menuitems:
			self.recentprojectsmenu.remove(c)
		if self.recentprojectitems:
			for item in self.recentprojectitems:
				# item is a (path, name) pair: show the name, tooltip the path
				mitem = gtk.MenuItem(item[1])
				mitem.set_tooltip_text(item[0])
				self.recentprojectsmenu.append(mitem)
				mitem.connect("activate", self.OnRecentProjectsItem, item[0], item[1])
			mitem = gtk.SeparatorMenuItem()
			self.recentprojectsmenu.append(mitem)
			mitem = gtk.ImageMenuItem(gtk.STOCK_CLEAR)
			mitem.set_tooltip_text(_("Clear the list of recent projects"))
			self.recentprojectsmenu.append(mitem)
			mitem.connect("activate", self.OnClearRecentProjects)
			self.recentprojects.set_sensitive(True)
			self.recentprojectsmenu.show_all()
		else:
			#there are no items, so just make it insensitive
			self.recentprojects.set_sensitive(False)
#_____________________________________________________________________
	def OpenRecentProjects(self):
		"""
		Populate the self.recentprojectitems with items from global settings.
		The setting is a comma-separated list of "path|name" pairs; malformed
		entries and entries whose path no longer exists are dropped.
		"""
		self.recentprojectitems = []
		if Globals.settings.general.has_key("recentprojects"):
			filestring = Globals.settings.general["recentprojects"]
			filestring = filestring.split(",")
			recentprojectitems = []
			for i in filestring:
				# only accept well-formed "path|name" entries
				if len(i.split("|")) == 2:
					recentprojectitems.append(i.split("|"))
			for path, name in recentprojectitems:
				#TODO - see ticket 80; should it check if the project is valid?
				if not os.path.exists(path):
					Globals.debug("Error: Couldn't open recent project", path)
				else:
					self.recentprojectitems.append((path, name))
			#the first project is our last opened project
			if recentprojectitems and os.path.exists(recentprojectitems[0][0]):
				self.lastopenedproject = recentprojectitems[0]
		# write the filtered list back so stale entries are purged
		self.SaveRecentProjects()
#_____________________________________________________________________
def OnRecentProjectsItem(self, widget, path, name):
"""
Opens the project selected from the "Recent Projects" drop-down menu.
Parameters:
widget -- reserved for GTK callbacks, don't use it explicitly.
path -- path to the project file.
name -- name of the project being opened.
"""
return self.OpenProjectFromPath(path)
#_____________________________________________________________________
def SaveRecentProjects(self):
"""
Saves the list of the last 8 recent projects to the Jokosher config file.
"""
string = ""
# Cut list to 8 items
self.recentprojectitems = self.recentprojectitems[:8]
for path, name in self.recentprojectitems:
string = string + str(path) + "|" + str(name) + ","
string = string[:-1]
Globals.settings.general['recentprojects'] = string
Globals.settings.write()
#______________________________________________________________________
	def OnCut(self, widget=None, cut=True):
		"""
		Cuts the portion of selected audio and puts it in the clipboard.
		Parameters:
			widget -- reserved for GTK callbacks, don't use it explicitly.
			cut -- determines whether the operation should perform a cut or copy operation:
					True = perform a cut operation.
					False = perform a copy operation.
		"""
		# don't modify events while audio is playing or paused
		if self.isPlaying or self.isPaused:
			return
		if self.instrNameEntry:
			#if an instrument name is currently being edited, the
			#cut/copy applies to the entry text, not to audio events
			if cut:
				self.instrNameEntry.cut_clipboard()
			else:
				self.instrNameEntry.copy_clipboard()
			return
		#Wipe the clipboard clean
		self.project.clipboardList = []
		for instr in self.project.instruments:
			for event in instr.events:
				if event.isSelected:
					#Add to the clipboard
					self.project.clipboardList.append(event)
					if cut:
						#if we are cutting (as opposed to copying)
						event.Delete()
#______________________________________________________________________
def OnCopy(self, widget=None):
"""
Copies the portion of selected audio to the clipboard.
Parameters:
widget -- reserved for GTK callbacks, don't use it explicitly.
"""
self.OnCut(widget, False)
#______________________________________________________________________
def OnPaste(self, widget=None):
"""
Pastes the portion of audio in the clipboard to the selected instrument,
at the selected position in time.
Parameters:
widget -- reserved for GTK callbacks, don't use it explicitly.
"""
if self.isPlaying or self.isPaused:
return
if self.instrNameEntry:
#if an instrument name is currently being edited
self.instrNameEntry.paste_clipboard()
return
for instr in self.project.instruments:
if instr.isSelected:
for event in self.project.clipboardList:
instr.addEventFromEvent(0, event)
break
#______________________________________________________________________
def OnDelete(self, widget=None):
"""
Deletes the currently selected instruments or events.
Parameters:
widget -- reserved for GTK callbacks, don't use it explicitly.
"""
if self.project.GetIsRecording() or self.isPlaying or self.isPaused:
return
# list to store instruments to delete, so we don't modify the list while we are iterating
instrOrEventList = []
eventList = []
# Delete any select instruments
for instr in self.project.instruments:
if (instr.isSelected):
#set not selected so when we undo we don't get two selected instruments
instr.isSelected = False
instrOrEventList.append(instr)
else:
# Delete any selected events
for ev in instr.events:
if ev.isSelected:
instrOrEventList.append(ev)
if instrOrEventList:
self.project.DeleteInstrumentsOrEvents(instrOrEventList)
#______________________________________________________________________
def OnMouseDown(self, widget, mouse):
"""
If there's a project open, clears event and instrument selections.
Parameters:
widget -- reserved for GTK callbacks, don't use it explicitly.
mouse -- reserved for GTK callbacks, don't use it explicitly.
"""
if self.project:
self.project.ClearEventSelections()
self.project.SelectInstrument(None)
#______________________________________________________________________
	def SetGUIProjectLoaded(self):
		"""
		Refreshes the main window and it's components when a project is opened or closed.
		For example, buttons are enabled/disabled whether there's a project currently open or not.
		"""
		# remove project-specific widgets left over from a previous project
		children = self.main_vbox.get_children()
		if self.workspace in children:
			self.main_vbox.remove(self.workspace)
		if self.headerhbox in children:
			self.main_vbox.remove(self.headerhbox)
		if self.tvtoolitem in self.toolbar.get_children():
			self.toolbar.remove(self.tvtoolitem)
		# every control whose sensitivity depends on a project being open
		ctrls = (self.save, self.save_as, self.close, self.addInstrumentButton, self.addAudioFileButton,
			self.reverse, self.forward, self.play, self.stop, self.record,
			self.instrumentMenu, self.export, self.cut, self.copy, self.paste,
			self.undo, self.redo, self.delete, self.compactMixButton, self.properties_menu_item,
			self.addAudioFileMenuItem, self.addInstrumentFileMenuItem, self.recordingInputsFileMenuItem,
			self.timeFormatFileMenuItem)
		if self.project:
			# make various buttons and menu items enabled now we have a project option
			for c in ctrls:
				c.set_sensitive(True)
			#set undo/redo if there is saved undo history
			self.OnProjectUndo()
			# Create our custom widgets
			self.timeview = TimeView.TimeView(self.project)
			self.workspace = Workspace.Workspace(self.project, self)
			# Add them to the main window
			self.main_vbox.pack_start(self.workspace, True, True)
			self.tvtoolitem = gtk.ToolItem()
			self.tvtoolitem.add(self.timeview)
			self.toolbar.insert(self.tvtoolitem, -1)
			self.tvtoolitem.show_all()
			#reset toggle buttons; settingButtons suppresses the toggled handlers
			self.settingButtons = True
			self.compactMixButton.set_active(False)
			self.settingButtons = False
		else:
			#reset toggle buttons when the project is unloaded
			self.settingButtons = True
			self.compactMixButton.set_active(False)
			self.settingButtons = False
			for c in ctrls:
				c.set_sensitive(False)
			# Set window title with no project name
			self.window.set_title(_('Jokosher'))
			# Destroy our custom widgets
			if self.workspace:
				self.workspace.destroy()
				self.workspace = None
			if self.tvtoolitem:
				self.tvtoolitem.destroy()
				self.tvtoolitem = None
#_____________________________________________________________________
	def OnKeyPress(self, widget, event):
		"""
		Handles the hotkeys, calling whichever function they are assigned to.
		Parameters:
			widget -- reserved for GTK callbacks, don't use it explicitly.
			event -- reserved for GTK callbacks, don't use it explicitly.
		Returns:
			True if the key press was handled here, False to let other
			handlers process it.
		"""
		key = gtk.gdk.keyval_name(event.keyval)
		# pick the keymap: Ctrl-modified shortcuts vs. plain keys
		if 'GDK_CONTROL_MASK' in event.state.value_names:
			keysdict = {
				"x" : self.OnCut, # Ctrl-X
				"c" : self.OnCopy, # Ctrl-C
				"v" : self.OnPaste, # Ctrl-V
			}
		else:
			keysdict = {
				"F1" : self.OnHelpContentsMenu, # F1 - Help Contents
				"F3" : self.OnF3Pressed, # F3 - Compact Mix View
				"Delete" : self.OnDelete, # delete key - remove selected item
				"BackSpace" : self.OnDelete, # backspace key
				"space" : self.Play,
				"p" : self.Play,
				"r" : self.Record
			}
		if key in keysdict:
			keysdict[key]()
			#very important; return True if we successfully handled the key press
			#so that someone else doesn't handle it afterwards as well.
			return True
		else:
			return False
#_____________________________________________________________________
def OnInstrumentConnectionsDialog(self, widget):
"""
Creates and shows the "Instrument Connections Dialog".
Parameters:
widget -- reserved for GTK callbacks, don't use it explicitly.
"""
InstrumentConnectionsDialog.InstrumentConnectionsDialog(self.project, self)
#_____________________________________________________________________
	def OnFileMenu(self, widget):
		"""
		When the file menu opens, check if there are any events and set the mixdown project menu item's
		sensitivity accordingly and also the 'mixdown as' sensitivity.
		Parameters:
			widget -- reserved for GTK callbacks, don't use it explicitly.
		"""
		self.PopulateMixdownAsMenu()
		if self.isRecording:
			# while recording, lock out everything that would alter the project
			self.export.set_sensitive(False)
			self.addInstrumentFileMenuItem.set_sensitive(False)
			self.addAudioFileMenuItem.set_sensitive(False)
			self.recordingInputsFileMenuItem.set_sensitive(False)
			if self.mixdown_as_header:
				self.mixdown_as_header.set_sensitive(False)
			return
		# mixdown only makes sense when at least one instrument has events
		eventList = False
		if self.project:
			self.addInstrumentFileMenuItem.set_sensitive(True)
			self.addAudioFileMenuItem.set_sensitive(True)
			self.recordingInputsFileMenuItem.set_sensitive(True)
			for instr in self.project.instruments:
				if instr.events:
					eventList = True
					break
		self.export.set_sensitive(eventList)
		if self.mixdown_as_header:
			self.mixdown_as_header.set_sensitive(eventList)
#_____________________________________________________________________
def OnEditMenu(self, widget):
"""
HACK: When the edit menu opens, checks if any events or instruments are selected
and sets the cut, copy, paste and delete menu items sensitivity accordingly.
Parameters:
widget -- reserved for GTK callbacks, don't use it explicitly.
"""
instrSelected = False
eventSelected = False
if self.project:
for instr in self.project.instruments:
if instrSelected and eventSelected:
break
if instr.isSelected:
instrSelected = True
else:
for ev in instr.events:
if ev.isSelected:
eventSelected = True
self.cut.set_sensitive(eventSelected)
self.copy.set_sensitive(eventSelected)
self.paste.set_sensitive(instrSelected and bool(self.project.clipboardList))
self.delete.set_sensitive(instrSelected or eventSelected)
#_____________________________________________________________________
def OnInstrumentMenu(self, widget):
"""
HACK: When the instrument menu opens, set sensitivity depending on
whether there's a selected instrument or not.
Parameters:
widget -- reserved for GTK callbacks, don't use it explicitly.
"""
instrCount = 0
if self.project:
for instr in self.project.instruments:
if instr.isSelected:
instrCount += 1
self.addAudioMenuItem.set_sensitive(instrCount == 1)
self.changeInstrMenuItem.set_sensitive(instrCount == 1)
self.removeInstrMenuItem.set_sensitive(instrCount > 0)
#_____________________________________________________________________
	def OpenProjectFromPath(self,path, parent=None):
		"""
		Opens the project file referred by the path parameter.
		Parameters:
			path -- path to the project to be opened.
			parent -- parent window of the error message dialog.
		Returns:
			the status of the loading operation:
			True = the project could be successfully opened and
				set as the current project.
			False = loading the project failed. A dialog will be
				displayed to user detailing the error.
		"""
		try:
			uri = PlatformUtils.pathname2url(path)
			self.SetProject(ProjectManager.LoadProjectFile(uri))
			return True
		except ProjectManager.OpenProjectError, e:
			# report the failure to the user; the caller decides whether to retry
			self.ShowOpenProjectErrorDialog(e,parent)
			return False
#_____________________________________________________________________
	def SetProject(self, project):
		"""
		Tries to establish the Project parameter as the current project.
		If there are errors, an error message is issued to the user.
		Parameters:
			project -- the Project object to set as the main project.
		"""
		try:
			ProjectManager.ValidateProject(project)
		except ProjectManager.InvalidProjectError, e:
			# build one message listing every missing file and image
			message=""
			if e.files:
				message+=_("The project references non-existant files:\n")
				for f in e.files:
					message += f + "\n"
			if e.images:
				message+=_("\nThe project references non-existant images:\n")
				for f in e.images:
					message += f + "\n"
			dlg = gtk.MessageDialog(self.window,
				gtk.DIALOG_MODAL | gtk.DIALOG_DESTROY_WITH_PARENT,
				gtk.MESSAGE_ERROR,
				gtk.BUTTONS_OK,
				_("%s\n Invalid or corrupt project file, will not open.")%message)
			dlg.run()
			dlg.destroy()
			return
		# close any already-open project first; abort if the user cancels
		if self.project:
			if self.CloseProject() != 0:
				return
		self.project = project
		# subscribe to the project's state-change signals
		self.project.connect("audio-state::play", self.OnProjectAudioState)
		self.project.connect("audio-state::pause", self.OnProjectAudioState)
		self.project.connect("audio-state::record", self.OnProjectAudioState)
		self.project.connect("audio-state::stop", self.OnProjectAudioState)
		self.project.connect("audio-state::export-start", self.OnProjectExportStart)
		self.project.connect("audio-state::export-stop", self.OnProjectExportStop)
		self.project.connect("undo", self.OnProjectUndo)
		self.project.transport.connect("transport-mode", self.OnTransportMode)
		# sync the time-format menu items with the transport's current mode
		self.OnTransportMode()
		self.InsertRecentProject(project.projectfile, project.name)
		self.project.PrepareClick()
		# make various buttons and menu items enabled now we have a project
		self.SetGUIProjectLoaded()
#_____________________________________________________________________
	def CheckGstreamerVersions(self):
		"""
		Check for CVS versions of Gstreamer and gnonlin. If requirements are not met,
		a warning message is issued to the user.
		"""
		#Check for CVS versions of Gstreamer and gnonlin
		message = ""
		# gst.version() returns a (major, minor, micro, nano) tuple;
		# require 0.10.9 or newer
		gstVersion = gst.version()
		if ((gstVersion[1] <= 10 and gstVersion[2] < 9) or gstVersion[1] < 10):
			message += _("You must have Gstreamer version 0.10.9 or higher.\n")
		gnl = gst.registry_get_default().find_plugin("gnonlin")
		if gnl:
			ignored, gnlMajor, gnlMinor = gnl.get_version().split(".", 2)
			#Compare gnlMajor and gnlMinor as a float so later versions of gnonlin will work
			gnlMajor = float(gnlMajor)
			gnlMinor = float(gnlMinor)
			if gnlMajor < 10 or gnlMinor < 4.2:
				message += _("You must have Gstreamer plugin gnonlin version 0.10.4.2 or later.\n")
		elif not gnl:
			message += _("Gstreamer plugin gnonlin is not installed.") + \
			_("\nSee http://doc.jokosher.org/Installation for more details.\n")
		# "level" ships with gst-plugins-good; its absence implies the
		# plugin packs are missing
		if not gst.registry_get_default().find_plugin("level"):
			message += _("You must have the Gstreamer plugin packs gst-plugins-base and gst-plugins-good installed.\n")
		if message:
			dlg = gtk.MessageDialog(self.window,
				gtk.DIALOG_MODAL | gtk.DIALOG_DESTROY_WITH_PARENT,
				gtk.MESSAGE_WARNING,
				gtk.BUTTONS_CLOSE)
			dlg.set_markup(_("<big>Some functionality will not work correctly or at all.</big>\n\n%s") % message)
			dlg.run()
			dlg.destroy()
#_____________________________________________________________________
def SetStatusBar(self, message):
"""
Appends the message parameter to the status bar text.
Parameters:
message -- string to append to the status bar text.
"""
return self.statusbar.Push(message)
#_____________________________________________________________________
def ClearStatusBar(self, messageID):
"""
Clears the status bar text in the position pointed by messageID.
Parameters:
messageID -- the message identifier of the text to be cleared.
"""
self.statusbar.Remove(messageID)
#_____________________________________________________________________
	def OnHelpContentsMenu(self, widget=None):
		"""
		Calls the appropiate help tool with the user manual in the correct
		locale.
		Parameters:
			widget -- reserved for GTK callbacks, don't use it explicitly.
		"""
		# ghelp: URIs need pygtk >= 2.14; older versions fall back to the website
		if gtk.pygtk_version[0] == 2 and gtk.pygtk_version[1] < 14:
			helpfile = "http://doc.jokosher.org"
		elif Globals.USE_LOCAL_HELP:
			helpfile = "ghelp:" + Globals.HELP_PATH
		else:
			helpfile = "ghelp:jokosher"
		Utils.OpenExternalURL(url=helpfile, message=_("<big>Couldn't launch the Jokosher documentation site.</big>\n\nPlease visit %s to access it."), parent=self.window)
#_____________________________________________________________________
def OnForumsMenu(self, widget):
"""
Opens the Jokosher forum in the user's default web browser.
Parameters:
widget -- reserved for GTK callbacks, don't use it explicitly.
"""
Utils.OpenExternalURL(url="http://www.jokosher.org/forums", message=_("<big>Couldn't launch the forums website automatically.</big>\n\nPlease visit %s to access them."), parent=self.window)
#_____________________________________________________________________
	def OnContributingDialog(self, widget):
		"""
		Creates and shows the "Contributing to Jokosher" dialog.
		Parameters:
			widget -- reserved for GTK callbacks, don't use it explicitly.
		"""
		self.contribTree = gtk.glade.XML(Globals.GLADE_PATH, "ContributingDialog")
		# grab references to the ContributingDialog window and vbox
		self.contribdialog = self.contribTree.get_widget("ContributingDialog")
		self.contribvbox = self.contribTree.get_widget("vbox14")
		self.contribdialog.set_icon(self.icon)
		# centre the ContributingDialog window on MainWindow
		self.contribdialog.set_transient_for(self.window)
		# set the contributing image
		self.topimage = self.contribTree.get_widget("topimage")
		self.topimage.set_from_file(os.path.join(Globals.IMAGE_PATH, "jokosher-logo.png"))
		# create the bottom vbox containing the contributing website link
		vbox = gtk.VBox()
		label = gtk.Label()
		label.set_markup(_("<b>To find out more, visit:</b>"))
		vbox.pack_start(label, False, False)
		# gtk.LinkButton only exists from GTK/PyGTK 2.10 onwards;
		# older versions get a plain, non-clickable label
		if gtk.pygtk_version >= (2, 10, 0) and gtk.gtk_version >= (2, 10, 0):
			contriblnkbtn = gtk.LinkButton("http://www.jokosher.org/contribute", label="http://www.jokosher.org/contribute")
			contriblnkbtn.connect("clicked", self.OnContributingLinkButtonClicked)
			vbox.pack_start(contriblnkbtn, False, False)
		else:
			vbox.pack_start(gtk.Label("http://www.jokosher.org/contribute"), False, False)
		self.contribvbox.pack_start(vbox, False, False)
		self.contribdialog.show_all()
#_____________________________________________________________________
def OnContributingLinkButtonClicked(self, widget):
"""
Opens the Jokosher contributing website in the user's default web browser.
Parameters:
widget -- reserved for GTK callbacks, don't use it explicitly.
"""
Utils.OpenExternalURL(url="http://www.jokosher.org/contribute",
message=_("<big>Couldn't launch the contributing website automatically.</big>\n\nPlease visit %s to access it."), parent=self.window)
#_____________________________________________________________________
def GetDistroVersion(self):
"""
Obtain a string with the distribution name and version.
Returns:
A string with the distribution name and version.
"""
versionStr = ""
try:
#distro name
output = Popen(args=["lsb_release", "-i"], stdout=PIPE).stdout.read()
versionStr += output[output.find("\t")+1:len(output)-1]
#distro version
output = Popen(args=["lsb_release", "-r"], stdout=PIPE).stdout.read()
versionStr += " " + output[output.find("\t")+1:len(output)-1]
except OSError:
versionStr = None
return versionStr
#_____________________________________________________________________
def OnSystemInformation(self, widget):
    """
    Displays a small window with the system information.

    Parameters:
        widget -- reserved for GTK callbacks, don't use it explicitly.
    """
    self.sysInfoTree = gtk.glade.XML(Globals.GLADE_PATH, "SystemInformationDialog")
    # grab references to the SystemInformationDialog window and labels
    self.sysInfoDialog = self.sysInfoTree.get_widget("SystemInformationDialog")
    self.gstVersionStr = self.sysInfoTree.get_widget("labelGStreamerVersion")
    self.gnonlinVersionStr = self.sysInfoTree.get_widget("labelGnonlinVersion")
    self.distroVersionStr = self.sysInfoTree.get_widget("labelDistributionVersion")
    sysInfoCloseButton = self.sysInfoTree.get_widget("closeButton")
    # connect the close button
    sysInfoCloseButton.connect("clicked", lambda dialog: self.sysInfoDialog.destroy())
    # set the version strings to the appropriate value
    gstVersion = "%s.%s.%s.%s" % gst.version()
    self.gstVersionStr.set_text(gstVersion)
    gnlVersion = gst.registry_get_default().find_plugin("gnonlin")
    if gnlVersion:
        ignored, gnlMajor, gnlMinor = gnlVersion.get_version().split(".", 2)
        message = "%s.%s" % (gnlMajor, gnlMinor)
    else:
        # BUGFIX: the original did `message += ...` here, which raised
        # UnboundLocalError because `message` was never initialized on
        # this branch; assign it instead.
        message = _("Gnonlin is missing!")
    self.gnonlinVersionStr.set_text(message)
    distroVersion = self.GetDistroVersion()
    if distroVersion is not None:
        self.distroVersionStr.set_text(distroVersion)
    else:
        self.distroVersionStr.set_text(_("Unknown"))
#_____________________________________________________________________
def ShowOpenProjectErrorDialog(self, error, parent=None):
    """
    Creates and shows a dialog to inform the user about an error that has ocurred.

    Parameters:
        error -- error object carrying an errno code and extra info.
        parent -- parent window of the error message dialog.
    """
    parent = parent or self.window
    # map each known errno to a builder for its user-visible message
    builders = {
        1: lambda: _("The URI scheme '%s' is either invalid or not supported.") % error.info,
        2: lambda: _("Unable to unzip the project file %s") % error.info,
        3: lambda: _("The project file was created with version \"%s\" of Jokosher.\n") % error.info +
                   _("Projects from version \"%s\" are incompatible with this release.\n") % error.info,
        4: lambda: _("The project:\n%s\n\ndoes not exist.\n") % error.info,
        5: lambda: "%s\n%s\n\n%s" % (
            _("The project file could not be opened.\n"),
            _("It is recommended that you report this to the Jokosher developers or get help at http://www.jokosher.org/forums/"),
            error.info),
    }
    message = builders.get(error.errno, lambda: _("The project file could not be opened.\n"))()
    dlg = gtk.MessageDialog(parent,
                            gtk.DIALOG_MODAL | gtk.DIALOG_DESTROY_WITH_PARENT,
                            gtk.MESSAGE_ERROR,
                            gtk.BUTTONS_OK,
                            message)
    dlg.set_icon(self.icon)
    dlg.run()
    dlg.destroy()
#_____________________________________________________________________
def OnExtensionManagerDialog(self, widget):
    """
    Creates and shows the "Extension Manager" dialog.

    Parameters:
        widget -- reserved for GTK callbacks, don't use it explicitly.
    """
    dialog_class = ExtensionManagerDialog.ExtensionManagerDialog
    dialog_class(self)
#_____________________________________________________________________
def OnAddAudio(self, widget):
    """
    Adds an audio file to the selected Instrument.

    Parameters:
        widget -- reserved for GTK callbacks, don't use it explicitly.
    """
    # first selected instrument wins; None when nothing is selected
    selected_id = next((instr.id for instr in self.project.instruments if instr.isSelected), None)
    if selected_id is None:
        return
    for view_id, instr_viewer in self.workspace.recordingView.views:
        if view_id == selected_id:
            instr_viewer.eventLane.CreateEventFromFile()
#_____________________________________________________________________
def OnRemoveInstrument(self, widget):
    """
    Removes all selected Instruments from the Project.

    Parameters:
        widget -- reserved for GTK callbacks, don't use it explicitly.
    """
    # Collect first so the instrument list is never mutated while iterating.
    doomed = [instr for instr in self.project.instruments if instr.isSelected]
    for instr in doomed:
        # deselect so an undo doesn't restore two selected instruments
        instr.isSelected = False
    if doomed:
        self.project.DeleteInstrumentsOrEvents(doomed)
#_____________________________________________________________________
def ShowImportFileChooser(self):
"""
Creates a file chooser dialog and gets the filename to be imported,
as well as if the file should be copied to the project folder or not.
Returns:
A 2-tuple containing a list of file paths to be imported and a boolean
that will be true if the user requested the file to be copied to the project folder.
Both entries in the tuple will be None if the dialog was cancelled.
"""
buttons = (gtk.STOCK_CANCEL, gtk.RESPONSE_CANCEL, gtk.STOCK_OPEN, gtk.RESPONSE_OK)
copyfile = gtk.CheckButton(_("Copy file to project"))
# Make it copy files to audio dir by default
copyfile.set_active(True)
copyfile.show()
dlg = gtk.FileChooserDialog(_("Add Audio File..."), action=gtk.FILE_CHOOSER_ACTION_OPEN, buttons=buttons)
dlg.set_default_response(gtk.RESPONSE_OK)
dlg.set_icon(self.icon)
# start browsing from the last-used project folder when it still exists
if os.path.exists(Globals.settings.general["projectfolder"]):
dlg.set_current_folder(Globals.settings.general["projectfolder"])
else:
dlg.set_current_folder(os.path.expanduser("~"))
dlg.set_extra_widget(copyfile)
dlg.set_select_multiple(True)
# embed an audio preview widget so files can be auditioned before import
vbox = gtk.VBox()
audiopreview = AudioPreview.AudioPreview()
vbox.pack_start(audiopreview, True, False)
vbox.show_all()
dlg.set_preview_widget(vbox)
dlg.set_use_preview_label(False)
dlg.connect("selection-changed", audiopreview.OnSelection)
response = dlg.run()
if response == gtk.RESPONSE_OK:
#stop the preview audio from playing without destroying the dialog
audiopreview.OnDestroy()
dlg.hide()
# remember the chosen folder as the default for the next import
Globals.settings.general["projectfolder"] = os.path.dirname(dlg.get_filename())
Globals.settings.write()
filenames = dlg.get_filenames()
copyfileBool = copyfile.get_active()
dlg.destroy()
return (filenames, copyfileBool)
dlg.destroy()
return (None, None)
#_____________________________________________________________________
def OnAddAudioFile(self, widget=None):
    """
    Called when the "Add Audio File Instrument" in the project menu is clicked.

    Parameters:
        widget -- reserved for GTK callbacks, don't use it explicitly.
    """
    filenames, copyfile = self.ShowImportFileChooser()
    # filenames is None when the user cancelled the dialog
    if not filenames:
        return
    self.project.AddInstrumentAndEvents(filenames, copyfile)
#_____________________________________________________________________
def PopulateMixdownAsMenu(self):
    """
    If there are any saved mixdown profiles, create a Mixdown As submenu in
    the file menu and add links to them.
    """
    self.mixdown_as_header = None
    savefolder = os.path.join(Globals.JOKOSHER_DATA_HOME, 'mixdownprofiles') # created by Globals
    profiles = os.listdir(savefolder)
    if not profiles:
        return
    # remove any stale files that are not mixdown profiles
    for item in profiles:
        if not item.endswith(".profile"):
            # BUGFIX: remove from the folder that was actually listed
            # (savefolder) and keep scanning -- the original removed from
            # MIXDOWN_PROFILES_PATH and broke after the first stale file.
            os.remove(os.path.join(savefolder, item))
    profiles = os.listdir(savefolder)
    filemenulist = self.filemenu.get_submenu()
    # If there's already a Mixdown As submenu, delete it and recreate it
    for i in filemenulist.get_children():
        if i.get_children():
            if i.get_children()[0].get_label() == _("Mix_down As"):
                filemenulist.remove(i)
                i.destroy()
    # Create a Mixdown As submenu header with one entry per profile
    self.mixdown_as_header = gtk.MenuItem(label=_("Mix_down As"))
    submenu = gtk.Menu()
    for p in profiles:
        profilenames = p.split(".")[0]
        menuitem = gtk.MenuItem(label=profilenames)
        menuitem.connect("activate", self.OnExport, profilenames)
        submenu.append(menuitem)
    self.mixdown_as_header.set_submenu(submenu)
    # insert it after "Mixdown Project..."
    insert_position = None
    for counter, i in enumerate(filemenulist.get_children()):
        if i.get_children():
            if i.get_children()[0].get_label() == _("_Mixdown Project..."):
                insert_position = counter
    # BUGFIX: compare against None -- position 0 is a valid insert point
    # that the original truthiness test silently skipped.
    if insert_position is not None:
        self.filemenu.get_submenu().insert(self.mixdown_as_header, insert_position + 1)
    self.filemenu.show_all()
#_____________________________________________________________________
def OnProjectProperties(self, widget=None):
    """
    Called when the "Properties..." in the project menu is clicked.

    Parameters:
        widget -- reserved for GTK callbacks, don't use it explicitly.
    """
    if not self.project:
        return
    tree = gtk.glade.XML(Globals.GLADE_PATH, "ProjectPropertiesDialog")
    dlg = tree.get_widget("ProjectPropertiesDialog")
    nameEntry = tree.get_widget("nameEntry")
    authorEntry = tree.get_widget("authorEntry")
    notesTextView = tree.get_widget("notesTextView")
    # pre-fill the widgets with the current project metadata
    nameEntry.set_text(self.project.name)
    authorEntry.set_text(self.project.author)
    notesBuffer = gtk.TextBuffer()
    notesBuffer.set_text(self.project.notes)
    notesTextView.set_buffer(notesBuffer)
    dlg.connect("response", self.OnProjectPropertiesClose, nameEntry, authorEntry, notesTextView)
    dlg.show_all()
#_____________________________________________________________________
def OnProjectPropertiesClose(self, dialog, response, nameEntry, authorEntry, notesTextView):
    """
    Called when the "Project Properties" windows is closed.

    Parameters:
        dialog -- the properties dialog, destroyed on exit.
        response -- GTK response id; changes are saved only on CLOSE.
        nameEntry, authorEntry, notesTextView -- widgets holding the edits.
    """
    if self.project and response == gtk.RESPONSE_CLOSE:
        textbuf = notesTextView.get_buffer()
        start, end = textbuf.get_bounds()
        self.project.SetName(nameEntry.get_text())
        self.project.SetAuthor(authorEntry.get_text())
        self.project.SetNotes(textbuf.get_text(start, end))
    dialog.destroy()
#_____________________________________________________________________
#=========================================================================
def main():
    """
    Main entry point for Jokosher.
    """
    # BUGFIX: initialize GObject thread support before any GTK objects are
    # created -- PyGTK requires threads_init() to run before threads or the
    # main loop are used; the original called it after building the app.
    gobject.threads_init()
    MainApp()
    gtk.main()

if __name__ == "__main__":
    main()
|
lxlxlo/jokosher-devel
|
Jokosher/JokosherApp.py
|
Python
|
gpl-2.0
| 68,617
|
[
"VisIt"
] |
b0e4199688da6efeb7e6f92685849b3139f550901ff0feef7437bdaa3837cfcf
|
import ast
from .. import itypes
from . import functions, classes, expressions, assignment, modules
def parse_statements(statements, scope=None, class_type=None):
    """
    Parse a block of statement nodes (or raw source text) and return the
    result dict produced by StatementBlockTypeParser.
    """
    if isinstance(statements, str):
        # Accept raw source text: wrap the parsed module in a one-element list.
        statements = [ast.parse(statements)]
    return StatementBlockTypeParser(scope, class_type).parse_statements(statements)
class StatementBlockTypeParser(ast.NodeVisitor):
"""
AST visitor that walks one block of statements, recording the types of
assigned names into `scope` and accumulating the block's possible
return types.
"""
def __init__(self, scope, class_type=None):
self.scope = scope
# TypeSet accumulating every type this block can return
self.returns = itypes.TypeSet()
# set when parsing a class body so methods get a member scope
self.class_type = class_type
def parse_statements(self, nodes):
"""Visit each node and return {'return': <TypeSet of return types>}."""
for node in nodes:
self.visit(node)
return {'return': self.returns}
def visit_Assign(self, node):
# a = b = value : record the value's type for every target
for target in node.targets:
assignment.assign_to_node(target, node.value, self.scope)
def visit_AugAssign(self, node):
# a += b is modelled as a = a + b for type purposes
op_node = ast.BinOp(node.target, node.op, node.value)
assignment.assign_to_node(node.target, op_node, self.scope)
def visit_For(self, node):
# the loop variable gets the element type of the iterable
iterator = expressions.get_expression_type(node.iter, self.scope).get_iter()
assignment.assign_to_node(node.target, iterator, self.scope)
self.generic_visit(node)
def visit_AsyncFor(self, node):
self.visit_For(node)
def visit_With(self, node):
# `with expr as name:` binds name to the context expression's type
for item in node.items:
if item.optional_vars:
assignment.assign_to_node(item.optional_vars, item.context_expr, self.scope)
self.generic_visit(node)
def visit_AsyncWith(self, node):
self.visit_With(node)
def visit_FunctionDef(self, node):
"""Build a function skeleton, parse its body, and bind it in scope."""
func = functions.get_function_skeleton_from_node(node)
if self.class_type is not None:
func_scope = functions.create_member_scope_from_node(func, node, self.scope, self.class_type)
else:
func_scope = functions.create_function_scope_from_node(func, node, self.scope)
results = parse_statements(node.body, func_scope)
if results['return']:
func.return_values = results['return']
else:
# no return statement anywhere: the function returns None
func.return_values = itypes.get_type_by_name('None')
self.scope[node.name] = func
def visit_ClassDef(self, node):
"""Build a class skeleton, parse its body in a class scope, then apply it."""
class_ = classes.get_class_skeleton_from_node(node, self.scope)
self.scope[node.name] = class_
class_scope = classes.create_class_scope_from_node(node, self.scope)
parse_statements(node.body, class_scope, class_)
classes.apply_scope_to_class(class_, class_scope)
def visit_Import(self, node):
# `import a.b as c` binds c (or a.b) to a module object
for alias in node.names:
name = alias.asname or alias.name
self.scope[name] = modules.module_from_name(alias.name, self.scope)
def visit_ImportFrom(self, node):
module = modules.module_from_name(node.module, self.scope, node.level)
for alias in node.names:
name = alias.asname or alias.name
if not module.has_attr(alias.name):
if hasattr(module, 'scope'):
# presumably `from pkg import submodule`: load the submodule
# lazily -- TODO confirm against modules.module_from_name semantics
module.set_attr(alias.name, modules.module_from_name(alias.name, module.scope, level=1))
self.scope[name] = module.get_attr(alias.name)
def visit_Return(self, node):
# record the returned expression's type; bare `return` yields None
if node.value:
self.returns = self.returns.add_type(expressions.get_expression_type(node.value, self.scope))
else:
self.returns = self.returns.add_type(itypes.get_type_by_name('None'))
|
furbrain/tested
|
tested/parsers/statements.py
|
Python
|
gpl-3.0
| 3,472
|
[
"VisIt"
] |
f6b9d077efbfb8e12242a4a19de02eebce7da782e3f99b8de20a034267c6171b
|
"""
Message Queue Handler
"""
__RCSID__ = "$Id$"
import json
import logging
import socket
from DIRAC.Resources.MessageQueue.MQCommunication import createProducer
class MessageQueueHandler(logging.Handler):
    """
    MessageQueueHandler is a custom handler from logging.
    It has no equivalent in the standard logging library because it is linked to DIRAC.

    Like StreamHandler sends records to a stream and FileHandler to a file,
    this handler ships log records to a message queue server.
    There is an assumption made that the formatter used is JsonFormatter.
    """

    def __init__(self, queue):
        """
        Initialization of the MessageQueueHandler.

        :params queue: string representing the queue identifier in the configuration.
                       example: "mardirac3.in2p3.fr::Queue::TestQueue"
        """
        super(MessageQueueHandler, self).__init__()
        self.hostname = socket.gethostname()
        # producer stays None when the queue connection cannot be created;
        # emit() then silently drops records (best-effort logging).
        self.producer = None
        result = createProducer(queue)
        if result['OK']:
            self.producer = result['Value']

    def emit(self, record):
        """
        Add the record to the message queue.

        :params record: log record object
        """
        # tag the record with the emitting host before formatting
        record.hostname = self.hostname
        payload = self.format(record)
        if self.producer is not None:
            self.producer.put(json.loads(payload))
|
arrabito/DIRAC
|
FrameworkSystem/private/standardLogging/Handler/MessageQueueHandler.py
|
Python
|
gpl-3.0
| 1,385
|
[
"DIRAC"
] |
783ae5f6ac2564e16f633cd758ea7a8633170eea32c5ccd9fae28c8dfeacbffa
|
import regex
def parse(blast_fh, outfmt):
    """ Function to provide similar use to biopython's SeqIO """
    if str(outfmt) != "6":
        # unsupported format: mirror the original implicit None
        return None
    return BlastParser.parse_outfmt_6(blast_fh=blast_fh)
class BlastParser(object):
    """
    Just a shell of a BLAST parser so I don't have to rewrite this.
    Wow, this is a really, really terrible class...
    """
    def __init__(self, blast_f, outfmt):
        # BUGFIX: the original called self.parse(IN, outfmt), but this class
        # has no `parse` method (parse is module-level), which raised
        # AttributeError; delegate to the module-level function instead.
        # The file is also now closed deterministically via `with`.
        with open(blast_f, 'r') as IN:
            parse(IN, outfmt)

    @classmethod
    def parse_outfmt_6(cls, blast_fh):
        """
        Parses standard outfmt 6 BLAST results.

        Yields: BlastRecord objects, one per hit line.
        Excepts: AssertionError when a line does not have 12 tab-separated columns.
        """
        fieldnames = ["query", "subject", "perc_id", "length", "mismatch",
                      "gapopen", "qstart", "qend", "sstart", "send",
                      "evalue", "bitscore"]
        for line in blast_fh:
            elems = line[:-1].split("\t")
            # attempt to make sure it is in standard format
            assert len(elems) == 12
            param_dict = dict(zip(fieldnames, elems))
            # assign correct types to the data
            for param in ['length', 'mismatch', 'gapopen', 'qstart', 'qend', 'sstart', 'send']:
                param_dict[param] = int(param_dict[param])
            for param in ['perc_id', 'evalue', 'bitscore']:
                param_dict[param] = float(param_dict[param])
            yield BlastRecord(param_dict)
class BlastRecord(object):
    """ An individual hit? Or a query? Right now just a single hit"""

    def __init__(self, param_dict):
        """ Needs param & error checking """
        # expose every parsed column as an attribute of the record
        for k, v in param_dict.items():
            setattr(self, k, v)

    def get_subj_gi(self):
        """
        Returns: subject's GI as int
        Excepts: ValueError when the record has no 'subject' attribute;
                 RuntimeError when no GI is present in the subject string.
        """
        try:
            match = regex.search(r"gi\|(\d*)\|", self.subject)
        except AttributeError:
            # BUGFIX: a missing attribute raises AttributeError, not KeyError,
            # so the original `except KeyError` clause could never fire.
            raise ValueError("Record has no 'subject' attribute")
        else:
            if match:
                return(int(match.group(1)))
            else:
                raise RuntimeError("GI wasn't found for subject: {}".format(self.subject))
|
hunter-cameron/Bioinformatics
|
python/mypyli/blastparser.py
|
Python
|
mit
| 2,179
|
[
"BLAST",
"Biopython"
] |
284b5aa788e747f814274c96b6493fe95943c7776942f921fe0c0a6f9ed9ce18
|
from compliance_checker.ioos import IOOS0_1Check, IOOS1_1Check
from compliance_checker.tests.resources import STATIC_FILES
from compliance_checker.tests import BaseTestCase
from netCDF4 import Dataset
import os
class TestIOOS0_1(BaseTestCase):
'''
Tests for the IOOS Inventory Metadata v0.1
'''
def setUp(self):
"""Load the gold-standard dataset and instantiate the v0.1 check suite."""
# Use the NCEI Gold Standard Point dataset for IOOS checks
self.ds = self.load_dataset(STATIC_FILES['ncei_gold_point_1'])
self.ioos = IOOS0_1Check()
def test_cc_meta(self):
"""The suite must report spec name 'ioos' and version '0.1'."""
assert self.ioos._cc_spec == 'ioos'
assert self.ioos._cc_spec_version == '0.1'
def test_global_attributes(self):
'''
Tests that all global attributes checks are working
'''
# Create an empty dataset that writes to /dev/null This acts as a
# temporary netCDF file in-memory that never gets written to disk.
nc_obj = Dataset(os.devnull, 'w', diskless=True)
self.addCleanup(nc_obj.close)
# empty dataset: every global-attribute result must be bad
results = self.ioos.check_global_attributes(nc_obj)
for result in results:
self.assert_result_is_bad(result)
attrs = [
'acknowledgement',
'publisher_email',
'institution',
'publisher_name',
'Conventions'
]
# after setting every required attribute, every result must be good
for attr in attrs:
setattr(nc_obj, attr, 'test')
results = self.ioos.check_global_attributes(nc_obj)
for result in results:
self.assert_result_is_good(result)
def test_variable_attributes(self):
'''
Tests that the platform variable attributes check is working
'''
# Create an empty dataset that writes to /dev/null This acts as a
# temporary netCDF file in-memory that never gets written to disk.
nc_obj = Dataset(os.devnull, 'w', diskless=True)
self.addCleanup(nc_obj.close)
# The dataset needs at least one variable to check that it's missing
# all the required attributes.
nc_obj.createDimension('time', 1)
nc_obj.createVariable('platform', 'S1', ())
platform = nc_obj.variables['platform']
results = self.ioos.check_variable_attributes(nc_obj)
for result in results:
self.assert_result_is_bad(result)
# with all required platform attributes set, results must be good
platform.long_name = 'platform'
platform.short_name = 'platform'
platform.source = 'glider'
platform.ioos_name = 'urn:ioos:station:glos:leorgn'
platform.wmo_id = '1234'
platform.comment = 'test'
results = self.ioos.check_variable_attributes(nc_obj)
for result in results:
self.assert_result_is_good(result)
def test_variable_units(self):
'''
Tests that the variable units test is working
'''
# this check tests that units attribute is present on EVERY variable
# Create an empty dataset that writes to /dev/null This acts as a
# temporary netCDF file in-memory that never gets written to disk.
nc_obj = Dataset(os.devnull, 'w', diskless=True)
self.addCleanup(nc_obj.close)
# The dataset needs at least one variable to check that it's missing
# all the required attributes.
nc_obj.createDimension('time', 1)
nc_obj.createVariable('sample_var', 'd', ('time',))
sample_var = nc_obj.variables['sample_var']
results = self.ioos.check_variable_units(nc_obj)
self.assert_result_is_bad(results)
sample_var.units = 'm'
sample_var.short_name = 'sample_var'
results = self.ioos.check_variable_units(nc_obj)
self.assert_result_is_good(results)
def test_altitude_units(self):
'''
Tests that the altitude variable units test is working
'''
results = self.ioos.check_altitude_units(self.ds)
self.assert_result_is_good(results)
# Now test an nc file with a 'z' variable without units
# Create an empty dataset that writes to /dev/null This acts as a
# temporary netCDF file in-memory that never gets written to disk.
nc_obj = Dataset(os.devnull, 'w', diskless=True)
self.addCleanup(nc_obj.close)
# The dataset needs at least one variable to check that it's missing
# all the required attributes.
nc_obj.createDimension('time', 1)
nc_obj.createVariable('z', 'd', ('time',))
z = nc_obj.variables['z']
z.short_name = 'sample_var'
# NOTE(review): this bad case calls check_variable_units rather than
# check_altitude_units -- presumably because a unitless 'z' also fails
# the general units check; confirm this is intentional.
results = self.ioos.check_variable_units(nc_obj)
self.assert_result_is_bad(results)
class TestIOOS1_1(BaseTestCase):
'''
Tests for the compliance checker implementation of IOOS Metadata Profile
for NetCDF, Version 1.1
'''
def setUp(self):
"""Load the IOOS 1.1 gold dataset and instantiate the v1.1 check suite."""
# Use the IOOS 1_1 dataset for testing
self.ds = self.load_dataset(STATIC_FILES['ioos_gold_1_1'])
self.ioos = IOOS1_1Check()
def test_cc_meta(self):
"""The suite must report spec name 'ioos' and version '1.1'."""
assert self.ioos._cc_spec == 'ioos'
assert self.ioos._cc_spec_version == '1.1'
def test_required_attributes(self):
'''
Tests that required attributes test is working properly
'''
results = self.ioos.check_high(self.ds)
for result in results:
self.assert_result_is_good(result)
def test_recomended_attributes(self):
'''
Tests that recommended attributes test is working properly
'''
results = self.ioos.check_recommended(self.ds)
for result in results:
self.assert_result_is_good(result)
def test_bad_platform_variables(self):
'''
Tests that the platform variable attributes check is working
'''
# Create an empty dataset that writes to /dev/null This acts as a
# temporary netCDF file in-memory that never gets written to disk.
nc_obj = Dataset(os.devnull, 'w', diskless=True)
self.addCleanup(nc_obj.close)
# The dataset needs at least one variable to check that it's missing
# all the required attributes.
nc_obj.createDimension('time', 1)
nc_obj.platform = 'platform'
# global attribute 'platform' points to variable that does not exist in dataset
results = self.ioos.check_platform_variables(nc_obj)
for result in results:
self.assert_result_is_bad(result)
def test_good_platform_variables(self):
'''
Tests that the platform variable attributes check is working
'''
results = self.ioos.check_platform_variables(self.ds)
for result in results:
self.assert_result_is_good(result)
def test_bad_geophysical_vars_fill_value(self):
'''
Tests that the geophysical variable _FillValue check is working
'''
# Create an empty dataset that writes to /dev/null This acts as a
# temporary netCDF file in-memory that never gets written to disk.
nc_obj = Dataset(os.devnull, 'w', diskless=True)
self.addCleanup(nc_obj.close)
# The dataset needs at least one variable to check that it's missing
# all the required attributes.
nc_obj.createDimension('time', 1)
nc_obj.createVariable('sample_var', 'd', ('time',))
# Define some variable attributes but don't specify _FillValue
sample_var = nc_obj.variables['sample_var']
sample_var.units = 'm'
sample_var.short_name = 'temp'
# the variable lacks _FillValue, so the check must report bad results
results = self.ioos.check_geophysical_vars_fill_value(nc_obj)
for result in results:
self.assert_result_is_bad(result)
def test_good_geophysical_vars_fill_value(self):
'''
Tests that the geophysical variable _FillValue check is working
'''
results = self.ioos.check_geophysical_vars_fill_value(self.ds)
for result in results:
self.assert_result_is_good(result)
def test_bad_geophysical_vars_standard_name(self):
'''
Tests that the geophysical variable standard_name check is working
'''
# Create an empty dataset that writes to /dev/null This acts as a
# temporary netCDF file in-memory that never gets written to disk.
nc_obj = Dataset(os.devnull, 'w', diskless=True)
self.addCleanup(nc_obj.close)
# The dataset needs at least one variable to check that it's missing
# all the required attributes.
nc_obj.createDimension('time', 1)
nc_obj.createVariable('sample_var', 'd', ('time',))
# Define some variable attributes but don't specify standard_name
sample_var = nc_obj.variables['sample_var']
sample_var.units = 'm'
sample_var.short_name = 'temp'
# the variable lacks standard_name, so the check must report bad results
results = self.ioos.check_geophysical_vars_standard_name(nc_obj)
for result in results:
self.assert_result_is_bad(result)
def test_good_geophysical_vars_standard_name(self):
'''
Tests that the geophysical variable standard_name check is working
'''
results = self.ioos.check_geophysical_vars_standard_name(self.ds)
for result in results:
self.assert_result_is_good(result)
def test_bad_units(self):
'''
Tests that the valid units check is working
'''
# Create an empty dataset that writes to /dev/null This acts as a
# temporary netCDF file in-memory that never gets written to disk.
nc_obj = Dataset(os.devnull, 'w', diskless=True)
self.addCleanup(nc_obj.close)
# The dataset needs at least one variable to check that it's missing
# all the required attributes.
nc_obj.createDimension('time', 1)
nc_obj.createVariable('temperature', 'd', ('time',))
# Define some variable attributes but don't specify _FillValue
sample_var = nc_obj.variables['temperature']
sample_var.units = 'degC' # Not valid units
sample_var.short_name = 'temp'
# NOTE(review): despite the method name, this calls
# check_geophysical_vars_standard_name, not check_units (which
# test_good_units uses) -- looks like a copy-paste slip from the test
# above; confirm intent before changing, since 'degC' may actually be
# accepted by the units checker.
results = self.ioos.check_geophysical_vars_standard_name(nc_obj)
for result in results:
self.assert_result_is_bad(result)
def test_good_units(self):
'''
Tests that the valid units check is working
'''
results = self.ioos.check_units(self.ds)
for result in results:
self.assert_result_is_good(result)
|
DanielJMaher/compliance-checker
|
compliance_checker/tests/test_ioos_profile.py
|
Python
|
apache-2.0
| 10,600
|
[
"NetCDF"
] |
9bf809aee0d09b168f5e9765d3fc51e023ae6b6828214ee39184fe1fabb600c2
|
########################################################################
# File : ResourcesDefaults.py
# Author : Ricardo Graciani
########################################################################
"""
Some Helper class to access Default options for Different Resources (CEs, SEs, Catalags,...)
"""
from __future__ import print_function
from DIRAC.ConfigurationSystem.Client.Helpers.Path import cfgResourceSection, cfgPath, cfgInstallPath, cfgPathToList
from DIRAC.Core.Utilities.CFG import CFG
__RCSID__ = "$Id$"
def defaultSection(resource):
    """
    Return the configuration path of the Defaults section for the given
    resource type (CEs, SEs, Catalogs, ...).
    """
    return cfgPath(cfgResourceSection, 'Defaults', resource)
def getComputingElementDefaults(ceName='', ceType='', cfg=None, currentSectionPath=''):
    """
    Return a CFG with defaults for the given CEs defined either in the
    arguments or in the provided cfg file.

    :param ceName: name of a CE section to add/overwrite in the returned CFG
    :param ceType: CEType option to set for ceName
    :param cfg: path of a cfg file containing an installation ComputingElements section
    :param currentSectionPath: CS path from which extra command-line options are read
    :return: CFG object with one section per CE, completed with per-type defaults
    """
    cesCfg = CFG()
    if cfg:
        try:
            cesCfg.loadFromFile(cfg)
            cesPath = cfgInstallPath('ComputingElements')
            if cesCfg.isSection(cesPath):
                for section in cfgPathToList(cesPath):
                    cesCfg = cesCfg[section]
        except Exception:
            # BUGFIX: was a bare `except:`, which also swallowed SystemExit
            # and KeyboardInterrupt; a malformed cfg still yields an empty CFG.
            return CFG()
    # Overwrite the cfg with Command line arguments
    if ceName:
        if not cesCfg.isSection(ceName):
            cesCfg.createNewSection(ceName)
        if currentSectionPath:
            # Add Options from Command Line
            optionsDict = __getExtraOptions(currentSectionPath)
            for name, value in optionsDict.items():
                cesCfg[ceName].setOption(name, value)  # pylint: disable=no-member
        if ceType:
            cesCfg[ceName].setOption('CEType', ceType)  # pylint: disable=no-member
    ceDefaultSection = cfgPath(defaultSection('ComputingElements'))
    # Load Default for the given type from Central configuration if defined
    ceDefaults = __gConfigDefaults(ceDefaultSection)
    for ceName in cesCfg.listSections():
        if 'CEType' in cesCfg[ceName]:
            ceType = cesCfg[ceName]['CEType']
            if ceType in ceDefaults:
                # fill in only the options the CE section does not define itself
                for option in ceDefaults[ceType].listOptions():  # pylint: disable=no-member
                    if option not in cesCfg[ceName]:
                        cesCfg[ceName].setOption(option, ceDefaults[ceType][option])  # pylint: disable=unsubscriptable-object
    return cesCfg
def __gConfigDefaults(defaultPath):
    """
    Build a CFG from a Defaults section in the central configuration.
    Returns an empty CFG when the section cannot be read.
    """
    from DIRAC import gConfig
    cfgDefaults = CFG()
    result = gConfig.getSections(defaultPath)
    if not result['OK']:
        return cfgDefaults
    for name in result['Value']:
        cfgDefaults.createNewSection(name)
        optionsResult = gConfig.getOptionsDict(cfgPath(defaultPath, name))
        if not optionsResult['OK']:
            continue
        for option, value in optionsResult['Value'].items():
            cfgDefaults[name].setOption(option, value)
    return cfgDefaults
def __getExtraOptions(currentSectionPath):
    """
    Return the options dict found at currentSectionPath in the central
    configuration, or {} when the path is empty or cannot be read.
    """
    from DIRAC import gConfig
    optionsDict = {}
    if not currentSectionPath:
        return optionsDict
    result = gConfig.getOptionsDict(currentSectionPath)
    if not result['OK']:
        return optionsDict
    # BUGFIX: removed a leftover debug `print(result)` that polluted stdout.
    return result['Value']
|
petricm/DIRAC
|
ConfigurationSystem/Client/Helpers/ResourcesDefaults.py
|
Python
|
gpl-3.0
| 3,108
|
[
"DIRAC"
] |
7bccabb64e82746e297789ea2b59580b5dc8b01348891800b4ee674522bf09d5
|
#! /usr/bin/python
#Guruprasad Ananda
"""
Filter based on nucleotide quality (PHRED score).
usage: %prog input out_file primary_species mask_species score mask_char mask_region mask_region_length
"""
from __future__ import division
from galaxy import eggs
import pkg_resources
pkg_resources.require( "bx-python" )
pkg_resources.require( "lrucache" )
try:
pkg_resources.require("numpy")
except:
pass
import psyco_full
import sys
import os, os.path
from UserDict import DictMixin
from bx.binned_array import BinnedArray, FileBinnedArray
from bx.bitset import *
from bx.bitset_builders import *
from fpconst import isNaN
from bx.cookbook import doc_optparse
from galaxy.tools.exception_handling import *
import bx.align.maf
class FileBinnedArrayDir(DictMixin):
    """
    Adapter that makes a directory of FileBinnedArray files look like
    a regular dict of BinnedArray objects.
    """
    def __init__(self, dir):
        self.dir = dir
        # cache of already-opened FileBinnedArray objects, keyed like the dict
        self.cache = {}

    def __getitem__(self, key):
        if key in self.cache:
            return self.cache[key]
        fname = os.path.join(self.dir, "%s.qa.bqv" % key)
        if not os.path.exists(fname):
            raise KeyError("File does not exist: " + fname)
        value = FileBinnedArray(open(fname))
        self.cache[key] = value
        return value
def stop_err(msg):
    """
    Write msg to stderr and abort with a non-zero exit status.

    :param msg: error message to report.
    """
    sys.stderr.write(msg)
    # BUGFIX: exit with status 1; bare sys.exit() exits 0, which made
    # callers (e.g. the Galaxy job runner) treat a fatal error as success.
    sys.exit(1)
def load_scores_ba_dir(dir):
    """
    Return a dict-like object (keyed by chromosome) whose values are
    FileBinnedArray objects created from "<key>.qa.bqv" files in `dir`.
    """
    wrapper = FileBinnedArrayDir(dir)
    return wrapper
def bitwise_and(string1, string2, maskch):
    """
    Apply a 0/1 status string to a sequence string.

    Positions where string1 is falsy ('0' or 0) are replaced by maskch;
    gap characters ('-') in string2 are always kept as-is.
    """
    masked = []
    for pos in range(len(string1)):
        flag = string1[pos]
        try:
            flag = int(flag)
        except ValueError:
            pass
        base = string2[pos]
        if base == '-':
            # gaps are never masked
            flag = 1
        masked.append(base if flag and base else maskch)
    return ''.join(masked)
def main():
# Parsing Command Line here
options, args = doc_optparse.parse( __doc__ )
try:
#chr_col_1, start_col_1, end_col_1, strand_col_1 = parse_cols_arg( options.cols )
inp_file, out_file, pri_species, mask_species, qual_cutoff, mask_chr, mask_region, mask_length, loc_file = args
qual_cutoff = int(qual_cutoff)
mask_chr = int(mask_chr)
mask_region = int(mask_region)
if mask_region != 3:
mask_length = int(mask_length)
else:
mask_length_r = int(mask_length.split(',')[0])
mask_length_l = int(mask_length.split(',')[1])
except:
stop_err( "Data issue, click the pencil icon in the history item to correct the metadata attributes of the input dataset." )
if pri_species == 'None':
stop_err( "No primary species selected, try again by selecting at least one primary species." )
if mask_species == 'None':
stop_err( "No mask species selected, try again by selecting at least one species to mask." )
mask_chr_count = 0
mask_chr_dict = {0:'#', 1:'$', 2:'^', 3:'*', 4:'?', 5:'N'}
mask_reg_dict = {0:'Current pos', 1:'Current+Downstream', 2:'Current+Upstream', 3:'Current+Both sides'}
#ensure dbkey is present in the twobit loc file
filepath = None
try:
pspecies_all = pri_species.split(',')
pspecies_all2 = pri_species.split(',')
pspecies = []
filepaths = []
for line in open(loc_file):
if pspecies_all2 == []:
break
if line[0:1] == "#":
continue
fields = line.split('\t')
try:
build = fields[0]
for i,dbkey in enumerate(pspecies_all2):
if dbkey == build:
pspecies.append(build)
filepaths.append(fields[1])
del pspecies_all2[i]
else:
continue
except:
pass
except Exception, exc:
stop_err( 'Initialization errorL %s' % str( exc ) )
if len(pspecies) == 0:
stop_err( "Quality scores are not available for the following genome builds: %s" % ( pspecies_all2 ) )
if len(pspecies) < len(pspecies_all):
print "Quality scores are not available for the following genome builds: %s" %(pspecies_all2)
scores_by_chrom = []
#Get scores for all the primary species
for file in filepaths:
scores_by_chrom.append(load_scores_ba_dir( file.strip() ))
try:
maf_reader = bx.align.maf.Reader( open(inp_file, 'r') )
maf_writer = bx.align.maf.Writer( open(out_file,'w') )
except Exception, e:
stop_err( "Your MAF file appears to be malformed: %s" % str( e ) )
maf_count = 0
for block in maf_reader:
status_strings = []
for seq in range (len(block.components)):
src = block.components[seq].src
dbkey = src.split('.')[0]
chr = src.split('.')[1]
if not (dbkey in pspecies):
continue
else: #enter if the species is a primary species
index = pspecies.index(dbkey)
sequence = block.components[seq].text
s_start = block.components[seq].start
size = len(sequence) #this includes the gaps too
status_str = '1'*size
status_list = list(status_str)
if status_strings == []:
status_strings.append(status_str)
ind = 0
s_end = block.components[seq].end
#Get scores for the entire sequence
try:
scores = scores_by_chrom[index][chr][s_start:s_end]
except:
continue
pos = 0
while pos < (s_end-s_start):
if sequence[ind] == '-': #No score for GAPS
ind += 1
continue
score = scores[pos]
if score < qual_cutoff:
score = 0
if not(score):
if mask_region == 0: #Mask Corresponding position only
status_list[ind] = '0'
ind += 1
pos += 1
elif mask_region == 1: #Mask Corresponding position + downstream neighbors
for n in range(mask_length+1):
try:
status_list[ind+n] = '0'
except:
pass
ind = ind + mask_length + 1
pos = pos + mask_length + 1
elif mask_region == 2: #Mask Corresponding position + upstream neighbors
for n in range(mask_length+1):
try:
status_list[ind-n] = '0'
except:
pass
ind += 1
pos += 1
elif mask_region == 3: #Mask Corresponding position + neighbors on both sides
for n in range(-mask_length_l,mask_length_r+1):
try:
status_list[ind+n] = '0'
except:
pass
ind = ind + mask_length_r + 1
pos = pos + mask_length_r + 1
else:
pos += 1
ind += 1
status_strings.append(''.join(status_list))
if status_strings == []: #this block has no primary species
continue
output_status_str = status_strings[0]
for stat in status_strings[1:]:
try:
output_status_str = bitwise_and (status_strings[0], stat, '0')
except Exception, e:
break
for seq in range (len(block.components)):
src = block.components[seq].src
dbkey = src.split('.')[0]
if dbkey not in mask_species.split(','):
continue
sequence = block.components[seq].text
sequence = bitwise_and (output_status_str, sequence, mask_chr_dict[mask_chr])
block.components[seq].text = sequence
mask_chr_count += output_status_str.count('0')
maf_writer.write(block)
maf_count += 1
maf_reader.close()
maf_writer.close()
print "No. of blocks = %d; No. of masked nucleotides = %s; Mask character = %s; Mask region = %s; Cutoff used = %d" %(maf_count, mask_chr_count, mask_chr_dict[mask_chr], mask_reg_dict[mask_region], qual_cutoff)
# Script entry point: run the quality filter only when executed directly,
# not when imported as a module.
if __name__ == "__main__":
    main()
|
volpino/Yeps-EURAC
|
tools/regVariation/quality_filter.py
|
Python
|
mit
| 9,250
|
[
"Galaxy"
] |
d6eaa0956c00b905e64151d2528fe6ca53cb5fd8a0e38a6fd71562d5f9c3f705
|
#!/usr/bin/env python
# Copyright 2014-2021 The PySCF Developers. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
# Author: Qiming Sun <osirpt.sun@gmail.com>
#
'''
Spin-free lambda equation of RHF-CCSD(T)
Ref:
JCP 147, 044104 (2017); DOI:10.1063/1.4994918
'''
import numpy
from pyscf import lib
from pyscf.lib import logger
from pyscf.cc import ccsd
from pyscf.cc import ccsd_lambda
# Note: not support fov != 0
def kernel(mycc, eris=None, t1=None, t2=None, l1=None, l2=None,
           max_cycle=50, tol=1e-8, verbose=logger.INFO):
    '''Iteratively solve the spin-free RHF-CCSD(T) lambda equations.

    Thin wrapper around the generic CCSD lambda solver that plugs in this
    module's (T)-corrected intermediate builder and amplitude-update step.
    Returns whatever ``ccsd_lambda.kernel`` returns (conv flag, l1, l2).
    '''
    return ccsd_lambda.kernel(
        mycc, eris, t1, t2, l1, l2,
        max_cycle, tol, verbose,
        make_intermediates, update_lambda)
def make_intermediates(mycc, t1, t2, eris):
    '''Build lambda-equation intermediates including the (T) corrections.

    Starts from the plain CCSD lambda intermediates and attaches the
    perturbative-triples contributions as ``imds.l1_t`` and ``imds.l2_t``.
    Per the module note, fov != 0 is not supported (canonical HF orbitals
    are assumed -- TODO confirm with JCP 147, 044104 (2017)).
    '''
    imds = ccsd_lambda.make_intermediates(mycc, t1, t2, eris)
    nocc, nvir = t1.shape
    eris_ovvv = numpy.asarray(eris.get_ovvv())
    eris_ovoo = numpy.asarray(eris.ovoo)
    eris_ovov = numpy.asarray(eris.ovov)
    mo_e = eris.mo_energy
    # Orbital-energy denominators: eia = e_i - e_a; d3 is the triple-pair sum.
    eia = lib.direct_sum('i-a->ia', mo_e[:nocc], mo_e[nocc:])
    d3 = lib.direct_sum('ia,jb,kc->ijkabc', eia, eia, eia)
    def p6(t):
        # Sum over the 6 simultaneous permutations of the (ijk)/(abc) pairs.
        # NOTE: local name t1 intentionally shadows the t1 amplitude argument.
        t1 = t + t.transpose(0,2,1,3,5,4)
        return t1 + t1.transpose(1,0,2,4,3,5) + t1.transpose(1,2,0,4,5,3)
    def r6(w):
        # Weighted sum over virtual-index permutations (4, +1, +1, -2, -2, -2).
        return (4 * w + w.transpose(0,1,2,4,5,3) + w.transpose(0,1,2,5,3,4)
                - 2 * w.transpose(0,1,2,5,4,3) - 2 * w.transpose(0,1,2,3,5,4)
                - 2 * w.transpose(0,1,2,4,3,5))
    # w: triples built from t2 with ovvv/ovoo integrals; v: t1/fock part.
    # Both are divided elementwise by the triples energy denominator d3.
    w =(numpy.einsum('iafb,kjcf->ijkabc', eris_ovvv.conj(), t2) -
        numpy.einsum('iajm,mkbc->ijkabc', eris_ovoo.conj(), t2)) / d3
    v =(numpy.einsum('iajb,kc->ijkabc', eris_ovov.conj(), t1) +
        numpy.einsum('ck,ijab->ijkabc', eris.fock[nocc:,:nocc], t2)) / d3
    w = p6(w)
    v = p6(v)
    imds.l1_t = numpy.einsum('jbkc,ijkabc->ia', eris_ovov, r6(w)).conj() / eia * .5
    def as_r6(m):
        # When making the derivative over t2, r6 should be called on the
        # 6-index tensor. It gives the equation for lambda2, but not in the
        # form used by the RCCSD-lambda code. A transformation was applied
        # to the RCCSD-lambda equation F(lambda)_{ijab} = 0:
        #     2/3 * F(lambda)_{ijab} + 1/3 * F(lambda)_{jiab} = 0
        # Combining this transformation with the r6 operation leads to the
        # transformation code below.
        return m * 2 - m.transpose(0,1,2,5,4,3) - m.transpose(0,1,2,3,5,4)
    m = as_r6(w * 2 + v * .5)
    joovv = numpy.einsum('kfbe,ijkaef->ijab', eris_ovvv, m.conj())
    joovv-= numpy.einsum('ncmj,imnabc->ijab', eris_ovoo, m.conj())
    # Symmetrize over simultaneous (i,j)/(a,b) exchange.
    joovv = joovv + joovv.transpose(1,0,3,2)
    rw = as_r6(w)
    joovv+= numpy.einsum('kc,ijkabc->ijab', eris.fock[:nocc,nocc:], rw.conj())
    imds.l2_t = joovv / lib.direct_sum('ia+jb->ijab', eia, eia)
    return imds
def update_lambda(mycc, t1, t2, l1, l2, eris=None, imds=None):
    '''One lambda-equation iteration including the (T) corrections.

    Runs the plain CCSD lambda update, then adds (in place) the precomputed
    perturbative-triples shifts ``imds.l1_t`` / ``imds.l2_t``.
    '''
    if eris is None:
        eris = mycc.ao2mo()
    if imds is None:
        imds = make_intermediates(mycc, t1, t2, eris)
    l1, l2 = ccsd_lambda.update_lambda(mycc, t1, t2, l1, l2, eris, imds)
    # In-place add keeps the arrays returned by the CCSD update.
    l1 += imds.l1_t
    l2 += imds.l2_t
    return l1, l2
if __name__ == '__main__':
    # Regression check: solve the CCSD(T) lambda equations for water in
    # cc-pVDZ and print the deviation of the amplitude norms from recorded
    # reference values (both prints should be ~0).
    from pyscf import gto
    from pyscf import scf
    mol = gto.Mole()
    mol.verbose = 0
    mol.atom = [
        [8 , (0. , 0. , 0.)],
        [1 , (0. , -0.757 , 0.587)],
        [1 , (0. , 0.757 , 0.587)]]
    mol.basis = 'cc-pvdz'
    mol.build()
    rhf = scf.RHF(mol)
    rhf.conv_tol = 1e-16
    rhf.scf()
    mcc = ccsd.CCSD(rhf)
    mcc.conv_tol = 1e-12
    ecc, t1, t2 = mcc.kernel()
    #l1, l2 = mcc.solve_lambda()
    #print(numpy.linalg.norm(l1)-0.0132626841292)
    #print(numpy.linalg.norm(l2)-0.212575609057)
    conv, l1, l2 = kernel(mcc, mcc.ao2mo(), t1, t2, tol=1e-8)
    print(numpy.linalg.norm(l1)-0.013575484203926739)
    print(numpy.linalg.norm(l2)-0.22029981372536928)
|
sunqm/pyscf
|
pyscf/cc/ccsd_t_lambda_slow.py
|
Python
|
apache-2.0
| 4,357
|
[
"PySCF"
] |
8d10c862c3547cfcc0a4e39a92a1ad5bde89d0ee527744d8b2986ed6df2349bc
|
# -*- coding: utf-8 -*-
import logging
import txtorcon
from PyQt5.QtWidgets import QMessageBox
from twisted.internet.defer import inlineCallbacks
from gridsync import features
# From https://styleguide.torproject.org/visuals/
# "The main Tor Project color is Purple. Use Dark Purple as a secondary option"
# Hex RGB color constants for Tor-themed UI elements.
TOR_PURPLE = "#7D4698"
TOR_DARK_PURPLE = "#59316B"
TOR_GREEN = "#68B030"
TOR_GREY = "#F8F9FA"
TOR_DARK_GREY = "#484848"
TOR_WHITE = "#FFFFFF"
def tor_required(furl):
    """Return True if *furl* can only be reached through Tor.

    A fURL requires Tor when every one of its comma-separated connection
    hints contains a ``.onion:`` address; a malformed or missing fURL
    never requires Tor.
    """
    try:
        hints = furl.split("/")[2].split(",")
    except (AttributeError, IndexError):
        # Not a string, or too few "/" components to hold hints.
        return False
    onion_hints = [hint for hint in hints if ".onion:" in hint]
    return bool(onion_hints) and len(onion_hints) == len(hints)
@inlineCallbacks
def get_tor(reactor):  # TODO: Add launch option?
    """Try to attach to an already-running Tor daemon.

    Returns the txtorcon Tor object on success, or None when the Tor
    feature is disabled or no running daemon could be reached.
    """
    if not features.tor:
        return None
    logging.debug("Looking for a running Tor daemon...")
    tor = None
    try:
        tor = yield txtorcon.connect(reactor)
    except RuntimeError as exc:
        logging.debug(
            "Could not connect to a running Tor daemon: %s", str(exc)
        )
    if tor:
        logging.debug("Connected to Tor daemon (%s)", tor.version)
    return tor
@inlineCallbacks
def get_tor_with_prompt(reactor, parent=None):
    # Like get_tor(), but keeps prompting the user with a modal
    # "Tor Required" dialog until a Tor daemon is found (Retry) or the
    # user gives up (Abort). Returns the Tor object, or None on abort.
    tor = yield get_tor(reactor)
    while not tor:
        msgbox = QMessageBox(parent)
        msgbox.setIcon(QMessageBox.Critical)
        msgbox.setWindowTitle("Tor Required")
        msgbox.setText(
            "This connection can only be made over the Tor network, however, "
            "no running Tor daemon was found or Tor has been disabled."
        )
        msgbox.setInformativeText(
            "Please ensure that Tor is running and try again.<p>For help "
            "installing Tor, visit "
            "<a href=https://torproject.org>https://torproject.org</a>"
        )
        msgbox.setStandardButtons(QMessageBox.Abort | QMessageBox.Retry)
        if msgbox.exec_() == QMessageBox.Retry:
            tor = yield get_tor(reactor)
        else:
            break  # user chose Abort; tor stays None
    return tor
|
gridsync/gridsync
|
gridsync/tor.py
|
Python
|
gpl-3.0
| 2,094
|
[
"VisIt"
] |
ed3bc8656b78de2bf54595cc94c2ae418a6d202c6019582a590842a76be15310
|
"""Tests of routines in cells.py."""
import os
import numpy as np
import pytest
from phonopy import Phonopy
from phonopy.interface.phonopy_yaml import read_cell_yaml
from phonopy.structure.atoms import PhonopyAtoms
from phonopy.structure.cells import (
ShortestPairs,
TrimmedCell,
compute_all_sg_permutations,
compute_permutation_for_rotation,
get_primitive,
get_supercell,
sparse_to_dense_svecs,
)
# Directory containing this test module; reference YAML cells live beside it.
data_dir = os.path.dirname(os.path.abspath(__file__))

# Transformation matrix from the conventional NaCl cell to its primitive cell.
primitive_matrix_nacl = [[0, 0.5, 0.5], [0.5, 0, 0.5], [0.5, 0.5, 0]]

# Reference multiplicities of shortest vectors for the NaCl supercell,
# flattened (compared against multi.ravel() in the ShortestPairs tests).
multi_nacl_ref = [
    1, 1, 2, 1, 2, 1, 4, 1, 2, 1, 4, 1, 4, 1, 8, 1,
    1, 1, 2, 1, 1, 2, 2, 2, 1, 2, 2, 2, 1, 4, 2, 4,
    1, 1, 1, 2, 2, 1, 2, 2, 1, 2, 1, 4, 2, 2, 2, 4,
    1, 1, 1, 2, 1, 2, 1, 4, 2, 1, 2, 2, 2, 2, 2, 4,
    1, 1, 1, 2, 1, 2, 1, 4, 1, 2, 1, 4, 1, 4, 1, 8,
    1, 1, 1, 2, 2, 1, 2, 2, 2, 1, 2, 2, 4, 1, 4, 2,
    1, 1, 2, 1, 1, 2, 2, 2, 2, 1, 4, 1, 2, 2, 4, 2,
    1, 1, 2, 1, 2, 1, 4, 1, 1, 2, 2, 2, 2, 2, 4, 2,
]

# Reference shortest vectors (in fractional coordinates) for selected pairs.
svecs_nacl_ref10 = [[-0.5, 0.0, 0.0], [0.5, 0.0, 0.0]]
svecs_nacl_ref30 = [
    [-0.5, -0.5, 0.0],
    [-0.5, 0.5, 0.0],
    [0.5, -0.5, 0.0],
    [0.5, 0.5, 0.0],
]
def test_compute_permutation_sno2(ph_sno2: Phonopy):
    """Test of compute_permutation by SnO2.

    ``ph_sno2`` is a pytest fixture providing a Phonopy instance.
    """
    _test_compute_permutation(ph_sno2)
def test_compute_permutation_tio2(ph_tio2: Phonopy):
    """Test of compute_permutation by TiO2.

    ``ph_tio2`` is a pytest fixture providing a Phonopy instance.
    """
    _test_compute_permutation(ph_tio2)
def test_compute_permutation_nacl(ph_nacl: Phonopy):
    """Test of compute_permutation by NaCl.

    ``ph_nacl`` is a pytest fixture providing a Phonopy instance.
    """
    _test_compute_permutation(ph_nacl)
def _test_compute_permutation(ph: Phonopy):
    # Shared driver: the batch permutations from compute_all_sg_permutations
    # must match the one-rotation-at-a-time results, and each permutation
    # must actually map the rotated positions back onto the originals.
    symmetry = ph.primitive_symmetry
    ppos = ph.primitive.scaled_positions
    plat = ph.primitive.cell.T
    symprec = symmetry.tolerance
    rots = symmetry.symmetry_operations["rotations"]
    trans = symmetry.symmetry_operations["translations"]
    perms = compute_all_sg_permutations(ppos, rots, trans, plat, symprec)
    for i, (r, t) in enumerate(zip(rots, trans)):
        ppos_rot = np.dot(ppos, r.T) + t
        perm = compute_permutation_for_rotation(ppos, ppos_rot, plat, symprec)
        np.testing.assert_array_equal(perms[i], perm)
        diff = ppos[perm] - ppos_rot
        diff -= np.rint(diff)  # fold into the unit cell (periodic images)
        # Squared Cartesian residual of every mapped atom within tolerance.
        assert ((np.dot(diff, plat) ** 2).sum(axis=1) < symprec).all()
@pytest.mark.parametrize("nosnf", [True, False])
def test_get_supercell_convcell_sio2(
    convcell_sio2: PhonopyAtoms, nosnf, helper_methods
):
    """Test of get_supercell with/without SNF by SiO2.

    ``nosnf=True`` exercises the legacy (non-Smith-normal-form) code path.
    """
    _test_get_supercell_convcell_sio2(convcell_sio2, helper_methods, is_old_style=nosnf)
@pytest.mark.parametrize("nosnf", [True, False])
def test_get_supercell_primcell_si(primcell_si: PhonopyAtoms, nosnf, helper_methods):
    """Test of get_supercell with/without SNF by Si.

    ``nosnf=True`` exercises the legacy (non-Smith-normal-form) code path.
    """
    _test_get_supercell_primcell_si(primcell_si, helper_methods, is_old_style=nosnf)
def test_get_supercell_nacl_snf(convcell_nacl: PhonopyAtoms, helper_methods):
    """Test of get_supercell using SNF by NaCl.

    The SNF and legacy code paths must produce equivalent supercells.
    """
    cell = convcell_nacl
    smat = [[-1, 1, 1], [1, -1, 1], [1, 1, -1]]
    scell = get_supercell(cell, smat, is_old_style=True)
    scell_snf = get_supercell(cell, smat, is_old_style=False)
    helper_methods.compare_cells(scell, scell_snf)
def _test_get_supercell_convcell_sio2(
    convcell_sio2: PhonopyAtoms, helper_methods, is_old_style=True
):
    # Shared driver: build a 1x2x3 SiO2 supercell and compare it against the
    # reference cell stored in SiO2-123.yaml.
    smat = np.diag([1, 2, 3])
    fname = "SiO2-123.yaml"
    scell = get_supercell(convcell_sio2, smat, is_old_style=is_old_style)
    cell_ref = read_cell_yaml(os.path.join(data_dir, fname))
    if is_old_style is True:
        # Legacy path matches the reference atom order exactly.
        helper_methods.compare_cells_with_order(scell, cell_ref)
    else:
        # SNF path may reorder atoms, so compare without order.
        helper_methods.compare_cells(scell, cell_ref)
def _test_get_supercell_primcell_si(
    primcell_si: PhonopyAtoms, helper_methods, is_old_style=True
):
    # Shared driver: expand the Si primitive cell to its conventional cell
    # and compare against the reference stored in Si-conv.yaml.
    smat = [[-1, 1, 1], [1, -1, 1], [1, 1, -1]]
    fname = "Si-conv.yaml"
    scell = get_supercell(primcell_si, smat, is_old_style=is_old_style)
    cell_ref = read_cell_yaml(os.path.join(data_dir, fname))
    if is_old_style is True:
        # Legacy path matches the reference atom order exactly.
        helper_methods.compare_cells_with_order(scell, cell_ref)
    else:
        # SNF path may reorder atoms, so compare without order.
        helper_methods.compare_cells(scell, cell_ref)
def test_get_primitive_convcell_nacl(
    convcell_nacl: PhonopyAtoms, primcell_nacl: PhonopyAtoms, helper_methods
):
    """Test get_primitive by NaCl.

    Reducing the conventional cell must reproduce the fixture's primitive
    cell including atom order.
    """
    pcell = get_primitive(convcell_nacl, primitive_matrix_nacl)
    helper_methods.compare_cells_with_order(pcell, primcell_nacl)
@pytest.mark.parametrize("store_dense_svecs", [True, False])
def test_get_primitive_convcell_nacl_svecs(
    convcell_nacl: PhonopyAtoms, store_dense_svecs
):
    """Test shortest vectors by NaCl.

    Checks the array layouts of both the dense and sparse storage formats
    of the shortest-vector data.
    """
    pcell = get_primitive(
        convcell_nacl, primitive_matrix_nacl, store_dense_svecs=store_dense_svecs
    )
    svecs, multi = pcell.get_smallest_vectors()
    if store_dense_svecs:
        # Dense layout: flat vector table plus (index, count) per pair.
        assert svecs.shape == (54, 3)
        assert multi.shape == (8, 2, 2)
        assert np.sum(multi[:, :, 0]) == 54
        assert np.sum(multi[-1:, -1, :]) == 54
    else:
        # Sparse layout: fixed-size (natoms, nprim, 27, 3) table.
        assert svecs.shape == (8, 2, 27, 3)
        assert multi.shape == (8, 2)
def test_TrimmedCell(convcell_nacl: PhonopyAtoms, helper_methods):
    """Test TrimmedCell by NaCl.

    Trimming a 2x2x2 and a (reordered) 3x3x3 supercell down with the
    corresponding primitive matrices must yield the same cell, including
    atom order when ``positions_to_reorder`` is supplied.
    """
    pmat = [[0, 0.5, 0.5], [0.5, 0, 0.5], [0.5, 0.5, 0]]
    smat2 = np.eye(3, dtype="intc") * 2
    pmat2 = np.dot(np.linalg.inv(smat2), pmat)
    smat3 = np.eye(3, dtype="intc") * 3
    pmat3 = np.dot(np.linalg.inv(smat3), pmat)
    cell = convcell_nacl
    scell2 = get_supercell(cell, smat2)
    scell3 = get_supercell(cell, smat3)
    n = len(scell3) // 2
    # swap first and last half of atomic order
    indices = [i + n for i in range(n)] + list(range(n))
    scell3_swap = PhonopyAtoms(
        cell=scell3.cell,
        scaled_positions=scell3.scaled_positions[indices],
        numbers=scell3.numbers[indices],
    )
    tcell2 = TrimmedCell(pmat2, scell2)
    tcell3 = TrimmedCell(
        pmat3, scell3_swap, positions_to_reorder=tcell2.scaled_positions
    )
    helper_methods.compare_cells_with_order(tcell2, tcell3)
def test_ShortestPairs_sparse_nacl(ph_nacl: Phonopy, helper_methods):
    """Test ShortestPairs (sparse storage) by NaCl."""
    scell = ph_nacl.supercell
    pcell = ph_nacl.primitive
    pos = scell.scaled_positions
    # Default (sparse) storage of shortest vectors.
    spairs = ShortestPairs(scell.cell, pos, pos[pcell.p2s_map])
    svecs = spairs.shortest_vectors
    multi = spairs.multiplicities
    # Multiplicities must match the recorded reference values.
    np.testing.assert_array_equal(multi.ravel(), multi_nacl_ref)
    pos_from_svecs = svecs[:, 0, 0, :] + pos[0]
    # Spot-check selected shortest vectors against the references.
    np.testing.assert_allclose(svecs_nacl_ref10, svecs[1, 0, :2], atol=1e-8)
    np.testing.assert_allclose(svecs_nacl_ref30, svecs[3, 0, :4], atol=1e-8)
    helper_methods.compare_positions_with_order(pos_from_svecs, pos, scell.cell)
def test_ShortestPairs_dense_nacl(ph_nacl: Phonopy, helper_methods):
    """Test ShortestPairs (dense) by NaCl."""
    scell = ph_nacl.supercell
    pcell = ph_nacl.primitive
    pos = scell.scaled_positions
    spairs = ShortestPairs(scell.cell, pos, pos[pcell.p2s_map], store_dense_svecs=True)
    svecs = spairs.shortest_vectors
    multi = spairs.multiplicities
    # In the dense layout, multi[..., 0] holds counts and multi[..., 1]
    # offsets into the flat svecs table; the last offset+count spans it all.
    assert multi[-1, -1, :].sum() == multi[:, :, 0].sum()
    np.testing.assert_array_equal(multi[:, :, 0].ravel(), multi_nacl_ref)
    # Spot-check selected shortest-vector slices against the references.
    np.testing.assert_allclose(
        svecs_nacl_ref10, svecs[multi[1, 0, 1] : multi[1, 0, :].sum()], atol=1e-8
    )
    np.testing.assert_allclose(
        svecs_nacl_ref30, svecs[multi[3, 0, 1] : multi[3, 0, :].sum()], atol=1e-8
    )
    pos_from_svecs = svecs[multi[:, 0, 1], :] + pos[0]
    helper_methods.compare_positions_with_order(pos_from_svecs, pos, scell.cell)
def test_sparse_to_dense_nacl(ph_nacl: Phonopy):
    """Test for sparse_to_dense_svecs.

    Converting sparse shortest-vector data must reproduce exactly what
    ShortestPairs produces natively in dense mode.
    """
    scell = ph_nacl.supercell
    pcell = ph_nacl.primitive
    pos = scell.scaled_positions
    spairs = ShortestPairs(scell.cell, pos, pos[pcell.p2s_map], store_dense_svecs=False)
    svecs = spairs.shortest_vectors
    multi = spairs.multiplicities
    spairs = ShortestPairs(scell.cell, pos, pos[pcell.p2s_map], store_dense_svecs=True)
    dsvecs = spairs.shortest_vectors
    dmulti = spairs.multiplicities
    _dsvecs, _dmulti = sparse_to_dense_svecs(svecs, multi)
    np.testing.assert_array_equal(dmulti, _dmulti)
    np.testing.assert_allclose(dsvecs, _dsvecs, rtol=0, atol=1e-8)
|
atztogo/phonopy
|
test/structure/test_cells.py
|
Python
|
bsd-3-clause
| 8,810
|
[
"phonopy"
] |
50f32a833afafe6cbd1a788d5ec8780b9ce97f11167e40fa4765d3ea8e8b20d8
|
# coding=utf-8
import sys
import os
import shutil
import unittest
sys.path.insert(0, os.path.join(os.path.dirname(__file__), '..', '..', 'resources', 'lib'))
sys.path.append(os.path.join(os.path.dirname(__file__), '..', '..', 'resources', 'lib', 'pyscraper'))
import xbmcaddon
from nfowriter import NfoWriter
from nfo_scraper import NFO_Scraper
from gamedatabase import GameDataBase
from config import Config, RomCollection
class Test_NFOWriter(unittest.TestCase):
    """Tests for NfoWriter: nfo path resolution, file creation and export.

    The tests write real .nfo files below ./testdata, read them back with
    NFO_Scraper, and remove them afterwards so each test starts clean.
    Uses assertEqual throughout (the assertEquals alias is deprecated and
    removed in Python 3.12).
    """

    def test_getNfoFilePath(self):
        # No nfo folder configured: the nfo is placed next to the rom file.
        xbmcaddon._settings['rcb_nfoFolder'] = ''
        writer = NfoWriter()
        filename = writer.getNfoFilePath("Amiga", "./testdata/roms/Amiga/Airborne Ranger.adf", "Airborne Ranger")
        filename = filename.replace("\\", "/")
        self.assertEqual("./testdata/roms/Amiga/Airborne Ranger.nfo", filename)

    def test_getNfoFilePath_path_in_settings(self):
        # nfo folder configured: the nfo path is rooted in that folder.
        xbmcaddon._settings['rcb_nfoFolder'] = './testdata/nfo/'
        writer = NfoWriter()
        filename = writer.getNfoFilePath("Amiga", "./testdata/roms/Amiga/Airborne Ranger.adf", "Airborne Ranger")
        filename = filename.replace("\\", "/")
        self.assertEqual("./testdata/nfo/Amiga/Airborne Ranger.nfo", filename)

    def test_createNfoFromDesc_newfile(self):
        # Create a brand-new nfo file with a full set of game properties.
        xbmcaddon._settings['rcb_nfoFolder'] = './testdata/nfo/'
        self.assertFalse(os.path.isfile('./testdata/nfo/Amiga/Airborne Ranger.nfo'), 'nfo file should not exist')
        writer = NfoWriter()
        writer.createNfoFromDesc('Airborne Ranger',  # gamename
                                 "Description with some special characters: ' & < >",  # plot
                                 'Amiga',  # romCollectionname
                                 '"MicroProse"',  # publisher
                                 'Imagitec',  # developer
                                 '1989',  # year
                                 '1',  # players
                                 '3.2',  # rating
                                 '128',  # votes
                                 '',  # url
                                 'USA',  # region
                                 'Floppy',  # media
                                 'Top-Down',  # perspective
                                 'Joystick',  # controller
                                 'Airborne Ranger',  # originalTitle
                                 'Airborne Ranger',  # alternateTitle
                                 'v1.00',  # version
                                 ['Action', 'Simulation'],  # genreList
                                 '1',  # isFavorite
                                 '1',  # launchCount
                                 './testdata/roms/Amiga/Airborne Ranger.adf',  # romFile
                                 'Airborne Ranger',  # gamenameFromFile
                                 {},  # artworkfiles
                                 {}  # artworkurls
                                 )
        self.assertTrue(os.path.isfile('./testdata/nfo/Amiga/Airborne Ranger.nfo'), 'Expected nfo file to be written')
        # use nfo scraper to read the file back
        scraper = NFO_Scraper()
        scraper.nfo_file = './testdata/nfo/Amiga/Airborne Ranger.nfo'
        result = scraper.retrieve(1, 'Amiga')
        self.assertEqual(["Airborne Ranger"], result['Game'])
        self.assertEqual(["Airborne Ranger"], result['OriginalTitle'])
        self.assertEqual(["Airborne Ranger"], result['AlternateTitle'])
        self.assertEqual(["1989"], result['ReleaseYear'])
        self.assertEqual(['"MicroProse"'], result['Publisher'])
        self.assertEqual(["Imagitec"], result['Developer'])
        self.assertEqual(["Top-Down"], result['Perspective'])
        self.assertEqual(["Joystick"], result['Controller'])
        self.assertEqual(["Floppy"], result['Media'])
        self.assertEqual(["USA"], result['Region'])
        self.assertEqual(["v1.00"], result['Version'])
        self.assertEqual(["1"], result['Players'])
        self.assertEqual(["1"], result['LaunchCount'])
        self.assertEqual(["1"], result['IsFavorite'])
        self.assertEqual(["3.2"], result['Rating'])
        self.assertEqual(["128"], result['Votes'])
        self.assertTrue(result['Description'][0].startswith(
            "Description with some special characters: ' & < >"))
        self.assertEqual(len(result['Genre']), 2)
        self.assertIn("Action", result['Genre'])
        self.assertIn("Simulation", result['Genre'])
        os.remove('./testdata/nfo/Amiga/Airborne Ranger.nfo')

    def test_createNfoFromDesc_newfile_missinginfos(self):
        # Creating an nfo with almost everything missing must still produce
        # a readable file with empty (None) fields.
        xbmcaddon._settings['rcb_nfoFolder'] = './testdata/nfo/'
        self.assertFalse(os.path.isfile('./testdata/nfo/Amiga/Airborne Ranger.nfo'), 'nfo file should not exist')
        writer = NfoWriter()
        writer.createNfoFromDesc('Airborne Ranger',  # gamename
                                 '',  # plot
                                 'Amiga',  # romCollectionname
                                 '',  # publisher
                                 '',  # developer
                                 '',  # year
                                 '',  # players
                                 '',  # rating
                                 '',  # votes
                                 '',  # url
                                 '',  # region
                                 '',  # media
                                 '',  # perspective
                                 '',  # controller
                                 '',  # originalTitle
                                 '',  # alternateTitle
                                 '',  # version
                                 [],  # genreList
                                 '',  # isFavorite
                                 '',  # launchCount
                                 './testdata/roms/Amiga/Airborne Ranger.adf',  # romFile
                                 'Airborne Ranger',  # gamenameFromFile
                                 {},  # artworkfiles
                                 {}  # artworkurls
                                 )
        self.assertTrue(os.path.isfile('./testdata/nfo/Amiga/Airborne Ranger.nfo'), 'Expected nfo file to be written')
        # use nfo scraper to read the file back
        scraper = NFO_Scraper()
        scraper.nfo_file = './testdata/nfo/Amiga/Airborne Ranger.nfo'
        result = scraper.retrieve(1, 'Amiga')
        self.assertEqual(["Airborne Ranger"], result['Game'])
        self.assertEqual([None], result['OriginalTitle'])
        self.assertEqual([None], result['AlternateTitle'])
        self.assertEqual([None], result['ReleaseYear'])
        self.assertEqual([None], result['Publisher'])
        self.assertEqual([None], result['Developer'])
        self.assertEqual([None], result['Perspective'])
        self.assertEqual([None], result['Controller'])
        self.assertEqual([None], result['Media'])
        self.assertEqual([None], result['Region'])
        self.assertEqual([None], result['Version'])
        self.assertEqual([None], result['Players'])
        self.assertEqual([None], result['LaunchCount'])
        self.assertEqual([None], result['IsFavorite'])
        self.assertEqual([None], result['Rating'])
        self.assertEqual([None], result['Votes'])
        self.assertEqual([None], result['Description'])
        self.assertEqual(len(result['Genre']), 0)
        os.remove('./testdata/nfo/Amiga/Airborne Ranger.nfo')

    def test_createNfoFromDesc_existingfile(self):
        xbmcaddon._settings['rcb_nfoFolder'] = './testdata/nfo/'
        self.assertFalse(os.path.isfile('./testdata/nfo/Amiga/Airborne Ranger.nfo'), 'nfo file should not exist')
        shutil.copy('./testdata/nfo/Amiga/Airborne Ranger_orig.nfo', './testdata/nfo/Amiga/Airborne Ranger.nfo')
        writer = NfoWriter()
        # missing infos should be merged with infos from existing file
        writer.createNfoFromDesc('Airborne Ranger',  # gamename
                                 '',  # plot
                                 'Amiga',  # romCollectionname
                                 '',  # publisher
                                 '',  # developer
                                 '',  # year
                                 '',  # players
                                 '',  # rating
                                 '',  # votes
                                 '',  # url
                                 '',  # region
                                 '',  # media
                                 '',  # perspective
                                 '',  # controller
                                 'Airborne Ranger',  # originalTitle
                                 'Airborne Ranger',  # alternateTitle
                                 '',  # version
                                 [],  # genreList
                                 '',  # isFavorite
                                 '',  # launchCount
                                 './testdata/roms/Amiga/Airborne Ranger.adf',  # romFile
                                 'Airborne Ranger',  # gamenameFromFile
                                 {},  # artworkfiles
                                 {}  # artworkurls
                                 )
        self.assertTrue(os.path.isfile('./testdata/nfo/Amiga/Airborne Ranger.nfo'), 'Expected nfo file to be written')
        # use nfo scraper to read the file back; values missing from the
        # request must have been preserved from the pre-existing file
        scraper = NFO_Scraper()
        scraper.nfo_file = './testdata/nfo/Amiga/Airborne Ranger.nfo'
        result = scraper.retrieve(1, 'Amiga')
        self.assertEqual(["Airborne Ranger"], result['Game'])
        self.assertEqual(["Airborne Ranger"], result['OriginalTitle'])
        self.assertEqual(["Airborne Ranger"], result['AlternateTitle'])
        self.assertEqual(["1989"], result['ReleaseYear'])
        self.assertEqual(['"MicroProse"'], result['Publisher'])
        self.assertEqual(["Imagitec"], result['Developer'])
        self.assertEqual(["Top-Down"], result['Perspective'])
        self.assertEqual(["Joystick"], result['Controller'])
        self.assertEqual(["Floppy"], result['Media'])
        self.assertEqual(["USA"], result['Region'])
        self.assertEqual(["v1.00"], result['Version'])
        self.assertEqual(["1"], result['Players'])
        self.assertEqual(["1"], result['LaunchCount'])
        self.assertEqual(["1"], result['IsFavorite'])
        self.assertEqual(["3.2"], result['Rating'])
        self.assertEqual(["128"], result['Votes'])
        self.assertTrue(result['Description'][0].startswith(
            "Description with some special characters: ' & < >"))
        self.assertEqual(len(result['Genre']), 2)
        self.assertIn("Action", result['Genre'])
        self.assertIn("Simulation", result['Genre'])
        os.remove('./testdata/nfo/Amiga/Airborne Ranger.nfo')

    def test_exportLibrary(self):
        # Export the whole database and verify one nfo per game was written.
        export_base_folder = './testdata/nfo/export/'
        xbmcaddon._settings['rcb_nfoFolder'] = export_base_folder
        # Setup data - MyGames.db is the hard-coded expected DB name
        db_path = './testdata/database/'
        shutil.copyfile(os.path.join(db_path, 'MyGames_2.2.0_full.db'), os.path.join(db_path, 'MyGames.db'))
        gdb = GameDataBase(db_path)
        gdb.connect()
        # Setup config
        config_xml_file = './testdata/config/romcollections_importtests.xml'
        conf = Config(config_xml_file)
        conf.readXml()
        writer = NfoWriter()
        writer.exportLibrary(gdb, conf.romCollections)
        # check if all files have been created
        self.assertTrue(os.path.isfile(os.path.join(export_base_folder, 'Amiga/Airborne Ranger.nfo')))
        self.assertTrue(os.path.isfile(os.path.join(export_base_folder, 'Amiga/Chuck Rock.nfo')))
        self.assertTrue(os.path.isfile(os.path.join(export_base_folder, 'Amiga/Eliminator.nfo')))
        self.assertTrue(os.path.isfile(os.path.join(export_base_folder, 'Amiga/MicroProse Formula One Grand Prix.nfo')))
        self.assertTrue(os.path.isfile(os.path.join(export_base_folder, 'Atari 2600/Adventure (1980) (Atari).nfo')))
        self.assertTrue(os.path.isfile(os.path.join(export_base_folder, 'Atari 2600/Air-Sea Battle (32 in 1) (1988) (Atari) (PAL).nfo')))
        self.assertTrue(os.path.isfile(os.path.join(export_base_folder, 'Atari 2600/Asteroids (1981) (Atari) [no copyright].nfo')))
        #FIXME TODO: can't find file even if it exists
        #self.assertTrue(os.path.isfile(os.path.join(export_base_folder, 'Nintendo 64/1080° Snowboarding.nfo')))
        self.assertTrue(os.path.isfile(os.path.join(export_base_folder, 'PlayStation/Bushido Blade.nfo')))
        self.assertTrue(os.path.isfile(os.path.join(export_base_folder, 'PlayStation/Silent Hill.nfo')))
        self.assertTrue(os.path.isfile(os.path.join(export_base_folder, 'SNES/Chrono Trigger.nfo')))
        self.assertTrue(os.path.isfile(os.path.join(export_base_folder, "SNES/Madden NFL '97.nfo")))
        # clean up every exported file again
        os.remove(os.path.join(export_base_folder, 'Amiga/Airborne Ranger.nfo'))
        os.remove(os.path.join(export_base_folder, 'Amiga/Chuck Rock.nfo'))
        os.remove(os.path.join(export_base_folder, 'Amiga/Eliminator.nfo'))
        os.remove(os.path.join(export_base_folder, 'Amiga/MicroProse Formula One Grand Prix.nfo'))
        os.remove(os.path.join(export_base_folder, 'Atari 2600/Adventure (1980) (Atari).nfo'))
        os.remove(os.path.join(export_base_folder, 'Atari 2600/Air-Sea Battle (32 in 1) (1988) (Atari) (PAL).nfo'))
        os.remove(os.path.join(export_base_folder, 'Atari 2600/Asteroids (1981) (Atari) [no copyright].nfo'))
        #FIXME TODO: can't find file even if it exists
        #os.remove(os.path.join(export_base_folder, 'Nintendo 64/1080° Snowboarding.nfo'))
        os.remove(os.path.join(export_base_folder, 'PlayStation/Bushido Blade.nfo'))
        os.remove(os.path.join(export_base_folder, 'PlayStation/Silent Hill.nfo'))
        os.remove(os.path.join(export_base_folder, 'SNES/Chrono Trigger.nfo'))
        os.remove(os.path.join(export_base_folder, "SNES/Madden NFL '97.nfo"))
        gdb.close()
        os.remove(os.path.join(db_path, 'MyGames.db'))
|
bruny/romcollectionbrowser
|
resources/tests/test_nfowriter.py
|
Python
|
gpl-2.0
| 14,038
|
[
"ADF"
] |
ab42cbf7eb0ac06409eaec61c5f5957f035feb6b6bb676239a99c679edeb6216
|
# coding=utf-8
"""
The XFSCollector collects XFS metrics using /proc/fs/xfs/stat.
#### Dependencies
* /proc/fs/xfs/stat
"""
import diamond.collector
import sys
class XFSCollector(diamond.collector.Collector):
    """Collects XFS runtime statistics from /proc/fs/xfs/stat.

    Every integer field of every known stat section is published as a
    counter named '<section>.<metric>'.
    """

    # Source of the kernel's XFS statistics.
    PROC = '/proc/fs/xfs/stat'

    def get_default_config_help(self):
        """Return the help text for this collector's config options."""
        config_help = super(XFSCollector, self).get_default_config_help()
        config_help.update({
        })
        return config_help

    def get_default_config(self):
        """
        Returns the xfs collector settings
        """
        config = super(XFSCollector, self).get_default_config()
        config.update({
            'path': 'xfs'
        })
        return config

    def collect(self):
        """
        Collect xfs stats.

        For an explanation of the following metrics visit
        http://xfs.org/index.php/Runtime_Stats
        https://github.com/torvalds/linux/blob/master/fs/xfs/xfs_stats.h
        """
        # Maps each stat-line prefix in /proc/fs/xfs/stat to the ordered
        # metric names of the integer fields that follow it.
        data_structure = {
            'extent_alloc': ('alloc_extent', 'alloc_block', 'free_extent',
                             'free_block'),
            'abt': ('lookup', 'compare', 'insrec', 'delrec'),
            'blk_map': ('read_ops', 'write_ops', 'unmap', 'add_exlist',
                        'del_exlist', 'look_exlist', 'cmp_exlist'),
            'bmbt': ('lookup', 'compare', 'insrec', 'delrec'),
            'dir': ('lookup', 'create', 'remove', 'getdents'),
            'trans': ('sync', 'async', 'empty'),
            'ig': ('ig_attempts', 'ig_found', 'ig_frecycle', 'ig_missed',
                   'ig_dup', 'ig_reclaims', 'ig_attrchg'),
            'log': ('writes', 'blocks', 'noiclogs', 'force', 'force_sleep'),
            'push_ail': ('try_logspace', 'sleep_logspace', 'pushes',
                         'success', 'pushbuf', 'pinned', 'locked',
                         'flushing', 'restarts', 'flush'),
            'xstrat': ('quick', 'split'),
            'rw': ('write_calls', 'read_calls'),
            'attr': ('get', 'set', 'remove', 'list'),
            'icluster': ('iflush_count', 'icluster_flushcnt',
                         'icluster_flushinode'),
            'vnodes': ('vn_active', 'vn_alloc', 'vn_get', 'vn_hold',
                       'vn_rele', 'vn_reclaim', 'vn_remove', 'vn_free'),
            'buf': ('xb_get', 'xb_create', 'xb_get_locked',
                    'xb_get_locked_waited', 'xb_busy_locked',
                    'xb_miss_locked', 'xb_page_retries', 'xb_page_found',
                    'xb_get_read'),
            'abtb2': ('xs_abtb_2_lookup', 'xs_abtb_2_compare',
                      'xs_abtb_2_insrec', 'xs_abtb_2_delrec',
                      'xs_abtb_2_newroot', 'xs_abtb_2_killroot',
                      'xs_abtb_2_increment', 'xs_abtb_2_decrement',
                      'xs_abtb_2_lshift', 'xs_abtb_2_rshift',
                      'xs_abtb_2_split', 'xs_abtb_2_join',
                      'xs_abtb_2_alloc', 'xs_abtb_2_free',
                      'xs_abtb_2_moves'),
            'abtc2': ('xs_abtc_2_lookup', 'xs_abtc_2_compare',
                      'xs_abtc_2_insrec', 'xs_abtc_2_delrec',
                      'xs_abtc_2_newroot', 'xs_abtc_2_killroot',
                      'xs_abtc_2_increment', 'xs_abtc_2_decrement',
                      'xs_abtc_2_lshift', 'xs_abtc_2_rshift',
                      'xs_abtc_2_split', 'xs_abtc_2_join',
                      'xs_abtc_2_alloc', 'xs_abtc_2_free',
                      'xs_abtc_2_moves'),
            'bmbt2': ('xs_bmbt_2_lookup', 'xs_bmbt_2_compare',
                      'xs_bmbt_2_insrec', 'xs_bmbt_2_delrec',
                      'xs_bmbt_2_newroot', 'xs_bmbt_2_killroot',
                      'xs_bmbt_2_increment', 'xs_bmbt_2_decrement',
                      'xs_bmbt_2_lshift', 'xs_bmbt_2_rshift',
                      'xs_bmbt_2_split', 'xs_bmbt_2_join',
                      'xs_bmbt_2_alloc', 'xs_bmbt_2_free',
                      'xs_bmbt_2_moves'),
            'ibt2': ('lookup', 'compare', 'insrec', 'delrec', 'newroot',
                     'killroot', 'increment', 'decrement', 'lshift',
                     'rshift', 'split', 'join', 'alloc', 'free', 'moves'),
            'fibt2': ('lookup', 'compare', 'insrec', 'delrec', 'newroot',
                      'killroot', 'increment', 'decrement', 'lshift',
                      'rshift', 'split', 'join', 'alloc', 'free', 'moves'),
            'qm': ('xs_qm_dquot', 'xs_qm_dquot_unused'),
            'xpc': ('xs_xstrat_bytes', 'xs_write_bytes', 'xs_read_bytes'),
            'debug': ('debug',)
        }

        # 'with' guarantees the handle is closed even when parsing fails
        # (the original leaked the file object on error).
        with open(self.PROC) as f:
            new_stats = f.readlines()

        # Parse each line as "<section> <int> <int> ...".
        stats = {}
        for line in new_stats:
            items = line.rstrip().split()
            stats[items[0]] = [int(a) for a in items[1:]]

        for key, values in stats.items():
            names = data_structure.get(key)
            if names is None:
                # Unknown section (e.g. a newer kernel): skip rather than
                # crash the whole collection cycle with a KeyError.
                continue
            # zip also tolerates lines with fewer fields than expected.
            for name, value in zip(names, values):
                self.publish_counter('.'.join([key, name]), value)
|
Ensighten/Diamond
|
src/collectors/xfs/xfs.py
|
Python
|
mit
| 7,077
|
[
"VisIt"
] |
c3e3b957634cb26b24126e43c55213dcb687068d2e766bf56cb6cc4187c8b5cc
|
""" Accounting Cache
"""
__RCSID__ = "$Id$"
import os.path
import time
import threading
from DIRAC import S_OK, S_ERROR, gLogger, rootPath, gConfig
from DIRAC.Core.Utilities.DictCache import DictCache
class DataCache( object ):
  """Cache of accounting report data and rendered plot files.

  Report datasets and plots are held in two DictCache instances with
  independent lifetimes; when a plot entry expires, its PNG files are
  also removed from disk. A daemon thread purges expired entries every
  ten minutes.
  """

  def __init__( self ):
    # Plots are rendered below <InstancePath>/data/accountingPlots
    self.graphsLocation = os.path.join( gConfig.getValue( '/LocalSite/InstancePath', rootPath ), 'data', 'accountingPlots' )
    self.cachedGraphs = {}
    self.alive = True
    self.purgeThread = threading.Thread( target = self.purgeExpired )
    # daemon=True so a pending sleep never blocks interpreter shutdown
    # (replaces the deprecated setDaemon( 1 ) call)
    self.purgeThread.daemon = True
    self.purgeThread.start()
    self.__dataCache = DictCache()
    self.__graphCache = DictCache( deleteFunction = self._deleteGraph )
    self.__dataLifeTime = 600    # seconds a report dataset stays cached
    self.__graphLifeTime = 3600  # seconds a rendered plot stays cached

  def setGraphsLocation( self, graphsDir ):
    """Switch the plot directory and purge any stale PNG files in it."""
    self.graphsLocation = graphsDir
    for graphName in os.listdir( self.graphsLocation ):
      if graphName.find( ".png" ) > 0:
        graphLocation = "%s/%s" % ( self.graphsLocation, graphName )
        gLogger.verbose( "Purging %s" % graphLocation )
        os.unlink( graphLocation )

  def purgeExpired( self ):
    """Background loop: drop expired cache entries every ten minutes."""
    while self.alive:
      time.sleep( 600 )
      self.__graphCache.purgeExpired()
      self.__dataCache.purgeExpired()

  def getReportData( self, reportRequest, reportHash, dataFunc ):
    """
    Get report data from cache if exists, else generate it
    """
    reportData = self.__dataCache.get( reportHash )
    if reportData is None:
      retVal = dataFunc( reportRequest )
      if not retVal[ 'OK' ]:
        return retVal
      reportData = retVal[ 'Value' ]
      self.__dataCache.add( reportHash, self.__dataLifeTime, reportData )
    return S_OK( reportData )

  def getReportPlot( self, reportRequest, reportHash, reportData, plotFunc ):
    """
    Get report plot from cache if exists, else generate it
    """
    plotDict = self.__graphCache.get( reportHash )
    if plotDict is None:
      basePlotFileName = "%s/%s" % ( self.graphsLocation, reportHash )
      retVal = plotFunc( reportRequest, reportData, basePlotFileName )
      if not retVal[ 'OK' ]:
        return retVal
      plotDict = retVal[ 'Value' ]
      # Store only the file names relative to graphsLocation
      if plotDict[ 'plot' ]:
        plotDict[ 'plot' ] = "%s.png" % reportHash
      if plotDict[ 'thumbnail' ]:
        plotDict[ 'thumbnail' ] = "%s.thb.png" % reportHash
      self.__graphCache.add( reportHash, self.__graphLifeTime, plotDict )
    return S_OK( plotDict )

  def getPlotData( self, plotFileName ):
    """Return the raw bytes of a cached plot file wrapped in S_OK/S_ERROR."""
    filename = "%s/%s" % ( self.graphsLocation, plotFileName )
    try:
      # open() + 'with' replaces the removed file() builtin and makes sure
      # the handle is closed even when read() raises
      with open( filename, "rb" ) as fd:
        data = fd.read()
    except Exception as e:
      return S_ERROR( "Can't open file %s: %s" % ( plotFileName, str( e ) ) )
    return S_OK( data )

  def _deleteGraph( self, plotDict ):
    """DictCache delete hook: remove the expired plot's files from disk."""
    try:
      for key in plotDict:
        value = plotDict[ key ]
        if value:
          fPath = os.path.join( self.graphsLocation, str( value ) )
          if os.path.isfile( fPath ):
            gLogger.info( "Deleting plot from cache", value )
            os.unlink( fPath )
          else:
            gLogger.info( "Plot has already been deleted", value )
    except Exception:
      # best-effort cleanup: never let the delete hook raise, but do not
      # swallow KeyboardInterrupt/SystemExit as the old bare except did
      pass
gDataCache = DataCache()
|
vmendez/DIRAC
|
AccountingSystem/private/DataCache.py
|
Python
|
gpl-3.0
| 3,185
|
[
"DIRAC"
] |
acb12bee916c08d9d0aaff5e18b7714e6b7e2f5b882f6b4b33681ee96510886e
|
#!/usr/bin/python
import httplib
import httplib2
import os
import random
import sys
import time
import datetime
from apiclient.discovery import build
from apiclient.errors import HttpError
from apiclient.http import MediaFileUpload
from oauth2client.client import flow_from_clientsecrets
from oauth2client.file import Storage
from oauth2client.tools import argparser, run_flow
# Explicitly tell the underlying HTTP transport library not to retry, since
# we are handling retry logic ourselves.
httplib2.RETRIES = 1
# Maximum number of times to retry before giving up.
MAX_RETRIES = 10
# Always retry when these exceptions are raised.
RETRIABLE_EXCEPTIONS = (httplib2.HttpLib2Error, IOError, httplib.NotConnected,
httplib.IncompleteRead, httplib.ImproperConnectionState,
httplib.CannotSendRequest, httplib.CannotSendHeader,
httplib.ResponseNotReady, httplib.BadStatusLine)
# Always retry when an apiclient.errors.HttpError with one of these status
# codes is raised.
RETRIABLE_STATUS_CODES = [500, 502, 503, 504]
# The CLIENT_SECRETS_FILE variable specifies the name of a file that contains
# the OAuth 2.0 information for this application, including its client_id and
# client_secret. You can acquire an OAuth 2.0 client ID and client secret from
# the Google Developers Console at
# https://console.developers.google.com/.
# Please ensure that you have enabled the YouTube Data API for your project.
# For more information about using OAuth2 to access the YouTube Data API, see:
# https://developers.google.com/youtube/v3/guides/authentication
# For more information about the client_secrets.json file format, see:
# https://developers.google.com/api-client-library/python/guide/aaa_client_secrets
CLIENT_SECRETS_FILE = "client_secrets.json"
# This OAuth 2.0 access scope allows an application to upload files to the
# authenticated user's YouTube channel, but doesn't allow other types of access.
YOUTUBE_UPLOAD_SCOPE = "https://www.googleapis.com/auth/youtube.upload"
YOUTUBE_API_SERVICE_NAME = "youtube"
YOUTUBE_API_VERSION = "v3"
# This variable defines a message to display if the CLIENT_SECRETS_FILE is
# missing.
MISSING_CLIENT_SECRETS_MESSAGE = """
WARNING: Please configure OAuth 2.0
To make this sample run you will need to populate the client_secrets.json file
found at:
%s
with information from the Developers Console
https://console.developers.google.com/
For more information about the client_secrets.json file format, please visit:
https://developers.google.com/api-client-library/python/guide/aaa_client_secrets
""" % os.path.abspath(os.path.join(os.path.dirname(__file__),
CLIENT_SECRETS_FILE))
VALID_PRIVACY_STATUSES = ("public", "private", "unlisted")
def get_authenticated_service(args):
  """Build an OAuth2-authorized YouTube Data API client.

  Reuses cached credentials from the '<script>-oauth2.json' file when they
  are present and still valid; otherwise runs the interactive OAuth2 flow
  and stores the result there.
  """
  oauth_flow = flow_from_clientsecrets(CLIENT_SECRETS_FILE,
                                       scope=YOUTUBE_UPLOAD_SCOPE,
                                       message=MISSING_CLIENT_SECRETS_MESSAGE)
  credential_store = Storage("%s-oauth2.json" % sys.argv[0])
  credentials = credential_store.get()
  if credentials is None or credentials.invalid:
    credentials = run_flow(oauth_flow, credential_store, args)
  authorized_http = credentials.authorize(httplib2.Http())
  return build(YOUTUBE_API_SERVICE_NAME, YOUTUBE_API_VERSION,
               http=authorized_http)
def initialize_upload(youtube, options):
  """Build the videos.insert request body and start the resumable upload.

  NOTE(review): options.title is ignored here -- the video title is always
  today's date in dd-mm-YYYY form, presumably for timelapse naming.
  Confirm this is intended, since the script still accepts a --title flag.
  """
  # Keywords arrive as one comma-separated string; the API wants a list
  tags = None
  if options.keywords:
    tags = options.keywords.split(",")
  body=dict(
    snippet=dict(
      title=datetime.datetime.today().strftime("%d-%m-%Y"),
      description=options.description,
      tags=tags,
      categoryId=options.category
    ),
    status=dict(
      privacyStatus=options.privacyStatus
    )
  )
  # Call the API's videos.insert method to create and upload the video.
  insert_request = youtube.videos().insert(
    part=",".join(body.keys()),
    body=body,
    # The chunksize parameter specifies the size of each chunk of data, in
    # bytes, that will be uploaded at a time. Set a higher value for
    # reliable connections as fewer chunks lead to faster uploads. Set a lower
    # value for better recovery on less reliable connections.
    #
    # Setting "chunksize" equal to -1 in the code below means that the entire
    # file will be uploaded in a single HTTP request. (If the upload fails,
    # it will still be retried where it left off.) This is usually a best
    # practice, but if you're using Python older than 2.6 or if you're
    # running on App Engine, you should set the chunksize to something like
    # 1024 * 1024 (1 megabyte).
    media_body=MediaFileUpload(options.file, chunksize=-1, resumable=True)
  )
  resumable_upload(insert_request)
# This method implements an exponential backoff strategy to resume a
# failed upload.
def resumable_upload(insert_request):
response = None
error = None
retry = 0
while response is None:
try:
print "Uploading file..."
status, response = insert_request.next_chunk()
if 'id' in response:
print "Video id '%s' was successfully uploaded." % response['id']
else:
exit("The upload failed with an unexpected response: %s" % response)
except HttpError, e:
if e.resp.status in RETRIABLE_STATUS_CODES:
error = "A retriable HTTP error %d occurred:\n%s" % (e.resp.status,
e.content)
else:
raise
except RETRIABLE_EXCEPTIONS, e:
error = "A retriable error occurred: %s" % e
if error is not None:
print error
retry += 1
if retry > MAX_RETRIES:
exit("No longer attempting to retry.")
max_sleep = 2 ** retry
sleep_seconds = random.random() * max_sleep
print "Sleeping %f seconds and then retrying..." % sleep_seconds
time.sleep(sleep_seconds)
if __name__ == '__main__':
argparser.add_argument("--file", required=True, help="Video file to upload")
argparser.add_argument("--title", help="Video title", default="Test Title")
argparser.add_argument("--description", help="Video description",
default="Test Description")
argparser.add_argument("--category", default="22",
help="Numeric video category. " +
"See https://developers.google.com/youtube/v3/docs/videoCategories/list")
argparser.add_argument("--keywords", help="Video keywords, comma separated",
default="")
argparser.add_argument("--privacyStatus", choices=VALID_PRIVACY_STATUSES,
default=VALID_PRIVACY_STATUSES[2], help="Video privacy status.")
args = argparser.parse_args()
if not os.path.exists(args.file):
exit("Please specify a valid file using the --file= parameter.")
youtube = get_authenticated_service(args)
try:
initialize_upload(youtube, args)
except HttpError, e:
print "An HTTP error %d occurred:\n%s" % (e.resp.status, e.content)
|
98pm/youtube_upload_timelapse
|
googleapi.py
|
Python
|
apache-2.0
| 6,705
|
[
"VisIt"
] |
bfc9500dcb8457d15681ec8012219826cf38ebee63810457e7edbe0cc671873a
|
##############################################################################
# Copyright (c) 2013-2018, Lawrence Livermore National Security, LLC.
# Produced at the Lawrence Livermore National Laboratory.
#
# This file is part of Spack.
# Created by Todd Gamblin, tgamblin@llnl.gov, All rights reserved.
# LLNL-CODE-647188
#
# For details, see https://github.com/spack/spack
# Please also see the NOTICE and LICENSE files for our notice and the LGPL.
#
# This program is free software; you can redistribute it and/or modify
# it under the terms of the GNU Lesser General Public License (as
# published by the Free Software Foundation) version 2.1, February 1999.
#
# This program is distributed in the hope that it will be useful, but
# WITHOUT ANY WARRANTY; without even the IMPLIED WARRANTY OF
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the terms and
# conditions of the GNU Lesser General Public License for more details.
#
# You should have received a copy of the GNU Lesser General Public
# License along with this program; if not, write to the Free Software
# Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA
##############################################################################
from spack import *
import sys
import os
class Nwchem(Package):
"""High-performance computational chemistry software"""
homepage = "http://www.nwchem-sw.org"
url = "http://www.nwchem-sw.org/images/Nwchem-6.6.revision27746-src.2015-10-20.tar.gz"
version('6.6', 'c581001c004ea5e5dfacb783385825e3',
url='http://www.nwchem-sw.org/images/Nwchem-6.6.revision27746-src.2015-10-20.tar.gz')
depends_on('blas')
depends_on('lapack')
depends_on('mpi')
depends_on('scalapack')
depends_on('python@2.7:2.8', type=('build', 'run'))
# first hash is sha256 of the patch (required for URL patches),
# second is sha256 for the archive.
# patches for 6.6-27746:
urls_for_patches = {
'@6.6': [
('http://www.nwchem-sw.org/images/Tddft_mxvec20.patch.gz', 'ae04d4754c25fc324329dab085d4cc64148c94118ee702a7e14fce6152b4a0c5', 'cdfa8a5ae7d6ee09999407573b171beb91e37e1558a3bfb2d651982a85f0bc8f'),
('http://www.nwchem-sw.org/images/Tools_lib64.patch.gz', 'ef2eadef89c055c4651ea807079577bd90e1bc99ef6c89f112f1f0e7560ec9b4', '76b8d3e1b77829b683234c8307fde55bc9249b87410914b605a76586c8f32dae'),
('http://www.nwchem-sw.org/images/Config_libs66.patch.gz', '56f9c4bab362d82fb30d97564469e77819985a38e15ccaf04f647402c1ee248e', 'aa17f03cbb22ad7d883e799e0fddad1b5957f5f30b09f14a1a2caeeb9663cc07'),
('http://www.nwchem-sw.org/images/Cosmo_meminit.patch.gz', 'f05f09ca235ad222fe47d880bfd05a1b88d0148b990ca8c7437fa231924be04b', '569c5ee528f3922ee60ca831eb20ec6591633a36f80efa76cbbe41cabeb9b624'),
('http://www.nwchem-sw.org/images/Sym_abelian.patch.gz', 'e3470fb5786ab30bf2eda3bb4acc1e4c48fb5e640a09554abecf7d22b315c8fd', 'aa693e645a98dbafbb990e26145d65b100d6075254933f36326cf00bac3c29e0'),
('http://www.nwchem-sw.org/images/Xccvs98.patch.gz', '75540e0436c12e193ed0b644cff41f5036d78c101f14141846083f03ad157afa', '1c0b0f1293e3b9b05e9e51e7d5b99977ccf1edb4b072872c8316452f6cea6f13'),
('http://www.nwchem-sw.org/images/Dplot_tolrho.patch.gz', '8c30f92730d15f923ec8a623e3b311291eb2ba8b9d5a9884716db69a18d14f24', '2ebb1a5575c44eef4139da91f0e1e60057b2eccdba7f57a8fb577e840c326cbb'),
('http://www.nwchem-sw.org/images/Driver_smalleig.patch.gz', 'a040df6f1d807402ce552ba6d35c9610d5efea7a9d6342bbfbf03c8d380a4058', 'dd65bfbae6b472b94c8ee81d74f6c3ece37c8fc8766ff7a3551d8005d44815b8'),
('http://www.nwchem-sw.org/images/Ga_argv.patch.gz', '6fcd3920978ab95083483d5ed538cd9a6f2a80c2cafa0c5c7450fa5621f0a314', '8a78cb2af14314b92be9d241b801e9b9fed5527b9cb47a083134c7becdfa7cf1'),
('http://www.nwchem-sw.org/images/Raman_displ.patch.gz', 'ca4312cd3ed1ceacdc3a7d258bb05b7824c393bf44f44c28a789ebeb29a8dba4', '6a16f0f589a5cbb8d316f68bd2e6a0d46cd47f1c699a4b256a3973130061f6c3'),
('http://www.nwchem-sw.org/images/Ga_defs.patch.gz', 'f8ac827fbc11f7d2a9d8ec840c6f79d4759ef782bd4d291f2e88ec81b1b230aa', 'c6f1a48338d196e1db22bcfc6087e2b2e6eea50a34d3a2b2d3e90cccf43742a9'),
('http://www.nwchem-sw.org/images/Zgesvd.patch.gz', 'c333a94ceb2c35a490f24b007485ac6e334e153b03cfc1d093b6037221a03517', '4af592c047dc3e0bc4962376ae2c6ca868eb7a0b40a347ed9b88e887016ad9ed'),
('http://www.nwchem-sw.org/images/Cosmo_dftprint.patch.gz', '449d59983dc68c23b34e6581370b2fb3d5ea425b05c3182f0973e5b0e1a62651', 'd3b73431a68d6733eb7b669d471e18a83e03fa8e40c48e536fe8edecd99250ff'),
('http://www.nwchem-sw.org/images/Txs_gcc6.patch.gz', '1dab87f23b210e941c765f7dd7cc2bed06d292a2621419dede73f10ba1ca1bcd', '139692215718cd7414896470c0cc8b7817a73ece1e4ca93bf752cf1081a195af'),
('http://www.nwchem-sw.org/images/Gcc6_optfix.patch.gz', '8f8a5f8246bc1e42ef0137049acab4448a2e560339f44308703589adf753c148', '15cff43ab0509e0b0e83c49890032a848d6b7116bd6c8e5678e6c933f2d051ab'),
('http://www.nwchem-sw.org/images/Util_gnumakefile.patch.gz', '173e17206a9099c3512b87e3f42441f5b089db82be1d2b306fe2a0070e5c8fad', '5dd82b9bd55583152295c999a0e4d72dd9d5c6ab7aa91117c2aae57a95a14ba1'),
('http://www.nwchem-sw.org/images/Util_getppn.patch.gz', 'c4a23592fdcfb1fb6b65bc6c1906ac36f9966eec4899c4329bc8ce12015d2495', '8be418e1f8750778a31056f1fdf2a693fa4a12ea86a531f1ddf6f3620421027e'),
('http://www.nwchem-sw.org/images/Gcc6_macs_optfix.patch.gz', 'ff33d5f1ccd33385ffbe6ce7a18ec1506d55652be6e7434dc8065af64c879aaa', 'fade16098a1f54983040cdeb807e4e310425d7f66358807554e08392685a7164'),
('http://www.nwchem-sw.org/images/Notdir_fc.patch.gz', '54c722fa807671d6bf1a056586f0923593319d09c654338e7dd461dcd29ff118', 'a6a233951eb254d8aff5b243ca648def21fa491807a66c442f59c437f040ee69')
]
}
# Iterate over patches
for __condition, __urls in urls_for_patches.items():
for __url, __sha256, __archive_sha256 in __urls:
patch(__url, when=__condition, level=0, sha256=__sha256, archive_sha256=__archive_sha256)
def install(self, spec, prefix):
scalapack = spec['scalapack'].libs
lapack = spec['lapack'].libs
blas = spec['blas'].libs
# see http://www.nwchem-sw.org/index.php/Compiling_NWChem
args = []
args.extend([
'NWCHEM_TOP=%s' % self.stage.source_path,
# NWCHEM is picky about FC and CC. They should NOT be full path.
# see http://www.nwchem-sw.org/index.php/Special:AWCforum/sp/id7524
'CC=%s' % os.path.basename(spack_cc),
'FC=%s' % os.path.basename(spack_fc),
'USE_MPI=y',
'MPI_LOC=%s' % spec['mpi'].prefix,
'USE_PYTHONCONFIG=y',
'PYTHONVERSION=%s' % spec['python'].version.up_to(2),
'PYTHONHOME=%s' % spec['python'].home,
'BLASOPT=%s' % ((lapack + blas).ld_flags),
'BLAS_LIB=%s' % blas.ld_flags,
'LAPACK_LIB=%s' % lapack.ld_flags,
'USE_SCALAPACK=y',
'SCALAPACK=%s' % scalapack.ld_flags,
'NWCHEM_MODULES=all python',
'NWCHEM_LONG_PATHS=Y' # by default NWCHEM_TOP is 64 char max
])
# TODO: query if blas/lapack/scalapack uses 64bit Ints
# A flag to distinguish between 32bit and 64bit integers in linear
# algebra (Blas, Lapack, Scalapack)
use32bitLinAlg = True
if use32bitLinAlg:
args.extend([
'USE_64TO32=y',
'BLAS_SIZE=4',
'LAPACK_SIZE=4',
'SCALAPACK_SIZE=4'
])
else:
args.extend([
'BLAS_SIZE=8',
'LAPACK_SIZE=8'
'SCALAPACK_SIZE=8'
])
if sys.platform == 'darwin':
target = 'MACX64'
args.extend([
'CFLAGS_FORGA=-DMPICH_NO_ATTR_TYPE_TAGS'
])
else:
target = 'LINUX64'
args.extend(['NWCHEM_TARGET=%s' % target])
with working_dir('src'):
make('nwchem_config', *args)
if use32bitLinAlg:
make('64_to_32', *args)
make(*args)
# need to install by hand. Follow Ubuntu:
# http://packages.ubuntu.com/trusty/all/nwchem-data/filelist
# http://packages.ubuntu.com/trusty/amd64/nwchem/filelist
share_path = join_path(prefix, 'share', 'nwchem')
mkdirp(prefix.bin)
install_tree('data', share_path)
install_tree(join_path('basis', 'libraries'),
join_path(share_path, 'libraries'))
install_tree(join_path('nwpw', 'libraryps'),
join_path(share_path, 'libraryps'))
b_path = join_path(self.stage.source_path, 'bin',
target, 'nwchem')
chmod = which('chmod')
chmod('+x', b_path)
install(b_path, prefix.bin)
# Finally, make user's life easier by creating a .nwchemrc file
# to point to the required data files.
nwchemrc = """\
nwchem_basis_library {data}/libraries/
nwchem_nwpw_library {data}/libraryps/
ffield amber
amber_1 {data}/amber_s/
amber_2 {data}/amber_q/
amber_3 {data}/amber_x/
amber_4 {data}/amber_u/
spce {data}/solvents/spce.rst
charmm_s {data}/charmm_s/
charmm_x {data}/charmm_x/
""".format(data=share_path)
with open(".nwchemrc", 'w') as f:
f.write(nwchemrc)
install(".nwchemrc", share_path)
|
EmreAtes/spack
|
var/spack/repos/builtin/packages/nwchem/package.py
|
Python
|
lgpl-2.1
| 9,805
|
[
"Amber",
"NWChem"
] |
94865184be5b966a9cb1a2eaddf2934fff917fa88661bbd1f41428955f7fc423
|
#
# @BEGIN LICENSE
#
# Psi4: an open-source quantum chemistry software package
#
# Copyright (c) 2007-2018 The Psi4 Developers.
#
# The copyrights for code used from other parties are included in
# the corresponding files.
#
# This file is part of Psi4.
#
# Psi4 is free software; you can redistribute it and/or modify
# it under the terms of the GNU Lesser General Public License as published by
# the Free Software Foundation, version 3.
#
# Psi4 is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Lesser General Public License for more details.
#
# You should have received a copy of the GNU Lesser General Public License along
# with Psi4; if not, write to the Free Software Foundation, Inc.,
# 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA.
#
# @END LICENSE
#
"""
| Database (Sherrill) of interaction energies for dissociation curves of doubly hydrogen-bonded bimolecular complexes.
| Geometries from and original reference interaction energies from Thanthiriwatte et al. JCTC 7 88 (2011).
| Revised reference interaction energies from Marshall et al. JCP 135 194102 (2011).
- **cp** ``'off'`` || ``'on'``
- **rlxd** ``'off'`` || ``'on'``
- **benchmark**
- ``'HBC60'`` Thanthiriwatte et al. JCTC 7 88 (2011).
- |dl| ``'HBC6A'`` |dr| Marshall et al. JCP 135 194102 (2011).
- ``'HBC6ARLX'`` Sherrill group, unpublished.
- **subset**
- ``'small'``
- ``'large'``
- ``'equilibrium'`` equilibrium points for the six systems
- ``'FaOOFaOO'`` dissociation curve for formic acid dimer
- ``'FaONFaON'`` dissociation curve for formamide dimer
- ``'FaNNFaNN'`` dissociation curve for formamidine dimer
- ``'FaOOFaON'`` dissociation curve for formic acid- formamide complex
- ``'FaONFaNN'`` dissociation curve for formamide- formamidine complex
- ``'FaOOFaNN'`` dissociation curve for formic acid- formamidine complex
"""
import re
import qcdb
# <<< HBC6 Database Module >>>
dbse = 'HBC1'

# <<< Database Members >>>
# Separations sampled along each dissociation curve; twenty points are
# shared by five of the six curves.
dist = [3.4, 3.5, 3.6, 3.7, 3.8, 3.9, 4.0, 4.1, 4.2, 4.3, 4.4, 4.6, 4.8, 5.0, 5.4, 5.8, 6.4, 7.0, 8.0, 10.0]
FaOOFaOO = ['FaOOFaOO-' + str(d) for d in dist]
FaONFaON = ['FaONFaON-' + str(d) for d in dist]
FaNNFaNN = ['FaNNFaNN-' + str(d) for d in dist]
FaOOFaON = ['FaOOFaON-' + str(d) for d in dist]
FaONFaNN = ['FaONFaNN-' + str(d) for d in dist]
# The formic acid--formamidine curve omits the two shortest separations.
dist = [3.6, 3.7, 3.8, 3.9, 4.0, 4.1, 4.2, 4.3, 4.4, 4.6, 4.8, 5.0, 5.4, 5.8, 6.4, 7.0, 8.0, 10.0]
FaOOFaNN = ['FaOOFaNN-' + str(d) for d in dist]
# Full reaction list: all six curves concatenated, in declaration order.
temp = [FaOOFaOO, FaONFaON, FaNNFaNN, FaOOFaON, FaONFaNN, FaOOFaNN]
HRXN = [rxn for curve in temp for rxn in curve]
HRXN_SM = ['FaOOFaOO-8.0', 'FaOOFaON-5.0']
HRXN_LG = ['FaNNFaNN-3.6']
HRXN_EQ = ['FaOOFaOO-3.6', 'FaONFaON-4.0', 'FaNNFaNN-4.1', 'FaOOFaON-3.8', 'FaONFaNN-4.0', 'FaOOFaNN-3.6']
# <<< Chemical Systems Involved >>>
# The dicts below map each reaction label ('<dbse>-<rxn>') either to a
# stoichiometry matrix ({reagent label: coefficient}, RXNM*) or to an
# ordered list of active reagents (ACTV*) for the various counterpoise /
# deformation treatments.
RXNM = {} # reaction matrix of reagent contributions per reaction
RXNM_CPRLX = {} # reaction matrix of reagent contributions per reaction for counterpoise- and deformation-corrected
ACTV = {} # order of active reagents per reaction
ACTV_CP = {} # order of active reagents per counterpoise-corrected reaction
ACTV_SA = {} # order of active reagents for non-supramolecular calculations
ACTV_RLX = {} # order of active reagents for deformation-corrected reaction
ACTV_CPRLX = {} # order of active reagents for counterpoise- and deformation-corrected reaction
# Splits e.g. 'FaOOFaON-3.8' into the two four-character monomer codes and
# the separation: groups ('FaOO', 'FaON', '3.8').
monopattern = re.compile(r'^(....)(....)-(.+)$')
for rxn in HRXN:
    molname = monopattern.match(rxn)
    # Homodimer curves: the two monomers are identical, so a single
    # monomer reagent enters with coefficient/multiplicity 2.
    if (rxn in FaOOFaOO) or (rxn in FaONFaON) or (rxn in FaNNFaNN):
        RXNM[ '%s-%s' % (dbse, rxn)] = {'%s-%s-dimer' % (dbse, rxn): +1,
                                        '%s-%s-monoA-CP' % (dbse, rxn): -2,
                                        '%s-%s-monoA-unCP' % (dbse, rxn): -2,
                                        '%s-%s-mono-RLX' % (dbse, molname.group(1)): -2 }
        RXNM_CPRLX['%s-%s' % (dbse, rxn)] = {'%s-%s-dimer' % (dbse, rxn): +1,
                                             '%s-%s-monoA-CP' % (dbse, rxn): -2,
                                             '%s-%s-monoA-unCP' % (dbse, rxn): +2,
                                             '%s-%s-mono-RLX' % (dbse, molname.group(1)): -2 }
        ACTV_SA[ '%s-%s' % (dbse, rxn)] = ['%s-%s-dimer' % (dbse, rxn) ]
        ACTV_CP[ '%s-%s' % (dbse, rxn)] = ['%s-%s-dimer' % (dbse, rxn),
                                           '%s-%s-monoA-CP' % (dbse, rxn) ]
        ACTV_RLX[ '%s-%s' % (dbse, rxn)] = ['%s-%s-dimer' % (dbse, rxn),
                                            '%s-%s-mono-RLX' % (dbse, molname.group(1)) ]
        ACTV_CPRLX['%s-%s' % (dbse, rxn)] = ['%s-%s-dimer' % (dbse, rxn),
                                             '%s-%s-monoA-CP' % (dbse, rxn),
                                             '%s-%s-monoA-unCP' % (dbse, rxn),
                                             '%s-%s-mono-RLX' % (dbse, molname.group(1)) ]
        ACTV[ '%s-%s' % (dbse, rxn)] = ['%s-%s-dimer' % (dbse, rxn),
                                        '%s-%s-monoA-unCP' % (dbse, rxn) ]
    # Heterodimer curves: monomers A and B are distinct, so each enters
    # once and the relaxed-monomer terms use both regex groups.
    elif (rxn in FaOOFaON) or (rxn in FaONFaNN) or (rxn in FaOOFaNN):
        RXNM[ '%s-%s' % (dbse, rxn)] = {'%s-%s-dimer' % (dbse, rxn): +1,
                                        '%s-%s-monoA-CP' % (dbse, rxn): -1,
                                        '%s-%s-monoB-CP' % (dbse, rxn): -1,
                                        '%s-%s-monoA-unCP' % (dbse, rxn): -1,
                                        '%s-%s-monoB-unCP' % (dbse, rxn): -1,
                                        '%s-%s-mono-RLX' % (dbse, molname.group(1)): -1,
                                        '%s-%s-mono-RLX' % (dbse, molname.group(2)): -1 }
        RXNM_CPRLX['%s-%s' % (dbse, rxn)] = {'%s-%s-dimer' % (dbse, rxn): +1,
                                             '%s-%s-monoA-CP' % (dbse, rxn): -1,
                                             '%s-%s-monoB-CP' % (dbse, rxn): -1,
                                             '%s-%s-monoA-unCP' % (dbse, rxn): +1,
                                             '%s-%s-monoB-unCP' % (dbse, rxn): +1,
                                             '%s-%s-mono-RLX' % (dbse, molname.group(1)): -1,
                                             '%s-%s-mono-RLX' % (dbse, molname.group(2)): -1 }
        ACTV_SA[ '%s-%s' % (dbse, rxn)] = ['%s-%s-dimer' % (dbse, rxn) ]
        ACTV_CP[ '%s-%s' % (dbse, rxn)] = ['%s-%s-dimer' % (dbse, rxn),
                                           '%s-%s-monoA-CP' % (dbse, rxn),
                                           '%s-%s-monoB-CP' % (dbse, rxn) ]
        ACTV_RLX[ '%s-%s' % (dbse, rxn)] = ['%s-%s-dimer' % (dbse, rxn),
                                            '%s-%s-mono-RLX' % (dbse, molname.group(1)),
                                            '%s-%s-mono-RLX' % (dbse, molname.group(2)) ]
        ACTV_CPRLX['%s-%s' % (dbse, rxn)] = ['%s-%s-dimer' % (dbse, rxn),
                                             '%s-%s-monoA-CP' % (dbse, rxn),
                                             '%s-%s-monoB-CP' % (dbse, rxn),
                                             '%s-%s-monoA-unCP' % (dbse, rxn),
                                             '%s-%s-monoB-unCP' % (dbse, rxn),
                                             '%s-%s-mono-RLX' % (dbse, molname.group(1)),
                                             '%s-%s-mono-RLX' % (dbse, molname.group(2)) ]
        ACTV[ '%s-%s' % (dbse, rxn)] = ['%s-%s-dimer' % (dbse, rxn),
                                        '%s-%s-monoA-unCP' % (dbse, rxn),
                                        '%s-%s-monoB-unCP' % (dbse, rxn) ]
# <<< Reference Values >>>
BIND = {}
# Original publication
BIND_HBC60 = {}
BIND_HBC60['%s-FaOOFaOO-3.4' % (dbse)] = -19.834
BIND_HBC60['%s-FaOOFaOO-3.5' % (dbse)] = -20.027
BIND_HBC60['%s-FaOOFaOO-3.6' % (dbse)] = -20.060 # FaOOFaOO minimum
BIND_HBC60['%s-FaOOFaOO-3.7' % (dbse)] = -19.776
BIND_HBC60['%s-FaOOFaOO-3.8' % (dbse)] = -19.132
BIND_HBC60['%s-FaOOFaOO-3.9' % (dbse)] = -18.161
BIND_HBC60['%s-FaOOFaOO-4.0' % (dbse)] = -16.943
BIND_HBC60['%s-FaOOFaOO-4.1' % (dbse)] = -15.574
BIND_HBC60['%s-FaOOFaOO-4.2' % (dbse)] = -14.148
BIND_HBC60['%s-FaOOFaOO-4.3' % (dbse)] = -12.736
BIND_HBC60['%s-FaOOFaOO-4.4' % (dbse)] = -11.392
BIND_HBC60['%s-FaOOFaOO-4.6' % (dbse)] = -9.014
BIND_HBC60['%s-FaOOFaOO-4.8' % (dbse)] = -7.091
BIND_HBC60['%s-FaOOFaOO-5.0' % (dbse)] = -5.590
BIND_HBC60['%s-FaOOFaOO-5.4' % (dbse)] = -3.548
BIND_HBC60['%s-FaOOFaOO-5.8' % (dbse)] = -2.325
BIND_HBC60['%s-FaOOFaOO-6.4' % (dbse)] = -1.320
BIND_HBC60['%s-FaOOFaOO-7.0' % (dbse)] = -0.801
BIND_HBC60['%s-FaOOFaOO-8.0' % (dbse)] = -0.394
BIND_HBC60['%s-FaOOFaOO-10.0' % (dbse)] = -0.132
BIND_HBC60['%s-FaONFaON-3.4' % (dbse)] = -6.726
BIND_HBC60['%s-FaONFaON-3.5' % (dbse)] = -10.191
BIND_HBC60['%s-FaONFaON-3.6' % (dbse)] = -12.781
BIND_HBC60['%s-FaONFaON-3.7' % (dbse)] = -14.667
BIND_HBC60['%s-FaONFaON-3.8' % (dbse)] = -15.919
BIND_HBC60['%s-FaONFaON-3.9' % (dbse)] = -16.582
BIND_HBC60['%s-FaONFaON-4.0' % (dbse)] = -16.714 # FaONFaON minimum
BIND_HBC60['%s-FaONFaON-4.1' % (dbse)] = -16.391
BIND_HBC60['%s-FaONFaON-4.2' % (dbse)] = -15.713
BIND_HBC60['%s-FaONFaON-4.3' % (dbse)] = -14.790
BIND_HBC60['%s-FaONFaON-4.4' % (dbse)] = -13.723
BIND_HBC60['%s-FaONFaON-4.6' % (dbse)] = -11.480
BIND_HBC60['%s-FaONFaON-4.8' % (dbse)] = -9.401
BIND_HBC60['%s-FaONFaON-5.0' % (dbse)] = -7.642
BIND_HBC60['%s-FaONFaON-5.4' % (dbse)] = -5.108
BIND_HBC60['%s-FaONFaON-5.8' % (dbse)] = -3.537
BIND_HBC60['%s-FaONFaON-6.4' % (dbse)] = -2.187
BIND_HBC60['%s-FaONFaON-7.0' % (dbse)] = -1.448
BIND_HBC60['%s-FaONFaON-8.0' % (dbse)] = -0.816
BIND_HBC60['%s-FaONFaON-10.0' % (dbse)] = -0.340
BIND_HBC60['%s-FaNNFaNN-3.4' % (dbse)] = -8.987
BIND_HBC60['%s-FaNNFaNN-3.5' % (dbse)] = -10.969
BIND_HBC60['%s-FaNNFaNN-3.6' % (dbse)] = -12.693
BIND_HBC60['%s-FaNNFaNN-3.7' % (dbse)] = -14.144
BIND_HBC60['%s-FaNNFaNN-3.8' % (dbse)] = -15.287
BIND_HBC60['%s-FaNNFaNN-3.9' % (dbse)] = -16.118
BIND_HBC60['%s-FaNNFaNN-4.0' % (dbse)] = -16.587
BIND_HBC60['%s-FaNNFaNN-4.1' % (dbse)] = -16.702 # FaNNFaNN minimum
BIND_HBC60['%s-FaNNFaNN-4.2' % (dbse)] = -16.452
BIND_HBC60['%s-FaNNFaNN-4.3' % (dbse)] = -15.901
BIND_HBC60['%s-FaNNFaNN-4.4' % (dbse)] = -15.102
BIND_HBC60['%s-FaNNFaNN-4.6' % (dbse)] = -13.047
BIND_HBC60['%s-FaNNFaNN-4.8' % (dbse)] = -10.810
BIND_HBC60['%s-FaNNFaNN-5.0' % (dbse)] = -8.733
BIND_HBC60['%s-FaNNFaNN-5.4' % (dbse)] = -5.539
BIND_HBC60['%s-FaNNFaNN-5.8' % (dbse)] = -3.521
BIND_HBC60['%s-FaNNFaNN-6.4' % (dbse)] = -1.861
BIND_HBC60['%s-FaNNFaNN-7.0' % (dbse)] = -1.050
BIND_HBC60['%s-FaNNFaNN-8.0' % (dbse)] = -0.463
BIND_HBC60['%s-FaNNFaNN-10.0' % (dbse)] = -0.123
BIND_HBC60['%s-FaOOFaON-3.4' % (dbse)] = -14.356
BIND_HBC60['%s-FaOOFaON-3.5' % (dbse)] = -16.486
BIND_HBC60['%s-FaOOFaON-3.6' % (dbse)] = -17.833
BIND_HBC60['%s-FaOOFaON-3.7' % (dbse)] = -18.543
BIND_HBC60['%s-FaOOFaON-3.8' % (dbse)] = -18.692 # FaOOFaON minimum
BIND_HBC60['%s-FaOOFaON-3.9' % (dbse)] = -18.347
BIND_HBC60['%s-FaOOFaON-4.0' % (dbse)] = -17.592
BIND_HBC60['%s-FaOOFaON-4.1' % (dbse)] = -16.537
BIND_HBC60['%s-FaOOFaON-4.2' % (dbse)] = -15.300
BIND_HBC60['%s-FaOOFaON-4.3' % (dbse)] = -13.989
BIND_HBC60['%s-FaOOFaON-4.4' % (dbse)] = -12.684
BIND_HBC60['%s-FaOOFaON-4.6' % (dbse)] = -10.274
BIND_HBC60['%s-FaOOFaON-4.8' % (dbse)] = -8.245
BIND_HBC60['%s-FaOOFaON-5.0' % (dbse)] = -6.613
BIND_HBC60['%s-FaOOFaON-5.4' % (dbse)] = -4.330
BIND_HBC60['%s-FaOOFaON-5.8' % (dbse)] = -2.935
BIND_HBC60['%s-FaOOFaON-6.4' % (dbse)] = -1.753
BIND_HBC60['%s-FaOOFaON-7.0' % (dbse)] = -1.121
BIND_HBC60['%s-FaOOFaON-8.0' % (dbse)] = -0.598
BIND_HBC60['%s-FaOOFaON-10.0' % (dbse)] = -0.227
BIND_HBC60['%s-FaONFaNN-3.4' % (dbse)] = -8.239
BIND_HBC60['%s-FaONFaNN-3.5' % (dbse)] = -10.918
BIND_HBC60['%s-FaONFaNN-3.6' % (dbse)] = -13.055
BIND_HBC60['%s-FaONFaNN-3.7' % (dbse)] = -14.717
BIND_HBC60['%s-FaONFaNN-3.8' % (dbse)] = -15.921
BIND_HBC60['%s-FaONFaNN-3.9' % (dbse)] = -16.672
BIND_HBC60['%s-FaONFaNN-4.0' % (dbse)] = -16.977 # FaONFaNN minimum
BIND_HBC60['%s-FaONFaNN-4.1' % (dbse)] = -16.865
BIND_HBC60['%s-FaONFaNN-4.2' % (dbse)] = -16.390
BIND_HBC60['%s-FaONFaNN-4.3' % (dbse)] = -15.631
BIND_HBC60['%s-FaONFaNN-4.4' % (dbse)] = -14.676
BIND_HBC60['%s-FaONFaNN-4.6' % (dbse)] = -12.490
BIND_HBC60['%s-FaONFaNN-4.8' % (dbse)] = -10.304
BIND_HBC60['%s-FaONFaNN-5.0' % (dbse)] = -8.362
BIND_HBC60['%s-FaONFaNN-5.4' % (dbse)] = -5.445
BIND_HBC60['%s-FaONFaNN-5.8' % (dbse)] = -3.617
BIND_HBC60['%s-FaONFaNN-6.4' % (dbse)] = -2.087
BIND_HBC60['%s-FaONFaNN-7.0' % (dbse)] = -1.295
BIND_HBC60['%s-FaONFaNN-8.0' % (dbse)] = -0.663
BIND_HBC60['%s-FaONFaNN-10.0' % (dbse)] = -0.237
BIND_HBC60['%s-FaOOFaNN-3.6' % (dbse)] = -26.289 # FaOOFaNN minimum
BIND_HBC60['%s-FaOOFaNN-3.7' % (dbse)] = -24.035
BIND_HBC60['%s-FaOOFaNN-3.8' % (dbse)] = -23.017
BIND_HBC60['%s-FaOOFaNN-3.9' % (dbse)] = -22.133
BIND_HBC60['%s-FaOOFaNN-4.0' % (dbse)] = -21.122
BIND_HBC60['%s-FaOOFaNN-4.1' % (dbse)] = -19.920
BIND_HBC60['%s-FaOOFaNN-4.2' % (dbse)] = -18.544
BIND_HBC60['%s-FaOOFaNN-4.3' % (dbse)] = -17.056
BIND_HBC60['%s-FaOOFaNN-4.4' % (dbse)] = -15.526
BIND_HBC60['%s-FaOOFaNN-4.6' % (dbse)] = -12.583
BIND_HBC60['%s-FaOOFaNN-4.8' % (dbse)] = -10.031
BIND_HBC60['%s-FaOOFaNN-5.0' % (dbse)] = -7.960
BIND_HBC60['%s-FaOOFaNN-5.4' % (dbse)] = -5.069
BIND_HBC60['%s-FaOOFaNN-5.8' % (dbse)] = -3.336
BIND_HBC60['%s-FaOOFaNN-6.4' % (dbse)] = -1.906
BIND_HBC60['%s-FaOOFaNN-7.0' % (dbse)] = -1.170
BIND_HBC60['%s-FaOOFaNN-8.0' % (dbse)] = -0.587
BIND_HBC60['%s-FaOOFaNN-10.0' % (dbse)] = -0.202
# Current revision
BIND_HBC6A = {}
BIND_HBC6A['%s-FaOOFaOO-3.4' % (dbse)] = -19.627
BIND_HBC6A['%s-FaOOFaOO-3.5' % (dbse)] = -19.850
BIND_HBC6A['%s-FaOOFaOO-3.6' % (dbse)] = -19.910 # FaOOFaOO minimum
BIND_HBC6A['%s-FaOOFaOO-3.7' % (dbse)] = -19.650
BIND_HBC6A['%s-FaOOFaOO-3.8' % (dbse)] = -19.027
BIND_HBC6A['%s-FaOOFaOO-3.9' % (dbse)] = -18.075
BIND_HBC6A['%s-FaOOFaOO-4.0' % (dbse)] = -16.873
BIND_HBC6A['%s-FaOOFaOO-4.1' % (dbse)] = -15.517
BIND_HBC6A['%s-FaOOFaOO-4.2' % (dbse)] = -14.100
BIND_HBC6A['%s-FaOOFaOO-4.3' % (dbse)] = -12.697
BIND_HBC6A['%s-FaOOFaOO-4.4' % (dbse)] = -11.360
BIND_HBC6A['%s-FaOOFaOO-4.6' % (dbse)] = -8.990
BIND_HBC6A['%s-FaOOFaOO-4.8' % (dbse)] = -7.074
BIND_HBC6A['%s-FaOOFaOO-5.0' % (dbse)] = -5.577
BIND_HBC6A['%s-FaOOFaOO-5.4' % (dbse)] = -3.539
BIND_HBC6A['%s-FaOOFaOO-5.8' % (dbse)] = -2.323
BIND_HBC6A['%s-FaOOFaOO-6.4' % (dbse)] = -1.320
BIND_HBC6A['%s-FaOOFaOO-7.0' % (dbse)] = -0.802
BIND_HBC6A['%s-FaOOFaOO-8.0' % (dbse)] = -0.397
BIND_HBC6A['%s-FaOOFaOO-10.0' % (dbse)] = -0.135
BIND_HBC6A['%s-FaONFaON-3.4' % (dbse)] = -6.556
BIND_HBC6A['%s-FaONFaON-3.5' % (dbse)] = -10.027
BIND_HBC6A['%s-FaONFaON-3.6' % (dbse)] = -12.628
BIND_HBC6A['%s-FaONFaON-3.7' % (dbse)] = -14.529
BIND_HBC6A['%s-FaONFaON-3.8' % (dbse)] = -15.796
BIND_HBC6A['%s-FaONFaON-3.9' % (dbse)] = -16.475
BIND_HBC6A['%s-FaONFaON-4.0' % (dbse)] = -16.622 # FaONFaON minimum
BIND_HBC6A['%s-FaONFaON-4.1' % (dbse)] = -16.313
BIND_HBC6A['%s-FaONFaON-4.2' % (dbse)] = -15.647
BIND_HBC6A['%s-FaONFaON-4.3' % (dbse)] = -14.735
BIND_HBC6A['%s-FaONFaON-4.4' % (dbse)] = -13.678
BIND_HBC6A['%s-FaONFaON-4.6' % (dbse)] = -11.448
# --- BIND_HBC6A (continued): reference interaction energies keyed as
# --- '<dbse>-<system>-<separation>'.  Values are negative for binding;
# --- presumably kcal/mol per database convention -- TODO confirm.
BIND_HBC6A['%s-FaONFaON-4.8' % (dbse)] = -9.379
BIND_HBC6A['%s-FaONFaON-5.0' % (dbse)] = -7.626
BIND_HBC6A['%s-FaONFaON-5.4' % (dbse)] = -5.097
BIND_HBC6A['%s-FaONFaON-5.8' % (dbse)] = -3.528
BIND_HBC6A['%s-FaONFaON-6.4' % (dbse)] = -2.181
BIND_HBC6A['%s-FaONFaON-7.0' % (dbse)] = -1.443
BIND_HBC6A['%s-FaONFaON-8.0' % (dbse)] = -0.813
BIND_HBC6A['%s-FaONFaON-10.0' % (dbse)] = -0.337
# Formamidine dimer curve
BIND_HBC6A['%s-FaNNFaNN-3.4' % (dbse)] = -8.730
BIND_HBC6A['%s-FaNNFaNN-3.5' % (dbse)] = -10.725
BIND_HBC6A['%s-FaNNFaNN-3.6' % (dbse)] = -12.463
BIND_HBC6A['%s-FaNNFaNN-3.7' % (dbse)] = -13.932
BIND_HBC6A['%s-FaNNFaNN-3.8' % (dbse)] = -15.106
BIND_HBC6A['%s-FaNNFaNN-3.9' % (dbse)] = -15.950
BIND_HBC6A['%s-FaNNFaNN-4.0' % (dbse)] = -16.440
BIND_HBC6A['%s-FaNNFaNN-4.1' % (dbse)] = -16.575 # FaNNFaNN minimum
BIND_HBC6A['%s-FaNNFaNN-4.2' % (dbse)] = -16.344
BIND_HBC6A['%s-FaNNFaNN-4.3' % (dbse)] = -15.811
BIND_HBC6A['%s-FaNNFaNN-4.4' % (dbse)] = -15.028
BIND_HBC6A['%s-FaNNFaNN-4.6' % (dbse)] = -12.999
BIND_HBC6A['%s-FaNNFaNN-4.8' % (dbse)] = -10.780
BIND_HBC6A['%s-FaNNFaNN-5.0' % (dbse)] = -8.715
BIND_HBC6A['%s-FaNNFaNN-5.4' % (dbse)] = -5.532
BIND_HBC6A['%s-FaNNFaNN-5.8' % (dbse)] = -3.517
BIND_HBC6A['%s-FaNNFaNN-6.4' % (dbse)] = -1.861
BIND_HBC6A['%s-FaNNFaNN-7.0' % (dbse)] = -1.051
BIND_HBC6A['%s-FaNNFaNN-8.0' % (dbse)] = -0.466
BIND_HBC6A['%s-FaNNFaNN-10.0' % (dbse)] = -0.127
# Formic acid / formamide heterodimer curve
BIND_HBC6A['%s-FaOOFaON-3.4' % (dbse)] = -14.164
BIND_HBC6A['%s-FaOOFaON-3.5' % (dbse)] = -16.312
BIND_HBC6A['%s-FaOOFaON-3.6' % (dbse)] = -17.679
BIND_HBC6A['%s-FaOOFaON-3.7' % (dbse)] = -18.409
BIND_HBC6A['%s-FaOOFaON-3.8' % (dbse)] = -18.578 # FaOOFaON minimum
BIND_HBC6A['%s-FaOOFaON-3.9' % (dbse)] = -18.250
BIND_HBC6A['%s-FaOOFaON-4.0' % (dbse)] = -17.512
BIND_HBC6A['%s-FaOOFaON-4.1' % (dbse)] = -16.471
BIND_HBC6A['%s-FaOOFaON-4.2' % (dbse)] = -15.245
BIND_HBC6A['%s-FaOOFaON-4.3' % (dbse)] = -13.944
BIND_HBC6A['%s-FaOOFaON-4.4' % (dbse)] = -12.647
BIND_HBC6A['%s-FaOOFaON-4.6' % (dbse)] = -10.248
BIND_HBC6A['%s-FaOOFaON-4.8' % (dbse)] = -8.227
BIND_HBC6A['%s-FaOOFaON-5.0' % (dbse)] = -6.597
BIND_HBC6A['%s-FaOOFaON-5.4' % (dbse)] = -4.321
BIND_HBC6A['%s-FaOOFaON-5.8' % (dbse)] = -2.931
BIND_HBC6A['%s-FaOOFaON-6.4' % (dbse)] = -1.751
BIND_HBC6A['%s-FaOOFaON-7.0' % (dbse)] = -1.119
BIND_HBC6A['%s-FaOOFaON-8.0' % (dbse)] = -0.597
BIND_HBC6A['%s-FaOOFaON-10.0' % (dbse)] = -0.228
# Formamide / formamidine heterodimer curve
BIND_HBC6A['%s-FaONFaNN-3.4' % (dbse)] = -8.021
BIND_HBC6A['%s-FaONFaNN-3.5' % (dbse)] = -10.711
BIND_HBC6A['%s-FaONFaNN-3.6' % (dbse)] = -12.862
BIND_HBC6A['%s-FaONFaNN-3.7' % (dbse)] = -14.539
BIND_HBC6A['%s-FaONFaNN-3.8' % (dbse)] = -15.763
BIND_HBC6A['%s-FaONFaNN-3.9' % (dbse)] = -16.532
BIND_HBC6A['%s-FaONFaNN-4.0' % (dbse)] = -16.856 # FaONFaNN minimum
BIND_HBC6A['%s-FaONFaNN-4.1' % (dbse)] = -16.760
BIND_HBC6A['%s-FaONFaNN-4.2' % (dbse)] = -16.301
BIND_HBC6A['%s-FaONFaNN-4.3' % (dbse)] = -15.557
BIND_HBC6A['%s-FaONFaNN-4.4' % (dbse)] = -14.614
BIND_HBC6A['%s-FaONFaNN-4.6' % (dbse)] = -12.448
BIND_HBC6A['%s-FaONFaNN-4.8' % (dbse)] = -10.277
BIND_HBC6A['%s-FaONFaNN-5.0' % (dbse)] = -8.341
BIND_HBC6A['%s-FaONFaNN-5.4' % (dbse)] = -5.434
BIND_HBC6A['%s-FaONFaNN-5.8' % (dbse)] = -3.609
BIND_HBC6A['%s-FaONFaNN-6.4' % (dbse)] = -2.082
BIND_HBC6A['%s-FaONFaNN-7.0' % (dbse)] = -1.292
BIND_HBC6A['%s-FaONFaNN-8.0' % (dbse)] = -0.661
BIND_HBC6A['%s-FaONFaNN-10.0' % (dbse)] = -0.237
# Formic acid / formamidine heterodimer curve.  NOTE(review): this curve
# starts at 3.6 A and the listed minimum is its shortest separation; the
# shorter points present for other systems are absent here.
BIND_HBC6A['%s-FaOOFaNN-3.6' % (dbse)] = -26.064 # FaOOFaNN minimum
BIND_HBC6A['%s-FaOOFaNN-3.7' % (dbse)] = -23.841
BIND_HBC6A['%s-FaOOFaNN-3.8' % (dbse)] = -22.850
BIND_HBC6A['%s-FaOOFaNN-3.9' % (dbse)] = -21.990
BIND_HBC6A['%s-FaOOFaNN-4.0' % (dbse)] = -21.002
BIND_HBC6A['%s-FaOOFaNN-4.1' % (dbse)] = -19.819
BIND_HBC6A['%s-FaOOFaNN-4.2' % (dbse)] = -18.461
BIND_HBC6A['%s-FaOOFaNN-4.3' % (dbse)] = -16.988
BIND_HBC6A['%s-FaOOFaNN-4.4' % (dbse)] = -15.471
BIND_HBC6A['%s-FaOOFaNN-4.6' % (dbse)] = -12.546
BIND_HBC6A['%s-FaOOFaNN-4.8' % (dbse)] = -10.006
BIND_HBC6A['%s-FaOOFaNN-5.0' % (dbse)] = -7.942
BIND_HBC6A['%s-FaOOFaNN-5.4' % (dbse)] = -5.058
BIND_HBC6A['%s-FaOOFaNN-5.8' % (dbse)] = -3.328
BIND_HBC6A['%s-FaOOFaNN-6.4' % (dbse)] = -1.900
BIND_HBC6A['%s-FaOOFaNN-7.0' % (dbse)] = -1.166
BIND_HBC6A['%s-FaOOFaNN-8.0' % (dbse)] = -0.584
BIND_HBC6A['%s-FaOOFaNN-10.0' % (dbse)] = -0.200
# Current revision level with deformation correction.
# Same key scheme as BIND_HBC6A ('<dbse>-<system>-<separation>'); positive
# values at short range indicate net repulsion once monomer deformation is
# included.  Units presumably match BIND_HBC6A -- TODO confirm.
BIND_HBC6ARLX = {}
BIND_HBC6ARLX['%s-FaOOFaOO-3.4' % (dbse)] = -7.072
BIND_HBC6ARLX['%s-FaOOFaOO-3.5' % (dbse)] = -11.415
BIND_HBC6ARLX['%s-FaOOFaOO-3.6' % (dbse)] = -14.186
BIND_HBC6ARLX['%s-FaOOFaOO-3.7' % (dbse)] = -15.667
BIND_HBC6ARLX['%s-FaOOFaOO-3.8' % (dbse)] = -16.146 # FaOOFaOO minimum
BIND_HBC6ARLX['%s-FaOOFaOO-3.9' % (dbse)] = -15.900
BIND_HBC6ARLX['%s-FaOOFaOO-4.0' % (dbse)] = -15.171
BIND_HBC6ARLX['%s-FaOOFaOO-4.1' % (dbse)] = -14.153
BIND_HBC6ARLX['%s-FaOOFaOO-4.2' % (dbse)] = -12.993
BIND_HBC6ARLX['%s-FaOOFaOO-4.3' % (dbse)] = -11.792
BIND_HBC6ARLX['%s-FaOOFaOO-4.4' % (dbse)] = -10.617
BIND_HBC6ARLX['%s-FaOOFaOO-4.6' % (dbse)] = -8.486
BIND_HBC6ARLX['%s-FaOOFaOO-4.8' % (dbse)] = -6.731
BIND_HBC6ARLX['%s-FaOOFaOO-5.0' % (dbse)] = -5.341
BIND_HBC6ARLX['%s-FaOOFaOO-5.4' % (dbse)] = -3.416
BIND_HBC6ARLX['%s-FaOOFaOO-5.8' % (dbse)] = -2.251
BIND_HBC6ARLX['%s-FaOOFaOO-6.4' % (dbse)] = -1.284
BIND_HBC6ARLX['%s-FaOOFaOO-7.0' % (dbse)] = -0.784
BIND_HBC6ARLX['%s-FaOOFaOO-8.0' % (dbse)] = -0.389
BIND_HBC6ARLX['%s-FaOOFaOO-10.0' % (dbse)] = -0.133
# Formamide dimer curve
BIND_HBC6ARLX['%s-FaONFaON-3.4' % (dbse)] = 4.943
BIND_HBC6ARLX['%s-FaONFaON-3.5' % (dbse)] = -1.431
BIND_HBC6ARLX['%s-FaONFaON-3.6' % (dbse)] = -6.432
BIND_HBC6ARLX['%s-FaONFaON-3.7' % (dbse)] = -10.102
BIND_HBC6ARLX['%s-FaONFaON-3.8' % (dbse)] = -12.566
BIND_HBC6ARLX['%s-FaONFaON-3.9' % (dbse)] = -14.000
BIND_HBC6ARLX['%s-FaONFaON-4.0' % (dbse)] = -14.603 # FaONFaON minimum
BIND_HBC6ARLX['%s-FaONFaON-4.1' % (dbse)] = -14.579
BIND_HBC6ARLX['%s-FaONFaON-4.2' % (dbse)] = -14.112
BIND_HBC6ARLX['%s-FaONFaON-4.3' % (dbse)] = -13.361
BIND_HBC6ARLX['%s-FaONFaON-4.4' % (dbse)] = -12.451
BIND_HBC6ARLX['%s-FaONFaON-4.6' % (dbse)] = -10.489
BIND_HBC6ARLX['%s-FaONFaON-4.8' % (dbse)] = -8.655
BIND_HBC6ARLX['%s-FaONFaON-5.0' % (dbse)] = -7.101
BIND_HBC6ARLX['%s-FaONFaON-5.4' % (dbse)] = -4.830
BIND_HBC6ARLX['%s-FaONFaON-5.8' % (dbse)] = -3.380
BIND_HBC6ARLX['%s-FaONFaON-6.4' % (dbse)] = -2.110
BIND_HBC6ARLX['%s-FaONFaON-7.0' % (dbse)] = -1.403
BIND_HBC6ARLX['%s-FaONFaON-8.0' % (dbse)] = -0.794
BIND_HBC6ARLX['%s-FaONFaON-10.0' % (dbse)] = -0.331
# Formamidine dimer curve
BIND_HBC6ARLX['%s-FaNNFaNN-3.4' % (dbse)] = 14.652
BIND_HBC6ARLX['%s-FaNNFaNN-3.5' % (dbse)] = 6.948
BIND_HBC6ARLX['%s-FaNNFaNN-3.6' % (dbse)] = 0.563
BIND_HBC6ARLX['%s-FaNNFaNN-3.7' % (dbse)] = -4.544
BIND_HBC6ARLX['%s-FaNNFaNN-3.8' % (dbse)] = -8.441
BIND_HBC6ARLX['%s-FaNNFaNN-3.9' % (dbse)] = -11.223
BIND_HBC6ARLX['%s-FaNNFaNN-4.0' % (dbse)] = -13.021
BIND_HBC6ARLX['%s-FaNNFaNN-4.1' % (dbse)] = -13.996
BIND_HBC6ARLX['%s-FaNNFaNN-4.2' % (dbse)] = -14.285 # FaNNFaNN minimum
BIND_HBC6ARLX['%s-FaNNFaNN-4.3' % (dbse)] = -14.074
BIND_HBC6ARLX['%s-FaNNFaNN-4.4' % (dbse)] = -13.501
BIND_HBC6ARLX['%s-FaNNFaNN-4.6' % (dbse)] = -11.755
BIND_HBC6ARLX['%s-FaNNFaNN-4.8' % (dbse)] = -9.767
BIND_HBC6ARLX['%s-FaNNFaNN-5.0' % (dbse)] = -7.915
BIND_HBC6ARLX['%s-FaNNFaNN-5.4' % (dbse)] = -5.073
BIND_HBC6ARLX['%s-FaNNFaNN-5.8' % (dbse)] = -3.259
BIND_HBC6ARLX['%s-FaNNFaNN-6.4' % (dbse)] = -1.742
BIND_HBC6ARLX['%s-FaNNFaNN-7.0' % (dbse)] = -0.990
BIND_HBC6ARLX['%s-FaNNFaNN-8.0' % (dbse)] = -0.441
BIND_HBC6ARLX['%s-FaNNFaNN-10.0' % (dbse)] = -0.121
# Formic acid / formamide heterodimer curve
BIND_HBC6ARLX['%s-FaOOFaON-3.4' % (dbse)] = -2.134
BIND_HBC6ARLX['%s-FaOOFaON-3.5' % (dbse)] = -7.505
BIND_HBC6ARLX['%s-FaOOFaON-3.6' % (dbse)] = -11.323
BIND_HBC6ARLX['%s-FaOOFaON-3.7' % (dbse)] = -13.775
BIND_HBC6ARLX['%s-FaOOFaON-3.8' % (dbse)] = -15.093
BIND_HBC6ARLX['%s-FaOOFaON-3.9' % (dbse)] = -15.524 # FaOOFaON minimum
BIND_HBC6ARLX['%s-FaOOFaON-4.0' % (dbse)] = -15.308
BIND_HBC6ARLX['%s-FaOOFaON-4.1' % (dbse)] = -14.658
BIND_HBC6ARLX['%s-FaOOFaON-4.2' % (dbse)] = -13.747
BIND_HBC6ARLX['%s-FaOOFaON-4.3' % (dbse)] = -12.705
BIND_HBC6ARLX['%s-FaOOFaON-4.4' % (dbse)] = -11.620
BIND_HBC6ARLX['%s-FaOOFaON-4.6' % (dbse)] = -9.536
BIND_HBC6ARLX['%s-FaOOFaON-4.8' % (dbse)] = -7.730
BIND_HBC6ARLX['%s-FaOOFaON-5.0' % (dbse)] = -6.252
BIND_HBC6ARLX['%s-FaOOFaON-5.4' % (dbse)] = -4.148
BIND_HBC6ARLX['%s-FaOOFaON-5.8' % (dbse)] = -2.834
BIND_HBC6ARLX['%s-FaOOFaON-6.4' % (dbse)] = -1.704
BIND_HBC6ARLX['%s-FaOOFaON-7.0' % (dbse)] = -1.094
BIND_HBC6ARLX['%s-FaOOFaON-8.0' % (dbse)] = -0.587
BIND_HBC6ARLX['%s-FaOOFaON-10.0' % (dbse)] = -0.226
# Formamide / formamidine heterodimer curve
BIND_HBC6ARLX['%s-FaONFaNN-3.4' % (dbse)] = 9.365
BIND_HBC6ARLX['%s-FaONFaNN-3.5' % (dbse)] = 2.303
BIND_HBC6ARLX['%s-FaONFaNN-3.6' % (dbse)] = -3.396
BIND_HBC6ARLX['%s-FaONFaNN-3.7' % (dbse)] = -7.780
BIND_HBC6ARLX['%s-FaONFaNN-3.8' % (dbse)] = -10.944
BIND_HBC6ARLX['%s-FaONFaNN-3.9' % (dbse)] = -13.026
BIND_HBC6ARLX['%s-FaONFaNN-4.0' % (dbse)] = -14.191
BIND_HBC6ARLX['%s-FaONFaNN-4.1' % (dbse)] = -14.622 # FaONFaNN minimum
BIND_HBC6ARLX['%s-FaONFaNN-4.2' % (dbse)] = -14.499
BIND_HBC6ARLX['%s-FaONFaNN-4.3' % (dbse)] = -13.984
BIND_HBC6ARLX['%s-FaONFaNN-4.4' % (dbse)] = -13.216
BIND_HBC6ARLX['%s-FaONFaNN-4.6' % (dbse)] = -11.325
BIND_HBC6ARLX['%s-FaONFaNN-4.8' % (dbse)] = -9.389
BIND_HBC6ARLX['%s-FaONFaNN-5.0' % (dbse)] = -7.664
BIND_HBC6ARLX['%s-FaONFaNN-5.4' % (dbse)] = -5.069
BIND_HBC6ARLX['%s-FaONFaNN-5.8' % (dbse)] = -3.412
BIND_HBC6ARLX['%s-FaONFaNN-6.4' % (dbse)] = -1.993
BIND_HBC6ARLX['%s-FaONFaNN-7.0' % (dbse)] = -1.245
BIND_HBC6ARLX['%s-FaONFaNN-8.0' % (dbse)] = -0.642
BIND_HBC6ARLX['%s-FaONFaNN-10.0' % (dbse)] = -0.232
# Formic acid / formamidine heterodimer curve (starts at 3.6 A)
BIND_HBC6ARLX['%s-FaOOFaNN-3.6' % (dbse)] = -12.415
BIND_HBC6ARLX['%s-FaOOFaNN-3.7' % (dbse)] = -15.329
BIND_HBC6ARLX['%s-FaOOFaNN-3.8' % (dbse)] = -17.085
BIND_HBC6ARLX['%s-FaOOFaNN-3.9' % (dbse)] = -17.872
BIND_HBC6ARLX['%s-FaOOFaNN-4.0' % (dbse)] = -17.895 # FaOOFaNN minimum
BIND_HBC6ARLX['%s-FaOOFaNN-4.1' % (dbse)] = -17.356
BIND_HBC6ARLX['%s-FaOOFaNN-4.2' % (dbse)] = -16.438
BIND_HBC6ARLX['%s-FaOOFaNN-4.3' % (dbse)] = -15.294
BIND_HBC6ARLX['%s-FaOOFaNN-4.4' % (dbse)] = -14.044
BIND_HBC6ARLX['%s-FaOOFaNN-4.6' % (dbse)] = -11.535
BIND_HBC6ARLX['%s-FaOOFaNN-4.8' % (dbse)] = -9.301
BIND_HBC6ARLX['%s-FaOOFaNN-5.0' % (dbse)] = -7.458
BIND_HBC6ARLX['%s-FaOOFaNN-5.4' % (dbse)] = -4.830
BIND_HBC6ARLX['%s-FaOOFaNN-5.8' % (dbse)] = -3.212
BIND_HBC6ARLX['%s-FaOOFaNN-6.4' % (dbse)] = -1.850
BIND_HBC6ARLX['%s-FaOOFaNN-7.0' % (dbse)] = -1.140
BIND_HBC6ARLX['%s-FaOOFaNN-8.0' % (dbse)] = -0.575
BIND_HBC6ARLX['%s-FaOOFaNN-10.0' % (dbse)] = -0.197
# Set default: the database-wide BIND lookup uses the uncorrected
# (no-deformation) HBC6A values unless switched elsewhere.
BIND = BIND_HBC6A
# <<< Comment Lines >>>
# Human-readable descriptions for every reagent in the database, keyed in
# the same '<dbse>-<system>-<separation>[-<fragment>]' scheme used above.
TAGL = {}
rxnpattern = re.compile(r'^(.+)-(.+)$')
# Driver table: (reaction list, complex label, monomer-A label, monomer-B
# label).  A monomer-B label of None marks a homodimer, for which only the
# monoA tags are generated (the two monomers are identical).
for systems, complexname, monoA, monoB in [
    (FaOOFaOO, 'Formic Acid Dimer', 'Formic Acid', None),
    (FaONFaON, 'Formamide Dimer', 'Formamide', None),
    (FaNNFaNN, 'Formamidine Dimer', 'Formamidine', None),
    (FaOOFaON, 'Formic Acid-Formamide Complex', 'Formic Acid', 'Formamide'),
    (FaONFaNN, 'Formamide-Formamidine Complex', 'Formamide', 'Formamidine'),
    (FaOOFaNN, 'Formic Acid-Formamidine Complex', 'Formic Acid', 'Formamidine'),
]:
    for item in systems:
        # Second regex group is the intermonomer separation in Angstroms.
        dist = rxnpattern.match(item).group(2)
        TAGL['%s-%s' % (dbse, item)] = '%s at %s A' % (complexname, dist)
        TAGL['%s-%s-dimer' % (dbse, item)] = '%s at %s A' % (complexname, dist)
        TAGL['%s-%s-monoA-CP' % (dbse, item)] = '%s from %s at %s A' % (monoA, complexname, dist)
        if monoB is not None:
            TAGL['%s-%s-monoB-CP' % (dbse, item)] = '%s from %s at %s A' % (monoB, complexname, dist)
        TAGL['%s-%s-monoA-unCP' % (dbse, item)] = '%s from %s at %s A' % (monoA, complexname, dist)
        if monoB is not None:
            TAGL['%s-%s-monoB-unCP' % (dbse, item)] = '%s from %s at %s A' % (monoB, complexname, dist)
# Relaxed-monomer reference geometries (no separation coordinate).
TAGL['%s-FaOO-mono-RLX' % (dbse)] = 'Formic Acid Relaxed Monomer'
TAGL['%s-FaON-mono-RLX' % (dbse)] = 'Formamide Relaxed Monomer'
TAGL['%s-FaNN-mono-RLX' % (dbse)] = 'Formamidine Relaxed Monomer'
# <<< Geometry Specification Strings >>>
# Cartesian geometries (angstrom) for every dimer, keyed as
# '<dbse>-<system>-<separation>-dimer'.  Each string holds two '0 1'
# (neutral singlet) fragments separated by '--'; monomer B is monomer A
# reflected through the origin.
GEOS = {}
# --- FaOOFaOO: formic acid homodimer at increasing separation ---
GEOS['%s-%s-dimer' % (dbse, 'FaOOFaOO-3.4')] = qcdb.Molecule("""
0 1
C 1.69147262 -0.17006280 0.00000000
H 2.79500199 -0.28101305 0.00000000
O 1.02814129 -1.21720864 0.00000000
O 1.36966587 1.08860681 0.00000000
H 0.34380745 1.18798183 0.00000000
--
0 1
C -1.69147262 0.17006280 0.00000000
H -2.79500199 0.28101305 0.00000000
O -1.02814129 1.21720864 0.00000000
O -1.36966587 -1.08860681 0.00000000
H -0.34380745 -1.18798183 0.00000000
units angstrom
""")
GEOS['%s-%s-dimer' % (dbse, 'FaOOFaOO-3.5')] = qcdb.Molecule("""
0 1
C 1.74073379 -0.17985247 0.00000000
H 2.84248921 -0.29368574 0.00000000
O 1.04839226 -1.20544675 0.00000000
O 1.40587723 1.08303481 0.00000000
H 0.38948927 1.16733829 0.00000000
--
0 1
C -1.74073379 0.17985247 0.00000000
H -2.84248921 0.29368574 0.00000000
O -1.04839226 1.20544675 0.00000000
O -1.40587723 -1.08303481 0.00000000
H -0.38948927 -1.16733829 0.00000000
units angstrom
""")
GEOS['%s-%s-dimer' % (dbse, 'FaOOFaOO-3.6')] = qcdb.Molecule("""
0 1
C 1.79035823 -0.18606050 0.00000000
H 2.89087214 -0.30042988 0.00000000
O 1.07568931 -1.19425943 0.00000000
O 1.44185816 1.08049605 0.00000000
H 0.43274661 1.15045330 0.00000000
--
0 1
C -1.79035823 0.18606050 0.00000000
H -2.89087214 0.30042988 0.00000000
O -1.07568931 1.19425943 0.00000000
O -1.44185816 -1.08049605 0.00000000
H -0.43274661 -1.15045330 0.00000000
units angstrom
""")
GEOS['%s-%s-dimer' % (dbse, 'FaOOFaOO-3.7')] = qcdb.Molecule("""
0 1
C 1.84016492 -0.19051039 0.00000000
H 2.93982222 -0.30435679 0.00000000
O 1.10803623 -1.18439540 0.00000000
O 1.47971186 1.07967254 0.00000000
H 0.47644336 1.13716323 0.00000000
--
0 1
C -1.84016492 0.19051039 0.00000000
H -2.93982222 0.30435679 0.00000000
O -1.10803623 1.18439540 0.00000000
O -1.47971186 -1.07967254 0.00000000
H -0.47644336 -1.13716323 0.00000000
units angstrom
""")
GEOS['%s-%s-dimer' % (dbse, 'FaOOFaOO-3.8')] = qcdb.Molecule("""
0 1
C 1.89005169 -0.19417983 0.00000000
H 2.98915191 -0.30709901 0.00000000
O 1.14427894 -1.17618187 0.00000000
O 1.52059687 1.07982181 0.00000000
H 0.52216282 1.12736965 0.00000000
--
0 1
C -1.89005169 0.19417983 0.00000000
H -2.98915191 0.30709901 0.00000000
O -1.14427894 1.17618187 0.00000000
O -1.52059687 -1.07982181 0.00000000
H -0.52216282 -1.12736965 0.00000000
units angstrom
""")
GEOS['%s-%s-dimer' % (dbse, 'FaOOFaOO-3.9')] = qcdb.Molecule("""
0 1
C 1.93997588 -0.19747119 0.00000000
H 3.03876443 -0.30931746 0.00000000
O 1.18372180 -1.16964417 0.00000000
O 1.56485002 1.08053745 0.00000000
H 0.57046128 1.12083453 0.00000000
--
0 1
C -1.93997588 0.19747119 0.00000000
H -3.03876443 0.30931746 0.00000000
O -1.18372180 1.16964417 0.00000000
O -1.56485002 -1.08053745 0.00000000
H -0.57046128 -1.12083453 0.00000000
units angstrom
""")
GEOS['%s-%s-dimer' % (dbse, 'FaOOFaOO-4.0')] = qcdb.Molecule("""
0 1
C 1.98993209 -0.20042861 0.00000000
H 3.08861310 -0.31108922 0.00000000
O 1.22585974 -1.16459704 0.00000000
O 1.61210992 1.08161540 0.00000000
H 0.62108112 1.11708149 0.00000000
--
0 1
C -1.98993209 0.20042861 0.00000000
H -3.08861310 0.31108922 0.00000000
O -1.22585974 1.16459704 0.00000000
O -1.61210992 -1.08161540 0.00000000
H -0.62108112 -1.11708149 0.00000000
units angstrom
""")
GEOS['%s-%s-dimer' % (dbse, 'FaOOFaOO-4.1')] = qcdb.Molecule("""
0 1
C 2.03992872 -0.20295867 0.00000000
H 3.13866374 -0.31227513 0.00000000
O 1.27022640 -1.16073661 0.00000000
O 1.66162256 1.08294418 0.00000000
H 0.67334166 1.11546236 0.00000000
--
0 1
C -2.03992872 0.20295867 0.00000000
H -3.13866374 0.31227513 0.00000000
O -1.27022640 1.16073661 0.00000000
O -1.66162256 -1.08294418 0.00000000
H -0.67334166 -1.11546236 0.00000000
units angstrom
""")
GEOS['%s-%s-dimer' % (dbse, 'FaOOFaOO-4.2')] = qcdb.Molecule("""
0 1
C 2.08997101 -0.20499428 0.00000000
H 3.18887965 -0.31278045 0.00000000
O 1.31635032 -1.15775239 0.00000000
O 1.71259791 1.08444718 0.00000000
H 0.72653811 1.11533705 0.00000000
--
0 1
C -2.08997101 0.20499428 0.00000000
H -3.18887965 0.31278045 0.00000000
O -1.31635032 1.15775239 0.00000000
O -1.71259791 -1.08444718 0.00000000
H -0.72653811 -1.11533705 0.00000000
units angstrom
""")
GEOS['%s-%s-dimer' % (dbse, 'FaOOFaOO-4.3')] = qcdb.Molecule("""
0 1
C 2.14005789 -0.20652795 0.00000000
H 3.23921879 -0.31260333 0.00000000
O 1.36379229 -1.15537668 0.00000000
O 1.76438167 1.08605925 0.00000000
H 0.78011615 1.11617462 0.00000000
--
0 1
C -2.14005789 0.20652795 0.00000000
H -3.23921879 0.31260333 0.00000000
O -1.36379229 1.15537668 0.00000000
O -1.76438167 -1.08605925 0.00000000
H -0.78011615 -1.11617462 0.00000000
units angstrom
""")
GEOS['%s-%s-dimer' % (dbse, 'FaOOFaOO-4.4')] = qcdb.Molecule("""
0 1
C 2.19018321 -0.20760327 0.00000000
H 3.28964362 -0.31181902 0.00000000
O 1.41218943 -1.15341758 0.00000000
O 1.81652565 1.08773186 0.00000000
H 0.83371879 1.11760094 0.00000000
--
0 1
C -2.19018321 0.20760327 0.00000000
H -3.28964362 0.31181902 0.00000000
O -1.41218943 1.15341758 0.00000000
O -1.81652565 -1.08773186 0.00000000
H -0.83371879 -1.11760094 0.00000000
units angstrom
""")
GEOS['%s-%s-dimer' % (dbse, 'FaOOFaOO-4.6')] = qcdb.Molecule("""
0 1
C 2.29051747 -0.20864206 0.00000000
H 3.39063528 -0.30885122 0.00000000
O 1.51085984 -1.15029332 0.00000000
O 1.92095251 1.09113093 0.00000000
H 0.94036724 1.12135540 0.00000000
--
0 1
C -2.29051747 0.20864206 0.00000000
H -3.39063528 0.30885122 0.00000000
O -1.51085984 1.15029332 0.00000000
O -1.92095251 -1.09113093 0.00000000
H -0.94036724 -1.12135540 0.00000000
units angstrom
""")
GEOS['%s-%s-dimer' % (dbse, 'FaOOFaOO-4.8')] = qcdb.Molecule("""
0 1
C 2.39092365 -0.20853307 0.00000000
H 3.49171140 -0.30454226 0.00000000
O 1.61118653 -1.14785646 0.00000000
O 2.02488292 1.09452849 0.00000000
H 1.04594759 1.12557184 0.00000000
--
0 1
C -2.39092365 0.20853307 0.00000000
H -3.49171140 0.30454226 0.00000000
O -1.61118653 1.14785646 0.00000000
O -2.02488292 -1.09452849 0.00000000
H -1.04594759 -1.12557184 0.00000000
units angstrom
""")
GEOS['%s-%s-dimer' % (dbse, 'FaOOFaOO-5.0')] = qcdb.Molecule("""
0 1
C 2.49138346 -0.20738986 0.00000000
H 3.59280864 -0.29907564 0.00000000
O 1.71273443 -1.14581025 0.00000000
O 2.12758466 1.09795049 0.00000000
H 1.14991023 1.12938889 0.00000000
--
0 1
C -2.49138346 0.20738986 0.00000000
H -3.59280864 0.29907564 0.00000000
O -1.71273443 1.14581025 0.00000000
O -2.12758466 -1.09795049 0.00000000
H -1.14991023 -1.12938889 0.00000000
units angstrom
""")
GEOS['%s-%s-dimer' % (dbse, 'FaOOFaOO-5.4')] = qcdb.Molecule("""
0 1
C 2.69240165 -0.20242478 0.00000000
H 3.79490134 -0.28531481 0.00000000
O 1.91833290 -1.14219531 0.00000000
O 2.32770522 1.10482267 0.00000000
H 1.35155974 1.13337052 0.00000000
--
0 1
C -2.69240165 0.20242478 0.00000000
H -3.79490134 0.28531481 0.00000000
O -1.91833290 1.14219531 0.00000000
O -2.32770522 -1.10482267 0.00000000
H -1.35155974 -1.13337052 0.00000000
units angstrom
""")
GEOS['%s-%s-dimer' % (dbse, 'FaOOFaOO-5.8')] = qcdb.Molecule("""
0 1
C 2.89338469 -0.19577496 0.00000000
H 3.99667367 -0.27042675 0.00000000
O 2.12532927 -1.13908975 0.00000000
O 2.52303889 1.11116394 0.00000000
H 1.54758478 1.13325561 0.00000000
--
0 1
C -2.89338469 0.19577496 0.00000000
H -3.99667367 0.27042675 0.00000000
O -2.12532927 1.13908975 0.00000000
O -2.52303889 -1.11116394 0.00000000
H -1.54758478 -1.13325561 0.00000000
units angstrom
""")
GEOS['%s-%s-dimer' % (dbse, 'FaOOFaOO-6.4')] = qcdb.Molecule("""
0 1
C 3.19458871 -0.18602798 0.00000000
H 4.29867458 -0.25032135 0.00000000
O 2.43545364 -1.13545523 0.00000000
O 2.81355286 1.11890079 0.00000000
H 1.83855220 1.13009890 0.00000000
--
0 1
C -3.19458871 0.18602798 0.00000000
H -4.29867458 0.25032135 0.00000000
O -2.43545364 1.13545523 0.00000000
O -2.81355286 -1.11890079 0.00000000
H -1.83855220 -1.13009890 0.00000000
units angstrom
""")
GEOS['%s-%s-dimer' % (dbse, 'FaOOFaOO-7.0')] = qcdb.Molecule("""
0 1
C 3.49547568 -0.17791547 0.00000000
H 4.60006941 -0.23413795 0.00000000
O 2.74388512 -1.13278853 0.00000000
O 3.10454835 1.12465267 0.00000000
H 2.12977769 1.12626632 0.00000000
--
0 1
C -3.49547568 0.17791547 0.00000000
H -4.60006941 0.23413795 0.00000000
O -2.74388512 1.13278853 0.00000000
O -3.10454835 -1.12465267 0.00000000
H -2.12977769 -1.12626632 0.00000000
units angstrom
""")
GEOS['%s-%s-dimer' % (dbse, 'FaOOFaOO-8.0')] = qcdb.Molecule("""
0 1
C 3.99646944 -0.16803989 0.00000000
H 5.10156877 -0.21450609 0.00000000
O 3.25414999 -1.12972545 0.00000000
O 3.59284622 1.13117851 0.00000000
H 2.61830479 1.12095879 0.00000000
--
0 1
C -3.99646944 0.16803989 0.00000000
H -5.10156877 0.21450609 0.00000000
O -3.25414999 1.12972545 0.00000000
O -3.59284622 -1.13117851 0.00000000
H -2.61830479 -1.12095879 0.00000000
units angstrom
""")
GEOS['%s-%s-dimer' % (dbse, 'FaOOFaOO-10.0')] = qcdb.Molecule("""
0 1
C 4.99755344 -0.15642268 0.00000000
H 6.10311728 -0.19102667 0.00000000
O 4.26634092 -1.12629100 0.00000000
O 4.57854479 1.13834246 0.00000000
H 3.60431482 1.11461219 0.00000000
--
0 1
C -4.99755344 0.15642268 0.00000000
H -6.10311728 0.19102667 0.00000000
O -4.26634092 1.12629100 0.00000000
O -4.57854479 -1.13834246 0.00000000
H -3.60431482 -1.11461219 0.00000000
units angstrom
""")
# --- FaONFaON: formamide homodimer at increasing separation ---
GEOS['%s-%s-dimer' % (dbse, 'FaONFaON-3.4')] = qcdb.Molecule("""
0 1
C 1.68040472 -0.25737318 0.00000000
H 2.78876519 -0.42713125 0.00000000
O 0.98387212 -1.27944113 0.00000000
N 1.47197100 1.06623396 0.00000000
H 0.51175066 1.43614881 0.00000000
H 2.30581639 1.63815514 0.00000000
--
0 1
C -1.68040472 0.25737318 0.00000000
H -2.78876519 0.42713125 0.00000000
O -0.98387212 1.27944113 0.00000000
N -1.47197100 -1.06623396 0.00000000
H -0.51175066 -1.43614881 0.00000000
H -2.30581639 -1.63815514 0.00000000
units angstrom
""")
GEOS['%s-%s-dimer' % (dbse, 'FaONFaON-3.5')] = qcdb.Molecule("""
0 1
C 1.73857937 -0.19960660 0.00000000
H 2.84972331 -0.32717722 0.00000000
O 1.07441392 -1.24725582 0.00000000
N 1.42842301 1.10192478 0.00000000
H 0.43552514 1.39016691 0.00000000
H 2.20654586 1.74794356 0.00000000
--
0 1
C -1.73857937 0.19960660 0.00000000
H -2.84972331 0.32717722 0.00000000
O -1.07441392 1.24725582 0.00000000
N -1.42842301 -1.10192478 0.00000000
H -0.43552514 -1.39016691 0.00000000
H -2.20654586 -1.74794356 0.00000000
units angstrom
""")
GEOS['%s-%s-dimer' % (dbse, 'FaONFaON-3.6')] = qcdb.Molecule("""
0 1
C 1.79214890 -0.16793868 0.00000000
H 2.90351314 -0.27208239 0.00000000
O 1.13654074 -1.22317842 0.00000000
N 1.41655781 1.11709296 0.00000000
H 0.40800979 1.35225476 0.00000000
H 2.15197143 1.81156642 0.00000000
--
0 1
C -1.79214890 0.16793868 0.00000000
H -2.90351314 0.27208239 0.00000000
O -1.13654074 1.22317842 0.00000000
N -1.41655781 -1.11709296 0.00000000
H -0.40800979 -1.35225476 0.00000000
H -2.15197143 -1.81156642 0.00000000
units angstrom
""")
GEOS['%s-%s-dimer' % (dbse, 'FaONFaON-3.7')] = qcdb.Molecule("""
0 1
C 1.84396206 -0.14934877 0.00000000
H 2.95481237 -0.23932033 0.00000000
O 1.18668392 -1.20479449 0.00000000
N 1.42230702 1.12368101 0.00000000
H 0.40573498 1.32114549 0.00000000
H 2.12347975 1.85285206 0.00000000
--
0 1
C -1.84396206 0.14934877 0.00000000
H -2.95481237 0.23932033 0.00000000
O -1.18668392 1.20479449 0.00000000
N -1.42230702 -1.12368101 0.00000000
H -0.40573498 -1.32114549 0.00000000
H -2.12347975 -1.85285206 0.00000000
units angstrom
""")
GEOS['%s-%s-dimer' % (dbse, 'FaONFaON-3.8')] = qcdb.Molecule("""
0 1
C 1.89495225 -0.13840942 0.00000000
H 3.00512682 -0.21949780 0.00000000
O 1.23153916 -1.19064307 0.00000000
N 1.43998729 1.12629331 0.00000000
H 0.41948596 1.29629750 0.00000000
H 2.11367741 1.88100998 0.00000000
--
0 1
C -1.89495225 0.13840942 0.00000000
H -3.00512682 0.21949780 0.00000000
O -1.23153916 1.19064307 0.00000000
N -1.43998729 -1.12629331 0.00000000
H -0.41948596 -1.29629750 0.00000000
H -2.11367741 -1.88100998 0.00000000
units angstrom
""")
GEOS['%s-%s-dimer' % (dbse, 'FaONFaON-3.9')] = qcdb.Molecule("""
0 1
C 1.94550597 -0.13231698 0.00000000
H 3.05506108 -0.20777961 0.00000000
O 1.27444696 -1.17984091 0.00000000
N 1.46671040 1.12708677 0.00000000
H 0.44465999 1.27731960 0.00000000
H 2.11884877 1.90053586 0.00000000
--
0 1
C -1.94550597 0.13231698 0.00000000
H -3.05506108 0.20777961 0.00000000
O -1.27444696 1.17984091 0.00000000
N -1.46671040 -1.12708677 0.00000000
H -0.44465999 -1.27731960 0.00000000
H -2.11884877 -1.90053586 0.00000000
units angstrom
""")
GEOS['%s-%s-dimer' % (dbse, 'FaONFaON-4.0')] = qcdb.Molecule("""
0 1
C 1.99581166 -0.12937222 0.00000000
H 3.10488983 -0.20126473 0.00000000
O 1.31722472 -1.17176432 0.00000000
N 1.50061758 1.12716306 0.00000000
H 0.47842639 1.26369880 0.00000000
H 2.13649227 1.91403520 0.00000000
--
0 1
C -1.99581166 0.12937222 0.00000000
H -3.10488983 0.20126473 0.00000000
O -1.31722472 1.17176432 0.00000000
N -1.50061758 -1.12716306 0.00000000
H -0.47842639 -1.26369880 0.00000000
H -2.13649227 -1.91403520 0.00000000
units angstrom
""")
GEOS['%s-%s-dimer' % (dbse, 'FaONFaON-4.1')] = qcdb.Molecule("""
0 1
C 2.04597250 -0.12844429 0.00000000
H 3.15473976 -0.19805169 0.00000000
O 1.36080054 -1.16589088 0.00000000
N 1.54023054 1.12707489 0.00000000
H 0.51870599 1.25472290 0.00000000
H 2.16442795 1.92321328 0.00000000
--
0 1
C -2.04597250 0.12844429 0.00000000
H -3.15473976 0.19805169 0.00000000
O -1.36080054 1.16589088 0.00000000
N -1.54023054 -1.12707489 0.00000000
H -0.51870599 -1.25472290 0.00000000
H -2.16442795 -1.92321328 0.00000000
units angstrom
""")
GEOS['%s-%s-dimer' % (dbse, 'FaONFaON-4.2')] = qcdb.Molecule("""
0 1
C 2.09604836 -0.12877408 0.00000000
H 3.20466046 -0.19688344 0.00000000
O 1.40550526 -1.16174638 0.00000000
N 1.58426128 1.12705799 0.00000000
H 0.56383358 1.24955108 0.00000000
H 2.20059240 1.92925606 0.00000000
--
0 1
C -2.09604836 0.12877408 0.00000000
H -3.20466046 0.19688344 0.00000000
O -1.40550526 1.16174638 0.00000000
N -1.58426128 -1.12705799 0.00000000
H -0.56383358 -1.24955108 0.00000000
H -2.20059240 -1.92925606 0.00000000
units angstrom
""")
GEOS['%s-%s-dimer' % (dbse, 'FaONFaON-4.3')] = qcdb.Molecule("""
0 1
C 2.14607510 -0.12985845 0.00000000
H 3.25466017 -0.19693865 0.00000000
O 1.45130851 -1.15890002 0.00000000
N 1.63157574 1.12717703 0.00000000
H 0.61244489 1.24732130 0.00000000
H 2.24305687 1.93302516 0.00000000
--
0 1
C -2.14607510 0.12985845 0.00000000
H -3.25466017 0.19693865 0.00000000
O -1.45130851 1.15890002 0.00000000
N -1.63157574 -1.12717703 0.00000000
H -0.61244489 -1.24732130 0.00000000
H -2.24305687 -1.93302516 0.00000000
units angstrom
""")
GEOS['%s-%s-dimer' % (dbse, 'FaONFaON-4.4')] = qcdb.Molecule("""
0 1
C 2.19607517 -0.13136009 0.00000000
H 3.30472871 -0.19767514 0.00000000
O 1.49802227 -1.15698038 0.00000000
N 1.68120383 1.12742404 0.00000000
H 0.66343356 1.24723827 0.00000000
H 2.29010119 1.93517379 0.00000000
--
0 1
C -2.19607517 0.13136009 0.00000000
H -3.30472871 0.19767514 0.00000000
O -1.49802227 1.15698038 0.00000000
N -1.68120383 -1.12742404 0.00000000
H -0.66343356 -1.24723827 0.00000000
H -2.29010119 -1.93517379 0.00000000
units angstrom
""")
GEOS['%s-%s-dimer' % (dbse, 'FaONFaON-4.6')] = qcdb.Molecule("""
0 1
C 2.29604573 -0.13481753 0.00000000
H 3.40501312 -0.19993307 0.00000000
O 1.59337012 -1.15484644 0.00000000
N 1.78457816 1.12819246 0.00000000
H 0.76947608 1.25110815 0.00000000
H 2.39273653 1.93641581 0.00000000
--
0 1
C -2.29604573 0.13481753 0.00000000
H -3.40501312 0.19993307 0.00000000
O -1.59337012 1.15484644 0.00000000
N -1.78457816 -1.12819246 0.00000000
H -0.76947608 -1.25110815 0.00000000
H -2.39273653 -1.93641581 0.00000000
units angstrom
""")
GEOS['%s-%s-dimer' % (dbse, 'FaONFaON-4.8')] = qcdb.Molecule("""
0 1
C 2.39601143 -0.13831545 0.00000000
H 3.50541998 -0.20235870 0.00000000
O 1.69032936 -1.15398956 0.00000000
N 1.89087102 1.12918383 0.00000000
H 0.87833293 1.25816233 0.00000000
H 2.50182267 1.93524951 0.00000000
--
0 1
C -2.39601143 0.13831545 0.00000000
H -3.50541998 0.20235870 0.00000000
O -1.69032936 1.15398956 0.00000000
N -1.89087102 -1.12918383 0.00000000
H -0.87833293 -1.25816233 0.00000000
H -2.50182267 -1.93524951 0.00000000
units angstrom
""")
GEOS['%s-%s-dimer' % (dbse, 'FaONFaON-5.0')] = qcdb.Molecule("""
0 1
C 2.49599331 -0.14149026 0.00000000
H 3.60589309 -0.20440710 0.00000000
O 1.78830561 -1.15373352 0.00000000
N 1.99827465 1.13031983 0.00000000
H 0.98820948 1.26676542 0.00000000
H 2.61374032 1.93291817 0.00000000
--
0 1
C -2.49599331 0.14149026 0.00000000
H -3.60589309 0.20440710 0.00000000
O -1.78830561 1.15373352 0.00000000
N -1.99827465 -1.13031983 0.00000000
H -0.98820948 -1.26676542 0.00000000
H -2.61374032 -1.93291817 0.00000000
units angstrom
""")
GEOS['%s-%s-dimer' % (dbse, 'FaONFaON-5.4')] = qcdb.Molecule("""
0 1
C 2.69608420 -0.14536998 0.00000000
H 3.80688469 -0.20526316 0.00000000
O 1.98650872 -1.15320220 0.00000000
N 2.21035574 1.13294283 0.00000000
H 1.20419348 1.28204746 0.00000000
H 2.83403804 1.92915087 0.00000000
--
0 1
C -2.69608420 0.14536998 0.00000000
H -3.80688469 0.20526316 0.00000000
O -1.98650872 1.15320220 0.00000000
N -2.21035574 -1.13294283 0.00000000
H -1.20419348 -1.28204746 0.00000000
H -2.83403804 -1.92915087 0.00000000
units angstrom
""")
GEOS['%s-%s-dimer' % (dbse, 'FaONFaON-5.8')] = qcdb.Molecule("""
0 1
C 2.89633015 -0.14585783 0.00000000
H 4.00779084 -0.20183046 0.00000000
O 2.18702642 -1.15195636 0.00000000
N 2.41611039 1.13579344 0.00000000
H 1.41208234 1.29109009 0.00000000
H 3.04382363 1.92881894 0.00000000
--
0 1
C -2.89633015 0.14585783 0.00000000
H -4.00779084 0.20183046 0.00000000
O -2.18702642 1.15195636 0.00000000
N -2.41611039 -1.13579344 0.00000000
H -1.41208234 -1.29109009 0.00000000
H -3.04382363 -1.92881894 0.00000000
units angstrom
""")
GEOS['%s-%s-dimer' % (dbse, 'FaONFaON-6.4')] = qcdb.Molecule("""
0 1
C 3.19679060 -0.14329444 0.00000000
H 4.30890404 -0.19314433 0.00000000
O 2.49028966 -1.14973005 0.00000000
N 2.71746327 1.13986108 0.00000000
H 1.71465256 1.29686270 0.00000000
H 3.34615668 1.93211051 0.00000000
--
0 1
C -3.19679060 0.14329444 0.00000000
H -4.30890404 0.19314433 0.00000000
O -2.49028966 1.14973005 0.00000000
N -2.71746327 -1.13986108 0.00000000
H -1.71465256 -1.29686270 0.00000000
H -3.34615668 -1.93211051 0.00000000
units angstrom
""")
GEOS['%s-%s-dimer' % (dbse, 'FaONFaON-7.0')] = qcdb.Molecule("""
0 1
C 3.49721450 -0.13962406 0.00000000
H 4.60973564 -0.18404076 0.00000000
O 2.79431063 -1.14771035 0.00000000
N 3.01520025 1.14321645 0.00000000
H 2.01252678 1.29808475 0.00000000
H 3.64229745 1.93673374 0.00000000
--
0 1
C -3.49721450 0.13962406 0.00000000
H -4.60973564 0.18404076 0.00000000
O -2.79431063 1.14771035 0.00000000
N -3.01520025 -1.14321645 0.00000000
H -2.01252678 -1.29808475 0.00000000
H -3.64229745 -1.93673374 0.00000000
units angstrom
""")
GEOS['%s-%s-dimer' % (dbse, 'FaONFaON-8.0')] = qcdb.Molecule("""
0 1
C 3.99775075 -0.13414290 0.00000000
H 5.11067288 -0.17148655 0.00000000
O 3.30011621 -1.14514769 0.00000000
N 3.51015001 1.14724727 0.00000000
H 2.50709338 1.29726882 0.00000000
H 4.13374763 1.94351670 0.00000000
--
0 1
C -3.99775075 0.13414290 0.00000000
H -5.11067288 0.17148655 0.00000000
O -3.30011621 1.14514769 0.00000000
N -3.51015001 -1.14724727 0.00000000
H -2.50709338 -1.29726882 0.00000000
H -4.13374763 -1.94351670 0.00000000
units angstrom
""")
GEOS['%s-%s-dimer' % (dbse, 'FaONFaON-10.0')] = qcdb.Molecule("""
0 1
C 4.99839541 -0.12669527 0.00000000
H 6.11168302 -0.15491398 0.00000000
O 4.30803664 -1.14211866 0.00000000
N 4.50192844 1.15187623 0.00000000
H 3.49800610 1.29436065 0.00000000
H 5.11998480 1.95244688 0.00000000
--
0 1
C -4.99839541 0.12669527 0.00000000
H -6.11168302 0.15491398 0.00000000
O -4.30803664 1.14211866 0.00000000
N -4.50192844 -1.15187623 0.00000000
H -3.49800610 -1.29436065 0.00000000
H -5.11998480 -1.95244688 0.00000000
units angstrom
""")
# --- FaNNFaNN: formamidine homodimer at increasing separation ---
GEOS['%s-%s-dimer' % (dbse, 'FaNNFaNN-3.4')] = qcdb.Molecule("""
0 1
C 1.69498253 -0.13051889 0.00000000
H 2.80633596 -0.21609653 0.00000000
N 1.09905882 -1.29491251 0.00000000
H 1.82089470 -2.01478416 0.00000000
N 1.37286317 1.16800831 0.00000000
H 0.35212595 1.46532985 0.00000000
H 2.16581591 1.79503126 0.00000000
--
0 1
C -1.69498253 0.13051889 0.00000000
H -2.80633596 0.21609653 0.00000000
N -1.09905882 1.29491251 0.00000000
H -1.82089470 2.01478416 0.00000000
N -1.37286317 -1.16800831 0.00000000
H -0.35212595 -1.46532985 0.00000000
H -2.16581591 -1.79503126 0.00000000
units angstrom
""")
GEOS['%s-%s-dimer' % (dbse, 'FaNNFaNN-3.5')] = qcdb.Molecule("""
0 1
C 1.74573454 -0.12211451 0.00000000
H 2.85588849 -0.19977002 0.00000000
N 1.13008345 -1.27542245 0.00000000
H 1.83665594 -2.01109010 0.00000000
N 1.38209118 1.16632433 0.00000000
H 0.35550066 1.43042391 0.00000000
H 2.14642150 1.82776097 0.00000000
--
0 1
C -1.74573454 0.12211451 0.00000000
H -2.85588849 0.19977002 0.00000000
N -1.13008345 1.27542245 0.00000000
H -1.83665594 2.01109010 0.00000000
N -1.38209118 -1.16632433 0.00000000
H -0.35550066 -1.43042391 0.00000000
H -2.14642150 -1.82776097 0.00000000
units angstrom
""")
GEOS['%s-%s-dimer' % (dbse, 'FaNNFaNN-3.6')] = qcdb.Molecule("""
0 1
C 1.79626801 -0.11585476 0.00000000
H 2.90515430 -0.18737513 0.00000000
N 1.16061610 -1.25790391 0.00000000
H 1.85182448 -2.00884269 0.00000000
N 1.39570164 1.16335736 0.00000000
H 0.36558676 1.39719019 0.00000000
H 2.13213780 1.85550007 0.00000000
--
0 1
C -1.79626801 0.11585476 0.00000000
H -2.90515430 0.18737513 0.00000000
N -1.16061610 1.25790391 0.00000000
H -1.85182448 2.00884269 0.00000000
N -1.39570164 -1.16335736 0.00000000
H -0.36558676 -1.39719019 0.00000000
H -2.13213780 -1.85550007 0.00000000
units angstrom
""")
GEOS['%s-%s-dimer' % (dbse, 'FaNNFaNN-3.7')] = qcdb.Molecule("""
0 1
C 1.84665575 -0.11119213 0.00000000
H 2.95429208 -0.17788590 0.00000000
N 1.19156721 -1.24220748 0.00000000
H 1.86789891 -2.00737666 0.00000000
N 1.41335406 1.15979090 0.00000000
H 0.38141976 1.36618041 0.00000000
H 2.12308262 1.87905403 0.00000000
--
0 1
C -1.84665575 0.11119213 0.00000000
H -2.95429208 0.17788590 0.00000000
N -1.19156721 1.24220748 0.00000000
H -1.86789891 2.00737666 0.00000000
N -1.41335406 -1.15979090 0.00000000
H -0.38141976 -1.36618041 0.00000000
H -2.12308262 -1.87905403 0.00000000
units angstrom
""")
GEOS['%s-%s-dimer' % (dbse, 'FaNNFaNN-3.8')] = qcdb.Molecule("""
0 1
C 1.89693820 -0.10782700 0.00000000
H 3.00339990 -0.17072121 0.00000000
N 1.22354936 -1.22831838 0.00000000
H 1.88594286 -2.00636060 0.00000000
N 1.43499889 1.15607525 0.00000000
H 0.40250444 1.33793562 0.00000000
H 2.11977084 1.89888692 0.00000000
--
0 1
C -1.89693820 0.10782700 0.00000000
H -3.00339990 0.17072121 0.00000000
N -1.22354936 1.22831838 0.00000000
H -1.88594286 2.00636060 0.00000000
N -1.43499889 -1.15607525 0.00000000
H -0.40250444 -1.33793562 0.00000000
H -2.11977084 -1.89888692 0.00000000
units angstrom
""")
GEOS['%s-%s-dimer' % (dbse, 'FaNNFaNN-3.9')] = qcdb.Molecule("""
0 1
C 1.94713954 -0.10558828 0.00000000
H 3.05254284 -0.16553141 0.00000000
N 1.25700152 -1.21627302 0.00000000
H 1.90673736 -2.00565055 0.00000000
N 1.46072682 1.15251669 0.00000000
H 0.42859974 1.31294465 0.00000000
H 2.12286880 1.91531762 0.00000000
--
0 1
C -1.94713954 0.10558828 0.00000000
H -3.05254284 0.16553141 0.00000000
N -1.25700152 1.21627302 0.00000000
H -1.90673736 2.00565055 0.00000000
N -1.46072682 -1.15251669 0.00000000
H -0.42859974 -1.31294465 0.00000000
H -2.12286880 -1.91531762 0.00000000
units angstrom
""")
GEOS['%s-%s-dimer' % (dbse, 'FaNNFaNN-4.0')] = qcdb.Molecule("""
0 1
C 1.99727808 -0.10431489 0.00000000
H 3.10176903 -0.16200082 0.00000000
N 1.29228783 -1.20609151 0.00000000
H 1.93090747 -2.00517101 0.00000000
N 1.49059653 1.14934169 0.00000000
H 0.45949690 1.29156428 0.00000000
H 2.13289668 1.92867591 0.00000000
--
0 1
C -1.99727808 0.10431489 0.00000000
H -3.10176903 0.16200082 0.00000000
N -1.29228783 1.20609151 0.00000000
H -1.93090747 2.00517101 0.00000000
N -1.49059653 -1.14934169 0.00000000
H -0.45949690 -1.29156428 0.00000000
H -2.13289668 -1.92867591 0.00000000
units angstrom
""")
GEOS['%s-%s-dimer' % (dbse, 'FaNNFaNN-4.1')] = qcdb.Molecule("""
0 1
C 2.04736978 -0.10381896 0.00000000
H 3.15111193 -0.15978802 0.00000000
N 1.32968607 -1.19773394 0.00000000
H 1.95888022 -2.00487461 0.00000000
N 1.52453108 1.14670134 0.00000000
H 0.49489283 1.27394294 0.00000000
H 2.15004136 1.93933564 0.00000000
--
0 1
C -2.04736978 0.10381896 0.00000000
H -3.15111193 0.15978802 0.00000000
N -1.32968607 1.19773394 0.00000000
H -1.95888022 2.00487461 0.00000000
N -1.52453108 -1.14670134 0.00000000
H -0.49489283 -1.27394294 0.00000000
H -2.15004136 -1.93933564 0.00000000
units angstrom
""")
GEOS['%s-%s-dimer' % (dbse, 'FaNNFaNN-4.2')] = qcdb.Molecule("""
0 1
C 2.09742845 -0.10390077 0.00000000
H 3.20059131 -0.15854840 0.00000000
N 1.36932506 -1.19108785 0.00000000
H 1.99079936 -2.00473399 0.00000000
N 1.56227621 1.14466550 0.00000000
H 0.53433608 1.25999374 0.00000000
H 2.17408498 1.94769642 0.00000000
--
0 1
C -2.09742845 0.10390077 0.00000000
H -3.20059131 0.15854840 0.00000000
N -1.36932506 1.19108785 0.00000000
H -1.99079936 2.00473399 0.00000000
N -1.56227621 -1.14466550 0.00000000
H -0.53433608 -1.25999374 0.00000000
H -2.17408498 -1.94769642 0.00000000
units angstrom
""")
GEOS['%s-%s-dimer' % (dbse, 'FaNNFaNN-4.3')] = qcdb.Molecule("""
0 1
C 2.14746429 -0.10439674 0.00000000
H 3.25020808 -0.15800548 0.00000000
N 1.41111901 -1.18596705 0.00000000
H 2.02643555 -2.00474792 0.00000000
N 1.60342559 1.14321301 0.00000000
H 0.57725829 1.24942013 0.00000000
H 2.20446212 1.95412762 0.00000000
--
0 1
C -2.14746429 0.10439674 0.00000000
H -3.25020808 0.15800548 0.00000000
N -1.41111901 1.18596705 0.00000000
H -2.02643555 2.00474792 0.00000000
N -1.60342559 -1.14321301 0.00000000
H -0.57725829 -1.24942013 0.00000000
H -2.20446212 -1.95412762 0.00000000
units angstrom
""")
GEOS['%s-%s-dimer' % (dbse, 'FaNNFaNN-4.4')] = qcdb.Molecule("""
0 1
C 2.19748298 -0.10521494 0.00000000
H 3.29994728 -0.15800065 0.00000000
N 1.45477374 -1.18214133 0.00000000
H 2.06523281 -2.00494056 0.00000000
N 1.64750275 1.14225070 0.00000000
H 0.62306965 1.24181216 0.00000000
H 2.24041883 1.95893145 0.00000000
--
0 1
C -2.19748298 0.10521494 0.00000000
H -3.29994728 0.15800065 0.00000000
N -1.45477374 1.18214133 0.00000000
H -2.06523281 2.00494056 0.00000000
N -1.64750275 -1.14225070 0.00000000
H -0.62306965 -1.24181216 0.00000000
H -2.24041883 -1.95893145 0.00000000
units angstrom
""")
GEOS['%s-%s-dimer' % (dbse, 'FaNNFaNN-4.6')] = qcdb.Molecule("""
0 1
C 2.29747604 -0.10772948 0.00000000
H 3.39968456 -0.15941244 0.00000000
N 1.54604150 -1.17743388 0.00000000
H 2.14951038 -2.00594097 0.00000000
N 1.74263302 1.14129835 0.00000000
H 0.72137666 1.23384217 0.00000000
H 2.32599570 1.96451573 0.00000000
--
0 1
C -2.29747604 0.10772948 0.00000000
H -3.39968456 0.15941244 0.00000000
N -1.54604150 1.17743388 0.00000000
H -2.14951038 2.00594097 0.00000000
N -1.74263302 -1.14129835 0.00000000
H -0.72137666 -1.23384217 0.00000000
H -2.32599570 -1.96451573 0.00000000
units angstrom
""")
GEOS['%s-%s-dimer' % (dbse, 'FaNNFaNN-4.8')] = qcdb.Molecule("""
0 1
C 2.39742249 -0.11120861 0.00000000
H 3.49964090 -0.16233693 0.00000000
N 1.64041315 -1.17535654 0.00000000
H 2.23917184 -2.00764182 0.00000000
N 1.84436251 1.14107385 0.00000000
H 0.82590336 1.23313494 0.00000000
H 2.42493207 1.96604670 0.00000000
--
0 1
C -2.39742249 0.11120861 0.00000000
H -3.49964090 0.16233693 0.00000000
N -1.64041315 1.17535654 0.00000000
H -2.23917184 2.00764182 0.00000000
N -1.84436251 -1.14107385 0.00000000
H -0.82590336 -1.23313494 0.00000000
H -2.42493207 -1.96604670 0.00000000
units angstrom
""")
GEOS['%s-%s-dimer' % (dbse, 'FaNNFaNN-5.0')] = qcdb.Molecule("""
0 1
C 2.49735133 -0.11505842 0.00000000
H 3.59972532 -0.16584719 0.00000000
N 1.73648339 -1.17469470 0.00000000
H 2.33188443 -2.00962361 0.00000000
N 1.94973034 1.14125016 0.00000000
H 0.93377159 1.23689923 0.00000000
H 2.53164153 1.96514450 0.00000000
--
0 1
C -2.49735133 0.11505842 0.00000000
H -3.59972532 0.16584719 0.00000000
N -1.73648339 1.17469470 0.00000000
H -2.33188443 2.00962361 0.00000000
N -1.94973034 -1.14125016 0.00000000
H -0.93377159 -1.23689923 0.00000000
H -2.53164153 -1.96514450 0.00000000
units angstrom
""")
GEOS['%s-%s-dimer' % (dbse, 'FaNNFaNN-5.4')] = qcdb.Molecule("""
0 1
C 2.69724643 -0.12191870 0.00000000
H 3.80009014 -0.17176852 0.00000000
N 1.93139799 -1.17474481 0.00000000
H 2.52207952 -2.01322139 0.00000000
N 2.16339519 1.14223517 0.00000000
H 1.15164366 1.24943138 0.00000000
H 2.75270049 1.96073403 0.00000000
--
0 1
C -2.69724643 0.12191870 0.00000000
H -3.80009014 0.17176852 0.00000000
N -1.93139799 1.17474481 0.00000000
H -2.52207952 2.01322139 0.00000000
N -2.16339519 -1.14223517 0.00000000
H -1.15164366 -1.24943138 0.00000000
H -2.75270049 -1.96073403 0.00000000
units angstrom
""")
GEOS['%s-%s-dimer' % (dbse, 'FaNNFaNN-5.8')] = qcdb.Molecule("""
0 1
C 2.89724776 -0.12632587 0.00000000
H 4.00054647 -0.17443193 0.00000000
N 2.12821587 -1.17451446 0.00000000
H 2.71529330 -2.01553546 0.00000000
N 2.37424699 1.14355972 0.00000000
H 1.36542121 1.26070273 0.00000000
H 2.97042339 1.95703238 0.00000000
--
0 1
C -2.89724776 0.12632587 0.00000000
H -4.00054647 0.17443193 0.00000000
N -2.12821587 1.17451446 0.00000000
H -2.71529330 2.01553546 0.00000000
N -2.37424699 -1.14355972 0.00000000
H -1.36542121 -1.26070273 0.00000000
H -2.97042339 -1.95703238 0.00000000
units angstrom
""")
GEOS['%s-%s-dimer' % (dbse, 'FaNNFaNN-6.4')] = qcdb.Molecule("""
0 1
C 3.19740673 -0.12881636 0.00000000
H 4.30120367 -0.17328586 0.00000000
N 2.42581025 -1.17305230 0.00000000
H 3.00905003 -2.01668226 0.00000000
N 2.68267636 1.14568753 0.00000000
H 1.67620388 1.27071277 0.00000000
H 3.28436882 1.95508270 0.00000000
--
0 1
C -3.19740673 0.12881636 0.00000000
H -4.30120367 0.17328586 0.00000000
N -2.42581025 1.17305230 0.00000000
H -3.00905003 2.01668226 0.00000000
N -2.68267636 -1.14568753 0.00000000
H -1.67620388 -1.27071277 0.00000000
H -3.28436882 -1.95508270 0.00000000
units angstrom
""")
GEOS['%s-%s-dimer' % (dbse, 'FaNNFaNN-7.0')] = qcdb.Molecule("""
0 1
C 3.49764421 -0.12840997 0.00000000
H 4.60176692 -0.16894593 0.00000000
N 2.72569862 -1.17126633 0.00000000
H 3.30724390 -2.01600726 0.00000000
N 2.98518869 1.14779235 0.00000000
H 1.97966414 1.27507136 0.00000000
H 3.58843631 1.95603470 0.00000000
--
0 1
C -3.49764421 0.12840997 0.00000000
H -4.60176692 0.16894593 0.00000000
N -2.72569862 1.17126633 0.00000000
H -3.30724390 2.01600726 0.00000000
N -2.98518869 -1.14779235 0.00000000
H -1.97966414 -1.27507136 0.00000000
H -3.58843631 -1.95603470 0.00000000
units angstrom
""")
GEOS['%s-%s-dimer' % (dbse, 'FaNNFaNN-8.0')] = qcdb.Molecule("""
0 1
C 3.99802556 -0.12568606 0.00000000
H 5.10246875 -0.16040648 0.00000000
N 3.22763840 -1.16876965 0.00000000
H 3.80888196 -2.01363519 0.00000000
N 3.48433462 1.15076117 0.00000000
H 2.47917010 1.27706322 0.00000000
H 4.08677787 1.95961623 0.00000000
--
0 1
C -3.99802556 0.12568606 0.00000000
H -5.10246875 0.16040648 0.00000000
N -3.22763840 1.16876965 0.00000000
H -3.80888196 2.01363519 0.00000000
N -3.48433462 -1.15076117 0.00000000
H -2.47917010 -1.27706322 0.00000000
H -4.08677787 -1.95961623 0.00000000
units angstrom
""")
GEOS['%s-%s-dimer' % (dbse, 'FaNNFaNN-10.0')] = qcdb.Molecule("""
0 1
C 4.99855171 -0.12037120 0.00000000
H 6.10329053 -0.14697466 0.00000000
N 4.23217198 -1.16574424 0.00000000
H 4.81516518 -2.00932380 0.00000000
N 4.47966407 1.15461725 0.00000000
H 3.47428778 1.27673150 0.00000000
H 5.07887010 1.96589319 0.00000000
--
0 1
C -4.99855171 0.12037120 0.00000000
H -6.10329053 0.14697466 0.00000000
N -4.23217198 1.16574424 0.00000000
H -4.81516518 2.00932380 0.00000000
N -4.47966407 -1.15461725 0.00000000
H -3.47428778 -1.27673150 0.00000000
H -5.07887010 -1.96589319 0.00000000
units angstrom
""")
GEOS['%s-%s-dimer' % (dbse, 'FaOOFaON-3.4')] = qcdb.Molecule("""
0 1
C -1.68442643 0.20364693 0.00000000
H -2.78917270 0.34190070 0.00000000
O -1.02504588 1.24486390 0.00000000
O -1.38766062 -1.06424798 0.00000000
H -0.36064758 -1.17759060 0.00000000
--
0 1
C 1.68925857 -0.21855377 0.00000000
H 2.79602984 -0.35706095 0.00000000
O 0.99264772 -1.25039598 0.00000000
N 1.44623488 1.09491586 0.00000000
H 0.47728536 1.44199769 0.00000000
H 2.26183892 1.69328776 0.00000000
units angstrom
""")
GEOS['%s-%s-dimer' % (dbse, 'FaOOFaON-3.5')] = qcdb.Molecule("""
0 1
C -1.73348582 0.18223355 0.00000000
H -2.83797096 0.30364160 0.00000000
O -1.07653282 1.22594575 0.00000000
O -1.38752757 -1.07701453 0.00000000
H -0.36204548 -1.15331467 0.00000000
--
0 1
C 1.74555921 -0.20019268 0.00000000
H 2.85267994 -0.32189045 0.00000000
O 1.03958692 -1.22627875 0.00000000
N 1.44809759 1.10348563 0.00000000
H 0.46246915 1.40389863 0.00000000
H 2.22816418 1.74757255 0.00000000
units angstrom
""")
GEOS['%s-%s-dimer' % (dbse, 'FaOOFaON-3.6')] = qcdb.Molecule("""
0 1
C -1.78217824 0.17067463 0.00000000
H -2.88586235 0.28192476 0.00000000
O -1.11959419 1.21087120 0.00000000
O -1.40116395 -1.08369245 0.00000000
H -0.38043759 -1.13241294 0.00000000
--
0 1
C 1.79967181 -0.19037187 0.00000000
H 2.90662488 -0.30195151 0.00000000
O 1.08054052 -1.20751884 0.00000000
N 1.46296486 1.10656646 0.00000000
H 0.46794114 1.37317096 0.00000000
H 2.21379368 1.78455973 0.00000000
units angstrom
""")
GEOS['%s-%s-dimer' % (dbse, 'FaOOFaON-3.7')] = qcdb.Molecule("""
0 1
C -1.83088576 0.16446589 0.00000000
H -2.93370803 0.26905294 0.00000000
O -1.15951512 1.19856960 0.00000000
O -1.42420415 -1.08772422 0.00000000
H -0.40924981 -1.11532142 0.00000000
--
0 1
C 1.85258766 -0.18485933 0.00000000
H 2.95926374 -0.28981186 0.00000000
O 1.12048297 -1.19270075 0.00000000
N 1.48618925 1.10754437 0.00000000
H 0.48561914 1.34851310 0.00000000
H 2.21266192 1.81160865 0.00000000
units angstrom
""")
GEOS['%s-%s-dimer' % (dbse, 'FaOOFaON-3.8')] = qcdb.Molecule("""
0 1
C -1.87970385 0.16163715 0.00000000
H -2.98177538 0.26162942 0.00000000
O -1.19849611 1.18849760 0.00000000
O -1.45487899 -1.09052331 0.00000000
H -0.44572390 -1.10219203 0.00000000
--
0 1
C 1.90475161 -0.18173103 0.00000000
H 3.01119328 -0.28211981 0.00000000
O 1.16164142 -1.18119246 0.00000000
N 1.51565227 1.10780268 0.00000000
H 0.51184265 1.32923289 0.00000000
H 2.22211656 1.83196397 0.00000000
units angstrom
""")
GEOS['%s-%s-dimer' % (dbse, 'FaOOFaON-3.9')] = qcdb.Molecule("""
0 1
C -1.92862050 0.16119369 0.00000000
H -3.03011644 0.25790262 0.00000000
O -1.23743130 1.18028070 0.00000000
O -1.49236998 -1.09274175 0.00000000
H -0.48861382 -1.09313129 0.00000000
--
0 1
C 1.95643502 -0.17990567 0.00000000
H 3.06274786 -0.27703752 0.00000000
O 1.20521341 -1.17264505 0.00000000
N 1.55000583 1.10795923 0.00000000
H 0.54440542 1.31463815 0.00000000
H 2.24038620 1.84747610 0.00000000
units angstrom
""")
GEOS['%s-%s-dimer' % (dbse, 'FaOOFaON-4.0')] = qcdb.Molecule("""
0 1
C -1.97758328 0.16248610 0.00000000
H -3.07869694 0.25672827 0.00000000
O -1.27661752 1.17355076 0.00000000
O -1.53599418 -1.09470560 0.00000000
H -0.53703710 -1.08809981 0.00000000
--
0 1
C 2.00784676 -0.17861908 0.00000000
H 3.11415420 -0.27330577 0.00000000
O 1.25185521 -1.16673528 0.00000000
N 1.58801714 1.10829528 0.00000000
H 0.58149375 1.30380344 0.00000000
H 2.26561575 1.85952935 0.00000000
units angstrom
""")
GEOS['%s-%s-dimer' % (dbse, 'FaOOFaON-4.1')] = qcdb.Molecule("""
0 1
C -2.02654503 0.16495904 0.00000000
H -3.12746053 0.25719799 0.00000000
O -1.31615809 1.16794329 0.00000000
O -1.58476587 -1.09656137 0.00000000
H -0.58990682 -1.08671689 0.00000000
--
0 1
C 2.05914052 -0.17735549 0.00000000
H 3.16555405 -0.27005509 0.00000000
O 1.30164029 -1.16302967 0.00000000
N 1.62857259 1.10890330 0.00000000
H 0.62159334 1.29571943 0.00000000
H 2.29594551 1.86920816 0.00000000
units angstrom
""")
GEOS['%s-%s-dimer' % (dbse, 'FaOOFaON-4.2')] = qcdb.Molecule("""
0 1
C -2.07549477 0.16808404 0.00000000
H -3.17636801 0.25855097 0.00000000
O -1.35622177 1.16316834 0.00000000
O -1.63731502 -1.09834467 0.00000000
H -0.64583681 -1.08819660 0.00000000
--
0 1
C 2.11039579 -0.17590164 0.00000000
H 3.21699962 -0.26683949 0.00000000
O 1.35401341 -1.16097028 0.00000000
N 1.67091982 1.10975954 0.00000000
H 0.66372245 1.28959945 0.00000000
H 2.33000288 1.87722287 0.00000000
units angstrom
""")
GEOS['%s-%s-dimer' % (dbse, 'FaOOFaON-4.3')] = qcdb.Molecule("""
0 1
C -2.12445982 0.17139548 0.00000000
H -3.22540810 0.26019471 0.00000000
O -1.39708971 1.15905225 0.00000000
O -1.69213335 -1.10003833 0.00000000
H -0.70338392 -1.09153044 0.00000000
--
0 1
C 2.16162183 -0.17430715 0.00000000
H 3.26847086 -0.26358230 0.00000000
O 1.40801417 -1.15997383 0.00000000
N 1.71474383 1.11078804 0.00000000
H 0.70747623 1.28502211 0.00000000
H 2.36711721 1.88391930 0.00000000
units angstrom
""")
GEOS['%s-%s-dimer' % (dbse, 'FaOOFaON-4.4')] = qcdb.Molecule("""
0 1
C -2.17348283 0.17454732 0.00000000
H -3.27458485 0.26172858 0.00000000
O -1.43904397 1.15551433 0.00000000
O -1.74791159 -1.10161980 0.00000000
H -0.76134836 -1.09577404 0.00000000
--
0 1
C 2.21279083 -0.17274192 0.00000000
H 3.31991768 -0.26040021 0.00000000
O 1.46265433 -1.15955428 0.00000000
N 1.75999690 1.11191304 0.00000000
H 0.75279213 1.28183397 0.00000000
H 2.40708847 1.88942849 0.00000000
units angstrom
""")
GEOS['%s-%s-dimer' % (dbse, 'FaOOFaON-4.6')] = qcdb.Molecule("""
0 1
C -2.27180865 0.17975090 0.00000000
H -3.37333986 0.26380020 0.00000000
O -1.52660441 1.14998794 0.00000000
O -1.85932830 -1.10443098 0.00000000
H -0.87597638 -1.10468982 0.00000000
--
0 1
C 2.31485963 -0.17022217 0.00000000
H 3.42259050 -0.25474451 0.00000000
O 1.57138429 -1.15932816 0.00000000
N 1.85465707 1.11426632 0.00000000
H 0.84799242 1.27930626 0.00000000
H 2.49507804 1.89722164 0.00000000
units angstrom
""")
GEOS['%s-%s-dimer' % (dbse, 'FaOOFaON-4.8')] = qcdb.Molecule("""
0 1
C -2.37053947 0.18335144 0.00000000
H -3.47255698 0.26439714 0.00000000
O -1.61858011 1.14618338 0.00000000
O -1.96882669 -1.10687094 0.00000000
H -0.98773609 -1.11271132 0.00000000
--
0 1
C 2.41653317 -0.16870436 0.00000000
H 3.52489949 -0.25021697 0.00000000
O 1.67809174 -1.15922822 0.00000000
N 1.95399445 1.11666331 0.00000000
H 0.94841269 1.28120182 0.00000000
H 2.59207415 1.90149404 0.00000000
units angstrom
""")
GEOS['%s-%s-dimer' % (dbse, 'FaOOFaON-5.0')] = qcdb.Molecule("""
0 1
C -2.46965740 0.18532900 0.00000000
H -3.57215472 0.26340833 0.00000000
O -1.71425854 1.14365130 0.00000000
O -2.07552238 -1.10913287 0.00000000
H -1.09609784 -1.11892804 0.00000000
--
0 1
C 2.51785158 -0.16788849 0.00000000
H 3.62685662 -0.24642870 0.00000000
O 1.78274154 -1.15890784 0.00000000
N 2.05645986 1.11912237 0.00000000
H 1.05236659 1.28627334 0.00000000
H 2.69518733 1.90341149 0.00000000
units angstrom
""")
GEOS['%s-%s-dimer' % (dbse, 'FaOOFaON-5.4')] = qcdb.Molecule("""
0 1
C -2.66850366 0.18514825 0.00000000
H -3.77181125 0.25702537 0.00000000
O -1.91234427 1.14028296 0.00000000
O -2.28050679 -1.11367376 0.00000000
H -1.30314992 -1.12550085 0.00000000
--
0 1
C 2.72007445 -0.16590118 0.00000000
H 3.83020333 -0.23822269 0.00000000
O 1.98935069 -1.15730079 0.00000000
N 2.26253222 1.12413593 0.00000000
H 1.26110058 1.29739823 0.00000000
H 2.90434733 1.90588628 0.00000000
units angstrom
""")
GEOS['%s-%s-dimer' % (dbse, 'FaOOFaON-5.8')] = qcdb.Molecule("""
0 1
C -2.86735875 0.18206412 0.00000000
H -3.97126829 0.24767419 0.00000000
O -2.11347561 1.13744992 0.00000000
O -2.47888370 -1.11819686 0.00000000
H -1.50260023 -1.12749792 0.00000000
--
0 1
C 2.92242523 -0.16204751 0.00000000
H 4.03336881 -0.22807565 0.00000000
O 2.19639756 -1.15519802 0.00000000
N 2.46432498 1.12882796 0.00000000
H 1.46400695 1.30306213 0.00000000
H 3.10628291 1.91044267 0.00000000
units angstrom
""")
GEOS['%s-%s-dimer' % (dbse, 'FaOOFaON-6.4')] = qcdb.Molecule("""
0 1
C -3.16535897 0.17586134 0.00000000
H -4.26988030 0.23307508 0.00000000
O -2.41665650 1.13403709 0.00000000
O -2.77135326 -1.12414764 0.00000000
H -1.79587783 -1.12655724 0.00000000
--
0 1
C 3.22607312 -0.15521212 0.00000000
H 4.33781868 -0.21280006 0.00000000
O 2.50723536 -1.15220261 0.00000000
N 2.76274851 1.13469399 0.00000000
H 1.76253556 1.30519593 0.00000000
H 3.40173066 1.91872273 0.00000000
units angstrom
""")
GEOS['%s-%s-dimer' % (dbse, 'FaOOFaON-7.0')] = qcdb.Molecule("""
0 1
C -3.46303289 0.16994236 0.00000000
H -4.56795025 0.22032609 0.00000000
O -2.71952203 1.13148929 0.00000000
O -3.06229197 -1.12882028 0.00000000
H -2.08723785 -1.12403398 0.00000000
--
0 1
C 3.52970200 -0.14892316 0.00000000
H 4.64195129 -0.19964122 0.00000000
O 2.81721464 -1.14975890 0.00000000
N 3.05992779 1.13914187 0.00000000
H 2.05915506 1.30417766 0.00000000
H 3.69489864 1.92641219 0.00000000
units angstrom
""")
GEOS['%s-%s-dimer' % (dbse, 'FaOOFaON-8.0')] = qcdb.Molecule("""
0 1
C -3.95866293 0.16221564 0.00000000
H -5.06397335 0.20411777 0.00000000
O -3.22210995 1.12855078 0.00000000
O -3.54823170 -1.13429286 0.00000000
H -2.57357553 -1.11984997 0.00000000
--
0 1
C 4.03559598 -0.14084533 0.00000000
H 5.14833239 -0.18302898 0.00000000
O 3.33114210 -1.14677626 0.00000000
N 3.55651747 1.14422533 0.00000000
H 2.55468792 1.30114823 0.00000000
H 4.18559310 1.93619532 0.00000000
units angstrom
""")
GEOS['%s-%s-dimer' % (dbse, 'FaOOFaON-10.0')] = qcdb.Molecule("""
0 1
C -4.94899199 0.15261986 0.00000000
H -6.05466697 0.18398552 0.00000000
O -4.22139968 1.12529943 0.00000000
O -4.52579681 -1.14045626 0.00000000
H -3.55156372 -1.11418323 0.00000000
--
0 1
C 5.04698843 -0.13094494 0.00000000
H 6.16016561 -0.16252342 0.00000000
O 4.35243631 -1.14332963 0.00000000
N 4.55565401 1.14984957 0.00000000
H 3.55239498 1.29639409 0.00000000
H 5.17702398 1.94785550 0.00000000
units angstrom
""")
GEOS['%s-%s-dimer' % (dbse, 'FaONFaNN-3.4')] = qcdb.Molecule("""
0 1
C -1.68224527 0.20007686 0.00000000
H -2.79851036 0.32731687 0.00000000
O -1.03337695 1.25685782 0.00000000
N -1.42371554 -1.11002338 0.00000000
H -0.41476039 -1.44699423 0.00000000
H -2.24807926 -1.69769344 0.00000000
--
0 1
C 1.69587997 -0.18498638 0.00000000
H 2.80117416 -0.31097584 0.00000000
N 1.05127931 -1.32038822 0.00000000
H 1.74163210 -2.07111657 0.00000000
N 1.41625166 1.12650330 0.00000000
H 0.43768191 1.44670293 0.00000000
H 2.21582070 1.74279349 0.00000000
units angstrom
""")
GEOS['%s-%s-dimer' % (dbse, 'FaONFaNN-3.5')] = qcdb.Molecule("""
0 1
C -1.73161493 0.16885424 0.00000000
H -2.84774407 0.27229826 0.00000000
O -1.09203207 1.23345283 0.00000000
N -1.40935402 -1.12703884 0.00000000
H -0.38663445 -1.41522920 0.00000000
H -2.19618815 -1.76421756 0.00000000
--
0 1
C 1.75344970 -0.15414523 0.00000000
H 2.85932238 -0.25663867 0.00000000
N 1.10888334 -1.28995398 0.00000000
H 1.79513417 -2.04501619 0.00000000
N 1.40087130 1.13935068 0.00000000
H 0.40196256 1.40065024 0.00000000
H 2.15615427 1.80923145 0.00000000
units angstrom
""")
GEOS['%s-%s-dimer' % (dbse, 'FaONFaNN-3.6')] = qcdb.Molecule("""
0 1
C -1.78022870 0.14987298 0.00000000
H -2.89545364 0.23836736 0.00000000
O -1.13881825 1.21461091 0.00000000
N -1.41027191 -1.13460552 0.00000000
H -0.38077495 -1.38453958 0.00000000
H -2.16426193 -1.81034042 0.00000000
--
0 1
C 1.80849126 -0.13489609 0.00000000
H 2.91397761 -0.22261770 0.00000000
N 1.15496921 -1.26599407 0.00000000
H 1.83228961 -2.02978225 0.00000000
N 1.40124345 1.14388415 0.00000000
H 0.39074481 1.36097240 0.00000000
H 2.11873085 1.85410086 0.00000000
units angstrom
""")
GEOS['%s-%s-dimer' % (dbse, 'FaONFaNN-3.7')] = qcdb.Molecule("""
0 1
C -1.82868510 0.13792020 0.00000000
H -2.94280654 0.21653844 0.00000000
O -1.18029155 1.19916421 0.00000000
N -1.42121847 -1.13750418 0.00000000
H -0.38876840 -1.35619673 0.00000000
H -2.14603498 -1.84437855 0.00000000
--
0 1
C 1.86213779 -0.12252360 0.00000000
H 2.96689030 -0.20048072 0.00000000
N 1.19568826 -1.24658561 0.00000000
H 1.86217213 -2.02059464 0.00000000
N 1.41233657 1.14451863 0.00000000
H 0.39516324 1.32705396 0.00000000
H 2.09724900 1.88616308 0.00000000
units angstrom
""")
GEOS['%s-%s-dimer' % (dbse, 'FaONFaNN-3.8')] = qcdb.Molecule("""
0 1
C -1.87720296 0.13037316 0.00000000
H -2.99024880 0.20231026 0.00000000
O -1.21984382 1.18650518 0.00000000
N -1.43964187 -1.13803078 0.00000000
H -0.40644181 -1.33086253 0.00000000
H -2.13868738 -1.87029717 0.00000000
--
0 1
C 1.91488588 -0.11471276 0.00000000
H 3.01882141 -0.18606105 0.00000000
N 1.23404120 -1.23071467 0.00000000
H 1.88923247 -2.01504741 0.00000000
N 1.43187244 1.14338561 0.00000000
H 0.41107420 1.29878706 0.00000000
H 2.08912295 1.90958573 0.00000000
units angstrom
""")
GEOS['%s-%s-dimer' % (dbse, 'FaONFaNN-3.9')] = qcdb.Molecule("""
0 1
C -1.92586862 0.12572004 0.00000000
H -3.03796709 0.19311602 0.00000000
O -1.25948828 1.17632094 0.00000000
N -1.46411088 -1.13741671 0.00000000
H -0.43144516 -1.30889080 0.00000000
H -2.14076353 -1.89029703 0.00000000
--
0 1
C 1.96699000 -0.11019704 0.00000000
H 3.07015648 -0.17705172 0.00000000
N 1.27163617 -1.21778291 0.00000000
H 1.91584742 -2.01186499 0.00000000
N 1.45874144 1.14162823 0.00000000
H 0.43630966 1.27622061 0.00000000
H 2.09327641 1.92665702 0.00000000
units angstrom
""")
GEOS['%s-%s-dimer' % (dbse, 'FaONFaNN-4.0')] = qcdb.Molecule("""
0 1
C -1.97471721 0.12295853 0.00000000
H -3.08604013 0.18727896 0.00000000
O -1.30050144 1.16840525 0.00000000
N -1.49363393 -1.13636823 0.00000000
H -0.46222530 -1.29037121 0.00000000
H -2.15122398 -1.90581462 0.00000000
--
0 1
C 2.01860067 -0.10816415 0.00000000
H 3.12111663 -0.17197486 0.00000000
N 1.30940050 -1.20735856 0.00000000
H 1.94334630 -2.01030940 0.00000000
N 1.49223980 1.13988904 0.00000000
H 0.46948873 1.25930607 0.00000000
H 2.10902806 1.93883512 0.00000000
units angstrom
""")
GEOS['%s-%s-dimer' % (dbse, 'FaONFaNN-4.1')] = qcdb.Molecule("""
0 1
C -2.02376044 0.12137130 0.00000000
H -3.13449129 0.18360767 0.00000000
O -1.34361035 1.16254229 0.00000000
N -1.52737885 -1.13528332 0.00000000
H -0.49758591 -1.27514722 0.00000000
H -2.16905903 -1.91790148 0.00000000
--
0 1
C 2.06981923 -0.10799978 0.00000000
H 3.17183809 -0.16974799 0.00000000
N 1.34787373 -1.19906629 0.00000000
H 1.97244050 -2.00991317 0.00000000
N 1.53167706 1.13850851 0.00000000
H 0.50945141 1.24775624 0.00000000
H 2.13551288 1.94717878 0.00000000
units angstrom
""")
GEOS['%s-%s-dimer' % (dbse, 'FaONFaNN-4.2')] = qcdb.Molecule("""
0 1
C -2.07299095 0.12047764 0.00000000
H -3.18330109 0.18128485 0.00000000
O -1.38900859 1.15844919 0.00000000
N -1.56461787 -1.13434964 0.00000000
H -0.53656198 -1.26288353 0.00000000
H -2.19324500 -1.92734321 0.00000000
--
0 1
C 2.12072533 -0.10919529 0.00000000
H 3.22240596 -0.16952990 0.00000000
N 1.38733366 -1.19254323 0.00000000
H 2.00342544 -2.01034739 0.00000000
N 1.57620126 1.13760159 0.00000000
H 0.55501008 1.24096826 0.00000000
H 2.17143215 1.95251421 0.00000000
units angstrom
""")
GEOS['%s-%s-dimer' % (dbse, 'FaONFaNN-4.3')] = qcdb.Molecule("""
0 1
C -2.12238102 0.12001003 0.00000000
H -3.23241727 0.17981127 0.00000000
O -1.43641896 1.15577795 0.00000000
N -1.60476162 -1.13362638 0.00000000
H -0.57842142 -1.25321622 0.00000000
H -2.22287071 -1.93470173 0.00000000
--
0 1
C 2.17139325 -0.11130946 0.00000000
H 3.27288112 -0.17065017 0.00000000
N 1.42791432 -1.18745819 0.00000000
H 2.03640187 -2.01135424 0.00000000
N 1.62476864 1.13714001 0.00000000
H 0.60488448 1.23808048 0.00000000
H 2.21504656 1.95554011 0.00000000
units angstrom
""")
GEOS['%s-%s-dimer' % (dbse, 'FaONFaNN-4.4')] = qcdb.Molecule("""
0 1
C -2.17188140 0.11988045 0.00000000
H -3.28175966 0.17894887 0.00000000
O -1.48523553 1.15415106 0.00000000
N -1.64740473 -1.13310658 0.00000000
H -0.62269709 -1.24587569 0.00000000
H -2.25726874 -1.94034051 0.00000000
--
0 1
C 2.22190118 -0.11395941 0.00000000
H 3.32331433 -0.17257731 0.00000000
N 1.46969819 -1.18352562 0.00000000
H 2.07143556 -2.01270689 0.00000000
N 1.67623295 1.13700945 0.00000000
H 0.65776551 1.23805882 0.00000000
H 2.26434801 1.95686864 0.00000000
units angstrom
""")
GEOS['%s-%s-dimer' % (dbse, 'FaONFaNN-4.6')] = qcdb.Molecule("""
0 1
C -2.27098481 0.12071738 0.00000000
H -3.38079959 0.17880004 0.00000000
O -1.58441741 1.15276122 0.00000000
N -1.73927197 -1.13261218 0.00000000
H -0.71758673 -1.23769913 0.00000000
H -2.33880523 -1.94731780 0.00000000
--
0 1
C 2.32272918 -0.11969671 0.00000000
H 3.42422935 -0.17734422 0.00000000
N 1.55712717 -1.17831430 0.00000000
H 2.14807243 -2.01572191 0.00000000
N 1.78379346 1.13726371 0.00000000
H 0.76811221 1.24281216 0.00000000
H 2.37265274 1.95644249 0.00000000
units angstrom
""")
GEOS['%s-%s-dimer' % (dbse, 'FaONFaNN-4.8')] = qcdb.Molecule("""
0 1
C -2.37001060 0.12298207 0.00000000
H -3.47998730 0.18042379 0.00000000
O -1.68320651 1.15259030 0.00000000
N -1.83827892 -1.13269684 0.00000000
H -0.81940832 -1.23705071 0.00000000
H -2.43438758 -1.94974931 0.00000000
--
0 1
C 2.42357564 -0.12508788 0.00000000
H 3.52532822 -0.18210400 0.00000000
N 1.64921165 -1.17564041 0.00000000
H 2.23277319 -2.01853948 0.00000000
N 1.89367698 1.13774591 0.00000000
H 0.88057502 1.25034358 0.00000000
H 2.48658153 1.95391122 0.00000000
units angstrom
""")
GEOS['%s-%s-dimer' % (dbse, 'FaONFaNN-5.0')] = qcdb.Molecule("""
0 1
C -2.46891156 0.12607812 0.00000000
H -3.57918367 0.18293782 0.00000000
O -1.78100213 1.15283979 0.00000000
N -1.94185120 -1.13321981 0.00000000
H -0.92564680 -1.24163694 0.00000000
H -2.53922912 -1.94924812 0.00000000
--
0 1
C 2.52454538 -0.12964879 0.00000000
H 3.62661434 -0.18608839 0.00000000
N 1.74484282 -1.17452020 0.00000000
H 2.32376341 -2.02083242 0.00000000
N 2.00367207 1.13832154 0.00000000
H 0.99293782 1.25860595 0.00000000
H 2.60169997 1.95068341 0.00000000
units angstrom
""")
GEOS['%s-%s-dimer' % (dbse, 'FaONFaNN-5.4')] = qcdb.Molecule("""
0 1
C -2.66627924 0.13246574 0.00000000
H -3.77728386 0.18752690 0.00000000
O -1.97376452 1.15272174 0.00000000
N -2.15448827 -1.13525942 0.00000000
H -1.14315421 -1.25838806 0.00000000
H -2.76095466 -1.94449331 0.00000000
--
0 1
C 2.72710220 -0.13482909 0.00000000
H 3.82978184 -0.18947766 0.00000000
N 1.94426866 -1.17439124 0.00000000
H 2.52008027 -2.02303547 0.00000000
N 2.21808889 1.13969445 0.00000000
H 1.21077019 1.27082675 0.00000000
H 2.82358807 1.94643158 0.00000000
units angstrom
""")
GEOS['%s-%s-dimer' % (dbse, 'FaONFaNN-5.8')] = qcdb.Molecule("""
0 1
C -2.86354863 0.13635439 0.00000000
H -3.97519119 0.18862454 0.00000000
O -2.16694926 1.15157613 0.00000000
N -2.36429186 -1.13776075 0.00000000
H -1.35648460 -1.27362001 0.00000000
H -2.97920858 -1.94060039 0.00000000
--
0 1
C 2.93005123 -0.13606441 0.00000000
H 4.03321099 -0.18793570 0.00000000
N 2.14816261 -1.17431528 0.00000000
H 2.72399657 -2.02298552 0.00000000
N 2.42557457 1.14138666 0.00000000
H 1.42000011 1.27687473 0.00000000
H 3.03409167 1.94580891 0.00000000
units angstrom
""")
GEOS['%s-%s-dimer' % (dbse, 'FaONFaNN-6.4')] = qcdb.Molecule("""
0 1
C -3.15990010 0.13729708 0.00000000
H -4.27217260 0.18465048 0.00000000
O -2.46166577 1.14950598 0.00000000
N -2.66867708 -1.14125954 0.00000000
H -1.66354000 -1.28595015 0.00000000
H -3.28934347 -1.93968912 0.00000000
--
0 1
C 3.23430882 -0.13492712 0.00000000
H 4.33798775 -0.18191466 0.00000000
N 2.45446506 -1.17291481 0.00000000
H 3.03058764 -2.02136402 0.00000000
N 2.73117559 1.14418616 0.00000000
H 1.72670616 1.28092608 0.00000000
H 3.34065139 1.94785431 0.00000000
units angstrom
""")
GEOS['%s-%s-dimer' % (dbse, 'FaONFaNN-7.0')] = qcdb.Molecule("""
0 1
C -3.45655227 0.13564152 0.00000000
H -4.56921064 0.17826784 0.00000000
O -2.75931575 1.14757333 0.00000000
N -2.96668321 -1.14421239 0.00000000
H -1.96251864 -1.29094605 0.00000000
H -3.58850495 -1.94176941 0.00000000
--
0 1
C 3.53831767 -0.13233432 0.00000000
H 4.64233959 -0.17462977 0.00000000
N 2.76094801 -1.17118270 0.00000000
H 3.33794153 -2.01900317 0.00000000
N 3.03335964 1.14677845 0.00000000
H 2.02910179 1.28169924 0.00000000
H 3.64162361 1.95135046 0.00000000
units angstrom
""")
GEOS['%s-%s-dimer' % (dbse, 'FaONFaNN-8.0')] = qcdb.Molecule("""
0 1
C -3.95119391 0.13180726 0.00000000
H -5.06421494 0.16795264 0.00000000
O -3.25700879 1.14506295 0.00000000
N -3.45900266 -1.14786697 0.00000000
H -2.45511963 -1.29313383 0.00000000
H -4.07950006 -1.94648494 0.00000000
--
0 1
C 4.04459226 -0.12785598 0.00000000
H 5.14896015 -0.16372034 0.00000000
N 3.27114001 -1.16876693 0.00000000
H 3.84998346 -2.01526327 0.00000000
N 3.53511346 1.15017556 0.00000000
H 2.53063281 1.28082664 0.00000000
H 4.14039777 1.95697653 0.00000000
units angstrom
""")
GEOS['%s-%s-dimer' % (dbse, 'FaONFaNN-10.0')] = qcdb.Molecule("""
0 1
C -4.94051459 0.12561969 0.00000000
H -6.05385655 0.15311771 0.00000000
O -4.25179826 1.14206138 0.00000000
N -4.44195445 -1.15218600 0.00000000
H -3.43769145 -1.29256009 0.00000000
H -5.05860183 -1.95380737 0.00000000
--
0 1
C 5.05643837 -0.12129136 0.00000000
H 6.16113003 -0.14857573 0.00000000
N 4.28876097 -1.16580235 0.00000000
H 4.87088844 -2.00997853 0.00000000
N 4.53935952 1.15434078 0.00000000
H 3.53425488 1.27830661 0.00000000
H 5.13978851 1.96474473 0.00000000
units angstrom
""")
GEOS['%s-%s-dimer' % (dbse, 'FaOOFaNN-3.6')] = qcdb.Molecule("""
0 1
C -1.76494924 0.15235779 0.00000000
H -2.87327871 0.25118115 0.00000000
O -1.11608332 1.20862564 0.00000000
O -1.38646683 -1.09127823 0.00000000
H -0.29340506 -1.17844056 0.00000000
--
0 1
C 1.82082566 -0.16736510 0.00000000
H 2.92191627 -0.26554301 0.00000000
N 1.07144779 -1.23580930 0.00000000
H 1.63171355 -2.08650902 0.00000000
N 1.45046411 1.12181032 0.00000000
H 0.44433636 1.35982341 0.00000000
H 2.18036640 1.81973064 0.00000000
units angstrom
""")
GEOS['%s-%s-dimer' % (dbse, 'FaOOFaNN-3.7')] = qcdb.Molecule("""
0 1
C -1.81131260 0.15832706 0.00000000
H -2.91739607 0.25779849 0.00000000
O -1.14367167 1.19940056 0.00000000
O -1.41977566 -1.09000937 0.00000000
H -0.35849776 -1.15413164 0.00000000
--
0 1
C 1.87381606 -0.17308101 0.00000000
H 2.97427347 -0.27204647 0.00000000
N 1.09844368 -1.22316593 0.00000000
H 1.64649981 -2.08331650 0.00000000
N 1.48635545 1.11647699 0.00000000
H 0.48139283 1.34214247 0.00000000
H 2.19917472 1.83148239 0.00000000
units angstrom
""")
GEOS['%s-%s-dimer' % (dbse, 'FaOOFaNN-3.8')] = qcdb.Molecule("""
0 1
C -1.85851203 0.15849692 0.00000000
H -2.96318957 0.25525336 0.00000000
O -1.17920577 1.19001178 0.00000000
O -1.44984324 -1.09154980 0.00000000
H -0.40704328 -1.13449846 0.00000000
--
0 1
C 1.92699581 -0.17306795 0.00000000
H 3.02703743 -0.26941835 0.00000000
N 1.13373794 -1.21065729 0.00000000
H 1.67232278 -2.07790126 0.00000000
N 1.51877490 1.11463167 0.00000000
H 0.51255074 1.32398473 0.00000000
H 2.21287037 1.84762526 0.00000000
units angstrom
""")
GEOS['%s-%s-dimer' % (dbse, 'FaOOFaNN-3.9')] = qcdb.Molecule("""
0 1
C -1.90625728 0.15789816 0.00000000
H -3.00992908 0.25165586 0.00000000
O -1.21798842 1.18183546 0.00000000
O -1.48225867 -1.09353763 0.00000000
H -0.45272461 -1.11834425 0.00000000
--
0 1
C 1.97974667 -0.17222063 0.00000000
H 3.07948948 -0.26564455 0.00000000
N 1.17265485 -1.20000472 0.00000000
H 1.70375525 -2.07277955 0.00000000
N 1.55373649 1.11388209 0.00000000
H 0.54641308 1.30898417 0.00000000
H 2.23141500 1.86193091 0.00000000
units angstrom
""")
GEOS['%s-%s-dimer' % (dbse, 'FaOOFaNN-4.0')] = qcdb.Molecule("""
0 1
C -1.95441797 0.15753953 0.00000000
H -3.05736547 0.24858075 0.00000000
O -1.25897479 1.17508156 0.00000000
O -1.51819340 -1.09554164 0.00000000
H -0.49885308 -1.10531756 0.00000000
--
0 1
C 2.03202506 -0.17151568 0.00000000
H 3.13158097 -0.26227695 0.00000000
N 1.21422382 -1.19144395 0.00000000
H 1.73970852 -2.06839079 0.00000000
N 1.59233540 1.11373305 0.00000000
H 0.58444574 1.29777092 0.00000000
H 2.25678662 1.87343146 0.00000000
units angstrom
""")
GEOS['%s-%s-dimer' % (dbse, 'FaOOFaNN-4.1')] = qcdb.Molecule("""
0 1
C -2.00288568 0.15766229 0.00000000
H -3.10533408 0.24633212 0.00000000
O -1.30161674 1.16963551 0.00000000
O -1.55791984 -1.09749465 0.00000000
H -0.54668214 -1.09529505 0.00000000
--
0 1
C 2.08391761 -0.17103897 0.00000000
H 3.18340265 -0.25947047 0.00000000
N 1.25831080 -1.18492468 0.00000000
H 1.78008989 -2.06474177 0.00000000
N 1.63434437 1.11404135 0.00000000
H 0.62635840 1.29004572 0.00000000
H 2.28868862 1.88235333 0.00000000
units angstrom
""")
GEOS['%s-%s-dimer' % (dbse, 'FaOOFaNN-4.2')] = qcdb.Molecule("""
0 1
C -2.05155512 0.15829850 0.00000000
H -3.15369321 0.24486622 0.00000000
O -1.34536667 1.16523819 0.00000000
O -1.60135034 -1.09941548 0.00000000
H -0.59665904 -1.08819693 0.00000000
--
0 1
C 2.13554958 -0.17057869 0.00000000
H 3.23507103 -0.25694088 0.00000000
N 1.30498819 -1.18028525 0.00000000
H 1.82500469 -2.06170111 0.00000000
N 1.67894465 1.11473347 0.00000000
H 0.67115531 1.28502625 0.00000000
H 2.32574278 1.88931006 0.00000000
units angstrom
""")
GEOS['%s-%s-dimer' % (dbse, 'FaOOFaNN-4.3')] = qcdb.Molecule("""
0 1
C -2.10033250 0.15939803 0.00000000
H -3.20231249 0.24404256 0.00000000
O -1.38972180 1.16159856 0.00000000
O -1.64810100 -1.10130943 0.00000000
H -0.64870385 -1.08383587 0.00000000
--
0 1
C 2.18703912 -0.16992061 0.00000000
H 3.28668154 -0.25438558 0.00000000
N 1.35416505 -1.17725815 0.00000000
H 1.87421236 -2.05911458 0.00000000
N 1.72522255 1.11572240 0.00000000
H 0.71775960 1.28182144 0.00000000
H 2.36632423 1.89492965 0.00000000
units angstrom
""")
GEOS['%s-%s-dimer' % (dbse, 'FaOOFaNN-4.4')] = qcdb.Molecule("""
0 1
C -2.14914999 0.16086986 0.00000000
H -3.25109115 0.24371672 0.00000000
O -1.43435230 1.15849662 0.00000000
O -1.69755858 -1.10315341 0.00000000
H -0.70240660 -1.08186738 0.00000000
--
0 1
C 2.23846787 -0.16900290 0.00000000
H 3.33829123 -0.25169053 0.00000000
N 1.40543473 -1.17550354 0.00000000
H 1.92694842 -2.05687020 0.00000000
N 1.77253439 1.11690493 0.00000000
H 0.76542789 1.27975581 0.00000000
H 2.40921970 1.89964705 0.00000000
units angstrom
""")
GEOS['%s-%s-dimer' % (dbse, 'FaOOFaNN-4.6')] = qcdb.Molecule("""
0 1
C -2.24680225 0.16448460 0.00000000
H -3.34889590 0.24405206 0.00000000
O -1.52408934 1.15343926 0.00000000
O -1.80181709 -1.10653912 0.00000000
H -0.81274434 -1.08325157 0.00000000
--
0 1
C 2.34125676 -0.16675788 0.00000000
H 3.44152405 -0.24619349 0.00000000
N 1.51181334 -1.17441280 0.00000000
H 2.03873187 -2.05311586 0.00000000
N 1.86930785 1.11945644 0.00000000
H 0.86293152 1.27779567 0.00000000
H 2.49986583 1.90702451 0.00000000
units angstrom
""")
GEOS['%s-%s-dimer' % (dbse, 'FaOOFaNN-4.8')] = qcdb.Molecule("""
0 1
C -2.34452726 0.16814176 0.00000000
H -3.44693630 0.24477572 0.00000000
O -1.61510986 1.14962012 0.00000000
O -1.90907877 -1.10941692 0.00000000
H -0.92394094 -1.08850288 0.00000000
--
0 1
C 2.44391781 -0.16472698 0.00000000
H 3.54466133 -0.24124517 0.00000000
N 1.61976719 -1.17478374 0.00000000
H 2.15288994 -2.05011167 0.00000000
N 1.96885825 1.12195665 0.00000000
H 0.96338922 1.27845400 0.00000000
H 2.59632255 1.91191489 0.00000000
units angstrom
""")
GEOS['%s-%s-dimer' % (dbse, 'FaOOFaNN-5.0')] = qcdb.Molecule("""
0 1
C -2.44239623 0.17109139 0.00000000
H -3.54516867 0.24497624 0.00000000
O -1.70804752 1.14675140 0.00000000
O -2.01618143 -1.11187675 0.00000000
H -1.03372502 -1.09479015 0.00000000
--
0 1
C 2.54642003 -0.16315517 0.00000000
H 3.64763409 -0.23693560 0.00000000
N 1.72722352 -1.17543768 0.00000000
H 2.26584227 -2.04765017 0.00000000
N 2.07067818 1.12435634 0.00000000
H 1.06632369 1.28126044 0.00000000
H 2.69733971 1.91489993 0.00000000
units angstrom
""")
GEOS['%s-%s-dimer' % (dbse, 'FaOOFaNN-5.4')] = qcdb.Molecule("""
0 1
C -2.63833181 0.17421223 0.00000000
H -3.74182732 0.24259469 0.00000000
O -1.89813960 1.14236018 0.00000000
O -2.22587715 -1.11630144 0.00000000
H -1.24674157 -1.10613581 0.00000000
--
0 1
C 2.75133046 -0.15977949 0.00000000
H 3.85338801 -0.22807284 0.00000000
N 1.94079641 -1.17624150 0.00000000
H 2.48828614 -2.04318335 0.00000000
N 2.27517829 1.12893018 0.00000000
H 1.27261881 1.28724241 0.00000000
H 2.90172978 1.91949574 0.00000000
units angstrom
""")
GEOS['%s-%s-dimer' % (dbse, 'FaOOFaNN-5.8')] = qcdb.Molecule("""
0 1
C -2.83430026 0.17412588 0.00000000
H -3.93836628 0.23696684 0.00000000
O -2.09196716 1.13877607 0.00000000
O -2.42840583 -1.12038969 0.00000000
H -1.45107051 -1.11332814 0.00000000
--
0 1
C 2.95632855 -0.15546371 0.00000000
H 4.05902283 -0.21822659 0.00000000
N 2.15295059 -1.17586911 0.00000000
H 2.70690065 -2.03878687 0.00000000
N 2.47731363 1.13303523 0.00000000
H 1.47540835 1.28966180 0.00000000
H 3.10222174 1.92484945 0.00000000
units angstrom
""")
GEOS['%s-%s-dimer' % (dbse, 'FaOOFaNN-6.4')] = qcdb.Molecule("""
0 1
C -3.12831510 0.17084992 0.00000000
H -4.23295676 0.22611350 0.00000000
O -2.38703135 1.13479845 0.00000000
O -2.72349375 -1.12568269 0.00000000
H -1.74750630 -1.11756461 0.00000000
--
0 1
C 3.26369185 -0.14893273 0.00000000
H 4.36705371 -0.20413229 0.00000000
N 2.46830003 -1.17413526 0.00000000
H 3.02869067 -2.03292199 0.00000000
N 2.77871570 1.13816091 0.00000000
H 1.77677840 1.28984953 0.00000000
H 3.39990002 1.93285794 0.00000000
units angstrom
""")
GEOS['%s-%s-dimer' % (dbse, 'FaOOFaNN-7.0')] = qcdb.Molecule("""
0 1
C -3.42232127 0.16653527 0.00000000
H -4.52734265 0.21545352 0.00000000
O -2.68394725 1.13198626 0.00000000
O -3.01424928 -1.12993738 0.00000000
H -2.03892166 -1.11784311 0.00000000
--
0 1
C 3.57083085 -0.14304496 0.00000000
H 4.67461743 -0.19190855 0.00000000
N 2.78161679 -1.17225021 0.00000000
H 3.34675787 -2.02791543 0.00000000
N 3.07956461 1.14215114 0.00000000
H 2.07713948 1.28822281 0.00000000
H 3.69667809 1.93998571 0.00000000
units angstrom
""")
GEOS['%s-%s-dimer' % (dbse, 'FaOOFaNN-8.0')] = qcdb.Molecule("""
0 1
C -3.91213194 0.16018637 0.00000000
H -5.01752318 0.20105168 0.00000000
O -3.17868840 1.12881552 0.00000000
O -3.49736776 -1.13502496 0.00000000
H -2.52261475 -1.11615900 0.00000000
--
0 1
C 4.08240815 -0.13536451 0.00000000
H 5.18661439 -0.17618601 0.00000000
N 3.30082536 -1.16974476 0.00000000
H 3.87167108 -2.02158353 0.00000000
N 3.58224397 1.14684614 0.00000000
H 2.57892509 1.28493460 0.00000000
H 4.19355750 1.94910244 0.00000000
units angstrom
""")
GEOS['%s-%s-dimer' % (dbse, 'FaOOFaNN-10.0')] = qcdb.Molecule("""
0 1
C -4.89119351 0.15168886 0.00000000
H -5.99692859 0.18239314 0.00000000
O -4.16502480 1.12540358 0.00000000
O -4.46601853 -1.14087529 0.00000000
H -3.49177332 -1.11255947 0.00000000
--
0 1
C 5.10495503 -0.12588632 0.00000000
H 6.20955485 -0.15655909 0.00000000
N 4.33254137 -1.16667991 0.00000000
H 4.91019302 -2.01389087 0.00000000
N 4.59328914 1.15218338 0.00000000
H 3.58879137 1.28039339 0.00000000
H 5.19724654 1.95996355 0.00000000
units angstrom
""")
GEOS['%s-%s' % (dbse, 'FaOO-mono-RLX')] = qcdb.Molecule("""
0 1
C -0.10067338 -0.41790840 0.00000000
H -0.02994601 -1.52175017 0.00000000
O -1.13575810 0.21734849 0.00000000
O 1.14827203 0.12334561 0.00000000
H 1.03004150 1.09065136 0.00000000
units angstrom
""")
GEOS['%s-%s' % (dbse, 'FaON-mono-RLX')] = qcdb.Molecule("""
0 1
C -0.08832415 -0.41231959 0.00000000
H -0.04284111 -1.52506057 0.00000000
O -1.14609563 0.21052951 0.00000000
N 1.15566522 0.16652753 0.00000000
H 1.23188425 1.17751832 0.00000000
H 1.99476943 -0.39808693 0.00000000
units angstrom
""")
GEOS['%s-%s' % (dbse, 'FaNN-mono-RLX')] = qcdb.Molecule("""
0 1
C -0.08463791 -0.42651676 0.00000000
H -0.04431604 -1.53084724 0.00000000
N -1.17217255 0.27814579 0.00000000
H -1.98138857 -0.35160983 0.00000000
N 1.15840794 0.16588754 0.00000000
H 1.22157301 1.17651874 0.00000000
H 2.00315093 -0.38515422 0.00000000
units angstrom
""")
# <<< Derived Geometry Strings >>>
# For every reaction, derive the four monomer geometries from its dimer:
# each fragment alone (unCP) and each fragment with the other as ghost
# atoms for counterpoise correction (CP).
for rxn in HRXN:
    dimer = GEOS['%s-%s-dimer' % (dbse, rxn)]
    GEOS['%s-%s-monoA-unCP' % (dbse, rxn)] = dimer.extract_fragments(1)
    GEOS['%s-%s-monoB-unCP' % (dbse, rxn)] = dimer.extract_fragments(2)
    GEOS['%s-%s-monoA-CP' % (dbse, rxn)] = dimer.extract_fragments(1, 2)
    GEOS['%s-%s-monoB-CP' % (dbse, rxn)] = dimer.extract_fragments(2, 1)
#########################################################################
# <<< Supplementary Quantum Chemical Results >>>
# Supplementary quantum-chemical reference values, keyed first by quantity
# name and then by system label ('<dbse>-<system>-<geometry variant>').
# Nuclear repulsion energies are recorded for each dimer, monomer (CP and
# unCP), and relaxed monomer; presumably in hartree — TODO confirm against
# the qcdb convention. Auto-generated data — do not hand-edit values.
DATA = {}

DATA['NUCLEAR REPULSION ENERGY'] = {}
DATA['NUCLEAR REPULSION ENERGY']['HBC1-FaOOFaOO-3.4-dimer'            ] =     243.36065247
DATA['NUCLEAR REPULSION ENERGY']['HBC1-FaOOFaOO-3.4-monoA-CP'         ] =      69.33894005
DATA['NUCLEAR REPULSION ENERGY']['HBC1-FaOOFaOO-3.4-monoA-unCP'       ] =      69.33894005
DATA['NUCLEAR REPULSION ENERGY']['HBC1-FaOO-mono-RLX'                 ] =      69.43637417
DATA['NUCLEAR REPULSION ENERGY']['HBC1-FaOOFaOO-3.5-dimer'            ] =     241.31794314
DATA['NUCLEAR REPULSION ENERGY']['HBC1-FaOOFaOO-3.5-monoA-CP'         ] =      69.40721776
DATA['NUCLEAR REPULSION ENERGY']['HBC1-FaOOFaOO-3.5-monoA-unCP'       ] =      69.40721776
DATA['NUCLEAR REPULSION ENERGY']['HBC1-FaOOFaOO-3.6-dimer'            ] =     239.13071852
DATA['NUCLEAR REPULSION ENERGY']['HBC1-FaOOFaOO-3.6-monoA-CP'         ] =      69.44436721
DATA['NUCLEAR REPULSION ENERGY']['HBC1-FaOOFaOO-3.6-monoA-unCP'       ] =      69.44436721
DATA['NUCLEAR REPULSION ENERGY']['HBC1-FaOOFaOO-3.7-dimer'            ] =     236.82630686
DATA['NUCLEAR REPULSION ENERGY']['HBC1-FaOOFaOO-3.7-monoA-CP'         ] =      69.46091244
DATA['NUCLEAR REPULSION ENERGY']['HBC1-FaOOFaOO-3.7-monoA-unCP'       ] =      69.46091244
DATA['NUCLEAR REPULSION ENERGY']['HBC1-FaOOFaOO-3.8-dimer'            ] =     234.43461308
DATA['NUCLEAR REPULSION ENERGY']['HBC1-FaOOFaOO-3.8-monoA-CP'         ] =      69.46484389
DATA['NUCLEAR REPULSION ENERGY']['HBC1-FaOOFaOO-3.8-monoA-unCP'       ] =      69.46484389
DATA['NUCLEAR REPULSION ENERGY']['HBC1-FaOOFaOO-3.9-dimer'            ] =     231.99257053
DATA['NUCLEAR REPULSION ENERGY']['HBC1-FaOOFaOO-3.9-monoA-CP'         ] =      69.46242034
DATA['NUCLEAR REPULSION ENERGY']['HBC1-FaOOFaOO-3.9-monoA-unCP'       ] =      69.46242034
DATA['NUCLEAR REPULSION ENERGY']['HBC1-FaOOFaOO-4.0-dimer'            ] =     229.54177064
DATA['NUCLEAR REPULSION ENERGY']['HBC1-FaOOFaOO-4.0-monoA-CP'         ] =      69.45752939
DATA['NUCLEAR REPULSION ENERGY']['HBC1-FaOOFaOO-4.0-monoA-unCP'       ] =      69.45752939
DATA['NUCLEAR REPULSION ENERGY']['HBC1-FaOOFaOO-4.1-dimer'            ] =     227.12262884
DATA['NUCLEAR REPULSION ENERGY']['HBC1-FaOOFaOO-4.1-monoA-CP'         ] =      69.45239532
DATA['NUCLEAR REPULSION ENERGY']['HBC1-FaOOFaOO-4.1-monoA-unCP'       ] =      69.45239532
DATA['NUCLEAR REPULSION ENERGY']['HBC1-FaOOFaOO-4.2-dimer'            ] =     224.76664877
DATA['NUCLEAR REPULSION ENERGY']['HBC1-FaOOFaOO-4.2-monoA-CP'         ] =      69.44786995
DATA['NUCLEAR REPULSION ENERGY']['HBC1-FaOOFaOO-4.2-monoA-unCP'       ] =      69.44786995
DATA['NUCLEAR REPULSION ENERGY']['HBC1-FaOOFaOO-4.3-dimer'            ] =     222.49484462
DATA['NUCLEAR REPULSION ENERGY']['HBC1-FaOOFaOO-4.3-monoA-CP'         ] =      69.44428117
DATA['NUCLEAR REPULSION ENERGY']['HBC1-FaOOFaOO-4.3-monoA-unCP'       ] =      69.44428117
DATA['NUCLEAR REPULSION ENERGY']['HBC1-FaOOFaOO-4.4-dimer'            ] =     220.31753024
DATA['NUCLEAR REPULSION ENERGY']['HBC1-FaOOFaOO-4.4-monoA-CP'         ] =      69.44143824
DATA['NUCLEAR REPULSION ENERGY']['HBC1-FaOOFaOO-4.4-monoA-unCP'       ] =      69.44143824
DATA['NUCLEAR REPULSION ENERGY']['HBC1-FaOOFaOO-4.6-dimer'            ] =     216.25339825
DATA['NUCLEAR REPULSION ENERGY']['HBC1-FaOOFaOO-4.6-monoA-CP'         ] =      69.43719403
DATA['NUCLEAR REPULSION ENERGY']['HBC1-FaOOFaOO-4.6-monoA-unCP'       ] =      69.43719403
DATA['NUCLEAR REPULSION ENERGY']['HBC1-FaOOFaOO-4.8-dimer'            ] =     212.55538491
DATA['NUCLEAR REPULSION ENERGY']['HBC1-FaOOFaOO-4.8-monoA-CP'         ] =      69.43411380
DATA['NUCLEAR REPULSION ENERGY']['HBC1-FaOOFaOO-4.8-monoA-unCP'       ] =      69.43411380
DATA['NUCLEAR REPULSION ENERGY']['HBC1-FaOOFaOO-5.0-dimer'            ] =     209.19075836
DATA['NUCLEAR REPULSION ENERGY']['HBC1-FaOOFaOO-5.0-monoA-CP'         ] =      69.43198525
DATA['NUCLEAR REPULSION ENERGY']['HBC1-FaOOFaOO-5.0-monoA-unCP'       ] =      69.43198525
DATA['NUCLEAR REPULSION ENERGY']['HBC1-FaOOFaOO-5.4-dimer'            ] =     203.32614471
DATA['NUCLEAR REPULSION ENERGY']['HBC1-FaOOFaOO-5.4-monoA-CP'         ] =      69.42994763
DATA['NUCLEAR REPULSION ENERGY']['HBC1-FaOOFaOO-5.4-monoA-unCP'       ] =      69.42994763
DATA['NUCLEAR REPULSION ENERGY']['HBC1-FaOOFaOO-5.8-dimer'            ] =     198.38447013
DATA['NUCLEAR REPULSION ENERGY']['HBC1-FaOOFaOO-5.8-monoA-CP'         ] =      69.42993364
DATA['NUCLEAR REPULSION ENERGY']['HBC1-FaOOFaOO-5.8-monoA-unCP'       ] =      69.42993364
DATA['NUCLEAR REPULSION ENERGY']['HBC1-FaOOFaOO-6.4-dimer'            ] =     192.25159892
DATA['NUCLEAR REPULSION ENERGY']['HBC1-FaOOFaOO-6.4-monoA-CP'         ] =      69.43156827
DATA['NUCLEAR REPULSION ENERGY']['HBC1-FaOOFaOO-6.4-monoA-unCP'       ] =      69.43156827
DATA['NUCLEAR REPULSION ENERGY']['HBC1-FaOOFaOO-7.0-dimer'            ] =     187.25816994
DATA['NUCLEAR REPULSION ENERGY']['HBC1-FaOOFaOO-7.0-monoA-CP'         ] =      69.43350416
DATA['NUCLEAR REPULSION ENERGY']['HBC1-FaOOFaOO-7.0-monoA-unCP'       ] =      69.43350416
DATA['NUCLEAR REPULSION ENERGY']['HBC1-FaOOFaOO-8.0-dimer'            ] =     180.71326778
DATA['NUCLEAR REPULSION ENERGY']['HBC1-FaOOFaOO-8.0-monoA-CP'         ] =      69.43495392
DATA['NUCLEAR REPULSION ENERGY']['HBC1-FaOOFaOO-8.0-monoA-unCP'       ] =      69.43495392
DATA['NUCLEAR REPULSION ENERGY']['HBC1-FaOOFaOO-10.0-dimer'           ] =     171.77191094
DATA['NUCLEAR REPULSION ENERGY']['HBC1-FaOOFaOO-10.0-monoA-CP'        ] =      69.43617749
DATA['NUCLEAR REPULSION ENERGY']['HBC1-FaOOFaOO-10.0-monoA-unCP'      ] =      69.43617749
DATA['NUCLEAR REPULSION ENERGY']['HBC1-FaONFaON-3.4-dimer'            ] =     240.46278153
DATA['NUCLEAR REPULSION ENERGY']['HBC1-FaONFaON-3.4-monoA-CP'         ] =      70.28231877
DATA['NUCLEAR REPULSION ENERGY']['HBC1-FaONFaON-3.4-monoA-unCP'       ] =      70.28231877
DATA['NUCLEAR REPULSION ENERGY']['HBC1-FaON-mono-RLX'                 ] =      70.51505519
DATA['NUCLEAR REPULSION ENERGY']['HBC1-FaONFaON-3.5-dimer'            ] =     239.16459501
DATA['NUCLEAR REPULSION ENERGY']['HBC1-FaONFaON-3.5-monoA-CP'         ] =      70.32835580
DATA['NUCLEAR REPULSION ENERGY']['HBC1-FaONFaON-3.5-monoA-unCP'       ] =      70.32835580
DATA['NUCLEAR REPULSION ENERGY']['HBC1-FaONFaON-3.6-dimer'            ] =     237.68153218
DATA['NUCLEAR REPULSION ENERGY']['HBC1-FaONFaON-3.6-monoA-CP'         ] =      70.37064235
DATA['NUCLEAR REPULSION ENERGY']['HBC1-FaONFaON-3.6-monoA-unCP'       ] =      70.37064235
DATA['NUCLEAR REPULSION ENERGY']['HBC1-FaONFaON-3.7-dimer'            ] =     236.03200309
DATA['NUCLEAR REPULSION ENERGY']['HBC1-FaONFaON-3.7-monoA-CP'         ] =      70.40477708
DATA['NUCLEAR REPULSION ENERGY']['HBC1-FaONFaON-3.7-monoA-unCP'       ] =      70.40477708
DATA['NUCLEAR REPULSION ENERGY']['HBC1-FaONFaON-3.8-dimer'            ] =     234.23169579
DATA['NUCLEAR REPULSION ENERGY']['HBC1-FaONFaON-3.8-monoA-CP'         ] =      70.42980104
DATA['NUCLEAR REPULSION ENERGY']['HBC1-FaONFaON-3.8-monoA-unCP'       ] =      70.42980104
DATA['NUCLEAR REPULSION ENERGY']['HBC1-FaONFaON-3.9-dimer'            ] =     232.30206866
DATA['NUCLEAR REPULSION ENERGY']['HBC1-FaONFaON-3.9-monoA-CP'         ] =      70.44695377
DATA['NUCLEAR REPULSION ENERGY']['HBC1-FaONFaON-3.9-monoA-unCP'       ] =      70.44695377
DATA['NUCLEAR REPULSION ENERGY']['HBC1-FaONFaON-4.0-dimer'            ] =     230.27316227
DATA['NUCLEAR REPULSION ENERGY']['HBC1-FaONFaON-4.0-monoA-CP'         ] =      70.45845935
DATA['NUCLEAR REPULSION ENERGY']['HBC1-FaONFaON-4.0-monoA-unCP'       ] =      70.45845935
DATA['NUCLEAR REPULSION ENERGY']['HBC1-FaONFaON-4.1-dimer'            ] =     228.18215800
DATA['NUCLEAR REPULSION ENERGY']['HBC1-FaONFaON-4.1-monoA-CP'         ] =      70.46653132
DATA['NUCLEAR REPULSION ENERGY']['HBC1-FaONFaON-4.1-monoA-unCP'       ] =      70.46653132
DATA['NUCLEAR REPULSION ENERGY']['HBC1-FaONFaON-4.2-dimer'            ] =     226.06835273
DATA['NUCLEAR REPULSION ENERGY']['HBC1-FaONFaON-4.2-monoA-CP'         ] =      70.47282327
DATA['NUCLEAR REPULSION ENERGY']['HBC1-FaONFaON-4.2-monoA-unCP'       ] =      70.47282327
DATA['NUCLEAR REPULSION ENERGY']['HBC1-FaONFaON-4.3-dimer'            ] =     223.96749853
DATA['NUCLEAR REPULSION ENERGY']['HBC1-FaONFaON-4.3-monoA-CP'         ] =      70.47830733
DATA['NUCLEAR REPULSION ENERGY']['HBC1-FaONFaON-4.3-monoA-unCP'       ] =      70.47830733
DATA['NUCLEAR REPULSION ENERGY']['HBC1-FaONFaON-4.4-dimer'            ] =     221.90773477
DATA['NUCLEAR REPULSION ENERGY']['HBC1-FaONFaON-4.4-monoA-CP'         ] =      70.48335622
DATA['NUCLEAR REPULSION ENERGY']['HBC1-FaONFaON-4.4-monoA-unCP'       ] =      70.48335622
DATA['NUCLEAR REPULSION ENERGY']['HBC1-FaONFaON-4.6-dimer'            ] =     217.97817650
DATA['NUCLEAR REPULSION ENERGY']['HBC1-FaONFaON-4.6-monoA-CP'         ] =      70.49218648
DATA['NUCLEAR REPULSION ENERGY']['HBC1-FaONFaON-4.6-monoA-unCP'       ] =      70.49218648
DATA['NUCLEAR REPULSION ENERGY']['HBC1-FaONFaON-4.8-dimer'            ] =     214.34039402
DATA['NUCLEAR REPULSION ENERGY']['HBC1-FaONFaON-4.8-monoA-CP'         ] =      70.49902069
DATA['NUCLEAR REPULSION ENERGY']['HBC1-FaONFaON-4.8-monoA-unCP'       ] =      70.49902069
DATA['NUCLEAR REPULSION ENERGY']['HBC1-FaONFaON-5.0-dimer'            ] =     210.99686112
DATA['NUCLEAR REPULSION ENERGY']['HBC1-FaONFaON-5.0-monoA-CP'         ] =      70.50390115
DATA['NUCLEAR REPULSION ENERGY']['HBC1-FaONFaON-5.0-monoA-unCP'       ] =      70.50390115
DATA['NUCLEAR REPULSION ENERGY']['HBC1-FaONFaON-5.4-dimer'            ] =     205.14090516
DATA['NUCLEAR REPULSION ENERGY']['HBC1-FaONFaON-5.4-monoA-CP'         ] =      70.50872518
DATA['NUCLEAR REPULSION ENERGY']['HBC1-FaONFaON-5.4-monoA-unCP'       ] =      70.50872518
DATA['NUCLEAR REPULSION ENERGY']['HBC1-FaONFaON-5.8-dimer'            ] =     200.21672958
DATA['NUCLEAR REPULSION ENERGY']['HBC1-FaONFaON-5.8-monoA-CP'         ] =      70.51045849
DATA['NUCLEAR REPULSION ENERGY']['HBC1-FaONFaON-5.8-monoA-unCP'       ] =      70.51045849
DATA['NUCLEAR REPULSION ENERGY']['HBC1-FaONFaON-6.4-dimer'            ] =     194.12431457
DATA['NUCLEAR REPULSION ENERGY']['HBC1-FaONFaON-6.4-monoA-CP'         ] =      70.51183703
DATA['NUCLEAR REPULSION ENERGY']['HBC1-FaONFaON-6.4-monoA-unCP'       ] =      70.51183703
DATA['NUCLEAR REPULSION ENERGY']['HBC1-FaONFaON-7.0-dimer'            ] =     189.17344305
DATA['NUCLEAR REPULSION ENERGY']['HBC1-FaONFaON-7.0-monoA-CP'         ] =      70.51282107
DATA['NUCLEAR REPULSION ENERGY']['HBC1-FaONFaON-7.0-monoA-unCP'       ] =      70.51282107
DATA['NUCLEAR REPULSION ENERGY']['HBC1-FaONFaON-8.0-dimer'            ] =     182.68824188
DATA['NUCLEAR REPULSION ENERGY']['HBC1-FaONFaON-8.0-monoA-CP'         ] =      70.51387004
DATA['NUCLEAR REPULSION ENERGY']['HBC1-FaONFaON-8.0-monoA-unCP'       ] =      70.51387004
DATA['NUCLEAR REPULSION ENERGY']['HBC1-FaONFaON-10.0-dimer'           ] =     173.81988858
DATA['NUCLEAR REPULSION ENERGY']['HBC1-FaONFaON-10.0-monoA-CP'        ] =      70.51470037
DATA['NUCLEAR REPULSION ENERGY']['HBC1-FaONFaON-10.0-monoA-unCP'      ] =      70.51470037
DATA['NUCLEAR REPULSION ENERGY']['HBC1-FaNNFaNN-3.4-dimer'            ] =     239.26046970
DATA['NUCLEAR REPULSION ENERGY']['HBC1-FaNNFaNN-3.4-monoA-CP'         ] =      71.02970630
DATA['NUCLEAR REPULSION ENERGY']['HBC1-FaNNFaNN-3.4-monoA-unCP'       ] =      71.02970630
DATA['NUCLEAR REPULSION ENERGY']['HBC1-FaNN-mono-RLX'                 ] =      71.44403839
DATA['NUCLEAR REPULSION ENERGY']['HBC1-FaNNFaNN-3.5-dimer'            ] =     238.10201613
DATA['NUCLEAR REPULSION ENERGY']['HBC1-FaNNFaNN-3.5-monoA-CP'         ] =      71.12101788
DATA['NUCLEAR REPULSION ENERGY']['HBC1-FaNNFaNN-3.5-monoA-unCP'       ] =      71.12101788
DATA['NUCLEAR REPULSION ENERGY']['HBC1-FaNNFaNN-3.6-dimer'            ] =     236.84466393
DATA['NUCLEAR REPULSION ENERGY']['HBC1-FaNNFaNN-3.6-monoA-CP'         ] =      71.19783215
DATA['NUCLEAR REPULSION ENERGY']['HBC1-FaNNFaNN-3.6-monoA-unCP'       ] =      71.19783215
DATA['NUCLEAR REPULSION ENERGY']['HBC1-FaNNFaNN-3.7-dimer'            ] =     235.48357291
DATA['NUCLEAR REPULSION ENERGY']['HBC1-FaNNFaNN-3.7-monoA-CP'         ] =      71.25979426
DATA['NUCLEAR REPULSION ENERGY']['HBC1-FaNNFaNN-3.7-monoA-unCP'       ] =      71.25979426
DATA['NUCLEAR REPULSION ENERGY']['HBC1-FaNNFaNN-3.8-dimer'            ] =     234.01564811
DATA['NUCLEAR REPULSION ENERGY']['HBC1-FaNNFaNN-3.8-monoA-CP'         ] =      71.30723795
DATA['NUCLEAR REPULSION ENERGY']['HBC1-FaNNFaNN-3.8-monoA-unCP'       ] =      71.30723795
DATA['NUCLEAR REPULSION ENERGY']['HBC1-FaNNFaNN-3.9-dimer'            ] =     232.44172478
DATA['NUCLEAR REPULSION ENERGY']['HBC1-FaNNFaNN-3.9-monoA-CP'         ] =      71.34148564
DATA['NUCLEAR REPULSION ENERGY']['HBC1-FaNNFaNN-3.9-monoA-unCP'       ] =      71.34148564
DATA['NUCLEAR REPULSION ENERGY']['HBC1-FaNNFaNN-4.0-dimer'            ] =     230.76831303
DATA['NUCLEAR REPULSION ENERGY']['HBC1-FaNNFaNN-4.0-monoA-CP'         ] =      71.36468654
DATA['NUCLEAR REPULSION ENERGY']['HBC1-FaNNFaNN-4.0-monoA-unCP'       ] =      71.36468654
DATA['NUCLEAR REPULSION ENERGY']['HBC1-FaNNFaNN-4.1-dimer'            ] =     229.00938305
DATA['NUCLEAR REPULSION ENERGY']['HBC1-FaNNFaNN-4.1-monoA-CP'         ] =      71.37962643
DATA['NUCLEAR REPULSION ENERGY']['HBC1-FaNNFaNN-4.1-monoA-unCP'       ] =      71.37962643
DATA['NUCLEAR REPULSION ENERGY']['HBC1-FaNNFaNN-4.2-dimer'            ] =     227.18583883
DATA['NUCLEAR REPULSION ENERGY']['HBC1-FaNNFaNN-4.2-monoA-CP'         ] =      71.38901927
DATA['NUCLEAR REPULSION ENERGY']['HBC1-FaNNFaNN-4.2-monoA-unCP'       ] =      71.38901927
DATA['NUCLEAR REPULSION ENERGY']['HBC1-FaNNFaNN-4.3-dimer'            ] =     225.32386318
DATA['NUCLEAR REPULSION ENERGY']['HBC1-FaNNFaNN-4.3-monoA-CP'         ] =      71.39518800
DATA['NUCLEAR REPULSION ENERGY']['HBC1-FaNNFaNN-4.3-monoA-unCP'       ] =      71.39518800
DATA['NUCLEAR REPULSION ENERGY']['HBC1-FaNNFaNN-4.4-dimer'            ] =     223.45062338
DATA['NUCLEAR REPULSION ENERGY']['HBC1-FaNNFaNN-4.4-monoA-CP'         ] =      71.39972289
DATA['NUCLEAR REPULSION ENERGY']['HBC1-FaNNFaNN-4.4-monoA-unCP'       ] =      71.39972289
DATA['NUCLEAR REPULSION ENERGY']['HBC1-FaNNFaNN-4.6-dimer'            ] =     219.76184605
DATA['NUCLEAR REPULSION ENERGY']['HBC1-FaNNFaNN-4.6-monoA-CP'         ] =      71.40770677
DATA['NUCLEAR REPULSION ENERGY']['HBC1-FaNNFaNN-4.6-monoA-unCP'       ] =      71.40770677
DATA['NUCLEAR REPULSION ENERGY']['HBC1-FaNNFaNN-4.8-dimer'            ] =     216.24467046
DATA['NUCLEAR REPULSION ENERGY']['HBC1-FaNNFaNN-4.8-monoA-CP'         ] =      71.41504118
DATA['NUCLEAR REPULSION ENERGY']['HBC1-FaNNFaNN-4.8-monoA-unCP'       ] =      71.41504118
DATA['NUCLEAR REPULSION ENERGY']['HBC1-FaNNFaNN-5.0-dimer'            ] =     212.95726631
DATA['NUCLEAR REPULSION ENERGY']['HBC1-FaNNFaNN-5.0-monoA-CP'         ] =      71.42143886
DATA['NUCLEAR REPULSION ENERGY']['HBC1-FaNNFaNN-5.0-monoA-unCP'       ] =      71.42143886
DATA['NUCLEAR REPULSION ENERGY']['HBC1-FaNNFaNN-5.4-dimer'            ] =     207.11781748
DATA['NUCLEAR REPULSION ENERGY']['HBC1-FaNNFaNN-5.4-monoA-CP'         ] =      71.43139269
DATA['NUCLEAR REPULSION ENERGY']['HBC1-FaNNFaNN-5.4-monoA-unCP'       ] =      71.43139269
DATA['NUCLEAR REPULSION ENERGY']['HBC1-FaNNFaNN-5.8-dimer'            ] =     202.17178340
DATA['NUCLEAR REPULSION ENERGY']['HBC1-FaNNFaNN-5.8-monoA-CP'         ] =      71.43632979
DATA['NUCLEAR REPULSION ENERGY']['HBC1-FaNNFaNN-5.8-monoA-unCP'       ] =      71.43632979
DATA['NUCLEAR REPULSION ENERGY']['HBC1-FaNNFaNN-6.4-dimer'            ] =     196.05789788
DATA['NUCLEAR REPULSION ENERGY']['HBC1-FaNNFaNN-6.4-monoA-CP'         ] =      71.43933362
DATA['NUCLEAR REPULSION ENERGY']['HBC1-FaNNFaNN-6.4-monoA-unCP'       ] =      71.43933362
DATA['NUCLEAR REPULSION ENERGY']['HBC1-FaNNFaNN-7.0-dimer'            ] =     191.09605908
DATA['NUCLEAR REPULSION ENERGY']['HBC1-FaNNFaNN-7.0-monoA-CP'         ] =      71.44111494
DATA['NUCLEAR REPULSION ENERGY']['HBC1-FaNNFaNN-7.0-monoA-unCP'       ] =      71.44111494
DATA['NUCLEAR REPULSION ENERGY']['HBC1-FaNNFaNN-8.0-dimer'            ] =     184.60070202
DATA['NUCLEAR REPULSION ENERGY']['HBC1-FaNNFaNN-8.0-monoA-CP'         ] =      71.44290877
DATA['NUCLEAR REPULSION ENERGY']['HBC1-FaNNFaNN-8.0-monoA-unCP'       ] =      71.44290877
DATA['NUCLEAR REPULSION ENERGY']['HBC1-FaNNFaNN-10.0-dimer'           ] =     175.71779864
DATA['NUCLEAR REPULSION ENERGY']['HBC1-FaNNFaNN-10.0-monoA-CP'        ] =      71.44390386
DATA['NUCLEAR REPULSION ENERGY']['HBC1-FaNNFaNN-10.0-monoA-unCP'      ] =      71.44390386
# Heterodimers (FaOO with FaON): monoA and monoB differ, so both are listed.
DATA['NUCLEAR REPULSION ENERGY']['HBC1-FaOOFaON-3.4-dimer'            ] =     241.93345078
DATA['NUCLEAR REPULSION ENERGY']['HBC1-FaOOFaON-3.4-monoA-CP'         ] =      69.35267697
DATA['NUCLEAR REPULSION ENERGY']['HBC1-FaOOFaON-3.4-monoB-CP'         ] =      70.25019076
DATA['NUCLEAR REPULSION ENERGY']['HBC1-FaOOFaON-3.4-monoA-unCP'       ] =      69.35267697
DATA['NUCLEAR REPULSION ENERGY']['HBC1-FaOOFaON-3.4-monoB-unCP'       ] =      70.25019076
DATA['NUCLEAR REPULSION ENERGY']['HBC1-FaOOFaON-3.5-dimer'            ] =     240.30022964
DATA['NUCLEAR REPULSION ENERGY']['HBC1-FaOOFaON-3.5-monoA-CP'         ] =      69.38497277
DATA['NUCLEAR REPULSION ENERGY']['HBC1-FaOOFaON-3.5-monoB-CP'         ] =      70.33194829
DATA['NUCLEAR REPULSION ENERGY']['HBC1-FaOOFaON-3.5-monoA-unCP'       ] =      69.38497277
DATA['NUCLEAR REPULSION ENERGY']['HBC1-FaOOFaON-3.5-monoB-unCP'       ] =      70.33194829
DATA['NUCLEAR REPULSION ENERGY']['HBC1-FaOOFaON-3.6-dimer'            ] =     238.47937461
DATA['NUCLEAR REPULSION ENERGY']['HBC1-FaOOFaON-3.6-monoA-CP'         ] =      69.41184507
DATA['NUCLEAR REPULSION ENERGY']['HBC1-FaOOFaON-3.6-monoB-CP'         ] =      70.39031970
DATA['NUCLEAR REPULSION ENERGY']['HBC1-FaOOFaON-3.6-monoA-unCP'       ] =      69.41184507
DATA['NUCLEAR REPULSION ENERGY']['HBC1-FaOOFaON-3.6-monoB-unCP'       ] =      70.39031970
DATA['NUCLEAR REPULSION ENERGY']['HBC1-FaOOFaON-3.7-dimer'            ] =     236.49953506
DATA['NUCLEAR REPULSION ENERGY']['HBC1-FaOOFaON-3.7-monoA-CP'         ] =      69.42869781
DATA['NUCLEAR REPULSION ENERGY']['HBC1-FaOOFaON-3.7-monoB-CP'         ] =      70.42973315
DATA['NUCLEAR REPULSION ENERGY']['HBC1-FaOOFaON-3.7-monoA-unCP'       ] =      69.42869781
DATA['NUCLEAR REPULSION ENERGY']['HBC1-FaOOFaON-3.7-monoB-unCP'       ] =      70.42973315
DATA['NUCLEAR REPULSION ENERGY']['HBC1-FaOOFaON-3.8-dimer'            ] =     234.39000387
DATA['NUCLEAR REPULSION ENERGY']['HBC1-FaOOFaON-3.8-monoA-CP'         ] =      69.43660992
DATA['NUCLEAR REPULSION ENERGY']['HBC1-FaOOFaON-3.8-monoB-CP'         ] =      70.45473144
DATA['NUCLEAR REPULSION ENERGY']['HBC1-FaOOFaON-3.8-monoA-unCP'       ] =      69.43660992
DATA['NUCLEAR REPULSION ENERGY']['HBC1-FaOOFaON-3.8-monoB-unCP'       ] =      70.45473144
DATA['NUCLEAR REPULSION ENERGY']['HBC1-FaOOFaON-3.9-dimer'            ] =     232.18404486
DATA['NUCLEAR REPULSION ENERGY']['HBC1-FaOOFaON-3.9-monoA-CP'         ] =      69.43891731
DATA['NUCLEAR REPULSION ENERGY']['HBC1-FaOOFaON-3.9-monoB-CP'         ] =      70.46974894
DATA['NUCLEAR REPULSION ENERGY']['HBC1-FaOOFaON-3.9-monoA-unCP'       ] =      69.43891731
DATA['NUCLEAR REPULSION ENERGY']['HBC1-FaOOFaON-3.9-monoB-unCP'       ] =      70.46974894
DATA['NUCLEAR REPULSION ENERGY']['HBC1-FaOOFaON-4.0-dimer'            ] =     229.92100144
DATA['NUCLEAR REPULSION ENERGY']['HBC1-FaOOFaON-4.0-monoA-CP'         ] =      69.43897100
DATA['NUCLEAR REPULSION ENERGY']['HBC1-FaOOFaON-4.0-monoB-CP'         ] =      70.47862318
DATA['NUCLEAR REPULSION ENERGY']['HBC1-FaOOFaON-4.0-monoA-unCP'       ] =      69.43897100
DATA['NUCLEAR REPULSION ENERGY']['HBC1-FaOOFaON-4.0-monoB-unCP'       ] =      70.47862318
DATA['NUCLEAR REPULSION ENERGY']['HBC1-FaOOFaON-4.1-dimer'            ] =     227.64375917
DATA['NUCLEAR REPULSION ENERGY']['HBC1-FaOOFaON-4.1-monoA-CP'         ] =      69.43883808
DATA['NUCLEAR REPULSION ENERGY']['HBC1-FaOOFaON-4.1-monoB-CP'         ] =      70.48417849
DATA['NUCLEAR REPULSION ENERGY']['HBC1-FaOOFaON-4.1-monoA-unCP'       ] =      69.43883808
DATA['NUCLEAR REPULSION ENERGY']['HBC1-FaOOFaON-4.1-monoB-unCP'       ] =      70.48417849
DATA['NUCLEAR REPULSION ENERGY']['HBC1-FaOOFaON-4.2-dimer'            ] =     225.39202083
DATA['NUCLEAR REPULSION ENERGY']['HBC1-FaOOFaON-4.2-monoA-CP'         ] =      69.43915615
DATA['NUCLEAR REPULSION ENERGY']['HBC1-FaOOFaON-4.2-monoB-CP'         ] =      70.48816654
DATA['NUCLEAR REPULSION ENERGY']['HBC1-FaOOFaON-4.2-monoA-unCP'       ] =      69.43915615
DATA['NUCLEAR REPULSION ENERGY']['HBC1-FaOOFaON-4.2-monoB-unCP'       ] =      70.48816654
DATA['NUCLEAR REPULSION ENERGY']['HBC1-FaOOFaON-4.3-dimer'            ] =     223.19598463
DATA['NUCLEAR REPULSION ENERGY']['HBC1-FaOOFaON-4.3-monoA-CP'         ] =      69.43973769
DATA['NUCLEAR REPULSION ENERGY']['HBC1-FaOOFaON-4.3-monoB-CP'         ] =      70.49148753
DATA['NUCLEAR REPULSION ENERGY']['HBC1-FaOOFaON-4.3-monoA-unCP'       ] =      69.43973769
DATA['NUCLEAR REPULSION ENERGY']['HBC1-FaOOFaON-4.3-monoB-unCP'       ] =      70.49148753
DATA['NUCLEAR REPULSION ENERGY']['HBC1-FaOOFaON-4.4-dimer'            ] =     221.07419115
DATA['NUCLEAR REPULSION ENERGY']['HBC1-FaOOFaON-4.4-monoA-CP'         ] =      69.44018499
DATA['NUCLEAR REPULSION ENERGY']['HBC1-FaOOFaON-4.4-monoB-CP'         ] =      70.49442227
DATA['NUCLEAR REPULSION ENERGY']['HBC1-FaOOFaON-4.4-monoA-unCP'       ] =      69.44018499
DATA['NUCLEAR REPULSION ENERGY']['HBC1-FaOOFaON-4.4-monoB-unCP'       ] =      70.49442227
DATA['NUCLEAR REPULSION ENERGY']['HBC1-FaOOFaON-4.6-dimer'            ] =     217.08104010
DATA['NUCLEAR REPULSION ENERGY']['HBC1-FaOOFaON-4.6-monoA-CP'         ] =      69.44001991
DATA['NUCLEAR REPULSION ENERGY']['HBC1-FaOOFaON-4.6-monoB-CP'         ] =      70.49924468
DATA['NUCLEAR REPULSION ENERGY']['HBC1-FaOOFaON-4.6-monoA-unCP'       ] =      69.44001991
DATA['NUCLEAR REPULSION ENERGY']['HBC1-FaOOFaON-4.6-monoB-unCP'       ] =      70.49924468
DATA['NUCLEAR REPULSION ENERGY']['HBC1-FaOOFaON-4.8-dimer'            ] =     213.41989010
DATA['NUCLEAR REPULSION ENERGY']['HBC1-FaOOFaON-4.8-monoA-CP'         ] =      69.43900064
DATA['NUCLEAR REPULSION ENERGY']['HBC1-FaOOFaON-4.8-monoB-CP'         ] =      70.50267956
DATA['NUCLEAR REPULSION ENERGY']['HBC1-FaOOFaON-4.8-monoA-unCP'       ] =      69.43900064
DATA['NUCLEAR REPULSION ENERGY']['HBC1-FaOOFaON-4.8-monoB-unCP'       ] =      70.50267956
DATA['NUCLEAR REPULSION ENERGY']['HBC1-FaOOFaON-5.0-dimer'            ] =     210.07109853
DATA['NUCLEAR REPULSION ENERGY']['HBC1-FaOOFaON-5.0-monoA-CP'         ] =      69.43780433
DATA['NUCLEAR REPULSION ENERGY']['HBC1-FaOOFaON-5.0-monoB-CP'         ] =      70.50482980
DATA['NUCLEAR REPULSION ENERGY']['HBC1-FaOOFaON-5.0-monoA-unCP'       ] =      69.43780433
DATA['NUCLEAR REPULSION ENERGY']['HBC1-FaOOFaON-5.0-monoB-unCP'       ] =      70.50482980
DATA['NUCLEAR REPULSION ENERGY']['HBC1-FaOOFaON-5.4-dimer'            ] =     204.21761685
DATA['NUCLEAR REPULSION ENERGY']['HBC1-FaOOFaON-5.4-monoA-CP'         ] =      69.43637558
DATA['NUCLEAR REPULSION ENERGY']['HBC1-FaOOFaON-5.4-monoB-CP'         ] =      70.50670959
DATA['NUCLEAR REPULSION ENERGY']['HBC1-FaOOFaON-5.4-monoA-unCP'       ] =      69.43637558
DATA['NUCLEAR REPULSION ENERGY']['HBC1-FaOOFaON-5.4-monoB-unCP'       ] =      70.50670959
DATA['NUCLEAR REPULSION ENERGY']['HBC1-FaOOFaON-5.8-dimer'            ] =     199.28928967
DATA['NUCLEAR REPULSION ENERGY']['HBC1-FaOOFaON-5.8-monoA-CP'         ] =      69.43545989
DATA['NUCLEAR REPULSION ENERGY']['HBC1-FaOOFaON-5.8-monoB-CP'         ] =      70.50777320
DATA['NUCLEAR REPULSION ENERGY']['HBC1-FaOOFaON-5.8-monoA-unCP'       ] =      69.43545989
DATA['NUCLEAR REPULSION ENERGY']['HBC1-FaOOFaON-5.8-monoB-unCP'       ] =      70.50777320
DATA['NUCLEAR REPULSION ENERGY']['HBC1-FaOOFaON-6.4-dimer'            ] =     193.18113380
DATA['NUCLEAR REPULSION ENERGY']['HBC1-FaOOFaON-6.4-monoA-CP'         ] =      69.43552104
DATA['NUCLEAR REPULSION ENERGY']['HBC1-FaOOFaON-6.4-monoB-CP'         ] =      70.50951053
DATA['NUCLEAR REPULSION ENERGY']['HBC1-FaOOFaON-6.4-monoA-unCP'       ] =      69.43552104
DATA['NUCLEAR REPULSION ENERGY']['HBC1-FaOOFaON-6.4-monoB-unCP'       ] =      70.50951053
DATA['NUCLEAR REPULSION ENERGY']['HBC1-FaOOFaON-7.0-dimer'            ] =     188.21125650
DATA['NUCLEAR REPULSION ENERGY']['HBC1-FaOOFaON-7.0-monoA-CP'         ] =      69.43619080
DATA['NUCLEAR REPULSION ENERGY']['HBC1-FaOOFaON-7.0-monoB-CP'         ] =      70.51101382
DATA['NUCLEAR REPULSION ENERGY']['HBC1-FaOOFaON-7.0-monoA-unCP'       ] =      69.43619080
DATA['NUCLEAR REPULSION ENERGY']['HBC1-FaOOFaON-7.0-monoB-unCP'       ] =      70.51101382
DATA['NUCLEAR REPULSION ENERGY']['HBC1-FaOOFaON-8.0-dimer'            ] =     181.69923103
DATA['NUCLEAR REPULSION ENERGY']['HBC1-FaOOFaON-8.0-monoA-CP'         ] =      69.43703888
DATA['NUCLEAR REPULSION ENERGY']['HBC1-FaOOFaON-8.0-monoB-CP'         ] =      70.51306687
DATA['NUCLEAR REPULSION ENERGY']['HBC1-FaOOFaON-8.0-monoA-unCP'       ] =      69.43703888
DATA['NUCLEAR REPULSION ENERGY']['HBC1-FaOOFaON-8.0-monoB-unCP'       ] =      70.51306687
DATA['NUCLEAR REPULSION ENERGY']['HBC1-FaOOFaON-10.0-dimer'           ] =     172.79665021
DATA['NUCLEAR REPULSION ENERGY']['HBC1-FaOOFaON-10.0-monoA-CP'        ] =      69.43787499
DATA['NUCLEAR REPULSION ENERGY']['HBC1-FaOOFaON-10.0-monoB-CP'        ] =      70.51471612
DATA['NUCLEAR REPULSION ENERGY']['HBC1-FaOOFaON-10.0-monoA-unCP'      ] =      69.43787499
DATA['NUCLEAR REPULSION ENERGY']['HBC1-FaOOFaON-10.0-monoB-unCP' ] = 70.51471612
DATA['NUCLEAR REPULSION ENERGY']['HBC1-FaONFaNN-3.4-dimer' ] = 239.90587476
DATA['NUCLEAR REPULSION ENERGY']['HBC1-FaONFaNN-3.4-monoA-CP' ] = 70.13040081
DATA['NUCLEAR REPULSION ENERGY']['HBC1-FaONFaNN-3.4-monoB-CP' ] = 71.18167959
DATA['NUCLEAR REPULSION ENERGY']['HBC1-FaONFaNN-3.4-monoA-unCP' ] = 70.13040081
DATA['NUCLEAR REPULSION ENERGY']['HBC1-FaONFaNN-3.4-monoB-unCP' ] = 71.18167959
DATA['NUCLEAR REPULSION ENERGY']['HBC1-FaONFaNN-3.5-dimer' ] = 238.67868671
DATA['NUCLEAR REPULSION ENERGY']['HBC1-FaONFaNN-3.5-monoA-CP' ] = 70.18294170
DATA['NUCLEAR REPULSION ENERGY']['HBC1-FaONFaNN-3.5-monoB-CP' ] = 71.26743554
DATA['NUCLEAR REPULSION ENERGY']['HBC1-FaONFaNN-3.5-monoA-unCP' ] = 70.18294170
DATA['NUCLEAR REPULSION ENERGY']['HBC1-FaONFaNN-3.5-monoB-unCP' ] = 71.26743554
DATA['NUCLEAR REPULSION ENERGY']['HBC1-FaONFaNN-3.6-dimer' ] = 237.31444607
DATA['NUCLEAR REPULSION ENERGY']['HBC1-FaONFaNN-3.6-monoA-CP' ] = 70.24012730
DATA['NUCLEAR REPULSION ENERGY']['HBC1-FaONFaNN-3.6-monoB-CP' ] = 71.33055862
DATA['NUCLEAR REPULSION ENERGY']['HBC1-FaONFaNN-3.6-monoA-unCP' ] = 70.24012730
DATA['NUCLEAR REPULSION ENERGY']['HBC1-FaONFaNN-3.6-monoB-unCP' ] = 71.33055862
DATA['NUCLEAR REPULSION ENERGY']['HBC1-FaONFaNN-3.7-dimer' ] = 235.81577856
DATA['NUCLEAR REPULSION ENERGY']['HBC1-FaONFaNN-3.7-monoA-CP' ] = 70.29293605
DATA['NUCLEAR REPULSION ENERGY']['HBC1-FaONFaNN-3.7-monoB-CP' ] = 71.37469923
DATA['NUCLEAR REPULSION ENERGY']['HBC1-FaONFaNN-3.7-monoA-unCP' ] = 70.29293605
DATA['NUCLEAR REPULSION ENERGY']['HBC1-FaONFaNN-3.7-monoB-unCP' ] = 71.37469923
DATA['NUCLEAR REPULSION ENERGY']['HBC1-FaONFaNN-3.8-dimer' ] = 234.18695440
DATA['NUCLEAR REPULSION ENERGY']['HBC1-FaONFaNN-3.8-monoA-CP' ] = 70.33739500
DATA['NUCLEAR REPULSION ENERGY']['HBC1-FaONFaNN-3.8-monoB-CP' ] = 71.40295408
DATA['NUCLEAR REPULSION ENERGY']['HBC1-FaONFaNN-3.8-monoA-unCP' ] = 70.33739500
DATA['NUCLEAR REPULSION ENERGY']['HBC1-FaONFaNN-3.8-monoB-unCP' ] = 71.40295408
DATA['NUCLEAR REPULSION ENERGY']['HBC1-FaONFaNN-3.9-dimer' ] = 232.43751991
DATA['NUCLEAR REPULSION ENERGY']['HBC1-FaONFaNN-3.9-monoA-CP' ] = 70.37258366
DATA['NUCLEAR REPULSION ENERGY']['HBC1-FaONFaNN-3.9-monoB-CP' ] = 71.41882940
DATA['NUCLEAR REPULSION ENERGY']['HBC1-FaONFaNN-3.9-monoA-unCP' ] = 70.37258366
DATA['NUCLEAR REPULSION ENERGY']['HBC1-FaONFaNN-3.9-monoB-unCP' ] = 71.41882940
DATA['NUCLEAR REPULSION ENERGY']['HBC1-FaONFaNN-4.0-dimer' ] = 230.58474965
DATA['NUCLEAR REPULSION ENERGY']['HBC1-FaONFaNN-4.0-monoA-CP' ] = 70.39937655
DATA['NUCLEAR REPULSION ENERGY']['HBC1-FaONFaNN-4.0-monoB-CP' ] = 71.42614416
DATA['NUCLEAR REPULSION ENERGY']['HBC1-FaONFaNN-4.0-monoA-unCP' ] = 70.39937655
DATA['NUCLEAR REPULSION ENERGY']['HBC1-FaONFaNN-4.0-monoB-unCP' ] = 71.42614416
DATA['NUCLEAR REPULSION ENERGY']['HBC1-FaONFaNN-4.1-dimer' ] = 228.65402462
DATA['NUCLEAR REPULSION ENERGY']['HBC1-FaONFaNN-4.1-monoA-CP' ] = 70.41955496
DATA['NUCLEAR REPULSION ENERGY']['HBC1-FaONFaNN-4.1-monoB-CP' ] = 71.42832750
DATA['NUCLEAR REPULSION ENERGY']['HBC1-FaONFaNN-4.1-monoA-unCP' ] = 70.41955496
DATA['NUCLEAR REPULSION ENERGY']['HBC1-FaONFaNN-4.1-monoB-unCP' ] = 71.42832750
DATA['NUCLEAR REPULSION ENERGY']['HBC1-FaONFaNN-4.2-dimer' ] = 226.67727517
DATA['NUCLEAR REPULSION ENERGY']['HBC1-FaONFaNN-4.2-monoA-CP' ] = 70.43516725
DATA['NUCLEAR REPULSION ENERGY']['HBC1-FaONFaNN-4.2-monoB-CP' ] = 71.42823397
DATA['NUCLEAR REPULSION ENERGY']['HBC1-FaONFaNN-4.2-monoA-unCP' ] = 70.43516725
DATA['NUCLEAR REPULSION ENERGY']['HBC1-FaONFaNN-4.2-monoB-unCP' ] = 71.42823397
DATA['NUCLEAR REPULSION ENERGY']['HBC1-FaONFaNN-4.3-dimer' ] = 224.68745859
DATA['NUCLEAR REPULSION ENERGY']['HBC1-FaONFaNN-4.3-monoA-CP' ] = 70.44783207
DATA['NUCLEAR REPULSION ENERGY']['HBC1-FaONFaNN-4.3-monoB-CP' ] = 71.42745587
DATA['NUCLEAR REPULSION ENERGY']['HBC1-FaONFaNN-4.3-monoA-unCP' ] = 70.44783207
DATA['NUCLEAR REPULSION ENERGY']['HBC1-FaONFaNN-4.3-monoB-unCP' ] = 71.42745587
DATA['NUCLEAR REPULSION ENERGY']['HBC1-FaONFaNN-4.4-dimer' ] = 222.71416710
DATA['NUCLEAR REPULSION ENERGY']['HBC1-FaONFaNN-4.4-monoA-CP' ] = 70.45874508
DATA['NUCLEAR REPULSION ENERGY']['HBC1-FaONFaNN-4.4-monoB-CP' ] = 71.42684444
DATA['NUCLEAR REPULSION ENERGY']['HBC1-FaONFaNN-4.4-monoA-unCP' ] = 70.45874508
DATA['NUCLEAR REPULSION ENERGY']['HBC1-FaONFaNN-4.4-monoB-unCP' ] = 71.42684444
DATA['NUCLEAR REPULSION ENERGY']['HBC1-FaONFaNN-4.6-dimer' ] = 218.89644490
DATA['NUCLEAR REPULSION ENERGY']['HBC1-FaONFaNN-4.6-monoA-CP' ] = 70.47675379
DATA['NUCLEAR REPULSION ENERGY']['HBC1-FaONFaNN-4.6-monoB-CP' ] = 71.42653225
DATA['NUCLEAR REPULSION ENERGY']['HBC1-FaONFaNN-4.6-monoA-unCP' ] = 70.47675379
DATA['NUCLEAR REPULSION ENERGY']['HBC1-FaONFaNN-4.6-monoB-unCP' ] = 71.42653225
DATA['NUCLEAR REPULSION ENERGY']['HBC1-FaONFaNN-4.8-dimer' ] = 215.31571895
DATA['NUCLEAR REPULSION ENERGY']['HBC1-FaONFaNN-4.8-monoA-CP' ] = 70.49066661
DATA['NUCLEAR REPULSION ENERGY']['HBC1-FaONFaNN-4.8-monoB-CP' ] = 71.42750442
DATA['NUCLEAR REPULSION ENERGY']['HBC1-FaONFaNN-4.8-monoA-unCP' ] = 70.49066661
DATA['NUCLEAR REPULSION ENERGY']['HBC1-FaONFaNN-4.8-monoB-unCP' ] = 71.42750442
DATA['NUCLEAR REPULSION ENERGY']['HBC1-FaONFaNN-5.0-dimer' ] = 211.99864089
DATA['NUCLEAR REPULSION ENERGY']['HBC1-FaONFaNN-5.0-monoA-CP' ] = 70.50093904
DATA['NUCLEAR REPULSION ENERGY']['HBC1-FaONFaNN-5.0-monoB-CP' ] = 71.42920390
DATA['NUCLEAR REPULSION ENERGY']['HBC1-FaONFaNN-5.0-monoA-unCP' ] = 70.50093904
DATA['NUCLEAR REPULSION ENERGY']['HBC1-FaONFaNN-5.0-monoB-unCP' ] = 71.42920390
DATA['NUCLEAR REPULSION ENERGY']['HBC1-FaONFaNN-5.4-dimer' ] = 206.14542525
DATA['NUCLEAR REPULSION ENERGY']['HBC1-FaONFaNN-5.4-monoA-CP' ] = 70.51246715
DATA['NUCLEAR REPULSION ENERGY']['HBC1-FaONFaNN-5.4-monoB-CP' ] = 71.43261759
DATA['NUCLEAR REPULSION ENERGY']['HBC1-FaONFaNN-5.4-monoA-unCP' ] = 70.51246715
DATA['NUCLEAR REPULSION ENERGY']['HBC1-FaONFaNN-5.4-monoB-unCP' ] = 71.43261759
DATA['NUCLEAR REPULSION ENERGY']['HBC1-FaONFaNN-5.8-dimer' ] = 201.20490708
DATA['NUCLEAR REPULSION ENERGY']['HBC1-FaONFaNN-5.8-monoA-CP' ] = 70.51605248
DATA['NUCLEAR REPULSION ENERGY']['HBC1-FaONFaNN-5.8-monoB-CP' ] = 71.43506208
DATA['NUCLEAR REPULSION ENERGY']['HBC1-FaONFaNN-5.8-monoA-unCP' ] = 70.51605248
DATA['NUCLEAR REPULSION ENERGY']['HBC1-FaONFaNN-5.8-monoB-unCP' ] = 71.43506208
DATA['NUCLEAR REPULSION ENERGY']['HBC1-FaONFaNN-6.4-dimer' ] = 195.09745910
DATA['NUCLEAR REPULSION ENERGY']['HBC1-FaONFaNN-6.4-monoA-CP' ] = 70.51673483
DATA['NUCLEAR REPULSION ENERGY']['HBC1-FaONFaNN-6.4-monoB-CP' ] = 71.43776882
DATA['NUCLEAR REPULSION ENERGY']['HBC1-FaONFaNN-6.4-monoA-unCP' ] = 70.51673483
DATA['NUCLEAR REPULSION ENERGY']['HBC1-FaONFaNN-6.4-monoB-unCP' ] = 71.43776882
DATA['NUCLEAR REPULSION ENERGY']['HBC1-FaONFaNN-7.0-dimer' ] = 190.13877337
DATA['NUCLEAR REPULSION ENERGY']['HBC1-FaONFaNN-7.0-monoA-CP' ] = 70.51643987
DATA['NUCLEAR REPULSION ENERGY']['HBC1-FaONFaNN-7.0-monoB-CP' ] = 71.43986291
DATA['NUCLEAR REPULSION ENERGY']['HBC1-FaONFaNN-7.0-monoA-unCP' ] = 70.51643987
DATA['NUCLEAR REPULSION ENERGY']['HBC1-FaONFaNN-7.0-monoB-unCP' ] = 71.43986291
DATA['NUCLEAR REPULSION ENERGY']['HBC1-FaONFaNN-8.0-dimer' ] = 183.64663805
DATA['NUCLEAR REPULSION ENERGY']['HBC1-FaONFaNN-8.0-monoA-CP' ] = 70.51626670
DATA['NUCLEAR REPULSION ENERGY']['HBC1-FaONFaNN-8.0-monoB-CP' ] = 71.44194166
DATA['NUCLEAR REPULSION ENERGY']['HBC1-FaONFaNN-8.0-monoA-unCP' ] = 70.51626670
DATA['NUCLEAR REPULSION ENERGY']['HBC1-FaONFaNN-8.0-monoB-unCP' ] = 71.44194166
DATA['NUCLEAR REPULSION ENERGY']['HBC1-FaONFaNN-10.0-dimer' ] = 174.76988329
DATA['NUCLEAR REPULSION ENERGY']['HBC1-FaONFaNN-10.0-monoA-CP' ] = 70.51586617
DATA['NUCLEAR REPULSION ENERGY']['HBC1-FaONFaNN-10.0-monoB-CP' ] = 71.44353226
DATA['NUCLEAR REPULSION ENERGY']['HBC1-FaONFaNN-10.0-monoA-unCP' ] = 70.51586617
DATA['NUCLEAR REPULSION ENERGY']['HBC1-FaONFaNN-10.0-monoB-unCP' ] = 71.44353226
DATA['NUCLEAR REPULSION ENERGY']['HBC1-FaOOFaNN-3.6-dimer' ] = 238.70762894
DATA['NUCLEAR REPULSION ENERGY']['HBC1-FaOOFaNN-3.6-monoA-CP' ] = 69.05387109
DATA['NUCLEAR REPULSION ENERGY']['HBC1-FaOOFaNN-3.6-monoB-CP' ] = 71.49442229
DATA['NUCLEAR REPULSION ENERGY']['HBC1-FaOOFaNN-3.6-monoA-unCP' ] = 69.05387109
DATA['NUCLEAR REPULSION ENERGY']['HBC1-FaOOFaNN-3.6-monoB-unCP' ] = 71.49442229
DATA['NUCLEAR REPULSION ENERGY']['HBC1-FaOOFaNN-3.7-dimer' ] = 236.77815589
DATA['NUCLEAR REPULSION ENERGY']['HBC1-FaOOFaNN-3.7-monoA-CP' ] = 69.19851332
DATA['NUCLEAR REPULSION ENERGY']['HBC1-FaOOFaNN-3.7-monoB-CP' ] = 71.50248139
DATA['NUCLEAR REPULSION ENERGY']['HBC1-FaOOFaNN-3.7-monoA-unCP' ] = 69.19851332
DATA['NUCLEAR REPULSION ENERGY']['HBC1-FaOOFaNN-3.7-monoB-unCP' ] = 71.50248139
DATA['NUCLEAR REPULSION ENERGY']['HBC1-FaOOFaNN-3.8-dimer' ] = 234.79258573
DATA['NUCLEAR REPULSION ENERGY']['HBC1-FaOOFaNN-3.8-monoA-CP' ] = 69.27451806
DATA['NUCLEAR REPULSION ENERGY']['HBC1-FaOOFaNN-3.8-monoB-CP' ] = 71.50357299
DATA['NUCLEAR REPULSION ENERGY']['HBC1-FaOOFaNN-3.8-monoA-unCP' ] = 69.27451806
DATA['NUCLEAR REPULSION ENERGY']['HBC1-FaOOFaNN-3.8-monoB-unCP' ] = 71.50357299
DATA['NUCLEAR REPULSION ENERGY']['HBC1-FaOOFaNN-3.9-dimer' ] = 232.73099665
DATA['NUCLEAR REPULSION ENERGY']['HBC1-FaOOFaNN-3.9-monoA-CP' ] = 69.32058150
DATA['NUCLEAR REPULSION ENERGY']['HBC1-FaOOFaNN-3.9-monoB-CP' ] = 71.49797419
DATA['NUCLEAR REPULSION ENERGY']['HBC1-FaOOFaNN-3.9-monoA-unCP' ] = 69.32058150
DATA['NUCLEAR REPULSION ENERGY']['HBC1-FaOOFaNN-3.9-monoB-unCP' ] = 71.49797419
DATA['NUCLEAR REPULSION ENERGY']['HBC1-FaOOFaNN-4.0-dimer' ] = 230.60990332
DATA['NUCLEAR REPULSION ENERGY']['HBC1-FaOOFaNN-4.0-monoA-CP' ] = 69.34967492
DATA['NUCLEAR REPULSION ENERGY']['HBC1-FaOOFaNN-4.0-monoB-CP' ] = 71.48853214
DATA['NUCLEAR REPULSION ENERGY']['HBC1-FaOOFaNN-4.0-monoA-unCP' ] = 69.34967492
DATA['NUCLEAR REPULSION ENERGY']['HBC1-FaOOFaNN-4.0-monoB-unCP' ] = 71.48853214
DATA['NUCLEAR REPULSION ENERGY']['HBC1-FaOOFaNN-4.1-dimer' ] = 228.45564352
DATA['NUCLEAR REPULSION ENERGY']['HBC1-FaOOFaNN-4.1-monoA-CP' ] = 69.36935851
DATA['NUCLEAR REPULSION ENERGY']['HBC1-FaOOFaNN-4.1-monoB-CP' ] = 71.47782411
DATA['NUCLEAR REPULSION ENERGY']['HBC1-FaOOFaNN-4.1-monoA-unCP' ] = 69.36935851
DATA['NUCLEAR REPULSION ENERGY']['HBC1-FaOOFaNN-4.1-monoB-unCP' ] = 71.47782411
DATA['NUCLEAR REPULSION ENERGY']['HBC1-FaOOFaNN-4.2-dimer' ] = 226.29785177
DATA['NUCLEAR REPULSION ENERGY']['HBC1-FaOOFaNN-4.2-monoA-CP' ] = 69.38400024
DATA['NUCLEAR REPULSION ENERGY']['HBC1-FaOOFaNN-4.2-monoB-CP' ] = 71.46764546
DATA['NUCLEAR REPULSION ENERGY']['HBC1-FaOOFaNN-4.2-monoA-unCP' ] = 69.38400024
DATA['NUCLEAR REPULSION ENERGY']['HBC1-FaOOFaNN-4.2-monoB-unCP' ] = 71.46764546
DATA['NUCLEAR REPULSION ENERGY']['HBC1-FaOOFaNN-4.3-dimer' ] = 224.16556584
DATA['NUCLEAR REPULSION ENERGY']['HBC1-FaOOFaNN-4.3-monoA-CP' ] = 69.39601086
DATA['NUCLEAR REPULSION ENERGY']['HBC1-FaOOFaNN-4.3-monoB-CP' ] = 71.45908818
DATA['NUCLEAR REPULSION ENERGY']['HBC1-FaOOFaNN-4.3-monoA-unCP' ] = 69.39601086
DATA['NUCLEAR REPULSION ENERGY']['HBC1-FaOOFaNN-4.3-monoB-unCP' ] = 71.45908818
DATA['NUCLEAR REPULSION ENERGY']['HBC1-FaOOFaNN-4.4-dimer' ] = 222.08187684
DATA['NUCLEAR REPULSION ENERGY']['HBC1-FaOOFaNN-4.4-monoA-CP' ] = 69.40618110
DATA['NUCLEAR REPULSION ENERGY']['HBC1-FaOOFaNN-4.4-monoB-CP' ] = 71.45222643
DATA['NUCLEAR REPULSION ENERGY']['HBC1-FaOOFaNN-4.4-monoA-unCP' ] = 69.40618110
DATA['NUCLEAR REPULSION ENERGY']['HBC1-FaOOFaNN-4.4-monoB-unCP' ] = 71.45222643
DATA['NUCLEAR REPULSION ENERGY']['HBC1-FaOOFaNN-4.6-dimer' ] = 218.11702388
DATA['NUCLEAR REPULSION ENERGY']['HBC1-FaOOFaNN-4.6-monoA-CP' ] = 69.42169291
DATA['NUCLEAR REPULSION ENERGY']['HBC1-FaOOFaNN-4.6-monoB-CP' ] = 71.44351763
DATA['NUCLEAR REPULSION ENERGY']['HBC1-FaOOFaNN-4.6-monoA-unCP' ] = 69.42169291
DATA['NUCLEAR REPULSION ENERGY']['HBC1-FaOOFaNN-4.6-monoB-unCP' ] = 71.44351763
DATA['NUCLEAR REPULSION ENERGY']['HBC1-FaOOFaNN-4.8-dimer' ] = 214.45680622
DATA['NUCLEAR REPULSION ENERGY']['HBC1-FaOOFaNN-4.8-monoA-CP' ] = 69.43148739
DATA['NUCLEAR REPULSION ENERGY']['HBC1-FaOOFaNN-4.8-monoB-CP' ] = 71.43936558
DATA['NUCLEAR REPULSION ENERGY']['HBC1-FaOOFaNN-4.8-monoA-unCP' ] = 69.43148739
DATA['NUCLEAR REPULSION ENERGY']['HBC1-FaOOFaNN-4.8-monoB-unCP' ] = 71.43936558
DATA['NUCLEAR REPULSION ENERGY']['HBC1-FaOOFaNN-5.0-dimer' ] = 211.10192713
DATA['NUCLEAR REPULSION ENERGY']['HBC1-FaOOFaNN-5.0-monoA-CP' ] = 69.43782064
DATA['NUCLEAR REPULSION ENERGY']['HBC1-FaOOFaNN-5.0-monoB-CP' ] = 71.43756173
DATA['NUCLEAR REPULSION ENERGY']['HBC1-FaOOFaNN-5.0-monoA-unCP' ] = 69.43782064
DATA['NUCLEAR REPULSION ENERGY']['HBC1-FaOOFaNN-5.0-monoB-unCP' ] = 71.43756173
DATA['NUCLEAR REPULSION ENERGY']['HBC1-FaOOFaNN-5.4-dimer' ] = 205.22579493
DATA['NUCLEAR REPULSION ENERGY']['HBC1-FaOOFaNN-5.4-monoA-CP' ] = 69.44196665
DATA['NUCLEAR REPULSION ENERGY']['HBC1-FaOOFaNN-5.4-monoB-CP' ] = 71.43623387
DATA['NUCLEAR REPULSION ENERGY']['HBC1-FaOOFaNN-5.4-monoA-unCP' ] = 69.44196665
DATA['NUCLEAR REPULSION ENERGY']['HBC1-FaOOFaNN-5.4-monoB-unCP' ] = 71.43623387
DATA['NUCLEAR REPULSION ENERGY']['HBC1-FaOOFaNN-5.8-dimer' ] = 200.27909874
DATA['NUCLEAR REPULSION ENERGY']['HBC1-FaOOFaNN-5.8-monoA-CP' ] = 69.44120889
DATA['NUCLEAR REPULSION ENERGY']['HBC1-FaOOFaNN-5.8-monoB-CP' ] = 71.43671523
DATA['NUCLEAR REPULSION ENERGY']['HBC1-FaOOFaNN-5.8-monoA-unCP' ] = 69.44120889
DATA['NUCLEAR REPULSION ENERGY']['HBC1-FaOOFaNN-5.8-monoB-unCP' ] = 71.43671523
DATA['NUCLEAR REPULSION ENERGY']['HBC1-FaOOFaNN-6.4-dimer' ] = 194.15525335
DATA['NUCLEAR REPULSION ENERGY']['HBC1-FaOOFaNN-6.4-monoA-CP' ] = 69.43965701
DATA['NUCLEAR REPULSION ENERGY']['HBC1-FaOOFaNN-6.4-monoB-CP' ] = 71.43803563
DATA['NUCLEAR REPULSION ENERGY']['HBC1-FaOOFaNN-6.4-monoA-unCP' ] = 69.43965701
DATA['NUCLEAR REPULSION ENERGY']['HBC1-FaOOFaNN-6.4-monoB-unCP' ] = 71.43803563
DATA['NUCLEAR REPULSION ENERGY']['HBC1-FaOOFaNN-7.0-dimer' ] = 189.17681765
DATA['NUCLEAR REPULSION ENERGY']['HBC1-FaOOFaNN-7.0-monoA-CP' ] = 69.43814576
DATA['NUCLEAR REPULSION ENERGY']['HBC1-FaOOFaNN-7.0-monoB-CP' ] = 71.43984273
DATA['NUCLEAR REPULSION ENERGY']['HBC1-FaOOFaNN-7.0-monoA-unCP' ] = 69.43814576
DATA['NUCLEAR REPULSION ENERGY']['HBC1-FaOOFaNN-7.0-monoB-unCP' ] = 71.43984273
DATA['NUCLEAR REPULSION ENERGY']['HBC1-FaOOFaNN-8.0-dimer' ] = 182.65654313
DATA['NUCLEAR REPULSION ENERGY']['HBC1-FaOOFaNN-8.0-monoA-CP' ] = 69.43721728
DATA['NUCLEAR REPULSION ENERGY']['HBC1-FaOOFaNN-8.0-monoB-CP' ] = 71.44219510
DATA['NUCLEAR REPULSION ENERGY']['HBC1-FaOOFaNN-8.0-monoA-unCP' ] = 69.43721728
DATA['NUCLEAR REPULSION ENERGY']['HBC1-FaOOFaNN-8.0-monoB-unCP' ] = 71.44219510
DATA['NUCLEAR REPULSION ENERGY']['HBC1-FaOOFaNN-10.0-dimer' ] = 173.74362872
DATA['NUCLEAR REPULSION ENERGY']['HBC1-FaOOFaNN-10.0-monoA-CP' ] = 69.43580273
DATA['NUCLEAR REPULSION ENERGY']['HBC1-FaOOFaNN-10.0-monoB-CP' ] = 71.44379453
DATA['NUCLEAR REPULSION ENERGY']['HBC1-FaOOFaNN-10.0-monoA-unCP' ] = 69.43580273
DATA['NUCLEAR REPULSION ENERGY']['HBC1-FaOOFaNN-10.0-monoB-unCP' ] = 71.44379453
|
amjames/psi4
|
psi4/share/psi4/databases/HBC6.py
|
Python
|
lgpl-3.0
| 161,641
|
[
"Psi4"
] |
5948e23c9b32f5973d608d2090c582d97ce6293b69a8f68f84e5210e9be3db60
|
# c: 20.03.2008, r: 20.03.2008
# Test driver for the 'sinbc' problem: solves the problem defined in
# input/sinbc.py and writes the result to test_sinbc.vtk.
# NOTE(review): all logic lives in TestInput, which appears to pick up the
# module-level input_name/output_name settings — confirm in testsBasic.
input_name = 'input/sinbc.py'
output_name = 'test_sinbc.vtk'
from testsBasic import TestInput
class Test( TestInput ):
    # Everything is inherited from TestInput; this subclass only binds the
    # input/output file names declared above to a runnable test case.
    pass
|
certik/sfepy
|
tests/test_input_sinbc.py
|
Python
|
bsd-3-clause
| 160
|
[
"VTK"
] |
5c9a16b7aaa8098c18e405dd245654e6e3c8e94d188d5810c60689415c78a6f8
|
#
# @BEGIN LICENSE
#
# Psi4: an open-source quantum chemistry software package
#
# Copyright (c) 2007-2017 The Psi4 Developers.
#
# The copyrights for code used from other parties are included in
# the corresponding files.
#
# This program is free software; you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation; either version 2 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License along
# with this program; if not, write to the Free Software Foundation, Inc.,
# 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA.
#
# @END LICENSE
#
"""Elemental masses (most common isotope), symbols, and atomic numbers from psi4.
"""
_temp_element = ["GHOST", "HYDROGEN", "HELIUM", "LITHIUM", "BERYLLIUM",
"BORON", "CARBON", "NITROGEN", "OXYGEN", "FLUORINE",
"NEON", "SODIUM", "MAGNESIUM", "ALUMINUM", "SILICON",
"PHOSPHORUS", "SULFUR", "CHLORINE", "ARGON", "POTASSIUM",
"CALCIUM", "SCANDIUM", "TITANIUM", "VANADIUM", "CHROMIUM",
"MANGANESE", "IRON", "COBALT", "NICKEL", "COPPER",
"ZINC", "GALLIUM", "GERMANIUM", "ARSENIC", "SELENIUM",
"BROMINE", "KRYPTON", "RUBIDIUM", "STRONTIUM", "YTTRIUM",
"ZIRCONIUM", "NIOBIUM", "MOLYBDENUM", "TECHNETIUM", "RUTHENIUM",
"RHODIUM", "PALLADIUM", "SILVER", "CADMIUM", "INDIUM",
"TIN", "ANTIMONY", "TELLURIUM", "IODINE", "XENON",
"CESIUM", "BARIUM", "LANTHANUM", "CERIUM", "PRASEODYMIUM",
"NEODYMIUM", "PROMETHIUM", "SAMARIUM", "EUROPIUM", "GADOLINIUM",
"TERBIUM", "DYSPROSIUM", "HOLMIUM", "ERBIUM", "THULIUM",
"YTTERBIUM", "LUTETIUM", "HAFNIUM", "TANTALUM", "TUNGSTEN",
"RHENIUM", "OSMIUM", "IRIDIUM", "PLATINUM", "GOLD",
"MERCURY", "THALLIUM", "LEAD", "BISMUTH", "POLONIUM",
"ASTATINE", "RADON", "FRANCIUM", "RADIUM", "ACTINIUM",
"THORIUM", "PROTACTINIUM", "URANIUM", "NEPTUNIUM", "PLUTONIUM",
"AMERICIUM", "CURIUM", "BERKELIUM", "CALIFORNIUM", "EINSTEINIUM",
"FERMIUM", "MENDELEVIUM", "NOBELIUM", "LAWRENCIUM" "RUTHERFORDIUM",
"DUBNIUM", "SEABORGIUM", "BOHRIUM"]
_temp_symbol = ["X", "H", "HE", "LI", "BE", "B", "C", "N", "O", "F", "NE", "NA", "MG",
"AL", "SI", "P", "S", "CL", "AR", "K", "CA", "SC", "TI", "V", "CR", "MN", "FE", "CO",
"NI", "CU", "ZN", "GA", "GE", "AS", "SE", "BR", "KR", "RB", "SR", "Y", "ZR", "NB",
"MO", "TC", "RU", "RH", "PD", "AG", "CD", "IN", "SN", "SB", "TE", "I", "XE", "CS",
"BA", "LA", "CE", "PR", "ND", "PM", "SM", "EU", "GD", "TB", "DY", "HO", "ER", "TM",
"YB", "LU", "HF", "TA", "W", "RE", "OS", "IR", "PT", "AU", "HG", "TL", "PB", "BI",
"PO", "AT", "RN", "FR", "RA", "AC", "TH", "PA", "U", "NP", "PU", "AM", "CM", "BK",
"CF", "ES", "FM", "MD", "NO", "LR", "RF", "DB", "SG", "BH", "HS", "MT", "DS", "RG",
"UUB", "UUT", "UUQ", "UUP", "UUH", "UUS", "UUO"]
_temp_z = list(range(0, 108))
# Atomic masses (amu) of the most common isotope of each element, indexed
# by atomic number; index 0 (ghost atom) has mass 0.  Carbon's entry is the
# exact integer 12 because C-12 defines the atomic mass unit; every other
# entry is a float.
# NOTE(review): values appear to be single-isotope masses, not standard
# atomic weights — confirm against the NIST isotopic-composition tables
# before using them for abundance-weighted work.
_temp_mass = [
    0., 1.00782503207, 4.00260325415, 7.016004548, 9.012182201, 11.009305406,
    12, 14.00307400478, 15.99491461956, 18.998403224, 19.99244017542,
    22.98976928087, 23.985041699, 26.981538627, 27.97692653246, 30.973761629,
    31.972070999, 34.968852682, 39.96238312251, 38.963706679, 39.962590983,
    44.955911909, 47.947946281, 50.943959507, 51.940507472, 54.938045141,
    55.934937475, 58.933195048, 57.935342907, 62.929597474, 63.929142222,
    68.925573587, 73.921177767, 74.921596478, 79.916521271, 78.918337087,
    85.910610729, 84.911789737, 87.905612124, 88.905848295, 89.904704416,
    92.906378058, 97.905408169, 98.906254747, 101.904349312, 102.905504292,
    105.903485715, 106.90509682, 113.90335854, 114.903878484, 119.902194676,
    120.903815686, 129.906224399, 126.904472681, 131.904153457, 132.905451932,
    137.905247237, 138.906353267, 139.905438706, 140.907652769, 141.907723297,
    144.912749023, 151.919732425, 152.921230339, 157.924103912, 158.925346757,
    163.929174751, 164.93032207, 165.930293061, 168.93421325, 173.938862089,
    174.940771819, 179.946549953, 180.947995763, 183.950931188, 186.955753109,
    191.96148069, 192.96292643, 194.964791134, 196.966568662, 201.970643011,
    204.974427541, 207.976652071, 208.980398734, 208.982430435, 210.987496271,
    222.017577738, 222.01755173, 228.031070292, 227.027752127, 232.038055325,
    231.03588399, 238.050788247, 237.048173444, 242.058742611, 243.06138108,
    247.07035354, 247.07030708, 251.079586788, 252.082978512, 257.095104724,
    258.098431319, 255.093241131, 260.105504, 263.112547, 255.107398, 259.114500,
    262.122892, 263.128558, 265.136151, 281.162061, 272.153615, 283.171792, 283.176451,
    285.183698, 287.191186, 292.199786, 291.206564, 293.214670]
_temp_iso_symbol = [
"H", "H1", "H2", "D", "H3", "T", "H4", "H5", "H6", "H7", "HE", "HE3", "HE4",
"HE5", "HE6", "HE7", "HE8", "HE9", "HE10", "LI", "LI3", "LI4", "LI5", "LI6",
"LI7", "LI8", "LI9", "LI10", "LI11", "LI12", "BE", "BE5", "BE6", "BE7", "BE8",
"BE9", "BE10", "BE11", "BE12", "BE13", "BE14", "BE15", "BE16", "B", "B6", "B7",
"B8", "B9", "B10", "B11", "B12", "B13", "B14", "B15", "B16", "B17", "B18", "B19",
"C", "C8", "C9", "C10", "C11", "C12", "C13", "C14", "C15", "C16", "C17", "C18",
"C19", "C20", "C21", "C22", "N", "N10", "N11", "N12", "N13", "N14", "N15", "N16",
"N17", "N18", "N19", "N20", "N21", "N22", "N23", "N24", "N25", "O", "O12", "O13",
"O14", "O15", "O16", "O17", "O18", "O19", "O20", "O21", "O22", "O23", "O24",
"O25", "O26", "O27", "O28", "F", "F14", "F15", "F16", "F17", "F18", "F19", "F20",
"F21", "F22", "F23", "F24", "F25", "F26", "F27", "F28", "F29", "F30", "F31",
"NE", "NE16", "NE17", "NE18", "NE19", "NE20", "NE21", "NE22", "NE23", "NE24",
"NE25", "NE26", "NE27", "NE28", "NE29", "NE30", "NE31", "NE32", "NE33", "NE34",
"NA", "NA18", "NA19", "NA20", "NA21", "NA22", "NA23", "NA24", "NA25", "NA26",
"NA27", "NA28", "NA29", "NA30", "NA31", "NA32", "NA33", "NA34", "NA35", "NA36",
"NA37", "MG", "MG19", "MG20", "MG21", "MG22", "MG23", "MG24", "MG25", "MG26",
"MG27", "MG28", "MG29", "MG30", "MG31", "MG32", "MG33", "MG34", "MG35", "MG36",
"MG37", "MG38", "MG39", "MG40", "AL", "AL21", "AL22", "AL23", "AL24", "AL25",
"AL26", "AL27", "AL28", "AL29", "AL30", "AL31", "AL32", "AL33", "AL34", "AL35",
"AL36", "AL37", "AL38", "AL39", "AL40", "AL41", "AL42", "SI", "SI22", "SI23",
"SI24", "SI25", "SI26", "SI27", "SI28", "SI29", "SI30", "SI31", "SI32", "SI33",
"SI34", "SI35", "SI36", "SI37", "SI38", "SI39", "SI40", "SI41", "SI42", "SI43",
"SI44", "P", "P24", "P25", "P26", "P27", "P28", "P29", "P30", "P31", "P32",
"P33", "P34", "P35", "P36", "P37", "P38", "P39", "P40", "P41", "P42", "P43",
"P44", "P45", "P46", "S", "S26", "S27", "S28", "S29", "S30", "S31", "S32", "S33",
"S34", "S35", "S36", "S37", "S38", "S39", "S40", "S41", "S42", "S43", "S44",
"S45", "S46", "S47", "S48", "S49", "CL", "CL28", "CL29", "CL30", "CL31", "CL32",
"CL33", "CL34", "CL35", "CL36", "CL37", "CL38", "CL39", "CL40", "CL41", "CL42",
"CL43", "CL44", "CL45", "CL46", "CL47", "CL48", "CL49", "CL50", "CL51", "AR",
"AR30", "AR31", "AR32", "AR33", "AR34", "AR35", "AR36", "AR37", "AR38", "AR39",
"AR40", "AR41", "AR42", "AR43", "AR44", "AR45", "AR46", "AR47", "AR48", "AR49",
"AR50", "AR51", "AR52", "AR53", "K", "K32", "K33", "K34", "K35", "K36", "K37",
"K38", "K39", "K40", "K41", "K42", "K43", "K44", "K45", "K46", "K47", "K48",
"K49", "K50", "K51", "K52", "K53", "K54", "K55", "CA", "CA34", "CA35", "CA36",
"CA37", "CA38", "CA39", "CA40", "CA41", "CA42", "CA43", "CA44", "CA45", "CA46",
"CA47", "CA48", "CA49", "CA50", "CA51", "CA52", "CA53", "CA54", "CA55", "CA56",
"CA57", "SC", "SC36", "SC37", "SC38", "SC39", "SC40", "SC41", "SC42", "SC43",
"SC44", "SC45", "SC46", "SC47", "SC48", "SC49", "SC50", "SC51", "SC52", "SC53",
"SC54", "SC55", "SC56", "SC57", "SC58", "SC59", "SC60", "TI", "TI38", "TI39",
"TI40", "TI41", "TI42", "TI43", "TI44", "TI45", "TI46", "TI47", "TI48", "TI49",
"TI50", "TI51", "TI52", "TI53", "TI54", "TI55", "TI56", "TI57", "TI58", "TI59",
"TI60", "TI61", "TI62", "TI63", "V", "V40", "V41", "V42", "V43", "V44", "V45",
"V46", "V47", "V48", "V49", "V50", "V51", "V52", "V53", "V54", "V55", "V56",
"V57", "V58", "V59", "V60", "V61", "V62", "V63", "V64", "V65", "CR", "CR42",
"CR43", "CR44", "CR45", "CR46", "CR47", "CR48", "CR49", "CR50", "CR51", "CR52",
"CR53", "CR54", "CR55", "CR56", "CR57", "CR58", "CR59", "CR60", "CR61", "CR62",
"CR63", "CR64", "CR65", "CR66", "CR67", "MN", "MN44", "MN45", "MN46", "MN47",
"MN48", "MN49", "MN50", "MN51", "MN52", "MN53", "MN54", "MN55", "MN56", "MN57",
"MN58", "MN59", "MN60", "MN61", "MN62", "MN63", "MN64", "MN65", "MN66", "MN67",
"MN68", "MN69", "FE", "FE45", "FE46", "FE47", "FE48", "FE49", "FE50", "FE51",
"FE52", "FE53", "FE54", "FE55", "FE56", "FE57", "FE58", "FE59", "FE60", "FE61",
"FE62", "FE63", "FE64", "FE65", "FE66", "FE67", "FE68", "FE69", "FE70", "FE71",
"FE72", "CO", "CO47", "CO48", "CO49", "CO50", "CO51", "CO52", "CO53", "CO54",
"CO55", "CO56", "CO57", "CO58", "CO59", "CO60", "CO61", "CO62", "CO63", "CO64",
"CO65", "CO66", "CO67", "CO68", "CO69", "CO70", "CO71", "CO72", "CO73", "CO74",
"CO75", "NI", "NI48", "NI49", "NI50", "NI51", "NI52", "NI53", "NI54", "NI55",
"NI56", "NI57", "NI58", "NI59", "NI60", "NI61", "NI62", "NI63", "NI64", "NI65",
"NI66", "NI67", "NI68", "NI69", "NI70", "NI71", "NI72", "NI73", "NI74", "NI75",
"NI76", "NI77", "NI78", "CU", "CU52", "CU53", "CU54", "CU55", "CU56", "CU57",
"CU58", "CU59", "CU60", "CU61", "CU62", "CU63", "CU64", "CU65", "CU66", "CU67",
"CU68", "CU69", "CU70", "CU71", "CU72", "CU73", "CU74", "CU75", "CU76", "CU77",
"CU78", "CU79", "CU80", "ZN", "ZN54", "ZN55", "ZN56", "ZN57", "ZN58", "ZN59",
"ZN60", "ZN61", "ZN62", "ZN63", "ZN64", "ZN65", "ZN66", "ZN67", "ZN68", "ZN69",
"ZN70", "ZN71", "ZN72", "ZN73", "ZN74", "ZN75", "ZN76", "ZN77", "ZN78", "ZN79",
"ZN80", "ZN81", "ZN82", "ZN83", "GA", "GA56", "GA57", "GA58", "GA59", "GA60",
"GA61", "GA62", "GA63", "GA64", "GA65", "GA66", "GA67", "GA68", "GA69", "GA70",
"GA71", "GA72", "GA73", "GA74", "GA75", "GA76", "GA77", "GA78", "GA79", "GA80",
"GA81", "GA82", "GA83", "GA84", "GA85", "GA86", "GE", "GE58", "GE59", "GE60",
"GE61", "GE62", "GE63", "GE64", "GE65", "GE66", "GE67", "GE68", "GE69", "GE70",
"GE71", "GE72", "GE73", "GE74", "GE75", "GE76", "GE77", "GE78", "GE79", "GE80",
"GE81", "GE82", "GE83", "GE84", "GE85", "GE86", "GE87", "GE88", "GE89", "AS",
"AS60", "AS61", "AS62", "AS63", "AS64", "AS65", "AS66", "AS67", "AS68", "AS69",
"AS70", "AS71", "AS72", "AS73", "AS74", "AS75", "AS76", "AS77", "AS78", "AS79",
"AS80", "AS81", "AS82", "AS83", "AS84", "AS85", "AS86", "AS87", "AS88", "AS89",
"AS90", "AS91", "AS92", "SE", "SE65", "SE66", "SE67", "SE68", "SE69", "SE70",
"SE71", "SE72", "SE73", "SE74", "SE75", "SE76", "SE77", "SE78", "SE79", "SE80",
"SE81", "SE82", "SE83", "SE84", "SE85", "SE86", "SE87", "SE88", "SE89", "SE90",
"SE91", "SE92", "SE93", "SE94", "BR", "BR67", "BR68", "BR69", "BR70", "BR71",
"BR72", "BR73", "BR74", "BR75", "BR76", "BR77", "BR78", "BR79", "BR80", "BR81",
"BR82", "BR83", "BR84", "BR85", "BR86", "BR87", "BR88", "BR89", "BR90", "BR91",
"BR92", "BR93", "BR94", "BR95", "BR96", "BR97", "KR", "KR69", "KR70", "KR71",
"KR72", "KR73", "KR74", "KR75", "KR76", "KR77", "KR78", "KR79", "KR80", "KR81",
"KR82", "KR83", "KR84", "KR85", "KR86", "KR87", "KR88", "KR89", "KR90", "KR91",
"KR92", "KR93", "KR94", "KR95", "KR96", "KR97", "KR98", "KR99", "KR100", "RB",
"RB71", "RB72", "RB73", "RB74", "RB75", "RB76", "RB77", "RB78", "RB79", "RB80",
"RB81", "RB82", "RB83", "RB84", "RB85", "RB86", "RB87", "RB88", "RB89", "RB90",
"RB91", "RB92", "RB93", "RB94", "RB95", "RB96", "RB97", "RB98", "RB99",
"RB100", "RB101", "RB102", "SR", "SR73", "SR74", "SR75", "SR76", "SR77",
"SR78", "SR79", "SR80", "SR81", "SR82", "SR83", "SR84", "SR85", "SR86", "SR87",
"SR88", "SR89", "SR90", "SR91", "SR92", "SR93", "SR94", "SR95", "SR96", "SR97",
"SR98", "SR99", "SR100", "SR101", "SR102", "SR103", "SR104", "SR105", "Y",
"Y76", "Y77", "Y78", "Y79", "Y80", "Y81", "Y82", "Y83", "Y84", "Y85", "Y86",
"Y87", "Y88", "Y89", "Y90", "Y91", "Y92", "Y93", "Y94", "Y95", "Y96", "Y97",
"Y98", "Y99", "Y100", "Y101", "Y102", "Y103", "Y104", "Y105", "Y106", "Y107",
"Y108", "ZR", "ZR78", "ZR79", "ZR80", "ZR81", "ZR82", "ZR83", "ZR84", "ZR85",
"ZR86", "ZR87", "ZR88", "ZR89", "ZR90", "ZR91", "ZR92", "ZR93", "ZR94", "ZR95",
"ZR96", "ZR97", "ZR98", "ZR99", "ZR100", "ZR101", "ZR102", "ZR103", "ZR104",
"ZR105", "ZR106", "ZR107", "ZR108", "ZR109", "ZR110", "NB", "NB81", "NB82",
"NB83", "NB84", "NB85", "NB86", "NB87", "NB88", "NB89", "NB90", "NB91", "NB92",
"NB93", "NB94", "NB95", "NB96", "NB97", "NB98", "NB99", "NB100", "NB101",
"NB102", "NB103", "NB104", "NB105", "NB106", "NB107", "NB108", "NB109",
"NB110", "NB111", "NB112", "NB113", "MO", "MO83", "MO84", "MO85", "MO86",
"MO87", "MO88", "MO89", "MO90", "MO91", "MO92", "MO93", "MO94", "MO95", "MO96",
"MO97", "MO98", "MO99", "MO100", "MO101", "MO102", "MO103", "MO104", "MO105",
"MO106", "MO107", "MO108", "MO109", "MO110", "MO111", "MO112", "MO113",
"MO114", "MO115", "TC", "TC85", "TC86", "TC87", "TC88", "TC89", "TC90", "TC91",
"TC92", "TC93", "TC94", "TC95", "TC96", "TC97", "TC98", "TC99", "TC100",
"TC101", "TC102", "TC103", "TC104", "TC105", "TC106", "TC107", "TC108",
"TC109", "TC110", "TC111", "TC112", "TC113", "TC114", "TC115", "TC116",
"TC117", "TC118", "RU", "RU87", "RU88", "RU89", "RU90", "RU91", "RU92", "RU93",
"RU94", "RU95", "RU96", "RU97", "RU98", "RU99", "RU100", "RU101", "RU102",
"RU103", "RU104", "RU105", "RU106", "RU107", "RU108", "RU109", "RU110",
"RU111", "RU112", "RU113", "RU114", "RU115", "RU116", "RU117", "RU118",
"RU119", "RU120", "RH", "RH89", "RH90", "RH91", "RH92", "RH93", "RH94", "RH95",
"RH96", "RH97", "RH98", "RH99", "RH100", "RH101", "RH102", "RH103", "RH104",
"RH105", "RH106", "RH107", "RH108", "RH109", "RH110", "RH111", "RH112",
"RH113", "RH114", "RH115", "RH116", "RH117", "RH118", "RH119", "RH120",
"RH121", "RH122", "PD", "PD91", "PD92", "PD93", "PD94", "PD95", "PD96", "PD97",
"PD98", "PD99", "PD100", "PD101", "PD102", "PD103", "PD104", "PD105", "PD106",
"PD107", "PD108", "PD109", "PD110", "PD111", "PD112", "PD113", "PD114",
"PD115", "PD116", "PD117", "PD118", "PD119", "PD120", "PD121", "PD122",
"PD123", "PD124", "AG", "AG93", "AG94", "AG95", "AG96", "AG97", "AG98", "AG99",
"AG100", "AG101", "AG102", "AG103", "AG104", "AG105", "AG106", "AG107",
"AG108", "AG109", "AG110", "AG111", "AG112", "AG113", "AG114", "AG115",
"AG116", "AG117", "AG118", "AG119", "AG120", "AG121", "AG122", "AG123",
"AG124", "AG125", "AG126", "AG127", "AG128", "AG129", "AG130", "CD", "CD95",
"CD96", "CD97", "CD98", "CD99", "CD100", "CD101", "CD102", "CD103", "CD104",
"CD105", "CD106", "CD107", "CD108", "CD109", "CD110", "CD111", "CD112",
"CD113", "CD114", "CD115", "CD116", "CD117", "CD118", "CD119", "CD120",
"CD121", "CD122", "CD123", "CD124", "CD125", "CD126", "CD127", "CD128",
"CD129", "CD130", "CD131", "CD132", "IN", "IN97", "IN98", "IN99", "IN100",
"IN101", "IN102", "IN103", "IN104", "IN105", "IN106", "IN107", "IN108",
"IN109", "IN110", "IN111", "IN112", "IN113", "IN114", "IN115", "IN116",
"IN117", "IN118", "IN119", "IN120", "IN121", "IN122", "IN123", "IN124",
"IN125", "IN126", "IN127", "IN128", "IN129", "IN130", "IN131", "IN132",
"IN133", "IN134", "IN135", "SN", "SN99", "SN100", "SN101", "SN102", "SN103",
"SN104", "SN105", "SN106", "SN107", "SN108", "SN109", "SN110", "SN111",
"SN112", "SN113", "SN114", "SN115", "SN116", "SN117", "SN118", "SN119",
"SN120", "SN121", "SN122", "SN123", "SN124", "SN125", "SN126", "SN127",
"SN128", "SN129", "SN130", "SN131", "SN132", "SN133", "SN134", "SN135",
"SN136", "SN137", "SB", "SB103", "SB104", "SB105", "SB106", "SB107", "SB108",
"SB109", "SB110", "SB111", "SB112", "SB113", "SB114", "SB115", "SB116",
"SB117", "SB118", "SB119", "SB120", "SB121", "SB122", "SB123", "SB124",
"SB125", "SB126", "SB127", "SB128", "SB129", "SB130", "SB131", "SB132",
"SB133", "SB134", "SB135", "SB136", "SB137", "SB138", "SB139", "TE", "TE105",
"TE106", "TE107", "TE108", "TE109", "TE110", "TE111", "TE112", "TE113",
"TE114", "TE115", "TE116", "TE117", "TE118", "TE119", "TE120", "TE121",
"TE122", "TE123", "TE124", "TE125", "TE126", "TE127", "TE128", "TE129",
"TE130", "TE131", "TE132", "TE133", "TE134", "TE135", "TE136", "TE137",
"TE138", "TE139", "TE140", "TE141", "TE142", "I", "I108", "I109", "I110",
"I111", "I112", "I113", "I114", "I115", "I116", "I117", "I118", "I119", "I120",
"I121", "I122", "I123", "I124", "I125", "I126", "I127", "I128", "I129", "I130",
"I131", "I132", "I133", "I134", "I135", "I136", "I137", "I138", "I139", "I140",
"I141", "I142", "I143", "I144", "XE", "XE110", "XE111", "XE112", "XE113",
"XE114", "XE115", "XE116", "XE117", "XE118", "XE119", "XE120", "XE121",
"XE122", "XE123", "XE124", "XE125", "XE126", "XE127", "XE128", "XE129",
"XE130", "XE131", "XE132", "XE133", "XE134", "XE135", "XE136", "XE137",
"XE138", "XE139", "XE140", "XE141", "XE142", "XE143", "XE144", "XE145",
"XE146", "XE147", "CS", "CS112", "CS113", "CS114", "CS115", "CS116", "CS117",
"CS118", "CS119", "CS120", "CS121", "CS122", "CS123", "CS124", "CS125",
"CS126", "CS127", "CS128", "CS129", "CS130", "CS131", "CS132", "CS133",
"CS134", "CS135", "CS136", "CS137", "CS138", "CS139", "CS140", "CS141",
"CS142", "CS143", "CS144", "CS145", "CS146", "CS147", "CS148", "CS149",
"CS150", "CS151", "BA", "BA114", "BA115", "BA116", "BA117", "BA118", "BA119",
"BA120", "BA121", "BA122", "BA123", "BA124", "BA125", "BA126", "BA127",
"BA128", "BA129", "BA130", "BA131", "BA132", "BA133", "BA134", "BA135",
"BA136", "BA137", "BA138", "BA139", "BA140", "BA141", "BA142", "BA143",
"BA144", "BA145", "BA146", "BA147", "BA148", "BA149", "BA150", "BA151",
"BA152", "BA153", "LA", "LA117", "LA118", "LA119", "LA120", "LA121", "LA122",
"LA123", "LA124", "LA125", "LA126", "LA127", "LA128", "LA129", "LA130",
"LA131", "LA132", "LA133", "LA134", "LA135", "LA136", "LA137", "LA138",
"LA139", "LA140", "LA141", "LA142", "LA143", "LA144", "LA145", "LA146",
"LA147", "LA148", "LA149", "LA150", "LA151", "LA152", "LA153", "LA154",
"LA155", "CE", "CE119", "CE120", "CE121", "CE122", "CE123", "CE124", "CE125",
"CE126", "CE127", "CE128", "CE129", "CE130", "CE131", "CE132", "CE133",
"CE134", "CE135", "CE136", "CE137", "CE138", "CE139", "CE140", "CE141",
"CE142", "CE143", "CE144", "CE145", "CE146", "CE147", "CE148", "CE149",
"CE150", "CE151", "CE152", "CE153", "CE154", "CE155", "CE156", "CE157", "PR",
"PR121", "PR122", "PR123", "PR124", "PR125", "PR126", "PR127", "PR128",
"PR129", "PR130", "PR131", "PR132", "PR133", "PR134", "PR135", "PR136",
"PR137", "PR138", "PR139", "PR140", "PR141", "PR142", "PR143", "PR144",
"PR145", "PR146", "PR147", "PR148", "PR149", "PR150", "PR151", "PR152",
"PR153", "PR154", "PR155", "PR156", "PR157", "PR158", "PR159", "ND", "ND124",
"ND125", "ND126", "ND127", "ND128", "ND129", "ND130", "ND131", "ND132",
"ND133", "ND134", "ND135", "ND136", "ND137", "ND138", "ND139", "ND140",
"ND141", "ND142", "ND143", "ND144", "ND145", "ND146", "ND147", "ND148",
"ND149", "ND150", "ND151", "ND152", "ND153", "ND154", "ND155", "ND156",
"ND157", "ND158", "ND159", "ND160", "ND161", "PM", "PM126", "PM127", "PM128",
"PM129", "PM130", "PM131", "PM132", "PM133", "PM134", "PM135", "PM136",
"PM137", "PM138", "PM139", "PM140", "PM141", "PM142", "PM143", "PM144",
"PM145", "PM146", "PM147", "PM148", "PM149", "PM150", "PM151", "PM152",
"PM153", "PM154", "PM155", "PM156", "PM157", "PM158", "PM159", "PM160",
"PM161", "PM162", "PM163", "SM", "SM128", "SM129", "SM130", "SM131", "SM132",
"SM133", "SM134", "SM135", "SM136", "SM137", "SM138", "SM139", "SM140",
"SM141", "SM142", "SM143", "SM144", "SM145", "SM146", "SM147", "SM148",
"SM149", "SM150", "SM151", "SM152", "SM153", "SM154", "SM155", "SM156",
"SM157", "SM158", "SM159", "SM160", "SM161", "SM162", "SM163", "SM164",
"SM165", "EU", "EU130", "EU131", "EU132", "EU133", "EU134", "EU135", "EU136",
"EU137", "EU138", "EU139", "EU140", "EU141", "EU142", "EU143", "EU144",
"EU145", "EU146", "EU147", "EU148", "EU149", "EU150", "EU151", "EU152",
"EU153", "EU154", "EU155", "EU156", "EU157", "EU158", "EU159", "EU160",
"EU161", "EU162", "EU163", "EU164", "EU165", "EU166", "EU167", "GD", "GD134",
"GD135", "GD136", "GD137", "GD138", "GD139", "GD140", "GD141", "GD142",
"GD143", "GD144", "GD145", "GD146", "GD147", "GD148", "GD149", "GD150",
"GD151", "GD152", "GD153", "GD154", "GD155", "GD156", "GD157", "GD158",
"GD159", "GD160", "GD161", "GD162", "GD163", "GD164", "GD165", "GD166",
"GD167", "GD168", "GD169", "TB", "TB136", "TB137", "TB138", "TB139", "TB140",
"TB141", "TB142", "TB143", "TB144", "TB145", "TB146", "TB147", "TB148",
"TB149", "TB150", "TB151", "TB152", "TB153", "TB154", "TB155", "TB156",
"TB157", "TB158", "TB159", "TB160", "TB161", "TB162", "TB163", "TB164",
"TB165", "TB166", "TB167", "TB168", "TB169", "TB170", "TB171", "DY", "DY138",
"DY139", "DY140", "DY141", "DY142", "DY143", "DY144", "DY145", "DY146",
"DY147", "DY148", "DY149", "DY150", "DY151", "DY152", "DY153", "DY154",
"DY155", "DY156", "DY157", "DY158", "DY159", "DY160", "DY161", "DY162",
"DY163", "DY164", "DY165", "DY166", "DY167", "DY168", "DY169", "DY170",
"DY171", "DY172", "DY173", "HO", "HO140", "HO141", "HO142", "HO143", "HO144",
"HO145", "HO146", "HO147", "HO148", "HO149", "HO150", "HO151", "HO152",
"HO153", "HO154", "HO155", "HO156", "HO157", "HO158", "HO159", "HO160",
"HO161", "HO162", "HO163", "HO164", "HO165", "HO166", "HO167", "HO168",
"HO169", "HO170", "HO171", "HO172", "HO173", "HO174", "HO175", "ER", "ER143",
"ER144", "ER145", "ER146", "ER147", "ER148", "ER149", "ER150", "ER151",
"ER152", "ER153", "ER154", "ER155", "ER156", "ER157", "ER158", "ER159",
"ER160", "ER161", "ER162", "ER163", "ER164", "ER165", "ER166", "ER167",
"ER168", "ER169", "ER170", "ER171", "ER172", "ER173", "ER174", "ER175",
"ER176", "ER177", "TM", "TM145", "TM146", "TM147", "TM148", "TM149", "TM150",
"TM151", "TM152", "TM153", "TM154", "TM155", "TM156", "TM157", "TM158",
"TM159", "TM160", "TM161", "TM162", "TM163", "TM164", "TM165", "TM166",
"TM167", "TM168", "TM169", "TM170", "TM171", "TM172", "TM173", "TM174",
"TM175", "TM176", "TM177", "TM178", "TM179", "YB", "YB148", "YB149", "YB150",
"YB151", "YB152", "YB153", "YB154", "YB155", "YB156", "YB157", "YB158",
"YB159", "YB160", "YB161", "YB162", "YB163", "YB164", "YB165", "YB166",
"YB167", "YB168", "YB169", "YB170", "YB171", "YB172", "YB173", "YB174",
"YB175", "YB176", "YB177", "YB178", "YB179", "YB180", "YB181", "LU", "LU150",
"LU151", "LU152", "LU153", "LU154", "LU155", "LU156", "LU157", "LU158",
"LU159", "LU160", "LU161", "LU162", "LU163", "LU164", "LU165", "LU166",
"LU167", "LU168", "LU169", "LU170", "LU171", "LU172", "LU173", "LU174",
"LU175", "LU176", "LU177", "LU178", "LU179", "LU180", "LU181", "LU182",
"LU183", "LU184", "HF", "HF153", "HF154", "HF155", "HF156", "HF157", "HF158",
"HF159", "HF160", "HF161", "HF162", "HF163", "HF164", "HF165", "HF166",
"HF167", "HF168", "HF169", "HF170", "HF171", "HF172", "HF173", "HF174",
"HF175", "HF176", "HF177", "HF178", "HF179", "HF180", "HF181", "HF182",
"HF183", "HF184", "HF185", "HF186", "HF187", "HF188", "TA", "TA155", "TA156",
"TA157", "TA158", "TA159", "TA160", "TA161", "TA162", "TA163", "TA164",
"TA165", "TA166", "TA167", "TA168", "TA169", "TA170", "TA171", "TA172",
"TA173", "TA174", "TA175", "TA176", "TA177", "TA178", "TA179", "TA180",
"TA181", "TA182", "TA183", "TA184", "TA185", "TA186", "TA187", "TA188",
"TA189", "TA190", "W", "W158", "W159", "W160", "W161", "W162", "W163", "W164",
"W165", "W166", "W167", "W168", "W169", "W170", "W171", "W172", "W173", "W174",
"W175", "W176", "W177", "W178", "W179", "W180", "W181", "W182", "W183", "W184",
"W185", "W186", "W187", "W188", "W189", "W190", "W191", "W192", "RE", "RE160",
"RE161", "RE162", "RE163", "RE164", "RE165", "RE166", "RE167", "RE168",
"RE169", "RE170", "RE171", "RE172", "RE173", "RE174", "RE175", "RE176",
"RE177", "RE178", "RE179", "RE180", "RE181", "RE182", "RE183", "RE184",
"RE185", "RE186", "RE187", "RE188", "RE189", "RE190", "RE191", "RE192",
"RE193", "RE194", "OS", "OS162", "OS163", "OS164", "OS165", "OS166", "OS167",
"OS168", "OS169", "OS170", "OS171", "OS172", "OS173", "OS174", "OS175",
"OS176", "OS177", "OS178", "OS179", "OS180", "OS181", "OS182", "OS183",
"OS184", "OS185", "OS186", "OS187", "OS188", "OS189", "OS190", "OS191",
"OS192", "OS193", "OS194", "OS195", "OS196", "IR", "IR164", "IR165", "IR166",
"IR167", "IR168", "IR169", "IR170", "IR171", "IR172", "IR173", "IR174",
"IR175", "IR176", "IR177", "IR178", "IR179", "IR180", "IR181", "IR182",
"IR183", "IR184", "IR185", "IR186", "IR187", "IR188", "IR189", "IR190",
"IR191", "IR192", "IR193", "IR194", "IR195", "IR196", "IR197", "IR198",
"IR199", "PT", "PT166", "PT167", "PT168", "PT169", "PT170", "PT171", "PT172",
"PT173", "PT174", "PT175", "PT176", "PT177", "PT178", "PT179", "PT180",
"PT181", "PT182", "PT183", "PT184", "PT185", "PT186", "PT187", "PT188",
"PT189", "PT190", "PT191", "PT192", "PT193", "PT194", "PT195", "PT196",
"PT197", "PT198", "PT199", "PT200", "PT201", "PT202", "AU", "AU169", "AU170",
"AU171", "AU172", "AU173", "AU174", "AU175", "AU176", "AU177", "AU178",
"AU179", "AU180", "AU181", "AU182", "AU183", "AU184", "AU185", "AU186",
"AU187", "AU188", "AU189", "AU190", "AU191", "AU192", "AU193", "AU194",
"AU195", "AU196", "AU197", "AU198", "AU199", "AU200", "AU201", "AU202",
"AU203", "AU204", "AU205", "HG", "HG171", "HG172", "HG173", "HG174", "HG175",
"HG176", "HG177", "HG178", "HG179", "HG180", "HG181", "HG182", "HG183",
"HG184", "HG185", "HG186", "HG187", "HG188", "HG189", "HG190", "HG191",
"HG192", "HG193", "HG194", "HG195", "HG196", "HG197", "HG198", "HG199",
"HG200", "HG201", "HG202", "HG203", "HG204", "HG205", "HG206", "HG207",
"HG208", "HG209", "HG210", "TL", "TL176", "TL177", "TL178", "TL179", "TL180",
"TL181", "TL182", "TL183", "TL184", "TL185", "TL186", "TL187", "TL188",
"TL189", "TL190", "TL191", "TL192", "TL193", "TL194", "TL195", "TL196",
"TL197", "TL198", "TL199", "TL200", "TL201", "TL202", "TL203", "TL204",
"TL205", "TL206", "TL207", "TL208", "TL209", "TL210", "TL211", "TL212", "PB",
"PB178", "PB179", "PB180", "PB181", "PB182", "PB183", "PB184", "PB185",
"PB186", "PB187", "PB188", "PB189", "PB190", "PB191", "PB192", "PB193",
"PB194", "PB195", "PB196", "PB197", "PB198", "PB199", "PB200", "PB201",
"PB202", "PB203", "PB204", "PB205", "PB206", "PB207", "PB208", "PB209",
"PB210", "PB211", "PB212", "PB213", "PB214", "PB215", "BI", "BI184", "BI185",
"BI186", "BI187", "BI188", "BI189", "BI190", "BI191", "BI192", "BI193",
"BI194", "BI195", "BI196", "BI197", "BI198", "BI199", "BI200", "BI201",
"BI202", "BI203", "BI204", "BI205", "BI206", "BI207", "BI208", "BI209",
"BI210", "BI211", "BI212", "BI213", "BI214", "BI215", "BI216", "BI217",
"BI218", "PO", "PO188", "PO189", "PO190", "PO191", "PO192", "PO193", "PO194",
"PO195", "PO196", "PO197", "PO198", "PO199", "PO200", "PO201", "PO202",
"PO203", "PO204", "PO205", "PO206", "PO207", "PO208", "PO209", "PO210",
"PO211", "PO212", "PO213", "PO214", "PO215", "PO216", "PO217", "PO218",
"PO219", "PO220", "AT", "AT193", "AT194", "AT195", "AT196", "AT197", "AT198",
"AT199", "AT200", "AT201", "AT202", "AT203", "AT204", "AT205", "AT206",
"AT207", "AT208", "AT209", "AT210", "AT211", "AT212", "AT213", "AT214",
"AT215", "AT216", "AT217", "AT218", "AT219", "AT220", "AT221", "AT222",
"AT223", "RN", "RN195", "RN196", "RN197", "RN198", "RN199", "RN200", "RN201",
"RN202", "RN203", "RN204", "RN205", "RN206", "RN207", "RN208", "RN209",
"RN210", "RN211", "RN212", "RN213", "RN214", "RN215", "RN216", "RN217",
"RN218", "RN219", "RN220", "RN221", "RN222", "RN223", "RN224", "RN225",
"RN226", "RN227", "RN228", "FR", "FR199", "FR200", "FR201", "FR202", "FR203",
"FR204", "FR205", "FR206", "FR207", "FR208", "FR209", "FR210", "FR211",
"FR212", "FR213", "FR214", "FR215", "FR216", "FR217", "FR218", "FR219",
"FR220", "FR221", "FR222", "FR223", "FR224", "FR225", "FR226", "FR227",
"FR228", "FR229", "FR230", "FR231", "FR232", "RA", "RA202", "RA203", "RA204",
"RA205", "RA206", "RA207", "RA208", "RA209", "RA210", "RA211", "RA212",
"RA213", "RA214", "RA215", "RA216", "RA217", "RA218", "RA219", "RA220",
"RA221", "RA222", "RA223", "RA224", "RA225", "RA226", "RA227", "RA228",
"RA229", "RA230", "RA231", "RA232", "RA233", "RA234", "AC", "AC206", "AC207",
"AC208", "AC209", "AC210", "AC211", "AC212", "AC213", "AC214", "AC215",
"AC216", "AC217", "AC218", "AC219", "AC220", "AC221", "AC222", "AC223",
"AC224", "AC225", "AC226", "AC227", "AC228", "AC229", "AC230", "AC231",
"AC232", "AC233", "AC234", "AC235", "AC236", "TH", "TH209", "TH210", "TH211",
"TH212", "TH213", "TH214", "TH215", "TH216", "TH217", "TH218", "TH219",
"TH220", "TH221", "TH222", "TH223", "TH224", "TH225", "TH226", "TH227",
"TH228", "TH229", "TH230", "TH231", "TH232", "TH233", "TH234", "TH235",
"TH236", "TH237", "TH238", "PA", "PA212", "PA213", "PA214", "PA215", "PA216",
"PA217", "PA218", "PA219", "PA220", "PA221", "PA222", "PA223", "PA224",
"PA225", "PA226", "PA227", "PA228", "PA229", "PA230", "PA231", "PA232",
"PA233", "PA234", "PA235", "PA236", "PA237", "PA238", "PA239", "PA240", "U",
"U217", "U218", "U219", "U220", "U221", "U222", "U223", "U224", "U225", "U226",
"U227", "U228", "U229", "U230", "U231", "U232", "U233", "U234", "U235", "U236",
"U237", "U238", "U239", "U240", "U241", "U242", "NP", "NP225", "NP226",
"NP227", "NP228", "NP229", "NP230", "NP231", "NP232", "NP233", "NP234",
"NP235", "NP236", "NP237", "NP238", "NP239", "NP240", "NP241", "NP242",
"NP243", "NP244", "PU", "PU228", "PU229", "PU230", "PU231", "PU232", "PU233",
"PU234", "PU235", "PU236", "PU237", "PU238", "PU239", "PU240", "PU241",
"PU242", "PU243", "PU244", "PU245", "PU246", "PU247", "AM", "AM231", "AM232",
"AM233", "AM234", "AM235", "AM236", "AM237", "AM238", "AM239", "AM240",
"AM241", "AM242", "AM243", "AM244", "AM245", "AM246", "AM247", "AM248",
"AM249", "CM", "CM233", "CM234", "CM235", "CM236", "CM237", "CM238", "CM239",
"CM240", "CM241", "CM242", "CM243", "CM244", "CM245", "CM246", "CM247",
"CM248", "CM249", "CM250", "CM251", "CM252", "BK", "BK235", "BK236", "BK237",
"BK238", "BK239", "BK240", "BK241", "BK242", "BK243", "BK244", "BK245",
"BK246", "BK247", "BK248", "BK249", "BK250", "BK251", "BK252", "BK253",
"BK254", "CF", "CF237", "CF238", "CF239", "CF240", "CF241", "CF242", "CF243",
"CF244", "CF245", "CF246", "CF247", "CF248", "CF249", "CF250", "CF251",
"CF252", "CF253", "CF254", "CF255", "CF256", "ES", "ES240", "ES241", "ES242",
"ES243", "ES244", "ES245", "ES246", "ES247", "ES248", "ES249", "ES250",
"ES251", "ES252", "ES253", "ES254", "ES255", "ES256", "ES257", "ES258", "FM",
"FM242", "FM243", "FM244", "FM245", "FM246", "FM247", "FM248", "FM249",
"FM250", "FM251", "FM252", "FM253", "FM254", "FM255", "FM256", "FM257",
"FM258", "FM259", "FM260", "MD", "MD245", "MD246", "MD247", "MD248", "MD249",
"MD250", "MD251", "MD252", "MD253", "MD254", "MD255", "MD256", "MD257",
"MD258", "MD259", "MD260", "MD261", "MD262", "NO", "NO248", "NO249", "NO250",
"NO251", "NO252", "NO253", "NO254", "NO255", "NO256", "NO257", "NO258",
"NO259", "NO260", "NO261", "NO262", "NO263", "NO264", "LR", "LR251", "LR252",
"LR253", "LR254", "LR255", "LR256", "LR257", "LR258", "LR259", "LR260",
"LR261", "LR262", "LR263", "LR264", "LR265", "LR266", "RF", "RF253", "RF254",
"RF255", "RF256", "RF257", "RF258", "RF259", "RF260", "RF261", "RF262",
"RF263", "RF264", "RF265", "RF266", "RF267", "RF268", "DB", "DB255", "DB256",
"DB257", "DB258", "DB259", "DB260", "DB261", "DB262", "DB263", "DB264",
"DB265", "DB266", "DB267", "DB268", "DB269", "DB270", "SG", "SG258", "SG259",
"SG260", "SG261", "SG262", "SG263", "SG264", "SG265", "SG266", "SG267",
"SG268", "SG269", "SG270", "SG271", "SG272", "SG273", "BH", "BH260", "BH261",
"BH262", "BH263", "BH264", "BH265", "BH266", "BH267", "BH268", "BH269",
"BH270", "BH271", "BH272", "BH273", "BH274", "BH275", "HS", "HS263", "HS264",
"HS265", "HS266", "HS267", "HS268", "HS269", "HS270", "HS271", "HS272",
"HS273", "HS274", "HS275", "HS276", "HS277", "MT", "MT265", "MT266", "MT267",
"MT268", "MT269", "MT270", "MT271", "MT272", "MT273", "MT274", "MT275",
"MT276", "MT277", "MT278", "MT279", "DS", "DS267", "DS268", "DS269", "DS270",
"DS271", "DS272", "DS273", "DS274", "DS275", "DS276", "DS277", "DS278",
"DS279", "DS280", "DS281", "RG", "RG272", "RG273", "RG274", "RG275", "RG276",
"RG277", "RG278", "RG279", "RG280", "RG281", "RG282", "RG283", "UUB",
"UUB277", "UUB278", "UUB279", "UUB280", "UUB281", "UUB282", "UUB283",
"UUB284", "UUB285", "UUT", "UUT283", "UUT284", "UUT285", "UUT286", "UUT287",
"UUQ", "UUQ285", "UUQ286", "UUQ287", "UUQ288", "UUQ289", "UUP", "UUP287",
"UUP288", "UUP289", "UUP290", "UUP291", "UUH", "UUH289", "UUH290", "UUH291",
"UUH292", "UUS", "UUS291", "UUS292", "UUO", "UUO293"]
_temp_iso_mass = [
1.00782503207, 1.00782503207, 2.01410177785, 2.01410177785, 3.01604927767,
3.01604927767, 4.027806424, 5.035311488, 6.044942594, 7.052749,
4.00260325415, 3.01602931914, 4.00260325415, 5.012223624, 6.018889124,
7.028020618, 8.033921897, 9.043950286, 10.052398837, 7.016004548, 3.030775,
4.027185558, 5.0125378, 6.015122794, 7.016004548, 8.022487362, 9.026789505,
10.035481259, 11.043797715, 12.053780, 9.012182201, 5.040790, 6.019726317,
7.016929828, 8.005305103, 9.012182201, 10.013533818, 11.021657749,
12.026920737, 13.035693007, 14.04289292, 15.053460, 16.061920, 11.009305406,
6.046810, 7.029917901, 8.024607233, 9.013328782, 10.012936992, 11.009305406,
12.014352104, 13.017780217, 14.025404009, 15.031103021, 16.039808829,
17.046989906, 18.056170, 19.063730, 12, 8.037675025, 9.031036689,
10.016853228, 11.011433613, 12, 13.00335483778, 14.0032419887, 15.010599256,
16.014701252, 17.022586116, 18.026759354, 19.034805018, 20.040319754,
21.049340, 22.057200, 14.00307400478, 10.041653674, 11.026090956,
12.018613197, 13.005738609, 14.00307400478, 15.00010889823, 16.006101658,
17.008450261, 18.014078959, 19.017028697, 20.023365807, 21.02710824,
22.034394934, 23.041220, 24.051040, 25.060660, 15.99491461956,
12.034404895, 13.024812213, 14.00859625, 15.003065617, 15.99491461956,
16.999131703, 17.999161001, 19.00358013, 20.004076742, 21.008655886,
22.009966947, 23.015687659, 24.020472917, 25.029460, 26.038340, 27.048260,
28.057810, 18.998403224, 14.035060, 15.018009103, 16.011465724,
17.002095237, 18.000937956, 18.998403224, 19.999981315, 20.999948951,
22.002998815, 23.003574631, 24.008115485, 25.012101747, 26.019615555,
27.026760086, 28.035670, 29.043260, 30.052500, 31.060429, 19.99244017542,
16.025761262, 17.017671504, 18.005708213, 19.001880248, 19.99244017542,
20.993846684, 21.991385113, 22.994466904, 23.993610779, 24.997736888,
26.000461206, 27.007589903, 28.012071575, 29.019385933, 30.024801045,
31.033110, 32.040020, 33.049380, 34.057028, 22.98976928087, 18.025969,
19.013877499, 20.007351328, 20.997655206, 21.994436425, 22.98976928087,
23.990962782, 24.989953968, 25.992633, 26.994076788, 27.998938, 29.002861,
30.008976, 31.013585452, 32.02046656, 33.026719756, 34.035170, 35.042493,
36.051480, 37.059340, 23.985041699, 19.03547, 20.018862545, 21.01171291,
21.999573843, 22.994123669, 23.985041699, 24.985836917, 25.982592929,
26.984340585, 27.983876825, 28.9886, 29.990434, 30.996546, 31.998975,
33.005254, 34.009456424, 35.017340, 36.023000, 37.031400, 38.037570,
39.046772, 40.053930, 26.981538627, 21.028040, 22.019520, 23.007267432,
23.999938865, 24.990428095, 25.986891692, 26.981538627, 27.981910306,
28.980445046, 29.982960256, 30.983946619, 31.988124489, 32.990843336,
33.996851837, 34.999860235, 36.006207204, 37.01067782, 38.017231021,
39.02297, 40.031450, 41.038330, 42.046890, 27.97692653246, 22.034530,
23.025520, 24.011545616, 25.004105574, 25.992329921, 26.986704905,
27.97692653246, 28.9764947, 29.973770171, 30.975363226999998,
31.974148082, 32.97800022, 33.978575524, 34.984583575, 35.986599477,
36.99293608, 37.995633601, 39.002070013, 40.005869121, 41.01456,
42.019790, 43.028660, 44.035260, 30.973761629, 24.034350, 25.020260,
26.011780, 26.999230236, 27.992314761, 28.981800606, 29.978313789,
30.973761629, 31.973907274, 32.971725543, 33.973636257, 34.973314117,
35.97825968, 36.979608946, 37.984156827, 38.986179475, 39.991296951,
40.994335435, 42.001007913, 43.00619, 44.012990, 45.019220, 46.027380,
31.972070999, 26.027880, 27.018833, 28.004372763, 28.996608049,
29.984903249, 30.979554728, 31.972070999, 32.971458759, 33.967866902,
34.969032161, 35.96708076, 36.971125567, 37.971163317, 38.975134306,
39.975451728, 40.979582149, 41.981022419, 42.98715479, 43.99021339,
44.996508112, 46.000750, 47.008590, 48.014170, 49.023619, 34.968852682,
28.028510, 29.014110, 30.004770, 30.992413086, 31.985689901, 32.977451887,
33.973762819, 34.968852682, 35.968306981, 36.965902591, 37.968010425,
38.968008164, 39.970415472, 40.970684525, 41.973254804, 42.974054403,
43.978281071, 44.980286886, 45.98421004, 46.988710, 47.994950, 49.000320,
50.007840, 51.014490, 39.96238312251, 30.021560, 31.012123, 31.997637984,
32.989925709, 33.980271244, 34.975257585, 35.967545105, 36.96677632,
37.962732394, 38.964313231, 39.96238312251, 40.964500611, 41.963045736,
42.965636056, 43.964924033, 44.968039956, 45.968094129, 46.972186792,
47.974540, 48.980520, 49.984430, 50.991630, 51.996780, 53.004940,
38.963706679, 32.021920, 33.007260, 33.998410, 34.988009692, 35.981292235,
36.973375889, 37.969081184, 38.963706679, 39.963998475, 40.961825762,
41.96240281, 42.96071554, 43.961556804, 44.960699493, 45.961976864,
46.961678473, 47.965513535, 48.967450928, 49.972783355, 50.976380,
51.982610, 52.987120, 53.994200, 54.999710, 39.962590983, 34.014120,
35.004940, 35.993087063, 36.985870269, 37.976318452, 38.970719725,
39.962590983, 40.962278062, 41.958618014, 42.958766628, 43.955481754,
44.956186566, 45.953692587, 46.954546006, 47.952534177, 48.955674148,
49.957518962, 50.961499214, 51.9651, 52.970050, 53.974350, 54.980550,
55.985570, 56.992356, 44.955911909, 36.014920, 37.003050, 37.994700,
38.984790002, 39.977967407, 40.969251125, 41.965516429, 42.961150658,
43.959402752, 44.955911909, 45.95517189, 46.952407508, 47.952231468,
48.950023975, 49.952187685, 50.953603368, 51.956675468, 52.959610,
53.963264561, 54.968243949, 55.972870, 56.977790, 57.983710, 58.989220,
59.995710, 47.947946281, 38.009770, 39.001610, 39.990498838, 40.983145,
41.973030902, 42.968522499, 43.959690069, 44.958125616, 45.952631555,
46.951763088, 47.947946281, 48.947869982, 49.944791194, 50.946614955,
51.946897311, 52.949727171, 53.951052401, 54.955265056, 55.958199639,
56.963989137, 57.966970, 58.972930, 59.976760, 60.983200, 61.987490,
62.994420, 50.943959507, 40.011090, 40.999780, 41.991230, 42.980650,
43.97411, 44.965775808, 45.960200481, 46.95490894, 47.952253707,
48.948516101, 49.947158485, 50.943959507, 51.944775479, 52.944337979,
53.946439854, 54.947233701, 55.950530966, 56.952561432, 57.956834136,
58.960207407, 59.965026862, 60.968480, 61.973780, 62.977550, 63.983470,
64.987920, 51.940507472, 42.006430, 42.997710, 43.985549, 44.97964,
45.968358635, 46.962900046, 47.954031716, 48.951335721, 49.946044205,
50.944767431, 51.940507472, 52.940649386, 53.938880395, 54.940839672,
55.940653139, 56.943613013, 57.944353129, 58.948586367, 59.950076033,
60.954717204, 61.95661319, 62.961860, 63.964410, 64.970160, 65.973380,
66.979550, 54.938045141, 44.006870, 44.994510, 45.986720, 46.976100,
47.96852, 48.959618005, 49.95423823, 50.948210787, 51.945565464,
52.941290117, 53.940358854, 54.938045141, 55.93890491, 56.938285378,
57.939981549, 58.940440237, 59.942911246, 60.944652638, 61.94842822,
62.95023999, 63.95424909, 64.956336065, 65.961080, 66.964140, 67.969300,
68.972840, 55.934937475, 45.014578, 46.000810, 46.992890, 47.980504,
48.973610, 49.962988982, 50.956819538, 51.948113875, 52.945307942,
53.939610501, 54.938293357, 55.934937475, 56.935393969, 57.933275558,
58.934875464, 59.934071683, 60.936745281, 61.936767442, 62.940369091,
63.941201265, 64.94538027, 65.946780638, 66.950947244, 67.9537, 68.958780,
69.961460, 70.966720, 71.969620, 58.933195048, 47.011490, 48.001760,
48.989720, 49.981540, 50.970720, 51.963590, 52.954218896, 53.948459635,
54.941999029, 55.939839278, 56.936291373, 57.935752814, 58.933195048,
59.933817059, 60.932475763, 61.934050563, 62.933611611, 63.935809908,
64.93647846, 65.939762004, 66.940889529, 67.944873058, 68.94632, 69.951,
70.9529, 71.957810, 72.960240, 73.965380, 74.968330, 57.935342907,
48.019750, 49.009660, 49.995930, 50.987720, 51.975680, 52.968470,
53.957905495, 54.951330251, 55.942132022, 56.939793526, 57.935342907,
58.934346705, 59.930786372, 60.931056033, 61.928345115, 62.929669374,
63.927965959, 64.930084304, 65.929139334, 66.931569414, 67.931868789,
68.935610269, 69.9365, 70.940736283, 71.942092682, 72.946470, 73.948070,
74.952870, 75.955330, 76.960550, 77.963180, 62.929597474, 51.997180,
52.985550, 53.976710, 54.966050, 55.958560, 56.949211078, 57.944538499,
58.939498028, 59.93736503, 60.933457821, 61.932583745, 62.929597474,
63.929764183, 64.927789485, 65.928868813, 66.927730314, 67.929610889,
68.929429269, 69.932392343, 70.932676833, 71.935820307, 72.936675282,
73.939874862, 74.9419, 75.945275026, 76.947850, 77.951960, 78.954560,
79.960870, 63.929142222, 53.992950, 54.983980, 55.972380, 56.964788,
57.954591555, 58.949263764, 59.941827035, 60.939510635, 61.934329764,
62.933211566, 63.929142222, 64.929240984, 65.926033419, 66.927127345,
67.924844154, 68.926550281, 69.925319274, 70.927721599, 71.926857951,
72.929779104, 73.929458609, 74.932936741, 75.93329357, 76.936958967,
77.938440216, 78.942652, 79.944342348, 80.950480, 81.954420, 82.961030,
68.925573587, 55.994910, 56.982930, 57.974250, 58.963370, 59.957060,
60.949446287, 61.944175238, 62.939294196, 63.936838747, 64.932734754,
65.93158901, 66.928201703, 67.927980084, 68.925573587, 69.926021972,
70.924701349, 71.926366268, 72.925174682, 73.926945762, 74.926500246,
75.928827626, 76.9291543, 77.93160818, 78.93289326, 79.936515781,
80.937752355, 81.942990, 82.946980, 83.952650, 84.957000, 85.963120,
73.921177767, 57.991010, 58.981750, 59.970190, 60.963790, 61.954650,
62.949640, 63.941653, 64.939436406, 65.933843453, 66.93273407,
67.92809424, 68.927964533, 69.924247381, 70.924950954, 71.922075815,
72.923458945, 73.921177767, 74.922858948, 75.921402557, 76.923548591,
77.922852739, 78.925400995, 79.925372392, 80.928820467, 81.929549725,
82.934620, 83.937470, 84.943030, 85.946490, 86.952510, 87.956910,
88.963830, 74.921596478, 59.993130, 60.980620, 61.973200, 62.963690,
63.957572, 64.949564, 65.94471, 66.939186071, 67.936769069, 68.932273675,
69.930924826, 70.927112428, 71.926752283, 72.923824844, 73.923928692,
74.921596478, 75.922394021, 76.920647286, 77.921827281, 78.920947934,
79.922533816, 80.922132287, 81.924504067, 82.924980024, 83.929058,
84.932020, 85.936500, 86.939900, 87.944940, 88.949390, 89.955500,
90.960430, 91.966800, 79.916521271, 64.964660, 65.955210, 66.950090,
67.941798, 68.939557817, 69.933390644, 70.932241822, 71.927112352,
72.926765345, 73.922476436, 74.922523368, 75.919213597, 76.919914038,
77.91730909, 78.918499098, 79.916521271, 80.917992474, 81.916699401,
82.919118473, 83.918462354, 84.922245053, 85.924271579, 86.928521358,
87.931423998, 88.936450, 89.939960, 90.945960, 91.949920, 92.956290,
93.960490, 78.918337087, 66.964790, 67.958516, 68.950106, 69.944792,
70.93874, 71.936644572, 72.931691524, 73.929891034, 74.925776207,
75.924541469, 76.921379082, 77.921145706, 78.918337087, 79.918529296,
80.916290563, 81.916804119, 82.915180421, 83.916478974, 84.915608403,
85.918797577, 86.920711324, 87.924065926, 88.926385334, 89.930627737,
90.933968095, 91.939258714, 92.943050, 93.948680, 94.952870, 95.958530,
96.962800, 85.910610729, 68.965180, 69.955259, 70.949625738, 71.942092038,
72.939289195, 73.933084369, 74.930945746, 75.925910078, 76.92467,
77.920364783, 78.920082431, 79.916378965, 80.916592015, 81.9134836,
82.914136099, 83.911506687, 84.912527331, 85.910610729, 86.913354862,
87.914446969, 88.917630581, 89.919516555, 90.923445215, 91.92615621,
92.931274357, 93.934360, 94.939840, 95.943070, 96.948560, 97.951910,
98.957600, 99.961140, 84.911789737, 70.965320, 71.959080, 72.950561,
73.944264751, 74.93857, 75.935072226, 76.930408, 77.928141, 78.92398946,
79.92251925, 80.918995913, 81.918208598, 82.915109701, 83.914384821,
84.911789737, 85.911167419, 86.909180526, 87.911315588, 88.912278016,
89.914801694, 90.916536958, 91.9197289, 92.922041876, 93.926404946,
94.929302889, 95.934272637, 96.937351916, 97.941790668, 98.945379283,
99.949870, 100.953196445, 101.958870, 87.905612124, 72.965970,
73.956310, 74.949949568, 75.941766782, 76.937944782, 77.93218,
78.929708, 79.924521013, 80.923211846, 81.918401639, 82.917556701,
83.913425275, 84.912932803, 85.909260204, 86.908877124, 87.905612124,
88.907450675, 89.907737888, 90.910203095, 91.911037858, 92.914025634,
93.915361312, 94.919358766, 95.921696802, 96.926152923, 97.928452934,
98.933240926, 99.935351911, 100.940517888, 101.943018987, 102.948950,
103.952330, 104.958580, 88.905848295, 75.958450, 76.949645, 77.943610,
78.937351634, 79.93428, 80.929127468, 81.926792451, 82.922354243,
83.920388264, 84.916433039, 85.914885576, 86.91087573, 87.909501146,
88.905848295, 89.907151886, 90.907304791, 91.908949143, 92.909582713,
93.911595245, 94.912820621, 95.915891343, 96.918133995, 97.92220302,
98.924636204, 99.927756586, 100.93031385, 101.933555695, 102.936730,
103.941050, 104.944870, 105.949790, 106.954140, 107.959480,
89.904704416, 77.955230, 78.949160, 79.9404, 80.937210026, 81.931087,
82.928653801, 83.923250, 84.921471182, 85.916473591, 86.914816252,
87.910226904, 88.9088895, 89.904704416, 90.905645767, 91.905040847,
92.906476006, 93.906315192, 94.9080426, 95.908273386, 96.910953109,
97.912734892, 98.916512106, 99.917761889, 100.921140415, 101.922981285,
102.926599606, 103.928780, 104.933050, 105.935910, 106.940750,
107.943960, 108.949240, 109.952870, 92.906378058, 80.949030,
81.943130, 82.936705382, 83.933570, 84.927912447, 85.925038326,
86.920361108, 87.918332163, 88.913418245, 89.911264845,
90.906996243, 91.907193888, 92.906378058, 93.907283888, 94.906835792,
95.908100647, 96.908098556, 97.910328412, 98.911618375, 99.914181619,
100.915252025, 101.918037614, 102.919143842, 103.922464701,
104.923936545, 105.927970, 106.930310, 107.934840, 108.937630,
109.942440, 110.945650, 111.950830, 112.954700, 97.905408169, 82.948740,
83.940090, 84.936550, 85.930695904, 86.927326502, 87.921953241,
88.919480009, 89.913936896, 90.911750194, 91.906810991, 92.90681261,
93.905088269, 94.905842129, 95.904679477, 96.906021465, 97.905408169,
98.90771187, 99.907477336, 100.910347001, 101.91029736, 102.913207142,
103.913763625, 104.91697461, 105.918136802, 106.921692604, 107.923453,
108.927810, 109.929730, 110.934410, 111.936840, 112.941880, 113.944920,
114.950290, 98.906254747, 84.948830, 85.942880, 86.936530, 87.932678,
88.927167, 89.923556564, 90.918427639, 91.915260166, 92.910248984,
93.909657002, 94.907657084, 95.907871383, 96.906365358, 97.907215966,
98.906254747, 99.90765778, 100.907314659, 101.909215019, 102.909181351,
103.911447454, 104.911660566, 105.914357927, 106.915079572, 107.918461226,
108.919982665, 109.923820483, 110.92569283, 111.929146493, 112.931590,
113.935880, 114.938690, 115.943370, 116.946480, 117.951480, 101.904349312,
86.949180, 87.940260, 88.936110, 89.929890, 90.926292, 91.920120,
92.917052034, 93.911359711, 94.910412929, 95.907597835, 96.9075547,
97.905287132, 98.905939302, 99.904219476, 100.905582087, 101.904349312,
102.906323847, 103.905432701, 104.907752866, 105.907329433,
106.909905089, 107.910173465, 108.913203233, 109.914136041, 110.917696,
111.918965, 112.922487194, 113.924281, 114.928686173, 115.930810,
116.935580, 117.937820, 118.942840, 119.945310, 102.905504292,
88.948837, 89.942870, 90.936550, 91.931980, 92.925740, 93.921698,
94.91589874, 95.914460631, 96.911336797, 97.910708158, 98.908132104,
99.90812155, 100.906163625, 101.906843196, 102.905504292, 103.906655518,
104.905693821, 105.907287135, 106.906748423, 107.908728018, 108.908737289,
109.911136411, 110.911585913, 111.914394159, 112.915530627, 113.918806,
114.920334, 115.924062, 116.925980, 117.930070, 118.932110, 119.936410,
120.938720, 121.943210, 105.903485715, 90.949110, 91.940420, 92.935910,
93.928770, 94.924690, 95.918164359, 96.916479073, 97.912720902,
98.911767833, 99.908505886, 100.908289242, 101.905608544, 102.906087307,
103.904035834, 104.90508492, 105.903485715, 106.905133481, 107.903891701,
108.905950451, 109.905153254, 110.907670734, 111.907314058, 112.910152908,
113.910362638, 114.913683824, 115.914158662, 116.917841338, 117.9189843,
118.923110, 119.924691878, 120.928870, 121.930550, 122.934930, 123.936880,
106.90509682, 92.949780, 93.942780, 94.935480, 95.930680, 96.923972412,
97.921566201, 98.917597178, 99.916104255, 100.912802233, 101.911685,
102.90897272, 103.908629157, 104.906528661, 105.906668921, 106.90509682,
107.905955556, 108.904752292, 109.906107231, 110.905291157, 111.907004814,
112.906566579, 113.908803704, 114.908762698, 115.911359933, 116.911684562,
117.914582768, 118.915665059, 119.918787384, 120.919848046, 121.923530,
122.924900, 123.928640, 124.930430, 125.934500, 126.936770, 127.941170,
128.943690, 129.950448, 113.90335854, 94.949870, 95.939770, 96.934940,
97.927395546, 98.925010, 99.920289525, 100.918681538, 101.914462258,
102.913419246, 103.909849475, 104.909467905, 105.90645941, 106.906617928,
107.904183683, 108.904982293, 109.90300207, 110.904178107, 111.902757809,
112.904401662, 113.90335854, 114.905430969, 115.904755809, 116.907218618,
117.90691453, 118.909921597, 119.909850129, 120.912977363, 121.913332432,
122.917002999, 123.917647616, 124.92124637, 125.922353321, 126.926443864,
127.927762285, 128.932150, 129.933901937, 130.940670, 131.945550,
114.903878484, 96.949540, 97.942140, 98.934220, 99.931110851,
100.926340, 101.924090238, 102.919914188, 103.918296171, 104.91467354,
105.913465411, 106.9102951, 107.90969818, 108.907150507, 109.907165274,
110.905103278, 111.905532331, 112.904057761, 113.904913876,
114.903878484, 115.905259703, 116.904513564, 117.906354367, 118.90584535,
119.907959608, 120.907845822, 121.91027601, 122.910438276, 123.913175231,
124.913600588, 125.916463857, 126.917353091, 127.920172328, 128.92169698,
129.924970049, 130.926851767, 131.93299026, 132.937810, 133.944150,
134.949330, 119.902194676, 98.949330, 99.939044343, 100.936060,
101.930295324, 102.928100, 103.923143223, 104.921349437, 105.91688062,
106.915644329, 107.911925378, 108.911283214, 109.907842791, 110.90773446,
111.904818207, 112.905170577, 113.902778869, 114.903342397, 115.90174053,
116.902951656, 117.901603167, 118.90330763, 119.902194676, 120.90423548,
121.903439046, 122.905720838, 123.905273946, 124.907784125, 125.90765328,
126.910360024, 127.910536624, 128.913479, 129.913967295, 130.916999769,
131.917815713, 132.923829249, 133.928291765, 134.934730, 135.939340,
136.945990, 120.903815686, 102.939690, 103.936472, 104.931486348,
105.928791, 106.924150, 107.922160, 108.918132426, 109.916753, 110.913163,
111.912398009, 112.909371672, 113.909269, 114.906598, 115.906793629,
116.904835941, 117.905528731, 118.903942009, 119.905072427, 120.903815686,
121.905173651, 122.90421397, 123.905935743, 124.905253818, 125.90724748,
126.906923609, 127.909169001, 128.909148442, 129.911656324, 130.911982275,
131.914466896, 132.91525163, 133.920379744, 134.925165771, 135.930350,
136.935310, 137.940790, 138.945980, 129.906224399, 104.943640,
105.937504237, 106.935006, 107.929444597, 108.927415515, 109.922407316,
110.921110692, 111.917013672, 112.915891, 113.912089, 114.911902,
115.90846, 116.908644719, 117.905827581, 118.906403645, 119.904020222,
120.904936424, 121.903043898, 122.904270029, 123.902817896, 124.904430731,
125.903311696, 126.905226336, 127.904463056, 128.906598238, 129.906224399,
130.908523864, 131.90855316, 132.910955306, 133.911368737, 134.916448592,
135.920101246, 136.925322954, 137.929220, 138.934730, 139.938850,
140.944650, 141.949080, 126.904472681, 107.943475, 108.938149417,
109.935242, 110.930276, 111.927970, 112.923640583, 113.921850, 114.918048,
115.916808633, 116.91365, 117.913074, 118.910074, 119.910048173,
120.907366811, 121.907589284, 122.905588965, 123.906209852, 124.904630164,
125.905624153, 126.904472681, 127.905809443, 128.904987722, 129.906674247,
130.906124609, 131.907997381, 132.907796939, 133.909744465, 134.910048121,
135.914653993, 136.91787084, 137.922349591, 138.926099478, 139.931000,
140.935030, 141.940180, 142.944560, 143.949990, 131.904153457, 109.944278068,
110.941602, 111.935623112, 112.933341174, 113.927980306, 114.92629392,
115.921581087, 116.920358735, 117.916178655, 118.915410688, 119.911784244,
120.911461829, 121.908367632, 122.90848191, 123.905893003, 124.906395464,
125.904273634, 126.905183723, 127.903531275, 128.904779435, 129.903508007,
130.905082362, 131.904153457, 132.905910722, 133.905394464, 134.907227495,
135.907218794, 136.911562125, 137.913954475, 138.918792936, 139.921640943,
140.926648049, 141.92970959, 142.935110, 143.938510, 144.944070, 145.947750,
146.953560, 132.905451932, 111.950301, 112.944493274, 113.941450, 114.935910,
115.933367, 116.928670701, 117.926559494, 118.922377304, 119.920677253,
120.917229209, 121.916113434, 122.912996036, 123.912257798, 124.90972827,
125.909451977, 126.907417525, 127.907748866, 128.906064426, 129.906708552,
130.905463926, 131.90643426, 132.905451932, 133.906718475, 134.905977008,
135.907311576, 136.907089473, 137.911016704, 138.913363999, 139.917282354,
140.920045752, 141.924298927, 142.92735175, 143.932076914, 144.93552617,
145.940289423, 146.944155008, 147.949218153, 148.952930, 149.958170,
150.962190, 137.905247237, 113.950675405, 114.947370, 115.941380,
116.938499, 117.933040, 118.930659661, 119.926044974, 120.924054499,
121.919904, 122.918781036, 123.915093603, 124.914472912, 125.911250177,
126.911093797, 127.908317698, 128.908679439, 129.906320811, 130.906941118,
131.905061288, 132.90600749, 133.904508383, 134.905688591, 135.904575945,
136.905827384, 137.905247237, 138.908841341, 139.910604505, 140.914411009,
141.91645341, 142.920626719, 143.922952853, 144.927627032, 145.930219572,
146.934945, 147.937720047, 148.942580, 149.945680, 150.950810, 151.954270,
152.959610, 138.906353267, 116.950068, 117.946730, 118.940990, 119.938070,
120.933010, 121.930710, 122.926240, 123.924574275, 124.920816034,
125.919512667, 126.916375448, 127.915585177, 128.912692815, 129.912368724,
130.91007, 131.910101145, 132.908218, 133.908514011, 134.906976844,
135.907635536, 136.906493598, 137.90711193, 138.906353267, 139.909477645,
140.910962152, 141.91407913, 142.91606272, 143.919599647, 144.921645401,
145.92579346, 146.928235284, 147.932228868, 148.934734, 149.938770,
150.941720, 151.946250, 152.949620, 153.954500, 154.958350, 139.905438706,
118.952760, 119.946640, 120.943420, 121.937910, 122.935400, 123.930410,
124.928440, 125.923971, 126.922731, 127.918911, 128.918102, 129.914736,
130.914422, 131.911460487, 132.91151502, 133.908924821, 134.909151396,
135.907172422, 136.907805577, 137.905991321, 138.906652651, 139.905438706,
140.90827627, 141.909244205, 142.91238591, 143.913647336, 144.917233135,
145.918759009, 146.922673954, 147.92443241, 148.928399883, 149.930408931,
150.933976196, 151.936540, 152.940580, 153.943420, 154.948040, 155.951260,
156.956340, 140.907652769, 120.955364, 121.951810, 122.945960, 123.942960,
124.937830, 125.935310, 126.930830, 127.928791, 128.925095, 129.92359,
130.920259, 131.919255, 132.916330532, 133.915711737, 134.913111745,
135.912691611, 136.910705455, 137.910754636, 138.908938399, 139.909075874,
140.907652769, 141.910044806, 142.910816926, 143.913305245, 144.9145117,
145.917644336, 146.918995992, 147.922135026, 148.923717651, 149.926672997,
150.928318618, 151.931499225, 152.933838905, 153.937518153, 154.940120,
155.944270, 156.947430, 157.951980, 158.955500, 141.907723297, 123.952230,
124.948880, 125.943220, 126.940500, 127.935390, 128.933188, 129.928506,
130.927247, 131.923321237, 132.922348, 133.918790181, 134.91818116,
135.914976035, 136.914567137, 137.911949961, 138.911978288, 139.909552,
140.909609854, 141.907723297, 142.90981429, 143.910087274, 144.912573636,
145.913116939, 146.916100441, 147.916893288, 148.920148842, 149.920890888,
150.923828929, 151.924682219, 152.927698232, 153.929477307, 154.932932,
155.935018114, 156.939030, 157.941600, 158.946090, 159.949090, 160.953880,
144.912749023, 125.957520, 126.951630, 127.948420, 128.943160, 129.940450,
130.935870, 131.933750, 132.929782, 133.928353, 134.924876, 135.923565829,
136.920479493, 137.919548281, 138.916804082, 139.916041789, 140.913555054,
141.912874471, 142.910932616, 143.912590843, 144.912749023, 145.914696305,
146.915138545, 147.917474618, 148.918334155, 149.920983561, 150.921206973,
151.923496795, 152.924116889, 153.926463943, 154.928101267, 155.931056736,
156.933039369, 157.936561407, 158.938970, 159.942990, 160.945860,
161.950290, 162.953680, 151.919732425, 127.958080, 128.954640, 129.948920,
130.946110, 131.940690, 132.938670, 133.933970, 134.93252, 135.928275527,
136.926971746, 137.923243961, 138.922296605, 139.918994687, 140.918476488,
141.915197641, 142.914628338, 143.911999478, 144.913410353, 145.9130409,
146.914897923, 147.914822674, 148.917184735, 149.917275539, 150.919932409,
151.919732425, 152.922097356, 153.922209273, 154.924640161, 155.925527887,
156.928358717, 157.929991317, 158.933211271, 159.935140, 160.938830,
161.941220, 162.945360, 163.948280, 164.952980, 152.921230339, 129.963569,
130.957753, 131.954370, 132.949240, 133.946510, 134.941820, 135.939600,
136.935570, 137.933709, 138.92979228, 139.928087607, 140.92493072,
141.923434945, 142.920297509, 143.918816823, 144.916265237, 145.917205817,
146.916746111, 147.918085895, 148.917931238, 149.919701819, 150.919850161,
151.921744534, 152.921230339, 153.922979237, 154.92289326, 155.924752249,
156.925423647, 157.927845302, 158.929088861, 159.931971, 160.933680,
161.937040, 162.939210, 163.942990, 164.945720, 165.949970, 166.953210,
157.924103912, 133.955370, 134.952570, 135.947340, 136.945020, 137.940120,
138.938240, 139.933674, 140.932126, 141.928116, 142.92674951, 143.922963,
144.921709252, 145.918310608, 146.91909442, 147.918114524, 148.919340915,
149.918658876, 150.920348482, 151.919790996, 152.921749543, 153.920865598,
154.922622022, 155.922122743, 156.923960135, 157.924103912, 158.926388658,
159.927054146, 160.929669211, 161.930984751, 162.933990, 163.935860,
164.939380, 165.941600, 166.945570, 167.948360, 168.952870, 158.925346757,
135.961380, 136.955980, 137.953160, 138.948290, 139.945805049, 140.941448,
141.938744, 142.935121, 143.933045, 144.929274, 145.927246584, 146.924044585,
147.924271701, 148.923245909, 149.923659686, 150.923102543, 151.924074438,
152.923434588, 153.924678019, 154.923505236, 155.924747213, 156.924024604,
157.925413137, 158.925346757, 159.927167606, 160.927569919, 161.929488234,
162.930647536, 163.933350838, 164.934880, 165.937991959, 166.940050,
167.943640, 168.946220, 169.950250, 170.953300, 163.929174751, 137.962490,
138.959540, 139.954010, 140.951350, 141.946366, 142.943830, 143.939254,
144.937425, 145.932845369, 146.9310915, 147.927149831, 148.927304787,
149.925585184, 150.926184601, 151.9247183, 152.92576467, 153.924424457,
154.925753775, 155.92428311, 156.925466095, 157.924409487, 158.925739214,
159.925197517, 160.926933364, 161.926798447, 162.928731159, 163.929174751,
164.931703333, 165.932806741, 166.935655462, 167.937128769, 168.940307614,
169.942390, 170.946200, 171.948760, 172.953000, 164.93032207, 139.968539,
140.963098, 141.959770, 142.954610, 143.951480, 144.947200, 145.944640,
146.940056, 147.937718, 148.933774771, 149.933496182, 150.931688142,
151.931713714, 152.930198789, 153.930601579, 154.929103491, 155.929839,
156.928256188, 157.928941007, 158.927711959, 159.928729478, 160.927854776,
161.929095504, 162.928733903, 163.930233507, 164.93032207, 165.932284162,
166.933132633, 167.935515708, 168.936872273, 169.939618929, 170.94146515,
171.944820, 172.947290, 173.951150, 174.954050, 165.930293061, 142.966340,
143.960380, 144.957390, 145.952000, 146.949490, 147.944550, 148.942306,
149.937913839, 150.937448903, 151.935050389, 152.935063492, 153.932783081,
154.933208949, 155.931064698, 156.931916, 157.929893474, 158.930684066,
159.929083292, 160.929995309, 161.928778264, 162.930032749, 163.929200229,
164.930726003, 165.930293061, 166.932048159, 167.932370224, 168.934590364,
169.935464312, 170.938029808, 171.939356113, 172.942400, 173.944230,
174.947770, 175.950080, 176.954050, 168.93421325, 144.970073, 145.966425,
146.960961, 147.957840, 148.952720, 149.949960, 150.94548349, 151.944422,
152.942012112, 153.941567808, 154.939199459, 155.938979933, 156.936973,
157.936979525, 158.934975, 159.935262801, 160.933549, 161.933994682,
162.932651124, 163.93356, 164.932435492, 165.933554131, 166.932851622,
167.934172776, 168.93421325, 169.935801397, 170.93642944, 171.938400044,
172.939603607, 173.942168605, 174.943836853, 175.946994685, 176.949040,
177.952640, 178.955340, 173.938862089, 147.967420, 148.964040, 149.958420,
150.955400769, 151.950288919, 152.949480, 153.946393928, 154.945782332,
155.942818215, 156.942627848, 157.939865617, 158.940050099, 159.937552344,
160.937901678, 161.93576821, 162.936334305, 163.934489416, 164.935279,
165.933882042, 166.934949605, 167.933896895, 168.935189802, 169.934761837,
170.936325799, 171.936381469, 172.938210787, 173.938862089, 174.94127645,
175.942571683, 176.945260822, 177.94664668, 178.950170, 179.952330,
180.956150, 174.940771819, 149.973228, 150.967577, 151.964120,
152.958767331, 153.957522, 154.954316216, 155.953032523, 156.9500983,
157.949313283, 158.946628776, 159.946033, 160.943572, 161.943277288,
162.941179, 163.941339, 164.939406724, 165.939859, 166.93827,
167.938739111, 168.937651439, 169.938474968, 170.937913136, 171.939085669,
172.938930602, 173.94033748, 174.940771819, 175.94268631, 176.943758055,
177.945954559, 178.947327443, 179.94988116, 180.951970, 181.955040,
182.957570, 183.960910, 179.946549953, 152.970690, 153.964860, 154.963390,
155.959364025, 156.958396, 157.954799366, 158.95399487, 159.950684379,
160.950274844, 161.947210498, 162.947089, 163.944367284, 164.944567,
165.94218, 166.9426, 167.940568, 168.941259, 169.939609, 170.940492,
171.939448301, 172.940513, 173.940046178, 174.941509181, 175.941408631,
176.943220651, 177.943698766, 178.945816145, 179.946549953, 180.949101246,
181.950554096, 182.953530439, 183.955446515, 184.958820, 185.960890,
186.964590, 187.966850, 180.947995763, 154.974592, 155.972303,
156.968192445, 157.966699, 158.963018173, 159.961486056, 160.958417,
161.957291859, 162.954330271, 163.953534, 164.950772514, 165.950512,
166.948093, 167.948047, 168.946011, 169.946175, 170.944476, 171.944895,
172.94375, 173.944454, 174.943737, 175.944857, 176.944472403,
177.945778221, 178.945929535, 179.947464831, 180.947995763, 181.950151849,
182.951372616, 183.954007966, 184.955559375, 185.958552023, 186.960530,
187.963700, 188.965830, 189.969230, 183.950931188, 157.974562, 158.972918,
159.968478805, 160.967357, 161.963497417, 162.962523542, 163.958954382,
164.958279949, 165.955027253, 166.954816014, 167.951808394, 168.95177879,
169.949228482, 170.949451, 171.947292, 172.947689, 173.946079, 174.946717,
175.945634, 176.946643, 177.945876236, 178.947070447, 179.946704459,
180.948197248, 181.948204156, 182.950222951, 183.950931188, 184.953419264,
185.954364127, 186.957160466, 187.958489105, 188.961912868, 189.963181378,
190.966600, 191.968170, 186.955753109, 159.982115, 160.977589119,
161.976002, 162.972080535, 163.970323, 164.967088557, 165.965808,
166.962601, 167.961572608, 168.958791096, 169.958220071, 170.955716,
171.955422961, 172.953243, 173.953115, 174.951381, 175.951623, 176.950328,
177.950989, 178.949987641, 179.950789084, 180.950067916, 181.95121008,
182.950819841, 183.952520756, 184.952954982, 185.954986084, 186.955753109,
187.958114438, 188.959229007, 189.961817977, 190.963125242, 191.965960,
192.967470, 193.970420, 191.96148069, 161.984431, 162.982690,
163.978035649, 164.976762, 165.972690753, 166.971547969, 167.967803678,
168.96701927, 169.963577028, 170.963184819, 171.960023303, 172.959808409,
173.957062202, 174.956945835, 175.954806, 176.954965324, 177.953251241,
178.953816017, 179.952378803, 180.953244, 181.952110186, 182.953126102,
183.952489071, 184.954042265, 185.953838158, 186.955750458, 187.955838228,
188.95814747, 189.958447048, 190.960929718, 191.96148069, 192.964151563,
193.965182083, 194.968126661, 195.969639333, 192.96292643, 163.992201,
164.987520, 165.985824, 166.981665156, 167.979881, 168.976294942, 169.974965,
170.971626042, 171.970456, 172.967501739, 173.966861045, 174.964112895,
175.963648688, 176.9613015, 177.961082, 178.959122266, 179.959229446,
180.957625297, 181.958076296, 182.956846458, 183.957476, 184.956698,
185.957946104, 186.957363361, 187.958853121, 188.958718935, 189.960545968,
190.960594046, 191.962605012, 192.96292643, 193.965078378, 194.965979573,
195.968396542, 196.969653285, 197.972280, 198.973804583, 194.964791134,
165.994855, 166.992979, 167.988150742, 168.986715, 169.982495289,
170.981244542, 171.977347128, 172.976444754, 173.972818767, 174.972420552,
175.968944622, 176.968469481, 177.965648724, 178.965363404, 179.963031477,
180.963097285, 181.961170656, 182.961596703, 183.959922251, 184.960619,
185.959350813, 186.960587, 187.959395391, 188.960833686, 189.959931655,
190.961676661, 191.961038005, 192.962987401, 193.962680253, 194.964791134,
195.964951521, 196.967340182, 197.96789279, 198.970593094, 199.971440677,
200.974512868, 201.975740, 196.966568662, 168.998080, 169.996122,
170.991878881, 171.990035, 172.98623738, 173.984761, 174.981274107,
175.980099, 176.976864908, 177.97603192, 178.973212812, 179.972521124,
180.970079048, 181.969617874, 182.967593034, 183.967451524, 184.965789411,
185.965952703, 186.964567541, 187.965323661, 188.963948286, 189.964700339,
190.963704225, 191.964812953, 192.964149715, 193.96536525, 194.96503464,
195.966569813, 196.966568662, 197.968242303, 198.968765193, 199.970725647,
200.97165724, 201.973805838, 202.975154542, 203.977724, 204.979870,
201.970643011, 171.003760, 171.998832686, 172.997242, 173.992863695,
174.99142327, 175.98735458, 176.986279158, 177.982483143, 178.981833861,
179.978266394, 180.977819311, 181.974689964, 182.974449841, 183.971713051,
184.971899086, 185.96936179, 186.969814236, 187.967577049, 188.968190034,
189.966322449, 190.967157105, 191.965634327, 192.966665421, 193.965439409,
194.966720113, 195.965832649, 196.967212908, 197.966769032, 198.968279932,
199.968326004, 200.970302268, 201.970643011, 202.972872484, 203.973493933,
204.976073386, 205.977514066, 206.982588545, 207.985940, 208.991040,
209.994510, 204.974427541, 176.000590, 176.996427286, 177.994897,
178.991089082, 179.989906, 180.986257447, 181.985667104, 182.982192802,
183.981873122, 184.978791305, 185.978325, 186.975905897, 187.976009782,
188.973588428, 189.973877149, 190.971786154, 191.972225, 192.970672,
193.9712, 194.969774335, 195.970481151, 196.969574511, 197.970483495,
198.969877, 199.970962672, 200.970818891, 201.972105808, 202.97234422,
203.973863522, 204.974427541, 205.97611032, 206.977419429, 207.9820187,
208.985358952, 209.990073689, 210.993477, 211.998228, 207.976652071,
178.003830191, 179.002150, 179.997918173, 180.996623958, 181.992671842,
182.991874629, 183.988142339, 184.987609944, 185.984238945, 186.98391837,
187.980874338, 188.980807, 189.978081517, 190.978265, 191.975785171,
192.976173234, 193.97401207, 194.97454205, 195.972774109, 196.973431124,
197.972033959, 198.97291665, 199.971826675, 200.972884511, 201.972159133,
202.973390521, 203.973043589, 204.974481755, 205.974465278, 206.975896887,
207.976652071, 208.98109012, 209.984188527, 210.988736964, 211.991897543,
212.996581499, 213.999805408, 215.004807, 208.980398734, 184.001124,
184.997625, 185.996597625, 186.993157835, 187.992265154, 188.989199012,
189.988295129, 190.985786119, 191.985457954, 192.982959771, 193.98283396,
194.980650737, 195.980666509, 196.978864454, 197.979206, 198.977671961,
199.978131829, 200.977009036, 201.977742324, 202.976876001, 203.977812736,
204.977389366, 205.97849913, 206.978470679, 207.979742196, 208.980398734,
209.984120371, 210.98726946, 211.991285724, 212.994384666, 213.998711539,
215.001769776, 216.006305943, 217.009470, 218.014316, 208.982430435,
187.999422048, 188.998480562, 189.995101185, 190.994574485, 191.991335149,
192.991025275, 193.988185606, 194.988110728, 195.98553458, 196.98565963,
197.983388616, 198.983666063, 199.981798604, 200.982259764, 201.980757541,
202.981420103, 203.980318121, 204.981203322, 205.980481099, 206.981593173,
207.981245702, 208.982430435, 209.982873673, 210.986653154, 211.988867969,
212.99285728, 213.99520135, 214.999419988, 216.001915035, 217.006334796,
218.008973037, 219.013744, 220.016602, 210.987496271, 192.999843112,
193.998725085, 194.996268098, 195.995788077, 196.993189215, 197.992837202,
198.990532254, 199.990351264, 200.988416999, 201.988630236, 202.986941984,
203.987251326, 204.986074483, 205.986667036, 206.985783502, 207.986589977,
208.986173143, 209.98714771, 210.987496271, 211.990744771, 212.992936646,
213.996371733, 214.99865257, 216.002423257, 217.004718822, 218.008694336,
219.011161691, 220.015407682, 221.018050, 222.022330, 223.025190,
222.017577738, 195.005437696, 196.002115223, 197.001584351, 197.998678663,
198.998370297, 199.9956993, 200.995628335, 201.993263492, 202.993386687,
203.99142874, 204.991718799, 205.990214104, 206.990734225, 207.98964247,
208.990414742, 209.989696216, 210.990600523, 211.990703529, 212.993882668,
213.995362554, 214.998745483, 216.00027437, 217.003927675, 218.005601256,
219.009480204, 220.011393981, 221.015536782, 222.017577738, 223.021790,
224.024090, 225.028440, 226.030890, 227.035407, 228.037986, 222.01755173,
199.007258147, 200.00657249, 201.003860867, 202.003372847, 203.000924647,
204.000653204, 204.99859396, 205.998666066, 206.996949414, 207.997138783,
208.995953555, 209.996407738, 210.995536544, 211.996202244, 212.996189081,
213.998971145, 215.000341497, 216.00319799, 217.004631951, 218.007578322,
219.009252149, 220.012327405, 221.014254762, 222.01755173, 223.019735857,
224.023249951, 225.025565414, 226.029386231, 227.031835938, 228.035729,
229.038450228, 230.042510, 231.045440, 232.049772, 228.031070292,
202.009890686, 203.009271619, 204.006499668, 205.00626857, 206.00382727,
207.003798105, 208.00183994, 209.001991373, 210.000494978, 211.000897987,
211.999794499, 213.000383959, 214.000107894, 215.002719834, 216.003533035,
217.006320327, 218.00714023, 219.010085078, 220.011028384, 221.013917338,
222.01537453, 223.018502171, 224.020211821, 225.023611564, 226.025409823,
227.029177842, 228.031070292, 229.034957577, 230.037056394, 231.041220,
232.043638, 233.048060, 234.050704, 227.027752127, 206.01450498,
207.011949748, 208.011551551, 209.009494863, 210.009435986, 211.007734835,
212.007813822, 213.006607643, 214.006901798, 215.006453625, 216.008720075,
217.009346914, 218.011641453, 219.012420389, 220.014762979, 221.015591248,
222.017843851, 223.019137468, 224.021722866, 225.023229585, 226.026098089,
227.027752127, 228.031021112, 229.033015243, 230.036294178, 231.038558786,
232.042027438, 233.044550, 234.048420, 235.051232, 236.055296,
232.038055325, 209.017715682, 210.015075342, 211.014928413, 212.012980288,
213.01301014, 214.01149977, 215.01173033, 216.011062115, 217.013114328,
218.013284499, 219.015536895, 220.015747762, 221.018183674, 222.018468121,
223.020811448, 224.021466895, 225.023951021, 226.024903069, 227.02770407,
228.028741127, 229.03176243, 230.033133843, 231.036304343, 232.038055325,
233.041581843, 234.04360123, 235.047510074, 236.049870, 237.053894,
238.056496, 231.03588399, 212.023204138, 213.02110934, 214.020918417,
215.019185865, 216.019109564, 217.018323986, 218.020041889, 219.019883143,
220.021875303, 221.021877983, 222.023742, 223.023962273, 224.025625738,
225.026130678, 226.027947753, 227.028805072, 228.031051376, 229.032096793,
230.034540754, 231.03588399, 232.038591592, 233.040247277, 234.043308058,
235.045443615, 236.048681284, 237.051145659, 238.05450271, 239.057260,
240.060980, 238.050788247, 217.024368791, 218.023535671, 219.02491916,
220.024723, 221.026399, 222.026086, 223.0277386, 224.027604778,
225.029390717, 226.029338702, 227.031156367, 228.031374006, 229.033505939,
230.033939784, 231.036293704, 232.037156152, 233.039635207, 234.040952088,
235.043929918, 236.045568006, 237.048730184, 238.050788247, 239.054293299,
240.056591988, 241.060330, 242.062931, 237.048173444, 225.033913933,
226.035145, 227.034956789, 228.036180, 229.036263808, 230.037827597,
231.038245085, 232.040108, 233.040740546, 234.042895038, 235.044063267,
236.0465696, 237.048173444, 238.050946405, 239.052939025, 240.056162182,
241.058252431, 242.06164118, 243.064279, 244.067850, 242.058742611,
228.038742328, 229.040150212, 230.039649886, 231.041101107, 232.041187097,
233.042997375, 234.043317076, 235.04528605, 236.046057964, 237.048409658,
238.049559894, 239.052163381, 240.053813545, 241.056851456, 242.058742611,
243.062003092, 244.064203907, 245.067747154, 246.070204627, 247.074070,
243.06138108, 231.045560, 232.046590, 233.046348, 234.047809, 235.047946,
236.049579, 237.049996, 238.051984324, 239.053024479, 240.055300179,
241.056829144, 242.059549159, 243.06138108, 244.064284847, 245.066452114,
246.069774619, 247.072093, 248.075752, 249.078480, 247.07035354,
233.050771232, 234.050159841, 235.051434, 236.051413, 237.052901,
238.053028697, 239.054957, 240.055529539, 241.057653001, 242.058835824,
243.061389114, 244.062752578, 245.065491249, 246.067223662, 247.07035354,
248.072348508, 249.075953413, 250.078356959, 251.082284605, 252.084870,
247.07030708, 235.056580, 236.057330, 237.057003, 238.058281, 239.058279,
240.059759, 241.060230, 242.061981, 243.063007572, 244.065180774,
245.066361616, 246.068672947, 247.07030708, 248.073086, 249.074986657,
250.07831652, 251.080760172, 252.084310, 253.086880, 254.090600,
251.079586788, 237.062070, 238.061410, 239.062422, 240.062302, 241.063726,
242.063701552, 243.065427, 244.066000689, 245.068048612, 246.068805309,
247.071000589, 248.072184861, 249.074853537, 250.076406066, 251.079586788,
252.081625846, 253.085133145, 254.087322909, 255.091046, 256.093440,
252.082978512, 240.068920, 241.068538, 242.069745, 243.069548, 244.070883,
245.071324, 246.072896, 247.073656, 248.075471, 249.076411, 250.078612,
251.079992142, 252.082978512, 253.084824697, 254.088022021, 255.090273122,
256.093598, 257.095979, 258.099520, 257.095104724, 242.073430, 243.074353,
244.074084, 245.075385, 246.075299023, 247.076847, 248.077194714,
249.079034, 250.079521264, 251.081575017, 252.082466855, 253.085185236,
254.08685422, 255.089962202, 256.091773117, 257.095104724, 258.097076,
259.100595, 260.102678, 258.098431319, 245.080829, 246.081886, 247.081635,
248.082823, 249.083013, 250.084420, 251.084839, 252.086560, 253.087280,
254.089656, 255.091082705, 256.094059025, 257.095541368, 258.098431319,
259.100509, 260.103652, 261.105721, 262.108865, 255.093241131, 248.086596,
249.087833, 250.087510, 251.089012, 252.088976521, 253.090678,
254.090955253, 255.093241131, 256.094282666, 257.09687719, 258.098207,
259.101031, 260.102643, 261.105749, 262.107301, 263.110552, 264.112345,
260.105504, 251.094360, 252.095371, 253.095210, 254.096454, 255.096681,
256.098629, 257.099555, 258.101814, 259.102901, 260.105504, 261.106883,
262.109634, 263.111293, 264.114038, 265.115839, 266.119305, 263.112547,
253.100689, 254.100184, 255.101340, 256.101166194, 257.102990,
258.103489, 259.105637, 260.106440, 261.108766556, 262.109925, 263.112547,
264.113985, 265.116704, 266.117956, 267.121529, 268.123644, 255.107398,
255.107398, 256.108127, 257.107722, 258.109231, 259.109610, 260.111300,
261.112056, 262.114084, 263.114988, 264.117404, 265.118601, 266.121029,
267.122377, 268.125445, 269.127460, 270.130712, 259.114500, 258.113168,
259.114500, 260.114422071, 261.116117, 262.116398, 263.118322, 264.118931,
265.121114693, 266.122065, 267.124425, 268.125606, 269.128755, 270.130329,
271.133472, 272.135158, 273.138220, 262.122892, 260.121970, 261.121664,
262.122892, 263.123035, 264.124604, 265.125147, 266.126942, 267.127650,
268.129755, 269.130694, 270.133616, 271.135179, 272.138032, 273.139618,
274.142440, 275.144250, 263.128558, 263.128558, 264.128394885, 265.130085,
266.130097, 267.131789, 268.132162, 269.134056, 270.134650, 271.137657,
272.139052, 273.141986, 274.143131, 275.145952, 276.147208, 277.149841,
265.136151, 265.136151, 266.137299, 267.137307, 268.138728, 269.139055,
270.140657, 271.141139, 272.143738, 273.144913, 274.147492, 275.148647,
276.151156, 277.152420, 278.154812, 279.156193, 281.162061, 267.144341,
268.143795, 269.145124, 270.144720, 271.146062, 272.146317, 273.148863,
274.149492, 275.152176, 276.153034, 277.155647, 278.156469, 279.158861,
280.159795, 281.162061, 272.153615, 272.153615, 273.153682, 274.155713,
275.156142, 276.158493, 277.159519, 278.161604, 279.162468, 280.164473,
281.165372, 282.167486, 283.168415, 283.171792, 277.163943, 278.164312,
279.166546, 280.167039, 281.169286, 282.169765, 283.171792, 284.172384,
285.174105, 283.176451, 283.176451, 284.178080, 285.178732, 286.180481,
287.181045, 285.183698, 285.183698, 286.183855, 287.185599, 288.185689,
289.187279, 287.191186, 287.191186, 288.192492, 289.192715, 290.194141,
291.194384, 292.199786, 289.198862, 290.198590, 291.200011, 292.199786,
291.206564, 291.206564, 292.207549, 293.214670, 293.214670]
# Lookup tables assembled from the parallel _temp_* lists defined above.
# el2mass: element symbol -> mass (most abundant isotope)
el2mass = {sym: mass for sym, mass in zip(_temp_symbol, _temp_mass)}
el2mass["GH"] = 0.  # ghost atoms are massless here (NOTE: Cfour assigns them mass 100)
# eliso2mass: isotope label -> isotope mass; also includes plain element symbols
eliso2mass = {sym: mass for sym, mass in zip(_temp_iso_symbol, _temp_iso_mass)}
eliso2mass["GH"] = 0.  # ghost atoms are massless here (NOTE: Cfour assigns them mass 100)
#eliso2mass["X0"] = 0. # probably needed, just checking
# el2z: element symbol -> atomic number
el2z = {sym: z for sym, z in zip(_temp_symbol, _temp_z)}
el2z["GH"] = 0  # ghost atoms carry no nuclear charge
# z2*: atomic number -> mass / symbol / full element name
z2mass = {z: mass for z, mass in zip(_temp_z, _temp_mass)}
z2el = {z: sym for z, sym in zip(_temp_z, _temp_symbol)}
z2element = {z: name for z, name in zip(_temp_z, _temp_element)}
|
kratman/psi4public
|
psi4/driver/qcdb/periodictable.py
|
Python
|
gpl-2.0
| 78,237
|
[
"CFOUR",
"Psi4"
] |
5c7d62e20d8e09ed89321746419f9a4e8499ed47336638e32cd1c8fa4c0d835b
|
'''
Created on 03.06.2011
@author: Sergey Khayrulin
'''
from __future__ import absolute_import
from Entity.Vertex import Vertex
from Entity.Face import Face
from Entity.Slice import Slice, AlternateSlice
from Entity.Helper import *
import pprint
import math
class Entity(object):
    '''
    Main class that processes mesh data exported from Blender or a WRL
    (formatted) file.

    The mesh is stored as vertices plus quad faces; the class walks it to
    extract a neuron skeleton: the soma centre first (findCenterOfSoma),
    then successive cross-section "slices" along each neurite (find_point).

    NOTE(review): this module relies on Python-2-only constructs
    (dict.iteritems and a tuple-unpacking lambda in getAllBrunches), so it
    must run under Python 2.
    '''
    def __init__(self):
        '''
        Constructor
        '''
        self.vertices = []          # Vertex objects of the mesh
        self.faces = Faces()        # quad faces, keyed by their vertex-index order
        self.resulting_points = []  # skeleton points produced by find_point
        self.checked_points = []    # vertex indices already consumed by a slice
        self.neuronInfo = ''        # free-form description of the neuron
    def clean_all(self):
        # Drop cached face data only; vertices and results are kept.
        self.faces.clean_all()
    def add_vertex(self, coordinates):
        '''
        Add a vertex to the vertex collection. Takes a sequence of exactly
        three coordinate values, builds a Vertex from them and appends it
        to self.vertices.
        '''
        try:
            if len(coordinates) != 3:
                raise ParserException('Error')
            point = Vertex(float(coordinates[0]),float(coordinates[1]),float(coordinates[2]))
            self.vertices.append(point)
        except ParserException as ex:
            print('It should be some incorrect data')
            raise ex
    def add_face(self, points_arr):
        '''
        Add a face to the faces collection. Takes a sequence of numbers,
        each a position in the vertex collection; only quads (4 indices)
        are supported.
        '''
        try:
            if len(points_arr) < 4:
                # NOTE(review): the message text looks inverted -- this is
                # raised when the face has FEWER than 4 points. Kept as-is.
                raise ParserException('Face contains more that 4 point')
            face = Face(self.vertices[int(points_arr[0])],self.vertices[int(points_arr[1])],self.vertices[int(points_arr[2])],self.vertices[int(points_arr[3])])
            face.order = [int(points_arr[0]),int(points_arr[1]),int(points_arr[2]),int(points_arr[3])]
            self.faces[face.order] = face
            #print("add_face %s" % face.order)
            #self.faces.append(face)
        except ParserException as ex:
            print('Error:%s'%ex)
            print(points_arr)
            raise ex
    def findCenterOfSoma(self, use_method2 = False):
        '''
        Find the start point for the main algorithm; the first point should
        be inside the soma. The soma is taken to be the biggest segment of
        the cell (the slice with the largest perimeter).
        '''
        iter = 0
        temp_points = []
        slices = []
        for p in range(len(self.vertices)):
            temp_points.append(HelpPoint(p,0))
        if use_method2:
            # Alternate path: Slice itself locates the soma and reports it
            # through extra_dict (used when the normal slicing fails below).
            startSlice = Slice(temp_points,self.faces, use_method2 = True, vertices = self.vertices)
            point_on_perimeter = self.vertices[startSlice[0].point]
            self.checked_points += startSlice.extra_dict['points_in_soma']
            self.start_center_point = startSlice.extra_dict['center_pt']
            self.start_center_point.diametr = 2 * self.start_center_point.len_between_point(point_on_perimeter)
            self.starting_slice = startSlice
            return
        # Repeatedly peel slices off the remaining points until none are left.
        slice = Slice(temp_points,self.faces)
        slices.append(slice)
        while len(slice) != 0:
            temp_points = list(filter(lambda p: not slice.__contains__(p), temp_points))
            slice = None
            slice = Slice(temp_points,self.faces)
            if len(slice) != 0:
                slices.append(slice)
            #if not (iter % 10):
            #    print('slice %d iter %d' % (len(temp_points), iter))
            #slice.printSlice()
            #print slice.getPerimetr(self.vertices)
            iter += 1
        # find slice with longest line segments
        perimiter_coll = sorted(slices,key=lambda slice:slice.getPerimetr(self.vertices), reverse=True)
        startSlice = Slice(perimiter_coll[0],self.faces)
        #print("findCenterOfSoma while loop done %d %d" % (iter, len(temp_points)))
        try:
            self.start_center_point = self.__getCenterPoint(startSlice, minimal = True)
        except IndexError:
            print("no center point startSlice %d perimiter_coll %d"
                  % (len(startSlice), len(perimiter_coll[0])))
            for face in self.faces.keys():
                print("face order %s" % face)
            # the coordinates aren't organized in a pattern that the normal
            # code in Slice can understand, so we use an alternate method
            return self.findCenterOfSoma(use_method2 = True)
        if not use_method2:
            point_on_perimeter = self.vertices[perimiter_coll[0][0].point]
            self.start_center_point.diametr = 2 * self.start_center_point.len_between_point(point_on_perimeter)
    def getAllBrunches(self):
        '''
        Return a dictionary mapping neurite name -> sorted sequence of
        indices into resulting_points, e.g. 'axon' => [1,2,4]. The largest
        branch is labelled the axon; the rest become 'dendriteN'.
        '''
        brunches_temp = {}
        result_coll = {}
        i = 0
        # Roots are points attached directly to the soma point (index 0).
        roots = [self.resulting_points.index(p) for p in self.resulting_points \
                 if p.parentPoint == 0 and self.resulting_points.index(p) != 0]
        for root in roots:
            brunches_temp[root] = []
            for p in self.resulting_points:
                parent = p.getRoot(self.resulting_points)
                if parent == root:
                    brunches_temp[root].append(self.resulting_points.index(p))
        # the first of these two lines works with python3, the second with python2:
        #for k1, value in sorted(brunches_temp.iteritems(),key=lambda k,v:(len(v),k),reverse=True): # we try to determine
        for k1, value in sorted(brunches_temp.iteritems(),key=lambda (k,v):(len(v),k),reverse=True): # we try to determine
            if i == 0:
                # Longest branch is assumed to be the axon.
                for j in value:
                    self.resulting_points[j].isAxon = True
                result_coll['axon'] = value
            else:
                for j in value:
                    if self.resulting_points[j].cable != 2:
                        self.resulting_points[j].isDendrite = True
                        self.resulting_points[j].cable = 3
                result_coll['dendrite' + str(i)] = value
            i += 1
        return result_coll
    def use_alt_slice(self):
        # True once findCenterOfSoma fell back to the alternate (method2) path.
        return hasattr(self, 'starting_slice')
    def create_slice(self, coll, allow_checked = False):
        # Build a Slice (or AlternateSlice when in method2 mode) from coll,
        # optionally filtering out already-checked points first.
        if self.use_alt_slice():
            if not allow_checked:
                coll = filter(lambda p: not self.checked_points.__contains__(p.point), coll)
            slice = AlternateSlice(coll,self.faces, self.vertices, self.checked_points, self.vertices[self.starting_slice[0].point], None, allow_checked)
        else:
            slice = Slice(coll,self.faces)
        return slice
    def branching(self, slice):
        # In method2 mode, a slice whose point has exactly 5 adjacent points
        # marks a branching location.
        if not self.use_alt_slice():
            return False
        for p in range(len(slice)):
            if len(self.starting_slice.extra_dict['adjacentPoints'][slice[p].point]) == 5:
                return True
        return False
    def find_point(self,center_point=Vertex(),iteration=0,
                   parentPoint=0, isNeurite=False,
                   isBrunchStart=False, _slice=None):
        '''
        Main recursive function: walk the mesh slice by slice, appending a
        Result_Point per slice centre, and classify axon/dendrite/neurite.
        NOTE(review): the mutable default Vertex() is shared across calls,
        but it is always replaced when iteration == 0, so it is harmless.
        '''
        vector_len = []
        print("enter find_point iteration %d isBrunchStart %d" % (iteration, isBrunchStart))
        if iteration == 0: center_point = self.start_center_point
        if isNeurite:
            res_point = Result_Point(center_point,parentPoint,2,isBrunchStart)
            res_point.isNeurite = True
            self.resulting_points.append(res_point)
        elif iteration != 0:
            self.resulting_points.append(Result_Point(center_point,parentPoint,1,isBrunchStart))
        elif iteration == 0:
            self.resulting_points.append(Result_Point(center_point,parentPoint,0,isBrunchStart))
        current_point = len(self.resulting_points) - 1
        # Distance of every vertex from the current centre, sorted ascending.
        for p in range(len(self.vertices)):
            vector_len.append(HelpPoint(p,self.vertices[p].len_between_point(center_point)))
        vector_len = sorted(vector_len,key=lambda p:p.lenght)
        tmp_list = []
        if iteration != 0:
            '''
            If iteration != 0 that means we are should find next 4 or more(if we find place of brunching 6 or 8) vertices
            '''
            if _slice is not None:
                slice = _slice
            else:
                slice = self.create_slice(vector_len)
            adjacentPoints = []
            use_v5 = iteration >= 3 and self.branching(slice) # with 5 adjacent points
            for p in range(4):
                if use_v5 and not isBrunchStart:
                    c = slice[p].point
                    tmp_list.append(c)
                    adjacentPoints.append(HelpPoint(c, self.vertices[c].len_between_point(center_point)))
                if use_v5 and isBrunchStart:
                    #print("use_v5 br %d p %d" % (len(slice), p))
                    coll = self.__find_adjacent_vertices5(slice[p].point)
                elif p != 3:
                    coll = self.__find_adjacent_vertices(slice[p].point, slice[p+1].point)
                else:
                    coll = self.__find_adjacent_vertices(slice[p].point, slice[0].point)
                #print("%d-%d has %d adj v" % (slice[p].point, slice[(p+1)%4].point, len(coll)))
                for c in coll:
                    helpPoint = HelpPoint(c,self.vertices[c].len_between_point(center_point))
                    #print("%3d %3d is checked? %d" % (p, c, self.checked_points.__contains__(c)))
                    if not adjacentPoints.__contains__(helpPoint):
                        if not self.checked_points.__contains__(c):
                            adjacentPoints.append(helpPoint)
                            tmp_list.append(c)
            print("got %d adjacentPoints %s" % (len(adjacentPoints), tmp_list))
            if len(adjacentPoints) == 0: return
            '''
            If we find 8 adjacent vertices it means that we place in branching segments
            '''
            if len(adjacentPoints) > 4 and not (use_v5 and isBrunchStart):
                if self.__more4AdjacentPointCase(adjacentPoints, slice, isBrunchStart,iteration, current_point, center_point):
                    return
            del vector_len[:]
            vector_len = [HelpPoint(p.point,self.vertices[p.point].len_between_point(center_point))
                          for p in adjacentPoints if not self.checked_points.__contains__(p.point)]
            vector_len = sorted(vector_len,key=lambda p:p.lenght)
        if self.use_alt_slice():
            # NOTE(review): under Python 2 filter() returns a list, which the
            # len() calls below require.
            vector_len = filter(lambda p: not self.checked_points.__contains__(p.point), vector_len)
            if iteration == 0:
                adj_dict = self.starting_slice.extra_dict['adjacentPoints']
            else:
                adj_dict = None
            slice = AlternateSlice(vector_len,self.faces, self.vertices, self.checked_points, self.vertices[self.starting_slice[0].point], adj_dict)
        else:
            slice = Slice(vector_len,self.faces)
        lenOfSlice = len(slice)
        print("lenOfSlice %d iter %d %d" % (lenOfSlice, iteration, len(vector_len)))
        if lenOfSlice == 0:
            slice = vector_len
        if len(slice) < 4:
            return
        new_center_point = self.__getCenterPoint(slice)
        iteration += 1
        if lenOfSlice != 0:
            self.find_point(new_center_point,iteration,parentPoint=current_point,isNeurite=isNeurite,isBrunchStart=False, _slice=slice)
        else:
            # Dead end: record the final point without recursing further.
            if isNeurite:
                res_point = Result_Point(new_center_point,current_point,2,False)
                res_point.isNeurite = True
                self.resulting_points.append(res_point)
            elif iteration != 0:
                self.resulting_points.append(Result_Point(new_center_point,current_point,1,False))
        if iteration == 1:
            self.__checkDendrite(slice, center_point, vector_len,current_point)
    def __getCenterPoint(self, slice, minimal = False):
        '''
        Get the centre point as the centre of mass of the input slice
        (usually 4 points). With minimal=True, accepts fewer than 4 points;
        otherwise raises IndexError for short slices. Marks the used
        vertices as checked.
        '''
        x=y=z=0
        n_points = 4
        if len(slice) < 4:
            print("Bad slice len %d" % len(slice))
            if minimal and len(slice) > 0:
                n_points = len(slice)
            else:
                raise IndexError
        for p in range(n_points):
            x += self.vertices[slice[p].point].x
            y += self.vertices[slice[p].point].y
            z += self.vertices[slice[p].point].z
            if not self.checked_points.__contains__(slice[p].point):
                self.checked_points.append(slice[p].point)
        center_point = Vertex(x/n_points,y/n_points,z/n_points)
        # Diameter is estimated from the distance to the first slice point.
        center_point.diametr = 2 * center_point.len_between_point(self.vertices[slice[0].point])
        if isinstance(slice, Slice):
            slice.printSlice()
        else:
            print(slice)
        return center_point
    def __find_adjacent_vertices(self, num_p1,num_p2):
        '''
        Find the vertices adjacent to the edge (num_p1, num_p2): the other
        corners of every face containing both points.
        '''
        adjacentVertices = []
        for key,f in self.faces.items():
            if f.order.__contains__(num_p1) and f.order.__contains__(num_p2):
                for p in f.order:
                    if p != num_p1 and p != num_p2:
                        adjacentVertices.append(p)
        return adjacentVertices
    def __find_adjacent_vertices5(self, num_p1):
        '''
        Find the vertices adjacent to a single point, skipping any that lie
        within the diameter of an already recorded skeleton point.
        '''
        adjacentVertices = []
        for key,f in self.faces.items():
            if f.order.__contains__(num_p1):
                for p in f.order:
                    if p != num_p1 and not (p in adjacentVertices):
                        near_old_point = False
                        for r_pt in self.resulting_points:
                            dist = r_pt.point.len_between_point(self.vertices[p])
                            if dist < r_pt.point.diametr:
                                near_old_point = True
                                break
                        if not near_old_point:
                            adjacentVertices.append(p)
        return adjacentVertices
    def __fillUpBrachesCollection(self, adjacentPoints, slice):
        '''
        Fill the branches collection: every valid 4-point slice combining
        two current slice points with two adjacent points, plus the slice
        of the adjacent points themselves if it is non-empty.
        '''
        branchesCollection = []
        for i in range(4):
            for p1 in adjacentPoints:
                for p2 in adjacentPoints:
                    if p1 == p2:
                        continue
                    s = self.create_slice([slice[i], slice[(i + 1) % 4], p1, p2],
                                          allow_checked = True)
                    if (len(s) == 4):
                        if not branchesCollection.__contains__(s):
                            branchesCollection.append(s)
        if len(self.create_slice(adjacentPoints)) != 0:
            branchesCollection.append(self.create_slice(adjacentPoints))
        return branchesCollection
    def __more4AdjacentPointCase(self, adjacentPoints, slice, isBrunch,iteration, current_point, center_point):
        '''
        Handle the case where the algorithm finds more than 4 adjacent
        points (a branching region). Returns True when the recursion was
        dispatched here, False when the caller should continue normally.
        '''
        branchesCollection = self.__fillUpBrachesCollection(adjacentPoints, slice)
        if len(branchesCollection) >= 2 :
            # Two or more candidate branches: recurse into each of them.
            center_points = {}
            thirdBrunchCollection = []
            for branch in branchesCollection:
                branch_center_point = self.__getCenterPoint(branch)
                center_points[branch_center_point] = branch
            print("%d center_points" % (len(center_points.keys())))
            for branch_center_point,branch in center_points.items():
                old_num_r_points = len(self.resulting_points)
                print("start branch %d %d %d %d size %d %3d resulting_points"
                      % (branch[0].point, branch[1].point, branch[2].point, branch[3].point, len(branch), len(self.resulting_points)))
                self.find_point(branch_center_point,iteration,current_point,True,True, _slice=branch)
                print("finish branch %d %3d resulting_points" % (branch[0].point, len(self.resulting_points)))
                if self.use_alt_slice() and len(self.resulting_points) == old_num_r_points + 1:
                    del self.resulting_points[-1]
                    print("undo branches of length 1")
                if len(adjacentPoints) > 6:
                    thirdBrunchCollection.extend(branch)
            # More than 6 adjacent points may hide a third branch.
            thirdBrunchPoints = [HelpPoint(p.point,self.vertices[p.point].len_between_point(center_point)) \
                                 for p in thirdBrunchCollection if not slice.__contains__(p)]
            slice_t = self.create_slice(thirdBrunchPoints)
            if len(slice_t) == 4:
                third_brunch_center_point = self.__getCenterPoint(slice_t)
                self.find_point(third_brunch_center_point,iteration, current_point,True,True, _slice=slice_t)
            return True
        elif len(branchesCollection) == 0 or (len(branchesCollection) == 1 and not isBrunch):
            # Ambiguous case: split the adjacent points in two slices and
            # follow the one with the smaller (non-zero) perimeter.
            sortedadjacentPoints = sorted(adjacentPoints,key=lambda p:p.lenght)
            first_slice = self.create_slice(sortedadjacentPoints)
            second_slice = self.create_slice(filter(lambda p: first_slice.__contains__(p) == False, sortedadjacentPoints))
            perimeter_1 = first_slice.getPerimetr(self.vertices)
            perimeter_2 = second_slice.getPerimetr(self.vertices)
            if perimeter_1 > perimeter_2 and perimeter_2 != 0:
                new_center_point = self.__getCenterPoint(second_slice)
                self.find_point(new_center_point,iteration, current_point,False,False, _slice=second_slice)
                return True
            elif perimeter_1 < perimeter_2 or perimeter_2 == 0:
                if perimeter_1 == 0:
                    if len(branchesCollection) == 1:
                        first_slice = branchesCollection[0]
                    else:
                        first_slice.getFaceFromColl(adjacentPoints,self.faces)
                    new_center_point = self.__getCenterPoint(first_slice)
                    self.find_point(new_center_point,iteration, current_point,isBrunch,False, _slice=first_slice)
                else:
                    new_center_point = self.__getCenterPoint(first_slice)
                    self.find_point(new_center_point,iteration, current_point,False,False, _slice=first_slice)
                return True
        elif len(branchesCollection) == 1 and isBrunch:
            slice = branchesCollection[0]
            if len(slice) == 0:
                slice = slice.getFaceFromColl(adjacentPoints,self.faces)
            try:
                new_center_point = self.__getCenterPoint(slice)
            except IndexError:
                print("Warning: __getCenterPoint failed, slice len %d, %d adjacentPoints"
                      % (len(slice), len(adjacentPoints)))
                slice.printSlice()
                return False
            self.find_point(new_center_point,iteration, parentPoint=current_point,isNeurite=True,isBrunchStart=False, _slice=slice)
            return True
        return False
    def __checkDendrite(self, slice, center_point, vector_len, current_point):
        '''
        Private Method.
        Check whether the soma has other outgoing processes; if so, run
        find_point for each of them (up to 5 attempts).
        '''
        iteration = 1
        vector_len = filter(lambda p: slice.__contains__(p) == False
                            and self.checked_points.__contains__(p.point) == False, vector_len)
        vector_len = sorted(vector_len,key=lambda p:p.lenght)
        for i in range(5):
            slice2 = self.create_slice(vector_len)
            # Accept slice2 only if its perimeter is comparable to slice's
            # (ratio below 2 in both directions).
            if (len(slice2) == 4 and
                int(slice.getPerimetr(self.vertices) / slice2.getPerimetr(self.vertices)) <= 1 and
                int(slice2.getPerimetr(self.vertices) / slice.getPerimetr(self.vertices)) <= 1):
                new_center_point = self.__getCenterPoint(slice2)
                iteration += 1
                self.find_point(new_center_point,iteration,parentPoint=current_point,isNeurite=False,isBrunchStart=False, _slice=slice2)
            vector_len = filter(lambda p: slice2.__contains__(p) == False
                                and self.checked_points.__contains__(p.point) == False, vector_len)
            vector_len = sorted(vector_len, key=lambda p:p.lenght)
    #
    # check_unused_coordinates might be of some use in checking for
    # sections of a neuron that were omitted due to flaws in the code
    #
    def check_unused_coordinates(self):
        # Report faces none of whose vertices were ever consumed by a slice.
        for key,f in self.faces.items():
            unused = True
            for p in f.order:
                if p in self.checked_points:
                    unused = False
                    break
            if unused:
                print("unused face %s" % f.order)
|
openworm/Blender2NeuroML
|
src/Entity/Entity.py
|
Python
|
mit
| 21,651
|
[
"NEURON"
] |
5f14e763a721e6e792ede3b3b43f1e909f86f9b73e2a913d751eca8b1b167f76
|
"""
Test helper functions.
"""
import os
import uuid
from regression.pages.studio import LOGIN_BASE_URL
from regression.pages.studio.utils import get_course_key
from regression.pages.whitelabel.activate_account import ActivateAccount
from regression.pages.whitelabel.const import ORG, UNUSED_REGISTRATION_FIELDS_MAPPING
# Names of the environment variables that describe the regression-test course.
COURSE_ORG = 'COURSE_ORG'
COURSE_NUMBER = 'COURSE_NUMBER'
COURSE_RUN = 'COURSE_RUN'
COURSE_DISPLAY_NAME = 'COURSE_DISPLAY_NAME'
def get_random_credentials():
    """
    Build a random (username, email) pair from a UUID node value.
    """
    username = f"test_{uuid.uuid4().node}"
    email = '{}@example.com'.format(username)
    return username, email
def get_random_password():
    """
    Build a random password suitable for registering a user.

    A site-specific prefix can be supplied via the NEW_PASSWORD_PREFIX
    environment variable to satisfy complexity rules; the default 'a0.'
    covers most cases.
    """
    return os.environ.get('NEW_PASSWORD_PREFIX', 'a0.') + uuid.uuid4().hex
def get_course_info():
    """
    Return the info of the course used for the regression tests,
    read from the environment.
    """
    env_vars = (
        ('org', COURSE_ORG),
        ('number', COURSE_NUMBER),
        ('run', COURSE_RUN),
        ('display_name', COURSE_DISPLAY_NAME),
    )
    return {field: os.environ.get(var) for field, var in env_vars}
def get_wl_course_info(org, num, run):
    """
    Return the info of the course used for the white-label regression tests.

    Arguments:
        org: course organization
        num: course number
        run: course run
    Returns:
        dict: course info with a derived display name
    """
    display_name = '{}-{}-Test'.format(org, num)
    return dict(
        course_org=org,
        course_num=num,
        course_run=run,
        display_name=display_name,
    )
def get_course_display_name():
    """
    Return the display name of the regression-test course, read from
    the environment (None when unset).
    """
    display_name = os.environ.get(COURSE_DISPLAY_NAME)
    return display_name
def visit_all(pages):
    """
    Visit every page object in *pages* (an iterable), logging each visit.

    Arguments:
        pages: iterable of page objects exposing a visit() method
    """
    for current_page in pages:
        print("Visiting: {}".format(current_page))
        current_page.visit()
def get_url(url_path, course_info):
    """
    Construct a URL to the page within the course.

    Arguments:
        url_path: path segment for the target page
        course_info: course info dict used to derive the course key
    """
    key = str(get_course_key(course_info))
    return "/".join([LOGIN_BASE_URL, url_path, key])
def get_data_locator(page):
    """
    Get the unique data locator for the component on *page*.

    Returns:
        The id attribute of the first '.hd-3' element.
    """
    ids = page.q(css='.hd-3').attrs('id')
    return ids[0]
def get_data_id_of_component(page):
    """
    Get the data id of the component on *page*.

    Returns:
        The id attribute of the first '.problem-header' element.
    """
    ids = page.q(css='.problem-header').attrs('id')
    return ids[0]
def get_white_label_registration_fields(
        email='', password='', name="Automated Test User",
        username='', first_name='Test', last_name='User',
        gender='m', year_of_birth='1994', state='Massachusetts',
        country='US', level_of_education='m', company='Arbisoft', title='SQA',
        profession='physician', specialty='neurology', terms_of_service="true",
        honor_code="true"
):
    """
    Return a dictionary of fields to register a user.

    Any argument left at its default empty value for email, username or
    password is replaced with a randomly generated one.

    Arguments:
        email(str): User's email
        password(str): User's password
        name(str): User's full name
        username(str): User's user name
        first_name(str): User's first name
        last_name(str): User's last name
        gender(str): User's gender
        year_of_birth(str): User's year of birth
        state(str): User's current state of residence
        country(str): User's country
        level_of_education(str): User's education level
        company(str): User's current company of affiliation
        title(str): User's title
        profession(str): Profession of user
        specialty(str): User's area of specialty
        terms_of_service(str): Terms of service checkbox value
        honor_code(str): Honor code checkbox value
    Returns:
        dict: A dictionary of all fields.
    """
    # Generate random fallbacks, then prefer any caller-supplied values.
    random_username, random_email = get_random_credentials()
    random_password = get_random_password()
    chosen_email = email or random_email
    fields = {
        'email': chosen_email,
        'confirm_email': chosen_email,
        'username': username or random_username,
        'password': password or random_password,
        'name': name,
        'first_name': first_name,
        'last_name': last_name,
        'gender': gender,
        'year_of_birth': year_of_birth,
        'state': state,
        'country': country,
        'level_of_education': level_of_education,
        'company': company,
        'title': title,
        'profession': profession,
        'specialty': specialty,
        'terms_of_service': terms_of_service,
        'honor_code': honor_code,
    }
    return fields
def fill_input_fields(page, elements_and_values_dict):
    """
    Fill input fields on a page.

    Arguments:
        page(PageObject): Page to fill input fields on.
        elements_and_values_dict(dict): Maps element css selectors to the
            values to enter.
    """
    for selector in elements_and_values_dict:
        page.q(css=selector).fill(elements_and_values_dict[selector])
def select_drop_down_values(page, elements_and_values_dict):
    """
    Select drop-down values on a page.

    Arguments:
        page(PageObject): Page on which the drop-downs exist.
        elements_and_values_dict(dict): Maps drop-down element names to the
            option values to select.
    """
    for name in elements_and_values_dict:
        option_css = 'select[name={}] option[value="{}"]'.format(
            name, elements_and_values_dict[name]
        )
        page.wait_for_element_visibility(
            option_css,
            'target value is visible in Drop down'
        )
        page.q(css=option_css).click()
def click_checkbox(page, checkbox_css, toggle=False):
    """
    Click a checkbox.

    Arguments:
        page(PageObject): The page object on which the checkbox exists.
        checkbox_css(str): The css of the checkbox.
        toggle(bool): When False, an already-checked checkbox is left alone;
            when True the checkbox state is always flipped.
    """
    page.wait_for_element_visibility(checkbox_css, 'wait for target checkbox')
    checkbox = page.q(css=checkbox_css).results[0]
    # Always click in toggle mode; otherwise only click to check, never uncheck.
    if toggle or not checkbox.is_selected():
        checkbox.click()
def activate_account(test, email_api):
    """
    Activates an account.
    Fetch activation url from email, open the activation link in a new
    window, verify that account is activated.
    Arguments:
        test: The browser on which activation is performed.
        email_api(GuerrillaMailApi): Api to access GuerrillaMail.
    """
    # Remember the original window so we can return to it afterwards.
    main_window = test.browser.current_window_handle
    # Get activation link from email
    activation_url = email_api.get_url_from_email(
        'activate'
    )
    # Open a new window and go to activation link in this window
    test.browser.execute_script("window.open('');")
    test.browser.switch_to.window(test.browser.window_handles[-1])
    account_activate_page = ActivateAccount(test.browser, activation_url)
    account_activate_page.visit()
    # Verify that activation is complete
    # NOTE(review): is_account_activation_complete is accessed without
    # parentheses -- presumably a property on ActivateAccount; confirm.
    test.assertTrue(account_activate_page.is_account_activation_complete)
    test.browser.close()
    # Switch back to original window and refresh the page
    test.browser.switch_to.window(main_window)
    test.browser.refresh()
def get_org_specific_registration_fields():
    """
    Get ORG-specific registration fields by removing unused fields based on
    the selected ORG.

    Returns:
        dict: filtered registration data
    """
    data = get_white_label_registration_fields()
    for field_key in UNUSED_REGISTRATION_FIELDS_MAPPING[ORG]:
        data.pop(field_key)
    return data
def construct_course_basket_page_url(course_id):
    """
    Use the course id to construct the course-related basket page url.

    Arguments:
        course_id: id of the course to enroll in
    Returns:
        str: the constructed url
    """
    return (
        f'account/finish_auth?course_id={course_id}&enrollment_action=enroll&'
        'purchase_workflow=single&next=/dashboard'
    )
|
edx/edx-e2e-tests
|
regression/tests/helpers/utils.py
|
Python
|
agpl-3.0
| 8,374
|
[
"VisIt"
] |
87965d704da4c2ba020f3a7f999db45737301439c29bdda4ce615a1c342a342c
|
#===============================================================================
#
# CSR_T_P.py
#
# This file is part of ANNarchy.
#
# Copyright (C) 2016-2020 Julien Vitay <julien.vitay@gmail.com>,
# Helge Uelo Dinkelbach <helge.dinkelbach@gmail.com>
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# ANNarchy is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
#
#===============================================================================
attribute_decl = {
'local':
"""
// Local %(attr_type)s %(name)s
std::vector< std::vector<%(type)s> > %(name)s;
""",
'semiglobal':
"""
// Semiglobal %(attr_type)s %(name)s
std::vector< %(type)s > %(name)s ;
""",
'global':
"""
// Global %(attr_type)s %(name)s
%(type)s %(name)s ;
"""
}
attribute_cpp_init = {
'local':
"""
// Local %(attr_type)s %(name)s
%(name)s = init_matrix_variable< %(type)s, std::vector<%(type)s> >(%(init)s);
""",
'semiglobal':
"""
// Semiglobal %(attr_type)s %(name)s
%(name)s = init_vector_variable< %(type)s >(%(init)s);
""",
'global':
"""
// Global %(attr_type)s %(name)s
%(name)s = %(init)s;
"""
}
attribute_cpp_size = {
'local': """
// Local %(attr_type)s %(name)s
size_in_bytes += sizeof(std::vector<std::vector<%(ctype)s>>);
size_in_bytes += sizeof(std::vector<%(ctype)s>) * %(name)s.capacity();
for(auto it = %(name)s.cbegin(); it != %(name)s.cend(); it++)
size_in_bytes += (it->capacity()) * sizeof(%(ctype)s);
""",
'semiglobal': """
// Semiglobal %(attr_type)s %(name)s
size_in_bytes += sizeof(std::vector<%(ctype)s>);
size_in_bytes += sizeof(%(ctype)s) * %(name)s.capacity();
""",
'global': """
// Global %(attr_type)s %(name)s
size_in_bytes += sizeof(%(ctype)s);
"""
}
attribute_cpp_delete = {
'local': """
// %(name)s
for (auto it = %(name)s.begin(); it != %(name)s.end(); it++) {
it->clear();
it->shrink_to_fit();
};
%(name)s.clear();
%(name)s.shrink_to_fit();
""",
'semiglobal': """
// %(name)s
%(name)s.clear();
%(name)s.shrink_to_fit();
""",
'global': ""
}
delay = {
'nonuniform_spiking': {
'declare': """
std::vector<int> delay;
int max_delay;
int idx_delay;
std::vector< std::vector< std::vector< int > > > _delayed_spikes;
""",
'init': """
delay = init_matrix_variable<int>(1);
update_matrix_variable_all<int>(delay, delays);
idx_delay = 0;
max_delay = pop%(id_pre)s.max_delay;
""",
'reset': """
while(!_delayed_spikes.empty()) {
auto elem = _delayed_spikes.back();
elem.clear();
_delayed_spikes.pop_back();
}
idx_delay = 0;
max_delay = pop%(id_pre)s.max_delay ;
_delayed_spikes = std::vector< std::vector< std::vector< int > > >(max_delay, std::vector< std::vector< int > >(post_rank.size(), std::vector< int >()) );
""",
'pyx_struct':
"""
# Non-uniform delay
vector[vector[int]] delay
int max_delay
void update_max_delay(int)
void reset_ring_buffer()
""",
'pyx_wrapper_init': "",
'pyx_wrapper_accessor':
"""
# Access to non-uniform delay
def get_delay(self):
return proj%(id_proj)s.delay
def get_dendrite_delay(self, idx):
return proj%(id_proj)s.delay[idx]
def set_delay(self, value):
proj%(id_proj)s.delay = value
def get_max_delay(self):
return proj%(id_proj)s.max_delay
def set_max_delay(self, value):
proj%(id_proj)s.max_delay = value
def update_max_delay(self, value):
proj%(id_proj)s.update_max_delay(value)
def reset_ring_buffer(self):
proj%(id_proj)s.reset_ring_buffer()
"""
}
}
event_driven = {
'declare': """
std::vector<long> _last_event;
""",
'cpp_init': """
// Event-driven
_last_event = init_matrix_variable<long>(-10000);
""",
'pyx_struct': """
vector[vector[long]] _last_event
""",
}
spiking_summation_fixed_delay = """// Event-based summation
if (_transmission && pop%(id_post)s._active){
auto row_ptr_ = sub_matrices_[tid]->row_ptr();
auto col_idx_ = sub_matrices_[tid]->col_idx();
// Iterate over all spiking neurons
for( int _idx = 0; _idx < %(pre_array)s.size(); _idx++) {
// Rank of the presynaptic neuron
int _pre = %(pre_array)s[_idx];
// Iterate over connected post neurons
for(int syn = row_ptr_[_pre]; syn < row_ptr_[_pre + 1]; syn++) {
// Event-driven integration
%(event_driven)s
// Update conductance
%(g_target)s
// Synaptic plasticity: pre-events
%(pre_event)s
}
}
} // active
"""
conn_templates = {
# accessors
'delay': delay,
'attribute_decl': attribute_decl,
'attribute_cpp_init': attribute_cpp_init,
'attribute_cpp_size': attribute_cpp_size,
'attribute_cpp_delete': attribute_cpp_delete,
'event_driven': event_driven,
#operations
'spiking_sum_fixed_delay': spiking_summation_fixed_delay,
}
|
ANNarchy/ANNarchy
|
ANNarchy/generator/Projection/OpenMP/CSR_T_P.py
|
Python
|
gpl-2.0
| 5,813
|
[
"NEURON"
] |
606ae2e8b627f6d3c9e9efb9c9c65656785df16ac82b8a203c56977d4cdc2c11
|
"""
@package AlignerSpliter
@brief ...
@copyright [GNU General Public License v2](http://www.gnu.org/licenses/gpl-2.0.html)
@author Adrien Leger - 2014
* <adrien.leger@gmail.com>
* <adrien.leger@inserm.fr>
* <adrien.leger@univ-nantes.fr>
* [Github](https://github.com/a-slide)
* [Atlantic Gene Therapies - INSERM 1089] (http://www.atlantic-gene-therapies.fr/)
"""
#~~~~~~~GLOBAL IMPORTS~~~~~~~#
# Standard library packages import
from os import mkdir, path, remove
# Local library packages import
#from SamSpliter import SamSpliter
from FastaReader import FastaReader
from BWA.BwaIndexer import GenerateIndex, ExistingIndex
from BWA.BwaAligner import BwaAligner
def align (source_list,
           R1,
           R2=None,
           index = None,
           ref_outdir = "./reference/",
           bwa_mem = "bwa mem",
           align_opt=None,
           align_outdir= "./bwa_align/",
           bwa_index = "bwa index",
           index_opt=None,
           index_outdir = "./bwa_index/"):
    """
    Align reads against the reference sequences with BWA MEM, reusing an
    existing BWA index when one is provided and valid, otherwise merging
    the references and generating a fresh index.

    @param source_list List of paths to reference fasta files
    @param R1 Path to the forward reads fastq file
    @param R2 Optional path to the reverse reads fastq file
    @param index Optional path to an existing BWA index to validate and reuse
    @param ref_outdir Output directory for the merged reference
    @param bwa_mem Command used to invoke "bwa mem"
    @param align_opt Options forwarded to BwaAligner
    @param align_outdir Output directory for the alignment (currently unused here)
    @param bwa_index Command used to invoke "bwa index"
    @param index_opt Options forwarded to the index generator
    @param index_outdir Output directory for the generated index
    @return Result of BwaAligner.align on R1/R2
    """
    # Fixes over the previous revision: the parameter list was missing a
    # comma after ref_outdir and the closing colon (SyntaxError), and the
    # body referenced undefined names (index_path, ref1_path/ref2_path,
    # bwa_path, db_outdir, R1_path/R2_path, un-imported mkdir) instead of
    # the actual parameters.
    # Try to validate an index from an existing one
    try:
        if not index:
            raise Exception("No index provided. An index will be generated")
        print("Existing index provided")
        FastaRef = FastaReader(source_list, write_merge=False)
        Index = ExistingIndex(bwa_index, index)
    # If no index or if an error occured during validation of the existing index = create a new one
    except Exception as E:
        print (E)
        print("Merge References...")
        mkdir(ref_outdir)
        FastaRef = FastaReader(source_list, write_merge=True, output="merged.fa")
        print("Generating index...")
        mkdir(index_outdir)
        Index = GenerateIndex(bwa_index, FastaRef.merge_ref, index_opt)
        # The merged fasta is only needed to build the index
        remove (FastaRef.merge_ref)
    Aligner = BwaAligner(bwa_mem, Index, align_opt)
    return (Aligner.align(R1, R2))
|
a-slide/IsFinder
|
src/ShortReadAligner_test.py
|
Python
|
gpl-2.0
| 2,322
|
[
"BLAST",
"BWA"
] |
64fdb78aba5fd9f6c91eef0b263819d8e4e946d29fea7eee954c8c5b344e6228
|
#!/bin/env python
""" Show request given its ID, a jobID or a transformation and a task """
__RCSID__ = "$Id: $"
import datetime
def convertDate( date ):
  """
  Parse *date* either as an absolute 'YYYY-MM-DD' string or as a float
  number of days before now.

  :param date: date string ('YYYY-MM-DD') or a number of days (e.g. '1.5')
  :return: a datetime, or None when the value cannot be interpreted
  """
  # Previous revision used bare except: clauses, which also swallowed
  # KeyboardInterrupt/SystemExit; only conversion errors are expected here.
  try:
    return datetime.datetime.strptime( date, '%Y-%m-%d' )
  except ( ValueError, TypeError ):
    pass
  try:
    value = datetime.datetime.utcnow() - datetime.timedelta( hours = int( 24 * float( date ) ) )
  except ( ValueError, TypeError ):
    gLogger.fatal( "Invalid date", date )
    value = None
  return value
from DIRAC.Core.Base import Script
# Command-line switches understood by this script; their values are parsed
# and acted upon in the __main__ block below.
Script.registerSwitch( '', 'Job=', ' JobID[,jobID2,...]' )
Script.registerSwitch( '', 'Transformation=', ' transformation ID' )
Script.registerSwitch( '', 'Tasks=', ' Associated to --Transformation, list of taskIDs' )
Script.registerSwitch( '', 'Verbose', ' Print more information' )
Script.registerSwitch( '', 'Terse', ' Only print request status' )
Script.registerSwitch( '', 'Full', ' Print full request content' )
Script.registerSwitch( '', 'Status=', ' Select all requests in a given status' )
Script.registerSwitch( '', 'Since=', ' Associated to --Status, start date yyyy-mm-dd or nb of days (default= -one day' )
Script.registerSwitch( '', 'Until=', ' Associated to --Status, end date (default= now' )
Script.registerSwitch( '', 'Maximum=', ' Associated to --Status, max number of requests ' )
Script.registerSwitch( '', 'Reset', ' Reset Failed files to Waiting if any' )
Script.registerSwitch( '', 'All', ' (if --Status Failed) all requests, otherwise exclude irrecoverable failures' )
Script.registerSwitch( '', 'FixJob', ' Set job Done if the request is Done' )
Script.registerSwitch( '', 'Cancel', ' Cancel the request' )
# Usage banner shown by --help.
Script.setUsageMessage( '\n'.join( [ __doc__,
                                     'Usage:',
                                     ' %s [option|cfgfile] [requestID/requestName(if unique)]' % Script.scriptName,
                                     'Arguments:',
                                     ' requestID: a request ID' ] ) )
# # execution
if __name__ == "__main__":
  from DIRAC.Core.Base.Script import parseCommandLine
  parseCommandLine()
  import DIRAC
  from DIRAC import gLogger
  # Defaults for every option before switch parsing.
  jobs = []
  requestID = 0
  transID = None
  taskIDs = None
  tasks = None
  requests = []
  full = False
  verbose = False
  status = None
  until = None
  since = None
  terse = False
  allR = False
  reset = False
  fixJob = False
  # Effectively "no limit" on the number of requests fetched by --Status.
  maxRequests = 999999999999
  cancel = False
  # Parse the command-line switches registered above; numeric values are
  # validated here, with a fatal log message (but no exit) on bad input.
  for switch in Script.getUnprocessedSwitches():
    if switch[0] == 'Job':
      try:
        jobs = [int( job ) for job in switch[1].split( ',' )]
      except:
        gLogger.fatal( "Invalid jobID", switch[1] )
    elif switch[0] == 'Transformation':
      try:
        transID = int( switch[1] )
      except:
        gLogger.fatal( 'Invalid transID', switch[1] )
    elif switch[0] == 'Tasks':
      try:
        taskIDs = [int( task ) for task in switch[1].split( ',' )]
      except:
        gLogger.fatal( 'Invalid tasks', switch[1] )
    elif switch[0] == 'Full':
      full = True
    elif switch[0] == 'Verbose':
      verbose = True
    elif switch[0] == 'Terse':
      terse = True
    elif switch[0] == 'All':
      allR = True
    elif switch[0] == 'Reset':
      reset = True
    elif switch[0] == 'Status':
      status = switch[1].capitalize()
    elif switch[0] == 'Since':
      since = convertDate( switch[1] )
    elif switch[0] == 'Until':
      until = convertDate( switch[1] )
    elif switch[0] == 'FixJob':
      fixJob = True
    elif switch[0] == 'Cancel':
      cancel = True
    elif switch[0] == 'Maximum':
      # Silently keep the "unlimited" default on a bad --Maximum value.
      try:
        maxRequests = int( switch[1] )
      except:
        pass
  # Settings implied by the chosen action: --Reset only makes sense for
  # Failed requests, --FixJob only for Done requests, and --Terse still
  # needs the verbose code path to reach the printing logic.
  if reset:
    status = 'Failed'
  if fixJob:
    status = 'Done'
  if terse:
    verbose = True
  # Default time window for --Status: the last 24 hours.
  if status:
    if not until:
      until = datetime.datetime.utcnow()
    if not since:
      since = until - datetime.timedelta( hours = 24 )
  from DIRAC.RequestManagementSystem.Client.ReqClient import ReqClient
  from DIRAC.RequestManagementSystem.Client.ReqClient import printRequest, recoverableRequest
  reqClient = ReqClient()
  # Build the list of requests to inspect, from (in priority order):
  # --Transformation/--Tasks, positional arguments, or --Job.
  if transID:
    if not taskIDs:
      gLogger.fatal( "If Transformation is set, a list of Tasks should also be set" )
      Script.showHelp()
      DIRAC.exit( 2 )
    # In principle, the task name is unique, so the request name should be unique as well
    # If ever this would not work anymore, we would need to use the transformationClient
    # to fetch the ExternalID
    requests = ['%08d_%08d' % ( transID, task ) for task in taskIDs]
    allR = True
  elif not jobs:
    args = Script.getPositionalArgs()
    if len( args ) == 1:
      allR = True
      requests = [reqID for reqID in args[0].split( ',' ) if reqID]
  else:
    # Look up the requests associated with the given jobs.
    res = reqClient.getRequestIDsForJobs( jobs )
    if not res['OK']:
      gLogger.fatal( "Error getting request for jobs", res['Message'] )
      DIRAC.exit( 2 )
    if res['Value']['Failed']:
      gLogger.error( "No request found for jobs %s" % ','.join( sorted( str( job ) for job in res['Value']['Failed'] ) ) )
    requests = sorted( res['Value']['Successful'].values() )
    if requests:
      allR = True
    else:
      DIRAC.exit( 0 )
  # No explicit selection: fall back to selecting by status/time window.
  if status and not requests:
    allR = allR or status != 'Failed'
    res = reqClient.getRequestIDsList( [status], limit = maxRequests, since = since, until = until )
    if not res['OK']:
      gLogger.error( "Error getting requests:", res['Message'] )
      DIRAC.exit( 2 )
    requests = [reqID for reqID, _st, updTime in res['Value'] if updTime > since and updTime <= until and reqID]
    gLogger.notice( 'Obtained %d requests %s between %s and %s' % ( len( requests ), status, since, until ) )
  if not requests:
    gLogger.notice( 'No request selected....' )
    Script.showHelp()
    DIRAC.exit( 2 )
  # Process each selected request: resolve name->ID, fetch it, then apply
  # the requested action (FixJob / Cancel / Reset / print).
  okRequests = []
  warningPrinted = False
  for reqID in requests:
    # We allow reqID to be the requestName if it is unique
    try:
      requestID = int( reqID )
    except ValueError:
      requestID = reqClient.getRequestIDForName( reqID )
      if not requestID['OK']:
        gLogger.notice( requestID['Message'] )
        continue
      requestID = requestID['Value']
    request = reqClient.peekRequest( requestID )
    if not request["OK"]:
      gLogger.error( request["Message"] )
      DIRAC.exit( -1 )
    request = request["Value"]
    if not request:
      gLogger.error( "no such request %s" % requestID )
      continue
    # When selecting by status, skip requests that changed state meanwhile.
    if status and request.Status != status:
      gLogger.notice( "Request %s is not in requested status %s%s" % \
                      ( reqID, status, ' (cannot be reset)' if reset else '' ) )
      continue
    if fixJob and request.Status == 'Done' and request.JobID:
      # The request is for a job and is Done, verify that the job is in the proper status
      result = reqClient.finalizeRequest( request.RequestID, request.JobID, useCertificates = False )
      if not result['OK']:
        gLogger.error( "Error finalizing job", result['Message'] )
      else:
        gLogger.notice( "Job %d updated to %s" % ( request.JobID, result['Value'] ) )
      continue
    if cancel:
      # Only requests that are not yet terminal can be cancelled.
      if request.Status not in ( 'Done', 'Failed' ):
        ret = reqClient.cancelRequest( requestID )
        if not ret['OK']:
          gLogger.error( "Error canceling request %s" % reqID, ret['Message'] )
        else:
          gLogger.notice( "Request %s cancelled" % reqID )
      else:
        gLogger.notice( "Request %s is in status %s, not cancelled" % ( reqID, request.Status ) )
    elif allR or recoverableRequest( request ):
      okRequests.append( str( requestID ) )
      if reset:
        gLogger.notice( '============ Request %s =============' % requestID )
        ret = reqClient.resetFailedRequest( requestID, allR = allR )
        if not ret['OK']:
          gLogger.error( "Error resetting request %s" % requestID, ret['Message'] )
      else:
        if len( requests ) > 1:
          gLogger.notice( '\n===================================' )
        dbStatus = reqClient.getRequestStatus( requestID ).get( 'Value', 'Unknown' )
        printRequest( request, status = dbStatus, full = full, verbose = verbose, terse = terse )
  # Final summary: the IDs of the requests that were selected/processed.
  if status and okRequests:
    from DIRAC.Core.Utilities.List import breakListIntoChunks
    gLogger.notice( '\nList of %d selected requests:' % len( okRequests ) )
    for reqs in breakListIntoChunks( okRequests, 100 ):
      gLogger.notice( ','.join( reqs ) )
|
Andrew-McNab-UK/DIRAC
|
RequestManagementSystem/scripts/dirac-rms-request.py
|
Python
|
gpl-3.0
| 8,432
|
[
"DIRAC"
] |
73bb2682b333ad9ea2a4f666df66aded28fe2af1a1c91496e376db4828d94e13
|
"""
This transform turns calls to delay() that use non-integer time
expressed in seconds into calls to delay_mu() that use int64 time
expressed in multiples of ref_period.
It does so by inserting multiplication/division/rounding operations around
those calls.
The seconds_to_mu and mu_to_seconds core language functions are also
implemented here, as well as watchdog to syscall conversion.
"""
import ast
from artiq.transforms.tools import value_to_ast
def _seconds_to_mu(ref_period, node):
    """Wrap *node* so a time in seconds becomes integer machine units.

    Builds the AST for ``round64(node / ref_period)``, propagating the
    source location of the original node onto the new nodes.
    """
    quotient = ast.BinOp(left=node,
                         op=ast.Div(),
                         right=value_to_ast(ref_period))
    ast.copy_location(quotient, node)
    rounded = ast.Call(func=ast.Name("round64", ast.Load()),
                       args=[quotient], keywords=[])
    return ast.copy_location(rounded, quotient)
def _mu_to_seconds(ref_period, node):
    """Wrap *node* so a time in machine units becomes seconds.

    Builds the AST for ``node * ref_period``, propagating the source
    location of the original node.
    """
    product = ast.BinOp(left=node,
                        op=ast.Mult(),
                        right=value_to_ast(ref_period))
    return ast.copy_location(product, node)
class _TimeQuantizer(ast.NodeTransformer):
    """AST transformer that rewrites second-based timing calls into
    machine-unit equivalents, and converts watchdog context managers
    into syscall pairs (see the module docstring)."""
    def __init__(self, ref_period):
        # Duration of one machine unit, in seconds.
        self.ref_period = ref_period
        # Counter used to generate unique watchdog-ID variable names.
        self.watchdog_id_counter = 0
    def visit_Call(self, node):
        # NOTE: assumes node.func is an ast.Name (direct function call);
        # attribute calls would raise AttributeError here.
        funcname = node.func.id
        if funcname == "delay":
            # delay(seconds) -> delay_mu(round64(seconds / ref_period))
            node.func.id = "delay_mu"
            if (isinstance(node.args[0], ast.Call)
                    and node.args[0].func.id == "mu_to_seconds"):
                # optimize:
                # delay(mu_to_seconds(x)) -> delay_mu(x)
                node.args[0] = self.visit(node.args[0].args[0])
            else:
                node.args[0] = _seconds_to_mu(self.ref_period,
                                              self.visit(node.args[0]))
            return node
        elif funcname == "seconds_to_mu":
            # Inline the conversion as plain arithmetic on the argument.
            return _seconds_to_mu(self.ref_period,
                                  self.visit(node.args[0]))
        elif funcname == "mu_to_seconds":
            return _mu_to_seconds(self.ref_period,
                                  self.visit(node.args[0]))
        else:
            self.generic_visit(node)
            return node
    def visit_With(self, node):
        self.generic_visit(node)
        # Rewrite "with watchdog(t):" into a sequential block that sets a
        # watchdog via syscall, runs the body, and always clears it again.
        if (isinstance(node.items[0].context_expr, ast.Call)
                and node.items[0].context_expr.func.id == "watchdog"):
            # Unique local name holding the watchdog ID returned by the syscall.
            idname = "__watchdog_id_" + str(self.watchdog_id_counter)
            self.watchdog_id_counter += 1
            # Watchdog timeout is converted from seconds to integer ms.
            time = ast.BinOp(left=node.items[0].context_expr.args[0],
                             op=ast.Mult(),
                             right=ast.Num(1000))
            time_int = ast.Call(
                func=ast.Name("round", ast.Load()),
                args=[time], keywords=[])
            syscall_set = ast.Call(
                func=ast.Name("syscall", ast.Load()),
                args=[ast.Str("watchdog_set"), time_int], keywords=[])
            stmt_set = ast.copy_location(
                ast.Assign(targets=[ast.Name(idname, ast.Store())],
                           value=syscall_set),
                node)
            syscall_clear = ast.Call(
                func=ast.Name("syscall", ast.Load()),
                args=[ast.Str("watchdog_clear"),
                      ast.Name(idname, ast.Load())], keywords=[])
            stmt_clear = ast.copy_location(ast.Expr(syscall_clear), node)
            # Replace the watchdog context manager with "sequential" and
            # wrap the original body in try/finally so the watchdog is
            # cleared even if the body raises.
            node.items[0] = ast.withitem(
                context_expr=ast.Name(id="sequential",
                                      ctx=ast.Load()),
                optional_vars=None)
            node.body = [
                stmt_set,
                ast.Try(body=node.body,
                        handlers=[],
                        orelse=[],
                        finalbody=[stmt_clear])
            ]
        return node
def quantize_time(func_def, ref_period):
    """Rewrite *func_def* in place, converting second-based timing calls
    into machine-unit equivalents (see the module docstring)."""
    quantizer = _TimeQuantizer(ref_period)
    quantizer.visit(func_def)
|
kgilmo/penning_artiq
|
artiq/transforms/quantize_time.py
|
Python
|
gpl-3.0
| 3,862
|
[
"VisIt"
] |
c62ff22acc13f1de03f4e0fdd070bf632ea2951336bf93843b263cb82e88f083
|
# Licensed to the Apache Software Foundation (ASF) under one
# or more contributor license agreements. See the NOTICE file
# distributed with this work for additional information
# regarding copyright ownership. The ASF licenses this file
# to you under the Apache License, Version 2.0 (the
# "License"); you may not use this file except in compliance
# with the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing,
# software distributed under the License is distributed on an
# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
# KIND, either express or implied. See the License for the
# specific language governing permissions and limitations
# under the License.
# coding: utf-8
# pylint: disable= arguments-differ
"""VGG, implemented in Gluon."""
from __future__ import division
__all__ = ['VGG',
'vgg11', 'vgg13', 'vgg16', 'vgg19',
'vgg11_bn', 'vgg13_bn', 'vgg16_bn', 'vgg19_bn',
'get_vgg']
from ....context import cpu
from ....initializer import Xavier
from ...block import HybridBlock
from ... import nn
class VGG(HybridBlock):
    r"""VGG model from the `"Very Deep Convolutional Networks for Large-Scale Image Recognition"
    <https://arxiv.org/abs/1409.1556>`_ paper.
    Parameters
    ----------
    layers : list of int
        Numbers of layers in each feature block.
    filters : list of int
        Numbers of filters in each feature block. List length should match the layers.
    classes : int, default 1000
        Number of classification classes.
    batch_norm : bool, default False
        Use batch normalization.
    """
    def __init__(self, layers, filters, classes=1000, batch_norm=False, **kwargs):
        super(VGG, self).__init__(**kwargs)
        # One filter count per feature block is required.
        assert len(layers) == len(filters)
        with self.name_scope():
            # Convolutional feature extractor followed by the standard
            # VGG classifier head: two 4096-unit FC layers with dropout.
            self.features = self._make_features(layers, filters, batch_norm)
            self.classifier = nn.HybridSequential(prefix='')
            self.classifier.add(nn.Dense(4096, activation='relu',
                                         weight_initializer='normal',
                                         bias_initializer='zeros'))
            self.classifier.add(nn.Dropout(rate=0.5))
            self.classifier.add(nn.Dense(4096, activation='relu',
                                         weight_initializer='normal',
                                         bias_initializer='zeros'))
            self.classifier.add(nn.Dropout(rate=0.5))
            self.classifier.add(nn.Dense(classes,
                                         weight_initializer='normal',
                                         bias_initializer='zeros'))
    def _make_features(self, layers, filters, batch_norm):
        """Build the convolutional trunk: per block, `layers[i]` 3x3 conv
        layers with `filters[i]` channels (optionally batch-normalized),
        each followed by ReLU, then a stride-2 max pool."""
        featurizer = nn.HybridSequential(prefix='')
        for i, num in enumerate(layers):
            for _ in range(num):
                featurizer.add(nn.Conv2D(filters[i], kernel_size=3, padding=1,
                                         weight_initializer=Xavier(rnd_type='gaussian',
                                                                   factor_type='out',
                                                                   magnitude=2),
                                         bias_initializer='zeros'))
                if batch_norm:
                    featurizer.add(nn.BatchNorm())
                featurizer.add(nn.Activation('relu'))
            featurizer.add(nn.MaxPool2D(strides=2))
        return featurizer
    def hybrid_forward(self, F, x):
        # Feature extraction then classification.
        x = self.features(x)
        x = self.classifier(x)
        return x
# Specification
# Maps the VGG variant (total weight-layer count) to its architecture:
# (conv layers per block, conv filters per block).
vgg_spec = {11: ([1, 1, 2, 2, 2], [64, 128, 256, 512, 512]),
            13: ([2, 2, 2, 2, 2], [64, 128, 256, 512, 512]),
            16: ([2, 2, 3, 3, 3], [64, 128, 256, 512, 512]),
            19: ([2, 2, 4, 4, 4], [64, 128, 256, 512, 512])}
# Constructors
def get_vgg(num_layers, pretrained=False, ctx=cpu(), root='~/.mxnet/models', **kwargs):
    r"""Build (and optionally load pretrained weights for) a VGG network.

    From `"Very Deep Convolutional Networks for Large-Scale Image Recognition"
    <https://arxiv.org/abs/1409.1556>`_.

    Parameters
    ----------
    num_layers : int
        Number of layers for the variant of densenet. Options are 11, 13, 16, 19.
    pretrained : bool, default False
        Whether to load the pretrained weights for model.
    ctx : Context, default CPU
        The context in which to load the pretrained weights.
    root : str, default '~/.mxnet/models'
        Location for keeping the model parameters.
    """
    spec_layers, spec_filters = vgg_spec[num_layers]
    net = VGG(spec_layers, spec_filters, **kwargs)
    if pretrained:
        from ..model_store import get_model_file
        # Pretrained files for batch-norm variants carry a '_bn' suffix.
        suffix = '_bn' if kwargs.get('batch_norm') else ''
        model_file = get_model_file('vgg%d%s' % (num_layers, suffix), root=root)
        net.load_params(model_file, ctx=ctx)
    return net
def vgg11(**kwargs):
    r"""Construct an 11-layer VGG network (configuration "A") from
    `"Very Deep Convolutional Networks for Large-Scale Image Recognition"
    <https://arxiv.org/abs/1409.1556>`_.

    Parameters
    ----------
    pretrained : bool, default False
        Whether to load the pretrained weights for model.
    ctx : Context, default CPU
        The context in which to load the pretrained weights.
    root : str, default '~/.mxnet/models'
        Location for keeping the model parameters.
    """
    num_layers = 11
    return get_vgg(num_layers, **kwargs)
def vgg13(**kwargs):
    r"""Construct a 13-layer VGG network (configuration "B") from
    `"Very Deep Convolutional Networks for Large-Scale Image Recognition"
    <https://arxiv.org/abs/1409.1556>`_.

    Parameters
    ----------
    pretrained : bool, default False
        Whether to load the pretrained weights for model.
    ctx : Context, default CPU
        The context in which to load the pretrained weights.
    root : str, default '~/.mxnet/models'
        Location for keeping the model parameters.
    """
    num_layers = 13
    return get_vgg(num_layers, **kwargs)
def vgg16(**kwargs):
    r"""Construct a 16-layer VGG network (configuration "D") from
    `"Very Deep Convolutional Networks for Large-Scale Image Recognition"
    <https://arxiv.org/abs/1409.1556>`_.

    Parameters
    ----------
    pretrained : bool, default False
        Whether to load the pretrained weights for model.
    ctx : Context, default CPU
        The context in which to load the pretrained weights.
    root : str, default '~/.mxnet/models'
        Location for keeping the model parameters.
    """
    num_layers = 16
    return get_vgg(num_layers, **kwargs)
def vgg19(**kwargs):
    r"""Construct a 19-layer VGG network (configuration "E") from
    `"Very Deep Convolutional Networks for Large-Scale Image Recognition"
    <https://arxiv.org/abs/1409.1556>`_.

    Parameters
    ----------
    pretrained : bool, default False
        Whether to load the pretrained weights for model.
    ctx : Context, default CPU
        The context in which to load the pretrained weights.
    root : str, default '~/.mxnet/models'
        Location for keeping the model parameters.
    """
    num_layers = 19
    return get_vgg(num_layers, **kwargs)
def vgg11_bn(**kwargs):
    r"""Construct an 11-layer VGG network with batch normalization, from
    `"Very Deep Convolutional Networks for Large-Scale Image Recognition"
    <https://arxiv.org/abs/1409.1556>`_.

    Parameters
    ----------
    pretrained : bool, default False
        Whether to load the pretrained weights for model.
    ctx : Context, default CPU
        The context in which to load the pretrained weights.
    root : str, default '~/.mxnet/models'
        Location for keeping the model parameters.
    """
    kwargs.update(batch_norm=True)
    return get_vgg(11, **kwargs)
def vgg13_bn(**kwargs):
    r"""Construct a 13-layer VGG network with batch normalization, from
    `"Very Deep Convolutional Networks for Large-Scale Image Recognition"
    <https://arxiv.org/abs/1409.1556>`_.

    Parameters
    ----------
    pretrained : bool, default False
        Whether to load the pretrained weights for model.
    ctx : Context, default CPU
        The context in which to load the pretrained weights.
    root : str, default '~/.mxnet/models'
        Location for keeping the model parameters.
    """
    kwargs.update(batch_norm=True)
    return get_vgg(13, **kwargs)
def vgg16_bn(**kwargs):
    r"""Construct a 16-layer VGG network with batch normalization, from
    `"Very Deep Convolutional Networks for Large-Scale Image Recognition"
    <https://arxiv.org/abs/1409.1556>`_.

    Parameters
    ----------
    pretrained : bool, default False
        Whether to load the pretrained weights for model.
    ctx : Context, default CPU
        The context in which to load the pretrained weights.
    root : str, default '~/.mxnet/models'
        Location for keeping the model parameters.
    """
    kwargs.update(batch_norm=True)
    return get_vgg(16, **kwargs)
def vgg19_bn(**kwargs):
    r"""Construct a 19-layer VGG network with batch normalization, from
    `"Very Deep Convolutional Networks for Large-Scale Image Recognition"
    <https://arxiv.org/abs/1409.1556>`_.

    Parameters
    ----------
    pretrained : bool, default False
        Whether to load the pretrained weights for model.
    ctx : Context, default CPU
        The context in which to load the pretrained weights.
    root : str, default '~/.mxnet/models'
        Location for keeping the model parameters.
    """
    kwargs.update(batch_norm=True)
    return get_vgg(19, **kwargs)
|
Mega-DatA-Lab/mxnet
|
python/mxnet/gluon/model_zoo/vision/vgg.py
|
Python
|
apache-2.0
| 9,401
|
[
"Gaussian"
] |
c7097eccb5ef6fee87bdac6b332bab0dd094660eb578a7f3736c0800d8207b71
|
#/*
# *
# * SimilarTracks for Kodi.
# *
# * Copyright (C) 2015 Brian Hornsby
# *
# * This program is free software: you can redistribute it and/or modify
# * it under the terms of the GNU General Public License as published by
# * the Free Software Foundation, either version 3 of the License, or
# * (at your option) any later version.
# *
# * This program is distributed in the hope that it will be useful,
# * but WITHOUT ANY WARRANTY; without even the implied warranty of
# * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# * GNU General Public License for more details.
# *
# * You should have received a copy of the GNU General Public License
# * along with this program. If not, see <http://www.gnu.org/licenses/>.
# *
# */
import random
import sys
import urllib
import xbmc
import xbmcgui
from urllib2 import urlopen, URLError
import resources.lib.kodisettings as kodisettings
import resources.lib.kodiutils as utils
if sys.version_info < (2, 7):
import simplejson
else:
import json as simplejson
# Module-level addon configuration, read once at startup.
_addonid = 'script.similartracks'
_settings = kodisettings.KodiSettings(_addonid, sys.argv)
# Get addon information and settings.
_addonname = _settings.get_name()
_version = _settings.get_version()
_maxcount = _settings['maxcount']
# A falsy maxcount means "no limit" (see add_tracks_to_playlist).
if not _maxcount:
    _maxcount = -1
_runinbackground = (_settings['background'] == 'true')
# Playlist ordering mode; 0 = random, 2 = avoid repeating the previous
# artist (see get_next_track_to_add).
_order = int(_settings['order'])
def log_debug(msg):
if _settings['debug'] == 'true':
print '%s: DEBUG: %s' % (_addonid, msg)
def log_error(msg):
print '%s: ERROR: %s' % (_addonid, msg)
# Log the effective configuration at startup (debug mode only).
log_debug('Addon Id: [%s]' % (_addonid))
log_debug('Addon Name: [%s]' % (_addonname))
log_debug('Version: [%s]' % (_version))
log_debug('maxcount: %d' % _maxcount)
log_debug('runinbackground: %d' % _runinbackground)
log_debug('order: %d' % _order)
# In foreground mode, show a progress dialog for the whole run.
if not _runinbackground:
    pDialog = xbmcgui.DialogProgress()
    pDialog.create(
        _settings.get_string(1000), _settings.get_string(3000))
def display_notification(header, message):
    """Show a Kodi toast notification using the addon's icon."""
    icon = _settings.get_path('icon.png')
    utils.notification(header, message, image=icon)
def get_lastfm_similar_tracks(artist, track):
    """Query the Last.fm 'track.getsimilar' web service.

    Returns a list of {'title', 'artist'} dicts for tracks similar to
    (artist, track), or [] (after showing an error dialog) when the
    service cannot be reached.
    """
    base_url = 'http://ws.audioscrobbler.com/2.0/?'
    # NOTE: the Last.fm API key is embedded in the addon source.
    params = {'method': 'track.getsimilar', 'artist': artist, 'track':
              track, 'api_key': '5da513b631898f5372a5e5f863651212', 'format': 'json',
              'autocorrect': 1}
    url = '%s%s' % (base_url, urllib.urlencode(params))
    log_debug('Last.fm URL: %s' % url)
    try:
        f = urlopen(url)
    except URLError as exception:
        log_error(exception)
        utils.ok(_settings.get_string(1000), _settings.get_string(3007))
        return []
    # Non-ASCII characters are dropped before JSON parsing.
    json_query = unicode(f.read(), 'ascii', errors='ignore')
    f.close()
    json_response = simplejson.loads(json_query)
    lastfmtracks = []
    for track in json_response['similartracks']['track']:
        lastfmtracks.append({'title': track[
            'name'], 'artist': track['artist']['name']})
    return lastfmtracks
def get_similar_tracks(artist, title):
    """Find Last.fm-similar tracks that exist in the local Kodi library.

    Returns (count, playlisttracks) where playlisttracks is a list of
    {'songid', 'artist', 'title'} dicts matched against the library via
    JSON-RPC. Returns (0, []) if the user cancels the progress dialog.
    """
    log_debug('Looking for similar tracks to %s - %s' % (artist, title))
    lastfmtracks = get_lastfm_similar_tracks(artist, title)
    message = (_settings.get_string(3003) % (len(lastfmtracks)))
    if _runinbackground:
        display_notification(_settings.get_string(1000), message)
    else:
        pDialog.update(75, _settings.get_string(3002), message)
    log_debug('Last.fm returned %d similar tracks' % (len(lastfmtracks)))
    if not _runinbackground and pDialog.iscanceled():
        return (0, [])
    # Fetch all library artists once, to map artist names to artist IDs.
    json_query = xbmc.executeJSONRPC(
        '{"jsonrpc": "2.0", "method": "AudioLibrary.GetArtists", "params": {"properties": [], "sort": { "method": "label" } }, "id": 1}')
    json_query = unicode(json_query, 'ascii', errors='ignore')
    json_response = simplejson.loads(json_query)
    artists = []
    if (json_response['result'] is not None) and ('artists' in json_response['result']):
        for artist in json_response['result']['artists']:
            artists.append(
                {'artist': artist['artist'], 'id': artist['artistid']})
    count = 0
    playlisttracks = []
    # For each Last.fm suggestion, look for an exact title match among the
    # matching artist's songs in the library.
    for track in lastfmtracks:
        tracktitle = track['title'].encode('ascii', 'ignore')
        trackartist = track['artist'].encode('ascii', 'ignore')
        artistid = None
        for artist in artists:
            if 'artist' in artist and artist['artist'].encode('ascii', 'ignore') == trackartist:
                artistid = artist['id']
                break
        if artistid:
            json_query = xbmc.executeJSONRPC(
                '{"jsonrpc": "2.0", "method": "AudioLibrary.GetSongs", "params": {"properties": ["title", "artist"], "sort": { "method": "label" }, "filter": {"artistid": %s} }, "id": 1}' % artistid)
            json_query = unicode(json_query, 'ascii', errors='ignore')
            json_response = simplejson.loads(json_query)
            if (json_response['result'] is not None) and ('songs' in json_response['result']):
                for song in json_response['result']['songs']:
                    if 'title' in song and song['title'] == tracktitle:
                        playlisttracks.append({'songid': song[
                            'songid'], 'artist': song['artist'], 'title': song['title']})
                        count = count + 1
                        if not _runinbackground:
                            pDialog.update(85, _settings.get_string(3004) % ('%s - %s' % (
                                trackartist, tracktitle)), _settings.get_string(3001) % count)
                        break
    return (count, playlisttracks)
def get_next_track_to_add(previous_artist, playlisttracks):
    """Pick the index of the next track to append to the playlist.

    previous_artist: artist of the most recently added track; used in
        mode 2 to avoid back-to-back tracks by the same artist.
    playlisttracks: remaining candidates ({'songid','artist','title'}).

    Returns an index into playlisttracks, or -1 when nothing can be
    added. Ordering modes (_order): 0 = random pick; 2 = first track
    whose leading artist differs from previous_artist (falling back to
    index 0 when they all match); any other mode adds nothing.
    """
    if not playlisttracks:
        return -1
    if _order == 0:
        return random.randrange(len(playlisttracks))
    elif _order == 2:
        # BUGFIX: 'index' was never initialized, so this branch raised a
        # NameError on its first iteration.
        index = 0
        for track in playlisttracks:
            # track['artist'] is a list in Kodi JSON-RPC results; compare
            # the first listed artist.
            if track['artist'][0] != previous_artist:
                return index
            index = index + 1
        # Every candidate is by previous_artist: take the first anyway.
        return 0
    return -1
def add_tracks_to_playlist(artist, playlisttracks):
    """Append similar tracks to the music playlist via JSON-RPC.

    artist: artist of the currently playing track (seed for ordering).
    playlisttracks: candidate tracks; consumed (popped) as they are added.

    Adds up to _maxcount tracks (-1 meaning unlimited) and returns the
    number actually added.
    """
    index = 0
    previous_artist = artist
    while index < _maxcount or _maxcount < 0:
        i = get_next_track_to_add(previous_artist, playlisttracks)
        if i == -1:
            break
        previous_artist = playlisttracks[i]['artist']
        json_query = xbmc.executeJSONRPC(
            '{ "jsonrpc": "2.0", "method": "Playlist.Add", "params": { "playlistid": 0, "item": { "songid": %d } }, "id": 1 }' % playlisttracks[i]['songid'])
        json_query = unicode(json_query, 'ascii', errors='ignore')
        json_response = simplejson.loads(json_query)
        # Remove the chosen track so it cannot be selected twice.
        playlisttracks.pop(i)
        index = index + 1
    return index
# Main entry point: when audio is playing, replace the rest of the music
# playlist with tracks similar to the currently playing one.
if xbmc.Player().isPlayingAudio():
    tag = xbmc.Player().getMusicInfoTag()
    artist = tag.getArtist()
    title = tag.getTitle()
    playlist = xbmc.PlayList(0)
    # Position just after the currently playing entry.
    currenttrackpos = playlist.getposition() + 1
    if currenttrackpos <= len(playlist):
        if _runinbackground:
            display_notification(_settings.get_string(1000), _settings.get_string(4000) % (artist.decode('ascii', 'ignore'), title.decode('ascii', 'ignore')))
        else:
            pDialog.update(25, _settings.get_string(3005), '%s - %s' % (artist.decode('ascii', 'ignore'), title.decode('ascii', 'ignore')))
        count, playlisttracks = get_similar_tracks(artist, title)
        log_debug('Found %d similar tracks in Kodi library' % count)
        if _runinbackground or not pDialog.iscanceled():
            index = 0
            if count > 0:
                # Drop everything after the current track, then append the
                # similar tracks in the configured order.
                while xbmc.PlayList(0).size() > currenttrackpos:
                    xbmc.PlayList(0).remove(xbmc.PlayList(0)[currenttrackpos].getfilename())
                index = add_tracks_to_playlist(artist, playlisttracks)
            if not _runinbackground:
                pDialog.close()
            log_debug('Added %d songs to playlist' % index)
            if _runinbackground:
                display_notification(_settings.get_string(1000), _settings.get_string(4001) % (index, artist.decode('ascii', 'ignore'), title.decode('ascii', 'ignore')))
            else:
                utils.ok(_settings.get_string(1000), _settings.get_string(3006) % index, '%s - %s' % (artist.decode('ascii', 'ignore'), title.decode('ascii', 'ignore')))
        else:
            log_debug('Script was cancelled')
    else:
        log_debug('Unable to get currently playing track')
else:
    # Nothing is playing: tell the user why nothing happened.
    utils.ok(_settings.get_string(1000), _settings.get_string(3008))
|
brianhornsby/script.similartracks
|
default.py
|
Python
|
gpl-3.0
| 8,605
|
[
"Brian"
] |
086db4f311c0420b5fe4e7462ac89fb382a37c821140356bd60d721af40a2ab5
|
# Prefer setuptools (needed for install_requires/entry_points); fall back
# to the stdlib distutils when setuptools is unavailable.
try:
    from setuptools import setup
except ImportError:  # was a bare 'except:' hiding the broken fallback
    from distutils.core import setup  # was 'disutils', which can never import

# Third-party runtime dependencies of the sqrl client.
dependencies = ['docopt', 'ed25519', 'scrypt', 'py-notify']

setup(
    name='sqrl',
    version='0.1.0',
    description='Command line SQRL client',
    url='http://github.com/bushxnyc/sqrl',  # was 'gitbub.com'
    author='Brian Pinkney',
    author_email='bushxnyc@gmail.com',
    install_requires=dependencies,
    packages=['sqrl'],
    entry_points={
        'console_scripts': [
            'sqrl=sqrl.sqrl:main'
        ],
    },
    # Trove classifiers must match the canonical PyPI list exactly;
    # the previous values ('Development Satus', single-colon separators,
    # 'Pythoni') were invalid and ignored by PyPI.
    classifiers=(
        'Development Status :: 4 - Beta',
        'Intended Audience :: Developers',
        'Natural Language :: English',
        'License :: OSI Approved :: MIT License',
        'Programming Language :: Python',
        'Programming Language :: Python :: 2.7',
    )
)
|
bushxnyc/sqrl
|
setup.py
|
Python
|
mit
| 792
|
[
"Brian"
] |
909208b9ad20095bf3a72ac5b98d9fb6c1b259c40ad7079bd219f21a08afbd70
|
#!/usr/bin/env python
#
# Copyright 2016 Google Inc. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""This example demonstrates how to handle policy violation errors.
To get ad groups, run get_ad_groups.py.
The LoadFromStorage method is pulling credentials and properties from a
"googleads.yaml" file. By default, it looks for this file in your home
directory. For more information, see the "Caching authentication information"
section of our README.
"""
from googleads import adwords
from googleads import errors
AD_GROUP_ID = 'INSERT_AD_GROUP_ID_HERE'
def main(client, ad_group_id):
  """Validate two policy-violating ads, exempt what can be exempted,
  drop the rest, and then create the surviving ads.

  Args:
    client: an initialized AdWordsClient.
    ad_group_id: the ID of the ad group to add the ads to.
  """
  ad_group_ad_service = client.GetService('AdGroupAdService', 'v201802')
  # Create expanded text ad that violates an exemptable policy.
  exemptable_expanded_text_ad_operation = {
      'operator': 'ADD',
      'operand': {
          'adGroupId': ad_group_id,
          'ad': {
              # The 'xsi_type' field allows you to specify the xsi:type of the
              # object being created. It's only necessary when you must provide
              # an explicit type that the client library can't infer.
              'xsi_type': 'ExpandedTextAd',
              'headlinePart1': 'Mars Cruise!!!',
              'headlinePart2': 'Best space cruise line.',
              'description': 'Visit the Red Planet in style.',
              'finalUrls': ['http://www.example.com']
          }
      }
  }
  # Create text ad that violates a non-exemptable policy.
  non_exemptable_expanded_text_ad_operation = {
      'operator': 'ADD',
      'operand': {
          'adGroupId': ad_group_id,
          'ad': {
              # The 'xsi_type' field allows you to specify the xsi:type of the
              # object being created. It's only necessary when you must provide
              # an explicit type that the client library can't infer.
              'xsi_type': 'ExpandedTextAd',
              'headlinePart1': 'Mars Cruise with too long of a headline.',
              'headlinePart2': 'Best space cruise line.',
              'description': 'Visit the Red Planet in style.',
              'finalUrls': ['http://www.example.com']
          }
      }
  }
  operations = [exemptable_expanded_text_ad_operation,
                non_exemptable_expanded_text_ad_operation]
  # Validate the ad.
  try:
    # Enable "validate only" to check for errors.
    client.validate_only = True
    ad_group_ad_service.mutate(operations)
    print 'Validation successful, no errors returned.'
  except errors.GoogleAdsServerFault, e:
    for error in e.errors:
      # Get the index of the failed operation from the error's field path
      # elements.
      field_path_elements = error['fieldPathElements']
      first_field_path_element = None
      if field_path_elements:
        first_field_path_element = field_path_elements[0]
      # If the operation index is not present on the first error field path
      # element, then there's no way to determine which operation to remove,
      # so simply throw the exception.
      if (not (first_field_path_element
               and first_field_path_element['field'] == 'operations'
               and 'index' in first_field_path_element)):
        raise e
      index = long(first_field_path_element['index'])
      operation = operations[index]
      if not HandleAPIError(error, operation):
        # Set non-exemptable operation to None to mark for deletion.
        print ('Removing operation with non-exemptable error at index %s.'
               % index)
        operations[index] = None
  # Remove the non-exemptable operations.
  operations = [op for op in operations if op is not None]
  # Add these ads. Disable "validate only" so the ads will get created.
  client.validate_only = False
  if operations:
    response = ad_group_ad_service.mutate(operations)
    if response and response['value']:
      ads = response['value']
      print 'Added %s ad(s) to ad group %s.' % (len(ads), ad_group_id)
      for ad in ads:
        print (' Ad id is %s, type is %s and status is "%s".' %
               (ad['ad']['id'], ad['ad']['Ad.Type'], ad['status']))
    else:
      print 'No ads were added.'
def HandleAPIError(error, operation):
  """Makes an exemption for exemptable PolicyViolationErrors.
  Args:
    error: the error associated with the given operation.
    operation: the operation associated with the given error.
  Returns:
    A boolean that is True if the given error was an exemptable
    PolicyViolationError; otherwise, returns False.
  """
  is_exemptable = False
  # Determine if the operation can be resubmitted with an exemption request.
  if error['ApiError.Type'] == 'PolicyViolationError':
    expanded_text_ad = operation['operand']['ad']
    is_exemptable = (error['isExemptable'] if 'isExemptable' in error else
                     False)
    print ('Ad with headline "%s - %s" violated %s policy "%s".' %
           (expanded_text_ad['headlinePart1'],
            expanded_text_ad['headlinePart2'],
            'exemptable' if is_exemptable else 'non-exemptable',
            error['externalPolicyName']))
    if is_exemptable:
      # Add exemption request to the operation (mutated in place), so that
      # a subsequent mutate call can resubmit it with the exemption.
      print ('Adding exemption request for policy name "%s" on text "%s".'
             % (error['key']['policyName'], error['key']['violatingText']))
      if 'exemptionRequests' not in operation:
        operation['exemptionRequests'] = []
      operation['exemptionRequests'].append({'key': error['key']})
  return is_exemptable
if __name__ == '__main__':
  # Initialize client object.
  # Credentials are read from the "googleads.yaml" file (see module docstring).
  adwords_client = adwords.AdWordsClient.LoadFromStorage()
  main(adwords_client, AD_GROUP_ID)
|
Aloomaio/googleads-python-lib
|
examples/adwords/v201802/error_handling/handle_policy_violation_error.py
|
Python
|
apache-2.0
| 6,157
|
[
"VisIt"
] |
31d965613cea413b5d2a1a840ff7ba4cd68bb1ec1c281aed4426475b455d1b34
|
'''
Created on Nov 12, 2014
@author: alucard
'''
import vtk
from SceneObject import SceneObject
from scene import MenuItem
class MenuItemController(SceneObject):
    def __init__(self, botManager, roleManager, renderManager, parent, iren, menuName):
        '''
        Initialize the MenuItemController.

        botManager/roleManager/renderManager: collaborating managers kept
            as private references.
        parent: parent scene object passed to the SceneObject constructor.
        iren: VTK interactor; an "EndPickEvent" observer is registered on
            it and its handle kept for later removal (see Disconnect).
        menuName: display name of this menu.
        '''
        # Initialize all the variables so that they're unique
        self.__renderManager = renderManager
        # Call the parent constructor
        super(MenuItemController, self).__init__(self.__renderManager.renderers, parent)
        # Reference the botManager
        self.__botManager = botManager
        # Reference the roleManager
        self.__roleManager = roleManager
        # Observer handle, needed to unregister the callback in Disconnect().
        self.__pickerHandle = iren.AddObserver("EndPickEvent", self._OnPickEvent)
        self.name = menuName
        # Currently selected menu node (None until the menu is opened).
        self.selectedNode = None
        # Whether the menu is currently open/visible.
        self.isOpen = False
def __str__(self):
s = "Open : " + str(self.GetOpen()) + "\n"
return s
def Disconnect(self):
iren = self.GetInteractor()
iren.RemoveObserver(self.__pickerHandle)
def _OnPickEvent(self, obj, event):
picker = obj.GetPicker()
actor = picker.GetActor()
if actor is not None:
self.CheckPick(actor)
    def GetOpen(self):
        """Return True while the menu is currently open/visible."""
        return self.isOpen
    def GetWidth(self):
        """Return the controller's width; always 0 (it occupies no space itself)."""
        return 0
    def SetOpen(self, state):
        """Record whether the menu is open (True) or closed (False)."""
        self.isOpen = state
    def AddMenuItem(self, newMenuItem):
        """Append a root-level menu item to this controller's children."""
        self.childrenObjects.append(newMenuItem)
    def OpenMenu(self):
        '''
        Build the menu as it's opened and set it to be visible.
        '''
        # NOTE(review): __BuildMenu, __ActionMenuItem and __baseMenuItem are
        # not defined in this part of the class — presumably defined further
        # down; confirm they exist before relying on this method.
        self.__BuildMenu()
        self.SetOpen(True)
        # Lay out the root items vertically centred, then open the base item.
        self.__SetInitialOptionsHeight()
        # Open the base menu item.
        self.__ActionMenuItem(self.__baseMenuItem)
def __SetInitialOptionsHeight(self):
    '''
    Vertically center the root-level items: stack them in a column whose
    midpoint sits at y = 0. Handles both odd and even item counts.
    '''
    if len(self.childrenObjects) > 0:
        # The running offset starts at the top of the stack, i.e. half
        # of the combined height of all root items.
        offset = 0.0
        for entry in self.childrenObjects:
            offset += entry.GetHeight()
        offset = offset / 2
        # Walk down the column, centering each item in its own slot.
        for entry in self.childrenObjects:
            slotCenter = [0.0, offset - (entry.GetHeight() / 2), 0.0]
            offset -= entry.GetHeight()
            entry.SetSceneObjectPosition(slotCenter)
def CloseMenu(self):
    '''
    Mark the menu closed, notify every root item, and drop all node
    references so the menu tree can be garbage collected.
    '''
    self.SetOpen(False)
    for rootItem in self.childrenObjects:
        rootItem.GlobalMenuClose()
    # Release every reference the controller holds into the tree.
    self.selectedNode = None
    self.childrenObjects = []
# def CloseCurrentMenuItem(self):
# if type(self.selectedNode.parent) is MenuItemController:
# self.selectedNode = None
# self.SetMenuItemControllerMenuItemLocation()
# for item in self.childrenObjects:
# item.GlobalMenuClose()
# item.SetVisible(True)
# else:
# self.SetSelectedMenuItem(self.selectedNode.parent)
def CheckPick(self, pickedActor):
    '''
    Route a picked actor to the menu node it belongs to: the current
    selection (no-op), one of its children, or one of its ancestors.
    '''
    # FIX: selectedNode is None until something has been actioned; the old
    # code dereferenced it unconditionally and raised AttributeError. Fall
    # back to scanning the root items (as the previously commented-out
    # code intended).
    if self.selectedNode is None:
        for item in self.childrenObjects:
            if item.CheckActor(pickedActor) == True:
                self.__ActionMenuItem(item)
                return
        return
    # If the currently selected node is picked, do nothing
    if self.selectedNode.CheckActor(pickedActor):
        return
    # Check if any children were picked
    if self.CheckChildren(pickedActor) == True:
        return
    # Check if any parent items were picked
    if self.CheckParent(self.selectedNode.parent, pickedActor) == True:
        return
def CheckParent(self, parent, actor):
    '''
    Walk up the parent chain and action the first ancestor whose actor
    matches. Returns True when an ancestor was actioned, else False.
    '''
    # Make sure we do not look at MenuItemController
    if type(parent) is not MenuItemController:
        if parent.CheckActor(actor) == True:
            # Perform action if selected node found
            self.__ActionMenuItem(parent)
            return True
        if type(parent.parent) is not MenuItemController:
            # BUG FIX: propagate the recursive result. Previously the
            # recursion found the hit but the True result was discarded,
            # so grandparent picks reported False.
            return self.CheckParent(parent.parent, actor)
    # Return false if no parents selected
    return False
def CheckChildren(self, actor):
    '''
    Return True (after actioning it) when the picked actor belongs to a
    MenuItem child of the currently selected node.
    '''
    # Only the MenuItem children of the current selection are candidates.
    for candidate in self.selectedNode.childrenObjects:
        if type(candidate) is not MenuItem.MenuItem:
            continue
        if candidate.CheckActor(actor) == True:
            candidate_hit = candidate
            self.__ActionMenuItem(candidate_hit)
            return True
    # Nothing matched among the children.
    return False
def GetParentWidth(self, selectedNode):
    # Accumulate widths up the chain; the controller terminates recursion.
    width = selectedNode.GetWidth()
    if type(selectedNode.parent) is not MenuItemController:
        width = width + self.GetParentWidth(selectedNode.parent)
    return width
def GetChildHeight(self, selectedNode):
    '''
    Return the accumulated height of the chain of open MenuItem children
    below selectedNode; 0.0 when there is no open MenuItem child.
    '''
    for item in selectedNode.childrenObjects:
        if type(item) is MenuItem.MenuItem:
            if item.GetOpen():
                return item.GetHeight() + self.GetChildHeight(item)
            else:
                return 0.0
    # BUG FIX: previously fell off the end and implicitly returned None
    # when the node had no MenuItem children, poisoning any arithmetic
    # done with the result.
    return 0.0
def GetRootParent(self, selectedNode):
    # Iteratively climb to the node whose parent is the controller itself.
    node = selectedNode
    while type(node.parent) is not MenuItemController:
        node = node.parent
    return node
def __ActionMenuItem(self, selectedNode):
    '''
    Make selectedNode the current selection. Expandable nodes (those with
    at least one MenuItem child) are opened and laid out; leaves close the
    whole menu (their callback has already fired inside MenuItem).
    '''
    self.selectedNode = selectedNode
    # Determine whether this node can expand at all.
    expandable = any(
        type(child) is MenuItem.MenuItem
        for child in selectedNode.childrenObjects)
    if not expandable:
        # Hit a leaf - close the global menu.
        self.CloseMenu()
        return
    # Collapse the children before (re-)opening the node itself.
    for child in self.selectedNode.childrenObjects:
        if type(child) is MenuItem.MenuItem:
            child.CloseMenuItem()
    self.selectedNode.OpenMenuItem()
    self.CloseUnselectedMenuItems()
    self.__SetAllParentPositions(self.selectedNode)
    self.__SetCurrentOptionsExpandedPosition(self.selectedNode)
def SetSelectedMenuItem(self, selectedNode):
    '''
    Force the selection to selectedNode and rebuild visibility around it.
    '''
    self.selectedNode = selectedNode
    if type(selectedNode.parent) is not MenuItemController:
        # Hide every root item that is not on the selected branch.
        for item in self.childrenObjects:
            if item is not self.selectedNode:
                item.SetVisible(False)
    # Close the selected node's MenuItem children before re-opening it.
    for item in self.selectedNode.childrenObjects:
        # BUG FIX: compare against the MenuItem *class*; the old
        # `type(item) is MenuItem` compared against the MenuItem module
        # and was always False, so children were never closed.
        if type(item) is MenuItem.MenuItem:
            item.CloseMenuItem()
    self.selectedNode.OpenMenuItem()
    self.CloseUnselectedMenuItems()
    # NOTE(review): SetMenuItemPositions is not defined on this class in
    # the visible source - presumably provided elsewhere; confirm before
    # exercising this path.
    self.SetMenuItemPositions(self.selectedNode)
# def SetSelectedMenuItemLocation(self, selctedNodeLocation):
# self.selectedNode.SetSelectedMenuItemLocation()
def __SetCurrentOptionsExpandedPosition(self, selectedNode):
    '''
    Lay out the MenuItem children of selectedNode in a vertical column
    that is centered on the node and offset to its right.
    '''
    if len(selectedNode.childrenObjects) > 0:
        # Calculate the total height
        totalHeight = 0.0
        for item in selectedNode.childrenObjects:
            if type(item) is MenuItem.MenuItem:
                totalHeight += item.GetHeight()
        # Find the vertical midpoint - the vertical origin for the children
        totalHeight = totalHeight / 2
        # Set all the children centered around this point
        for item in selectedNode.childrenObjects:
            if type(item) is MenuItem.MenuItem:
                # X: place the child just right of the parent's edge.
                position = [(selectedNode.GetWidth() / 2) + (item.GetWidth() / 2), 0.0, 0.0]
                position[0] += 0.0
                # Y: center the child in its slot of the column.
                position[1] = totalHeight - (item.GetHeight() / 2)
                position[2] += 0.0
                totalHeight -= item.GetHeight()
                item.SetSceneObjectPosition(position)
def __SetAllParentPositions(self, selectedNode):
    '''
    Put the current option at the origin (by moving the root node)
    '''
    # Special case - current node should be in the center
    totalWidth = 0
    totalAngle = 0
    # Traverse up the tree and calculate the total width of all the collapsed nodes
    rootMenuItem = selectedNode
    while type(selectedNode) is not MenuItemController:
        totalWidth += selectedNode.GetWidth() / 2
        # Find the first MenuItem child, if any, so its half-width can be
        # included in the accumulated offset.
        validChild = None
        for item in selectedNode.childrenObjects:
            if type(item) is MenuItem.MenuItem:
                validChild = item
                break
        if validChild is not None:
            totalWidth += validChild.GetWidth() / 2
        # Make the node's vertical offset zero
        selectedNode.SetSceneObjectPosition([
            (selectedNode.GetWidth() / 2) + (selectedNode.parent.GetWidth() / 2),
            0,  # Force Y to zero.
            0])
        # selectedNode.SetSceneObjectOrientation([0, 30, 0])
        # totalAngle += 30
        selectedNode = selectedNode.parent
        # Keep track of the root node!
        if type(selectedNode) is not MenuItemController:
            rootMenuItem = selectedNode
    # Now set the child to the total width.
    rootMenuItem.SetSceneObjectPosition([-totalWidth,
        0,
        0
        ])
    # rootMenuItem.SetSceneObjectOrientation([0, totalAngle, 0])
def CloseUnselectedMenuItems(self):
    '''
    Close all unselected MenuItems
    '''
    # Find all unselected MenuItems among the selected node's siblings
    for item in self.selectedNode.parent.childrenObjects:
        if item is not self.selectedNode:
            # Make sure we are closing a MenuItem
            if type(item) is MenuItem.MenuItem:
                item.SetOpen(False)
                item.SetVisible(False)
def UpdateMenuSelect(self, handPosInThreeSpace, handIsSelecting):
    '''
    Refresh menu highlighting for the current hand position; when the
    hand is selecting, trigger the item it hovers over instead.
    '''
    # Drop any highlight left over from the previous frame.
    self.__ClearHighlighting()
    target = self.__GetHighlightedMenuItem(handPosInThreeSpace)
    if target is None:
        return
    if handIsSelecting:
        # Trigger the menu item
        self.__ActionMenuItem(target)
    else:
        # Only highlight it
        target.SetHighlightOn()
def __ClearHighlighting(self):
    '''
    Switch highlighting off on every currently visible menu item.
    '''
    visible = self.__BuildListOfVisibleMenuItems(self.__baseMenuItem, [])
    for entry in visible:
        entry.SetHighlightOff()
def __GetHighlightedMenuItem(self, handPosInThreeSpace):
    '''
    Return the visible menu item whose actor centroid is closest to the
    hand position, or None when nothing is visible.
    [Gears quick-fix, need to make this more robust]
    '''
    closestMenuItem = None
    # FIX: use a true infinity instead of the arbitrary constant 10000000,
    # so items farther than the old magic bound are still considered.
    distance = float('inf')
    testableItems = []
    testableItems = self.__BuildListOfVisibleMenuItems(self.__baseMenuItem, testableItems)
    for item in testableItems:
        # Use the bounds to quickly calculate the final absolute point
        bounds = item.vtkActor.GetBounds()
        # Average this to find the 'centroid' position of this menuitem
        itemPos = [(bounds[0] + bounds[1]) / 2.0,
                   (bounds[2] + bounds[3]) / 2.0,
                   (bounds[4] + bounds[5]) / 2.0]
        # NOTE(review): assumes the bounds are absolute, transformed
        # coordinates, not relative - confirm against the actor transform.
        dist = vtk.vtkMath.Distance2BetweenPoints(itemPos, handPosInThreeSpace)
        if dist < distance:
            distance = dist
            closestMenuItem = item
    return closestMenuItem
def __BuildListOfVisibleMenuItems(self, baseMenuItem, visibleMenuItems):
    '''
    Recursively collect baseMenuItem (when visible) and its visible
    MenuItem descendants into visibleMenuItems; returns the list.
    '''
    if baseMenuItem.GetVisible() is True:
        visibleMenuItems.append(baseMenuItem)
    # Recurse into MenuItem children only.
    menuChildren = [c for c in baseMenuItem.childrenObjects
                    if type(c) is MenuItem.MenuItem]
    for child in menuChildren:
        visibleMenuItems = self.__BuildListOfVisibleMenuItems(child, visibleMenuItems)
    return visibleMenuItems
def __BuildMenu(self):
    '''
    Build the main menu: load all root/leaf textures, create the base
    menu item, its four top-level options, and the role/bot/exit leaves.
    '''
    mediaFolder = "../scene/media/menu/"

    def loadTexture(fileName):
        # FIX: the original duplicated this reader/texture setup four
        # times; one local factory builds a clamped, interpolated texture
        # and returns the reader too (kept alive alongside the texture).
        pngReader = vtk.vtkPNGReader()
        pngReader.SetFileName(mediaFolder + fileName)
        vtkTexture = vtk.vtkTexture()
        vtkTexture.SetInputConnection(pngReader.GetOutputPort())
        vtkTexture.InterpolateOn()
        vtkTexture.RepeatOff()
        vtkTexture.EdgeClampOn()
        return pngReader, vtkTexture

    def loadTextureList(fileNames):
        # Build parallel reader/texture lists for a list of PNG names.
        readers = []
        textures = []
        for fileName in fileNames:
            reader, texture = loadTexture(fileName)
            readers.append(reader)
            textures.append(texture)
        return readers, textures

    # Root node texture.
    self.__pngRootReader, self.__vtkRootTexs = loadTexture('root_node.png')
    # Root option textures (selected / unselected variants).
    rootSelectedTexs = ['1high.png', '2high.png', '3high.png', '4high.png']
    rootUnselectedTexs = ['1.png', '2.png', '3.png', '4.png']
    self.__pngRootUnselectedReaders, self.__vtkRootUnselectedTexs = \
        loadTextureList(rootUnselectedTexs)
    self.__pngRootSelectedReaders, self.__vtkRootSelectedTexs = \
        loadTextureList(rootSelectedTexs)
    # Leaf option textures (selected / unselected variants).
    leafSelectedTexs = ['1optionhigh.png', '2optionhigh.png', '3optionhigh.png', '4optionhigh.png']
    leafUnselectedTexs = ['1option.png', '2option.png', '3option.png', '4option.png']
    self.__pngleafUnselectedReaders, self.__vtkleafUnselectedTexs = \
        loadTextureList(leafUnselectedTexs)
    self.__pngleafSelectedReaders, self.__vtkleafSelectedTexs = \
        loadTextureList(leafSelectedTexs)
    # Base (root) menu item.
    self.__baseMenuItem = MenuItem.MenuItem(self.__renderManager, 0.4 * 170/128, 0.4, self, "", self.__vtkRootTexs, self.__vtkRootTexs)
    # The four top-level options hang off the base item.
    rootMenuItems = []
    for i in range(0, 4):
        menuOption = MenuItem.MenuItem(self.__renderManager, 0.2, 0.2, self.__baseMenuItem, "", self.__vtkRootSelectedTexs[i], self.__vtkRootUnselectedTexs[i])
        rootMenuItems.append(menuOption)
    # Populate option 0 with roles and option 1 with bots.
    availableRoles = self.__roleManager.GetMenuItemsRolesAndCallbacks()
    availableBots = self.__botManager.GetMenuItemsBotsAndCallbacks()
    for item in availableRoles:
        menuOption = MenuItem.MenuItem(self.__renderManager, 0.2*5.77, 0.2, rootMenuItems[0], item[0], self.__vtkleafSelectedTexs[0], self.__vtkleafUnselectedTexs[0], item[1], None)
    for item in availableBots:
        menuOption = MenuItem.MenuItem(self.__renderManager, 0.2*5.77, 0.2, rootMenuItems[1], item[0], self.__vtkleafSelectedTexs[1], self.__vtkleafUnselectedTexs[1])
    # Add the exit option under the fourth root item.
    menuOption = MenuItem.MenuItem(self.__renderManager, 0.2*5.77, 0.2, rootMenuItems[3], "Shutdown ARNerve", self.__vtkleafSelectedTexs[1], self.__vtkleafUnselectedTexs[1], self.ExitARNerve, None)
def ExitARNerve(self, object):
    '''
    Menu item callback for exiting.
    '''
    # NOTE(review): `exit()` is the interactive helper injected by the
    # site module; sys.exit() is the conventional call for application
    # code - confirm intent before changing.
    exit()
|
GearsAD/semisorted_arnerve
|
arnerve/scene/MenuItemController.py
|
Python
|
mit
| 19,922
|
[
"VTK"
] |
3d956bcf97e8bc9ffd692b651448de466daf7d6968258ba88200ab6c8caa7d02
|
"""
PySCeS - Python Simulator for Cellular Systems (http://pysces.sourceforge.net)
Copyright (C) 2004-2020 B.G. Olivier, J.M. Rohwer, J.-H.S Hofmeyr all rights reserved,
Brett G. Olivier (bgoli@users.sourceforge.net)
Triple-J Group for Molecular Cell Physiology
Stellenbosch University, South Africa.
Permission to use, modify, and distribute this software is given under the
terms of the PySceS (BSD style) license. See LICENSE.txt that came with
this distribution for specifics.
NO WARRANTY IS EXPRESSED OR IMPLIED. USE AT YOUR OWN RISK.
Brett G. Olivier
"""
from __future__ import division, print_function
from __future__ import absolute_import
from __future__ import unicode_literals
from .version import __version__
__doc__ = '''SBML reading/writing module - now replaced by PySCeS Core2'''
import os, sys
from time import sleep, strftime
from getpass import getuser
try:
input = raw_input # Py2 compatibility
except NameError:
pass
if sys.platform == 'win32':
try:
import libsbml as SBML
except Exception as e:
print('Windows sbml load error', e)
else:
try:
import libsbml as SBML
except Exception as e:
print('Posix sbml load error', e)
class PyscesSBML:
"""The PySCeS interface to libSBML and SBML utilities"""
mode_number_format = '%2.4e'
sbml_level = 2
_debug = 0
SBML = SBML
def SBML_buildBasicModel(
    self,
    mod,
    filename,
    slvl=2,
    dir=None,
    substance=(1, 0),
    volume=(1, 0),
    time=(1, 0),
    arules=None,
    notes=None,
):
    """
    SBML_buildBasicModel(mod,filename,slvl=2,dir=None)
    Create a basic SBML model.
    Arguments:
    =========
    mod: an active PySCeS model object
    filename: the output SBML file name
    slvl [default=2]: the SBML level that should be used
    dir [default=None]: the output directory
    substance [default=(1,0)]: the model substance unit - SBML default is "mole"
    volume [default=(1,0)]: the model volume unit - SBML default is "litre"
    time [default=(1,0)]: the model time unit - SBML default is "second"
    arules [default=None]: optional list of (name, formula) assignment rules
    notes [default=None]: optional notes text embedded in the model
    """
    self.SBML_createModel(mod, filename, slvl, dir)
    self.SBML_setCompartment()
    self.SBML_setNotes(txt=notes)
    self.SBML_setUnits(substance=substance, volume=volume, time=time)
    # FIX: idiomatic identity test for None (was `arules != None`).
    if arules is not None:
        self.SBML_setAssignmentRules(arules)
    self.SBML_setSpecies()
    self.SBML_setReactions()
    self.SBML_setModel()
def write(
    self,
    mod,
    filename,
    slvl=2,
    dir=None,
    substance=(1, 0),
    volume=(1, 0),
    time=(1, 0),
    arules=None,
    notes=None,
):
    """
    write(mod,filename,slvl=2,dir=None)
    Serialise a PySCeS model to an SBML file on disk: assemble the model
    with SBML_buildBasicModel, then flush it with SBML_writeFile.
    Arguments:
    =========
    mod: an active PySCeS model object
    filename: the output SBML file name
    slvl [default=2]: the SBML level that should be used
    dir [default=None]: the output directory
    substance [default=(1,0)]: the model substance unit - SBML default is "mole"
    volume [default=(1,0)]: the model volume unit - SBML default is "litre"
    time [default=(1,0)]: the model time unit - SBML default is "second"
    """
    self.SBML_buildBasicModel(
        mod, filename, slvl, dir, substance, volume, time, arules, notes
    )
    self.SBML_writeFile()
def getSBML_document(
    self, mod, substance=(1, 0), volume=(1, 0), time=(1, 0), arules=None, notes=None
):
    """
    Build and return the libSBML document object for a PySCeS model.
    Arguments:
    =========
    mod: an active PySCeS model object
    substance [default=(1,0)]: the model substance unit - SBML default is "mole"
    volume [default=(1,0)]: the model volume unit - SBML default is "litre"
    time [default=(1,0)]: the model time unit - SBML default is "second"
    """
    # The model is assembled in memory under a throwaway filename at
    # level 2; nothing is written to disk here.
    self.SBML_buildBasicModel(
        mod, 'tempXML', 2, None, substance, volume, time, arules, notes
    )
    return self.sbml_document
def getSBML_string(
    self, mod, substance=(1, 0), volume=(1, 0), time=(1, 0), arules=None, notes=None
):
    """
    Build a PySCeS model as SBML and return it serialised to a string.
    Arguments:
    =========
    mod: an active PySCeS model object
    substance [default=(1,0)]: the model substance unit - SBML default is "mole"
    volume [default=(1,0)]: the model volume unit - SBML default is "litre"
    time [default=(1,0)]: the model time unit - SBML default is "second"
    """
    # Same in-memory assembly as getSBML_document, then serialise.
    self.SBML_buildBasicModel(
        mod, 'tempXML', 2, None, substance, volume, time, arules, notes
    )
    return self.sbml_document.toSBML()
def __cleanString__(self, s):
    """Return s with leading and trailing whitespace removed."""
    # FIX: a single strip() replaces the old lstrip()+rstrip() pair.
    return s.strip()
def parseForcingFunctions(self):
    '''
    Parse the model's forcing-function block (one `lhs = rhs` assignment
    per line) into self.__forcing_function_dic__, stripping the 'self.'
    prefixes and surrounding whitespace from both sides.
    '''
    self.__forcing_function_dic__ = {}
    for line in self.model_obj._Function_forced.split('\n'):
        if line == '':
            continue
        parts = line.split('=')
        lhs = parts[0].replace('self.', '')
        rhs = parts[1].replace('self.', '')
        self.__forcing_function_dic__.setdefault(
            self.__cleanString__(lhs), self.__cleanString__(rhs)
        )
def getAssignmentRules(self):
    '''
    Refresh the forcing-function parse and return the rules as a list of
    (variable, formula) pairs.
    '''
    self.parseForcingFunctions()
    # FIX: dict.items() already yields the (key, value) pairs the old
    # loop rebuilt by hand from keys().
    return list(self.__forcing_function_dic__.items())
def SBML_createModel(self, mod, filename, slvl=2, dir=None):
    """
    SBML_createModel(mod,filename,slvl=2,dir=None)
    Set up an SBML document and extract the model NetworkDict
    Arguments:
    =========
    mod: a PySCeS model object
    filename: the output filename
    slvl [default=2]: SBML level required
    dir [default=None]: output directory
    """
    if self._debug:
        print('SBML_createModel')
    self.model_obj = mod
    self.__nDict__ = self.model_obj.__nDict__
    self.model_filename = filename
    # FIX: idiomatic identity test for None (was `dir == None`).
    if dir is None:
        self.model_dir = self.model_obj.ModelOutput
    else:
        self.model_dir = dir
    self.sbml_level = slvl
    self.sbml_model = self.SBML.Model()
    self.sbml_model.setName(self.model_obj.ModelFile[:-4])
    self.sbml_document = self.SBML.SBMLDocument()
    # new stuff
    self.global_parameters = []
    self.model_compartment_name = None
    # Build {name: float(value)} from the model's init strings.
    self.__InitStrings__ = [
        s.replace('self.', '') for s in self.model_obj._PysMod__InitStrings
    ]
    self.__InitDict__ = {}
    for ii in self.__InitStrings__:
        l, r = ii.split('=')
        self.__InitDict__.setdefault(
            self.__cleanString__(l), float(self.__cleanString__(r))
        )
    # create forcing function dic
    try:
        self.parseForcingFunctions()
    except Exception:
        # FIX: narrowed from a bare `except:` which also swallowed
        # SystemExit/KeyboardInterrupt; missing forcing functions are
        # non-fatal.
        print("No pre-defined forcing functions")
    if self.sbml_level == 1:
        if sys.platform == 'win32':
            print(
                'Due to a bug in self.SBML for Windows writing a lvl 1 file will crash your session writing lvl 2 instead ... sorry'
            )
            self.sbml_document.setLevel(2)
        else:
            self.sbml_document.setLevel(self.sbml_level)
    else:
        self.sbml_document.setLevel(2)
def SBML_setCompartment(self, name=None, vol=1):
    """
    SBML_setCompartment(name=None,vol=1)
    Initialise SBML compartments (note PySCeS currently utilises a single compartment)
    Arguments:
    =========
    name [default=None]: the compartment name, default is compartment1
    vol [default=1]: the compartment volume
    """
    if self._debug:
        print('SBML_setCompartment')
    comp_def = self.sbml_model.createCompartment()
    if not name:
        self.model_compartment_name = 'Cell'
    else:
        self.model_compartment_name = name
        # Replace characters that are not legal in an SBML identifier.
        for char in [' ', '.', '-', '*', '?', '!', '\t', '\n']:
            self.model_compartment_name = self.model_compartment_name.replace(
                char, '_'
            )
        # BUG FIX: the original re-assigned the raw `name` here, throwing
        # away the sanitised identifier the loop above just built.
    comp_def.setId(self.model_compartment_name)
    comp_def.setVolume(vol)
def SBML_setNotes(self, txt=None):
    '''
    Attach an XHTML notes body to the SBML model; when txt is given it is
    wrapped in a monospace span inside the body element.
    '''
    notes = '<body xmlns="http://www.w3.org/1999/xhtml">'
    # FIX: idiomatic identity test for None (was `txt != None`).
    if txt is not None:
        notes += '<span style="font-family: Courier New,Courier,monospace;">'
        notes += txt
        notes += '</span>'
    notes += '</body>'
    self.sbml_model.setNotes(notes)
def SBML_setUnits(self, **kwargs):
    """
    SBML_setUnits(substance=(1,0), volume=(1,0), time=(1,0))
    Set the SBML default units. Each keyword maps to a (multiplier, scale)
    pair applied to the SBML base unit, e.g. substance=(1,-3) means
    (1*10**-3)*mole and time=(60,0) means (60*10**0)*seconds.
    Arguments:
    =========
    substance [default=(1,0)]: the model substance unit - SBML default is "mole"
    volume [default=(1,0)]: the model volume unit - SBML default is "litre"
    time [default=(1,0)]: the model time unit - SBML default is "second"
    """
    # Recognised unit names and their SBML base kinds.
    baseKind = {'substance': 'mole', 'volume': 'litre', 'time': 'second'}
    for unitName in kwargs:
        factor = kwargs[unitName]
        unitDef = self.sbml_model.createUnitDefinition()
        unitDef.setId(unitName)
        unit = self.sbml_model.createUnit()
        if unitName in baseKind:
            unit.setKind(self.SBML.UnitKind_forName(baseKind[unitName]))
        unit.setMultiplier(factor[0])
        unit.setScale(factor[1])
        unit.setOffset(0)
def SBML_setSpecies(self):
    """
    SBML_setSpecies()
    Initialise and add species information to the SBML model
    Arguments:
    None
    """
    if self._debug:
        print('SBML_setSpecies')
    # Variable species first, then fixed species.
    reagList = self.model_obj.__species__ + self.model_obj.__fixed_species__
    numVariable = len(self.model_obj.__species__)
    # FIX: enumerate() replaces the old `for x in range(len(...))` loop.
    for idx, reagId in enumerate(reagList):
        s = self.sbml_model.createSpecies()
        s.setId(reagId)
        s.setName(reagId)
        s.setCompartment(self.model_compartment_name)
        if reagId in self.model_obj.__fixed_species__:
            # Fixed species are constant boundary conditions.
            s.setBoundaryCondition(True)
            s.setConstant(True)
        else:
            s.setBoundaryCondition(False)
        # Variable species store their initial value on the model object
        # under '<name>_init'; fixed species under the bare name.
        if idx < numVariable:
            reagName = reagId + '_init'
        else:
            reagName = reagId
        if self.sbml_level == 1:
            s.setInitialAmount(getattr(self.model_obj, reagName))
        else:
            s.setInitialConcentration(getattr(self.model_obj, reagName))
def SBML_setAssignmentRules(self, rules=None):
    '''
    Add assignment rules to the model: each (name, formula) pair becomes
    a non-constant global parameter plus an SBML AssignmentRule.
    rules [default=None]: list of (name, formula) pairs
    '''
    # FIX: avoid the shared mutable default-argument pitfall (`rules=[]`).
    if rules is None:
        rules = []
    for rule in rules:
        print(rule)
        self.global_parameters.append(rule[0])
        # The rule target must exist as a non-constant parameter.
        p = self.sbml_model.createParameter()
        p.setId(rule[0])
        p.setValue(getattr(self.model_obj, rule[0]))
        p.setConstant(False)
        r = self.sbml_model.createAssignmentRule()
        r.setVariable(rule[0])
        r.setFormula(rule[1])
        r.setMathFromFormula()
def SBML_setReactions(self):
    """
    SBML_setReactions()
    Add kinetic rate laws to the SBMl model
    Arguments:
    None
    """
    if self._debug:
        print('SBML_setReactions')
    # TotSpecies = list(self.model_obj._PysMod__FixedReagents)+list(self.model_obj._PysMod__VarReagents)
    reaction_params = []
    for rxn in self.model_obj._PysMod__ReactionIDs:
        print('Adding reaction:', rxn)
        i = self.sbml_model.createReaction()
        i.setId(rxn)
        # NOTE(review): self.model_network_dict is not assigned anywhere in
        # the visible class (SBML_createModel stores self.__nDict__) -
        # confirm where this attribute is set before relying on this path.
        ndr = self.model_network_dict[rxn]
        # Negative stoichiometry -> reactant, positive -> product,
        # zero -> modifier.
        for reagent in ndr['Reagents']:
            stoich = ndr['Reagents'][reagent]
            species = self.SBML.SpeciesReference(
                reagent.replace('self.', ''), abs(stoich)
            )
            if stoich < 0:
                i.addReactant(species)
            elif stoich > 0:
                i.addProduct(species)
            elif stoich == 0:
                i.addModifier(species)
        # add a volume to convert rate equation to kinetic law
        kineticLaw = ndr['RateEq'].replace('self.', '')
        kineticLaw = kineticLaw.replace('scipy.', '')
        if self.model_compartment_name not in self.model_obj.parameters:
            kineticLaw = self.model_compartment_name + ' * (' + kineticLaw + ')'
        else:
            kineticLaw = kineticLaw
        kineticLaw = self.SBML.KineticLaw(kineticLaw)
        # local parameters retired in favour of globals
        ## for parameter in ndr['Params']:
        ## p = parameter.replace('self.','')
        ## if p not in self.model_obj.__fixed_species__ and p not in self.global_parameters:
        ## try:
        ## kineticLaw.addParameter(self.SBML.Parameter(p, getattr(self.model_obj,p)))
        ## reaction_params.append(p)
        ## except AttributeError,err :
        ## print '\n', err
        ## print "Parameter set error ... are there forcing functions??"
        ## sleep(0.5)
        i.setKineticLaw(kineticLaw)
        if ndr['Type'] == 'Rever':
            rev = True
        else:
            rev = False
        i.setReversible(rev)
        # Add modifiers to reaction - brett 20050607
        for reac in self.model_obj.__modifiers__:
            if reac[0] == rxn:
                for x in reac[1]:
                    print(' ' + reac[0] + ' has modifier: ' + x)
                    self.sbml_model.createModifier().setSpecies(x)
    # add extra parameter initialised but not in reactions
    # we have to do this in case the assignment rules are added after we build the model
    hack = list(self.__forcing_function_dic__.keys())
    not_xparams = (
        self.global_parameters
        + reaction_params
        + list(self.model_obj.species)
        + list(self.model_obj.fixed_species)
        + [self.model_compartment_name]
        + hack
    )
    # Any init-string name not already accounted for becomes a global
    # SBML parameter.
    for k in list(self.__InitDict__.keys()):
        if k not in not_xparams:
            print('Adding parameter:', k)
            self.global_parameters.append(k)
            p = self.sbml_model.createParameter()
            p.setId(k)
            p.setValue(getattr(self.model_obj, k))
def SBML_setModel(self):
    """
    SBML_setModel()
    Attach the assembled SBML model to the predefined SBML document.
    Arguments:
    None
    """
    if self._debug:
        print('SBML_setModel')
    self.sbml_document.setModel(self.sbml_model)
def SBML_writeFile(self):
    """
    SBML_writeFile()
    Write the SBML document to the predefined output file, injecting a
    PySCeS provenance comment after the XML declaration line.
    Arguments:
    None
    """
    self.SBML.writeSBML(self.sbml_document, 'pysces_sbml_tmp.xml')
    try:
        UseR = getuser()
    except Exception:
        # FIX: narrowed from a bare `except:`.
        UseR = ''
    # FIX: `with` guarantees both handles are closed even if a write
    # fails (the originals leaked on exception).
    with open('pysces_sbml_tmp.xml', 'r') as Fin, open(
        os.path.join(self.model_dir, self.model_filename + '.xml'), 'w'
    ) as Fout:
        for cntr, line in enumerate(Fin):
            if cntr == 1:
                # Insert the provenance comment before the second line.
                Fout.write(
                    '<!-- Created with PySCeS ('
                    + __version__
                    + ') on '
                    + strftime("%a, %d %b %Y %H:%M:%S")
                    + ' by '
                    + UseR
                    + ' -->\n'
                    + line
                )
            else:
                Fout.write(line)
    os.remove('pysces_sbml_tmp.xml')
def convert2psc(self, filename, dir=None, dirOut=None):
    """
    convert2psc(filename,dir=None,dirOut=None)
    Convert an SBML file into a PySCeS input file
    Arguments:
    =========
    filename: the SBML source file
    dir [default=None]: specify the SBMl file directory
    dirOut [default=None]: the PSC file output directory
    """
    if dir == None:
        dir = os.getcwd()
    File = os.path.join(dir, filename)
    assert os.path.exists(File), "Invalid path"
    self.model_filename = filename
    r = self.SBML.SBMLReader()
    d = r.readSBML(File)
    m = d.getModel()
    def getName(i):
        # Level-1 SBML identifies entities by name, level 2+ by id.
        if d.getLevel() == 1:
            return i.getName()
        else:
            return i.getId()
    reactions = m.getListOfReactions()
    ReactionIDs = []
    for i in reactions:
        ReactionIDs.append(getName(i))
    init_fixed = []
    init_var = []
    init_par = []
    parameters = []
    for i in m.getListOfSpecies():
        parName = getName(i)
        # if a species is a BoundaryCondition or constant it becomes fixed - brett 20050111
        if i.getBoundaryCondition() or i.getConstant():
            if i.getConstant() and not i.getBoundaryCondition():
                print(
                    parName,
                    ' is set as constant, assuming: BoundaryCondition = True',
                )
            init_fixed.append((parName, i.getInitialConcentration()))
        else:
            init_var.append((parName, i.getInitialConcentration()))
    # One skeleton entry per reaction, filled in below.
    NetworkDict = dict(
        [
            (i, dict.fromkeys(['Params', 'RateEq', 'Reagents', 'Type']))
            for i in ReactionIDs
        ]
    )
    for i in reactions:
        rDict = NetworkDict[getName(i)]
        j = i.getKineticLaw()
        par = []
        # j may be None when a reaction has no kinetic law; the except
        # branch then leaves Params/RateEq empty.
        try:
            for k in j.getListOfParameters():
                par.append(getName(k))
                init_par.append((getName(k), k.getValue()))
                parameters.append(getName(k))
            rDict['Params'] = par
            rDict['RateEq'] = j.getFormula()
            if d.getLevel() == 1:
                # Level-1 formulas: strip spaces, translate '^' to '**'.
                rDict['RateEq'] = rDict['RateEq'].replace(' ', '')
                rDict['RateEq'] = rDict['RateEq'].replace('^', '**')
        except Exception as err:
            rDict['Params'] = []
            rDict['RateEq'] = ''
            print(err)
        Substrates = []
        Products = []
        # Substrate stoichiometries are stored negative, products positive.
        for k in i.getListOfReactants():
            species = k.getSpecies()
            stoich = -k.getStoichiometry()
            Substrates.append((species, stoich))
        for k in i.getListOfProducts():
            species = k.getSpecies()
            stoich = k.getStoichiometry()
            Products.append((species, stoich))
        # this is to eliminate zero stoichiometries {0}xyz
        badList = []
        for sub in Substrates:
            if sub[1] == 0:
                badList.append(sub)
        for bad in badList:
            Substrates.pop(Substrates.index(bad))
        badList = []
        for prod in Products:
            if prod[1] == 0:
                badList.append(prod)
        for bad in badList:
            Products.pop(Products.index(bad))
        # add source/sink pools to nasty substrate/productless reactions - brett 20050908
        if len(Substrates) == 0:
            Substrates.append(('$pool', -1.0))
        if len(Products) == 0:
            Products.append(('$pool', 1.0))
        # print Substrates
        # print Products
        rDict['Reagents'] = dict(Substrates + Products)
        if i.getReversible() == True:
            t = 'Rever'
        else:
            t = 'Irrev'
        rDict['Type'] = t
        NetworkDict[getName(i)].update(rDict)
    # Add extra model parameters not defined in reactions (apparently)
    if len(m.getListOfParameters()) > 0:
        for x in m.getListOfParameters():
            if getName(x) not in parameters:
                # print getName(x)
                init_par.append((getName(x), x.getValue()))
    if dirOut == None:
        self.model_filename = os.path.join(os.getcwd(), self.model_filename)
    else:
        self.model_filename = os.path.join(dirOut, self.model_filename)
    # print 'init_par'
    # print init_par
    # print 'init_var'
    # print init_var
    # print 'init_fixed'
    # print init_fixed
    # sometimes things just work lekker (replaced all the old showS^&t) - brett 20050913
    outFile = open(self.model_filename + '.psc', 'w')
    self.PSC_writeHeader(outFile)
    self.PSC_writeFixedSpeciesList(outFile, init_fixed)
    self.PSC_writeRateEquations(outFile, NetworkDict, number_format='%2.3f')
    self.PSC_writeSpecies(outFile, init_var)
    self.PSC_writeFixedSpecies(outFile, init_fixed)
    self.PSC_writeParameters(outFile, init_par)
    outFile.close()
    # Initialise compartment volumes as a parameter - brett 20050908
    compartmentList = []
    for comp in m.getListOfCompartments():
        # print comp
        compartmentList.append((getName(comp), comp.getVolume()))
    if len(compartmentList) > 1:
        print('\nINFO: PySCeS models are assumed to have a single compartment')
    if len(compartmentList) > 0:
        F = open(self.model_filename + '.psc', 'a')
        F.write('\n## Initialise compartment volumes')
        for comp in compartmentList:
            F.write('\n' + comp[0] + ' = ' + str(comp[1]))
            ## parameters.append(x[0])
            ## init_par.append(x)
        F.write('\n')
        F.close()
    # Add assignment rules as forcing functions - brett 20050908
    pscRules = []
    for rule in m.getListOfRules():
        pscRules.append((rule.getVariable(), rule.getFormula()))
    if len(pscRules) > 0:
        F = open(self.model_filename + '.psc', 'a')
        F.write('\n## Assignment rules translated to forcing functions\n')
        for rule in pscRules:
            rule0 = 'self.' + rule[0]
            rule1l = rule[1].split()
            # Prefix every identifier token with 'self.'; single digits
            # 1-9 are skipped.
            # NOTE(review): '0' and multi-digit literals are NOT skipped
            # here and would be prefixed too - confirm whether formulas
            # can contain such tokens.
            for word in range(len(rule1l)):
                if rule1l[word].isalnum():
                    if rule1l[word] not in [
                        '1',
                        '2',
                        '3',
                        '4',
                        '5',
                        '6',
                        '7',
                        '8',
                        '9',
                    ]:
                        rule1l[word] = 'self.' + rule1l[word]
            F.write('!F ' + rule0 + ' = ')
            for word in rule1l:
                F.write(word + ' ')
            F.write('\n')
        F.write('\n')
        F.close()
    if len(m.getNotes()) > 0:
        F = open(self.model_filename + '.psc', 'a')
        F.write('\n## Model notes' + m.getNotes().replace('\n', '\n# ') + '\n\n')
        F.close()
def PSC_writeHeader(self, File):
    """
    PSC_writeHeader(File)
    Write a PSC file header (PySCeS version, source file name, timestamp
    and user) to an open file object.
    Arguments:
    =========
    File: a writable open text file object
    """
    try:
        UseR = getuser()
    except Exception:
        # FIX: narrowed from a bare `except:` which also swallowed
        # SystemExit/KeyboardInterrupt.
        UseR = ''
    header = ''
    header += '# PySCeS (' + __version__ + ') model input file\n'
    header += '# PySCeS can be found at http://pysces.sourceforge.net/\n'
    header += '# Original input file: ' + File.name.split('\\')[-1][:-4] + '\n'
    header += (
        '# This file generated: '
        + strftime("%a, %d %b %Y %H:%M:%S")
        + ' by '
        + UseR
        + '\n\n'
    )
    File.write(header)
    File.write('\n')
def PSC_writeSpecies(self, File, species):
    """
    PSC_writeSpecies(File,species)
    Write out model species initialisations to file
    Arguments:
    =========
    File: a writable open file object
    species: a list of (species, value) pairs
    """
    File.write('\n## Variable species initial values\n')
    # FIX: iterate the pairs directly instead of indexing via
    # range(len(...)) and buffering through an intermediate list.
    for name, value in species:
        File.write(name + ' = ' + self.mode_number_format % value + '\n')
    File.write('\n')
def PSC_writeFixedSpeciesList(self, File, fixed_species):
    """
    PSC_writeFixedSpeciesList(File,fixed_species)
    Write the fixed species declaration (FIX: line) to a PSC file
    Arguments:
    =========
    File: open, writable file object
    fixed_species: a list of (species,value) pairs
    """
    File.write('## Fixed species\n')
    if len(fixed_species) == 0:
        File.write('# <none>')
    else:
        File.write('FIX: ')
        # Only the names appear on the FIX: line; values are written
        # later by PSC_writeFixedSpecies.
        for name, _value in fixed_species:
            File.write(name + ' ')
    File.write('\n\n')
def PSC_writeFixedSpecies(self, File, fixed_species):
    """
    PSC_writeFixedSpecies(File,fixed_species)
    Write fixed species initialisations to a PSC file
    Arguments:
    =========
    File: open, writable file object
    fixed_species: a list of (species,value) pairs
    """
    File.write('\n## Fixed species\n')
    # FIX: direct pair iteration (was range(len(...)) + accumulator list).
    for name, value in fixed_species:
        File.write(name + ' = ' + self.mode_number_format % value + '\n')
    File.write('\n')
def PSC_writeParameters(self, File, parameters):
    """
    PSC_writeParameters(File,parameters)
    Write model parameter initialisations to a PSC file
    Arguments:
    =========
    File: open, writable file object
    parameters: a list of (parameter,value) pairs
    """
    File.write('\n## Parameters\n')
    # FIX: direct pair iteration (was range(len(...)) + accumulator list).
    for name, value in parameters:
        File.write(name + ' = ' + self.mode_number_format % value + '\n')
    File.write('\n')
def PSC_writeRateEquations(self, File, NetworkDict, number_format='%2.3f'):
"""
PSC_writeRateEquations(File,NetworkDict,number_format='%2.3f')
Write model rate equations to a PSC file
Arguments:
=========
File: open, writable file object
NetworkDict: a PySCeS network dictionary
number_format [default='%2.3f']: number formatting to use in rate laws
"""
out_list = []
out_list.append('\n## Reaction stoichiometry and rate equations\n')
for key in NetworkDict:
out_list.append(key + ':\n')
reagL = []
reagR = []
for reagent in NetworkDict[key]['Reagents']:
if NetworkDict[key]['Reagents'][reagent] > 0:
if NetworkDict[key]['Reagents'][reagent] == 1.0:
reagR.append(reagent.replace('self.', ''))
else:
reagR.append(
'{'
+ number_format % abs(NetworkDict[key]['Reagents'][reagent])
+ '}'
+ reagent.replace('self.', '')
)
elif NetworkDict[key]['Reagents'][reagent] < 0:
if NetworkDict[key]['Reagents'][reagent] == -1.0:
reagL.append(reagent.replace('self.', ''))
else:
reagL.append(
'{'
+ number_format % abs(NetworkDict[key]['Reagents'][reagent])
+ '}'
+ reagent.replace('self.', '')
)
elif NetworkDict[key]['Reagents'][reagent] == 0:
# reagL.append(reagent.replace('self.',''))
print(NetworkDict[key]['Reagents'])
input('WTF: please contact developers')
if len(reagL) == 0:
print('Zero pool substrate', File.name)
reagL.append('$pool')
if len(reagR) == 0:
print('Zero pool product', File.name)
reagR.append('$pool')
substring = ''
count = 0
for x in reagL:
if count != 0:
substring += ' + '
substring += x.replace(' ', '')
count += 1
prodstring = ''
count = 0
for x in reagR:
if count != 0:
prodstring += ' + '
prodstring += x.replace(' ', '')
count += 1
if NetworkDict[key]['Type'] == 'Rever':
symbol = ' = '
else:
symbol = ' > '
out_list.append('\t' + substring + symbol + prodstring + '\n')
out_list.append(
'\t' + NetworkDict[key]['RateEq'].replace('self.', '') + '\n\n'
)
for x in out_list:
File.write(x)
|
bgoli/pysces
|
pysces/PyscesSBML.py
|
Python
|
bsd-3-clause
| 30,871
|
[
"PySCeS"
] |
87dbe47d17fa3aecbb3644da921c943700d6bd45631c0678168c99d1342ca01b
|
__RCSID__ = "$Id$"
# FIXME: if it requires a dirac.cfg it is not a unit test and should be moved to TestDIRAC
from DIRAC.Core.Base.Script import parseCommandLine
parseCommandLine()
import unittest
from DIRAC.Resources.Storage.StorageFactory import StorageFactory
class StorageFactoryTestCase( unittest.TestCase ):
  """ Base class for the StorageFactory test cases

  Exercises StorageFactory against live LHCb storage endpoints, so it
  needs a configured dirac.cfg and network access (see FIXME above).
  """
  # Disabled test kept for reference: resolving a storage alias requires
  # a configured dirac.cfg.
  """
  def test_getStorageName(self):
    factory = StorageFactory()
    initialName = 'RAWFileDestination'
    res = factory.getStorageName(initialName)
    self.assert_(res['OK'])
    self.assertEqual(res['Value'],'CERN-tape')
  """

  def test_getStorage( self ):
    """Build a storage object from an explicit parameter dictionary and
    check the parameters and transport URL round-trip unchanged."""
    storageDict = {}
    storageDict['StorageName'] = 'IN2P3-disk'
    storageDict['PluginName'] = 'SRM2'
    storageDict['Protocol'] = 'srm'
    storageDict['Host'] = 'ccsrmtestv2.in2p3.fr'
    storageDict['Port'] = '8443'
    storageDict['WSUrl'] = '/srm/managerv2?SFN='
    storageDict['Path'] = '/pnfs/in2p3.fr/data'
    storageDict['SpaceToken'] = 'LHCb_FAKE'
    factory = StorageFactory( vo = 'lhcb' )
    res = factory.getStorage( storageDict )
    self.assert_( res['OK'] )
    storageStub = res['Value']
    parameters = storageStub.getParameters()
    self.assertEqual( parameters, storageDict )
    res = storageStub.getTransportURL( '/lhcb/user' )
    self.assert_( res['OK'] )
    self.assertEqual( res['Value']['Successful']['/lhcb/user'], 'srm://ccsrmtestv2.in2p3.fr:8443/srm/managerv2?SFN=/pnfs/in2p3.fr/data/lhcb/user' )

  def test_getStorages( self ):
    """Resolve a named storage through the CS and check its parameters;
    then perform best-effort remote operations (results not asserted)."""
    factory = StorageFactory( vo = 'lhcb' )
    storageName = 'IN2P3-disk'
    protocolList = ['SRM2']
    res = factory.getStorages( storageName, protocolList )
    self.assert_( res['OK'] )
    storageStubs = res['Value']['StorageObjects']
    storageStub = storageStubs[0]
    storageDict = {}
    storageDict['StorageName'] = 'IN2P3-disk'
    storageDict['PluginName'] = 'SRM2'
    storageDict['Protocol'] = 'srm'
    storageDict['Host'] = 'ccsrm02.in2p3.fr'
    storageDict['Port'] = '8443'
    storageDict['WSUrl'] = '/srm/managerv2?SFN='
    storageDict['Path'] = '/pnfs/in2p3.fr/data/lhcb'
    storageDict['SpaceToken'] = ''
    parameterDict = storageStub.getParameters()
    self.assertEqual( parameterDict, storageDict )
    res = storageStub.getTransportURL( '/lhcb/production/DC06/test.file' )
    self.assert_( res['OK'] )
    self.assertEqual( res['Value'], 'srm://ccsrm02.in2p3.fr:8443/srm/managerv2?SFN=/pnfs/in2p3.fr/data/lhcb/production/DC06/test.file' )
    # Best-effort remote calls below: return values intentionally unchecked.
    res = storageStub.removeFile( 'srm://ccsrm02.in2p3.fr:8443/srm/managerv2?SFN=/pnfs/in2p3.fr/data/lhcb/production/DC06/test.file' )
    listOfDirs = ['srm://ccsrm02.in2p3.fr:8443/srm/managerv2?SFN=/pnfs/in2p3.fr/data/lhcb/production/DC06/v1-lumi2/00001368/DIGI']
    res = storageStub.listDirectory( listOfDirs )
    #directoryPath = 'srm://ccsrmtestv2.in2p3.fr:8443/srm/managerv2?SFN=/pnfs/in2p3.fr/data/lhcb/production/DC06/v1-lumi2/1368'
    #res = storageStub.removeDir(directoryPath)
    destFile = 'srm://ccsrmtestv2.in2p3.fr:8443/srm/managerv2?SFN=/pnfs/in2p3.fr/data/lhcb/production/DC06/v1-lumi2/1368/dirac_directory.7'
    res = storageStub.putFile( destFile )
    # print() call form works on both Python 2 and 3; bare "print res" is
    # Python-2-only syntax.
    print( res )
    res = storageStub.getFile( destFile )
if __name__ == '__main__':
  # Build and run the suite with verbose output when executed directly.
  suite = unittest.defaultTestLoader.loadTestsFromTestCase( StorageFactoryTestCase )
  #suite.addTest(unittest.defaultTestLoader.loadTestsFromTestCase(CreateFTSReqCase))
  testResult = unittest.TextTestRunner( verbosity = 2 ).run( suite )
|
coberger/DIRAC
|
Resources/Storage/test/TestStorageFactory.py
|
Python
|
gpl-3.0
| 3,495
|
[
"DIRAC"
] |
9d9567d36c058dfd502adf92de8b1e847a34dd89f49ee66fd4636041c52f9921
|
from threading import Thread
from uiautomator import device as d
from ctx.generators.util import Adb
class Actuator:
    """Executes recipe actions on an Android device via uiautomator and adb."""

    def execute_in_background(self, recipe):
        # Run the recipe on a worker thread so the caller is not blocked.
        worker = Thread(target=self.execute, args=(recipe,))
        worker.start()

    def execute(self, recipe):
        # Dispatch each action to its handler; unknown action ids are skipped.
        handlers = {
            'Facebook': self.do_facebook,
            'Calendar': self.do_agenda,
            'Email': self.do_email,
        }
        for action in recipe['action']:
            handler = handlers.get(action['id'])
            if handler is not None:
                handler(action, recipe['variables'])

    def do_email(self, action, variables):
        # Drive the Gmail compose activity: recipient from the action,
        # subject/body from the recipe variables.
        d.screen.on()
        d.press.home()
        Adb.start("com.google.android.gm/com.google.android.gm.ComposeActivityGmail")
        d(resourceId="com.google.android.gm:id/to").click()
        Adb.write(action['value'])
        d.press.enter()
        d(resourceId="com.google.android.gm:id/subject").click()
        Adb.write(self.get_var("$titulo", variables))
        d(resourceId="com.google.android.gm:id/body").click()
        Adb.write(self.get_var("$mensagem", variables))
        d(resourceId="com.google.android.gm:id/send").click()

    def do_agenda(self, action, variables):
        # Calendar actions are not implemented yet.
        pass

    def do_facebook(self, action, variables):
        # Open Messenger, search the contact from the action, and send the
        # "$mensagem" variable as a message.
        contact = action["value"]
        d.screen.on()
        d.press.home()
        Adb.start("com.facebook.orca/.auth.StartScreenActivity")
        d(resourceId="com.facebook.orca:id/action_search").click()
        Adb.write(contact)
        d(className="android.widget.ListView").child(index=1).click()
        d(resourceId="com.facebook.orca:id/edit_text").click()
        Adb.write(self.get_var("$mensagem", variables))
        d(description="Enviar").click()

    @staticmethod
    def get_var(var_name, variables):
        # Value of the first variable dict whose 'name' matches; raises
        # StopIteration when absent (same as the original next/filter form).
        match = next(v for v in variables if v['name'] == var_name)
        return match['value']
|
fmca/ctxpy
|
samples/ifctt/action.py
|
Python
|
mit
| 1,939
|
[
"ORCA"
] |
8eae861efe73c048cd93456d276bf09311d186e701cde95946f3235fe63ecf2a
|
"""
Creates postage stamp, sigma-map, and psf image for each selected galaxy.
Also creates catalog of guess parameters for each stamp, to be used in AIM.
Input:
-Sextractor catalog name
-Cluster FITS image name (argv)
-psf name
-sextractor output file names
-Pixel scale
-Pixel size cutoff
Output:
-Postage stamp for each size-selected image
-Postage stamp for corresponding psf
-Postage stamp for image weight
-Catalog of initial parameters for AIM
"""
import numpy as np
import pyfits
#import matplotlib.pyplot as plt
import sys
import os
# --- Configuration: selection cutoffs, auxiliary FITS products, catalog ----
pixel_scale = 0.2
#scale_pix_cutoff = 4. #Minimum number of pixels required for source semi-major axis
hlr_cutoff = 3.75 #Okura uses 1.25 psf
FWHM_cutoff = 5 #Okura uses 1.12 psf
psf_name ='psf1.fits'
bg_rms_name ='bg_rms.fits'
bg_sub_name ='bg_minus.fits'
ob_sub_name ='object_minus.fits'
segment_name ='segment.fits'
catalog = 'test.cat'
# Cluster image to cut stamps from is given on the command line.
filename = sys.argv[1]
bg_RMS = 'bg_rms.fits'
obsImg = pyfits.open(filename)[0].data
#Autoclobber
# Remove stamps (.npy files) left over from a previous run.
folder = 'Stamps/'
for the_file in os.listdir(folder):
    file_path = os.path.join(folder, the_file)
    try:
        if (os.path.isfile(file_path) and (('.npy' in file_path))):
            os.unlink(file_path)
    except Exception, e:
        print e
# Per-object accumulators filled while parsing the SExtractor catalog below;
# entries are indexed by catalog row position.
xmin_list = []
xmax_list = []
ymin_list = []
ymax_list = []
guess_flux = []
guess_q = []
guess_phi = []
guess_re = []
x_list = []
y_list = []
def cutPSF(psf, N):
    """Return a copy of *psf* resized to an N x N stamp with unit total flux.

    A PSF stamp larger than N is centre-cropped; a smaller one is embedded
    in a zero array; an equal-sized one is used as is.  The result is always
    rescaled so its pixels sum to 1.
    """
    size = psf.shape[0]
    diff = size - N
    if diff > 0:
        # PSF larger than the requested stamp: crop centrally.
        start = int(diff / 2.)
        resized = psf[start:start + N, start:start + N]
    elif diff == 0:
        resized = psf
    else:
        # PSF smaller than the requested stamp: zero-pad symmetrically.
        start = int(-diff / 2.)
        resized = np.zeros((N, N))
        resized[start:start + size, start:start + size] = psf
    # Normalise total flux to 1.
    return resized / float(np.sum(resized))
selected_args = []
#Find postage stamp coordinates of objects
# Parse the SExtractor catalog: '#' header lines map column names to their
# 1-based column indices; each data line is one detected object.
with open(catalog) as f:
    pos=0
    for line in f:
        if line.startswith('#'): #Find indices of important parameters
            split = line.split()
            if split[2] == 'XMIN_IMAGE':
                xminInd = int(split[1])
            if split[2] == 'XMAX_IMAGE':
                xmaxInd = int(split[1])
            if split[2] == 'YMIN_IMAGE':
                yminInd = int(split[1])
            if split[2] == 'YMAX_IMAGE':
                ymaxInd = int(split[1])
            if split[2] == 'ELONGATION':
                qInd = int(split[1])
            if split[2] == 'A_IMAGE':
                reInd = int(split[1])
            if split[2] == 'THETA_IMAGE':
                phiInd = int(split[1])
            if split[2] == 'FLUX_AUTO':
                fluxInd = int(split[1])
            if split[2] == 'X_IMAGE':
                x0Ind = int(split[1])
            if split[2] == 'Y_IMAGE':
                y0Ind = int(split[1])
            if split[2] == 'FLUX_RADIUS':
                hlrInd = int(split[1])
            if split[2] == 'FWHM_IMAGE':
                FWHMInd = int(split[1])
            continue
        columns = line.split()
        #Calculate if image stamp would go outside field border
        x0 = float(columns[x0Ind-1])
        y0 = float(columns[y0Ind-1])
        re = float(columns[reInd-1])
        hlr = float(columns[hlrInd-1])
        FWHM = float(columns[FWHMInd-1])
        #Scale the size of the postage stamp
        scalar = 4.5
        ymin = np.floor(y0 - scalar*re)
        ymax = np.ceil(y0 + scalar*re)
        xmin = np.floor(x0 - scalar*re)
        xmax = np.ceil(x0 + scalar*re)
        # Force the stamp to be square by growing the shorter side.
        if (xmax-xmin) > (ymax-ymin):
            ymax += 1
        elif (ymax-ymin) > (xmax-xmin):
            xmax += 1
        #Create list of indices of selected galaxies
        # Keep objects above both size cutoffs whose stamp fits in the field.
        if ((hlr > hlr_cutoff) and (FWHM > FWHM_cutoff)):
            #Skip stamps which would lie outside image borders
            if ((ymin<0) or (xmin< 0) or (ymax>obsImg.shape[0]) or (xmax>obsImg.shape[1])):
                pass
            else:
                selected_args.append(pos)
        pos+=1
        #Find guess parameters
        # NOTE: guesses and bounds are appended for EVERY catalog row (not
        # just selected ones) so that selected_args can index these lists.
        guess_flux.append(float(columns[fluxInd-1]))
        guess_q.append(float(columns[qInd-1]))
        guess_phi.append((float(columns[phiInd-1])*np.pi/180.)%(2*np.pi))
        guess_re.append(re)
        x_list.append(x0)
        y_list.append(y0)
        #Returns min, max slice pixels (square)
        xmin_list.append(xmin)
        xmax_list.append(xmax)
        ymin_list.append(ymin)
        ymax_list.append(ymax)
print "Number of satisfying galaxies: ",len(selected_args)
#print selected_args
#Open files to create stamps
bgrms = pyfits.open(bg_RMS)[0].data
bgSub = pyfits.open('bg_minus.fits')[0].data
objSub = pyfits.open('object_minus.fits')[0].data
psf = pyfits.open('psf1.fits')[0].data
segImg = pyfits.open('segment.fits')[0].data
#Array to hold inital parameters for AIM
# One row per selected galaxy; columns documented inline below.
sersic_initials = np.zeros((len(selected_args),19))
disk_bulge_initials = np.zeros((len(selected_args),21))
combined_sersic_initials = np.zeros((len(selected_args),22))
#bad_args=np.array([])
#Create postage stamps for each selected galaxy
k = 0
for i in selected_args:
    #Crop appropriate images
    cropped = obsImg[ymin_list[i]:ymax_list[i],xmin_list[i]:xmax_list[i]]
    croppedSeg = np.copy(segImg[ymin_list[i]:ymax_list[i],xmin_list[i]:xmax_list[i]])
    cropped_bgSub = bgSub[ymin_list[i]:ymax_list[i],xmin_list[i]:xmax_list[i]]
    cropped_objSub = objSub[ymin_list[i]:ymax_list[i],xmin_list[i]:xmax_list[i]]
    bgrms_cropped = bgrms[ymin_list[i]:ymax_list[i],xmin_list[i]:xmax_list[i]]
    #Identify other galaxies
    # Segmentation map labels objects 1-based, so the target's label is i+1.
    otherobj = ((croppedSeg != i+1) & (croppedSeg != 0))
    #Identify only target pixels
    croppedSeg[(croppedSeg == i+1)] = 1.e4
    exclMsk = (croppedSeg != 1.e4) #Mask for pixels not part of targets
    croppedSeg[exclMsk] = 0.0 #Set other pixels to 0.0
    croppedSeg[~exclMsk] = 1.0 #Target pixels form window function
    #Apply object window function to background subtracted image.
    #Add the background subtracted, object subtracted image to get correct
    #object surroundings
    final = croppedSeg*cropped_bgSub + cropped_objSub
    rms_total = np.sqrt(bgrms_cropped**2 + final)
    rms_total[otherobj] *= 3
    #center = final.shape[0]/2
    #a,b = np.meshgrid(*map(np.arange,final.shape),indexing='ij')
    fit_weight = 3*(np.abs(final)/np.sum(np.abs(final)))**3
    # NOTE(review): this assignment discards the rms_total computed two
    # lines above (and its otherobj scaling) — confirm which sigma-map
    # definition is intended.
    rms_total = bgrms * fit_weight + bgrms
    Npsf = cutPSF(psf,final.shape[0])
    #print "i, final.shape[0]: ",i,final.shape[0]
    #Appending data table
    N = cropped.shape[0]
    xmax = N*pixel_scale/2.0
    #Sersic
    # Row layout: xmax, N, n, flux, q, phi, re, kap, gamma1/2, f1/2, g1/2,
    # x0, y0, image x/y, catalog index.
    sersic_initials[k,0] = xmax
    sersic_initials[k,1] = N
    sersic_initials[k,2] = 1.0
    sersic_initials[k,3] = guess_flux[i]
    sersic_initials[k,4] = guess_q[i]
    sersic_initials[k,5] = guess_phi[i]
    sersic_initials[k,6] = guess_re[i]
    sersic_initials[k,7] = 0.0 #kap
    sersic_initials[k,8] = 0.0 #gamma1,2
    sersic_initials[k,9] = 0.0
    sersic_initials[k,10] = 0.0 #f1,f2
    sersic_initials[k,11] = 0.0
    sersic_initials[k,12] = 0.0 #g1,g2
    sersic_initials[k,13] = 0.0
    sersic_initials[k,14] = 0.0 #x0
    sersic_initials[k,15] = 0.0 #y0
    sersic_initials[k,16] = x_list[i] #image x-coordinate
    sersic_initials[k,17] = y_list[i] #image y-coordinate
    sersic_initials[k,18] = i #Catalog index
    #Disk + bulge model
    disk_bulge_initials[k,0] = xmax
    disk_bulge_initials[k,1] = N
    disk_bulge_initials[k,2] = guess_flux[i]/2. #Set initial flux in each to be half of
    disk_bulge_initials[k,3] = guess_flux[i]/2. #total flux
    disk_bulge_initials[k,4] = guess_q[i]
    disk_bulge_initials[k,5] = guess_phi[i]
    disk_bulge_initials[k,6] = guess_re[i]*1.5 #Disk will have a shallower, bigger hlr
    disk_bulge_initials[k,7] = guess_re[i]/2. #Bulge will have a much steeper, smaller hlr
    disk_bulge_initials[k,8] = 3.0 #n_b
    disk_bulge_initials[k,9] = 0.0 #kap
    disk_bulge_initials[k,10] = 0.0 #gamma1
    disk_bulge_initials[k,11] = 0.0
    disk_bulge_initials[k,12] = 0.0 #f1
    disk_bulge_initials[k,13] = 0.0
    disk_bulge_initials[k,14] = 0.0 #g1
    disk_bulge_initials[k,15] = 0.0
    disk_bulge_initials[k,16] = 0.0 #x0
    disk_bulge_initials[k,17] = 0.0 #y0
    disk_bulge_initials[k,18] = x_list[i] #image x-coordinate
    disk_bulge_initials[k,19] = y_list[i] #image y-coordinate
    disk_bulge_initials[k,20] = i #Catalog index
    #Overlapping sersics model
    #xmax,N,n1,flux1,re1,n2,flux2,re2,q,phi,kap,gamma1,gamma2,f1,f2,g1,g2,cx,cy
    combined_sersic_initials[k,0] = xmax
    combined_sersic_initials[k,1] = N
    combined_sersic_initials[k,2] = 1.0 #Initial n1
    combined_sersic_initials[k,3] = guess_flux[i]/2. #Initial flux1
    combined_sersic_initials[k,4] = guess_re[i]*1.5 #Initial re1
    combined_sersic_initials[k,5] = 4.0 #Initial n2
    combined_sersic_initials[k,6] = guess_flux[i]/2.
    combined_sersic_initials[k,7] = guess_re[i]/2.
    combined_sersic_initials[k,8] = guess_q[i]
    combined_sersic_initials[k,9] = guess_phi[i]
    combined_sersic_initials[k,10] = 0.0 #kap
    combined_sersic_initials[k,11] = 0.0 #gamma1
    combined_sersic_initials[k,12] = 0.0
    combined_sersic_initials[k,13] = 0.0 #f1
    combined_sersic_initials[k,14] = 0.0
    combined_sersic_initials[k,15] = 0.0 #g1
    combined_sersic_initials[k,16] = 0.0
    combined_sersic_initials[k,17] = 0.0 #x0
    combined_sersic_initials[k,18] = 0.0
    combined_sersic_initials[k,19] = x_list[i] #image x-coord
    combined_sersic_initials[k,20] = y_list[i]
    combined_sersic_initials[k,21] = i #index
    #Saving stamps
    # Stamps are written as .npy files keyed by catalog index i.
    savename = 'Stamps/'+str(i)+"_img"
    errSavename = 'Stamps/'+str(i)+"_err"
    psfName = 'Stamps/'+str(i)+"_psf"
    np.save(savename,final)
    np.save(errSavename,rms_total)
    np.save(psfName,Npsf)
    #thdu = pyfits.PrimaryHDU(final)
    #thdu.writeto(savename +".fits")
    #thdu = pyfits.PrimaryHDU(Npsf)
    #thdu.writeto(psfName +".fits")
    k+=1
'''sersic_initials = np.delete(sersic_initials,bad_args,axis=0)
disk_bulge_initials = np.delete(disk_bulge_initials,bad_args,axis=0)
combined_sersic_initials = np.delete(combined_sersic_initials,bad_args,axis=0)'''
#print "Number of adjacent galaxies: ",bad_args.size
#print "Final number of good galaxies: ",sersic_initials.shape[0]
np.save('Sersic_initials',sersic_initials)
np.save('Disk_bulge_initials',disk_bulge_initials)
np.save('Combined_sersic_initials',combined_sersic_initials)
|
Jmt354/Cluster-Analysis
|
run.py
|
Python
|
gpl-3.0
| 9,809
|
[
"Galaxy"
] |
61c0bde2f010350b20ac8314c91472dec670e56897369963f2077c0e396073bd
|
# Copyright 2009 Brian Quinlan. All Rights Reserved.
# Licensed to PSF under a Contributor Agreement.
"""Implements ProcessPoolExecutor.
The follow diagram and text describe the data-flow through the system:
|======================= In-process =====================|== Out-of-process ==|
+----------+ +----------+ +--------+ +-----------+ +---------+
| | => | Work Ids | => | | => | Call Q | => | |
| | +----------+ | | +-----------+ | |
| | | ... | | | | ... | | |
| | | 6 | | | | 5, call() | | |
| | | 7 | | | | ... | | |
| Process | | ... | | Local | +-----------+ | Process |
| Pool | +----------+ | Worker | | #1..n |
| Executor | | Thread | | |
| | +----------- + | | +-----------+ | |
| | <=> | Work Items | <=> | | <= | Result Q | <= | |
| | +------------+ | | +-----------+ | |
| | | 6: call() | | | | ... | | |
| | | future | | | | 4, result | | |
| | | ... | | | | 3, except | | |
+----------+ +------------+ +--------+ +-----------+ +---------+
Executor.submit() called:
- creates a uniquely numbered _WorkItem and adds it to the "Work Items" dict
- adds the id of the _WorkItem to the "Work Ids" queue
Local worker thread:
- reads work ids from the "Work Ids" queue and looks up the corresponding
WorkItem from the "Work Items" dict: if the work item has been cancelled then
it is simply removed from the dict, otherwise it is repackaged as a
_CallItem and put in the "Call Q". New _CallItems are put in the "Call Q"
until "Call Q" is full. NOTE: the size of the "Call Q" is kept small because
calls placed in the "Call Q" can no longer be cancelled with Future.cancel().
- reads _ResultItems from "Result Q", updates the future stored in the
"Work Items" dict and deletes the dict entry
Process #1..n:
- reads _CallItems from "Call Q", executes the calls, and puts the resulting
_ResultItems in "Result Q"
"""
__author__ = 'Brian Quinlan (brian@sweetapp.com)'
from concurrent.futures.multithreaded import Future
from concurrent.futures.old.executor import Executor
import atexit
import os
import queue
from queue import Full
import multiprocessing
from multiprocessing import SimpleQueue
from multiprocessing.connection import wait
import threading
import weakref
# Workers are created as daemon threads and processes. This is done to allow the
# interpreter to exit when there are still idle processes in a
# ProcessPoolExecutor's process pool (i.e. shutdown() was not called). However,
# allowing workers to die with the interpreter has two undesirable properties:
# - The workers would still be running during interpretor shutdown,
# meaning that they would fail in unpredictable ways.
# - The workers could be killed while evaluating a work item, which could
# be bad if the callable being evaluated has external side-effects e.g.
# writing to a file.
#
# To work around this problem, an exit handler is installed which tells the
# workers to exit when their work queues are empty and then waits until the
# threads/processes finish.
_threads_queues = weakref.WeakKeyDictionary()
_shutdown = False
def _python_exit():
    # Interpreter-exit handler: flag shutdown, wake every queue management
    # thread with a None sentinel, then wait for them all to finish.
    global _shutdown
    _shutdown = True
    items = list(_threads_queues.items())
    for t, q in items:
        q.put(None)
    for t, q in items:
        t.join()
# Controls how many more calls than processes will be queued in the call queue.
# A smaller number will mean that processes spend more time idle waiting for
# work while a larger number will make Future.cancel() succeed less frequently
# (Futures in the call queue cannot be cancelled).
EXTRA_QUEUED_CALLS = 1
class _WorkItem(object):
    """A pending call (fn, args, kwargs) bundled with the Future that will
    receive its result; stored in the executor's "Work Items" dict."""

    def __init__(self, future, fn, args, kwargs):
        self.future = future
        self.fn = fn
        self.args = args
        self.kwargs = kwargs
class _ResultItem(object):
    """Outcome of a call, sent back from a worker process via the result
    queue; exactly one of exception/result is set."""

    def __init__(self, work_id, exception=None, result=None):
        self.work_id = work_id
        self.exception = exception
        self.result = result
class _CallItem(object):
    """A call shipped to a worker process via the call queue."""

    def __init__(self, work_id, fn, args, kwargs):
        self.work_id = work_id
        self.fn = fn
        self.args = args
        self.kwargs = kwargs
def _process_worker(call_queue, result_queue):
    """Evaluates calls from call_queue and places the results in result_queue.

    This worker is run in a separate process.

    Args:
        call_queue: A multiprocessing.Queue of _CallItems that will be read and
            evaluated by the worker.
        result_queue: A multiprocessing.Queue of _ResultItems that will written
            to by the worker.
        shutdown: A multiprocessing.Event that will be set as a signal to the
            worker that it should exit when call_queue is empty.
    """
    while True:
        call_item = call_queue.get(block=True)
        if call_item is None:
            # None is the shutdown sentinel; reply with our PID so the
            # queue management thread can join this exact process.
            # Wake up queue management thread
            result_queue.put(os.getpid())
            return
        try:
            r = call_item.fn(*call_item.args, **call_item.kwargs)
        except BaseException as e:
            # Ship the exception back instead of crashing the worker.
            result_queue.put(_ResultItem(call_item.work_id,
                                         exception=e))
        else:
            result_queue.put(_ResultItem(call_item.work_id,
                                         result=r))
def _add_call_item_to_queue(pending_work_items,
                            work_ids,
                            call_queue):
    """Fills call_queue with _WorkItems from pending_work_items.

    This function never blocks.

    Args:
        pending_work_items: A dict mapping work ids to _WorkItems e.g.
            {5: <_WorkItem...>, 6: <_WorkItem...>, ...}
        work_ids: A queue.Queue of work ids e.g. Queue([5, 6, ...]). Work ids
            are consumed and the corresponding _WorkItems from
            pending_work_items are transformed into _CallItems and put in
            call_queue.
        call_queue: A multiprocessing.Queue that will be filled with _CallItems
            derived from _WorkItems.
    """
    while True:
        if call_queue.full():
            return
        try:
            work_id = work_ids.get(block=False)
        except queue.Empty:
            return
        else:
            work_item = pending_work_items[work_id]

            if work_item.future.set_running_or_notify_cancel():
                call_queue.put(_CallItem(work_id,
                                         work_item.fn,
                                         work_item.args,
                                         work_item.kwargs),
                               block=True)
            else:
                # Future was cancelled before it started running: drop it
                # without sending anything to the workers.
                del pending_work_items[work_id]
                continue
def _queue_management_worker(executor_reference,
                             processes,
                             pending_work_items,
                             work_ids_queue,
                             call_queue,
                             result_queue):
    """Manages the communication between this process and the worker processes.

    This function is run in a local thread.

    Args:
        executor_reference: A weakref.ref to the ProcessPoolExecutor that owns
            this thread. Used to determine if the ProcessPoolExecutor has been
            garbage collected and that this function can exit.
        process: A list of the multiprocessing.Process instances used as
            workers.
        pending_work_items: A dict mapping work ids to _WorkItems e.g.
            {5: <_WorkItem...>, 6: <_WorkItem...>, ...}
        work_ids_queue: A queue.Queue of work ids e.g. Queue([5, 6, ...]).
        call_queue: A multiprocessing.Queue that will be filled with _CallItems
            derived from _WorkItems for processing by the process workers.
        result_queue: A multiprocessing.Queue of _ResultItems generated by the
            process workers.
    """
    executor = None

    def shutting_down():
        # True once the interpreter, the executor, or an explicit shutdown()
        # has requested termination.
        return _shutdown or executor is None or executor._shutdown_thread

    def shutdown_worker():
        # Send one shutdown sentinel per (at most) live worker, then wait
        # for them to exit.
        # This is an upper bound
        nb_children_alive = sum(p.is_alive() for p in processes.values())
        for i in range(0, nb_children_alive):
            call_queue.put_nowait(None)
        # Release the queue's resources as soon as possible.
        call_queue.close()
        # If .join() is not called on the created processes then
        # some multiprocessing.Queue methods may deadlock on Mac OS X.
        for p in processes.values():
            p.join()

    reader = result_queue._reader

    while True:
        _add_call_item_to_queue(pending_work_items,
                                work_ids_queue,
                                call_queue)

        # Wait on the result pipe AND every worker's sentinel so an abrupt
        # worker death wakes us up too.
        sentinels = [p.sentinel for p in processes.values()]
        assert sentinels
        ready = wait([reader] + sentinels)
        if reader in ready:
            result_item = reader.recv()
        else:
            # A worker sentinel fired without a result: a process died.
            # Mark the process pool broken so that submits fail right now.
            executor = executor_reference()
            if executor is not None:
                executor._broken = True
                executor._shutdown_thread = True
                executor = None
            # All futures in flight must be marked failed
            for work_id, work_item in pending_work_items.items():
                work_item.future.set_exception(
                    BrokenProcessPool(
                        "A process in the process pool was "
                        "terminated abruptly while the future was "
                        "running or pending."
                    ))
                # Delete references to object. See issue16284
                del work_item
            pending_work_items.clear()
            # Terminate remaining workers forcibly: the queues or their
            # locks may be in a dirty state and block forever.
            for p in processes.values():
                p.terminate()
            shutdown_worker()
            return
        if isinstance(result_item, int):
            # Clean shutdown of a worker using its PID
            # (avoids marking the executor broken)
            assert shutting_down()
            p = processes.pop(result_item)
            p.join()
            if not processes:
                shutdown_worker()
                return
        elif result_item is not None:
            work_item = pending_work_items.pop(result_item.work_id, None)
            # work_item can be None if another process terminated (see above)
            if work_item is not None:
                if result_item.exception:
                    work_item.future.set_exception(result_item.exception)
                else:
                    work_item.future.set_result(result_item.result)
                # Delete references to object. See issue16284
                del work_item
        # Check whether we should start shutting down.
        executor = executor_reference()
        # No more work items can be added if:
        #   - The interpreter is shutting down OR
        #   - The executor that owns this worker has been collected OR
        #   - The executor that owns this worker has been shutdown.
        if shutting_down():
            try:
                # Since no new work items can be added, it is safe to shutdown
                # this thread if there are no pending work items.
                if not pending_work_items:
                    shutdown_worker()
                    return
            except Full:
                # This is not a problem: we will eventually be woken up (in
                # result_queue.get()) and be able to send a sentinel again.
                pass
        executor = None
# Cache of the one-time platform capability probe below.
_system_limits_checked = False
_system_limited = None


def _check_system_limits():
    """Raise NotImplementedError if the platform cannot support
    ProcessPoolExecutor (too few POSIX semaphores); otherwise return.

    The outcome is cached in _system_limits_checked/_system_limited so the
    sysconf probe only runs once per process.
    """
    global _system_limits_checked, _system_limited
    if _system_limits_checked:
        if _system_limited:
            raise NotImplementedError(_system_limited)
        # Already probed and no limitation was found: nothing to redo.
        return
    _system_limits_checked = True
    try:
        nsems_max = os.sysconf("SC_SEM_NSEMS_MAX")
    except (AttributeError, ValueError):
        # sysconf not available or setting not available
        return
    if nsems_max == -1:
        # undetermined limit, assume that limit is determined
        # by available memory only
        return
    if nsems_max >= 256:
        # minimum number of semaphores available
        # according to POSIX
        return
    _system_limited = "system provides too few semaphores (%d available, 256 necessary)" % nsems_max
    raise NotImplementedError(_system_limited)
class BrokenProcessPool(RuntimeError):
    """
    Raised when a process in a ProcessPoolExecutor terminated abruptly
    while a future was in the running state.
    """
    # Set on pending futures by _queue_management_worker when a worker
    # sentinel fires without a corresponding result.
class ProcessPoolExecutor(Executor):
    def __init__(self, max_workers=None):
        """Initializes a new ProcessPoolExecutor instance.

        Args:
            max_workers: The maximum number of processes that can be used to
                execute the given calls. If None or not given then as many
                worker processes will be created as the machine has processors.
        """
        _check_system_limits()

        if max_workers is None:
            self._max_workers = os.cpu_count() or 1
        else:
            self._max_workers = max_workers

        # Make the call queue slightly larger than the number of processes to
        # prevent the worker processes from idling. But don't make it too big
        # because futures in the call queue cannot be cancelled.
        self._call_queue = multiprocessing.Queue(self._max_workers +
                                                 EXTRA_QUEUED_CALLS)
        # Killed worker processes can produce spurious "broken pipe"
        # tracebacks in the queue's own worker thread. But we detect killed
        # processes anyway, so silence the tracebacks.
        self._call_queue._ignore_epipe = True
        self._result_queue = SimpleQueue()
        self._work_ids = queue.Queue()
        # Lazily started by the first submit(); see
        # _start_queue_management_thread.
        self._queue_management_thread = None
        # Map of pids to processes
        self._processes = {}

        # Shutdown is a two-step process.
        self._shutdown_thread = False
        self._shutdown_lock = threading.Lock()
        self._broken = False
        self._queue_count = 0
        self._pending_work_items = {}

    def _start_queue_management_thread(self):
        # Idempotent: spawns the workers and the daemon management thread on
        # first call only.
        # When the executor gets lost, the weakref callback will wake up
        # the queue management thread.
        def weakref_cb(_, q=self._result_queue):
            q.put(None)
        if self._queue_management_thread is None:
            # Start the processes so that their sentinels are known.
            self._adjust_process_count()
            self._queue_management_thread = threading.Thread(
                    target=_queue_management_worker,
                    args=(weakref.ref(self, weakref_cb),
                          self._processes,
                          self._pending_work_items,
                          self._work_ids,
                          self._call_queue,
                          self._result_queue))
            self._queue_management_thread.daemon = True
            self._queue_management_thread.start()
            # Register so _python_exit can wake and join this thread.
            _threads_queues[self._queue_management_thread] = self._result_queue

    def _adjust_process_count(self):
        # Top up the worker pool to _max_workers processes.
        for _ in range(len(self._processes), self._max_workers):
            p = multiprocessing.Process(
                    target=_process_worker,
                    args=(self._call_queue,
                          self._result_queue))
            p.start()
            self._processes[p.pid] = p

    def submit(self, fn, *args, **kwargs):
        with self._shutdown_lock:
            if self._broken:
                raise BrokenProcessPool('A child process terminated '
                    'abruptly, the process pool is not usable anymore')
            if self._shutdown_thread:
                raise RuntimeError('cannot schedule new futures after shutdown')

            f = Future()
            w = _WorkItem(f, fn, args, kwargs)

            self._pending_work_items[self._queue_count] = w
            self._work_ids.put(self._queue_count)
            self._queue_count += 1
            # Wake up queue management thread
            self._result_queue.put(None)

            self._start_queue_management_thread()
            return f
    submit.__doc__ = Executor.submit.__doc__

    def shutdown(self, wait=True):
        with self._shutdown_lock:
            self._shutdown_thread = True
        if self._queue_management_thread:
            # Wake up queue management thread
            self._result_queue.put(None)
            if wait:
                self._queue_management_thread.join()
        # To reduce the risk of opening too many files, remove references to
        # objects that use file descriptors.
        self._queue_management_thread = None
        self._call_queue = None
        self._result_queue = None
        self._processes = None
    shutdown.__doc__ = Executor.shutdown.__doc__
# Ensure management threads are woken and joined at interpreter exit.
atexit.register(_python_exit)
|
mikhtonyuk/rxpython
|
concurrent/executors/process.py
|
Python
|
mit
| 17,649
|
[
"Brian"
] |
598ead73a13665e28dc520bbd2484de99c2fa96fbf8b4a97704b6581d0d740d3
|
from base import Parser1
class Foo(Parser1):
    """Trivial Parser1 plugin used by the plugin-loading tests."""

    def visit(self, foo):
        # Always yield the constant token, regardless of input.
        token = 'bar'
        return token
|
sourcesimian/pyPlugin
|
tests/plugins1.py
|
Python
|
mit
| 94
|
[
"VisIt"
] |
ccb82b5867cf56c32dec91d791b58dccbbd2fb3fa9cc519e71df50a7889be55d
|
#!/usr/bin/python
#
# @author: Gaurav Rastogi (grastogi@avinetworks.com)
# Eric Anderson (eanderson@avinetworks.com)
# module_check: supported
# Avi Version: 17.1.1
#
# Copyright: (c) 2017 Gaurav Rastogi, <grastogi@avinetworks.com>
# GNU General Public License v3.0+ (see COPYING or https://www.gnu.org/licenses/gpl-3.0.txt)
#
# Standard Ansible module metadata consumed by ansible-doc and the CI tooling.
ANSIBLE_METADATA = {'metadata_version': '1.1',
                    'status': ['preview'],
                    'supported_by': 'community'}
DOCUMENTATION = '''
---
module: avi_applicationprofile
author: Gaurav Rastogi (@grastogi23) <grastogi@avinetworks.com>
short_description: Module for setup of ApplicationProfile Avi RESTful Object
description:
- This module is used to configure ApplicationProfile object
- more examples at U(https://github.com/avinetworks/devops)
requirements: [ avisdk ]
version_added: "2.3"
options:
state:
description:
- The state that should be applied on the entity.
default: present
choices: ["absent", "present"]
avi_api_update_method:
description:
- Default method for object update is HTTP PUT.
- Setting to patch will override that behavior to use HTTP PATCH.
version_added: "2.5"
default: put
choices: ["put", "patch"]
avi_api_patch_op:
description:
- Patch operation to use when using avi_api_update_method as patch.
version_added: "2.5"
choices: ["add", "replace", "delete"]
cloud_config_cksum:
description:
- Checksum of application profiles.
- Internally set by cloud connector.
- Field introduced in 17.2.14, 18.1.5, 18.2.1.
version_added: "2.9"
created_by:
description:
- Name of the application profile creator.
- Field introduced in 17.2.14, 18.1.5, 18.2.1.
version_added: "2.9"
description:
description:
- User defined description for the object.
dns_service_profile:
description:
- Specifies various dns service related controls for virtual service.
dos_rl_profile:
description:
- Specifies various security related controls for virtual service.
http_profile:
description:
- Specifies the http application proxy profile parameters.
name:
description:
- The name of the application profile.
required: true
preserve_client_ip:
description:
- Specifies if client ip needs to be preserved for backend connection.
- Not compatible with connection multiplexing.
- Default value when not specified in API or module is interpreted by Avi Controller as False.
type: bool
preserve_client_port:
description:
- Specifies if we need to preserve client port while preserving client ip for backend connections.
- Field introduced in 17.2.7.
- Default value when not specified in API or module is interpreted by Avi Controller as False.
version_added: "2.6"
type: bool
sip_service_profile:
description:
- Specifies various sip service related controls for virtual service.
- Field introduced in 17.2.8, 18.1.3, 18.2.1.
version_added: "2.9"
tcp_app_profile:
description:
- Specifies the tcp application proxy profile parameters.
tenant_ref:
description:
- It is a reference to an object of type tenant.
type:
description:
- Specifies which application layer proxy is enabled for the virtual service.
- Enum options - APPLICATION_PROFILE_TYPE_L4, APPLICATION_PROFILE_TYPE_HTTP, APPLICATION_PROFILE_TYPE_SYSLOG, APPLICATION_PROFILE_TYPE_DNS,
- APPLICATION_PROFILE_TYPE_SSL, APPLICATION_PROFILE_TYPE_SIP.
required: true
url:
description:
- Avi controller URL of the object.
uuid:
description:
- Uuid of the application profile.
extends_documentation_fragment:
- avi
'''
EXAMPLES = """
- name: Create an Application Profile for HTTP application enabled for SSL traffic
avi_applicationprofile:
controller: '{{ controller }}'
username: '{{ username }}'
password: '{{ password }}'
http_profile:
cache_config:
age_header: true
aggressive: false
date_header: true
default_expire: 600
enabled: false
heuristic_expire: false
max_cache_size: 0
max_object_size: 4194304
mime_types_group_refs:
- admin:System-Cacheable-Resource-Types
min_object_size: 100
query_cacheable: false
xcache_header: true
client_body_timeout: 0
client_header_timeout: 10000
client_max_body_size: 0
client_max_header_size: 12
client_max_request_size: 48
compression_profile:
compressible_content_ref: admin:System-Compressible-Content-Types
compression: false
remove_accept_encoding_header: true
type: AUTO_COMPRESSION
connection_multiplexing_enabled: true
hsts_enabled: false
hsts_max_age: 365
http_to_https: false
httponly_enabled: false
keepalive_header: false
keepalive_timeout: 30000
max_bad_rps_cip: 0
max_bad_rps_cip_uri: 0
max_bad_rps_uri: 0
max_rps_cip: 0
max_rps_cip_uri: 0
max_rps_unknown_cip: 0
max_rps_unknown_uri: 0
max_rps_uri: 0
post_accept_timeout: 30000
secure_cookie_enabled: false
server_side_redirect_to_https: false
spdy_enabled: false
spdy_fwd_proxy_mode: false
ssl_client_certificate_mode: SSL_CLIENT_CERTIFICATE_NONE
ssl_everywhere_enabled: false
websockets_enabled: true
x_forwarded_proto_enabled: false
xff_alternate_name: X-Forwarded-For
xff_enabled: true
name: System-HTTP
tenant_ref: admin
type: APPLICATION_PROFILE_TYPE_HTTP
"""
RETURN = '''
obj:
description: ApplicationProfile (api/applicationprofile) object
returned: success, changed
type: dict
'''
from ansible.module_utils.basic import AnsibleModule
# The Avi SDK helpers are optional at import time; when they are missing,
# record the fact so main() can fail with a helpful message instead of a
# traceback during module load.
try:
    from ansible.module_utils.network.avi.avi import (
        avi_common_argument_spec, avi_ansible_api, HAS_AVI)
except ImportError:
    HAS_AVI = False
def main():
    """Declare the module's argument spec and drive the Avi REST workflow."""
    # Same options as before, expressed as a plain dict literal.
    argument_specs = {
        'state': dict(default='present', choices=['absent', 'present']),
        'avi_api_update_method': dict(default='put', choices=['put', 'patch']),
        'avi_api_patch_op': dict(choices=['add', 'replace', 'delete']),
        'cloud_config_cksum': dict(type='str'),
        'created_by': dict(type='str'),
        'description': dict(type='str'),
        'dns_service_profile': dict(type='dict'),
        'dos_rl_profile': dict(type='dict'),
        'http_profile': dict(type='dict'),
        'name': dict(type='str', required=True),
        'preserve_client_ip': dict(type='bool'),
        'preserve_client_port': dict(type='bool'),
        'sip_service_profile': dict(type='dict'),
        'tcp_app_profile': dict(type='dict'),
        'tenant_ref': dict(type='str'),
        'type': dict(type='str', required=True),
        'url': dict(type='str'),
        'uuid': dict(type='str'),
    }
    argument_specs.update(avi_common_argument_spec())
    module = AnsibleModule(
        argument_spec=argument_specs, supports_check_mode=True)
    # Bail out early when the optional Avi SDK could not be imported.
    if not HAS_AVI:
        return module.fail_json(msg=(
            'Avi python API SDK (avisdk>=17.1) or requests is not installed. '
            'For more details visit https://github.com/avinetworks/sdk.'))
    return avi_ansible_api(module, 'applicationprofile',
                           set([]))
# Entry point when Ansible executes the module file directly.
if __name__ == '__main__':
    main()
|
thaim/ansible
|
lib/ansible/modules/network/avi/avi_applicationprofile.py
|
Python
|
mit
| 7,960
|
[
"VisIt"
] |
fb9fa7b0eeff3a766bc757a6deccbd64224b3cffca479f94af955a186c3cda7f
|
'''
Created on Jul 31, 2015
@author: sergio
'''
import numpy as np
import ctypes
import numpy.ctypeslib as npct
import matplotlib.pyplot as plt
import psycopg2
import time
import neurodb.neodb.core
from math import e, pow
from scipy.optimize import leastsq
import neurodb
import random
from sklearn.cluster import KMeans, AgglomerativeClustering, MiniBatchKMeans
from neurodb.cfsfdp import libcd
import multiprocessing as mp
from timeit import timeit
from time import time
from sklearn.decomposition import PCA
import neurodb.features
def get_features(spikes):
    """Fetch the FEATURES row ids that match the given spike ids.

    Parameters
    ----------
    spikes : sequence
        Spike ids matched against FEATURES.id_spike.

    Returns
    -------
    numpy.ndarray (float64) with one FEATURES id per matching row.
    """
    # No spikes -> no features.  The old string-built query produced the
    # malformed SQL "SELECT id from FEATURES WHERE" for an empty input.
    if not spikes:
        return np.array([], np.float64)
    username = 'postgres'
    password = 'postgres'
    host = '172.16.162.128'
    dbname = 'demo'
    dbconn = psycopg2.connect('dbname=%s user=%s password=%s host=%s'
                              % (dbname, username, password, host))
    try:
        # Parameterized query instead of interpolating ids into the SQL text.
        predicate = " or ".join(["id_spike=%s"] * len(spikes))
        query = "SELECT id from FEATURES WHERE " + predicate
        cursor = dbconn.cursor()
        try:
            cursor.execute(query, list(spikes))
            results = [row[0] for row in cursor.fetchall()]
        finally:
            cursor.close()
    finally:
        # The original leaked the connection on every call.
        dbconn.close()
    return np.array(results, np.float64)
def clustering(nodo, points, output):
    """Density-peak cluster one group of spikes and publish the templates.

    Parameters: ``nodo`` is a list of spike ids, ``points`` is passed through
    to the libcd routines, and ``output`` is a multiprocessing queue that
    receives a ``(templates, spikes)`` tuple.
    NOTE(review): relies on module-level globals ``id_block`` and ``channel``
    that are only defined in the ``__main__`` section — confirm before reuse.
    """
    username = 'postgres'
    password = 'postgres'
    host = '172.16.162.128'
    dbname = 'demo'
    url = 'postgresql://%s:%s@%s/%s'%(username, password, host, dbname)
    dbconn = psycopg2.connect('dbname=%s user=%s password=%s host=%s'%(dbname, username, password, host))
    connect = "dbname=demo host=172.16.162.128 user=postgres password=postgres"
    spikes_id = np.array(nodo, np.float64)
    #spikes_id1= np.copy(spikes_id)
    nspikes = len(nodo)
    # Output buffers filled in-place by the native libcd routines.
    rho = np.empty(nspikes)
    delta = np.empty(nspikes)
    nneigh = np.empty(nspikes)
    centers = np.empty(nspikes)
    cluster_index = np.empty(nspikes)
    features = get_features(nodo)
    #dc = libcd.get_dc(connect, spikes_id, nspikes, np.float(1.8), points)
    dc = libcd.getDC(connect, features, spikes_id, nspikes, np.float(1.8), points)
    #libcd.cluster_dp(connect, rho, delta, spikes_id,
    #    cluster_index1, nneigh, centers, dc, points, nspikes, "gaussian")
    libcd.dpClustering(features, nspikes, dc, points, "gaussian", spikes_id, cluster_index, rho, delta)
    templates = []
    spikes = []
    gspikes = []
    # Average the waveforms of each cluster into a 64-sample template.
    for i in range(1, int(cluster_index.max())+1):
        template = np.zeros(64, np.float64)
        k = 0
        for j in range(nspikes):
            if cluster_index[j] == i:
                spike = neurodb.neodb.core.spikedb.get_from_db(dbconn,
                id_block = id_block, channel = channel, id = int(spikes_id[j]))
                signal = spike[0].waveform
                template = template + signal
                gspikes.append(nodo[j])
                k = k + 1
        template = template/k
        templates.append(template)
        spikes.append(gspikes)
    output.put((templates, spikes))
    pass
def ajuste(local_density, coeficientes):
    """Evaluate the polynomial ``coeficientes`` at every local-density value.

    Returns a float64 array with one fitted value per input point.
    """
    polyval = np.polynomial.polynomial.polyval
    return np.array([polyval(density, coeficientes) for density in local_density],
                    np.float64)
def __select_nodes(id_project, id_session, channel, n_nodos):
    """Split the channel's spikes into ``n_nodos`` shuffled groups.

    Looks up the recording channel of the given project/session, shuffles
    its spikes, and partitions them into ``n_nodos`` lists of roughly equal
    size (the last list may be shorter).
    NOTE(review): if no entry in ``channels`` matches ``channel``, ``rc`` is
    never bound and the next line raises NameError — confirm callers always
    pass a valid channel.
    """
    project = neurodb.project.get_from_db(id_project)
    session = project.get_session(int(id_session))
    channels = session.get_channels()
    for ch in channels:
        if ch['channel']==int(channel):
            rc = session.get_channel(ch['id'])
    spikes = rc.get_spikes()
    random.shuffle(spikes)
    len_spikes = len(spikes)
    # Ceiling division so every spike lands in some group.
    len_nodo = np.ceil(float(len_spikes)/float(n_nodos))
    nodos = []
    for i in range(n_nodos):
        nodo = []
        j = 0
        while(spikes != [] and j<len_nodo):
            nodo.append(spikes.pop())
            j = j + 1
        nodos.append(nodo)
    return nodos
def insertFeaturesTemplate(templates, spike_ids):
    """Project templates to 10 PCA components and persist them in FEATURES.

    Each template's ten components go into columns p1..p10; the matching
    entry of ``spike_ids`` is stored (float32, binary) in ``extra``.
    Returns the list of newly inserted FEATURES ids.
    NOTE(review): uses module-level globals ``dbname``/``username``/
    ``password``/``host`` that are only defined in the ``__main__`` section —
    this function works only when the file runs as a script; confirm.
    """
    pca = PCA(n_components=10)
    transf = pca.fit_transform(templates)
    dbconn = psycopg2.connect('dbname=%s user=%s password=%s host=%s'%(dbname, username, password, host))
    #spike_ids = np.float16(spike_ids)
    cursor = dbconn.cursor()
    ids = []
    i = 0
    for x in transf:
        query = """INSERT INTO FEATURES (p1, p2, p3, p4, p5, p6, p7, p8, p9, p10, extra)
            VALUES (%s, %s,%s, %s,%s,
            %s,%s, %s,%s, %s, %s) RETURNING id"""
        cursor.execute(query, [x[0], x[1], x[2], x[3], x[4], x[5], x[6], x[7],
        x[8], x[9], psycopg2.Binary(np.float32(spike_ids[i]))])
        id_of_new_row = cursor.fetchone()[0]
        ids.append(id_of_new_row)
        i = i+1
    dbconn.commit()
    return ids
def getSpikeCluster(features_ids):
    """Placeholder: map feature ids back to their spike clusters (not implemented)."""
    pass
def serial(dbconn, nodos, output):
    """Run clustering over each spike group sequentially, plotting templates."""
    for group in nodos:
        # clustering() publishes a (templates, spikes) tuple on the queue.
        clustering(group, 3, output)
        result = output.get()
        for curve in result[0]:
            plt.plot(curve)
        plt.show()
# Driver script (Python 2): cluster spike groups in parallel, merge the
# resulting templates, then run a second-level clustering over the templates.
if __name__ == '__main__':
    username = 'postgres'
    password = 'postgres'
    host = '172.16.162.128'
    dbname = 'demo'
    url = 'postgresql://%s:%s@%s/%s'%(username, password, host, dbname)
    dbconn = psycopg2.connect('dbname=%s user=%s password=%s host=%s'%(dbname, username, password, host))
    color = ['bo', 'ro', 'go', 'co', 'ko', 'mo', 'b^', 'r^', 'g^', 'c^', 'k^', 'm^']
    connect = "dbname=demo host=172.16.162.128 user=postgres password=postgres"
    centers = []
    rho = np.array([], np.float64)
    delta = np.array([], np.float64)
    template = np.zeros(64, np.float64)
    # id_block and channel are read as globals by clustering().
    project = 19
    id_block = "78"
    channel = "1"
    points = 3
    nnodos = 20
    output = mp.Queue()
    nodos = __select_nodes(project, id_block, channel, nnodos)
    processes = []
    #serial(dbconn, nodos, output)
    # One worker process per spike group.
    for i in range(nnodos):
        processes.append(mp.Process(target=clustering, args=(nodos[i], points, output)))
    for p in processes:
        p.start()
    for p in processes:
        p.join(4)
    results = [output.get() for p in processes]
    # Flatten per-process (templates, spikes) pairs into two lists.
    templates = []
    ids = []
    for x in results:
        for y in x[0]:
            templates.append(y)
        for y in x[1]:
            ids.append(y)
    features_ids = insertFeaturesTemplate(templates, ids)
    features_ids = np.array(features_ids, np.float64)
    nspikes = len(features_ids)
    rho = np.empty(nspikes)
    delta = np.empty(nspikes)
    id_spikes = np.empty(nspikes)
    cluster_index = np.empty(len(features_ids))
    # Second-level density-peak clustering over the stored templates.
    dc = libcd.getDC(connect, features_ids, id_spikes, len(features_ids), np.float(2.0), points)
    libcd.dpClustering(features_ids, len(features_ids), dc, points, "gaussian", id_spikes, cluster_index, rho, delta)
    res = neurodb.features.getFromDB(features_ids, "extra")
    neurodb.features.removeOnDB(features_ids)
    n = len(features_ids)
    # NOTE(review): 'max' shadows the builtin; threshold is 6% of peak rho.
    max = rho.max()
    max = int(max*0.06)
    print max
    # Zero out delta for low-density points before fitting the line.
    for j in range(len(delta)):
        if (rho[j] < max):
            delta[j] = 0
    coeficientes1, stats1= np.polynomial.polynomial.polyfit(rho, delta, 1, full=True)
    ajuste1 = ajuste(rho, coeficientes1)
    desvio1 = (stats1[0][0]/float(n))**0.5
    plt.plot(rho, delta, 'bo')
    plt.plot(rho, ajuste1, 'r')
    plt.plot(rho, ajuste1 + 1.4*desvio1, 'g')
    plt.show()
    pass
|
sergiohr/NeuroDB
|
test/test_multi.py
|
Python
|
gpl-3.0
| 7,558
|
[
"Gaussian"
] |
b14af262722efd3a4ef2720d2d62addb6917387c2d73ec7ee03ea3288479f34a
|
#!/usr/bin/env python3
# -*- coding: utf-8 -*-
#
# . documentation build configuration file, created by
# sphinx-quickstart on Thu Mar 19 17:41:28 2015.
#
# This file is execfile()d with the current directory set to its
# containing dir.
#
# Note that not all possible configuration values are present in this
# autogenerated file.
#
# All configuration values have a default; values that are commented out
# serve to show the default.
import sys
import os
import sphinx_rtd_theme
# If extensions (or modules to document with autodoc) are in another directory,
# add these directories to sys.path here. If the directory is relative to the
# documentation root, use os.path.abspath to make it absolute, like shown here.
sys.path.insert(0, os.path.abspath('../../python/'))
# -- General configuration ------------------------------------------------
# If your documentation needs a minimal Sphinx version, state it here.
#needs_sphinx = '1.0'
# Add any Sphinx extension module names here, as strings. They can be
# extensions coming with Sphinx (named 'sphinx.ext.*') or your custom
# ones.
extensions = [
'sphinx.ext.autodoc', 'sphinx.ext.napoleon', 'sphinx.ext.autosummary',
'sphinx.ext.todo', 'sphinx.ext.viewcode', 'sphinx.ext.mathjax',
]
# Add any paths that contain templates here, relative to this directory.
templates_path = ['_templates']
# The suffix of source filenames.
source_suffix = '.rst'
# The encoding of source files.
#source_encoding = 'utf-8-sig'
# The master toctree document.
master_doc = 'index'
# General information about the project.
project = 'SciScript-Python'
copyright = '2017, SciServer'
author = u'Gerard Lemson, Manuchehr Taghizadeh-Popp, Brian Cherinka'
# The version info for the project you're documenting, acts as replacement for
# |version| and |release|, also used in various other places throughout the
# built documents.
#
# The short X.Y version.
import sciserver
version = sciserver.__version__
# The full version, including alpha/beta/rc tags.
release = sciserver.__version__
# The language for content autogenerated by Sphinx. Refer to documentation
# for a list of supported languages.
#language = None
# There are two options for replacing |today|: either, you set today to some
# non-false value, then it is used:
#today = ''
# Else, today_fmt is used as the format for a strftime call.
#today_fmt = '%B %d, %Y'
# List of patterns, relative to source directory, that match files and
# directories to ignore when looking for source files.
exclude_patterns = ['_build']
# The reST default role (used for this markup: `text`) to use for all
# documents.
#default_role = None
# If true, '()' will be appended to :func: etc. cross-reference text.
#add_function_parentheses = True
# If true, the current module name will be prepended to all description
# unit titles (such as .. function::).
add_module_names = True
# If true, sectionauthor and moduleauthor directives will be shown in the
# output. They are ignored by default.
#show_authors = False
# The name of the Pygments (syntax highlighting) style to use.
pygments_style = 'sphinx'
# A list of ignored prefixes for module index sorting.
#modindex_common_prefix = []
# If true, keep warnings as "system message" paragraphs in the built documents.
#keep_warnings = False
# -- Options for HTML output ----------------------------------------------
# The theme to use for HTML and HTML Help pages. See the documentation for
# a list of builtin themes.
html_theme = 'sphinx_rtd_theme'
# Theme options are theme-specific and customize the look and feel of a theme
# further. For a list of options available for each theme, see the
# documentation.
html_theme_path = [sphinx_rtd_theme.get_html_theme_path()]
# Add any paths that contain custom themes here, relative to this directory.
#html_theme_path = []
# The name for this set of Sphinx documents. If None, it defaults to
# "<project> v<release> documentation".
#html_title = None
# A shorter title for the navigation bar. Default is the same as html_title.
#html_short_title = None
# The name of an image file (relative to this directory) to place at the top
# of the sidebar.
#html_logo = None
# The name of an image file (within the static path) to use as favicon of the
# docs. This file should be a Windows icon file (.ico) being 16x16 or 32x32
# pixels large.
#html_favicon = None
# Add any paths that contain custom static files (such as style sheets) here,
# relative to this directory. They are copied after the builtin static files,
# so a file named "default.css" will overwrite the builtin "default.css".
html_static_path = ['_static']
# Add any extra paths that contain custom files (such as robots.txt or
# .htaccess) here, relative to this directory. These files are copied
# directly to the root of the documentation.
#html_extra_path = []
# If not '', a 'Last updated on:' timestamp is inserted at every page bottom,
# using the given strftime format.
#html_last_updated_fmt = '%b %d, %Y'
# If true, SmartyPants will be used to convert quotes and dashes to
# typographically correct entities.
#html_use_smartypants = True
# Custom sidebar templates, maps document names to template names.
html_sidebars = {'**': ['localtoc.html']}
#html_sidebars = {'**': ['searchbox.html', 'globaltoc.html', 'sourcelink.html']}
# Additional templates that should be rendered to pages, maps page names to
# template names.
#html_additional_pages = {}
# If false, no module index is generated.
#html_domain_indices = True
# If false, no index is generated.
#html_use_index = True
# If true, the index is split into individual pages for each letter.
#html_split_index = False
# If true, links to the reST sources are added to the pages.
#html_show_sourcelink = True
# If true, "Created using Sphinx" is shown in the HTML footer. Default is True.
#html_show_sphinx = True
# If true, "(C) Copyright ..." is shown in the HTML footer. Default is True.
#html_show_copyright = True
# If true, an OpenSearch description file will be output, and all pages will
# contain a <link> tag referring to it. The value of this option must be the
# base URL from which the finished HTML is served.
#html_use_opensearch = ''
# This is the file name suffix for HTML files (e.g. ".xhtml").
#html_file_suffix = None
# Output file base name for HTML help builder.
htmlhelp_basename = 'sciserverdoc'
# -- Options for LaTeX output ---------------------------------------------
latex_elements = {
# The paper size ('letterpaper' or 'a4paper').
#'papersize': 'letterpaper',
# The font size ('10pt', '11pt' or '12pt').
#'pointsize': '10pt',
# Additional stuff for the LaTeX preamble.
#'preamble': '',
}
# Grouping the document tree into LaTeX files. List of tuples
# (source start file, target name, title,
# author, documentclass [howto, manual, or own class]).
latex_documents = [
(master_doc, 'SciServer.tex', u'SciServer Documentation',
'Gerard Lemson, Manuchehr Taghizadeh-Popp, Brian Cherinka', 'manual'),
]
# The name of an image file (relative to this directory) to place at the top of
# the title page.
#latex_logo = None
# For "manual" documents, if this is true, then toplevel headings are parts,
# not chapters.
#latex_use_parts = False
# If true, show page references after internal links.
#latex_show_pagerefs = False
# If true, show URL addresses after external links.
#latex_show_urls = False
# Documents to append as an appendix to all manuals.
#latex_appendices = []
# If false, no module index is generated.
#latex_domain_indices = True
# -- Options for manual page output ---------------------------------------
# One entry per manual page. List of tuples
# (source start file, name, description, authors, manual section).
man_pages = [
(master_doc, 'sciserver', u'SciServer Documentation',
[author], 1)
]
# If true, show URL addresses after external links.
#man_show_urls = False
# -- Options for Texinfo output -------------------------------------------
# Grouping the document tree into Texinfo files. List of tuples
# (source start file, target name, title, author,
# dir menu entry, description, category)
texinfo_documents = [
(master_doc, 'SciServer', 'SciServer Documentation',
author, 'SciServer', 'One line description of project.',
'Miscellaneous'),
]
# Documents to append as an appendix to all manuals.
#texinfo_appendices = []
# If false, no module index is generated.
#texinfo_domain_indices = True
# How to display URL addresses: 'footnote', 'no', or 'inline'.
#texinfo_show_urls = 'footnote'
# If true, do not generate a @detailmenu in the "Top" node's menu.
#texinfo_no_detailmenu = False
# -- Options for Epub output ----------------------------------------------
# Bibliographic Dublin Core info.
epub_title = '.'
epub_author = author
epub_publisher = author
epub_copyright = '2017, SciServer'
# The basename for the epub file. It defaults to the project name.
#epub_basename = '.'
# The HTML theme for the epub output. Since the default themes are not optimized
# for small screen space, using the same theme for HTML and epub output is
# usually not wise. This defaults to 'epub', a theme designed to save visual
# space.
#epub_theme = 'epub'
# The language of the text. It defaults to the language option
# or en if the language is not set.
#epub_language = ''
# The scheme of the identifier. Typical schemes are ISBN or URL.
#epub_scheme = ''
# The unique identifier of the text. This can be a ISBN number
# or the project homepage.
#epub_identifier = ''
# A unique identification for the text.
#epub_uid = ''
# A tuple containing the cover image and cover page html template filenames.
#epub_cover = ()
# A sequence of (type, uri, title) tuples for the guide element of content.opf.
#epub_guide = ()
# HTML files that should be inserted before the pages created by sphinx.
# The format is a list of tuples containing the path and title.
#epub_pre_files = []
# HTML files shat should be inserted after the pages created by sphinx.
# The format is a list of tuples containing the path and title.
#epub_post_files = []
# A list of files that should not be packed into the epub file.
epub_exclude_files = ['search.html']
# The depth of the table of contents in toc.ncx.
#epub_tocdepth = 3
# Allow duplicate toc entries.
#epub_tocdup = True
# Choose between 'default' and 'includehidden'.
#epub_tocscope = 'default'
# Fix unsupported image types using the PIL.
#epub_fix_images = False
# Scale large images.
#epub_max_image_width = 0
# How to display URL addresses: 'footnote', 'no', or 'inline'.
#epub_show_urls = 'inline'
# If false, no index is generated.
#epub_use_index = True
|
havok2063/SciScript-Python
|
docs/sphinx/conf.py
|
Python
|
apache-2.0
| 10,668
|
[
"Brian"
] |
f07f86d8669a1176536a4cd0dd8d68b4a4ead2f07eb061762bafd2e6f9d7e104
|
###############################################################################
# Copyright 2015-2020 University of Florida. All rights reserved.
# This file is part of UF CTS-IT's NACCulator project.
# Use of this source code is governed by the license found in the LICENSE file.
###############################################################################
from nacc.lbd.v3_1.ivp import forms as lbd_short_ivp_forms
from nacc.uds3 import packet as lbd_short_ivp_packet
def build_lbd_short_ivp_form(record):
    ''' Converts REDCap CSV data into a packet (list of IVP Form objects) '''
    packet = lbd_short_ivp_packet.Packet()

    # Set up the forms..........
    # This form cannot precede June 1, 2017.
    # BUGFIX: the old check negated only its first clause, so every visit in
    # 2017 raised (even valid post-June dates) while the month/day clauses
    # triggered the error instead of preventing it.  Comparing the full
    # (year, month, day) tuple expresses the rule directly.
    visit_date = (int(record['visityr']), int(record['visitmo']),
                  int(record['visitday']))
    if visit_date < (2017, 6, 1):
        raise ValueError('Visit date cannot precede June 1, 2017.')

    add_z1x(record, packet)
    # Forms B1L, B3L, B4L, B5L, B7L, B9L, C1L, E1L, E2L, E3L, D1L are REQUIRED.
    # Forms B2L and B6L are OPTIONAL and must be specifically marked as present
    # for nacculator to process them
    add_b1l(record, packet)
    try:
        # Preferred: an explicit checkbox marks B2L as submitted.
        if record['lbdb2ls'] == '1':
            add_b2l(record, packet)
    except KeyError:
        # Fallback for projects without the checkbox: infer from data presence.
        if record['lbudspch'] is not None:
            add_b2l(record, packet)
    add_b3l(record, packet)
    add_b4l(record, packet)
    add_b5l(record, packet)
    try:
        if record['lbdb6ls'] == '1':
            add_b6l(record, packet)
    except KeyError:
        if record['lbspcgim'] is not None:
            add_b6l(record, packet)
    add_b7l(record, packet)
    add_b9l(record, packet)
    add_c1l(record, packet)
    add_d1l(record, packet)
    add_e1l(record, packet)
    add_e2l(record, packet)
    add_e3l(record, packet)
    update_header(record, packet)
    return packet
def add_z1x(record, packet):
    """Build the Z1X (form checklist) from the REDCap record and prepend it.

    Copies the language/submission flags for every form into a FormZ1X and
    inserts it at position 0 so the checklist leads the packet.
    """
    Z1X = lbd_short_ivp_forms.FormZ1X()
    Z1X.LANGA1 = record['langa1']
    Z1X.LANGA2 = record['langa2']
    Z1X.A2SUB = record['a2sub']
    Z1X.A2NOT = record['a2not']
    Z1X.LANGA3 = record['langa3']
    Z1X.A3SUB = record['a3sub']
    Z1X.A3NOT = record['a3not']
    Z1X.LANGA4 = record['langa4']
    Z1X.A4SUB = record['a4sub']
    Z1X.A4NOT = record['a4not']
    Z1X.LANGA5 = record['langa5']
    Z1X.LANGB1 = record['langb1']
    Z1X.B1SUB = record['b1sub']
    Z1X.B1NOT = record['b1not']
    Z1X.LANGB4 = record['langb4']
    Z1X.LANGB5 = record['langb5']
    Z1X.B5SUB = record['b5sub']
    Z1X.B5NOT = record['b5not']
    Z1X.LANGB6 = record['langb6']
    Z1X.B6SUB = record['b6sub']
    Z1X.B6NOT = record['b6not']
    Z1X.LANGB7 = record['langb7']
    Z1X.B7SUB = record['b7sub']
    Z1X.B7NOT = record['b7not']
    Z1X.LANGB8 = record['langb8']
    Z1X.LANGB9 = record['langb9']
    Z1X.LANGC2 = record['langc2']
    Z1X.LANGD1 = record['langd1']
    Z1X.LANGD2 = record['langd2']
    # FTLD-module columns exist only in some REDCap projects; a missing key
    # falls through to the blank defaults below.
    try:
        Z1X.LANGA3A = record['langa3a']
        Z1X.FTDA3AFS = record['ftda3afs']
        Z1X.FTDA3AFR = record['ftda3afr']
        Z1X.LANGB3F = record['langb3f']
        Z1X.LANGB9F = record['langb9f']
        Z1X.LANGC1F = record['langc1f']
        Z1X.LANGC2F = record['langc2f']
        Z1X.LANGC3F = record['langc3f']
        Z1X.LANGC4F = record['langc4f']
        Z1X.FTDC4FS = record['ftdc4fs']
        Z1X.FTDC4FR = record['ftdc4fr']
        Z1X.LANGC5F = record['langc5f']
        Z1X.FTDC5FS = record['ftdc5fs']
        Z1X.FTDC5FR = record['ftdc5fr']
        Z1X.LANGC6F = record['langc6f']
        Z1X.FTDC6FS = record['ftdc6fs']
        Z1X.FTDC6FR = record['ftdc6fr']
        Z1X.LANGE2F = record['lange2f']
        Z1X.LANGE3F = record['lange3f']
        Z1X.LANGCLS = record['langcls']
        Z1X.CLSSUB = record['clssub']
    except KeyError:
        # NOTE: a KeyError midway resets ALL FTLD fields to blanks, including
        # any already copied above.
        Z1X.LANGA3A = ''
        Z1X.FTDA3AFS = ''
        Z1X.FTDA3AFR = ''
        Z1X.LANGB3F = ''
        Z1X.LANGB9F = ''
        Z1X.LANGC1F = ''
        Z1X.LANGC2F = ''
        Z1X.LANGC3F = ''
        Z1X.LANGC4F = ''
        Z1X.FTDC4FS = ''
        Z1X.FTDC4FR = ''
        Z1X.LANGC5F = ''
        Z1X.FTDC5FS = ''
        Z1X.FTDC5FR = ''
        Z1X.LANGC6F = ''
        Z1X.FTDC6FS = ''
        Z1X.FTDC6FR = ''
        Z1X.LANGE2F = ''
        Z1X.LANGE3F = ''
        Z1X.LANGCLS = ''
        Z1X.CLSSUB = '0'
    # for REDCap projects that don't have the LBD questions added to their Z1X,
    # we just see if there's info in the B2L and B6L forms and fill in
    # accordingly.
    try:
        Z1X.B2LSUB = record['b2lsub']
        Z1X.B2LNOT = record['b2lnot']
        Z1X.B6LSUB = record['b6lsub']
        Z1X.B6LNOT = record['b6lnot']
    except KeyError:
        if record['lbudspch'] is not None:
            Z1X.B2LSUB = '1'
            Z1X.B2LNOT = ''
        if record['lbspcgim'] is not None:
            Z1X.B6LSUB = '1'
            Z1X.B6LNOT = ''
    packet.insert(0, Z1X)
def add_b1l(record, packet):
    """Copy the B1L (autonomic/sensory symptoms) fields and append the form.

    The REDCap export keys are simply the lower-cased field names, so the
    literal keys below are the constant-folded form of ``'NAME'.lower()``.
    """
    B1L = lbd_short_ivp_forms.FormB1L()
    B1L.LBSSALIV = record['lbssaliv']
    B1L.LBSSWALL = record['lbsswall']
    B1L.LBSSMeLL = record['lbssmell']
    B1L.LBSSWeAt = record['lbssweat']
    B1L.LBSCoNSt = record['lbsconst']
    B1L.LBSUBLAD = record['lbsublad']
    B1L.LBSDZStU = record['lbsdzstu']
    B1L.LBSDZStN = record['lbsdzstn']
    B1L.LBSFAINt = record['lbsfaint']
    B1L.LBPSyAGe = record['lbpsyage']
    B1L.LBSStNSy = record['lbsstnsy']
    B1L.LBSITSy = record['lbsitsy']
    B1L.LBSStNDI = record['lbsstndi']
    B1L.LBSITDI = record['lbsitdi']
    B1L.LBSStNHt = record['lbsstnht']
    B1L.LBSITHR = record['lbsithr']
    B1L.LBSAGerM = record['lbsagerm']
    B1L.LBSAGeSM = record['lbsagesm']
    B1L.LBSAGeGt = record['lbsagegt']
    B1L.LBSAGeFL = record['lbsagefl']
    B1L.LBSAGetr = record['lbsagetr']
    B1L.LBSAGeBr = record['lbsagebr']
    packet.append(B1L)
def add_b2l(record, packet):
    """Copy the optional B2L fields and append the form to the packet.

    Literal keys are the constant-folded form of ``'NAME'.lower()``.
    """
    B2L = lbd_short_ivp_forms.FormB2L()
    B2L.LBUDSPCH = record['lbudspch']
    B2L.LBUDSALV = record['lbudsalv']
    B2L.LBUDSWAL = record['lbudswal']
    B2L.LBUWrIte = record['lbuwrite']
    B2L.LBUDFooD = record['lbudfood']
    B2L.LBUDreSS = record['lbudress']
    B2L.LBUDHyGN = record['lbudhygn']
    B2L.LBUDtUrN = record['lbudturn']
    B2L.LBUDFALL = record['lbudfall']
    B2L.LBUDFrZ = record['lbudfrz']
    B2L.LBUDWALK = record['lbudwalk']
    B2L.LBUDtreM = record['lbudtrem']
    B2L.LBUDSeNS = record['lbudsens']
    packet.append(B2L)
def add_b3l(record, packet):
    """Copy the B3L (UPDRS motor exam) fields and append the form.

    Literal keys are the constant-folded form of ``'NAME'.lower()``.
    """
    B3L = lbd_short_ivp_forms.FormB3L()
    B3L.LBUMSPCH = record['lbumspch']
    B3L.LBUMSPCX = record['lbumspcx']
    B3L.LBUMFACe = record['lbumface']
    B3L.LBUMFACX = record['lbumfacx']
    B3L.LBUMtrFA = record['lbumtrfa']
    B3L.LBUtrFAX = record['lbutrfax']
    B3L.LBUMtrrH = record['lbumtrrh']
    B3L.LBUtrrHX = record['lbutrrhx']
    B3L.LBUMtrLH = record['lbumtrlh']
    B3L.LBUtrLHX = record['lbutrlhx']
    B3L.LBUMtrrF = record['lbumtrrf']
    B3L.LBUtrrFX = record['lbutrrfx']
    B3L.LBUMtrLF = record['lbumtrlf']
    B3L.LBUtrLFX = record['lbutrlfx']
    B3L.LBUMAtrH = record['lbumatrh']
    B3L.LBUAtrHX = record['lbuatrhx']
    B3L.LBUMAtLH = record['lbumatlh']
    B3L.LBUAtLHX = record['lbuatlhx']
    B3L.LBUMrGNK = record['lbumrgnk']
    B3L.LBUrGNKX = record['lburgnkx']
    B3L.LBUMrGrU = record['lbumrgru']
    B3L.LBUrGrUX = record['lburgrux']
    B3L.LBUMrGLU = record['lbumrglu']
    B3L.LBUrGLUX = record['lburglux']
    B3L.LBUMrGrL = record['lbumrgrl']
    B3L.LBUrGrLX = record['lburgrlx']
    B3L.LBUMrGLL = record['lbumrgll']
    B3L.LBUrGLLX = record['lburgllx']
    B3L.LBUMFtrH = record['lbumftrh']
    B3L.LBUFtrHX = record['lbuftrhx']
    B3L.LBUMFtLH = record['lbumftlh']
    B3L.LBUFtLHX = record['lbuftlhx']
    B3L.LBUMHMrH = record['lbumhmrh']
    B3L.LBUHMrHX = record['lbuhmrhx']
    B3L.LBUMHMLH = record['lbumhmlh']
    B3L.LBUHMLHX = record['lbuhmlhx']
    B3L.LBUMPSrH = record['lbumpsrh']
    B3L.LBUPSrHX = record['lbupsrhx']
    B3L.LBUMPSLH = record['lbumpslh']
    B3L.LBUPSLHX = record['lbupslhx']
    B3L.LBUMLGrL = record['lbumlgrl']
    B3L.LBULGrLX = record['lbulgrlx']
    B3L.LBUMLGLL = record['lbumlgll']
    B3L.LBULGLLX = record['lbulgllx']
    B3L.LBUMrISe = record['lbumrise']
    B3L.LBUMrISX = record['lbumrisx']
    B3L.LBUMPoSt = record['lbumpost']
    B3L.LBUMPoSX = record['lbumposx']
    B3L.LBUMGAIt = record['lbumgait']
    B3L.LBUMGAIX = record['lbumgaix']
    B3L.LBUPStBL = record['lbupstbl']
    B3L.LBUPStBX = record['lbupstbx']
    B3L.LBUMBrAD = record['lbumbrad']
    B3L.LBUMBrAX = record['lbumbrax']
    B3L.LBUMHNyr = record['lbumhnyr']
    B3L.LBUMHNyX = record['lbumhnyx']
    packet.append(B3L)
def add_b4l(record, packet):
    """Create a FormB4L, copy its fields from ``record``, and append it to ``packet``.

    Field attributes keep NACC's mixed-case spelling; the source ``record``
    is keyed by the lowercased field name.
    """
    form = lbd_short_ivp_forms.FormB4L()
    fields = (
        'LBDeLUS', 'LBDHUrt', 'LBDSteAL', 'LBDAFFr', 'LBDGUeSt', 'LBDIMPoS',
        'LBDHoMe', 'LBDABAND', 'LBDPreS', 'LBDotHer', 'LBHALL', 'LBHVoICe',
        'LBHPeoPL', 'LBHNotPr', 'LBHoDor', 'LBHFeeL', 'LBHtASte', 'LBHotSeN',
        'LBANXIet', 'LBANeVNt', 'LBANreLX', 'LBANBrtH', 'LBANBUtt', 'LBANPLAC',
        'LBANSePr', 'LBANotHr', 'LBAPAtHy', 'LBAPSPNt', 'LBAPCoNV', 'LBAPAFF',
        'LBAPCHor', 'LBAPINt', 'LBAPFAML', 'LBAPINtr', 'LBAPotH',
    )
    for name in fields:
        setattr(form, name, record[name.lower()])
    packet.append(form)
def add_b5l(record, packet):
    """Create a FormB5L, copy its fields from ``record``, and append it to ``packet``."""
    form = lbd_short_ivp_forms.FormB5L()
    # Attribute names keep NACC's mixed case; record keys are lowercase.
    for name in ('LBMLtHrG', 'LBMSLeeP', 'LBMDISrG', 'LBMStAre'):
        setattr(form, name, record[name.lower()])
    packet.append(form)
def add_b6l(record, packet):
    """Create a FormB6L, copy its fields from ``record``, and append it to ``packet``.

    The original code spelled one key as ``'LBSPNerv'``; since lookups are
    lowercased this restyle uses the attribute spelling consistently.
    """
    form = lbd_short_ivp_forms.FormB6L()
    fields = (
        'LBSPCGIM', 'LBSPDrM', 'LBSPyrS', 'LBSPMoS', 'LBSPINJS', 'LBSPINJP',
        'LBSPCHAS', 'LBSPMoVe', 'LBSPLeGS', 'LBSPNerV', 'LBSPUrGL', 'LBSPSeNS',
        'LBSPWorS', 'LBSPWALK', 'LBSPAWAK', 'LBSPBrtH', 'LBSPtrt', 'LBSPCrMP',
        'LBSPALrt',
    )
    for name in fields:
        setattr(form, name, record[name.lower()])
    packet.append(form)
def add_b7l(record, packet):
    """Create a FormB7L, copy its fields from ``record``, and append it to ``packet``."""
    form = lbd_short_ivp_forms.FormB7L()
    fields = (
        'LBSCLIV', 'LBSCSLP', 'LBSCBeHV', 'LBSCDrM', 'LBSCyrS', 'LBSCMoS',
        'LBSCINJS', 'LBSCINJP', 'LBSCCHAS', 'LBSCMoVe', 'LBSCLeGS', 'LBSCNerV',
        'LBSCSeNS', 'LBSCWorS', 'LBSCWALK', 'LBSCAWAK', 'LBSCBrtH', 'LBSCtrt',
        'LBSCCrMP', 'LBSCALrt',
    )
    # Attribute names keep NACC's mixed case; record keys are lowercase.
    for name in fields:
        setattr(form, name, record[name.lower()])
    packet.append(form)
def add_b9l(record, packet):
    """Create a FormB9L, copy its fields from ``record``, and append it to ``packet``."""
    form = lbd_short_ivp_forms.FormB9L()
    fields = (
        'CoNSFALL', 'CoNSWKoF', 'CoNSLyAW', 'CoNSWKer', 'CoNSLttL', 'SCCorAte',
        'CoDSUNeX', 'CoDSSItP', 'CoDSWAtV', 'CoDStALK', 'CoDSAWDy', 'CoDSFLDy',
    )
    for name in fields:
        setattr(form, name, record[name.lower()])
    packet.append(form)
def add_c1l(record, packet):
    """Create a FormC1L, copy its fields from ``record``, and append it to ``packet``."""
    form = lbd_short_ivp_forms.FormC1L()
    for name in ('LBNPFACe', 'LBNPNoIS', 'LBNPtCor', 'LBNPPArD'):
        setattr(form, name, record[name.lower()])
    packet.append(form)
def add_d1l(record, packet):
    """Create a FormD1L, copy its fields from ``record``, and append it to ``packet``."""
    form = lbd_short_ivp_forms.FormD1L()
    fields = (
        'LBCDSCoG', 'LBCCMeM', 'LBCCLANG', 'LBCCAtt', 'LBCCeXDe', 'LBCCVIS',
        'LBCDSMoV', 'LBCMBrAD', 'LBCMrIGD', 'LBCMrtrM', 'LBCMPtrM', 'LBCMAtrM',
        'LBCMMyoC', 'LBCMGAIt', 'LBCMPINS', 'LBCDSBeV', 'LBCBDeP', 'LBCBAPA',
        'LBCBANX', 'LBCBHALL', 'LBCBDeL', 'LBCDSAUt', 'LBCAreM', 'LBCAAPN',
        'LBCALGSL', 'LBCArSLe', 'LBCADtSL', 'LBCACGFL', 'LBCAHyPt', 'LBCACoNS',
        'LBCAHyPS', 'LBCAFALL', 'LBCASyNC', 'LBCASNAP', 'LBCoGSt', 'LBCoGDX',
    )
    # Attribute names keep NACC's mixed case; record keys are lowercase.
    for name in fields:
        setattr(form, name, record[name.lower()])
    packet.append(form)
def add_e1l(record, packet):
    """Create a FormE1L, copy its fields from ``record``, and append it to ``packet``."""
    form = lbd_short_ivp_forms.FormE1L()
    fields = (
        'LBGLrrK2', 'LBGLrKIS', 'LBGPArK2', 'LBGPK2IS', 'LBGPArK7', 'LBGPK7IS',
        'LBGPINK1', 'LBGPNKIS', 'LBGSNCA', 'LBGSNCIS', 'LBGGBA', 'LBGGBAIS',
        'LBGotHr', 'LBGotHIS', 'LBGotHX',
    )
    for name in fields:
        setattr(form, name, record[name.lower()])
    packet.append(form)
def add_e2l(record, packet):
    """Create a FormE2L, copy its fields from ``record``, and append it to ``packet``."""
    form = lbd_short_ivp_forms.FormE2L()
    fields = (
        'LBISMrI', 'LBISMHIP', 'LBISMAVL', 'LBIFPet', 'LBIFPoCC', 'LBIFPtPP',
        'LBIFPISL', 'LBIFPAVL', 'LBIAPet', 'LBIAPAVL', 'LBItPet', 'LBItPAVL',
        'LBIDAtS', 'LBIDSABN',
    )
    for name in fields:
        setattr(form, name, record[name.lower()])
    packet.append(form)
def add_e3l(record, packet):
    """Create a FormE3L, copy its fields from ``record``, and append it to ``packet``."""
    form = lbd_short_ivp_forms.FormE3L()
    fields = (
        'LBoPoLyS', 'LBoPoPoS', 'LBoPoAVL', 'LBoCMIBG', 'LBoCMPoS', 'LBoCMAVL',
        'LBoANoS', 'LBoANPoS', 'LBoANAVL', 'LBoANVer', 'LBoANotH', 'LBoCGAIt',
        'LBoCGPoS', 'LBoCGAVL',
    )
    for name in fields:
        setattr(form, name, record[name.lower()])
    packet.append(form)
update_header(record, packet)
return packet
def update_header(record, packet):
    """Stamp the shared header fields onto every form in ``packet``.

    The Z1X form travels in the "I" packet with form version 3; every other
    form belongs to the "IL" packet at version 3.1.  Visit/identity fields
    are copied from ``record`` onto each form unconditionally.
    """
    for form in packet:
        if form.form_name == "Z1X":
            form.PACKET = "I"
            form.FORMVER = 3
        else:
            form.PACKET = "IL"
            form.FORMVER = 3.1
        form.FORMID = form.form_name
        form.ADCID = record['adcid']
        form.PTID = record['ptid']
        form.VISITMO = record['visitmo']
        form.VISITDAY = record['visitday']
        form.VISITYR = record['visityr']
        form.VISITNUM = record['visitnum']
        form.INITIALS = record['initials']
|
ctsit/nacculator
|
nacc/lbd/v3_1/ivp/builder.py
|
Python
|
bsd-2-clause
| 19,110
|
[
"VisIt"
] |
1fe787a1df224efeb41b2d7adf95fd19ce4fbec058c599ba317c315e314a835e
|
###############################################################################
##
## Copyright (C) 2014-2016, New York University.
## Copyright (C) 2011-2014, NYU-Poly.
## Copyright (C) 2006-2011, University of Utah.
## All rights reserved.
## Contact: contact@vistrails.org
##
## This file is part of VisTrails.
##
## "Redistribution and use in source and binary forms, with or without
## modification, are permitted provided that the following conditions are met:
##
## - Redistributions of source code must retain the above copyright notice,
## this list of conditions and the following disclaimer.
## - Redistributions in binary form must reproduce the above copyright
## notice, this list of conditions and the following disclaimer in the
## documentation and/or other materials provided with the distribution.
## - Neither the name of the New York University nor the names of its
## contributors may be used to endorse or promote products derived from
## this software without specific prior written permission.
##
## THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS"
## AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO,
## THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR
## PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR
## CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL,
## EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO,
## PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS;
## OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY,
## WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR
## OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF
## ADVISED OF THE POSSIBILITY OF SUCH DAMAGE."
##
###############################################################################
"""
This is a setup.py script generated by py2applet
Usage:
python setup.py py2app
"""
from setuptools import setup
import sys
sys.setrecursionlimit(1500)
VERSION = '2.2.4'
# Add VTK 6.2 to path
sys.path.insert(0,'/Users/vistrails/src/VTK6/build/Wrapping/Python')
sys.path.insert(0,'/Users/vistrails/src/VTK6/build/lib/')
plist = dict(
CFBundleName='VisTrails',
CFBundleShortVersionString=VERSION,
CFBundleGetInfoString=' '.join(['VisTrails', VERSION]),
CFBundleExecutable='vistrails',
CFBundleIdentifier='org.vistrails',
)
sys.path.append('../../..')
APP = ['../../../vistrails/run.py']
#comma-separated list of additional data files and
#folders to include (not for code!)
#DATA_FILES = ['/usr/local/graphviz-2.12/bin/dot',]
#removed gridifield: gridfield, gridfield.core, gridfield.algebra, gridfield.gfvis, gridfield.selfe, \
OPTIONS = {'argv_emulation': True,
'iconfile': 'resources/vistrails_icon.icns',
'includes': 'sip,pylab,xml,libxml2,libxslt,Cookie,BaseHTTPServer,\
multifile,shelve,uuid,sine,st,Numeric,pexpect,\
sqlite3,suds,shapelib,dbflib,\
mpl_toolkits.mplot3d,_mysql_exceptions,readline,\
HTMLParser,sqlalchemy,sqlalchemy.dialects.sqlite,\
sqlalchemy.dialects.mysql,sqlalchemy.dialects.postgresql,\
sqlalchemy.dialects.firebird,sqlalchemy.dialects.mssql,\
sqlalchemy.dialects.oracle,sqlalchemy.dialects.sybase,\
sqlalchemy.dialects.drizzle,certifi,backports.ssl_match_hostname,\
pymongo',
'packages': 'PyQt4,vtk,MySQLdb,matplotlib,vistrails,numpy,scipy,\
api,twisted,Scientific,distutils,h5py,batchq,osgeo,\
nose,IPython,zmq,pygments,pyth,psycopg2,remoteq,\
file_archive,sklearn,tej',
'excludes': 'mpl_toolkits.basemap,PyQt4.uic,PyQt4.uic.Compiler,\
PyQt4.uic.Loader,PyQt4.uic.port_v2,PyQt4.uic.port_v3',
'plist': plist,
}
setup(
app=APP,
# data_files=DATA_FILES,
options={'py2app': OPTIONS},
setup_requires=['py2app'],
)
|
VisTrails/VisTrails
|
scripts/dist/mac/setup.py
|
Python
|
bsd-3-clause
| 4,175
|
[
"VTK"
] |
1a1a9d605773ebaa7d6d5fecf6f79aa4ac75942d00c7eea5aec70db99f516b9d
|
from typing import TypeVar, Tuple, List, Callable, Generic, Type, Union, Optional, Any, cast
from abc import ABC
from .utils import combine_alternatives
from .tree import Tree, Branch
from .exceptions import VisitError, GrammarError
from .lexer import Token
###{standalone
from functools import wraps, update_wrapper
from inspect import getmembers, getmro
# Type variables shared by the visitor/transformer machinery below.
_Return_T = TypeVar('_Return_T')  # result type of a transformer/interpreter
_Return_V = TypeVar('_Return_V')  # result type of the next transformer in a chain
_Leaf_T = TypeVar('_Leaf_T')      # leaf (token) type of the input tree
_Leaf_U = TypeVar('_Leaf_U')      # leaf type of an intermediate tree
_R = TypeVar('_R')                # generic helper return type
_FUNC = Callable[..., _Return_T]
_DECORATED = Union[_FUNC, type]   # what @v_args may decorate: a callback or a class
class _DiscardType:
    """Singleton sentinel type for ``Discard``.

    Returning ``Discard`` from a transformer callback drops that node from
    its parent instead of inserting a result.

    Example:
        ::

            class T(Transformer):
                def ignore_tree(self, children):
                    return Discard

                def IGNORE_TOKEN(self, token):
                    return Discard
    """

    def __repr__(self):
        return "lark.visitors.Discard"


Discard = _DiscardType()
# Transformers
class _Decoratable:
    "Provides support for decorating methods with @v_args"

    @classmethod
    def _apply_v_args(cls, visit_wrapper):
        # Wrap every user-defined callback on this class with ``visit_wrapper``.
        mro = getmro(cls)
        assert mro[0] is cls
        # Names that come from base classes (and are not overridden here) are
        # library machinery, not user callbacks, and must be left untouched.
        libmembers = {name for _cls in mro[1:] for name, _ in getmembers(_cls)}
        for name, value in getmembers(cls):
            # Make sure the function isn't inherited (unless it's overwritten)
            if name.startswith('_') or (name in libmembers and name not in cls.__dict__):
                continue
            if not callable(value):
                continue

            # Skip if v_args already applied (at the function level)
            if isinstance(cls.__dict__[name], _VArgsWrapper):
                continue

            setattr(cls, name, _VArgsWrapper(cls.__dict__[name], visit_wrapper))
        return cls

    def __class_getitem__(cls, _):
        # Accept subscripting (e.g. ``Transformer[Token, int]``) on subclasses
        # without inheriting from Generic; the type argument is ignored at runtime.
        return cls
class Transformer(_Decoratable, ABC, Generic[_Leaf_T, _Return_T]):
    """Transformers visit each node of the tree, and run the appropriate method on it according to the node's data.

    Methods are provided by the user via inheritance, and called according to ``tree.data``.
    The returned value from each method replaces the node in the tree structure.

    Transformers work bottom-up (or depth-first), starting with the leaves and ending at the root of the tree.
    Transformers can be used to implement map & reduce patterns. Because nodes are reduced from leaf to root,
    at any point the callbacks may assume the children have already been transformed (if applicable).

    ``Transformer`` can do anything ``Visitor`` can do, but because it reconstructs the tree,
    it is slightly less efficient.

    To discard a node, return Discard (``lark.visitors.Discard``).

    All these classes implement the transformer interface:

    - ``Transformer`` - Recursively transforms the tree. This is the one you probably want.
    - ``Transformer_InPlace`` - Non-recursive. Changes the tree in-place instead of returning new instances
    - ``Transformer_InPlaceRecursive`` - Recursive. Changes the tree in-place instead of returning new instances

    Parameters:
        visit_tokens (bool, optional): Should the transformer visit tokens in addition to rules.
            Setting this to ``False`` is slightly faster. Defaults to ``True``.
            (For processing ignored tokens, use the ``lexer_callbacks`` options)

    NOTE: A transformer without methods essentially performs a non-memoized partial deepcopy.
    """
    __visit_tokens__ = True   # For backwards compatibility

    def __init__(self, visit_tokens: bool=True) -> None:
        self.__visit_tokens__ = visit_tokens

    def _call_userfunc(self, tree, new_children=None):
        # Assumes tree is already transformed
        children = new_children if new_children is not None else tree.children
        try:
            f = getattr(self, tree.data)
        except AttributeError:
            # No user callback for this rule -- fall back to the default handler.
            return self.__default__(tree.data, children, tree.meta)
        else:
            try:
                # ``visit_wrapper`` is attached by the @v_args machinery
                # (see _VArgsWrapper) and changes the call convention.
                wrapper = getattr(f, 'visit_wrapper', None)
                if wrapper is not None:
                    return f.visit_wrapper(f, tree.data, children, tree.meta)
                else:
                    return f(children)
            except GrammarError:
                # Grammar errors propagate unchanged rather than being
                # wrapped as VisitError.
                raise
            except Exception as e:
                raise VisitError(tree.data, tree, e)

    def _call_userfunc_token(self, token):
        # Token counterpart of _call_userfunc: dispatch on ``token.type``.
        try:
            f = getattr(self, token.type)
        except AttributeError:
            return self.__default_token__(token)
        else:
            try:
                return f(token)
            except GrammarError:
                raise
            except Exception as e:
                raise VisitError(token.type, token, e)

    def _transform_children(self, children):
        # Generator: yield each child's transformed value, dropping Discard results.
        for c in children:
            if isinstance(c, Tree):
                res = self._transform_tree(c)
            elif self.__visit_tokens__ and isinstance(c, Token):
                res = self._call_userfunc_token(c)
            else:
                res = c

            if res is not Discard:
                yield res

    def _transform_tree(self, tree):
        # Depth-first: children are transformed before the node's own callback runs.
        children = list(self._transform_children(tree.children))
        return self._call_userfunc(tree, children)

    def transform(self, tree: Tree[_Leaf_T]) -> _Return_T:
        "Transform the given tree, and return the final result"
        return self._transform_tree(tree)

    def __mul__(
            self: 'Transformer[_Leaf_T, Tree[_Leaf_U]]',
            other: 'Union[Transformer[_Leaf_U, _Return_V], TransformerChain[_Leaf_U, _Return_V,]]'
    ) -> 'TransformerChain[_Leaf_T, _Return_V]':
        """Chain two transformers together, returning a new transformer.
        """
        return TransformerChain(self, other)

    def __default__(self, data, children, meta):
        """Default function that is called if there is no attribute matching ``data``

        Can be overridden. Defaults to creating a new copy of the tree node (i.e. ``return Tree(data, children, meta)``)
        """
        return Tree(data, children, meta)

    def __default_token__(self, token):
        """Default function that is called if there is no attribute matching ``token.type``

        Can be overridden. Defaults to returning the token as-is.
        """
        return token
def merge_transformers(base_transformer=None, **transformers_to_merge):
    """Merge a collection of transformers into ``base_transformer``, each under its own namespace.

    Every public method of each keyword-argument transformer is attached to
    the base transformer under the name ``<prefix>__<methodname>``, where the
    prefix is the keyword it was passed with.  This is especially useful for
    grammars that import other grammars, whose rules arrive with a consistent
    ``prefix__`` in their names.

    Parameters:
        base_transformer (Transformer, optional): The transformer that receives
            the merged methods.  A fresh ``Transformer`` is created if omitted.
        **transformers_to_merge: ``name_prefix = transformer`` pairs.

    Raises:
        AttributeError: In case of a name collision in the merged methods

    Example:
        ::

            class TBase(Transformer):
                def start(self, children):
                    return children[0] + 'bar'

            class TImportedGrammar(Transformer):
                def foo(self, children):
                    return "foo"

            composed_transformer = merge_transformers(TBase(), imported=TImportedGrammar())
            t = Tree('start', [ Tree('imported__foo', []) ])
            assert composed_transformer.transform(t) == 'foobar'
    """
    if base_transformer is None:
        base_transformer = Transformer()

    for prefix, transformer in transformers_to_merge.items():
        # Only public, non-"transform" attributes are merge candidates.
        candidates = (name for name in dir(transformer)
                      if not name.startswith("_") and name != "transform")
        for name in candidates:
            member = getattr(transformer, name)
            if not callable(member):
                continue
            target_name = prefix + "__" + name
            if hasattr(base_transformer, target_name):
                raise AttributeError("Cannot merge: method '%s' appears more than once" % target_name)
            setattr(base_transformer, target_name, member)

    return base_transformer
class InlineTransformer(Transformer):   # XXX Deprecated
    def _call_userfunc(self, tree, new_children=None):
        # Like Transformer._call_userfunc, but splats the (already transformed)
        # children as positional arguments instead of passing one list.
        children = tree.children if new_children is None else new_children
        try:
            callback = getattr(self, tree.data)
        except AttributeError:
            return self.__default__(tree.data, children, tree.meta)
        return callback(*children)
class TransformerChain(Generic[_Leaf_T, _Return_T]):
    """A sequence of transformers applied one after another."""

    transformers: 'Tuple[Union[Transformer, TransformerChain], ...]'

    def __init__(self, *transformers: 'Union[Transformer, TransformerChain]') -> None:
        self.transformers = transformers

    def transform(self, tree: Tree[_Leaf_T]) -> _Return_T:
        """Feed ``tree`` through each transformer in order; return the last result."""
        result = tree
        for stage in self.transformers:
            result = stage.transform(result)
        return cast(_Return_T, result)

    def __mul__(
            self: 'TransformerChain[_Leaf_T, Tree[_Leaf_U]]',
            other: 'Union[Transformer[_Leaf_U, _Return_V], TransformerChain[_Leaf_U, _Return_V]]'
    ) -> 'TransformerChain[_Leaf_T, _Return_V]':
        # Produces a new, longer chain; neither operand is mutated.
        return TransformerChain(*(self.transformers + (other,)))
class Transformer_InPlace(Transformer):
    """Non-recursive ``Transformer`` that mutates the tree in place instead of
    returning new instances.

    Useful for huge trees. Conservative in memory.
    """

    def _transform_tree(self, tree):  # Cancel recursion
        # transform() drives the traversal; here we only run the callback.
        return self._call_userfunc(tree)

    def transform(self, tree: Tree[_Leaf_T]) -> _Return_T:
        for node in tree.iter_subtrees():
            node.children = list(self._transform_children(node.children))
        return self._transform_tree(tree)
class Transformer_NonRecursive(Transformer):
    """Same as Transformer but non-recursive.

    Like Transformer, it doesn't change the original tree.

    Useful for huge trees.
    """

    def transform(self, tree: Tree[_Leaf_T]) -> _Return_T:
        # Tree to postfix: iterative DFS producing nodes in reverse-postorder,
        # so that reversed(rev_postfix) yields children before their parents.
        rev_postfix = []
        q: List[Branch[_Leaf_T]] = [tree]
        while q:
            t = q.pop()
            rev_postfix.append(t)
            if isinstance(t, Tree):
                q += t.children

        # Postfix to tree: replay the nodes bottom-up, using a stack of
        # already-transformed results in place of recursion.
        stack: List = []
        for x in reversed(rev_postfix):
            if isinstance(x, Tree):
                size = len(x.children)
                if size:
                    # The top ``size`` stack entries are this node's transformed children.
                    args = stack[-size:]
                    del stack[-size:]
                else:
                    args = []

                res = self._call_userfunc(x, args)
                if res is not Discard:
                    stack.append(res)
            elif self.__visit_tokens__ and isinstance(x, Token):
                res = self._call_userfunc_token(x)
                if res is not Discard:
                    stack.append(res)
            else:
                stack.append(x)

        result, = stack  # We should have only one tree remaining
        # There are no guarantees on the type of the value produced by calling a user func for a
        # child will produce. This means type system can't statically know that the final result is
        # _Return_T. As a result a cast is required.
        return cast(_Return_T, result)
class Transformer_InPlaceRecursive(Transformer):
    "Recursive ``Transformer`` that mutates the tree in place instead of returning new instances."

    def _transform_tree(self, tree):
        transformed_children = list(self._transform_children(tree.children))
        tree.children = transformed_children
        return self._call_userfunc(tree)
# Visitors
class VisitorBase:
    def _call_userfunc(self, tree):
        """Dispatch to the method named after ``tree.data``; default is ``__default__``."""
        visitor = getattr(self, tree.data, self.__default__)
        return visitor(tree)

    def __default__(self, tree):
        """Default function that is called if there is no attribute matching ``tree.data``

        Can be overridden. Defaults to doing nothing.
        """
        return tree

    def __class_getitem__(cls, _):
        # Accept subscripting (e.g. ``Visitor[Token]``) without being Generic.
        return cls
class Visitor(VisitorBase, ABC, Generic[_Leaf_T]):
    """Non-recursive tree visitor (can handle huge trees).

    Visiting a node calls the method named after ``tree.data`` (provided by
    the user via inheritance).
    """

    def visit(self, tree: Tree[_Leaf_T]) -> Tree[_Leaf_T]:
        "Visit the tree bottom-up: leaves first, root last."
        for node in tree.iter_subtrees():
            self._call_userfunc(node)
        return tree

    def visit_topdown(self, tree: Tree[_Leaf_T]) -> Tree[_Leaf_T]:
        "Visit the tree top-down: root first, leaves last."
        for node in tree.iter_subtrees_topdown():
            self._call_userfunc(node)
        return tree
class Visitor_Recursive(VisitorBase, Generic[_Leaf_T]):
    """Recursive bottom-up visitor.

    Visiting a node calls the method named after ``tree.data``.
    Slightly faster than the non-recursive ``Visitor``.
    """

    def visit(self, tree: Tree[_Leaf_T]) -> Tree[_Leaf_T]:
        "Visit the tree bottom-up: children are fully visited before their parent."
        for subtree in tree.children:
            if isinstance(subtree, Tree):
                self.visit(subtree)
        self._call_userfunc(tree)
        return tree

    def visit_topdown(self, tree: Tree[_Leaf_T]) -> Tree[_Leaf_T]:
        "Visit the tree top-down: each node is handled before its children."
        self._call_userfunc(tree)
        for subtree in tree.children:
            if isinstance(subtree, Tree):
                self.visit_topdown(subtree)
        return tree
class Interpreter(_Decoratable, ABC, Generic[_Leaf_T, _Return_T]):
    """Interpreter walks the tree starting at the root.

    Visits the tree, starting with the root and finally the leaves (top-down)

    For each tree node, it calls its methods (provided by user via inheritance) according to ``tree.data``.

    Unlike ``Transformer`` and ``Visitor``, the Interpreter doesn't automatically visit its sub-branches.
    The user has to explicitly call ``visit``, ``visit_children``, or use the ``@visit_children_decor``.
    This allows the user to implement branching and loops.
    """

    def visit(self, tree: Tree[_Leaf_T]) -> _Return_T:
        # There are no guarantees on the type of the value produced by calling a user func for a
        # child will produce. So only annotate the public method and use an internal method when
        # visiting child trees.
        return self._visit_tree(tree)

    def _visit_tree(self, tree: Tree[_Leaf_T]):
        f = getattr(self, tree.data)
        # ``visit_wrapper`` is attached by the @v_args machinery (see _VArgsWrapper).
        wrapper = getattr(f, 'visit_wrapper', None)
        if wrapper is not None:
            return f.visit_wrapper(f, tree.data, tree.children, tree.meta)
        else:
            return f(tree)

    def visit_children(self, tree: Tree[_Leaf_T]) -> List:
        # Subtrees are visited; non-Tree children (leaves) pass through unchanged.
        return [self._visit_tree(child) if isinstance(child, Tree) else child
                for child in tree.children]

    def __getattr__(self, name):
        # Any rule without an explicit method falls back to __default__,
        # which visits the children. (__getattr__ only fires for missing names.)
        return self.__default__

    def __default__(self, tree):
        return self.visit_children(tree)
_InterMethod = Callable[[Type[Interpreter], _Return_T], _R]

def visit_children_decor(func: _InterMethod) -> _InterMethod:
    """Decorator for ``Interpreter`` methods: visit all children first, then
    call the wrapped method with the list of their results. (See Interpreter.)"""
    @wraps(func)
    def inner(cls, tree):
        return func(cls, cls.visit_children(tree))
    return inner
# Decorators
def _apply_v_args(obj, visit_wrapper):
    # Classes decorated with @v_args expose their own ``_apply_v_args`` hook
    # (see _Decoratable); plain callables are wrapped directly.
    try:
        apply_hook = obj._apply_v_args
    except AttributeError:
        return _VArgsWrapper(obj, visit_wrapper)
    return apply_hook(visit_wrapper)
class _VArgsWrapper:
    """
    A wrapper around a Callable. It delegates `__call__` to the Callable.
    If the Callable has a `__get__`, that is also delegate and the resulting function is wrapped.
    Otherwise, we use the original function mirroring the behaviour without a __get__.
    We also have the visit_wrapper attribute to be used by Transformers.
    """
    base_func: Callable

    def __init__(self, func: Callable, visit_wrapper: Callable[[Callable, str, list, Any], Any]):
        if isinstance(func, _VArgsWrapper):
            # Never stack wrappers: unwrap to the innermost real function first.
            func = func.base_func
        # https://github.com/python/mypy/issues/708
        self.base_func = func  # type: ignore[assignment]
        self.visit_wrapper = visit_wrapper
        update_wrapper(self, func)

    def __call__(self, *args, **kwargs):
        return self.base_func(*args, **kwargs)

    def __get__(self, instance, owner=None):
        # Behave as a descriptor so wrapped methods still bind like normal
        # methods; the bound result is re-wrapped to keep visit_wrapper.
        try:
            g = self.base_func.__get__
        except AttributeError:
            return self
        else:
            return _VArgsWrapper(g(instance, owner), self.visit_wrapper)

    def __set_name__(self, owner, name):
        # Forward __set_name__ so descriptors inside base_func keep working.
        try:
            f = self.base_func.__set_name__
        except AttributeError:
            return
        else:
            f(owner, name)
def _vargs_inline(f, _data, children, _meta):
    # v_args(inline=True): pass the children as positional arguments.
    return f(*children)
def _vargs_meta_inline(f, _data, children, meta):
    # v_args(meta=True, inline=True): meta first, then children as positionals.
    return f(meta, *children)
def _vargs_meta(f, _data, children, meta):
    # v_args(meta=True): pass meta and the children list.
    return f(meta, children)
def _vargs_tree(f, data, children, meta):
    # v_args(tree=True): rebuild the node and pass the whole tree.
    return f(Tree(data, children, meta))
def v_args(inline: bool = False, meta: bool = False, tree: bool = False, wrapper: Optional[Callable] = None) -> Callable[[_DECORATED], _DECORATED]:
    """A convenience decorator factory for modifying the behavior of user-supplied visitor methods.

    By default, callback methods of transformers/visitors accept one argument -
    a list of the node's children.

    ``v_args`` can modify this behavior. When used on a transformer/visitor class
    definition, it applies to all the callback methods inside it.

    ``v_args`` can be applied to a single method, or to an entire class. When
    applied to both, the options given to the method take precedence.

    Parameters:
        inline (bool, optional): Children are provided as ``*args`` instead of a list argument (not recommended for very long lists).
        meta (bool, optional): Provides two arguments: ``children`` and ``meta`` (instead of just the first)
        tree (bool, optional): Provides the entire tree as the argument, instead of the children.
        wrapper (function, optional): Provide a function to decorate all methods.

    Example:
        ::

            @v_args(inline=True)
            class SolveArith(Transformer):
                def add(self, left, right):
                    return left + right

            class ReverseNotation(Transformer_InPlace):
                @v_args(tree=True)
                def tree_node(self, tree):
                    tree.children = tree.children[::-1]
    """
    if tree and (meta or inline):
        raise ValueError("Visitor functions cannot combine 'tree' with 'meta' or 'inline'.")

    # Select the call-convention shim matching the requested options.
    if meta:
        func = _vargs_meta_inline if inline else _vargs_meta
    elif inline:
        func = _vargs_inline
    elif tree:
        func = _vargs_tree
    else:
        func = None

    if wrapper is not None:
        if func is not None:
            raise ValueError("Cannot use 'wrapper' along with 'tree', 'meta' or 'inline'.")
        func = wrapper

    def _visitor_args_dec(obj):
        return _apply_v_args(obj, func)
    return _visitor_args_dec
###}
# --- Visitor Utilities ---
class CollapseAmbiguities(Transformer):
    """
    Transforms a tree that contains any number of _ambig nodes into a list of trees,
    each one containing an unambiguous tree.

    The length of the resulting list is the product of the length of all _ambig nodes.

    Warning: This may quickly explode for highly ambiguous trees.
    """
    def _ambig(self, options):
        # Each option is already a list of candidate trees; flatten them into one list.
        # chain.from_iterable is O(total) -- ``sum(options, [])`` re-copies the
        # accumulator on every step and is quadratic in the number of options.
        from itertools import chain
        return list(chain.from_iterable(options))

    def __default__(self, data, children_lists, meta):
        # Cartesian product of the children's alternatives: one tree per combination.
        return [Tree(data, children, meta) for children in combine_alternatives(children_lists)]

    def __default_token__(self, t):
        # A token has exactly one interpretation.
        return [t]
|
lark-parser/lark
|
lark/visitors.py
|
Python
|
mit
| 20,653
|
[
"VisIt"
] |
1de477ec66fbd20494f52cc5d767bc91a7a5e84a65de730c8bd83bf2fd3bc794
|
"""
Perform Levenberg-Marquardt least-squares minimization, based on MINPACK-1.
AUTHORS
The original version of this software, called LMFIT, was written in FORTRAN
as part of the MINPACK-1 package by XXX.
Craig Markwardt converted the FORTRAN code to IDL. The information for the
IDL version is:
Craig B. Markwardt, NASA/GSFC Code 662, Greenbelt, MD 20770
craigm@lheamail.gsfc.nasa.gov
UPDATED VERSIONs can be found on my WEB PAGE:
http://cow.physics.wisc.edu/~craigm/idl/idl.html
Mark Rivers created this Python version from Craig's IDL version.
Mark Rivers, University of Chicago
Building 434A, Argonne National Laboratory
9700 South Cass Avenue, Argonne, IL 60439
rivers@cars.uchicago.edu
Updated versions can be found at http://cars.uchicago.edu/software
Sergey Koposov converted the Mark's Python version from Numeric to numpy
Sergey Koposov, University of Cambridge, Institute of Astronomy,
Madingley road, CB3 0HA, Cambridge, UK
koposov@ast.cam.ac.uk
Updated versions can be found at http://code.google.com/p/astrolibpy/source/browse/trunk/
DESCRIPTION
MPFIT uses the Levenberg-Marquardt technique to solve the
least-squares problem. In its typical use, MPFIT will be used to
fit a user-supplied function (the "model") to user-supplied data
points (the "data") by adjusting a set of parameters. MPFIT is
based upon MINPACK-1 (LMDIF.F) by More' and collaborators.
For example, a researcher may think that a set of observed data
points is best modelled with a Gaussian curve. A Gaussian curve is
parameterized by its mean, standard deviation and normalization.
MPFIT will, within certain constraints, find the set of parameters
which best fits the data. The fit is "best" in the least-squares
sense; that is, the sum of the weighted squared differences between
the model and data is minimized.
The Levenberg-Marquardt technique is a particular strategy for
iteratively searching for the best fit. This particular
implementation is drawn from MINPACK-1 (see NETLIB), and is much faster
and more accurate than the version provided in the Scientific Python package
in Scientific.Functions.LeastSquares.
This version allows upper and lower bounding constraints to be placed on each
parameter, or the parameter can be held fixed.
The user-supplied Python function should return an array of weighted
deviations between model and data. In a typical scientific problem
the residuals should be weighted so that each deviate has a
gaussian sigma of 1.0. If X represents values of the independent
variable, Y represents a measurement for each value of X, and ERR
represents the error in the measurements, then the deviates could
be calculated as follows:
DEVIATES = (Y - F(X)) / ERR
where F is the analytical function representing the model. You are
recommended to use the convenience functions MPFITFUN and
MPFITEXPR, which are driver functions that calculate the deviates
for you. If ERR are the 1-sigma uncertainties in Y, then
TOTAL( DEVIATES^2 )
will be the total chi-squared value. MPFIT will minimize the
chi-square value. The values of X, Y and ERR are passed through
MPFIT to the user-supplied function via the FUNCTKW keyword.
Simple constraints can be placed on parameter values by using the
PARINFO keyword to MPFIT. See below for a description of this
keyword.
MPFIT does not perform more general optimization tasks. See TNMIN
instead. MPFIT is customized, based on MINPACK-1, to the
least-squares minimization problem.
USER FUNCTION
The user must define a function which returns the appropriate
values as specified above. The function should return the weighted
deviations between the model and the data. It should also return a status
flag and an optional partial derivative array. For applications which
use finite-difference derivatives -- the default -- the user
function should be declared in the following way:
def myfunct(p, fjac=None, x=None, y=None, err=None)
# Parameter values are passed in "p"
# If fjac==None then partial derivatives should not be
# computed. It will always be None if MPFIT is called with default
# flag.
model = F(x, p)
# Non-negative status value means MPFIT should continue, negative means
# stop the calculation.
status = 0
return([status, (y-model)/err])
See below for applications with analytical derivatives.
The keyword parameters X, Y, and ERR in the example above are
suggestive but not required. Any parameters can be passed to
MYFUNCT by using the functkw keyword to MPFIT. Use MPFITFUN and
MPFITEXPR if you need ideas on how to do that. The function *must*
accept a parameter list, P.
In general there are no restrictions on the number of dimensions in
X, Y or ERR. However the deviates *must* be returned in a
one-dimensional Numeric array of type Float.
User functions may also indicate a fatal error condition using the
status return described above. If status is set to a number between
-15 and -1 then MPFIT will stop the calculation and return to the caller.
ANALYTIC DERIVATIVES
In the search for the best-fit solution, MPFIT by default
calculates derivatives numerically via a finite difference
approximation. The user-supplied function need not calculate the
derivatives explicitly. However, if you desire to compute them
analytically, then the AUTODERIVATIVE=0 keyword must be passed to MPFIT.
As a practical matter, it is often sufficient and even faster to allow
MPFIT to calculate the derivatives numerically, and so
AUTODERIVATIVE=0 is not necessary.
If AUTODERIVATIVE=0 is used then the user function must check the parameter
FJAC, and if FJAC!=None then return the partial derivative array in the
return list.
def myfunct(p, fjac=None, x=None, y=None, err=None)
# Parameter values are passed in "p"
# If FJAC!=None then partial derivatives must be computed.
# FJAC contains an array of len(p), where each entry
# is 1 if that parameter is free and 0 if it is fixed.
model = F(x, p)
# Non-negative status value means MPFIT should continue, negative means
# stop the calculation.
status = 0
if (dojac):
pderiv = zeros([len(x), len(p)], Float)
for j in range(len(p)):
pderiv[:,j] = FGRAD(x, p, j)
else:
pderiv = None
return([status, (y-model)/err, pderiv])
where FGRAD(x, p, i) is a user function which must compute the
derivative of the model with respect to parameter P[i] at X. When
finite differencing is used for computing derivatives (ie, when
AUTODERIVATIVE=1), or when MPFIT needs only the errors but not the
derivatives the parameter FJAC=None.
Derivatives should be returned in the PDERIV array. PDERIV should be an m x
n array, where m is the number of data points and n is the number
of parameters. dp[i,j] is the derivative at the ith point with
respect to the jth parameter.
The derivatives with respect to fixed parameters are ignored; zero
is an appropriate value to insert for those derivatives. Upon
input to the user function, FJAC is set to a vector with the same
length as P, with a value of 1 for a parameter which is free, and a
value of zero for a parameter which is fixed (and hence no
derivative needs to be calculated).
If the data is higher than one dimensional, then the *last*
dimension should be the parameter dimension. Example: fitting a
50x50 image, "dp" should be 50x50xNPAR.
CONSTRAINING PARAMETER VALUES WITH THE PARINFO KEYWORD
The behavior of MPFIT can be modified with respect to each
parameter to be fitted. A parameter value can be fixed; simple
boundary constraints can be imposed; limitations on the parameter
changes can be imposed; properties of the automatic derivative can
be modified; and parameters can be tied to one another.
These properties are governed by the PARINFO structure, which is
passed as a keyword parameter to MPFIT.
PARINFO should be a list of dictionaries, one list entry for each parameter.
Each parameter is associated with one element of the array, in
numerical order. The dictionary can have the following keys
(none are required, keys are case insensitive):
'value' - the starting parameter value (but see the START_PARAMS
parameter for more information).
'fixed' - a boolean value, whether the parameter is to be held
fixed or not. Fixed parameters are not varied by
MPFIT, but are passed on to MYFUNCT for evaluation.
'limited' - a two-element boolean array. If the first/second
element is set, then the parameter is bounded on the
lower/upper side. A parameter can be bounded on both
sides. Both LIMITED and LIMITS must be given
together.
'limits' - a two-element float array. Gives the
parameter limits on the lower and upper sides,
respectively. Zero, one or two of these values can be
set, depending on the values of LIMITED. Both LIMITED
and LIMITS must be given together.
'parname' - a string, giving the name of the parameter. The
fitting code of MPFIT does not use this tag in any
way. However, the default iterfunct will print the
parameter name if available.
'step' - the step size to be used in calculating the numerical
derivatives. If set to zero, then the step size is
computed automatically. Ignored when AUTODERIVATIVE=0.
'mpside' - the sidedness of the finite difference when computing
numerical derivatives. This field can take four
values:
0 - one-sided derivative computed automatically
1 - one-sided derivative (f(x+h) - f(x) )/h
-1 - one-sided derivative (f(x) - f(x-h))/h
2 - two-sided derivative (f(x+h) - f(x-h))/(2*h)
Where H is the STEP parameter described above. The
"automatic" one-sided derivative method will chose a
direction for the finite difference which does not
violate any constraints. The other methods do not
perform this check. The two-sided method is in
principle more precise, but requires twice as many
function evaluations. Default: 0.
'mpmaxstep' - the maximum change to be made in the parameter
value. During the fitting process, the parameter
will never be changed by more than this value in
one iteration.
A value of 0 indicates no maximum. Default: 0.
'tied' - a string expression which "ties" the parameter to other
free or fixed parameters. Any expression involving
constants and the parameter array P are permitted.
Example: if parameter 2 is always to be twice parameter
1 then use the following: parinfo(2).tied = '2 * p(1)'.
Since they are totally constrained, tied parameters are
considered to be fixed; no errors are computed for them.
[ NOTE: the PARNAME can't be used in expressions. ]
'mpprint' - if set to 1, then the default iterfunct will print the
parameter value. If set to 0, the parameter value
will not be printed. This tag can be used to
selectively print only a few parameter values out of
many. Default: 1 (all parameters printed)
Future modifications to the PARINFO structure, if any, will involve
adding dictionary tags beginning with the two letters "MP".
Therefore programmers are urged to avoid using tags starting with
the same letters; otherwise they are free to include their own
fields within the PARINFO structure, and they will be ignored.
PARINFO Example:
parinfo = [{'value':0., 'fixed':0, 'limited':[0,0], 'limits':[0.,0.]}
for i in range(5)]
parinfo[0]['fixed'] = 1
parinfo[4]['limited'][0] = 1
parinfo[4]['limits'][0] = 50.
values = [5.7, 2.2, 500., 1.5, 2000.]
for i in range(5): parinfo[i]['value']=values[i]
A total of 5 parameters, with starting values of 5.7,
2.2, 500, 1.5, and 2000 are given. The first parameter
is fixed at a value of 5.7, and the last parameter is
constrained to be above 50.
EXAMPLE
import mpfit
import numpy.oldnumeric as Numeric
x = arange(100, float)
p0 = [5.7, 2.2, 500., 1.5, 2000.]
y = ( p0[0] + p0[1]*x + p0[2]*x**2 + p0[3]*sqrt(x) +
p0[4]*log(x))
fa = {'x':x, 'y':y, 'err':err}
m = mpfit('myfunct', p0, functkw=fa)
print 'status = ', m.status
if (m.status <= 0): print 'error message = ', m.errmsg
print 'parameters = ', m.params
Minimizes sum of squares of MYFUNCT. MYFUNCT is called with the X,
Y, and ERR keyword parameters that are given by FUNCTKW. The
results can be obtained from the returned object m.
THEORY OF OPERATION
There are many specific strategies for function minimization. One
very popular technique is to use function gradient information to
realize the local structure of the function. Near a local minimum
the function value can be taylor expanded about x0 as follows:
f(x) = f(x0) + f'(x0) . (x-x0) + (1/2) (x-x0) . f''(x0) . (x-x0)
----- --------------- ------------------------------- (1)
Order 0th 1st 2nd
Here f'(x) is the gradient vector of f at x, and f''(x) is the
Hessian matrix of second derivatives of f at x. The vector x is
the set of function parameters, not the measured data vector. One
can find the minimum of f, f(xm) using Newton's method, and
arrives at the following linear equation:
f''(x0) . (xm-x0) = - f'(x0) (2)
If an inverse can be found for f''(x0) then one can solve for
(xm-x0), the step vector from the current position x0 to the new
projected minimum. Here the problem has been linearized (ie, the
gradient information is known to first order). f''(x0) is
symmetric n x n matrix, and should be positive definite.
The Levenberg - Marquardt technique is a variation on this theme.
It adds an additional diagonal term to the equation which may aid the
convergence properties:
(f''(x0) + nu I) . (xm-x0) = -f'(x0) (2a)
where I is the identity matrix. When nu is large, the overall
matrix is diagonally dominant, and the iterations follow steepest
descent. When nu is small, the iterations are quadratically
convergent.
In principle, if f''(x0) and f'(x0) are known then xm-x0 can be
determined. However the Hessian matrix is often difficult or
impossible to compute. The gradient f'(x0) may be easier to
compute, if even by finite difference techniques. So-called
quasi-Newton techniques attempt to successively estimate f''(x0)
by building up gradient information as the iterations proceed.
In the least squares problem there are further simplifications
which assist in solving eqn (2). The function to be minimized is
a sum of squares:
f = Sum(hi^2) (3)
where hi is the ith residual out of m residuals as described
above. This can be substituted back into eqn (2) after computing
the derivatives:
f' = 2 Sum(hi hi')
f'' = 2 Sum(hi' hj') + 2 Sum(hi hi'') (4)
If one assumes that the parameters are already close enough to a
minimum, then one typically finds that the second term in f'' is
negligible [or, in any case, is too difficult to compute]. Thus,
equation (2) can be solved, at least approximately, using only
gradient information.
In matrix notation, the combination of eqns (2) and (4) becomes:
hT' . h' . dx = - hT' . h (5)
Where h is the residual vector (length m), hT is its transpose, h'
is the Jacobian matrix (dimensions n x m), and dx is (xm-x0). The
user function supplies the residual vector h, and in some cases h'
when it is not found by finite differences (see MPFIT_FDJAC2,
which finds h and hT'). Even if dx is not the best absolute step
to take, it does provide a good estimate of the best *direction*,
so often a line minimization will occur along the dx vector
direction.
The method of solution employed by MINPACK is to form the Q . R
factorization of h', where Q is an orthogonal matrix such that QT .
Q = I, and R is upper right triangular. Using h' = Q . R and the
orthogonality of Q, eqn (5) becomes
(RT . QT) . (Q . R) . dx = - (RT . QT) . h
RT . R . dx = - RT . QT . h (6)
R . dx = - QT . h
where the last statement follows because R is upper triangular.
Here, R, QT and h are known so this is a matter of solving for dx.
The routine MPFIT_QRFAC provides the QR factorization of h, with
pivoting, and MPFIT_QRSOLV provides the solution for dx.
REFERENCES
MINPACK-1, Jorge More', available from netlib (www.netlib.org).
"Optimization Software Guide," Jorge More' and Stephen Wright,
SIAM, *Frontiers in Applied Mathematics*, Number 14.
More', Jorge J., "The Levenberg-Marquardt Algorithm:
Implementation and Theory," in *Numerical Analysis*, ed. Watson,
G. A., Lecture Notes in Mathematics 630, Springer-Verlag, 1977.
MODIFICATION HISTORY
- Translated from MINPACK-1 in FORTRAN, Apr-Jul 1998, CM
Copyright (C) 1997-2002, Craig Markwardt
This software is provided as is without any warranty whatsoever.
Permission to use, copy, modify, and distribute modified or
unmodified copies is granted, provided this copyright and disclaimer
are included unchanged.
- Translated from MPFIT (Craig Markwardt's IDL package) to Python,
August, 2002. Mark Rivers
- Converted from Numeric to numpy (Sergey Koposov, July 2008)
- Included a key modification for mge_fit_sectors.
Michele Cappellari, Oxford, 8 February 2014
- Support both Python 2.6/2.7 and Python 3. MC, Oxford, 25 May 2014
- Removed Scipy dependency. MC, Oxford, 13 August 2014
- Replaced numpy.rank function with ndim attribute to avoid
deprecation warning in Numpy 1.9. MC, Utah, 9 September 2014
"""
from __future__ import print_function
import numpy
# Original FORTRAN documentation
# **********
#
# subroutine lmdif
#
# the purpose of lmdif is to minimize the sum of the squares of
# m nonlinear functions in n variables by a modification of
# the levenberg-marquardt algorithm. the user must provide a
# subroutine which calculates the functions. the jacobian is
# then calculated by a forward-difference approximation.
#
# the subroutine statement is
#
# subroutine lmdif(fcn,m,n,x,fvec,ftol,xtol,gtol,maxfev,epsfcn,
# diag,mode,factor,nprint,info,nfev,fjac,
# ldfjac,ipvt,qtf,wa1,wa2,wa3,wa4)
#
# where
#
# fcn is the name of the user-supplied subroutine which
# calculates the functions. fcn must be declared
# in an external statement in the user calling
# program, and should be written as follows.
#
# subroutine fcn(m,n,x,fvec,iflag)
# integer m,n,iflag
# double precision x(n),fvec(m)
# ----------
# calculate the functions at x and
# return this vector in fvec.
# ----------
# return
# end
#
# the value of iflag should not be changed by fcn unless
# the user wants to terminate execution of lmdif.
# in this case set iflag to a negative integer.
#
# m is a positive integer input variable set to the number
# of functions.
#
# n is a positive integer input variable set to the number
# of variables. n must not exceed m.
#
# x is an array of length n. on input x must contain
# an initial estimate of the solution vector. on output x
# contains the final estimate of the solution vector.
#
# fvec is an output array of length m which contains
# the functions evaluated at the output x.
#
# ftol is a nonnegative input variable. termination
# occurs when both the actual and predicted relative
# reductions in the sum of squares are at most ftol.
# therefore, ftol measures the relative error desired
# in the sum of squares.
#
# xtol is a nonnegative input variable. termination
# occurs when the relative error between two consecutive
# iterates is at most xtol. therefore, xtol measures the
# relative error desired in the approximate solution.
#
# gtol is a nonnegative input variable. termination
# occurs when the cosine of the angle between fvec and
# any column of the jacobian is at most gtol in absolute
# value. therefore, gtol measures the orthogonality
# desired between the function vector and the columns
# of the jacobian.
#
# maxfev is a positive integer input variable. termination
# occurs when the number of calls to fcn is at least
# maxfev by the end of an iteration.
#
# epsfcn is an input variable used in determining a suitable
# step length for the forward-difference approximation. this
# approximation assumes that the relative errors in the
# functions are of the order of epsfcn. if epsfcn is less
# than the machine precision, it is assumed that the relative
# errors in the functions are of the order of the machine
# precision.
#
# diag is an array of length n. if mode = 1 (see
# below), diag is internally set. if mode = 2, diag
# must contain positive entries that serve as
# multiplicative scale factors for the variables.
#
# mode is an integer input variable. if mode = 1, the
# variables will be scaled internally. if mode = 2,
# the scaling is specified by the input diag. other
# values of mode are equivalent to mode = 1.
#
# factor is a positive input variable used in determining the
# initial step bound. this bound is set to the product of
# factor and the euclidean norm of diag*x if nonzero, or else
# to factor itself. in most cases factor should lie in the
# interval (.1,100.). 100. is a generally recommended value.
#
# nprint is an integer input variable that enables controlled
# printing of iterates if it is positive. in this case,
# fcn is called with iflag = 0 at the beginning of the first
# iteration and every nprint iterations thereafter and
# immediately prior to return, with x and fvec available
# for printing. if nprint is not positive, no special calls
# of fcn with iflag = 0 are made.
#
# info is an integer output variable. if the user has
# terminated execution, info is set to the (negative)
# value of iflag. see description of fcn. otherwise,
# info is set as follows.
#
# info = 0 improper input parameters.
#
# info = 1 both actual and predicted relative reductions
# in the sum of squares are at most ftol.
#
# info = 2 relative error between two consecutive iterates
# is at most xtol.
#
# info = 3 conditions for info = 1 and info = 2 both hold.
#
# info = 4 the cosine of the angle between fvec and any
# column of the jacobian is at most gtol in
# absolute value.
#
# info = 5 number of calls to fcn has reached or
# exceeded maxfev.
#
# info = 6 ftol is too small. no further reduction in
# the sum of squares is possible.
#
# info = 7 xtol is too small. no further improvement in
# the approximate solution x is possible.
#
# info = 8 gtol is too small. fvec is orthogonal to the
# columns of the jacobian to machine precision.
#
# nfev is an integer output variable set to the number of
# calls to fcn.
#
# fjac is an output m by n array. the upper n by n submatrix
# of fjac contains an upper triangular matrix r with
# diagonal elements of nonincreasing magnitude such that
#
# t t t
# p *(jac *jac)*p = r *r,
#
# where p is a permutation matrix and jac is the final
# calculated jacobian. column j of p is column ipvt(j)
# (see below) of the identity matrix. the lower trapezoidal
# part of fjac contains information generated during
# the computation of r.
#
# ldfjac is a positive integer input variable not less than m
# which specifies the leading dimension of the array fjac.
#
# ipvt is an integer output array of length n. ipvt
# defines a permutation matrix p such that jac*p = q*r,
# where jac is the final calculated jacobian, q is
# orthogonal (not stored), and r is upper triangular
# with diagonal elements of nonincreasing magnitude.
# column j of p is column ipvt(j) of the identity matrix.
#
# qtf is an output array of length n which contains
# the first n elements of the vector (q transpose)*fvec.
#
# wa1, wa2, and wa3 are work arrays of length n.
#
# wa4 is a work array of length m.
#
# subprograms called
#
# user-supplied ...... fcn
#
#         minpack-supplied ... dpmpar,enorm,fdjac2,lmpar,qrfac
#
# fortran-supplied ... dabs,dmax1,dmin1,dsqrt,mod
#
# argonne national laboratory. minpack project. march 1980.
# burton s. garbow, kenneth e. hillstrom, jorge j. more
#
# **********
def norm(x):
    """Return the Euclidean norm of *x*: sqrt of the sum of squared elements."""
    total_sq = numpy.sum(numpy.square(x))
    return numpy.sqrt(total_sq)
class mpfit:
def __init__(self, fcn, xall=None, functkw={}, parinfo=None,
ftol=1.e-10, xtol=1.e-10, gtol=1.e-10,
damp=0., maxiter=200, factor=100., nprint=1,
iterfunct='default', iterkw={}, nocovar=0,
rescale=0, autoderivative=1, quiet=0,
diag=None, epsfcn=None, debug=0):
"""
Inputs:
fcn:
The function to be minimized. The function should return the weighted
deviations between the model and the data, as described above.
xall:
An array of starting values for each of the parameters of the model.
The number of parameters should be fewer than the number of measurements.
This parameter is optional if the parinfo keyword is used (but see
parinfo). The parinfo keyword provides a mechanism to fix or constrain
individual parameters.
Keywords:
autoderivative:
If this is set, derivatives of the function will be computed
automatically via a finite differencing procedure. If not set, then
fcn must provide the (analytical) derivatives.
Default: set (=1)
NOTE: to supply your own analytical derivatives,
explicitly pass autoderivative=0
ftol:
A nonnegative input variable. Termination occurs when both the actual
and predicted relative reductions in the sum of squares are at most
ftol (and status is accordingly set to 1 or 3). Therefore, ftol
measures the relative error desired in the sum of squares.
Default: 1E-10
functkw:
A dictionary which contains the parameters to be passed to the
user-supplied function specified by fcn via the standard Python
keyword dictionary mechanism. This is the way you can pass additional
data to your user-supplied function without using global variables.
Consider the following example:
if functkw = {'xval':[1.,2.,3.], 'yval':[1.,4.,9.],
'errval':[1.,1.,1.] }
then the user supplied function should be declared like this:
def myfunct(p, fjac=None, xval=None, yval=None, errval=None):
Default: {} No extra parameters are passed to the user-supplied
function.
gtol:
A nonnegative input variable. Termination occurs when the cosine of
the angle between fvec and any column of the jacobian is at most gtol
in absolute value (and status is accordingly set to 4). Therefore,
gtol measures the orthogonality desired between the function vector
and the columns of the jacobian.
Default: 1e-10
iterkw:
The keyword arguments to be passed to iterfunct via the dictionary
keyword mechanism. This should be a dictionary and is similar in
operation to FUNCTKW.
Default: {} No arguments are passed.
iterfunct:
The name of a function to be called upon each NPRINT iteration of the
MPFIT routine. It should be declared in the following way:
def iterfunct(myfunct, p, iter, fnorm, functkw=None,
parinfo=None, quiet=0, dof=None, [iterkw keywords here])
# perform custom iteration update
iterfunct must accept all three keyword parameters (FUNCTKW, PARINFO
and QUIET).
myfunct: The user-supplied function to be minimized,
p: The current set of model parameters
iter: The iteration number
functkw: The arguments to be passed to myfunct.
fnorm: The chi-squared value.
quiet: Set when no textual output should be printed.
dof: The number of degrees of freedom, normally the number of points
less the number of free parameters.
See below for documentation of parinfo.
In implementation, iterfunct can perform updates to the terminal or
graphical user interface, to provide feedback while the fit proceeds.
If the fit is to be stopped for any reason, then iterfunct should return a
a status value between -15 and -1. Otherwise it should return None
(e.g. no return statement) or 0.
In principle, iterfunct should probably not modify the parameter values,
because it may interfere with the algorithm's stability. In practice it
is allowed.
Default: an internal routine is used to print the parameter values.
Set iterfunct=None if there is no user-defined routine and you don't
want the internal default routine be called.
maxiter:
The maximum number of iterations to perform. If the number is exceeded,
then the status value is set to 5 and MPFIT returns.
Default: 200 iterations
nocovar:
Set this keyword to prevent the calculation of the covariance matrix
before returning (see COVAR)
Default: clear (=0) The covariance matrix is returned
nprint:
The frequency with which iterfunct is called. A value of 1 indicates
that iterfunct is called with every iteration, while 2 indicates every
other iteration, etc. Note that several Levenberg-Marquardt attempts
can be made in a single iteration.
Default value: 1
parinfo
Provides a mechanism for more sophisticated constraints to be placed on
parameter values. When parinfo is not passed, then it is assumed that
all parameters are free and unconstrained. Values in parinfo are never
modified during a call to MPFIT.
See description above for the structure of PARINFO.
Default value: None All parameters are free and unconstrained.
quiet:
Set this keyword when no textual output should be printed by MPFIT
damp:
A scalar number, indicating the cut-off value of residuals where
"damping" will occur. Residuals with magnitudes greater than this
number will be replaced by their hyperbolic tangent. This partially
mitigates the so-called large residual problem inherent in
least-squares solvers (as for the test problem CURVI,
http://www.maxthis.com/curviex.htm).
A value of 0 indicates no damping.
Default: 0
Note: DAMP doesn't work with autoderivative=0
xtol:
A nonnegative input variable. Termination occurs when the relative error
between two consecutive iterates is at most xtol (and status is
accordingly set to 2 or 3). Therefore, xtol measures the relative error
desired in the approximate solution.
Default: 1E-10
Outputs:
Returns an object of type mpfit. The results are attributes of this class,
e.g. mpfit.status, mpfit.errmsg, mpfit.params, npfit.niter, mpfit.covar.
.status
An integer status code is returned. All values greater than zero can
represent success (however .status == 5 may indicate failure to
converge). It can have one of the following values:
-16
A parameter or function value has become infinite or an undefined
number. This is usually a consequence of numerical overflow in the
user's model function, which must be avoided.
-15 to -1
These are error codes that either MYFUNCT or iterfunct may return to
terminate the fitting process. Values from -15 to -1 are reserved
for the user functions and will not clash with MPFIT.
0 Improper input parameters.
1 Both actual and predicted relative reductions in the sum of squares
are at most ftol.
2 Relative error between two consecutive iterates is at most xtol
3 Conditions for status = 1 and status = 2 both hold.
4 The cosine of the angle between fvec and any column of the jacobian
is at most gtol in absolute value.
5 The maximum number of iterations has been reached.
6 ftol is too small. No further reduction in the sum of squares is
possible.
7 xtol is too small. No further improvement in the approximate solution
x is possible.
8 gtol is too small. fvec is orthogonal to the columns of the jacobian
to machine precision.
.fnorm
The value of the summed squared residuals for the returned parameter
values.
.covar
The covariance matrix for the set of parameters returned by MPFIT.
The matrix is NxN where N is the number of parameters. The square root
of the diagonal elements gives the formal 1-sigma statistical errors on
the parameters if errors were treated "properly" in fcn.
Parameter errors are also returned in .perror.
To compute the correlation matrix, pcor, use this example:
cov = mpfit.covar
pcor = cov * 0.
for i in range(n):
for j in range(n):
pcor[i,j] = cov[i,j]/sqrt(cov[i,i]*cov[j,j])
If nocovar is set or MPFIT terminated abnormally, then .covar is set to
a scalar with value None.
.errmsg
A string error or warning message is returned.
.nfev
The number of calls to MYFUNCT performed.
.niter
The number of iterations completed.
.perror
The formal 1-sigma errors in each parameter, computed from the
covariance matrix. If a parameter is held fixed, or if it touches a
boundary, then the error is reported as zero.
If the fit is unweighted (i.e. no errors were given, or the weights
were uniformly set to unity), then .perror will probably not represent
the true parameter uncertainties.
*If* you can assume that the true reduced chi-squared value is unity --
meaning that the fit is implicitly assumed to be of good quality --
then the estimated parameter uncertainties can be computed by scaling
.perror by the measured chi-squared value.
dof = len(x) - len(mpfit.params) # deg of freedom
# scaled uncertainties
pcerror = mpfit.perror * sqrt(mpfit.fnorm / dof)
"""
self.niter = 0
self.params = None
self.covar = None
self.perror = None
self.status = 0 # Invalid input flag set while we check inputs
self.debug = debug
self.errmsg = ''
self.nfev = 0
self.damp = damp
self.dof = 0
if fcn==None:
self.errmsg = "Usage: parms = mpfit('myfunt', ... )"
return
if iterfunct == 'default':
iterfunct = self.defiter
# Parameter damping doesn't work when user is providing their own
# gradients.
if (self.damp != 0) and (autoderivative == 0):
self.errmsg = 'ERROR: keywords DAMP and AUTODERIVATIVE are mutually exclusive'
return
# Parameters can either be stored in parinfo, or x. x takes precedence if it exists
if (xall is None) and (parinfo is None):
self.errmsg = 'ERROR: must pass parameters in P or PARINFO'
return
# Be sure that PARINFO is of the right type
if parinfo is not None:
if type(parinfo) != list:
self.errmsg = 'ERROR: PARINFO must be a list of dictionaries.'
return
else:
if type(parinfo[0]) != dict:
self.errmsg = 'ERROR: PARINFO must be a list of dictionaries.'
return
if ((xall is not None) and (len(xall) != len(parinfo))):
self.errmsg = 'ERROR: number of elements in PARINFO and P must agree'
return
# If the parameters were not specified at the command line, then
# extract them from PARINFO
if xall is None:
xall = self.parinfo(parinfo, 'value')
if xall is None:
self.errmsg = 'ERROR: either P or PARINFO(*)["value"] must be supplied.'
return
# Make sure parameters are numpy arrays
xall = numpy.asarray(xall)
# In the case if the xall is not float or if is float but has less
# than 64 bits we do convert it into double
if xall.dtype.kind != 'f' or xall.dtype.itemsize <= 4:
xall = xall.astype(numpy.float)
npar = len(xall)
self.fnorm = -1.
fnorm1 = -1.
# TIED parameters?
ptied = self.parinfo(parinfo, 'tied', default='', n=npar)
self.qanytied = 0
for i in range(npar):
ptied[i] = ptied[i].strip()
if ptied[i] != '':
self.qanytied = 1
self.ptied = ptied
# FIXED parameters ?
pfixed = self.parinfo(parinfo, 'fixed', default=0, n=npar)
pfixed = (pfixed == 1)
for i in range(npar):
pfixed[i] = pfixed[i] or (ptied[i] != '') # Tied parameters are also effectively fixed
# Finite differencing step, absolute and relative, and sidedness of deriv.
step = self.parinfo(parinfo, 'step', default=0., n=npar)
dstep = self.parinfo(parinfo, 'relstep', default=0., n=npar)
dside = self.parinfo(parinfo, 'mpside', default=0, n=npar)
# Maximum and minimum steps allowed to be taken in one iteration
maxstep = self.parinfo(parinfo, 'mpmaxstep', default=0., n=npar)
minstep = self.parinfo(parinfo, 'mpminstep', default=0., n=npar)
qmin = minstep != 0
qmin[:] = False # Remove minstep for now!!
qmax = maxstep != 0
if numpy.any(qmin & qmax & (maxstep<minstep)):
self.errmsg = 'ERROR: MPMINSTEP is greater than MPMAXSTEP'
return
wh = numpy.nonzero((qmin!=0.) | (qmax!=0.))[0]
qminmax = len(wh > 0)
# Finish up the free parameters
ifree = numpy.nonzero(pfixed != 1)[0]
nfree = len(ifree)
if nfree == 0:
self.errmsg = 'ERROR: no free parameters'
return
# Compose only VARYING parameters
self.params = xall.copy() # self.params is the set of parameters to be returned
x = self.params[ifree] # x is the set of free parameters
# LIMITED parameters ?
limited = self.parinfo(parinfo, 'limited', default=[0,0], n=npar)
limits = self.parinfo(parinfo, 'limits', default=[0.,0.], n=npar)
if (limited is not None) and (limits is not None):
# Error checking on limits in parinfo
if numpy.any((limited[:,0] & (xall < limits[:,0])) |
(limited[:,1] & (xall > limits[:,1]))):
self.errmsg = 'ERROR: parameters are not within PARINFO limits'
return
if numpy.any((limited[:,0] & limited[:,1]) &
(limits[:,0] >= limits[:,1]) & (pfixed == 0)):
self.errmsg = 'ERROR: PARINFO parameter limits are not consistent'
return
# Transfer structure values to local variables
qulim = limited[ifree,1]
ulim = limits [ifree,1]
qllim = limited[ifree,0]
llim = limits [ifree,0]
if numpy.any((qulim!=0.) | (qllim!=0.)):
qanylim = 1
else:
qanylim = 0
else:
# Fill in local variables with dummy values
qulim = numpy.zeros(nfree)
ulim = x * 0.
qllim = qulim
llim = x * 0.
qanylim = 0
n = len(x)
# Check input parameters for errors
if (n < 0) or (ftol <= 0) or (xtol <= 0) or (gtol <= 0) \
or (maxiter < 0) or (factor <= 0):
self.errmsg = 'ERROR: input keywords are inconsistent'
return
if rescale != 0:
self.errmsg = 'ERROR: DIAG parameter scales are inconsistent'
if len(diag) < n:
return
if numpy.any(diag <= 0):
return
self.errmsg = ''
[self.status, fvec] = self.call(fcn, self.params, functkw)
if self.status < 0:
self.errmsg = 'ERROR: first call to "'+str(fcn)+'" failed'
return
# If the returned fvec has more than four bits I assume that we have
# double precision
# It is important that the machar is determined by the precision of
# the returned value, not by the precision of the input array
if numpy.array([fvec]).dtype.itemsize > 4:
self.machar = machar(double=1)
else:
self.machar = machar(double=0)
machep = self.machar.machep
m = len(fvec)
if m < n:
self.errmsg = 'ERROR: number of parameters must not exceed data'
return
self.dof = m-nfree
self.fnorm = norm(fvec)
# Initialize Levelberg-Marquardt parameter and iteration counter
par = 0.
self.niter = 1
qtf = x * 0.
self.status = 0
# Beginning of the outer loop
while(1):
# If requested, call fcn to enable printing of iterates
self.params[ifree] = x
if self.qanytied:
self.params = self.tie(self.params, ptied)
if (nprint > 0) and (iterfunct is not None):
if ((self.niter-1) % nprint) == 0:
mperr = 0
xnew0 = self.params.copy()
dof = max(len(fvec) - len(x), 0)
status = iterfunct(fcn, self.params, self.niter, self.fnorm**2,
functkw=functkw, parinfo=parinfo, quiet=quiet,
dof=dof, **iterkw)
if status is not None:
self.status = status
# Check for user termination
if self.status < 0:
self.errmsg = 'WARNING: premature termination by ' + str(iterfunct)
return
# If parameters were changed (grrr..) then re-tie
if numpy.max(numpy.abs(xnew0-self.params)) > 0:
if self.qanytied:
self.params = self.tie(self.params, ptied)
x = self.params[ifree]
# Calculate the jacobian matrix
self.status = 2
catch_msg = 'calling MPFIT_FDJAC2'
fjac = self.fdjac2(fcn, x, fvec, step, qulim, ulim, dside,
epsfcn=epsfcn,
autoderivative=autoderivative, dstep=dstep,
functkw=functkw, ifree=ifree, xall=self.params)
if fjac is None:
self.errmsg = 'WARNING: premature termination by FDJAC2'
return
# Determine if any of the parameters are pegged at the limits
if qanylim:
catch_msg = 'zeroing derivatives of pegged parameters'
whlpeg = numpy.nonzero(qllim & (x == llim))[0]
nlpeg = len(whlpeg)
whupeg = numpy.nonzero(qulim & (x == ulim))[0]
nupeg = len(whupeg)
# See if any "pegged" values should keep their derivatives
if nlpeg > 0:
# Total derivative of sum wrt lower pegged parameters
for i in range(nlpeg):
sum0 = numpy.sum(fvec * fjac[:,whlpeg[i]])
if sum0 > 0:
fjac[:,whlpeg[i]] = 0
if nupeg > 0:
# Total derivative of sum wrt upper pegged parameters
for i in range(nupeg):
sum0 = numpy.sum(fvec * fjac[:,whupeg[i]])
if sum0 < 0:
fjac[:,whupeg[i]] = 0
# Compute the QR factorization of the jacobian
[fjac, ipvt, wa1, wa2] = self.qrfac(fjac, pivot=1)
# On the first iteration if "diag" is unspecified, scale
# according to the norms of the columns of the initial jacobian
catch_msg = 'rescaling diagonal elements'
if self.niter == 1:
if (rescale==0) or (len(diag) < n):
diag = wa2.copy()
diag[diag == 0] = 1.
# On the first iteration, calculate the norm of the scaled x
# and initialize the step bound delta
wa3 = diag * x
xnorm = norm(wa3)
delta = factor*xnorm
if delta == 0.:
delta = factor
# Form (q transpose)*fvec and store the first n components in qtf
catch_msg = 'forming (q transpose)*fvec'
wa4 = fvec.copy()
for j in range(n):
lj = ipvt[j]
temp3 = fjac[j,lj]
if temp3 != 0:
fj = fjac[j:,lj]
wa4[j:] -= fj * numpy.sum(fj*wa4[j:]) / temp3
fjac[j,lj] = wa1[j]
qtf[j] = wa4[j]
# From this point on, only the square matrix, consisting of the
# triangle of R, is needed.
fjac = fjac[:n, :n]
fjac = fjac[:, ipvt]
# Check for overflow. This should be a cheap test here since FJAC
# has been reduced to a (small) square matrix, and the test is
# O(N^2).
#wh = where(finite(fjac) EQ 0, ct)
#if ct GT 0 then goto, FAIL_OVERFLOW
# Compute the norm of the scaled gradient
catch_msg = 'computing the scaled gradient'
gnorm = 0.
if self.fnorm != 0:
for j in range(n):
l = ipvt[j]
if wa2[l] != 0:
sum0 = numpy.sum(fjac[:j+1,j]*qtf[:j+1])/self.fnorm
gnorm = numpy.max([gnorm,numpy.abs(sum0/wa2[l])])
# Test for convergence of the gradient norm
if gnorm <= gtol:
self.status = 4
break
if maxiter == 0:
self.status = 5
break
# Rescale if necessary
if rescale == 0:
diag = numpy.choose(diag>wa2, (wa2, diag))
# Beginning of the inner loop
while(1):
# Determine the levenberg-marquardt parameter
catch_msg = 'calculating LM parameter (MPFIT_)'
[fjac, par, wa1, wa2] = self.lmpar(fjac, ipvt, diag, qtf,
delta, wa1, wa2, par=par)
# Store the direction p and x+p. Calculate the norm of p
wa1 = -wa1
if (qanylim == 0) and (qminmax == 0):
# No parameter limits, so just move to new position WA2
alpha = 1.
wa2 = x + wa1
else:
# Respect the limits. If a step were to go out of bounds, then
# we should take a step in the same direction but shorter distance.
# The step should take us right to the limit in that case.
alpha = 1.
if qanylim:
# Do not allow any steps out of bounds
catch_msg = 'checking for a step out of bounds'
if nlpeg > 0:
wa1[whlpeg] = wa1[whlpeg].clip(0., numpy.max(wa1))
if nupeg > 0:
wa1[whupeg] = wa1[whupeg].clip(numpy.min(wa1), 0.)
dwa1 = numpy.abs(wa1) > machep
whl = numpy.nonzero(((dwa1!=0.) & qllim) & ((x + wa1) < llim))[0]
if len(whl) > 0:
t = (llim[whl] - x[whl]) / wa1[whl]
alpha = numpy.min([alpha, numpy.min(t)])
whu = numpy.nonzero(((dwa1!=0.) & qulim) & ((x + wa1) > ulim))[0]
if len(whu) > 0:
t = (ulim[whu] - x[whu]) / wa1[whu]
alpha = numpy.min([alpha, numpy.min(t)])
# Obey any max step values.
if qminmax:
nwa1 = wa1 * alpha
whmax = numpy.nonzero((qmax != 0.) & (maxstep > 0))[0]
if len(whmax) > 0:
mrat = numpy.max(numpy.abs(nwa1[whmax]) /
numpy.abs(maxstep[ifree[whmax]]))
if mrat > 1:
alpha = alpha / mrat
# Scale the resulting vector
wa1 *= alpha
wa2 = x + wa1
if len(whu) > 0:
wa2[whu] = ulim[whu] # Michele Cappellari, Windhoek 3/OCT/2008
if len(whl) > 0:
wa2[whl] = llim[whl] # Michele Cappellari, Windhoek 3/OCT/2008
wa3 = diag * wa1
pnorm = norm(wa3)
# On the first iteration, adjust the initial step bound
if self.niter == 1:
delta = min(delta, pnorm)
self.params[ifree] = wa2
# Evaluate the function at x+p and calculate its norm
mperr = 0
catch_msg = 'calling '+str(fcn)
[self.status, wa4] = self.call(fcn, self.params, functkw)
if self.status < 0:
self.errmsg = 'WARNING: premature termination by "'+fcn+'"'
return
fnorm1 = norm(wa4)
# Compute the scaled actual reduction
catch_msg = 'computing convergence criteria'
actred = -1.
if (0.1 * fnorm1) < self.fnorm:
actred = - (fnorm1/self.fnorm)**2 + 1.
# Compute the scaled predicted reduction and the scaled directional
# derivative
for j in range(n):
wa3[j] = 0
wa3[:j+1] += fjac[:j+1,j]*wa1[ipvt[j]]
# Remember, alpha is the fraction of the full LM step actually
# taken
temp1 = norm(alpha*wa3)/self.fnorm
temp2 = (numpy.sqrt(alpha*par)*pnorm)/self.fnorm
prered = temp1*temp1 + (temp2*temp2)/0.5
dirder = -(temp1*temp1 + temp2*temp2)
# Compute the ratio of the actual to the predicted reduction.
ratio = 0.
if prered != 0:
ratio = actred/prered
# Update the step bound
if ratio <= 0.25:
if actred >= 0:
temp = .5
else:
temp = .5*dirder/(dirder + .5*actred)
if ((0.1*fnorm1) >= self.fnorm) or (temp < 0.1):
temp = 0.1
delta = temp*min(delta, pnorm/0.1)
par = par/temp
else:
if (par == 0) or (ratio >= 0.75):
delta = pnorm/.5
par = .5*par
# Test for successful iteration
if ratio >= 0.0001:
# Successful iteration. Update x, fvec, and their norms
x = wa2
wa2 = diag * x
fvec = wa4
xnorm = norm(wa2)
self.fnorm = fnorm1
self.niter = self.niter + 1
# Tests for convergence
if (numpy.abs(actred) <= ftol) and (prered <= ftol) \
and (0.5 * ratio <= 1):
self.status = 1
if delta <= xtol*xnorm:
self.status = 2
if (numpy.abs(actred) <= ftol) and (prered <= ftol) \
and (0.5 * ratio <= 1) and (self.status == 2):
self.status = 3
if self.status != 0:
break
# Tests for termination and stringent tolerances
if self.niter >= maxiter:
self.status = 5
if (numpy.abs(actred) <= machep) and (prered <= machep) \
and (0.5*ratio <= 1):
self.status = 6
if delta <= machep*xnorm:
self.status = 7
if gnorm <= machep:
self.status = 8
if self.status != 0:
break
# End of inner loop. Repeat if iteration unsuccessful
if ratio >= 0.0001:
break
# Check for over/underflow
if ~numpy.all(numpy.isfinite(wa1) & numpy.isfinite(wa2) & \
numpy.isfinite(x)) or ~numpy.isfinite(ratio):
self.errmsg = ('''ERROR: parameter or function value(s) have become
'infinite; check model function for over- 'and underflow''')
self.status = -16
break
#wh = where(finite(wa1) EQ 0 OR finite(wa2) EQ 0 OR finite(x) EQ 0, ct)
#if ct GT 0 OR finite(ratio) EQ 0 then begin
if self.status != 0:
break;
# End of outer loop.
catch_msg = 'in the termination phase'
# Termination, either normal or user imposed.
if len(self.params) == 0:
return
if nfree == 0:
self.params = xall.copy()
else:
self.params[ifree] = x
if (nprint > 0) and (self.status > 0):
catch_msg = 'calling ' + str(fcn)
[status, fvec] = self.call(fcn, self.params, functkw)
catch_msg = 'in the termination phase'
self.fnorm = norm(fvec)
if (self.fnorm is not None) and (fnorm1 is not None):
self.fnorm = max(self.fnorm, fnorm1)
self.fnorm = self.fnorm**2.
self.covar = None
self.perror = None
# (very carefully) set the covariance matrix COVAR
if (self.status > 0) and (nocovar==0) and (n is not None) \
and (fjac is not None) and (ipvt is not None):
sz = fjac.shape
if (n > 0) and (sz[0] >= n) and (sz[1] >= n) \
and (len(ipvt) >= n):
catch_msg = 'computing the covariance matrix'
cv = self.calc_covar(fjac[:n,:n], ipvt[:n])
cv.shape = [n, n]
nn = len(xall)
# Fill in actual covariance matrix, accounting for fixed
# parameters.
self.covar = numpy.zeros([nn, nn])
for i in range(n):
self.covar[ifree,ifree[i]] = cv[:,i]
# Compute errors in parameters
catch_msg = 'computing parameter errors'
self.perror = numpy.zeros(nn)
d = numpy.diagonal(self.covar)
wh = numpy.nonzero(d >= 0)[0]
if len(wh) > 0:
self.perror[wh] = numpy.sqrt(d[wh])
return
def __str__(self):
return {'params': self.params,
'niter': self.niter,
'params': self.params,
'covar': self.covar,
'perror': self.perror,
'status': self.status,
'debug': self.debug,
'errmsg': self.errmsg,
'nfev': self.nfev,
'damp': self.damp
#,'machar':self.machar
}.__str__()
# Default procedure to be called every iteration. It simply prints
# the parameter values.
def defiter(self, fcn, x, iter, fnorm=None, functkw=None,
quiet=0, iterstop=None, parinfo=None,
format=None, pformat='%.10g', dof=1):
if self.debug:
print('Entering defiter...')
if quiet:
return
if fnorm is None:
[status, fvec] = self.call(fcn, x, functkw)
fnorm = norm(fvec)**2
# Determine which parameters to print
nprint = len(x)
print("Iter ", ('%6i' % iter)," CHI-SQUARE = ",('%.10g' % fnorm)," DOF = ", ('%i' % dof))
for i in range(nprint):
if (parinfo is not None) and ('parname' in parinfo[i]):
p = ' ' + parinfo[i]['parname'] + ' = '
else:
p = ' P' + str(i) + ' = '
if (parinfo is not None) and ('mpprint' in parinfo[i]):
iprint = parinfo[i]['mpprint']
else:
iprint = 1
if iprint:
print(p + (pformat % x[i]) + ' ')
return 0
# DO_ITERSTOP:
# if keyword_set(iterstop) then begin
# k = get_kbrd(0)
# if k EQ string(byte(7)) then begin
# message, 'WARNING: minimization not complete', /info
# print, 'Do you want to terminate this procedure? (y/n)', $
# format='(A,$)'
# k = ''
# read, k
# if strupcase(strmid(k,0,1)) EQ 'Y' then begin
# message, 'WARNING: Procedure is terminating.', /info
# mperr = -1
# endif
# endif
# endif
# Procedure to parse the parameter values in PARINFO, which is a list of dictionaries
def parinfo(self, parinfo=None, key='a', default=None, n=0):
if self.debug:
print('Entering parinfo...')
if (n == 0) and (parinfo is not None):
n = len(parinfo)
if n == 0:
values = default
return values
values = []
for i in range(n):
if (parinfo is not None) and (key in parinfo[i]):
values.append(parinfo[i][key])
else:
values.append(default)
# Convert to numeric arrays if possible
test = default
if type(default) == list:
test = default[0]
if isinstance(test, int):
values = numpy.asarray(values, int)
elif isinstance(test, float):
values = numpy.asarray(values, float)
return values
# Call user function or procedure, with _EXTRA or not, with
# derivatives or not.
def call(self, fcn, x, functkw, fjac=None):
if self.debug:
print('Entering call...')
if self.qanytied:
x = self.tie(x, self.ptied)
self.nfev = self.nfev + 1
if fjac is None:
[status, f] = fcn(x, fjac=fjac, **functkw)
if self.damp > 0:
# Apply the damping if requested. This replaces the residuals
# with their hyperbolic tangent. Thus residuals larger than
# DAMP are essentially clipped.
f = numpy.tanh(f/self.damp)
return [status, f]
else:
return fcn(x, fjac=fjac, **functkw)
    def fdjac2(self, fcn, x, fvec, step=None, ulimited=None, ulimit=None, dside=None,
               epsfcn=None, autoderivative=1,
               functkw=None, xall=None, ifree=None, dstep=None):
        """Compute the (m x n) Jacobian of fcn w.r.t. the free parameters.

        By default, uses finite differences with per-parameter step sizes
        derived from ``step``/``dstep``/``epsfcn``; with ``autoderivative=0``
        the user function itself is asked for analytical derivatives.

        Parameters
        ----------
        fcn : callable
            User model function, invoked through ``self.call``.
        x : ndarray
            Values of the *free* parameters only.
        fvec : ndarray
            Residuals already evaluated at ``x`` (reused for one-sided
            derivatives, saving one function call per parameter).
        step, dstep, dside : ndarray or None
            Absolute step, relative step, and derivative sidedness; these are
            indexed over ALL parameters (fixed ones included).
        ulimited, ulimit : ndarray
            Upper-limit flags/values for the free parameters; used to reverse
            the step direction when a parameter sits near its upper limit.
        epsfcn : float or None
            Relative error in fcn; defaults to machine precision.
        functkw : dict
            Extra keyword arguments forwarded to fcn.
        xall, ifree : ndarray or None
            Full parameter vector and the indices of the free parameters
            within it; default to ``x`` and all indices.

        Returns
        -------
        ndarray of shape (m, n), or None if the user function signalled
        failure (status < 0) or the analytic derivative had the wrong size.
        """
        if self.debug:
            print('Entering fdjac2...')
        machep = self.machar.machep
        if epsfcn is None:
            epsfcn = machep
        if xall is None:
            xall = x
        if ifree is None:
            ifree = numpy.arange(len(xall))
        if step is None:
            step = x * 0.
        nall = len(xall)
        # Default differencing step: sqrt of the larger of the user-supplied
        # relative error and machine epsilon.
        eps = numpy.sqrt(max(epsfcn, machep))
        m = len(fvec)
        n = len(x)
        # Compute analytical derivative if requested
        if autoderivative == 0:
            mperr = 0
            fjac = numpy.zeros(nall)
            fjac[ifree] = 1.0  # Specify which parameters need derivatives
            [status, fp] = self.call(fcn, xall, functkw, fjac=fjac)
            if len(fjac) != m*nall:
                print('ERROR: Derivative matrix was not computed properly.')
                return None
            # This definition is consistent with CURVEFIT
            # Sign error found (thanks Jesus Fernandez <fernande@irm.chu-caen.fr>)
            fjac.shape = [m,nall]
            fjac = -fjac
            # Select only the free parameters
            # NOTE(review): when ALL parameters are free (len(ifree) == nall)
            # this branch is skipped and control falls through to the
            # finite-difference code below — confirm that is intended.
            if len(ifree) < nall:
                fjac = fjac[:,ifree]
                fjac.shape = [m, n]
                return fjac
        fjac = numpy.zeros([m, n])
        h = eps * numpy.abs(x)
        # if STEP is given, use that
        # STEP includes the fixed parameters
        if step is not None:
            stepi = step[ifree]
            wh = numpy.nonzero(stepi > 0)[0]
            if len(wh) > 0:
                h[wh] = stepi[wh]
        # if relative step is given, use that
        # DSTEP includes the fixed parameters
        if len(dstep) > 0:
            dstepi = dstep[ifree]
            wh = numpy.nonzero(dstepi > 0)[0]
            if len(wh) > 0:
                h[wh] = numpy.abs(dstepi[wh]*x[wh])
        # In case any of the step values are zero
        h[h == 0] = eps
        # Reverse the sign of the step if we are up against the parameter
        # limit, or if the user requested it.
        # DSIDE includes the fixed parameters (ULIMITED/ULIMIT have only
        # varying ones)
        mask = dside[ifree] == -1
        if len(ulimited) > 0 and len(ulimit) > 0:
            mask = (mask | ((ulimited!=0) & (x > ulimit-h)))
        wh = numpy.nonzero(mask)[0]
        if len(wh) > 0:
            h[wh] = - h[wh]
        # Loop through parameters, computing the derivative for each
        for j in range(n):
            xp = xall.copy()
            xp[ifree[j]] = xp[ifree[j]] + h[j]
            [status, fp] = self.call(fcn, xp, functkw)
            if status < 0:
                return None
            if numpy.abs(dside[ifree[j]]) <= 1:
                # COMPUTE THE ONE-SIDED DERIVATIVE
                # Note optimization fjac(0:*,j)
                fjac[:,j] = (fp-fvec)/h[j]
            else:
                # COMPUTE THE TWO-SIDED DERIVATIVE
                xp[ifree[j]] = xall[ifree[j]] - h[j]
                mperr = 0
                [status, fm] = self.call(fcn, xp, functkw)
                if status < 0:
                    return None
                # Note optimization fjac(0:*,j)
                fjac[:,j] = (fp-fm)/(2*h[j])
        return fjac
# Original FORTRAN documentation
# **********
#
# subroutine qrfac
#
# this subroutine uses householder transformations with column
# pivoting (optional) to compute a qr factorization of the
# m by n matrix a. that is, qrfac determines an orthogonal
# matrix q, a permutation matrix p, and an upper trapezoidal
# matrix r with diagonal elements of nonincreasing magnitude,
# such that a*p = q*r. the householder transformation for
# column k, k = 1,2,...,min(m,n), is of the form
#
# t
# i - (1/u(k))*u*u
#
# where u has zeros in the first k-1 positions. the form of
# this transformation and the method of pivoting first
# appeared in the corresponding linpack subroutine.
#
# the subroutine statement is
#
# subroutine qrfac(m,n,a,lda,pivot,ipvt,lipvt,rdiag,acnorm,wa)
#
# where
#
# m is a positive integer input variable set to the number
# of rows of a.
#
# n is a positive integer input variable set to the number
# of columns of a.
#
# a is an m by n array. on input a contains the matrix for
# which the qr factorization is to be computed. on output
# the strict upper trapezoidal part of a contains the strict
# upper trapezoidal part of r, and the lower trapezoidal
# part of a contains a factored form of q (the non-trivial
# elements of the u vectors described above).
#
# lda is a positive integer input variable not less than m
# which specifies the leading dimension of the array a.
#
# pivot is a logical input variable. if pivot is set true,
# then column pivoting is enforced. if pivot is set false,
# then no column pivoting is done.
#
# ipvt is an integer output array of length lipvt. ipvt
# defines the permutation matrix p such that a*p = q*r.
# column j of p is column ipvt(j) of the identity matrix.
# if pivot is false, ipvt is not referenced.
#
# lipvt is a positive integer input variable. if pivot is false,
# then lipvt may be as small as 1. if pivot is true, then
# lipvt must be at least n.
#
# rdiag is an output array of length n which contains the
# diagonal elements of r.
#
# acnorm is an output array of length n which contains the
# norms of the corresponding columns of the input matrix a.
# if this information is not needed, then acnorm can coincide
# with rdiag.
#
# wa is a work array of length n. if pivot is false, then wa
# can coincide with rdiag.
#
# subprograms called
#
# minpack-supplied ... dpmpar,enorm
#
# fortran-supplied ... dmax1,dsqrt,min0
#
# argonne national laboratory. minpack project. march 1980.
# burton s. garbow, kenneth e. hillstrom, jorge j. more
#
# **********
#
# PIVOTING / PERMUTING:
#
# Upon return, A(*,*) is in standard parameter order, A(*,IPVT) is in
# permuted order.
#
# RDIAG is in permuted order.
# ACNORM is in standard parameter order.
#
#
# NOTE: in IDL the factors appear slightly differently than described
# above. The matrix A is still m x n where m >= n.
#
# The "upper" triangular matrix R is actually stored in the strict
# lower left triangle of A under the standard notation of IDL.
#
# The reflectors that generate Q are in the upper trapezoid of A upon
# output.
#
# EXAMPLE: decompose the matrix [[9.,2.,6.],[4.,8.,7.]]
# aa = [[9.,2.,6.],[4.,8.,7.]]
# mpfit_qrfac, aa, aapvt, rdiag, aanorm
# IDL> print, aa
# 1.81818* 0.181818* 0.545455*
# -8.54545+ 1.90160* 0.432573*
# IDL> print, rdiag
# -11.0000+ -7.48166+
#
# The components marked with a * are the components of the
# reflectors, and those marked with a + are components of R.
#
# To reconstruct Q and R we proceed as follows. First R.
# r = fltarr(m, n)
# for i = 0, n-1 do r(0:i,i) = aa(0:i,i) # fill in lower diag
# r(lindgen(n)*(m+1)) = rdiag
#
# Next, Q, which are composed from the reflectors. Each reflector v
# is taken from the upper trapezoid of aa, and converted to a matrix
# via (I - 2 vT . v / (v . vT)).
#
# hh = ident # identity matrix
# for i = 0, n-1 do begin
# v = aa(*,i) & if i GT 0 then v(0:i-1) = 0 # extract reflector
# hh = hh # (ident - 2*(v # v)/total(v * v)) # generate matrix
# endfor
#
# Test the result:
# IDL> print, hh # transpose(r)
# 9.00000 4.00000
# 2.00000 8.00000
# 6.00000 7.00000
#
# Note that it is usually never necessary to form the Q matrix
# explicitly, and MPFIT does not.
    def qrfac(self, a, pivot=0):
        """QR-factorize the m x n matrix *a* via Householder transformations.

        Direct port of MINPACK's QRFAC (see the comment block above for the
        full original documentation and the IDL-specific storage notes).
        With ``pivot != 0`` column pivoting is recorded in ``ipvt`` only —
        columns are never physically exchanged; callers must index through
        ``ipvt``.  *a* is modified IN PLACE: the reflector vectors and the
        strict part of R end up stored inside it.

        Returns ``[a, ipvt, rdiag, acnorm]``: the factored matrix, the
        permutation array, the diagonal of R (in permuted order), and the
        original column norms (in standard order).
        """
        if self.debug: print('Entering qrfac...')
        machep = self.machar.machep
        sz = a.shape
        m = sz[0]
        n = sz[1]
        # Compute the initial column norms and initialize arrays
        acnorm = numpy.zeros(n)
        for j in range(n):
            acnorm[j] = norm(a[:,j])
        rdiag = acnorm.copy()
        wa = rdiag.copy()
        ipvt = numpy.arange(n)
        # Reduce a to r with householder transformations
        minmn = numpy.min([m,n])
        for j in range(minmn):
            if pivot != 0:
                # Bring the column of largest norm into the pivot position
                rmax = numpy.max(rdiag[j:])
                kmax = numpy.nonzero(rdiag[j:] == rmax)[0]
                ct = len(kmax)
                kmax = kmax + j
                if ct > 0:
                    kmax = kmax[0]
                    # Exchange rows via the pivot only. Avoid actually exchanging
                    # the rows, in case there is lots of memory transfer. The
                    # exchange occurs later, within the body of MPFIT, after the
                    # extraneous columns of the matrix have been shed.
                    if kmax != j:
                        ipvt[j], ipvt[kmax] = ipvt[kmax], ipvt[j]
                        rdiag[kmax] = rdiag[j]
                        wa[kmax] = wa[j]
            # Compute the householder transformation to reduce the jth
            # column of A to a multiple of the jth unit vector
            lj = ipvt[j]
            ajj = a[j:,lj]    # NB: ajj is a VIEW into a; edits below mutate a
            ajnorm = norm(ajj)
            if ajnorm == 0:
                break
            if a[j,lj] < 0:
                ajnorm = -ajnorm
            ajj /= ajnorm
            ajj[0] += 1
            # *** Note optimization a(j:*,j)
            a[j:,lj] = ajj
            # Apply the transformation to the remaining columns
            # and update the norms
            # NOTE to SELF: tried to optimize this by removing the loop,
            # but it actually got slower. Reverted to "for" loop to keep
            # it simple.
            if j+1 < n:
                for k in range(j+1, n):
                    lk = ipvt[k]
                    # *** Note optimization a(j:*,lk)
                    # (corrected 20 Jul 2000)
                    if a[j,lj] != 0:
                        a[j:,lk] -= ajj * numpy.sum(a[j:,lk]*ajj)/a[j,lj]
                        if (pivot != 0) and (rdiag[k] != 0):
                            temp = a[j,lk]/rdiag[k]
                            rdiag[k] *= numpy.sqrt(max(1.-temp**2, 0.))
                            temp = rdiag[k]/wa[k]
                            # Re-compute the norm from scratch when the
                            # running update has lost too much precision.
                            if (0.05*temp**2) <= machep:
                                rdiag[k] = norm(a[j+1:,lk])
                                wa[k] = rdiag[k]
            rdiag[j] = -ajnorm
        return [a, ipvt, rdiag, acnorm]
# Original FORTRAN documentation
# **********
#
# subroutine qrsolv
#
# given an m by n matrix a, an n by n diagonal matrix d,
# and an m-vector b, the problem is to determine an x which
# solves the system
#
# a*x = b , d*x = 0 ,
#
# in the least squares sense.
#
# this subroutine completes the solution of the problem
# if it is provided with the necessary information from the
# factorization, with column pivoting, of a. that is, if
# a*p = q*r, where p is a permutation matrix, q has orthogonal
# columns, and r is an upper triangular matrix with diagonal
# elements of nonincreasing magnitude, then qrsolv expects
# the full upper triangle of r, the permutation matrix p,
# and the first n components of (q transpose)*b. the system
# a*x = b, d*x = 0, is then equivalent to
#
# t t
# r*z = q *b , p *d*p*z = 0 ,
#
# where x = p*z. if this system does not have full rank,
# then a least squares solution is obtained. on output qrsolv
# also provides an upper triangular matrix s such that
#
# t t t
# p *(a *a + d*d)*p = s *s .
#
# s is computed within qrsolv and may be of separate interest.
#
# the subroutine statement is
#
# subroutine qrsolv(n,r,ldr,ipvt,diag,qtb,x,sdiag,wa)
#
# where
#
# n is a positive integer input variable set to the order of r.
#
# r is an n by n array. on input the full upper triangle
# must contain the full upper triangle of the matrix r.
# on output the full upper triangle is unaltered, and the
# strict lower triangle contains the strict upper triangle
# (transposed) of the upper triangular matrix s.
#
# ldr is a positive integer input variable not less than n
# which specifies the leading dimension of the array r.
#
# ipvt is an integer input array of length n which defines the
# permutation matrix p such that a*p = q*r. column j of p
# is column ipvt(j) of the identity matrix.
#
# diag is an input array of length n which must contain the
# diagonal elements of the matrix d.
#
# qtb is an input array of length n which must contain the first
# n elements of the vector (q transpose)*b.
#
# x is an output array of length n which contains the least
# squares solution of the system a*x = b, d*x = 0.
#
# sdiag is an output array of length n which contains the
# diagonal elements of the upper triangular matrix s.
#
# wa is a work array of length n.
#
# subprograms called
#
# fortran-supplied ... dabs,dsqrt
#
# argonne national laboratory. minpack project. march 1980.
# burton s. garbow, kenneth e. hillstrom, jorge j. more
#
    def qrsolv(self, r, ipvt, diag, qtb, sdiag):
        """Solve the least-squares system a*x = b, d*x = 0 from a QR factorization.

        Direct port of MINPACK's QRSOLV (see the comment block above for the
        full original documentation).  Given the triangle R, permutation
        ``ipvt`` and ``qtb`` = (Q^T)*b from qrfac, plus the diagonal matrix
        ``diag``, eliminates d with Givens rotations and back-solves.  *r*
        and *sdiag* are modified in place.

        Returns ``(r, x, sdiag)``: r carrying the triangle S, the solution
        vector x (in standard parameter order), and the diagonal of S.
        """
        if self.debug:
            print('Entering qrsolv...')
        sz = r.shape
        m = sz[0]
        n = sz[1]
        # copy r and (q transpose)*b to preserve input and initialize s.
        # in particular, save the diagonal elements of r in x.
        for j in range(n):
            r[j:n,j] = r[j,j:n]
        x = numpy.diagonal(r).copy()
        wa = qtb.copy()
        # Eliminate the diagonal matrix d using a givens rotation
        for j in range(n):
            l = ipvt[j]
            if diag[l] == 0:
                break
            sdiag[j:] = 0
            sdiag[j] = diag[l]
            # The transformations to eliminate the row of d modify only a
            # single element of (q transpose)*b beyond the first n, which
            # is initially zero.
            qtbpj = 0.
            for k in range(j,n):
                if sdiag[k] == 0:
                    break
                # Choose the rotation formula that avoids overflow: branch on
                # which of r[k,k] and sdiag[k] is larger in magnitude.
                if numpy.abs(r[k,k]) < numpy.abs(sdiag[k]):
                    cotan = r[k,k]/sdiag[k]
                    sine = 0.5/numpy.sqrt(.25 + .25*cotan**2)
                    cosine = sine*cotan
                else:
                    tang = sdiag[k]/r[k,k]
                    cosine = 0.5/numpy.sqrt(.25 + .25*tang**2)
                    sine = cosine*tang
                # Compute the modified diagonal element of r and the
                # modified element of ((q transpose)*b,0).
                r[k,k] = cosine*r[k,k] + sine*sdiag[k]
                temp = cosine*wa[k] + sine*qtbpj
                qtbpj = -sine*wa[k] + cosine*qtbpj
                wa[k] = temp
                # Accumulate the transformation in the row of s
                if n > k+1:
                    temp = cosine*r[k+1:n,k] + sine*sdiag[k+1:n]
                    sdiag[k+1:n] = -sine*r[k+1:n,k] + cosine*sdiag[k+1:n]
                    r[k+1:n,k] = temp
            sdiag[j] = r[j,j]
            r[j,j] = x[j]
        # Solve the triangular system for z. If the system is singular
        # then obtain a least squares solution
        nsing = n
        wh = numpy.nonzero(sdiag == 0)[0]
        if len(wh) > 0:
            nsing = wh[0]
            wa[nsing:] = 0
        if nsing >= 1:
            wa[nsing-1] = wa[nsing-1]/sdiag[nsing-1] # Degenerate case
            # *** Reverse loop ***
            for j in range(nsing-2,-1,-1):
                sum0 = numpy.sum(r[j+1:nsing,j]*wa[j+1:nsing])
                wa[j] = (wa[j]-sum0)/sdiag[j]
        # Permute the components of z back to components of x
        x[ipvt] = wa
        return (r, x, sdiag)
# Original FORTRAN documentation
#
# subroutine lmpar
#
# given an m by n matrix a, an n by n nonsingular diagonal
# matrix d, an m-vector b, and a positive number delta,
# the problem is to determine a value for the parameter
# par such that if x solves the system
#
# a*x = b , sqrt(par)*d*x = 0 ,
#
# in the least squares sense, and dxnorm is the euclidean
# norm of d*x, then either par is zero and
#
# (dxnorm-delta) .le. 0.1*delta ,
#
# or par is positive and
#
# abs(dxnorm-delta) .le. 0.1*delta .
#
# this subroutine completes the solution of the problem
# if it is provided with the necessary information from the
# qr factorization, with column pivoting, of a. that is, if
# a*p = q*r, where p is a permutation matrix, q has orthogonal
# columns, and r is an upper triangular matrix with diagonal
# elements of nonincreasing magnitude, then lmpar expects
# the full upper triangle of r, the permutation matrix p,
# and the first n components of (q transpose)*b. on output
# lmpar also provides an upper triangular matrix s such that
#
# t t t
# p *(a *a + par*d*d)*p = s *s .
#
# s is employed within lmpar and may be of separate interest.
#
# only a few iterations are generally needed for convergence
# of the algorithm. if, however, the limit of 10 iterations
# is reached, then the output par will contain the best
# value obtained so far.
#
# the subroutine statement is
#
# subroutine lmpar(n,r,ldr,ipvt,diag,qtb,delta,par,x,sdiag,
# wa1,wa2)
#
# where
#
# n is a positive integer input variable set to the order of r.
#
# r is an n by n array. on input the full upper triangle
# must contain the full upper triangle of the matrix r.
# on output the full upper triangle is unaltered, and the
# strict lower triangle contains the strict upper triangle
# (transposed) of the upper triangular matrix s.
#
# ldr is a positive integer input variable not less than n
# which specifies the leading dimension of the array r.
#
# ipvt is an integer input array of length n which defines the
# permutation matrix p such that a*p = q*r. column j of p
# is column ipvt(j) of the identity matrix.
#
# diag is an input array of length n which must contain the
# diagonal elements of the matrix d.
#
# qtb is an input array of length n which must contain the first
# n elements of the vector (q transpose)*b.
#
# delta is a positive input variable which specifies an upper
# bound on the euclidean norm of d*x.
#
# par is a nonnegative variable. on input par contains an
# initial estimate of the levenberg-marquardt parameter.
# on output par contains the final estimate.
#
# x is an output array of length n which contains the least
# squares solution of the system a*x = b, sqrt(par)*d*x = 0,
# for the output par.
#
# sdiag is an output array of length n which contains the
# diagonal elements of the upper triangular matrix s.
#
# wa1 and wa2 are work arrays of length n.
#
# subprograms called
#
# minpack-supplied ... dpmpar,enorm,qrsolv
#
# fortran-supplied ... dabs,dmax1,dmin1,dsqrt
#
# argonne national laboratory. minpack project. march 1980.
# burton s. garbow, kenneth e. hillstrom, jorge j. more
#
    def lmpar(self, r, ipvt, diag, qtb, delta, x, sdiag, par=None):
        """Determine the Levenberg-Marquardt parameter for the trust region.

        Direct port of MINPACK's LMPAR (see the comment block above for the
        full original documentation).  Brackets the LM parameter between
        ``parl`` and ``paru`` and refines it with Newton corrections, calling
        ``self.qrsolv`` at each trial value; bails out after 10 inner
        iterations with the best value found.  *r*, *x* and *sdiag* are
        modified in place.

        Returns ``[r, par, x, sdiag]``: the (possibly updated) triangle, the
        final LM parameter, the step solving a*x = b with sqrt(par)*d*x = 0,
        and the diagonal of the triangular matrix S.
        """
        if self.debug:
            print('Entering lmpar...')
        dwarf = self.machar.minnum
        machep = self.machar.machep
        sz = r.shape
        m = sz[0]
        n = sz[1]
        # Compute and store in x the gauss-newton direction. If the
        # jacobian is rank-deficient, obtain a least-squares solution
        nsing = n
        wa1 = qtb.copy()
        # Treat diagonal entries below machep * (largest entry) as zero rank.
        rthresh = numpy.max(numpy.abs(numpy.diagonal(r))) * machep
        wh = numpy.nonzero(numpy.abs(numpy.diagonal(r)) < rthresh)[0]
        if len(wh) > 0:
            nsing = wh[0]
            wa1[wh[0]:] = 0
        if nsing >= 1:
            # *** Reverse loop ***
            for j in range(nsing-1,-1,-1):
                wa1[j] = wa1[j]/r[j,j]
                if j-1 >= 0:
                    wa1[:j] -= r[:j,j]*wa1[j]
        # Note: ipvt here is a permutation array
        x[ipvt] = wa1
        # Initialize the iteration counter. Evaluate the function at the
        # origin, and test for acceptance of the gauss-newton direction
        iter = 0
        wa2 = diag * x
        dxnorm = norm(wa2)
        fp = dxnorm - delta
        # Gauss-Newton step already inside the trust region: par = 0.
        if fp <= 0.1*delta:
            return [r, 0., x, sdiag]
        # If the jacobian is not rank deficient, the newton step provides a
        # lower bound, parl, for the zero of the function. Otherwise set
        # this bound to zero.
        parl = 0.
        if nsing >= n:
            wa1 = diag[ipvt] * wa2[ipvt] / dxnorm
            wa1[0] = wa1[0] / r[0,0] # Degenerate case
            for j in range(1,n):   # Note "1" here, not zero
                sum0 = numpy.sum(r[:j,j]*wa1[:j])
                wa1[j] = (wa1[j] - sum0)/r[j,j]
            temp = norm(wa1)
            parl = ((fp/delta)/temp)/temp
        # Calculate an upper bound, paru, for the zero of the function
        for j in range(n):
            sum0 = numpy.sum(r[:j+1,j]*qtb[:j+1])
            wa1[j] = sum0/diag[ipvt[j]]
        gnorm = norm(wa1)
        paru = gnorm/delta
        if paru == 0:
            paru = dwarf/numpy.min([delta,0.1])
        # If the input par lies outside of the interval (parl,paru), set
        # par to the closer endpoint
        par = numpy.max([par,parl])
        par = numpy.min([par,paru])
        if par == 0:
            par = gnorm/dxnorm
        # Beginning of an iteration
        while(1):
            iter = iter + 1
            # Evaluate the function at the current value of par
            if par == 0:
                par = numpy.max([dwarf, paru*0.001])
            temp = numpy.sqrt(par)
            wa1 = temp * diag
            [r, x, sdiag] = self.qrsolv(r, ipvt, wa1, qtb, sdiag)
            wa2 = diag*x
            dxnorm = norm(wa2)
            temp = fp
            fp = dxnorm - delta
            # Stop when within 10% of delta, when the lower bound cannot
            # improve further, or after 10 iterations (MINPACK limit).
            if (numpy.abs(fp) <= 0.1*delta) or \
               ((parl == 0) and (fp <= temp) and (temp < 0)) or \
               (iter == 10):
                break;
            # Compute the newton correction
            wa1 = diag[ipvt] * wa2[ipvt] / dxnorm
            for j in range(n-1):
                wa1[j] = wa1[j]/sdiag[j]
                wa1[j+1:n] -= r[j+1:n,j]*wa1[j]
            wa1[n-1] = wa1[n-1]/sdiag[n-1] # Degenerate case
            temp = norm(wa1)
            parc = ((fp/delta)/temp)/temp
            # Depending on the sign of the function, update parl or paru
            if fp > 0:
                parl = numpy.max([parl,par])
            if fp < 0:
                paru = numpy.min([paru,par])
            # Compute an improved estimate for par
            par = numpy.max([parl, par+parc])
            # End of an iteration
        # Termination
        return [r, par, x, sdiag]
# Procedure to tie one parameter to another.
def tie(self, p, ptied=None):
    """Apply parameter-tying expressions to the parameter array *p*.

    Each non-empty entry ``ptied[i]`` is a Python expression, normally
    written in terms of the name ``p`` (e.g. ``'p[0]*2'``), whose value
    replaces ``p[i]``.  Empty-string entries leave the corresponding
    parameter untouched.

    Returns the (mutated) parameter array, or ``None`` when no tying
    information was supplied.
    """
    if self.debug:
        print('Entering tie...')
    if ptied is None:
        return
    # Execute each tying assignment in this frame, where the local name
    # ``p`` is visible to the expression being evaluated.
    for idx, expr in enumerate(ptied):
        if expr == '':
            continue
        exec('p[%d] = %s' % (idx, expr))
    return p
# Original FORTRAN documentation
# **********
#
# subroutine covar
#
# given an m by n matrix a, the problem is to determine
# the covariance matrix corresponding to a, defined as
#
# t
# inverse(a *a) .
#
# this subroutine completes the solution of the problem
# if it is provided with the necessary information from the
# qr factorization, with column pivoting, of a. that is, if
# a*p = q*r, where p is a permutation matrix, q has orthogonal
# columns, and r is an upper triangular matrix with diagonal
# elements of nonincreasing magnitude, then covar expects
# the full upper triangle of r and the permutation matrix p.
# the covariance matrix is then computed as
#
# t t
# p*inverse(r *r)*p .
#
# if a is nearly rank deficient, it may be desirable to compute
# the covariance matrix corresponding to the linearly independent
# columns of a. to define the numerical rank of a, covar uses
# the tolerance tol. if l is the largest integer such that
#
# abs(r(l,l)) .gt. tol*abs(r(1,1)) ,
#
# then covar computes the covariance matrix corresponding to
# the first l columns of r. for k greater than l, column
# and row ipvt(k) of the covariance matrix are set to zero.
#
# the subroutine statement is
#
# subroutine covar(n,r,ldr,ipvt,tol,wa)
#
# where
#
# n is a positive integer input variable set to the order of r.
#
# r is an n by n array. on input the full upper triangle must
# contain the full upper triangle of the matrix r. on output
# r contains the square symmetric covariance matrix.
#
# ldr is a positive integer input variable not less than n
# which specifies the leading dimension of the array r.
#
# ipvt is an integer input array of length n which defines the
# permutation matrix p such that a*p = q*r. column j of p
# is column ipvt(j) of the identity matrix.
#
# tol is a nonnegative input variable used to define the
# numerical rank of a in the manner described above.
#
# wa is a work array of length n.
#
# subprograms called
#
# fortran-supplied ... dabs
#
# argonne national laboratory. minpack project. august 1980.
# burton s. garbow, kenneth e. hillstrom, jorge j. more
#
# **********
def calc_covar(self, rr, ipvt=None, tol=1.e-14):
    """Compute the covariance matrix inverse(transpose(r)*r) from the
    upper-triangular QR factor ``rr``.

    Direct port of MINPACK's COVAR routine; see the FORTRAN
    documentation block immediately above this method for the full
    contract.

    Parameters
    ----------
    rr : ndarray
        n-by-n array whose full upper triangle holds R from the
        column-pivoted QR factorization a*p = q*r.
    ipvt : ndarray of int, optional
        Length-n permutation (column j of p is column ipvt[j] of the
        identity).  Defaults to the identity permutation.
    tol : float
        Nonnegative tolerance defining the numerical rank of a:
        columns beyond the largest l with |r[l,l]| > tol*|r[0,0]| are
        zeroed in the result.

    Returns
    -------
    ndarray or int
        The n-by-n symmetric covariance matrix p*inverse(r'*r)*p', or
        -1 when ``rr`` is not a square two-dimensional matrix.
    """
    if self.debug:
        print('Entering calc_covar...')
    if rr.ndim != 2:
        print('ERROR: r must be a two-dimensional matrix')
        return -1
    s = rr.shape
    n = s[0]
    if s[0] != s[1]:
        print('ERROR: r must be a square matrix')
        return -1
    if ipvt is None:
        # No pivoting information: assume the identity permutation.
        ipvt = numpy.arange(n)
    # Work on a copy so the caller's factor is not clobbered.
    r = rr.copy()
    r.shape = [n,n]
    # Form the inverse of r in the full upper triangle of r.
    # l tracks the last numerically non-singular column (rank-1 index);
    # it stays -1 if even r[0,0] is below tolerance.
    l = -1
    tolr = tol * numpy.abs(r[0,0])
    for k in range(n):
        if numpy.abs(r[k,k]) <= tolr:
            break
        r[k,k] = 1./r[k,k]
        for j in range(k):
            # Back-substitution step: accumulate the inverse column k.
            temp = r[k,k] * r[j,k]
            r[j,k] = 0.
            r[:j+1,k] -= temp*r[:j+1,j]
        l = k
    # Form the full upper triangle of the inverse of (r transpose)*r
    # in the full upper triangle of r
    if l >= 0:
        for k in range(l+1):
            for j in range(k):
                temp = r[j,k]
                r[:j+1,j] += temp*r[:j+1,k]
            temp = r[k,k]
            r[:k+1,k] = temp * r[:k+1,k]
    # For the full lower triangle of the covariance matrix
    # in the strict lower triangle or and in wa
    # (wa caches the permuted diagonal; rows/columns beyond rank l are
    # zeroed, matching the COVAR contract for rank-deficient input).
    wa = numpy.repeat([r[0,0]], n)
    for j in range(n):
        jj = ipvt[j]
        sing = j > l
        for i in range(j+1):
            if sing:
                r[i,j] = 0.
            ii = ipvt[i]
            if ii > jj:
                r[ii,jj] = r[i,j]
            if ii < jj:
                r[jj,ii] = r[i,j]
        wa[jj] = r[j,j]
    # Symmetrize the covariance matrix in r
    for j in range(n):
        r[:j+1,j] = r[j,:j+1]
        r[j,j] = wa[j]
    return r
class machar:
    """Floating-point machine constants used throughout mpfit.

    Parameters
    ----------
    double : int
        Zero selects single precision (float32); any other value
        (the default) selects double precision (float64).
    """

    def __init__(self, double=1):
        # numpy.finfo supplies the IEEE characteristics for the width.
        info = numpy.finfo(numpy.float32) if double == 0 else numpy.finfo(numpy.float64)
        self.machep = info.eps      # machine epsilon
        self.maxnum = info.max      # largest finite representable value
        self.minnum = info.tiny     # smallest positive normal value
        self.maxlog = numpy.log(self.maxnum)
        self.minlog = numpy.log(self.minnum)
        # "dwarf" / "giant" guard values in the style of MINPACK's dpmpar,
        # padded away from the exact under/overflow thresholds.
        self.rdwarf = numpy.sqrt(self.minnum*1.5) * 10
        self.rgiant = numpy.sqrt(self.maxnum) * 0.1
|
cebarbosa/fossilgroups
|
ppxf/cap_mpfit.py
|
Python
|
gpl-3.0
| 93,623
|
[
"Gaussian"
] |
08c57236d8b49a6748b00eff8434232a2140ee18600eb379440b7b16da1f28b2
|
# Python 2's Lib/test/test_grammar.py (r66189)
# Python test set -- part 1, grammar.
# This just tests whether the parser accepts them all.
# NOTE: When you run this test as a script from the command line, you
# get warnings about certain hex/oct constants. Since those are
# issued by the parser, you can't suppress them by adding a
# filterwarnings() call to this module. Therefore, to shut up the
# regression test, the filterwarnings() call has been added to
# regrtest.py.
from test.test_support import run_unittest, check_syntax_error
import unittest
import sys
# testing import *
from sys import *
class TokenTests(unittest.TestCase):
def testBackslash(self):
# Backslash means line continuation:
x = 1 \
+ 1
self.assertEquals(x, 2, 'backslash for line continuation')
# Backslash does not means continuation in comments :\
x = 0
self.assertEquals(x, 0, 'backslash ending comment')
def testPlainIntegers(self):
self.assertEquals(0xff, 255)
self.assertEquals(0377, 255)
self.assertEquals(2147483647, 017777777777)
# "0x" is not a valid literal
self.assertRaises(SyntaxError, eval, "0x")
from sys import maxint
if maxint == 2147483647:
self.assertEquals(-2147483647-1, -020000000000)
# XXX -2147483648
self.assert_(037777777777 > 0)
self.assert_(0xffffffff > 0)
for s in '2147483648', '040000000000', '0x100000000':
try:
x = eval(s)
except OverflowError:
self.fail("OverflowError on huge integer literal %r" % s)
elif maxint == 9223372036854775807:
self.assertEquals(-9223372036854775807-1, -01000000000000000000000)
self.assert_(01777777777777777777777 > 0)
self.assert_(0xffffffffffffffff > 0)
for s in '9223372036854775808', '02000000000000000000000', \
'0x10000000000000000':
try:
x = eval(s)
except OverflowError:
self.fail("OverflowError on huge integer literal %r" % s)
else:
self.fail('Weird maxint value %r' % maxint)
def testLongIntegers(self):
x = 0L
x = 0l
x = 0xffffffffffffffffL
x = 0xffffffffffffffffl
x = 077777777777777777L
x = 077777777777777777l
x = 123456789012345678901234567890L
x = 123456789012345678901234567890l
def testFloats(self):
x = 3.14
x = 314.
x = 0.314
# XXX x = 000.314
x = .314
x = 3e14
x = 3E14
x = 3e-14
x = 3e+14
x = 3.e14
x = .3e14
x = 3.1e4
def testStringLiterals(self):
x = ''; y = ""; self.assert_(len(x) == 0 and x == y)
x = '\''; y = "'"; self.assert_(len(x) == 1 and x == y and ord(x) == 39)
x = '"'; y = "\""; self.assert_(len(x) == 1 and x == y and ord(x) == 34)
x = "doesn't \"shrink\" does it"
y = 'doesn\'t "shrink" does it'
self.assert_(len(x) == 24 and x == y)
x = "does \"shrink\" doesn't it"
y = 'does "shrink" doesn\'t it'
self.assert_(len(x) == 24 and x == y)
x = """
The "quick"
brown fox
jumps over
the 'lazy' dog.
"""
y = '\nThe "quick"\nbrown fox\njumps over\nthe \'lazy\' dog.\n'
self.assertEquals(x, y)
y = '''
The "quick"
brown fox
jumps over
the 'lazy' dog.
'''
self.assertEquals(x, y)
y = "\n\
The \"quick\"\n\
brown fox\n\
jumps over\n\
the 'lazy' dog.\n\
"
self.assertEquals(x, y)
y = '\n\
The \"quick\"\n\
brown fox\n\
jumps over\n\
the \'lazy\' dog.\n\
'
self.assertEquals(x, y)
class GrammarTests(unittest.TestCase):
# single_input: NEWLINE | simple_stmt | compound_stmt NEWLINE
# XXX can't test in a script -- this rule is only used when interactive
# file_input: (NEWLINE | stmt)* ENDMARKER
# Being tested as this very moment this very module
# expr_input: testlist NEWLINE
# XXX Hard to test -- used only in calls to input()
def testEvalInput(self):
# testlist ENDMARKER
x = eval('1, 0 or 1')
def testFuncdef(self):
### 'def' NAME parameters ':' suite
### parameters: '(' [varargslist] ')'
### varargslist: (fpdef ['=' test] ',')* ('*' NAME [',' ('**'|'*' '*') NAME]
### | ('**'|'*' '*') NAME)
### | fpdef ['=' test] (',' fpdef ['=' test])* [',']
### fpdef: NAME | '(' fplist ')'
### fplist: fpdef (',' fpdef)* [',']
### arglist: (argument ',')* (argument | *' test [',' '**' test] | '**' test)
### argument: [test '='] test # Really [keyword '='] test
def f1(): pass
f1()
f1(*())
f1(*(), **{})
def f2(one_argument): pass
def f3(two, arguments): pass
def f4(two, (compound, (argument, list))): pass
def f5((compound, first), two): pass
self.assertEquals(f2.func_code.co_varnames, ('one_argument',))
self.assertEquals(f3.func_code.co_varnames, ('two', 'arguments'))
if sys.platform.startswith('java'):
self.assertEquals(f4.func_code.co_varnames,
('two', '(compound, (argument, list))', 'compound', 'argument',
'list',))
self.assertEquals(f5.func_code.co_varnames,
('(compound, first)', 'two', 'compound', 'first'))
else:
self.assertEquals(f4.func_code.co_varnames,
('two', '.1', 'compound', 'argument', 'list'))
self.assertEquals(f5.func_code.co_varnames,
('.0', 'two', 'compound', 'first'))
def a1(one_arg,): pass
def a2(two, args,): pass
def v0(*rest): pass
def v1(a, *rest): pass
def v2(a, b, *rest): pass
def v3(a, (b, c), *rest): return a, b, c, rest
f1()
f2(1)
f2(1,)
f3(1, 2)
f3(1, 2,)
f4(1, (2, (3, 4)))
v0()
v0(1)
v0(1,)
v0(1,2)
v0(1,2,3,4,5,6,7,8,9,0)
v1(1)
v1(1,)
v1(1,2)
v1(1,2,3)
v1(1,2,3,4,5,6,7,8,9,0)
v2(1,2)
v2(1,2,3)
v2(1,2,3,4)
v2(1,2,3,4,5,6,7,8,9,0)
v3(1,(2,3))
v3(1,(2,3),4)
v3(1,(2,3),4,5,6,7,8,9,0)
# ceval unpacks the formal arguments into the first argcount names;
# thus, the names nested inside tuples must appear after these names.
if sys.platform.startswith('java'):
self.assertEquals(v3.func_code.co_varnames, ('a', '(b, c)', 'rest', 'b', 'c'))
else:
self.assertEquals(v3.func_code.co_varnames, ('a', '.1', 'rest', 'b', 'c'))
self.assertEquals(v3(1, (2, 3), 4), (1, 2, 3, (4,)))
def d01(a=1): pass
d01()
d01(1)
d01(*(1,))
d01(**{'a':2})
def d11(a, b=1): pass
d11(1)
d11(1, 2)
d11(1, **{'b':2})
def d21(a, b, c=1): pass
d21(1, 2)
d21(1, 2, 3)
d21(*(1, 2, 3))
d21(1, *(2, 3))
d21(1, 2, *(3,))
d21(1, 2, **{'c':3})
def d02(a=1, b=2): pass
d02()
d02(1)
d02(1, 2)
d02(*(1, 2))
d02(1, *(2,))
d02(1, **{'b':2})
d02(**{'a': 1, 'b': 2})
def d12(a, b=1, c=2): pass
d12(1)
d12(1, 2)
d12(1, 2, 3)
def d22(a, b, c=1, d=2): pass
d22(1, 2)
d22(1, 2, 3)
d22(1, 2, 3, 4)
def d01v(a=1, *rest): pass
d01v()
d01v(1)
d01v(1, 2)
d01v(*(1, 2, 3, 4))
d01v(*(1,))
d01v(**{'a':2})
def d11v(a, b=1, *rest): pass
d11v(1)
d11v(1, 2)
d11v(1, 2, 3)
def d21v(a, b, c=1, *rest): pass
d21v(1, 2)
d21v(1, 2, 3)
d21v(1, 2, 3, 4)
d21v(*(1, 2, 3, 4))
d21v(1, 2, **{'c': 3})
def d02v(a=1, b=2, *rest): pass
d02v()
d02v(1)
d02v(1, 2)
d02v(1, 2, 3)
d02v(1, *(2, 3, 4))
d02v(**{'a': 1, 'b': 2})
def d12v(a, b=1, c=2, *rest): pass
d12v(1)
d12v(1, 2)
d12v(1, 2, 3)
d12v(1, 2, 3, 4)
d12v(*(1, 2, 3, 4))
d12v(1, 2, *(3, 4, 5))
d12v(1, *(2,), **{'c': 3})
def d22v(a, b, c=1, d=2, *rest): pass
d22v(1, 2)
d22v(1, 2, 3)
d22v(1, 2, 3, 4)
d22v(1, 2, 3, 4, 5)
d22v(*(1, 2, 3, 4))
d22v(1, 2, *(3, 4, 5))
d22v(1, *(2, 3), **{'d': 4})
def d31v((x)): pass
d31v(1)
def d32v((x,)): pass
d32v((1,))
# keyword arguments after *arglist
def f(*args, **kwargs):
return args, kwargs
self.assertEquals(f(1, x=2, *[3, 4], y=5), ((1, 3, 4),
{'x':2, 'y':5}))
self.assertRaises(SyntaxError, eval, "f(1, *(2,3), 4)")
self.assertRaises(SyntaxError, eval, "f(1, x=2, *(3,4), x=5)")
# Check ast errors in *args and *kwargs
check_syntax_error(self, "f(*g(1=2))")
check_syntax_error(self, "f(**g(1=2))")
def testLambdef(self):
### lambdef: 'lambda' [varargslist] ':' test
l1 = lambda : 0
self.assertEquals(l1(), 0)
l2 = lambda : a[d] # XXX just testing the expression
l3 = lambda : [2 < x for x in [-1, 3, 0L]]
self.assertEquals(l3(), [0, 1, 0])
l4 = lambda x = lambda y = lambda z=1 : z : y() : x()
self.assertEquals(l4(), 1)
l5 = lambda x, y, z=2: x + y + z
self.assertEquals(l5(1, 2), 5)
self.assertEquals(l5(1, 2, 3), 6)
check_syntax_error(self, "lambda x: x = 2")
check_syntax_error(self, "lambda (None,): None")
### stmt: simple_stmt | compound_stmt
# Tested below
def testSimpleStmt(self):
### simple_stmt: small_stmt (';' small_stmt)* [';']
x = 1; pass; del x
def foo():
# verify statments that end with semi-colons
x = 1; pass; del x;
foo()
### small_stmt: expr_stmt | print_stmt | pass_stmt | del_stmt | flow_stmt | import_stmt | global_stmt | access_stmt | exec_stmt
# Tested below
def testExprStmt(self):
# (exprlist '=')* exprlist
1
1, 2, 3
x = 1
x = 1, 2, 3
x = y = z = 1, 2, 3
x, y, z = 1, 2, 3
abc = a, b, c = x, y, z = xyz = 1, 2, (3, 4)
check_syntax_error(self, "x + 1 = 1")
check_syntax_error(self, "a + 1 = b + 2")
def testPrintStmt(self):
# 'print' (test ',')* [test]
import StringIO
# Can't test printing to real stdout without comparing output
# which is not available in unittest.
save_stdout = sys.stdout
sys.stdout = StringIO.StringIO()
print 1, 2, 3
print 1, 2, 3,
print
print 0 or 1, 0 or 1,
print 0 or 1
# 'print' '>>' test ','
print >> sys.stdout, 1, 2, 3
print >> sys.stdout, 1, 2, 3,
print >> sys.stdout
print >> sys.stdout, 0 or 1, 0 or 1,
print >> sys.stdout, 0 or 1
# test printing to an instance
class Gulp:
def write(self, msg): pass
gulp = Gulp()
print >> gulp, 1, 2, 3
print >> gulp, 1, 2, 3,
print >> gulp
print >> gulp, 0 or 1, 0 or 1,
print >> gulp, 0 or 1
# test print >> None
def driver():
oldstdout = sys.stdout
sys.stdout = Gulp()
try:
tellme(Gulp())
tellme()
finally:
sys.stdout = oldstdout
# we should see this once
def tellme(file=sys.stdout):
print >> file, 'hello world'
driver()
# we should not see this at all
def tellme(file=None):
print >> file, 'goodbye universe'
driver()
self.assertEqual(sys.stdout.getvalue(), '''\
1 2 3
1 2 3
1 1 1
1 2 3
1 2 3
1 1 1
hello world
''')
sys.stdout = save_stdout
# syntax errors
check_syntax_error(self, 'print ,')
check_syntax_error(self, 'print >> x,')
def testDelStmt(self):
# 'del' exprlist
abc = [1,2,3]
x, y, z = abc
xyz = x, y, z
del abc
del x, y, (z, xyz)
def testPassStmt(self):
# 'pass'
pass
# flow_stmt: break_stmt | continue_stmt | return_stmt | raise_stmt
# Tested below
def testBreakStmt(self):
# 'break'
while 1: break
def testContinueStmt(self):
# 'continue'
i = 1
while i: i = 0; continue
msg = ""
while not msg:
msg = "ok"
try:
continue
msg = "continue failed to continue inside try"
except:
msg = "continue inside try called except block"
if msg != "ok":
self.fail(msg)
msg = ""
while not msg:
msg = "finally block not called"
try:
continue
finally:
msg = "ok"
if msg != "ok":
self.fail(msg)
def test_break_continue_loop(self):
# This test warrants an explanation. It is a test specifically for SF bugs
# #463359 and #462937. The bug is that a 'break' statement executed or
# exception raised inside a try/except inside a loop, *after* a continue
# statement has been executed in that loop, will cause the wrong number of
# arguments to be popped off the stack and the instruction pointer reset to
# a very small number (usually 0.) Because of this, the following test
# *must* written as a function, and the tracking vars *must* be function
# arguments with default values. Otherwise, the test will loop and loop.
def test_inner(extra_burning_oil = 1, count=0):
big_hippo = 2
while big_hippo:
count += 1
try:
if extra_burning_oil and big_hippo == 1:
extra_burning_oil -= 1
break
big_hippo -= 1
continue
except:
raise
if count > 2 or big_hippo <> 1:
self.fail("continue then break in try/except in loop broken!")
test_inner()
def testReturn(self):
# 'return' [testlist]
def g1(): return
def g2(): return 1
g1()
x = g2()
check_syntax_error(self, "class foo:return 1")
def testYield(self):
check_syntax_error(self, "class foo:yield 1")
def testRaise(self):
# 'raise' test [',' test]
try: raise RuntimeError, 'just testing'
except RuntimeError: pass
try: raise KeyboardInterrupt
except KeyboardInterrupt: pass
def testImport(self):
# 'import' dotted_as_names
import sys
import time, sys
# 'from' dotted_name 'import' ('*' | '(' import_as_names ')' | import_as_names)
from time import time
from time import (time)
# not testable inside a function, but already done at top of the module
# from sys import *
from sys import path, argv
from sys import (path, argv)
from sys import (path, argv,)
def testGlobal(self):
# 'global' NAME (',' NAME)*
global a
global a, b
global one, two, three, four, five, six, seven, eight, nine, ten
def testExec(self):
# 'exec' expr ['in' expr [',' expr]]
z = None
del z
exec 'z=1+1\n'
if z != 2: self.fail('exec \'z=1+1\'\\n')
del z
exec 'z=1+1'
if z != 2: self.fail('exec \'z=1+1\'')
z = None
del z
import types
if hasattr(types, "UnicodeType"):
exec r"""if 1:
exec u'z=1+1\n'
if z != 2: self.fail('exec u\'z=1+1\'\\n')
del z
exec u'z=1+1'
if z != 2: self.fail('exec u\'z=1+1\'')"""
g = {}
exec 'z = 1' in g
if g.has_key('__builtins__'): del g['__builtins__']
if g != {'z': 1}: self.fail('exec \'z = 1\' in g')
g = {}
l = {}
import warnings
warnings.filterwarnings("ignore", "global statement", module="<string>")
exec 'global a; a = 1; b = 2' in g, l
if g.has_key('__builtins__'): del g['__builtins__']
if l.has_key('__builtins__'): del l['__builtins__']
if (g, l) != ({'a':1}, {'b':2}):
self.fail('exec ... in g (%s), l (%s)' %(g,l))
def testAssert(self):
# assert_stmt: 'assert' test [',' test]
assert 1
assert 1, 1
assert lambda x:x
assert 1, lambda x:x+1
try:
assert 0, "msg"
except AssertionError, e:
self.assertEquals(e.args[0], "msg")
else:
if __debug__:
self.fail("AssertionError not raised by assert 0")
### compound_stmt: if_stmt | while_stmt | for_stmt | try_stmt | funcdef | classdef
# Tested below
def testIf(self):
# 'if' test ':' suite ('elif' test ':' suite)* ['else' ':' suite]
if 1: pass
if 1: pass
else: pass
if 0: pass
elif 0: pass
if 0: pass
elif 0: pass
elif 0: pass
elif 0: pass
else: pass
def testWhile(self):
# 'while' test ':' suite ['else' ':' suite]
while 0: pass
while 0: pass
else: pass
# Issue1920: "while 0" is optimized away,
# ensure that the "else" clause is still present.
x = 0
while 0:
x = 1
else:
x = 2
self.assertEquals(x, 2)
def testFor(self):
# 'for' exprlist 'in' exprlist ':' suite ['else' ':' suite]
for i in 1, 2, 3: pass
for i, j, k in (): pass
else: pass
class Squares:
def __init__(self, max):
self.max = max
self.sofar = []
def __len__(self): return len(self.sofar)
def __getitem__(self, i):
if not 0 <= i < self.max: raise IndexError
n = len(self.sofar)
while n <= i:
self.sofar.append(n*n)
n = n+1
return self.sofar[i]
n = 0
for x in Squares(10): n = n+x
if n != 285:
self.fail('for over growing sequence')
result = []
for x, in [(1,), (2,), (3,)]:
result.append(x)
self.assertEqual(result, [1, 2, 3])
def testTry(self):
### try_stmt: 'try' ':' suite (except_clause ':' suite)+ ['else' ':' suite]
### | 'try' ':' suite 'finally' ':' suite
### except_clause: 'except' [expr [('as' | ',') expr]]
try:
1/0
except ZeroDivisionError:
pass
else:
pass
try: 1/0
except EOFError: pass
except TypeError as msg: pass
except RuntimeError, msg: pass
except: pass
else: pass
try: 1/0
except (EOFError, TypeError, ZeroDivisionError): pass
try: 1/0
except (EOFError, TypeError, ZeroDivisionError), msg: pass
try: pass
finally: pass
def testSuite(self):
# simple_stmt | NEWLINE INDENT NEWLINE* (stmt NEWLINE*)+ DEDENT
if 1: pass
if 1:
pass
if 1:
#
#
#
pass
pass
#
pass
#
def testTest(self):
### and_test ('or' and_test)*
### and_test: not_test ('and' not_test)*
### not_test: 'not' not_test | comparison
if not 1: pass
if 1 and 1: pass
if 1 or 1: pass
if not not not 1: pass
if not 1 and 1 and 1: pass
if 1 and 1 or 1 and 1 and 1 or not 1 and 1: pass
def testComparison(self):
### comparison: expr (comp_op expr)*
### comp_op: '<'|'>'|'=='|'>='|'<='|'<>'|'!='|'in'|'not' 'in'|'is'|'is' 'not'
if 1: pass
x = (1 == 1)
if 1 == 1: pass
if 1 != 1: pass
if 1 <> 1: pass
if 1 < 1: pass
if 1 > 1: pass
if 1 <= 1: pass
if 1 >= 1: pass
if 1 is 1: pass
if 1 is not 1: pass
if 1 in (): pass
if 1 not in (): pass
if 1 < 1 > 1 == 1 >= 1 <= 1 <> 1 != 1 in 1 not in 1 is 1 is not 1: pass
def testBinaryMaskOps(self):
x = 1 & 1
x = 1 ^ 1
x = 1 | 1
def testShiftOps(self):
x = 1 << 1
x = 1 >> 1
x = 1 << 1 >> 1
def testAdditiveOps(self):
x = 1
x = 1 + 1
x = 1 - 1 - 1
x = 1 - 1 + 1 - 1 + 1
def testMultiplicativeOps(self):
x = 1 * 1
x = 1 / 1
x = 1 % 1
x = 1 / 1 * 1 % 1
def testUnaryOps(self):
x = +1
x = -1
x = ~1
x = ~1 ^ 1 & 1 | 1 & 1 ^ -1
x = -1*1/1 + 1*1 - ---1*1
def testSelectors(self):
### trailer: '(' [testlist] ')' | '[' subscript ']' | '.' NAME
### subscript: expr | [expr] ':' [expr]
import sys, time
c = sys.path[0]
x = time.time()
x = sys.modules['time'].time()
a = '01234'
c = a[0]
c = a[-1]
s = a[0:5]
s = a[:5]
s = a[0:]
s = a[:]
s = a[-5:]
s = a[:-1]
s = a[-4:-3]
# A rough test of SF bug 1333982. http://python.org/sf/1333982
# The testing here is fairly incomplete.
# Test cases should include: commas with 1 and 2 colons
d = {}
d[1] = 1
d[1,] = 2
d[1,2] = 3
d[1,2,3] = 4
L = list(d)
L.sort()
self.assertEquals(str(L), '[1, (1,), (1, 2), (1, 2, 3)]')
def testAtoms(self):
### atom: '(' [testlist] ')' | '[' [testlist] ']' | '{' [dictmaker] '}' | '`' testlist '`' | NAME | NUMBER | STRING
### dictmaker: test ':' test (',' test ':' test)* [',']
x = (1)
x = (1 or 2 or 3)
x = (1 or 2 or 3, 2, 3)
x = []
x = [1]
x = [1 or 2 or 3]
x = [1 or 2 or 3, 2, 3]
x = []
x = {}
x = {'one': 1}
x = {'one': 1,}
x = {'one' or 'two': 1 or 2}
x = {'one': 1, 'two': 2}
x = {'one': 1, 'two': 2,}
x = {'one': 1, 'two': 2, 'three': 3, 'four': 4, 'five': 5, 'six': 6}
x = `x`
x = `1 or 2 or 3`
self.assertEqual(`1,2`, '(1, 2)')
x = x
x = 'x'
x = 123
### exprlist: expr (',' expr)* [',']
### testlist: test (',' test)* [',']
# These have been exercised enough above
def testClassdef(self):
# 'class' NAME ['(' [testlist] ')'] ':' suite
class B: pass
class B2(): pass
class C1(B): pass
class C2(B): pass
class D(C1, C2, B): pass
class C:
def meth1(self): pass
def meth2(self, arg): pass
def meth3(self, a1, a2): pass
# decorator: '@' dotted_name [ '(' [arglist] ')' ] NEWLINE
# decorators: decorator+
# decorated: decorators (classdef | funcdef)
def class_decorator(x):
x.decorated = True
return x
@class_decorator
class G:
pass
self.assertEqual(G.decorated, True)
def testListcomps(self):
# list comprehension tests
nums = [1, 2, 3, 4, 5]
strs = ["Apple", "Banana", "Coconut"]
spcs = [" Apple", " Banana ", "Coco nut "]
self.assertEqual([s.strip() for s in spcs], ['Apple', 'Banana', 'Coco nut'])
self.assertEqual([3 * x for x in nums], [3, 6, 9, 12, 15])
self.assertEqual([x for x in nums if x > 2], [3, 4, 5])
self.assertEqual([(i, s) for i in nums for s in strs],
[(1, 'Apple'), (1, 'Banana'), (1, 'Coconut'),
(2, 'Apple'), (2, 'Banana'), (2, 'Coconut'),
(3, 'Apple'), (3, 'Banana'), (3, 'Coconut'),
(4, 'Apple'), (4, 'Banana'), (4, 'Coconut'),
(5, 'Apple'), (5, 'Banana'), (5, 'Coconut')])
self.assertEqual([(i, s) for i in nums for s in [f for f in strs if "n" in f]],
[(1, 'Banana'), (1, 'Coconut'), (2, 'Banana'), (2, 'Coconut'),
(3, 'Banana'), (3, 'Coconut'), (4, 'Banana'), (4, 'Coconut'),
(5, 'Banana'), (5, 'Coconut')])
self.assertEqual([(lambda a:[a**i for i in range(a+1)])(j) for j in range(5)],
[[1], [1, 1], [1, 2, 4], [1, 3, 9, 27], [1, 4, 16, 64, 256]])
def test_in_func(l):
return [None < x < 3 for x in l if x > 2]
self.assertEqual(test_in_func(nums), [False, False, False])
def test_nested_front():
self.assertEqual([[y for y in [x, x + 1]] for x in [1,3,5]],
[[1, 2], [3, 4], [5, 6]])
test_nested_front()
check_syntax_error(self, "[i, s for i in nums for s in strs]")
check_syntax_error(self, "[x if y]")
suppliers = [
(1, "Boeing"),
(2, "Ford"),
(3, "Macdonalds")
]
parts = [
(10, "Airliner"),
(20, "Engine"),
(30, "Cheeseburger")
]
suppart = [
(1, 10), (1, 20), (2, 20), (3, 30)
]
x = [
(sname, pname)
for (sno, sname) in suppliers
for (pno, pname) in parts
for (sp_sno, sp_pno) in suppart
if sno == sp_sno and pno == sp_pno
]
self.assertEqual(x, [('Boeing', 'Airliner'), ('Boeing', 'Engine'), ('Ford', 'Engine'),
('Macdonalds', 'Cheeseburger')])
def testGenexps(self):
# generator expression tests
g = ([x for x in range(10)] for x in range(1))
self.assertEqual(g.next(), [x for x in range(10)])
try:
g.next()
self.fail('should produce StopIteration exception')
except StopIteration:
pass
a = 1
try:
g = (a for d in a)
g.next()
self.fail('should produce TypeError')
except TypeError:
pass
self.assertEqual(list((x, y) for x in 'abcd' for y in 'abcd'), [(x, y) for x in 'abcd' for y in 'abcd'])
self.assertEqual(list((x, y) for x in 'ab' for y in 'xy'), [(x, y) for x in 'ab' for y in 'xy'])
a = [x for x in range(10)]
b = (x for x in (y for y in a))
self.assertEqual(sum(b), sum([x for x in range(10)]))
self.assertEqual(sum(x**2 for x in range(10)), sum([x**2 for x in range(10)]))
self.assertEqual(sum(x*x for x in range(10) if x%2), sum([x*x for x in range(10) if x%2]))
self.assertEqual(sum(x for x in (y for y in range(10))), sum([x for x in range(10)]))
self.assertEqual(sum(x for x in (y for y in (z for z in range(10)))), sum([x for x in range(10)]))
self.assertEqual(sum(x for x in [y for y in (z for z in range(10))]), sum([x for x in range(10)]))
self.assertEqual(sum(x for x in (y for y in (z for z in range(10) if True)) if True), sum([x for x in range(10)]))
self.assertEqual(sum(x for x in (y for y in (z for z in range(10) if True) if False) if True), 0)
check_syntax_error(self, "foo(x for x in range(10), 100)")
check_syntax_error(self, "foo(100, x for x in range(10))")
def testComprehensionSpecials(self):
# test for outmost iterable precomputation
x = 10; g = (i for i in range(x)); x = 5
self.assertEqual(len(list(g)), 10)
# This should hold, since we're only precomputing outmost iterable.
x = 10; t = False; g = ((i,j) for i in range(x) if t for j in range(x))
x = 5; t = True;
self.assertEqual([(i,j) for i in range(10) for j in range(5)], list(g))
# Grammar allows multiple adjacent 'if's in listcomps and genexps,
# even though it's silly. Make sure it works (ifelse broke this.)
self.assertEqual([ x for x in range(10) if x % 2 if x % 3 ], [1, 5, 7])
self.assertEqual(list(x for x in range(10) if x % 2 if x % 3), [1, 5, 7])
# verify unpacking single element tuples in listcomp/genexp.
self.assertEqual([x for x, in [(4,), (5,), (6,)]], [4, 5, 6])
self.assertEqual(list(x for x, in [(7,), (8,), (9,)]), [7, 8, 9])
def testIfElseExpr(self):
# Test ifelse expressions in various cases
def _checkeval(msg, ret):
"helper to check that evaluation of expressions is done correctly"
print x
return ret
self.assertEqual([ x() for x in lambda: True, lambda: False if x() ], [True])
self.assertEqual([ x() for x in (lambda: True, lambda: False) if x() ], [True])
self.assertEqual([ x(False) for x in (lambda x: False if x else True, lambda x: True if x else False) if x(False) ], [True])
self.assertEqual((5 if 1 else _checkeval("check 1", 0)), 5)
self.assertEqual((_checkeval("check 2", 0) if 0 else 5), 5)
self.assertEqual((5 and 6 if 0 else 1), 1)
self.assertEqual(((5 and 6) if 0 else 1), 1)
self.assertEqual((5 and (6 if 1 else 1)), 6)
self.assertEqual((0 or _checkeval("check 3", 2) if 0 else 3), 3)
self.assertEqual((1 or _checkeval("check 4", 2) if 1 else _checkeval("check 5", 3)), 1)
self.assertEqual((0 or 5 if 1 else _checkeval("check 6", 3)), 5)
self.assertEqual((not 5 if 1 else 1), False)
self.assertEqual((not 5 if 0 else 1), 1)
self.assertEqual((6 + 1 if 1 else 2), 7)
self.assertEqual((6 - 1 if 1 else 2), 5)
self.assertEqual((6 * 2 if 1 else 4), 12)
self.assertEqual((6 / 2 if 1 else 3), 3)
self.assertEqual((6 < 4 if 0 else 2), 2)
def test_main():
run_unittest(TokenTests, GrammarTests)
if __name__ == '__main__':
test_main()
|
leighpauls/k2cro4
|
third_party/python_26/Lib/lib2to3/tests/data/py2_test_grammar.py
|
Python
|
bsd-3-clause
| 30,527
|
[
"GULP"
] |
02716a2fff54c1b66ea7c47de9659a29288859e0123e23557824f10f9b9ba89a
|
# -*- coding: utf-8 -*-
#
# one_neuron_with_sine_wave.py
#
# This file is part of NEST.
#
# Copyright (C) 2004 The NEST Initiative
#
# NEST is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 2 of the License, or
# (at your option) any later version.
#
# NEST is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with NEST. If not, see <http://www.gnu.org/licenses/>.
import nest
import nest.voltage_trace
import matplotlib.pyplot as plt
# Drive a single integrate-and-fire neuron with a sinusoidal current plus
# one excitatory and one inhibitory Poisson noise source, record its
# membrane potential, and save the voltage trace as an EPS figure.
nest.ResetKernel()

cell = nest.Create('iaf_neuron')
ac_drive = nest.Create('ac_generator', 1,
                       {'amplitude': 100.0, 'frequency': 2.0})
poisson_inputs = nest.Create('poisson_generator', 2,
                             [{'rate': 70000.0}, {'rate': 20000.0}])
vm = nest.Create('voltmeter', 1, {'withgid': True})

# Wire the stimulus and the recorder to the neuron.
nest.Connect(ac_drive, cell)
nest.Connect(vm, cell)
# The first Poisson source excites; the second inhibits (negative weight).
nest.Connect(poisson_inputs[:1], cell, syn_spec={'weight': 1.0, 'delay': 1.0})
nest.Connect(poisson_inputs[1:], cell, syn_spec={'weight': -1.0, 'delay': 1.0})

nest.Simulate(1000.0)

nest.voltage_trace.from_device(vm)
plt.savefig('../figures/voltage_trace.eps')
|
HBPNeurorobotics/nest-simulator
|
doc/nest_by_example/scripts/one_neuron_with_sine_wave.py
|
Python
|
gpl-2.0
| 1,514
|
[
"NEURON"
] |
ab5e7acc59faead2f3e01308febc753902050be33658d5ad8321054858e6f903
|
# Copyright 2013-2020 Lawrence Livermore National Security, LLC and other
# Spack Project Developers. See the top-level COPYRIGHT file for details.
#
# SPDX-License-Identifier: (Apache-2.0 OR MIT)
from spack import *
from glob import glob
class Bowtie2(Package):
"""Bowtie 2 is an ultrafast and memory-efficient tool for aligning
sequencing reads to long reference sequences"""
homepage = "http://bowtie-bio.sourceforge.net/bowtie2/index.shtml"
url = "http://downloads.sourceforge.net/project/bowtie-bio/bowtie2/2.3.1/bowtie2-2.3.1-source.zip"
version('2.4.1', sha256='566d6fb01a361883747103d797308ee4bdb70f6db7d27bfc72a520587815df22')
version('2.3.5.1', sha256='335c8dafb1487a4a9228ef922fbce4fffba3ce8bc211e2d7085aac092155a53f')
version('2.3.5', sha256='2b6b2c46fbb5565ba6206b47d07ece8754b295714522149d92acebefef08347b')
version('2.3.4.1', sha256='a1efef603b91ecc11cfdb822087ae00ecf2dd922e03c85eea1ed7f8230c119dc')
version('2.3.1', sha256='33bd54f5041a31878e7e450cdcf0afba08345fa1133ce8ac6fd00bf7e521a443')
version('2.3.0', sha256='f9f841e780e78b1ae24b17981e2469e6d5add90ec22ef563af23ae2dd5ca003c')
version('2.2.5', sha256='e22766dd9421c10e82a3e207ee1f0eb924c025b909ad5fffa36633cd7978d3b0')
depends_on('tbb', when='@2.3.0:')
depends_on('readline', when='@2.3.1:')
depends_on('perl', type='run')
depends_on('python', type='run')
depends_on('zlib', when='@2.3.1:')
patch('bowtie2-2.2.5.patch', when='@2.2.5', level=0)
patch('bowtie2-2.3.1.patch', when='@2.3.1', level=0)
patch('bowtie2-2.3.0.patch', when='@2.3.0', level=0)
resource(name='simde', git="https://github.com/nemequ/simde",
destination='.', when='target=aarch64:')
# seems to have trouble with 6's -std=gnu++14
conflicts('%gcc@6:', when='@:2.3.1')
conflicts('@:2.3.5.0', when='target=aarch64:')
def patch(self):
if self.spec.target.family == 'aarch64':
copy_tree('simde', 'third_party/simde')
if self.spec.satisfies('%gcc@:4.8.9'):
filter_file('-fopenmp-simd', '', 'Makefile')
@run_before('install')
def filter_sbang(self):
"""Run before install so that the standard Spack sbang install hook
can fix up the path to the perl|python binary.
"""
with working_dir(self.stage.source_path):
kwargs = {'ignore_absent': True, 'backup': False, 'string': False}
match = '^#!/usr/bin/env perl'
perl = self.spec['perl'].command
substitute = "#!{perl}".format(perl=perl)
files = ['bowtie2', ]
filter_file(match, substitute, *files, **kwargs)
match = '^#!/usr/bin/env python'
python = self.spec['python'].command
substitute = "#!{python}".format(python=python)
files = ['bowtie2-build', 'bowtie2-inspect']
filter_file(match, substitute, *files, **kwargs)
def install(self, spec, prefix):
    """Build with make and copy every bowtie2* executable into prefix.bin."""
    make_args = []
    if spec.target.family == 'aarch64':
        # POPCNT is an x86 instruction; disable its use on ARM builds.
        make_args.append('POPCNT_CAPABILITY=0')
    make(*make_args)

    mkdirp(prefix.bin)
    # Installs bowtie2 itself plus the -align/-build/-inspect variants.
    for executable in glob("bowtie2*"):
        install(executable, prefix.bin)
|
rspavel/spack
|
var/spack/repos/builtin/packages/bowtie2/package.py
|
Python
|
lgpl-2.1
| 3,666
|
[
"Bowtie"
] |
2a4758a4b59dcbbde6876ffdcaeaf00db9e372fe3d1e2f6e7e54c15bd9f54a23
|
import numpy as np
class AdaptiveSpike(object):
    """Online-learning spiking network.

    A fixed random encoder projects the input into a population of simple
    spiking neurons; a learned linear decoder maps spikes to outputs and is
    updated each step with a delta-style rule applied only to the neurons
    that spiked.

    Parameters:
        n_inputs: dimensionality of the input ``state`` vector.
        n_outputs: dimensionality of the decoded output.
        n_neurons: number of spiking neurons.
        seed: seed for the internal RandomState (encoders, and the
            stochastic spiking when ``has_neuron_state`` is False).
        learning_rate: scale of the decoder update per time step.
        has_neuron_state: True -> accumulator ("integrate-and-fire" style)
            spiking with persistent per-neuron state; False -> memoryless
            probabilistic spiking.
        smoothing: if > 0, time constant (in steps) of an exponential
            low-pass filter applied to the decoded output.
    """
    def __init__(self, n_inputs, n_outputs, n_neurons,
                 seed=None, learning_rate=1e-3,
                 has_neuron_state=True, smoothing=0):
        self.rng = np.random.RandomState(seed=seed)
        self.compute_encoders(n_inputs, n_neurons)
        self.initialize_decoders(n_neurons, n_outputs)
        self.learning_rate = learning_rate
        self.has_neuron_state = has_neuron_state
        self.input_max = 1.0
        self.is_spiking = True
        self.smoothing = smoothing
        if smoothing > 0:
            # One-step decay factor of the output low-pass filter.
            self.smoothing_decay = np.exp(-1.0 / smoothing)
            self.smoothing_state = np.zeros(n_outputs)
        if has_neuron_state:
            # Per-neuron accumulator used by the spiking nonlinearity.
            self.state = np.zeros(n_neurons)

    def step(self, state, error):
        """Advance one time step: encode, spike, decode, then learn.

        ``error`` is the per-output training signal (scalar or length
        n_outputs); the decoder weights of the neurons that spiked move
        by -learning_rate * error.  Returns the decoded output vector.
        """
        # feed input over the static synapses
        current = self.compute_neuron_input(state)
        # do the neural nonlinearity
        activity = self.neuron(current)
        # apply the learned synapses
        value = self.compute_output(activity)
        # update the synapses with the learning rule
        # BUG FIX: the original indexed with the full np.where() tuple
        # (inconsistent with compute_output, which uses [0]) and broadcast
        # a (n_outputs,) error against an (n_outputs, n_active) slice,
        # which fails whenever n_active != n_outputs.  Reshape the error
        # to a column so row i (output i) moves by -learning_rate*error[i].
        active = np.where(activity > 0)[0]
        if active.size:
            delta = np.asarray(error, dtype=float).reshape(-1, 1)
            self.decoder[:, active] -= delta * self.learning_rate
        # Note: that multiply can be changed to a shift if the
        # learning_rate is a power of 2
        return value

    def compute_encoders(self, n_inputs, n_neurons):
        """Generate the fixed random input synapses (and bias currents).

        NOTE: this algorithm could be changed, and just needs to produce a
        similar distribution of connection weights.  Changing this
        distribution slightly changes the class of functions the neural
        network will be good at learning.
        """
        max_rates = self.rng.uniform(0.5, 1, n_neurons)
        intercepts = self.rng.uniform(-1, 1, n_neurons)
        # Map each neuron's (max_rate, intercept) pair to a gain and bias.
        gain = max_rates / (1 - intercepts)
        bias = -intercepts * gain
        # Unit-length random directions, scaled by the per-neuron gain.
        enc = self.rng.randn(n_neurons, n_inputs)
        enc /= np.linalg.norm(enc, axis=1)[:, None]
        self.encoder = enc * gain[:, None]
        self.bias = bias

    def initialize_decoders(self, n_neurons, n_outputs):
        """Start with all-zero output weights; learning fills them in."""
        self.decoder = np.zeros((n_outputs, n_neurons))

    def compute_neuron_input(self, state):
        """Project the input through the static synapses and add bias.

        There is currently still a multiply here.  But, since self.encoder
        is randomly generated, we can replace this with any easy-to-compute
        system (e.g. round the weights to powers of 2 and use shifts).
        """
        return np.dot(self.encoder, state) + self.bias

    def neuron(self, current):
        """Spiking nonlinearity: return a 0/1 spike vector for this step."""
        if self.has_neuron_state:
            # this is the accumulator implementation for a spike
            self.state = self.state + current
            self.state = np.where(self.state < 0, 0, self.state)
            spikes = np.where(self.state > 1.0, 1, 0)
            # Subtract the threshold from the neurons that fired.
            self.state[spikes > 0] -= 1.0
        else:
            # this is the rng implementation for a spike
            spikes = np.where(self.rng.uniform(0, 1, len(current)) < current,
                              1, 0)
        return spikes

    def compute_output(self, activity):
        """Decode: sum the decoder columns of the active neurons,
        optionally low-pass filtered."""
        decoder_access = self.decoder[:, np.where(activity > 0)[0]]
        if decoder_access.shape[1] > 0:
            value = np.sum(decoder_access, axis=1)
        else:
            value = np.zeros(decoder_access.shape[0])
        if self.smoothing:
            decay = self.smoothing_decay
            self.smoothing_state = (self.smoothing_state * decay +
                                    value * (1.0 - decay))
            value = self.smoothing_state
        return value
|
tcstewar/minimal_adaptive_controller
|
adapt_spike.py
|
Python
|
gpl-3.0
| 3,603
|
[
"NEURON"
] |
50844a8d1b409add356934720b93fcca4cf0d152d6fe2a576366237681860284
|
# Copyright 2015 The TensorFlow Authors. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# ==============================================================================
"""Visitor restricting traversal to only the public tensorflow API."""
from __future__ import absolute_import
from __future__ import division
from __future__ import print_function
import re
from tensorflow.python.util import tf_inspect
class PublicAPIVisitor(object):
  """Visitor to use with `traverse` to visit exactly the public TF API."""

  def __init__(self, visitor):
    """Constructor.

    `visitor` should be a callable suitable as a visitor for `traverse`. It will
    be called only for members of the public TensorFlow API.

    Args:
      visitor: A visitor to call for the public API.
    """
    self._visitor = visitor
    self._root_name = 'tf'

    # Modules/classes we want to suppress entirely.
    self._private_map = {
        # Some implementations have this internal module that we shouldn't
        # expose.
        'tf.flags': ['cpp_flags'],
    }

    # Modules/classes we do not want to descend into if we hit them. Usually,
    # system modules exposed through platforms for compatibility reasons.
    # Each entry maps a module path to a name to ignore in traversal.
    self._do_not_descend_map = {
        'tf': [
            'core',
            'examples',
            'flags',  # Don't add flags
            # TODO(drpng): This can be removed once sealed off.
            'platform',
            # TODO(drpng): This can be removed once sealed.
            'pywrap_tensorflow',
            # TODO(drpng): This can be removed once sealed.
            'user_ops',
            'python',
            'tools',
            'tensorboard',
        ],

        ## Everything below here is legitimate.
        # It'll stay, but it's not officially part of the API.
        'tf.app': ['flags'],
        # Imported for compatibility between py2/3.
        'tf.test': ['mock'],
    }

  @property
  def private_map(self):
    """A map from parents to symbols that should not be included at all.

    This map can be edited, but it should not be edited once traversal has
    begun.

    Returns:
      The map marking symbols to not include.
    """
    return self._private_map

  @property
  def do_not_descend_map(self):
    """A map from parents to symbols that should not be descended into.

    This map can be edited, but it should not be edited once traversal has
    begun.

    Returns:
      The map marking symbols to not explore.
    """
    return self._do_not_descend_map

  def set_root_name(self, root_name):
    """Override the default root name of 'tf'."""
    self._root_name = root_name

  def _is_private(self, path, name):
    """Return whether a name is private."""
    # TODO(wicke): Find out what names to exclude.
    # Explicitly suppressed under this parent?
    if name in self._private_map.get(path, ()):
      return True
    # A couple of dunders are always hidden.
    if name in ('__base__', '__class__'):
      return True
    # Otherwise: a single leading underscore (but not a dunder) is private.
    return name.startswith('_') and not re.match('__.*__$', name)

  def _do_not_descend(self, path, name):
    """Safely queries if a specific fully qualified name should be excluded."""
    return name in self._do_not_descend_map.get(path, ())

  def __call__(self, path, parent, children):
    """Visitor interface, see `traverse` for details."""

    # Avoid long waits in cases of pretty unambiguous failure.
    if tf_inspect.ismodule(parent) and len(path.split('.')) > 10:
      raise RuntimeError('Modules nested too deep:\n%s.%s\n\nThis is likely a '
                         'problem with an accidental public import.' %
                         (self._root_name, path))

    # Includes self._root_name
    full_path = '.'.join([self._root_name, path]) if path else self._root_name

    # Drop non-visible members in place (callers hold a reference to
    # `children`, so mutate via slice assignment rather than rebinding).
    children[:] = [(name, child) for name, child in children
                   if not self._is_private(full_path, name)]

    self._visitor(path, parent, children)

    # Then prune, in place, anything visible but not to be descended into.
    children[:] = [(name, child) for name, child in children
                   if not self._do_not_descend(full_path, name)]
|
npuichigo/ttsflow
|
third_party/tensorflow/tensorflow/tools/common/public_api.py
|
Python
|
apache-2.0
| 4,753
|
[
"VisIt"
] |
9022a9851e172cdd88c886924635b70ef79349dd87f95524100f47879c06aec6
|
#!/usr/bin/python
# -*- coding: utf-8 -*-
#
# Copyright (C) 2017 Lenovo, Inc.
#
# This file is part of Ansible
#
# Ansible is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# Ansible is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with Ansible. If not, see <http://www.gnu.org/licenses/>.
#
# Module to send CLI commands to Lenovo Switches
# Lenovo Networking
#
#
ANSIBLE_METADATA = {'metadata_version': '1.0',
'status': ['preview'],
'supported_by': 'community'}
DOCUMENTATION = '''
---
module: cnos_command
author: "Dave Kasberg (@dkasberg)"
short_description: Execute a single command on devices running Lenovo CNOS
description:
- This module allows you to modify the switch running configuration. It provides a way to
execute a single CNOS command on a switch by evaluating the current running configuration
and executing the command only if the specific setting has not been already configured.
The CNOS command is passed as an argument of the method.
This module uses SSH to manage network device configuration.
The results of the operation will be placed in a directory named 'results'
that must be created by the user in their local directory to where the playbook is run.
For more information about this module from Lenovo and customizing it usage for your
use cases, please visit U(http://systemx.lenovofiles.com/help/index.jsp?topic=%2Fcom.lenovo.switchmgt.ansible.doc%2Fcnos_command.html)
version_added: "2.3"
extends_documentation_fragment: cnos
options:
clicommand:
description:
- This specifies the CLI command as an attribute to this method. The command is
passed using double quotes. The variables can be placed directly on to the CLI
commands or can be invoked from the vars directory.
required: true
default: Null
'''
EXAMPLES = '''
Tasks : The following are examples of using the module cnos_command. These are written in the main.yml file of the tasks directory.
---
- name: Test Command
cnos_command:
host: "{{ inventory_hostname }}"
username: "{{ hostvars[inventory_hostname]['username'] }}"
password: "{{ hostvars[inventory_hostname]['password'] }}"
enablePassword: "{{ hostvars[inventory_hostname]['enablePassword'] }}"
deviceType: "{{ hostvars[inventory_hostname]['deviceType'] }}"
outputfile: "./results/test_command_{{ inventory_hostname }}_output.txt"
clicommand: "display users"
'''
RETURN = '''
return value: |
On successful execution, the method returns a message in JSON format
[Command Applied]
Upon any failure, the method returns an error display string.
'''
import sys
import paramiko
import time
import argparse
import socket
import array
import json
import time
import re
try:
from ansible.module_utils import cnos
HAS_LIB = True
except:
HAS_LIB = False
from ansible.module_utils.basic import AnsibleModule
from collections import defaultdict
def main():
    """Module entry point.

    Reads the Ansible parameters, opens an interactive SSH shell to the
    switch, enters config mode, runs the single CLI command, appends the
    full session transcript to ``outputfile`` and reports success or
    failure back to Ansible via exit_json/fail_json.
    """
    module = AnsibleModule(
        argument_spec=dict(
            clicommand=dict(required=True),
            outputfile=dict(required=True),
            host=dict(required=True),
            deviceType=dict(required=True),
            username=dict(required=True),
            password=dict(required=True, no_log=True),
            enablePassword=dict(required=False, no_log=True),),
        supports_check_mode=False)
    username = module.params['username']
    password = module.params['password']
    enablePassword = module.params['enablePassword']
    cliCommand = module.params['clicommand']
    # NOTE(review): deviceType is read but never used in this function.
    deviceType = module.params['deviceType']
    outputfile = module.params['outputfile']
    hostIP = module.params['host']
    # Accumulates the whole device conversation for the output file.
    output = ""
    # Create instance of SSHClient object
    remote_conn_pre = paramiko.SSHClient()
    # Automatically add untrusted hosts (make sure okay for security policy in your environment)
    remote_conn_pre.set_missing_host_key_policy(paramiko.AutoAddPolicy())
    # initiate SSH connection with the switch
    remote_conn_pre.connect(hostIP, username=username, password=password)
    time.sleep(2)
    # Use invoke_shell to establish an 'interactive session'
    remote_conn = remote_conn_pre.invoke_shell()
    time.sleep(2)
    # Enable and enter configure terminal then send command
    output = output + cnos.waitForDeviceResponse("\n", ">", 2, remote_conn)
    output = output + cnos.enterEnableModeForDevice(enablePassword, 3, remote_conn)
    # Make terminal length = 0 (disable paging so responses are not truncated)
    output = output + cnos.waitForDeviceResponse("terminal length 0\n", "#", 2, remote_conn)
    # Go to config mode
    # NOTE(review): "configure d" is presumably an accepted abbreviation of
    # "configure device" on CNOS -- confirm on the target firmware.
    output = output + cnos.waitForDeviceResponse("configure d\n", "(config)#", 2, remote_conn)
    # Send the CLi command and wait for the config prompt to return
    output = output + cnos.waitForDeviceResponse(cliCommand + "\n", "(config)#", 2, remote_conn)
    # Save it into the file (append mode; successive runs accumulate)
    file = open(outputfile, "a")
    file.write(output)
    file.close()
    # Logic to check when changes occur or not
    errorMsg = cnos.checkOutputForError(output)
    if(errorMsg is None):
        module.exit_json(changed=True, msg="CLI command executed and results saved in file ")
    else:
        module.fail_json(msg=errorMsg)


if __name__ == '__main__':
    main()
|
bjolivot/ansible
|
lib/ansible/modules/network/lenovo/cnos_command.py
|
Python
|
gpl-3.0
| 5,736
|
[
"VisIt"
] |
29b9b5cdd8df222ce2b9046e075a6aed8231f68858a75be71ebcd532708c86bc
|
from check_grad import check_grad
from utils import *
from logistic import *
import matplotlib.pyplot as plt
def run_logistic_regression(hyperparameters):
    """Train logistic regression with batch gradient descent.

    Returns a (num_iterations x 5) array whose rows hold
    [train NLL/N, train CE, train %correct, valid CE, valid %correct].
    (Python 2 code: uses xrange; helpers come from utils/logistic.)
    """
    # TODO specify training data
    train_inputs, train_targets = load_train_small()
    valid_inputs, valid_targets = load_valid()

    # N is number of examples; M is the number of features per example.
    N, M = train_inputs.shape

    # Logistic regression weights
    # TODO:Initialize to random weights here.
    # M features plus one bias weight; small random initialization.
    weights = 0.1*np.random.randn(M+1,1)

    # Verify that your logistic function produces the right gradient.
    # diff should be very close to 0.
    run_check_grad(hyperparameters)

    # Begin learning with gradient descent
    logging = np.zeros((hyperparameters['num_iterations'], 5))
    for t in xrange(hyperparameters['num_iterations']):

        # Find the negative log likelihood and its derivatives w.r.t. the weights.
        f, df, predictions = logistic(weights, train_inputs, train_targets, hyperparameters)

        # Evaluate the prediction.
        cross_entropy_train, frac_correct_train = evaluate(train_targets, predictions)

        if np.isnan(f) or np.isinf(f):
            raise ValueError("nan/inf error")

        # update parameters (gradient averaged over the N training examples)
        weights = weights - hyperparameters['learning_rate'] * df / N

        # Make a prediction on the valid_inputs.
        predictions_valid = logistic_predict(weights, valid_inputs)

        # Evaluate the prediction.
        cross_entropy_valid, frac_correct_valid = evaluate(valid_targets, predictions_valid)

        logging[t] = [f/N, cross_entropy_train, frac_correct_train*100, cross_entropy_valid, frac_correct_valid*100]
    return logging
def run_check_grad(hyperparameters):
    """Performs gradient check on logistic function.

    Prints the discrepancy between the analytic and the finite-difference
    gradient; it should be very close to 0.  (Python 2: print statement.)
    """
    # This creates small random data with 7 examples and
    # 9 dimensions and checks the gradient on that data.
    num_examples = 7
    num_dimensions = 9

    # +1 for the bias weight, matching run_logistic_regression.
    weights = np.random.randn(num_dimensions+1, 1)
    data = np.random.randn(num_examples, num_dimensions)
    targets = (np.random.rand(num_examples, 1) > 0.5).astype(int)

    diff = check_grad(logistic,      # function to check
                      weights,
                      0.001,         # perturbation
                      data,
                      targets,
                      hyperparameters)

    print "diff =", diff
if __name__ == '__main__':
    # TODO: Set hyperparameters
    hyperparameters = {
        'learning_rate': 0.5,
        'weight_regularization': False,  # boolean, True for using Gaussian prior on weights
        'num_iterations': 300,
        'weight_decay': 0.1,  # related to standard deviation of weight prior
    }

    # average over multiple runs (Python 2: xrange)
    num_runs = 1
    logging = np.zeros((hyperparameters['num_iterations'], 5))
    for i in xrange(num_runs):
        logging += run_logistic_regression(hyperparameters)
    logging /= num_runs

    # TODO generate plots
    # Columns 1 and 3 hold train/validation cross entropy per iteration.
    plt.plot(logging[:,1], marker='+', label='training set')
    plt.plot(logging[:,3], marker='*', label='validation set')
    plt.legend(loc='upper right')
    plt.title('Plot of Cross Entropy vs. Iteration Times on training set and validation set')
    plt.xlabel('Iteration Times')
    plt.ylabel('Cross Entropy')
    plt.show()
|
ouyangyike/Machine-Learning-and-Data-Mining
|
Logistic Regression/logistic_regression_template3.py
|
Python
|
mit
| 3,373
|
[
"Gaussian"
] |
c52e8a0de8eefa8a5286587a31880cf3ac4010ed15f388f306a35c1c390607c1
|
# Copyright (C) 2006 CAMP
# Please see the accompanying LICENSE file for further information.
from math import pi, log, sqrt
import numpy as np
from gpaw.utilities import _fact
"""Fourier filtering
This module is an implementation of this Fourier filtering scheme:
*A general and efficient pseudopotential Fourier filtering scheme for
real space methods using mask functions*, Maxim Tafipolsky, Rochus
Schmid, J Chem Phys. 2006 May 7;124:174102.
Only difference is that we use a gaussian for the mask function. The
filtering is used for the projector functions and for the zero
potential."""
# 3D-Fourier transform:
#
# / _ _
# ~ ^ | _ iq.r ^
# f (q) Y (q) = | dr e f (r) Y (r)
# l lm | l lm
# /
#
# Radial part:
#
# /
# ~ __ l | 2
# f (q) = 4||i | r dr j (qr) f (r)
# l | l l
# /
#
# XXX use fast bessel transform !!!
class Filter:
    """Mask-function Fourier filter.

    Implements the mask-function filtering of Tafipolsky & Schmid
    (J. Chem. Phys. 124, 174102, 2006) with a Gaussian mask, used for
    projector functions and the zero potential (see module docstring).
    """
    def __init__(self, r_g, dr_g, gcut, h):
        """Construct filter.

        The radial grid is defined by r(g) and dr/dg(g) (`r_g` and
        `dr_g`), `gcut` is the cutoff grid point, and `h` is the target
        grid spacing used in the calculation."""

        self.gcut = gcut
        rcut = r_g[gcut]
        # Number of reciprocal-space sample points.
        N = 200
        self.r_g = r_g = r_g[:gcut].copy()  # will be modified later!
        self.dr_g = dr_g[:gcut]

        # Matrices for Bessel transform:
        q1 = 5 * pi / h / N
        self.q_i = q_i = q1 * np.arange(N)
        self.c = sqrt(2 * q1 / pi)
        # Precomputed sin(qr), cos(qr) tables used by all transforms.
        self.sinqr_ig = np.sin(q_i[:, None] * r_g) * self.c
        self.cosqr_ig = np.cos(q_i[:, None] * r_g) * self.c

        # Cutoff function: unity below qcut, Gaussian roll-off above
        # (exponent clipped at 400 to avoid underflow warnings).
        qmax = pi / h
        alpha = 1.1
        qcut = qmax / alpha
        icut = 1 + int(qcut / q1)
        beta = 5 * log(10) / (alpha - 1.0)**2
        self.cut_i = np.ones(N)
        self.cut_i[icut:] = np.exp(
            -np.clip(beta * (q_i[icut:] / qcut - 1.0)**2, 0, 400))
        # self.cut_i[icut:] = np.exp(
        #    -np.clip(0, 400, beta * (q_i[icut:] / qcut - 1.0)**2))

        # Mask function: Gaussian chosen to be 1e-3 at rcut.
        gamma = 3 * log(10) / rcut**2
        self.m_g = np.exp(-gamma * r_g**2)

        # We will need to divide by these two!  Remove zeros:
        q_i[0] = 1.0
        r_g[0] = 1.0

    def filter(self, f_g, l=0):
        """Filter radial function.

        The function to be filtered is::

          f(r) ^
          ---- Y (r)
           r    lm

        Output is::

                l ^
          g(r) r  Y (r),
                   lm

        where the filtered radial part ``g(r)`` is returned.
        Supported angular momenta: l = 0..3 (l == 3 untested)."""
        r_g = self.r_g
        q_i = self.q_i
        # Integration weight: f(r) dr / (mask(r) * r).
        fdrim_g = f_g[:self.gcut] * self.dr_g / self.m_g / r_g

        # Spherical Bessel functions expressed via sin/cos so the
        # precomputed sinqr/cosqr tables can be reused:
        #
        #          sin(x)
        # j (x) = ------,
        #  0        x
        #
        #          sin(x)   cos(x)
        # j (x) = ------ - ------,
        #  1         2       x
        #           x
        #
        #           3    1            3
        # j (x) = (--- - -) sin(x) - --- cos(x),
        #  2         3   x             2
        #           x                 x
        #
        #          15    6            15    1
        # j (x) = (-- - ---) sin(x) - (-- - -) cos(x).
        #  3        4    2              3   x
        #          x    x              x
        #
        # Each branch: forward transform to fq_i, apply the reciprocal
        # space cutoff, then transform back to fr_g.
        if l == 0:
            fq_i = np.dot(self.sinqr_ig, fdrim_g * r_g) * self.cut_i
            fr_g = np.dot(fq_i, self.sinqr_ig)
        elif l == 1:
            fq_i = np.dot(self.sinqr_ig, fdrim_g) / q_i
            fq_i -= np.dot(self.cosqr_ig, r_g * fdrim_g)
            fq_i[0] = 0.0
            fq_i *= self.cut_i
            fr_g = np.dot(fq_i / q_i, self.sinqr_ig) / r_g
            fr_g -= np.dot(fq_i, self.cosqr_ig)
        elif l == 2:
            fq_i = 3 * np.dot(self.sinqr_ig, fdrim_g / r_g) / q_i**2
            fq_i -= np.dot(self.sinqr_ig, fdrim_g * r_g)
            fq_i -= 3 * np.dot(self.cosqr_ig, fdrim_g) / q_i
            fq_i[0] = 0.0
            fq_i *= self.cut_i
            fr_g = 3 * np.dot(fq_i / q_i**2, self.sinqr_ig) / r_g**2
            fr_g -= np.dot(fq_i, self.sinqr_ig)
            fr_g -= 3 * np.dot(fq_i / q_i, self.cosqr_ig) / r_g
        elif l == 3:  # This should be tested
            fq_i = 15 * np.dot(self.sinqr_ig, fdrim_g / r_g**2) / q_i**3
            fq_i -= 6 * np.dot(self.sinqr_ig, fdrim_g) / q_i
            fq_i -= 15 * np.dot(self.cosqr_ig, fdrim_g / r_g) / q_i**2
            fq_i += np.dot(self.cosqr_ig, r_g * fdrim_g)
            fq_i[0] = 0.0
            fq_i *= self.cut_i
            fr_g = 15 * np.dot(fq_i / q_i**3, self.sinqr_ig) / r_g**3
            fr_g -= 6 * np.dot(fq_i / q_i, self.sinqr_ig) / r_g
            fr_g -= 15 * np.dot(fq_i / q_i**2, self.cosqr_ig) / r_g**2
            fr_g += np.dot(fq_i, self.cosqr_ig)
        else:
            raise NotImplementedError

        # Re-apply the mask and strip the r**(l+1) factor.
        a_g = np.zeros(len(f_g))
        a_g[:self.gcut] = fr_g * self.m_g / r_g**(l + 1)

        #             n
        #            2 n!     n
        # j (x) = --------- x     for x << 1.
        #  n      (2n + 1)!
        #
        # This formula is used for finding the value of
        #
        #       -l
        # f(r) r    for r -> 0
        #
        c = 2.0**l * _fact[l] / _fact[2 * l + 1] * self.c
        a_g[0] = np.dot(fq_i, q_i**(l + 1)) * c

        return a_g
if __name__ == '__main__':
    # Quick manual self-test: filter a smooth cutoff polynomial and dump
    # the result for plotting.  (Python 2: print statements.)
    rc = 1.1
    gamma = 1.95
    rc2 = rc * gamma
    M = 300
    beta = 0.3
    gcut = 1 + int(M * rc / (beta + rc))
    g_g = np.arange(M)
    # Rational radial grid r(g) = beta*g/(M-g) and its derivative.
    r_g = beta * g_g / (M - g_g)
    drdg_g = beta * M / (M - g_g)**2
    x_g = r_g / rc
    # Smooth step polynomial 1 - 3x^2 + 2x^3, zero beyond gcut.
    p_g = 1 - x_g**2 * (3 - 2 * x_g)
    p_g[gcut:] = 0.0
    #p_g = np.exp(-np.clip(5.0 * r_g**2, 0, 400))
    h = 0.4
    # NOTE(review): Filter's third parameter is documented as a *grid
    # point index* (gcut), yet rc2 is a float radius -- confirm intended.
    f = Filter(r_g, drdg_g, rc2, h)
    pf0_g = f.filter(p_g)
    pf1_g = f.filter(p_g * r_g**1, 1)
    pf2_g = f.filter(p_g * r_g**2, 2)
    if 0:
        # Reciprocal-space dump (disabled).
        for i in range(200):
            print 5 * pi / h * i / 200, pf0_g[i], pf1_g[i], pf2_g[i]
    if 1:
        # Real-space dump out to the mask radius.
        for r, p, pf0, pf1, pf2 in zip(r_g, p_g, pf0_g, pf1_g, pf2_g):
            print r, p, pf0, pf1, pf2
            if r > rc2:
                break
|
qsnake/gpaw
|
gpaw/atom/filter.py
|
Python
|
gpl-3.0
| 6,297
|
[
"GPAW",
"Gaussian"
] |
2544739e677b251ce27eb0f672c81a90db09bd67baf3193abed0e50955dd4980
|
""" TransformationAgent processes transformations found in the transformation database.
"""
import time, re, random, Queue, os, datetime, pickle
from DIRAC import S_OK, S_ERROR
from DIRAC.Core.Base.AgentModule import AgentModule
from DIRAC.Core.Utilities.ThreadPool import ThreadPool
from DIRAC.Core.Utilities.ThreadSafe import Synchronizer
from DIRAC.Core.Utilities.List import breakListIntoChunks
from DIRAC.ConfigurationSystem.Client.Helpers.Operations import Operations
from DIRAC.TransformationSystem.Client.TransformationClient import TransformationClient
from DIRAC.TransformationSystem.Agent.TransformationAgentsUtilities import TransformationAgentsUtilities
from DIRAC.DataManagementSystem.Client.DataManager import DataManager
__RCSID__ = "$Id$"
AGENT_NAME = 'Transformation/TransformationAgent'
gSynchro = Synchronizer()
class TransformationAgent( AgentModule, TransformationAgentsUtilities ):
""" Usually subclass of AgentModule
"""
  def __init__( self, *args, **kwargs ):
    """ c'tor

    Declares configuration placeholders, the thread queue and the
    replica-cache bookkeeping; real values are filled in initialize().
    """
    AgentModule.__init__( self, *args, **kwargs )
    TransformationAgentsUtilities.__init__( self )

    # few parameters
    self.pluginLocation = ''
    self.transformationStatus = []
    self.maxFiles = 0
    self.transformationTypes = []

    # clients (out of the threads)
    self.transfClient = None

    # parameters for the threading
    self.transQueue = Queue.Queue()
    self.transInQueue = []

    # parameters for caching
    self.workDirectory = ''
    self.cacheFile = ''
    self.controlDirectory = ''
    self.dateWriteCache = datetime.datetime.utcnow()

    # Validity of the cache
    self.replicaCache = None
    self.replicaCacheValidity = None
    self.writingCache = False

    # Throttling of transformations with no new Unused files (hours).
    self.noUnusedDelay = 0
    self.unusedFiles = {}
    self.unusedTimeStamp = {}
  def initialize( self ):
    """ standard initialize: read agent options, create clients,
    load the replica cache and start the worker thread pool.
    """
    # few parameters
    self.pluginLocation = self.am_getOption( 'PluginLocation',
                                             'DIRAC.TransformationSystem.Agent.TransformationPlugin' )
    self.transformationStatus = self.am_getOption( 'transformationStatus', ['Active', 'Completing', 'Flush'] )
    self.maxFiles = self.am_getOption( 'MaxFiles', 5000 )

    agentTSTypes = self.am_getOption( 'TransformationTypes', [] )
    if agentTSTypes:
      self.transformationTypes = sorted( agentTSTypes )
    else:
      # No explicit list configured: fall back to the CS-defined types.
      dataProc = Operations().getValue( 'Transformations/DataProcessing', ['MCSimulation', 'Merge'] )
      dataManip = Operations().getValue( 'Transformations/DataManipulation', ['Replication', 'Removal'] )
      self.transformationTypes = sorted( dataProc + dataManip )

    # clients
    self.transfClient = TransformationClient()

    # shifter
    self.am_setOption( 'shifterProxy', 'ProductionManager' )

    # for caching using a pickle file
    # NOTE(review): __readCache() runs before self.cacheFile /
    # self.workDirectory are assigned below -- confirm it derives the
    # cache path itself, otherwise it reads from the wrong location.
    self.__readCache()
    self.workDirectory = self.am_getWorkDirectory()
    self.cacheFile = os.path.join( self.workDirectory, 'ReplicaCache.pkl' )
    self.controlDirectory = self.am_getControlDirectory()
    self.replicaCacheValidity = self.am_getOption( 'ReplicaCacheValidity', 2 )
    self.noUnusedDelay = self.am_getOption( 'NoUnusedDelay', 6 )
    self.dateWriteCache = datetime.datetime.utcnow()

    # Get it threaded: each worker loops forever in self._execute.
    maxNumberOfThreads = self.am_getOption( 'maxThreadsInPool', 1 )
    threadPool = ThreadPool( maxNumberOfThreads, maxNumberOfThreads )
    self.log.info( "Multithreaded with %d threads" % maxNumberOfThreads )

    for i in xrange( maxNumberOfThreads ):
      threadPool.generateJobAndQueueIt( self._execute, [i] )

    self.log.info( "Will treat the following transformation types: %s" % str( self.transformationTypes ) )

    return S_OK()
  def finalize( self ):
    """ graceful finalization: drain the worker threads, then flush
    the replica cache to disk.
    """
    if self.transInQueue:
      # NOTE(review): self.transInThread is not set in the visible
      # __init__; presumably initialized by TransformationAgentsUtilities
      # -- confirm.
      self._logInfo( "Wait for threads to get empty before terminating the agent (%d tasks)" % len( self.transInThread ) )
      # Emptying the queue list makes _execute break out of its loop.
      self.transInQueue = []
      while self.transInThread:
        time.sleep( 2 )
      self.log.info( "Threads are empty, terminating the agent..." )
    self.__writeCache( force = True )
    return S_OK()
  def execute( self ):
    """ Just puts transformations in the queue; the worker threads
    (see _execute) do the actual processing.
    """
    # Get the transformations to process
    res = self.getTransformations()
    if not res['OK']:
      self._logError( "Failed to obtain transformations: %s" % ( res['Message'] ) )
      return S_OK()
    # Process the transformations
    count = 0
    for transDict in res['Value']:
      transID = long( transDict['TransformationID'] )
      if transDict.get( 'InheritedFrom' ):
        # Try and move datasets from the ancestor production
        res = self.transfClient.moveFilesToDerivedTransformation( transDict )
        if not res['OK']:
          self._logError( "Error moving files from an inherited transformation", res['Message'], transID = transID )
        else:
          parentProd, movedFiles = res['Value']
          if movedFiles:
            self._logInfo( "Successfully moved files from %d to %d:" % ( parentProd, transID ), transID = transID )
            for status, val in movedFiles.items():
              self._logInfo( "\t%d files to status %s" % ( val, status ), transID = transID )
      # Enqueue only if not already queued (avoids duplicate processing).
      if transID not in self.transInQueue:
        count += 1
        self.transInQueue.append( transID )
        self.transQueue.put( transDict )
    self._logInfo( "Out of %d transformations, %d put in thread queue" % ( len( res['Value'] ), count ) )
    return S_OK()
  def getTransformations( self ):
    """ Obtain the transformations to be executed - this is executed at the start of every loop (it's really the
    only real thing in the execute()

    With the 'Transformation' option set to 'All' (default), selects by
    status/type; otherwise fetches the single named transformation.
    Returns S_OK(list of transformation dicts) or the client's S_ERROR.
    """
    transName = self.am_getOption( 'Transformation', 'All' )
    if transName == 'All':
      self._logInfo( "Initializing general purpose agent.", method = 'getTransformations' )
      transfDict = {'Status': self.transformationStatus }
      if self.transformationTypes:
        transfDict['Type'] = self.transformationTypes
      res = self.transfClient.getTransformations( transfDict, extraParams = True )
      if not res['OK']:
        self._logError( "Failed to get transformations: %s" % res['Message'], method = 'getTransformations' )
        return res
      transformations = res['Value']
      self._logInfo( "Obtained %d transformations to process" % len( transformations ), method = 'getTransformations' )
    else:
      self._logInfo( "Initializing for transformation %s." % transName, method = "getTransformations" )
      res = self.transfClient.getTransformation( transName, extraParams = True )
      if not res['OK']:
        self._logError( "Failed to get transformation: %s." % res['Message'], method = 'getTransformations' )
        return res
      transformations = [res['Value']]
    return S_OK( transformations )
def _getClients( self ):
""" returns the clients used in the threads
"""
threadTransformationClient = TransformationClient()
threadDataManager = DataManager()
return {'TransformationClient': threadTransformationClient,
'DataManager': threadDataManager}
  def _execute( self, threadID ):
    """ thread - does the real job: processing the transformations to be processed

    Loops forever pulling transformation dicts from self.transQueue;
    exits when a dequeued transID is no longer in self.transInQueue
    (finalize() empties that list to stop the workers).
    """
    # Each thread will have its own clients
    clients = self._getClients()
    while True:
      transDict = self.transQueue.get()
      try:
        transID = long( transDict['TransformationID'] )
        if transID not in self.transInQueue:
          break
        self.transInThread[transID] = ' [Thread%d] [%s] ' % ( threadID, str( transID ) )
        self._logInfo( "Processing transformation %s." % transID, transID = transID )
        startTime = time.time()
        res = self.processTransformation( transDict, clients )
        if not res['OK']:
          self._logInfo( "Failed to process transformation: %s" % res['Message'], transID = transID )
      except Exception, x:
        self._logException( '%s' % x, transID = transID )
      finally:
        # NOTE(review): if long() raises, or if the loop exits via the
        # 'break' above, transID/startTime may be unbound here, making
        # the finally clause itself raise NameError -- confirm and fix.
        if not transID:
          transID = 'None'
        self._logInfo( "Processed transformation in %.1f seconds" % ( time.time() - startTime ), transID = transID )
        self._logVerbose( "%d transformations still in queue" % ( len( self.transInQueue ) - 1 ) )
        self.transInThread.pop( transID, None )
        if transID in self.transInQueue:
          self.transInQueue.remove( transID )
    return S_OK()
  def processTransformation( self, transDict, clients, active = True ):
    """ process a single transformation (in transDict)

    Fetches the Unused files, resolves their replicas, runs the
    transformation plug-in to group files into tasks, and registers the
    tasks.  A 'Flush' transformation is set back to 'Active' once all
    tasks were created.
    NOTE(review): the 'active' parameter is unused in this method.
    """
    transID = transDict['TransformationID']
    replicateOrRemove = transDict['Type'].lower() in ['replication', 'removal']

    # First get the LFNs associated to the transformation
    transFiles = self._getTransformationFiles( transDict, clients )
    if not transFiles['OK']:
      return transFiles
    if not transFiles['Value']:
      # Nothing (new) to do for this transformation.
      return S_OK()

    transFiles = transFiles['Value']
    lfns = [ f['LFN'] for f in transFiles ]

    # Limit the number of LFNs to be considered for replication or removal as they are treated individually
    if replicateOrRemove:
      # __applyReduction is defined elsewhere in this class.
      lfns = self.__applyReduction( lfns )

    unusedFiles = len( lfns )

    # Check the data is available with replicas
    res = self.__getDataReplicas( transDict, lfns, clients, active = not replicateOrRemove )
    if not res['OK']:
      self._logError( "Failed to get data replicas: %s" % res['Message'],
                      method = "processTransformation", transID = transID )
      return res
    dataReplicas = res['Value']

    # Get the plug-in type and create the plug-in object
    plugin = 'Standard'
    if transDict.get( 'Plugin' ):
      plugin = transDict['Plugin']
    self._logInfo( "Processing transformation with '%s' plug-in." % plugin,
                   method = "processTransformation", transID = transID )
    res = self.__generatePluginObject( plugin, clients )
    if not res['OK']:
      return res
    oPlugin = res['Value']

    # Get the plug-in and set the required params
    oPlugin.setParameters( transDict )
    oPlugin.setInputData( dataReplicas )
    oPlugin.setTransformationFiles( transFiles )
    res = oPlugin.generateTasks()
    if not res['OK']:
      self._logError( "Failed to generate tasks for transformation: %s" % res['Message'],
                      method = "processTransformation", transID = transID )
      return res
    tasks = res['Value']

    # Create the tasks: each task is a (storage element, lfn list) pair.
    allCreated = True
    created = 0
    for se, lfns in tasks:
      res = clients['TransformationClient'].addTaskForTransformation( transID, lfns, se )
      if not res['OK']:
        self._logError( "Failed to add task generated by plug-in: %s." % res['Message'],
                        method = "processTransformation", transID = transID )
        allCreated = False
      else:
        created += 1
        unusedFiles -= len( lfns )
    if created:
      self._logInfo( "Successfully created %d tasks for transformation." % created,
                     method = "processTransformation", transID = transID )
    # Remember how many files are still Unused (used for throttling).
    self.unusedFiles[transID] = unusedFiles

    # If this production is to Flush
    if transDict['Status'] == 'Flush' and allCreated:
      res = clients['TransformationClient'].setTransformationParameter( transID, 'Status', 'Active' )
      if not res['OK']:
        self._logError( "Failed to update transformation status to 'Active': %s." % res['Message'],
                        method = "processTransformation", transID = transID )
      else:
        self._logInfo( "Updated transformation status to 'Active'.",
                       method = "processTransformation", transID = transID )
    return S_OK()
######################################################################
#
# Internal methods used by the agent
#
def _getTransformationFiles( self, transDict, clients, statusList = ['Unused', 'ProbInFC'] ):
    """ get the data replicas for a certain transID

    :param dict transDict: transformation description; must contain
        'TransformationID', 'Type' and 'Status'
    :param dict clients: per-thread client objects; uses 'TransformationClient'
    :param list statusList: file statuses to select (the mutable default is
        safe here: it is only re-bound below, never mutated in place)

    :return: S_OK( files ) with the selected transformation files,
        an empty S_OK() when there is nothing new to process,
        or the error structure returned by the client
    """
    transID = transDict['TransformationID']
    # Files that were problematic (either explicit or because SE was banned) may be recovered,
    # and always removing the missing ones
    statusList = statusList + ['MissingInFC'] if transDict['Type'] == 'Removal' else statusList
    res = clients['TransformationClient'].getTransformationFiles( condDict = {'TransformationID':transID,
                                                                              'Status':statusList} )
    if not res['OK']:
        self._logError( "Failed to obtain input data: %s." % res['Message'],
                        method = "_getTransformationFiles", transID = transID )
        return res
    transFiles = res['Value']
    if not transFiles:
        self._logInfo( "No 'Unused' files found for transformation.",
                       method = "_getTransformationFiles", transID = transID )
        if transDict['Status'] == 'Flush':
            # A flushed transformation with no remaining files goes back to 'Active'
            res = clients['TransformationClient'].setTransformationParameter( transID, 'Status', 'Active' )
            if not res['OK']:
                self._logError( "Failed to update transformation status to 'Active': %s." % res['Message'],
                                method = "_getTransformationFiles", transID = transID )
            else:
                self._logInfo( "Updated transformation status to 'Active'.",
                               method = "_getTransformationFiles", transID = transID )
        return S_OK()
    # Check if transformation is kicked
    # An external one-shot marker file forces processing even when nothing changed.
    kickFile = os.path.join( self.controlDirectory, 'KickTransformation_%s' % str( transID ) )
    try:
        kickTrans = os.path.exists( kickFile )
        if kickTrans:
            # Consume the marker so the kick applies only once
            os.remove( kickFile )
    except:
        # Best effort: a failure here must not abort the transformation.
        # NOTE(review): if os.path.exists() itself raised, kickTrans would be
        # undefined below and raise NameError — unlikely, but worth confirming.
        pass
    # Check if something new happened
    now = datetime.datetime.utcnow()
    if not kickTrans:
        # Skip when the file count did not change and the no-unused delay
        # (self.noUnusedDelay, in hours) has not expired yet
        nextStamp = self.unusedTimeStamp.setdefault( transID, now ) + datetime.timedelta( hours = self.noUnusedDelay )
        skip = now < nextStamp
        if len( transFiles ) == self.unusedFiles.get( transID, 0 ) and transDict['Status'] != 'Flush' and skip:
            self._logInfo( "No new 'Unused' files found for transformation.",
                           method = "_getTransformationFiles", transID = transID )
            return S_OK()
    self.unusedTimeStamp[transID] = now
    return S_OK( transFiles )
def __applyReduction( self, lfns ):
    """ Truncate the list of LFNs to at most self.maxFiles entries.

    A random contiguous window is chosen so that successive agent cycles do
    not always consider the same leading files.

    :param list lfns: candidate LFNs
    :return: the (possibly truncated) list of LFNs
    """
    if len( lfns ) <= self.maxFiles:
        # Already within the limit: nothing to cut.
        # (The previous code sliced unconditionally with an off-by-one upper
        # bound, returning self.maxFiles - 1 files and even dropping a file
        # from lists that already fit.)
        return lfns
    # Random start such that a full window of self.maxFiles still fits.
    firstFile = int( random.uniform( 0, len( lfns ) - self.maxFiles ) )
    return lfns[firstFile:firstFile + self.maxFiles]
def __getDataReplicas( self, transDict, lfns, clients, active = True ):
    """ Get the replicas for the LFNs and check their statuses. It first looks within the cache.

    :param dict transDict: transformation description ('TransformationID', 'Status')
    :param list lfns: LFNs to resolve. NOTE: the list is sorted in place and
        consumed (popped from the front) while merging with the cache.
    :param dict clients: per-thread client objects
    :param bool active: if True, request only active replicas from the catalog

    :return: S_OK( { lfn : [ SE, ... ] } )
    """
    method = '__getDataReplicas'
    transID = transDict['TransformationID']
    # A marker file in the work directory forces a cache flush for this transformation
    clearCacheFile = os.path.join( self.workDirectory, 'ClearCache_%s' % str( transID ) )
    try:
        clearCache = os.path.exists( clearCacheFile )
        if clearCache:
            # Consume the one-shot marker
            os.remove( clearCacheFile )
    except:
        # Best effort: failure to inspect/remove the marker must not block processing.
        # NOTE(review): if os.path.exists() raised, clearCache would be undefined
        # below — confirm this cannot happen in practice.
        pass
    if clearCache or transDict['Status'] == 'Flush':
        self._logInfo( "Replica cache cleared", method = method, transID = transID )
        # We may need to get new replicas
        self.__clearCacheForTrans( transID )
    else:
        # If the cache needs to be cleaned
        self.__cleanCache()
    startTime = time.time()
    dataReplicas = {}
    # Sorted order is required by the merge loop below (and mutates the caller's list)
    lfns.sort()
    nLfns = len( lfns )
    self._logVerbose( "Getting replicas for %d files" % nLfns, method = method, transID = transID )
    newLFNs = []
    try:
        cachedReplicaSets = self.replicaCache.get( transID, {} )
        cachedReplicas = {}
        # Merge all sets of replicas
        for crs in cachedReplicaSets:
            cachedReplicas.update( cachedReplicaSets[crs] )
        self._logVerbose( "Number of cached replicas: %d" % len( cachedReplicas ), method = method, transID = transID )
        # Sorted browsing
        # Classic sorted-merge between the requested LFNs and the cached ones:
        # lfns is consumed from the front as cache entries are matched.
        for cacheLfn in sorted( cachedReplicas ):
            while lfns and lfns[0] < cacheLfn:
                # All files until cacheLfn are new
                newLFNs.append( lfns.pop( 0 ) )
            if lfns:
                if lfns[0] == cacheLfn:
                    # We found a match, copy and go to next cache
                    lfn = lfns.pop( 0 )
                    dataReplicas[lfn] = sorted( cachedReplicas[lfn] )
                    continue
            if not lfns or lfns[0] > cacheLfn:
                # Remove files from the cache that are not in the required list
                for crs in cachedReplicaSets:
                    cachedReplicaSets[crs].pop( cacheLfn, None )
        # Add what is left as new files
        newLFNs += lfns
    except Exception:
        self._logException( "Exception when browsing cache", method = method, transID = transID )
    self._logVerbose( "ReplicaCache hit for %d out of %d LFNs" % ( len( dataReplicas ), nLfns ),
                      method = method, transID = transID )
    if newLFNs:
        startTime = time.time()
        self._logVerbose( "Getting replicas for %d files from catalog" % len( newLFNs ),
                          method = method, transID = transID )
        newReplicas = {}
        noReplicas = []
        # Query the catalog in bounded chunks
        for chunk in breakListIntoChunks( newLFNs, 1000 ):
            res = self._getDataReplicasRM( transID, chunk, clients, active = active )
            if res['OK']:
                for lfn, ses in res['Value'].items():
                    if ses:
                        # Keep only the list of SEs as SURLs are useless
                        newReplicas[lfn] = sorted( ses )
                    else:
                        noReplicas.append( lfn )
            else:
                self._logWarn( "Failed to get replicas for %d files" % len( chunk ), res['Message'],
                               method = method, transID = transID )
        if noReplicas:
            self._logWarn( "Found %d files without replicas" % len( noReplicas ),
                           method = method, transID = transID )
        # Cache the freshly resolved replicas for the next cycle
        self.__updateCache( transID, newReplicas )
        dataReplicas.update( newReplicas )
        self._logInfo( "Obtained %d replicas from catalog in %.1f seconds" \
                       % ( len( newReplicas ), time.time() - startTime ),
                       method = method, transID = transID )
    return S_OK( dataReplicas )
def _getDataReplicasRM( self, transID, lfns, clients, active = True ):
    """ Get the replicas for the LFNs and check their statuses, using the replica manager

    Files not reported at all are flagged 'ProbInFC'; files missing from the
    catalog are flagged 'MissingInFC'; failover replicas are skipped when
    only active replicas were requested.
    """
    method = '_getDataReplicasRM'
    startTime = time.time()
    self._logVerbose( "Getting replicas for %d files from catalog" % len( lfns ),
                      method = method, transID = transID )
    # Either only active replicas, or all of them
    query = clients['DataManager'].getActiveReplicas if active else clients['DataManager'].getReplicas
    res = query( lfns )
    if not res['OK']:
        return res
    successful = res['Value']['Successful']
    failed = res['Value']['Failed']
    # Every requested LFN gets an entry, possibly left empty
    dataReplicas = dict( ( lfn, [] ) for lfn in lfns )
    self._logInfo( "Replica results for %d files obtained in %.2f seconds" % ( len( lfns ), time.time() - startTime ),
                   method = method, transID = transID )
    # If files are neither Successful nor Failed, they are set problematic in the FC
    problematicLfns = [lfn for lfn in lfns if lfn not in successful and lfn not in failed]
    if problematicLfns:
        self._logInfo( "%d files found problematic in the catalog" % len( problematicLfns ) )
        res = clients['TransformationClient'].setFileStatusForTransformation( transID, 'ProbInFC', problematicLfns )
        if not res['OK']:
            self._logError( "Failed to update status of problematic files: %s." % res['Message'],
                            method = method, transID = transID )
    # Create a dictionary containing all the file replicas
    failoverLfns = []
    for lfn, replicaDict in successful.items():
        for se in replicaDict:
            #### This should definitely be included in the SE definition (i.e. not used for transformations)
            if not ( active and re.search( 'failover', se.lower() ) ):
                dataReplicas[lfn].append( se )
            else:
                self._logVerbose( "Ignoring failover replica for %s." % lfn, method = method, transID = transID )
        if not dataReplicas[lfn]:
            failoverLfns.append( lfn )
    if failoverLfns:
        self._logInfo( "%d files only found in Failover SE" % len( failoverLfns ) )
    # Make sure that file missing from the catalog are marked in the transformation DB.
    missingLfns = []
    for lfn, reason in failed.items():
        if re.search( "No such file or directory", reason ):
            self._logVerbose( "%s not found in the catalog." % lfn, method = method, transID = transID )
            missingLfns.append( lfn )
    if missingLfns:
        self._logInfo( "%d files not found in the catalog" % len( missingLfns ) )
        res = clients['TransformationClient'].setFileStatusForTransformation( transID, 'MissingInFC', missingLfns )
        if not res['OK']:
            self._logError( "Failed to update status of missing files: %s." % res['Message'],
                            method = method, transID = transID )
    return S_OK( dataReplicas )
@gSynchro
def __updateCache( self, transID, newReplicas ):
    """ Add replicas to the cache

    A new timestamped replica set is stored under the transformation ID.
    """
    transCache = self.replicaCache.setdefault( transID, {} )
    transCache[datetime.datetime.utcnow()] = newReplicas
@gSynchro
def __clearCacheForTrans( self, transID ):
    """ Remove all replicas for a transformation

    A no-op when the transformation has no cached replicas.
    """
    if transID in self.replicaCache:
        del self.replicaCache[transID]
@gSynchro
def __cleanCache( self ):
    """ Cleans the cache

    Drops cached replica sets older than self.replicaCacheValidity (days)
    or empty, removes transformations whose cache became empty, and
    persists the cache to disk if anything changed.
    """
    cacheChanged = False
    try:
        timeLimit = datetime.datetime.utcnow() - datetime.timedelta( days = self.replicaCacheValidity )
        for transID in sorted( self.replicaCache ):
            # NOTE(review): popping entries while iterating .keys() is safe on
            # Python 2 (keys() returns a list); on Python 3 this would need
            # list( ... .keys() ) — confirm before migrating.
            for updateTime in self.replicaCache[transID].keys():
                if updateTime < timeLimit or not self.replicaCache[transID][updateTime]:
                    self._logVerbose( "Clear %d cached replicas for transformation %s" % ( len( self.replicaCache[transID][updateTime] ),
                                                                                          str( transID ) ), method = '__cleanCache' )
                    self.replicaCache[transID].pop( updateTime )
                    cacheChanged = True
            # Remove empty transformations
            if not self.replicaCache[transID]:
                self.replicaCache.pop( transID )
    except Exception:
        self._logException( "Exception when cleaning replica cache:" )
    # Write the cache file
    try:
        if cacheChanged:
            self.__writeCache()
    except Exception:
        self._logException( "While writing replica cache" )
def __readCache( self ):
    """ Reads from the cache

    Loads self.replicaCache from the pickle file self.cacheFile. On any
    failure the cache is reset to an empty dict so the agent can proceed
    without cached replicas.
    """
    try:
        # Binary mode is required for pickle data (mandatory on Windows,
        # harmless elsewhere); the context manager guarantees the file is
        # closed even if unpickling fails.
        with open( self.cacheFile, 'rb' ) as cacheFile:
            self.replicaCache = pickle.load( cacheFile )
        self._logInfo( "Successfully loaded replica cache from file %s" % self.cacheFile )
    except Exception:
        self._logException( "Failed to load replica cache from file %s" % self.cacheFile, method = '__readCache' )
        self.replicaCache = {}
def __writeCache( self, force = False ):
    """ Writes the cache

    The cache is written at most once per hour unless force is True.
    A plain boolean flag (self.writingCache) is used to avoid concurrent
    writes from several threads; a forced write first waits for any
    ongoing write to finish.

    :param bool force: write immediately, bypassing the hourly throttle
    """
    method = '__writeCache'
    now = datetime.datetime.utcnow()
    if ( now - self.dateWriteCache ) < datetime.timedelta( minutes = 60 ) and not force:
        return
    while force and self.writingCache:
        # If writing is forced, wait until the previous write is over
        time.sleep( 10 )
    try:
        startTime = time.time()
        self.dateWriteCache = now
        if self.writingCache:
            # Another thread is already writing: skip this non-forced write.
            # NOTE(review): this check-then-set is not atomic, and the finally
            # clause below clears the flag even on this early return, which
            # can clobber the other thread's flag — confirm this is acceptable.
            return
        self.writingCache = True
        # Protect the copy of the cache
        tmpCache = self.replicaCache.copy()
        # write to a temporary file in order to avoid corrupted files
        tmpFile = self.cacheFile + '.tmp'
        f = open( tmpFile, 'w' )
        pickle.dump( tmpCache, f )
        f.close()
        # Now rename the file as it should be (atomic replace on POSIX)
        os.rename( tmpFile, self.cacheFile )
        self._logVerbose( "Successfully wrote replica cache file %s in %.1f seconds" \
                          % ( self.cacheFile, time.time() - startTime ), method = method )
    except Exception:
        self._logException( "Could not write replica cache file %s" % self.cacheFile, method = method )
    finally:
        # Always release the writing flag
        self.writingCache = False
def __generatePluginObject( self, plugin, clients ):
    """ This simply instantiates the TransformationPlugin class with the relevant plugin name

    :param str plugin: name of the plug-in to instantiate
    :param dict clients: per-thread client objects passed to the plug-in
    :return: S_OK( plugin instance ) or S_ERROR() on import/instantiation failure
    """
    try:
        plugModule = __import__( self.pluginLocation, globals(), locals(), ['TransformationPlugin'] )
    except ImportError as e:
        self._logException( "Failed to import 'TransformationPlugin' %s: %s" % ( plugin, e ),
                            method = "__generatePluginObject" )
        return S_ERROR()
    try:
        plugin_o = getattr( plugModule, 'TransformationPlugin' )( '%s' % plugin,
                                                                  transClient = clients['TransformationClient'],
                                                                  dataManager = clients['DataManager'] )
    except AttributeError as e:
        self._logException( "Failed to create %s(): %s." % ( plugin, e ), method = "__generatePluginObject" )
        return S_ERROR()
    # Configure the plug-in before handing it back. These two calls were
    # previously placed after the return statements and were never executed.
    plugin_o.setDirectory( self.workDirectory )
    plugin_o.setCallback( self.pluginCallback )
    return S_OK( plugin_o )
@gSynchro
def pluginCallback( self, transID, invalidateCache = False ):
    """ Standard plugin callback

    When invalidateCache is set, drop the cached replicas of the
    transformation and persist the cache.
    """
    if not invalidateCache:
        return
    save = False
    try:
        if transID in self.replicaCache:
            self._logInfo( "Removed cached replicas for transformation" , method = 'pluginCallBack', transID = transID )
            self.replicaCache.pop( transID )
            save = True
    except:
        # Best effort: cache removal failures are ignored (original behavior)
        pass
    if save:
        self.__writeCache()
|
sposs/DIRAC
|
TransformationSystem/Agent/TransformationAgent.py
|
Python
|
gpl-3.0
| 26,019
|
[
"DIRAC"
] |
b51567e5ea73b029f931e70a27e66f15b3e050398c892062739dd20116a0ff28
|
#-------------------------------------------------------------------------------
# Name: LIST
# Purpose: Create/hold a list for Human Generator
#
# Author: Rivan
#
# Created: 10/03/2014
# Copyright: (c) Rivan 2015
# Licence: GNU GENERAL PUBLIC LICENSE.
#-------------------------------------------------------------------------------
#!/usr/bin/env python
import random
def giftMale(justMy_Female, justMy_Kingdoms, CFemale, lastName, lastName2, CKingdoms):
    """Return the list of possible 'gift' descriptions for a male character.

    Args:
        justMy_Female: when truthy, use only the custom female names
            (CFemale) and ignore the built-in PeopleFemale list.
        justMy_Kingdoms: when truthy, use only the custom kingdoms
            (CKingdoms) and ignore the built-in Kingdoms list.
        CFemale: custom female first names.
        lastName: surname used for the "married to" entry.
        lastName2: surname used for the "courting" entry.
        CKingdoms: custom kingdom names.
    """
    # Built-in pools live at module level; skip them when the caller asked
    # for "just my" custom entries. (Replaces the old `!= True` comparison.)
    SPeopleFemale = [] if justMy_Female else PeopleFemale
    SKingdoms = [] if justMy_Kingdoms else Kingdoms
    return [
        "A miniature air elemental floats above his shoulder.",
        "He is married to " + random.choice(SPeopleFemale + CFemale) + " " + lastName + ".",
        "He spends most of his evenings training at St. Arianas' cathedral.",
        "He trains with Kerae; the Duelist of Arlent.",
        "He is currently courting " + random.choice(SPeopleFemale + CFemale) + " " + lastName2 + ".",
        "He trained with the arch-wizard of " + random.choice(SKingdoms + CKingdoms) + ".",
    ]
def quirkMale(justMy_Tomes, justMy_Cities, CTomes, CCities):
    """Return the list of possible 'quirk' descriptions for a male character.

    Args:
        justMy_Tomes: when truthy, use only the custom tomes (CTomes) and
            ignore the built-in Tomes list.
        justMy_Cities: when truthy, use only the custom cities (CCities)
            and ignore the built-in Cities list.
        CTomes: custom tome titles.
        CCities: custom city names.
    """
    STomes = [] if justMy_Tomes else Tomes
    SCities = [] if justMy_Cities else Cities
    return [
        "He has six fingers on his " + random.choice(["left", "right"]) + " hand.",
        "He has a sigil painted above his " + random.choice(["left", "right"]) + " eye.",
        "His ears are slightly pointed.",
        "He has a long beard.",
        "He has a braided beard.",
        "He has a well trimmed beard.",
        "He is constantly spinning a coin around his fingers.",
        "He carries " + random.choice(STomes + CTomes) + ".",
        "He has studied several secrets of the universe, but can't find a tome on how to use them.",
        "He has been searching for " + random.choice(STomes + CTomes) + " for " + str(random.randint(2, 8)) + " years.",
        "He died once, and was resurrected by the church of " + random.choice(SCities + CCities) + ".",
    ]
def flawMale(justMy_Kingdoms, CKingdoms):
    """Return the list of possible 'flaw' descriptions for a male character.

    Args:
        justMy_Kingdoms: when truthy, use only the custom kingdoms
            (CKingdoms) and ignore the built-in Kingdoms list.
        CKingdoms: custom kingdom names.
    """
    SKingdoms = [] if justMy_Kingdoms else Kingdoms
    return [
        "He has a scar across his face.",
        "His " + random.choice(["left", "right"]) + " eye is blind.",
        "He has been banished from " + random.choice(SKingdoms + CKingdoms) + ".",
        "He is the exiled prince of " + random.choice(SKingdoms + CKingdoms) + ".",
        # Repeated entries deliberately weight the choice toward milder issues.
        "He has " + random.choice(["minor", "slight", "moderate", "minor", "slight", "moderate", "minor", "slight", "major"]) + " anger issues.",
        "He dabbled in the occult " + str(random.randint(2, 7)) + " years ago.",
    ]
def giftFemale(justMy_Male, justMy_Villages, PeopleMale_Custom, LastName, LastName2, Villages_Custom):
    """Return the list of possible 'gift' descriptions for a female character.

    Args:
        justMy_Male: when truthy, use only the custom male names
            (PeopleMale_Custom) and ignore the built-in PeopleMale list.
        justMy_Villages: when truthy, use only the custom villages
            (Villages_Custom) and ignore the built-in Villages list.
        PeopleMale_Custom: custom male first names.
        LastName: surname used for the "married to" entry.
        LastName2: surname used for the "courted by" entry.
        Villages_Custom: custom village names.
    """
    SPeopleMale = [] if justMy_Male else PeopleMale
    SVillages = [] if justMy_Villages else Villages
    return [
        "She has a heart painted on her " + random.choice(["left", "right"]) + " arm.",
        "A small pale-blue light floats above her " + random.choice(["left", "right"]) + " shoulder.",
        "She has a small bird resting on her shoulder.",
        "She is married to " + random.choice(SPeopleMale + PeopleMale_Custom) + " " + LastName + ".",
        "She is being courted by " + random.choice(SPeopleMale + PeopleMale_Custom) + " " + LastName2 + ".",
        "She spends most of her evenings training at St. Arianas' cathedral.",
        "She trains with Kerae; the Duelist of Arlent.",
        "She has won seven blade duels, and lost only two.",
        "She has an enchantment on her " + random.choice(["left", "right"]) + " hand, keeping it from injuries.",
        "She learned from the mage of " + random.choice(SVillages + Villages_Custom) + ".",
    ]
def quirkFemale(justMy_Tomes, CTomes):
    """Return the list of possible 'quirk' descriptions for a female character.

    Args:
        justMy_Tomes: when truthy, use only the custom tomes (CTomes) and
            ignore the built-in Tomes list.
        CTomes: custom tome titles.
    """
    STomes = [] if justMy_Tomes else Tomes
    return [
        "She has a sigil painted above her " + random.choice(["left", "right"]) + " eye.",
        "She has a small deck of cards she looks through every few minutes.",
        "She always carries " + random.choice(STomes + CTomes) + ".",
        "She always dances as she walks.",
        "She has been searching for " + random.choice(STomes + CTomes) + " for " + str(random.randint(2, 8)) + " years.",
    ]
def flawFemale(justMy_Kingdoms, justMy_Animals, CKingdoms, CAnimals):
    """Return the list of possible 'flaw' descriptions for a female character.

    Args:
        justMy_Kingdoms: when truthy, use only the custom kingdoms
            (CKingdoms) and ignore the built-in Kingdoms list.
        justMy_Animals: when truthy, use only the custom animals (CAnimals)
            and ignore the built-in AnimalsPlural list.
        CKingdoms: custom kingdom names.
        CAnimals: custom plural animal names.
    """
    SKingdoms = [] if justMy_Kingdoms else Kingdoms
    SAnimalsPlural = [] if justMy_Animals else AnimalsPlural
    return [
        "She has a scar across her face.",
        "Her " + random.choice(["left", "right"]) + " eye is blind.",
        "She is the exiled princess of " + random.choice(SKingdoms + CKingdoms) + ".",
        "She is irrationally afraid of " + random.choice(SAnimalsPlural + CAnimals) + ".",
    ]
# ---------------------------------------------------------------------------
# Built-in name and location pools used by the generator functions above.
# Lists with repeated entries rely on duplication to weight random.choice.
# ---------------------------------------------------------------------------
# Male first names.
PeopleMale = [
    "Harvey",
    "Kris",
    "Johnathan",
    "Tony",
    "Salvador",
    "Rico",
    "Zackary",
    "Enrique",
    "Ricardo",
    "Shayne",
    "Sherman",
    "Bennie",
    "Lorenzo",
    "Larry",
    "Julius",
    "Anderson",
    "Ryan",
    "Millard",
    "Harry",
    "John",
    "Jacob",
    "Peter",
    "Edmund",
    "Gordon",
    "Luther",
    "Grant",
    "Dominick",
    "Jesse",
    "Pedro",
    "Joseph",
    "Blake",
    "Edward",
    "Derrick",
]
# Female first names.
PeopleFemale = [
    "Rosalina",
    "Kathrin",
    "Philomena",
    "Serena",
    "Holli",
    "Telma",
    "Isis",
    "Magdalen",
    "Brande",
    "Dot",
    "Carisa",
    "Sofia",
    "Kristel",
    "Keilly",
    "Analisa",
    "Delma",
    "Latisha",
    "Bernetta",
    "Gretta",
    "Gwynevere",
    "Sarah",
    "Susan",
    "Lucy",
    "Kelly",
    "Rochelle",
    "Sonya",
    "Anna",
    "Lydia",
    "Amber",
    "Sophie",  # NOTE(review): "Sophie" appears twice in this list — possibly intentional weighting; confirm.
    "Iris",
    "Erika",
    "Sophie",
]
# Surnames usable on their own.
LastNames = [
    "Smith",
    "Nery",
    "Burne",
    "Fordye",
    "Wyne",
    "Shancey",
    "Folcey",
    "Bourne",
    "Awer",
    "Enthyns",
    "Weills",
    "Cileir",
    "Scrinun",
    "Leonard",
    "Figueroa",
    "Martinez",
    "Briggs",
    "Castillo",
    "Reid",
    "Vera",
]
# Prefixes for compound surnames (combined with LastAndLast suffixes).
LastAndFirst = [
    "Storm",
    "Frost",
    "Fire",
    "Earth",
    "Moon",
    "Sun",
    "Star",
    "Ever",
    "Emerald",
    "Ruby",
    "Diamond",
    "Pure",
    "Steel",
    "Heaven",
    "War",
    "Bright",
]
# Suffixes for compound surnames.
LastAndLast = [
    "leaf",
    "eye",
    "wing",
    "fire",
    "dragon",
    "circle",
    "hammer",
    "wind",
    "drink",
    "spirit",
    "wolf",
    "fox",
    "tree",
    "rose",
    "blade",
    "dagger",
    "rising",
    "wood",
]
# Plural animal names (used e.g. for phobias in flawFemale).
AnimalsPlural = [
    "foxes",
    "mice",
    "dogs",
    "cats",
    "rats",
    "camels",
    "flies",
    "dragons",
    "pegasai",
    "squirrels",
    "sheep",
    "cute little bunny rabbits",
]
# Settlement pools, smallest to largest: Villages < Towns < Cities < Kingdoms.
Towns = [
    "Nandolond",
    "Byford",
    "Hallmibekkr",
    "Ragrove",
    "Keliklif",
    "Geheath",
    "Beewick",
    "Keford",
    "Thoebrook",
    "Abam, the Dragon's Lair",
    "Lefield",
    "Abed",
    "Tabrycg",
    "Pawold",
    "Bawold",
]
Cities = [
    "Chigate",
    "Eithithlum",
    "Cupool",
    "Linevriath",
    "Gamor, the City of Scepters",
    "Eorcot, the City Beneath the Arch",
    "Pastow, the City of Rings",
    "The Five Cities of Hone",
    "Camor, the City of Palaces",
    "Huybury, the City of Spells",
    "Hymoor, the City Beneath the Arch",
    "The Mystical City of Beydon",
    "Dublin City",
]
Villages = [
    "Goldenwall Village",
    "Craftsman's Borough",
    "Feydale Farthing",
    "Lake Borough",
    "Knightstower Borough",
    "Lion's Ward",
    "Bridge Village",
    "Hart's District",
    "Crown District",
    "Merchant's Village",
]
Kingdoms = [
    "The Empire of Thithlonde",
    "The Principality of Arenon",
    "The Hallowed Realm of Londorthon",
    "The Principality of Enond",
    "The Peerless Dominion of Beliene",
    "The Seventh Great Kingdom of Ellormen",
    "The Dominion of Siregul",
    "Ellothlon, the Dominion of Swords",
    "Alithlon, the Magocracy of Lights",
    "The Magocracy of Enor",
    "Edherest, the Theocracy of Rings",
    "Faline, the Kingdom of Chains",
    "Menione, the Dominion of Veils",
    "Manaquia, the kingdom of Blessings",
]
# Book titles (used for carried/sought tomes).
Tomes = [
    "The Tome of Providence",
    "Lima's Leaves of Mythology",
    "The Elysian Compendium of Abal",
    "The Elemental Esoterica of Prosido",
    "The Phitusin Apocrypha",
    "The Tome of Psycrystals",
    "Buda's Parchments of Demiplanes",
    "The Ziri Fragments",
    "The Arcane Slates of Grainan",
]
# Apparel section beginning
# NOTE(review): the random.choice(...) calls inside these list literals run
# ONCE, at import time — so e.g. the chosen color is fixed for the whole
# session instead of being re-rolled per character. Confirm this is intended.
ApparelBeggarMale = [
    "He wears dirty linen rags, which used to be white",
    "He wears a tattered shirt and brown trousers",
    "He wears ragged animal skins tied together",
    "He wears worn trousers",
]
ApparelBeggarFemale = [
    "She wears dirty linen rags, which used to be white",
    "She wears a tattered shirt and brown trousers",
    "She wears a threadbare dress and a torn cloak",
    "She wears a torn long tunic",
    "She wears a sack cloth shirt, and ragged trousers",
]
ApparelProfessionMale = [
    "He wears a lightly faded bright " + random.choice(["red", "blue", "green", "purple"]) + " tunic, and fine black trousers",
    "He wears a leather apron, soot covered linen shirt, and heavy gloves",
    "He wears a hat to protect from the sun, a long dirt-covered tunic, and brown trousers with patched knees",
    "He wears a long brown robe, and a satchel of leaves",
    "He wears a dirty plain tunic, equally plain trousers, and carries a pick",
    "He wears a dirty leather doublet, a long dark cloak, and a strung bow over his shoulder",
    "He wears a slightly rusted plate breastplate, and a pike on his shoulder",
]
ApparelProfessionFemale = [
    "She wears a lightly faded, " + random.choice(["red", "blue", "green"]) + " dress, and a jingling pouch at her side",
    "She wears a leather apron, and soot covered linen shirt, and heavy gloves",
    "She wears a hat to protect from the sun, a long dirt-covered tunic, and brown trousers with patched knees",
    "She wears a long brown robe, and a satchel of leaves",
    "She wears a dirty plain tunic, equally plain trousers, and carries a pick",
    "She wears a dirty leather doublet, a long dark cloak, and a strung bow over her shoulder",
    "She wears a slightly rusted plate breastplate, and a pike on her shoulder",
]
ApparelNobleMale = [
    "He wears a well tailored " + random.choice(["red", "blue", "green", "purple"]) + " shirt, and matching trousers",
    "He wears a long " + random.choice(["red", "blue", "green", "purple"]) + " robe with silver trim",
    "He wears perfectly white robes",
    "He wears a tabbard displaying his device, over fine " + random.choice(["red", "blue", "green", "purple"]) + " linen",
]
ApparelNobleFemale = [
    "She wears a long, flowing dress the same color as her eyes",
    "She wears a " + random.choice(["red", "blue", "green", "purple"]) + " tunic, matching skirt, a silver necklace, a thin circlet, a bracelet, and a single earring",
    "She wears a medium-length " + random.choice(["red", "blue", "green", "purple"]) + " dress with gold trim",
]
ApparelAdventurerMale = [
    "He wears dented platemail",
    "He wears light chainmaille with a heraldic tabbard",
    "He wears a long hardened leather doublet, and a sword at his side",
    "He wears " + random.choice(["red", "blue", "green", "yellow", "purple", "black", "white"]) + " robes, covered in celtic forms which glow a faint white",
    "He wears plain white robes, and cloth wraps on his fists",
    "He wears a wolf pelt coat over bloodstained white linen, and he carries a claymore",
    "He wears a " + random.choice(["red", "blue", "green", "gray", "yellow", "purple", "black", "white"]) + " robe with an arm torn off, and bandages beneath",
]
ApparelAdventurerFemale = [
    "She wears white robes, with a gold cross on the front",
    "She wears a decorative formed breastplate, and leather leggings",
    "She wears a long cloak, covering her leather armor",
    "She wears plain white robes, and cloth wraps on her fists",
    "She wears rosewood armor",
    "She wears a " + random.choice(["red", "blue", "green", "gray", "yellow", "purple", "black", "white"]) + " robe with an arm torn off, and bandages beneath",
    "She wears a torn tunic and a collection of chainmaille pieces that used to be a hawberk",
]
ApparelRoyalMale = [
    "He wears grand " + random.choice(["red", "blue", "green", "purple"]) + " robes, a gold chain, and a large crown with rubies",
    "He wears a chain hauberk with a tabbard displaying fine heraldry, and a gold crown",
]
ApparelRoyalFemale = [
    "She wears a long " + random.choice(["red", "blue", "green", "purple"]) + " dress with gold accents, three gold rings, and a small crown with emeralds",
    "She wears a short dress that matches the large sapphire in her silver crown",
    "She wears a silver dress with a long train, a gold necklace, and a crown with sapphires",
]
# Apparel section end
print("'Human_Generator_LIST.py' loaded")
|
Liaku/CharGen_Fantasy
|
Human_Generator_LIST.py
|
Python
|
gpl-2.0
| 12,932
|
[
"Amber"
] |
2ba5bbcc02a6544ec09d9d2d89e0b44b01d5553b1cf093925e78aafdb7b3e31e
|
#!/usr/bin/env python
########################################################################
# File : dirac-admin-get-pilot-output
# Author : Ricardo Graciani
########################################################################
"""
Retrieve available info about the given pilot
"""
__RCSID__ = "$Id$"
# pylint: disable=wrong-import-position
from DIRAC import exit as DIRACExit
from DIRAC.Core.Base import Script

# Global flag toggled by the -e/--extended switch.
extendedPrint = False


def setExtendedPrint(_arg):
    """Switch callback: enable the extended per-pilot/per-job printout."""
    global extendedPrint
    extendedPrint = True


Script.setUsageMessage('\n'.join([__doc__.split('\n')[1],
                                  'Usage:',
                                  '  %s [option|cfgfile] ... PilotID ...' % Script.scriptName,
                                  'Arguments:',
                                  '  PilotID: Grid ID of the pilot']))
Script.registerSwitch('e', 'extended', 'Get extended printout', setExtendedPrint)
Script.parseCommandLine(ignoreErrors=True)

# Imported after parseCommandLine on purpose (standard DIRAC script pattern).
from DIRAC.Interfaces.API.DiracAdmin import DiracAdmin
from DIRAC.Interfaces.API.Dirac import Dirac

args = Script.getPositionalArgs()
if len(args) < 1:
    Script.showHelp()

diracAdmin = DiracAdmin()
dirac = Dirac()
exitCode = 0
errorList = []
for gridID in args:
    result = diracAdmin.getPilotInfo(gridID)
    if not result['OK']:
        errorList.append((gridID, result['Message']))
        exitCode = 2
    else:
        res = result['Value'][gridID]
        if extendedPrint:
            tab = ''
            for key in [
                'PilotJobReference',
                'Status',
                'OwnerDN',
                'OwnerGroup',
                'SubmissionTime',
                'DestinationSite',
                'GridSite',
            ]:
                if key in res:
                    diracAdmin.log.notice('%s%s: %s' % (tab, key, res[key]))
                    if not tab:
                        tab = '  '
            diracAdmin.log.notice('')
            for jobID in res['Jobs']:
                tab = '  '
                result = dirac.attributes(int(jobID))
                if not result['OK']:
                    errorList.append((gridID, result['Message']))
                    exitCode = 2
                else:
                    job = result['Value']
                    diracAdmin.log.notice('%sJob ID: %s' % (tab, jobID))
                    tab += '  '
                    for key in ['OwnerDN', 'OwnerGroup', 'JobName', 'Status', 'StartExecTime', 'LastUpdateTime', 'EndExecTime']:
                        if key in job:
                            diracAdmin.log.notice('%s%s:' % (tab, key), job[key])
            diracAdmin.log.notice('')
        else:
            # print() call form is valid on both Python 2 and Python 3
            # (the original py2-only print statements broke py3).
            print(diracAdmin.pPrint.pformat({gridID: res}))

for error in errorList:
    print("ERROR %s: %s" % error)

DIRACExit(exitCode)
|
arrabito/DIRAC
|
Interfaces/scripts/dirac-admin-get-pilot-info.py
|
Python
|
gpl-3.0
| 2,543
|
[
"DIRAC"
] |
b9c8d0a00a243a2efa8c4cb8f220595c9ee83f0284d2db8d5a66d771c9d05d1e
|
#!/usr/bin/env python
# -*- coding: utf-8 -*-
"""
Created on Thu Oct 22 13:22:26 2015
@author: agiovann
"""
from builtins import map
from builtins import zip
from builtins import str
from builtins import range
import cv2
import json
import logging
import matplotlib.pyplot as pl
import matplotlib.patches as mpatches
import numpy as np
import os
from past.utils import old_div
from scipy.ndimage.filters import gaussian_filter
from scipy.ndimage import label, center_of_mass
from skimage.morphology import remove_small_objects, remove_small_holes, dilation
import scipy
from scipy import ndimage as ndi
from scipy.optimize import linear_sum_assignment
import shutil
from skimage.filters import sobel
from skimage.morphology import watershed
from skimage.draw import polygon
from skimage.segmentation import find_boundaries
from skimage.io import imsave
import tempfile
import time
from typing import List
import zipfile
from ..motion_correction import tile_and_correct
try:
cv2.setNumThreads(0)
except:
pass
def com(A, d1, d2, d3=None):
    """Center of mass of each spatial component.

    Args:
        A: np.ndarray
            matrix of spatial components (d x K), one column per component

        d1: int
            number of pixels in x-direction

        d2: int
            number of pixels in y-direction

        d3: int
            number of pixels in z-direction (None for 2D data)

    Returns:
        cm: np.ndarray
            center of mass for spatial components (K x 2 or 3)
    """
    if 'csc_matrix' not in str(type(A)):
        A = scipy.sparse.csc_matrix(A)
    # Coordinate grids flattened in the same (row-major) order as the rows
    # of A: one row of coordinates per spatial dimension.
    if d3 is None:
        coords = np.matrix([np.outer(np.ones(d2), np.arange(d1)).ravel(),
                            np.outer(np.arange(d2), np.ones(d1)).ravel()], dtype=A.dtype)
    else:
        coords = np.matrix([
            np.outer(np.ones(d3), np.outer(np.ones(d2), np.arange(d1)).ravel()).ravel(),
            np.outer(np.ones(d3), np.outer(np.arange(d2), np.ones(d1)).ravel()).ravel(),
            np.outer(np.arange(d3), np.outer(np.ones(d2), np.ones(d1)).ravel()).ravel()],
            dtype=A.dtype)
    # Weight-normalized coordinate average, one row per component.
    centers = (coords * A / A.sum(axis=0)).T
    return np.array(centers)
def extract_binary_masks(Y, min_area_size=30, min_hole_size=15, gSig=20, expand_method='closing', selem=np.ones((3, 3))):
    """Extract binary masks by using adaptive thresholding on a structural channel,
    Hendrik added support of pre-selected binary masks, where features are separated with watershed algorithm.
    Args:
        Y: caiman movie object or binary/boolean mask
            - movie of the structural channel (assumed motion corrected)
            - if mask, should have same size as movie frames. Components at borders should touch the
            border to ensure accurate feature detection.
        min_area_size: int
            ignore components with smaller size
        min_hole_size: int
            fill in holes up to that size (donuts)
        gSig: int
            average radius of cell, very important for accurate feature detection
        expand_method: string
            method to expand binary masks (morphological closing or dilation)
        selem: np.array
            morphological element with which to expand binary masks
    Returns:
        A: sparse column format matrix
            matrix of binary masks to be used for CNMF seeding
        labels: np.array
            labelled-component image (one integer label per detected feature)
        mR: np.array
            mean image used to detect cell boundaries
    """
    if len(Y.shape) == 3:
        # if input is a movie, perform component detection on mean image
        mR = Y.mean(axis=0)
        img = cv2.blur(mR, (gSig, gSig))
        # stretch to the full 8-bit range before adaptive thresholding
        img = (img - np.min(img)) / (np.max(img) - np.min(img)) * 255.
        img = img.astype(np.uint8)
        # NOTE(review): cv2.adaptiveThreshold requires an odd blockSize, so
        # this assumes gSig is odd — confirm with callers.
        th = cv2.adaptiveThreshold(img, np.max(
            img), cv2.ADAPTIVE_THRESH_GAUSSIAN_C, cv2.THRESH_BINARY, gSig, 0)
        # NOTE(review): newer scikit-image renamed min_size -> area_threshold
        # for remove_small_holes — confirm the pinned scikit-image version.
        th = remove_small_holes(th > 0, min_size=min_hole_size)
        th = remove_small_objects(th, min_size=min_area_size)
    else:
        # transform Y into a binary mask
        th = np.asarray(Y, dtype=int)
        th = (th > 0)*1
        # transform image to a local distance map from the nearest edge
        distance = ndi.distance_transform_edt(th)
        # apply threshold of expected cell radius to get one maximum per cell and restore result to a binary image
        local_max = distance.copy()
        local_max[local_max >= gSig/2] = gSig
        local_max[local_max < gSig/2] = 0
        local_max[local_max > 0] = 1
        local_max = local_max.astype('bool')
        # generate markers of isolated features
        markers = ndi.label(local_max)[0]
        # remove any feature smaller than the expected cell size (avoids irregular cells being counted multiple times)
        sizes = np.bincount(markers.ravel())
        mask_sizes = sizes > gSig
        mask_sizes[0] = 0  # remove background count
        local_max_cleaned = mask_sizes[markers]
        # label cleaned-up features
        markers_cleaned = ndi.label(local_max_cleaned)[0]
        # apply watershed algorithm to the original binary mask using the markers to create individually labelled areas
        labels = watershed(-distance, markers_cleaned, mask=th)
        # set boundaries of features to 0 to avoid merged/overlapped features
        boundaries = find_boundaries(labels, connectivity=2, mode='outer', background=0)
        labels[boundaries] = 0
        # name variables the same as in the other function for further processing
        mR = th  # this is the initial binary mask that was used to detect cell boundaries
        th = labels  # this is the processed, watershed mask
    # assigns every separate feature an individual numerical value
    areas = label(th)
    # dense boolean matrix with one flattened-mask column per component
    # (despite the original "sparse csc" comment, np.zeros is dense)
    A = np.zeros((np.prod(th.shape), areas[1]), dtype=bool)
    for i in range(areas[1]):
        temp = (areas[0] == i + 1)  # here, each feature is saved as a single component in its own frame
        if expand_method == 'dilation':
            temp = dilation(temp, selem=selem)
        elif expand_method == 'closing':
            # NOTE(review): this branch also applies dilation; morphological
            # closing (dilation followed by erosion) may have been intended —
            # confirm before changing behavior.
            temp = dilation(temp, selem=selem)
        # parse the current component 'temp' into the sparse column matrix
        A[:, i] = temp.flatten('F')
    return A, areas[0], mR
def extract_binary_masks_from_structural_channel(Y, min_area_size=30, min_hole_size=15, gSig=5, expand_method='closing', selem=np.ones((3, 3))):
    """Extract binary masks by using adaptive thresholding on a structural channel

    Args:
        Y: caiman movie object
            movie of the structural channel (assumed motion corrected)

        min_area_size: int
            ignore components with smaller size

        min_hole_size: int
            fill in holes up to that size (donuts)

        gSig: int
            average radius of cell

        expand_method: string
            method to expand binary masks (morphological 'closing' or 'dilation')

        selem: np.array
            morphological element with which to expand binary masks

    Returns:
        A: bool ndarray (# pixels x # components)
            matrix of binary masks to be used for CNMF seeding

        mR: np.array
            mean image used to detect cell boundaries
    """
    mR = Y.mean(axis=0)

    # smooth the mean image and rescale to 8-bit before adaptive thresholding
    img = cv2.blur(mR, (gSig, gSig))
    img = (img - np.min(img)) / (np.max(img) - np.min(img)) * 255.
    img = img.astype(np.uint8)

    th = cv2.adaptiveThreshold(img, np.max(
        img), cv2.ADAPTIVE_THRESH_GAUSSIAN_C, cv2.THRESH_BINARY, gSig, 0)
    # clean up the binary image: fill small holes, drop small specks
    th = remove_small_holes(th > 0, min_size=min_hole_size)
    th = remove_small_objects(th, min_size=min_area_size)
    areas = label(th)
    A = np.zeros((np.prod(th.shape), areas[1]), dtype=bool)

    for i in range(areas[1]):
        temp = (areas[0] == i + 1)
        if expand_method == 'dilation':
            temp = dilation(temp, selem=selem)
        elif expand_method == 'closing':
            # BUG FIX: this branch previously called dilation(), making the
            # 'closing' option behave identically to 'dilation'.
            from skimage.morphology import closing
            temp = closing(temp, selem=selem)

        # each component becomes one column, pixels in Fortran order
        A[:, i] = temp.flatten('F')

    return A, mR
def mask_to_2d(mask):
    """Flatten a mask (or stack of masks) into a sparse (pixels x components) matrix.

    A 3D input of shape (n_components, d1, d2) yields d1*d2 rows and one
    column per component; a 2D input yields a single column. Pixels are
    flattened in Fortran (column-major) order.
    """
    if mask.ndim > 2:
        _, height, width = np.shape(mask)
        n_pixels = np.prod((height, width))
        # move the component axis last, then flatten each component column-major
        stacked = mask[:].transpose([1, 2, 0])
        flat = np.reshape(stacked, (n_pixels, -1,), order='F')
    else:
        n_pixels = np.prod(np.shape(mask))
        flat = np.reshape(mask, (n_pixels, -1,), order='F')
    return scipy.sparse.coo_matrix(flat)
def get_distance_from_A(masks_gt, masks_comp, min_dist=10):
    """Compute the IoU-based distance matrices between two stacks of masks.

    Each stack (components x d1 x d2) is flattened into a sparse
    pixels-by-components matrix, per-component centroids are computed, and
    the pair is forwarded to distance_masks.
    """
    _, d1, d2 = np.shape(masks_gt)
    n_pixels = np.prod((d1, d2))

    def _flatten(stack):
        # component axis last, then column-major flatten of each component
        return scipy.sparse.csc_matrix(np.reshape(
            stack[:].transpose([1, 2, 0]), (n_pixels, -1,), order='F'))

    A_ben = _flatten(masks_gt)
    A_cnmf = _flatten(masks_comp)
    cm_ben = [scipy.ndimage.center_of_mass(mm) for mm in masks_gt]
    cm_cnmf = [scipy.ndimage.center_of_mass(mm) for mm in masks_comp]
    return distance_masks([A_ben, A_cnmf], [cm_ben, cm_cnmf], min_dist)
def nf_match_neurons_in_binary_masks(masks_gt, masks_comp, thresh_cost=.7, min_dist=10, print_assignment=False,
                                     plot_results=False, Cn=None, labels=['Session 1','Session 2'], cmap='gray', D=None, enclosed_thr=None):
    """
    Match neurons expressed as binary masks. Uses Hungarian matching algorithm

    Args:
        masks_gt: bool ndarray  components x d1 x d2
            ground truth masks

        masks_comp: bool ndarray  components x d1 x d2
            mask to compare to

        thresh_cost: double
            max cost accepted for a match to count as a true positive

        min_dist: min distance between cm (centroids); pairs further apart
            are never compared

        print_assignment:
            for hungarian algorithm

        plot_results: bool
            if True, draw matched and mismatched contours over Cn

        Cn:
            correlation image or median, used as plot background

        labels: list of two str
            names of the two sessions, used in plot titles/legends

        cmap: colormap for the background image

        D: list of ndarrays
            list of distances matrices; computed from the masks when None

        enclosed_thr: float
            if not None set distance to at most the specified value when ground truth is a subset of inferred

    Returns:
        idx_tp_1:
            indices true pos ground truth mask

        idx_tp_2:
            indices true pos comp

        idx_fn_1:
            indices false neg

        idx_fp_2:
            indices false pos

        performance: dict
            recall, precision, accuracy and f1_score of the matching
    """
    _, d1, d2 = np.shape(masks_gt)
    dims = d1, d2

    # transpose to have a sparse list of components, then reshaping it to have a 1D matrix red in the Fortran style
    A_ben = scipy.sparse.csc_matrix(np.reshape(
        masks_gt[:].transpose([1, 2, 0]), (np.prod(dims), -1,), order='F'))
    A_cnmf = scipy.sparse.csc_matrix(np.reshape(
        masks_comp[:].transpose([1, 2, 0]), (np.prod(dims), -1,), order='F'))

    # have the center of mass of each element of the two masks
    cm_ben = [scipy.ndimage.center_of_mass(mm) for mm in masks_gt]
    cm_cnmf = [scipy.ndimage.center_of_mass(mm) for mm in masks_comp]

    if D is None:
        #% find distances and matches
        # find the distance between each masks
        D = distance_masks([A_ben, A_cnmf], [cm_ben, cm_cnmf],
                           min_dist, enclosed_thr=enclosed_thr)
        level = 0.98
    else:
        level = .98

    # optimal one-to-one assignment between the two sets of components
    matches, costs = find_matches(D, print_assignment=print_assignment)
    matches = matches[0]
    costs = costs[0]

    #%% compute precision and recall
    # a matched pair counts as a true positive only when its cost is below thresh_cost
    TP = np.sum(np.array(costs) < thresh_cost) * 1.
    FN = np.shape(masks_gt)[0] - TP
    FP = np.shape(masks_comp)[0] - TP
    TN = 0

    performance = dict()
    performance['recall'] = old_div(TP, (TP + FN))
    performance['precision'] = old_div(TP, (TP + FP))
    performance['accuracy'] = old_div((TP + TN), (TP + FP + FN + TN))
    performance['f1_score'] = 2 * TP / (2 * TP + FP + FN)
    logging.debug(performance)

    #%%
    # map assignment indices back to component indices in each set
    idx_tp = np.where(np.array(costs) < thresh_cost)[0]
    idx_tp_ben = matches[0][idx_tp]    # ground truth
    idx_tp_cnmf = matches[1][idx_tp]   # algorithm - comp
    # ground-truth components without an accepted match are false negatives
    idx_fn = np.setdiff1d(
        list(range(np.shape(masks_gt)[0])), matches[0][idx_tp])
    # comparison components without an accepted match are false positives
    idx_fp = np.setdiff1d(
        list(range(np.shape(masks_comp)[0])), matches[1][idx_tp])
    idx_fp_cnmf = idx_fp

    idx_tp_gt, idx_tp_comp, idx_fn_gt, idx_fp_comp = idx_tp_ben, idx_tp_cnmf, idx_fn, idx_fp_cnmf

    if plot_results:
        try:   # Plotting function
            pl.rcParams['pdf.fonttype'] = 42
            font = {'family': 'Myriad Pro',
                    'weight': 'regular',
                    'size': 10}
            pl.rc('font', **font)
            lp, hp = np.nanpercentile(Cn, [5, 95])
            ses_1 = mpatches.Patch(color='red', label=labels[0])
            ses_2 = mpatches.Patch(color='white', label=labels[1])
            # left panel: matched components (white=comp, red=ground truth)
            pl.subplot(1, 2, 1)
            pl.imshow(Cn, vmin=lp, vmax=hp, cmap=cmap)
            [pl.contour(norm_nrg(mm), levels=[level], colors='w', linewidths=1)
             for mm in masks_comp[idx_tp_comp]]
            [pl.contour(norm_nrg(mm), levels=[level], colors='r',
                        linewidths=1) for mm in masks_gt[idx_tp_gt]]
            if labels is None:
                pl.title('MATCHES')
            else:
                pl.title('MATCHES: ' + labels[1] + '(w), ' + labels[0] + '(r)')
            pl.legend(handles=[ses_1, ses_2])
            pl.show()
            pl.axis('off')
            # right panel: false positives (white) and false negatives (red)
            pl.subplot(1, 2, 2)
            pl.imshow(Cn, vmin=lp, vmax=hp, cmap=cmap)
            [pl.contour(norm_nrg(mm), levels=[level], colors='w', linewidths=1)
             for mm in masks_comp[idx_fp_comp]]
            [pl.contour(norm_nrg(mm), levels=[level], colors='r',
                        linewidths=1) for mm in masks_gt[idx_fn_gt]]
            if labels is None:
                pl.title('FALSE POSITIVE (w), FALSE NEGATIVE (r)')
            else:
                pl.title(labels[1] + '(w), ' + labels[0] + '(r)')
            pl.legend(handles=[ses_1, ses_2])
            pl.show()
            pl.axis('off')
        except Exception as e:
            logging.warning("not able to plot precision recall: graphics failure")
            logging.warning(e)
    return idx_tp_gt, idx_tp_comp, idx_fn_gt, idx_fp_comp, performance
def register_ROIs(A1, A2, dims, template1=None, template2=None, align_flag=True,
                  D=None, max_thr = 0, use_opt_flow = True, thresh_cost=.7,
                  max_dist=10, enclosed_thr=None, print_assignment=False,
                  plot_results=False, Cn=None, cmap='viridis'):
    """
    Register ROIs across different sessions using an intersection over union
    metric and the Hungarian algorithm for optimal matching

    Args:
        A1: ndarray or csc_matrix  # pixels x # of components
            ROIs from session 1

        A2: ndarray or csc_matrix  # pixels x # of components
            ROIs from session 2

        dims: list or tuple
            dimensionality of the FOV

        template1: ndarray dims
            template from session 1

        template2: ndarray dims
            template from session 2

        align_flag: bool
            align the templates before matching

        D: ndarray
            matrix of distances in the event they are pre-computed

        max_thr: scalar
            max threshold parameter before binarization

        use_opt_flow: bool
            use dense optical flow to align templates

        thresh_cost: scalar
            maximum distance considered

        max_dist: scalar
            max distance between centroids

        enclosed_thr: float
            if not None set distance to at most the specified value when ground
            truth is a subset of inferred

        print_assignment: bool
            print pairs of matched ROIs

        plot_results: bool
            create a plot of matches and mismatches

        Cn: ndarray
            background image for plotting purposes

        cmap: string
            colormap for background image

    Returns:
        matched_ROIs1: list
            indices of matched ROIs from session 1

        matched_ROIs2: list
            indices of matched ROIs from session 2

        non_matched1: list
            indices of non-matched ROIs from session 1

        non_matched2: list
            indices of non-matched ROIs from session 2

        performance:  list
            (precision, recall, accuracy, f_1 score) with A1 taken as ground truth

        A2: csc_matrix  # pixels x # of components
            ROIs from session 2 aligned to session 1
    """
    # if 'csc_matrix' not in str(type(A1)):
    #     A1 = scipy.sparse.csc_matrix(A1)
    # if 'csc_matrix' not in str(type(A2)):
    #     A2 = scipy.sparse.csc_matrix(A2)

    # work on dense arrays here; converted to sparse again before matching
    if 'ndarray' not in str(type(A1)):
        A1 = A1.toarray()
    if 'ndarray' not in str(type(A2)):
        A2 = A2.toarray()

    # alignment needs both templates
    if template1 is None or template2 is None:
        align_flag = False

    # pixel coordinate grids used to build the remap fields below
    x_grid, y_grid = np.meshgrid(np.arange(0., dims[1]).astype(
        np.float32), np.arange(0., dims[0]).astype(np.float32))

    if align_flag:      # first align ROIs from session 2 to the template from session 1
        # normalize both templates to [0, 1] (in place)
        template1 -= template1.min()
        template1 /= template1.max()
        template2 -= template2.min()
        template2 /= template2.max()

        if use_opt_flow:
            # rescale to uint8 0-255 for OpenCV
            template1_norm = np.uint8(template1*(template1 > 0)*255)
            template2_norm = np.uint8(template2*(template2 > 0)*255)
            # NOTE(review): *_norm is already scaled to uint8 0-255 above;
            # multiplying by 255 again here wraps around under uint8
            # arithmetic. Looks like a bug -- confirm whether the extra *255
            # should be dropped.
            flow = cv2.calcOpticalFlowFarneback(np.uint8(template1_norm*255),
                                                np.uint8(template2_norm*255),
                                                None,0.5,3,128,3,7,1.5,0)
            x_remap = (flow[:,:,0] + x_grid).astype(np.float32)
            y_remap = (flow[:,:,1] + y_grid).astype(np.float32)
        else:
            # rigid/piecewise-rigid alignment via tile_and_correct shifts
            template2, shifts, _, xy_grid = tile_and_correct(template2, template1 - template1.min(),
                                                             [int(
                                                                 dims[0] / 4), int(dims[1] / 4)], [16, 16], [10, 10],
                                                             add_to_movie=template2.min(), shifts_opencv=True)
            dims_grid = tuple(np.max(np.stack(xy_grid, axis=0), axis=0) -
                              np.min(np.stack(xy_grid, axis=0), axis=0) + 1)
            _sh_ = np.stack(shifts, axis=0)
            shifts_x = np.reshape(_sh_[:, 1], dims_grid,
                                  order='C').astype(np.float32)
            shifts_y = np.reshape(_sh_[:, 0], dims_grid,
                                  order='C').astype(np.float32)
            x_remap = (-np.resize(shifts_x, dims) + x_grid).astype(np.float32)
            y_remap = (-np.resize(shifts_y, dims) + y_grid).astype(np.float32)

        # warp every session-2 component with the computed remap field
        A_2t = np.reshape(A2, dims + (-1,), order='F').transpose(2, 0, 1)
        A2 = np.stack([cv2.remap(img.astype(np.float32), x_remap,
                                 y_remap, cv2.INTER_NEAREST) for img in A_2t], axis=0)
        A2 = np.reshape(A2.transpose(1, 2, 0),
                        (A1.shape[0], A_2t.shape[0]), order='F')

    # zero out pixels below max_thr times each component's peak
    A1 = np.stack([a*(a>max_thr*a.max()) for a in A1.T]).T
    A2 = np.stack([a*(a>max_thr*a.max()) for a in A2.T]).T

    if D is None:
        if 'csc_matrix' not in str(type(A1)):
            A1 = scipy.sparse.csc_matrix(A1)
        if 'csc_matrix' not in str(type(A2)):
            A2 = scipy.sparse.csc_matrix(A2)

        # centroids and binarized masks feed the IoU distance computation
        cm_1 = com(A1, dims[0], dims[1])
        cm_2 = com(A2, dims[0], dims[1])
        A1_tr = (A1 > 0).astype(float)
        A2_tr = (A2 > 0).astype(float)
        D = distance_masks([A1_tr, A2_tr], [cm_1, cm_2],
                           max_dist, enclosed_thr=enclosed_thr)

    # optimal one-to-one assignment (Hungarian algorithm)
    matches, costs = find_matches(D, print_assignment=print_assignment)
    matches = matches[0]
    costs = costs[0]

    #%% store indices
    # only pairs with cost below thresh_cost count as real matches
    idx_tp = np.where(np.array(costs) < thresh_cost)[0]
    if len(idx_tp) > 0:
        matched_ROIs1 = matches[0][idx_tp]     # ground truth
        matched_ROIs2 = matches[1][idx_tp]     # algorithm - comp
        non_matched1 = np.setdiff1d(
            list(range(D[0].shape[0])), matches[0][idx_tp])
        non_matched2 = np.setdiff1d(
            list(range(D[0].shape[1])), matches[1][idx_tp])
        TP = np.sum(np.array(costs) < thresh_cost) * 1.
    else:
        # no acceptable matches at all
        TP = 0.
        plot_results = False
        matched_ROIs1 = []
        matched_ROIs2 = []
        non_matched1 = list(range(D[0].shape[0]))
        non_matched2 = list(range(D[0].shape[1]))

    #%% compute precision and recall
    FN = D[0].shape[0] - TP
    FP = D[0].shape[1] - TP
    TN = 0

    performance = dict()
    performance['recall'] = old_div(TP, (TP + FN))
    performance['precision'] = old_div(TP, (TP + FP))
    performance['accuracy'] = old_div((TP + TN), (TP + FP + FN + TN))
    performance['f1_score'] = 2 * TP / (2 * TP + FP + FN)
    logging.info(performance)

    if plot_results:
        # fall back to a template (or the summed footprints) as background
        if Cn is None:
            if template1 is not None:
                Cn = template1
            elif template2 is not None:
                Cn = template2
            else:
                Cn = np.reshape(A1.sum(1) + A2.sum(1), dims, order='F')

        masks_1 = np.reshape(A1.toarray(), dims + (-1,),
                             order='F').transpose(2, 0, 1)
        masks_2 = np.reshape(A2.toarray(), dims + (-1,),
                             order='F').transpose(2, 0, 1)
        # try : #Plotting function
        level = 0.98
        pl.rcParams['pdf.fonttype'] = 42
        font = {'family': 'Myriad Pro',
                'weight': 'regular',
                'size': 10}
        pl.rc('font', **font)
        lp, hp = np.nanpercentile(Cn, [5, 95])
        # left panel: matched ROIs from both sessions
        pl.subplot(1, 2, 1)
        pl.imshow(Cn, vmin=lp, vmax=hp, cmap=cmap)
        [pl.contour(norm_nrg(mm), levels=[level], colors='w', linewidths=1)
         for mm in masks_1[matched_ROIs1]]
        [pl.contour(norm_nrg(mm), levels=[level], colors='r', linewidths=1)
         for mm in masks_2[matched_ROIs2]]
        pl.title('Matches')
        pl.axis('off')
        # right panel: ROIs without a match
        pl.subplot(1, 2, 2)
        pl.imshow(Cn, vmin=lp, vmax=hp, cmap=cmap)
        [pl.contour(norm_nrg(mm), levels=[level], colors='w', linewidths=1)
         for mm in masks_1[non_matched1]]
        [pl.contour(norm_nrg(mm), levels=[level], colors='r', linewidths=1)
         for mm in masks_2[non_matched2]]
        pl.title('Mismatches')
        pl.axis('off')
    # except Exception as e:
    #     logging.warning("not able to plot precision recall usually because we are on travis")
    #     logging.warning(e)

    return matched_ROIs1, matched_ROIs2, non_matched1, non_matched2, performance, A2
def register_multisession(A, dims, templates = [None], align_flag=True,
                          max_thr = 0, use_opt_flow = True, thresh_cost=.7,
                          max_dist=10, enclosed_thr=None):
    """
    Register ROIs across multiple sessions using an intersection over union metric
    and the Hungarian algorithm for optimal matching. Registration occurs by
    aligning session 1 to session 2, keeping the union of the matched and
    non-matched components to register with session 3 and so on.

    Args:
        A: list of ndarray or csc_matrix matrices # pixels x # of components
            ROIs from each session

        dims: list or tuple
            dimensionality of the FOV

        templates: list of ndarray matrices of size dims
            templates from each session (a single-element list is broadcast
            to all sessions)

        align_flag: bool
            align the templates before matching

        max_thr: scalar
            max threshold parameter before binarization

        use_opt_flow: bool
            use dense optical flow to align templates

        thresh_cost: scalar
            maximum distance considered

        max_dist: scalar
            max distance between centroids

        enclosed_thr: float
            if not None set distance to at most the specified value when ground
            truth is a subset of inferred

    Returns:
        A_union: csc_matrix # pixels x # of total distinct components
            union of all kept ROIs

        assignments: ndarray int of size # of total distinct components x # sessions
            element [i,j] = k if component k from session j is mapped to component
            i in the A_union matrix. If there is no match the value is NaN

        matchings: list of lists
            matchings[i][j] = k means that component j from session i is represented
            by component k in A_union
    """
    n_sessions = len(A)
    templates = list(templates)
    if len(templates) == 1:
        templates = n_sessions*templates

    if n_sessions <= 1:
        raise Exception('number of sessions must be greater than 1')

    # work with dense arrays throughout the accumulation loop
    A = [a.toarray() if 'ndarray' not in str(type(a)) else a for a in A]

    A_union = A[0].copy()
    matchings = []
    matchings.append(list(range(A_union.shape[-1])))

    for sess in range(1, n_sessions):
        reg_results = register_ROIs(A[sess], A_union, dims,
                                    template1=templates[sess],
                                    template2=templates[sess - 1],
                                    # BUG FIX: align_flag was previously
                                    # hard-coded to True, silently ignoring
                                    # the caller's setting
                                    align_flag=align_flag,
                                    max_thr=max_thr, use_opt_flow=use_opt_flow,
                                    thresh_cost=thresh_cost, max_dist=max_dist,
                                    enclosed_thr=enclosed_thr)

        mat_sess, mat_un, nm_sess, nm_un, _, A2 = reg_results
        logging.info(len(mat_sess))
        # replace matched union components with their latest footprints and
        # append the session's unmatched components
        A_union = A2.copy()
        A_union[:, mat_un] = A[sess][:, mat_sess]
        A_union = np.concatenate((A_union.toarray(), A[sess][:, nm_sess]), axis=1)
        # record where each component of this session landed in A_union
        new_match = np.zeros(A[sess].shape[-1], dtype=int)
        new_match[mat_sess] = mat_un
        new_match[nm_sess] = range(A2.shape[-1], A_union.shape[-1])
        matchings.append(new_match.tolist())

    # assignments[i, j] = index of the session-j component mapped to union
    # component i (NaN where the component was absent in that session)
    assignments = np.empty((A_union.shape[-1], n_sessions)) * np.nan
    for sess in range(n_sessions):
        assignments[matchings[sess], sess] = range(len(matchings[sess]))

    return A_union, assignments, matchings
def extract_active_components(assignments, indices, only = True):
    """
    Computes the indices of components that were active in a specified set of
    sessions.

    Args:
        assignments: ndarray # of components X # of sessions
            assignments matrix returned by function register_multisession

        indices: list int
            set of sessions to look for active neurons. Session 1 corresponds to a
            pythonic index 0 etc

        only: bool
            If True return components that were active ONLY in these sessions and
            were inactive in all the others. If False components can be active
            in other sessions as well

    Returns:
        components: 1d array of int
            indices of the matching components
    """
    # a component is active in a session iff its assignment entry is not NaN
    active_everywhere_requested = np.isnan(assignments[:, indices]).sum(-1) == 0
    components = np.where(active_everywhere_requested)[0]

    if only:
        # additionally require NaN (inactive) in every non-requested session
        other_sessions = list(np.setdiff1d(range(assignments.shape[-1]), indices))
        inactive_elsewhere = np.where(
            np.isnan(assignments[:, other_sessions]).sum(-1) == len(other_sessions))[0]
        components = np.intersect1d(components, inactive_elsewhere)

    return components
def norm_nrg(a_):
    """Cumulative-energy normalization of an array (used for contour levels).

    Pixels are ranked by value in decreasing order; each pixel receives the
    fraction of total squared energy accumulated up to its rank. The result
    has the same shape as the input, with values in (0, 1].
    """
    shape = a_.shape
    flat = a_.copy().reshape(-1, order='F')
    order = np.argsort(flat, axis=None)[::-1]      # descending by value
    energy = np.cumsum(flat.flatten()[order] ** 2)
    energy /= energy[-1]                           # normalize total energy to 1
    out = np.zeros(np.prod(shape))
    out[order] = energy
    return out.reshape(shape, order='F')
def distance_masks(M_s, cm_s, max_dist, enclosed_thr=None):
    """
    Compute distance matrix based on an intersection over union metric. Matrix are compared in order,
    with matrix i compared with matrix i+1

    Args:
        M_s: tuples of 1-D arrays
            The thresholded A matrices (masks) to compare, output of threshold_components

        cm_s: list of list of 2-ples
            the centroids of the components in each M_s

        max_dist: float
            maximum distance among centroids allowed between components. This corresponds to a distance
            at which two components are surely disjoined

        enclosed_thr: float
            if not None set distance to at most the specified value when ground truth is a subset of inferred

    Returns:
        D_s: list of matrix distances

    Raises:
        Exception: 'Nan value produced. Error in inputs'
    """
    D_s = []

    # compare each matrix to the next one in the list
    for gt_comp, test_comp, cmgt_comp, cmtest_comp in zip(M_s[:-1], M_s[1:], cm_s[:-1], cm_s[1:]):
        # todo : better with a function that calls itself
        logging.info('New Pair **')

        # copy so as not to interfere with the caller's M_s
        gt_comp = gt_comp.copy()[:, :]
        test_comp = test_comp.copy()[:, :]

        # the number of components for each
        nb_gt = np.shape(gt_comp)[-1]
        nb_test = np.shape(test_comp)[-1]
        # default distance is 1 == completely disjoint
        D = np.ones((nb_gt, nb_test))

        cmgt_comp = np.array(cmgt_comp)
        cmtest_comp = np.array(cmtest_comp)
        if enclosed_thr is not None:
            # per-component sums of squared pixel values of the gt masks,
            # used below to detect full enclosure
            gt_val = gt_comp.T.dot(gt_comp).diagonal()
        for i in range(nb_gt):
            # for each component of gt: k[:, j] holds gt_comp[:, i] + test_comp[:, j]
            k = gt_comp[:, np.repeat(i, nb_test)] + test_comp
            for j in range(nb_test):   # for each component of the tests
                # centroid distance gates the (expensive) overlap computation
                dist = np.linalg.norm(cmgt_comp[i] - cmtest_comp[j])
                if dist < max_dist:
                    # union of the i-th gt neuron with the j-th test neuron
                    # (counts the overlapping area twice; corrected below)
                    union = k[:, j].sum()
                    # intersection as the dot product of the two columns
                    intersection = np.array(gt_comp[:, i].T.dot(
                        test_comp[:, j]).todense()).squeeze()
                    # if we don't have even a union this is pointless
                    if union > 0:
                        # intersection is removed from union since union contains twice the overlaping area
                        # having the values in this format 0-1 is helpfull for the hungarian algorithm that follows
                        D[i, j] = 1 - 1. * intersection / \
                            (union - intersection)
                        if enclosed_thr is not None:
                            # NOTE(review): gt_val has nb_gt entries but is
                            # indexed with j (a test-component index); this can
                            # raise IndexError when nb_test > nb_gt and may not
                            # express the intended enclosure test -- confirm.
                            if intersection == gt_val[j] or intersection == gt_val[i]:
                                D[i, j] = min(D[i, j], 0.5)
                    else:
                        D[i, j] = 1.

                    if np.isnan(D[i, j]):
                        raise Exception('Nan value produced. Error in inputs')
                else:
                    # centroids too far apart: surely disjoint
                    D[i, j] = 1

        D_s.append(D)
    return D_s
def find_matches(D_s, print_assignment=False):
    """Run the Hungarian algorithm on each distance matrix in D_s.

    Returns, for every matrix, the matched (row, column) index arrays and
    the list of costs of the matched pairs.
    """
    matches = []
    costs = []
    t_start = time.time()

    for ii, D in enumerate(D_s):
        # operate on a copy so the caller's matrix is never touched
        DD = D.copy()
        if np.sum(np.where(np.isnan(DD))) > 0:
            logging.error('Exception: Distance Matrix contains NaN, not allowed!')
            raise Exception('Distance Matrix contains NaN, not allowed!')

        # optimal one-to-one assignment (Hungarian algorithm)
        indexes = linear_sum_assignment(DD)
        matches.append(indexes)

        pair_costs = []
        for row, column in zip(indexes[0], indexes[1]):
            value = DD[row, column]
            if print_assignment:
                logging.debug(('(%d, %d) -> %f' % (row, column, value)))
            pair_costs.append(value)

        logging.debug(('FOV: %d, shape: %d,%d total cost: %f' %
                       (ii, DD.shape[0], DD.shape[1], np.sum(pair_costs))))
        logging.debug((time.time() - t_start))
        costs.append(pair_costs)

    return matches, costs
def link_neurons(matches, costs, max_cost=0.6, min_FOV_present=None):
    """
    Link neurons from different FOVs given matches and costs obtained from the hungarian algorithm

    Args:
        matches: lists of list of tuple
            output of the find_matches function

        costs: list of lists of scalars
            cost associated to each match in matches

        max_cost: float
            maximum allowed value of the 1- intersection over union metric

        min_FOV_present: int
            number of FOVs that must consequently contain the neuron starting from 0. If none
            the neuro must be present in each FOV

    Returns:
        neurons: 2D array; column j holds the per-FOV indices of linked neuron j
    """
    if min_FOV_present is None:
        min_FOV_present = len(matches)

    chains = []
    for start_idx in range(len(matches[0][0])):
        # follow the neuron through consecutive FOVs, stopping at the first
        # missing or too-costly link
        chain = [start_idx]
        for (rows, cols), cost in zip(matches, costs):
            hit = np.where(rows == chain[-1])[0].squeeze()
            if hit.size == 0:
                break
            if cost[hit] > max_cost:
                break
            chain.append(cols[hit])
        if len(chain) > min_FOV_present:
            chains.append(chain)

    neurons = np.array(chains).T
    logging.info(('num_neurons:' + str(len(chains))))
    return neurons
def nf_load_masks(file_name, dims):
    """Load neurofinder-format regions from a JSON file as binary masks.

    Args:
        file_name: path of the JSON file; a list of dicts, each holding a
            'coordinates' list of [row, col] pixel pairs

        dims: (d1, d2) shape of each output mask

    Returns:
        masks: ndarray (# regions x d1 x d2) with 1 at listed pixels
    """
    # load the regions (training data only)
    with open(file_name) as f:
        regions = json.load(f)

    def tomask(coords):
        mask = np.zeros(dims)
        # BUG FIX: indexing with list(zip(*coords)) relied on numpy's
        # deprecated non-tuple-sequence fancy indexing; a tuple of index
        # arrays is the correct, version-stable multidimensional index.
        mask[tuple(zip(*coords))] = 1
        return mask

    masks = np.array([tomask(s['coordinates']) for s in regions])
    return masks
def nf_masks_to_json(binary_masks, json_filename):
    """
    Take as input a tensor of binary mask and produces json format for neurofinder

    Args:
        binary_masks: 3d ndarray (components x dimension 1 x dimension 2)

        json_filename: str
            path of the output JSON file

    Returns:
        regions: list of dict
            regions in neurofinder format
    """
    # one region per mask, listing its nonzero (row, col) pixels
    regions = [{"coordinates": [[int(r), int(c)] for r, c in zip(*np.where(mask))]}
               for mask in binary_masks]

    with open(json_filename, 'w') as f:
        f.write(json.dumps(regions))

    return regions
#%%
def nf_masks_to_neurof_dict(binary_masks, dataset_name):
    """
    Take as input a tensor of binary mask and produces dict format for neurofinder

    Args:
        binary_masks: 3d ndarray (components x dimension 1 x dimension 2)

        dataset_name: name of the dataset

    Returns:
        dset: dict
            dataset in neurofinder format to be saved in json
    """
    # one region per mask, listing its nonzero (row, col) pixels
    regions = [{"coordinates": [[int(r), int(c)] for r, c in zip(*np.where(mask))]}
               for mask in binary_masks]
    return {"regions": regions, "dataset": dataset_name}
#%%
def nf_read_roi(fileobj):
    '''
    points = read_roi(fileobj)

    Read ImageJ's ROI format from a binary file object and return the
    polygon vertices as an (n_coordinates, 2) array of (row, col) points.

    Adapted from https://gist.github.com/luispedro/3437255
    '''
    # This is based on:
    # http://rsbweb.nih.gov/ij/developer/source/ij/io/RoiDecoder.java.html
    # http://rsbweb.nih.gov/ij/developer/source/ij/io/RoiEncoder.java.html

    # TODO: Use an enum
    #SPLINE_FIT = 1
    #DOUBLE_HEADED = 2
    #OUTLINE = 4
    #OVERLAY_LABELS = 8
    #OVERLAY_NAMES = 16
    #OVERLAY_BACKGROUNDS = 32
    #OVERLAY_BOLD = 64
    SUB_PIXEL_RESOLUTION = 128
    #DRAW_OFFSET = 256

    pos = [4]

    def get8():
        # read one unsigned byte, tracking the stream position
        pos[0] += 1
        s = fileobj.read(1)
        if not s:
            raise IOError('readroi: Unexpected EOF')
        return ord(s)

    def get16():
        # big-endian unsigned 16-bit
        b0 = get8()
        b1 = get8()
        return (b0 << 8) | b1

    def get32():
        # big-endian unsigned 32-bit
        s0 = get16()
        s1 = get16()
        return (s0 << 16) | s1

    def getfloat():
        # reinterpret the big-endian 32-bit word as an IEEE float32
        v = np.int32(get32())
        return v.view(np.float32)

    magic = fileobj.read(4)
    # BUG FIX: the stream is binary, so read() returns bytes; comparing
    # against the str 'Iout' always mismatched under Python 3 and logged a
    # spurious warning for every valid ROI file.
    if magic != b'Iout':
        # raise IOError('Magic number not found')
        logging.warning('Magic number not found')
    version = get16()

    # It seems that the roi type field occupies 2 Bytes, but only one is used
    roi_type = get8()
    # Discard second Byte:
    get8()

    # if not (0 <= roi_type < 11):
    #     logging.error(('roireader: ROI type %s not supported' % roi_type))
    #
    # if roi_type != 7:
    #
    #     logging.error(('roireader: ROI type %s not supported (!= 7)' % roi_type))

    # bounding box and number of polygon vertices
    top = get16()
    left = get16()
    bottom = get16()
    right = get16()
    n_coordinates = get16()

    x1 = getfloat()
    y1 = getfloat()
    x2 = getfloat()
    y2 = getfloat()
    stroke_width = get16()
    shape_roi_size = get32()
    stroke_color = get32()
    fill_color = get32()
    subtype = get16()
    if subtype != 0:
        raise ValueError(
            'roireader: ROI subtype %s not supported (!= 0)' % subtype)
    options = get16()
    arrow_style = get8()
    arrow_head_size = get8()
    rect_arc_size = get16()
    position = get32()
    header2offset = get32()

    # coordinates are stored as floats (sub-pixel) or int16, x-block first
    if options & SUB_PIXEL_RESOLUTION:
        getc = getfloat
        points = np.empty((n_coordinates, 2), dtype=np.float32)
    else:
        getc = get16
        points = np.empty((n_coordinates, 2), dtype=np.int16)
    points[:, 1] = [getc() for i in range(n_coordinates)]
    points[:, 0] = [getc() for i in range(n_coordinates)]
    # coordinates are relative to the bounding box; shift to absolute and
    # convert from ImageJ's 1-based convention
    points[:, 1] += left
    points[:, 0] += top
    points -= 1

    return points
def nf_read_roi_zip(fname, dims, return_names=False):
    """Read an ImageJ ROI zip archive and rasterize each ROI into a binary mask.

    Args:
        fname: path of the .zip file containing the ROIs
        dims: (d1, d2) shape of the output masks
        return_names: if True also return the archive member names

    Returns:
        masks ndarray (# ROIs x d1 x d2), and optionally the list of names
    """
    with zipfile.ZipFile(fname) as archive:
        names = archive.namelist()
        all_coords = [nf_read_roi(archive.open(name)) for name in names]

    def rasterize(pts):
        canvas = np.zeros(dims)
        pts = np.array(pts)
        # fill the polygon interior; the caller's -1 shift and the +1 here
        # cancel out, reproducing the original coordinate convention
        rr, cc = polygon(pts[:, 0] + 1, pts[:, 1] + 1)
        canvas[rr, cc] = 1
        return canvas

    masks = np.array([rasterize(pts - 1) for pts in all_coords])
    return (masks, names) if return_names else masks
def nf_merge_roi_zip(fnames, idx_to_keep, new_fold):
    """
    Create a zip file containing ROIs for ImageJ by combining elements from a list of ROI zip files

    Args:
        fnames: str
            list of zip files containing ImageJ rois

        idx_to_keep:   list of lists
            for each zip file index of elements to keep

        new_fold: str
            name of the output zip file (without .zip extension)
    """
    folders_rois = []
    files_to_keep = []
    # unzip the files and keep only the ones that are requested
    for fn, idx in zip(fnames, idx_to_keep):
        dirpath = tempfile.mkdtemp()
        folders_rois.append(dirpath)
        # FIX: the archive was previously opened twice (a context-managed
        # handle for namelist() plus a second ZipFile for extractall());
        # one context manager does both and is closed even on error.
        with zipfile.ZipFile(fn) as zf:
            name_rois = zf.namelist()
            logging.debug(len(name_rois))
            zf.extractall(dirpath)
        files_to_keep.append([os.path.join(dirpath, ff)
                              for ff in np.array(name_rois)[idx]])

    # stage the selected ROI files in new_fold, archive it, then remove it
    os.makedirs(new_fold)
    for fls in files_to_keep:
        for fl in fls:
            shutil.move(fl, new_fold)
    shutil.make_archive(new_fold, 'zip', new_fold)
    shutil.rmtree(new_fold)
    # FIX: the temporary extraction directories were previously leaked
    for dirpath in folders_rois:
        shutil.rmtree(dirpath, ignore_errors=True)
def extract_binary_masks_blob(A, neuron_radius, dims, num_std_threshold=1, minCircularity=0.5,
                              minInertiaRatio=0.2, minConvexity=.8):
    """
    Function to extract masks from data. It will also perform a preliminary selection of good masks based on criteria like shape and size

    Args:
        A: scipy.sparse matrix
            contains the components as output from the CNMF algorithm

        neuron_radius: float
            neuronal radius employed in the CNMF settings (gSiz)

        num_std_threshold: int
            number of times above iqr/1.349 (std estimator) the median to be considered as threshold for the component

        minCircularity: float
            parameter from cv2.SimpleBlobDetector

        minInertiaRatio: float
            parameter from cv2.SimpleBlobDetector

        minConvexity: float
            parameter from cv2.SimpleBlobDetector

    Returns:
        masks: np.array
            one watershed-segmented binary mask per component

        pos_examples: np.array
            indices of components accepted by the blob detector

        neg_examples: np.array
            indices of components rejected by the blob detector
    """
    # configure the blob detector used to accept/reject the watershed masks
    params = cv2.SimpleBlobDetector_Params()
    params.minCircularity = minCircularity
    params.minInertiaRatio = minInertiaRatio
    params.minConvexity = minConvexity

    # Change thresholds
    params.blobColor = 255
    params.minThreshold = 0
    params.maxThreshold = 255
    params.thresholdStep = 3
    # reject blobs smaller than ~75% of the nominal neuron area
    params.minArea = np.pi * ((neuron_radius * .75)**2)

    params.filterByColor = True
    params.filterByArea = True
    params.filterByCircularity = True
    params.filterByConvexity = True
    params.filterByInertia = True

    detector = cv2.SimpleBlobDetector_create(params)

    masks_ws = []
    pos_examples = []
    neg_examples = []

    for count, comp in enumerate(A.tocsc()[:].T):
        logging.debug(count)
        comp_d = np.array(comp.todense())
        gray_image = np.reshape(comp_d, dims, order='F')
        # rescale the component footprint to 8-bit
        gray_image = (gray_image - np.min(gray_image)) / \
            (np.max(gray_image) - np.min(gray_image)) * 255
        gray_image = gray_image.astype(np.uint8)

        # segment using watershed
        markers = np.zeros_like(gray_image)
        elevation_map = sobel(gray_image)
        # robust thresholds: median of nonzero pixels, and median plus
        # num_std_threshold robust stds (iqr / 1.35)
        thr_1 = np.percentile(gray_image[gray_image > 0], 50)
        iqr = np.diff(np.percentile(gray_image[gray_image > 0], (25, 75)))
        thr_2 = thr_1 + num_std_threshold * iqr / 1.35
        markers[gray_image < thr_1] = 1
        markers[gray_image > thr_2] = 2
        edges = watershed(elevation_map, markers) - 1

        # only keep largest object
        label_objects, _ = ndi.label(edges)
        sizes = np.bincount(label_objects.ravel())
        if len(sizes) > 1:
            idx_largest = np.argmax(sizes[1:])
            edges = (label_objects == (1 + idx_largest))
            edges = ndi.binary_fill_holes(edges)
        else:
            logging.warning('empty component')
            edges = np.zeros_like(edges)

        masks_ws.append(edges)
        # a component is a positive example if the detector finds a blob
        keypoints = detector.detect((edges * 200.).astype(np.uint8))

        if len(keypoints) > 0:
            pos_examples.append(count)
        else:
            neg_examples.append(count)

    return np.array(masks_ws), np.array(pos_examples), np.array(neg_examples)
def extract_binary_masks_blob_parallel(A, neuron_radius, dims, num_std_threshold=1, minCircularity=0.5,
                                       minInertiaRatio=0.2, minConvexity=.8, dview=None):
    """Run extract_binary_masks_blob over every column of A, optionally in parallel.

    Arguments mirror extract_binary_masks_blob. dview is presumably an
    ipyparallel-style view used for distribution when provided (inferred
    from the map_sync call -- TODO confirm).

    Returns:
        masks: components x d1 x d2 array of watershed masks
        is_pos, is_neg: per-component counts of accepted / rejected blobs
    """
    pars = [[A[:, col], neuron_radius, dims, num_std_threshold,
             minCircularity, minInertiaRatio, minConvexity]
            for col in range(A.shape[-1])]

    if dview is not None:
        res = dview.map_sync(
            extract_binary_masks_blob_parallel_place_holder, pars)
    else:
        res = list(map(extract_binary_masks_blob_parallel_place_holder, pars))

    masks = [np.squeeze(r[0]) for r in res]
    is_pos = [r[1] for r in res]
    is_neg = [r[2] for r in res]
    # stack per-component masks into a components-first 3D array
    masks = np.dstack(masks).transpose([2, 0, 1])
    return masks, is_pos, is_neg
def extract_binary_masks_blob_parallel_place_holder(pars):
    """Unpack a parameter list and call extract_binary_masks_blob.

    Helper for extract_binary_masks_blob_parallel: map/map_sync needs a
    single-argument callable. Returns the masks plus the counts of
    accepted and rejected components.

    BUG FIX: minCircularity was previously discarded from pars and
    hard-coded to 0.5 in the call, silently ignoring the caller's setting.
    """
    A, neuron_radius, dims, num_std_threshold, minCircularity, minInertiaRatio, minConvexity = pars
    masks_ws, pos_examples, neg_examples = extract_binary_masks_blob(
        A, neuron_radius, dims, num_std_threshold=num_std_threshold,
        minCircularity=minCircularity, minInertiaRatio=minInertiaRatio,
        minConvexity=minConvexity)
    return masks_ws, len(pos_examples), len(neg_examples)
def extractROIsFromPCAICA(spcomps, numSTD=4, gaussiansigmax=2, gaussiansigmay=2, thresh=None):
    """
    Given the spatial components output of the IPCA_stICA function extract possible regions of interest

    The algorithm estimates the significance of a components by thresholding the components after gaussian smoothing

    Args:
        spcomps: 3d array containing the spatial components

        numSTD: number of robust standard deviations above the median of the smoothed
            component for a pixel to be considered significant (std estimated
            from the interquartile range)

        gaussiansigmax, gaussiansigmay: smoothing sigmas along x and y

        thresh: optional fixed absolute threshold overriding the robust one

    Returns:
        allMasks: list of boolean masks, one per connected region
        maskgrouped: list of labelled images, one per component
    """
    numcomps, _, _ = spcomps.shape

    allMasks = []
    maskgrouped = []
    for k in range(numcomps):
        smoothed = gaussian_filter(spcomps[k], [gaussiansigmay, gaussiansigmax])

        # robust std estimate from the IQR (1.35 ~ IQR of a unit normal)
        q75, q25 = np.percentile(smoothed, [75, 25])
        iqr = q75 - q25
        upper = np.median(smoothed) + numSTD * iqr / 1.35
        lower = np.median(smoothed) - numSTD * iqr / 1.35

        # keep both large positive and large negative (sign-flipped) pixels
        if thresh is None:
            significant = smoothed * (smoothed > upper) - \
                smoothed * (smoothed < lower)
        else:
            significant = smoothed * (smoothed > thresh) - \
                smoothed * (smoothed < -thresh)

        # 8-connected components of the significant pixels
        labelled, n_regions = label(significant > 0, np.ones((3, 3)))
        maskgrouped.append(labelled)
        for region_id in range(1, n_regions + 1):
            allMasks.append(np.asarray(labelled == region_id))
    return allMasks, maskgrouped
def detect_duplicates_and_subsets(binary_masks, predictions=None, r_values=None, dist_thr=0.1, min_dist=10,
                                  thresh_subset=0.8):
    """Find duplicated / largely-overlapping binary masks and pick which to keep.

    Two components are considered duplicated when their IoU-based distance is
    below dist_thr, or when one overlaps the other by at least thresh_subset
    of its area. Among each overlapping pair the component with the higher
    metric is kept; the metric is the CNN prediction when given, else the
    r-value, else the component area.

    Args:
        binary_masks: components x d1 x d2 boolean array

        predictions: optional per-component CNN scores (preferred metric)

        r_values: optional per-component spatial correlations (fallback metric)

        dist_thr: distance threshold below which two components are duplicates

        min_dist: max centroid distance forwarded to distance_masks

        thresh_subset: overlap fraction above which a mask counts as a subset

    Returns:
        indices_orig: list of (i, j) duplicate pairs
        indices_to_keep: component indices surviving deduplication
        indices_to_remove: component indices flagged for removal
        D: distance matrix between components
        overlap: normalized overlap matrix
    """
    cm = [scipy.ndimage.center_of_mass(mm) for mm in binary_masks]
    sp_rois = scipy.sparse.csc_matrix(
        np.reshape(binary_masks, (binary_masks.shape[0], -1)).T)
    # self-distance matrix; the diagonal is masked out
    D = distance_masks([sp_rois, sp_rois], [cm, cm], min_dist)[0]
    np.fill_diagonal(D, 1)
    # overlap[i, j]: fraction of component j's area covered by component i
    overlap = sp_rois.T.dot(sp_rois).toarray()
    sz = np.array(sp_rois.sum(0))
    logging.info(sz.shape)
    overlap = overlap/sz.T
    np.fill_diagonal(overlap, 0)
    # pairs of duplicate indices
    indices_orig = np.where((D < dist_thr) | ((overlap) >= thresh_subset))
    indices_orig = [(a, b) for a, b in zip(indices_orig[0], indices_orig[1])]

    # quality metric used to break ties: CNN predictions > r-values > area
    if predictions is not None:
        metric = predictions.squeeze()
    elif r_values is not None:
        metric = r_values.squeeze()
    else:
        metric = sz.squeeze()
        logging.debug('***** USING MAX AREA BY DEFAULT')

    # weight the subset relationships by the metric of the enclosing component
    overlap_tmp = overlap.copy() >= thresh_subset
    overlap_tmp = overlap_tmp*metric[:, None]

    max_idx = np.argmax(overlap_tmp)
    one, two = np.unravel_index(max_idx, overlap_tmp.shape)
    max_val = overlap_tmp[one, two]

    indices_to_keep = []
    indices_to_remove = []

    # greedily drop the lower-metric member of the strongest remaining overlap
    while max_val > 0:
        one, two = np.unravel_index(max_idx, overlap_tmp.shape)
        if metric[one] > metric[two]:
            overlap_tmp[:, two] = 0
            overlap_tmp[two, :] = 0
            indices_to_remove.append(two)
        else:
            overlap_tmp[:, one] = 0
            overlap_tmp[one, :] = 0
            indices_to_remove.append(one)
        max_idx = np.argmax(overlap_tmp)
        one, two = np.unravel_index(max_idx, overlap_tmp.shape)
        max_val = overlap_tmp[one, two]

    # everything flagged as a duplicate but not removed is kept
    indices_to_keep = np.setdiff1d(np.unique(indices_orig), indices_to_remove)

    return indices_orig, indices_to_keep, indices_to_remove, D, overlap
def detect_duplicates(file_name, dist_thr=0.1, FOV=(512, 512)):
    """
    Removes duplicate ROIs from file file_name
    Args:
        file_name: .zip file with all rois
        dist_thr: distance threshold for duplicate detection
        FOV: dimensions of the FOV
    Returns:
        duplicates : list of indices with duplicate entries
        ind_keep : list of kept indices
    """
    masks = nf_read_roi_zip(file_name, FOV)
    centers = [scipy.ndimage.center_of_mass(m) for m in masks]
    flattened = np.reshape(masks, (masks.shape[0], np.prod(FOV))).T
    sparse_masks = scipy.sparse.csc_matrix(flattened)
    dist = distance_masks([sparse_masks, sparse_masks], [centers, centers], 10)[0]
    np.fill_diagonal(dist, 1)
    # pairs of duplicate indices
    rows, cols = np.where(dist < dist_thr)
    discard = list(np.unique(cols[cols > rows]))
    ind_keep = list(set(range(dist.shape[0])) - set(discard))
    duplicates = list(np.unique(np.concatenate((rows, cols))))
    return duplicates, ind_keep
|
simonsfoundation/CaImAn
|
use_cases/custom/rois_Hendrik.py
|
Python
|
gpl-2.0
| 50,709
|
[
"Gaussian",
"NEURON"
] |
87fdeb88c971f9858f0938e31a0d5aeae178b83ae5e43ff6ccdb8115a52154b0
|
# Copyright 2012 by Eric Talevich. All rights reserved.
# This code is part of the Biopython distribution and governed by its license.
# Please see the LICENSE file that should have been included as part of this
# package.
"""Command-line wrapper for the tree inference program RAxML.
Derived from the help page for RAxML version 7.3 by Alexandros Stamatakis, but
should work for any version 7.X (and probably earlier for most options).
"""
from __future__ import print_function
from Bio._py3k import basestring
from Bio.Application import _Option, _Switch, AbstractCommandline
class RaxmlCommandline(AbstractCommandline):
    """Command-line wrapper for the tree inference program RAxML.
    The required parameters are 'sequences' (-s), 'model' (-m) and 'name' (-n).
    The parameter 'parsimony_seed' (-p) must also be set for RAxML, but if you
    do not specify it, this wrapper will set the seed to 10000 for you.
    Example:
    >>> from Bio.Phylo.Applications import RaxmlCommandline
    >>> raxml_cline = RaxmlCommandline(sequences="Tests/Phylip/interlaced2.phy",
    ...                                model="PROTCATWAG", name="interlaced2")
    >>> print(raxml_cline)
    raxmlHPC -m PROTCATWAG -n interlaced2 -p 10000 -s Tests/Phylip/interlaced2.phy
    You would typically run the command line with raxml_cline() or via
    the Python subprocess module, as described in the Biopython tutorial.
    Citation:
    Stamatakis A.
    RAxML-VI-HPC: Maximum Likelihood-based Phylogenetic Analyses with
    Thousands of Taxa and Mixed Models.
    Bioinformatics 2006, 22(21):688-2690.
    Homepage: http://sco.h-its.org/exelixis/software.html
    """
    def __init__(self, cmd='raxmlHPC', **kwargs):
        # One _Option/_Switch entry per RAxML flag; the list mirrors the
        # RAxML 7.3 help text.  Each entry maps a flag (e.g. '-a') to a
        # Python-friendly attribute name (e.g. 'weight_filename').
        self.parameters = [
            _Option(['-a', 'weight_filename'],
                    "Name of a column weight file to assign individual weights "
                    "to each column of the alignment. Those weights must be "
                    "integers separated by any type and number of whitespaces "
                    "within a separate file.",
                    filename=True,
                    equate=False,
                    ),
            _Option(['-b', 'bootstrap_seed'],
                    "Random seed for bootstrapping.",
                    equate=False,
                    ),
            _Option(['-c', 'num_categories'],
                    "Number of distinct rate categories for RAxML when "
                    "evolution model is set to GTRCAT or GTRMIX."
                    "Individual per-site rates are categorized into this "
                    "many rate categories to accelerate computations. "
                    "Default: 25.",
                    equate=False,
                    ),
            _Switch(['-d', 'random_starting_tree'],
                    "Start ML optimization from random starting tree."
                    ),
            _Option(['-e', 'epsilon'],
                    "Set model optimization precision in log likelihood units "
                    "for final optimization of tree topology under MIX/MIXI "
                    "or GAMMA/GAMMAI."
                    "Default: 0.1 for models not using proportion of "
                    "invariant sites estimate; 0.001 for models using "
                    "proportion of invariant sites estimate.",
                    equate=False,
                    ),
            _Option(['-E', 'exclude_filename'],
                    "An exclude file name, containing a specification of "
                    "alignment positions you wish to exclude. Format is "
                    "similar to Nexus, the file shall contain entries like "
                    "'100-200 300-400'; to exclude a single column write, "
                    "e.g., '100-100'. If you use a mixed model, an "
                    "appropriately adapted model file will be written.",
                    filename=True,
                    equate=False,
                    ),
            _Option(['-f', 'algorithm'],
                    """Select algorithm:
                    a: Rapid Bootstrap analysis and search for best-scoring ML
                    tree in one program run.
                    b: Draw bipartition information on a tree provided with '-t'
                    based on multiple trees (e.g. form a bootstrap) in a file
                    specifed by '-z'.
                    c: Check if the alignment can be properly read by RAxML.
                    d: New rapid hill-climbing (DEFAULT).
                    e: Optimize model+branch lengths for given input tree under
                    GAMMA/GAMMAI only.
                    g: Compute per site log Likelihoods for one ore more trees
                    passed via '-z' and write them to a file that can be read
                    by CONSEL.
                    h: Compute log likelihood test (SH-test) between best tree
                    passed via '-t' and a bunch of other trees passed via '-z'.
                    i: Perform a really thorough bootstrap, refinement of final
                    bootstrap tree under GAMMA and a more exhaustive algorithm.
                    j: Generate a bunch of bootstrapped alignment files from an
                    original alignemnt file.
                    m: Compare bipartitions between two bunches of trees passed
                    via '-t' and '-z' respectively. This will return the
                    Pearson correlation between all bipartitions found in the
                    two tree files. A file called
                    RAxML_bipartitionFrequencies.outputFileName will be
                    printed that contains the pair-wise bipartition
                    frequencies of the two sets.
                    n: Compute the log likelihood score of all trees contained
                    in a tree file provided by '-z' under GAMMA or
                    GAMMA+P-Invar.
                    o: Old and slower rapid hill-climbing.
                    p: Perform pure stepwise MP addition of new sequences to an
                    incomplete starting tree.
                    s: Split up a multi-gene partitioned alignment into the
                    respective subalignments.
                    t: Do randomized tree searches on one fixed starting tree.
                    w: Compute ELW test on a bunch of trees passed via '-z'.
                    x: Compute pair-wise ML distances, ML model parameters will
                    be estimated on an MP starting tree or a user-defined
                    tree passed via '-t', only allowed for GAMMA-based models
                    of rate heterogeneity.""",
                    checker_function=(lambda x:
                                      isinstance(x, basestring) and len(x) == 1),
                    equate=False,
                    ),
            _Option(['-g', 'grouping_constraint'],
                    "File name of a multifurcating constraint tree. "
                    "this tree does not need to be comprehensive, i.e. "
                    "contain all taxa.",
                    filename=True,
                    equate=False,
                    ),
            _Option(['-i', 'rearrangements'],
                    "Initial rearrangement setting for the subsequent "
                    "application of topological changes phase.",
                    equate=False,
                    ),
            _Switch(['-j', 'checkpoints'],
                    "Write checkpoints (intermediate tree topologies)."
                    ),
            _Switch(['-k', 'bootstrap_branch_lengths'],
                    "Print bootstrapped trees with branch lengths. "
                    "The bootstraps will run a bit longer, because model "
                    "parameters will be optimized at the end of each run. "
                    "Use with CATMIX/PROTMIX or GAMMA/GAMMAI."
                    ),
            _Option(['-l', 'cluster_threshold'],
                    "Threshold for sequence similarity clustering. "
                    "RAxML will then print out an alignment to a file "
                    "called sequenceFileName.reducedBy.threshold that "
                    "only contains sequences <= the specified threshold "
                    "that must be between 0.0 and 1.0. RAxML uses the "
                    "QT-clustering algorithm to perform this task. "
                    "In addition, a file called "
                    "RAxML_reducedList.outputFileName will be written "
                    "that contains clustering information.",
                    equate=False,
                    ),
            _Option(['-L', 'cluster_threshold_fast'],
                    "Same functionality as '-l', but uses a less "
                    "exhaustive and thus faster clustering algorithm. "
                    "This is intended for very large datasets with more "
                    "than 20,000-30,000 sequences.",
                    equate=False,
                    ),
            _Option(['-m', 'model'],
                    """Model of Nucleotide or Amino Acid Substitution:
                    NUCLEOTIDES:
                    GTRCAT         : GTR + Optimization of substitution rates + Optimization of site-specific
                    evolutionary rates which are categorized into numberOfCategories distinct
                    rate categories for greater computational efficiency
                    if you do a multiple analysis with  '-#' or '-N' but without bootstrapping the program
                    will use GTRMIX instead
                    GTRGAMMA       : GTR + Optimization of substitution rates + GAMMA model of rate
                    heterogeneity (alpha parameter will be estimated)
                    GTRMIX         : Inference of the tree under GTRCAT
                    and thereafter evaluation of the final tree topology under GTRGAMMA
                    GTRCAT_GAMMA   : Inference of the tree with site-specific evolutionary rates.
                    However, here rates are categorized using the 4 discrete GAMMA rates.
                    Evaluation of the final tree topology under GTRGAMMA
                    GTRGAMMAI      : Same as GTRGAMMA, but with estimate of proportion of invariable sites
                    GTRMIXI        : Same as GTRMIX, but with estimate of proportion of invariable sites
                    GTRCAT_GAMMAI  : Same as GTRCAT_GAMMA, but with estimate of proportion of invariable sites
                    AMINO ACIDS:
                    PROTCATmatrixName[F]         : specified AA matrix + Optimization of substitution rates + Optimization of site-specific
                    evolutionary rates which are categorized into numberOfCategories distinct
                    rate categories for greater computational efficiency
                    if you do a multiple analysis with  '-#' or '-N' but without bootstrapping the program
                    will use PROTMIX... instead
                    PROTGAMMAmatrixName[F]       : specified AA matrix + Optimization of substitution rates + GAMMA model of rate
                    heterogeneity (alpha parameter will be estimated)
                    PROTMIXmatrixName[F]         : Inference of the tree under specified AA matrix + CAT
                    and thereafter evaluation of the final tree topology under specified AA matrix + GAMMA
                    PROTCAT_GAMMAmatrixName[F]   : Inference of the tree under specified AA matrix and site-specific evolutionary rates.
                    However, here rates are categorized using the 4 discrete GAMMA rates.
                    Evaluation of the final tree topology under specified AA matrix + GAMMA
                    PROTGAMMAImatrixName[F]      : Same as PROTGAMMAmatrixName[F], but with estimate of proportion of invariable sites
                    PROTMIXImatrixName[F]        : Same as PROTMIXmatrixName[F], but with estimate of proportion of invariable sites
                    PROTCAT_GAMMAImatrixName[F]  : Same as PROTCAT_GAMMAmatrixName[F], but with estimate of proportion of invariable sites
                    Available AA substitution models: DAYHOFF, DCMUT, JTT, MTREV, WAG, RTREV, CPREV, VT, BLOSUM62, MTMAM, GTR
                    With the optional 'F' appendix you can specify if you want to use empirical base frequencies
                    Please not that for mixed models you can in addition specify the per-gene AA model in
                    the mixed model file (see manual for details)
                    """,
                    equate=False,
                    ),
            _Switch(['-M', 'partition_branch_lengths'],
                    "Switch on estimation of individual per-partition "
                    "branch lengths. Only has effect when used in "
                    "combination with 'partition_filename' ('-q'). "
                    "Branch lengths for individual partitions will be "
                    "printed to separate files. A weighted average of the "
                    "branch lengths is computed by using the respective "
                    "partition lengths. "
                    ),
            _Option(['-n', 'name'],
                    "Name used in the output files.",
                    filename=True,
                    equate=False,
                    ),
            _Option(['-o', 'outgroup'],
                    "Name of a single outgroup or a comma-separated list "
                    "of outgroups, eg '-o Rat' or '-o Rat,Mouse'. In case "
                    "that multiple outgroups are not monophyletic the "
                    "first name in the list will be selected as outgroup. "
                    "Don't leave spaces between taxon names!",
                    checker_function=lambda x: len(x.split()) == 1,
                    equate=False,
                    ),
            _Option(['-q', 'partition_filename'],
                    "File name containing the assignment of models to "
                    "alignment partitions for multiple models of "
                    "substitution. For the syntax of this file please "
                    "consult the RAxML manual.",
                    filename=True,
                    equate=False,
                    ),
            _Option(['-p', 'parsimony_seed'],
                    "Random number seed for the parsimony inferences. "
                    "This allows you to reproduce your results and will "
                    "help developers debug the program. This option HAS "
                    "NO EFFECT in the parallel MPI version.",
                    equate=False,
                    ),
            _Option(['-P', 'protein_model'],
                    "File name of a user-defined AA (Protein) substitution "
                    "model. This file must contain 420 entries, the first "
                    "400 being the AA substitution rates (this must be a "
                    "symmetric matrix) and the last 20 are the empirical "
                    "base frequencies.",
                    filename=True,
                    equate=False,
                    ),
            _Option(['-r', 'binary_constraint'],
                    "File name of a binary constraint tree. "
                    "This tree does not need to be comprehensive, i.e. "
                    "contain all taxa.",
                    filename=True,
                    equate=False,
                    ),
            _Option(['-s', 'sequences'],
                    "Name of the alignment data file, in PHYLIP format.",
                    filename=True,
                    equate=False,
                    ),
            _Option(['-t', 'starting_tree'],
                    "File name of a user starting tree, in Newick format.",
                    filename=True,
                    equate=False,
                    ),
            _Option(['-T', 'threads'],
                    "Number of threads to run. "
                    "PTHREADS VERSION ONLY! "
                    "Make sure to set this at most the number of CPUs "
                    "you have on your machine, otherwise, there will be "
                    "a huge performance decrease!",
                    equate=False,
                    ),
            _Option(['-u', 'num_bootstrap_searches'],
                    "Number of multiple bootstrap searches per replicate. "
                    "Use this to obtain better ML trees for each "
                    "replicate. Default: 1 ML search per bootstrap "
                    "replicate.",
                    equate=False,
                    ),
            _Switch(['-v', 'version'],
                    "Display version information."
                    ),
            _Option(['-w', 'working_dir'],
                    "Name of the working directory where RAxML will "
                    "write its output files. Default: current directory.",
                    filename=True,
                    equate=False,
                    ),
            _Option(['-x', 'rapid_bootstrap_seed'],
                    "Random seed for rapid bootstrapping.",
                    equate=False,
                    ),
            _Switch(['-y', 'parsimony'],
                    "Only compute a parsimony starting tree, then exit."
                    ),
            _Option(['-z', 'bipartition_filename'],
                    "Name of a file containing multiple trees, e.g. from "
                    "a bootstrap run, that shall be used to draw "
                    "bipartition values onto a tree provided with '-t'. "
                    "It can also be used to compute per-site log "
                    "likelihoods in combination with '-f g', and to read "
                    "a bunch of trees for a couple of other options "
                    "('-f h', '-f m', '-f n').",
                    filename=True,
                    equate=False,
                    ),
            _Option(['-N', '-#', 'num_replicates'],
                    "Number of alternative runs on distinct starting trees. "
                    "In combination with the '-b' option, this will invoke a "
                    "multiple bootstrap analysis. "
                    "DEFAULT: 1 single analysis."
                    "Note that '-N' has been added as an alternative since "
                    "'-#' sometimes caused problems with certain MPI job "
                    "submission systems, since '-#' is often used to start "
                    "comments. ",
                    equate=False,
                    ),
        ]
        AbstractCommandline.__init__(self, cmd, **kwargs)
        # ENH: enforce -s, -n and -m
        # RAxML refuses to run without -p; supply a fixed default seed so the
        # wrapper works out of the box (NOTE: a falsy user-supplied seed of 0
        # would also be replaced here).
        if not self.parsimony_seed:
            self.parsimony_seed = 10000
# Run this module's doctests (e.g. the RaxmlCommandline example above) when
# executed directly.
if __name__ == "__main__":
    from Bio._utils import run_doctest
    run_doctest()
|
zjuchenyuan/BioWeb
|
Lib/Bio/Phylo/Applications/_Raxml.py
|
Python
|
mit
| 20,423
|
[
"Biopython"
] |
f1b9ac7f301e0825f107e0e14ed39fb0684b4b6976108c4682c35883117fe1ae
|
"""
Some portions adapted from https://github.com/warner/python-ecdsa/ Copyright (c) 2010 Brian Warner
who granted its use under this license:
Permission is hereby granted, free of charge, to any person
obtaining a copy of this software and associated documentation
files (the "Software"), to deal in the Software without
restriction, including without limitation the rights to use,
copy, modify, merge, publish, distribute, sublicense, and/or sell
copies of the Software, and to permit persons to whom the
Software is furnished to do so, subject to the following
conditions:
The above copyright notice and this permission notice shall be
included in all copies or substantial portions of the Software.
THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND,
EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES
OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND
NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT
HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY,
WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING
FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR
OTHER DEALINGS IN THE SOFTWARE.
Portions written in 2005 by Peter Pearson and placed in the public domain.
"""
import hashlib
import hmac
from . import intbytes
from . import ellipticcurve, numbertheory
# Portable bit_length: use the builtin int method where available (Python
# >= 2.7 / 3.x); otherwise fall back to counting binary digits by hand.
if hasattr(1, "bit_length"):
    def bit_length(v):
        return v.bit_length()
else:
    def bit_length(n):
        # Compatibility shim for Python < 2.7, where ints lack .bit_length().
        # https://docs.python.org/3.5/library/stdtypes.html#int.bit_length
        digits = bin(n)                 # binary representation: bin(-37) --> '-0b100101'
        digits = digits.lstrip('-0b')   # drop the sign and the '0b' prefix
        return len(digits)              # len('100101') --> 6
def deterministic_generate_k(generator_order, secret_exponent, val, hash_f=hashlib.sha256):
    """
    Generate K value according to https://tools.ietf.org/html/rfc6979
    """
    n = generator_order
    # octet length of the curve order; all fixed-width byte strings below use it
    order_size = (bit_length(n) + 7) // 8
    hash_size = hash_f().digest_size
    # RFC 6979 steps b/c: V = 0x01...01 and K = 0x00...00, one byte per hash octet
    v = b'\x01' * hash_size
    k = b'\x00' * hash_size
    priv = intbytes.to_bytes(secret_exponent, length=order_size)
    # bits2int: keep only the leftmost bit_length(n) bits of the message hash
    shift = 8 * hash_size - bit_length(n)
    if shift > 0:
        val >>= shift
    # bits2octets reduction. NOTE(review): RFC 6979 reduces when val >= n;
    # this uses '>', so val == n would pass through unreduced -- confirm
    # upstream intent (the probability of hitting it is negligible).
    if val > n:
        val -= n
    h1 = intbytes.to_bytes(val, length=order_size)
    # RFC 6979 steps d-g: fold the private key and message hash into K and V
    k = hmac.new(k, v + b'\x00' + priv + h1, hash_f).digest()
    v = hmac.new(k, v, hash_f).digest()
    k = hmac.new(k, v + b'\x01' + priv + h1, hash_f).digest()
    v = hmac.new(k, v, hash_f).digest()
    # RFC 6979 step h: squeeze out candidate nonces until one lies in [1, n-1]
    while 1:
        t = bytearray()
        while len(t) < order_size:
            v = hmac.new(k, v, hash_f).digest()
            t.extend(v)
        k1 = intbytes.from_bytes(bytes(t))
        # again discard all but the leftmost bit_length(n) bits
        k1 >>= (len(t)*8 - bit_length(n))
        if k1 >= 1 and k1 < n:
            return k1
        # candidate out of range: stir K and V and retry
        k = hmac.new(k, v + b'\x00', hash_f).digest()
        v = hmac.new(k, v, hash_f).digest()
def sign(generator, secret_exponent, val):
    """Return an ECDSA signature (r, s) of the hashed value *val*.

    The nonce is derived deterministically per RFC 6979 via
    deterministic_generate_k, so signing the same (secret_exponent, val)
    pair always produces the same signature, and no external randomness is
    required.  An unpredictable nonce is vital for ECDSA: an attacker who
    can guess it -- or even learn a few of its bits across signatures --
    can recover the private key.

    May raise RuntimeError in the astronomically unlikely case that r or s
    comes out zero, in which case signing should simply be retried.
    """
    curve_order = generator.order()
    nonce = deterministic_generate_k(curve_order, secret_exponent, val)
    point = nonce * generator
    r = point.x()
    if r == 0:
        raise RuntimeError("amazingly unlucky random number r")
    nonce_inv = numbertheory.inverse_mod(nonce, curve_order)
    s = (nonce_inv * (val + (secret_exponent * r) % curve_order)) % curve_order
    if s == 0:
        raise RuntimeError("amazingly unlucky random number s")
    return (r, s)
def public_pair_for_secret_exponent(generator, secret_exponent):
    """Return the public key (x, y) pair corresponding to *secret_exponent*."""
    public_point = generator * secret_exponent
    return public_point.pair()
def public_pair_for_x(generator, x, is_even):
    """Recover the full public (x, y) pair from an x coordinate and a parity bit.

    Solves y^2 = x^3 + a*x + b (mod p) for y, then picks the root whose
    parity matches *is_even*.
    """
    curve = generator.curve()
    p = curve.p()
    rhs = (pow(x, 3, p) + curve.a() * x + curve.b()) % p
    root = numbertheory.modular_sqrt(rhs, p)
    # modular_sqrt returns one of the two roots; flip to p - root when its
    # parity disagrees with the requested one
    y = p - root if is_even == bool(root & 1) else root
    return (x, y)
def is_public_pair_valid(generator, public_pair):
    """Return True when *public_pair* is a point on the generator's curve."""
    x, y = public_pair
    return generator.curve().contains_point(x, y)
def verify(generator, public_pair, val, signature):
    """
    Verify that signature is a valid signature of hash.
    Return True if the signature is valid.
    """
    # Implements the verification procedure from X9.62 J.3.1.
    n = generator.order()
    r, s = signature
    # both signature components must lie in [1, n-1]
    if not (1 <= r <= n - 1):
        return False
    if not (1 <= s <= n - 1):
        return False
    s_inv = numbertheory.inverse_mod(s, n)
    u1 = (val * s_inv) % n
    u2 = (r * s_inv) % n
    pub_point = ellipticcurve.Point(
        generator.curve(), public_pair[0], public_pair[1], generator.order())
    candidate = u1 * generator + u2 * pub_point
    return candidate.x() % n == r
def possible_public_pairs_for_signature(generator, value, signature):
    """Recover the set of public keys that could have produced *signature*.

    See http://www.secg.org/download/aid-780/sec1-v2.pdf for the math.
    Returns a set of (x, y) pairs; each candidate is cross-checked with
    verify() before being included.
    """
    curve = generator.curve()
    order = generator.order()
    p = curve.p()
    r, s = signature
    # 1.1: r is the x coordinate of the (unknown) nonce point
    inv_r = numbertheory.inverse_mod(r, order)
    minus_e = -value % order
    x = r
    # 1.3: solve the curve equation for the two possible y coordinates
    alpha = (pow(x, 3, p) + curve.a() * x + curve.b()) % p
    beta = numbertheory.modular_sqrt(alpha, p)
    candidates = set()
    for y in (beta, p - beta):
        # 1.4: the Point constructor checks that order * R is at infinity
        R = ellipticcurve.Point(curve, x, y, order)
        # 1.6: compute Q = r^-1 (sR - eG)
        Q = inv_r * (s * R + minus_e * generator)
        pair = (Q.x(), Q.y())
        # keep only candidates that actually verify the signature
        if verify(generator, pair, value, signature):
            candidates.add(pair)
    return candidates
|
ionux/bitforge
|
bitforge/utils/ecdsa.py
|
Python
|
mit
| 6,139
|
[
"Brian"
] |
b98842fbf52c2e02706ae86ff25431b806320cfc679025c13cc643418e2432db
|
import copy
import gc
import pickle
import sys
import unittest
import weakref
from test import support
class FinalizationTest(unittest.TestCase):
    # Regression tests around generator finalization and garbage collection.
    def test_frame_resurrect(self):
        # A generator frame can be resurrected by a generator's finalization.
        def gen():
            nonlocal frame
            try:
                yield
            finally:
                # runs when the generator is finalized; stashes the frame so it
                # outlives the generator object itself
                frame = sys._getframe()
        g = gen()
        wr = weakref.ref(g)
        next(g)
        # drop the only strong reference; finalization happens during GC
        del g
        support.gc_collect()
        # the generator is gone, but its frame survived via `frame`
        self.assertIs(wr(), None)
        self.assertTrue(frame)
        del frame
        support.gc_collect()
    def test_refcycle(self):
        # A generator caught in a refcycle gets finalized anyway.
        old_garbage = gc.garbage[:]
        finalized = False
        def gen():
            nonlocal finalized
            try:
                # the generator receives a reference to itself via send(),
                # creating the cycle g -> frame -> g
                g = yield
                yield 1
            finally:
                finalized = True
        g = gen()
        next(g)
        g.send(g)
        self.assertGreater(sys.getrefcount(g), 2)
        self.assertFalse(finalized)
        del g
        support.gc_collect()
        # the cycle collector must still have run the finally block
        self.assertTrue(finalized)
        self.assertEqual(gc.garbage, old_garbage)
    def test_lambda_generator(self):
        # Issue #23192: Test that a lambda returning a generator behaves
        # like the equivalent function
        f = lambda: (yield 1)
        def g(): return (yield 1)
        # test 'yield from'
        f2 = lambda: (yield from g())
        def g2(): return (yield from g())
        f3 = lambda: (yield from f())
        def g3(): return (yield from f())
        for gen_fun in (f, g, f2, g2, f3, g3):
            gen = gen_fun()
            self.assertEqual(next(gen), 1)
            with self.assertRaises(StopIteration) as cm:
                gen.send(2)
            # the sent value becomes the generator's return value
            self.assertEqual(cm.exception.value, 2)
class GeneratorTest(unittest.TestCase):
    # Generator objects expose no state-copying protocol: both copying and
    # pickling must fail.
    def test_copy(self):
        def make():
            yield 1
        with self.assertRaises(TypeError):
            copy.copy(make())
    def test_pickle(self):
        def make():
            yield 1
        gen = make()
        for proto in range(pickle.HIGHEST_PROTOCOL + 1):
            with self.assertRaises((TypeError, pickle.PicklingError)):
                pickle.dumps(gen, proto)
class ExceptionTest(unittest.TestCase):
    # Tests for the issue #23353: check that the currently handled exception
    # is correctly saved/restored in PyEval_EvalFrameEx().
    def test_except_throw(self):
        def store_raise_exc_generator():
            try:
                # no exception is being handled when the generator starts
                self.assertEqual(sys.exc_info()[0], None)
                yield
            except Exception as exc:
                # exception raised by gen.throw(exc)
                self.assertEqual(sys.exc_info()[0], ValueError)
                self.assertIsNone(exc.__context__)
                yield
                # ensure that the exception is not lost
                self.assertEqual(sys.exc_info()[0], ValueError)
                yield
                # we should be able to raise back the ValueError
                raise
        make = store_raise_exc_generator()
        next(make)
        try:
            raise ValueError()
        except Exception as exc:
            try:
                make.throw(exc)
            except Exception:
                pass
        next(make)
        with self.assertRaises(ValueError) as cm:
            next(make)
        self.assertIsNone(cm.exception.__context__)
        # the caller's exception state must be clean again
        self.assertEqual(sys.exc_info(), (None, None, None))
    def test_except_next(self):
        def gen():
            # the generator resumed inside an except block must see the
            # caller's currently handled exception
            self.assertEqual(sys.exc_info()[0], ValueError)
            yield "done"
        g = gen()
        try:
            raise ValueError
        except Exception:
            self.assertEqual(next(g), "done")
        self.assertEqual(sys.exc_info(), (None, None, None))
    def test_except_gen_except(self):
        def gen():
            try:
                self.assertEqual(sys.exc_info()[0], None)
                yield
                # we are called from "except ValueError:", TypeError must
                # inherit ValueError in its context
                raise TypeError()
            except TypeError as exc:
                self.assertEqual(sys.exc_info()[0], TypeError)
                self.assertEqual(type(exc.__context__), ValueError)
            # here we are still called from the "except ValueError:"
            self.assertEqual(sys.exc_info()[0], ValueError)
            yield
            # after suspension the generator's own handled exception is gone
            self.assertIsNone(sys.exc_info()[0])
            yield "done"
        g = gen()
        next(g)
        try:
            raise ValueError
        except Exception:
            next(g)
        self.assertEqual(next(g), "done")
        self.assertEqual(sys.exc_info(), (None, None, None))
    def test_except_throw_exception_context(self):
        def gen():
            try:
                try:
                    self.assertEqual(sys.exc_info()[0], None)
                    yield
                except ValueError:
                    # we are called from "except ValueError:"
                    self.assertEqual(sys.exc_info()[0], ValueError)
                    raise TypeError()
            except Exception as exc:
                self.assertEqual(sys.exc_info()[0], TypeError)
                self.assertEqual(type(exc.__context__), ValueError)
            # we are still called from "except ValueError:"
            self.assertEqual(sys.exc_info()[0], ValueError)
            yield
            self.assertIsNone(sys.exc_info()[0])
            yield "done"
        g = gen()
        next(g)
        try:
            raise ValueError
        except Exception as exc:
            g.throw(exc)
        self.assertEqual(next(g), "done")
        self.assertEqual(sys.exc_info(), (None, None, None))
# Doctest text (run via doctest, not unittest): generator examples adapted
# from the Python tutorial.  The string content is executable test data --
# do not edit it casually.
tutorial_tests = """
Let's try a simple generator:
>>> def f():
... yield 1
... yield 2
>>> for i in f():
... print(i)
1
2
>>> g = f()
>>> next(g)
1
>>> next(g)
2
"Falling off the end" stops the generator:
>>> next(g)
Traceback (most recent call last):
File "<stdin>", line 1, in ?
File "<stdin>", line 2, in g
StopIteration
"return" also stops the generator:
>>> def f():
... yield 1
... return
... yield 2 # never reached
...
>>> g = f()
>>> next(g)
1
>>> next(g)
Traceback (most recent call last):
File "<stdin>", line 1, in ?
File "<stdin>", line 3, in f
StopIteration
>>> next(g) # once stopped, can't be resumed
Traceback (most recent call last):
File "<stdin>", line 1, in ?
StopIteration
"raise StopIteration" stops the generator too:
>>> def f():
... yield 1
... raise StopIteration
... yield 2 # never reached
...
>>> g = f()
>>> next(g)
1
>>> next(g)
Traceback (most recent call last):
File "<stdin>", line 1, in ?
StopIteration
>>> next(g)
Traceback (most recent call last):
File "<stdin>", line 1, in ?
StopIteration
However, they are not exactly equivalent:
>>> def g1():
... try:
... return
... except:
... yield 1
...
>>> list(g1())
[]
>>> def g2():
... try:
... raise StopIteration
... except:
... yield 42
>>> print(list(g2()))
[42]
This may be surprising at first:
>>> def g3():
... try:
... return
... finally:
... yield 1
...
>>> list(g3())
[1]
Let's create an alternate range() function implemented as a generator:
>>> def yrange(n):
... for i in range(n):
... yield i
...
>>> list(yrange(5))
[0, 1, 2, 3, 4]
Generators always return to the most recent caller:
>>> def creator():
... r = yrange(5)
... print("creator", next(r))
... return r
...
>>> def caller():
... r = creator()
... for i in r:
... print("caller", i)
...
>>> caller()
creator 0
caller 1
caller 2
caller 3
caller 4
Generators can call other generators:
>>> def zrange(n):
... for i in yrange(n):
... yield i
...
>>> list(zrange(5))
[0, 1, 2, 3, 4]
"""
# The examples from PEP 255.
pep_tests = """
Specification: Yield
Restriction: A generator cannot be resumed while it is actively
running:
>>> def g():
... i = next(me)
... yield i
>>> me = g()
>>> next(me)
Traceback (most recent call last):
...
File "<string>", line 2, in g
ValueError: generator already executing
Specification: Return
Note that return isn't always equivalent to raising StopIteration: the
difference lies in how enclosing try/except constructs are treated.
For example,
>>> def f1():
... try:
... return
... except:
... yield 1
>>> print(list(f1()))
[]
because, as in any function, return simply exits, but
>>> def f2():
... try:
... raise StopIteration
... except:
... yield 42
>>> print(list(f2()))
[42]
because StopIteration is captured by a bare "except", as is any
exception.
Specification: Generators and Exception Propagation
>>> def f():
... return 1//0
>>> def g():
... yield f() # the zero division exception propagates
... yield 42 # and we'll never get here
>>> k = g()
>>> next(k)
Traceback (most recent call last):
File "<stdin>", line 1, in ?
File "<stdin>", line 2, in g
File "<stdin>", line 2, in f
ZeroDivisionError: integer division or modulo by zero
>>> next(k) # and the generator cannot be resumed
Traceback (most recent call last):
File "<stdin>", line 1, in ?
StopIteration
>>>
Specification: Try/Except/Finally
>>> def f():
... try:
... yield 1
... try:
... yield 2
... 1//0
... yield 3 # never get here
... except ZeroDivisionError:
... yield 4
... yield 5
... raise
... except:
... yield 6
... yield 7 # the "raise" above stops this
... except:
... yield 8
... yield 9
... try:
... x = 12
... finally:
... yield 10
... yield 11
>>> print(list(f()))
[1, 2, 4, 5, 8, 9, 10, 11]
>>>
Guido's binary tree example.
>>> # A binary tree class.
>>> class Tree:
...
... def __init__(self, label, left=None, right=None):
... self.label = label
... self.left = left
... self.right = right
...
... def __repr__(self, level=0, indent=" "):
... s = level*indent + repr(self.label)
... if self.left:
... s = s + "\\n" + self.left.__repr__(level+1, indent)
... if self.right:
... s = s + "\\n" + self.right.__repr__(level+1, indent)
... return s
...
... def __iter__(self):
... return inorder(self)
>>> # Create a Tree from a list.
>>> def tree(list):
... n = len(list)
... if n == 0:
... return []
... i = n // 2
... return Tree(list[i], tree(list[:i]), tree(list[i+1:]))
>>> # Show it off: create a tree.
>>> t = tree("ABCDEFGHIJKLMNOPQRSTUVWXYZ")
>>> # A recursive generator that generates Tree labels in in-order.
>>> def inorder(t):
... if t:
... for x in inorder(t.left):
... yield x
... yield t.label
... for x in inorder(t.right):
... yield x
>>> # Show it off: create a tree.
>>> t = tree("ABCDEFGHIJKLMNOPQRSTUVWXYZ")
>>> # Print the nodes of the tree in in-order.
>>> for x in t:
... print(' '+x, end='')
A B C D E F G H I J K L M N O P Q R S T U V W X Y Z
>>> # A non-recursive generator.
>>> def inorder(node):
... stack = []
... while node:
... while node.left:
... stack.append(node)
... node = node.left
... yield node.label
... while not node.right:
... try:
... node = stack.pop()
... except IndexError:
... return
... yield node.label
... node = node.right
>>> # Exercise the non-recursive generator.
>>> for x in t:
... print(' '+x, end='')
A B C D E F G H I J K L M N O P Q R S T U V W X Y Z
"""
# Examples from Iterator-List and Python-Dev and c.l.py.
email_tests = """
The difference between yielding None and returning it.
>>> def g():
... for i in range(3):
... yield None
... yield None
... return
>>> list(g())
[None, None, None, None]
Ensure that explicitly raising StopIteration acts like any other exception
in try/except, not like a return.
>>> def g():
... yield 1
... try:
... raise StopIteration
... except:
... yield 2
... yield 3
>>> list(g())
[1, 2, 3]
Next one was posted to c.l.py.
>>> def gcomb(x, k):
... "Generate all combinations of k elements from list x."
...
... if k > len(x):
... return
... if k == 0:
... yield []
... else:
... first, rest = x[0], x[1:]
... # A combination does or doesn't contain first.
... # If it does, the remainder is a k-1 comb of rest.
... for c in gcomb(rest, k-1):
... c.insert(0, first)
... yield c
... # If it doesn't contain first, it's a k comb of rest.
... for c in gcomb(rest, k):
... yield c
>>> seq = list(range(1, 5))
>>> for k in range(len(seq) + 2):
... print("%d-combs of %s:" % (k, seq))
... for c in gcomb(seq, k):
... print(" ", c)
0-combs of [1, 2, 3, 4]:
[]
1-combs of [1, 2, 3, 4]:
[1]
[2]
[3]
[4]
2-combs of [1, 2, 3, 4]:
[1, 2]
[1, 3]
[1, 4]
[2, 3]
[2, 4]
[3, 4]
3-combs of [1, 2, 3, 4]:
[1, 2, 3]
[1, 2, 4]
[1, 3, 4]
[2, 3, 4]
4-combs of [1, 2, 3, 4]:
[1, 2, 3, 4]
5-combs of [1, 2, 3, 4]:
From the Iterators list, about the types of these things.
>>> def g():
... yield 1
...
>>> type(g)
<class 'function'>
>>> i = g()
>>> type(i)
<class 'generator'>
>>> [s for s in dir(i) if not s.startswith('_')]
['close', 'gi_code', 'gi_frame', 'gi_running', 'send', 'throw']
>>> from test.support import HAVE_DOCSTRINGS
>>> print(i.__next__.__doc__ if HAVE_DOCSTRINGS else 'Implement next(self).')
Implement next(self).
>>> iter(i) is i
True
>>> import types
>>> isinstance(i, types.GeneratorType)
True
And more, added later.
>>> i.gi_running
0
>>> type(i.gi_frame)
<class 'frame'>
>>> i.gi_running = 42
Traceback (most recent call last):
...
AttributeError: readonly attribute
>>> def g():
... yield me.gi_running
>>> me = g()
>>> me.gi_running
0
>>> next(me)
1
>>> me.gi_running
0
A clever union-find implementation from c.l.py, due to David Eppstein.
Sent: Friday, June 29, 2001 12:16 PM
To: python-list@python.org
Subject: Re: PEP 255: Simple Generators
>>> class disjointSet:
... def __init__(self, name):
... self.name = name
... self.parent = None
... self.generator = self.generate()
...
... def generate(self):
... while not self.parent:
... yield self
... for x in self.parent.generator:
... yield x
...
... def find(self):
... return next(self.generator)
...
... def union(self, parent):
... if self.parent:
... raise ValueError("Sorry, I'm not a root!")
... self.parent = parent
...
... def __str__(self):
... return self.name
>>> names = "ABCDEFGHIJKLM"
>>> sets = [disjointSet(name) for name in names]
>>> roots = sets[:]
>>> import random
>>> gen = random.Random(42)
>>> while 1:
... for s in sets:
... print(" %s->%s" % (s, s.find()), end='')
... print()
... if len(roots) > 1:
... s1 = gen.choice(roots)
... roots.remove(s1)
... s2 = gen.choice(roots)
... s1.union(s2)
... print("merged", s1, "into", s2)
... else:
... break
A->A B->B C->C D->D E->E F->F G->G H->H I->I J->J K->K L->L M->M
merged K into B
A->A B->B C->C D->D E->E F->F G->G H->H I->I J->J K->B L->L M->M
merged A into F
A->F B->B C->C D->D E->E F->F G->G H->H I->I J->J K->B L->L M->M
merged E into F
A->F B->B C->C D->D E->F F->F G->G H->H I->I J->J K->B L->L M->M
merged D into C
A->F B->B C->C D->C E->F F->F G->G H->H I->I J->J K->B L->L M->M
merged M into C
A->F B->B C->C D->C E->F F->F G->G H->H I->I J->J K->B L->L M->C
merged J into B
A->F B->B C->C D->C E->F F->F G->G H->H I->I J->B K->B L->L M->C
merged B into C
A->F B->C C->C D->C E->F F->F G->G H->H I->I J->C K->C L->L M->C
merged F into G
A->G B->C C->C D->C E->G F->G G->G H->H I->I J->C K->C L->L M->C
merged L into C
A->G B->C C->C D->C E->G F->G G->G H->H I->I J->C K->C L->C M->C
merged G into I
A->I B->C C->C D->C E->I F->I G->I H->H I->I J->C K->C L->C M->C
merged I into H
A->H B->C C->C D->C E->H F->H G->H H->H I->H J->C K->C L->C M->C
merged C into H
A->H B->H C->H D->H E->H F->H G->H H->H I->H J->H K->H L->H M->H
"""
# Emacs turd '
# Fun tests (for sufficiently warped notions of "fun").
fun_tests = """
Build up to a recursive Sieve of Eratosthenes generator.
>>> def firstn(g, n):
... return [next(g) for i in range(n)]
>>> def intsfrom(i):
... while 1:
... yield i
... i += 1
>>> firstn(intsfrom(5), 7)
[5, 6, 7, 8, 9, 10, 11]
>>> def exclude_multiples(n, ints):
... for i in ints:
... if i % n:
... yield i
>>> firstn(exclude_multiples(3, intsfrom(1)), 6)
[1, 2, 4, 5, 7, 8]
>>> def sieve(ints):
... prime = next(ints)
... yield prime
... not_divisible_by_prime = exclude_multiples(prime, ints)
... for p in sieve(not_divisible_by_prime):
... yield p
>>> primes = sieve(intsfrom(2))
>>> firstn(primes, 20)
[2, 3, 5, 7, 11, 13, 17, 19, 23, 29, 31, 37, 41, 43, 47, 53, 59, 61, 67, 71]
Another famous problem: generate all integers of the form
2**i * 3**j * 5**k
in increasing order, where i,j,k >= 0. Trickier than it may look at first!
Try writing it without generators, and correctly, and without generating
3 internal results for each result output.
>>> def times(n, g):
... for i in g:
... yield n * i
>>> firstn(times(10, intsfrom(1)), 10)
[10, 20, 30, 40, 50, 60, 70, 80, 90, 100]
>>> def merge(g, h):
... ng = next(g)
... nh = next(h)
... while 1:
... if ng < nh:
... yield ng
... ng = next(g)
... elif ng > nh:
... yield nh
... nh = next(h)
... else:
... yield ng
... ng = next(g)
... nh = next(h)
The following works, but is doing a whale of a lot of redundant work --
it's not clear how to get the internal uses of m235 to share a single
generator. Note that me_times2 (etc) each need to see every element in the
result sequence. So this is an example where lazy lists are more natural
(you can look at the head of a lazy list any number of times).
>>> def m235():
... yield 1
... me_times2 = times(2, m235())
... me_times3 = times(3, m235())
... me_times5 = times(5, m235())
... for i in merge(merge(me_times2,
... me_times3),
... me_times5):
... yield i
Don't print "too many" of these -- the implementation above is extremely
inefficient: each call of m235() leads to 3 recursive calls, and in
turn each of those 3 more, and so on, and so on, until we've descended
enough levels to satisfy the print stmts. Very odd: when I printed 5
lines of results below, this managed to screw up Win98's malloc in "the
usual" way, i.e. the heap grew over 4Mb so Win98 started fragmenting
address space, and it *looked* like a very slow leak.
>>> result = m235()
>>> for i in range(3):
... print(firstn(result, 15))
[1, 2, 3, 4, 5, 6, 8, 9, 10, 12, 15, 16, 18, 20, 24]
[25, 27, 30, 32, 36, 40, 45, 48, 50, 54, 60, 64, 72, 75, 80]
[81, 90, 96, 100, 108, 120, 125, 128, 135, 144, 150, 160, 162, 180, 192]
Heh. Here's one way to get a shared list, complete with an excruciating
namespace renaming trick. The *pretty* part is that the times() and merge()
functions can be reused as-is, because they only assume their stream
arguments are iterable -- a LazyList is the same as a generator to times().
>>> class LazyList:
... def __init__(self, g):
... self.sofar = []
... self.fetch = g.__next__
...
... def __getitem__(self, i):
... sofar, fetch = self.sofar, self.fetch
... while i >= len(sofar):
... sofar.append(fetch())
... return sofar[i]
>>> def m235():
... yield 1
... # Gack: m235 below actually refers to a LazyList.
... me_times2 = times(2, m235)
... me_times3 = times(3, m235)
... me_times5 = times(5, m235)
... for i in merge(merge(me_times2,
... me_times3),
... me_times5):
... yield i
Print as many of these as you like -- *this* implementation is memory-
efficient.
>>> m235 = LazyList(m235())
>>> for i in range(5):
... print([m235[j] for j in range(15*i, 15*(i+1))])
[1, 2, 3, 4, 5, 6, 8, 9, 10, 12, 15, 16, 18, 20, 24]
[25, 27, 30, 32, 36, 40, 45, 48, 50, 54, 60, 64, 72, 75, 80]
[81, 90, 96, 100, 108, 120, 125, 128, 135, 144, 150, 160, 162, 180, 192]
[200, 216, 225, 240, 243, 250, 256, 270, 288, 300, 320, 324, 360, 375, 384]
[400, 405, 432, 450, 480, 486, 500, 512, 540, 576, 600, 625, 640, 648, 675]
Ye olde Fibonacci generator, LazyList style.
>>> def fibgen(a, b):
...
... def sum(g, h):
... while 1:
... yield next(g) + next(h)
...
... def tail(g):
... next(g) # throw first away
... for x in g:
... yield x
...
... yield a
... yield b
... for s in sum(iter(fib),
... tail(iter(fib))):
... yield s
>>> fib = LazyList(fibgen(1, 2))
>>> firstn(iter(fib), 17)
[1, 2, 3, 5, 8, 13, 21, 34, 55, 89, 144, 233, 377, 610, 987, 1597, 2584]
Running after your tail with itertools.tee (new in version 2.4)
The algorithms "m235" (Hamming) and Fibonacci presented above are both
examples of a whole family of FP (functional programming) algorithms
where a function produces and returns a list while the production algorithm
supposes the list to be already produced by recursively calling itself.
For these algorithms to work, they must:
- produce at least a first element without presupposing the existence of
the rest of the list
- produce their elements in a lazy manner
To work efficiently, the beginning of the list must not be recomputed over
and over again. This is ensured in most FP languages as a built-in feature.
In python, we have to explicitly maintain a list of already computed results
and abandon genuine recursivity.
This is what had been attempted above with the LazyList class. One problem
with that class is that it keeps a list of all of the generated results and
therefore continually grows. This partially defeats the goal of the generator
concept, viz. produce the results only as needed instead of producing them
all and thereby wasting memory.
Thanks to itertools.tee, it is now clear "how to get the internal uses of
m235 to share a single generator".
>>> from itertools import tee
>>> def m235():
... def _m235():
... yield 1
... for n in merge(times(2, m2),
... merge(times(3, m3),
... times(5, m5))):
... yield n
... m1 = _m235()
... m2, m3, m5, mRes = tee(m1, 4)
... return mRes
>>> it = m235()
>>> for i in range(5):
... print(firstn(it, 15))
[1, 2, 3, 4, 5, 6, 8, 9, 10, 12, 15, 16, 18, 20, 24]
[25, 27, 30, 32, 36, 40, 45, 48, 50, 54, 60, 64, 72, 75, 80]
[81, 90, 96, 100, 108, 120, 125, 128, 135, 144, 150, 160, 162, 180, 192]
[200, 216, 225, 240, 243, 250, 256, 270, 288, 300, 320, 324, 360, 375, 384]
[400, 405, 432, 450, 480, 486, 500, 512, 540, 576, 600, 625, 640, 648, 675]
The "tee" function does just what we want. It internally keeps a generated
result for as long as it has not been "consumed" from all of the duplicated
iterators, whereupon it is deleted. You can therefore print the hamming
sequence during hours without increasing memory usage, or very little.
The beauty of it is that recursive running-after-their-tail FP algorithms
are quite straightforwardly expressed with this Python idiom.
Ye olde Fibonacci generator, tee style.
>>> def fib():
...
... def _isum(g, h):
... while 1:
... yield next(g) + next(h)
...
... def _fib():
... yield 1
... yield 2
... next(fibTail) # throw first away
... for res in _isum(fibHead, fibTail):
... yield res
...
... realfib = _fib()
... fibHead, fibTail, fibRes = tee(realfib, 3)
... return fibRes
>>> firstn(fib(), 17)
[1, 2, 3, 5, 8, 13, 21, 34, 55, 89, 144, 233, 377, 610, 987, 1597, 2584]
"""
# syntax_tests mostly provokes SyntaxErrors. Also fiddling with #if 0
# hackery.
syntax_tests = """
These are fine:
>>> def f():
... yield 1
... return
>>> def f():
... try:
... yield 1
... finally:
... pass
>>> def f():
... try:
... try:
... 1//0
... except ZeroDivisionError:
... yield 666
... except:
... pass
... finally:
... pass
>>> def f():
... try:
... try:
... yield 12
... 1//0
... except ZeroDivisionError:
... yield 666
... except:
... try:
... x = 12
... finally:
... yield 12
... except:
... return
>>> list(f())
[12, 666]
>>> def f():
... yield
>>> type(f())
<class 'generator'>
>>> def f():
... if 0:
... yield
>>> type(f())
<class 'generator'>
>>> def f():
... if 0:
... yield 1
>>> type(f())
<class 'generator'>
>>> def f():
... if "":
... yield None
>>> type(f())
<class 'generator'>
>>> def f():
... return
... try:
... if x==4:
... pass
... elif 0:
... try:
... 1//0
... except SyntaxError:
... pass
... else:
... if 0:
... while 12:
... x += 1
... yield 2 # don't blink
... f(a, b, c, d, e)
... else:
... pass
... except:
... x = 1
... return
>>> type(f())
<class 'generator'>
>>> def f():
... if 0:
... def g():
... yield 1
...
>>> type(f())
<class 'NoneType'>
>>> def f():
... if 0:
... class C:
... def __init__(self):
... yield 1
... def f(self):
... yield 2
>>> type(f())
<class 'NoneType'>
>>> def f():
... if 0:
... return
... if 0:
... yield 2
>>> type(f())
<class 'generator'>
This one caused a crash (see SF bug 567538):
>>> def f():
... for i in range(3):
... try:
... continue
... finally:
... yield i
...
>>> g = f()
>>> print(next(g))
0
>>> print(next(g))
1
>>> print(next(g))
2
>>> print(next(g))
Traceback (most recent call last):
StopIteration
Test the gi_code attribute
>>> def f():
... yield 5
...
>>> g = f()
>>> g.gi_code is f.__code__
True
>>> next(g)
5
>>> next(g)
Traceback (most recent call last):
StopIteration
>>> g.gi_code is f.__code__
True
Test the __name__ attribute and the repr()
>>> def f():
... yield 5
...
>>> g = f()
>>> g.__name__
'f'
>>> repr(g) # doctest: +ELLIPSIS
'<generator object f at ...>'
Lambdas shouldn't have their usual return behavior.
>>> x = lambda: (yield 1)
>>> list(x())
[1]
>>> x = lambda: ((yield 1), (yield 2))
>>> list(x())
[1, 2]
"""
# conjoin is a simple backtracking generator, named in honor of Icon's
# "conjunction" control structure. Pass a list of no-argument functions
# that return iterable objects. Easiest to explain by example: assume the
# function list [x, y, z] is passed. Then conjoin acts like:
#
# def g():
# values = [None] * 3
# for values[0] in x():
# for values[1] in y():
# for values[2] in z():
# yield values
#
# So some 3-lists of values *may* be generated, each time we successfully
# get into the innermost loop. If an iterator fails (is exhausted) before
# then, it "backtracks" to get the next value from the nearest enclosing
# iterator (the one "to the left"), and starts all over again at the next
# slot (pumps a fresh iterator). Of course this is most useful when the
# iterators have side-effects, so that which values *can* be generated at
# each slot depend on the values iterated at previous slots.
def simple_conjoin(gs):
    """Yield each element of the cross product of the iterables produced
    by calling the no-argument functions in `gs`.

    The same list object is yielded every time, with slot i holding the
    value currently bound for gs[i]; callers must copy it (e.g. `row[:]`)
    to keep a snapshot.
    """
    slots = [None] * len(gs)

    def descend(depth):
        if depth >= len(gs):
            # All slots are filled: hand the complete assignment out.
            yield slots
        else:
            # Bind each value for this slot directly into the shared
            # list, then recurse to fill the slots to its right.
            for slots[depth] in gs[depth]():
                for row in descend(depth + 1):
                    yield row

    for row in descend(0):
        yield row
# That works fine, but recursing a level and checking i against len(gs) for
# each item produced is inefficient. By doing manual loop unrolling across
# generator boundaries, it's possible to eliminate most of that overhead.
# This isn't worth the bother *in general* for generators, but conjoin() is
# a core building block for some CPU-intensive generator applications.
def conjoin(gs):
    """Backtracking cross-product generator (see simple_conjoin).

    Same contract as simple_conjoin -- the shared `values` list is
    yielded once per combination -- but the loop nests are peeled off
    three at a time, so most items are produced without re-testing the
    recursion base case on every level.
    """
    count = len(gs)
    values = [None] * count

    # Peel single loop nests until the number remaining is divisible
    # by 3, then hand off to the unrolled helper.
    def walk(pos):
        if pos >= count:
            yield values
        elif (count - pos) % 3 != 0:
            nxt = pos + 1
            for values[pos] in gs[pos]():
                for item in walk(nxt):
                    yield item
        else:
            for item in triple(pos):
                yield item

    # Run three loop nests at once, recursing only while at least three
    # more remain.  Internal helper for walk(); precondition: the number
    # of remaining nests is a positive multiple of 3.
    def triple(pos):
        assert pos < count and (count - pos) % 3 == 0
        p1, p2, p3 = pos + 1, pos + 2, pos + 3
        ga, gb, gc = gs[pos:p3]
        if p3 >= count:
            # Exactly three nests left, so yield the values directly.
            for values[pos] in ga():
                for values[p1] in gb():
                    for values[p2] in gc():
                        yield values
        else:
            # Six or more nests remain; peel off three and recurse.
            for values[pos] in ga():
                for values[p1] in gb():
                    for values[p2] in gc():
                        for item in triple(p3):
                            yield item

    for item in walk(0):
        yield item
# And one more approach: For backtracking apps like the Knight's Tour
# solver below, the number of backtracking levels can be enormous (one
# level per square, for the Knight's Tour, so that e.g. a 100x100 board
# needs 10,000 levels). In such cases Python is likely to run out of
# stack space due to recursion. So here's a recursion-free version of
# conjoin too.
# NOTE WELL: This allows large problems to be solved with only trivial
# demands on stack space. Without explicitly resumable generators, this is
# much harder to achieve. OTOH, this is much slower (up to a factor of 2)
# than the fancy unrolled recursive conjoin.
def flat_conjoin(gs):  # rename to conjoin to run tests with this instead
    """Non-recursive version of conjoin().

    Same contract (yields a shared `values` list once per element of the
    cross product), but the stack of live iterators is managed in an
    explicit list, so the backtracking depth is not limited by Python's
    recursion limit.  Slower than the unrolled recursive conjoin.
    """
    n = len(gs)
    values = [None] * n
    # iters[i] caches the bound __next__ of the iterator currently live
    # at nesting level i.
    iters = [None] * n
    _StopIteration = StopIteration  # make local because caught a *lot*
    i = 0
    while 1:
        # Descend.
        try:
            # Pump a fresh iterator for each level from i to the end; an
            # empty one raises StopIteration and aborts the descent.
            while i < n:
                it = iters[i] = gs[i]().__next__
                values[i] = it()
                i += 1
        except _StopIteration:
            pass
        else:
            # Every level produced a value: a full assignment is ready.
            assert i == n
            yield values
        # Backtrack until an older iterator can be resumed.
        i -= 1
        while i >= 0:
            try:
                values[i] = iters[i]()
                # Success!  Start fresh at next level.
                i += 1
                break
            except _StopIteration:
                # Continue backtracking.
                i -= 1
        else:
            # Even the outermost iterator is exhausted -- all done.
            assert i < 0
            break
# A conjoin-based N-Queens solver.
class Queens:
    """Conjoin-based N-Queens solver.

    solve() yields one row->column placement list per solution; the
    shared bitmask self.used tracks which columns/diagonals are taken.
    """

    def __init__(self, n):
        self.n = n
        rangen = range(n)

        # Assign a unique int to each column and diagonal.
        # columns: n of those, range(n).
        # NW-SE diagonals: 2n-1 of these, i-j unique and invariant along
        # each, smallest i-j is 0-(n-1) = 1-n, so add n-1 to shift to 0-
        # based.
        # NE-SW diagonals: 2n-1 of these, i+j unique and invariant along
        # each, smallest i+j is 0, largest is 2n-2.

        # For each square, compute a bit vector of the columns and
        # diagonals it covers, and for each row compute a function that
        # generates the possibilities for the columns in that row.
        self.rowgenerators = []
        for i in rangen:
            rowuses = [(1 << j) |                  # column ordinal
                       (1 << (n + i-j + n-1)) |    # NW-SE ordinal
                       (1 << (n + 2*n-1 + i+j))    # NE-SW ordinal
                       for j in rangen]

            # Bind this row's usage vector via the default argument, so
            # each generator sees its own rowuses rather than the last
            # value of the loop variable.
            def rowgen(rowuses=rowuses):
                for j in rangen:
                    uses = rowuses[j]
                    if uses & self.used == 0:
                        self.used |= uses
                        yield j
                        # Undo on backtrack so the square is free again.
                        self.used &= ~uses

            self.rowgenerators.append(rowgen)

    # Generate solutions.
    def solve(self):
        """Yield each solution as a list mapping row index -> column."""
        self.used = 0
        for row2col in conjoin(self.rowgenerators):
            yield row2col

    def printsolution(self, row2col):
        """Pretty-print one solution as an ASCII chessboard."""
        n = self.n
        assert n == len(row2col)
        sep = "+" + "-+" * n
        print(sep)
        for i in range(n):
            squares = [" " for j in range(n)]
            squares[row2col[i]] = "Q"
            print("|" + "|".join(squares) + "|")
        print(sep)
# A conjoin-based Knight's Tour solver. This is pretty sophisticated
# (e.g., when used with flat_conjoin above, and passing hard=1 to the
# constructor, a 200x200 Knight's Tour was found quickly -- note that we're
# creating 10s of thousands of generators then!), and is lengthy.
class Knights:
    """Conjoin-based Knight's Tour solver (closed tours on an m x n board).

    solve() yields one tour per solution as a list of square indices.
    Pass hard=1 to use the more expensive move-ordering heuristic.
    """

    def __init__(self, m, n, hard=0):
        self.m, self.n = m, n

        # solve() will set up succs[i] to be a list of square #i's
        # successors.
        succs = self.succs = []

        # Remove i0 from each of its successor's successor lists, i.e.
        # successors can't go back to i0 again.  Return 0 if we can
        # detect this makes a solution impossible, else return 1.
        def remove_from_successors(i0, len=len):
            # If we remove all exits from a free square, we're dead:
            # even if we move to it next, we can't leave it again.
            # If we create a square with one exit, we must visit it next;
            # else somebody else will have to visit it, and since there's
            # only one adjacent, there won't be a way to leave it again.
            # Finally, if we create more than one free square with a
            # single exit, we can only move to one of them next, leaving
            # the other one a dead end.
            ne0 = ne1 = 0
            for i in succs[i0]:
                s = succs[i]
                s.remove(i0)
                e = len(s)
                if e == 0:
                    ne0 += 1
                elif e == 1:
                    ne1 += 1
            return ne0 == 0 and ne1 < 2

        # Put i0 back in each of its successor's successor lists.
        def add_to_successors(i0):
            for i in succs[i0]:
                succs[i].append(i0)

        # Generate the first move.
        def first():
            if m < 1 or n < 1:
                return
            # Since we're looking for a cycle, it doesn't matter where we
            # start.  Starting in a corner makes the 2nd move easy.
            corner = self.coords2index(0, 0)
            remove_from_successors(corner)
            self.lastij = corner
            yield corner
            # Undo on backtrack (restores state for the next attempt).
            add_to_successors(corner)

        # Generate the second moves.
        def second():
            corner = self.coords2index(0, 0)
            assert self.lastij == corner  # i.e., we started in the corner
            if m < 3 or n < 3:
                return
            assert len(succs[corner]) == 2
            assert self.coords2index(1, 2) in succs[corner]
            assert self.coords2index(2, 1) in succs[corner]
            # Only two choices.  Whichever we pick, the other must be the
            # square picked on move m*n, as it's the only way to get back
            # to (0, 0).  Save its index in self.final so that moves before
            # the last know it must be kept free.
            for i, j in (1, 2), (2, 1):
                this = self.coords2index(i, j)
                final = self.coords2index(3-i, 3-j)
                self.final = final

                remove_from_successors(this)
                succs[final].append(corner)
                self.lastij = this
                yield this
                # Undo both mutations on backtrack.
                succs[final].remove(corner)
                add_to_successors(this)

        # Generate moves 3 thru m*n-1.
        def advance(len=len):
            # If some successor has only one exit, must take it.
            # Else favor successors with fewer exits.
            candidates = []
            for i in succs[self.lastij]:
                e = len(succs[i])
                assert e > 0, "else remove_from_successors() pruning flawed"
                if e == 1:
                    # Forced move: drop everything else and take it.
                    candidates = [(e, i)]
                    break
                candidates.append((e, i))
            else:
                # No forced move: try fewest-exits first (Warnsdorff).
                candidates.sort()

            for e, i in candidates:
                if i != self.final:
                    if remove_from_successors(i):
                        self.lastij = i
                        yield i
                    add_to_successors(i)

        # Generate moves 3 thru m*n-1.  Alternative version using a
        # stronger (but more expensive) heuristic to order successors.
        # Since the # of backtracking levels is m*n, a poor move early on
        # can take eons to undo.  Smallest square board for which this
        # matters a lot is 52x52.
        def advance_hard(vmid=(m-1)/2.0, hmid=(n-1)/2.0, len=len):
            # If some successor has only one exit, must take it.
            # Else favor successors with fewer exits.
            # Break ties via max distance from board centerpoint (favor
            # corners and edges whenever possible).
            candidates = []
            for i in succs[self.lastij]:
                e = len(succs[i])
                assert e > 0, "else remove_from_successors() pruning flawed"
                if e == 1:
                    candidates = [(e, 0, i)]
                    break
                i1, j1 = self.index2coords(i)
                # Negate the distance so sorting ascending prefers the
                # squares farthest from the center.
                d = (i1 - vmid)**2 + (j1 - hmid)**2
                candidates.append((e, -d, i))
            else:
                candidates.sort()

            for e, d, i in candidates:
                if i != self.final:
                    if remove_from_successors(i):
                        self.lastij = i
                        yield i
                    add_to_successors(i)

        # Generate the last move.
        def last():
            assert self.final in succs[self.lastij]
            yield self.final

        # One generator per move; conjoin() backtracks across them.
        if m*n < 4:
            self.squaregenerators = [first]
        else:
            self.squaregenerators = [first, second] + \
                [hard and advance_hard or advance] * (m*n - 3) + \
                [last]

    def coords2index(self, i, j):
        """Map board coordinates (row i, column j) to a flat square index."""
        assert 0 <= i < self.m
        assert 0 <= j < self.n
        return i * self.n + j

    def index2coords(self, index):
        """Inverse of coords2index: flat index -> (row, column)."""
        assert 0 <= index < self.m * self.n
        return divmod(index, self.n)

    def _init_board(self):
        """(Re)build succs: for each square, the list of knight-move targets."""
        succs = self.succs
        del succs[:]
        m, n = self.m, self.n
        c2i = self.coords2index
        # The eight knight-move offsets, clockwise.
        offsets = [( 1, 2), ( 2, 1), ( 2, -1), ( 1, -2),
                   (-1, -2), (-2, -1), (-2, 1), (-1, 2)]
        rangen = range(n)
        for i in range(m):
            for j in rangen:
                s = [c2i(i+io, j+jo) for io, jo in offsets
                     if 0 <= i+io < m and
                        0 <= j+jo < n]
                succs.append(s)

    # Generate solutions.
    def solve(self):
        """Yield each closed tour as a list of m*n square indices."""
        self._init_board()
        for x in conjoin(self.squaregenerators):
            yield x

    def printsolution(self, x):
        """Pretty-print one tour, numbering each square by visit order."""
        m, n = self.m, self.n
        assert len(x) == m*n
        w = len(str(m*n))
        format = "%" + str(w) + "d"

        squares = [[None] * n for i in range(m)]
        k = 1
        for i in x:
            i1, j1 = self.index2coords(i)
            squares[i1][j1] = format % k
            k += 1

        sep = "+" + ("-" * w + "+") * n
        print(sep)
        for i in range(m):
            row = squares[i]
            print("|" + "|".join(row) + "|")
        print(sep)
conjoin_tests = """
Generate the 3-bit binary numbers in order. This illustrates dumbest-
possible use of conjoin, just to generate the full cross-product.
>>> for c in conjoin([lambda: iter((0, 1))] * 3):
... print(c)
[0, 0, 0]
[0, 0, 1]
[0, 1, 0]
[0, 1, 1]
[1, 0, 0]
[1, 0, 1]
[1, 1, 0]
[1, 1, 1]
For efficiency in typical backtracking apps, conjoin() yields the same list
object each time. So if you want to save away a full account of its
generated sequence, you need to copy its results.
>>> def gencopy(iterator):
... for x in iterator:
... yield x[:]
>>> for n in range(10):
... all = list(gencopy(conjoin([lambda: iter((0, 1))] * n)))
... print(n, len(all), all[0] == [0] * n, all[-1] == [1] * n)
0 1 True True
1 2 True True
2 4 True True
3 8 True True
4 16 True True
5 32 True True
6 64 True True
7 128 True True
8 256 True True
9 512 True True
And run an 8-queens solver.
>>> q = Queens(8)
>>> LIMIT = 2
>>> count = 0
>>> for row2col in q.solve():
... count += 1
... if count <= LIMIT:
... print("Solution", count)
... q.printsolution(row2col)
Solution 1
+-+-+-+-+-+-+-+-+
|Q| | | | | | | |
+-+-+-+-+-+-+-+-+
| | | | |Q| | | |
+-+-+-+-+-+-+-+-+
| | | | | | | |Q|
+-+-+-+-+-+-+-+-+
| | | | | |Q| | |
+-+-+-+-+-+-+-+-+
| | |Q| | | | | |
+-+-+-+-+-+-+-+-+
| | | | | | |Q| |
+-+-+-+-+-+-+-+-+
| |Q| | | | | | |
+-+-+-+-+-+-+-+-+
| | | |Q| | | | |
+-+-+-+-+-+-+-+-+
Solution 2
+-+-+-+-+-+-+-+-+
|Q| | | | | | | |
+-+-+-+-+-+-+-+-+
| | | | | |Q| | |
+-+-+-+-+-+-+-+-+
| | | | | | | |Q|
+-+-+-+-+-+-+-+-+
| | |Q| | | | | |
+-+-+-+-+-+-+-+-+
| | | | | | |Q| |
+-+-+-+-+-+-+-+-+
| | | |Q| | | | |
+-+-+-+-+-+-+-+-+
| |Q| | | | | | |
+-+-+-+-+-+-+-+-+
| | | | |Q| | | |
+-+-+-+-+-+-+-+-+
>>> print(count, "solutions in all.")
92 solutions in all.
And run a Knight's Tour on a 10x10 board. Note that there are about
20,000 solutions even on a 6x6 board, so don't dare run this to exhaustion.
>>> k = Knights(10, 10)
>>> LIMIT = 2
>>> count = 0
>>> for x in k.solve():
... count += 1
... if count <= LIMIT:
... print("Solution", count)
... k.printsolution(x)
... else:
... break
Solution 1
+---+---+---+---+---+---+---+---+---+---+
| 1| 58| 27| 34| 3| 40| 29| 10| 5| 8|
+---+---+---+---+---+---+---+---+---+---+
| 26| 35| 2| 57| 28| 33| 4| 7| 30| 11|
+---+---+---+---+---+---+---+---+---+---+
| 59|100| 73| 36| 41| 56| 39| 32| 9| 6|
+---+---+---+---+---+---+---+---+---+---+
| 74| 25| 60| 55| 72| 37| 42| 49| 12| 31|
+---+---+---+---+---+---+---+---+---+---+
| 61| 86| 99| 76| 63| 52| 47| 38| 43| 50|
+---+---+---+---+---+---+---+---+---+---+
| 24| 75| 62| 85| 54| 71| 64| 51| 48| 13|
+---+---+---+---+---+---+---+---+---+---+
| 87| 98| 91| 80| 77| 84| 53| 46| 65| 44|
+---+---+---+---+---+---+---+---+---+---+
| 90| 23| 88| 95| 70| 79| 68| 83| 14| 17|
+---+---+---+---+---+---+---+---+---+---+
| 97| 92| 21| 78| 81| 94| 19| 16| 45| 66|
+---+---+---+---+---+---+---+---+---+---+
| 22| 89| 96| 93| 20| 69| 82| 67| 18| 15|
+---+---+---+---+---+---+---+---+---+---+
Solution 2
+---+---+---+---+---+---+---+---+---+---+
| 1| 58| 27| 34| 3| 40| 29| 10| 5| 8|
+---+---+---+---+---+---+---+---+---+---+
| 26| 35| 2| 57| 28| 33| 4| 7| 30| 11|
+---+---+---+---+---+---+---+---+---+---+
| 59|100| 73| 36| 41| 56| 39| 32| 9| 6|
+---+---+---+---+---+---+---+---+---+---+
| 74| 25| 60| 55| 72| 37| 42| 49| 12| 31|
+---+---+---+---+---+---+---+---+---+---+
| 61| 86| 99| 76| 63| 52| 47| 38| 43| 50|
+---+---+---+---+---+---+---+---+---+---+
| 24| 75| 62| 85| 54| 71| 64| 51| 48| 13|
+---+---+---+---+---+---+---+---+---+---+
| 87| 98| 89| 80| 77| 84| 53| 46| 65| 44|
+---+---+---+---+---+---+---+---+---+---+
| 90| 23| 92| 95| 70| 79| 68| 83| 14| 17|
+---+---+---+---+---+---+---+---+---+---+
| 97| 88| 21| 78| 81| 94| 19| 16| 45| 66|
+---+---+---+---+---+---+---+---+---+---+
| 22| 91| 96| 93| 20| 69| 82| 67| 18| 15|
+---+---+---+---+---+---+---+---+---+---+
"""
weakref_tests = """\
Generators are weakly referencable:
>>> import weakref
>>> def gen():
... yield 'foo!'
...
>>> wr = weakref.ref(gen)
>>> wr() is gen
True
>>> p = weakref.proxy(gen)
Generator-iterators are weakly referencable as well:
>>> gi = gen()
>>> wr = weakref.ref(gi)
>>> wr() is gi
True
>>> p = weakref.proxy(gi)
>>> list(p)
['foo!']
"""
coroutine_tests = """\
Sending a value into a started generator:
>>> def f():
... print((yield 1))
... yield 2
>>> g = f()
>>> next(g)
1
>>> g.send(42)
42
2
Sending a value into a new generator produces a TypeError:
>>> f().send("foo")
Traceback (most recent call last):
...
TypeError: can't send non-None value to a just-started generator
Yield by itself yields None:
>>> def f(): yield
>>> list(f())
[None]
An obscene abuse of a yield expression within a generator expression:
>>> list((yield 21) for i in range(4))
[21, None, 21, None, 21, None, 21, None]
And a more sane, but still weird usage:
>>> def f(): list(i for i in [(yield 26)])
>>> type(f())
<class 'generator'>
A yield expression with augmented assignment.
>>> def coroutine(seq):
... count = 0
... while count < 200:
... count += yield
... seq.append(count)
>>> seq = []
>>> c = coroutine(seq)
>>> next(c)
>>> print(seq)
[]
>>> c.send(10)
>>> print(seq)
[10]
>>> c.send(10)
>>> print(seq)
[10, 20]
>>> c.send(10)
>>> print(seq)
[10, 20, 30]
Check some syntax errors for yield expressions:
>>> f=lambda: (yield 1),(yield 2)
Traceback (most recent call last):
...
SyntaxError: 'yield' outside function
>>> def f(): x = yield = y
Traceback (most recent call last):
...
SyntaxError: assignment to yield expression not possible
>>> def f(): (yield bar) = y
Traceback (most recent call last):
...
SyntaxError: can't assign to yield expression
>>> def f(): (yield bar) += y
Traceback (most recent call last):
...
SyntaxError: can't assign to yield expression
Now check some throw() conditions:
>>> def f():
... while True:
... try:
... print((yield))
... except ValueError as v:
... print("caught ValueError (%s)" % (v))
>>> import sys
>>> g = f()
>>> next(g)
>>> g.throw(ValueError) # type only
caught ValueError ()
>>> g.throw(ValueError("xyz")) # value only
caught ValueError (xyz)
>>> g.throw(ValueError, ValueError(1)) # value+matching type
caught ValueError (1)
>>> g.throw(ValueError, TypeError(1)) # mismatched type, rewrapped
caught ValueError (1)
>>> g.throw(ValueError, ValueError(1), None) # explicit None traceback
caught ValueError (1)
>>> g.throw(ValueError(1), "foo") # bad args
Traceback (most recent call last):
...
TypeError: instance exception may not have a separate value
>>> g.throw(ValueError, "foo", 23) # bad args
Traceback (most recent call last):
...
TypeError: throw() third argument must be a traceback object
>>> g.throw("abc")
Traceback (most recent call last):
...
TypeError: exceptions must be classes or instances deriving from BaseException, not str
>>> g.throw(0)
Traceback (most recent call last):
...
TypeError: exceptions must be classes or instances deriving from BaseException, not int
>>> g.throw(list)
Traceback (most recent call last):
...
TypeError: exceptions must be classes or instances deriving from BaseException, not type
>>> def throw(g,exc):
... try:
... raise exc
... except:
... g.throw(*sys.exc_info())
>>> throw(g,ValueError) # do it with traceback included
caught ValueError ()
>>> g.send(1)
1
>>> throw(g,TypeError) # terminate the generator
Traceback (most recent call last):
...
TypeError
>>> print(g.gi_frame)
None
>>> g.send(2)
Traceback (most recent call last):
...
StopIteration
>>> g.throw(ValueError,6) # throw on closed generator
Traceback (most recent call last):
...
ValueError: 6
>>> f().throw(ValueError,7) # throw on just-opened generator
Traceback (most recent call last):
...
ValueError: 7
Plain "raise" inside a generator should preserve the traceback (#13188).
The traceback should have 3 levels:
- g.throw()
- f()
- 1/0
>>> def f():
... try:
... yield
... except:
... raise
>>> g = f()
>>> try:
... 1/0
... except ZeroDivisionError as v:
... try:
... g.throw(v)
... except Exception as w:
... tb = w.__traceback__
>>> levels = 0
>>> while tb:
... levels += 1
... tb = tb.tb_next
>>> levels
3
Now let's try closing a generator:
>>> def f():
... try: yield
... except GeneratorExit:
... print("exiting")
>>> g = f()
>>> next(g)
>>> g.close()
exiting
>>> g.close() # should be no-op now
>>> f().close() # close on just-opened generator should be fine
>>> def f(): yield # an even simpler generator
>>> f().close() # close before opening
>>> g = f()
>>> next(g)
>>> g.close() # close normally
And finalization:
>>> def f():
... try: yield
... finally:
... print("exiting")
>>> g = f()
>>> next(g)
>>> del g
exiting
GeneratorExit is not caught by except Exception:
>>> def f():
... try: yield
... except Exception:
... print('except')
... finally:
... print('finally')
>>> g = f()
>>> next(g)
>>> del g
finally
Now let's try some ill-behaved generators:
>>> def f():
... try: yield
... except GeneratorExit:
... yield "foo!"
>>> g = f()
>>> next(g)
>>> g.close()
Traceback (most recent call last):
...
RuntimeError: generator ignored GeneratorExit
>>> g.close()
Our ill-behaved code should be invoked during GC:
>>> import sys, io
>>> old, sys.stderr = sys.stderr, io.StringIO()
>>> g = f()
>>> next(g)
>>> del g
>>> "RuntimeError: generator ignored GeneratorExit" in sys.stderr.getvalue()
True
>>> sys.stderr = old
And errors thrown during closing should propagate:
>>> def f():
... try: yield
... except GeneratorExit:
... raise TypeError("fie!")
>>> g = f()
>>> next(g)
>>> g.close()
Traceback (most recent call last):
...
TypeError: fie!
Ensure that various yield expression constructs make their
enclosing function a generator:
>>> def f(): x += yield
>>> type(f())
<class 'generator'>
>>> def f(): x = yield
>>> type(f())
<class 'generator'>
>>> def f(): lambda x=(yield): 1
>>> type(f())
<class 'generator'>
>>> def f(): x=(i for i in (yield) if (yield))
>>> type(f())
<class 'generator'>
>>> def f(d): d[(yield "a")] = d[(yield "b")] = 27
>>> data = [1,2]
>>> g = f(data)
>>> type(g)
<class 'generator'>
>>> g.send(None)
'a'
>>> data
[1, 2]
>>> g.send(0)
'b'
>>> data
[27, 2]
>>> try: g.send(1)
... except StopIteration: pass
>>> data
[27, 27]
"""
refleaks_tests = """
Prior to adding cycle-GC support to itertools.tee, this code would leak
references. We add it to the standard suite so the routine refleak-tests
would trigger if it starts being uncleanable again.
>>> import itertools
>>> def leak():
... class gen:
... def __iter__(self):
... return self
... def __next__(self):
... return self.item
... g = gen()
... head, tail = itertools.tee(g)
... g.item = head
... return head
>>> it = leak()
Make sure to also test the involvement of the tee-internal teedataobject,
which stores returned items.
>>> item = next(it)
This test leaked at one point due to generator finalization/destruction.
It was copied from Lib/test/leakers/test_generator_cycle.py before the file
was removed.
>>> def leak():
... def gen():
... while True:
... yield g
... g = gen()
>>> leak()
This test isn't really generator related, but rather exception-in-cleanup
related. The coroutine tests (above) just happen to cause an exception in
the generator's __del__ (tp_del) method. We can also test for this
explicitly, without generators. We do have to redirect stderr to avoid
printing warnings and to doublecheck that we actually tested what we wanted
to test.
>>> import sys, io
>>> old = sys.stderr
>>> try:
... sys.stderr = io.StringIO()
... class Leaker:
... def __del__(self):
... def invoke(message):
... raise RuntimeError(message)
... invoke("test")
...
... l = Leaker()
... del l
... err = sys.stderr.getvalue().strip()
... "Exception ignored in" in err
... "RuntimeError: test" in err
... "Traceback" in err
... "in invoke" in err
... finally:
... sys.stderr = old
True
True
True
True
These refleak tests should perhaps be in a testfile of their own,
test_generators just happened to be the test that drew these out.
"""
# Collected doctest suites; doctest discovers these through the
# module-level __test__ dictionary protocol.
__test__ = {"tut":      tutorial_tests,
            "pep":      pep_tests,
            "email":    email_tests,
            "fun":      fun_tests,
            "syntax":   syntax_tests,
            "conjoin":  conjoin_tests,
            "weakref":  weakref_tests,
            "coroutine": coroutine_tests,
            "refleaks": refleaks_tests,
            }
# Magic test name that regrtest.py invokes *after* importing this module.
# This worms around a bootstrap problem.
# Note that doctest and regrtest both look in sys.argv for a "-v" argument,
# so this works as expected in both ways of running regrtest.
def test_main(verbose=None):
    """Run this module's unittest and doctest suites.

    verbose is forwarded to the doctest runner; regrtest passes its own
    verbosity setting here.
    """
    from test import support, test_generators
    support.run_unittest(__name__)
    support.run_doctest(test_generators, verbose)

# This part isn't needed for regrtest, but for running the test directly.
if __name__ == "__main__":
    test_main(1)
|
technologiescollege/Blockly-rduino-communication
|
scripts_XP/Lib/test/test_generators.py
|
Python
|
gpl-3.0
| 55,570
|
[
"VisIt"
] |
ead5c366cba839c8b6fd753663fb2c67bc5c0eca8a938873c9ae16ddf71b0e37
|
from ..biotools import load_record
from ..GraphicRecord import GraphicRecord
from ..CircularGraphicRecord import CircularGraphicRecord
from ..GraphicFeature import GraphicFeature
class BiopythonTranslatorBase:
    """Base class for all BiopythonTranslators.

    Subclasses must supply the various ``compute_feature_*`` methods
    (label, color, etc.) before instances are usable; see
    BiopythonTranslator for a minimal working subclass.

    Parameters
    ----------
    features_filters
      List of filters (some_biopython_feature) => True/False.
      Only features passing all the filters are kept.
      This only works if you haven't redefined ``compute_filtered_features``

    features_properties
      A function (feature) => properties_dict
    """

    graphic_record_parameters = {}

    def __init__(self, features_filters=(), features_properties=None):
        self.features_filters = features_filters
        self.features_properties = features_properties

    def translate_feature(self, feature):
        """Translate a Biopython feature into a Dna Features Viewer feature."""
        # Collect the per-feature cosmetic attributes from the compute_*
        # hooks that subclasses provide.
        props = {
            "label": self.compute_feature_label(feature),
            "color": self.compute_feature_color(feature),
            "html": self.compute_feature_html(feature),
            "fontdict": self.compute_feature_fontdict(feature),
            "box_linewidth": self.compute_feature_box_linewidth(feature),
            "box_color": self.compute_feature_box_color(feature),
            "linewidth": self.compute_feature_linewidth(feature),
            "label_link_color": self.compute_feature_label_link_color(feature),
            "legend_text": self.compute_feature_legend_text(feature),
        }
        extra = self.features_properties
        if extra is not None:
            # features_properties may be a plain dict or a callable
            # returning a dict for the given feature.
            if hasattr(extra, '__call__'):
                extra = extra(feature)
            props.update(extra)
        return GraphicFeature(
            start=feature.location.start,
            end=feature.location.end,
            strand=feature.location.strand,
            **props
        )

    def translate_record(self, record, record_class=None):
        """Create a new GraphicRecord from a BioPython Record object.

        Parameters
        ----------
        record
          A BioPython Record object or the path to a Genbank or a GFF file.

        record_class
          The graphic record class to use, e.g. GraphicRecord (default) or
          CircularGraphicRecord. Strings 'circular' and 'linear' can also be
          provided.
        """
        known_classes = {
            None: GraphicRecord,
            "linear": GraphicRecord,
            "circular": CircularGraphicRecord,
        }
        if record_class in known_classes:
            record_class = known_classes[record_class]
        # A path or file-like handle is parsed into a record first.
        if isinstance(record, str) or hasattr(record, 'read'):
            record = load_record(record)
        kept_features = self.compute_filtered_features(record.features)
        translated = [
            self.translate_feature(feature)
            for feature in kept_features
            if feature.location is not None
        ]
        return record_class(
            sequence_length=len(record),
            sequence=str(record.seq),
            features=translated,
            **self.graphic_record_parameters
        )

    @classmethod
    def quick_class_plot(cls, record, figure_width=12, **kwargs):
        """Plot a Biopython record with default translator settings.

        Really meant for quick and dirty inspection in a Jupyter/IPython
        notebook with the "%matplotlib inline" setting.

        >>> from dna_features_viewer import BiopythonTranslator
        >>> BiopythonTranslator.quick_plot(my_record)
        """
        graphic_record = cls().translate_record(record)
        ax, _ = graphic_record.plot(figure_width=figure_width, **kwargs)
        return ax

    def quick_plot(self, record, figure_width=12, **kwargs):
        """Plot a Biopython record using this translator instance.

        Really meant for quick and dirty inspection in a Jupyter/IPython
        notebook with the "%matplotlib inline" setting.

        >>> from dna_features_viewer import BiopythonTranslator
        >>> BiopythonTranslator.quick_plot(my_record)
        """
        graphic_record = self.translate_record(record)
        ax, _ = graphic_record.plot(figure_width=figure_width, **kwargs)
        return ax
|
TAMU-CPT/galaxy-tools
|
tools/genome_viz/dna_features_viewer/BiopythonTranslator/BiopythonTranslatorBase.py
|
Python
|
gpl-3.0
| 4,482
|
[
"Biopython"
] |
3862b6989f22a0984338a2716a4a4faca0da7ea2e805be92f16557cd5bd5e1dc
|
# -*- coding: utf-8 -*-
"""
Authors: Gonzalo E. Espinoza-Dávalos
IHE Delft 2017
Contact: g.espinoza@un-ihe.org
Repository: https://github.com/gespinoza/waterpix
Module: waterpix
"""
import os
import arcpy
import netCDF4
def output_nc_to_tiffs(output_nc, output_path):
"""
Create raster files from the variables in the output netcdf file
"""
# Output folders
if not os.path.isdir(output_path):
os.mkdir(output_path)
path_y = os.path.join(output_path, 'yearly')
path_m = os.path.join(output_path, 'monthly')
path_a = os.path.join(output_path, 'additional')
if not os.path.isdir(path_y):
os.mkdir(path_y)
if not os.path.isdir(path_m):
os.mkdir(path_m)
if not os.path.isdir(path_a):
os.mkdir(path_a)
# Read netcdf file
nc_file = netCDF4.Dataset(output_nc, 'r')
variables_ls = nc_file.variables.keys()
time_y = nc_file.variables['time_yyyy'][:]
time_m = nc_file.variables['time_yyyymm'][:]
nc_file.close()
# Remove variables
for variable in ['latitude', 'longitude', 'time_yyyy', 'time_yyyymm',
'RoundCode', 'a_Y', 'b_Y', 'crs']:
variables_ls.remove(variable)
# Add sub-folders
for variable in variables_ls:
if '_Y' in variable:
if not os.path.exists(os.path.join(path_y, variable)):
os.mkdir(os.path.join(path_y, variable))
elif '_M' in variable:
if not os.path.exists(os.path.join(path_m, variable)):
os.mkdir(os.path.join(path_m, variable))
else:
if not os.path.exists(os.path.join(path_a, variable)):
os.mkdir(os.path.join(path_a, variable))
# Main Loop
for variable in variables_ls:
# Yearly rasters
if '_Y' in variable:
for time in time_y:
print '{0}\t{1}'.format(variable, time)
file_name = variable[:-1] + '{0}.tif'.format(time)
output_tiff = os.path.join(path_y, variable, file_name)
arcpy.md.MakeNetCDFRasterLayer(output_nc, variable,
'longitude', 'latitude',
file_name[:-4], '#',
'time_yyyy {0}'.format(time),
'BY_VALUE')
output_ras = arcpy.Raster(file_name[:-4])
output_ras.save(output_tiff)
# Monthly rasters
elif '_M' in variable:
for time in time_m:
print '{0}\t{1}'.format(variable, time)
file_name = variable[:-1] + '{0}.tif'.format(time)
output_tiff = os.path.join(path_m, variable, file_name)
arcpy.md.MakeNetCDFRasterLayer(output_nc, variable,
'longitude', 'latitude',
file_name[:-4], '#',
'time_yyyymm {0}'.format(time),
'BY_VALUE')
output_ras = arcpy.Raster(file_name[:-4])
output_ras.save(output_tiff)
# Additional rasters
else:
print '{0}'.format(variable)
file_name = variable[:-1] + '.tif'
output_tiff = os.path.join(path_y, variable, file_name)
arcpy.md.MakeNetCDFRasterLayer(output_nc, variable,
'longitude', 'latitude',
file_name[:-4])
output_ras = arcpy.Raster(file_name[:-4])
output_ras.save(output_tiff)
# Return
return output_path
|
wateraccounting/wa
|
Models/waterpix/wp_arcpy/output_nc_to_tiffs.py
|
Python
|
apache-2.0
| 3,744
|
[
"NetCDF"
] |
cba582d9ab5333495a1859b76aca284a2f43991180bef64b9b57026de011916c
|
"""Identify program versions used for analysis, reporting in structured table.
Catalogs the full list of programs used in analysis, enabling reproduction of
results and tracking of provenance in output files.
"""
import os
import contextlib
import subprocess
import sys
import yaml
import toolz as tz
from bcbio import utils
from bcbio.pipeline import config_utils
from bcbio.pipeline import datadict as dd
from bcbio.log import logger
# Command-line programs whose version is probed by running the tool and
# parsing its output.  Recognized keys per entry:
#   cmd            -- executable name (also the reported program name
#                     unless "name" overrides it)
#   args           -- extra arguments needed to make the tool print a version
#   stdout_flag    -- the version is the token following this marker
#   paren_flag     -- the version is the parenthesised text after this marker
#   has_cl_version -- False when the tool cannot report its own version
_cl_progs = [{"cmd": "bamtofastq", "name": "biobambam",
              "args": "--version", "stdout_flag": "This is biobambam version"},
             {"cmd": "bamtools", "args": "--version", "stdout_flag": "bamtools"},
             {"cmd": "bcftools", "stdout_flag": "Version:"},
             {"cmd": "bedtools", "args": "--version", "stdout_flag": "bedtools"},
             {"cmd": "bowtie2", "args": "--version", "stdout_flag": "bowtie2-align version"},
             {"cmd": "bwa", "stdout_flag": "Version:"},
             {"cmd": "chanjo"},
             {"cmd": "cutadapt", "args": "--version"},
             {"cmd": "fastqc", "args": "--version", "stdout_flag": "FastQC"},
             {"cmd": "freebayes", "stdout_flag": "version:"},
             {"cmd": "gemini", "args": "--version", "stdout_flag": "gemini "},
             {"cmd": "novosort", "paren_flag": "novosort"},
             {"cmd": "novoalign", "stdout_flag": "Novoalign"},
             {"cmd": "samtools", "stdout_flag": "Version:"},
             {"cmd": "qualimap", "args": "-h", "stdout_flag": "QualiMap"},
             {"cmd": "preseq", "stdout_flag": "preseq"},
             {"cmd": "vcflib", "has_cl_version": False},
             {"cmd": "featurecounts", "args": "-v", "stdout_flag": "featureCounts"}]

# Programs whose version is read from the bcbio install manifest instead
# of being probed on the command line.
_manifest_progs = ["bcbio-variation", "bioconductor-bubbletree", "cufflinks",
                   "cnvkit", "gatk4", "gatk-framework", "hisat2", "sailfish", "salmon",
                   "grabix", "htseq", "lumpy-sv", "manta", "metasv", "mirdeep2", "oncofuse",
                   "picard", "phylowgs", "platypus-variant",
                   "rna-star", "rtg-tools", "sambamba", "samblaster", "scalpel",
                   "seqbuster", "snpeff", "vardict",
                   "vardict-java", "varscan", "variant-effect-predictor", "vt", "wham"]
def _broad_versioner(type):
def get_version(config):
from bcbio import broad
try:
runner = broad.runner_from_config(config)
except ValueError:
return ""
if type == "gatk":
return runner.get_gatk_version()
elif type == "mutect":
try:
runner = broad.runner_from_config(config, "mutect")
except ValueError:
return ""
return runner.get_mutect_version()
else:
raise NotImplementedError(type)
return get_version
def jar_versioner(program_name, jar_name):
    """Return a version-lookup callable deriving the version string from
    the filename of an installed jar.
    """
    def get_version(config):
        try:
            pdir = config_utils.get_program(program_name, config, "dir")
        except ValueError:
            # Program directory not configured.
            return ""
        jar_path = config_utils.get_jar(jar_name, pdir)
        # Strip the tool name and packaging suffixes; what remains is
        # the embedded version number.
        version = os.path.basename(jar_path)
        for fragment in [jar_name, ".jar", "-standalone"]:
            version = version.replace(fragment, "")
        if version.startswith(("-", ".")):
            version = version[1:]
        if not version:
            logger.warn("Unable to determine version for program '{}' from jar file {}".format(
                program_name, config_utils.get_jar(jar_name, pdir)))
        return version
    return get_version
def java_versioner(pname, jar_name, **kwargs):
    """Return a version-lookup callable that runs a java jar through the
    generic command-line version probe (``_get_cl_version``).
    """
    def get_version(config):
        try:
            program_dir = config_utils.get_program(pname, config, "dir")
        except ValueError:
            return ""
        jar = config_utils.get_jar(jar_name, program_dir)
        kwargs["cmd"] = "java"
        kwargs["args"] = "-Xms128m -Xmx256m -jar %s" % jar
        return _get_cl_version(kwargs, config)
    return get_version
# Programs needing custom version lookup (Broad tools queried via their
# runner objects rather than the command line).
_alt_progs = [{"name": "gatk", "version_fn": _broad_versioner("gatk")},
              {"name": "mutect",
               "version_fn": _broad_versioner("mutect")}]
def _parse_from_stdoutflag(stdout, x):
for line in stdout:
if line.find(x) >= 0:
parts = [p for p in line[line.find(x) + len(x):].split() if p.strip()]
return parts[0].strip()
return ""
def _parse_from_parenflag(stdout, x):
for line in stdout:
if line.find(x) >= 0:
return line.split("(")[-1].split(")")[0]
return ""
def _get_cl_version(p, config):
    """Retrieve version of a single commandline program.

    ``p`` is an entry from ``_cl_progs``; returns "" when the tool
    declares it has no retrievable version or cannot be located.
    """
    if not p.get("has_cl_version", True):
        return ""
    try:
        prog = config_utils.get_program(p["cmd"], config)
    except config_utils.CmdNotFound:
        # Fall back to a tool installed alongside the running python.
        localpy_cmd = os.path.join(os.path.dirname(sys.executable), p["cmd"])
        if os.path.exists(localpy_cmd):
            prog = localpy_cmd
        else:
            return ""
    args = p.get("args", "")
    cmd = "{prog} {args}"
    # stderr is folded into stdout: many tools print their version there.
    subp = subprocess.Popen(cmd.format(**locals()), stdout=subprocess.PIPE,
                            stderr=subprocess.STDOUT,
                            shell=True)
    with contextlib.closing(subp.stdout) as stdout:
        if p.get("stdout_flag"):
            v = _parse_from_stdoutflag(stdout, p["stdout_flag"])
        elif p.get("paren_flag"):
            v = _parse_from_parenflag(stdout, p["paren_flag"])
        else:
            # No marker configured: assume the last non-blank output line
            # is the version string.
            lines = [l.strip() for l in stdout.read().split("\n") if l.strip()]
            v = lines[-1]
    if v.endswith("."):
        v = v[:-1]
    return v
def _get_brew_versions():
    """Retrieve versions of tools installed via brew.

    Returns a {tool_name: version} dict; empty when brew is absent.
    """
    from bcbio import install
    tooldir = install.get_defaults().get("tooldir")
    brew_cmd = os.path.join(tooldir, "bin", "brew") if tooldir else "brew"
    try:
        listing = subprocess.check_output([brew_cmd, "list", "--versions"])
    except OSError:  # brew not installed/used
        listing = ""
    versions = {}
    for entry in listing.split("\n"):
        if not entry.strip():
            continue
        # Lines look like "name v1 v2 ..."; keep the last listed version.
        fields = entry.rstrip().split()
        versions[fields[0]] = fields[-1]
    return versions
def _get_versions(config=None):
    """Retrieve details on all programs available on the system.

    Returns a list of {"program": ..., "version": ...} dicts sorted by
    program name.  Versions come from the install manifest when present;
    otherwise each tool is probed directly (which requires ``config``).
    """
    try:
        from bcbio.pipeline import version
        bcbio_version = ("%s-%s" % (version.__version__, version.__git_revision__)
                         if version.__git_revision__ else version.__version__)
    except ImportError:
        bcbio_version = ""
    out = [{"program": "bcbio-nextgen", "version": bcbio_version}]
    manifest_dir = _get_manifest_dir(config)
    manifest_vs = _get_versions_manifest(manifest_dir)
    if manifest_vs:
        out += manifest_vs
    else:
        assert config is not None, "Need configuration to retrieve from non-manifest installs"
        # Prefer brew-reported versions; fall back to probing each tool.
        brew_vs = _get_brew_versions()
        for p in _cl_progs:
            out.append({"program": p["cmd"],
                        "version": (brew_vs[p["cmd"]] if p["cmd"] in brew_vs else
                                    _get_cl_version(p, config))})
        for p in _alt_progs:
            out.append({"program": p["name"],
                        "version": (brew_vs[p["name"]] if p["name"] in brew_vs else
                                    p["version_fn"](config))})
    out.sort(key=lambda x: x["program"])
    return out
def _get_manifest_dir(data=None):
    """Locate the bcbio install manifest directory.

    Preference order:
      1. the directory next to the bcbio_system.yaml referenced by ``data``
      2. the sibling of the galaxy directory
      3. the base install directory
    ``data`` may be either the data or the config dictionary.
    """
    manifest_dir = None
    if data:
        bcbio_system = tz.get_in(["config", "bcbio_system"], data, None)
        if not bcbio_system:
            bcbio_system = data.get("bcbio_system", None)
        if bcbio_system:
            sibling_dir = os.path.normpath(os.path.dirname(bcbio_system))
        else:
            sibling_dir = dd.get_galaxy_dir(data)
        if sibling_dir:
            manifest_dir = os.path.normpath(
                os.path.join(sibling_dir, os.pardir, "manifest"))
    if not manifest_dir or not os.path.exists(manifest_dir):
        manifest_dir = os.path.join(config_utils.get_base_installdir(), "manifest")
    return manifest_dir
def _get_versions_manifest(manifest_dir):
    """Retrieve versions from a pre-existing manifest of installed software.

    Returns a list of {"program", "version"} dicts (programs absent from
    every manifest file get an empty version), or None when
    ``manifest_dir`` does not exist.
    """
    all_pkgs = _manifest_progs + [p.get("name", p["cmd"]) for p in _cl_progs] + [p["name"] for p in _alt_progs]
    if os.path.exists(manifest_dir):
        out = []
        for plist in ["toolplus", "python", "r", "debian", "custom"]:
            pkg_file = os.path.join(manifest_dir, "%s-packages.yaml" % plist)
            if os.path.exists(pkg_file):
                with open(pkg_file) as in_handle:
                    pkg_info = yaml.safe_load(in_handle)
                if not pkg_info:
                    continue
                added = []
                for pkg in all_pkgs:
                    if pkg in pkg_info:
                        added.append(pkg)
                        out.append({"program": pkg, "version": pkg_info[pkg]["version"]})
                # Drop found packages so later manifest files (and the
                # final fill-in loop) don't report them twice.
                for x in added:
                    all_pkgs.remove(x)
        out.sort(key=lambda x: x["program"])
        # Packages not found in any manifest get an empty version.
        for pkg in all_pkgs:
            out.append({"program": pkg, "version": ""})
        return out
def _get_program_file(dirs):
if dirs.get("work"):
base_dir = utils.safe_makedir(os.path.join(dirs["work"], "provenance"))
return os.path.join(base_dir, "programs.txt")
def write_versions(dirs, config=None, is_wrapper=False):
    """Write CSV file with versions used in analysis pipeline.

    Writes ``<work>/provenance/programs.txt`` as ``program,version``
    lines and returns its path.  When no work directory is configured the
    versions are printed to stdout instead (and None is returned).
    ``is_wrapper`` asserts a previously generated file (from a VM run)
    already exists rather than regenerating it.
    """
    out_file = _get_program_file(dirs)
    if is_wrapper:
        assert utils.file_exists(out_file), "Failed to create program versions from VM"
    elif out_file is None:
        for p in _get_versions(config):
            print("{program},{version}".format(**p))
    else:
        with open(out_file, "w") as out_handle:
            for p in _get_versions(config):
                out_handle.write("{program},{version}\n".format(**p))
    return out_file
def get_version_manifest(name, data=None, required=False):
    """Retrieve a version from the currently installed manifest.

    Returns "" for programs without a recorded version unless
    ``required`` is True, in which case ValueError is raised.
    """
    entries = _get_versions_manifest(_get_manifest_dir(data))
    for entry in entries:
        if entry["program"] == name:
            version = entry.get("version", "")
            if version:
                return version
    if required:
        raise ValueError("Did not find %s in install manifest. Could not check version." % name)
    return ""
def add_subparser(subparsers):
    """Add the 'version' command line subcommand for exporting version
    information to stdout or workdir/provenance/programs.txt.
    """
    parser = subparsers.add_parser("version",
                                   help="Export versions of used software to stdout or a file ")
    parser.add_argument("--workdir", help="Directory export programs to in workdir/provenance/programs.txt",
                        default=None)
def get_version(name, dirs=None, config=None):
    """Retrieve the current version of the given program from cached names.

    Reads the provenance programs file (located via ``dirs``, or from the
    resources/program_versions config entry) and raises KeyError when no
    non-empty version is recorded for ``name``.
    """
    if dirs:
        cache_file = _get_program_file(dirs)
    else:
        cache_file = tz.get_in(["resources", "program_versions"], config)
    if cache_file:
        with open(cache_file) as in_handle:
            for line in in_handle:
                prog, version = line.rstrip().split(",")
                if prog == name and version:
                    return version
    raise KeyError("Version information not found for %s in %s" % (name, cache_file))
|
biocyberman/bcbio-nextgen
|
bcbio/provenance/programs.py
|
Python
|
mit
| 11,886
|
[
"BWA",
"Bioconductor",
"Galaxy",
"HTSeq"
] |
75c9ec50fb97d449ce1c3461db01e4edab1f255e926ab2200e5730d66bd9bd1d
|
# -*- coding: utf-8 -*-

"""Connection configuration for PyBEL."""

import logging

import pystow

__all__ = [
    "connection",
    "PYBEL_MINIMUM_IMPORT_VERSION",
    "PYBEL_HOME",
]

logger = logging.getLogger(__name__)

#: The last PyBEL version where the graph data definition changed
PYBEL_MINIMUM_IMPORT_VERSION = 0, 14, 0

# Directory under the pystow home where PyBEL keeps its data.
PYBEL_HOME = pystow.join("pybel")

# The cache database name embeds the version triple so that databases
# from incompatible graph-data versions do not collide.
DEFAULT_CACHE_NAME = "pybel_{}.{}.{}_cache.db".format(*PYBEL_MINIMUM_IMPORT_VERSION)
DEFAULT_CACHE_PATH = pystow.join("pybel", name=DEFAULT_CACHE_NAME)
#: The default cache connection string uses sqlite.
DEFAULT_CACHE_CONNECTION = "sqlite:///" + DEFAULT_CACHE_PATH.as_posix()

# User-configurable connection string, read via pystow configuration with
# the sqlite default above.
connection = pystow.get_config(
    "pybel",
    "connection",
    default=DEFAULT_CACHE_CONNECTION,
)
|
pybel/pybel
|
src/pybel/config.py
|
Python
|
mit
| 745
|
[
"Pybel"
] |
61e05d46d23ebe8fdeee7a4a0ed03affd23c3b8e6570a45fd832375b5baf9ead
|
#!/usr/bin/env python
"""Phylotyper program main
Script for running various phylotyper functions
Examples:
To run subtyping routine on Stx1 genes:
$ python -m phylotyper subtype stx1 test/ecoli_stx1.ffn
"""
import argparse
import csv
import logging
import os
import pprint
import pkg_resources
import re
from Bio import SeqIO
from collections import Counter, defaultdict
from config import PhylotyperOptions
from genome.loci import LociSearch
from subtypes_index import SubtypeConfig
from phylotyper import Phylotyper
from tree.fasttree import FastTreeWrapper
from tree.seqaligner import SeqAligner
from tree.seq import SeqDict, LociConcat
__author__ = "Matthew Whiteside"
__copyright__ = "Copyright 2015, Public Health Agency of Canada"
__license__ = "APL"
__version__ = "2.0"
__maintainer__ = "Matthew Whiteside"
__email__ = "matthew.whiteside@phac-aspc.gc.ca"
logger = logging.getLogger('phylotyper.main')
def align_all_sequences(inputs, outputs, superaln_output, summary, config):
    """Build a multiple sequence alignment per locus plus a trimmed
    concatenated super-alignment.

    Args:
        inputs (list): List of Fasta files
        outputs (list): Output files for MSA
        superaln_output (str): Output file for concatenated alignment
        summary (str): Trimming summary file
        config (obj): PhylotyperOptions object
    """
    logger.debug('Performing full alignment')
    aligner = SeqAligner(config)
    aligner.malign(inputs, outputs, superaln_output)
    aligner.trim(superaln_output, superaln_output, trimming_summary_file=summary)
def align_new_sequences(inputs, alignments, trim_summary, output, config):
    """Profile-align new sequences against an existing MSA, then trim.

    Args:
        inputs (list): List of Fasta files
        alignments (list): List of aligned fasta files
        trim_summary (str): trimming summary file
        output (str): Output file for MSA
        config (obj): PhylotyperOptions object
    """
    logger.debug('Aligning genes to existing alignment')
    aligner = SeqAligner(config)
    aligner.madd(inputs, alignments, output)
    aligner.trim(output, output, trimming_summary_file=trim_summary)
def build_tree(input, output, nt, fast, config):
    """Build a phylogenetic tree with FastTree.

    Args:
        input (str): Fasta file
        output (str): Output file for newick tree
        nt (bool): True when nucleotide sequences
        fast (bool): True to prioritize speed over accuracy
        config (obj): PhylotyperOptions object
    """
    logger.debug('Building fasttree')
    FastTreeWrapper(config).build(input, output, nt, fast)
def predict_subtypes(treefile, subtypefile, plotfile, options, config):
    """Assign subtypes to the unknown leaves of a tree.

    Wrapper around the Phylotyper.subtype method.  Predictions whose
    posterior probability falls below the configured cutoff are reported
    as 'non-significant/undetermined'.

    Args:
        treefile (str): Filepath to newick tree
        subtypefile (str): Filepath to tab-delim subtype assignments for leaves in tree
        plotfile (str|False): If provided, filepath to probability plot
        options (dict): user defined settings from __main__
        config (obj): PhylotyperConfig with .ini file settings

    Returns:
        dict keyed by genome id, each value a dict with keys:
            subtype
            probability
            phylotyper_assignment
    """
    logger.debug('Running phylotyper')
    assignment_dict = Phylotyper(config).subtype(
        treefile, subtypefile, options['rate_matrix'], plotfile)
    # Compute assignments
    cutoff = float(config.get('phylotyper', 'prediction_threshold'))
    logger.debug('Using posterior probability cutoff: %f' % (cutoff))
    results = {}
    for genome_id, subtup in assignment_dict.items():
        posterior = subtup[1]
        subtype_csv = ','.join(subtup[0])
        if posterior > cutoff:
            call = subtype_csv
        else:
            call = 'non-significant/undetermined'
        results[genome_id] = {
            'subtype': subtype_csv,
            'probability': str(posterior),
            'phylotyper_assignment': call,
        }
    return(results)
def results_header():
    """Return the ordered column names for subtype result TSV files."""
    return ['genome', 'tree_label', 'subtype', 'probability',
            'phylotyper_assignment', 'loci']
def subtype_pipeline(options, config):
    """Run phylotyper pipeline

    Runs individual steps in phylotyper pipeline for a pre-built subtype
    scheme: locate loci in the inputs, transfer subtypes from identical
    reference sequences, and run phylotyper on whatever remains.
    Results are written to subtype_predictions.tsv in the output
    directory.

    Args:
        options (dict): user defined settings from __main__
        config (obj): PhylotyperConfig with .ini file settings
    """
    # Define files
    genome_search = False
    if 'genomes' in options:
        # Inputs are genome files
        # Need to define fasta files for each loci
        # found in the blast search
        locifiles = []
        for i in xrange(options['nloci']):
            file_name = os.path.join(options['output_directory'], 'search_results.locus{}'.format(i))
            locifiles.append( file_name )
            # Overwrite existing files
            try:
                os.remove(file_name)
            except OSError:
                pass
        options['input'] = locifiles
        genome_search = True
    refalnfiles = options['alignment']
    subtypefile = options['subtype']
    options['result'] = os.path.join(options['output_directory'], 'subtype_predictions.tsv')
    logger.info('Settings:\n%s' % (pprint.pformat(options)))
    logger.info('Config:\n%s' % (config.pformat()))
    # Predict subtypes
    with open(options['result'], 'w') as resfile:
        assignments = csv.DictWriter(resfile, fieldnames=results_header(), delimiter='\t', quoting=csv.QUOTE_MINIMAL)
        assignments.writeheader()
        loci_found = 0
        if genome_search:
            # Identify loci in input genomes
            seqtype = 'prot' if options['seq'] == 'aa' else 'nucl'
            detector = LociSearch(config, options['search_database'], None, seqtype, options['nloci'])
            for genome in options['genomes']:
                genome_label, nhits = detector.search(genome, options['input'], append=True)
                if nhits == 0:
                    # No subtype loci in this genome: record and move on.
                    assignments.writerow({
                        'genome': genome_label,
                        'tree_label': 'not applicable',
                        'subtype': 'not applicable',
                        'probability': 'not applicable',
                        'phylotyper_assignment': 'Subtype loci not found in genome',
                        'loci': 'not applicable'
                    })
                else:
                    loci_found += nhits
        # Check if sequences match known subtyped sequences
        remaining = []
        if loci_found > 0:
            remaining = identical_sequences(options, assignments)
        # Run phylotyper on remaining untyped input sequences
        for genome in remaining:
            allele = 1
            for alleleset in remaining[genome]:
                # Setup input/output files
                infiles = []
                loci = 1
                # Only one gene for one genome submitted
                # don't need to split genes into individual files
                tree_label = "{}-allele{}".format(genome, allele)
                filename = tree_label.replace('|', '_')
                # Only one gene, don't need to distinguish alleles
                if len(remaining[genome]) == 1:
                    tree_label = genome
                for s in alleleset.seqlist():
                    infile = os.path.join(options['output_directory'], "{}_loci{}_step2_alignment_input.fasta".format(filename, loci))
                    loci += 1
                    with open(infile, 'w') as outfh:
                        outfh.write('>{}\n{}\n'.format(tree_label, s))
                    infiles.append(infile)
                trimfile = os.path.join(options['output_directory'], "{}_step3_alignment_trimming_summary.html".format(filename))
                alnfile = os.path.join(options['output_directory'], "{}_step4_profile_alignment_output.fasta".format(filename))
                treefile = os.path.join(options['output_directory'], "{}_step5_subtype_tree.newick".format(filename))
                plotfile = os.path.join(options['output_directory'], "{}_step5_posterior_probability_tree.png".format(filename))
                # Run alignment on each locus
                align_new_sequences(infiles, refalnfiles, trimfile, alnfile, config)
                # Compute tree
                nt = options['seq'] == 'nt'
                build_tree(alnfile, treefile, nt, options['fast'], config)
                # Predict subtypes & write to file
                results = predict_subtypes(treefile, subtypefile, plotfile, options, config)
                print results
                if not tree_label in results:
                    raise Exception("Phylotyper failed to complete")
                this_results = results[tree_label]
                this_results['genome'] = genome
                this_results['tree_label'] = tree_label
                this_results['loci'] = alleleset.iddump()
                assignments.writerow(this_results)
                allele += 1
def evaluate_subtypes(options, config, seqdict):
    """Examine correlation of subtype in phylogenetic tree.

    Wrapper around the Phylotyper.evaluate method, which writes
    evaluation output for a newly built subtype scheme to the output
    directory.  (Previous docstring was copied from predict_subtypes and
    described the wrong method; also removed a dead trailing ``None``
    statement.)

    Args:
        options (dict): user defined settings from __main__
        config (obj): PhylotyperConfig with .ini file settings
        seqdict (obj): SeqDict object
    """
    logger.debug('Running phylotyper new scheme evaluation')
    # Define files
    subtfile = options['subtype']
    treefile = options['tree']
    ratematfile = options['rate_matrix']
    pt = Phylotyper(config)
    pt.evaluate(treefile, subtfile, ratematfile, options['output_directory'],
                seqdict.accession_map())
def build_pipeline(options, config):
    """Create and evaluate new reference alignment for subtyping

    User provides new reference set for subtyping. Build and
    refine alignment. Evaluate predictive ability of tree.

    Args:
        options (dict): user defined settings from __main__
        config (obj): PhylotyperConfig with .ini file settings
    """
    # Define files
    alnfiles = options['alignment']
    tmpfiles = []
    for i in xrange(options['nloci']):
        alnfile = os.path.basename(options['alignment'][i])
        tmpfiles.append(os.path.join(options['output_directory'], '{}.tmp{}'.format(alnfile, i)))
    tmpfile = os.path.join(options['output_directory'], 'tmpsuperaln.fasta')
    treefile = options['tree'] = os.path.join(options['output_directory'], 'test.tree')
    summary = os.path.join(options['output_directory'], 'alignment_trimming_summary.html')
    logger.info('Settings:\n%s' % (pprint.pformat(options)))
    logger.info('Config:\n%s' % (config.pformat()))
    # Check sequence IDs
    check_gene_names(options)
    # Create blast database for searching genomes
    seqtype = 'prot' if options['seq'] == 'aa' else 'nucl'
    LociSearch(config, options['search_database'], options['input'], seqtype, options['nloci'])
    # Remove identical sequences,
    logger.debug('Collapsing identical sequences')
    seqdict = SeqDict(options['nloci'])
    seqdict.build(options['input'], options['subtype_orig'])
    # Output unique set
    seqdict.write(tmpfiles, options['subtype'])
    # Save lookup object
    seqdict.store(options['lookup'])
    # Align
    align_all_sequences(tmpfiles, alnfiles, tmpfile, summary, config)
    # Compute tree
    nt = options['seq'] == 'nt'
    build_tree(tmpfile, treefile, nt, options['fast'], config)
    # Run evaluation
    evaluate_subtypes(options, config, seqdict)
def update_pipeline(options, config):
    """Add new sequences to reference alignment for subtyping

    User provides new reference sequences for subtyping. After
    appending new sequences to existing sequences, build_pipeline
    is called.

    Args:
        options (dict): user defined settings from __main__
        config (obj): PhylotyperConfig with .ini file settings
    """
    # Restore the stored reference set (sequences + subtype assignments).
    lookup = SeqDict()
    lookup.load(options['lookup'])
    subtype_data = lookup.get_dict()
    # Splits the genome/subtype columns in the user's subtype file.
    p = re.compile("\s+")
    # Temp files for holding new and old sequences, one per locus
    tmpfiles = []
    tmpfhs = []
    for i in xrange(options['nloci']):
        infile = os.path.basename(options['input'][i])
        tmpfiles.append(os.path.join(options['output_directory'], '{}.updated_set{}'.format(infile, i)))
        tmpfhs.append(open(tmpfiles[i], 'w'))
    # Append new sequences to current subtype data
    newsubtype_file = os.path.join(options['output_directory'], 'phylotyper_subtypes.csv')
    with open(newsubtype_file, 'w') as stfh:
        # Write out the existing reference genomes, subtypes and loci.
        for subtype_dict in subtype_data.itervalues():
            subtype = subtype_dict['subtype']
            original_genome_labels = subtype_dict['accessions']
            loci = subtype_dict['loci']
            for genome in original_genome_labels:
                stfh.write("{}\t{}\n".format(genome, subtype))
                for i in xrange(options['nloci']):
                    tmpfhs[i].write(">{}\n{}\n".format(genome, loci[i]))
        # Append the user's new subtype assignments (subtypes lowercased).
        with open(options['subtype_orig'], 'r') as infh:
            for line in infh:
                line = line.strip()
                result = p.split(line)
                (genome, subtype) = result
                stfh.write("{}\t{}\n".format(genome, subtype.lower()))
    # Append the user's new sequences after the reference sequences.
    for i in xrange(options['nloci']):
        f1 = options['input'][i]
        with open(f1, 'r') as fh1:
            for line in fh1:
                tmpfhs[i].write(line)
        tmpfhs[i].close()
    # Point the options at the combined files for the rebuild.
    options['input'] = tmpfiles
    options['subtype_orig'] = newsubtype_file
    # Run build with updated dataset
    build_pipeline(options, config)
def loci_pipeline(options, config):
    """Find gene alleles

    Using BLAST, find subtyping alleles in input genomes,
    for a given subtype scheme.  No subtype prediction is performed;
    found alleles are recorded in subtype_gene_predictions.tsv and
    written to per-locus fasta files.

    Args:
        options (dict): user defined settings from __main__
        config (obj): PhylotyperConfig with .ini file settings
    """
    options['result'] = os.path.join(options['output_directory'], 'subtype_gene_predictions.tsv')
    # Define files
    locifiles = []
    for i in xrange(options['nloci']):
        file_name = os.path.join(options['output_directory'], 'search_results.locus{}'.format(i))
        locifiles.append( file_name )
        # Overwrite existing files
        try:
            os.remove(file_name)
        except OSError:
            pass
    options['input'] = locifiles
    logger.info('Settings:\n%s' % (pprint.pformat(options)))
    logger.info('Config:\n%s' % (config.pformat()))
    # Predict subtypes
    with open(options['result'], 'w') as resfile:
        assignments = csv.DictWriter(resfile, fieldnames=results_header(),
                                     delimiter='\t', quoting=csv.QUOTE_MINIMAL)
        assignments.writeheader()
        loci_found = 0
        # Identify loci in input genomes
        seqtype = 'prot' if options['seq'] == 'aa' else 'nucl'
        detector = LociSearch(config, options['search_database'], None, seqtype, options['nloci'])
        for genome in options['genomes']:
            genome_label, nhits = detector.search(genome, options['input'], append=True)
            if nhits == 0:
                # Nothing found for this genome: record and move on.
                assignments.writerow({
                    'genome': genome,
                    'tree_label': 'not applicable',
                    'subtype': 'not applicable',
                    'probability': 'not applicable',
                    'phylotyper_assignment': 'blast',
                    'loci': 'not found'
                })
            else:
                loci_found += nhits
        # Check if sequences match known subtyped sequences
        remaining = []
        if loci_found > 0:
            remaining = identical_sequences(options, assignments)
        # Write the alleles that did not match a reference sequence.
        for genome in remaining:
            allele = 1
            filename = genome.replace('|', '_')
            for alleleset in remaining[genome]:
                loci = 1
                tree_label = "{}|allele{}".format(genome, allele)
                # Only one gene, don't need to distinguish alleles
                if len(remaining[genome]) == 1:
                    tree_label = genome
                for s in alleleset.seqlist():
                    infile = os.path.join(options['output_directory'], "{}_loci{}.fasta".format(filename, loci))
                    loci += 1
                    with open(infile, 'a') as outfh:
                        outfh.write('>{}\n{}\n'.format(tree_label, s))
                assignments.writerow({
                    'genome': genome,
                    'tree_label': 'not applicable',
                    'subtype': 'not applicable',
                    'probability': 'not applicable',
                    'phylotyper_assignment': 'blast',
                    'loci': alleleset.iddump()  # Use the power of json to encode this fasta header string
                })
                allele += 1
def identical_sequences(options, identified=None):
    """Transfer subtype assignments from exact reference matches.

    Inputs whose loci are identical to already-subtyped sequences in the
    reference set do not need a phylotyper run; the subtype is copied from
    the matching reference instead.

    Args:
        options (dict): user defined settings from __main__
        identified (csv.DictWriter): optional; exact matches are written to
            this result writer as assignment rows

    Returns:
        dict mapping genome name -> list of allele sets that still need to
        be run through phylotyper
    """
    logger.debug('Searching for identical sequences with known subtype')

    # Reference lookup of known, subtyped sequences
    lookup = SeqDict()
    lookup.load(options['lookup'])

    # Collect every allele combination per genome; genomes missing a copy
    # of any locus are skipped entirely.
    unresolved = defaultdict(list)
    concatenator = LociConcat()
    supersequences, incomplete = concatenator.load(options['input'], missing='warn')

    for genome, typing_sequences in supersequences.iteritems():
        if genome in incomplete:
            logger.info('{} has incomplete set of loci. Skipping genome.'.format(genome))
            continue
        for alleleset in typing_sequences.iteralleles():
            match = lookup.find(alleleset.seqlist())
            if not match:
                # No exact reference hit -- phylotyper must handle it
                unresolved[genome].append(alleleset)
            elif identified:
                # Matches reference sequence; copy its subtype over
                identified.writerow({
                    'genome': genome,
                    'tree_label': 'not applicable',
                    'subtype': match['subtype'],
                    'probability': 'identical to {}'.format(match['name']),
                    'phylotyper_assignment': match['subtype'],
                    'loci': alleleset.iddump() # Use the power of json to encode this fasta header string
                })

    return unresolved
def check_gene_names(options):
    """Make sure gene names are Fasttree and Mafft safe.

    Validates that every allele id is unique within each fasta input file,
    that subtype names contain no Newick-reserved characters, and that the
    same set of genomes appears in the subtype file and in every input file.

    Args:
        options (dict): user defined settings from __main__

    Returns:
        True when all checks pass.

    Raises:
        Exception on the first validation failure.
    """
    logger.debug('Checking gene sequence names')

    # Define files
    subtype_file = options['subtype_orig']
    input_files = iter(options['input'])

    # Check first fasta file; record each genome with marker value 1
    i = 1
    input_file = next(input_files)
    fasta_sequences = SeqIO.parse(open(input_file),'fasta')
    uniq = Counter()
    genomes = Counter()
    for fasta in fasta_sequences:
        name = fasta.id
        desc = fasta.description[0:20]
        allele = validate_fasta_header(name)
        genomes[allele.genome] = i
        if uniq[str(allele)] > 0:
            raise Exception("allele id in {} in file {} is not unique (allele: {})".format(desc, input_file, str(allele)))
        uniq[str(allele)] += 1
    fasta_sequences.close()

    # Check subtype file; every genome seen above must appear exactly once
    i += 1
    reserved = set(':(), ') # Newick reserved characters
    for row in csv.reader(open(subtype_file,'r'),delimiter='\t'):
        name = row[0]
        subt = row[1]

        if any((c in reserved) for c in subt):
            raise Exception("invalid character in subtype {}".format(subt))

        if len(subt) > 20:
            raise Exception("{} subtype name is too long".format(subt))

        if genomes[name] != 1:
            raise Exception("unknown genome {} in subtype file".format(name))

        genomes[name] = i

    for name in genomes:
        if genomes[name] != 2:
            raise Exception("missing genome {} in subtype file".format(name))

    # Check remaining input files: each must contain exactly the known genomes
    for input_file in input_files:
        fasta_sequences = SeqIO.parse(open(input_file),'fasta')
        uniq = Counter()
        # BUGFIX: was "i += i" (doubling); increment by one like the earlier
        # stages so each file gets the next sequential marker value.
        i += 1
        for fasta in fasta_sequences:
            # BUGFIX: desc previously carried over the stale description from
            # the last record of the FIRST file; derive it from this record.
            desc = fasta.description[0:20]
            allele = validate_fasta_header(fasta.id)
            if uniq[str(allele)] > 0:
                raise Exception("allele id in {} in file {} is not unique (allele: {})".format(desc, input_file, str(allele)))
            uniq[str(allele)] += 1
            if not allele.genome in genomes:
                raise Exception("unknown genome {} in file {}".format(allele.genome, input_file))
            genomes[allele.genome] = i
        fasta_sequences.close()
        for genome in genomes:
            if genomes[genome] != i:
                raise Exception("missing genome entry {} in file {}".format(genome, input_file))

    return True
def validate_fasta_header(name):
    """Check that fasta header is suitable for phylotyper downstream
    applications.

    Args:
        name (str): Fasta ID to check

    Returns:
        AlleleID namedtuple

    Raises:
        Exception if format unsuitable
    """
    forbidden = set(':(), ') # Newick reserved characters

    if len(name) > 40:
        raise Exception("{} id in fasta header is too long".format(name))

    for ch in name:
        if ch in forbidden:
            raise Exception("invalid character in fasta header id {}".format(name))

    return LociConcat.parse_fasta_id(name)
# Modify filename, not extension
def modify_filename(filename, uid, altext=None):
    """Insert a unique id into a filename, before the extension.

    Args:
        filename (str): original path
        uid: identifier appended to the basename
        altext (str): optional replacement extension (including the dot)

    Returns:
        str: "<name>_<uid><ext>"
    """
    base, extension = os.path.splitext(filename)
    if altext is not None:
        extension = altext
    return "{name}_{uid}{ext}".format(name=base, uid=uid, ext=extension)
def main():
    """Run phylotyper functions

    Parses command-line arguments and calls appropriate
    functions.

    Commands:
        new     -- build & evaluate a new subtype scheme
        update  -- add reference sequences to an existing scheme
        genome  -- predict subtypes for genome fasta inputs
        loci    -- locate scheme loci in genome fasta inputs
        subtype -- (currently unavailable) predict subtypes for gene inputs
        list    -- show available subtype schemes
    """

    logging.basicConfig(level=logging.INFO)

    # Parse command-line args
    # Phylotyper functions are broken up into commands
    # Each command has its own options and subparser
    parser = argparse.ArgumentParser()
    subparsers = parser.add_subparsers(help='commands')

    # New subtype command
    new_parser = subparsers.add_parser('new', help='Build new subtype scheme')
    new_parser.add_argument('gene', action='store', help='Subtype name')
    new_parser.add_argument('subtype', action='store', help='Subtypes for reference sequences')
    new_parser.add_argument('results', action='store', help='Directory for evaluation result files')
    new_parser.add_argument('ref', nargs='+', metavar='reference', help='Fasta input(s) for reference sequences')
    new_parser.add_argument('--aa', action='store_true', help='Amino acid')
    new_parser.add_argument('--index', help='Specify non-default location of YAML-formatted file index for pre-built subtype schemes')
    new_parser.add_argument('--config', action='store', help='Phylotyper config options file')
    new_parser.add_argument('--description', action='store', help='Description of subtype scheme')
    new_parser.set_defaults(which='new')

    # Builtin subtype command with genome as input
    genome_parser = subparsers.add_parser('genome', help='Predict subtype for scheme provided in phylotyper for genome input')
    genome_parser.add_argument('gene', action='store', help='Subtype gene name')
    genome_parser.add_argument('output', action='store', help='Directory for subtype predictions')
    genome_parser.add_argument('inputs', nargs='+', help='Fasta input for genomes')
    genome_parser.add_argument('--index', help='Specify non-default location of YAML-formatted file index for pre-built subtype schemes')
    genome_parser.add_argument('--noplots', action='store_true', help='Do not generate tree image file')
    genome_parser.add_argument('--config', action='store', help='Phylotyper config options file')
    genome_parser.set_defaults(which='genome')

    # Builtin loci-finder command with genome as input
    loci_parser = subparsers.add_parser('loci', help='Find gene loci for scheme provided in phylotyper for genome input')
    loci_parser.add_argument('gene', action='store', help='Subtype gene name')
    loci_parser.add_argument('output', action='store', help='Directory for loci outputs')
    loci_parser.add_argument('inputs', nargs='+', help='Fasta input for genomes')
    loci_parser.add_argument('--index', help='Specify non-default location of YAML-formatted file index for pre-built subtype schemes')
    loci_parser.add_argument('--config', action='store', help='Phylotyper config options file')
    loci_parser.set_defaults(which='loci')

    # Update subtype command
    new_parser = subparsers.add_parser('update', help='Update subtype scheme')
    new_parser.add_argument('gene', action='store', help='Subtype name')
    new_parser.add_argument('subtype', action='store', help='Subtypes for reference sequences')
    new_parser.add_argument('results', action='store', help='Directory for evaluation result files')
    new_parser.add_argument('ref', nargs='+', metavar='reference', help='Fasta input(s) for reference sequences')
    new_parser.add_argument('--index', help='Specify non-default location of YAML-formatted file index for pre-built subtype schemes')
    new_parser.add_argument('--config', action='store', help='Phylotyper config options file')
    new_parser.set_defaults(which='update')

    # Builtin subtype command with gene as input
    subtype_parser = subparsers.add_parser('subtype', help='Predict subtype for scheme provided in phylotyper')
    # INCOMPLETE
    # subtype_parser.add_argument('gene', action='store', help='Subtype gene name')
    # subtype_parser.add_argument('input', action='store', help='Fasta input for unknowns')
    # subtype_parser.add_argument('output', action='store', help='Directory for subtype predictions')
    # subtype_parser.add_argument('--aa', action='store_true', help='Amino acid sequences')
    # subtype_parser.add_argument('--noplots', action='store_true', help='Do not generate tree image file')
    subtype_parser.add_argument('--config', action='store', help='Phylotyper config options file')
    subtype_parser.add_argument('--index', help='Specify non-default location of YAML-formatted file index for pre-built subtype schemes')
    subtype_parser.set_defaults(which='subtype')

    list_parser = subparsers.add_parser('list', help='List subtype schemes')
    list_parser.add_argument('--config', action='store', help='Phylotyper config options file')
    list_parser.add_argument('--index', help='Specify non-default location of YAML-formatted file index for pre-built subtype schemes')
    list_parser.set_defaults(which='list')

    options = parser.parse_args()

    # Parse .ini config file
    # Location of config file is defined by ENV variable PHYLOTYPER_CONFIG or by --config (overrides previous)
    if options.config:
        config_file = options.config
    elif os.environ.get('PHYLOTYPER_CONFIG'):
        config_file = os.environ.get('PHYLOTYPER_CONFIG')
    else:
        msg = 'No config file argument. Using default settings.\nYou can provide a Phylotyper config file using' \
            ' enviroment variable PHYLOTYPER_CONFIG or command-line argument --config.'
        logger.info(msg)
        config_file = None
    config = PhylotyperOptions(config_file)

    # Default index location
    subtype_config_file = pkg_resources.resource_filename(__name__, 'subtypes_index.yaml')
    # Non-default location
    if options.index:
        if not os.path.isfile(options.index):
            msg = 'Invalid/missing index file option.'
            raise Exception(msg)
        subtype_config_file = options.index
    stConfig = SubtypeConfig(subtype_config_file)

    if options.which == 'new':
        # Build & evaluate new subtype alignment
        # (removed stray debug print that was left in this branch)

        # Check arguments
        # Check input files exists
        n_loci = 0
        for f in options.ref:
            if not os.path.isfile(f):
                msg = 'Invalid/missing input file argument.'
                raise Exception(msg)
            n_loci += 1

        # Check subtype file exists
        if not os.path.isfile(options.subtype):
            msg = 'Invalid/missing subtype file argument.'
            raise Exception(msg)

        # Check output directory exists, if not create it if possible
        if not os.path.exists(options.results):
            os.makedirs(options.results)
        outdir = os.path.abspath(options.results)

        # Create subtype directory & file names
        subtype_options = stConfig.create_subtype(options.gene, n_loci, options.aa, options.description)

        # Save additional build options inputted by user
        subtype_options['input'] = [os.path.abspath(f) for f in options.ref]
        subtype_options['subtype_orig'] = os.path.abspath(options.subtype)
        subtype_options['output_directory'] = outdir
        subtype_options['nloci'] = n_loci
        subtype_options['fast'] = False

        # Run pipeline
        build_pipeline(subtype_options, config)

        # Update subtype YAML file
        stConfig.save()

    elif options.which == 'genome':
        # Compute subtype for builtin scheme
        # Genome input

        # Check arguments
        # Check input file exists
        n_genomes = 0
        for f in options.inputs:
            if not os.path.isfile(f):
                msg = 'Invalid/missing input file argument.'
                raise Exception(msg)
            n_genomes += 1

        # Check output directory exists, if not create it if possible
        if not os.path.exists(options.output):
            os.makedirs(options.output)

        # Load requested subtype data files
        scheme = options.gene
        subtype_options = stConfig.get_subtype_config(scheme)

        # Add pipeline options
        subtype_options['genomes'] = [os.path.abspath(f) for f in options.inputs]
        subtype_options['output_directory'] = os.path.abspath(options.output)
        subtype_options['ngenomes'] = n_genomes
        subtype_options['fast'] = False
        subtype_options['noplots'] = False

        if options.noplots:
            subtype_options['noplots'] = True

        # Run pipeline
        subtype_pipeline(subtype_options, config)

    elif options.which == 'subtype':
        # Compute subtype for builtin scheme
        raise Exception("Subcommand currently unavailable")

        # Check arguments
        # Check input file exists
        if not os.path.isfile(options.input):
            msg = 'Invalid/missing input file argument.'
            raise Exception(msg)

        # Check output directory exists, if not create it if possible
        if not os.path.exists(options.output):
            os.makedirs(options.output)

        # Load requested subtype data files
        scheme = options.gene
        subtype_options = stConfig.get_subtype_config(scheme)

        # Add pipeline options
        subtype_options['input'] = os.path.abspath(options.input)
        subtype_options['output_directory'] = os.path.abspath(options.output)
        subtype_options['fast'] = False
        subtype_options['noplots'] = False

        if options.noplots:
            subtype_options['noplots'] = True

        if options.aa and (subtype_options['seq'] != 'aa'):
            msg = 'Sequence type of input does not match Phylotyper gene sequences for %s' % (scheme)
            raise Exception(msg)

        # Run pipeline
        subtype_pipeline(subtype_options, config)

    elif options.which == 'loci':
        # Find subtype loci for builtin scheme
        # Genome input

        # Check arguments
        # Check input file exists
        n_genomes = 0
        for f in options.inputs:
            if not os.path.isfile(f):
                msg = 'Invalid/missing input file argument.'
                raise Exception(msg)
            n_genomes += 1

        # Check output directory exists, if not create it if possible
        if not os.path.exists(options.output):
            os.makedirs(options.output)

        # Load requested subtype data files
        scheme = options.gene
        loci_options = stConfig.get_subtype_config(scheme)

        # Add pipeline options
        loci_options['genomes'] = [os.path.abspath(f) for f in options.inputs]
        loci_options['output_directory'] = os.path.abspath(options.output)
        loci_options['ngenomes'] = n_genomes

        # Run pipeline
        loci_pipeline(loci_options, config)

    elif options.which == 'update':
        # Update subtype alignment

        # Check arguments
        # Check input files exists
        n_loci = 0
        for f in options.ref:
            if not os.path.isfile(f):
                msg = 'Invalid/missing input file argument.'
                raise Exception(msg)
            n_loci += 1

        # Check subtype file exists
        if not os.path.isfile(options.subtype):
            msg = 'Invalid/missing subtype file argument.'
            raise Exception(msg)

        # Check output directory exists, if not create it if possible
        if not os.path.exists(options.results):
            os.makedirs(options.results)
        outdir = os.path.abspath(options.results)

        # Load requested subtype data files
        scheme = options.gene
        subtype_options = stConfig.get_subtype_config(scheme)

        if n_loci != subtype_options['nloci']:
            msg = 'Invalid/missing input file argument. Number of loci provided does not match subtype scheme'
            raise Exception(msg)

        # Save additional build options inputted by user
        subtype_options['input'] = [os.path.abspath(f) for f in options.ref]
        subtype_options['subtype_orig'] = os.path.abspath(options.subtype)
        subtype_options['output_directory'] = outdir
        subtype_options['fast'] = False

        # Make backup of subtype data
        stConfig.backup_subtype(scheme)

        # Run pipeline
        update_pipeline(subtype_options, config)

    elif options.which == 'list':
        # List available subtype schemes
        schemes = stConfig.list()
        for n, t, l, d in zip(schemes[0], schemes[1], schemes[2], schemes[3]):
            # Parenthesized print behaves identically under python 2 and 3
            print('- {}\n..+ sequence type: {}\n..+ number of loci: {}\n..+ description: {}'.format(n, t, l, d))

    else:
        raise Exception("Unrecognized command")
|
superphy/insilico-subtyping
|
phylotyper/run.py
|
Python
|
apache-2.0
| 35,621
|
[
"BLAST"
] |
ca2f2f0aa1a2f63f82b7855603bdfb4836eef799785933cb2979aa95a6c0513d
|
#!/usr/bin/env python
#Processes uploads from the user.
# WARNING: Changes in this tool (particularly as related to parsing) may need
# to be reflected in galaxy.web.controllers.tool_runner and galaxy.tools
import urllib, sys, os, gzip, tempfile, shutil, re, gzip, zipfile, codecs, binascii
from galaxy import eggs
# need to import model before sniff to resolve a circular import dependency
import galaxy.model
from galaxy.datatypes import sniff
from galaxy.datatypes.binary import *
from galaxy.datatypes.registry import Registry
from galaxy import util
from galaxy.util.json import *
assert sys.version_info[:2] >= ( 2, 4 )
def stop_err(msg, ret=1):
    """Print an error message to stderr and terminate with exit code *ret*."""
    sys.stderr.write(msg)
    sys.exit(ret)
def file_err(msg, dataset, json_file):
    """Record an upload failure for *dataset* and discard its temp file.

    Writes a JSON error record (ext forced to 'data', stderr set to *msg*)
    to *json_file*, then removes the dataset's temporary path best-effort.
    """
    payload = dict(type='dataset',
                   ext='data',
                   dataset_id=dataset.dataset_id,
                   stderr=msg)
    json_file.write(to_json_string(payload) + "\n")
    # The temp file may already be gone; swallow any removal failure,
    # matching the original best-effort behaviour.
    try:
        os.remove(dataset.path)
    except:
        pass
def safe_dict(d):
    """
    Recursively clone json structure with UTF-8 dictionary keys

    http://mellowmachines.com/blog/2009/06/exploding-dictionary-with-unicode-keys-as-python-arguments/
    """
    if isinstance(d, dict):
        cloned = {}
        for key, value in d.iteritems():
            cloned[key.encode('utf-8')] = safe_dict(value)
        return cloned
    if isinstance(d, list):
        return [safe_dict(element) for element in d]
    return d
def check_html(temp_name, chunk=None):
    """Return True when the data looks like HTML.

    Scans at most 100 lines for anchor/iframe/frameset/meta/script tags.
    If *chunk* is given it is scanned directly; otherwise the file at
    *temp_name* is opened (and closed before returning).
    """
    if chunk is None:
        source = open(temp_name, "U")
    else:
        source = chunk
    # Case-insensitive markers of HTML content
    patterns = [
        re.compile("<A\s+[^>]*HREF[^>]+>", re.I),
        re.compile("<IFRAME[^>]*>", re.I),
        re.compile("<FRAMESET[^>]*>", re.I),
        re.compile("<META[^>]*>", re.I),
        re.compile("<SCRIPT[^>]*>", re.I),
    ]
    lineno = 0
    for line in source:
        lineno += 1
        if any(pattern.search(line) for pattern in patterns):
            if chunk is None:
                source.close()
            return True
        if lineno > 100:
            break
    if chunk is None:
        source.close()
    return False
def check_binary(temp_name):
    """Heuristic binary sniff: True if any of the first ~100 characters
    of the file has an ordinal above 128."""
    handle = open(temp_name, "U")
    seen = 0
    found_binary = False
    for block in handle:
        for ch in block:
            seen += 1
            if ord(ch) > 128:
                found_binary = True
                break
            if seen > 100:
                break
        if seen > 100:
            break
    handle.close()
    return found_binary
def check_bam( temp_name ):
    """Return True if the file sniffs as BAM (delegates to galaxy's Bam datatype)."""
    return Bam().sniff( temp_name )
def check_sff( temp_name ):
    """Return True if the file sniffs as SFF (delegates to galaxy's Sff datatype)."""
    return Sff().sniff( temp_name )
def check_gzip( temp_name ):
    """Return ( is_gzipped, is_valid ) for the file at temp_name.

    is_valid is False for gzipped content we refuse to accept (e.g.
    compressed HTML); gzipped SFF data is accepted as-is.
    """
    # This method returns a tuple of booleans representing ( is_gzipped, is_valid )
    # Make sure we have a gzipped file
    try:
        temp = open( temp_name, "U" )
        magic_check = temp.read( 2 )
        temp.close()
        if magic_check != util.gzip_magic:
            return ( False, False )
    except:
        # Unreadable file: treated the same as "not gzipped"
        return ( False, False )
    # We support some binary data types, so check if the compressed binary file is valid
    # If the file is Bam, it should already have been detected as such, so we'll just check
    # for sff format.
    try:
        header = gzip.open( temp_name ).read(4)
        if binascii.b2a_hex( header ) == binascii.hexlify( '.sff' ):
            return ( True, True )
    except:
        # NOTE(review): a corrupt gzip stream lands here and is reported as
        # not-gzipped/not-valid rather than gzipped-but-invalid -- confirm intended
        return( False, False )
    CHUNK_SIZE = 2**15 # 32Kb
    gzipped_file = gzip.GzipFile( temp_name, mode='rb' )
    chunk = gzipped_file.read( CHUNK_SIZE )
    gzipped_file.close()
    # See if we have a compressed HTML file
    if check_html( temp_name, chunk=chunk ):
        return ( True, False )
    return ( True, True )
def check_zip( temp_name ):
    """Return ( is_zipped, is_valid, test_ext ) for the file at temp_name.

    An archive is valid only if every member shares one extension and that
    extension is either 'txt' or an unsniffable binary format.
    """
    if not zipfile.is_zipfile( temp_name ):
        return ( False, False, None )
    zip_file = zipfile.ZipFile( temp_name, "r" )
    # Make sure the archive consists of valid files.  The current rules are:
    # 1. Archives can only include .ab1, .scf or .txt files
    # 2. All file extensions within an archive must be the same
    # NOTE(review): the zip handle is never closed, and the "extension" is
    # taken as split(".")[1], i.e. the first dot-separated token after the
    # basename (so "a.tar.gz" yields "tar") -- confirm intended.
    name = zip_file.namelist()[0]
    try:
        test_ext = name.split( "." )[1].strip().lower()
    except:
        # First member has no extension: zipped, but not acceptable
        return ( True, False, None )
    if not ( test_ext in unsniffable_binary_formats or test_ext == 'txt' ):
        return ( True, False, test_ext )
    for name in zip_file.namelist():
        ext = name.split( "." )[1].strip().lower()
        if ext != test_ext:
            # Mixed extensions are not allowed
            return ( True, False, test_ext )
    return ( True, True, test_ext )
def parse_outputs(args):
    """Parse 'id:files_path:path' output specs from the command line.

    The path component may itself contain colons (maxsplit=2).

    Returns:
        dict mapping int dataset id -> ( path, files_path )
    """
    outputs = {}
    for spec in args:
        dataset_id, files_path, path = spec.split(':', 2)
        outputs[int(dataset_id)] = (path, files_path)
    return outputs
def add_file( dataset, json_file, output_path ):
    """Validate, classify and stage a single uploaded dataset.

    Fetches URL uploads to a temp file, rejects empty/HTML/invalid content
    via file_err, detects the datatype (multi-byte text, bam, sff, gzip,
    zip, other binary, or sniffed text), moves/copies the file to
    output_path, and appends a JSON job record to json_file.
    """
    data_type = None
    line_count = None
    if dataset.type == 'url':
        # Download remote content into a local temp file first
        try:
            temp_name, dataset.is_multi_byte = sniff.stream_to_file( urllib.urlopen( dataset.path ), prefix='url_paste' )
        except Exception, e:
            file_err( 'Unable to fetch %s\n%s' % ( dataset.path, str( e ) ), dataset, json_file )
            return
        dataset.path = temp_name
    # See if we have an empty file
    if not os.path.exists( dataset.path ):
        file_err( 'Uploaded temporary file (%s) does not exist.' % dataset.path, dataset, json_file )
        return
    if not os.path.getsize( dataset.path ) > 0:
        file_err( 'The uploaded file is empty', dataset, json_file )
        return
    if not dataset.type == 'url':
        # Already set is_multi_byte above if type == 'url'
        try:
            dataset.is_multi_byte = util.is_multi_byte( codecs.open( dataset.path, 'r', 'utf-8' ).read( 100 ) )
        except UnicodeDecodeError, e:
            dataset.is_multi_byte = False
    # Is dataset content multi-byte?
    if dataset.is_multi_byte:
        data_type = 'multi-byte char'
        ext = sniff.guess_ext( dataset.path, is_multi_byte=True )
    # Is dataset content supported sniffable binary?
    elif check_bam( dataset.path ):
        ext = 'bam'
        data_type = 'bam'
    elif check_sff( dataset.path ):
        ext = 'sff'
        data_type = 'sff'
    else:
        # See if we have a gzipped file, which, if it passes our restrictions, we'll uncompress
        is_gzipped, is_valid = check_gzip( dataset.path )
        if is_gzipped and not is_valid:
            file_err( 'The uploaded file contains inappropriate content', dataset, json_file )
            return
        elif is_gzipped and is_valid:
            # We need to uncompress the temp_name file, but BAM files must remain compressed in the BGZF format
            CHUNK_SIZE = 2**20 # 1Mb
            fd, uncompressed = tempfile.mkstemp( prefix='data_id_%s_upload_gunzip_' % dataset.dataset_id, dir=os.path.dirname( dataset.path ), text=False )
            gzipped_file = gzip.GzipFile( dataset.path, 'rb' )
            while 1:
                try:
                    chunk = gzipped_file.read( CHUNK_SIZE )
                except IOError:
                    # Corrupt stream: drop the partial output and report
                    os.close( fd )
                    os.remove( uncompressed )
                    file_err( 'Problem decompressing gzipped data', dataset, json_file )
                    return
                if not chunk:
                    break
                os.write( fd, chunk )
            os.close( fd )
            gzipped_file.close()
            # Replace the gzipped file with the decompressed file
            shutil.move( uncompressed, dataset.path )
            dataset.name = dataset.name.rstrip( '.gz' )
            data_type = 'gzip'
        if not data_type:
            # See if we have a zip archive
            is_zipped, is_valid, test_ext = check_zip( dataset.path )
            if is_zipped and not is_valid:
                file_err( 'The uploaded file contains inappropriate content', dataset, json_file )
                return
            elif is_zipped and is_valid:
                # Currently, we force specific tools to handle this case. We also require the user
                # to manually set the incoming file_type
                if ( test_ext in unsniffable_binary_formats ) and dataset.file_type != 'binseq.zip':
                    file_err( "Invalid 'File Format' for archive consisting of binary files - use 'Binseq.zip'", dataset, json_file )
                    return
                elif test_ext == 'txt' and dataset.file_type != 'txtseq.zip':
                    file_err( "Invalid 'File Format' for archive consisting of text files - use 'Txtseq.zip'", dataset, json_file )
                    return
                if not ( dataset.file_type == 'binseq.zip' or dataset.file_type == 'txtseq.zip' ):
                    file_err( "You must manually set the 'File Format' to either 'Binseq.zip' or 'Txtseq.zip' when uploading zip files", dataset, json_file )
                    return
                data_type = 'zip'
                ext = dataset.file_type
        if not data_type:
            if check_binary( dataset.path ):
                # We have a binary dataset, but it is not Bam or Sff
                data_type = 'binary'
                #binary_ok = False
                # Binary uploads are only accepted when the filename extension
                # names an unsniffable format AND matches the declared file_type
                parts = dataset.name.split( "." )
                if len( parts ) > 1:
                    ext = parts[1].strip().lower()
                    if ext not in unsniffable_binary_formats:
                        file_err( 'The uploaded file contains inappropriate content', dataset, json_file )
                        return
                    elif ext in unsniffable_binary_formats and dataset.file_type != ext:
                        err_msg = "You must manually set the 'File Format' to '%s' when uploading %s files." % ( ext.capitalize(), ext )
                        file_err( err_msg, dataset, json_file )
                        return
        if not data_type:
            # We must have a text file
            if check_html( dataset.path ):
                file_err( 'The uploaded file contains inappropriate content', dataset, json_file )
                return
        if data_type != 'binary' and data_type != 'zip':
            # Normalize newlines (optionally converting runs of spaces to tabs)
            if dataset.space_to_tab:
                line_count = sniff.convert_newlines_sep2tabs( dataset.path )
            else:
                line_count = sniff.convert_newlines( dataset.path )
            if dataset.file_type == 'auto':
                ext = sniff.guess_ext( dataset.path )
            else:
                ext = dataset.file_type
            data_type = ext
    # Save job info for the framework
    if ext == 'auto' and dataset.ext:
        ext = dataset.ext
    if ext == 'auto':
        ext = 'data'
    # Move the dataset to its "real" path
    if dataset.get( 'link_data_only', False ):
        pass # data will remain in place
    elif dataset.type in ( 'server_dir', 'path_paste' ):
        # Server-side sources must not be moved away from their origin
        shutil.copy( dataset.path, output_path )
    else:
        shutil.move( dataset.path, output_path )
    # Write the job info
    info = dict( type = 'dataset',
                 dataset_id = dataset.dataset_id,
                 ext = ext,
                 stdout = 'uploaded %s file' % data_type,
                 name = dataset.name,
                 line_count = line_count )
    json_file.write( to_json_string( info ) + "\n" )
    # Groom the dataset content if necessary
    datatype = Registry().get_datatype_by_extension( ext )
    datatype.groom_dataset_content( output_path )
def add_composite_file( dataset, json_file, output_path, files_path ):
    """Stage the component files of a composite dataset and record job info.

    Each declared composite file is newline-converted (unless binary) and
    moved into files_path; the primary file is moved to output_path.  A
    missing required component is reported via file_err and stops staging.
    """
    if dataset.composite_files:
        os.mkdir( files_path )
        for name, value in dataset.composite_files.iteritems():
            value = util.bunch.Bunch( **value )
            if dataset.composite_file_paths[ value.name ] is None and not value.optional:
                file_err( 'A required composite data file was not provided (%s)' % name, dataset, json_file )
                break
            elif dataset.composite_file_paths[value.name] is not None:
                if not value.is_binary:
                    # BUGFIX: previously referenced "uploaded_dataset", an
                    # undefined name in this scope (NameError when reached);
                    # the per-file space_to_tab flag lives on the Bunch built
                    # from this composite file's definition.
                    if value.space_to_tab:
                        sniff.convert_newlines_sep2tabs( dataset.composite_file_paths[ value.name ][ 'path' ] )
                    else:
                        sniff.convert_newlines( dataset.composite_file_paths[ value.name ][ 'path' ] )
                shutil.move( dataset.composite_file_paths[ value.name ][ 'path' ], os.path.join( files_path, name ) )
    # Move the dataset to its "real" path
    shutil.move( dataset.primary_file, output_path )
    # Write the job info
    info = dict( type = 'dataset',
                 dataset_id = dataset.dataset_id,
                 stdout = 'uploaded %s file' % dataset.file_type )
    json_file.write( to_json_string( info ) + "\n" )
def __main__():
    """Entry point: read the JSON parameter file named in argv[1], stage each
    dataset described in it, and append result records to galaxy.json.

    Remaining argv entries are 'id:files_path:path' output specs.
    """
    if len( sys.argv ) < 2:
        print >>sys.stderr, 'usage: upload.py <json paramfile> <output spec> ...'
        sys.exit( 1 )
    output_paths = parse_outputs( sys.argv[2:] )
    json_file = open( 'galaxy.json', 'w' )
    # One JSON document per line in the param file, each describing a dataset
    for line in open( sys.argv[1], 'r' ):
        dataset = from_json_string( line )
        dataset = util.bunch.Bunch( **safe_dict( dataset ) )
        try:
            output_path = output_paths[int( dataset.dataset_id )][0]
        except:
            print >>sys.stderr, 'Output path for dataset %s not found on command line' % dataset.dataset_id
            sys.exit( 1 )
        if dataset.type == 'composite':
            files_path = output_paths[int( dataset.dataset_id )][1]
            add_composite_file( dataset, json_file, output_path, files_path )
        else:
            add_file( dataset, json_file, output_path )
    # clean up paramfile
    try:
        os.remove( sys.argv[1] )
    except:
        pass
if __name__ == '__main__':
__main__()
|
volpino/Yeps-EURAC
|
tools/data_source/upload.py
|
Python
|
mit
| 14,047
|
[
"Galaxy"
] |
60b19cee33979a6e66797e0c3ceaba3ee106c30f3de23e2ef7131dc65bd51f8f
|
# -*- coding: utf-8 -*-
"""
Created on Thu May 7 19:32:22 2015
@author: sternc1
"""
import pandas as pd
import numpy as np
import simtk.openmm as mm
import simtk.unit as u
import mdtraj as md
from copy import copy, deepcopy
import re
from cclib.parser import Gaussian
from cclib.parser.utils import convertor
from mdtraj import Trajectory
from simtk.unit import Quantity, nanometers, kilojoules_per_mole
from chemistry.charmm import CharmmPsfFile
def to_optimize(param, stream, penalty=10):
    """Update *param* with CGenFF stream files and list dihedrals to optimize.

    Parameters
    ----------
    param : CharmmParameterSet
        Updated in place by reading the stream files.
    stream : stream file or list of stream files
    penalty : int
        CGenFF penalty cutoff (Default = 10)

    Returns
    -------
    list of tuples for dihedral types newly introduced by the stream files
    whose penalty meets the cutoff
    """
    if type(stream) != list:
        stream = [stream]
    # Snapshot of the dihedral keys present before reading any stream file
    existing = [key for key in param.dihedral_types.keys()]
    for stream_file in stream:
        param.read_stream_file(stream_file)
    selected = []
    for key in param.dihedral_types.keys():
        if key not in existing and param.dihedral_types[key].penalty >= penalty:
            selected.append(key)
    return selected
def read_scan_logfile(logfiles, structure):
    """ parses Guassian09 torsion-scan log file

    parameters
    ----------
    logfiles: str of list of str
        Name of Guassian 09 torsion scan log file
    structure: charmm psf file

    returns
    -------
    TorsionScanSet
    """
    topology = md.load_psf(structure)
    structure = CharmmPsfFile(structure)
    positions = np.ndarray((0, topology.n_atoms, 3))
    qm_energies = np.ndarray(0)
    torsions = np.ndarray((0, 4), dtype=int)
    directions = np.ndarray(0, dtype=int)
    steps = np.ndarray((0, 3), dtype=int)

    if type(logfiles) != list:
        logfiles = [logfiles]

    for file in logfiles:
        print("loading %s" % file)
        direction = np.ndarray(1)
        torsion = np.ndarray((1, 4), dtype=int)
        step = []
        index = (2, 12, -1)
        # Scan direction is encoded in the file name (e.g. FRG.scanN.pos.log)
        f = file.split('/')[-1].split('.')
        if f[2] == 'pos':
            direction[0] = 1
        else:
            direction[0] = 0

        fi = open(file, 'r')
        for line in fi:
            # " Scan " line carries the four torsion atom indices (1-based)
            if re.search(' Scan ', line):
                t = line.split()[2].split(',')
                t[0] = t[0][-1]
                t[-1] = t[-1][0]
                for i in range(len(t)):
                    torsion[0][i] = (int(t[i]) - 1)
            if re.search('Step', line):
                try:
                    step = np.array(([int(line.rsplit()[j]) for j in index]))
                    step = step[np.newaxis, :]
                    steps = np.append(steps, step, axis=0)
                except (ValueError, IndexError):
                    # Not every "Step" line has the expected numeric fields.
                    # BUGFIX: was a bare "except:" that silently swallowed
                    # every error (including KeyboardInterrupt); only parse
                    # failures are expected here.
                    pass
        fi.close()

        log = Gaussian(file)
        data = log.parse()
        # convert angstroms to nanometers
        positions = np.append(positions, data.atomcoords * 0.1, axis=0)
        # Shift QM energies so the scan minimum is zero (kJ/mol)
        qm_energies = np.append(qm_energies, (convertor(data.scfenergies, "eV", "kJmol-1") -
                                min(convertor(data.scfenergies, "eV", "kJmol-1"))), axis=0)
        # Replicate the torsion/direction record once per scan frame
        for i in range(len(data.scfenergies)):
            torsions = np.append(torsions, torsion, axis=0)
            directions = np.append(directions, direction, axis=0)

    return TorsionScanSet(positions, topology, structure, torsions, directions, steps, qm_energies)
class TorsionScanSet(Trajectory):
"""container object for torsion scan
A TorsionScanSet should be constructed by loading Gaussian 09 torsion scan log files from disk
with an mdtraj.Topology object
Examples
--------
>>> torsion_set = read_scan_logfile('FRG.scanN.dir.log')
>>> print torsion_set
<torsions.TorsionScanSet with 346 frames, 22 atoms, 1 residues, 4 unique torsions without MM Energy at 0x10b099b10>
Attributes
----------
structure: chemistry.Structure
qm_energy: simtk.unit.Quantity((n_frames), unit=kilojoule/mole)
mm_energy: simtk.unit.Quantity((n_frames), unit=kilojoule/mole)
delta_energy: simtk.unit.Quantity((n_frames), unit=kilojoule/mole)
torsion_index: {np.ndarray, shape(n_frames, 4)}
step: {np.ndarray, shape(n_frame, 3)}
direction: {np.ndarray, shape(n_frame)}. 0 = negative, 1 = positive
"""
    def __init__(self, positions, topology, structure, torsions, directions, steps, qm_energies):
        """Create new TorsionScanSet object"""
        # NOTE(review): isinstance(topology, object) is always True in Python;
        # confirm which type this assertion was meant to enforce.
        assert isinstance(topology, object)
        super(TorsionScanSet, self).__init__(positions, topology)
        # CHARMM structure used later to build the OpenMM system
        self.structure = structure
        # QM energies carry units; MM/delta start empty until compute_energy()
        self.qm_energy = Quantity(value=qm_energies, unit=kilojoules_per_mole)
        self.mm_energy = Quantity()
        self.delta_energy = Quantity()
        # Per-frame metadata parsed from the Gaussian log files
        self.torsion_index = torsions
        self.direction = directions
        self.steps = steps
def to_dataframe(self):
""" convert TorsionScanSet to pandas dataframe """
data = []
for i in range(self.n_frames):
if len(self.mm_energy) == self.n_frames and len(self.delta_energy) == self.n_frames:
data.append((self.torsion_index[i], self.direction[i], self.steps[i], self.qm_energy[i], self.mm_energy[i],
self.delta_energy[i]))
else:
data.append((self.torsion_index[i], self.direction[i], self.steps[i], self.qm_energy[i], float('nan'), float('nan')))
torsion_set = pd.DataFrame(data, columns=[ "torsion", "scan_direction", "step_point_total", "QM_energy KJ/mol",
"MM_energy KJ/mole", "delta KJ/mole"])
return torsion_set
def _string_summary_basic(self):
"""Basic summary of TorsionScanSet in string form."""
energy_str = 'with MM Energy' if self._have_mm_energy else 'without MM Energy'
value = "torsions.TorsionScanSet with %d frames, %d atoms, %d residues, %s" % (
self.n_frames, self.n_atoms, self.n_residues, energy_str)
return value
def extract_geom_opt(self):
key = []
for i, step in enumerate(self.steps):
try:
if step[1] != self.steps[i+1][1]:
key.append(i)
except IndexError:
key.append(i)
new_torsionScanSet = self.slice(key)
return new_torsionScanSet
def compute_energy(self, param, offset, platform=None,):
""" Computes energy for a given structure with a given parameter set
Parameters
----------
param: chemistry.charmm.CharmmParameterSet
platform: simtk.openmm.Platform to evaluate energy on (if None, will select automatically)
"""
# Create Context.
integrator = mm.VerletIntegrator(0.004*u.picoseconds)
system = self.structure.createSystem(param)
if platform != None:
context = mm.Context(system, integrator, platform)
else:
context = mm.Context(system, integrator)
# Compute potential energies for all snapshots.
self.mm_energy = Quantity(value=np.zeros([self.n_frames], np.float64), unit=kilojoules_per_mole)
for i in range(self.n_frames):
context.setPositions(self.openmm_positions(i))
state = context.getState(getEnergy=True)
self.mm_energy[i] = state.getPotentialEnergy()
# Subtract off minimum of mm_energy
self.mm_energy -= self.mm_energy.min() + Quantity(value=float(offset.value), unit=kilojoules_per_mole)
self.delta_energy = (self.qm_energy - self.mm_energy)
# Compute deviation between MM and QM energies with offset
#self.delta_energy = mm_energy - self.qm_energy + Quantity(value=offset, unit=kilojoule_per_mole)
# Clean up.
del context
del system
del integrator
# print('Heap at end of compute_energy'), hp.heeap()
@property
def _have_mm_energy(self):
return len(self.mm_energy) is not 0
# @property
# def _unique_torsions(self):
# Not returning the right amount. debug
# torsions = []
# for i in range(len(self.torsion_index)):
# try:
# if (self.torsion_index[i] != self.torsion_index[i+1]).all():
# torsions.append(self.torsion_index[i]), torsions.append(self.torsion_index[i+1])
# except:
# pass
# return len(torsions), torsions
def __getitem__(self, key):
"Get a slice of this trajectory"
return self.slice(key)
def slice(self, key, copy=True):
"""Slice trajectory, by extracting one or more frames into a separate object
This method can also be called using index bracket notation, i.e
`traj[1] == traj.slice(1)`
Parameters
----------
key : {int, np.ndarray, slice}
The slice to take. Can be either an int, a list of ints, or a slice
object.
copy : bool, default=True
Copy the arrays after slicing. If you set this to false, then if
you modify a slice, you'll modify the original array since they
point to the same data.
"""
xyz = self.xyz[key]
time = self.time[key]
torsions = self.torsion_index[key]
direction = self.direction[key]
steps = self.steps[key]
qm_energy = self.qm_energy[key]
unitcell_lengths, unitcell_angles = None, None
if self.unitcell_angles is not None:
unitcell_angles = self.unitcell_angles[key]
if self.unitcell_lengths is not None:
unitcell_lengths = self.unitcell_lengths[key]
if copy:
xyz = xyz.copy()
time = time.copy()
topology = deepcopy(self._topology)
structure = deepcopy(self.structure)
torsions = torsions.copy()
direction = direction.copy()
steps = steps.copy()
qm_energy = qm_energy.copy()
if self.unitcell_angles is not None:
unitcell_angles = unitcell_angles.copy()
if self.unitcell_lengths is not None:
unitcell_lengths = unitcell_lengths.copy()
newtraj = self.__class__(
xyz, topology, structure, torsions, direction, steps, qm_energy)
if self._rmsd_traces is not None:
newtraj._rmsd_traces = np.array(self._rmsd_traces[key],
ndmin=1, copy=True)
return newtraj
|
hainm/Torsions
|
torsions/TorsionScanSet.py
|
Python
|
gpl-2.0
| 10,496
|
[
"CHARMM",
"Gaussian",
"MDTraj",
"OpenMM",
"cclib"
] |
881ad348bca2655f21f1970b42bb2745df15cd52ce3b22d43ba35be9c14ed617
|
from __future__ import print_function
from __future__ import division
import os, sys
import csv
import unittest
from __main__ import vtk, qt, ctk, slicer
from slicer.ScriptedLoadableModule import *
from types import *
import math
import shutil
import pickle
import numpy as np
import zipfile
import json
import subprocess
from copy import deepcopy
from scipy import stats
import time
import shapepcalib as shapca
class ShapeVariationAnalyzer(ScriptedLoadableModule):
    """Uses ScriptedLoadableModule base class, available at:
    https://github.com/Slicer/Slicer/blob/master/Base/Python/slicer/ScriptedLoadableModule.py
    """
    def __init__(self, parent):
        # Register the module with Slicer; the attributes set on `parent`
        # populate the module selector and the Help & Acknowledgement panels.
        ScriptedLoadableModule.__init__(self, parent)
        parent.title = "Population Analysis"
        parent.categories = ["Shape Analysis"]
        parent.dependencies = []
        parent.contributors = ["Lopez Mateo (University of North Carolina), Priscille de Dumast (University of Michigan), Laura Pascal (University of Michigan)"]
        parent.helpText = """
            Shape Variation Analyzer allows the PCA decomposition and exploration of 3D models.
            The generated models can be evaluated by computing their specificity, compactness and generalization.
            """
        parent.acknowledgementText = """
            This work was supported by the National
            Institutes of Dental and Craniofacial Research
            and Biomedical Imaging and Bioengineering of
            the National Institutes of Health under Award
            Number R01DE024450.
            """
class ShapeVariationAnalyzerWidget(ScriptedLoadableModuleWidget):
    def setup(self):
        """Build the module GUI.

        Loads the Qt .ui file, binds every widget to an attribute via getUI(),
        applies the initial widget configuration (defaults, hidden/disabled
        state), and wires all signal/slot connections for the four tabs.
        """
        ScriptedLoadableModuleWidget.setup(self)
        # ---- Widget Setup ----
        # Global Variables
        self.logic = ShapeVariationAnalyzerLogic()
        #print(dir(self.logic))
        self.dictVTKFiles = dict()
        self.dictGroups = dict()
        self.dictCSVFile = dict()
        self.directoryList = list()
        self.groupSelected = set()
        self.dictShapeModels = dict()
        self.patientList = list()
        self.dictResults = dict()
        self.dictFeatData = dict()
        #self.dictPCA = dict()
        self.PCA_sliders=list()
        self.PCA_sliders_label=list()
        self.PCA_sliders_value_label=list()
        self.PCANode = None
        # Interface
        self.moduleName = 'ShapeVariationAnalyzer'
        scriptedModulesPath = eval('slicer.modules.%s.path' % self.moduleName.lower())
        scriptedModulesPath = os.path.dirname(scriptedModulesPath)
        path = os.path.join(scriptedModulesPath, 'Resources', 'UI', '%s.ui' % self.moduleName)
        self.widget = slicer.util.loadUI(path)
        self.layout.addWidget(self.widget)
        # global variables of the Interface:
        # Tab: Creation of CSV File for Classification Groups
        self.collapsibleButton_creationCSVFile = self.getUI('CollapsibleButton_creationCSVFile')
        self.spinBox_group = self.getUI('spinBox_group')
        self.directoryButton_creationCSVFile = self.getUI('DirectoryButton_creationCSVFile')
        self.stackedWidget_manageGroup = self.getUI('stackedWidget_manageGroup')
        self.pushButton_addGroup = self.getUI('pushButton_addGroup')
        self.pushButton_removeGroup = self.getUI('pushButton_removeGroup')
        self.pushButton_modifyGroup = self.getUI('pushButton_modifyGroup')
        self.pushButton_exportCSVfile = self.getUI('pushButton_exportCSVfile')
        # Tab: Creation of New Classification Groups
        self.collapsibleButton_previewClassificationGroups = self.getUI('CollapsibleButton_previewClassificationGroups')
        self.pathLineEdit_previewGroups = self.getUI('pathLineEdit_previewGroups')
        self.collapsibleGroupBox_previewVTKFiles = self.getUI('CollapsibleGroupBox_previewVTKFiles')
        self.checkableComboBox_ChoiceOfGroup = self.getUI('CheckableComboBox_ChoiceOfGroup')
        self.tableWidget_VTKFiles = self.getUI('tableWidget_VTKFiles')
        self.pushButton_previewVTKFiles = self.getUI('pushButton_previewVTKFiles')
        self.pushButton_exportUpdatedClassification = self.getUI('pushButton_exportUpdatedClassification')
        # Tab: PCA Analysis
        self.label_valueExploration=self.getUI('label_valueExploration')
        self.label_varianceExploration=self.getUI('label_varianceExploration')
        self.label_groupExploration=self.getUI('label_groupExploration')
        self.label_minVariance=self.getUI('label_minVariance')
        self.label_maxSlider=self.getUI('label_maxSlider')
        self.label_colorMode=self.getUI('label_colorMode')
        self.label_colorModeParam1=self.getUI('label_colorModeParam1')
        self.label_colorModeParam2=self.getUI('label_colorModeParam2')
        self.label_numberShape=self.getUI('label_numberShape')
        self.label_normalLabel_1=self.getUI('label_normalLabel_1')
        self.label_normalLabel_2=self.getUI('label_normalLabel_2')
        self.label_normalLabel_3=self.getUI('label_normalLabel_3')
        self.label_normalLabel_4=self.getUI('label_normalLabel_4')
        self.label_normalLabel_5=self.getUI('label_normalLabel_5')
        self.label_normalLabel_6=self.getUI('label_normalLabel_6')
        self.label_normalLabel_7=self.getUI('label_normalLabel_7')
        self.collapsibleButton_PCA = self.getUI('collapsibleButton_PCA')
        self.pathLineEdit_CSVFilePCA = self.getUI('pathLineEdit_CSVFilePCA')
        self.pathLineEdit_exploration = self.getUI('pathLineEdit_exploration')
        self.comboBox_groupPCA = self.getUI('comboBox_groupPCA')
        self.comboBox_colorMode = self.getUI('comboBox_colorMode')
        self.pushButton_PCA = self.getUI('pushButton_PCA')
        self.pushButton_resetSliders = self.getUI('pushButton_resetSliders')
        self.pushButton_saveExploration=self.getUI('pushButton_saveExploration')
        self.pushButton_toggleMean=self.getUI('pushButton_toggleMean')
        self.pushButton_evaluateModels=self.getUI('pushButton_evaluateModels')
        self.label_statePCA = self.getUI('label_statePCA')
        self.gridLayout_PCAsliders=self.getUI('gridLayout_PCAsliders')
        self.spinBox_minVariance=self.getUI('spinBox_minVariance')
        self.spinBox_maxSlider=self.getUI('spinBox_maxSlider')
        # NOTE(review): the attribute names drop the underscore used in the .ui
        # object names ('spinBox_colorModeParam_1' -> spinBox_colorModeParam1).
        self.spinBox_colorModeParam1=self.getUI('spinBox_colorModeParam_1')
        self.spinBox_colorModeParam2=self.getUI('spinBox_colorModeParam_2')
        self.spinBox_numberShape=self.getUI('spinBox_numberShape')
        self.ctkColorPickerButton_groupColor=self.getUI('ctkColorPickerButton_groupColor')
        self.checkBox_useHiddenEigenmodes=self.getUI('checkBox_useHiddenEigenmodes')
        # Tab: PCA Export
        self.collapsibleButton_PCAExport = self.getUI('CollapsibleButton_PCAExport')
        self.comboBox_SingleExportGroup = self.getUI('comboBox_SingleExportGroup')
        self.comboBox_SingleExportPC = self.getUI('comboBox_SingleExportPC')
        self.label_PC = self.getUI('label_PC')
        self.label_Group = self.getUI('label_Group')
        self.DirectoryButton_PCASingleExport = self.getUI('DirectoryButton_PCASingleExport')
        self.pushButton_PCAExport = self.getUI('pushButton_PCAExport')
        self.pushButton_PCACurrentExport = self.getUI('pushButton_PCACurrentExport')
        self.checkBox_stdMaxMin = self.getUI('checkBox_stdMaxMin')
        self.checkBox_stdRegular = self.getUI('checkBox_stdRegular')
        self.doubleSpinBox_stdRegular = self.getUI('doubleSpinBox_stdRegular')
        self.doubleSpinBox_stdmin = self.getUI('doubleSpinBox_stdmin')
        self.doubleSpinBox_stdmax = self.getUI('doubleSpinBox_stdmax')
        self.label_stdRegular = self.getUI('label_stdRegular')
        self.label_stdmin = self.getUI('label_stdmin')
        self.label_stdmax = self.getUI('label_stdmax')
        self.doubleSpinBox_step = self.getUI('doubleSpinBox_step')
        #self.doubleSpinBox_insideLimit=self.getUI('doubleSpinBox_insideLimit')
        #self.doubleSpinBox_insideLimit=self.getUI('doubleSpinBox_outsidesideLimit')
        # --------------------------------------------------------- #
        #                  Widget Configuration                      #
        # --------------------------------------------------------- #
        ##PCA exploration Widgets Configuration
        #self.pushButton_PCA.setDisabled(True)
        #self.comboBox_groupPCA.setDisabled(True)
        self.comboBox_colorMode.addItem('Group color')
        self.comboBox_colorMode.addItem('Unsigned distance to mean shape')
        self.comboBox_colorMode.addItem('Signed distance to mean shape')
        self.spinBox_minVariance.setValue(2)
        self.spinBox_maxSlider.setMinimum(1)
        self.spinBox_maxSlider.setMaximum(8)
        self.spinBox_maxSlider.setValue(8)
        self.spinBox_colorModeParam1.setMinimum(1)
        self.spinBox_colorModeParam2.setMinimum(1)
        self.spinBox_colorModeParam1.setMaximum(10000)
        self.spinBox_colorModeParam2.setMaximum(10000)
        self.spinBox_colorModeParam1.setValue(1)
        self.spinBox_colorModeParam2.setValue(1)
        self.spinBox_numberShape.setMinimum(100)
        self.spinBox_numberShape.setMaximum(1000000)
        self.spinBox_numberShape.setValue(10000)
        self.checkBox_useHiddenEigenmodes.setChecked(True)
        self.label_statePCA.hide()
        self.ctkColorPickerButton_groupColor.color=qt.QColor(255,255,255)
        self.ctkColorPickerButton_groupColor.setDisplayColorName(False)
        # All PCA exploration widgets start hidden until a PCA model is
        # computed or loaded.
        self.label_normalLabel_1.hide()
        self.label_normalLabel_2.hide()
        self.label_normalLabel_3.hide()
        self.label_normalLabel_4.hide()
        self.label_normalLabel_5.hide()
        self.label_normalLabel_6.hide()
        self.label_normalLabel_7.hide()
        self.comboBox_groupPCA.hide()
        self.comboBox_colorMode.hide()
        self.ctkColorPickerButton_groupColor.hide()
        self.pushButton_resetSliders.hide()
        self.label_valueExploration.hide()
        self.label_groupExploration.hide()
        self.label_varianceExploration.hide()
        self.pushButton_saveExploration.hide()
        self.pushButton_toggleMean.hide()
        self.pushButton_evaluateModels.hide()
        self.spinBox_minVariance.hide()
        self.spinBox_maxSlider.hide()
        self.label_minVariance.hide()
        self.label_maxSlider.hide()
        self.spinBox_colorModeParam1.hide()
        self.spinBox_colorModeParam2.hide()
        self.label_colorMode.hide()
        self.label_colorModeParam1.hide()
        self.label_colorModeParam2.hide()
        self.label_numberShape.hide()
        self.spinBox_numberShape.hide()
        self.checkBox_useHiddenEigenmodes.hide()
        # disable/enable and hide/show widget
        #self.comboBox_healthyGroup.setDisabled(True)
        self.pushButton_exportUpdatedClassification.setDisabled(True)
        self.checkableComboBox_ChoiceOfGroup.setDisabled(True)
        self.tableWidget_VTKFiles.setDisabled(True)
        self.pushButton_previewVTKFiles.setDisabled(True)
        self.label_statePCA.hide()
        self.collapsibleButton_creationCSVFile.setChecked(False)
        self.collapsibleButton_previewClassificationGroups.setChecked(False)
        # initialisation of the stackedWidget to display the button "add group"
        self.stackedWidget_manageGroup.setCurrentIndex(0)
        # spinbox configuration in the tab "Creation of CSV File for Classification Groups"
        self.spinBox_group.setMinimum(0)
        self.spinBox_group.setMaximum(0)
        self.spinBox_group.setValue(0)
        # configuration of the table for preview VTK file
        self.tableWidget_VTKFiles.setColumnCount(4)
        self.tableWidget_VTKFiles.setHorizontalHeaderLabels([' VTK files ', ' Group ', ' Visualization ', 'Color'])
        self.tableWidget_VTKFiles.setColumnWidth(0, 200)
        horizontalHeader = self.tableWidget_VTKFiles.horizontalHeader()
        horizontalHeader.setStretchLastSection(False)
        '''horizontalHeader.setResizeMode(0,qt.QHeaderView.Stretch)
        horizontalHeader.setResizeMode(1,qt.QHeaderView.ResizeToContents)
        horizontalHeader.setResizeMode(2,qt.QHeaderView.ResizeToContents)
        horizontalHeader.setResizeMode(3,qt.QHeaderView.ResizeToContents)'''
        self.tableWidget_VTKFiles.verticalHeader().setVisible(True)
        # TAB: PCA Export
        self.pushButton_PCAExport.setEnabled(False)
        self.comboBox_SingleExportPC.setEnabled(False)
        self.comboBox_SingleExportGroup.setEnabled(False)
        self.pushButton_PCACurrentExport.setEnabled(False)
        self.checkBox_stdRegular.setChecked(True)
        self.checkBox_stdMaxMin.setChecked(False)
        self.doubleSpinBox_stdmin.setDisabled(True)
        self.doubleSpinBox_stdmax.setDisabled(True)
        # --------------------------------------------------------- #
        #                       Connection                           #
        # --------------------------------------------------------- #
        # Tab: Creation of CSV File for Classification Groups
        self.collapsibleButton_creationCSVFile.connect('clicked()',
                                                       lambda: self.onSelectedCollapsibleButtonOpen(self.collapsibleButton_creationCSVFile))
        self.spinBox_group.connect('valueChanged(int)', self.onManageGroup)
        self.pushButton_addGroup.connect('clicked()', self.onAddGroupForCreationCSVFile)
        self.pushButton_removeGroup.connect('clicked()', self.onRemoveGroupForCreationCSVFile)
        self.pushButton_modifyGroup.connect('clicked()', self.onModifyGroupForCreationCSVFile)
        self.pushButton_exportCSVfile.connect('clicked()', self.onExportForCreationCSVFile)
        # Tab: Preview / Update Classification Groups
        self.collapsibleButton_previewClassificationGroups.connect('clicked()',
                                                                   lambda: self.onSelectedCollapsibleButtonOpen(self.collapsibleButton_previewClassificationGroups))
        self.pathLineEdit_previewGroups.connect('currentPathChanged(const QString)', self.onSelectPreviewGroups)
        self.checkableComboBox_ChoiceOfGroup.connect('checkedIndexesChanged()', self.onCheckableComboBoxValueChanged)
        self.pushButton_previewVTKFiles.connect('clicked()', self.onPreviewVTKFiles)
        self.pushButton_exportUpdatedClassification.connect('clicked()', self.onExportUpdatedClassificationGroups)
        # Tab: Select Input Data
        self.collapsibleButton_PCA.connect('clicked()',
                                           lambda: self.onSelectedCollapsibleButtonOpen(self.collapsibleButton_PCA))
        slicer.mrmlScene.AddObserver(slicer.mrmlScene.EndCloseEvent, self.onCloseScene)
        self.stateCSVMeansShape = False
        self.stateCSVDataset = False
        # Tab : PCA
        self.pathLineEdit_CSVFilePCA.connect('currentPathChanged(const QString)', self.onCSV_PCA)
        self.pathLineEdit_exploration.connect('currentPathChanged(const QString)', self.onLoadExploration)
        self.pushButton_PCA.connect('clicked()', self.onExportForExploration)
        self.pushButton_resetSliders.connect('clicked()', self.onResetSliders)
        self.pushButton_saveExploration.connect('clicked()',self.onSaveExploration)
        self.pushButton_toggleMean.connect('clicked()',self.onToggleMeanShape)
        self.pushButton_evaluateModels.connect('clicked()',self.onEvaluateModels)
        self.comboBox_groupPCA.connect('activated(QString)',self.explorePCA)
        self.comboBox_colorMode.connect('activated(QString)',self.onColorModeChange)
        self.spinBox_maxSlider.connect('valueChanged(int)',self.onUpdateSliderList)
        self.spinBox_minVariance.connect('valueChanged(int)',self.onUpdateSliderList)
        self.spinBox_colorModeParam1.connect('valueChanged(int)',self.onUpdateColorModeParam)
        self.spinBox_colorModeParam2.connect('valueChanged(int)',self.onUpdateColorModeParam)
        self.ctkColorPickerButton_groupColor.connect('colorChanged(QColor)',self.onGroupColorChanged)
        self.checkBox_useHiddenEigenmodes.connect('stateChanged(int)',self.onEigenCheckBoxChanged)
        # "DONE" means no model-evaluation subprocess is currently running.
        self.evaluationFlag="DONE"
        # Tab : PCA Export
        self.pushButton_PCAExport.connect('clicked()', self.onExportForPCAExport)
        self.pushButton_PCACurrentExport.connect('clicked()', self.onExportForPCACurrentExport)
        self.checkBox_stdMaxMin.connect('clicked()', self.onMinMaxstdCheckBoxChanged)
        self.checkBox_stdRegular.connect('clicked()', self.onRegularstdCheckBoxChanged)
def getUI(self, objectName):
""" Functions to recovery the widget in the .ui file
"""
return slicer.util.findChild(self.widget, objectName)
    # function called each time that the user "enter" in Diagnostic Index interface
    def enter(self):
        """Slicer hook: invoked whenever the user switches to this module. No-op for now."""
        #TODO
        pass
    # function called each time that the user "exit" in Diagnostic Index interface
    def exit(self):
        """Slicer hook: invoked whenever the user leaves this module. No-op for now."""
        #TODO
        pass
    # function called each time that the scene is closed (if Diagnostic Index has been initialized)
    def onCloseScene(self, obj, event):
        """Reset the module when the MRML scene closes.

        Clears every cached dictionary/list, re-blanks the CSV/preview tab
        widgets, hides all PCA exploration widgets, stops a running model
        evaluation if any, and restores the default tab layout.
        """
        print("onCloseScene")
        #self.logic = ShapeVariationAnalyzerLogic(self)
        sys.stdout.flush()
        # Drop all cached state
        self.dictVTKFiles = dict()
        self.dictGroups = dict()
        self.dictCSVFile = dict()
        self.directoryList = list()
        self.groupSelected = set()
        self.dictShapeModels = dict()
        self.patientList = list()
        self.dictResults = dict()
        self.dictFeatData = dict()
        # Tab: New Classification Groups
        self.pathLineEdit_previewGroups.setCurrentPath(" ")
        self.checkableComboBox_ChoiceOfGroup.setDisabled(True)
        self.tableWidget_VTKFiles.clear()
        self.tableWidget_VTKFiles.setColumnCount(4)
        self.tableWidget_VTKFiles.setHorizontalHeaderLabels([' VTK files ', ' Group ', ' Visualization ', 'Color'])
        self.tableWidget_VTKFiles.setColumnWidth(0, 200)
        horizontalHeader = self.tableWidget_VTKFiles.horizontalHeader()
        horizontalHeader.setStretchLastSection(False)
        '''horizontalHeader.setResizeMode(0,qt.QHeaderView.Stretch)
        horizontalHeader.setResizeMode(1,qt.QHeaderView.ResizeToContents)
        horizontalHeader.setResizeMode(2,qt.QHeaderView.ResizeToContents)
        horizontalHeader.setResizeMode(3,qt.QHeaderView.ResizeToContents)'''
        self.tableWidget_VTKFiles.verticalHeader().setVisible(False)
        self.tableWidget_VTKFiles.setDisabled(True)
        self.pushButton_previewVTKFiles.setDisabled(True)
        self.pushButton_exportUpdatedClassification.setDisabled(True)
        #PCA
        # Hide every PCA exploration widget again (mirrors the state set in setup())
        self.label_normalLabel_1.hide()
        self.label_normalLabel_2.hide()
        self.label_normalLabel_3.hide()
        self.label_normalLabel_4.hide()
        self.label_normalLabel_5.hide()
        self.label_normalLabel_6.hide()
        self.label_normalLabel_7.hide()
        self.deletePCASliders()
        self.comboBox_groupPCA.hide()
        self.comboBox_colorMode.hide()
        self.ctkColorPickerButton_groupColor.hide()
        self.pushButton_resetSliders.hide()
        self.label_valueExploration.hide()
        self.label_groupExploration.hide()
        self.label_varianceExploration.hide()
        self.pushButton_saveExploration.hide()
        self.pushButton_toggleMean.hide()
        self.pushButton_evaluateModels.hide()
        self.spinBox_minVariance.hide()
        self.spinBox_maxSlider.hide()
        self.label_minVariance.hide()
        self.label_maxSlider.hide()
        self.spinBox_colorModeParam1.hide()
        self.spinBox_colorModeParam2.hide()
        self.label_colorMode.hide()
        self.label_colorModeParam1.hide()
        self.label_colorModeParam2.hide()
        self.label_numberShape.hide()
        self.spinBox_numberShape.hide()
        self.checkBox_useHiddenEigenmodes.hide()
        self.checkBox_useHiddenEigenmodes.setChecked(True)
        self.pushButton_PCA.setEnabled(False)
        # Blank the PCA CSV path without re-triggering onCSV_PCA
        self.pathLineEdit_CSVFilePCA.disconnect('currentPathChanged(const QString)', self.onCSV_PCA)
        self.pathLineEdit_CSVFilePCA.setCurrentPath(" ")
        self.pathLineEdit_CSVFilePCA.connect('currentPathChanged(const QString)', self.onCSV_PCA)
        self.pathLineEdit_exploration.setCurrentPath(" ")
        # Stop a still-running model evaluation and restore the button
        if self.evaluationFlag!="DONE":
            self.onKillEvaluation()
        try:
            self.pushButton_evaluateModels.clicked.disconnect()
        except:
            pass
        self.pushButton_evaluateModels.setText("Evaluate models (It may take a long time)")
        self.pushButton_evaluateModels.connect('clicked()',self.onEvaluateModels)
        self.spinBox_minVariance.setValue(2)
        # Enable/disable
        self.pushButton_exportUpdatedClassification.setDisabled(True)
        self.checkableComboBox_ChoiceOfGroup.setDisabled(True)
        self.tableWidget_VTKFiles.setDisabled(True)
        self.pushButton_previewVTKFiles.setDisabled(True)
        self.label_statePCA.hide()
        self.stateCSVMeansShape = False
        self.stateCSVDataset = False
        self.collapsibleButton_PCA.setChecked(True)
        self.collapsibleButton_creationCSVFile.setChecked(False)
        self.collapsibleButton_previewClassificationGroups.setChecked(False)
        # initialisation of the stackedWidget to display the button "add group"
        self.stackedWidget_manageGroup.setCurrentIndex(0)
        # spinbox configuration in the tab "Creation of CSV File for Classification Groups"
        self.spinBox_group.setMinimum(0)
        self.spinBox_group.setMaximum(0)
        self.spinBox_group.setValue(0)
def onSelectedCollapsibleButtonOpen(self, selectedCollapsibleButton):
""" Only one tab can be display at the same time:
When one tab is opened all the other tabs are closed
"""
if selectedCollapsibleButton.isChecked():
collapsibleButtonList = [self.collapsibleButton_creationCSVFile,
self.collapsibleButton_previewClassificationGroups,
self.collapsibleButton_PCA]
for collapsibleButton in collapsibleButtonList:
collapsibleButton.setChecked(False)
selectedCollapsibleButton.setChecked(True)
# ---------------------------------------------------- #
# Tab: Creation of CSV File for Classification Groups #
# ---------------------------------------------------- #
def onManageGroup(self):
""" Function to display the 3 button:
- "Add Group" for a group which hasn't been added yet
- "Remove Group" for the last group added
- "Modify Group" for all the groups added
"""
if self.spinBox_group.maximum == self.spinBox_group.value:
self.stackedWidget_manageGroup.setCurrentIndex(0)
else:
self.stackedWidget_manageGroup.setCurrentIndex(1)
if (self.spinBox_group.maximum - 1) == self.spinBox_group.value:
self.pushButton_removeGroup.show()
else:
self.pushButton_removeGroup.hide()
# Update the path of the directory button
if len(self.directoryList) > 0:
self.directoryButton_creationCSVFile.directory = self.directoryList[self.spinBox_group.value - 1]
def onAddGroupForCreationCSVFile(self):
"""Function to add a group of the dictionary
- Add the paths of all the vtk files found in the directory given
of a dictionary which will be used to create the CSV file
"""
# Error message
directory = self.directoryButton_creationCSVFile.directory
if directory in self.directoryList:
index = self.directoryList.index(directory) + 1
slicer.util.errorDisplay('Path of directory already used for the group ' + str(index))
return
# Add the paths of vtk files of the dictionary
self.logic.addGroupToDictionary(self.dictCSVFile, directory, self.directoryList, self.spinBox_group.value)
condition = self.logic.checkSeveralMeshInDict(self.dictCSVFile)
if not condition:
# Remove the paths of vtk files of the dictionary
self.logic.removeGroupToDictionary(self.dictCSVFile, self.directoryList, self.spinBox_group.value)
return
# Increment of the number of the group in the spinbox
self.spinBox_group.blockSignals(True)
self.spinBox_group.setMaximum(self.spinBox_group.value + 1)
self.spinBox_group.setValue(self.spinBox_group.value + 1)
self.spinBox_group.blockSignals(False)
# Message for the user
slicer.util.delayDisplay("Group Added")
def onRemoveGroupForCreationCSVFile(self):
""" Function to remove a group of the dictionary
- Remove the paths of all the vtk files corresponding to the selected group
of the dictionary which will be used to create the CSV file
"""
# Remove the paths of the vtk files of the dictionary
self.logic.removeGroupToDictionary(self.dictCSVFile, self.directoryList, self.spinBox_group.value)
# Decrement of the number of the group in the spinbox
self.spinBox_group.blockSignals(True)
self.spinBox_group.setMaximum(self.spinBox_group.maximum - 1)
self.spinBox_group.blockSignals(False)
# Change the buttons "remove group" and "modify group" in "add group"
self.stackedWidget_manageGroup.setCurrentIndex(0)
# Message for the user
slicer.util.delayDisplay("Group removed")
def onModifyGroupForCreationCSVFile(self):
""" Function to modify a group of the dictionary:
- Remove of the dictionary the paths of all vtk files corresponding to the selected group
- Add of the dictionary the new paths of all the vtk files
"""
# Error message
directory = self.directoryButton_creationCSVFile.directory
if directory in self.directoryList:
index = self.directoryList.index(directory) + 1
slicer.util.errorDisplay('Path of directory already used for the group ' + str(index))
return
# Remove the paths of vtk files of the dictionary
self.logic.removeGroupToDictionary(self.dictCSVFile, self.directoryList, self.spinBox_group.value)
# Add the paths of vtk files of the dictionary
self.logic.addGroupToDictionary(self.dictCSVFile, directory, self.directoryList, self.spinBox_group.value)
# Message for the user
slicer.util.delayDisplay("Group modified")
def onExportForCreationCSVFile(self):
""" Function to export the CSV file in the directory chosen by the user
- Save the CSV file from the dictionary previously filled
- Load automatically this CSV file in the next tab: "Creation of New Classification Groups"
"""
# Path of the csv file
dlg = ctk.ctkFileDialog()
filepath = dlg.getSaveFileName(None, "Export CSV file for Classification groups", os.path.join(qt.QDir.homePath(), "Desktop"), "CSV File (*.csv)")
directory = os.path.dirname(filepath)
basename = os.path.basename(filepath)
# Save the CSV File
self.logic.creationCSVFile(directory, basename, self.dictCSVFile, "Groups")
# Re-Initialization of the first tab
self.spinBox_group.setMaximum(1)
self.spinBox_group.setValue(1)
self.stackedWidget_manageGroup.setCurrentIndex(0)
self.directoryButton_creationCSVFile.directory = qt.QDir.homePath() + '/Desktop'
# Re-Initialization of:
# - the dictionary containing all the paths of the vtk groups
# - the list containing all the paths of the different directories
self.directoryList = list()
self.dictCSVFile = dict()
# Message in the python console
print("Export CSV File: " + filepath)
sys.stdout.flush()
# Load automatically the CSV file in the pathline in the next tab "Creation of New Classification Groups"
self.pathLineEdit_previewGroups.setCurrentPath(filepath)
self.pathLineEdit_CSVFilePCA.setCurrentPath(filepath)
#self.pathLineEdit_CSVFileDataset.setCurrentPath(filepath)
    # ---------------------------------------------------- #
    #        Tab: Creation of New Classification Groups    #
    #        Preview/Update classification Groups          #
    # ---------------------------------------------------- #
    def onSelectPreviewGroups(self):
        """ Function to read the CSV file containing all the vtk
        filepaths needed to create the new Classification Groups
        """
        # Re-initialization of the dictionary containing all the vtk files
        # which will be used to create a new Classification Groups
        self.dictVTKFiles = dict()
        # Check if the path exists:
        if not os.path.exists(self.pathLineEdit_previewGroups.currentPath):
            return
        # Check if it's a CSV file; on failure the path widget is blanked
        condition1 = self.logic.checkExtension(self.pathLineEdit_previewGroups.currentPath, ".csv")
        if not condition1:
            self.pathLineEdit_previewGroups.setCurrentPath(" ")
            return
        # Read the CSV file and build the group -> vtk-paths dictionary
        self.logic.table = self.logic.readCSVFile(self.pathLineEdit_previewGroups.currentPath)
        condition2 = self.logic.creationDictVTKFiles(self.dictVTKFiles)
        condition3 = self.logic.checkSeveralMeshInDict(self.dictVTKFiles)
        # If the file is not conformed:
        #    Re-initialization of the dictionary containing all the data
        #    which will be used to create a new Classification Groups
        if not (condition2 and condition3):
            self.dictVTKFiles = dict()
            self.pathLineEdit_previewGroups.setCurrentPath(" ")
            return
        # Fill the table for the preview of the vtk files in Shape Population Viewer
        self.fillTableForPreviewVTKFilesInSPV(self.dictVTKFiles,
                                               self.checkableComboBox_ChoiceOfGroup,
                                               self.tableWidget_VTKFiles)
        # Enable/disable buttons
        self.checkableComboBox_ChoiceOfGroup.setEnabled(True)
        self.tableWidget_VTKFiles.setEnabled(True)
        self.pushButton_previewVTKFiles.setEnabled(True)
        # self.pushButton_compute.setEnabled(True)
    def fillTableForPreviewVTKFilesInSPV(self, dictVTKFiles, checkableComboBox, table):
        """Function to fill the table of the preview of all VTK files
            - Checkable combobox: allow the user to select one or several groups that he wants to display in SPV
            - Column 0: filename of the vtk file
            - Column 1: combobox with the group corresponding to the vtk file
            - Column 2: checkbox to allow the user to choose which models will be displayed in SPV
            - Column 3: color that the mesh will have in SPV
        """
        row = 0
        for key, value in dictVTKFiles.items():
            # Fill the Checkable Combobox
            checkableComboBox.addItem("Group " + str(key))
            # Table:
            for vtkFile in value:
                table.setRowCount(row + 1)
                # Column 0: read-only label with the vtk file name
                filename = os.path.basename(vtkFile)
                labelVTKFile = qt.QLabel(filename)
                labelVTKFile.setAlignment(0x84)
                table.setCellWidget(row, 0, labelVTKFile)
                # Column 1: group-selection combobox, centered in its cell
                # NOTE(review): assumes the dictionary keys line up with the
                # combobox indices (setCurrentIndex(key)) -- confirm for
                # 1-based group keys.
                widget = qt.QWidget()
                layout = qt.QHBoxLayout(widget)
                comboBox = qt.QComboBox()
                comboBox.addItems(list(dictVTKFiles.keys()))
                comboBox.setCurrentIndex(key)
                layout.addWidget(comboBox)
                layout.setAlignment(0x84)
                layout.setContentsMargins(0, 0, 0, 0)
                widget.setLayout(layout)
                table.setCellWidget(row, 1, widget)
                comboBox.connect('currentIndexChanged(int)', self.onGroupValueChanged)
                # Column 2: visibility checkbox, centered in its cell
                widget = qt.QWidget()
                layout = qt.QHBoxLayout(widget)
                checkBox = qt.QCheckBox()
                layout.addWidget(checkBox)
                layout.setAlignment(0x84)
                layout.setContentsMargins(0, 0, 0, 0)
                widget.setLayout(layout)
                table.setCellWidget(row, 2, widget)
                checkBox.connect('stateChanged(int)', self.onCheckBoxTableValueChanged)
                # Column 3: color swatch item, initialized to white
                table.setItem(row, 3, qt.QTableWidgetItem())
                table.item(row,3).setBackground(qt.QColor(255,255,255))
                row = row + 1
def onCheckableComboBoxValueChanged(self):
""" Function to manage the checkable combobox to allow
the user to choose the group that he wants to preview in SPV
"""
# Update the checkboxes in the qtableWidget of each vtk file
index = self.checkableComboBox_ChoiceOfGroup.currentIndex
for row in range(0,self.tableWidget_VTKFiles.rowCount):
# Recovery of the group of the vtk file contained in the combobox (column 2)
widget = self.tableWidget_VTKFiles.cellWidget(row, 1)
tuple = widget.children()
comboBox = qt.QComboBox()
comboBox = tuple[1]
group = comboBox.currentIndex + 1
if group == (index + 1):
# check the checkBox
widget = self.tableWidget_VTKFiles.cellWidget(row, 2)
tuple = widget.children()
checkBox = tuple[1]
checkBox.blockSignals(True)
item = self.checkableComboBox_ChoiceOfGroup.model().item(index, 0)
if item.checkState():
checkBox.setChecked(True)
self.groupSelected.add(index + 1)
else:
checkBox.setChecked(False)
self.groupSelected.discard(index + 1)
checkBox.blockSignals(False)
# Update the color in the qtableWidget of each vtk file
colorTransferFunction = self.logic.creationColorTransfer(self.groupSelected)
self.updateColorInTableForPreviewInSPV(colorTransferFunction)
def onGroupValueChanged(self):
    """ Function to manage the combobox which
    allow the user to change the group of a vtk file
    """
    # Update the dictionary containing the VTK files sorted by groups
    self.logic.onComboBoxTableValueChanged(self.dictVTKFiles, self.tableWidget_VTKFiles)
    # Update the checkable combobox which displays the groups selected to preview them in SPV
    self.onCheckBoxTableValueChanged()
    # Enable exportation of the last updated csv file
    self.pushButton_exportUpdatedClassification.setEnabled(True)
    # Default path to override the previous one
    # self.directoryButton_exportUpdatedClassification.directory = os.path.dirname(self.pathLineEdit_previewGroups.currentPath)
def onCheckBoxTableValueChanged(self):
    """Rebuild self.groupSelected from the per-file checkboxes and keep
    the checkable group combobox consistent with them.

    A group's combobox item is fully checked only when every vtk file of
    that group is checked in the table; a group with no files is always
    unchecked. Finishes by refreshing the preview colors in the table.
    """
    self.groupSelected = set()
    # Block signals so programmatic setCheckState does not re-trigger
    # onCheckableComboBoxValueChanged.
    self.checkableComboBox_ChoiceOfGroup.blockSignals(True)
    allcheck = True
    for key, value in self.dictVTKFiles.items():
        item = self.checkableComboBox_ChoiceOfGroup.model().item(key, 0)
        if value:
            for vtkFile in value:
                filename = os.path.basename(vtkFile)
                for row in range(0, self.tableWidget_VTKFiles.rowCount):
                    qlabel = self.tableWidget_VTKFiles.cellWidget(row, 0)
                    if qlabel.text == filename:
                        # children()[1] is the checkbox inside the cell widget.
                        checkBox = self.tableWidget_VTKFiles.cellWidget(row, 2).children()[1]
                        if not checkBox.checkState():
                            allcheck = False
                            item.setCheckState(0)   # Qt.Unchecked
                        else:
                            self.groupSelected.add(key)
            if allcheck:
                item.setCheckState(2)               # Qt.Checked
        else:
            # Empty group: never checked.
            item.setCheckState(0)                   # Qt.Unchecked
        allcheck = True
    self.checkableComboBox_ChoiceOfGroup.blockSignals(False)
    # Update the color in the qtableWidget which will display in SPV
    colorTransferFunction = self.logic.creationColorTransfer(self.groupSelected)
    self.updateColorInTableForPreviewInSPV(colorTransferFunction)
def updateColorInTableForPreviewInSPV(self, colorTransferFunction):
    """Update column 3 of the VTK-file table with the color each file
    will have in Shape Population Viewer.

    Checked rows get the color returned by *colorTransferFunction* for
    their group; unchecked rows are painted white.
    """
    for row in range(0, self.tableWidget_VTKFiles.rowCount):
        # Group displayed for this file: combobox in column 1
        # (children()[0] is the cell layout, [1] the combobox).
        comboBox = self.tableWidget_VTKFiles.cellWidget(row, 1).children()[1]
        group = comboBox.currentIndex + 1
        # Checkbox of this file: column 2.
        checkBox = self.tableWidget_VTKFiles.cellWidget(row, 2).children()[1]
        # Checked -> color from the transfer function, else white.
        if checkBox.isChecked():
            rgb = colorTransferFunction.GetColor(group)
            self.tableWidget_VTKFiles.item(row, 3).setBackground(qt.QColor(rgb[0]*255, rgb[1]*255, rgb[2]*255))
        else:
            self.tableWidget_VTKFiles.item(row, 3).setBackground(qt.QColor(255, 255, 255))
def onPreviewVTKFiles(self):
    """Display the selected vtk files in Shape Population Viewer.

    - Adds a "DisplayClassificationGroup" color map to the files
    - Writes a temporary CSV listing the files and loads it in the
      ShapePopulationViewer module
    - Removes the temporary vtk copies afterwards
    """
    if os.path.exists(self.pathLineEdit_previewGroups.currentPath):
        # Color map so each group is shown with a distinct color in SPV.
        self.logic.addColorMap(self.tableWidget_VTKFiles, self.dictVTKFiles)
        # CSV file used by SPV to load the vtk files
        # (os.path.join instead of manual '/' concatenation).
        filePathCSV = os.path.join(slicer.app.temporaryPath, 'VTKFilesPreview_OAIndex.csv')
        self.logic.creationCSVFileForSPV(filePathCSV, self.tableWidget_VTKFiles, self.dictVTKFiles)
        slicer.modules.shapepopulationviewer.widgetRepresentation().loadCSVFile(filePathCSV)
        slicer.util.selectModule(slicer.modules.shapepopulationviewer)
        # Remove the vtk files previously created in the temporary directory of Slicer.
        for value in self.dictVTKFiles.values():
            self.logic.removeDataVTKFiles(value)
def onExportUpdatedClassificationGroups(self):
    """Export the updated Classification Groups to a CSV file chosen by
    the user, then reload it in the "Selection of Classification Groups"
    tab.
    """
    dlg = ctk.ctkFileDialog()
    filepath = dlg.getSaveFileName(None, "Export Updated CSV file", "", "CSV File (*.csv)")
    if not filepath:
        # User cancelled the save dialog: nothing to export.
        return
    directory = os.path.dirname(filepath)
    basename = os.path.basename(filepath)
    # Save the CSV file describing the groups.
    self.logic.creationCSVFile(directory, basename, self.dictVTKFiles, "Groups")
    # Message for the user
    slicer.util.delayDisplay("Files Saved")
    # Disable the option to export the new data
    self.pushButton_exportUpdatedClassification.setDisabled(True)
    # Load the CSV in the path line of the next tab "Selection of Classification Groups".
    # Re-setting an identical path would not emit currentPathChanged, so reset first.
    if self.pathLineEdit_selectionClassificationGroups.currentPath == filepath:
        self.pathLineEdit_selectionClassificationGroups.setCurrentPath(" ")
    self.pathLineEdit_selectionClassificationGroups.setCurrentPath(filepath)
    #self.pathLineEdit_CSVFileDataset.setCurrentPath(filepath)
# ---------------------------------------------------- #
# Tab: PCA Analysis #
# ---------------------------------------------------- #
def onCSV_PCA(self):
    """Load the CSV file selected for PCA and enable the PCA button.

    On an invalid CSV the error is reported to the user and the PCA
    button is left untouched.
    """
    try:
        self.logic.pca_exploration.loadCSVFile(self.pathLineEdit_CSVFilePCA.currentPath)
        # Clear any previously loaded exploration (JSON) path.
        self.pathLineEdit_exploration.setCurrentPath(" ")
    except shapca.CSVFileError as e:
        print('CSVFileError:'+e.value)
        slicer.util.errorDisplay('Invalid CSV file')
        return
    self.pushButton_PCA.setEnabled(True)
def onExportForExploration(self):
    """Run the PCA decomposition and set up the exploration UI.

    Fills the group comboboxes from the computed PCA dictionary, creates
    the 2D/3D visualisation nodes, resets the exploration path line
    without re-triggering its handler, re-arms the model-evaluation
    button and starts the exploration.
    """
    self.logic.pca_exploration.process()
    self.comboBox_groupPCA.setEnabled(True)
    self.comboBox_groupPCA.clear()
    # Activate the PCA Single Export widgets.
    self.comboBox_SingleExportGroup.setEnabled(True)
    self.comboBox_SingleExportGroup.clear()
    self.comboBox_SingleExportPC.setEnabled(True)
    self.comboBox_SingleExportPC.clear()
    # Add personalized groups to comboboxes with the CSV.
    dictPCA = self.logic.pca_exploration.getDictPCA()
    for key, value in dictPCA.items():
        group_name = value["group_name"]
        if key != "All":
            self.comboBox_groupPCA.addItem(str(key)+': '+group_name)
            self.comboBox_SingleExportGroup.addItem(str(key)+': '+group_name)
        else:
            self.comboBox_groupPCA.addItem(key)
            self.comboBox_SingleExportGroup.addItem(key)
    self.setColorModeSpinBox()
    self.showmean = False
    self.generate3DVisualisationNodes()
    self.generate2DVisualisationNodes()
    index = self.comboBox_colorMode.findText('Group color', qt.Qt.MatchFixedString)
    if index >= 0:
        self.comboBox_colorMode.setCurrentIndex(index)
    # Reset the exploration path without triggering onLoadExploration.
    self.pathLineEdit_exploration.disconnect('currentPathChanged(const QString)', self.onLoadExploration)
    self.pathLineEdit_exploration.setCurrentPath(' ')
    self.pathLineEdit_exploration.connect('currentPathChanged(const QString)', self.onLoadExploration)
    if self.evaluationFlag == "DONE":
        try:
            self.pushButton_evaluateModels.clicked.disconnect()
        except Exception:
            # disconnect() raises when nothing is connected; safe to ignore.
            # (Narrowed from a bare except, which also caught KeyboardInterrupt.)
            pass
        self.pushButton_evaluateModels.setText("Evaluate models (It may take a long time)")
        self.pushButton_evaluateModels.connect('clicked()', self.onEvaluateModels)
    self.explorePCA()
def onResetSliders(self):
    """Reset the PCA poly data and move every PCA slider back to zero."""
    self.logic.pca_exploration.resetPCAPolyData()
    for pca_slider in self.PCA_sliders:
        pca_slider.setSliderPosition(0)
def onChangePCAPolyData(self, num_slider):
    """Update the exploration shape when slider *num_slider* moves.

    The slider position (in [-999, 999]) is mapped to an upper-tail
    probability and its label shows the corresponding normal deviate.
    """
    ratio = self.PCA_sliders[num_slider].value
    tail_prob = 1 - (((ratio / 1000.0) + 1) / 2.0)
    deviate = round(stats.norm.isf(tail_prob), 3)
    self.PCA_sliders_value_label[num_slider].setText(str(deviate))
    self.logic.pca_exploration.updatePolyDataExploration(num_slider, ratio / 1000.0)
def onLoadExploration(self):
    """Load a previously saved PCA exploration from a JSON file and set
    up the exploration UI (group combobox, visualisation nodes, color
    mode, evaluation button).
    """
    JSONfile = self.pathLineEdit_exploration.currentPath
    # Check if the path exists:
    if not os.path.exists(JSONfile):
        return
    try:
        self.logic.pca_exploration.load(JSONfile)
        # Clear the CSV path without re-triggering onCSV_PCA.
        self.pathLineEdit_CSVFilePCA.disconnect('currentPathChanged(const QString)', self.onCSV_PCA)
        self.pathLineEdit_CSVFilePCA.setCurrentPath(" ")
        self.pathLineEdit_CSVFilePCA.connect('currentPathChanged(const QString)', self.onCSV_PCA)
    except shapca.JSONFileError as e:
        print('JSONFileError:'+e.value)
        slicer.util.errorDisplay('Invalid JSON file')
        return
    self.comboBox_groupPCA.setEnabled(True)
    self.comboBox_groupPCA.clear()
    # Fill the group combobox from the loaded PCA dictionary.
    dictPCA = self.logic.pca_exploration.getDictPCA()
    for key, value in dictPCA.items():
        group_name = value["group_name"]
        if key != "All":
            self.comboBox_groupPCA.addItem(str(key)+': '+group_name)
        else:
            self.comboBox_groupPCA.addItem(key)
    self.setColorModeSpinBox()
    self.showmean = False
    self.generate3DVisualisationNodes()
    self.generate2DVisualisationNodes()
    index = self.comboBox_colorMode.findText('Group color', qt.Qt.MatchFixedString)
    if index >= 0:
        self.comboBox_colorMode.setCurrentIndex(index)
    if self.evaluationFlag == "DONE":
        try:
            self.pushButton_evaluateModels.clicked.disconnect()
        except Exception:
            # disconnect() raises when nothing is connected; safe to ignore.
            # (Narrowed from a bare except, which also caught KeyboardInterrupt.)
            pass
        self.pushButton_evaluateModels.setText("Evaluate models (It may take a long time)")
        self.pushButton_evaluateModels.connect('clicked()', self.onEvaluateModels)
    self.explorePCA()
def onGroupColorChanged(self, newcolor):
    """Apply *newcolor* (a QColor) to the current group's plot series
    and to the 3D exploration display."""
    rgb = (newcolor.red() / 255.0, newcolor.green() / 255.0, newcolor.blue() / 255.0)
    # Update the projection plot color.
    plotSeriesNode = slicer.mrmlScene.GetFirstNodeByName("PCA projection")
    plotSeriesNode.SetColor(rgb[0], rgb[1], rgb[2])
    # Store the color on the current group, then propagate it to the 3D display.
    self.logic.pca_exploration.changeCurrentGroupColor(rgb)
    r, g, b = self.logic.pca_exploration.getColor()
    displayNode = slicer.mrmlScene.GetFirstNodeByName("PCA Display")
    displayNode.SetColor(r, g, b)
    displayNode.Modified()
    slicer.mrmlScene.GetFirstNodeByName("PCA Exploration").Modified()
def onSaveExploration(self):
    """Ask the user for a JSON path and save the current PCA exploration there."""
    dlg = ctk.ctkFileDialog()
    JSONpath = dlg.getSaveFileName(None, "Export CSV file for Classification groups", os.path.join(qt.QDir.homePath(), "Desktop"), "JSON File (*.json)")
    if JSONpath in ('', ' '):
        # Dialog cancelled.
        return
    self.logic.pca_exploration.save(JSONpath)
    # Show the saved path without re-triggering onLoadExploration.
    self.pathLineEdit_exploration.disconnect('currentPathChanged(const QString)', self.onLoadExploration)
    self.pathLineEdit_exploration.setCurrentPath(JSONpath)
    self.pathLineEdit_exploration.connect('currentPathChanged(const QString)', self.onLoadExploration)
    slicer.util.delayDisplay("Exploration saved")
def onToggleMeanShape(self):
    """Flip the mean-shape visibility flag and apply it."""
    self.showmean = not self.showmean
    self.setMeanShapeVisibility()
def onUpdateSliderList(self):
    """Update the sliders and the 3D visualization when the user changes
    the Minimum Explained Variance (or the slider cap).

    Grows or shrinks self.PCA_sliders — together with the matching label
    lists and the Single Export PC combobox entries — so the slider count
    matches the number of components needed to reach the requested
    explained variance, capped at spinBox_maxSlider.
    (Four no-op attribute-expression statements were removed.)
    """
    # Number of components needed for the requested explained variance.
    min_explained = self.spinBox_minVariance.value / 100.0
    num_components = self.logic.pca_exploration.getRelativeNumComponent(min_explained)
    # Verify the number of sliders is not bigger than the displayable number.
    if num_components > self.spinBox_maxSlider.value:
        num_components = self.spinBox_maxSlider.value
    # Shrink: delete the extra sliders/labels and their combobox entries.
    if num_components < len(self.PCA_sliders):
        component_to_delete = len(self.PCA_sliders) - num_components
        for i in range(component_to_delete):
            self.PCA_sliders[i + num_components].deleteLater()
            self.PCA_sliders_label[i + num_components].deleteLater()
            self.PCA_sliders_value_label[i + num_components].deleteLater()
        del self.PCA_sliders[num_components:]
        del self.PCA_sliders_label[num_components:]
        del self.PCA_sliders_value_label[num_components:]
        self.updateVariancePlot(num_components)
        # Delete indexes in the Single Export PC combobox (highest first).
        for i in range(component_to_delete, 0, -1):
            self.comboBox_SingleExportPC.removeItem(i + num_components - 1)
    # Grow: create the missing sliders and combobox entries.
    if num_components > len(self.PCA_sliders):
        old_num_components = len(self.PCA_sliders)
        component_to_add = num_components - len(self.PCA_sliders)
        for i in range(component_to_add):
            self.createAndAddSlider(old_num_components + i)
            self.comboBox_SingleExportPC.addItem(old_num_components + i + 1)
        self.updateVariancePlot(num_components)
    self.logic.pca_exploration.setNumberOfVisibleEigenmodes(num_components)
    # Refresh the shape with the first slider's current position.
    # NOTE(review): assumes at least one slider remains — confirm num_components >= 1.
    ratio = self.PCA_sliders[0].value
    self.logic.pca_exploration.updatePolyDataExploration(0, ratio / 1000.0)
def onColorModeChange(self):
    """Apply the color mode chosen in comboBox_colorMode.

    Modes:
      - 'Group color': flat group color; both parameter spinboxes hidden.
      - 'Unsigned distance to mean shape': scalar view, range [0, max_dist].
      - 'Signed distance to mean shape': scalar view, range
        [-max_dist_inside, max_dist_outside].
    """
    if self.comboBox_colorMode.currentText == 'Group color':
        self.logic.pca_exploration.setColorMode(0)
        # No parameters needed for flat group coloring.
        self.spinBox_colorModeParam1.hide()
        self.spinBox_colorModeParam2.hide()
        self.label_colorModeParam1.hide()
        self.label_colorModeParam2.hide()
        self.logic.disableExplorationScalarView()
    elif self.comboBox_colorMode.currentText == 'Unsigned distance to mean shape':
        self.logic.pca_exploration.setColorModeParam(self.spinBox_colorModeParam1.value,self.spinBox_colorModeParam2.value)
        # Only the first parameter (maximum distance) is relevant here.
        self.label_colorModeParam1.setText('Maximum Distance')
        self.spinBox_colorModeParam1.show()
        self.spinBox_colorModeParam2.hide()
        self.label_colorModeParam1.show()
        self.label_colorModeParam2.hide()
        self.logic.pca_exploration.setColorMode(1)
        explorationnode=slicer.mrmlScene.GetFirstNodeByName('PCA Exploration')
        colornode = slicer.mrmlScene.GetFirstNodeByName('PCA Unsigned Distance Color Table')
        if (explorationnode is not None) and (colornode is not None):
            # Scalar range [0, max distance] with the unsigned-distance LUT.
            max_dist,_=self.logic.pca_exploration.getColorParam()
            node = slicer.mrmlScene.GetFirstNodeByName("PCA Display")
            node.SetScalarRange(0,max_dist)
            explorationnode.GetDisplayNode().SetAndObserveColorNodeID(colornode.GetID())
            #explorationnode.SetInterpolate(1)
            explorationnode.Modified()
        self.logic.enableExplorationScalarView()
    elif self.comboBox_colorMode.currentText == 'Signed distance to mean shape':
        self.logic.pca_exploration.setColorModeParam(self.spinBox_colorModeParam1.value,self.spinBox_colorModeParam2.value)
        # Both parameters are relevant: outside and inside distances.
        self.label_colorModeParam1.setText('Maximum Distance Outside')
        self.label_colorModeParam2.setText('Maximum Distance Inside')
        self.spinBox_colorModeParam1.show()
        self.spinBox_colorModeParam2.show()
        self.label_colorModeParam1.show()
        self.label_colorModeParam2.show()
        self.logic.pca_exploration.setColorMode(2)
        explorationnode=slicer.mrmlScene.GetFirstNodeByName('PCA Exploration')
        colornode = slicer.mrmlScene.GetFirstNodeByName('PCA Signed Distance Color Table')
        if (explorationnode is not None) and (colornode is not None):
            # Scalar range [-inside, outside] with the signed-distance LUT.
            max_dist_outside,max_dist_inside=self.logic.pca_exploration.getColorParam()
            node = slicer.mrmlScene.GetFirstNodeByName("PCA Display")
            node.SetScalarRange(-max_dist_inside,max_dist_outside)
            explorationnode.GetDisplayNode().SetAndObserveColorNodeID(colornode.GetID())
            #explorationnode.SetInterpolate(1)
            explorationnode.Modified()
        self.logic.enableExplorationScalarView()
    else:
        print('Unexpected color mode option')
    return
def onUpdateColorModeParam(self):
    """Push the two color-mode spinbox values to the logic and re-apply
    the current color mode."""
    param1 = self.spinBox_colorModeParam1.value
    param2 = self.spinBox_colorModeParam2.value
    self.logic.pca_exploration.setColorModeParam(param1, param2)
    self.onColorModeChange()
def onDataSelected(self, mrlmlPlotSeriesIds, selectionCol):
    """Handle a point selection in the PCA projection plot.

    When a single point is selected the exploration jumps to that shape;
    when several points are selected, their id list is handed to the
    logic. (Parameter name `mrlmlPlotSeriesIds` kept — sic — for
    backward compatibility with existing connections.)
    """
    for serie_idx in range(mrlmlPlotSeriesIds.GetNumberOfValues()):
        Id = mrlmlPlotSeriesIds.GetValue(serie_idx)
        plotserienode = slicer.mrmlScene.GetNodeByID(Id)
        if plotserienode.GetName() == "PCA projection":
            valueIds = selectionCol.GetItemAsObject(serie_idx)
            if valueIds.GetNumberOfValues() == 1:
                # Single point: explore that exact shape.
                shapeId = valueIds.GetValue(0)
                self.logic.pca_exploration.setCurrentShapeFromId(shapeId)
                self.explorePCA()
            else:
                # Several points selected: hand their ids to the logic.
                # (Distinct loop variable — the original shadowed the outer `i`.)
                Idlist = [int(valueIds.GetValue(k)) for k in range(valueIds.GetNumberOfValues())]
                self.logic.pca_exploration.setCurrentShapeFromIdList(Idlist)
                self.explorePCA()
def onCheckEvaluationState(self):
    """Timer callback polling the evaluation CLI (fires every second).

    While the CLI is running or scheduled, the abort button shows the
    elapsed time. When it finishes, the evaluation plots are regenerated
    (only if the model currently shown is still the one evaluated) and
    the button is re-armed to start a new evaluation.
    """
    state=self.evaluationThread.GetStatusString()
    if state=='Running'or state=='Scheduled':
        # Format the elapsed time as [h:]mm:ss.
        seconds = time.time()-self.starting_time
        m, s = divmod(seconds, 60)
        h, m = divmod(m, 60)
        if h==0 and m==0:
            t = "00:%02d" % (s)
        elif h==0 :
            t = "%02d:%02d" % (m, s)
        else:
            t = "%d:%02d:%02d" % (h, m, s)
        if int(s) ==0:
            # Log progress roughly once per minute (when the seconds wrap to 0).
            print("Model evaluation "+self.evaluationThread.GetStatusString()+" "+t)
        self.pushButton_evaluateModels.setText("Abort evaluation ("+t+")")
    else:
        if self.evaluationFlag=="DONE":
            # Already handled (e.g. evaluation was aborted); nothing to do.
            return
        print('Evaluation done')
        self.checkThreadTimer.stop()
        # Reload and plot only if the user is still looking at the model
        # that was evaluated (same JSON path, resp. same source CSV).
        if self.evaluationFlag=="JSON" and self.pathLineEdit_exploration.currentPath==self.eval_param["inputJson"]:
            self.logic.pca_exploration.reloadJSONFile(self.eval_param["inputJson"])
            compactnessPCN,specificityPCN,generalizationPCN=self.generateEvaluationPlots()
            self.plotViewNode.SetPlotChartNodeID(compactnessPCN.GetID())
            self.plotViewNode.SetPlotChartNodeID(specificityPCN.GetID())
            self.plotViewNode.SetPlotChartNodeID(generalizationPCN.GetID())
            self.updateEvaluationPlots()
        if self.evaluationFlag=="CSV" and self.pathLineEdit_CSVFilePCA.currentPath==self.originalCSV:
            self.logic.pca_exploration.reloadJSONFile(self.eval_param["inputJson"])
            compactnessPCN,specificityPCN,generalizationPCN=self.generateEvaluationPlots()
            self.plotViewNode.SetPlotChartNodeID(compactnessPCN.GetID())
            self.plotViewNode.SetPlotChartNodeID(specificityPCN.GetID())
            self.plotViewNode.SetPlotChartNodeID(generalizationPCN.GetID())
            self.updateEvaluationPlots()
        self.evaluationFlag="DONE"
        # Re-arm the button as "start evaluation".
        self.pushButton_evaluateModels.disconnect('clicked()',self.onKillEvaluation)
        self.pushButton_evaluateModels.connect('clicked()',self.onEvaluateModels)
        self.pushButton_evaluateModels.setText("Evaluate models (It may take a long time)")
        slicer.util.infoDisplay("Evaluation done.")
def onKillEvaluation(self):
    """Abort the running model-evaluation CLI and restore the evaluate button."""
    # Re-arm the button so it starts a new evaluation on the next click.
    self.pushButton_evaluateModels.clicked.disconnect()
    self.pushButton_evaluateModels.connect('clicked()', self.onEvaluateModels)
    self.pushButton_evaluateModels.setText("Evaluate models (It may take a long time)")
    # Stop polling and cancel the CLI node.
    self.checkThreadTimer.stop()
    self.evaluationThread.Cancel()
    elapsed_min = int((time.time() - self.starting_time) / 60)
    print("Model evaluation " + self.evaluationThread.GetStatusString())
    print('Evaluation Stopped after ' + str(elapsed_min) + ' min')
    self.evaluationFlag = "DONE"
def onEvaluateModels(self):
    """Launch the ShapePCA CLI to evaluate the PCA models.

    Uses the loaded JSON exploration when one is open; otherwise saves
    the current model to a temporary JSON first. Polls the CLI once per
    second via onCheckEvaluationState and turns the evaluate button into
    an abort button while the CLI runs.
    """
    jsonpath = self.pathLineEdit_exploration.currentPath
    csvpath = self.pathLineEdit_CSVFilePCA.currentPath
    shapeNumber = self.spinBox_numberShape.value
    if os.path.isfile(jsonpath):
        self.evaluationFlag = "JSON"
        self.starting_time = time.time()
        self.eval_param = {}
        self.eval_param["inputJson"] = jsonpath
        self.eval_param["evaluation"] = str(len(self.PCA_sliders))
        self.eval_param["shapeNum"] = str(shapeNumber)
    else:
        self.originalCSV = csvpath
        self.evaluationFlag = "CSV"
        self.starting_time = time.time()
        # NOTE(review): ':' in the timestamp is not a valid filename
        # character on Windows — kept for backward compatibility; confirm.
        date = time.strftime("%b-%d-%Y-%H:%M:%S", time.gmtime())
        temp_dir = os.path.join(slicer.app.temporaryPath, 'ShapeVariationAnalyzer_Temp')
        # Create the temp root if needed (replaces a bare try/except around mkdir).
        os.makedirs(temp_dir, exist_ok=True)
        temp_dir = os.path.join(temp_dir, 'temp_model_' + date)
        os.mkdir(temp_dir)
        # Save the current model so the CLI can read it.
        model_path = os.path.join(temp_dir, 'temp.json')
        self.logic.pca_exploration.save(model_path)
        self.eval_param = {}
        self.eval_param["inputJson"] = model_path
        self.eval_param["evaluation"] = str(len(self.PCA_sliders))
        self.eval_param["shapeNum"] = str(shapeNumber)
    moduleSPCA = slicer.modules.shapepca
    self.evaluationThread = slicer.cli.run(moduleSPCA, None, self.eval_param, wait_for_completion=False)
    # Turn the evaluate button into an abort button while the CLI runs.
    self.pushButton_evaluateModels.clicked.disconnect()
    self.pushButton_evaluateModels.connect('clicked()', self.onKillEvaluation)
    self.pushButton_evaluateModels.setText("Abort evaluation (00:00)")
    # Poll the CLI status every second.
    self.checkThreadTimer = qt.QTimer()
    self.checkThreadTimer.connect('timeout()', self.onCheckEvaluationState)
    self.checkThreadTimer.start(1000)
    return
def onEigenCheckBoxChanged(self):
    """Toggle use of the hidden eigenmodes and refresh the displayed shape."""
    self.logic.pca_exploration.useHiddenModes(self.checkBox_useHiddenEigenmodes.isChecked())
    ratio = self.PCA_sliders[0].value
    self.logic.pca_exploration.updatePolyDataExploration(0, ratio / 1000.0)
def explorePCA(self):
    """Set the exploration UI up for the group currently selected in
    comboBox_groupPCA: color picker, sliders, plots and all the
    exploration widgets.
    """
    # Detection of the selected group Id ("All" or the leading digit).
    # NOTE(review): only the first character is parsed — group ids >= 10
    # would be truncated; confirm the expected id range.
    if self.comboBox_groupPCA.currentText == "All":
        keygroup = "All"
    else:
        keygroup = int(self.comboBox_groupPCA.currentText[0])
    # Setting PCA model to use
    self.logic.pca_exploration.setCurrentPCAModel(keygroup)
    # Get color of the group and set the color picker with this color
    r,g,b=self.logic.pca_exploration.getColor()
    self.ctkColorPickerButton_groupColor.color=qt.QColor(int(r*255),int(g*255),int(b*255))
    # Setting the maximum number of sliders
    num_components=self.logic.pca_exploration.getNumComponent()
    if self.spinBox_maxSlider.value> num_components:
        self.spinBox_maxSlider.setMaximum(num_components)
        self.spinBox_maxSlider.setValue(num_components)
    else:
        self.spinBox_maxSlider.setMaximum(num_components)
    # Delete all the previous sliders
    self.deletePCASliders()
    # Computing the number of sliders to show
    min_explained=self.spinBox_minVariance.value/100.0
    sliders_number=self.logic.pca_exploration.getRelativeNumComponent(min_explained)
    if sliders_number>self.spinBox_maxSlider.value:
        sliders_number=self.spinBox_maxSlider.value
    # Activate the Export Buttons
    self.pushButton_PCAExport.setEnabled(True)
    self.pushButton_PCACurrentExport.setEnabled(True)
    # Create sliders and add the PC to the combobox for Single Export
    for i in range(sliders_number):
        self.createAndAddSlider(i)
        self.comboBox_SingleExportPC.addItem(i+1)
    # Update the plot view
    self.updateVariancePlot(sliders_number)
    self.updateProjectionPlot()
    if self.logic.pca_exploration.evaluationExist():
        self.updateEvaluationPlots()
    # Show the exploration widgets (hidden until a model is available).
    self.label_normalLabel_1.show()
    self.label_normalLabel_2.show()
    self.label_normalLabel_3.show()
    self.label_normalLabel_4.show()
    self.label_normalLabel_5.show()
    self.label_normalLabel_6.show()
    self.label_normalLabel_7.show()
    self.comboBox_groupPCA.show()
    self.comboBox_colorMode.show()
    self.ctkColorPickerButton_groupColor.show()
    self.pushButton_resetSliders.show()
    self.label_valueExploration.show()
    self.label_groupExploration.show()
    self.label_varianceExploration.show()
    self.pushButton_saveExploration.show()
    self.pushButton_toggleMean.show()
    self.pushButton_evaluateModels.show()
    self.spinBox_minVariance.show()
    self.spinBox_maxSlider.show()
    self.label_minVariance.show()
    self.label_maxSlider.show()
    self.label_colorMode.show()
    self.label_numberShape.show()
    self.spinBox_numberShape.show()
    self.checkBox_useHiddenEigenmodes.show()
def setColorModeSpinBox(self):
    """Initialise both color-mode spinboxes to 4x the largest data
    standard deviation, without firing their change handler."""
    data_std = self.logic.pca_exploration.getDataStd()
    max_std = np.max(data_std)
    # Set the values silently: disconnect, set, reconnect.
    self.spinBox_colorModeParam1.disconnect('valueChanged(int)', self.onUpdateColorModeParam)
    self.spinBox_colorModeParam2.disconnect('valueChanged(int)', self.onUpdateColorModeParam)
    self.spinBox_colorModeParam1.setValue(4 * max_std)
    self.spinBox_colorModeParam2.setValue(4 * max_std)
    self.spinBox_colorModeParam1.connect('valueChanged(int)', self.onUpdateColorModeParam)
    self.spinBox_colorModeParam2.connect('valueChanged(int)', self.onUpdateColorModeParam)
def deletePCASliders(self):
    """Delete every PCA slider and its two labels from the grid and
    clear the bookkeeping lists (kept in lockstep by createAndAddSlider)."""
    for slider, label, value_label in zip(self.PCA_sliders,
                                          self.PCA_sliders_label,
                                          self.PCA_sliders_value_label):
        slider.deleteLater()
        label.deleteLater()
        value_label.deleteLater()
    self.PCA_sliders = list()
    self.PCA_sliders_label = list()
    self.PCA_sliders_value_label = list()
def createAndAddSlider(self, num_slider):
    """Create slider *num_slider* (0-based) with its variance label and
    normal-deviate label, add all three to the grid layout and hook the
    slider to onChangePCAPolyData.
    (Leftover no-op debug string and commented-out code removed.)
    """
    exp_ratio = self.logic.pca_exploration.getExplainedRatio()
    # Create the slider; positions span [-999, 999].
    slider = qt.QSlider(qt.Qt.Horizontal)
    slider.setMaximum(999)
    slider.setMinimum(-999)
    slider.setTickInterval(1)
    position = self.logic.pca_exploration.getCurrentRatio(num_slider)
    slider.setSliderPosition(position)
    # Label showing "<component>: <explained variance>%".
    label = qt.QLabel()
    label.setText(str(num_slider+1)+': '+str(round(exp_ratio[num_slider],5)*100)+'%')
    label.setAlignment(qt.Qt.AlignCenter)
    # Label showing the normal deviate equivalent to the slider position.
    X = 1-(((position/1000.0)+1)/2.0)
    valueLabel = qt.QLabel()
    valueLabel.setText(str(round(stats.norm.isf(X),3)))
    # Slider and labels appended to the bookkeeping lists (kept in lockstep).
    self.PCA_sliders.append(slider)
    self.PCA_sliders_label.append(label)
    self.PCA_sliders_value_label.append(valueLabel)
    # Slider and labels added to the gridLayout (rows offset by 2 for the header).
    self.gridLayout_PCAsliders.addWidget(self.PCA_sliders_label[num_slider],num_slider+2,0)
    self.gridLayout_PCAsliders.addWidget(self.PCA_sliders[num_slider],num_slider+2,1)
    self.gridLayout_PCAsliders.addWidget(self.PCA_sliders_value_label[num_slider],num_slider+2,2)
    # Connect; x=num_slider binds the index at definition time (avoids the
    # late-binding closure pitfall).
    self.PCA_sliders[num_slider].valueChanged.connect(lambda state, x=num_slider: self.onChangePCAPolyData(x))
#Plots
def generate2DVisualisationNodes(self):
    """Create the 2D plot chart nodes (variance, projection and — if an
    evaluation exists — the three evaluation metrics) and show them in a
    plot view.
    """
    # Clean previously created nodes.
    self.delete2DVisualisationNodes()
    # Generate PlotChartNodes to visualize the variance plot and the Projection plot.
    variancePCN = self.generateVariancePlot()
    projectionPCN = self.generateProjectionPlot()
    if self.logic.pca_exploration.evaluationExist():
        compactnessPCN,specificityPCN,generalizationPCN = self.generateEvaluationPlots()
    # Switch to a layout that contains a plot view to create a plot widget.
    layoutManager = slicer.app.layoutManager()
    # NOTE(review): layout id 36 — presumably a layout including a plot
    # view; confirm against the Slicer layout id table.
    layoutManager.setLayout(36)
    # Select chart in plot view (the last SetPlotChartNodeID call wins,
    # so the variance chart is displayed).
    plotWidget = layoutManager.plotWidget(0)
    self.plotViewNode = plotWidget.mrmlPlotViewNode()
    if self.logic.pca_exploration.evaluationExist():
        self.plotViewNode.SetPlotChartNodeID(compactnessPCN.GetID())
        self.plotViewNode.SetPlotChartNodeID(specificityPCN.GetID())
        self.plotViewNode.SetPlotChartNodeID(generalizationPCN.GetID())
    self.plotViewNode.SetPlotChartNodeID(projectionPCN.GetID())
    self.plotViewNode.SetPlotChartNodeID(variancePCN.GetID())
    # Re-route point selection in the plot to onDataSelected.
    plotView = plotWidget.plotView()
    plotView.dataSelected.disconnect()
    plotView.connect("dataSelected(vtkStringArray*, vtkCollection*)", self.onDataSelected)
def delete2DVisualisationNodes(self):
    """Remove every chart/series/table node created for the 2D plots,
    including the three evaluation plots.

    (Nine copy-pasted lookup/remove stanzas collapsed into one loop;
    removal order is preserved.)
    """
    node_names = (
        "PCA projection plot chart",
        "PCA variance plot chart",
        "PCA projection",
        "Variance (%)",
        "Sum variance (%)",
        "Level 1%",
        "Level 95%",
        "PCA projection table",
        "PCA variance table",
    )
    for name in node_names:
        # Nodes are looked up by name; missing ones are skipped.
        node = slicer.mrmlScene.GetFirstNodeByName(name)
        if node is not None:
            slicer.mrmlScene.RemoveNode(node)
    self.deleteFunctionPlot("Generalization")
    self.deleteFunctionPlot("Specificity")
    self.deleteFunctionPlot("Compactness")
def deleteFunctionPlot(self, name_y):
    """Remove the table, series and chart nodes of the evaluation plot *name_y*."""
    for node_name in ("PCA " + name_y + " table",
                      name_y,
                      "PCA " + name_y + " plot chart"):
        node = slicer.mrmlScene.GetFirstNodeByName(node_name)
        if node is not None:
            slicer.mrmlScene.RemoveNode(node)
def generateProjectionPlot(self):
    """Create the (initially empty) table, series and chart nodes for
    the PCA population-projection scatter plot and return the chart node."""
    tableNode = slicer.mrmlScene.AddNewNodeByClass("vtkMRMLTableNode", "PCA projection table")
    projTable = tableNode.GetTable()
    # Empty columns: pc1/pc2 coordinates plus the file label of each shape.
    col_pc1 = vtk.vtkFloatArray()
    col_pc1.SetName("pc1")
    col_pc2 = vtk.vtkFloatArray()
    col_pc2.SetName("pc2")
    col_labels = vtk.vtkStringArray()
    col_labels.SetName("files")
    projTable.AddColumn(col_pc1)
    projTable.AddColumn(col_pc2)
    projTable.AddColumn(col_labels)
    # Scatter series (squares, no connecting line) over the table.
    seriesNode = slicer.mrmlScene.AddNewNodeByClass("vtkMRMLPlotSeriesNode", "PCA projection")
    seriesNode.SetAndObserveTableNodeID(tableNode.GetID())
    seriesNode.SetXColumnName("pc1")
    seriesNode.SetYColumnName("pc2")
    seriesNode.SetLabelColumnName("files")
    seriesNode.SetPlotType(slicer.vtkMRMLPlotSeriesNode.PlotTypeScatter)
    seriesNode.SetLineStyle(slicer.vtkMRMLPlotSeriesNode.LineStyleNone)
    seriesNode.SetMarkerStyle(slicer.vtkMRMLPlotSeriesNode.MarkerStyleSquare)
    seriesNode.SetUniqueColor()
    # Chart holding the series.
    chartNode = slicer.mrmlScene.AddNewNodeByClass("vtkMRMLPlotChartNode", "PCA projection plot chart")
    chartNode.AddAndObservePlotSeriesNodeID(seriesNode.GetID())
    chartNode.SetTitle('Population projection')
    chartNode.SetXAxisTitle('pc1')
    chartNode.SetYAxisTitle('pc2')
    return chartNode
def generateVariancePlot(self):
    """Create the table, the four series (variance, cumulative variance,
    95% and 1% reference levels) and the chart for the explained-variance
    plot; return the chart node.

    (Four near-identical series-construction stanzas factored into a
    local helper; node creation order is preserved so SetUniqueColor
    assigns the same palette colors as before.)
    """
    varianceTableNode = slicer.mrmlScene.AddNewNodeByClass("vtkMRMLTableNode", "PCA variance table")
    table = varianceTableNode.GetTable()
    # Empty columns, filled later by updateVariancePlot().
    for col_name in ("Component", "ExplainedVarianceRatio",
                     "SumExplainedVarianceRatio", "level95%", "level1%"):
        col = vtk.vtkFloatArray()
        col.SetName(col_name)
        table.AddColumn(col)

    def _series(node_name, y_column, plot_type, marker_style=None):
        # One plot series over the shared variance table.
        series = slicer.mrmlScene.AddNewNodeByClass("vtkMRMLPlotSeriesNode", node_name)
        series.SetAndObserveTableNodeID(varianceTableNode.GetID())
        series.SetXColumnName("Component")
        series.SetYColumnName(y_column)
        series.SetPlotType(plot_type)
        if marker_style is not None:
            series.SetMarkerStyle(marker_style)
        series.SetUniqueColor()
        return series

    scatter = slicer.vtkMRMLPlotSeriesNode.PlotTypeScatter
    no_marker = slicer.vtkMRMLPlotSeriesNode.MarkerStyleNone
    # Reference level lines (no markers).
    level1PlotSeries = _series("Level 1%", "level1%", scatter, no_marker)
    level95PlotSeries = _series("Level 95%", "level95%", scatter, no_marker)
    # Cumulative explained variance.
    sumevrPlotSeries = _series("Sum variance (%)", "SumExplainedVarianceRatio", scatter)
    # Per-component explained variance, drawn as bars.
    evrPlotSeries = _series("Variance (%)", "ExplainedVarianceRatio",
                            slicer.vtkMRMLPlotSeriesNode.PlotTypeScatterBar)
    # Chart node referencing the four series.
    plotChartNode = slicer.mrmlScene.AddNewNodeByClass("vtkMRMLPlotChartNode", "PCA variance plot chart")
    plotChartNode.AddAndObservePlotSeriesNodeID(evrPlotSeries.GetID())
    plotChartNode.AddAndObservePlotSeriesNodeID(sumevrPlotSeries.GetID())
    plotChartNode.AddAndObservePlotSeriesNodeID(level95PlotSeries.GetID())
    plotChartNode.AddAndObservePlotSeriesNodeID(level1PlotSeries.GetID())
    plotChartNode.SetTitle('Explained Variance Ratio')
    plotChartNode.SetXAxisTitle('Component')
    plotChartNode.SetYAxisTitle('Explained Variance Ratio')
    return plotChartNode
def generateEvaluationPlots(self):
    """(Re)create the three evaluation charts and return them as
    (compactness, specificity, generalization) chart nodes."""
    charts = [self.generateFunctionPlot("Component", metric)
              for metric in ("Compactness", "Specificity", "Generalization")]
    return charts[0], charts[1], charts[2]
def generateFunctionPlot(self,name_x,name_y):
    """Build an empty scatter chart of *name_y* against *name_x*.

    Creates (after removing any previous chart of the same name):
      - a table node "PCA <name_y> table" with two empty float columns,
      - a scatter plot-series node bound to those columns,
      - a chart node "PCA <name_y> plot chart" containing the series.

    The table columns are filled later by updateFunctionPlot().
    Returns the vtkMRMLPlotChartNode.
    """
    # Drop any chart/table left over from a previous exploration.
    self.deleteFunctionPlot(name_y)
    TableNode = slicer.mrmlScene.AddNewNodeByClass("vtkMRMLTableNode","PCA "+name_y+" table")
    table = TableNode.GetTable()
    # Empty columns: x = component index, y = metric value.
    x=vtk.vtkFloatArray()
    y=vtk.vtkFloatArray()
    x.SetName(name_x)
    y.SetName(name_y)
    table.AddColumn(x)
    table.AddColumn(y)
    # Plot series reading from the two columns above.
    PlotSeries = slicer.mrmlScene.AddNewNodeByClass("vtkMRMLPlotSeriesNode", name_y)
    PlotSeries.SetAndObserveTableNodeID(TableNode.GetID())
    PlotSeries.SetXColumnName(name_x)
    PlotSeries.SetYColumnName(name_y)
    PlotSeries.SetPlotType(slicer.vtkMRMLPlotSeriesNode.PlotTypeScatter)
    PlotSeries.SetUniqueColor()
    # Chart node wrapping the single series.
    PCN = slicer.mrmlScene.AddNewNodeByClass("vtkMRMLPlotChartNode","PCA "+name_y+" plot chart")
    PCN.AddAndObservePlotSeriesNodeID(PlotSeries.GetID())
    PCN.SetTitle(name_y)
    PCN.SetXAxisTitle(name_x)
    PCN.SetYAxisTitle(name_y)
    return PCN
def updateVariancePlot(self,num_components):
    """Refill the "PCA variance table" for the first *num_components* components.

    Clears the table, then adds the 95%/1% significance-level columns and
    the explained-variance-ratio columns fetched from the PCA logic.
    The plot series created earlier pick the new data up automatically.
    """
    varianceTableNode = slicer.mrmlScene.GetFirstNodeByName("PCA variance table")
    table = varianceTableNode.GetTable()
    # Drop all previous columns before repopulating.
    table.Initialize()
    # Horizontal threshold lines (presumably constant arrays over the
    # component range — TODO confirm against pca_exploration.getPlotLevel).
    level95 , level1=self.logic.pca_exploration.getPlotLevel(num_components)
    level95.SetName("level95%")
    level1.SetName("level1%")
    table.AddColumn(level95)
    table.AddColumn(level1)
    # Per-component and cumulative explained variance ratios.
    x,evr,sumevr= self.logic.pca_exploration.getPCAVarianceExplainedRatio(num_components)
    x.SetName("Component")
    evr.SetName("ExplainedVarianceRatio")
    sumevr.SetName("SumExplainedVarianceRatio")
    table.AddColumn(x)
    table.AddColumn(evr)
    table.AddColumn(sumevr)
def updateProjectionPlot(self):
    """Refresh the 2-D PCA projection scatter plot (PC1 vs PC2).

    Refills the "PCA projection table" with the projections and their
    file labels, re-applies the current group colour to the series, and
    rescales the plot view to fit the new data.
    """
    projectionTableNode = slicer.mrmlScene.GetFirstNodeByName("PCA projection table")
    table = projectionTableNode.GetTable()
    # Drop previous columns before repopulating.
    table.Initialize()
    pc1,pc2=self.logic.pca_exploration.getPCAProjections()
    labels = self.logic.pca_exploration.getPCAProjectionLabels()
    pc1.SetName("pc1")
    pc2.SetName("pc2")
    labels.SetName("files")
    table.AddColumn(pc1)
    table.AddColumn(pc2)
    table.AddColumn(labels)
    # Recolour the series to the currently-selected group's colour.
    plotSeriesNode = slicer.mrmlScene.GetFirstNodeByName("PCA projection")
    r, g, b = self.logic.pca_exploration.getColor()
    plotSeriesNode.SetColor(r, g, b)
    # Rescale the axes so the whole cloud is visible.
    layoutManager = slicer.app.layoutManager()
    plotWidget = layoutManager.plotWidget(0)
    plotWidget.plotController().fitPlotToAxes()
def updateEvaluationPlots(self):
    """Refill the three evaluation charts from the current PCA model.

    Error arrays returned alongside each metric are currently unused.
    """
    # Compactness
    x,compac,compac_err=self.logic.pca_exploration.getCompactness()
    self.updateFunctionPlot("Component","Compactness",x,compac)
    # Specificity
    x,spec,spec_err=self.logic.pca_exploration.getSpecificity()
    self.updateFunctionPlot("Component","Specificity",x,spec)
    # Generalization
    x,gene,gene_err=self.logic.pca_exploration.getGeneralization()
    self.updateFunctionPlot("Component","Generalization",x,gene)
def updateFunctionPlot(self,name_x,name_y,x,y):
    """Replace the contents of the "PCA <name_y> table" with columns *x*, *y*.

    *x* and *y* are vtk arrays (as produced by the pca_exploration
    getters); they are renamed to match the column names the plot series
    was configured with in generateFunctionPlot().
    """
    TableNode = slicer.mrmlScene.GetFirstNodeByName("PCA "+name_y+" table")
    table = TableNode.GetTable()
    # Drop the previous columns before adding the fresh data.
    table.Initialize()
    x.SetName(name_x)
    y.SetName(name_y)
    table.AddColumn(x)
    table.AddColumn(y)
#polydata
def generate3DVisualisationNodes(self):
    """(Re)create all 3-D scene nodes used by the PCA exploration.

    Builds two model nodes with their display nodes:
      - "PCA Mean": grey, semi-transparent, hidden unless self.showmean;
      - "PCA Exploration": group-coloured, with two custom colour tables
        ("PCA Signed/Unsigned Distance Color Table") for distance scalars.
    Any nodes from a previous exploration are removed first.
    """
    self.delete3DVisualisationNodes()
    ## -- Mean shape --
    # Model node holding the mean polydata.
    PCANode = slicer.vtkMRMLModelNode()
    PCANode.SetAndObservePolyData(self.logic.pca_exploration.getPolyDataMean())
    PCANode.SetName("PCA Mean")
    # Display node: neutral grey, slightly transparent so the exploration
    # shape remains visible through it.
    modelDisplay = slicer.vtkMRMLModelDisplayNode()
    modelDisplay.SetColor(0.5,0.5,0.5)
    modelDisplay.SetOpacity(0.8)
    #modelDisplay.SetBackfaceCulling(0)
    modelDisplay.SetScene(slicer.mrmlScene)
    modelDisplay.SetName("PCA Mean Display")
    modelDisplay.VisibilityOff()
    slicer.mrmlScene.AddNode(modelDisplay)
    PCANode.SetAndObserveDisplayNodeID(modelDisplay.GetID())
    slicer.mrmlScene.AddNode(PCANode)
    # Apply the user's current show-mean preference.
    self.setMeanShapeVisibility()
    ## -- Exploration shape --
    # Model node holding the polydata deformed along the PCA axes.
    PCANode = slicer.vtkMRMLModelNode()
    PCANode.SetAndObservePolyData(self.logic.pca_exploration.getPolyDataExploration())
    PCANode.SetName("PCA Exploration")
    # Display node coloured with the selected group's colour.
    R,G,B=self.logic.pca_exploration.getColor()
    modelDisplay = slicer.vtkMRMLModelDisplayNode()
    modelDisplay.SetColor(R,G,B)
    modelDisplay.SetOpacity(1)
    # Manual scalar range: the distance LUTs define their own table range.
    modelDisplay.AutoScalarRangeOff()
    #modelDisplay.SetBackfaceCulling(0)
    modelDisplay.SetScene(slicer.mrmlScene)
    modelDisplay.SetName("PCA Display")
    # Colour tables used when displaying signed/unsigned distance scalars.
    signedcolornode=self.logic.generateSignedDistanceLUT()
    unsignedcolornode=self.logic.generateUnsignedDistanceLUT()
    signedcolornode.SetName('PCA Signed Distance Color Table')
    unsignedcolornode.SetName('PCA Unsigned Distance Color Table')
    slicer.mrmlScene.AddNode(signedcolornode)
    slicer.mrmlScene.AddNode(unsignedcolornode)
    slicer.mrmlScene.AddNode(modelDisplay)
    PCANode.SetAndObserveDisplayNodeID(modelDisplay.GetID())
    slicer.mrmlScene.AddNode(PCANode)
def delete3DVisualisationNodes(self):
    """Remove every PCA 3-D visualisation node from the MRML scene.

    Covers the model, display and colour-table nodes created by
    generate3DVisualisationNodes(); missing nodes are skipped silently.
    """
    # One loop instead of six copy-pasted lookup/remove stanzas.
    node_names = (
        "PCA Exploration",
        "PCA Display",
        "PCA Mean",
        "PCA Mean Display",
        'PCA Signed Distance Color Table',
        'PCA Unsigned Distance Color Table',
    )
    for name in node_names:
        node = slicer.mrmlScene.GetFirstNodeByName(name)
        if node is not None:
            slicer.mrmlScene.RemoveNode(node)
def setMeanShapeVisibility(self):
    """Show or hide the "PCA Mean" model according to self.showmean."""
    node = slicer.mrmlScene.GetFirstNodeByName("PCA Mean Display")
    # Idiomatic truth test instead of '== False' comparison.
    if self.showmean:
        node.VisibilityOn()
    else:
        node.VisibilityOff()
# ---------------------------------------------------- #
# #
# Tab: PCA Export #
# #
# ---------------------------------------------------- #
def onMinMaxstdCheckBoxChanged(self):
    """Handle toggling of the min/max std-deviation export mode.

    Checking it unchecks the "regular" mode and enables the min/max spin
    boxes; unchecking is refused (re-checked) so exactly one of the two
    mutually-exclusive modes is always active.
    """
    if self.checkBox_stdMaxMin.isChecked()==True:
        self.checkBox_stdRegular.setChecked(False)
        self.doubleSpinBox_stdRegular.setDisabled(True)
        self.doubleSpinBox_stdmin.setDisabled(False)
        self.doubleSpinBox_stdmax.setDisabled(False)
    else:
        # Re-check: one mode must always stay selected.
        self.checkBox_stdMaxMin.setChecked(True)
def onRegularstdCheckBoxChanged(self):
    """Handle toggling of the symmetric ("regular") std-deviation export mode.

    Mirror image of onMinMaxstdCheckBoxChanged(): checking it unchecks
    the min/max mode and enables the single spin box; unchecking is
    refused so one mode is always active.
    """
    if self.checkBox_stdRegular.isChecked()==True:
        self.checkBox_stdMaxMin.setChecked(False)
        self.doubleSpinBox_stdRegular.setDisabled(False)
        self.doubleSpinBox_stdmin.setDisabled(True)
        self.doubleSpinBox_stdmax.setDisabled(True)
    else:
        # Re-check: one mode must always stay selected.
        self.checkBox_stdRegular.setChecked(True)
def onExportForPCAExport(self):
    """Export one PCA axis as a series of VTK files.

    For the selected group and principal component, writes one VTK file
    per std-deviation step between std_min and std_max (plus the two
    limits themselves) into a fresh PCAMultipleAxisExport_<n> folder,
    then restores the slider to its previous position.
    """
    # Selected group / principal component from the export combo boxes.
    Group = self.comboBox_SingleExportGroup.itemText(self.comboBox_SingleExportGroup.currentIndex)
    PC = self.comboBox_SingleExportPC.itemText(self.comboBox_SingleExportPC.currentIndex)
    std_regular = self.doubleSpinBox_stdRegular.textFromValue(self.doubleSpinBox_stdRegular.value)
    step = self.doubleSpinBox_step.value
    # Remember the slider position so it can be restored after the export.
    num_slider = int(PC)-1
    position_Slider=self.logic.pca_exploration.getCurrentRatio(num_slider)
    self.onResetSliders()
    # Create a new, unused PCAMultipleAxisExport_<n> folder.
    folder_int = 1
    while (os.path.exists(self.DirectoryButton_PCASingleExport.directory + '/PCAMultipleAxisExport_' + str(folder_int) + '/')):
        folder_int += 1
    Folder = '/PCAMultipleAxisExport_' + str(folder_int) + '/'
    dirpath = self.DirectoryButton_PCASingleExport.directory + Folder
    try:
        os.mkdir(dirpath)
    except OSError:
        print ("Creation of the directory %s failed" % dirpath)
    else:
        print ("Successfully created the directory %s " % dirpath)
    # Deviation range: symmetric around the mean in "regular" mode,
    # otherwise the user-supplied (possibly asymmetric) min/max.
    if (self.checkBox_stdRegular.isChecked()):
        std_max = float(std_regular)
        std_min = -float(std_regular)
    else:
        std_max = self.doubleSpinBox_stdmax.value
        # BUG FIX: the original line ended with a stray '\' line
        # continuation running into the following comment line.
        std_min = self.doubleSpinBox_stdmin.value
    # From the mean, first walk towards the max, then towards the min
    # (non-symmetrical range); np.arange excludes the end points.
    for std_count in np.arange(0.0,std_max,step):
        self.logic.exportAxis(dirpath,Group,PC,std_count)
    for std_count in np.arange(-step,std_min,-step):
        self.logic.exportAxis(dirpath,Group,PC,std_count)
    # The two limits are exported explicitly.
    self.logic.exportAxis(dirpath,Group,PC,std_max)
    self.logic.exportAxis(dirpath,Group,PC,std_min)
    # Restore the previous visualisation state.
    self.PCA_sliders[num_slider].setSliderPosition(position_Slider)
def onExportForPCACurrentExport(self):
    """Export the current visualisation of the module as a single VTK file.

    Writes into <export dir>/PCACurrentExport/, choosing a filename
    "PCA_Group<g>_MixedComp_<n>.vtk" with the first free <n> so that
    previous exports are never overwritten.
    """
    # NOTE(review): Group/PC/std are hard-coded placeholders here; only
    # Group feeds the filename below — confirm whether PC/std were meant
    # to be read from the UI.
    Group = "0"
    PC = "1"
    std = "5.9"
    file_number = 1
    dirpath = self.DirectoryButton_PCASingleExport.directory + "/PCACurrentExport/"
    if( os.path.exists(dirpath)==False ):
        # Create the export folder on first use.
        try:
            os.mkdir(dirpath)
        except OSError:
            print ("Creation of the directory %s failed" % dirpath)
        else:
            print ("Successfully created the directory %s " % dirpath)
    # Prefix positive deviations with an explicit '+' sign.
    if (std[0]!='-'):
        std = '+' + std
    filepath_current = dirpath + "/PCA_Group" + Group[0] + "_MixedComp_" + str(file_number)
    # Increment the trailing number until the filename is unused,
    # so earlier current-exports are not overwritten.
    exist = False
    while (exist == False):
        if (os.path.exists(filepath_current + ".vtk")):
            filepath_end = len(filepath_current) - len(str(file_number))
            file_number += 1
            filepath_current = filepath_current[:filepath_end] + str(file_number)
        else:
            exist = True
    filepath_current = filepath_current + ".vtk"
    self.logic.pca_exploration.saveVTKFile(self.logic.pca_exploration.getPolyDataExploration(),filepath_current)
# ------------------------------------------------------------------------------------------ #
# #
# #
# LOGIC #
# #
# #
# ------------------------------------------------------------------------------------------ #
class ShapeVariationAnalyzerLogic(ScriptedLoadableModuleLogic):
    """Core (non-GUI) logic of the ShapeVariationAnalyzer module."""
    def __init__(self):
        # BUG FIX: the original assigned the vtkTable *class*
        # (self.table = vtk.vtkTable), not an instance, so methods such
        # as creationDictVTKFiles() calling self.table.GetNumberOfRows()
        # would fail until the attribute was overwritten elsewhere.
        self.table = vtk.vtkTable()
        # Colour-bar anchor points mirroring ShapePopulationViewer:
        # {name: [relative position, r, g, b]}.
        self.colorBar = {'Point1': [0, 0, 1, 0], 'Point2': [0.5, 1, 1, 0], 'Point3': [1, 1, 0, 0]}
        # Helper object performing the actual PCA computations.
        self.pca_exploration=shapca.pcaExplorer()
def addGroupToDictionary(self, dictCSVFile, directory, directoryList, group):
    """Register every .vtk file found in *directory* under *group*.

    Stores the full file paths as a list in dictCSVFile[group] and
    records the directory itself at position group-1 of directoryList.
    """
    # Collect the paths of all VTK meshes in the chosen directory.
    dictCSVFile[group] = [
        directory + '/' + entry
        for entry in os.listdir(directory)
        if entry.endswith(".vtk")
    ]
    # Remember where this group's meshes came from.
    directoryList.insert((group - 1), directory)
def removeGroupToDictionary(self, dictCSVFile, directoryList, group):
    """Forget *group*: drop its file list and its source directory."""
    # Drop the group's mesh list (no-op if the key is absent).
    dictCSVFile.pop(group, None)
    # Drop the matching directory entry (groups are 1-based).
    del directoryList[group - 1]
def checkExtension(self, filename, extension):
    """Return True when *filename* ends with *extension*.

    An empty (or single-space) basename is rejected silently; any other
    mismatch pops up an error dialog before returning False.
    """
    basename = os.path.basename(filename)
    # Guard: nothing was typed at all — fail without complaining.
    if basename in ("", " "):
        return False
    # Correct extension — accept.
    if os.path.splitext(basename)[1] == extension:
        return True
    slicer.util.errorDisplay('Wrong extension file, a ' + extension + ' file is needed!')
    return False
def readCSVFile(self, filename):
    """Parse the CSV file at *filename* into a vtkTable and return it."""
    print("CSV FilePath: " + filename)
    sys.stdout.flush()
    # VTK's CSV reader handles quoting/typing; first row is the header.
    reader = vtk.vtkDelimitedTextReader()
    reader.SetFieldDelimiterCharacters(",")
    reader.SetFileName(filename)
    reader.SetHaveHeaders(True)
    reader.Update()
    return reader.GetOutput()
def creationDictVTKFiles(self, dict):
    """Fill *dict* with the vtk file paths of self.table, keyed by group.

    self.table is expected to have the file path in column 0 and the
    group number in column 1.  A group with a single file maps to a bare
    string; with several files, to a list of strings.

    Returns False (after an error dialog) if any listed path does not
    exist, True otherwise.
    """
    for i in range(0, self.table.GetNumberOfRows()):
        if not os.path.exists(self.table.GetValue(i,0).ToString()):
            # +2: rows are 0-based and the CSV file has a header line.
            # Typo fix: "lign" -> "line".
            slicer.util.errorDisplay('VTK file not found, path not good at line ' + str(i+2))
            return False
        value = dict.get(self.table.GetValue(i,1).ToInt(), None)
        if value is None:
            # First file of this group: store the bare path.
            dict[self.table.GetValue(i,1).ToInt()] = self.table.GetValue(i,0).ToString()
        else:
            if isinstance(value, list):
                value.append(self.table.GetValue(i,0).ToString())
            else:
                # Second file: promote the bare path to a list.
                tempList = list()
                tempList.append(value)
                tempList.append(self.table.GetValue(i,0).ToString())
                dict[self.table.GetValue(i,1).ToInt()] = tempList
    return True
def addColorMap(self, table, dictVTKFiles):
    """Tag each mesh with its group for colouring in ShapePopulationViewer.

    For every vtk file in *dictVTKFiles* (mapping group -> list of
    paths), adds a point-data array "DisplayClassificationGroup" whose
    value at every point is the group number, and writes the result to
    Slicer's temporary directory.  *table* is unused here.
    """
    for key, value in dictVTKFiles.items():
        for vtkFile in value:
            # Read the original VTK mesh.
            reader = vtk.vtkDataSetReader()
            reader.SetFileName(vtkFile)
            reader.ReadAllVectorsOn()
            reader.ReadAllScalarsOn()
            reader.Update()
            polyData = reader.GetOutput()
            # Work on a deep copy so the source polydata stays untouched.
            polyDataCopy = vtk.vtkPolyData()
            polyDataCopy.DeepCopy(polyData)
            pointData = polyDataCopy.GetPointData()
            # Constant per-point array carrying the mesh's group number.
            numPts = polyDataCopy.GetPoints().GetNumberOfPoints()
            arrayName = "DisplayClassificationGroup"
            hasArrayInt = pointData.HasArray(arrayName)
            if hasArrayInt == 1:
                # Replace any stale array from a previous run.
                pointData.RemoveArray(arrayName)
            arrayToAdd = vtk.vtkDoubleArray()
            arrayToAdd.SetName(arrayName)
            arrayToAdd.SetNumberOfComponents(1)
            arrayToAdd.SetNumberOfTuples(numPts)
            for i in range(0, numPts):
                arrayToAdd.InsertTuple1(i, key)
            pointData.AddArray(arrayToAdd)
            # Save the tagged copy into Slicer's temporary directory for
            # later display in Shape Population Viewer.
            writer = vtk.vtkPolyDataWriter()
            filepath = slicer.app.temporaryPath + '/' + os.path.basename(vtkFile)
            writer.SetFileName(filepath)
            if vtk.VTK_MAJOR_VERSION <= 5:
                # Legacy VTK 5 API.
                writer.SetInput(polyDataCopy)
            else:
                writer.SetInputData(polyDataCopy)
            writer.Update()
            writer.Write()
def creationCSVFileForSPV(self, filename, table, dictVTKFiles):
    """Write the CSV listing the meshes to display in ShapePopulationViewer.

    Walks the preview *table*; for each row whose checkbox (column 2) is
    ticked, writes the temporary-directory path of the mesh named in
    column 0.  *dictVTKFiles* is unused here.
    """
    # 'with' guarantees the file is closed even if a row raises.
    with open(filename, 'w') as csv_file:
        cw = csv.writer(csv_file, delimiter=',')
        cw.writerow(['VTK Files'])
        for row in range(0, table.rowCount):
            # The checkbox lives at index 1 of the cell widget's children.
            # (Removed the dead qt.QCheckBox() placeholder allocation and
            # the shadowing of the built-ins 'tuple'/'file'.)
            widget = table.cellWidget(row, 2)
            checkBox = widget.children()[1]
            if checkBox.isChecked():
                # Mesh filename from column 0; the tagged copy was written
                # to Slicer's temporary directory by addColorMap().
                qlabel = table.cellWidget(row, 0)
                vtkFile = qlabel.text
                pathVTKFile = slicer.app.temporaryPath + '/' + vtkFile
                cw.writerow([pathVTKFile])
def fillTableForPreviewVTKFilesInSPV(self, dictVTKFiles, checkableComboBox, table):
    """Fill the preview table of all VTK files for ShapePopulationViewer.

    - checkableComboBox gains one entry per group so the user can select
      which groups to display in SPV.
    - Column 0: mesh filename label.
    - Column 1: combobox holding the mesh's group.
    - Column 2: checkbox selecting the mesh for display.
    - Column 3: colour cell showing the colour SPV will use.
    """
    row = 0
    for key, value in dictVTKFiles.items():
        # Group selector entry.
        checkableComboBox.addItem("Group " + str(key))
        # One table row per mesh of the group.
        for vtkFile in value:
            table.setRowCount(row + 1)
            # Column 0: filename (0x84 = AlignHCenter | AlignVCenter).
            filename = os.path.basename(vtkFile)
            labelVTKFile = qt.QLabel(filename)
            labelVTKFile.setAlignment(0x84)
            table.setCellWidget(row, 0, labelVTKFile)
            # Column 1: group combobox.
            # NOTE(review): the keys are ints; QComboBox.addItems expects
            # strings — presumably Qt/Slicer coerces them, verify.
            widget = qt.QWidget()
            layout = qt.QHBoxLayout(widget)
            comboBox = qt.QComboBox()
            comboBox.addItems(list(dictVTKFiles.keys()))
            comboBox.setCurrentIndex(key)
            layout.addWidget(comboBox)
            layout.setAlignment(0x84)
            layout.setContentsMargins(0, 0, 0, 0)
            widget.setLayout(layout)
            table.setCellWidget(row, 1, widget)
            comboBox.connect('currentIndexChanged(int)', self.interface.onGroupValueChanged)
            # Column 2: display checkbox.
            widget = qt.QWidget()
            layout = qt.QHBoxLayout(widget)
            checkBox = qt.QCheckBox()
            layout.addWidget(checkBox)
            layout.setAlignment(0x84)
            layout.setContentsMargins(0, 0, 0, 0)
            widget.setLayout(layout)
            table.setCellWidget(row, 2, widget)
            checkBox.connect('stateChanged(int)', self.interface.onCheckBoxTableValueChanged)
            # Column 3: colour swatch (starts white).
            table.setItem(row, 3, qt.QTableWidgetItem())
            table.item(row,3).setBackground(qt.QColor(255,255,255))
            row = row + 1
def onComboBoxTableValueChanged(self, dictVTKFiles, table):
    """Re-group meshes after the user changed a group combobox.

    Scans every row of the preview table; when a mesh's combobox group
    no longer matches the group it is filed under in *dictVTKFiles*,
    moves its path from the old group's list to the new one.
    """
    for row in range(0,table.rowCount):
        # Group currently shown in the row's combobox (child index 1
        # of the cell widget; the qt.QComboBox() line is a placeholder
        # immediately overwritten).
        widget = table.cellWidget(row, 1)
        tuple = widget.children()
        comboBox = qt.QComboBox()
        comboBox = tuple[1]
        group = comboBox.currentIndex
        # Mesh filename from column 0.
        qlabel = table.cellWidget(row, 0)
        vtkFile = qlabel.text
        # If the file is not already filed under the selected group,
        # find which group currently holds it and move the path over.
        value = dictVTKFiles.get(group, None)
        if not any(vtkFile in s for s in value):
            for value in dictVTKFiles.values():
                if any(vtkFile in s for s in value):
                    pathList = [s for s in value if vtkFile in s]
                    path = pathList[0]
                    # Remove from the old group...
                    value.remove(path)
                    # ...and append to the newly-selected one.
                    newvalue = dictVTKFiles.get(group, None)
                    newvalue.append(path)
                    break
def creationColorTransfer(self, groupSelected):
    """Build a vtkColorTransferFunction matching SPV's colour bar.

    *groupSelected* is an ordered collection of group numbers; its first
    and last elements define the transfer-function range.  The RGB anchor
    points come from self.colorBar ({name: [rel. position, r, g, b]}).
    Returns the (possibly empty) vtkColorTransferFunction.
    """
    colorTransferFunction = vtk.vtkColorTransferFunction()
    if len(groupSelected) > 0:
        groupSelectedList = list(groupSelected)
        # Range spans from the first to the last selected group.
        rangeColorTransfer = [groupSelectedList[0], groupSelectedList[len(groupSelectedList) - 1]]
        colorTransferFunction.AdjustRange(rangeColorTransfer)
        for key, value in self.colorBar.items():
            # Position of the current anchor, scaled into the range.
            x = (groupSelectedList[len(groupSelectedList) - 1] - groupSelectedList[0]) * value[0] + groupSelectedList[0]
            # Colour of the current anchor.
            r = value[1]
            g = value[2]
            b = value[3]
            colorTransferFunction.AddRGBPoint(x,r,g,b)
    return colorTransferFunction
def removeDataVTKFiles(self, value):
    """Delete from Slicer's temporary directory the copies of the meshes
    listed in *value* (created for the per-group mean computation)."""
    temp_dir = slicer.app.temporaryPath
    for mesh_path in value:
        candidate = temp_dir + '/' + os.path.basename(mesh_path)
        # Only remove files that were actually written there.
        if os.path.exists(candidate):
            os.remove(candidate)
def creationCSVFile(self, directory, CSVbasename, dictForCSV, option):
    """Write <directory>/<CSVbasename> describing *dictForCSV*.

    Layout depends on *option*:
      - "Groups":    header 'VTK Files,Group', one row per mesh path;
      - "MeanGroup": header 'Mean shapes VTK Files,Group';
      - "NCG":       no header, one row per (value, group) pair.
    """
    CSVFilePath = str(directory) + "/" + CSVbasename
    # 'with' guarantees the file is closed even on error.
    with open(CSVFilePath, 'w') as csv_file:
        cw = csv.writer(csv_file, delimiter=',')
        if option == "Groups":
            cw.writerow(['VTK Files', 'Group'])
        elif option == "MeanGroup":
            cw.writerow(['Mean shapes VTK Files', 'Group'])
        for key, value in dictForCSV.items():
            if isinstance(value, list):
                for vtkFile in value:
                    if option == "Groups":
                        cw.writerow([vtkFile, str(key)])
                    elif option == "MeanGroup":
                        # NOTE(review): this writes the whole list 'value'
                        # rather than the loop variable 'vtkFile', once per
                        # element — looks like a bug, but preserved here;
                        # confirm against the MeanGroup callers.
                        cw.writerow([value, str(key)])
            elif option == "NCG":
                cw.writerow([value, str(key)])
def checkSeveralMeshInDict(self, dict):
    """Verify each group of *dict* holds a list of more than one mesh.

    Raises CSVFileError naming the first offending group; returns True
    when every group passes.  (The original had an unreachable
    'return False' after the raise, and compared with type() instead of
    isinstance().)
    """
    for key, value in dict.items():
        if not isinstance(value, list) or len(value) == 1:
            msg='The group ' + str(key) + ' must contain more than one mesh.'
            raise CSVFileError(msg)
    return True
#################
# PCA ALGORITHM #
#################
def disableExplorationScalarView(self):
    """Turn off scalar (distance) colouring on the "PCA Exploration" model,
    if that model exists in the scene."""
    model1=slicer.mrmlScene.GetFirstNodeByName('PCA Exploration')
    if model1 is not None:
        model1.GetDisplayNode().SetScalarVisibility(0)
        #model1.GetDisplayNode().SetScalarVisibility(1)
        model1.Modified()
def enableExplorationScalarView(self):
    """Colour the "PCA Exploration" model by its 'Distance' scalar array.

    Assumes the node exists (unlike disableExplorationScalarView, there
    is no None check here).
    """
    exploration_node=slicer.mrmlScene.GetFirstNodeByName('PCA Exploration')
    exploration_node.GetDisplayNode().SetActiveScalarName('Distance')
    exploration_node.GetDisplayNode().SetScalarVisibility(1)
    exploration_node.Modified()
def generateUnsignedDistanceLUT(self):
    """Build a 255-entry colour table for unsigned distances.

    Ramps from white (low) to pure red (high): the red channel stays at
    1 while green/blue fade from 1 towards 0.  Returns the (not yet
    scene-added) vtkMRMLColorTableNode.
    """
    number_of_color=255
    colorTableNode = slicer.vtkMRMLColorTableNode()
    colorTableNode.SetName('PCA Unsigned Distance Color Table')
    colorTableNode.SetTypeToUser()
    colorTableNode.HideFromEditorsOff()
    colorTableNode.SaveWithSceneOff()
    colorTableNode.SetNumberOfColors(number_of_color)
    colorTableNode.GetLookupTable().SetTableRange(0,number_of_color-1)
    # c is the shared green/blue intensity, fading from 1 to ~0.
    c=1
    for i in range(number_of_color):
        colorTableNode.AddColor(str(i), 1, c, c, 1)
        c = c- 1.0/number_of_color
    return colorTableNode
def generateSignedDistanceLUT(self):
    """Build a 255-entry colour table for signed distances.

    First half ramps blue -> white (negative side), second half ramps
    white -> red (positive side).  Returns the (not yet scene-added)
    vtkMRMLColorTableNode.
    """
    number_of_color=255
    colorTableNode = slicer.vtkMRMLColorTableNode()
    colorTableNode.SetName('PCA Signed Distance Color Table')
    colorTableNode.SetTypeToUser()
    colorTableNode.HideFromEditorsOff()
    colorTableNode.SaveWithSceneOff()
    colorTableNode.SetNumberOfColors(number_of_color)
    colorTableNode.GetLookupTable().SetTableRange(0,number_of_color-1)
    # blueshade rises 0 -> 1 over the first half (blue to white);
    # redshade then falls 1 -> 0 over the second half (white to red).
    blueshade=0
    redshade=1
    for i in range(number_of_color):
        if blueshade <= 1:
            colorTableNode.AddColor(str(i), blueshade, blueshade,1 , 1)
            #print(str(i), blueshade, blueshade,1 , 1)
            blueshade = blueshade+ 2.0/number_of_color
        else:
            colorTableNode.AddColor(str(i), 1, redshade, redshade, 1)
            #print(str(i), 1, redshade, redshade, 1)
            redshade = redshade- 2.0/number_of_color
    return colorTableNode
def exportAxis(self,dirpath,Group,PC,std_count):
    """Deform the exploration shape to *std_count* standard deviations
    along component *PC* and save it as a VTK file in *dirpath*.

    The slider 'ratio' in [-1000, 1000] is recovered from the deviation
    via the normal CDF: ratio = 1000*((1 - sf(std))*2 - 1).
    """
    num_slider = int(PC)-1
    filepath = dirpath + "PCA_Group" + Group[0] + "_Comp" + PC + "_std" + str(round(std_count,2)) + ".vtk"
    # Map the std deviation back to the slider's [-1000, 1000] scale.
    ratio = 1000*((1- stats.norm.sf(std_count))*2 - 1)
    self.pca_exploration.updatePolyDataExploration(num_slider,ratio/1000.0)
    self.pca_exploration.saveVTKFile(self.pca_exploration.getPolyDataExploration(),filepath)
class ShapeVariationAnalyzerTest(ScriptedLoadableModuleTest):
    """
    This is the test case for your scripted module.
    Uses ScriptedLoadableModuleTest base class, available at:
    https://github.com/Slicer/Slicer/blob/master/Base/Python/slicer/ScriptedLoadableModule.py
    """
    def setUp(self):
        # No scene preparation needed for this test.
        pass
    def runTest(self):
        """Run the module's test suite."""
        self.setUp()
        self.delayDisplay('Starting the tests')
        self.test_ShapeVariationAnalyzer()
    def test_ShapeVariationAnalyzer(self):
        """End-to-end check: load ./hippo.csv, run the PCA, compare the
        explained-variance ratios against reference values, then export
        and delete the current visualisation."""
        self.logic = ShapeVariationAnalyzerLogic()
        filepath_in = "./hippo.csv"
        # Test over all the groups combined.
        keygroup = "All"
        try:
            self.logic.pca_exploration.loadCSVFile(filepath_in)
            self.logic.pca_exploration.process()
            # Add personalized groups to comboboxes with the CSV
            dictPCA = self.logic.pca_exploration.getDictPCA()
            # Setting PCA model to use
            self.logic.pca_exploration.setCurrentPCAModel(keygroup)
        except shapca.CSVFileError as e:
            print('CSVFileError:'+e.value)
            slicer.util.errorDisplay('Invalid CSV file')
        try:
            exp_ratio=self.logic.pca_exploration.getExplainedRatio()
            error_bool = False
            # Reference explained-variance percentages for the hippo data.
            comparison = [38.075,9.688,6.970,5.525,4.338,3.643,2.835,2.487]
            for num_slider in range(8):
                #print ( str(comparison[num_slider]) + " compare to " + str(round(exp_ratio[num_slider]*100,3)) )
                if ( comparison[num_slider] != round(exp_ratio[num_slider]*100,3) ):
                    error_bool = True
            if (error_bool == True):
                print( 'Exploration Error: The PCA results are wrong.')
            else:
                print( "The PCA exploration is right.")
        except Exception:
            # BUG FIX: was a bare 'except:' (would also swallow
            # SystemExit/KeyboardInterrupt).
            print( 'Exploration failed' )
            slicer.util.errorDisplay('Exploration failed')
        filepath_out = "./test.vtk"
        # Export the current visualisation and verify the file round-trip.
        self.logic.pca_exploration.saveVTKFile(self.logic.pca_exploration.getPolyDataExploration(),filepath_out)
        if (os.path.exists(filepath_out)):
            print(filepath_out + " created.")
        else:
            print('CSVExportError: '+filepath_out + " not created.")
            slicer.util.errorDisplay('Target not created')
        os.remove(filepath_out)
        if (os.path.exists(filepath_out) == False):
            print(filepath_out + " deleted.")
        else:
            print('CSVExportError: '+filepath_out + " not deleted.")
            slicer.util.errorDisplay('Target not deleted')
|
pdedumast/ShapeVariationAnalyzer
|
ShapeVariationAnalyzer/ShapeVariationAnalyzer.py
|
Python
|
apache-2.0
| 106,269
|
[
"VTK"
] |
ef91f26cb93729162386a28b8527d3f98990fb6d416bfed481e75c3f6c6bb979
|
#!/usr/bin/env python
## Reads and manipulates Ed's galaxy property catalogs
## Requires pyfits, astLib
## sphe spehmakh@gmail.com
# import python essentials
import os
import sys
import time
import numpy as np
import math
import tempfile
import subprocess
# non-standard packages
import pyfits
from astLib.astWCS import WCS
import Tigger
# some useful constants
PI = math.pi
FWHM = math.sqrt(math.log(256))
MAP = {'Galaxy':'ID',
'ALOG10(MHI)':'mhi',
'lwidth':'lwidth',
'incl':'incl',
'z_obs':'z_obs',
'dist [Mpc]':'dist',
'Sint [Jy km/s]':'int_flux',
'RA-DEC pixel position in master cube':'radec_pix',
'Scaled cubelet dimensions':'scd',
'dV [km/s]':'dV',
'chan1, chan2':'chans',
}
def mysplit(string, delimiter=None):
    """Split *string* and strip whitespace from every piece.

    A falsy *delimiter* (the default) splits on runs of whitespace,
    matching str.split() with no argument.
    """
    pieces = string.split(delimiter) if delimiter else string.split()
    return [piece.strip() for piece in pieces]
# Communication functions
def info(string):
    """Print *string* to stdout prefixed with a timestamp and ##INFO."""
    t = "%d/%d/%d %d:%d:%d"%(time.localtime()[:6])
    # Parenthesised print: the original Python-2-only 'print x' statement
    # is a SyntaxError under Python 3; this form works in both.
    print("%s ##INFO: %s"%(t,string))
def warn(string):
    """Print *string* to stdout prefixed with a timestamp and ##WARNING."""
    t = "%d/%d/%d %d:%d:%d"%(time.localtime()[:6])
    # Parenthesised print: the original Python-2-only 'print x' statement
    # is a SyntaxError under Python 3; this form works in both.
    print("%s ##WARNING: %s"%(t,string))
def abort(string):
    """Terminate the program via SystemExit with a timestamped message."""
    stamp = "%d/%d/%d %d:%d:%d" % (time.localtime()[:6])
    raise SystemExit("%s ##ABORTING: %s" % (stamp, string))
# run things on the command line
def _run(command,options):
    """Run *command* with the list of *options* through the shell.

    Captures stdout/stderr only when the process's streams have been
    replaced by non-file objects (e.g. under a notebook); otherwise the
    child inherits them.  Aborts on a non-zero exit code.
    """
    cmd = " ".join([command]+options)
    info('running: %s'%cmd)
    # NOTE: 'file' is the Python 2 built-in type — this check is py2-only.
    process = subprocess.Popen(cmd,
        stderr=subprocess.PIPE if not isinstance(sys.stderr,file) else sys.stderr,
        stdout=subprocess.PIPE if not isinstance(sys.stdout,file) else sys.stdout,
        shell=True)
    if process.stdout or process.stderr:
        # BUG FIX: was process.comunicate() (typo) -> AttributeError.
        out,err = process.communicate()
        sys.stdout.write(out)
        sys.stderr.write(err)
        out = None
    else:
        process.wait()
    if process.returncode:
        # Typo fix in message: "errr" -> "error".
        abort('%s: returns error code %d'%(command,process.returncode))
class Model(object):
    """
    Reads and manipulates Ed's galaxy property files.

    A Model pairs a text catalog of galaxy properties with the FITS cube
    in which the galaxies were simulated, and can convert the catalog to
    a Tigger sky model via writeto().
    """
    def __init__(self,textname,fitsname,sources=None,ra0=0,dec0=0):
        """
        textname : Name of the galaxy-property text file
        fitsname : Name of FITS cube where galaxies are simulated
        sources  : Optional pre-existing list of Source objects
        ra0/dec0 : Reference coordinates (overwritten on load())
        """
        if sources is None:
            sources = []
        self.textname = textname
        self.fitsname = fitsname
        self.ra0 = ra0
        self.dec0 = dec0
        self.sources = sources
        self.nsrcs = len(sources)
    def load(self,textname=None,fitsname=None,append=False):
        """Load galaxy properties from the text file.

        With append=True, sources accumulate on top of the existing list
        and textname is tracked as a list of loaded files.  The FITS
        header provides the WCS used to convert pixel positions to
        RA/DEC degrees.  Returns self for chaining.
        """
        if textname is None:
            textname = self.textname
        if fitsname is None:
            fitsname = self.fitsname
        if append:
            if isinstance(self.textname,list):
                self.textname.append(textname)
            else:
                self.textname = [self.textname,textname]
            sources = self.sources
        else:
            sources = []
        self.fitsname = fitsname
        nsrc = 0
        info('loading galaxy properties from %s'%textname)
        info('Corresponding FITS cube is %s'%fitsname)
        std = open(textname)
        wcs = WCS(pyfits.open(fitsname)[0].header,mode='pyfits')
        nx,ny = pyfits.open(fitsname)[0].data.shape[-2:]
        # Field centre taken at the middle pixel of the cube.
        self.ra0,self.dec0 = wcs.pix2wcs(nx/2.,ny/2.)
        for line in std.readlines():
            # Skip blank lines and comments.
            if line[0] not in ['\n','#']:
                key,val = mysplit(line,':')
                if key == 'Galaxy':
                    # A 'Galaxy' line starts a new source record.
                    nsrc +=1
                    src = Source()
                    sources.append(src)
                if key.startswith('Scaled'):
                    val = mysplit(val)
                elif key.startswith('chan'):
                    chan1,chan2 = mysplit(val)
                    src.addAttribute('chan1',chan1)
                    src.addAttribute('chan2',chan2)
                elif key.startswith('RA-DEC'):
                    # Store both the pixel and the WCS-converted position.
                    ra_pix,dec_pix = map(float,mysplit(val))
                    src.addAttribute('ra_pix',ra_pix)
                    src.addAttribute('dec_pix',dec_pix)
                    ra_deg,dec_deg = wcs.pix2wcs(ra_pix,dec_pix)
                    src.addAttribute('ra_deg',ra_deg)
                    src.addAttribute('dec_deg',dec_deg)
                else:
                    # Generic property: map the catalog key to the
                    # Source attribute name via MAP.
                    src.addAttribute(MAP[key],val)
        self.sources = sources
        self.nsrcs = nsrc
        info('Loaded %d sources'%nsrc)
        return self
    def writeto(self,filename,overwrite=False):
        """Write the catalog as a Tigger sky model to *filename*.

        Converts via a temporary ASCII table and tigger-convert, then
        re-opens the model to attach non-standard attributes as tags.
        """
        if os.path.exists(filename) and overwrite is False:
            # BUG FIX: the original never interpolated the filename into
            # the '%s' placeholder.
            abort('%s already exists. Set overwrite to True to overwrite'%filename)
        # NOTE(review): NamedTemporaryFile defaults to binary mode on
        # Python 3; str writes below assume Python 2 — confirm runtime.
        tf = tempfile.NamedTemporaryFile(suffix='.txt')
        tf.write('#format:name ra_d dec_d emaj_s emin_s pa_d i\n')
        for i,src in enumerate(self.sources):
            # Point sources: zero extent, flux = integrated flux / dV.
            tf.write('%d %.8g %.8g 0 0 0 %.4g\n'%(src.ID,src.ra_deg,src.dec_deg,src.int_flux/src.dV))
        tf.flush()
        options = [tf.name,filename]
        if overwrite:
            options += ['-f']
        _run('tigger-convert',options=options)
        tf.close()
        # Add non-standard attributes as tags, matching sources by ID.
        model = Tigger.load(filename)
        for src0,src1 in zip(sorted(self.sources,key=lambda src:src.ID),
                             sorted(model.sources,key=lambda src: int(src.name))):
            for attribute in 'mhi dV dist incl z_obs scd chan1 chan2'.split():
                src1.setAttribute(attribute,getattr(src0,attribute))
        model.save(filename)
        # Rename sources according to the COPART
        # _run('tigger-convert',[filename,'--rename','-f','--min-extent','0','--cluster-dist','10'])
class Source(object):
    """Property container for a single simulated galaxy.

    Attribute slots (all initialised to None, filled by addAttribute):
      ID        Galaxy ID
      int_flux  Integrated flux (Stokes I)
      mhi       HI mass
      lwidth    Line width
      incl      Inclination angle
      obs_z     Observed redshift
      dist      Distance in Mpc
      ra_pix / dec_pix   RA / DEC in pixels
      ra_deg / dec_deg   RA / DEC in degrees
      scd       Scaled cubelet dimension
      dV        Velocity
      chan1 / chan2      Initial / final channel
    """

    # Every known property slot, created empty at construction time.
    _FIELDS = ('ID', 'int_flux', 'mhi', 'lwidth', 'incl', 'obs_z',
               'dist', 'ra_pix', 'dec_pix', 'ra_deg', 'dec_deg',
               'scd', 'dV', 'chan1', 'chan2')

    def __init__(self):
        for field in self._FIELDS:
            setattr(self, field, None)

    def addAttribute(self,key,val):
        """Set attribute *key*, coercing numeric strings.

        Values parseable as floats are stored numerically; whole numbers
        are further narrowed to int.  Anything else is stored unchanged.
        """
        try:
            number = float(val)
        except ValueError:
            # Non-numeric value (e.g. a name): keep as given.
            setattr(self, key, val)
        else:
            setattr(self, key, int(number) if number % 1 == 0 else number)
|
SpheMakh/HI-Inator
|
src/galProps.py
|
Python
|
gpl-2.0
| 6,783
|
[
"Galaxy"
] |
2980ac5a51c183dabe1a4d4350b489a1ad909503797892f8b4dc878504be26d0
|
#!/usr/bin/env python3
#
# BIP39 English wordlist
# Source: https://github.com/bitcoin/bips/blob/master/bip-0039/english.txt
words = tuple("""
abandon
ability
able
about
above
absent
absorb
abstract
absurd
abuse
access
accident
account
accuse
achieve
acid
acoustic
acquire
across
act
action
actor
actress
actual
adapt
add
addict
address
adjust
admit
adult
advance
advice
aerobic
affair
afford
afraid
again
age
agent
agree
ahead
aim
air
airport
aisle
alarm
album
alcohol
alert
alien
all
alley
allow
almost
alone
alpha
already
also
alter
always
amateur
amazing
among
amount
amused
analyst
anchor
ancient
anger
angle
angry
animal
ankle
announce
annual
another
answer
antenna
antique
anxiety
any
apart
apology
appear
apple
approve
april
arch
arctic
area
arena
argue
arm
armed
armor
army
around
arrange
arrest
arrive
arrow
art
artefact
artist
artwork
ask
aspect
assault
asset
assist
assume
asthma
athlete
atom
attack
attend
attitude
attract
auction
audit
august
aunt
author
auto
autumn
average
avocado
avoid
awake
aware
away
awesome
awful
awkward
axis
baby
bachelor
bacon
badge
bag
balance
balcony
ball
bamboo
banana
banner
bar
barely
bargain
barrel
base
basic
basket
battle
beach
bean
beauty
because
become
beef
before
begin
behave
behind
believe
below
belt
bench
benefit
best
betray
better
between
beyond
bicycle
bid
bike
bind
biology
bird
birth
bitter
black
blade
blame
blanket
blast
bleak
bless
blind
blood
blossom
blouse
blue
blur
blush
board
boat
body
boil
bomb
bone
bonus
book
boost
border
boring
borrow
boss
bottom
bounce
box
boy
bracket
brain
brand
brass
brave
bread
breeze
brick
bridge
brief
bright
bring
brisk
broccoli
broken
bronze
broom
brother
brown
brush
bubble
buddy
budget
buffalo
build
bulb
bulk
bullet
bundle
bunker
burden
burger
burst
bus
business
busy
butter
buyer
buzz
cabbage
cabin
cable
cactus
cage
cake
call
calm
camera
camp
can
canal
cancel
candy
cannon
canoe
canvas
canyon
capable
capital
captain
car
carbon
card
cargo
carpet
carry
cart
case
cash
casino
castle
casual
cat
catalog
catch
category
cattle
caught
cause
caution
cave
ceiling
celery
cement
census
century
cereal
certain
chair
chalk
champion
change
chaos
chapter
charge
chase
chat
cheap
check
cheese
chef
cherry
chest
chicken
chief
child
chimney
choice
choose
chronic
chuckle
chunk
churn
cigar
cinnamon
circle
citizen
city
civil
claim
clap
clarify
claw
clay
clean
clerk
clever
click
client
cliff
climb
clinic
clip
clock
clog
close
cloth
cloud
clown
club
clump
cluster
clutch
coach
coast
coconut
code
coffee
coil
coin
collect
color
column
combine
come
comfort
comic
common
company
concert
conduct
confirm
congress
connect
consider
control
convince
cook
cool
copper
copy
coral
core
corn
correct
cost
cotton
couch
country
couple
course
cousin
cover
coyote
crack
cradle
craft
cram
crane
crash
crater
crawl
crazy
cream
credit
creek
crew
cricket
crime
crisp
critic
crop
cross
crouch
crowd
crucial
cruel
cruise
crumble
crunch
crush
cry
crystal
cube
culture
cup
cupboard
curious
current
curtain
curve
cushion
custom
cute
cycle
dad
damage
damp
dance
danger
daring
dash
daughter
dawn
day
deal
debate
debris
decade
december
decide
decline
decorate
decrease
deer
defense
define
defy
degree
delay
deliver
demand
demise
denial
dentist
deny
depart
depend
deposit
depth
deputy
derive
describe
desert
design
desk
despair
destroy
detail
detect
develop
device
devote
diagram
dial
diamond
diary
dice
diesel
diet
differ
digital
dignity
dilemma
dinner
dinosaur
direct
dirt
disagree
discover
disease
dish
dismiss
disorder
display
distance
divert
divide
divorce
dizzy
doctor
document
dog
doll
dolphin
domain
donate
donkey
donor
door
dose
double
dove
draft
dragon
drama
drastic
draw
dream
dress
drift
drill
drink
drip
drive
drop
drum
dry
duck
dumb
dune
during
dust
dutch
duty
dwarf
dynamic
eager
eagle
early
earn
earth
easily
east
easy
echo
ecology
economy
edge
edit
educate
effort
egg
eight
either
elbow
elder
electric
elegant
element
elephant
elevator
elite
else
embark
embody
embrace
emerge
emotion
employ
empower
empty
enable
enact
end
endless
endorse
enemy
energy
enforce
engage
engine
enhance
enjoy
enlist
enough
enrich
enroll
ensure
enter
entire
entry
envelope
episode
equal
equip
era
erase
erode
erosion
error
erupt
escape
essay
essence
estate
eternal
ethics
evidence
evil
evoke
evolve
exact
example
excess
exchange
excite
exclude
excuse
execute
exercise
exhaust
exhibit
exile
exist
exit
exotic
expand
expect
expire
explain
expose
express
extend
extra
eye
eyebrow
fabric
face
faculty
fade
faint
faith
fall
false
fame
family
famous
fan
fancy
fantasy
farm
fashion
fat
fatal
father
fatigue
fault
favorite
feature
february
federal
fee
feed
feel
female
fence
festival
fetch
fever
few
fiber
fiction
field
figure
file
film
filter
final
find
fine
finger
finish
fire
firm
first
fiscal
fish
fit
fitness
fix
flag
flame
flash
flat
flavor
flee
flight
flip
float
flock
floor
flower
fluid
flush
fly
foam
focus
fog
foil
fold
follow
food
foot
force
forest
forget
fork
fortune
forum
forward
fossil
foster
found
fox
fragile
frame
frequent
fresh
friend
fringe
frog
front
frost
frown
frozen
fruit
fuel
fun
funny
furnace
fury
future
gadget
gain
galaxy
gallery
game
gap
garage
garbage
garden
garlic
garment
gas
gasp
gate
gather
gauge
gaze
general
genius
genre
gentle
genuine
gesture
ghost
giant
gift
giggle
ginger
giraffe
girl
give
glad
glance
glare
glass
glide
glimpse
globe
gloom
glory
glove
glow
glue
goat
goddess
gold
good
goose
gorilla
gospel
gossip
govern
gown
grab
grace
grain
grant
grape
grass
gravity
great
green
grid
grief
grit
grocery
group
grow
grunt
guard
guess
guide
guilt
guitar
gun
gym
habit
hair
half
hammer
hamster
hand
happy
harbor
hard
harsh
harvest
hat
have
hawk
hazard
head
health
heart
heavy
hedgehog
height
hello
helmet
help
hen
hero
hidden
high
hill
hint
hip
hire
history
hobby
hockey
hold
hole
holiday
hollow
home
honey
hood
hope
horn
horror
horse
hospital
host
hotel
hour
hover
hub
huge
human
humble
humor
hundred
hungry
hunt
hurdle
hurry
hurt
husband
hybrid
ice
icon
idea
identify
idle
ignore
ill
illegal
illness
image
imitate
immense
immune
impact
impose
improve
impulse
inch
include
income
increase
index
indicate
indoor
industry
infant
inflict
inform
inhale
inherit
initial
inject
injury
inmate
inner
innocent
input
inquiry
insane
insect
inside
inspire
install
intact
interest
into
invest
invite
involve
iron
island
isolate
issue
item
ivory
jacket
jaguar
jar
jazz
jealous
jeans
jelly
jewel
job
join
joke
journey
joy
judge
juice
jump
jungle
junior
junk
just
kangaroo
keen
keep
ketchup
key
kick
kid
kidney
kind
kingdom
kiss
kit
kitchen
kite
kitten
kiwi
knee
knife
knock
know
lab
label
labor
ladder
lady
lake
lamp
language
laptop
large
later
latin
laugh
laundry
lava
law
lawn
lawsuit
layer
lazy
leader
leaf
learn
leave
lecture
left
leg
legal
legend
leisure
lemon
lend
length
lens
leopard
lesson
letter
level
liar
liberty
library
license
life
lift
light
like
limb
limit
link
lion
liquid
list
little
live
lizard
load
loan
lobster
local
lock
logic
lonely
long
loop
lottery
loud
lounge
love
loyal
lucky
luggage
lumber
lunar
lunch
luxury
lyrics
machine
mad
magic
magnet
maid
mail
main
major
make
mammal
man
manage
mandate
mango
mansion
manual
maple
marble
march
margin
marine
market
marriage
mask
mass
master
match
material
math
matrix
matter
maximum
maze
meadow
mean
measure
meat
mechanic
medal
media
melody
melt
member
memory
mention
menu
mercy
merge
merit
merry
mesh
message
metal
method
middle
midnight
milk
million
mimic
mind
minimum
minor
minute
miracle
mirror
misery
miss
mistake
mix
mixed
mixture
mobile
model
modify
mom
moment
monitor
monkey
monster
month
moon
moral
more
morning
mosquito
mother
motion
motor
mountain
mouse
move
movie
much
muffin
mule
multiply
muscle
museum
mushroom
music
must
mutual
myself
mystery
myth
naive
name
napkin
narrow
nasty
nation
nature
near
neck
need
negative
neglect
neither
nephew
nerve
nest
net
network
neutral
never
news
next
nice
night
noble
noise
nominee
noodle
normal
north
nose
notable
note
nothing
notice
novel
now
nuclear
number
nurse
nut
oak
obey
object
oblige
obscure
observe
obtain
obvious
occur
ocean
october
odor
off
offer
office
often
oil
okay
old
olive
olympic
omit
once
one
onion
online
only
open
opera
opinion
oppose
option
orange
orbit
orchard
order
ordinary
organ
orient
original
orphan
ostrich
other
outdoor
outer
output
outside
oval
oven
over
own
owner
oxygen
oyster
ozone
pact
paddle
page
pair
palace
palm
panda
panel
panic
panther
paper
parade
parent
park
parrot
party
pass
patch
path
patient
patrol
pattern
pause
pave
payment
peace
peanut
pear
peasant
pelican
pen
penalty
pencil
people
pepper
perfect
permit
person
pet
phone
photo
phrase
physical
piano
picnic
picture
piece
pig
pigeon
pill
pilot
pink
pioneer
pipe
pistol
pitch
pizza
place
planet
plastic
plate
play
please
pledge
pluck
plug
plunge
poem
poet
point
polar
pole
police
pond
pony
pool
popular
portion
position
possible
post
potato
pottery
poverty
powder
power
practice
praise
predict
prefer
prepare
present
pretty
prevent
price
pride
primary
print
priority
prison
private
prize
problem
process
produce
profit
program
project
promote
proof
property
prosper
protect
proud
provide
public
pudding
pull
pulp
pulse
pumpkin
punch
pupil
puppy
purchase
purity
purpose
purse
push
put
puzzle
pyramid
quality
quantum
quarter
question
quick
quit
quiz
quote
rabbit
raccoon
race
rack
radar
radio
rail
rain
raise
rally
ramp
ranch
random
range
rapid
rare
rate
rather
raven
raw
razor
ready
real
reason
rebel
rebuild
recall
receive
recipe
record
recycle
reduce
reflect
reform
refuse
region
regret
regular
reject
relax
release
relief
rely
remain
remember
remind
remove
render
renew
rent
reopen
repair
repeat
replace
report
require
rescue
resemble
resist
resource
response
result
retire
retreat
return
reunion
reveal
review
reward
rhythm
rib
ribbon
rice
rich
ride
ridge
rifle
right
rigid
ring
riot
ripple
risk
ritual
rival
river
road
roast
robot
robust
rocket
romance
roof
rookie
room
rose
rotate
rough
round
route
royal
rubber
rude
rug
rule
run
runway
rural
sad
saddle
sadness
safe
sail
salad
salmon
salon
salt
salute
same
sample
sand
satisfy
satoshi
sauce
sausage
save
say
scale
scan
scare
scatter
scene
scheme
school
science
scissors
scorpion
scout
scrap
screen
script
scrub
sea
search
season
seat
second
secret
section
security
seed
seek
segment
select
sell
seminar
senior
sense
sentence
series
service
session
settle
setup
seven
shadow
shaft
shallow
share
shed
shell
sheriff
shield
shift
shine
ship
shiver
shock
shoe
shoot
shop
short
shoulder
shove
shrimp
shrug
shuffle
shy
sibling
sick
side
siege
sight
sign
silent
silk
silly
silver
similar
simple
since
sing
siren
sister
situate
six
size
skate
sketch
ski
skill
skin
skirt
skull
slab
slam
sleep
slender
slice
slide
slight
slim
slogan
slot
slow
slush
small
smart
smile
smoke
smooth
snack
snake
snap
sniff
snow
soap
soccer
social
sock
soda
soft
solar
soldier
solid
solution
solve
someone
song
soon
sorry
sort
soul
sound
soup
source
south
space
spare
spatial
spawn
speak
special
speed
spell
spend
sphere
spice
spider
spike
spin
spirit
split
spoil
sponsor
spoon
sport
spot
spray
spread
spring
spy
square
squeeze
squirrel
stable
stadium
staff
stage
stairs
stamp
stand
start
state
stay
steak
steel
stem
step
stereo
stick
still
sting
stock
stomach
stone
stool
story
stove
strategy
street
strike
strong
struggle
student
stuff
stumble
style
subject
submit
subway
success
such
sudden
suffer
sugar
suggest
suit
summer
sun
sunny
sunset
super
supply
supreme
sure
surface
surge
surprise
surround
survey
suspect
sustain
swallow
swamp
swap
swarm
swear
sweet
swift
swim
swing
switch
sword
symbol
symptom
syrup
system
table
tackle
tag
tail
talent
talk
tank
tape
target
task
taste
tattoo
taxi
teach
team
tell
ten
tenant
tennis
tent
term
test
text
thank
that
theme
then
theory
there
they
thing
this
thought
three
thrive
throw
thumb
thunder
ticket
tide
tiger
tilt
timber
time
tiny
tip
tired
tissue
title
toast
tobacco
today
toddler
toe
together
toilet
token
tomato
tomorrow
tone
tongue
tonight
tool
tooth
top
topic
topple
torch
tornado
tortoise
toss
total
tourist
toward
tower
town
toy
track
trade
traffic
tragic
train
transfer
trap
trash
travel
tray
treat
tree
trend
trial
tribe
trick
trigger
trim
trip
trophy
trouble
truck
true
truly
trumpet
trust
truth
try
tube
tuition
tumble
tuna
tunnel
turkey
turn
turtle
twelve
twenty
twice
twin
twist
two
type
typical
ugly
umbrella
unable
unaware
uncle
uncover
under
undo
unfair
unfold
unhappy
uniform
unique
unit
universe
unknown
unlock
until
unusual
unveil
update
upgrade
uphold
upon
upper
upset
urban
urge
usage
use
used
useful
useless
usual
utility
vacant
vacuum
vague
valid
valley
valve
van
vanish
vapor
various
vast
vault
vehicle
velvet
vendor
venture
venue
verb
verify
version
very
vessel
veteran
viable
vibrant
vicious
victory
video
view
village
vintage
violin
virtual
virus
visa
visit
visual
vital
vivid
vocal
voice
void
volcano
volume
vote
voyage
wage
wagon
wait
walk
wall
walnut
want
warfare
warm
warrior
wash
wasp
waste
water
wave
way
wealth
weapon
wear
weasel
weather
web
wedding
weekend
weird
welcome
west
wet
whale
what
wheat
wheel
when
where
whip
whisper
wide
width
wife
wild
will
win
window
wine
wing
wink
winner
winter
wire
wisdom
wise
wish
witness
wolf
woman
wonder
wood
wool
word
work
world
worry
worth
wrap
wreck
wrestle
wrist
write
wrong
yard
year
yellow
you
young
youth
zebra
zero
zone
zoo
""".split())
|
mmgen/mmgen
|
mmgen/wordlist/bip39.py
|
Python
|
gpl-3.0
| 13,273
|
[
"BLAST",
"CASINO",
"CRYSTAL",
"Galaxy",
"Jaguar",
"VisIt"
] |
783ca8977662ec094729549b91452577b4da6c5beb55cd4b0a9c6a7593bc8fee
|
""" Test for StorageManagement clients
"""
from __future__ import absolute_import
from __future__ import division
from __future__ import print_function
# pylint: disable=protected-access,missing-docstring,invalid-name
import unittest
from mock import MagicMock, patch
from DIRAC import S_OK, S_ERROR
from DIRAC.StorageManagementSystem.Client.StorageManagerClient import getFilesToStage
from DIRAC.DataManagementSystem.Client.test.mock_DM import dm_mock
import errno
# Use side_effect instead of return_value so the dict in the result is copied
# (each lambda builds a fresh payload per call, so a test that mutates a
# returned dict cannot affect later calls).

# SE1: tape SE reporting the file as not accessible -> candidate for staging.
mockObjectSE1 = MagicMock()
mockObjectSE1.getFileMetadata.side_effect = lambda *_: S_OK(
    {
        "Successful": {"/a/lfn/1.txt": {"Accessible": False}},
        "Failed": {},
    }
)
mockObjectSE1.getStatus.side_effect = lambda *_: S_OK({"DiskSE": False, "TapeSE": True})

# SE2: tape SE reporting the file cached and accessible -> already online.
mockObjectSE2 = MagicMock()
mockObjectSE2.getFileMetadata.side_effect = lambda *_: S_OK(
    {
        "Successful": {"/a/lfn/2.txt": {"Cached": 1, "Accessible": True}},
        "Failed": {},
    }
)
mockObjectSE2.getStatus.side_effect = lambda *_: S_OK({"DiskSE": False, "TapeSE": True})

# SE3: tape SE failing the metadata query with a generic error string.
mockObjectSE3 = MagicMock()
mockObjectSE3.getFileMetadata.side_effect = lambda *_: S_OK(
    {
        "Successful": {},
        "Failed": {"/a/lfn/2.txt": "error"},
    }
)
mockObjectSE3.getStatus.side_effect = lambda *_: S_OK({"DiskSE": False, "TapeSE": True})

# SE4: tape SE failing with an ENOENT-style message -> file reported absent.
mockObjectSE4 = MagicMock()
mockObjectSE4.getFileMetadata.side_effect = lambda *_: S_OK(
    {"Successful": {}, "Failed": {"/a/lfn/2.txt": S_ERROR(errno.ENOENT, "")["Message"]}}
)
mockObjectSE4.getStatus.side_effect = lambda *_: S_OK({"DiskSE": False, "TapeSE": True})

# SE5: DISK SE reporting the file as not accessible (disk files cannot be staged).
mockObjectSE5 = MagicMock()
mockObjectSE5.getFileMetadata.side_effect = lambda *_: S_OK(
    {"Successful": {"/a/lfn/1.txt": {"Accessible": False}}, "Failed": {}}
)
mockObjectSE5.getStatus.side_effect = lambda *_: S_OK({"DiskSE": True, "TapeSE": False})

# SE6: tape SE reporting the file not cached -> offline at tape.
mockObjectSE6 = MagicMock()
mockObjectSE6.getFileMetadata.side_effect = lambda *_: S_OK(
    {"Successful": {"/a/lfn/2.txt": {"Cached": 0, "Accessible": False}}, "Failed": {}}
)
mockObjectSE6.getStatus.side_effect = lambda *_: S_OK({"DiskSE": False, "TapeSE": True})

# DMS helper mock: every SE resolves to the single site "mySite".
mockObjectDMSHelper = MagicMock()
mockObjectDMSHelper.getLocalSiteForSE.side_effect = lambda *_: S_OK("mySite")
mockObjectDMSHelper.getSitesForSE.side_effect = lambda *_: S_OK(["mySite"])
class ClientsTestCase(unittest.TestCase):
    """Common fixture for the StorageManagerClient test cases.

    Raises the DIRAC logger to DEBUG so failing tests come with verbose
    client-side logging.
    """

    def setUp(self):
        # Imported lazily so collecting the module does not trigger DIRAC
        # initialisation up front.
        from DIRAC import gLogger

        gLogger.setLevel("DEBUG")

    def tearDown(self):
        # No per-test state to clean up.
        pass
#############################################################################
class StorageManagerSuccess(ClientsTestCase):
    """Tests of getFilesToStage() with DataManager and StorageElement mocked.

    Each test patches StorageElement with one of the module-level mock SEs,
    then checks the four result buckets: onlineLFNs, offlineLFNs, absentLFNs
    and failedLFNs.
    """

    @patch("DIRAC.StorageManagementSystem.Client.StorageManagerClient.DataManager", return_value=dm_mock)
    @patch("DIRAC.StorageManagementSystem.Client.StorageManagerClient.StorageElement", return_value=mockObjectSE1)
    def test_getFilesToStage_withFilesToStage(self, _patch, _patched):
        """Test where the StorageElement mock will return files offline"""
        res = getFilesToStage(["/a/lfn/1.txt"], checkOnlyTapeSEs=False)
        self.assertTrue(res["OK"])
        self.assertEqual(res["Value"]["onlineLFNs"], [])
        # The target SE is chosen among the replica SEs, hence assertIn.
        self.assertIn(res["Value"]["offlineLFNs"], [{"SE1": ["/a/lfn/1.txt"]}, {"SE2": ["/a/lfn/1.txt"]}])
        self.assertEqual(res["Value"]["absentLFNs"], {})
        self.assertEqual(res["Value"]["failedLFNs"], [])

    @patch("DIRAC.StorageManagementSystem.Client.StorageManagerClient.DataManager", return_value=dm_mock)
    @patch("DIRAC.StorageManagementSystem.Client.StorageManagerClient.StorageElement", return_value=mockObjectSE2)
    def test_getFilesToStage_noFilesToStage(self, _patch, _patched):
        """Test where the StorageElement mock will return files online"""
        res = getFilesToStage(["/a/lfn/2.txt"], checkOnlyTapeSEs=False)
        self.assertTrue(res["OK"])
        self.assertEqual(res["Value"]["onlineLFNs"], ["/a/lfn/2.txt"])
        self.assertEqual(res["Value"]["offlineLFNs"], {})
        self.assertEqual(res["Value"]["absentLFNs"], {})
        self.assertEqual(res["Value"]["failedLFNs"], [])

    @patch("DIRAC.StorageManagementSystem.Client.StorageManagerClient.DataManager", return_value=dm_mock)
    @patch("DIRAC.StorageManagementSystem.Client.StorageManagerClient.StorageElement", return_value=mockObjectSE3)
    def test_getFilesToStage_seErrors(self, _patch, _patched):
        """Test where the StorageElement will return failure"""
        res = getFilesToStage(["/a/lfn/2.txt"], checkOnlyTapeSEs=False)
        self.assertTrue(res["OK"])
        self.assertEqual(res["Value"]["onlineLFNs"], [])
        self.assertEqual(res["Value"]["offlineLFNs"], {})
        self.assertEqual(res["Value"]["absentLFNs"], {})
        # A generic SE error lands the LFN in failedLFNs, not absentLFNs.
        self.assertEqual(res["Value"]["failedLFNs"], ["/a/lfn/2.txt"])

    @patch("DIRAC.StorageManagementSystem.Client.StorageManagerClient.DataManager", return_value=dm_mock)
    @patch("DIRAC.StorageManagementSystem.Client.StorageManagerClient.StorageElement", return_value=mockObjectSE4)
    def test_getFilesToStage_noSuchFile(self, _patch, _patched):
        """Test where the StorageElement will return file is absent"""
        res = getFilesToStage(["/a/lfn/2.txt"], checkOnlyTapeSEs=False)
        self.assertTrue(res["OK"])
        self.assertEqual(res["Value"]["onlineLFNs"], [])
        self.assertEqual(res["Value"]["offlineLFNs"], {})
        # ENOENT-style failures are classified as absent with an explanatory message.
        self.assertEqual(
            res["Value"]["absentLFNs"], {"/a/lfn/2.txt": "No such file or directory ( 2 : File not at SE1,SE2)"}
        )
        self.assertEqual(res["Value"]["failedLFNs"], [])

    @patch("DIRAC.StorageManagementSystem.Client.StorageManagerClient.DataManager", return_value=dm_mock)
    @patch("DIRAC.StorageManagementSystem.Client.StorageManagerClient.StorageElement", return_value=mockObjectSE5)
    def test_getFilesToStage_fileInaccessibleAtDisk(self, _patch, _patched):
        """Test where the StorageElement will return file is unavailable at a Disk SE"""
        res = getFilesToStage(["/a/lfn/1.txt"], checkOnlyTapeSEs=False)
        self.assertTrue(res["OK"])
        self.assertEqual(res["Value"]["onlineLFNs"], [])
        # A disk-resident file cannot be staged, so it is a failure, not offline.
        self.assertEqual(res["Value"]["offlineLFNs"], {})
        self.assertEqual(res["Value"]["absentLFNs"], {})
        self.assertEqual(res["Value"]["failedLFNs"], ["/a/lfn/1.txt"])

    @patch("DIRAC.StorageManagementSystem.Client.StorageManagerClient.DataManager", return_value=dm_mock)
    @patch("DIRAC.StorageManagementSystem.Client.StorageManagerClient.StorageElement", return_value=mockObjectSE2)
    def test_getFilesToStage_tapeSEOnly_1(self, _patch, _patched):
        """Test where the StorageElement will return file is available"""
        res = getFilesToStage(["/a/lfn/2.txt"], checkOnlyTapeSEs=True)
        self.assertTrue(res["OK"])
        self.assertEqual(res["Value"]["onlineLFNs"], ["/a/lfn/2.txt"])
        self.assertEqual(res["Value"]["offlineLFNs"], {})
        self.assertEqual(res["Value"]["absentLFNs"], {})
        self.assertEqual(res["Value"]["failedLFNs"], [])

    @patch("DIRAC.StorageManagementSystem.Client.StorageManagerClient.DataManager", return_value=dm_mock)
    @patch("DIRAC.StorageManagementSystem.Client.StorageManagerClient.StorageElement", return_value=mockObjectSE6)
    def test_getFilesToStage_tapeSEOnly_2(self, _patch, _patched):
        """Test where the StorageElement will return file is at offline at tape"""
        # random.choice is pinned so the chosen staging SE name is deterministic.
        with patch(
            "DIRAC.StorageManagementSystem.Client.StorageManagerClient.random.choice",
            new=MagicMock(return_value="SERandom"),
        ):
            res = getFilesToStage(["/a/lfn/2.txt"], checkOnlyTapeSEs=True)
            self.assertTrue(res["OK"])
            self.assertEqual(res["Value"]["onlineLFNs"], [])
            self.assertEqual(res["Value"]["offlineLFNs"], {"SERandom": ["/a/lfn/2.txt"]})
            self.assertEqual(res["Value"]["absentLFNs"], {})
            self.assertEqual(res["Value"]["failedLFNs"], [])
if __name__ == "__main__":
    # Build the suite explicitly and run it with verbose per-test output.
    loader = unittest.defaultTestLoader
    suite = loader.loadTestsFromTestCase(ClientsTestCase)
    suite.addTest(loader.loadTestsFromTestCase(StorageManagerSuccess))
    testResult = unittest.TextTestRunner(verbosity=2).run(suite)
|
ic-hep/DIRAC
|
src/DIRAC/StorageManagementSystem/Client/test/Test_Client_StorageManagementSystem.py
|
Python
|
gpl-3.0
| 8,286
|
[
"DIRAC"
] |
3d7a4d472a055ac50c2dd4c40b5c60b5e893757a9f56181c14eddbd4d73a3f85
|
########################################################################
# File : ComputingElement.py
# Author : Stuart Paterson, A.T.
########################################################################
""" The Computing Element class is a base class for all the various
types CEs. It serves several purposes:
- collects general CE related parameters to generate CE description
for the job matching
- provides logic for evaluation of the number of available CPU slots
- provides logic for the proxy renewal while executing jobs
The CE parameters are collected from the following sources, in hierarchy
descending order:
- parameters provided through setParameters() method of the class
- parameters in /LocalSite configuration section
- parameters in /LocalSite/<ceName>/ResourceDict configuration section
- parameters in /LocalSite/ResourceDict configuration section
- parameters in /LocalSite/<ceName> configuration section
- parameters in /Resources/Computing/<ceName> configuration section
- parameters in /Resources/Computing/CEDefaults configuration section
The ComputingElement objects are usually instantiated with the help of
ComputingElementFactory.
"""
import os
import multiprocessing
from DIRAC.ConfigurationSystem.Client.Config import gConfig
from DIRAC.Core.Security.ProxyFile import writeToProxyFile
from DIRAC.Core.Security.ProxyInfo import getProxyInfoAsString
from DIRAC.Core.Security.ProxyInfo import formatProxyInfoAsString
from DIRAC.Core.Security.ProxyInfo import getProxyInfo
from DIRAC.FrameworkSystem.Client.ProxyManagerClient import gProxyManager
from DIRAC.Core.Security.VOMS import VOMS
from DIRAC.ConfigurationSystem.Client.Helpers import Registry
from DIRAC.Core.Security import Properties
from DIRAC.Core.Utilities.Time import dateTime, second
from DIRAC import S_OK, S_ERROR, gLogger, version
from DIRAC.Core.Utilities.ObjectLoader import ObjectLoader
__RCSID__ = "$Id$"

# CE parameters that must be coerced from CS string values when collected.
INTEGER_PARAMETERS = ['CPUTime', 'NumberOfProcessors']
FLOAT_PARAMETERS = ['WaitingToRunningRatio']
LIST_PARAMETERS = ['Tag', 'RequiredTag']

# Defaults applied by _addCEConfigDefaults() when the CS does not define them.
WAITING_TO_RUNNING_RATIO = 0.5
MAX_WAITING_JOBS = 1
MAX_TOTAL_JOBS = 1
class ComputingElement(object):
""" ComputingElement base class """
#############################################################################
def __init__(self, ceName):
    """ Standard constructor

        :param str ceName: name of the Computing Element; used as logger
                           sub-name and as key into the configuration
    """
    self.log = gLogger.getSubLogger(ceName)
    self.ceName = ceName
    self.ceType = ''
    self.ceParameters = {}
    self.proxy = ''
    # datetime until which self.proxy is considered valid (set by setProxy)
    self.valid = None
    self.mandatoryParameters = []
    # Batch system backend; populated by loadBatchSystem()
    self.batch = None
    self.batchSystem = None
    self.batchModuleFile = None
    self.minProxyTime = gConfig.getValue('/Registry/MinProxyLifeTime', 10800)  # secs
    self.defaultProxyTime = gConfig.getValue('/Registry/DefaultProxyLifeTime', 43200)  # secs
    self.proxyCheckPeriod = gConfig.getValue('/Registry/ProxyCheckingPeriod', 3600)  # secs
    # Collect CE parameters from the CS sections (see module docstring)
    self.initializeParameters()
def setProxy(self, proxy, valid=0):
    """ Store *proxy* for this CE instance and record its expiry time.

        :param str proxy: proxy string to be used for job operations
        :param int valid: validity period in seconds from now
        :return: S_OK()
    """
    self.valid = dateTime() + valid * second
    self.proxy = proxy
    return S_OK()
def _prepareProxy(self):
    """ Set the environment variable X509_USER_PROXY

        If a proxy was stored via setProxy(), dump it to a file and point
        X509_USER_PROXY at it; otherwise reuse the proxy already present
        in the environment.

        :return: S_OK() on success, S_ERROR if no usable proxy is found
    """
    if not self.proxy:
        result = getProxyInfo()
        if not result['OK']:
            return S_ERROR("No proxy available")
        if "path" in result['Value']:
            os.environ['X509_USER_PROXY'] = result['Value']['path']
            return S_OK()
        # Bug fix: previously this case fell through to the debug message
        # below (possible KeyError on an unset X509_USER_PROXY) and
        # returned S_OK() even though no proxy was set.
        return S_ERROR("No proxy available")
    result = gProxyManager.dumpProxyToFile(self.proxy, requiredTimeLeft=self.minProxyTime)
    if not result['OK']:
        return result
    os.environ['X509_USER_PROXY'] = result['Value']
    gLogger.debug("Set proxy variable X509_USER_PROXY to %s" % os.environ['X509_USER_PROXY'])
    return S_OK()
def isProxyValid(self, valid=1000):
    """ Check whether the stored proxy will still be valid for *valid* seconds.

        :param int valid: required remaining lifetime in seconds
        :return: S_OK(margin) if the proxy outlives the requirement,
                 otherwise S_ERROR with 'Value' holding the shortfall
    """
    if not self.valid:
        result = S_ERROR('Proxy is not valid for the requested length')
        result['Value'] = 0
        return result
    remaining = self.valid - dateTime()
    margin = remaining.days * 86400 + remaining.seconds - valid
    if margin > 0:
        return S_OK(margin)
    result = S_ERROR('Proxy is not valid for the requested length')
    result['Value'] = margin
    return result
def initializeParameters(self):
    """ Initialize the CE parameters after they are collected from various sources

        Sources are applied in ascending priority: the global CE defaults
        and /Resources CE section, then the local CE configuration, then
        the /LocalSite options; finally hard defaults are filled in.
    """
    # Collect global defaults first
    for section in ['/Resources/Computing/CEDefaults', '/Resources/Computing/%s' % self.ceName]:
        result = gConfig.getOptionsDict(section)
        if result['OK']:
            ceOptions = result['Value']
            # CS options arrive as strings; coerce the typed parameters.
            for key in ceOptions:
                if key in INTEGER_PARAMETERS:
                    ceOptions[key] = int(ceOptions[key])
                if key in FLOAT_PARAMETERS:
                    ceOptions[key] = float(ceOptions[key])
                if key in LIST_PARAMETERS:
                    ceOptions[key] = gConfig.getValue(os.path.join(section, key), [])
            self.ceParameters.update(ceOptions)
    # Get local CE configuration
    localConfigDict = getCEConfigDict(self.ceName)
    self.ceParameters.update(localConfigDict)
    # Adds site level parameters
    section = '/LocalSite'
    result = gConfig.getOptionsDict(section)
    if result['OK'] and result['Value']:
        localSiteParameters = result['Value']
        self.log.debug('Local site parameters are: %s' % (localSiteParameters))
        # Fix: .items() instead of the Python-2-only .iteritems(), so this
        # block also works under Python 3 (identical behaviour under 2).
        for option, value in localSiteParameters.items():
            if option == 'Architecture':
                # Architecture doubles as the Platform parameter.
                self.ceParameters['Platform'] = value
                self.ceParameters['Architecture'] = value
            elif option == 'LocalSE':
                self.ceParameters['LocalSE'] = value.split(', ')
            else:
                self.ceParameters[option] = value
    self._addCEConfigDefaults()
def isValid(self):
    """ Check the sanity of the Computing Element definition

        :return: S_ERROR naming the first missing mandatory parameter,
                 S_OK() when all mandatory parameters are present
    """
    missing = [par for par in self.mandatoryParameters if par not in self.ceParameters]
    if missing:
        return S_ERROR('Missing Mandatory Parameter in Configuration: %s' % missing[0])
    return S_OK()
#############################################################################
def _addCEConfigDefaults(self):
    """Method to make sure all necessary Configuration Parameters are defined

       Fills WaitingToRunningRatio, MaxWaitingJobs and MaxTotalJobs with the
       module-level defaults when absent, coercing to float/int in any case.
    """
    params = self.ceParameters
    params['WaitingToRunningRatio'] = float(params.get('WaitingToRunningRatio', WAITING_TO_RUNNING_RATIO))
    params['MaxWaitingJobs'] = int(params.get('MaxWaitingJobs', MAX_WAITING_JOBS))
    params['MaxTotalJobs'] = int(params.get('MaxTotalJobs', MAX_TOTAL_JOBS))
def _reset(self):
    """ Make specific CE parameter adjustments after they are collected or added

        Hook for subclasses; the base implementation intentionally does nothing.
    """
    pass
def loadBatchSystem(self):
    """ Instantiate object representing the backend batch system

        The batch system name comes from the 'BatchSystem' CE parameter
        unless it was already set. On success self.batch and
        self.batchModuleFile are populated; on failure the ObjectLoader
        error is returned.
    """
    if self.batchSystem is None:
        self.batchSystem = self.ceParameters['BatchSystem']
    loadResult = ObjectLoader().loadObject(
        'Resources.Computing.BatchSystems.%s' % self.batchSystem, self.batchSystem)
    if not loadResult['OK']:
        gLogger.error('Failed to load batch object: %s' % loadResult['Message'])
        return loadResult
    self.batchModuleFile = loadResult['ModuleFile']
    self.batch = loadResult['Value']()
    self.log.info("Batch system class from module: ", self.batchModuleFile)
def setParameters(self, ceOptions):
    """ Add parameters from the given dictionary overriding the previous values

        :param dict ceOptions: CE parameters dictionary to update already defined ones
        :return: S_OK()
    """
    self.ceParameters.update(ceOptions)
    # At this point we can know the exact type of CE,
    # try to get generic parameters for this type
    ceType = self.ceParameters.get('CEType')
    if ceType:
        result = gConfig.getOptionsDict('/Resources/Computing/%s' % ceType)
        if result['OK']:
            # Type-level options act as defaults: already-set parameters win
            # because the current dict is merged on top of them.
            generalCEDict = result['Value']
            generalCEDict.update(self.ceParameters)
            self.ceParameters = generalCEDict
    # If NumberOfProcessors is present in the description but is equal to zero
    # interpret it as needing local evaluation
    if self.ceParameters.get("NumberOfProcessors", -1) == 0:
        self.ceParameters["NumberOfProcessors"] = multiprocessing.cpu_count()
    # Coerce only the keys that were passed in, as CS values may be strings.
    for key in ceOptions:
        if key in INTEGER_PARAMETERS:
            self.ceParameters[key] = int(self.ceParameters[key])
        if key in FLOAT_PARAMETERS:
            self.ceParameters[key] = float(self.ceParameters[key])
    # Let subclasses react to the updated parameter set.
    self._reset()
    return S_OK()
def getParameterDict(self):
    """ Get the CE complete parameter dictionary

        :return: dict of current CE parameters (a live reference, not a copy)
    """
    return self.ceParameters
#############################################################################
def setCPUTimeLeft(self, cpuTimeLeft=None):
    """Update the CPUTime parameter of the CE classAd, necessary for running in filling mode

       :param cpuTimeLeft: remaining CPU time; falsy values (None, 0) leave
                           the current setting untouched
       :return: S_OK(intValue) with the value applied, S_OK() if nothing to
                do, or S_ERROR for a non-convertible argument
    """
    if not cpuTimeLeft:
        # do nothing
        return S_OK()
    try:
        intCPUTimeLeft = int(cpuTimeLeft)
    except (ValueError, TypeError):
        # TypeError added: int() on e.g. a list previously escaped as an
        # unhandled exception instead of a clean error result.
        return S_ERROR('Wrong type for setCPUTimeLeft argument')
    self.ceParameters['CPUTime'] = intCPUTimeLeft
    return S_OK(intCPUTimeLeft)
#############################################################################
def available(self, jobIDList=None):
    """This method returns the number of available slots in the target CE. The CE
       instance polls for waiting and running jobs and compares to the limits
       in the CE parameters.

       :param jobIDList: list of already existing job IDs to be checked against
       :type jobIDList: python:list
       :return: S_OK with 'Value' = number of jobs that may still be submitted,
                plus 'Message' and 'CEInfoDict' entries; S_ERROR if the CE
                status query fails
    """
    # If there are no already registered jobs
    if jobIDList is not None and not jobIDList:
        result = S_OK()
        result['RunningJobs'] = 0
        result['WaitingJobs'] = 0
        result['SubmittedJobs'] = 0
    else:
        result = self.ceParameters.get('CEType')
        # CREAM CEs can be queried for a specific list of job IDs.
        if result and result == 'CREAM':
            result = self.getCEStatus(jobIDList)
        else:
            result = self.getCEStatus()
        if not result['OK']:
            return result
    runningJobs = result['RunningJobs']
    waitingJobs = result['WaitingJobs']
    submittedJobs = result['SubmittedJobs']
    availableProcessors = result.get('AvailableProcessors')
    ceInfoDict = dict(result)
    maxTotalJobs = int(self.ceParameters.get('MaxTotalJobs', 0))
    ceInfoDict['MaxTotalJobs'] = maxTotalJobs
    waitingToRunningRatio = float(self.ceParameters.get('WaitingToRunningRatio', 0.0))
    # if there are no Running job we can submit to get at most 'MaxWaitingJobs'
    # if there are Running jobs we can increase this to get a ratio W / R 'WaitingToRunningRatio'
    maxWaitingJobs = int(max(int(self.ceParameters.get('MaxWaitingJobs', 0)),
                             runningJobs * waitingToRunningRatio))
    self.log.verbose('Max Number of Jobs:', maxTotalJobs)
    self.log.verbose('Max W/R Ratio:', waitingToRunningRatio)
    self.log.verbose('Max Waiting Jobs:', maxWaitingJobs)
    # Determine how many more jobs can be submitted
    message = '%s CE: SubmittedJobs=%s' % (self.ceName, submittedJobs)
    message += ', WaitingJobs=%s, RunningJobs=%s' % (waitingJobs, runningJobs)
    totalJobs = runningJobs + waitingJobs
    message += ', MaxTotalJobs=%s' % (maxTotalJobs)
    if totalJobs >= maxTotalJobs:
        self.log.verbose('Max Number of Jobs reached:', maxTotalJobs)
        result['Value'] = 0
        message = 'There are %s waiting jobs and total jobs %s >= %s max total jobs' % (
            waitingJobs, totalJobs, maxTotalJobs)
    else:
        additionalJobs = 0
        if waitingJobs < maxWaitingJobs:
            additionalJobs = maxWaitingJobs - waitingJobs
            # Never let the new submissions push past the total-jobs cap.
            if totalJobs + additionalJobs >= maxTotalJobs:
                additionalJobs = maxTotalJobs - totalJobs
        # For SSH CE case
        if int(self.ceParameters.get('MaxWaitingJobs', 0)) == 0:
            additionalJobs = maxTotalJobs - runningJobs
        # Cap by free processor slots when the CE reports them.
        if availableProcessors is not None:
            additionalJobs = min(additionalJobs, availableProcessors)
        result['Value'] = additionalJobs
    result['Message'] = message
    result['CEInfoDict'] = ceInfoDict
    return result
#############################################################################
def writeProxyToFile(self, proxy):
    """CE helper function to write a CE proxy string to a file.

       :param str proxy: proxy string to persist
       :return: S_OK(path of the written proxy file) or S_ERROR
    """
    result = writeToProxyFile(proxy)
    if not result['OK']:
        self.log.error('Could not write proxy to file', result['Message'])
        return result
    proxyLocation = result['Value']
    result = getProxyInfoAsString(proxyLocation)
    if not result['OK']:
        self.log.error('Could not get proxy info', result)
        return result
    else:
        self.log.info('Payload proxy information:')
        # NOTE(review): Python 2 print statement -- this module is not
        # Python 3 compatible as written.
        print result['Value']
    return S_OK(proxyLocation)
#############################################################################
  def _monitorProxy(self, pilotProxy, payloadProxy):
    """Base class for the monitor and update of the payload proxy, to be used in
    derived classes for the basic renewal of the proxy, if further actions are
    necessary they should be implemented there

    :param pilotProxy: location of the pilot proxy file (may be empty/None)
    :param payloadProxy: location of the payload proxy file to keep valid
    :return: S_OK / S_ERROR structure
    """
    retVal = getProxyInfo(payloadProxy)
    if not retVal['OK']:
      self.log.error('Could not get payload proxy info', retVal)
      return retVal
    self.log.verbose('Payload Proxy information:\n%s' % formatProxyInfoAsString(retVal['Value']))
    payloadProxyDict = retVal['Value']
    payloadSecs = payloadProxyDict['chain'].getRemainingSecs()['Value']
    # Nothing to do while the payload proxy is still valid for longer than the threshold
    if payloadSecs > self.minProxyTime:
      self.log.verbose('No need to renew payload Proxy')
      return S_OK()
    # if there is no pilot proxy, assume there is a certificate and try a renewal
    if not pilotProxy:
      self.log.info('Using default credentials to get a new payload Proxy')
      # NOTE(review): pilotProxy is falsy here, so proxyToConnect is empty;
      # presumably gProxyManager falls back to host credentials - confirm.
      return gProxyManager.renewProxy(proxyToBeRenewed=payloadProxy, minLifeTime=self.minProxyTime,
                                      newProxyLifeTime=self.defaultProxyTime,
                                      proxyToConnect=pilotProxy)
    # if there is pilot proxy
    retVal = getProxyInfo(pilotProxy)
    if not retVal['OK']:
      return retVal
    pilotProxyDict = retVal['Value']
    if 'groupProperties' not in pilotProxyDict:
      self.log.error('Invalid Pilot Proxy', 'Group has no properties defined')
      return S_ERROR('Proxy has no group properties defined')
    pilotProps = pilotProxyDict['groupProperties']
    # if running with a pilot proxy, use it to renew the proxy of the payload
    if Properties.PILOT in pilotProps or Properties.GENERIC_PILOT in pilotProps:
      self.log.info('Using Pilot credentials to get a new payload Proxy')
      return gProxyManager.renewProxy(proxyToBeRenewed=payloadProxy, minLifeTime=self.minProxyTime,
                                      newProxyLifeTime=self.defaultProxyTime,
                                      proxyToConnect=pilotProxy)
    # if we are running with other type of proxy check if they are for the same user and group
    # and copy the pilot proxy if necessary
    self.log.info('Trying to copy pilot Proxy to get a new payload Proxy')
    pilotProxySecs = pilotProxyDict['chain'].getRemainingSecs()['Value']
    # Copying only makes sense if the pilot proxy outlives the payload proxy
    if pilotProxySecs <= payloadSecs:
      errorStr = 'Pilot Proxy is not longer than payload Proxy'
      self.log.error(errorStr)
      return S_ERROR('Can not renew by copy: %s' % errorStr)
    # check if both proxies belong to the same user and group
    pilotDN = pilotProxyDict['chain'].getIssuerCert()['Value'].getSubjectDN()['Value']
    retVal = pilotProxyDict['chain'].getDIRACGroup()
    if not retVal['OK']:
      return retVal
    pilotGroup = retVal['Value']
    payloadDN = payloadProxyDict['chain'].getIssuerCert()['Value'].getSubjectDN()['Value']
    retVal = payloadProxyDict['chain'].getDIRACGroup()
    if not retVal['OK']:
      return retVal
    payloadGroup = retVal['Value']
    if pilotDN != payloadDN or pilotGroup != payloadGroup:
      errorStr = 'Pilot Proxy and payload Proxy do not have same DN and Group'
      self.log.error(errorStr)
      return S_ERROR('Can not renew by copy: %s' % errorStr)
    # a pilot proxy that already carries VOMS attributes can be dumped directly
    if pilotProxyDict.get('hasVOMS', False):
      return pilotProxyDict['chain'].dumpAllToFile(payloadProxy)
    # otherwise attach the VOMS attributes matching the payload group before copying
    attribute = Registry.getVOMSAttributeForGroup(payloadGroup)
    vo = Registry.getVOMSVOForGroup(payloadGroup)
    retVal = VOMS().setVOMSAttributes(pilotProxyDict['chain'], attribute=attribute, vo=vo)
    if not retVal['OK']:
      return retVal
    chain = retVal['Value']
    return chain.dumpAllToFile(payloadProxy)
def getDescription(self):
""" Get CE description as a dictionary
"""
ceDict = {}
for option, value in self.ceParameters.iteritems():
if isinstance(value, list):
ceDict[option] = value
elif isinstance(value, basestring):
try:
ceDict[option] = int(value)
except ValueError:
ceDict[option] = value
elif isinstance(value, (int, long, float)):
ceDict[option] = value
else:
self.log.warn('Type of option %s = %s not determined' % (option, value))
release = gConfig.getValue('/LocalSite/ReleaseVersion', version)
ceDict['DIRACVersion'] = release
ceDict['ReleaseVersion'] = release
project = gConfig.getValue("/LocalSite/ReleaseProject", "")
if project:
ceDict['ReleaseProject'] = project
result = self.getCEStatus()
if result['OK']:
if 'AvailableProcessors' in result:
cores = result['AvailableProcessors']
ceDict['NumberOfProcessors'] = cores
return S_OK(ceDict)
#############################################################################
def sendOutput(self, stdid, line): # pylint: disable=unused-argument, no-self-use
""" Callback function such that the results from the CE may be returned.
"""
print line
#############################################################################
def submitJob(self, executableFile, proxy, dummy=None, processors=1): # pylint: disable=unused-argument
""" Method to submit job, should be overridden in sub-class.
"""
name = 'submitJob()'
self.log.error('ComputingElement should be implemented in a subclass', name)
return S_ERROR('ComputingElement: %s should be implemented in a subclass' % (name))
#############################################################################
def getCEStatus(self, jobIDList=None): # pylint: disable=unused-argument
""" Method to get dynamic job information, can be overridden in sub-class.
"""
name = 'getCEStatus()'
self.log.error('ComputingElement should be implemented in a subclass', name)
return S_ERROR('ComputingElement: %s should be implemented in a subclass' % (name))
def getCEConfigDict(ceName):
  """Look into LocalSite for configuration Parameters for this CE
  """
  # Guard clauses: an empty CE name or a failed lookup yields an empty dict.
  if not ceName:
    return {}
  result = gConfig.getOptionsDict('/LocalSite/%s' % ceName)
  if not result['OK']:
    return {}
  return result['Value']
|
andresailer/DIRAC
|
Resources/Computing/ComputingElement.py
|
Python
|
gpl-3.0
| 19,133
|
[
"DIRAC"
] |
004b16ef32b37f5edc914ec29527b91c928b743112043616036efd82eadd2a28
|
# Copyright 2016 The TensorFlow Authors. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# ==============================================================================
"""Utilities for testing `LinearOperator` and sub-classes."""
from __future__ import absolute_import
from __future__ import division
from __future__ import print_function
import abc
import itertools
import numpy as np
import six
from tensorflow.python.eager import backprop
from tensorflow.python.eager import context
from tensorflow.python.eager import def_function
from tensorflow.python.framework import composite_tensor
from tensorflow.python.framework import dtypes
from tensorflow.python.framework import ops
from tensorflow.python.framework import random_seed
from tensorflow.python.framework import tensor_shape
from tensorflow.python.framework import tensor_util
from tensorflow.python.framework import test_util
from tensorflow.python.module import module
from tensorflow.python.ops import array_ops
from tensorflow.python.ops import linalg_ops
from tensorflow.python.ops import math_ops
from tensorflow.python.ops import random_ops
from tensorflow.python.ops import sort_ops
from tensorflow.python.ops import variables
from tensorflow.python.ops import while_v2
from tensorflow.python.ops.linalg import linalg_impl as linalg
from tensorflow.python.ops.linalg import linear_operator_util
from tensorflow.python.platform import test
from tensorflow.python.saved_model import load as load_model
from tensorflow.python.saved_model import nested_structure_coder
from tensorflow.python.saved_model import save as save_model
from tensorflow.python.util import nest
class OperatorShapesInfo(object):
  """Container describing the matrix shape a test case should use.

  Stores the expected (batch) shape of the matrix under test; any extra
  keyword arguments are attached verbatim as attributes so the test harness
  can carry per-test metadata.
  """

  def __init__(self, shape, **kwargs):
    self.shape = shape
    # Expose every extra keyword argument as a plain instance attribute.
    for attr_name, attr_value in kwargs.items():
      setattr(self, attr_name, attr_value)
class CheckTapeSafeSkipOptions(object):
  """Names of `LinearOperator` methods that `check_tape_safe` may skip."""
  # Skip checking this particular method.
  # Each constant is the exact method name looked up via getattr() in
  # LinearOperatorDerivedClassTest.check_tape_safe.
  DETERMINANT = "determinant"
  DIAG_PART = "diag_part"
  LOG_ABS_DETERMINANT = "log_abs_determinant"
  TRACE = "trace"
@six.add_metaclass(abc.ABCMeta)  # pylint: disable=no-init
class LinearOperatorDerivedClassTest(test.TestCase):
  """Tests for derived classes.
  Subclasses should implement every abstractmethod, and this will enable all
  test methods to work.
  """
  # Absolute/relative tolerance for tests.
  # Keyed by dtype; assertAC looks these up per comparison. Subclasses may
  # override _atol/_rtol to loosen or tighten tolerances.
  _atol = {
      dtypes.float16: 1e-3,
      dtypes.float32: 1e-6,
      dtypes.float64: 1e-12,
      dtypes.complex64: 1e-6,
      dtypes.complex128: 1e-12
  }
  _rtol = {
      dtypes.float16: 1e-3,
      dtypes.float32: 1e-6,
      dtypes.float64: 1e-12,
      dtypes.complex64: 1e-6,
      dtypes.complex128: 1e-12
  }
  def assertAC(self, x, y, check_dtype=False):
    """Derived classes can set _atol, _rtol to get different tolerance.

    Asserts x and y are all-close under the per-dtype tolerances; when
    `check_dtype` is True also asserts the dtypes match.
    """
    dtype = dtypes.as_dtype(x.dtype)
    atol = self._atol[dtype]
    rtol = self._rtol[dtype]
    self.assertAllClose(x, y, atol=atol, rtol=rtol)
    if check_dtype:
      self.assertDTypeEqual(x, y.dtype)
  @staticmethod
  def adjoint_options():
    # Values of `adjoint` to parameterize matmul/solve tests over.
    return [False, True]
  @staticmethod
  def adjoint_arg_options():
    # Values of `adjoint_arg` to parameterize matmul/solve tests over.
    return [False, True]
  @staticmethod
  def dtypes_to_test():
    # TODO(langmore) Test tf.float16 once tf.linalg.solve works in 16bit.
    return [dtypes.float32, dtypes.float64, dtypes.complex64, dtypes.complex128]
  @staticmethod
  def use_placeholder_options():
    # Whether to feed the operator through a placeholder of unknown shape.
    return [False, True]
  @staticmethod
  def use_blockwise_arg():
    # Whether matmul/solve tests also exercise blockwise (list-valued) args;
    # blockwise operator subclasses override this to return True.
    return False
  @staticmethod
  def operator_shapes_infos():
    """Returns list of OperatorShapesInfo, encapsulating the shape to test."""
    raise NotImplementedError("operator_shapes_infos has not been implemented.")
  @abc.abstractmethod
  def operator_and_matrix(
      self, shapes_info, dtype, use_placeholder,
      ensure_self_adjoint_and_pd=False):
    """Build a batch matrix and an Operator that should have similar behavior.
    Every operator acts like a (batch) matrix.  This method returns both
    together, and is used by tests.
    Args:
      shapes_info: `OperatorShapesInfo`, encoding shape information about the
        operator.
      dtype:  Numpy dtype.  Data type of returned array/operator.
      use_placeholder:  Python bool.  If True, initialize the operator with a
        placeholder of undefined shape and correct dtype.
      ensure_self_adjoint_and_pd: If `True`,
        construct this operator to be Hermitian Positive Definite, as well
        as ensuring the hints `is_positive_definite` and `is_self_adjoint`
        are set.
        This is useful for testing methods such as `cholesky`.
    Returns:
      operator:  `LinearOperator` subclass instance.
      mat:  `Tensor` representing operator.
    """
    # Create a matrix as a numpy array with desired shape/dtype.
    # Create a LinearOperator that should have the same behavior as the matrix.
    raise NotImplementedError("Not implemented yet.")
  @abc.abstractmethod
  def make_rhs(self, operator, adjoint, with_batch=True):
    """Make a rhs appropriate for calling operator.solve(rhs).
    Args:
      operator:  A `LinearOperator`
      adjoint:  Python `bool`.  If `True`, we are making a 'rhs' value for the
        adjoint operator.
      with_batch: Python `bool`. If `True`, create `rhs` with the same batch
        shape as operator, and otherwise create a matrix without any batch
        shape.
    Returns:
      A `Tensor`
    """
    raise NotImplementedError("make_rhs is not defined.")
  @abc.abstractmethod
  def make_x(self, operator, adjoint, with_batch=True):
    """Make an 'x' appropriate for calling operator.matmul(x).
    Args:
      operator:  A `LinearOperator`
      adjoint:  Python `bool`.  If `True`, we are making an 'x' value for the
        adjoint operator.
      with_batch: Python `bool`. If `True`, create `x` with the same batch shape
        as operator, and otherwise create a matrix without any batch shape.
    Returns:
      A `Tensor`
    """
    raise NotImplementedError("make_x is not defined.")
  @staticmethod
  def skip_these_tests():
    """List of test names to skip."""
    # Subclasses should over-ride if they want to skip some tests.
    # To skip "test_foo", add "foo" to this list.
    return []
  def assertRaisesError(self, msg):
    """assertRaisesRegexp or OpError, depending on context.executing_eagerly."""
    if context.executing_eagerly():
      return self.assertRaisesRegexp(Exception, msg)
    return self.assertRaisesOpError(msg)
  def check_tape_safe(self, operator, skip_options=None):
    """Check gradients are not None w.r.t. operator.variables.
    Meant to be called from the derived class.
    This ensures grads are not None w.r.t. every variable in
    operator.variables.  If more fine-grained testing is needed, a custom test
    should be written.
    Args:
      operator: LinearOperator.  Exact checks done will depend on hints.
      skip_options: Optional list of CheckTapeSafeSkipOptions.
        Makes this test skip particular checks.
    """
    skip_options = skip_options or []
    if not operator.variables:
      raise AssertionError("`operator.variables` was empty")
    def _assert_not_none(iterable):
      # Helper: every gradient in `iterable` must be non-None.
      for item in iterable:
        self.assertIsNotNone(item)
    # Tape tests that can be run on every operator below.
    with backprop.GradientTape() as tape:
      _assert_not_none(tape.gradient(operator.to_dense(), operator.variables))
    with backprop.GradientTape() as tape:
      _assert_not_none(
          tape.gradient(operator.adjoint().to_dense(), operator.variables))
    # A vector shaped like one row of the adjoint, used for matvec/solvevec.
    x = math_ops.cast(
        array_ops.ones(shape=operator.H.shape_tensor()[:-1]), operator.dtype)
    with backprop.GradientTape() as tape:
      _assert_not_none(tape.gradient(operator.matvec(x), operator.variables))
    # Tests for square, but possibly non-singular operators below.
    if not operator.is_square:
      return
    for option in [
        CheckTapeSafeSkipOptions.DETERMINANT,
        CheckTapeSafeSkipOptions.LOG_ABS_DETERMINANT,
        CheckTapeSafeSkipOptions.DIAG_PART,
        CheckTapeSafeSkipOptions.TRACE,
    ]:
      with backprop.GradientTape() as tape:
        if option not in skip_options:
          # `option` is the method name; call it via getattr.
          _assert_not_none(
              tape.gradient(getattr(operator, option)(), operator.variables))
    # Tests for non-singular operators below.
    if operator.is_non_singular is False:  # pylint: disable=g-bool-id-comparison
      return
    with backprop.GradientTape() as tape:
      _assert_not_none(
          tape.gradient(operator.inverse().to_dense(), operator.variables))
    with backprop.GradientTape() as tape:
      _assert_not_none(tape.gradient(operator.solvevec(x), operator.variables))
    # Tests for SPD operators below.
    if not (operator.is_self_adjoint and operator.is_positive_definite):
      return
    with backprop.GradientTape() as tape:
      _assert_not_none(
          tape.gradient(operator.cholesky().to_dense(), operator.variables))
# pylint:disable=missing-docstring
def _test_to_dense(use_placeholder, shapes_info, dtype):
def test_to_dense(self):
with self.session(graph=ops.Graph()) as sess:
sess.graph.seed = random_seed.DEFAULT_GRAPH_SEED
operator, mat = self.operator_and_matrix(
shapes_info, dtype, use_placeholder=use_placeholder)
op_dense = operator.to_dense()
if not use_placeholder:
self.assertAllEqual(shapes_info.shape, op_dense.shape)
op_dense_v, mat_v = sess.run([op_dense, mat])
self.assertAC(op_dense_v, mat_v)
return test_to_dense
def _test_det(use_placeholder, shapes_info, dtype):
def test_det(self):
with self.session(graph=ops.Graph()) as sess:
sess.graph.seed = random_seed.DEFAULT_GRAPH_SEED
operator, mat = self.operator_and_matrix(
shapes_info, dtype, use_placeholder=use_placeholder)
op_det = operator.determinant()
if not use_placeholder:
self.assertAllEqual(shapes_info.shape[:-2], op_det.shape)
op_det_v, mat_det_v = sess.run(
[op_det, linalg_ops.matrix_determinant(mat)])
self.assertAC(op_det_v, mat_det_v)
return test_det
def _test_log_abs_det(use_placeholder, shapes_info, dtype):
def test_log_abs_det(self):
with self.session(graph=ops.Graph()) as sess:
sess.graph.seed = random_seed.DEFAULT_GRAPH_SEED
operator, mat = self.operator_and_matrix(
shapes_info, dtype, use_placeholder=use_placeholder)
op_log_abs_det = operator.log_abs_determinant()
_, mat_log_abs_det = linalg.slogdet(mat)
if not use_placeholder:
self.assertAllEqual(
shapes_info.shape[:-2], op_log_abs_det.shape)
op_log_abs_det_v, mat_log_abs_det_v = sess.run(
[op_log_abs_det, mat_log_abs_det])
self.assertAC(op_log_abs_det_v, mat_log_abs_det_v)
return test_log_abs_det
def _test_matmul_base(
    self,
    use_placeholder,
    shapes_info,
    dtype,
    adjoint,
    adjoint_arg,
    blockwise_arg,
    with_batch):
  """Shared body for the matmul tests.

  Compares `operator.matmul(x)` (optionally with `adjoint`/`adjoint_arg`, and
  optionally with a blockwise list-valued `x`) against `math_ops.matmul` on
  the dense matrix. `with_batch` controls whether `x` carries the operator's
  batch shape.
  """
  # If batch dimensions are omitted, but there are
  # no batch dimensions for the linear operator, then
  # skip the test case. This is already checked with
  # with_batch=True.
  if not with_batch and len(shapes_info.shape) <= 2:
    return
  with self.session(graph=ops.Graph()) as sess:
    sess.graph.seed = random_seed.DEFAULT_GRAPH_SEED
    operator, mat = self.operator_and_matrix(
        shapes_info, dtype, use_placeholder=use_placeholder)
    x = self.make_x(
        operator, adjoint=adjoint, with_batch=with_batch)
    # If adjoint_arg, compute A X^H^H = A X.
    if adjoint_arg:
      op_matmul = operator.matmul(
          linalg.adjoint(x),
          adjoint=adjoint,
          adjoint_arg=adjoint_arg)
    else:
      op_matmul = operator.matmul(x, adjoint=adjoint)
    mat_matmul = math_ops.matmul(mat, x, adjoint_a=adjoint)
    if not use_placeholder:
      self.assertAllEqual(op_matmul.shape,
                          mat_matmul.shape)
    # If the operator is blockwise, test both blockwise `x` and `Tensor` `x`;
    # else test only `Tensor` `x`. In both cases, evaluate all results in a
    # single `sess.run` call to avoid re-sampling the random `x` in graph mode.
    if blockwise_arg and len(operator.operators) > 1:
      # pylint: disable=protected-access
      block_dimensions = (
          operator._block_range_dimensions() if adjoint else
          operator._block_domain_dimensions())
      block_dimensions_fn = (
          operator._block_range_dimension_tensors if adjoint else
          operator._block_domain_dimension_tensors)
      # pylint: enable=protected-access
      split_x = linear_operator_util.split_arg_into_blocks(
          block_dimensions,
          block_dimensions_fn,
          x, axis=-2)
      if adjoint_arg:
        split_x = [linalg.adjoint(y) for y in split_x]
      split_matmul = operator.matmul(
          split_x, adjoint=adjoint, adjoint_arg=adjoint_arg)
      # Blockwise matmul returns one block per sub-operator.
      self.assertEqual(len(split_matmul), len(operator.operators))
      split_matmul = linear_operator_util.broadcast_matrix_batch_dims(
          split_matmul)
      fused_block_matmul = array_ops.concat(split_matmul, axis=-2)
      op_matmul_v, mat_matmul_v, fused_block_matmul_v = sess.run([
          op_matmul, mat_matmul, fused_block_matmul])
      # Check that the operator applied to blockwise input gives the same result
      # as matrix multiplication.
      self.assertAC(fused_block_matmul_v, mat_matmul_v)
    else:
      op_matmul_v, mat_matmul_v = sess.run([op_matmul, mat_matmul])
      # Check that the operator applied to a `Tensor` gives the same result as
      # matrix multiplication.
      self.assertAC(op_matmul_v, mat_matmul_v)
def _test_matmul(
use_placeholder,
shapes_info,
dtype,
adjoint,
adjoint_arg,
blockwise_arg):
def test_matmul(self):
_test_matmul_base(
self,
use_placeholder,
shapes_info,
dtype,
adjoint,
adjoint_arg,
blockwise_arg,
with_batch=True)
return test_matmul
def _test_matmul_with_broadcast(
use_placeholder,
shapes_info,
dtype,
adjoint,
adjoint_arg,
blockwise_arg):
def test_matmul_with_broadcast(self):
_test_matmul_base(
self,
use_placeholder,
shapes_info,
dtype,
adjoint,
adjoint_arg,
blockwise_arg,
with_batch=True)
return test_matmul_with_broadcast
def _test_adjoint(use_placeholder, shapes_info, dtype):
def test_adjoint(self):
with self.test_session(graph=ops.Graph()) as sess:
sess.graph.seed = random_seed.DEFAULT_GRAPH_SEED
operator, mat = self.operator_and_matrix(
shapes_info, dtype, use_placeholder=use_placeholder)
op_adjoint = operator.adjoint().to_dense()
op_adjoint_h = operator.H.to_dense()
mat_adjoint = linalg.adjoint(mat)
op_adjoint_v, op_adjoint_h_v, mat_adjoint_v = sess.run(
[op_adjoint, op_adjoint_h, mat_adjoint])
self.assertAC(mat_adjoint_v, op_adjoint_v)
self.assertAC(mat_adjoint_v, op_adjoint_h_v)
return test_adjoint
def _test_cholesky(use_placeholder, shapes_info, dtype):
def test_cholesky(self):
with self.test_session(graph=ops.Graph()) as sess:
# This test fails to pass for float32 type by a small margin if we use
# random_seed.DEFAULT_GRAPH_SEED. The correct fix would be relaxing the
# test tolerance but the tolerance in this test is configured universally
# depending on its type. So instead of lowering tolerance for all tests
# or special casing this, just use a seed, +2, that makes this test pass.
sess.graph.seed = random_seed.DEFAULT_GRAPH_SEED + 2
operator, mat = self.operator_and_matrix(
shapes_info, dtype, use_placeholder=use_placeholder,
ensure_self_adjoint_and_pd=True)
op_chol = operator.cholesky().to_dense()
mat_chol = linalg_ops.cholesky(mat)
op_chol_v, mat_chol_v = sess.run([op_chol, mat_chol])
self.assertAC(mat_chol_v, op_chol_v)
return test_cholesky
def _test_eigvalsh(use_placeholder, shapes_info, dtype):
def test_eigvalsh(self):
with self.test_session(graph=ops.Graph()) as sess:
sess.graph.seed = random_seed.DEFAULT_GRAPH_SEED
operator, mat = self.operator_and_matrix(
shapes_info, dtype, use_placeholder=use_placeholder,
ensure_self_adjoint_and_pd=True)
# Eigenvalues are real, so we'll cast these to float64 and sort
# for comparison.
op_eigvals = sort_ops.sort(
math_ops.cast(operator.eigvals(), dtype=dtypes.float64), axis=-1)
if dtype.is_complex:
mat = math_ops.cast(mat, dtype=dtypes.complex128)
else:
mat = math_ops.cast(mat, dtype=dtypes.float64)
mat_eigvals = sort_ops.sort(
math_ops.cast(
linalg_ops.self_adjoint_eigvals(mat), dtype=dtypes.float64),
axis=-1)
op_eigvals_v, mat_eigvals_v = sess.run([op_eigvals, mat_eigvals])
atol = self._atol[dtype] # pylint: disable=protected-access
rtol = self._rtol[dtype] # pylint: disable=protected-access
if dtype == dtypes.float32 or dtype == dtypes.complex64:
atol = 2e-4
rtol = 2e-4
self.assertAllClose(op_eigvals_v, mat_eigvals_v, atol=atol, rtol=rtol)
return test_eigvalsh
def _test_cond(use_placeholder, shapes_info, dtype):
def test_cond(self):
with self.test_session(graph=ops.Graph()) as sess:
# svd does not work with zero dimensional matrices, so we'll
# skip
if 0 in shapes_info.shape[-2:]:
return
# ROCm platform does not yet support complex types
if test.is_built_with_rocm() and \
((dtype == dtypes.complex64) or (dtype == dtypes.complex128)):
return
sess.graph.seed = random_seed.DEFAULT_GRAPH_SEED
# Ensure self-adjoint and PD so we get finite condition numbers.
operator, mat = self.operator_and_matrix(
shapes_info, dtype, use_placeholder=use_placeholder,
ensure_self_adjoint_and_pd=True)
# Eigenvalues are real, so we'll cast these to float64 and sort
# for comparison.
op_cond = operator.cond()
s = math_ops.abs(linalg_ops.svd(mat, compute_uv=False))
mat_cond = math_ops.reduce_max(s, axis=-1) / math_ops.reduce_min(
s, axis=-1)
op_cond_v, mat_cond_v = sess.run([op_cond, mat_cond])
atol_override = {
dtypes.float16: 1e-2,
dtypes.float32: 1e-3,
dtypes.float64: 1e-6,
dtypes.complex64: 1e-3,
dtypes.complex128: 1e-6,
}
rtol_override = {
dtypes.float16: 1e-2,
dtypes.float32: 1e-3,
dtypes.float64: 1e-4,
dtypes.complex64: 1e-3,
dtypes.complex128: 1e-6,
}
atol = atol_override[dtype]
rtol = rtol_override[dtype]
self.assertAllClose(op_cond_v, mat_cond_v, atol=atol, rtol=rtol)
return test_cond
def _test_solve_base(
    self,
    use_placeholder,
    shapes_info,
    dtype,
    adjoint,
    adjoint_arg,
    blockwise_arg,
    with_batch):
  """Shared body for the solve tests.

  Compares `operator.solve(rhs)` (optionally with `adjoint`/`adjoint_arg`,
  and optionally with a blockwise list-valued rhs) against
  `matrix_solve_with_broadcast` on the dense matrix. `with_batch` controls
  whether `rhs` carries the operator's batch shape.
  """
  # If batch dimensions are omitted, but there are
  # no batch dimensions for the linear operator, then
  # skip the test case. This is already checked with
  # with_batch=True.
  if not with_batch and len(shapes_info.shape) <= 2:
    return
  with self.session(graph=ops.Graph()) as sess:
    sess.graph.seed = random_seed.DEFAULT_GRAPH_SEED
    operator, mat = self.operator_and_matrix(
        shapes_info, dtype, use_placeholder=use_placeholder)
    rhs = self.make_rhs(
        operator, adjoint=adjoint, with_batch=with_batch)
    # If adjoint_arg, solve A X = (rhs^H)^H = rhs.
    if adjoint_arg:
      op_solve = operator.solve(
          linalg.adjoint(rhs),
          adjoint=adjoint,
          adjoint_arg=adjoint_arg)
    else:
      op_solve = operator.solve(
          rhs, adjoint=adjoint, adjoint_arg=adjoint_arg)
    mat_solve = linear_operator_util.matrix_solve_with_broadcast(
        mat, rhs, adjoint=adjoint)
    if not use_placeholder:
      self.assertAllEqual(op_solve.shape,
                          mat_solve.shape)
    # If the operator is blockwise, test both blockwise rhs and `Tensor` rhs;
    # else test only `Tensor` rhs. In both cases, evaluate all results in a
    # single `sess.run` call to avoid re-sampling the random rhs in graph mode.
    if blockwise_arg and len(operator.operators) > 1:
      # pylint: disable=protected-access
      block_dimensions = (
          operator._block_range_dimensions() if adjoint else
          operator._block_domain_dimensions())
      block_dimensions_fn = (
          operator._block_range_dimension_tensors if adjoint else
          operator._block_domain_dimension_tensors)
      # pylint: enable=protected-access
      split_rhs = linear_operator_util.split_arg_into_blocks(
          block_dimensions,
          block_dimensions_fn,
          rhs, axis=-2)
      if adjoint_arg:
        split_rhs = [linalg.adjoint(y) for y in split_rhs]
      split_solve = operator.solve(
          split_rhs, adjoint=adjoint, adjoint_arg=adjoint_arg)
      # Blockwise solve returns one block per sub-operator.
      self.assertEqual(len(split_solve), len(operator.operators))
      split_solve = linear_operator_util.broadcast_matrix_batch_dims(
          split_solve)
      fused_block_solve = array_ops.concat(split_solve, axis=-2)
      op_solve_v, mat_solve_v, fused_block_solve_v = sess.run([
          op_solve, mat_solve, fused_block_solve])
      # Check that the operator and matrix give the same solution when the rhs
      # is blockwise.
      self.assertAC(mat_solve_v, fused_block_solve_v)
    else:
      op_solve_v, mat_solve_v = sess.run([op_solve, mat_solve])
      # Check that the operator and matrix give the same solution when the rhs is
      # a `Tensor`.
      self.assertAC(op_solve_v, mat_solve_v)
def _test_solve(
use_placeholder, shapes_info, dtype, adjoint, adjoint_arg, blockwise_arg):
def test_solve(self):
_test_solve_base(
self,
use_placeholder,
shapes_info,
dtype,
adjoint,
adjoint_arg,
blockwise_arg,
with_batch=True)
return test_solve
def _test_solve_with_broadcast(
use_placeholder, shapes_info, dtype, adjoint, adjoint_arg, blockwise_arg):
def test_solve_with_broadcast(self):
_test_solve_base(
self,
use_placeholder,
shapes_info,
dtype,
adjoint,
adjoint_arg,
blockwise_arg,
with_batch=False)
return test_solve_with_broadcast
def _test_inverse(use_placeholder, shapes_info, dtype):
def test_inverse(self):
with self.session(graph=ops.Graph()) as sess:
sess.graph.seed = random_seed.DEFAULT_GRAPH_SEED
operator, mat = self.operator_and_matrix(
shapes_info, dtype, use_placeholder=use_placeholder)
op_inverse_v, mat_inverse_v = sess.run([
operator.inverse().to_dense(), linalg.inv(mat)])
self.assertAC(op_inverse_v, mat_inverse_v, check_dtype=True)
return test_inverse
def _test_trace(use_placeholder, shapes_info, dtype):
def test_trace(self):
with self.session(graph=ops.Graph()) as sess:
sess.graph.seed = random_seed.DEFAULT_GRAPH_SEED
operator, mat = self.operator_and_matrix(
shapes_info, dtype, use_placeholder=use_placeholder)
op_trace = operator.trace()
mat_trace = math_ops.trace(mat)
if not use_placeholder:
self.assertAllEqual(op_trace.shape, mat_trace.shape)
op_trace_v, mat_trace_v = sess.run([op_trace, mat_trace])
self.assertAC(op_trace_v, mat_trace_v)
return test_trace
def _test_add_to_tensor(use_placeholder, shapes_info, dtype):
def test_add_to_tensor(self):
with self.session(graph=ops.Graph()) as sess:
sess.graph.seed = random_seed.DEFAULT_GRAPH_SEED
operator, mat = self.operator_and_matrix(
shapes_info, dtype, use_placeholder=use_placeholder)
op_plus_2mat = operator.add_to_tensor(2 * mat)
if not use_placeholder:
self.assertAllEqual(shapes_info.shape, op_plus_2mat.shape)
op_plus_2mat_v, mat_v = sess.run([op_plus_2mat, mat])
self.assertAC(op_plus_2mat_v, 3 * mat_v)
return test_add_to_tensor
def _test_diag_part(use_placeholder, shapes_info, dtype):
def test_diag_part(self):
with self.session(graph=ops.Graph()) as sess:
sess.graph.seed = random_seed.DEFAULT_GRAPH_SEED
operator, mat = self.operator_and_matrix(
shapes_info, dtype, use_placeholder=use_placeholder)
op_diag_part = operator.diag_part()
mat_diag_part = array_ops.matrix_diag_part(mat)
if not use_placeholder:
self.assertAllEqual(mat_diag_part.shape,
op_diag_part.shape)
op_diag_part_, mat_diag_part_ = sess.run(
[op_diag_part, mat_diag_part])
self.assertAC(op_diag_part_, mat_diag_part_)
return test_diag_part
def _test_composite_tensor(use_placeholder, shapes_info, dtype):
  """Build a test that the operator behaves as a `CompositeTensor`.

  Checks `nest` flatten/pack round-trip, use as a `tf.function` argument,
  use as a `while_loop` loop variable, and `TypeSpec` encodability.
  """
  def test_composite_tensor(self):
    with self.session(graph=ops.Graph()) as sess:
      sess.graph.seed = random_seed.DEFAULT_GRAPH_SEED
      operator, mat = self.operator_and_matrix(
          shapes_info, dtype, use_placeholder=use_placeholder)
      self.assertIsInstance(operator, composite_tensor.CompositeTensor)
      # Round-trip through nest; the rebuilt object must have the same type.
      flat = nest.flatten(operator, expand_composites=True)
      unflat = nest.pack_sequence_as(operator, flat, expand_composites=True)
      self.assertIsInstance(unflat, type(operator))
      # Input the operator to a `tf.function`.
      x = self.make_x(operator, adjoint=False)
      op_y = def_function.function(lambda op: op.matmul(x))(unflat)
      mat_y = math_ops.matmul(mat, x)
      if not use_placeholder:
        self.assertAllEqual(mat_y.shape, op_y.shape)
      # Test while_loop.
      def body(op):
        # Rebuild the operator from its own parameters each iteration.
        return type(op)(**op.parameters),
      op_out, = while_v2.while_loop(
          cond=lambda _: True,
          body=body,
          loop_vars=(operator,),
          maximum_iterations=3)
      loop_y = op_out.matmul(x)
      op_y_, loop_y_, mat_y_ = sess.run([op_y, loop_y, mat_y])
      self.assertAC(op_y_, mat_y_)
      self.assertAC(loop_y_, mat_y_)
      # Ensure that the `TypeSpec` can be encoded.
      struct_coder = nested_structure_coder.StructureCoder()
      struct_coder.encode_structure(operator._type_spec)  # pylint: disable=protected-access
  return test_composite_tensor
def _test_saved_model(use_placeholder, shapes_info, dtype):
  """Build a test that the operator survives a SavedModel round-trip.

  Saves a Module whose `tf.function` takes the operator's `TypeSpec` as input
  signature, reloads it, and checks matmul results against the dense matrix.
  """
  def test_saved_model(self):
    with self.session(graph=ops.Graph()) as sess:
      sess.graph.seed = random_seed.DEFAULT_GRAPH_SEED
      operator, mat = self.operator_and_matrix(
          shapes_info, dtype, use_placeholder=use_placeholder)
      x = self.make_x(operator, adjoint=False)
      class Model(module.Module):
        def __init__(self, init_x):
          # Hold `x` as (a structure of) variables so it is tracked/saved.
          self.x = nest.map_structure(
              lambda x_: variables.Variable(x_, shape=None),
              init_x)
        @def_function.function(input_signature=(operator._type_spec,))  # pylint: disable=protected-access
        def do_matmul(self, op):
          return op.matmul(self.x)
      saved_model_dir = self.get_temp_dir()
      m1 = Model(x)
      sess.run([v.initializer for v in m1.variables])
      # Mutate before saving so the saved value differs from init.
      sess.run(m1.x.assign(m1.x + 1.))
      save_model.save(m1, saved_model_dir)
      m2 = load_model.load(saved_model_dir)
      sess.run(m2.x.initializer)
      # Mutate again after loading; the comparison below uses m2.x.
      sess.run(m2.x.assign(m2.x + 1.))
      y_op = m2.do_matmul(operator)
      y_mat = math_ops.matmul(mat, m2.x)
      y_op_, y_mat_ = sess.run([y_op, y_mat])
      self.assertAC(y_op_, y_mat_)
  return test_saved_model
# pylint:enable=missing-docstring
def add_tests(test_cls):
  """Add tests for LinearOperator methods.

  For every (method, dtype, use_placeholder, shape) combination not listed in
  `test_cls.skip_these_tests()`, builds a test method via the corresponding
  factory and attaches it to `test_cls` via setattr. Methods taking
  adjoint/adjoint_arg are additionally expanded over those options.
  """
  # Maps the short method name to its test-factory function.
  test_name_dict = {
      "add_to_tensor": _test_add_to_tensor,
      "cholesky": _test_cholesky,
      "cond": _test_cond,
      "composite_tensor": _test_composite_tensor,
      "det": _test_det,
      "diag_part": _test_diag_part,
      "eigvalsh": _test_eigvalsh,
      "inverse": _test_inverse,
      "log_abs_det": _test_log_abs_det,
      "matmul": _test_matmul,
      "matmul_with_broadcast": _test_matmul_with_broadcast,
      "saved_model": _test_saved_model,
      "solve": _test_solve,
      "solve_with_broadcast": _test_solve_with_broadcast,
      "to_dense": _test_to_dense,
      "trace": _test_trace,
  }
  # These factories take the extra adjoint/adjoint_arg/blockwise parameters.
  tests_with_adjoint_args = [
      "matmul",
      "matmul_with_broadcast",
      "solve",
      "solve_with_broadcast",
  ]
  for name, test_template_fn in test_name_dict.items():
    if name in test_cls.skip_these_tests():
      continue
    for dtype, use_placeholder, shape_info in itertools.product(
        test_cls.dtypes_to_test(),
        test_cls.use_placeholder_options(),
        test_cls.operator_shapes_infos()):
      base_test_name = "_".join([
          "test", name, "_shape={},dtype={},use_placeholder={}".format(
              shape_info.shape, dtype, use_placeholder)])
      if name in tests_with_adjoint_args:
        for adjoint in test_cls.adjoint_options():
          for adjoint_arg in test_cls.adjoint_arg_options():
            test_name = base_test_name + ",adjoint={},adjoint_arg={}".format(
                adjoint, adjoint_arg)
            # Guard against accidental name collisions between combinations.
            if hasattr(test_cls, test_name):
              raise RuntimeError("Test %s defined more than once" % test_name)
            setattr(
                test_cls,
                test_name,
                test_util.run_deprecated_v1(
                    test_template_fn(  # pylint: disable=too-many-function-args
                        use_placeholder, shape_info, dtype, adjoint,
                        adjoint_arg, test_cls.use_blockwise_arg())))
      else:
        # Guard against accidental name collisions between combinations.
        if hasattr(test_cls, base_test_name):
          raise RuntimeError("Test %s defined more than once" % base_test_name)
        setattr(
            test_cls,
            base_test_name,
            test_util.run_deprecated_v1(test_template_fn(
                use_placeholder, shape_info, dtype)))
@six.add_metaclass(abc.ABCMeta)
class SquareLinearOperatorDerivedClassTest(LinearOperatorDerivedClassTest):
  """Base test class appropriate for square operators.

  Sub-classes must still define all abstractmethods from
  LinearOperatorDerivedClassTest that are not defined here.
  """

  @staticmethod
  def operator_shapes_infos():
    # Square (n, n) shapes: non-batch and batch, including the degenerate
    # (0, 0) and (1, 1) cases.
    return [
        OperatorShapesInfo((0, 0)),
        OperatorShapesInfo((1, 1)),
        OperatorShapesInfo((1, 3, 3)),
        OperatorShapesInfo((3, 4, 4)),
        OperatorShapesInfo((2, 1, 4, 4))]

  def make_rhs(self, operator, adjoint, with_batch=True):
    """Return a random rhs for `operator.solve(rhs, ...)`."""
    # The operator is square, so rhs and x have the same shape and we can
    # delegate to make_x.  Flipping `adjoint` is purely pedantic; it does
    # not change the shape for a square operator.
    return self.make_x(operator, adjoint=not adjoint, with_batch=with_batch)

  def make_x(self, operator, adjoint, with_batch=True):
    """Return a random x of shape [B1,...,Bb, N, R] (R is 1 or 2).

    If `with_batch` is False the batch dimensions are dropped.
    The value of `adjoint` is irrelevant because the operator is square.
    """
    num_rhs = self._get_num_systems(operator)
    if operator.shape.is_fully_defined():
      batch_shape = operator.batch_shape.as_list()
      n = operator.domain_dimension.value
      x_shape = (batch_shape + [n, num_rhs]) if with_batch else [n, num_rhs]
    else:
      batch_shape = operator.batch_shape_tensor()
      n = operator.domain_dimension_tensor()
      if with_batch:
        x_shape = array_ops.concat((batch_shape, [n, num_rhs]), 0)
      else:
        x_shape = [n, num_rhs]
    return random_normal(x_shape, dtype=operator.dtype)

  def _get_num_systems(self, operator):
    """Return 1 or 2 (number of systems), based on operator rank parity."""
    rank = operator.tensor_rank
    return 1 if rank is None or rank % 2 else 2
@six.add_metaclass(abc.ABCMeta)
class NonSquareLinearOperatorDerivedClassTest(LinearOperatorDerivedClassTest):
  """Base test class appropriate for generic rectangular operators.

  Square shapes are never tested by this class, so if you want to test your
  operator with a square shape, create two test classes, the other subclassing
  SquareLinearOperatorFullMatrixTest.

  Sub-classes must still define all abstractmethods from
  LinearOperatorDerivedClassTest that are not defined here.
  """

  @staticmethod
  def skip_these_tests():
    """List of test names to skip."""
    # These all require a square (or at least invertible) operator.
    return [
        "cholesky",
        "eigvalsh",
        "inverse",
        "solve",
        "solve_with_broadcast",
        "det",
        "log_abs_det"
    ]

  @staticmethod
  def operator_shapes_infos():
    # Rectangular (m, n) shapes, non-batch and batch.
    return [
        OperatorShapesInfo((2, 1)),
        OperatorShapesInfo((1, 2)),
        OperatorShapesInfo((1, 3, 2)),
        OperatorShapesInfo((3, 3, 4)),
        OperatorShapesInfo((2, 1, 2, 4))]

  def make_rhs(self, operator, adjoint, with_batch=True):
    # TODO(langmore) Add once we're testing solve_ls.
    raise NotImplementedError(
        "make_rhs not implemented because we don't test solve")

  def make_x(self, operator, adjoint, with_batch=True):
    """Return a random x for `operator.matmul(x, adjoint=adjoint)`.

    If operator.shape = [B1,...,Bb, M, N] the result has shape
    [B1,...,Bb, N, R] (or [B1,...,Bb, M, R] when `adjoint`), with R
    equal to 1 or 2.
    """
    num_rhs = self._get_num_systems(operator)
    # With `adjoint` the contraction is along the range dimension instead
    # of the domain dimension.
    if operator.shape.is_fully_defined():
      batch_shape = operator.batch_shape.as_list()
      n = (operator.range_dimension.value if adjoint
           else operator.domain_dimension.value)
      x_shape = (batch_shape + [n, num_rhs]) if with_batch else [n, num_rhs]
    else:
      batch_shape = operator.batch_shape_tensor()
      n = (operator.range_dimension_tensor() if adjoint
           else operator.domain_dimension_tensor())
      if with_batch:
        x_shape = array_ops.concat((batch_shape, [n, num_rhs]), 0)
      else:
        x_shape = [n, num_rhs]
    return random_normal(x_shape, dtype=operator.dtype)

  def _get_num_systems(self, operator):
    """Return 1 or 2 (number of systems), based on operator rank parity."""
    rank = operator.tensor_rank
    return 1 if rank is None or rank % 2 else 2
def random_positive_definite_matrix(shape,
                                    dtype,
                                    oversampling_ratio=4,
                                    force_well_conditioned=False):
  """[batch] positive definite Wishart matrix.

  A Wishart(N, S) matrix is the S sample covariance matrix of an N-variate
  (standard) Normal random variable.

  Args:
    shape: `TensorShape` or Python list.  Shape of the returned matrix.
    dtype: `TensorFlow` `dtype` or Python dtype.
    oversampling_ratio: S / N in the above.  If S < N, the matrix will be
      singular (unless `force_well_conditioned is True`).
    force_well_conditioned: Python bool.  If `True`, add `1` to the diagonal
      of the Wishart matrix, then divide by 2, ensuring most eigenvalues are
      close to 1.

  Returns:
    `Tensor` with desired shape and dtype.
  """
  dtype = dtypes.as_dtype(dtype)
  if not tensor_util.is_tf_type(shape):
    shape = tensor_shape.TensorShape(shape)
    # Matrix must be square.
    shape.dims[-1].assert_is_compatible_with(shape.dims[-2])
  shape = shape.as_list()
  n = shape[-2]
  num_samples = oversampling_ratio * shape[-1]
  sample_shape = shape[:-2] + [n, num_samples]
  with ops.name_scope("random_positive_definite_matrix"):
    # Draw S scaled standard-normal columns; their outer product is a
    # Wishart sample covariance matrix.
    samples = random_normal(
        sample_shape,
        dtype=dtype,
        stddev=math_ops.cast(1 / np.sqrt(num_samples), dtype.real_dtype))
    wishart = math_ops.matmul(samples, samples, adjoint_b=True)
    if force_well_conditioned:
      # Average with the identity, pulling all eigenvalues toward 1.
      wishart += linalg_ops.eye(n, dtype=dtype)
      wishart /= math_ops.cast(2, dtype)
    return wishart
def random_tril_matrix(shape,
                       dtype,
                       force_well_conditioned=False,
                       remove_upper=True):
  """[batch] lower triangular matrix.

  Args:
    shape: `TensorShape` or Python `list`.  Shape of the returned matrix.
    dtype: `TensorFlow` `dtype` or Python dtype.
    force_well_conditioned: Python `bool`.  If `True`, returned matrix will
      have eigenvalues with modulus in `(1, 2)`.  Otherwise, eigenvalues are
      unit normal random variables.
    remove_upper: Python `bool`.
      If `True`, zero out the strictly upper triangle.
      If `False`, the lower triangle of returned matrix will have desired
      properties, but will not have the strictly upper triangle zero'd out.

  Returns:
    `Tensor` with desired shape and dtype.
  """
  with ops.name_scope("random_tril_matrix"):
    # Start from an unstructured Gaussian matrix.
    mat = random_normal(shape, dtype=dtype)
    if remove_upper:
      mat = array_ops.matrix_band_part(mat, -1, 0)
    if force_well_conditioned:
      # Overwrite the diagonal with entries of modulus in [1, 2).  The
      # eigenvalues of a triangular matrix are its diagonal, so this keeps
      # them well away from zero.
      maxval = ops.convert_to_tensor(np.sqrt(2.), dtype=dtype.real_dtype)
      diag = random_sign_uniform(
          shape[:-1], dtype=dtype, minval=1., maxval=maxval)
      mat = array_ops.matrix_set_diag(mat, diag)
    return mat
def random_normal(shape, mean=0.0, stddev=1.0, dtype=dtypes.float32, seed=None):
  """Tensor with (possibly complex) Gaussian entries.

  Samples are distributed like

  ```
  N(mean, stddev^2), if dtype is real,
  X + iY, where X, Y ~ N(mean, stddev^2) if dtype is complex.
  ```

  Args:
    shape: `TensorShape` or Python list.  Shape of the returned tensor.
    mean: `Tensor` giving mean of normal to sample from.
    stddev: `Tensor` giving stdev of normal to sample from.
    dtype: `TensorFlow` `dtype` or numpy dtype.
    seed: Python integer seed for the RNG.

  Returns:
    `Tensor` with desired shape and dtype.
  """
  dtype = dtypes.as_dtype(dtype)
  with ops.name_scope("random_normal"):
    real_part = random_ops.random_normal(
        shape, mean=mean, stddev=stddev, dtype=dtype.real_dtype, seed=seed)
    if not dtype.is_complex:
      return real_part
    # Shift the seed so the imaginary part is distinct from (but still
    # deterministically related to) the real part.
    if seed is not None:
      seed += 1234
    imag_part = random_ops.random_normal(
        shape, mean=mean, stddev=stddev, dtype=dtype.real_dtype, seed=seed)
    return math_ops.complex(real_part, imag_part)
def random_uniform(shape,
                   minval=None,
                   maxval=None,
                   dtype=dtypes.float32,
                   seed=None):
  """Tensor with (possibly complex) Uniform entries.

  Samples are distributed like

  ```
  Uniform[minval, maxval], if dtype is real,
  X + iY, where X, Y ~ Uniform[minval, maxval], if dtype is complex.
  ```

  Args:
    shape: `TensorShape` or Python list.  Shape of the returned tensor.
    minval: `0-D` `Tensor` giving the minimum values.
    maxval: `0-D` `Tensor` giving the maximum values.
    dtype: `TensorFlow` `dtype` or Python dtype.
    seed: Python integer seed for the RNG.

  Returns:
    `Tensor` with desired shape and dtype.
  """
  dtype = dtypes.as_dtype(dtype)
  with ops.name_scope("random_uniform"):
    real_part = random_ops.random_uniform(
        shape, dtype=dtype.real_dtype, minval=minval, maxval=maxval, seed=seed)
    if not dtype.is_complex:
      return real_part
    # Shift the seed so the imaginary part differs from the real part.
    if seed is not None:
      seed += 12345
    imag_part = random_ops.random_uniform(
        shape,
        dtype=dtype.real_dtype,
        minval=minval,
        maxval=maxval,
        seed=seed)
    return math_ops.complex(real_part, imag_part)
def random_sign_uniform(shape,
                        minval=None,
                        maxval=None,
                        dtype=dtypes.float32,
                        seed=None):
  """Tensor with (possibly complex) random entries from a "sign Uniform".

  Letting `Z` be a random variable equal to `-1` and `1` with equal
  probability, samples from this `Op` are distributed like

  ```
  Z * X, where X ~ Uniform[minval, maxval], if dtype is real,
  Z * (X + iY), where X, Y ~ Uniform[minval, maxval], if dtype is complex.
  ```

  Args:
    shape: `TensorShape` or Python list.  Shape of the returned tensor.
    minval: `0-D` `Tensor` giving the minimum values.
    maxval: `0-D` `Tensor` giving the maximum values.
    dtype: `TensorFlow` `dtype` or Python dtype.
    seed: Python integer seed for the RNG.

  Returns:
    `Tensor` with desired shape and dtype.
  """
  dtype = dtypes.as_dtype(dtype)
  with ops.name_scope("random_sign_uniform"):
    magnitudes = random_uniform(
        shape, minval=minval, maxval=maxval, dtype=dtype, seed=seed)
    # Use a shifted seed for the signs so they are drawn independently of
    # the magnitudes.
    if seed is not None:
      seed += 12
    signs = math_ops.sign(
        random_ops.random_uniform(shape, minval=-1., maxval=1., seed=seed))
    return magnitudes * math_ops.cast(signs, magnitudes.dtype)
def random_normal_correlated_columns(shape,
                                     mean=0.0,
                                     stddev=1.0,
                                     dtype=dtypes.float32,
                                     eps=1e-4,
                                     seed=None):
  """Batch matrix with (possibly complex) Gaussian entries and correlated cols.

  Returns random batch matrix `A` with specified element-wise `mean`,
  `stddev`, living close to an embedded hyperplane.

  Suppose `shape[-2:] = (M, N)`.  The construction draws a matrix on an
  (N-1)-dimensional column space, lifts it into N dimensions with a random
  embedding, then perturbs it by a small (stddev `eps`) Gaussian and shifts
  by `mean`:

  ```
  L = random normal N x N-1 matrix
  B = random normal M x N-1 matrix, scaled by 1 / sqrt(N - 1)
  G = (L B^H)^H, a random normal M x N matrix on an N-1 dim hyperplane
  E = a random normal M x N matrix, mean = 0, stddev = eps
  mu = a constant M x N matrix, equal to the argument "mean"
  A = G + E + mu
  ```

  Args:
    shape: Python list of integers.
      Shape of the returned tensor.  Must be at least length two.
    mean: `Tensor` giving mean of normal to sample from.
    stddev: `Tensor` giving stdev of normal to sample from.
    dtype: `TensorFlow` `dtype` or numpy dtype.
    eps: Distance each column is perturbed from the low-dimensional subspace.
    seed: Python integer seed for the RNG.

  Returns:
    `Tensor` with desired shape and dtype.

  Raises:
    ValueError: If `shape` is not at least length 2.
  """
  dtype = dtypes.as_dtype(dtype)
  if len(shape) < 2:
    raise ValueError(
        "Argument shape must be at least length 2. Found: %s" % shape)
  # Shape is the final shape, e.g. [..., M, N].
  shape = list(shape)
  batch_shape = shape[:-2]
  m, n = shape[-2:]
  # Fall back to plain iid entries when there is only one column (a single
  # column is trivially "correlated") or more rows than columns.
  # NOTE(review): the original docstring said iid is used when M < N, but
  # this condition fires when N < M -- confirm which is intended.
  if n < 2 or n < m:
    return random_normal(
        shape, mean=mean, stddev=stddev, dtype=dtype, seed=seed)
  # Matrix with only n - 1 columns, to be embedded in n-dim column space.
  reduced_shape = batch_shape + [m, n - 1]
  # Embedding matrix mapping batch matrices from [..., N-1, M] to [..., N, M].
  embedder_shape = batch_shape + [n, n - 1]
  # NOTE(review): `stddev` is not applied on this code path (only in the
  # early return above), and the 1/sqrt(n-1) scale is applied to the reduced
  # matrix rather than the embedding described in the docstring -- TODO
  # confirm this is intentional.
  reduced_stddev = 1 / np.sqrt(n - 1)
  with ops.name_scope("random_normal_correlated_columns"):
    reduced_mat = random_normal(
        reduced_shape, mean=0.0, stddev=reduced_stddev, dtype=dtype, seed=seed)
    if seed is not None:
      seed += 1287
    embedder = random_normal(embedder_shape, dtype=dtype, seed=seed)
    lifted_t = math_ops.matmul(embedder, reduced_mat, transpose_b=True)
    lifted = array_ops.matrix_transpose(lifted_t)
    mean_offset = array_ops.ones_like(lifted) * mean
    return lifted + random_normal(shape, stddev=eps, dtype=dtype) + mean_offset
|
frreiss/tensorflow-fred
|
tensorflow/python/ops/linalg/linear_operator_test_util.py
|
Python
|
apache-2.0
| 44,656
|
[
"Gaussian"
] |
7d1a35ebc83676927aa8fe809bf258ffd7e5649dccfc3657848e8e8cdcf76a03
|
# Copyright 2006 by Sean Davis. All rights reserved.
# This code is part of the Biopython distribution and governed by its
# license. Please see the LICENSE file that should have been included
# as part of this package.
#
# $Id: __init__.py,v 1.12 2009-04-24 12:03:45 mdehoon Exp $
# Sean Davis <sdavis2 at mail dot nih dot gov>
# National Cancer Institute
# National Institutes of Health
# Bethesda, MD, USA
#
"""Parse Unigene flat file format files such as the Hs.data file.
Here is an overview of the flat file format that this parser deals with:
Line types/qualifiers::
ID UniGene cluster ID
TITLE Title for the cluster
GENE Gene symbol
CYTOBAND Cytological band
EXPRESS Tissues of origin for ESTs in cluster
RESTR_EXPR Single tissue or development stage contributes
more than half the total EST frequency for this gene.
GNM_TERMINUS genomic confirmation of presence of a 3' terminus;
T if a non-templated polyA tail is found among
a cluster's sequences; else
I if templated As are found in genomic sequence or
S if a canonical polyA signal is found on
the genomic sequence
GENE_ID Entrez gene identifier associated with at least one
sequence in this cluster;
to be used instead of LocusLink.
LOCUSLINK LocusLink identifier associated with at least one
sequence in this cluster;
deprecated in favor of GENE_ID
HOMOL Homology;
CHROMOSOME Chromosome. For plants, CHROMOSOME refers to mapping
on the arabidopsis genome.
STS STS
ACC= GenBank/EMBL/DDBJ accession number of STS
[optional field]
UNISTS= identifier in NCBI's UNISTS database
TXMAP Transcript map interval
MARKER= Marker found on at least one sequence in this
cluster
RHPANEL= Radiation Hybrid panel used to place marker
PROTSIM Protein Similarity data for the sequence with
highest-scoring protein similarity in this cluster
ORG= Organism
PROTGI= Sequence GI of protein
PROTID= Sequence ID of protein
PCT= Percent alignment
ALN= length of aligned region (aa)
SCOUNT Number of sequences in the cluster
SEQUENCE Sequence
ACC= GenBank/EMBL/DDBJ accession number of sequence
NID= Unique nucleotide sequence identifier (gi)
PID= Unique protein sequence identifier (used for
non-ESTs)
CLONE= Clone identifier (used for ESTs only)
END= End (5'/3') of clone insert read (used for
ESTs only)
LID= Library ID; see Hs.lib.info for library name
and tissue
MGC= 5' CDS-completeness indicator; if present, the
clone associated with this sequence is believed
CDS-complete. A value greater than 511 is the gi
of the CDS-complete mRNA matched by the EST,
otherwise the value is an indicator of the
reliability of the test indicating CDS
completeness; higher values indicate more
reliable CDS-completeness predictions.
SEQTYPE= Description of the nucleotide sequence.
Possible values are mRNA, EST and HTC.
TRACE= The Trace ID of the EST sequence, as provided by
NCBI Trace Archive
"""
class SequenceLine(object):
    """Store the information for one SEQUENCE line from a Unigene file.

    Initialize with the text part of the SEQUENCE line, or nothing.

    Attributes and descriptions (access as LOWER CASE):
     - ACC=         GenBank/EMBL/DDBJ accession number of sequence
     - NID=         Unique nucleotide sequence identifier (gi)
     - PID=         Unique protein sequence identifier (used for non-ESTs)
     - CLONE=       Clone identifier (used for ESTs only)
     - END=         End (5'/3') of clone insert read (used for ESTs only)
     - LID=         Library ID; see Hs.lib.info for library name and tissue
     - MGC=         5' CDS-completeness indicator; if present,
                    the clone associated with this sequence
                    is believed CDS-complete. A value greater than 511
                    is the gi of the CDS-complete mRNA matched by the EST,
                    otherwise the value is an indicator of the reliability
                    of the test indicating CDS completeness;
                    higher values indicate more reliable CDS-completeness
                    predictions.
     - SEQTYPE=     Description of the nucleotide sequence. Possible values
                    are mRNA, EST and HTC.
     - TRACE=       The Trace ID of the EST sequence, as provided by NCBI
                    Trace Archive
    """

    def __init__(self, text=None):
        # Default every known qualifier to the empty string so attribute
        # access is safe even when the line omits that qualifier.
        self.acc = ''
        self.nid = ''
        self.lid = ''
        self.pid = ''
        self.clone = ''
        self.image = ''
        self.is_image = False
        self.end = ''
        self.mgc = ''
        self.seqtype = ''
        self.trace = ''
        if text is not None:
            self.text = text
            self._init_from_text(text)

    def _init_from_text(self, text):
        # Each field looks like "KEY=value"; fields are joined by "; ".
        for field in text.split('; '):
            key, val = field.split("=")
            if key == 'CLONE' and val[:5] == 'IMAGE':
                # IMAGE consortium clones carry the numeric ID after "IMAGE:".
                self.is_image = True
                self.image = val[6:]
            setattr(self, key.lower(), val)

    def __repr__(self):
        return self.text
class ProtsimLine(object):
    """Store the information for one PROTSIM line from a Unigene file.

    Initialize with the text part of the PROTSIM line, or nothing.

    Attributes and descriptions (access as LOWER CASE):
     - ORG=    Organism
     - PROTGI= Sequence GI of protein
     - PROTID= Sequence ID of protein
     - PCT=    Percent alignment
     - ALN=    length of aligned region (aa)
    """

    def __init__(self, text=None):
        # Every qualifier defaults to the empty string.
        self.org = ''
        self.protgi = ''
        self.protid = ''
        self.pct = ''
        self.aln = ''
        if text is not None:
            self.text = text
            self._init_from_text(text)

    def _init_from_text(self, text):
        # Fields look like "KEY=value" and are joined by "; ".
        for field in text.split('; '):
            key, val = field.split("=")
            setattr(self, key.lower(), val)

    def __repr__(self):
        return self.text
class STSLine(object):
    """Store the information for one STS line from a Unigene file.

    Initialize with the text part of the STS line, or nothing.

    Attributes and descriptions (access as LOWER CASE):
     - ACC=    GenBank/EMBL/DDBJ accession number of STS [optional field]
     - UNISTS= identifier in NCBI's UNISTS database
    """

    def __init__(self, text=None):
        self.acc = ''
        self.unists = ''
        if text is not None:
            self.text = text
            self._init_from_text(text)

    def _init_from_text(self, text):
        # Unlike SEQUENCE/PROTSIM lines, STS fields are separated by a
        # single space, not "; ".
        for field in text.split(' '):
            key, val = field.split("=")
            setattr(self, key.lower(), val)

    def __repr__(self):
        return self.text
class Record(object):
    """Store a Unigene record.

    Attributes (see the module docstring for the flat-file line types):

     - ID:           cluster ID (ID line)
     - species:      species prefix of the ID, e.g. 'Hs', 'Bt'
     - title:        TITLE line
     - symbol:       GENE line
     - cytoband:     CYTOBAND line
     - express:      EXPRESS line, split into a list of strings
     - restr_expr:   RESTR_EXPR line
     - gnm_terminus: GNM_TERMINUS line
     - gene_id:      GENE_ID line
     - locuslink:    LOCUSLINK line
     - homol:        HOMOL line
     - chromosome:   CHROMOSOME line
     - protsim:      list of ProtsimLine objects
     - sequence:     list of SequenceLine objects
     - sts:          list of STSLine objects
     - txmap:        list of TXMAP entries
    """

    def __init__(self):
        # Scalar fields default to the empty string.
        self.ID = ''
        self.species = ''
        self.title = ''
        self.symbol = ''
        self.cytoband = ''
        self.restr_expr = ''
        self.gnm_terminus = ''
        self.gene_id = ''
        self.locuslink = ''
        self.homol = ''
        self.chromosome = ''
        # Repeated fields collect into per-instance lists.
        self.express = []
        self.protsim = []
        self.sequence = []
        self.sts = []
        self.txmap = []

    def __repr__(self):
        return "<%s> %s %s\n%s" % (
            type(self).__name__, self.ID, self.symbol, self.title)
def parse(handle):
    """Yield UniGene records from the handle, one at a time (generator)."""
    while True:
        rec = _read(handle)
        if not rec:
            # _read returns None at end of input.
            return
        yield rec
def read(handle):
    """Read exactly one UniGene record from the handle.

    Raises:
        ValueError: if the handle contains no record, or if data remains
            after the first record (i.e. more than one record is present).
    """
    record = _read(handle)
    if not record:
        # Fixed: the error messages previously said "SwissProt", a
        # copy-paste left-over from Bio.SwissProt -- this module parses
        # UniGene flat files.
        raise ValueError("No UniGene record found")
    # We should have reached the end of the record by now
    remainder = handle.read()
    if remainder:
        raise ValueError("More than one UniGene record found")
    return record
# Everything below is private
def _read(handle):
    """Read a single UniGene record from the handle (private helper).

    Returns the parsed Record, or None if the handle is empty.

    Raises:
        ValueError: on an unknown tag, an unparsable HOMOL value, a
            SCOUNT/SEQUENCE mismatch, or a record missing its '//'
            terminator.
    """
    # Tags occupy a fixed-width 12-character column; the value follows.
    UG_INDENT = 12
    record = None
    for line in handle:
        tag, value = line[:UG_INDENT].rstrip(), line[UG_INDENT:].rstrip()
        line = line.rstrip()
        if tag == "ID":
            record = Record()
            record.ID = value
            # The species prefix (e.g. "Hs") is the part before the dot.
            record.species = record.ID.split('.')[0]
        elif tag == "TITLE":
            record.title = value
        elif tag == "GENE":
            record.symbol = value
        elif tag == "GENE_ID":
            record.gene_id = value
        elif tag == "LOCUSLINK":
            record.locuslink = value
        elif tag == "HOMOL":
            if value == "YES":
                record.homol = True
            elif value == "NO":
                # Bug fix: "NO" previously set homol to True as well,
                # making the YES/NO distinction meaningless.
                record.homol = False
            else:
                raise ValueError("Cannot parse HOMOL line %s" % line)
        elif tag == "EXPRESS":
            record.express = [word.strip() for word in value.split("|")]
        elif tag == "RESTR_EXPR":
            record.restr_expr = [word.strip() for word in value.split("|")]
        elif tag == "CHROMOSOME":
            record.chromosome = value
        elif tag == "CYTOBAND":
            record.cytoband = value
        elif tag == "PROTSIM":
            protsim = ProtsimLine(value)
            record.protsim.append(protsim)
        elif tag == "SCOUNT":
            # NOTE(review): scount is only referenced at the '//' terminator;
            # a record missing its SCOUNT line would raise NameError there
            # rather than a clean ValueError -- TODO confirm whether the
            # format guarantees SCOUNT is always present.
            scount = int(value)
        elif tag == "SEQUENCE":
            sequence = SequenceLine(value)
            record.sequence.append(sequence)
        elif tag == "STS":
            sts = STSLine(value)
            record.sts.append(sts)
        elif tag == '//':
            # End of record: cross-check the declared sequence count.
            if len(record.sequence) != scount:
                raise ValueError("The number of sequences specified in the record"
                                 " (%d) does not agree with the number of sequences found (%d)" % (scount, len(record.sequence)))
            return record
        else:
            raise ValueError("Unknown tag %s" % tag)
    if record:
        # Input ended before the '//' terminator.
        raise ValueError("Unexpected end of stream.")
|
zjuchenyuan/BioWeb
|
Lib/Bio/UniGene/__init__.py
|
Python
|
mit
| 12,357
|
[
"Biopython"
] |
d902f277fcaa232168baa0684e1cd0216c665ac272d7e7f04cdfbbdda1903898
|
from unittest.mock import ANY, Mock, call
import pytest
from flask import url_for
from freezegun import freeze_time
from notifications_python_client.errors import HTTPError
import app
from tests import service_json
from tests.conftest import (
SERVICE_ONE_ID,
create_active_caseworking_user,
create_active_user_with_permissions,
normalize_spaces,
)
@pytest.fixture()
def mock_no_users_for_service(mocker):
    # Patch the team-members lookup so the service appears to have no users.
    mocker.patch('app.models.user.Users.client_method', return_value=[])
@pytest.fixture(scope='function')
def mock_get_existing_user_by_email(mocker, api_user_active):
    """Patch the by-email user lookup to return the active API user."""
    patched = mocker.patch(
        'app.user_api_client.get_user_by_email',
        return_value=api_user_active,
    )
    return patched
@pytest.fixture(scope='function')
def mock_check_invite_token(mocker, sample_invite):
    """Patch invite-token validation so any token resolves to the sample invite."""
    patched = mocker.patch(
        'app.invite_api_client.check_token',
        return_value=sample_invite,
    )
    return patched
# Time is frozen, presumably to keep session/invite expiry deterministic --
# TODO confirm against the invite-expiry logic.
@freeze_time('2021-12-12 12:12:12')
def test_existing_user_accept_invite_calls_api_and_redirects_to_dashboard(
    client_request,
    service_one,
    api_user_active,
    mock_check_invite_token,
    mock_get_existing_user_by_email,
    mock_no_users_for_service,
    mock_accept_invite,
    mock_add_user_to_service,
    mock_get_service,
    mocker,
    mock_events,
    mock_get_user,
    mock_update_user_attribute,
):
    """An existing user accepting an invite is added to the service and
    redirected straight to its dashboard."""
    client_request.logout()
    expected_service = service_one['id']
    expected_permissions = {'view_activity', 'send_messages', 'manage_service', 'manage_api_keys'}
    client_request.get(
        'main.accept_invite',
        token='thisisnotarealtoken',
        _expected_redirect=url_for('main.service_dashboard', service_id=expected_service, _external=True),
    )
    # The token is validated, the user is looked up by the invite's email,
    # and the invite is marked accepted exactly once.
    mock_check_invite_token.assert_called_with('thisisnotarealtoken')
    mock_get_existing_user_by_email.assert_called_with('invited_user@test.gov.uk')
    assert mock_accept_invite.call_count == 1
    # The user is added with the invite's permissions and (here) no
    # folder permissions.
    mock_add_user_to_service.assert_called_with(
        expected_service,
        api_user_active['id'],
        expected_permissions,
        [],
    )
@pytest.mark.parametrize('trial_mode, expected_endpoint', (
    (True, '.broadcast_tour'),
    (False, '.broadcast_tour_live'),
))
def test_broadcast_service_shows_tour(
    client_request,
    service_one,
    mock_check_invite_token,
    mock_get_existing_user_by_email,
    mock_no_users_for_service,
    mock_accept_invite,
    mock_add_user_to_service,
    mock_update_user_attribute,
    mocker,
    mock_events,
    mock_get_user,
    trial_mode,
    expected_endpoint,
):
    """Accepting an invite to a broadcast service redirects to the tour:
    the trial version for restricted services, the live version otherwise."""
    client_request.logout()
    service_one['permissions'] = ['broadcast']
    service_one['restricted'] = trial_mode
    mocker.patch('app.service_api_client.get_service', return_value={
        'data': service_one,
    })
    client_request.get(
        'main.accept_invite',
        token='thisisnotarealtoken',
        _expected_redirect=url_for(
            expected_endpoint,
            service_id=SERVICE_ONE_ID,
            step_index=1,
            _external=True,
        ),
    )
def test_existing_user_with_no_permissions_or_folder_permissions_accept_invite(
    client_request,
    mocker,
    service_one,
    api_user_active,
    sample_invite,
    mock_check_invite_token,
    mock_get_existing_user_by_email,
    mock_no_users_for_service,
    mock_add_user_to_service,
    mock_get_service,
    mock_events,
    mock_get_user,
    mock_update_user_attribute,
):
    """An invite with an empty permissions string grants no permissions
    and no folder permissions."""
    client_request.logout()
    expected_service = service_one['id']
    sample_invite['permissions'] = ''
    expected_permissions = set()
    expected_folder_permissions = []
    mocker.patch('app.invite_api_client.accept_invite', return_value=sample_invite)
    client_request.get(
        'main.accept_invite',
        token='thisisnotarealtoken',
        _expected_status=302,
    )
    mock_add_user_to_service.assert_called_with(expected_service,
                                                api_user_active['id'],
                                                expected_permissions,
                                                expected_folder_permissions)
def test_if_existing_user_accepts_twice_they_redirect_to_sign_in(
    client_request,
    mocker,
    sample_invite,
    mock_check_invite_token,
    mock_get_service,
    mock_update_user_attribute,
):
    """Re-using an already-accepted invite lands on the sign-in-again page
    and must not update the user's email validation timestamp."""
    client_request.logout()
    # Logging out updates the current session ID to `None`
    mock_update_user_attribute.reset_mock()
    sample_invite['status'] = 'accepted'
    page = client_request.get(
        'main.accept_invite',
        token='thisisnotarealtoken',
        _follow_redirects=True,
    )
    assert (
        page.h1.string,
        page.select('main p')[0].text.strip(),
    ) == (
        'You need to sign in again',
        'We signed you out because you have not used Notify for a while.',
    )
    # We don’t let people update `email_access_validated_at` using an
    # already-accepted invite
    assert mock_update_user_attribute.called is False
def test_invite_goes_in_session(
    client_request,
    mocker,
    sample_invite,
    mock_get_service,
    api_user_active,
    mock_check_invite_token,
    mock_get_user_by_email,
    mock_no_users_for_service,
    mock_add_user_to_service,
    mock_accept_invite,
):
    """Accepting an invite stores the invited user's ID in the session."""
    sample_invite['email_address'] = 'test@user.gov.uk'
    client_request.get(
        'main.accept_invite',
        token='thisisnotarealtoken',
        _expected_status=302,
        _expected_redirect=url_for(
            'main.service_dashboard',
            service_id=SERVICE_ONE_ID,
            _external=True,
        ),
        _follow_redirects=False,
    )
    with client_request.session_transaction() as session:
        assert session['invited_user_id'] == sample_invite['id']
@pytest.mark.parametrize('user, landing_page_title', [
    (create_active_user_with_permissions(), 'Dashboard'),
    (create_active_caseworking_user(), 'Templates'),
])
def test_accepting_invite_removes_invite_from_session(
    client_request,
    mocker,
    sample_invite,
    mock_get_service,
    service_one,
    mock_check_invite_token,
    mock_get_user_by_email,
    mock_no_users_for_service,
    mock_add_user_to_service,
    mock_accept_invite,
    mock_get_service_templates,
    mock_get_template_statistics,
    mock_has_no_jobs,
    mock_get_service_statistics,
    mock_get_template_folders,
    mock_get_usage,
    mock_get_billable_units,
    mock_get_free_sms_fragment_limit,
    mock_get_inbound_sms_summary,
    mock_get_returned_letter_statistics_with_no_returned_letters,
    mock_get_api_keys,
    fake_uuid,
    user,
    landing_page_title,
):
    """Once the invite flow completes, `invited_user_id` is cleared from the
    session; the landing page depends on the user type (caseworkers land on
    Templates rather than the Dashboard)."""
    sample_invite['email_address'] = user['email_address']
    client_request.login(user)
    page = client_request.get(
        'main.accept_invite',
        token='thisisnotarealtoken',
        _follow_redirects=True,
    )
    assert normalize_spaces(page.select_one('h1').text) == landing_page_title
    with client_request.session_transaction() as session:
        assert 'invited_user_id' not in session
@freeze_time('2021-12-12T12:12:12')
def test_existing_user_of_service_get_redirected_to_signin(
    client_request,
    mocker,
    api_user_active,
    sample_invite,
    mock_get_service,
    mock_get_user_by_email,
    mock_check_invite_token,
    mock_accept_invite,
    mock_update_user_attribute,
):
    """A signed-out user who is already a member of the service still gets
    the invite accepted, but is sent to the sign-in-again page."""
    client_request.logout()
    sample_invite['email_address'] = api_user_active['email_address']
    # Make the invited user appear in the service's existing member list.
    mocker.patch('app.models.user.Users.client_method', return_value=[api_user_active])
    page = client_request.get(
        'main.accept_invite',
        token='thisisnotarealtoken',
        _follow_redirects=True,
    )
    assert (
        page.h1.string,
        page.select('main p')[0].text.strip(),
    ) == (
        'You need to sign in again',
        'We signed you out because you have not used Notify for a while.',
    )
    assert mock_accept_invite.call_count == 1
def test_accept_invite_redirects_if_api_raises_an_error_that_they_are_already_part_of_the_service(
    client_request,
    mocker,
    api_user_active,
    sample_invite,
    mock_get_existing_user_by_email,
    mock_check_invite_token,
    mock_accept_invite,
    mock_get_service,
    mock_no_users_for_service,
    mock_get_user,
    mock_update_user_attribute,
):
    """If the API rejects the add-user call because the user is already a
    member, the flow still redirects to the service dashboard."""
    client_request.logout()
    # NOTE(review): the `message` value below is a set literal (curly braces
    # around a single f-string) -- presumably meant to be a plain string;
    # the Mock never parses it, so the test passes either way. TODO confirm.
    mocker.patch('app.user_api_client.add_user_to_service', side_effect=HTTPError(
        response=Mock(
            status_code=400,
            json={
                "result": "error",
                "message": {f"User id: {api_user_active['id']} already part of service id: {SERVICE_ONE_ID}"}
            },
        ),
        message=f"User id: {api_user_active['id']} already part of service id: {SERVICE_ONE_ID}"
    ))
    client_request.get(
        'main.accept_invite',
        token='thisisnotarealtoken',
        _follow_redirects=False,
        _expected_redirect=url_for('main.service_dashboard', service_id=SERVICE_ONE_ID, _external=True)
    )
def test_existing_signed_out_user_accept_invite_redirects_to_sign_in(
    client_request,
    service_one,
    api_user_active,
    sample_invite,
    mock_check_invite_token,
    mock_get_existing_user_by_email,
    mock_no_users_for_service,
    mock_add_user_to_service,
    mock_accept_invite,
    mock_get_service,
    mocker,
    mock_events,
    mock_get_user,
    mock_update_user_attribute,
):
    """A signed-out existing user is added to the service (invite accepted,
    permissions and folder permissions applied) but must sign in again."""
    client_request.logout()
    expected_service = service_one['id']
    expected_permissions = {'view_activity', 'send_messages', 'manage_service', 'manage_api_keys'}
    page = client_request.get(
        'main.accept_invite',
        token='thisisnotarealtoken',
        _follow_redirects=True,
    )
    mock_check_invite_token.assert_called_with('thisisnotarealtoken')
    mock_get_existing_user_by_email.assert_called_with('invited_user@test.gov.uk')
    # Unlike the no-permissions case, folder permissions come from the invite.
    mock_add_user_to_service.assert_called_with(expected_service,
                                                api_user_active['id'],
                                                expected_permissions,
                                                sample_invite['folder_permissions'])
    assert mock_accept_invite.call_count == 1
    assert (
        page.h1.string,
        page.select('main p')[0].text.strip(),
    ) == (
        'You need to sign in again',
        'We signed you out because you have not used Notify for a while.',
    )
def test_new_user_accept_invite_calls_api_and_redirects_to_registration(
    client_request,
    service_one,
    mock_check_invite_token,
    mock_dont_get_user_by_email,
    mock_add_user_to_service,
    mock_no_users_for_service,
    mock_get_service,
    mocker,
):
    """A brand-new user (no account for the invited email) is redirected to
    the register-from-invite page."""
    client_request.logout()
    client_request.get(
        'main.accept_invite',
        token='thisisnotarealtoken',
        _expected_redirect='http://localhost/register-from-invite',
    )
    mock_check_invite_token.assert_called_with('thisisnotarealtoken')
    mock_dont_get_user_by_email.assert_called_with('invited_user@test.gov.uk')
def test_new_user_accept_invite_calls_api_and_views_registration_page(
    client_request,
    service_one,
    sample_invite,
    mock_check_invite_token,
    mock_dont_get_user_by_email,
    mock_get_invited_user_by_id,
    mock_add_user_to_service,
    mock_no_users_for_service,
    mock_get_service,
    mocker,
):
    """A brand-new user following an invite sees the registration page with
    the invited email and service pre-filled as hidden fields."""
    client_request.logout()
    page = client_request.get(
        'main.accept_invite',
        token='thisisnotarealtoken',
        _follow_redirects=True,
    )
    mock_check_invite_token.assert_called_with('thisisnotarealtoken')
    mock_dont_get_user_by_email.assert_called_with('invited_user@test.gov.uk')
    mock_get_invited_user_by_id.assert_called_once_with(sample_invite['id'])
    assert page.h1.string.strip() == 'Create an account'
    assert normalize_spaces(page.select_one('main p').text) == (
        'Your account will be created with this email address: '
        'invited_user@test.gov.uk'
    )
    # The form exposes name/password inputs and carries the service and
    # email along as hidden fields.
    form = page.find('form')
    name = form.find('input', id='name')
    password = form.find('input', id='password')
    service = form.find('input', type='hidden', id='service')
    email = form.find('input', type='hidden', id='email_address')
    assert email
    assert email.attrs['value'] == 'invited_user@test.gov.uk'
    assert name
    assert password
    assert service
    assert service.attrs['value'] == service_one['id']
def test_cancelled_invited_user_accepts_invited_redirect_to_cancelled_invitation(
    client_request,
    mock_get_user,
    mock_get_service,
    sample_invite,
    mock_check_invite_token,
    mock_update_user_attribute,
):
    """Following a cancelled invite shows the 'invitation cancelled' page."""
    client_request.logout()
    # reset_mock: fixture set-up may already have recorded calls on this
    # mock before the request under test — TODO confirm against fixture.
    mock_update_user_attribute.reset_mock()
    sample_invite['status'] = 'cancelled'
    page = client_request.get('main.accept_invite', token='thisisnotarealtoken')
    app.invite_api_client.check_token.assert_called_with('thisisnotarealtoken')
    assert page.h1.string.strip() == 'The invitation you were sent has been cancelled'
    # We don’t let people update `email_access_validated_at` using a
    # cancelled invite
    assert mock_update_user_attribute.called is False
@pytest.mark.parametrize('admin_endpoint, api_endpoint', [
    ('main.accept_invite', 'app.invite_api_client.check_token'),
    ('main.accept_org_invite', 'app.org_invite_api_client.check_token'),
])
def test_new_user_accept_invite_with_malformed_token(
    admin_endpoint,
    api_endpoint,
    client_request,
    service_one,
    mocker,
):
    """A 400 from the token-check API surfaces its message in a danger banner.

    Parametrized over both the service-invite and organisation-invite
    endpoints, which use different API clients for the token check.
    """
    client_request.logout()
    # Simulate the API rejecting the token with a 400 and an error payload.
    # NOTE(review): the value under 'invitation' in `json` is a set literal,
    # not a string — the view appears to read the error from `message`
    # instead, so this is harmless, but worth confirming.
    mocker.patch(api_endpoint, side_effect=HTTPError(
        response=Mock(
            status_code=400,
            json={
                'result': 'error',
                'message': {
                    'invitation': {
                        'Something’s wrong with this link. Make sure you’ve copied the whole thing.'
                    }
                }
            }
        ),
        message={'invitation': 'Something’s wrong with this link. Make sure you’ve copied the whole thing.'}
    ))
    page = client_request.get(admin_endpoint, token='thisisnotarealtoken', _follow_redirects=True)
    assert normalize_spaces(
        page.select_one('.banner-dangerous').text
    ) == 'Something’s wrong with this link. Make sure you’ve copied the whole thing.'
def test_new_user_accept_invite_completes_new_registration_redirects_to_verify(
    client_request,
    service_one,
    sample_invite,
    api_user_active,
    mock_check_invite_token,
    mock_dont_get_user_by_email,
    mock_email_is_not_already_in_use,
    mock_register_user,
    mock_send_verify_code,
    mock_get_invited_user_by_id,
    mock_accept_invite,
    mock_no_users_for_service,
    mock_add_user_to_service,
    mock_get_service,
    mocker,
):
    """Completing registration from an invite sends the user on to verify.

    Accepting the invite stores invited_user_id in the session; posting the
    registration form then registers the user, sends an sms verify code,
    and redirects to the verify page.
    """
    client_request.logout()
    # Following the invite link as a brand-new user redirects to registration.
    client_request.get(
        'main.accept_invite',
        token='thisisnotarealtoken',
        _expected_redirect='http://localhost/register-from-invite',
    )
    with client_request.session_transaction() as session:
        assert session.get('invited_user_id') == sample_invite['id']
    registration_data = {
        'service': sample_invite['service'],
        'email_address': sample_invite['email_address'],
        'from_user': sample_invite['from_user'],
        'password': 'longpassword',
        'mobile_number': '+447890123456',
        'name': 'Invited User',
        'auth_type': 'email_auth',
    }
    client_request.post(
        'main.register_from_invite',
        _data=registration_data,
        _expected_redirect='http://localhost/verify',
    )
    mock_send_verify_code.assert_called_once_with(
        ANY, 'sms', registration_data['mobile_number'],
    )
    mock_get_invited_user_by_id.assert_called_once_with(sample_invite['id'])
    mock_register_user.assert_called_with(
        registration_data['name'],
        registration_data['email_address'],
        registration_data['mobile_number'],
        registration_data['password'],
        registration_data['auth_type'],
    )
    assert mock_accept_invite.call_count == 1
def test_signed_in_existing_user_cannot_use_anothers_invite(
    client_request,
    mocker,
    api_user_active,
    mock_check_invite_token,
    sample_invite,
    mock_get_user,
    mock_accept_invite,
    mock_get_service,
):
    """A signed-in user following someone else's invite gets a 403.

    The danger banner explains the email mismatch and tells the user to
    sign out; the invite is never accepted.
    """
    mocker.patch('app.user_api_client.get_users_for_service', return_value=[api_user_active])
    forbidden_page = client_request.get(
        'main.accept_invite',
        token='thisisnotarealtoken',
        _follow_redirects=True,
        _expected_status=403,
    )
    assert forbidden_page.h1.string.strip() == 'You’re not allowed to see this page'
    danger_banners = forbidden_page.find_all('div', class_='banner-dangerous')
    assert len(danger_banners) == 1
    banner_text = normalize_spaces(danger_banners[0].text)
    for expected_fragment in (
        "You’re signed in as test@user.gov.uk.",
        "This invite is for another email address.",
        "Sign out and click the link again to accept this invite.",
    ):
        assert expected_fragment in banner_text
    assert mock_accept_invite.call_count == 0
def test_accept_invite_does_not_treat_email_addresses_as_case_sensitive(
    client_request,
    mocker,
    api_user_active,
    sample_invite,
    mock_accept_invite,
    mock_check_invite_token,
    mock_get_user_by_email
):
    """An invite whose email differs only in case still matches the user.

    The signed-in user's address is lower-case while the invite is
    upper-case; accepting it still redirects straight to the dashboard.
    """
    # the email address of api_user_active is 'test@user.gov.uk'
    sample_invite['email_address'] = 'TEST@user.gov.uk'
    mocker.patch('app.models.user.Users.client_method', return_value=[api_user_active])
    client_request.get(
        'main.accept_invite',
        token='thisisnotarealtoken',
        _expected_status=302,
        _expected_redirect=url_for(
            'main.service_dashboard',
            service_id=SERVICE_ONE_ID,
            _external=True,
        )
    )
def test_new_invited_user_verifies_and_added_to_service(
    client_request,
    service_one,
    sample_invite,
    api_user_active,
    mock_check_invite_token,
    mock_dont_get_user_by_email,
    mock_email_is_not_already_in_use,
    mock_register_user,
    mock_send_verify_code,
    mock_check_verify_code,
    mock_get_user,
    mock_update_user_attribute,
    mock_add_user_to_service,
    mock_accept_invite,
    mock_get_service,
    mock_get_invited_user_by_id,
    mock_get_service_templates,
    mock_get_template_statistics,
    mock_has_no_jobs,
    mock_has_permissions,
    mock_no_users_for_service,
    mock_get_service_statistics,
    mock_get_usage,
    mock_get_free_sms_fragment_limit,
    mock_get_returned_letter_statistics_with_no_returned_letters,
    mock_create_event,
    mocker,
):
    """End-to-end: invite → register → verify → added to service → dashboard.

    Walks the full happy path for a brand-new invited user and checks they
    are added to the service with the invite's permissions, the invite is
    accepted, and the session is cleaned up.
    """
    client_request.logout()
    # Step 1: follow the invite link; a new user is sent to registration.
    client_request.get(
        'main.accept_invite',
        token='thisisnotarealtoken',
        _expected_redirect=url_for('main.register_from_invite', _external=True),
    )
    # Step 2: submit the registration form, which sends the user to verify.
    registration_data = {
        'service': sample_invite['service'],
        'email_address': sample_invite['email_address'],
        'from_user': sample_invite['from_user'],
        'password': 'longpassword',
        'mobile_number': '+447890123456',
        'name': 'Invited User',
        'auth_type': 'sms_auth'
    }
    client_request.post(
        'main.register_from_invite',
        _data=registration_data,
        _expected_redirect=url_for('main.verify', _external=True),
    )
    # Step 3: post the sms code; the user should be added to the service
    # and sent on to the dashboard.
    dashboard_page = client_request.post(
        'main.verify',
        _data={'sms_code': '12345'},
        _follow_redirects=True,
    )
    granted_permissions = {'view_activity', 'send_messages', 'manage_service', 'manage_api_keys'}
    with client_request.session_transaction() as session:
        assert 'invited_user_id' not in session
        registered_user_id = session['user_id']
        mock_add_user_to_service.assert_called_with(
            registration_data['service'], registered_user_id, granted_permissions, [],
        )
        mock_accept_invite.assert_called_with(registration_data['service'], sample_invite['id'])
        mock_check_verify_code.assert_called_once_with(registered_user_id, '12345', 'sms')
        assert service_one['id'] == session['service_id']
    assert dashboard_page.find('h1').text == 'Dashboard'
@pytest.mark.parametrize('service_permissions, trial_mode, expected_endpoint, extra_args', (
    ([], True, 'main.service_dashboard', {}),
    ([], False, 'main.service_dashboard', {}),
    (['broadcast'], True, 'main.broadcast_tour', {'step_index': 1}),
    (['broadcast'], False, 'main.broadcast_tour_live', {'step_index': 1}),
))
def test_new_invited_user_is_redirected_to_correct_place(
    mocker,
    client_request,
    sample_invite,
    mock_check_invite_token,
    mock_check_verify_code,
    mock_get_user,
    mock_dont_get_user_by_email,
    mock_add_user_to_service,
    mock_get_invited_user_by_id,
    mock_events,
    mock_get_service,
    service_permissions,
    trial_mode,
    expected_endpoint,
    extra_args,
):
    """After verifying, a new invited user lands on the right page.

    Broadcast services send new users to a tour (trial vs live variant);
    all other services send them to the service dashboard.
    """
    client_request.logout()
    # Return a service whose trial mode / permissions come from parametrize.
    mocker.patch('app.service_api_client.get_service', return_value={
        'data': service_json(
            sample_invite['service'],
            restricted=trial_mode,
            permissions=service_permissions,
        )
    })
    client_request.get(
        'main.accept_invite',
        token='thisisnotarealtoken',
        _expected_status=302,
    )
    # Prime the session as if the user had just registered, so the verify
    # view knows who is completing sign-up.
    with client_request.session_transaction() as session:
        session['user_details'] = {
            'email': sample_invite['email_address'],
            'id': sample_invite['id'],
        }
    client_request.post(
        'main.verify',
        _data={'sms_code': '12345'},
        _expected_redirect=url_for(
            expected_endpoint,
            service_id=sample_invite['service'],
            _external=True,
            **extra_args
        )
    )
@freeze_time('2021-12-12 12:12:12')
def test_existing_user_accepts_and_sets_email_auth(
    client_request,
    api_user_active,
    service_one,
    sample_invite,
    mock_get_existing_user_by_email,
    mock_no_users_for_service,
    mock_accept_invite,
    mock_check_invite_token,
    mock_update_user_attribute,
    mock_add_user_to_service,
    mocker
):
    """Accepting an email_auth invite updates an existing user's auth type.

    The service has the email_auth permission, so two attribute updates
    happen: the email_access_validated_at stamp (using the frozen time)
    followed by the new auth_type.
    """
    sample_invite['email_address'] = api_user_active['email_address']
    service_one['permissions'].append('email_auth')
    sample_invite['auth_type'] = 'email_auth'
    client_request.get(
        'main.accept_invite',
        token='thisisnotarealtoken',
        _expected_status=302,
        _expected_redirect=url_for('main.service_dashboard', service_id=service_one['id'], _external=True),
    )
    mock_get_existing_user_by_email.assert_called_once_with('test@user.gov.uk')
    # Exactly two updates, in this order: validation stamp then auth_type.
    assert mock_update_user_attribute.call_args_list == [
        call(api_user_active['id'], email_access_validated_at='2021-12-12T12:12:12'),
        call(api_user_active['id'], auth_type='email_auth'),
    ]
    mock_add_user_to_service.assert_called_once_with(ANY, api_user_active['id'], ANY, ANY)
@freeze_time('2021-12-12 12:12:12')
def test_platform_admin_user_accepts_and_preserves_auth(
    client_request,
    platform_admin_user,
    service_one,
    sample_invite,
    mock_check_invite_token,
    mock_no_users_for_service,
    mock_accept_invite,
    mock_add_user_to_service,
    mocker
):
    """Platform admins keep their auth type when accepting an invite.

    Even though the invite asks for email_auth and the service allows it,
    the only attribute update is the email_access_validated_at timestamp.
    """
    sample_invite['email_address'] = platform_admin_user['email_address']
    sample_invite['auth_type'] = 'email_auth'
    service_one['permissions'].append('email_auth')
    mocker.patch('app.user_api_client.get_user_by_email', return_value=platform_admin_user)
    update_attribute_mock = mocker.patch(
        'app.user_api_client.update_user_attribute',
        return_value=platform_admin_user,
    )
    client_request.login(platform_admin_user)
    client_request.get(
        'main.accept_invite',
        token='thisisnotarealtoken',
        _expected_status=302,
        _expected_redirect=url_for('main.service_dashboard', service_id=service_one['id'], _external=True),
    )
    # Only the validation timestamp is touched — never auth_type.
    update_attribute_mock.assert_called_once_with(
        platform_admin_user['id'],
        email_access_validated_at='2021-12-12T12:12:12',
    )
    assert mock_add_user_to_service.called
@freeze_time('2021-12-12 12:12:12')
def test_existing_user_doesnt_get_auth_changed_by_service_without_permission(
    client_request,
    api_user_active,
    service_one,
    sample_invite,
    mock_get_user_by_email,
    mock_no_users_for_service,
    mock_check_invite_token,
    mock_accept_invite,
    mock_update_user_attribute,
    mock_add_user_to_service,
    mocker
):
    """A service without the email_auth permission cannot change auth type.

    The invite requests email_auth, but because the service lacks the
    permission the only user update is the email_access_validated_at stamp
    — assert_called_once_with proves no second (auth_type) call was made.
    """
    sample_invite['email_address'] = api_user_active['email_address']
    assert 'email_auth' not in service_one['permissions']
    sample_invite['auth_type'] = 'email_auth'
    client_request.get(
        'main.accept_invite',
        token='thisisnotarealtoken',
        _expected_status=302,
        _expected_redirect=url_for('main.service_dashboard', service_id=service_one['id'], _external=True),
    )
    mock_update_user_attribute.assert_called_once_with(
        api_user_active['id'],
        email_access_validated_at='2021-12-12T12:12:12',
    )
@freeze_time('2021-12-12 12:12:12')
def test_existing_email_auth_user_without_phone_cannot_set_sms_auth(
    client_request,
    api_user_active,
    service_one,
    sample_invite,
    mock_no_users_for_service,
    mock_check_invite_token,
    mock_accept_invite,
    mock_update_user_attribute,
    mock_add_user_to_service,
    mocker
):
    """A user with no mobile number cannot be switched to sms auth.

    The invite requests sms_auth but the existing email-auth user has no
    phone number, so the only update is the email_access_validated_at stamp
    — assert_called_once_with proves auth_type was left alone.
    """
    sample_invite['email_address'] = api_user_active['email_address']
    service_one['permissions'].append('email_auth')
    # An email-auth user with no phone number on record.
    api_user_active['auth_type'] = 'email_auth'
    api_user_active['mobile_number'] = None
    sample_invite['auth_type'] = 'sms_auth'
    mocker.patch('app.user_api_client.get_user_by_email', return_value=api_user_active)
    client_request.get(
        'main.accept_invite',
        token='thisisnotarealtoken',
        _expected_status=302,
        _expected_redirect=url_for('main.service_dashboard', service_id=service_one['id'], _external=True),
    )
    mock_update_user_attribute.assert_called_once_with(
        api_user_active['id'],
        email_access_validated_at='2021-12-12T12:12:12',
    )
@freeze_time('2021-12-12 12:12:12')
def test_existing_email_auth_user_with_phone_can_set_sms_auth(
    client_request,
    api_user_active,
    service_one,
    sample_invite,
    mock_no_users_for_service,
    mock_get_existing_user_by_email,
    mock_check_invite_token,
    mock_accept_invite,
    mock_update_user_attribute,
    mock_add_user_to_service,
    mocker
):
    """A user who does have a phone number can be switched to sms auth.

    Complements the no-phone test above: here the invite's sms_auth request
    results in two updates — the email_access_validated_at stamp followed
    by the auth_type change.
    """
    sample_invite['email_address'] = api_user_active['email_address']
    service_one['permissions'].append('email_auth')
    sample_invite['auth_type'] = 'sms_auth'
    client_request.get(
        'main.accept_invite',
        token='thisisnotarealtoken',
        _expected_status=302,
        _expected_redirect=url_for('main.service_dashboard', service_id=service_one['id'], _external=True),
    )
    mock_get_existing_user_by_email.assert_called_once_with(sample_invite['email_address'])
    assert mock_update_user_attribute.call_args_list == [
        call(api_user_active['id'], email_access_validated_at='2021-12-12T12:12:12'),
        call(api_user_active['id'], auth_type='sms_auth'),
    ]
|
alphagov/notifications-admin
|
tests/app/main/views/test_accept_invite.py
|
Python
|
mit
| 27,419
|
[
"VisIt"
] |
bc93e30f88e2f6bff0ee3b044af9345598097c77c2bb9cd99bf52e3aeaf2ebb5
|
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.