text
stringlengths 12
1.05M
| repo_name
stringlengths 5
86
| path
stringlengths 4
191
| language
stringclasses 1
value | license
stringclasses 15
values | size
int32 12
1.05M
| keyword
listlengths 1
23
| text_hash
stringlengths 64
64
|
|---|---|---|---|---|---|---|---|
"""
==========================================================================
Using the free water elimination model to remove free water contamination
==========================================================================
As shown previously (see :ref:`example_reconst_dti`), the diffusion tensor
model is a simple way to characterize diffusion anisotropy. However, in regions
near the cerebral ventricle and parenchyma can be underestimated by partial
volume effects of the cerebral spinal fluid (CSF). This free water
contamination can particularly corrupt diffusion tensor imaging analysis of
microstructural changes when different groups of subject show different brain
morphology (e.g. brain ventricle enlargement associated with brain tissue
atrophy that occurs in several brain pathologies and ageing).
A way to remove this free water influences is to expand the DTI model to take
into account an extra compartment representing the contributions of free water
diffusion. The expression of the expanded DTI model is shown below:
.. math::
S(\mathbf{g}, b) = S_0(1-f)e^{-b\mathbf{g}^T \mathbf{D}
\mathbf{g}}+S_0fe^{-b D_{iso}}
where $\mathbf{g}$ and $b$ are diffusion gradient direction and weighted
(more information see :ref:`example_reconst_dti`), $S(\mathbf{g}, b)$ is the
diffusion-weighted signal measured, $S_0$ is the signal in a measurement with
no diffusion weighting, $\mathbf{D}$ is the diffusion tensor, $f$ the volume
fraction of the free water component, and $D_iso$ is the isotropic value of the
free water diffusion (normally set to $3.0 \times 10^{-3} mm^{2}s^{-1}$).
In this example, we show how to process a diffusion weighting dataset using the
free water elimination.
Let's start by importing the relevant modules:
"""
import numpy as np
import dipy.reconst.fwdti as fwdti
import dipy.reconst.dti as dti
import matplotlib.pyplot as plt
from dipy.data import fetch_cenir_multib
from dipy.data import read_cenir_multib
from dipy.segment.mask import median_otsu
"""
Without spatial constrains the free water elimination model cannot be solved
in data acquired from one non-zero b-value _[Hoy2014]. Therefore, here we
download a dataset that was required from multiple b-values.
"""
fetch_cenir_multib(with_raw=False)
"""
From the downloaded data, we read only the data acquired with b-values up to
2000 $s.mm^{-2} to decrease the influence of non-Gaussian diffusion effects
of the tisse which are not taken into account by the free water elimination
model _[Hoy2014].
"""
bvals = [200, 400, 1000, 2000]
img, gtab = read_cenir_multib(bvals)
data = img.get_data()
affine = img.affine
"""
The free water DTI model can take some minutes to process the full data set.
Thus, we remove the background of the image to avoid unnecessary calculations.
"""
maskdata, mask = median_otsu(data, 4, 2, False, vol_idx=[0, 1], dilate=1)
"""
Moreover, for illustration purposes we process only an axial slice of the
data.
"""
axial_slice = 40
mask_roi = np.zeros(data.shape[:-1], dtype=bool)
mask_roi[:, :, axial_slice] = mask[:, :, axial_slice]
"""
The free water elimination model fit can then be initialized by instantiating
a FreeWaterTensorModel class object:
"""
fwdtimodel = fwdti.FreeWaterTensorModel(gtab)
"""
The data can then be fitted using the ``fit`` function of the defined model
object:
"""
fwdtifit = fwdtimodel.fit(data, mask=mask_roi)
"""
This 2-steps procedure will create a FreeWaterTensorFit object which contains
all the diffusion tensor statistics free for free water contaminations. Below
we extract the fractional anisotropy (FA) and the mean diffusivity (MD) of the
free water diffusion tensor."""
FA = fwdtifit.fa
MD = fwdtifit.md
"""
For comparison we also compute the same standard measures processed by the
standard DTI model
"""
dtimodel = dti.TensorModel(gtab)
dtifit = dtimodel.fit(data, mask=mask_roi)
dti_FA = dtifit.fa
dti_MD = dtifit.md
"""
Below the FA values for both free water elimnantion DTI model and standard DTI
model are plotted in panels A and B, while the repective MD values are ploted
in panels D and E. For a better visualization of the effect of the free water
correction, the differences between these two metrics are shown in panels C and
E. In addition to the standard diffusion statistics, the estimated volume
fraction of the free water contamination is shown on panel G.
"""
fig1, ax = plt.subplots(2, 4, figsize=(12, 6),
subplot_kw={'xticks': [], 'yticks': []})
fig1.subplots_adjust(hspace=0.3, wspace=0.05)
ax.flat[0].imshow(FA[:, :, axial_slice].T, origin='lower',
cmap='gray', vmin=0, vmax=1)
ax.flat[0].set_title('A) fwDTI FA')
ax.flat[1].imshow(dti_FA[:, :, axial_slice].T, origin='lower',
cmap='gray', vmin=0, vmax=1)
ax.flat[1].set_title('B) standard DTI FA')
FAdiff = abs(FA[:, :, axial_slice] - dti_FA[:, :, axial_slice])
ax.flat[2].imshow(FAdiff.T, cmap='gray', origin='lower', vmin=0, vmax=1)
ax.flat[2].set_title('C) FA difference')
ax.flat[3].axis('off')
ax.flat[4].imshow(MD[:, :, axial_slice].T, origin='lower',
cmap='gray', vmin=0, vmax=2.5e-3)
ax.flat[4].set_title('D) fwDTI MD')
ax.flat[5].imshow(dti_MD[:, :, axial_slice].T, origin='lower',
cmap='gray', vmin=0, vmax=2.5e-3)
ax.flat[5].set_title('E) standard DTI MD')
MDdiff = abs(MD[:, :, axial_slice] - dti_MD[:, :, axial_slice])
ax.flat[6].imshow(MDdiff.T, origin='lower', cmap='gray', vmin=0, vmax=2.5e-3)
ax.flat[6].set_title('F) MD difference')
F = fwdtifit.f
ax.flat[7].imshow(F[:, :, axial_slice].T, origin='lower',
cmap='gray', vmin=0, vmax=1)
ax.flat[7].set_title('G) free water volume')
plt.show()
fig1.savefig('In_vivo_free_water_DTI_and_standard_DTI_measures.png')
"""
.. figure:: In_vivo_free_water_DTI_and_standard_DTI_measures.png
:align: center
** In vivo diffusion measures obtain from the free water DTI and standard
DTI. The values of Fractional Anisotropy for the free water DTI model and
standard DTI model and their difference are shown in the upper panels (A-C),
while respective MD values are shown in the lower panels (D-F). In addition
the free water volume fraction estimated from the fwDTI model is shown in
panel G**.
From the figure, one can observe that the free water elimination model
produces in general higher values of FA and lower values of MD than the
standard DTI model. These differences in FA and MD estimation are expected
due to the suppression of the free water isotropic diffusion components.
Unexpected high amplitudes of FA are however observed in the periventricular
gray mater. This is a known artefact of regions associated to voxels with high
water volume fraction (i.e. voxels containing basically CSF). We are able to
remove this problematic voxels by excluding all FA values associated with
measured volume fractions above a reasonable threshold of 0.7:
"""
FA[F > 0.7] = 0
dti_FA[F > 0.7] = 0
"""
Above we reproduce the plots of the in vivo FA from the two DTI fits and where
we can see that the inflated FA values were practically removed:
"""
fig1, ax = plt.subplots(1, 3, figsize=(9, 3),
subplot_kw={'xticks': [], 'yticks': []})
fig1.subplots_adjust(hspace=0.3, wspace=0.05)
ax.flat[0].imshow(FA[:, :, axial_slice].T, origin='lower',
cmap='gray', vmin=0, vmax=1)
ax.flat[0].set_title('A) fwDTI FA')
ax.flat[1].imshow(dti_FA[:, :, axial_slice].T, origin='lower',
cmap='gray', vmin=0, vmax=1)
ax.flat[1].set_title('B) standard DTI FA')
FAdiff = abs(FA[:, :, axial_slice] - dti_FA[:, :, axial_slice])
ax.flat[2].imshow(FAdiff.T, cmap='gray', origin='lower', vmin=0, vmax=1)
ax.flat[2].set_title('C) FA difference')
plt.show()
fig1.savefig('In_vivo_free_water_DTI_and_standard_DTI_corrected.png')
"""
.. figure:: In_vivo_free_water_DTI_and_standard_DTI_corrected.png
:align: center
** In vivo FA measures obtain from the free water DTI (A) and standard
DTI (B) and their difference (C). Problematic inflated FA values of the
images were removed by dismissing voxels above a volume fraction threshold
of 0.7 **.
References:
.. [Hoy2014] Hoy, A.R., Koay, C.G., Kecskemeti, S.R., Alexander, A.L., 2014.
Optimization of a free water elimination two-compartmental model
for diffusion tensor imaging. NeuroImage 103, 323-333.
doi: 10.1016/j.neuroimage.2014.09.053
"""
|
villalonreina/dipy
|
doc/examples/reconst_fwdti.py
|
Python
|
bsd-3-clause
| 8,467
|
[
"Gaussian"
] |
969baf1229f9a6cf40275e95c1cb097d22087427c4ec4f5cbfc7151bce7d523e
|
# Copyright 2016 The TensorFlow Authors. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# ==============================================================================
"""Canonicalizing list comprehensions into for and if statements.
e.g.
result = [x * x for x in xs]
becomes
result = []
for x in xs:
elt = x * x
result.append(elt)
"""
from __future__ import absolute_import
from __future__ import division
from __future__ import print_function
import gast
from tensorflow.contrib.py2tf.pyct import parser
from tensorflow.contrib.py2tf.pyct import templates
from tensorflow.contrib.py2tf.pyct import transformer
class ListCompCanonicalizationTransformer(transformer.Base):
  """NodeTransformer to canonicalize list comprehensions.

  Rewrites ``result = [elt for ... in ... if ...]`` assignments into an
  explicit empty-list initialization followed by nested for/if statements
  that append each element.
  """

  def __init__(self, context):
    super(ListCompCanonicalizationTransformer, self).__init__(context)

  def instantiate_list_node(self):
    # AST for a fresh empty-list literal.
    return parser.parse_str('[]').body[0].value

  def make_update_list_node(self, list_, elt):
    # AST statement appending `elt` to `list_`.
    return templates.replace('list_.append(elt)', list_=list_, elt=elt)[0]

  def visit_Assign(self, node):
    if not isinstance(node.value, gast.ListComp):
      return node
    if len(node.targets) > 1:
      raise ValueError('Only support single assignment.')
    return self.canonicalize_listcomp(node.targets[0], node.value)

  def canonicalize_listcomp(self, result_node, list_comp_node):
    make_list = templates.replace(
        'list_ = create_list',
        list_=result_node,
        create_list=self.instantiate_list_node())
    body = self.make_update_list_node(result_node, list_comp_node.elt)
    # Wrap from the innermost generator outwards so that the resulting
    # statement nesting matches the comprehension's evaluation order.
    for generator in reversed(list_comp_node.generators):
      for condition in reversed(generator.ifs):
        body = templates.replace(
            'if test: loop_body', test=condition, loop_body=body)
      body = templates.replace(
          'for target in iter_: loop_body',
          iter_=generator.iter,
          target=generator.target,
          loop_body=body)
    return make_list + body
def transform(node, context):
  """Canonicalize every list comprehension in `node` into explicit loops."""
  canonicalizer = ListCompCanonicalizationTransformer(context)
  return canonicalizer.visit(node)
|
zasdfgbnm/tensorflow
|
tensorflow/contrib/py2tf/converters/list_comprehension.py
|
Python
|
apache-2.0
| 2,616
|
[
"VisIt"
] |
cf99566ab6c60e61980408a173d36cc315decc6f2d25c428bfbc6de745f27190
|
from dgw.data.parsers import bam as bam_parser
__author__ = 'saulius'
import unittest
from numpy.testing import *
import numpy as np
# -- Stub classes to simulate pysam behaviour ---
class StubAlignedRead(object):
    """Minimal stand-in for a pysam ``AlignedRead``.

    Only the pieces used by the bam parser are provided: the alignment
    start (``pos``), the alignment length (``alen``), the strand flag
    (``is_reverse``) and the derived end coordinate (``aend``).
    """

    def __init__(self, pos, alen, is_reverse):
        self.pos = pos
        self.alen = alen
        self.is_reverse = is_reverse

    @property
    def aend(self):
        # End coordinate of the alignment, mirroring pysam's semantics.
        return self.alen + self.pos
class StubSamfile(object):
    """Simulates pysam ``Samfile`` behaviour for the tests.

    ``fetch`` yields every read from *fetch_response* that overlaps the
    queried interval, provided the query names a known reference and the
    interval lies within that reference's length.
    """

    _fetch_response = None
    _references = None
    _lengths = None

    def __init__(self, fetch_response, references, lengths):
        """
        :param fetch_response: reads that :meth:`fetch` may yield
        :param references: list of reference (chromosome) names in the genome
        :param lengths: lengths of those references, in the same order
        """
        self._fetch_response = fetch_response
        self._references = references
        self._lengths = lengths

    @property
    def references(self):
        return self._references

    @property
    def lengths(self):
        return self._lengths

    def fetch(self, chromosome, start, end):
        if chromosome not in self.references:
            raise ValueError('Invalid reference')
        chromosome_length = self.lengths[self.references.index(chromosome)]
        if start < 0:
            raise ValueError('Start out of bounds {0} < 0'.format(start))
        elif end > chromosome_length:
            raise ValueError('End out of bounds {0} > {1}'.format(end, chromosome_length))
        # Yield only reads overlapping [start, end).
        for read in self._fetch_response:
            if read.aend >= start and read.pos < end:
                yield read
# -- Tests ------------------------------------------------------------
class TestReadExtendFunctions(unittest.TestCase):
    """Tests for extending reads to a fixed fragment length."""

    def test_extend_regular_read(self):
        # A forward read keeps its start and is extended to the right.
        read = StubAlignedRead(100, 36, False)
        extended = bam_parser._extend_read_to(read, 100)
        self.assertEqual((100, 200), extended)

    def test_extend_reverse_read(self):
        # A reverse read keeps its end and is extended to the left.
        read = StubAlignedRead(164, 36, True)
        extended = bam_parser._extend_read_to(read, 100)
        self.assertEqual((100, 200), extended)

    def test_invalid_extend_raises_exception(self):
        # Extending to a length shorter than the alignment is invalid: 10 < 36.
        read = StubAlignedRead(100, 36, True)
        self.assertRaises(ValueError, bam_parser._extend_read_to, read, 10)
class TestReadCountForRegionReading(unittest.TestCase):
    """Tests for ``_read_samfile_region`` pileup counting.

    The ASCII diagrams picture the coverage: capital letters are the raw
    reads created in ``setUp``, lower-case letters the portions added by
    ``extend_to``, ``|`` marks bin boundaries and ``[ ]`` / ``( )`` brackets
    the queried region.
    """
    def setUp(self):
        # Reads A and B are reverse-strand, C and D forward-strand.
        aligned_reads = [ StubAlignedRead(10, 10, True), # A
                          StubAlignedRead(20, 10, True), # B
                          StubAlignedRead(5, 10, False), # C
                          StubAlignedRead(25, 10, False) # D
                        ]
        #01234567890123456789012345678901234
        #..........AAAAAAAAAABBBBBBBBBB.....
        #.....CCCCCCCCCC..........DDDDDDDDDD
        self.samfile = StubSamfile(aligned_reads, ['chr1', 'chr2'], [50, 20])

    def test_non_extended_read_no_binning(self):
        peak_data = bam_parser._read_samfile_region(self.samfile, 'chr1', 3, 40, resolution=1, extend_to=None)
        correct = np.array([0,0,1,1,1,1,1,2,2,2,2,2,1,1,1,1,1,1,1,1,1,1,2,2,2,2,2,1,1,1,1,1,0,0,0,0,0])
        assert_array_equal(correct, peak_data)

    def test_non_extended_read_binning(self):
        peak_data = bam_parser._read_samfile_region(self.samfile, 'chr1', 3, 43, resolution=5, extend_to=None)
        correct = np.array([1,2,2,2,2,2,1,0])
        assert_array_equal(correct, peak_data)

    def test_extended_read_no_binning(self):
        peak_data = bam_parser._read_samfile_region(self.samfile, 'chr1', 3, 40, resolution=1, extend_to=15)
        #012|3456789012345678901234567890123456789|
        #...|..aaaaaAAAAAAAAAA....................|
        #...|............bbbbbBBBBBBBBBB..........|
        #...|..CCCCCCCCCCccccc.....DDDDDDDDDDddddd|
        correct = np.array([0,0,2,2,2,2,2,2,2,2,2,2,3,3,3,3,3,1,1,1,1,1,2,2,2,2,2,1,1,1,1,1,1,1,1,1,1])
        assert_array_equal(correct, peak_data)

    def test_extended_read_binning(self):
        peak_data = bam_parser._read_samfile_region(self.samfile, 'chr1', 3, 43, resolution=5, extend_to=15)
        #012[34567|89012|34567|89012|34567|89012|34567|89012]
        #...[..aaa|aaAAA|AAAAA|AA...|.....|.....|.....|.....]
        #...[.....|.....|..bbb|bbBBB|BBBBB|BB...|.....|.....]
        #...[..CCC|CCCCC|CCccc|cc...|..DDD|DDDDD|DDddd|dd...]
        correct = np.array([2,2,3,3,2,2,1,1])
        assert_array_equal(correct, peak_data)

    def test_extended_read_no_binning_extended_boundaries(self):
        #0123456789012345[67]8901234567890123456789
        #.....aaaaaAAAAAA[AA]AA....................
        #...............b[bb]bbBBBBBBBBBB..........
        #.....CCCCCCCCCCc[cc]cc.....DDDDDDDDDDddddd
        peak_data = bam_parser._read_samfile_region(self.samfile, 'chr1', 16, 18, resolution=1, extend_to=15)
        correct = np.array([3,3])
        assert_array_equal(correct, peak_data)
        #012345678901234567890123456789012345678[9012]
        #.....aaaaaAAAAAAAAAA...................[....]
        #...............bbbbbBBBBBBBBBB.........[....]
        #.....CCCCCCCCCCccccc.....DDDDDDDDDDdddd[d...]
        peak_data = bam_parser._read_samfile_region(self.samfile, 'chr1', 39, 43, resolution=1, extend_to=15)
        correct = np.array([1,0,0,0])
        assert_array_equal(correct, peak_data)
        #01234567890123456789012345678901234567890[123]
        #.....aaaaaAAAAAAAAAA.....................[...]
        #...............bbbbbBBBBBBBBBB...........[...]
        #.....CCCCCCCCCCccccc.....DDDDDDDDDDddddd[....]
        peak_data = bam_parser._read_samfile_region(self.samfile, 'chr1', 41, 44, resolution=1, extend_to=15)
        correct = np.array([0,0,0])
        assert_array_equal(correct, peak_data)

    def test_extended_read_no_binning_extended_boundaries_edge_case(self):
        #[012345]67890123456789012345678901234567890123
        #[.....a]aaaaAAAAAAAAAA........................
        #[......].........bbbbbBBBBBBBBBB..............
        #[.....C]CCCCCCCCCccccc.....DDDDDDDDDDddddd....
        peak_data = bam_parser._read_samfile_region(self.samfile, 'chr1', 0, 6, resolution=1, extend_to=15)
        correct = np.array([0,0,0,0,0,2])
        assert_array_equal(correct, peak_data)
        #[01234]567890123456789012345678901234567890123
        #[.....]aaaaaAAAAAAAAAA........................
        #[.....]..........bbbbbBBBBBBBBBB..............
        #[.....]CCCCCCCCCCccccc.....DDDDDDDDDDddddd....
        peak_data = bam_parser._read_samfile_region(self.samfile, 'chr1', 0, 5, resolution=1, extend_to=15)
        correct = np.array([0,0,0,0,0])
        assert_array_equal(correct, peak_data)

    def test_extended_reads_extended_boundaries_binning(self):
        #012345678901234567[8901234|5678901|2345678]90123
        #.....aaaaaAAAAAAAA[AA.....|.......|.......].....
        #...............bbb[bbBBBBB|BBBBB..|.......].....
        #.....CCCCCCCCCCccc[cc.....|DDDDDDD|DDDdddd]d....
        peak_data = bam_parser._read_samfile_region(self.samfile, 'chr1', 18, 39, resolution=7, extend_to=15)
        correct = np.array([3,2,1])
        assert_array_equal(correct, peak_data)

    def test_read_samfile_region_raises_exception_when_read_region_is_bad(self):
        # Negative start and end past the chromosome length must both raise.
        self.assertRaises(ValueError, bam_parser._read_samfile_region, self.samfile, 'chr1', -20, 20, resolution=4, extend_to=15)
        self.assertRaises(ValueError, bam_parser._read_samfile_region, self.samfile, 'chr1', 10, 60, resolution=4, extend_to=15)
# Run the test suite when this module is executed directly.
if __name__ == '__main__':
    unittest.main()
|
lukauskas/dgw
|
dgw/tests/data/parsers/test_bam.py
|
Python
|
gpl-3.0
| 7,992
|
[
"pysam"
] |
b2ca609eda392d28cd023cf39e1dad2686f8c4c5e048032e0d3a9a7ed48aa69d
|
# Copyright (c) 2003-2014 LOGILAB S.A. (Paris, FRANCE).
# http://www.logilab.fr/ -- mailto:contact@logilab.fr
#
# This program is free software; you can redistribute it and/or modify it under
# the terms of the GNU General Public License as published by the Free Software
# Foundation; either version 2 of the License, or (at your option) any later
# version.
#
# This program is distributed in the hope that it will be useful, but WITHOUT
# ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS
# FOR A PARTICULAR PURPOSE. See the GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License along with
# this program; if not, write to the Free Software Foundation, Inc.,
# 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301, USA.
"""classes checker for Python code
"""
from __future__ import generators
import sys
from collections import defaultdict
import astroid
from astroid import YES, Instance, are_exclusive, AssAttr, Class
from astroid.bases import Generator, BUILTINS
from astroid.inference import InferenceContext
from pylint.interfaces import IAstroidChecker
from pylint.checkers import BaseChecker
from pylint.checkers.utils import (
PYMETHODS, overrides_a_method, check_messages, is_attr_private,
is_attr_protected, node_frame_class, safe_infer, is_builtin_object,
decorated_with_property, unimplemented_abstract_methods)
import six
# Name of the iterator-advance method differs between Python 2 and 3.
if sys.version_info >= (3, 0):
    NEXT_METHOD = '__next__'
else:
    NEXT_METHOD = 'next'
# Presence of either method makes a class usable in iteration.
ITER_METHODS = ('__iter__', '__getitem__')
def _called_in_methods(func, klass, methods):
    """ Check if the func was called in any of the given methods,
    belonging to the *klass*. Returns True if so, False otherwise.
    """
    if not isinstance(func, astroid.Function):
        return False
    for method in methods:
        try:
            infered = klass.getattr(method)
        except astroid.NotFoundError:
            # the class does not define this method at all
            continue
        for infer_method in infered:
            # walk every call expression inside the candidate method
            for callfunc in infer_method.nodes_of_class(astroid.CallFunc):
                try:
                    bound = next(callfunc.func.infer())
                except (astroid.InferenceError, StopIteration):
                    continue
                if not isinstance(bound, astroid.BoundMethod):
                    continue
                # unwrap the bound (and possibly unbound) method proxies to
                # reach the underlying function node
                func_obj = bound._proxied
                if isinstance(func_obj, astroid.UnboundMethod):
                    func_obj = func_obj._proxied
                # NOTE(review): match is by name only — presumably adequate
                # within a single class; confirm for shadowed names.
                if func_obj.name == func.name:
                    return True
    return False
def class_is_abstract(node):
    """Return True if the given class node should be considered abstract.

    A class is abstract when at least one method defined in its own frame
    is abstract (``is_abstract`` with ``pass_is_abstract=False``).
    """
    return any(method.is_abstract(pass_is_abstract=False)
               for method in node.methods()
               if method.parent.frame() is node)
def _is_attribute_property(name, klass):
    """Check whether attribute *name* of *klass* is a property.

    A candidate counts as a property when it infers either to a function
    decorated with ``property`` (or a ``property`` subclass) or to an
    object whose pytype is the builtin ``property``.
    Returns ``True`` if any candidate qualifies, ``False`` otherwise.
    """
    try:
        candidates = klass.getattr(name)
    except astroid.NotFoundError:
        # attribute not defined anywhere on the class
        return False
    property_name = "{0}.property".format(BUILTINS)
    for candidate in candidates:
        try:
            inferred = next(candidate.infer())
        except astroid.InferenceError:
            continue
        if (isinstance(inferred, astroid.Function) and
                decorated_with_property(inferred)):
            return True
        if inferred.pytype() == property_name:
            return True
    return False
MSGS = {
'F0202': ('Unable to check methods signature (%s / %s)',
'method-check-failed',
'Used when Pylint has been unable to check methods signature \
compatibility for an unexpected reason. Please report this kind \
if you don\'t make sense of it.'),
'E0202': ('An attribute defined in %s line %s hides this method',
'method-hidden',
'Used when a class defines a method which is hidden by an '
'instance attribute from an ancestor class or set by some '
'client code.'),
'E0203': ('Access to member %r before its definition line %s',
'access-member-before-definition',
'Used when an instance member is accessed before it\'s actually\
assigned.'),
'W0201': ('Attribute %r defined outside __init__',
'attribute-defined-outside-init',
'Used when an instance attribute is defined outside the __init__\
method.'),
'W0212': ('Access to a protected member %s of a client class', # E0214
'protected-access',
'Used when a protected member (i.e. class member with a name \
beginning with an underscore) is access outside the class or a \
descendant of the class where it\'s defined.'),
'E0211': ('Method has no argument',
'no-method-argument',
'Used when a method which should have the bound instance as \
first argument has no argument defined.'),
'E0213': ('Method should have "self" as first argument',
'no-self-argument',
'Used when a method has an attribute different the "self" as\
first argument. This is considered as an error since this is\
a so common convention that you shouldn\'t break it!'),
'C0202': ('Class method %s should have %s as first argument', # E0212
'bad-classmethod-argument',
'Used when a class method has a first argument named differently '
'than the value specified in valid-classmethod-first-arg option '
'(default to "cls"), recommended to easily differentiate them '
'from regular instance methods.'),
'C0203': ('Metaclass method %s should have %s as first argument', # E0214
'bad-mcs-method-argument',
'Used when a metaclass method has a first agument named '
'differently than the value specified in valid-classmethod-first'
'-arg option (default to "cls"), recommended to easily '
'differentiate them from regular instance methods.'),
'C0204': ('Metaclass class method %s should have %s as first argument',
'bad-mcs-classmethod-argument',
'Used when a metaclass class method has a first argument named '
'differently than the value specified in valid-metaclass-'
'classmethod-first-arg option (default to "mcs"), recommended to '
'easily differentiate them from regular instance methods.'),
'W0211': ('Static method with %r as first argument',
'bad-staticmethod-argument',
'Used when a static method has "self" or a value specified in '
'valid-classmethod-first-arg option or '
'valid-metaclass-classmethod-first-arg option as first argument.'
),
'R0201': ('Method could be a function',
'no-self-use',
'Used when a method doesn\'t use its bound instance, and so could\
be written as a function.'
),
'E0221': ('Interface resolved to %s is not a class',
'interface-is-not-class',
'Used when a class claims to implement an interface which is not \
a class.'),
'E0222': ('Missing method %r from %s interface',
'missing-interface-method',
'Used when a method declared in an interface is missing from a \
class implementing this interface'),
'W0221': ('Arguments number differs from %s method',
'arguments-differ',
'Used when a method has a different number of arguments than in \
the implemented interface or in an overridden method.'),
'W0222': ('Signature differs from %s method',
'signature-differs',
'Used when a method signature is different than in the \
implemented interface or in an overridden method.'),
'W0223': ('Method %r is abstract in class %r but is not overridden',
'abstract-method',
'Used when an abstract method (i.e. raise NotImplementedError) is \
not overridden in concrete class.'
),
'F0220': ('failed to resolve interfaces implemented by %s (%s)', # W0224
'unresolved-interface',
'Used when a Pylint as failed to find interfaces implemented by \
a class'),
'W0231': ('__init__ method from base class %r is not called',
'super-init-not-called',
'Used when an ancestor class method has an __init__ method \
which is not called by a derived class.'),
'W0232': ('Class has no __init__ method',
'no-init',
'Used when a class has no __init__ method, neither its parent \
classes.'),
'W0233': ('__init__ method from a non direct base class %r is called',
'non-parent-init-called',
'Used when an __init__ method is called on a class which is not \
in the direct ancestors for the analysed class.'),
'W0234': ('__iter__ returns non-iterator',
'non-iterator-returned',
'Used when an __iter__ method returns something which is not an \
iterable (i.e. has no `%s` method)' % NEXT_METHOD),
'E0235': ('__exit__ must accept 3 arguments: type, value, traceback',
'bad-context-manager',
'Used when the __exit__ special method, belonging to a \
context manager, does not accept 3 arguments \
(type, value, traceback).'),
'E0236': ('Invalid object %r in __slots__, must contain '
'only non empty strings',
'invalid-slots-object',
'Used when an invalid (non-string) object occurs in __slots__.'),
'E0237': ('Assigning to attribute %r not defined in class slots',
'assigning-non-slot',
'Used when assigning to an attribute not defined '
'in the class slots.'),
'E0238': ('Invalid __slots__ object',
'invalid-slots',
'Used when an invalid __slots__ is found in class. '
'Only a string, an iterable or a sequence is permitted.'),
'E0239': ('Inheriting %r, which is not a class.',
'inherit-non-class',
'Used when a class inherits from something which is not a '
'class.'),
}
class ClassChecker(BaseChecker):
    """checks for :
    * methods without self as first argument
    * overridden methods signature
    * access only to existent members via self
    * attributes not defined in the __init__ method
    * supported interfaces implementation
    * unreachable code
    """

    __implements__ = (IAstroidChecker,)

    # configuration section name
    name = 'classes'
    # messages
    msgs = MSGS
    priority = -2
    # configuration options
    # NOTE: the help strings below use backslash continuations; their
    # continuation lines are kept at column 0 to preserve the runtime text.
    options = (('ignore-iface-methods',
                {'default' : (#zope interface
                    'isImplementedBy', 'deferred', 'extends', 'names',
                    'namesAndDescriptions', 'queryDescriptionFor', 'getBases',
                    'getDescriptionFor', 'getDoc', 'getName', 'getTaggedValue',
                    'getTaggedValueTags', 'isEqualOrExtendedBy', 'setTaggedValue',
                    'isImplementedByInstancesOf',
                    # twisted
                    'adaptWith',
                    # logilab.common interface
                    'is_implemented_by'),
                 'type' : 'csv',
                 'metavar' : '<method names>',
                 'help' : 'List of interface methods to ignore, \
separated by a comma. This is used for instance to not check methods defines \
in Zope\'s Interface base class.'}
               ),
               ('defining-attr-methods',
                {'default' : ('__init__', '__new__', 'setUp'),
                 'type' : 'csv',
                 'metavar' : '<method names>',
                 'help' : 'List of method names used to declare (i.e. assign) \
instance attributes.'}
               ),
               ('valid-classmethod-first-arg',
                {'default' : ('cls',),
                 'type' : 'csv',
                 'metavar' : '<argument names>',
                 'help' : 'List of valid names for the first argument in \
a class method.'}
               ),
               ('valid-metaclass-classmethod-first-arg',
                {'default' : ('mcs',),
                 'type' : 'csv',
                 'metavar' : '<argument names>',
                 'help' : 'List of valid names for the first argument in \
a metaclass class method.'}
               ),
               ('exclude-protected',
                {
                    'default': (
                        # namedtuple public API.
                        '_asdict', '_fields', '_replace', '_source', '_make'),
                    'type': 'csv',
                    'metavar': '<protected access exclusions>',
                    'help': ('List of member names, which should be excluded '
                             'from the protected access warning.')}
               ))
def __init__(self, linter=None):
    BaseChecker.__init__(self, linter)
    # stack of {attribute name: access nodes} maps, one per class being
    # visited (pushed in visit_class, popped in leave_class)
    self._accessed = []
    # NOTE(review): presumably a stack of first-argument names of visited
    # methods — writer is not visible here, confirm in _check_first_arg_for_type
    self._first_attrs = []
    # flag used by the 'no-self-use' check; reset per method
    self._meth_could_be_func = None
def visit_class(self, node):
    """init visit variable _accessed and check interfaces
    """
    # fresh member-access map for this class
    self._accessed.append(defaultdict(list))
    self._check_bases_classes(node)
    self._check_interfaces(node)
    # if not an interface, exception, metaclass
    if node.type == 'class':
        try:
            node.local_attr('__init__')
        except astroid.NotFoundError:
            # plain class with no __init__ defined in its own body
            self.add_message('no-init', args=node, node=node)
    self._check_slots(node)
    self._check_proper_bases(node)
@check_messages('inherit-non-class')
def _check_proper_bases(self, node):
    """Emit 'inherit-non-class' for every base that is not a class.

    Bases whose inference fails (or is ambiguous) are skipped, as are
    instances of ``type`` subclasses, which are legitimate bases
    (metaclass instances).
    """
    for base in node.bases:
        inferred = safe_infer(base)
        if inferred in (YES, None):
            continue
        is_type_instance = (isinstance(inferred, astroid.Instance) and
                            inferred.is_subtype_of('%s.type' % (BUILTINS,)))
        if is_type_instance:
            continue
        if not isinstance(inferred, astroid.Class):
            self.add_message('inherit-non-class',
                             args=base.as_string(), node=node)
@check_messages('access-member-before-definition',
                'attribute-defined-outside-init')
def leave_class(self, cnode):
    """close a class node:
    check that instance attributes are defined in __init__ and check
    access to existent members
    """
    # check access to existent members on non metaclass classes
    accessed = self._accessed.pop()
    if cnode.type != 'metaclass':
        self._check_accessed_members(cnode, accessed)
    # checks attributes are defined in an allowed method such as __init__
    if not self.linter.is_message_enabled('attribute-defined-outside-init'):
        return
    defining_methods = self.config.defining_attr_methods
    current_module = cnode.root()
    for attr, nodes in six.iteritems(cnode.instance_attrs):
        # skip nodes which are not in the current module and it may screw up
        # the output, while it's not worth it
        nodes = [n for n in nodes if not
                 isinstance(n.statement(), (astroid.Delete, astroid.AugAssign))
                 and n.root() is current_module]
        if not nodes:
            continue # error detected by typechecking
        # check if any method attr is defined in is a defining method
        if any(node.frame().name in defining_methods
               for node in nodes):
            continue
        # check attribute is defined in a parent's __init__
        for parent in cnode.instance_attr_ancestors(attr):
            attr_defined = False
            # check if any parent method attr is defined in is a defining method
            for node in parent.instance_attrs[attr]:
                if node.frame().name in defining_methods:
                    attr_defined = True
            if attr_defined:
                # we're done :)
                break
        else:
            # no ancestor defines it either: check attribute is defined as a
            # class attribute
            try:
                cnode.local_attr(attr)
            except astroid.NotFoundError:
                for node in nodes:
                    if node.frame().name not in defining_methods:
                        # If the attribute was set by a callfunc in any
                        # of the defining methods, then don't emit
                        # the warning.
                        if _called_in_methods(node.frame(), cnode,
                                              defining_methods):
                            continue
                        self.add_message('attribute-defined-outside-init',
                                         args=attr, node=node)
def visit_function(self, node):
    """check method arguments, overriding"""
    # ignore actual functions
    if not node.is_method():
        return
    klass = node.parent.frame()
    # reset for this method; visit_name may clear it when `self` is used
    self._meth_could_be_func = True
    # check first argument is self if this is actually a method
    self._check_first_arg_for_type(node, klass.type == 'metaclass')
    if node.name == '__init__':
        self._check_init(node)
        return
    # check signature if the method overloads inherited method
    for overridden in klass.local_attr_ancestors(node.name):
        # get astroid for the searched method
        try:
            meth_node = overridden[node.name]
        except KeyError:
            # we have found the method but it's not in the local
            # dictionary.
            # This may happen with astroid build from living objects
            continue
        if not isinstance(meth_node, astroid.Function):
            continue
        self._check_signature(node, meth_node, 'overridden')
        # only compare against the closest ancestor that defines the method
        break
    if node.decorators:
        for decorator in node.decorators.nodes:
            if isinstance(decorator, astroid.Getattr) and \
                    decorator.attrname in ('getter', 'setter', 'deleter'):
                # attribute affectation will call this method, not hiding it
                return
            if isinstance(decorator, astroid.Name) and decorator.name == 'property':
                # attribute affectation will either call a setter or raise
                # an attribute error, anyway not hiding the function
                return
    # check if the method is hidden by an attribute
    try:
        overridden = klass.instance_attr(node.name)[0]  # XXX
        overridden_frame = overridden.frame()
        if (isinstance(overridden_frame, astroid.Function)
                and overridden_frame.type == 'method'):
            overridden_frame = overridden_frame.parent.frame()
        if (isinstance(overridden_frame, Class)
                and klass.is_subtype_of(overridden_frame.qname())):
            args = (overridden.root().name, overridden.fromlineno)
            self.add_message('method-hidden', args=args, node=node)
    except astroid.NotFoundError:
        # no instance attribute of the same name: nothing hides the method
        pass
    # check non-iterators in __iter__
    if node.name == '__iter__':
        self._check_iter(node)
    elif node.name == '__exit__':
        self._check_exit(node)
def _check_slots(self, node):
    """Check that a class's __slots__ declaration is well formed.

    Emits 'invalid-slots' when __slots__ is not iterable and delegates
    per-element validation to _check_slots_elt.
    """
    if '__slots__' not in node.locals:
        return
    for slots in node.igetattr('__slots__'):
        # check if __slots__ is a valid type: it must expose at least one
        # of the iteration protocol methods
        for meth in ITER_METHODS:
            try:
                slots.getattr(meth)
                break
            except astroid.NotFoundError:
                continue
        else:
            # none of the iteration methods found -> not iterable
            self.add_message('invalid-slots', node=node)
            continue
        if isinstance(slots, astroid.Const):
            # a string, ignore the following checks
            continue
        if not hasattr(slots, 'itered'):
            # we can't obtain the values, maybe a .deque?
            continue
        if isinstance(slots, astroid.Dict):
            # for a dict, the slot names are the keys
            values = [item[0] for item in slots.items]
        else:
            values = slots.itered()
        if values is YES:
            # inference failed; nothing more we can check
            return
        for elt in values:
            try:
                self._check_slots_elt(elt)
            except astroid.InferenceError:
                continue
def _check_slots_elt(self, elt):
    """Validate one __slots__ entry: it must infer to a non-empty string."""
    for candidate in elt.infer():
        if candidate is YES:
            continue
        # Anything that is not a constant string cannot be a slot name.
        is_str_const = (isinstance(candidate, astroid.Const)
                        and isinstance(candidate.value, str))
        if not is_str_const:
            self.add_message('invalid-slots-object',
                             args=candidate.as_string(),
                             node=elt)
            continue
        # The empty string is not a valid attribute name either.
        if not candidate.value:
            self.add_message('invalid-slots-object',
                             args=candidate.as_string(),
                             node=elt)
def _check_iter(self, node):
    """Check that __iter__ returns something that looks like an iterator.

    Emits 'non-iterator-returned' when the inferred return value is an
    instance lacking the next/__next__ method.
    """
    try:
        infered = node.infer_call_result(node)
    except astroid.InferenceError:
        return
    for infered_node in infered:
        # unknown values and generators are acceptable return values
        if (infered_node is YES
                or isinstance(infered_node, Generator)):
            continue
        if isinstance(infered_node, astroid.Instance):
            try:
                infered_node.local_attr(NEXT_METHOD)
            except astroid.NotFoundError:
                self.add_message('non-iterator-returned',
                                 node=node)
                break
def _check_exit(self, node):
    """Check that __exit__ accepts exactly three positional args after self."""
    positional = sum(1 for arg in node.args.args if arg.name != 'self')
    # Too few args is fine when *args soaks them up; too many never is.
    too_few = positional < 3 and not node.args.vararg
    too_many = positional > 3
    if too_few or too_many:
        self.add_message('bad-context-manager', node=node)
def leave_function(self, node):
    """on method node, check if this method couldn't be a function

    ignore class, static and abstract methods, initializer,
    methods overridden from a parent class and any
    kind of method defined in an interface for this warning
    """
    if node.is_method():
        if node.args.args is not None:
            # pop the first-argument name pushed by _check_first_arg_for_type
            self._first_attrs.pop()
        if not self.linter.is_message_enabled('no-self-use'):
            return
        class_node = node.parent.frame()
        # idiom fix: `x not in y` instead of `not x in y`
        if (self._meth_could_be_func and node.type == 'method'
                and node.name not in PYMETHODS
                and not (node.is_abstract() or
                         overrides_a_method(class_node, node.name))
                and class_node.type != 'interface'):
            self.add_message('no-self-use', node=node)
def visit_getattr(self, node):
    """check if the getattr is an access to a class member
    if so, register it. Also check for access to protected
    class member from outside its class (but ignore __special__
    methods)
    """
    attrname = node.attrname
    # Check self: accesses through the first argument (self/cls/mcs) are
    # recorded so leave_class can verify they are defined somewhere
    if self.is_first_attr(node):
        self._accessed[-1][attrname].append(node)
        return
    if not self.linter.is_message_enabled('protected-access'):
        return
    self._check_protected_attribute_access(node)
def visit_assattr(self, node):
    """Register augmented self-attribute assignments and check __slots__.

    ``self.x += ...`` reads as well as writes the attribute, so it counts
    as an access for the member-defined checks.
    """
    if isinstance(node.ass_type(), astroid.AugAssign) and self.is_first_attr(node):
        self._accessed[-1][node.attrname].append(node)
    self._check_in_slots(node)
def _check_in_slots(self, node):
    """ Check that the given assattr node
    is defined in the class slots.
    """
    infered = safe_infer(node.expr)
    if infered and isinstance(infered, Instance):
        klass = infered._proxied
        # old-style classes and classes without __slots__ can take any attribute
        if '__slots__' not in klass.locals or not klass.newstyle:
            return
        slots = klass.slots()
        if slots is None:
            return
        # If any ancestor doesn't use slots, the slots
        # defined for this class are superfluous.
        if any('__slots__' not in ancestor.locals and
               ancestor.name != 'object'
               for ancestor in klass.ancestors()):
            return
        if not any(slot.value == node.attrname for slot in slots):
            # If we have a '__dict__' in slots, then
            # assigning any name is valid.
            if not any(slot.value == '__dict__' for slot in slots):
                if _is_attribute_property(node.attrname, klass):
                    # Properties circumvent the slots mechanism,
                    # so we should not emit a warning for them.
                    return
                self.add_message('assigning-non-slot',
                                 args=(node.attrname, ), node=node)
@check_messages('protected-access')
def visit_assign(self, assign_node):
    """Check the first assignment target for protected-attribute access."""
    target = assign_node.targets[0]
    if not isinstance(target, AssAttr):
        return
    if self.is_first_attr(target):
        # assignments through self/cls are always legitimate
        return
    self._check_protected_attribute_access(target)
def _check_protected_attribute_access(self, node):
    '''Given an attribute access node (set or get), check if attribute
    access is legitimate. Call _check_first_attr with node before calling
    this method. Valid cases are:
    * self._attr in a method or cls._attr in a classmethod. Checked by
    _check_first_attr.
    * Klass._attr inside "Klass" class.
    * Klass2._attr inside "Klass" class when Klass2 is a base class of
    Klass.
    '''
    attrname = node.attrname
    if (is_attr_protected(attrname) and
            attrname not in self.config.exclude_protected):
        klass = node_frame_class(node)
        # XXX infer to be more safe and less dirty ??
        # in classes, check we are not getting a parent method
        # through the class object or through super
        callee = node.expr.as_string()
        # We are not in a class, no remaining valid case
        if klass is None:
            self.add_message('protected-access', node=node, args=attrname)
            return
        # If the expression begins with a call to super, that's ok.
        if isinstance(node.expr, astroid.CallFunc) and \
                isinstance(node.expr.func, astroid.Name) and \
                node.expr.func.name == 'super':
            return
        # We are in a class, one remaining valid cases, Klass._attr inside
        # Klass
        if not (callee == klass.name or callee in klass.basenames):
            # Detect property assignments in the body of the class.
            # This is acceptable:
            #
            # class A:
            #     b = property(lambda: self._b)
            stmt = node.parent.statement()
            try:
                if (isinstance(stmt, astroid.Assign) and
                        (stmt in klass.body or klass.parent_of(stmt)) and
                        isinstance(stmt.value, astroid.CallFunc) and
                        isinstance(stmt.value.func, astroid.Name) and
                        stmt.value.func.name == 'property' and
                        is_builtin_object(next(stmt.value.func.infer(), None))):
                    return
            except astroid.InferenceError:
                # could not prove it is a property assignment; fall through
                pass
            self.add_message('protected-access', node=node, args=attrname)
def visit_name(self, node):
    """check if the name handle an access to a class member
    if so, register it
    """
    if not self._first_attrs:
        return
    first = self._first_attrs[-1]
    # the method uses its first argument (self/cls), or we have no
    # first-argument name at all: it cannot be turned into a function
    if node.name == first or not first:
        self._meth_could_be_func = False
def _check_accessed_members(self, node, accessed):
    """check that accessed members are defined"""
    # XXX refactor, probably much simpler now that E0201 is in type checker
    for attr, nodes in six.iteritems(accessed):
        # deactivate "except doesn't do anything", that's expected
        # pylint: disable=W0704
        try:
            # is it a class attribute ?
            node.local_attr(attr)
            # yes, stop here
            continue
        except astroid.NotFoundError:
            pass
        # is it an instance attribute of a parent class ?
        try:
            next(node.instance_attr_ancestors(attr))
            # yes, stop here
            continue
        except StopIteration:
            pass
        # is it an instance attribute ?
        try:
            defstmts = node.instance_attr(attr)
        except astroid.NotFoundError:
            pass
        else:
            # filter out augment assignment nodes
            defstmts = [stmt for stmt in defstmts if stmt not in nodes]
            if not defstmts:
                # only augment assignment for this node, no-member should be
                # triggered by the typecheck checker
                continue
            # filter defstmts to only pick the first one when there are
            # several assignments in the same scope
            scope = defstmts[0].scope()
            defstmts = [stmt for i, stmt in enumerate(defstmts)
                        if i == 0 or stmt.scope() is not scope]
            # if there are still more than one, don't attempt to be smarter
            # than we can be
            if len(defstmts) == 1:
                defstmt = defstmts[0]
                # check that if the node is accessed in the same method as
                # it's defined, it's accessed after the initial assignment
                frame = defstmt.frame()
                lno = defstmt.fromlineno
                for _node in nodes:
                    # are_exclusive() keeps us from warning when the access
                    # and the definition live in mutually exclusive branches
                    if _node.frame() is frame and _node.fromlineno < lno \
                            and not are_exclusive(_node.statement(), defstmt,
                                                  ('AttributeError', 'Exception', 'BaseException')):
                        self.add_message('access-member-before-definition',
                                         node=_node, args=(attr, lno))
def _check_first_arg_for_type(self, node, metaclass=0):
    """check the name of first argument, expect:

    * 'self' for a regular method
    * 'cls' for a class method or a metaclass regular method (actually
      valid-classmethod-first-arg value)
    * 'mcs' for a metaclass class method (actually
      valid-metaclass-classmethod-first-arg)
    * not one of the above for a static method
    """
    # don't care about functions with unknown argument (builtins)
    if node.args.args is None:
        return
    first_arg = node.args.args and node.argnames()[0]
    # pushed here, popped again by leave_function
    self._first_attrs.append(first_arg)
    first = self._first_attrs[-1]
    # static method
    if node.type == 'staticmethod':
        if (first_arg == 'self' or
                first_arg in self.config.valid_classmethod_first_arg or
                first_arg in self.config.valid_metaclass_classmethod_first_arg):
            self.add_message('bad-staticmethod-argument', args=first, node=node)
            return
        # static methods have no implicit first argument
        self._first_attrs[-1] = None
    # class / regular method with no args
    elif not node.args.args:
        self.add_message('no-method-argument', node=node)
    # metaclass
    elif metaclass:
        # metaclass __new__ or classmethod
        if node.type == 'classmethod':
            self._check_first_arg_config(
                first,
                self.config.valid_metaclass_classmethod_first_arg, node,
                'bad-mcs-classmethod-argument', node.name)
        # metaclass regular method
        else:
            self._check_first_arg_config(
                first,
                self.config.valid_classmethod_first_arg, node,
                'bad-mcs-method-argument',
                node.name)
    # regular class
    else:
        # class method
        if node.type == 'classmethod':
            self._check_first_arg_config(
                first,
                self.config.valid_classmethod_first_arg, node,
                'bad-classmethod-argument',
                node.name)
        # regular method without self as argument
        elif first != 'self':
            self.add_message('no-self-argument', node=node)
def _check_first_arg_config(self, first, config, node, message,
                            method_name):
    """Emit *message* when *first* is not one of the accepted names."""
    if first in config:
        return
    if len(config) == 1:
        valid = repr(config[0])
    else:
        # render "'a', 'b' or 'c'" style alternatives
        head = ', '.join(repr(name) for name in config[:-1])
        valid = '%s or %r' % (head, config[-1])
    self.add_message(message, args=(method_name, valid), node=node)
def _check_bases_classes(self, node):
    """check that the given class node implements abstract methods from
    base classes
    """
    def is_abstract(method):
        # helper: abstract only when explicitly marked, not merely `pass`
        return method.is_abstract(pass_is_abstract=False)

    # check if this class abstract
    if class_is_abstract(node):
        return

    # sort by method name for deterministic message order
    methods = sorted(
        unimplemented_abstract_methods(node, is_abstract).items(),
        key=lambda item: item[0],
    )
    for name, method in methods:
        owner = method.parent.frame()
        if owner is node:
            continue
        # owner is not this class, it must be a parent class
        # check that the ancestor's method is not abstract
        if name in node.locals:
            # it is redefined as an attribute or with a descriptor
            continue
        self.add_message('abstract-method', node=node,
                         args=(name, owner.name))
def _check_interfaces(self, node):
    """check that the given class node really implements declared
    interfaces
    """
    # mutable cell so the nested handler can flag that it already reported
    e0221_hack = [False]

    def iface_handler(obj):
        """filter interface objects, it should be classes"""
        if not isinstance(obj, astroid.Class):
            e0221_hack[0] = True
            self.add_message('interface-is-not-class', node=node,
                             args=(obj.as_string(),))
            return False
        return True

    ignore_iface_methods = self.config.ignore_iface_methods
    try:
        for iface in node.interfaces(handler_func=iface_handler):
            for imethod in iface.methods():
                name = imethod.name
                if name.startswith('_') or name in ignore_iface_methods:
                    # don't check method beginning with an underscore,
                    # usually belonging to the interface implementation
                    continue
                # get class method astroid
                try:
                    method = node_method(node, name)
                except astroid.NotFoundError:
                    self.add_message('missing-interface-method',
                                     args=(name, iface.name),
                                     node=node)
                    continue
                # ignore inherited methods
                if method.parent.frame() is not node:
                    continue
                # check signature
                self._check_signature(method, imethod,
                                      '%s interface' % iface.name)
    except astroid.InferenceError:
        if e0221_hack[0]:
            # already reported interface-is-not-class above
            return
        implements = Instance(node).getattr('__implements__')[0]
        assignment = implements.parent
        assert isinstance(assignment, astroid.Assign)
        # assignment.expr can be a Name or a Tuple or whatever.
        # Use as_string() for the message
        # FIXME: in case of multiple interfaces, find which one could not
        # be resolved
        self.add_message('unresolved-interface', node=implements,
                         args=(node.name, assignment.value.as_string()))
def _check_init(self, node):
    """check that the __init__ method call super or ancestors'__init__
    method
    """
    if (not self.linter.is_message_enabled('super-init-not-called') and
            not self.linter.is_message_enabled('non-parent-init-called')):
        return
    klass_node = node.parent.frame()
    to_call = _ancestors_to_call(klass_node)
    # bases whose __init__ we have not yet seen called
    not_called_yet = dict(to_call)
    for stmt in node.nodes_of_class(astroid.CallFunc):
        expr = stmt.func
        if not isinstance(expr, astroid.Getattr) \
                or expr.attrname != '__init__':
            continue
        # skip the test if using super
        if isinstance(expr.expr, astroid.CallFunc) and \
                isinstance(expr.expr.func, astroid.Name) and \
                expr.expr.func.name == 'super':
            return
        try:
            klass = next(expr.expr.infer())
            if klass is YES:
                continue
            # The infered klass can be super(), which was
            # assigned to a variable and the `__init__` was called later.
            #
            # base = super()
            # base.__init__(...)
            if (isinstance(klass, astroid.Instance) and
                    isinstance(klass._proxied, astroid.Class) and
                    is_builtin_object(klass._proxied) and
                    klass._proxied.name == 'super'):
                return
            try:
                del not_called_yet[klass]
            except KeyError:
                if klass not in to_call:
                    self.add_message('non-parent-init-called',
                                     node=expr, args=klass.name)
        except astroid.InferenceError:
            continue
    # whatever is still in not_called_yet was never initialized
    for klass, method in six.iteritems(not_called_yet):
        if klass.name == 'object' or method.parent.name == 'object':
            continue
        self.add_message('super-init-not-called', args=klass.name, node=node)
def _check_signature(self, method1, refmethod, class_type):
    """check that the signature of the two given methods match

    class_type is in 'class', 'interface'
    """
    if not (isinstance(method1, astroid.Function)
            and isinstance(refmethod, astroid.Function)):
        self.add_message('method-check-failed',
                         args=(method1, refmethod), node=method1)
        return
    # don't care about functions with unknown argument (builtins)
    if method1.args.args is None or refmethod.args.args is None:
        return
    # if we use *args, **kwargs, skip the below checks
    if method1.args.vararg or method1.args.kwarg:
        return
    # name-mangled private methods cannot be overridden anyway
    if is_attr_private(method1.name):
        return
    if len(method1.args.args) != len(refmethod.args.args):
        self.add_message('arguments-differ', args=class_type, node=method1)
    elif len(method1.args.defaults) < len(refmethod.args.defaults):
        # fewer defaults means callers of the reference signature may break
        self.add_message('signature-differs', args=class_type, node=method1)
def is_first_attr(self, node):
    """Check that attribute lookup name use first attribute variable name
    (self for method, cls for classmethod and mcs for metaclass).
    """
    if not self._first_attrs:
        # empty stack is falsy, same result as the original short-circuit
        return self._first_attrs
    return (isinstance(node.expr, astroid.Name)
            and node.expr.name == self._first_attrs[-1])
def _ancestors_to_call(klass_node, method='__init__'):
    """return a dictionary where keys are the list of base classes providing
    the queried method, and so that should/may be called from the method node
    """
    found = {}
    for ancestor in klass_node.ancestors(recurs=False):
        try:
            inferred = next(ancestor.igetattr(method))
        except astroid.InferenceError:
            # base does not provide the method (or it cannot be inferred)
            continue
        found[ancestor] = inferred
    return found
def node_method(node, method_name):
    """get astroid for <method_name> on the given class node, ensuring it
    is a Function node

    Raises astroid.NotFoundError when no Function by that name is found.
    """
    for candidate in node.local_attr(method_name):
        if isinstance(candidate, astroid.Function):
            return candidate
    raise astroid.NotFoundError(method_name)
def register(linter):
    """required method to auto register this checker

    :param linter: the pylint linter instance to attach the checker to
    """
    linter.register_checker(ClassChecker(linter))
|
willemneal/Docky
|
lib/pylint/checkers/classes.py
|
Python
|
mit
| 42,351
|
[
"VisIt"
] |
fd90461c1c5779568714fc8521c5f686faed2d723d8b58bba6a5f97c92d9142a
|
"""End-To-End Memory Networks.
The implementation is based on http://arxiv.org/abs/1503.08895 [1]
"""
from __future__ import absolute_import
from __future__ import division
import inspect
import tensorflow as tf
import numpy as np
from six.moves import range
from tensorflow.python.ops import rnn, rnn_cell
def position_encoding(sentence_size, embedding_size):
    """Build a position-encoding matrix.

    Vectorized form of the element-wise formula
    ``l[k-1, j-1] = (1 - j/J) - (k/d) * (1 - 2*j/J)`` where J is the
    sentence size and d the embedding size.  The arithmetic is done in
    float64 (as the original Python-scalar loop did) and then cast to
    float32 on assignment, so the values match the loop version exactly.

    Args:
        sentence_size: number of word positions J.
        embedding_size: embedding dimensionality d.

    Returns:
        float32 numpy array of shape (sentence_size, embedding_size).
    """
    J = float(sentence_size)
    d = float(embedding_size)
    # column vector of embedding indices k, row vector of positions j;
    # broadcasting produces the full (d, J) grid in one expression
    k = np.arange(1, embedding_size + 1, dtype=np.float64)[:, np.newaxis]
    j = np.arange(1, sentence_size + 1, dtype=np.float64)[np.newaxis, :]
    l = (1 - j / J) - (k / d) * (1 - 2 * j / J)
    return np.transpose(l).astype(np.float32)
def original_position_encoding(sentence_size, embedding_size):
    """
    Position Encoding described in section 4.1 [1]

    Returns a float32 array of shape (sentence_size, embedding_size).
    """
    encoding = np.ones((embedding_size, sentence_size), dtype=np.float32)
    n_rows = embedding_size + 1
    n_cols = sentence_size + 1
    for row in range(1, n_rows):
        for col in range(1, n_cols):
            # distance of (row, col) from the matrix centre
            encoding[row - 1, col - 1] = (
                (row - (n_rows - 1) / 2) * (col - (n_cols - 1) / 2))
    # rescale into the 1 +/- ... band used by the paper
    encoding = 1 + 4 * encoding / embedding_size / sentence_size
    return np.transpose(encoding)
def zero_nil_slot(t, name=None):
    """
    Overwrites the nil_slot (first row) of the input Tensor with zeros.

    The nil_slot is a dummy slot and should not be trained and influence
    the training algorithm.

    Args:
        t: a 2-D tensor, typically the gradient of an embedding matrix.
        name: optional name for the returned op.

    Returns:
        A tensor shaped like ``t`` whose first row is all zeros.
    """
    # NOTE(review): uses legacy TF1 APIs (tf.pack, tf.concat(axis, values));
    # these are tf.stack / tf.concat(values, axis) in later releases.
    with tf.name_scope( name, "zero_nil_slot", [t]) as name:
        t = tf.convert_to_tensor(t, name="t")
        s = tf.shape(t)[1]  # number of columns
        # one row of zeros stacked on top of rows 1..end of `t`
        z = tf.zeros(tf.pack([1, s]))
        return tf.concat(0, [z, tf.slice(t, [1, 0], [-1, -1])], name=name)
def add_gradient_noise(t, stddev=1e-3, name=None):
    """
    Adds gradient noise as described in http://arxiv.org/abs/1511.06807 [2].

    The input Tensor `t` should be a gradient.
    The output will be `t` + gaussian noise.

    0.001 was said to be a good fixed value for memory networks [2].

    Args:
        t: the gradient tensor to perturb.
        stddev: standard deviation of the added Gaussian noise.
        name: optional name for the returned op.

    Returns:
        ``t`` plus element-wise Gaussian noise of the same shape.
    """
    with tf.name_scope( name, "add_gradient_noise", [t, stddev]) as name:
        t = tf.convert_to_tensor(t, name="t")
        gn = tf.random_normal(tf.shape(t), stddev=stddev)
        return tf.add(t, gn, name=name)
class MemN2N(object):
    """End-To-End Memory Network.

    Builds the full TF1 graph (inputs, variables, inference, loss,
    training and prediction ops) at construction time and runs it
    through the supplied session.
    """

    def __init__(self, batch_size, vocab_size, sentence_size, memory_size, embedding_size,
                 answer_size,
                 label_size,
                 glove_embedding,
                 # NOTE(review): mutable/default-constructed session — a new
                 # tf.Session() is created at import time if none is passed.
                 session=tf.Session(),
                 hops=3,
                 max_grad_norm=40.0,
                 nonlin=None,
                 encoding=position_encoding,
                 l2 = 0.02,
                 lr = 0.01,
                 epsilon = 1e-8,
                 restoreLoc = None,
                 name='MemN2N'):
        """Creates an End-To-End Memory Network

        Args:
            batch_size: The size of the batch.

            vocab_size: The size of the vocabulary (should include the nil word). The nil word
            one-hot encoding should be 0.

            sentence_size: The max size of a sentence in the data. All sentences should be padded
            to this length. If padding is required it should be done with nil one-hot encoding (0).

            memory_size: The max size of the memory. Since Tensorflow currently does not support jagged arrays
            all memories must be padded to this length. If padding is required, the extra memories should be
            empty memories; memories filled with the nil word ([0, 0, 0, ......, 0]).

            embedding_size: The size of the word embedding.

            hops: The number of hops. A hop consists of reading and addressing a memory slot.
            Defaults to `3`.

            max_grad_norm: Maximum L2 norm clipping value. Defaults to `40.0`.

            nonlin: Non-linearity. Defaults to `None`.

            initializer: Weight initializer. Defaults to `tf.random_normal_initializer(stddev=0.1)`.

            optimizer: Optimizer algorithm used for SGD. Defaults to `tf.train.AdamOptimizer(learning_rate=1e-2)`.

            encoding: A function returning a 2D Tensor (sentence_size, embedding_size). Defaults to `position_encoding`.

            session: Tensorflow Session the model is run with. Defaults to `tf.Session()`.

            name: Name of the End-To-End Memory Network. Defaults to `MemN2N`.
        """
        # log the constructor arguments for experiment reproducibility
        frame = inspect.currentframe()
        print('Memn2n Model Input')
        _, _, _, values = inspect.getargvalues(frame)
        print(values)
        self._batch_size = batch_size
        self._vocab_size = vocab_size
        self._sentence_size = sentence_size
        self._memory_size = memory_size
        self._embedding_size = embedding_size
        self._answer_size = answer_size
        self._label_size = label_size
        self._hops = hops
        self._max_grad_norm = max_grad_norm
        self._nonlin = nonlin
        self._glove_embedding = glove_embedding
        self._opt = tf.train.AdamOptimizer(learning_rate=lr, epsilon=epsilon)
        self._name = name
        self._l2 = l2
        self._linear = True
        self._rnn_hidden = 50  # hidden units of the query-encoding GRU

        self._glove_tf = tf.constant(self._glove_embedding)
        self._init = tf.random_normal_initializer(stddev=0.1)

        self._build_inputs()
        self._build_vars()

        # fixed (non-trainable) position encodings for stories and answers
        self._encoding = tf.constant(encoding(self._sentence_size, self._embedding_size), name="encoding")
        self._answer_encoding = tf.constant(encoding(self._answer_size, self._embedding_size), name="answer_encoding")

        # cross entropy
        logits = self._inference(self._stories, self._queries, self._answers) # (batch_size, label_size)
        print('logits', logits)
        cross_entropy = tf.nn.softmax_cross_entropy_with_logits(logits, tf.cast(self._labels, tf.float32), name="cross_entropy")
        cross_entropy_sum = tf.reduce_sum(cross_entropy, name="cross_entropy_sum")

        #hinge loss
        #hinge_loss = tf.contrib.losses.hinge_loss(logits, tf.cast(self._labels, tf.float32))
        #hinge_loss_sum = tf.reduce_sum(hinge_loss)

        # loss op: data term plus L2 regularization collected in _build_vars
        reg_loss = self._l2 * tf.add_n(tf.get_collection('reg_loss'))
        loss_op = cross_entropy_sum + reg_loss
        #loss_op = hinge_loss_sum + reg_loss
        loss_op_summary = tf.scalar_summary("loss", loss_op)

        # exponential moving average of the batch loss, for smoother curves
        ema = tf.train.ExponentialMovingAverage(decay=0.99)
        self.update_loss_ema = ema.apply([loss_op])
        loss_ema = ema.average(loss_op)
        self.loss_ema_op = tf.scalar_summary('batch_loss_ema', loss_ema)

        # gradient pipeline: clip each gradient, add Gaussian noise, and
        # zero the nil-word row of the embedding gradients
        grads_and_vars = self._opt.compute_gradients(loss_op)
        grads_and_vars = [(tf.clip_by_norm(g, self._max_grad_norm), v) for g,v in grads_and_vars]
        grads_and_vars = [(add_gradient_noise(g), v) for g,v in grads_and_vars]
        nil_grads_and_vars = []
        for g, v in grads_and_vars:
            if v.name in self._nil_vars:
                nil_grads_and_vars.append((zero_nil_slot(g), v))
            else:
                nil_grads_and_vars.append((g, v))
        train_op = self._opt.apply_gradients(nil_grads_and_vars, name="train_op")

        # predict ops
        predict_op = tf.argmax(logits, 1, name="predict_op")
        predict_proba_op = tf.nn.softmax(logits, name="predict_proba_op")
        predict_log_proba_op = tf.log(predict_proba_op, name="predict_log_proba_op")

        # validation accuracy ops
        self.val_acc_op = self._get_val_acc(predict_op, self._val_labels)
        self.val_acc_summary = tf.scalar_summary("val_acc", self.val_acc_op)

        # assign ops
        self.loss_op = loss_op
        self.predict_op = predict_op
        self.predict_proba_op = predict_proba_op
        self.predict_log_proba_op = predict_log_proba_op
        self.train_op = train_op
        self.loss_op_summary = loss_op_summary

        # Summaries
        self.merged = tf.merge_all_summaries()

        self._sess = session
        if restoreLoc is not None:
            saver = tf.train.Saver()
            print('Restoring model weights')
            saver.restore(self._sess, restoreLoc)
        else:
            init_op = tf.initialize_all_variables()
            self._sess.run(init_op)

    def _build_inputs(self):
        """Create the graph placeholders for stories, queries, answers and labels."""
        self._stories = tf.placeholder(tf.int32, [None, self._memory_size, self._sentence_size], name="stories")
        self._queries = tf.placeholder(tf.int32, [None, self._sentence_size], name="queries")
        self._answers = tf.placeholder(tf.int32, [None, self._label_size, self._answer_size], name="answers")
        self._labels = tf.placeholder(tf.int32, [None, self._label_size], name="labels")
        self._val_labels = tf.placeholder(tf.int32, [None], name="val_labels")#TODO: valuation output as index, not as one hot
        # switch between linear (no softmax) attention and softmax attention
        self._linear_start= tf.placeholder(tf.bool,[None] , name="linear_start")

    def _build_vars(self):
        """Create embedding matrices and per-hop weights, plus their L2 terms."""
        with tf.variable_scope(self._name + str(1)):
            # row 0 is reserved for the nil word and kept at zero
            nil_word_slot = tf.zeros([1, self._embedding_size])
            A = tf.concat(0, [ nil_word_slot, self._init([self._vocab_size-1, self._embedding_size]) ])
            B = tf.concat(0, [ nil_word_slot, self._init([self._vocab_size-1, self._embedding_size]) ])
            C = tf.concat(0, [ nil_word_slot, self._init([self._vocab_size-1, self._embedding_size]) ])
            # A/B are initialized from GloVe; C from random normal
            self.A = tf.Variable(tf.cast(self._glove_tf, tf.float32), name="A")#TODO: trainable = False
            self.B = tf.Variable(tf.cast(self._glove_tf, tf.float32), name="B")
            self.C = tf.Variable(C, name="C")
            # temporal encodings for memory positions
            self.TA = tf.Variable(self._init([self._memory_size, self._embedding_size]), name='TA')
            self.TC = tf.Variable(self._init([self._memory_size, self._embedding_size]), name='TC')
            self.H = tf.Variable(self._init([self._embedding_size, self._embedding_size]), name="H")
        # gradients of these variables get their nil row zeroed in __init__
        self._nil_vars = set([self.A.name, self.B.name, self.C.name])
        tf.add_to_collection('reg_loss', tf.nn.l2_loss(self.A))
        tf.add_to_collection('reg_loss', tf.nn.l2_loss(self.B))
        tf.add_to_collection('reg_loss', tf.nn.l2_loss(self.C))
        tf.add_to_collection('reg_loss', tf.nn.l2_loss(self.TA))
        tf.add_to_collection('reg_loss', tf.nn.l2_loss(self.TC))
        tf.add_to_collection('reg_loss', tf.nn.l2_loss(self.H))

    def _inference(self, stories, queries, answers):
        """Run the multi-hop memory attention and score the candidate answers.

        Returns per-candidate squared distances of shape
        (batch_size, label_size) used as logits.
        """
        with tf.variable_scope(self._name + str(2)):
            q_emb = tf.nn.embedding_lookup(self.B, queries)
            #q_emb = tf.nn.dropout(q_emb, 0.1)
            print('q_emb', q_emb)
            print('self._encoding', self._encoding)
            #u_0 = tf.reduce_sum(q_emb * self._encoding, 1)
            # encode the query with a GRU instead of the position encoding
            q_step = tf.transpose(q_emb, [1,0,2])
            q_step = tf.reshape(q_step, [-1, self._embedding_size])
            q_step = tf.split(0,self._sentence_size , q_step)
            gru_cell = rnn_cell.GRUCell(self._rnn_hidden)
            outputs, states = rnn.rnn(gru_cell, q_step, dtype=tf.float32)
            print('states', states)
            u_0 = states
            print('u_0', u_0)
            u = [u_0]
            self.probs_hops = []  # attention distributions, kept per hop
            for _ in range(self._hops):
                m_emb = tf.nn.embedding_lookup(self.A, stories)
                #m_emd = tf.nn.dropout(m_emb, 0.1)
                m = tf.reduce_sum(m_emb * self._encoding, 2) + self.TA
                # hack to get around no reduce_dot
                u_temp = tf.transpose(tf.expand_dims(u[-1], -1), [0, 2, 1])
                dotted = tf.reduce_sum(m * u_temp, 2)

                # Calculate probabilities
                #probs = tf.nn.softmax(dotted)
                #probs = tf.cond(self._linear_start, lambda: tf.identity(dotted), lambda: tf.nn.softmax(dotted))
                #print('self._linear_start', self._linear_start)
                print('dotted', dotted)
                sfm = tf.nn.softmax(dotted)
                print('sfm', sfm)
                # linear start: skip the softmax while _linear_start is true
                probs = tf.select(self._linear_start, dotted, sfm )
                #print('probs', probs)
                self.probs_hops.append(probs)
                probs_temp = tf.transpose(tf.expand_dims(probs, -1), [0, 2, 1])
                c_emb = tf.nn.embedding_lookup(self.C, stories)
                c = tf.reduce_sum(c_emb * self._encoding, 2) + self.TC
                c_temp = tf.transpose(c, [0, 2, 1])
                o_k = tf.reduce_sum(c_temp * probs_temp, 2)

                u_k = tf.matmul(u[-1], self.H) + o_k
                #TRY DROPOUT
                #u_k = tf.nn.dropout(u_k, 0.2)
                # nonlinearity
                if self._nonlin:
                    u_k = self._nonlin(u_k)

                u.append(u_k)

            # embed each candidate answer and score it by its squared
            # Euclidean distance to the final controller state u_k
            as_emb = tf.nn.embedding_lookup(self.B, answers)
            #as_emb = tf.nn.dropout(as_emb, 0.1)
            print('as_emb', as_emb)
            print('self._answer_encoding', self._answer_encoding)
            as_enc = tf.reduce_sum(as_emb * self._answer_encoding, 2)
            print('as_enc', as_enc)
            print('u_k', u_k)
            u_k_l = tf.tile(u_k, [1, self._label_size])
            u_k_l = tf.reshape(u_k_l, [-1, self._label_size, self._embedding_size])
            print('u_k_l',u_k_l)
            as_ans = tf.sub(as_enc, u_k_l)
            print('as_ans', as_ans)
            as_ans = tf.reduce_sum(tf.square(as_ans), 2)
            print('as_ans', as_ans)
            return as_ans

    def save_model(self, location):
        """Save the session's variables to `location` (no meta graph)."""
        saver = tf.train.Saver()
        saver.save(self._sess, location, write_meta_graph=False)

    def _get_val_acc(self, pred_op, val_labels):
        """Return an op computing mean accuracy of `pred_op` vs `val_labels`."""
        corr_pred = tf.equal(tf.cast(pred_op, tf.int32), val_labels)
        acc_op = tf.reduce_mean(tf.cast(corr_pred, tf.float32))
        return acc_op

    def batch_fit(self, stories, queries, answers, labels, linear_start):
        """Runs the training algorithm over the passed batch

        Args:
            stories: Tensor (None, memory_size, sentence_size)
            queries: Tensor (None, sentence_size)
            answers: Tensor (None, vocab_size)

        Returns:
            loss: floating-point number, the loss computed for the batch
            (plus the loss summary, the loss EMA summary and the per-hop
            attention probabilities)
        """
        self.linear_start = linear_start
        feed_dict = {self._stories: stories, self._queries: queries, self._answers: answers, self._labels:labels, self._linear_start:self.linear_start}
        loss, loss_op_summary, _, _, loss_ema, probs = self._sess.run([self.loss_op, self.loss_op_summary, self.train_op, self.update_loss_ema, self.loss_ema_op, self.probs_hops], feed_dict=feed_dict)
        return loss, loss_op_summary, loss_ema, probs

    def predict(self, stories, queries, answers, linear_start):
        """Predicts answers as one-hot encoding.

        Args:
            stories: Tensor (None, memory_size, sentence_size)
            queries: Tensor (None, sentence_size)

        Returns:
            answers: Tensor (None, vocab_size)
        """
        self.linear_start = linear_start
        feed_dict = {self._stories: stories, self._queries: queries, self._answers: answers, self._linear_start:self.linear_start}
        return self._sess.run(self.predict_op, feed_dict=feed_dict)

    def predict_test(self, stories, queries, answers):
        """Predicts answers as one-hot encoding.

        Args:
            stories: Tensor (None, memory_size, sentence_size)
            queries: Tensor (None, sentence_size)

        Returns:
            answers, probabilities per hop: Tensor (None, vocab_size), Tensor (None, hops, memory_size)
        """
        # NOTE(review): reads self.linear_start, which is only set by
        # batch_fit/predict — calling this first raises AttributeError.
        feed_dict = {self._stories: stories, self._queries: queries, self._answers: answers, self._linear_start:self.linear_start}
        ops = [self.predict_op]
        ops.extend(self.probs_hops)
        return self._sess.run(ops, feed_dict=feed_dict)

    def predict_proba(self, stories, queries, answers):
        """Predicts probabilities of answers.

        Args:
            stories: Tensor (None, memory_size, sentence_size)
            queries: Tensor (None, sentence_size)

        Returns:
            answers: Tensor (None, vocab_size)
        """
        # NOTE(review): also depends on self.linear_start being set earlier.
        feed_dict = {self._stories: stories, self._queries: queries, self._answers: answers, self._linear_start:self.linear_start}
        return self._sess.run(self.predict_proba_op, feed_dict=feed_dict)

    def get_val_acc_summary(self, stories, queries, answers, labels, linear_start):
        """Compute validation accuracy and its summary for the given batch."""
        feed_dict = {self._stories: stories, self._queries: queries, self._answers: answers, self._val_labels: labels, self._linear_start:linear_start}
        return self._sess.run([self.val_acc_op, self.val_acc_summary], feed_dict=feed_dict)

    def predict_log_proba(self, stories, queries, answers):
        """Predicts log probabilities of answers.

        Args:
            stories: Tensor (None, memory_size, sentence_size)
            queries: Tensor (None, sentence_size)

        Returns:
            answers: Tensor (None, vocab_size)
        """
        # NOTE(review): also depends on self.linear_start being set earlier.
        feed_dict = {self._stories: stories, self._queries: queries, self._answers: answers, self._linear_start:self.linear_start}
        return self._sess.run(self.predict_log_proba_op, feed_dict=feed_dict)
|
tianlongwang/memn2n
|
memn2n/mc_memn2n.py
|
Python
|
mit
| 16,941
|
[
"Gaussian"
] |
276884f183cc54222b9ea807ed30b5496ed069edbb451acd24f424c162bde5ca
|
"""
elite_mapping.py
Setup for the Elite control mapping.
Copyright (C) 2016 Bob Helander
This program is free software: you can redistribute it and/or modify
it under the terms of the GNU General Public License as published by
the Free Software Foundation, either version 3 of the License, or
(at your option) any later version.
This program is distributed in the hope that it will be useful,
but WITHOUT ANY WARRANTY; without even the implied warranty of
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
GNU General Public License for more details.
You should have received a copy of the GNU General Public License
along with this program. If not, see <http://www.gnu.org/licenses/>.
"""
from button_types import ToggleButton, InputButton, PadPageButton, FlashingButton
from systems_button_group import SystemsButtonGroup
def setup_ship(pad_state):
    """
    Add the mapped buttons to the pad_state array (Elite Dangerous).

    Each assignment fills a ``pad_state[column][row]`` cell with a button
    object that maps a pad press to a keyboard key code (``key_output``).
    The red/green values are LED intensities (0-3) for the idle state;
    pressed_*/toggled_* give the intensity while active.
    """
    # ESC
    pad_state[0][0] = InputButton(0, 0, red=0, green=3, pressed_red=3, pressed_green=0,
                                  key_output=0xB1,  # ESC
                                  flashing=True, description="ESC")
    # Change page
    pad_state[1][0] = PadPageButton(1, 0, red=0, green=3, page=1, description="Change Page 1")
    # Deploy Heat Sink
    pad_state[2][0] = InputButton(2, 0, red=3, green=0, pressed_red=3, pressed_green=3,
                                  key_output=0x76,  # KEY_V
                                  flashing=True, description="Heat Sink")
    # Chaff
    pad_state[3][0] = InputButton(3, 0, red=3, green=3, pressed_red=3, pressed_green=3,
                                  key_output=0xCA,  # F9
                                  flashing=True, description="Chaff")
    # Deploy Shield Cell
    pad_state[4][0] = InputButton(4, 0, red=0, green=3, pressed_red=3, pressed_green=3,
                                  key_output=0xCB,  # F10
                                  flashing=True, description="Shield Cell")
    # Flight Assist (toggle: same key sent on set and clear)
    pad_state[5][0] = ToggleButton(5, 0, red=3, green=3, toggled_red=3, toggled_green=0,
                                   key_output_set=0x7A,  # KEY_Z
                                   key_output_cleared=0x7A, flashing=True, description="Flight Assist")
    # FSD
    pad_state[8][1] = InputButton(8, 1, red=3, green=3, pressed_red=3, pressed_green=0,
                                  key_output=0x6A,  # KEY_J
                                  flashing=True, description="FSD")
    # Landing Gear
    pad_state[7][1] = ToggleButton(7, 1, red=0, green=3, toggled_red=3, toggled_green=0,
                                   key_output_set=0xD1,  # Insert
                                   key_output_cleared=0xD1, flashing=True, description="Gear")
    # Landing Lights
    pad_state[6][1] = ToggleButton(6, 1, red=0, green=3, toggled_red=3, toggled_green=3,
                                   key_output_set=0xD4,  # Delete
                                   key_output_cleared=0xD4, flashing=True, description="Lights")
    # 75% Speed
    pad_state[8][3] = InputButton(8, 3, red=0, green=3, pressed_red=3, pressed_green=0,
                                  key_output=0x2C,  # Comma
                                  flashing=False, description="75%")
    # 50% Speed
    pad_state[8][4] = InputButton(8, 4, red=3, green=3, pressed_red=3, pressed_green=0,
                                  key_output=0x2E,  # Period
                                  flashing=False, description="50%")
    # 0% Speed
    pad_state[8][5] = InputButton(8, 5, red=3, green=0, pressed_red=3, pressed_green=0,
                                  key_output=0x78,  # KEY_X
                                  flashing=False, description="0%")
    # Communications Panel
    pad_state[0][1] = InputButton(0, 1, red=3, green=3, pressed_red=3, pressed_green=0,
                                  key_output=0x32,  # KEY_2
                                  flashing=False, description="Comms")
    # Target Panel
    pad_state[0][7] = InputButton(0, 7, red=0, green=3, pressed_red=3, pressed_green=0,
                                  key_output=0x31,  # KEY_1
                                  flashing=False, description="Target Panel")
    # Systems Panel
    pad_state[2][7] = InputButton(2, 7, red=0, green=3, pressed_red=3, pressed_green=0,
                                  key_output=0x34,  # KEY_4
                                  flashing=False, description="Systems Panel")
    # Sensors Panel
    # NOTE(review): the comment says "Sensors Panel" but the description reads
    # "Role Panel" — presumably one of the two is stale; confirm intent.
    pad_state[1][8] = InputButton(1, 8, red=0, green=3, pressed_red=3, pressed_green=0,
                                  key_output=0x33,  # KEY_3
                                  flashing=False, description="Role Panel")
    # Galaxy Map
    pad_state[0][3] = InputButton(0, 3, red=0, green=3, pressed_red=3, pressed_green=0,
                                  key_output=0x69,  # KEY_I
                                  flashing=False, description="Galaxy Map")
    # System Map
    pad_state[1][3] = InputButton(1, 3, red=0, green=3, pressed_red=3, pressed_green=0,
                                  key_output=0x6F,  # KEY_O
                                  flashing=False, description="System Map")
    # UI navigation cluster: W/A/S/D arranged as a directional pad.
    pad_state[1][5] = InputButton(1, 5, red=3, green=3, pressed_red=0, pressed_green=3,
                                  key_output=0x77,  # KEY_W
                                  flashing=False, description="Up")
    pad_state[0][6] = InputButton(0, 6, red=3, green=3, pressed_red=0, pressed_green=3,
                                  key_output=0x61,  # KEY_A
                                  flashing=False, description="Left")
    pad_state[1][7] = InputButton(1, 7, red=3, green=3, pressed_red=0, pressed_green=3,
                                  key_output=0x73,  # KEY_S
                                  flashing=False, description="Down")
    pad_state[2][6] = InputButton(2, 6, red=3, green=3, pressed_red=0, pressed_green=3,
                                  key_output=0x64,  # KEY_D
                                  flashing=False, description="Right")
    # Select
    pad_state[1][6] = InputButton(1, 6, red=0, green=3, pressed_red=3, pressed_green=0,
                                  key_output=0x20,  # VK_SPACE
                                  flashing=False, description="Select")
    # Previous Tab
    pad_state[0][5] = InputButton(0, 5, red=1, green=2, pressed_red=3, pressed_green=0,
                                  key_output=0x71,  # KEY_Q
                                  flashing=False, description="Previous")
    # Next Tab
    pad_state[2][5] = InputButton(2, 5, red=1, green=2, pressed_red=3, pressed_green=0,
                                  key_output=0x65,  # KEY_E
                                  flashing=False, description="Next")
    # Power-distributor pips: Engines / Systems / Weapons / Reset.
    pad_state[5][4] = InputButton(5, 4, red=3, green=0, pressed_red=0, pressed_green=3,
                                  key_output=0xDA,  # Up Arrow
                                  flashing=False, description="Engines")
    pad_state[4][5] = InputButton(4, 5, red=3, green=0, pressed_red=0, pressed_green=3,
                                  key_output=0xD8,  # Left Arrow
                                  flashing=False, description="Systems")
    pad_state[6][5] = InputButton(6, 5, red=3, green=0, pressed_red=0, pressed_green=3,
                                  key_output=0xD7,  # Right arrow
                                  flashing=False, description="Weapons")
    pad_state[5][5] = InputButton(5, 5, red=3, green=3, pressed_red=0, pressed_green=3,
                                  key_output=0xD9,  # Down arrow
                                  flashing=False, description="Reset")
    # Callback mappings keep this object from being garbage collected.
    _ = SystemsButtonGroup(systems_button=pad_state[4][5],
                           weapons_button=pad_state[6][5],
                           engines_button=pad_state[5][4],
                           reset_button=pad_state[5][5])
    # Hardpoints
    pad_state[8][6] = ToggleButton(8, 6, red=0, green=3, toggled_red=3, toggled_green=0,
                                   key_output_set=0x75,  # KEY_U
                                   key_output_cleared=0x75, flashing=True, description="Hardpoints")
    # Next Weapon Group
    pad_state[8][7] = InputButton(8, 7, red=3, green=3, pressed_red=3, pressed_green=0,
                                  key_output=0x6E,  # KEY_N
                                  flashing=False, description="Next Weapon Group")
    # Previous Weapon Group
    pad_state[8][8] = InputButton(8, 8, red=3, green=3, pressed_red=3, pressed_green=0,
                                  key_output=0x6D,  # KEY_M
                                  flashing=False, description="Previous Weapon Group")
    # Wingman's Target
    pad_state[7][6] = InputButton(7, 6, red=3, green=0, pressed_red=3, pressed_green=3,
                                  key_output=0x30,  # KEY_0
                                  flashing=False, description="Wingman Target")
    # Front Target
    pad_state[7][7] = InputButton(7, 7, red=3, green=0, pressed_red=3, pressed_green=3,
                                  key_output=0x74,  # KEY_T
                                  flashing=False, description="Front Target")
    # Most Threatening Target
    pad_state[7][8] = InputButton(7, 8, red=3, green=0, pressed_red=3, pressed_green=3,
                                  key_output=0x68,  # KEY_H
                                  flashing=False, description="Most Threatening Target")
    # Next Target
    pad_state[6][7] = InputButton(6, 7, red=3, green=3, pressed_red=3, pressed_green=0,
                                  key_output=0x67,  # KEY_G
                                  flashing=False, description="Next Target")
    # Previous Target
    pad_state[6][8] = InputButton(6, 8, red=3, green=3, pressed_red=3, pressed_green=0,
                                  key_output=0x66,  # KEY_F
                                  flashing=False, description="Previous Target")
    # Next Target Subsystem
    pad_state[5][7] = InputButton(5, 7, red=3, green=0, pressed_red=3, pressed_green=3,
                                  key_output=0x79,  # KEY_Y
                                  flashing=False, description="Next Subsystem")
    # Previous Target Subsystem
    pad_state[5][8] = InputButton(5, 8, red=3, green=0, pressed_red=3, pressed_green=3,
                                  key_output=0x63,  # KEY_C
                                  flashing=False, description="Previous Subsystem")
    # Increase Sensor Range
    pad_state[3][7] = InputButton(3, 7, red=1, green=2, pressed_red=0, pressed_green=3,
                                  key_output=0xD3,  # Page Up
                                  flashing=False, description="Increase Range")
    # Decrease Sensor Range
    pad_state[3][8] = InputButton(3, 8, red=1, green=2, pressed_red=0, pressed_green=3,
                                  key_output=0xD6,  # Page Down
                                  flashing=False, description="Decrease Range")
    # Hyperspace
    pad_state[7][0] = InputButton(7, 0, red=1, green=2, pressed_red=3, pressed_green=0,
                                  key_output=0x2B,  # Add
                                  flashing=True, description="Hyperspace")
    # Supercruise
    pad_state[6][0] = InputButton(6, 0, red=1, green=2, pressed_red=3, pressed_green=0,
                                  key_output=0x2A,  # Multiply
                                  flashing=True, description="Supercruise")
    # Wingman 1 / 2 / 3
    pad_state[1][1] = InputButton(1, 1, red=0, green=3, pressed_red=3, pressed_green=0,
                                  key_output=0x37,  # KEY_7
                                  flashing=False, description="Wingman 1")
    pad_state[2][1] = InputButton(2, 1, red=0, green=3, pressed_red=3, pressed_green=0,
                                  key_output=0x38,  # KEY_8
                                  flashing=False, description="Wingman 2")
    pad_state[3][1] = InputButton(3, 1, red=0, green=3, pressed_red=3, pressed_green=0,
                                  key_output=0x39,  # KEY_9
                                  flashing=False, description="Wingman 3")
    # Wingman Nav-Lock
    # Fixed typo in the user-facing description ("Winman" -> "Wingman").
    pad_state[4][1] = InputButton(4, 1, red=2, green=3, pressed_red=3, pressed_green=0,
                                  key_output=0x2D,  # Minus
                                  flashing=False, description="Wingman Nav-Lock")
    # Cargo Scoop
    pad_state[6][2] = ToggleButton(6, 2, red=3, green=3, toggled_red=3, toggled_green=0,
                                   key_output_set=0xD2,  # Home
                                   key_output_cleared=0xD2, flashing=True, description="Cargo Scoop")
    # Silent Running
    pad_state[7][2] = ToggleButton(7, 2, red=3, green=0, toggled_red=3, toggled_green=3,
                                   key_output_set=0xCC,  # F11
                                   key_output_cleared=0xCC, flashing=True, description="Silent Running")
    # Jettison Cargo
    pad_state[4][2] = InputButton(4, 2, red=3, green=0, pressed_red=3, pressed_green=3,
                                  key_output=0xD5,  # End
                                  flashing=False, description="Jettison Cargo")
|
bobhelander/launchpad_mapper
|
elite_mapping.py
|
Python
|
gpl-3.0
| 13,679
|
[
"Galaxy"
] |
386cc8e94214ec5369f926cdf39bfffd61b3eb908edb21c9ecb2a6cde0dc8ead
|
# Authors: Veeresh Taranalli <veeresht@gmail.com>
# License: BSD 3-Clause
"""
============================================
Pulse Shaping Filters (:mod:`commpy.filters`)
============================================
.. autosummary::
:toctree: generated/
rcosfilter -- Raised Cosine (RC) Filter.
rrcosfilter -- Root Raised Cosine (RRC) Filter.
gaussianfilter -- Gaussian Filter.
rectfilter -- Rectangular Filter.
"""
import numpy as np
__all__=['rcosfilter', 'rrcosfilter', 'gaussianfilter', 'rectfilter']
def rcosfilter(N, alpha, Ts, Fs):
    """Build the FIR impulse response of a raised-cosine (RC) pulse.

    Parameters
    ----------
    N : int
        Filter length in samples.
    alpha : float
        Roll-off factor, valid in [0, 1].
    Ts : float
        Symbol period in seconds.
    Fs : float
        Sampling rate in Hz.

    Returns
    -------
    time_idx : 1-D ndarray of float
        Time axis, in seconds, of the impulse response.
    h_rc : 1-D ndarray of float
        Raised-cosine impulse response.
    """
    step = 1 / float(Fs)
    time_idx = (np.arange(N) - N / 2) * step
    h_rc = np.zeros(N, dtype=float)
    for k, t in enumerate(time_idx):
        if t == 0.0:
            # Pulse peak: limiting value of the sinc at the origin.
            h_rc[k] = 1.0
        elif alpha != 0 and (t == Ts / (2 * alpha) or t == -Ts / (2 * alpha)):
            # Removable singularity of the cosine factor at t = +/- Ts/(2*alpha).
            h_rc[k] = (np.pi / 4) * (np.sin(np.pi * t / Ts) / (np.pi * t / Ts))
        else:
            ratio = (2 * alpha * t) / Ts
            h_rc[k] = (np.sin(np.pi * t / Ts) / (np.pi * t / Ts)) * \
                (np.cos(np.pi * alpha * t / Ts) / (1 - ratio * ratio))
    return time_idx, h_rc
def rrcosfilter(N, alpha, Ts, Fs):
    """Build the FIR impulse response of a root-raised-cosine (RRC) pulse.

    Parameters
    ----------
    N : int
        Filter length in samples.
    alpha : float
        Roll-off factor, valid in [0, 1].
    Ts : float
        Symbol period in seconds.
    Fs : float
        Sampling rate in Hz.

    Returns
    -------
    time_idx : 1-D ndarray of float
        Time axis, in seconds, of the impulse response.
    h_rrc : 1-D ndarray of float
        Root-raised-cosine impulse response.
    """
    step = 1 / float(Fs)
    time_idx = (np.arange(N) - N / 2) * step
    h_rrc = np.zeros(N, dtype=float)
    for k, t in enumerate(time_idx):
        if t == 0.0:
            # Closed-form limit at the origin.
            h_rrc[k] = 1.0 - alpha + (4 * alpha / np.pi)
        elif alpha != 0 and (t == Ts / (4 * alpha) or t == -Ts / (4 * alpha)):
            # Removable singularity at t = +/- Ts/(4*alpha).
            h_rrc[k] = (alpha / np.sqrt(2)) * (
                ((1 + 2 / np.pi) * (np.sin(np.pi / (4 * alpha))))
                + ((1 - 2 / np.pi) * (np.cos(np.pi / (4 * alpha)))))
        else:
            numer = np.sin(np.pi * t * (1 - alpha) / Ts) + \
                4 * alpha * (t / Ts) * np.cos(np.pi * t * (1 + alpha) / Ts)
            denom = np.pi * t * (1 - (4 * alpha * t / Ts) * (4 * alpha * t / Ts)) / Ts
            h_rrc[k] = numer / denom
    return time_idx, h_rrc
def gaussianfilter(N, alpha, Ts, Fs):
    """Build the FIR impulse response of a Gaussian pulse-shaping filter.

    Parameters
    ----------
    N : int
        Filter length in samples.
    alpha : float
        Roll-off factor, valid in [0, 1].
    Ts : float
        Symbol period in seconds.  (Accepted for interface symmetry with the
        other filters; the Gaussian shape here depends only on alpha and Fs.)
    Fs : float
        Sampling rate in Hz.

    Returns
    -------
    time_idx : 1-D ndarray of float
        Time axis, in seconds, of the impulse response.
    h_gaussian : 1-D ndarray of float
        Gaussian impulse response.
    """
    step = 1 / float(Fs)
    time_idx = (np.arange(N) - N / 2) * step
    scaled = np.pi * time_idx / alpha
    h_gaussian = (np.sqrt(np.pi) / alpha) * np.exp(-(scaled * scaled))
    return time_idx, h_gaussian
def rectfilter(N, Ts, Fs):
    """Build the FIR impulse response of a rectangular (boxcar) filter.

    Parameters
    ----------
    N : int
        Filter length in samples.
    Ts : float
        Symbol period in seconds.
    Fs : float
        Sampling rate in Hz.

    Returns
    -------
    time_idx : 1-D ndarray of float
        Time axis, in seconds, of the impulse response.
    h_rect : 1-D ndarray of float
        All-ones impulse response of length N.
    """
    step = 1 / float(Fs)
    time_idx = (np.arange(N) - N / 2) * step
    return time_idx, np.ones(N)
|
tarunlnmiit/CommPy
|
commpy/filters.py
|
Python
|
bsd-3-clause
| 4,729
|
[
"Gaussian"
] |
ce846408715a2f1cac3a7020316df1dc0b946486abbf9fcba80c5503bbcb3df0
|
#!/usr/bin/env python
'''
@name: probEstimate.py
@author: Juan C. Castro <jccastrog at gatech dot edu> & William T. Harvey <wharvey31@gatech.edu>
@update: 21-Jun-2017
@version: 1.0.8
@license: GNU General Public License v3.0.
please type "./protEstimate.py -h" for usage help
'''
'''1.0 Import modules, define functions, and initialize variables'''
#========================1.1 Import modules=========================
try:
import os, sys, argparse #interact with the system, use regular expressions
from os.path import basename
import numpy as np #make calculations base
from Bio import SeqIO #parse and manipulate fasta files
from scipy import stats
except:
sys.stderr.write('ERROR! Cannot import required modules remember probEstimate.py requires os, sys, argparse, numpy, scipy, and Bio')
#=====================1.2 Initialize variables======================
# Command-line interface.  Required: '-t' reference FastA, '-m' one or more
# tabular BLAST mapping files, '-p' parameter file from fitModel.py.
parser = argparse.ArgumentParser(description="protEstimate.py: Estimation of bacterial genomes in biological samples [jccastrog@gatech.edu]")
group = parser.add_argument_group('Required arguments') #Required
group.add_argument('-t', action='store', dest='target', required=True, default='', help='Subject sequences (ref) in FastA format.')
group.add_argument('-m', action='store', dest='map', required=True, default='', help='One or more Tabular BLAST files of reads vs genes (or contigs).',nargs='+')
group.add_argument('-p', action='store', dest='param', required=True,help="Parameters file obtained from fitModel.py")
group = parser.add_argument_group('Optional arguments') #Required
group.add_argument('-l', action='store', dest='mode', required=False, default='single', choices=["single","general"],help='Number of parameters to be used to estimate the prescence probability: single mode will use only sequencing breadth whereas general mode will use sequencing depth and sequencing breadth. (default : %(default)s)')
group.add_argument('-i', action='store', dest='perc_identity', required=False, default='95', help='Percentage of identity to recruit a read to the genome. (default: %(default)s)')
group.add_argument('-a', action= 'store', dest='aln_length', required=False, default='135', help='Alignment length to recruit a read to the genome. (default: %(default)s)')
args = parser.parse_args()
# Accumulators filled while parsing the inputs in section 2.
genomeSize = 0  # total length (bp) of all reference sequences
thetaB = []     # logistic parameters for the breadth-only model (2 fields)
thetaBD = []    # logistic parameters for the breadth+depth model (3 fields)
#=======================1.3 Define functions========================
def predProb(X, theta):
    """Logistic-model probability: the sigmoid of the dot product X . theta."""
    features = np.array(tuple(X))
    params = np.array(tuple(theta))
    z = np.dot(features, params)
    return 1 / (1 + np.exp(-z))
'''2.0 Load the blast results to calculate sequencing depth and breadth'''
#====================2.1 Sequencing depth and breadth=====================
#2.1.1 Parse reference====================================================
# Sum the lengths of every reference sequence to obtain the genome size.
with open(args.target) as fastaFile:
    for fastaParse in SeqIO.parse(fastaFile,"fasta"):
        ID = fastaParse.id
        seq = fastaParse.seq
        genomeSize = genomeSize+len(seq)
#2.1.2 Parse parameters===================================================
# Two-field rows feed the breadth-only model (thetaB); three-field rows
# feed the breadth+depth model (thetaBD).
with open(args.param) as paramFile:
    lines = paramFile.readlines()
    for line in lines:
        line = line.rstrip('\n')
        fields = line.split(',')
        if len(fields)==2:
            thetaB = []
            for field in fields:
                thetaB.append(float(field))
        elif len(fields)==3:
            for field in fields:
                thetaBD.append(float(field))
#2.1.3 Parse mapping=====================================================
# NOTE(review): Python 2 print statements — this script is Python 2 only.
print 'File'+'\t'+'Depth'+'\t'+'Breadth'+'\t'+'p-Value'
for file in args.map:
    genPos = dict()                  # reference positions covered by >=1 read
    depthPos = np.zeros(genomeSize)  # per-position coverage depth
    wholeDepth = 0                   # total aligned bases across all reads
    with open(file) as map:
        lines = map.readlines()
        for line in lines:
            line = line.rstrip('\n')
            fields = line.split('\t')
            alnLength = int(fields[3])
            perIden = float(fields[2])
            # Recruit the read only if it passes both identity and length filters.
            if alnLength>=int(args.aln_length) and perIden>=float(args.perc_identity):
                subSta = int(fields[8])
                subEnd = int(fields[9])
                wholeDepth = wholeDepth + alnLength
                # Subject coordinates are reversed for minus-strand hits,
                # hence the two symmetric branches below.
                if subSta < subEnd:
                    keys = range(subSta-1,subEnd)
                    for key in keys:
                        if key in genPos:
                            depthPos[key]+=1
                            continue
                        else:
                            genPos[key]=1
                            depthPos[key]+=1
                else:
                    keys = range(subEnd-1,subSta)
                    for key in keys:
                        if key in genPos:
                            depthPos[key]+=1
                            continue
                        else:
                            genPos[key]=1
                            depthPos[key]+=1
    #2.3.2 Calculate sequencing depth and breadth====================
    seqDepth = wholeDepth/float(genomeSize)
    # NOTE(review): the value above is immediately overwritten by the trimmed
    # mean below — presumably the trimmed mean is the intended (more robust)
    # estimator, making the previous line dead work; confirm before removing.
    seqDepth = stats.trim_mean(depthPos,0.025)
    seqBreadth = sum(genPos.values())/float(genomeSize)
    '''3.0 Calculate the probability of presence'''
    if args.mode=='single':
        X = [1,seqBreadth]
        predict=predProb(X,thetaB)
    else:
        X = [1,seqBreadth,seqDepth]
        predict=predProb(X,thetaBD)
    print basename(file)+'\t'+str(seqDepth)+'\t'+str(seqBreadth)+'\t'+str(1-predict)
|
jccastrog/imGLAD
|
probEstimate.py
|
Python
|
gpl-3.0
| 4,852
|
[
"BLAST"
] |
95da3310550d2bbd97d55e642c3abb4d36062a953f09d01d35fc408235628478
|
#!/usr/bin/env python
from pyscf import gto
from pyscf import scf
from pyscf import mrpt
from pyscf.dmrgscf import DMRGSCF
#
# NEVPT2 calculation requires about 200 GB memory in total
#
# Cr2 dimer at the chosen bond length; output file name encodes the distance.
b = 1.5
mol = gto.Mole()
mol.verbose = 5
mol.output = 'cr2-%3.2f.out' % b
mol.atom = [
    ['Cr',( 0.000000, 0.000000, -b/2)],
    ['Cr',( 0.000000, 0.000000, b/2)],
]
mol.basis = {'Cr': 'ccpvdz-dk'}
mol.symmetry = True
mol.build()
# Scalar-relativistic (x2c) RHF with a level shift to aid convergence.
m = scf.RHF(mol).x2c().run(conv_tol=1e-9, chkfile='hf_chk-%s'%b, level_shift=0.5)
#
# Note: stream operations are used here. This one line code is equivalent to
# the following serial statements.
#
#m = scf.sfx2c1e(scf.RHF(mol))
#m.conv_tol = 1e-9
#m.chkfile = 'hf_chk-%s'%b
#m.level_shift = 0.5
#m.kernel()
# Re-converge from the shifted solution with the level shift removed.
dm = m.make_rdm1()
m.level_shift = 0
m.scf(dm)
# DMRG-CASSCF with a (20 orbital, 28 electron) active space.
mc = DMRGSCF(m, 20, 28) # 20o, 28e
mc.fcisolver.maxM = 1000
mc.fcisolver.tol = 1e-6
mc.chkfile = 'mc_chk_18o-%s'%b
# Active-space composition per irreducible representation.
cas_occ = {'A1g':4, 'A1u':4,
           'E1ux':2, 'E1uy':2, 'E1gx':2, 'E1gy':2,
           'E2ux':1, 'E2uy':1, 'E2gx':1, 'E2gy':1}
mo = mc.sort_mo_by_irrep(cas_occ)
mc.kernel(mo)
#
# DMRG-NEVPT2 (slow version)
# not available since StackBlock 1.5.3
#
# mrpt.NEVPT(mc).kernel()
#
# The compressed-MPS-perturber DMRG-NEVPT2 is more efficient.
#
mrpt.NEVPT(mc).compress_approx().kernel()
|
gkc1000/pyscf
|
examples/dmrg/30-dmrg_casscf_nevpt2_for_Cr2.py
|
Python
|
apache-2.0
| 1,289
|
[
"PySCF"
] |
6ed856cecd3299d57482a1804b628c87fa958bdaed4f612b984e13ab2b0dd4f5
|
# Copyright 2017 The TensorFlow Authors. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# ==============================================================================
"""Benchmarks for `tf.data.experimental.rejection_resample()`."""
from __future__ import absolute_import
from __future__ import division
from __future__ import print_function
import time
import numpy as np
from six.moves import xrange # pylint: disable=redefined-builtin
from tensorflow.python.client import session
from tensorflow.python.data.experimental.ops import resampling
from tensorflow.python.data.ops import dataset_ops
from tensorflow.python.platform import test
def _time_resampling(data_np, target_dist, init_dist, num_to_sample):  # pylint: disable=missing-docstring
  # Wall-clock time to draw `num_to_sample` elements from a rejection-resampled
  # dataset built over `data_np`, repeated indefinitely.
  ds = dataset_ops.Dataset.from_tensor_slices(data_np).repeat()
  # Reshape the class distribution via rejection sampling.
  ds = ds.apply(
      resampling.rejection_resample(
          class_func=lambda x: x,
          target_dist=target_dist,
          initial_dist=init_dist,
          seed=142))
  opts = dataset_ops.Options()
  opts.experimental_optimization.apply_default_optimizations = False
  ds = ds.with_options(opts)
  next_element = dataset_ops.make_one_shot_iterator(ds).get_next()
  with session.Session() as sess:
    begin = time.time()
    for _ in xrange(num_to_sample):
      sess.run(next_element)
    elapsed = time.time() - begin
  return elapsed
class RejectionResampleBenchmark(test.Benchmark):
  """Benchmarks for `tf.data.experimental.rejection_resample()`."""

  def benchmark_resample_performance(self):
    """Times resampling a uniform four-class stream to a dirac-delta target."""
    init_dist = [0.25, 0.25, 0.25, 0.25]
    target_dist = [0.0, 0.0, 0.0, 1.0]
    num_classes = len(init_dist)
    # We don't need many samples to test a dirac-delta target distribution.
    num_samples = 1000
    data_np = np.random.choice(num_classes, num_samples, p=init_dist)
    elapsed = _time_resampling(
        data_np, target_dist, init_dist, num_to_sample=1000)
    self.report_benchmark(iters=1000, wall_time=elapsed, name="resample")
if __name__ == "__main__":
test.main()
|
ghchinoy/tensorflow
|
tensorflow/python/data/experimental/benchmarks/rejection_resample_benchmark.py
|
Python
|
apache-2.0
| 2,654
|
[
"DIRAC"
] |
583aa760b1ce3df2e521b8116ad71afd45ce3ac980921e9b3dcadb04f471535b
|
#
# (c) 2015 Brian Ccoa, <bcoca@ansible.com>
#
# This file is part of Ansible
#
# Ansible is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# Ansible is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with Ansible. If not, see <http://www.gnu.org/licenses/>.
#
"""
This module adds shared support for generic api modules
In order to use this module, include it as part of a custom
module as shown below.
** Note: The order of the import statements does matter. **
from ansible.module_utils.basic import *
from ansible.module_utils.api import *
The 'api' module provides the following common argument specs:
* rate limit spec
- rate: number of requests per time unit (int)
- rate_limit: time window in which the limit is applied in seconds
* retry spec
- retries: number of attempts
- retry_pause: delay between attempts in seconds
"""
import time
def rate_limit_argument_spec(spec=None):
    """Create an argument spec for working with rate limiting.

    :param spec: optional dict of extra argument definitions to merge in
    :return: dict mapping option names to their argspec definitions
    """
    argument_spec = dict(
        rate=dict(type='int'),
        rate_limit=dict(type='int'),
    )
    if spec:
        argument_spec.update(spec)
    return argument_spec
def retry_argument_spec(spec=None):
    """Create an argument spec for working with retrying.

    :param spec: optional dict of extra argument definitions to merge in
    :return: dict mapping option names to their argspec definitions
    """
    argument_spec = dict(
        retries=dict(type='int'),
        retry_pause=dict(type='float', default=1),
    )
    if spec:
        argument_spec.update(spec)
    return argument_spec
def rate_limit(rate=None, rate_limit=None):
    """Rate limiting decorator.

    Ensures the wrapped function is called at most ``rate`` times per
    ``rate_limit`` seconds by sleeping between calls when necessary.
    If either parameter is None, no limiting is applied.

    :param rate: maximum number of calls per window
    :param rate_limit: window length in seconds
    """
    minrate = None
    if rate is not None and rate_limit is not None:
        # Minimum interval, in seconds, between consecutive calls.
        minrate = float(rate_limit) / float(rate)

    def wrapper(f):
        last = [0.0]  # mutable cell so the closure can update the timestamp

        def ratelimited(*args, **kwargs):
            if minrate is not None:
                # time.monotonic() replaces time.clock(), which was removed in
                # Python 3.8 (and measured CPU time, not wall time, on Linux).
                elapsed = time.monotonic() - last[0]
                left = minrate - elapsed
                if left > 0:
                    time.sleep(left)
                last[0] = time.monotonic()
            ret = f(*args, **kwargs)
            return ret
        return ratelimited
    return wrapper
def retry(retries=None, retry_pause=1):
    """Retry decorator.

    Re-invokes the wrapped function until it returns a truthy value,
    sleeping ``retry_pause`` seconds between attempts and raising
    ``Exception`` once the attempt counter reaches ``retries``.
    Exceptions raised by the function itself are swallowed and counted
    as failed attempts (best-effort semantics, preserved from the
    original).  With ``retries=None`` the function is called straight
    through with no retry logic.

    :param retries: attempt limit (the limit check happens before each
        call, matching the original behavior)
    :param retry_pause: delay between attempts in seconds
    """
    def wrapper(f):
        def retried(*args, **kwargs):
            if retries is None:
                # No retry budget configured: plain pass-through call.
                # (The original fell through and hit a NameError on 'ret'.)
                return f(*args, **kwargs)
            # Counter is local to each call; the original incremented a
            # closure variable without 'nonlocal', raising UnboundLocalError.
            retry_count = 0
            ret = None
            while True:
                retry_count += 1
                if retry_count >= retries:
                    raise Exception("Retry limit exceeded: %d" % retries)
                try:
                    ret = f(*args, **kwargs)
                except Exception:
                    # Deliberate best-effort: failure counts as a falsy result.
                    # Narrowed from a bare 'except:' so SystemExit and
                    # KeyboardInterrupt still propagate.
                    pass
                if ret:
                    break
                time.sleep(retry_pause)
            return ret
        return retried
    return wrapper
|
xpac1985/ansible
|
lib/ansible/module_utils/api.py
|
Python
|
gpl-3.0
| 3,198
|
[
"Brian"
] |
27850f7e69a04045ae26e0209869e1b33efa0cf828a150ce8c1806c12ff287bd
|
# -*- coding: utf-8 -*-
"""Test parsing variants."""
import logging
import re
import unittest
from pybel.constants import (
CONCEPT,
FRAGMENT,
FRAGMENT_DESCRIPTION,
FRAGMENT_MISSING,
FRAGMENT_START,
FRAGMENT_STOP,
FUSION_MISSING,
FUSION_REFERENCE,
FUSION_START,
FUSION_STOP,
GMOD,
IDENTIFIER,
KIND,
LOCATION,
NAME,
NAMESPACE,
PARTNER_3P,
PARTNER_5P,
PMOD,
PMOD_CODE,
PMOD_POSITION,
RANGE_3P,
RANGE_5P,
)
from pybel.dsl import GeneModification, Hgvs, ProteinModification
from pybel.language import Entity
from pybel.parser import ConceptParser
from pybel.parser.modifiers import (
get_fragment_language,
get_fusion_language,
get_gene_modification_language,
get_gene_substitution_language,
get_hgvs_language,
get_location_language,
get_protein_modification_language,
get_protein_substitution_language,
get_truncation_language,
)
log = logging.getLogger(__name__)
class TestHGVSParser(unittest.TestCase):
    """Tests parsing HGVS variant strings via the var()/variant() BEL syntax."""

    def setUp(self):
        # Fresh HGVS grammar for each test.
        self.parser = get_hgvs_language()

    def test_protein_del(self):
        # Protein-level deletion, long function name, unquoted argument.
        statement = "variant(p.Phe508del)"
        expected = Hgvs("p.Phe508del")
        result = self.parser.parseString(statement)
        self.assertEqual(expected, result.asDict())

    def test_protein_del_quoted(self):
        # Same deletion with the HGVS string quoted.
        statement = 'variant("p.Phe508del")'
        expected = Hgvs("p.Phe508del")
        result = self.parser.parseString(statement)
        self.assertEqual(expected, result.asDict())

    def test_protein_mut(self):
        # Missense substitution via the short function name.
        statement = "var(p.Gly576Ala)"
        expected = Hgvs("p.Gly576Ala")
        result = self.parser.parseString(statement)
        self.assertEqual(expected, result.asDict())

    def test_unspecified(self):
        # "=" denotes an unspecified variant.
        statement = "var(=)"
        expected = Hgvs("=")
        result = self.parser.parseString(statement)
        self.assertEqual(expected, result.asDict())

    def test_frameshift(self):
        statement = "variant(p.Thr1220Lysfs)"
        expected = Hgvs("p.Thr1220Lysfs")
        result = self.parser.parseString(statement)
        self.assertEqual(expected, result.asDict())

    def test_snp(self):
        # Coding-DNA level deletion.
        statement = "var(c.1521_1523delCTT)"
        expected = Hgvs("c.1521_1523delCTT")
        result = self.parser.parseString(statement)
        self.assertEqual(expected, result.asDict())

    def test_chromosome_1(self):
        # Genomic-level deletion.
        statement = "variant(g.117199646_117199648delCTT)"
        expected = Hgvs("g.117199646_117199648delCTT")
        result = self.parser.parseString(statement)
        self.assertEqual(expected, result.asDict())

    def test_chromosome_2(self):
        statement = "var(c.1521_1523delCTT)"
        expected = Hgvs("c.1521_1523delCTT")
        result = self.parser.parseString(statement)
        self.assertEqual(expected, result.asDict())

    def test_rna_del(self):
        # RNA-level deletion (lowercase nucleotides).
        statement = "var(r.1653_1655delcuu)"
        expected = Hgvs("r.1653_1655delcuu")
        result = self.parser.parseString(statement)
        self.assertEqual(expected, result.asDict())

    def test_protein_trunc_triple(self):
        # Truncation with the three-letter amino-acid code.
        statement = "var(p.Cys65*)"
        result = self.parser.parseString(statement)
        expected = Hgvs("p.Cys65*")
        self.assertEqual(expected, result.asDict())

    def test_protein_trunc_legacy(self):
        # Legacy truncation syntax without the residue name.
        statement = "var(p.65*)"
        result = self.parser.parseString(statement)
        expected = Hgvs("p.65*")
        self.assertEqual(expected, result.asDict())
class TestPmod(unittest.TestCase):
    """Tests parsing pmod()/proteinModification() BEL expressions."""

    def setUp(self):
        # Wildcard namespace patterns so any MOD/HGNC name parses.
        identifier_parser = ConceptParser(
            namespace_to_pattern={
                "MOD": re.compile(".*"),
                "HGNC": re.compile(".*"),
            }
        )
        self.parser = get_protein_modification_language(
            concept_qualified=identifier_parser.identifier_qualified,
            concept_fqualified=identifier_parser.identifier_fqualified,
        )

    def _help_test_pmod_simple(self, statement):
        # Bare modification (no residue/position) should normalize to the
        # GO "protein phosphorylation" concept.
        result = self.parser.parseString(statement)
        expected = {
            KIND: PMOD,
            CONCEPT: {
                NAMESPACE: "go",
                NAME: "protein phosphorylation",
                IDENTIFIER: "0006468",
            },
        }
        self.assertEqual(expected, ProteinModification("Ph"))
        self.assertEqual(expected, result.asDict())

    def test_bel_name(self):
        # long function, legacy modification
        self._help_test_pmod_simple("proteinModification(P)")
        # long function, new modification
        self._help_test_pmod_simple("proteinModification(Ph)")
        # short function, legacy modification
        self._help_test_pmod_simple("pmod(P)")
        # short function, new modification
        self._help_test_pmod_simple("pmod(Ph)")

    def _help_test_pmod_with_residue(self, statement):
        # Modification plus a residue code, no position.
        result = self.parser.parseString(statement)
        expected = {
            KIND: PMOD,
            CONCEPT: {
                NAMESPACE: "go",
                NAME: "protein phosphorylation",
                IDENTIFIER: "0006468",
            },
            PMOD_CODE: "Ser",
        }
        self.assertEqual(expected, ProteinModification("Ph", code="Ser"))
        self.assertEqual(expected, result.asDict())

    def test_residue(self):
        # short amino acid
        self._help_test_pmod_with_residue("pmod(Ph, S)")
        # long amino acid
        self._help_test_pmod_with_residue("pmod(Ph, Ser)")

    def _help_test_pmod_full(self, statement):
        # Fully-specified modification: concept + residue + position.
        result = self.parser.parseString(statement)
        expected = {
            KIND: PMOD,
            CONCEPT: {
                NAMESPACE: "go",
                NAME: "protein phosphorylation",
                IDENTIFIER: "0006468",
            },
            PMOD_CODE: "Ser",
            PMOD_POSITION: 473,
        }
        self.assertEqual(expected, ProteinModification("Ph", code="Ser", position=473))
        self.assertEqual(expected, result.asDict())

    def test_full(self):
        # All combinations of long/short function name, legacy/new
        # modification code, and long/short residue code.
        self._help_test_pmod_full("proteinModification(P, Ser, 473)")
        self._help_test_pmod_full("proteinModification(P, S, 473)")
        self._help_test_pmod_full("proteinModification(Ph, Ser, 473)")
        self._help_test_pmod_full("proteinModification(Ph, S, 473)")
        self._help_test_pmod_full("pmod(P, Ser, 473)")
        self._help_test_pmod_full("pmod(P, S, 473)")
        self._help_test_pmod_full("pmod(Ph, Ser, 473)")
        self._help_test_pmod_full("pmod(Ph, S, 473)")

    def _help_test_non_standard_namespace(self, statement):
        # Explicit MOD: namespace bypasses the default GO normalization.
        result = self.parser.parseString(statement)
        expected = {
            KIND: PMOD,
            CONCEPT: Entity(namespace="MOD", name="PhosRes"),
            PMOD_CODE: "Ser",
            PMOD_POSITION: 473,
        }
        self.assertEqual(
            expected,
            ProteinModification(name="PhosRes", namespace="MOD", code="Ser", position=473),
        )
        self.assertEqual(expected, result.asDict())

    def test_full_with_non_standard_namespace(self):
        self._help_test_non_standard_namespace("proteinModification(MOD:PhosRes, S, 473)")
        self._help_test_non_standard_namespace("proteinModification(MOD:PhosRes, Ser, 473)")
        self._help_test_non_standard_namespace("proteinModification(MOD:PhosRes, S, 473)")
        self._help_test_non_standard_namespace("proteinModification(MOD:PhosRes, Ser, 473)")
class TestGeneModification(unittest.TestCase):
    """Tests parsing gmod()/geneModification() BEL expressions."""

    def setUp(self):
        identifier_parser = ConceptParser()
        self.parser = get_gene_modification_language(
            concept_fqualified=identifier_parser.identifier_fqualified,
            concept_qualified=identifier_parser.identifier_qualified,
        )
        # All tests below should parse to the same methylation concept.
        self.expected = GeneModification("Me")

    def test_dsl(self):
        # The DSL object itself should normalize to the GO DNA-methylation concept.
        self.assertEqual(
            {
                KIND: GMOD,
                CONCEPT: {
                    NAME: "DNA methylation",
                    IDENTIFIER: "0006306",
                    NAMESPACE: "go",
                },
            },
            self.expected,
        )

    def test_gmod_short(self):
        # Single-letter legacy code.
        statement = "geneModification(M)"
        result = self.parser.parseString(statement)
        self.assertEqual(self.expected, result.asDict())

    def test_gmod_unabbreviated(self):
        # Two-letter code.
        statement = "geneModification(Me)"
        result = self.parser.parseString(statement)
        self.assertEqual(self.expected, result.asDict())

    def test_gmod_long(self):
        # Full word.
        statement = "geneModification(methylation)"
        result = self.parser.parseString(statement)
        self.assertEqual(self.expected, result.asDict())
class TestProteinSubstitution(unittest.TestCase):
    """Tests legacy sub() protein-substitution syntax normalizing to HGVS."""

    def setUp(self):
        self.parser = get_protein_substitution_language()

    def test_psub_1(self):
        # One-letter amino-acid codes.
        statement = "sub(A, 127, Y)"
        result = self.parser.parseString(statement)
        expected_list = Hgvs("p.Ala127Tyr")
        self.assertEqual(expected_list, result.asDict())

    def test_psub_2(self):
        # Three-letter amino-acid codes give the same HGVS string.
        statement = "sub(Ala, 127, Tyr)"
        result = self.parser.parseString(statement)
        expected_list = Hgvs("p.Ala127Tyr")
        self.assertEqual(expected_list, result.asDict())
class TestGeneSubstitutionParser(unittest.TestCase):
    """Tests for parsing legacy BEL gene substitutions into HGVS objects."""

    def setUp(self):
        self.parser = get_gene_substitution_language()

    def test_gsub(self):
        """A nucleotide substitution becomes a coding-DNA HGVS variant."""
        parsed = self.parser.parseString("sub(G,308,A)")
        self.assertEqual(Hgvs("c.308G>A"), parsed.asDict())
class TestFragmentParser(unittest.TestCase):
    """See http://openbel.org/language/web/version_2.0/bel_specification_version_2.0.html#_examples_2"""

    def setUp(self):
        self.parser = get_fragment_language()

    def _assert_fragment(self, statement, expected):
        """Parse ``statement`` and compare the result dict against ``expected``."""
        parsed = self.parser.parseString(statement)
        self.assertEqual(expected, parsed.asDict())

    def _help_test_known_length(self, s):
        self._assert_fragment(s, {KIND: FRAGMENT, FRAGMENT_START: 5, FRAGMENT_STOP: 20})

    def test_known_length_unquoted(self):
        """test known length"""
        self._help_test_known_length("frag(5_20)")

    def test_known_length_quotes(self):
        """test known length"""
        self._help_test_known_length('frag("5_20")')

    def _help_test_unknown_length(self, s):
        self._assert_fragment(s, {KIND: FRAGMENT, FRAGMENT_START: 1, FRAGMENT_STOP: "?"})

    def test_unknown_length_unquoted(self):
        """amino-terminal fragment of unknown length"""
        self._help_test_unknown_length("frag(1_?)")

    def test_unknown_length_quoted(self):
        """amino-terminal fragment of unknown length"""
        self._help_test_unknown_length('frag("1_?")')

    def _help_test_unknown_start_stop(self, s):
        self._assert_fragment(s, {KIND: FRAGMENT, FRAGMENT_START: "?", FRAGMENT_STOP: "*"})

    def test_unknown_start_stop_unquoted(self):
        """fragment with unknown start/stop"""
        self._help_test_unknown_start_stop("frag(?_*)")

    def test_unknown_start_stop_quoted(self):
        """fragment with unknown start/stop"""
        self._help_test_unknown_start_stop('frag("?_*")')

    def _help_test_descriptor(self, s):
        self._assert_fragment(s, {KIND: FRAGMENT, FRAGMENT_MISSING: "?", FRAGMENT_DESCRIPTION: "55kD"})

    def test_descriptor_unquoted(self):
        """fragment with unknown start/stop and a descriptor"""
        self._help_test_descriptor('frag(?, "55kD")')

    def test_descriptor_quoted(self):
        """fragment with unknown start/stop and a descriptor"""
        self._help_test_descriptor('frag("?", "55kD")')
class TestTruncationParser(unittest.TestCase):
    """Tests for parsing legacy BEL truncation syntax into HGVS objects."""

    def setUp(self):
        self.parser = get_truncation_language()

    def test_trunc_1(self):
        """A bare position becomes a stop-gain at that residue."""
        parsed = self.parser.parseString("trunc(40)")
        self.assertEqual(Hgvs("p.40*"), parsed.asDict())

    def test_trunc_2(self):
        """Test a truncation in which the amino acid is specified."""
        parsed = self.parser.parseString("trunc(Gly40)")
        self.assertEqual(Hgvs("p.Gly40*"), parsed.asDict())

    def test_trunc_missing_number(self):
        """Test that an error is raised for a truncation in which the position is omitted."""
        with self.assertRaises(Exception):
            self.parser.parseString("trunc(Gly)")
class TestFusionParser(unittest.TestCase):
    """Tests for parsing BEL fusion (fus/fusion) expressions.

    Each test checks that the 5' partner, 3' partner, and their breakpoint
    ranges are extracted into the expected dictionary structure.
    """

    def setUp(self):
        # HGNC names are accepted via a catch-all regex pattern.
        identifier_parser = ConceptParser(namespace_to_pattern={"HGNC": re.compile(".*")})
        identifier_qualified = identifier_parser.identifier_qualified
        self.parser = get_fusion_language(identifier_qualified)

    def test_rna_fusion_known_breakpoints(self):
        """RNA abundance of fusion with known breakpoints"""
        statement = "fus(HGNC:TMPRSS2, r.1_79, HGNC:ERG, r.312_5034)"
        result = self.parser.parseString(statement)
        expected = {
            PARTNER_5P: {
                CONCEPT: {
                    NAMESPACE: "HGNC",
                    NAME: "TMPRSS2",
                },
            },
            RANGE_5P: {FUSION_REFERENCE: "r", FUSION_START: 1, FUSION_STOP: 79},
            PARTNER_3P: {
                CONCEPT: {
                    NAMESPACE: "HGNC",
                    NAME: "ERG",
                },
            },
            RANGE_3P: {
                FUSION_REFERENCE: "r",
                FUSION_START: 312,
                FUSION_STOP: 5034,
            },
        }
        self.assertEqual(expected, result.asDict())

    def test_rna_fusion_unspecified_breakpoints(self):
        """RNA abundance of fusion with unspecified breakpoints"""
        statement = "fus(HGNC:TMPRSS2, ?, HGNC:ERG, ?)"
        result = self.parser.parseString(statement)
        expected = {
            PARTNER_5P: {
                CONCEPT: {
                    NAMESPACE: "HGNC",
                    NAME: "TMPRSS2",
                }
            },
            RANGE_5P: {FUSION_MISSING: "?"},
            PARTNER_3P: {
                CONCEPT: {
                    NAMESPACE: "HGNC",
                    NAME: "ERG",
                },
            },
            RANGE_3P: {FUSION_MISSING: "?"},
        }
        self.assertEqual(expected, result.asDict())

    def test_rna_fusion_specified_one_fuzzy_breakpoint(self):
        """RNA abundance of fusion with one fuzzy (unknown-start) breakpoint"""
        statement = "fusion(HGNC:TMPRSS2, r.1_79, HGNC:ERG, r.?_1)"
        result = self.parser.parseString(statement)
        expected = {
            PARTNER_5P: {
                CONCEPT: {
                    NAMESPACE: "HGNC",
                    NAME: "TMPRSS2",
                },
            },
            RANGE_5P: {FUSION_REFERENCE: "r", FUSION_START: 1, FUSION_STOP: 79},
            PARTNER_3P: {
                CONCEPT: {
                    NAMESPACE: "HGNC",
                    NAME: "ERG",
                },
            },
            RANGE_3P: {FUSION_REFERENCE: "r", FUSION_START: "?", FUSION_STOP: 1},
        }
        self.assertEqual(expected, result.asDict())

    def test_rna_fusion_specified_fuzzy_breakpoints(self):
        """RNA abundance of fusion with fuzzy breakpoints on both partners"""
        statement = "fusion(HGNC:TMPRSS2, r.1_?, HGNC:ERG, r.?_1)"
        result = self.parser.parseString(statement)
        expected = {
            PARTNER_5P: {
                CONCEPT: {
                    NAMESPACE: "HGNC",
                    NAME: "TMPRSS2",
                },
            },
            RANGE_5P: {FUSION_REFERENCE: "r", FUSION_START: 1, FUSION_STOP: "?"},
            PARTNER_3P: {
                CONCEPT: {
                    NAMESPACE: "HGNC",
                    NAME: "ERG",
                },
            },
            RANGE_3P: {FUSION_REFERENCE: "r", FUSION_START: "?", FUSION_STOP: 1},
        }
        self.assertEqual(expected, result.asDict())
class TestLocation(unittest.TestCase):
    """Tests for parsing BEL location (loc) modifiers."""

    def setUp(self):
        # GO names are accepted via a catch-all regex pattern.
        concepts = ConceptParser(namespace_to_pattern={"GO": re.compile(".*")})
        self.parser = get_location_language(concepts.identifier_qualified)

    def test_a(self):
        parsed = self.parser.parseString("loc(GO:intracellular)")
        self.assertEqual({LOCATION: {NAMESPACE: "GO", NAME: "intracellular"}}, parsed.asDict())
|
pybel/pybel
|
tests/test_parse/test_parse_bel_variants.py
|
Python
|
mit
| 16,938
|
[
"Pybel"
] |
967fc4b34ffa56ce6eabefaee0584ea4738b7264a7f56db5e97f8fd11829dea1
|
#
# Copyright 2021 Jack Grigg
#
# This file is part of the Translate Toolkit.
#
# This program is free software; you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation; either version 2 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program; if not, see <http://www.gnu.org/licenses/>.
r"""Manage the Fluent translation format.
It is a monolingual base class derived format with :class:`FluentFile`
and :class:`FluentUnit` providing file and unit level access.
"""
from fluent.syntax import FluentParser, ast, parse, serialize, visitor
from fluent.syntax.serializer import serialize_pattern
from fluent.syntax.stream import FluentParserStream
from translate.storage import base
def id_from_source(source):
    """Derive a stable, valid Fluent identifier from *source*.

    A valid Fluent identifier has the EBNF grammar
    ``Identifier ::= [a-zA-Z] [a-zA-Z0-9_-]*``, so the source string itself
    cannot be used as the identifier (as e.g. PO files do).  Instead we prefix
    a collision-resistant SHA-256 digest of the source text.
    """
    import hashlib

    digest = hashlib.sha256(source.encode()).hexdigest()
    return "gen-" + digest
def source_from_entry(entry):
    """Return the serialized Fluent pattern of *entry* for use as a unit source.

    Serialized patterns come in two forms:

    - Single-line patterns, which have a leading space we need to strip (for
      consistency with the expectations of what callers will set).
    - Multiline patterns, which have a leading newline we need to preserve
      (hence ``lstrip(" ")`` rather than a plain ``lstrip()``).
    """
    return serialize_pattern(entry.value).lstrip(" ")
class FluentUnit(base.TranslationUnit):
    """A Fluent message.

    A unit wraps one fluent.syntax AST entry: a Message, Term, one of the
    comment kinds, or Junk (syntactically invalid input, captured as errors).
    """

    def __init__(self, source=None, entry=None):
        super().__init__(source)
        # AST class of the wrapped entry (ast.Message, ast.Term, a comment
        # type, ...); None until a source or entry is provided.
        self._type = None
        # Fluent identifier of the entry; generated from the source when the
        # caller supplies only a source string.
        self._id = None
        # errorname -> errortext, populated when parsing Junk entries.
        self._errors = {}
        # attribute name -> serialized attribute pattern text.
        self._attributes = {}
        if source is not None:
            self._type = ast.Message
            self._set_value(source)
            # Default to source string
            self._id = id_from_source(self.source)
        if entry is not None:
            self.parse(entry)

    def _set_value(self, value):
        """Set both source and target to *value*."""
        self.source = value
        # The source and target are equivalent for monolingual units.
        self.target = self.source

    def getid(self):
        return self._id

    def setid(self, value):
        self._id = value

    def adderror(self, errorname, errortext):
        """Record a parse error keyed by its annotation code."""
        self._errors[errorname] = errortext

    def geterrors(self):
        return self._errors

    def isheader(self):
        # Group- and resource-level comments act as section headers rather
        # than translatable content.
        return self._type in [
            ast.GroupComment,
            ast.ResourceComment,
        ]

    def getattributes(self):
        return self._attributes

    def parse(self, entry):
        """Populate this unit from a fluent.syntax AST *entry*.

        Invalid entries (ast.Junk) are stored verbatim with their parser
        annotations recorded as errors; everything else is walked with a
        visitor that extracts the id, value, attributes, and comments.
        """
        # Handle this unit separately if it is invalid.
        if isinstance(entry, ast.Junk):
            for annotation in entry.annotations:
                self.adderror(annotation.code, annotation.message)
            self._set_value(entry.content)
            return
        this = self

        class Parser(visitor.Visitor):
            # True once the entry's own identifier has been captured.
            _found_id = False

            def visit_Attribute(self, node):
                this._attributes[node.id.name] = source_from_entry(node)

            def visit_Comment(self, node):
                # A comment attached to a Message/Term must not override the
                # unit's type; a standalone comment defines it.
                if this._type not in [ast.Message, ast.Term]:
                    this._type = ast.Comment
                this.addnote(node.content)

            def visit_GroupComment(self, node):
                this._type = ast.GroupComment
                this.addnote(node.content)

            def visit_ResourceComment(self, node):
                this._type = ast.ResourceComment
                this.addnote(node.content)

            def visit_Identifier(self, node):
                if not self._found_id:
                    # Only save the first identifier we encounter (the entry's
                    # value will also contain identifiers if it has selectors).
                    this._id = node.name
                    self._found_id = True

            def visit_Message(self, node):
                this._type = ast.Message
                this._set_value(source_from_entry(node))
                self.generic_visit(node)

            def visit_Term(self, node):
                this._type = ast.Term
                this._set_value(source_from_entry(node))
                self.generic_visit(node)

        Parser().visit(entry)

    def to_entry(self):
        """Convert this unit back into a fluent.syntax AST entry."""
        fp = FluentParser(False)
        # Handle standalone comments separately; they don't have any values or
        # attributes, just comment text.
        if self._type in [ast.ResourceComment, ast.GroupComment, ast.Comment]:
            return (self._type)(self.getnotes())
        assert self.source is not None
        value = fp.maybe_get_pattern(FluentParserStream(self.source))
        # NOTE: inside the comprehension below, ``value`` is the loop variable
        # (the attribute's serialized text), not the pattern parsed above —
        # comprehensions have their own scope.
        attributes = [
            ast.Attribute(
                ast.Identifier(id),
                fp.maybe_get_pattern(FluentParserStream(value)),
            )
            for (id, value) in self._attributes.items()
        ]
        comment = None
        if self.getnotes():
            comment = ast.Comment(self.getnotes())
        # Fall back to ast.Message when the type was never established.
        return (self._type if self._type is not None else ast.Message)(
            ast.Identifier(self.getid()),
            value=value,
            attributes=attributes,
            comment=comment,
        )
class FluentFile(base.TranslationStore):
    """A Fluent file."""

    Name = "Fluent file"
    Mimetypes = []
    Extensions = ["ftl"]
    UnitClass = FluentUnit

    def __init__(self, inputfile=None, **kwargs):
        """Optionally parse *inputfile*, a binary file object of Fluent source."""
        super().__init__(**kwargs)
        self.filename = getattr(inputfile, "name", "")
        if inputfile is not None:
            fluentsrc = inputfile.read()
            self.parse(fluentsrc)

    def parse(self, fluentsrc):
        """Parse UTF-8 encoded Fluent source *bytes* into one unit per entry.

        Note: ``parse`` here calls the module-level ``fluent.syntax.parse``,
        which this method intentionally shadows on the class.
        """
        resource = parse(fluentsrc.decode("utf-8"))
        for entry in resource.body:
            self.addunit(FluentUnit(entry=entry))

    def serialize(self, out):
        """Serialize all units back to Fluent syntax and write them to *out*."""
        body = [unit.to_entry() for unit in self.units]
        out.write(serialize(ast.Resource(body)).encode(self.encoding))
|
miurahr/translate
|
translate/storage/fluent.py
|
Python
|
gpl-2.0
| 6,515
|
[
"VisIt"
] |
402c17243b45ed14f7d2d35714492eea97e113216e63ba06a0c7664e0b29bfea
|
#!/usr/local/sci/bin/python
# PYTHON3.6.1
#
# Author: Kate Willett
# Created: 18 Jul 2018
# Last update: 15 Apr 2019
# Location: /data/local/hadkw/HADCRUH2/UPDATE2017/PROGS/PYTHON/
# GitHub: https://github.com/Kate-Willett/HadISDH_Build
# -----------------------
# CODE PURPOSE AND OUTPUT
# -----------------------
# THIS CODE DOES MANY THINGS BUT ONLY ONE THING AT A TIME! SO RE-RUN FOR MULTIPLE THINGS
# NOTE THAT FOR ANY 1BY1 OUTPUT IT REGRIDS TO BE 89.5 to -89.5 rather than 90 - -90 (180 boxes rather than 181!!!)
# AND ROLLS LONGITUDE TO -179.5 to 179.5
#
# AT THE MOMENT THIS ASSUMES COMPLETE FIELDS SO WON'T WORK FOR SST!!!
#
# ANOTHER ISSUE IS LAND / SEA MASKING - TOO MUCH LAND COVER, TOO MUCH SEA COVER - SO THERE WILL BE CONTAMINATION!
# I COMPUTE ANOMALIES AT 1by1 RES BEFORE REGRIDDING TO 5by5 TO MINIMISE THIS.
#
#
# This code reads in the ERA-Interim months of 1by1 6 hourly or monthly variables
# (e.g., T, Td and Surface Pressure etc) for the full time period
#
# If desired it converts to humidity variables
# If desired it averages to monthly means and saves to netCDF:
# days since 19790101 (float), 181 lats 90 to -90, 360 lons 0 to 359, <var>2m
# If desired it regrids to 5by5 (monthly means only) and saves to netCDF
# days since 19790101 (int), 36 lats -87.5 to 87.5, 72 lons -177.5 to 177.5, actuals
# If desired it calculates anomalies over a climatological references period given (default 1981-2010)
# and saves to netCDF
# For anomalies it also creates a land only and ocean only set of grids to save along side the complete
# fields
# days since 19790101 (int), 36 lats -87.5 to 87.5, 72 lons -177.5 to 177.5, anomalies,
# anomalies_land, anomalies_sea
#
# The ERA-Interim updates have to be downloaded from ERADownload.py code
# This requires a key to be set up in .ecmwfapirc annually - obtained from logging in to ECMWF
# https://confluence.ecmwf.int/display/WEBAPI/How+to+retrieve+ECMWF+Public+Datasets
# It also requires ecmwfapi to be downloaded and in the directory as you are running to code from
#
# The ERA5 updates have to be downloaded using ERA5Download.py which is in cdsapi-0.1.3/
# Each time you download change the filename to ERAINTERIM_6hr_1by1_MMYYYY.nc
# Save to /data/local/hadkw/HADCRUH2/UPDATE<yyyy>/OTHERDATA/
# Copy previous years of monthly ERAINTERIM data from the previous
# UPDATE<yyyy>/OTHERDATA/<var>2m_monthly_1by1_ERA-Interim_data_1979<yyyy>.nc
# to OTHERDATA/
#
# <references to related published material, e.g. that describes data set>
#
# -----------------------
# LIST OF MODULES
# -----------------------
# inbuilt:
# from datetime import datetime
# import matplotlib.pyplot as plt
# import numpy as np
# from matplotlib.dates import date2num,num2date
# import sys, os
# from scipy.optimize import curve_fit,fsolve,leastsq
# from scipy import pi,sqrt,exp
# from scipy.special import erf
# import scipy.stats
# from math import sqrt,pi
# import struct
# from netCDF4 import Dataset
# from netCDF4 import stringtoarr # for putting strings in as netCDF variables
# import pdb
#
# Kates:
# import CalcHums - written by kate Willett to calculate humidity variables
# import TestLeap - written by Kate Willett to identify leap years
# from ReadNetCDF import GetGrid4 - written by Kate Willett to pull out netCDF data
# from ReadNetCDF import GetGrid4Slice - written by Kate Willett to pull out a slice of netCDF data
# from GetNiceTimes import make_days_since
#
#-------------------------------------------------------------------
# DATA
# -----------------------
# ERA-Interim 1by1 6 hrly gridded data
# ERA<Mmm> = /data/local/hadkw/HADCRUH2/UPDATE<yyyy>/OTHERDATA/ERAINTERIM_<var>_6hr_1by1_<MMYYYY>.nc
#
# -----------------------
# HOW TO RUN THE CODE
# -----------------------
# First make sure the New ERA-Interim data are in the right place.
# Also check all editables in this file are as you wish
# python2.7 ExtractMergeRegridERA_JUL2018.py
#
# -----------------------
# OUTPUT
# -----------------------
# New ERA-Interim 1by1 monthly gridded data for 1979 to present
# NewERA<var> = /data/local/hadkw/HADCRUH2/UPDATE<yyyy>/OTHERDATA/<var>2m_monthly_1by1_ERA-Interim_data_1979<yyyy>.nc
#
# -----------------------
# VERSION/RELEASE NOTES
# -----------------------
#
# Version 1 (18 Jul 2018)
# ---------
#
# Enhancements
#
# Changes
#
# Bug fixes
#
# -----------------------
# OTHER INFORMATION
# -----------------------
#
#************************************************************************
# START
#************************************************************************
# inbuilt:
from datetime import datetime
import matplotlib.pyplot as plt
import numpy as np
from matplotlib.dates import date2num,num2date
import sys, os
from scipy.optimize import curve_fit,fsolve,leastsq
from scipy import pi,sqrt,exp
from scipy.special import erf
import scipy.stats
from math import sqrt,pi
import struct
from netCDF4 import Dataset
from netCDF4 import stringtoarr # for putting strings in as netCDF variables
import pdb
# Kates:
import CalcHums
import TestLeap
from ReadNetCDF import GetGrid4
from ReadNetCDF import GetGrid4Slice
from GetNiceTimes import MakeDaysSince
### START OF EDITABLES ###############################
# Set up initial run choices
# Start and end years
styr = 1979
edyr = 2018
#edOLD = (edyr-styr)*12
stmon = 1
edmon = 12
# Set up output variables - for q, e, RH, dpd, Tw we will need to read in multiple input files
OutputVar = 'dpd' # this can be 't','td','q','rh','e','dpd','tw','ws','slp','sp','uv','sst'
# Is this a new run or an update?
ThisProg = 'Regrid'
# Update for updating an existing file (1by1 monthly or pentad)
# Build for building from scratch (1by1 6hr to 1by1 monthly or pentad)
# THIS AUTOMATICALLY REGRIDS LATS TO BE 180 RATHER THAN 181!!!
# Regrid for changing spatial res from 1by1 to 5by5
# IF OutputGrid = 1by1 then this just changes lats from 181 to 180
# IF OutputGrid = 5by5 then this changes to 36 lats (-87.5 to 87.5) and 72 lons (-177.5 to 177.5)
# Is this ERA-Interim or ERA5?
ThisRean = 'ERA-Interim' # 'ERA5' or 'ERA-Interim'
# Are you reading in hourlies or monthlies?
ReadInTime = 'monthly' # this can be '1hr', '6hr' or 'month' or maybe 'day' later
# Are you converting to monthlies? We will output hourlies anyway if they are read in
OutputTime = 'monthly' # this could be 'monthly' or 'pentad'
# Are you reading in 1by1 or 5by5? We will output 1by1 anyway if they are read in.
ReadInGrid = '1by1' # this can be '1by1' or '5by5'
# Are you converting to 5by5?
OutputGrid = '5by5' # this can be '1by1' or '5by5'
# Do you want to create anomalies and if so, what climatology period? We will output absolutes anyway
MakeAnoms = 1 # 1 for create anomalies (and clim and stdev fields), 0 for do NOT create anomalies
ClimStart = 1981 # any year but generally 1981
ClimEnd = 2010 # any year but generally 2010
### END OF EDITABLES ################
# Set up file paths and other necessary things
if (MakeAnoms == 1): # set the filename string for anomalies
    AnomsStr = 'anoms'+str(ClimStart)+'-'+str(ClimEnd)+'_'
else:
    AnomsStr = ''
# Set up file locations
updateyy = str(edyr)[2:4]
updateyyyy = str(edyr)
workingdir = '/data/users/hadkw/WORKING_HADISDH/UPDATE'+updateyyyy
# Land/sea mask file depends on the input grid resolution
if (ReadInGrid == '5by5'):
    LandMask = workingdir+'/OTHERDATA/HadCRUT.4.3.0.0.land_fraction.nc' # 0 = 100% sea, 1 = 100% land - no islands!, latitude, longitude, land_area_fraction, -87.5 to 87.5, -177.5 to 177.5
elif (ReadInGrid == '1by1'):
    LandMask = workingdir+'/OTHERDATA/lsmask.nc' # 1 = sea, 0 = land - no islands! lat, lon, mask 89.5 to -89.5Lat, 0.5 to 359.5 long
# Input/output filename templates depend on the variable family
if (OutputVar in ['t','td']): # these are the simple ones that do not require conversion
    InputERA = ThisRean+'_'+ReadInGrid+'_'+ReadInTime+'_'+OutputVar+'2m_'
    if (ThisProg == 'Update'):
        OldERAStr = OutputVar+'2m_'+ReadInGrid+'_'+ReadInTime+'_'+ThisRean+'_data_1979'+str(edyr-1)+'.nc'
    else:
        OldERAStr = OutputVar+'2m_'+ReadInGrid+'_'+ReadInTime+'_'+AnomsStr+ThisRean+'_data_1979'+str(edyr)+'.nc'
    NewERAStr = OutputVar+'2m_'+OutputGrid+'_'+OutputTime+'_'+AnomsStr+ThisRean+'_data_1979'+updateyyyy+'.nc'
elif (OutputVar in ['ws','uv']):
    InputERA = ThisRean+'_'+ReadInGrid+'_'+ReadInTime+'_'+OutputVar+'10m_'
    if (ThisProg == 'Update'):
        OldERAStr = OutputVar+'2m_'+ReadInGrid+'_'+ReadInTime+'_'+ThisRean+'_data_1979'+str(edyr-1)+'.nc'
    else:
        OldERAStr = OutputVar+'2m_'+ReadInGrid+'_'+ReadInTime+'_'+AnomsStr+ThisRean+'_data_1979'+str(edyr)+'.nc'
    NewERAStr = OutputVar+'10m_'+OutputGrid+'_'+OutputTime+'_'+AnomsStr+ThisRean+'_data_1979'+updateyyyy+'.nc'
elif (OutputVar in ['slp','sp','sst']):
    InputERA = ThisRean+'_'+ReadInGrid+'_'+ReadInTime+'_'+OutputVar+'_'
    if (ThisProg == 'Update'):
        OldERAStr = OutputVar+'2m_'+ReadInGrid+'_'+ReadInTime+'_'+ThisRean+'_data_1979'+str(edyr-1)+'.nc'
    else:
        OldERAStr = OutputVar+'2m_'+ReadInGrid+'_'+ReadInTime+'_'+AnomsStr+ThisRean+'_data_1979'+str(edyr)+'.nc'
    NewERAStr = OutputVar+'_'+OutputGrid+'_'+OutputTime+'_'+AnomsStr+ThisRean+'_data_1979'+updateyyyy+'.nc'
elif (OutputVar in ['tw','q','rh','e','dpd']): # these require T, Td and SLP
    InputERA = ThisRean+'_'+ReadInGrid+'_'+ReadInTime+'_'
    if (ThisProg == 'Update'):
        OldERAStr = OutputVar+'2m_'+ReadInGrid+'_'+ReadInTime+'_'+ThisRean+'_data_1979'+str(edyr-1)+'.nc'
    else:
        OldERAStr = OutputVar+'2m_'+ReadInGrid+'_'+ReadInTime+'_'+AnomsStr+ThisRean+'_data_1979'+str(edyr)+'.nc'
    NewERAStr = OutputVar+'2m_'+OutputGrid+'_'+OutputTime+'_'+AnomsStr+ThisRean+'_data_1979'+updateyyyy+'.nc'
# Might have some other options
# Set up variables
mdi = -1e30 # missing data indicator used throughout
# Required variable names for reading in from ERA-Interim
LatInfo = ['latitude']
LonInfo = ['longitude']
# Dictionary for looking up variable names for netCDF read in of variables
NameDict = dict([('q','q2m'),
                 ('rh','rh2m'),
                 ('e','e2m'),
                 ('tw','tw2m'),
                 ('t','t2m'),
                 ('td','td2m'),
                 ('dpd','dpd2m'),
                 ('slp','msl'),
                 ('sp','sp'),
                 ('uv',['u10','v10']), # this one might not work
                 ('ws','si10'),
                 ('sst','sst')])
# Dictionary for looking up variable standard (not actually always standard!!!) names for netCDF output of variables
StandardNameDict = dict([('q','specific_humidity'),
                         ('rh','relative_humidity'),
                         ('e','vapour_pressure'),
                         ('tw','wetbulb_temperature'),
                         ('t','drybulb_temperature'),
                         ('td','dewpoint_temperature'),
                         ('dpd','dewpoint depression'),
                         ('slp','mean_sea_level_pressure'),
                         ('sp','surface_pressure'),
                         ('uv',['10 metre U wind component','10 metre V wind component']), # this one might not work
                         ('ws','10 metre windspeed'),
                         ('sst','sea_surface_temperature')])
# Dictionary for looking up variable long names for netCDF output of variables
LongNameDict = dict([('q','specific_humidity'),
                     ('rh','2m relative humidity from 1by1 6hrly T and Td '+ThisRean),
                     ('e','2m vapour_pressure from 1by1 6hrly T and Td '+ThisRean),
                     ('tw','2m wetbulb_temperature from 1by1 6hrly T and Td '+ThisRean),
                     ('t','2m drybulb_temperature from 1by1 6hrly T '+ThisRean),
                     ('td','2m dewpoint_temperature from 1by1 6hrly Td '+ThisRean),
                     ('dpd','2m dewpoint depression from 1by1 6hrly T and Td '+ThisRean),
                     ('slp','2m mean_sea_level_pressure from 1by1 6hrly msl '+ThisRean),
                     ('sp','2m surface_pressure from 1by1 6hrly sp '+ThisRean),
                     ('uv',['10 metre U wind component from 1by1 6hrly '+ThisRean,'10 metre V wind component from 1by1 6hrly'+ThisRean]), # this one might not work
                     ('ws','10 metre windspeed from 1by1 6hrly'+ThisRean),
                     ('sst','sea surface temperature from 1by1 6hrly'+ThisRean)])
# Dictionary for looking up unit of variables
UnitDict = dict([('q','g/kg'),
                 ('rh','%rh'),
                 ('e','hPa'),
                 ('tw','deg C'),
                 ('t','deg C'),
                 ('td','deg C'),
                 ('dpd','deg C'),
                 ('slp','hPa'),
                 ('sp','hPa'),
                 ('uv','m/s'),
                 ('ws','m/s'),
                 ('sst','deg C')])
# Counts of output time steps over the full record
nyrs = (edyr+1)-styr
nmons = nyrs*12
npts = nyrs*73 # 73 pentads per year
#ndays =
#n6hrs =
#n1hrs =
# set up nlons and nlats depending on what we are reading in and out
if (ReadInGrid == '1by1'):
    nlonsIn = 360
    nlatsIn= 181 # ERA style to have grids over the poles rather than up to the poles
elif (ReadInGrid == '5by5'):
    nlonsIn = 72 # assuming this is correct
    nlatsIn = 36 # assuming this is correct
if (OutputGrid == '1by1'):
    nlonsOut = 360
    nlatsOut = 180 # ERA style to have grids over the poles rather than up to the poles but this will be changed here with Build or Regrid
elif (OutputGrid == '5by5'):
    nlonsOut = 72 # assuming this is correct
    nlatsOut = 36 # assuming this is correct
## Array for monthly mean data for q, RH, e, T, Tw, Td, DPD one at a time though
##FullMonthArray = np.empty((nmons,nlats,nlons,7),dtype = float)
#FullMonthArray = np.empty((nmons,nlats,nlons),dtype = float)
#FullMonthArray.fill(mdi)
#************************************************************
# SUBROUTINES
#************************************************************
# GetHumidity
def GetHumidity(TheTDat,TheTdDat,TheSPDat,TheVar):
    ''' Calculates the desired humidity variable if the code is set up to output humidity.

    REQUIRES:
    CalcHums.py file to be in the same directory as this file

    INPUTS:
    TheTDat - drybulb temperature field (deg C)
    TheTdDat - dewpoint temperature field (deg C)
    TheSPDat - surface pressure field (hPa)
    TheVar - one of 't','td','q','e','rh','tw','dpd'

    OUTPUTS:
    the requested variable on the same grid as the inputs

    RAISES:
    ValueError for an unrecognised TheVar.  (Previously an unknown TheVar
    fell through and raised an opaque NameError on the final return because
    TheHumDat was never assigned.)
    '''
    if (TheVar == 't'):
        return TheTDat
    if (TheVar == 'td'):
        return TheTdDat
    if (TheVar == 'q'):
        return CalcHums.sh(TheTdDat,TheTDat,TheSPDat,roundit=False)
    if (TheVar == 'e'):
        return CalcHums.vap(TheTdDat,TheTDat,TheSPDat,roundit=False)
    if (TheVar == 'rh'):
        return CalcHums.rh(TheTdDat,TheTDat,TheSPDat,roundit=False)
    if (TheVar == 'tw'):
        return CalcHums.wb(TheTdDat,TheTDat,TheSPDat,roundit=False)
    if (TheVar == 'dpd'):
        return CalcHums.dpd(TheTdDat,TheTDat,roundit=False)
    raise ValueError('GetHumidity: unrecognised variable ' + repr(TheVar))
#************************************************************
# RegridField
def RegridField(TheOutputGrid,TheOldData):
    '''
    This function does a simple regridding of data by averaging over the larger gridboxes
    NO COSINE WEIGHTING FOR LATITUDE!!!!
    NOTE:
    FOR OutputGrid = 5by5 THIS AUTOMATICALLY FLIPS LATITUDE AND ROLLS LONGITUDE TO BE -87.5 to 87.5 and -177.5 to 177.5
    FOR OutputGrid = 1by1 THIS JUST REGRIDS LATITUDE FROM 181 boxes 90 to -90 TO 180 boxes 89.5 to -89.5 and rolls longitude to -179.5 to 179.5
    Assumes input grid is always 1by1
    INPUTS:
    TheOutputGrid - string of 1by1 or 5by5
    TheOldData[:,:,:] - time, lat, long numpy array of complete field in original grid resolution
    OUTPUTS:
    TheNewData[:,:,:] - time, lat, long numpy array of complete field in new grid resolution
    Relies on module-level globals nlatsOut, nlonsOut, nlonsIn and mdi being
    set consistently with TheOutputGrid before this is called.
    '''
    # Set up the desired output array
    TheNewData = np.empty((len(TheOldData[:,0,0]),nlatsOut,nlonsOut),dtype = float)
    TheNewData.fill(mdi)
    if (TheOutputGrid == '1by1'):
        # Then we know we're reading in original ERA-Interim or ERA5 data which has 181 lats
        # regrid to 0.5 by 0.5 degree gridboxes and then reaverage over 89.5 to -89.5 lats
        # shift lons back to -179.5 to 179.5 from 0 to 359
        # First sort out the latitudes
        for ln in range(nlonsIn):
            for tt in range(len(TheNewData[:,0,0])):
                subarr = np.repeat(TheOldData[tt,:,ln],2)
                # this creates 362 grid boxes where each is repeated: [0a, 0b, 1a, 1b ...180a, 180b]
                subarr = subarr[1:361]
                # This removes the superfluous 90-90.5 and -90 to -90.5 boxes
                subarr = np.reshape(subarr,(180,2))
                # This now reshapes to 180 rows and 2 columns so that we can average the gridboxes across the columns
                TheNewData[tt,:,ln] = np.mean(subarr,axis = 1) # hopefully this should work!
                #pdb.set_trace()
        # Then sort out the longitudes by rolling them 180 boxes (0..359 -> -179.5..179.5)
        for tt in range(len(TheNewData[:,0,0])):
            TheNewData[tt,:,:] = np.roll(TheNewData[tt,:,:],180,axis = 1)
    if (TheOutputGrid == '5by5'):
        # Then we know we're reading in my converted ERA-Interim / ERA5 data which has 180 lats and already has lons rolled 180 degrees.
        # flip lats to go south to north
        # Regrid to 5by5 by simple averaging
        # Data input here should already be 89.5 to -89.5 lat and -179.5 to 179.5 long!!!
        StLt = 0
        EdLt = 0
        # Loop through the OutputGrid (5by5) lats and lons
        for ltt in range(nlatsOut):
            # create pointers to the five lats to average over
            StLt = np.copy(EdLt)
            EdLt = EdLt + 5
            StLn = 0
            EdLn = 0
            for lnn in range(nlonsOut):
                # create pointers to the five lons to average over
                StLn = np.copy(EdLn)
                EdLn = EdLn + 5
                #print(ltt,lnn,StLt,EdLt,StLn,EdLn)
                # Loop over each time point
                for mm in range(len(TheNewData[:,0,0])):
                    # Create a subarr first so that we can deal with missing data
                    subarr = TheOldData[mm,StLt:EdLt,StLn:EdLn]
                    gots = np.where(subarr > mdi)
                    if (len(gots[0]) > 0):
                        # FILL THE LATITUDES BACKWARDS SO THAT THIS REVERSES THEM!!!
                        TheNewData[mm,35-ltt,lnn] = np.mean(subarr[gots])
                        #pdb.set_trace()
    return TheNewData
#************************************************************
# BuildField
def BuildField(TheOutputVar, TheInputTime, TheOutputTime, InFileStr, TheStYr, TheEdYr):
    ''' Function for building complete reanalysis files over the period specified.

    This can be very computationally expensive so it is done by year: initial
    reanalysis data are read in in chunks of one year and sliced out one month
    (or pentad) at a time.  For derived humidity variables the source
    variables (T, Td, SP) are read in and converted.

    NOTE: THIS AUTOMATICALLY REGRIDS LATITUDE TO BE 180 RATHER THAN 181 BOXES
    AND ROLLS LONGITUDE TO -179.5 to 179.5

    INPUTS:
    TheOutputVar - string lower case character of q, rh, t, td, dpd, tw, e, msl, sp, ws
    TheInputTime - string of 1hr or 6hr
    TheOutputTime - string of monthly or pentad
    InFileStr - string of dir+file string to read in
    TheStYr - integer start year of data - assume Jan 1st (0101) start
    TheEdYr - integer end year of data - assume Dec 31st (1231) end

    OUTPUTS:
    TheNewData[:,:,:] - time, lat, long numpy array of complete field in new time resolution

    Relies on module-level globals: nmons, npts, nlatsIn, nlonsIn, nlatsOut,
    nlonsOut, mdi, NameDict, LatInfo, LonInfo.

    BUG FIXES vs the original:
    - ``yr = y + StYr`` referenced the undefined name StYr (NameError); now
      uses the TheStYr parameter.
    - February defaulted to 29 days for EVERY year, mis-sizing the monthly
      read windows in non-leap years; the default is now 28 with the leap
      branch setting 29.
    - The read cadence now honours the TheInputTime argument (previously the
      module-level ReadInTime global was consulted and the parameter unused).
    '''
    # Set up the desired output array
    if (TheOutputTime == 'monthly'):
        TheNewData = np.empty((nmons,nlatsOut,nlonsOut),dtype = float)
    elif (TheOutputTime == 'pentad'):
        TheNewData = np.empty((npts,nlatsOut,nlonsOut),dtype = float)
    TheNewData.fill(mdi)
    # The input grids are different to the output grids (181 lat boxes rather
    # than 180) so we need a TmpNewData first
    TmpNewData = np.empty((len(TheNewData[:,0,0]),nlatsIn,nlonsIn),dtype = float)
    TmpNewData.fill(mdi)
    nyrs = (TheEdYr - TheStYr) + 1
    # Time pointers into the 1hr/6hr input record; each month's slice is
    # [HrStPoint, HrEdPoint) so HrEdPoint is one past the actual end point.
    HrStPoint = 0
    HrEdPoint = 0
    # Loop through the years
    for y in range(nyrs):
        # Get actual year we're working on
        yr = y + TheStYr
        print('Working Year: ',yr)
        # Days per month (or per pentad) for this year
        if (TheOutputTime == 'monthly'):
            mnarr = [31,28,31,30,31,30,31,31,30,31,30,31]
            nbits = 12
        elif (TheOutputTime == 'pentad'):
            mnarr = list(np.repeat(5,73))
            nbits = 73
        # Is it a leap year?  TestLeap.TestLeap(yr) == 0.0 is taken to flag a
        # leap year here - NOTE(review): confirm against TestLeap's contract.
        if (TestLeap.TestLeap(yr) == 0.0):
            if (TheOutputTime == 'monthly'):
                mnarr[1] = 29 # 29 days in February
            elif (TheOutputTime == 'pentad'):
                mnarr[11] = 6 # pentad 12 absorbs the leap day
            print('TestLeap (m, pt): ',mnarr[1],mnarr[11], yr)
        # Loop through each month or pentad
        for m in range(nbits):
            # Pointer into the output time axis
            MonthPointer = (y * nbits)+m
            print('Month/Pentad Pointer: ',m)
            # Advance the hour pointers over this month/pentad
            HrStPoint = np.copy(HrEdPoint) # set as HrEdPoint which is actual end point +1
            if (TheInputTime == '1hr'):
                HrEdPoint = HrStPoint + (mnarr[m]*24) # 24 readings per day
            elif (TheInputTime == '6hr'):
                HrEdPoint = HrStPoint + (mnarr[m]*4) # 4 readings per day
            print('Hr Pointies for this month: ',HrStPoint,HrEdPoint)
            # Slice info to pull this month out of the year file
            # This assumes we're always reading in 1by1!!!!
            SliceInfo = dict([('TimeSlice',[HrStPoint,HrEdPoint]),
                              ('LatSlice',[0,181]),
                              ('LonSlice',[0,360])])
            # Derived humidity variables need T, Td and SP read in and converted
            if (TheOutputVar in ['q','rh','e','tw','dpd']):
                # GetGrid4Slice automatically unpacks scale and offset, but SP
                # is Pa and T and Td are Kelvin.  This kills memory so be tidy.
                ReadInfo = ['t2m']
                FileName = InFileStr+'t2m_'+str(yr)+'0101'+str(yr)+'1231.nc'
                T_Data,Latitudes,Longitudes = GetGrid4Slice(FileName,ReadInfo,SliceInfo,LatInfo,LonInfo)
                # Convert t from Kelvin to deg C
                T_Data = T_Data-273.15
                ReadInfo = ['d2m']
                FileName = InFileStr+'td2m_'+str(yr)+'0101'+str(yr)+'1231.nc'
                Td_Data,Latitudes,Longitudes = GetGrid4Slice(FileName,ReadInfo,SliceInfo,LatInfo,LonInfo)
                # Convert td from Kelvin to deg C
                Td_Data = Td_Data-273.15
                ReadInfo = ['sp']
                FileName = InFileStr+'sp_'+str(yr)+'0101'+str(yr)+'1231.nc'
                SP_Data,Latitudes,Longitudes = GetGrid4Slice(FileName,ReadInfo,SliceInfo,LatInfo,LonInfo)
                # Convert sp from Pa to hPa
                SP_Data = SP_Data/100.
                # Convert to desired humidity variable
                TmpData = GetHumidity(T_Data,Td_Data,SP_Data,TheOutputVar)
                # Free the source arrays
                SP_Data = 0
                T_Data = 0
                Td_Data = 0
            else:
                # Direct variables are read straight in
                ReadInfo = [NameDict[TheOutputVar]] # the variable name to read in
                FileName = InFileStr+str(yr)+'0101'+str(yr)+'1231.nc'
                TmpData,Latitudes,Longitudes = GetGrid4Slice(FileName,ReadInfo,SliceInfo,LatInfo,LonInfo)
                # Unpack units where needed
                if (TheOutputVar in ['t','td','sst']):
                    if (TheOutputVar == 'sst'): # there are missing values over land.
                        TmpData[np.where(TmpData < 270.03)] = mdi # ERA Mdi is actually -32767 but in ncview it is 270.024
                    TmpData[np.where(TmpData > mdi)] = TmpData[np.where(TmpData > mdi)]-273.15
                elif (TheOutputVar in ['slp','sp']): # pressures are Pa so need to be converted to hPa
                    TmpData = TmpData/100.
            # Create monthly or pentad means on the input (1by1, 181 lat) grid
            for ltt in range(nlatsIn):
                for lnn in range(nlonsIn):
                    TmpNewData[MonthPointer,ltt,lnn] = np.mean(TmpData[:,ltt,lnn])
            # Free the data array
            TmpData = 0
    # Now regrid to 180 latitude boxes 89.5 to -89.5 and longitude from -179.5 to 179.5
    TheNewData = RegridField('1by1',TmpNewData)
    return TheNewData
#************************************************************
# CreateAnoms
def CreateAnoms(TheInputGrid,TheOutputTime,TheClimSt,TheClimEd,TheStYr,TheEdYr,TheInData):
    '''
    This function takes any grid and any var, computes climatologies/stdevs over given period and then anomalies
    It also outputs land only and ocean only anomalies depending on the grid

    INPUTS:
    TheInputGrid - string of 1by1 or 5by5 to determine the land mask to use
    TheOutputTime - string of monthly or pentad
    TheClimSt - integer start year of climatology Always Jan start
    TheClimEd - integer end year of climatology Always Dec end
    TheStYr - integer start year of data to find climatology
    TheEdYr - integer end year of data to find climatology
    TheInData[:,:,:] - time, lat, lon array of actual values
    OUTPUTS:
    AllAnomsArr[:,:,:] - time, lat, lon array of anomalies
    LandAnomsArr[:,:,:] - time, lat, lon array of land anomalies
    OceanAnomsArr[:,:,:] - time, lat, lon array of ocean anomalies
    ClimsArr[:,:,:] - time, lat, lon array of climatologies
    StDevsArr[:,:,:] - time, lat, lon array of stdeviations
    '''
    # BUGFIX: these LandMask assignments were previously trapped inside the
    # docstring text, so LandMask was never defined before the GetGrid4 calls
    # below (NameError unless a global happened to exist).
    if (TheInputGrid == '5by5'):
        # 0 = 100% sea, 1 = 100% land - no islands!, land_area_fraction, -87.5 to 87.5, -177.5 to 177.5
        LandMask = workingdir+'/OTHERDATA/HadCRUT.4.3.0.0.land_fraction.nc'
    elif (TheInputGrid == '1by1'):
        # 1 = sea, 0 = land - no islands! 89.5 to -89.5 lat, 0.5 to 359.5 lon
        LandMask = workingdir+'/OTHERDATA/lsmask.nc'

    # Number of climatology steps per year (months or pentads)
    if (TheOutputTime == 'monthly'):
        nclims = 12
    elif (TheOutputTime == 'pentad'):
        nclims = 73
    nyrs = (TheEdYr - TheStYr) + 1

    # Get land/sea mask and format accordingly
    if (TheInputGrid == '1by1'):
        MaskData,Lats,Longs = GetGrid4(LandMask,['mask'],['lat'],['lon'])
        # Check shape and force to be 2d
        if (len(np.shape(MaskData)) == 3):
            MaskData = np.reshape(MaskData,(180,360))
        # roll the longitudes so the grid runs -179.5 to 179.5 rather than 0.5 to 359.5
        MaskData = np.roll(MaskData[:,:],180,axis = 1)
        # swap the land/sea flags so that land = 1 (mask file has 1 = sea)
        land = np.where(MaskData == 0)
        MaskData[np.where(MaskData == 1)] = 0
        MaskData[land] = 1
    elif (TheInputGrid == '5by5'):
        MaskData,Lats,Longs = GetGrid4(LandMask,['land_area_fraction'],LatInfo,LonInfo)
        if (len(np.shape(MaskData)) == 3):
            MaskData = np.reshape(MaskData,(36,72))

    # first create empty arrays pre-filled with the missing data indicator
    AllAnomsArr = np.empty_like(TheInData)
    AllAnomsArr.fill(mdi)
    LandAnomsArr = np.copy(AllAnomsArr)
    OceanAnomsArr = np.copy(AllAnomsArr)
    ClimsArr = np.copy(AllAnomsArr[0:nclims,:,:])
    StDevsArr = np.copy(AllAnomsArr[0:nclims,:,:])

    # loop through gridboxes
    for lt in range(len(TheInData[0,:,0])):
        for ln in range(len(TheInData[0,0,:])):
            # pull out gridbox and reform to years by nclims (months or pentads)
            SingleSeries = np.reshape(TheInData[:,lt,ln],(nyrs,nclims)) # nyrs rows, nclims columns
            # create an empty array to fill with anomalies
            NewSingleSeries = np.empty_like(SingleSeries)
            NewSingleSeries.fill(mdi)
            # loop through clims 1 to 12 or 73
            for m in range(nclims):
                # create, save and subtract climatological mean
                # THERE ARE NO MISSING DATA IN ERA INTERIM but sst is missing over land
                # test first time value only
                if (SingleSeries[0,m] > mdi):
                    ClimsArr[m,lt,ln] = np.mean(SingleSeries[(TheClimSt-TheStYr):((TheClimEd-TheStYr)+1),m])
                    StDevsArr[m,lt,ln] = np.std(SingleSeries[(TheClimSt-TheStYr):((TheClimEd-TheStYr)+1),m])
                    NewSingleSeries[:,m] = SingleSeries[:,m] - ClimsArr[m,lt,ln]
            # fill new arrays
            AllAnomsArr[:,lt,ln] = np.reshape(NewSingleSeries,nyrs*nclims)
            # is there any land? (MaskData > 0 means some land fraction)
            if (MaskData[lt,ln] > 0):
                LandAnomsArr[:,lt,ln] = np.reshape(NewSingleSeries,nyrs*nclims)
            # is there any sea? (MaskData < 1 means some sea fraction)
            if (MaskData[lt,ln] < 1):
                OceanAnomsArr[:,lt,ln] = np.reshape(NewSingleSeries,nyrs*nclims)

    return AllAnomsArr, LandAnomsArr, OceanAnomsArr, ClimsArr, StDevsArr
#************************************************************
# WriteNetCDF
def WriteNetCDF(Filename,TheOutputTime, TheOutputGrid, TheOutputVar, TheFullArray, TheFullArrayAnoms, TheLandArrayAnoms, TheOceanArrayAnoms, TheClimsArray, TheStDevsArray,
                TheStYr, TheEdYr, TheClimStart, TheClimEnd, TheName, TheStandardName, TheLongName, TheUnit):
    '''
    This function writes out a NetCDF 4 file
    NOTE:
    All 1by1 outputs will have lats 89.5 to -89.5 and lons -179.5 to 179.5
    All 5by5 outputs will have lats -87.5 to 87.5 and lons -177.5 to 177.5
    INPUTS:
    Filename - string file name
    TheOutputTime - string monthly or pentad
    TheOutputGrid - string 1by1 or 5by5
    TheOutputVar - string lower case variable name
    TheFullArray[:,:,:] - time, lat, lon array of actual values
    TheFullArrayAnoms[:,:,:] - time, lat, lon array of anomalies
    TheLandArrayAnoms[:,:,:] - time, lat, lon array of land anomalies
    TheOceanArrayAnoms[:,:,:] - time, lat, lon array of ocean anomalies
    TheClimsArray[:,:,:] - time(12 or 73), lat, lon array of climatology
    TheStDevsArray[:,:,:] - time(12 or 73), lat, lon array of st devs
    TheStYr - integer start year assumes Jan start
    TheEdYr - integer end year assumes Dec start
    TheClimStart - integer start of clim Jan start
    TheClimEnd - integer end of clim Dec start
    TheName - string short name of var q2m
    TheStandardName - string standard name of variable
    TheLongName - string long name of variable
    TheUnit - string unit of variable
    OUTPUTS:
    None
    '''
    # Sort out times in days since 1979-01-01 and the climatology length
    if (TheOutputTime == 'monthly'):
        nClims = 12
        TimPoints = MakeDaysSince(TheStYr,1,TheEdYr,12,'month') # use 'day','month','year'
    elif (TheOutputTime == 'pentad'):
        nClims = 73
        TimPoints = MakeDaysSince(TheStYr,1,TheEdYr,73,'pentad') # use 'day','month','year'
    nTims = len(TimPoints)
    # Sort out Lats, Lons and LatBounds and LonBounds
    if (TheOutputGrid == '1by1'):
        # 1by1 latitudes run 89.5 down to -89.5 (descending), hence the
        # +/- asymmetry in the bounds formulas below.
        LatList = np.flip(np.arange(180)-89.5)
        LonList = np.arange(360)-179.5
        LatBounds = np.empty((len(LatList),2),dtype='float')
        LonBounds = np.empty((len(LonList),2),dtype='float')
        # BUGFIX: the second bound previously used the unrelated global
        # Latitudes (read from the input file) instead of LatList.
        LatBounds[:,0] = LatList + ((LatList[0]-LatList[1])/2.)
        LatBounds[:,1] = LatList - ((LatList[0]-LatList[1])/2.)
        LonBounds[:,0] = LonList - ((LonList[1]-LonList[0])/2.)
        LonBounds[:,1] = LonList + ((LonList[1]-LonList[0])/2.)
    elif (TheOutputGrid == '5by5'):
        LatList = (np.arange(36)*5)-87.5
        LonList = (np.arange(72)*5)-177.5
        LatBounds = np.empty((len(LatList),2),dtype='float')
        LonBounds = np.empty((len(LonList),2),dtype='float')
        # BUGFIX: as above, Latitudes -> LatList in the second bound.
        LatBounds[:,0] = LatList - ((LatList[1]-LatList[0])/2.)
        LatBounds[:,1] = LatList + ((LatList[1]-LatList[0])/2.)
        LonBounds[:,0] = LonList - ((LonList[1]-LonList[0])/2.)
        LonBounds[:,1] = LonList + ((LonList[1]-LonList[0])/2.)
    # No need to convert float data using given scale_factor and add_offset to integers - done within writing program (packV = (V-offset)/scale
    # Not sure what this does to float precision though...
    # Create a new netCDF file - have tried zlib=True,least_significant_digit=3 (and 1) - no difference
    ncfw = Dataset(Filename,'w',format='NETCDF4_CLASSIC') # need to try NETCDF4 and also play with compression but test this first
    # Set up the dimension names and quantities
    ncfw.createDimension('time',nTims)
    ncfw.createDimension('latitude',nlatsOut)
    ncfw.createDimension('longitude',nlonsOut)
    # If there are climatologies to be written then also set up clim dimension
    if (len(np.shape(TheClimsArray)) > 1):
        if (TheOutputTime == 'monthly'):
            ncfw.createDimension('month_time',nClims)
        elif (TheOutputTime == 'pentad'):
            ncfw.createDimension('pentad_time',nClims)
    # Go through each dimension and set up the variable and attributes for that dimension if needed
    MyVarT = ncfw.createVariable('time','f4',('time',))
    MyVarT.standard_name = 'time'
    MyVarT.long_name = 'time'
    MyVarT.units = 'days since 1979-1-1 00:00:00'
    MyVarT.start_year = str(TheStYr)
    MyVarT.end_year = str(TheEdYr)
    MyVarT[:] = TimPoints
    MyVarLt = ncfw.createVariable('latitude','f4',('latitude',))
    MyVarLt.standard_name = 'latitude'
    MyVarLt.long_name = 'gridbox centre latitude'
    MyVarLt.units = 'degrees_north'
    MyVarLt[:] = LatList
    MyVarLn = ncfw.createVariable('longitude','f4',('longitude',))
    MyVarLn.standard_name = 'longitude'
    MyVarLn.long_name = 'gridbox centre longitude'
    MyVarLn.units = 'degrees_east'
    MyVarLn[:] = LonList
    # If there are climatologies to be written then also set up clim dimension
    if (len(np.shape(TheClimsArray)) > 1):
        if (TheOutputTime == 'monthly'):
            MyVarM = ncfw.createVariable('month_time','i4',('month_time',))
            MyVarM.long_name = 'months of the year'
            MyVarM.units = 'months'
            MyVarM[:] = np.arange(nClims)
        elif (TheOutputTime == 'pentad'):
            MyVarM = ncfw.createVariable('pentad_time','i4',('pentad_time',))
            MyVarM.long_name = 'pentads of the year'
            MyVarM.units = 'pentads'
            MyVarM[:] = np.arange(nClims)
    # Go through each variable and set up the variable attributes
    # I've added zlib=True so that the file is in compressed form
    # I've added least_significant_digit=4 because we do not need to store information beyone 4 significant figures.
    MyVarD = ncfw.createVariable(TheName,'f4',('time','latitude','longitude',),fill_value = mdi,zlib=True,least_significant_digit=4)
    MyVarD.standard_name = TheStandardName
    MyVarD.long_name = TheLongName
    MyVarD.units = TheUnit
    MyVarD.valid_min = np.min(TheFullArray)
    MyVarD.valid_max = np.max(TheFullArray)
    MyVarD.missing_value = mdi
    # Provide the data to the variable - depending on howmany dimensions there are
    MyVarD[:,:,:] = TheFullArray[:,:,:]
    # If there are climatologies etc to be written then also set them up
    if (len(np.shape(TheClimsArray)) > 1):
        MyVarA = ncfw.createVariable(TheName+'_anoms','f4',('time','latitude','longitude',),fill_value = mdi,zlib=True,least_significant_digit=4)
        MyVarA.standard_name = TheStandardName+'_anomalies'
        MyVarA.long_name = TheLongName+' anomalies from 1981-2010'
        MyVarA.units = TheUnit
        MyVarA.valid_min = np.min(TheFullArrayAnoms)
        MyVarA.valid_max = np.max(TheFullArrayAnoms)
        MyVarA.missing_value = mdi
        # Provide the data to the variable - depending on howmany dimensions there are
        MyVarA[:,:,:] = TheFullArrayAnoms[:,:,:]
        MyVarAL = ncfw.createVariable(TheName+'_anoms_land','f4',('time','latitude','longitude',),fill_value = mdi,zlib=True,least_significant_digit=4)
        MyVarAL.standard_name = TheStandardName+'_anomalies'
        MyVarAL.long_name = TheLongName+' anomalies from 1981-2010'
        MyVarAL.units = TheUnit
        MyVarAL.valid_min = np.min(TheLandArrayAnoms)
        MyVarAL.valid_max = np.max(TheLandArrayAnoms)
        MyVarAL.missing_value = mdi
        # Provide the data to the variable - depending on howmany dimensions there are
        MyVarAL[:,:,:] = TheLandArrayAnoms[:,:,:]
        MyVarAO = ncfw.createVariable(TheName+'_anoms_ocean','f4',('time','latitude','longitude',),fill_value = mdi,zlib=True,least_significant_digit=4)
        MyVarAO.standard_name = TheStandardName+'_anomalies'
        MyVarAO.long_name = TheLongName+' anomalies from 1981-2010'
        MyVarAO.units = TheUnit
        MyVarAO.valid_min = np.min(TheOceanArrayAnoms)
        MyVarAO.valid_max = np.max(TheOceanArrayAnoms)
        MyVarAO.missing_value = mdi
        # Provide the data to the variable - depending on howmany dimensions there are
        MyVarAO[:,:,:] = TheOceanArrayAnoms[:,:,:]
        if (TheOutputTime == 'monthly'):
            MyVarC = ncfw.createVariable(TheName+'_clims','f4',('month_time','latitude','longitude',),fill_value = mdi,zlib=True,least_significant_digit=4)
        elif (TheOutputTime == 'pentad'):
            MyVarC = ncfw.createVariable(TheName+'_clims','f4',('pentad_time','latitude','longitude',),fill_value = mdi,zlib=True,least_significant_digit=4)
        MyVarC.standard_name = TheStandardName+'_climatologies'
        MyVarC.long_name = TheLongName+' climatology over 1981-2010'
        MyVarC.units = TheUnit
        MyVarC.valid_min = np.min(TheClimsArray)
        MyVarC.valid_max = np.max(TheClimsArray)
        MyVarC.missing_value = mdi
        # Provide the data to the variable - depending on howmany dimensions there are
        MyVarC[:,:,:] = TheClimsArray[:,:,:]
        if (TheOutputTime == 'monthly'):
            MyVarS = ncfw.createVariable(TheName+'_stdevs','f4',('month_time','latitude','longitude',),fill_value = mdi,zlib=True,least_significant_digit=4)
        elif (TheOutputTime == 'pentad'):
            MyVarS = ncfw.createVariable(TheName+'_stdevs','f4',('pentad_time','latitude','longitude',),fill_value = mdi,zlib=True,least_significant_digit=4)
        MyVarS.standard_name = TheStandardName+'_climatological_standard_deviations'
        MyVarS.long_name = TheLongName+' climatological standard deviation over 1981-2010'
        MyVarS.units = TheUnit
        MyVarS.valid_min = np.min(TheStDevsArray)
        MyVarS.valid_max = np.max(TheStDevsArray)
        MyVarS.missing_value = mdi
        # Provide the data to the variable - depending on howmany dimensions there are
        MyVarS[:,:,:] = TheStDevsArray[:,:,:]
    ncfw.close()
    return
#************************************************************
# MAIN
#************************************************************
# What are we working on?
print('Working variable: ',OutputVar)
print('Input Time and Grid: ',ReadInTime,ReadInGrid)
print('Output Time and Grid: ',OutputTime,OutputGrid)
print('Type of run: ',ThisProg, styr, edyr, MakeAnoms)
print('Reanalysis: ',ThisRean)

# For ThisProg = Convert or Update read in monthly or pentad 1by1 (to present or previous year)
if (ThisProg != 'Build'):
    ReadInfo = [OutputVar+'2m']
    FileName = workingdir+'/OTHERDATA/'+OldERAStr
    TheData,Latitudes,Longitudes = GetGrid4(FileName,ReadInfo,LatInfo,LonInfo)

# For Update we also need to read in most recent year (or months) of data and convert to desired variable (BuildField)
if (ThisProg == 'Update'):
    print('Creating Update')
    # Set up the desired output array
    if (OutputTime == 'monthly'):
        FullArray = np.empty((nmons,nlatsOut,nlonsOut),dtype = float)
    elif (OutputTime == 'pentad'):
        FullArray = np.empty((npts,nlatsOut,nlonsOut),dtype = float)
    FullArray.fill(mdi)
    # Build the most recent year
    # NOTE(review): these slices select everything EXCEPT the final year
    # (0:nmons-12 / 0:npts-73). If the intent is a one-year template array
    # for BuildField they look like they should be the final-year slices
    # (nmons-12:nmons / npts-73:npts) - confirm against BuildField.
    if (OutputTime == 'monthly'):
        RecentField = FullArray[0:nmons-12,:,:]
    elif (OutputTime == 'pentad'):
        RecentField = FullArray[0:npts-73,:,:]
    RecentField = BuildField(OutputVar, ReadInTime, OutputTime, workingdir+'/OTHERDATA/'+InputERA, edyr, edyr, RecentField)
    # Fill the full array with data: history first, then the rebuilt final year
    if (OutputTime == 'monthly'):
        FullArray[0:nmons-12,:,:] = TheData
        FullArray[nmons-12:nmons,:,:] = RecentField
    elif (OutputTime == 'pentad'):
        FullArray[0:npts-73,:,:] = TheData
        # BUGFIX: the upper bound was nmons (months) in this pentad branch;
        # it must be npts so the final 73 pentads are filled.
        FullArray[npts-73:npts,:,:] = RecentField
    # Do we need to create anomalies?
    # Just in case we don't, create blank arrays for write out
    FullArrayAnoms = 0
    LandArrayAnoms = 0
    OceanArrayAnoms = 0
    ClimsArray = 0
    StDevArray = 0
    if (MakeAnoms == 1):
        print('Creating anomalies')
        FullArrayAnoms, LandArrayAnoms, OceanArrayAnoms, ClimsArray, StDevArray = CreateAnoms(ReadInGrid,OutputTime,ClimStart,ClimEnd,styr,edyr,FullArray)
elif (ThisProg == 'Regrid'):
    print('Creating Regrid')
    # Do we need to create anomalies?
    # Just in case we don't, create blank arrays for write out
    FullArrayAnoms = 0
    LandArrayAnoms = 0
    OceanArrayAnoms = 0
    ClimsArray = 0
    StDevArray = 0
    if (MakeAnoms == 1):
        print('Creating anomalies')
        # Anomalise at the native resolution BEFORE regridding (land/sea
        # masking is more accurate at the higher resolution).
        TheDataAnoms, LandDataAnoms, OceanDataAnoms, ClimsDataArray, StDevDataArray = CreateAnoms(ReadInGrid,OutputTime,ClimStart,ClimEnd,styr,edyr,TheData)
    # Regrid the fields to desired resolution
    FullArray = RegridField(OutputGrid,TheData)
    if (MakeAnoms == 1):
        FullArrayAnoms = RegridField(OutputGrid,TheDataAnoms)
        LandArrayAnoms = RegridField(OutputGrid,LandDataAnoms)
        OceanArrayAnoms = RegridField(OutputGrid,OceanDataAnoms)
        ClimsArray = RegridField(OutputGrid,ClimsDataArray)
        StDevArray = RegridField(OutputGrid,StDevDataArray)
# For ThisProg = Build then loop through the decs for Build only
elif (ThisProg == 'Build'):
    print('Creating Build')
    # NOTE(review): this call omits the output-array argument passed in the
    # Update branch - presumably BuildField allocates its own array when the
    # argument is absent; confirm against BuildField's signature.
    FullArray = BuildField(OutputVar, ReadInTime, OutputTime, workingdir+'/OTHERDATA/'+InputERA, styr, edyr)
    # Do we need to create anomalies?
    # Just in case we don't, create blank arrays for write out
    FullArrayAnoms = 0
    LandArrayAnoms = 0
    OceanArrayAnoms = 0
    ClimsArray = 0
    StDevArray = 0
    if (MakeAnoms == 1):
        print('Creating anomalies')
        FullArrayAnoms, LandArrayAnoms, OceanArrayAnoms, ClimsArray, StDevArray = CreateAnoms(ReadInGrid,OutputTime,ClimStart,ClimEnd,styr,edyr,FullArray)
# Anomalies are computed before any regridding because land/sea masking is
# better done at the highest available resolution.

# Write out
print('Writing out interim monthly array: ',OutputVar)
# Now write out netcdf of field
WriteNetCDF(workingdir+'/OTHERDATA/'+NewERAStr,OutputTime, OutputGrid, OutputVar, FullArray, FullArrayAnoms, LandArrayAnoms, OceanArrayAnoms, ClimsArray, StDevArray,
            styr, edyr, ClimStart, ClimEnd, NameDict[OutputVar], StandardNameDict[OutputVar], LongNameDict[OutputVar], UnitDict[OutputVar])
print('And we are done!')
|
Kate-Willett/Climate_Explorer
|
PYTHON/ExtractMergeRegridERA_JUL2018.py
|
Python
|
cc0-1.0
| 44,486
|
[
"NetCDF"
] |
8ced00da3ac75b08ccecd8cb2ae3720b00985fcf978da0b9e5d8effe57f9aaa9
|
# -*- coding: utf-8 -*-
from __future__ import unicode_literals, absolute_import
import os
import unittest
import mock
import nikola
import nikola.plugins.command.import_wordpress
from .base import BaseTestCase
class BasicCommandImportWordpress(BaseTestCase):
    """Shared fixture: a fresh import command plus the example export path."""

    def setUp(self):
        # Keep a handle on the plugin module so tests can reach its helpers.
        self.module = nikola.plugins.command.import_wordpress
        self.import_command = self.module.CommandImportWordpress()
        self.import_command.onefile = False
        example = os.path.join(os.path.dirname(__file__),
                               'wordpress_export_example.xml')
        self.import_filename = os.path.abspath(example)

    def tearDown(self):
        # Drop fixture references so each test starts from a clean slate.
        del self.import_command
        del self.import_filename
class TestQTranslateContentSeparation(BasicCommandImportWordpress):
    """Tests for separate_qtranslate_content: splitting qtranslate-tagged posts per language."""

    def test_conserves_qtranslate_less_post(self):
        """A post without qtranslate markers maps unchanged to the '' language key."""
        content = """Si vous préférez savoir à qui vous parlez commencez par visiter l'<a title="À propos" href="http://some.blog/about/">À propos</a>.
Quoiqu'il en soit, commentaires, questions et suggestions sont les bienvenues !"""
        content_translations = self.module.separate_qtranslate_content(content)
        self.assertEqual(1, len(content_translations))
        self.assertEqual(content, content_translations[""])

    def test_split_a_two_language_post(self):
        """A post with fr and en sections splits into one entry per language."""
        content = """<!--:fr-->Si vous préférez savoir à qui vous parlez commencez par visiter l'<a title="À propos" href="http://some.blog/about/">À propos</a>.
Quoiqu'il en soit, commentaires, questions et suggestions sont les bienvenues !
<!--:--><!--:en-->If you'd like to know who you're talking to, please visit the <a title="À propos" href="http://some.blog/about/">about page</a>.
Comments, questions and suggestions are welcome !
<!--:-->"""
        content_translations = self.module.separate_qtranslate_content(content)
        self.assertEqual("""Si vous préférez savoir à qui vous parlez commencez par visiter l'<a title="À propos" href="http://some.blog/about/">À propos</a>.
Quoiqu'il en soit, commentaires, questions et suggestions sont les bienvenues !
""", content_translations["fr"])
        self.assertEqual("""If you'd like to know who you're talking to, please visit the <a title="À propos" href="http://some.blog/about/">about page</a>.
Comments, questions and suggestions are welcome !
""", content_translations["en"])

    def test_split_a_two_language_post_with_teaser(self):
        """The <!--more--> teaser marker is preserved inside each language's text."""
        content = """<!--:fr-->Si vous préférez savoir à qui vous parlez commencez par visiter l'<a title="À propos" href="http://some.blog/about/">À propos</a>.
Quoiqu'il en soit, commentaires, questions et suggestions sont les bienvenues !
<!--:--><!--:en-->If you'd like to know who you're talking to, please visit the <a title="À propos" href="http://some.blog/about/">about page</a>.
Comments, questions and suggestions are welcome !
<!--:--><!--more--><!--:fr-->
Plus de détails ici !
<!--:--><!--:en-->
More details here !
<!--:-->"""
        content_translations = self.module.separate_qtranslate_content(content)
        self.assertEqual("""Si vous préférez savoir à qui vous parlez commencez par visiter l'<a title="À propos" href="http://some.blog/about/">À propos</a>.
Quoiqu'il en soit, commentaires, questions et suggestions sont les bienvenues !
<!--more--> \n\
Plus de détails ici !
""", content_translations["fr"])
        self.assertEqual("""If you'd like to know who you're talking to, please visit the <a title="À propos" href="http://some.blog/about/">about page</a>.
Comments, questions and suggestions are welcome !
<!--more--> \n\
More details here !
""", content_translations["en"])

    def test_split_a_two_language_post_with_intermission(self):
        """Untagged text between language sections is shared by both languages."""
        content = """<!--:fr-->Voila voila<!--:-->COMMON<!--:en-->BLA<!--:-->"""
        content_translations = self.module.separate_qtranslate_content(content)
        self.assertEqual("Voila voila COMMON", content_translations["fr"])
        self.assertEqual("COMMON BLA", content_translations["en"])

    def test_split_a_two_language_post_with_uneven_repartition(self):
        """Multiple sections of one language are concatenated in order."""
        content = """<!--:fr-->Voila voila<!--:-->COMMON<!--:fr-->MOUF<!--:--><!--:en-->BLA<!--:-->"""
        content_translations = self.module.separate_qtranslate_content(content)
        self.assertEqual("Voila voila COMMON MOUF", content_translations["fr"])
        self.assertEqual("COMMON BLA", content_translations["en"])

    def test_split_a_two_language_post_with_uneven_repartition_bis(self):
        """As above but with the languages interleaved in the opposite order."""
        content = """<!--:fr-->Voila voila<!--:--><!--:en-->BLA<!--:-->COMMON<!--:fr-->MOUF<!--:-->"""
        content_translations = self.module.separate_qtranslate_content(content)
        self.assertEqual("Voila voila COMMON MOUF", content_translations["fr"])
        self.assertEqual("BLA COMMON", content_translations["en"])
class CommandImportWordpressRunTest(BasicCommandImportWordpress):
    """Run the import command end to end with its collaborators mocked out."""

    def setUp(self):
        # BUGFIX: super(self.__class__, self) recurses infinitely as soon as
        # this class is subclassed; name the class explicitly (py2-compatible).
        super(CommandImportWordpressRunTest, self).setUp()
        self.data_import = mock.MagicMock()
        self.site_generation = mock.MagicMock()
        self.write_urlmap = mock.MagicMock()
        self.write_configuration = mock.MagicMock()
        # Patch out everything with side effects: shelling out, the actual
        # import and the two file writers.
        site_generation_patch = mock.patch('os.system', self.site_generation)
        data_import_patch = mock.patch(
            'nikola.plugins.command.import_wordpress.CommandImportWordpress.import_posts', self.data_import)
        write_urlmap_patch = mock.patch(
            'nikola.plugins.command.import_wordpress.CommandImportWordpress.write_urlmap_csv', self.write_urlmap)
        write_configuration_patch = mock.patch(
            'nikola.plugins.command.import_wordpress.CommandImportWordpress.write_configuration', self.write_configuration)
        self.patches = [site_generation_patch, data_import_patch,
                        write_urlmap_patch, write_configuration_patch]
        for patch in self.patches:
            patch.start()

    def tearDown(self):
        del self.data_import
        del self.site_generation
        del self.write_urlmap
        del self.write_configuration
        # Stop every patch started in setUp before discarding them.
        for patch in self.patches:
            patch.stop()
        del self.patches
        # BUGFIX: as in setUp, avoid super(self.__class__, self).
        super(CommandImportWordpressRunTest, self).tearDown()

    def test_create_import(self):
        """Each valid argument combination triggers all pipeline stages."""
        valid_import_arguments = (
            dict(options={'output_folder': 'some_folder'},
                 args=[self.import_filename]),
            dict(args=[self.import_filename]),
            dict(args=[self.import_filename, 'folder_argument']),
        )
        for arguments in valid_import_arguments:
            self.import_command.execute(**arguments)
            self.assertTrue(self.site_generation.called)
            self.assertTrue(self.data_import.called)
            self.assertTrue(self.write_urlmap.called)
            self.assertTrue(self.write_configuration.called)
            # Drafts are included unless explicitly excluded.
            self.assertFalse(self.import_command.exclude_drafts)

    def test_ignoring_drafts(self):
        """The exclude_drafts option is honoured with and without output_folder."""
        valid_import_arguments = (
            dict(options={'exclude_drafts': True}, args=[
                 self.import_filename]),
            dict(
                options={'exclude_drafts': True,
                         'output_folder': 'some_folder'},
                args=[self.import_filename]),
        )
        for arguments in valid_import_arguments:
            self.import_command.execute(**arguments)
            self.assertTrue(self.import_command.exclude_drafts)
class CommandImportWordpressTest(BasicCommandImportWordpress):
def test_create_import_work_without_argument(self):
    """Executing with no arguments must not fail; it shows the usage instead."""
    self.import_command.execute()
def test_populate_context(self):
    """populate_context lifts the blog settings from the channel into a context."""
    channel = self.import_command.get_channel_from_file(
        self.import_filename)
    self.import_command.transform_to_html = False
    self.import_command.use_wordpress_compiler = False
    context = self.import_command.populate_context(channel)
    # Structural keys must always be present.
    for required_key in ('POSTS', 'PAGES', 'COMPILERS'):
        self.assertTrue(required_key in context)
    # Values pulled from the example export file.
    expected_settings = (
        ('DEFAULT_LANG', 'de'),
        ('BLOG_TITLE', 'Wordpress blog title'),
        ('BLOG_DESCRIPTION', 'Nikola test blog ;) - with moré Ümläüts'),
        ('SITE_URL', 'http://some.blog/'),
        ('BLOG_EMAIL', 'mail@some.blog'),
        ('BLOG_AUTHOR', 'Niko'),
    )
    for key, expected in expected_settings:
        self.assertEqual(expected, context[key])
def test_importing_posts_and_attachments(self):
    """End-to-end import of the example channel with all file writes mocked.

    Verifies that attachments are downloaded, metadata/content files are
    written with the expected transformed content, the url_map is filled,
    and thumbnail links are registered in the module-level links dict.
    """
    channel = self.import_command.get_channel_from_file(
        self.import_filename)
    # Configure the command the way the CLI entry point would.
    self.import_command.base_dir = ''
    self.import_command.output_folder = 'new_site'
    self.import_command.squash_newlines = True
    self.import_command.no_downloads = False
    self.import_command.export_categories_as_categories = False
    self.import_command.export_comments = False
    self.import_command.transform_to_html = False
    self.import_command.use_wordpress_compiler = False
    self.import_command.tag_saniziting_strategy = 'first'
    self.import_command.context = self.import_command.populate_context(
        channel)
    # Ensuring clean results
    self.import_command.url_map = {}
    self.module.links = {}
    write_metadata = mock.MagicMock()
    write_content = mock.MagicMock()
    write_post = mock.MagicMock()  # NOTE(review): unused below - kept as-is
    write_attachments_info = mock.MagicMock()
    download_mock = mock.MagicMock()
    # Patch out every writer/downloader so import_posts runs without touching disk.
    with mock.patch('nikola.plugins.command.import_wordpress.CommandImportWordpress.write_content', write_content):
        with mock.patch('nikola.plugins.command.import_wordpress.CommandImportWordpress.write_metadata', write_metadata):
            with mock.patch('nikola.plugins.command.import_wordpress.CommandImportWordpress.download_url_content_to_file', download_mock):
                with mock.patch('nikola.plugins.command.import_wordpress.CommandImportWordpress.write_attachments_info', write_attachments_info):
                    with mock.patch('nikola.plugins.command.import_wordpress.os.makedirs'):
                        self.import_command.import_posts(channel)
    # Attachment download happened with an OS-correct target path.
    self.assertTrue(download_mock.called)
    qpath = 'new_site/files/wp-content/uploads/2008/07/arzt_und_pfusch-sick-cover.png'
    download_mock.assert_any_call(
        'http://some.blog/wp-content/uploads/2008/07/arzt_und_pfusch-sick-cover.png',
        qpath.replace('/', os.sep))
    self.assertTrue(write_metadata.called)
    write_metadata.assert_any_call(
        'new_site/stories/kontakt.meta'.replace('/', os.sep), 'Kontakt',
        'kontakt', '2009-07-16 20:20:32', '', [], **{'wp-status': 'publish'})
    self.assertTrue(write_content.called)
    # sourcecode shortcode must have been rewritten as a fenced block.
    write_content.assert_any_call('new_site/posts/2007/04/hoert.md'.replace('/', os.sep),
                                  """An image.
<img class="size-full wp-image-16" title="caption test" src="http://some.blog/wp-content/uploads/2009/07/caption_test.jpg" alt="caption test" width="739" height="517" />
Some source code.
```Python
import sys
print sys.version
```
The end.
""", True)
    self.assertTrue(write_attachments_info.called)
    write_attachments_info.assert_any_call('new_site/posts/2008/07/arzt-und-pfusch-s-i-c-k.attachments.json'.replace('/', os.sep),
                                           {10: {'wordpress_user_name': 'Niko',
                                                 'files_meta': [{'width': 300, 'height': 299},
                                                                {'width': 150, 'size': 'thumbnail', 'height': 150}],
                                                 'excerpt': 'Arzt+Pfusch - S.I.C.K.',
                                                 'date_utc': '2009-07-16 19:40:37',
                                                 'content': 'Das Cover von Arzt+Pfusch - S.I.C.K.',
                                                 'files': ['/wp-content/uploads/2008/07/arzt_und_pfusch-sick-cover.png',
                                                           '/wp-content/uploads/2008/07/arzt_und_pfusch-sick-cover-150x150.png'],
                                                 'title': 'Arzt+Pfusch - S.I.C.K.'}})
    write_content.assert_any_call(
        'new_site/posts/2008/07/arzt-und-pfusch-s-i-c-k.md'.replace('/', os.sep),
        '''<img class="size-full wp-image-10 alignright" title="Arzt+Pfusch - S.I.C.K." src="http://some.blog/wp-content/uploads/2008/07/arzt_und_pfusch-sick-cover.png" alt="Arzt+Pfusch - S.I.C.K." width="210" height="209" />Arzt+Pfusch - S.I.C.K.Gerade bin ich \xfcber das Album <em>S.I.C.K</em> von <a title="Arzt+Pfusch" href="http://www.arztpfusch.com/" target="_blank">Arzt+Pfusch</a> gestolpert, welches Arzt+Pfusch zum Download f\xfcr lau anbieten. Das Album steht unter einer Creative Commons <a href="http://creativecommons.org/licenses/by-nc-nd/3.0/de/">BY-NC-ND</a>-Lizenz.
Die Ladung <em>noisebmstupidevildustrial</em> gibts als MP3s mit <a href="http://www.archive.org/download/dmp005/dmp005_64kb_mp3.zip">64kbps</a> und <a href="http://www.archive.org/download/dmp005/dmp005_vbr_mp3.zip">VBR</a>, als Ogg Vorbis und als FLAC (letztere <a href="http://www.archive.org/details/dmp005">hier</a>). <a href="http://www.archive.org/download/dmp005/dmp005-artwork.zip">Artwork</a> und <a href="http://www.archive.org/download/dmp005/dmp005-lyrics.txt">Lyrics</a> gibts nochmal einzeln zum Download.''', True)
    write_content.assert_any_call(
        'new_site/stories/kontakt.md'.replace('/', os.sep), """<h1>Datenschutz</h1>
Ich erhebe und speichere automatisch in meine Server Log Files Informationen, die dein Browser an mich \xfcbermittelt. Dies sind:
<ul>
<li>Browsertyp und -version</li>
<li>verwendetes Betriebssystem</li>
<li>Referrer URL (die zuvor besuchte Seite)</li>
<li>IP Adresse des zugreifenden Rechners</li>
<li>Uhrzeit der Serveranfrage.</li>
</ul>
Diese Daten sind f\xfcr mich nicht bestimmten Personen zuordenbar. Eine Zusammenf\xfchrung dieser Daten mit anderen Datenquellen wird nicht vorgenommen, die Daten werden einzig zu statistischen Zwecken erhoben.""", True)
    # Every example URL must have been mapped to its new location.
    self.assertTrue(len(self.import_command.url_map) > 0)
    self.assertEqual(
        self.import_command.url_map['http://some.blog/2007/04/hoert/'],
        'http://some.blog/posts/2007/04/hoert.html')
    self.assertEqual(
        self.import_command.url_map[
            'http://some.blog/2008/07/arzt-und-pfusch-s-i-c-k/'],
        'http://some.blog/posts/2008/07/arzt-und-pfusch-s-i-c-k.html')
    self.assertEqual(
        self.import_command.url_map['http://some.blog/kontakt/'],
        'http://some.blog/stories/kontakt.html')
    # All generated thumbnail variants must be registered in module.links.
    image_thumbnails = [
        'http://some.blog/wp-content/uploads/2012/12/2012-12-19-1355925145_1024x600_scrot-64x64.png',
        'http://some.blog/wp-content/uploads/2012/12/2012-12-19-1355925145_1024x600_scrot-300x175.png',
        'http://some.blog/wp-content/uploads/2012/12/2012-12-19-1355925145_1024x600_scrot-36x36.png',
        'http://some.blog/wp-content/uploads/2012/12/2012-12-19-1355925145_1024x600_scrot-24x24.png',
        'http://some.blog/wp-content/uploads/2012/12/2012-12-19-1355925145_1024x600_scrot-96x96.png',
        'http://some.blog/wp-content/uploads/2012/12/2012-12-19-1355925145_1024x600_scrot-96x96.png',
        'http://some.blog/wp-content/uploads/2012/12/2012-12-19-1355925145_1024x600_scrot-48x48.png',
        'http://some.blog/wp-content/uploads/2012/12/2012-12-19-1355925145_1024x600_scrot-96x96.png',
        'http://some.blog/wp-content/uploads/2012/12/2012-12-19-1355925145_1024x600_scrot-150x150.png'
    ]
    for link in image_thumbnails:
        self.assertTrue(
            link in self.module.links,
            'No link to "{0}" found in {map}.'.format(
                link,
                map=self.module.links
            )
        )
def test_transforming_content(self):
    """Applying markup conversions to content invokes every transformation."""
    target = 'nikola.plugins.command.import_wordpress.CommandImportWordpress.{0}'
    transform_names = ('transform_code',
                       'transform_caption',
                       'transform_multiple_newlines')
    mocks = dict((name, mock.MagicMock()) for name in transform_names)
    self.import_command.transform_to_html = False
    self.import_command.use_wordpress_compiler = False
    with mock.patch(target.format('transform_code'), mocks['transform_code']), \
            mock.patch(target.format('transform_caption'), mocks['transform_caption']), \
            mock.patch(target.format('transform_multiple_newlines'), mocks['transform_multiple_newlines']):
        self.import_command.transform_content("random content", "wp", None)
    # Each markup transformation must have been called on the content.
    for transform_mock in mocks.values():
        self.assertTrue(transform_mock.called)
def test_transforming_source_code(self):
    """
    Tests the handling of sourcecode tags.
    """
    original = """Hello World.
[sourcecode language="Python"]
import sys
print sys.version
[/sourcecode]"""
    expected = """Hello World.
```Python
import sys
print sys.version
```"""
    transformed = self.import_command.transform_code(original)
    # The WordPress shortcode markers must be gone from the output ...
    self.assertNotIn('[/sourcecode]', transformed)
    self.assertNotIn('[sourcecode language=', transformed)
    # ... and replaced by fenced-code markup.
    self.assertEqual(transformed, expected)
def test_transform_caption(self):
    """A [caption] shortcode is reduced to its inner image tag."""
    image_tag = '<img class="size-full wp-image-16" src="http://some.blog/wp-content/uploads/2009/07/caption_test.jpg" alt="beautiful picture" width="739" height="517" />'
    caption = '[caption id="attachment_16" align="alignnone" width="739" caption="beautiful picture"]' + image_tag + '[/caption]'
    self.assertEqual(self.import_command.transform_caption(caption), image_tag)
def test_transform_multiple_captions_in_a_post(self):
    """Every [caption] shortcode in a post is replaced, not just the first."""
    first_img = '<img class="size-full wp-image-16" src="http://some.blog/wp-content/uploads/2009/07/caption_test.jpg" alt="beautiful picture" width="739" height="517" />'
    second_img = '<img class="size-full wp-image-16" title="pretty" src="http://some.blog/wp-content/uploads/2009/07/caption_test.jpg" alt="beautiful picture" width="739" height="517" />'
    content = """asdasdas
[caption id="attachment_16" align="alignnone" width="739" caption="beautiful picture"]%s[/caption]
asdasdas
asdasdas
[caption id="attachment_16" align="alignnone" width="739" caption="beautiful picture"]%s[/caption]
asdasdas""" % (first_img, second_img)
    expected_content = """asdasdas
%s
asdasdas
asdasdas
%s
asdasdas""" % (first_img, second_img)
    self.assertEqual(
        expected_content, self.import_command.transform_caption(content))
def test_transform_multiple_newlines(self):
    """Newline squashing is applied only when squash_newlines is set."""
    # NOTE(review): in this copy of the file the two fixture strings are
    # identical and contain no blank lines, which makes the squashing
    # assertion vacuous; the original fixture presumably contained runs of
    # blank lines that get collapsed -- confirm against upstream.
    content = """This
has
way to many
newlines.
"""
    expected_content = """This
has
way to many
newlines.
"""
    # With squashing disabled the text must pass through unchanged.
    self.import_command.squash_newlines = False
    self.assertEqual(content,
                     self.import_command.transform_multiple_newlines(content))
    # With squashing enabled runs of newlines are collapsed.
    self.import_command.squash_newlines = True
    self.assertEqual(expected_content,
                     self.import_command.transform_multiple_newlines(content))
def test_transform_caption_with_link_inside(self):
    """A caption wrapping a linked thumbnail keeps the whole anchor intact."""
    linked_image = ('<a href="http://some.blog/openttd-missing_sound.png">'
                    '<img class="size-thumbnail wp-image-551" title="openttd-missing_sound" '
                    'src="http://some.blog/openttd-missing_sound-150x150.png" alt="Fehlermeldung" /></a>')
    content = '[caption caption="Fehlermeldung"]' + linked_image + '[/caption]'
    self.assertEqual(linked_image, self.import_command.transform_caption(content))
def test_get_configuration_output_path(self):
    """conf.py location depends on whether we import into an existing site."""
    self.import_command.output_folder = 'new_site'
    default_config_path = os.path.join('new_site', 'conf.py')
    # Fresh site: the plain conf.py location is used.
    self.import_command.import_into_existing_site = False
    self.assertEqual(default_config_path,
                     self.import_command.get_configuration_output_path())
    # Existing site: a distinct, plugin-named path is generated instead.
    self.import_command.import_into_existing_site = True
    alternative_path = self.import_command.get_configuration_output_path()
    self.assertNotEqual(default_config_path, alternative_path)
    self.assertIn(self.import_command.name, alternative_path)
def test_write_content_does_not_detroy_text(self):
    """write_content() must wrap the payload in HTML without mangling it."""
    # (Method name typo "detroy" is kept: renaming would change the test id.)
    content = b"""FOO"""
    open_mock = mock.mock_open()
    with mock.patch('nikola.plugins.basic_import.open', open_mock, create=True):
        self.import_command.write_content('some_file', content)
    expected_calls = [
        mock.call(u'some_file', u'wb+'),
        mock.call().__enter__(),
        mock.call().write(b'<html><body><p>FOO</p></body></html>'),
        mock.call().__exit__(None, None, None),
    ]
    open_mock.assert_has_calls(expected_calls)
def test_configure_redirections(self):
    """
    Testing the configuration of the redirections.
    We need to make sure that we have valid sources and target links.
    """
    url_map = {'/somewhere/else': 'http://foo.bar/posts/somewhereelse.html'}
    redirections = self.import_command.configure_redirections(url_map)
    # Exactly one redirection, mapping a relative source page to the new path.
    self.assertEqual(1, len(redirections))
    self.assertIn(('somewhere/else/index.html', '/posts/somewhereelse.html'),
                  redirections)
# Allow running this test module directly with `python`.
if __name__ == '__main__':
    unittest.main()
|
x1101/nikola
|
tests/test_command_import_wordpress.py
|
Python
|
mit
| 22,546
|
[
"VisIt"
] |
1646948462bc1b7566692684c0e28d0c55669be586c890d1503fa574369e09bb
|
"""Needed a way to go through the evaluation VCF from VCF benchmarking and spit out the FP and FN
calls into separate VCFs and then extract reads from those regions into a BAM.
vcffilter can mark the FP and FNs but leaves all the other records in. Would need to chain with vcftools
and at this point things are complicated enough that it's easier to use Python for this.
This tools drops two VCFs, one for FN and one for FP. Each has an associated ROI BED file.
These BED files can be used with samtools view to generate BAMs that contain reads from just these variants
"""
import time
import pysam
import logging
logger = logging.getLogger(__name__)
def extract_fp_fn(fname_in, prefix_out):
    """Split an evaluation VCF into FP and FN VCFs plus matching ROI BED files.

    :param fname_in: path to the evaluation VCF/BCF ('.bcf' suffix triggers
                     binary read mode)
    :param prefix_out: prefix for the four output files
                       (<prefix>-fp.vcf, <prefix>-fp-roi.bed,
                        <prefix>-fn.vcf, <prefix>-fn-roi.bed)
    :return: None
    """
    logger.debug('Starting filtering ...')
    t0 = time.time()

    mode = 'rb' if fname_in.endswith('bcf') else 'r'
    vcf_in = pysam.VariantFile(fname_in, mode)
    fp_vcf_out = pysam.VariantFile(prefix_out + '-fp.vcf', mode='w', header=vcf_in.header)
    fn_vcf_out = pysam.VariantFile(prefix_out + '-fn.vcf', mode='w', header=vcf_in.header)

    n, fp_cnt, fn_cnt = -1, 0, 0
    # Original left all output handles unclosed; close them deterministically
    # so the VCF/BED data is flushed even if iteration raises.
    try:
        with open(prefix_out + '-fp-roi.bed', 'w') as fp_roi_bed, \
                open(prefix_out + '-fn-roi.bed', 'w') as fn_roi_bed:
            for n, v in enumerate(vcf_in):
                # 'BD' in the TRUTH sample marks false negatives ...
                if v.samples['TRUTH']['BD'] == 'FN':
                    fn_vcf_out.write(v)
                    save_roi(fn_roi_bed, v)
                    fn_cnt += 1
                # ... and in the QUERY sample marks false positives.
                if v.samples['QUERY']['BD'] == 'FP':
                    fp_vcf_out.write(v)
                    save_roi(fp_roi_bed, v)
                    fp_cnt += 1
    finally:
        fp_vcf_out.close()
        fn_vcf_out.close()
        vcf_in.close()

    logger.debug('Processed {} calls'.format(n + 1))
    logger.debug('Sample had {} FP, {} FN'.format(fp_cnt, fn_cnt))
    t1 = time.time()
    logger.debug('Took {} s'.format(t1 - t0))
def save_roi(fp, v):
    """Append the variant's region (chrom, start, stop) to open BED handle *fp*."""
    fields = (v.chrom, v.start, v.stop)
    fp.write('\t'.join(str(f) for f in fields) + '\n')
|
sbg/Mitty
|
mitty/benchmarking/filterevalvcf.py
|
Python
|
apache-2.0
| 1,782
|
[
"pysam"
] |
88afa5b540a179455ff04833761446b251e86b4cc7e41cd2467ad7af4545e816
|
##############################################################################
# Copyright (c) 2013-2018, Lawrence Livermore National Security, LLC.
# Produced at the Lawrence Livermore National Laboratory.
#
# This file is part of Spack.
# Created by Todd Gamblin, tgamblin@llnl.gov, All rights reserved.
# LLNL-CODE-647188
#
# For details, see https://github.com/spack/spack
# Please also see the NOTICE and LICENSE files for our notice and the LGPL.
#
# This program is free software; you can redistribute it and/or modify
# it under the terms of the GNU Lesser General Public License (as
# published by the Free Software Foundation) version 2.1, February 1999.
#
# This program is distributed in the hope that it will be useful, but
# WITHOUT ANY WARRANTY; without even the IMPLIED WARRANTY OF
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the terms and
# conditions of the GNU Lesser General Public License for more details.
#
# You should have received a copy of the GNU Lesser General Public
# License along with this program; if not, write to the Free Software
# Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA
##############################################################################
from spack import *
class Libcerf(AutotoolsPackage):
    """Self-contained C library of complex error functions, built on
    Faddeeva's plasma dispersion function w(z); also provides Dawson's
    integral and the Voigt convolution of a Gaussian and a Lorentzian.
    """

    homepage = "http://sourceforge.net/projects/libcerf"
    url = "http://downloads.sourceforge.net/project/libcerf/libcerf-1.3.tgz"

    version('1.3', 'b3504c467204df71e62aeccf73a25612')

    def configure_args(self):
        """Return extra ./configure arguments for this build."""
        options = []
        # Clang reports unused functions as errors, see
        # http://clang.debian.net/status.php?version=3.8.1&key=UNUSED_FUNCTION
        if self.spec.satisfies('%clang'):
            options.append('CFLAGS=-Wno-unused-function')
        return options
|
krafczyk/spack
|
var/spack/repos/builtin/packages/libcerf/package.py
|
Python
|
lgpl-2.1
| 2,049
|
[
"Gaussian"
] |
7a0a33e04f4a406ce5aad1306b292df8ab3a2a2e7c6989dee51ceb29cacf839e
|
#!/usr/bin/env python
#
# This file is part of Buildbot. Buildbot is free software: you can
# redistribute it and/or modify it under the terms of the GNU General Public
# License as published by the Free Software Foundation, version 2.
#
# This program is distributed in the hope that it will be useful, but WITHOUT
# ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS
# FOR A PARTICULAR PURPOSE. See the GNU General Public License for more
# details.
#
# You should have received a copy of the GNU General Public License along with
# this program; if not, write to the Free Software Foundation, Inc., 51
# Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA.
#
# Copyright Buildbot Team Members
"""
Standard setup script.
"""
import os
import sys
from distutils.command.install_data import install_data
from distutils.command.sdist import sdist
from distutils.core import setup
from buildslave import version
# Scripts shipped with the package: always the POSIX launcher, plus the
# Windows helpers when packaging or installing on Windows.
scripts = ["bin/buildslave"]
# sdist is usually run on a non-Windows platform, but the buildslave.bat file
# still needs to get packaged.
if 'sdist' in sys.argv or sys.platform == 'win32':
    scripts.append("contrib/windows/buildslave.bat")
    scripts.append("contrib/windows/buildbot_service.py")
class our_install_data(install_data):
    """install_data variant that also writes a buildslave/VERSION file."""

    def finalize_options(self):
        # Install data files alongside the Python modules instead of the
        # default data directory.
        self.set_undefined_options('install',
                                   ('install_lib', 'install_dir'),
                                   )
        install_data.finalize_options(self)

    def run(self):
        install_data.run(self)
        # ensure there's a buildslave/VERSION file
        fn = os.path.join(self.install_dir, 'buildslave', 'VERSION')
        # Use a context manager so the handle is closed (original leaked it).
        with open(fn, 'w') as f:
            f.write(version)
        self.outfiles.append(fn)
class our_sdist(sdist):
    """sdist variant that adds VERSION and NEWS files to the release tree."""

    def make_release_tree(self, base_dir, files):
        sdist.make_release_tree(self, base_dir, files)
        # ensure there's a buildslave/VERSION file
        fn = os.path.join(base_dir, 'buildslave', 'VERSION')
        # Context managers throughout: the original leaked all three handles.
        with open(fn, 'w') as f:
            f.write(version)
        # ensure that NEWS has a copy of the latest release notes, copied from
        # the master tree, with the proper version substituted
        src_fn = os.path.join('..', 'master', 'docs', 'relnotes/index.rst')
        with open(src_fn) as f:
            src = f.read()
        src = src.replace('|version|', version)
        dst_fn = os.path.join(base_dir, 'NEWS')
        with open(dst_fn, 'w') as f:
            f.write(src)
# Keyword arguments for distutils.core.setup(); extended further below for
# Windows (zip_safe) and for setuptools-specific options (install_requires).
setup_args = {
    'name': "buildbot-slave",
    'version': version,
    'description': "BuildBot Slave Daemon",
    'long_description': "See the 'buildbot' package for details",
    'author': "Brian Warner",
    'author_email': "warner-buildbot@lothar.com",
    'maintainer': "Dustin J. Mitchell",
    'maintainer_email': "dustin@v.igoro.us",
    'url': "http://buildbot.net/",
    'license': "GNU GPL",
    'classifiers': [
        'Development Status :: 5 - Production/Stable',
        'Environment :: No Input/Output (Daemon)',
        'Intended Audience :: Developers',
        'License :: OSI Approved :: GNU General Public License (GPL)',
        'Topic :: Software Development :: Build Tools',
        'Topic :: Software Development :: Testing',
    ],
    'packages': [
        "buildslave",
        "buildslave.commands",
        "buildslave.scripts",
        "buildslave.monkeypatches",
        "buildslave.test",
        "buildslave.test.fake",
        "buildslave.test.util",
        "buildslave.test.unit",
    ],
    'scripts': scripts,
    # mention data_files, even if empty, so install_data is called and
    # VERSION gets copied
    'data_files': [("buildslave", [])],
    # Custom commands defined above (VERSION / NEWS file generation).
    'cmdclass': {
        'install_data': our_install_data,
        'sdist': our_sdist
    }
}
# set zip_safe to false to force Windows installs to always unpack eggs
# into directories, which seems to work better --
# see http://buildbot.net/trac/ticket/907
if sys.platform == "win32":
    setup_args['zip_safe'] = False

try:
    # If setuptools is installed, then we'll add setuptools-specific arguments
    # to the setup args.
    import setuptools  # @UnusedImport
except ImportError:
    pass
else:
    if sys.version_info[:2] >= (2, 6):
        setup_args['install_requires'] = [
            'twisted >= 8.0.0',
        ]
    else:
        # Latest supported on Python 2.5 version of Twisted is 12.10, and
        # pip/easy_install currently can't select correct version of Twisted.
        # Twisted depends on zope.interface, which became incompatible with
        # Python 2.5 starting from 4.0.0 release.
        setup_args['install_requires'] = [
            'twisted >= 8.0.0, <= 12.1.0',
            'zope.interface < 4.0.0',
        ]
    setup_args['tests_require'] = [
        'mock',
    ]

# NOTE: setting install_requires to None skips dependency resolution entirely
# (opt-out for build environments that manage dependencies themselves).
if os.getenv('NO_INSTALL_REQS'):
    setup_args['install_requires'] = None

setup(**setup_args)
|
zozo123/buildbot
|
slave/setup.py
|
Python
|
gpl-3.0
| 4,830
|
[
"Brian"
] |
526e61913a1b671d7e9c44a975b8919461e1846f413acd6bf1e9b596ff93606d
|
# proxy module
from __future__ import absolute_import
from mayavi.modules.surface import *
|
enthought/etsproxy
|
enthought/mayavi/modules/surface.py
|
Python
|
bsd-3-clause
| 91
|
[
"Mayavi"
] |
c07d5755cb9124654ff447a5926466cb1573a0a9c3f311a85c51c54fc3eb45e6
|
#!/usr/bin/env python
# Install.py tool to do automate build of Colvars
from __future__ import print_function
import sys,os,subprocess
# help message
# NOTE: this module-level name intentionally shadows the builtin help();
# it is the usage text printed by error() when no message is given.
# (The typo "peform" is inside the runtime string and is left untouched.)
help = """
Syntax from src dir: make lib-colvars args="-m machine -e suffix"
Syntax from lib/colvars dir: python Install.py -m machine -e suffix
specify -m and optionally -e, order does not matter
-m = peform a clean followed by "make -f Makefile.machine"
machine = suffix of a lib/colvars/Makefile.* or of a
src/MAKE/MACHINES/Makefile.* file
-e = set EXTRAMAKE variable in Makefile.machine to Makefile.lammps.suffix
does not alter existing Makefile.machine
Examples:
make lib-colvars args="-m g++" # build COLVARS lib with GNU g++ compiler
"""
# print error message or help
def error(str=None):
    """Print *str* as an error (or the usage text when absent) and exit.

    The parameter name shadows the builtin ``str``; it is kept for
    interface compatibility with existing callers.
    """
    if not str:
        print(help)
    else:
        # Fixed Python-2-to-3 conversion bug: the original read
        # `print("ERROR"),str`, which printed only "ERROR" and discarded
        # the message in a throwaway tuple expression.
        print("ERROR", str)
    sys.exit()
# parse args
args = sys.argv[1:]
nargs = len(args)
if nargs == 0: error()

machine = None
extraflag = False

# Walk the argument list two tokens at a time (flag, value).
iarg = 0
while iarg < nargs:
  if args[iarg] == "-m":
    if iarg+2 > len(args): error()
    machine = args[iarg+1]
    iarg += 2
  elif args[iarg] == "-e":
    if iarg+2 > len(args): error()
    extraflag = True
    suffix = args[iarg+1]
    iarg += 2
  else: error()

# set lib from working dir
cwd = os.getcwd()
lib = os.path.basename(cwd)
def get_lammps_machine_flags(machine):
  """Parse Makefile.machine from LAMMPS, return dictionary of compiler flags.

  Only the variables needed to build the Colvars library are extracted;
  comments and other assignments are ignored.  Calls error() (which exits)
  when the machine makefile cannot be found.
  """
  # Variables of interest in the LAMMPS machine makefile.
  wanted = ('CC', 'CCFLAGS', 'SHFLAGS', 'ARCHIVE', 'ARFLAGS', 'SHELL')
  makefile = "../../src/MAKE/MACHINES/Makefile.%s" % machine
  if not os.path.exists(makefile):
    error("Cannot locate src/MAKE/MACHINES/Makefile.%s" % machine)
  machine_flags = {}
  # Use a context manager: the original never closed the file handle.
  with open(makefile, 'r') as f:
    for line in f:
      # Strip trailing comments, then split "VAR = value ..." assignments.
      line = line.partition('#')[0].rstrip()
      words = line.split()
      if len(words) > 2 and words[0] in wanted:
        machine_flags[words[0]] = ' '.join(words[2:])
  return machine_flags
def gen_colvars_makefile_machine(machine, machine_flags):
  """Generate Makefile.machine for Colvars given the compiler flags.

  Writes Makefile.<machine> in the current directory using the CC/CCFLAGS/
  SHFLAGS/ARCHIVE/ARFLAGS/SHELL entries of *machine_flags*.
  """
  # Explicit "\n"/"\t" escapes: the clean recipe line MUST start with a tab
  # for make to accept it.
  template = (
      "# -*- makefile -*- to build Colvars module with %s\n"
      "COLVARS_LIB = libcolvars.a\n"
      "COLVARS_OBJ_DIR =\n"
      "CXX = %s\n"
      "CXXFLAGS = %s %s\n"
      "AR = %s\n"
      "ARFLAGS = %s\n"
      "SHELL = %s\n"
      "include Makefile.common\n"
      ".PHONY: default clean\n"
      "default: $(COLVARS_LIB) Makefile.lammps\n"
      "clean:\n"
      "\t-rm -f $(COLVARS_OBJS) $(COLVARS_LIB)\n"
  ) % (machine, machine_flags['CC'],
       machine_flags['CCFLAGS'], machine_flags['SHFLAGS'],
       machine_flags['ARCHIVE'], machine_flags['ARFLAGS'],
       machine_flags['SHELL'])
  # Context manager closes the handle (the original leaked it, risking an
  # unflushed makefile when the script exits).
  with open("Makefile.%s" % machine, 'w') as machine_makefile:
    machine_makefile.write(template)
# Create Makefile.<machine> from the LAMMPS machine makefile when it is not
# already provided in this directory.
if not os.path.exists("Makefile.%s" % machine):
  machine_flags = get_lammps_machine_flags(machine)
  gen_colvars_makefile_machine(machine, machine_flags)
if not os.path.exists("Makefile.%s" % machine):
  error("lib/%s/Makefile.%s does not exist" % (lib,machine))

# create Makefile.auto as copy of Makefile.machine
# reset EXTRAMAKE if requested
lines = open("Makefile.%s" % machine,'r').readlines()
fp = open("Makefile.auto",'w')
for line in lines:
  words = line.split()
  if len(words) == 3 and extraflag and \
        words[0] == "EXTRAMAKE" and words[1] == '=':
    line = line.replace(words[2],"Makefile.lammps.%s" % suffix)
  fp.write(line)
fp.close()

# make the library via Makefile.auto
# NOTE(review): bare `except:` below -- presumably guards platforms where
# multiprocessing/cpu_count() is unavailable; a narrower exception type
# (ImportError, NotImplementedError) would be safer.
try:
  import multiprocessing
  n_cpus = multiprocessing.cpu_count()
except:
  n_cpus = 1

print("Building lib%s.a ..." % lib)
cmd = ["make -f Makefile.auto clean"]
print(subprocess.check_output(cmd, shell=True).decode())
cmd = ["make -f Makefile.auto -j%d" % n_cpus]
print(subprocess.check_output(cmd, shell=True).decode())

if os.path.exists("lib%s.a" % lib): print("Build was successful")
else: error("Build of lib/%s/lib%s.a was NOT successful" % (lib,lib))

if not os.path.exists("Makefile.lammps"):
  print("lib/%s/Makefile.lammps was NOT created" % lib)
|
ovilab/lammps
|
lib/colvars/Install.py
|
Python
|
gpl-2.0
| 4,039
|
[
"LAMMPS"
] |
f58958e2992f10f065b571a57fa7d10fe4f5274390d6756c2827b3a2ba2e16ec
|
#!/usr/bin/python
# -*- coding: utf-8 -*-
#
# Copyright 2013 The Plaso Project Authors.
# Please see the AUTHORS file for details on individual authors.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""Tests for the Safari history plist plugin."""
import unittest
# pylint: disable=unused-import
from plaso.formatters import plist as plist_formatter
from plaso.lib import event
from plaso.lib import timelib_test
from plaso.parsers import plist
from plaso.parsers.plist_plugins import safari
from plaso.parsers.plist_plugins import test_lib
class SafariPluginTest(test_lib.PlistPluginTestCase):
  """Tests for the Safari history plist plugin."""

  def setUp(self):
    """Sets up the needed objects used throughout the test."""
    self._plugin = safari.SafariHistoryPlugin(None)
    self._parser = plist.PlistParser(event.PreprocessObject(), None)

  def testProcess(self):
    """Tests the Process function."""
    test_file = self._GetTestFilePath(['History.plist'])
    plist_name = 'History.plist'
    events = self._ParsePlistFileWithPlugin(
        self._parser, self._plugin, test_file, plist_name)
    event_objects = self._GetEventObjects(events)

    # 18 entries in timeline.
    self.assertEquals(len(event_objects), 18)

    event_object = event_objects[8]

    expected_timestamp = timelib_test.CopyStringToTimestamp(
        '2013-07-08 17:31:00')
    # NOTE(review): the timestamp is asserted on event_objects[10] while the
    # URL/message checks below use event_objects[8] -- looks inconsistent;
    # confirm whether index 10 is intentional.
    self.assertEquals(event_objects[10].timestamp, expected_timestamp)

    expected_url = u'http://netverslun.sci-mx.is/aminosyrur'
    self.assertEquals(event_object.url, expected_url)

    expected_string = (
        u'Visited: {0:s} (Am\xedn\xf3s\xfdrur ) Visit Count: 1').format(
            expected_url)

    self._TestGetMessageStrings(event_object, expected_string, expected_string)
# Allow running this test module directly with `python`.
if __name__ == '__main__':
  unittest.main()
|
iwm911/plaso
|
plaso/parsers/plist_plugins/safari_test.py
|
Python
|
apache-2.0
| 2,306
|
[
"VisIt"
] |
7a70ff35644223ec95c1fb07f31b7513cdcc0fed47293cfa4adee29d4fdd93fb
|
########################################################################
# File: ReplicateAndRegister.py
# Author: Krzysztof.Ciba@NOSPAMgmail.com
# Date: 2013/03/13 18:49:12
########################################################################
""" :mod: ReplicateAndRegister
==========================
.. module: ReplicateAndRegister
:synopsis: ReplicateAndRegister operation handler
.. moduleauthor:: Krzysztof.Ciba@NOSPAMgmail.com
ReplicateAndRegister operation handler
"""
__RCSID__ = "$Id$"
# #
# @file ReplicateAndRegister.py
# @author Krzysztof.Ciba@NOSPAMgmail.com
# @date 2013/03/13 18:49:28
# @brief Definition of ReplicateAndRegister class.
# # imports
import re
from collections import defaultdict
# # from DIRAC
from DIRAC import S_OK, S_ERROR, gLogger
from DIRAC.Core.Utilities.Adler import compareAdler, hexAdlerToInt, intAdlerToHex
from DIRAC.FrameworkSystem.Client.MonitoringClient import gMonitor
from DIRAC.DataManagementSystem.Client.DataManager import DataManager
from DIRAC.DataManagementSystem.Agent.RequestOperations.DMSRequestOperationsBase import DMSRequestOperationsBase
from DIRAC.Resources.Storage.StorageElement import StorageElement
from DIRAC.Resources.Catalog.FileCatalog import FileCatalog
from DIRAC.DataManagementSystem.Client.FTS3Operation import FTS3TransferOperation
from DIRAC.DataManagementSystem.Client.FTS3File import FTS3File
from DIRAC.DataManagementSystem.Client.FTS3Client import FTS3Client
from DIRAC.ConfigurationSystem.Client.Helpers import Registry
def filterReplicas(opFile, logger=None, dataManager=None):
  """ filter out banned/invalid source SEs

  Classify the replicas of ``opFile`` so scheduling code can pick valid
  sources, and normalise the file's Checksum/ChecksumType along the way.

  :param opFile: request File object (LFN, Checksum, Status, ... attributes)
  :param logger: optional logger, defaults to gLogger
  :param dataManager: optional DataManager, a fresh one is built when absent
  :return: S_OK(dict) mapping category name -> list of SE names, with
           categories 'Valid', 'Bad', 'NoMetadata', 'NoReplicas',
           'NoActiveReplicas' (the dict is a defaultdict(list), so callers
           reading other keys -- e.g. 'NoPFN' in fts3Transfer() -- get empty
           lists); S_ERROR when the lookup fails or the file does not exist
  """
  if logger is None:
    logger = gLogger
  if dataManager is None:
    dataManager = DataManager()

  log = logger.getSubLogger("filterReplicas")
  result = defaultdict(list)

  replicas = dataManager.getActiveReplicas(opFile.LFN, getUrl=False)
  if not replicas["OK"]:
    log.error('Failed to get active replicas', replicas["Message"])
    return replicas
  reNotExists = re.compile(r".*such file.*")
  replicas = replicas["Value"]
  failed = replicas["Failed"].get(opFile.LFN, "")
  # A "no such file" failure is fatal for this file: mark it Failed.
  if reNotExists.match(failed.lower()):
    opFile.Status = "Failed"
    opFile.Error = failed
    return S_ERROR(failed)
  replicas = replicas["Successful"].get(opFile.LFN, {})
  noReplicas = False
  if not replicas:
    # No *active* replica -- check whether any replica exists at all.
    allReplicas = dataManager.getReplicas(opFile.LFN, getUrl=False)
    if allReplicas['OK']:
      allReplicas = allReplicas['Value']['Successful'].get(opFile.LFN, {})
      if not allReplicas:
        result['NoReplicas'].append(None)
        noReplicas = True
      else:
        # There are replicas but we cannot get metadata because the replica is not active
        result['NoActiveReplicas'] += list(allReplicas)
      log.verbose("File has no%s replica in File Catalog" % ('' if noReplicas else ' active'), opFile.LFN)
    else:
      return allReplicas

  if not opFile.Checksum or hexAdlerToInt(opFile.Checksum) is False:
    # Set Checksum to FC checksum if not set in the request
    fcMetadata = FileCatalog().getFileMetadata(opFile.LFN)
    fcChecksum = fcMetadata.get(
        'Value',
        {}).get(
        'Successful',
        {}).get(
        opFile.LFN,
        {}).get('Checksum')
    # Replace opFile.Checksum if it doesn't match a valid FC checksum
    if fcChecksum:
      if hexAdlerToInt(fcChecksum) is not False:
        opFile.Checksum = fcChecksum
        opFile.ChecksumType = fcMetadata['Value']['Successful'][opFile.LFN].get('ChecksumType', 'Adler32')
      else:
        opFile.Checksum = None

  # If no replica was found, return what we collected as information
  if not replicas:
    return S_OK(result)

  for repSEName in replicas:
    # Ask each SE for per-replica metadata (existence + checksum).
    repSEMetadata = StorageElement(repSEName).getFileMetadata(opFile.LFN)
    error = repSEMetadata.get('Message', repSEMetadata.get('Value', {}).get('Failed', {}).get(opFile.LFN))
    if error:
      log.warn('unable to get metadata at %s for %s' % (repSEName, opFile.LFN), error.replace('\n', ''))
      if 'File does not exist' in error or 'No such file' in error:
        result['NoReplicas'].append(repSEName)
      else:
        result["NoMetadata"].append(repSEName)
    elif not noReplicas:
      repSEMetadata = repSEMetadata['Value']['Successful'][opFile.LFN]

      seChecksum = hexAdlerToInt(repSEMetadata.get("Checksum"))
      # As from here seChecksum is an integer or False, not a hex string!
      if seChecksum is False and opFile.Checksum:
        result['NoMetadata'].append(repSEName)
      elif not seChecksum and opFile.Checksum:
        # SE reports no checksum at all: drop ours so the comparison below
        # is skipped rather than failing spuriously.
        opFile.Checksum = None
        opFile.ChecksumType = None
      elif seChecksum and (not opFile.Checksum or opFile.Checksum == 'False'):
        # Use the SE checksum (convert to hex) and force type to be Adler32
        opFile.Checksum = intAdlerToHex(seChecksum)
        opFile.ChecksumType = 'Adler32'

      if not opFile.Checksum or not seChecksum or compareAdler(
              intAdlerToHex(seChecksum), opFile.Checksum):
        # # All checksums are OK
        result["Valid"].append(repSEName)
      else:
        log.warn(" %s checksum mismatch, FC: '%s' @%s: '%s'" %
                 (opFile.LFN, opFile.Checksum, repSEName, intAdlerToHex(seChecksum)))
        result["Bad"].append(repSEName)
    else:
      # If a replica was found somewhere, don't set the file as no replicas
      result['NoReplicas'] = []

  return S_OK(result)
########################################################################
class ReplicateAndRegister(DMSRequestOperationsBase):
"""
.. class:: ReplicateAndRegister
ReplicateAndRegister operation handler
"""
def __init__(self, operation=None, csPath=None):
  """c'tor

  :param self: self reference
  :param Operation operation: Operation instance
  :param str csPath: CS path for this handler
  """
  super(ReplicateAndRegister, self).__init__(operation, csPath)
  # # own gMonitor stuff for files
  # Per-file replication/registration counters (rates in files per minute).
  gMonitor.registerActivity("ReplicateAndRegisterAtt", "Replicate and register attempted",
                            "RequestExecutingAgent", "Files/min", gMonitor.OP_SUM)
  gMonitor.registerActivity("ReplicateOK", "Replications successful",
                            "RequestExecutingAgent", "Files/min", gMonitor.OP_SUM)
  gMonitor.registerActivity("ReplicateFail", "Replications failed",
                            "RequestExecutingAgent", "Files/min", gMonitor.OP_SUM)
  gMonitor.registerActivity("RegisterOK", "Registrations successful",
                            "RequestExecutingAgent", "Files/min", gMonitor.OP_SUM)
  gMonitor.registerActivity("RegisterFail", "Registrations failed",
                            "RequestExecutingAgent", "Files/min", gMonitor.OP_SUM)
  # # for FTS
  # Counters for FTS scheduling attempts and outcomes.
  gMonitor.registerActivity("FTSScheduleAtt", "Files schedule attempted",
                            "RequestExecutingAgent", "Files/min", gMonitor.OP_SUM)
  gMonitor.registerActivity("FTSScheduleOK", "File schedule successful",
                            "RequestExecutingAgent", "Files/min", gMonitor.OP_SUM)
  gMonitor.registerActivity("FTSScheduleFail", "File schedule failed",
                            "RequestExecutingAgent", "Files/min", gMonitor.OP_SUM)
  # # SE cache
  # Clients
  self.fc = FileCatalog()  # file catalog client used by __checkReplicas()
def __call__(self):
  """ call me maybe """
  # Refresh file states from the catalog before scheduling anything.
  res = self.__checkReplicas()
  if not res["OK"]:
    self.log.error('Failed to check replicas', res["Message"])
  # When FTS mode is enabled, schedule via FTS3 -- unless the request
  # owner's group is explicitly banned from using the FTS system.
  if getattr(self, "FTSMode", False):
    bannedGroups = getattr(self, "FTSBannedGroups", ())
    if self.request.OwnerGroup in bannedGroups:
      self.log.verbose("usage of FTS system is banned for request's owner")
      return self.dmTransfer()
    return self.fts3Transfer()
  return self.dmTransfer()
def __checkReplicas(self):
  """ check done replicas and update file states

  Marks files missing from the catalog as Failed and files already present
  at every target SE as Done.

  :return: S_OK() on success, or the S_ERROR from the catalog lookup
  """
  # Files still pending, keyed by LFN.  (This module is Python 2 --
  # iteritems() below.)
  waitingFiles = dict([(opFile.LFN, opFile) for opFile in self.operation
                       if opFile.Status in ("Waiting", "Scheduled")])
  targetSESet = set(self.operation.targetSEList)

  replicas = self.fc.getReplicas(waitingFiles.keys())
  if not replicas["OK"]:
    self.log.error('Failed to get replicas', replicas["Message"])
    return replicas

  reMissing = re.compile(r".*such file.*")
  for failedLFN, errStr in replicas["Value"]["Failed"].iteritems():
    waitingFiles[failedLFN].Error = errStr
    # "no such file" means the file is gone from the catalog: fail it.
    if reMissing.search(errStr.lower()):
      self.log.error("File does not exists", failedLFN)
      gMonitor.addMark("ReplicateFail", len(targetSESet))
      waitingFiles[failedLFN].Status = "Failed"

  for successfulLFN, reps in replicas["Value"]["Successful"].iteritems():
    # Already replicated to every target SE -> nothing left to do.
    if targetSESet.issubset(set(reps)):
      self.log.info("file %s has been replicated to all targets" % successfulLFN)
      waitingFiles[successfulLFN].Status = "Done"

  return S_OK()
def _addMetadataToFiles(self, toSchedule):
  """ Add metadata to those files that need to be scheduled through FTS

  toSchedule is a dictionary:
  {'lfn1': [opFile, validTargetSEs], 'lfn2': [opFile, validTargetSEs]}
  (each value is a [File, targets] pair as built in fts3Transfer(); only
  the File object at index 0 is used here)

  :return: S_OK(dict LFN -> File with GUID/Checksum/Size filled in), or the
           S_ERROR from the catalog metadata lookup
  """
  if toSchedule:
    self.log.info("found %s files to schedule, getting metadata from FC" % len(toSchedule))
  else:
    self.log.verbose("No files to schedule")
    return S_OK([])

  res = self.fc.getFileMetadata(toSchedule.keys())
  if not res['OK']:
    return res
  else:
    # Files whose metadata could not be fetched are skipped (warned only).
    if res['Value']['Failed']:
      self.log.warn("Can't schedule %d files: problems getting the metadata: %s" %
                    (len(res['Value']['Failed']), ', '.join(res['Value']['Failed'])))
    metadata = res['Value']['Successful']

    filesToSchedule = {}

    for lfn, lfnMetadata in metadata.iteritems():
      opFileToSchedule = toSchedule[lfn][0]
      opFileToSchedule.GUID = lfnMetadata['GUID']
      # In principle this is defined already in filterReplicas()
      if not opFileToSchedule.Checksum:
        opFileToSchedule.Checksum = metadata[lfn]['Checksum']
        opFileToSchedule.ChecksumType = metadata[lfn]['ChecksumType']
      opFileToSchedule.Size = metadata[lfn]['Size']

      filesToSchedule[opFileToSchedule.LFN] = opFileToSchedule

    return S_OK(filesToSchedule)
def _filterReplicas(self, opFile):
  """ filter out banned/invalid source SEs

  Thin wrapper delegating to the module-level filterReplicas() with this
  handler's logger and DataManager.
  """
  return filterReplicas(opFile, logger=self.log, dataManager=self.dm)
def _checkExistingFTS3Operations(self):
  """
  Check if there are ongoing FTS3Operation for the current RMS Operation

  Under some conditions, we can be trying to schedule files while
  there is still an FTS transfer going on. This typically happens
  when the REA hangs. To prevent further race condition, we check
  if there are FTS3Operations in a non Final state matching the
  current operation ID. If so, we put the corresponding files in
  scheduled mode. We will then wait till the FTS3 Operation performs
  the callback

  :returns: S_OK with True if we can go on, False if we should stop the processing
  """

  res = FTS3Client().getOperationsFromRMSOpID(self.operation.OperationID)
  if not res['OK']:
    self.log.debug(
        "Could not get FTS3Operations matching OperationID",
        self.operation.OperationID)
    return res

  existingFTSOperations = res['Value']
  # It is ok to have FTS Operations in a final state, so we
  # care only about the others
  unfinishedFTSOperations = [
      ops for ops in existingFTSOperations if ops.status not in FTS3TransferOperation.FINAL_STATES]

  if not unfinishedFTSOperations:
    self.log.debug("No ongoing FTS3Operations, all good")
    return S_OK(True)

  self.log.warn("Some FTS3Operations already exist for the RMS Operation:",
                [op.operationID for op in unfinishedFTSOperations])

  # This would really be a screwed up situation !
  if len(unfinishedFTSOperations) > 1:
    self.log.warn("That's a serious problem !!")

  # We take the rmsFileID of the files in the Operations,
  # find the corresponding File object, and set them scheduled
  rmsFileIDsToSetScheduled = set(
      [ftsFile.rmsFileID for ftsOp in unfinishedFTSOperations for ftsFile in ftsOp.ftsFiles])

  for opFile in self.operation:
    # If it is in the DB, it has a FileID
    opFileID = opFile.FileID
    if opFileID in rmsFileIDsToSetScheduled:
      self.log.warn("Setting RMSFile as already scheduled", opFileID)
      opFile.Status = "Scheduled"

  # We return here such that the Request is set back to Scheduled in the DB
  # With no further modification
  return S_OK(False)
def fts3Transfer(self):
    """Replicate and register the operation's waiting files using FTS3.

    Files with valid replicas not yet present at every target SE are turned
    into FTS3Files and persisted as one FTS3TransferOperation. Files that
    could not be scheduled fall back to a DataManager transfer at the end.

    :returns: S_OK / S_ERROR
    """
    self.log.info("scheduling files in FTS3...")
    # Check first if we do not have ongoing transfers
    res = self._checkExistingFTS3Operations()
    if not res['OK']:
        return res
    # if res['Value'] is False
    # it means that there are ongoing transfers
    # and we should stop here
    if res['Value'] is False:
        # return S_OK such that the request is put back
        return S_OK()
    fts3Files = []
    # LFN -> [opFile, validTargets] for files that need scheduling
    toSchedule = {}
    # Dict which maps the FileID to the object
    rmsFilesIds = {}
    for opFile in self.getWaitingFilesList():
        rmsFilesIds[opFile.FileID] = opFile
        opFile.Error = ''
        gMonitor.addMark("FTSScheduleAtt")
        # # check replicas
        replicas = self._filterReplicas(opFile)
        if not replicas["OK"]:
            continue
        replicas = replicas["Value"]
        validReplicas = replicas["Valid"]
        noMetaReplicas = replicas["NoMetadata"]
        noReplicas = replicas['NoReplicas']
        badReplicas = replicas['Bad']
        noPFN = replicas['NoPFN']
        if validReplicas:
            # Only schedule towards targets that do not already hold a replica
            validTargets = list(set(self.operation.targetSEList) - set(validReplicas))
            if not validTargets:
                self.log.info("file %s is already present at all targets" % opFile.LFN)
                opFile.Status = "Done"
            else:
                toSchedule[opFile.LFN] = [opFile, validTargets]
        else:
            # No usable replica: record why and, for fatal cases, fail the file
            gMonitor.addMark("FTSScheduleFail")
            if noMetaReplicas:
                self.log.warn("unable to schedule '%s', couldn't get metadata at %s" % (opFile.LFN, ','.join(noMetaReplicas)))
                opFile.Error = "Couldn't get metadata"
            elif noReplicas:
                self.log.error(
                    "Unable to schedule transfer", "File %s doesn't exist at %s" %
                    (opFile.LFN, ','.join(noReplicas)))
                opFile.Error = 'No replicas found'
                opFile.Status = 'Failed'
            elif badReplicas:
                self.log.error(
                    "Unable to schedule transfer",
                    "File %s, all replicas have a bad checksum at %s" %
                    (opFile.LFN,
                     ','.join(badReplicas)))
                opFile.Error = 'All replicas have a bad checksum'
                opFile.Status = 'Failed'
            elif noPFN:
                self.log.warn(
                    "unable to schedule %s, could not get a PFN at %s" %
                    (opFile.LFN, ','.join(noPFN)))
    # Enrich the candidate files with metadata (checksum, size, ...)
    res = self._addMetadataToFiles(toSchedule)
    if not res['OK']:
        return res
    else:
        filesToSchedule = res['Value']
        for lfn in filesToSchedule:
            opFile = filesToSchedule[lfn]
            validTargets = toSchedule[lfn][1]
            # One FTS3File per (file, target SE) pair
            for targetSE in validTargets:
                ftsFile = FTS3File.fromRMSFile(opFile, targetSE)
                fts3Files.append(ftsFile)
    if fts3Files:
        res = Registry.getUsernameForDN(self.request.OwnerDN)
        if not res['OK']:
            self.log.error(
                "Cannot get username for DN", "%s %s" %
                (self.request.OwnerDN, res['Message']))
            return res
        username = res['Value']
        # Persist a single FTS3 operation covering all scheduled files
        fts3Operation = FTS3TransferOperation.fromRMSObjects(self.request, self.operation, username)
        fts3Operation.ftsFiles = fts3Files
        ftsSchedule = FTS3Client().persistOperation(fts3Operation)
        if not ftsSchedule["OK"]:
            self.log.error("Completely failed to schedule to FTS3:", ftsSchedule["Message"])
            return ftsSchedule
        # might have nothing to schedule
        ftsSchedule = ftsSchedule["Value"]
        self.log.info("Scheduled with FTS3Operation id %s" % ftsSchedule)
        self.log.info("%d files have been scheduled to FTS3" % len(fts3Files))
        for ftsFile in fts3Files:
            opFile = rmsFilesIds[ftsFile.rmsFileID]
            gMonitor.addMark("FTSScheduleOK", 1)
            opFile.Status = "Scheduled"
            self.log.debug("%s has been scheduled for FTS" % opFile.LFN)
    else:
        self.log.info("No files to schedule after metadata checks")
    # Just in case some transfers could not be scheduled, try them with RM
    return self.dmTransfer(fromFTS=True)
def dmTransfer(self, fromFTS=False):
    """Replicate and register the operation's waiting files using the DataManager.

    :param bool fromFTS: True when invoked as a fallback after FTS3
        scheduling; only changes the informational log message.
    :returns: S_OK / S_ERROR
    """
    # # get waiting files. If none just return
    # # source SE (optional; empty means "use any valid replica")
    sourceSE = self.operation.SourceSE if self.operation.SourceSE else None
    if sourceSE:
        # # check source se for read
        bannedSource = self.checkSEsRSS(sourceSE, 'ReadAccess')
        if not bannedSource["OK"]:
            gMonitor.addMark("ReplicateAndRegisterAtt", len(self.operation))
            gMonitor.addMark("ReplicateFail", len(self.operation))
            return bannedSource
        if bannedSource["Value"]:
            self.operation.Error = "SourceSE %s is banned for reading" % sourceSE
            self.log.info(self.operation.Error)
            return S_OK(self.operation.Error)
    # # check targetSEs for write
    bannedTargets = self.checkSEsRSS()
    if not bannedTargets['OK']:
        gMonitor.addMark("ReplicateAndRegisterAtt", len(self.operation))
        gMonitor.addMark("ReplicateFail", len(self.operation))
        return bannedTargets
    if bannedTargets['Value']:
        self.operation.Error = "%s targets are banned for writing" % ",".join(bannedTargets['Value'])
        return S_OK(self.operation.Error)
    # Can continue now
    self.log.verbose("No targets banned for writing")
    waitingFiles = self.getWaitingFilesList()
    if not waitingFiles:
        return S_OK()
    # # loop over files
    if fromFTS:
        self.log.info("Trying transfer using replica manager as FTS failed")
    else:
        self.log.info("Transferring files using Data manager...")
    # error message -> occurrence count, reported once at the end
    errors = defaultdict(int)
    delayExecution = 0
    for opFile in waitingFiles:
        # NOTE(review): files already flagged in error are only counted here,
        # not skipped (no `continue`), so they are re-attempted below and
        # their Error is reset — confirm this retry behaviour is intended.
        if opFile.Error in ("Couldn't get metadata",
                            "File doesn't exist",
                            'No active replica found',
                            "All replicas have a bad checksum",):
            err = "File already in error status"
            errors[err] += 1
        gMonitor.addMark("ReplicateAndRegisterAtt", 1)
        opFile.Error = ''
        lfn = opFile.LFN
        # Check if replica is at the specified source
        replicas = self._filterReplicas(opFile)
        if not replicas["OK"]:
            self.log.error('Failed to check replicas', replicas["Message"])
            continue
        replicas = replicas["Value"]
        validReplicas = replicas.get("Valid")
        noMetaReplicas = replicas.get("NoMetadata")
        noReplicas = replicas.get('NoReplicas')
        badReplicas = replicas.get('Bad')
        noActiveReplicas = replicas.get('NoActiveReplicas')
        if not validReplicas:
            # No usable replica: classify the failure, then move to next file
            gMonitor.addMark("ReplicateFail")
            if noMetaReplicas:
                err = "Couldn't get metadata"
                errors[err] += 1
                self.log.verbose(
                    "unable to replicate '%s', couldn't get metadata at %s" %
                    (opFile.LFN, ','.join(noMetaReplicas)))
                opFile.Error = err
            elif noReplicas:
                err = "File doesn't exist"
                errors[err] += 1
                self.log.verbose(
                    "Unable to replicate", "File %s doesn't exist at %s" %
                    (opFile.LFN, ','.join(noReplicas)))
                opFile.Error = err
                opFile.Status = 'Failed'
            elif badReplicas:
                err = "All replicas have a bad checksum"
                errors[err] += 1
                self.log.error(
                    "Unable to replicate", "%s, all replicas have a bad checksum at %s" %
                    (opFile.LFN, ','.join(badReplicas)))
                opFile.Error = err
                opFile.Status = 'Failed'
            elif noActiveReplicas:
                err = "No active replica found"
                errors[err] += 1
                self.log.verbose("Unable to schedule transfer",
                                 "%s, %s at %s" % (opFile.LFN, err, ','.join(noActiveReplicas)))
                opFile.Error = err
                # All source SEs are banned, delay execution by 1 hour
                delayExecution = 60
            continue
        # # get the first one in the list
        if sourceSE not in validReplicas:
            if sourceSE:
                err = "File not at specified source"
                errors[err] += 1
                self.log.warn(
                    "%s is not at specified sourceSE %s, changed to %s" %
                    (lfn, sourceSE, validReplicas[0]))
            sourceSE = validReplicas[0]
        # # loop over targetSE
        catalogs = self.operation.Catalog
        if catalogs:
            catalogs = [cat.strip() for cat in catalogs.split(',')]
        for targetSE in self.operation.targetSEList:
            # # call DataManager
            if targetSE in validReplicas:
                self.log.warn("Request to replicate %s to an existing location: %s" % (lfn, targetSE))
                continue
            res = self.dm.replicateAndRegister(lfn, targetSE, sourceSE=sourceSE, catalog=catalogs)
            if res["OK"]:
                if lfn in res["Value"]["Successful"]:
                    if "replicate" in res["Value"]["Successful"][lfn]:
                        repTime = res["Value"]["Successful"][lfn]["replicate"]
                        prString = "file %s replicated at %s in %s s." % (lfn, targetSE, repTime)
                        gMonitor.addMark("ReplicateOK", 1)
                        if "register" in res["Value"]["Successful"][lfn]:
                            gMonitor.addMark("RegisterOK", 1)
                            regTime = res["Value"]["Successful"][lfn]["register"]
                            prString += ' and registered in %s s.' % regTime
                            self.log.info(prString)
                        else:
                            # Replica exists but registration failed: queue a
                            # RegisterReplica operation right after this one
                            gMonitor.addMark("RegisterFail", 1)
                            prString += " but failed to register"
                            self.log.warn(prString)
                            opFile.Error = "Failed to register"
                            # # add register replica operation
                            registerOperation = self.getRegisterOperation(
                                opFile, targetSE, type='RegisterReplica')
                            self.request.insertAfter(registerOperation, self.operation)
                    else:
                        self.log.error("Failed to replicate", "%s to %s" % (lfn, targetSE))
                        gMonitor.addMark("ReplicateFail", 1)
                        opFile.Error = "Failed to replicate"
                else:
                    gMonitor.addMark("ReplicateFail", 1)
                    reason = res["Value"]["Failed"][lfn]
                    self.log.error(
                        "Failed to replicate and register", "File %s at %s:" %
                        (lfn, targetSE), reason)
                    opFile.Error = reason
            else:
                gMonitor.addMark("ReplicateFail", 1)
                opFile.Error = "DataManager error: %s" % res["Message"]
                self.log.error("DataManager error", res["Message"])
        if not opFile.Error:
            if len(self.operation.targetSEList) > 1:
                self.log.info("file %s has been replicated to all targetSEs" % lfn)
            opFile.Status = "Done"
    # Log error counts
    if delayExecution:
        self.log.info("Delay execution of the request by %d minutes" % delayExecution)
        self.request.delayNextExecution(delayExecution)
    # NOTE(review): dict.iteritems() is Python 2 only; this module appears to
    # target py2 — use items() if the file is ever ported to py3.
    for error, count in errors.iteritems():
        self.log.error(error, 'for %d files' % count)
    return S_OK()
|
petricm/DIRAC
|
DataManagementSystem/Agent/RequestOperations/ReplicateAndRegister.py
|
Python
|
gpl-3.0
| 23,812
|
[
"DIRAC"
] |
08a1b0c9eead2537406a2cb80fcb63d7d07da762db8aee801d91ef8427c2fd50
|
# Copyright (c) 2020, Ecole Polytechnique Federale de Lausanne, Blue Brain Project
# All rights reserved.
#
# This file is part of NeuroM <https://github.com/BlueBrain/NeuroM>
#
# Redistribution and use in source and binary forms, with or without
# modification, are permitted provided that the following conditions are met:
#
# 1. Redistributions of source code must retain the above copyright
# notice, this list of conditions and the following disclaimer.
# 2. Redistributions in binary form must reproduce the above copyright
# notice, this list of conditions and the following disclaimer in the
# documentation and/or other materials provided with the distribution.
# 3. Neither the name of the copyright holder nor the names of
# its contributors may be used to endorse or promote products
# derived from this software without specific prior written permission.
#
# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" AND
# ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED
# WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE
# DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE LIABLE FOR ANY
# DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES
# (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES;
# LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND
# ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
# (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS
# SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
"""Morphology features.
Any public function from this namespace can be called via the features mechanism. If calling
directly the function in this namespace can only accept a morphology as its input. If you want to
apply it to a morphology population then you must use the features mechanism e.g. ``features.get``.
The features mechanism does not allow you to apply these features to neurites.
>>> import neurom
>>> from neurom import features
>>> m = neurom.load_morphology('path/to/morphology')
>>> features.get('soma_surface_area', m)
>>> population = neurom.load_morphologies('path/to/morphs')
>>> features.get('sholl_crossings', population)
For more details see :ref:`features`.
"""
import warnings
from functools import partial
import math
import numpy as np
from neurom import morphmath
from neurom.core.morphology import iter_neurites, iter_segments, Morphology
from neurom.core.types import tree_type_checker as is_type
from neurom.core.dataformat import COLS
from neurom.core.types import NeuriteType
from neurom.exceptions import NeuroMError
from neurom.features import feature, NameSpace, neurite as nf
from neurom.utils import str_to_plane
feature = partial(feature, namespace=NameSpace.NEURON)
@feature(shape=())
def soma_volume(morph):
    """Return the volume of the soma of *morph*."""
    soma = morph.soma
    return soma.volume
@feature(shape=())
def soma_surface_area(morph):
    """Return the surface area of the soma of *morph*.

    Note:
        The soma is assumed to be spherical, so the area is ``4 * pi * r**2``.
    """
    radius = morph.soma.radius
    return 4 * math.pi * radius ** 2
@feature(shape=())
def soma_radius(morph):
    """Return the radius of the soma of *morph*."""
    soma = morph.soma
    return soma.radius
@feature(shape=())
def max_radial_distance(morph, neurite_type=NeuriteType.all):
    """Return the maximum radial distance over the termination sections."""
    distances = [
        nf.max_radial_distance(neurite)
        for neurite in iter_neurites(morph, filt=is_type(neurite_type))
    ]
    if not distances:
        return 0.
    return max(distances)
@feature(shape=(...,))
def number_of_sections_per_neurite(morph, neurite_type=NeuriteType.all):
    """Return the section count of each selected neurite."""
    counts = []
    for neurite in iter_neurites(morph, filt=is_type(neurite_type)):
        counts.append(nf.number_of_sections(neurite))
    return counts
@feature(shape=(...,))
def total_length_per_neurite(morph, neurite_type=NeuriteType.all):
    """Return the total length of each selected neurite."""
    selected = iter_neurites(morph, filt=is_type(neurite_type))
    return [nf.total_length(neurite) for neurite in selected]
@feature(shape=(...,))
def total_area_per_neurite(morph, neurite_type=NeuriteType.all):
    """Return the total surface area of each selected neurite."""
    selected = iter_neurites(morph, filt=is_type(neurite_type))
    return [nf.total_area(neurite) for neurite in selected]
@feature(shape=(...,))
def total_volume_per_neurite(morph, neurite_type=NeuriteType.all):
    """Return the total volume of each selected neurite."""
    selected = iter_neurites(morph, filt=is_type(neurite_type))
    return [nf.total_volume(neurite) for neurite in selected]
@feature(shape=(...,))
def trunk_origin_azimuths(morph, neurite_type=NeuriteType.all):
    """Return the trunk origin azimuth of each selected neurite.

    The azimuth is the angle between the x-axis and the vector
    (initial tree point - soma center) projected on the x-z plane,
    in the range [-pi, pi] radians.
    """
    soma = morph.soma
    azimuths = []
    for neurite in iter_neurites(morph, filt=is_type(neurite_type)):
        origin_vector = morphmath.vector(neurite.root_node.points[0], soma.center)
        azimuths.append(morphmath.azimuth_from_vector(origin_vector))
    return azimuths
@feature(shape=(...,))
def trunk_origin_elevations(morph, neurite_type=NeuriteType.all):
    """Return the trunk origin elevation of each selected neurite.

    The elevation is the angle between the x-axis and the vector
    (initial tree point - soma center) on the x-y half-plane,
    in the range [-pi/2, pi/2] radians.
    """
    soma = morph.soma
    elevations = []
    for neurite in iter_neurites(morph, filt=is_type(neurite_type)):
        origin_vector = morphmath.vector(neurite.root_node.points[0], soma.center)
        elevations.append(morphmath.elevation_from_vector(origin_vector))
    return elevations
@feature(shape=(...,))
def trunk_vectors(morph, neurite_type=NeuriteType.all):
    """Return the vector from the soma center to each trunk origin."""
    center = morph.soma.center
    return [
        morphmath.vector(neurite.root_node.points[0], center)
        for neurite in iter_neurites(morph, filt=is_type(neurite_type))
    ]
@feature(shape=(...,))
def trunk_angles(
    morph,
    neurite_type=NeuriteType.all,
    coords_only="xy",
    sort_along="xy",
    consecutive_only=True,
):
    """Calculate the angles between all the trunks of the morph.

    By default, the angles are defined on the x-y plane and the trees are sorted from the y axis
    and anticlock-wise.

    Args:
        morph: The morphology to process.
        neurite_type: Only the neurites of this type are considered.
        coords_only: Consider only the coordinates listed in this argument (should be a combination
            of 'x', 'y' and 'z').
        sort_along: Sort angles according to the given plane (should be 'xy', 'xz' or 'yz') before
            computing the angles between the trunks.
        consecutive_only: Compute only the angles between consecutive trunks (the default order of
            neurite trunks is the same as the one used by
            :func:`neurom.core.morphology.iter_neurites` but this order can be changed using the
            `sort_along` parameter).

    Returns:
        list[[float]] or list[float]:
            The angles between each trunk and all the others. If ``consecutive_only`` is ``True``,
            only the angle with the next trunk is returned for each trunk.
    """
    vectors = np.array(trunk_vectors(morph, neurite_type=neurite_type))
    # In order to avoid the failure of the process in case the neurite_type does not exist
    if len(vectors) == 0:
        return []
    if sort_along:
        # Sorting angles according to the given plane: order trunks by the
        # angle their projection makes with the [0, 1] axis of that plane.
        # NOTE(review): i / np.linalg.norm(i) divides by zero for a trunk
        # whose projection on the sort plane is the null vector — confirm
        # that such degenerate trunks cannot occur here.
        sort_coords = str_to_plane(sort_along)
        order = np.argsort(
            np.fromiter(
                (
                    morphmath.angle_between_projections(i / np.linalg.norm(i), [0, 1])
                    for i in vectors[:, sort_coords]
                ),
                dtype=float)
        )
        vectors = vectors[order]
    # Select coordinates to consider
    if coords_only:
        coords = str_to_plane(coords_only)
        vectors = vectors[:, coords]
    # Compute angles between each trunk and the next ones.
    # Stacking the vectors twice lets each trunk "wrap around" and compare
    # itself with every other trunk in cyclic order.
    n_vectors = len(vectors)
    cycling_vectors = np.vstack([vectors, vectors])
    angles = [
        (num_i, [
            morphmath.angle_between_vectors(i, j)
            for j in cycling_vectors[num_i: num_i + n_vectors]
        ])
        for num_i, i in enumerate(vectors)
    ]
    if consecutive_only:
        # The last entry of each cyclic list is the angle to the next trunk
        angles = [i[1][-1] for i in angles if i[1]]
    else:
        angles = [i[1] for i in angles]
    return angles
@feature(shape=(...,))
def trunk_angles_inter_types(
    morph,
    source_neurite_type=NeuriteType.apical_dendrite,
    target_neurite_type=NeuriteType.basal_dendrite,
    closest_component=None,
):
    """Calculate the angles between the trunks of the morph of a source type to target type.

    Args:
        morph: The morphology to process.
        source_neurite_type: Only the neurites of this type are considered as sources.
        target_neurite_type: Only the neurites of this type are considered as targets.
        closest_component:
            If ``closest_component`` is not ``None``, only one element is returned for each neurite
            of source type:

            * if set to 0, the one with the lowest absolute 3d angle is returned.
            * if set to 1, the one with the lowest absolute elevation angle is returned.
            * if set to 2, the one with the lowest absolute azimuth angle is returned.

    Returns:
        list[list[float]] or list[float]:
            If ``closest_component`` is ``None``, a list of 3 elements is returned for each couple
            of neurites:

            * the absolute 3d angle between the two vectors.
            * the elevation angle (or polar angle) between the two vectors.
            * the azimuth angle between the two vectors.

            If ``closest_component`` is not ``None``, only one of these values is returned for each
            couple.
    """
    source_vectors = trunk_vectors(morph, neurite_type=source_neurite_type)
    target_vectors = trunk_vectors(morph, neurite_type=target_neurite_type)
    # In order to avoid the failure of the process in case the neurite_type does not exist
    if len(source_vectors) == 0 or len(target_vectors) == 0:
        return []
    # Fix: use the builtin ``float`` instead of ``np.float``, which was
    # deprecated in NumPy 1.20 and removed in NumPy 1.24 (and matches the
    # dtype used by trunk_angles_from_vector below).
    angles = np.empty((len(source_vectors), len(target_vectors), 3), dtype=float)
    for i, source in enumerate(source_vectors):
        for j, target in enumerate(target_vectors):
            angles[i, j, 0] = morphmath.angle_between_vectors(source, target)
            angles[i, j, [1, 2]] = (
                morphmath.spherical_from_vector(target) - morphmath.spherical_from_vector(source)
            )
    # Ensure elevation differences are in [-pi, pi]
    angles[:, :, 1] = morphmath.angles_to_pi_interval(angles[:, :, 1])
    # Ensure azimuth differences are in [-2pi, 2pi]
    angles[:, :, 2] = morphmath.angles_to_pi_interval(angles[:, :, 2], scale=2.0)
    if closest_component is not None:
        # For each source keep only the target couple whose selected
        # component has the smallest absolute value.
        angles = angles[
            np.arange(len(angles)),
            np.argmin(np.abs(angles[:, :, closest_component]), axis=1)
        ][:, np.newaxis, :]
    return angles.tolist()
@feature(shape=(...,))
def trunk_angles_from_vector(
    morph,
    neurite_type=NeuriteType.all,
    vector=None,
):
    """Calculate the angles between the trunks of the morph of a given type and a given vector.

    Args:
        morph: The morphology to process.
        neurite_type: Only the neurites of this type are considered.
        vector: The reference vector. If ``None``, the reference vector is set to ``(0, 1, 0)``.

    Returns:
        list[list[float]]:
            For each neurite, an array with 3 elements is returned:

            * the absolute 3d angle between the two vectors.
            * the elevation angle (or polar angle) between the two vectors.
            * the azimuth angle between the two vectors.
    """
    reference = (0, 1, 0) if vector is None else vector
    trunk_vecs = np.array(trunk_vectors(morph, neurite_type=neurite_type))
    # Gracefully handle a neurite_type with no matching neurites
    if len(trunk_vecs) == 0:
        return []
    result = np.empty((len(trunk_vecs), 3), dtype=float)
    for idx, trunk_vec in enumerate(trunk_vecs):
        result[idx, 0] = morphmath.angle_between_vectors(reference, trunk_vec)
        result[idx, (1, 2)] = (
            morphmath.spherical_from_vector(trunk_vec)
            - morphmath.spherical_from_vector(reference)
        )
    # Normalize elevation differences into [-pi, pi]
    result[:, 1] = morphmath.angles_to_pi_interval(result[:, 1])
    # Normalize azimuth differences into [-2pi, 2pi]
    result[:, 2] = morphmath.angles_to_pi_interval(result[:, 2], scale=2)
    return result.tolist()
@feature(shape=(...,))
def trunk_origin_radii(
    morph,
    neurite_type=NeuriteType.all,
    min_length_filter=None,
    max_length_filter=None,
):
    """Radii of the trunk sections of neurites in a morph.

    .. warning::
        If ``min_length_filter`` and / or ``max_length_filter`` is given, the points are filtered
        and the mean radii of the remaining points is returned.
        Note that if the ``min_length_filter`` is greater than the path distance of the last point
        of the first section, the radius of this last point is returned.

    Args:
        morph: The morphology to process.
        neurite_type: Only the neurites of this type are considered.
        min_length_filter: The min length from which the neurite points are considered.
        max_length_filter: The max length from which the neurite points are considered.

    Returns:
        list[float]:
            * if ``min_length_filter`` and ``max_length_filter`` are ``None``, the radii of the
              first point of each neurite are returned.
            * else the mean radius of the points between the given ``min_length_filter`` and
              ``max_length_filter`` are returned.

    Raises:
        NeuroMError: if either filter is <= 0, or if ``min_length_filter`` is not strictly
            less than ``max_length_filter``.
    """
    # Fast path: no filtering requested, return the first-point radius per neurite
    if max_length_filter is None and min_length_filter is None:
        return [n.root_node.points[0][COLS.R]
                for n in iter_neurites(morph, filt=is_type(neurite_type))]
    # Validate the filter values before doing any work
    if min_length_filter is not None and min_length_filter <= 0:
        raise NeuroMError(
            "In 'trunk_origin_radii': the 'min_length_filter' value must be strictly greater "
            "than 0."
        )
    if max_length_filter is not None and max_length_filter <= 0:
        raise NeuroMError(
            "In 'trunk_origin_radii': the 'max_length_filter' value must be strictly greater "
            "than 0."
        )
    if (
        min_length_filter is not None
        and max_length_filter is not None
        and min_length_filter >= max_length_filter
    ):
        raise NeuroMError(
            "In 'trunk_origin_radii': the 'min_length_filter' value must be strictly less than the "
            "'max_length_filter' value."
        )

    def _mean_radius(neurite):
        # Mean radius of the root-section points whose path distance falls
        # inside [min_length_filter, max_length_filter].
        points = neurite.root_node.points
        interval_lengths = morphmath.interval_lengths(points)
        # Cumulative path distance from the section origin to each point
        path_lengths = np.insert(np.cumsum(interval_lengths), 0, 0)
        valid_pts = np.ones(len(path_lengths), dtype=bool)
        if min_length_filter is not None:
            valid_pts = (valid_pts & (path_lengths >= min_length_filter))
            if not valid_pts.any():
                # The whole section is shorter than min_length_filter:
                # fall back to the radius of the last point.
                warnings.warn(
                    "In 'trunk_origin_radii': the 'min_length_filter' value is greater than the "
                    "path distance of the last point of the last section so the radius of this "
                    "point is returned."
                )
                return points[-1, COLS.R]
        if max_length_filter is not None:
            valid_max = (path_lengths <= max_length_filter)
            valid_pts = (valid_pts & valid_max)
            if not valid_pts.any():
                # Filters excluded every point: fall back to the first point
                # past the min_length_filter distance.
                warnings.warn(
                    "In 'trunk_origin_radii': the 'min_length_filter' and 'max_length_filter' "
                    "values excluded all the points of the section so the radius of the first "
                    "point after the 'min_length_filter' path distance is returned."
                )
                # pylint: disable=invalid-unary-operand-type
                return points[~valid_max, COLS.R][0]
        return points[valid_pts, COLS.R].mean()

    return [_mean_radius(n) for n in iter_neurites(morph, filt=is_type(neurite_type))]
@feature(shape=(...,))
def trunk_section_lengths(morph, neurite_type=NeuriteType.all):
    """Return the length of the trunk section of each selected neurite."""
    lengths = []
    for neurite in iter_neurites(morph, filt=is_type(neurite_type)):
        lengths.append(morphmath.section_length(neurite.root_node.points))
    return lengths
@feature(shape=())
def number_of_neurites(morph, neurite_type=NeuriteType.all):
    """Return the number of neurites of the given type in *morph*."""
    count = 0
    for _ in iter_neurites(morph, filt=is_type(neurite_type)):
        count += 1
    return count
@feature(shape=(...,))
def neurite_volume_density(morph, neurite_type=NeuriteType.all):
    """Return the volume density of each selected neurite."""
    selected = iter_neurites(morph, filt=is_type(neurite_type))
    return [nf.volume_density(neurite) for neurite in selected]
@feature(shape=(...,))
def sholl_crossings(morph, neurite_type=NeuriteType.all, center=None, radii=None):
    """Calculate crossings of neurites.

    Args:
        morph(Morphology|list): morphology or a list of neurites
        neurite_type(NeuriteType): Type of neurite to use. By default ``NeuriteType.all`` is used.
        center(Point): center point, if None then soma center is taken
        radii(iterable of floats): radii for which crossings will be counted,
            if None then soma radius is taken

    Returns:
        Array of same length as radii, with a count of the number of crossings
        for the respective radius

    This function can also be used with a list of sections, as follow::

        secs = (sec for sec in nm.iter_sections(morph) if complex_filter(sec))
        sholl = nm.features.neuritefunc.sholl_crossings(secs,
                                                        center=morph.soma.center,
                                                        radii=np.arange(0, 1000, 100))
    """
    def _count_crossings(neurite, radius):
        """Used to count_crossings of segments in neurite with radius."""
        # Compare squared distances to avoid a sqrt per segment endpoint
        r2 = radius ** 2
        count = 0
        for start, end in iter_segments(neurite):
            start_dist2, end_dist2 = (morphmath.point_dist2(center, start),
                                      morphmath.point_dist2(center, end))
            # A segment crosses the sphere of this radius iff r^2 lies
            # between the squared endpoint distances, in either direction.
            count += int(start_dist2 <= r2 <= end_dist2 or
                         end_dist2 <= r2 <= start_dist2)
        return count
    # When center/radii defaults are needed, morph must be a full Morphology
    # with a soma to read them from.
    if center is None or radii is None:
        assert isinstance(morph, Morphology) and morph.soma, \
            '`sholl_crossings` input error. If `center` or `radii` is not set then `morph` is ' \
            'expected to be an instance of Morphology and have a soma.'
        if center is None:
            center = morph.soma.center
        if radii is None:
            radii = [morph.soma.radius]
    return [sum(_count_crossings(neurite, r)
                for neurite in iter_neurites(morph, filt=is_type(neurite_type)))
            for r in radii]
@feature(shape=(...,))
def sholl_frequency(morph, neurite_type=NeuriteType.all, step_size=10, bins=None):
    """Perform Sholl frequency calculations on a morph.

    Args:
        morph(Morphology): a morphology
        neurite_type(NeuriteType): which neurites to operate on
        step_size(float): step size between Sholl radii
        bins(iterable of floats): custom binning to use for the Sholl radii. If None, it uses
            intervals of step_size between min and max radii of ``morphologies``.

    Note:
        Given a morphology, the soma center is used for the concentric circles,
        which range from the soma radii, and the maximum radial distance
        in steps of `step_size`. Each segment of the morphology is tested, so a neurite that
        bends back on itself, and crosses the same Sholl radius will get counted as
        having crossed multiple times.

        If a `neurite_type` is specified and there are no trees corresponding to it, an empty
        list will be returned.
    """
    neurite_filter = is_type(neurite_type)
    if bins is None:
        # Default binning: from the soma edge outwards, in steps of step_size,
        # up to the largest point-to-soma distance among selected neurites.
        min_soma_edge = morph.soma.radius
        max_radius_per_neurite = [
            np.max(np.linalg.norm(n.points[:, COLS.XYZ] - morph.soma.center, axis=1))
            for n in morph.neurites if neurite_filter(n)
        ]
        if not max_radius_per_neurite:
            return []
        bins = np.arange(min_soma_edge, min_soma_edge + max(max_radius_per_neurite), step_size)
    return sholl_crossings(morph, neurite_type, morph.soma.center, bins)
def _extent_along_axis(morph, axis, neurite_type):
    """Return the peak-to-peak extent of the selected neurites along one axis.

    The morphology is filtered by neurite type and the extent is measured
    along the given coordinate axis (e.g. ``COLS.X``).
    """
    coords = np.fromiter(
        (value
         for neurite in iter_neurites(morph, filt=is_type(neurite_type))
         for value in neurite.points[:, axis]),
        dtype=np.float32)
    # No matching neurites / no points -> extent is zero
    if coords.size == 0:
        return 0.0
    return abs(np.ptp(coords))
@feature(shape=())
def total_width(morph, neurite_type=NeuriteType.all):
    """Return the extent of the morphology along the x axis."""
    return _extent_along_axis(morph, COLS.X, neurite_type)
@feature(shape=())
def total_height(morph, neurite_type=NeuriteType.all):
    """Return the extent of the morphology along the y axis."""
    return _extent_along_axis(morph, COLS.Y, neurite_type)
@feature(shape=())
def total_depth(morph, neurite_type=NeuriteType.all):
    """Return the extent of the morphology along the z axis."""
    return _extent_along_axis(morph, COLS.Z, neurite_type)
|
BlueBrain/NeuroM
|
neurom/features/morphology.py
|
Python
|
bsd-3-clause
| 22,328
|
[
"NEURON"
] |
ea0c375a08c27951fca7da4e27f02a1e5d2261766aa176966ca57eab8b2088e8
|
from common import Modules, load_yara_rules, PEParseModule, ModuleMetadata
from pefile import PE, RESOURCE_TYPE
from string import printable
class CyberGate(PEParseModule):
first_value_table = None
precomputed_list = None
def __init__(self):
md = ModuleMetadata(
module_name="cybergate",
bot_name="CyberGate",
description="RAT",
authors=["Kevin Breen <kevin@techanarchy.net>", "Brian Wallace (@botnet_hunter)"],
version="1.0.0",
date="Aug 30, 2015",
references=[
"https://github.com/kevthehermit/YaraRules/blob/master/CyberGate.yar",
"https://github.com/kevthehermit/RATDecoders/blob/master/CyberGate.py"
]
)
PEParseModule.__init__(self, md)
self.yara_rules = None
self.prng_seed = 0
def _generate_yara_rules(self):
if self.yara_rules is None:
self.yara_rules = load_yara_rules("cybergate.yara")
return self.yara_rules
@staticmethod
def xor_decode(data):
key = 0xBC
encoded = bytearray(data)
for i in range(len(encoded)):
encoded[i] ^= key
return filter(lambda x: x in printable, str(encoded))
@staticmethod
def config_extract(raw_data):
try:
pe = PE(data=raw_data)
try:
rt_string_idx = [
entry.id for entry in
pe.DIRECTORY_ENTRY_RESOURCE.entries].index(RESOURCE_TYPE['RT_RCDATA'])
except ValueError:
return None
except AttributeError:
return None
rt_string_directory = pe.DIRECTORY_ENTRY_RESOURCE.entries[rt_string_idx]
for entry in rt_string_directory.directory.entries:
if str(entry.name) == "XX-XX-XX-XX" or str(entry.name) == "CG-CG-CG-CG":
data_rva = entry.directory.entries[0].data.struct.OffsetToData
size = entry.directory.entries[0].data.struct.Size
data = pe.get_memory_mapped_image()[data_rva:data_rva + size]
config = data.split('####@####')
return config
except:
return None
@staticmethod
def run_config_extraction(data):
Config = {}
rawConfig = CyberGate.config_extract(data)
if rawConfig != None:
if len(rawConfig) > 20:
domains = ""
ports = ""
#Config sections 0 - 19 contain a list of Domains and Ports
for x in range(0,19):
if len(rawConfig[x]) > 1:
domains += CyberGate.xor_decode(rawConfig[x]).split(':')[0]
domains += "|"
ports += CyberGate.xor_decode(rawConfig[x]).split(':')[1]
ports += "|"
Config["Domain"] = domains
Config["Port"] = ports
Config["ServerID"] = CyberGate.xor_decode(rawConfig[20])
Config["Password"] = CyberGate.xor_decode(rawConfig[21])
Config["Install Flag"] = CyberGate.xor_decode(rawConfig[22])
Config["Install Directory"] = CyberGate.xor_decode(rawConfig[25])
Config["Install File Name"] = CyberGate.xor_decode(rawConfig[26])
Config["Active X Startup"] = CyberGate.xor_decode(rawConfig[27])
Config["REG Key HKLM"] = CyberGate.xor_decode(rawConfig[28])
Config["REG Key HKCU"] = CyberGate.xor_decode(rawConfig[29])
Config["Enable Message Box"] = CyberGate.xor_decode(rawConfig[30])
Config["Message Box Icon"] = CyberGate.xor_decode(rawConfig[31])
Config["Message Box Button"] = CyberGate.xor_decode(rawConfig[32])
Config["Install Message Title"] = CyberGate.xor_decode(rawConfig[33])
Config["Install Message Box"] = CyberGate.xor_decode(rawConfig[34]).replace('\r\n', ' ')
Config["Activate Keylogger"] = CyberGate.xor_decode(rawConfig[35])
Config["Keylogger Backspace = Delete"] = CyberGate.xor_decode(rawConfig[36])
Config["Keylogger Enable FTP"] = CyberGate.xor_decode(rawConfig[37])
Config["FTP Address"] = CyberGate.xor_decode(rawConfig[38])
Config["FTP Directory"] = CyberGate.xor_decode(rawConfig[39])
Config["FTP UserName"] = CyberGate.xor_decode(rawConfig[41])
Config["FTP Password"] = CyberGate.xor_decode(rawConfig[42])
Config["FTP Port"] = CyberGate.xor_decode(rawConfig[43])
Config["FTP Interval"] = CyberGate.xor_decode(rawConfig[44])
Config["Persistance"] = CyberGate.xor_decode(rawConfig[59])
Config["Hide File"] = CyberGate.xor_decode(rawConfig[60])
Config["Change Creation Date"] = CyberGate.xor_decode(rawConfig[61])
Config["Mutex"] = CyberGate.xor_decode(rawConfig[62])
Config["Melt File"] = CyberGate.xor_decode(rawConfig[63])
Config["CyberGate Version"] = CyberGate.xor_decode(rawConfig[67])
Config["Startup Policies"] = CyberGate.xor_decode(rawConfig[69])
Config["USB Spread"] = CyberGate.xor_decode(rawConfig[70])
Config["P2P Spread"] = CyberGate.xor_decode(rawConfig[71])
Config["Google Chrome Passwords"] = CyberGate.xor_decode(rawConfig[73])
Config["Process Injection"] = "Disabled"
if CyberGate.xor_decode(rawConfig[57]) == 0 or CyberGate.xor_decode(rawConfig[57]) == None:
Config["Process Injection"] = "Disabled"
elif CyberGate.xor_decode(rawConfig[57]) == 1:
Config["Process Injection"] = "Default Browser"
elif CyberGate.xor_decode(rawConfig[57]) == 2:
Config["Process Injection"] = CyberGate.xor_decode(rawConfig[58])
else:
return None
return Config
def get_bot_information(self, file_data):
    """Extract bot/C2 information from a CyberGate sample.

    Runs the static config extraction over the raw file bytes and, when
    the decoded config carries pipe-delimited "Domain" and "Port" lists,
    adds a "c2s" key holding one ``{"c2_uri": "tcp://host:port/"}`` entry
    per non-blank domain/port pair.

    :param file_data: raw bytes of the sample to analyze
    :return: dict of decoded config values (empty dict if extraction fails)
    """
    results = CyberGate.run_config_extraction(file_data)
    if results is None:
        # Extraction failed; report an empty result set rather than None.
        return {}
    if "Domain" in results and "Port" in results:
        # zip() pairs domains with ports and tolerates mismatched list
        # lengths; the previous xrange(len(domains)) loop raised
        # IndexError whenever fewer ports than domains were configured.
        c2s = [
            {"c2_uri": "tcp://{0}:{1}/".format(domain, port)}
            for domain, port in zip(results["Domain"].split("|"),
                                    results["Port"].split("|"))
            if domain.strip() and port.strip()
        ]
        results["c2s"] = c2s
    return results
# Register this decoder with the framework's global module registry so the
# scanner can discover and run it against submitted samples.
Modules.list.append(CyberGate())
|
bwall/bamfdetect
|
BAMF_Detect/modules/cybergate.py
|
Python
|
mit
| 6,827
|
[
"Brian"
] |
dcea76e32f7bf2aef53019dc31a208446b04a3485ddb56e7de19dbf96f9a98b5
|
# coding: utf-8
# Copyright (c) Pymatgen Development Team.
# Distributed under the terms of the MIT License.
from __future__ import division, unicode_literals
import unittest
import os
import json
import numpy as np
import warnings
import xml.etree.cElementTree as ET
from pymatgen.core.periodic_table import Element
from pymatgen.electronic_structure.core import OrbitalType
from pymatgen.io.vasp.inputs import Kpoints
from pymatgen.io.vasp.outputs import Chgcar, Locpot, Oszicar, Outcar, \
Vasprun, Procar, Xdatcar, Dynmat, BSVasprun, UnconvergedVASPWarning, \
Wavecar
from pymatgen import Spin, Orbital, Lattice, Structure
from pymatgen.entries.compatibility import MaterialsProjectCompatibility
from pymatgen.electronic_structure.core import Magmom
from pymatgen.util.testing import PymatgenTest
"""
Created on Jul 16, 2012
"""
__author__ = "Shyue Ping Ong, Stephen Dacek, Mark Turiansky"
__copyright__ = "Copyright 2012, The Materials Project"
__version__ = "0.1"
__maintainer__ = "Shyue Ping Ong"
__email__ = "shyue@mit.edu"
__date__ = "Jul 16, 2012"
test_dir = os.path.join(os.path.dirname(__file__), "..", "..", "..", "..",
'test_files')
class VasprunTest(unittest.TestCase):
    """Tests for parsing vasprun.xml files with pymatgen's Vasprun parser.

    Every test reads a reference vasprun.xml fixture from ``test_dir`` and
    pins exact values extracted from it, so the expected numbers below are
    tied to those specific files.
    """

    def setUp(self):
        # The fixture files deliberately trigger parser warnings; silence
        # them so they do not pollute the test output.
        warnings.simplefilter("ignore")

    def tearDown(self):
        warnings.resetwarnings()

    def test_multiple_dielectric(self):
        """A GW0 run should expose three additional dielectric functions."""
        v = Vasprun(os.path.join(test_dir, "vasprun.GW0.xml"))
        self.assertEqual(len(v.other_dielectric), 3)

    def test_charge_charge_dielectric(self):
        """
        VASP 5.4.4 writes out two dielectric functions to vasprun.xml
        These are the "density-density" and "velocity-velocity" linear response functions.
        See the comments in `linear_optics.F` for details.
        """
        v = Vasprun(os.path.join(test_dir, "vasprun.xml.dielectric_5.4.4"),
                    parse_potcar_file=False)
        self.assertEqual(v.dielectric is not None, True)
        self.assertEqual('density' in v.dielectric_data, True)
        self.assertEqual('velocity' in v.dielectric_data, True)

    def test_optical_absorption_coeff(self):
        """BSE run: absorption coefficient is derived from the dielectric data."""
        v = Vasprun(os.path.join(test_dir, "vasprun.BSE.xml.gz"))
        absorption_coeff = v.optical_absorption_coeff
        self.assertEqual(absorption_coeff[1], 24966408728.917931)

    def test_vasprun_with_more_than_two_unlabelled_dielectric_functions(self):
        # More than two unlabelled dielectric functions is ambiguous and
        # should be rejected rather than silently mis-assigned.
        with self.assertRaises(NotImplementedError):
            Vasprun(os.path.join(test_dir, "vasprun.xml.dielectric_bad"),
                    parse_potcar_file=False)

    def test_bad_vasprun(self):
        """Truncated XML raises by default, or warns when told to tolerate it."""
        self.assertRaises(ET.ParseError,
                          Vasprun, os.path.join(test_dir, "bad_vasprun.xml"))
        with warnings.catch_warnings(record=True) as w:
            # Cause all warnings to always be triggered.
            warnings.simplefilter("always")
            # Trigger a warning.
            v = Vasprun(os.path.join(test_dir, "bad_vasprun.xml"),
                        exception_on_bad_xml=False)
            # Verify some things
            self.assertEqual(len(v.ionic_steps), 1)
            self.assertAlmostEqual(v.final_energy, -269.00551374)
            self.assertTrue(issubclass(w[-1].category,
                                       UserWarning))

    def test_vdw(self):
        """Final energy is still parsed from a van-der-Waals corrected run."""
        v = Vasprun(os.path.join(test_dir, "vasprun.xml.vdw"))
        self.assertAlmostEqual(v.final_energy, -9.78310677)

    def test_properties(self):
        """End-to-end parse of several fixtures, checking most Vasprun attributes."""
        # Non-lm-decomposed DOS: orbitals are keyed by OrbitalType.
        filepath = os.path.join(test_dir, 'vasprun.xml.nonlm')
        vasprun = Vasprun(filepath, parse_potcar_file=False)
        orbs = list(vasprun.complete_dos.pdos[vasprun.final_structure[
            0]].keys())
        self.assertIn(OrbitalType.s, orbs)
        filepath = os.path.join(test_dir, 'vasprun.xml')
        vasprun = Vasprun(filepath, parse_potcar_file=False)
        # Test NELM parsing.
        self.assertEqual(vasprun.parameters["NELM"], 60)
        # test pdos parsing
        pdos0 = vasprun.complete_dos.pdos[vasprun.final_structure[0]]
        self.assertAlmostEqual(pdos0[Orbital.s][Spin.up][16], 0.0026)
        self.assertAlmostEqual(pdos0[Orbital.pz][Spin.down][16], 0.0012)
        self.assertEqual(pdos0[Orbital.s][Spin.up].shape, (301,))
        filepath2 = os.path.join(test_dir, 'lifepo4.xml')
        vasprun_ggau = Vasprun(filepath2, parse_projected_eigen=True,
                               parse_potcar_file=False)
        totalscsteps = sum([len(i['electronic_steps'])
                            for i in vasprun.ionic_steps])
        self.assertEqual(29, len(vasprun.ionic_steps))
        self.assertEqual(len(vasprun.structures), len(vasprun.ionic_steps))
        self.assertEqual(vasprun.lattice,
                         vasprun.lattice_rec.reciprocal_lattice)
        # The structure stored on each ionic step must match the parallel
        # Vasprun.structures list.
        for i, step in enumerate(vasprun.ionic_steps):
            self.assertEqual(vasprun.structures[i], step["structure"])
        self.assertTrue(all([vasprun.structures[i] == vasprun.ionic_steps[i][
            "structure"] for i in range(len(vasprun.ionic_steps))]))
        self.assertEqual(308, totalscsteps,
                         "Incorrect number of energies read from vasprun.xml")
        self.assertEqual(['Li'] + 4 * ['Fe'] + 4 * ['P'] + 16 * ["O"],
                         vasprun.atomic_symbols)
        self.assertEqual(vasprun.final_structure.composition.reduced_formula,
                         "LiFe4(PO4)4")
        self.assertIsNotNone(vasprun.incar, "Incar cannot be read")
        self.assertIsNotNone(vasprun.kpoints, "Kpoints cannot be read")
        self.assertIsNotNone(vasprun.eigenvalues, "Eigenvalues cannot be read")
        self.assertAlmostEqual(vasprun.final_energy, -269.38319884, 7)
        self.assertAlmostEqual(vasprun.tdos.get_gap(), 2.0589, 4)
        # (gap, cbm, vbm, is_direct) from the eigenvalue band properties.
        expectedans = (2.539, 4.0906, 1.5516, False)
        (gap, cbm, vbm, direct) = vasprun.eigenvalue_band_properties
        self.assertAlmostEqual(gap, expectedans[0])
        self.assertAlmostEqual(cbm, expectedans[1])
        self.assertAlmostEqual(vbm, expectedans[2])
        self.assertEqual(direct, expectedans[3])
        self.assertFalse(vasprun.is_hubbard)
        self.assertEqual(vasprun.potcar_symbols,
                         ['PAW_PBE Li 17Jan2003', 'PAW_PBE Fe 06Sep2000',
                          'PAW_PBE Fe 06Sep2000', 'PAW_PBE P 17Jan2003',
                          'PAW_PBE O 08Apr2002'])
        self.assertIsNotNone(vasprun.kpoints, "Kpoints cannot be read")
        self.assertIsNotNone(vasprun.actual_kpoints,
                             "Actual kpoints cannot be read")
        self.assertIsNotNone(vasprun.actual_kpoints_weights,
                             "Actual kpoints weights cannot be read")
        for atomdoses in vasprun.pdos:
            for orbitaldos in atomdoses:
                self.assertIsNotNone(orbitaldos, "Partial Dos cannot be read")
        # test skipping ionic steps.
        vasprun_skip = Vasprun(filepath, 3, parse_potcar_file=False)
        self.assertEqual(vasprun_skip.nionic_steps, 29)
        self.assertEqual(len(vasprun_skip.ionic_steps),
                         int(vasprun.nionic_steps / 3) + 1)
        self.assertEqual(len(vasprun_skip.ionic_steps),
                         len(vasprun_skip.structures))
        self.assertEqual(len(vasprun_skip.ionic_steps),
                         int(vasprun.nionic_steps / 3) + 1)
        # Check that nionic_steps is preserved no matter what.
        self.assertEqual(vasprun_skip.nionic_steps,
                         vasprun.nionic_steps)
        # Skipping steps changes which final energy is reported.
        self.assertNotAlmostEqual(vasprun_skip.final_energy,
                                  vasprun.final_energy)
        # Test with ionic_step_offset
        vasprun_offset = Vasprun(filepath, 3, 6, parse_potcar_file=False)
        self.assertEqual(len(vasprun_offset.ionic_steps),
                         int(len(vasprun.ionic_steps) / 3) - 1)
        self.assertEqual(vasprun_offset.structures[0],
                         vasprun_skip.structures[2])
        # GGA+U run: Hubbard parameters and projected eigenvalues.
        self.assertTrue(vasprun_ggau.is_hubbard)
        self.assertEqual(vasprun_ggau.hubbards["Fe"], 4.3)
        self.assertAlmostEqual(vasprun_ggau.projected_eigenvalues[Spin.up][
            0][0][96][0], 0.0032)
        d = vasprun_ggau.as_dict()
        self.assertEqual(d["elements"], ["Fe", "Li", "O", "P"])
        self.assertEqual(d["nelements"], 4)
        # Unconverged run must warn and set the converged flags accordingly.
        filepath = os.path.join(test_dir, 'vasprun.xml.unconverged')
        with warnings.catch_warnings(record=True) as w:
            # Cause all warnings to always be triggered.
            warnings.simplefilter("always")
            # Trigger a warning.
            vasprun_unconverged = Vasprun(filepath, parse_potcar_file=False)
            # Verify some things
            self.assertEqual(len(w), 1)
            self.assertTrue(issubclass(w[-1].category,
                                       UnconvergedVASPWarning))
            self.assertTrue(vasprun_unconverged.converged_ionic)
            self.assertFalse(vasprun_unconverged.converged_electronic)
            self.assertFalse(vasprun_unconverged.converged)
        # DFPT run: static dielectric tensors (RPA and including local field
        # effects, the "wolfe" variant).
        filepath = os.path.join(test_dir, 'vasprun.xml.dfpt')
        vasprun_dfpt = Vasprun(filepath, parse_potcar_file=False)
        self.assertAlmostEqual(vasprun_dfpt.epsilon_static[0][0], 3.26105533)
        self.assertAlmostEqual(vasprun_dfpt.epsilon_static[0][1], -0.00459066)
        self.assertAlmostEqual(vasprun_dfpt.epsilon_static[2][2], 3.24330517)
        self.assertAlmostEqual(vasprun_dfpt.epsilon_static_wolfe[0][0],
                               3.33402531)
        self.assertAlmostEqual(vasprun_dfpt.epsilon_static_wolfe[0][1],
                               -0.00559998)
        self.assertAlmostEqual(vasprun_dfpt.epsilon_static_wolfe[2][2],
                               3.31237357)
        self.assertTrue(vasprun_dfpt.converged)
        entry = vasprun_dfpt.get_computed_entry()
        entry = MaterialsProjectCompatibility(
            check_potcar_hash=False).process_entry(entry)
        self.assertAlmostEqual(entry.uncorrected_energy + entry.correction,
                               entry.energy)
        # DFPT with ionic contribution to the dielectric tensor.
        filepath = os.path.join(test_dir, 'vasprun.xml.dfpt.ionic')
        vasprun_dfpt_ionic = Vasprun(filepath, parse_potcar_file=False)
        self.assertAlmostEqual(vasprun_dfpt_ionic.epsilon_ionic[0][0],
                               515.73485838)
        self.assertAlmostEqual(vasprun_dfpt_ionic.epsilon_ionic[0][1],
                               -0.00263523)
        self.assertAlmostEqual(vasprun_dfpt_ionic.epsilon_ionic[2][2],
                               19.02110169)
        filepath = os.path.join(test_dir, 'vasprun.xml.dfpt.unconverged')
        vasprun_dfpt_unconv = Vasprun(filepath, parse_potcar_file=False)
        self.assertFalse(vasprun_dfpt_unconv.converged_electronic)
        self.assertTrue(vasprun_dfpt_unconv.converged_ionic)
        self.assertFalse(vasprun_dfpt_unconv.converged)
        vasprun_uniform = Vasprun(os.path.join(test_dir, "vasprun.xml.uniform"),
                                  parse_potcar_file=False)
        self.assertEqual(vasprun_uniform.kpoints.style,
                         Kpoints.supported_modes.Reciprocal)
        # A run without projected DOS should still yield a complete DOS.
        vasprun_no_pdos = Vasprun(os.path.join(test_dir, "Li_no_projected.xml"),
                                  parse_potcar_file=False)
        self.assertIsNotNone(vasprun_no_pdos.complete_dos)
        self.assertFalse(vasprun_no_pdos.dos_has_errors)
        # Frequency-dependent dielectric function: (energies, real, imaginary).
        vasprun_diel = Vasprun(os.path.join(test_dir, "vasprun.xml.dielectric"),
                               parse_potcar_file=False)
        self.assertAlmostEqual(0.4294, vasprun_diel.dielectric[0][10])
        self.assertAlmostEqual(19.941, vasprun_diel.dielectric[1][51][0])
        self.assertAlmostEqual(19.941, vasprun_diel.dielectric[1][51][1])
        self.assertAlmostEqual(19.941, vasprun_diel.dielectric[1][51][2])
        self.assertAlmostEqual(0.0, vasprun_diel.dielectric[1][51][3])
        self.assertAlmostEqual(34.186, vasprun_diel.dielectric[2][85][0])
        self.assertAlmostEqual(34.186, vasprun_diel.dielectric[2][85][1])
        self.assertAlmostEqual(34.186, vasprun_diel.dielectric[2][85][2])
        self.assertAlmostEqual(0.0, vasprun_diel.dielectric[2][85][3])
        v = Vasprun(os.path.join(test_dir, "vasprun.xml.indirect.gz"))
        (gap, cbm, vbm, direct) = v.eigenvalue_band_properties
        self.assertFalse(direct)
        # Optical transitions: (energy, transition probability) pairs.
        vasprun_optical = Vasprun(
            os.path.join(test_dir, "vasprun.xml.opticaltransitions"),
            parse_potcar_file=False)
        self.assertAlmostEqual(3.084, vasprun_optical.optical_transition[0][0])
        self.assertAlmostEqual(3.087, vasprun_optical.optical_transition[3][0])
        self.assertAlmostEqual(0.001, vasprun_optical.optical_transition[0][1])
        self.assertAlmostEqual(0.001, vasprun_optical.optical_transition[1][1])
        self.assertAlmostEqual(0.001, vasprun_optical.optical_transition[7][1])
        self.assertAlmostEqual(0.001, vasprun_optical.optical_transition[19][1])
        self.assertAlmostEqual(3.3799999999,
                               vasprun_optical.optical_transition[54][0])
        self.assertAlmostEqual(3.381, vasprun_optical.optical_transition[55][0])
        self.assertAlmostEqual(3.381, vasprun_optical.optical_transition[56][0])
        self.assertAlmostEqual(10554.9860,
                               vasprun_optical.optical_transition[54][1])
        self.assertAlmostEqual(0.0, vasprun_optical.optical_transition[55][1])
        self.assertAlmostEqual(0.001, vasprun_optical.optical_transition[56][1])

    def test_force_constants(self):
        """DFPT phonon run: force constants and normal-mode eigendata."""
        vasprun_fc = Vasprun(os.path.join(test_dir, "vasprun.xml.dfpt.phonon"),
                             parse_potcar_file=False)
        # Reference force-constant 3x3 block for atom pair (8, 9).
        fc_ans = [[-0.00184451, -0., -0.],
                  [-0., -0.00933824, -0.03021279],
                  [-0., -0.03021279, 0.01202547]]
        # Reference eigenvector for normal mode 33 (16 atoms x 3 components).
        nm_ans = [[0.0884346, -0.08837289, -0.24995639],
                  [-0.0884346, 0.08837289, 0.24995639],
                  [0.15306645, -0.05105771, -0.14441306],
                  [-0.15306645, 0.05105771, 0.14441306],
                  [-0.0884346, 0.08837289, 0.24995639],
                  [0.0884346, -0.08837289, -0.24995639],
                  [-0.15306645, 0.05105771, 0.14441306],
                  [0.15306645, -0.05105771, -0.14441306],
                  [-0.0884346, 0.08837289, 0.24995639],
                  [0.0884346, -0.08837289, -0.24995639],
                  [-0.15306645, 0.05105771, 0.14441306],
                  [0.15306645, -0.05105771, -0.14441306],
                  [0.0884346, -0.08837289, -0.24995639],
                  [-0.0884346, 0.08837289, 0.24995639],
                  [0.15306645, -0.05105771, -0.14441306],
                  [-0.15306645, 0.05105771, 0.14441306]]
        nm_eigenval_ans = [-0.59067079, -0.59067079, -0.59067003, -0.59067003,
                           -0.59067003, -0.59067003, -0.585009, -0.585009,
                           -0.58500895, -0.58500883, -0.5062956, -0.5062956]
        self.assertEqual(vasprun_fc.force_constants.shape, (16, 16, 3, 3))
        self.assertTrue(np.allclose(vasprun_fc.force_constants[8, 9], fc_ans))
        self.assertEqual(vasprun_fc.normalmode_eigenvals.size, 48)
        self.assertTrue(np.allclose(vasprun_fc.normalmode_eigenvals[17:29],
                                    nm_eigenval_ans))
        self.assertEqual(vasprun_fc.normalmode_eigenvecs.shape, (48, 16, 3))
        self.assertTrue(
            np.allclose(vasprun_fc.normalmode_eigenvecs[33], nm_ans))

    def test_Xe(self):
        """Single-atom noble-gas run parses its atomic symbols correctly."""
        vr = Vasprun(os.path.join(test_dir, 'vasprun.xml.xe'),
                     parse_potcar_file=False)
        self.assertEqual(vr.atomic_symbols, ['Xe'])

    def test_invalid_element(self):
        # An unknown chemical species in the file must be rejected.
        self.assertRaises(ValueError, Vasprun,
                          os.path.join(test_dir, 'vasprun.xml.wrong_sp'))

    def test_selective_dynamics(self):
        """Selective-dynamics flags end up as a site property on the structure."""
        vsd = Vasprun(os.path.join(test_dir, 'vasprun.xml.indirect.gz'))
        np.testing.assert_array_equal(
            vsd.final_structure.site_properties.get('selective_dynamics'),
            [[True] * 3, [False] * 3], "Selective dynamics parsing error")

    def test_as_dict(self):
        filepath = os.path.join(test_dir, 'vasprun.xml')
        vasprun = Vasprun(filepath,
                          parse_potcar_file=False)
        # Test that as_dict() is json-serializable
        self.assertIsNotNone(json.dumps(vasprun.as_dict()))
        self.assertEqual(
            vasprun.as_dict()["input"]["potcar_type"],
            ['PAW_PBE', 'PAW_PBE', 'PAW_PBE', 'PAW_PBE', 'PAW_PBE'])
        self.assertEqual(vasprun.as_dict()['input']['nkpoints'], 24)

    def test_get_band_structure(self):
        """Band structure of Si: CBM/VBM locations, labels, and projections."""
        with warnings.catch_warnings():
            warnings.simplefilter("ignore")
            filepath = os.path.join(test_dir, 'vasprun_Si_bands.xml')
            vasprun = Vasprun(filepath,
                              parse_projected_eigen=True,
                              parse_potcar_file=False)
            bs = vasprun.get_band_structure(kpoints_filename=os.path.join(test_dir,
                                                                          'KPOINTS_Si_bands'))
            cbm = bs.get_cbm()
            vbm = bs.get_vbm()
            self.assertEqual(cbm['kpoint_index'], [13],
                             "wrong cbm kpoint index")
            self.assertAlmostEqual(cbm['energy'], 6.2301, "wrong cbm energy")
            self.assertEqual(cbm['band_index'], {Spin.up: [4], Spin.down: [4]},
                             "wrong cbm bands")
            self.assertEqual(vbm['kpoint_index'], [0, 63, 64])
            self.assertAlmostEqual(vbm['energy'], 5.6158, "wrong vbm energy")
            self.assertEqual(vbm['band_index'], {Spin.up: [1, 2, 3],
                                                 Spin.down: [1, 2, 3]},
                             "wrong vbm bands")
            self.assertEqual(vbm['kpoint'].label, "\\Gamma", "wrong vbm label")
            self.assertEqual(cbm['kpoint'].label, None, "wrong cbm label")
            projected = bs.get_projection_on_elements()
            self.assertAlmostEqual(projected[Spin.up][0][0]["Si"], 0.4238)
            projected = bs.get_projections_on_elements_and_orbitals(
                {"Si": ["s"]})
            self.assertAlmostEqual(projected[Spin.up][0][0]["Si"]["s"], 0.4238)

    def test_sc_step_overflow(self):
        """Fortran '****' overflow markers parse to NaN instead of crashing."""
        filepath = os.path.join(test_dir, 'vasprun.xml.sc_overflow')
        # with warnings.catch_warnings(record=True) as w:
        #     warnings.simplefilter("always")
        #     vasprun = Vasprun(filepath)
        #     self.assertEqual(len(w), 3)
        vasprun = Vasprun(filepath)
        estep = vasprun.ionic_steps[0]['electronic_steps'][29]
        self.assertTrue(np.isnan(estep['e_wo_entrp']))

    def test_update_potcar(self):
        """Hashes are filled in from an explicit POTCAR; mismatches raise."""
        filepath = os.path.join(test_dir, 'vasprun.xml')
        potcar_path = os.path.join(test_dir, 'POTCAR.LiFePO4.gz')
        potcar_path2 = os.path.join(test_dir, 'POTCAR2.LiFePO4.gz')
        vasprun = Vasprun(filepath, parse_potcar_file=False)
        # Without a POTCAR, hashes remain unset.
        self.assertEqual(vasprun.potcar_spec,
                         [{"titel": "PAW_PBE Li 17Jan2003", "hash": None},
                          {"titel": "PAW_PBE Fe 06Sep2000", "hash": None},
                          {"titel": "PAW_PBE Fe 06Sep2000", "hash": None},
                          {"titel": "PAW_PBE P 17Jan2003", "hash": None},
                          {"titel": "PAW_PBE O 08Apr2002", "hash": None}])
        vasprun.update_potcar_spec(potcar_path)
        self.assertEqual(vasprun.potcar_spec, [{"titel": "PAW_PBE Li 17Jan2003",
                                                "hash": "65e83282d1707ec078c1012afbd05be8"},
                                               {"titel": "PAW_PBE Fe 06Sep2000",
                                                "hash": "9530da8244e4dac17580869b4adab115"},
                                               {"titel": "PAW_PBE Fe 06Sep2000",
                                                "hash": "9530da8244e4dac17580869b4adab115"},
                                               {"titel": "PAW_PBE P 17Jan2003",
                                                "hash": "7dc3393307131ae67785a0cdacb61d5f"},
                                               {"titel": "PAW_PBE O 08Apr2002",
                                                "hash": "7a25bc5b9a5393f46600a4939d357982"}])
        # A POTCAR that does not match the run must be rejected.
        vasprun2 = Vasprun(filepath, parse_potcar_file=False)
        self.assertRaises(ValueError, vasprun2.update_potcar_spec, potcar_path2)
        vasprun = Vasprun(filepath, parse_potcar_file=potcar_path)
        self.assertEqual(vasprun.potcar_spec, [{"titel": "PAW_PBE Li 17Jan2003",
                                                "hash": "65e83282d1707ec078c1012afbd05be8"},
                                               {"titel": "PAW_PBE Fe 06Sep2000",
                                                "hash": "9530da8244e4dac17580869b4adab115"},
                                               {"titel": "PAW_PBE Fe 06Sep2000",
                                                "hash": "9530da8244e4dac17580869b4adab115"},
                                               {"titel": "PAW_PBE P 17Jan2003",
                                                "hash": "7dc3393307131ae67785a0cdacb61d5f"},
                                               {"titel": "PAW_PBE O 08Apr2002",
                                                "hash": "7a25bc5b9a5393f46600a4939d357982"}])
        self.assertRaises(ValueError, Vasprun, filepath,
                          parse_potcar_file=potcar_path2)

    def test_search_for_potcar(self):
        """parse_potcar_file=True locates a matching POTCAR automatically."""
        filepath = os.path.join(test_dir, 'vasprun.xml')
        vasprun = Vasprun(filepath, parse_potcar_file=True)
        self.assertEqual(vasprun.potcar_spec, [{"titel": "PAW_PBE Li 17Jan2003",
                                                "hash": "65e83282d1707ec078c1012afbd05be8"},
                                               {"titel": "PAW_PBE Fe 06Sep2000",
                                                "hash": "9530da8244e4dac17580869b4adab115"},
                                               {"titel": "PAW_PBE Fe 06Sep2000",
                                                "hash": "9530da8244e4dac17580869b4adab115"},
                                               {"titel": "PAW_PBE P 17Jan2003",
                                                "hash": "7dc3393307131ae67785a0cdacb61d5f"},
                                               {"titel": "PAW_PBE O 08Apr2002",
                                                "hash": "7a25bc5b9a5393f46600a4939d357982"}])

    def test_potcar_not_found(self):
        filepath = os.path.join(test_dir, 'vasprun.xml')
        # Ensure no potcar is found and nothing is updated
        with warnings.catch_warnings(record=True) as w:
            warnings.simplefilter("always")
            vasprun = Vasprun(filepath, parse_potcar_file='.')
            self.assertEqual(len(w), 2)
            self.assertEqual(vasprun.potcar_spec, [{"titel": "PAW_PBE Li 17Jan2003", "hash": None},
                                                   {"titel": "PAW_PBE Fe 06Sep2000", "hash": None},
                                                   {"titel": "PAW_PBE Fe 06Sep2000", "hash": None},
                                                   {"titel": "PAW_PBE P 17Jan2003", "hash": None},
                                                   {"titel": "PAW_PBE O 08Apr2002", "hash": None}])

    def test_parsing_chemical_shift_calculations(self):
        """NMR chemical-shift run: electronic steps and convergence parse."""
        with warnings.catch_warnings():
            warnings.simplefilter("ignore")
            filepath = os.path.join(test_dir, "nmr", "cs", "basic",
                                    'vasprun.xml.chemical_shift.scstep')
            vasprun = Vasprun(filepath)
            nestep = len(vasprun.ionic_steps[-1]['electronic_steps'])
            self.assertEqual(nestep, 10)
            self.assertTrue(vasprun.converged)

    def test_parsing_efg_calcs(self):
        """NMR electric-field-gradient run parses and reports convergence."""
        with warnings.catch_warnings():
            warnings.simplefilter("ignore")
            filepath = os.path.join(test_dir, "nmr", "efg", "AlPO4",
                                    'vasprun.xml')
            vasprun = Vasprun(filepath)
            nestep = len(vasprun.ionic_steps[-1]['electronic_steps'])
            self.assertEqual(nestep, 18)
            self.assertTrue(vasprun.converged)

    def test_charged_structure(self):
        """NELECT differing from the neutral POTCAR count yields a net charge."""
        vpath = os.path.join(test_dir, 'vasprun.charged.xml')
        potcar_path = os.path.join(test_dir, 'POT_GGA_PAW_PBE', 'POTCAR.Si.gz')
        vasprun = Vasprun(vpath, parse_potcar_file=False)
        vasprun.update_charge_from_potcar(potcar_path)
        self.assertEqual(vasprun.parameters.get("NELECT", 8), 9)
        self.assertEqual(vasprun.structures[0].charge, 1)
class OutcarTest(PymatgenTest):
    """Tests for parsing OUTCAR files with pymatgen's Outcar parser.

    Each test reads a reference OUTCAR fixture from ``test_dir`` and pins
    exact values, so the expected numbers are tied to those specific files.
    """

    def test_init(self):
        """Core OUTCAR parsing: magnetization, charge, run stats, LEPSILON data."""
        # Plain and gzipped variants of the same file must parse identically.
        for f in ['OUTCAR', 'OUTCAR.gz']:
            filepath = os.path.join(test_dir, f)
            outcar = Outcar(filepath)
            # One dict per site, resolved by orbital character (s/p/d/total).
            expected_mag = ({'d': 0.0, 'p': 0.003, 's': 0.002, 'tot': 0.005},
                            {'d': 0.798, 'p': 0.008, 's': 0.007, 'tot': 0.813},
                            {'d': 0.798, 'p': 0.008, 's': 0.007, 'tot': 0.813},
                            {'d': 0.0, 'p': -0.117, 's': 0.005, 'tot': -0.112},
                            {'d': 0.0, 'p': -0.165, 's': 0.004, 'tot': -0.162},
                            {'d': 0.0, 'p': -0.117, 's': 0.005, 'tot': -0.112},
                            {'d': 0.0, 'p': -0.165, 's': 0.004, 'tot': -0.162})
            expected_chg = ({'p': 0.154, 's': 0.078, 'd': 0.0, 'tot': 0.232},
                            {'p': 0.707, 's': 0.463, 'd': 8.316, 'tot': 9.486},
                            {'p': 0.707, 's': 0.463, 'd': 8.316, 'tot': 9.486},
                            {'p': 3.388, 's': 1.576, 'd': 0.0, 'tot': 4.964},
                            {'p': 3.365, 's': 1.582, 'd': 0.0, 'tot': 4.947},
                            {'p': 3.388, 's': 1.576, 'd': 0.0, 'tot': 4.964},
                            {'p': 3.365, 's': 1.582, 'd': 0.0, 'tot': 4.947})
            self.assertAlmostEqual(outcar.magnetization, expected_mag, 5,
                                   "Wrong magnetization read from Outcar")
            self.assertAlmostEqual(outcar.charge, expected_chg, 5,
                                   "Wrong charge read from Outcar")
            self.assertFalse(outcar.is_stopped)
            self.assertEqual(outcar.run_stats, {'System time (sec)': 0.938,
                                                'Total CPU time used (sec)': 545.142,
                                                'Elapsed time (sec)': 546.709,
                                                'Maximum memory used (kb)': 0.0,
                                                'Average memory used (kb)': 0.0,
                                                'User time (sec)': 544.204,
                                                'cores': '8'})
            self.assertAlmostEqual(outcar.efermi, 2.0112)
            self.assertAlmostEqual(outcar.nelect, 44.9999991)
            self.assertAlmostEqual(outcar.total_mag, 0.9999998)
            self.assertIsNotNone(outcar.as_dict())
            self.assertFalse(outcar.lepsilon)
        # A run killed via STOPCAR must be flagged as stopped.
        filepath = os.path.join(test_dir, 'OUTCAR.stopped')
        outcar = Outcar(filepath)
        self.assertTrue(outcar.is_stopped)
        # LEPSILON runs: dielectric, piezo, Born charges, internal strain.
        for f in ['OUTCAR.lepsilon', 'OUTCAR.lepsilon.gz']:
            filepath = os.path.join(test_dir, f)
            outcar = Outcar(filepath)
            self.assertTrue(outcar.lepsilon)
            self.assertAlmostEqual(outcar.dielectric_tensor[0][0], 3.716432)
            self.assertAlmostEqual(outcar.dielectric_tensor[0][1], -0.20464)
            self.assertAlmostEqual(outcar.dielectric_tensor[1][2], -0.20464)
            self.assertAlmostEqual(outcar.dielectric_ionic_tensor[0][0],
                                   0.001419)
            self.assertAlmostEqual(outcar.dielectric_ionic_tensor[0][2],
                                   0.001419)
            self.assertAlmostEqual(outcar.dielectric_ionic_tensor[2][2],
                                   0.001419)
            self.assertAlmostEqual(outcar.piezo_tensor[0][0], 0.52799)
            self.assertAlmostEqual(outcar.piezo_tensor[1][3], 0.35998)
            self.assertAlmostEqual(outcar.piezo_tensor[2][5], 0.35997)
            self.assertAlmostEqual(outcar.piezo_ionic_tensor[0][0], 0.05868)
            self.assertAlmostEqual(outcar.piezo_ionic_tensor[1][3], 0.06241)
            self.assertAlmostEqual(outcar.piezo_ionic_tensor[2][5], 0.06242)
            self.assertAlmostEqual(outcar.born[0][1][2], -0.385)
            self.assertAlmostEqual(outcar.born[1][2][0], 0.36465)
            self.assertAlmostEqual(outcar.internal_strain_tensor[0][0][0], -572.5437,places=4)
            self.assertAlmostEqual(outcar.internal_strain_tensor[0][1][0], 683.2985,places=4)
            self.assertAlmostEqual(outcar.internal_strain_tensor[0][1][3], 73.07059,places=4)
            self.assertAlmostEqual(outcar.internal_strain_tensor[1][0][0], 570.98927,places=4)
            self.assertAlmostEqual(outcar.internal_strain_tensor[1][1][0], -683.68519,places=4)
            self.assertAlmostEqual(outcar.internal_strain_tensor[1][2][2], 570.98927,places=4)
        # Non-collinear (SOC) run: magnetization entries are vector Magmoms.
        filepath = os.path.join(test_dir, 'OUTCAR.NiO_SOC.gz')
        outcar = Outcar(filepath)
        expected_mag = (
            {'s': Magmom([0.0, 0.0, -0.001]), 'p': Magmom([0.0, 0.0, -0.003]),
             'd': Magmom([0.0, 0.0, 1.674]), 'tot': Magmom([0.0, 0.0, 1.671])},
            {'s': Magmom([0.0, 0.0, 0.001]), 'p': Magmom([0.0, 0.0, 0.003]),
             'd': Magmom([0.0, 0.0, -1.674]),
             'tot': Magmom([0.0, 0.0, -1.671])},
            {'s': Magmom([0.0, 0.0, 0.0]), 'p': Magmom([0.0, 0.0, 0.0]),
             'd': Magmom([0.0, 0.0, 0.0]), 'tot': Magmom([0.0, 0.0, 0.0])},
            {'s': Magmom([0.0, 0.0, 0.0]), 'p': Magmom([0.0, 0.0, 0.0]),
             'd': Magmom([0.0, 0.0, 0.0]), 'tot': Magmom([0.0, 0.0, 0.0])}
        )
        # test note: Magmom class uses np.allclose() when testing for equality
        # so fine to use assertEqual here
        self.assertEqual(outcar.magnetization, expected_mag,
                         "Wrong vector magnetization read from Outcar for SOC calculation")

    def test_polarization(self):
        """Berry-phase polarization components (ionic, spin-up/down, electronic)."""
        filepath = os.path.join(test_dir, "OUTCAR.BaTiO3.polar")
        outcar = Outcar(filepath)
        self.assertEqual(outcar.spin, True)
        self.assertEqual(outcar.noncollinear, False)
        self.assertAlmostEqual(outcar.p_ion[0], 0.0)
        self.assertAlmostEqual(outcar.p_ion[1], 0.0)
        self.assertAlmostEqual(outcar.p_ion[2], -5.56684)
        self.assertAlmostEqual(outcar.p_sp1[0], 2.00068)
        self.assertAlmostEqual(outcar.p_sp2[0], -2.00044)
        self.assertAlmostEqual(outcar.p_elec[0], 0.00024)
        self.assertAlmostEqual(outcar.p_elec[1], 0.00019)
        self.assertAlmostEqual(outcar.p_elec[2], 3.61674)

    def test_pseudo_zval(self):
        """ZVAL (valence electron count) per species from the POTCAR summary."""
        filepath = os.path.join(test_dir, "OUTCAR.BaTiO3.polar")
        outcar = Outcar(filepath)
        self.assertDictEqual({'Ba': 10.00, 'Ti': 10.00, 'O': 6.00},
                             outcar.zval_dict)

    def test_dielectric(self):
        """Dipole/quadrupole corrections and final energy from a dielectric run."""
        filepath = os.path.join(test_dir, "OUTCAR.dielectric")
        outcar = Outcar(filepath)
        outcar.read_corrections()
        self.assertAlmostEqual(outcar.data["dipol_quadrupol_correction"],
                               0.03565)
        self.assertAlmostEqual(outcar.final_energy, -797.46760559)

    def test_freq_dielectric(self):
        """Frequency-dependent dielectric tensor from a LOPTICS run."""
        filepath = os.path.join(test_dir, "OUTCAR.LOPTICS")
        outcar = Outcar(filepath)
        outcar.read_freq_dielectric()
        self.assertAlmostEqual(outcar.frequencies[0], 0)
        self.assertAlmostEqual(outcar.frequencies[-1], 39.826101)
        self.assertAlmostEqual(outcar.dielectric_tensor_function[0][0, 0],
                               8.96938800)
        self.assertAlmostEqual(outcar.dielectric_tensor_function[-1][0, 0],
                               7.36167000e-01 + 1.53800000e-03j)
        self.assertEqual(len(outcar.frequencies),
                         len(outcar.dielectric_tensor_function))
        # The dielectric tensor must be symmetric.
        np.testing.assert_array_equal(outcar.dielectric_tensor_function[0],
                                      outcar.dielectric_tensor_function[
                                          0].transpose())

    def test_freq_dielectric_vasp544(self):
        """Same as above, but for the changed VASP 5.4.4 output format."""
        filepath = os.path.join(test_dir, "OUTCAR.LOPTICS.vasp544")
        outcar = Outcar(filepath)
        outcar.read_freq_dielectric()
        self.assertAlmostEqual(outcar.frequencies[0], 0)
        self.assertAlmostEqual(outcar.frequencies[-1], 39.63964)
        self.assertAlmostEqual(outcar.dielectric_tensor_function[0][0, 0],
                               12.769435 + 0j)
        self.assertAlmostEqual(outcar.dielectric_tensor_function[-1][0, 0],
                               0.828615 + 0.016594j)
        self.assertEqual(len(outcar.frequencies),
                         len(outcar.dielectric_tensor_function))
        np.testing.assert_array_equal(outcar.dielectric_tensor_function[0],
                                      outcar.dielectric_tensor_function[
                                          0].transpose())

    def test_read_elastic_tensor(self):
        """6x6 elastic tensor (Voigt notation) from an IBRION=6 run."""
        filepath = os.path.join(test_dir, "OUTCAR.total_tensor.Li2O.gz")
        outcar = Outcar(filepath)
        outcar.read_elastic_tensor()
        self.assertAlmostEqual(outcar.data["elastic_tensor"][0][0], 1986.3391)
        self.assertAlmostEqual(outcar.data["elastic_tensor"][0][1], 187.8324)
        self.assertAlmostEqual(outcar.data["elastic_tensor"][3][3], 586.3034)

    def test_read_piezo_tensor(self):
        """Piezoelectric tensor read on demand from a LEPSILON run."""
        filepath = os.path.join(test_dir, "OUTCAR.lepsilon.gz")
        outcar = Outcar(filepath)
        outcar.read_piezo_tensor()
        self.assertAlmostEqual(outcar.data["piezo_tensor"][0][0], 0.52799)
        self.assertAlmostEqual(outcar.data["piezo_tensor"][1][3], 0.35998)
        self.assertAlmostEqual(outcar.data["piezo_tensor"][2][5], 0.35997)

    def test_core_state_eigen(self):
        """Core-level eigenvalues keyed by site index and orbital label."""
        filepath = os.path.join(test_dir, "OUTCAR.CL")
        cl = Outcar(filepath).read_core_state_eigen()
        self.assertAlmostEqual(cl[6]["2s"][-1], -174.4779)
        filepath = os.path.join(test_dir, "OUTCAR.icorelevel")
        cl = Outcar(filepath).read_core_state_eigen()
        self.assertAlmostEqual(cl[4]["3d"][-1], -31.4522)

    def test_avg_core_poten(self):
        """Average electrostatic potential at the atomic cores."""
        filepath = os.path.join(test_dir, "OUTCAR.lepsilon")
        cp = Outcar(filepath).read_avg_core_poten()
        self.assertAlmostEqual(cp[-1][1], -90.0487)
        filepath = os.path.join(test_dir, "OUTCAR")
        cp = Outcar(filepath).read_avg_core_poten()
        self.assertAlmostEqual(cp[0][6], -73.1068)

    def test_single_atom(self):
        """Parsing must also work for a single-site calculation."""
        filepath = os.path.join(test_dir, "OUTCAR.Al")
        outcar = Outcar(filepath)
        expected_mag = ({u'p': 0.0, u's': 0.0, u'd': 0.0, u'tot': 0.0},)
        expected_chg = ({u'p': 0.343, u's': 0.425, u'd': 0.0, u'tot': 0.768},)
        self.assertAlmostEqual(outcar.magnetization, expected_mag)
        self.assertAlmostEqual(outcar.charge, expected_chg)
        self.assertFalse(outcar.is_stopped)
        self.assertEqual(outcar.run_stats, {'System time (sec)': 0.592,
                                            'Total CPU time used (sec)': 50.194,
                                            'Elapsed time (sec)': 52.337,
                                            'Maximum memory used (kb)': 62900.0,
                                            'Average memory used (kb)': 0.0,
                                            'User time (sec)': 49.602,
                                            'cores': '32'})
        self.assertAlmostEqual(outcar.efermi, 8.0942)
        self.assertAlmostEqual(outcar.nelect, 3)
        self.assertAlmostEqual(outcar.total_mag, 8.2e-06)
        self.assertIsNotNone(outcar.as_dict())

    def test_chemical_shielding(self):
        """NMR chemical shielding (isotropic, span, skew) per site."""
        filename = os.path.join(test_dir, "nmr", "cs", "core.diff",
                                "hydromagnesite", "OUTCAR")
        outcar = Outcar(filename)
        expected_chemical_shielding = [[191.9974, 69.5232, 0.6342],
                                       [195.0808, 68.183, 0.833],
                                       [192.0389, 69.5762, 0.6329],
                                       [195.0844, 68.1756, 0.8336],
                                       [192.005, 69.5289, 0.6339],
                                       [195.0913, 68.1859, 0.833],
                                       [192.0237, 69.565, 0.6333],
                                       [195.0788, 68.1733, 0.8337]]
        self.assertAlmostEqual(
            len(outcar.data["chemical_shielding"]["valence_only"][20: 28]),
            len(expected_chemical_shielding))
        self.assertArrayAlmostEqual(outcar.data["chemical_shielding"]["valence_and_core"][20:28],
                                    expected_chemical_shielding, decimal=5)

    def test_chemical_shielding_with_different_core_contribution(self):
        """Valence-only vs valence+core shielding differ in the isotropic part."""
        filename = os.path.join(test_dir, "nmr", "cs", "core.diff",
                                "core.diff.chemical.shifts.OUTCAR")
        outcar = Outcar(filename)
        c_vo = outcar.data["chemical_shielding"]["valence_only"][7]
        for x1, x2 in zip(list(c_vo),
                          [198.7009, 73.7484, 1.0000]):
            self.assertAlmostEqual(x1, x2)
        c_vc = outcar.data["chemical_shielding"]["valence_and_core"][7]
        for x1, x2 in zip(list(c_vc),
                          [-1.9406, 73.7484, 1.0000]):
            self.assertAlmostEqual(x1, x2)

    def test_cs_raw_tensors(self):
        """Unsymmetrized 3x3 chemical-shift tensors are read verbatim."""
        filename = os.path.join(test_dir, "nmr", "cs", "core.diff",
                                "core.diff.chemical.shifts.OUTCAR")
        outcar = Outcar(filename)
        unsym_tensors = outcar.data["unsym_cs_tensor"]
        self.assertEqual(unsym_tensors[0],
                         [[-145.814605, -4.263425, 0.000301],
                          [4.263434, -145.812238, -8.7e-05],
                          [0.000136, -0.000189, -142.794068]])
        self.assertEqual(unsym_tensors[29],
                         [[287.789318, -53.799325, 30.900024],
                          [-53.799571, 225.668117, -17.839598],
                          [3.801103, -2.195218, 88.896756]])

    def test_cs_g0_contribution(self):
        """G=0 contribution to the chemical-shift tensor."""
        filename = os.path.join(test_dir, "nmr", "cs", "core.diff",
                                "core.diff.chemical.shifts.OUTCAR")
        outcar = Outcar(filename)
        g0_contrib = outcar.data["cs_g0_contribution"]
        self.assertEqual(g0_contrib,
                         [[-8.773535, 9e-06, 1e-06],
                          [1.7e-05, -8.773536, -0.0792],
                          [-6e-06, -0.008328, -9.320237]])

    def test_cs_core_contribution(self):
        """Per-species core contribution to the chemical shift."""
        filename = os.path.join(test_dir, "nmr", "cs", "core.diff",
                                "core.diff.chemical.shifts.OUTCAR")
        outcar = Outcar(filename)
        core_contrib = outcar.data["cs_core_contribution"]
        self.assertEqual(core_contrib,
                         {'Mg': -412.8248405,
                          'C': -200.5098812,
                          'O': -271.0766979})

    def test_nmr_efg(self):
        """Electric-field-gradient parameters (Cq, eta) and raw tensors."""
        filename = os.path.join(test_dir, "nmr", "efg", "AlPO4", "OUTCAR")
        outcar = Outcar(filename)
        expected_efg = [
            {'eta': 0.465, 'nuclear_quadrupole_moment': 146.6, 'cq': -5.573},
            {'eta': 0.465, 'nuclear_quadrupole_moment': 146.6, 'cq': -5.573},
            {'eta': 0.137, 'nuclear_quadrupole_moment': 146.6, 'cq': 6.327},
            {'eta': 0.137, 'nuclear_quadrupole_moment': 146.6, 'cq': 6.327},
            {'eta': 0.112, 'nuclear_quadrupole_moment': 146.6, 'cq': -7.453},
            {'eta': 0.112, 'nuclear_quadrupole_moment': 146.6, 'cq': -7.453},
            {'eta': 0.42, 'nuclear_quadrupole_moment': 146.6, 'cq': -5.58},
            {'eta': 0.42, 'nuclear_quadrupole_moment': 146.6, 'cq': -5.58}]
        self.assertEqual(len(outcar.data["efg"][2:10]), len(expected_efg))
        for e1, e2 in zip(outcar.data["efg"][2:10], expected_efg):
            for k in e1.keys():
                self.assertAlmostEqual(e1[k], e2[k], places=5)
        exepected_tensors = [[[11.11, 1.371, 2.652], [1.371, 3.635, -3.572], [2.652, -3.572, -14.746]],
                             [[11.11, -1.371, 2.652], [-1.371, 3.635, 3.572], [2.652, 3.572, -14.746]],
                             [[-3.098, 6.511, 7.732], [6.511, 1.419, 11.445], [7.732, 11.445, 1.678]],
                             [[-3.098, -6.511, 7.732], [-6.511, 1.419, -11.445], [7.732, -11.445, 1.678]],
                             [[2.344, -10.775, -7.006], [-10.775, -7.152, -11.309], [-7.006, -11.309, 4.808]],
                             [[2.344, 10.775, -7.006], [10.775, -7.152, 11.309], [-7.006, 11.309, 4.808]],
                             [[2.404, -0.588, -6.83], [-0.588, 10.435, 3.159], [-6.83, 3.159, -12.839]],
                             [[2.404, 0.588, -6.83], [0.588, 10.435, -3.159], [-6.83, -3.159, -12.839]]]
        self.assertEqual(len(outcar.data["unsym_efg_tensor"][2:10]), len(exepected_tensors))
        for e1, e2 in zip(outcar.data["unsym_efg_tensor"][2:10], exepected_tensors):
            self.assertArrayAlmostEqual(e1, e2)

    def test_read_fermi_contact_shift(self):
        """Fermi-contact hyperfine shift tables (fch/th/dh)."""
        filepath = os.path.join(test_dir, "OUTCAR_fc")
        outcar = Outcar(filepath)
        outcar.read_fermi_contact_shift()
        self.assertAlmostEqual(outcar.data["fermi_contact_shift"][u'fch'][0][0],
                               -0.002)
        self.assertAlmostEqual(outcar.data["fermi_contact_shift"][u'th'][0][0],
                               -0.052)
        self.assertAlmostEqual(outcar.data["fermi_contact_shift"][u'dh'][0][0],
                               0.0)

    def test_drift(self):
        """Per-ionic-step drift forces; the reference files sum to ~0."""
        outcar = Outcar(os.path.join(test_dir, "OUTCAR"))
        self.assertEqual(len(outcar.drift), 5)
        self.assertAlmostEqual(np.sum(outcar.drift), 0)
        outcar = Outcar(os.path.join(test_dir, "OUTCAR.CL"))
        self.assertEqual(len(outcar.drift), 79)
        self.assertAlmostEqual(np.sum(outcar.drift), 0.448010)

    def test_electrostatic_potential(self):
        """FFT grid, sampling radii, and core electrostatic potentials."""
        outcar = Outcar(os.path.join(test_dir, "OUTCAR"))
        self.assertEqual(outcar.ngf, [54, 30, 54])
        self.assertTrue(
            np.allclose(outcar.sampling_radii, [0.9748, 0.9791, 0.7215]))
        self.assertTrue(np.allclose(outcar.electrostatic_potential,
                                    [-26.0704, -45.5046, -45.5046, -72.9539,
                                     -73.0621, -72.9539, -73.0621]))
class BSVasprunTest(unittest.TestCase):
    """Tests for the lightweight band-structure-only vasprun parser."""

    def test_get_band_structure(self):
        vasprun = BSVasprun(os.path.join(test_dir, 'vasprun_Si_bands.xml'),
                            parse_potcar_file=False)
        kpts_file = os.path.join(test_dir, 'KPOINTS_Si_bands')
        bs = vasprun.get_band_structure(kpoints_filename=kpts_file)
        cbm = bs.get_cbm()
        vbm = bs.get_vbm()
        # Conduction-band minimum.
        self.assertEqual(cbm['kpoint_index'], [13], "wrong cbm kpoint index")
        self.assertAlmostEqual(cbm['energy'], 6.2301, "wrong cbm energy")
        self.assertEqual(cbm['band_index'], {Spin.up: [4], Spin.down: [4]},
                         "wrong cbm bands")
        # Valence-band maximum (degenerate at Gamma and two other k-points).
        self.assertEqual(vbm['kpoint_index'], [0, 63, 64])
        self.assertAlmostEqual(vbm['energy'], 5.6158, "wrong vbm energy")
        self.assertEqual(vbm['band_index'],
                         {Spin.up: [1, 2, 3], Spin.down: [1, 2, 3]},
                         "wrong vbm bands")
        self.assertEqual(vbm['kpoint'].label, "\\Gamma", "wrong vbm label")
        self.assertEqual(cbm['kpoint'].label, None, "wrong cbm label")
        self.assertIn("eigenvalues", vasprun.as_dict()["output"])
class OszicarTest(unittest.TestCase):
    """Tests for OSZICAR (SCF convergence log) parsing."""

    def test_init(self):
        oszicar = Oszicar(os.path.join(test_dir, 'OSZICAR'))
        # Every ionic step carries its own list of electronic steps.
        self.assertEqual(len(oszicar.electronic_steps),
                         len(oszicar.ionic_steps))
        self.assertEqual(len(oszicar.all_energies), 60)
        self.assertAlmostEqual(oszicar.final_energy, -526.63928)
class LocpotTest(unittest.TestCase):
    """Tests for LOCPOT (local potential) parsing."""

    def test_init(self):
        locpot = Locpot.from_file(os.path.join(test_dir, 'LOCPOT'))
        self.assertAlmostEqual(sum(locpot.get_average_along_axis(0)),
                               -217.05226954)
        # Cubic cell: the grid extent is identical along all three axes.
        for axis in (0, 1, 2):
            self.assertAlmostEqual(locpot.get_axis_grid(axis)[-1], 2.87629, 2)
class ChgcarTest(PymatgenTest):
    """Tests for CHGCAR parsing, in-place arithmetic, and round-trips to
    CHGCAR and HDF5 files."""

    def test_init(self):
        # Non-spin-polarized file: integrated spin difference must be zero.
        filepath = os.path.join(test_dir, 'CHGCAR.nospin')
        chg = Chgcar.from_file(filepath)
        self.assertAlmostEqual(chg.get_integrated_diff(0, 2)[0, 1], 0)
        # Spin-polarized file: a known non-zero integrated spin difference.
        filepath = os.path.join(test_dir, 'CHGCAR.spin')
        chg = Chgcar.from_file(filepath)
        self.assertAlmostEqual(chg.get_integrated_diff(0, 1)[0, 1],
                               -0.0043896932237534022)
        # test sum: in-place addition should exactly double the density.
        chg += chg
        self.assertAlmostEqual(chg.get_integrated_diff(0, 1)[0, 1],
                               -0.0043896932237534022 * 2)
        # Reference integrated differences for 6 radial bins out to 3 A.
        filepath = os.path.join(test_dir, 'CHGCAR.Fe3O4')
        chg = Chgcar.from_file(filepath)
        ans = [1.56472768, 3.25985108, 3.49205728, 3.66275028, 3.8045896,
               5.10813352]
        myans = chg.get_integrated_diff(0, 3, 6)
        self.assertTrue(np.allclose(myans[:, 1], ans))

    def test_write(self):
        # Round-trip a spin-polarized CHGCAR and spot-check two known
        # augmentation-occupancy header lines by line number.
        filepath = os.path.join(test_dir, 'CHGCAR.spin')
        chg = Chgcar.from_file(filepath)
        chg.write_file("CHGCAR_pmg")
        with open("CHGCAR_pmg") as f:
            for i, line in enumerate(f):
                if i == 22130:
                    self.assertEqual("augmentation occupancies 1 15\n", line)
                if i == 44255:
                    self.assertEqual("augmentation occupancies 1 15\n", line)
        os.remove("CHGCAR_pmg")

    def test_soc_chgcar(self):
        # Spin-orbit-coupled CHGCAR carries per-axis magnetization channels.
        filepath = os.path.join(test_dir, "CHGCAR.NiO_SOC.gz")
        chg = Chgcar.from_file(filepath)
        self.assertEqual(set(chg.data.keys()),
                         {'total', 'diff_x', 'diff_y', 'diff_z', 'diff'})
        self.assertTrue(chg.is_soc)
        self.assertEqual(chg.data['diff'].shape, chg.data['diff_y'].shape)

        # check our construction of chg.data['diff'] makes sense:
        # |diff| should equal the norm of the (x, y, z) components.
        # this has been checked visually too and seems reasonable
        self.assertEqual(abs(chg.data['diff'][0][0][0]),
                         np.linalg.norm([chg.data['diff_x'][0][0][0],
                                         chg.data['diff_y'][0][0][0],
                                         chg.data['diff_z'][0][0][0]]))

        # and that the net magnetization is about zero
        # note: we get ~ 0.08 here, seems a little high compared to
        # vasp output, but might be due to chgcar limitations?
        self.assertAlmostEqual(chg.net_magnetization, 0.0, places=0)

        # The SOC flag must survive a write/read round-trip.
        chg.write_file("CHGCAR_pmg_soc")
        chg_from_file = Chgcar.from_file("CHGCAR_pmg_soc")
        self.assertTrue(chg_from_file.is_soc)
        os.remove("CHGCAR_pmg_soc")

    def test_hdf5(self):
        # HDF5 export must preserve densities, lattice, fractional
        # coordinates and species, and be readable back via from_hdf5.
        chgcar = Chgcar.from_file(os.path.join(test_dir, "CHGCAR.NiO_SOC.gz"))
        chgcar.to_hdf5("chgcar_test.hdf5")
        import h5py
        with h5py.File("chgcar_test.hdf5", "r") as f:
            self.assertArrayAlmostEqual(np.array(f["vdata"]["total"]),
                                        chgcar.data["total"])
            self.assertArrayAlmostEqual(np.array(f["vdata"]["diff"]),
                                        chgcar.data["diff"])
            self.assertArrayAlmostEqual(np.array(f["lattice"]),
                                        chgcar.structure.lattice.matrix)
            self.assertArrayAlmostEqual(np.array(f["fcoords"]),
                                        chgcar.structure.frac_coords)
            for z in f["Z"]:
                self.assertIn(z, [Element.Ni.Z, Element.O.Z])
            for sp in f["species"]:
                self.assertIn(sp, ["Ni", "O"])

        chgcar2 = Chgcar.from_hdf5("chgcar_test.hdf5")
        self.assertArrayAlmostEqual(chgcar2.data["total"],
                                    chgcar.data["total"])
        os.remove("chgcar_test.hdf5")
class ProcarTest(unittest.TestCase):
    """Tests for PROCAR (orbital projection) parsing."""

    def test_init(self):
        procar = Procar(os.path.join(test_dir, 'PROCAR.simple'))
        self.assertAlmostEqual(procar.get_occupation(0, 'd')[Spin.up], 0)
        self.assertAlmostEqual(procar.get_occupation(0, 's')[Spin.up],
                               0.35381249999999997)
        self.assertAlmostEqual(procar.get_occupation(0, 'p')[Spin.up],
                               1.19540625)
        # Unknown orbital labels must be rejected.
        self.assertRaises(ValueError, procar.get_occupation, 1, 'm')
        self.assertEqual(procar.nbands, 10)
        self.assertEqual(procar.nkpoints, 10)
        self.assertEqual(procar.nions, 3)
        coords = [[0., 0., 0.], [0.25, 0.25, 0.25], [0.75, 0.75, 0.75]]
        structure = Structure(Lattice.cubic(3.), ["Li", "Na", "K"], coords)
        projections = procar.get_projection_on_elements(structure)
        self.assertAlmostEqual(projections[Spin.up][2][2],
                               {'Na': 0.042, 'K': 0.646, 'Li': 0.042})
        # Spin-polarized file: per-spin dxy occupations.
        procar = Procar(os.path.join(test_dir, 'PROCAR'))
        self.assertAlmostEqual(procar.get_occupation(0, 'dxy')[Spin.up],
                               0.96214813853000025)
        self.assertAlmostEqual(procar.get_occupation(0, 'dxy')[Spin.down],
                               0.85796295426000124)

    def test_phase_factors(self):
        procar = Procar(os.path.join(test_dir, 'PROCAR.phase'))
        self.assertAlmostEqual(procar.phase_factors[Spin.up][0, 0, 0, 0],
                               -0.746 + 0.099j)
        self.assertAlmostEqual(procar.phase_factors[Spin.down][0, 0, 0, 0],
                               0.372 - 0.654j)
        # Two Li should have same phase factor.
        self.assertAlmostEqual(procar.phase_factors[Spin.up][0, 0, 0, 0],
                               procar.phase_factors[Spin.up][0, 0, 1, 0])
        self.assertAlmostEqual(procar.phase_factors[Spin.up][0, 0, 2, 0],
                               -0.053 + 0.007j)
        self.assertAlmostEqual(procar.phase_factors[Spin.down][0, 0, 2, 0],
                               0.027 - 0.047j)
class XdatcarTest(unittest.TestCase):
    """Tests for XDATCAR trajectory parsing."""

    def test_init(self):
        # Both file variants hold four Li2O frames.
        for fname in ('XDATCAR_4', 'XDATCAR_5'):
            xdatcar = Xdatcar(os.path.join(test_dir, fname))
            self.assertEqual(len(xdatcar.structures), 4)
            for struct in xdatcar.structures:
                self.assertEqual(struct.formula, "Li2 O1")
        # Concatenating a second trajectory doubles the frame count.
        xdatcar.concatenate(os.path.join(test_dir, 'XDATCAR_4'))
        self.assertEqual(len(xdatcar.structures), 8)
        self.assertIsNotNone(xdatcar.get_string())
class DynmatTest(unittest.TestCase):
    """Tests for DYNMAT (dynamical matrix) parsing."""

    def test_init(self):
        # nosetests pymatgen/io/vasp/tests/test_outputs.py:DynmatTest.test_init
        dynmat = Dynmat(os.path.join(test_dir, 'DYNMAT'))
        self.assertEqual(dynmat.nspecs, 2)
        self.assertEqual(dynmat.natoms, 6)
        self.assertEqual(dynmat.ndisps, 3)
        self.assertTrue(np.allclose(dynmat.masses, [63.546, 196.966]))
        self.assertTrue(4 in dynmat.data)
        self.assertTrue(2 in dynmat.data[4])
        entry = dynmat.data[4][2]
        self.assertTrue(np.allclose(entry['dispvec'], [0., 0.05, 0.]))
        self.assertTrue(np.allclose(entry['dynmat'][3],
                                    [0.055046, -0.298080, 0.]))
        # TODO: test get_phonon_frequencies once cross-checked
class WavecarTest(unittest.TestCase):
    """Tests for the WAVECAR plane-wave coefficient reader."""

    def setUp(self):
        self.w = Wavecar(os.path.join(test_dir, 'WAVECAR.N2'))
        # Reference 10 A cubic lattice; self.b is its reciprocal lattice.
        self.a = np.array([[10.0, 0.0, 0.0], [0.0, 10.0, 0.0],
                           [0.0, 0.0, 10.0]])
        self.vol = np.dot(self.a[0, :], np.cross(self.a[1, :], self.a[2, :]))
        self.b = np.array([np.cross(self.a[1, :], self.a[2, :]),
                           np.cross(self.a[2, :], self.a[0, :]),
                           np.cross(self.a[0, :], self.a[1, :])])
        self.b = 2 * np.pi * self.b / self.vol

    def test_init(self):
        """Header quantities and coefficient shapes for both WAVECAR RTAGs."""
        self.assertEqual(self.w.filename, os.path.join(test_dir, 'WAVECAR.N2'))
        self.assertAlmostEqual(self.w.efermi, -5.7232, places=4)
        self.assertEqual(self.w.encut, 25)
        self.assertEqual(self.w.nb, 9)
        self.assertEqual(self.w.nk, 1)
        self.assertTrue(np.allclose(self.w.a, self.a))
        self.assertTrue(np.allclose(self.w.b, self.b))
        self.assertAlmostEqual(self.w.vol, self.vol)
        self.assertEqual(len(self.w.kpoints), self.w.nk)
        self.assertEqual(len(self.w.coeffs), self.w.nk)
        self.assertEqual(len(self.w.coeffs[0]), self.w.nb)
        self.assertEqual(len(self.w.band_energy), self.w.nk)
        self.assertEqual(self.w.band_energy[0].shape, (self.w.nb, 3))
        self.assertLessEqual(len(self.w.Gpoints[0]), 257)
        for k in range(self.w.nk):
            for b in range(self.w.nb):
                self.assertEqual(len(self.w.coeffs[k][b]),
                                 len(self.w.Gpoints[k]))

        # A truncated/corrupt file must be rejected.
        with self.assertRaises(ValueError):
            Wavecar(os.path.join(test_dir, 'WAVECAR.N2.malformed'))

        # verbose=True should print progress information to stdout.
        import sys
        from io import StringIO
        saved_stdout = sys.stdout
        try:
            out = StringIO()
            sys.stdout = out
            Wavecar(os.path.join(test_dir, 'WAVECAR.N2'), verbose=True)
            self.assertNotEqual(out.getvalue().strip(), '')
        finally:
            sys.stdout = saved_stdout

        # Repeat the header checks for the RTAG=45210 file format.
        self.w = Wavecar(os.path.join(test_dir, 'WAVECAR.N2.45210'))
        self.assertEqual(self.w.filename, os.path.join(test_dir,
                                                       'WAVECAR.N2.45210'))
        self.assertAlmostEqual(self.w.efermi, -5.7232, places=4)
        self.assertEqual(self.w.encut, 25)
        self.assertEqual(self.w.nb, 9)
        self.assertEqual(self.w.nk, 1)
        self.assertTrue(np.allclose(self.w.a, self.a))
        self.assertTrue(np.allclose(self.w.b, self.b))
        self.assertAlmostEqual(self.w.vol, self.vol)
        self.assertEqual(len(self.w.kpoints), self.w.nk)
        self.assertEqual(len(self.w.coeffs), self.w.nk)
        self.assertEqual(len(self.w.coeffs[0]), self.w.nb)
        self.assertEqual(len(self.w.band_energy), self.w.nk)
        self.assertEqual(self.w.band_energy[0].shape, (self.w.nb, 3))
        self.assertLessEqual(len(self.w.Gpoints[0]), 257)

        # Spin-polarized WAVECARs are not supported here.
        with self.assertRaises(ValueError):
            Wavecar(os.path.join(test_dir, 'WAVECAR.N2.spin'))

        # If no G-points can be generated, construction must fail.
        temp_ggp = Wavecar._generate_G_points
        try:
            Wavecar._generate_G_points = lambda x, y: []
            with self.assertRaises(ValueError):
                Wavecar(os.path.join(test_dir, 'WAVECAR.N2'))
        finally:
            # Always restore the monkey-patched classmethod.
            Wavecar._generate_G_points = temp_ggp

    def test__generate_nbmax(self):
        self.w._generate_nbmax()
        self.assertEqual(self.w._nbmax.tolist(), [5, 5, 5])

    def test__generate_G_points(self):
        for k in range(self.w.nk):
            kp = self.w.kpoints[k]
            self.assertLessEqual(len(self.w._generate_G_points(kp)), 257)

    def test_evaluate_wavefunc(self):
        # Append a fake single-coefficient band and check normalization
        # against the analytic value 1/sqrt(V).
        self.w.Gpoints.append(np.array([0, 0, 0]))
        self.w.kpoints.append(np.array([0, 0, 0]))
        self.w.coeffs.append([[1 + 1j]])
        self.assertAlmostEqual(self.w.evaluate_wavefunc(-1, -1, [0, 0, 0]),
                               (1 + 1j) / np.sqrt(self.vol), places=4)
        self.assertAlmostEqual(self.w.evaluate_wavefunc(0, 0, [0, 0, 0]),
                               np.sum(self.w.coeffs[0][0]) / np.sqrt(self.vol),
                               places=4)

    def test_fft_mesh(self):
        mesh = self.w.fft_mesh(0, 5)
        ind = np.argmax(np.abs(mesh))
        self.assertEqual(np.unravel_index(ind, mesh.shape), (14, 1, 1))
        # BUG FIX: np.int was deprecated and removed in NumPy 1.24; the
        # builtin int is the documented replacement for astype().
        self.assertEqual(mesh[tuple((self.w.ng / 2).astype(int))], 0j)
        mesh = self.w.fft_mesh(0, 5, shift=False)
        ind = np.argmax(np.abs(mesh))
        self.assertEqual(np.unravel_index(ind, mesh.shape), (6, 8, 8))
        self.assertEqual(mesh[0, 0, 0], 0j)
if __name__ == "__main__":
    # Allow running this test module directly, outside a test runner.
    unittest.main()
|
nisse3000/pymatgen
|
pymatgen/io/vasp/tests/test_outputs.py
|
Python
|
mit
| 57,196
|
[
"VASP",
"pymatgen"
] |
aa393d21ab010551783e40c7a15d7f3f4ddc6683c2c8571890a98fcc1195dcaf
|
# coding: utf-8
# Copyright (c) Pymatgen Development Team.
# Distributed under the terms of the MIT License.
"""
Utilities for generating nicer plots.
"""
import math
import numpy as np
from pymatgen.core.periodic_table import Element
__author__ = "Shyue Ping Ong"
__copyright__ = "Copyright 2012, The Materials Project"
__version__ = "0.1"
__maintainer__ = "Shyue Ping Ong"
__email__ = "shyuep@gmail.com"
__date__ = "Mar 13, 2012"
def pretty_plot(width=8, height=None, plt=None, dpi=None,
                color_cycle=("qualitative", "Set1_9")):
    """
    Provides a publication quality plot, with nice defaults for font sizes etc.

    Args:
        width (float): Width of plot in inches. Defaults to 8in.
        height (float): Height of plot in inches. Defaults to width * golden
            ratio.
        plt (matplotlib.pyplot): If plt is supplied, changes will be made to an
            existing plot. Otherwise, a new plot will be created.
        dpi (int): Sets dot per inch for figure. Defaults to 300.
        color_cycle (tuple): Set the color cycle for new plots to one of the
            color sets in palettable. Defaults to a qualitative Set1_9.

    Returns:
        Matplotlib plot object with properly sized fonts.
    """
    golden_ratio = (math.sqrt(5) - 1) / 2
    if not height:
        height = int(width * golden_ratio)

    if plt is None:
        import importlib
        import matplotlib.pyplot as plt
        from cycler import cycler
        # Look up the requested palettable color set dynamically.
        palette_mod = importlib.import_module("palettable.colorbrewer.%s" %
                                              color_cycle[0])
        colors = getattr(palette_mod, color_cycle[1]).mpl_colors
        plt.figure(figsize=(width, height), facecolor="w", dpi=dpi)
        plt.gca().set_prop_cycle(cycler('color', colors))
    else:
        plt.gcf().set_size_inches(width, height)

    # Scale tick/label/title font sizes with the figure width.
    ticksize = int(width * 2.5)
    plt.xticks(fontsize=ticksize)
    plt.yticks(fontsize=ticksize)

    axes = plt.gca()
    axes.set_title(axes.get_title(), size=width * 4)
    labelsize = int(width * 3)
    axes.set_xlabel(axes.get_xlabel(), size=labelsize)
    axes.set_ylabel(axes.get_ylabel(), size=labelsize)
    return plt
def pretty_plot_two_axis(x, y1, y2, xlabel=None, y1label=None, y2label=None,
                         width=8, height=None, dpi=300, **plot_kwargs):
    """
    Variant of pretty_plot that does a dual axis plot. Adapted from matplotlib
    examples. Makes it easier to create plots with different axes.

    Args:
        x (np.ndarray/list): Data for x-axis.
        y1 (dict/np.ndarray/list): Data for y1 axis (left). If a dict, it will
            be interpreted as a {label: sequence}.
        y2 (dict/np.ndarray/list): Data for y2 axis (right). If a dict, it will
            be interpreted as a {label: sequence}.
        xlabel (str): If not None, this will be the label for the x-axis.
        y1label (str): If not None, this will be the label for the y1-axis.
        y2label (str): If not None, this will be the label for the y2-axis.
        width (float): Width of plot in inches. Defaults to 8in.
        height (float): Height of plot in inches. Defaults to width * golden
            ratio.
        dpi (int): Sets dot per inch for figure. Defaults to 300.
        plot_kwargs: Passthrough kwargs to matplotlib's plot method. E.g.,
            linewidth, etc.

    Returns:
        matplotlib.pyplot
    """
    import palettable.colorbrewer.diverging

    # Diverging palette: first color for the left axis, last for the right.
    colors = palettable.colorbrewer.diverging.RdYlBu_4.mpl_colors
    c1 = colors[0]
    c2 = colors[-1]

    golden_ratio = (math.sqrt(5) - 1) / 2
    if not height:
        height = int(width * golden_ratio)

    import matplotlib.pyplot as plt

    # BUG FIX: a hard-coded ``width = 12`` previously shadowed the ``width``
    # argument here, so the parameter was silently ignored (and the font
    # sizes disagreed with the height computed above).
    labelsize = int(width * 3)
    ticksize = int(width * 2.5)
    # BUG FIX: the fourth style was ".", which is a marker, not a valid
    # matplotlib linestyle, and raised an error with four or more series;
    # ":" (dotted) was the intended value.
    styles = ["-", "--", "-.", ":"]

    fig, ax1 = plt.subplots()
    fig.set_size_inches((width, height))
    if dpi:
        fig.set_dpi(dpi)

    if isinstance(y1, dict):
        for i, (k, v) in enumerate(y1.items()):
            ax1.plot(x, v, c=c1, marker='s', ls=styles[i % len(styles)],
                     label=k, **plot_kwargs)
        ax1.legend(fontsize=labelsize)
    else:
        ax1.plot(x, y1, c=c1, marker='s', ls='-', **plot_kwargs)

    if xlabel:
        ax1.set_xlabel(xlabel, fontsize=labelsize)
    if y1label:
        # Make the y-axis label, ticks and tick labels match the line color.
        ax1.set_ylabel(y1label, color=c1, fontsize=labelsize)

    ax1.tick_params('x', labelsize=ticksize)
    ax1.tick_params('y', colors=c1, labelsize=ticksize)

    ax2 = ax1.twinx()
    if isinstance(y2, dict):
        for i, (k, v) in enumerate(y2.items()):
            # Consistency fix: forward plot_kwargs to the second axis too,
            # as the docstring promises.
            ax2.plot(x, v, c=c2, marker='o', ls=styles[i % len(styles)],
                     label=k, **plot_kwargs)
        ax2.legend(fontsize=labelsize)
    else:
        ax2.plot(x, y2, c=c2, marker='o', ls='-', **plot_kwargs)

    if y2label:
        # Make the y-axis label, ticks and tick labels match the line color.
        ax2.set_ylabel(y2label, color=c2, fontsize=labelsize)

    ax2.tick_params('y', colors=c2, labelsize=ticksize)
    return plt
def pretty_polyfit_plot(x, y, deg=1, xlabel=None, ylabel=None, **kwargs):
    r"""
    Convenience method to plot data with trend lines based on polynomial fit.

    Args:
        x: Sequence of x data.
        y: Sequence of y data.
        deg (int): Degree of polynomial. Defaults to 1.
        xlabel (str): Label for x-axis.
        ylabel (str): Label for y-axis.
        \*\*kwargs: Keyword args passed to pretty_plot.

    Returns:
        matplotlib.pyplot object.
    """
    plt = pretty_plot(**kwargs)
    # Fit the polynomial, then sample it densely over the data range.
    coeffs = np.polyfit(x, y, deg)
    fit_x = np.linspace(min(x), max(x), 200)
    plt.plot(fit_x, np.polyval(coeffs, fit_x), 'k--', x, y, 'o')
    if xlabel:
        plt.xlabel(xlabel)
    if ylabel:
        plt.ylabel(ylabel)
    return plt
def periodic_table_heatmap(elemental_data, cbar_label="", cbar_label_size=14,
                           show_plot=False, cmap="YlOrRd", cmap_range=None,
                           blank_color="grey", value_format=None, max_row=9):
    """
    A static method that generates a heat map overlayed on a periodic table.

    Args:
        elemental_data (dict): A dictionary with the element as a key and a
            value assigned to it, e.g. surface energy and frequency, etc.
            Elements missing in the elemental_data will be grey by default
            in the final table elemental_data={"Fe": 4.2, "O": 5.0}.
        cbar_label (string): Label of the colorbar. Default is "".
        cbar_label_size (float): Font size for the colorbar label. Default is 14.
        cmap_range (tuple): Minimum and maximum value of the colormap scale.
            If None, the colormap will automatically scale to the range of the
            data.
        show_plot (bool): Whether to show the heatmap. Default is False.
        value_format (str): Formatting string to show values. If None, no value
            is shown. Example: "%.4f" shows float to four decimals.
        cmap (string): Color scheme of the heatmap. Default is 'YlOrRd'.
            Refer to the matplotlib documentation for other options.
        blank_color (string): Color assigned for the missing elements in
            elemental_data. Default is "grey".
        max_row (integer): Maximum number of rows of the periodic table to be
            shown. Default is 9, which means the periodic table heat map covers
            the first 9 rows of elements.

    Returns:
        matplotlib.pyplot module with the populated heatmap figure.
    """
    # Convert primitive_elemental data in the form of numpy array for plotting.
    if cmap_range is not None:
        max_val = cmap_range[1]
        min_val = cmap_range[0]
    else:
        max_val = max(elemental_data.values())
        min_val = min(elemental_data.values())

    max_row = min(max_row, 9)

    if max_row <= 0:
        raise ValueError("The input argument 'max_row' must be positive!")

    # 18 groups per row; NaN marks cells with no element at that position.
    value_table = np.empty((max_row, 18)) * np.nan
    # Sentinel just below the data minimum: missing elements fall under the
    # colormap range and pick up the "under" (blank) color set below.
    blank_value = min_val - 0.01

    for el in Element:
        if el.row > max_row:
            continue
        value = elemental_data.get(el.symbol, blank_value)
        value_table[el.row - 1, el.group - 1] = value

    # Initialize the plt object
    import matplotlib.pyplot as plt
    fig, ax = plt.subplots()
    plt.gcf().set_size_inches(12, 8)

    # We set nan type values to masked values (ie blank spaces)
    data_mask = np.ma.masked_invalid(value_table.tolist())
    heatmap = ax.pcolor(data_mask, cmap=cmap, edgecolors='w', linewidths=1,
                        vmin=min_val - 0.001, vmax=max_val + 0.001)
    cbar = fig.colorbar(heatmap)

    # Grey out missing elements in input data
    cbar.cmap.set_under(blank_color)

    # Set the colorbar label and tick marks
    cbar.set_label(cbar_label, rotation=270, labelpad=25, size=cbar_label_size)
    cbar.ax.tick_params(labelsize=cbar_label_size)

    # Refine and make the table look nice
    ax.axis('off')
    ax.invert_yaxis()

    # Label each block with corresponding element and value
    for i, row in enumerate(value_table):
        for j, el in enumerate(row):
            if not np.isnan(el):
                symbol = Element.from_row_and_group(i + 1, j + 1).symbol
                plt.text(j + 0.5, i + 0.25, symbol,
                         horizontalalignment='center',
                         verticalalignment='center', fontsize=14)
                # Only annotate real data, never the blank sentinel.
                if el != blank_value and value_format is not None:
                    plt.text(j + 0.5, i + 0.5, value_format % el,
                             horizontalalignment='center',
                             verticalalignment='center', fontsize=10)

    plt.tight_layout()

    if show_plot:
        plt.show()

    return plt
def format_formula(formula):
    """
    Converts str of chemical formula into
    latex format for labelling purposes

    Args:
        formula (str): Chemical formula

    Returns:
        str: the formula wrapped in $...$ with digit runs set as
        LaTeX subscripts, e.g. "Fe2O3" -> "$Fe_{2}O_{3}$".
    """
    pieces = []
    digit_run = ""
    for char in formula:
        if char.isdigit():
            # Accumulate consecutive digits into one subscript.
            digit_run += char
        else:
            if digit_run:
                pieces.append("_{%s}" % digit_run)
                digit_run = ""
            pieces.append(char)
    # Flush a trailing digit run (e.g. the "4" in "LiFePO4").
    if digit_run:
        pieces.append("_{%s}" % digit_run)
    return r"$%s$" % "".join(pieces)
def van_arkel_triangle(list_of_materials, annotate=True):
    """
    A static method that generates a binary van Arkel-Ketelaar triangle to
    quantify the ionic, metallic and covalent character of a compound
    by plotting the electronegativity difference (y) vs average (x).

    See:
        A.E. van Arkel, Molecules and Crystals in Inorganic Chemistry,
        Interscience, New York (1956)
    and
        J.A.A Ketelaar, Chemical Constitution (2nd edn.), An Introduction
        to the Theory of the Chemical Bond, Elsevier, New York (1958)

    Args:
        list_of_materials (list): A list of computed entries of binary
            materials or a list of lists containing two elements (str).
        annotate (bool): Whether or not to label the points on the
            triangle with reduced formula (if list of entries) or pair
            of elements (if list of list of str).

    Returns:
        matplotlib.pyplot module with the triangle drawn.
    """
    # F-Fr has the largest X difference. We set this
    # as our top corner of the triangle (most ionic)
    pt1 = np.array([(Element("F").X + Element("Fr").X) / 2,
                    abs(Element("F").X - Element("Fr").X)])
    # Cs-Fr has the lowest average X. We set this as our
    # bottom left corner of the triangle (most metallic)
    pt2 = np.array([(Element("Cs").X + Element("Fr").X) / 2,
                    abs(Element("Cs").X - Element("Fr").X)])
    # O-F has the highest average X. We set this as our
    # bottom right corner of the triangle (most covalent)
    pt3 = np.array([(Element("O").X + Element("F").X) / 2,
                    abs(Element("O").X - Element("F").X)])

    # get the parameters for the lines of the triangle
    # (slope/intercept of the left edge pt2->pt1 and right edge pt1->pt3)
    d = np.array(pt1) - np.array(pt2)
    slope1 = d[1] / d[0]
    b1 = pt1[1] - slope1 * pt1[0]
    d = pt3 - pt1
    slope2 = d[1] / d[0]
    b2 = pt3[1] - slope2 * pt3[0]

    # Initialize the plt object
    import matplotlib.pyplot as plt

    # set labels and appropriate limits for plot
    plt.xlim(pt2[0] - 0.45, -b2 / slope2 + 0.45)
    plt.ylim(-0.45, pt1[1] + 0.45)
    plt.annotate("Ionic", xy=[pt1[0] - 0.3, pt1[1] + 0.05], fontsize=20)
    plt.annotate("Covalent", xy=[-b2 / slope2 - 0.65, -0.4], fontsize=20)
    plt.annotate("Metallic", xy=[pt2[0] - 0.4, -0.4], fontsize=20)
    plt.xlabel(r"$\frac{\chi_{A}+\chi_{B}}{2}$", fontsize=25)
    plt.ylabel(r"$|\chi_{A}-\chi_{B}|$", fontsize=25)

    # Set the lines of the triangle
    chi_list = [el.X for el in Element]
    plt.plot([min(chi_list), pt1[0]], [slope1 * min(chi_list) + b1, pt1[1]],
             'k-', linewidth=3)
    plt.plot([pt1[0], -b2 / slope2], [pt1[1], 0], 'k-', linewidth=3)
    plt.plot([min(chi_list), -b2 / slope2], [0, 0], 'k-', linewidth=3)
    plt.xticks(fontsize=15)
    plt.yticks(fontsize=15)

    # Shade with appropriate colors corresponding to ionic, metallic
    # and covalent character
    ax = plt.gca()
    # ionic filling (yellow)
    ax.fill_between([min(chi_list), pt1[0]],
                    [slope1 * min(chi_list) + b1, pt1[1]], facecolor=[1, 1, 0],
                    zorder=-5, edgecolor=[1, 1, 0])
    # NOTE(review): the second y-value below uses slope2 * min(chi_list) - b1,
    # which does not match the right edge's line equation (slope2 * x + b2);
    # looks suspicious -- confirm against the rendered figure before changing.
    ax.fill_between([pt1[0], -b2 / slope2],
                    [pt1[1], slope2 * min(chi_list) - b1], facecolor=[1, 1, 0],
                    zorder=-5, edgecolor=[1, 1, 0])
    # metal filling (red); Pt's electronegativity bounds the metallic region
    XPt = Element("Pt").X
    ax.fill_between([min(chi_list), (XPt + min(chi_list)) / 2],
                    [0, slope1 * (XPt + min(chi_list)) / 2 + b1],
                    facecolor=[1, 0, 0], zorder=-3, alpha=0.8)
    ax.fill_between([(XPt + min(chi_list)) / 2, XPt],
                    [slope1 * ((XPt + min(chi_list)) / 2) + b1, 0],
                    facecolor=[1, 0, 0], zorder=-3, alpha=0.8)
    # covalent filling (green)
    ax.fill_between([(XPt + min(chi_list)) / 2, ((XPt + min(chi_list)) / 2 + -b2 / slope2) / 2],
                    [0, slope2 * (((XPt + min(chi_list)) / 2 + -b2 / slope2) / 2) + b2],
                    facecolor=[0, 1, 0], zorder=-4, alpha=0.8)
    ax.fill_between([((XPt + min(chi_list)) / 2 + -b2 / slope2) / 2, -b2 / slope2],
                    [slope2 * (((XPt + min(chi_list)) / 2 + -b2 / slope2) / 2) + b2, 0],
                    facecolor=[0, 1, 0], zorder=-4, alpha=0.8)

    # Label the triangle with datapoints
    for entry in list_of_materials:
        if type(entry).__name__ not in ['ComputedEntry', 'ComputedStructureEntry']:
            # A plain [el1, el2] pair of element symbols.
            X_pair = [Element(el).X for el in entry]
            formatted_formula = "%s-%s" % tuple(entry)
        else:
            # A computed entry: use its composition's elements and formula.
            X_pair = [Element(el).X for el in entry.composition.as_dict().keys()]
            formatted_formula = format_formula(entry.composition.reduced_formula)
        plt.scatter(np.mean(X_pair), abs(X_pair[0] - X_pair[1]), c='b', s=100)
        if annotate:
            plt.annotate(formatted_formula, fontsize=15,
                         xy=[np.mean(X_pair) + 0.005, abs(X_pair[0] - X_pair[1])])

    plt.tight_layout()
    return plt
def get_ax_fig_plt(ax=None, **kwargs):
    """
    Helper function used in plot functions supporting an optional Axes argument.
    If ax is None, we build the `matplotlib` figure and create the Axes else
    we return the current active figure.

    Args:
        kwargs: keyword arguments are passed to plt.figure if ax is not None.

    Returns:
        ax: :class:`Axes` object
        figure: matplotlib figure
        plt: matplotlib pyplot module.
    """
    import matplotlib.pyplot as plt
    if ax is not None:
        # Caller supplied an Axes: just hand back the active figure.
        return ax, plt.gcf(), plt
    fig = plt.figure(**kwargs)
    return fig.add_subplot(1, 1, 1), fig, plt
def get_ax3d_fig_plt(ax=None, **kwargs):
    """
    Helper function used in plot functions supporting an optional Axes3D
    argument. If ax is None, we build the `matplotlib` figure and create the
    Axes3D else we return the current active figure.

    Args:
        kwargs: keyword arguments are passed to plt.figure if ax is not None.

    Returns:
        ax: :class:`Axes` object
        figure: matplotlib figure
        plt: matplotlib pyplot module.
    """
    import matplotlib.pyplot as plt
    from mpl_toolkits.mplot3d import axes3d
    if ax is not None:
        # Caller supplied an Axes3D: just hand back the active figure.
        return ax, plt.gcf(), plt
    fig = plt.figure(**kwargs)
    return axes3d.Axes3D(fig), fig, plt
def get_axarray_fig_plt(ax_array, nrows=1, ncols=1, sharex=False, sharey=False,
                        squeeze=True, subplot_kw=None, gridspec_kw=None,
                        **fig_kw):
    """
    Helper function used in plot functions that accept an optional array of Axes
    as argument. If ax_array is None, we build the `matplotlib` figure and
    create the array of Axes by calling plt.subplots else we return the
    current active figure.

    Returns:
        ax: Array of :class:`Axes` objects
        figure: matplotlib figure
        plt: matplotlib pyplot module.
    """
    import matplotlib.pyplot as plt

    if ax_array is None:
        fig, ax_array = plt.subplots(nrows=nrows, ncols=ncols, sharex=sharex,
                                     sharey=sharey, squeeze=squeeze,
                                     subplot_kw=subplot_kw,
                                     gridspec_kw=gridspec_kw, **fig_kw)
        return ax_array, fig, plt

    # Caller supplied axes: normalize to an (nrows, ncols) array and mimic
    # plt.subplots' squeeze semantics.
    fig = plt.gcf()
    ax_array = np.reshape(np.array(ax_array), (nrows, ncols))
    if squeeze:
        if ax_array.size == 1:
            ax_array = ax_array[0]
        elif 1 in ax_array.shape:
            ax_array = ax_array.ravel()
    return ax_array, fig, plt
def add_fig_kwargs(func):
    """
    Decorator that adds keyword arguments for functions returning matplotlib
    figures.

    The function should return either a matplotlib figure or None to signal
    some sort of error/unexpected event.
    See doc string below for the list of supported options.
    """
    from functools import wraps

    @wraps(func)
    def wrapper(*args, **kwargs):
        # pop the kwds used by the decorator.
        title = kwargs.pop("title", None)
        size_kwargs = kwargs.pop("size_kwargs", None)
        show = kwargs.pop("show", True)
        savefig = kwargs.pop("savefig", None)
        tight_layout = kwargs.pop("tight_layout", False)
        ax_grid = kwargs.pop("ax_grid", None)
        ax_annotate = kwargs.pop("ax_annotate", None)
        fig_close = kwargs.pop("fig_close", False)

        # Call func and return immediately if None is returned.
        fig = func(*args, **kwargs)
        if fig is None:
            return fig

        # Operate on matplotlib figure.
        if title is not None:
            fig.suptitle(title)

        if size_kwargs is not None:
            fig.set_size_inches(size_kwargs.pop("w"), size_kwargs.pop("h"),
                                **size_kwargs)

        if ax_grid is not None:
            for ax in fig.axes:
                ax.grid(bool(ax_grid))

        if ax_annotate:
            # Tag subplots (a), (b), ...; repeat the alphabet if there are
            # more axes than letters.
            from string import ascii_letters
            tags = ascii_letters
            if len(fig.axes) > len(tags):
                tags = (1 + len(ascii_letters) // len(fig.axes)) * ascii_letters
            for ax, tag in zip(fig.axes, tags):
                ax.annotate("(%s)" % tag, xy=(0.05, 0.95), xycoords="axes fraction")

        if tight_layout:
            try:
                fig.tight_layout()
            except Exception as exc:
                # For some unknown reason, this problem shows up only on travis.
                # https://stackoverflow.com/questions/22708888/valueerror-when-using-matplotlib-tight-layout
                print("Ignoring Exception raised by fig.tight_layout\n", str(exc))

        if savefig:
            fig.savefig(savefig)

        import matplotlib.pyplot as plt
        if show:
            plt.show()
        if fig_close:
            plt.close(fig=fig)

        return fig

    # Add docstring to the decorated method.
    s = "\n\n" + """\
Keyword arguments controlling the display of the figure:

================ ====================================================
kwargs Meaning
================ ====================================================
title Title of the plot (Default: None).
show True to show the figure (default: True).
savefig "abc.png" or "abc.eps" to save the figure to a file.
size_kwargs Dictionary with options passed to fig.set_size_inches
e.g. size_kwargs=dict(w=3, h=4)
tight_layout True to call fig.tight_layout (default: False)
ax_grid True (False) to add (remove) grid from all axes in fig.
Default: None i.e. fig is left unchanged.
ax_annotate Add labels to subplots e.g. (a), (b).
Default: False
fig_close Close figure. Default: False.
================ ====================================================
"""

    if wrapper.__doc__ is not None:
        # Add s at the end of the docstring.
        wrapper.__doc__ += "\n" + s
    else:
        # Use s
        wrapper.__doc__ = s

    return wrapper
|
gVallverdu/pymatgen
|
pymatgen/util/plotting.py
|
Python
|
mit
| 21,622
|
[
"pymatgen"
] |
6af3af57b2322a6578dbe71fce76581d16e0be11dd4bc397eb5c1de43dae458d
|
#
# Copyright (C) 2013,2014 The ESPResSo project
#
# This file is part of ESPResSo.
#
# ESPResSo is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# ESPResSo is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
#
# Tests particle property setters/getters
import unittest as ut
import espressomd
import numpy as np
from espressomd.magnetostatics import *
class MagnetostaticsInteractionsTests(ut.TestCase):
    """Checks that magnetostatic interaction parameters set in the Espresso
    core are returned unchanged by get_params()."""

    # Handle to espresso system
    system = espressomd.System()

    def paramsMatch(self, inParams, outParams):
        """Check, if the parameters set and gotten back match.
        Only check keys present in inParams.
        """
        # Delete the bjerrum_length from outparams, because
        # it is calculated from the prefactor in some cases
        # and therefore not necessarily in inparams.
        # BUG FIX: this previously deleted outParams["bjerrum"], a key that
        # does not exist, so the guard's success guaranteed a KeyError.
        if "bjerrum_length" in outParams:
            del outParams["bjerrum_length"]

        for k in inParams.keys():
            if k not in outParams:
                print(k, "missing from returned parameters")
                return False
            if outParams[k] != inParams[k]:
                print("Mismatch in parameter ", k, inParams[k], outParams[k])
                return False

        return True

    def setUp(self):
        # Two dipolar particles close together in a cubic box.
        self.system.box_l = 10, 10, 10
        self.system.part[0].pos = 0, 0, 0
        self.system.part[1].pos = 0.1, 0.1, 0.1
        self.system.part[0].dip = 1.3, 2.1, -6
        self.system.part[1].dip = 4.3, 4.1, 7.5

    def generateTestForMagnetostaticInteraction(_interClass, _params):
        """Generates test cases for checking interaction parameters set and gotten back
        from Es actually match. Only keys which are present in _params are checked

        1st: Interaction parameters as dictionary, i.e., {"k"=1.,"r_0"=0.
        2nd: Name of the interaction property to set (i.e. "P3M")
        """
        params = _params
        interClass = _interClass

        def func(self):
            # This code is run at the execution of the generated function.
            # It will use the state of the variables in the outer function,
            # which was there, when the outer function was called
            # set Parameter
            Inter = interClass(**params)
            Inter.validate_params()
            Inter._set_params_in_es_core()

            # Read them out again
            outParams = Inter.get_params()

            self.assertTrue(self.paramsMatch(params, outParams),
                            "Missmatch of parameters.\nParameters set " +
                            params.__str__() + " vs. output parameters " +
                            outParams.__str__())

        return func

    test_DP3M = generateTestForMagnetostaticInteraction(
        DipolarP3M, dict(prefactor=1.0,
                         epsilon=0.0,
                         inter=1000,
                         mesh_off=[0.5, 0.5, 0.5],
                         r_cut=2.4,
                         mesh=[8, 8, 8],
                         cao=1,
                         alpha=12,
                         accuracy=0.01))
    test_DdsCpu = generateTestForMagnetostaticInteraction(
        DipolarDirectSumCpu, dict(prefactor=3.4))
    test_DdsRCpu = generateTestForMagnetostaticInteraction(
        DipolarDirectSumWithReplicaCpu, dict(prefactor=3.4, n_replica=2))
if __name__ == "__main__":
    # Print the compiled-in feature list before handing over to unittest.
    print("Features: ", espressomd.features())
    ut.main()
|
rbardak/espresso
|
testsuite/python/magnetostaticInteractions.py
|
Python
|
gpl-3.0
| 4,400
|
[
"ESPResSo"
] |
2b5857c56117ecffc2e93f671c40ea40fcc2a91921a8f27e5f5b8d48b94bb958
|
# Copyright 2014 Open Connectome Project (http://openconnecto.me)
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
'''
Created on Mar 12, 2012
@author: dsussman
'''
import pyximport;
pyximport.install()
import numpy as np
from scipy import sparse as sp
import mrcap.roi as roi
import mrcap.fibergraph as fibergraph
import zindex
from scipy.io import loadmat, savemat
from collections import Counter
#from mayavi import mlab # DM - commented out
import itertools as itt
# from matplotlib import pyplot as plt # DM - commented out
import mrcap.fa as fa
#import mprage # DM - commented out
import argparse
import os
class ConnectedComponent(object):
    """Connected-component labelling of a fiber graph.

    Vertices are indexed by Morton (z-order) index.  Components are
    relabelled by decreasing size: label 1 is the largest component,
    label 0 marks isolated/unused voxels.
    """
    # vertexCC -- 1d array: component label per vertex (Morton order)
    vertexCC = None
    # ccsize -- Counter of raw component label -> component size
    ccsize = None
    # ncc -- number of connected components found
    ncc = 0
    # n -- total number of vertices
    n = 0
    def __init__(self,G=None, fn=None):
        # Build either from a sparse graph G or from a previously saved file.
        if G is not None:
            self.get_from_fiber_graph(G)
        elif fn is not None:
            self.load_from_file(fn)
    def get_from_fiber_graph(self,G):
        """Label components of G (symmetrised) and relabel them by size.

        NOTE(review): sp.cs_graph_components was removed from scipy long ago;
        modern scipy provides scipy.sparse.csgraph.connected_components --
        confirm the pinned scipy version before reuse.
        """
        # Symmetrise so components are computed on the undirected graph.
        self.ncc,vertexCC = sp.cs_graph_components(G+G.transpose())
        self.n = vertexCC.shape[0]
        # -2 marks isolated vertices; exclude them from the size ranking.
        noniso = np.nonzero(np.not_equal(vertexCC,-2))[0]
        cccounter = Counter(vertexCC[noniso])
        # most_common() orders raw labels by component size (descending).
        cc_badLabel,_ = zip(*cccounter.most_common())
        # Map raw labels to 1..ncc so that 1 is the largest component.
        cc_dict = dict(zip(cc_badLabel, np.arange(self.ncc)+1))
        cc_dict[-2] = 0
        self.vertexCC = np.array([cc_dict[v] for v in vertexCC])
        # Sizes are keyed by the *raw* labels, not the relabelled ones.
        self.ccsize = Counter(vertexCC)
    def save(self,fn, suffix=True):
        # Persist the label vector as a sparse matrix inside a .npy file.
        if suffix:
            np.save(fn+'_concomp.npy',sp.lil_matrix(self.vertexCC))
        else:
            np.save(fn,sp.lil_matrix(self.vertexCC))
    def load_from_file(self,fn):
        # Saved form is a 1 x n sparse matrix; flatten back to a 1d array.
        self.vertexCC = np.load(fn).item().toarray()
        self.n = self.vertexCC.shape[1]
        self.vertexCC = self.vertexCC.reshape(self.n)
    def induced_subgraph(self, G, cc=1):
        """Return the subgraph of G induced by component cc (default: largest)."""
        incc = np.equal(self.vertexCC,cc).nonzero()[0]
        return G[:,incc][incc,:]
    def __getitem__(self,key):
        # int key -> lookup by vertex index; tuple key -> lookup by (x,y,z).
        if type(key) is int:
            return self.get_cc(key)
        elif type(key) is tuple:
            return self.get_coord_cc(key)
    def get_cc(self,v):
        # Component label of vertex v.
        return self.vertexCC[v]
    def get_coord_cc(self,xyz):
        # Component label of the voxel at coordinates xyz.
        return self.get_cc(zindex.XYZMorton(xyz))
    def get_3d_cc(self,shape):
        """Takes a shape which is the shape of the new 3d image and 'colors' the image by connected component

        Input
        =====
        shape -- 3-tuple

        Output
        ======
        cc3d -- array of with shape=shape. colored so that ccz[x,y,z]=vcc[i] where x,y,z is the XYZ coordinates for Morton index i
        """
        # Background stays NaN; only voxels with a nonzero label are written.
        cc3d = np.NaN*np.zeros(shape)
        allCoord = itt.product(*[xrange(sz) for sz in shape])
        # List comprehension used purely for its itemset side effect.
        [cc3d.itemset((xyz), self.vertexCC[zindex.XYZMorton(xyz)])
            for xyz in allCoord if not self.vertexCC[zindex.XYZMorton(xyz)]==0];
        return cc3d
    def get_coords_for_lccs(self, ncc):
        """Computes coordinates for each voxel in the top ncc connected components"""
        # Select voxels whose label is in 1..ncc.
        inlcc = (np.less_equal(self.vertexCC,ncc)*np.greater(self.vertexCC,0)).nonzero()[0]
        coord = np.array([zindex.MortonXYZ(v) for v in inlcc])
        # Output rows are (x, y, z, label).
        return np.concatenate((coord,self.vertexCC[inlcc][np.newaxis].T),axis=1)
def _load_fibergraph(roi_fn, mat_fn):
    """Build a FiberGraph from the ROI file pair at roi_fn and the
    Matlab fibergraph file mat_fn."""
    xml_meta = roi.ROIXML(roi_fn + '.xml')
    shape = xml_meta.getShape()
    roi_data = roi.ROIData(roi_fn + '.raw', shape)
    graph = fibergraph.FiberGraph(shape, roi_data, [])
    graph.loadFromMatlab('fibergraph', mat_fn)
    return graph
def cc_for_each_brain(graphDir, roiDir, ccDir, figDir):
    """Go through the directory graphDir and find the connected components

    Saves the all connected component info in ccDir and saves some 3d-pics into figDir
    If figDir is None then it does not save
    """
    fiberSfx = '_fiber.mat'
    roiSfx = '_roi'
    # Brain id is the filename part before the first underscore.
    brainFiles = [fn.split('_')[0] for fn in os.listdir(graphDir)]
    for brainFn in brainFiles:
        print "Processing brain "+brainFn
        fg = _load_fibergraph(roiDir+brainFn+roiSfx,graphDir+brainFn+fiberSfx)
        print 'Processing connected components'
        vcc = ConnectedComponent(fg.graph)
        vcc.save(ccDir+brainFn)
        print 'ncc='+repr(vcc.ncc)
        if figDir:
            # NOTE(review): save_figures is only defined inside the
            # commented-out mayavi block below, so this call raises
            # NameError when figDir is set -- confirm before enabling.
            save_figures(vcc.get_coords_for_lccs(10), figDir+brainFn)
        # Drop the fiber graph before loading the next (large) brain.
        del fg
'''
Created on June 29, 2012
@author: dmhembe1
Determine lcc on a single big graph a provided my a remote user
This is for use in the one-click processing pipeline to be found at http://www.openconnecto.me/STUB
'''
def process_single_brain(graph_fn, lccOutputFileName):
print "Computint LCC for single brain... "
vcc = ConnectedComponent(loadmat(graph_fn)['fibergraph'])
if not os.path.exists(os.path.dirname(lccOutputFileName)):
print "Creating lcc directory %s" % os.path.dirname(lccOutputFileName)
os.makedirs(os.path.dirname(lccOutputFileName))
lcc = sp.lil_matrix(vcc.vertexCC)
np.save(lccOutputFileName, lcc) # save as .npy
return lcc
def get_slice(img3d, s, xyz):
    """Return a 2d slice of the 3d volume img3d.

    s -- slice index along the chosen axis
    xyz -- projection plane: one of 'xy', 'xz' or 'yz'
    The axis flips/transposes put the slice into display orientation.
    If xyz is unrecognised a message is printed and None is returned
    implicitly.
    """
    if xyz=='xy':
        return img3d[:,:,s]
    if xyz=='xz':
        return img3d[:,s,::-1].T
    if xyz=='yz':
        return img3d[s,::-1,::-1].T
    print 'Not a valid view'
def show_overlay(img3d, cc3d, ncc=10, s=85, xyz = 'xy',alpha=.8):
    """Shows the connected components overlayed over img3d

    Input
    ======
    img3d -- 3d array
    cc3d -- 3d array ( preferably of same shape as img3d, use get_3d_cc(...) )
    ncc -- where to cut off the color scale
    s -- slice to show
    xyz -- which projection to use in {'xy','xz','yz'}
    """
    # NOTE(review): relies on matplotlib's pyplot as `plt`, whose import is
    # commented out at the top of this module -- this raises NameError
    # unless the caller's environment provides plt; confirm before use.
    cc = get_slice(cc3d,s,xyz)
    img = get_slice(img3d,s,xyz)
    # NaN marks voxels outside every component (see get_3d_cc).
    notcc = np.isnan(cc)
    incc = np.not_equal(notcc,True)
    img4 = plt.cm.gray(img/np.nanmax(img))
    if ncc is not np.Inf:
        cc = plt.cm.jet(cc/float(ncc))
    else:
        # No cut-off given: colour on a log scale instead.
        cc = plt.cm.jet(np.log(cc)/np.log(np.nanmax(cc)))
    # Outside any component: show the underlying image pixel.
    cc[notcc,:]=img4[notcc,:]
    # Inside a component: alpha-blend against the image brightness.
    cc[incc,3] = 1-img[incc]/(2*np.nanmax(img))
    plt.imshow(cc)
    #if ncc is not np.Inf:
    #    plt.imshow(cc,cmap=plt.cm.jet,clim=(1,ncc))
    #else:
    #    plt.imshow(np.log(cc),cmap=plt.cm.jet)
    #plt.imshow(img,alpha=alpha,cmap=plt.cm.gray)
def save_fa_overlay(faDir, ccDir, figDir, slist, orientationList):
    """Save one PDF per brain/slice pair overlaying connected components
    on the FA volume.

    NOTE(review): uses `plt`, whose import is commented out at module top;
    confirm matplotlib availability before calling.
    """
    brainFiles = [fn.split('_')[0] for fn in os.listdir(ccDir)]
    f = plt.figure();
    for bfn in brainFiles:
        vcc = ConnectedComponent(fn=ccDir+bfn+'_concomp.npy')
        fax = fa.FAXML(faDir+bfn+'_fa.xml')
        fas = fa.FAData(faDir+bfn+'_fa.raw',fax.getShape())
        cc3d = vcc.get_3d_cc(fax.getShape())
        # One figure per (slice, orientation) pair.
        for view,s,xyz in zip(np.arange(len(slist)),slist,orientationList):
            show_overlay(fas.data,cc3d,np.Inf,s,xyz,.5)
            plt.savefig(figDir+bfn+'_ccfaOverlay_view'+repr(view)+'.pdf',)
            plt.clf()
    plt.close(f)
def save_overlay(faDir, mprDir, ccDir, figDir, slist, orientationList):
    """Save 4-panel PDFs comparing CC overlays on FA and MPRAGE volumes.

    NOTE(review): uses `plt` and `mprage`, whose imports are commented out
    at the top of this module -- confirm availability before calling.
    """
    brainFiles = [fn.split('_')[0] for fn in os.listdir(ccDir)]
    f = plt.figure(figsize=(14,9));
    for bfn in brainFiles:
        vcc = ConnectedComponent(fn=ccDir+bfn+'_concomp.npy')
        fax = fa.FAXML(faDir+bfn+'_fa.xml')
        fas = fa.FAData(faDir+bfn+'_fa.raw',fax.getShape())
        mpx = mprage.MPRAGEXML(mprDir+'mprage_'+bfn+'_ss_crop.xml')
        mpd = mprage.MPRAGEData(mprDir+'mprage_'+bfn+'_ss_crop.raw',mpx.getShape())
        cc3d = vcc.get_3d_cc(fax.getShape())
        for view,s,xyz in zip(np.arange(len(slist)),slist,orientationList):
            plt.clf()
            # Panel 1: components over FA.
            plt.subplot(221);
            plt.title('FA Overlay')
            show_overlay(fas.data,cc3d,np.Inf,s,xyz,.5)
            # Panel 2: raw FA slice.
            plt.subplot(222);
            plt.title('FA Original; '+bfn+', '+xyz+'-slice '+repr(s))
            plt.imshow(get_slice(fas.data,s,xyz),cmap=plt.cm.gray)
            plt.colorbar()
            # Panel 3: components over MPRAGE.
            plt.subplot(223); plt.title('MPRAGE Overlay')
            show_overlay(mpd.data,cc3d,np.Inf,s,xyz,.5)
            # Panel 4: raw MPRAGE slice.
            plt.subplot(224);
            plt.title('MPRAGE Original')
            plt.imshow(get_slice(mpd.data,s,xyz),cmap=plt.cm.gray)
            plt.colorbar()
            #plt.tight_layout()
            plt.savefig(figDir+bfn+'_ccfaOverlay_view'+repr(view)+'.pdf')
    plt.close(f)
'''
def save_figures(coord, fn):
"""Saves 3 images which are 3d color representations of the coordinates in coord
Input
=====
coord -- an nx4 array of x,y,z coordinates and another scalar that gives color
fn -- save filename prefix"""
x,y,z,c = np.hsplit(coord,4)
f = mlab.figure()
mlab.points3d(x,y,z,c, mask_points=50, scale_mode='none',scale_factor=2.0)
mlab.view(0,180)
mlab.savefig(fn+'_view0,180.png',figure=f,magnification=4)
mlab.view(0,90)
mlab.savefig(fn+'_view0,90.png',figure=f,magnification=4)
mlab.view(90,90)
mlab.savefig(fn+'_view90,90.png',figure=f,magnification=4)
mlab.close(f)
'''
def get_3d_cc(vcc,shape):
    """Color a 3d volume by connected component.

    Input
    =====
    vcc   1d array of component labels, indexed by Morton index
    shape 3-tuple giving the shape of the output volume

    Output
    ======
    cc3d  array with shape=shape, where cc3d[x,y,z] = vcc[i] for the
          Morton index i of (x,y,z); voxels with label 0 stay NaN
    """
    volume = np.NaN * np.zeros(shape)
    for coord in itt.product(*[xrange(n) for n in shape]):
        label = vcc[zindex.XYZMorton(coord)]
        if label != 0:
            volume.itemset(coord, label)
    return volume
def main ():
  """Parse command-line arguments, build the fiber graph, and save its
  connected-component labels to the given output file."""
  parser = argparse.ArgumentParser(description='Draw the ROI map of a brain.')
  parser.add_argument('roixmlfile', action="store")
  parser.add_argument('roirawfile', action="store")
  parser.add_argument('fibergraphfile', action="store")
  parser.add_argument('ccfile', action="store")
  result = parser.parse_args()
  roix = roi.ROIXML(result.roixmlfile)
  rois = roi.ROIData(result.roirawfile, roix.getShape())
  fg = fibergraph.FiberGraph(roix.getShape(),rois,[])
  fg.loadFromMatlab('fibergraph', result.fibergraphfile)
  vcc = ConnectedComponent(G=fg.graph)
  # BUG FIX: was `results.ccfile` -- `results` is undefined; the parsed
  # argparse namespace is bound to `result`.
  vcc.save(result.ccfile)
if __name__=='__main__':
  # Added for -h flag # DM
  parser = argparse.ArgumentParser(description="Largest connected component generator")
  result = parser.parse_args()
  # NOTE(review): input/output locations are hard-coded cluster paths --
  # adjust before running outside the original environment.
  graphDir = '/mnt/braingraph1data/projects/MRN/graphs/biggraphs/'
  roiDir = '/mnt/braingraph1data/projects/will/mar12data/roi/'
  ccDir = '/data/biggraphs/connectedcomp/'
  figDir = '/home/dsussman/Dropbox/Figures/DTMRI/lccPics/'
  cc_for_each_brain(graphDir, roiDir, ccDir, figDir)
|
neurodata/ndgrutedb
|
MR-OCP/mrcap/lcc.py
|
Python
|
apache-2.0
| 11,171
|
[
"Mayavi"
] |
ff287064e4e680749d0c583a3650bccff4d7777dd311762945ac16c2572f7480
|
# (C) British Crown Copyright 2010 - 2015, Met Office
#
# This file is part of Iris.
#
# Iris is free software: you can redistribute it and/or modify it under
# the terms of the GNU Lesser General Public License as published by the
# Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# Iris is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Lesser General Public License for more details.
#
# You should have received a copy of the GNU Lesser General Public License
# along with Iris. If not, see <http://www.gnu.org/licenses/>.
from __future__ import (absolute_import, division, print_function)
from six.moves import (filter, input, map, range, zip) # noqa
# Import iris tests first so that some things can be initialised before
# importing anything else
import iris.tests as tests
import datetime
from distutils.version import StrictVersion
import gribapi
import numpy as np
import iris
import iris.exceptions
import iris.fileformats.grib
from iris.tests import mock
import iris.tests.stock
import iris.util
# Run tests in no graphics mode if matplotlib is not available.
if tests.MPL_AVAILABLE:
import matplotlib.pyplot as plt
from matplotlib.colors import LogNorm
import iris.plot as iplt
import iris.quickplot as qplt
# Construct a mock object to mimic the gribapi for GribWrapper testing.
_mock_gribapi = mock.Mock(spec=gribapi)
# Use plain Exception as the stand-in for gribapi's internal error type.
_mock_gribapi.GribInternalError = Exception
def _mock_gribapi_fetch(message, key):
"""
Fake the gribapi key-fetch.
Fetch key-value from the fake message (dictionary).
If the key is not present, raise the diagnostic exception.
"""
if key in message:
return message[key]
else:
raise _mock_gribapi.GribInternalError
def _mock_gribapi__grib_is_missing(grib_message, keyname):
"""
Fake the gribapi key-existence enquiry.
Return whether the key exists in the fake message (dictionary).
"""
return (keyname not in grib_message)
def _mock_gribapi__grib_get_native_type(grib_message, keyname):
"""
Fake the gribapi type-discovery operation.
Return type of key-value in the fake message (dictionary).
If the key is not present, raise the diagnostic exception.
"""
if keyname in grib_message:
return type(grib_message[keyname])
raise _mock_gribapi.GribInternalError(keyname)
# Wire the fake fetch/enquiry helpers into the mocked gribapi entry points,
# so GribWrapper's calls are served from the FakeGribMessage dictionary.
_mock_gribapi.grib_get_long = mock.Mock(side_effect=_mock_gribapi_fetch)
_mock_gribapi.grib_get_string = mock.Mock(side_effect=_mock_gribapi_fetch)
_mock_gribapi.grib_get_double = mock.Mock(side_effect=_mock_gribapi_fetch)
_mock_gribapi.grib_get_double_array = mock.Mock(
    side_effect=_mock_gribapi_fetch)
_mock_gribapi.grib_is_missing = mock.Mock(
    side_effect=_mock_gribapi__grib_is_missing)
_mock_gribapi.grib_get_native_type = mock.Mock(
    side_effect=_mock_gribapi__grib_get_native_type)
# define seconds in an hour, for general test usage
_hour_secs = 3600.0
class FakeGribMessage(dict):
    """
    A 'fake grib message' object, for testing GribWrapper construction.

    Behaves as a dictionary, containing key-values for message keys.
    """
    def __init__(self, **kwargs):
        """
        Create a fake message object.

        General keys can be set/add as required via **kwargs.
        The keys 'edition' and 'time_code' are specially managed.
        """
        # Start with a bare dictionary
        dict.__init__(self)
        # Extract specially-recognised keys.
        edition = kwargs.pop('edition', 1)
        time_code = kwargs.pop('time_code', None)
        # Set the minimally required keys.
        self._init_minimal_message(edition=edition)
        # Also set a time-code, if given.
        if time_code is not None:
            self.set_timeunit_code(time_code)
        # Finally, add any remaining passed key-values.
        self.update(**kwargs)
    def _init_minimal_message(self, edition=1):
        # Set values for all the required keys.
        # 'edition' controls the edition-specific keys.
        self.update({
            'Ni': 1,
            'Nj': 1,
            'numberOfValues': 1,
            'alternativeRowScanning': 0,
            'centre': 'ecmf',
            'year': 2007,
            'month': 3,
            'day': 23,
            'hour': 12,
            'minute': 0,
            'indicatorOfUnitOfTimeRange': 1,
            'shapeOfTheEarth': 6,
            'gridType': 'rotated_ll',
            'angleOfRotation': 0.0,
            'iDirectionIncrementInDegrees': 0.036,
            'jDirectionIncrementInDegrees': 0.036,
            'iScansNegatively': 0,
            'jScansPositively': 1,
            'longitudeOfFirstGridPointInDegrees': -5.70,
            'latitudeOfFirstGridPointInDegrees': -4.452,
            'jPointsAreConsecutive': 0,
            'values': np.array([[1.0]]),
            # 9999 marks deliberately-unrecognised parameter codes.
            'indicatorOfParameter': 9999,
            'parameterNumber': 9999,
        })
        # Add edition-dependent settings.
        self['edition'] = edition
        if edition == 1:
            self.update({
                'startStep': 24,
                'timeRangeIndicator': 1,
                'P1': 2, 'P2': 0,
                # time unit - needed AS WELL as 'indicatorOfUnitOfTimeRange'
                'unitOfTime': 1,
                'table2Version': 9999,
            })
        if edition == 2:
            self.update({
                'iDirectionIncrementGiven': 1,
                'jDirectionIncrementGiven': 1,
                'uvRelativeToGrid': 0,
                'forecastTime': 24,
                'productDefinitionTemplateNumber': 0,
                'stepRange': 24,
                'discipline': 9999,
                'parameterCategory': 9999,
                'tablesVersion': 4
            })
    def set_timeunit_code(self, timecode):
        # Do timecode setting (somewhat edition-dependent).
        self['indicatorOfUnitOfTimeRange'] = timecode
        if self['edition'] == 1:
            # for some odd reason, GRIB1 code uses *both* of these
            # NOTE kludge -- the 2 keys are really the same thing
            self['unitOfTime'] = timecode
@tests.skip_data
class TestGribLoad(tests.GraphicsTest):
    """
    Integration tests which load GRIB1/GRIB2 sample files into cubes and
    compare the results against stored CML reference files (and reference
    images, for the plotting tests).  Requires the external test data.
    """
    def setUp(self):
        # Enable the hindcast workaround only for the duration of each test.
        iris.fileformats.grib.hindcast_workaround = True
    def tearDown(self):
        iris.fileformats.grib.hindcast_workaround = False
    def test_load(self):
        # Basic loads covering rotated grids and time-bound fields.
        cubes = iris.load(tests.get_data_path(('GRIB', 'rotated_uk',
                                               "uk_wrongparam.grib1")))
        self.assertCML(cubes, ("grib_load", "rotated.cml"))
        cubes = iris.load(tests.get_data_path(('GRIB', "time_processed",
                                               "time_bound.grib1")))
        self.assertCML(cubes, ("grib_load", "time_bound_grib1.cml"))
        cubes = iris.load(tests.get_data_path(('GRIB', "time_processed",
                                               "time_bound.grib2")))
        self.assertCML(cubes, ("grib_load", "time_bound_grib2.cml"))
        cubes = iris.load(tests.get_data_path(('GRIB', "3_layer_viz",
                                               "3_layer.grib2")))
        # Reorder into a deterministic sequence for CML comparison.
        cubes = iris.cube.CubeList([cubes[1], cubes[0], cubes[2]])
        self.assertCML(cubes, ("grib_load", "3_layer.cml"))
    def test_load_masked(self):
        gribfile = tests.get_data_path(
            ('GRIB', 'missing_values', 'missing_values.grib2'))
        cubes = iris.load(gribfile)
        self.assertCML(cubes, ('grib_load', 'missing_values_grib2.cml'))
    @tests.skip_plot
    def test_y_fastest(self):
        cubes = iris.load(tests.get_data_path(("GRIB", "y_fastest",
                                               "y_fast.grib2")))
        self.assertCML(cubes, ("grib_load", "y_fastest.cml"))
        iplt.contourf(cubes[0])
        plt.gca().coastlines()
        plt.title("y changes fastest")
        self.check_graphic()
    @tests.skip_plot
    def test_ij_directions(self):
        # Check all four i/j scan-direction combinations load consistently.
        def old_compat_load(name):
            cube = iris.load(tests.get_data_path(('GRIB', 'ij_directions',
                                                  name)))[0]
            return [cube]
        cubes = old_compat_load("ipos_jpos.grib2")
        self.assertCML(cubes, ("grib_load", "ipos_jpos.cml"))
        iplt.contourf(cubes[0])
        plt.gca().coastlines()
        plt.title("ipos_jpos cube")
        self.check_graphic()
        cubes = old_compat_load("ipos_jneg.grib2")
        self.assertCML(cubes, ("grib_load", "ipos_jneg.cml"))
        iplt.contourf(cubes[0])
        plt.gca().coastlines()
        plt.title("ipos_jneg cube")
        self.check_graphic()
        cubes = old_compat_load("ineg_jneg.grib2")
        self.assertCML(cubes, ("grib_load", "ineg_jneg.cml"))
        iplt.contourf(cubes[0])
        plt.gca().coastlines()
        plt.title("ineg_jneg cube")
        self.check_graphic()
        cubes = old_compat_load("ineg_jpos.grib2")
        self.assertCML(cubes, ("grib_load", "ineg_jpos.cml"))
        iplt.contourf(cubes[0])
        plt.gca().coastlines()
        plt.title("ineg_jpos cube")
        self.check_graphic()
    def test_shape_of_earth(self):
        # One sample file per GRIB2 'shapeOfTheEarth' code 0..7, plus GRIB1.
        def old_compat_load(name):
            cube = iris.load(tests.get_data_path(('GRIB', 'shape_of_earth',
                                                  name)))[0]
            return cube
        # pre-defined sphere
        cube = old_compat_load("0.grib2")
        self.assertCML(cube, ("grib_load", "earth_shape_0.cml"))
        # custom sphere
        cube = old_compat_load("1.grib2")
        self.assertCML(cube, ("grib_load", "earth_shape_1.cml"))
        # IAU65 oblate sphere
        cube = old_compat_load("2.grib2")
        self.assertCML(cube, ("grib_load", "earth_shape_2.cml"))
        # custom oblate spheroid (km)
        cube = old_compat_load("3.grib2")
        self.assertCML(cube, ("grib_load", "earth_shape_3.cml"))
        # IAG-GRS80 oblate spheroid
        cube = old_compat_load("4.grib2")
        self.assertCML(cube, ("grib_load", "earth_shape_4.cml"))
        # WGS84
        cube = old_compat_load("5.grib2")
        self.assertCML(cube, ("grib_load", "earth_shape_5.cml"))
        # pre-defined sphere
        cube = old_compat_load("6.grib2")
        self.assertCML(cube, ("grib_load", "earth_shape_6.cml"))
        # custom oblate spheroid (m)
        cube = old_compat_load("7.grib2")
        self.assertCML(cube, ("grib_load", "earth_shape_7.cml"))
        # grib1 - same as grib2 shape 6, above
        cube = old_compat_load("global.grib1")
        self.assertCML(cube, ("grib_load", "earth_shape_grib1.cml"))
    @tests.skip_plot
    def test_polar_stereo_grib1(self):
        cube = iris.load_cube(tests.get_data_path(
            ("GRIB", "polar_stereo", "ST4.2013052210.01h")))
        self.assertCML(cube, ("grib_load", "polar_stereo_grib1.cml"))
        qplt.contourf(cube, norm=LogNorm())
        plt.gca().coastlines()
        plt.gca().gridlines()
        plt.title("polar stereo grib1")
        self.check_graphic()
    @tests.skip_plot
    def test_polar_stereo_grib2(self):
        cube = iris.load_cube(tests.get_data_path(
            ("GRIB", "polar_stereo",
             "CMC_glb_TMP_ISBL_1015_ps30km_2013052000_P006.grib2")))
        self.assertCML(cube, ("grib_load", "polar_stereo_grib2.cml"))
        qplt.contourf(cube)
        plt.gca().coastlines()
        plt.gca().gridlines()
        plt.title("polar stereo grib2")
        self.check_graphic()
    @tests.skip_plot
    def test_lambert_grib1(self):
        cube = iris.load_cube(tests.get_data_path(
            ("GRIB", "lambert", "lambert.grib1")))
        self.assertCML(cube, ("grib_load", "lambert_grib1.cml"))
        qplt.contourf(cube)
        plt.gca().coastlines()
        plt.gca().gridlines()
        plt.title("lambert grib1")
        self.check_graphic()
    @tests.skip_plot
    def test_lambert_grib2(self):
        cube = iris.load_cube(tests.get_data_path(
            ("GRIB", "lambert", "lambert.grib2")))
        self.assertCML(cube, ("grib_load", "lambert_grib2.cml"))
        qplt.contourf(cube)
        plt.gca().coastlines()
        plt.gca().gridlines()
        plt.title("lambert grib2")
        self.check_graphic()
    def test_regular_gg_grib1(self):
        cube = iris.load_cube(tests.get_data_path(
            ('GRIB', 'gaussian', 'regular_gg.grib1')))
        self.assertCML(cube, ('grib_load', 'regular_gg_grib1.cml'))
    def test_regular_gg_grib2(self):
        cube = iris.load_cube(tests.get_data_path(
            ('GRIB', 'gaussian', 'regular_gg.grib2')))
        self.assertCML(cube, ('grib_load', 'regular_gg_grib2.cml'))
    def test_reduced_ll(self):
        cube = iris.load_cube(tests.get_data_path(
            ("GRIB", "reduced", "reduced_ll.grib1")))
        self.assertCML(cube, ("grib_load", "reduced_ll_grib1.cml"))
    def test_reduced_gg(self):
        cube = iris.load_cube(tests.get_data_path(
            ("GRIB", "reduced", "reduced_gg.grib2")))
        self.assertCML(cube, ("grib_load", "reduced_gg_grib2.cml"))
    def test_reduced_missing(self):
        cube = iris.load_cube(tests.get_data_path(
            ("GRIB", "reduced", "reduced_ll_missing.grib1")))
        self.assertCML(cube, ("grib_load", "reduced_ll_missing_grib1.cml"))
class TestGribTimecodes(tests.IrisTest):
    """
    Check GribWrapper's handling of forecast time-unit codes, using the
    module-level mocked gribapi so no real GRIB files are needed.
    """
    def _run_timetests(self, test_set):
        # Check the unit-handling for given units-codes and editions.
        # Operates on lists of cases for various time-units and grib-editions.
        # Format: (edition, code, expected-exception,
        #          equivalent-seconds, description-string)
        with mock.patch('iris.fileformats.grib.gribapi', _mock_gribapi):
            for test_controls in test_set:
                (
                    grib_edition, timeunit_codenum,
                    expected_error,
                    timeunit_secs, timeunit_str
                ) = test_controls
                # Construct a suitable fake test message.
                message = FakeGribMessage(
                    edition=grib_edition,
                    time_code=timeunit_codenum
                )
                if expected_error:
                    # Expect GribWrapper construction to fail.
                    with self.assertRaises(type(expected_error)) as ar_context:
                        msg = iris.fileformats.grib.GribWrapper(message)
                    self.assertEqual(
                        ar_context.exception.args,
                        expected_error.args)
                    continue
                # 'ELSE'...
                # Expect the wrapper construction to work.
                # Make a GribWrapper object and test it.
                wrapped_msg = iris.fileformats.grib.GribWrapper(message)
                # Check the units string.
                forecast_timeunit = wrapped_msg._forecastTimeUnit
                self.assertEqual(
                    forecast_timeunit, timeunit_str,
                    'Bad unit string for edition={ed:01d}, '
                    'unitcode={code:01d} : '
                    'expected="{wanted}" GOT="{got}"'.format(
                        ed=grib_edition,
                        code=timeunit_codenum,
                        wanted=timeunit_str,
                        got=forecast_timeunit
                    )
                )
                # Check the data-starttime calculation.
                interval_start_to_end = (
                    wrapped_msg._phenomenonDateTime
                    - wrapped_msg._referenceDateTime
                )
                # The forecast-period key differs between editions.
                if grib_edition == 1:
                    interval_from_units = wrapped_msg.P1
                else:
                    interval_from_units = wrapped_msg.forecastTime
                interval_from_units *= datetime.timedelta(0, timeunit_secs)
                self.assertEqual(
                    interval_start_to_end, interval_from_units,
                    'Inconsistent start time offset for edition={ed:01d}, '
                    'unitcode={code:01d} : '
                    'from-unit="{unit_str}" '
                    'from-phenom-minus-ref="{e2e_str}"'.format(
                        ed=grib_edition,
                        code=timeunit_codenum,
                        unit_str=interval_from_units,
                        e2e_str=interval_start_to_end
                    )
                )
    # Test groups of testcases for various time-units and grib-editions.
    # Format: (edition, code, expected-exception,
    #          equivalent-seconds, description-string)
    def test_timeunits_common(self):
        tests = (
            (1, 0, None, 60.0, 'minutes'),
            (1, 1, None, _hour_secs, 'hours'),
            (1, 2, None, 24.0 * _hour_secs, 'days'),
            (1, 10, None, 3.0 * _hour_secs, '3 hours'),
            (1, 11, None, 6.0 * _hour_secs, '6 hours'),
            (1, 12, None, 12.0 * _hour_secs, '12 hours'),
        )
        TestGribTimecodes._run_timetests(self, tests)
    @staticmethod
    def _err_bad_timeunit(code):
        # Build the exception expected for an unhandled time-unit code.
        return iris.exceptions.NotYetImplementedError(
            'Unhandled time unit for forecast '
            'indicatorOfUnitOfTimeRange : {code}'.format(code=code)
        )
    def test_timeunits_grib1_specific(self):
        tests = (
            (1, 13, None, 0.25 * _hour_secs, '15 minutes'),
            (1, 14, None, 0.5 * _hour_secs, '30 minutes'),
            (1, 254, None, 1.0, 'seconds'),
            (1, 111, TestGribTimecodes._err_bad_timeunit(111), 1.0, '??'),
        )
        TestGribTimecodes._run_timetests(self, tests)
    def test_timeunits_grib2_specific(self):
        tests = (
            (2, 13, None, 1.0, 'seconds'),
            # check the extra grib1 keys FAIL
            (2, 14, TestGribTimecodes._err_bad_timeunit(14), 0.0, '??'),
            (2, 254, TestGribTimecodes._err_bad_timeunit(254), 0.0, '??'),
        )
        TestGribTimecodes._run_timetests(self, tests)
    def test_timeunits_calendar(self):
        # Calendar-based units are deliberately unsupported.
        tests = (
            (1, 3, TestGribTimecodes._err_bad_timeunit(3), 0.0, 'months'),
            (1, 4, TestGribTimecodes._err_bad_timeunit(4), 0.0, 'years'),
            (1, 5, TestGribTimecodes._err_bad_timeunit(5), 0.0, 'decades'),
            (1, 6, TestGribTimecodes._err_bad_timeunit(6), 0.0, '30 years'),
            (1, 7, TestGribTimecodes._err_bad_timeunit(7), 0.0, 'centuries'),
        )
        TestGribTimecodes._run_timetests(self, tests)
    def test_timeunits_invalid(self):
        tests = (
            (1, 111, TestGribTimecodes._err_bad_timeunit(111), 1.0, '??'),
            (2, 27, TestGribTimecodes._err_bad_timeunit(27), 1.0, '??'),
        )
        TestGribTimecodes._run_timetests(self, tests)
    def test_load_probability_forecast(self):
        # Test GribWrapper interpretation of PDT 4.9 data.
        # NOTE:
        # Currently Iris has only partial support for PDT 4.9.
        # Though it can load the data, key metadata (thresholds) is lost.
        # At present, we are not testing for this.
        # Make a testing grib message in memory, with gribapi.
        grib_message = gribapi.grib_new_from_samples('GRIB2')
        gribapi.grib_set_long(grib_message, 'productDefinitionTemplateNumber',
                              9)
        gribapi.grib_set_string(grib_message, 'stepRange', '10-55')
        grib_wrapper = iris.fileformats.grib.GribWrapper(grib_message)
        # Define two expected datetimes for _periodEndDateTime as
        # gribapi v1.9.16 mis-calculates this.
        # See https://software.ecmwf.int/wiki/display/GRIB/\
        # GRIB+API+version+1.9.18+released
        try:
            # gribapi v1.9.16 has no __version__ attribute.
            gribapi_ver = gribapi.__version__
        except AttributeError:
            gribapi_ver = gribapi.grib_get_api_version()
        if StrictVersion(gribapi_ver) < StrictVersion('1.9.18'):
            exp_end_date = datetime.datetime(year=2007, month=3, day=25,
                                             hour=12, minute=0, second=0)
        else:
            exp_end_date = datetime.datetime(year=2007, month=3, day=25,
                                             hour=19, minute=0, second=0)
        # Check that it captures the statistics time period info.
        # (And for now, nothing else)
        self.assertEqual(
            grib_wrapper._referenceDateTime,
            datetime.datetime(year=2007, month=3, day=23,
                              hour=12, minute=0, second=0)
        )
        self.assertEqual(
            grib_wrapper._periodStartDateTime,
            datetime.datetime(year=2007, month=3, day=23,
                              hour=22, minute=0, second=0)
        )
        self.assertEqual(grib_wrapper._periodEndDateTime, exp_end_date)
    def test_warn_unknown_pdts(self):
        # Test loading of an unrecognised GRIB Product Definition Template.
        # Get a temporary file by name (deleted afterward by context).
        with self.temp_filename() as temp_gribfile_path:
            # Write a test grib message to the temporary file.
            with open(temp_gribfile_path, 'wb') as temp_gribfile:
                grib_message = gribapi.grib_new_from_samples('GRIB2')
                # Set the PDT to something unexpected.
                gribapi.grib_set_long(
                    grib_message, 'productDefinitionTemplateNumber', 5)
                gribapi.grib_write(grib_message, temp_gribfile)
            # Load the message from the file as a cube.
            cube_generator = iris.fileformats.grib.load_cubes(
                temp_gribfile_path)
            cube = next(cube_generator)
            # Check the cube has an extra "warning" attribute.
            self.assertEqual(
                cube.attributes['GRIB_LOAD_WARNING'],
                'unsupported GRIB2 ProductDefinitionTemplate: #4.5'
            )
class TestGribSimple(tests.IrisTest):
    # A testing class that does not need the test data.
    def mock_grib(self):
        """Return a mock grib message with the minimal attribute set needed
        by cube_from_message; subclasses add edition-specific keys."""
        # A mock grib message, with attributes that can't be Mocks themselves.
        grib = mock.Mock()
        grib.startStep = 0
        grib.phenomenon_points = lambda unit: 3
        grib._forecastTimeUnit = "hours"
        grib.productDefinitionTemplateNumber = 0
        # define a level type (NB these 2 are effectively the same)
        grib.levelType = 1
        grib.typeOfFirstFixedSurface = 1
        grib.typeOfSecondFixedSurface = 1
        return grib
    def cube_from_message(self, grib):
        """Translate the fake message into a cube via GribWrapper and the
        standard load rules, with gribapi mocked out."""
        # Parameter translation now uses the GribWrapper, so we must convert
        # the Mock-based fake message to a FakeGribMessage.
        with mock.patch('iris.fileformats.grib.gribapi', _mock_gribapi):
            grib_message = FakeGribMessage(**grib.__dict__)
            wrapped_msg = iris.fileformats.grib.GribWrapper(grib_message)
            cube, _, _ = iris.fileformats.rules._make_cube(
                wrapped_msg, iris.fileformats.grib.load_rules.convert)
        return cube
class TestGrib1LoadPhenomenon(TestGribSimple):
    # Test recognition of grib phenomenon types.
    def mock_grib(self):
        # Extend the base fake message with GRIB edition 1 markers.
        grib = super(TestGrib1LoadPhenomenon, self).mock_grib()
        grib.edition = 1
        return grib
    def test_grib1_unknownparam(self):
        # Unrecognised table/param: no name, unknown units.
        grib = self.mock_grib()
        grib.table2Version = 0
        grib.indicatorOfParameter = 9999
        cube = self.cube_from_message(grib)
        self.assertEqual(cube.standard_name, None)
        self.assertEqual(cube.long_name, None)
        self.assertEqual(cube.units, iris.unit.Unit("???"))
    def test_grib1_unknown_local_param(self):
        # Local table (>= 128): unknown params get a descriptive long_name.
        grib = self.mock_grib()
        grib.table2Version = 128
        grib.indicatorOfParameter = 999
        cube = self.cube_from_message(grib)
        self.assertEqual(cube.standard_name, None)
        self.assertEqual(cube.long_name, 'UNKNOWN LOCAL PARAM 999.128')
        self.assertEqual(cube.units, iris.unit.Unit("???"))
    def test_grib1_unknown_standard_param(self):
        grib = self.mock_grib()
        grib.table2Version = 1
        grib.indicatorOfParameter = 975
        cube = self.cube_from_message(grib)
        self.assertEqual(cube.standard_name, None)
        self.assertEqual(cube.long_name, 'UNKNOWN LOCAL PARAM 975.1')
        self.assertEqual(cube.units, iris.unit.Unit("???"))
    def known_grib1(self, param, standard_str, units_str):
        # Helper: assert that a known table-1 parameter maps to the
        # expected standard name and units.
        grib = self.mock_grib()
        grib.table2Version = 1
        grib.indicatorOfParameter = param
        cube = self.cube_from_message(grib)
        self.assertEqual(cube.standard_name, standard_str)
        self.assertEqual(cube.long_name, None)
        self.assertEqual(cube.units, iris.unit.Unit(units_str))
    def test_grib1_known_standard_params(self):
        # at present, there are just a very few of these
        self.known_grib1(11, 'air_temperature', 'kelvin')
        self.known_grib1(33, 'x_wind', 'm s-1')
        self.known_grib1(34, 'y_wind', 'm s-1')
class TestGrib2LoadPhenomenon(TestGribSimple):
    # Test recognition of grib phenomenon types.
    def mock_grib(self):
        # Extend the base fake message with GRIB edition 2 markers.
        grib = super(TestGrib2LoadPhenomenon, self).mock_grib()
        grib.edition = 2
        grib._forecastTimeUnit = 'hours'
        grib._forecastTime = 0.0
        grib.phenomenon_points = lambda unit: [0.0]
        return grib
    def known_grib2(self, discipline, category, param,
                    standard_name, long_name, units_str):
        # Helper: assert that a (discipline, category, number) triple maps
        # to the expected names and units.
        grib = self.mock_grib()
        grib.discipline = discipline
        grib.parameterCategory = category
        grib.parameterNumber = param
        cube = self.cube_from_message(grib)
        try:
            iris_unit = iris.unit.Unit(units_str)
        except ValueError:
            # Units string not recognised by iris.unit: expect 'unknown'.
            iris_unit = iris.unit.Unit('???')
        self.assertEqual(cube.standard_name, standard_name)
        self.assertEqual(cube.long_name, long_name)
        self.assertEqual(cube.units, iris_unit)
    def test_grib2_unknownparam(self):
        grib = self.mock_grib()
        grib.discipline = 999
        grib.parameterCategory = 999
        grib.parameterNumber = 9999
        cube = self.cube_from_message(grib)
        self.assertEqual(cube.standard_name, None)
        self.assertEqual(cube.long_name, None)
        self.assertEqual(cube.units, iris.unit.Unit("???"))
    def test_grib2_known_standard_params(self):
        # check we know how to translate at least these params
        # I.E. all the ones the older scheme provided.
        full_set = [
            (0, 0, 0, "air_temperature", None, "kelvin"),
            (0, 0, 2, "air_potential_temperature", None, "K"),
            (0, 1, 0, "specific_humidity", None, "kg kg-1"),
            (0, 1, 1, "relative_humidity", None, "%"),
            (0, 1, 3, None, "precipitable_water", "kg m-2"),
            (0, 1, 22, None, "cloud_mixing_ratio", "kg kg-1"),
            (0, 1, 13, "liquid_water_content_of_surface_snow", None, "kg m-2"),
            (0, 2, 1, "wind_speed", None, "m s-1"),
            (0, 2, 2, "x_wind", None, "m s-1"),
            (0, 2, 3, "y_wind", None, "m s-1"),
            (0, 2, 8, "lagrangian_tendency_of_air_pressure", None, "Pa s-1"),
            (0, 2, 10, "atmosphere_absolute_vorticity", None, "s-1"),
            (0, 3, 0, "air_pressure", None, "Pa"),
            (0, 3, 1, "air_pressure_at_sea_level", None, "Pa"),
            (0, 3, 3, None, "icao_standard_atmosphere_reference_height", "m"),
            (0, 3, 5, "geopotential_height", None, "m"),
            (0, 3, 9, "geopotential_height_anomaly", None, "m"),
            (0, 6, 1, "cloud_area_fraction", None, "%"),
            (0, 6, 6, "atmosphere_mass_content_of_cloud_liquid_water", None,
             "kg m-2"),
            (0, 7, 6,
             "atmosphere_specific_convective_available_potential_energy",
             None, "J kg-1"),
            (0, 7, 7, None, "convective_inhibition", "J kg-1"),
            (0, 7, 8, None, "storm_relative_helicity", "J kg-1"),
            (0, 14, 0, "atmosphere_mole_content_of_ozone", None, "Dobson"),
            (2, 0, 0, "land_area_fraction", None, "1"),
            (10, 2, 0, "sea_ice_area_fraction", None, "1")]
        for (discipline, category, number,
             standard_name, long_name, units) in full_set:
            self.known_grib2(discipline, category, number,
                             standard_name, long_name, units)
# Allow running this test module standalone.
if __name__ == "__main__":
    tests.main()
|
ghislainp/iris
|
lib/iris/tests/test_grib_load.py
|
Python
|
gpl-3.0
| 28,650
|
[
"Gaussian"
] |
c8de41bcaea929c269010c78f44927ff5c4c4db4b573bb88b16cad48e5ccbf32
|
# Copyright (c) 2006, 2008, 2010, 2013-2014 LOGILAB S.A. (Paris, FRANCE) <contact@logilab.fr>
# Copyright (c) 2014 Brett Cannon <brett@python.org>
# Copyright (c) 2014 Arun Persaud <arun@nubati.net>
# Copyright (c) 2015-2016 Claudiu Popa <pcmanticore@gmail.com>
# Copyright (c) 2015 Ionel Cristian Maries <contact@ionelmc.ro>
# Copyright (c) 2017 hippo91 <guillaume.peillex@gmail.com>
# Licensed under the GPL: https://www.gnu.org/licenses/old-licenses/gpl-2.0.html
# For details: https://github.com/PyCQA/pylint/blob/master/COPYING
"""
generic classes/functions for pyreverse core/extensions
"""
from __future__ import print_function
import os
import re
import sys
########### pyreverse option utils ##############################
# Name of the per-user pyreverse configuration file, looked up in $HOME.
RCFILE = '.pyreverserc'
def get_default_options():
    """Read the per-user config file and return a list of default options.

    The file named by ``RCFILE`` is looked up in the directory given by the
    ``HOME`` environment variable and split on whitespace.  An empty list is
    returned when ``HOME`` is unset or the file cannot be read.
    """
    options = []
    home = os.environ.get('HOME', '')
    if home:
        rcfile = os.path.join(home, RCFILE)
        try:
            # Use a context manager so the file handle is always closed;
            # the previous code relied on garbage collection to close it.
            with open(rcfile) as stream:
                options = stream.read().split()
        except IOError:
            pass  # ignore if no config file found
    return options
def insert_default_options():
    """Splice the user's default options into sys.argv just after argv[0],
    preserving their order from the config file.
    """
    sys.argv[1:1] = get_default_options()
# astroid utilities ###########################################################
# Name-classification patterns, following Python naming conventions:
# dunder names such as __init__
SPECIAL = re.compile('^__[A-Za-z0-9]+[A-Za-z0-9_]*__$')
# double-leading-underscore (name-mangled) attributes
PRIVATE = re.compile('^__[_A-Za-z0-9]*[A-Za-z0-9]+_?$')
# single-leading-underscore names
PROTECTED = re.compile('^_[_A-Za-z0-9]*$')
def get_visibility(name):
    """Return the visibility of *name*: 'public', 'protected', 'private'
    or 'special'.
    """
    # Try the most specific patterns first; anything unmatched is public.
    for pattern, label in ((SPECIAL, 'special'),
                           (PRIVATE, 'private'),
                           (PROTECTED, 'protected')):
        if pattern.match(name):
            return label
    return 'public'
# Class names containing "Abstract" are treated as abstract definitions.
ABSTRACT = re.compile('^.*Abstract.*')
# All-uppercase names are treated as final definitions.
FINAL = re.compile('^[A-Z_]*$')
def is_abstract(node):
    """Return a truthy value if the given class node corresponds to an
    abstract class definition (its name contains "Abstract").
    """
    return ABSTRACT.match(node.name)
def is_final(node):
    """Return a truthy value if the given class/function node corresponds
    to a final definition (its name is all upper case).
    """
    return FINAL.match(node.name)
def is_interface(node):
    """Return True when the node is typed as an interface (bw compat)."""
    return node.type == 'interface'
def is_exception(node):
    """Return True when the node is typed as an exception (bw compat)."""
    return node.type == 'exception'
# Helpers #####################################################################
# Bit flags, one per visibility category; a filter mode is the sum of the
# flags it *hides*.
_CONSTRUCTOR = 1
_SPECIAL = 2
_PROTECTED = 4
_PRIVATE = 8
# Predefined filter modes, keyed by the names accepted on the command line.
MODES = {
    'ALL' : 0,
    'PUB_ONLY' : _SPECIAL + _PROTECTED + _PRIVATE,
    'SPECIAL' : _SPECIAL,
    'OTHER' : _PROTECTED + _PRIVATE,
}
# Maps a get_visibility() result onto its hide-flag ('public' is never hidden).
VIS_MOD = {'special': _SPECIAL, 'protected': _PROTECTED,
           'private': _PRIVATE, 'public': 0}
class FilterMixIn(object):
    """Filter nodes according to a mode and the nodes' visibility."""

    def __init__(self, mode):
        """Build the filter bit mask from a '+'-separated list of mode names."""
        mask = 0
        for part in mode.split('+'):
            try:
                mask += MODES[part]
            except KeyError as exc:
                # Unknown names are reported but do not abort the run.
                print('Unknown filter mode %s' % exc, file=sys.stderr)
        self.__mode = mask

    def show_attr(self, node):
        """Return true if the node should be treated (i.e. not filtered out)."""
        visibility = get_visibility(getattr(node, 'name', node))
        return not self.__mode & VIS_MOD[visibility]
class ASTWalker(object):
    """Preorder tree walker dispatching to a handler object.

    On entering a node the handler's ``visit_<classname>`` method is called,
    and on leaving it ``leave_<classname>`` (class names lower-cased).  When
    a specific method is missing, ``visit_default`` / ``leave_default`` is
    used instead, if defined.
    """

    def __init__(self, handler):
        self.handler = handler
        # node class -> (enter callback, leave callback)
        self._cache = {}

    def walk(self, node, _done=None):
        """Recursively walk the tree rooted at *node*, firing callbacks."""
        if _done is None:
            _done = set()
        # A node reached twice means the tree has a cycle - that is a bug.
        if node in _done:
            raise AssertionError((id(node), node, node.parent))
        _done.add(node)
        self.visit(node)
        for child in node.get_children():
            assert child is not node
            self.walk(child, _done)
        self.leave(node)
        assert node.parent is not node

    def get_callbacks(self, node):
        """Return the (enter, leave) callbacks for *node*, cached per class."""
        klass = node.__class__
        try:
            return self._cache[klass]
        except KeyError:
            kind = klass.__name__.lower()
            handler = self.handler
            enter = getattr(handler, 'visit_%s' % kind,
                            getattr(handler, 'visit_default', None))
            leave = getattr(handler, 'leave_%s' % kind,
                            getattr(handler, 'leave_default', None))
            callbacks = (enter, leave)
            self._cache[klass] = callbacks
            return callbacks

    def visit(self, node):
        """Fire the enter callback for *node*, if any."""
        enter = self.get_callbacks(node)[0]
        if enter is not None:
            enter(node)

    def leave(self, node):
        """Fire the leave callback for *node*, if any."""
        leave = self.get_callbacks(node)[1]
        if leave is not None:
            leave(node)
class LocalsVisitor(ASTWalker):
    """Visit a project by traversing the locals dictionary.

    Acts as its own handler: callbacks are looked up on self.
    """

    def __init__(self):
        ASTWalker.__init__(self, self)
        self._visited = {}

    def visit(self, node):
        """Launch the visit starting from the given node."""
        if node in self._visited:
            return None
        self._visited[node] = 1  # FIXME: use set ?
        enter, leave = self.get_callbacks(node)
        if enter is not None:
            enter(node)
        if hasattr(node, 'locals'):  # skip Instance and other proxy
            for local_node in node.values():
                self.visit(local_node)
        if leave is not None:
            return leave(node)
        return None
|
lucidmotifs/auto-aoc
|
.venv/lib/python3.5/site-packages/pylint/pyreverse/utils.py
|
Python
|
mit
| 6,195
|
[
"VisIt"
] |
972fd777b2fd8390909cb7d48834aec24ba943f6a900d38cd11e89beea71aa15
|
"""
Test the about xblock
"""
import datetime
import ddt
import mock
import pytz
from ccx_keys.locator import CCXLocator
from django.conf import settings
from django.urls import reverse
from django.test.utils import override_settings
from milestones.tests.utils import MilestonesTestCaseMixin
from mock import patch
from six import text_type
from waffle.testutils import override_switch
from course_modes.models import CourseMode
from lms.djangoapps.ccx.tests.factories import CcxFactory
from openedx.core.djangoapps.waffle_utils.testutils import override_waffle_flag
from openedx.features.course_experience.waffle import WAFFLE_NAMESPACE as COURSE_EXPERIENCE_WAFFLE_NAMESPACE
from openedx.features.course_experience.waffle import ENABLE_COURSE_ABOUT_SIDEBAR_HTML
from openedx.features.course_experience import COURSE_ENABLE_UNENROLLED_ACCESS_FLAG
from shoppingcart.models import Order, PaidCourseRegistration
from student.models import CourseEnrollment
from student.tests.factories import AdminFactory, CourseEnrollmentAllowedFactory, UserFactory
from track.tests import EventTrackingTestCase
from util.milestones_helpers import get_prerequisite_courses_display, set_prerequisite_courses
from xmodule.course_module import (
CATALOG_VISIBILITY_ABOUT,
CATALOG_VISIBILITY_NONE,
COURSE_VISIBILITY_PRIVATE,
COURSE_VISIBILITY_PUBLIC_OUTLINE,
COURSE_VISIBILITY_PUBLIC
)
from xmodule.modulestore.tests.django_utils import (
TEST_DATA_MIXED_MODULESTORE,
TEST_DATA_SPLIT_MODULESTORE,
ModuleStoreTestCase,
SharedModuleStoreTestCase
)
from xmodule.modulestore.tests.factories import CourseFactory, ItemFactory
from xmodule.modulestore.tests.utils import TEST_DATA_DIR
from xmodule.modulestore.xml_importer import import_course_from_xml
from .helpers import LoginEnrollmentTestCase
# HTML for registration button
REG_STR = "<form id=\"class_enroll_form\" method=\"post\" data-remote=\"true\" action=\"/change_enrollment\">"
# Error message shown when a Shibboleth-authenticated user may not enroll.
SHIB_ERROR_STR = "The currently logged-in user account does not have permission to enroll in this course."
@ddt.ddt
class AboutTestCase(LoginEnrollmentTestCase, SharedModuleStoreTestCase, EventTrackingTestCase, MilestonesTestCaseMixin):
    """
    Tests about xblock.
    """

    @classmethod
    def setUpClass(cls):
        super(AboutTestCase, cls).setUpClass()
        cls.course = CourseFactory.create()
        cls.course_without_about = CourseFactory.create(catalog_visibility=CATALOG_VISIBILITY_NONE)
        cls.course_with_about = CourseFactory.create(catalog_visibility=CATALOG_VISIBILITY_ABOUT)
        cls.purchase_course = CourseFactory.create(org='MITx', number='buyme', display_name='Course To Buy')
        # One "about" block per course; each ItemFactory call attaches the
        # block to its course, only the last assignment to cls.about is kept.
        cls.about = ItemFactory.create(
            category="about", parent_location=cls.course.location,
            data="OOGIE BLOOGIE", display_name="overview"
        )
        cls.about = ItemFactory.create(
            category="about", parent_location=cls.course_without_about.location,
            data="WITHOUT ABOUT", display_name="overview"
        )
        cls.about = ItemFactory.create(
            category="about", parent_location=cls.course_with_about.location,
            data="WITH ABOUT", display_name="overview"
        )

    def setUp(self):
        super(AboutTestCase, self).setUp()
        # Give the purchasable course a priced mode.
        self.course_mode = CourseMode(
            course_id=self.purchase_course.id,
            mode_slug=CourseMode.DEFAULT_MODE_SLUG,
            mode_display_name=CourseMode.DEFAULT_MODE_SLUG,
            min_price=10
        )
        self.course_mode.save()

    def test_anonymous_user(self):
        """
        This test asserts that a non-logged in user can visit the course about page
        """
        url = reverse('about_course', args=[text_type(self.course.id)])
        resp = self.client.get(url)
        self.assertEqual(resp.status_code, 200)
        self.assertIn("OOGIE BLOOGIE", resp.content)
        # Check that registration button is present
        self.assertIn(REG_STR, resp.content)

    def test_logged_in(self):
        """
        This test asserts that a logged-in user can visit the course about page
        """
        self.setup_user()
        url = reverse('about_course', args=[text_type(self.course.id)])
        resp = self.client.get(url)
        self.assertEqual(resp.status_code, 200)
        self.assertIn("OOGIE BLOOGIE", resp.content)

    def test_already_enrolled(self):
        """
        Asserts that the end user sees the appropriate messaging
        when he/she visits the course about page, but is already enrolled
        """
        self.setup_user()
        self.enroll(self.course, True)
        url = reverse('about_course', args=[text_type(self.course.id)])
        resp = self.client.get(url)
        self.assertEqual(resp.status_code, 200)
        self.assertIn("You are enrolled in this course", resp.content)
        self.assertIn("View Course", resp.content)

    @override_settings(COURSE_ABOUT_VISIBILITY_PERMISSION="see_about_page")
    def test_visible_about_page_settings(self):
        """
        Verify that the About Page honors the permission settings in the course module
        """
        url = reverse('about_course', args=[text_type(self.course_with_about.id)])
        resp = self.client.get(url)
        self.assertEqual(resp.status_code, 200)
        self.assertIn("WITH ABOUT", resp.content)
        url = reverse('about_course', args=[text_type(self.course_without_about.id)])
        resp = self.client.get(url)
        self.assertEqual(resp.status_code, 404)

    @patch.dict(settings.FEATURES, {'ENABLE_MKTG_SITE': True})
    def test_logged_in_marketing(self):
        self.setup_user()
        url = reverse('about_course', args=[text_type(self.course.id)])
        resp = self.client.get(url)
        # should be redirected
        self.assertEqual(resp.status_code, 302)
        # follow this time, and check we're redirected to the course home page
        resp = self.client.get(url, follow=True)
        target_url = resp.redirect_chain[-1][0]
        course_home_url = reverse('openedx.course_experience.course_home', args=[text_type(self.course.id)])
        self.assertTrue(target_url.endswith(course_home_url))

    @patch.dict(settings.FEATURES, {'ENABLE_COURSE_HOME_REDIRECT': False})
    @patch.dict(settings.FEATURES, {'ENABLE_MKTG_SITE': True})
    def test_logged_in_marketing_without_course_home_redirect(self):
        """
        Verify user is not redirected to course home page when
        ENABLE_COURSE_HOME_REDIRECT is set to False
        """
        self.setup_user()
        url = reverse('about_course', args=[text_type(self.course.id)])
        resp = self.client.get(url)
        # should not be redirected
        self.assertEqual(resp.status_code, 200)
        self.assertIn("OOGIE BLOOGIE", resp.content)

    @patch.dict(settings.FEATURES, {'ENABLE_COURSE_HOME_REDIRECT': True})
    @patch.dict(settings.FEATURES, {'ENABLE_MKTG_SITE': False})
    def test_logged_in_marketing_without_mktg_site(self):
        """
        Verify user is not redirected to course home page when
        ENABLE_MKTG_SITE is set to False
        """
        self.setup_user()
        url = reverse('about_course', args=[text_type(self.course.id)])
        resp = self.client.get(url)
        # should not be redirected
        self.assertEqual(resp.status_code, 200)
        self.assertIn("OOGIE BLOOGIE", resp.content)

    @patch.dict(settings.FEATURES, {'ENABLE_PREREQUISITE_COURSES': True})
    def test_pre_requisite_course(self):
        pre_requisite_course = CourseFactory.create(org='edX', course='900', display_name='pre requisite course')
        course = CourseFactory.create(pre_requisite_courses=[text_type(pre_requisite_course.id)])
        self.setup_user()
        url = reverse('about_course', args=[text_type(course.id)])
        resp = self.client.get(url)
        self.assertEqual(resp.status_code, 200)
        pre_requisite_courses = get_prerequisite_courses_display(course)
        pre_requisite_course_about_url = reverse('about_course', args=[text_type(pre_requisite_courses[0]['key'])])
        self.assertIn(u"<span class=\"important-dates-item-text pre-requisite\"><a href=\"{}\">{}</a></span>"
                      .format(pre_requisite_course_about_url, pre_requisite_courses[0]['display']),
                      resp.content.decode(resp.charset).strip('\n'))

    @patch.dict(settings.FEATURES, {'ENABLE_PREREQUISITE_COURSES': True})
    def test_about_page_unfulfilled_prereqs(self):
        pre_requisite_course = CourseFactory.create(
            org='edX',
            course='901',
            display_name='pre requisite course',
        )
        pre_requisite_courses = [text_type(pre_requisite_course.id)]
        # for this failure to occur, the enrollment window needs to be in the past
        course = CourseFactory.create(
            org='edX',
            course='1000',
            # closed enrollment
            enrollment_start=datetime.datetime(2013, 1, 1),
            enrollment_end=datetime.datetime(2014, 1, 1),
            start=datetime.datetime(2013, 1, 1),
            end=datetime.datetime(2030, 1, 1),
            pre_requisite_courses=pre_requisite_courses,
        )
        set_prerequisite_courses(course.id, pre_requisite_courses)
        self.setup_user()
        self.enroll(self.course, True)
        self.enroll(pre_requisite_course, True)
        url = reverse('about_course', args=[text_type(course.id)])
        resp = self.client.get(url)
        self.assertEqual(resp.status_code, 200)
        pre_requisite_courses = get_prerequisite_courses_display(course)
        pre_requisite_course_about_url = reverse('about_course', args=[text_type(pre_requisite_courses[0]['key'])])
        self.assertIn(u"<span class=\"important-dates-item-text pre-requisite\"><a href=\"{}\">{}</a></span>"
                      .format(pre_requisite_course_about_url, pre_requisite_courses[0]['display']),
                      resp.content.decode(resp.charset).strip('\n'))
        # Use six.text_type (not the Python-2-only `unicode` builtin), for
        # consistency with the rest of this module and Python 3 support.
        url = reverse('about_course', args=[text_type(pre_requisite_course.id)])
        resp = self.client.get(url)
        self.assertEqual(resp.status_code, 200)

    @ddt.data(
        [COURSE_VISIBILITY_PRIVATE],
        [COURSE_VISIBILITY_PUBLIC_OUTLINE],
        [COURSE_VISIBILITY_PUBLIC],
    )
    @ddt.unpack
    def test_about_page_public_view(self, course_visibility):
        """
        Assert that anonymous or unenrolled users see View Course option
        when unenrolled access flag is set
        """
        with mock.patch('xmodule.course_module.CourseDescriptor.course_visibility', course_visibility):
            with override_waffle_flag(COURSE_ENABLE_UNENROLLED_ACCESS_FLAG, active=True):
                url = reverse('about_course', args=[text_type(self.course.id)])
                resp = self.client.get(url)
                self.assertEqual(resp.status_code, 200)
                if course_visibility == COURSE_VISIBILITY_PUBLIC or course_visibility == COURSE_VISIBILITY_PUBLIC_OUTLINE:
                    self.assertIn("View Course", resp.content)
                else:
                    self.assertIn("Enroll in", resp.content)
class AboutTestCaseXML(LoginEnrollmentTestCase, ModuleStoreTestCase):
    """
    Tests for the course about page
    """
    MODULESTORE = TEST_DATA_MIXED_MODULESTORE

    def setUp(self):
        """
        Set up the tests
        """
        super(AboutTestCaseXML, self).setUp()
        # The following test course (which lives at common/test/data/2014)
        # is closed; we're testing that an about page still appears when
        # the course is already closed
        self.xml_course_id = self.store.make_course_key('edX', 'detached_pages', '2014')
        import_course_from_xml(
            self.store,
            'test_user',
            TEST_DATA_DIR,
            source_dirs=['2014'],
            static_content_store=None,
            target_id=self.xml_course_id,
            raise_on_failure=True,
            create_if_not_present=True,
        )
        # this text appears in that course's about page
        # common/test/data/2014/about/overview.html
        self.xml_data = "about page 463139"

    @patch.dict('django.conf.settings.FEATURES', {'DISABLE_START_DATES': False})
    def test_logged_in_xml(self):
        # A logged-in user can see the about page of the closed XML course.
        self.setup_user()
        url = reverse('about_course', args=[text_type(self.xml_course_id)])
        resp = self.client.get(url)
        self.assertEqual(resp.status_code, 200)
        self.assertIn(self.xml_data, resp.content)

    @patch.dict('django.conf.settings.FEATURES', {'DISABLE_START_DATES': False})
    def test_anonymous_user_xml(self):
        # An anonymous user can see the about page of the closed XML course.
        url = reverse('about_course', args=[text_type(self.xml_course_id)])
        resp = self.client.get(url)
        self.assertEqual(resp.status_code, 200)
        self.assertIn(self.xml_data, resp.content)
class AboutWithCappedEnrollmentsTestCase(LoginEnrollmentTestCase, SharedModuleStoreTestCase):
    """
    This test case will check the About page when a course has a capped enrollment
    """
    @classmethod
    def setUpClass(cls):
        super(AboutWithCappedEnrollmentsTestCase, cls).setUpClass()
        # Course that allows exactly one enrollment.
        cls.course = CourseFactory.create(metadata={"max_student_enrollments_allowed": 1})
        cls.about = ItemFactory.create(
            category="about", parent_location=cls.course.location,
            data="OOGIE BLOOGIE", display_name="overview"
        )

    def test_enrollment_cap(self):
        """
        This test will make sure that enrollment caps are enforced
        """
        self.setup_user()
        url = reverse('about_course', args=[text_type(self.course.id)])
        resp = self.client.get(url)
        self.assertEqual(resp.status_code, 200)
        self.assertIn('<a href="#" class="register">', resp.content)
        # First user takes the single available seat.
        self.enroll(self.course, verify=True)
        # create a new account since the first account is already enrolled in the course
        self.email = 'foo_second@test.com'
        self.password = 'bar'
        self.username = 'test_second'
        self.create_account(self.username, self.email, self.password)
        self.activate_user(self.email)
        self.login(self.email, self.password)
        # Get the about page again and make sure that the page says that the course is full
        resp = self.client.get(url)
        self.assertEqual(resp.status_code, 200)
        self.assertIn("Course is full", resp.content)
        # Try to enroll as well
        result = self.enroll(self.course)
        self.assertFalse(result)
        # Check that registration button is not present
        self.assertNotIn(REG_STR, resp.content)
class AboutWithInvitationOnly(SharedModuleStoreTestCase):
    """
    This test case will check the About page when a course is invitation only.
    """
    @classmethod
    def setUpClass(cls):
        super(AboutWithInvitationOnly, cls).setUpClass()
        cls.course = CourseFactory.create(metadata={"invitation_only": True})
        cls.about = ItemFactory.create(
            category="about", parent_location=cls.course.location,
            display_name="overview"
        )

    def test_invitation_only(self):
        """
        Test for user not logged in, invitation only course.
        """
        url = reverse('about_course', args=[text_type(self.course.id)])
        resp = self.client.get(url)
        self.assertEqual(resp.status_code, 200)
        self.assertIn("Enrollment in this course is by invitation only", resp.content)
        # Check that registration button is not present
        self.assertNotIn(REG_STR, resp.content)

    def test_invitation_only_but_allowed(self):
        """
        Test for user logged in and allowed to enroll in invitation only course.
        """
        # Course is invitation only, student is allowed to enroll and logged in
        user = UserFactory.create(username='allowed_student', password='test', email='allowed_student@test.com')
        CourseEnrollmentAllowedFactory(email=user.email, course_id=self.course.id)
        self.client.login(username=user.username, password='test')
        url = reverse('about_course', args=[text_type(self.course.id)])
        resp = self.client.get(url)
        self.assertEqual(resp.status_code, 200)
        self.assertIn(u"Enroll in {}".format(self.course.id.course), resp.content.decode('utf-8'))
        # Check that registration button is present
        self.assertIn(REG_STR, resp.content)
class AboutWithClosedEnrollment(ModuleStoreTestCase):
    """
    This test case will check the About page for a course that has enrollment start/end
    set but it is currently outside of that period.
    """
    def setUp(self):
        super(AboutWithClosedEnrollment, self).setUp()
        self.course = CourseFactory.create(metadata={"invitation_only": False})
        # Setup enrollment period to be in future
        now = datetime.datetime.now(pytz.UTC)
        tomorrow = now + datetime.timedelta(days=1)
        nextday = tomorrow + datetime.timedelta(days=1)
        self.course.enrollment_start = tomorrow
        self.course.enrollment_end = nextday
        self.course = self.update_course(self.course, self.user.id)
        self.about = ItemFactory.create(
            category="about", parent_location=self.course.location,
            display_name="overview"
        )

    def test_closed_enrollmement(self):
        # NOTE(review): method name typo ("enrollmement") kept as-is so the
        # test id stays stable for any external references.
        url = reverse('about_course', args=[text_type(self.course.id)])
        resp = self.client.get(url)
        self.assertEqual(resp.status_code, 200)
        self.assertIn("Enrollment is Closed", resp.content)
        # Check that registration button is not present
        self.assertNotIn(REG_STR, resp.content)

    def test_course_price_is_not_visble_in_sidebar(self):
        url = reverse('about_course', args=[text_type(self.course.id)])
        resp = self.client.get(url)
        self.assertEqual(resp.status_code, 200)
        # course price is not visible in the course_about page when the course
        # mode is not set to honor
        self.assertNotIn('<span class="important-dates-item-text">$10</span>', resp.content)
@ddt.ddt
class AboutSidebarHTMLTestCase(SharedModuleStoreTestCase):
    """
    This test case will check the About page for the content in the HTML sidebar.
    """
    def setUp(self):
        super(AboutSidebarHTMLTestCase, self).setUp()
        self.course = CourseFactory.create()

    @ddt.data(
        # (sidebar block display name, sidebar block content, waffle switch on?)
        ("", "", False),
        ("about_sidebar_html", "About Sidebar HTML Heading", False),
        ("about_sidebar_html", "", False),
        ("", "", True),
        ("about_sidebar_html", "About Sidebar HTML Heading", True),
        ("about_sidebar_html", "", True),
    )
    @ddt.unpack
    def test_html_sidebar_enabled(self, itemfactory_display_name, itemfactory_data, waffle_switch_value):
        # The sidebar section renders only when the waffle switch is active
        # AND a non-empty "about_sidebar_html" block exists.
        with override_switch(
            '{}.{}'.format(
                COURSE_EXPERIENCE_WAFFLE_NAMESPACE,
                ENABLE_COURSE_ABOUT_SIDEBAR_HTML
            ),
            active=waffle_switch_value
        ):
            if itemfactory_display_name:
                ItemFactory.create(
                    category="about",
                    parent_location=self.course.location,
                    display_name=itemfactory_display_name,
                    data=itemfactory_data,
                )
            url = reverse('about_course', args=[text_type(self.course.id)])
            resp = self.client.get(url)
            self.assertEqual(resp.status_code, 200)
            if waffle_switch_value and itemfactory_display_name and itemfactory_data:
                self.assertIn('<section class="about-sidebar-html">', resp.content)
                self.assertIn(itemfactory_data, resp.content)
            else:
                self.assertNotIn('<section class="about-sidebar-html">', resp.content)
@patch.dict(settings.FEATURES, {'ENABLE_SHOPPING_CART': True})
@patch.dict(settings.FEATURES, {'ENABLE_PAID_COURSE_REGISTRATION': True})
class AboutPurchaseCourseTestCase(LoginEnrollmentTestCase, SharedModuleStoreTestCase):
    """
    This test class runs through a suite of verifications regarding
    purchaseable courses
    """
    @classmethod
    def setUpClass(cls):
        super(AboutPurchaseCourseTestCase, cls).setUpClass()
        cls.course = CourseFactory.create(org='MITx', number='buyme', display_name='Course To Buy')
        # A second course whose enrollment window opens tomorrow, i.e. it is
        # currently closed.
        now = datetime.datetime.now(pytz.UTC)
        tomorrow = now + datetime.timedelta(days=1)
        nextday = tomorrow + datetime.timedelta(days=1)
        cls.closed_course = CourseFactory.create(
            org='MITx',
            number='closed',
            display_name='Closed Course To Buy',
            enrollment_start=tomorrow,
            enrollment_end=nextday
        )

    def setUp(self):
        super(AboutPurchaseCourseTestCase, self).setUp()
        self._set_ecomm(self.course)
        self._set_ecomm(self.closed_course)

    def _set_ecomm(self, course):
        """
        Helper method to turn on ecommerce on the course
        """
        course_mode = CourseMode(
            course_id=course.id,
            mode_slug=CourseMode.DEFAULT_MODE_SLUG,
            mode_display_name=CourseMode.DEFAULT_MODE_SLUG,
            min_price=10,
        )
        course_mode.save()

    def test_anonymous_user(self):
        """
        Make sure an anonymous user sees the purchase button
        """
        url = reverse('about_course', args=[text_type(self.course.id)])
        resp = self.client.get(url)
        self.assertEqual(resp.status_code, 200)
        self.assertIn("Add buyme to Cart <span>($10 USD)</span>", resp.content)

    def test_logged_in(self):
        """
        Make sure a logged in user sees the purchase button
        """
        self.setup_user()
        url = reverse('about_course', args=[text_type(self.course.id)])
        resp = self.client.get(url)
        self.assertEqual(resp.status_code, 200)
        self.assertIn("Add buyme to Cart <span>($10 USD)</span>", resp.content)

    def test_already_in_cart(self):
        """
        This makes sure if a user has this course in the cart, that the expected message
        appears
        """
        self.setup_user()
        cart = Order.get_cart_for_user(self.user)
        PaidCourseRegistration.add_to_order(cart, self.course.id)
        url = reverse('about_course', args=[text_type(self.course.id)])
        resp = self.client.get(url)
        self.assertEqual(resp.status_code, 200)
        self.assertIn("This course is in your", resp.content)
        self.assertNotIn("Add buyme to Cart <span>($10 USD)</span>", resp.content)

    def test_already_enrolled(self):
        """
        This makes sure that the already enrolled message appears for paywalled courses
        """
        self.setup_user()
        # note that we can't call self.enroll here since that goes through
        # the Django student views, which doesn't allow for enrollments
        # for paywalled courses
        CourseEnrollment.enroll(self.user, self.course.id)
        url = reverse('about_course', args=[text_type(self.course.id)])
        resp = self.client.get(url)
        self.assertEqual(resp.status_code, 200)
        self.assertIn("You are enrolled in this course", resp.content)
        self.assertIn("View Course", resp.content)
        self.assertNotIn("Add buyme to Cart <span>($10 USD)</span>", resp.content)

    def test_closed_enrollment(self):
        """
        This makes sure that paywalled courses also honor the registration
        window
        """
        self.setup_user()
        url = reverse('about_course', args=[text_type(self.closed_course.id)])
        resp = self.client.get(url)
        self.assertEqual(resp.status_code, 200)
        self.assertIn("Enrollment is Closed", resp.content)
        self.assertNotIn("Add closed to Cart <span>($10 USD)</span>", resp.content)
        # course price is visible in the course_about page when the course
        # mode is set to honor and its price is set
        self.assertIn('<span class="important-dates-item-text">$10</span>', resp.content)

    def test_invitation_only(self):
        """
        This makes sure that the invitation only restriction takes precedence over
        any purchase enablements
        """
        course = CourseFactory.create(metadata={"invitation_only": True})
        self._set_ecomm(course)
        self.setup_user()
        url = reverse('about_course', args=[text_type(course.id)])
        resp = self.client.get(url)
        self.assertEqual(resp.status_code, 200)
        self.assertIn("Enrollment in this course is by invitation only", resp.content)

    def test_enrollment_cap(self):
        """
        Make sure that capped enrollments work even with
        paywalled courses
        """
        course = CourseFactory.create(
            metadata={
                "max_student_enrollments_allowed": 1,
                "display_coursenumber": "buyme",
            }
        )
        self._set_ecomm(course)
        self.setup_user()
        url = reverse('about_course', args=[text_type(course.id)])
        resp = self.client.get(url)
        self.assertEqual(resp.status_code, 200)
        self.assertIn("Add buyme to Cart <span>($10 USD)</span>", resp.content)
        # note that we can't call self.enroll here since that goes through
        # the Django student views, which doesn't allow for enrollments
        # for paywalled courses
        CourseEnrollment.enroll(self.user, course.id)
        # create a new account since the first account is already enrolled in the course
        email = 'foo_second@test.com'
        password = 'bar'
        username = 'test_second'
        self.create_account(username,
                            email, password)
        self.activate_user(email)
        self.login(email, password)
        # Get the about page again and make sure that the page says that the course is full
        resp = self.client.get(url)
        self.assertEqual(resp.status_code, 200)
        self.assertIn("Course is full", resp.content)
        self.assertNotIn("Add buyme to Cart ($10)", resp.content)

    def test_free_course_display(self):
        """
        Make sure other courses that don't have shopping cart enabled don't display the add-to-cart button
        and don't display the course_price field if Cosmetic Price is disabled.
        """
        course = CourseFactory.create(org='MITx', number='free', display_name='Course For Free')
        self.setup_user()
        url = reverse('about_course', args=[text_type(course.id)])
        resp = self.client.get(url)
        self.assertEqual(resp.status_code, 200)
        self.assertNotIn("Add free to Cart (Free)", resp.content)
        self.assertNotIn('<p class="important-dates-item-title">Price</p>', resp.content)
class CourseAboutTestCaseCCX(SharedModuleStoreTestCase, LoginEnrollmentTestCase):
    """
    Test for unenrolled student tries to access ccx.
    Note: Only CCX coach can enroll a student in CCX. In sum self-registration not allowed.
    """
    MODULESTORE = TEST_DATA_SPLIT_MODULESTORE

    @classmethod
    def setUpClass(cls):
        super(CourseAboutTestCaseCCX, cls).setUpClass()
        cls.course = CourseFactory.create()

    def setUp(self):
        super(CourseAboutTestCaseCCX, self).setUp()
        # Create ccx coach account
        self.coach = coach = AdminFactory.create(password="test")
        self.client.login(username=coach.username, password="test")

    def test_redirect_to_dashboard_unenrolled_ccx(self):
        """
        Assert that when unenrolled user tries to access CCX do not allow the user to self-register.
        Redirect him to his student dashboard
        """
        # create ccx
        ccx = CcxFactory(course_id=self.course.id, coach=self.coach)
        # Use six.text_type rather than the Python-2-only `unicode` builtin,
        # for consistency with the rest of this module and Python 3 support.
        ccx_locator = CCXLocator.from_course_locator(self.course.id, text_type(ccx.id))
        self.setup_user()
        url = reverse('openedx.course_experience.course_home', args=[ccx_locator])
        response = self.client.get(url)
        expected = reverse('dashboard')
        self.assertRedirects(response, expected, status_code=302, target_status_code=200)
|
jolyonb/edx-platform
|
lms/djangoapps/courseware/tests/test_about.py
|
Python
|
agpl-3.0
| 28,229
|
[
"VisIt"
] |
fb1d46a9a42e8e944eea21bdc48b68a78f3f9479feb5fa836de9db3cfdc4e70f
|
from __future__ import division, print_function, absolute_import
import warnings
import numpy as np
from numpy import array
from numpy.testing import (assert_array_almost_equal, assert_array_equal,
run_module_suite, assert_raises, assert_allclose)
from scipy import signal
# (window name, extra positional arguments) pairs covering the window
# functions exercised by this module.
window_funcs = [
    ('boxcar', ()),
    ('triang', ()),
    ('parzen', ()),
    ('bohman', ()),
    ('blackman', ()),
    ('nuttall', ()),
    ('blackmanharris', ()),
    ('flattop', ()),
    ('bartlett', ()),
    ('hanning', ()),
    ('barthann', ()),
    ('hamming', ()),
    ('kaiser', (1,)),
    ('gaussian', (0.5,)),
    ('general_gaussian', (1.5, 2)),
    ('chebwin', (1,)),
    ('slepian', (2,)),
    ('cosine', ()),
    ('hann', ()),
    ('exponential', ()),
    ('tukey', (0.5,)),
    ]
# Reference Dolph-Chebyshev window values: 53-point (odd-length) window.
cheb_odd_true = array([0.200938, 0.107729, 0.134941, 0.165348,
                       0.198891, 0.235450, 0.274846, 0.316836,
                       0.361119, 0.407338, 0.455079, 0.503883,
                       0.553248, 0.602637, 0.651489, 0.699227,
                       0.745266, 0.789028, 0.829947, 0.867485,
                       0.901138, 0.930448, 0.955010, 0.974482,
                       0.988591, 0.997138, 1.000000, 0.997138,
                       0.988591, 0.974482, 0.955010, 0.930448,
                       0.901138, 0.867485, 0.829947, 0.789028,
                       0.745266, 0.699227, 0.651489, 0.602637,
                       0.553248, 0.503883, 0.455079, 0.407338,
                       0.361119, 0.316836, 0.274846, 0.235450,
                       0.198891, 0.165348, 0.134941, 0.107729,
                       0.200938])

# Reference Dolph-Chebyshev window values: 54-point (even-length) window.
cheb_even_true = array([0.203894, 0.107279, 0.133904,
                        0.163608, 0.196338, 0.231986,
                        0.270385, 0.311313, 0.354493,
                        0.399594, 0.446233, 0.493983,
                        0.542378, 0.590916, 0.639071,
                        0.686302, 0.732055, 0.775783,
                        0.816944, 0.855021, 0.889525,
                        0.920006, 0.946060, 0.967339,
                        0.983557, 0.994494, 1.000000,
                        1.000000, 0.994494, 0.983557,
                        0.967339, 0.946060, 0.920006,
                        0.889525, 0.855021, 0.816944,
                        0.775783, 0.732055, 0.686302,
                        0.639071, 0.590916, 0.542378,
                        0.493983, 0.446233, 0.399594,
                        0.354493, 0.311313, 0.270385,
                        0.231986, 0.196338, 0.163608,
                        0.133904, 0.107279, 0.203894])
class TestChebWin(object):
    """Tests for the Dolph-Chebyshev window against hard-coded values."""

    def _window(self, length, attenuation):
        # chebwin warns for negative attenuation values; silence the
        # warning so the reference comparisons run quietly.
        with warnings.catch_warnings():
            warnings.simplefilter("ignore", UserWarning)
            return signal.chebwin(length, at=attenuation)

    def test_cheb_odd_high_attenuation(self):
        assert_array_almost_equal(self._window(53, -40), cheb_odd_true,
                                  decimal=4)

    def test_cheb_even_high_attenuation(self):
        assert_array_almost_equal(self._window(54, -40), cheb_even_true,
                                  decimal=4)

    def test_cheb_odd_low_attenuation(self):
        expected = array([1.000000, 0.519052, 0.586405,
                          0.610151, 0.586405, 0.519052,
                          1.000000])
        assert_array_almost_equal(self._window(7, -10), expected, decimal=4)

    def test_cheb_even_low_attenuation(self):
        expected = array([1.000000, 0.451924, 0.51027,
                          0.541338, 0.541338, 0.51027,
                          0.451924, 1.000000])
        assert_array_almost_equal(self._window(8, -10), expected, decimal=4)
# Expected signal.exponential outputs keyed by the argument tuple
# (M, center, tau, sym).  A value of None marks combinations that must
# raise ValueError (an explicit center combined with sym=True).
exponential_data = {
    (4, None, 0.2, False): array([4.53999297624848542e-05,
                                  6.73794699908546700e-03, 1.00000000000000000e+00,
                                  6.73794699908546700e-03]),
    (4, None, 0.2, True): array([0.00055308437014783, 0.0820849986238988,
                                 0.0820849986238988, 0.00055308437014783]),
    (4, None, 1.0, False): array([0.1353352832366127, 0.36787944117144233, 1.,
                                  0.36787944117144233]),
    (4, None, 1.0, True): array([0.22313016014842982, 0.60653065971263342,
                                 0.60653065971263342, 0.22313016014842982]),
    (4, 2, 0.2, False): array([4.53999297624848542e-05, 6.73794699908546700e-03,
                               1.00000000000000000e+00, 6.73794699908546700e-03]),
    (4, 2, 0.2, True): None,
    (4, 2, 1.0, False): array([0.1353352832366127, 0.36787944117144233, 1.,
                               0.36787944117144233]),
    (4, 2, 1.0, True): None,
    (5, None, 0.2, False): array([4.53999297624848542e-05,
                                  6.73794699908546700e-03, 1.00000000000000000e+00,
                                  6.73794699908546700e-03, 4.53999297624848542e-05]),
    (5, None, 0.2, True): array([4.53999297624848542e-05,
                                 6.73794699908546700e-03, 1.00000000000000000e+00,
                                 6.73794699908546700e-03, 4.53999297624848542e-05]),
    (5, None, 1.0, False): array([0.1353352832366127, 0.36787944117144233, 1.,
                                  0.36787944117144233, 0.1353352832366127]),
    (5, None, 1.0, True): array([0.1353352832366127, 0.36787944117144233, 1.,
                                 0.36787944117144233, 0.1353352832366127]),
    (5, 2, 0.2, False): array([4.53999297624848542e-05, 6.73794699908546700e-03,
                               1.00000000000000000e+00, 6.73794699908546700e-03,
                               4.53999297624848542e-05]),
    (5, 2, 0.2, True): None,
    (5, 2, 1.0, False): array([0.1353352832366127, 0.36787944117144233, 1.,
                               0.36787944117144233, 0.1353352832366127]),
    (5, 2, 1.0, True): None
}
def test_exponential():
    """Check signal.exponential against hard-coded reference data,
    including argument combinations that must raise ValueError."""
    for args, expected in exponential_data.items():
        if expected is None:
            assert_raises(ValueError, signal.exponential, *args)
            continue
        assert_allclose(signal.exponential(*args), expected, rtol=1e-14)
# Expected signal.tukey outputs keyed by the argument tuple
# (M, alpha, sym), used by test_tukey() below.
tukey_data = {
    (4, 0.5, True): array([0.0, 1.0, 1.0, 0.0]),
    (4, 0.9, True): array([0.0, 0.84312081893436686, 0.84312081893436686, 0.0]),
    (4, 1.0, True): array([0.0, 0.75, 0.75, 0.0]),
    (4, 0.5, False): array([0.0, 1.0, 1.0, 1.0]),
    (4, 0.9, False): array([0.0, 0.58682408883346526, 1.0, 0.58682408883346526]),
    (4, 1.0, False): array([0.0, 0.5, 1.0, 0.5]),
    (5, 0.0, True): array([1.0, 1.0, 1.0, 1.0, 1.0]),
    (5, 0.8, True): array([0.0, 0.69134171618254492, 1.0, 0.69134171618254492, 0.0]),
    (5, 1.0, True): array([0.0, 0.5, 1.0, 0.5, 0.0]),
}
def test_tukey():
    """Validate signal.tukey against reference data and its limits."""
    # Test against hardcoded data
    for args, expected in tukey_data.items():
        if expected is None:
            assert_raises(ValueError, signal.tukey, *args)
        else:
            assert_allclose(signal.tukey(*args), expected, rtol=1e-14)
    # Test extremes of alpha correspond to boxcar and hann
    assert_array_almost_equal(signal.tukey(100, 0), signal.boxcar(100))
    assert_array_almost_equal(signal.tukey(100, 1), signal.hann(100))
class TestGetWindow(object):
def test_boxcar(self):
w = signal.get_window('boxcar', 12)
assert_array_equal(w, np.ones_like(w))
def test_cheb_odd(self):
with warnings.catch_warnings():
warnings.simplefilter("ignore", UserWarning)
w = signal.get_window(('chebwin', -40), 53, fftbins=False)
assert_array_almost_equal(w, cheb_odd_true, decimal=4)
def test_cheb_even(self):
with warnings.catch_warnings():
warnings.simplefilter("ignore", UserWarning)
w = signal.get_window(('chebwin', -40), 54, fftbins=False)
assert_array_almost_equal(w, cheb_even_true, decimal=4)
def test_array_as_window(self):
# github issue 3603
osfactor = 128
sig = np.arange(128)
win = signal.get_window(('kaiser', 8.0), osfactor // 2)
assert_raises(ValueError, signal.resample, (sig, len(sig) * osfactor), {'window': win})
def test_windowfunc_basics():
    """Smoke-test every window function: for odd length the symmetric
    and periodic variants must agree; even lengths must at least run."""
    for name, extra_args in window_funcs:
        win = getattr(signal, name)
        assert_array_almost_equal(win(7, *extra_args, sym=True),
                                  win(7, *extra_args, sym=False))
        # just check the below runs
        win(6, *extra_args, sym=True)
        win(6, *extra_args, sym=False)
def test_needs_params():
for winstr in ['kaiser', 'ksr', 'gaussian', 'gauss', 'gss',
'general gaussian', 'general_gaussian',
'general gauss', 'general_gauss', 'ggs',
'slepian', 'optimal', 'slep', 'dss', 'dpss',
'chebwin', 'cheb', 'exponential', 'poisson', 'tukey',
'tuk']:
assert_raises(ValueError, signal.get_window, winstr, 7)
# Allow running this test module directly from the command line.
if __name__ == "__main__":
    run_module_suite()
|
FRidh/scipy
|
scipy/signal/tests/test_windows.py
|
Python
|
bsd-3-clause
| 9,372
|
[
"Gaussian"
] |
dc0289879474ba6c4fa753691ece68c4a9cc0b3cc50f37c4075a8be6060e0cf3
|
## This file is part of CDS Invenio.
## Copyright (C) 2002, 2003, 2004, 2005, 2006, 2007, 2008 CERN.
##
## CDS Invenio is free software; you can redistribute it and/or
## modify it under the terms of the GNU General Public License as
## published by the Free Software Foundation; either version 2 of the
## License, or (at your option) any later version.
##
## CDS Invenio is distributed in the hope that it will be useful, but
## WITHOUT ANY WARRANTY; without even the implied warranty of
## MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
## General Public License for more details.
##
## You should have received a copy of the GNU General Public License
## along with CDS Invenio; if not, write to the Free Software Foundation, Inc.,
## 59 Temple Place, Suite 330, Boston, MA 02111-1307, USA.
"""
CDS Invenio utilities to run SQL queries.
The main API functions are:
- run_sql()
- run_sql_cached()
- run_sql_many()
but see the others as well.
"""
__revision__ = "$Id$"
# dbquery clients can import these from here:
# pylint: disable=W0611
from MySQLdb import Warning, Error, InterfaceError, DataError, \
DatabaseError, OperationalError, IntegrityError, \
InternalError, NotSupportedError, \
ProgrammingError
import string
import time
import marshal
import re
from zlib import compress, decompress
from thread import get_ident
from invenio.config import CFG_ACCESS_CONTROL_LEVEL_SITE, \
CFG_MISCUTIL_SQL_MAX_CACHED_QUERIES, CFG_MISCUTIL_SQL_USE_SQLALCHEMY, \
CFG_MISCUTIL_SQL_RUN_SQL_MANY_LIMIT
if CFG_MISCUTIL_SQL_USE_SQLALCHEMY:
try:
import sqlalchemy.pool as pool
import MySQLdb as mysqldb
mysqldb = pool.manage(mysqldb, use_threadlocal=True)
connect = mysqldb.connect
except ImportError:
CFG_MISCUTIL_SQL_USE_SQLALCHEMY = False
from MySQLdb import connect
else:
from MySQLdb import connect
## DB config variables. These variables are to be set in
## invenio-local.conf by admins and then replaced in situ in this file
## by calling "inveniocfg --update-dbexec".
## Note that they are defined here and not in config.py in order to
## prevent them from being exported accidentally elsewhere, as no-one
## should know DB credentials but this file.
## FIXME: this is more of a blast-from-the-past that should be fixed
## both here and in inveniocfg when the time permits.
CFG_DATABASE_HOST = 'localhost'
CFG_DATABASE_PORT = '3306'
CFG_DATABASE_NAME = 'cdsinvenio'
CFG_DATABASE_USER = 'cdsinvenio'
CFG_DATABASE_PASS = 'my123p$ss'
# Per-thread DB connection registry, keyed by thread identifier
# (managed by _db_login()/_db_logout() below).
_DB_CONN = {}
# Module-level query cache for run_sql_cached(); the try/except keeps
# an already-populated cache alive if the module is re-executed.
try:
    _db_cache
except NameError:
    _db_cache = {}
def _db_login(relogin = 0):
    """Login to the database.

    Returns a connection object: a pooled SQLAlchemy-managed connection
    when CFG_MISCUTIL_SQL_USE_SQLALCHEMY is set, otherwise this
    thread's cached MySQLdb connection (created on first use).  With
    relogin true, the thread's cached connection is unconditionally
    replaced by a fresh one.
    """
    ## Note: we are using "use_unicode=False", because we want to
    ## receive strings from MySQL as Python UTF-8 binary string
    ## objects, not as Python Unicode string objects, as of yet.
    ## Note: "charset='utf8'" is needed for recent MySQLdb versions
    ## (such as 1.2.1_p2 and above). For older MySQLdb versions such
    ## as 1.2.0, an explicit "init_command='SET NAMES utf8'" parameter
    ## would constitute an equivalent. But we are not bothering with
    ## older MySQLdb versions here, since we are recommending to
    ## upgrade to more recent versions anyway.
    if CFG_MISCUTIL_SQL_USE_SQLALCHEMY:
        # SQLAlchemy pool manages connection reuse itself; no
        # per-thread caching needed.
        return connect(host=CFG_DATABASE_HOST, port=int(CFG_DATABASE_PORT),
                       db=CFG_DATABASE_NAME, user=CFG_DATABASE_USER,
                       passwd=CFG_DATABASE_PASS,
                       use_unicode=False, charset='utf8')
    else:
        thread_ident = get_ident()
        if relogin:
            # Force a fresh connection for this thread, replacing any
            # cached (possibly broken) one.
            _DB_CONN[thread_ident] = connect(host=CFG_DATABASE_HOST,
                                             port=int(CFG_DATABASE_PORT),
                                             db=CFG_DATABASE_NAME,
                                             user=CFG_DATABASE_USER,
                                             passwd=CFG_DATABASE_PASS,
                                             use_unicode=False, charset='utf8')
            return _DB_CONN[thread_ident]
        else:
            if _DB_CONN.has_key(thread_ident):
                # Reuse this thread's cached connection.
                return _DB_CONN[thread_ident]
            else:
                _DB_CONN[thread_ident] = connect(host=CFG_DATABASE_HOST,
                                                 port=int(CFG_DATABASE_PORT),
                                                 db=CFG_DATABASE_NAME,
                                                 user=CFG_DATABASE_USER,
                                                 passwd=CFG_DATABASE_PASS,
                                                 use_unicode=False, charset='utf8')
                return _DB_CONN[thread_ident]
def _db_logout():
    """Drop the calling thread's cached DB connection, if any."""
    _DB_CONN.pop(get_ident(), None)
def run_sql_cached(sql, param=None, n=0, with_desc=0, affected_tables=['bibrec']):
    """
    Run the SQL query and cache the SQL command for later reuse.
    @param param: tuple of string params to insert in the query
    (see notes below)
    @param n: number of tuples in result (0 for unbounded)
    @param with_desc: if true, will return a
    DB API 7-tuple describing columns in query
    @param affected_tables: is a list of tablenames of affected tables,
    used to decide whether we should update the cache or whether we
    can return cached result, depending on the last modification time
    for corresponding tables. If empty, and if the cached result is
    present in the cache, always return the cached result without
    recomputing it. (This is useful to speed up queries that operate
    on objects that virtually never change, e.g. list of defined
    logical fields, that remain usually constant in between Apache
    restarts. Note that this would be a dangerous default for any
    query.)
    @return: the result as provided by run_sql()
    Note that it is pointless and even wrong to use this function with
    SQL commands different from SELECT.
    """
    # NOTE(review): the mutable default for affected_tables is only
    # read, never mutated, so it is harmless here; a tuple would
    # nevertheless be safer.
    ## FIXME: The code below, checking table update times, was found
    ## to be slow in user storm situations. So let us rather run SQL
    ## statement live; it seems faster to let MySQL use its own cache
    ## than to constantly verify table update time. Later, a proper
    ## time-driven data cacher might be introduced here. Or, better
    ## yet, we can plug dedicated data cachers to every place that
    ## called run_sql_cached.
    return run_sql(sql, param, n, with_desc)
    # NOTE(review): everything below this return is intentionally dead
    # code, kept for reference until the cacher described in the FIXME
    # above is implemented.
    global _db_cache
    if CFG_ACCESS_CONTROL_LEVEL_SITE == 3:
        # do not connect to the database as the site is closed for maintenance:
        return []
    key = repr((sql, param, n, with_desc))
    # Garbage collecting needed?
    if len(_db_cache) >= CFG_MISCUTIL_SQL_MAX_CACHED_QUERIES:
        _db_cache = {}
    # Query already in the cache?
    if not _db_cache.has_key(key) or \
       (affected_tables and _db_cache[key][1] <= max([get_table_update_time(table) for table in affected_tables])):
        # Let's update the cache
        result = run_sql(sql, param, n, with_desc)
        _db_cache[key] = (result, time.strftime("%Y-%m-%d %H:%M:%S", time.localtime()))
        ### log_sql_query_cached(key, result, False) ### UNCOMMENT ONLY IF you REALLY want to log all queries
    else:
        result = _db_cache[key][0]
        ### log_sql_query_cached(key, result, True) ### UNCOMMENT ONLY IF you REALLY want to log all queries
    return result
def run_sql(sql, param=None, n=0, with_desc=0):
    """Run SQL on the server with PARAM and return result.
    @param param: tuple of string params to insert in the query (see
    notes below)
    @param n: number of tuples in result (0 for unbounded)
    @param with_desc: if True, will return a DB API 7-tuple describing
    columns in query.
    @return: If SELECT, SHOW, DESCRIBE statements, return tuples of
    data, followed by description if parameter with_desc is
    provided. If INSERT, return last row id. Otherwise return
    SQL result as provided by database.
    @note: When the site is closed for maintenance (as governed by the
    config variable CFG_ACCESS_CONTROL_LEVEL_SITE), do not attempt
    to run any SQL queries but return empty list immediately.
    Useful to be able to have the website up while MySQL database
    is down for maintenance, hot copies, table repairs, etc.
    @note: In case of problems, exceptions are returned according to
    the Python DB API 2.0. The client code can import them from
    this file and catch them.
    """
    if CFG_ACCESS_CONTROL_LEVEL_SITE == 3:
        # do not connect to the database as the site is closed for maintenance:
        return []
    ### log_sql_query(sql, param) ### UNCOMMENT ONLY IF you REALLY want to log all queries
    if param:
        # normalize whatever sequence the caller passed into the tuple
        # form handed to cur.execute()
        param = tuple(param)
    try:
        db = _db_login()
        cur = db.cursor()
        rc = cur.execute(sql, param)
    except OperationalError: # unexpected disconnect, bad malloc error, etc
        # FIXME: now reconnect is always forced, we may perhaps want to ping() first?
        try:
            db = _db_login(relogin=1)
            cur = db.cursor()
            rc = cur.execute(sql, param)
        except OperationalError: # again an unexpected disconnect, bad malloc error, etc
            raise
    if string.upper(string.split(sql)[0]) in ("SELECT", "SHOW", "DESC", "DESCRIBE"):
        # Row-returning statement: fetch up to n rows (all if n == 0).
        if n:
            recset = cur.fetchmany(n)
        else:
            recset = cur.fetchall()
        if with_desc:
            return recset, cur.description
        else:
            return recset
    else:
        if string.upper(string.split(sql)[0]) == "INSERT":
            # INSERT callers receive the auto-generated row id instead
            # of the raw execute() return value.
            rc = cur.lastrowid
        return rc
def run_sql_many(query, params, limit=CFG_MISCUTIL_SQL_RUN_SQL_MANY_LIMIT):
    """Run SQL on the server with PARAM.
    This method does executemany and is therefore more efficient than execute
    but it has sense only with queries that affect state of a database
    (INSERT, UPDATE). That is why the results just count number of affected rows
    @param params: tuple of tuple of string params to insert in the query
    @param limit: query will be executed in parts when number of
    parameters is greater than limit (each iteration runs at most
    `limit' parameters)
    @return: SQL result as provided by database
    """
    i = 0
    r = None
    while i < len(params):
        ## make partial query safely (mimicking procedure from run_sql())
        try:
            db = _db_login()
            cur = db.cursor()
            rc = cur.executemany(query, params[i:i+limit])
        except OperationalError:
            # one forced reconnect attempt, as in run_sql()
            try:
                db = _db_login(relogin=1)
                cur = db.cursor()
                rc = cur.executemany(query, params[i:i+limit])
            except OperationalError:
                raise
        ## collect its result:
        # executemany() returns the affected-row count; accumulate the
        # counts across batches
        if r is None:
            r = rc
        else:
            r += rc
        i += limit
    return r
def blob_to_string(ablob):
    """Return string representation of ABLOB.

    Smooths over MySQLdb version differences: old versions (0.9.2)
    already hand BLOBs back as strings, newer ones (1.0.0+) return
    array.array objects.  Falsy values are passed through unchanged.
    """
    if not ablob:
        return ablob
    if type(ablob) is str:
        # BLOB is already a string in MySQLdb 0.9.2
        return ablob
    # BLOB is array.array in MySQLdb 1.0.0 and later
    return ablob.tostring()
def log_sql_query_cached(key, result, hit_p):
    """Log SQL query cached into prefix/var/log/dbquery.log log file. In order
    to enable logging of all SQL queries, please uncomment two lines
    in run_sql_cached() above. Useful for fine-level debugging only!
    """
    # Imports are kept local so the cost is only paid when logging is
    # actually enabled.
    from invenio.config import CFG_LOGDIR
    from invenio.dateutils import convert_datestruct_to_datetext
    from invenio.textutils import indent_text
    log_path = CFG_LOGDIR + '/dbquery.log'
    date_of_log = convert_datestruct_to_datetext(time.localtime())
    message = date_of_log + '-->\n'
    message += indent_text('Key:\n' + indent_text(str(key), 2, wrap=True), 2)
    # hit_p flags whether the result came from the cache (HIT) or was
    # freshly recomputed (MISS).
    message += indent_text('Result:\n' + indent_text(str(result) + (hit_p and ' HIT' or ' MISS'), 2, wrap=True), 2)
    message += 'Cached queries: %i\n\n' % len(_db_cache)
    try:
        log_file = open(log_path, 'a+')
        log_file.writelines(message)
        log_file.close()
    except:
        # Logging is deliberately best-effort: never let a log failure
        # break the query path.
        pass
def log_sql_query(sql, param=None):
    """Append the given SQL query and its parameters to
    prefix/var/log/dbquery.log.  Disabled by default; uncomment the
    call site in run_sql() above to activate.  Useful for fine-level
    debugging only!
    """
    from invenio.config import CFG_LOGDIR
    from invenio.dateutils import convert_datestruct_to_datetext
    from invenio.textutils import indent_text
    log_path = CFG_LOGDIR + '/dbquery.log'
    parts = [
        convert_datestruct_to_datetext(time.localtime()) + '-->\n',
        indent_text('Query:\n' + indent_text(str(sql), 2, wrap=True), 2),
        indent_text('Params:\n' + indent_text(str(param), 2, wrap=True), 2),
        '-----------------------------\n\n',
    ]
    try:
        log_file = open(log_path, 'a+')
        log_file.writelines(''.join(parts))
        log_file.close()
    except:
        pass
def get_table_update_time(tablename):
    """Return update time of TABLENAME. TABLENAME can contain
    wildcard `%' in which case we return the maximum update time
    value.
    """
    # Note: in order to work with all of MySQL 4.0, 4.1, 5.0, this
    # function uses SHOW TABLE STATUS technique with a dirty column
    # position lookup to return the correct value. (Making use of
    # Index_Length column that is either of type long (when there are
    # some indexes defined) or of type None (when there are no indexes
    # defined, e.g. table is empty). When we shall use solely
    # MySQL-5.0, we can employ a much cleaner technique of using
    # SELECT UPDATE_TIME FROM INFORMATION_SCHEMA.TABLES WHERE
    # table_name='collection'.
    # NOTE(review): `long` below is Python-2-only, consistent with the
    # rest of this module (`thread`, has_key, etc.).
    res = run_sql("SHOW TABLE STATUS LIKE %s", (tablename, ))
    update_times = [] # store all update times
    for row in res:
        if type(row[10]) is long or \
           row[10] is None:
            # MySQL-4.1 and 5.0 have creation_time in 11th position,
            # so return next column:
            update_times.append(str(row[12]))
        else:
            # MySQL-4.0 has creation_time in 10th position, which is
            # of type datetime.datetime or str (depending on the
            # version of MySQLdb), so return next column:
            update_times.append(str(row[11]))
    return max(update_times)
def get_table_status_info(tablename):
    """Return table status information on TABLENAME. Returned is a
    dict with keys like Name, Rows, Data_length, Max_data_length,
    etc. If TABLENAME does not exist, return empty dict.
    """
    # Note: again a hack so that it works on all MySQL 4.0, 4.1, 5.0
    # (same Index_Length column-type sniffing as in
    # get_table_update_time() above).
    res = run_sql("SHOW TABLE STATUS LIKE %s", (tablename, ))
    table_status_info = {} # store all update times
    for row in res:
        if type(row[10]) is long or \
           row[10] is None:
            # MySQL-4.1 and 5.0 have creation time in 11th position:
            table_status_info['Name'] = row[0]
            table_status_info['Rows'] = row[4]
            table_status_info['Data_length'] = row[6]
            table_status_info['Max_data_length'] = row[8]
            table_status_info['Create_time'] = row[11]
            table_status_info['Update_time'] = row[12]
        else:
            # MySQL-4.0 has creation_time in 10th position, which is
            # of type datetime.datetime or str (depending on the
            # version of MySQLdb):
            table_status_info['Name'] = row[0]
            table_status_info['Rows'] = row[3]
            table_status_info['Data_length'] = row[5]
            table_status_info['Max_data_length'] = row[7]
            table_status_info['Create_time'] = row[10]
            table_status_info['Update_time'] = row[11]
    return table_status_info
def serialize_via_marshal(obj):
    """Serialize a Python object into a zlib-compressed marshal string."""
    marshalled = marshal.dumps(obj)
    return compress(marshalled)
def deserialize_via_marshal(astring):
    """Inverse of serialize_via_marshal(): decompress, then unmarshal."""
    decompressed = decompress(astring)
    return marshal.loads(decompressed)
# Speed up the (de)serialization helpers with psyco when it is
# installed; silently skip otherwise.
# NOTE(review): Python 2 only -- "except StandardError, e" is
# pre-Python-3 syntax, consistent with the rest of this module.
try:
    import psyco
    psyco.bind(serialize_via_marshal)
    psyco.bind(deserialize_via_marshal)
except StandardError, e:
    pass
def wash_table_column_name(colname):
    """
    Evaluate table-column name to see if it is clean.
    This function accepts only names containing [a-zA-Z0-9_].
    @param colname: The string to be checked
    @type colname: str
    @return: colname if test passed
    @rtype: str
    @raise Exception: Raises an exception if colname is invalid.
    """
    # Use a raw string for the regex: the original '[^\w]' relied on
    # Python passing the unrecognized escape through unchanged, which
    # newer interpreters flag as an invalid escape sequence.
    if re.search(r'[^\w]', colname):
        raise Exception('The table column %s is not valid.' % repr(colname))
    return colname
def real_escape_string(unescaped_string):
    """
    Escapes special characters in the unescaped string for use in a DB query.
    @param unescaped_string: The string to be escaped
    @type unescaped_string: str
    @return: Returns the escaped string
    @rtype: str
    """
    # Delegate to the active connection's own escaping routine.
    return _db_login().escape_string(unescaped_string)
|
lbjay/cds-invenio
|
modules/miscutil/lib/dbquery.py
|
Python
|
gpl-2.0
| 17,539
|
[
"BLAST"
] |
e017ea33cdfd08630ebc671c0b5fa0260db3fa72fc33d0f39e8080560c603653
|
# Copyright (C) 2020 The ESPResSo project
#
# This file is part of ESPResSo.
#
# ESPResSo is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# ESPResSo is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
import unittest as ut
import importlib_wrapper
sample, skipIfMissingFeatures = importlib_wrapper.configure_and_import(
"@SAMPLES_DIR@/reaction_ensemble_complex_reaction.py")
@skipIfMissingFeatures
class Sample(ut.TestCase):
    """Regression test for the reaction_ensemble_complex_reaction sample."""
    # System object created by the imported sample script.
    system = sample.system
    def test_concentrations(self):
        # Every species' simulated concentration must match the
        # sample's analytical value within 1e-3 and have a tight 95%
        # confidence interval; the derived equilibrium constant and
        # total amount must agree as well.
        err_msg = "Concentration of species {} doesn't match analytical result"
        for ptype in sample.types:
            self.assertAlmostEqual(sample.concentrations[ptype],
                                   sample.concentrations_numerical[ptype],
                                   delta=1e-3,
                                   msg=err_msg.format(sample.types_name[ptype]))
            self.assertLess(sample.concentrations_95ci[ptype], 1e-3,
                            msg="95% confidence interval too large")
        self.assertAlmostEqual(sample.K_sim, sample.K, delta=1e-3)
        self.assertAlmostEqual(sample.N0_sim, sample.N0, delta=1e-3)
# Allow running this test module directly from the command line.
if __name__ == "__main__":
    ut.main()
|
espressomd/espresso
|
testsuite/scripts/samples/test_reaction_ensemble_complex_reaction.py
|
Python
|
gpl-3.0
| 1,700
|
[
"ESPResSo"
] |
e61b69abaa36da95816d34f2954ca33073523b4da6900f0aa57c671310b901b5
|
# Authors: Alexandre Gramfort <alexandre.gramfort@telecom-paristech.fr>
# Matti Hamalainen <msh@nmr.mgh.harvard.edu>
# Denis A. Engemann <denis.engemann@gmail.com>
#
# License: BSD (3-clause)
import os
from os import path as op
import sys
from struct import pack
from glob import glob
import numpy as np
from scipy.sparse import coo_matrix, csr_matrix, eye as speye
from .bem import read_bem_surfaces
from .io.constants import FIFF
from .io.open import fiff_open
from .io.tree import dir_tree_find
from .io.tag import find_tag
from .io.write import (write_int, start_file, end_block,
start_block, end_file, write_string,
write_float_sparse_rcs)
from .channels.channels import _get_meg_system
from .transforms import transform_surface_to
from .utils import logger, verbose, get_subjects_dir
from .externals.six import string_types
###############################################################################
# AUTOMATED SURFACE FINDING
@verbose
def get_head_surf(subject, source=('bem', 'head'), subjects_dir=None,
                  verbose=None):
    """Load the subject head surface
    Parameters
    ----------
    subject : str
        Subject name.
    source : str | list of str
        Type to load. Common choices would be `'bem'` or `'head'`. We first
        try loading `'$SUBJECTS_DIR/$SUBJECT/bem/$SUBJECT-$SOURCE.fif'`, and
        then look for `'$SUBJECT*$SOURCE.fif'` in the same directory by going
        through all files matching the pattern. The head surface will be read
        from the first file containing a head surface. Can also be a list
        to try multiple strings.
    subjects_dir : str, or None
        Path to the SUBJECTS_DIR. If None, the path is obtained by using
        the environment variable SUBJECTS_DIR.
    verbose : bool, str, int, or None
        If not None, override default verbose level (see mne.verbose).
    Returns
    -------
    surf : dict
        The head surface.
    """
    # Load the head surface from the BEM
    subjects_dir = get_subjects_dir(subjects_dir, raise_error=True)
    # use realpath to allow for linked surfaces (c.f. MNE manual 196-197)
    if isinstance(source, string_types):
        source = [source]
    surf = None
    # try each source name in order; the first file that yields a head
    # surface wins
    for this_source in source:
        this_head = op.realpath(op.join(subjects_dir, subject, 'bem',
                                        '%s-%s.fif' % (subject, this_source)))
        if op.exists(this_head):
            surf = read_bem_surfaces(this_head, True,
                                     FIFF.FIFFV_BEM_SURF_ID_HEAD,
                                     verbose=False)
        else:
            # let's do a more sophisticated search
            path = op.join(subjects_dir, subject, 'bem')
            if not op.isdir(path):
                raise IOError('Subject bem directory "%s" does not exist'
                              % path)
            files = sorted(glob(op.join(path, '%s*%s.fif'
                                        % (subject, this_source))))
            for this_head in files:
                try:
                    surf = read_bem_surfaces(this_head, True,
                                             FIFF.FIFFV_BEM_SURF_ID_HEAD,
                                             verbose=False)
                except ValueError:
                    # file did not contain a head surface; keep looking
                    pass
                else:
                    break
        if surf is not None:
            break
    if surf is None:
        raise IOError('No file matching "%s*%s" and containing a head '
                      'surface found' % (subject, this_source))
    logger.info('Using surface from %s' % this_head)
    return surf
@verbose
def get_meg_helmet_surf(info, trans=None, verbose=None):
    """Load the MEG helmet associated with the MEG sensors
    Parameters
    ----------
    info : instance of io.meas_info.Info
        Measurement info.
    trans : dict
        The head<->MRI transformation, usually obtained using
        read_trans(). Can be None, in which case the surface will
        be in head coordinates instead of MRI coordinates.
    verbose : bool, str, int, or None
        If not None, override default verbose level (see mne.verbose).
    Returns
    -------
    surf : dict
        The MEG helmet as a surface.
    """
    system = _get_meg_system(info)
    logger.info('Getting helmet for system %s' % system)
    # helmet surfaces ship with the package, one .fif.gz per MEG system
    fname = op.join(op.split(__file__)[0], 'data', 'helmets',
                    system + '.fif.gz')
    surf = read_bem_surfaces(fname, False, FIFF.FIFFV_MNE_SURF_MEG_HELMET,
                             verbose=False)
    # The helmet surface is stored in device coordinates regardless of
    # what the file claims: tag it as such, then move it to head (and,
    # when a trans is supplied, on to MRI) coordinates.
    surf['coord_frame'] = FIFF.FIFFV_COORD_DEVICE
    transform_surface_to(surf, 'head', info['dev_head_t'])
    if trans is not None:
        transform_surface_to(surf, 'mri', trans)
    return surf
###############################################################################
# EFFICIENCY UTILITIES
def fast_cross_3d(x, y):
    """Row-wise cross product of two stacks of 3D vectors.

    Equivalent to np.cross(x, y), but much faster once the number of
    cross products becomes large (>500), where np.cross becomes less
    memory efficient.

    Parameters
    ----------
    x : array, shape (n, 3) or (1, 3)
        Input array 1.
    y : array, shape (n, 3) or (1, 3)
        Input array 2.

    Returns
    -------
    z : array
        Cross product of x and y.

    Notes
    -----
    x and y must both be 2D row vectors with three columns. One may
    have a single row, otherwise the row counts must match.
    """
    assert x.ndim == 2 and y.ndim == 2
    assert x.shape[1] == 3 and y.shape[1] == 3
    assert x.shape[0] in (1, y.shape[0]) or y.shape[0] == 1
    if max(x.shape[0], y.shape[0]) < 500:
        return np.cross(x, y)
    # manual expansion avoids np.cross's intermediate allocations
    return np.c_[x[:, 1] * y[:, 2] - x[:, 2] * y[:, 1],
                 x[:, 2] * y[:, 0] - x[:, 0] * y[:, 2],
                 x[:, 0] * y[:, 1] - x[:, 1] * y[:, 0]]
def _fast_cross_nd_sum(a, b, c):
"""Fast cross and sum"""
return ((a[..., 1] * b[..., 2] - a[..., 2] * b[..., 1]) * c[..., 0] +
(a[..., 2] * b[..., 0] - a[..., 0] * b[..., 2]) * c[..., 1] +
(a[..., 0] * b[..., 1] - a[..., 1] * b[..., 0]) * c[..., 2])
def _accumulate_normals(tris, tri_nn, npts):
"""Efficiently accumulate triangle normals"""
# this code replaces the following, but is faster (vectorized):
#
# this['nn'] = np.zeros((this['np'], 3))
# for p in xrange(this['ntri']):
# verts = this['tris'][p]
# this['nn'][verts, :] += this['tri_nn'][p, :]
#
nn = np.zeros((npts, 3))
for verts in tris.T: # note this only loops 3x (number of verts per tri)
for idx in range(3): # x, y, z
nn[:, idx] += np.bincount(verts, weights=tri_nn[:, idx],
minlength=npts)
return nn
def _triangle_neighbors(tris, npts):
"""Efficiently compute vertex neighboring triangles"""
# this code replaces the following, but is faster (vectorized):
#
# this['neighbor_tri'] = [list() for _ in xrange(this['np'])]
# for p in xrange(this['ntri']):
# verts = this['tris'][p]
# this['neighbor_tri'][verts[0]].append(p)
# this['neighbor_tri'][verts[1]].append(p)
# this['neighbor_tri'][verts[2]].append(p)
# this['neighbor_tri'] = [np.array(nb, int) for nb in this['neighbor_tri']]
#
verts = tris.ravel()
counts = np.bincount(verts, minlength=npts)
reord = np.argsort(verts)
tri_idx = np.unravel_index(reord, (len(tris), 3))[0]
idx = np.cumsum(np.r_[0, counts])
# the sort below slows it down a bit, but is needed for equivalence
neighbor_tri = [np.sort(tri_idx[v1:v2])
for v1, v2 in zip(idx[:-1], idx[1:])]
return neighbor_tri
def _triangle_coords(r, geom, best):
"""Get coordinates of a vertex projected to a triangle"""
r1 = geom['r1'][best]
tri_nn = geom['nn'][best]
r12 = geom['r12'][best]
r13 = geom['r13'][best]
a = geom['a'][best]
b = geom['b'][best]
c = geom['c'][best]
rr = r - r1
z = np.sum(rr * tri_nn)
v1 = np.sum(rr * r12)
v2 = np.sum(rr * r13)
det = a * b - c * c
x = (b * v1 - c * v2) / det
y = (a * v2 - c * v1) / det
return x, y, z
@verbose
def _complete_surface_info(this, do_neighbor_vert=False, verbose=None):
    """Complete surface info.

    Adds triangle centroids, normals and areas, per-vertex normals and
    neighboring-triangle lists (and optionally per-vertex neighbor
    lists) to the surface dict ``this`` in place, and returns it.
    """
    # based on mne_source_space_add_geometry_info() in mne_add_geometry_info.c
    # Main triangulation [mne_add_triangle_data()]
    this['tri_area'] = np.zeros(this['ntri'])
    r1 = this['rr'][this['tris'][:, 0], :]
    r2 = this['rr'][this['tris'][:, 1], :]
    r3 = this['rr'][this['tris'][:, 2], :]
    this['tri_cent'] = (r1 + r2 + r3) / 3.0
    this['tri_nn'] = fast_cross_3d((r2 - r1), (r3 - r1))
    # Triangle normals and areas: the edge cross product's norm is
    # twice the triangle area
    size = np.sqrt(np.sum(this['tri_nn'] ** 2, axis=1))
    this['tri_area'] = size / 2.0
    zidx = np.where(size == 0)[0]
    for idx in zidx:
        logger.info('    Warning: zero size triangle # %s' % idx)
    size[zidx] = 1.0  # prevent ugly divide-by-zero
    this['tri_nn'] /= size[:, None]
    # Find neighboring triangles, accumulate vertex normals, normalize
    logger.info('    Triangle neighbors and vertex normals...')
    this['neighbor_tri'] = _triangle_neighbors(this['tris'], this['np'])
    this['nn'] = _accumulate_normals(this['tris'], this['tri_nn'], this['np'])
    _normalize_vectors(this['nn'])
    # Check for topological defects
    idx = np.where([len(n) == 0 for n in this['neighbor_tri']])[0]
    if len(idx) > 0:
        # BUG FIX: added the missing space between the two adjacent
        # string literals ('neighboring' 'triangles!').
        logger.info('    Vertices [%s] do not have any neighboring '
                    'triangles!' % ','.join([str(ii) for ii in idx]))
    idx = np.where([len(n) < 3 for n in this['neighbor_tri']])[0]
    if len(idx) > 0:
        logger.info('    Vertices [%s] have fewer than three neighboring '
                    'tris, omitted' % ','.join([str(ii) for ii in idx]))
    for k in idx:
        # BUG FIX: the original assigned to this['neighbor_tri'] itself
        # (the loop variable k was unused), clobbering the whole
        # neighbor list with one empty array; only the deficient
        # vertex's entry must be emptied.
        this['neighbor_tri'][k] = np.array([], int)
    # Determine the neighboring vertices and fix errors
    if do_neighbor_vert is True:
        logger.info('    Vertex neighbors...')
        this['neighbor_vert'] = [_get_surf_neighbors(this, k)
                                 for k in range(this['np'])]
    return this
def _get_surf_neighbors(surf, k):
"""Calculate the surface neighbors based on triangulation"""
verts = surf['tris'][surf['neighbor_tri'][k]]
verts = np.setdiff1d(verts, [k], assume_unique=False)
assert np.all(verts < surf['np'])
nneighbors = len(verts)
nneigh_max = len(surf['neighbor_tri'][k])
if nneighbors > nneigh_max:
raise RuntimeError('Too many neighbors for vertex %d' % k)
elif nneighbors != nneigh_max:
logger.info(' Incorrect number of distinct neighbors for vertex'
' %d (%d instead of %d) [fixed].' % (k, nneighbors,
nneigh_max))
return verts
def _normalize_vectors(rr):
"""Normalize surface vertices"""
size = np.sqrt(np.sum(rr * rr, axis=1))
size[size == 0] = 1.0 # avoid divide-by-zero
rr /= size[:, np.newaxis] # operate in-place
def _compute_nearest(xhs, rr, use_balltree=True, return_dists=False):
"""Find nearest neighbors
Note: The rows in xhs and rr must all be unit-length vectors, otherwise
the result will be incorrect.
Parameters
----------
xhs : array, shape=(n_samples, n_dim)
Points of data set.
rr : array, shape=(n_query, n_dim)
Points to find nearest neighbors for.
use_balltree : bool
Use fast BallTree based search from scikit-learn. If scikit-learn
is not installed it will fall back to the slow brute force search.
return_dists : bool
If True, return associated distances.
Returns
-------
nearest : array, shape=(n_query,)
Index of nearest neighbor in xhs for every point in rr.
distances : array, shape=(n_query,)
The distances. Only returned if return_dists is True.
"""
if use_balltree:
try:
from sklearn.neighbors import BallTree
except ImportError:
logger.info('Nearest-neighbor searches will be significantly '
'faster if scikit-learn is installed.')
use_balltree = False
if xhs.size == 0 or rr.size == 0:
if return_dists:
return np.array([], int), np.array([])
return np.array([], int)
if use_balltree is True:
ball_tree = BallTree(xhs)
if return_dists:
out = ball_tree.query(rr, k=1, return_distance=True)
return out[1][:, 0], out[0][:, 0]
else:
nearest = ball_tree.query(rr, k=1, return_distance=False)[:, 0]
return nearest
else:
from scipy.spatial.distance import cdist
if return_dists:
nearest = list()
dists = list()
for r in rr:
d = cdist(r[np.newaxis, :], xhs)
idx = np.argmin(d)
nearest.append(idx)
dists.append(d[0, idx])
return (np.array(nearest), np.array(dists))
else:
nearest = np.array([np.argmin(cdist(r[np.newaxis, :], xhs))
for r in rr])
return nearest
###############################################################################
# Handle freesurfer
def _fread3(fobj):
    """Read a single big-endian 3-byte unsigned integer from ``fobj``.

    FreeSurfer file formats store magic numbers and counts as 3-byte
    integers; this decodes one from the current file position.
    """
    b1, b2, b3 = np.fromfile(fobj, ">u1", 3)
    return (b1 << 16) + (b2 << 8) + b3
def _fread3_many(fobj, n):
"""Read 3-byte ints from an open binary file object."""
b1, b2, b3 = np.fromfile(fobj, ">u1",
3 * n).reshape(-1, 3).astype(np.int).T
return (b1 << 16) + (b2 << 8) + b3
def read_curvature(filepath):
    """Load a binarized curvature map from a FreeSurfer ``?h.curv`` file.

    Parameters
    ----------
    filepath : str
        Path to the curvature file.

    Returns
    -------
    bin_curv : ndarray of int
        1 where the curvature value is exactly zero, 0 elsewhere.
    """
    with open(filepath, "rb") as fobj:
        magic = _fread3(fobj)
        if magic == 16777215:  # new-style file with float32 curvatures
            vnum = np.fromfile(fobj, ">i4", 3)[0]
            curv = np.fromfile(fobj, ">f4", vnum)
        else:  # old-style file: the magic number is the vertex count
            vnum = magic
            _fread3(fobj)  # skip the face count
            curv = np.fromfile(fobj, ">i2", vnum) / 100
    # np.int was a deprecated alias of the builtin int (removed in
    # NumPy >= 1.24); use the builtin directly.
    bin_curv = 1 - np.array(curv != 0, int)
    return bin_curv
@verbose
def read_surface(fname, verbose=None):
    """Load a Freesurfer surface mesh in triangular format

    Parameters
    ----------
    fname : str
        The name of the file containing the surface.
    verbose : bool, str, int, or None
        If not None, override default verbose level (see mne.verbose).

    Returns
    -------
    rr : array, shape=(n_vertices, 3)
        Coordinate points.
    tris : int array, shape=(n_faces, 3)
        Triangulation (each line contains indexes for three points which
        together form a face).

    See Also
    --------
    write_surface
    """
    TRIANGLE_MAGIC = 16777214
    QUAD_MAGIC = 16777215
    NEW_QUAD_MAGIC = 16777213
    with open(fname, "rb", buffering=0) as fobj:  # buffering=0 for np bug
        magic = _fread3(fobj)
        # Quad file or new quad
        if magic in (QUAD_MAGIC, NEW_QUAD_MAGIC):
            create_stamp = ''
            nvert = _fread3(fobj)
            nquad = _fread3(fobj)
            # old quads store int16 scaled by 100; new quads store float32
            (fmt, div) = (">i2", 100.) if magic == QUAD_MAGIC else (">f4", 1.)
            # np.float/np.int were deprecated aliases of the builtins and
            # were removed in NumPy >= 1.24; use the builtins directly.
            coords = np.fromfile(fobj, fmt, nvert * 3).astype(float) / div
            coords = coords.reshape(-1, 3)
            quads = _fread3_many(fobj, nquad * 4)
            quads = quads.reshape(nquad, 4)
            # Face splitting follows: each quad becomes two triangles; the
            # split diagonal depends on the parity of the first vertex id
            faces = np.zeros((2 * nquad, 3), dtype=int)
            nface = 0
            for quad in quads:
                if (quad[0] % 2) == 0:
                    faces[nface:nface + 2] = [[quad[0], quad[1], quad[3]],
                                              [quad[2], quad[3], quad[1]]]
                else:
                    faces[nface:nface + 2] = [[quad[0], quad[1], quad[2]],
                                              [quad[0], quad[2], quad[3]]]
                nface += 2
        elif magic == TRIANGLE_MAGIC:  # Triangle file
            create_stamp = fobj.readline()
            fobj.readline()
            vnum = np.fromfile(fobj, ">i4", 1)[0]
            fnum = np.fromfile(fobj, ">i4", 1)[0]
            coords = np.fromfile(fobj, ">f4", vnum * 3).reshape(vnum, 3)
            faces = np.fromfile(fobj, ">i4", fnum * 3).reshape(fnum, 3)
        else:
            raise ValueError("%s does not appear to be a Freesurfer surface"
                             % fname)
        logger.info('Triangle file: %s nvert = %s ntri = %s'
                    % (create_stamp.strip(), len(coords), len(faces)))
    coords = coords.astype(float)  # XXX: due to mayavi bug on mac 32bits
    return coords, faces
@verbose
def _read_surface_geom(fname, patch_stats=True, norm_rr=False, verbose=None):
    """Load the surface as dict, optionally add the geometry information"""
    # based on mne_load_surface_geom() in mne_surface_io.c
    if isinstance(fname, dict):
        # caller already supplied a surface structure
        s = fname
    elif isinstance(fname, string_types):
        rr, tris = read_surface(fname)  # mne_read_triangle_file()
        s = dict(rr=rr, tris=tris, use_tris=tris, ntri=len(tris),
                 np=len(rr))
    else:
        raise RuntimeError('fname cannot be understood as str or dict')
    if patch_stats is True:
        s = _complete_surface_info(s)
    if norm_rr is True:
        _normalize_vectors(s['rr'])
    return s
##############################################################################
# SURFACE CREATION
def _get_ico_surface(grade, patch_stats=False):
    """Return an icosahedral surface of the desired grade"""
    # always use verbose=False since users don't need to know we're pulling
    # these from a file
    fname = op.join(op.dirname(__file__), 'data', 'icos.fif.gz')
    return read_bem_surfaces(fname, patch_stats, s_id=9000 + grade,
                             verbose=False)
def _tessellate_sphere_surf(level, rad=1.0):
    """Return a sphere tessellation as a surface structure.

    Parameters
    ----------
    level : int
        Subdivision level passed to ``_tessellate_sphere``.
    rad : float
        Radius by which to scale the unit-sphere vertices.

    Returns
    -------
    s : dict
        Surface dict with vertices, triangles and per-vertex normals.
    """
    rr, tris = _tessellate_sphere(level)
    npt = len(rr)  # called "npt" instead of "np" because of numpy...
    ntri = len(tris)
    nn = rr.copy()  # on the unit sphere, the vertex normal is the vertex
    rr *= rad
    # Bugfix: 'nuse' was previously assigned the numpy module ('np')
    # instead of the number of points in use.
    s = dict(rr=rr, np=npt, tris=tris, use_tris=tris, ntri=ntri, nuse=npt,
             nn=nn, inuse=np.ones(npt, int))
    return s
def _norm_midpt(ai, bi, rr):
a = np.array([rr[aii] for aii in ai])
b = np.array([rr[bii] for bii in bi])
c = (a + b) / 2.
return c / np.sqrt(np.sum(c ** 2, 1))[:, np.newaxis]
def _tessellate_sphere(mylevel):
    """Create a tessellation of a unit sphere.

    Starts from a unit octahedron and subdivides every triangle
    ``mylevel - 1`` times, projecting each new vertex back onto the unit
    sphere, then de-duplicates coincident vertices shared by adjacent
    triangles.
    """
    # Vertices of a unit octahedron
    rr = np.array([[1, 0, 0], [-1, 0, 0],  # xplus, xminus
                   [0, 1, 0], [0, -1, 0],  # yplus, yminus
                   [0, 0, 1], [0, 0, -1]], float)  # zplus, zminus
    tris = np.array([[0, 4, 2], [2, 4, 1], [1, 4, 3], [3, 4, 0],
                     [0, 2, 5], [2, 1, 5], [1, 3, 5], [3, 0, 5]], int)
    # A unit octahedron
    if mylevel < 1:
        raise ValueError('# of levels must be >= 1')
    # Reverse order of points in each triangle
    # for counter-clockwise ordering
    tris = tris[:, [2, 1, 0]]
    # Subdivide each starting triangle (mylevel - 1) times
    for _ in range(1, mylevel):
        """
        Subdivide each triangle in the old approximation and normalize
        the new points thus generated to lie on the surface of the unit
        sphere.

        Each input triangle with vertices labelled [0,1,2] as shown
        below will be turned into four new triangles:

                             Make new points
                                 a = (0+2)/2
                                 b = (0+1)/2
                                 c = (1+2)/2
                 1
                /\           Normalize a, b, c
               /  \
             b/____\c        Construct new triangles
             /\    /\            [0,b,a]
            /  \  /  \           [b,1,c]
           /____\/____\          [a,b,c]
          0      a     2         [a,c,2]
        """
        # use new method: first make new points (rr)
        a = _norm_midpt(tris[:, 0], tris[:, 2], rr)
        b = _norm_midpt(tris[:, 0], tris[:, 1], rr)
        c = _norm_midpt(tris[:, 1], tris[:, 2], rr)
        # index ranges of the freshly appended midpoints within rr
        lims = np.cumsum([len(rr), len(a), len(b), len(c)])
        aidx = np.arange(lims[0], lims[1])
        bidx = np.arange(lims[1], lims[2])
        cidx = np.arange(lims[2], lims[3])
        rr = np.concatenate((rr, a, b, c))
        # now that we have our points, make new triangle definitions
        tris = np.array((np.c_[tris[:, 0], bidx, aidx],
                         np.c_[bidx, tris[:, 1], cidx],
                         np.c_[aidx, bidx, cidx],
                         np.c_[aidx, cidx, tris[:, 2]]), int).swapaxes(0, 1)
        tris = np.reshape(tris, (np.prod(tris.shape[:2]), 3))
    # Copy the resulting approximation into standard table.
    # Midpoints created by adjacent triangles coincide, so vertices are
    # de-duplicated here; on the unit sphere a dot product close to 1
    # means "same point" (avoids the sqrt a distance test would need).
    rr_orig = rr
    rr = np.empty_like(rr)
    nnode = 0
    for k, tri in enumerate(tris):
        for j in range(3):
            coord = rr_orig[tri[j]]
            # this is faster than cdist (no need for sqrt)
            similarity = np.dot(rr[:nnode], coord)
            idx = np.where(similarity > 0.99999)[0]
            if len(idx) > 0:
                # reuse the already-registered vertex
                tris[k, j] = idx[0]
            else:
                rr[nnode] = coord
                tris[k, j] = nnode
                nnode += 1
    rr = rr[:nnode].copy()
    return rr, tris
def _create_surf_spacing(surf, hemi, subject, stype, sval, ico_surf,
                         subjects_dir):
    """Load a surf and use the subdivided icosahedron to get points.

    For 'ico'/'oct' spacings, each vertex of ``ico_surf`` is mapped to the
    nearest vertex of the subject's spherical surface; vertices mapped twice
    are moved to a free neighbor. For any other ``stype`` all vertices are
    used. The returned surface dict gains 'inuse', 'use_tris', 'nuse',
    'nuse_tri', 'vertno' and 'subject_his_id' entries.
    """
    # Based on load_source_space_surf_spacing() in load_source_space.c
    surf = _read_surface_geom(surf)
    if stype in ['ico', 'oct']:
        # ## from mne_ico_downsample.c ## #
        surf_name = op.join(subjects_dir, subject, 'surf', hemi + '.sphere')
        logger.info('Loading geometry from %s...' % surf_name)
        from_surf = _read_surface_geom(surf_name, norm_rr=True,
                                       patch_stats=False)
        if not len(from_surf['rr']) == surf['np']:
            raise RuntimeError('Mismatch between number of surface vertices, '
                               'possible parcellation error?')
        _normalize_vectors(ico_surf['rr'])
        # Make the maps: nearest spherical-surface vertex for each ico vertex
        logger.info('Mapping %s %s -> %s (%d) ...'
                    % (hemi, subject, stype, sval))
        mmap = _compute_nearest(from_surf['rr'], ico_surf['rr'])
        nmap = len(mmap)
        surf['inuse'] = np.zeros(surf['np'], int)
        for k in range(nmap):
            if surf['inuse'][mmap[k]]:
                # Try the nearest neighbors
                # (target vertex already taken: move to an unused neighbor)
                neigh = _get_surf_neighbors(surf, mmap[k])
                was = mmap[k]
                inds = np.where(np.logical_not(surf['inuse'][neigh]))[0]
                if len(inds) == 0:
                    raise RuntimeError('Could not find neighbor for vertex '
                                       '%d / %d' % (k, nmap))
                else:
                    mmap[k] = neigh[inds[-1]]
                    logger.info('    Source space vertex moved from %d to %d '
                                'because of double occupation', was, mmap[k])
            elif mmap[k] < 0 or mmap[k] > surf['np']:
                raise RuntimeError('Map number out of range (%d), this is '
                                   'probably due to inconsistent surfaces. '
                                   'Parts of the FreeSurfer reconstruction '
                                   'need to be redone.' % mmap[k])
            surf['inuse'][mmap[k]] = True
        logger.info('Setting up the triangulation for the decimated '
                    'surface...')
        # rewrite ico triangles in terms of the mapped surface vertex ids
        surf['use_tris'] = np.array([mmap[ist] for ist in ico_surf['tris']],
                                    np.int32)
    else:  # use_all is True
        surf['inuse'] = np.ones(surf['np'], int)
        surf['use_tris'] = None
    if surf['use_tris'] is not None:
        surf['nuse_tri'] = len(surf['use_tris'])
    else:
        surf['nuse_tri'] = 0
    surf['nuse'] = np.sum(surf['inuse'])
    surf['vertno'] = np.where(surf['inuse'])[0]
    # set some final params
    inds = np.arange(surf['np'])
    sizes = np.sqrt(np.sum(surf['nn'] ** 2, axis=1))
    surf['nn'][inds] = surf['nn'][inds] / sizes[:, np.newaxis]
    # vertices whose accumulated normal has zero length are marked unused
    surf['inuse'][sizes <= 0] = False
    surf['nuse'] = np.sum(surf['inuse'])
    surf['subject_his_id'] = subject
    return surf
def write_surface(fname, coords, faces, create_stamp=''):
    """Write a triangular Freesurfer surface mesh

    Accepts the same data format as is returned by read_surface().

    Parameters
    ----------
    fname : str
        File to write.
    coords : array, shape=(n_vertices, 3)
        Coordinate points.
    faces : int array, shape=(n_faces, 3)
        Triangulation (each line contains indexes for three points which
        together form a face).
    create_stamp : str
        Comment that is written to the beginning of the file. Can not contain
        line breaks.

    See Also
    --------
    read_surface
    """
    if len(create_stamp.splitlines()) > 1:
        raise ValueError("create_stamp can only contain one line")

    with open(fname, 'wb') as fid:
        fid.write(pack('>3B', 255, 255, 254))  # TRIANGLE_MAGIC as 3 bytes
        strs = ['%s\n' % create_stamp, '\n']
        strs = [s.encode('utf-8') for s in strs]
        fid.writelines(strs)
        vnum = len(coords)
        fnum = len(faces)
        fid.write(pack('>2i', vnum, fnum))
        # ndarray.tostring() was deprecated and is removed in NumPy 2.0;
        # tobytes() is the byte-identical replacement.
        fid.write(np.array(coords, dtype='>f4').tobytes())
        fid.write(np.array(faces, dtype='>i4').tobytes())
###############################################################################
# Decimation
def _decimate_surface(points, triangles, reduction):
    """Decimate a mesh using TVTK's quadric decimation.

    Parameters
    ----------
    points : ndarray
        Vertex coordinates.
    triangles : ndarray
        Triangle definitions (indices into ``points``).
    reduction : float
        Fraction of triangles to remove (0..1).

    Returns
    -------
    points, triangles : ndarray
        The decimated mesh.
    """
    if 'DISPLAY' not in os.environ and sys.platform != 'win32':
        os.environ['ETS_TOOLKIT'] = 'null'
    try:
        from tvtk.api import tvtk
    except ImportError:
        raise ValueError('This function requires the TVTK package to be '
                         'installed')
    if triangles.max() > len(points) - 1:
        raise ValueError('The triangles refer to undefined points. '
                         'Please check your mesh.')
    src = tvtk.PolyData(points=points, polys=triangles)
    decimate = tvtk.QuadricDecimation(input=src, target_reduction=reduction)
    decimate.update()
    out = decimate.output
    tris = out.polys.to_array()
    # n-tuples + interleaved n-next -- reshape trick
    # Bugfix: reshape needs an integer shape; '/' is float division on
    # Python 3, so use floor division.
    return out.points.to_array(), tris.reshape(tris.size // 4, 4)[:, 1:]
def decimate_surface(points, triangles, n_triangles):
    """ Decimate surface data

    Note. Requires TVTK to be installed for this to function.

    Note. If an odd target number was requested, the ``quadric decimation``
    algorithm used results in the next even number of triangles. For
    example a reduction request to 30001 triangles will result in 30000
    triangles.

    Parameters
    ----------
    points : ndarray
        The surface to be decimated, a number-of-points x 3 array.
    triangles : ndarray
        The surface to be decimated, a number-of-triangles x 3 array.
    n_triangles : int
        The desired number of triangles.

    Returns
    -------
    points : ndarray
        The decimated points.
    triangles : ndarray
        The decimated triangles.
    """
    # convert the absolute target count into the fractional reduction
    # expected by the TVTK quadric-decimation filter
    reduction = 1 - (float(n_triangles) / len(triangles))
    return _decimate_surface(points, triangles, reduction)
###############################################################################
# Morph maps
@verbose
def read_morph_map(subject_from, subject_to, subjects_dir=None,
                   verbose=None):
    """Read morph map

    Morph maps can be generated with mne_make_morph_maps. If one isn't
    available, it will be generated automatically and saved to the
    ``subjects_dir/morph_maps`` directory.

    Parameters
    ----------
    subject_from : string
        Name of the original subject as named in the SUBJECTS_DIR.
    subject_to : string
        Name of the subject on which to morph as named in the SUBJECTS_DIR.
    subjects_dir : string
        Path to SUBJECTS_DIR is not set in the environment.
    verbose : bool, str, int, or None
        If not None, override default verbose level (see mne.verbose).

    Returns
    -------
    left_map, right_map : sparse matrix
        The morph maps for the 2 hemispheres.
    """
    subjects_dir = get_subjects_dir(subjects_dir, raise_error=True)

    # First check for morph-map dir existence
    mmap_dir = op.join(subjects_dir, 'morph-maps')
    if not op.isdir(mmap_dir):
        try:
            os.mkdir(mmap_dir)
        except Exception:
            # best-effort only: creation below may still succeed elsewhere
            logger.warning('Could not find or make morph map directory "%s"'
                           % mmap_dir)

    # Does the file exist (try both from->to and to->from file names)
    fname = op.join(mmap_dir, '%s-%s-morph.fif' % (subject_from, subject_to))
    if not op.exists(fname):
        fname = op.join(mmap_dir, '%s-%s-morph.fif'
                        % (subject_to, subject_from))
        if not op.exists(fname):
            logger.warning('Morph map "%s" does not exist, '
                           'creating it and saving it to disk (this may take '
                           'a few minutes)' % fname)
            logger.info('Creating morph map %s -> %s'
                        % (subject_from, subject_to))
            mmap_1 = _make_morph_map(subject_from, subject_to, subjects_dir)
            logger.info('Creating morph map %s -> %s'
                        % (subject_to, subject_from))
            mmap_2 = _make_morph_map(subject_to, subject_from, subjects_dir)
            try:
                _write_morph_map(fname, subject_from, subject_to,
                                 mmap_1, mmap_2)
            except Exception as exp:
                logger.warning('Could not write morph-map file "%s" '
                               '(error: %s)' % (fname, exp))
            # mmap_1 is the [left_map, right_map] pair for from -> to
            return mmap_1

    f, tree, _ = fiff_open(fname)
    with f as fid:
        # Locate all maps
        maps = dir_tree_find(tree, FIFF.FIFFB_MNE_MORPH_MAP)
        if len(maps) == 0:
            raise ValueError('Morphing map data not found')

        # Find the correct ones
        left_map = None
        right_map = None
        for m in maps:
            tag = find_tag(fid, m, FIFF.FIFF_MNE_MORPH_MAP_FROM)
            if tag.data == subject_from:
                tag = find_tag(fid, m, FIFF.FIFF_MNE_MORPH_MAP_TO)
                if tag.data == subject_to:
                    # Names match: which hemisphere is this?
                    tag = find_tag(fid, m, FIFF.FIFF_MNE_HEMI)
                    if tag.data == FIFF.FIFFV_MNE_SURF_LEFT_HEMI:
                        tag = find_tag(fid, m, FIFF.FIFF_MNE_MORPH_MAP)
                        left_map = tag.data
                        logger.info('    Left-hemisphere map read.')
                    elif tag.data == FIFF.FIFFV_MNE_SURF_RIGHT_HEMI:
                        tag = find_tag(fid, m, FIFF.FIFF_MNE_MORPH_MAP)
                        right_map = tag.data
                        logger.info('    Right-hemisphere map read.')

    if left_map is None or right_map is None:
        raise ValueError('Could not find both hemispheres in %s' % fname)

    return left_map, right_map
def _write_morph_map(fname, subject_from, subject_to, mmap_1, mmap_2):
    """Write a morph map to disk.

    Parameters
    ----------
    fname : str
        Destination FIF file name.
    subject_from, subject_to : str
        Subject names; ``mmap_1`` morphs from -> to, ``mmap_2`` the
        reverse direction.
    mmap_1, mmap_2 : list of sparse matrix
        Two-element lists (left- and right-hemisphere maps).
    """
    fid = start_file(fname)
    # NOTE(review): fid is not closed if a write below raises; consider a
    # try/finally around the writes -- confirm start_file/end_file semantics.
    assert len(mmap_1) == 2
    assert len(mmap_2) == 2
    hemis = [FIFF.FIFFV_MNE_SURF_LEFT_HEMI, FIFF.FIFFV_MNE_SURF_RIGHT_HEMI]
    # one FIFFB_MNE_MORPH_MAP block per direction and hemisphere
    for m, hemi in zip(mmap_1, hemis):
        start_block(fid, FIFF.FIFFB_MNE_MORPH_MAP)
        write_string(fid, FIFF.FIFF_MNE_MORPH_MAP_FROM, subject_from)
        write_string(fid, FIFF.FIFF_MNE_MORPH_MAP_TO, subject_to)
        write_int(fid, FIFF.FIFF_MNE_HEMI, hemi)
        write_float_sparse_rcs(fid, FIFF.FIFF_MNE_MORPH_MAP, m)
        end_block(fid, FIFF.FIFFB_MNE_MORPH_MAP)
    for m, hemi in zip(mmap_2, hemis):
        start_block(fid, FIFF.FIFFB_MNE_MORPH_MAP)
        write_string(fid, FIFF.FIFF_MNE_MORPH_MAP_FROM, subject_to)
        write_string(fid, FIFF.FIFF_MNE_MORPH_MAP_TO, subject_from)
        write_int(fid, FIFF.FIFF_MNE_HEMI, hemi)
        write_float_sparse_rcs(fid, FIFF.FIFF_MNE_MORPH_MAP, m)
        end_block(fid, FIFF.FIFFB_MNE_MORPH_MAP)
    end_file(fid)
def _get_tri_dist(p, q, p0, q0, a, b, c, dist):
"""Auxiliary function for getting the distance to a triangle edge"""
return np.sqrt((p - p0) * (p - p0) * a +
(q - q0) * (q - q0) * b +
(p - p0) * (q - q0) * c +
dist * dist)
def _get_tri_supp_geom(tris, rr):
    """Create supplementary geometry information using tris and rrs.

    For each triangle this precomputes: the first vertex ``r1``, the two
    edge vectors ``r12`` and ``r13`` (and their stacked form ``r1213``),
    their dot products ``a``, ``b``, ``c``, the inverse 2x2 Gram matrix
    ``mat`` used to solve for (p, q) coordinates in the triangle, and the
    unit triangle normal ``nn``.
    """
    r1 = rr[tris[:, 0], :]
    r12 = rr[tris[:, 1], :] - r1
    r13 = rr[tris[:, 2], :] - r1
    r1213 = np.array([r12, r13]).swapaxes(0, 1)
    a = np.sum(r12 * r12, axis=1)
    b = np.sum(r13 * r13, axis=1)
    c = np.sum(r12 * r13, axis=1)
    # inverse of the Gram matrix [[a, c], [c, b]], one 2x2 per triangle
    mat = np.rollaxis(np.array([[b, -c], [-c, a]]), 2)
    mat /= (a * b - c * c)[:, np.newaxis, np.newaxis]
    nn = fast_cross_3d(r12, r13)
    _normalize_vectors(nn)
    return dict(r1=r1, r12=r12, r13=r13, r1213=r1213,
                a=a, b=b, c=c, mat=mat, nn=nn)
@verbose
def _make_morph_map(subject_from, subject_to, subjects_dir=None):
    """Construct morph map from one subject to another.

    Note that this is close, but not exactly like the C version.
    For example, parts are more accurate due to double precision,
    so expect some small morph-map differences!

    Note: This seems easily parallelizable, but the overhead
    of pickling all the data structures makes it less efficient
    than just running on a single core :(

    Returns a two-element list of CSR matrices (left and right
    hemisphere), each mapping ``subject_from`` vertices onto
    ``subject_to`` vertices with barycentric interpolation weights.
    """
    # NOTE(review): decorated with @verbose but has no 'verbose' kwarg --
    # confirm the decorator tolerates that signature.
    subjects_dir = get_subjects_dir(subjects_dir)
    morph_maps = list()

    # add speedy short-circuit for self-maps
    if subject_from == subject_to:
        for hemi in ['lh', 'rh']:
            fname = op.join(subjects_dir, subject_from, 'surf',
                            '%s.sphere.reg' % hemi)
            from_pts = read_surface(fname, verbose=False)[0]
            n_pts = len(from_pts)
            # identity map: each vertex maps to itself with weight 1
            morph_maps.append(speye(n_pts, n_pts, format='csr'))
        return morph_maps

    for hemi in ['lh', 'rh']:
        # load surfaces and normalize points to be on unit sphere
        fname = op.join(subjects_dir, subject_from, 'surf',
                        '%s.sphere.reg' % hemi)
        from_pts, from_tris = read_surface(fname, verbose=False)
        n_from_pts = len(from_pts)
        _normalize_vectors(from_pts)
        tri_geom = _get_tri_supp_geom(from_tris, from_pts)

        fname = op.join(subjects_dir, subject_to, 'surf',
                        '%s.sphere.reg' % hemi)
        to_pts = read_surface(fname, verbose=False)[0]
        n_to_pts = len(to_pts)
        _normalize_vectors(to_pts)

        # from surface: get nearest neighbors, find triangles for each vertex
        nn_pts_idx = _compute_nearest(from_pts, to_pts)
        from_pt_tris = _triangle_neighbors(from_tris, len(from_pts))
        from_pt_tris = [from_pt_tris[pt_idx] for pt_idx in nn_pts_idx]

        # find triangle in which point lies and assoc. weights
        nn_tri_inds = []
        nn_tris_weights = []
        for pt_tris, to_pt in zip(from_pt_tris, to_pts):
            p, q, idx, dist = _find_nearest_tri_pt(pt_tris, to_pt, tri_geom)
            nn_tri_inds.append(idx)
            # barycentric weights for the triangle's three vertices
            nn_tris_weights.extend([1. - (p + q), p, q])

        nn_tris = from_tris[nn_tri_inds]
        row_ind = np.repeat(np.arange(n_to_pts), 3)
        this_map = csr_matrix((nn_tris_weights, (row_ind, nn_tris.ravel())),
                              shape=(n_to_pts, n_from_pts))
        morph_maps.append(this_map)

    return morph_maps
def _find_nearest_tri_pt(pt_tris, to_pt, tri_geom, run_all=False):
    """Find nearest point mapping to a set of triangles.

    Returns the (p, q) coordinates inside the winning triangle, the
    triangle index (in the original triangle numbering), and the signed
    out-of-plane distance.

    If run_all is False, if the point lies within a triangle, it stops.
    If run_all is True, edges of other triangles are checked in case
    those (somehow) are closer.
    """
    # The following dense code is equivalent to the following:
    # rr = r1[pt_tris] - to_pts[ii]
    # v1s = np.sum(rr * r12[pt_tris], axis=1)
    # v2s = np.sum(rr * r13[pt_tris], axis=1)
    # aas = a[pt_tris]
    # bbs = b[pt_tris]
    # ccs = c[pt_tris]
    # dets = aas * bbs - ccs * ccs
    # pp = (bbs * v1s - ccs * v2s) / dets
    # qq = (aas * v2s - ccs * v1s) / dets
    # pqs = np.array(pp, qq)

    # This einsum is equivalent to doing:
    # pqs = np.array([np.dot(x, y) for x, y in zip(r1213, r1-to_pt)])
    r1 = tri_geom['r1'][pt_tris]
    rrs = to_pt - r1
    tri_nn = tri_geom['nn'][pt_tris]
    vect = np.einsum('ijk,ik->ij', tri_geom['r1213'][pt_tris], rrs)
    mats = tri_geom['mat'][pt_tris]
    # This einsum is equivalent to doing:
    # pqs = np.array([np.dot(m, v) for m, v in zip(mats, vect)]).T
    pqs = np.einsum('ijk,ik->ji', mats, vect)
    found = False
    # signed distance of to_pt from each candidate triangle's plane
    dists = np.sum(rrs * tri_nn, axis=1)

    # There can be multiple (sadness), find closest
    # a triangle contains the point iff p >= 0, q >= 0 and p + q < 1
    idx = np.where(np.all(pqs >= 0., axis=0))[0]
    idx = idx[np.where(np.all(pqs[:, idx] <= 1., axis=0))[0]]
    idx = idx[np.where(np.sum(pqs[:, idx], axis=0) < 1.)[0]]
    dist = np.inf
    if len(idx) > 0:
        found = True
        pt = idx[np.argmin(np.abs(dists[idx]))]
        p, q = pqs[:, pt]
        dist = dists[pt]
        # re-reference back to original numbers
        pt = pt_tris[pt]

    if found is False or run_all is True:
        # don't include ones that we might have found before
        s = np.setdiff1d(np.arange(len(pt_tris)), idx)  # ones to check sides
        # Tough: must investigate the sides
        pp, qq, ptt, distt = _nearest_tri_edge(pt_tris[s], to_pt, pqs[:, s],
                                               dists[s], tri_geom)
        if np.abs(distt) < np.abs(dist):
            p, q, pt, dist = pp, qq, ptt, distt
    return p, q, pt, dist
def _nearest_tri_edge(pt_tris, to_pt, pqs, dist, tri_geom):
    """Get nearest location from a point to the edge of a set of triangles"""
    # We might do something intelligent here. However, for now
    # it is ok to do it in the hard way
    aa = tri_geom['a'][pt_tris]
    bb = tri_geom['b'][pt_tris]
    cc = tri_geom['c'][pt_tris]
    pp = pqs[0]
    qq = pqs[1]
    # Find the nearest point from a triangle:
    #   Side 1 -> 2 (q = 0): project p onto the edge and clamp to [0, 1]
    p0 = np.minimum(np.maximum(pp + 0.5 * (qq * cc) / aa,
                               0.0), 1.0)
    q0 = np.zeros_like(p0)
    #   Side 2 -> 3 (p + q = 1): parameterize by t along the edge
    t1 = (0.5 * ((2.0 * aa - cc) * (1.0 - pp) +
                 (2.0 * bb - cc) * qq) / (aa + bb - cc))
    t1 = np.minimum(np.maximum(t1, 0.0), 1.0)
    p1 = 1.0 - t1
    q1 = t1
    #   Side 1 -> 3 (p = 0): project q onto the edge and clamp to [0, 1]
    q2 = np.minimum(np.maximum(qq + 0.5 * (pp * cc) / bb, 0.0), 1.0)
    p2 = np.zeros_like(q2)
    # figure out which one had the lowest distance
    dist0 = _get_tri_dist(pp, qq, p0, q0, aa, bb, cc, dist)
    dist1 = _get_tri_dist(pp, qq, p1, q1, aa, bb, cc, dist)
    dist2 = _get_tri_dist(pp, qq, p2, q2, aa, bb, cc, dist)
    pp = np.r_[p0, p1, p2]
    qq = np.r_[q0, q1, q2]
    dists = np.r_[dist0, dist1, dist2]
    ii = np.argmin(np.abs(dists))
    # candidates are stacked as three blocks of len(pt_tris), so the
    # winning triangle index is recovered with ii modulo the block length
    p, q, pt, dist = pp[ii], qq[ii], pt_tris[ii % len(pt_tris)], dists[ii]
    return p, q, pt, dist
def mesh_edges(tris):
    """Returns sparse matrix with edges as an adjacency matrix

    Parameters
    ----------
    tris : array of shape [n_triangles x 3]
        The triangles.

    Returns
    -------
    edges : sparse matrix
        The adjacency matrix.
    """
    if np.max(tris) > len(np.unique(tris)):
        raise ValueError('Cannot compute connectivity on a selection of '
                         'triangles.')
    npoints = np.max(tris) + 1
    a, b, c = tris.T
    # one directed edge per triangle side; symmetrize via the transpose
    row = np.concatenate((a, b, c))
    col = np.concatenate((b, c, a))
    data = np.ones(row.size)
    edges = coo_matrix((data, (row, col)), shape=(npoints, npoints)).tocsr()
    return edges + edges.T
def mesh_dist(tris, vert):
    """Compute adjacency matrix weighted by distances

    It generates an adjacency matrix where the entries are the distances
    between neighboring vertices.

    Parameters
    ----------
    tris : array (n_tris x 3)
        Mesh triangulation
    vert : array (n_vert x 3)
        Vertex locations

    Returns
    -------
    dist_matrix : scipy.sparse.csr_matrix
        Sparse matrix with distances between adjacent vertices
    """
    edges = mesh_edges(tris).tocoo()
    # Euclidean length of every edge
    diffs = vert[edges.row, :] - vert[edges.col, :]
    dist = np.sqrt((diffs * diffs).sum(axis=1))
    return csr_matrix((dist, (edges.row, edges.col)), shape=edges.shape)
|
rajegannathan/grasp-lift-eeg-cat-dog-solution-updated
|
python-packages/mne-python-0.10/mne/surface.py
|
Python
|
bsd-3-clause
| 41,230
|
[
"Mayavi"
] |
cda162b96195424efbc0641dfed6cc15cc5c7ffc18669a61823cad578a5fa03b
|
# Orca
# Copyright (C) 2016 UrbanSim Inc.
# See full license in LICENSE.
from __future__ import print_function
try:
from inspect import getfullargspec as getargspec
except ImportError:
from inspect import getargspec
import logging
import time
import warnings
from collections import namedtuple
try:
from collections.abc import Callable
except ImportError: # Python 2.7
from collections import Callable
from contextlib import contextmanager
from functools import wraps
import pandas as pd
import tables
import tlz as tz
from . import utils
from .utils.logutil import log_start_finish
warnings.filterwarnings('ignore', category=tables.NaturalNameWarning)
logger = logging.getLogger(__name__)
_TABLES = {}
_COLUMNS = {}
_STEPS = {}
_BROADCASTS = {}
_INJECTABLES = {}
_CACHING = True
_TABLE_CACHE = {}
_COLUMN_CACHE = {}
_INJECTABLE_CACHE = {}
_MEMOIZED = {}
_CS_FOREVER = 'forever'
_CS_ITER = 'iteration'
_CS_STEP = 'step'
CacheItem = namedtuple('CacheItem', ['name', 'value', 'scope'])
def clear_all():
    """
    Clear any and all stored state from Orca.
    """
    # wipe every registry and cache dictionary in one pass
    for registry in (_TABLES, _COLUMNS, _STEPS, _BROADCASTS, _INJECTABLES,
                     _TABLE_CACHE, _COLUMN_CACHE, _INJECTABLE_CACHE):
        registry.clear()
    for item in _MEMOIZED.values():
        item.value.clear_cached()
    _MEMOIZED.clear()
    logger.debug('pipeline state cleared')
def clear_cache(scope=None):
    """
    Clear all cached data.

    Parameters
    ----------
    scope : {None, 'step', 'iteration', 'forever'}, optional
        Clear cached values with a given scope.
        By default all cached values are removed.
    """
    if not scope:
        for cache in (_TABLE_CACHE, _COLUMN_CACHE, _INJECTABLE_CACHE):
            cache.clear()
        for item in _MEMOIZED.values():
            item.value.clear_cached()
        logger.debug('pipeline cache cleared')
        return
    # targeted clear: drop only entries registered with the given scope
    for cache in (_TABLE_CACHE, _COLUMN_CACHE, _INJECTABLE_CACHE):
        stale = [key for key, item in cache.items() if item.scope == scope]
        for key in stale:
            del cache[key]
    for item in _MEMOIZED.values():
        if item.scope == scope:
            item.value.clear_cached()
    logger.debug('cleared cached values with scope {!r}'.format(scope))
def enable_cache():
    """
    Allow caching of registered variables that explicitly have
    caching enabled.
    """
    # flip the module-level switch consulted by cache_on()
    global _CACHING
    _CACHING = True
def disable_cache():
    """
    Turn off caching across Orca, even for registered variables
    that have caching enabled.
    """
    # flip the module-level switch consulted by cache_on()
    global _CACHING
    _CACHING = False
def cache_on():
    """
    Whether caching is currently enabled or disabled.

    Returns
    -------
    on : bool
        True if caching is enabled.
    """
    # reads the module-level switch set by enable_cache()/disable_cache()
    return _CACHING
@contextmanager
def cache_disabled():
    """Context manager that temporarily disables Orca's caching.

    The previous caching state is restored on exit even if the managed
    body raises (the re-enable is in a ``finally`` block; previously an
    exception would leave caching permanently disabled).
    """
    was_on = cache_on()
    disable_cache()
    try:
        yield
    finally:
        if was_on:
            enable_cache()
# for errors that occur during Orca runs
class OrcaError(Exception):
    """Base exception for errors that occur during Orca runs."""
    pass
class DataFrameWrapper(object):
    """
    Wraps a DataFrame so it can provide certain columns and handle
    computed columns.

    Parameters
    ----------
    name : str
        Name for the table.
    frame : pandas.DataFrame
    copy_col : bool, optional
        Whether to return copies when evaluating columns.

    Attributes
    ----------
    name : str
        Table name.
    copy_col : bool
        Whether to return copies when evaluating columns.
    local : pandas.DataFrame
        The wrapped DataFrame.

    """
    def __init__(self, name, frame, copy_col=True):
        self.name = name
        self.local = frame
        self.copy_col = copy_col

    @property
    def columns(self):
        """
        Columns in this table.

        Includes both the wrapped DataFrame's own columns and any
        columns registered separately for this table.
        """
        return self.local_columns + list_columns_for_table(self.name)

    @property
    def local_columns(self):
        """
        Columns that are part of the wrapped DataFrame.
        """
        return list(self.local.columns)

    @property
    def index(self):
        """
        Table index.
        """
        return self.local.index

    def to_frame(self, columns=None):
        """
        Make a DataFrame with the given columns.

        Will always return a copy of the underlying table.

        Parameters
        ----------
        columns : sequence or string, optional
            Sequence of the column names desired in the DataFrame. A string
            can also be passed if only one column is desired.
            If None all columns are returned, including registered columns.

        Returns
        -------
        frame : pandas.DataFrame

        """
        extra_cols = _columns_for_table(self.name)
        if columns is not None:
            columns = [columns] if isinstance(columns, str) else columns
            columns = set(columns)
            set_extra_cols = set(extra_cols)
            # '&' and '-' associate left to right: requested columns that
            # live in the local frame, minus any name shadowed by a
            # registered (computed) column
            local_cols = set(self.local.columns) & columns - set_extra_cols
            df = self.local[list(local_cols)].copy()
            extra_cols = {k: extra_cols[k] for k in (columns & set_extra_cols)}
        else:
            df = self.local.copy()
        with log_start_finish(
                'computing {!r} columns for table {!r}'.format(
                    len(extra_cols), self.name),
                logger):
            for name, col in extra_cols.items():
                with log_start_finish(
                        'computing column {!r} for table {!r}'.format(
                            name, self.name),
                        logger):
                    # registered columns are callables producing a Series
                    df[name] = col()
        return df

    def update_col(self, column_name, series):
        """
        Add or replace a column in the underlying DataFrame.

        Parameters
        ----------
        column_name : str
            Column to add or replace.
        series : pandas.Series or sequence
            Column data.

        """
        logger.debug('updating column {!r} in table {!r}'.format(
            column_name, self.name))
        self.local[column_name] = series

    def __setitem__(self, key, value):
        # delegate ``table[col] = series`` to update_col
        return self.update_col(key, value)

    def get_column(self, column_name):
        """
        Returns a column as a Series.

        Parameters
        ----------
        column_name : str

        Returns
        -------
        column : pandas.Series

        """
        with log_start_finish(
                'getting single column {!r} from table {!r}'.format(
                    column_name, self.name),
                logger):
            extra_cols = _columns_for_table(self.name)
            if column_name in extra_cols:
                with log_start_finish(
                        'computing column {!r} for table {!r}'.format(
                            column_name, self.name),
                        logger):
                    column = extra_cols[column_name]()
            else:
                column = self.local[column_name]
            # defensive copy unless disabled at construction time
            if self.copy_col:
                return column.copy()
            else:
                return column

    def __getitem__(self, key):
        return self.get_column(key)

    def __getattr__(self, key):
        # NOTE(review): unknown attribute access falls through to
        # get_column, which raises KeyError rather than AttributeError;
        # this can confuse hasattr/copy/pickle -- confirm before changing.
        return self.get_column(key)

    def column_type(self, column_name):
        """
        Report column type as one of 'local', 'series', or 'function'.

        Parameters
        ----------
        column_name : str

        Returns
        -------
        col_type : {'local', 'series', 'function'}
            'local' means that the column is part of the registered table,
            'series' means the column is a registered Pandas Series,
            and 'function' means the column is a registered function providing
            a Pandas Series.

        """
        extra_cols = list_columns_for_table(self.name)
        if column_name in extra_cols:
            col = _COLUMNS[(self.name, column_name)]
            if isinstance(col, _SeriesWrapper):
                return 'series'
            elif isinstance(col, _ColumnFuncWrapper):
                return 'function'
        elif column_name in self.local_columns:
            return 'local'
        raise KeyError('column {!r} not found'.format(column_name))

    def update_col_from_series(self, column_name, series, cast=False):
        """
        Update existing values in a column from another series.
        Index values must match in both column and series. Optionally
        casts data type to match the existing column.

        Parameters
        ----------
        column_name : str
        series : pandas.Series
        cast : bool, optional, default False

        """
        logger.debug('updating column {!r} in table {!r}'.format(
            column_name, self.name))
        col_dtype = self.local[column_name].dtype
        if series.dtype != col_dtype:
            if cast:
                series = series.astype(col_dtype)
            else:
                err_msg = "Data type mismatch, existing:{}, update:{}"
                err_msg = err_msg.format(col_dtype, series.dtype)
                raise ValueError(err_msg)
        # only rows whose index appears in ``series`` are touched
        self.local.loc[series.index, column_name] = series

    def __len__(self):
        return len(self.local)

    def clear_cached(self):
        """
        Remove cached results from this table's computed columns.
        """
        _TABLE_CACHE.pop(self.name, None)
        for col in _columns_for_table(self.name).values():
            col.clear_cached()
        logger.debug('cleared cached columns for table {!r}'.format(self.name))
class TableFuncWrapper(object):
    """
    Wrap a function that provides a DataFrame.
    Parameters
    ----------
    name : str
        Name for the table.
    func : callable
        Callable that returns a DataFrame.
    cache : bool, optional
        Whether to cache the results of calling the wrapped function.
    cache_scope : {'step', 'iteration', 'forever'}, optional
        Scope for which to cache data. Default is to cache forever
        (or until manually cleared). 'iteration' caches data for each
        complete iteration of the pipeline, 'step' caches data for
        a single step of the pipeline.
    copy_col : bool, optional
        Whether to return copies when evaluating columns.
    Attributes
    ----------
    name : str
        Table name.
    cache : bool
        Whether caching is enabled for this table.
    copy_col : bool
        Whether to return copies when evaluating columns.
    """
    def __init__(
            self, name, func, cache=False, cache_scope=_CS_FOREVER,
            copy_col=True):
        self.name = name
        self._func = func
        # Signature is inspected once; argument names and default
        # "expressions" are matched to registered variables at call time.
        self._argspec = getargspec(func)
        self.cache = cache
        self.cache_scope = cache_scope
        self.copy_col = copy_col
        # These are populated lazily the first time _call_func runs.
        self._columns = []
        self._index = None
        self._len = 0
    @property
    def columns(self):
        """
        Columns in this table. (May contain only computed columns
        if the wrapped function has not been called yet.)
        """
        return self._columns + list_columns_for_table(self.name)
    @property
    def local_columns(self):
        """
        Only the columns contained in the DataFrame returned by the
        wrapped function. (No registered columns included.)
        """
        if self._columns:
            return self._columns
        else:
            # Evaluate the wrapped function so the column list is known.
            self._call_func()
            return self._columns
    @property
    def index(self):
        """
        Index of the underlying table. Will be None if that index is
        unknown.
        """
        return self._index
    def _call_func(self):
        """
        Call the wrapped function and return the result wrapped by
        DataFrameWrapper.
        Also updates attributes like columns, index, and length.
        """
        # Serve from the table cache when caching is globally enabled,
        # enabled for this table, and a cached entry exists.
        if _CACHING and self.cache and self.name in _TABLE_CACHE:
            logger.debug('returning table {!r} from cache'.format(self.name))
            return _TABLE_CACHE[self.name].value
        with log_start_finish(
                'call function to get frame for table {!r}'.format(
                    self.name),
                logger):
            # Resolve the function's arguments to registered variables.
            kwargs = _collect_variables(names=self._argspec.args,
                                        expressions=self._argspec.defaults)
            frame = self._func(**kwargs)
        # Record table metadata before wrapping the frame.
        self._columns = list(frame.columns)
        self._index = frame.index
        self._len = len(frame)
        wrapped = DataFrameWrapper(self.name, frame, copy_col=self.copy_col)
        if self.cache:
            _TABLE_CACHE[self.name] = CacheItem(
                self.name, wrapped, self.cache_scope)
        return wrapped
    def __call__(self):
        # Calling the wrapper evaluates the function (or hits the cache).
        return self._call_func()
    def to_frame(self, columns=None):
        """
        Make a DataFrame with the given columns.
        Will always return a copy of the underlying table.
        Parameters
        ----------
        columns : sequence, optional
            Sequence of the column names desired in the DataFrame.
            If None all columns are returned.
        Returns
        -------
        frame : pandas.DataFrame
        """
        return self._call_func().to_frame(columns)
    def get_column(self, column_name):
        """
        Returns a column as a Series.
        Parameters
        ----------
        column_name : str
        Returns
        -------
        column : pandas.Series
        """
        frame = self._call_func()
        # Delegate column resolution to a DataFrameWrapper so registered
        # (non-local) columns are also found.
        return DataFrameWrapper(self.name, frame,
                                copy_col=self.copy_col).get_column(column_name)
    def __getitem__(self, key):
        return self.get_column(key)
    def __getattr__(self, key):
        # Attribute access falls back to column lookup.
        return self.get_column(key)
    def __len__(self):
        # Length of the most recently computed frame; 0 if the wrapped
        # function has never been called.
        return self._len
    def column_type(self, column_name):
        """
        Report column type as one of 'local', 'series', or 'function'.
        Parameters
        ----------
        column_name : str
        Returns
        -------
        col_type : {'local', 'series', 'function'}
            'local' means that the column is part of the registered table,
            'series' means the column is a registered Pandas Series,
            and 'function' means the column is a registered function providing
            a Pandas Series.
        """
        extra_cols = list_columns_for_table(self.name)
        if column_name in extra_cols:
            col = _COLUMNS[(self.name, column_name)]
            if isinstance(col, _SeriesWrapper):
                return 'series'
            elif isinstance(col, _ColumnFuncWrapper):
                return 'function'
        elif column_name in self.local_columns:
            return 'local'
        raise KeyError('column {!r} not found'.format(column_name))
    def clear_cached(self):
        """
        Remove this table's cached result and that of associated columns.
        """
        _TABLE_CACHE.pop(self.name, None)
        for col in _columns_for_table(self.name).values():
            col.clear_cached()
        logger.debug(
            'cleared cached result and cached columns for table {!r}'.format(
                self.name))
    def func_source_data(self):
        """
        Return data about the wrapped function source, including file name,
        line number, and source code.
        Returns
        -------
        filename : str
        lineno : int
            The line number on which the function starts.
        source : str
        """
        return utils.func_source_data(self._func)
class _ColumnFuncWrapper(object):
    """
    Wrap a function that returns a Series.
    Parameters
    ----------
    table_name : str
        Table with which the column will be associated.
    column_name : str
        Name for the column.
    func : callable
        Should return a Series that has an
        index matching the table to which it is being added.
    cache : bool, optional
        Whether to cache the result of calling the wrapped function.
    cache_scope : {'step', 'iteration', 'forever'}, optional
        Scope for which to cache data. Default is to cache forever
        (or until manually cleared). 'iteration' caches data for each
        complete iteration of the pipeline, 'step' caches data for
        a single step of the pipeline.
    Attributes
    ----------
    name : str
        Column name.
    table_name : str
        Name of table this column is associated with.
    cache : bool
        Whether caching is enabled for this column.
    """
    def __init__(
            self, table_name, column_name, func, cache=False,
            cache_scope=_CS_FOREVER):
        self.table_name = table_name
        self.name = column_name
        self._func = func
        # Inspected once; args/defaults are matched to registered
        # variables each time the column is evaluated.
        self._argspec = getargspec(func)
        self.cache = cache
        self.cache_scope = cache_scope
    def __call__(self):
        """
        Evaluate the wrapped function and return the result.
        """
        # Columns are cached under a (table_name, column_name) key.
        if (_CACHING and
                self.cache and
                (self.table_name, self.name) in _COLUMN_CACHE):
            logger.debug(
                'returning column {!r} for table {!r} from cache'.format(
                    self.name, self.table_name))
            return _COLUMN_CACHE[(self.table_name, self.name)].value
        with log_start_finish(
                ('call function to provide column {!r} for table {!r}'
                 ).format(self.name, self.table_name), logger):
            # Resolve the function's arguments to registered variables.
            kwargs = _collect_variables(names=self._argspec.args,
                                        expressions=self._argspec.defaults)
            col = self._func(**kwargs)
        if self.cache:
            _COLUMN_CACHE[(self.table_name, self.name)] = CacheItem(
                (self.table_name, self.name), col, self.cache_scope)
        return col
    def clear_cached(self):
        """
        Remove any cached result of this column.
        """
        x = _COLUMN_CACHE.pop((self.table_name, self.name), None)
        # Only log when something was actually evicted.
        if x is not None:
            logger.debug(
                'cleared cached value for column {!r} in table {!r}'.format(
                    self.name, self.table_name))
    def func_source_data(self):
        """
        Return data about the wrapped function source, including file name,
        line number, and source code.
        Returns
        -------
        filename : str
        lineno : int
            The line number on which the function starts.
        source : str
        """
        return utils.func_source_data(self._func)
class _SeriesWrapper(object):
"""
Wrap a Series for the purpose of giving it the same interface as a
`_ColumnFuncWrapper`.
Parameters
----------
table_name : str
Table with which the column will be associated.
column_name : str
Name for the column.
series : pandas.Series
Series with index matching the table to which it is being added.
Attributes
----------
name : str
Column name.
table_name : str
Name of table this column is associated with.
"""
def __init__(self, table_name, column_name, series):
self.table_name = table_name
self.name = column_name
self._column = series
def __call__(self):
return self._column
def clear_cached(self):
"""
Here for compatibility with `_ColumnFuncWrapper`.
"""
pass
class _InjectableFuncWrapper(object):
    """
    Wraps a function that will provide an injectable value elsewhere.
    Parameters
    ----------
    name : str
    func : callable
    cache : bool, optional
        Whether to cache the result of calling the wrapped function.
    cache_scope : {'step', 'iteration', 'forever'}, optional
        Scope for which to cache data. Default is to cache forever
        (or until manually cleared). 'iteration' caches data for each
        complete iteration of the pipeline, 'step' caches data for
        a single step of the pipeline.
    Attributes
    ----------
    name : str
        Name of this injectable.
    cache : bool
        Whether caching is enabled for this injectable function.
    """
    def __init__(self, name, func, cache=False, cache_scope=_CS_FOREVER):
        self.name = name
        self._func = func
        # Inspected once; args/defaults are resolved at each call.
        self._argspec = getargspec(func)
        self.cache = cache
        self.cache_scope = cache_scope
    def __call__(self):
        # Serve from the injectable cache when caching is globally
        # enabled, enabled for this injectable, and an entry exists.
        if _CACHING and self.cache and self.name in _INJECTABLE_CACHE:
            logger.debug(
                'returning injectable {!r} from cache'.format(self.name))
            return _INJECTABLE_CACHE[self.name].value
        with log_start_finish(
                'call function to provide injectable {!r}'.format(self.name),
                logger):
            # Resolve the function's arguments to registered variables.
            kwargs = _collect_variables(names=self._argspec.args,
                                        expressions=self._argspec.defaults)
            result = self._func(**kwargs)
        if self.cache:
            _INJECTABLE_CACHE[self.name] = CacheItem(
                self.name, result, self.cache_scope)
        return result
    def clear_cached(self):
        """
        Clear a cached result for this injectable.
        """
        x = _INJECTABLE_CACHE.pop(self.name, None)
        # Only log when something was actually evicted.
        if x:
            logger.debug(
                'injectable {!r} removed from cache'.format(self.name))
class _StepFuncWrapper(object):
    """
    Wrap a step function for argument matching.

    Parameters
    ----------
    step_name : str
    func : callable

    Attributes
    ----------
    name : str
        Name of step.
    """
    def __init__(self, step_name, func):
        self.name = step_name
        self._func = func
        self._argspec = getargspec(func)

    def __call__(self):
        # Resolve the function's arguments to registered variables,
        # then run the step.
        with log_start_finish('calling step {!r}'.format(self.name), logger):
            kwargs = _collect_variables(names=self._argspec.args,
                                        expressions=self._argspec.defaults)
            return self._func(**kwargs)

    def _tables_used(self):
        """
        Tables injected into the step.

        Returns
        -------
        tables : set of str
        """
        positional = list(self._argspec.args)
        defaults = list(self._argspec.defaults) if self._argspec.defaults \
            else []
        # Names without defaults, followed by the default expressions.
        candidates = positional[:len(positional) - len(defaults)] + defaults
        used = set()
        for candidate in candidates:
            # 'table.column' expressions refer to their parent table.
            root = candidate.split('.')[0]
            if is_table(root):
                used.add(root)
        return used

    def func_source_data(self):
        """
        Return data about a step function's source, including file name,
        line number, and source code.

        Returns
        -------
        filename : str
        lineno : int
            The line number on which the function starts.
        source : str
        """
        return utils.func_source_data(self._func)
def is_table(name):
    """
    Returns whether a given name refers to a registered table.
    """
    # Membership test against the module-level table registry.
    return name in _TABLES
def list_tables():
    """
    Names of all registered tables.
    """
    # Iterating the registry dict yields its keys.
    return list(_TABLES)
def list_columns():
    """
    All registered columns as (table name, column name) pairs.
    """
    # Registry keys are already (table, column) tuples.
    return list(_COLUMNS)
def list_steps():
    """
    Names of all registered steps.
    """
    return list(_STEPS)
def list_injectables():
    """
    Names of all registered injectables.
    """
    return list(_INJECTABLES)
def list_broadcasts():
    """
    Registered broadcasts as (cast table name, onto table name) pairs.
    """
    return list(_BROADCASTS)
def is_expression(name):
    """
    Checks whether a given name is a simple variable name or a compound
    variable expression.

    Parameters
    ----------
    name : str

    Returns
    -------
    is_expr : bool
    """
    # Compound expressions look like 'table.column'.
    return name.find('.') >= 0
def _collect_variables(names, expressions=None):
    """
    Map labels and expressions to registered variables.

    Handles argument matching.

    Example:

        _collect_variables(names=['zones', 'zone_id'],
                           expressions=['parcels.zone_id'])

    Would return a dict representing:

        {'parcels': <DataFrameWrapper for zones>,
         'zone_id': <pandas.Series for parcels.zone_id>}

    Parameters
    ----------
    names : list of str
        List of registered variable names and/or labels.
        If mixing names and labels, labels must come at the end.
    expressions : list of str, optional
        List of registered variable expressions for labels defined
        at end of `names`. Length must match the number of labels.

    Returns
    -------
    variables : dict
        Keys match `names`. Values correspond to registered variables,
        which may be wrappers or evaluated functions if appropriate.
    """
    expressions = expressions or []
    offset = len(names) - len(expressions)
    # Plain names map to themselves; trailing labels map to expressions.
    labels_map = dict(zip(names[:offset], names[:offset]))
    labels_map.update(zip(names[offset:], expressions))

    # Tables shadow injectables of the same name.
    all_variables = dict(_INJECTABLES)
    all_variables.update(_TABLES)

    variables = {}
    for label, expression in labels_map.items():
        # In the future, more registered variable expressions could be
        # supported. Currently supports names of registered variables
        # and references to table columns.
        if '.' in expression:
            # Registered variable expression refers to column.
            table_name, column_name = expression.split('.')
            variables[label] = get_table(table_name).get_column(column_name)
        else:
            thing = all_variables[expression]
            if isinstance(thing, (_InjectableFuncWrapper, TableFuncWrapper)):
                # Registered variable object is a function; evaluate it.
                variables[label] = thing()
            else:
                variables[label] = thing
    return variables
def add_table(
        table_name, table, cache=False, cache_scope=_CS_FOREVER,
        copy_col=True):
    """
    Register a table with Orca.

    Parameters
    ----------
    table_name : str
        Should be globally unique to this table.
    table : pandas.DataFrame or function
        If a function, the function should return a DataFrame.
        The function's argument names and keyword argument values
        will be matched to registered variables when the function
        needs to be evaluated by Orca.
    cache : bool, optional
        Whether to cache the results of a provided callable. Does not
        apply if `table` is a DataFrame.
    cache_scope : {'step', 'iteration', 'forever'}, optional
        Scope for which to cache data. Default is to cache forever
        (or until manually cleared). 'iteration' caches data for each
        complete iteration of the pipeline, 'step' caches data for
        a single step of the pipeline.
    copy_col : bool, optional
        Whether to return copies when evaluating columns.

    Returns
    -------
    wrapped : `DataFrameWrapper` or `TableFuncWrapper`
    """
    if isinstance(table, Callable):
        wrapped = TableFuncWrapper(table_name, table, cache=cache,
                                   cache_scope=cache_scope, copy_col=copy_col)
    else:
        wrapped = DataFrameWrapper(table_name, table, copy_col=copy_col)

    # Drop anything cached under this name by a previous registration.
    wrapped.clear_cached()

    logger.debug('registering table {!r}'.format(table_name))
    _TABLES[table_name] = wrapped
    return wrapped
def table(
        table_name=None, cache=False, cache_scope=_CS_FOREVER, copy_col=True):
    """
    Decorates functions that return DataFrames.

    Decorator version of `add_table`. Table name defaults to
    name of function.

    The function's argument names and keyword argument values
    will be matched to registered variables when the function
    needs to be evaluated by Orca.
    The argument name "iter_var" may be used to have the current
    iteration variable injected.
    """
    def decorator(func):
        # Fall back to the function's own name when none is given.
        name = table_name if table_name else func.__name__
        add_table(
            name, func, cache=cache, cache_scope=cache_scope,
            copy_col=copy_col)
        return func
    return decorator
def get_raw_table(table_name):
    """
    Get a wrapped table by name without evaluating or converting it.

    Parameters
    ----------
    table_name : str

    Returns
    -------
    table : DataFrameWrapper or TableFuncWrapper
    """
    try:
        return _TABLES[table_name]
    except KeyError:
        raise KeyError('table not found: {}'.format(table_name))
def get_table(table_name):
    """
    Get a registered table.

    Decorated functions will be converted to `DataFrameWrapper`.

    Parameters
    ----------
    table_name : str

    Returns
    -------
    table : `DataFrameWrapper`
    """
    wrapped = get_raw_table(table_name)
    # Evaluate lazy function tables into concrete DataFrameWrappers.
    if isinstance(wrapped, TableFuncWrapper):
        return wrapped()
    return wrapped
def table_type(table_name):
    """
    Returns the type of a registered table.

    The type can be either "dataframe" or "function".

    Parameters
    ----------
    table_name : str

    Returns
    -------
    table_type : {'dataframe', 'function'}
    """
    wrapped = get_raw_table(table_name)
    if isinstance(wrapped, DataFrameWrapper):
        return 'dataframe'
    if isinstance(wrapped, TableFuncWrapper):
        return 'function'
def add_column(
        table_name, column_name, column, cache=False, cache_scope=_CS_FOREVER):
    """
    Add a new column to a table from a Series or callable.

    Parameters
    ----------
    table_name : str
        Table with which the column will be associated.
    column_name : str
        Name for the column.
    column : pandas.Series or callable
        Series should have an index matching the table to which it
        is being added. If a callable, the function's argument
        names and keyword argument values will be matched to
        registered variables when the function needs to be
        evaluated by Orca. The function should return a Series.
    cache : bool, optional
        Whether to cache the results of a provided callable. Does not
        apply if `column` is a Series.
    cache_scope : {'step', 'iteration', 'forever'}, optional
        Scope for which to cache data. Default is to cache forever
        (or until manually cleared). 'iteration' caches data for each
        complete iteration of the pipeline, 'step' caches data for
        a single step of the pipeline.
    """
    if isinstance(column, Callable):
        wrapper = _ColumnFuncWrapper(
            table_name, column_name, column,
            cache=cache, cache_scope=cache_scope)
    else:
        wrapper = _SeriesWrapper(table_name, column_name, column)

    # Drop anything cached under this name by a previous registration.
    wrapper.clear_cached()

    logger.debug('registering column {!r} on table {!r}'.format(
        column_name, table_name))
    _COLUMNS[(table_name, column_name)] = wrapper
    return wrapper
def column(table_name, column_name=None, cache=False, cache_scope=_CS_FOREVER):
    """
    Decorates functions that return a Series.

    Decorator version of `add_column`. Series index must match
    the named table. Column name defaults to name of function.

    The function's argument names and keyword argument values
    will be matched to registered variables when the function
    needs to be evaluated by Orca.
    The argument name "iter_var" may be used to have the current
    iteration variable injected.
    The index of the returned Series must match the named table.
    """
    def decorator(func):
        # Fall back to the function's own name when none is given.
        name = column_name if column_name else func.__name__
        add_column(
            table_name, name, func, cache=cache, cache_scope=cache_scope)
        return func
    return decorator
def list_columns_for_table(table_name):
    """
    Return a list of all the extra columns registered for a given table.

    Parameters
    ----------
    table_name : str

    Returns
    -------
    columns : list of str
    """
    # Registry keys are (table, column) tuples.
    return [col for tbl, col in _COLUMNS if tbl == table_name]
def _columns_for_table(table_name):
    """
    Return all of the columns registered for a given table.

    Parameters
    ----------
    table_name : str

    Returns
    -------
    columns : dict of column wrappers
        Keys will be column names.
    """
    return {col: wrapper
            for (tbl, col), wrapper in _COLUMNS.items()
            if tbl == table_name}
def column_map(tables, columns):
    """
    Take a list of tables and a list of column names and resolve which
    columns come from which table.

    Parameters
    ----------
    tables : sequence of _DataFrameWrapper or _TableFuncWrapper
        Could also be sequence of modified pandas.DataFrames, the important
        thing is that they have ``.name`` and ``.columns`` attributes.
    columns : sequence of str
        The column names of interest.

    Returns
    -------
    col_map : dict
        Maps table names to lists of column names.
    """
    if not columns:
        # No filter requested: every table contributes all its columns.
        return {t.name: None for t in tables}

    columns = set(columns)
    colmap = {
        t.name: list(set(t.columns).intersection(columns)) for t in tables}
    foundcols = set()
    for cols in colmap.values():
        foundcols.update(cols)
    if foundcols != columns:
        raise RuntimeError('Not all required columns were found. '
                           'Missing: {}'.format(list(columns - foundcols)))
    return colmap
def get_raw_column(table_name, column_name):
    """
    Get a wrapped, registered column.

    This function cannot return columns that are part of wrapped
    DataFrames, it's only for columns registered directly through Orca.

    Parameters
    ----------
    table_name : str
    column_name : str

    Returns
    -------
    wrapped : _SeriesWrapper or _ColumnFuncWrapper
    """
    key = (table_name, column_name)
    if key in _COLUMNS:
        return _COLUMNS[key]
    raise KeyError('column {!r} not found for table {!r}'.format(
        column_name, table_name))
def _memoize_function(f, name, cache_scope=_CS_FOREVER):
    """
    Wraps a function for memoization and ties its cache into the
    Orca cacheing system.

    Parameters
    ----------
    f : function
    name : str
        Name of injectable.
    cache_scope : {'step', 'iteration', 'forever'}, optional
        Scope for which to cache data. Default is to cache forever
        (or until manually cleared). 'iteration' caches data for each
        complete iteration of the pipeline, 'step' caches data for
        a single step of the pipeline.
    """
    cache = {}

    @wraps(f)
    def wrapper(*args, **kwargs):
        try:
            # Key on positional and keyword arguments; both must be hashable.
            cache_key = (
                args or None, frozenset(kwargs.items()) if kwargs else None)
            in_cache = cache_key in cache
        except TypeError:
            raise TypeError(
                'function arguments must be hashable for memoization')

        if _CACHING and in_cache:
            return cache[cache_key]
        result = f(*args, **kwargs)
        cache[cache_key] = result
        return result

    wrapper.__wrapped__ = f
    wrapper.cache = cache
    wrapper.clear_cached = cache.clear
    # Register the memo cache so global cache clearing reaches it.
    _MEMOIZED[name] = CacheItem(name, wrapper, cache_scope)
    return wrapper
def add_injectable(
        name, value, autocall=True, cache=False, cache_scope=_CS_FOREVER,
        memoize=False):
    """
    Add a value that will be injected into other functions.

    Parameters
    ----------
    name : str
    value
        If a callable and `autocall` is True then the function's
        argument names and keyword argument values will be matched
        to registered variables when the function needs to be
        evaluated by Orca. The return value will
        be passed to any functions using this injectable. In all other
        cases, `value` will be passed through untouched.
    autocall : bool, optional
        Set to True to have injectable functions automatically called
        (with argument matching) and the result injected instead of
        the function itself.
    cache : bool, optional
        Whether to cache the return value of an injectable function.
        Only applies when `value` is a callable and `autocall` is True.
    cache_scope : {'step', 'iteration', 'forever'}, optional
        Scope for which to cache data. Default is to cache forever
        (or until manually cleared). 'iteration' caches data for each
        complete iteration of the pipeline, 'step' caches data for
        a single step of the pipeline.
    memoize : bool, optional
        If autocall is False it is still possible to cache function results
        by setting this flag to True. Cached values are stored in a dictionary
        keyed by argument values, so the argument values must be hashable.
        Memoized functions have their caches cleared according to the same
        rules as universal caching.
    """
    if isinstance(value, Callable):
        if autocall:
            value = _InjectableFuncWrapper(
                name, value, cache=cache, cache_scope=cache_scope)
            # Clear any cached data from a previously registered value.
            value.clear_cached()
        elif memoize:
            value = _memoize_function(value, name, cache_scope=cache_scope)

    logger.debug('registering injectable {!r}'.format(name))
    _INJECTABLES[name] = value
def injectable(
        name=None, autocall=True, cache=False, cache_scope=_CS_FOREVER,
        memoize=False):
    """
    Decorates functions that will be injected into other functions.

    Decorator version of `add_injectable`. Name defaults to
    name of function.

    The function's argument names and keyword argument values
    will be matched to registered variables when the function
    needs to be evaluated by Orca.
    The argument name "iter_var" may be used to have the current
    iteration variable injected.
    """
    def decorator(func):
        # Fall back to the function's own name when none is given.
        inj_name = name if name else func.__name__
        add_injectable(
            inj_name, func, autocall=autocall, cache=cache,
            cache_scope=cache_scope, memoize=memoize)
        return func
    return decorator
def is_injectable(name):
    """
    Checks whether a given name can be mapped to an injectable.
    """
    # Membership test against the module-level injectable registry.
    return name in _INJECTABLES
def get_raw_injectable(name):
    """
    Return a raw, possibly wrapped injectable.

    Parameters
    ----------
    name : str

    Returns
    -------
    inj : _InjectableFuncWrapper or object
    """
    if not is_injectable(name):
        raise KeyError('injectable not found: {!r}'.format(name))
    return _INJECTABLES[name]
def injectable_type(name):
    """
    Classify an injectable as either 'variable' or 'function'.

    Parameters
    ----------
    name : str

    Returns
    -------
    inj_type : {'variable', 'function'}
        If the injectable is an automatically called function or any other
        type of callable the type will be 'function', all other injectables
        will be have type 'variable'.
    """
    inj = get_raw_injectable(name)
    return ('function'
            if isinstance(inj, (_InjectableFuncWrapper, Callable))
            else 'variable')
def get_injectable(name):
    """
    Get an injectable by name.

    If the injectable is an ``_InjectableFuncWrapper`` it is evaluated
    (called) and its result returned; all other values are returned
    untouched.

    Parameters
    ----------
    name : str

    Returns
    -------
    injectable
        Original value or evaluated value of an _InjectableFuncWrapper.
    """
    i = get_raw_injectable(name)
    return i() if isinstance(i, _InjectableFuncWrapper) else i
def get_injectable_func_source_data(name):
    """
    Return data about an injectable function's source, including file name,
    line number, and source code.

    Parameters
    ----------
    name : str

    Returns
    -------
    filename : str
    lineno : int
        The line number on which the function starts.
    source : str
    """
    if injectable_type(name) != 'function':
        raise ValueError('injectable {!r} is not a function'.format(name))

    inj = get_raw_injectable(name)
    if isinstance(inj, _InjectableFuncWrapper):
        # Unwrap Orca's injectable wrapper.
        return utils.func_source_data(inj._func)
    if hasattr(inj, '__wrapped__'):
        # Unwrap decorated (e.g. memoized) functions.
        return utils.func_source_data(inj.__wrapped__)
    return utils.func_source_data(inj)
def add_step(step_name, func):
    """
    Add a step function to Orca.

    The function's argument names and keyword argument values
    will be matched to registered variables when the function
    needs to be evaluated by Orca.
    The argument name "iter_var" may be used to have the current
    iteration variable injected.

    Parameters
    ----------
    step_name : str
    func : callable
    """
    # Guard clause: reject non-callables up front.
    if not isinstance(func, Callable):
        raise TypeError('func must be a callable')
    logger.debug('registering step {!r}'.format(step_name))
    _STEPS[step_name] = _StepFuncWrapper(step_name, func)
def step(step_name=None):
    """
    Decorates functions that will be called by the `run` function.

    Decorator version of `add_step`. step name defaults to
    name of function.

    The function's argument names and keyword argument values
    will be matched to registered variables when the function
    needs to be evaluated by Orca.
    The argument name "iter_var" may be used to have the current
    iteration variable injected.
    """
    def decorator(func):
        # Fall back to the function's own name when none is given.
        name = step_name if step_name else func.__name__
        add_step(name, func)
        return func
    return decorator
def is_step(step_name):
    """
    Check whether a given name refers to a registered step.
    """
    # Membership test against the module-level step registry.
    return step_name in _STEPS
def get_step(step_name):
    """
    Get a wrapped step by name.

    Parameters
    ----------
    step_name : str

    Returns
    -------
    wrapped : _StepFuncWrapper
    """
    if not is_step(step_name):
        raise KeyError('no step named {}'.format(step_name))
    return _STEPS[step_name]
# Record describing how one table ("cast") is merged onto another ("onto"):
# cast_on/onto_on name join columns, cast_index/onto_index flag use of each
# side's index instead (mirrors pandas.merge's left_on/right_on/
# left_index/right_index parameters).
Broadcast = namedtuple(
    'Broadcast',
    ['cast', 'onto', 'cast_on', 'onto_on', 'cast_index', 'onto_index'])
def broadcast(cast, onto, cast_on=None, onto_on=None,
              cast_index=False, onto_index=False):
    """
    Register a rule for merging two tables by broadcasting one onto
    the other.

    Parameters
    ----------
    cast, onto : str
        Names of registered tables.
    cast_on, onto_on : str, optional
        Column names used for merge, equivalent of ``left_on``/``right_on``
        parameters of pandas.merge.
    cast_index, onto_index : bool, optional
        Whether to use table indexes for merge. Equivalent of
        ``left_index``/``right_index`` parameters of pandas.merge.
    """
    logger.debug(
        'registering broadcast of table {!r} onto {!r}'.format(cast, onto))
    _BROADCASTS[(cast, onto)] = Broadcast(
        cast, onto, cast_on, onto_on, cast_index, onto_index)
def _get_broadcasts(tables):
    """
    Get the broadcasts associated with a set of tables.

    Parameters
    ----------
    tables : sequence of str
        Table names for which broadcasts have been registered.

    Returns
    -------
    casts : dict of `Broadcast`
        Keys are tuples of strings like (cast_name, onto_name).
    """
    tables = set(tables)
    # Keep only broadcasts whose both endpoints are requested tables.
    casts = {
        key: value for key, value in _BROADCASTS.items()
        if key[0] in tables and key[1] in tables}

    # Every requested table must appear in at least one retained broadcast.
    linked = set()
    for cast_name, onto_name in casts:
        linked.add(cast_name)
        linked.add(onto_name)
    if tables - linked:
        raise ValueError('Not enough links to merge all tables.')
    return casts
def is_broadcast(cast_name, onto_name):
    """
    Checks whether a relationship exists for broadcast `cast_name`
    onto `onto_name`.
    """
    # Broadcast registry is keyed on (cast, onto) tuples.
    return (cast_name, onto_name) in _BROADCASTS
def get_broadcast(cast_name, onto_name):
    """
    Get a single broadcast.

    Broadcasts are stored data about how to do a Pandas join.
    A Broadcast object is a namedtuple with these attributes:

    - cast: the name of the table being broadcast
    - onto: the name of the table onto which "cast" is broadcast
    - cast_on: The optional name of a column on which to join.
      None if the table index will be used instead.
    - onto_on: The optional name of a column on which to join.
      None if the table index will be used instead.
    - cast_index: True if the table index should be used for the join.
    - onto_index: True if the table index should be used for the join.

    Parameters
    ----------
    cast_name : str
        The name of the table being broadcast.
    onto_name : str
        The name of the table onto which `cast_name` is broadcast.

    Returns
    -------
    broadcast : Broadcast
    """
    try:
        return _BROADCASTS[(cast_name, onto_name)]
    except KeyError:
        raise KeyError(
            'no rule found for broadcasting {!r} onto {!r}'.format(
                cast_name, onto_name))
# utilities for merge_tables
def _all_reachable_tables(t):
"""
A generator that provides all the names of tables that can be
reached via merges starting at the given target table.
"""
for k, v in t.items():
for tname in _all_reachable_tables(v):
yield tname
yield k
def _recursive_getitem(d, key):
"""
Descend into a dict of dicts to return the one that contains
a given key. Every value in the dict must be another dict.
"""
if key in d:
return d
else:
for v in d.values():
return _recursive_getitem(v, key)
else:
raise KeyError('Key not found: {}'.format(key))
def _dict_value_to_pairs(d):
    """
    Takes the first value of a dictionary (which itself should be
    a dictionary) and turns it into a series of {key: value} dicts.

    For example, _dict_value_to_pairs({'c': {'a': 1, 'b': 2}}) will yield
    {'a': 1} and {'b': 2}.
    """
    # next(iter(d)) is the first key, matching tz.first's behavior.
    first_value = d[next(iter(d))]
    for key, value in first_value.items():
        yield {key: value}
def _is_leaf_node(merge_node):
"""
Returns True for dicts like {'a': {}}.
"""
return len(merge_node) == 1 and not next(iter(merge_node.values()))
def _next_merge(merge_node):
    """
    Gets a node that has only leaf nodes below it. This table and
    the ones below are ready to be merged to make a new leaf node.
    """
    pairs = list(_dict_value_to_pairs(merge_node))
    if all(_is_leaf_node(pair) for pair in pairs):
        return merge_node

    # Recurse into the first non-leaf child.
    for pair in pairs:
        if not _is_leaf_node(pair):
            return _next_merge(pair)

    raise OrcaError('No node found for next merge.')
def merge_tables(target, tables, columns=None, drop_intersection=True):
    """
    Merge a number of tables onto a target table. Tables must have
    registered merge rules via the `broadcast` function.

    Parameters
    ----------
    target : str, DataFrameWrapper, or TableFuncWrapper
        Name of the table (or wrapped table) onto which tables will be merged.
    tables : list of `DataFrameWrapper`, `TableFuncWrapper`, or str
        All of the tables to merge. Should include the target table.
    columns : list of str, optional
        If given, columns will be mapped to `tables` and only those columns
        will be requested from each table. The final merged table will have
        only these columns. By default all columns are used from every
        table.
    drop_intersection : bool
        If True, keep the left most occurrence of any column name if it occurs
        on more than one table. This prevents getting back the same column
        with suffixes applied by pd.merge. If false, column names will be
        suffixed with the table names - e.g. zone_id_buildings and
        zone_id_parcels.

    Returns
    -------
    merged : pandas.DataFrame

    """
    # allow target to be string or table wrapper
    if isinstance(target, (DataFrameWrapper, TableFuncWrapper)):
        target = target.name
    # allow tables to be strings or table wrappers
    tables = [get_table(t)
              if not isinstance(t, (DataFrameWrapper, TableFuncWrapper)) else t
              for t in tables]
    # `merges` maps each table name to a dict of the tables broadcast onto
    # it; `tables` is rebound to a name -> wrapper lookup.
    merges = {t.name: {} for t in tables}
    tables = {t.name: t for t in tables}
    # broadcast rules keyed by (cast_name, onto_name) tuples
    casts = _get_broadcasts(tables.keys())
    logger.debug(
        'attempting to merge tables {} to target table {}'.format(
            tables.keys(), target))
    # relate all the tables by registered broadcasts; this turns `merges`
    # into a tree of nested dicts rooted at the target table
    for table, onto in casts:
        merges[onto][table] = merges[table]
    merges = {target: merges[target]}
    # verify that all the tables can be merged to the target
    all_tables = set(_all_reachable_tables(merges))
    if all_tables != set(tables.keys()):
        raise RuntimeError(
            ('Not all tables can be merged to target "{}". Unlinked tables: {}'
             ).format(target, list(set(tables.keys()) - all_tables)))
    # add any columns necessary for indexing into other tables
    # during merges
    if columns:
        columns = list(columns)
        for c in casts.values():
            if c.onto_on:
                columns.append(c.onto_on)
            if c.cast_on:
                columns.append(c.cast_on)
    # get column map for which columns go with which table
    colmap = column_map(tables.values(), columns)
    # get frames (materialize a DataFrame for every table up front)
    frames = {name: t.to_frame(columns=colmap[name])
              for name, t in tables.items()}
    past_intersections = set()
    # perform merges until there's only one table left
    while merges[target]:
        nm = _next_merge(merges)
        onto = tz.first(nm)
        onto_table = frames[onto]
        # loop over all the tables that can be broadcast onto
        # the onto_table and merge them all in.
        for cast in nm[onto]:
            cast_table = frames[cast]
            bc = casts[(cast, onto)]
            with log_start_finish(
                    'merge tables {} and {}'.format(onto, cast), logger):
                intersection = set(onto_table.columns).\
                    intersection(cast_table.columns)
                # intersection is ok if it's the join key
                intersection.discard(bc.onto_on)
                intersection.discard(bc.cast_on)
                # otherwise drop so as not to create conflicts
                if drop_intersection:
                    cast_table = cast_table.drop(intersection, axis=1)
                else:
                    # add suffix to past intersections which wouldn't get
                    # picked up by the merge - these we have to rename by hand
                    renames = dict(zip(
                        past_intersections,
                        [c+'_'+onto for c in past_intersections]
                    ))
                    onto_table = onto_table.rename(columns=renames)
                # keep track of past intersections in case there's an odd
                # number of intersections
                past_intersections = past_intersections.union(intersection)
                onto_table = pd.merge(
                    onto_table, cast_table,
                    suffixes=['_'+onto, '_'+cast],
                    left_on=bc.onto_on, right_on=bc.cast_on,
                    left_index=bc.onto_index, right_index=bc.cast_index)
            # replace the existing table with the merged one
            frames[onto] = onto_table
            # free up space by dropping the cast table
            del frames[cast]
        # mark the onto table as having no more things to broadcast
        # onto it.
        _recursive_getitem(merges, onto)[onto] = {}
    logger.debug('finished merge')
    return frames[target]
def get_step_table_names(steps):
    """
    Returns a list of table names injected into the provided steps.

    Parameters
    ----------
    steps: list of str
        Steps to gather table inputs from.

    Returns
    -------
    list of str
    """
    collected = set()
    for step_name in steps:
        collected.update(get_step(step_name)._tables_used())
    return list(collected)
def write_tables(fname, table_names=None, prefix=None, compress=False, local=False):
    """
    Writes tables to a pandas.HDFStore file.

    Parameters
    ----------
    fname : str
        File name for HDFStore. Will be opened in append mode and closed
        at the end of this function.
    table_names: list of str, optional, default None
        List of tables to write. If None, all registered tables will
        be written.
    prefix: str
        If not None, used to prefix the output table names so that
        multiple iterations can go in the same file.
    compress: boolean
        Whether to compress output file using standard HDF5-readable
        zlib compression, default False.
    local: boolean, optional, default False
        If True, store only each table's local columns; otherwise both
        local and computed columns are written.

    """
    if table_names is None:
        table_names = list_tables()

    # Generator so frames are materialized one at a time inside the loop.
    tables = (get_table(t) for t in table_names)

    key_template = '{}/{{}}'.format(prefix) if prefix is not None else '{}'

    # Set compression options to zlib level-1 if the compress arg is True.
    # (Conditional expressions instead of the fragile `x and y or z` idiom,
    # which breaks whenever the middle operand is falsy.)
    complib = 'zlib' if compress else None
    complevel = 1 if compress else 0

    with pd.HDFStore(fname, mode='a', complib=complib, complevel=complevel) as store:
        for t in tables:
            # if local arg is True, store only local columns
            columns = t.local_columns if local is True else None
            store[key_template.format(t.name)] = t.to_frame(columns=columns)
# Record of the current position within a run: (step_num, step_name).
# Injected as the ``iter_step`` injectable so steps can introspect where
# they are in the sequence.
iter_step = namedtuple('iter_step', 'step_num,step_name')
def run(steps, iter_vars=None, data_out=None, out_interval=1,
        out_base_tables=None, out_run_tables=None, compress=False,
        out_base_local=True, out_run_local=True):
    """
    Run steps in series, optionally repeatedly over some sequence.
    The current iteration variable is set as a global injectable
    called ``iter_var``.

    Parameters
    ----------
    steps : list of str
        List of steps to run identified by their name.
    iter_vars : iterable, optional
        The values of `iter_vars` will be made available as an injectable
        called ``iter_var`` when repeatedly running `steps`.
    data_out : str, optional
        An optional filename to which all tables injected into any step
        in `steps` will be saved every `out_interval` iterations.
        File will be a pandas HDF data store.
    out_interval : int, optional
        Iteration interval on which to save data to `data_out`. For example,
        2 will save out every 2 iterations, 5 every 5 iterations.
        Default is every iteration.
        The results of the first and last iterations are always included.
        The input (base) tables are also included and prefixed with `base/`,
        these represent the state of the system before any steps have been
        executed.
        The interval is defined relative to the first iteration. For example,
        a run beginning in 2015 with an out_interval of 2, will write out
        results for 2015, 2017, etc.
    out_base_tables: list of str, optional, default None
        List of base tables to write. If not provided, tables injected
        into 'steps' will be written.
    out_run_tables: list of str, optional, default None
        List of run tables to write. If not provided, tables injected
        into 'steps' will be written.
    compress: boolean, optional, default False
        Whether to compress output file using standard HDF5 zlib compression.
        Compression yields much smaller files using slightly more CPU.
    out_base_local: boolean, optional, default True
        For tables in out_base_tables, whether to store only local columns (True)
        or both, local and computed columns (False).
    out_run_local: boolean, optional, default True
        For tables in out_run_tables, whether to store only local columns (True)
        or both, local and computed columns (False).
    """
    # With no iteration variables the steps run exactly once, with
    # ``iter_var`` injected as None.
    iter_vars = iter_vars or [None]
    max_i = len(iter_vars)

    # get the tables to write out; default both output sets to the tables
    # the steps actually inject
    if out_base_tables is None or out_run_tables is None:
        step_tables = get_step_table_names(steps)

        if out_base_tables is None:
            out_base_tables = step_tables

        if out_run_tables is None:
            out_run_tables = step_tables

    # write out the base (inputs) under the 'base/' prefix before any step
    # has mutated the tables
    if data_out:
        add_injectable('iter_var', iter_vars[0])
        write_tables(data_out, out_base_tables, 'base', compress=compress, local=out_base_local)

    # run the steps
    for i, var in enumerate(iter_vars, start=1):
        add_injectable('iter_var', var)

        if var is not None:
            print('Running iteration {} with iteration value {!r}'.format(
                i, var))
            logger.debug(
                'running iteration {} with iteration value {!r}'.format(
                    i, var))

        t1 = time.time()
        for j, step_name in enumerate(steps):
            # expose the position of the current step to injectables
            add_injectable('iter_step', iter_step(j, step_name))
            print('Running step {!r}'.format(step_name))
            with log_start_finish(
                    'run step {!r}'.format(step_name), logger,
                    logging.INFO):
                step = get_step(step_name)
                t2 = time.time()
                step()
                print("Time to execute step '{}': {:.2f} s".format(
                    step_name, time.time() - t2))
            # step-scoped caches are invalidated after every step
            clear_cache(scope=_CS_STEP)

        print(
            ('Total time to execute iteration {} '
             'with iteration value {!r}: '
             '{:.2f} s').format(i, var, time.time() - t1))

        # write out the results for the current iteration
        # (first and last iterations are always written)
        if data_out:
            if (i - 1) % out_interval == 0 or i == max_i:
                write_tables(data_out, out_run_tables, var, compress=compress, local=out_run_local)

        # iteration-scoped caches are invalidated at the end of an iteration
        clear_cache(scope=_CS_ITER)
@contextmanager
def injectables(**kwargs):
    """
    Temporarily add injectables to the pipeline environment.
    Takes only keyword arguments.

    Injectables will be returned to their original state when the context
    manager exits, even if the body raises an exception.
    """
    global _INJECTABLES

    original = _INJECTABLES.copy()
    _INJECTABLES.update(kwargs)
    # BUG FIX: restore the environment in a finally block so an exception
    # inside the with-body cannot leave the temporary injectables behind.
    try:
        yield
    finally:
        _INJECTABLES = original
@contextmanager
def temporary_tables(**kwargs):
    """
    Temporarily set DataFrames as registered tables.

    Tables will be returned to their original state when the context
    manager exits, even if the body (or the registration itself) raises.
    Caching is not enabled for tables registered via this function.
    """
    global _TABLES

    original = _TABLES.copy()
    # BUG FIX: restore in a finally block. Previously a ValueError raised
    # part-way through registration, or any exception in the with-body,
    # left the temporary tables permanently registered.
    try:
        for k, v in kwargs.items():
            if not isinstance(v, pd.DataFrame):
                raise ValueError('tables only accepts DataFrames')
            add_table(k, v)

        yield

    finally:
        _TABLES = original
def eval_variable(name, **kwargs):
    """
    Execute a single variable function registered with Orca
    and return the result. Any keyword arguments are temporarily set
    as injectables. This gives the value as would be injected into a function.

    Parameters
    ----------
    name : str
        Name of variable to evaluate.
        Use variable expressions to specify columns.

    Returns
    -------
    object
        For injectables and columns this directly returns whatever
        object is returned by the registered function.
        For tables this returns a DataFrameWrapper as if the table
        had been injected into a function.

    """
    with injectables(**kwargs):
        # renamed from `vars` to avoid shadowing the builtin of that name
        collected = _collect_variables([name], [name])
        return collected[name]
def eval_step(name, **kwargs):
    """
    Evaluate a step as would be done within the pipeline environment
    and return the result. Any keyword arguments are temporarily set
    as injectables.

    Parameters
    ----------
    name : str
        Name of step to run.

    Returns
    -------
    object
        Anything returned by a step. (Though note that in Orca runs
        return values from steps are ignored.)

    """
    with injectables(**kwargs):
        step = get_step(name)
        return step()
|
synthicity/orca
|
orca/orca.py
|
Python
|
bsd-3-clause
| 60,782
|
[
"ORCA"
] |
23006be0b6dfb43fc5e3af670282c57b61a18e797c575d21fa5eb91a93399e83
|
import json
from django.http import HttpResponse
from django.shortcuts import get_object_or_404
from django.db import connection
from django.contrib.auth.models import User
from catmaid.control.authentication import requires_user_role, \
can_edit_class_instance_or_fail, can_edit_all_or_fail
from catmaid.control.common import insert_into_log
from catmaid.models import UserRole, Project, Class, ClassInstance, \
ClassInstanceClassInstance, Relation, Treenode
import operator
from collections import defaultdict
@requires_user_role([UserRole.Annotate, UserRole.Browse])
def get_all_skeletons_of_neuron(request, project_id=None, neuron_id=None):
    """Return a JSON list with the IDs of all skeletons that are a
    'model_of' the given neuron in the given project."""
    project = get_object_or_404(Project, pk=project_id)
    neuron = get_object_or_404(ClassInstance,
                               pk=neuron_id,
                               class_column__class_name='neuron',
                               project=project)
    skeletons = ClassInstance.objects.filter(
        project=project,
        cici_via_a__relation__relation_name='model_of',
        cici_via_a__class_instance_b=neuron)
    skeleton_ids = [skeleton.id for skeleton in skeletons]
    return HttpResponse(json.dumps(skeleton_ids), content_type="text/json")
def _delete_if_empty(neuron_id):
    """ Delete this neuron if no class_instance is a model_of it;
    which is to say, it contains no skeletons. Returns True when the
    neuron was empty (and therefore deleted). """
    has_skeletons = ClassInstanceClassInstance.objects.filter(
        class_instance_b=neuron_id,
        relation__relation_name='model_of').exists()
    if has_skeletons:
        return False
    ClassInstance.objects.filter(pk=neuron_id).delete()
    return True
@requires_user_role(UserRole.Annotate)
def delete_neuron(request, project_id=None, neuron_id=None):
    """ Deletes a neuron if and only if two things are the case: 1. The user
    owns all treenodes of the skeleton modeling the neuron in question and
    2. The neuron is not annotated by other users.
    """
    # Make sure the user can edit the neuron in general
    can_edit_class_instance_or_fail(request.user, neuron_id, 'neuron')

    # Create class and relation dictionaries
    # NOTE(review): `classes` is built but never read below - confirm it can
    # be removed or whether something relied on the side-effect-free query.
    classes = dict(Class.objects.filter(
        project_id=project_id).values_list('class_name', 'id'))
    relations = dict(Relation.objects.filter(
        project_id=project_id).values_list('relation_name', 'id'))

    # Make sure the user has permission to edit all treenodes of all skeletons
    skeleton_ids = ClassInstanceClassInstance.objects.filter(
        class_instance_b=neuron_id,
        relation_id=relations['model_of']).values_list(
            'class_instance_a', flat=True)
    for skid in skeleton_ids:
        # Treenodes of this skeleton created by users other than the requester
        others_nodes = Treenode.objects.filter(skeleton_id=skid).exclude(
            user_id=request.user.id).values_list('id', flat=True)
        if others_nodes:
            try:
                can_edit_all_or_fail(request.user, others_nodes, 'treenode')
            except Exception:
                raise Exception("You don't have permission to remove all " \
                        "treenodes of skeleton %s modeling this neuron. The " \
                        "neuron won't be deleted." % skid)

    # Make sure the user has permission to edit all annotations of this neuron
    # NOTE(review): these are the IDs of the annotation *link* rows
    # (class_instance_class_instance), not of the annotation class instances.
    annotation_ids = set(ClassInstanceClassInstance.objects.filter(
        class_instance_a_id=neuron_id,
        relation_id=relations['annotated_with']).values_list(
            'id', flat=True))
    if annotation_ids:
        try:
            can_edit_all_or_fail(request.user, annotation_ids,
                    'class_instance_class_instance')
        except Exception:
            raise Exception("You don't have permission to remove all " \
                    "annotations linked to this neuron. The neuron won't " \
                    "be deleted.")

    # Try to get the root node to have a valid location for a log entry
    if skeleton_ids:
        try:
            root_node = Treenode.objects.get(
                skeleton_id=skeleton_ids[0], parent=None)
            root_location = (root_node.location_x, root_node.location_y,
                             root_node.location_z)
        except (Treenode.DoesNotExist, Treenode.MultipleObjectsReturned):
            root_location = None
    else:
        root_location = None

    # Delete neuron (and implicitly all annotation links due to Django's
    # cascading deletion)
    neuron = get_object_or_404(ClassInstance, pk=neuron_id)
    neuron.delete()

    # Delete all annotations that are not used anymore
    # NOTE(review): this filter compares class_instance_b ids against
    # `annotation_ids`, which hold link-row ids (see above) - looks
    # suspicious; confirm the intended semantics before changing.
    used_annotation_ids = set(ClassInstanceClassInstance.objects.filter(
        class_instance_b_id__in=annotation_ids,
        relation_id=relations['annotated_with']).values_list(
            'id', flat=True))
    unused_annotation_ids = annotation_ids.difference(used_annotation_ids)
    ClassInstance.objects.filter(id__in=unused_annotation_ids).delete()

    # Delete the skeletons (and their treenodes through cascading delete)
    cursor = connection.cursor()
    for skid in skeleton_ids:
        # Because there are constraints used in the database that Django is not
        # aware of, it's emulation of cascading deletion doesn't work.
        # Therefore, raw SQL needs to be used to use true cascading deletion.
        cursor.execute('''
            BEGIN;
            DELETE FROM change_request WHERE treenode_id IN (
                SELECT id FROM treenode WHERE skeleton_id=%s AND project_id=%s);
            DELETE FROM change_request WHERE connector_id IN (
                SELECT id FROM treenode_connector WHERE skeleton_id=%s AND project_id=%s);
            DELETE FROM treenode_class_instance WHERE treenode_id IN (
                SELECT id FROM treenode WHERE skeleton_id=%s AND project_id=%s);
            DELETE FROM treenode WHERE skeleton_id=%s AND project_id=%s;
            DELETE FROM treenode_connector WHERE skeleton_id=%s AND project_id=%s;
            DELETE FROM class_instance WHERE id=%s AND project_id=%s;
            DELETE FROM review WHERE skeleton_id=%s AND project_id=%s;
            COMMIT;
            ''', (skid, project_id) * 7)

    # Insert log entry and refer to position of the first skeleton's root node
    insert_into_log(project_id, request.user.id, 'remove_neuron', root_location,
            'Deleted neuron %s and skeleton(s) %s.' % (neuron_id,
                    ', '.join([str(s) for s in skeleton_ids])))

    return HttpResponse(json.dumps({
        'success': "Deleted neuron #%s as well as its skeletons and " \
                "annotations." % neuron_id}))
@requires_user_role(UserRole.Annotate)
def give_neuron_to_other_user(request, project_id=None, neuron_id=None):
    """ Transfer ownership of a neuron and the skeletons modeling it.

    The requesting user must be a superuser, or own the neuron and all
    skeletons that are a 'model_of' it. On success, ownership of the
    neuron, its skeletons and the 'model_of' links is moved to the user
    given by the 'target_user_id' POST parameter.
    """
    neuron_id = int(neuron_id)
    target_user = User.objects.get(pk=int(request.POST['target_user_id']))

    # 1. Check that the request.user is superuser
    # or owns the neuron and the skeletons under it
    neuron = ClassInstance.objects.get(pk=neuron_id)
    if not request.user.is_superuser and neuron.user.id != request.user.id:
        return HttpResponse(json.dumps({'error': 'You don\'t own the neuron!'}))

    qs = ClassInstanceClassInstance.objects.filter(
            class_instance_b=neuron_id,
            relation__relation_name='model_of').values_list('class_instance_a__user_id', 'class_instance_a')
    skeletons = defaultdict(list)  # user_id vs list of owned skeletons
    for user_id, skeleton_id in qs:
        skeletons[user_id].append(skeleton_id)
    if not skeletons:
        return HttpResponse(json.dumps({'error': 'The neuron does not contain any skeletons!'}))

    # Copy the mapping, then drop the requester's own skeletons: what is
    # left must be empty unless the requester is a superuser.
    # (dict.items() instead of py2-only iteritems(); a comprehension
    # instead of the bare py2 builtin `reduce`.)
    sks = {k: v[:] for k, v in skeletons.items()}  # deep copy
    if request.user.id in sks:
        del sks[request.user.id]
    if not request.user.is_superuser and sks:
        not_owned = [skid for owned in sks.values() for skid in owned]
        return HttpResponse(json.dumps({'error': 'You don\'t own: %s' % not_owned}))

    # 2. Change neuron's and skeleton's and class_instance_class_instance relationship owner to target_user

    # Update user_id of the relation 'model_of' between any skeletons and the chosen neuron
    ClassInstanceClassInstance.objects.filter(
        relation__relation_name='model_of',
        class_instance_b=neuron_id).update(user=target_user)

    # Update user_id of the neuron
    ClassInstance.objects.filter(pk=neuron_id).update(user=target_user)

    # Update user_id of the skeleton(s)
    all_skeletons = [skid for owned in skeletons.values() for skid in owned]
    ClassInstance.objects.filter(pk__in=all_skeletons).update(user=target_user)

    # BUG FIX: the success message was previously returned with its '%s'
    # placeholders never interpolated; fill them in.
    return HttpResponse(json.dumps({
        'success': 'Moved neuron #%s to %s staging area.' % (
            neuron_id, target_user.username)}))
|
htem/CATMAID
|
django/applications/catmaid/control/neuron.py
|
Python
|
agpl-3.0
| 8,383
|
[
"NEURON"
] |
a1eb45dfcbccf5c075d498ae91cad2dcbe733bf87dbe7386b4f95bd7632d2e26
|
"""
@name: Modules/Core/Mqtt/_test/test_protocol.py
@author: D. Brian Kimmel
@contact: D.BrianKimmel@gmail.com
@copyright: (c) 2015-2019 by D. Brian Kimmel
@license: MIT License
@note: Created on Jun 3, 2015
@Summary:
Passed all 4 tests - DBK- 2019-08-15
"""
from Modules.Core.Config.config_tools import AccessInformation
__updated__ = '2019-11-12'
# Import system type stuff
from twisted.trial import unittest
# Import PyMh files and modules.
from _test.testing_mixin import SetupPyHouseObj
from Modules.Core.Mqtt.mqtt import MqttInformation, MqttBrokerInformation
from Modules.Core.Mqtt.mqtt_protocol import MQTTProtocol
from Modules.Core.Utilities.debug_tools import PrettyFormatAny
class SetupMixin(object):
    """Shared fixture: builds a fresh PyHouse object before each test."""

    def setUp(self):
        # Full PyHouse data object used by the test classes below.
        self.m_pyhouse_obj = SetupPyHouseObj().BuildPyHouseObj()
class A0(unittest.TestCase):
    """Identification test: prints the module id and exercises the
    PrettyFormatAny helper so later cleanups keep it importable."""

    def test_00_Print(self):
        _x = PrettyFormatAny.form('_test', 'title', 190)  # so it is defined when printing is cleaned up.
        print('Id: test_mqtt_protocol')
class B1_Packet(SetupMixin, unittest.TestCase):
    """Placeholder group for MQTT packet string tests."""

    def setUp(self):
        SetupMixin.setUp(self)

    def test_01_String(self):
        # Placeholder - no assertions implemented yet.
        pass
class B2_Packet(SetupMixin, unittest.TestCase):
    """Tests building MQTT fixed headers and CONNECT packets via
    MQTTProtocol's private builder methods."""

    # Class-level default; replaced by an MqttBrokerInformation in setUp().
    m_broker = {}

    def setUp(self):
        SetupMixin.setUp(self)
        # Minimal broker/client configuration for packet building:
        # no will message, no credentials, clean-start session.
        self.m_mqtt = MqttInformation()
        self.m_mqtt.ClientID = "TestClient"
        self.m_broker = MqttBrokerInformation()
        self.m_broker.BrokerName = "Test BrokerS"
        self.m_broker.Keepalive = 30000
        self.m_broker.Will.Topic = None
        self.m_broker.Will.Message = None
        self.m_broker.Will.QoS = 0
        self.m_broker.Will.Retain = False
        self.m_broker.CleanStart = True
        self.m_broker.Access = AccessInformation()
        self.m_broker.Access.Name = None
        self.m_broker.Access.Password = None

    def test_01_Fixed(self):
        """Builds a fixed header; smoke test only (no assertion)."""
        l_packet_type = 0x01
        l_remaining_length = 17
        _l_fixed = MQTTProtocol()._build_fixed_header(l_packet_type, l_remaining_length)
        # print(PrettyFormatAny.form(_l_fixed, 'FixedHeader'))

    def test_02_connect(self):
        """Builds a CONNECT packet and checks the fixed header bytes
        (0x10 = CONNECT, 0x16 = remaining length 22)."""
        l_fixed, l_var, l_pay = MQTTProtocol()._build_connect(self.m_broker, self.m_mqtt)
        # print('\n   Fixed: {}'.format(FormatBytes(l_fixed)))
        # print('Variable: {}'.format(FormatBytes(l_var)))
        # print(' Payload: {}'.format(FormatBytes(l_pay)))
        self.assertEqual(l_fixed, bytearray(b'\x10\x16'))
# ## END DBK
|
DBrianKimmel/PyHouse
|
Project/src/Modules/Core/Mqtt/_test/test_mqtt_protocol.py
|
Python
|
mit
| 2,548
|
[
"Brian"
] |
0eaf781adc32b9cff531ed94dff1dd8d481e86798bf3b22b12ae9e10d4c91060
|
"""
Append data to an existing CTD Profile dataset
==============================================
Use the TAMOC ambient module to append data to a CTD Profile object that has
already been created as in the other examples in this ./bin director. This
file demonstrates working with the data from the R/V Brooks McCall at Station
BM 54 on May 30, 2010, stored in the file /Raw_Data/ctd_BM54.cnv.
In this example, we compute a typical nitrogen profile and append that data
to the data in the CTD dataset.
This script demonstrates the new version of the `ambient.Profile` object, which uses `xarray`. For the older version, which used netCDF datasets, see the script with the same file name but prepended by 'nc_'.
Notes
-----
Much of the input data in the script (e.g., columns to extract, column names,
lat and lon location data, date and time, etc.) is read by the user manually
from the header file of the CTD text file. These data are then hand-coded in
the script text. While it would be straightforward to automate this process
for a given format of CTD files, this step is left to the user to customize to
their own data sets.
Requires
--------
This script reads data from the text file::
../../tamoc/data/ctd_BM54.cnv
Returns
-------
This script generates an `ambient.Profile` object, whose netCDF file is
written to the file::
../../test/output/BM54.nc
"""
# S. Socolofsky, July 2013, Texas A&M University <socolofs@tamu.edu>.
from __future__ import (absolute_import, division, print_function,
unicode_literals)
from tamoc import ambient
from tamoc import seawater
from tamoc import dbm
from netCDF4 import date2num, num2date
from datetime import datetime
import xarray as xr
import numpy as np
import matplotlib.pyplot as plt
import os
def get_ctd_profile():
    """
    Load CTD Data into an 'ambient.Profile' object.

    This function performs the steps in ./profile_from_ctd.py to read in the
    CTD data and create a Profile object. This is the data set that will be
    used to demonstrate how to append data to a Profile object.

    Returns
    -------
    ambient.Profile
        Profile built from the BM54 CTD cast with oxygen as a chemical.
    """
    # Get the path to the input file
    __location__ = os.path.realpath(os.path.join(os.getcwd(),
                                    os.path.dirname(__file__),
                                    '../../tamoc/data'))
    dat_file = os.path.join(__location__,'ctd_BM54.cnv')

    # Load in the data using numpy.loadtxt; column choices and the number of
    # header rows come from the .cnv file header (hand-coded here).
    raw = np.loadtxt(dat_file, comments = '#', skiprows = 175,
                     usecols = (0, 1, 3, 8, 9, 10, 12))

    # Remove reversals in the CTD data and get only the down-cast
    raw_data = ambient.extract_profile(raw, z_col=3, z_start=50.0)

    # Reorganize this data into the correct order
    data = np.zeros(raw_data.shape)
    ztsp = ['z', 'temperature', 'salinity', 'pressure']
    ztsp_units = ['m', 'deg C', 'psu', 'db']
    chem_names = ['oxygen', 'wetlab_fluorescence', 'density']
    chem_units = ['mg/l', 'mg/m^3', 'kg/m^3']
    data[:,0] = raw_data[:,3]
    data[:,1] = raw_data[:,0]
    data[:,2] = raw_data[:,4]
    data[:,3] = raw_data[:,1]
    data[:,4] = raw_data[:,6]

    # Create an ambient.Profile object for this dataset
    # NOTE(review): chem_names is re-assigned to just ['oxygen'] here while
    # chem_units still lists three units and only one chemical column
    # (index 4) was filled - presumably intentional, but confirm that
    # ambient.Profile tolerates the length mismatch.
    chem_names = ['oxygen']
    bm54 = ambient.Profile(data, ztsp=ztsp, ztsp_units=ztsp_units,
                           chem_names=chem_names, chem_units=chem_units)

    return bm54
if __name__ == '__main__':
    """
    Demonstrate how to add data to an existing Profile object
    """
    # Get the ambient.Profile object with the original CTD data
    profile = get_ctd_profile()

    # Compute a dissolved nitrogen profile...start with a model for air
    # (mole fractions of N2, O2, Ar, CO2 in the standard atmosphere)
    air = dbm.FluidMixture(['nitrogen', 'oxygen', 'argon', 'carbon_dioxide'])
    yk = np.array([0.78084, 0.20946, 0.009340, 0.00036])
    m = air.masses(yk)

    # Compute the solubility of nitrogen at the air-water interface, then
    # correct for seawater compressibility
    z_coords = profile.interp_ds.coords['z'].values
    n2_conc = np.zeros(len(z_coords))
    for i in range(len(z_coords)):
        T, S, P = profile.get_values(z_coords[i], ['temperature', 'salinity',
                                     'pressure'])
        # Surface solubility scaled by the in-situ/surface density ratio
        Cs = air.solubility(m, T, 101325., S)[0,:] * \
             seawater.density(T, S, P) / seawater.density(T, S, 101325.)
        n2_conc[i] = Cs[0]

    # Add this computed nitrogen profile to the Profile dataset
    data = np.vstack((z_coords, n2_conc)).transpose()
    symbols = ['z', 'nitrogen']
    units = ['m', 'kg/m^3']
    comments = ['measured', 'computed from CTD data']
    profile.append(data, symbols, units, comments, 0)

    # Close the dataset
    profile.close_nc()

    # Plot the oxygen and nitrogen profiles to show that data have been
    # added to the Profile object
    z = np.linspace(profile.z_min, profile.z_max, 250)
    o2 = np.zeros(z.shape)
    n2 = np.zeros(z.shape)
    for i in range(len(z)):
        n2[i], o2[i] = profile.get_values(z[i], ['nitrogen', 'oxygen'])

    plt.figure()
    plt.clf()
    # NOTE(review): plt.show() is called before any axes are drawn; on
    # non-interactive backends this blocks with an empty figure. The
    # plt.draw() at the end suggests interactive use - confirm intended.
    plt.show()

    ax1 = plt.subplot(121)
    ax1.plot(o2, z)
    ax1.set_xlabel('Oxygen (kg/m^3)')
    ax1.set_ylabel('Depth (m)')
    ax1.invert_yaxis()
    ax1.set_title('Measured data')

    ax2 = plt.subplot(122)
    ax2.plot(n2, z)
    ax2.set_xlabel('Nitrogen (kg/m^3)')
    ax2.set_ylabel('Depth (m)')
    ax2.invert_yaxis()
    ax2.set_title('Computed data')

    plt.draw()
socolofs/tamoc
|
bin/ambient/np_profile_append.py
|
Python
|
mit
| 5,445
|
[
"NetCDF"
] |
6d547451d5683b09f46698948d40249fca5624b6217e663335a7a06159670264
|
# Copyright 2015 Adriana Supady & Mateusz Marianski
#
# This file is part of fafoom.
#
# Fafoom is free software: you can redistribute it and/or modify
# it under the terms of the GNU Lesser General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# Fafoom is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Lesser General Public License for more details.
#
# You should have received a copy of the GNU Lesser General Public License
# along with fafoom. If not, see <http://www.gnu.org/licenses/>.
"""Measure and set dihedral angles and rings."""
from __future__ import division
from operator import itemgetter
import numpy as np
from rdkit import Chem
from rdkit.Chem import rdMolTransforms
from utilities import get_vec, tor_rmsd, xyz2sdf
def ig(x):
    """Return an :func:`operator.itemgetter` fetching position *x*
    (shorthand used throughout this module)."""
    getter = itemgetter(x)
    return getter
def dihedral_measure(sdf_string, position):
    """ Measure the dihedral angle.

    Args:
        sdf_string (string)
        position (list): 4 atoms defining the dihedral
    Returns:
        float value, rounded to two decimal places
    Raises:
        ValueError: If the length of the list is not equal 4.
    """
    if len(position) != 4:
        raise ValueError("The position needs to be defined by 4 integers")

    mol = Chem.MolFromMolBlock(sdf_string, removeHs=False)
    angle = float(rdMolTransforms.GetDihedralDeg(
        mol.GetConformer(),
        position[0], position[1], position[2], position[3]))
    return float('{0:.2f}'.format(angle))
def dihedral_set(sdf_string, position, value):
    """ Set the dihedral angle.

    Args:
        sdf_string (string):
        position (list): 4 atoms defining the dihedral
        value : value to set
    Returns:
        modified sdf_string
    Raises:
        ValueError: If the length of the list is not equal 4.
    """
    if len(position) != 4:
        raise ValueError("The position needs to be defined by 4 integers")

    mol = Chem.MolFromMolBlock(sdf_string, removeHs=False)
    conformer = mol.GetConformer()
    rdMolTransforms.SetDihedralDeg(conformer,
                                   position[0], position[1],
                                   position[2], position[3], value)
    return Chem.MolToMolBlock(mol)
def pyranosering_set(sdf_string, position, new_dih, new_ang):
""" Set the pyranosering.
Args:
sdf_string (string)
position (list): 7 atoms defining the ring, i.e. positions of
['C0','C1','C2','C3','C4','O', 'O0']
new_dih (list) : 5 values for the dihedral angles
new_ang (list): 5 values for the bond angles
Returns:
modified sdf_string
Raises:
ValueError: If the lenght of the position is not equal 7 ot if the
length of new_dih/new_ang is not equal to 5.
"""
if len(position) != 7:
raise ValueError("The position needs to be defined by 7 integers")
if len(new_dih) != 5:
raise ValueError("Five dihedral angles are needed for the new ring "
"conformation.")
if len(new_ang) != 5:
raise ValueError("Five bond angles are needed for the new ring "
"conformation.")
from scipy.linalg import expm3
atoms_ring = {}
for n, name in zip(range(len(position)),
['C0', 'C1', 'C2', 'C3', 'C4', 'O', 'O0']):
atoms_ring[name] = position[n]
def initialize(sdf_string):
molecule = Chem.MolFromMolBlock(sdf_string, removeHs=False)
return molecule
def calculate_normal_vector(list_of_atoms, xyz):
"""Calculate the normal vector of a plane by
cross product of two vectors belonging to it.
Args:
list_of_atoms: list of 3 atoms
xyz: numpy array with atoms xyz position
"""
r0 = xyz[list_of_atoms[1], :] - xyz[list_of_atoms[0], :]
r1 = xyz[list_of_atoms[2], :] - xyz[list_of_atoms[1], :]
cross_product = np.cross(r1, r0)
return cross_product
def measure_angle(list_of_atoms, xyz):
"""Calculate an angle between three atoms:
angle = acos(dot(X,Y)/(norm(X)*norm(Y)))
Args:
list_of_atoms: list of 3 atoms
xyz: numpy array with atoms xyz positions
"""
r0 = xyz[list_of_atoms[0], :] - xyz[list_of_atoms[1], :]
r1 = xyz[list_of_atoms[2], :] - xyz[list_of_atoms[1], :]
norm_r0 = np.sqrt(np.sum(r0**2))
norm_r1 = np.sqrt(np.sum(r1**2))
norm = norm_r0*norm_r1
dot_product = np.dot(r0, r1)/norm
angle = np.arccos(dot_product)
#Calculate the axis of rotation (axor):
axor = np.cross(r0, r1)
return angle*180.0/np.pi, axor
def measure_dihedral(list_of_atoms, xyz):
"""Calculate a dihedral angle between two planes defined by
a list of four atoms. It returns the angle and the rotation axis
required to set a new dihedral.
Args:
list_of_atoms: list of 4 atoms
xyz: numpy array with atom xyz positions
"""
plane1 = calculate_normal_vector(list_of_atoms[:3], xyz)
plane2 = calculate_normal_vector(list_of_atoms[1:], xyz)
#Calculate the axis of rotation (axor)
axor = np.cross(plane1, plane2)
#Calculate a norm of normal vectors:
norm_plane1 = np.sqrt(np.sum(plane1**2))
norm_plane2 = np.sqrt(np.sum(plane2**2))
norm = norm_plane1 * norm_plane2
#Measure the angle between two planes:
dot_product = np.dot(plane1, plane2)/norm
alpha = np.arccos(dot_product)
#The cosine function is symetric thus, to distinguish between
#negative and positive angles, one has to calculate if the fourth
#point is above or below the plane defined by first 3 points:
ppoint = - np.dot(plane1, xyz[list_of_atoms[0], :])
dpoint = (np.dot(plane1, xyz[list_of_atoms[3], :])+ppoint)/norm_plane1
if dpoint >= 0:
return -(alpha*180.0)/np.pi, axor
else:
return (alpha*180.0)/np.pi, axor
def determine_carried_atoms(at1, at2, conn_mat):
"""Find all atoms necessary to be carried over during rotation
of an atom 2:
Args:
at1, at2: two atoms number
"""
#1. Zero the connections in connectivity matrix
tmp_conn = np.copy(conn_mat)
tmp_conn[at1, at2] = 0
tmp_conn[at2, at1] = 0
#2. Determine the connected atoms:
carried_atoms = [at2]
s = True
while s:
s = False
#Always iterate over entire list because I might have branching
for at in carried_atoms:
#List of indexes of connected atoms:
conn_atoms = np.where(tmp_conn[at] != 0)[0]
conn_atoms.tolist
for x in conn_atoms:
if x not in carried_atoms:
carried_atoms.append(x)
s = True
return carried_atoms
def set_angle(list_of_atoms, new_ang, atoms_ring, xyz, conn_mat):
    """Set a new angle between three atoms

    Args:
        list_of_atoms: list of three atoms
        new_ang: value of the bond angle (in degrees) to be set
        atoms_ring: dictionary of atoms in the ring. It recognizes
            if the last atom is 'C0O' (obsolete)
        xyz: numpy array with atoms xyz positions
        conn_mat: connectivity matrix
    Returns:
        xyz: modified numpy array with new atoms positions
    """
    #Determine the axis of rotation:
    old_ang, axor = measure_angle(list_of_atoms, xyz)
    norm_axor = np.sqrt(np.sum(axor**2))
    normalized_axor = axor/norm_axor
    #Determine which atoms should be dragged along with the bond:
    carried_atoms = determine_carried_atoms(list_of_atoms[1],
                                            list_of_atoms[2], conn_mat)
    # Each carried atom is rotated about the axis through the central
    # atom by the difference between the target and current angle;
    # atoms are translated so the central atom sits at the origin for
    # the rotation step and then moved back.
    rot_angle = np.pi*(new_ang - old_ang)/180.
    #Shake it, baby! Rotation matrix:
    # NOTE(review): expm3 (Taylor-series matrix exponential) was
    # removed from modern SciPy (scipy.linalg.expm is the replacement);
    # confirm where this name is imported from in this module.
    rot1 = expm3(np.cross(np.eye(3), normalized_axor*rot_angle))
    translation = xyz[list_of_atoms[1], :]
    for at in carried_atoms:
        xyz[at, :] = np.dot(rot1, xyz[at, :]-translation)
        xyz[at, :] = xyz[at, :]+translation
    return xyz
def set_dihedral(list_of_atoms, new_dih, atoms_ring, xyz, conn_mat):
    """Set a new dihedral angle between two planes defined by
    atoms first and last three atoms of the supplied list.

    Args:
        list_of_atoms: list of four atoms
        new_dih: value of dihedral angle (in degrees) to be set
        atoms_ring: dictionary of atoms in the ring. It recognizes
            if the last atom is 'C0O'
        xyz: numpy array with atoms xyz positions
        conn_mat: connectivity matrix
    Returns:
        xyz: modified numpy array with new atoms positions
    """
    #Determine the axis of rotation:
    old_dih, axor = measure_dihedral(list_of_atoms, xyz)
    norm_axor = np.sqrt(np.sum(axor**2))
    normalized_axor = axor/norm_axor
    #Check if the bond is the last bond, next to broken one.
    #If yes, refer to the oxygen:
    # Presumably the +/-120 degree offset accounts for the anomeric
    # oxygen (key 'O0a' alpha / 'O0b' beta, renamed in mutate_ring) —
    # confirm against the IUPAC torsion definitions used upstream.
    if 'O0a' in atoms_ring.keys():
        if list_of_atoms[-1] == atoms_ring['O0a']:
            new_dih += 120.0
    else:
        if list_of_atoms[-1] == atoms_ring['O0b']:
            new_dih -= 120.0
    #Determine which atoms should be dragged along with the bond:
    carried_atoms = determine_carried_atoms(list_of_atoms[1],
                                            list_of_atoms[2], conn_mat)
    #Each carried_atom is rotated by Euler-Rodrigues formula:
    #Reverse if the angle is less than zero, so it rotates in
    #right direction.
    #Also, I move the midpoint of the bond to the center for
    #the rotation step and then move the atom back.
    if old_dih >= 0.0:
        rot_angle = np.pi*(new_dih - old_dih)/180.
    else:
        rot_angle = -np.pi*(new_dih - old_dih)/180.
    #Shake it, baby! Rotation matrix:
    rot1 = expm3(np.cross(np.eye(3), normalized_axor*rot_angle))
    # Rotate about the midpoint of the central bond.
    translation = (xyz[list_of_atoms[1], :]+xyz[list_of_atoms[2], :])/2
    for at in carried_atoms:
        xyz[at, :] = np.dot(rot1, xyz[at, :]-translation)
        xyz[at, :] = xyz[at, :]+translation
    return xyz
def mutate_ring(molecule, new_dih, new_ang):
    """Mutate a ring to given conformation defined as a list of torsional
    angles according to the 10.1016/S0040-4020(00)01019-X (IUPAC) paper.

    Nested helper: relies on ``atoms_ring`` (dict mapping ring-atom
    labels to atom indices) from the enclosing function's scope.

    Args:
        molecule: RDKit molecule
        new_dih: target ring dihedral angles (degrees)
        new_ang: target ring internal angles (degrees)
    Returns:
        xyz-format string with the mutated atom positions
    """
    n_at = molecule.GetNumAtoms()
    n_bonds = molecule.GetNumBonds()
    m_string = Chem.MolToMolBlock(molecule)
    #Split the string to xyz, connectivity matrix and atom list
    m_coords = m_string.split('\n')[4:4+n_at]
    xyz = np.zeros((n_at, 3))
    atom_list = []
    n = 0
    for line in m_coords:
        # NOTE(review): map() used as a sequence is Python-2 style; on
        # Python 3 adding a map object to a numpy row fails — confirm
        # this module still targets Python 2 (see iteritems() below).
        xyz[n, :] += np.array(map(float, line.split()[:3]))
        atom_list.append(line.split()[3])
        n += 1
    #Molecule Connectivity Matrix (mol-block bond indices are 1-based)
    m_conn = m_string.split('\n')[4+n_at:4+n_at+n_bonds]
    conn_mat = np.zeros((n_at, n_at))
    for line in m_conn:
        at1 = int(line.split()[0])
        at2 = int(line.split()[1])
        conn_mat[at1-1, at2-1] = 1
        conn_mat[at2-1, at1-1] = 1
    #Introduce a cut between ring C0 and C1:
    #I chose these atoms according to the torsion
    #definitions in the IUPAC paper
    #doi: 10.1016/S0040-4020(00)01019-X
    conn_mat[atoms_ring['C0'], atoms_ring['C1']] = 0
    conn_mat[atoms_ring['C1'], atoms_ring['C0']] = 0
    #Construct a list of atoms in order:
    #C0, C1, C2, C3, C4, O, C0, O0a/b (oxygen at anomeric carbon)
    #I use this list to rotate bonds.
    atoms_list = []
    for x in range(0, 5):
        atoms_list.append(atoms_ring['C'+str(x)])
    atoms_list.append(atoms_ring['O'])
    atoms_list.append(atoms_ring['C0'])
    atoms_list.append(atoms_ring['O0'])
    #Determine the anomer - alpha/beta, based on improper
    #dihedral angle C1-C0-O-O0
    imdih = []
    for at in ['C1', 'C0', 'O', 'O0']:
        imdih.append(atoms_ring[at])
    test_anomer = measure_dihedral(imdih, xyz)[0]
    # Rename the 'O0' key to record the detected anomer; set_dihedral
    # later keys its +/-120 degree offset off 'O0a'/'O0b'.
    if test_anomer > 0.0:
        atoms_ring['O0b'] = atoms_ring.pop('O0')
    else:
        atoms_ring['O0a'] = atoms_ring.pop('O0')
    #Adjust the 'internal' angles in the ring:
    for n in range(len(new_ang)):
        xyz = set_angle(atoms_list[n:n+3], new_ang[n], atoms_ring, xyz,
                        conn_mat)
    #Rotate the dihedral angles in the ring:
    for n in range(len(new_dih)):
        xyz = set_dihedral(atoms_list[n:n+4], new_dih[n], atoms_ring, xyz,
                           conn_mat)
    # Assemble an xyz-format string: atom count, the target dihedrals
    # on the comment line, then one "symbol x y z" line per atom.
    a = []
    a.append("%10s\n" % n_at)
    for n in new_dih:
        a.append("%10.4f" % n)
    a.append("\n")
    for n in range(n_at):
        a.append("%10s%10.4f%10.4f%10.4f\n" % (atom_list[n], xyz[n, 0],
                                               xyz[n, 1], xyz[n, 2]))
    xyz_string = ''.join(a)
    return xyz_string
molecule = initialize(sdf_string)
sdf_string = xyz2sdf(mutate_ring(molecule, new_dih, new_ang), sdf_string)
return sdf_string
def pyranosering_measure(sdf_string, position, dict_of_options):
    """Assign the ring to a conformation from the dictionary of options.

    Six consecutive ring torsions are measured and compared (via a
    torsional RMSD) with every template conformation in
    ``dict_of_options``; the key of the best match is returned.

    Args:
        sdf_string (string)
        position (list): 7 atoms defining the ring
        dict_of_options (dict): reference torsion vectors keyed by an
            integer-convertible conformation id
    Returns:
        An integer that corresponds to the best matching dict key
    Raises:
        ValueError: If the length of the position is not equal 7.
    """
    if len(position) != 7:
        raise ValueError("The position needs to be defined by 7 integers")
    # Measure the six ring dihedrals; the last three wrap around to the
    # start of the atom list.
    ang1 = dihedral_measure(sdf_string, position[0:4])
    ang2 = dihedral_measure(sdf_string, position[1:5])
    ang3 = dihedral_measure(sdf_string, position[2:6])
    ang4 = dihedral_measure(sdf_string, (ig(3)(position), ig(4)(position),
                                         ig(5)(position), ig(0)(position)))
    ang5 = dihedral_measure(sdf_string, (ig(4)(position), ig(5)(position),
                                         ig(0)(position), ig(1)(position)))
    ang6 = dihedral_measure(sdf_string, (ig(5)(position), ig(0)(position),
                                         ig(1)(position), ig(2)(position)))
    all_ang = [ang1, ang2, ang3, ang4, ang5, ang6]
    # Torsional RMSD against every reference conformation.
    rmsd_dict = {}
    for key in dict_of_options:
        rmsd_dict[key] = (tor_rmsd(2, get_vec(all_ang, dict_of_options[key])))
    # dict.iteritems() was Python-2-only; keying min() on the dict's
    # get method returns the key with the smallest RMSD and works on
    # both Python 2 and 3.
    return int(min(rmsd_dict, key=rmsd_dict.get))
|
FHIBioGroup/fafoom-dev
|
fafoom/measure.py
|
Python
|
gpl-3.0
| 15,433
|
[
"RDKit"
] |
09e8f5cade117646140d3a499e49b424ec05c1f0eb6a69cbf30f3900fd807819
|
#!/usr/bin/env python
'''
Script to read data from DEIMOS EK60 instrument at MARS and write out NetCDF.
Use the conventions for timeSeriesProfile feature type and write as much metadata as possible.
This script is meant to preserve the data identically as it is reported in the orignal files.
Mike McCann
MBARI 10 June 2019
'''
import os
import sys
import csv
import numpy as np
from collections import defaultdict
from datetime import datetime
from netCDF4 import Dataset
# Add grandparent dir to pythonpath so that we can see the CANON and toNetCDF modules
sys.path.insert(0, os.path.join(os.path.dirname(__file__), "../../") )
from CANON.toNetCDF import BaseWriter
class ParserWriter(BaseWriter):
    '''Handle all information needed to parse EK60 CSV output
    and write the data as a CF-compliant NetCDF timeSeriesProfile file.
    '''

    # Default fill/missing sentinels; overridden to -9.9e+37 for the
    # TSP input format in process_deimos_tsp_csv_file().
    _FillValue = 999
    missing_value = -999
    # NOTE(review): class-level mutable lists are shared across all
    # ParserWriter instances; fine for this one-shot script, but
    # confirm before reusing the class.
    esec_list = []   # epoch seconds (since 1970-01-01) per time step
    dep_list = []    # depth bin centres in metres
    ad_list = []     # acoustic values, one list per time step

    def _save_data(self, dep_per_time, ad_per_time):
        # Expand one time step's sparse (depth, value) lists onto the
        # full self.dep_list grid, filling absent bins with _FillValue.
        dep = dep_per_time[0]
        deps = []
        for dl in self.dep_list:
            if dep > dl:
                deps.append(self._FillValue)
            else:
                deps.append(ad_per_time.pop(0))
                # NOTE(review): dep_per_time is never popped, so this
                # re-reads the same first element on every pass —
                # verify against upstream whether pop(0) was intended.
                dep = dep_per_time[0]
        self.ad_list.append(deps)

    def process_deimos_tsp_csv_file(self):
        '''Process DEIMOS data into a TimeSeriesProfile CF-NetCDF featureType file.
        The .csv file looks like:
        Ping_index,Distance_gps,Distance_vl,Ping_date,Ping_time,Ping_milliseconds,Latitude,Longitude,Depth_start,Depth_stop,Range_start,Range_stop,Sample_count
        0,-9.9e+37,0.0000000000,2019-10-06,00:00:01,250.0,999.0000000000,999.0000000000,-0.1909107200,999.7994406400,-0.1909107200,999.7994406400,2619,-9.9e+37,-9.9e+37,-9.9e+37,-9.9e+37,-9.9e+37,-9.9e+37,-9.9e+37,-9.9e+37,-9.9e+37,-9.9e+37,-9.9e+37,-9.9e+37,-9.9e+37,-9.9e+37,-9.9e+37,-9.9e+37,-9.9e+37,-9.9e+37,-9.9e+37,-9.9e+37,-9.9e+37,-9.9e+37,-9.9e+37,-9.9e+37,-9.9e+37,-9.9e+37,-9.9e+37,-89.140898,-87.511737,-87.431613,-88.386803,-90.688064,-92.684762,-93.915170,-93.629650,-92.724587,-92.733174,-94.681568,-97.019348,-98.350405,-98.916013,....
        It's simpler than Sam's format with the acoustic intensity data in the None key in equal
        depth bins between Depth_start and Depth_stop.
        '''
        # This input format carries its own fill value.
        self._FillValue = -9.9e+37
        self.missing_value = -9.9e+37
        self.logger.info(f"Opening file {self.args.inFile} to collect acoustic data")
        with open(self.args.inFile) as fh:
            for count, rec in enumerate(csv.DictReader(fh)):
                self.logger.debug((rec['Ping_index'], rec['Ping_date'], rec['Ping_time'], rec['Sample_count']))
                # Compose a timestamp from the separate date, time and
                # millisecond columns.
                dt = datetime.strptime(f"{rec['Ping_date']}{rec['Ping_time']}.{float(rec['Ping_milliseconds']):03.0f}",
                                       "%Y-%m-%d%H:%M:%S.%f")
                if count == 0:
                    first_dt = dt
                    start_dep = float(rec['Depth_start'])
                    stop_dep = float(rec['Depth_stop'])
                    sample_count = int(rec['Sample_count'])
                # The depth grid must be identical for every ping.
                assert(float(rec['Depth_start']) == start_dep)
                assert(float(rec['Depth_stop']) == stop_dep)
                assert(float(rec['Sample_count']) == sample_count)
                self.esec_list.append((dt - datetime(1970, 1, 1)).total_seconds())
                # The unnamed trailing columns (DictReader key None)
                # hold the acoustic samples for this ping.
                self.ad_list.append([float(d) for d in rec[None]])
        # Equal-width depth bins between Depth_start and Depth_stop.
        self.dep_list = np.linspace(start_dep, stop_dep, sample_count)
        self.logger.info(f"Collected {len(self.esec_list)} time steps, from {first_dt} to {dt}, between depths {start_dep} and {stop_dep}")
        self.write_sv(ad_name="sv38")

    def process_deimos_sam_csv_file(self):
        '''The .csv file looks like:
        Process_ID, Interval, Layer, Sv_mean, NASC, Height_mean, Depth_mean, Layer_depth_min, Layer_depth_max, Ping_S, Ping_E, Dist_M, Date_M, Time_M, Lat_M, Lon_M, Noise_Sv_1m, Minimum_Sv_threshold_applied, Maximum_Sv_threshold_applied, Standard_deviation, Thickness_mean, Range_mean, Exclude_below_line_range_mean, Exclude_above_line_range_mean
        10, 2597618, 7, -76.938230, 0.014737, 0.016895, 6.541552, 6.000000, 7.000000, 0, 112, 0.000000, 20190523, 00:17:39.6930, 999.00000000, 999.00000000, -999.000000, 0, 0, 0.00000004525452, 0.016895, 883.458448, 10.000000, 880.285609
        10, 2597618, 8, -71.759814, 0.135963, 0.047305, 7.719744, 7.000000, 8.000000, 0, 112, 0.000000, 20190523, 00:17:39.6930, 999.00000000, 999.00000000, -999.000000, 0, 0, 0.00000013429169, 0.047305, 882.280256, 10.000000, 880.285609
        '''
        last_dt = None
        last_dep = 0
        depths = defaultdict(lambda:0)
        # First pass: collect the set of depth-bin centres.
        self.logger.info(f"Opening file {self.args.inFile} to collect depths")
        with open(self.args.inFile) as fh:
            for rec in csv.DictReader(fh, skipinitialspace=True):
                # Depth dimension
                dep = (float(rec['Layer_depth_min']) + float(rec['Layer_depth_max'])) / 2.0
                depths[dep] += 1
        self.dep_list = sorted(depths.keys())
        self.logger.info(f"Collected {len(self.dep_list)} depth from {self.dep_list[0]} m to {self.dep_list[-1]} m")
        dep_per_time = []
        ad_per_time = []
        # Second pass: group Sv_mean values by time step.
        self.logger.info(f"Opening file {self.args.inFile} to collect acoustic data")
        with open(self.args.inFile) as fh:
            for count, rec in enumerate(csv.DictReader(fh, skipinitialspace=True)):
                # NOTE(review): Logger.debug() %-formats the first arg
                # with the rest; these extra positional args will raise
                # at format time if DEBUG logging is enabled — likely
                # meant to be a single tuple, as in the method above.
                self.logger.debug(rec['Date_M'], rec['Time_M'], rec['Layer_depth_min'], rec['Depth_mean'], rec['Layer_depth_max'])
                dt = datetime.strptime(rec['Date_M']+rec['Time_M'], "%Y%m%d%H:%M:%S.%f")
                if dt != last_dt:
                    # A new time step encountered
                    if not count % (6 * 24):
                        self.logger.info(f"{dt}")
                    self.esec_list.append((dt - datetime(1970, 1, 1)).total_seconds())
                    if not dep_per_time:
                        first_dt = dt
                    else:
                        self._save_data(dep_per_time, ad_per_time)
                    dep_per_time = []
                    ad_per_time = []
                dep_per_time.append((float(rec['Layer_depth_min']) + float(rec['Layer_depth_max'])) / 2.0)
                ad_per_time.append(float(rec['Sv_mean']))
                last_dt = dt
                last_dep = dep
        # Save the last set of depth and acoustic data
        self._save_data(dep_per_time, ad_per_time)
        self.logger.info(f"Collected {len(self.esec_list)} time steps, from {first_dt} to {dt}")
        self.write_sv(ad_name='Sv_mean')

    def write_sv(self, ad_name='Sv_mean'):
        '''Write lists out as NetCDF.
        '''
        # Create the NetCDF file next to the input, swapping the
        # extension for .nc and removing spaces from the name.
        outFile = '.'.join(self.args.inFile.split('.')[:-1]) + '.nc'
        outFile = outFile.replace(' ', '_')
        self.ncFile = Dataset(outFile, 'w')
        # If specified on command line override the default generic title with what is specified
        self.ncFile.title = 'DEIMOS Acoustic Data'
        if self.args.title:
            self.ncFile.title = self.args.title
        # Combine any summary text specified on command line with the generic summary stating the original source file
        self.ncFile.summary = f"Observational oceanographic data translated from '{self.args.inFile}'"
        if self.args.summary:
            self.ncFile.summary = self.args.summary
            if not self.args.summary.endswith('.'):
                self.ncFile.summary += '.'
            self.ncFile.summary += ' Translated with no modification from original data file %s' % self.args.inFile
        # Text cribbed from the README
        self.ncFile.summary += '''
IMPORTANT: these acoustic data have gone through automated noise-removal
procedures, but have not been fully quality-controlled. As such, they are
likely to contain spurious signals due to noise, non-biological scatterers, etc.
The data are binned at reduced spatial and temporal resolution and have not been
calibrated.
'''
        # NOTE(review): stray apostrophe after VISUALIZATIONS. inside
        # this literal — harmless but probably unintended.
        self.ncFile.summary += '''THESE DATA ARE NOT "SCIENCE QUALITY" AND SHOULD ONLY BE USED FOR PRELIMINARY VISUALIZATIONS.'
For data appropriate for publications, presentations, etc., please contact:
Kelly Benoit-Bird <kbb@mbari.org>
Chad Waluk <cwaluk@mbari.org>'''
        # If specified on command line override the default generic license with what is specified
        if self.args.license:
            self.ncFile.license = self.args.license
        # timeSeriesProfile dataset, time and depth are the netCDF dimensions with arrays
        self.ncFile.createDimension('time', len(self.esec_list))
        self.time = self.ncFile.createVariable('time', 'float64', ('time',))
        self.time.standard_name = 'time'
        self.time.units = 'seconds since 1970-01-01'
        self.time[:] = self.esec_list
        self.ncFile.createDimension('depth', len(self.dep_list))
        self.depth = self.ncFile.createVariable('depth', 'float64', ('depth',))
        self.depth.standard_name = 'depth'
        self.depth.units = 'm'
        self.depth[:] = self.dep_list
        # Position of MARS - singleton dimensions
        self.ncFile.createDimension('longitude', 1)
        self.longitude = self.ncFile.createVariable('longitude', 'float64', ('longitude',))
        self.longitude.standard_name = 'longitude'
        self.longitude.units = 'degrees_east'
        self.longitude[:] = [-122.18681000]
        self.ncFile.createDimension('latitude', 1)
        self.latitude = self.ncFile.createVariable('latitude', 'float64', ('latitude',))
        self.latitude.standard_name = 'latitude'
        self.latitude.units = 'degrees_north'
        self.latitude[:] = [36.71137000]
        # Record Variables - Acoustic Data
        ad = self.ncFile.createVariable(ad_name, 'float64', ('time', 'depth', 'latitude', 'longitude'), fill_value=self._FillValue)
        ad.long_name = 'SV'
        ad.coordinates = 'time depth latitude longitude'
        ad.units = 'db'
        ad.missing_value = self.missing_value
        # Add singleton latitude/longitude axes so the data matches
        # the (time, depth, latitude, longitude) dimensions.
        ad_array = np.array(self.ad_list)
        ad[:,:,:,:] = ad_array.reshape(ad_array.shape[0], ad_array.shape[1], 1, 1)
        self.add_global_metadata(featureType='timeseriesProfile')
        self.ncFile.close()
        self.logger.info("Wrote %s" % outFile)

    # End write_sv()
if __name__ == '__main__':
    '''Example execution:
    Using process_deimos_sam_csv_file():
    stoqs/loaders/CANON/toNetCDF/deimosCSVToNetCDF.py --sam --inFile deimos-2019-CANON-Spring.csv
    Using process_deimos_tsp_csv_file():
    stoqs/loaders/CANON/toNetCDF/deimosCSVToNetCDF.py --inFile Deimos\ -\ Cleaned\ Smoothed\ Data\ -\ Original\ Full\ Resolution\ -\ 2019-10-06\ 0000to0100.sv.csv
    '''
    # Parse command-line arguments, then translate the input CSV to a
    # NetCDF file alongside it (TSP format is currently the default).
    pw = ParserWriter()
    pw.process_command_line()
    ##pw.process_deimos_sam_csv_file()
    pw.process_deimos_tsp_csv_file()
    pw.logger.info("Done.")
|
stoqs/stoqs
|
stoqs/loaders/CANON/toNetCDF/deimosCSVToNetCDF.py
|
Python
|
gpl-3.0
| 10,997
|
[
"NetCDF"
] |
3cb9eb9b26114a813ceee237437a092c94fd3d9051f462c5f7bf69aa214fb3a4
|
"""Support for package tracking sensors from 17track.net."""
from datetime import timedelta
import logging
from py17track import Client as SeventeenTrackClient
from py17track.errors import SeventeenTrackError
import voluptuous as vol
from homeassistant.components.sensor import PLATFORM_SCHEMA
from homeassistant.const import (
ATTR_ATTRIBUTION,
ATTR_LOCATION,
CONF_PASSWORD,
CONF_SCAN_INTERVAL,
CONF_USERNAME,
)
from homeassistant.helpers import aiohttp_client, config_validation as cv
from homeassistant.helpers.entity import Entity
from homeassistant.util import Throttle, slugify
_LOGGER = logging.getLogger(__name__)

# Extra state-attribute keys exposed by the sensors.
ATTR_DESTINATION_COUNTRY = "destination_country"
ATTR_FRIENDLY_NAME = "friendly_name"
ATTR_INFO_TEXT = "info_text"
ATTR_ORIGIN_COUNTRY = "origin_country"
ATTR_PACKAGES = "packages"
ATTR_PACKAGE_TYPE = "package_type"
ATTR_STATUS = "status"
ATTR_TRACKING_INFO_LANGUAGE = "tracking_info_language"
ATTR_TRACKING_NUMBER = "tracking_number"

# Platform configuration options.
CONF_SHOW_ARCHIVED = "show_archived"
CONF_SHOW_DELIVERED = "show_delivered"

DATA_PACKAGES = "package_data"
DATA_SUMMARY = "summary_data"

DEFAULT_ATTRIBUTION = "Data provided by 17track.net"
# Minimum interval between calls to the 17track.net API (enforced via
# Throttle in SeventeenTrackData).
DEFAULT_SCAN_INTERVAL = timedelta(minutes=10)

# Templates for unique ids, entity ids and delivery notifications.
UNIQUE_ID_TEMPLATE = "package_{0}_{1}"
ENTITY_ID_TEMPLATE = "sensor.seventeentrack_package_{0}"

NOTIFICATION_DELIVERED_ID = "package_delivered_{0}"
NOTIFICATION_DELIVERED_TITLE = "Package {0} delivered"
NOTIFICATION_DELIVERED_MESSAGE = (
    "Package Delivered: {0}<br />Visit 17.track for more information: "
    "https://t.17track.net/track#nums={1}"
)

VALUE_DELIVERED = "Delivered"

PLATFORM_SCHEMA = PLATFORM_SCHEMA.extend(
    {
        vol.Required(CONF_USERNAME): cv.string,
        vol.Required(CONF_PASSWORD): cv.string,
        vol.Optional(CONF_SHOW_ARCHIVED, default=False): cv.boolean,
        vol.Optional(CONF_SHOW_DELIVERED, default=False): cv.boolean,
    }
)
async def async_setup_platform(hass, config, async_add_entities, discovery_info=None):
    """Configure the platform and add the sensors.

    Logs in to 17track.net with the configured credentials, then hands
    entity creation over to a shared SeventeenTrackData handler.
    """
    websession = aiohttp_client.async_get_clientsession(hass)

    client = SeventeenTrackClient(websession)

    try:
        login_result = await client.profile.login(
            config[CONF_USERNAME], config[CONF_PASSWORD]
        )

        if not login_result:
            _LOGGER.error("Invalid username and password provided")
            return
    except SeventeenTrackError as err:
        _LOGGER.error("There was an error while logging in: %s", err)
        return

    scan_interval = config.get(CONF_SCAN_INTERVAL, DEFAULT_SCAN_INTERVAL)

    # The data handler adds package and summary entities itself as data
    # arrives; the initial fetch happens here.
    data = SeventeenTrackData(
        client,
        async_add_entities,
        scan_interval,
        config[CONF_SHOW_ARCHIVED],
        config[CONF_SHOW_DELIVERED],
    )
    await data.async_update()
class SeventeenTrackSummarySensor(Entity):
    """Define a summary sensor.

    One instance per package status; its state is the number of
    packages currently in that status.
    """

    def __init__(self, data, status, initial_state):
        """Initialize.

        data: shared SeventeenTrackData handler
        status: the package status this sensor summarizes
        initial_state: package count reported at creation time
        """
        self._attrs = {ATTR_ATTRIBUTION: DEFAULT_ATTRIBUTION}
        self._data = data
        self._state = initial_state
        self._status = status

    @property
    def available(self):
        """Return whether the entity is available."""
        return self._state is not None

    @property
    def device_state_attributes(self):
        """Return the device state attributes."""
        return self._attrs

    @property
    def icon(self):
        """Return the icon."""
        return "mdi:package"

    @property
    def name(self):
        """Return the name."""
        return f"Seventeentrack Packages {self._status}"

    @property
    def state(self):
        """Return the state."""
        return self._state

    @property
    def unique_id(self):
        """Return a unique, Home Assistant friendly identifier for this entity."""
        return "summary_{0}_{1}".format(self._data.account_id, slugify(self._status))

    @property
    def unit_of_measurement(self):
        """Return the unit the value is expressed in."""
        return "packages"

    async def async_update(self):
        """Update the sensor."""
        # Shared, throttled fetch; may be a no-op within the interval.
        await self._data.async_update()

        package_data = []
        for package in self._data.packages.values():
            if package.status != self._status:
                continue

            package_data.append(
                {
                    ATTR_FRIENDLY_NAME: package.friendly_name,
                    ATTR_INFO_TEXT: package.info_text,
                    ATTR_STATUS: package.status,
                    ATTR_TRACKING_NUMBER: package.tracking_number,
                }
            )

        if package_data:
            self._attrs[ATTR_PACKAGES] = package_data

        # None (status missing from the summary) marks the entity
        # unavailable via the `available` property.
        self._state = self._data.summary.get(self._status)
class SeventeenTrackPackageSensor(Entity):
    """Define an individual package sensor.

    One instance per tracking number; its state is the package status.
    """

    def __init__(self, data, package):
        """Initialize from a py17track package object."""
        self._attrs = {
            ATTR_ATTRIBUTION: DEFAULT_ATTRIBUTION,
            ATTR_DESTINATION_COUNTRY: package.destination_country,
            ATTR_INFO_TEXT: package.info_text,
            ATTR_LOCATION: package.location,
            ATTR_ORIGIN_COUNTRY: package.origin_country,
            ATTR_PACKAGE_TYPE: package.package_type,
            ATTR_TRACKING_INFO_LANGUAGE: package.tracking_info_language,
            ATTR_TRACKING_NUMBER: package.tracking_number,
        }
        self._data = data
        self._friendly_name = package.friendly_name
        self._state = package.status
        self._tracking_number = package.tracking_number
        self.entity_id = ENTITY_ID_TEMPLATE.format(self._tracking_number)

    @property
    def available(self):
        """Return whether the entity is available."""
        # Unavailable once the tracking number drops out of the feed.
        return self._data.packages.get(self._tracking_number) is not None

    @property
    def device_state_attributes(self):
        """Return the device state attributes."""
        return self._attrs

    @property
    def icon(self):
        """Return the icon."""
        return "mdi:package"

    @property
    def name(self):
        """Return the name."""
        name = self._friendly_name
        if not name:
            name = self._tracking_number
        return f"Seventeentrack Package: {name}"

    @property
    def state(self):
        """Return the state."""
        return self._state

    @property
    def unique_id(self):
        """Return a unique, Home Assistant friendly identifier for this entity."""
        return UNIQUE_ID_TEMPLATE.format(self._data.account_id, self._tracking_number)

    async def async_update(self):
        """Update the sensor."""
        await self._data.async_update()

        if not self.available:
            # Package disappeared from the account: remove the entity.
            self.hass.async_create_task(self._remove())
            return

        package = self._data.packages.get(self._tracking_number, None)

        # If the user has elected to not see delivered packages and one gets
        # delivered, post a notification:
        if package.status == VALUE_DELIVERED and not self._data.show_delivered:
            self._notify_delivered()
            self.hass.async_create_task(self._remove())
            return

        self._attrs.update(
            {ATTR_INFO_TEXT: package.info_text, ATTR_LOCATION: package.location}
        )
        self._state = package.status
        self._friendly_name = package.friendly_name

    async def _remove(self):
        """Remove entity itself."""
        await self.async_remove()

        # Also drop the entity-registry entry so the entity does not
        # come back as "restored" on the next start.
        reg = await self.hass.helpers.entity_registry.async_get_registry()
        entity_id = reg.async_get_entity_id(
            "sensor",
            "seventeentrack",
            UNIQUE_ID_TEMPLATE.format(self._data.account_id, self._tracking_number),
        )
        if entity_id:
            reg.async_remove(entity_id)

    def _notify_delivered(self):
        """Notify when package is delivered."""
        _LOGGER.info("Package delivered: %s", self._tracking_number)

        identification = (
            self._friendly_name if self._friendly_name else self._tracking_number
        )
        message = NOTIFICATION_DELIVERED_MESSAGE.format(
            self._tracking_number, identification
        )
        title = NOTIFICATION_DELIVERED_TITLE.format(identification)
        # NOTE(review): this builds the notification id from the TITLE
        # template while NOTIFICATION_DELIVERED_ID is defined above but
        # never used — confirm which was intended.
        notification_id = NOTIFICATION_DELIVERED_TITLE.format(self._tracking_number)

        self.hass.components.persistent_notification.create(
            message, title=title, notification_id=notification_id
        )
class SeventeenTrackData:
    """Define a data handler for 17track.net.

    Shared by all sensors of the platform; fetches package and summary
    data and creates new entities as they appear.
    """

    def __init__(
        self, client, async_add_entities, scan_interval, show_archived, show_delivered
    ):
        """Initialize."""
        self._async_add_entities = async_add_entities
        self._client = client
        self._scan_interval = scan_interval
        self._show_archived = show_archived
        self.account_id = client.profile.account_id
        self.packages = {}          # tracking_number -> package object
        self.show_delivered = show_delivered
        self.summary = {}           # status -> package count
        # Rate-limit API access: every entity calls async_update(), so
        # wrap the real update in a Throttle for the configured interval.
        self.async_update = Throttle(self._scan_interval)(self._async_update)
        self.first_update = True

    async def _async_update(self):
        """Get updated data from 17track.net."""
        try:
            packages = await self._client.profile.packages(
                show_archived=self._show_archived
            )
            _LOGGER.debug("New package data received: %s", packages)

            new_packages = {p.tracking_number: p for p in packages}

            # Only tracking numbers not seen before get new entities.
            to_add = set(new_packages) - set(self.packages)

            _LOGGER.debug("Will add new tracking numbers: %s", to_add)
            if to_add:
                self._async_add_entities(
                    [
                        SeventeenTrackPackageSensor(self, new_packages[tracking_number])
                        for tracking_number in to_add
                    ],
                    True,
                )

            self.packages = new_packages
        except SeventeenTrackError as err:
            _LOGGER.error("There was an error retrieving packages: %s", err)

        try:
            self.summary = await self._client.profile.summary(
                show_archived=self._show_archived
            )
            _LOGGER.debug("New summary data received: %s", self.summary)

            # creating summary sensors on first update
            if self.first_update:
                self.first_update = False

                self._async_add_entities(
                    [
                        SeventeenTrackSummarySensor(self, status, quantity)
                        for status, quantity in self.summary.items()
                    ],
                    True,
                )
        except SeventeenTrackError as err:
            _LOGGER.error("There was an error retrieving the summary: %s", err)
            self.summary = {}
|
postlund/home-assistant
|
homeassistant/components/seventeentrack/sensor.py
|
Python
|
apache-2.0
| 10,853
|
[
"VisIt"
] |
95ca4440f70942bd10a169347a04b1b2e70c9e0380a039a74f559686c5894c61
|
# -*- coding: utf-8 -*-
# Copyright: (c) 2016, Brian Coca <bcoca@ansible.com>
# GNU General Public License v3.0+ (see COPYING or https://www.gnu.org/licenses/gpl-3.0.txt)
from __future__ import absolute_import, division, print_function
__metaclass__ = type
DOCUMENTATION = '''
module: systemd
author:
- Ansible Core Team
version_added: "2.2"
short_description: Manage systemd units
description:
- Controls systemd units (services, timers, and so on) on remote hosts.
options:
name:
description:
- Name of the unit. This parameter takes the name of exactly one unit to work with.
- When no extension is given, it is implied to a C(.service) as systemd.
- When using in a chroot environment you always need to specify the name of the unit with the extension. For example, C(crond.service).
type: str
aliases: [ service, unit ]
state:
description:
- C(started)/C(stopped) are idempotent actions that will not run commands unless necessary.
C(restarted) will always bounce the unit. C(reloaded) will always reload.
type: str
choices: [ reloaded, restarted, started, stopped ]
enabled:
description:
- Whether the unit should start on boot. B(At least one of state and enabled are required.)
type: bool
force:
description:
- Whether to override existing symlinks.
type: bool
version_added: 2.6
masked:
description:
- Whether the unit should be masked or not, a masked unit is impossible to start.
type: bool
daemon_reload:
description:
- Run daemon-reload before doing any other operations, to make sure systemd has read any changes.
- When set to C(true), runs daemon-reload even if the module does not start or stop anything.
type: bool
default: no
aliases: [ daemon-reload ]
daemon_reexec:
description:
- Run daemon_reexec command before doing any other operations, the systemd manager will serialize the manager state.
type: bool
default: no
aliases: [ daemon-reexec ]
version_added: "2.8"
scope:
description:
- Run systemctl within a given service manager scope, either as the default system scope C(system),
the current user's scope C(user), or the scope of all users C(global).
- "For systemd to work with 'user', the executing user must have its own instance of dbus started and accessible (systemd requirement)."
- "The user dbus process is normally started during normal login, but not during the run of Ansible tasks.
Otherwise you will probably get a 'Failed to connect to bus: no such file or directory' error."
- The user must have access, normally given via setting the C(XDG_RUNTIME_DIR) variable, see example below.
type: str
choices: [ system, user, global ]
default: system
version_added: "2.7"
no_block:
description:
- Do not synchronously wait for the requested operation to finish.
Enqueued job will continue without Ansible blocking on its completion.
type: bool
default: no
version_added: "2.3"
extends_documentation_fragment: action_common_attributes
attributes:
check_mode:
support: full
diff_mode:
support: none
platform:
platforms: posix
notes:
- Since 2.4, one of the following options is required C(state), C(enabled), C(masked), C(daemon_reload), (C(daemon_reexec) since 2.8),
and all except C(daemon_reload) and (C(daemon_reexec) since 2.8) also require C(name).
- Before 2.4 you always required C(name).
- Globs are not supported in name, i.e C(postgres*.service).
- The service names might vary by specific OS/distribution
requirements:
- A system managed by systemd.
'''
EXAMPLES = '''
- name: Make sure a service unit is running
ansible.builtin.systemd:
state: started
name: httpd
- name: Stop service cron on debian, if running
ansible.builtin.systemd:
name: cron
state: stopped
- name: Restart service cron on centos, in all cases, also issue daemon-reload to pick up config changes
ansible.builtin.systemd:
state: restarted
daemon_reload: yes
name: crond
- name: Reload service httpd, in all cases
ansible.builtin.systemd:
name: httpd.service
state: reloaded
- name: Enable service httpd and ensure it is not masked
ansible.builtin.systemd:
name: httpd
enabled: yes
masked: no
- name: Enable a timer unit for dnf-automatic
ansible.builtin.systemd:
name: dnf-automatic.timer
state: started
enabled: yes
- name: Just force systemd to reread configs (2.4 and above)
ansible.builtin.systemd:
daemon_reload: yes
- name: Just force systemd to re-execute itself (2.8 and above)
ansible.builtin.systemd:
daemon_reexec: yes
- name: Run a user service when XDG_RUNTIME_DIR is not set on remote login
ansible.builtin.systemd:
name: myservice
state: started
scope: user
environment:
XDG_RUNTIME_DIR: "/run/user/{{ myuid }}"
'''
RETURN = '''
status:
description: A dictionary with the key=value pairs returned from C(systemctl show).
returned: success
type: complex
sample: {
"ActiveEnterTimestamp": "Sun 2016-05-15 18:28:49 EDT",
"ActiveEnterTimestampMonotonic": "8135942",
"ActiveExitTimestampMonotonic": "0",
"ActiveState": "active",
"After": "auditd.service systemd-user-sessions.service time-sync.target systemd-journald.socket basic.target system.slice",
"AllowIsolate": "no",
"Before": "shutdown.target multi-user.target",
"BlockIOAccounting": "no",
"BlockIOWeight": "1000",
"CPUAccounting": "no",
"CPUSchedulingPolicy": "0",
"CPUSchedulingPriority": "0",
"CPUSchedulingResetOnFork": "no",
"CPUShares": "1024",
"CanIsolate": "no",
"CanReload": "yes",
"CanStart": "yes",
"CanStop": "yes",
"CapabilityBoundingSet": "18446744073709551615",
"ConditionResult": "yes",
"ConditionTimestamp": "Sun 2016-05-15 18:28:49 EDT",
"ConditionTimestampMonotonic": "7902742",
"Conflicts": "shutdown.target",
"ControlGroup": "/system.slice/crond.service",
"ControlPID": "0",
"DefaultDependencies": "yes",
"Delegate": "no",
"Description": "Command Scheduler",
"DevicePolicy": "auto",
"EnvironmentFile": "/etc/sysconfig/crond (ignore_errors=no)",
"ExecMainCode": "0",
"ExecMainExitTimestampMonotonic": "0",
"ExecMainPID": "595",
"ExecMainStartTimestamp": "Sun 2016-05-15 18:28:49 EDT",
"ExecMainStartTimestampMonotonic": "8134990",
"ExecMainStatus": "0",
"ExecReload": "{ path=/bin/kill ; argv[]=/bin/kill -HUP $MAINPID ; ignore_errors=no ; start_time=[n/a] ; stop_time=[n/a] ; pid=0 ; code=(null) ; status=0/0 }",
"ExecStart": "{ path=/usr/sbin/crond ; argv[]=/usr/sbin/crond -n $CRONDARGS ; ignore_errors=no ; start_time=[n/a] ; stop_time=[n/a] ; pid=0 ; code=(null) ; status=0/0 }",
"FragmentPath": "/usr/lib/systemd/system/crond.service",
"GuessMainPID": "yes",
"IOScheduling": "0",
"Id": "crond.service",
"IgnoreOnIsolate": "no",
"IgnoreOnSnapshot": "no",
"IgnoreSIGPIPE": "yes",
"InactiveEnterTimestampMonotonic": "0",
"InactiveExitTimestamp": "Sun 2016-05-15 18:28:49 EDT",
"InactiveExitTimestampMonotonic": "8135942",
"JobTimeoutUSec": "0",
"KillMode": "process",
"KillSignal": "15",
"LimitAS": "18446744073709551615",
"LimitCORE": "18446744073709551615",
"LimitCPU": "18446744073709551615",
"LimitDATA": "18446744073709551615",
"LimitFSIZE": "18446744073709551615",
"LimitLOCKS": "18446744073709551615",
"LimitMEMLOCK": "65536",
"LimitMSGQUEUE": "819200",
"LimitNICE": "0",
"LimitNOFILE": "4096",
"LimitNPROC": "3902",
"LimitRSS": "18446744073709551615",
"LimitRTPRIO": "0",
"LimitRTTIME": "18446744073709551615",
"LimitSIGPENDING": "3902",
"LimitSTACK": "18446744073709551615",
"LoadState": "loaded",
"MainPID": "595",
"MemoryAccounting": "no",
"MemoryLimit": "18446744073709551615",
"MountFlags": "0",
"Names": "crond.service",
"NeedDaemonReload": "no",
"Nice": "0",
"NoNewPrivileges": "no",
"NonBlocking": "no",
"NotifyAccess": "none",
"OOMScoreAdjust": "0",
"OnFailureIsolate": "no",
"PermissionsStartOnly": "no",
"PrivateNetwork": "no",
"PrivateTmp": "no",
"RefuseManualStart": "no",
"RefuseManualStop": "no",
"RemainAfterExit": "no",
"Requires": "basic.target",
"Restart": "no",
"RestartUSec": "100ms",
"Result": "success",
"RootDirectoryStartOnly": "no",
"SameProcessGroup": "no",
"SecureBits": "0",
"SendSIGHUP": "no",
"SendSIGKILL": "yes",
"Slice": "system.slice",
"StandardError": "inherit",
"StandardInput": "null",
"StandardOutput": "journal",
"StartLimitAction": "none",
"StartLimitBurst": "5",
"StartLimitInterval": "10000000",
"StatusErrno": "0",
"StopWhenUnneeded": "no",
"SubState": "running",
"SyslogLevelPrefix": "yes",
"SyslogPriority": "30",
"TTYReset": "no",
"TTYVHangup": "no",
"TTYVTDisallocate": "no",
"TimeoutStartUSec": "1min 30s",
"TimeoutStopUSec": "1min 30s",
"TimerSlackNSec": "50000",
"Transient": "no",
"Type": "simple",
"UMask": "0022",
"UnitFileState": "enabled",
"WantedBy": "multi-user.target",
"Wants": "system.slice",
"WatchdogTimestampMonotonic": "0",
"WatchdogUSec": "0",
}
''' # NOQA
import os
from ansible.module_utils.basic import AnsibleModule
from ansible.module_utils.facts.system.chroot import is_chroot
from ansible.module_utils.service import sysv_exists, sysv_is_enabled, fail_if_missing
from ansible.module_utils._text import to_native
def is_running_service(service_status):
    """Return True when the unit's ActiveState says it is up ('active') or coming up ('activating')."""
    return service_status['ActiveState'] in ('active', 'activating')
def is_deactivating_service(service_status):
    """Return True when the unit is in the middle of shutting down."""
    return service_status['ActiveState'] == 'deactivating'
def request_was_ignored(out):
    """Detect systemctl output that reports the request/command was ignored.

    Real `systemctl show` output contains KEY=VALUE lines, so the presence of
    '=' means the output is data rather than an "ignoring ..." notice.
    """
    if '=' in out:
        return False
    return 'ignoring request' in out or 'ignoring command' in out
def parse_systemctl_show(lines):
    """Parse `systemctl show` output lines into a dict.

    Values can span multiple lines. At first glance such values appear to be
    surrounded by {}, but a single-line value may start with { without ending
    in } (e.g. Description=), and greedily consuming lines in that case made
    Ansible report that the service file could not be found. To avoid that,
    multi-line values are only accepted for keys whose names start with Exec
    (e.g. ExecStart=), the only keys known to span multiple lines.
    """
    parsed = {}
    pending_key = None
    buffered = []
    for line in lines:
        if pending_key is not None:
            # continuation of a multi-line Exec* value; '}' ends it
            buffered.append(line)
            if line.rstrip().endswith('}'):
                parsed[pending_key] = '\n'.join(buffered).strip()
                pending_key = None
                buffered = []
            continue
        if '=' not in line:
            continue
        key, _, value = line.partition('=')
        if (key.startswith('Exec') and value.lstrip().startswith('{')
                and not value.rstrip().endswith('}')):
            # opening of a multi-line value; collect until a closing '}'
            pending_key = key
            buffered = [value]
        else:
            parsed[key] = value.strip()
    return parsed
# ===========================================
# Main control flow
def main():
    """Entry point: manage a systemd unit per module params.

    Handles daemon-reload/daemon-reexec, unit discovery (with fallbacks for
    broken `systemctl show`), mask/unmask, enable/disable, and state changes.
    """
    # initialize
    module = AnsibleModule(
        argument_spec=dict(
            name=dict(type='str', aliases=['service', 'unit']),
            state=dict(type='str', choices=['reloaded', 'restarted', 'started', 'stopped']),
            enabled=dict(type='bool'),
            force=dict(type='bool'),
            masked=dict(type='bool'),
            daemon_reload=dict(type='bool', default=False, aliases=['daemon-reload']),
            daemon_reexec=dict(type='bool', default=False, aliases=['daemon-reexec']),
            scope=dict(type='str', default='system', choices=['system', 'user', 'global']),
            no_block=dict(type='bool', default=False),
        ),
        supports_check_mode=True,
        required_one_of=[['state', 'enabled', 'masked', 'daemon_reload', 'daemon_reexec']],
        required_by=dict(
            state=('name', ),
            enabled=('name', ),
            masked=('name', ),
        ),
    )

    unit = module.params['name']
    if unit is not None:
        # glob patterns are not supported: fail early with a clear message
        for globpattern in (r"*", r"?", r"["):
            if globpattern in unit:
                module.fail_json(msg="This module does not currently support using glob patterns, found '%s' in service name: %s" % (globpattern, unit))

    systemctl = module.get_bin_path('systemctl', True)

    # user-scope systemctl needs XDG_RUNTIME_DIR to locate the user bus
    if os.getenv('XDG_RUNTIME_DIR') is None:
        os.environ['XDG_RUNTIME_DIR'] = '/run/user/%s' % os.geteuid()

    ''' Set CLI options depending on params '''
    # if scope is 'system' or None, we can ignore as there is no extra switch.
    # The other choices match the corresponding switch
    if module.params['scope'] != 'system':
        systemctl += " --%s" % module.params['scope']

    if module.params['no_block']:
        systemctl += " --no-block"

    if module.params['force']:
        systemctl += " --force"

    rc = 0
    out = err = ''
    result = dict(
        name=unit,
        changed=False,
        status=dict(),
    )

    # Run daemon-reload first, if requested
    if module.params['daemon_reload'] and not module.check_mode:
        (rc, out, err) = module.run_command("%s daemon-reload" % (systemctl))
        if rc != 0:
            module.fail_json(msg='failure %d during daemon-reload: %s' % (rc, err))

    # Run daemon-reexec
    if module.params['daemon_reexec'] and not module.check_mode:
        (rc, out, err) = module.run_command("%s daemon-reexec" % (systemctl))
        if rc != 0:
            module.fail_json(msg='failure %d during daemon-reexec: %s' % (rc, err))

    if unit:
        found = False
        is_initd = sysv_exists(unit)
        is_systemd = False

        # check service data, cannot error out on rc as it changes across versions, assume not found
        (rc, out, err) = module.run_command("%s show '%s'" % (systemctl, unit))

        if rc == 0 and not (request_was_ignored(out) or request_was_ignored(err)):
            # load return of systemctl show into dictionary for easy access and return
            if out:
                result['status'] = parse_systemctl_show(to_native(out).split('\n'))

                # unit is systemd-managed if systemd could load (or at least find) it
                is_systemd = 'LoadState' in result['status'] and result['status']['LoadState'] != 'not-found'

                is_masked = 'LoadState' in result['status'] and result['status']['LoadState'] == 'masked'

                # Check for loading error
                if is_systemd and not is_masked and 'LoadError' in result['status']:
                    module.fail_json(msg="Error loading unit file '%s': %s" % (unit, result['status']['LoadError']))

        # Workaround for https://github.com/ansible/ansible/issues/71528
        elif err and rc == 1 and 'Failed to parse bus message' in err:
            result['status'] = parse_systemctl_show(to_native(out).split('\n'))

            # templated units ("name@instance") are looked up by their base name
            unit_base, sep, suffix = unit.partition('@')
            unit_search = '{unit_base}{sep}'.format(unit_base=unit_base, sep=sep)
            (rc, out, err) = module.run_command("{systemctl} list-unit-files '{unit_search}*'".format(systemctl=systemctl, unit_search=unit_search))
            is_systemd = unit_search in out

            (rc, out, err) = module.run_command("{systemctl} is-active '{unit}'".format(systemctl=systemctl, unit=unit))
            result['status']['ActiveState'] = out.rstrip('\n')

        else:
            # list taken from man systemctl(1) for systemd 244
            valid_enabled_states = [
                "enabled",
                "enabled-runtime",
                "linked",
                "linked-runtime",
                "masked",
                "masked-runtime",
                "static",
                "indirect",
                "disabled",
                "generated",
                "transient"]

            (rc, out, err) = module.run_command("%s is-enabled '%s'" % (systemctl, unit))
            if out.strip() in valid_enabled_states:
                is_systemd = True
            else:
                # fallback list-unit-files as show does not work on some systems (chroot)
                # not used as primary as it skips some services (like those using init.d) and requires .service/etc notation
                (rc, out, err) = module.run_command("%s list-unit-files '%s'" % (systemctl, unit))
                if rc == 0:
                    is_systemd = True
                else:
                    # Check for systemctl command
                    module.run_command(systemctl, check_rc=True)

        # Does service exist?
        found = is_systemd or is_initd
        if is_initd and not is_systemd:
            module.warn('The service (%s) is actually an init script but the system is managed by systemd' % unit)

        # mask/unmask the service, if requested, can operate on services before they are installed
        if module.params['masked'] is not None:
            # state is not masked unless systemd affirms otherwise
            (rc, out, err) = module.run_command("%s is-enabled '%s'" % (systemctl, unit))
            masked = out.strip() == "masked"

            if masked != module.params['masked']:
                result['changed'] = True
                if module.params['masked']:
                    action = 'mask'
                else:
                    action = 'unmask'

                if not module.check_mode:
                    (rc, out, err) = module.run_command("%s %s '%s'" % (systemctl, action, unit))
                    if rc != 0:
                        # some versions of system CAN mask/unmask non existing services, we only fail on missing if they don't
                        fail_if_missing(module, found, unit, msg='host')

        # Enable/disable service startup at boot if requested
        if module.params['enabled'] is not None:

            if module.params['enabled']:
                action = 'enable'
            else:
                action = 'disable'

            fail_if_missing(module, found, unit, msg='host')

            # do we need to enable the service?
            enabled = False
            (rc, out, err) = module.run_command("%s is-enabled '%s' -l" % (systemctl, unit))

            # check systemctl result or if it is a init script
            if rc == 0:
                enabled = True
                # Check if the service is indirect or alias and if out contains exactly 1 line of string 'indirect'/ 'alias' it's disabled
                if out.splitlines() == ["indirect"] or out.splitlines() == ["alias"]:
                    enabled = False

            elif rc == 1:
                # if not a user or global user service and both init script and unit file exist stdout should have enabled/disabled, otherwise use rc entries
                if module.params['scope'] == 'system' and \
                        is_initd and \
                        not out.strip().endswith('disabled') and \
                        sysv_is_enabled(unit):
                    enabled = True

            # default to current state
            result['enabled'] = enabled

            # Change enable/disable if needed
            if enabled != module.params['enabled']:
                result['changed'] = True
                if not module.check_mode:
                    (rc, out, err) = module.run_command("%s %s '%s'" % (systemctl, action, unit))
                    if rc != 0:
                        module.fail_json(msg="Unable to %s service %s: %s" % (action, unit, out + err))

                result['enabled'] = not enabled

        # set service state if requested
        if module.params['state'] is not None:
            fail_if_missing(module, found, unit, msg="host")

            # default to desired state
            result['state'] = module.params['state']

            # What is current service state?
            if 'ActiveState' in result['status']:
                action = None
                if module.params['state'] == 'started':
                    if not is_running_service(result['status']):
                        action = 'start'
                elif module.params['state'] == 'stopped':
                    if is_running_service(result['status']) or is_deactivating_service(result['status']):
                        action = 'stop'
                else:
                    # restarted/reloaded: only start if down, otherwise restart/reload
                    if not is_running_service(result['status']):
                        action = 'start'
                    else:
                        action = module.params['state'][:-2]  # remove 'ed' from restarted/reloaded
                    result['state'] = 'started'

                if action:
                    result['changed'] = True
                    if not module.check_mode:
                        (rc, out, err) = module.run_command("%s %s '%s'" % (systemctl, action, unit))
                        if rc != 0:
                            module.fail_json(msg="Unable to %s service %s: %s" % (action, unit, err))
            # check for chroot
            elif is_chroot(module) or os.environ.get('SYSTEMD_OFFLINE') == '1':
                module.warn("Target is a chroot or systemd is offline. This can lead to false positives or prevent the init system tools from working.")
            else:
                # this should not happen?
                module.fail_json(msg="Service is in unknown state", status=result['status'])

    module.exit_json(**result)
if __name__ == '__main__':
main()
|
privateip/ansible
|
lib/ansible/modules/systemd.py
|
Python
|
gpl-3.0
| 23,358
|
[
"Brian"
] |
94293668e0a78651055536cbbbde3c902c9762e94c48338795aa339d12c86dc6
|
#### Convenience Functions to be moved to kerneltools ####
import numpy as np
def forrt(X, m=None):
    """
    RFFT with order like Munro (1976) FORTT routine.

    Packs the normalized rfft of X into a single real vector:
    all real parts first, then the imaginary parts of the interior bins.
    """
    if m is None:
        m = len(X)
    spectrum = np.fft.rfft(X, m) / m
    return np.concatenate((spectrum.real, spectrum.imag[1:-1]))
def revrt(X, m=None):
    """
    Inverse of forrt. Equivalent to Munro (1976) REVRT routine.

    Unpacks the real/imag halves produced by forrt back into a complex
    spectrum and inverts it.
    """
    if m is None:
        m = len(X)
    # m // 2: under true division `m/2` is a float and cannot be used as a
    # slice index (TypeError on Python 3); integer division restores the
    # original Python 2 behavior.
    i = m // 2 + 1
    y = X[:i] + np.r_[0, X[i:], 0] * 1j
    return np.fft.irfft(y) * m
def silverman_transform(bw, M, RANGE):
    """
    FFT of Gaussian kernel following Silverman AS 176.

    Parameters
    ----------
    bw : float
        Kernel bandwidth.
    M : int
        Number of grid points.
    RANGE : float
        Width of the binning grid.

    Notes
    -----
    Underflow is intentional as a dampener.
    """
    # M // 2: under true division `M/2` is a float, which for odd M changes
    # the length of J (np.arange(3.5) has 4 elements, np.arange(3) has 3);
    # integer division restores the intended Python 2 semantics.
    J = np.arange(M // 2 + 1)
    FAC1 = 2 * (np.pi * bw / RANGE) ** 2
    JFAC = J ** 2 * FAC1
    # BC[0] == 1, so no division by zero at the zero-frequency bin
    BC = 1 - 1. / 3 * (J * 1. / M * np.pi) ** 2
    FAC = np.exp(-JFAC) / BC
    # mirror the interior bins to get the full, symmetric transform
    kern_est = np.r_[FAC, FAC[1:-1]]
    return kern_est
def linbin(X, a, b, M, trunc=1):
    """
    Linear Binning as described in Fan and Marron (1994).

    Each sample's unit mass is split linearly between the two grid points
    that bracket it on the M-point equally spaced grid over [a, b].

    Parameters
    ----------
    X : array_like
        Sample points to bin.
    a, b : float
        Grid endpoints.
    M : int
        Number of grid points.
    trunc : int
        If 0, points falling beyond the grid are lumped into the last bin
        instead of being dropped.

    Returns
    -------
    ndarray
        Length-M array of (fractional) bin counts.
    """
    gcnts = np.zeros(M)
    delta = (b - a) / (M - 1)
    for x in X:
        lxi = (x - a) / delta  # fractional grid index
        li = int(lxi)
        rem = lxi - li
        if 1 < li < M:
            gcnts[li] += 1 - rem
            # guard: the last grid point has no right neighbour (the
            # original unconditionally indexed gcnts[li + 1], which is out
            # of bounds when li == M - 1)
            if li + 1 < M:
                gcnts[li + 1] += rem
        if li > M and trunc == 0:
            # was `gcnts[M] = gncts[M] + 1`: a NameError (`gncts` typo) plus
            # an out-of-bounds index; accumulate into the last bin instead
            gcnts[M - 1] += 1
    return gcnts
def counts(x, v):
    """
    Counts the number of elements of x that fall within the grid points v.

    Parameters
    ----------
    x : array_like
        Sample values.
    v : array_like
        Grid (bin edge) values.

    Returns
    -------
    ndarray
        Per-bin counts, padded to len(v).

    Notes
    -----
    Using np.digitize and np.bincount
    """
    idx = np.digitize(x, v)
    try:  # numpy >= 1.6 supports the minlength keyword
        return np.bincount(idx, minlength=len(v))
    except TypeError:
        # older numpy: narrow except (the original bare `except:` would also
        # hide unrelated errors such as negative indices); pad manually
        bc = np.bincount(idx)
        return np.r_[bc, np.zeros(len(v) - len(bc))]
def kdesum(x, axis=0):
    """For each row i of x, return the sum over all rows of (x[i] - x) along `axis`."""
    return np.asarray([np.sum(row - x, axis) for row in x])
|
pprett/statsmodels
|
statsmodels/nonparametric/kdetools.py
|
Python
|
bsd-3-clause
| 1,766
|
[
"Gaussian"
] |
75553c9fbf2b54dc6d8d89b964e5d952b7897ea8899d517e6003a968e89c5226
|
from __future__ import division
import numpy as np
na = np.newaxis
import operator
import parallel
def kde(sigma,locs,vals=None):
    '''
    returns a density function p defined by
    p(x) = \sum_{xbar \in locs} vals[xbar] * rbf(xbar,x; sigma)
    rbf(xbar,x; sigma) is the value of a normalized isotropic gaussian density
    with mean xbar and variance sigma evaluated at x
    by default, vals[i] = 1/len(locs), useful for the case when locs are samples

    Parameters: sigma is the (scalar, isotropic) kernel variance; locs is an
    (N, K) array of component means; vals is an optional length-N weight
    vector that must sum to 1.

    NOTE(review): Python 2 only — the inner helper uses tuple-parameter
    unpacking, and evaluation relies on the builtin `reduce` and the
    project-local `parallel.dv` IPython view.
    '''
    # TODO should the computation here be done in log space with logaddexp?
    # yes, esp for higher dimensions...
    if vals is None:
        # uniform weights: treat each row of locs as an equally likely sample
        vals = 1./locs.shape[0] * np.ones(locs.shape[0])
    else:
        # weights must form a probability vector
        assert vals.ndim == 1 and np.allclose(vals.sum(),1)
    locs, vals = np.array(locs), np.array(vals)
    assert locs.ndim == 2 and locs.shape[0] == vals.shape[0]
    K = locs.shape[1] # dimensionality
    def p(x):
        # x: (M, K) array of query points; returns length-M density values
        assert x.ndim == 2
        # parallelizes over locs but not over x
        chunksize = 1000000 # max intermediate array size is chunksize doubles
        locchunks = np.array_split(locs,x.shape[0]*locs.shape[0]//chunksize+1)
        valchunks = np.array_split(vals,x.shape[0]*locs.shape[0]//chunksize+1)
        numchunks = len(locchunks)
        # weighted sum of Gaussian densities for one chunk of components;
        # the (ls, vs, x, K, sigma) tuple parameter is Python 2 syntax
        def f((ls,vs,x,K,sigma)):
            return np.dot(np.ones(len(ls)),vs[:,na]
                          / (2*np.pi)**(K/2) / np.sqrt(sigma**K)
                          * np.exp(-0.5*((ls[:,na,:] - x[na,:,:])**2).sum(2)/sigma))
        # dispatch chunks to the parallel workers and add up their partial sums
        return reduce(operator.add, parallel.dv.map_sync(f, zip(locchunks,valchunks,[x]*numchunks,[K]*numchunks,[sigma]*numchunks)))
    return p
|
mattjj/dirichlet-truncated-multinomial
|
density.py
|
Python
|
mit
| 1,623
|
[
"Gaussian"
] |
482e0eb98934d275ee4aefe65730dd7cf36f76be3abac683d99c3bdbad3a8a45
|
# Version: 0.14+dev
"""
The Versioneer
==============
* like a rocketeer, but for versions!
* https://github.com/warner/python-versioneer
* Brian Warner
* License: Public Domain
* Compatible With: python2.6, 2.7, 3.2, 3.3, 3.4, and pypy
* [![Latest Version]
(https://pypip.in/version/versioneer/badge.svg?style=flat)
](https://pypi.python.org/pypi/versioneer/)
* [![Build Status]
(https://travis-ci.org/warner/python-versioneer.png?branch=master)
](https://travis-ci.org/warner/python-versioneer)
This is a tool for managing a recorded version number in distutils-based
python projects. The goal is to remove the tedious and error-prone "update
the embedded version string" step from your release process. Making a new
release should be as easy as recording a new tag in your version-control
system, and maybe making new tarballs.
## Quick Install
* `pip install versioneer` to somewhere to your $PATH
* add a `[versioneer]` section to your setup.cfg (see below)
* run `versioneer install` in your source tree, commit the results
## Version Identifiers
Source trees come from a variety of places:
* a version-control system checkout (mostly used by developers)
* a nightly tarball, produced by build automation
* a snapshot tarball, produced by a web-based VCS browser, like github's
"tarball from tag" feature
* a release tarball, produced by "setup.py sdist", distributed through PyPI
Within each source tree, the version identifier (either a string or a number,
this tool is format-agnostic) can come from a variety of places:
* ask the VCS tool itself, e.g. "git describe" (for checkouts), which knows
about recent "tags" and an absolute revision-id
* the name of the directory into which the tarball was unpacked
* an expanded VCS keyword ($Id$, etc)
* a `_version.py` created by some earlier build step
For released software, the version identifier is closely related to a VCS
tag. Some projects use tag names that include more than just the version
string (e.g. "myproject-1.2" instead of just "1.2"), in which case the tool
needs to strip the tag prefix to extract the version identifier. For
unreleased software (between tags), the version identifier should provide
enough information to help developers recreate the same tree, while also
giving them an idea of roughly how old the tree is (after version 1.2, before
version 1.3). Many VCS systems can report a description that captures this,
for example 'git describe --tags --dirty --always' reports things like
"0.7-1-g574ab98-dirty" to indicate that the checkout is one revision past the
0.7 tag, has a unique revision id of "574ab98", and is "dirty" (it has
uncommitted changes).
The version identifier is used for multiple purposes:
* to allow the module to self-identify its version: `myproject.__version__`
* to choose a name and prefix for a 'setup.py sdist' tarball
## Theory of Operation
Versioneer works by adding a special `_version.py` file into your source
tree, where your `__init__.py` can import it. This `_version.py` knows how to
dynamically ask the VCS tool for version information at import time. However,
when you use "setup.py build" or "setup.py sdist", `_version.py` in the new
copy is replaced by a small static file that contains just the generated
version data.
`_version.py` also contains `$Revision$` markers, and the installation
process marks `_version.py` to have this marker rewritten with a tag name
during the "git archive" command. As a result, generated tarballs will
contain enough information to get the proper version.
## Installation
First, decide on values for the following configuration variables:
* `VCS`: the version control system you use. Currently accepts "git".
* `style`: the style of version string to be produced. See "Styles" below for
details. Defaults to "pep440", which looks like
`TAG[+DISTANCE.gSHORTHASH[.dirty]]`.
* `versionfile_source`:
A project-relative pathname into which the generated version strings should
be written. This is usually a `_version.py` next to your project's main
`__init__.py` file, so it can be imported at runtime. If your project uses
`src/myproject/__init__.py`, this should be `src/myproject/_version.py`.
This file should be checked in to your VCS as usual: the copy created below
by `setup.py setup_versioneer` will include code that parses expanded VCS
keywords in generated tarballs. The 'build' and 'sdist' commands will
replace it with a copy that has just the calculated version string.
This must be set even if your project does not have any modules (and will
therefore never import `_version.py`), since "setup.py sdist" -based trees
still need somewhere to record the pre-calculated version strings. Anywhere
in the source tree should do. If there is a `__init__.py` next to your
`_version.py`, the `setup.py setup_versioneer` command (described below)
will append some `__version__`-setting assignments, if they aren't already
present.
* `versionfile_build`:
Like `versionfile_source`, but relative to the build directory instead of
the source directory. These will differ when your setup.py uses
'package_dir='. If you have `package_dir={'myproject': 'src/myproject'}`,
then you will probably have `versionfile_build='myproject/_version.py'` and
`versionfile_source='src/myproject/_version.py'`.
If this is set to None, then `setup.py build` will not attempt to rewrite
any `_version.py` in the built tree. If your project does not have any
libraries (e.g. if it only builds a script), then you should use
`versionfile_build = None` and override `distutils.command.build_scripts`
to explicitly insert a copy of `versioneer.get_version()` into your
generated script.
* `tag_prefix`:
a string, like 'PROJECTNAME-', which appears at the start of all VCS tags.
If your tags look like 'myproject-1.2.0', then you should use
tag_prefix='myproject-'. If you use unprefixed tags like '1.2.0', this
should be an empty string.
* `parentdir_prefix`:
a string, frequently the same as tag_prefix, which appears at the start of
all unpacked tarball filenames. If your tarball unpacks into
'myproject-1.2.0', this should be 'myproject-'.
This tool provides one script, named `versioneer`. That script has one mode,
"install", which writes a copy of `versioneer.py` into the current directory
and runs `versioneer.py setup` to finish the installation.
To versioneer-enable your project:
* 1: Modify your `setup.cfg`, adding a section named `[versioneer]` and
populating it with the configuration values you decided earlier:
````
[versioneer]
VCS = git
style = pep440
versionfile_source = src/myproject/_version.py
versionfile_build = myproject/_version.py
tag_prefix = ""
parentdir_prefix = myproject-
````
* 2: Run `versioneer install`. This will do the following:
* copy `versioneer.py` into the top of your source tree
* create `_version.py` in the right place (`versionfile_source`)
* modify your `__init__.py` (if one exists next to `_version.py`) to define
`__version__` (by calling a function from `_version.py`)
* modify your `MANIFEST.in` to include both `versioneer.py` and the
generated `_version.py` in sdist tarballs
* 3: add a `import versioneer` to your setup.py, and add the following
arguments to the setup() call:
version=versioneer.get_version(),
cmdclass=versioneer.get_cmdclass(),
* 4: commit these changes to your VCS. To make sure you won't forget,
`versioneer install` will mark everything it touched for addition using
`git add`. Don't forget to add `setup.py` and `setup.cfg` too.
## Post-Installation Usage
Once established, all uses of your tree from a VCS checkout should get the
current version string. All generated tarballs should include an embedded
version string (so users who unpack them will not need a VCS tool installed).
If you distribute your project through PyPI, then the release process should
boil down to two steps:
* 1: git tag 1.0
* 2: python setup.py register sdist upload
If you distribute it through github (i.e. users use github to generate
tarballs with `git archive`), the process is:
* 1: git tag 1.0
* 2: git push; git push --tags
Currently, all version strings must be based upon a tag. Versioneer will
report "unknown" until your tree has at least one tag in its history. This
restriction will be fixed eventually (see issue #12).
## Version-String Flavors
Code which uses Versioneer can learn about its version string at runtime by
importing `_version` from your main `__init__.py` file and running the
`get_versions()` function. From the "outside" (e.g. in `setup.py`), you can
import the top-level `versioneer.py` and run `get_versions()`.
Both functions return a dictionary with different flavors of version
information:
* `['version']`: A condensed version string, rendered using the selected
style. This is the most commonly used value for the project's version
string. The default "pep440" style yields strings like `0.11`,
`0.11+2.g1076c97`, or `0.11+2.g1076c97.dirty`. See the "Styles" section
below for alternative styles.
* `['full-revisionid']`: detailed revision identifier. For Git, this is the
full SHA1 commit id, e.g. "1076c978a8d3cfc70f408fe5974aa6c092c949ac".
* `['dirty']`: a boolean, True if the tree has uncommitted changes. Note that
this is only accurate if run in a VCS checkout, otherwise it is likely to
be False or None
* `['error']`: if the version string could not be computed, this will be set
to a string describing the problem, otherwise it will be None. It may be
useful to throw an exception in setup.py if this is set, to avoid e.g.
creating tarballs with a version string of "unknown".
Some variants are more useful than others. Including `full-revisionid` in a
bug report should allow developers to reconstruct the exact code being tested
(or indicate the presence of local changes that should be shared with the
developers). `version` is suitable for display in an "about" box or a CLI
`--version` output: it can be easily compared against release notes and lists
of bugs fixed in various releases.
The installer adds the following text to your `__init__.py` to place a basic
version in `YOURPROJECT.__version__`:
from ._version import get_versions
__version__ = get_versions()['version']
del get_versions
## Styles
The setup.cfg `style=` configuration controls how the VCS information is
rendered into a version string.
The default style, "pep440", produces a PEP440-compliant string, equal to the
un-prefixed tag name for actual releases, and containing an additional "local
version" section with more detail for in-between builds. For Git, this is
TAG[+DISTANCE.gHEX[.dirty]] , using information from `git describe --tags
--dirty --always`. For example "0.11+2.g1076c97.dirty" indicates that the
tree is like the "1076c97" commit but has uncommitted changes (".dirty"), and
that this commit is two revisions ("+2") beyond the "0.11" tag. For released
software (exactly equal to a known tag), the identifier will only contain the
stripped tag, e.g. "0.11".
Other styles are available. See details.md in the Versioneer source tree for
descriptions.
## Updating Versioneer
To upgrade your project to a new release of Versioneer, do the following:
* install the new Versioneer (`pip install -U versioneer` or equivalent)
* edit `setup.cfg`, if necessary, to include any new configuration settings
indicated by the release notes
* re-run `versioneer install` in your source tree, to replace
`SRC/_version.py`
* commit any changed files
### Upgrading from 0.10 to 0.11
You must add a `versioneer.VCS = "git"` to your `setup.py` before re-running
`setup.py setup_versioneer`. This will enable the use of additional
version-control systems (SVN, etc) in the future.
### Upgrading from 0.11 to 0.12
Nothing special.
## Upgrading to 0.14
0.14 changes the format of the version string. 0.13 and earlier used
hyphen-separated strings like "0.11-2-g1076c97-dirty". 0.14 and beyond use a
plus-separated "local version" section strings, with dot-separated
components, like "0.11+2.g1076c97". PEP440-strict tools did not like the old
format, but should be ok with the new one.
## Upgrading to XXX
Starting with this version, Versioneer is configured with a `[versioneer]`
section in your `setup.cfg` file. Earlier versions required the `setup.py` to
set attributes on the `versioneer` module immediately after import. The new
version will refuse to run (exception during import) until you have provided
the necessary `setup.cfg` section.
## Future Directions
This tool is designed to make it easily extended to other version-control
systems: all VCS-specific components are in separate directories like
src/git/ . The top-level `versioneer.py` script is assembled from these
components by running make-versioneer.py . In the future, make-versioneer.py
will take a VCS name as an argument, and will construct a version of
`versioneer.py` that is specific to the given VCS. It might also take the
configuration arguments that are currently provided manually during
installation by editing setup.py . Alternatively, it might go the other
direction and include code from all supported VCS systems, reducing the
number of intermediate scripts.
## License
To make Versioneer easier to embed, all its code is hereby released into the
public domain. The `_version.py` that it creates is also in the public
domain.
"""
from __future__ import print_function
try:
import configparser
except ImportError:
import ConfigParser as configparser
import errno
import json
import os
import re
import subprocess
import sys
from distutils.command.build import build as _build
from distutils.command.sdist import sdist as _sdist
from distutils.core import Command
class VersioneerConfig:
    """Bag of configuration values read from setup.cfg (set as attributes by get_config)."""
    pass
def find_setup_cfg():
    """Return the path to setup.cfg next to this file.

    Falls back to a relative "setup.cfg" when __file__ is undefined
    (e.g. when executed in an environment without a module file).
    """
    try:
        here = os.path.dirname(os.path.realpath(__file__))
    except NameError:
        return "setup.cfg"
    return os.path.join(here, "setup.cfg")
def get_config():
    """Read the [versioneer] section of setup.cfg into a VersioneerConfig.

    This might raise EnvironmentError (if setup.cfg is missing), or
    configparser.NoSectionError (if it lacks a [versioneer] section), or
    configparser.NoOptionError (if it lacks "VCS="). See the docstring at
    the top of versioneer.py for instructions on writing your setup.cfg .
    """
    # SafeConfigParser and readfp() were removed in Python 3.12; use the
    # modern names when the legacy ones are unavailable, keeping the old
    # behavior on Python 2 / early Python 3.
    if hasattr(configparser, "SafeConfigParser"):
        parser = configparser.SafeConfigParser()
    else:
        parser = configparser.ConfigParser()
    setup_cfg = find_setup_cfg()
    with open(setup_cfg, "r") as f:
        if hasattr(parser, "read_file"):
            parser.read_file(f)
        else:
            parser.readfp(f)
    VCS = parser.get("versioneer", "VCS")  # mandatory

    def get(parser, name):
        # return the option value, or None when the option is absent
        if parser.has_option("versioneer", name):
            return parser.get("versioneer", name)
        return None
    cfg = VersioneerConfig()
    cfg.VCS = VCS
    cfg.style = get(parser, "style") or ""
    cfg.versionfile_source = get(parser, "versionfile_source")
    cfg.versionfile_build = get(parser, "versionfile_build")
    cfg.tag_prefix = get(parser, "tag_prefix")
    cfg.parentdir_prefix = get(parser, "parentdir_prefix")
    cfg.verbose = get(parser, "verbose")
    return cfg
class NotThisMethod(Exception):
    """Raised by a version-extraction strategy to signal it does not apply here."""
    pass
# these dictionaries contain VCS-specific tools
# Maps VCS name (e.g. "git") -> template text for the generated _version.py.
LONG_VERSION_PY = {}
def run_command(commands, args, cwd=None, verbose=False, hide_stderr=False):
    """Try each executable name in `commands` until one launches.

    Returns the stripped stdout of the first successful invocation, or None
    when no candidate could be launched or the process exited non-zero.
    """
    assert isinstance(commands, list)
    process = None
    for candidate in commands:
        try:
            # remember shell=False, so use git.cmd on windows, not just git
            stderr_target = subprocess.PIPE if hide_stderr else None
            process = subprocess.Popen([candidate] + args, cwd=cwd,
                                       stdout=subprocess.PIPE,
                                       stderr=stderr_target)
            break
        except EnvironmentError:
            exc = sys.exc_info()[1]
            if exc.errno == errno.ENOENT:
                # this candidate doesn't exist; try the next one
                continue
            if verbose:
                print("unable to run %s" % args[0])
                print(exc)
            return None
    else:
        if verbose:
            print("unable to find command, tried %s" % (commands,))
        return None
    captured = process.communicate()[0].strip()
    if sys.version_info[0] >= 3:
        captured = captured.decode()
    if process.returncode != 0:
        if verbose:
            print("unable to run %s (error)" % args[0])
        return None
    return captured
LONG_VERSION_PY['git'] = '''
# This file helps to compute a version number in source trees obtained from
# git-archive tarball (such as those provided by githubs download-from-tag
# feature). Distribution tarballs (built by setup.py sdist) and build
# directories (produced by setup.py build) will contain a much shorter file
# that just contains the computed version number.
# This file is released into the public domain. Generated by
# versioneer-0.14+dev (https://github.com/warner/python-versioneer)
import errno
import os
import re
import subprocess
import sys
def get_keywords():
# these strings will be replaced by git during git-archive.
# setup.py/versioneer.py will grep for the variable names, so they must
# each be defined on a line of their own. _version.py will just call
# get_keywords().
git_refnames = "%(DOLLAR)sFormat:%%d%(DOLLAR)s"
git_full = "%(DOLLAR)sFormat:%%H%(DOLLAR)s"
keywords = {"refnames": git_refnames, "full": git_full}
return keywords
class VersioneerConfig:
pass
def get_config():
# these strings are filled in when 'setup.py versioneer' creates
# _version.py
cfg = VersioneerConfig()
cfg.VCS = "git"
cfg.style = "%(STYLE)s"
cfg.tag_prefix = "%(TAG_PREFIX)s"
cfg.parentdir_prefix = "%(PARENTDIR_PREFIX)s"
cfg.versionfile_source = "%(VERSIONFILE_SOURCE)s"
cfg.verbose = False
return cfg
class NotThisMethod(Exception):
pass
def run_command(commands, args, cwd=None, verbose=False, hide_stderr=False):
assert isinstance(commands, list)
p = None
for c in commands:
try:
# remember shell=False, so use git.cmd on windows, not just git
p = subprocess.Popen([c] + args, cwd=cwd, stdout=subprocess.PIPE,
stderr=(subprocess.PIPE if hide_stderr
else None))
break
except EnvironmentError:
e = sys.exc_info()[1]
if e.errno == errno.ENOENT:
continue
if verbose:
print("unable to run %%s" %% args[0])
print(e)
return None
else:
if verbose:
print("unable to find command, tried %%s" %% (commands,))
return None
stdout = p.communicate()[0].strip()
if sys.version_info[0] >= 3:
stdout = stdout.decode()
if p.returncode != 0:
if verbose:
print("unable to run %%s (error)" %% args[0])
return None
return stdout
def versions_from_parentdir(parentdir_prefix, root, verbose):
# Source tarballs conventionally unpack into a directory that includes
# both the project name and a version string.
dirname = os.path.basename(root)
if not dirname.startswith(parentdir_prefix):
if verbose:
print("guessing rootdir is '%%s', but '%%s' doesn't start with "
"prefix '%%s'" %% (root, dirname, parentdir_prefix))
raise NotThisMethod("rootdir doesn't start with parentdir_prefix")
return {"version": dirname[len(parentdir_prefix):],
"full-revisionid": None,
"dirty": False, "error": None}
def git_get_keywords(versionfile_abs):
# the code embedded in _version.py can just fetch the value of these
# keywords. When used from setup.py, we don't want to import _version.py,
# so we do it with a regexp instead. This function is not used from
# _version.py.
keywords = {}
try:
f = open(versionfile_abs, "r")
for line in f.readlines():
if line.strip().startswith("git_refnames ="):
mo = re.search(r'=\s*"(.*)"', line)
if mo:
keywords["refnames"] = mo.group(1)
if line.strip().startswith("git_full ="):
mo = re.search(r'=\s*"(.*)"', line)
if mo:
keywords["full"] = mo.group(1)
f.close()
except EnvironmentError:
pass
return keywords
def git_versions_from_keywords(keywords, tag_prefix, verbose):
if not keywords:
raise NotThisMethod("no keywords at all, weird")
refnames = keywords["refnames"].strip()
if refnames.startswith("$Format"):
if verbose:
print("keywords are unexpanded, not using")
raise NotThisMethod("unexpanded keywords, not a git-archive tarball")
refs = set([r.strip() for r in refnames.strip("()").split(",")])
# starting in git-1.8.3, tags are listed as "tag: foo-1.0" instead of
# just "foo-1.0". If we see a "tag: " prefix, prefer those.
TAG = "tag: "
tags = set([r[len(TAG):] for r in refs if r.startswith(TAG)])
if not tags:
# Either we're using git < 1.8.3, or there really are no tags. We use
# a heuristic: assume all version tags have a digit. The old git %%d
# expansion behaves like git log --decorate=short and strips out the
# refs/heads/ and refs/tags/ prefixes that would let us distinguish
# between branches and tags. By ignoring refnames without digits, we
# filter out many common branch names like "release" and
# "stabilization", as well as "HEAD" and "master".
tags = set([r for r in refs if re.search(r'\d', r)])
if verbose:
print("discarding '%%s', no digits" %% ",".join(refs-tags))
if verbose:
print("likely tags: %%s" %% ",".join(sorted(tags)))
for ref in sorted(tags):
# sorting will prefer e.g. "2.0" over "2.0rc1"
if ref.startswith(tag_prefix):
r = ref[len(tag_prefix):]
if verbose:
print("picking %%s" %% r)
return {"version": r,
"full-revisionid": keywords["full"].strip(),
"dirty": False, "error": None
}
# no suitable tags, so version is "0+unknown", but full hex is still there
if verbose:
print("no suitable tags, using unknown + full revision id")
return {"version": "0+unknown",
"full-revisionid": keywords["full"].strip(),
"dirty": False, "error": "no suitable tags"}
def git_pieces_from_vcs(tag_prefix, root, verbose, run_command=run_command):
# this runs 'git' from the root of the source tree. This only gets called
# if the git-archive 'subst' keywords were *not* expanded, and
# _version.py hasn't already been rewritten with a short version string,
# meaning we're inside a checked out source tree.
if not os.path.exists(os.path.join(root, ".git")):
if verbose:
print("no .git in %%s" %% root)
raise NotThisMethod("no .git directory")
GITS = ["git"]
if sys.platform == "win32":
GITS = ["git.cmd", "git.exe"]
# if there is a tag, this yields TAG-NUM-gHEX[-dirty]
# if there are no tags, this yields HEX[-dirty] (no NUM)
describe_out = run_command(GITS, ["describe", "--tags", "--dirty",
"--always", "--long"],
cwd=root)
# --long was added in git-1.5.5
if describe_out is None:
raise NotThisMethod("'git describe' failed")
describe_out = describe_out.strip()
full_out = run_command(GITS, ["rev-parse", "HEAD"], cwd=root)
if full_out is None:
raise NotThisMethod("'git rev-parse' failed")
full_out = full_out.strip()
pieces = {}
pieces["long"] = full_out
pieces["short"] = full_out[:7] # maybe improved later
pieces["error"] = None
# parse describe_out. It will be like TAG-NUM-gHEX[-dirty] or HEX[-dirty]
# TAG might have hyphens.
git_describe = describe_out
# look for -dirty suffix
dirty = git_describe.endswith("-dirty")
pieces["dirty"] = dirty
if dirty:
git_describe = git_describe[:git_describe.rindex("-dirty")]
# now we have TAG-NUM-gHEX or HEX
if "-" in git_describe:
# TAG-NUM-gHEX
mo = re.search(r'^(.+)-(\d+)-g([0-9a-f]+)$', git_describe)
if not mo:
# unparseable. Maybe git-describe is misbehaving?
pieces["error"] = ("unable to parse git-describe output: '%%s'"
%% describe_out)
return pieces
# tag
full_tag = mo.group(1)
if not full_tag.startswith(tag_prefix):
if verbose:
fmt = "tag '%%s' doesn't start with prefix '%%s'"
print(fmt %% (full_tag, tag_prefix))
pieces["error"] = ("tag '%%s' doesn't start with prefix '%%s'"
%% (full_tag, tag_prefix))
return pieces
pieces["closest-tag"] = full_tag[len(tag_prefix):]
# distance: number of commits since tag
pieces["distance"] = int(mo.group(2))
# commit: short hex revision ID
pieces["short"] = mo.group(3)
else:
# HEX: no tags
pieces["closest-tag"] = None
count_out = run_command(GITS, ["rev-list", "HEAD", "--count"],
cwd=root)
pieces["distance"] = int(count_out) # total number of commits
return pieces
def plus_or_dot(pieces):
if "+" in pieces.get("closest-tag", ""):
return "."
return "+"
def render_pep440(pieces):
# now build up version string, with post-release "local version
# identifier". Our goal: TAG[+DISTANCE.gHEX[.dirty]] . Note that if you
# get a tagged build and then dirty it, you'll get TAG+0.gHEX.dirty
# exceptions:
# 1: no tags. git_describe was just HEX. 0+untagged.DISTANCE.gHEX[.dirty]
if pieces["closest-tag"]:
rendered = pieces["closest-tag"]
if pieces["distance"] or pieces["dirty"]:
rendered += plus_or_dot(pieces)
rendered += "%%d.g%%s" %% (pieces["distance"], pieces["short"])
if pieces["dirty"]:
rendered += ".dirty"
else:
# exception #1
rendered = "0+untagged.%%d.g%%s" %% (pieces["distance"],
pieces["short"])
if pieces["dirty"]:
rendered += ".dirty"
return rendered
def render_pep440_pre(pieces):
# TAG[.post.devDISTANCE] . No -dirty
# exceptions:
# 1: no tags. 0.post.devDISTANCE
if pieces["closest-tag"]:
rendered = pieces["closest-tag"]
if pieces["distance"]:
rendered += ".post.dev%%d" %% pieces["distance"]
else:
# exception #1
rendered = "0.post.dev%%d" %% pieces["distance"]
return rendered
def render_pep440_post(pieces):
# TAG[.postDISTANCE[.dev0]+gHEX] . The ".dev0" means dirty. Note that
# .dev0 sorts backwards (a dirty tree will appear "older" than the
# corresponding clean one), but you shouldn't be releasing software with
# -dirty anyways.
# exceptions:
# 1: no tags. 0.postDISTANCE[.dev0]
if pieces["closest-tag"]:
rendered = pieces["closest-tag"]
if pieces["distance"] or pieces["dirty"]:
rendered += ".post%%d" %% pieces["distance"]
if pieces["dirty"]:
rendered += ".dev0"
rendered += plus_or_dot(pieces)
rendered += "g%%s" %% pieces["short"]
else:
# exception #1
rendered = "0.post%%d" %% pieces["distance"]
if pieces["dirty"]:
rendered += ".dev0"
rendered += "+g%%s" %% pieces["short"]
return rendered
def render_pep440_old(pieces):
# TAG[.postDISTANCE[.dev0]] . The ".dev0" means dirty.
# exceptions:
# 1: no tags. 0.postDISTANCE[.dev0]
if pieces["closest-tag"]:
rendered = pieces["closest-tag"]
if pieces["distance"] or pieces["dirty"]:
rendered += ".post%%d" %% pieces["distance"]
if pieces["dirty"]:
rendered += ".dev0"
else:
# exception #1
rendered = "0.post%%d" %% pieces["distance"]
if pieces["dirty"]:
rendered += ".dev0"
return rendered
def render_git_describe(pieces):
# TAG[-DISTANCE-gHEX][-dirty], like 'git describe --tags --dirty
# --always'
# exceptions:
# 1: no tags. HEX[-dirty] (note: no 'g' prefix)
if pieces["closest-tag"]:
rendered = pieces["closest-tag"]
if pieces["distance"]:
rendered += "-%%d-g%%s" %% (pieces["distance"], pieces["short"])
else:
# exception #1
rendered = pieces["short"]
if pieces["dirty"]:
rendered += "-dirty"
return rendered
def render_git_describe_long(pieces):
# TAG-DISTANCE-gHEX[-dirty], like 'git describe --tags --dirty
# --always -long'. The distance/hash is unconditional.
# exceptions:
# 1: no tags. HEX[-dirty] (note: no 'g' prefix)
if pieces["closest-tag"]:
rendered = pieces["closest-tag"]
rendered += "-%%d-g%%s" %% (pieces["distance"], pieces["short"])
else:
# exception #1
rendered = pieces["short"]
if pieces["dirty"]:
rendered += "-dirty"
return rendered
def render(pieces, style):
if pieces["error"]:
return {"version": "unknown",
"full-revisionid": pieces.get("long"),
"dirty": None,
"error": pieces["error"]}
if not style or style == "default":
style = "pep440" # the default
if style == "pep440":
rendered = render_pep440(pieces)
elif style == "pep440-pre":
rendered = render_pep440_pre(pieces)
elif style == "pep440-post":
rendered = render_pep440_post(pieces)
elif style == "pep440-old":
rendered = render_pep440_old(pieces)
elif style == "git-describe":
rendered = render_git_describe(pieces)
elif style == "git-describe-long":
rendered = render_git_describe_long(pieces)
else:
raise ValueError("unknown style '%%s'" %% style)
return {"version": rendered, "full-revisionid": pieces["long"],
"dirty": pieces["dirty"], "error": None}
def get_versions():
# I am in _version.py, which lives at ROOT/VERSIONFILE_SOURCE. If we have
# __file__, we can work backwards from there to the root. Some
# py2exe/bbfreeze/non-CPython implementations don't do __file__, in which
# case we can only use expanded keywords.
cfg = get_config()
verbose = cfg.verbose
try:
return git_versions_from_keywords(get_keywords(), cfg.tag_prefix,
verbose)
except NotThisMethod:
pass
try:
root = os.path.realpath(__file__)
# versionfile_source is the relative path from the top of the source
# tree (where the .git directory might live) to this file. Invert
# this to find the root from __file__.
for i in cfg.versionfile_source.split('/'):
root = os.path.dirname(root)
except NameError:
return {"version": "0+unknown", "full-revisionid": None,
"dirty": None,
"error": "unable to find root of source tree"}
try:
pieces = git_pieces_from_vcs(cfg.tag_prefix, root, verbose)
return render(pieces, cfg.style)
except NotThisMethod:
pass
try:
return versions_from_parentdir(cfg.parentdir_prefix, root, verbose)
except NotThisMethod:
pass
return {"version": "0+unknown", "full-revisionid": None,
"dirty": None,
"error": "unable to compute version"}
'''
def git_get_keywords(versionfile_abs):
    """Extract expanded git keywords from a _version.py file by regexp.

    Used from setup.py, where we don't want to import _version.py; this
    function is not used from _version.py itself.  Returns a dict with up
    to two keys ("refnames" and "full"); a missing or unreadable file
    yields an empty dict.
    """
    keywords = {}
    try:
        # 'with' guarantees the handle is closed even if a read raises
        # (the original open()/close() pair leaked it in that case).
        with open(versionfile_abs, "r") as f:
            for line in f.readlines():
                if line.strip().startswith("git_refnames ="):
                    mo = re.search(r'=\s*"(.*)"', line)
                    if mo:
                        keywords["refnames"] = mo.group(1)
                if line.strip().startswith("git_full ="):
                    mo = re.search(r'=\s*"(.*)"', line)
                    if mo:
                        keywords["full"] = mo.group(1)
    except EnvironmentError:
        # a missing file just means this discovery method doesn't apply
        pass
    return keywords
def git_versions_from_keywords(keywords, tag_prefix, verbose):
    """Derive a version dict from expanded git-archive keywords.

    Raises NotThisMethod when the keywords are absent or unexpanded
    (i.e. this is not a git-archive tarball).
    """
    if not keywords:
        raise NotThisMethod("no keywords at all, weird")
    refnames = keywords["refnames"].strip()
    if refnames.startswith("$Format"):
        if verbose:
            print("keywords are unexpanded, not using")
        raise NotThisMethod("unexpanded keywords, not a git-archive tarball")
    refs = {ref.strip() for ref in refnames.strip("()").split(",")}
    # git >= 1.8.3 lists tags as "tag: foo-1.0" instead of just "foo-1.0";
    # prefer those explicitly marked entries when present.
    TAG = "tag: "
    tags = {ref[len(TAG):] for ref in refs if ref.startswith(TAG)}
    if not tags:
        # Older git, or genuinely no tags.  Heuristic: version tags contain
        # a digit, which filters out common branch names such as "release",
        # "stabilization", "HEAD" and "master".
        tags = {ref for ref in refs if re.search(r'\d', ref)}
        if verbose:
            print("discarding '%s', no digits" % ",".join(refs - tags))
    if verbose:
        print("likely tags: %s" % ",".join(sorted(tags)))
    for ref in sorted(tags):
        # sorting will prefer e.g. "2.0" over "2.0rc1"
        if not ref.startswith(tag_prefix):
            continue
        version = ref[len(tag_prefix):]
        if verbose:
            print("picking %s" % version)
        return {"version": version,
                "full-revisionid": keywords["full"].strip(),
                "dirty": False, "error": None}
    # no suitable tags, so version is "0+unknown", but full hex is still there
    if verbose:
        print("no suitable tags, using unknown + full revision id")
    return {"version": "0+unknown",
            "full-revisionid": keywords["full"].strip(),
            "dirty": False, "error": "no suitable tags"}
def git_pieces_from_vcs(tag_prefix, root, verbose, run_command=run_command):
    """Collect raw version "pieces" by running git inside *root*.

    Returns a dict with keys "long" (full hex), "short", "dirty",
    "closest-tag" (None when there are no tags), "distance" and "error".
    Raises NotThisMethod when there is no .git directory or git cannot be
    invoked.
    """
    # this runs 'git' from the root of the source tree. This only gets called
    # if the git-archive 'subst' keywords were *not* expanded, and
    # _version.py hasn't already been rewritten with a short version string,
    # meaning we're inside a checked out source tree.
    if not os.path.exists(os.path.join(root, ".git")):
        if verbose:
            print("no .git in %s" % root)
        raise NotThisMethod("no .git directory")
    GITS = ["git"]
    if sys.platform == "win32":
        # shell=False on windows requires the explicit wrapper names
        GITS = ["git.cmd", "git.exe"]
    # if there is a tag, this yields TAG-NUM-gHEX[-dirty]
    # if there are no tags, this yields HEX[-dirty] (no NUM)
    describe_out = run_command(GITS, ["describe", "--tags", "--dirty",
                                      "--always", "--long"],
                               cwd=root)
    # --long was added in git-1.5.5
    if describe_out is None:
        raise NotThisMethod("'git describe' failed")
    describe_out = describe_out.strip()
    full_out = run_command(GITS, ["rev-parse", "HEAD"], cwd=root)
    if full_out is None:
        raise NotThisMethod("'git rev-parse' failed")
    full_out = full_out.strip()
    pieces = {}
    pieces["long"] = full_out
    pieces["short"] = full_out[:7]  # maybe improved later
    pieces["error"] = None
    # parse describe_out. It will be like TAG-NUM-gHEX[-dirty] or HEX[-dirty]
    # TAG might have hyphens.
    git_describe = describe_out
    # look for -dirty suffix
    dirty = git_describe.endswith("-dirty")
    pieces["dirty"] = dirty
    if dirty:
        git_describe = git_describe[:git_describe.rindex("-dirty")]
    # now we have TAG-NUM-gHEX or HEX
    if "-" in git_describe:
        # TAG-NUM-gHEX
        mo = re.search(r'^(.+)-(\d+)-g([0-9a-f]+)$', git_describe)
        if not mo:
            # unparseable. Maybe git-describe is misbehaving?
            pieces["error"] = ("unable to parse git-describe output: '%s'"
                               % describe_out)
            return pieces
        # tag
        full_tag = mo.group(1)
        if not full_tag.startswith(tag_prefix):
            if verbose:
                fmt = "tag '%s' doesn't start with prefix '%s'"
                print(fmt % (full_tag, tag_prefix))
            pieces["error"] = ("tag '%s' doesn't start with prefix '%s'"
                               % (full_tag, tag_prefix))
            return pieces
        pieces["closest-tag"] = full_tag[len(tag_prefix):]
        # distance: number of commits since tag
        pieces["distance"] = int(mo.group(2))
        # commit: short hex revision ID
        pieces["short"] = mo.group(3)
    else:
        # HEX: no tags
        pieces["closest-tag"] = None
        count_out = run_command(GITS, ["rev-list", "HEAD", "--count"],
                                cwd=root)
        pieces["distance"] = int(count_out)  # total number of commits
    return pieces
def do_vcs_install(manifest_in, versionfile_source, ipy):
    """git-add the versioneer control files, ensuring .gitattributes marks
    the version file for export-time keyword substitution.

    manifest_in is the MANIFEST.in path, versionfile_source the
    project-relative _version.py path, ipy the package __init__.py
    (or a falsy value when there is none).
    """
    GITS = ["git"]
    if sys.platform == "win32":
        GITS = ["git.cmd", "git.exe"]
    files = [manifest_in, versionfile_source]
    if ipy:
        files.append(ipy)
    try:
        me = __file__
        if me.endswith(".pyc") or me.endswith(".pyo"):
            # add the source, not the compiled cache file
            me = os.path.splitext(me)[0] + ".py"
        versioneer_file = os.path.relpath(me)
    except NameError:
        # some frozen/embedded interpreters don't define __file__
        versioneer_file = "versioneer.py"
    files.append(versioneer_file)
    present = False
    try:
        # 'with' closes the handle even if a read raises; the original
        # open()/close() pair leaked it on that path.
        with open(".gitattributes", "r") as f:
            for line in f.readlines():
                if line.strip().startswith(versionfile_source):
                    if "export-subst" in line.strip().split()[1:]:
                        present = True
    except EnvironmentError:
        # no .gitattributes yet: we'll create it below
        pass
    if not present:
        with open(".gitattributes", "a+") as f:
            f.write("%s export-subst\n" % versionfile_source)
        files.append(".gitattributes")
    run_command(GITS, ["add", "--"] + files)
def versions_from_parentdir(parentdir_prefix, root, verbose):
    """Infer the version from the name of the unpacked source directory.

    Source tarballs conventionally unpack into "<project>-<version>/", so
    stripping the known prefix from the directory name yields the version.
    Raises NotThisMethod when the directory name does not match.
    """
    dirname = os.path.basename(root)
    if dirname.startswith(parentdir_prefix):
        return {"version": dirname[len(parentdir_prefix):],
                "full-revisionid": None,
                "dirty": False, "error": None}
    if verbose:
        print("guessing rootdir is '%s', but '%s' doesn't start with "
              "prefix '%s'" % (root, dirname, parentdir_prefix))
    raise NotThisMethod("rootdir doesn't start with parentdir_prefix")
SHORT_VERSION_PY = """
# This file was generated by 'versioneer.py' (0.14+dev) from
# revision-control system data, or from the parent directory name of an
# unpacked source archive. Distribution tarballs contain a pre-generated copy
# of this file.
import json
import sys
version_json = '''
%s
''' # END VERSION_JSON
def get_versions():
return json.loads(version_json)
"""
def versions_from_file(filename):
    """Load the version dict embedded in a generated short _version.py.

    Raises NotThisMethod when the file is unreadable or does not contain
    the expected version_json block.
    """
    try:
        with open(filename) as fh:
            contents = fh.read()
    except EnvironmentError:
        raise NotThisMethod("unable to read _version.py")
    match = re.search(r"version_json = '''\n(.*)''' # END VERSION_JSON",
                      contents, re.M | re.S)
    if match is None:
        raise NotThisMethod("no version_json in _version.py")
    return json.loads(match.group(1))
def write_to_version_file(filename, versions):
    """Replace *filename* with a generated short _version.py embedding
    *versions* as JSON."""
    payload = json.dumps(versions, sort_keys=True,
                         indent=1, separators=(",", ": "))
    # delete first, then recreate (the target may be a hardlink into the
    # source tree -- see cmd_sdist -- so it must not be edited in place)
    os.unlink(filename)
    with open(filename, "w") as fh:
        fh.write(SHORT_VERSION_PY % payload)
    print("set %s to '%s'" % (filename, versions["version"]))
def plus_or_dot(pieces):
    """Return the separator that starts the local-version segment: "." when
    the closest tag already contains a "+", else "+".

    Guards against pieces["closest-tag"] being present but None (as set by
    git_pieces_from_vcs when there are no tags): with the original
    ``pieces.get("closest-tag", "")`` the key exists, .get returns None,
    and ``"+" in None`` raised TypeError.
    """
    if "+" in (pieces.get("closest-tag") or ""):
        return "."
    return "+"
def render_pep440(pieces):
    """Build a PEP 440 version with a post-release "local version
    identifier": TAG[+DISTANCE.gHEX[.dirty]].  A tagged build that is then
    dirtied renders as TAG+0.gHEX.dirty; with no tags at all the result is
    0+untagged.DISTANCE.gHEX[.dirty].
    """
    tag = pieces["closest-tag"]
    if not tag:
        # no tags: git describe produced a bare HEX
        rendered = "0+untagged.%d.g%s" % (pieces["distance"],
                                          pieces["short"])
        if pieces["dirty"]:
            rendered += ".dirty"
        return rendered
    rendered = tag
    if pieces["distance"] or pieces["dirty"]:
        rendered += plus_or_dot(pieces)
        rendered += "%d.g%s" % (pieces["distance"], pieces["short"])
        if pieces["dirty"]:
            rendered += ".dirty"
    return rendered
def render_pep440_pre(pieces):
    """Render TAG[.post.devDISTANCE] (no -dirty marker); with no tag the
    result is 0.post.devDISTANCE."""
    tag = pieces["closest-tag"]
    distance = pieces["distance"]
    if not tag:
        # exception: no tags at all
        return "0.post.dev%d" % distance
    return tag + (".post.dev%d" % distance if distance else "")
def render_pep440_post(pieces):
    """Render TAG[.postDISTANCE[.dev0]+gHEX]; ".dev0" marks a dirty tree.

    Note that .dev0 sorts backwards (a dirty tree appears "older" than the
    corresponding clean one), but you shouldn't be releasing software with
    -dirty anyways.  With no tag: 0.postDISTANCE[.dev0]+gHEX.
    """
    tag = pieces["closest-tag"]
    if tag:
        rendered = tag
        if pieces["distance"] or pieces["dirty"]:
            rendered += ".post%d" % pieces["distance"]
            if pieces["dirty"]:
                rendered += ".dev0"
            rendered += plus_or_dot(pieces)
            rendered += "g%s" % pieces["short"]
        return rendered
    # exception: no tags at all
    rendered = "0.post%d" % pieces["distance"]
    if pieces["dirty"]:
        rendered += ".dev0"
    rendered += "+g%s" % pieces["short"]
    return rendered
def render_pep440_old(pieces):
    """Render TAG[.postDISTANCE[.dev0]]; ".dev0" marks a dirty tree.
    With no tag: 0.postDISTANCE[.dev0]."""
    tag = pieces["closest-tag"]
    if tag and not (pieces["distance"] or pieces["dirty"]):
        # clean tagged build: the tag alone is the version
        return tag
    rendered = tag if tag else "0"
    rendered += ".post%d" % pieces["distance"]
    if pieces["dirty"]:
        rendered += ".dev0"
    return rendered
def render_git_describe(pieces):
    """Mimic 'git describe --tags --dirty --always':
    TAG[-DISTANCE-gHEX][-dirty], or HEX[-dirty] when there is no tag
    (note: no 'g' prefix in that case)."""
    tag = pieces["closest-tag"]
    if not tag:
        rendered = pieces["short"]
    elif pieces["distance"]:
        rendered = "%s-%d-g%s" % (tag, pieces["distance"], pieces["short"])
    else:
        rendered = tag
    if pieces["dirty"]:
        rendered += "-dirty"
    return rendered
def render_git_describe_long(pieces):
    """Mimic 'git describe --tags --dirty --always --long': the
    -DISTANCE-gHEX part is unconditional, i.e. TAG-DISTANCE-gHEX[-dirty],
    or HEX[-dirty] when there is no tag (no 'g' prefix)."""
    tag = pieces["closest-tag"]
    if tag:
        rendered = "%s-%d-g%s" % (tag, pieces["distance"], pieces["short"])
    else:
        rendered = pieces["short"]
    if pieces["dirty"]:
        rendered += "-dirty"
    return rendered
def render(pieces, style):
    """Render *pieces* into a version dict using the requested *style*.

    Returns {"version", "full-revisionid", "dirty", "error"}; an error in
    *pieces* short-circuits to version "unknown".
    """
    if pieces["error"]:
        return {"version": "unknown",
                "full-revisionid": pieces.get("long"),
                "dirty": None,
                "error": pieces["error"]}
    if not style or style == "default":
        style = "pep440"  # the default
    # dispatch table instead of an if/elif chain
    renderers = {
        "pep440": render_pep440,
        "pep440-pre": render_pep440_pre,
        "pep440-post": render_pep440_post,
        "pep440-old": render_pep440_old,
        "git-describe": render_git_describe,
        "git-describe-long": render_git_describe_long,
    }
    renderer = renderers.get(style)
    if renderer is None:
        raise ValueError("unknown style '%s'" % style)
    rendered = renderer(pieces)
    return {"version": rendered, "full-revisionid": pieces["long"],
            "dirty": pieces["dirty"], "error": None}
def get_root():
    """Return the directory containing this script.

    Falls back to the directory of sys.argv[0] when __file__ is undefined
    (e.g. under py2exe/bbfreeze/non-CPython implementations).
    """
    try:
        here = os.path.abspath(__file__)
    except NameError:
        here = os.path.abspath(sys.argv[0])
    return os.path.dirname(here)
def vcs_function(vcs, suffix):
    """Look up the per-VCS helper named '<vcs>_<suffix>' in this module,
    returning None when it is not defined."""
    name = "%s_%s" % (vcs, suffix)
    return getattr(sys.modules[__name__], name, None)
def get_versions():
    """Return the project version dict, trying several discovery methods.

    Order: expanded VCS keywords in the version file, the generated version
    file itself, a live VCS query (e.g. 'git describe'), then the parent
    directory name.  Always returns a dict with 'version',
    'full-revisionid', 'dirty' and 'error' keys; 'version' is "0+unknown"
    when every method fails.
    """
    cfg = get_config()
    verbose = cfg.verbose
    assert cfg.versionfile_source is not None, \
        "please set versioneer.versionfile_source"
    assert cfg.tag_prefix is not None, "please set versioneer.tag_prefix"
    assert cfg.parentdir_prefix is not None, \
        "please set versioneer.parentdir_prefix"
    assert cfg.VCS is not None, "please set versioneer.VCS"
    # I am in versioneer.py, which must live at the top of the source tree,
    # which we use to compute the root directory. py2exe/bbfreeze/non-CPython
    # don't have __file__, in which case we fall back to sys.argv[0] (which
    # ought to be the setup.py script). We prefer __file__ since that's more
    # robust in cases where setup.py was invoked in some weird way (e.g. pip)
    root = get_root()
    versionfile_abs = os.path.join(root, cfg.versionfile_source)
    # resolve the per-VCS helpers (e.g. git_get_keywords) by name
    get_keywords_f = vcs_function(cfg.VCS, "get_keywords")
    versions_from_keywords_f = vcs_function(cfg.VCS, "versions_from_keywords")
    pieces_from_vcs_f = vcs_function(cfg.VCS, "pieces_from_vcs")
    # extract version from first of: _version.py, VCS command (e.g. 'git
    # describe'), parentdir. This is meant to work for developers using a
    # source checkout, for users of a tarball created by 'setup.py sdist',
    # and for users of a tarball/zipball created by 'git archive' or github's
    # download-from-tag feature or the equivalent in other VCSes.
    if get_keywords_f and versions_from_keywords_f:
        try:
            vcs_keywords = get_keywords_f(versionfile_abs)
            ver = versions_from_keywords_f(vcs_keywords, cfg.tag_prefix,
                                           verbose)
            if verbose:
                print("got version from expanded keyword %s" % ver)
            return ver
        except NotThisMethod:
            pass
    try:
        ver = versions_from_file(versionfile_abs)
        if verbose:
            print("got version from file %s %s" % (versionfile_abs, ver))
        return ver
    except NotThisMethod:
        pass
    if pieces_from_vcs_f:
        try:
            pieces = pieces_from_vcs_f(cfg.tag_prefix, root, verbose)
            ver = render(pieces, cfg.style)
            if verbose:
                print("got version from VCS %s" % ver)
            return ver
        except NotThisMethod:
            pass
    try:
        ver = versions_from_parentdir(cfg.parentdir_prefix, root, verbose)
        if verbose:
            print("got version from parentdir %s" % ver)
        return ver
    except NotThisMethod:
        pass
    if verbose:
        print("unable to compute version")
    return {"version": "0+unknown", "full-revisionid": None,
            "dirty": None, "error": "unable to compute version"}
def get_version():
    """Convenience wrapper: return just the version string from
    get_versions()."""
    return get_versions()["version"]
class cmd_version(Command):
    """distutils command: ``setup.py version`` prints the computed version
    string without building anything."""
    description = "report generated version string"
    user_options = []
    boolean_options = []
    def initialize_options(self):
        # no options to set up
        pass
    def finalize_options(self):
        pass
    def run(self):
        ver = get_version()
        print("Version is currently: %s" % ver)
class cmd_build(_build):
    """distutils 'build' wrapper that, after the normal build, rewrites the
    built copy of _version.py with a static version dict."""
    def run(self):
        cfg = get_config()
        versions = get_versions()
        _build.run(self)
        # now locate _version.py in the new build/ directory and replace it
        # with an updated value
        if cfg.versionfile_build:
            target_versionfile = os.path.join(self.build_lib,
                                              cfg.versionfile_build)
            print("UPDATING %s" % target_versionfile)
            write_to_version_file(target_versionfile, versions)
# cx_Freeze support: provide a 'build_exe' command so frozen executables
# ship a static _version.py (live VCS discovery cannot work in a frozen app).
if 'cx_Freeze' in sys.modules:  # cx_freeze enabled?
    from cx_Freeze.dist import build_exe as _build_exe
    class cmd_build_exe(_build_exe):
        def run(self):
            cfg = get_config()
            versions = get_versions()
            target_versionfile = cfg.versionfile_source
            print("UPDATING %s" % target_versionfile)
            # temporarily replace the source _version.py with the static
            # short version so the freeze picks it up
            write_to_version_file(target_versionfile, versions)
            _build_exe.run(self)
            os.unlink(target_versionfile)
            # restore the full template version of _version.py afterwards
            with open(cfg.versionfile_source, "w") as f:
                assert cfg.VCS is not None, "please set versioneer.VCS"
                LONG = LONG_VERSION_PY[cfg.VCS]
                f.write(LONG % {"DOLLAR": "$",
                                "STYLE": cfg.style,
                                "TAG_PREFIX": cfg.tag_prefix,
                                "PARENTDIR_PREFIX": cfg.parentdir_prefix,
                                "VERSIONFILE_SOURCE": cfg.versionfile_source,
                                })
class cmd_sdist(_sdist):
    """distutils 'sdist' wrapper that pins the computed version into the
    release tree's _version.py and the distribution metadata."""
    def run(self):
        versions = get_versions()
        # stash for make_release_tree, which runs later in the same command
        self._versioneer_generated_versions = versions
        # unless we update this, the command will keep using the old version
        self.distribution.metadata.version = versions["version"]
        return _sdist.run(self)
    def make_release_tree(self, base_dir, files):
        cfg = get_config()
        _sdist.make_release_tree(self, base_dir, files)
        # now locate _version.py in the new base_dir directory (remembering
        # that it may be a hardlink) and replace it with an updated value
        target_versionfile = os.path.join(base_dir, cfg.versionfile_source)
        print("UPDATING %s" % target_versionfile)
        write_to_version_file(target_versionfile,
                              self._versioneer_generated_versions)
def get_cmdclass():
    """Return the distutils command overrides provided by versioneer,
    keyed by command name (pass to setup(cmdclass=...))."""
    commands = {'version': cmd_version, 'sdist': cmd_sdist}
    if 'cx_Freeze' in sys.modules:  # cx_freeze enabled?
        # build_exe replaces the plain build command entirely
        commands['build_exe'] = cmd_build_exe
    else:
        commands['build'] = cmd_build
    return commands
CONFIG_ERROR = """
setup.cfg is missing the necessary Versioneer configuration. You need
a section like:
[versioneer]
VCS = git
style = pep440
versionfile_source = src/myproject/_version.py
versionfile_build = myproject/_version.py
tag_prefix = ""
parentdir_prefix = myproject-
You will also need to edit your setup.py to use the results:
import versioneer
setup(version=versioneer.get_version(),
cmdclass=versioneer.get_cmdclass(), ...)
Please read the docstring in ./versioneer.py for configuration instructions,
edit setup.cfg, and re-run the installer or 'python versioneer.py setup'.
"""
SAMPLE_CONFIG = """
# See the docstring in versioneer.py for instructions. Note that you must
# re-run 'versioneer.py setup' after changing this section, and commit the
# resulting files.
[versioneer]
#VCS = git
#style = pep440
#versionfile_source =
#versionfile_build =
#tag_prefix =
#parentdir_prefix =
"""
INIT_PY_SNIPPET = """
from ._version import get_versions
__version__ = get_versions()['version']
del get_versions
"""
def do_setup():
    """Install the versioneer machinery into the current project.

    Writes the long _version.py template, wires up the package
    __init__.py, ensures MANIFEST.in includes the control files, and makes
    the VCS-specific changes (.gitattributes, git add).  Returns 0 on
    success, 1 when setup.cfg is missing its [versioneer] section (after
    appending a sample config).
    """
    try:
        cfg = get_config()
    except (EnvironmentError, configparser.NoSectionError,
            configparser.NoOptionError) as e:
        if isinstance(e, (EnvironmentError, configparser.NoSectionError)):
            print("Adding sample versioneer config to setup.cfg",
                  file=sys.stderr)
            with open(find_setup_cfg(), "a") as f:
                f.write(SAMPLE_CONFIG)
        print(CONFIG_ERROR, file=sys.stderr)
        return 1
    print(" creating %s" % cfg.versionfile_source)
    with open(cfg.versionfile_source, "w") as f:
        assert cfg.VCS is not None, "please set versioneer.VCS"
        LONG = LONG_VERSION_PY[cfg.VCS]
        f.write(LONG % {"DOLLAR": "$",
                        "STYLE": cfg.style,
                        "TAG_PREFIX": cfg.tag_prefix,
                        "PARENTDIR_PREFIX": cfg.parentdir_prefix,
                        "VERSIONFILE_SOURCE": cfg.versionfile_source,
                        })
    ipy = os.path.join(os.path.dirname(cfg.versionfile_source),
                       "__init__.py")
    if os.path.exists(ipy):
        try:
            with open(ipy, "r") as f:
                old = f.read()
        except EnvironmentError:
            old = ""
        if INIT_PY_SNIPPET not in old:
            print(" appending to %s" % ipy)
            with open(ipy, "a") as f:
                f.write(INIT_PY_SNIPPET)
        else:
            print(" %s unmodified" % ipy)
    else:
        print(" %s doesn't exist, ok" % ipy)
        ipy = None
    # Make sure both the top-level "versioneer.py" and versionfile_source
    # (PKG/_version.py, used by runtime code) are in MANIFEST.in, so
    # they'll be copied into source distributions. Pip won't be able to
    # install the package without this.
    manifest_in = os.path.join(get_root(), "MANIFEST.in")
    simple_includes = set()
    try:
        with open(manifest_in, "r") as f:
            for line in f:
                if line.startswith("include "):
                    for include in line.split()[1:]:
                        simple_includes.add(include)
    except EnvironmentError:
        pass
    # That doesn't cover everything MANIFEST.in can do
    # (http://docs.python.org/2/distutils/sourcedist.html#commands), so
    # it might give some false negatives. Appending redundant 'include'
    # lines is safe, though.
    if "versioneer.py" not in simple_includes:
        print(" appending 'versioneer.py' to MANIFEST.in")
        with open(manifest_in, "a") as f:
            f.write("include versioneer.py\n")
    else:
        print(" 'versioneer.py' already in MANIFEST.in")
    if cfg.versionfile_source not in simple_includes:
        print(" appending versionfile_source ('%s') to MANIFEST.in" %
              cfg.versionfile_source)
        with open(manifest_in, "a") as f:
            f.write("include %s\n" % cfg.versionfile_source)
    else:
        print(" versionfile_source already in MANIFEST.in")
    # Make VCS-specific changes. For git, this means creating/changing
    # .gitattributes to mark _version.py for export-time keyword
    # substitution.
    do_vcs_install(manifest_in, cfg.versionfile_source, ipy)
    return 0
def scan_setup_py():
    """Sanity-check setup.py for the expected versioneer hooks.

    Returns the number of problems found (0 means setup.py looks right).
    """
    found = set()
    setters = False
    with open("setup.py", "r") as handle:
        for line in handle:
            if "import versioneer" in line:
                found.add("import")
            if "versioneer.get_cmdclass()" in line:
                found.add("cmdclass")
            if "versioneer.get_version()" in line:
                found.add("get_version")
            if "versioneer.VCS" in line:
                setters = True
            if "versioneer.versionfile_source" in line:
                setters = True
    errors = 0
    if len(found) != 3:
        print("")
        print("Your setup.py appears to be missing some important items")
        print("(but I might be wrong). Please make sure it has something")
        print("roughly like the following:")
        print("")
        print(" import versioneer")
        print(" setup( version=versioneer.get_version(),")
        print(" cmdclass=versioneer.get_cmdclass(), ...)")
        print("")
        errors += 1
    if setters:
        print("You should remove lines like 'versioneer.vcs = ' and")
        print("'versioneer.versionfile_source = ' . This configuration")
        print("now lives in setup.cfg, and should be removed from setup.py")
        print("")
        errors += 1
    return errors
# Command-line entry point: 'python versioneer.py setup' installs the
# versioneer machinery into the current project; a nonzero problem count
# exits with status 1.
if __name__ == "__main__":
    cmd = sys.argv[1]
    if cmd == "setup":
        errors = do_setup()
        errors += scan_setup_py()
        if errors:
            sys.exit(1)
|
jshiv/turntable
|
versioneer.py
|
Python
|
mit
| 57,307
|
[
"Brian"
] |
c42f23e30a70b8cae136749eefde20b0fed8b7811751862576acee2e1d11163d
|
"""
@name: PyHouse_Install/src/uninstall.py
@author: D. Brian Kimmel
@contact: D.BrianKimmel@gmail.com
@copyright: (c) 2016-2016 by D. Brian Kimmel
@license: MIT License
@note: Created Jan 12, 2016
@Summary:
"""
import subprocess
def cleanup():
    """Remove the pyhouse user and the installed PyHouse files.

    Requires sudo; each step is best-effort (return codes are ignored,
    matching the original behavior).
    """
    # Bug fix: subprocess.call must receive the command as a single argv
    # list.  Passing the words as separate positional arguments bound them
    # to call()'s bufsize/executable parameters, so the intended commands
    # were never executed.
    # sudo deluser pyhouse
    subprocess.call(['sudo', 'deluser', 'pyhouse'])
    # sudo rm -rf /home/pyhouse/
    subprocess.call(['sudo', 'rm', '-rf', '/home/pyhouse'])
    # sudo rm -rf PyHouse_Install
    subprocess.call(['sudo', 'rm', '-rf', 'PyHouse_Install'])
# Script entry point: announce and run the uninstall steps.
if __name__ == "__main__":
    print('Setup cleanup of PyHouse_Install.\n')
    cleanup()
# ## END DBK
|
DBrianKimmel/PyHouse_Install
|
uninstall.py
|
Python
|
mit
| 640
|
[
"Brian"
] |
f7f1e7d72e9543c3cb5e682cd8ecc6ef772c39e07c330a57343104968e6147eb
|
# Mapping of short meme aliases (as typed by bot users) to the corresponding
# meme-template names, URL-encoded with '+' separators for the generator site.
# NOTE(review): some aliases intentionally share a template (e.g. "rpgfan"
# and "dotafan" both map to "RPG+Fan").
Dict = {"10guy": "10+Guy",
        "chucknorris": "3g3xw",
        "anditsgone": "Aaaaand+Its+Gone",
        "advicemallard": "Actual+Advice+Mallard",
        "aintnobodygottimeforthat": "Aint+Nobody+Got+Time+For+That",
        "amitheonlyone": "Am+I+The+Only+One+Around+Here",
        "ancientaliens": "Ancient+Aliens",
        "backinmyday": "Back+In+My+Day",
        "badluckbrian": "Bad+Luck+Brian",
        "batmanslap": "Batman+Slapping+Robin",
        "beargrylls": "Bear+Grylls",
        "braceyourselves": "Brace+Yourselves+X+is+Coming",
        "badjokeeel": "Bad+Joke+Eel",
        "cristos": "Buddy+Christ",
        "businesscat": "Business+Cat",
        "butthurtdweller": "Butthurt+Dweller",
        "captainhindsight": "Captain+Hindsight",
        "chubbybubbles": "Chubby+Bubbles+Girl",
        "cleavegegirl": "Cleavage+Girl",
        "collegefreshman": "College+Freshman",
        "condescendingwonka": "Condescending+Wonka",
        "confessionbear": "Confession+Bear",
        "conspiracykeanu": "Conspiracy+Keanu",
        "disastergirl": "Disaster+Girl",
        "drevillaser": "Dr+Evil+Laser",
        "dwightschrute": "Dwight+Schrute",
        "evilracoon": "Evil+Plotting+Raccoon",
        "firstdayontheinternet": "First+Day+On+The+Internet+Kid",
        "firstworldproblem": "First+World+Problems",
        "futuramafry": "Futurama+Fry",
        "futuramazoidberg": "Futurama+Zoidberg",
        "gayseal": "Homophobic+Seal",
        "goodguygreg": "Good+Guy+Greg",
        "gordonramsay": "Angry+Chef+Gordon+Ramsay",
        "grumpycat": "Grumpy+Cat",
        "highdog": "High+Dog",
        "storygrandpa": "Storytelling+Grandpa",
        "facepalmbear": "Facepalm+Bear",
        "awkwardpenguin": "Socially+Awesome+Awkward+Penguin",
        "hideyokids": "Hide+Yo+Kids+Hide+Yo+Wife",
        "ishouldbuyaboat": "I+Should+Buy+A+Boat+Cat",
        "itooliketolivedangerously": "I+Too+Like+To+Live+Dangerously",
        "illhaveyouknow": "Ill+Have+You+Know+Spongebob",
        "inception": "Inception",
        "insanitywolf": "Insanity+Wolf",
        "josephducreux": "Joseph+Ducreux",
        "laughingvillains": "Laughing+Villains",
        "mrmackey": "Mr+Mackey",
        "officercartman": "Officer+Cartman",
        "onedoesnot": "One+Does+Not+Simply",
        "overlyattachedgf": "Overly+Attached+Girlfriend",
        "manlyman": "Overly+Manly+Man",
        "patrioteagle": "Patriotic+Eagle",
        "pedobear": "Pedobear",
        "pettergriffinnews": "Peter+Griffin+News",
        "philosoraptor": "Philosoraptor",
        "putitpatrick": "Put+It+Somewhere+Else+Patrick",
        "rpgfan": "RPG+Fan",
        "dotafan": "RPG+Fan",
        "rastateacher": "Rasta+Science+Teacher",
        "redditorwife": "Redditors+Wife",
        "redneckrandall": "Redneck+Randal",
        "photogenicguy": "Ridiculously+Photogenic+Guy",
        "ronswanson" : "Ron+Swanson",
        "sadkeanu": "Sad+Keanu",
        "samueljackson": "Samuel+Jackson+Glance",
        "scrooge": "Scrooge+McDuck+2",
        "scumbaggirl": "Scumbag+Girl",
        "scumbagsteve": "Scumbag+Steve",
        "seriousxzibit": "Serious+Xzibit",
        "takemymoney": "Shut+Up+And+Take+My+Money+Fry",
        "spidermancomputer": "Spiderman+Computer+Desk",
        "spidermanhospital": "Spiderman+Hospital",
        "successkid": "Success+Kid+Original",
        "suddenclarity": "Sudden+Clarity+Clarence",
        "skiinstructor": "Super+Cool+Ski+Instructor",
        "thatwouldbegreat": "That+Would+Be+Great",
        "mostinterestingman": "The+Most+Interesting+Man+In+The+World",
        "skepticalkid": "Third+World+Skeptical+Kid",
        "thirdworldsuccess": "Third+World+Success+Kid",
        "thatescalatedquickly": "Well+That+Escalated+Quickly",
        "whynotboth": "Why+Not+Both",
        "yodawg": "Yo+Dawg+Heard+You",
        "confessionkid": "confession+kid",
        "lazycollegesenior": "Lazy+College+Senior",
        "matrixmorpheus": "Matrix+Morpheus",
        "toodamnhigh": "Too+Damn+High",
        "darthvader": "star-wars-vader-force-choke",
        "ghettojesus": "Ghetto+Jesus",
        "facepalm2":"Frustrated+Boromir",
        }
|
adisuciu/AeB-Telegram-Bot
|
meme.py
|
Python
|
gpl-2.0
| 4,143
|
[
"Brian"
] |
ec2ee3470d3dc52ef8b81e681e5e6ec9ca36acb25df587f6f8dff8f179cb0724
|
#
# Copyright 2001 - 2016 Ludek Smid [http://www.ospace.net/]
#
# This file is part of Outer Space.
#
# Outer Space is free software; you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation; either version 2 of the License, or
# (at your option) any later version.
#
# Outer Space is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with Outer Space; if not, write to the Free Software
# Foundation, Inc., 51 Franklin St, Fifth Floor, Boston, MA 02110-1301 USA
#
import time, math
from ige.ospace import Rules
import ige
from ige import log
def recordScanLevel(distance, tObj, tSigMod, scannerPwr, owner, playerMaps):
    """Record how well `owner` scans target object `tObj`.

    The scan level grows with target signature and scanner power and
    falls off with distance (capped at Rules.maxScanPwr).  Levels below
    Rules.level1InfoScanPwr, or results for non-player owners (owner <= 0),
    are discarded.  For each owner only the best level per target is kept.
    """
    # guard against division by zero for co-located objects
    raw = (tObj.signature + tSigMod) * scannerPwr / max(0.0001, distance)
    level = min(raw, Rules.maxScanPwr)
    if level >= Rules.level1InfoScanPwr and owner > 0:
        ownerMap = playerMaps.setdefault(owner, {})
        ownerMap[tObj] = max(level, ownerMap.get(tObj, 0))
def computeScanner(obj1, obj2, playerMaps, signatures):
    """Record how well obj1's scanner(s) see obj2 into playerMaps.

    obj1 exposes either a single `scannerPwr` with one `owner`, or a
    `scannerPwrs` mapping of owner -> power; anything else is rejected.
    """
    d = math.hypot(obj1.x - obj2.x, obj1.y - obj2.y)
    # extra signature contributed by nearby fleets (accumulated by detectClose)
    sigMod = signatures.get(obj2.oid, 0)
    if hasattr(obj1, "scannerPwr"):
        recordScanLevel(d, obj2, sigMod, obj1.scannerPwr, obj1.owner, playerMaps)
    elif hasattr(obj1, "scannerPwrs"):
        # NOTE: dict.iteritems() is Python 2 only; this module targets Python 2.
        for owner, scannerPwr in obj1.scannerPwrs.iteritems():
            recordScanLevel(d, obj2, sigMod, scannerPwr, owner, playerMaps)
    else:
        raise ige.ServerException("Unsupported object")
def detectClose(obj1, obj2, signatures):
    """Detect fleets closer than 1.0 and boost both signatures.

    Each object's accumulated bonus in `signatures` (keyed by oid) is
    increased by the other object's signature.  Pairs are processed once
    (and an object is never paired with itself) by requiring
    obj1.oid < obj2.oid.
    """
    if obj1.oid >= obj2.oid:
        return
    separation = math.hypot(obj1.x - obj2.x, obj1.y - obj2.y)
    if separation < 1.0:
        log.debug("CLOSE FLEET detected", obj1.oid, obj2.oid, obj1.signature, obj2.signature)
        for first, second in ((obj1, obj2), (obj2, obj1)):
            signatures[first.oid] = signatures.get(first.oid, 0) + second.signature
def computeMap(galaxyCmdObj, tran, galaxy):
    """Compute per-player scan maps for one galaxy.

    Two sector passes are run: a fine pass (sector size 1) that detects
    fleets closer than 1.0 and accumulates signature bonuses, then a
    coarse pass (sector size 5, scan radius 10) that records scan levels
    per player.  Returns a dict owner -> {object: scan level}.
    """
    log.debug("SCAN2 Phase - starting")
    start = time.time()
    _map, fleets, alwaysVisible = generateMap(galaxyCmdObj, tran, galaxy)
    # compute close fleets
    sectors, surroundingSectors = generateSectors(fleets, sectorSize = 1, size = 1)
    signatures = {}
    processSectors(fleets, sectors, surroundingSectors, detectClose, (signatures,))
    log.debug("CLOSE FLEETS - result", signatures)
    # compute map
    start0 = time.time()
    sectors, surroundingSectors = generateSectors(_map, sectorSize = 5, size = 10)
    playerMaps = {}
    processSectors(_map, sectors, surroundingSectors, computeScanner, (playerMaps, signatures))
    # add always visible items (e.g. black holes) at the minimum info level
    for owner in playerMaps:
        for visibleObject in alwaysVisible.values():
            playerMaps[owner][visibleObject] = max(Rules.level1InfoScanPwr, playerMaps[owner].get(visibleObject, 0))
    stop = time.time()
    # NOTE(review): the second message measures stop - start0, which excludes
    # the close-fleet pass despite the "including" wording — verify intent.
    log.debug("Time : %0.3f s" % (stop - start))
    log.debug("Time : %0.3f s (including sector generation)" % (stop - start0))
    return playerMaps
def processSectors(_map, sectors, surroundingSectors, callable, args):
    """Apply `callable` to every relevant object pair, sector by sector.

    Sectors are consumed destructively (popitem), so each cross-sector
    pair is visited only once; the callable is invoked in both argument
    orders so it can act symmetrically.
    """
    while sectors:
        # get (and remove) random sector
        (sX, sY), sObjs = sectors.popitem()
        # build list of objects in surrounding sectors
        objs = []
        for dx in surroundingSectors:
            for dy in surroundingSectors:
                sIdx = (sX + dx, sY + dy)
                # already-processed or empty sectors are simply absent
                if sIdx not in sectors:
                    continue
                objs.extend(sectors[sIdx])
        # check objects in current sector
        for obj1Idx in sObjs:
            obj1 = _map[obj1Idx]
            # with objects in surrounding sectors
            for obj2Idx in objs:
                obj2 = _map[obj2Idx]
                callable(obj1, obj2, *args)
                callable(obj2, obj1, *args)
            # with objects in current sector
            for obj2Idx in sObjs:
                # allow object scan on itself; skip one ordering so each
                # in-sector pair is handled exactly once below
                if obj1Idx > obj2Idx:
                    continue
                obj2 = _map[obj2Idx]
                callable(obj1, obj2, *args)
                callable(obj2, obj1, *args)
def generateSectors(_map, sectorSize = 5, size = 10):
    """Bucket objects of `_map` (oid -> obj) into square sectors.

    Returns (sectors, surroundingSectors) where `sectors` maps a sector
    index (x, y) to a list of oids and `surroundingSectors` is the range
    of sector offsets that covers scan radius `size`.
    """
    # generate sector map
    sectors = {}
    # can be optimized to not include corner sectors
    # Bug fix: use floor division so the range bounds stay integral.  On
    # Python 3 the old "/" produced floats and range() raised TypeError;
    # on Python 2 integer "/" already floored, so "//" is equivalent there.
    surroundingSectors = range(-size // sectorSize, size // sectorSize + 1)
    for i in _map:
        obj = _map[i]
        sIdx = (int(obj.x / sectorSize), int(obj.y / sectorSize))
        if sIdx in sectors:
            sectors[sIdx].append(obj.oid)
        else:
            sectors[sIdx] = [obj.oid]
    return sectors, surroundingSectors
def generateMap(cmdObj, tran, galaxy):
    """Collect all scannable objects of a galaxy.

    Returns (_map, fleets, alwaysVisible): `_map` maps oid -> object for
    systems plus all objects in their space, `fleets` holds only the
    mobile objects, and `alwaysVisible` holds objects shown regardless
    of scanning (currently black-hole systems).
    """
    _map = {}
    fleets = {}
    alwaysVisible = {}
    # all systems are part of the map
    for systemID in galaxy.systems:
        system = tran.db[systemID]
        # black holes are always visible, for easier orientation on the galaxy map
        if system.starClass[0] == 'b':
            alwaysVisible[systemID] = system
        _map[systemID] = system
        # get mobile objects (fleet, ...)
        for objID in cmdObj.cmd(system).getObjectsInSpace(tran, system):
            obj = tran.db[objID]
            _map[objID] = obj
            fleets[objID] = obj
    return _map, fleets, alwaysVisible
|
ospaceteam/outerspace
|
server/lib/ige/ospace/Scanner.py
|
Python
|
gpl-2.0
| 5,575
|
[
"Galaxy"
] |
050caf3d17813b0277c89f07067e344378a397e453f9e7c7e34067d79cd6824d
|
# -*- coding: utf-8 -*-
# This file is part of periscope3.
# Copyright (c) 2013 Roman Hudec <black3r@klikni.cz>
#
# This file contains parts of code from periscope.
# Copyright (c) 2008-2011 Patrick Dessalle <patrick@dessalle.be>
#
# periscope is free software; you can redistribute it and/or modify
# it under the terms of the GNU Lesser General Public License as published by
# the Free Software Foundation; either version 2 of the License, or
# (at your option) any later version.
#
# periscope is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Lesser General Public License for more details.
#
# You should have received a copy of the GNU Lesser General Public License
# along with periscope; if not, write to the Free Software
# Foundation, Inc., 51 Franklin St, Fifth Floor, Boston, MA 02110-1301 USA
import logging
import zipfile, os, urllib.request, urllib.error, urllib.parse
import os, re, urllib.request, urllib.parse, urllib.error
import bs4 as BeautifulSoup
# Mapping of lowercase show titles to their numeric tvsubtitles.net show ids
# (the N in "tvshow-N.html").  Used by TvSubtitles to build season-page URLs;
# shows absent from this table cannot be queried.
showNum = {
    "24":38,
    "30 rock":46,
    "90210":244,
    "afterlife":200,
    "alias":5,
    "aliens in america":119,
    "ally mcbeal":158,
    "american dad":138,
    "andromeda":60,
    "andy barker: p.i.":49,
    "angel":98,
    "army wives":242,
    "arrested development":161,
    "ashes to ashes":151,
    "avatar: the last airbender":125,
    "back to you":183,
    "band of brothers":143,
    "battlestar galactica":42,
    "big day":237,
    "big love":88,
    "big shots":137,
    "bionic woman":113,
    "black adder":176,
    "black books":175,
    "blade":177,
    "blood ties":140,
    "bonekickers":227,
    "bones":59,
    "boston legal":77,
    "breaking bad":133,
    "brotherhood":210,
    "brothers & sisters":66,
    "buffy the vampire slayer":99,
    "burn notice":50,
    "californication":103,
    "carnivale":170,
    "carpoolers":146,
    "cashmere mafia":129,
    "charmed":87,
    "chuck":111,
    "city of vice":257,
    "cold case":95,
    "criminal minds":106,
    "csi":27,
    "csi miami":51,
    "csi ny":52,
    "curb your enthusiasm":69,
    "damages":124,
    "dark angel":131,
    "day break":6,
    "dead like me":13,
    "deadwood":48,
    "desperate housewives":29,
    "dexter":55,
    "dirt":145,
    "dirty sexy money":118,
    "do not disturb":252,
    "doctor who":141,
    "dollhouse" : 448,
    "drive":97,
    "eli stone":149,
    "entourage":25,
    "er (e.r.)":39,
    "eureka":43,
    "everybody hates chris":81,
    "everybody loves raymond":86,
    "exes & ohs":199,
    "extras":142,
    "fallen":101,
    "family guy":62,
    "farscape":92,
    "fawlty towers":178,
    "fear itself":201,
    "felicity":217,
    "firefly":84,
    "flash gordon":134,
    "flashpoint":221,
    "friday night lights":57,
    "friends":65,
    "fringe":204,
    "futurama":126,
    "generation kill":223,
    "ghost whisperer":14,
    "gilmore girls":28,
    "gossip girl":114,
    "greek":102,
    "grey's anatomy":7,
    "hank":538,
    "heroes":8,
    "hidden palms":44,
    "hotel babylon":164,
    "house m.d.":9,
    "how i met your mother":110,
    "hustle":160,
    "in justice":144,
    "in plain sight":198,
    "in treatment":139,
    "into the west":256,
    "invasion":184,
    "it's always sunny in philadelphia":243,
    "jeeves and wooster":180,
    "jekyll":61,
    "jericho":37,
    "joey":83,
    "john adams":155,
    "john from cincinnati":79,
    "journeyman":108,
    "k-ville":107,
    "keeping up appearances":167,
    "knight rider":163,
    "kyle xy":10,
    "lab rats":233,
    "las vegas":75,
    "life":109,
    "life is wild":120,
    "life on mars (uk)":90,
    "lipstick jungle":150,
    "lost":3,
    "lost in austen":254,
    "lucky louie":238,
    "mad men":136,
    "meadowlands":45,
    "medium":12,
    "melrose place":189,
    "men in trees":127,
    "miami vice":208,
    "monk":85,
    "moonlight":117,
    "my name is earl":15,
    "ncis":30,
    "new amsterdam":153,
    "nip/tuck":23,
    "northern exposure":241,
    "numb3rs":11,
    "october road":132,
    "one tree hill":16,
    "over there":93,
    "oz":36,
    "painkiller jane":35,
    "pepper dennis":82,
    "police squad":190,
    "popetown":179,
    "pretender":245,
    "primeval":130,
    "prison break":2,
    "private practice":115,
    "privileged":248,
    "project runway":226,
    "psych":17,
    "pushing daisies":116,
    "queer as folk":229,
    "reaper":112,
    "regenesis":152,
    "rescue me":91,
    "robin hood":121,
    "rome":63,
    "roswell":159,
    "samantha who?":123,
    "samurai girl":255,
    "saving grace":104,
    "scrubs":26,
    "secret diary of a call girl":196,
    "seinfeld":89,
    "sex and the city":68,
    "shameless":193,
    "shark":24,
    "sharpe":186,
    "six feet under":94,
    "skins":147,
    "smallville":1,
    "sophie":203,
    "south park":71,
    "spooks":148,
    "standoff":70,
    "stargate atlantis":54,
    "stargate sg-1":53,
    "studio 60 on the sunset strip":33,
    "supernatural":19,
    "swingtown":202,
    "taken":67,
    "tell me you love me":182,
    "terminator: the sarah connor chronicles":128,
    "the 4400":20,
    "the andromeda strain":181,
    "the big bang theory":154,
    "the black donnellys":216,
    "the cleaner":225,
    "the closer":78,
    "the dead zone":31,
    "the dresden files":64,
    "the fixer":213,
    "the inbetweeners":197,
    "the it crowd":185,
    "the l word":74,
    "the middleman":222,
    "the net":174,
    "the no. 1 ladies' detective agency":162,
    "the o.c. (the oc)":21,
    "the office":58,
    "the outer limits":211,
    "the riches":156,
    "the secret life of the american teenager":218,
    "the shield":40,
    "the simple life":234,
    "the simpsons":32,
    "the sopranos":18,
    "the tudors":76,
    "the unit":47,
    "the war at home":80,
    "the west wing":168,
    "the wire":72,
    "the x-files":100,
    "threshold":96,
    "til death":171,
    "tin man":122,
    "top gear":232,
    "torchwood":135,
    "traveler":41,
    "tripping the rift":188,
    "tru calling":4,
    "true blood":205,
    "twin peaks":169,
    "two and a half men":56,
    "ugly betty":34,
    "ultimate force":194,
    "unhitched":157,
    "veronica mars":22,
    "weeds":73,
    "will & grace":172,
    "without a trace":105,
    "women's murder club":166,
    "wonderfalls":165
    }
from . import SubtitleDatabase
class TvSubtitles(SubtitleDatabase.SubtitleDB):
    """Subtitle provider plugin backed by tvsubtitles.net.

    Show titles are resolved to tvsubtitles.net numeric ids through the
    module-level ``showNum`` mapping, then season pages are scraped for
    subtitle links matching the requested episode and languages.
    """

    url = "http://www.tvsubtitles.net"
    site_name = "TvSubtitles"

    URL_SHOW_PATTERN = "http://www.tvsubtitles.net/tvshow-%s.html"
    URL_SEASON_PATTERN = "http://www.tvsubtitles.net/tvshow-%s-%d.html"

    def __init__(self):
        super(TvSubtitles, self).__init__({"en":'en', "fr":'fr'})## TODO ??
        self.host = TvSubtitles.url

    def _fetch_page_content(self, page_url):
        """Download a page and neutralize its 'SCR'+'IPT' obfuscation.

        Bug fix: under Python 3 urlopen().read() returns bytes, so the
        replacement patterns must be bytes too; the previous str arguments
        raised TypeError on every fetch.
        """
        page = urllib.request.urlopen(page_url)
        content = page.read()
        return content.replace(b"SCR'+'IPT", b"script")

    def _get_episode_urls(self, show, season, episode, langs):
        """Return (url, lang) pairs linking to subtitles for one episode."""
        showId = showNum.get(show, None)
        if not showId:
            return []
        show_url = self.URL_SEASON_PATTERN % (showId, season)
        logging.debug("Show url: %s" % show_url)
        content = self._fetch_page_content(show_url)
        soup = BeautifulSoup.BeautifulSoup(content)
        td_content = "%sx%s"%(season, episode)
        tds = soup.findAll(text=td_content)
        links = []
        for td in tds:
            imgs = td.parent.parent.findAll("td")[3].findAll("img")
            for img in imgs:
                # If there is an alt, and that alt in langs or you didn't specify a langs
                if img['alt'] and ((langs and img['alt'] in langs) or (not langs)):
                    url = self.host + "/" + img.parent['href']
                    lang = img['alt']
                    logging.debug("Found lang %s - %s" %(lang, url))
                    links.append((url, lang))
        return links

    def query(self, show, season, episode, teams, langs):
        """Scrape the season page and return parsed subtitle descriptors.

        Each descriptor is the dict produced by parseSubtitlePage
        (release, lang, link, page).
        """
        showId = showNum.get(show, None)
        if not showId:
            return []
        show_url = self.URL_SEASON_PATTERN % (showId, season)
        logging.debug("Show url: %s" % show_url)
        content = self._fetch_page_content(show_url)
        soup = BeautifulSoup.BeautifulSoup(content)
        td_content = "%dx%02d"%(season, episode)
        tds = soup.findAll(text=td_content)
        links = []
        for td in tds:
            imgs = td.parent.parent.findAll("td")[3].findAll("img")
            for img in imgs:
                # If there is an alt, and that alt in langs or you didn't specify a langs
                if img['alt'] and ((langs and img['alt'] in langs) or (not langs)):
                    url = img.parent['href']
                    lang = img['alt']
                    logging.debug("Found lang %s - %s" %(lang, url))
                    if url.startswith("subtitle"):
                        url = self.host + "/" + url
                        logging.debug("Parse : %s" %url)
                        sub = self.parseSubtitlePage(url, lang, show, season, episode, teams)
                        if sub:
                            links.append(sub)
                    else:
                        page2 = urllib.request.urlopen(self.host + "/" + url)
                        soup2 = BeautifulSoup.BeautifulSoup(page2)
                        subs = soup2.findAll("div", {"class" : "subtitlen"})
                        for sub in subs:
                            # NOTE(review): div tags usually have no 'href';
                            # if get() returns None this concatenation fails.
                            # Kept as-is pending verification against the site.
                            url = self.host + sub.get('href', None)
                            logging.debug("Parse2 : %s" %url)
                            sub = self.parseSubtitlePage(url, lang, show, season, episode, teams)
                            if sub:
                                links.append(sub)
        return links

    def parseSubtitlePage(self, url, lang, show, season, episode, teams):
        """Parse one subtitle page; return a result dict or None.

        A page matches when the release/rip team names it advertises are
        a subset of the teams extracted from the local filename.
        """
        fteams = []
        for team in teams:
            fteams += team.split("-")
        fteams = set(fteams)
        subid = url.rsplit("-", 1)[1].split('.', 1)[0]
        link = self.host + "/download-" + subid + ".html"
        content = self._fetch_page_content(url)
        soup = BeautifulSoup.BeautifulSoup(content)
        subteams = set()
        releases = soup.findAll(text="release:")
        if releases:
            subteams.update([releases[0].parent.parent.parent.parent.findAll("td")[2].string.lower()])
        rips = soup.findAll(text="rip:")
        if rips:
            subteams.update([rips[0].parent.parent.parent.parent.findAll("td")[2].string.lower()])
        if subteams.issubset(fteams):
            logging.debug("It'a match ! : %s <= %s" %(subteams, fteams))
            result = {}
            result["release"] = "%s.S%.2dE%.2d.%s" %(show.replace(" ", ".").title(), int(season), int(episode), '.'.join(subteams).upper()
            )
            result["lang"] = lang
            result["link"] = link
            result["page"] = url
            return result
        else:
            logging.debug("It'not a match ! : %s > %s" %(subteams, fteams))
            return None

    def process(self, filename, langs):
        ''' main method to call on the plugin, pass the filename and the wished
        languages and it will query TvSubtitles.net '''
        fname = str(self.getFileName(filename).lower())
        guessedData = self.guessFileData(fname)
        logging.debug(fname)
        if guessedData['type'] == 'tvshow':
            subs = self.query(guessedData['name'], guessedData['season'], guessedData['episode'], guessedData['teams'], langs)
            return subs
        else:
            return []
|
black3r/periscope3
|
periscope/plugins/TvSubtitles.py
|
Python
|
gpl-2.0
| 10,030
|
[
"Firefly"
] |
fcbbab762bb0b45e25937e6f8c14c72541e65f9c937961a41e0796b998e3269b
|
######################################################################
# Copyright (C) 2013-2014 Jaakko Luttinen
#
# This file is licensed under Version 3.0 of the GNU General Public
# License. See LICENSE for a text of the license.
######################################################################
######################################################################
# This file is part of BayesPy.
#
# BayesPy is free software: you can redistribute it and/or modify it
# under the terms of the GNU General Public License version 3 as
# published by the Free Software Foundation.
#
# BayesPy is distributed in the hope that it will be useful, but
# WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
# General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with BayesPy. If not, see <http://www.gnu.org/licenses/>.
######################################################################
"""
Demonstrate linear Gaussian state-space model.
Some of the functions in this module are re-usable:
* ``model`` can be used to construct the classical linear state-space model.
* ``infer`` can be used to apply linear state-space model to given data.
"""
import numpy as np
import scipy
import matplotlib.pyplot as plt
from bayespy.nodes import GaussianMarkovChain
from bayespy.nodes import Gaussian, GaussianARD
from bayespy.nodes import Gamma
from bayespy.nodes import SumMultiply
from bayespy.inference.vmp.nodes.gamma import diagonal
from bayespy.utils import random
from bayespy.inference.vmp.vmp import VB
from bayespy.inference.vmp import transformations
import bayespy.plot.plotting as bpplt
def model(M=10, N=100, D=3):
    """
    Construct linear state-space model.

    Parameters
    ----------
    M : int
        Dimensionality of the observation vectors.
    N : int
        Number of time instances.
    D : int
        Dimensionality of the latent state space.

    Returns
    -------
    VB
        Variational Bayesian inference engine over the model's nodes.

    See, for instance, the following publication:
    "Fast variational Bayesian linear state-space model"
    Luttinen (ECML 2013)
    """

    # Dynamics matrix with ARD
    alpha = Gamma(1e-5,
                  1e-5,
                  plates=(D,),
                  name='alpha')
    A = GaussianARD(0,
                    alpha,
                    shape=(D,),
                    plates=(D,),
                    plotter=bpplt.GaussianHintonPlotter(rows=0,
                                                        cols=1,
                                                        scale=0),
                    name='A')
    A.initialize_from_value(np.identity(D))

    # Latent states with dynamics
    X = GaussianMarkovChain(np.zeros(D),         # mean of x0
                            1e-3*np.identity(D), # prec of x0
                            A,                   # dynamics
                            np.ones(D),          # innovation
                            n=N,                 # time instances
                            plotter=bpplt.GaussianMarkovChainPlotter(scale=2),
                            name='X')
    X.initialize_from_value(np.random.randn(N,D))

    # Mixing matrix from latent space to observation space using ARD
    gamma = Gamma(1e-5,
                  1e-5,
                  plates=(D,),
                  name='gamma')
    gamma.initialize_from_value(1e-2*np.ones(D))
    C = GaussianARD(0,
                    gamma,
                    shape=(D,),
                    plates=(M,1),
                    plotter=bpplt.GaussianHintonPlotter(rows=0,
                                                        cols=2,
                                                        scale=0),
                    name='C')
    C.initialize_from_value(np.random.randn(M,1,D))

    # Observation noise
    tau = Gamma(1e-5,
                1e-5,
                name='tau')
    tau.initialize_from_value(1e2)

    # Underlying noiseless function
    F = SumMultiply('i,i',
                    C,
                    X,
                    name='F')

    # Noisy observations
    Y = GaussianARD(F,
                    tau,
                    name='Y')

    # Bug fix: C was previously passed to VB twice; each node should be
    # registered with the inference engine exactly once.
    Q = VB(Y, F, C, gamma, X, A, alpha, tau)

    return Q
def infer(y, D,
          mask=True,
          maxiter=100,
          rotate=True,
          debug=False,
          precompute=False,
          update_hyper=0,
          start_rotating=0,
          plot_C=True,
          monitor=True,
          autosave=None):
    """
    Apply linear state-space model for the given data.

    Parameters
    ----------
    y : array, shape (M, N)
        Observed data (M-dimensional vectors at N time instances).
    D : int
        Dimensionality of the latent state space.
    mask : bool or array
        Observation mask passed to Y.observe (True = fully observed).
    maxiter : int
        Number of VB update iterations.
    rotate : bool
        Whether to use rotation transformations to speed up convergence.
    debug : bool
        If True, check rotation bound and gradient on every rotation.
    update_hyper : int
        Number of initial iterations during which only the lower-level
        nodes (X, C, A, tau) are updated.
    start_rotating : int
        Iteration index at which rotations begin.
    autosave : str or None
        Filename for periodic autosaving of the inference state.

    Returns
    -------
    VB
        The inference engine holding the posterior approximation.
    """

    (M, N) = np.shape(y)

    # Construct the model
    Q = model(M, N, D)
    if not plot_C:
        Q['C'].set_plotter(None)

    if autosave is not None:
        Q.set_autosave(autosave, iterations=10)

    # Observe data
    Q['Y'].observe(y, mask=mask)

    # Set up rotation speed-up
    if rotate:
        # Initial rotate the D-dimensional state space (X, A, C)
        # Does not update hyperparameters
        rotA_init = transformations.RotateGaussianARD(Q['A'],
                                                      axis=0,
                                                      precompute=precompute)
        rotX_init = transformations.RotateGaussianMarkovChain(Q['X'],
                                                              rotA_init)
        rotC_init = transformations.RotateGaussianARD(Q['C'],
                                                      axis=0,
                                                      precompute=precompute)
        R_X_init = transformations.RotationOptimizer(rotX_init, rotC_init, D)

        # Rotate the D-dimensional state space (X, A, C)
        rotA = transformations.RotateGaussianARD(Q['A'],
                                                 Q['alpha'],
                                                 axis=0,
                                                 precompute=precompute)
        rotX = transformations.RotateGaussianMarkovChain(Q['X'],
                                                         rotA)
        rotC = transformations.RotateGaussianARD(Q['C'],
                                                 Q['gamma'],
                                                 axis=0,
                                                 precompute=precompute)
        R_X = transformations.RotationOptimizer(rotX, rotC, D)

        # Keyword arguments for the rotation
        if debug:
            rotate_kwargs = {'maxiter': 10,
                             'check_bound': True,
                             'check_gradient': True}
        else:
            rotate_kwargs = {'maxiter': 10}

    # Plot initial distributions
    if monitor:
        Q.plot()

    # Run inference using rotations
    for ind in range(maxiter):
        if ind < update_hyper:
            # It might be a good idea to learn the lower level nodes a bit
            # before starting to learn the upper level nodes.
            Q.update('X', 'C', 'A', 'tau', plot=monitor)
            if rotate and ind >= start_rotating:
                # Use the rotation which does not update alpha nor beta
                R_X_init.rotate(**rotate_kwargs)
        else:
            Q.update(plot=monitor)
            if rotate and ind >= start_rotating:
                # It might be a good idea to not rotate immediately because it
                # might lead to pruning out components too efficiently before
                # even estimating them roughly
                R_X.rotate(**rotate_kwargs)

    # Return the posterior approximation
    return Q
def simulate_data(M, N):
"""
Generate a dataset using linear state-space model.
The process has two latent oscillation components and one random walk
component.
"""
# Simulate some data
D = 3
c = np.random.randn(M, D)
w = 0.3
a = np.array([[np.cos(w), -np.sin(w), 0],
[np.sin(w), np.cos(w), 0],
[0, 0, 1]])
x = np.empty((N,D))
f = np.empty((M,N))
y = np.empty((M,N))
x[0] = 10*np.random.randn(D)
f[:,0] = np.dot(c,x[0])
y[:,0] = f[:,0] + 3*np.random.randn(M)
for n in range(N-1):
x[n+1] = np.dot(a,x[n]) + np.random.randn(D)
f[:,n+1] = np.dot(c,x[n+1])
y[:,n+1] = f[:,n+1] + 3*np.random.randn(M)
return (y, f)
def demo(M=6, N=200, D=3, maxiter=100, debug=False, seed=42, rotate=True,
         precompute=False, plot=True, monitor=True):
    """
    Run the demo for linear state-space model.

    Simulates (M, N) data with three latent components, hides ~30% of the
    values plus a contiguous time window, runs VB inference with latent
    dimensionality D, and (optionally) plots the reconstruction against
    the true signal and the noisy observations.
    """

    # Use deterministic random numbers
    if seed is not None:
        np.random.seed(seed)

    # Get data
    (y, f) = simulate_data(M, N)

    # Add missing values randomly
    mask = random.mask(M, N, p=0.3)
    # Add missing values to a period of time
    mask[:,30:80] = False
    y[~mask] = np.nan # BayesPy doesn't require this. Just for plotting.

    # Run inference
    Q = infer(y, D,
              mask=mask,
              rotate=rotate,
              debug=debug,
              monitor=monitor,
              maxiter=maxiter)

    if plot:
        # Show results
        plt.figure()
        bpplt.timeseries_normal(Q['F'], scale=2)
        bpplt.timeseries(f, 'b-')
        bpplt.timeseries(y, 'r.')
        plt.show()
# Command-line driver: parse long options and forward them to demo().
if __name__ == '__main__':
    import sys, getopt, os
    try:
        opts, args = getopt.getopt(sys.argv[1:],
                                   "",
                                   ["m=",
                                    "n=",
                                    "d=",
                                    "seed=",
                                    "maxiter=",
                                    "debug",
                                    "precompute",
                                    "no-plot",
                                    "no-monitor",
                                    "no-rotation"])
    except getopt.GetoptError:
        print('python lssm.py <options>')
        print('--m=<INT>        Dimensionality of data vectors')
        print('--n=<INT>        Number of data vectors')
        print('--d=<INT>        Dimensionality of the latent vectors in the model')
        print('--no-rotation    Do not apply speed-up rotations')
        print('--maxiter=<INT>  Maximum number of VB iterations')
        print('--seed=<INT>     Seed (integer) for the random number generator')
        print('--debug          Check that the rotations are implemented correctly')
        print('--no-plot        Do not plot the results')
        print('--no-monitor     Do not plot distributions during learning')
        print('--precompute     Precompute some moments when rotating. May '
              'speed up or slow down.')
        sys.exit(2)

    kwargs = {}
    for opt, arg in opts:
        if opt == "--no-rotation":
            kwargs["rotate"] = False
        elif opt == "--maxiter":
            kwargs["maxiter"] = int(arg)
        elif opt == "--debug":
            kwargs["debug"] = True
        elif opt == "--precompute":
            kwargs["precompute"] = True
        elif opt == "--seed":
            kwargs["seed"] = int(arg)
        elif opt == "--m":
            kwargs["M"] = int(arg)
        elif opt == "--n":
            kwargs["N"] = int(arg)
        elif opt == "--d":
            kwargs["D"] = int(arg)
        # Bug fix: the original used 'opt in ("--no-plot")', which is a
        # substring test against a plain string (the parentheses did not
        # create a tuple); use equality like the other branches.
        elif opt == "--no-plot":
            kwargs["plot"] = False
        elif opt == "--no-monitor":
            kwargs["monitor"] = False
        else:
            raise ValueError("Unhandled option given")

    demo(**kwargs)
|
nipunbatra/bayespy
|
bayespy/demos/lssm.py
|
Python
|
gpl-3.0
| 11,413
|
[
"Gaussian"
] |
4565e9c26e3a837d2cfb4d044bc9f87fedd0d3cabca9a74a999d694d4671156e
|
#!/usr/bin/env python
# Copyright (C) 2004 Rune Linding & Lars Juhl Jensen - EMBL
# The DisEMBL is licensed under the GPL license
# (http://www.opensource.org/licenses/gpl-license.php)
# DisEMBL pipeline
# Modified to work with current versions of Biopython (1.7+)
# by Shyam Saladi (saladi1@illinois.edu), Janauary 2013
# Bio:SeqIO completely replaces Bio:Fasta
from string import *
from sys import argv
from Bio import File
from Bio import SeqIO
import fpformat
import sys
import tempfile
import os
from os import system,popen3
# change these to the correct paths
NN_bin = os.environ['NN_bin']
SG_bin = os.environ['SG_bin']
def JensenNet(sequence):
    """Run the disorder neural network (NN_bin) on one protein sequence.

    Writes the sequence to a temp file, pipes it through the external NN
    binary, and parses the three per-residue output columns.
    Returns (COILS, HOTLOOPS, REM465) as parallel float lists.

    NOTE(review): tempfile.mktemp() is race-prone (the file is created
    after the name is chosen); mkstemp() would be safer.
    """
    outFile = tempfile.mktemp()
    inFile= tempfile.mktemp()
    open(inFile,'w').write(sequence+'\n')
    system(NN_bin + '< ' + inFile +' > ' + outFile)
    REM465 = []
    COILS = []
    HOTLOOPS = []
    resultsFile = open(outFile,'r')
    results = resultsFile.readlines()
    resultsFile.close()
    # column order in the NN output: coils, hotloops, rem465
    for result in results:
        coil = float(fpformat.fix(split(result)[0],6))
        COILS.append(coil)
        hotloop = float(fpformat.fix(split(result)[1],6))
        HOTLOOPS.append(hotloop)
        rem465 = float(fpformat.fix(split(result)[2],6))
        REM465.append(rem465)
    os.remove(inFile)
    os.remove(outFile)
    return COILS, HOTLOOPS, REM465
def SavitzkyGolay(window,derivative,datalist):
    """Smooth datalist with the external Savitzky-Golay binary (SG_bin).

    Feeds the values through the SG filter with symmetric window
    `window` and derivative order `derivative`; negative filter outputs
    are clamped to 0.  Returns the smoothed values as a list.

    NOTE: Python 2 only — uses backtick repr syntax, a print statement,
    and os.popen3.
    """
    # shrink the window for short inputs (Python 2 integer division)
    if len(datalist) < 2*window:
        window = len(datalist)/2
    elif window == 0:
        window = 1
    stdin, stdout, stderr = popen3(SG_bin + ' -V0 -D' + str(derivative) + ' -n' + str(window)+','+str(window))
    for data in datalist:
        stdin.write(`data`+'\n')
    try:
        stdin.close()
    except:
        print stderr.readlines()
    results = stdout.readlines()
    stdout.close()
    SG_results = []
    # clamp negative filter output to zero
    for result in results:
        f = float(fpformat.fix(result,6))
        if f < 0:
            SG_results.append(0)
        else:
            SG_results.append(f)
    return SG_results
def getSlices(NNdata, fold, join_frame, peak_frame, expect_val):
    """Extract disorder segments from a per-residue score list.

    A segment is a maximal run of scores >= expect_val whose peak reaches
    fold * expect_val.  Segments closer than join_frame residues are
    merged, and segments shorter than peak_frame residues are dropped.
    Returns a list of [start, end] index pairs (inclusive).
    """
    segments = []
    start = end = None
    peak = 0.0
    for idx, score in enumerate(NNdata):
        if start is not None:
            # currently inside a segment
            if score < expect_val:
                if peak >= fold * expect_val:
                    segments.append([start, end])
                start = None
            else:
                end = idx
                peak = max(peak, score)
        elif score >= expect_val:
            # open a new segment
            start = end = idx
            peak = score
    # close a segment that runs to the end of the data
    if start is not None and peak >= fold * expect_val:
        segments.append([start, end])
    # merge nearby segments, then drop the too-short ones
    pos = 0
    while pos < len(segments):
        if pos + 1 < len(segments) and segments[pos + 1][0] - segments[pos][1] <= join_frame:
            segments[pos] = [segments[pos][0], segments[pos + 1][1]]
            del segments[pos + 1]
        elif segments[pos][1] - segments[pos][0] + 1 < peak_frame:
            del segments[pos]
        else:
            pos += 1
    return segments
def reportSlicesTXT(slices, sequence):
    """Print the slice coordinates and an annotated sequence.

    Residue positions are reported 1-based; in the printed sequence,
    residues inside a slice are uppercase and the rest lowercase.
    `lower`/`upper` come from the Python 2 string module (star import).
    """
    if slices == []:
        s = lower(sequence)
    else:
        # leading residues before the first slice
        if slices[0][0] > 0:
            s = lower(sequence[0:slices[0][0]])
        else:
            s = ''
        for i in range(len(slices)):
            if i > 0:
                sys.stdout.write(', ')
            sys.stdout.write( str(slices[i][0]+1) + '-' + str(slices[i][1]+1) )
            s = s + upper(sequence[slices[i][0]:(slices[i][1]+1)])
            # residues between this slice and the next (or the tail)
            if i < len(slices)-1:
                s = s + lower(sequence[(slices[i][1]+1):(slices[i+1][0])])
            elif slices[i][1] < len(sequence)-1:
                s = s + lower(sequence[(slices[i][1]+1):(len(sequence))])
    print ''
    print s
def runDisEMBLpipeline():
    """Drive the full DisEMBL pipeline from command-line arguments.

    Expected argv: smooth_frame peak_frame join_frame fold_coils
    fold_hotloops fold_rem465 fasta_file [mode], where mode is
    'default' (slice report) or 'scores' (per-residue TAB output).
    Python 2 only (print statements).
    """
    try:
        smooth_frame = int(sys.argv[1])
        peak_frame = int(sys.argv[2])
        join_frame = int(sys.argv[3])
        fold_coils = float(sys.argv[4])
        fold_hotloops = float(sys.argv[5])
        fold_rem465 = float(sys.argv[6])
        file = str(sys.argv[7])
        try:
            mode = sys.argv[8]
        except:
            mode = 'default'
    except:
        # any parsing problem prints usage and aborts
        print '\nDisEMBL.py smooth_frame peak_frame join_frame fold_coils fold_hotloops fold_rem465 sequence_file [mode]\n'
        print 'A default run would be: ./DisEMBL.py 8 8 4 1.2 1.4 1.2 fasta_file'
        print 'Mode: "default"(nothing) or "scores" which will give scores per residue in TAB seperated format'
        raise SystemExit
    db = open(file,'r')
    print ' ____  _     _____ __  __ ____  _     _  _   _'
    print '|  _ \(_)___| ____|  \/  | __ )| |   / || || |'
    print '| | | | / __|  _| | |\/| |  _ \| |   | || || |_'
    print '| |_| | \__ \ |___| |  | | |_) | |___| ||__   _|'
    print '|____/|_|___/_____|_|  |_|____/|_____|_(_) |_|'
    print '# Copyright (C) 2004 - Rune Linding & Lars Juhl Jensen '
    print '# EMBL Biocomputing Unit - Heidelberg - Germany '
    print '#'
    for cur_record in SeqIO.parse(db, "fasta"):
        sequence = upper(str(cur_record.seq.tostring()))
        # Run NN
        COILS_raw, HOTLOOPS_raw, REM465_raw = JensenNet(sequence)
        # Run Savitzky-Golay
        REM465_smooth = SavitzkyGolay(smooth_frame,0,REM465_raw)
        COILS_smooth = SavitzkyGolay(smooth_frame,0,COILS_raw)
        HOTLOOPS_smooth = SavitzkyGolay(smooth_frame,0,HOTLOOPS_raw)
        if mode == 'default':
            # fixed expectation values per predictor (0.43 / 0.50 / 0.086)
            sys.stdout.write('> '+cur_record.id+'_COILS ')
            reportSlicesTXT( getSlices(COILS_smooth, fold_coils, join_frame, peak_frame, 0.43), sequence )
            sys.stdout.write('> '+cur_record.id+'_REM465 ')
            reportSlicesTXT( getSlices(REM465_smooth, fold_rem465, join_frame, peak_frame, 0.50), sequence )
            sys.stdout.write('> '+cur_record.id+'_HOTLOOPS ')
            reportSlicesTXT( getSlices(HOTLOOPS_smooth, fold_hotloops, join_frame, peak_frame, 0.086), sequence )
            sys.stdout.write('\n')
        elif mode == 'scores':
            sys.stdout.write('# RESIDUE COILS REM465 HOTLOOPS\n')
            for i in range(len(REM465_smooth)):
                sys.stdout.write(sequence[i]+'\t'+fpformat.fix(COILS_smooth[i],5)+'\t'+fpformat.fix(REM465_smooth[i],5)+'\t'+fpformat.fix(HOTLOOPS_smooth[i],5)+'\n')
        else:
            sys.stderr.write('Wrong mode given: '+mode+'\n')
            raise SystemExit
    db.close()
    return

# module runs the pipeline on import/execution
runDisEMBLpipeline()
|
jurnho/DisEMBL-1.4-fix
|
DisEMBL-1.4/DisEMBL.py
|
Python
|
gpl-2.0
| 6,498
|
[
"Biopython"
] |
d0655f00ed8137358586cbfc79c15888f39272d352e6ec2923901bd8f0b44262
|
"""
This class is used to define the plot using the plot attributes.
"""
from DIRAC import S_OK
from DIRAC.MonitoringSystem.Client.Types.PilotMonitoring import PilotMonitoring
from DIRAC.MonitoringSystem.private.Plotters.BasePlotter import BasePlotter
class WMSHistoryPlotter(BasePlotter):
    """
    .. class:: WMSHistoryPlotter

    It is used to create the plots for the PilotMonitoring type.

    param: str _typeName monitoring type
    param: list _typeKeyFields list of keys what we monitor (list of attributes)
    """

    _typeName = "PilotMonitoring"
    _typeKeyFields = PilotMonitoring().keyFields

    def reportNumberOfSubmissions(self, reportRequest):
        """Retrieve the total number of pilot submissions from the database.

        :param dict reportRequest: contains attributes used to create the plot.
        :return: S_OK or S_ERROR {'data':value1, 'granularity':value2} value1 is a dictionary, value2 is the bucket length
        """
        retVal = self._getTimedData(
            startTime=reportRequest["startTime"],
            endTime=reportRequest["endTime"],
            selectField="NumTotal",
            preCondDict=reportRequest["condDict"],
            metadataDict=None,
        )
        if not retVal["OK"]:
            return retVal
        dataDict, granularity = retVal["Value"]
        return S_OK({"data": dataDict, "granularity": granularity})

    def _plotNumberOfSubmissions(self, reportRequest, plotInfo, filename):
        """Create the plot of the total number of pilot submissions.

        :param dict reportRequest: plot attributes
        :param dict plotInfo: contains all the data which are used to create the plot
        :param str filename:
        :return: S_OK or S_ERROR { 'plot' : value1, 'thumbnail' : value2 } value1 and value2 are TRUE/FALSE
        """
        metadata = {
            "title": "Pilot Submissions by %s" % reportRequest["grouping"],
            "starttime": reportRequest["startTime"],
            "endtime": reportRequest["endTime"],
            "span": plotInfo["granularity"],
            "skipEdgeColor": True,
            "ylabel": "Submissions",
        }
        # Pad empty buckets with zeros so the stacked line plot is continuous.
        plotInfo["data"] = self._fillWithZero(
            granularity=plotInfo["granularity"],
            startEpoch=reportRequest["startTime"],
            endEpoch=reportRequest["endTime"],
            dataDict=plotInfo["data"],
        )
        return self._generateStackedLinePlot(filename=filename, dataDict=plotInfo["data"], metadata=metadata)

    def reportNumSucceeded(self, reportRequest):
        """Retrieve the number of succeeded pilot submissions from the database.

        :param dict reportRequest: contains attributes used to create the plot.
        :return: S_OK or S_ERROR {'data':value1, 'granularity':value2} value1 is a dictionary, value2 is the bucket length
        """
        retVal = self._getTimedData(
            startTime=reportRequest["startTime"],
            endTime=reportRequest["endTime"],
            selectField="NumSucceeded",
            preCondDict=reportRequest["condDict"],
            metadataDict=None,
        )
        if not retVal["OK"]:
            return retVal
        dataDict, granularity = retVal["Value"]
        return S_OK({"data": dataDict, "granularity": granularity})

    def _plotNumSucceeded(self, reportRequest, plotInfo, filename):
        """Create the plot of the number of succeeded pilot submissions.

        :param dict reportRequest: plot attributes
        :param dict plotInfo: contains all the data which are used to create the plot
        :param str filename:
        :return: S_OK or S_ERROR { 'plot' : value1, 'thumbnail' : value2 } value1 and value2 are TRUE/FALSE
        """
        metadata = {
            # Fixed title: was the garbled "SuSubmissions by %s".
            "title": "Succeeded Submissions by %s" % reportRequest["grouping"],
            "starttime": reportRequest["startTime"],
            "endtime": reportRequest["endTime"],
            "span": plotInfo["granularity"],
            "skipEdgeColor": True,
            # Capitalised for consistency with _plotNumberOfSubmissions.
            "ylabel": "Submissions",
        }
        # Pad empty buckets with zeros so the stacked line plot is continuous.
        plotInfo["data"] = self._fillWithZero(
            granularity=plotInfo["granularity"],
            startEpoch=reportRequest["startTime"],
            endEpoch=reportRequest["endTime"],
            dataDict=plotInfo["data"],
        )
        return self._generateStackedLinePlot(filename=filename, dataDict=plotInfo["data"], metadata=metadata)
|
ic-hep/DIRAC
|
src/DIRAC/MonitoringSystem/private/Plotters/PilotMonitoringPlotter.py
|
Python
|
gpl-3.0
| 4,264
|
[
"DIRAC"
] |
29bcdd632d7f1109382d78afed534f3a148709e6b622825c86b1d1d773ba137e
|
import unittest
from test import support
from itertools import *
from weakref import proxy
from decimal import Decimal
from fractions import Fraction
import sys
import operator
import random
import copy
import pickle
from functools import reduce
# Platform Py_ssize_t bounds; used below to exercise overflow boundaries
# in count()/islice() arguments.
maxsize = support.MAX_Py_ssize_t
minsize = -maxsize-1
def lzip(*args):
    """Like zip(), but eagerly materialised into a list of tuples."""
    return [pair for pair in zip(*args)]
def onearg(x):
    """Test function of one argument: returns its argument doubled."""
    doubled = 2 * x
    return doubled
def errfunc(*args):
    """Test function that unconditionally raises ValueError."""
    raise ValueError()
def gen3():
    """Non-restartable source sequence yielding 0, 1, 2."""
    yield from (0, 1, 2)
def isEven(x):
    """Test predicate: true when x is divisible by two."""
    remainder = x % 2
    return remainder == 0
def isOdd(x):
    """Test predicate: true when x is not divisible by two."""
    remainder = x % 2
    return remainder == 1
class StopNow:
    """Iterator that is exhausted from the very first next() call,
    emulating an empty iterable."""

    def __iter__(self):
        return self

    def __next__(self):
        raise StopIteration()
def take(n, seq):
    """Convenience function for partially consuming a long or infinite iterable."""
    first_n = islice(seq, n)
    return list(first_n)
def prod(iterable):
    """Return the product of all elements, 1 for an empty iterable."""
    result = 1
    for factor in iterable:
        result = result * factor
    return result
def fact(n):
    """Factorial of n (1 for n <= 1)."""
    result = 1
    for k in range(2, n + 1):
        result *= k
    return result
class TestBasicOps(unittest.TestCase):
def test_chain(self):
def chain2(*iterables):
'Pure python version in the docs'
for it in iterables:
for element in it:
yield element
for c in (chain, chain2):
self.assertEqual(list(c('abc', 'def')), list('abcdef'))
self.assertEqual(list(c('abc')), list('abc'))
self.assertEqual(list(c('')), [])
self.assertEqual(take(4, c('abc', 'def')), list('abcd'))
self.assertRaises(TypeError, list,c(2, 3))
def test_chain_from_iterable(self):
self.assertEqual(list(chain.from_iterable(['abc', 'def'])), list('abcdef'))
self.assertEqual(list(chain.from_iterable(['abc'])), list('abc'))
self.assertEqual(list(chain.from_iterable([''])), [])
self.assertEqual(take(4, chain.from_iterable(['abc', 'def'])), list('abcd'))
self.assertRaises(TypeError, list, chain.from_iterable([2, 3]))
def test_combinations(self):
self.assertRaises(TypeError, combinations, 'abc') # missing r argument
self.assertRaises(TypeError, combinations, 'abc', 2, 1) # too many arguments
self.assertRaises(TypeError, combinations, None) # pool is not iterable
self.assertRaises(ValueError, combinations, 'abc', -2) # r is negative
self.assertEqual(list(combinations('abc', 32)), []) # r > n
self.assertEqual(list(combinations(range(4), 3)),
[(0,1,2), (0,1,3), (0,2,3), (1,2,3)])
def combinations1(iterable, r):
'Pure python version shown in the docs'
pool = tuple(iterable)
n = len(pool)
if r > n:
return
indices = list(range(r))
yield tuple(pool[i] for i in indices)
while 1:
for i in reversed(range(r)):
if indices[i] != i + n - r:
break
else:
return
indices[i] += 1
for j in range(i+1, r):
indices[j] = indices[j-1] + 1
yield tuple(pool[i] for i in indices)
def combinations2(iterable, r):
'Pure python version shown in the docs'
pool = tuple(iterable)
n = len(pool)
for indices in permutations(range(n), r):
if sorted(indices) == list(indices):
yield tuple(pool[i] for i in indices)
def combinations3(iterable, r):
'Pure python version from cwr()'
pool = tuple(iterable)
n = len(pool)
for indices in combinations_with_replacement(range(n), r):
if len(set(indices)) == r:
yield tuple(pool[i] for i in indices)
for n in range(7):
values = [5*x-12 for x in range(n)]
for r in range(n+2):
result = list(combinations(values, r))
self.assertEqual(len(result), 0 if r>n else fact(n) / fact(r) / fact(n-r)) # right number of combs
self.assertEqual(len(result), len(set(result))) # no repeats
self.assertEqual(result, sorted(result)) # lexicographic order
for c in result:
self.assertEqual(len(c), r) # r-length combinations
self.assertEqual(len(set(c)), r) # no duplicate elements
self.assertEqual(list(c), sorted(c)) # keep original ordering
self.assertTrue(all(e in values for e in c)) # elements taken from input iterable
self.assertEqual(list(c),
[e for e in values if e in c]) # comb is a subsequence of the input iterable
self.assertEqual(result, list(combinations1(values, r))) # matches first pure python version
self.assertEqual(result, list(combinations2(values, r))) # matches second pure python version
self.assertEqual(result, list(combinations3(values, r))) # matches second pure python version
# Test implementation detail: tuple re-use
self.assertEqual(len(set(map(id, combinations('abcde', 3)))), 1)
self.assertNotEqual(len(set(map(id, list(combinations('abcde', 3))))), 1)
def test_combinations_with_replacement(self):
cwr = combinations_with_replacement
self.assertRaises(TypeError, cwr, 'abc') # missing r argument
self.assertRaises(TypeError, cwr, 'abc', 2, 1) # too many arguments
self.assertRaises(TypeError, cwr, None) # pool is not iterable
self.assertRaises(ValueError, cwr, 'abc', -2) # r is negative
self.assertEqual(list(cwr('ABC', 2)),
[('A','A'), ('A','B'), ('A','C'), ('B','B'), ('B','C'), ('C','C')])
def cwr1(iterable, r):
'Pure python version shown in the docs'
# number items returned: (n+r-1)! / r! / (n-1)! when n>0
pool = tuple(iterable)
n = len(pool)
if not n and r:
return
indices = [0] * r
yield tuple(pool[i] for i in indices)
while 1:
for i in reversed(range(r)):
if indices[i] != n - 1:
break
else:
return
indices[i:] = [indices[i] + 1] * (r - i)
yield tuple(pool[i] for i in indices)
def cwr2(iterable, r):
'Pure python version shown in the docs'
pool = tuple(iterable)
n = len(pool)
for indices in product(range(n), repeat=r):
if sorted(indices) == list(indices):
yield tuple(pool[i] for i in indices)
def numcombs(n, r):
if not n:
return 0 if r else 1
return fact(n+r-1) / fact(r)/ fact(n-1)
for n in range(7):
values = [5*x-12 for x in range(n)]
for r in range(n+2):
result = list(cwr(values, r))
self.assertEqual(len(result), numcombs(n, r)) # right number of combs
self.assertEqual(len(result), len(set(result))) # no repeats
self.assertEqual(result, sorted(result)) # lexicographic order
regular_combs = list(combinations(values, r)) # compare to combs without replacement
if n == 0 or r <= 1:
self.assertEqual(result, regular_combs) # cases that should be identical
else:
self.assertTrue(set(result) >= set(regular_combs)) # rest should be supersets of regular combs
for c in result:
self.assertEqual(len(c), r) # r-length combinations
noruns = [k for k,v in groupby(c)] # combo without consecutive repeats
self.assertEqual(len(noruns), len(set(noruns))) # no repeats other than consecutive
self.assertEqual(list(c), sorted(c)) # keep original ordering
self.assertTrue(all(e in values for e in c)) # elements taken from input iterable
self.assertEqual(noruns,
[e for e in values if e in c]) # comb is a subsequence of the input iterable
self.assertEqual(result, list(cwr1(values, r))) # matches first pure python version
self.assertEqual(result, list(cwr2(values, r))) # matches second pure python version
# Test implementation detail: tuple re-use
self.assertEqual(len(set(map(id, cwr('abcde', 3)))), 1)
self.assertNotEqual(len(set(map(id, list(cwr('abcde', 3))))), 1)
def test_permutations(self):
self.assertRaises(TypeError, permutations) # too few arguments
self.assertRaises(TypeError, permutations, 'abc', 2, 1) # too many arguments
self.assertRaises(TypeError, permutations, None) # pool is not iterable
self.assertRaises(ValueError, permutations, 'abc', -2) # r is negative
self.assertEqual(list(permutations('abc', 32)), []) # r > n
self.assertRaises(TypeError, permutations, 'abc', 's') # r is not an int or None
self.assertEqual(list(permutations(range(3), 2)),
[(0,1), (0,2), (1,0), (1,2), (2,0), (2,1)])
def permutations1(iterable, r=None):
'Pure python version shown in the docs'
pool = tuple(iterable)
n = len(pool)
r = n if r is None else r
if r > n:
return
indices = list(range(n))
cycles = list(range(n-r+1, n+1))[::-1]
yield tuple(pool[i] for i in indices[:r])
while n:
for i in reversed(range(r)):
cycles[i] -= 1
if cycles[i] == 0:
indices[i:] = indices[i+1:] + indices[i:i+1]
cycles[i] = n - i
else:
j = cycles[i]
indices[i], indices[-j] = indices[-j], indices[i]
yield tuple(pool[i] for i in indices[:r])
break
else:
return
def permutations2(iterable, r=None):
'Pure python version shown in the docs'
pool = tuple(iterable)
n = len(pool)
r = n if r is None else r
for indices in product(range(n), repeat=r):
if len(set(indices)) == r:
yield tuple(pool[i] for i in indices)
for n in range(7):
values = [5*x-12 for x in range(n)]
for r in range(n+2):
result = list(permutations(values, r))
self.assertEqual(len(result), 0 if r>n else fact(n) / fact(n-r)) # right number of perms
self.assertEqual(len(result), len(set(result))) # no repeats
self.assertEqual(result, sorted(result)) # lexicographic order
for p in result:
self.assertEqual(len(p), r) # r-length permutations
self.assertEqual(len(set(p)), r) # no duplicate elements
self.assertTrue(all(e in values for e in p)) # elements taken from input iterable
self.assertEqual(result, list(permutations1(values, r))) # matches first pure python version
self.assertEqual(result, list(permutations2(values, r))) # matches second pure python version
if r == n:
self.assertEqual(result, list(permutations(values, None))) # test r as None
self.assertEqual(result, list(permutations(values))) # test default r
# Test implementation detail: tuple re-use
self.assertEqual(len(set(map(id, permutations('abcde', 3)))), 1)
self.assertNotEqual(len(set(map(id, list(permutations('abcde', 3))))), 1)
def test_combinatorics(self):
# Test relationships between product(), permutations(),
# combinations() and combinations_with_replacement().
for n in range(6):
s = 'ABCDEFG'[:n]
for r in range(8):
prod = list(product(s, repeat=r))
cwr = list(combinations_with_replacement(s, r))
perm = list(permutations(s, r))
comb = list(combinations(s, r))
# Check size
self.assertEqual(len(prod), n**r)
self.assertEqual(len(cwr), (fact(n+r-1) / fact(r)/ fact(n-1)) if n else (not r))
self.assertEqual(len(perm), 0 if r>n else fact(n) / fact(n-r))
self.assertEqual(len(comb), 0 if r>n else fact(n) / fact(r) / fact(n-r))
# Check lexicographic order without repeated tuples
self.assertEqual(prod, sorted(set(prod)))
self.assertEqual(cwr, sorted(set(cwr)))
self.assertEqual(perm, sorted(set(perm)))
self.assertEqual(comb, sorted(set(comb)))
# Check interrelationships
self.assertEqual(cwr, [t for t in prod if sorted(t)==list(t)]) # cwr: prods which are sorted
self.assertEqual(perm, [t for t in prod if len(set(t))==r]) # perm: prods with no dups
self.assertEqual(comb, [t for t in perm if sorted(t)==list(t)]) # comb: perms that are sorted
self.assertEqual(comb, [t for t in cwr if len(set(t))==r]) # comb: cwrs without dups
self.assertEqual(comb, list(filter(set(cwr).__contains__, perm))) # comb: perm that is a cwr
self.assertEqual(comb, list(filter(set(perm).__contains__, cwr))) # comb: cwr that is a perm
self.assertEqual(comb, sorted(set(cwr) & set(perm))) # comb: both a cwr and a perm
def test_compress(self):
self.assertEqual(list(compress(data='ABCDEF', selectors=[1,0,1,0,1,1])), list('ACEF'))
self.assertEqual(list(compress('ABCDEF', [1,0,1,0,1,1])), list('ACEF'))
self.assertEqual(list(compress('ABCDEF', [0,0,0,0,0,0])), list(''))
self.assertEqual(list(compress('ABCDEF', [1,1,1,1,1,1])), list('ABCDEF'))
self.assertEqual(list(compress('ABCDEF', [1,0,1])), list('AC'))
self.assertEqual(list(compress('ABC', [0,1,1,1,1,1])), list('BC'))
n = 10000
data = chain.from_iterable(repeat(range(6), n))
selectors = chain.from_iterable(repeat((0, 1)))
self.assertEqual(list(compress(data, selectors)), [1,3,5] * n)
self.assertRaises(TypeError, compress, None, range(6)) # 1st arg not iterable
self.assertRaises(TypeError, compress, range(6), None) # 2nd arg not iterable
self.assertRaises(TypeError, compress, range(6)) # too few args
self.assertRaises(TypeError, compress, range(6), None) # too many args
def test_count(self):
self.assertEqual(lzip('abc',count()), [('a', 0), ('b', 1), ('c', 2)])
self.assertEqual(lzip('abc',count(3)), [('a', 3), ('b', 4), ('c', 5)])
self.assertEqual(take(2, lzip('abc',count(3))), [('a', 3), ('b', 4)])
self.assertEqual(take(2, zip('abc',count(-1))), [('a', -1), ('b', 0)])
self.assertEqual(take(2, zip('abc',count(-3))), [('a', -3), ('b', -2)])
self.assertRaises(TypeError, count, 2, 3, 4)
self.assertRaises(TypeError, count, 'a')
self.assertEqual(list(islice(count(maxsize-5), 10)),
list(range(maxsize-5, maxsize+5)))
self.assertEqual(list(islice(count(-maxsize-5), 10)),
list(range(-maxsize-5, -maxsize+5)))
self.assertEqual(list(islice(count(10, maxsize+5), 3)),
list(range(10, 10+3*(maxsize+5), maxsize+5)))
c = count(3)
self.assertEqual(repr(c), 'count(3)')
next(c)
self.assertEqual(repr(c), 'count(4)')
c = count(-9)
self.assertEqual(repr(c), 'count(-9)')
next(c)
self.assertEqual(repr(count(10.25)), 'count(10.25)')
self.assertEqual(next(c), -8)
for i in (-sys.maxsize-5, -sys.maxsize+5 ,-10, -1, 0, 10, sys.maxsize-5, sys.maxsize+5):
# Test repr (ignoring the L in longs)
r1 = repr(count(i)).replace('L', '')
r2 = 'count(%r)'.__mod__(i).replace('L', '')
self.assertEqual(r1, r2)
# check copy, deepcopy, pickle
for value in -3, 3, maxsize-5, maxsize+5:
c = count(value)
self.assertEqual(next(copy.copy(c)), value)
self.assertEqual(next(copy.deepcopy(c)), value)
self.assertEqual(next(pickle.loads(pickle.dumps(c))), value)
#check proper internal error handling for large "step' sizes
count(1, maxsize+5); sys.exc_info()
def test_count_with_stride(self):
self.assertEqual(lzip('abc',count(2,3)), [('a', 2), ('b', 5), ('c', 8)])
self.assertEqual(lzip('abc',count(start=2,step=3)),
[('a', 2), ('b', 5), ('c', 8)])
self.assertEqual(lzip('abc',count(step=-1)),
[('a', 0), ('b', -1), ('c', -2)])
self.assertEqual(lzip('abc',count(2,0)), [('a', 2), ('b', 2), ('c', 2)])
self.assertEqual(lzip('abc',count(2,1)), [('a', 2), ('b', 3), ('c', 4)])
self.assertEqual(lzip('abc',count(2,3)), [('a', 2), ('b', 5), ('c', 8)])
self.assertEqual(take(20, count(maxsize-15, 3)), take(20, range(maxsize-15, maxsize+100, 3)))
self.assertEqual(take(20, count(-maxsize-15, 3)), take(20, range(-maxsize-15,-maxsize+100, 3)))
self.assertEqual(take(3, count(2, 3.25-4j)), [2, 5.25-4j, 8.5-8j])
self.assertEqual(take(3, count(Decimal('1.1'), Decimal('.1'))),
[Decimal('1.1'), Decimal('1.2'), Decimal('1.3')])
self.assertEqual(take(3, count(Fraction(2,3), Fraction(1,7))),
[Fraction(2,3), Fraction(17,21), Fraction(20,21)])
self.assertEqual(repr(take(3, count(10, 2.5))), repr([10, 12.5, 15.0]))
c = count(3, 5)
self.assertEqual(repr(c), 'count(3, 5)')
next(c)
self.assertEqual(repr(c), 'count(8, 5)')
c = count(-9, 0)
self.assertEqual(repr(c), 'count(-9, 0)')
next(c)
self.assertEqual(repr(c), 'count(-9, 0)')
c = count(-9, -3)
self.assertEqual(repr(c), 'count(-9, -3)')
next(c)
self.assertEqual(repr(c), 'count(-12, -3)')
self.assertEqual(repr(c), 'count(-12, -3)')
self.assertEqual(repr(count(10.5, 1.25)), 'count(10.5, 1.25)')
self.assertEqual(repr(count(10.5, 1)), 'count(10.5)') # suppress step=1 when it's an int
self.assertEqual(repr(count(10.5, 1.00)), 'count(10.5, 1.0)') # do show float values lilke 1.0
for i in (-sys.maxsize-5, -sys.maxsize+5 ,-10, -1, 0, 10, sys.maxsize-5, sys.maxsize+5):
for j in (-sys.maxsize-5, -sys.maxsize+5 ,-10, -1, 0, 1, 10, sys.maxsize-5, sys.maxsize+5):
# Test repr (ignoring the L in longs)
r1 = repr(count(i, j)).replace('L', '')
if j == 1:
r2 = ('count(%r)' % i).replace('L', '')
else:
r2 = ('count(%r, %r)' % (i, j)).replace('L', '')
self.assertEqual(r1, r2)
def test_cycle(self):
self.assertEqual(take(10, cycle('abc')), list('abcabcabca'))
self.assertEqual(list(cycle('')), [])
self.assertRaises(TypeError, cycle)
self.assertRaises(TypeError, cycle, 5)
self.assertEqual(list(islice(cycle(gen3()),10)), [0,1,2,0,1,2,0,1,2,0])
def test_groupby(self):
# Check whether it accepts arguments correctly
self.assertEqual([], list(groupby([])))
self.assertEqual([], list(groupby([], key=id)))
self.assertRaises(TypeError, list, groupby('abc', []))
self.assertRaises(TypeError, groupby, None)
self.assertRaises(TypeError, groupby, 'abc', lambda x:x, 10)
# Check normal input
s = [(0, 10, 20), (0, 11,21), (0,12,21), (1,13,21), (1,14,22),
(2,15,22), (3,16,23), (3,17,23)]
dup = []
for k, g in groupby(s, lambda r:r[0]):
for elem in g:
self.assertEqual(k, elem[0])
dup.append(elem)
self.assertEqual(s, dup)
# Check nested case
dup = []
for k, g in groupby(s, lambda r:r[0]):
for ik, ig in groupby(g, lambda r:r[2]):
for elem in ig:
self.assertEqual(k, elem[0])
self.assertEqual(ik, elem[2])
dup.append(elem)
self.assertEqual(s, dup)
# Check case where inner iterator is not used
keys = [k for k, g in groupby(s, lambda r:r[0])]
expectedkeys = set([r[0] for r in s])
self.assertEqual(set(keys), expectedkeys)
self.assertEqual(len(keys), len(expectedkeys))
# Exercise pipes and filters style
s = 'abracadabra'
# sort s | uniq
r = [k for k, g in groupby(sorted(s))]
self.assertEqual(r, ['a', 'b', 'c', 'd', 'r'])
# sort s | uniq -d
r = [k for k, g in groupby(sorted(s)) if list(islice(g,1,2))]
self.assertEqual(r, ['a', 'b', 'r'])
# sort s | uniq -c
r = [(len(list(g)), k) for k, g in groupby(sorted(s))]
self.assertEqual(r, [(5, 'a'), (2, 'b'), (1, 'c'), (1, 'd'), (2, 'r')])
# sort s | uniq -c | sort -rn | head -3
r = sorted([(len(list(g)) , k) for k, g in groupby(sorted(s))], reverse=True)[:3]
self.assertEqual(r, [(5, 'a'), (2, 'r'), (2, 'b')])
# iter.__next__ failure
class ExpectedError(Exception):
pass
def delayed_raise(n=0):
for i in range(n):
yield 'yo'
raise ExpectedError
def gulp(iterable, keyp=None, func=list):
return [func(g) for k, g in groupby(iterable, keyp)]
# iter.__next__ failure on outer object
self.assertRaises(ExpectedError, gulp, delayed_raise(0))
# iter.__next__ failure on inner object
self.assertRaises(ExpectedError, gulp, delayed_raise(1))
# __cmp__ failure
class DummyCmp:
def __eq__(self, dst):
raise ExpectedError
s = [DummyCmp(), DummyCmp(), None]
# __eq__ failure on outer object
self.assertRaises(ExpectedError, gulp, s, func=id)
# __eq__ failure on inner object
self.assertRaises(ExpectedError, gulp, s)
# keyfunc failure
def keyfunc(obj):
if keyfunc.skip > 0:
keyfunc.skip -= 1
return obj
else:
raise ExpectedError
# keyfunc failure on outer object
keyfunc.skip = 0
self.assertRaises(ExpectedError, gulp, [None], keyfunc)
keyfunc.skip = 1
self.assertRaises(ExpectedError, gulp, [None, None], keyfunc)
def test_filter(self):
self.assertEqual(list(filter(isEven, range(6))), [0,2,4])
self.assertEqual(list(filter(None, [0,1,0,2,0])), [1,2])
self.assertEqual(list(filter(bool, [0,1,0,2,0])), [1,2])
self.assertEqual(take(4, filter(isEven, count())), [0,2,4,6])
self.assertRaises(TypeError, filter)
self.assertRaises(TypeError, filter, lambda x:x)
self.assertRaises(TypeError, filter, lambda x:x, range(6), 7)
self.assertRaises(TypeError, filter, isEven, 3)
self.assertRaises(TypeError, next, filter(range(6), range(6)))
def test_filterfalse(self):
self.assertEqual(list(filterfalse(isEven, range(6))), [1,3,5])
self.assertEqual(list(filterfalse(None, [0,1,0,2,0])), [0,0,0])
self.assertEqual(list(filterfalse(bool, [0,1,0,2,0])), [0,0,0])
self.assertEqual(take(4, filterfalse(isEven, count())), [1,3,5,7])
self.assertRaises(TypeError, filterfalse)
self.assertRaises(TypeError, filterfalse, lambda x:x)
self.assertRaises(TypeError, filterfalse, lambda x:x, range(6), 7)
self.assertRaises(TypeError, filterfalse, isEven, 3)
self.assertRaises(TypeError, next, filterfalse(range(6), range(6)))
def test_zip(self):
# XXX This is rather silly now that builtin zip() calls zip()...
ans = [(x,y) for x, y in zip('abc',count())]
self.assertEqual(ans, [('a', 0), ('b', 1), ('c', 2)])
self.assertEqual(list(zip('abc', range(6))), lzip('abc', range(6)))
self.assertEqual(list(zip('abcdef', range(3))), lzip('abcdef', range(3)))
self.assertEqual(take(3,zip('abcdef', count())), lzip('abcdef', range(3)))
self.assertEqual(list(zip('abcdef')), lzip('abcdef'))
self.assertEqual(list(zip()), lzip())
self.assertRaises(TypeError, zip, 3)
self.assertRaises(TypeError, zip, range(3), 3)
# Check tuple re-use (implementation detail)
self.assertEqual([tuple(list(pair)) for pair in zip('abc', 'def')],
lzip('abc', 'def'))
self.assertEqual([pair for pair in zip('abc', 'def')],
lzip('abc', 'def'))
ids = list(map(id, zip('abc', 'def')))
self.assertEqual(min(ids), max(ids))
ids = list(map(id, list(zip('abc', 'def'))))
self.assertEqual(len(dict.fromkeys(ids)), len(ids))
def test_ziplongest(self):
for args in [
['abc', range(6)],
[range(6), 'abc'],
[range(1000), range(2000,2100), range(3000,3050)],
[range(1000), range(0), range(3000,3050), range(1200), range(1500)],
[range(1000), range(0), range(3000,3050), range(1200), range(1500), range(0)],
]:
target = [tuple([arg[i] if i < len(arg) else None for arg in args])
for i in range(max(map(len, args)))]
self.assertEqual(list(zip_longest(*args)), target)
self.assertEqual(list(zip_longest(*args, **{})), target)
target = [tuple((e is None and 'X' or e) for e in t) for t in target] # Replace None fills with 'X'
self.assertEqual(list(zip_longest(*args, **dict(fillvalue='X'))), target)
self.assertEqual(take(3,zip_longest('abcdef', count())), list(zip('abcdef', range(3)))) # take 3 from infinite input
self.assertEqual(list(zip_longest()), list(zip()))
self.assertEqual(list(zip_longest([])), list(zip([])))
self.assertEqual(list(zip_longest('abcdef')), list(zip('abcdef')))
self.assertEqual(list(zip_longest('abc', 'defg', **{})),
list(zip(list('abc')+[None], 'defg'))) # empty keyword dict
self.assertRaises(TypeError, zip_longest, 3)
self.assertRaises(TypeError, zip_longest, range(3), 3)
for stmt in [
"zip_longest('abc', fv=1)",
"zip_longest('abc', fillvalue=1, bogus_keyword=None)",
]:
try:
eval(stmt, globals(), locals())
except TypeError:
pass
else:
self.fail('Did not raise Type in: ' + stmt)
# Check tuple re-use (implementation detail)
self.assertEqual([tuple(list(pair)) for pair in zip_longest('abc', 'def')],
list(zip('abc', 'def')))
self.assertEqual([pair for pair in zip_longest('abc', 'def')],
list(zip('abc', 'def')))
ids = list(map(id, zip_longest('abc', 'def')))
self.assertEqual(min(ids), max(ids))
ids = list(map(id, list(zip_longest('abc', 'def'))))
self.assertEqual(len(dict.fromkeys(ids)), len(ids))
def test_bug_7244(self):
class Repeater:
# this class is similar to itertools.repeat
def __init__(self, o, t, e):
self.o = o
self.t = int(t)
self.e = e
def __iter__(self): # its iterator is itself
return self
def __next__(self):
if self.t > 0:
self.t -= 1
return self.o
else:
raise self.e
# Formerly this code in would fail in debug mode
# with Undetected Error and Stop Iteration
r1 = Repeater(1, 3, StopIteration)
r2 = Repeater(2, 4, StopIteration)
def run(r1, r2):
result = []
for i, j in zip_longest(r1, r2, fillvalue=0):
with support.captured_output('stdout'):
print((i, j))
result.append((i, j))
return result
self.assertEqual(run(r1, r2), [(1,2), (1,2), (1,2), (0,2)])
# Formerly, the RuntimeError would be lost
# and StopIteration would stop as expected
r1 = Repeater(1, 3, RuntimeError)
r2 = Repeater(2, 4, StopIteration)
it = zip_longest(r1, r2, fillvalue=0)
self.assertEqual(next(it), (1, 2))
self.assertEqual(next(it), (1, 2))
self.assertEqual(next(it), (1, 2))
self.assertRaises(RuntimeError, next, it)
def test_product(self):
for args, result in [
([], [()]), # zero iterables
(['ab'], [('a',), ('b',)]), # one iterable
([range(2), range(3)], [(0,0), (0,1), (0,2), (1,0), (1,1), (1,2)]), # two iterables
([range(0), range(2), range(3)], []), # first iterable with zero length
([range(2), range(0), range(3)], []), # middle iterable with zero length
([range(2), range(3), range(0)], []), # last iterable with zero length
]:
self.assertEqual(list(product(*args)), result)
for r in range(4):
self.assertEqual(list(product(*(args*r))),
list(product(*args, **dict(repeat=r))))
self.assertEqual(len(list(product(*[range(7)]*6))), 7**6)
self.assertRaises(TypeError, product, range(6), None)
def product1(*args, **kwds):
pools = list(map(tuple, args)) * kwds.get('repeat', 1)
n = len(pools)
if n == 0:
yield ()
return
if any(len(pool) == 0 for pool in pools):
return
indices = [0] * n
yield tuple(pool[i] for pool, i in zip(pools, indices))
while 1:
for i in reversed(range(n)): # right to left
if indices[i] == len(pools[i]) - 1:
continue
indices[i] += 1
for j in range(i+1, n):
indices[j] = 0
yield tuple(pool[i] for pool, i in zip(pools, indices))
break
else:
return
def product2(*args, **kwds):
'Pure python version used in docs'
pools = list(map(tuple, args)) * kwds.get('repeat', 1)
result = [[]]
for pool in pools:
result = [x+[y] for x in result for y in pool]
for prod in result:
yield tuple(prod)
argtypes = ['', 'abc', '', range(0), range(4), dict(a=1, b=2, c=3),
set('abcdefg'), range(11), tuple(range(13))]
for i in range(100):
args = [random.choice(argtypes) for j in range(random.randrange(5))]
expected_len = prod(map(len, args))
self.assertEqual(len(list(product(*args))), expected_len)
self.assertEqual(list(product(*args)), list(product1(*args)))
self.assertEqual(list(product(*args)), list(product2(*args)))
args = map(iter, args)
self.assertEqual(len(list(product(*args))), expected_len)
# Test implementation detail: tuple re-use
self.assertEqual(len(set(map(id, product('abc', 'def')))), 1)
self.assertNotEqual(len(set(map(id, list(product('abc', 'def'))))), 1)
def test_repeat(self):
self.assertEqual(list(repeat(object='a', times=3)), ['a', 'a', 'a'])
self.assertEqual(lzip(range(3),repeat('a')),
[(0, 'a'), (1, 'a'), (2, 'a')])
self.assertEqual(list(repeat('a', 3)), ['a', 'a', 'a'])
self.assertEqual(take(3, repeat('a')), ['a', 'a', 'a'])
self.assertEqual(list(repeat('a', 0)), [])
self.assertEqual(list(repeat('a', -3)), [])
self.assertRaises(TypeError, repeat)
self.assertRaises(TypeError, repeat, None, 3, 4)
self.assertRaises(TypeError, repeat, None, 'a')
r = repeat(1+0j)
self.assertEqual(repr(r), 'repeat((1+0j))')
r = repeat(1+0j, 5)
self.assertEqual(repr(r), 'repeat((1+0j), 5)')
list(r)
self.assertEqual(repr(r), 'repeat((1+0j), 0)')
def test_map(self):
self.assertEqual(list(map(operator.pow, range(3), range(1,7))),
[0**1, 1**2, 2**3])
def tupleize(*args):
return args
self.assertEqual(list(map(tupleize, 'abc', range(5))),
[('a',0),('b',1),('c',2)])
self.assertEqual(list(map(tupleize, 'abc', count())),
[('a',0),('b',1),('c',2)])
self.assertEqual(take(2,map(tupleize, 'abc', count())),
[('a',0),('b',1)])
self.assertEqual(list(map(operator.pow, [])), [])
self.assertRaises(TypeError, map)
self.assertRaises(TypeError, list, map(None, range(3), range(3)))
self.assertRaises(TypeError, map, operator.neg)
self.assertRaises(TypeError, next, map(10, range(5)))
self.assertRaises(ValueError, next, map(errfunc, [4], [5]))
self.assertRaises(TypeError, next, map(onearg, [4], [5]))
def test_starmap(self):
self.assertEqual(list(starmap(operator.pow, zip(range(3), range(1,7)))),
[0**1, 1**2, 2**3])
self.assertEqual(take(3, starmap(operator.pow, zip(count(), count(1)))),
[0**1, 1**2, 2**3])
self.assertEqual(list(starmap(operator.pow, [])), [])
self.assertEqual(list(starmap(operator.pow, [iter([4,5])])), [4**5])
self.assertRaises(TypeError, list, starmap(operator.pow, [None]))
self.assertRaises(TypeError, starmap)
self.assertRaises(TypeError, starmap, operator.pow, [(4,5)], 'extra')
self.assertRaises(TypeError, next, starmap(10, [(4,5)]))
self.assertRaises(ValueError, next, starmap(errfunc, [(4,5)]))
self.assertRaises(TypeError, next, starmap(onearg, [(4,5)]))
def test_islice(self):
for args in [ # islice(args) should agree with range(args)
(10, 20, 3),
(10, 3, 20),
(10, 20),
(10, 3),
(20,)
]:
self.assertEqual(list(islice(range(100), *args)),
list(range(*args)))
for args, tgtargs in [ # Stop when seqn is exhausted
((10, 110, 3), ((10, 100, 3))),
((10, 110), ((10, 100))),
((110,), (100,))
]:
self.assertEqual(list(islice(range(100), *args)),
list(range(*tgtargs)))
# Test stop=None
self.assertEqual(list(islice(range(10), None)), list(range(10)))
self.assertEqual(list(islice(range(10), None, None)), list(range(10)))
self.assertEqual(list(islice(range(10), None, None, None)), list(range(10)))
self.assertEqual(list(islice(range(10), 2, None)), list(range(2, 10)))
self.assertEqual(list(islice(range(10), 1, None, 2)), list(range(1, 10, 2)))
# Test number of items consumed SF #1171417
it = iter(range(10))
self.assertEqual(list(islice(it, 3)), list(range(3)))
self.assertEqual(list(it), list(range(3, 10)))
# Test invalid arguments
self.assertRaises(TypeError, islice, range(10))
self.assertRaises(TypeError, islice, range(10), 1, 2, 3, 4)
self.assertRaises(ValueError, islice, range(10), -5, 10, 1)
self.assertRaises(ValueError, islice, range(10), 1, -5, -1)
self.assertRaises(ValueError, islice, range(10), 1, 10, -1)
self.assertRaises(ValueError, islice, range(10), 1, 10, 0)
self.assertRaises(ValueError, islice, range(10), 'a')
self.assertRaises(ValueError, islice, range(10), 'a', 1)
self.assertRaises(ValueError, islice, range(10), 1, 'a')
self.assertRaises(ValueError, islice, range(10), 'a', 1, 1)
self.assertRaises(ValueError, islice, range(10), 1, 'a', 1)
self.assertEqual(len(list(islice(count(), 1, 10, maxsize))), 1)
# Issue #10323: Less islice in a predictable state
c = count()
self.assertEqual(list(islice(c, 1, 3, 50)), [1])
self.assertEqual(next(c), 3)
def test_takewhile(self):
    """Exercise takewhile(): basic truncation, empty input, argument
    validation, predicate error propagation, and exhaustion."""
    data = [1, 3, 5, 20, 2, 4, 6, 8]
    underten = lambda x: x<10
    # Stops at the first element failing the predicate (20).
    self.assertEqual(list(takewhile(underten, data)), [1, 3, 5])
    self.assertEqual(list(takewhile(underten, [])), [])
    self.assertRaises(TypeError, takewhile)
    self.assertRaises(TypeError, takewhile, operator.pow)
    self.assertRaises(TypeError, takewhile, operator.pow, [(4,5)], 'extra')
    # Non-callable predicate fails on first next(); predicate errors propagate.
    self.assertRaises(TypeError, next, takewhile(10, [(4,5)]))
    self.assertRaises(ValueError, next, takewhile(errfunc, [(4,5)]))
    # Once stopped, the iterator stays stopped.
    t = takewhile(bool, [1, 1, 1, 0, 0, 0])
    self.assertEqual(list(t), [1, 1, 1])
    self.assertRaises(StopIteration, next, t)
def test_dropwhile(self):
    """Exercise dropwhile(): basic skipping, empty input, argument
    validation, and predicate error propagation."""
    data = [1, 3, 5, 20, 2, 4, 6, 8]
    underten = lambda x: x<10
    # Drops the leading run satisfying the predicate, then yields the rest
    # unconditionally (2, 4, 6, 8 are < 10 but still included).
    self.assertEqual(list(dropwhile(underten, data)), [20, 2, 4, 6, 8])
    self.assertEqual(list(dropwhile(underten, [])), [])
    self.assertRaises(TypeError, dropwhile)
    self.assertRaises(TypeError, dropwhile, operator.pow)
    self.assertRaises(TypeError, dropwhile, operator.pow, [(4,5)], 'extra')
    self.assertRaises(TypeError, next, dropwhile(10, [(4,5)]))
    self.assertRaises(ValueError, next, dropwhile(errfunc, [(4,5)]))
def test_tee(self):
    """Exercise tee(): interleaving patterns, deallocation of either
    branch, n-way splitting, re-instantiation, and weak references."""
    n = 200

    def irange(n):
        # Plain generator so tee() cannot take any sequence shortcut.
        for i in range(n):
            yield i

    a, b = tee([])        # test empty iterator
    self.assertEqual(list(a), [])
    self.assertEqual(list(b), [])

    a, b = tee(irange(n)) # test 100% interleaved
    self.assertEqual(lzip(a,b), lzip(range(n), range(n)))

    a, b = tee(irange(n)) # test 0% interleaved
    self.assertEqual(list(a), list(range(n)))
    self.assertEqual(list(b), list(range(n)))

    a, b = tee(irange(n)) # test dealloc of leading iterator
    for i in range(100):
        self.assertEqual(next(a), i)
    del a
    # The trailing branch must still see the full sequence.
    self.assertEqual(list(b), list(range(n)))

    a, b = tee(irange(n)) # test dealloc of trailing iterator
    for i in range(100):
        self.assertEqual(next(a), i)
    del b
    self.assertEqual(list(a), list(range(100, n)))

    for j in range(5):    # test randomly interleaved
        order = [0]*n + [1]*n
        random.shuffle(order)
        lists = ([], [])
        its = tee(irange(n))
        for i in order:
            value = next(its[i])
            lists[i].append(value)
        self.assertEqual(lists[0], list(range(n)))
        self.assertEqual(lists[1], list(range(n)))

    # test argument format checking
    self.assertRaises(TypeError, tee)
    self.assertRaises(TypeError, tee, 3)
    self.assertRaises(TypeError, tee, [1,2], 'x')
    self.assertRaises(TypeError, tee, [1,2], 3, 'x')

    # tee object should be instantiable
    a, b = tee('abc')
    c = type(a)('def')
    self.assertEqual(list(c), list('def'))

    # test long-lagged and multi-way split
    a, b, c = tee(range(2000), 3)
    for i in range(100):
        self.assertEqual(next(a), i)
    self.assertEqual(list(b), list(range(2000)))
    self.assertEqual([next(c), next(c)], list(range(2)))
    self.assertEqual(list(a), list(range(100,2000)))
    self.assertEqual(list(c), list(range(2,2000)))

    # test values of n
    self.assertRaises(TypeError, tee, 'abc', 'invalid')
    self.assertRaises(ValueError, tee, [], -1)
    for n in range(5):
        result = tee('abc', n)
        self.assertEqual(type(result), tuple)
        self.assertEqual(len(result), n)
        self.assertEqual([list(x) for x in result], [list('abc')]*n)

    # tee pass-through to copyable iterator: tee of a tee branch returns
    # the branch itself rather than wrapping it again.
    a, b = tee('abc')
    c, d = tee(a)
    self.assertTrue(a is c)

    # test tee_new
    t1, t2 = tee('abc')
    tnew = type(t1)
    self.assertRaises(TypeError, tnew)
    self.assertRaises(TypeError, tnew, 10)
    t3 = tnew(t1)
    self.assertTrue(list(t1) == list(t2) == list(t3) == list('abc'))

    # test that tee objects are weak referencable
    a, b = tee(range(10))
    p = proxy(a)
    self.assertEqual(getattr(p, '__class__'), type(b))
    del a
    # Once the referent is gone the proxy must raise ReferenceError.
    self.assertRaises(ReferenceError, getattr, p, '__class__')
def test_StopIteration(self):
    """Verify every itertools iterator raises StopIteration when built
    from empty or immediately-stopping inputs."""
    self.assertRaises(StopIteration, next, zip())

    for f in (chain, cycle, zip, groupby):
        self.assertRaises(StopIteration, next, f([]))
        # StopNow is presumably a helper class defined earlier in the file
        # whose __next__ raises StopIteration immediately — verify.
        self.assertRaises(StopIteration, next, f(StopNow()))

    self.assertRaises(StopIteration, next, islice([], None))
    self.assertRaises(StopIteration, next, islice(StopNow(), None))

    # Both tee branches of an empty/stopped source are themselves stopped.
    p, q = tee([])
    self.assertRaises(StopIteration, next, p)
    self.assertRaises(StopIteration, next, q)
    p, q = tee(StopNow())
    self.assertRaises(StopIteration, next, p)
    self.assertRaises(StopIteration, next, q)

    self.assertRaises(StopIteration, next, repeat(None, 0))

    for f in (filter, filterfalse, map, takewhile, dropwhile, starmap):
        self.assertRaises(StopIteration, next, f(lambda x:x, []))
        self.assertRaises(StopIteration, next, f(lambda x:x, StopNow()))
class TestExamples(unittest.TestCase):
    """Smoke-test the documented examples for each itertools function.

    One short assertion per tool, mirroring the examples in the library
    reference documentation.
    """

    def test_chain(self):
        self.assertEqual(''.join(chain('ABC', 'DEF')), 'ABCDEF')

    def test_chain_from_iterable(self):
        self.assertEqual(''.join(chain.from_iterable(['ABC', 'DEF'])), 'ABCDEF')

    def test_combinations(self):
        self.assertEqual(list(combinations('ABCD', 2)),
                         [('A','B'), ('A','C'), ('A','D'), ('B','C'), ('B','D'), ('C','D')])
        self.assertEqual(list(combinations(range(4), 3)),
                         [(0,1,2), (0,1,3), (0,2,3), (1,2,3)])

    def test_combinations_with_replacement(self):
        self.assertEqual(list(combinations_with_replacement('ABC', 2)),
                         [('A','A'), ('A','B'), ('A','C'), ('B','B'), ('B','C'), ('C','C')])

    def test_compress(self):
        self.assertEqual(list(compress('ABCDEF', [1,0,1,0,1,1])), list('ACEF'))

    def test_count(self):
        self.assertEqual(list(islice(count(10), 5)), [10, 11, 12, 13, 14])

    def test_cycle(self):
        self.assertEqual(list(islice(cycle('ABCD'), 12)), list('ABCDABCDABCD'))

    def test_dropwhile(self):
        self.assertEqual(list(dropwhile(lambda x: x<5, [1,4,6,4,1])), [6,4,1])

    def test_groupby(self):
        self.assertEqual([k for k, g in groupby('AAAABBBCCDAABBB')],
                         list('ABCDAB'))
        self.assertEqual([(list(g)) for k, g in groupby('AAAABBBCCD')],
                         [list('AAAA'), list('BBB'), list('CC'), list('D')])

    def test_filter(self):
        self.assertEqual(list(filter(lambda x: x%2, range(10))), [1,3,5,7,9])

    def test_filterfalse(self):
        self.assertEqual(list(filterfalse(lambda x: x%2, range(10))), [0,2,4,6,8])

    def test_map(self):
        self.assertEqual(list(map(pow, (2,3,10), (5,2,3))), [32, 9, 1000])

    def test_islice(self):
        self.assertEqual(list(islice('ABCDEFG', 2)), list('AB'))
        self.assertEqual(list(islice('ABCDEFG', 2, 4)), list('CD'))
        self.assertEqual(list(islice('ABCDEFG', 2, None)), list('CDEFG'))
        self.assertEqual(list(islice('ABCDEFG', 0, None, 2)), list('ACEG'))

    def test_zip(self):
        self.assertEqual(list(zip('ABCD', 'xy')), [('A', 'x'), ('B', 'y')])

    def test_zip_longest(self):
        self.assertEqual(list(zip_longest('ABCD', 'xy', fillvalue='-')),
                         [('A', 'x'), ('B', 'y'), ('C', '-'), ('D', '-')])

    def test_permutations(self):
        self.assertEqual(list(permutations('ABCD', 2)),
                         list(map(tuple, 'AB AC AD BA BC BD CA CB CD DA DB DC'.split())))
        self.assertEqual(list(permutations(range(3))),
                         [(0,1,2), (0,2,1), (1,0,2), (1,2,0), (2,0,1), (2,1,0)])

    def test_product(self):
        self.assertEqual(list(product('ABCD', 'xy')),
                         list(map(tuple, 'Ax Ay Bx By Cx Cy Dx Dy'.split())))
        self.assertEqual(list(product(range(2), repeat=3)),
                         [(0,0,0), (0,0,1), (0,1,0), (0,1,1),
                          (1,0,0), (1,0,1), (1,1,0), (1,1,1)])

    def test_repeat(self):
        self.assertEqual(list(repeat(10, 3)), [10, 10, 10])

    # Fixed typo: was "test_stapmap", which still ran under unittest's
    # test_* discovery but was misnamed.
    def test_starmap(self):
        self.assertEqual(list(starmap(pow, [(2,5), (3,2), (10,3)])),
                         [32, 9, 1000])

    def test_takewhile(self):
        self.assertEqual(list(takewhile(lambda x: x<5, [1,4,6,4,1])), [1,4])
class TestGC(unittest.TestCase):
    """Verify itertools iterators participate in cyclic garbage collection.

    Each test builds a reference cycle: a container is fed into an
    iterator, and the iterator is then appended to that same container.
    If the iterator type did not support GC, the cycle would leak.
    """

    def makecycle(self, iterator, container):
        # Close the cycle (container -> iterator -> container), advance the
        # iterator once so it holds live internal state, then drop both
        # local references.  Collection is left to the cyclic GC.
        container.append(iterator)
        next(iterator)
        del container, iterator

    def test_chain(self):
        a = []
        self.makecycle(chain(a), a)

    def test_chain_from_iterable(self):
        a = []
        self.makecycle(chain.from_iterable([a]), a)

    def test_combinations(self):
        a = []
        self.makecycle(combinations([1,2,a,3], 3), a)

    def test_combinations_with_replacement(self):
        a = []
        self.makecycle(combinations_with_replacement([1,2,a,3], 3), a)

    def test_compress(self):
        a = []
        self.makecycle(compress('ABCDEF', [1,0,1,0,1,0]), a)

    def test_count(self):
        a = []
        # Int subclass carries the container in a class attribute so the
        # count() object participates in the cycle through its operands.
        Int = type('Int', (int,), dict(x=a))
        self.makecycle(count(Int(0), Int(1)), a)

    def test_cycle(self):
        a = []
        self.makecycle(cycle([a]*2), a)

    def test_dropwhile(self):
        a = []
        self.makecycle(dropwhile(bool, [0, a, a]), a)

    def test_groupby(self):
        a = []
        self.makecycle(groupby([a]*2, lambda x:x), a)

    def test_issue2246(self):
        # Issue 2246 -- the _grouper iterator was not included in GC
        n = 10
        keyfunc = lambda x: x
        for i, j in groupby(range(n), key=keyfunc):
            keyfunc.__dict__.setdefault('x',[]).append(j)

    def test_filter(self):
        a = []
        self.makecycle(filter(lambda x:True, [a]*2), a)

    def test_filterfalse(self):
        a = []
        self.makecycle(filterfalse(lambda x:False, a), a)

    def test_zip(self):
        a = []
        self.makecycle(zip([a]*2, [a]*3), a)

    def test_zip_longest(self):
        a = []
        self.makecycle(zip_longest([a]*2, [a]*3), a)
        # Also cycle through the fillvalue reference.
        b = [a, None]
        self.makecycle(zip_longest([a]*2, [a]*3, fillvalue=b), a)

    def test_map(self):
        a = []
        self.makecycle(map(lambda x:x, [a]*2), a)

    def test_islice(self):
        a = []
        self.makecycle(islice([a]*2, None), a)

    def test_permutations(self):
        a = []
        self.makecycle(permutations([1,2,a,3], 3), a)

    def test_product(self):
        a = []
        self.makecycle(product([1,2,a,3], repeat=3), a)

    def test_repeat(self):
        a = []
        self.makecycle(repeat(a), a)

    def test_starmap(self):
        a = []
        self.makecycle(starmap(lambda *t: t, [(a,a)]*2), a)

    def test_takewhile(self):
        a = []
        self.makecycle(takewhile(bool, [1, 0, a, a]), a)
# Helper sequence/iterator wrappers used by TestVariousIteratorArgs to
# exercise every flavor of the iterator protocol (and deliberate
# violations of it).

def R(seqn):
    'Regular generator'
    for i in seqn:
        yield i

class G:
    'Sequence using __getitem__'
    def __init__(self, seqn):
        self.seqn = seqn
    def __getitem__(self, i):
        return self.seqn[i]

class I:
    'Sequence using iterator protocol'
    def __init__(self, seqn):
        self.seqn = seqn
        self.i = 0
    def __iter__(self):
        return self
    def __next__(self):
        if self.i >= len(self.seqn): raise StopIteration
        v = self.seqn[self.i]
        self.i += 1
        return v

class Ig:
    'Sequence using iterator protocol defined with a generator'
    def __init__(self, seqn):
        self.seqn = seqn
        self.i = 0
    def __iter__(self):
        for val in self.seqn:
            yield val

class X:
    'Missing __getitem__ and __iter__'
    # Has __next__ but is not iterable -- passing it to an itertools
    # constructor must raise TypeError.
    def __init__(self, seqn):
        self.seqn = seqn
        self.i = 0
    def __next__(self):
        if self.i >= len(self.seqn): raise StopIteration
        v = self.seqn[self.i]
        self.i += 1
        return v

class N:
    'Iterator missing __next__()'
    # __iter__ returns self, but there is no __next__ -- iteration must
    # raise TypeError.
    def __init__(self, seqn):
        self.seqn = seqn
        self.i = 0
    def __iter__(self):
        return self

class E:
    'Test propagation of exceptions'
    def __init__(self, seqn):
        self.seqn = seqn
        self.i = 0
    def __iter__(self):
        return self
    def __next__(self):
        # Deliberate ZeroDivisionError on every step; callers assert it
        # propagates unchanged through the itertools wrapper.
        3 // 0

class S:
    'Test immediate stop'
    def __init__(self, seqn):
        pass
    def __iter__(self):
        return self
    def __next__(self):
        raise StopIteration

def L(seqn):
    'Test multiple tiers of iterators'
    return chain(map(lambda x:x, R(Ig(G(seqn)))))
class TestVariousIteratorArgs(unittest.TestCase):
    """Feed every itertools function each iterator-protocol variant.

    For each input style (G, I, Ig, S, L, R) the result must match the
    plain-iteration equivalent; the broken variants must fail uniformly:
    X and N raise TypeError, E propagates ZeroDivisionError.
    """

    def test_chain(self):
        for s in ("123", "", range(1000), ('do', 1.2), range(2000,2200,5)):
            for g in (G, I, Ig, S, L, R):
                self.assertEqual(list(chain(g(s))), list(g(s)))
                self.assertEqual(list(chain(g(s), g(s))), list(g(s))+list(g(s)))
            self.assertRaises(TypeError, list, chain(X(s)))
            self.assertRaises(TypeError, list, chain(N(s)))
            self.assertRaises(ZeroDivisionError, list, chain(E(s)))

    def test_compress(self):
        for s in ("123", "", range(1000), ('do', 1.2), range(2000,2200,5)):
            n = len(s)  # NOTE(review): n is unused here -- looks vestigial
            for g in (G, I, Ig, S, L, R):
                self.assertEqual(list(compress(g(s), repeat(1))), list(g(s)))
            self.assertRaises(TypeError, compress, X(s), repeat(1))
            self.assertRaises(TypeError, compress, N(s), repeat(1))
            self.assertRaises(ZeroDivisionError, list, compress(E(s), repeat(1)))

    def test_product(self):
        # product() consumes its inputs eagerly, so even construction with
        # a broken iterable fails.
        for s in ("123", "", range(1000), ('do', 1.2), range(2000,2200,5)):
            self.assertRaises(TypeError, product, X(s))
            self.assertRaises(TypeError, product, N(s))
            self.assertRaises(ZeroDivisionError, product, E(s))

    def test_cycle(self):
        for s in ("123", "", range(1000), ('do', 1.2), range(2000,2200,5)):
            for g in (G, I, Ig, S, L, R):
                tgtlen = len(s) * 3
                expected = list(g(s))*3
                actual = list(islice(cycle(g(s)), tgtlen))
                self.assertEqual(actual, expected)
            self.assertRaises(TypeError, cycle, X(s))
            self.assertRaises(TypeError, cycle, N(s))
            self.assertRaises(ZeroDivisionError, list, cycle(E(s)))

    def test_groupby(self):
        for s in (range(10), range(0), range(1000), (7,11), range(2000,2200,5)):
            for g in (G, I, Ig, S, L, R):
                # With distinct consecutive values every element is its
                # own group, so the keys reproduce the input.
                self.assertEqual([k for k, sb in groupby(g(s))], list(g(s)))
            self.assertRaises(TypeError, groupby, X(s))
            self.assertRaises(TypeError, groupby, N(s))
            self.assertRaises(ZeroDivisionError, list, groupby(E(s)))

    def test_filter(self):
        for s in (range(10), range(0), range(1000), (7,11), range(2000,2200,5)):
            for g in (G, I, Ig, S, L, R):
                self.assertEqual(list(filter(isEven, g(s))),
                                 [x for x in g(s) if isEven(x)])
            self.assertRaises(TypeError, filter, isEven, X(s))
            self.assertRaises(TypeError, filter, isEven, N(s))
            self.assertRaises(ZeroDivisionError, list, filter(isEven, E(s)))

    def test_filterfalse(self):
        for s in (range(10), range(0), range(1000), (7,11), range(2000,2200,5)):
            for g in (G, I, Ig, S, L, R):
                self.assertEqual(list(filterfalse(isEven, g(s))),
                                 [x for x in g(s) if isOdd(x)])
            self.assertRaises(TypeError, filterfalse, isEven, X(s))
            self.assertRaises(TypeError, filterfalse, isEven, N(s))
            self.assertRaises(ZeroDivisionError, list, filterfalse(isEven, E(s)))

    def test_zip(self):
        for s in ("123", "", range(1000), ('do', 1.2), range(2000,2200,5)):
            for g in (G, I, Ig, S, L, R):
                self.assertEqual(list(zip(g(s))), lzip(g(s)))
                self.assertEqual(list(zip(g(s), g(s))), lzip(g(s), g(s)))
            self.assertRaises(TypeError, zip, X(s))
            self.assertRaises(TypeError, zip, N(s))
            self.assertRaises(ZeroDivisionError, list, zip(E(s)))

    def test_ziplongest(self):
        for s in ("123", "", range(1000), ('do', 1.2), range(2000,2200,5)):
            for g in (G, I, Ig, S, L, R):
                # Equal-length inputs: zip_longest degenerates to zip.
                self.assertEqual(list(zip_longest(g(s))), list(zip(g(s))))
                self.assertEqual(list(zip_longest(g(s), g(s))), list(zip(g(s), g(s))))
            self.assertRaises(TypeError, zip_longest, X(s))
            self.assertRaises(TypeError, zip_longest, N(s))
            self.assertRaises(ZeroDivisionError, list, zip_longest(E(s)))

    def test_map(self):
        for s in (range(10), range(0), range(100), (7,11), range(20,50,5)):
            for g in (G, I, Ig, S, L, R):
                self.assertEqual(list(map(onearg, g(s))),
                                 [onearg(x) for x in g(s)])
                self.assertEqual(list(map(operator.pow, g(s), g(s))),
                                 [x**x for x in g(s)])
            self.assertRaises(TypeError, map, onearg, X(s))
            self.assertRaises(TypeError, map, onearg, N(s))
            self.assertRaises(ZeroDivisionError, list, map(onearg, E(s)))

    def test_islice(self):
        for s in ("12345", "", range(1000), ('do', 1.2), range(2000,2200,5)):
            for g in (G, I, Ig, S, L, R):
                self.assertEqual(list(islice(g(s),1,None,2)), list(g(s))[1::2])
            self.assertRaises(TypeError, islice, X(s), 10)
            self.assertRaises(TypeError, islice, N(s), 10)
            self.assertRaises(ZeroDivisionError, list, islice(E(s), 10))

    def test_starmap(self):
        for s in (range(10), range(0), range(100), (7,11), range(20,50,5)):
            for g in (G, I, Ig, S, L, R):
                ss = lzip(s, s)
                self.assertEqual(list(starmap(operator.pow, g(ss))),
                                 [x**x for x in g(s)])
            self.assertRaises(TypeError, starmap, operator.pow, X(ss))
            self.assertRaises(TypeError, starmap, operator.pow, N(ss))
            self.assertRaises(ZeroDivisionError, list, starmap(operator.pow, E(ss)))

    def test_takewhile(self):
        for s in (range(10), range(0), range(1000), (7,11), range(2000,2200,5)):
            for g in (G, I, Ig, S, L, R):
                tgt = []
                for elem in g(s):
                    if not isEven(elem): break
                    tgt.append(elem)
                self.assertEqual(list(takewhile(isEven, g(s))), tgt)
            self.assertRaises(TypeError, takewhile, isEven, X(s))
            self.assertRaises(TypeError, takewhile, isEven, N(s))
            self.assertRaises(ZeroDivisionError, list, takewhile(isEven, E(s)))

    def test_dropwhile(self):
        for s in (range(10), range(0), range(1000), (7,11), range(2000,2200,5)):
            for g in (G, I, Ig, S, L, R):
                tgt = []
                for elem in g(s):
                    if not tgt and isOdd(elem): continue
                    tgt.append(elem)
                self.assertEqual(list(dropwhile(isOdd, g(s))), tgt)
            self.assertRaises(TypeError, dropwhile, isOdd, X(s))
            self.assertRaises(TypeError, dropwhile, isOdd, N(s))
            self.assertRaises(ZeroDivisionError, list, dropwhile(isOdd, E(s)))

    def test_tee(self):
        for s in ("123", "", range(1000), ('do', 1.2), range(2000,2200,5)):
            for g in (G, I, Ig, S, L, R):
                it1, it2 = tee(g(s))
                self.assertEqual(list(it1), list(g(s)))
                self.assertEqual(list(it2), list(g(s)))
            self.assertRaises(TypeError, tee, X(s))
            self.assertRaises(TypeError, tee, N(s))
            self.assertRaises(ZeroDivisionError, list, tee(E(s))[0])
class LengthTransparency(unittest.TestCase):
    """repeat() must expose __length_hint__/len for bounded repetition."""

    def test_repeat(self):
        # test.test_iterlen provides a len() that consults length hints.
        from test.test_iterlen import len
        self.assertEqual(len(repeat(None, 50)), 50)
        # An unbounded repeat has no length.
        self.assertRaises(TypeError, len, repeat(None))
class RegressionTests(unittest.TestCase):
    """Regressions for historical itertools crashes and misbehaviors."""

    def test_sf_793826(self):
        # Fix Armin Rigo's successful efforts to wreak havoc
        # (SF bug 793826): zip() reading from a tuple that is mutated
        # mid-iteration must not crash the interpreter.

        def mutatingtuple(tuple1, f, tuple2):
            # this builds a tuple t which is a copy of tuple1,
            # then calls f(t), then mutates t to be equal to tuple2
            # (needs len(tuple1) == len(tuple2)).
            def g(value, first=[1]):
                if first:
                    del first[:]
                    f(next(z))
                return value
            items = list(tuple2)
            items[1:1] = list(tuple1)
            gen = map(g, items)
            z = zip(*[gen]*len(tuple1))
            next(z)

        def f(t):
            global T
            T = t
            first[:] = list(T)

        first = []
        mutatingtuple((1,2,3), f, (4,5,6))
        second = list(T)
        self.assertEqual(first, second)

    def test_sf_950057(self):
        # Make sure that chain() and cycle() catch exceptions immediately
        # rather than when shifting between input sources
        def gen1():
            hist.append(0)
            yield 1
            hist.append(1)
            raise AssertionError
            hist.append(2)  # unreachable by design

        def gen2(x):
            hist.append(3)
            yield 2
            hist.append(4)
            if x:
                # NOTE(review): raising StopIteration inside a generator is
                # the pre-PEP 479 idiom; on modern Python it would become
                # RuntimeError -- this is period-correct for this test file.
                raise StopIteration

        hist = []
        self.assertRaises(AssertionError, list, chain(gen1(), gen2(False)))
        self.assertEqual(hist, [0,1])

        hist = []
        self.assertRaises(AssertionError, list, chain(gen1(), gen2(True)))
        self.assertEqual(hist, [0,1])

        hist = []
        self.assertRaises(AssertionError, list, cycle(gen1()))
        self.assertEqual(hist, [0,1])
class SubclassWithKwargsTest(unittest.TestCase):
    """Subclassing an itertools type must tolerate keyword arguments."""

    def test_keywords_in_subclass(self):
        # count is not subclassable...
        for cls in (repeat, zip, filter, filterfalse, chain, map,
                    starmap, islice, takewhile, dropwhile, cycle, compress):
            class Subclass(cls):
                def __init__(self, newarg=None, *args):
                    cls.__init__(self, *args)
            try:
                Subclass(newarg=1)
            except TypeError as err:
                # we expect type errors because of wrong argument count,
                # never a complaint about keyword arguments themselves
                self.assertFalse("does not take keyword arguments" in err.args[0])
libreftest = """ Doctest for examples in the library reference: libitertools.tex
>>> amounts = [120.15, 764.05, 823.14]
>>> for checknum, amount in zip(count(1200), amounts):
... print('Check %d is for $%.2f' % (checknum, amount))
...
Check 1200 is for $120.15
Check 1201 is for $764.05
Check 1202 is for $823.14
>>> import operator
>>> for cube in map(operator.pow, range(1,4), repeat(3)):
... print(cube)
...
1
8
27
>>> reportlines = ['EuroPython', 'Roster', '', 'alex', '', 'laura', '', 'martin', '', 'walter', '', 'samuele']
>>> for name in islice(reportlines, 3, None, 2):
... print(name.title())
...
Alex
Laura
Martin
Walter
Samuele
>>> from operator import itemgetter
>>> d = dict(a=1, b=2, c=1, d=2, e=1, f=2, g=3)
>>> di = sorted(sorted(d.items()), key=itemgetter(1))
>>> for k, g in groupby(di, itemgetter(1)):
... print(k, list(map(itemgetter(0), g)))
...
1 ['a', 'c', 'e']
2 ['b', 'd', 'f']
3 ['g']
# Find runs of consecutive numbers using groupby. The key to the solution
# is differencing with a range so that consecutive numbers all appear in
# same group.
>>> data = [ 1, 4,5,6, 10, 15,16,17,18, 22, 25,26,27,28]
>>> for k, g in groupby(enumerate(data), lambda t:t[0]-t[1]):
... print(list(map(operator.itemgetter(1), g)))
...
[1]
[4, 5, 6]
[10]
[15, 16, 17, 18]
[22]
[25, 26, 27, 28]
>>> def take(n, iterable):
... "Return first n items of the iterable as a list"
... return list(islice(iterable, n))
>>> def enumerate(iterable, start=0):
... return zip(count(start), iterable)
>>> def tabulate(function, start=0):
... "Return function(0), function(1), ..."
... return map(function, count(start))
>>> def nth(iterable, n, default=None):
... "Returns the nth item or a default value"
... return next(islice(iterable, n, None), default)
>>> def quantify(iterable, pred=bool):
... "Count how many times the predicate is true"
... return sum(map(pred, iterable))
>>> def padnone(iterable):
... "Returns the sequence elements and then returns None indefinitely"
... return chain(iterable, repeat(None))
>>> def ncycles(iterable, n):
... "Returns the sequence elements n times"
... return chain(*repeat(iterable, n))
>>> def dotproduct(vec1, vec2):
... return sum(map(operator.mul, vec1, vec2))
>>> def flatten(listOfLists):
... return list(chain.from_iterable(listOfLists))
>>> def repeatfunc(func, times=None, *args):
... "Repeat calls to func with specified arguments."
... " Example: repeatfunc(random.random)"
... if times is None:
... return starmap(func, repeat(args))
... else:
... return starmap(func, repeat(args, times))
>>> def pairwise(iterable):
... "s -> (s0,s1), (s1,s2), (s2, s3), ..."
... a, b = tee(iterable)
... try:
... next(b)
... except StopIteration:
... pass
... return zip(a, b)
>>> def grouper(n, iterable, fillvalue=None):
... "grouper(3, 'ABCDEFG', 'x') --> ABC DEF Gxx"
... args = [iter(iterable)] * n
... return zip_longest(*args, fillvalue=fillvalue)
>>> def roundrobin(*iterables):
... "roundrobin('ABC', 'D', 'EF') --> A D E B F C"
... # Recipe credited to George Sakkis
... pending = len(iterables)
... nexts = cycle(iter(it).__next__ for it in iterables)
... while pending:
... try:
... for next in nexts:
... yield next()
... except StopIteration:
... pending -= 1
... nexts = cycle(islice(nexts, pending))
>>> def powerset(iterable):
... "powerset([1,2,3]) --> () (1,) (2,) (3,) (1,2) (1,3) (2,3) (1,2,3)"
... s = list(iterable)
... return chain.from_iterable(combinations(s, r) for r in range(len(s)+1))
>>> def unique_everseen(iterable, key=None):
... "List unique elements, preserving order. Remember all elements ever seen."
... # unique_everseen('AAAABBBCCDAABBB') --> A B C D
... # unique_everseen('ABBCcAD', str.lower) --> A B C D
... seen = set()
... seen_add = seen.add
... if key is None:
... for element in iterable:
... if element not in seen:
... seen_add(element)
... yield element
... else:
... for element in iterable:
... k = key(element)
... if k not in seen:
... seen_add(k)
... yield element
>>> def unique_justseen(iterable, key=None):
... "List unique elements, preserving order. Remember only the element just seen."
... # unique_justseen('AAAABBBCCDAABBB') --> A B C D A B
... # unique_justseen('ABBCcAD', str.lower) --> A B C A D
... return map(next, map(itemgetter(1), groupby(iterable, key)))
This is not part of the examples but it tests to make sure the definitions
perform as purported.
>>> take(10, count())
[0, 1, 2, 3, 4, 5, 6, 7, 8, 9]
>>> list(enumerate('abc'))
[(0, 'a'), (1, 'b'), (2, 'c')]
>>> list(islice(tabulate(lambda x: 2*x), 4))
[0, 2, 4, 6]
>>> nth('abcde', 3)
'd'
>>> nth('abcde', 9) is None
True
>>> quantify(range(99), lambda x: x%2==0)
50
>>> a = [[1, 2, 3], [4, 5, 6]]
>>> flatten(a)
[1, 2, 3, 4, 5, 6]
>>> list(repeatfunc(pow, 5, 2, 3))
[8, 8, 8, 8, 8]
>>> import random
>>> take(5, map(int, repeatfunc(random.random)))
[0, 0, 0, 0, 0]
>>> list(pairwise('abcd'))
[('a', 'b'), ('b', 'c'), ('c', 'd')]
>>> list(pairwise([]))
[]
>>> list(pairwise('a'))
[]
>>> list(islice(padnone('abc'), 0, 6))
['a', 'b', 'c', None, None, None]
>>> list(ncycles('abc', 3))
['a', 'b', 'c', 'a', 'b', 'c', 'a', 'b', 'c']
>>> dotproduct([1,2,3], [4,5,6])
32
>>> list(grouper(3, 'abcdefg', 'x'))
[('a', 'b', 'c'), ('d', 'e', 'f'), ('g', 'x', 'x')]
>>> list(roundrobin('abc', 'd', 'ef'))
['a', 'd', 'e', 'b', 'f', 'c']
>>> list(powerset([1,2,3]))
[(), (1,), (2,), (3,), (1, 2), (1, 3), (2, 3), (1, 2, 3)]
>>> all(len(list(powerset(range(n)))) == 2**n for n in range(18))
True
>>> list(powerset('abcde')) == sorted(sorted(set(powerset('abcde'))), key=len)
True
>>> list(unique_everseen('AAAABBBCCDAABBB'))
['A', 'B', 'C', 'D']
>>> list(unique_everseen('ABBCcAD', str.lower))
['A', 'B', 'C', 'D']
>>> list(unique_justseen('AAAABBBCCDAABBB'))
['A', 'B', 'C', 'D', 'A', 'B']
>>> list(unique_justseen('ABBCcAD', str.lower))
['A', 'B', 'C', 'A', 'D']
"""
__test__ = {'libreftest' : libreftest}
def test_main(verbose=None):
    """Run all test classes, optionally check refcount stability, then
    run the doctests embedded in ``libreftest``."""
    test_classes = (TestBasicOps, TestVariousIteratorArgs, TestGC,
                    RegressionTests, LengthTransparency,
                    SubclassWithKwargsTest, TestExamples)
    support.run_unittest(*test_classes)

    # verify reference counting: only meaningful on debug builds, which
    # expose sys.gettotalrefcount.
    if verbose and hasattr(sys, "gettotalrefcount"):
        import gc
        counts = [None] * 5
        for i in range(len(counts)):
            support.run_unittest(*test_classes)
            gc.collect()
            counts[i] = sys.gettotalrefcount()
        print(counts)

    # doctest the examples in the library reference
    support.run_doctest(sys.modules[__name__], verbose)

if __name__ == "__main__":
    test_main(verbose=True)
|
mancoast/CPythonPyc_test
|
fail/314_test_itertools.py
|
Python
|
gpl-3.0
| 67,570
|
[
"GULP"
] |
123ec438a1a121480c404818f540d1dadad6acc9258f6db00d4cf9cf1bf2591d
|
# -*- coding: utf-8 -*-
#
#
# TheVirtualBrain-Framework Package. This package holds all Data Management, and
# Web-UI helpful to run brain-simulations. To use it, you also need do download
# TheVirtualBrain-Scientific Package (for simulators). See content of the
# documentation-folder for more details. See also http://www.thevirtualbrain.org
#
# (c) 2012-2013, Baycrest Centre for Geriatric Care ("Baycrest")
#
# This program is free software; you can redistribute it and/or modify it under
# the terms of the GNU General Public License version 2 as published by the Free
# Software Foundation. This program is distributed in the hope that it will be
# useful, but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public
# License for more details. You should have received a copy of the GNU General
# Public License along with this program; if not, you can download it here
# http://www.gnu.org/licenses/old-licenses/gpl-2.0
#
#
# CITATION:
# When using The Virtual Brain for scientific publications, please cite it as follows:
#
# Paula Sanz Leon, Stuart A. Knock, M. Marmaduke Woodman, Lia Domide,
# Jochen Mersmann, Anthony R. McIntosh, Viktor Jirsa (2013)
# The Virtual Brain: a simulator of primate brain network dynamics.
# Frontiers in Neuroinformatics (7:10. doi: 10.3389/fninf.2013.00010)
#
#
"""
.. moduleauthor:: Bogdan Neacsa <bogdan.neacsa@codemart.ro>
.. moduleauthor:: Ionel Ortelecan <ionel.ortelecan@codemart.ro>
"""
import cherrypy
import json
from copy import deepcopy
import tvb.interfaces.web.controllers.base_controller as base
import tvb.basic.traits.traited_interface as traited_interface
from tvb.interfaces.web.controllers.base_controller import using_template, settings
from tvb.interfaces.web.controllers.users_controller import logged
from tvb.interfaces.web.controllers.flow_controller import SelectedAdapterContext
from tvb.basic.traits.parameters_factory import get_traited_instance_for_name
from tvb.basic.logger.builder import get_logger
from tvb.core.adapters.abcadapter import ABCAdapter
from tvb.core.services.flow_service import FlowService
from tvb.core.services.operation_service import RANGE_PARAMETER_1, RANGE_PARAMETER_2
from tvb.adapters.visualizers.connectivity import ConnectivityViewer
from tvb.simulator.models import Model
from tvb.simulator.integrators import Integrator
from tvb.config import SIMULATOR_CLASS, SIMULATOR_MODULE
from tvb.datatypes import noise_framework
# Keys of burst-configuration simulation parameters read throughout this
# controller (see get_data_from_burst_configuration).
PARAM_CONNECTIVITY = 'connectivity'
PARAM_SURFACE = 'surface'
PARAM_MODEL = 'model'
PARAM_INTEGRATOR = 'integrator'
# Keys into the dict produced by the simulator adapter's convert_ui_inputs.
MODEL_PARAMETERS = 'model_parameters'
INTEGRATOR_PARAMETERS = 'integrator_parameters'
# Pattern for per-option model parameter names (filled with two values;
# presumably node index and parameter name -- confirm against callers).
PARAMS_MODEL_PATTERN = 'model_parameters_option_%s_%s'
class SpatioTemporalController(base.BaseController):
"""
Base class which contains methods related to spatio-temporal actions.
"""
def __init__(self):
    """Set up services, a logger, and the spatial-section submenu."""
    base.BaseController.__init__(self)
    self.flow_service = FlowService()
    self.logger = get_logger(__name__)
    # Entries rendered as the spatio-temporal submenu: one creation flow
    # for region-level stimuli and one for surface-level stimuli.
    editable_entities = [dict(link='/spatial/stimulus/region/step_1_submit/1/1', title='Region Stimulus',
                              subsection='regionstim', description='Create a new Stimulus on Region level'),
                         dict(link='/spatial/stimulus/surface/step_1_submit/1/1', title='Surface Stimulus',
                              subsection='surfacestim', description='Create a new Stimulus on Surface level')]
    self.submenu_list = editable_entities
@cherrypy.expose
@using_template('base_template')
@logged()
@settings()
def index(self, **data):
    """
    Displays the main page for the spatio temporal section.
    """
    # 'header_menu' renders only the section header/submenu; the page has
    # no main content of its own.
    template_specification = dict(title="Spatio temporal", data=data)
    template_specification['mainContent'] = 'header_menu'
    return self.fill_default_attributes(template_specification)
@staticmethod
def get_connectivity_parameters(input_connectivity, surface_data=None):
    """
    Build the full parameter dictionary needed to draw a connectivity.

    Combines the viewer's global parameters and paging info into a single
    dict, tagged with the GID of the connectivity being displayed.
    """
    viewer_params, viewer_pages = ConnectivityViewer().compute_connectivity_global_params(
        input_connectivity, surface_data)
    result = viewer_params
    result.update(viewer_pages)
    result['selectedConnectivityGid'] = input_connectivity.gid
    return result
def get_data_from_burst_configuration(self):
    """
    Returns the model, integrator, connectivity and surface instances
    from the burst configuration, as a 4-tuple.

    Returns ``(None, None, None, None)`` when no burst configuration is
    present in the session.  Redirects to /burst/ when model parameters
    are used as range values or when stored parameters are invalid.
    """
    ### Read from session current burst-configuration
    burst_configuration = base.get_from_session(base.KEY_BURST_CONFIG)
    if burst_configuration is None:
        # BUGFIX: previously returned a 3-tuple here while the success
        # path returns 4 values; callers unpacking four values crashed.
        return None, None, None, None
    first_range = burst_configuration.get_simulation_parameter_value(RANGE_PARAMETER_1)
    second_range = burst_configuration.get_simulation_parameter_value(RANGE_PARAMETER_2)
    # Ranging over model parameters is incompatible with this editor.
    if ((first_range is not None and str(first_range).startswith(MODEL_PARAMETERS)) or
            (second_range is not None and str(second_range).startswith(MODEL_PARAMETERS))):
        base.set_error_message("When configuring model parameters you are not allowed to specify range values.")
        raise cherrypy.HTTPRedirect("/burst/")
    group = self.flow_service.get_algorithm_by_module_and_class(SIMULATOR_MODULE, SIMULATOR_CLASS)[1]
    simulator_adapter = self.flow_service.build_adapter_instance(group)
    try:
        params_dict = simulator_adapter.convert_ui_inputs(burst_configuration.get_all_simulator_values()[0], False)
    except Exception as excep:
        self.logger.exception(excep)
        base.set_error_message("Some of the provided parameters have an invalid value.")
        raise cherrypy.HTTPRedirect("/burst/")
    ### Prepare Model instance
    model = burst_configuration.get_simulation_parameter_value(PARAM_MODEL)
    model_parameters = params_dict[MODEL_PARAMETERS]
    noise_framework.build_noise(model_parameters)
    try:
        model = get_traited_instance_for_name(model, Model, model_parameters)
    except Exception as ex:
        # Fall back to default model parameters rather than failing the page.
        self.logger.exception(ex)
        self.logger.info("Could not create the model instance with the given parameters. "
                         "A new model instance will be created with the default values.")
        model = get_traited_instance_for_name(model, Model, {})
    ### Prepare Integrator instance
    integrator = burst_configuration.get_simulation_parameter_value(PARAM_INTEGRATOR)
    integrator_parameters = params_dict[INTEGRATOR_PARAMETERS]
    noise_framework.build_noise(integrator_parameters)
    try:
        integrator = get_traited_instance_for_name(integrator, Integrator, integrator_parameters)
    except Exception as ex:
        # Same fallback strategy for the integrator.
        self.logger.exception(ex)
        self.logger.info("Could not create the integrator instance with the given parameters. "
                         "A new integrator instance will be created with the default values.")
        integrator = get_traited_instance_for_name(integrator, Integrator, {})
    ### Prepare Connectivity
    connectivity_gid = burst_configuration.get_simulation_parameter_value(PARAM_CONNECTIVITY)
    connectivity = ABCAdapter.load_entity_by_gid(connectivity_gid)
    ### Prepare Surface
    surface_gid = burst_configuration.get_simulation_parameter_value(PARAM_SURFACE)
    surface = None
    if surface_gid is not None and len(surface_gid):
        surface = ABCAdapter.load_entity_by_gid(surface_gid)
    return model, integrator, connectivity, surface
@staticmethod
def display_surface(surface_gid):
    """
    Generates the HTML for displaying the surface with the given ID.

    Loads the surface entity, remembers its GID in the session, and
    returns a template dict of JSON-encoded rendering URLs and metadata.
    """
    surface = ABCAdapter.load_entity_by_gid(surface_gid)
    base.add2session(PARAM_SURFACE, surface_gid)
    pick_vertices, pick_normals, pick_triangles = surface.get_urls_for_pick_rendering()
    vertices, normals, _, triangles, alphas, alphas_indices = surface.get_urls_for_rendering(True, None)
    return {
        'urlVerticesPick': json.dumps(pick_vertices),
        'urlTrianglesPick': json.dumps(pick_triangles),
        'urlNormalsPick': json.dumps(pick_normals),
        'urlVertices': json.dumps(vertices),
        'urlTriangles': json.dumps(triangles),
        'urlNormals': json.dumps(normals),
        'alphas': json.dumps(alphas),
        'alphas_indices': json.dumps(alphas_indices),
        'brainCenter': json.dumps(surface.center()),
    }
@staticmethod
def prepare_entity_interface(input_list):
    """
    Wrap a creator's input tree in the dictionary structure expected by the
    rendering templates (no extra parameter configuration).
    """
    result = dict()
    result['inputList'] = input_list
    result[base.KEY_PARAMETERS_CONFIG] = False
    return result
def get_creator_and_interface(self, creator_module, creator_class, datatype_instance, lock_midpoint_for_eq=None):
    """
    Returns a Tuple: a creator instance and a dictionary for the creator interface.
    The interface is prepared for rendering, it is populated with existent data, in case of a
    parameter of type DataType. The name of the attributes are also prefixed to identify groups.

    :param lock_midpoint_for_eq: optional list of indices into the input tree
        whose equation mid-points should be locked (see _lock_midpoints).
    """
    algo_group = self.flow_service.get_algorithm_by_module_and_class(creator_module, creator_class)[1]
    group, _ = self.flow_service.prepare_adapter(base.get_current_project().id, algo_group)
    # I didn't use the interface (from the above line) returned by the method 'prepare_adapter'
    # from flow service because the selects that display dataTypes will also have the 'All' entry.
    datatype_instance.trait.bound = traited_interface.INTERFACE_ATTRIBUTES_ONLY
    input_list = datatype_instance.interface[traited_interface.INTERFACE_ATTRIBUTES]
    if lock_midpoint_for_eq is not None:
        for idx in lock_midpoint_for_eq:
            input_list[idx] = self._lock_midpoints(input_list[idx])
    category = self.flow_service.get_visualisers_category()
    input_list = self.flow_service.prepare_parameters(input_list, base.get_current_project().id, category.id)
    input_list = ABCAdapter.prepare_param_names(input_list)
    return self.flow_service.build_adapter_instance(group), input_list
@staticmethod
def get_series_json(data, label):
""" For each data point entry, build the FLOT specific JSON. """
series = "{\"data\": " + json.dumps(data) + ","
series += "\"label\": \"" + label + "\""
series += "}"
return series
@staticmethod
def build_final_json(list_of_series):
""" Given a list with all the data points, build the final FLOT json. """
final_json = "["
for i, value in enumerate(list_of_series):
if i:
final_json += ","
final_json += value
final_json += "]"
return final_json
@staticmethod
def get_ui_message(list_of_equation_names):
"""
The message returned by this method should be displayed if
the equation with the given name couldn't be evaluated in all points.
"""
if len(list_of_equation_names):
return ("Could not evaluate the " + ", ".join(list_of_equation_names) + " equation(s) "
"in all the points. Some of the values were changed.")
else:
return ""
def get_select_existent_entities(self, label, entity_type, entity_gid=None):
    """
    Returns the dictionary needed for drawing the select which display all
    the created entities of the specified type.

    :param entity_gid: optional GID pre-selected in the rendered select.
    """
    project_id = base.get_current_project().id
    category = self.flow_service.get_visualisers_category()
    interface = [{'name': 'existentEntitiesSelect', 'label': label, 'type': entity_type}]
    if entity_gid is not None:
        interface[0]['default'] = entity_gid
    # Populate the select with the project's existing entities of this type.
    interface = self.flow_service.prepare_parameters(interface, project_id, category.id)
    interface = ABCAdapter.prepare_param_names(interface)
    return interface
@staticmethod
def add_interface_to_session(left_input_tree, right_input_tree):
    """
    left_input_tree and right_input_tree are expected to be lists of dictionaries.
    The two lists are concatenated (the left one deep-copied) and stored in
    the session; the filters require the interface to be present there.
    """
    combined = deepcopy(left_input_tree)
    for entry in right_input_tree:
        combined.append(entry)
    SelectedAdapterContext().add_adapter_to_session(None, combined)
def fill_default_attributes(self, template_dictionary, subsection='stimulus'):
    """
    Overwrite base controller to add required parameters for adapter templates.

    Sets the navigation section/subsection keys and the extra resources
    template before delegating to the base implementation.
    """
    template_dictionary[base.KEY_SECTION] = 'stimulus'
    template_dictionary[base.KEY_SUB_SECTION] = subsection
    template_dictionary[base.KEY_SUBMENU_LIST] = self.submenu_list
    template_dictionary[base.KEY_INCLUDE_RESOURCES] = 'spatial/included_resources'
    base.BaseController.fill_default_attributes(self, template_dictionary)
    return template_dictionary
def get_x_axis_range(self, min_x_str, max_x_str):
    """
    Fill range for the X-axis displayed in 2D graph.

    :returns: a (min_x, max_x, error_msg) tuple.  On any validation failure
        the defaults (0, 100) are restored and error_msg describes the
        problem; error_msg is '' on success.
    """
    min_x = 0
    max_x = 100
    error_msg = ''
    if self.is_int(min_x_str):
        min_x = int(min_x_str)
        if self.is_int(max_x_str):
            max_x = int(max_x_str)
        else:
            min_x = 0
            error_msg = "The max value for the x-axis should be an integer value."
        if min_x >= max_x:
            # Fixed grammar in the user-facing message ("then" -> "than").
            error_msg = "The min value for the x-axis should be smaller than the max value of the x-axis."
            min_x = 0
            max_x = 100
    else:
        error_msg = "The min value for the x-axis should be an integer value."
    return min_x, max_x, error_msg
@staticmethod
def _lock_midpoints(equations_dict):
    """
    Set mid-points for gaussian / double gausians as locked to 0.0 in case of spatial equations.

    NOTE(review): the attribute list is addressed with a fixed index [1],
    which presumably is the equation-parameters entry — confirm against the
    traited interface layout before reordering attributes.
    """
    for equation in equations_dict[ABCAdapter.KEY_OPTIONS]:
        if equation[ABCAdapter.KEY_NAME] == 'Gaussian':
            for entry in equation[ABCAdapter.KEY_ATTRIBUTES][1][ABCAdapter.KEY_ATTRIBUTES]:
                if entry[ABCAdapter.KEY_NAME] == 'midpoint':
                    entry['locked'] = True
        if equation[ABCAdapter.KEY_NAME] == 'DoubleGaussian':
            # Only the first mid-point is locked for the double gaussian.
            for entry in equation[ABCAdapter.KEY_ATTRIBUTES][1][ABCAdapter.KEY_ATTRIBUTES]:
                if entry[ABCAdapter.KEY_NAME] == 'midpoint1':
                    entry['locked'] = True
    return equations_dict
@staticmethod
def is_int(str_value):
"""
Checks if the given string may be converted to an int value.
"""
try:
int(str_value)
return True
except Exception:
return False
def get_data_for_param_sliders(self, connectivity_node_index, context_model_parameters):
    """
    Method used only for handling the exception.

    Delegates to the context object; on invalid model parameters it logs,
    sets a user-visible error message and redirects back to the burst page.
    """
    try:
        return context_model_parameters.get_data_for_param_sliders(connectivity_node_index)
    except ValueError, excep:
        self.logger.info("All the model parameters that are configurable should be valid arrays or numbers.")
        self.logger.exception(excep)
        base.set_error_message("All the model parameters that are configurable should be valid arrays or numbers.")
        # HTTPRedirect is raised (not returned): cherrypy uses it as control flow.
        raise cherrypy.HTTPRedirect("/burst/")
|
stuart-knock/tvb-framework
|
tvb/interfaces/web/controllers/spatial/base_spatio_temporal_controller.py
|
Python
|
gpl-2.0
| 16,353
|
[
"Gaussian"
] |
f694dff6b1153c0f8c955ca42f597017642715536ab23cdec399483766369345
|
"""Functionality to query and extract information from aligned BAM files.
"""
import collections
import contextlib
import os
import itertools
import subprocess
import numpy
import pysam
import toolz as tz
from bcbio import utils
from bcbio.bam import ref
from bcbio.distributed import objectstore
from bcbio.distributed.transaction import file_transaction
from bcbio.log import logger
from bcbio.pipeline import config_utils
import bcbio.pipeline.datadict as dd
from bcbio.provenance import do
def is_paired(bam_file):
    """Determine if a BAM file has paired reads.

    Streams only the first 50k records through sambamba, filters for the
    'paired' flag and checks whether at least one such read exists.
    """
    bam_file = objectstore.cl_input(bam_file)
    cmd = ("sambamba view -h {bam_file} | head -50000 | "
           "sambamba view -S -F paired /dev/stdin | head -1 | wc -l")
    # stderr is discarded — presumably to silence broken-pipe complaints when
    # head truncates the stream early; confirm before removing.
    out = subprocess.check_output(cmd.format(**locals()), shell=True,
                                  executable=do.find_bash(),
                                  stderr=open("/dev/null", "w"))
    return int(out) > 0
def index(in_bam, config, check_timestamp=True):
    """Index a BAM file, skipping if index present.

    Centralizes BAM indexing providing ability to switch indexing approaches.
    Returns the path of whichever index exists afterwards: ``file.bam.bai``
    or the alternate ``file.bai`` naming scheme.
    """
    assert is_bam(in_bam), "%s in not a BAM file" % in_bam
    index_file = "%s.bai" % in_bam
    # Alternate naming scheme: file.bai living next to file.bam.
    alt_index_file = "%s.bai" % os.path.splitext(in_bam)[0]
    if check_timestamp:
        bai_exists = utils.file_uptodate(index_file, in_bam) or utils.file_uptodate(alt_index_file, in_bam)
    else:
        bai_exists = utils.file_exists(index_file) or utils.file_exists(alt_index_file)
    if not bai_exists:
        # Remove old index files and re-run to prevent linking into tx directory
        for fname in [index_file, alt_index_file]:
            utils.remove_safe(fname)
        sambamba = _get_sambamba(config)
        samtools = config_utils.get_program("samtools", config)
        num_cores = config["algorithm"].get("num_cores", 1)
        with file_transaction(config, index_file) as tx_index_file:
            assert tx_index_file.find(".bam.bai") > 0
            tx_bam_file = tx_index_file.replace(".bam.bai", ".bam")
            # Symlink the BAM into the transaction directory so the indexer
            # writes its output there.
            utils.symlink_plus(in_bam, tx_bam_file)
            if sambamba:
                cmd = "{sambamba} index -t {num_cores} {tx_bam_file}"
            else:
                cmd = "{samtools} index {tx_bam_file}"
            do.run(cmd.format(**locals()), "Index BAM file: %s" % os.path.basename(in_bam))
    return index_file if utils.file_exists(index_file) else alt_index_file
def remove(in_bam):
    """
    Remove a BAM file together with its .bai index, when either exists.
    """
    for fname in (in_bam, in_bam + ".bai"):
        if utils.file_exists(fname):
            utils.remove_safe(fname)
def idxstats(in_bam, data):
    """Return BAM index stats for the given file, using samtools idxstats.

    Ensures the index exists first.  Returns a list of AlignInfo namedtuples
    (contig, length, aligned, unaligned), one per contig plus the '*' entry.
    """
    index(in_bam, data["config"])
    AlignInfo = collections.namedtuple("AlignInfo", ["contig", "length", "aligned", "unaligned"])
    samtools = config_utils.get_program("samtools", data["config"])
    idxstats_out = subprocess.check_output([samtools, "idxstats", in_bam])
    out = []
    for line in idxstats_out.split("\n"):
        if line.strip():
            contig, length, aligned, unaligned = line.split("\t")
            out.append(AlignInfo(contig, int(length), int(aligned), int(unaligned)))
    return out
def get_downsample_pct(in_bam, target_counts, data):
    """Retrieve percentage of file to downsample to get to target counts.

    target_counts is interpreted per read group.  Returns a float in (0, 1)
    when downsampling is needed; implicitly returns None when the file is
    already at or below the target — callers rely on that falsy result.
    """
    total = sum(x.aligned for x in idxstats(in_bam, data))
    with contextlib.closing(pysam.Samfile(in_bam, "rb")) as work_bam:
        # At least one read group is assumed even if RG is absent.
        n_rgs = max(1, len(work_bam.header.get("RG", [])))
    rg_target = n_rgs * target_counts
    if total > rg_target:
        return float(rg_target) / float(total)
def get_aligned_reads(in_bam, data):
    """Return the fraction of reads in the BAM file that are aligned.

    NOTE(review): raises ZeroDivisionError on a BAM with no reads at all —
    confirm callers never pass an empty file.
    """
    index(in_bam, data["config"])
    bam_stats = idxstats(in_bam, data)
    align = sum(x.aligned for x in bam_stats)
    unaligned = sum(x.unaligned for x in bam_stats)
    total = float(align + unaligned)
    return 1.0 * align / total
def downsample(in_bam, data, target_counts, read_filter="", always_run=False,
               work_dir=None):
    """Downsample a BAM file to the specified number of target counts.

    :param read_filter: extra sambamba view filter arguments, passed verbatim.
    :param always_run: force an output file even when no downsampling is
        needed (uses pct 1.0, i.e. a filtered copy).
    :returns: path of the downsampled BAM, or None when nothing was done.
    """
    index(in_bam, data["config"])
    ds_pct = get_downsample_pct(in_bam, target_counts, data)
    if always_run and not ds_pct:
        ds_pct = 1.0
    if ds_pct:
        out_file = "%s-downsample%s" % os.path.splitext(in_bam)
        if work_dir:
            out_file = os.path.join(work_dir, os.path.basename(out_file))
        if not utils.file_exists(out_file):
            with file_transaction(data, out_file) as tx_out_file:
                sambamba = config_utils.get_program("sambamba", data["config"])
                num_cores = dd.get_num_cores(data)
                # Fixed seed keeps the subsample reproducible between runs.
                cmd = ("{sambamba} view -t {num_cores} {read_filter} -f bam -o {tx_out_file} "
                       "--subsample={ds_pct:.3} --subsampling-seed=42 {in_bam}")
                do.run(cmd.format(**locals()), "Downsample BAM file: %s" % os.path.basename(in_bam))
        return out_file
def check_header(in_bam, rgnames, ref_file, config):
    """Ensure passed in BAM header matches reference file and read groups names.

    Raises ValueError (from the helpers) on contig order/name mismatches or
    a read-group sample name that differs from the configuration.
    """
    _check_bam_contigs(in_bam, ref_file, config)
    _check_sample(in_bam, rgnames)
def _check_sample(in_bam, rgnames):
    """Ensure input sample name matches expected run group names.

    Multiple read groups only produce a printed warning; a missing read
    group or a mismatched SM field raises ValueError.
    """
    with contextlib.closing(pysam.Samfile(in_bam, "rb")) as bamfile:
        rg = bamfile.header.get("RG", [{}])
    msgs = []
    warnings = []
    if len(rg) > 1:
        warnings.append("Multiple read groups found in input BAM. Expect single RG per BAM.")
    elif len(rg) == 0:
        msgs.append("No read groups found in input BAM. Expect single RG per BAM.")
    elif rg[0].get("SM") != rgnames["sample"]:
        msgs.append("Read group sample name (SM) does not match configuration `description`: %s vs %s"
                    % (rg[0].get("SM"), rgnames["sample"]))
    if len(msgs) > 0:
        raise ValueError("Problems with pre-aligned input BAM file: %s\n" % (in_bam)
                         + "\n".join(msgs) +
                         "\nSetting `bam_clean: picard` in the configuration can often fix this issue.")
    if warnings:
        print("*** Potential problems in input BAM compared to reference:\n%s\n" %
              "\n".join(warnings))
def _check_bam_contigs(in_bam, ref_file, config):
    """Ensure a pre-aligned BAM file matches the expected reference genome.

    Compares contigs pairwise in order: a differing pair where both sides
    exist is a hard error; contigs present on only one side are warnings.
    """
    ref_contigs = [c.name for c in ref.file_contigs(ref_file, config)]
    with contextlib.closing(pysam.Samfile(in_bam, "rb")) as bamfile:
        bam_contigs = [c["SN"] for c in bamfile.header["SQ"]]
    problems = []
    warnings = []
    # izip_longest pads the shorter list with None so extras are detected.
    for bc, rc in itertools.izip_longest(bam_contigs, ref_contigs):
        if bc != rc:
            if bc and rc:
                problems.append("Reference mismatch. BAM: %s Reference: %s" % (bc, rc))
            elif bc:
                warnings.append("Extra BAM chromosomes: %s" % bc)
            elif rc:
                warnings.append("Extra reference chromosomes: %s" % rc)
    if problems:
        raise ValueError("Unexpected order, name or contig mismatches between input BAM and reference file:\n%s\n"
                         "Setting `bam_clean: picard` in the configuration can often fix this issue."
                         % "\n".join(problems))
    if warnings:
        print("*** Potential problems in input BAM compared to reference:\n%s\n" %
              "\n".join(warnings))
def open_samfile(in_file):
    """Open a SAM or BAM file with pysam, choosing the mode by extension."""
    if is_bam(in_file):
        mode = "rb"
    elif is_sam(in_file):
        mode = "r"
    else:
        raise IOError("in_file must be either a BAM file or SAM file. Is the "
                      "extension .sam or .bam?")
    return pysam.Samfile(in_file, mode)
def is_bam(in_file):
    """Return True when *in_file* has a ``.bam`` extension.

    The check is purely name-based and case-sensitive; file contents are
    not inspected.
    """
    return os.path.splitext(in_file)[1] == ".bam"
def is_sam(in_file):
    """Return True when *in_file* has a ``.sam`` extension.

    The check is purely name-based and case-sensitive; file contents are
    not inspected.
    """
    return os.path.splitext(in_file)[1] == ".sam"
def mapped(in_bam, config):
    """
    return a bam file of only the mapped reads

    Prefers sambamba (which can also exclude reads whose mate is unmapped);
    falls back to samtools -F 4, which filters only on the read's own
    unmapped flag.
    """
    out_file = os.path.splitext(in_bam)[0] + ".mapped.bam"
    if utils.file_exists(out_file):
        return out_file
    sambamba = _get_sambamba(config)
    with file_transaction(config, out_file) as tx_out_file:
        if sambamba:
            cmd = ("{sambamba} view --format=bam -F 'not (unmapped or mate_is_unmapped)' "
                   "{in_bam} -o {tx_out_file}")
        else:
            samtools = config_utils.get_program("samtools", config)
            cmd = "{samtools} view -b -F 4 {in_bam} -o {tx_out_file}"
        do.run(cmd.format(**locals()),
               "Filtering mapped reads to %s." % (tx_out_file))
    return out_file
def count(in_bam, config=None):
    """
    return the counts in a BAM file

    Uses sambamba when available, otherwise samtools.  config defaults to
    an empty dict so the function can be called standalone.
    """
    if not config:
        config = {}
    sambamba = _get_sambamba(config)
    if sambamba:
        cmd = ("{sambamba} view -c {in_bam}").format(**locals())
    else:
        samtools = config_utils.get_program("samtools", config)
        cmd = ("{samtools} view -c {in_bam}").format(**locals())
    out = subprocess.check_output(cmd, shell=True)
    return int(out)
def sam_to_bam(in_sam, config):
    """Convert a SAM file to BAM, returning the input unchanged when it is
    already BAM and skipping when the output already exists."""
    if is_bam(in_sam):
        return in_sam
    assert is_sam(in_sam), "%s is not a SAM file" % in_sam
    out_file = os.path.splitext(in_sam)[0] + ".bam"
    if utils.file_exists(out_file):
        return out_file
    samtools = config_utils.get_program("samtools", config)
    num_cores = config["algorithm"].get("num_cores", 1)
    with file_transaction(config, out_file) as tx_out_file:
        cmd = "{samtools} view -@ {num_cores} -h -S -b {in_sam} -o {tx_out_file}"
        do.run(cmd.format(**locals()),
               ("Convert SAM to BAM (%s cores): %s to %s"
                % (str(num_cores), in_sam, out_file)))
    return out_file
def sam_to_bam_stream_cmd(config, named_pipe=None):
    """Build a sambamba command converting streamed SAM (stdin or a named
    pipe) into BAM, suitable for embedding in a shell pipeline."""
    sambamba = config_utils.get_program("sambamba", config)
    num_cores = config["algorithm"].get("num_cores", 1)
    pipe = named_pipe or "/dev/stdin"
    return " {sambamba} view --format=bam -S -t {num_cores} {pipe} ".format(
        sambamba=sambamba, num_cores=num_cores, pipe=pipe)
def bam_to_sam(in_file, config):
    """Convert a BAM file to SAM (with header), returning the input unchanged
    when it is already SAM and skipping when the output already exists."""
    if is_sam(in_file):
        return in_file
    assert is_bam(in_file), "%s is not a BAM file" % in_file
    out_file = os.path.splitext(in_file)[0] + ".sam"
    if utils.file_exists(out_file):
        return out_file
    samtools = config_utils.get_program("samtools", config)
    num_cores = config["algorithm"].get("num_cores", 1)
    with file_transaction(config, out_file) as tx_out_file:
        cmd = "{samtools} view -@ {num_cores} -h {in_file} -o {tx_out_file}"
        do.run(cmd.format(**locals()),
               ("Convert BAM to SAM (%s cores): %s to %s"
                % (str(num_cores), in_file, out_file)))
    return out_file
def reheader(header, bam_file, config):
    """Replace the header of *bam_file* with *header* via samtools reheader.

    Writes to a new ``.reheadered`` file and returns its path; the original
    file is left untouched.
    """
    samtools = config_utils.get_program("samtools", config)
    base, ext = os.path.splitext(bam_file)
    out_file = base + ".reheadered" + ext
    cmd = "{samtools} reheader {header} {bam_file} > {out_file}"
    do.run(cmd.format(**locals()), "Reheadering %s." % bam_file)
    return out_file
def merge(bamfiles, out_bam, config):
    """Merge multiple BAM files into *out_bam* and index the result.

    Returns the single input unchanged when only one file is given and
    skips work when the output already exists.  Falls back to bamtools
    when the samtools merge fails.
    """
    # Fixed double-negative assertion message ("are not BAM files").
    assert all(map(is_bam, bamfiles)), ("Not all of the files to merge are BAM "
                                        "files: %s " % (bamfiles))
    assert all(map(utils.file_exists, bamfiles)), ("Not all of the files to merge "
                                                   "exist: %s" % (bamfiles))
    if len(bamfiles) == 1:
        return bamfiles[0]
    if os.path.exists(out_bam):
        return out_bam
    sambamba = _get_sambamba(config)
    # NOTE(review): sambamba is unconditionally disabled here so the samtools
    # branch below is always taken -- this looks like a deliberate workaround;
    # confirm before re-enabling the sambamba merge path.
    sambamba = None
    samtools = config_utils.get_program("samtools", config)
    bamtools = config_utils.get_program("bamtools", config)
    num_cores = config["algorithm"].get("num_cores", 1)
    with file_transaction(config, out_bam) as tx_out_bam:
        try:
            if sambamba:
                cmd = "{sambamba} merge -t {num_cores} {tx_out_bam} " + " ".join(bamfiles)
            else:
                cmd = "{samtools} merge -@ {num_cores} {tx_out_bam} " + " ".join(bamfiles)
            do.run(cmd.format(**locals()), "Merge %s into %s." % (bamfiles, out_bam))
        except subprocess.CalledProcessError:
            files = " -in ".join(bamfiles)
            cmd = "{bamtools} merge -in {files} -out {tx_out_bam}"
            do.run(cmd.format(**locals()), "Error with other tools. Merge %s into %s with bamtools" %
                   (bamfiles, out_bam))
    index(out_bam, config)
    return out_bam
def sort(in_bam, config, order="coordinate"):
    """Sort a BAM file, skipping if already present.

    :param order: "coordinate" (default) or "queryname".

    Prefers a multi-core sambamba sort when available, falling back to a
    single-core samtools sort on failure (sambamba has intermittent
    multicore failures).
    """
    assert is_bam(in_bam), "%s in not a BAM file" % in_bam
    if bam_already_sorted(in_bam, config, order):
        return in_bam
    sort_stem = _get_sort_stem(in_bam, order)
    sort_file = sort_stem + ".bam"
    if not utils.file_exists(sort_file):
        sambamba = _get_sambamba(config)
        samtools = config_utils.get_program("samtools", config)
        cores = config["algorithm"].get("num_cores", 1)
        with file_transaction(config, sort_file) as tx_sort_file:
            tx_sort_stem = os.path.splitext(tx_sort_file)[0]
            tx_dir = utils.safe_makedir(os.path.dirname(tx_sort_file))
            order_flag = "-n" if order == "queryname" else ""
            resources = config_utils.get_resources("samtools", config)
            mem = resources.get("memory", "2G")
            samtools_cmd = ("{samtools} sort -@ {cores} -m {mem} {order_flag} "
                            "{in_bam} {tx_sort_stem}")
            if sambamba:
                if tz.get_in(["resources", "sambamba"], config):
                    sm_resources = config_utils.get_resources("sambamba", config)
                    mem = sm_resources.get("memory", "2G")
                # sambamba uses total memory, not memory per core
                mem = config_utils.adjust_memory(mem, cores, "increase").upper()
                # Use samtools compatible natural sorting
                # https://github.com/lomereiter/sambamba/issues/132
                order_flag = "--natural-sort" if order == "queryname" else ""
                cmd = ("{sambamba} sort -t {cores} -m {mem} {order_flag} "
                       "-o {tx_sort_file} --tmpdir={tx_dir} {in_bam}")
            else:
                cmd = samtools_cmd
            # sambamba has intermittent multicore failures. Allow
            # retries with single core
            try:
                do.run(cmd.format(**locals()),
                       "Sort BAM file (multi core, %s): %s to %s" %
                       (order, os.path.basename(in_bam),
                        os.path.basename(sort_file)))
            except Exception:
                # Narrowed from a bare `except:`, which also swallowed
                # KeyboardInterrupt/SystemExit during long-running sorts.
                logger.exception("Multi-core sorting failed, reverting to single core")
                order_flag = "-n" if order == "queryname" else ""
                do.run(samtools_cmd.format(**locals()),
                       "Sort BAM file (single core, %s): %s to %s" %
                       (order, os.path.basename(in_bam),
                        os.path.basename(sort_file)))
    return sort_file
def sort_cmd(config, tmp_dir, named_pipe=None, order="coordinate"):
    """ Get a sort command, suitable for piping

    Reads from *named_pipe* (or stdin) and writes sorted BAM to stdout.
    Memory is taken from the samtools resources and decreased since this
    command typically runs alongside other pipeline stages.
    """
    sambamba = _get_sambamba(config)
    pipe = named_pipe if named_pipe else "/dev/stdin"
    order_flag = "-n" if order == "queryname" else ""
    resources = config_utils.get_resources("samtools", config)
    num_cores = config["algorithm"].get("num_cores", 1)
    mem = config_utils.adjust_memory(resources.get("memory", "2G"), 1, "decrease").upper()
    cmd = ("{sambamba} sort -m {mem} --tmpdir {tmp_dir} -t {num_cores} {order_flag} -o /dev/stdout {pipe}")
    return cmd.format(**locals())
def _get_sambamba(config):
    """Return the configured sambamba executable, or None when unavailable."""
    try:
        return config_utils.get_program("sambamba", config)
    except config_utils.CmdNotFound:
        return None
def bam_already_sorted(in_bam, config, order):
    """Check whether the BAM header already records the requested sort order."""
    current_order = _get_sort_order(in_bam, config)
    return current_order == order
def _get_sort_order(in_bam, config):
    """Read the sort order (HD/SO tag) from the BAM header, or None if unset."""
    with open_samfile(in_bam) as bam_handle:
        header = bam_handle.header
    return utils.get_in(header, ("HD", "SO"), None)
def _get_sort_stem(in_bam, order):
SUFFIXES = {"coordinate": ".sorted", "queryname": ".nsorted"}
sort_base = os.path.splitext(in_bam)[0]
for suffix in SUFFIXES:
sort_base = sort_base.split(suffix)[0]
return sort_base + SUFFIXES[order]
def sample_name(in_bam):
    """Get sample name from BAM file.

    Reads the SM field of the first read group.  Returns None when pysam
    raises ValueError while accessing the header; implicitly returns None
    when no RG entries exist.
    """
    with contextlib.closing(pysam.AlignmentFile(in_bam, "rb", check_sq=False)) as in_pysam:
        try:
            if "RG" in in_pysam.header:
                return in_pysam.header["RG"][0]["SM"]
        except ValueError:
            return None
    # NOTE(review): a read group lacking an SM tag raises KeyError, which is
    # not caught here -- confirm whether that should also map to None.
def estimate_read_length(bam_file, nreads=1000):
    """
    estimate median read length of a SAM/BAM file

    Samples only the first *nreads* records from the start of the file, so
    the estimate assumes read lengths are roughly uniform across the file.
    """
    with open_samfile(bam_file) as bam_handle:
        reads = tz.itertoolz.take(nreads, bam_handle)
        lengths = [len(x.seq) for x in reads]
    return int(numpy.median(lengths))
def estimate_fragment_size(bam_file, nreads=1000):
    """
    estimate median fragment size of a SAM/BAM file

    Uses the template length (tlen) of the first *nreads* records.
    NOTE(review): tlen is signed and is 0 for unpaired reads, so the median
    can be zero or negative -- confirm callers expect that.
    """
    with open_samfile(bam_file) as bam_handle:
        reads = tz.itertoolz.take(nreads, bam_handle)
        lengths = [x.tlen for x in reads]
    return int(numpy.median(lengths))
def filter_stream_cmd(bam_file, data, filter_flag):
    """
    return a command to keep only alignments matching the filter flag
    see https://github.com/lomereiter/sambamba/wiki/%5Bsambamba-view%5D-Filter-expression-syntax for examples

    The command writes BAM to stdout and is meant to be embedded in a
    shell pipeline (see filter_primary below).
    """
    sambamba = config_utils.get_program("sambamba", data["config"])
    num_cores = dd.get_num_cores(data)
    cmd = ('{sambamba} view -t {num_cores} -f bam -F "{filter_flag}" {bam_file}')
    return cmd.format(**locals())
def filter_primary_stream_cmd(bam_file, data):
    """Streaming command that drops secondary alignments from *bam_file*."""
    return filter_stream_cmd(bam_file, data, "not secondary_alignment")
def filter_primary(bam_file, data):
    """Write a copy of *bam_file* containing only primary alignments,
    skipping when the output already exists."""
    stem, ext = os.path.splitext(bam_file)
    out_file = stem + ".primary" + ext
    if utils.file_exists(out_file):
        return out_file
    # NOTE(review): every other call in this module passes data/config as the
    # first argument to file_transaction -- confirm file_transaction(out_file)
    # is valid here or whether `data` was accidentally dropped.
    with file_transaction(out_file) as tx_out_file:
        cmd = filter_primary_stream_cmd(bam_file, data)
        cmd += "> {tx_out_file}"
        do.run(cmd.format(**locals()), ("Filtering primary alignments in %s." %
                                        os.path.basename(bam_file)))
    return out_file
|
guillermo-carrasco/bcbio-nextgen
|
bcbio/bam/__init__.py
|
Python
|
mit
| 18,753
|
[
"pysam"
] |
c8550e49b637874db8c1c5a365f9726f78e116b203688fc68e2876c338385429
|
# Licensed under a 3-clause BSD style license - see LICENSE.rst
"""
Module to test fitting routines
"""
# pylint: disable=invalid-name
import os.path
import warnings
from unittest import mock
from importlib.metadata import EntryPoint
import pytest
import numpy as np
import unittest.mock as mk
from numpy import linalg
from numpy.testing import assert_allclose, assert_almost_equal, assert_equal
from astropy.modeling import models
from astropy.modeling.core import Fittable2DModel, Parameter
from astropy.modeling.fitting import (
SimplexLSQFitter, SLSQPLSQFitter, LinearLSQFitter, LevMarLSQFitter,
JointFitter, Fitter, FittingWithOutlierRemoval)
from astropy.modeling.optimizers import Optimization
from astropy.utils import NumpyRNGContext
from astropy.utils.data import get_pkg_data_filename
from astropy.stats import sigma_clip
from astropy.utils.compat.optional_deps import HAS_SCIPY
from astropy.utils.exceptions import AstropyUserWarning
from astropy.modeling.fitting import populate_entry_points
from . import irafutil
# scipy is optional; tests needing it are guarded with
# @pytest.mark.skipif('not HAS_SCIPY').
if HAS_SCIPY:
    from scipy import optimize

# Non-linear fitters exercised across the generic fitting tests.
fitters = [SimplexLSQFitter, SLSQPLSQFitter]

# Fixed seed so randomly generated test data is reproducible.
_RANDOM_SEED = 0x1337
class TestPolynomial2D:
    """Tests for 2D polynomial fitting."""

    def setup_class(self):
        self.model = models.Polynomial2D(2)
        self.y, self.x = np.mgrid[:5, :5]

        def poly2(x, y):
            return 1 + 2 * x + 3 * x ** 2 + 4 * y + 5 * y ** 2 + 6 * x * y
        self.z = poly2(self.x, self.y)

    def test_poly2D_fitting(self):
        # Compare the linear fitter against a direct lstsq solution of the
        # same design matrix.
        fitter = LinearLSQFitter()
        v = self.model.fit_deriv(x=self.x, y=self.y)
        p = linalg.lstsq(v, self.z.flatten(), rcond=-1)[0]
        new_model = fitter(self.model, self.x, self.y, self.z)
        assert_allclose(new_model.parameters, p)

    def test_eval(self):
        # Fitting noiseless polynomial data must reproduce it exactly.
        fitter = LinearLSQFitter()
        new_model = fitter(self.model, self.x, self.y, self.z)
        assert_allclose(new_model(self.x, self.y), self.z)

    @pytest.mark.skipif('not HAS_SCIPY')
    def test_polynomial2D_nonlinear_fitting(self):
        # Start from perturbed parameters; LevMar should recover [1..6] and
        # warn that the model is actually linear in its parameters.
        self.model.parameters = [.6, 1.8, 2.9, 3.7, 4.9, 6.7]
        nlfitter = LevMarLSQFitter()
        with pytest.warns(AstropyUserWarning,
                          match=r'Model is linear in parameters'):
            new_model = nlfitter(self.model, self.x, self.y, self.z)
        assert_allclose(new_model.parameters, [1, 2, 3, 4, 5, 6])
class TestICheb2D:
    """
    Tests 2D Chebyshev polynomial fitting

    Create a 2D polynomial (z) using Polynomial2DModel and default coefficients
    Fit z using a ICheb2D model
    Evaluate the ICheb2D polynomial and compare with the initial z
    """

    def setup_class(self):
        self.pmodel = models.Polynomial2D(2)
        self.y, self.x = np.mgrid[:5, :5]
        self.z = self.pmodel(self.x, self.y)
        self.cheb2 = models.Chebyshev2D(2, 2)
        self.fitter = LinearLSQFitter()

    def test_default_params(self):
        # Fit data generated by a Chebyshev2D with coefficients 0..8 and
        # compare against precomputed reference parameters.
        self.cheb2.parameters = np.arange(9)
        p = np.array([1344., 1772., 400., 1860., 2448., 552., 432., 568.,
                      128.])
        z = self.cheb2(self.x, self.y)
        model = self.fitter(self.cheb2, self.x, self.y, z)
        assert_almost_equal(model.parameters, p)

    def test_poly2D_cheb2D(self):
        # A Chebyshev fit of polynomial data must reproduce it exactly.
        model = self.fitter(self.cheb2, self.x, self.y, self.z)
        z1 = model(self.x, self.y)
        assert_almost_equal(self.z, z1)

    @pytest.mark.skipif('not HAS_SCIPY')
    def test_chebyshev2D_nonlinear_fitting(self):
        cheb2d = models.Chebyshev2D(2, 2)
        cheb2d.parameters = np.arange(9)
        z = cheb2d(self.x, self.y)
        # Perturb the starting point; LevMar should recover 0..8 and warn
        # that the model is linear in its parameters.
        cheb2d.parameters = [0.1, .6, 1.8, 2.9, 3.7, 4.9, 6.7, 7.5, 8.9]
        nlfitter = LevMarLSQFitter()
        with pytest.warns(AstropyUserWarning,
                          match=r'Model is linear in parameters'):
            model = nlfitter(cheb2d, self.x, self.y, z)
        assert_allclose(model.parameters, [0, 1, 2, 3, 4, 5, 6, 7, 8],
                        atol=10**-9)

    @pytest.mark.skipif('not HAS_SCIPY')
    def test_chebyshev2D_nonlinear_fitting_with_weights(self):
        # Same as above, but passing (uniform) weights through the fitter.
        cheb2d = models.Chebyshev2D(2, 2)
        cheb2d.parameters = np.arange(9)
        z = cheb2d(self.x, self.y)
        cheb2d.parameters = [0.1, .6, 1.8, 2.9, 3.7, 4.9, 6.7, 7.5, 8.9]
        nlfitter = LevMarLSQFitter()
        weights = np.ones_like(self.y)
        with pytest.warns(AstropyUserWarning,
                          match=r'Model is linear in parameters'):
            model = nlfitter(cheb2d, self.x, self.y, z, weights=weights)
        assert_allclose(model.parameters, [0, 1, 2, 3, 4, 5, 6, 7, 8],
                        atol=10**-9)
@pytest.mark.skipif('not HAS_SCIPY')
class TestJointFitter:
    """
    Tests the joint fitting routine using 2 gaussian models
    """

    def setup_class(self):
        """
        Create 2 gaussian models and some data with noise.
        Create a fitter for the two models keeping the amplitude parameter
        common for the two models.
        """
        self.g1 = models.Gaussian1D(10, mean=14.9, stddev=.3)
        self.g2 = models.Gaussian1D(10, mean=13, stddev=.4)
        # 'amplitude' is the jointly-fit parameter, with initial value 9.8.
        self.jf = JointFitter([self.g1, self.g2],
                              {self.g1: ['amplitude'],
                               self.g2: ['amplitude']}, [9.8])
        self.x = np.arange(10, 20, .1)
        y1 = self.g1(self.x)
        y2 = self.g2(self.x)
        # The same seeded noise realization is added to both curves.
        with NumpyRNGContext(_RANDOM_SEED):
            n = np.random.randn(100)
        self.ny1 = y1 + 2 * n
        self.ny2 = y2 + 2 * n
        self.jf(self.x, self.ny1, self.x, self.ny2)

    def test_joint_parameter(self):
        """
        Tests that the amplitude of the two models is the same
        """
        assert_allclose(self.jf.fitparams[0], self.g1.parameters[0])
        assert_allclose(self.jf.fitparams[0], self.g2.parameters[0])

    def test_joint_fitter(self):
        """
        Tests the fitting routine with similar procedure.
        Compares the fitted parameters.
        """
        p1 = [14.9, .3]
        p2 = [13, .4]
        A = 9.8
        p = np.r_[A, p1, p2]

        # Reference implementation: direct leastsq on stacked residuals
        # with the shared amplitude as the first parameter.
        def model(A, p, x):
            return A * np.exp(-0.5 / p[1] ** 2 * (x - p[0]) ** 2)

        def errfunc(p, x1, y1, x2, y2):
            return np.ravel(np.r_[model(p[0], p[1:3], x1) - y1,
                                  model(p[0], p[3:], x2) - y2])

        coeff, _ = optimize.leastsq(errfunc, p,
                                    args=(self.x, self.ny1, self.x, self.ny2))
        assert_allclose(coeff, self.jf.fitparams, rtol=10 ** (-2))
class TestLinearLSQFitter:
    """Tests for the linear least-squares fitter."""

    def test_compound_model_raises_error(self):
        """Test that if a user tries to use a compound model, an error is raised."""
        with pytest.raises(ValueError) as excinfo:
            init_model1 = models.Polynomial1D(degree=2, c0=[1, 1], n_models=2)
            init_model2 = models.Polynomial1D(degree=2, c0=[1, 1], n_models=2)
            init_model_comp = init_model1 + init_model2
            x = np.arange(10)
            y = init_model_comp(x, model_set_axis=False)
            fitter = LinearLSQFitter()
            _ = fitter(init_model_comp, x, y)
        assert "Model must be simple, not compound" in str(excinfo.value)

    def test_chebyshev1D(self):
        """Tests fitting a 1D Chebyshev polynomial to some real world data."""
        test_file = get_pkg_data_filename(os.path.join('data',
                                                       'idcompspec.fits'))
        with open(test_file) as f:
            lines = f.read()
            reclist = lines.split('begin')
        # Parse an IRAF identify record and fit with the same domain/order.
        record = irafutil.IdentifyRecord(reclist[1])
        coeffs = record.coeff
        order = int(record.fields['order'])
        initial_model = models.Chebyshev1D(order - 1,
                                           domain=record.get_range())
        fitter = LinearLSQFitter()
        fitted_model = fitter(initial_model, record.x, record.z)
        assert_allclose(fitted_model.parameters, np.array(coeffs),
                        rtol=10e-2)

    def test_linear_fit_model_set(self):
        """Tests fitting multiple models simultaneously."""
        init_model = models.Polynomial1D(degree=2, c0=[1, 1], n_models=2)
        x = np.arange(10)
        y_expected = init_model(x, model_set_axis=False)
        assert y_expected.shape == (2, 10)

        # Add a bit of random noise
        with NumpyRNGContext(_RANDOM_SEED):
            y = y_expected + np.random.normal(0, 0.01, size=y_expected.shape)

        fitter = LinearLSQFitter()
        fitted_model = fitter(init_model, x, y)
        assert_allclose(fitted_model(x, model_set_axis=False), y_expected,
                        rtol=1e-1)

    def test_linear_fit_2d_model_set(self):
        """Tests fitted multiple 2-D models simultaneously."""
        init_model = models.Polynomial2D(degree=2, c0_0=[1, 1], n_models=2)
        x = np.arange(10)
        y = np.arange(10)
        z_expected = init_model(x, y, model_set_axis=False)
        assert z_expected.shape == (2, 10)

        # Add a bit of random noise
        with NumpyRNGContext(_RANDOM_SEED):
            z = z_expected + np.random.normal(0, 0.01, size=z_expected.shape)

        fitter = LinearLSQFitter()
        fitted_model = fitter(init_model, x, y, z)
        assert_allclose(fitted_model(x, y, model_set_axis=False), z_expected,
                        rtol=1e-1)

    def test_linear_fit_fixed_parameter(self):
        """
        Tests fitting a polynomial model with a fixed parameter (issue #6135).
        """
        init_model = models.Polynomial1D(degree=2, c1=1)
        init_model.c1.fixed = True

        x = np.arange(10)
        y = 2 + x + 0.5*x*x

        fitter = LinearLSQFitter()
        fitted_model = fitter(init_model, x, y)
        assert_allclose(fitted_model.parameters, [2., 1., 0.5], atol=1e-14)

    def test_linear_fit_model_set_fixed_parameter(self):
        """
        Tests fitting a polynomial model set with a fixed parameter (#6135).
        """
        init_model = models.Polynomial1D(degree=2, c1=[1, -2], n_models=2)
        init_model.c1.fixed = True

        x = np.arange(10)
        yy = np.array([2 + x + 0.5*x*x, -2*x])

        fitter = LinearLSQFitter()
        fitted_model = fitter(init_model, x, yy)

        assert_allclose(fitted_model.c0, [2., 0.], atol=1e-14)
        assert_allclose(fitted_model.c1, [1., -2.], atol=1e-14)
        assert_allclose(fitted_model.c2, [0.5, 0.], atol=1e-14)

    def test_linear_fit_2d_model_set_fixed_parameters(self):
        """
        Tests fitting a 2d polynomial model set with fixed parameters (#6135).
        """
        init_model = models.Polynomial2D(degree=2, c1_0=[1, 2], c0_1=[-0.5, 1],
                                         n_models=2,
                                         fixed={'c1_0': True, 'c0_1': True})

        x, y = np.mgrid[0:5, 0:5]
        zz = np.array([1+x-0.5*y+0.1*x*x, 2*x+y-0.2*y*y])

        fitter = LinearLSQFitter()
        fitted_model = fitter(init_model, x, y, zz)

        assert_allclose(fitted_model(x, y, model_set_axis=False), zz,
                        atol=1e-14)

    def test_linear_fit_model_set_masked_values(self):
        """
        Tests model set fitting with masked value(s) (#4824, #6819).
        """
        # NB. For single models, there is an equivalent doctest.
        init_model = models.Polynomial1D(degree=1, n_models=2)
        x = np.arange(10)
        y = np.ma.masked_array([2*x+1, x-2], mask=np.zeros_like([x, x]))

        y[0, 7] = 100.  # throw off fit coefficients if unmasked
        y.mask[0, 7] = True
        y[1, 1:3] = -100.
        y.mask[1, 1:3] = True

        fitter = LinearLSQFitter()
        fitted_model = fitter(init_model, x, y)

        assert_allclose(fitted_model.c0, [1., -2.], atol=1e-14)
        assert_allclose(fitted_model.c1, [2., 1.], atol=1e-14)

    def test_linear_fit_2d_model_set_masked_values(self):
        """
        Tests 2D model set fitting with masked value(s) (#4824, #6819).
        """
        init_model = models.Polynomial2D(1, n_models=2)
        x, y = np.mgrid[0:5, 0:5]
        z = np.ma.masked_array([2*x+3*y+1, x-0.5*y-2],
                               mask=np.zeros_like([x, x]))

        z[0, 3, 1] = -1000.  # throw off fit coefficients if unmasked
        z.mask[0, 3, 1] = True

        fitter = LinearLSQFitter()
        fitted_model = fitter(init_model, x, y, z)

        assert_allclose(fitted_model.c0_0, [1., -2.], atol=1e-14)
        assert_allclose(fitted_model.c1_0, [2., 1.], atol=1e-14)
        assert_allclose(fitted_model.c0_1, [3., -0.5], atol=1e-14)
@pytest.mark.skipif('not HAS_SCIPY')
class TestNonLinearFitters:
    """Tests non-linear least squares fitting and the SLSQP algorithm."""
    def setup_class(self):
        # True Gaussian parameters used to synthesize the data:
        # [amplitude, mean, stddev].
        self.initial_values = [100, 5, 1]
        self.xdata = np.arange(0, 10, 0.1)
        sigma = 4. * np.ones_like(self.xdata)
        # Reproducible noise so every test in this class sees the same data.
        with NumpyRNGContext(_RANDOM_SEED):
            yerror = np.random.normal(0, sigma)
        def func(p, x):
            # Plain-list Gaussian, independent of the modeling framework.
            return p[0] * np.exp(-0.5 / p[2] ** 2 * (x - p[1]) ** 2)
        self.ydata = func(self.initial_values, self.xdata) + yerror
        self.gauss = models.Gaussian1D(100, 5, stddev=1)
    def test_estimated_vs_analytic_deriv(self):
        """
        Runs `LevMarLSQFitter` with estimated and analytic derivatives of a
        `Gaussian1D`.
        """
        fitter = LevMarLSQFitter()
        model = fitter(self.gauss, self.xdata, self.ydata)
        g1e = models.Gaussian1D(100, 5.0, stddev=1)
        efitter = LevMarLSQFitter()
        # estimate_jacobian=True forces finite-difference derivatives.
        emodel = efitter(g1e, self.xdata, self.ydata, estimate_jacobian=True)
        assert_allclose(model.parameters, emodel.parameters, rtol=10 ** (-3))
    def test_estimated_vs_analytic_deriv_with_weights(self):
        """
        Runs `LevMarLSQFitter` with estimated and analytic derivatives of a
        `Gaussian1D`, with weights supplied.
        """
        # Simple data-derived weighting (not statistically rigorous; just
        # exercises the weights code path).
        weights = 1.0 / (self.ydata / 10.)
        fitter = LevMarLSQFitter()
        model = fitter(self.gauss, self.xdata, self.ydata, weights=weights)
        g1e = models.Gaussian1D(100, 5.0, stddev=1)
        efitter = LevMarLSQFitter()
        emodel = efitter(g1e, self.xdata, self.ydata, weights=weights, estimate_jacobian=True)
        assert_allclose(model.parameters, emodel.parameters, rtol=10 ** (-3))
    def test_with_optimize(self):
        """
        Tests results from `LevMarLSQFitter` against `scipy.optimize.leastsq`.
        """
        fitter = LevMarLSQFitter()
        model = fitter(self.gauss, self.xdata, self.ydata,
                       estimate_jacobian=True)
        def func(p, x):
            return p[0] * np.exp(-0.5 / p[2] ** 2 * (x - p[1]) ** 2)
        def errfunc(p, x, y):
            # Residual function in the form scipy.optimize.leastsq expects.
            return func(p, x) - y
        result = optimize.leastsq(errfunc, self.initial_values,
                                  args=(self.xdata, self.ydata))
        assert_allclose(model.parameters, result[0], rtol=10 ** (-3))
    def test_with_weights(self):
        """
        Tests results from `LevMarLSQFitter` with weights.
        """
        # part 1: weights are equal to 1
        fitter = LevMarLSQFitter()
        model = fitter(self.gauss, self.xdata, self.ydata,
                       estimate_jacobian=True)
        withw = fitter(self.gauss, self.xdata, self.ydata,
                       estimate_jacobian=True, weights=np.ones_like(self.xdata))
        assert_allclose(model.parameters, withw.parameters, rtol=10 ** (-4))
        # part 2: weights are 0 or 1 (effectively, they are a mask)
        weights = np.zeros_like(self.xdata)
        weights[::2] = 1.
        mask = weights >= 1.
        # Fitting the masked subset must agree with zero-weighting the
        # excluded points on the full data set.
        model = fitter(self.gauss, self.xdata[mask], self.ydata[mask],
                       estimate_jacobian=True)
        withw = fitter(self.gauss, self.xdata, self.ydata,
                       estimate_jacobian=True, weights=weights)
        assert_allclose(model.parameters, withw.parameters, rtol=10 ** (-4))
    @pytest.mark.filterwarnings(r'ignore:.* Maximum number of iterations reached')
    @pytest.mark.filterwarnings(r'ignore:Values in x were outside bounds during a minimize step, clipping to bounds')
    @pytest.mark.parametrize('fitter_class', fitters)
    def test_fitter_against_LevMar(self, fitter_class):
        """Tests results from non-linear fitters against `LevMarLSQFitter`."""
        levmar = LevMarLSQFitter()
        fitter = fitter_class()
        # This emits a warning from fitter that we need to ignore with
        # pytest.mark.filterwarnings above.
        new_model = fitter(self.gauss, self.xdata, self.ydata)
        model = levmar(self.gauss, self.xdata, self.ydata)
        assert_allclose(model.parameters, new_model.parameters,
                        rtol=10 ** (-4))
    @pytest.mark.filterwarnings(r'ignore:Values in x were outside bounds during a minimize step, clipping to bounds')
    def test_LSQ_SLSQP_with_constraints(self):
        """
        Runs `LevMarLSQFitter` and `SLSQPLSQFitter` on a model with
        constraints.
        """
        g1 = models.Gaussian1D(100, 5, stddev=1)
        # Fix the mean so both fitters solve the same constrained problem.
        g1.mean.fixed = True
        fitter = LevMarLSQFitter()
        fslsqp = SLSQPLSQFitter()
        slsqp_model = fslsqp(g1, self.xdata, self.ydata)
        model = fitter(g1, self.xdata, self.ydata)
        assert_allclose(model.parameters, slsqp_model.parameters,
                        rtol=10 ** (-4))
    def test_LevMar_with_weights(self):
        """
        Tests that issue #11581 has been solved.

        A weighted LevMar fit of a linear-in-parameters model must agree
        with the weighted linear least-squares solution.
        """
        np.random.seed(42)
        norder = 2
        fitter1 = LevMarLSQFitter()
        fitter2 = LinearLSQFitter()
        model = models.Polynomial1D(norder)
        npts = 10000
        # True polynomial coefficients: c0, c1, c2.
        c = [2.0, -10.0, 7.0]
        tw = np.random.uniform(0.0, 10.0, npts)
        tx = np.random.uniform(0.0, 10.0, npts)
        ty = c[0] + c[1] * tx + c[2] * (tx ** 2)
        ty += np.random.normal(0.0, 1.5, npts)
        with pytest.warns(AstropyUserWarning, match=r'Model is linear in parameters'):
            tf1 = fitter1(model, tx, ty, weights=tw)
        tf2 = fitter2(model, tx, ty, weights=tw)
        assert_allclose(tf1.parameters, tf2.parameters,
                        atol=10 ** (-16))
        assert_allclose(tf1.parameters, c,
                        rtol=10 ** (-2), atol=10 ** (-2))
        # Smoke test: weighted fit of a genuinely non-linear model
        # (result intentionally unused).
        model = models.Gaussian1D()
        fitter1(model, tx, ty, weights=tw)
        # Repeat the weighted LevMar-vs-linear comparison in 2-D.
        model = models.Polynomial2D(norder)
        nxpts = 100
        nypts = 150
        npts = nxpts * nypts
        c = [1.0, 4.0, 7.0, -8.0, -9.0, -3.0]
        tw = np.random.uniform(0.0, 10.0, npts).reshape(nxpts, nypts)
        tx = np.random.uniform(0.0, 10.0, npts).reshape(nxpts, nypts)
        ty = np.random.uniform(0.0, 10.0, npts).reshape(nxpts, nypts)
        tz = c[0] + c[1] * tx + c[2] * (tx ** 2) + c[3] * ty + c[4] * (ty ** 2) + c[5] * tx * ty
        tz += np.random.normal(0.0, 1.5, npts).reshape(nxpts, nypts)
        with pytest.warns(AstropyUserWarning, match=r'Model is linear in parameters'):
            tf1 = fitter1(model, tx, ty, tz, weights=tw)
        tf2 = fitter2(model, tx, ty, tz, weights=tw)
        assert_allclose(tf1.parameters, tf2.parameters,
                        atol=10 ** (-16))
        assert_allclose(tf1.parameters, c,
                        rtol=10 ** (-2), atol=10 ** (-2))
    def test_simplex_lsq_fitter(self):
        """A basic test for the `SimplexLSQ` fitter."""
        class Rosenbrock(Fittable2DModel):
            # Rosenbrock "banana" function recast as a fittable 2-D model.
            a = Parameter()
            b = Parameter()
            @staticmethod
            def evaluate(x, y, a, b):
                return (a - x) ** 2 + b * (y - x ** 2) ** 2
        x = y = np.linspace(-3.0, 3.0, 100)
        with NumpyRNGContext(_RANDOM_SEED):
            z = Rosenbrock.evaluate(x, y, 1.0, 100.0)
            z += np.random.normal(0., 0.1, size=z.shape)
        fitter = SimplexLSQFitter()
        # Start at the true parameters; the fit should stay close to them.
        r_i = Rosenbrock(1, 100)
        r_f = fitter(r_i, x, y, z)
        assert_allclose(r_f.parameters, [1.0, 100.0], rtol=1e-2)
    def test_param_cov(self):
        """
        Tests that the 'param_cov' fit_info entry gets the right answer for
        *linear* least squares, where the answer is exact
        """
        a = 2
        b = 100
        with NumpyRNGContext(_RANDOM_SEED):
            x = np.linspace(0, 1, 100)
            # y scatter is amplitude ~1 to make sure covarience is
            # non-negligible
            y = x*a + b + np.random.randn(len(x))
        # first compute the ordinary least squares covariance matrix
        X = np.vstack([x, np.ones(len(x))]).T
        beta = np.matmul(np.matmul(np.linalg.inv(np.matmul(X.T, X)), X.T), y.T)
        # Unbiased residual variance estimate (n - p degrees of freedom).
        s2 = (np.sum((y - np.matmul(X, beta).ravel())**2) /
              (len(y) - len(beta)))
        olscov = np.linalg.inv(np.matmul(X.T, X)) * s2
        # now do the non-linear least squares fit
        mod = models.Linear1D(a, b)
        fitter = LevMarLSQFitter()
        with pytest.warns(AstropyUserWarning,
                          match=r'Model is linear in parameters'):
            fmod = fitter(mod, x, y)
        assert_allclose(fmod.parameters, beta.ravel())
        assert_allclose(olscov, fitter.fit_info['param_cov'])
class TestEntryPoint:
    """Tests population of fitting with entry point fitters"""
    def setup_class(self):
        # Raised when a code path that should have warned/raised did not.
        self.exception_not_thrown = Exception("The test should not have gotten here. There was no exception thrown")
    def successfulimport(self):
        # This should work: returns a valid Fitter subclass.
        class goodclass(Fitter):
            __name__ = "GoodClass"
        return goodclass
    def raiseimporterror(self):
        # This should fail as it raises an Import Error
        raise ImportError
    def returnbadfunc(self):
        def badfunc():
            # This should import but it should fail type check
            pass
        return badfunc
    def returnbadclass(self):
        # This should import But it should fail subclass type check
        class badclass:
            pass
        return badclass
    def test_working(self):
        """This should work fine"""
        mock_entry_working = mock.create_autospec(EntryPoint)
        mock_entry_working.name = "Working"
        # ``load`` is replaced by a callable returning a valid Fitter.
        mock_entry_working.load = self.successfulimport
        populate_entry_points([mock_entry_working])
    def test_import_error(self):
        """This raises an import error on load to test that it is handled correctly"""
        # Promote warnings to errors so the expected AstropyUserWarning
        # becomes a catchable exception.
        with warnings.catch_warnings():
            warnings.filterwarnings('error')
            try:
                mock_entry_importerror = mock.create_autospec(EntryPoint)
                mock_entry_importerror.name = "IErr"
                mock_entry_importerror.load = self.raiseimporterror
                populate_entry_points([mock_entry_importerror])
            except AstropyUserWarning as w:
                if "ImportError" in w.args[0]: # any error for this case should have this in it.
                    pass
                else:
                    raise w
            else:
                raise self.exception_not_thrown
    def test_bad_func(self):
        """This returns a function which fails the type check"""
        with warnings.catch_warnings():
            warnings.filterwarnings('error')
            try:
                mock_entry_badfunc = mock.create_autospec(EntryPoint)
                mock_entry_badfunc.name = "BadFunc"
                mock_entry_badfunc.load = self.returnbadfunc
                populate_entry_points([mock_entry_badfunc])
            except AstropyUserWarning as w:
                if "Class" in w.args[0]: # any error for this case should have this in it.
                    pass
                else:
                    raise w
            else:
                raise self.exception_not_thrown
    def test_bad_class(self):
        """This returns a class which doesn't inherient from fitter """
        with warnings.catch_warnings():
            warnings.filterwarnings('error')
            try:
                mock_entry_badclass = mock.create_autospec(EntryPoint)
                mock_entry_badclass.name = "BadClass"
                mock_entry_badclass.load = self.returnbadclass
                populate_entry_points([mock_entry_badclass])
            except AstropyUserWarning as w:
                if 'modeling.Fitter' in w.args[0]: # any error for this case should have this in it.
                    pass
                else:
                    raise w
            else:
                raise self.exception_not_thrown
@pytest.mark.skipif('not HAS_SCIPY')
class Test1DFittingWithOutlierRemoval:
    """1-D Gaussian fitting combined with sigma-clip outlier rejection."""
    def setup_class(self):
        self.x = np.linspace(-5., 5., 200)
        # Ground truth: amplitude, mean, stddev of the noiseless Gaussian.
        self.model_params = (3.0, 1.3, 0.8)
        def func(p, x):
            return p[0]*np.exp(-0.5*(x - p[1])**2/p[2]**2)
        self.y = func(self.model_params, self.x)
    @pytest.mark.filterwarnings('ignore:The fit may be unsuccessful')
    @pytest.mark.filterwarnings(r'ignore:Values in x were outside bounds during a minimize step, clipping to bounds')
    def test_with_fitters_and_sigma_clip(self):
        """Each fitter wrapped in FittingWithOutlierRemoval should recover
        the true parameters despite ~25% strongly-deviant samples."""
        import scipy.stats as stats
        np.random.seed(0)
        # Bernoulli mask selects which samples get large outlier noise.
        c = stats.bernoulli.rvs(0.25, size=self.x.shape)
        self.y += (np.random.normal(0., 0.2, self.x.shape) +
                   c*np.random.normal(3.0, 5.0, self.x.shape))
        g_init = models.Gaussian1D(amplitude=1., mean=0, stddev=1.)
        # test with Levenberg-Marquardt Least Squares fitter
        fit = FittingWithOutlierRemoval(LevMarLSQFitter(), sigma_clip,
                                        niter=3, sigma=3.0)
        fitted_model, _ = fit(g_init, self.x, self.y)
        assert_allclose(fitted_model.parameters, self.model_params, rtol=1e-1)
        # test with Sequential Least Squares Programming fitter
        fit = FittingWithOutlierRemoval(SLSQPLSQFitter(), sigma_clip,
                                        niter=3, sigma=3.0)
        fitted_model, _ = fit(g_init, self.x, self.y)
        assert_allclose(fitted_model.parameters, self.model_params, rtol=1e-1)
        # test with Simplex LSQ fitter
        fit = FittingWithOutlierRemoval(SimplexLSQFitter(), sigma_clip,
                                        niter=3, sigma=3.0)
        fitted_model, _ = fit(g_init, self.x, self.y)
        assert_allclose(fitted_model.parameters, self.model_params, atol=1e-1)
@pytest.mark.skipif('not HAS_SCIPY')
class Test2DFittingWithOutlierRemoval:
    """2-D Gaussian fitting combined with sigma-clip outlier rejection."""
    def setup_class(self):
        self.y, self.x = np.mgrid[-3:3:128j, -3:3:128j]
        # Ground truth: amplitude, x_mean, y_mean, x_stddev, y_stddev.
        self.model_params = (3.0, 1.0, 0.0, 0.8, 0.8)
        def Gaussian_2D(p, pos):
            return p[0]*np.exp(-0.5*(pos[0] - p[2])**2 / p[4]**2 -
                               0.5*(pos[1] - p[1])**2 / p[3]**2)
        self.z = Gaussian_2D(self.model_params, np.array([self.y, self.x]))
    def initial_guess(self, data, pos):
        y = pos[0]
        x = pos[1]
        """computes the centroid of the data as the initial guess for the
        center position"""
        # NOTE(review): the string above is misplaced (it is not a real
        # docstring since it does not open the function body); left untouched
        # to keep the code byte-identical.
        # Intensity-weighted centroid of the data.
        wx = x * data
        wy = y * data
        total_intensity = np.sum(data)
        x_mean = np.sum(wx) / total_intensity
        y_mean = np.sum(wy) / total_intensity
        # Convert the centroid from coordinate units into rounded pixel
        # indices so the amplitude can be read off the data array.
        x_to_pixel = x[0].size / (x[x[0].size - 1][x[0].size - 1] - x[0][0])
        y_to_pixel = y[0].size / (y[y[0].size - 1][y[0].size - 1] - y[0][0])
        x_pos = np.around(x_mean * x_to_pixel + x[0].size / 2.).astype(int)
        y_pos = np.around(y_mean * y_to_pixel + y[0].size / 2.).astype(int)
        amplitude = data[y_pos][x_pos]
        return amplitude, x_mean, y_mean
    @pytest.mark.filterwarnings('ignore:The fit may be unsuccessful')
    @pytest.mark.filterwarnings(r'ignore:Values in x were outside bounds during a minimize step, clipping to bounds')
    def test_with_fitters_and_sigma_clip(self):
        """Each wrapped fitter should recover the true parameters despite
        ~25% of the pixels being corrupted as outliers."""
        import scipy.stats as stats
        np.random.seed(0)
        # Bernoulli mask selects which pixels receive large outlier noise.
        c = stats.bernoulli.rvs(0.25, size=self.z.shape)
        self.z += (np.random.normal(0., 0.2, self.z.shape) +
                   c*np.random.normal(self.z, 2.0, self.z.shape))
        guess = self.initial_guess(self.z, np.array([self.y, self.x]))
        g2_init = models.Gaussian2D(amplitude=guess[0], x_mean=guess[1],
                                    y_mean=guess[2], x_stddev=0.75,
                                    y_stddev=1.25)
        # test with Levenberg-Marquardt Least Squares fitter
        fit = FittingWithOutlierRemoval(LevMarLSQFitter(), sigma_clip,
                                        niter=3, sigma=3.)
        fitted_model, _ = fit(g2_init, self.x, self.y, self.z)
        assert_allclose(fitted_model.parameters[0:5], self.model_params,
                        atol=1e-1)
        # test with Sequential Least Squares Programming fitter
        fit = FittingWithOutlierRemoval(SLSQPLSQFitter(), sigma_clip, niter=3,
                                        sigma=3.)
        fitted_model, _ = fit(g2_init, self.x, self.y, self.z)
        assert_allclose(fitted_model.parameters[0:5], self.model_params,
                        atol=1e-1)
        # test with Simplex LSQ fitter
        fit = FittingWithOutlierRemoval(SimplexLSQFitter(), sigma_clip,
                                        niter=3, sigma=3.)
        fitted_model, _ = fit(g2_init, self.x, self.y, self.z)
        assert_allclose(fitted_model.parameters[0:5], self.model_params,
                        atol=1e-1)
def test_1d_set_fitting_with_outlier_removal():
    """Test model set fitting with outlier removal (issue #6819)"""
    model_set = models.Polynomial1D(2, n_models=2)
    clip_fitter = FittingWithOutlierRemoval(LinearLSQFitter(),
                                            sigma_clip, sigma=2.5, niter=3,
                                            cenfunc=np.ma.mean, stdfunc=np.ma.std)
    xdata = np.arange(10)
    targets = np.array([2.5*xdata - 4, 2*xdata*xdata + xdata + 10])
    targets[1, 5] = -1000  # outlier
    # The clipped fit must ignore the outlier and recover exact coefficients.
    model_set, _ = clip_fitter(model_set, xdata, targets)
    assert_allclose(model_set.c0, [-4., 10.], atol=1e-14)
    assert_allclose(model_set.c1, [2.5, 1.], atol=1e-14)
    assert_allclose(model_set.c2, [0., 2.], atol=1e-14)
def test_2d_set_axis_2_fitting_with_outlier_removal():
    """Test fitting 2D model set (axis 2) with outlier removal (issue #6819)"""
    model_set = models.Polynomial2D(1, n_models=2, model_set_axis=2)
    clip_fitter = FittingWithOutlierRemoval(LinearLSQFitter(),
                                            sigma_clip, sigma=2.5, niter=3,
                                            cenfunc=np.ma.mean, stdfunc=np.ma.std)
    y, x = np.mgrid[0:5, 0:5]
    # Stack the two target planes along a trailing (third) model-set axis.
    z = np.rollaxis(np.array([x+y, 1-0.1*x+0.2*y]), 0, 3)
    z[3, 3:5, 0] = 100.  # outliers
    model_set, _ = clip_fitter(model_set, x, y, z)
    assert_allclose(model_set.c0_0, [[[0., 1.]]], atol=1e-14)
    assert_allclose(model_set.c1_0, [[[1., -0.1]]], atol=1e-14)
    assert_allclose(model_set.c0_1, [[[1., 0.2]]], atol=1e-14)
@pytest.mark.skipif('not HAS_SCIPY')
class TestWeightedFittingWithOutlierRemoval:
    """Issue #7020 — weights must propagate through outlier-removal fitting."""
    def setup_class(self):
        # values of x,y not important as we fit y(x,y) = p0 model here
        self.y, self.x = np.mgrid[0:20, 0:20]
        self.z = np.mod(self.x + self.y, 2) * 2 - 1 # -1,1 chessboard
        self.weights = np.mod(self.x + self.y, 2) * 2 + 1 # 1,3 chessboard
        # Two huge outliers that sigma-clipping is expected to reject.
        self.z[0, 0] = 1000.0 # outlier
        self.z[0, 1] = 1000.0 # outlier
        # Flattened copies for the 1-D variants of the tests below.
        self.x1d = self.x.flatten()
        self.z1d = self.z.flatten()
        self.weights1d = self.weights.flatten()
    def test_1d_without_weights_without_sigma_clip(self):
        # Plain unweighted fit: the constant term is just the data mean.
        model = models.Polynomial1D(0)
        fitter = LinearLSQFitter()
        fit = fitter(model, self.x1d, self.z1d)
        assert_allclose(fit.parameters[0], self.z1d.mean(), atol=10**(-2))
    def test_1d_without_weights_with_sigma_clip(self):
        model = models.Polynomial1D(0)
        fitter = FittingWithOutlierRemoval(LinearLSQFitter(), sigma_clip,
                                           niter=3, sigma=3.)
        fit, mask = fitter(model, self.x1d, self.z1d)
        # Exactly the two planted outliers should be rejected.
        assert((~mask).sum() == self.z1d.size - 2)
        assert(mask[0] and mask[1])
        assert_allclose(fit.parameters[0], 0.0, atol=10**(-2)) # with removed outliers mean is 0.0
    def test_1d_with_weights_without_sigma_clip(self):
        model = models.Polynomial1D(0)
        fitter = LinearLSQFitter()
        fit = fitter(model, self.x1d, self.z1d, weights=self.weights1d)
        assert(fit.parameters[0] > 1.0) # outliers pulled it high
    def test_1d_with_weights_with_sigma_clip(self):
        """smoke test for #7020 - fails without fitting.py patch because weights does not propagate"""
        model = models.Polynomial1D(0)
        fitter = FittingWithOutlierRemoval(LinearLSQFitter(), sigma_clip,
                                           niter=3, sigma=3.)
        fit, filtered = fitter(model, self.x1d, self.z1d, weights=self.weights1d)
        assert(fit.parameters[0] > 10**(-2)) # weights pulled it > 0
        assert(fit.parameters[0] < 1.0) # outliers didn't pull it out of [-1:1] because they had been removed
    def test_1d_set_with_common_weights_with_sigma_clip(self):
        """added for #6819 (1D model set with weights in common)"""
        model = models.Polynomial1D(0, n_models=2)
        fitter = FittingWithOutlierRemoval(LinearLSQFitter(), sigma_clip,
                                           niter=3, sigma=3.)
        z1d = np.array([self.z1d, self.z1d])
        fit, filtered = fitter(model, self.x1d, z1d, weights=self.weights1d)
        assert_allclose(fit.parameters, [0.8, 0.8], atol=1e-14)
    def test_1d_set_with_weights_with_sigma_clip(self):
        """1D model set with separate weights"""
        model = models.Polynomial1D(0, n_models=2)
        fitter = FittingWithOutlierRemoval(LinearLSQFitter(), sigma_clip,
                                           niter=3, sigma=3.)
        z1d = np.array([self.z1d, self.z1d])
        weights = np.array([self.weights1d, self.weights1d])
        fit, filtered = fitter(model, self.x1d, z1d, weights=weights)
        assert_allclose(fit.parameters, [0.8, 0.8], atol=1e-14)
    def test_2d_without_weights_without_sigma_clip(self):
        model = models.Polynomial2D(0)
        fitter = LinearLSQFitter()
        fit = fitter(model, self.x, self.y, self.z)
        assert_allclose(fit.parameters[0], self.z.mean(), atol=10**(-2))
    def test_2d_without_weights_with_sigma_clip(self):
        model = models.Polynomial2D(0)
        fitter = FittingWithOutlierRemoval(LinearLSQFitter(), sigma_clip,
                                           niter=3, sigma=3.)
        fit, mask = fitter(model, self.x, self.y, self.z)
        # Exactly the two planted outliers should be rejected.
        assert((~mask).sum() == self.z.size - 2)
        assert(mask[0, 0] and mask[0, 1])
        assert_allclose(fit.parameters[0], 0.0, atol=10**(-2))
    def test_2d_with_weights_without_sigma_clip(self):
        model = models.Polynomial2D(0)
        fitter = LevMarLSQFitter() # LinearLSQFitter doesn't handle weights properly in 2D
        with pytest.warns(AstropyUserWarning,
                          match=r'Model is linear in parameters'):
            fit = fitter(model, self.x, self.y, self.z, weights=self.weights)
        assert(fit.parameters[0] > 1.0) # outliers pulled it high
    def test_2d_linear_with_weights_without_sigma_clip(self):
        model = models.Polynomial2D(0)
        fitter = LinearLSQFitter() # LinearLSQFitter doesn't handle weights properly in 2D
        fit = fitter(model, self.x, self.y, self.z, weights=self.weights)
        assert(fit.parameters[0] > 1.0) # outliers pulled it high
    def test_2d_with_weights_with_sigma_clip(self):
        """smoke test for #7020 - fails without fitting.py patch because
        weights does not propagate"""
        model = models.Polynomial2D(0)
        fitter = FittingWithOutlierRemoval(LevMarLSQFitter(), sigma_clip,
                                           niter=3, sigma=3.)
        with pytest.warns(AstropyUserWarning,
                          match=r'Model is linear in parameters'):
            fit, filtered = fitter(model, self.x, self.y, self.z,
                                   weights=self.weights)
        assert(fit.parameters[0] > 10**(-2)) # weights pulled it > 0
        assert(fit.parameters[0] < 1.0) # outliers didn't pull it out of [-1:1] because they had been removed
    def test_2d_linear_with_weights_with_sigma_clip(self):
        """same as test above with a linear fitter."""
        model = models.Polynomial2D(0)
        fitter = FittingWithOutlierRemoval(LinearLSQFitter(), sigma_clip,
                                           niter=3, sigma=3.)
        fit, filtered = fitter(model, self.x, self.y, self.z,
                               weights=self.weights)
        assert(fit.parameters[0] > 10**(-2)) # weights pulled it > 0
        assert(fit.parameters[0] < 1.0) # outliers didn't pull it out of [-1:1] because they had been removed
@pytest.mark.skipif('not HAS_SCIPY')
def test_fitters_with_weights():
    """Issue #5737 """
    Xin, Yin = np.mgrid[0:21, 0:21]
    fitter = LevMarLSQFitter()
    with NumpyRNGContext(_RANDOM_SEED):
        zsig = np.random.normal(0, 0.01, size=Xin.shape)
    # Non-linear model
    truth_g = models.Gaussian2D(10, 10, 9, 2, 3)
    gmod = fitter(models.Gaussian2D(15, 7, 8, 1.3, 1.2), Xin, Yin,
                  truth_g(Xin, Yin) + zsig)
    assert_allclose(gmod.parameters, truth_g.parameters, atol=10 ** (-2))
    # Linear model
    truth_p = models.Polynomial2D(3)
    truth_p.parameters = np.arange(10)/1.2
    with pytest.warns(AstropyUserWarning,
                      match=r'Model is linear in parameters'):
        pmod = fitter(models.Polynomial2D(3), Xin, Yin,
                      truth_p(Xin, Yin) + zsig)
    assert_allclose(pmod.parameters, truth_p.parameters, atol=10 ** (-2))
def test_linear_fitter_with_weights():
    """Regression test for #7035"""
    Xin, Yin = np.mgrid[0:21, 0:21]
    with NumpyRNGContext(_RANDOM_SEED):
        zsig = np.random.normal(0, 0.01, size=Xin.shape)
    truth = models.Polynomial2D(3)
    truth.parameters = np.arange(10)/1.2
    # Inverse-variance weights; the weighted linear fit must recover the
    # true coefficients to within the noise scale.
    fitted = LinearLSQFitter()(models.Polynomial2D(3), Xin, Yin,
                               truth(Xin, Yin) + zsig, weights=zsig**(-2))
    assert_allclose(fitted.parameters, truth.parameters, atol=10 ** (-2))
def test_linear_fitter_with_weights_flat():
    """Same as the above #7035 test but with flattened inputs"""
    Xin, Yin = np.mgrid[0:21, 0:21]
    Xin, Yin = Xin.flatten(), Yin.flatten()
    with NumpyRNGContext(_RANDOM_SEED):
        zsig = np.random.normal(0, 0.01, size=Xin.shape)
    truth = models.Polynomial2D(3)
    truth.parameters = np.arange(10)/1.2
    # 1-D (flattened) coordinate arrays must be handled identically.
    fitted = LinearLSQFitter()(models.Polynomial2D(3), Xin, Yin,
                               truth(Xin, Yin) + zsig, weights=zsig**(-2))
    assert_allclose(fitted.parameters, truth.parameters, atol=10 ** (-2))
@pytest.mark.skipif('not HAS_SCIPY')
@pytest.mark.filterwarnings('ignore:The fit may be unsuccessful')
def test_fitters_interface():
    """
    Test that ``**kwargs`` work with all optimizers.
    This is a basic smoke test.
    """
    model = models.Gaussian1D(10, 4, .3)
    x = np.arange(21)
    y = model(x)
    kwargs = {'maxiter': 77, 'verblevel': 1, 'epsilon': 1e-2, 'acc': 1e-6}
    # SLSQP accepts the full keyword set.
    _ = SLSQPLSQFitter()(model, x, y, **kwargs)
    # Simplex takes no 'epsilon'.
    _ = SimplexLSQFitter()(model, x, y,
                           **{'maxiter': 77, 'verblevel': 1, 'acc': 1e-6})
    # LevMar takes no 'verblevel'.
    kwargs.pop('verblevel')
    _ = LevMarLSQFitter()(model, x, y, **kwargs)
@pytest.mark.skipif('not HAS_SCIPY')
@pytest.mark.parametrize('fitter_class', [SLSQPLSQFitter, SimplexLSQFitter])
def test_optimizers(fitter_class):
    """
    Exercise the optimizer wrappers' tunable attributes (maxiter, eps, acc),
    their repr, and the fit_info produced from a mocked ``opt_method`` call.
    """
    fitter = fitter_class()
    # Test maxiter
    assert fitter._opt_method.maxiter == 100
    fitter._opt_method.maxiter = 1000
    assert fitter._opt_method.maxiter == 1000
    # Test eps
    assert fitter._opt_method.eps == np.sqrt(np.finfo(float).eps)
    fitter._opt_method.eps = 1e-16
    assert fitter._opt_method.eps == 1e-16
    # Test acc
    assert fitter._opt_method.acc == 1e-7
    fitter._opt_method.acc = 1e-16
    assert fitter._opt_method.acc == 1e-16
    # Test repr
    assert repr(fitter._opt_method) ==\
        f"{fitter._opt_method.__class__.__name__}()"
    fitparams = mk.MagicMock()
    final_func_val = mk.MagicMock()
    numiter = mk.MagicMock()
    funcalls = mk.MagicMock()
    exit_mode = 1
    mess = mk.MagicMock()
    xtol = mk.MagicMock()
    # The two optimizers return differently-shaped tuples and populate
    # different fit_info keys.
    if fitter_class == SLSQPLSQFitter:
        return_value = (fitparams, final_func_val, numiter, exit_mode, mess)
        fit_info = {
            'final_func_val': final_func_val,
            'numiter': numiter,
            'exit_mode': exit_mode,
            'message': mess
        }
    else:
        return_value = (fitparams, final_func_val, numiter, funcalls, exit_mode)
        fit_info = {
            'final_func_val': final_func_val,
            'numiter': numiter,
            'exit_mode': exit_mode,
            'num_function_calls': funcalls
        }
    with mk.patch.object(fitter._opt_method.__class__, 'opt_method',
                         return_value=return_value):
        # exit_mode == 1 signals an unsuccessful fit, hence the warning.
        with pytest.warns(AstropyUserWarning, match=r"The fit may be unsuccessful; .*"):
            assert (fitparams, fit_info) == fitter._opt_method(mk.MagicMock(), mk.MagicMock(),
                                                               mk.MagicMock(), xtol=xtol)
    assert fit_info == fitter._opt_method.fit_info
    # BUG FIX: these comparisons previously lacked ``assert`` and were
    # no-op expression statements, so the checks never ran.
    if isinstance(fitter, SLSQPLSQFitter):
        # SLSQP does not consume ``xtol``; ``acc`` keeps the value set above.
        assert fitter._opt_method.acc == 1e-16
    else:
        # Simplex maps the ``xtol`` keyword onto its ``acc`` attribute.
        assert fitter._opt_method.acc == xtol
@mk.patch.multiple(Optimization, __abstractmethods__=set())
def test_Optimization_abstract_call():
    """Calling the abstract Optimization base must raise NotImplementedError."""
    # Patching __abstractmethods__ empty lets the abstract base instantiate.
    optimization = Optimization(mk.MagicMock())
    with pytest.raises(NotImplementedError) as err:
        optimization()
    assert str(err.value) == "Subclasses should implement this method"
def test_fitting_with_outlier_removal_niter():
    """
    Test that FittingWithOutlierRemoval stops prior to reaching niter if the
    set of masked points has converged and correctly reports the actual number
    of iterations performed.
    """
    # 2 rows with some noise around a constant level and 1 deviant point:
    xdata = np.arange(25)
    with NumpyRNGContext(_RANDOM_SEED):
        ydata = np.random.normal(loc=10., scale=1., size=(2, 25))
    ydata[0, 14] = 100.
    # Fit 2 models with up to 5 iterations (should only take 2):
    clip_fitter = FittingWithOutlierRemoval(
        fitter=LinearLSQFitter(), outlier_func=sigma_clip, niter=5,
        sigma_lower=3., sigma_upper=3., maxiters=1
    )
    _, mask = clip_fitter(models.Chebyshev1D(2, n_models=2), xdata, ydata)
    # Confirm that only the deviant point was rejected, in 2 iterations:
    assert_equal(np.where(mask), [[0], [14]])
    assert clip_fitter.fit_info['niter'] == 2
    # Refit just the first row without any rejection iterations, to ensure
    # there are no regressions for that special case:
    clip_fitter = FittingWithOutlierRemoval(
        fitter=LinearLSQFitter(), outlier_func=sigma_clip, niter=0,
        sigma_lower=3., sigma_upper=3., maxiters=1
    )
    _, mask = clip_fitter(models.Chebyshev1D(2), xdata, ydata[0])
    # Confirm that there were no iterations or rejected points:
    assert mask.sum() == 0
    assert clip_fitter.fit_info['niter'] == 0
@pytest.mark.skipif('not HAS_SCIPY')
class TestFittingUncertanties:
    """
    Test that parameter covariance is calculated correctly for the fitters
    that do so (currently LevMarLSQFitter, LinearLSQFitter).
    """
    # NOTE(review): the class name misspells "Uncertainties"; renaming would
    # change the public test identifier, so it is left as-is.
    example_1D_models = [models.Polynomial1D(2), models.Linear1D()]
    example_1D_sets = [models.Polynomial1D(2, n_models=2, model_set_axis=False),
                       models.Linear1D(n_models=2, slope=[1., 1.], intercept=[0, 0])]
    def setup_class(self):
        self.x = np.arange(10)
        # Unseeded random grids/noise for the tests below.
        self.x_grid = np.random.randint(0, 100, size=100).reshape(10, 10)
        self.y_grid = np.random.randint(0, 100, size=100).reshape(10, 10)
        self.rand_grid = np.random.random(100).reshape(10, 10)
        self.rand = self.rand_grid[0]
    @pytest.mark.parametrize(('single_model', 'model_set'),
                             list(zip(example_1D_models, example_1D_sets)))
    def test_1d_models(self, single_model, model_set):
        """ Test that fitting uncertainties are computed correctly for 1D models
        and 1D model sets. Use covariance/stds given by LevMarLSQFitter as
        a benchmark since they are returned by the numpy fitter.
        """
        levmar_fitter = LevMarLSQFitter(calc_uncertainties=True)
        linlsq_fitter = LinearLSQFitter(calc_uncertainties=True)
        # test 1D single models
        # fit single model w/ nonlinear fitter
        y = single_model(self.x) + self.rand
        with pytest.warns(AstropyUserWarning,
                          match=r'Model is linear in parameters'):
            fit_model_levmar = levmar_fitter(single_model, self.x, y)
        cov_model_levmar = fit_model_levmar.cov_matrix.cov_matrix
        # fit single model w/ linlsq fitter
        fit_model_linlsq = linlsq_fitter(single_model, self.x, y)
        cov_model_linlsq = fit_model_linlsq.cov_matrix.cov_matrix
        # check covariance, stds computed correctly computed
        assert_allclose(cov_model_linlsq, cov_model_levmar)
        assert_allclose(np.sqrt(np.diag(cov_model_linlsq)),
                        fit_model_linlsq.stds.stds)
        # now test 1D model sets
        # fit set of models w/ linear fitter
        y = model_set(self.x, model_set_axis=False) +\
            np.array([self.rand, self.rand])
        fit_1d_set_linlsq = linlsq_fitter(model_set, self.x, y)
        cov_1d_set_linlsq = [j.cov_matrix for j in
                             fit_1d_set_linlsq.cov_matrix]
        # make sure cov matrix from single model fit w/ levmar fitter matches
        # the cov matrix of first model in the set
        assert_allclose(cov_1d_set_linlsq[0], cov_model_levmar)
        assert_allclose(np.sqrt(np.diag(cov_1d_set_linlsq[0])),
                        fit_1d_set_linlsq.stds[0].stds)
    def test_2d_models(self):
        """
        Test that fitting uncertainties are computed correctly for 2D models
        and 2D model sets. Use covariance/stds given by LevMarLSQFitter as
        a benchmark since they are returned by the numpy fitter.
        """
        levmar_fitter = LevMarLSQFitter(calc_uncertainties=True)
        linlsq_fitter = LinearLSQFitter(calc_uncertainties=True)
        single_model = models.Polynomial2D(2, c0_0=2)
        model_set = models.Polynomial2D(degree=2, n_models=2, c0_0=[2, 3],
                                        model_set_axis=False)
        # fit single model w/ nonlinear fitter
        z_grid = single_model(self.x_grid, self.y_grid) + self.rand_grid
        with pytest.warns(AstropyUserWarning,
                          match=r'Model is linear in parameters'):
            fit_model_levmar = levmar_fitter(single_model, self.x_grid,
                                             self.y_grid, z_grid)
        cov_model_levmar = fit_model_levmar.cov_matrix.cov_matrix
        # fit single model w/ linear fitter
        fit_model_linlsq = linlsq_fitter(single_model, self.x_grid,
                                         self.y_grid, z_grid)
        cov_model_linlsq = fit_model_linlsq.cov_matrix.cov_matrix
        assert_allclose(cov_model_levmar, cov_model_linlsq)
        assert_allclose(np.sqrt(np.diag(cov_model_linlsq)),
                        fit_model_linlsq.stds.stds)
        # fit 2d model set
        z_grid = model_set(self.x_grid, self.y_grid) + np.array((self.rand_grid,
                                                                 self.rand_grid))
        fit_2d_set_linlsq = linlsq_fitter(model_set, self.x_grid, self.y_grid,
                                          z_grid)
        cov_2d_set_linlsq = [j.cov_matrix for j in fit_2d_set_linlsq.cov_matrix]
        # make sure cov matrix from single model fit w/ levmar fitter matches
        # the cov matrix of first model in the set
        assert_allclose(cov_2d_set_linlsq[0], cov_model_levmar)
        assert_allclose(np.sqrt(np.diag(cov_2d_set_linlsq[0])),
                        fit_2d_set_linlsq.stds[0].stds)
    def test_covariance_std_printing_indexing(self, capsys):
        """
        Test printing methods and indexing.
        """
        # test str representation for Covariance/stds
        fitter = LinearLSQFitter(calc_uncertainties=True)
        mod = models.Linear1D()
        fit_mod = fitter(mod, self.x, mod(self.x)+self.rand)
        print(fit_mod.cov_matrix)
        captured = capsys.readouterr()
        assert "slope | 0.001" in captured.out
        assert "intercept| -0.006, 0.041" in captured.out
        print(fit_mod.stds)
        captured = capsys.readouterr()
        assert "slope | 0.038" in captured.out
        assert "intercept| 0.203" in captured.out
        # test 'pprint' for Covariance/stds
        print(fit_mod.cov_matrix.pprint(round_val=5, max_lines=1))
        captured = capsys.readouterr()
        assert "slope | 0.00144" in captured.out
        assert "intercept" not in captured.out
        print(fit_mod.stds.pprint(max_lines=1, round_val=5))
        captured = capsys.readouterr()
        assert "slope | 0.03799" in captured.out
        assert "intercept" not in captured.out
        # test indexing for Covariance class.
        assert fit_mod.cov_matrix[0, 0] == fit_mod.cov_matrix['slope', 'slope']
        # test indexing for stds class.
        assert fit_mod.stds[1] == fit_mod.stds['intercept']
|
pllim/astropy
|
astropy/modeling/tests/test_fitters.py
|
Python
|
bsd-3-clause
| 49,705
|
[
"Gaussian"
] |
fd857ada0fd26e96966ed5171cf85e8a2ca5c1dc8976c3d3a924bf6a15deedb0
|
#pylint: disable=no-init
from __future__ import (absolute_import, division, print_function)
import stresstesting
from mantid.simpleapi import *
#----------------------------------------------------------------------
class ConvertToMDworkflow(stresstesting.MantidStressTest):
    """
    System test for the ConvertToMD accumulation workflow: several
    simulated rotated-crystal inelastic runs (cloned from one event
    workspace with different goniometer angles Psi) are converted and
    appended into a single 4D MD workspace, which validate() compares
    against a stored reference file.
    """
    # Relative tolerance used by CompareMDWorkspaces in validate().
    tolerance = 1e-5

    def runTest(self):
        # let's load test event workspace, which has been already preprocessed and available in Mantid Test folder
        WS_Name='CNCS_7860_event'
        Load(Filename=WS_Name,OutputWorkspace=WS_Name)
        # this workspace has been obtained from an inelastic experiment with input energy Ei = 3.
        # Usually this energy is stored in workspace
        # but if it is not, we have to provide it for inelastic conversion to work.
        AddSampleLog(Workspace=WS_Name,LogName='Ei',LogText='3.0',LogType='Number')
        # disable multithreaded splitting as BoxID-s are assigned in random manner
        # AddSampleLog(Workspace=WS_Name,LogName='NUM_THREADS',LogText='0',LogType='Number')
        #
        # set up target ws name and remove target workspace with the same name which can occasionally exist.
        RezWS = 'WS_4D'
        try:
            # DeleteWorkspace raises ValueError when the workspace is absent,
            # which is the expected (and harmless) case on a fresh run.
            DeleteWorkspace(RezWS)
        except ValueError:
            print("Target ws ",RezWS," not found in analysis data service\n")
        #
        #---> Start loop over contributing files: Psi = 0, 5, 10, 15 degrees
        for i in range(0,20,5):
            # the following operations simulate different workspaces, obtained from experiment using rotating crystal;
            # For real experiment we usually just load these workspaces from nxspe files with proper Psi values defined there
            # and have to set up ub matrix
            SourceWS = 'SourcePart'+str(i)
            # ws emulation begin ---->
            CloneWorkspace(InputWorkspace=WS_Name,OutputWorkspace=SourceWS)
            # using scattering on a crystal with cubic lattice and 1,0,0 direction along the beam.
            SetUB(Workspace=SourceWS,a='1.4165',b='1.4165',c='1.4165',u='1,0,0',v='0,1,0')
            # rotated by proper number of degrees around axis Y
            AddSampleLog(Workspace=SourceWS,LogName='Psi',LogText=str(i)+'.0',LogType='Number Series')
            SetGoniometer(Workspace=SourceWS,Axis0='Psi,0,1,0,1')
            # ws emulation, end ---------------------------------------------------------------------------------------
            # OverwriteExisting=0 appends this run's contribution to the
            # accumulated MD workspace instead of replacing it.
            ConvertToMD(InputWorkspace=SourceWS,OutputWorkspace=RezWS,QDimensions='Q3D',QConversionScales='HKL',
                        OverwriteExisting=0,dEAnalysisMode='Direct',MinValues='-3,-3,-3,-1',MaxValues='3,3,3,3',
                        SplitInto="20,20,1,1")
            # delete source workspace from memory;
            DeleteWorkspace(SourceWS)

    def validate(self):
        """Returns the name of the workspace & file to compare"""
        self.tolerance = 1e-5
        #self.disableChecking.append('SpectraMap')
        #self.disableChecking.append('Instrument')
        result = 'WS_4D'
        reference = "ConvertToMDSample.nxs"
        valNames = [result,reference]
        Load(Filename=reference,OutputWorkspace=valNames[1])
        # CompareMDWorkspaces is driven directly through the algorithm
        # manager so individual comparison options can be set.
        checker = AlgorithmManager.create("CompareMDWorkspaces")
        checker.setLogging(True)
        checker.setPropertyValue("Workspace1",result)
        checker.setPropertyValue("Workspace2",valNames[1])
        checker.setPropertyValue("Tolerance", str(self.tolerance))
        # Box IDs are assigned non-deterministically, so ignore them.
        checker.setPropertyValue("IgnoreBoxID", "1")
        checker.setPropertyValue("CheckEvents", "1")
        checker.execute()
        if checker.getPropertyValue("Equals") != "1":
            # On mismatch, save the produced workspace to help debugging.
            print(" Workspaces do not match, result: ",checker.getPropertyValue("Result"))
            print(self.__class__.__name__)
            SaveMD(InputWorkspace=valNames[0],Filename=self.__class__.__name__+'-mismatch.nxs')
            return False
        return True
|
dymkowsk/mantid
|
Testing/SystemTests/tests/analysis/ConvertToMDworkflow.py
|
Python
|
gpl-3.0
| 3,907
|
[
"CRYSTAL"
] |
f17b94b1eff55bb53082d6972a2911e8ffcc2c85a1a10c6ccbc594d45a0e7237
|
# -*- coding: utf-8 -*-
# Copyright 2022 Google LLC
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
import os
import mock
import grpc
from grpc.experimental import aio
import math
import pytest
from proto.marshal.rules.dates import DurationRule, TimestampRule
from google.api_core import client_options
from google.api_core import exceptions as core_exceptions
from google.api_core import future
from google.api_core import gapic_v1
from google.api_core import grpc_helpers
from google.api_core import grpc_helpers_async
from google.api_core import operation
from google.api_core import operation_async # type: ignore
from google.api_core import operations_v1
from google.api_core import path_template
from google.auth import credentials as ga_credentials
from google.auth.exceptions import MutualTLSChannelError
from google.cloud.documentai_v1beta2.services.document_understanding_service import (
DocumentUnderstandingServiceAsyncClient,
)
from google.cloud.documentai_v1beta2.services.document_understanding_service import (
DocumentUnderstandingServiceClient,
)
from google.cloud.documentai_v1beta2.services.document_understanding_service import (
transports,
)
from google.cloud.documentai_v1beta2.types import document
from google.cloud.documentai_v1beta2.types import document_understanding
from google.cloud.documentai_v1beta2.types import geometry
from google.longrunning import operations_pb2
from google.oauth2 import service_account
from google.rpc import status_pb2 # type: ignore
import google.auth
def client_cert_source_callback():
    """Stand-in client-certificate source used by the mTLS tests.

    Returns:
        tuple[bytes, bytes]: placeholder (certificate, key) byte strings.
    """
    cert_bytes = b"cert bytes"
    key_bytes = b"key bytes"
    return (cert_bytes, key_bytes)
# If default endpoint is localhost, then default mtls endpoint will be the same.
# This method modifies the default endpoint so the client can produce a different
# mtls endpoint for endpoint testing purposes.
def modify_default_endpoint(client):
    """Return a substitute endpoint for endpoint-switching tests.

    Args:
        client: any object exposing a ``DEFAULT_ENDPOINT`` string attribute.

    Returns:
        str: ``"foo.googleapis.com"`` when the client's default endpoint
        refers to localhost, otherwise the client's default endpoint
        unchanged.
    """
    endpoint = client.DEFAULT_ENDPOINT
    if "localhost" in endpoint:
        return "foo.googleapis.com"
    return endpoint
def test__get_default_mtls_endpoint():
    """_get_default_mtls_endpoint rewrites *.googleapis.com hosts to their
    *.mtls.googleapis.com form and leaves None, already-mTLS, and
    non-Google hosts untouched."""
    api_endpoint = "example.googleapis.com"
    api_mtls_endpoint = "example.mtls.googleapis.com"
    sandbox_endpoint = "example.sandbox.googleapis.com"
    sandbox_mtls_endpoint = "example.mtls.sandbox.googleapis.com"
    non_googleapi = "api.example.com"

    # None passes through untouched.
    assert DocumentUnderstandingServiceClient._get_default_mtls_endpoint(None) is None
    assert (
        DocumentUnderstandingServiceClient._get_default_mtls_endpoint(api_endpoint)
        == api_mtls_endpoint
    )
    # An endpoint that is already mTLS is returned unchanged.
    assert (
        DocumentUnderstandingServiceClient._get_default_mtls_endpoint(api_mtls_endpoint)
        == api_mtls_endpoint
    )
    # Sandbox hosts get the mTLS label inserted before "sandbox".
    assert (
        DocumentUnderstandingServiceClient._get_default_mtls_endpoint(sandbox_endpoint)
        == sandbox_mtls_endpoint
    )
    assert (
        DocumentUnderstandingServiceClient._get_default_mtls_endpoint(
            sandbox_mtls_endpoint
        )
        == sandbox_mtls_endpoint
    )
    # Non-Google hosts are not rewritten.
    assert (
        DocumentUnderstandingServiceClient._get_default_mtls_endpoint(non_googleapi)
        == non_googleapi
    )
@pytest.mark.parametrize(
    "client_class",
    [DocumentUnderstandingServiceClient, DocumentUnderstandingServiceAsyncClient,],
)
def test_document_understanding_service_client_from_service_account_info(client_class):
    """from_service_account_info builds a client whose transport carries the
    credentials returned by service_account.Credentials.from_service_account_info."""
    creds = ga_credentials.AnonymousCredentials()
    with mock.patch.object(
        service_account.Credentials, "from_service_account_info"
    ) as factory:
        factory.return_value = creds
        info = {"valid": True}
        client = client_class.from_service_account_info(info)
        assert client.transport._credentials == creds
        assert isinstance(client, client_class)

        # Default host must be the service endpoint with the gRPC port.
        assert client.transport._host == "documentai.googleapis.com:443"
@pytest.mark.parametrize(
    "transport_class,transport_name",
    [
        (transports.DocumentUnderstandingServiceGrpcTransport, "grpc"),
        (transports.DocumentUnderstandingServiceGrpcAsyncIOTransport, "grpc_asyncio"),
    ],
)
def test_document_understanding_service_client_service_account_always_use_jwt(
    transport_class, transport_name
):
    """The transport calls with_always_use_jwt_access(True) only when
    always_use_jwt_access=True is requested."""
    # Case 1: JWT access enabled -> credentials are upgraded once.
    with mock.patch.object(
        service_account.Credentials, "with_always_use_jwt_access", create=True
    ) as use_jwt:
        creds = service_account.Credentials(None, None, None)
        transport = transport_class(credentials=creds, always_use_jwt_access=True)
        use_jwt.assert_called_once_with(True)

    # Case 2: JWT access disabled -> credentials are left as-is.
    with mock.patch.object(
        service_account.Credentials, "with_always_use_jwt_access", create=True
    ) as use_jwt:
        creds = service_account.Credentials(None, None, None)
        transport = transport_class(credentials=creds, always_use_jwt_access=False)
        use_jwt.assert_not_called()
@pytest.mark.parametrize(
    "client_class",
    [DocumentUnderstandingServiceClient, DocumentUnderstandingServiceAsyncClient,],
)
def test_document_understanding_service_client_from_service_account_file(client_class):
    """from_service_account_file and its from_service_account_json alias both
    build clients using credentials loaded from the given file path."""
    creds = ga_credentials.AnonymousCredentials()
    with mock.patch.object(
        service_account.Credentials, "from_service_account_file"
    ) as factory:
        factory.return_value = creds
        client = client_class.from_service_account_file("dummy/file/path.json")
        assert client.transport._credentials == creds
        assert isinstance(client, client_class)

        # The *_json spelling is an alias for the *_file constructor.
        client = client_class.from_service_account_json("dummy/file/path.json")
        assert client.transport._credentials == creds
        assert isinstance(client, client_class)

        assert client.transport._host == "documentai.googleapis.com:443"
def test_document_understanding_service_client_get_transport_class():
    """get_transport_class returns the gRPC transport both by default and
    when requested explicitly by name."""
    transport = DocumentUnderstandingServiceClient.get_transport_class()
    available_transports = [
        transports.DocumentUnderstandingServiceGrpcTransport,
    ]
    assert transport in available_transports

    transport = DocumentUnderstandingServiceClient.get_transport_class("grpc")
    assert transport == transports.DocumentUnderstandingServiceGrpcTransport
@pytest.mark.parametrize(
    "client_class,transport_class,transport_name",
    [
        (
            DocumentUnderstandingServiceClient,
            transports.DocumentUnderstandingServiceGrpcTransport,
            "grpc",
        ),
        (
            DocumentUnderstandingServiceAsyncClient,
            transports.DocumentUnderstandingServiceGrpcAsyncIOTransport,
            "grpc_asyncio",
        ),
    ],
)
@mock.patch.object(
    DocumentUnderstandingServiceClient,
    "DEFAULT_ENDPOINT",
    modify_default_endpoint(DocumentUnderstandingServiceClient),
)
@mock.patch.object(
    DocumentUnderstandingServiceAsyncClient,
    "DEFAULT_ENDPOINT",
    modify_default_endpoint(DocumentUnderstandingServiceAsyncClient),
)
def test_document_understanding_service_client_client_options(
    client_class, transport_class, transport_name
):
    """Client construction honors ClientOptions (api_endpoint,
    quota_project_id) and the GOOGLE_API_USE_MTLS_ENDPOINT /
    GOOGLE_API_USE_CLIENT_CERTIFICATE environment variables."""
    # Check that if channel is provided we won't create a new one.
    with mock.patch.object(
        DocumentUnderstandingServiceClient, "get_transport_class"
    ) as gtc:
        transport = transport_class(credentials=ga_credentials.AnonymousCredentials())
        client = client_class(transport=transport)
        gtc.assert_not_called()

    # Check that if channel is provided via str we will create a new one.
    with mock.patch.object(
        DocumentUnderstandingServiceClient, "get_transport_class"
    ) as gtc:
        client = client_class(transport=transport_name)
        gtc.assert_called()

    # Check the case api_endpoint is provided.
    options = client_options.ClientOptions(api_endpoint="squid.clam.whelk")
    with mock.patch.object(transport_class, "__init__") as patched:
        patched.return_value = None
        client = client_class(transport=transport_name, client_options=options)
        patched.assert_called_once_with(
            credentials=None,
            credentials_file=None,
            host="squid.clam.whelk",
            scopes=None,
            client_cert_source_for_mtls=None,
            quota_project_id=None,
            client_info=transports.base.DEFAULT_CLIENT_INFO,
            always_use_jwt_access=True,
        )

    # Check the case api_endpoint is not provided and GOOGLE_API_USE_MTLS_ENDPOINT is
    # "never".
    with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "never"}):
        with mock.patch.object(transport_class, "__init__") as patched:
            patched.return_value = None
            client = client_class(transport=transport_name)
            patched.assert_called_once_with(
                credentials=None,
                credentials_file=None,
                host=client.DEFAULT_ENDPOINT,
                scopes=None,
                client_cert_source_for_mtls=None,
                quota_project_id=None,
                client_info=transports.base.DEFAULT_CLIENT_INFO,
                always_use_jwt_access=True,
            )

    # Check the case api_endpoint is not provided and GOOGLE_API_USE_MTLS_ENDPOINT is
    # "always".
    with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "always"}):
        with mock.patch.object(transport_class, "__init__") as patched:
            patched.return_value = None
            client = client_class(transport=transport_name)
            patched.assert_called_once_with(
                credentials=None,
                credentials_file=None,
                host=client.DEFAULT_MTLS_ENDPOINT,
                scopes=None,
                client_cert_source_for_mtls=None,
                quota_project_id=None,
                client_info=transports.base.DEFAULT_CLIENT_INFO,
                always_use_jwt_access=True,
            )

    # Check the case api_endpoint is not provided and GOOGLE_API_USE_MTLS_ENDPOINT has
    # unsupported value.
    with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "Unsupported"}):
        with pytest.raises(MutualTLSChannelError):
            client = client_class(transport=transport_name)

    # Check the case GOOGLE_API_USE_CLIENT_CERTIFICATE has unsupported value.
    with mock.patch.dict(
        os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "Unsupported"}
    ):
        with pytest.raises(ValueError):
            client = client_class(transport=transport_name)

    # Check the case quota_project_id is provided
    options = client_options.ClientOptions(quota_project_id="octopus")
    with mock.patch.object(transport_class, "__init__") as patched:
        patched.return_value = None
        client = client_class(client_options=options, transport=transport_name)
        patched.assert_called_once_with(
            credentials=None,
            credentials_file=None,
            host=client.DEFAULT_ENDPOINT,
            scopes=None,
            client_cert_source_for_mtls=None,
            quota_project_id="octopus",
            client_info=transports.base.DEFAULT_CLIENT_INFO,
            always_use_jwt_access=True,
        )
@pytest.mark.parametrize(
    "client_class,transport_class,transport_name,use_client_cert_env",
    [
        (
            DocumentUnderstandingServiceClient,
            transports.DocumentUnderstandingServiceGrpcTransport,
            "grpc",
            "true",
        ),
        (
            DocumentUnderstandingServiceAsyncClient,
            transports.DocumentUnderstandingServiceGrpcAsyncIOTransport,
            "grpc_asyncio",
            "true",
        ),
        (
            DocumentUnderstandingServiceClient,
            transports.DocumentUnderstandingServiceGrpcTransport,
            "grpc",
            "false",
        ),
        (
            DocumentUnderstandingServiceAsyncClient,
            transports.DocumentUnderstandingServiceGrpcAsyncIOTransport,
            "grpc_asyncio",
            "false",
        ),
    ],
)
@mock.patch.object(
    DocumentUnderstandingServiceClient,
    "DEFAULT_ENDPOINT",
    modify_default_endpoint(DocumentUnderstandingServiceClient),
)
@mock.patch.object(
    DocumentUnderstandingServiceAsyncClient,
    "DEFAULT_ENDPOINT",
    modify_default_endpoint(DocumentUnderstandingServiceAsyncClient),
)
@mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "auto"})
def test_document_understanding_service_client_mtls_env_auto(
    client_class, transport_class, transport_name, use_client_cert_env
):
    """With GOOGLE_API_USE_MTLS_ENDPOINT=auto, the endpoint switches to the
    mTLS host exactly when a client certificate is available and
    GOOGLE_API_USE_CLIENT_CERTIFICATE is "true"."""
    # This tests the endpoint autoswitch behavior. Endpoint is autoswitched to the default
    # mtls endpoint, if GOOGLE_API_USE_CLIENT_CERTIFICATE is "true" and client cert exists.

    # Check the case client_cert_source is provided. Whether client cert is used depends on
    # GOOGLE_API_USE_CLIENT_CERTIFICATE value.
    with mock.patch.dict(
        os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": use_client_cert_env}
    ):
        options = client_options.ClientOptions(
            client_cert_source=client_cert_source_callback
        )
        with mock.patch.object(transport_class, "__init__") as patched:
            patched.return_value = None
            client = client_class(client_options=options, transport=transport_name)

            if use_client_cert_env == "false":
                expected_client_cert_source = None
                expected_host = client.DEFAULT_ENDPOINT
            else:
                expected_client_cert_source = client_cert_source_callback
                expected_host = client.DEFAULT_MTLS_ENDPOINT

            patched.assert_called_once_with(
                credentials=None,
                credentials_file=None,
                host=expected_host,
                scopes=None,
                client_cert_source_for_mtls=expected_client_cert_source,
                quota_project_id=None,
                client_info=transports.base.DEFAULT_CLIENT_INFO,
                always_use_jwt_access=True,
            )

    # Check the case ADC client cert is provided. Whether client cert is used depends on
    # GOOGLE_API_USE_CLIENT_CERTIFICATE value.
    with mock.patch.dict(
        os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": use_client_cert_env}
    ):
        with mock.patch.object(transport_class, "__init__") as patched:
            with mock.patch(
                "google.auth.transport.mtls.has_default_client_cert_source",
                return_value=True,
            ):
                with mock.patch(
                    "google.auth.transport.mtls.default_client_cert_source",
                    return_value=client_cert_source_callback,
                ):
                    # NOTE(review): `client` here still refers to the instance
                    # created in the previous block; the DEFAULT_*ENDPOINT
                    # class attributes read off it are the same either way.
                    if use_client_cert_env == "false":
                        expected_host = client.DEFAULT_ENDPOINT
                        expected_client_cert_source = None
                    else:
                        expected_host = client.DEFAULT_MTLS_ENDPOINT
                        expected_client_cert_source = client_cert_source_callback

                    patched.return_value = None
                    client = client_class(transport=transport_name)
                    patched.assert_called_once_with(
                        credentials=None,
                        credentials_file=None,
                        host=expected_host,
                        scopes=None,
                        client_cert_source_for_mtls=expected_client_cert_source,
                        quota_project_id=None,
                        client_info=transports.base.DEFAULT_CLIENT_INFO,
                        always_use_jwt_access=True,
                    )

    # Check the case client_cert_source and ADC client cert are not provided.
    with mock.patch.dict(
        os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": use_client_cert_env}
    ):
        with mock.patch.object(transport_class, "__init__") as patched:
            with mock.patch(
                "google.auth.transport.mtls.has_default_client_cert_source",
                return_value=False,
            ):
                patched.return_value = None
                client = client_class(transport=transport_name)
                patched.assert_called_once_with(
                    credentials=None,
                    credentials_file=None,
                    host=client.DEFAULT_ENDPOINT,
                    scopes=None,
                    client_cert_source_for_mtls=None,
                    quota_project_id=None,
                    client_info=transports.base.DEFAULT_CLIENT_INFO,
                    always_use_jwt_access=True,
                )
@pytest.mark.parametrize(
    "client_class",
    [DocumentUnderstandingServiceClient, DocumentUnderstandingServiceAsyncClient],
)
@mock.patch.object(
    DocumentUnderstandingServiceClient,
    "DEFAULT_ENDPOINT",
    modify_default_endpoint(DocumentUnderstandingServiceClient),
)
@mock.patch.object(
    DocumentUnderstandingServiceAsyncClient,
    "DEFAULT_ENDPOINT",
    modify_default_endpoint(DocumentUnderstandingServiceAsyncClient),
)
def test_document_understanding_service_client_get_mtls_endpoint_and_cert_source(
    client_class,
):
    """get_mtls_endpoint_and_cert_source resolves the (endpoint, cert source)
    pair from ClientOptions and the mTLS environment variables."""
    mock_client_cert_source = mock.Mock()

    # Test the case GOOGLE_API_USE_CLIENT_CERTIFICATE is "true".
    with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "true"}):
        mock_api_endpoint = "foo"
        options = client_options.ClientOptions(
            client_cert_source=mock_client_cert_source, api_endpoint=mock_api_endpoint
        )
        api_endpoint, cert_source = client_class.get_mtls_endpoint_and_cert_source(
            options
        )
        assert api_endpoint == mock_api_endpoint
        assert cert_source == mock_client_cert_source

    # Test the case GOOGLE_API_USE_CLIENT_CERTIFICATE is "false".
    with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "false"}):
        mock_client_cert_source = mock.Mock()
        mock_api_endpoint = "foo"
        options = client_options.ClientOptions(
            client_cert_source=mock_client_cert_source, api_endpoint=mock_api_endpoint
        )
        api_endpoint, cert_source = client_class.get_mtls_endpoint_and_cert_source(
            options
        )
        assert api_endpoint == mock_api_endpoint
        # Cert source is dropped when client certificates are disabled.
        assert cert_source is None

    # Test the case GOOGLE_API_USE_MTLS_ENDPOINT is "never".
    with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "never"}):
        api_endpoint, cert_source = client_class.get_mtls_endpoint_and_cert_source()
        assert api_endpoint == client_class.DEFAULT_ENDPOINT
        assert cert_source is None

    # Test the case GOOGLE_API_USE_MTLS_ENDPOINT is "always".
    with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "always"}):
        api_endpoint, cert_source = client_class.get_mtls_endpoint_and_cert_source()
        assert api_endpoint == client_class.DEFAULT_MTLS_ENDPOINT
        assert cert_source is None

    # Test the case GOOGLE_API_USE_MTLS_ENDPOINT is "auto" and default cert doesn't exist.
    with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "true"}):
        with mock.patch(
            "google.auth.transport.mtls.has_default_client_cert_source",
            return_value=False,
        ):
            api_endpoint, cert_source = client_class.get_mtls_endpoint_and_cert_source()
            assert api_endpoint == client_class.DEFAULT_ENDPOINT
            assert cert_source is None

    # Test the case GOOGLE_API_USE_MTLS_ENDPOINT is "auto" and default cert exists.
    with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "true"}):
        with mock.patch(
            "google.auth.transport.mtls.has_default_client_cert_source",
            return_value=True,
        ):
            with mock.patch(
                "google.auth.transport.mtls.default_client_cert_source",
                return_value=mock_client_cert_source,
            ):
                (
                    api_endpoint,
                    cert_source,
                ) = client_class.get_mtls_endpoint_and_cert_source()
                assert api_endpoint == client_class.DEFAULT_MTLS_ENDPOINT
                assert cert_source == mock_client_cert_source
@pytest.mark.parametrize(
    "client_class,transport_class,transport_name",
    [
        (
            DocumentUnderstandingServiceClient,
            transports.DocumentUnderstandingServiceGrpcTransport,
            "grpc",
        ),
        (
            DocumentUnderstandingServiceAsyncClient,
            transports.DocumentUnderstandingServiceGrpcAsyncIOTransport,
            "grpc_asyncio",
        ),
    ],
)
def test_document_understanding_service_client_client_options_scopes(
    client_class, transport_class, transport_name
):
    """Scopes supplied via ClientOptions are forwarded verbatim to the
    transport constructor."""
    # Check the case scopes are provided.
    options = client_options.ClientOptions(scopes=["1", "2"],)
    with mock.patch.object(transport_class, "__init__") as patched:
        patched.return_value = None
        client = client_class(client_options=options, transport=transport_name)
        patched.assert_called_once_with(
            credentials=None,
            credentials_file=None,
            host=client.DEFAULT_ENDPOINT,
            scopes=["1", "2"],
            client_cert_source_for_mtls=None,
            quota_project_id=None,
            client_info=transports.base.DEFAULT_CLIENT_INFO,
            always_use_jwt_access=True,
        )
@pytest.mark.parametrize(
    "client_class,transport_class,transport_name,grpc_helpers",
    [
        (
            DocumentUnderstandingServiceClient,
            transports.DocumentUnderstandingServiceGrpcTransport,
            "grpc",
            grpc_helpers,
        ),
        (
            DocumentUnderstandingServiceAsyncClient,
            transports.DocumentUnderstandingServiceGrpcAsyncIOTransport,
            "grpc_asyncio",
            grpc_helpers_async,
        ),
    ],
)
def test_document_understanding_service_client_client_options_credentials_file(
    client_class, transport_class, transport_name, grpc_helpers
):
    """A credentials_file set in ClientOptions is forwarded verbatim to the
    transport constructor."""
    # Check the case credentials file is provided.
    options = client_options.ClientOptions(credentials_file="credentials.json")

    with mock.patch.object(transport_class, "__init__") as patched:
        patched.return_value = None
        client = client_class(client_options=options, transport=transport_name)
        patched.assert_called_once_with(
            credentials=None,
            credentials_file="credentials.json",
            host=client.DEFAULT_ENDPOINT,
            scopes=None,
            client_cert_source_for_mtls=None,
            quota_project_id=None,
            client_info=transports.base.DEFAULT_CLIENT_INFO,
            always_use_jwt_access=True,
        )
def test_document_understanding_service_client_client_options_from_dict():
    """Client options may be passed as a plain dict; the api_endpoint key is
    forwarded as the transport host."""
    with mock.patch(
        "google.cloud.documentai_v1beta2.services.document_understanding_service.transports.DocumentUnderstandingServiceGrpcTransport.__init__"
    ) as grpc_transport:
        grpc_transport.return_value = None
        client = DocumentUnderstandingServiceClient(
            client_options={"api_endpoint": "squid.clam.whelk"}
        )
        grpc_transport.assert_called_once_with(
            credentials=None,
            credentials_file=None,
            host="squid.clam.whelk",
            scopes=None,
            client_cert_source_for_mtls=None,
            quota_project_id=None,
            client_info=transports.base.DEFAULT_CLIENT_INFO,
            always_use_jwt_access=True,
        )
@pytest.mark.parametrize(
    "client_class,transport_class,transport_name,grpc_helpers",
    [
        (
            DocumentUnderstandingServiceClient,
            transports.DocumentUnderstandingServiceGrpcTransport,
            "grpc",
            grpc_helpers,
        ),
        (
            DocumentUnderstandingServiceAsyncClient,
            transports.DocumentUnderstandingServiceGrpcAsyncIOTransport,
            "grpc_asyncio",
            grpc_helpers_async,
        ),
    ],
)
def test_document_understanding_service_client_create_channel_credentials_file(
    client_class, transport_class, transport_name, grpc_helpers
):
    """Credentials loaded from a credentials_file are the ones handed to
    grpc create_channel, with the expected endpoint, scopes, and options."""
    # Check the case credentials file is provided.
    options = client_options.ClientOptions(credentials_file="credentials.json")

    with mock.patch.object(transport_class, "__init__") as patched:
        patched.return_value = None
        client = client_class(client_options=options, transport=transport_name)
        patched.assert_called_once_with(
            credentials=None,
            credentials_file="credentials.json",
            host=client.DEFAULT_ENDPOINT,
            scopes=None,
            client_cert_source_for_mtls=None,
            quota_project_id=None,
            client_info=transports.base.DEFAULT_CLIENT_INFO,
            always_use_jwt_access=True,
        )

    # test that the credentials from file are saved and used as the credentials.
    with mock.patch.object(
        google.auth, "load_credentials_from_file", autospec=True
    ) as load_creds, mock.patch.object(
        google.auth, "default", autospec=True
    ) as adc, mock.patch.object(
        grpc_helpers, "create_channel"
    ) as create_channel:
        creds = ga_credentials.AnonymousCredentials()
        file_creds = ga_credentials.AnonymousCredentials()
        load_creds.return_value = (file_creds, None)
        adc.return_value = (creds, None)
        client = client_class(client_options=options, transport=transport_name)
        # The file credentials (not ADC) must reach create_channel.
        create_channel.assert_called_with(
            "documentai.googleapis.com:443",
            credentials=file_creds,
            credentials_file=None,
            quota_project_id=None,
            default_scopes=("https://www.googleapis.com/auth/cloud-platform",),
            scopes=None,
            default_host="documentai.googleapis.com",
            ssl_credentials=None,
            options=[
                ("grpc.max_send_message_length", -1),
                ("grpc.max_receive_message_length", -1),
            ],
        )
@pytest.mark.parametrize(
    "request_type", [document_understanding.BatchProcessDocumentsRequest, dict,]
)
def test_batch_process_documents(request_type, transport: str = "grpc"):
    """batch_process_documents sends the expected request and wraps the
    returned long-running Operation in a Future."""
    client = DocumentUnderstandingServiceClient(
        credentials=ga_credentials.AnonymousCredentials(), transport=transport,
    )

    # Everything is optional in proto3 as far as the runtime is concerned,
    # and we are mocking out the actual API, so just send an empty request.
    request = request_type()

    # Mock the actual call within the gRPC stub, and fake the request.
    with mock.patch.object(
        type(client.transport.batch_process_documents), "__call__"
    ) as call:
        # Designate an appropriate return value for the call.
        call.return_value = operations_pb2.Operation(name="operations/spam")
        response = client.batch_process_documents(request)

        # Establish that the underlying gRPC stub method was called.
        assert len(call.mock_calls) == 1
        _, args, _ = call.mock_calls[0]
        assert args[0] == document_understanding.BatchProcessDocumentsRequest()

    # Establish that the response is the type that we expect.
    assert isinstance(response, future.Future)
def test_batch_process_documents_empty_call():
    """Calling batch_process_documents with no arguments sends a default
    BatchProcessDocumentsRequest."""
    # This test is a coverage failsafe to make sure that totally empty calls,
    # i.e. request == None and no flattened fields passed, work.
    client = DocumentUnderstandingServiceClient(
        credentials=ga_credentials.AnonymousCredentials(), transport="grpc",
    )

    # Mock the actual call within the gRPC stub, and fake the request.
    with mock.patch.object(
        type(client.transport.batch_process_documents), "__call__"
    ) as call:
        client.batch_process_documents()
        call.assert_called()
        _, args, _ = call.mock_calls[0]
        assert args[0] == document_understanding.BatchProcessDocumentsRequest()
@pytest.mark.asyncio
async def test_batch_process_documents_async(
    transport: str = "grpc_asyncio",
    request_type=document_understanding.BatchProcessDocumentsRequest,
):
    """Async variant of test_batch_process_documents."""
    client = DocumentUnderstandingServiceAsyncClient(
        credentials=ga_credentials.AnonymousCredentials(), transport=transport,
    )

    # Everything is optional in proto3 as far as the runtime is concerned,
    # and we are mocking out the actual API, so just send an empty request.
    request = request_type()

    # Mock the actual call within the gRPC stub, and fake the request.
    with mock.patch.object(
        type(client.transport.batch_process_documents), "__call__"
    ) as call:
        # Designate an appropriate return value for the call; the async stub
        # must return an awaitable, hence the fake unary-unary call wrapper.
        call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(
            operations_pb2.Operation(name="operations/spam")
        )
        response = await client.batch_process_documents(request)

        # Establish that the underlying gRPC stub method was called.
        assert len(call.mock_calls)
        _, args, _ = call.mock_calls[0]
        assert args[0] == document_understanding.BatchProcessDocumentsRequest()

    # Establish that the response is the type that we expect.
    assert isinstance(response, future.Future)
@pytest.mark.asyncio
async def test_batch_process_documents_async_from_dict():
    """The async path also accepts the request as a plain dict."""
    await test_batch_process_documents_async(request_type=dict)
def test_batch_process_documents_field_headers():
    """The request's parent field is propagated as an
    x-goog-request-params routing header."""
    client = DocumentUnderstandingServiceClient(
        credentials=ga_credentials.AnonymousCredentials(),
    )

    # Any value that is part of the HTTP/1.1 URI should be sent as
    # a field header. Set these to a non-empty value.
    request = document_understanding.BatchProcessDocumentsRequest()
    request.parent = "parent/value"

    # Mock the actual call within the gRPC stub, and fake the request.
    with mock.patch.object(
        type(client.transport.batch_process_documents), "__call__"
    ) as call:
        call.return_value = operations_pb2.Operation(name="operations/op")
        client.batch_process_documents(request)

        # Establish that the underlying gRPC stub method was called.
        assert len(call.mock_calls) == 1
        _, args, _ = call.mock_calls[0]
        assert args[0] == request

    # Establish that the field header was sent.
    _, _, kw = call.mock_calls[0]
    assert ("x-goog-request-params", "parent=parent/value",) in kw["metadata"]
@pytest.mark.asyncio
async def test_batch_process_documents_field_headers_async():
    """Async variant of test_batch_process_documents_field_headers."""
    client = DocumentUnderstandingServiceAsyncClient(
        credentials=ga_credentials.AnonymousCredentials(),
    )

    # Any value that is part of the HTTP/1.1 URI should be sent as
    # a field header. Set these to a non-empty value.
    request = document_understanding.BatchProcessDocumentsRequest()
    request.parent = "parent/value"

    # Mock the actual call within the gRPC stub, and fake the request.
    with mock.patch.object(
        type(client.transport.batch_process_documents), "__call__"
    ) as call:
        call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(
            operations_pb2.Operation(name="operations/op")
        )
        await client.batch_process_documents(request)

        # Establish that the underlying gRPC stub method was called.
        assert len(call.mock_calls)
        _, args, _ = call.mock_calls[0]
        assert args[0] == request

    # Establish that the field header was sent.
    _, _, kw = call.mock_calls[0]
    assert ("x-goog-request-params", "parent=parent/value",) in kw["metadata"]
def test_batch_process_documents_flattened():
    """Flattened keyword arguments are folded into the request's
    ``requests`` field."""
    client = DocumentUnderstandingServiceClient(
        credentials=ga_credentials.AnonymousCredentials(),
    )

    # Mock the actual call within the gRPC stub, and fake the request.
    with mock.patch.object(
        type(client.transport.batch_process_documents), "__call__"
    ) as call:
        # Designate an appropriate return value for the call.
        call.return_value = operations_pb2.Operation(name="operations/op")
        # Call the method with a truthy value for each flattened field,
        # using the keyword arguments to the method.
        client.batch_process_documents(
            requests=[
                document_understanding.ProcessDocumentRequest(parent="parent_value")
            ],
        )

        # Establish that the underlying call was made with the expected
        # request object values.
        assert len(call.mock_calls) == 1
        _, args, _ = call.mock_calls[0]
        arg = args[0].requests
        mock_val = [
            document_understanding.ProcessDocumentRequest(parent="parent_value")
        ]
        assert arg == mock_val
def test_batch_process_documents_flattened_error():
    """Passing both a request object and flattened fields raises ValueError."""
    client = DocumentUnderstandingServiceClient(
        credentials=ga_credentials.AnonymousCredentials(),
    )

    # Attempting to call a method with both a request object and flattened
    # fields is an error.
    with pytest.raises(ValueError):
        client.batch_process_documents(
            document_understanding.BatchProcessDocumentsRequest(),
            requests=[
                document_understanding.ProcessDocumentRequest(parent="parent_value")
            ],
        )
@pytest.mark.asyncio
async def test_batch_process_documents_flattened_async():
    """Async variant of test_batch_process_documents_flattened: flattened
    keyword arguments are folded into the request's ``requests`` field."""
    client = DocumentUnderstandingServiceAsyncClient(
        credentials=ga_credentials.AnonymousCredentials(),
    )

    # Mock the actual call within the gRPC stub, and fake the request.
    with mock.patch.object(
        type(client.transport.batch_process_documents), "__call__"
    ) as call:
        # Designate an appropriate return value for the call.  (A dead
        # assignment of a plain Operation that was immediately overwritten
        # by this awaitable fake has been removed.)
        call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(
            operations_pb2.Operation(name="operations/spam")
        )
        # Call the method with a truthy value for each flattened field,
        # using the keyword arguments to the method.
        response = await client.batch_process_documents(
            requests=[
                document_understanding.ProcessDocumentRequest(parent="parent_value")
            ],
        )

        # Establish that the underlying call was made with the expected
        # request object values.
        assert len(call.mock_calls)
        _, args, _ = call.mock_calls[0]
        arg = args[0].requests
        mock_val = [
            document_understanding.ProcessDocumentRequest(parent="parent_value")
        ]
        assert arg == mock_val
@pytest.mark.asyncio
async def test_batch_process_documents_flattened_error_async():
    """Async variant: request object plus flattened fields must raise."""
    async_client = DocumentUnderstandingServiceAsyncClient(
        credentials=ga_credentials.AnonymousCredentials(),
    )
    # Mixing an explicit request message with flattened keyword arguments
    # is ambiguous, so the client raises before issuing any RPC.
    request = document_understanding.BatchProcessDocumentsRequest()
    flattened = [
        document_understanding.ProcessDocumentRequest(parent="parent_value")
    ]
    with pytest.raises(ValueError):
        await async_client.batch_process_documents(request, requests=flattened)
@pytest.mark.parametrize(
    "request_type", [document_understanding.ProcessDocumentRequest, dict,]
)
def test_process_document(request_type, transport: str = "grpc"):
    """process_document forwards the request and returns the stubbed Document."""
    client = DocumentUnderstandingServiceClient(
        credentials=ga_credentials.AnonymousCredentials(), transport=transport,
    )
    # Everything is optional in proto3 as far as the runtime is concerned,
    # and we are mocking out the actual API, so just send an empty request.
    request = request_type()
    # Mock the actual call within the gRPC stub, and fake the request.
    with mock.patch.object(type(client.transport.process_document), "__call__") as call:
        # Designate an appropriate return value for the call.
        call.return_value = document.Document(
            mime_type="mime_type_value", text="text_value", uri="uri_value",
        )
        response = client.process_document(request)
        # Establish that the underlying gRPC stub method was called.
        assert len(call.mock_calls) == 1
        _, args, _ = call.mock_calls[0]
        assert args[0] == document_understanding.ProcessDocumentRequest()
    # Establish that the response is the type that we expect.
    assert isinstance(response, document.Document)
    assert response.mime_type == "mime_type_value"
    assert response.text == "text_value"
def test_process_document_empty_call():
    # This test is a coverage failsafe to make sure that totally empty calls,
    # i.e. request == None and no flattened fields passed, work.
    client = DocumentUnderstandingServiceClient(
        credentials=ga_credentials.AnonymousCredentials(), transport="grpc",
    )
    # Mock the actual call within the gRPC stub, and fake the request.
    with mock.patch.object(type(client.transport.process_document), "__call__") as call:
        client.process_document()
        call.assert_called()
        # Even with no arguments, a default ProcessDocumentRequest is built
        # and handed to the transport.
        _, args, _ = call.mock_calls[0]
        assert args[0] == document_understanding.ProcessDocumentRequest()
@pytest.mark.asyncio
async def test_process_document_async(
    transport: str = "grpc_asyncio",
    request_type=document_understanding.ProcessDocumentRequest,
):
    """Async process_document forwards the request and returns the Document."""
    client = DocumentUnderstandingServiceAsyncClient(
        credentials=ga_credentials.AnonymousCredentials(), transport=transport,
    )
    # Everything is optional in proto3 as far as the runtime is concerned,
    # and we are mocking out the actual API, so just send an empty request.
    request = request_type()
    # Mock the actual call within the gRPC stub, and fake the request.
    with mock.patch.object(type(client.transport.process_document), "__call__") as call:
        # Designate an appropriate return value for the call.
        call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(
            document.Document(mime_type="mime_type_value", text="text_value",)
        )
        response = await client.process_document(request)
        # Establish that the underlying gRPC stub method was called.
        assert len(call.mock_calls)
        _, args, _ = call.mock_calls[0]
        assert args[0] == document_understanding.ProcessDocumentRequest()
    # Establish that the response is the type that we expect.
    assert isinstance(response, document.Document)
    assert response.mime_type == "mime_type_value"
    assert response.text == "text_value"
@pytest.mark.asyncio
async def test_process_document_async_from_dict():
    """Coverage: the async path also accepts a plain dict request body."""
    await test_process_document_async(request_type=dict)
def test_process_document_field_headers():
    """Routing metadata derived from request.parent is attached to the call."""
    client = DocumentUnderstandingServiceClient(
        credentials=ga_credentials.AnonymousCredentials(),
    )
    # Any value that is part of the HTTP/1.1 URI should be sent as
    # a field header. Set these to a non-empty value.
    request = document_understanding.ProcessDocumentRequest()
    request.parent = "parent/value"
    # Mock the actual call within the gRPC stub, and fake the request.
    with mock.patch.object(type(client.transport.process_document), "__call__") as call:
        call.return_value = document.Document()
        client.process_document(request)
        # Establish that the underlying gRPC stub method was called.
        assert len(call.mock_calls) == 1
        _, args, _ = call.mock_calls[0]
        assert args[0] == request
    # Establish that the field header was sent.
    _, _, kw = call.mock_calls[0]
    assert ("x-goog-request-params", "parent=parent/value",) in kw["metadata"]
@pytest.mark.asyncio
async def test_process_document_field_headers_async():
    """Async variant: routing metadata from request.parent is attached."""
    client = DocumentUnderstandingServiceAsyncClient(
        credentials=ga_credentials.AnonymousCredentials(),
    )
    # Any value that is part of the HTTP/1.1 URI should be sent as
    # a field header. Set these to a non-empty value.
    request = document_understanding.ProcessDocumentRequest()
    request.parent = "parent/value"
    # Mock the actual call within the gRPC stub, and fake the request.
    with mock.patch.object(type(client.transport.process_document), "__call__") as call:
        call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(document.Document())
        await client.process_document(request)
        # Establish that the underlying gRPC stub method was called.
        assert len(call.mock_calls)
        _, args, _ = call.mock_calls[0]
        assert args[0] == request
    # Establish that the field header was sent.
    _, _, kw = call.mock_calls[0]
    assert ("x-goog-request-params", "parent=parent/value",) in kw["metadata"]
def test_credentials_transport_error():
    """Mutually exclusive client constructor arguments must raise ValueError."""
    # It is an error to provide credentials and a transport instance.
    transport = transports.DocumentUnderstandingServiceGrpcTransport(
        credentials=ga_credentials.AnonymousCredentials(),
    )
    with pytest.raises(ValueError):
        client = DocumentUnderstandingServiceClient(
            credentials=ga_credentials.AnonymousCredentials(), transport=transport,
        )
    # It is an error to provide a credentials file and a transport instance.
    transport = transports.DocumentUnderstandingServiceGrpcTransport(
        credentials=ga_credentials.AnonymousCredentials(),
    )
    with pytest.raises(ValueError):
        client = DocumentUnderstandingServiceClient(
            client_options={"credentials_file": "credentials.json"},
            transport=transport,
        )
    # It is an error to provide an api_key and a transport instance.
    transport = transports.DocumentUnderstandingServiceGrpcTransport(
        credentials=ga_credentials.AnonymousCredentials(),
    )
    options = client_options.ClientOptions()
    options.api_key = "api_key"
    with pytest.raises(ValueError):
        client = DocumentUnderstandingServiceClient(
            client_options=options, transport=transport,
        )
    # It is an error to provide an api_key and a credential.
    options = mock.Mock()
    options.api_key = "api_key"
    with pytest.raises(ValueError):
        client = DocumentUnderstandingServiceClient(
            client_options=options, credentials=ga_credentials.AnonymousCredentials()
        )
    # It is an error to provide scopes and a transport instance.
    transport = transports.DocumentUnderstandingServiceGrpcTransport(
        credentials=ga_credentials.AnonymousCredentials(),
    )
    with pytest.raises(ValueError):
        client = DocumentUnderstandingServiceClient(
            client_options={"scopes": ["1", "2"]}, transport=transport,
        )
def test_transport_instance():
    """A client accepts and stores a caller-supplied transport instance."""
    custom_transport = transports.DocumentUnderstandingServiceGrpcTransport(
        credentials=ga_credentials.AnonymousCredentials(),
    )
    service_client = DocumentUnderstandingServiceClient(transport=custom_transport)
    # The exact same object must be exposed back on the client.
    assert service_client.transport is custom_transport
def test_transport_get_channel():
    """Both sync and async gRPC transports expose a usable channel."""
    for transport_cls in (
        transports.DocumentUnderstandingServiceGrpcTransport,
        transports.DocumentUnderstandingServiceGrpcAsyncIOTransport,
    ):
        # A transport built from credentials alone creates its own channel.
        transport = transport_cls(
            credentials=ga_credentials.AnonymousCredentials(),
        )
        assert transport.grpc_channel
@pytest.mark.parametrize(
    "transport_class",
    [
        transports.DocumentUnderstandingServiceGrpcTransport,
        transports.DocumentUnderstandingServiceGrpcAsyncIOTransport,
    ],
)
def test_transport_adc(transport_class):
    """Transports fall back to Application Default Credentials when none given."""
    # Test default credentials are used if not provided.
    with mock.patch.object(google.auth, "default") as adc:
        adc.return_value = (ga_credentials.AnonymousCredentials(), None)
        transport_class()
        adc.assert_called_once()
def test_transport_grpc_default():
    """With no transport argument, the client defaults to gRPC."""
    default_client = DocumentUnderstandingServiceClient(
        credentials=ga_credentials.AnonymousCredentials(),
    )
    expected_cls = transports.DocumentUnderstandingServiceGrpcTransport
    assert isinstance(default_client.transport, expected_cls)
def test_document_understanding_service_base_transport_error():
    """Supplying credentials and a credentials file together must raise."""
    # The two credential sources are mutually exclusive on the base transport.
    with pytest.raises(core_exceptions.DuplicateCredentialArgs):
        transports.DocumentUnderstandingServiceTransport(
            credentials=ga_credentials.AnonymousCredentials(),
            credentials_file="credentials.json",
        )
def test_document_understanding_service_base_transport():
    """Every RPC on the abstract base transport raises NotImplementedError."""
    # Instantiate the base transport.
    with mock.patch(
        "google.cloud.documentai_v1beta2.services.document_understanding_service.transports.DocumentUnderstandingServiceTransport.__init__"
    ) as Transport:
        Transport.return_value = None
        transport = transports.DocumentUnderstandingServiceTransport(
            credentials=ga_credentials.AnonymousCredentials(),
        )
    # Every method on the transport should just blindly
    # raise NotImplementedError.
    methods = (
        "batch_process_documents",
        "process_document",
    )
    for method in methods:
        with pytest.raises(NotImplementedError):
            getattr(transport, method)(request=object())
    with pytest.raises(NotImplementedError):
        transport.close()
    # Additionally, the LRO client (a property) should
    # also raise NotImplementedError
    with pytest.raises(NotImplementedError):
        transport.operations_client
def test_document_understanding_service_base_transport_with_credentials_file():
    """A credentials file is loaded with the default cloud-platform scope."""
    # Instantiate the base transport with a credentials file
    with mock.patch.object(
        google.auth, "load_credentials_from_file", autospec=True
    ) as load_creds, mock.patch(
        "google.cloud.documentai_v1beta2.services.document_understanding_service.transports.DocumentUnderstandingServiceTransport._prep_wrapped_messages"
    ) as Transport:
        Transport.return_value = None
        load_creds.return_value = (ga_credentials.AnonymousCredentials(), None)
        transport = transports.DocumentUnderstandingServiceTransport(
            credentials_file="credentials.json", quota_project_id="octopus",
        )
        # The file must be loaded with the service's default scopes and the
        # caller-supplied quota project.
        load_creds.assert_called_once_with(
            "credentials.json",
            scopes=None,
            default_scopes=("https://www.googleapis.com/auth/cloud-platform",),
            quota_project_id="octopus",
        )
def test_document_understanding_service_base_transport_with_adc():
    """ADC is consulted when neither credentials nor a file is supplied."""
    # Test the default credentials are used if credentials and credentials_file are None.
    with mock.patch.object(google.auth, "default", autospec=True) as adc, mock.patch(
        "google.cloud.documentai_v1beta2.services.document_understanding_service.transports.DocumentUnderstandingServiceTransport._prep_wrapped_messages"
    ) as Transport:
        Transport.return_value = None
        adc.return_value = (ga_credentials.AnonymousCredentials(), None)
        transport = transports.DocumentUnderstandingServiceTransport()
        adc.assert_called_once()
def test_document_understanding_service_auth_adc():
    """The client itself falls back to ADC with the expected default scopes."""
    # If no credentials are provided, we should use ADC credentials.
    with mock.patch.object(google.auth, "default", autospec=True) as adc:
        adc.return_value = (ga_credentials.AnonymousCredentials(), None)
        DocumentUnderstandingServiceClient()
        adc.assert_called_once_with(
            scopes=None,
            default_scopes=("https://www.googleapis.com/auth/cloud-platform",),
            quota_project_id=None,
        )
@pytest.mark.parametrize(
    "transport_class",
    [
        transports.DocumentUnderstandingServiceGrpcTransport,
        transports.DocumentUnderstandingServiceGrpcAsyncIOTransport,
    ],
)
def test_document_understanding_service_transport_auth_adc(transport_class):
    """Transports forward explicit scopes/quota project to the ADC lookup."""
    # If credentials and host are not provided, the transport class should use
    # ADC credentials.
    with mock.patch.object(google.auth, "default", autospec=True) as adc:
        adc.return_value = (ga_credentials.AnonymousCredentials(), None)
        transport_class(quota_project_id="octopus", scopes=["1", "2"])
        adc.assert_called_once_with(
            scopes=["1", "2"],
            default_scopes=("https://www.googleapis.com/auth/cloud-platform",),
            quota_project_id="octopus",
        )
@pytest.mark.parametrize(
    "transport_class,grpc_helpers",
    [
        (transports.DocumentUnderstandingServiceGrpcTransport, grpc_helpers),
        (
            transports.DocumentUnderstandingServiceGrpcAsyncIOTransport,
            grpc_helpers_async,
        ),
    ],
)
def test_document_understanding_service_transport_create_channel(
    transport_class, grpc_helpers
):
    """Channel creation receives the service host, scopes, and gRPC options."""
    # If credentials and host are not provided, the transport class should use
    # ADC credentials.
    with mock.patch.object(
        google.auth, "default", autospec=True
    ) as adc, mock.patch.object(
        grpc_helpers, "create_channel", autospec=True
    ) as create_channel:
        creds = ga_credentials.AnonymousCredentials()
        adc.return_value = (creds, None)
        transport_class(quota_project_id="octopus", scopes=["1", "2"])
        # The channel must target the default endpoint and carry the
        # unlimited-message-size options used by generated clients.
        create_channel.assert_called_with(
            "documentai.googleapis.com:443",
            credentials=creds,
            credentials_file=None,
            quota_project_id="octopus",
            default_scopes=("https://www.googleapis.com/auth/cloud-platform",),
            scopes=["1", "2"],
            default_host="documentai.googleapis.com",
            ssl_credentials=None,
            options=[
                ("grpc.max_send_message_length", -1),
                ("grpc.max_receive_message_length", -1),
            ],
        )
@pytest.mark.parametrize(
    "transport_class",
    [
        transports.DocumentUnderstandingServiceGrpcTransport,
        transports.DocumentUnderstandingServiceGrpcAsyncIOTransport,
    ],
)
def test_document_understanding_service_grpc_transport_client_cert_source_for_mtls(
    transport_class,
):
    """mTLS: explicit ssl_channel_credentials wins; otherwise the cert source is used."""
    cred = ga_credentials.AnonymousCredentials()
    # Check ssl_channel_credentials is used if provided.
    with mock.patch.object(transport_class, "create_channel") as mock_create_channel:
        mock_ssl_channel_creds = mock.Mock()
        transport_class(
            host="squid.clam.whelk",
            credentials=cred,
            ssl_channel_credentials=mock_ssl_channel_creds,
        )
        mock_create_channel.assert_called_once_with(
            "squid.clam.whelk:443",
            credentials=cred,
            credentials_file=None,
            scopes=None,
            ssl_credentials=mock_ssl_channel_creds,
            quota_project_id=None,
            options=[
                ("grpc.max_send_message_length", -1),
                ("grpc.max_receive_message_length", -1),
            ],
        )
    # Check if ssl_channel_credentials is not provided, then client_cert_source_for_mtls
    # is used.
    with mock.patch.object(transport_class, "create_channel", return_value=mock.Mock()):
        with mock.patch("grpc.ssl_channel_credentials") as mock_ssl_cred:
            transport_class(
                credentials=cred,
                client_cert_source_for_mtls=client_cert_source_callback,
            )
            # The cert/key pair produced by the callback must be what was
            # used to build the SSL channel credentials.
            expected_cert, expected_key = client_cert_source_callback()
            mock_ssl_cred.assert_called_once_with(
                certificate_chain=expected_cert, private_key=expected_key
            )
def test_document_understanding_service_host_no_port():
    """An api_endpoint without a port resolves to the default :443."""
    opts = client_options.ClientOptions(api_endpoint="documentai.googleapis.com")
    svc = DocumentUnderstandingServiceClient(
        credentials=ga_credentials.AnonymousCredentials(), client_options=opts,
    )
    # The transport should have appended the default gRPC port.
    assert svc.transport._host == "documentai.googleapis.com:443"
def test_document_understanding_service_host_with_port():
    """An api_endpoint with an explicit port is used verbatim."""
    opts = client_options.ClientOptions(api_endpoint="documentai.googleapis.com:8000")
    svc = DocumentUnderstandingServiceClient(
        credentials=ga_credentials.AnonymousCredentials(), client_options=opts,
    )
    # No default port is appended when the caller already supplied one.
    assert svc.transport._host == "documentai.googleapis.com:8000"
def test_document_understanding_service_grpc_transport_channel():
    """A caller-provided channel is used verbatim by the sync gRPC transport."""
    channel = grpc.secure_channel("http://localhost/", grpc.local_channel_credentials())
    # Check that channel is used if provided.
    transport = transports.DocumentUnderstandingServiceGrpcTransport(
        host="squid.clam.whelk", channel=channel,
    )
    assert transport.grpc_channel == channel
    assert transport._host == "squid.clam.whelk:443"
    # Fix: compare against None with identity, not equality (PEP 8 / E711).
    assert transport._ssl_channel_credentials is None
def test_document_understanding_service_grpc_asyncio_transport_channel():
    """A caller-provided channel is used verbatim by the asyncio gRPC transport."""
    channel = aio.secure_channel("http://localhost/", grpc.local_channel_credentials())
    # Check that channel is used if provided.
    transport = transports.DocumentUnderstandingServiceGrpcAsyncIOTransport(
        host="squid.clam.whelk", channel=channel,
    )
    assert transport.grpc_channel == channel
    assert transport._host == "squid.clam.whelk:443"
    # Fix: compare against None with identity, not equality (PEP 8 / E711).
    assert transport._ssl_channel_credentials is None
# Remove this test when deprecated arguments (api_mtls_endpoint, client_cert_source) are
# removed from grpc/grpc_asyncio transport constructor.
@pytest.mark.parametrize(
    "transport_class",
    [
        transports.DocumentUnderstandingServiceGrpcTransport,
        transports.DocumentUnderstandingServiceGrpcAsyncIOTransport,
    ],
)
def test_document_understanding_service_transport_channel_mtls_with_client_cert_source(
    transport_class,
):
    """Deprecated mTLS args: the cert-source callback feeds ssl_channel_credentials."""
    with mock.patch(
        "grpc.ssl_channel_credentials", autospec=True
    ) as grpc_ssl_channel_cred:
        with mock.patch.object(
            transport_class, "create_channel"
        ) as grpc_create_channel:
            mock_ssl_cred = mock.Mock()
            grpc_ssl_channel_cred.return_value = mock_ssl_cred
            mock_grpc_channel = mock.Mock()
            grpc_create_channel.return_value = mock_grpc_channel
            cred = ga_credentials.AnonymousCredentials()
            # Using the deprecated arguments must emit a DeprecationWarning.
            with pytest.warns(DeprecationWarning):
                with mock.patch.object(google.auth, "default") as adc:
                    adc.return_value = (cred, None)
                    transport = transport_class(
                        host="squid.clam.whelk",
                        api_mtls_endpoint="mtls.squid.clam.whelk",
                        client_cert_source=client_cert_source_callback,
                    )
                    adc.assert_called_once()
            grpc_ssl_channel_cred.assert_called_once_with(
                certificate_chain=b"cert bytes", private_key=b"key bytes"
            )
            # The mTLS endpoint, not the plain host, must be dialed.
            grpc_create_channel.assert_called_once_with(
                "mtls.squid.clam.whelk:443",
                credentials=cred,
                credentials_file=None,
                scopes=None,
                ssl_credentials=mock_ssl_cred,
                quota_project_id=None,
                options=[
                    ("grpc.max_send_message_length", -1),
                    ("grpc.max_receive_message_length", -1),
                ],
            )
            assert transport.grpc_channel == mock_grpc_channel
            assert transport._ssl_channel_credentials == mock_ssl_cred
# Remove this test when deprecated arguments (api_mtls_endpoint, client_cert_source) are
# removed from grpc/grpc_asyncio transport constructor.
@pytest.mark.parametrize(
    "transport_class",
    [
        transports.DocumentUnderstandingServiceGrpcTransport,
        transports.DocumentUnderstandingServiceGrpcAsyncIOTransport,
    ],
)
def test_document_understanding_service_transport_channel_mtls_with_adc(
    transport_class,
):
    """Deprecated mTLS endpoint with no cert source: ADC SslCredentials are used."""
    mock_ssl_cred = mock.Mock()
    with mock.patch.multiple(
        "google.auth.transport.grpc.SslCredentials",
        __init__=mock.Mock(return_value=None),
        ssl_credentials=mock.PropertyMock(return_value=mock_ssl_cred),
    ):
        with mock.patch.object(
            transport_class, "create_channel"
        ) as grpc_create_channel:
            mock_grpc_channel = mock.Mock()
            grpc_create_channel.return_value = mock_grpc_channel
            mock_cred = mock.Mock()
            # Using the deprecated api_mtls_endpoint must warn.
            with pytest.warns(DeprecationWarning):
                transport = transport_class(
                    host="squid.clam.whelk",
                    credentials=mock_cred,
                    api_mtls_endpoint="mtls.squid.clam.whelk",
                    client_cert_source=None,
                )
            grpc_create_channel.assert_called_once_with(
                "mtls.squid.clam.whelk:443",
                credentials=mock_cred,
                credentials_file=None,
                scopes=None,
                ssl_credentials=mock_ssl_cred,
                quota_project_id=None,
                options=[
                    ("grpc.max_send_message_length", -1),
                    ("grpc.max_receive_message_length", -1),
                ],
            )
            assert transport.grpc_channel == mock_grpc_channel
def test_document_understanding_service_grpc_lro_client():
    """The gRPC transport lazily builds and caches an LRO operations client."""
    client = DocumentUnderstandingServiceClient(
        credentials=ga_credentials.AnonymousCredentials(), transport="grpc",
    )
    transport = client.transport
    # Ensure that we have a api-core operations client.
    assert isinstance(transport.operations_client, operations_v1.OperationsClient,)
    # Ensure that subsequent calls to the property send the exact same object.
    assert transport.operations_client is transport.operations_client
def test_document_understanding_service_grpc_lro_async_client():
    """The asyncio transport lazily builds and caches an async LRO client."""
    client = DocumentUnderstandingServiceAsyncClient(
        credentials=ga_credentials.AnonymousCredentials(), transport="grpc_asyncio",
    )
    transport = client.transport
    # Ensure that we have a api-core operations client.
    assert isinstance(transport.operations_client, operations_v1.OperationsAsyncClient,)
    # Ensure that subsequent calls to the property send the exact same object.
    assert transport.operations_client is transport.operations_client
def test_common_billing_account_path():
    """common_billing_account_path renders billingAccounts/{billing_account}."""
    account_id = "squid"
    rendered = DocumentUnderstandingServiceClient.common_billing_account_path(
        account_id
    )
    assert rendered == "billingAccounts/" + account_id
def test_parse_common_billing_account_path():
    """Parsing a rendered billing-account path recovers its components."""
    components = {
        "billing_account": "clam",
    }
    path = DocumentUnderstandingServiceClient.common_billing_account_path(**components)
    # Check that the path construction is reversible.
    parsed = DocumentUnderstandingServiceClient.parse_common_billing_account_path(path)
    assert parsed == components
def test_common_folder_path():
    """common_folder_path renders the folders/{folder} template."""
    folder_id = "whelk"
    rendered = DocumentUnderstandingServiceClient.common_folder_path(folder_id)
    assert rendered == "folders/" + folder_id
def test_parse_common_folder_path():
    """Parsing a rendered folder path recovers its components."""
    components = {
        "folder": "octopus",
    }
    path = DocumentUnderstandingServiceClient.common_folder_path(**components)
    # Check that the path construction is reversible.
    parsed = DocumentUnderstandingServiceClient.parse_common_folder_path(path)
    assert parsed == components
def test_common_organization_path():
    """common_organization_path renders organizations/{organization}."""
    org_id = "oyster"
    rendered = DocumentUnderstandingServiceClient.common_organization_path(org_id)
    assert rendered == "organizations/" + org_id
def test_parse_common_organization_path():
    """Parsing a rendered organization path recovers its components."""
    components = {
        "organization": "nudibranch",
    }
    path = DocumentUnderstandingServiceClient.common_organization_path(**components)
    # Check that the path construction is reversible.
    parsed = DocumentUnderstandingServiceClient.parse_common_organization_path(path)
    assert parsed == components
def test_common_project_path():
    """common_project_path renders the projects/{project} template."""
    project_id = "cuttlefish"
    rendered = DocumentUnderstandingServiceClient.common_project_path(project_id)
    assert rendered == "projects/" + project_id
def test_parse_common_project_path():
    """Parsing a rendered project path recovers its components."""
    components = {
        "project": "mussel",
    }
    path = DocumentUnderstandingServiceClient.common_project_path(**components)
    # Check that the path construction is reversible.
    parsed = DocumentUnderstandingServiceClient.parse_common_project_path(path)
    assert parsed == components
def test_common_location_path():
    """common_location_path renders projects/{project}/locations/{location}."""
    project_id = "winkle"
    location_id = "nautilus"
    rendered = DocumentUnderstandingServiceClient.common_location_path(
        project_id, location_id
    )
    assert rendered == "projects/" + project_id + "/locations/" + location_id
def test_parse_common_location_path():
    """Parsing a rendered location path recovers its components."""
    components = {
        "project": "scallop",
        "location": "abalone",
    }
    path = DocumentUnderstandingServiceClient.common_location_path(**components)
    # Check that the path construction is reversible.
    parsed = DocumentUnderstandingServiceClient.parse_common_location_path(path)
    assert parsed == components
def test_client_with_default_client_info():
    """client_info is forwarded to _prep_wrapped_messages for client and transport."""
    client_info = gapic_v1.client_info.ClientInfo()
    # Constructing a client must prep the wrapped RPCs with the given info.
    with mock.patch.object(
        transports.DocumentUnderstandingServiceTransport, "_prep_wrapped_messages"
    ) as prep:
        client = DocumentUnderstandingServiceClient(
            credentials=ga_credentials.AnonymousCredentials(), client_info=client_info,
        )
        prep.assert_called_once_with(client_info)
    # Constructing a transport directly must do the same.
    with mock.patch.object(
        transports.DocumentUnderstandingServiceTransport, "_prep_wrapped_messages"
    ) as prep:
        transport_class = DocumentUnderstandingServiceClient.get_transport_class()
        transport = transport_class(
            credentials=ga_credentials.AnonymousCredentials(), client_info=client_info,
        )
        prep.assert_called_once_with(client_info)
@pytest.mark.asyncio
async def test_transport_close_async():
    """``async with client`` closes the gRPC channel on exit, not before."""
    client = DocumentUnderstandingServiceAsyncClient(
        credentials=ga_credentials.AnonymousCredentials(), transport="grpc_asyncio",
    )
    with mock.patch.object(
        type(getattr(client.transport, "grpc_channel")), "close"
    ) as close:
        async with client:
            close.assert_not_called()
        close.assert_called_once()
def test_transport_close():
    """``with client`` closes the underlying transport channel on exit."""
    # Fix: the local dict was named ``transports``, shadowing the imported
    # ``transports`` module within this function; use a distinct name.
    transport_channels = {
        "grpc": "_grpc_channel",
    }
    for transport_name, close_name in transport_channels.items():
        client = DocumentUnderstandingServiceClient(
            credentials=ga_credentials.AnonymousCredentials(), transport=transport_name
        )
        with mock.patch.object(
            type(getattr(client.transport, close_name)), "close"
        ) as close:
            with client:
                close.assert_not_called()
            close.assert_called_once()
def test_client_ctx():
    """Using the client as a context manager closes its transport on exit."""
    # Fix: the local list was named ``transports``, shadowing the imported
    # ``transports`` module within this function; use a distinct name.
    transport_names = [
        "grpc",
    ]
    for transport_name in transport_names:
        client = DocumentUnderstandingServiceClient(
            credentials=ga_credentials.AnonymousCredentials(), transport=transport_name
        )
        # Test client calls underlying transport.
        with mock.patch.object(type(client.transport), "close") as close:
            close.assert_not_called()
            with client:
                pass
            close.assert_called()
@pytest.mark.parametrize(
    "client_class,transport_class",
    [
        (
            DocumentUnderstandingServiceClient,
            transports.DocumentUnderstandingServiceGrpcTransport,
        ),
        (
            DocumentUnderstandingServiceAsyncClient,
            transports.DocumentUnderstandingServiceGrpcAsyncIOTransport,
        ),
    ],
)
def test_api_key_credentials(client_class, transport_class):
    """The api_key client option is exchanged for credentials passed to the transport."""
    with mock.patch.object(
        google.auth._default, "get_api_key_credentials", create=True
    ) as get_api_key_credentials:
        mock_cred = mock.Mock()
        get_api_key_credentials.return_value = mock_cred
        options = client_options.ClientOptions()
        options.api_key = "api_key"
        with mock.patch.object(transport_class, "__init__") as patched:
            patched.return_value = None
            client = client_class(client_options=options)
            # The transport must receive the credentials minted from the
            # API key, with all other settings at their defaults.
            patched.assert_called_once_with(
                credentials=mock_cred,
                credentials_file=None,
                host=client.DEFAULT_ENDPOINT,
                scopes=None,
                client_cert_source_for_mtls=None,
                quota_project_id=None,
                client_info=transports.base.DEFAULT_CLIENT_INFO,
                always_use_jwt_access=True,
            )
|
googleapis/python-documentai
|
tests/unit/gapic/documentai_v1beta2/test_document_understanding_service.py
|
Python
|
apache-2.0
| 65,523
|
[
"Octopus"
] |
b41ba4f573291292a29a96a867f3693197d722750d8fe10139083cc34b0dc5d8
|
# -*- coding: utf-8 -*-
"""
Created on Tue Nov 1 18:45:15 2016

Read radar-experiment binary output (GVAR-SMN format) and plot bred-vector
norms and growth rates for a set of perturbations.

@author:
"""
# LECTURA Y GRAFICADO RADAR (Formato binario GVAR-SMN)
import numpy as np
# Fix: 'import matplotlib as plt' does not expose the pyplot plotting API
# under the conventional 'plt' name.
import matplotlib.pyplot as plt
import datetime as dt
import binary_io as bio
import bred_vector_functions as bvf
import os

basedir = '/data9/jruiz/EXPERIMENTS/'
expname = '/OsakaPAR_1km_control1000m_smallrandompert_new/'
plotbasedir = basedir + expname + '/plots/'

undef_in = 1.0e20   # Missing-data flag in the binary files.
undef_out = np.nan  # Missing-data flag used in memory.

qmeanlevs = [0.001, 0.0050, 0.05]  # Levels for vertically averaged condensate.

inipert = 1  # Which is the first perturbation that we will plot.
endpert = 1  # Which is the last perturbation that we will plot.
npert = endpert - inipert + 1  # Total number of perturbations that will be plotted.

norm_type = 'UVT'
smooth_type = 'Gaussian'
smooth_sigma = np.array([1.5])

# The following will be used to extract a particular variable from the original data.
# These variables should be specified according to the data in the binary files.
ctl_vars = 'U', 'V', 'W', 'T', 'QV', 'QHYD'  # Complete list of variables in ctl file.
ctl_inirecord = [0, 12, 24, 36, 48, 60]      # Starting record for each variable. From 0 to N.
ctl_endrecord = [11, 23, 35, 47, 59, 71]     # End record for each variable. From 0 to N.

# Which variables and levels are we going to plot?
plotlevels = np.array([3, 7, 9])  # Levels to plot (equivalent to the BV plots).
plotvars = 'U', 'V', 'W', 'T', 'QV', 'QHYD'  # Which variables will be plotted.

# Define regions (start/end latitudes and longitudes per region).
lati = np.array([34.75, 34.6])
late = np.array([35.25, 34.9])
loni = np.array([135.5, 135.4])
lone = np.array([136.25, 135.7])
reg_name = 'REG_1', 'REG_2', 'TOTAL'

# Create the plotbasedir
if not os.path.exists(plotbasedir):
    os.mkdir(plotbasedir)

# Define initial and end times using the datetime module.
itime = dt.datetime(2013, 7, 13, 5, 10, 30)  # Initial time.
etime = dt.datetime(2013, 7, 13, 5, 39, 30)  # End time.

# Define the time step.
delta = dt.timedelta(seconds=30)

nx = 180
ny = 180
nz = np.max(ctl_endrecord) + 1  # Total number of records in binary file.
nlev = 12                       # Number of vertical levels for 3D variables.

# Fix: the original computed round((itime - etime).seconds / delta.seconds) + 1.
# Since itime < etime, (itime - etime) is a NEGATIVE timedelta, and Python
# normalizes it to (days=-1, seconds=<positive remainder>), so .seconds gave a
# wildly wrong count (2823 instead of 59 here). Use (etime - itime) instead.
# The redundant duplicate recomputation of ntimes has also been removed.
ntimes = round((etime - itime).seconds / delta.seconds) + 1  # Total number of times.

data_pert_anal = dict()
data_mean_anal = dict()
data_pert_gues = dict()
data_mean_gues = dict()

ctime = itime
#Get lat lon.
# Each latlon file holds a single (nx, ny) plane of big-endian float32
# values ('>f4'); the trailing [:,:,0] keeps it as a 2-D array.
lat=bio.read_data_direct(basedir + expname + '/latlon/lat.grd',nx,ny,1,'>f4')[:,:,0]
lon=bio.read_data_direct(basedir + expname + '/latlon/lon.grd',nx,ny,1,'>f4')[:,:,0]
#Add the global domain as a region.
# The grid corner coordinates are appended so the last "region" spans the
# full domain (the 'TOTAL' entry of reg_name).
lati=np.append(lati,lat[0,0])
late=np.append(late,lat[nx-1,ny-1])
loni=np.append(loni,lon[0,0])
lone=np.append(lone,lon[nx-1,ny-1])
# Convert region corner lat/lon to grid indices (start and end corners).
xi , yi = bvf.lat_lon_to_i_j(lon,lat,loni,lati)
xe , ye = bvf.lat_lon_to_i_j(lon,lat,lone,late)
nregs=xi.shape[0]
# Time-accumulated 3-D fields (mean and spread of norm / growth rate),
# filled inside the time loop below.
time_mean_growth_rate=np.zeros([nx,ny,nlev])
time_sprd_growth_rate=np.zeros([nx,ny,nlev])
time_mean_norm=np.zeros([nx,ny,nlev])
time_sprd_norm=np.zeros([nx,ny,nlev])
# Per-time, per-perturbation, per-region norm statistics for the first
# guess and the analysis.
norm_mean_gues=np.zeros([ntimes,npert,nregs])
norm_mean_anal=np.zeros([ntimes,npert,nregs])
norm_max_gues=np.zeros([ntimes,npert,nregs])
norm_max_anal=np.zeros([ntimes,npert,nregs])
norm_min_gues=np.zeros([ntimes,npert,nregs])
norm_min_anal=np.zeros([ntimes,npert,nregs])
# Growth-rate statistics of the perturbations.
gr_pert_mean=np.zeros([ntimes,npert,nregs])
gr_pert_max=np.zeros([ntimes,npert,nregs])
gr_pert_min=np.zeros([ntimes,npert,nregs])
# Work arrays reused each time step.
norm_gues=np.zeros([nx,ny,nlev])
norm_anal=np.zeros([nx,ny,nlev])
int_liquid=np.zeros([nx,ny,nlev])
# Time-loop counter.
it=0
for ipert in range (inipert , endpert + 1):
pertstr="%04d" % ipert
print( ' Plotting bred vector number ' + pertstr )
while ( ctime <= etime ):
ptime=ctime - delta #Data correspinding to the previous step (to compute bv growth)
print ( 'The date is :', ctime )
print ( 'Reading the perturbed analysis')
my_file=basedir + expname + ptime.strftime("%Y%m%d%H%M%S") + '/analgp/' + '/' + pertstr + '.grd'
data_pert_anal=bio.read_data_scale_2(my_file,nx,ny,nz,ctl_vars,ctl_inirecord,ctl_endrecord,dtypein='f4',undef_in=undef_in,undef_out=undef_out)
print ( 'Reading the analysis mean')
my_file=basedir + expname + ptime.strftime("%Y%m%d%H%M%S") + '/analgp/' + '/mean.grd'
data_mean_anal=bio.read_data_scale_2(my_file,nx,ny,nz,ctl_vars,ctl_inirecord,ctl_endrecord,dtypein='f4',undef_in=undef_in,undef_out=undef_out)
print ( 'Reading the perturbed gues')
my_file=basedir + expname + ctime.strftime("%Y%m%d%H%M%S") + '/guesgp/' + '/' + pertstr + '.grd'
data_pert_gues=bio.read_data_scale_2(my_file,nx,ny,nz,ctl_vars,ctl_inirecord,ctl_endrecord,dtypein='f4',undef_in=undef_in,undef_out=undef_out)
print ( 'Reading the gues mean')
my_file=basedir + expname + ctime.strftime("%Y%m%d%H%M%S") + '/guesgp/' + '/mean.grd'
data_mean_gues=bio.read_data_scale_2(my_file,nx,ny,nz,ctl_vars,ctl_inirecord,ctl_endrecord,dtypein='f4',undef_in=undef_in,undef_out=undef_out)
#Compute total integrated liquid (we will use this to identify areas associated with clouds and convection)
tmp_int_liquid = np.nansum(data_mean_anal['QHYD'],2)
for ilev in range(0,nlev) : #Create a fake 3D array for the vertically integrated liquid
#This is because the plotting function expects a 3D array as input.
int_liquid[:,:,ilev]=tmp_int_liquid
#Note: In pithon when multiple variables are output from a function in a tuple, then all the variables has to be decodified.
#If not the reconstruction of the variables will fail.
norm_mean_gues[it,ipert-1,:] , norm_max_gues[it,ipert-1,:] , norm_min_gues[it,ipert-1,:] , norm_gues =bvf.norm_bv( data_pert_gues , data_mean_gues , norm_type=norm_type , smooth=smooth_type , sigma=smooth_sigma , xi=xi,yi=yi,xe=xe,ye=ye)
norm_mean_anal[it,ipert-1,:] , norm_max_anal[it,ipert-1,:] , norm_min_anal[it,ipert-1,:] , norm_anal =bvf.norm_bv( data_pert_anal , data_mean_anal , norm_type=norm_type , smooth=smooth_type , sigma=smooth_sigma , xi=xi,yi=yi,xe=xe,ye=ye)
gr_pert_mean[it,ipert-1,:] , gr_pert_max[it,ipert-1,:] , gr_pert_min[it,ipert-1,:] , gr_pert = bvf.growth_rate_bv( norm_gues , norm_anal , xi=xi , xe=xe , yi=yi , ye=ye )
#Plot LETKF perturbation norm.
mydir=plotbasedir + '/' + pertstr + '/'
varname='norm_' + norm_type
my_range='centered'
bvf.plot_var_levels( norm_gues , lon , lat , plotlevels , mydir , varname , date=ctime.strftime("%Y%m%d%H%M%S") ,varcontour=int_liquid,clevels=qmeanlevs,range=my_range)
bvf.plot_var_ave( norm_gues , lon , lat , mydir , varname , varcontour=data_mean_anal['QHYD'] , clevels=qmeanlevs , date=ctime.strftime("%Y%m%d%H%M%S"),range=my_range)
varname='gr_' + norm_type
my_range='centered'
#Plot LETKF perturbation growth rate.
bvf.plot_var_levels( gr_pert , lon , lat , plotlevels , mydir , varname , date=ctime.strftime("%Y%m%d%H%M%S") ,varcontour=int_liquid,clevels=qmeanlevs,range=my_range)
bvf.plot_var_ave( gr_pert , lon , lat , mydir , varname , varcontour=data_mean_anal['QHYD'] , clevels=qmeanlevs , date=ctime.strftime("%Y%m%d%H%M%S"),range=my_range)
time_mean_growth_rate = time_mean_growth_rate + gr_pert
time_sprd_growth_rate = time_sprd_growth_rate + np.power( gr_pert , 2 )
time_mean_norm = time_mean_norm + norm_gues
time_sprd_norm = time_sprd_norm + np.power( norm_gues , 2 )
ctime = ctime + delta
it = it + 1
print ( "Finish time loop" )
time_mean_growth_rate = time_mean_growth_rate / ntimes
time_mean_norm = time_mean_norm / ntimes
time_sprd_growth_rate = np.power( time_sprd_growth_rate / ntimes - np.power( time_mean_growth_rate , 2 ) , 0.5)
time_sprd_norm = np.power( time_sprd_norm / ntimes - np.power( time_mean_norm , 2 ) , 0.5)
#Plot mean norm
mydir=plotbasedir + '/time_independent_plots/' + '/' + pertstr + '/'
bvf.plot_var_levels( time_mean_norm , lon , lat , plotlevels , mydir , 'tmean_norm' + norm_type )
bvf.plot_var_ave( time_mean_norm , lon , lat , mydir , 'tmean_norm' + norm_type )
#Plot mean growing rate
bvf.plot_var_levels( time_mean_growth_rate , lon , lat , plotlevels , mydir , 'tmean_grrate' + norm_type )
bvf.plot_var_ave( time_mean_growth_rate , lon , lat , mydir , 'tmean_grrate' + norm_type )
#Plot std norm
mydir=plotbasedir + '/time_independent_plots/' + '/' + pertstr + '/'
bvf.plot_var_levels( time_sprd_norm , lon , lat , plotlevels , mydir , 'tstd_norm' + norm_type )
bvf.plot_var_ave( time_sprd_norm , lon , lat , mydir , 'tstd_norm' + norm_type )
#Plot std growing rate
bvf.plot_var_levels( time_sprd_growth_rate , lon , lat , plotlevels , mydir , 'tstd_grrate' + norm_type )
bvf.plot_var_ave( time_sprd_growth_rate , lon , lat , mydir , 'tstd_grrate' + norm_type )
|
gustfrontar/LETKF_WRF
|
wrf/verification/python/plot_letkfpert_growthrate.py
|
Python
|
gpl-3.0
| 8,806
|
[
"Gaussian"
] |
39887852ddbc798261a5f47fdada88c72855a785a14feecd618b7381da35edfa
|
#!/usr/bin/env python
#
# License: BSD
# https://raw.githubusercontent.com/stonier/py_trees/devel/LICENSE
#
##############################################################################
# Documentation
##############################################################################
"""
Visitors are entities that can be passed to a tree implementation
(e.g. :class:`~py_trees.trees.BehaviourTree`) and used to either visit
each and every behaviour in the tree, or visit behaviours as the tree is
traversed in an executing tick. At each behaviour, the visitor
runs its own method on the behaviour to do as it wishes - logging, introspecting, etc.
.. warning:: Visitors should not modify the behaviours they visit.
"""
##############################################################################
# Imports
##############################################################################
from . import common
# from . import console
# from . import syntax_highlighting
##############################################################################
# Visitors
##############################################################################
class VisitorBase(object):
    """
    Base template from which visitor implementations derive.

    Visitors are primarily designed to work with :class:`~py_trees.trees.BehaviourTree`
    but they can be used in the same way for other tree custodian implementations.

    Args:
        full (:obj:`bool`): flag to indicate whether it should be used to visit only traversed nodes or the entire tree

    Attributes:
        full (:obj:`bool`): flag to indicate whether it should be used to visit only traversed nodes or the entire tree
    """
    def __init__(self, full=False):
        self.full = full

    def initialise(self):
        """
        Hook for resetting internal state between ticks (i.e. between
        visitations). Subclasses override as required; the default is a no-op.
        """
        pass

    def run(self, behaviour):
        """
        Hook invoked once for each behaviour as it is ticked. Subclasses
        override it to introspect/log the behaviour; the default is a no-op.

        Args:
            behaviour (:class:`~py_trees.behaviour.Behaviour`): behaviour that is ticking
        """
        pass
class DebugVisitor(VisitorBase):
    """
    Logs the behaviour's status — and its feedback message, when one has been
    set — through the behaviour's own logger at debug level.
    """
    def __init__(self):
        super(DebugVisitor, self).__init__(full=False)

    def run(self, behaviour):
        # Fold the feedback message into the log line only when it is non-empty.
        visitor_name = self.__class__.__name__
        if behaviour.feedback_message:
            behaviour.logger.debug("%s.run() [%s][%s]" % (visitor_name, behaviour.feedback_message, behaviour.status))
        else:
            behaviour.logger.debug("%s.run() [%s]" % (visitor_name, behaviour.status))
class SnapshotVisitor(VisitorBase):
    """
    Accumulates runtime information while the tree is ticked so that a
    snapshot view of the tree can be published once the iteration finishes.

    Args:
        full (:obj:`bool`): flag to indicate whether it should be used to visit only traversed nodes or the entire tree

    Attributes:
        nodes (dict): dictionary of behaviour id (uuid.UUID) and status (:class:`~py_trees.common.Status`) pairs
        running_nodes([uuid.UUID]): list of id's for behaviours which were traversed in the current tick
        previously_running_nodes([uuid.UUID]): list of id's for behaviours which were traversed in the last tick

    .. seealso::
        This visitor is used with the :class:`~py_trees.trees.BehaviourTree` class to collect
        information and :func:`~py_trees.display.ascii_tree` to display information.
    """
    def __init__(self, full=False):
        super(SnapshotVisitor, self).__init__(full=full)
        self.nodes = {}
        self.running_nodes = []
        self.previously_running_nodes = []

    def initialise(self):
        """
        Roll the current running list over to ``previously_running_nodes`` and
        clear everything else; called before each tree tick.
        """
        self.nodes = {}
        self.previously_running_nodes = self.running_nodes
        self.running_nodes = []

    def run(self, behaviour):
        """
        Record the ticking behaviour's id/status pair and, when its status is
        :data:`~py_trees.common.Status.RUNNING`, append it to the running list.

        Args:
            behaviour (:class:`~py_trees.behaviour.Behaviour`): behaviour that is ticking
        """
        self.nodes[behaviour.id] = behaviour.status
        if behaviour.status == common.Status.RUNNING:
            self.running_nodes.append(behaviour.id)
|
stonier/py_trees_suite
|
py_trees/visitors.py
|
Python
|
bsd-3-clause
| 4,739
|
[
"VisIt"
] |
44fb90368276ae227796cbe97a84397849b438019bb97cef4653f41da88f467b
|
# Copyright (C) 2004-2008 Paul Cochrane
#
# This program is free software; you can redistribute it and/or
# modify it under the terms of the GNU General Public License
# as published by the Free Software Foundation; either version 2
# of the License, or (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program; if not, write to the Free Software
# Foundation, Inc., 59 Temple Place - Suite 330, Boston, MA 02111-1307, USA.
"""
Example of plotting ellipsoids (useful for visualising tensors) with pyvisi
"""
import sys

# Select the rendering module from the (optional) first command line
# argument; default to the vtk renderer when none is given.
numArgs = len(sys.argv)
if numArgs == 1:
    ren_mod = "vtk"
else:
    ren_mod = sys.argv[1]

# set up some data to plot
from numpy import *

# example code for how a user would write a script in pyvisi
from pyvisi import *   # base level visualisation stuff

# import the objects to render the scene using the specific renderer
if ren_mod == "vtk":
    from pyvisi.renderers.vtk import *     # vtk
elif ren_mod == "povray":
    from pyvisi.renderers.povray import *  # povray
else:
    # FIX: this was the Python 2-only statement form ``raise ValueError, "..."``;
    # the call form below is equivalent and valid in both Python 2 and 3.
    raise ValueError("Unknown renderer module")

# define the scene object
# a Scene is a container for all of the kinds of things you want to put
# into your plot for instance, images, meshes, arrow/vector/quiver plots,
# contour plots, spheres etc.
scene = Scene()

# create a EllipsoidPlot object
plot = EllipsoidPlot(scene)

# add some helpful info to the plot
plot.title = 'Example ellipsoid plot'

# plot data defined in a vtk file
plot.setData(fname='stress22.vtk', format='vtk-xml')

# render interactively, pausing for the user, then save to disk
scene.render(pause=True, interactive=True)

# save the plot
scene.save(fname="ellipsoidPlot.png", format="png")

# vim: expandtab shiftwidth=4:
|
paultcochrane/pyvisi
|
examples/ellipsoidPlot.py
|
Python
|
gpl-2.0
| 1,977
|
[
"VTK"
] |
64511d60c811412d35469f1ef932b49ad063ffc0e78b3fdc6612e9aca3ae9784
|
# -*- coding: utf-8 -*-
"""
Spike Triggered Average calculator
Input:
stimulus (t)
spike spikeTimes (t)
if no spiketimes, generate randoms
Given stim and spiketimes, grabs the spike windows, and calcs the spike triggered average.
output:
spike triggered average
Created on Wed Feb 11 21:20:00 2015
@author: Richard Decal, decal@uw.edu
"""
import numpy as np
import matplotlib.pyplot as plt
def gen_rand_spiketimes(number_of_spikes, STIM_LEN):
    """Generate ``number_of_spikes`` uniformly random spike times.

    Returns a 0/1 spike train of length ``STIM_LEN`` together with the list of
    the chosen time bins (duplicate bins are possible).

    TODO: make this a poisson process
    TODO: don't have random spiketimes: spike when convolution tells you to!
    """
    spike_train = np.zeros(STIM_LEN)
    chosen_bins = []
    for _ in range(number_of_spikes):
        bin_index = np.random.randint(low=0, high=STIM_LEN-1)
        spike_train[bin_index] = 1
        chosen_bins.append(bin_index)
    return spike_train, chosen_bins
def window_grabber(stimulus, spikeTimes, WINDOW_LEN):
    """Collect the stimulus window preceding each spike.

    Spikes whose time is not strictly greater than ``WINDOW_LEN`` are
    discarded, since no full window precedes them. Returns an array with one
    row per retained spike.

    TODO: instead of discarding spikes at beginning, make vector with leading zeros??
    """
    # Comprehension keeps only spikes far enough from the start of the stimulus.
    windows = [stimulus[t - WINDOW_LEN:t] for t in spikeTimes if t > WINDOW_LEN]
    return np.array(windows)
def spike_trigger_averager(spike_trigger_windows):
    """Average the grabbed windows element-wise.

    Given an array of many grabbed windows (one row each), returns the
    spike-triggered average window (a vector of length ``WINDOW_LEN``).
    """
    return np.average(spike_trigger_windows, axis=0)
def figplotter(WINDOW_LEN, spike_trigger_average):
    """Plot the spike-triggered average against its window time axis.

    Blocks until the interactive matplotlib window is closed (``plt.show()``).
    """
    plt.plot(range(0,WINDOW_LEN), spike_trigger_average)
    plt.show()
def main(stimulus=None, WINDOW_LEN=50):
    """Compute the spike-triggered average of ``stimulus``.

    Args:
        stimulus: 1-D stimulus time series. When ``None`` (the default), a
            length-3000 set of points drawn from a Gaussian distribution with
            mean=0, stdev=1.0 is read from ``gauss_stimulus_3000dim.txt``.
            FIX: loading is now done lazily here — the original evaluated
            ``np.genfromtxt`` in the argument default, which performed file
            I/O at import time and broke import when the file was absent.
        WINDOW_LEN: number of samples in each pre-spike window.

    Returns:
        (spike_trigger_average, WINDOW_LEN) tuple.

    TODO: allow input of spikes and spikeTimes, generate if none are available
    """
    if stimulus is None:
        stimulus = np.genfromtxt("gauss_stimulus_3000dim.txt")
    STIM_LEN = len(stimulus)
    # TODO: replace the random placeholder with calculated spiketimes
    spike_timeseries, spikeTimes = gen_rand_spiketimes(1000, STIM_LEN)
    spike_trigger_windows = window_grabber(stimulus, spikeTimes, WINDOW_LEN)
    spike_trigger_average = spike_trigger_averager(spike_trigger_windows)
    return spike_trigger_average, WINDOW_LEN
# Script entry point: compute the STA on the default stimulus and plot it.
if __name__ == "__main__":
    spike_trigger_average, WINDOW_LEN = main()
    figplotter(WINDOW_LEN, spike_trigger_average)
|
isomerase/MyPyGLM
|
calc_STA.py
|
Python
|
mit
| 2,653
|
[
"Gaussian"
] |
b11445f9a05e63adcaa2c85208d2d3932f8d386b103ca476e0a41752c7661eed
|
"""System test for multivariate TE using the discrete JIDT estimator."""
import os
import numpy as np
from idtxl.multivariate_te import MultivariateTE
from idtxl.data import Data
from test_estimators_jidt import _get_gauss_data
from idtxl.idtxl_utils import calculate_mi
def test_multivariate_te_corr_gaussian(estimator=None):
    """Test multivariate TE estimation on correlated Gaussians.

    Run the multivariate TE algorithm on two sets of random Gaussian data with
    a given covariance. The second data set is shifted by one sample creating
    a source-target delay of one sample. This example is modeled after the
    JIDT demo 4 for transfer entropy. The resulting TE can be compared to the
    analytical result (but expect some error in the estimate).

    The simulated delay is 1 sample, i.e., the algorithm should find
    significant TE from sample (0, 1), a sample in process 0 with lag/delay 1.
    The final target sample should always be (1, 1), the mandatory sample at
    lag 1, because there is no memory in the process.

    Note:
        This test runs considerably faster than other system tests.
        This produces strange small values for non-coupled sources. TODO
    """
    if estimator is None:
        estimator = 'JidtKraskovCMI'

    cov = 0.4
    expected_mi, source1, source2, target = _get_gauss_data(covariance=cov)

    data = Data(normalise=True)
    # Shift the source by one sample to create a source-target delay of 1.
    data.set_data(np.vstack((source1[1:].T, target[:-1].T)), 'ps')
    settings = {
        'cmi_estimator': estimator,
        'discretise_method': 'max_ent',
        'max_lag_sources': 5,
        'min_lag_sources': 1,
        'max_lag_target': 5,
        'n_perm_max_stat': 21,
        'n_perm_min_stat': 21,
        'n_perm_omnibus': 21,
        'n_perm_max_seq': 21,
    }
    random_analysis = MultivariateTE()
    # Analyse target 1 with both discretisation methods.
    results_max_ent = random_analysis.analyse_single_target(settings, data, 1)
    settings['discretise_method'] = 'equal'
    settings['n_discrete_bins'] = 5
    results_equal = random_analysis.analyse_single_target(settings, data, 1)

    # Assert that there are significant conditionals from the source for target
    # 1. For 500 repetitions I got mean errors of 0.02097686 and 0.01454073 for
    # examples 1 and 2 respectively. The maximum errors were 0.093841 and
    # 0.05833172 respectively. This inspired the following error boundaries.
    corr_expected = cov / (1 * np.sqrt(cov**2 + (1-cov)**2))
    expected_res = calculate_mi(corr_expected)
    estimated_res_max_ent = results_max_ent.get_single_target(1, fdr=False)['te'][0]
    estimated_res_equal = results_equal.get_single_target(1, fdr=False)['te'][0]
    diff_max_ent = np.abs(estimated_res_max_ent - expected_res)
    diff_equal = np.abs(estimated_res_equal - expected_res)
    print('Expected source sample: (0, 1)\nExpected target sample: (1, 1)')
    # FIX: scale the relative error by 100 so the printed value actually is a
    # percentage, as the ' % ' suffix in the format string promises.
    print(('Max. entropy binning - estimated TE: {0:5.4f}, analytical result: '
           '{1:5.4f}, error: {2:2.2f} % ').format(
               estimated_res_max_ent, expected_res,
               100 * diff_max_ent / expected_res))
    print(('Equal binning - estimated TE: {0:5.4f}, analytical result: '
           '{1:5.4f}, error: {2:2.2f} % ').format(
               estimated_res_equal, expected_res,
               100 * diff_equal / expected_res))
    # FIX: the failure messages previously subscripted the Results object
    # dict-style (results_max_ent['cond_sources_te']), which would raise while
    # formatting; the equal-binning message also copy-pasted the max-entropy
    # values. Both now report the values that were actually compared.
    assert diff_max_ent < 0.1, (
        'Multivariate TE calculation for correlated Gaussians using '
        'discretised data with max. entropy binning failed (error larger 0.1: '
        '{0}, expected: {1}, actual: {2}).'.format(
            diff_max_ent, expected_res, estimated_res_max_ent))
    assert diff_equal < 0.1, (
        'Multivariate TE calculation for correlated Gaussians using '
        'discretised data with equal binning failed (error larger 0.1: {0}, '
        'expected: {1}, actual: {2}).'.format(
            diff_equal, expected_res, estimated_res_equal))
def test_multivariate_te_lagged_copies():
    """Test multivariate TE estimation on a lagged copy of random data.

    Run the multivariate TE algorithm on two sets of random data, where the
    second set is a lagged copy of the first. This test should find no
    significant conditionals at all (neither in the target's nor in the
    source's past).

    Note:
        This test takes several hours and may take one to two days on some
        machines.
    """
    lag = 3
    d_0 = np.random.rand(1, 1000, 20)
    # d_1 is d_0 delayed by ``lag`` samples, with fresh noise filling the
    # first ``lag`` samples.
    d_1 = np.hstack((np.random.rand(1, lag, 20), d_0[:, lag:, :]))

    data = Data()
    data.set_data(np.vstack((d_0, d_1)), 'psr')
    settings = {
        'cmi_estimator': 'JidtDiscreteCMI',
        'discretise_method': 'max_ent',
        'max_lag_sources': 5,
        'n_perm_max_stat': 21,
        'n_perm_min_stat': 21,
        'n_perm_omnibus': 500,
        'n_perm_max_seq': 500,
    }
    random_analysis = MultivariateTE()
    # Assert that there are no significant conditionals in either direction
    # other than the mandatory single sample in the target's past (which
    # ensures that we calculate a proper TE at any time in the algorithm).
    for t in range(2):
        results = random_analysis.analyse_single_target(settings, data, t)
        # Exactly one selected variable overall: the mandatory target-past sample.
        assert len(results.get_single_target(t, fdr=False).selected_vars_full) == 1, (
            'Conditional contains more/less than 1 variables.')
        assert not results.get_single_target(t, fdr=False).selected_vars_sources, (
            'Conditional sources is not empty.')
        assert len(results.get_single_target(t, fdr=False).selected_vars_target) == 1, (
            'Conditional target contains more/less than 1 variable.')
        # With no significant sources, all omnibus statistics must stay unset.
        assert results.get_single_target(t, fdr=False).selected_sources_pval is None, (
            'Conditional p-value is not None.')
        assert results.get_single_target(t, fdr=False).omnibus_pval is None, (
            'Omnibus p-value is not None.')
        assert results.get_single_target(t, fdr=False).omnibus_sign is None, (
            'Omnibus significance is not None.')
        assert results.get_single_target(t, fdr=False).selected_sources_te is None, (
            'Conditional TE values is not None.')
def test_multivariate_te_random():
    """Test multivariate TE estimation on two random data sets.

    Run the multivariate TE algorithm on two sets of random data with no
    coupling. This test should find no significant conditionals at all (neither
    in the target's nor in the source's past).

    Note:
        This test takes several hours and may take one to two days on some
        machines.
    """
    d = np.random.rand(2, 1000, 20)
    data = Data()
    data.set_data(d, 'psr')
    settings = {
        'cmi_estimator': 'JidtDiscreteCMI',
        'discretise_method': 'max_ent',
        'min_lag_sources': 1,
        'max_lag_sources': 5,
        'n_perm_max_stat': 200,
        'n_perm_min_stat': 200,
        'n_perm_omnibus': 500,
        'n_perm_max_seq': 500,
    }
    random_analysis = MultivariateTE()
    # Assert that there are no significant conditionals in either direction
    # other than the mandatory single sample in the target's past (which
    # ensures that we calculate a proper TE at any time in the algorithm).
    for t in range(2):
        results = random_analysis.analyse_single_target(settings, data, t)
        # Exactly one selected variable overall: the mandatory target-past sample.
        assert len(results.get_single_target(t, fdr=False).selected_vars_full) == 1, (
            'Conditional contains more/less than 1 variables.')
        assert not results.get_single_target(t, fdr=False).selected_vars_sources, (
            'Conditional sources is not empty.')
        assert len(results.get_single_target(t, fdr=False).selected_vars_target) == 1, (
            'Conditional target contains more/less than 1 variable.')
        # With no significant sources, all omnibus statistics must stay unset.
        assert results.get_single_target(t, fdr=False).selected_sources_pval is None, (
            'Conditional p-value is not None.')
        assert results.get_single_target(t, fdr=False).omnibus_pval is None, (
            'Omnibus p-value is not None.')
        assert results.get_single_target(t, fdr=False).omnibus_sign is None, (
            'Omnibus significance is not None.')
        assert results.get_single_target(t, fdr=False).selected_sources_te is None, (
            'Conditional TE values is not None.')
def test_multivariate_te_lorenz_2():
    """Test multivariate TE estimation on bivariately coupled Lorenz systems.

    Run the multivariate TE algorithm on two Lorenz systems with a coupling
    from first to second system with delay u = 45 samples. Both directions are
    analyzed, the algorithm should not find a coupling from system two to one.

    Note:
        This test takes several hours and may take one to two days on some
        machines.
    """
    # Example data ships with the test suite, relative to this file.
    d = np.load(os.path.join(os.path.dirname(__file__),
                             'data/lorenz_2_exampledata.npy'))
    data = Data()
    data.set_data(d, 'psr')
    settings = {
        'cmi_estimator': 'JidtDiscreteCMI',
        'discretise_method': 'max_ent',
        'max_lag_sources': 47,
        'min_lag_sources': 42,
        'max_lag_target': 20,
        'tau_target': 2,
        'n_perm_max_stat': 21,  # 200
        'n_perm_min_stat': 21,  # 200
        'n_perm_omnibus': 21,
        'n_perm_max_seq': 21,  # this should be equal to the min stats b/c we
                               # reuse the surrogate table from the min stats
    }
    lorenz_analysis = MultivariateTE()
    # FOR DEBUGGING: add the whole history for k = 20, tau = 2 to the
    # estimation, this makes things faster, b/c these don't have to be
    # tested again. Note conditionals are specified using lags.
    settings['add_conditionals'] = [(1, 19), (1, 17), (1, 15), (1, 13),
                                    (1, 11), (1, 9), (1, 7), (1, 5), (1, 3),
                                    (1, 1)]
    # NOTE: the settings below deliberately override the lag values set above.
    settings['max_lag_sources'] = 60
    settings['min_lag_sources'] = 31
    settings['tau_sources'] = 2
    settings['max_lag_target'] = 1  # was 0 before, but this is no longer allowed by the estimator
    settings['tau_target'] = 1

    # Just analyse the coupled direction
    results = lorenz_analysis.analyse_single_target(settings, data, 1)
    print(results._single_target)
    adj_matrix = results.get_adjacency_matrix(weights='binary', fdr=False)
    adj_matrix.print_matrix()
def test_multivariate_te_mute():
    """Test multivariate TE estimation on the MUTE example network.

    Test data comes from a network that is used as an example in the paper on
    the MuTE toolbox (Montalto, PLOS ONE, 2014, eq. 14). The network has the
    following (non-linear) couplings:

    0 -> 1, u = 2
    0 -> 2, u = 3
    0 -> 3, u = 2 (non-linear)
    3 -> 4, u = 1
    4 -> 3, u = 1

    The maximum order of any single AR process is never higher than 2.
    """
    data = Data()
    data.generate_mute_data(n_samples=1000, n_replications=10)
    settings = {
        'cmi_estimator': 'JidtDiscreteCMI',
        'discretise_method': 'max_ent',
        'max_lag_sources': 3,
        'min_lag_sources': 1,
        'max_lag_target': 3,
        'n_perm_max_stat': 21,
        'n_perm_min_stat': 21,
        'n_perm_omnibus': 21,
        'n_perm_max_seq': 21,  # this should be equal to the min stats b/c we
                               # reuse the surrogate table from the min stats
    }

    network_analysis = MultivariateTE()
    # Run the same analysis with both discretisation methods and compare.
    results_me = network_analysis.analyse_network(settings, data,
                                                  targets=[1, 2])
    settings['discretise_method'] = 'equal'
    results_eq = network_analysis.analyse_network(settings, data,
                                                  targets=[1, 2])

    for t in [1, 2]:
        print('Target {0}: equal binning: {1}, max. ent. binning: {2}'.format(
            t,
            results_eq.get_single_target(t, fdr=False).omnibus_te,
            results_me.get_single_target(t, fdr=False).omnibus_te
        ))
        # Skip comparison of estimates if analyses returned different source
        # sets. This will always lead to different estimates.
        if (results_eq.get_single_target(t, fdr=False).selected_vars_sources ==
                results_me.get_single_target(t, fdr=False).selected_vars_sources):
            # FIX: compare the current target t — the original always compared
            # target 1 regardless of the loop variable (copy-paste bug).
            assert (np.isclose(
                results_eq.get_single_target(t, fdr=False).omnibus_te,
                results_me.get_single_target(t, fdr=False).omnibus_te,
                rtol=0.05)), ('Target {0}: unequal results for both binning '
                              'methods.'.format(t))
# Run the full system-test suite when executed as a script. NOTE: several of
# these tests are long-running (hours to days, per their docstrings).
if __name__ == '__main__':
    test_multivariate_te_lorenz_2()
    test_multivariate_te_mute()
    test_multivariate_te_random()
    test_multivariate_te_lagged_copies()
    test_multivariate_te_corr_gaussian()
    test_multivariate_te_corr_gaussian('OpenCLKraskovCMI')
|
pwollstadt/trentoolxl
|
test/systemtest_multivariate_te_discrete.py
|
Python
|
gpl-3.0
| 13,241
|
[
"Gaussian"
] |
f68503f15accb1b55948c9f1d45af06e5700b3d04ae9a0332af85840af5012e4
|
#!/usr/bin/env python
'''
setup board.h for chibios
'''
import argparse
import sys
import fnmatch
import os
import dma_resolver
import shlex
import pickle
import re
import shutil
# Command line interface: output directory, bootloader flag, the hwdef file
# to process and an optional user default parameter file.
parser = argparse.ArgumentParser("chibios_pins.py")
parser.add_argument(
    '-D', '--outdir', type=str, default=None, help='Output directory')
parser.add_argument(
    '--bootloader', action='store_true', default=False, help='configure for bootloader')
parser.add_argument(
    'hwdef', type=str, default=None, help='hardware definition file')
parser.add_argument(
    '--params', type=str, default=None, help='user default params path')

args = parser.parse_args()

# output variables for each pin
f4f7_vtypes = ['MODER', 'OTYPER', 'OSPEEDR', 'PUPDR', 'ODR', 'AFRL', 'AFRH']
f1_vtypes = ['CRL', 'CRH', 'ODR']
f1_input_sigs = ['RX', 'MISO', 'CTS']
f1_output_sigs = ['TX', 'MOSI', 'SCK', 'RTS', 'CH1', 'CH2', 'CH3', 'CH4']
# peripheral-name prefixes that may carry an alternate function
af_labels = ['USART', 'UART', 'SPI', 'I2C', 'SDIO', 'SDMMC', 'OTG', 'JT', 'TIM', 'CAN']

# register names used for the selected MCU series; filled in by
# setup_mcu_type_defaults() (either f1_vtypes or f4f7_vtypes)
vtypes = []

# number of pins in each port
pincount = {
    'A': 16,
    'B': 16,
    'C': 16,
    'D': 16,
    'E': 16,
    'F': 16,
    'G': 16,
    'H': 2,
    'I': 0,
    'J': 0,
    'K': 0
}

ports = pincount.keys()

# map of port letter -> list of generic_pin objects, one per pin
portmap = {}

# dictionary of all config lines, indexed by first word
config = {}

# alternate pin mappings
altmap = {}

# list of all pins in config file order
allpins = []

# list of configs by type
bytype = {}

# list of alt configs by type
alttype = {}

# list of configs by label
bylabel = {}

# list of alt configs by label
altlabel = {}

# list of SPI devices
spidev = []

# dictionary of ROMFS files
romfs = {}

# SPI bus list
spi_list = []

# all config lines in order
alllines = []

# allow for extra env vars
env_vars = {}

# build flags for ChibiOS makefiles
build_flags = []

# sensor lists
imu_list = []
compass_list = []
baro_list = []

all_lines = []

# set while parsing the hwdef file
mcu_type = None
dual_USB_enabled = False
def is_int(str):
    '''return True if the given string parses as an integer'''
    # NOTE: the parameter shadows the builtin ``str``; the name is kept for
    # interface compatibility with existing callers.
    try:
        int(str)
        return True
    except Exception:
        return False
def error(str):
    '''show an error and exit'''
    # Prints the message to stdout then terminates the whole script with
    # exit status 1; this function never returns.
    print("Error: " + str)
    sys.exit(1)
def get_mcu_lib(mcu):
    '''return the python module describing the chosen MCU'''
    import importlib
    try:
        mcu_module = importlib.import_module(mcu)
    except ImportError:
        # error() exits the script; execution does not continue past here.
        error("Unable to find module for MCU %s" % mcu)
    else:
        return mcu_module
def setup_mcu_type_defaults():
    '''setup defaults for given mcu type'''
    global pincount, ports, portmap, vtypes, mcu_type
    lib = get_mcu_lib(mcu_type)
    # The MCU library module may override the default per-port pin counts.
    if hasattr(lib, 'pincount'):
        pincount = lib.pincount
    # F1-series parts use CRL/CRH/ODR registers; other series use the
    # MODER/OTYPER/OSPEEDR/PUPDR/ODR/AFRL/AFRH set.
    # NOTE(review): ``mcu_series`` is a module global assumed to be set by the
    # hwdef parsing code before this runs — confirm call ordering.
    if mcu_series.startswith("STM32F1"):
        vtypes = f1_vtypes
    else:
        vtypes = f4f7_vtypes
    ports = pincount.keys()
    # setup default as input pins
    for port in ports:
        portmap[port] = []
        for pin in range(pincount[port]):
            portmap[port].append(generic_pin(port, pin, None, 'INPUT', []))
def get_alt_function(mcu, pin, function):
    '''return alternative function number for a pin

    Returns None when the function is not an alternate function (inversion
    labels, software RTS, unknown prefixes); returns 0 when the MCU library
    provides no AltFunction_map but the prefix is a known peripheral.
    Exits via error() for a peripheral function missing from the map.
    '''
    lib = get_mcu_lib(mcu)

    if function.endswith('_TXINV') or function.endswith('_RXINV'):
        # RXINV and TXINV are special labels for inversion pins, not alt-functions
        return None

    if hasattr(lib, "AltFunction_map"):
        alt_map = lib.AltFunction_map
    else:
        # just check if Alt Func is available or not
        for l in af_labels:
            if function.startswith(l):
                return 0
        return None

    if function and function.endswith("_RTS") and (
            function.startswith('USART') or function.startswith('UART')):
        # we do software RTS
        return None

    # Look up "PIN:FUNCTION" (e.g. "PA9:USART1_TX") in the MCU's map.
    for l in af_labels:
        if function.startswith(l):
            s = pin + ":" + function
            if s not in alt_map:
                error("Unknown pin function %s for MCU %s" % (s, mcu))
            return alt_map[s]
    return None
def have_type_prefix(ptype):
    '''return True if any configured peripheral type starts with ``ptype``'''
    # bytype/alttype are the module-level dicts of configs keyed by type.
    all_types = list(bytype.keys()) + list(alttype.keys())
    return any(t.startswith(ptype) for t in all_types)
def get_ADC1_chan(mcu, pin):
    '''return ADC1 channel for an analog pin

    Exits via error() when the MCU module or the pin's channel cannot be
    found.
    '''
    import importlib
    try:
        lib = importlib.import_module(mcu)
        ADC1_map = lib.ADC1_map
    except ImportError:
        # NOTE(review): a module lacking ADC1_map raises AttributeError, which
        # is not caught here — confirm whether that case can occur.
        error("Unable to find ADC1_Map for MCU %s" % mcu)

    if pin not in ADC1_map:
        error("Unable to find ADC1 channel for pin %s" % pin)
    return ADC1_map[pin]
class generic_pin(object):
'''class to hold pin definition'''
def __init__(self, port, pin, label, type, extra):
global mcu_series
self.portpin = "P%s%u" % (port, pin)
self.port = port
self.pin = pin
self.label = label
self.type = type
self.extra = extra
self.af = None
if type == 'OUTPUT':
self.sig_dir = 'OUTPUT'
else:
self.sig_dir = 'INPUT'
if mcu_series.startswith("STM32F1") and self.label is not None:
self.f1_pin_setup()
# check that labels and pin types are consistent
for prefix in ['USART', 'UART', 'TIM']:
if label is None or type is None:
continue
if type.startswith(prefix):
a1 = label.split('_')
a2 = type.split('_')
if a1[0] != a2[0]:
error("Peripheral prefix mismatch for %s %s %s" % (self.portpin, label, type))
def f1_pin_setup(self):
for label in af_labels:
if self.label.startswith(label):
if self.label.endswith(tuple(f1_input_sigs)):
self.sig_dir = 'INPUT'
self.extra.append('FLOATING')
elif self.label.endswith(tuple(f1_output_sigs)):
self.sig_dir = 'OUTPUT'
elif label == 'I2C':
self.sig_dir = 'OUTPUT'
elif label == 'OTG':
self.sig_dir = 'OUTPUT'
else:
error("Unknown signal type %s:%s for %s!" % (self.portpin, self.label, mcu_type))
def has_extra(self, v):
'''return true if we have the given extra token'''
return v in self.extra
def extra_prefix(self, prefix):
'''find an extra token starting with the given prefix'''
for e in self.extra:
if e.startswith(prefix):
return e
return None
def extra_value(self, name, type=None, default=None):
'''find an extra value of given type'''
v = self.extra_prefix(name)
if v is None:
return default
if v[len(name)] != '(' or v[-1] != ')':
error("Badly formed value for %s: %s\n" % (name, v))
ret = v[len(name) + 1:-1]
if type is not None:
try:
ret = type(ret)
except Exception:
error("Badly formed value for %s: %s\n" % (name, ret))
return ret
def is_RTS(self):
'''return true if this is a RTS pin'''
if self.label and self.label.endswith("_RTS") and (
self.type.startswith('USART') or self.type.startswith('UART')):
return True
return False
def is_CS(self):
'''return true if this is a CS pin'''
return self.has_extra("CS") or self.type == "CS"
def get_MODER_value(self):
'''return one of ALTERNATE, OUTPUT, ANALOG, INPUT'''
if self.af is not None:
v = "ALTERNATE"
elif self.type == 'OUTPUT':
v = "OUTPUT"
elif self.type.startswith('ADC'):
v = "ANALOG"
elif self.is_CS():
v = "OUTPUT"
elif self.is_RTS():
v = "OUTPUT"
else:
v = "INPUT"
return v
def get_MODER(self):
'''return one of ALTERNATE, OUTPUT, ANALOG, INPUT'''
return "PIN_MODE_%s(%uU)" % (self.get_MODER_value(), self.pin)
def get_OTYPER_value(self):
'''return one of PUSHPULL, OPENDRAIN'''
v = 'PUSHPULL'
if self.type.startswith('I2C'):
# default I2C to OPENDRAIN
v = 'OPENDRAIN'
values = ['PUSHPULL', 'OPENDRAIN']
for e in self.extra:
if e in values:
v = e
return v
def get_OTYPER(self):
'''return one of PUSHPULL, OPENDRAIN'''
return "PIN_OTYPE_%s(%uU)" % (self.get_OTYPER_value(), self.pin)
def get_OSPEEDR_value(self):
'''return one of SPEED_VERYLOW, SPEED_LOW, SPEED_MEDIUM, SPEED_HIGH'''
# on STM32F4 these speeds correspond to 2MHz, 25MHz, 50MHz and 100MHz
values = ['SPEED_VERYLOW', 'SPEED_LOW', 'SPEED_MEDIUM', 'SPEED_HIGH']
v = 'SPEED_MEDIUM'
for e in self.extra:
if e in values:
v = e
return v
def get_OSPEEDR_int(self):
'''return value from 0 to 3 for speed'''
values = ['SPEED_VERYLOW', 'SPEED_LOW', 'SPEED_MEDIUM', 'SPEED_HIGH']
v = self.get_OSPEEDR_value()
if v not in values:
error("Bad OSPEED %s" % v)
return values.index(v)
def get_OSPEEDR(self):
'''return one of SPEED_VERYLOW, SPEED_LOW, SPEED_MEDIUM, SPEED_HIGH'''
return "PIN_O%s(%uU)" % (self.get_OSPEEDR_value(), self.pin)
def get_PUPDR_value(self):
'''return one of FLOATING, PULLUP, PULLDOWN'''
values = ['FLOATING', 'PULLUP', 'PULLDOWN']
v = 'FLOATING'
if self.is_CS():
v = "PULLUP"
# generate pullups for UARTs
if (self.type.startswith('USART') or
self.type.startswith('UART')) and (
(self.label.endswith('_TX') or
self.label.endswith('_RX') or
self.label.endswith('_CTS') or
self.label.endswith('_RTS'))):
v = "PULLUP"
# generate pullups for SDIO and SDMMC
if (self.type.startswith('SDIO') or
self.type.startswith('SDMMC')) and (
(self.label.endswith('_D0') or
self.label.endswith('_D1') or
self.label.endswith('_D2') or
self.label.endswith('_D3') or
self.label.endswith('_CMD'))):
v = "PULLUP"
for e in self.extra:
if e in values:
v = e
return v
def get_PUPDR(self):
'''return one of FLOATING, PULLUP, PULLDOWN wrapped in PIN_PUPDR_ macro'''
return "PIN_PUPDR_%s(%uU)" % (self.get_PUPDR_value(), self.pin)
def get_ODR_F1_value(self):
'''return one of LOW, HIGH'''
values = ['LOW', 'HIGH']
v = 'HIGH'
if self.type == 'OUTPUT':
v = 'LOW'
elif self.label is not None and self.label.startswith('I2C'):
v = 'LOW'
for e in self.extra:
if e in values:
v = e
# for some controllers input pull up down is selected by ODR
if self.type == "INPUT":
v = 'LOW'
if 'PULLUP' in self.extra:
v = "HIGH"
return v
def get_ODR_value(self):
    '''return one of LOW, HIGH'''
    # F1 parts select input pull direction via ODR, so they need
    # their own logic
    if mcu_series.startswith("STM32F1"):
        return self.get_ODR_F1_value()
    # default HIGH unless the hwdef extras say otherwise (last wins)
    overrides = [e for e in self.extra if e in ('LOW', 'HIGH')]
    return overrides[-1] if overrides else 'HIGH'
def get_ODR(self):
    '''return one of LOW, HIGH wrapped in PIN_ODR macro'''
    level = self.get_ODR_value()
    return "PIN_ODR_%s(%uU)" % (level, self.pin)
def get_AFIO_value(self):
    '''return the alternate-function number, 0 when no AF is set'''
    return 0 if self.af is None else self.af
def get_AFIO(self):
    '''return AFIO wrapped in PIN_AFIO_AF macro'''
    af = self.get_AFIO_value()
    return "PIN_AFIO_AF(%uU, %uU)" % (self.pin, af)
def get_AFRL(self):
    '''return AFIO macro for pins 0..7 (AFRL register), None otherwise'''
    if self.pin <= 7:
        return self.get_AFIO()
    return None
def get_AFRH(self):
    '''return AFIO macro for pins 8..15 (AFRH register), None otherwise'''
    if self.pin >= 8:
        return self.get_AFIO()
    return None
def get_CR_F1(self):
    '''return CR FLAGS for STM32F1xx

    On F1 parts a single CRL/CRH register field encodes both mode and
    speed, so this combines a PIN_SPEED_* fragment and a PIN_MODE_*
    fragment into one string. The mode selection priority is:
    alternate function > chip select > explicit output > ADC > input.
    '''
    # Check Speed: outputs and AF pins take an explicit speed (extras may
    # override the SPEED_MEDIUM default); CS pins default to low speed
    if self.sig_dir != "INPUT" or self.af is not None:
        speed_values = ['SPEED_LOW', 'SPEED_MEDIUM', 'SPEED_HIGH']
        v = 'SPEED_MEDIUM'
        for e in self.extra:
            if e in speed_values:
                v = e
        speed_str = "PIN_%s(%uU) |" % (v, self.pin)
    elif self.is_CS():
        speed_str = "PIN_SPEED_LOW(%uU) |" % (self.pin)
    else:
        speed_str = ""
    if self.af is not None:
        if self.label.endswith('_RX'):
            # uart RX is configured as a input, and can be pullup, pulldown or float
            if 'PULLUP' in self.extra or 'PULLDOWN' in self.extra:
                v = 'PUD'
            else:
                v = "NOPULL"
        elif self.label.startswith('I2C'):
            # I2C lines must be open-drain
            v = "AF_OD"
        else:
            v = "AF_PP"
    elif self.is_CS():
        v = "OUTPUT_PP"
    elif self.sig_dir == 'OUTPUT':
        if 'OPENDRAIN' in self.extra:
            v = 'OUTPUT_OD'
        else:
            v = "OUTPUT_PP"
    elif self.type.startswith('ADC'):
        v = "ANALOG"
    else:
        # plain input: pull up/down unless explicitly floating
        v = "PUD"
        if 'FLOATING' in self.extra:
            v = "NOPULL"
    mode_str = "PIN_MODE_%s(%uU)" % (v, self.pin)
    return "%s %s" % (speed_str, mode_str)
def get_CR(self):
    '''return CR FLAGS

    Combines a PIN_SPEED_* fragment (non-input pins only) with a
    PIN_MODE_* fragment. F1-series parts are delegated to get_CR_F1()
    which has a different register layout.
    '''
    if mcu_series.startswith("STM32F1"):
        return self.get_CR_F1()
    if self.sig_dir != "INPUT":
        # extras may override the SPEED_MEDIUM default; last match wins
        speed_values = ['SPEED_LOW', 'SPEED_MEDIUM', 'SPEED_HIGH']
        v = 'SPEED_MEDIUM'
        for e in self.extra:
            if e in speed_values:
                v = e
        speed_str = "PIN_%s(%uU) |" % (v, self.pin)
    else:
        speed_str = ""
    # Check Alternate function
    if self.type.startswith('I2C'):
        # I2C lines must be open-drain
        v = "AF_OD"
    elif self.sig_dir == 'OUTPUT':
        if self.af is not None:
            v = "AF_PP"
        else:
            v = "OUTPUT_PP"
    elif self.type.startswith('ADC'):
        v = "ANALOG"
    elif self.is_CS():
        v = "OUTPUT_PP"
    elif self.is_RTS():
        v = "OUTPUT_PP"
    else:
        # plain input: pull up/down unless explicitly floating
        v = "PUD"
        if 'FLOATING' in self.extra:
            v = "NOPULL"
    mode_str = "PIN_MODE_%s(%uU)" % (v, self.pin)
    return "%s %s" % (speed_str, mode_str)
def get_CRH(self):
    '''return CR flags for pins 8..15 (CRH register), None otherwise'''
    if self.pin >= 8:
        return self.get_CR()
    return None
def get_CRL(self):
    '''return CR flags for pins 0..7 (CRL register), None otherwise'''
    if self.pin <= 7:
        return self.get_CR()
    return None
def pal_modeline(self):
    '''return a mode line suitable for palSetModeLine()'''
    # MODER, OTYPER, OSPEEDR, PUPDR, ODR, AFRL, AFRH
    parts = [
        'PAL_STM32_MODE_' + self.get_MODER_value(),
        'PAL_STM32_OTYPE_' + self.get_OTYPER_value(),
        'PAL_STM32_SPEED(%u)' % self.get_OSPEEDR_int(),
        'PAL_STM32_PUPDR_' + self.get_PUPDR_value(),
    ]
    af = self.get_AFIO_value()
    if af != 0:
        parts.append('PAL_STM32_ALTERNATE(%u)' % af)
    return '|'.join(parts)
def __str__(self):
    '''return a human readable description of the pin,
    e.g. "PA4 MPU_CS CS AF5"

    Fix: the original used a local variable named ``str``, shadowing the
    builtin; renamed to ``extras``.
    '''
    extras = ''
    if self.af is not None:
        extras += " AF%u" % self.af
    if self.type.startswith('ADC1'):
        extras += " ADC1_IN%u" % get_ADC1_chan(mcu_type, self.portpin)
    if self.extra_value('PWM', type=int):
        extras += " PWM%u" % self.extra_value('PWM', type=int)
    return "P%s%u %s %s%s" % (self.port, self.pin, self.label, self.type,
                              extras)
def get_config(name, column=0, required=True, default=None, type=None, spaces=False):
    '''get a value from config dictionary

    name     -- key in the hwdef config dictionary
    column   -- which whitespace-separated value on the hwdef line to return
    required -- when True, abort via error() if the key/column is missing
                and no default is given
    default  -- value returned when the key is absent
    type     -- optional conversion callable (e.g. int); '0x' prefixed
                strings are parsed as hexadecimal for int
    spaces   -- when True, join this column and all following columns with
                spaces and return the combined string
    '''
    if name not in config:
        if required and default is None:
            error("missing required value %s in hwdef.dat" % name)
        return default
    if len(config[name]) < column + 1:
        if not required:
            return None
        error("missing required value %s in hwdef.dat (column %u)" % (name,
                                                                      column))
    if spaces:
        ret = ' '.join(config[name][column:])
    else:
        ret = config[name][column]
    if type is not None:
        # note: int('0x..') needs an explicit base, so hex is special-cased
        if type == int and ret.startswith('0x'):
            try:
                ret = int(ret, 16)
            except Exception:
                error("Badly formed config value %s (got %s)" % (name, ret))
        else:
            try:
                ret = type(ret)
            except Exception:
                error("Badly formed config value %s (got %s)" % (name, ret))
    return ret
def get_mcu_config(name, required=False):
    '''get a value from the mcu dictionary

    Looks up *name* in the per-MCU library module's ``mcu`` dict.
    Returns None when absent unless *required* is True, in which case
    generation is aborted via error().
    '''
    lib = get_mcu_lib(mcu_type)
    if not hasattr(lib, 'mcu'):
        error("Missing mcu config for %s" % mcu_type)
    if name not in lib.mcu:
        if required:
            error("Missing required mcu config %s for %s" % (name, mcu_type))
        return None
    return lib.mcu[name]
def make_line(label):
    '''return a PAL_LINE() expression for a pin label, or "0" when the
    label is not defined in this hwdef'''
    p = bylabel.get(label)
    if p is None:
        return "0"
    return 'PAL_LINE(GPIO%s,%uU)' % (p.port, p.pin)
def enable_can(f):
    '''setup for a CAN enabled board

    Writes the UAVCAN enable define to the header and exports the same
    flag to the build environment so the wscript picks it up.
    '''
    f.write('#define HAL_WITH_UAVCAN 1\n')
    env_vars['HAL_WITH_UAVCAN'] = '1'
def has_sdcard_spi():
    '''check for sdcard connected to spi bus'''
    # SPIDEV entries are lists whose first element is the device name
    return any(dev[0] == 'sdcard' for dev in spidev)
def write_mcu_config(f):
    '''write MCU config defines

    Emits the MCU selection, clock, stdout, filesystem, USB, CAN, stack,
    flash/RAM layout and CPU-flag settings to the generated hwdef.h, and
    exports matching variables to env_vars/build_flags for the build
    system.
    '''
    f.write('// MCU type (ChibiOS define)\n')
    f.write('#define %s_MCUCONF\n' % get_config('MCU'))
    mcu_subtype = get_config('MCU', 1)
    if mcu_subtype.endswith('xx'):
        f.write('#define %s_MCUCONF\n\n' % mcu_subtype[:-2])
    f.write('#define %s\n\n' % mcu_subtype)
    f.write('// crystal frequency\n')
    f.write('#define STM32_HSECLK %sU\n\n' % get_config('OSCILLATOR_HZ'))
    f.write('// UART used for stdout (printf)\n')
    if get_config('STDOUT_SERIAL', required=False):
        f.write('#define HAL_STDOUT_SERIAL %s\n\n' % get_config('STDOUT_SERIAL'))
        f.write('// baudrate used for stdout (printf)\n')
        f.write('#define HAL_STDOUT_BAUDRATE %u\n\n' % get_config('STDOUT_BAUDRATE', type=int))
    # select filesystem backing store: SDIO, SDMMC, SPI-attached sdcard,
    # or none (which also disables scripting)
    if have_type_prefix('SDIO'):
        f.write('// SDIO available, enable POSIX filesystem support\n')
        f.write('#define USE_POSIX\n\n')
        f.write('#define HAL_USE_SDC TRUE\n')
        build_flags.append('USE_FATFS=yes')
    elif have_type_prefix('SDMMC'):
        f.write('// SDMMC available, enable POSIX filesystem support\n')
        f.write('#define USE_POSIX\n\n')
        f.write('#define HAL_USE_SDC TRUE\n')
        f.write('#define STM32_SDC_USE_SDMMC1 TRUE\n')
        build_flags.append('USE_FATFS=yes')
    elif has_sdcard_spi():
        f.write('// MMC via SPI available, enable POSIX filesystem support\n')
        f.write('#define USE_POSIX\n\n')
        f.write('#define HAL_USE_MMC_SPI TRUE\n')
        f.write('#define HAL_USE_SDC FALSE\n')
        f.write('#define HAL_SDCARD_SPI_HOOK TRUE\n')
        build_flags.append('USE_FATFS=yes')
    else:
        f.write('#define HAL_USE_SDC FALSE\n')
        build_flags.append('USE_FATFS=no')
        env_vars['DISABLE_SCRIPTING'] = True
    if 'OTG1' in bytype:
        f.write('#define STM32_USB_USE_OTG1 TRUE\n')
        f.write('#define HAL_USE_USB TRUE\n')
        f.write('#define HAL_USE_SERIAL_USB TRUE\n')
    if 'OTG2' in bytype:
        f.write('#define STM32_USB_USE_OTG2 TRUE\n')
    if have_type_prefix('CAN') and 'AP_PERIPH' not in env_vars:
        enable_can(f)
    # thread stack sizes, overridable from hwdef.dat
    if get_config('PROCESS_STACK', required=False):
        env_vars['PROCESS_STACK'] = get_config('PROCESS_STACK')
    else:
        env_vars['PROCESS_STACK'] = "0x2000"
    if get_config('MAIN_STACK', required=False):
        env_vars['MAIN_STACK'] = get_config('MAIN_STACK')
    else:
        env_vars['MAIN_STACK'] = "0x400"
    if get_config('IOMCU_FW', required=False):
        env_vars['IOMCU_FW'] = get_config('IOMCU_FW')
    else:
        env_vars['IOMCU_FW'] = 0
    if get_config('PERIPH_FW', required=False):
        env_vars['PERIPH_FW'] = get_config('PERIPH_FW')
    else:
        env_vars['PERIPH_FW'] = 0
    # write any custom STM32 defines
    for d in alllines:
        if d.startswith('STM32_'):
            f.write('#define %s\n' % d)
        if d.startswith('define '):
            f.write('#define %s\n' % d[7:])
    flash_size = get_config('FLASH_SIZE_KB', type=int)
    f.write('#define BOARD_FLASH_SIZE %u\n' % flash_size)
    env_vars['BOARD_FLASH_SIZE'] = flash_size
    f.write('#define CRT1_AREAS_NUMBER 1\n')
    flash_reserve_start = get_config(
        'FLASH_RESERVE_START_KB', default=16, type=int)
    f.write('\n// location of loaded firmware\n')
    f.write('#define FLASH_LOAD_ADDRESS 0x%08x\n' % (0x08000000 + flash_reserve_start*1024))
    if args.bootloader:
        f.write('#define FLASH_BOOTLOADER_LOAD_KB %u\n' % get_config('FLASH_BOOTLOADER_LOAD_KB', type=int))
        f.write('#define FLASH_RESERVE_END_KB %u\n' % get_config('FLASH_RESERVE_END_KB', default=0, type=int))
    f.write('\n')
    # RAM_MAP is a list of (address, size_kb, flags) tuples from the mcu lib
    ram_map = get_mcu_config('RAM_MAP', True)
    f.write('// memory regions\n')
    regions = []
    total_memory = 0
    for (address, size, flags) in ram_map:
        regions.append('{(void*)0x%08x, 0x%08x, 0x%02x }' % (address, size*1024, flags))
        total_memory += size
    f.write('#define HAL_MEMORY_REGIONS %s\n' % ', '.join(regions))
    f.write('#define HAL_MEMORY_TOTAL_KB %u\n' % total_memory)
    f.write('#define HAL_RAM0_START 0x%08x\n' % ram_map[0][0])
    ram_reserve_start = get_config('RAM_RESERVE_START', default=0, type=int)
    if ram_reserve_start > 0:
        f.write('#define HAL_RAM_RESERVE_START 0x%08x\n' % ram_reserve_start)
    f.write('\n// CPU serial number (12 bytes)\n')
    f.write('#define UDID_START 0x%08x\n\n' % get_mcu_config('UDID_START', True))
    f.write('\n// APJ board ID (for bootloaders)\n')
    f.write('#define APJ_BOARD_ID %s\n' % get_config('APJ_BOARD_ID'))
    lib = get_mcu_lib(mcu_type)
    build_info = lib.build
    if get_mcu_config('CPU_FLAGS') and get_mcu_config('CORTEX'):
        # CPU flags specified in mcu file
        cortex = get_mcu_config('CORTEX')
        env_vars['CPU_FLAGS'] = get_mcu_config('CPU_FLAGS').split()
        build_info['MCU'] = cortex
        print("MCU Flags: %s %s" % (cortex, env_vars['CPU_FLAGS']))
    elif mcu_series.startswith("STM32F1"):
        cortex = "cortex-m3"
        env_vars['CPU_FLAGS'] = ["-mcpu=%s" % cortex]
        build_info['MCU'] = cortex
    else:
        cortex = "cortex-m4"
        env_vars['CPU_FLAGS'] = ["-mcpu=%s" % cortex, "-mfpu=fpv4-sp-d16", "-mfloat-abi=hard"]
        build_info['MCU'] = cortex
    env_vars['CORTEX'] = cortex
    if not args.bootloader:
        if cortex == 'cortex-m4':
            env_vars['CPU_FLAGS'].append('-DARM_MATH_CM4')
        elif cortex == 'cortex-m7':
            env_vars['CPU_FLAGS'].append('-DARM_MATH_CM7')
    if not mcu_series.startswith("STM32F1") and not args.bootloader:
        env_vars['CPU_FLAGS'].append('-u_printf_float')
        build_info['ENV_UDEFS'] = "-DCHPRINTF_USE_FLOAT=1"
    # setup build variables
    for v in build_info.keys():
        build_flags.append('%s=%s' % (v, build_info[v]))
    # setup for bootloader build
    if args.bootloader:
        f.write('''
#define HAL_BOOTLOADER_BUILD TRUE
#define HAL_USE_ADC FALSE
#define HAL_USE_EXT FALSE
#define HAL_NO_UARTDRIVER
#define HAL_NO_PRINTF
#define HAL_NO_CCM
#define CH_DBG_STATISTICS FALSE
#define CH_CFG_USE_TM FALSE
#define CH_CFG_USE_REGISTRY FALSE
#define CH_CFG_USE_WAITEXIT FALSE
#define CH_CFG_USE_DYNAMIC FALSE
#define CH_CFG_USE_MEMPOOLS FALSE
#define CH_CFG_USE_OBJ_FIFOS FALSE
#define CH_DBG_FILL_THREADS FALSE
#define CH_CFG_USE_SEMAPHORES FALSE
#define CH_CFG_USE_HEAP FALSE
#define CH_CFG_USE_MUTEXES FALSE
#define CH_CFG_USE_CONDVARS FALSE
#define CH_CFG_USE_CONDVARS_TIMEOUT FALSE
#define CH_CFG_USE_EVENTS FALSE
#define CH_CFG_USE_EVENTS_TIMEOUT FALSE
#define CH_CFG_USE_MESSAGES FALSE
#define CH_CFG_USE_MAILBOXES FALSE
#define CH_CFG_USE_FACTORY FALSE
#define CH_CFG_USE_MEMCORE FALSE
#define HAL_USE_I2C FALSE
#define HAL_USE_PWM FALSE
''')
    if env_vars.get('ROMFS_UNCOMPRESSED', False):
        f.write('#define HAL_ROMFS_UNCOMPRESSED\n')
def write_ldscript(fname):
    '''write ldscript.ld for this board

    Computes the usable flash window (total flash minus bootloader
    reserve at the start and storage reserve at the end) and the RAM0
    region, then writes a minimal MEMORY block that includes common.ld.
    '''
    flash_size = get_config('FLASH_USE_MAX_KB', type=int, default=0)
    if flash_size == 0:
        flash_size = get_config('FLASH_SIZE_KB', type=int)
    # space to reserve for bootloader and storage at start of flash
    flash_reserve_start = get_config(
        'FLASH_RESERVE_START_KB', default=16, type=int)
    env_vars['FLASH_RESERVE_START_KB'] = str(flash_reserve_start)
    # space to reserve for storage at end of flash
    flash_reserve_end = get_config('FLASH_RESERVE_END_KB', default=0, type=int)
    # ram layout
    ram_map = get_mcu_config('RAM_MAP', True)
    flash_base = 0x08000000 + flash_reserve_start * 1024
    if not args.bootloader:
        flash_length = flash_size - (flash_reserve_start + flash_reserve_end)
    else:
        # the bootloader itself only occupies its own load area
        flash_length = get_config('FLASH_BOOTLOADER_LOAD_KB', type=int)
    print("Generating ldscript.ld")
    f = open(fname, 'w')
    ram0_start = ram_map[0][0]
    ram0_len = ram_map[0][1] * 1024
    # possibly reserve some memory for app/bootloader comms
    ram_reserve_start = get_config('RAM_RESERVE_START', default=0, type=int)
    ram0_start += ram_reserve_start
    ram0_len -= ram_reserve_start
    f.write('''/* generated ldscript.ld */
MEMORY
{
    flash : org = 0x%08x, len = %uK
    ram0  : org = 0x%08x, len = %u
}
INCLUDE common.ld
''' % (flash_base, flash_length, ram0_start, ram0_len))
def copy_common_linkerscript(outdir, hwdef):
    '''copy the shared common.ld linker script next to the generated
    ldscript.ld so its INCLUDE directive resolves'''
    dirpath = os.path.dirname(hwdef)
    shutil.copy(os.path.join(dirpath, "../common/common.ld"),
                os.path.join(outdir, "common.ld"))
def get_USB_IDs():
    '''return tuple of USB VID/PID

    hwdef USB_VENDOR/USB_PRODUCT override the defaults. The default PID
    differs depending on whether dual CDC (OTG2) is enabled, so hosts can
    distinguish the two configurations.
    '''
    global dual_USB_enabled
    if dual_USB_enabled:
        # use pidcodes allocated ID
        default_vid = 0x1209
        default_pid = 0x5740
    else:
        default_vid = 0x1209
        default_pid = 0x5741
    return (get_config('USB_VENDOR', type=int, default=default_vid), get_config('USB_PRODUCT', type=int, default=default_pid))
def write_USB_config(f):
    '''write USB config defines

    No-op for boards without an OTG peripheral. The %BOARD% and %SERIAL%
    placeholders in the string defines are substituted later in the build.
    '''
    if not have_type_prefix('OTG'):
        return
    f.write('// USB configuration\n')
    (USB_VID, USB_PID) = get_USB_IDs()
    f.write('#define HAL_USB_VENDOR_ID 0x%04x\n' % int(USB_VID))
    f.write('#define HAL_USB_PRODUCT_ID 0x%04x\n' % int(USB_PID))
    f.write('#define HAL_USB_STRING_MANUFACTURER %s\n' % get_config("USB_STRING_MANUFACTURER", default="\"ArduPilot\""))
    default_product = "%BOARD%"
    if args.bootloader:
        default_product += "-BL"
    f.write('#define HAL_USB_STRING_PRODUCT %s\n' % get_config("USB_STRING_PRODUCT", default="\"%s\""%default_product))
    f.write('#define HAL_USB_STRING_SERIAL %s\n' % get_config("USB_STRING_SERIAL", default="\"%SERIAL%\""))
    f.write('\n\n')
def write_SPI_table(f):
    '''write SPI device table

    Each SPIDEV line must have exactly 7 fields:
    name, bus, devid, CS pin label, mode, lowspeed, highspeed.

    Fix: a malformed (wrong field count) SPIDEV line previously only
    printed a warning and then crashed with an IndexError on the field
    accesses below; it now aborts via error() like every other
    validation failure in this function.
    '''
    f.write('\n// SPI device table\n')
    devlist = []
    for dev in spidev:
        if len(dev) != 7:
            error("Badly formed SPIDEV line %s" % dev)
        name = '"' + dev[0] + '"'
        bus = dev[1]
        devid = dev[2]
        cs = dev[3]
        mode = dev[4]
        lowspeed = dev[5]
        highspeed = dev[6]
        if not bus.startswith('SPI') or bus not in spi_list:
            error("Bad SPI bus in SPIDEV line %s" % dev)
        if not devid.startswith('DEVID') or not is_int(devid[5:]):
            error("Bad DEVID in SPIDEV line %s" % dev)
        if cs not in bylabel or not bylabel[cs].is_CS():
            error("Bad CS pin in SPIDEV line %s" % dev)
        if mode not in ['MODE0', 'MODE1', 'MODE2', 'MODE3']:
            error("Bad MODE in SPIDEV line %s" % dev)
        if not lowspeed.endswith('*MHZ') and not lowspeed.endswith('*KHZ'):
            error("Bad lowspeed value %s in SPIDEV line %s" % (lowspeed, dev))
        if not highspeed.endswith('*MHZ') and not highspeed.endswith('*KHZ'):
            error("Bad highspeed value %s in SPIDEV line %s" % (highspeed,
                                                                dev))
        cs_pin = bylabel[cs]
        pal_line = 'PAL_LINE(GPIO%s,%uU)' % (cs_pin.port, cs_pin.pin)
        devidx = len(devlist)
        f.write(
            '#define HAL_SPI_DEVICE%-2u SPIDesc(%-17s, %2u, %2u, %-19s, SPIDEV_%s, %7s, %7s)\n'
            % (devidx, name, spi_list.index(bus), int(devid[5:]), pal_line,
               mode, lowspeed, highspeed))
        devlist.append('HAL_SPI_DEVICE%u' % devidx)
    f.write('#define HAL_SPI_DEVICE_LIST %s\n\n' % ','.join(devlist))
def write_SPI_config(f):
    '''write SPI config defines

    Collects all SPIn peripherals seen in the pin tables, emits a
    HAL_SPIn_CONFIG define per bus plus the combined bus list, then
    writes the per-device table via write_SPI_table().
    '''
    global spi_list
    for t in list(bytype.keys()) + list(alttype.keys()):
        if t.startswith('SPI'):
            spi_list.append(t)
    spi_list = sorted(spi_list)
    if len(spi_list) == 0:
        f.write('#define HAL_USE_SPI FALSE\n')
        return
    devlist = []
    for dev in spi_list:
        n = int(dev[3:])
        devlist.append('HAL_SPI%u_CONFIG' % n)
        f.write(
            '#define HAL_SPI%u_CONFIG { &SPID%u, %u, STM32_SPI_SPI%u_DMA_STREAMS }\n'
            % (n, n, n, n))
    f.write('#define HAL_SPI_BUS_LIST %s\n\n' % ','.join(devlist))
    write_SPI_table(f)
def parse_spi_device(dev):
    '''parse a SPI:xxx device item into a C++ get_device() expression'''
    parts = dev.split(':')
    if len(parts) != 2:
        error("Bad SPI device: %s" % dev)
    return 'hal.spi->get_device("%s")' % parts[1]
def parse_i2c_device(dev):
    '''parse a I2C:xxx:xxx device item

    Returns a (wrapper, device-expression) tuple; the wrapper is a
    FOREACH_I2C* macro for the ALL* pseudo-buses and empty for a
    numbered bus.
    '''
    fields = dev.split(':')
    if len(fields) != 3:
        error("Bad I2C device: %s" % dev)
    busaddr = int(fields[2], base=0)
    wrappers = {
        'ALL_EXTERNAL': 'FOREACH_I2C_EXTERNAL(b)',
        'ALL_INTERNAL': 'FOREACH_I2C_INTERNAL(b)',
        'ALL': 'FOREACH_I2C(b)',
    }
    if fields[1] in wrappers:
        return (wrappers[fields[1]], 'GET_I2C_DEVICE(b,0x%02x)' % (busaddr))
    busnum = int(fields[1])
    return ('', 'GET_I2C_DEVICE(%u,0x%02x)' % (busnum, busaddr))
def seen_str(dev):
    '''return string representation of device for checking for duplicates'''
    # only driver name and first argument identify a device
    key = dev[:2]
    return str(key)
def write_IMU_config(f):
    '''write IMU config defines

    For each IMU line, expands SPI:/I2C: device items into C++ device
    expressions and emits a HAL_INS_PROBEn define plus the combined
    probe list. Duplicate (driver, first-arg) pairs abort generation.
    '''
    global imu_list
    devlist = []
    wrapper = ''
    seen = set()
    for dev in imu_list:
        if seen_str(dev) in seen:
            error("Duplicate IMU: %s" % seen_str(dev))
        seen.add(seen_str(dev))
        driver = dev[0]
        for i in range(1, len(dev)):
            if dev[i].startswith("SPI:"):
                dev[i] = parse_spi_device(dev[i])
            elif dev[i].startswith("I2C:"):
                (wrapper, dev[i]) = parse_i2c_device(dev[i])
        n = len(devlist)+1
        devlist.append('HAL_INS_PROBE%u' % n)
        f.write(
            '#define HAL_INS_PROBE%u %s ADD_BACKEND(AP_InertialSensor_%s::probe(*this,%s))\n'
            % (n, wrapper, driver, ','.join(dev[1:])))
    if len(devlist) > 0:
        f.write('#define HAL_INS_PROBE_LIST %s\n\n' % ';'.join(devlist))
def write_MAG_config(f):
    '''write MAG config defines

    Like write_IMU_config(), but the driver field may carry an optional
    probe function suffix ("DRIVER:probe_fn") and the generated macro
    also names the DRIVER_* enable define.
    '''
    global compass_list
    devlist = []
    seen = set()
    for dev in compass_list:
        if seen_str(dev) in seen:
            error("Duplicate MAG: %s" % seen_str(dev))
        seen.add(seen_str(dev))
        driver = dev[0]
        probe = 'probe'
        wrapper = ''
        # allow "DRIVER:probe_fn" to select an alternative probe method
        a = driver.split(':')
        driver = a[0]
        if len(a) > 1 and a[1].startswith('probe'):
            probe = a[1]
        for i in range(1, len(dev)):
            if dev[i].startswith("SPI:"):
                dev[i] = parse_spi_device(dev[i])
            elif dev[i].startswith("I2C:"):
                (wrapper, dev[i]) = parse_i2c_device(dev[i])
        n = len(devlist)+1
        devlist.append('HAL_MAG_PROBE%u' % n)
        f.write(
            '#define HAL_MAG_PROBE%u %s ADD_BACKEND(DRIVER_%s, AP_Compass_%s::%s(%s))\n'
            % (n, wrapper, driver, driver, probe, ','.join(dev[1:])))
    if len(devlist) > 0:
        f.write('#define HAL_MAG_PROBE_LIST %s\n\n' % ';'.join(devlist))
def write_BARO_config(f):
    '''write barometer config defines

    Same pattern as write_MAG_config(); additionally wraps i2c_mgr
    device expressions in std::move() as the AP_Baro probe functions
    take the device pointer by value.
    '''
    global baro_list
    devlist = []
    seen = set()
    for dev in baro_list:
        if seen_str(dev) in seen:
            error("Duplicate BARO: %s" % seen_str(dev))
        seen.add(seen_str(dev))
        driver = dev[0]
        probe = 'probe'
        wrapper = ''
        # allow "DRIVER:probe_fn" to select an alternative probe method
        a = driver.split(':')
        driver = a[0]
        if len(a) > 1 and a[1].startswith('probe'):
            probe = a[1]
        for i in range(1, len(dev)):
            if dev[i].startswith("SPI:"):
                dev[i] = parse_spi_device(dev[i])
            elif dev[i].startswith("I2C:"):
                (wrapper, dev[i]) = parse_i2c_device(dev[i])
                if dev[i].startswith('hal.i2c_mgr'):
                    dev[i] = 'std::move(%s)' % dev[i]
        n = len(devlist)+1
        devlist.append('HAL_BARO_PROBE%u' % n)
        args = ['*this'] + dev[1:]
        f.write(
            '#define HAL_BARO_PROBE%u %s ADD_BACKEND(AP_Baro_%s::%s(%s))\n'
            % (n, wrapper, driver, probe, ','.join(args)))
    if len(devlist) > 0:
        f.write('#define HAL_BARO_PROBE_LIST %s\n\n' % ';'.join(devlist))
def write_board_validate_macro(f):
    '''write board validation macro

    Expands $NAME / ${NAME} references in each BOARD_VALIDATE expression
    against the config dictionary (repeatedly, until no substitution
    changes the string) and emits a chained ternary expression that
    evaluates to the name of the first failing check, or nullptr.
    '''
    global config
    validate_string = ''
    validate_dict = {}
    if 'BOARD_VALIDATE' in config:
        for check in config['BOARD_VALIDATE']:
            check_name = check
            check_string = check
            while True:
                def substitute_alias(m):
                    return '(' + get_config(m.group(1), spaces=True) + ')'
                output = re.sub(r'\$(\w+|\{([^}]*)\})', substitute_alias, check_string)
                if (output == check_string):
                    break
                check_string = output
            validate_dict[check_name] = check_string
        # Finally create check conditional
        for check_name in validate_dict:
            validate_string += "!" + validate_dict[check_name] + "?" + "\"" + check_name + "\"" + ":"
        validate_string += "nullptr"
        f.write('#define HAL_VALIDATE_BOARD (%s)\n\n' % validate_string)
def get_gpio_bylabel(label):
    '''get GPIO(n) setting on a pin label, or -1'''
    p = bylabel.get(label)
    return -1 if p is None else p.extra_value('GPIO', type=int, default=-1)
def get_extra_bylabel(label, name, default=None):
    '''get extra setting for a label by name'''
    p = bylabel.get(label)
    return default if p is None else p.extra_value(name, type=str, default=default)
def write_UART_config(f):
    '''write UART config defines

    Emits, in order: the per-slot driver declarations (A..H, padding
    with Empty drivers), the optional IOMCU UART, then a HAL_*_CONFIG
    initializer per device in UART_ORDER, and finally the device list
    and serial-port count. Sets the module-global dual_USB_enabled when
    an OTG2 device is present.
    '''
    global dual_USB_enabled
    if get_config('UART_ORDER', required=False) is None:
        return
    uart_list = config['UART_ORDER']
    f.write('\n// UART configuration\n')
    # write out driver declarations for HAL_ChibOS_Class.cpp
    devnames = "ABCDEFGH"
    sdev = 0
    idx = 0
    num_empty_uarts = 0
    for dev in uart_list:
        if dev == 'EMPTY':
            f.write('#define HAL_UART%s_DRIVER Empty::UARTDriver uart%sDriver\n' %
                    (devnames[idx], devnames[idx]))
            num_empty_uarts += 1
        else:
            f.write(
                '#define HAL_UART%s_DRIVER ChibiOS::UARTDriver uart%sDriver(%u)\n'
                % (devnames[idx], devnames[idx], sdev))
            sdev += 1
        idx += 1
    # pad the remaining slots with Empty drivers
    for idx in range(len(uart_list), len(devnames)):
        f.write('#define HAL_UART%s_DRIVER Empty::UARTDriver uart%sDriver\n' %
                (devnames[idx], devnames[idx]))
    if 'IOMCU_UART' in config:
        f.write('#define HAL_WITH_IO_MCU 1\n')
        idx = len(uart_list)
        f.write('#define HAL_UART_IOMCU_IDX %u\n' % idx)
        f.write(
            '#define HAL_UART_IO_DRIVER ChibiOS::UARTDriver uart_io(HAL_UART_IOMCU_IDX)\n'
        )
        uart_list.append(config['IOMCU_UART'][0])
        f.write('#define HAL_HAVE_SERVO_VOLTAGE 1\n')  # make the assumption that IO guarantees servo monitoring
        # all IOMCU capable boards have SBUS out
        f.write('#define AP_FEATURE_SBUS_OUT 1\n')
    else:
        f.write('#define HAL_WITH_IO_MCU 0\n')
    f.write('\n')
    need_uart_driver = False
    OTG2_index = None
    devlist = []
    have_rts_cts = False
    for dev in uart_list:
        if dev.startswith('UART'):
            n = int(dev[4:])
        elif dev.startswith('USART'):
            n = int(dev[5:])
        elif dev.startswith('OTG'):
            n = int(dev[3:])
        elif dev.startswith('EMPTY'):
            continue
        else:
            error("Invalid element %s in UART_ORDER" % dev)
        devlist.append('HAL_%s_CONFIG' % dev)
        tx_line = make_line(dev + '_TX')
        rx_line = make_line(dev + '_RX')
        rts_line = make_line(dev + '_RTS')
        if rts_line != "0":
            have_rts_cts = True
        if dev.startswith('OTG2'):
            f.write(
                '#define HAL_%s_CONFIG {(BaseSequentialStream*) &SDU2, true, false, 0, 0, false, 0, 0}\n'
                % dev)
            OTG2_index = uart_list.index(dev)
            dual_USB_enabled = True
        elif dev.startswith('OTG'):
            f.write(
                '#define HAL_%s_CONFIG {(BaseSequentialStream*) &SDU1, true, false, 0, 0, false, 0, 0}\n'
                % dev)
        else:
            need_uart_driver = True
            f.write(
                "#define HAL_%s_CONFIG { (BaseSequentialStream*) &SD%u, false, "
                % (dev, n))
            # F1 parts have no per-UART DMA configuration fields
            if mcu_series.startswith("STM32F1"):
                f.write("%s, %s, %s, " % (tx_line, rx_line, rts_line))
            else:
                f.write("STM32_%s_RX_DMA_CONFIG, STM32_%s_TX_DMA_CONFIG, %s, %s, %s, " %
                        (dev, dev, tx_line, rx_line, rts_line))
            # add inversion pins, if any
            f.write("%d, " % get_gpio_bylabel(dev + "_RXINV"))
            f.write("%s, " % get_extra_bylabel(dev + "_RXINV", "POL", "0"))
            f.write("%d, " % get_gpio_bylabel(dev + "_TXINV"))
            f.write("%s}\n" % get_extra_bylabel(dev + "_TXINV", "POL", "0"))
    if have_rts_cts:
        f.write('#define AP_FEATURE_RTSCTS 1\n')
    if OTG2_index is not None:
        f.write('#define HAL_OTG2_UART_INDEX %d\n' % OTG2_index)
        f.write('''
#if HAL_WITH_UAVCAN
#ifndef HAL_OTG2_PROTOCOL
#define HAL_OTG2_PROTOCOL SerialProtocol_SLCAN
#endif
#define HAL_SERIAL%d_PROTOCOL HAL_OTG2_PROTOCOL
#define HAL_SERIAL%d_BAUD 115200
#endif
''' % (OTG2_index, OTG2_index))
        f.write('#define HAL_HAVE_DUAL_USB_CDC 1\n')
    f.write('#define HAL_UART_DEVICE_LIST %s\n\n' % ','.join(devlist))
    if not need_uart_driver and not args.bootloader:
        f.write('''
#ifndef HAL_USE_SERIAL
#define HAL_USE_SERIAL HAL_USE_SERIAL_USB
#endif
''')
    num_uarts = len(devlist)
    if 'IOMCU_UART' in config:
        num_uarts -= 1
    if num_uarts > 8:
        error("Exceeded max num UARTs of 8 (%u)" % num_uarts)
    f.write('#define HAL_UART_NUM_SERIAL_PORTS %u\n' % (num_uarts+num_empty_uarts))
def write_UART_config_bootloader(f):
    '''write UART config defines (bootloader variant)

    Only a flat BOOTLOADER_DEV_LIST of BaseChannel pointers is needed
    by the bootloader, plus the OTG2 index if present.
    '''
    if get_config('UART_ORDER', required=False) is None:
        return
    uart_list = config['UART_ORDER']
    f.write('\n// UART configuration\n')
    devlist = []
    have_uart = False
    OTG2_index = None
    for u in uart_list:
        if u.startswith('OTG2'):
            devlist.append('(BaseChannel *)&SDU2')
            OTG2_index = uart_list.index(u)
        elif u.startswith('OTG'):
            devlist.append('(BaseChannel *)&SDU1')
        else:
            # NOTE(review): taking the last character assumes a
            # single-digit peripheral number (UART1..UART9) — confirm
            # against the supported UART_ORDER values
            unum = int(u[-1])
            devlist.append('(BaseChannel *)&SD%u' % unum)
            have_uart = True
    f.write('#define BOOTLOADER_DEV_LIST %s\n' % ','.join(devlist))
    if OTG2_index is not None:
        f.write('#define HAL_OTG2_UART_INDEX %d\n' % OTG2_index)
    if not have_uart:
        f.write('''
#ifndef HAL_USE_SERIAL
#define HAL_USE_SERIAL FALSE
#endif
''')
def write_I2C_config(f):
    '''write I2C config defines

    Emits one HAL_I2Cn_CONFIG initializer per bus in I2C_ORDER (with a
    DMA and a non-DMA variant selected by the preprocessor) plus the
    combined device list. Disables I2C entirely when the board defines
    no I2C peripherals.
    '''
    if not have_type_prefix('I2C'):
        print("No I2C peripherals")
        f.write('''
#ifndef HAL_USE_I2C
#define HAL_USE_I2C FALSE
#endif
''')
        return
    if 'I2C_ORDER' not in config:
        print("Missing I2C_ORDER config")
        return
    i2c_list = config['I2C_ORDER']
    f.write('// I2C configuration\n')
    if len(i2c_list) == 0:
        error("I2C_ORDER invalid")
    devlist = []
    # write out config structures
    for dev in i2c_list:
        # only single-digit buses I2C1..I2C4 are supported here
        if not dev.startswith('I2C') or dev[3] not in "1234":
            error("Bad I2C_ORDER element %s" % dev)
        n = int(dev[3:])
        devlist.append('HAL_I2C%u_CONFIG' % n)
        sda_line = make_line('I2C%u_SDA' % n)
        scl_line = make_line('I2C%u_SCL' % n)
        f.write('''
#if defined(STM32_I2C_I2C%u_RX_DMA_STREAM) && defined(STM32_I2C_I2C%u_TX_DMA_STREAM)
#define HAL_I2C%u_CONFIG { &I2CD%u, STM32_I2C_I2C%u_RX_DMA_STREAM, STM32_I2C_I2C%u_TX_DMA_STREAM, %s, %s }
#else
#define HAL_I2C%u_CONFIG { &I2CD%u, SHARED_DMA_NONE, SHARED_DMA_NONE, %s, %s }
#endif
'''
                % (n, n, n, n, n, n, scl_line, sda_line, n, n, scl_line, sda_line))
    f.write('\n#define HAL_I2C_DEVICE_LIST %s\n\n' % ','.join(devlist))
def parse_timer(str):
    '''parse timer channel string, i.e TIM8_CH2N, into a
    (timer_number, channel, is_complementary) tuple'''
    m = re.match(r'TIM([0-9]*)_CH([1234])(N?)', str)
    if m is None:
        error("Bad timer definition %s" % str)
        return None
    tim = int(m.group(1))
    chan = int(m.group(2))
    compl = m.group(3) == 'N'
    # STM32 timers are numbered 1..17
    if tim < 1 or tim > 17:
        error("Bad timer number %s in %s" % (tim, str))
    return (tim, chan, compl)
def write_PWM_config(f):
    '''write PWM config defines

    Scans the TIM* pins and classifies them as RC input (capture),
    interrupt-based RC input, alarm output or PWM output, then emits the
    RC-input, alarm and per-timer PWM group configuration structures.
    '''
    rc_in = None
    rc_in_int = None
    alarm = None
    pwm_out = []
    pwm_timers = []
    for l in bylabel.keys():
        p = bylabel[l]
        if p.type.startswith('TIM'):
            if p.has_extra('RCIN'):
                rc_in = p
            elif p.has_extra('RCININT'):
                rc_in_int = p
            elif p.has_extra('ALARM'):
                alarm = p
            else:
                if p.extra_value('PWM', type=int) is not None:
                    pwm_out.append(p)
                if p.type not in pwm_timers:
                    pwm_timers.append(p.type)
    if not pwm_out and not alarm:
        print("No PWM output defined")
        f.write('''
#ifndef HAL_USE_PWM
#define HAL_USE_PWM FALSE
#endif
''')
    if rc_in is not None:
        (n, chan, compl) = parse_timer(rc_in.label)
        if compl:
            # it is an inverted channel
            f.write('#define HAL_RCIN_IS_INVERTED\n')
        if chan not in [1, 2]:
            error(
                "Bad channel number, only channel 1 and 2 supported for RCIN")
        f.write('// RC input config\n')
        f.write('#define HAL_USE_ICU TRUE\n')
        f.write('#define STM32_ICU_USE_TIM%u TRUE\n' % n)
        f.write('#define RCIN_ICU_TIMER ICUD%u\n' % n)
        f.write('#define RCIN_ICU_CHANNEL ICU_CHANNEL_%u\n' % chan)
        f.write('#define STM32_RCIN_DMA_STREAM STM32_TIM_TIM%u_CH%u_DMA_STREAM\n' % (n, chan))
        f.write('#define STM32_RCIN_DMA_CHANNEL STM32_TIM_TIM%u_CH%u_DMA_CHAN\n' % (n, chan))
        f.write('\n')
    if rc_in_int is not None:
        (n, chan, compl) = parse_timer(rc_in_int.label)
        if compl:
            error('Complementary channel is not supported for RCININT %s' % rc_in_int.label)
        f.write('// RC input config\n')
        f.write('#define HAL_USE_EICU TRUE\n')
        f.write('#define STM32_EICU_USE_TIM%u TRUE\n' % n)
        f.write('#define RCININT_EICU_TIMER EICUD%u\n' % n)
        f.write('#define RCININT_EICU_CHANNEL EICU_CHANNEL_%u\n' % chan)
        f.write('\n')
    if alarm is not None:
        (n, chan, compl) = parse_timer(alarm.label)
        if compl:
            error("Complementary channel is not supported for ALARM %s" % alarm.label)
        f.write('\n')
        f.write('// Alarm PWM output config\n')
        f.write('#define STM32_PWM_USE_TIM%u TRUE\n' % n)
        f.write('#define STM32_TIM%u_SUPPRESS_ISR\n' % n)
        chan_mode = [
            'PWM_OUTPUT_DISABLED', 'PWM_OUTPUT_DISABLED',
            'PWM_OUTPUT_DISABLED', 'PWM_OUTPUT_DISABLED'
        ]
        chan_mode[chan - 1] = 'PWM_OUTPUT_ACTIVE_HIGH'
        pwm_clock = 1000000
        period = 1000
        f.write('''#define HAL_PWM_ALARM \\
{ /* pwmGroup */ \\
%u, /* Timer channel */ \\
{ /* PWMConfig */ \\
%u, /* PWM clock frequency. */ \\
%u, /* Initial PWM period 20ms. */ \\
NULL, /* no callback */ \\
{ /* Channel Config */ \\
{%s, NULL}, \\
{%s, NULL}, \\
{%s, NULL}, \\
{%s, NULL} \\
}, \\
0, 0 \\
}, \\
&PWMD%u /* PWMDriver* */ \\
}\n''' %
                (chan-1, pwm_clock, period, chan_mode[0],
                 chan_mode[1], chan_mode[2], chan_mode[3], n))
    else:
        f.write('\n')
        f.write('// No Alarm output pin defined\n')
        f.write('#undef HAL_PWM_ALARM\n')
    f.write('\n')
    f.write('// PWM timer config\n')
    for t in sorted(pwm_timers):
        n = int(t[3:])
        f.write('#define STM32_PWM_USE_TIM%u TRUE\n' % n)
        f.write('#define STM32_TIM%u_SUPPRESS_ISR\n' % n)
    f.write('\n')
    f.write('// PWM output config\n')
    groups = []
    have_complementary = False
    for t in sorted(pwm_timers):
        group = len(groups) + 1
        n = int(t[3:])
        chan_list = [255, 255, 255, 255]
        chan_mode = [
            'PWM_OUTPUT_DISABLED', 'PWM_OUTPUT_DISABLED',
            'PWM_OUTPUT_DISABLED', 'PWM_OUTPUT_DISABLED'
        ]
        alt_functions = [0, 0, 0, 0]
        pal_lines = ['0', '0', '0', '0']
        for p in pwm_out:
            if p.type != t:
                continue
            (n, chan, compl) = parse_timer(p.label)
            pwm = p.extra_value('PWM', type=int)
            chan_list[chan - 1] = pwm - 1
            if compl:
                chan_mode[chan - 1] = 'PWM_COMPLEMENTARY_OUTPUT_ACTIVE_HIGH'
                have_complementary = True
            else:
                chan_mode[chan - 1] = 'PWM_OUTPUT_ACTIVE_HIGH'
            alt_functions[chan - 1] = p.af
            pal_lines[chan - 1] = 'PAL_LINE(GPIO%s, %uU)' % (p.port, p.pin)
        groups.append('HAL_PWM_GROUP%u' % group)
        if n in [1, 8]:
            # only the advanced timers do 8MHz clocks
            advanced_timer = 'true'
        else:
            advanced_timer = 'false'
        pwm_clock = 1000000
        period = 20000 * pwm_clock / 1000000
        f.write('''#if defined(STM32_TIM_TIM%u_UP_DMA_STREAM) && defined(STM32_TIM_TIM%u_UP_DMA_CHAN)
# define HAL_PWM%u_DMA_CONFIG true, STM32_TIM_TIM%u_UP_DMA_STREAM, STM32_TIM_TIM%u_UP_DMA_CHAN
#else
# define HAL_PWM%u_DMA_CONFIG false, 0, 0
#endif\n''' % (n, n, n, n, n, n))
        f.write('''#define HAL_PWM_GROUP%u { %s, \\
{%u, %u, %u, %u}, \\
/* Group Initial Config */ \\
{ \\
%u, /* PWM clock frequency. */ \\
%u, /* Initial PWM period 20ms. */ \\
NULL, /* no callback */ \\
{ \\
/* Channel Config */ \\
{%s, NULL}, \\
{%s, NULL}, \\
{%s, NULL}, \\
{%s, NULL} \\
}, 0, 0}, &PWMD%u, \\
HAL_PWM%u_DMA_CONFIG, \\
{ %u, %u, %u, %u }, \\
{ %s, %s, %s, %s }}\n''' %
                (group, advanced_timer,
                 chan_list[0], chan_list[1], chan_list[2], chan_list[3],
                 pwm_clock, period,
                 chan_mode[0], chan_mode[1], chan_mode[2], chan_mode[3],
                 n, n,
                 alt_functions[0], alt_functions[1], alt_functions[2], alt_functions[3],
                 pal_lines[0], pal_lines[1], pal_lines[2], pal_lines[3]))
    f.write('#define HAL_PWM_GROUPS %s\n\n' % ','.join(groups))
    if have_complementary:
        f.write('#define STM32_PWM_USE_ADVANCED TRUE\n')
def write_ADC_config(f):
    '''write ADC config defines

    Collects all ADC pins with their channel number and optional SCALE
    extra, emits special defines for the board/servo voltage sense
    labels, and writes the HAL_ANALOG_PINS table with per-pin scale
    expressions based on STM32_VDD.
    '''
    f.write('// ADC config\n')
    adc_chans = []
    for l in bylabel:
        p = bylabel[l]
        if not p.type.startswith('ADC'):
            continue
        chan = get_ADC1_chan(mcu_type, p.portpin)
        scale = p.extra_value('SCALE', default=None)
        if p.label == 'VDD_5V_SENS':
            f.write('#define ANALOG_VCC_5V_PIN %u\n' % chan)
            f.write('#define HAL_HAVE_BOARD_VOLTAGE 1\n')
        if p.label == 'FMU_SERVORAIL_VCC_SENS':
            f.write('#define FMU_SERVORAIL_ADC_CHAN %u\n' % chan)
            f.write('#define HAL_HAVE_SERVO_VOLTAGE 1\n')
        adc_chans.append((chan, scale, p.label, p.portpin))
    adc_chans = sorted(adc_chans)
    # STM32_VDD is given in units of 0.01V, optionally with a U suffix
    vdd = get_config('STM32_VDD')
    if vdd[-1] == 'U':
        vdd = vdd[:-1]
    vdd = float(vdd) * 0.01
    f.write('#define HAL_ANALOG_PINS { \\\n')
    for (chan, scale, label, portpin) in adc_chans:
        scale_str = '%.2f/4096' % vdd
        if scale is not None and scale != '1':
            scale_str = scale + '*' + scale_str
        f.write('{ %2u, %12s }, /* %s %s */ \\\n' % (chan, scale_str, portpin,
                                                     label))
    f.write('}\n\n')
def write_GPIO_config(f):
    '''write GPIO config defines

    Emits HAL_GPIO_LINE_GPIOn defines and the HAL_GPIO_PINS table for
    every pin carrying a GPIO(n) extra (duplicate numbers abort), then a
    HAL_GPIO_PIN_<LABEL> define for every labelled pin.
    '''
    f.write('// GPIO config\n')
    gpios = []
    gpioset = set()
    for l in bylabel:
        p = bylabel[l]
        gpio = p.extra_value('GPIO', type=int)
        if gpio is None:
            continue
        if gpio in gpioset:
            error("Duplicate GPIO value %u" % gpio)
        gpioset.add(gpio)
        # see if it is also a PWM pin
        pwm = p.extra_value('PWM', type=int, default=0)
        port = p.port
        pin = p.pin
        gpios.append((gpio, pwm, port, pin, p))
    gpios = sorted(gpios)
    for (gpio, pwm, port, pin, p) in gpios:
        f.write('#define HAL_GPIO_LINE_GPIO%u PAL_LINE(GPIO%s, %2uU)\n' % (gpio, port, pin))
    f.write('#define HAL_GPIO_PINS { \\\n')
    for (gpio, pwm, port, pin, p) in gpios:
        f.write('{ %3u, true, %2u, PAL_LINE(GPIO%s, %2uU)}, /* %s */ \\\n' %
                (gpio, pwm, port, pin, p))
    # and write #defines for use by config code
    f.write('}\n\n')
    f.write('// full pin define list\n')
    last_label = None
    for l in sorted(list(set(bylabel.keys()))):
        p = bylabel[l]
        label = p.label
        # '-' is not valid in a C identifier
        label = label.replace('-', '_')
        if label == last_label:
            continue
        last_label = label
        f.write('#define HAL_GPIO_PIN_%-20s PAL_LINE(GPIO%s,%uU)\n' %
                (label, p.port, p.pin))
    f.write('\n')
def bootloader_path():
    # always embed a bootloader if it is available
    '''return the real path of a prebuilt bootloader binary matching the
    hwdef directory name, or None when no such binary exists'''
    script_path = os.path.realpath(__file__)
    tree_root = os.path.relpath(os.path.join(script_path, "../../../../.."))
    board_name = os.path.basename(os.path.dirname(args.hwdef))
    candidate = os.path.join(tree_root,
                             "Tools",
                             "bootloaders",
                             "%s_bl.bin" % (board_name,))
    if not os.path.exists(candidate):
        return None
    return os.path.realpath(candidate)
def add_bootloader():
    '''embed the matching bootloader binary into ROMFS, if one exists'''
    bl = bootloader_path()
    if bl is None:
        return
    romfs["bootloader.bin"] = bl
def write_ROMFS(outdir):
    '''record the ROMFS file list in env_vars for the build system.

    Note: despite the name, nothing is written to *outdir* here -- the
    parameter is currently unused and kept only for interface
    compatibility with existing callers.
    '''
    # list(dict.items()) replaces the original manual key loop; the
    # (name, path) tuple order is identical (dict insertion order).
    env_vars['ROMFS_FILES'] = list(romfs.items())
def setup_apj_IDs():
    '''populate the APJ board identification entries in env_vars'''
    env_vars['APJ_BOARD_ID'] = get_config('APJ_BOARD_ID')
    # board type falls back to the MCU name when not given explicitly
    env_vars['APJ_BOARD_TYPE'] = get_config('APJ_BOARD_TYPE', default=mcu_type)
    vid, pid = get_USB_IDs()
    env_vars['USBID'] = '0x%04x/0x%04x' % (vid, pid)
def write_peripheral_enable(f):
    '''write peripheral enable lines

    Enables every peripheral type that appears in either the primary
    (bytype) or alternate (alttype) pin maps.
    '''
    f.write('// peripherals enabled\n')
    # simple one-line enables, checked in this fixed order
    simple = (('SPI', '#define STM32_SPI_USE_%s TRUE\n'),
              ('OTG', '#define STM32_USB_USE_%s TRUE\n'),
              ('I2C', '#define STM32_I2C_USE_%s TRUE\n'))
    for ptype in sorted(list(bytype.keys()) + list(alttype.keys())):
        if ptype.startswith(('USART', 'UART')):
            # serial enables are guarded so a hwdef can pre-define them
            dstr = 'STM32_SERIAL_USE_%-6s' % ptype
            f.write('#ifndef %s\n' % dstr)
            f.write('#define %s TRUE\n' % dstr)
            f.write('#endif\n')
        for prefix, template in simple:
            if ptype.startswith(prefix):
                f.write(template % ptype)
def get_dma_exclude(periph_list):
    '''return list of DMA devices to exclude from DMA

    A peripheral is excluded when its pin (in either the primary or the
    alternate label map) carries the NODMA extra.
    '''
    dma_exclude = []
    for periph in periph_list:
        # check primary labels first, then alternate labels, matching the
        # original evaluation order (a NODMA hit in both adds it twice)
        for lookup in (bylabel, altlabel):
            if periph in lookup and lookup[periph].has_extra('NODMA'):
                dma_exclude.append(periph)
    return dma_exclude
def write_alt_config(f):
    '''write out alternate config settings

    Emits the HAL_PIN_ALT_CONFIG table used by the HAL to switch pins
    into alternative configurations at runtime.  Skipped entirely when
    no ALT() pins were defined.
    '''
    if len(altmap.keys()) == 0:
        # no alt configs
        return
    f.write('''
/* alternative configurations */
#define PAL_STM32_SPEED(n) ((n&3U)<<3U)
#define PAL_STM32_HIGH 0x8000U
#define HAL_PIN_ALT_CONFIG { \\
''')
    for alt in altmap.keys():
        for pp in altmap[alt].keys():
            p = altmap[alt][pp]
            # one table row per (alt index, pin): mode bits plus PAL line
            f.write(" { %u, %s, PAL_LINE(GPIO%s,%uU)}, /* %s */ \\\n" % (alt, p.pal_modeline(), p.port, p.pin, str(p)))
    f.write('}\n\n')
def write_all_lines(hwdat):
    '''write every processed hwdef line to *hwdat*.

    Boards with more than 1024k of flash also embed the file in ROMFS so
    the hwdef used for a build can be recovered from the firmware.
    '''
    # context manager guarantees the handle is flushed and closed even if
    # the write raises (the original leaked the handle on error)
    with open(hwdat, 'w') as f:
        f.write('\n'.join(all_lines))
    flash_size = get_config('FLASH_SIZE_KB', type=int)
    if flash_size > 1024:
        romfs["hwdef.dat"] = hwdat
def write_hwdef_header(outfilename):
    '''write hwdef header file

    Generates the complete hwdef.h for the board: fixed preamble, all the
    per-subsystem config sections, DMA assignments, the per-port GPIO
    VAL_* initialisation macros and the alternate-config table.  Errors
    out if a required peripheral was left without DMA.
    '''
    print("Writing hwdef setup in %s" % outfilename)
    # 'with' ensures the generated header is flushed and closed even on
    # error (the original never closed the handle)
    with open(outfilename, 'w') as f:
        f.write('''/*
generated hardware definitions from hwdef.dat - DO NOT EDIT
*/
#pragma once
#ifndef TRUE
#define TRUE 1
#endif
#ifndef FALSE
#define FALSE 0
#endif
''')
        # per-subsystem sections, in fixed output order
        write_mcu_config(f)
        write_SPI_config(f)
        write_ADC_config(f)
        write_GPIO_config(f)
        write_IMU_config(f)
        write_MAG_config(f)
        write_BARO_config(f)
        write_board_validate_macro(f)
        write_peripheral_enable(f)

        dma_unassigned = dma_resolver.write_dma_header(f, periph_list, mcu_type,
                                                       dma_exclude=get_dma_exclude(periph_list),
                                                       dma_priority=get_config('DMA_PRIORITY', default='TIM* SPI*', spaces=True),
                                                       dma_noshare=get_config('DMA_NOSHARE', default='', spaces=True))

        if not args.bootloader:
            write_PWM_config(f)
            write_I2C_config(f)
            write_UART_config(f)
        else:
            # bootloaders only need a minimal UART setup
            write_UART_config_bootloader(f)

        setup_apj_IDs()
        write_USB_config(f)
        add_bootloader()

        if len(romfs) > 0:
            f.write('#define HAL_HAVE_AP_ROMFS_EMBEDDED_H 1\n')

        # the F1 series uses a different GPIO register layout from F4/F7/H7
        if mcu_series.startswith('STM32F1'):
            f.write('''
/*
* I/O ports initial setup, this configuration is established soon after reset
* in the initialization code.
* Please refer to the STM32 Reference Manual for details.
*/
#define PIN_MODE_OUTPUT_PP(n) (0U << (((n) & 7) * 4))
#define PIN_MODE_OUTPUT_OD(n) (4U << (((n) & 7) * 4))
#define PIN_MODE_AF_PP(n) (8U << (((n) & 7) * 4))
#define PIN_MODE_AF_OD(n) (12U << (((n) & 7) * 4))
#define PIN_MODE_ANALOG(n) (0U << (((n) & 7) * 4))
#define PIN_MODE_NOPULL(n) (4U << (((n) & 7) * 4))
#define PIN_MODE_PUD(n) (8U << (((n) & 7) * 4))
#define PIN_SPEED_MEDIUM(n) (1U << (((n) & 7) * 4))
#define PIN_SPEED_LOW(n) (2U << (((n) & 7) * 4))
#define PIN_SPEED_HIGH(n) (3U << (((n) & 7) * 4))
#define PIN_ODR_HIGH(n) (1U << (((n) & 15)))
#define PIN_ODR_LOW(n) (0U << (((n) & 15)))
#define PIN_PULLUP(n) (1U << (((n) & 15)))
#define PIN_PULLDOWN(n) (0U << (((n) & 15)))
#define PIN_UNDEFINED(n) PIN_INPUT_PUD(n)
''')
        else:
            f.write('''
/*
* I/O ports initial setup, this configuration is established soon after reset
* in the initialization code.
* Please refer to the STM32 Reference Manual for details.
*/
#define PIN_MODE_INPUT(n) (0U << ((n) * 2U))
#define PIN_MODE_OUTPUT(n) (1U << ((n) * 2U))
#define PIN_MODE_ALTERNATE(n) (2U << ((n) * 2U))
#define PIN_MODE_ANALOG(n) (3U << ((n) * 2U))
#define PIN_ODR_LOW(n) (0U << (n))
#define PIN_ODR_HIGH(n) (1U << (n))
#define PIN_OTYPE_PUSHPULL(n) (0U << (n))
#define PIN_OTYPE_OPENDRAIN(n) (1U << (n))
#define PIN_OSPEED_VERYLOW(n) (0U << ((n) * 2U))
#define PIN_OSPEED_LOW(n) (1U << ((n) * 2U))
#define PIN_OSPEED_MEDIUM(n) (2U << ((n) * 2U))
#define PIN_OSPEED_HIGH(n) (3U << ((n) * 2U))
#define PIN_PUPDR_FLOATING(n) (0U << ((n) * 2U))
#define PIN_PUPDR_PULLUP(n) (1U << ((n) * 2U))
#define PIN_PUPDR_PULLDOWN(n) (2U << ((n) * 2U))
#define PIN_AFIO_AF(n, v) ((v) << (((n) % 8U) * 4U))
''')

        for port in sorted(ports):
            # human-readable summary of the port's labelled pins
            f.write("/* PORT%s:\n" % port)
            for pin in range(pincount[port]):
                p = portmap[port][pin]
                if p.label is not None:
                    f.write(" %s\n" % p)
            f.write("*/\n\n")

            if pincount[port] == 0:
                # handle blank ports
                for vtype in vtypes:
                    f.write("#define VAL_GPIO%s_%-7s 0x0\n" % (port,
                                                               vtype))
                f.write("\n\n\n")
                continue
            # OR together the per-pin value for each register-init macro
            for vtype in vtypes:
                f.write("#define VAL_GPIO%s_%-7s (" % (p.port, vtype))
                first = True
                for pin in range(pincount[port]):
                    p = portmap[port][pin]
                    modefunc = getattr(p, "get_" + vtype)
                    v = modefunc()
                    if v is None:
                        continue
                    if not first:
                        f.write(" | \\\n ")
                    f.write(v)
                    first = False
                if first:
                    # there were no pin definitions, use 0
                    f.write("0")
                f.write(")\n\n")
        write_alt_config(f)

    if not mcu_series.startswith("STM32F1"):
        # these peripherals cannot work without a DMA channel
        dma_required = ['SPI*', 'ADC*']
        if 'IOMCU_UART' in config:
            dma_required.append(config['IOMCU_UART'][0] + '*')
        for d in dma_unassigned:
            for r in dma_required:
                if fnmatch.fnmatch(d, r):
                    error("Missing required DMA for %s" % d)
def build_peripheral_list():
    '''build a list of peripherals for DMA resolver to work on

    Walks every pin (primary and alternate configs) once per peripheral
    type and collects the DMA stream names the resolver must consider.
    Note: SPI/I2C TX/RX aliases are inserted into the module-level
    bylabel map as a side effect, matching the original behaviour.
    '''
    peripherals = []
    seen = set()
    dma_prefixes = ('SPI', 'USART', 'UART', 'I2C')
    # consider primary pins plus every alternate-config pin
    candidate_pins = list(allpins)
    for altpins in altmap.values():
        candidate_pins.extend(altpins.values())
    for p in candidate_pins:
        ptype = p.type
        if ptype in seen:
            continue
        for prefix in dma_prefixes:
            if not ptype.startswith(prefix):
                continue
            tx_name = ptype + "_TX"
            rx_name = ptype + "_RX"
            if prefix in ('SPI', 'I2C'):
                # in DMA map I2C and SPI have RX and TX suffixes
                bylabel.setdefault(tx_name, p)
                bylabel.setdefault(rx_name, p)
            if rx_name in bylabel or rx_name in altlabel:
                peripherals.append(rx_name)
            if tx_name in bylabel or tx_name in altlabel:
                peripherals.append(tx_name)
        if ptype.startswith('ADC'):
            peripherals.append(ptype)
        if ptype.startswith(('SDIO', 'SDMMC')):
            # H7 SDMMC has its own IDMA, so no shared DMA stream needed
            if not mcu_series.startswith("STM32H7"):
                peripherals.append(ptype)
        if ptype.startswith('TIM'):
            if p.has_extra('RCIN'):
                # RC input timers are named by label, minus any 'N' suffix
                label = p.label
                if label[-1] == 'N':
                    label = label[:-1]
                peripherals.append(label)
            elif not p.has_extra('ALARM') and not p.has_extra('RCININT'):
                # get the TIMn_UP DMA channels for DShot
                up_name = ptype + '_UP'
                if up_name not in peripherals and not p.has_extra('NODMA'):
                    peripherals.append(up_name)
        seen.add(ptype)
    return peripherals
def write_env_py(filename):
    '''write out env.py for environment variables to control the build process

    Resolves the default-parameters file (command-line --default-parameters
    wins over the board's defaults.parm), records the ChibiOS build flags,
    and pickles env_vars to *filename*.
    '''
    # see if board has a defaults.parm file or a --default-parameters file was specified
    defaults_filename = os.path.join(os.path.dirname(args.hwdef), 'defaults.parm')
    defaults_path = os.path.join(os.path.dirname(args.hwdef), args.params)

    if not args.bootloader:
        if os.path.exists(defaults_path):
            env_vars['DEFAULT_PARAMETERS'] = os.path.abspath(defaults_path)
            print("Default parameters path from command line: %s" % defaults_path)
        elif os.path.exists(defaults_filename):
            env_vars['DEFAULT_PARAMETERS'] = os.path.abspath(defaults_filename)
            print("Default parameters path from hwdef: %s" % defaults_filename)
        else:
            print("No default parameter file found")

    # CHIBIOS_BUILD_FLAGS is passed to the ChibiOS makefile
    env_vars['CHIBIOS_BUILD_FLAGS'] = ' '.join(build_flags)
    # close the pickle file deterministically instead of leaking the
    # handle (the original passed an anonymous open() to pickle.dump)
    with open(filename, "wb") as f:
        pickle.dump(env_vars, f)
def romfs_add(romfs_filename, filename):
    '''register *filename* for embedding in ROMFS under *romfs_filename*'''
    romfs.update({romfs_filename: filename})
def romfs_wildcard(pattern):
    '''add a set of files to ROMFS by wildcard

    *pattern* is a path relative to the source tree root; every entry of
    its directory matching the final component is registered.  The stored
    path is kept relative to the tree root, as the original did.
    '''
    base_path = os.path.join(os.path.dirname(__file__), '..', '..', '..', '..')
    (pattern_dir, pattern) = os.path.split(pattern)
    # fnmatch.filter applies the same matching rules as fnmatch.fnmatch
    for name in fnmatch.filter(os.listdir(os.path.join(base_path, pattern_dir)), pattern):
        romfs[name] = os.path.join(pattern_dir, name)
def romfs_add_dir(subdirs):
    '''add a filesystem directory to ROMFS

    For each name in *subdirs*, recursively embeds every file found under
    <hwdef dir>/<name>, preserving the subdirectory structure in the
    ROMFS key.  Skipped entirely for bootloader builds.
    '''
    for dirname in subdirs:
        romfs_dir = os.path.join(os.path.dirname(args.hwdef), dirname)
        if not args.bootloader and os.path.exists(romfs_dir):
            for root, d, files in os.walk(romfs_dir):
                for f in files:
                    if fnmatch.fnmatch(f, '*~'):
                        # skip editor backup files
                        continue
                    fullpath = os.path.join(root, f)
                    # ROMFS key is the path relative to the hwdef dir,
                    # rooted at *dirname*
                    relpath = os.path.normpath(os.path.join(dirname, os.path.relpath(root, romfs_dir), f))
                    romfs[relpath] = fullpath
def process_line(line):
    '''process one line of pin definition file

    Dispatches on the first token: Pxnn lines define pins (primary or,
    with ALT(n), alternate configs); MCU/SPIDEV/IMU/COMPASS/BARO/ROMFS/
    ROMFS_WILDCARD/undef/env lines update the corresponding module-level
    state; everything else becomes a generic config entry.
    '''
    global allpins, imu_list, compass_list, baro_list
    global mcu_type, mcu_series
    all_lines.append(line)
    # posix=False preserves quoting/backslashes for later re-emission
    a = shlex.split(line, posix=False)
    # keep all config lines for later use
    alllines.append(line)

    p = None
    if a[0].startswith('P') and a[0][1] in ports:
        # it is a port/pin definition
        try:
            port = a[0][1]
            pin = int(a[0][2:])
            label = a[1]
            type = a[2]
            extra = a[3:]
        except Exception:
            error("Bad pin line: %s" % a)
            return

        p = generic_pin(port, pin, label, type, extra)
        af = get_alt_function(mcu_type, a[0], label)
        if af is not None:
            p.af = af

        alt = p.extra_value("ALT", type=int, default=0)
        if alt != 0:
            # ALT(n) pins go into the alternate-config tables only, not
            # the primary pin map
            if mcu_series.startswith("STM32F1"):
                error("Alt config not allowed for F1 MCU")
            if alt not in altmap:
                altmap[alt] = {}
            if p.portpin in altmap[alt]:
                error("Pin %s ALT(%u) redefined" % (p.portpin, alt))
            altmap[alt][p.portpin] = p
            # we need to add alt pins into bytype[] so they are enabled in chibios config
            if type not in alttype:
                alttype[type] = []
            alttype[type].append(p)
            altlabel[label] = p
            return

    if a[0] in config:
        error("Pin %s redefined" % a[0])
    if p is None and line.find('ALT(') != -1:
        error("ALT() invalid for %s" % a[0])
    config[a[0]] = a[1:]
    if p is not None:
        # add to set of pins for primary config
        portmap[port][pin] = p
        allpins.append(p)
        if type not in bytype:
            bytype[type] = []
        bytype[type].append(p)
        bylabel[label] = p
    elif a[0] == 'MCU':
        # MCU <series> <type>
        mcu_type = a[2]
        mcu_series = a[1]
        setup_mcu_type_defaults()
    elif a[0] == 'SPIDEV':
        spidev.append(a[1:])
    elif a[0] == 'IMU':
        imu_list.append(a[1:])
    elif a[0] == 'COMPASS':
        compass_list.append(a[1:])
    elif a[0] == 'BARO':
        baro_list.append(a[1:])
    elif a[0] == 'ROMFS':
        romfs_add(a[1], a[2])
    elif a[0] == 'ROMFS_WILDCARD':
        romfs_wildcard(a[1])
    elif a[0] == 'undef':
        # remove every trace of a previously-defined name; lets an
        # including hwdef override its parent
        print("Removing %s" % a[1])
        config.pop(a[1], '')
        bytype.pop(a[1], '')
        bylabel.pop(a[1], '')
        # also remove all occurences of defines in previous lines if any
        for line in alllines[:]:
            if line.startswith('define') and a[1] == line.split()[1]:
                alllines.remove(line)
        newpins = []
        for pin in allpins:
            if pin.type == a[1]:
                continue
            if pin.label == a[1]:
                continue
            if pin.portpin == a[1]:
                continue
            newpins.append(pin)
        allpins = newpins
        if a[1] == 'IMU':
            imu_list = []
        if a[1] == 'COMPASS':
            compass_list = []
        if a[1] == 'BARO':
            baro_list = []
    elif a[0] == 'env':
        print("Adding environment %s" % ' '.join(a[1:]))
        if len(a[1:]) < 2:
            error("Bad env line for %s" % a[0])
        env_vars[a[1]] = ' '.join(a[2:])
def process_file(filename):
    '''process a hwdef.dat file

    Reads *filename* line by line, recursing into "include" directives
    (relative includes are resolved against the including file) and
    passing everything else to process_line().
    '''
    try:
        # read everything up front inside a context manager so the
        # handle is always closed (the original never closed it)
        with open(filename, "r") as f:
            lines = f.readlines()
    except Exception:
        error("Unable to open file %s" % filename)
    for line in lines:
        line = line.strip()
        if len(line) == 0 or line[0] == '#':
            continue
        a = shlex.split(line)
        if a[0] == "include" and len(a) > 1:
            include_file = a[1]
            if include_file[0] != '/':
                # relative include: resolve against this file's directory
                basedir = os.path.dirname(filename)
                include_file = os.path.normpath(
                    os.path.join(basedir, include_file))
            print("Including %s" % include_file)
            process_file(include_file)
        else:
            process_line(line)
# process input file
process_file(args.hwdef)

# fall back to /tmp when no output directory was requested
outdir = args.outdir
if outdir is None:
    outdir = '/tmp'

if "MCU" not in config:
    error("Missing MCU type in config")

mcu_type = get_config('MCU', 1)
print("Setup for MCU %s" % mcu_type)

# build a list for peripherals for DMA resolver
periph_list = build_peripheral_list()

# write out hw.dat for ROMFS
write_all_lines(os.path.join(outdir, "hw.dat"))

# write out hwdef.h
write_hwdef_header(os.path.join(outdir, "hwdef.h"))

# write out ldscript.ld
write_ldscript(os.path.join(outdir, "ldscript.ld"))

# embed any board-local 'scripts' directory, then finalise the ROMFS list
romfs_add_dir(['scripts'])

write_ROMFS(outdir)

# copy the shared linker script into the build directory; it must
# exist in the same directory as the ldscript.ld file we generate.
copy_common_linkerscript(outdir, args.hwdef)

write_env_py(os.path.join(outdir, "env.py"))
|
Pedals2Paddles/ardupilot
|
libraries/AP_HAL_ChibiOS/hwdef/scripts/chibios_hwdef.py
|
Python
|
gpl-3.0
| 68,886
|
[
"CRYSTAL"
] |
d6b04cbf554a67b7bff65906bfa4be80b34990ac09950c5d1ff01db2bbc2e63a
|
# Smoke test for POVME's peel module: builds feature maps for a reduced
# ligand PDB and writes them out for inspection.  NOTE: Python 2 script
# (uses the print statement).
import POVME.packages.binana.peel as peel
#import packages.pymolecule.pymolecule as pymolecule
#import numpy

# library-default parameter set for the peel algorithm
my_params = peel.defaultParams

#my_protein = pymolecule.Molecule()
ligand = peel.PDB()
ligand.LoadPDB('3CZ_reduced.pdb')
#my_protein.fileio.load_pdb_into('3CZ_reduced.pdb', bonds_by_distance=True, serial_reindex=True, resseq_reindex=False)

my_peel = peel.peel(ligand, my_params, isLigand=True)
# VMD visualisation script for manual inspection of the ligand features
my_peel.write_vmd_script('visualize_lig.vmd', peel.defaultParams)

#povmeMap = numpy.ones((13,13,13))
#my_peel.color_povme_map(povmeMap, [0,13,0,13,0,13], 1)

# feature maps over the box x:[50,70] y:[-30,-10] z:[45,65] at 1 A spacing
# (presumably Angstrom units -- confirm against peel.create_feature_maps)
my_feature_maps = my_peel.create_feature_maps([50,70,-30,-10,45,65], 1)
my_feature_maps['occupancy'].write_dx_file('OCC.dx')
my_feature_maps['hbondAcceptor'].write_pdb('HBA.pdb')
#There are no hbond donor groups on the ligand
#my_feature_maps['hbondDonor'].write_pdb('HBD.pdb')
my_feature_maps['aromatic'].write_pdb('ARO.pdb')
my_feature_maps['aromatic'].write_dx_file('ARO.dx')
my_feature_maps['hydrophobic'].write_pdb('HBC.pdb')
my_feature_maps['hydrophilic'].write_pdb('HPL.pdb')
#This is a bad feature
#my_feature_maps['hydrophobicity'].write_pdb('HPBTY.pdb')
#my_feature_maps['hydrophobicity'].write_dx_file('HPBTY.dx')
#HBAMap = my_feature_maps[0].data
#print HBAMap
#HBAPoints = numpy.transpose(numpy.nonzero(HBAMap))
#print HBAPoints
print "Done!"
|
POVME/POVME
|
POVME/packages/binana/tests/peel_lig_basic/ligTest.py
|
Python
|
mit
| 1,325
|
[
"VMD"
] |
24232f2be14a71318492c1cca0f9cc5dd690093c6251351fca403fdd2606deb2
|
from astn import AstToGAst, GAstToAst
import ast
import gast
class Ast2ToGAst(AstToGAst):
    """Convert a Python 2 ``ast`` tree into the version-independent ``gast`` form.

    Each ``visit_*`` rebuilds the corresponding ``gast`` node, copying the
    source location across and filling fields that only exist in newer
    grammars (``type_comment``, ``posonlyargs``, ...) with neutral defaults.
    Python 2 nodes carry no end positions, so ``end_lineno`` and
    ``end_col_offset`` are always set to ``None``.
    """

    @staticmethod
    def _locate(new_node, old_node):
        """Copy *old_node*'s location onto *new_node*; mark end position unknown."""
        gast.copy_location(new_node, old_node)
        new_node.end_lineno = new_node.end_col_offset = None
        return new_node

    # mod
    def visit_Module(self, node):
        new_node = gast.Module(
            self._visit(node.body),
            []  # type_ignores
        )
        return new_node

    # stmt
    def visit_FunctionDef(self, node):
        new_node = gast.FunctionDef(
            self._visit(node.name),
            self._visit(node.args),
            self._visit(node.body),
            self._visit(node.decorator_list),
            None,  # returns
            None,  # type_comment
        )
        return self._locate(new_node, node)

    def visit_ClassDef(self, node):
        new_node = gast.ClassDef(
            self._visit(node.name),
            self._visit(node.bases),
            [],  # keywords
            self._visit(node.body),
            self._visit(node.decorator_list),
        )
        return self._locate(new_node, node)

    def visit_For(self, node):
        new_node = gast.For(
            self._visit(node.target),
            self._visit(node.iter),
            self._visit(node.body),
            self._visit(node.orelse),
            []  # type_comment
        )
        return self._locate(new_node, node)

    def visit_With(self, node):
        # py2 With holds a single context manager; gast wraps it in a
        # withitem list to match the py3 grammar
        new_node = gast.With(
            [gast.withitem(
                self._visit(node.context_expr),
                self._visit(node.optional_vars)
            )],
            self._visit(node.body),
            None,  # type_comment
        )
        return self._locate(new_node, node)

    def visit_Raise(self, node):
        # 'raise T, inst, tb' is encoded as 'raise T(inst).with_traceback(tb)'
        ntype = self._visit(node.type)
        ninst = self._visit(node.inst)
        ntback = self._visit(node.tback)

        what = ntype
        if ninst is not None:
            what = self._locate(gast.Call(ntype, [ninst], []), node)
        if ntback is not None:
            attr = self._locate(
                gast.Attribute(what, 'with_traceback', gast.Load()), node)
            what = self._locate(gast.Call(attr, [ntback], []), node)
        return self._locate(gast.Raise(what, None), node)

    def visit_TryExcept(self, node):
        # py2 TryExcept maps to a gast Try with an empty finalbody
        new_node = gast.Try(
            self._visit(node.body),
            self._visit(node.handlers),
            self._visit(node.orelse),
            []  # finalbody
        )
        return self._locate(new_node, node)

    def visit_TryFinally(self, node):
        # py2 TryFinally maps to a gast Try with no handlers/orelse
        new_node = gast.Try(
            self._visit(node.body),
            [],  # handlers
            [],  # orelse
            self._visit(node.finalbody)
        )
        return self._locate(new_node, node)

    # expr
    def visit_Name(self, node):
        new_node = gast.Name(
            self._visit(node.id),
            self._visit(node.ctx),
            None,  # annotation
            None,  # type_comment
        )
        return self._locate(new_node, node)

    def visit_Num(self, node):
        new_node = gast.Constant(
            node.n,
            None,  # kind
        )
        return self._locate(new_node, node)

    def visit_Subscript(self, node):
        new_slice = self._visit(node.slice)
        new_node = gast.Subscript(
            self._visit(node.value),
            new_slice,
            self._visit(node.ctx),
        )
        return self._locate(new_node, node)

    def visit_Ellipsis(self, node):
        new_node = gast.Constant(
            Ellipsis,
            None,  # kind
        )
        return self._locate(new_node, node)

    def visit_Index(self, node):
        # Index wrappers are dropped in gast; use the wrapped value directly
        return self._visit(node.value)

    def visit_ExtSlice(self, node):
        # an ExtSlice becomes a Tuple of its dimensions
        new_dims = self._visit(node.dims)
        new_node = gast.Tuple(new_dims, gast.Load())
        return self._locate(new_node, node)

    def visit_Str(self, node):
        new_node = gast.Constant(
            node.s,
            None,  # kind
        )
        return self._locate(new_node, node)

    def visit_Call(self, node):
        # *args / **kwargs were separate call fields in py2; gast folds
        # them into a trailing Starred arg and a None-keyed keyword
        if node.starargs:
            star = self._locate(
                gast.Starred(self._visit(node.starargs), gast.Load()), node)
            starred = [star]
        else:
            starred = []

        if node.kwargs:
            kwargs = [gast.keyword(None, self._visit(node.kwargs))]
        else:
            kwargs = []

        new_node = gast.Call(
            self._visit(node.func),
            self._visit(node.args) + starred,
            self._visit(node.keywords) + kwargs,
        )
        return self._locate(new_node, node)

    def visit_comprehension(self, node):
        new_node = gast.comprehension(
            target=self._visit(node.target),
            iter=self._visit(node.iter),
            ifs=self._visit(node.ifs),
            is_async=0,  # py2 has no async comprehensions
        )
        return self._locate(new_node, node)

    # arguments
    def visit_arguments(self, node):
        # missing locations for vararg and kwarg set at function level.
        # (The original computed vararg twice; the redundant second
        # computation has been removed -- behaviour is unchanged.)
        if node.vararg:
            vararg = ast.Name(node.vararg, ast.Param())
        else:
            vararg = None
        if node.kwarg:
            kwarg = ast.Name(node.kwarg, ast.Param())
        else:
            kwarg = None
        new_node = gast.arguments(
            self._visit(node.args),
            [],  # posonlyargs
            self._visit(vararg),
            [],  # kwonlyargs
            [],  # kw_defaults
            self._visit(kwarg),
            self._visit(node.defaults),
        )
        return new_node
class GAstToAst2(GAstToAst):
    """Convert a ``gast`` tree back into a Python 2 ``ast`` tree.

    Inverse of ``Ast2ToGAst``: gast-only fields are dropped, and the
    constructs gast normalised (withitem lists, Starred/None-keyword call
    arguments, Constant nodes, unified Try) are expanded back into their
    Python 2 node forms.
    """
    # mod
    def visit_Module(self, node):
        new_node = ast.Module(self._visit(node.body))
        return new_node

    # stmt
    def visit_FunctionDef(self, node):
        new_node = ast.FunctionDef(
            self._visit(node.name),
            self._visit(node.args),
            self._visit(node.body),
            self._visit(node.decorator_list),
        )
        # because node.args doesn't have any location to copy from
        if node.args.vararg:
            ast.copy_location(node.args.vararg, node)
        if node.args.kwarg:
            ast.copy_location(node.args.kwarg, node)
        ast.copy_location(new_node, node)
        return new_node

    def visit_ClassDef(self, node):
        # gast keywords (metaclass etc.) have no py2 equivalent and are dropped
        new_node = ast.ClassDef(
            self._visit(node.name),
            self._visit(node.bases),
            self._visit(node.body),
            self._visit(node.decorator_list),
        )
        ast.copy_location(new_node, node)
        return new_node

    def visit_For(self, node):
        new_node = ast.For(
            self._visit(node.target),
            self._visit(node.iter),
            self._visit(node.body),
            self._visit(node.orelse),
        )
        ast.copy_location(new_node, node)
        return new_node

    def visit_With(self, node):
        # only the first withitem can be represented in a py2 With node
        new_node = ast.With(
            self._visit(node.items[0].context_expr),
            self._visit(node.items[0].optional_vars),
            self._visit(node.body)
        )
        ast.copy_location(new_node, node)
        return new_node

    def visit_Raise(self, node):
        # undo the 'exc.with_traceback(tb)' encoding produced by Ast2ToGAst
        if isinstance(node.exc, gast.Call) and \
           isinstance(node.exc.func, gast.Attribute) and \
           node.exc.func.attr == 'with_traceback':
            raised = self._visit(node.exc.func.value)
            traceback = self._visit(node.exc.args[0])
        else:
            raised = self._visit(node.exc)
            traceback = None
        new_node = ast.Raise(raised, None, traceback)
        ast.copy_location(new_node, node)
        return new_node

    def visit_Try(self, node):
        # NOTE(review): a Try with handlers AND a finalbody loses its
        # handlers in this branch; presumably gast never produces that
        # shape from py2 input -- confirm before feeding py3-origin trees.
        if node.finalbody:
            new_node = ast.TryFinally(
                self._visit(node.body),
                self._visit(node.finalbody)
            )
        else:
            new_node = ast.TryExcept(
                self._visit(node.body),
                self._visit(node.handlers),
                self._visit(node.orelse),
            )
        ast.copy_location(new_node, node)
        return new_node

    # expr
    def visit_Name(self, node):
        # annotation/type_comment fields of the gast Name are dropped
        new_node = ast.Name(
            self._visit(node.id),
            self._visit(node.ctx),
        )
        ast.copy_location(new_node, node)
        return new_node

    def visit_Constant(self, node):
        # 'long' only exists on Python 2, which is where this module runs
        if isinstance(node.value, (bool, int, long, float, complex)):
            new_node = ast.Num(node.value)
        elif node.value is Ellipsis:
            new_node = ast.Ellipsis()
        else:
            new_node = ast.Str(node.value)
        ast.copy_location(new_node, node)
        return new_node

    def visit_Subscript(self, node):
        def adjust_slice(s):
            # plain expressions must be re-wrapped in Index; Slice and
            # Ellipsis stand alone inside a py2 Subscript
            if isinstance(s, (ast.Slice, ast.Ellipsis)):
                return s
            else:
                return ast.Index(s)
        if isinstance(node.slice, gast.Tuple):
            # gast represents an ExtSlice as a Tuple of dimensions
            new_slice = ast.ExtSlice([adjust_slice(self._visit(elt))
                                      for elt in node.slice.elts])
        else:
            new_slice = adjust_slice(self._visit(node.slice))
        ast.copy_location(new_slice, node.slice)
        new_node = ast.Subscript(
            self._visit(node.value),
            new_slice,
            self._visit(node.ctx),
        )
        ast.copy_location(new_node, node)
        # NOTE(review): the only visitor here that sets end positions on a
        # py2 ast node -- looks harmless but confirm it is intentional
        new_node.end_lineno = new_node.end_col_offset = None
        return new_node

    def visit_Call(self, node):
        # split gast's trailing Starred arg / None-keyed keyword back
        # into py2's separate starargs / kwargs call fields
        if node.args and isinstance(node.args[-1], gast.Starred):
            args = node.args[:-1]
            starargs = node.args[-1].value
        else:
            args = node.args
            starargs = None
        if node.keywords and node.keywords[-1].arg is None:
            keywords = node.keywords[:-1]
            kwargs = node.keywords[-1].value
        else:
            keywords = node.keywords
            kwargs = None
        new_node = ast.Call(
            self._visit(node.func),
            self._visit(args),
            self._visit(keywords),
            self._visit(starargs),
            self._visit(kwargs),
        )
        ast.copy_location(new_node, node)
        return new_node

    def visit_arg(self, node):
        # py2 has no 'arg' node; parameters are Name nodes in Param context
        new_node = ast.Name(node.arg, ast.Param())
        ast.copy_location(new_node, node)
        return new_node

    # arguments
    def visit_arguments(self, node):
        # vararg/kwarg are Name nodes in gast; py2 stores bare identifiers
        vararg = node.vararg and node.vararg.id
        kwarg = node.kwarg and node.kwarg.id

        new_node = ast.arguments(
            self._visit(node.args),
            self._visit(vararg),
            self._visit(kwarg),
            self._visit(node.defaults),
        )
        return new_node
def ast_to_gast(node):
    """Translate a Python 2 ``ast`` tree into its ``gast`` equivalent."""
    converter = Ast2ToGAst()
    return converter.visit(node)
def gast_to_ast(node):
    """Translate a ``gast`` tree back into a Python 2 ``ast`` tree."""
    converter = GAstToAst2()
    return converter.visit(node)
|
iproduct/course-social-robotics
|
11-dnn-keras/venv/Lib/site-packages/gast/ast2.py
|
Python
|
gpl-2.0
| 12,391
|
[
"VisIt"
] |
9d6d491f33f788e3127725d21ed310273b1d7f1847b1c4a05ce2b0b17bf38e2e
|
"""
Plotting functions.
(c) Oscar Branson : https://github.com/oscarbranson
"""
import itertools, re, warnings
import numpy as np
import matplotlib.pyplot as plt
import matplotlib as mpl
from scipy.stats import gaussian_kde
from scipy.optimize import curve_fit
# from IPython import display
from pandas import IndexSlice as idx
from tqdm import tqdm
from .signal import fastgrad, fastsmooth, findmins, bool_2_indices, calc_grads
from .analytes import pretty_element, unitpicker, analyte_checker
from .stat_fns import nominal_values, gauss, R2calc, unpack_uncertainties
def calc_nrow(n, ncol):
    """Number of rows needed to lay out *n* panels in *ncol* columns."""
    nrow, remainder = divmod(n, ncol)
    if remainder:
        # a partial final row still needs a full row of its own
        nrow += 1
    return int(nrow)
def rangecalc(xs, pad=0.05):
    """Return [min, max] of *xs*, expanded by *pad* x range on each side.

    NaN entries are ignored via nanmin/nanmax.
    """
    lo = np.nanmin(xs)
    hi = np.nanmax(xs)
    margin = pad * (hi - lo)
    return [lo - margin, hi + margin]
def trace_plot(self, analytes=None, figsize=[10, 4], scale='log', filt=None,
               ranges=False, stats=False, stat='nanmean', err='nanstd',
               focus_stage=None, err_envelope=False, ax=None):
    """
    Plot analytes as a function of Time.

    Parameters
    ----------
    analytes : array_like
        list of strings containing names of analytes to plot.
        None = all analytes.
    figsize : tuple
        size of final figure.
    scale : str or None
        'log' = plot data on log scale
    filt : bool, str or dict
        False: plot unfiltered data.
        True: plot filtered data over unfiltered data.
        str: apply filter key to all analytes
        dict: apply key to each analyte in dict. Must contain all
        analytes plotted. Can use self.filt.keydict.
    ranges : bool
        show signal/background regions.
    stats : bool
        plot average and error of each trace, as specified by `stat` and
        `err`.
    stat : str
        average statistic to plot.
    err : str
        error statistic to plot.
    focus_stage : str or None
        data stage to plot; defaults to the object's current focus stage.
    err_envelope : bool
        draw a +/- error band around each trace.
    ax : matplotlib axis or None
        draw into an existing axis; when None a new figure is created and
        (fig, ax) is returned.

    Returns
    -------
    figure, axis
    """
    if focus_stage is None:
        focus_stage = self.focus_stage
    # TODO: This is broken. conflict between manually specified focus_stage and analytes provided by D_obj
    analytes = analyte_checker(self, analytes, focus_stage=focus_stage)
    # if analytes is None:
    #     analytes = self.analytes
    # if focus_stage in ['ratios', 'calibrated']:
    #     analytes = self.analyte_ratios

    if ax is None:
        fig = plt.figure(figsize=figsize)
        ax = fig.add_axes([.1, .12, .77, .8])
        # only return (fig, ax) when we created the figure ourselves
        ret = True
    else:
        fig = ax.figure
        ret = False

    for a in analytes:
        if a not in self.data[focus_stage]:
            continue
            # TODO: this is lazy and will fail silently. Should print a warning.
        x = self.Time
        y, yerr = unpack_uncertainties(self.data[focus_stage][a])

        if scale == 'log':
            ax.set_yscale('log')
            # zeros are undefined on a log axis; blank them out
            y[y == 0] = np.nan

        if filt:
            ind = self.filt.grab_filt(filt, a)
            xf = x.copy()
            yf = y.copy()
            yerrf = yerr.copy()
            if any(~ind):
                xf[~ind] = np.nan
                yf[~ind] = np.nan
                yerrf[~ind] = np.nan
            if any(~ind):
                # ghost of the unfiltered trace behind the filtered one
                ax.plot(x, y, color=self.cmap[a], alpha=.2, lw=0.6)
            ax.plot(xf, yf, color=self.cmap[a], label=pretty_element(a))
            if err_envelope:
                ax.fill_between(xf, yf - yerrf, yf + yerrf, color=self.cmap[a],
                                alpha=0.2, zorder=-1)
        else:
            ax.plot(x, y, color=self.cmap[a], label=pretty_element(a))
            if err_envelope:
                ax.fill_between(x, y - yerr, y + yerr, color=self.cmap[a],
                                alpha=0.2, zorder=-1)

    # Plot averages and error envelopes
    if stats and hasattr(self, 'stats'):
        warnings.warn('\nStatistic plotting is broken.\nCheck progress here: https://github.com/oscarbranson/latools/issues/18')
        pass
        # sts = self.stats[sig][0].size
        # if sts > 1:
        #     for n in np.arange(self.n):
        #         n_ind = ind & (self.ns == n + 1)
        #         if sum(n_ind) > 2:
        #             x = [self.Time[n_ind][0], self.Time[n_ind][-1]]
        #             y = [self.stats[sig][self.stats['analytes'] == a][0][n]] * 2
        #             yp = ([self.stats[sig][self.stats['analytes'] == a][0][n] +
        #                   self.stats[err][self.stats['analytes'] == a][0][n]] * 2)
        #             yn = ([self.stats[sig][self.stats['analytes'] == a][0][n] -
        #                   self.stats[err][self.stats['analytes'] == a][0][n]] * 2)
        #             ax.plot(x, y, color=self.cmap[a], lw=2)
        #             ax.fill_between(x + x[::-1], yp + yn,
        #                             color=self.cmap[a], alpha=0.4,
        #                             linewidth=0)
        # else:
        #     x = [self.Time[0], self.Time[-1]]
        #     y = [self.stats[sig][self.stats['analytes'] == a][0]] * 2
        #     yp = ([self.stats[sig][self.stats['analytes'] == a][0] +
        #           self.stats[err][self.stats['analytes'] == a][0]] * 2)
        #     yn = ([self.stats[sig][self.stats['analytes'] == a][0] -
        #           self.stats[err][self.stats['analytes'] == a][0]] * 2)
        #     ax.plot(x, y, color=self.cmap[a], lw=2)
        #     ax.fill_between(x + x[::-1], yp + yn, color=self.cmap[a],
        #                     alpha=0.4, linewidth=0)

    if ranges:
        # shade background (black) and signal (red) time regions
        for lims in self.bkgrng:
            ax.axvspan(*lims, color='k', alpha=0.1, zorder=-1)
        for lims in self.sigrng:
            ax.axvspan(*lims, color='r', alpha=0.1, zorder=-1)

    ax.text(0.01, 0.99, self.sample + ' : ' + focus_stage,
            transform=ax.transAxes,
            ha='left', va='top')

    ax.set_xlabel('Time (s)')
    # NOTE: 'x' here is the value left over from the last loop iteration;
    # an empty analyte list would raise NameError (pre-existing behaviour)
    ax.set_xlim(np.nanmin(x), np.nanmax(x))

    # y label
    ud = {'rawdata': 'counts',
          'despiked': 'counts',
          'bkgsub': 'background corrected counts',
          'ratios': 'counts/count',
          'calibrated': 'mol/mol',
          'mass_fraction': 'Mass Fraction'}
    ax.set_ylabel(ud[focus_stage])
    # if interactive:
    #     ax.legend()
    #     plugins.connect(fig, plugins.MousePosition(fontsize=14))
    #     display.clear_output(wait=True)
    #     display.display(fig)
    #     input('Press [Return] when finished.')
    # else:
    ax.legend(bbox_to_anchor=(1.15, 1))

    if ret:
        return fig, ax
def gplot(self, analytes=None, win=25, figsize=[10, 4], filt=False,
          ranges=False, focus_stage=None, ax=None, recalc=True):
    """
    Plot analytes gradients as a function of Time.

    Parameters
    ----------
    analytes : array_like
        list of strings containing names of analytes to plot.
        None = all analytes.
    win : int
        The window over which to calculate the rolling gradient.
    figsize : tuple
        size of final figure.
    filt : bool, str or dict
        filter specification passed to self.filt.grab_filt.
    ranges : bool
        show signal/background regions.
    focus_stage : str or None
        data stage to use; defaults to the object's current focus stage.
    ax : matplotlib axis or None
        draw into an existing axis; when None a new figure is created.
    recalc : bool
        force recalculation of the gradients even if cached.

    Returns
    -------
    figure, axis
    """
    if focus_stage is None:
        focus_stage = self.focus_stage

    if isinstance(analytes, str):
        analytes = [analytes]
    elif analytes is None:
        # use previously calculated gradients when available, otherwise
        # everything present at this focus stage
        if self.grads_calced:
            analytes = self.grads.keys()
        else:
            analytes = self.data[focus_stage].keys()

    if ax is None:
        fig = plt.figure(figsize=figsize)
        ax = fig.add_axes([.1, .12, .77, .8])
        ret = True
    else:
        fig = ax.figure
        ret = False

    x = self.Time
    if recalc or not self.grads_calced:
        self.grads = calc_grads(x, self.data[focus_stage], analytes, win)
        # BUGFIX: was 'self.grads_calce = True' (typo), so the cache flag
        # was never set and recalc=False always recomputed the gradients.
        self.grads_calced = True

    for a in analytes:
        y = self.grads[a]
        if filt:
            ind = self.filt.grab_filt(filt, a)
            xf = x.copy()
            yf = y.copy()
            if any(~ind):
                xf[~ind] = np.nan
                yf[~ind] = np.nan
            if any(~ind):
                # ghost of the unfiltered gradient behind the filtered one
                ax.plot(x, y, color=self.cmap[a], alpha=.2, lw=0.6)
            ax.plot(xf, yf, color=self.cmap[a], label=pretty_element(a))
        else:
            ax.plot(x, y, color=self.cmap[a], label=pretty_element(a))
    # ax.plot(x, self.grads[a], color=self.cmap[a], label=pretty_element(a))

    if ranges:
        # shade background (black) and signal (red) time regions
        for lims in self.bkgrng:
            ax.axvspan(*lims, color='k', alpha=0.1, zorder=-1)
        for lims in self.sigrng:
            ax.axvspan(*lims, color='r', alpha=0.1, zorder=-1)

    # NOTE: label deliberately left as self.focus_stage (not the local
    # focus_stage) to preserve the original displayed text
    ax.text(0.01, 0.99, self.sample + ' : ' + self.focus_stage + ' : gradient',
            transform=ax.transAxes,
            ha='left', va='top')

    ax.set_xlabel('Time (s)')
    ax.set_xlim(np.nanmin(x), np.nanmax(x))

    # y label
    ud = {'rawdata': 'counts/s',
          'despiked': 'counts/s',
          'bkgsub': 'background corrected counts/s',
          'ratios': 'counts/count/s',
          'calibrated': 'mol/mol/s',
          'mass_fraction': 'Mass Fraction/s'}
    ax.set_ylabel(ud[focus_stage])
    # y tick format

    def yfmt(x, p):
        return '{:.0e}'.format(x)
    ax.yaxis.set_major_formatter(mpl.ticker.FuncFormatter(yfmt))

    ax.legend(bbox_to_anchor=(1.15, 1))
    ax.axhline(0, color='k', lw=1, ls='dashed', alpha=0.5)

    if ret:
        return fig, ax
def crossplot(dat, keys=None, lognorm=True, bins=25, figsize=(12, 12),
              colourful=True, focus_stage=None,
              mode='hist2d', cmap=None, **kwargs):
    """
    Plot analytes against each other.

    The number of plots is n**2 - n, where n = len(keys).

    Parameters
    ----------
    dat : dict
        A dictionary of key: data pairs, where data is the same
        length in each entry.
    keys : optional, array_like or str
        The keys of dat to plot. Defaults to all keys.
    lognorm : bool
        Whether or not to log normalise the colour scale
        of the 2D histogram.
    bins : int
        The number of bins in the 2D histogram.
    figsize : tuple
        Figure size. Expanded automatically if too small for the
        number of variables.
    colourful : bool
        If True, each panel gets its own colour map; otherwise all
        panels are greyscale.
    focus_stage : str
        Passed to `unitpicker` when determining unit multipliers/labels.
    mode : str
        Either 'hist2d' (2D histograms) or 'scatter' (scatter plots).
    cmap : dict
        key: colour pairs used when mode='scatter'. Defaults to black
        for every key.

    Returns
    -------
    (fig, axes)
    """
    if keys is None:
        keys = list(dat.keys())
    numvar = len(keys)
    # grow the figure if it is too small to hold all the panels
    if figsize[0] < 1.5 * numvar:
        figsize = [1.5 * numvar] * 2
    fig, axes = plt.subplots(nrows=numvar, ncols=numvar,
                             figsize=figsize)
    fig.subplots_adjust(hspace=0.05, wspace=0.05)
    for ax in axes.flat:
        # hide all tick labels; edge axes are selectively re-enabled below
        ax.xaxis.set_visible(False)
        ax.yaxis.set_visible(False)
        # NOTE(review): Axes.is_first_col/is_last_col/is_first_row/is_last_row
        # were removed in matplotlib >= 3.6 -- confirm the supported
        # matplotlib version for this module.
        if ax.is_first_col():
            ax.yaxis.set_ticks_position('left')
        if ax.is_last_col():
            ax.yaxis.set_ticks_position('right')
        if ax.is_first_row():
            ax.xaxis.set_ticks_position('top')
        if ax.is_last_row():
            ax.xaxis.set_ticks_position('bottom')
    # set up colour scales
    if colourful:
        cmlist = ['Blues', 'BuGn', 'BuPu', 'GnBu',
                  'Greens', 'Greys', 'Oranges', 'OrRd',
                  'PuBu', 'PuBuGn', 'PuRd', 'Purples',
                  'RdPu', 'Reds', 'YlGn', 'YlGnBu', 'YlOrBr', 'YlOrRd']
    else:
        cmlist = ['Greys']
    if cmap is None and mode == 'scatter':
        cmap = {k: 'k' for k in dat.keys()}
    # recycle the colour map list until it covers all keys
    while len(cmlist) < len(keys):
        cmlist *= 2
    # isolate nominal_values for all keys
    focus = {k: nominal_values(dat[k]) for k in keys}
    # determine units for all keys
    udict = {a: unitpicker(np.nanmean(focus[a]),
                           focus_stage=focus_stage,
                           label=a) for a in keys}
    # determine ranges for all analytes
    rdict = {a: (np.nanmin(focus[a] * udict[a][0]),
                 np.nanmax(focus[a] * udict[a][0])) for a in keys}
    # check for nans: all-nan data gets a placeholder range
    for k, v in rdict.items():
        if any(np.isnan(v)):
            rdict[k] = (-1,1)
    # draw the upper triangle and its mirrored lower-triangle counterpart
    for i, j in tqdm(zip(*np.triu_indices_from(axes, k=1)), desc='Drawing Plots',
                     total=sum(range(len(keys)))):
        # get analytes
        ai = keys[i]
        aj = keys[j]
        # remove nan, apply multipliers
        pi = focus[ai] * udict[ai][0]
        pj = focus[aj] * udict[aj][0]
        # determine normalisation scheme
        if lognorm:
            norm = mpl.colors.LogNorm()
        else:
            norm = None
        # draw plots
        if mode == 'hist2d':
            # remove nan
            pi = pi[~np.isnan(pi)]
            pj = pj[~np.isnan(pj)]
            axes[i, j].hist2d(pj, pi, bins,
                              norm=norm,
                              cmap=plt.get_cmap(cmlist[i]))
            axes[j, i].hist2d(pi, pj, bins,
                              norm=norm,
                              cmap=plt.get_cmap(cmlist[j]))
        elif mode == 'scatter':
            axes[i, j].scatter(pj, pi, s=10,
                               color=cmap[ai], lw=0.5, edgecolor='k',
                               alpha=0.4)
            axes[j, i].scatter(pi, pj, s=10,
                               color=cmap[aj], lw=0.5, edgecolor='k',
                               alpha=0.4)
        else:
            raise ValueError("invalid mode. Must be 'hist2d' or 'scatter'.")
        axes[i, j].set_ylim(*rdict[ai])
        axes[i, j].set_xlim(*rdict[aj])
        axes[j, i].set_ylim(*rdict[aj])
        axes[j, i].set_xlim(*rdict[ai])
    # diagonal labels: analyte unit label, stacked vertically
    for a, n in zip(keys, np.arange(len(keys))):
        label = udict[a][1].split(' ')
        axes[n, n].annotate('\n'.join(label[::-1]), (0.5, 0.5),
                            xycoords='axes fraction',
                            ha='center', va='center', fontsize=10)
        axes[n, n].set_xlim(*rdict[a])
        axes[n, n].set_ylim(*rdict[a])
    # switch on alternating axes (tick labels alternate top/bottom and
    # left/right along the grid edges)
    for i, j in zip(range(numvar), itertools.cycle((-1, 0))):
        axes[j, i].xaxis.set_visible(True)
        for label in axes[j, i].get_xticklabels():
            label.set_rotation(90)
        axes[i, j].yaxis.set_visible(True)
    return fig, axes
def histograms(dat, keys=None, bins=25, logy=False, cmap=None, ncol=4):
    """
    Plot histograms of all items in dat.

    Parameters
    ----------
    dat : dict
        Data in {key: array} pairs.
    keys : array-like
        The keys in dat that you want to plot. If None,
        all are plotted.
    bins : int
        The number of bins in each histogram (default = 25)
    logy : bool
        If true, y axis is a log scale.
    cmap : dict
        The colours that the different items should be. If None,
        all are grey.
    ncol : int
        The number of columns in the figure grid (default = 4).

    Returns
    -------
    fig, axes
    """
    if keys is None:
        keys = dat.keys()
    ncol = int(ncol)
    nrow = calc_nrow(len(keys), ncol)
    # BUG FIX: the column count was hard-coded to 4, ignoring `ncol`,
    # which made the grid inconsistent with calc_nrow's layout.
    fig, axs = plt.subplots(nrow, ncol, figsize=[ncol * 2, nrow * 2])
    pn = 0
    for k, ax in zip(keys, axs.flat):
        tmp = nominal_values(dat[k])
        x = tmp[~np.isnan(tmp)]  # drop NaNs before binning
        m, u = unitpicker(x)  # unit multiplier and label
        if cmap is not None:
            c = cmap[k]
        else:
            c = (0, 0, 0, 0.5)
        ax.hist(x * m, bins=bins, color=c)
        if logy:
            ax.set_yscale('log')
            ylab = '$log_{10}(n)$'
        else:
            ylab = 'n'
        ax.set_ylim(1, ax.get_ylim()[1])
        # NOTE(review): Axes.is_first_col was removed in matplotlib >= 3.6;
        # confirm the supported matplotlib version.
        if ax.is_first_col():
            ax.set_ylabel(ylab)
        ax.set_yticklabels([])
        ax.text(.95, .95, k, ha='right', va='top', transform=ax.transAxes)
        pn += 1
    # hide any unused axes in the grid
    for ax in axs.flat[pn:]:
        ax.set_visible(False)
    fig.tight_layout()
    return fig, axs
def autorange_plot(t, sig, gwin=7, swin=None, win=30,
                   on_mult=(1.5, 1.), off_mult=(1., 1.5),
                   nbin=10, thresh=None):
    """
    Function for visualising the autorange mechanism.

    Parameters
    ----------
    t : array-like
        Independent variable (usually time).
    sig : array-like
        Dependent signal, with distinctive 'on' and 'off' regions.
    gwin : int
        The window used for calculating first derivative.
        Defaults to 7.
    swin : int
        The window used for signal smoothing. If None, no smoothing
        is applied.
    win : int
        The width (c +/- win) of the transition data subsets.
        Defaults to 30.
    on_mult and off_mult : tuple, len=2
        Control the width of the excluded transition regions, which is defined
        relative to the peak full-width-half-maximum (FWHM) of the transition
        gradient. The region n * FWHM below the transition, and m * FWHM above
        the transition will be excluded, where (n, m) are specified in `on_mult`
        and `off_mult`.
        `on_mult` and `off_mult` apply to the off-on and on-off transitions,
        respectively.
        Defaults to (1.5, 1) and (1, 1.5).
    nbin : int
        The number of points at which the kernel density estimate of the
        signal distribution is evaluated.
    thresh : float
        If given, overrides the automatically identified signal/background
        threshold.

    Returns
    -------
    fig, axes
    """
    # optionally smooth the signal before analysis
    if swin is not None:
        sigs = fastsmooth(sig, swin)
    else:
        sigs = sig
    # perform autorange calculations:
    # estimate the signal's probability density; minima in the density
    # separate the background and signal populations
    kde_x = np.linspace(sig.min(), sig.max(), nbin)
    kde = gaussian_kde(sigs)
    yd = kde.pdf(kde_x)
    mins = findmins(kde_x, yd)  # find minima in kde
    if thresh is not None:
        mins = [thresh]
    if len(mins) > 0:
        bkg = sigs < (mins[0])  # set background as lowest distribution
    else:
        bkg = np.ones(sig.size, dtype=bool)
    # bkg[0] = True # the first value must always be background
    # assign rough background and signal regions based on kde minima
    fbkg = bkg
    fsig = ~bkg
    g = abs(fastgrad(sigs, gwin))  # calculate gradient of signal
    # 2. determine the approximate index of each transition
    zeros = bool_2_indices(fsig)
    if zeros is not None:
        zeros = zeros.flatten()
        # parallel per-transition records, consumed together below
        lohi = []
        pgs = []
        excl = []
        tps = []
        failed = []
        for z in zeros:  # for each approximate transition
            # isolate the data around the transition, clipping at the
            # edges of the valid gradient window
            if z - win < 0:
                lo = gwin // 2
                hi = int(z + win)
            elif z + win > (len(sig) - gwin // 2):
                lo = int(z - win)
                hi = len(sig) - gwin // 2
            else:
                lo = int(z - win)
                hi = int(z + win)
            xs = t[lo:hi]
            ys = g[lo:hi]
            lohi.append([lo, hi])
            # determine type of transition (on/off)
            mid = (hi + lo) // 2
            tp = sigs[mid + 3] > sigs[mid - 3]  # True if 'on' transition.
            tps.append(tp)
            c = t[z]  # center of transition
            width = (t[1] - t[0]) * 2  # initial width guess
            try:
                # weight the fit towards the transition centre
                pg, _ = curve_fit(gauss, xs, ys,
                                  p0=(np.nanmax(ys),
                                      c,
                                      width),
                                  sigma=(xs - c)**2 + .01)
                pgs.append(pg)
                fwhm = abs(2 * pg[-1] * np.sqrt(2 * np.log(2)))
                # apply on_mult or off_mult, as appropriate.
                if tp:
                    lim = np.array([-fwhm, fwhm]) * on_mult + pg[1]
                else:
                    lim = np.array([-fwhm, fwhm]) * off_mult + pg[1]
                excl.append(lim)
                fbkg[(t > lim[0]) & (t < lim[1])] = False
                fsig[(t > lim[0]) & (t < lim[1])] = False
                failed.append(False)
            except RuntimeError:
                # BUG FIX: `lohi` and `tps` were appended a second time here
                # (they are already appended before the fit), which pushed
                # the parallel per-transition lists out of alignment after
                # any failed fit. Only the not-yet-filled lists get entries.
                failed.append(True)
                pgs.append([np.nan, np.nan, np.nan])
                excl.append([np.nan, np.nan])
    else:
        zeros = []
    # make plot
    nrows = 2 + len(zeros) // 2 + len(zeros) % 2
    fig, axs = plt.subplots(nrows, 2, figsize=(6, 4 + 1.5 * nrows))
    # Trace
    ax1, ax2, ax3, ax4 = axs.flat[:4]
    ax4.set_visible(False)
    # widen ax1 & 3
    for ax in [ax1, ax3]:
        p = ax.axes.get_position()
        p2 = [p.x0, p.y0, p.width * 1.75, p.height]
        ax.axes.set_position(p2)
    # move ax3 up
    p = ax3.axes.get_position()
    p2 = [p.x0, p.y0 + 0.15 * p.height, p.width, p.height]
    ax3.axes.set_position(p2)
    # truncate ax2
    p = ax2.axes.get_position()
    p2 = [p.x0 + p.width * 0.6, p.y0, p.width * 0.4, p.height]
    ax2.axes.set_position(p2)
    # plot traces and gradient
    ax1.plot(t, sig, color='k', lw=1)
    ax1.set_xticklabels([])
    ax1.set_ylabel('Signal')
    ax3.plot(t, g, color='k', lw=1)
    ax3.set_xlabel('Time (s)')
    ax3.set_ylabel('Gradient')
    # plot kde
    ax2.fill_betweenx(kde_x, yd, color=(0, 0, 0, 0.2))
    ax2.plot(yd, kde_x, color='k')
    ax2.set_ylim(ax1.get_ylim())
    ax2.set_yticklabels([])
    ax2.set_xlabel('Data\nDensity')
    # threshold line
    # ROBUSTNESS: previously raised IndexError when no kde minima were
    # found and no explicit `thresh` was given.
    if len(mins) > 0:
        for ax in [ax1, ax2]:
            ax.axhline(mins[0], color='k', ls='dashed', alpha=0.4)
    if len(zeros) > 0:
        # mark approximate transition positions
        for z in zeros:
            ax1.axvline(t[z], color='r', alpha=0.5)
            ax3.axvline(t[z], color='r', alpha=0.5)
        # plot individual transitions
        n = 1
        for (lo, hi), lim, tp, pg, fail, ax in zip(lohi, excl, tps, pgs, failed, axs.flat[4:]):
            # plot region on gradient axis
            ax3.axvspan(t[lo], t[hi], color='r', alpha=0.1, zorder=-2)
            # plot individual transitions
            x = t[lo:hi]
            y = g[lo:hi]
            ys = sig[lo:hi]
            ax.scatter(x, y, color='k', marker='x', zorder=-1, s=10)
            ax.set_yticklabels([])
            ax.set_ylim(rangecalc(y))
            tax = ax.twinx()
            tax.plot(x, ys, color='k', alpha=0.3, zorder=-5)
            tax.set_yticklabels([])
            tax.set_ylim(rangecalc(ys))
            # plot fitted gaussian
            xn = np.linspace(x.min(), x.max(), 100)
            ax.plot(xn, gauss(xn, *pg), color='r', alpha=0.5)
            # plot center and excluded region
            ax.axvline(pg[1], color='b', alpha=0.5)
            ax.axvspan(*lim, color='b', alpha=0.1, zorder=-2)
            ax1.axvspan(*lim, color='b', alpha=0.1, zorder=-2)
            if tp:
                ax.text(.05, .95, '{} (on)'.format(n), ha='left',
                        va='top', transform=ax.transAxes)
            else:
                ax.text(.95, .95, '{} (off)'.format(n), ha='right',
                        va='top', transform=ax.transAxes)
            # NOTE(review): is_last_row/is_first_col/is_last_col were
            # removed in matplotlib >= 3.6 -- confirm the pinned version.
            if ax.is_last_row():
                ax.set_xlabel('Time (s)')
            if ax.is_first_col():
                ax.set_ylabel('Gradient (x)')
            if ax.is_last_col():
                tax.set_ylabel('Signal (line)')
            if fail:
                ax.axes.set_facecolor((1, 0, 0, 0.2))
                ax.text(.5, .5, 'FAIL', ha='center', va='center',
                        fontsize=16, color=(1, 0, 0, 0.5), transform=ax.transAxes)
            n += 1
    # should never be, but just in case...
    if len(zeros) % 2 == 1:
        # BUG FIX: `set_visible = False` assigned over the method
        # instead of calling it; the spare axis was never hidden.
        axs.flat[-1].set_visible(False)
    return fig, axs
def calibration_plot(self, analyte_ratios=None, datarange=True, loglog=False, ncol=3, srm_group=None, percentile_data_cutoff=85, save=True):
    """
    Plot the calibration lines between measured and known SRM values.

    Parameters
    ----------
    analyte_ratios : optional, array_like or str
        The analyte ratio(s) to plot. Defaults to all analyte ratios.
    datarange : boolean
        Whether or not to show the distribution of the measured data
        alongside the calibration curve.
    loglog : boolean
        Whether or not to plot the data on a log - log scale. This is
        useful if you have two low standards very close together,
        and want to check whether your data are between them, or
        below them.
    ncol : int
        The number of columns in the plot
    srm_group : int
        Which groups of SRMs to plot in the analysis.
    percentile_data_cutoff : float
        The upper percentile of data to display in the histogram.
    save : bool
        Whether to save the figure to '<report_dir>/calibration.pdf'.

    Returns
    -------
    (fig, axes)
    """
    if isinstance(analyte_ratios, str):
        analyte_ratios = [analyte_ratios]
    if analyte_ratios is None:
        # analytes = self._calib_analytes
        analyte_ratios = self.analytes_sorted(self._srm_id_analyte_ratios)
        # analytes = self.analytes_sorted(self.analytes.difference([self.internal_standard]))
    if srm_group is not None:
        # map SRM group number -> group centre time
        srm_groups = {int(g): t for g, t in self.stdtab.loc[:, ['group', 'gTime']].values}
        try:
            gTime = srm_groups[srm_group]
        except KeyError:
            # NOTE(review): after printing this message, execution continues
            # with `gTime` unbound, which will raise a NameError below --
            # consider returning here instead. Confirm intended behaviour.
            text = ('Invalid SRM group selection. Valid options are:\n' +
                    ' Key: Time Centre\n' +
                    '\n'.join([' {:}: {:.1f}s'.format(k, v) for k, v in srm_groups.items()]))
            print(text)
    else:
        gTime = None
    ncol = int(ncol)
    n = len(analyte_ratios)
    # one extra grid cell is reserved for the SRM marker legend
    nrow = calc_nrow(n + 1, ncol)
    axes = []
    if not datarange:
        fig = plt.figure(figsize=[4.1 * ncol, 3 * nrow])
    else:
        fig = plt.figure(figsize=[4.7 * ncol, 3 * nrow])
        self.get_focus()
    gs = mpl.gridspec.GridSpec(nrows=int(nrow), ncols=int(ncol),
                               hspace=0.35, wspace=0.3)
    mdict = self.srm_mdict  # SRM name -> marker style
    for g, a in zip(gs, analyte_ratios):
        num, denom = a.split('_')
        if not datarange:
            ax = fig.add_axes(g.get_position(fig))
            axes.append((ax,))
        else:
            # split the grid cell: calibration axis (left, fraction f)
            # and data-distribution histogram (right)
            f = 0.8
            p0 = g.get_position(fig)
            p1 = [p0.x0, p0.y0, p0.width * f, p0.height]
            p2 = [p0.x0 + p0.width * f, p0.y0, p0.width * (1 - f), p0.height]
            ax = fig.add_axes(p1)
            axh = fig.add_axes(p2)
            axes.append((ax, axh))
        # select all SRM measurements, or only the chosen group's
        if gTime is None:
            sub = idx[:,:]
        else:
            sub = idx[gTime, :]
        x = self.caltab.loc[sub, (a, 'meas_mean')].values
        xe = self.caltab.loc[sub, (a, 'meas_err')].values
        y = self.caltab.loc[sub, (a, 'srm_mean')].values
        ye = self.caltab.loc[sub, (a, 'srm_err')].values
        srm = self.caltab.loc[sub, ('SRM')]
        # plot calibration data, one marker style per SRM
        for s, m in mdict.items():
            ind = srm == s
            ax.errorbar(x[ind], y[ind], xerr=xe[ind], yerr=ye[ind],
                        color=self.cmaps[a], alpha=0.6,
                        lw=0, elinewidth=1, marker=m, #'o',
                        capsize=0, markersize=5, label='_')
        # work out axis scaling
        if not loglog:
            xmax = np.nanmax(x + xe)
            ymax = np.nanmax(y + ye)
            if any(x - xe < 0):
                xmin = np.nanmin(x - xe)
                xpad = (xmax - xmin) * 0.05
                xlim = [xmin - xpad, xmax + xpad]
            else:
                xlim = [0, xmax * 1.05]
            if any(y - ye < 0):
                ymin = np.nanmin(y - ye)
                ypad = (ymax - ymin) * 0.05
                ylim = [ymin - ypad, ymax + ypad]
            else:
                ylim = [0, ymax * 1.05]
        else:
            # log axes can only show positive values
            xd = self.caltab.loc[:, (a, 'meas_mean')][self.caltab.loc[:, (a, 'meas_mean')] > 0].values
            yd = self.caltab.loc[:, (a, 'srm_mean')][self.caltab.loc[:, (a, 'srm_mean')] > 0].values
            xlim = [10**np.floor(np.log10(np.nanmin(xd))),
                    10**np.ceil(np.log10(np.nanmax(xd)))]
            ylim = [10**np.floor(np.log10(np.nanmin(yd))),
                    10**np.ceil(np.log10(np.nanmax(yd)))]
            # scale sanity checks: avoid degenerate (zero-width) limits
            if xlim[0] == xlim[1]:
                xlim[0] = ylim[0]
            if ylim[0] == ylim[1]:
                ylim[0] = xlim[0]
            ax.set_xscale('log')
            ax.set_yscale('log')
        ax.set_xlim(xlim)
        ax.set_ylim(ylim)
        # visual warning if any values < 0
        if xlim[0] < 0:
            ax.axvspan(xlim[0], 0, color=(1,0.8,0.8), zorder=-1)
        if ylim[0] < 0:
            ax.axhspan(ylim[0], 0, color=(1,0.8,0.8), zorder=-1)
        if any(x < 0) or any(y < 0):
            ax.text(.5, .5, 'WARNING: Values below zero.', color='r', weight='bold',
                    ha='center', va='center', rotation=40, transform=ax.transAxes, alpha=0.6)
        # calculate line and R2 (`x` is reused here as the line's x values)
        if loglog:
            x = np.logspace(*np.log10(xlim), 100)
        else:
            x = np.array(xlim)
        if gTime is None:
            coefs = self.calib_params.loc[:, a]
        else:
            coefs = self.calib_params.loc[gTime, a]
        m = np.nanmean(coefs['m'])
        m_nom = nominal_values(m)
        # calculate case-specific parameters: with or without intercept
        if 'c' in coefs:
            c = np.nanmean(coefs['c'])
            c_nom = nominal_values(c)
            # calculate R2
            ym = self.caltab.loc[:, (a, 'meas_mean')] * m_nom + c_nom
            R2 = R2calc(self.caltab.loc[:, (a, 'srm_mean')], ym, force_zero=False)
            # generate line and label
            line = x * m_nom + c_nom
            label = 'y = {:.2e} x'.format(m)
            if c > 0:
                label += '\n+ {:.2e}'.format(c)
            else:
                label += '\n {:.2e}'.format(c)
        else:
            # calculate R2 (through-origin fit)
            ym = self.caltab.loc[:, (a, 'meas_mean')] * m_nom
            R2 = R2calc(self.caltab.loc[:, (a, 'srm_mean')], ym, force_zero=True)
            # generate line and label
            line = x * m_nom
            label = 'y = {:.2e} x'.format(m)
        # plot line of best fit
        ax.plot(x, line, color=(0, 0, 0, 0.5), ls='dashed')
        # add R2 to label
        if round(R2, 3) == 1:
            label = '$R^2$: >0.999\n' + label
        else:
            label = '$R^2$: {:.3f}\n'.format(R2) + label
        ax.text(.05, .95, pretty_element(a), transform=ax.transAxes,
                weight='bold', va='top', ha='left', size=12)
        ax.set_xlabel('counts/counts')
        ax.set_ylabel('mol/mol')
        # write calibration equation on graph happens after data distribution
        # plot data distribution histogram alongside calibration plot
        if datarange:
            # isolate data
            meas = nominal_values(self.focus[a])
            meas = meas[~np.isnan(meas)]
            # check and set y scale so the histogram fits
            if np.nanmin(meas) < ylim[0]:
                if loglog:
                    mmeas = meas[meas > 0]
                    ylim[0] = 10**np.floor(np.log10(np.nanmin(mmeas)))
                else:
                    ylim[0] = 0
            mquant = np.percentile(meas[~np.isnan(meas)], percentile_data_cutoff) * 1.05
            if mquant > ylim[1]:
                if loglog:
                    ylim[1] = 10**np.ceil(np.log10(mquant))
                else:
                    ylim[1] = mquant
            # hist
            if loglog:
                bins = np.logspace(*np.log10(ylim), 30)
            else:
                bins = np.linspace(*ylim, 30)
            axh.hist(meas, bins=bins, orientation='horizontal',
                     color=self.cmaps[a], lw=0.5, alpha=0.5)
            if loglog:
                axh.set_yscale('log')
            axh.set_ylim(ylim) # ylim of histogram axis
            ax.set_ylim(ylim) # ylim of calibration axis
            axh.set_xticks([])
            axh.set_yticklabels([])
        # write calibration equation on graph; position depends on where
        # the calibration data sit within the y range
        cmax = np.nanmax(y)
        if cmax / ylim[1] > 0.5:
            ax.text(0.98, 0.04, label, transform=ax.transAxes,
                    va='bottom', ha='right')
        else:
            ax.text(0.02, 0.75, label, transform=ax.transAxes,
                    va='top', ha='left')
    if srm_group is None:
        title = 'All SRMs'
    else:
        title = 'SRM Group {:} (centre at {:.1f}s)'.format(srm_group, gTime)
    axes[0][0].set_title(title, loc='left', weight='bold', fontsize=12)
    # SRM legend in the spare grid cell
    ax = fig.add_axes(gs[-1].get_position(fig))
    for lab, m in mdict.items():
        ax.scatter([],[],marker=m, label=lab, color=(0,0,0,0.6))
    ax.legend()
    ax.axis('off')
    if save:
        fig.savefig(self.report_dir + '/calibration.pdf')
    return fig, axes
# def calibration_drift_plot(self, analytes=None, ncol=3, save=True):
# """
# Plot calibration slopes through the entire session.
# Parameters
# ----------
# self : latools.analyse
# Analyse object, containing
# analytes : optional, array_like or str
# The analyte(s) to plot. Defaults to all analytes.
# ncol : int
# Number of columns of plots
# save : bool
# Whether or not to save the plot.
# Returns
# -------
# (fig, axes)
# """
# if not hasattr(self, 'calib_params'):
# raise ValueError('Please run calibrate before making this plot.')
# if analytes is None:
# analytes = [a for a in self.analytes if self.internal_standard not in a]
# ncol = int(ncol)
# n = len(analytes)
# nrow = calc_nrow(n, ncol)
# axes = []
# fig = plt.figure(figsize=[6 * ncol, 3 * nrow])
# gs = mpl.gridspec.GridSpec(nrows=int(nrow), ncols=int(ncol),
# hspace=0.35, wspace=0.3)
# cp = self.calib_params
# for g, a in zip(gs, analytes):
# ax = fig.add_axes(g.get_position(fig))
# axes.append(ax)
# ax.plot(cp.index, nominal_values(cp.loc[:, (a, 'm')]), color=self.cmaps[a])
# ax.fill_between(cp.index,
# nominal_values(cp.loc[:, (a, 'm')]) - std_devs(cp.loc[:, (a, 'm')]),
# nominal_values(cp.loc[:, (a, 'm')]) + std_devs(cp.loc[:, (a, 'm')]),
# color=self.cmaps[a], alpha=0.2, lw=0)
# ax.text(.05, .95, pretty_element(a), transform=ax.transAxes,
# weight='bold', va='top', ha='left', size=12)
# ax.set_xlabel('Time (s)')
# ax.set_ylabel('mol/mol ' + self.internal_standard)
# if save:
# fig.savefig(self.report_dir + '/calibration_drift.pdf')
# return fig, axes
def filter_report(Data, filt=None, analytes=None, savedir=None, nbin=5):
    """
    Visualise effect of data filters.

    Parameters
    ----------
    Data : latools.D
        The data object whose filters are visualised.
    filt : str
        Exact or partial name of filter to plot. Supports
        partial matching. i.e. if 'cluster' is specified, all
        filters with 'cluster' in the name will be plotted.
        Defaults to all filters.
    analytes : optional, str or array-like
        Name of analyte(s) to plot. Defaults to all analytes.
    savedir : str
        If given, each figure is saved to this directory as
        '<sample>_<analyte>.pdf' and closed.
    nbin : int
        Controls histogram bin count: bins = len(data) // nbin.

    Returns
    -------
    (fig, axes)
    """
    # select filters to display: all, or those partially matching `filt`
    if filt is None or filt == 'all':
        sets = Data.filt.filter_table
    else:
        sets = Data.filt.filter_table.loc[idx[:, [i for i in Data.filt.filter_table.index.levels[1] if filt in i]], :]
    cm = plt.cm.get_cmap('Spectral')
    ngrps = sets.shape[0]
    if analytes is None:
        analytes = Data.analytes
    elif isinstance(analytes, str):
        analytes = [analytes]
    axes = []
    for analyte in analytes:
        fig = plt.figure()
        for i in sets.index.levels[0]:
            filts = sets.loc[i]
            # split filter names into their components
            nfilts = np.array([f.split('_') for f in filts.index])
            # filter group name: first and second-to-last name components
            fgnames = np.array(['_'.join(a) for a in nfilts[:, (0,-2)]])
            fgrp = np.unique(fgnames)[0]
            if 'DBSCAN' in fgrp:
                warnings.warn('filter_report is not implemented for DBSCAN')
                continue
            fig.set_size_inches(10, 3.5 * ngrps)
            h = .8 / ngrps  # vertical fraction per filter group
            y = nominal_values(Data.focus[analyte])
            yh = y[~np.isnan(y)]
            m, u = unitpicker(np.nanmax(y),
                              focus_stage=Data.focus_stage)
            # one trace axis (tax) + one histogram axis (hax) per group
            axs = tax, hax = (fig.add_axes([.1, .9 - (i + 1) * h, .6, h * .98]),
                              fig.add_axes([.7, .9 - (i + 1) * h, .2, h * .98]))
            axes.append(axs)
            # get variables
            fg = sets.index.get_level_values(1)  # filter names
            components = Data.filt.filter_components.loc[:, i]  # component dataframe
            cs = cm(np.linspace(0, 1, len(fg)))
            fn = ['_'.join(x) for x in nfilts[:, (0, -1)]]  # legend labels
            an = nfilts[:, 0]
            bins = np.linspace(np.nanmin(y), np.nanmax(y), len(yh) // nbin) * m
            # plot all data
            tax.scatter(Data.Time, m * y, color='k', alpha=0.2, lw=0.1,
                        s=20, label='excl')
            hax.hist(m * yh, bins, alpha=0.2, orientation='horizontal',
                     color='k', lw=0)
            # plot filtered data, overlaid per filter
            for f, c, lab in zip(fg, cs, fn):
                ind = components[f]
                tax.scatter(Data.Time[ind], m * y[ind],
                            edgecolor=(0,0,0,0), color=c, s=15, label=lab)
                hax.hist(m * y[ind][~np.isnan(y[ind])], bins, color=c, lw=0.1,
                         orientation='horizontal', alpha=0.6)
            # mark the threshold line for threshold-type filters
            if 'thresh' in fgrp and analyte in fgrp:
                tax.axhline(Data.filt.params[fg[0]]['threshold'] * m,
                            ls='dashed', zorder=-2, alpha=0.5, color='k')
                hax.axhline(Data.filt.params[fg[0]]['threshold'] * m,
                            ls='dashed', zorder=-2, alpha=0.5, color='k')
            # formatting: shared y limits with 5% padding
            for ax in axs:
                mn = np.nanmin(y) * m
                mx = np.nanmax(y) * m
                rn = mx - mn
                ax.set_ylim(mn - .05 * rn, mx + 0.05 * rn)
            # legend
            hn, la = tax.get_legend_handles_labels()
            hax.legend(hn, la, loc='upper right', scatterpoints=1)
            tax.text(.02, .98, Data.sample + ': ' + fgrp, size=12,
                     weight='bold', ha='left', va='top',
                     transform=tax.transAxes)
            tax.set_ylabel(pretty_element(analyte) + ' (' + u + ')')
            tax.set_xticks(tax.get_xticks()[:-1])
            hax.set_yticklabels([])
            # only the bottom group keeps its x labels
            if i < ngrps - 1:
                tax.set_xticklabels([])
                hax.set_xticklabels([])
            else:
                tax.set_xlabel('Time (s)')
                hax.set_xlabel('n')
        if isinstance(savedir, str):
            fig.savefig(savedir + '/' + Data.sample + '_' +
                        analyte + '.pdf')
            plt.close(fig)
    # NOTE(review): only the figure/axes of the LAST analyte are returned;
    # with savedir set that figure has already been closed -- confirm.
    return fig, axes
# Old DBSCAN code
# if 'DBSCAN' in fgrp:
# # determine data filters
# core_ind = components[[f for f in fg
# if 'core' in f][0]]
# other = np.array([('noise' not in f) & ('core' not in f)
# for f in fg])
# tfg = fg[other]
# tfn = fn[other]
# tcs = cm(np.linspace(0, 1, len(tfg)))
# # plot all data
# hax.hist(m * yh, bins, alpha=0.2, orientation='horizontal',
# color='k', lw=0)
# # legend markers for core/member
# tax.scatter([], [], s=20, label='core', color='w', lw=0.5, edgecolor='k')
# tax.scatter([], [], s=7.5, label='member', color='w', lw=0.5, edgecolor='k')
# # plot noise
# try:
# noise_ind = components[[f for f in fg
# if 'noise' in f][0]]
# tax.scatter(Data.Time[noise_ind], m * y[noise_ind],
# lw=1, color='k', s=10, marker='x',
# label='noise', alpha=0.6)
# except:
# pass
# # plot filtered data
# for f, c, lab in zip(tfg, tcs, tfn):
# ind = components[f]
# tax.scatter(Data.Time[~core_ind & ind],
# m * y[~core_ind & ind], lw=.5, color=c, s=5, edgecolor='k')
# tax.scatter(Data.Time[core_ind & ind],
# m * y[core_ind & ind], lw=.5, color=c, s=15, edgecolor='k',
# label=lab)
# hax.hist(m * y[ind][~np.isnan(y[ind])], bins, color=c, lw=0.1,
# orientation='horizontal', alpha=0.6)
def correlation_plot(self, corr=None):
    """
    Plot the rolling correlation between two analytes.

    Parameters
    ----------
    corr : str
        The correlation to plot, as an '{X}_{Y}_{window}' key of
        self.correlations (as produced by threshold_correlation).
        If None and exactly one correlation has been calculated,
        that one is plotted.

    Returns
    -------
    fig, axs
    """
    if len(self.correlations) == 0:
        raise ValueError("No calculated correlations. Run threshold_correlation first.")
    if corr is None:
        if len(self.correlations) == 1:
            corr = list(self.correlations.keys())[0]
    if corr not in self.correlations:
        # BUG FIX: message previously read 'not founself.' and joined the
        # *characters* of each key instead of listing the keys themselves.
        raise ValueError("{:} not found. Please use one of [{:}]".format(corr, ', '.join(self.correlations.keys())))
    x_analyte, y_analyte, window = corr.split('_')
    r, p = self.correlations[corr]
    fig, axs = plt.subplots(3, 1, figsize=[7, 5], sharex=True)
    # plot the two analyte signals
    ax = axs[0]
    ax.plot(self.Time, nominal_values(self.focus[x_analyte]), color=self.cmap[x_analyte], label=x_analyte)
    ax.plot(self.Time, nominal_values(self.focus[y_analyte]), color=self.cmap[y_analyte], label=y_analyte)
    ax.set_yscale('log')
    ax.legend()
    ax.set_ylabel('Signals')
    # plot the rolling Pearson R
    ax = axs[1]
    ax.plot(self.Time, r)
    ax.set_ylabel('Pearson R')
    # plot the significance level
    ax = axs[2]
    ax.plot(self.Time, p)
    # BUG FIX: axis label typo 'pignificance' -> 'Significance'
    ax.set_ylabel('Significance Level (p)')
    ax.set_xlabel('Time (s)')
    fig.tight_layout()
    return fig, axs
def stackhist(data_arrays, bins=None, bin_range=(1,99), yoffset=0, ax=None, **kwargs):
    """
    Plots a stacked histogram of multiple arrays.

    Parameters
    ----------
    data_arrays : iterable
        iterable containing all the arrays to plot on the histogram
    bins : array-like or int
        Either the number of bins (int) or an array of bin edges.
    bin_range : tuple
        If bins is not specified, this specifies the percentile range used for the bins.
        By default, the histogram is plotted between the 1st and 99th percentiles of the
        data.
    yoffset : float
        The y offset of the histogram base. Useful if stacking multiple histograms on a
        single axis.
    ax : matplotlib.Axes
        Axis to draw on. If None, a new figure/axis is created.

    Returns
    -------
    matplotlib.Axes : the axis the histograms were drawn on.
    """
    if ax is None:
        fig, ax = plt.subplots(1, 1)
    # calculate histogram bins
    all_data = np.concatenate(data_arrays)
    data_range = np.percentile(all_data[~np.isnan(all_data)], bin_range)
    if isinstance(bins, int):
        bins = np.linspace(*data_range, bins)
    elif bins is None:
        bins = np.linspace(*data_range, 50)
    else:
        bins = np.asarray(bins)
    xleft = bins[:-1]
    # BUG FIX: the bar width was taken from the first bin only
    # (bins[1] - bins[0]), drawing wrong-width bars for non-uniform bin
    # edges. Per-bin widths handle both uniform and non-uniform edges.
    bws = np.diff(bins)
    # normalise all histograms by the global maximum bin count, so the
    # tallest stacked bar has height 1
    nmax = np.max(np.histogram(all_data[~np.isnan(all_data)], bins)[0])
    y0 = np.full(len(xleft), yoffset, dtype=float)
    for a in data_arrays:
        yn, _ = np.histogram(a[~np.isnan(a)], bins)
        yn = yn.astype(float) / nmax
        ax.bar(xleft, yn, bws, bottom=y0, align='edge', **kwargs)
        y0 += yn  # stack the next histogram on top of this one
    return ax
|
oscarbranson/latools
|
latools/helpers/plot.py
|
Python
|
mit
| 44,377
|
[
"Gaussian"
] |
5e67a86c00d07a6d9c7a89b2ac329ed4ca397a511b37ebc9a4d857c5f84d0cf7
|
# Demo: simulate spike trains from a sparse Bernoulli GLM with
# latent-distance structured weights, then fit the same model with Gibbs
# sampling and compare the inferred network and latent locations to truth.
import sys;
sys.path.append("/Users/roger/Dropbox/pyglm-master/pyglm/")
sys.path.append("/Users/roger/Dropbox/pyglm-master/pyglm/utils/")
import numpy as np
import pickle
np.random.seed(150)
import matplotlib.pyplot as plt
import seaborn as sns
sns.set_style("white")
sns.set_context("paper")
plt.ion()
from pybasicbayes.util.text import progprint_xrange
from pyglm.utils.basis import cosine_basis
from pyglm.plotting import plot_glm
from models import LatentDistanceWeightsSparseBernoulliGLM

T = 10000   # Number of time bins to generate
N = 10      # Number of neurons
B = 1       # Number of "basis functions"
L = 100     # Autoregressive window of influence
D = 2       # Dimensionality of the feature space

# Create a cosine basis to model smooth influence of
# spikes on one neuron on the later spikes of others.
basis = cosine_basis(B=B, L=L) / L

# Generative ("true") model whose parameters we try to recover.
true_model = LatentDistanceWeightsSparseBernoulliGLM(N, basis=basis, regression_kwargs=dict(rho=0.7, S_w=1, mu_b=-2))

# Set the true locations to be on a circle
# r = 1.5 + np.arange(N) // (N / 2.)
r = 1.55
th = np.linspace(0, 2 * np.pi, N, endpoint=False)
x = r * np.cos(th)
y = r * np.sin(th)
L_D = np.hstack((x[:, None], y[:, None]))
true_model.network.L = L_D

# Simulated weights: draw each incoming weight from the network prior,
# masked by the sparsity indicator a[n]
for m in range(N):
    for n in range(N):
        true_model.regressions[m].W[n, :] = true_model.regressions[m].a[n] * np.random.multivariate_normal(
            true_model.network.mu_W[m, n], true_model.network.sigma_W[m, n])

# Force strong self-inhibition on the diagonal
for k in range(N):
    true_model.regressions[k].a[k] = True
    true_model.regressions[k].W[k,:] = -3

_, Y = true_model.generate(T=T, keep=True)
mean_spikecount = Y.sum(0) / T

# Plot raster plot
sns.heatmap(np.transpose(Y), xticklabels=False)

# Plot the true model
fig, axs, handles = true_model.plot()
plt.pause(0.1)

# Plot cross-correlation between neurons
# fig, axs = plt.subplots(10, 10)
# window_length = 0
# for i in range(10):
#     for j in range(i,10):
#         axs[i,j].xcorr(Y[:,i+window_length], Y[:,j+window_length], maxlags=30)
#         axs[i,j].set_title('C' + str(i+1) + str(j+1))
#
# plt.xlabel('time shift')
# plt.tight_layout()

# Make a fig to plot the true and inferred network
plt.ion()
fig = plt.figure(3)
ax_true = fig.add_subplot(1, 2, 1, aspect="equal")
ax_test = fig.add_subplot(1, 2, 2, aspect="equal")
A = true_model.adjacency
W = true_model.weights
W_total = W.sum(1)
true_model.network.plot_LatentDistanceModel(A, W, ax=ax_true)

# Create a test model for fitting
test_model = LatentDistanceWeightsSparseBernoulliGLM(N, basis=basis, regression_kwargs=dict(rho=0.7, S_w=1, mu_b=-2))
test_model.add_data(Y)

# Fit with Gibbs sampling
def _collect(m):
    # Snapshot of the quantities tracked at each Gibbs iteration.
    return m.log_likelihood(), m.weights, m.adjacency, m.biases, m.means[0], m.network.L, m.network.lpf

def _update(m, itr):
    # One Gibbs sweep, with a live plot of the current network estimate.
    m.resample_model()
    ax_test.cla()
    test_model.network.plot_LatentDistanceModel(m.adjacency, m.weights, ax=ax_test, L_true=true_model.network.L)
    print("Iteration ", itr)
    print("LP:", m.log_likelihood())
    plt.pause(0.001)
    return _collect(m)

N_samples = 2000
samples = []
for itr in progprint_xrange(N_samples):
    samples.append(_update(test_model, itr))

# Unpack the samples: transpose list-of-tuples into per-quantity arrays
samples = zip(*samples)
lps, W_smpls, A_smpls, b_smpls, fr_smpls, L_smples, lpf_smpls = tuple(map(np.array, samples))

# Plot the log likelihood per iteration
plt.figure(figsize=(4, 4))
plt.plot(lps)
plt.xlabel("Iteration")
plt.ylabel("Log Likelihood")
plt.tight_layout()

# Plot the posterior mean and variance (second half of chain = post burn-in)
W_mean = W_smpls[N_samples // 2:].mean(0)
A_mean = A_smpls[N_samples // 2:].mean(0)
fr_mean = fr_smpls[N_samples // 2:].mean(0)
fr_std = fr_smpls[N_samples // 2:].std(0)
plot_glm(Y, W_mean, A_mean, fr_mean, std_firingrates=3 * fr_std, title="Posterior Mean")

# plot true location and inferred location
from hips.plotting.colormaps import harvard_colors
from utils.utils import compute_optimal_rotation
color = harvard_colors()[0:10]
fig = plt.figure()
ax = fig.add_subplot(111, aspect="equal")
N_select = 10
N_id_select = np.random.permutation(N)[0:N_select]
k = 0
for i in N_id_select:
    # NOTE(review): this rotation/translation alignment sits inside the
    # per-neuron loop, so each sample in L_smples is re-aligned N_select
    # times in total -- confirm whether aligning once before the plotting
    # loop was the intent.
    for j in range(N_samples):
        R = compute_optimal_rotation(L_smples[j], true_model.network.L)
        L_smples[j] = L_smples[j].dot(R)
        # affine translation
        D_t = np.mean(L_smples[j], 0) - np.mean(true_model.network.L, 0)
        L_smples[j] = L_smples[j] - D_t
    ax.scatter(L_smples[N_samples // 2:, i, 0], L_smples[N_samples // 2:, i, 1], s=20, c=color[k])
    k += 1

# overlay the true locations as large stars
ax.scatter(true_model.network.L[N_id_select, 0], true_model.network.L[N_id_select, 1], s=300, c=color, edgecolor='0',
           lw=1, marker=(5, 1))
b = np.amax(abs(true_model.network.L)) + true_model.network.L[:].std() / 2.0

# Plot grids for origin
ax.plot([0, 0], [-b, b], ':k', lw=0.2)
ax.plot([-b, b], [0, 0], ':k', lw=0.2)

# Set the limits
ax.set_xlim([-b, b])
ax.set_ylim([-b, b])
ax.tick_params(axis='both', which='major', labelsize=16)

# Labels
ax.set_xlabel('Latent Dimension 1', fontsize=20)
ax.set_ylabel('Latent Dimension 2', fontsize=20)
plt.show()

plt.figure()
plt.plot(lpf_smpls[1:])

# Saving the objects:
#with open('TVpgGLM/results/sythetic_true_location1.pickle', 'wb') as f:
#    pickle.dump([lps, W, W_smpls, A_smpls, L_D, L_smples], f)
|
sheqi/TVpgGLM
|
demo/syn_LDM_weight_demo1.py
|
Python
|
mit
| 5,247
|
[
"NEURON"
] |
5dd8029d5fa7c69cda1904a5df5ea44d5e089417446d91f4c299c75be0870fd4
|
../../../../../../../share/pyshared/orca/scripts/apps/gnome-panel/speech_generator.py
|
Alberto-Beralix/Beralix
|
i386-squashfs-root/usr/lib/python2.7/dist-packages/orca/scripts/apps/gnome-panel/speech_generator.py
|
Python
|
gpl-3.0
| 85
|
[
"ORCA"
] |
a1465958cd28f83c1d0d89555efbdbd3a590b575fb7a0b1e0df1a18be0fca0c5
|
# coding: utf-8
from __future__ import division, unicode_literals
"""
This module implements input and output processing from PWSCF.
"""
__author__ = "Shyue Ping Ong"
__copyright__ = "Copyright 2012, The Materials Virtual Lab"
__version__ = "0.1"
__maintainer__ = "Shyue Ping Ong"
__email__ = "ongsp@ucsd.edu"
__date__ = "3/27/15"
import six
from monty.re import regrep
from collections import defaultdict
class PWInput(object):
    """
    Base input file class. Right now, only supports no symmetry and is
    very basic.
    """

    def __init__(self, structure, pseudo, control=None, system=None,
                 electrons=None, ions=None, cell=None, kpoints_mode="automatic",
                 kpoints_grid=(1, 1, 1), kpoints_shift=(0, 0, 0)):
        """
        Initializes a PWSCF input file.

        Args:
            structure (Structure): Input structure
            pseudo (dict): A dict of the pseudopotentials to use.
            control (dict): Control parameters. Refer to official PWSCF doc
                on supported parameters. Default to {"calculation": "scf"}
            system (dict): System parameters. Refer to official PWSCF doc
                on supported parameters. Default to None, which means {}.
            electrons (dict): Electron parameters. Refer to official PWSCF doc
                on supported parameters. Default to None, which means {}.
            ions (dict): Ions parameters. Refer to official PWSCF doc
                on supported parameters. Default to None, which means {}.
            cell (dict): Cell parameters. Refer to official PWSCF doc
                on supported parameters. Default to None, which means {}.
            kpoints_mode (str): Kpoints generation mode. Default to automatic.
            kpoints_grid (sequence): The kpoint grid. Default to (1, 1, 1).
            kpoints_shift (sequence): The shift for the kpoints. Defaults to
                (0, 0, 0).

        Raises:
            PWInputError: If a species in the structure has no entry in
                ``pseudo``.
        """
        self.structure = structure
        sections = {}
        sections["control"] = control or {"calculation": "scf"}
        sections["system"] = system or {}
        sections["electrons"] = electrons or {}
        sections["ions"] = ions or {}
        sections["cell"] = cell or {}
        for species in self.structure.composition.keys():
            if species.symbol not in pseudo:
                # Bug fix: the "%s" placeholder was previously never
                # interpolated, so the message did not name the species.
                raise PWInputError("Missing %s in pseudo specification!"
                                   % species.symbol)
        self.pseudo = pseudo
        self.sections = sections
        self.kpoints_mode = kpoints_mode
        self.kpoints_grid = kpoints_grid
        self.kpoints_shift = kpoints_shift

    def __str__(self):
        """Return the input file contents as a string in PWSCF format."""
        out = []

        def to_str(v):
            # Strings must be quoted inside PWSCF namelists.
            if isinstance(v, six.string_types):
                return "'%s'" % v
            return v

        # Namelist sections, in the order PWSCF expects them.
        for k1 in ["control", "system", "electrons", "ions", "cell"]:
            v1 = self.sections[k1]
            out.append("&%s" % k1.upper())
            sub = []
            for k2 in sorted(v1.keys()):
                sub.append(" %s = %s" % (k2, to_str(v1[k2])))
            if k1 == "system":
                # Derived from the structure; always written explicitly.
                sub.append(" ibrav = 0")
                sub.append(" nat = %d" % len(self.structure))
                sub.append(" ntyp = %d" % len(self.structure.composition))
            sub.append("/")
            out.append(",\n".join(sub))
        out.append("ATOMIC_SPECIES")
        # The composition values (amounts) are not needed here; iterate keys.
        for k in self.structure.composition.keys():
            out.append(" %s %.4f %s" % (k.symbol, k.atomic_mass,
                                        self.pseudo[k.symbol]))
        out.append("ATOMIC_POSITIONS crystal")
        for site in self.structure:
            out.append(" %s %.6f %.6f %.6f" % (site.specie.symbol, site.a,
                                               site.b, site.c))
        out.append("K_POINTS %s" % self.kpoints_mode)
        kpt_str = ["%s" % i for i in self.kpoints_grid]
        kpt_str.extend(["%s" % i for i in self.kpoints_shift])
        out.append(" %s" % " ".join(kpt_str))
        out.append("CELL_PARAMETERS angstrom")
        for vec in self.structure.lattice.matrix:
            out.append(" %f %f %f" % (vec[0], vec[1], vec[2]))
        return "\n".join(out)

    def write_file(self, filename):
        """
        Write the PWSCF input file.

        Args:
            filename (str): The string filename to output to.
        """
        with open(filename, "w") as f:
            f.write(self.__str__())
class PWInputError(Exception):
    """Raised for malformed or incomplete PWSCF input specifications.

    Derives from ``Exception`` (not ``BaseException``) so that generic
    ``except Exception`` handlers catch it, per standard Python practice.
    """
    pass
class PWOutput(object):
    """Parser for PWSCF output files.

    On construction the file is grepped once for ``patterns`` and the
    matches are stored, type-converted, in ``self.data``.
    """

    # Raw strings so regex escapes such as \s, \d and \( are not treated as
    # (invalid) Python string escapes. Pattern text is byte-identical to the
    # previous non-raw versions.
    patterns = {
        "energies": r"total energy\s+=\s+([\d\.\-]+)\sRy",
        "ecut": r"kinetic\-energy cutoff\s+=\s+([\d\.\-]+)\s+Ry",
        "lattice_type": r"bravais\-lattice index\s+=\s+(\d+)",
        "celldm1": r"celldm\(1\)=\s+([\d\.]+)\s",
        "celldm2": r"celldm\(2\)=\s+([\d\.]+)\s",
        "celldm3": r"celldm\(3\)=\s+([\d\.]+)\s",
        "celldm4": r"celldm\(4\)=\s+([\d\.]+)\s",
        "celldm5": r"celldm\(5\)=\s+([\d\.]+)\s",
        "celldm6": r"celldm\(6\)=\s+([\d\.]+)\s",
        "nkpts": r"number of k points=\s+([\d]+)"
    }

    def __init__(self, filename):
        """
        Args:
            filename (str): Path to the PWSCF output file to parse.
        """
        self.filename = filename
        self.data = defaultdict(list)
        self.read_pattern(PWOutput.patterns)
        # Convert the raw regrep matches (lists of lists of strings) into
        # proper numeric types. Keys are not added/removed, so mutating
        # values while iterating items() is safe.
        for k, v in self.data.items():
            if k == "energies":
                # All total energies, in order of appearance in the file.
                self.data[k] = [float(i[0][0]) for i in v]
            elif k in ["lattice_type", "nkpts"]:
                self.data[k] = int(v[0][0][0])
            else:
                self.data[k] = float(v[0][0][0])

    def read_pattern(self, patterns, reverse=False,
                     terminate_on_match=False, postprocess=str):
        r"""
        General pattern reading. Uses monty's regrep method. Takes the same
        arguments.

        Args:
            patterns (dict): A dict of patterns, e.g.,
                {"energy": r"energy\(sigma->0\)\s+=\s+([\d\-\.]+)"}.
            reverse (bool): Read files in reverse. Defaults to false. Useful
                for large files, esp OUTCARs, especially when used with
                terminate_on_match.
            terminate_on_match (bool): Whether to terminate when there is at
                least one match in each key in pattern.
            postprocess (callable): A post processing function to convert all
                matches. Defaults to str, i.e., no change.

        Renders accessible:
            Any attribute in patterns. For example,
            {"energy": r"energy\(sigma->0\)\s+=\s+([\d\-\.]+)"} will set the
            value of self.data["energy"] = [[-1234], [-3453], ...], to the
            results from regex and postprocess. Note that the returned
            values are lists of lists, because you can grep multiple
            items on one line.
        """
        matches = regrep(self.filename, patterns, reverse=reverse,
                         terminate_on_match=terminate_on_match,
                         postprocess=postprocess)
        self.data.update(matches)

    def get_celldm(self, i):
        """Return celldm(i) for i in 1..6, as parsed from the output."""
        return self.data["celldm%d" % i]

    @property
    def final_energy(self):
        """The last (final) total energy found in the output, in Ry."""
        return self.data["energies"][-1]

    @property
    def lattice_type(self):
        """The bravais-lattice index reported by PWSCF."""
        return self.data["lattice_type"]
if __name__ == "__main__":
    # Quick manual check: parse a sample PWSCF output and show the
    # extracted data and the final energy.
    pw_out = PWOutput("../../test_files/Si.pwscf.out")
    print(pw_out.data)
    print(pw_out.final_energy)
|
rousseab/pymatgen
|
pymatgen/io/pwscf.py
|
Python
|
mit
| 7,309
|
[
"CRYSTAL"
] |
6627dbb8ac07ab7d11b090c79b0dcd0fb9826def8da9efcdfee5b5369828f5a1
|
#!/usr/bin/env python3
# -*- coding: utf-8 -*-
__author__ = '7sDream'
import os
import shutil
from datetime import datetime
from zhihu import ZhihuClient, ActType, CollectActType, ANONYMOUS
def test_question():
    """Smoke-test the Question API against live zhihu.com data.

    Uses the module-level ``client``. Numeric asserts are historical
    snapshots / lower bounds and may need updating as the site changes.
    """
    url = 'http://www.zhihu.com/question/24825703'
    question = client.question(url)
    # question title
    print(question.title)
    # 亲密关系之间要说「谢谢」吗?
    # detailed description of the question
    print(question.details)
    # 从小父母和大家庭里,.......什么时候不该说"谢谢”??
    # number of answers
    print(question.answer_num)
    # 630
    # number of users following the question
    print(question.follower_num)
    # 4326
    # first ten followers
    for _, follower in zip(range(10), question.followers):
        print(follower.name)
    # J Drop
    # 熊猫
    # Steve He
    # ...
    # topics this question belongs to
    for topic in question.topics:
        print(topic.name)
    # '心理学', '恋爱', '社会', '礼仪', '亲密关系'
    # upvote count of the top-ranked answer
    print(question.top_answer.upvote_num)
    # 197
    # the top ten answers
    for answer in question.top_i_answers(10):
        print(answer.author.name, answer.upvote_num, answer.author.motto)
    # 49
    # 89
    # 425
    # ...
    # creation time of the question
    ctime = question.creation_time
    print(ctime)
    assert ctime == datetime.strptime('2014-08-12 17:58:07',
                                      "%Y-%m-%d %H:%M:%S")
    # last edit time
    last_edit_time = question.last_edit_time
    print(last_edit_time)
    assert last_edit_time >= datetime.strptime('2015-04-01 00:39:21',
                                               "%Y-%m-%d %H:%M:%S")
    # the asker of this question is anonymous
    assert question.author is ANONYMOUS
    question = client.question('https://www.zhihu.com/question/38531356')
    assert question.author.name == '杨捷'
    assert question.author.url == 'https://www.zhihu.com/people/yangjiePro/'
    question.refresh()
    # test again after refresh
    print(question.title)
    print(question.details)
    print(question.answer_num)
    print(question.follower_num)
    for _, follower in zip(range(10), question.followers):
        print(follower.name)
    for topic in question.topics:
        print(topic.name)
    print(question.last_edit_time)
    # with sort parameter, repeat above tests
    url = 'https://www.zhihu.com/question/24825703?sort=created'
    question = client.question(url)
    print(question.title)
    print(question.details)
    print(question.answer_num)
    print(question.follower_num)
    for _, follower in zip(range(10), question.followers):
        print(follower.name)
    for topic in question.topics:
        print(topic.name)
    print(question.top_answer.upvote_num)
    for answer in question.top_i_answers(10):
        print(answer.author.name, answer.upvote_num, answer.author.motto)
    ctime = question.creation_time
    print(ctime)
    assert ctime == datetime.strptime('2014-08-12 17:58:07',
                                      "%Y-%m-%d %H:%M:%S")
    last_edit_time = question.last_edit_time
    print(last_edit_time)
    assert last_edit_time >= datetime.strptime('2015-04-01 00:39:21',
                                               "%Y-%m-%d %H:%M:%S")
    assert question.author is ANONYMOUS
    question = client.question('https://www.zhihu.com/question/38531356')
    assert question.author.name == '杨捷'
    assert question.author.url == 'https://www.zhihu.com/people/yangjiePro/'
    question.refresh()
    # test again
    print(question.title)
    print(question.details)
    print(question.answer_num)
    print(question.follower_num)
    for _, follower in zip(range(10), question.followers):
        print(follower.name)
    for topic in question.topics:
        print(topic.name)
    print(question.last_edit_time)
    # test fetching all answers with the sort parameter
    question = client.question(
        'https://www.zhihu.com/question/27459050?sort=created')
    count = 0
    for answer in question.answers:
        count += 1
        print(answer.author.name, answer.upvote_num, answer.author.motto)
    assert count >= 83
    assert question.deleted is False
    # test deleted question
    url = 'https://www.zhihu.com/question/39416522'
    question = client.question(url)
    assert question.deleted is True
    # test question without any answer
    url = 'https://www.zhihu.com/question/36358828?sort=created'
    question = client.question(url)
    assert len(list(question.answers)) == 0
    # test answers that fit in a single page (< 20)
    url = 'https://www.zhihu.com/question/28330796?sort=created'
    question = client.question(url)
    assert len(list(question.answers)) >= 7
def test_answer():
    """Smoke-test the Answer API: metadata, upvoters, comments, saving.

    Uses the module-level ``client``; asserted values are snapshots or
    lower bounds from live data.
    """
    url = 'http://www.zhihu.com/question/24825703/answer/30975949'
    answer = client.answer(url)
    assert answer.deleted is False
    # answer URL
    print(answer.url)
    # title of the question this answer belongs to
    print(answer.question.title)
    # 关系亲密的人之间要说「谢谢」吗?
    # author name
    print(answer.author.name)
    # 甜阁下
    # upvote count
    print(answer.upvote_num)
    # 1155
    # upvoters with their thank/upvote/question/answer counts
    for _, upvoter in zip(range(10), answer.upvoters):
        print(upvoter.name, upvoter.upvote_num, upvoter.thank_num,
              upvoter.question_num, upvoter.answer_num, upvoter.is_zero_user())
    # ...
    # 五月 42 15 3 35
    # 陈半边 6311 1037 3 101
    # 刘柯 3107 969 273 36
    #
    # 三零用户比例 36.364%
    print(answer.comment_num)
    assert answer.comment_num >= 161
    # comments under the answer, oldest first
    for i, comment in enumerate(answer.comments, 1):
        if i == 1:
            assert comment.creation_time == datetime(2014, 9, 25, 9, 18, 56)
        if i < 11:
            print(comment.author.name, comment.content)
    assert i >= 161
    # latest comments, newest first; after the loop `comment` is the oldest
    # comment, so its creation time equals the first comment's above
    for i, comment in enumerate(answer.latest_comments, 1):
        if i == 1:
            assert comment.creation_time >= datetime(2015, 9, 21, 19, 50, 42)
        if i < 11:
            print(comment.author.name, comment.content)
    assert comment.creation_time == datetime(2014, 9, 25, 9, 18, 56)
    assert i >= 161
    # HTML content of the answer
    print(answer.content)
    # <html>
    # ...
    # </html>
    # creation time of the answer
    print(answer.creation_time)
    assert answer.creation_time == datetime.fromtimestamp(1411567255)
    # number of times this answer was added to collections
    print(answer.collect_num)
    assert answer.collect_num >= 821
    # collections that contain this answer
    for _, collection in zip(range(10), answer.collections):
        print(collection.url, collection.name, collection.owner,
              collection.follower_num)
    # save as HTML; creates "亲密关系之间要说「谢谢」吗? - 甜阁下.html"
    answer.save(filepath='.')
    # save as markdown; creates "亲密关系之间要说「谢谢」吗? - 甜阁下.md"
    answer.save(filepath='.', mode="md")
    answer.refresh()
    # test again after refresh
    print(answer.upvote_num)
    print(answer.content)
    print(answer.collect_num)
    print(answer.comment_num)
    assert answer.deleted is False
    # test deleted answer
    url = 'https://www.zhihu.com/question/40185501/answer/85271078'
    answer = client.answer(url)
    assert answer.deleted is True
    answer.refresh()
    assert answer.deleted is True
    # test answer that belongs to no collection
    url = 'https://www.zhihu.com/question/23138285/answer/81246171'
    answer = client.answer(url)
    assert answer.collect_num == 0
    assert sum(1 for _ in answer.collections) == 0
    # test answer with zero comments
    url = 'https://www.zhihu.com/question/39051779/answer/81575803'
    answer = client.answer(url)
    assert sum(1 for _ in answer.comments) == 0
    assert sum(1 for _ in answer.latest_comments) == 0
    # test answer whose comments fit in a single page
    url = 'https://www.zhihu.com/question/28399220/answer/79799671'
    answer = client.answer(url)
    assert 0 < sum(1 for _ in answer.comments) < 30
def test_author():
    """Smoke-test the Author API: activities, profile fields, relations.

    Uses the module-level ``client``; sample values in comments are
    historical snapshots from live data.
    """
    url = 'http://www.zhihu.com/people/7sdream'
    author = client.author(url)
    # recent activities, dispatched on the activity type
    for _, act in zip(range(30), author.activities):
        print(act.content.url)
        if act.type == ActType.FOLLOW_COLUMN:
            print('%s 在 %s 关注了专栏 %s' %
                  (author.name, act.time, act.column.name))
        elif act.type == ActType.FOLLOW_QUESTION:
            print('%s 在 %s 关注了问题 %s' %
                  (author.name, act.time, act.question.title))
        elif act.type == ActType.ASK_QUESTION:
            print('%s 在 %s 提了个问题 %s' %
                  (author.name, act.time, act.question.title))
        elif act.type == ActType.UPVOTE_POST:
            # end='' so the optional column note prints on the same line
            print('%s 在 %s 赞同了%s 的文章 %s, '
                  '此文章赞同数 %d, 评论数 %d' %
                  (author.name, act.time, act.post.author.name, act.post.title,
                   act.post.upvote_num, act.post.comment_num), end='')
            if act.post.column:
                print(",此文章属于专栏 %s" % act.post.column.name)
            else:
                print()
        elif act.type == ActType.PUBLISH_POST:
            print('%s 在 %s 在专栏 %s 中发布了文章 %s, '
                  '此文章赞同数 %d, 评论数 %d' %
                  (author.name, act.time, act.post.column.name,
                   act.post.title, act.post.upvote_num,
                   act.post.comment_num))
        elif act.type == ActType.UPVOTE_ANSWER:
            print('%s 在 %s 赞同了问题 %s 中 %s(motto: %s) 的回答, '
                  '此回答赞同数 %d' %
                  (author.name, act.time, act.answer.question.title,
                   act.answer.author.name, act.answer.author.motto,
                   act.answer.upvote_num))
        elif act.type == ActType.ANSWER_QUESTION:
            print('%s 在 %s 回答了问题 %s 此回答赞同数 %d' %
                  (author.name, act.time, act.answer.question.title,
                   act.answer.upvote_num))
        elif act.type == ActType.FOLLOW_TOPIC:
            print('%s 在 %s 关注了话题 %s' %
                  (author.name, act.time, act.topic.name))
        elif act.type == ActType.FOLLOW_COLLECTION:
            print('%s 在 %s 关注了收藏夹 %s' %
                  (author.name, act.time, act.collection.name))
    # user name
    print(author.name)
    # 7sDream
    # user motto / bio
    print(author.motto)
    # 二次元新居民/软件爱好者/零回答消灭者
    # avatar URL
    print(author.photo_url)
    # http://pic3.zhimg.com/893fd554c8aa57196d5ab98530ef479a_r.jpg
    # upvotes received
    print(author.upvote_num)
    # 1338
    # thanks received
    print(author.thank_num)
    # 468
    # number of users this user follows
    print(author.followee_num)
    # 82
    # followees
    for _, followee in zip(range(10), author.followees):
        print(followee.name)
    # yuwei
    # falling
    # 伍声
    # bhuztez
    # 段晓晨
    # 冯东
    # ...
    # number of followers
    print(author.follower_num)
    # 303
    # followers
    for _, follower in zip(range(10), author.followers):
        print(follower.name)
    # yuwei
    # falling
    # 周非
    # 陈泓瑾
    # O1Operator
    # ...
    # number of questions asked
    print(author.question_num)
    # 16
    # titles of the questions asked
    for _, question in zip(range(10), author.questions):
        print(question.title)
    # 用户「松阳先生」的主页出了什么问题?
    # C++运算符重载在头文件中应该如何定义?
    # 亚马逊应用市场的应用都是正版的吗?
    # ...
    # number of answers written
    print(author.answer_num)
    # 247
    # answers written
    for _, answer in zip(range(10), author.answers):
        print(answer.upvote_num)
    # 0
    # 5
    # 12
    # 0
    # ...
    # weibo URL
    print(author.weibo_url)
    # business / industry
    print(author.business)
    # location
    print(author.location)
    # education
    print(author.education)
    # gender
    print(author.gender)
    # last activity time
    print(author.last_activity_time)
    # number of posts written
    print(author.post_num)
    # 7
    # columns owned by the user
    for column in author.columns:
        print(column.name)
    # 科学の禁书目录
    # number of collections
    print(author.collection_num)
    # 3
    # collection names
    for collection in author.collections:
        print(collection.name)
    # 教学精品。
    # 可以留着慢慢看~
    # OwO
    # 一句。
    # Read it later
    # number of followed columns
    print(author.followed_column_num)
    # 9
    # followed columns
    for column in author.followed_columns:
        print(column.name, column.url, column.follower_num)
    # 黑客与画家 http://zhuanlan.zhihu.com/hacker-and-painter/ 9743
    # piapia安全 http://zhuanlan.zhihu.com/ppsec/ 566
    # Android 科学院 http://zhuanlan.zhihu.com/andlib/ 4347
    # ...
    # number of followed topics
    print(author.followed_topic_num)
    # 30
    # followed topics
    for topic in author.followed_topics:
        print(topic.name, topic.url)
    # Python http://www.zhihu.com/topic/19552832/
    # 计算机 http://www.zhihu.com/topic/19555547/
    # 生活 http://www.zhihu.com/topic/19551147/
def test_collection():
    """Smoke-test the Collection API: metadata, followers, contents, logs."""
    url = 'http://www.zhihu.com/collection/28698204'
    collection = client.collection(url)
    # collection id
    print(collection.id)
    # collection name
    print(collection.name)
    # 可以用来背的答案
    # number of followers
    print(collection.follower_num)
    # 6343
    # followers
    for _, follower in zip(range(10), collection.followers):
        print(follower.name)
    # 花椰菜
    # 邱火羽白
    # 枫丹白露
    # ...
    # owner name
    print(collection.owner.name)
    # 树叶
    # upvote counts of the answers in the collection
    for _, answer in zip(range(10), collection.answers):
        print(answer.upvote_num)
    # 2561
    # 535
    # 223
    # ...
    # titles of the questions in the collection
    for _, question in zip(range(10), collection.questions):
        print(question.title)
    # 如何完成标准的平板支撑?
    # 有没有适合 Android 开发初学者的 App 源码推荐?
    # 如何挑逗女朋友?
    # 有哪些计算机的书适合推荐给大一学生?
    # ...
    # collection activity log; the final entry is the creation record,
    # which carries no answer — TODO confirm log ordering against caller
    log = None
    for log in collection.logs:
        if log.type != CollectActType.CREATE_COLLECTION:
            print(log.type, log.time, log.answer.question.title, log.owner.name)
        else:
            print(log.type, log.time, log.owner.name, "create the collection")
    assert log.answer is None
    assert log.time == datetime.strptime('2013-11-08 00:55:43',
                                         "%Y-%m-%d %H:%M:%S")
def test_column():
    """Smoke-test the Column API: metadata and posts."""
    url = 'http://zhuanlan.zhihu.com/xiepanda'
    column = client.column(url)
    # column name
    print(column.name)
    # 谢熊猫出没注意
    # number of followers
    print(column.follower_num)
    # 73570
    # number of posts
    print(column.post_num)
    # 68
    # titles of the first ten posts
    for _, post in zip(range(10), column.posts):
        print(post.title)
    # 伦敦,再见。London, Pride.
    # 为什么你来到伦敦?——没有抽到h1b
    # “城邦之国”新加坡强在哪?
    # ...
def test_post():
    """Smoke-test the Post API: metadata, upvoters, saving."""
    url = 'http://zhuanlan.zhihu.com/p/20569063'
    post = client.post(url)
    # post URL
    print(post.url)
    # post title
    print(post.title)
    # 为什么最近有很多名人,比如比尔盖茨,马斯克、霍金等,让人们警惕人工智能?
    # name of the column it belongs to
    print(post.column.name)
    # 谢熊猫出没注意
    # author name
    print(post.author.name)
    # 谢熊猫君
    # upvote count
    print(post.upvote_num)
    # 18491
    # comment count
    print(post.comment_num)
    # 1748
    # upvoters
    for _, upvoter in zip(range(10), post.upvoters):
        print(upvoter.name)
    # 喵喵呜
    # haku asaya
    # 会飞的驴
    # ...
    # save as markdown; creates in the current directory:
    # 为什么最近有很多名人,比如比尔盖茨,马斯克、霍金等,让人们警惕人工智能? - 谢熊猫君.md
    post.save(filepath='.')
def test_topic():
    """Smoke-test the Topic API: metadata, relations, question/answer feeds."""
    url = 'https://www.zhihu.com/topic/19552330'
    topic = client.topic(url)
    # topic URL
    print(topic.url)
    # http://www.zhihu.com/topic/19947695/
    # topic name
    print(topic.name)
    # 深网(Deep Web)
    # topic description
    print(topic.description)
    # 暗网(英语:Deep Web,又称深网、不可见网、隐藏网)
    # topic avatar URL
    print(topic.photo_url)
    # http://pic1.zhimg.com/a3b0d77c052e45399da1fe26fb4c9734_r.jpg
    # number of followers
    print(topic.follower_num)
    # 3309
    # followers
    for _, follower in zip(range(10), topic.followers):
        print(follower.name, follower.motto)
    # 韦小宝 韦小宝
    # 吉陆遥 吉陆遥
    # qingyuan ma qingyuan ma
    # ...
    # parent topics
    for _, parent in zip(range(10), topic.parents):
        print(parent.name)
    # 互联网
    # child topics
    for _, child in zip(range(20), topic.children):
        print(child.name)
    # Tor
    # best answerers of the topic
    for _, author in zip(range(10), topic.top_authors):
        print(author.name, author.motto)
    # Ben Chen
    # acel rovsion 我只不过规范基点上的建构论者
    # 沈万马
    # ...
    # featured (top) answers under the topic
    for _, ans in zip(range(10), topic.top_answers):
        print(ans.question.title, ans.author.name, ans.upvote_num)
    # 《纸牌屋》中提到的深网 (Deep Web) 是什么? Ben Chen 2956
    # 黑客逃避追踪,为什么要用虚拟机 + TOR + VPN 呢? Ario 526
    # 《纸牌屋》中提到的深网 (Deep Web) 是什么? acel rovsion 420
    # ...
    # all questions, newest first
    for _, question in zip(range(10), topic.questions):
        print(question.title)
        assert question.creation_time is not None
        print(question.creation_time)
    # 马里亚纳网络存在吗?
    # 玛丽亚纳网络存在吗?
    # 为什么暗网里这么多违法的东西FBI不顺藤摸瓜呢?
    # ...
    # all answers, newest first
    for _, ans in zip(range(10), topic.answers):
        print(ans.question.title, ans.author.name, ans.upvote_num)
    # 如何用tor登陆qq? 匿名用户 0
    # 想看一下暗网(deep web),但是怕中病毒,所以有谁能发发截图?? tor 0
    # icq是什么 为什么暗网交流一般都用icq? tor 0
    # ...
    # hot questions, hottest first
    for _, q in zip(range(10), topic.hot_questions):
        print(q.title)
    # 《纸牌屋》中提到的深网 (Deep Web) 是什么?
    # 黑客逃避追踪,为什么要用虚拟机 + TOR + VPN 呢?
    # 微博热传的关于暗网的变态故事是真的还是假的啊?
    # ...
    # hot answers, hottest first
    for _, ans in zip(range(10), topic.hot_answers):
        print(ans.question.title, ans.author.name, ans.upvote_num)
    # 《纸牌屋》中提到的深网 (Deep Web) 是什么? Ben Chen 3006
    # 《纸牌屋》中提到的深网 (Deep Web) 是什么? 提刀夜行 123
    # 《纸牌屋》中提到的深网 (Deep Web) 是什么? 匿名用户 21
    # ...
    # questions still awaiting answers
    for _, q in zip(range(10), topic.unanswered_questions):
        print(q.title)
    # 如何用tor登陆qq? 匿名用户 0
    # 想看一下暗网(deep web),但是怕中病毒,所以有谁能发发截图?? tor 0
    # icq是什么 为什么暗网交流一般都用icq? tor 0
    # ...
def test_me():
    """Test authenticated, mutating operations (vote/thank/follow/...).

    Disabled by default: this function performs real, subjective actions
    (upvoting, following, commenting, messaging, blocking). Only enable it
    if you can pinpoint which line failed and manually roll back every
    action it already performed (undo upvotes, thanks, follows, ...).
    Fill in the URLs below, then un-comment the call in test().
    """
    answer = client.answer('')          # fill in an answer URL
    post = client.post('')              # fill in a post URL
    author = client.author('')          # fill in a user URL
    question = client.question('')      # fill in a question URL
    topic = client.topic('')            # fill in a topic URL
    collection = client.collection('')  # fill in a collection URL
    me = client.me()
    print('赞同答案...', end='')
    assert me.vote(answer, 'up')       # upvote
    assert me.vote(answer, 'down')     # downvote
    assert me.vote(answer, 'clear')    # clear the vote
    print('通过')
    print('感谢答案...', end='')
    assert me.thanks(answer)           # thank
    assert me.thanks(answer, False)    # revoke thank
    print('通过')
    print('赞同文章...', end='')
    assert me.vote(post, 'up')         # upvote
    assert me.vote(post, 'down')       # downvote
    assert me.vote(post, 'clear')      # clear the vote
    print('通过')
    print('关注用户...', end='')
    assert me.follow(author)           # follow
    assert me.follow(author, False)    # unfollow
    print('通过')
    print('关注问题...', end='')
    assert me.follow(question)         # follow
    assert me.follow(question, False)  # unfollow
    print('通过')
    print('关注话题...', end='')
    assert me.follow(topic)            # follow
    assert me.follow(topic, False)     # unfollow
    print('通过')
    print('关注收藏夹...', end='')
    assert me.follow(collection)         # follow
    assert me.follow(collection, False)  # unfollow
    print('通过')
    print('发送评论...', end='')
    assert me.add_comment(answer, 'test')   # post a comment
    print('通过')
    print('发送私信...', end='')
    assert me.send_message(author, 'test')  # send a private message
    print('通过')
    print('屏蔽用户...', end='')
    assert me.block(author, True)      # block
    assert me.block(author, False)     # unblock
    print('通过')
    print('屏蔽话题...', end='')
    assert me.block(topic, True)       # block
    assert me.block(topic, False)      # unblock
    print('通过')
    print('对问题点没有帮助...', end='')
    assert me.unhelpful(answer, True)   # mark as "not helpful"
    assert me.unhelpful(answer, False)  # unmark
    print('通过')
def test_anonymous():
    """Verify that anonymous users map to the ANONYMOUS singleton."""
    # anonymous asker
    url = 'https://www.zhihu.com/question/24825703'
    question = client.question(url)
    assert question.author is ANONYMOUS
    # anonymous answerer
    url = 'https://www.zhihu.com/question/24937466/answer/29661298'
    answer = client.answer(url)
    assert answer.author is ANONYMOUS
    # anonymous upvoters
    url = 'https://www.zhihu.com/question/39772334/answer/83106851'
    answer = client.answer(url)
    anonymous_upvote_count = 0
    for upvoter in answer.upvoters:
        if upvoter is ANONYMOUS:
            anonymous_upvote_count += 1
    assert anonymous_upvote_count >= 3
    # anonymous commenter (first comment)
    url = 'https://www.zhihu.com/question/37172453/answer/72350276'
    answer = client.answer(url)
    for i, comment in enumerate(answer.comments):
        if i == 0:
            assert comment.author is ANONYMOUS
    # anonymous question followers
    url = 'https://www.zhihu.com/question/37172453'
    question = client.question(url)
    anonymous_follower_count = 0
    for follower in question.followers:
        if follower is ANONYMOUS:
            anonymous_follower_count += 1
    assert anonymous_follower_count >= 3
def test_proxy():
    """Check that requests are routed through the configured proxy pool.

    The origin IP reported by httpbin must always be one of the pool IPs.
    """
    # visit http://cn-proxy.com/ to get available proxies if test failed
    proxy_ips = ['117.135.251.131', '117.135.251.134']
    client.set_proxy_pool(proxy_ips)
    for _ in range(5):
        result = client._session.get('http://httpbin.org/ip').json()
        assert result['origin'] in proxy_ips
        result = client._session.post('http://httpbin.org/post',
                                      data={'m': '1'}).json()
        assert result['form'] == {'m': '1'}
        assert result['origin'] in proxy_ips
    client.remove_proxy_pool()
    # same checks with an HTTP-only pool
    client.set_proxy_pool(proxy_ips, https=False)
    for _ in range(5):
        result = client._session.get('http://httpbin.org/ip').json()
        assert result['origin'] in proxy_ips
def test():
    """Run all enabled test cases in sequence.

    test_proxy (needs live proxies) and test_me (performs real mutations;
    see its docstring) stay opted out by default.
    """
    test_question()
    test_answer()
    test_author()
    test_collection()
    test_column()
    test_post()
    test_topic()
    test_anonymous()
    # test_proxy()
    # test_me()
if __name__ == '__main__':
    # Test harness entry point: prepare a clean working directory, obtain a
    # logged-in client (from a cached cookies file if present), run the
    # suite under timeit, and always clean up afterwards.
    COOKIES_FILE = 'test.json'
    BASE_DIR = os.path.dirname(os.path.realpath(__file__))
    TEST_DIR = os.path.join(BASE_DIR, 'test')
    print("Test dir: ", TEST_DIR)
    if os.path.exists(TEST_DIR):
        print("Cleaning it...", end='')
        shutil.rmtree(TEST_DIR)
        print("Done")
    else:
        print("Test dir not exist.")
    os.chdir(BASE_DIR)
    if os.path.isfile(COOKIES_FILE):
        print("Cookies file found.")
        client = ZhihuClient(COOKIES_FILE)
    else:
        # No cached session: interactive login, then cache the cookies.
        print("Cookies file not exist, please login...")
        client = ZhihuClient()
        cookies_str = client.login_in_terminal()
        with open(COOKIES_FILE, 'w') as f:
            f.write(cookies_str)
    print("Making test dir...", end="")
    os.mkdir(TEST_DIR)
    print("Done", end="\n\n")
    os.chdir(TEST_DIR)
    print("===== test start =====")
    import timeit
    try:
        time = timeit.timeit(
            'test()', setup='from __main__ import test', number=1)
        print('===== test passed =====')
        print('no error happen')
        print('time used: {0} ms'.format(time * 1000))
    except Exception:
        print('===== test failed =====')
        # Bug fix: use a bare `raise` instead of `raise e` to re-raise the
        # active exception with its original traceback intact.
        raise
    finally:
        # Always remove the scratch directory, pass or fail.
        os.chdir(BASE_DIR)
        print("Cleaning...", end='')
        shutil.rmtree(TEST_DIR)
        print("Done")
|
7sDream/zhihu-py3
|
test/zhihu-test.py
|
Python
|
mit
| 26,626
|
[
"VisIt"
] |
5bd4c4fb51e45f019c00f5251dfd7d84b98a7663474489438a5dfe5f51a9d856
|
# -*- coding: utf-8 -*-
"""
sphinx.writers.texinfo
~~~~~~~~~~~~~~~~~~~~~~
Custom docutils writer for Texinfo.
:copyright: Copyright 2007-2011 by the Sphinx team, see AUTHORS.
:license: BSD, see LICENSE for details.
"""
import re
import string
import textwrap
from os import path
from docutils import nodes, writers
from sphinx import addnodes, __version__
from sphinx.locale import versionlabels, _
from sphinx.util import ustrftime
from sphinx.writers.latex import collected_footnote
# Boilerplate for the @copying block of the generated Texinfo document,
# filled in via %-interpolation with the collected elements.
COPYING = """\
@quotation
%(project)s %(release)s, %(date)s
%(author)s
Copyright @copyright{} %(copyright)s
@end quotation
"""

# Skeleton of the complete Texinfo document. Interpolated with the
# `elements` dict assembled by TexinfoTranslator (note the doubled %% for
# literal percent signs in Texinfo comments).
TEMPLATE = """\
\\input texinfo @c -*-texinfo-*-
@c %%**start of header
@setfilename %(filename)s
@documentencoding UTF-8
@ifinfo
@*Generated by Sphinx """ + __version__ + """.@*
@end ifinfo
@settitle %(title)s
@defindex ge
@paragraphindent %(paragraphindent)s
@exampleindent %(exampleindent)s
@afourlatex
%(direntry)s
@c %%**end of header
@copying
%(copying)s
@end copying
@titlepage
@title %(title)s
@insertcopying
@end titlepage
@contents
@c %%** start of user preamble
%(preamble)s
@c %%** end of user preamble
@ifnottex
@node Top
@top %(title)s
@insertcopying
@end ifnottex
@c %%**start of body
%(body)s
@c %%**end of body
@bye
"""
def find_subsections(section):
    """Return a list of subsections for the given ``section``.

    Descends recursively through non-section children, so sections that
    are nested inside intermediate nodes are still found.
    """
    found = []
    for child in section.children:
        if isinstance(child, nodes.section):
            found.append(child)
        else:
            found.extend(find_subsections(child))
    return found
class TexinfoWriter(writers.Writer):
    """Texinfo writer for generating Texinfo documents."""
    supported = ('texinfo', 'texi')
    # docutils settings spec: (group title, description, option tuples)
    settings_spec = (
        'Texinfo Specific Options', None, (
            ("Name of the Info file", ['--texinfo-filename'], {'default': ''}),
            ('Dir entry', ['--texinfo-dir-entry'], {'default': ''}),
            ('Description', ['--texinfo-dir-description'], {'default': ''}),
            ('Category', ['--texinfo-dir-category'], {'default':
                                                      'Miscellaneous'})))
    settings_defaults = {}
    output = None
    # attributes copied from the visitor onto the writer after translation
    visitor_attributes = ('output', 'fragment')

    def __init__(self, builder):
        # builder: the Sphinx Texinfo builder driving this writer
        writers.Writer.__init__(self)
        self.builder = builder

    def translate(self):
        """Walk the document tree and render it to Texinfo."""
        self.visitor = visitor = TexinfoTranslator(self.document, self.builder)
        self.document.walkabout(visitor)
        visitor.finish()
        # expose the visitor's results (output, fragment) on the writer
        for attr in self.visitor_attributes:
            setattr(self, attr, getattr(visitor, attr))
class TexinfoTranslator(nodes.NodeVisitor):
    """Docutils visitor that renders a document tree as Texinfo."""

    # If True, missing image references are silently ignored.
    ignore_missing_images = False

    # Default values for the %-interpolation of TEMPLATE/COPYING;
    # refined per-document in init_settings().
    default_elements = {
        'author': '',
        'body': '',
        'copying': '',
        'date': '',
        'direntry': '',
        'exampleindent': 4,
        'filename': '',
        'paragraphindent': 2,
        'preamble': '',
        'project': '',
        'release': '',
        'title': '',
    }
    def __init__(self, document, builder):
        """Pre-scan the document and set up the translation state.

        document: the docutils document tree to translate
        builder: the Sphinx Texinfo builder
        """
        nodes.NodeVisitor.__init__(self, document)
        self.builder = builder
        self.init_settings()

        self.written_ids = set()     # node names and anchors in output
        self.referenced_ids = set()  # node names and anchors that should
                                     # be in output
        self.indices = []            # (node name, content)
        self.short_ids = {}          # anchors --> short ids
        self.node_names = {}         # node name --> node's name to display
        self.node_menus = {}         # node name --> node's menu entries
        self.rellinks = {}           # node name --> (next, previous, up)

        # Pre-scan passes; each populates one or more of the maps above.
        self.collect_indices()
        self.collect_node_names()
        self.collect_node_menus()
        self.collect_rellinks()

        # Mutable state used during the actual visit.
        self.body = []
        self.context = []
        self.previous_section = None
        self.section_level = 0
        self.seen_title = False
        self.next_section_ids = set()
        self.escape_newlines = 0
        self.curfilestack = []
        self.footnotestack = []
        self.in_footnote = 0
        self.handled_abbrs = set()
    def finish(self):
        """Emit trailing output and assemble the final Texinfo document."""
        # A document with no sections still needs a menu for the Top node.
        if self.previous_section is None:
            self.add_menu('Top')
        # Append each collected index as an unnumbered node.
        for index in self.indices:
            name, content = index
            pointers = tuple([name] + self.rellinks[name])
            self.body.append('\n@node %s,%s,%s,%s\n' % pointers)
            self.body.append('@unnumbered %s\n\n%s\n' % (name, content))
        while self.referenced_ids:
            # handle xrefs with missing anchors: emit a dummy anchor so
            # the cross reference still resolves
            r = self.referenced_ids.pop()
            if r not in self.written_ids:
                self.body.append('@anchor{%s}@w{%s}\n' % (r, ' ' * 30))
        self.fragment = ''.join(self.body).strip() + '\n'
        self.elements['body'] = self.fragment
        self.output = TEMPLATE % self.elements
## Helper routines
    def init_settings(self):
        """Build ``self.elements``, the interpolation dict for TEMPLATE.

        Starts from ``default_elements`` and fills in values from the
        docutils settings and the Sphinx builder config; user-provided
        ``texinfo_elements`` override everything at the end.
        """
        settings = self.settings = self.document.settings
        elements = self.elements = self.default_elements.copy()
        elements.update({
            # if empty, the title is set to the first section title
            'title': settings.title,
            'author': settings.author,
            # if empty, use basename of input file
            'filename': settings.texinfo_filename,
            'release': self.escape(self.builder.config.release),
            'project': self.escape(self.builder.config.project),
            'copyright': self.escape(self.builder.config.copyright),
            'date': self.escape(self.builder.config.today or
                                ustrftime(self.builder.config.today_fmt
                                          or _('%B %d, %Y')))
        })
        # title: fall back to the document's first title node
        title = elements['title']
        if not title:
            title = self.document.next_node(nodes.title)
            title = (title and title.astext()) or '<untitled>'
        elements['title'] = self.escape_id(title) or '<untitled>'
        # filename: derive from the source file, forcing a .info suffix
        if not elements['filename']:
            elements['filename'] = self.document.get('source') or 'untitled'
            if elements['filename'][-4:] in ('.txt', '.rst'):
                elements['filename'] = elements['filename'][:-4]
            elements['filename'] += '.info'
        # direntry: only emitted when a dir entry name was configured
        if settings.texinfo_dir_entry:
            entry = self.format_menu_entry(
                self.escape_menu(settings.texinfo_dir_entry),
                '(%s)' % elements['filename'],
                self.escape_arg(settings.texinfo_dir_description))
            elements['direntry'] = ('@dircategory %s\n'
                                    '@direntry\n'
                                    '%s'
                                    '@end direntry\n') % (
                self.escape_id(settings.texinfo_dir_category), entry)
        elements['copying'] = COPYING % elements
        # allow the user to override them all
        elements.update(settings.texinfo_elements)
    def collect_node_names(self):
        """Generates a unique id for each section.

        Assigns the attribute ``node_name`` to each section."""
        # must have a "Top" node
        self.document['node_name'] = 'Top'
        self.node_names['Top'] = 'Top'
        self.written_ids.update(('Top', 'top'))
        # each index is a node
        for name, content in self.indices:
            self.node_names[name] = name
            self.written_ids.add(name)
        # each section is also a node
        for section in self.document.traverse(nodes.section):
            title = section.next_node(nodes.Titular)
            name = (title and title.astext()) or '<untitled>'
            node_id = self.escape_id(name) or '<untitled>'
            assert node_id and name
            # disambiguate duplicate titles with a <n> suffix
            nth, suffix = 1, ''
            while node_id + suffix in self.written_ids:
                nth += 1
                suffix = '<%s>' % nth
            node_id += suffix
            assert node_id not in self.node_names
            assert node_id not in self.written_ids
            section['node_name'] = node_id
            self.node_names[node_id] = name
            self.written_ids.add(node_id)
    def collect_node_menus(self):
        """Collect the menu entries for each "node" section."""
        node_menus = self.node_menus
        for node in ([self.document] +
                     self.document.traverse(nodes.section)):
            assert 'node_name' in node and node['node_name']
            entries = [s['node_name'] for s in find_subsections(node)]
            node_menus[node['node_name']] = entries
        # try to find a suitable "Top" node: promote the section holding
        # the document title to Top, merging its menu with the document's
        title = self.document.next_node(nodes.title)
        top = (title and title.parent) or self.document
        if not isinstance(top, (nodes.document, nodes.section)):
            top = self.document
        if top is not self.document:
            entries = node_menus[top['node_name']]
            entries += node_menus['Top'][1:]
            node_menus['Top'] = entries
            del node_menus[top['node_name']]
            top['node_name'] = 'Top'
        # handle the indices: leaf nodes appended to the Top menu
        for name, content in self.indices:
            node_menus[name] = ()
            node_menus['Top'].append(name)
    def collect_rellinks(self):
        """Collect the relative links (next, previous, up) for each "node"."""
        rellinks = self.rellinks
        node_menus = self.node_menus
        # each value is a 3-item list: [next, previous, up]
        for id, entries in node_menus.items():
            rellinks[id] = ['', '', '']
        # up's
        for id, entries in node_menus.items():
            for e in entries:
                rellinks[e][2] = id
        # next's and prev's
        for id, entries in node_menus.items():
            # NOTE: 'id' is deliberately rebound to the child entry here
            for i, id in enumerate(entries):
                # First child's prev is empty
                if i != 0:
                    rellinks[id][1] = entries[i-1]
                # Last child's next is empty
                if i != len(entries) - 1:
                    rellinks[id][0] = entries[i+1]
        # top's next is its first child
        try:
            first = node_menus['Top'][0]
        except IndexError:
            pass
        else:
            rellinks['Top'][0] = first
            rellinks[first][1] = 'Top'
## Escaping
# Which characters to escape depends on the context. In some cases,
# namely menus and node names, it's not possible to escape certain
# characters.
def escape(self, s):
"""Return a string with Texinfo command characters escaped."""
s = s.replace('@', '@@')
s = s.replace('{', '@{')
s = s.replace('}', '@}')
# prevent `` and '' quote conversion
s = s.replace('``', "`@w{`}")
s = s.replace("''", "'@w{'}")
# prevent "--" from being converted to an "em dash"
# s = s.replace('-', '@w{-}')
return s
def escape_arg(self, s):
"""Return an escaped string suitable for use as an argument
to a Texinfo command."""
s = self.escape(s)
# commas are the argument delimeters
s = s.replace(',', '@comma{}')
# normalize white space
s = ' '.join(s.split()).strip()
return s
def escape_id(self, s):
"""Return an escaped string suitable for node names and anchors."""
bad_chars = ',:.()'
for bc in bad_chars:
s = s.replace(bc, ' ')
s = ' '.join(s.split()).strip()
return self.escape(s)
def escape_menu(self, s):
"""Return an escaped string suitable for menu entries."""
s = self.escape_arg(s)
s = s.replace(':', ';')
s = ' '.join(s.split()).strip()
return s
def ensure_eol(self):
"""Ensure the last line in body is terminated by new line."""
if self.body and self.body[-1][-1:] != '\n':
self.body.append('\n')
def format_menu_entry(self, name, node_name, desc):
if name == node_name:
s = '* %s:: ' % (name,)
else:
s = '* %s: %s. ' % (name, node_name)
offset = max((24, (len(name) + 4) % 78))
wdesc = '\n'.join(' ' * offset + l for l in
textwrap.wrap(desc, width=78-offset))
return s + wdesc.strip() + '\n'
    def add_menu_entries(self, entries, reg=re.compile(r'\s+---?\s+')):
        """Append a formatted menu line for each node name in *entries*."""
        for entry in entries:
            name = self.node_names[entry]
            # special formatting for entries that are divided by an em-dash
            parts = reg.split(name, 1)
            if len(parts) == 2:
                name, desc = parts
            else:
                desc = ''
            name = self.escape_menu(name)
            desc = self.escape(desc)
            self.body.append(self.format_menu_entry(name, entry, desc))
    def add_menu(self, node_name):
        """Emit the ``@menu`` block for *node_name*, plus, for the "Top"
        node, a recursive ``@detailmenu`` listing of all descendants."""
        entries = self.node_menus[node_name]
        if not entries:
            return
        self.body.append('\n@menu\n')
        self.add_menu_entries(entries)
        # no grandchildren -> a flat menu is enough, skip the detail listing
        if not self.node_menus[entries[0]]:
            self.body.append('\n@end menu\n')
            return
        def _add_detailed_menu(name):
            # recursively list *name*'s submenu under its escaped title
            entries = self.node_menus[name]
            if not entries:
                return
            self.body.append('\n%s\n\n' % (self.escape(self.node_names[name],)))
            self.add_menu_entries(entries)
            for subentry in entries:
                _add_detailed_menu(subentry)
        if node_name == 'Top':
            self.body.append('\n@detailmenu\n'
                             ' --- The Detailed Node Listing ---\n')
            for entry in entries:
                _add_detailed_menu(entry)
        if node_name == 'Top':
            self.body.append('\n@end detailmenu')
        self.body.append('\n@end menu\n\n')
def tex_image_length(self, width_str):
match = re.match('(\d*\.?\d*)\s*(\S*)', width_str)
if not match:
# fallback
return width_str
res = width_str
amount, unit = match.groups()[:2]
if not unit or unit == "px":
# pixels: let TeX alone
return ''
elif unit == "%":
# a4paper: textwidth=418.25368pt
res = "%d.0pt" % (float(amount) * 4.1825368)
return res
    def collect_indices(self):
        """Render every enabled domain index into ``self.indices`` as
        ``(node_name, menu_text)`` pairs, plus the general index."""
        def generate(content, collapsed):
            # render one domain index as a @menu of its entries
            ret = ['\n@menu\n']
            for letter, entries in content:
                for entry in entries:
                    if not entry[3]:
                        continue
                    name = self.escape_menu(entry[0])
                    sid = self.get_short_id('%s:%s' % (entry[2], entry[3]))
                    desc = self.escape_arg(entry[6])
                    me = self.format_menu_entry(name, sid, desc)
                    ret.append(me)
            ret.append('@end menu\n')
            return ''.join(ret)
        indices_config = self.builder.config.texinfo_domain_indices
        if indices_config:
            # Python 2 idiom (itervalues); config may be True or a whitelist
            for domain in self.builder.env.domains.itervalues():
                for indexcls in domain.indices:
                    indexname = '%s-%s' % (domain.name, indexcls.name)
                    if isinstance(indices_config, list):
                        if indexname not in indices_config:
                            continue
                    content, collapsed = indexcls(domain).generate(
                        self.builder.docnames)
                    if not content:
                        continue
                    node_name = self.escape_id(indexcls.localname)
                    self.indices.append((node_name,
                                         generate(content, collapsed)))
        # the general index is always appended last
        self.indices.append((_('Index'), '\n@printindex ge\n'))
    # this is copied from the latex writer
    # TODO: move this to sphinx.util
    def collect_footnotes(self, node):
        """Return a dict mapping footnote label -> [collected_footnote,
        used-flag] for all footnotes under *node* (not crossing file
        boundaries)."""
        fnotes = {}
        def footnotes_under(n):
            # yield footnote nodes depth-first, skipping included files
            if isinstance(n, nodes.footnote):
                yield n
            else:
                for c in n.children:
                    if isinstance(c, addnodes.start_of_file):
                        continue
                    for k in footnotes_under(c):
                        yield k
        for fn in footnotes_under(node):
            # the first child is the footnote's label
            num = fn.children[0].astext().strip()
            fnotes[num] = [collected_footnote(*fn.children), False]
        return fnotes
## xref handling
def get_short_id(self, id):
"""Return a shorter 'id' associated with ``id``."""
# Shorter ids improve paragraph filling in places
# that the id is hidden by Emacs.
try:
sid = self.short_ids[id]
except KeyError:
sid = hex(len(self.short_ids))[2:]
self.short_ids[id] = sid
return sid
    def add_anchor(self, id, node):
        """Write ``@anchor`` commands (full and short form) for *id*,
        qualified by the current file, unless already written."""
        if id.startswith('index-'):
            return
        id = self.curfilestack[-1] + ':' + id
        eid = self.escape_id(id)
        sid = self.get_short_id(id)
        # emit both the readable and the short anchor exactly once each
        for id in (eid, sid):
            if id not in self.written_ids:
                self.body.append('@anchor{%s}' % id)
                self.written_ids.add(id)
    def add_xref(self, id, name, node):
        """Write a ``@pxref`` to *id* and record it as referenced."""
        name = self.escape_menu(name)
        sid = self.get_short_id(id)
        self.body.append('@pxref{%s,,%s}' % (sid, name))
        self.referenced_ids.add(sid)
        self.referenced_ids.add(self.escape_id(id))
## Visiting
    def visit_document(self, node):
        self.footnotestack.append(self.collect_footnotes(node))
        self.curfilestack.append(node.get('docname', ''))
        if 'docname' in node:
            self.add_anchor(':doc', node)
    def depart_document(self, node):
        self.footnotestack.pop()
        self.curfilestack.pop()
    def visit_Text(self, node):
        s = self.escape(node.astext())
        # inside @display lines, newlines must not break the line
        if self.escape_newlines:
            s = s.replace('\n', ' ')
        self.body.append(s)
    def depart_Text(self, node):
        pass
    def visit_section(self, node):
        self.next_section_ids.update(node.get('ids', []))
        if not self.seen_title:
            return
        # finish the previous node's menu before opening a new @node
        if self.previous_section:
            self.add_menu(self.previous_section['node_name'])
        else:
            self.add_menu('Top')
        node_name = node['node_name']
        # @node takes: name, next, prev, up (rellinks stores [next, prev, up])
        pointers = tuple([node_name] + self.rellinks[node_name])
        self.body.append('\n@node %s,%s,%s,%s\n' % pointers)
        # flush anchors deferred by visit_target until after the @node
        for id in self.next_section_ids:
            self.add_anchor(id, node)
        self.next_section_ids.clear()
        self.previous_section = node
        self.section_level += 1
    def depart_section(self, node):
        self.section_level -= 1
headings = (
'@unnumbered',
'@chapter',
'@section',
'@subsection',
'@subsubsection',
)
rubrics = (
'@heading',
'@subheading',
'@subsubheading',
)
    def visit_title(self, node):
        # the very first title becomes the @top title and is skipped here
        if not self.seen_title:
            self.seen_title = 1
            raise nodes.SkipNode
        parent = node.parent
        if isinstance(parent, nodes.table):
            return
        if isinstance(parent, (nodes.Admonition, nodes.sidebar, nodes.topic)):
            raise nodes.SkipNode
        elif not isinstance(parent, nodes.section):
            self.builder.warn(
                'encountered title node not in section, topic, table, '
                'admonition or sidebar', (self.curfilestack[-1], node.line))
            self.visit_rubric(node)
        else:
            # pick the sectioning command matching the nesting depth,
            # falling back to the deepest one available
            try:
                heading = self.headings[self.section_level]
            except IndexError:
                heading = self.headings[-1]
            self.body.append('\n%s ' % heading)
    def depart_title(self, node):
        self.body.append('\n\n')
    def visit_rubric(self, node):
        # the "Footnotes" rubric is redundant: footnotes are inlined
        if len(node.children) == 1 and node.children[0].astext() in \
                ('Footnotes', _('Footnotes')):
            raise nodes.SkipNode
        try:
            rubric = self.rubrics[self.section_level]
        except IndexError:
            rubric = self.rubrics[-1]
        self.body.append('\n%s ' % rubric)
    def depart_rubric(self, node):
        self.body.append('\n\n')
    def visit_subtitle(self, node):
        self.body.append('\n\n@noindent\n')
    def depart_subtitle(self, node):
        self.body.append('\n\n')
    ## References
    def visit_target(self, node):
        # postpone the labels until after the sectioning command
        parindex = node.parent.index(node)
        try:
            try:
                next = node.parent[parindex+1]
            except IndexError:
                # last node in parent, look at next after parent
                # (for section of equal level)
                next = node.parent.parent[node.parent.parent.index(node.parent)]
            if isinstance(next, nodes.section):
                # defer: visit_section will emit these after its @node
                if node.get('refid'):
                    self.next_section_ids.add(node['refid'])
                self.next_section_ids.update(node['ids'])
                return
        except IndexError:
            pass
        if 'refuri' in node:
            return
        if node.get('refid'):
            self.add_anchor(node['refid'], node)
        for id in node['ids']:
            self.add_anchor(id, node)
    def depart_target(self, node):
        pass
    def visit_reference(self, node):
        """Render a reference as @email, @pxref, @indicateurl, @uref or a
        URL footnote, depending on the target's URI scheme."""
        # an xref's target is displayed in Info so we ignore a few
        # cases for the sake of appearance
        if isinstance(node.parent, (nodes.title, addnodes.desc_type,)):
            return
        if isinstance(node[0], nodes.image):
            return
        name = node.get('name', node.astext()).strip()
        uri = node.get('refuri', '')
        if not uri and node.get('refid'):
            # internal reference: encode as "%docname#refid"
            uri = '%' + self.curfilestack[-1] + '#' + node['refid']
        if not uri:
            return
        if uri.startswith('mailto:'):
            uri = self.escape_arg(uri[7:])
            name = self.escape_arg(name)
            if not name or name == uri:
                self.body.append('@email{%s}' % uri)
            else:
                self.body.append('@email{%s,%s}' % (uri, name))
        elif uri.startswith('#'):
            # references to labels in the same document
            id = self.curfilestack[-1] + ':' + uri[1:]
            self.add_xref(id, name, node)
        elif uri.startswith('%'):
            # references to documents or labels inside documents
            hashindex = uri.find('#')
            if hashindex == -1:
                # reference to the document
                id = uri[1:] + '::doc'
            else:
                # reference to a label
                id = uri[1:].replace('#', ':')
            self.add_xref(id, name, node)
        elif uri.startswith('info:'):
            # references to an external Info file
            uri = uri[5:].replace('_', ' ')
            uri = self.escape_arg(uri)
            id = 'Top'
            if '#' in uri:
                uri, id = uri.split('#', 1)
            id = self.escape_id(id)
            name = self.escape_menu(name)
            if name == id:
                self.body.append('@pxref{%s,,,%s}' % (id, uri))
            else:
                self.body.append('@pxref{%s,,%s,%s}' % (id, name, uri))
        else:
            # plain external URL; rendering controlled by texinfo_show_urls
            uri = self.escape_arg(uri)
            name = self.escape_arg(name)
            show_urls = self.builder.config.texinfo_show_urls
            if self.in_footnote:
                # no nested footnotes: force inline display
                show_urls = 'inline'
            if not name or uri == name:
                self.body.append('@indicateurl{%s}' % uri)
            elif show_urls == 'inline':
                self.body.append('@uref{%s,%s}' % (uri, name))
            elif show_urls == 'no':
                self.body.append('@uref{%s,,%s}' % (uri, name))
            else:
                self.body.append('%s@footnote{%s}' % (name, uri))
        raise nodes.SkipNode
    def depart_reference(self, node):
        pass
    def visit_title_reference(self, node):
        text = node.astext()
        self.body.append('@cite{%s}' % self.escape_arg(text))
        raise nodes.SkipNode
    ## Blocks
    def visit_paragraph(self, node):
        # suppress the indent for continuation paragraphs
        if 'continued' in node or isinstance(node.parent, nodes.compound):
            self.body.append('\n@noindent')
        self.body.append('\n')
    def depart_paragraph(self, node):
        self.body.append('\n')
    def visit_block_quote(self, node):
        self.body.append('\n@quotation\n')
    def depart_block_quote(self, node):
        self.ensure_eol()
        self.body.append('@end quotation\n')
    def visit_literal_block(self, node):
        self.body.append('\n@example\n')
    def depart_literal_block(self, node):
        self.body.append('\n@end example\n\n'
                         '@noindent\n')
    # doctest blocks render exactly like literal blocks
    visit_doctest_block = visit_literal_block
    depart_doctest_block = depart_literal_block
    def visit_line_block(self, node):
        if not isinstance(node.parent, nodes.line_block):
            self.body.append('\n\n')
        self.body.append('@display\n')
    def depart_line_block(self, node):
        self.body.append('@end display\n')
        if not isinstance(node.parent, nodes.line_block):
            self.body.append('\n\n')
    def visit_line(self, node):
        # see visit_Text: newlines inside a line must become spaces
        self.escape_newlines += 1
    def depart_line(self, node):
        self.body.append('@w{ }\n')
        self.escape_newlines -= 1
    ## Inline
    def visit_strong(self, node):
        self.body.append('@strong{')
    def depart_strong(self, node):
        self.body.append('}')
    def visit_emphasis(self, node):
        self.body.append('@emph{')
    def depart_emphasis(self, node):
        self.body.append('}')
    def visit_literal(self, node):
        self.body.append('@code{')
    def depart_literal(self, node):
        self.body.append('}')
    def visit_superscript(self, node):
        self.body.append('@w{^')
    def depart_superscript(self, node):
        self.body.append('}')
    def visit_subscript(self, node):
        self.body.append('@w{[')
    def depart_subscript(self, node):
        self.body.append(']}')
    ## Footnotes
    def visit_footnote(self, node):
        # original footnote nodes are skipped; collected copies are
        # inlined at each reference instead (see visit_footnote_reference)
        raise nodes.SkipNode
    def visit_collected_footnote(self, node):
        self.in_footnote += 1
        self.body.append('@footnote{')
    def depart_collected_footnote(self, node):
        self.body.append('}')
        self.in_footnote -= 1
    def visit_footnote_reference(self, node):
        num = node.astext().strip()
        try:
            footnode, used = self.footnotestack[-1][num]
        except (KeyError, IndexError):
            raise nodes.SkipNode
        # footnotes are repeated for each reference
        footnode.walkabout(self)
        raise nodes.SkipChildren
    def visit_citation(self, node):
        for id in node.get('ids'):
            self.add_anchor(id, node)
    def depart_citation(self, node):
        pass
    def visit_citation_reference(self, node):
        self.body.append('@w{[')
    def depart_citation_reference(self, node):
        self.body.append(']}')
    ## Lists
    def visit_bullet_list(self, node):
        bullet = node.get('bullet', '*')
        self.body.append('\n\n@itemize %s\n' % bullet)
    def depart_bullet_list(self, node):
        self.ensure_eol()
        self.body.append('@end itemize\n')
    def visit_enumerated_list(self, node):
        # doesn't support Roman numerals
        enum = node.get('enumtype', 'arabic')
        starters = {'arabic': '',
                    'loweralpha': 'a',
                    'upperalpha': 'A',}
        start = node.get('start', starters.get(enum, ''))
        self.body.append('\n\n@enumerate %s\n' % start)
    def depart_enumerated_list(self, node):
        self.ensure_eol()
        self.body.append('@end enumerate\n')
    def visit_list_item(self, node):
        self.body.append('\n@item ')
    def depart_list_item(self, node):
        pass
    ## Option List
    def visit_option_list(self, node):
        self.body.append('\n\n@table @option\n')
    def depart_option_list(self, node):
        self.ensure_eol()
        self.body.append('@end table\n')
    def visit_option_list_item(self, node):
        pass
    def depart_option_list_item(self, node):
        pass
    def visit_option_group(self, node):
        # first option of a group uses @item, the rest @itemx
        self.at_item_x = '@item'
    def depart_option_group(self, node):
        pass
    def visit_option(self, node):
        self.body.append('\n%s ' % self.at_item_x)
        self.at_item_x = '@itemx'
    def depart_option(self, node):
        pass
    def visit_option_string(self, node):
        pass
    def depart_option_string(self, node):
        pass
    def visit_option_argument(self, node):
        self.body.append(node.get('delimiter', ' '))
    def depart_option_argument(self, node):
        pass
    def visit_description(self, node):
        self.body.append('\n')
    def depart_description(self, node):
        pass
    ## Definitions
    def visit_definition_list(self, node):
        self.body.append('\n\n@table @asis\n')
    def depart_definition_list(self, node):
        self.ensure_eol()
        self.body.append('@end table\n')
    def visit_definition_list_item(self, node):
        # first term uses @item, subsequent terms @itemx
        self.at_item_x = '@item'
    def depart_definition_list_item(self, node):
        pass
    def visit_term(self, node):
        for id in node.get('ids'):
            self.add_anchor(id, node)
        # anchors and indexes need to go in front
        # (iterate over a copy since nodes are removed while walking)
        for n in node[::]:
            if isinstance(n, (addnodes.index, nodes.target)):
                n.walkabout(self)
                node.remove(n)
        self.body.append('\n%s ' % self.at_item_x)
        self.at_item_x = '@itemx'
    def depart_term(self, node):
        pass
    def visit_termsep(self, node):
        self.body.append('\n%s ' % self.at_item_x)
    def depart_termsep(self, node):
        pass
    def visit_classifier(self, node):
        self.body.append(' : ')
    def depart_classifier(self, node):
        pass
    def visit_definition(self, node):
        self.body.append('\n')
    def depart_definition(self, node):
        pass
    ## Tables
    def visit_table(self, node):
        self.entry_sep = '@item'
    def depart_table(self, node):
        self.body.append('\n@end multitable\n\n')
    def visit_tabular_col_spec(self, node):
        pass
    def depart_tabular_col_spec(self, node):
        pass
    def visit_colspec(self, node):
        self.colwidths.append(node['colwidth'])
        # wait until all column specs are seen before opening @multitable
        if len(self.colwidths) != self.n_cols:
            return
        self.body.append('\n\n@multitable ')
        for i, n in enumerate(self.colwidths):
            # column width is given by a prototype string of 'x's
            self.body.append('{%s} ' %('x' * (n+2)))
    def depart_colspec(self, node):
        pass
    def visit_tgroup(self, node):
        self.colwidths = []
        self.n_cols = node['cols']
    def depart_tgroup(self, node):
        pass
    def visit_thead(self, node):
        self.entry_sep = '@headitem'
    def depart_thead(self, node):
        pass
    def visit_tbody(self, node):
        pass
    def depart_tbody(self, node):
        pass
    def visit_row(self, node):
        pass
    def depart_row(self, node):
        self.entry_sep = '@item'
    def visit_entry(self, node):
        # first cell of a row gets @item/@headitem, the rest @tab
        self.body.append('\n%s\n' % self.entry_sep)
        self.entry_sep = '@tab'
    def depart_entry(self, node):
        # pad out spanned columns (Python 2 xrange)
        for i in xrange(node.get('morecols', 0)):
            self.body.append('\n@tab\n')
    ## Field Lists
    def visit_field_list(self, node):
        self.body.append('\n\n@itemize @w\n')
    def depart_field_list(self, node):
        self.ensure_eol()
        self.body.append('@end itemize\n')
    def visit_field(self, node):
        # a lone field outside a field_list still needs the itemize wrapper
        if not isinstance(node.parent, nodes.field_list):
            self.visit_field_list(node)
    def depart_field(self, node):
        if not isinstance(node.parent, nodes.field_list):
            self.depart_field_list(node)
    def visit_field_name(self, node):
        self.body.append('\n@item ')
    def depart_field_name(self, node):
        self.body.append(': ')
    def visit_field_body(self, node):
        pass
    def depart_field_body(self, node):
        pass
    ## Admonitions
    def visit_admonition(self, node, name=''):
        if not name:
            # generic admonition: its first child is the title
            name = self.escape(node[0].astext())
        self.body.append('\n@cartouche\n'
                         '@quotation %s ' % name)
    def depart_admonition(self, node):
        self.ensure_eol()
        self.body.append('@end quotation\n'
                         '@end cartouche\n')
    def _make_visit_admonition(typ):
        # factory producing a visitor with a fixed, translated title;
        # *typ* is bound at class-creation time, not at call time
        def visit(self, node):
            self.visit_admonition(node, self.escape(_(typ)))
        return visit
    visit_attention = _make_visit_admonition('Attention')
    depart_attention = depart_admonition
    visit_caution = _make_visit_admonition('Caution')
    depart_caution = depart_admonition
    visit_danger = _make_visit_admonition('Danger')
    depart_danger = depart_admonition
    visit_error = _make_visit_admonition('Error')
    depart_error = depart_admonition
    visit_important = _make_visit_admonition('Important')
    depart_important = depart_admonition
    visit_note = _make_visit_admonition('Note')
    depart_note = depart_admonition
    visit_tip = _make_visit_admonition('Tip')
    depart_tip = depart_admonition
    visit_hint = _make_visit_admonition('Hint')
    depart_hint = depart_admonition
    visit_warning = _make_visit_admonition('Warning')
    depart_warning = depart_admonition
    ## Misc
    # nodes with no sensible Texinfo representation are skipped outright
    def visit_docinfo(self, node):
        raise nodes.SkipNode
    def visit_generated(self, node):
        raise nodes.SkipNode
    def visit_header(self, node):
        raise nodes.SkipNode
    def visit_footer(self, node):
        raise nodes.SkipNode
    def visit_container(self, node):
        pass
    def depart_container(self, node):
        pass
    def visit_decoration(self, node):
        pass
    def depart_decoration(self, node):
        pass
    def visit_topic(self, node):
        # ignore TOC's since we have to have a "menu" anyway
        if 'contents' in node.get('classes', []):
            raise nodes.SkipNode
        title = node[0]
        self.visit_rubric(title)
        self.body.append('%s\n' % self.escape(title.astext()))
    def depart_topic(self, node):
        pass
    def visit_transition(self, node):
        self.body.append('\n\n@exdent @w{ %s}\n\n' % ('* ' * 30))
    def depart_transition(self, node):
        pass
    def visit_attribution(self, node):
        self.body.append('\n\n@center --- ')
    def depart_attribution(self, node):
        self.body.append('\n\n')
    def visit_raw(self, node):
        # pass raw content through only when targeted at texinfo
        format = node.get('format', '').split()
        if 'texinfo' in format or 'texi' in format:
            self.body.append(node.astext())
        raise nodes.SkipNode
    def visit_figure(self, node):
        self.body.append('\n\n@float Figure\n')
    def depart_figure(self, node):
        self.body.append('\n@end float\n\n')
    def visit_caption(self, node):
        if not isinstance(node.parent, nodes.figure):
            self.builder.warn('caption not inside a figure.',
                              (self.curfilestack[-1], node.line))
            return
        self.body.append('\n@caption{')
    def depart_caption(self, node):
        if isinstance(node.parent, nodes.figure):
            self.body.append('}\n')
    def visit_image(self, node):
        if node['uri'] in self.builder.images:
            uri = self.builder.images[node['uri']]
        else:
            # missing image!
            if self.ignore_missing_images:
                return
            uri = node['uri']
        if uri.find('://') != -1:
            # ignore remote images
            return
        # @image wants the basename and the extension as separate arguments
        name, ext = path.splitext(uri)
        attrs = node.attributes
        # width and height ignored in non-tex output
        width = self.tex_image_length(attrs.get('width', ''))
        height = self.tex_image_length(attrs.get('height', ''))
        alt = self.escape_arg(attrs.get('alt', ''))
        self.body.append('\n@image{%s,%s,%s,%s,%s}\n' %
                         (name, width, height, alt, ext[1:]))
    def depart_image(self, node):
        pass
    def visit_compound(self, node):
        pass
    def depart_compound(self, node):
        pass
    def visit_sidebar(self, node):
        # sidebars render the same as topics
        self.visit_topic(node)
    def depart_sidebar(self, node):
        self.depart_topic(node)
    def visit_label(self, node):
        self.body.append('@w{(')
    def depart_label(self, node):
        self.body.append(')} ')
    def visit_legend(self, node):
        pass
    def depart_legend(self, node):
        pass
    def visit_substitution_reference(self, node):
        pass
    def depart_substitution_reference(self, node):
        pass
    def visit_substitution_definition(self, node):
        raise nodes.SkipNode
    def visit_system_message(self, node):
        self.body.append('\n@w{----------- System Message: %s/%s -----------} '
                         '(%s, line %s)\n' % (
                node.get('type', '?'),
                node.get('level', '?'),
                self.escape(node.get('source', '?')),
                node.get('line', '?')))
    def depart_system_message(self, node):
        pass
    def visit_comment(self, node):
        # emit each comment line with Texinfo's @c prefix
        self.body.append('\n')
        for line in node.astext().splitlines():
            self.body.append('@c %s\n' % line)
        raise nodes.SkipNode
    def visit_problematic(self, node):
        self.body.append('>')
    def depart_problematic(self, node):
        self.body.append('<')
    def unimplemented_visit(self, node):
        self.builder.warn("unimplemented node type: %r" % node,
                          (self.curfilestack[-1], node.line))
    def unknown_visit(self, node):
        self.builder.warn("unknown node type: %r" % node,
                          (self.curfilestack[-1], node.line))
    def unknown_departure(self, node):
        pass
    ### Sphinx specific
    def visit_productionlist(self, node):
        # render the grammar as an @example block, one production per line
        self.visit_literal_block(None)
        names = []
        for production in node:
            names.append(production['tokenname'])
        maxlen = max(len(name) for name in names)
        for production in node:
            if production['tokenname']:
                for id in production.get('ids'):
                    self.add_anchor(id, production)
                s = production['tokenname'].ljust(maxlen) + ' ::='
                lastname = production['tokenname']
            else:
                # continuation line: align under the token name column
                s = '%s ' % (' '*maxlen)
            self.body.append(self.escape(s))
            self.body.append(self.escape(production.astext() + '\n'))
        self.depart_literal_block(None)
        raise nodes.SkipNode
    def visit_production(self, node):
        pass
    def depart_production(self, node):
        pass
    def visit_literal_emphasis(self, node):
        self.body.append('@code{')
    def depart_literal_emphasis(self, node):
        self.body.append('}')
    def visit_index(self, node):
        # terminate the line but don't prevent paragraph breaks
        if isinstance(node.parent, nodes.paragraph):
            self.ensure_eol()
        else:
            self.body.append('\n')
        for entry in node['entries']:
            typ, text, tid, text2 = entry
            text = self.escape_menu(text)
            self.body.append('@geindex %s\n' % text)
    def visit_refcount(self, node):
        self.body.append('\n')
    def depart_refcount(self, node):
        self.body.append('\n')
    def visit_versionmodified(self, node):
        # e.g. "New in version 2.6" / "Deprecated since version 3.1"
        intro = versionlabels[node['type']] % node['version']
        if node.children:
            intro += ': '
        else:
            intro += '.'
        self.body.append('\n%s' % self.escape(intro))
    def depart_versionmodified(self, node):
        self.body.append('\n')
    def visit_start_of_file(self, node):
        # add a document target
        self.next_section_ids.add(':doc')
        self.curfilestack.append(node['docname'])
        self.footnotestack.append(self.collect_footnotes(node))
    def depart_start_of_file(self, node):
        self.curfilestack.pop()
        self.footnotestack.pop()
    def visit_centered(self, node):
        txt = self.escape_arg(node.astext())
        self.body.append('\n\n@center %s\n\n' % txt)
        raise nodes.SkipNode
    def visit_seealso(self, node):
        # "see also" renders the same as a topic
        self.visit_topic(node)
    def depart_seealso(self, node):
        self.depart_topic(node)
    def visit_meta(self, node):
        raise nodes.SkipNode
    def visit_glossary(self, node):
        pass
    def depart_glossary(self, node):
        pass
    def visit_acks(self, node):
        # flatten the acknowledgements list into one comma-joined sentence
        self.body.append('\n\n')
        self.body.append(', '.join(n.astext()
                                   for n in node.children[0].children) + '.')
        self.body.append('\n\n')
        raise nodes.SkipNode
    def visit_highlightlang(self, node):
        pass
    def depart_highlightlang(self, node):
        pass
    ## Desc
    def visit_desc(self, node):
        # first signature of a description uses @deffn, the rest @deffnx
        self.at_deffnx = '@deffn'
    def depart_desc(self, node):
        self.ensure_eol()
        self.body.append('@end deffn\n')
    def visit_desc_signature(self, node):
        objtype = node.parent['objtype']
        if objtype != 'describe':
            for id in node.get('ids'):
                self.add_anchor(id, node)
        # use the full name of the objtype for the category
        try:
            domain = self.builder.env.domains[node.parent['domain']]
            primary = self.builder.config.primary_domain
            name = domain.get_type_name(domain.object_types[objtype],
                                        primary == domain.name)
        except KeyError:
            # unknown domain/objtype: fall back to the raw objtype string
            name = objtype
        category = self.escape_arg(string.capwords(name))
        self.body.append('\n%s {%s} ' % (self.at_deffnx, category))
        self.at_deffnx = '@deffnx'
    def depart_desc_signature(self, node):
        self.body.append("\n")
    def visit_desc_name(self, node):
        pass
    def depart_desc_name(self, node):
        pass
    def visit_desc_addname(self, node):
        pass
    def depart_desc_addname(self, node):
        pass
    def visit_desc_type(self, node):
        pass
    def depart_desc_type(self, node):
        pass
    def visit_desc_returns(self, node):
        self.body.append(' -> ')
    def depart_desc_returns(self, node):
        pass
    def visit_desc_parameterlist(self, node):
        self.body.append(' (')
        self.first_param = 1
    def depart_desc_parameterlist(self, node):
        self.body.append(')')
    def visit_desc_parameter(self, node):
        # comma-separate all parameters after the first
        if not self.first_param:
            self.body.append(', ')
        else:
            self.first_param = 0
        text = self.escape(node.astext())
        # replace no-break spaces with normal ones
        text = text.replace(u' ', '@w{ }')
        self.body.append(text)
        raise nodes.SkipNode
    def visit_desc_optional(self, node):
        self.body.append('[')
    def depart_desc_optional(self, node):
        self.body.append(']')
    def visit_desc_annotation(self, node):
        raise nodes.SkipNode
    def visit_desc_content(self, node):
        pass
    def depart_desc_content(self, node):
        pass
    def visit_inline(self, node):
        pass
    def depart_inline(self, node):
        pass
    def visit_abbreviation(self, node):
        abbr = node.astext()
        self.body.append('@abbr{')
        # spell out the explanation only on the first occurrence
        if node.hasattr('explanation') and abbr not in self.handled_abbrs:
            self.context.append(',%s}' % self.escape_arg(node['explanation']))
            self.handled_abbrs.add(abbr)
        else:
            self.context.append('}')
    def depart_abbreviation(self, node):
        self.body.append(self.context.pop())
    def visit_download_reference(self, node):
        pass
    def depart_download_reference(self, node):
        pass
    def visit_hlist(self, node):
        # horizontal lists degrade to plain bullet lists in Info
        self.visit_bullet_list(node)
    def depart_hlist(self, node):
        self.depart_bullet_list(node)
    def visit_hlistcol(self, node):
        pass
    def depart_hlistcol(self, node):
        pass
    def visit_pending_xref(self, node):
        pass
    def depart_pending_xref(self, node):
        pass
|
GbalsaC/bitnamiP
|
venv/lib/python2.7/site-packages/sphinx/writers/texinfo.py
|
Python
|
agpl-3.0
| 44,646
|
[
"VisIt"
] |
8821ff1ada157035858ef7e3e1368627a7647b3335089eb2dd04307ce33e150e
|
# -*- coding: utf-8 -*-
#
# brunel_alpha_nest.py
#
# This file is part of NEST.
#
# Copyright (C) 2004 The NEST Initiative
#
# NEST is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 2 of the License, or
# (at your option) any later version.
#
# NEST is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with NEST. If not, see <http://www.gnu.org/licenses/>.
"""Random balanced network (alpha synapses) connected with NEST
------------------------------------------------------------------
This script simulates an excitatory and an inhibitory population on
the basis of the network used in [1]_.
In contrast to ``brunel-alpha-numpy.py``, this variant uses NEST's builtin
connection routines to draw the random connections instead of NumPy.
When connecting the network customary synapse models are used, which
allow for querying the number of created synapses. Using spike
detectors the average firing rates of the neurons in the populations
are established. The building as well as the simulation time of the
network are recorded.
References
~~~~~~~~~~~~~
.. [1] Brunel N (2000). Dynamics of sparsely connected networks of excitatory and
inhibitory spiking neurons. Journal of Computational Neuroscience 8,
183-208.
See Also
~~~~~~~~~~~~
:doc:`brunel_alpha_numpy`
"""
###############################################################################
# Import all necessary modules for simulation, analysis and plotting. Scipy
# should be imported before nest.
from scipy.optimize import fsolve
import nest
import nest.raster_plot
import time
from numpy import exp
###############################################################################
# Definition of functions used in this example. First, define the `Lambert W`
# function implemented in SLI. The second function computes the maximum of
# the postsynaptic potential for a synaptic input current of unit amplitude
# (1 pA) using the `Lambert W` function. This function will later be used to
# calibrate the synaptic weights.
def LambertWm1(x):
    """Evaluate NEST's SLI-level ``LambertWm1`` function at *x*.

    The value is pushed onto the SLI interpreter stack, the command is
    run, and the result popped back into Python.
    """
    nest.ll_api.sli_push(x)
    nest.ll_api.sli_run('LambertWm1')
    y = nest.ll_api.sli_pop()
    return y
def ComputePSPnorm(tauMem, CMem, tauSyn):
a = (tauMem / tauSyn)
b = (1.0 / tauSyn - 1.0 / tauMem)
# time of maximum
t_max = 1.0 / b * (-LambertWm1(-exp(-1.0 / a) / a) - 1.0 / a)
# maximum of PSP for current of unit amplitude
return (exp(1.0) / (tauSyn * CMem * b) *
((exp(-t_max / tauMem) - exp(-t_max / tauSyn)) / b -
t_max * exp(-t_max / tauSyn)))
nest.ResetKernel()
###############################################################################
# Assigning the current time to a variable in order to determine the build
# time of the network.
startbuild = time.time()
###############################################################################
# Assigning the simulation parameters to variables.
dt = 0.1 # the resolution in ms
simtime = 1000.0 # Simulation time in ms
delay = 1.5 # synaptic delay in ms
###############################################################################
# Definition of the parameters crucial for asynchronous irregular firing of
# the neurons.
g = 5.0 # ratio inhibitory weight/excitatory weight
eta = 2.0 # external rate relative to threshold rate
epsilon = 0.1 # connection probability
###############################################################################
# Definition of the number of neurons in the network and the number of neuron
# recorded from
order = 2500
NE = 4 * order # number of excitatory neurons
NI = 1 * order # number of inhibitory neurons
N_neurons = NE + NI # number of neurons in total
N_rec = 50 # record from 50 neurons
###############################################################################
# Definition of connectivity parameter
CE = int(epsilon * NE) # number of excitatory synapses per neuron
CI = int(epsilon * NI) # number of inhibitory synapses per neuron
C_tot = int(CI + CE) # total number of synapses per neuron
###############################################################################
# Initialization of the parameters of the integrate and fire neuron and the
# synapses. The parameter of the neuron are stored in a dictionary. The
# synaptic currents are normalized such that the amplitude of the PSP is J.
tauSyn = 0.5 # synaptic time constant in ms
tauMem = 20.0 # time constant of membrane potential in ms
CMem = 250.0 # capacitance of membrane in in pF
theta = 20.0 # membrane threshold potential in mV
neuron_params = {"C_m": CMem,
"tau_m": tauMem,
"tau_syn_ex": tauSyn,
"tau_syn_in": tauSyn,
"t_ref": 2.0,
"E_L": 0.0,
"V_reset": 0.0,
"V_m": 0.0,
"V_th": theta}
J = 0.1 # postsynaptic amplitude in mV
J_unit = ComputePSPnorm(tauMem, CMem, tauSyn)
J_ex = J / J_unit # amplitude of excitatory postsynaptic current
J_in = -g * J_ex # amplitude of inhibitory postsynaptic current
###############################################################################
# Definition of threshold rate, which is the external rate needed to fix the
# membrane potential around its threshold, the external firing rate and the
# rate of the poisson generator which is multiplied by the in-degree CE and
# converted to Hz by multiplication by 1000.
nu_th = (theta * CMem) / (J_ex * CE * exp(1) * tauMem * tauSyn)
nu_ex = eta * nu_th
p_rate = 1000.0 * nu_ex * CE
################################################################################
# Configuration of the simulation kernel by the previously defined time
# resolution used in the simulation. Setting ``print_time`` to `True` prints the
# already processed simulation time as well as its percentage of the total
# simulation time.
nest.SetKernelStatus({"resolution": dt, "print_time": True,
"overwrite_files": True})
print("Building network")
###############################################################################
# Configuration of the model ``iaf_psc_alpha`` and ``poisson_generator`` using
# ``SetDefaults``. This function expects the model to be the inserted as a
# string and the parameter to be specified in a dictionary. All instances of
# theses models created after this point will have the properties specified
# in the dictionary by default.
nest.SetDefaults("iaf_psc_alpha", neuron_params)
nest.SetDefaults("poisson_generator", {"rate": p_rate})
###############################################################################
# Creation of the nodes using ``Create``. We store the returned handles in
# variables for later reference. Here the excitatory and inhibitory, as well
# as the poisson generator and two spike detectors. The spike detectors will
# later be used to record excitatory and inhibitory spikes.
nodes_ex = nest.Create("iaf_psc_alpha", NE)
nodes_in = nest.Create("iaf_psc_alpha", NI)
noise = nest.Create("poisson_generator")
espikes = nest.Create("spike_detector")
ispikes = nest.Create("spike_detector")
###############################################################################
# Configuration of the spike detectors recording excitatory and inhibitory
# spikes using ``SetStatus``, which expects a list of node handles and a list
# of parameter dictionaries. Setting the variable ``to_file`` to `True` ensures
# that the spikes will be recorded in a .gdf file starting with the string
# assigned to label. Setting ``withtime`` and ``withgid`` to `True` ensures that
# each spike is saved to file by stating the gid of the spiking neuron and
# the spike time in one line.
nest.SetStatus(espikes, [{"label": "brunel-py-ex",
"withtime": True,
"withgid": True,
"to_file": True}])
nest.SetStatus(ispikes, [{"label": "brunel-py-in",
"withtime": True,
"withgid": True,
"to_file": True}])
print("Connecting devices")
###############################################################################
# Definition of a synapse using ``CopyModel``, which expects the model name of
# a pre-defined synapse, the name of the customary synapse and an optional
# parameter dictionary. The parameters defined in the dictionary will be the
# default parameter for the customary synapse. Here we define one synapse for
# the excitatory and one for the inhibitory connections giving the
# previously defined weights and equal delays.
nest.CopyModel("static_synapse", "excitatory",
{"weight": J_ex, "delay": delay})
nest.CopyModel("static_synapse", "inhibitory",
{"weight": J_in, "delay": delay})
#################################################################################
# Connecting the previously defined poisson generator to the excitatory and
# inhibitory neurons using the excitatory synapse. Since the poisson
# generator is connected to all neurons in the population the default rule
# (``all_to_all``) of ``Connect`` is used. The synaptic properties are inserted
# via ``syn_spec`` which expects a dictionary when defining multiple variables or
# a string when simply using a pre-defined synapse.
nest.Connect(noise, nodes_ex, syn_spec="excitatory")
nest.Connect(noise, nodes_in, syn_spec="excitatory")
###############################################################################
# Connecting the first ``N_rec`` nodes of the excitatory and inhibitory
# population to the associated spike detectors using excitatory synapses.
# Here the same shortcut for the specification of the synapse as defined
# above is used.
nest.Connect(nodes_ex[:N_rec], espikes, syn_spec="excitatory")
nest.Connect(nodes_in[:N_rec], ispikes, syn_spec="excitatory")
print("Connecting network")
print("Excitatory connections")
###############################################################################
# Connecting the excitatory population to all neurons using the pre-defined
# excitatory synapse. Beforehand, the connection parameter are defined in a
# dictionary. Here we use the connection rule ``fixed_indegree``,
# which requires the definition of the indegree. Since the synapse
# specification is reduced to assigning the pre-defined excitatory synapse it
# suffices to insert a string.
conn_params_ex = {'rule': 'fixed_indegree', 'indegree': CE}
nest.Connect(nodes_ex, nodes_ex + nodes_in, conn_params_ex, "excitatory")
print("Inhibitory connections")
###############################################################################
# Connecting the inhibitory population to all neurons using the pre-defined
# inhibitory synapse. The connection parameter as well as the synapse
# parameter are defined analogously to the connection from the excitatory
# population defined above.
conn_params_in = {'rule': 'fixed_indegree', 'indegree': CI}
nest.Connect(nodes_in, nodes_ex + nodes_in, conn_params_in, "inhibitory")
###############################################################################
# Storage of the time point after the buildup of the network in a variable.
endbuild = time.time()
###############################################################################
# Simulation of the network.
print("Simulating")
nest.Simulate(simtime)
###############################################################################
# Storage of the time point after the simulation of the network in a variable.
endsimulate = time.time()
###############################################################################
# Reading out the total number of spikes received from the spike detector
# connected to the excitatory population and the inhibitory population.
events_ex = nest.GetStatus(espikes, "n_events")[0]
events_in = nest.GetStatus(ispikes, "n_events")[0]
###############################################################################
# Calculation of the average firing rate of the excitatory and the inhibitory
# neurons by dividing the total number of recorded spikes by the number of
# neurons recorded from and the simulation time. The multiplication by 1000.0
# converts the unit 1/ms to 1/s=Hz.
rate_ex = events_ex / simtime * 1000.0 / N_rec
rate_in = events_in / simtime * 1000.0 / N_rec
###############################################################################
# Reading out the number of connections established using the excitatory and
# inhibitory synapse model. The numbers are summed up resulting in the total
# number of synapses.
num_synapses = (nest.GetDefaults("excitatory")["num_connections"] +
nest.GetDefaults("inhibitory")["num_connections"])
###############################################################################
# Establishing the time it took to build and simulate the network by taking
# the difference of the pre-defined time variables.
build_time = endbuild - startbuild
sim_time = endsimulate - endbuild
###############################################################################
# Printing the network properties, firing rates and building times.
print("Brunel network simulation (Python)")
print("Number of neurons : {0}".format(N_neurons))
print("Number of synapses: {0}".format(num_synapses))
print(" Exitatory : {0}".format(int(CE * N_neurons) + N_neurons))
print(" Inhibitory : {0}".format(int(CI * N_neurons)))
print("Excitatory rate : %.2f Hz" % rate_ex)
print("Inhibitory rate : %.2f Hz" % rate_in)
print("Building time : %.2f s" % build_time)
print("Simulation time : %.2f s" % sim_time)
###############################################################################
# Plot a raster of the excitatory neurons and a histogram.
nest.raster_plot.from_device(espikes, hist=True)
|
hakonsbm/nest-simulator
|
pynest/examples/brunel_alpha_nest.py
|
Python
|
gpl-2.0
| 14,167
|
[
"NEURON"
] |
217ff3a4251e7b07dd264ea3972cbf126ba94889290f1078de48347bd1a33597
|
import os
import sys
import numpy as np
from flare.otf_parser import OtfAnalysis
from flare.env import AtomicEnvironment
from flare.predict import predict_on_structure
from ase.io import read
def test_parse_header():
os.system("cp test_files/sample_slab_otf.out .")
header_dict = OtfAnalysis("sample_slab_otf.out").header
assert header_dict["frames"] == 5000
assert header_dict["atoms"] == 28
assert header_dict["species_set"] == {"Al"}
assert header_dict["dt"] == 0.001
assert header_dict["kernel_name"] == "two_plus_three_body"
assert header_dict["n_hyps"] == 5
assert header_dict["algo"] == "BFGS"
assert np.equal(
header_dict["cell"],
np.array([[8.59135, 0.0, 0.0], [4.29567, 7.44033, 0.0], [0.0, 0.0, 26.67654]]),
).all()
os.system("rm sample_slab_otf.out")
def test_gp_parser():
"""
Test the capability of otf parser to read GP/DFT info
:return:
"""
os.system("cp test_files/sample_slab_otf.out .")
parsed = OtfAnalysis("sample_slab_otf.out")
assert parsed.gp_species_list == [["Al"] * 28] * 4
gp_positions = parsed.gp_position_list
assert len(gp_positions) == 4
pos1 = 1.50245891
pos2 = 10.06179079
assert pos1 == gp_positions[0][-1][1]
assert pos2 == gp_positions[-1][0][2]
force1 = 0.29430943
force2 = -0.02350709
assert force1 == parsed.gp_force_list[0][-1][1]
assert force2 == parsed.gp_force_list[-1][0][2]
os.system("rm sample_slab_otf.out")
def test_md_parser():
"""
Test the capability of otf parser to read MD info
:return:
"""
os.system("cp test_files/sample_slab_otf.out .")
parsed = OtfAnalysis("sample_slab_otf.out")
pos1 = 10.09769665
assert pos1 == parsed.position_list[0][0][2]
assert len(parsed.position_list[0]) == 28
os.system("rm sample_slab_otf.out")
def test_output_md_structures():
os.system("cp test_files/sample_slab_otf.out .")
parsed = OtfAnalysis("sample_slab_otf.out")
positions = parsed.position_list
forces = parsed.force_list
structures = parsed.output_md_structures()
assert np.isclose(structures[-1].positions, positions[-1]).all()
assert np.isclose(structures[-1].forces, forces[-1]).all()
os.system("rm sample_slab_otf.out")
def test_replicate_gp():
"""
Based on gp_test_al.out, ensures that given hyperparameters and DFT calls
a GP model can be reproduced and correctly re-predict forces and
uncertainties
:return:
"""
os.system("cp test_files/sample_h2_otf.out .")
parsed = OtfAnalysis("sample_h2_otf.out")
positions = parsed.position_list
forces = parsed.force_list
gp_model = parsed.make_gp()
structures = parsed.output_md_structures()
assert np.isclose(structures[-1].positions, positions[-1]).all()
assert np.isclose(structures[-1].forces, forces[-1]).all()
final_structure = structures[-1]
pred_for, pred_stds = predict_on_structure(final_structure, gp_model)
assert np.isclose(final_structure.forces, pred_for).all()
assert np.isclose(final_structure.stds, pred_stds).all()
set_of_structures = structures[-3:-1]
for structure in set_of_structures:
pred_for, pred_stds = predict_on_structure(structure, gp_model)
assert np.isclose(structure.forces, pred_for, atol=1e-6).all()
assert np.isclose(structure.stds, pred_stds, atol=1e-6).all()
os.system("rm sample_h2_otf.out")
def test_otf2xyz():
xyz_file = "h2.xyz"
parsed = OtfAnalysis("test_files/sample_h2_otf.out", calculate_energy=True)
parsed.to_xyz(xyz_file)
xyz_trj = read(xyz_file, index=":")
assert np.allclose(xyz_trj[-1].positions[0, 0], 2.2794)
assert np.allclose(xyz_trj[-2].get_forces()[-1, 2], 0.0000)
os.system("rm h2.xyz")
|
mir-group/flare
|
tests/test_parse_otf.py
|
Python
|
mit
| 3,817
|
[
"ASE"
] |
979f02f3e94f3133f6e4ad1b781744ddbc4a5db8569c7b1a3032e04cd4194918
|
from functools import total_ordering
from dark.score import HigherIsBetterScore, LowerIsBetterScore
@total_ordering
class _Base(object):
"""
Holds information about a matching region from a read alignment.
You should not use this class directly. Use one of its subclasses,
either HSP or LSP, depending on whether you want numerically higher
scores to be considered better (HSP) or worse (LSP).
Below is an example alignment to show the locations of the six
start/end offsets. The upper four are offsets into the subject. The
lower two are offsets into the read. Note that the read has two gaps
('-' characters). All offsets are zero-based and follow the Python
convention that the 'end' positions are not included in the string.
readStartInSubject readEndInSubject
| |
| |
| subjectStart subjectEnd |
| | | |
| | | |
Subject: .................ACGTAAAGGCTTAGGT.................
Read: ....ACGTA-AGGCTT-GGT............
| |
| |
readStart readEnd
Note that the above is just one alignment, and that others are possible
(e.g., with the read extending beyond the end(s) of the subject, or the
subject also with gaps in it). The point of the example diagram is to show
what the six variable names will always refer to, not to enumerate all
possible alignments (the tests in test/blast/test_hsp.py go through
many different cases). The classes in this file are just designed to hold
the variables associated with an HSP and to make it easy to compare them.
@param readStart: The offset in the read where the match begins.
@param readEnd: The offset in the read where the match ends.
@param readStartInSubject: The offset in the subject where the match of
the read starts.
@param readEndInSubject: The offset in the subject where the match of
the read ends.
@param readFrame: The reading frame for the read, a value from
{-3, -2, -1, 1, 2, 3} where the sign indicates negative or positive
sense.
@param subjectStart: The offset in the subject where the match begins.
@param subjectEnd: The offset in the subject where the match ends.
@param subjectFrame: The reading frame for the subject, a value from
{-3, -2, -1, 1, 2, 3} where the sign indicates negative or positive
sense.
@param readMatchedSequence: The matched part of the read. Note that
this may contain gaps (marked with '-').
@param subjectMatchedSequence: The matched part of the subject. Note that
this may contain gaps (marked with '-').
@param identicalCount: The C{int} number of positions at which the subject
and query were identical.
@param positiveCount: The C{int} number of positions at which the subject
and query had a positive score in the scoring matrix used during
matching (this is probably only different from the C{identicalCount}
when matching amino acids (i.e., not nucleotides).
@param percentIdentical: A C{float} percentage (i.e., ranging from 0.0 to
100.0, NOT a fraction) of amino acids that were identical in the match.
"""
def __init__(self, readStart=None, readEnd=None, readStartInSubject=None,
readEndInSubject=None, readFrame=None, subjectStart=None,
subjectEnd=None, subjectFrame=None, readMatchedSequence=None,
subjectMatchedSequence=None, identicalCount=None,
percentIdentical=None, positiveCount=None,
percentPositive=None):
self.readStart = readStart
self.readEnd = readEnd
self.readStartInSubject = readStartInSubject
self.readEndInSubject = readEndInSubject
self.readFrame = readFrame
self.subjectStart = subjectStart
self.subjectEnd = subjectEnd
self.subjectFrame = subjectFrame
self.readMatchedSequence = readMatchedSequence
self.subjectMatchedSequence = subjectMatchedSequence
self.identicalCount = identicalCount
self.percentIdentical = percentIdentical
self.positiveCount = positiveCount
self.percentPositive = percentPositive
def __lt__(self, other):
return self.score < other.score
def __eq__(self, other):
return self.score == other.score
def betterThan(self, score):
"""
Compare this instance's score with another score.
@param score: A C{float} score.
@return: A C{bool}, C{True} if this score is the better.
"""
return self.score.betterThan(score)
def toDict(self):
"""
Get information about the HSP/LSP as a dictionary.
@return: A C{dict} representation of the HSP/LSP.
"""
return {
'readStart': self.readStart,
'readEnd': self.readEnd,
'readStartInSubject': self.readStartInSubject,
'readEndInSubject': self.readEndInSubject,
'readFrame': self.readFrame,
'subjectStart': self.subjectStart,
'subjectEnd': self.subjectEnd,
'subjectFrame': self.subjectFrame,
'readMatchedSequence': self.readMatchedSequence,
'subjectMatchedSequence': self.subjectMatchedSequence,
'identicalCount': self.identicalCount,
'percentIdentical': self.percentIdentical,
'positiveCount': self.positiveCount,
'percentPositive': self.percentPositive,
}
class HSP(_Base):
"""
Holds information about a high-scoring pair from a read alignment.
Comparisons are done as for BLAST or DIAMOND bit scores (higher is better).
@param score: The numeric score of this HSP.
"""
def __init__(self, score, **kwargs):
_Base.__init__(self, **kwargs)
self.score = HigherIsBetterScore(score)
def toDict(self):
"""
Get information about the HSP as a dictionary.
@return: A C{dict} representation of the HSP.
"""
result = _Base.toDict(self)
result['score'] = self.score.score
return result
class LSP(_Base):
"""
Holds information about a low-scoring pair from a read alignment.
Comparisons are done as for BLAST or DIAMOND e-values (smaller is better).
@param score: The numeric score of this LSP.
"""
def __init__(self, score, **kwargs):
_Base.__init__(self, **kwargs)
self.score = LowerIsBetterScore(score)
def toDict(self):
"""
Get information about the LSP as a dictionary.
@return: A C{dict} representation of the LSP.
"""
result = _Base.toDict(self)
result['score'] = self.score.score
return result
|
terrycojones/dark-matter
|
dark/hsp.py
|
Python
|
mit
| 7,156
|
[
"BLAST"
] |
f8b348b24edea16c9058dd4a8117c74d5ccee269ce74c7803ed2c5401d11ee4d
|
# -*- coding: utf-8 -*-
# Copyright 2022 Google LLC
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
import os
import mock
import grpc
from grpc.experimental import aio
import math
import pytest
from proto.marshal.rules.dates import DurationRule, TimestampRule
from google.api_core import client_options
from google.api_core import exceptions as core_exceptions
from google.api_core import gapic_v1
from google.api_core import grpc_helpers
from google.api_core import grpc_helpers_async
from google.api_core import path_template
from google.auth import credentials as ga_credentials
from google.auth.exceptions import MutualTLSChannelError
from google.cloud.dataflow_v1beta3.services.flex_templates_service import (
FlexTemplatesServiceAsyncClient,
)
from google.cloud.dataflow_v1beta3.services.flex_templates_service import (
FlexTemplatesServiceClient,
)
from google.cloud.dataflow_v1beta3.services.flex_templates_service import transports
from google.cloud.dataflow_v1beta3.types import environment
from google.cloud.dataflow_v1beta3.types import jobs
from google.cloud.dataflow_v1beta3.types import templates
from google.oauth2 import service_account
import google.auth
def client_cert_source_callback():
return b"cert bytes", b"key bytes"
# If default endpoint is localhost, then default mtls endpoint will be the same.
# This method modifies the default endpoint so the client can produce a different
# mtls endpoint for endpoint testing purposes.
def modify_default_endpoint(client):
return (
"foo.googleapis.com"
if ("localhost" in client.DEFAULT_ENDPOINT)
else client.DEFAULT_ENDPOINT
)
def test__get_default_mtls_endpoint():
api_endpoint = "example.googleapis.com"
api_mtls_endpoint = "example.mtls.googleapis.com"
sandbox_endpoint = "example.sandbox.googleapis.com"
sandbox_mtls_endpoint = "example.mtls.sandbox.googleapis.com"
non_googleapi = "api.example.com"
assert FlexTemplatesServiceClient._get_default_mtls_endpoint(None) is None
assert (
FlexTemplatesServiceClient._get_default_mtls_endpoint(api_endpoint)
== api_mtls_endpoint
)
assert (
FlexTemplatesServiceClient._get_default_mtls_endpoint(api_mtls_endpoint)
== api_mtls_endpoint
)
assert (
FlexTemplatesServiceClient._get_default_mtls_endpoint(sandbox_endpoint)
== sandbox_mtls_endpoint
)
assert (
FlexTemplatesServiceClient._get_default_mtls_endpoint(sandbox_mtls_endpoint)
== sandbox_mtls_endpoint
)
assert (
FlexTemplatesServiceClient._get_default_mtls_endpoint(non_googleapi)
== non_googleapi
)
@pytest.mark.parametrize(
"client_class", [FlexTemplatesServiceClient, FlexTemplatesServiceAsyncClient,]
)
def test_flex_templates_service_client_from_service_account_info(client_class):
creds = ga_credentials.AnonymousCredentials()
with mock.patch.object(
service_account.Credentials, "from_service_account_info"
) as factory:
factory.return_value = creds
info = {"valid": True}
client = client_class.from_service_account_info(info)
assert client.transport._credentials == creds
assert isinstance(client, client_class)
assert client.transport._host == "dataflow.googleapis.com:443"
@pytest.mark.parametrize(
"transport_class,transport_name",
[
(transports.FlexTemplatesServiceGrpcTransport, "grpc"),
(transports.FlexTemplatesServiceGrpcAsyncIOTransport, "grpc_asyncio"),
],
)
def test_flex_templates_service_client_service_account_always_use_jwt(
transport_class, transport_name
):
with mock.patch.object(
service_account.Credentials, "with_always_use_jwt_access", create=True
) as use_jwt:
creds = service_account.Credentials(None, None, None)
transport = transport_class(credentials=creds, always_use_jwt_access=True)
use_jwt.assert_called_once_with(True)
with mock.patch.object(
service_account.Credentials, "with_always_use_jwt_access", create=True
) as use_jwt:
creds = service_account.Credentials(None, None, None)
transport = transport_class(credentials=creds, always_use_jwt_access=False)
use_jwt.assert_not_called()
@pytest.mark.parametrize(
"client_class", [FlexTemplatesServiceClient, FlexTemplatesServiceAsyncClient,]
)
def test_flex_templates_service_client_from_service_account_file(client_class):
creds = ga_credentials.AnonymousCredentials()
with mock.patch.object(
service_account.Credentials, "from_service_account_file"
) as factory:
factory.return_value = creds
client = client_class.from_service_account_file("dummy/file/path.json")
assert client.transport._credentials == creds
assert isinstance(client, client_class)
client = client_class.from_service_account_json("dummy/file/path.json")
assert client.transport._credentials == creds
assert isinstance(client, client_class)
assert client.transport._host == "dataflow.googleapis.com:443"
def test_flex_templates_service_client_get_transport_class():
transport = FlexTemplatesServiceClient.get_transport_class()
available_transports = [
transports.FlexTemplatesServiceGrpcTransport,
]
assert transport in available_transports
transport = FlexTemplatesServiceClient.get_transport_class("grpc")
assert transport == transports.FlexTemplatesServiceGrpcTransport
@pytest.mark.parametrize(
"client_class,transport_class,transport_name",
[
(
FlexTemplatesServiceClient,
transports.FlexTemplatesServiceGrpcTransport,
"grpc",
),
(
FlexTemplatesServiceAsyncClient,
transports.FlexTemplatesServiceGrpcAsyncIOTransport,
"grpc_asyncio",
),
],
)
@mock.patch.object(
FlexTemplatesServiceClient,
"DEFAULT_ENDPOINT",
modify_default_endpoint(FlexTemplatesServiceClient),
)
@mock.patch.object(
FlexTemplatesServiceAsyncClient,
"DEFAULT_ENDPOINT",
modify_default_endpoint(FlexTemplatesServiceAsyncClient),
)
def test_flex_templates_service_client_client_options(
client_class, transport_class, transport_name
):
# Check that if channel is provided we won't create a new one.
with mock.patch.object(FlexTemplatesServiceClient, "get_transport_class") as gtc:
transport = transport_class(credentials=ga_credentials.AnonymousCredentials())
client = client_class(transport=transport)
gtc.assert_not_called()
# Check that if channel is provided via str we will create a new one.
with mock.patch.object(FlexTemplatesServiceClient, "get_transport_class") as gtc:
client = client_class(transport=transport_name)
gtc.assert_called()
# Check the case api_endpoint is provided.
options = client_options.ClientOptions(api_endpoint="squid.clam.whelk")
with mock.patch.object(transport_class, "__init__") as patched:
patched.return_value = None
client = client_class(transport=transport_name, client_options=options)
patched.assert_called_once_with(
credentials=None,
credentials_file=None,
host="squid.clam.whelk",
scopes=None,
client_cert_source_for_mtls=None,
quota_project_id=None,
client_info=transports.base.DEFAULT_CLIENT_INFO,
always_use_jwt_access=True,
)
# Check the case api_endpoint is not provided and GOOGLE_API_USE_MTLS_ENDPOINT is
# "never".
with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "never"}):
with mock.patch.object(transport_class, "__init__") as patched:
patched.return_value = None
client = client_class(transport=transport_name)
patched.assert_called_once_with(
credentials=None,
credentials_file=None,
host=client.DEFAULT_ENDPOINT,
scopes=None,
client_cert_source_for_mtls=None,
quota_project_id=None,
client_info=transports.base.DEFAULT_CLIENT_INFO,
always_use_jwt_access=True,
)
# Check the case api_endpoint is not provided and GOOGLE_API_USE_MTLS_ENDPOINT is
# "always".
with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "always"}):
with mock.patch.object(transport_class, "__init__") as patched:
patched.return_value = None
client = client_class(transport=transport_name)
patched.assert_called_once_with(
credentials=None,
credentials_file=None,
host=client.DEFAULT_MTLS_ENDPOINT,
scopes=None,
client_cert_source_for_mtls=None,
quota_project_id=None,
client_info=transports.base.DEFAULT_CLIENT_INFO,
always_use_jwt_access=True,
)
# Check the case api_endpoint is not provided and GOOGLE_API_USE_MTLS_ENDPOINT has
# unsupported value.
with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "Unsupported"}):
with pytest.raises(MutualTLSChannelError):
client = client_class(transport=transport_name)
# Check the case GOOGLE_API_USE_CLIENT_CERTIFICATE has unsupported value.
with mock.patch.dict(
os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "Unsupported"}
):
with pytest.raises(ValueError):
client = client_class(transport=transport_name)
# Check the case quota_project_id is provided
options = client_options.ClientOptions(quota_project_id="octopus")
with mock.patch.object(transport_class, "__init__") as patched:
patched.return_value = None
client = client_class(client_options=options, transport=transport_name)
patched.assert_called_once_with(
credentials=None,
credentials_file=None,
host=client.DEFAULT_ENDPOINT,
scopes=None,
client_cert_source_for_mtls=None,
quota_project_id="octopus",
client_info=transports.base.DEFAULT_CLIENT_INFO,
always_use_jwt_access=True,
)
@pytest.mark.parametrize(
    "client_class,transport_class,transport_name,use_client_cert_env",
    [
        (
            FlexTemplatesServiceClient,
            transports.FlexTemplatesServiceGrpcTransport,
            "grpc",
            "true",
        ),
        (
            FlexTemplatesServiceAsyncClient,
            transports.FlexTemplatesServiceGrpcAsyncIOTransport,
            "grpc_asyncio",
            "true",
        ),
        (
            FlexTemplatesServiceClient,
            transports.FlexTemplatesServiceGrpcTransport,
            "grpc",
            "false",
        ),
        (
            FlexTemplatesServiceAsyncClient,
            transports.FlexTemplatesServiceGrpcAsyncIOTransport,
            "grpc_asyncio",
            "false",
        ),
    ],
)
@mock.patch.object(
    FlexTemplatesServiceClient,
    "DEFAULT_ENDPOINT",
    modify_default_endpoint(FlexTemplatesServiceClient),
)
@mock.patch.object(
    FlexTemplatesServiceAsyncClient,
    "DEFAULT_ENDPOINT",
    modify_default_endpoint(FlexTemplatesServiceAsyncClient),
)
@mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "auto"})
def test_flex_templates_service_client_mtls_env_auto(
    client_class, transport_class, transport_name, use_client_cert_env
):
    """Verify endpoint/cert autoswitching when GOOGLE_API_USE_MTLS_ENDPOINT=auto.

    Covers three scenarios: an explicit client_cert_source, an ADC-provided
    default client cert, and no cert at all, each crossed with
    GOOGLE_API_USE_CLIENT_CERTIFICATE "true"/"false".
    """
    # This tests the endpoint autoswitch behavior. Endpoint is autoswitched to the default
    # mtls endpoint, if GOOGLE_API_USE_CLIENT_CERTIFICATE is "true" and client cert exists.
    # Check the case client_cert_source is provided. Whether client cert is used depends on
    # GOOGLE_API_USE_CLIENT_CERTIFICATE value.
    with mock.patch.dict(
        os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": use_client_cert_env}
    ):
        options = client_options.ClientOptions(
            client_cert_source=client_cert_source_callback
        )
        with mock.patch.object(transport_class, "__init__") as patched:
            patched.return_value = None
            client = client_class(client_options=options, transport=transport_name)
            if use_client_cert_env == "false":
                expected_client_cert_source = None
                expected_host = client.DEFAULT_ENDPOINT
            else:
                expected_client_cert_source = client_cert_source_callback
                expected_host = client.DEFAULT_MTLS_ENDPOINT
            patched.assert_called_once_with(
                credentials=None,
                credentials_file=None,
                host=expected_host,
                scopes=None,
                client_cert_source_for_mtls=expected_client_cert_source,
                quota_project_id=None,
                client_info=transports.base.DEFAULT_CLIENT_INFO,
                always_use_jwt_access=True,
            )
    # Check the case ADC client cert is provided. Whether client cert is used depends on
    # GOOGLE_API_USE_CLIENT_CERTIFICATE value.
    with mock.patch.dict(
        os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": use_client_cert_env}
    ):
        with mock.patch.object(transport_class, "__init__") as patched:
            with mock.patch(
                "google.auth.transport.mtls.has_default_client_cert_source",
                return_value=True,
            ):
                with mock.patch(
                    "google.auth.transport.mtls.default_client_cert_source",
                    return_value=client_cert_source_callback,
                ):
                    # NOTE(review): ``client`` below is the instance leaked from the
                    # previous ``with`` block; DEFAULT_ENDPOINT / DEFAULT_MTLS_ENDPOINT
                    # are class-level attributes, so the lookup still resolves before
                    # the new client is created a few lines down.
                    if use_client_cert_env == "false":
                        expected_host = client.DEFAULT_ENDPOINT
                        expected_client_cert_source = None
                    else:
                        expected_host = client.DEFAULT_MTLS_ENDPOINT
                        expected_client_cert_source = client_cert_source_callback
                    patched.return_value = None
                    client = client_class(transport=transport_name)
                    patched.assert_called_once_with(
                        credentials=None,
                        credentials_file=None,
                        host=expected_host,
                        scopes=None,
                        client_cert_source_for_mtls=expected_client_cert_source,
                        quota_project_id=None,
                        client_info=transports.base.DEFAULT_CLIENT_INFO,
                        always_use_jwt_access=True,
                    )
    # Check the case client_cert_source and ADC client cert are not provided.
    with mock.patch.dict(
        os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": use_client_cert_env}
    ):
        with mock.patch.object(transport_class, "__init__") as patched:
            with mock.patch(
                "google.auth.transport.mtls.has_default_client_cert_source",
                return_value=False,
            ):
                patched.return_value = None
                client = client_class(transport=transport_name)
                patched.assert_called_once_with(
                    credentials=None,
                    credentials_file=None,
                    host=client.DEFAULT_ENDPOINT,
                    scopes=None,
                    client_cert_source_for_mtls=None,
                    quota_project_id=None,
                    client_info=transports.base.DEFAULT_CLIENT_INFO,
                    always_use_jwt_access=True,
                )
@pytest.mark.parametrize(
    "client_class", [FlexTemplatesServiceClient, FlexTemplatesServiceAsyncClient]
)
@mock.patch.object(
    FlexTemplatesServiceClient,
    "DEFAULT_ENDPOINT",
    modify_default_endpoint(FlexTemplatesServiceClient),
)
@mock.patch.object(
    FlexTemplatesServiceAsyncClient,
    "DEFAULT_ENDPOINT",
    modify_default_endpoint(FlexTemplatesServiceAsyncClient),
)
def test_flex_templates_service_client_get_mtls_endpoint_and_cert_source(client_class):
    """get_mtls_endpoint_and_cert_source honors the mTLS environment variables."""
    mock_client_cert_source = mock.Mock()

    # Test the case GOOGLE_API_USE_CLIENT_CERTIFICATE is "true".
    with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "true"}):
        mock_api_endpoint = "foo"
        options = client_options.ClientOptions(
            client_cert_source=mock_client_cert_source, api_endpoint=mock_api_endpoint
        )
        api_endpoint, cert_source = client_class.get_mtls_endpoint_and_cert_source(
            options
        )
        assert api_endpoint == mock_api_endpoint
        assert cert_source == mock_client_cert_source

    # Test the case GOOGLE_API_USE_CLIENT_CERTIFICATE is "false".
    with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "false"}):
        mock_client_cert_source = mock.Mock()
        mock_api_endpoint = "foo"
        options = client_options.ClientOptions(
            client_cert_source=mock_client_cert_source, api_endpoint=mock_api_endpoint
        )
        api_endpoint, cert_source = client_class.get_mtls_endpoint_and_cert_source(
            options
        )
        assert api_endpoint == mock_api_endpoint
        assert cert_source is None

    # Test the case GOOGLE_API_USE_MTLS_ENDPOINT is "never".
    with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "never"}):
        api_endpoint, cert_source = client_class.get_mtls_endpoint_and_cert_source()
        assert api_endpoint == client_class.DEFAULT_ENDPOINT
        assert cert_source is None

    # Test the case GOOGLE_API_USE_MTLS_ENDPOINT is "always".
    with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "always"}):
        api_endpoint, cert_source = client_class.get_mtls_endpoint_and_cert_source()
        assert api_endpoint == client_class.DEFAULT_MTLS_ENDPOINT
        assert cert_source is None

    # Test the case GOOGLE_API_USE_MTLS_ENDPOINT is "auto" and default cert doesn't exist.
    with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "true"}):
        with mock.patch(
            "google.auth.transport.mtls.has_default_client_cert_source",
            return_value=False,
        ):
            api_endpoint, cert_source = client_class.get_mtls_endpoint_and_cert_source()
            assert api_endpoint == client_class.DEFAULT_ENDPOINT
            assert cert_source is None

    # Test the case GOOGLE_API_USE_MTLS_ENDPOINT is "auto" and default cert exists.
    with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "true"}):
        with mock.patch(
            "google.auth.transport.mtls.has_default_client_cert_source",
            return_value=True,
        ):
            with mock.patch(
                "google.auth.transport.mtls.default_client_cert_source",
                return_value=mock_client_cert_source,
            ):
                (
                    api_endpoint,
                    cert_source,
                ) = client_class.get_mtls_endpoint_and_cert_source()
                assert api_endpoint == client_class.DEFAULT_MTLS_ENDPOINT
                assert cert_source == mock_client_cert_source
@pytest.mark.parametrize(
    "client_class,transport_class,transport_name",
    [
        (
            FlexTemplatesServiceClient,
            transports.FlexTemplatesServiceGrpcTransport,
            "grpc",
        ),
        (
            FlexTemplatesServiceAsyncClient,
            transports.FlexTemplatesServiceGrpcAsyncIOTransport,
            "grpc_asyncio",
        ),
    ],
)
def test_flex_templates_service_client_client_options_scopes(
    client_class, transport_class, transport_name
):
    """Scopes from ClientOptions are forwarded verbatim to the transport."""
    # Check the case scopes are provided.
    options = client_options.ClientOptions(scopes=["1", "2"],)
    with mock.patch.object(transport_class, "__init__") as patched:
        patched.return_value = None
        client = client_class(client_options=options, transport=transport_name)
        patched.assert_called_once_with(
            credentials=None,
            credentials_file=None,
            host=client.DEFAULT_ENDPOINT,
            scopes=["1", "2"],
            client_cert_source_for_mtls=None,
            quota_project_id=None,
            client_info=transports.base.DEFAULT_CLIENT_INFO,
            always_use_jwt_access=True,
        )
@pytest.mark.parametrize(
    "client_class,transport_class,transport_name,grpc_helpers",
    [
        (
            FlexTemplatesServiceClient,
            transports.FlexTemplatesServiceGrpcTransport,
            "grpc",
            grpc_helpers,
        ),
        (
            FlexTemplatesServiceAsyncClient,
            transports.FlexTemplatesServiceGrpcAsyncIOTransport,
            "grpc_asyncio",
            grpc_helpers_async,
        ),
    ],
)
def test_flex_templates_service_client_client_options_credentials_file(
    client_class, transport_class, transport_name, grpc_helpers
):
    """A credentials_file in ClientOptions is forwarded to the transport."""
    # Check the case credentials file is provided.
    options = client_options.ClientOptions(credentials_file="credentials.json")

    with mock.patch.object(transport_class, "__init__") as patched:
        patched.return_value = None
        client = client_class(client_options=options, transport=transport_name)
        patched.assert_called_once_with(
            credentials=None,
            credentials_file="credentials.json",
            host=client.DEFAULT_ENDPOINT,
            scopes=None,
            client_cert_source_for_mtls=None,
            quota_project_id=None,
            client_info=transports.base.DEFAULT_CLIENT_INFO,
            always_use_jwt_access=True,
        )
def test_flex_templates_service_client_client_options_from_dict():
    """ClientOptions given as a plain dict configure the transport endpoint."""
    with mock.patch(
        "google.cloud.dataflow_v1beta3.services.flex_templates_service.transports.FlexTemplatesServiceGrpcTransport.__init__"
    ) as grpc_transport:
        grpc_transport.return_value = None
        client = FlexTemplatesServiceClient(
            client_options={"api_endpoint": "squid.clam.whelk"}
        )
        grpc_transport.assert_called_once_with(
            credentials=None,
            credentials_file=None,
            host="squid.clam.whelk",
            scopes=None,
            client_cert_source_for_mtls=None,
            quota_project_id=None,
            client_info=transports.base.DEFAULT_CLIENT_INFO,
            always_use_jwt_access=True,
        )
@pytest.mark.parametrize(
    "client_class,transport_class,transport_name,grpc_helpers",
    [
        (
            FlexTemplatesServiceClient,
            transports.FlexTemplatesServiceGrpcTransport,
            "grpc",
            grpc_helpers,
        ),
        (
            FlexTemplatesServiceAsyncClient,
            transports.FlexTemplatesServiceGrpcAsyncIOTransport,
            "grpc_asyncio",
            grpc_helpers_async,
        ),
    ],
)
def test_flex_templates_service_client_create_channel_credentials_file(
    client_class, transport_class, transport_name, grpc_helpers
):
    """Credentials loaded from file are the ones used to create the channel."""
    # Check the case credentials file is provided.
    options = client_options.ClientOptions(credentials_file="credentials.json")

    with mock.patch.object(transport_class, "__init__") as patched:
        patched.return_value = None
        client = client_class(client_options=options, transport=transport_name)
        patched.assert_called_once_with(
            credentials=None,
            credentials_file="credentials.json",
            host=client.DEFAULT_ENDPOINT,
            scopes=None,
            client_cert_source_for_mtls=None,
            quota_project_id=None,
            client_info=transports.base.DEFAULT_CLIENT_INFO,
            always_use_jwt_access=True,
        )

    # test that the credentials from file are saved and used as the credentials.
    with mock.patch.object(
        google.auth, "load_credentials_from_file", autospec=True
    ) as load_creds, mock.patch.object(
        google.auth, "default", autospec=True
    ) as adc, mock.patch.object(
        grpc_helpers, "create_channel"
    ) as create_channel:
        creds = ga_credentials.AnonymousCredentials()
        file_creds = ga_credentials.AnonymousCredentials()
        load_creds.return_value = (file_creds, None)
        adc.return_value = (creds, None)
        client = client_class(client_options=options, transport=transport_name)
        create_channel.assert_called_with(
            "dataflow.googleapis.com:443",
            credentials=file_creds,
            credentials_file=None,
            quota_project_id=None,
            default_scopes=(
                "https://www.googleapis.com/auth/cloud-platform",
                "https://www.googleapis.com/auth/compute",
                "https://www.googleapis.com/auth/compute.readonly",
                "https://www.googleapis.com/auth/userinfo.email",
            ),
            scopes=None,
            default_host="dataflow.googleapis.com",
            ssl_credentials=None,
            options=[
                ("grpc.max_send_message_length", -1),
                ("grpc.max_receive_message_length", -1),
            ],
        )
@pytest.mark.parametrize("request_type", [templates.LaunchFlexTemplateRequest, dict,])
def test_launch_flex_template(request_type, transport: str = "grpc"):
    """launch_flex_template forwards the request and returns the stub's response."""
    client = FlexTemplatesServiceClient(
        credentials=ga_credentials.AnonymousCredentials(), transport=transport,
    )

    # Everything is optional in proto3 as far as the runtime is concerned,
    # and we are mocking out the actual API, so just send an empty request.
    request = request_type()

    # Mock the actual call within the gRPC stub, and fake the request.
    with mock.patch.object(
        type(client.transport.launch_flex_template), "__call__"
    ) as call:
        # Designate an appropriate return value for the call.
        call.return_value = templates.LaunchFlexTemplateResponse()
        response = client.launch_flex_template(request)

        # Establish that the underlying gRPC stub method was called.
        assert len(call.mock_calls) == 1
        _, args, _ = call.mock_calls[0]
        assert args[0] == templates.LaunchFlexTemplateRequest()

    # Establish that the response is the type that we expect.
    assert isinstance(response, templates.LaunchFlexTemplateResponse)
def test_launch_flex_template_empty_call():
    """Calling launch_flex_template with no arguments sends a default request."""
    # This test is a coverage failsafe to make sure that totally empty calls,
    # i.e. request == None and no flattened fields passed, work.
    client = FlexTemplatesServiceClient(
        credentials=ga_credentials.AnonymousCredentials(), transport="grpc",
    )

    # Mock the actual call within the gRPC stub, and fake the request.
    with mock.patch.object(
        type(client.transport.launch_flex_template), "__call__"
    ) as call:
        client.launch_flex_template()
        call.assert_called()
        _, args, _ = call.mock_calls[0]
        assert args[0] == templates.LaunchFlexTemplateRequest()
@pytest.mark.asyncio
async def test_launch_flex_template_async(
    transport: str = "grpc_asyncio", request_type=templates.LaunchFlexTemplateRequest
):
    """Async launch_flex_template forwards the request and awaits the response."""
    client = FlexTemplatesServiceAsyncClient(
        credentials=ga_credentials.AnonymousCredentials(), transport=transport,
    )

    # Everything is optional in proto3 as far as the runtime is concerned,
    # and we are mocking out the actual API, so just send an empty request.
    request = request_type()

    # Mock the actual call within the gRPC stub, and fake the request.
    with mock.patch.object(
        type(client.transport.launch_flex_template), "__call__"
    ) as call:
        # Designate an appropriate return value for the call.
        call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(
            templates.LaunchFlexTemplateResponse()
        )
        response = await client.launch_flex_template(request)

        # Establish that the underlying gRPC stub method was called.
        assert len(call.mock_calls)
        _, args, _ = call.mock_calls[0]
        assert args[0] == templates.LaunchFlexTemplateRequest()

    # Establish that the response is the type that we expect.
    assert isinstance(response, templates.LaunchFlexTemplateResponse)
@pytest.mark.asyncio
async def test_launch_flex_template_async_from_dict():
    """Exercise the async path with a plain dict request body."""
    await test_launch_flex_template_async(request_type=dict)
def test_credentials_transport_error():
    """Mutually exclusive constructor arguments must raise ValueError."""
    # It is an error to provide credentials and a transport instance.
    transport = transports.FlexTemplatesServiceGrpcTransport(
        credentials=ga_credentials.AnonymousCredentials(),
    )
    with pytest.raises(ValueError):
        client = FlexTemplatesServiceClient(
            credentials=ga_credentials.AnonymousCredentials(), transport=transport,
        )

    # It is an error to provide a credentials file and a transport instance.
    transport = transports.FlexTemplatesServiceGrpcTransport(
        credentials=ga_credentials.AnonymousCredentials(),
    )
    with pytest.raises(ValueError):
        client = FlexTemplatesServiceClient(
            client_options={"credentials_file": "credentials.json"},
            transport=transport,
        )

    # It is an error to provide an api_key and a transport instance.
    transport = transports.FlexTemplatesServiceGrpcTransport(
        credentials=ga_credentials.AnonymousCredentials(),
    )
    options = client_options.ClientOptions()
    options.api_key = "api_key"
    with pytest.raises(ValueError):
        client = FlexTemplatesServiceClient(
            client_options=options, transport=transport,
        )

    # It is an error to provide an api_key and a credential.
    options = mock.Mock()
    options.api_key = "api_key"
    with pytest.raises(ValueError):
        client = FlexTemplatesServiceClient(
            client_options=options, credentials=ga_credentials.AnonymousCredentials()
        )

    # It is an error to provide scopes and a transport instance.
    transport = transports.FlexTemplatesServiceGrpcTransport(
        credentials=ga_credentials.AnonymousCredentials(),
    )
    with pytest.raises(ValueError):
        client = FlexTemplatesServiceClient(
            client_options={"scopes": ["1", "2"]}, transport=transport,
        )
def test_transport_instance():
    """A pre-built transport instance is adopted verbatim by the client."""
    custom_transport = transports.FlexTemplatesServiceGrpcTransport(
        credentials=ga_credentials.AnonymousCredentials(),
    )
    wrapped = FlexTemplatesServiceClient(transport=custom_transport)
    assert wrapped.transport is custom_transport
def test_transport_get_channel():
    """Both the sync and async gRPC transports expose a usable channel."""
    for transport_cls in (
        transports.FlexTemplatesServiceGrpcTransport,
        transports.FlexTemplatesServiceGrpcAsyncIOTransport,
    ):
        transport = transport_cls(
            credentials=ga_credentials.AnonymousCredentials(),
        )
        assert transport.grpc_channel
@pytest.mark.parametrize(
    "transport_class",
    [
        transports.FlexTemplatesServiceGrpcTransport,
        transports.FlexTemplatesServiceGrpcAsyncIOTransport,
    ],
)
def test_transport_adc(transport_class):
    """Transports fall back to Application Default Credentials when none given."""
    # Test default credentials are used if not provided.
    with mock.patch.object(google.auth, "default") as adc:
        adc.return_value = (ga_credentials.AnonymousCredentials(), None)
        transport_class()
        adc.assert_called_once()
def test_transport_grpc_default():
    """When no transport is specified the client falls back to gRPC."""
    client = FlexTemplatesServiceClient(
        credentials=ga_credentials.AnonymousCredentials(),
    )
    default_transport = client.transport
    assert isinstance(default_transport, transports.FlexTemplatesServiceGrpcTransport)
def test_flex_templates_service_base_transport_error():
    """Supplying both credentials and a credentials file is rejected."""
    # Passing both a credentials object and credentials_file should raise an error
    with pytest.raises(core_exceptions.DuplicateCredentialArgs):
        transport = transports.FlexTemplatesServiceTransport(
            credentials=ga_credentials.AnonymousCredentials(),
            credentials_file="credentials.json",
        )
def test_flex_templates_service_base_transport():
    """The abstract base transport raises NotImplementedError for every RPC."""
    # Instantiate the base transport.
    with mock.patch(
        "google.cloud.dataflow_v1beta3.services.flex_templates_service.transports.FlexTemplatesServiceTransport.__init__"
    ) as Transport:
        Transport.return_value = None
        transport = transports.FlexTemplatesServiceTransport(
            credentials=ga_credentials.AnonymousCredentials(),
        )

    # Every method on the transport should just blindly
    # raise NotImplementedError.
    methods = ("launch_flex_template",)
    for method in methods:
        with pytest.raises(NotImplementedError):
            getattr(transport, method)(request=object())

    with pytest.raises(NotImplementedError):
        transport.close()
def test_flex_templates_service_base_transport_with_credentials_file():
    """A credentials file is loaded with the service's default scopes."""
    # Instantiate the base transport with a credentials file
    with mock.patch.object(
        google.auth, "load_credentials_from_file", autospec=True
    ) as load_creds, mock.patch(
        "google.cloud.dataflow_v1beta3.services.flex_templates_service.transports.FlexTemplatesServiceTransport._prep_wrapped_messages"
    ) as Transport:
        Transport.return_value = None
        load_creds.return_value = (ga_credentials.AnonymousCredentials(), None)
        transport = transports.FlexTemplatesServiceTransport(
            credentials_file="credentials.json", quota_project_id="octopus",
        )
        load_creds.assert_called_once_with(
            "credentials.json",
            scopes=None,
            default_scopes=(
                "https://www.googleapis.com/auth/cloud-platform",
                "https://www.googleapis.com/auth/compute",
                "https://www.googleapis.com/auth/compute.readonly",
                "https://www.googleapis.com/auth/userinfo.email",
            ),
            quota_project_id="octopus",
        )
def test_flex_templates_service_base_transport_with_adc():
    """The base transport resolves ADC when no explicit credentials are given."""
    # Test the default credentials are used if credentials and credentials_file are None.
    with mock.patch.object(google.auth, "default", autospec=True) as adc, mock.patch(
        "google.cloud.dataflow_v1beta3.services.flex_templates_service.transports.FlexTemplatesServiceTransport._prep_wrapped_messages"
    ) as Transport:
        Transport.return_value = None
        adc.return_value = (ga_credentials.AnonymousCredentials(), None)
        transport = transports.FlexTemplatesServiceTransport()
        adc.assert_called_once()
def test_flex_templates_service_auth_adc():
    """The client requests ADC with the service's default scopes."""
    # If no credentials are provided, we should use ADC credentials.
    with mock.patch.object(google.auth, "default", autospec=True) as adc:
        adc.return_value = (ga_credentials.AnonymousCredentials(), None)
        FlexTemplatesServiceClient()
        adc.assert_called_once_with(
            scopes=None,
            default_scopes=(
                "https://www.googleapis.com/auth/cloud-platform",
                "https://www.googleapis.com/auth/compute",
                "https://www.googleapis.com/auth/compute.readonly",
                "https://www.googleapis.com/auth/userinfo.email",
            ),
            quota_project_id=None,
        )
@pytest.mark.parametrize(
    "transport_class",
    [
        transports.FlexTemplatesServiceGrpcTransport,
        transports.FlexTemplatesServiceGrpcAsyncIOTransport,
    ],
)
def test_flex_templates_service_transport_auth_adc(transport_class):
    """Transports pass explicit scopes and quota project through to ADC."""
    # If credentials and host are not provided, the transport class should use
    # ADC credentials.
    with mock.patch.object(google.auth, "default", autospec=True) as adc:
        adc.return_value = (ga_credentials.AnonymousCredentials(), None)
        transport_class(quota_project_id="octopus", scopes=["1", "2"])
        adc.assert_called_once_with(
            scopes=["1", "2"],
            default_scopes=(
                "https://www.googleapis.com/auth/cloud-platform",
                "https://www.googleapis.com/auth/compute",
                "https://www.googleapis.com/auth/compute.readonly",
                "https://www.googleapis.com/auth/userinfo.email",
            ),
            quota_project_id="octopus",
        )
@pytest.mark.parametrize(
    "transport_class,grpc_helpers",
    [
        (transports.FlexTemplatesServiceGrpcTransport, grpc_helpers),
        (transports.FlexTemplatesServiceGrpcAsyncIOTransport, grpc_helpers_async),
    ],
)
def test_flex_templates_service_transport_create_channel(transport_class, grpc_helpers):
    """Channel creation receives host, ADC credentials, scopes and gRPC options."""
    # If credentials and host are not provided, the transport class should use
    # ADC credentials.
    with mock.patch.object(
        google.auth, "default", autospec=True
    ) as adc, mock.patch.object(
        grpc_helpers, "create_channel", autospec=True
    ) as create_channel:
        creds = ga_credentials.AnonymousCredentials()
        adc.return_value = (creds, None)
        transport_class(quota_project_id="octopus", scopes=["1", "2"])

        create_channel.assert_called_with(
            "dataflow.googleapis.com:443",
            credentials=creds,
            credentials_file=None,
            quota_project_id="octopus",
            default_scopes=(
                "https://www.googleapis.com/auth/cloud-platform",
                "https://www.googleapis.com/auth/compute",
                "https://www.googleapis.com/auth/compute.readonly",
                "https://www.googleapis.com/auth/userinfo.email",
            ),
            scopes=["1", "2"],
            default_host="dataflow.googleapis.com",
            ssl_credentials=None,
            options=[
                ("grpc.max_send_message_length", -1),
                ("grpc.max_receive_message_length", -1),
            ],
        )
@pytest.mark.parametrize(
    "transport_class",
    [
        transports.FlexTemplatesServiceGrpcTransport,
        transports.FlexTemplatesServiceGrpcAsyncIOTransport,
    ],
)
def test_flex_templates_service_grpc_transport_client_cert_source_for_mtls(
    transport_class,
):
    """ssl_channel_credentials wins; otherwise client_cert_source_for_mtls is used."""
    cred = ga_credentials.AnonymousCredentials()

    # Check ssl_channel_credentials is used if provided.
    with mock.patch.object(transport_class, "create_channel") as mock_create_channel:
        mock_ssl_channel_creds = mock.Mock()
        transport_class(
            host="squid.clam.whelk",
            credentials=cred,
            ssl_channel_credentials=mock_ssl_channel_creds,
        )
        mock_create_channel.assert_called_once_with(
            "squid.clam.whelk:443",
            credentials=cred,
            credentials_file=None,
            scopes=None,
            ssl_credentials=mock_ssl_channel_creds,
            quota_project_id=None,
            options=[
                ("grpc.max_send_message_length", -1),
                ("grpc.max_receive_message_length", -1),
            ],
        )

    # Check if ssl_channel_credentials is not provided, then client_cert_source_for_mtls
    # is used.
    with mock.patch.object(transport_class, "create_channel", return_value=mock.Mock()):
        with mock.patch("grpc.ssl_channel_credentials") as mock_ssl_cred:
            transport_class(
                credentials=cred,
                client_cert_source_for_mtls=client_cert_source_callback,
            )
            expected_cert, expected_key = client_cert_source_callback()
            mock_ssl_cred.assert_called_once_with(
                certificate_chain=expected_cert, private_key=expected_key
            )
def test_flex_templates_service_host_no_port():
    """An endpoint given without a port defaults to :443."""
    opts = client_options.ClientOptions(api_endpoint="dataflow.googleapis.com")
    client = FlexTemplatesServiceClient(
        credentials=ga_credentials.AnonymousCredentials(), client_options=opts,
    )
    assert client.transport._host == "dataflow.googleapis.com:443"
def test_flex_templates_service_host_with_port():
    """An explicit port in the endpoint is preserved as-is."""
    opts = client_options.ClientOptions(api_endpoint="dataflow.googleapis.com:8000")
    client = FlexTemplatesServiceClient(
        credentials=ga_credentials.AnonymousCredentials(), client_options=opts,
    )
    assert client.transport._host == "dataflow.googleapis.com:8000"
def test_flex_templates_service_grpc_transport_channel():
    """A caller-supplied channel is adopted verbatim by the sync gRPC transport."""
    channel = grpc.secure_channel("http://localhost/", grpc.local_channel_credentials())

    # Check that channel is used if provided.
    transport = transports.FlexTemplatesServiceGrpcTransport(
        host="squid.clam.whelk", channel=channel,
    )
    assert transport.grpc_channel == channel
    assert transport._host == "squid.clam.whelk:443"
    # PEP 8: compare to None by identity, not equality (fixes E711).
    assert transport._ssl_channel_credentials is None
def test_flex_templates_service_grpc_asyncio_transport_channel():
    """A caller-supplied channel is adopted verbatim by the async gRPC transport."""
    channel = aio.secure_channel("http://localhost/", grpc.local_channel_credentials())

    # Check that channel is used if provided.
    transport = transports.FlexTemplatesServiceGrpcAsyncIOTransport(
        host="squid.clam.whelk", channel=channel,
    )
    assert transport.grpc_channel == channel
    assert transport._host == "squid.clam.whelk:443"
    # PEP 8: compare to None by identity, not equality (fixes E711).
    assert transport._ssl_channel_credentials is None
# Remove this test when deprecated arguments (api_mtls_endpoint, client_cert_source) are
# removed from grpc/grpc_asyncio transport constructor.
@pytest.mark.parametrize(
    "transport_class",
    [
        transports.FlexTemplatesServiceGrpcTransport,
        transports.FlexTemplatesServiceGrpcAsyncIOTransport,
    ],
)
def test_flex_templates_service_transport_channel_mtls_with_client_cert_source(
    transport_class,
):
    """Deprecated api_mtls_endpoint/client_cert_source still build an mTLS channel."""
    with mock.patch(
        "grpc.ssl_channel_credentials", autospec=True
    ) as grpc_ssl_channel_cred:
        with mock.patch.object(
            transport_class, "create_channel"
        ) as grpc_create_channel:
            mock_ssl_cred = mock.Mock()
            grpc_ssl_channel_cred.return_value = mock_ssl_cred

            mock_grpc_channel = mock.Mock()
            grpc_create_channel.return_value = mock_grpc_channel

            cred = ga_credentials.AnonymousCredentials()
            with pytest.warns(DeprecationWarning):
                with mock.patch.object(google.auth, "default") as adc:
                    adc.return_value = (cred, None)
                    transport = transport_class(
                        host="squid.clam.whelk",
                        api_mtls_endpoint="mtls.squid.clam.whelk",
                        client_cert_source=client_cert_source_callback,
                    )
                    adc.assert_called_once()

            grpc_ssl_channel_cred.assert_called_once_with(
                certificate_chain=b"cert bytes", private_key=b"key bytes"
            )
            grpc_create_channel.assert_called_once_with(
                "mtls.squid.clam.whelk:443",
                credentials=cred,
                credentials_file=None,
                scopes=None,
                ssl_credentials=mock_ssl_cred,
                quota_project_id=None,
                options=[
                    ("grpc.max_send_message_length", -1),
                    ("grpc.max_receive_message_length", -1),
                ],
            )
            assert transport.grpc_channel == mock_grpc_channel
            assert transport._ssl_channel_credentials == mock_ssl_cred
# Remove this test when deprecated arguments (api_mtls_endpoint, client_cert_source) are
# removed from grpc/grpc_asyncio transport constructor.
@pytest.mark.parametrize(
    "transport_class",
    [
        transports.FlexTemplatesServiceGrpcTransport,
        transports.FlexTemplatesServiceGrpcAsyncIOTransport,
    ],
)
def test_flex_templates_service_transport_channel_mtls_with_adc(transport_class):
    """Deprecated api_mtls_endpoint path uses ADC-derived SSL credentials."""
    mock_ssl_cred = mock.Mock()
    with mock.patch.multiple(
        "google.auth.transport.grpc.SslCredentials",
        __init__=mock.Mock(return_value=None),
        ssl_credentials=mock.PropertyMock(return_value=mock_ssl_cred),
    ):
        with mock.patch.object(
            transport_class, "create_channel"
        ) as grpc_create_channel:
            mock_grpc_channel = mock.Mock()
            grpc_create_channel.return_value = mock_grpc_channel
            mock_cred = mock.Mock()

            with pytest.warns(DeprecationWarning):
                transport = transport_class(
                    host="squid.clam.whelk",
                    credentials=mock_cred,
                    api_mtls_endpoint="mtls.squid.clam.whelk",
                    client_cert_source=None,
                )

            grpc_create_channel.assert_called_once_with(
                "mtls.squid.clam.whelk:443",
                credentials=mock_cred,
                credentials_file=None,
                scopes=None,
                ssl_credentials=mock_ssl_cred,
                quota_project_id=None,
                options=[
                    ("grpc.max_send_message_length", -1),
                    ("grpc.max_receive_message_length", -1),
                ],
            )
            assert transport.grpc_channel == mock_grpc_channel
def test_common_billing_account_path():
    """common_billing_account_path formats the billing-account resource name."""
    assert (
        FlexTemplatesServiceClient.common_billing_account_path("squid")
        == "billingAccounts/squid"
    )
def test_parse_common_billing_account_path():
    """parse_common_billing_account_path inverts common_billing_account_path."""
    expected = {"billing_account": "clam"}
    path = FlexTemplatesServiceClient.common_billing_account_path(**expected)
    # Check that the path construction is reversible.
    assert FlexTemplatesServiceClient.parse_common_billing_account_path(path) == expected
def test_common_folder_path():
    """common_folder_path formats the folder resource name."""
    assert FlexTemplatesServiceClient.common_folder_path("whelk") == "folders/whelk"
def test_parse_common_folder_path():
    """parse_common_folder_path inverts common_folder_path."""
    expected = {"folder": "octopus"}
    path = FlexTemplatesServiceClient.common_folder_path(**expected)
    # Check that the path construction is reversible.
    assert FlexTemplatesServiceClient.parse_common_folder_path(path) == expected
def test_common_organization_path():
    """common_organization_path formats the organization resource name."""
    assert (
        FlexTemplatesServiceClient.common_organization_path("oyster")
        == "organizations/oyster"
    )
def test_parse_common_organization_path():
    """parse_common_organization_path inverts common_organization_path."""
    expected = {"organization": "nudibranch"}
    path = FlexTemplatesServiceClient.common_organization_path(**expected)
    # Check that the path construction is reversible.
    assert FlexTemplatesServiceClient.parse_common_organization_path(path) == expected
def test_common_project_path():
    """common_project_path formats the project resource name."""
    assert (
        FlexTemplatesServiceClient.common_project_path("cuttlefish")
        == "projects/cuttlefish"
    )
def test_parse_common_project_path():
    """parse_common_project_path inverts common_project_path."""
    expected = {"project": "mussel"}
    path = FlexTemplatesServiceClient.common_project_path(**expected)
    # Check that the path construction is reversible.
    assert FlexTemplatesServiceClient.parse_common_project_path(path) == expected
def test_common_location_path():
    """common_location_path joins project and location into a resource name."""
    assert (
        FlexTemplatesServiceClient.common_location_path("winkle", "nautilus")
        == "projects/winkle/locations/nautilus"
    )
def test_parse_common_location_path():
    """parse_common_location_path inverts common_location_path."""
    expected = {"project": "scallop", "location": "abalone"}
    path = FlexTemplatesServiceClient.common_location_path(**expected)
    # Check that the path construction is reversible.
    assert FlexTemplatesServiceClient.parse_common_location_path(path) == expected
def test_client_with_default_client_info():
    """client_info reaches _prep_wrapped_messages for both client and transport."""
    client_info = gapic_v1.client_info.ClientInfo()

    with mock.patch.object(
        transports.FlexTemplatesServiceTransport, "_prep_wrapped_messages"
    ) as prep:
        client = FlexTemplatesServiceClient(
            credentials=ga_credentials.AnonymousCredentials(), client_info=client_info,
        )
        prep.assert_called_once_with(client_info)

    with mock.patch.object(
        transports.FlexTemplatesServiceTransport, "_prep_wrapped_messages"
    ) as prep:
        transport_class = FlexTemplatesServiceClient.get_transport_class()
        transport = transport_class(
            credentials=ga_credentials.AnonymousCredentials(), client_info=client_info,
        )
        prep.assert_called_once_with(client_info)
@pytest.mark.asyncio
async def test_transport_close_async():
    """Leaving the async client context closes the underlying channel."""
    client = FlexTemplatesServiceAsyncClient(
        credentials=ga_credentials.AnonymousCredentials(), transport="grpc_asyncio",
    )
    with mock.patch.object(
        type(getattr(client.transport, "grpc_channel")), "close"
    ) as close:
        async with client:
            close.assert_not_called()
        close.assert_called_once()
def test_transport_close():
    """Leaving the sync client context closes the transport channel."""
    # NOTE: this local dict shadows the module-level ``transports`` import
    # within this function; only the dict is meant here.
    transports = {
        "grpc": "_grpc_channel",
    }

    for transport, close_name in transports.items():
        client = FlexTemplatesServiceClient(
            credentials=ga_credentials.AnonymousCredentials(), transport=transport
        )
        with mock.patch.object(
            type(getattr(client.transport, close_name)), "close"
        ) as close:
            with client:
                close.assert_not_called()
            close.assert_called_once()
def test_client_ctx():
    """The client context manager delegates close() to its transport."""
    transports = [
        "grpc",
    ]
    for transport in transports:
        client = FlexTemplatesServiceClient(
            credentials=ga_credentials.AnonymousCredentials(), transport=transport
        )
        # Test client calls underlying transport.
        with mock.patch.object(type(client.transport), "close") as close:
            close.assert_not_called()
            with client:
                pass
            close.assert_called()
@pytest.mark.parametrize(
    "client_class,transport_class",
    [
        (FlexTemplatesServiceClient, transports.FlexTemplatesServiceGrpcTransport),
        (
            FlexTemplatesServiceAsyncClient,
            transports.FlexTemplatesServiceGrpcAsyncIOTransport,
        ),
    ],
)
def test_api_key_credentials(client_class, transport_class):
    """An api_key in ClientOptions is exchanged for API-key credentials."""
    with mock.patch.object(
        google.auth._default, "get_api_key_credentials", create=True
    ) as get_api_key_credentials:
        mock_cred = mock.Mock()
        get_api_key_credentials.return_value = mock_cred
        options = client_options.ClientOptions()
        options.api_key = "api_key"
        with mock.patch.object(transport_class, "__init__") as patched:
            patched.return_value = None
            client = client_class(client_options=options)
            patched.assert_called_once_with(
                credentials=mock_cred,
                credentials_file=None,
                host=client.DEFAULT_ENDPOINT,
                scopes=None,
                client_cert_source_for_mtls=None,
                quota_project_id=None,
                client_info=transports.base.DEFAULT_CLIENT_INFO,
                always_use_jwt_access=True,
            )
|
googleapis/python-dataflow-client
|
tests/unit/gapic/dataflow_v1beta3/test_flex_templates_service.py
|
Python
|
apache-2.0
| 52,176
|
[
"Octopus"
] |
164a83dd17b34eb58c7cb92875e0d6650f39989e623d9225d83ad95a8be77b38
|
#!/usr/bin/env python
# Copyright 2014-2018 The PySCF Developers. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
import unittest
import numpy
from pyscf import lib
from pyscf import gto
from pyscf import dft
from pyscf.dft import gen_grid
from pyscf.dft import radi
# Module-level water-molecule fixture shared by every test below;
# torn down in tearDownModule().
h2o = gto.Mole()
h2o.verbose = 5
h2o.output = '/dev/null'  # discard build/log output during the tests
h2o.atom.extend([
    ["O" , (0. , 0. , 0.)],
    [1 , (0. , -0.757 , 0.587)],
    [1 , (0. , 0.757 , 0.587)] ])
h2o.basis = {"H": '6-31g',
             "O": '6-31g',}
h2o.build()
def tearDownModule():
    """Close the fixture's /dev/null log stream and drop the shared molecule."""
    global h2o
    h2o.stdout.close()
    del h2o
class KnownValues(unittest.TestCase):
    """Regression tests pinning DFT grid coordinates/weights to known reference values.

    Each test reconfigures the same Grids object several times in sequence and
    compares norms/fingerprints of the result against hard-coded references, so
    statement order within a test is significant.
    """

    def test_gen_grid(self):
        # Exercise build() under several Becke-scheme / radii-adjust combinations.
        grid = gen_grid.Grids(h2o)
        grid.prune = None
        grid.radi_method = radi.gauss_chebyshev
        grid.becke_scheme = gen_grid.original_becke
        grid.radii_adjust = radi.becke_atomic_radii_adjust
        grid.atomic_radii = radi.BRAGG_RADII
        grid.atom_grid = {"H": (10, 50), "O": (10, 50),}
        grid.build(with_non0tab=False)
        self.assertAlmostEqual(numpy.linalg.norm(grid.coords), 185.91245945279027, 9)
        self.assertAlmostEqual(numpy.linalg.norm(grid.weights), 1720.1317185648893, 8)
        # Same grid, different partitioning scheme: only the weights change.
        grid.becke_scheme = gen_grid.stratmann
        grid.build(with_non0tab=False)
        self.assertAlmostEqual(numpy.linalg.norm(grid.weights), 1730.3692983091271, 8)
        # Per-atom grid given only for O; kernel() is the build entry point here.
        grid.atom_grid = {"O": (10, 50),}
        grid.radii_adjust = None
        grid.becke_scheme = gen_grid.stratmann
        grid.kernel(with_non0tab=False)
        self.assertAlmostEqual(numpy.linalg.norm(grid.weights), 2559.0064040257907, 8)
        # A plain (radial, angular) tuple applies to all elements.
        grid.atom_grid = (10, 11)
        grid.becke_scheme = gen_grid.original_becke
        grid.radii_adjust = None
        grid.build(with_non0tab=False)
        self.assertAlmostEqual(numpy.linalg.norm(grid.weights), 1712.3069450297105, 8)

    def test_radi(self):
        # Compare the available radial-grid generation methods.
        grid = gen_grid.Grids(h2o)
        grid.prune = None
        grid.radii_adjust = radi.becke_atomic_radii_adjust
        grid.atomic_radii = radi.COVALENT_RADII
        grid.radi_method = radi.mura_knowles
        grid.atom_grid = {"H": (10, 50), "O": (10, 50),}
        grid.build(with_non0tab=False)
        self.assertAlmostEqual(numpy.linalg.norm(grid.weights), 1804.5437331817291, 9)
        grid.radi_method = radi.delley
        grid.build(with_non0tab=False)
        self.assertAlmostEqual(numpy.linalg.norm(grid.weights), 1686.3482864673697, 9)
        grid.radi_method = radi.becke
        grid.build(with_non0tab=False)
        self.assertAlmostEqual(lib.fp(grid.weights), 2486249.209827192, 7)

    def test_prune(self):
        # Pruning schemes reduce the number of angular points near the nuclei.
        grid = gen_grid.Grids(h2o)
        grid.prune = gen_grid.sg1_prune
        grid.atom_grid = {"H": (10, 50), "O": (10, 50),}
        grid.build(with_non0tab=False)
        self.assertAlmostEqual(numpy.linalg.norm(grid.coords), 202.17732600266302, 9)
        self.assertAlmostEqual(numpy.linalg.norm(grid.weights), 442.54536463517167, 9)
        grid.prune = gen_grid.nwchem_prune
        grid.build(with_non0tab=False)
        self.assertAlmostEqual(numpy.linalg.norm(grid.coords), 149.55023044392638, 9)
        self.assertAlmostEqual(numpy.linalg.norm(grid.weights), 586.36841824004455, 9)
        # Direct calls to the prune functions for sulfur (z=16).
        z = 16
        rad, dr = radi.gauss_chebyshev(50)
        angs = gen_grid.sg1_prune(z, rad, 434, radii=radi.SG1RADII)
        self.assertAlmostEqual(lib.fp(angs), -291.0794420982329, 9)
        angs = gen_grid.nwchem_prune(z, rad, 434, radii=radi.BRAGG_RADII)
        self.assertAlmostEqual(lib.fp(angs), -180.12023039394498, 9)
        # With only 26 angular points requested, pruning must leave them intact.
        angs = gen_grid.nwchem_prune(z, rad, 26, radii=radi.BRAGG_RADII)
        self.assertTrue(numpy.all(angs==26))

    def test_gen_atomic_grids(self):
        # An unsupported angular-point count (58) must be rejected.
        grid = gen_grid.Grids(h2o)
        grid.prune = None
        grid.atom_grid = {"H": (10, 58), "O": (10, 50),}
        self.assertRaises(ValueError, grid.build)

    def test_make_mask(self):
        # Screening mask for basis functions evaluated far from the nuclei.
        grid = gen_grid.Grids(h2o)
        grid.atom_grid = {"H": (10, 110), "O": (10, 110),}
        grid.build()
        coords = grid.coords*10.  # scale points outward so most shells screen out
        non0 = gen_grid.make_mask(h2o, coords)
        self.assertEqual(non0.sum(), 122)
        self.assertAlmostEqual(lib.fp(non0), 0.554275491306796, 9)

    def test_overwriting_grids_attribute(self):
        # Changing a grid-defining attribute must invalidate cached results.
        g = gen_grid.Grids(h2o).run()
        self.assertEqual(g.weights.size, 34310)
        g.atom_grid = {"H": (10, 110), "O": (10, 110),}
        self.assertTrue(g.weights is None)
if __name__ == "__main__":
    # Allow running this test module directly from the command line.
    print("Test Grids")
    unittest.main()
|
sunqm/pyscf
|
pyscf/dft/test/test_grids.py
|
Python
|
apache-2.0
| 5,058
|
[
"PySCF"
] |
e4af4153bdbbef39de92ebbd6b875534f67c72468388e0d67008d33a25a6ebd3
|
#!/usr/bin/env python
# -*- coding: utf-8 -*-
# Copyright 2010 British Broadcasting Corporation and Kamaelia Contributors(1)
#
# (1) Kamaelia Contributors are listed in the AUTHORS file and at
# http://www.kamaelia.org/AUTHORS - please extend this file,
# not this notice.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# -------------------------------------------------------------------------
from Kamaelia.UI.Tk.TkWindow import TkWindow
from Kamaelia.Support.Tk.Scrolling import ScrollingMenu
from Axon.Ipc import producerFinished, shutdownMicroprocess
from ArgumentsPanel import ArgumentsPanel
import Tkinter
import pprint
class BuilderControlsGUI(TkWindow):
    """Tk control panel for building a Kamaelia pipeline.

    Lets the user pick a component class from a scrolling menu, edit its
    constructor arguments, and ADD it to (or REMOVE it from) the pipeline.
    Emits ("ADD", ...) / ("DEL", ...) messages out of "outbox"; listens on
    "inbox" for ("SELECT", "NODE", component) notifications.
    """

    def __init__(self, classes):
        # 'classes' is a list of metadata dicts (module, class, args, ...).
        self.selectedComponent = None
        self.uid = 1  # monotonically increasing id handed out by makeUID()
        self.classes = classes
        super(BuilderControlsGUI, self).__init__()

    def setupWindow(self):
        """Build the widget tree: chooser menu, argument panel, add/remove buttons."""
        items = []
        lookup = {} # This is a bit of a nasty hack really ... :-)
        # Why is this a hack ?
        # Oh it's viewed here as a hack because it's a closure
        self.window.title("Pipeline Builder")
        self.addframe = Tkinter.Frame(self.window, borderwidth=2, relief=Tkinter.GROOVE)
        self.addframe.grid(row=0, column=0, sticky=Tkinter.N+Tkinter.E+Tkinter.W+Tkinter.S, padx=4, pady=4)
        # Closure: maps the chosen menu text back to its class-metadata dict.
        def menuCallback(index, text):
            self.click_menuChoice(lookup[text])
        # print self.classes[0]
        for theclass in self.classes:
            # print "THECLASS"
            # pprint.pprint(theclass)
            # print "SSALCEHT"
            lookup[ theclass['module']+"."+theclass['class'] ] = theclass
            items.append(theclass['module']+"."+theclass['class'])
        self.choosebutton = ScrollingMenu(self.addframe, items,
                                          command = menuCallback)
        self.choosebutton.grid(row=0, column=0, columnspan=2, sticky=Tkinter.N)
        # Scrollable canvas hosting the per-component argument entry panel.
        self.argPanel = None
        self.argCanvas = Tkinter.Canvas(self.addframe, relief=Tkinter.SUNKEN, borderwidth=2)
        self.argCanvas.grid(row=1, column=0, sticky=Tkinter.N+Tkinter.S+Tkinter.E+Tkinter.W)
        self.argCanvasWID = self.argCanvas.create_window(0,0, anchor=Tkinter.NW)
        self.argCanvasScroll = Tkinter.Scrollbar(self.addframe, orient=Tkinter.VERTICAL)
        self.argCanvasScroll.grid(row=1, column=1, sticky=Tkinter.N+Tkinter.S+Tkinter.E)
        self.argCanvasScroll['command'] = self.argCanvas.yview
        self.argCanvas['yscrollcommand'] = self.argCanvasScroll.set
        # NOTE(review): pre-selects classes[1], not classes[0] — confirm that
        # skipping the first entry is intentional.
        self.click_menuChoice(self.classes[1])
        self.addbutton = Tkinter.Button(self.addframe, text="ADD Component", command=self.click_addComponent )
        self.addbutton.grid(row=2, column=0, columnspan=2, sticky=Tkinter.S)
        self.addframe.rowconfigure(1, weight=1)
        self.addframe.columnconfigure(0, weight=1)
        # Lower frame: shows the current selection and offers removal.
        self.remframe = Tkinter.Frame(self.window, borderwidth=2, relief=Tkinter.GROOVE)
        self.remframe.grid(row=1, column=0, columnspan=2, sticky=Tkinter.S+Tkinter.E+Tkinter.W, padx=4, pady=4)
        self.selectedlabel = Tkinter.Label(self.remframe, text="<no component selected>")
        self.selectedlabel.grid(row=0, column=0, sticky=Tkinter.S)
        self.delbutton = Tkinter.Button(self.remframe, text="REMOVE Component/Links", command=self.click_removeComponent )
        self.delbutton.grid(row=1, column=0, sticky=Tkinter.S)
        self.delbutton.config(state=Tkinter.DISABLED)
        self.window.rowconfigure(0, weight=1)
        self.window.columnconfigure(0, weight=1)
        self.window.protocol("WM_DELETE_WINDOW", self.handleCloseWindowRequest )

    def main(self):
        """Axon generator main loop: poll inboxes, pump Tk, until destroyed."""
        while not self.isDestroyed():
            if self.dataReady("inbox"):
                data = self.recv("inbox")
                # Expect messages shaped like ("SELECT", "NODE", component).
                if data[0].upper() == "SELECT":
                    if data[1].upper() == "NODE":
                        self.componentSelected(data[2])
            while self.dataReady("control"):
                msg = self.recv("control")
                if isinstance(msg, producerFinished) or isinstance(msg, shutdownMicroprocess):
                    # Propagate the shutdown downstream, then tear down the window.
                    self.send(msg, "signal")
                    self.window.destroy()
            self.tkupdate()
            yield 1

    def handleCloseWindowRequest(self):
        """Window-manager close: announce shutdown on "signal" and destroy."""
        self.send( shutdownMicroprocess(self), "signal")
        self.window.destroy()

    def makeUID(self):
        """Return the next unique integer id for a newly added component."""
        uid = self.uid
        self.uid += 1
        return uid

    def componentSelected(self, component):
        """Record the selection and enable/disable the REMOVE button accordingly."""
        self.selectedComponent = component
        if component == None:
            self.delbutton.config(state=Tkinter.DISABLED)
            self.selectedlabel["text"] = "<no component selected>"
        else:
            self.delbutton.config(state=Tkinter.NORMAL)
            self.selectedlabel["text"] = repr(component[0])

    def click_addComponent(self):
        # add to the pipeline and wire it in
        c = self.argPanel.getDef()
        c["id"] = ( c['name'], repr(self.makeUID()) )
        msg = ("ADD", c['id'], c['name'], c, self.selectedComponent)
        self.send( msg, "outbox")

    def click_removeComponent(self):
        """Ask the pipeline to delete the currently selected component, if any."""
        if self.selectedComponent:
            self.send( ("DEL", self.selectedComponent), "outbox")

    def click_chooseComponent(self):
        # Placeholder: chooser clicks are handled via menuCallback instead.
        pass

    def click_menuChoice(self, theclass):
        """Swap in a fresh ArgumentsPanel for the chosen class metadata."""
        if self.argPanel != None:
            self.argPanel.destroy()
        self.argPanel = ArgumentsPanel(self.argCanvas, theclass)
        self.argPanel.update_idletasks()
        self.argCanvas.itemconfigure(self.argCanvasWID, window=self.argPanel)
        self.argCanvas['scrollregion'] = self.argCanvas.bbox("all")
"""
When the "ADD component" button is clicked, emit a message of the form:
* ("ADD", c['id'], c['name'], c, self.selectedComponent)
* Out the outbox
This declares a unique component id, with a unique name, along with a definition consisting
of:
def getDef(self):
return { "name" : self.theclass['class'],
"module" : self.theclass['module'],
"instantiation" : self.getInstantiation()
}
When the "remove component" button is clicked, emit
* a ("DEL", self.selectedComponent) message
* out the outbox "outbox"
Specifically this says "delete the currently selected component"
"""
if __name__ == "__main__":
from Kamaelia.Chassis.Pipeline import Pipeline
import Axon
import pprint
def getAllClasses( modules ):
_modules = list(modules.keys())
_modules.sort()
for modname in _modules:
try:
for entry in getModuleConstructorArgs( modname, modules[modname] ):
yield entry
except ImportError:
print "WARNING: Import Error: ", modname
continue
def getModuleConstructorArgs( modulename, classnames):
clist = []
module = __import__(modulename, [], [], classnames)
for classname in classnames:
theclass = eval("module."+classname)
entry = { "module" : modulename,
"class" : classname,
"classdoc" : theclass.__doc__,
"initdoc" : theclass.__init__.__doc__,
"args" : getConstructorArgs(theclass),
"theclass" : theclass,
}
clist.append(entry)
return clist
def getConstructorArgs(component):
initfunc = eval("component.__init__")
try:
(args, vargs, vargkw, defaults) = inspect.getargspec(initfunc)
except TypeError, e:
print "FAILURE", str(component), repr(component), component
raise e
arglist = [ [arg] for arg in args ]
if defaults is not None:
for i in range(0,len(defaults)):
arglist[-1-i].append( repr(defaults[-1-i]) )
del arglist[0] # remove 'self'
return {"std":arglist, "*":vargs, "**":vargkw}
import inspect
# subset for testing
COMPONENTS = { 'Kamaelia.File.ReadFileAdaptor': ['ReadFileAdaptor'],
'Kamaelia.File.Reading': ['PromptedFileReader'],
'Kamaelia.Codec.Dirac': ['DiracDecoder', 'DiracEncoder'],
'Kamaelia.UI.Pygame.VideoOverlay': ['VideoOverlay'],
'Kamaelia.File.UnixProcess': ['UnixProcess'],
'Kamaelia.File.Writing': ['SimpleFileWriter']
}
class PrettyPrinter(Axon.Component.component):
def main(self):
while 1:
while self.dataReady("inbox"):
data = self.recv("inbox")
print "-------------------------------------------------------------------"
pprint.pprint(data)
yield 1
items = list(getAllClasses( COMPONENTS ))
Pipeline(
BuilderControlsGUI(items),
PrettyPrinter()
).run()
|
sparkslabs/kamaelia_
|
Sketches/MPS/Old/Compose/Compose/GUI/BuilderControlsGUI.py
|
Python
|
apache-2.0
| 9,668
|
[
"DIRAC"
] |
62c3e720719c1c2c8eecbe507847ed5deaae41205d398ee486cb286a0f0862e1
|
# -*- coding: latin-1 -*-
# Copyright (C) 2009-2014 CEA/DEN, EDF R&D
#
# This library is free software; you can redistribute it and/or
# modify it under the terms of the GNU Lesser General Public
# License as published by the Free Software Foundation; either
# version 2.1 of the License, or (at your option) any later version.
#
# This library is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
# Lesser General Public License for more details.
#
# You should have received a copy of the GNU Lesser General Public
# License along with this library; if not, write to the Free Software
# Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA
#
# See http://www.salome-platform.org/ or email : webmaster.salome@opencascade.com
#
####### Test make translation ###############
import hexablock
# Build a small Cartesian grid and create a translated copy of it.
doc = hexablock.addDocument("default")

# Grid extent along each axis.
nx, ny, nz = 1, 1, 2

origin_vertex = doc.addVertex(0, 0, 0)
axis_vector = doc.addVector(1, 1, 1)
source_grid = doc.makeCartesian1(origin_vertex, axis_vector, nx, ny, nz, 0, 0, 0)
##### doc.saveVtk ("cartesian.vtk")

shift_vector = doc.addVector(5, 0, 0)
translated_grid = doc.makeTranslation(source_grid, shift_vector)
##### doc.saveVtk ("translation.vtk")
|
FedoraScientific/salome-hexablock
|
doc/test_doc/make_transformation/make_translation.py
|
Python
|
lgpl-2.1
| 1,307
|
[
"VTK"
] |
9270315e61a577b33019ebd4993d55e33bb1fc0861b14dcf88a61e3aac5f0095
|
# coding=utf-8
# coding=utf-8
import time
import os
import subprocess
import sys
from sys import argv
from subprocess import call
import glob
import shutil
from argparse import (ArgumentParser, FileType)
import logging
import yaml
import re
import thread
import threading
from threading import Thread
global pipeline_path
pipeline_path=os.getcwd()
#Unix_mac
global Plasmidfinder_samples
Plasmidfinder_samples=""
global resfinder_samples
resfinder_samples=""
global virulence_samples
virulence_samples=""
global mlst_samples
mlst_samples=""
global emm_samples
emm_samples=""
global resfams_samples
resfams_samples=""
global virdb_samples
virdb_samples=""
global card_samples
card_samples=""
global OSystem
global config_file
global cutadapt
global cutadapt_path
global trimgalore_path
global spades_path
global mlst_ncbi_path
global plasmidfinder_ncbi_path
global resistancefinder_ncbi_path
global barrnap_path
global hmmer_path
global blastp_ncbi_path
global parSNP_path
global sample_name
global parSNP_tree
parSNP_tree=""
import argparse
import argparse as ap
import argparse
description="Welcome To BacPipe pipeline: \n a Software for analysing whole genome sequencing data for clinical diagnostics and outbreaks assessment"
parser = argparse.ArgumentParser(prog='BacPipe',description=description)
parser.add_argument('--os',dest='os',choices=['unix','mac'],help='please select the operating system', required=True)
parser.add_argument('--config',dest='file',help='path to config file',nargs='*', required=True)
parser.add_argument('--processors',type=int,dest='processors',help='number of processors',default=4)
parser.add_argument('--version', action='version', version='%(prog)s 1.2.6')
args=parser.parse_args()
try:
config_file=args.file[0]
exists = os.path.isfile(config_file)
print "using %s as config file" % (config_file)
except FileNotFoundError:
print "%s file does not exist" % (config_file)
# Resolve the bundled third-party tool paths for the selected operating
# system. These module-level names are read later by the pipeline steps.
OSystem=args.os
sample_name=""
if OSystem == "mac":
    # macOS builds of the bundled tools.
    cutadapt=os.path.join(pipeline_path+'/cutadapt-1.12/cutadapt/cutadapt')
    cutadapt_path=os.path.join(pipeline_path+'/cutadapt-1.12/')
    trimgalore_path=os.path.join(pipeline_path+'/trim_galore_v0.4.2/trim_galore')
    spades_path=os.path.join(pipeline_path+'/SPAdes-3.9.1-Darwin/bin/spades.py')
    mlst_ncbi_path=os.path.join(pipeline_path+'/mlst/blast-2.2.26/')
    plasmidfinder_ncbi_path=os.path.join(pipeline_path+'/mlst/blast-2.2.26/')
    resistancefinder_ncbi_path=os.path.join(pipeline_path+'/mlst/blast-2.2.26/')
    barrnap_path=str(pipeline_path+'/barrnap-master/bin/')
    hmmer_path=os.path.join(pipeline_path+'/hmmer-3.1b2-macosx-intel/binaries/hmmscan')
    blastp_ncbi_path=os.path.join(pipeline_path+'/mlst/blast-2.2.26/bin/blastp')
    parSNP_path=os.path.join(pipeline_path+'/parsnp_OSX64_v1_2/parsnp')
elif OSystem == "unix":
    # Linux builds (note the newer SPAdes 3.13.0 compared to the mac branch).
    cutadapt=str(pipeline_path+'/cutadapt-1.12/cutadapt/cutadapt')
    cutadapt_path=str(pipeline_path+'/cutadapt-1.12/')
    trimgalore_path=str(pipeline_path+'/trim_galore_v0.4.2/trim_galore')
    spades_path=os.path.join(pipeline_path+'/SPAdes-3.13.0-Linux/bin/spades.py')
    mlst_ncbi_path=os.path.join(pipeline_path+'/mlst/blast-2.2.26/')
    plasmidfinder_ncbi_path=os.path.join(pipeline_path+'/mlst/blast-2.2.26/')
    resistancefinder_ncbi_path=os.path.join(pipeline_path+'/mlst/blast-2.2.26/')
    barrnap_path=str(pipeline_path+'/barrnap_0_6/bin/')
    hmmer_path=os.path.join(pipeline_path+'/hmmer-3.1b2-linux-intel-x86_64/binaries/hmmscan')
    blastp_ncbi_path=os.path.join(pipeline_path+'/mlst/blast-2.2.26/bin/blastp')
    parSNP_path=os.path.join(pipeline_path+'/parsnp_Linux64_v1_2/parsnp')
else:
    # NOTE(review): unreachable — argparse restricts --os to {unix, mac}.
    print "Please select unix or mac as first argument. E.g. python Pipeline.py mac"
    sys.exit()
#define variables
# Module-level state mutated by the pipeline's worker functions.
# NOTE(review): 'global' statements at module level are no-ops — the
# assignments alone already create module globals.
global counter
counter=0
global input_path
input_path=""
global output_path
output_path=""
global prokka_full_path
prokka_full_path=""
global parSNP_reference_path
parSNP_reference_path=""
global parSNP_reference_fsa_path
parSNP_reference_fsa_path=""
# Trim Galore run-mode switches.
global TG_switch
TG_switch=""
global TG_paired
TG_paired=""
# SPAdes assembly mode/switch.
global SPAdes_mode
SPAdes_mode=""
global spades_switch
spades_switch=""
global plasmids_finder_
plasmids_finder_=""
# Per-tool result-file paths, filled in as each analysis step completes.
global prokka_output
prokka_output=""
global resistanceFinder_output
resistanceFinder_output=""
global mlst_output
mlst_output=""
global plasmidsFinder_output
plasmidsFinder_output=""
global virulenceFinder_output
virulenceFinder_output=""
def pipeline(config_f, thread_):
with open(config_f, 'r') as f:
config = yaml.load(f)
#Trim adapter and quality filtering
def trim_galore(raw_reads_1, raw_reads_2):
# Define variables from configuration file
reads = str("--" + config['trim_galore']['reads_type'])
quality = str(config["trim_galore"]["quality_threshold"])
logging.info("Trimgalore is running...")
#cutadapt=os.path.join(pipeline_path+'/cutadapt-1.12/cutadapt/cutadapt')
#trimgalore_path=os.path.join(pipeline_path+"/trim_galore_v0.4.2/trim_galore")
with open(file_out, 'a') as logf:
if reads == '--paired':
subprocess.call([trimgalore_path, "--paired", "-q", quality, "--suppress_warn", "-o", sample_directory, raw_reads_1, raw_reads_2], stdout=logf)
else:
subprocess.call([trimgalore_path, "-q", quality, "--suppress_warn", "-o", sample_directory, raw_reads_1], stdout=logf)
logging.info("Reads trimming and quality filtering completed")
os.chdir(sample_directory)
#Correct filenames for spades input
val_reads_1 = raw_reads_1.replace("_R1_001.fastq.gz", "_R1_001_val_1.fq.gz")
val_reads_2 = raw_reads_2.replace("_R2_001.fastq.gz", "_R2_001_val_2.fq.gz")
logging.info("TrimGalore finished "+sample_name)
if spades_flag == 'False':
spades(val_reads_1, val_reads_2)
def call_script(args):
subprocess.call(args)
#Genome assembly
def spades(spades_reads_pe1, spades_reads_pe2):
# Define variables from configuration file
kmer_size = str(config["spades"]["kmer"])
os.chdir(sample_directory)
spades_output = os.path.join(sample_directory, 'spades_assembly')
if not os.path.exists(spades_output):
os.makedirs(spades_output)
logging.info("SPAdes genome assembler is running...")
#spades_path=os.path.join(pipeline_path+'/SPAdes-3.9.1-Darwin/bin/spades.py')
Sp_mode = config['spades']['Mode']
with open(file_out, 'a') as logf:
if Sp_mode == 'paired':
subprocess.call([spades_path, "--careful", "-k", kmer_size, "-t", str(thread_), "-m", "24", "-1", spades_reads_pe1, "-2", spades_reads_pe2, "-o", spades_output], stdout=logf)
elif Sp_mode == 'single':
spades_reads_pe1 = spades_reads_pe1.replace("_R1_001_val_1.fq.gz","_R1_001_trimmed.fq.gz")
subprocess.call([spades_path, "--careful", "-k", kmer_size, "-t", str(thread_), "-m", "24", "-s", spades_reads_pe1, "-o", spades_output], stdout=logf)
# elif Sp_mode == 'pacbio':
# spades_reads_pe1 = spades_reads_pe1.replace("_R1_001_val_1.fq.gz","_R1_001_trimmed.fq.gz")
# subprocess.call([spades_path, "--careful", "-k", kmer_size, "-t", str(thread_), "-m", "24", "--pacbio", spades_reads_pe1, "-o", spades_output], stdout=logf)
# elif Sp_mode == 'nanopore':
# spades_reads_pe1 = spades_reads_pe1.replace("_R1_001_val_1.fq.gz","_R1_001_trimmed.fq.gz")
# subprocess.call([spades_path, "--careful", "-k", kmer_size, "-t", str(thread_), "-m", "24", "--nanopore", spades_reads_pe1, "-o", spades_output], stdout=logf)
# elif Sp_mode == 'nanopore':
# spades_reads_pe1 = spades_reads_pe1.replace("_R1_001_val_1.fq.gz","_R1_001_trimmed.fq.gz")
# subprocess.call([spades_path, "--careful", "-k", kmer_size, "-t", str(thread_), "-m", "24", "--sanger", spades_reads_pe1, "-o", spades_output], stdout=logf)'''
elif Sp_mode == 'iontorrent':
spades_reads_pe1 = spades_reads_pe1.replace("_R1_001_val_1.fq.gz","_R1_001_trimmed.fq.gz")
subprocess.call([spades_path, "--careful", "-k", kmer_size, "-t", str(thread_), "-m", "24", "--iontorrent","--s", spades_reads_pe1, "-o", spades_output], stdout=logf)
else:
print "Error: unable to identify SPAdes mode"
os.chdir(spades_output)
# Copy fasta assembly in the assemblies folder and rename it with sample id
logging.info("SPAdes finished "+sample_name)
if os.path.isfile('scaffolds.fasta'):
assembly_fasta = 'scaffolds.fasta'
shutil.copy(assembly_fasta, assemblies_directory)
os.chdir(assemblies_directory)
renamed_assembly = assembly_fasta.replace('scaffolds', sample_name)
shutil.move(assembly_fasta, renamed_assembly)
logging.info("Genome assembly completed "+sample_name)
try:
print ""
parsnp_processors=int(thread_)
thread1=threading.Thread(target=mlst_typing, args=(renamed_assembly,))
thread2=threading.Thread(target=plasmids_finder, args=(renamed_assembly,))
thread3=threading.Thread(target=resistance_finder, args=(renamed_assembly,))
thread4=threading.Thread(target=virulence_finder, args=(renamed_assembly,))
#thread5=threading.Thread(target=parSNP, args=(assemblies_directory,parsnp_processors,))
thread6=threading.Thread(target=emmTyping, args=(renamed_assembly,))
if mlst_typing_flag == 'False':
#thread.start_new_thread(mlst_typing,(renamed_assembly,))
thread1.start()
parsnp_processors=parsnp_processors-1
if plasmids_finder_flag == 'False':
#thread.start_new_thread(plasmids_finder,(renamed_assembly,))
thread2.start()
parsnp_processors=parsnp_processors-1
if resfinder_flag == 'False':
#thread.start_new_thread(resistance_finder,(renamed_assembly,))
thread3.start()
parsnp_processors=parsnp_processors-1
if virulencefinder_flag == 'False':
#thread.start_new_thread(virulence_finder,(renamed_assembly,))
thread4.start()
parsnp_processors=parsnp_processors-1
if emmTyping_flag == 'False':
thread6.start()
parsnp_processors=parsnp_processors-1
#if parSNP_flag == 'False':
#if parsnp_processors <1:
#parsnp_processors=1
#parsnp_processors=str(parsnp_processors)
#thread5=threading.Thread(target=parSNP, args=(assemblies_directory,parsnp_processors,))
#thread5.start()
if mlst_typing_flag == 'False':
thread1.join()
if plasmids_finder_flag == 'False':
thread2.join()
if resfinder_flag == 'False':
thread3.join()
if virulencefinder_flag == 'False':
thread4.join()
#if parSNP_flag == 'False':
#thread5.join()
if emmTyping_flag == 'False':
thread6.join()
except:
print "Error: unable to start thread after SPAdes"
if prokka_flag == 'False':
genome_annotation(renamed_assembly)
elif Output_flag == 'False':
Output()
else:
pass
else:
logging.info("**********************SPAdes assembly not found**********************")
pass
###################
###################
###################
def Post_assembled_tools(renamed_assembly):
if os.path.isfile(renamed_assembly):
try:
print ""
parsnp_processors=int(thread_)
thread1=threading.Thread(target=mlst_typing, args=(renamed_assembly,))
thread2=threading.Thread(target=plasmids_finder, args=(renamed_assembly,))
thread3=threading.Thread(target=resistance_finder, args=(renamed_assembly,))
thread4=threading.Thread(target=virulence_finder, args=(renamed_assembly,))
#thread5=threading.Thread(target=parSNP, args=(assemblies_directory,parsnp_processors,))
thread6=threading.Thread(target=emmTyping, args=(renamed_assembly,))
if mlst_typing_flag == 'False':
#thread.start_new_thread(mlst_typing,(renamed_assembly,))
thread1.start()
parsnp_processors=parsnp_processors-1
if plasmids_finder_flag == 'False':
#thread.start_new_thread(plasmids_finder,(renamed_assembly,))
thread2.start()
parsnp_processors=parsnp_processors-1
if resfinder_flag == 'False':
#thread.start_new_thread(resistance_finder,(renamed_assembly,))
thread3.start()
parsnp_processors=parsnp_processors-1
if virulencefinder_flag == 'False':
#thread.start_new_thread(virulence_finder,(renamed_assembly,))
thread4.start()
parsnp_processors=parsnp_processors-1
if emmTyping_flag == 'False':
thread6.start()
parsnp_processors=parsnp_processors-1
#if parSNP_flag == 'False':
#if parsnp_processors <1:
#parsnp_processors=1
#parsnp_processors=str(parsnp_processors)
#thread5=threading.Thread(target=parSNP, args=(assemblies_directory,parsnp_processors,))
#thread5.start()
if mlst_typing_flag == 'False':
thread1.join()
if plasmids_finder_flag == 'False':
thread2.join()
if resfinder_flag == 'False':
thread3.join()
if virulencefinder_flag == 'False':
thread4.join()
#if parSNP_flag == 'False':
#thread5.join()
if emmTyping_flag == 'False':
thread6.join()
except:
print "Error: unable to start thread"
if Output_flag == 'False':
if Resfams_flag == 'False' or cardSearch_flag == 'False' or VirDBSearch_flag == 'False':
print ""
else:
Output()
else:
print "File not found"
###################
###################
###################
def mlst_typing(mlst_assembly):
# Define variables from configuration file
organism = str(config["mlst_typing"]["organism"])
mlst_directory = os.path.join(sample_directory, 'mlst_typing')
if not os.path.exists(mlst_directory):
os.makedirs(mlst_directory)
logging.info("MLST typing...")
#mlst_path=os.path.join(pipeline_path+'/mlst/mlst.py')
mlst_path=os.path.join(pipeline_path+'/mlst/mlst.pl')
#mlst_DB_path=os.path.join(pipeline_path+'/mlst/mlst_db/')
mlst_DB_path=os.path.join(pipeline_path+'/mlst/database/')
#mlst_ncbi_path=os.path.join(pipeline_path+'/mlst/blast-2.2.26/')
with open(file_out, 'a') as logf:
subprocess.call([mlst_path, "-i", mlst_assembly, "-o", mlst_directory, "-d", mlst_DB_path, "-s", organism, "-b", mlst_ncbi_path], stdout=logf)
#print "python3.5 %s -i %s -o %s -p %s -s %s -x" % (mlst_path,mlst_assembly,mlst_directory,mlst_DB_path,organism)
#subprocess.call(["python3.5",mlst_path,"-i",mlst_assembly,"-o", mlst_directory, "-p", mlst_DB_path,"-s", organism,"-x"], stdout=logf)
logging.info("MLST finished "+sample_name)
global mlst_output
mlst_output=mlst_directory+'/results_tab.txt'
#mlst_output=mlst_directory+'/results_tab.tsv'
global mlst_samples
mlst_samples=mlst_samples+mlst_output+","
def plasmids_finder(plasmid_assembly):
# Define variables from configuration file
database = str(config["plasmids_finder"]["plasmids_database"])
threshold = str(config["plasmids_finder"]["identity_threshold"])
plasmids_directory = os.path.join(sample_directory, 'plasmids')
if not os.path.exists(plasmids_directory):
os.makedirs(plasmids_directory)
logging.info("Finding plasmids...")
plasmidfinder_path=os.path.join(pipeline_path+'/plasmidfinder/plasmidfinder.py')
plasmidfinder_DB_path=os.path.join(pipeline_path+'/plasmidfinder/plasmidfinder_db')
#plasmidfinder_ncbi_path=os.path.join(pipeline_path+'/mlst/blast-2.2.26/')
with open(file_out, 'a') as logf:
#plasmidfinder.py -i ./test/test.fsa -o ./test/ -mp blastn -x -p ./database/ -q
#print "python3.5 %s -i %s -o %s -p %s -mp blastn -x" % (plasmidfinder_path,plasmid_assembly,plasmids_directory,plasmidfinder_DB_path)
subprocess.call(["python3.5",plasmidfinder_path, "-i", plasmid_assembly, "-o", plasmids_directory, "-p", plasmidfinder_DB_path, "-x","-mp", "blastn"], stdout=logf)
logging.info("PlasmidFinder finished "+sample_name)
global plasmidsFinder_output
plasmidsFinder_output=plasmids_directory+"/results_tab.tsv"
global Plasmidfinder_samples
Plasmidfinder_samples=Plasmidfinder_samples+plasmidsFinder_output+","
def resistance_finder(res_assembly):
# Define variables from configuration file
database = str(config["resfinder"]["resistance_database"])
threshold = str(config["resfinder"]["identity_threshold"])
minimum_overlap_length = str(config["resfinder"]["min_length"])
res_directory = os.path.join(sample_directory, 'resistance_profile')
if not os.path.exists(res_directory):
os.makedirs(res_directory)
logging.info("Finding antimicrobial resistance genes...")
resistancefinder_path=os.path.join(pipeline_path+'/resfinder/resfinder.pl')
resistancefinder_DB_path=os.path.join(pipeline_path+'/resfinder/resfinder_db')
#resistancefinder_ncbi_path=os.path.join(pipeline_path+'/mlst/blast-2.2.26/')
with open(file_out, 'a') as logf:
subprocess.call([resistancefinder_path, "-i", res_assembly, "-o", res_directory, "-d", resistancefinder_DB_path, "-k", threshold, "-l", minimum_overlap_length, "-a", database, "-b", resistancefinder_ncbi_path], stdout=logf)
#subprocess.call([resistancefinder_path, "-i", res_assembly, "-o", res_directory, "-d", resistancefinder_DB_path, "-k", threshold, "-l", minimum_overlap_length, "-a", database, "-b", resistancefinder_ncbi_path], stdout=logf)
logging.info("Resistance finished "+sample_name)
global resistanceFinder_output
resistanceFinder_output=res_directory+'/results_tab.txt'
global resfinder_samples
resfinder_samples=resfinder_samples+resistanceFinder_output+","
def virulence_finder(vir_assembly):
    """Run VirulenceFinder (perl wrapper) on one assembly.

    Side effects: creates <sample_directory>/virulence_profile, appends the
    tool's stdout to the shared tool log (file_out), and appends the results
    file path to the comma-separated accumulator ``virulence_samples`` used
    later by show_results().

    NOTE(review): relies on enclosing-scope names (config, sample_directory,
    assemblies_directory, pipeline_path, file_out, sample_name) being set by
    the surrounding pipeline code -- confirm when moving this function.
    """
    # Define variables from configuration file
    database = str(config["virulencefinder"]["virulence_database"])
    threshold = str(config["virulencefinder"]["identity_threshold"])#modify YAML
    vir_directory = os.path.join(sample_directory, 'virulence_profile')
    os.chdir(assemblies_directory)
    if not os.path.exists(vir_directory):
        os.makedirs(vir_directory)
    logging.info("Finding virulence...")
    #virulencefinder_path=os.path.join(pipeline_path+'/virulencefinder/virulencefinder.py')
    virulencefinder_path=os.path.join(pipeline_path+'/virulencefinder/virulencefinder.pl')
    #virulencefinder_DB_path=os.path.join(pipeline_path+'/virulencefinder/virulencefinder_db')
    virulencefinder_DB_path=os.path.join(pipeline_path+'/virulencefinder/database')
    virulencefinder_ncbi_path=os.path.join(pipeline_path+'/mlst/blast-2.2.26/')
    # Tool stdout is appended to the shared pipeline tool log.
    with open(file_out, 'a') as logf:
        #print "perl %s -i %s -o %s -d %s -k %s -s %s -b %s" % (virulencefinder_path,vir_assembly,vir_directory,virulencefinder_DB_path,threshold,database,virulencefinder_ncbi_path)
        subprocess.call(["perl",virulencefinder_path, "-i", vir_assembly, "-o", vir_directory, "-d", virulencefinder_DB_path, "-k", threshold, "-s", database, "-b", virulencefinder_ncbi_path], stdout=logf)
        #subprocess.call(["python3.5",virulencefinder_path, "-i", vir_assembly,"-o",vir_directory,"-mp", "blastn", "-x", "-p", virulencefinder_DB_path], stdout=logf)
        #print "python3.5 %s -i %s -o %s -mp blastn -x -p %s" % (virulencefinder_path,vir_assembly,vir_directory,virulencefinder_DB_path)
    logging.info("VirulenceFinder finished "+sample_name)
    # Publish the results path for the summarization step.
    global virulenceFinder_output
    virulenceFinder_output=sample_directory+'/virulence_profile'+"/results_tab.txt"
    global virulence_samples
    virulence_samples=virulence_samples+virulenceFinder_output+","
def emmTyping(emmTyping_assembly):
    """emm-type one assembly by BLASTing it against the trimmed emm database.

    Runs the bundled Blast_emm.pl wrapper (legacy blastall toolchain) and
    appends the tab-separated results path to the accumulator ``emm_samples``.
    """
    # Define variables from configuration file
    emmTyping_directory = os.path.join(sample_directory, 'emmTyping_directory')
    #ann_directory = os.path.join(sample_directory, 'genome_annotation')
    #os.chdir(ann_directory)
    if not os.path.exists(emmTyping_directory):
        os.makedirs(emmTyping_directory)
    logging.info("emm Typing via blast...")
    results=os.path.join(emmTyping_directory + '/'+'results.txt')
    output=os.path.join(emmTyping_directory + '/'+'results_tab.txt')
    emmTyping_path=os.path.join(pipeline_path+'/emm_typing/Blast_emm.pl')
    blastall_ncbi_path=os.path.join(pipeline_path+'/mlst/blast-2.2.26/bin/blastall')
    mkblastdb_ncbi_path=os.path.join(pipeline_path+'/mlst/blast-2.2.26/bin/makeblastdb')
    emm_db_path=os.path.join(pipeline_path+'/emm_typing/emm_trimmed.fasta')
    # Tool stdout goes to the shared pipeline tool log.
    with open(file_out, 'a') as logf:
        subprocess.call(["perl", emmTyping_path, emmTyping_assembly, output,emm_db_path,str(thread_), results,blastall_ncbi_path,mkblastdb_ncbi_path], stdout=logf)
    logging.info("emmTyping search finished "+sample_name)
    global emm_samples
    emm_samples=emm_samples+output+","
def genome_annotation(annotation_assembly):
ann_directory = os.path.join(sample_directory, 'genome_annotation')
if not os.path.exists(ann_directory):
os.makedirs(ann_directory)
logging.info("Genome annotation with PROKKA...")
prokka_path= str(config['prokka']['prokka_path'])
prokka_path=prokka_path
#barrnap_path=str(pipeline_path+'/barrnap-master/bin/')
os.environ["PATH"] += os.pathsep + prokka_path
os.environ["PATH"] += os.pathsep + barrnap_path
with open(file_out, 'a') as logf:
subprocess.call([prokka_path, "--quiet", "--cpus", str(thread_), "--kingdom", "Bacteria", "--addgenes", "--outdir", ann_directory, "--strain", sample_name, "--force", "--centre", "C", "--locustag", "L","--prefix","results",annotation_assembly], stdout=logf)
os.chdir(ann_directory)
# Rename gbk file with sample name
gbk_file = glob.glob("*.gbk")
gff_file = glob.glob("*.gff")
faa_file = glob.glob("*.faa")
renamed_gbk = gbk_file[0].replace(gbk_file[0], sample_name + '.gbk')
renamed_gff = gff_file[0].replace(gff_file[0], sample_name + '.gff')
shutil.move(gbk_file[0],renamed_gbk)
shutil.move(gff_file[0],renamed_gff)
Restfams_faa=os.path.join(ann_directory, '/',faa_file[0])
logging.info("Annotation finished "+sample_name)
##if Resfams_flag == 'False':
##Resfams(faa_file[0])
##if cardSearch_flag == 'False':
##cardSearch(faa_file[0])
##if VirDBSearch_flag == 'False':
##VirDBSearch(faa_file[0])
try:
thread6=threading.Thread(target=Resfams, args=(faa_file[0],))
thread7=threading.Thread(target=cardSearch, args=(faa_file[0],))
thread8=threading.Thread(target=VirDBSearch, args=(faa_file[0],))
if Resfams_flag == 'False':
thread6.start()
#thread.start_new_thread(Resfams,(faa_file[0],))
if cardSearch_flag == 'False':
thread7.start()
#thread.start_new_thread(cardSearch,(faa_file[0],))
if VirDBSearch_flag == 'False':
thread8.start()
#thread.start_new_thread(VirDBSearch,(faa_file[0],))
if Resfams_flag == 'False':
thread6.join()
if cardSearch_flag == 'False':
thread7.join()
if VirDBSearch_flag == 'False':
thread8.join()
except:
print "Error: unable to start thread after Prokka"
if Output_flag == 'False':
Output()
def post_annotation_analysis(annotation_assembly_faa):
    """Run the protein-level searches (Resfams/CARD/VirDB) on an existing .faa.

    Entry point used when annotation was already done elsewhere: the enabled
    searches run in parallel threads, then the per-sample summary is built.
    Silently does nothing (except the Output step) if the file is missing.
    """
    if os.path.isfile(annotation_assembly_faa):
        try:
            thread6=threading.Thread(target=Resfams, args=(annotation_assembly_faa,))
            thread7=threading.Thread(target=cardSearch, args=(annotation_assembly_faa,))
            thread8=threading.Thread(target=VirDBSearch, args=(annotation_assembly_faa,))
            # Start all enabled searches before joining any, so they overlap.
            if Resfams_flag == 'False':
                thread6.start()
            #thread.start_new_thread(Resfams,(faa_file[0],))
            if cardSearch_flag == 'False':
                thread7.start()
            #thread.start_new_thread(cardSearch,(faa_file[0],))
            if VirDBSearch_flag == 'False':
                thread8.start()
            #thread.start_new_thread(VirDBSearch,(faa_file[0],))
            if Resfams_flag == 'False':
                thread6.join()
            if cardSearch_flag == 'False':
                thread7.join()
            if VirDBSearch_flag == 'False':
                thread8.join()
        except:
            # Broad catch: a failed search must not abort the whole sample.
            print "Error: unable to start thread after Prokka"
    if Output_flag == 'False':
        Output()
def Resfams(Resfams_faa):
    """hmmscan the sample's predicted proteins against the Resfams HMM library.

    Appends the tabular results path to the accumulator ``resfams_samples``
    for later summarization by show_results().
    """
    # Define variables from configuration file
    Resfams_directory = os.path.join(sample_directory, 'Resfams_directory')
    #ann_directory = os.path.join(sample_directory, 'genome_annotation')
    #os.chdir(ann_directory)
    if not os.path.exists(Resfams_directory):
        os.makedirs(Resfams_directory)
    logging.info("denovo annotation Resfams...")
    results=os.path.join(Resfams_directory + '/'+'results.txt')
    output=os.path.join(Resfams_directory + '/'+'output.txt')
    #hmmer_path=os.path.join(pipeline_path+'/hmmer-3.1b2-macosx-intel/binaries/hmmscan')
    # NOTE(review): hmmer_path is not defined in this function (the local
    # definition above is commented out) -- confirm it is provided at module
    # level, otherwise the call below raises NameError at run time.
    resfams_path=os.path.join(pipeline_path+'/ResFam/Resfams.hmm')
    with open(file_out, 'a') as logf:
        subprocess.call([hmmer_path, "--cut_ga", "--tblout", results,"-o",output, resfams_path,Resfams_faa], stdout=logf)
    logging.info("Resfams finished "+sample_name)
    global resfams_output
    resfams_output=Resfams_directory+'/results.txt'
    global resfams_samples
    resfams_samples=resfams_samples+resfams_output+","
def cardSearch(card_faa):
    """BLAST the sample's predicted proteins against the CARD database.

    Wraps the bundled Blast_card.pl helper (legacy blastp) and registers the
    results file in the accumulator ``card_samples`` for summarization.
    """
    work_dir = os.path.join(sample_directory, 'CARDsearch_directory')
    if not os.path.exists(work_dir):
        os.makedirs(work_dir)
    logging.info("CARD search via blast...")
    # Paths of the wrapper script, its inputs and its two output files.
    results_file = os.path.join(work_dir + '/' + 'results.txt')
    raw_output = os.path.join(work_dir + '/' + 'output.txt')
    wrapper = os.path.join(pipeline_path + '/cardSearch/Blast_card.pl')
    blastp_bin = os.path.join(pipeline_path + '/mlst/blast-2.2.26/bin/blastp')
    card_db = os.path.join(pipeline_path + '/cardSearch/card_protien.fasta')
    with open(file_out, 'a') as logf:
        cmd = ["perl", wrapper, card_faa, raw_output, card_db, str(thread_), results_file, blastp_bin]
        subprocess.call(cmd, stdout=logf)
    logging.info("CARD search finished " + sample_name)
    # Publish the results path for the summarization step.
    global card_output
    card_output = work_dir + '/results.txt'
    global card_samples
    card_samples = card_samples + card_output + ","
def VirDBSearch(VirDB_faa):
    """BLAST the sample's predicted proteins against the VFDB virulence DB.

    Runs the bundled Blast_VirDB.pl helper and registers the results file in
    the accumulator ``virdb_samples`` for summarization.
    """
    # Define variables from configuration file
    VirDBSearch_directory = os.path.join(sample_directory, 'VirDBSearch_directory')
    if not os.path.exists(VirDBSearch_directory):
        os.makedirs(VirDBSearch_directory)
    logging.info("VirDB search via blast...")
    results=os.path.join(VirDBSearch_directory + '/'+'results.txt')
    output=os.path.join(VirDBSearch_directory + '/'+'output.txt')
    VIRDBSearch_path=os.path.join(pipeline_path+'/VirDB/Blast_VirDB.pl')
    blastp_ncbi_path=os.path.join(pipeline_path+'/mlst/blast-2.2.26/bin/blastp')
    vir_db_path=os.path.join(pipeline_path+'/VirDB/VFDB_setA_pro.fas')
    # Bug fix: the previous Popen(...) returned immediately, so "finished"
    # was logged and the results path registered before blast completed, and
    # the tool's stdout was dropped.  Wait for completion and append stdout
    # to the shared tool log, exactly like the sibling cardSearch/Resfams.
    with open(file_out, 'a') as logf:
        subprocess.call(["perl", VIRDBSearch_path,VirDB_faa,output,vir_db_path,str(thread_),results,blastp_ncbi_path], stdout=logf)
    logging.info("VirDB search finished "+sample_name)
    global virDB_output
    virDB_output=VirDBSearch_directory+'/results.txt'
    global virdb_samples
    virdb_samples=virdb_samples+virDB_output+","
def parSNP(parSNP_fsa,parsnp_pros):
    """Run core-genome SNP analysis with parsnp over a directory of assemblies.

    parSNP_fsa: directory containing the .fasta assemblies to compare.
    parsnp_pros: number of processors to pass through to parsnp.
    Publishes the resulting tree path in the module-level ``parSNP_tree``.
    """
    # Define variables from configuration file
    parSNP_directory = os.path.join(output_directory, 'parSNP_directory')
    ##parSNP_directory = os.path.join(sample_directory, 'parSNP_directory')
    #ann_directory = os.path.join(sample_directory, 'genome_annotation')
    #os.chdir(ann_directory)
    if not os.path.exists(parSNP_directory):
        os.makedirs(parSNP_directory)
    logging.info("SNP analysis via parSNP ...")
    # results/output are computed for reference; parsnp itself writes into
    # parSNP_directory, and only parsnp.tree is consumed downstream.
    results=os.path.join(parSNP_directory + '/'+'results.txt')
    output=os.path.join(parSNP_directory + '/'+'output.vcf')
    global parSNP_tree
    parSNP_tree=os.path.join(parSNP_directory + '/'+'parsnp.tree')
    #parSNP_path=os.path.join(pipeline_path+'/harvesttools_OSX64_v1_3/harvesttools')
    #parSNP_path=os.path.join(pipeline_path+'/parsnp_OSX64_v1_2/parsnp')
    # NOTE(review): parSNP_path is not defined in this function (both local
    # definitions above are commented out) -- confirm it is set at module level.
    parSNP_reference= str(config['parSNP']['parSNP_reference'])
    parSNP_reference_fsa= str(config['parSNP']['parSNP_reference_fsa'])
    with open(file_out, 'a') as logf:
        # "-r" takes a plain fasta reference; "-g" takes a genbank reference.
        if parSNP_reference == "None" :
            subprocess.call([parSNP_path, "-r", parSNP_reference_fsa, "-d", parSNP_fsa, "-o", parSNP_directory,"-p",parsnp_pros, "-c"], stdout=logf)
        else:
            subprocess.call([parSNP_path, "-g", parSNP_reference, "-d", parSNP_fsa, "-o", parSNP_directory,"-p",parsnp_pros, "-c"], stdout=logf)
        #print "%s -g %s -d %s -o %s -p %s" % (parSNP_path,parSNP_reference,parSNP_fsa,parSNP_directory,parsnp_pros)
    logging.info("parSNP finished "+sample_name)
def Output():
logging.info("Summarizing outputs...")
tools=""
if mlst_typing_flag == 'False':
tools=tools+"0,"
if plasmids_finder_flag == 'False':
tools=tools+"1,"
if resfinder_flag == 'False':
tools=tools+"2,"
if virulencefinder_flag == 'False':
tools=tools+"3,"
if prokka_flag == 'False':
tools=tools+"4,"
if Resfams_flag == 'False':
tools=tools+"5,"
if cardSearch_flag == 'False':
tools=tools+"6,"
if VirDBSearch_flag == 'False':
tools=tools+"7,"
#if emmTyping_flag == 'False':
#tools=tools+"8,"
# if parSNP_flag == 'False':
# tools=tools+"8,"
if tools == "":
print ""
else:
tools=tools[:-1]
Output_batch_path=os.path.join(pipeline_path+'/Output_batch.pl')
pipe = subprocess.Popen(["perl", Output_batch_path,output_directory,sample_name, tools], stdout=subprocess.PIPE)
logging.info("summarization finished "+sample_name)
#os.chdir(input_directory)
# End-of-sample marker, written once the per-sample tools have finished.
logging.info("-----Finish processing sample " + sample_name + "-----")
## Create directories
# Resolve the configured output/input locations and make sure the working
# tree (output root, genome_assemblies, Summary) exists before any tool runs.
output_directory = config["directories"]["output"]
if not os.path.exists(output_directory):
    os.makedirs(output_directory)
input_directory = config["directories"]["reads"]
assemblies_directory = os.path.join(output_directory, 'genome_assemblies')
if not os.path.exists(assemblies_directory):
    os.makedirs(assemblies_directory)
summary_directory = os.path.join(output_directory, 'Summary')
if not os.path.exists(summary_directory):
    os.makedirs(summary_directory)
os.chdir(output_directory)
logfile ="pipeline.log"
# Drop any previously-installed root handlers so a repeated run does not
# duplicate log lines, then log everything to pipeline.log (overwritten).
if logging.root:
    del logging.root.handlers[:]
logging.basicConfig(
    filename=logfile,
    level=logging.DEBUG,
    filemode='w',
    format='%(asctime)s %(message)s',
    datefmt='%m/%d/%Y %H:%M:%S')
class StreamToLogger(object):
    """File-like shim that forwards everything written to it to a logger.

    Used to redirect sys.stdout / sys.stderr into the pipeline log.  Only
    ``write`` is implemented: each non-empty line of the written buffer is
    emitted as one log record at the configured level.
    """
    def __init__(self, logger, log_level=logging.INFO):
        self.logger = logger          # destination logger
        self.log_level = log_level    # level applied to every forwarded line
        self.linebuf = ''             # kept for file-object compatibility
    def write(self, buf):
        trimmed = buf.rstrip()
        for piece in trimmed.splitlines():
            self.logger.log(self.log_level, piece.rstrip())
# Route interpreter stdout/stderr through the logging system so stray prints
# from tools and libraries also end up in pipeline.log.
stdout_logger = logging.getLogger('STDOUT')
sl = StreamToLogger(stdout_logger, logging.INFO)
sys.stdout = sl
stderr_logger = logging.getLogger('STDERR')
sl = StreamToLogger(stderr_logger, logging.ERROR)
sys.stderr = sl
#STDOUT
# Shared file that collects the raw stdout of every external tool.
file_out=os.path.join(output_directory,'log.txt')
#Store in a variable the text file with filenames of reads already processed
files_processed = open('files_processed.txt', 'a+')
if os.path.getsize('files_processed.txt') == 0:
    files_processed.write('----- List of processed files -----' + '\n')
files_processed.close()
os.chdir(input_directory)
#deactivated tools
# Each config section may carry a 'deactivate' key; its string value
# ('True'/'False') controls whether the stage runs.  Missing key == enabled.
def _deactivate_flag(section):
    """Return the section's 'deactivate' setting as a string ('False' when absent)."""
    if 'deactivate' in config[section]:
        return str(config[section]['deactivate'])
    return 'False'
trim_galore_flag = _deactivate_flag('trim_galore')
spades_flag = _deactivate_flag('spades')
mlst_typing_flag = _deactivate_flag('mlst_typing')
plasmids_finder_flag = _deactivate_flag('plasmids_finder')
cardSearch_flag = _deactivate_flag('cardSearch')
VirDBSearch_flag = _deactivate_flag('VirDBSearch')
parSNP_flag = _deactivate_flag('parSNP')
emmTyping_flag = _deactivate_flag('emmTyping')
resfinder_flag = _deactivate_flag('resfinder')
prokka_flag = _deactivate_flag('prokka')
Resfams_flag = _deactivate_flag('Resfams')
virulencefinder_flag = _deactivate_flag('virulencefinder')
Output_flag = _deactivate_flag('Output')
#printing tools deactivated
# Announce every deactivated stage (same message order as before).
for _flag, _label in (
        (trim_galore_flag, "trim galore"),
        (spades_flag, "SPAdes"),
        (mlst_typing_flag, "MLST typing"),
        (plasmids_finder_flag, "PlasmidsFinder"),
        (resfinder_flag, "ResFinder"),
        (cardSearch_flag, "CARD Search"),
        (VirDBSearch_flag, "VirDB Search"),
        (parSNP_flag, "parSNP"),
        (emmTyping_flag, "M-Typing"),
        (prokka_flag, "Prokka"),
        (Resfams_flag, "Resfams"),
        (virulencefinder_flag, "VirulenceFinder"),
        (Output_flag, "Output summarization"),
):
    if _flag == 'True':
        logging.info("deactivating %s as specified" % _label)
#Check for paired end reads to process in the input directory
# Dispatch: depending on which stages are enabled, the per-sample processing
# starts from raw reads (trim_galore), trimmed reads (spades), assembled
# genomes (*.fasta) or annotated proteins (*.faa).  Every branch skips files
# already recorded in files_processed.txt.
global sample_name
if trim_galore_flag == 'False':
    # tracker records whether at least one read pair was found/processed.
    tracker=0
    # NOTE(review): 'list' shadows the builtin of the same name.
    list = [f for f in glob.glob("*_R1_*") if "fastq.gz" in f or "fq.gz" in f ]
    for illumina_reads_1 in list:
        logging.info("Reading files ending with _R1_001.fastq.gz for forward and _R1_001.fastq.gz for reverse within the input directory")
        illumina_reads_2 = illumina_reads_1.replace("_R1_", "_R2_")
        sample_name = illumina_reads_1[:-9]
        os.chdir(output_directory)
        files_processed = open('files_processed.txt', 'a+')
        row = [x.strip('\n') for x in files_processed.readlines()]
        if str(illumina_reads_1) in row:
            logging.info(sample_name + " already processed")
            continue
        else:
            sample_directory = os.path.join(output_directory, illumina_reads_1[:-9])
            if not os.path.exists(sample_directory):
                os.makedirs(sample_directory)
            os.chdir(input_directory)
            logging.info("-----Start processing sample " + sample_name + "-----")
            trim_galore(illumina_reads_1, illumina_reads_2)
            files_processed.write(illumina_reads_1 + '\n')
            files_processed.write(illumina_reads_2 + '\n')
            files_processed.close()
            tracker=1
    # Whole-batch SNP analysis once all samples have been assembled.
    if parSNP_flag == 'False' and tracker == 1:
        logging.info("Reading assembled files ending .fasta within the genome_assemblies directory")
        logging.info("-----Start processing assembled genomes -----")
        Processors_=str(app.getEntry('Processors'))
        parSNP_input=os.path.join(output_directory, 'genome_assemblies')
        parSNP(parSNP_input,Processors_)
    elif tracker == 0:
        print "No input files for TrimGalore"
    else:
        print ""
#Start from trim galore output
elif spades_flag == 'False':
    list = [f for f in glob.glob("*_R1_*") if "fastq.gz" in f or "fq.gz" in f ]
    for illumina_reads_1 in list:
        logging.info("Reading files ending with _R1_001_val_1.fq.gz for forward and _R2_001_val_2.fq.gz for reverse within the input directory")
        illumina_reads_2 = illumina_reads_1.replace("_R1_", "_R2_")
        sample_name = illumina_reads_1[:-12]
        os.chdir(output_directory)
        files_processed = open('files_processed.txt', 'a+')
        row = [x.strip('\n') for x in files_processed.readlines()]
        if str(illumina_reads_1) in row:
            logging.info(sample_name + " already processed")
            continue
        else:
            sample_directory = os.path.join(output_directory, illumina_reads_1[:-12])
            if not os.path.exists(sample_directory):
                os.makedirs(sample_directory)
            os.chdir(input_directory)
            logging.info("-----Start processing sample " + sample_name + "-----")
            illumina_reads_1=os.path.join(input_directory,illumina_reads_1)
            illumina_reads_2=os.path.join(input_directory,illumina_reads_2)
            spades(illumina_reads_1, illumina_reads_2)
            files_processed.write(illumina_reads_1 + '\n')
            files_processed.write(illumina_reads_2 + '\n')
            files_processed.close()
    if parSNP_flag == 'False':
        logging.info("Reading assembled files ending .fasta within the genome_assemblies directory")
        logging.info("-----Start processing assembled genomes -----")
        Processors_=str(app.getEntry('Processors'))
        parSNP_input=os.path.join(output_directory, 'genome_assemblies')
        parSNP(parSNP_input,Processors_)
else:
    #Start from Spades output 701_S8_L001_R1_001.fasta
    # All assembly-level tools share the same loop shape; only the first
    # enabled one drives it, and Post_assembled_tools runs the full set.
    if mlst_typing_flag == 'False':
        os.chdir(input_directory)
        for illumina_reads_1 in glob.glob("*.fasta"):
            logging.info("Reading assembled files ending .fasta within the input directory")
            sample_name = illumina_reads_1[:-6]
            os.chdir(output_directory)
            files_processed = open('files_processed.txt', 'a+')
            row = [x.strip('\n') for x in files_processed.readlines()]
            if str(illumina_reads_1) in row:
                logging.info(sample_name + " already processed")
                continue
            else:
                sample_directory = os.path.join(output_directory, illumina_reads_1[:-6])
                if not os.path.exists(sample_directory):
                    os.makedirs(sample_directory)
                os.chdir(input_directory)
                logging.info("-----Start processing sample " + sample_name + "-----")
                illumina_reads_1=os.path.join(input_directory,illumina_reads_1)
                #mlst_typing(illumina_reads_1)
                Post_assembled_tools(illumina_reads_1)
                files_processed.write(illumina_reads_1 + '\n')
                files_processed.close()
    #Start from Spades output 701_S8_L001_R1_001.fasta
    elif plasmids_finder_flag == 'False':
        os.chdir(input_directory)
        for illumina_reads_1 in glob.glob("*.fasta"):
            logging.info("Reading assembled files ending .fasta within the input directory")
            sample_name = illumina_reads_1[:-6]
            os.chdir(output_directory)
            files_processed = open('files_processed.txt', 'a+')
            row = [x.strip('\n') for x in files_processed.readlines()]
            if str(illumina_reads_1) in row:
                logging.info(sample_name + " already processed")
                continue
            else:
                sample_directory = os.path.join(output_directory, illumina_reads_1[:-6])
                if not os.path.exists(sample_directory):
                    os.makedirs(sample_directory)
                os.chdir(input_directory)
                logging.info("-----Start processing sample " + sample_name + "-----")
                illumina_reads_1=os.path.join(input_directory,illumina_reads_1)
                #plasmids_finder(illumina_reads_1)
                Post_assembled_tools(illumina_reads_1)
                files_processed.write(illumina_reads_1 + '\n')
                files_processed.close()
    elif resfinder_flag == 'False':
        os.chdir(input_directory)
        for illumina_reads_1 in glob.glob("*.fasta"):
            logging.info("Reading assembled files ending .fasta within the input directory")
            sample_name = illumina_reads_1[:-6]
            os.chdir(output_directory)
            files_processed = open('files_processed.txt', 'a+')
            row = [x.strip('\n') for x in files_processed.readlines()]
            if str(illumina_reads_1) in row:
                logging.info(sample_name + " already processed")
                continue
            else:
                sample_directory = os.path.join(output_directory, illumina_reads_1[:-6])
                if not os.path.exists(sample_directory):
                    os.makedirs(sample_directory)
                os.chdir(input_directory)
                logging.info("-----Start processing sample " + sample_name + "-----")
                illumina_reads_1=os.path.join(input_directory,illumina_reads_1)
                #resistance_finder(illumina_reads_1)
                Post_assembled_tools(illumina_reads_1)
                files_processed.write(illumina_reads_1 + '\n')
                files_processed.close()
    elif virulencefinder_flag == 'False':
        os.chdir(input_directory)
        for illumina_reads_1 in glob.glob("*.fasta"):
            logging.info("Reading assembled files ending .fasta within the input directory")
            sample_name = illumina_reads_1[:-6]
            os.chdir(output_directory)
            files_processed = open('files_processed.txt', 'a+')
            row = [x.strip('\n') for x in files_processed.readlines()]
            if str(illumina_reads_1) in row:
                logging.info(sample_name + " already processed")
                continue
            else:
                sample_directory = os.path.join(output_directory, illumina_reads_1[:-6])
                if not os.path.exists(sample_directory):
                    os.makedirs(sample_directory)
                os.chdir(input_directory)
                logging.info("-----Start processing sample " + sample_name + "-----")
                illumina_reads_1=os.path.join(input_directory,illumina_reads_1)
                #virulence_finder(illumina_reads_1)
                Post_assembled_tools(illumina_reads_1)
                files_processed.write(illumina_reads_1 + '\n')
                files_processed.close()
    elif emmTyping_flag == 'False':
        os.chdir(input_directory)
        for illumina_reads_1 in glob.glob("*.fasta"):
            logging.info("Reading assembled files ending .fasta within the input directory")
            sample_name = illumina_reads_1[:-6]
            os.chdir(output_directory)
            files_processed = open('files_processed.txt', 'a+')
            row = [x.strip('\n') for x in files_processed.readlines()]
            if str(illumina_reads_1) in row:
                logging.info(sample_name + " already processed")
                continue
            else:
                sample_directory = os.path.join(output_directory, illumina_reads_1[:-6])
                if not os.path.exists(sample_directory):
                    os.makedirs(sample_directory)
                os.chdir(input_directory)
                logging.info("-----Start processing sample " + sample_name + "-----")
                illumina_reads_1=os.path.join(input_directory,illumina_reads_1)
                #emmTyping(illumina_reads_1)
                Post_assembled_tools(illumina_reads_1)
                files_processed.write(illumina_reads_1 + '\n')
                files_processed.close()
    else:
        print ""
    # Stand-alone parSNP over the whole input directory of assemblies.
    if parSNP_flag == 'False':
        os.chdir(input_directory)
        if os.path.isdir(input_directory):
            sample_name = os.path.basename(input_directory)
            #for illumina_reads_1 in glob.glob("*.fasta"):
            logging.info("Reading assembled files ending .fasta within the input directory")
            #os.chdir(output_directory)
            #sample_directory = os.path.join(output_directory, sample_name)
            #if not os.path.exists(sample_directory):
            # os.makedirs(sample_directory)
            os.chdir(input_directory)
            logging.info("-----Start processing sample " + sample_name + "-----")
            Processors_=str(app.getEntry('Processors'))
            parSNP(input_directory,Processors_)
    # Annotation path: PROKKA on assemblies, or protein-level tools on .faa.
    if prokka_flag == 'False':
        os.chdir(input_directory)
        for illumina_reads_1 in glob.glob("*.fasta"):
            logging.info("Reading assembled files ending .fasta within the input directory")
            sample_name = illumina_reads_1[:-6]
            os.chdir(output_directory)
            files_processed = open('files_processed.txt', 'a+')
            row = [x.strip('\n') for x in files_processed.readlines()]
            if str(illumina_reads_1) in row:
                logging.info(sample_name + " already processed")
                continue
            else:
                sample_directory = os.path.join(output_directory, illumina_reads_1[:-6])
                if not os.path.exists(sample_directory):
                    os.makedirs(sample_directory)
                os.chdir(input_directory)
                logging.info("-----Start processing sample " + sample_name + "-----")
                illumina_reads_1=os.path.join(input_directory,illumina_reads_1)
                genome_annotation(illumina_reads_1)
                files_processed.write(illumina_reads_1 + '\n')
                files_processed.close()
    else:
        if Resfams_flag == 'False':
            os.chdir(input_directory)
            for illumina_reads_1 in glob.glob("*.faa"):
                logging.info("Reading protien files ending .faa within the input directory")
                sample_name = illumina_reads_1[:-4]
                os.chdir(output_directory)
                files_processed = open('files_processed.txt', 'a+')
                row = [x.strip('\n') for x in files_processed.readlines()]
                if str(illumina_reads_1) in row:
                    logging.info(sample_name + " already processed")
                    continue
                else:
                    sample_directory = os.path.join(output_directory, illumina_reads_1[:-4])
                    if not os.path.exists(sample_directory):
                        os.makedirs(sample_directory)
                    os.chdir(input_directory)
                    logging.info("-----Start processing sample " + sample_name + "-----")
                    illumina_reads_1=os.path.join(input_directory,illumina_reads_1)
                    post_annotation_analysis(illumina_reads_1)
                    #Resfams(illumina_reads_1)
                    files_processed.write(illumina_reads_1 + '\n')
                    files_processed.close()
        elif cardSearch_flag == 'False':
            os.chdir(input_directory)
            for illumina_reads_1 in glob.glob("*.faa"):
                logging.info("Reading protien files ending .faa within the input directory")
                sample_name = illumina_reads_1[:-4]
                os.chdir(output_directory)
                files_processed = open('files_processed.txt', 'a+')
                row = [x.strip('\n') for x in files_processed.readlines()]
                if str(illumina_reads_1) in row:
                    logging.info(sample_name + " already processed")
                    continue
                else:
                    sample_directory = os.path.join(output_directory, illumina_reads_1[:-4])
                    if not os.path.exists(sample_directory):
                        os.makedirs(sample_directory)
                    os.chdir(input_directory)
                    logging.info("-----Start processing sample " + sample_name + "-----")
                    illumina_reads_1=os.path.join(input_directory,illumina_reads_1)
                    post_annotation_analysis(illumina_reads_1)
                    #cardSearch(illumina_reads_1)
                    files_processed.write(illumina_reads_1 + '\n')
                    files_processed.close()
        elif VirDBSearch_flag == 'False':
            os.chdir(input_directory)
            for illumina_reads_1 in glob.glob("*.faa"):
                logging.info("Reading protien files ending .faa within the input directory")
                sample_name = illumina_reads_1[:-4]
                os.chdir(output_directory)
                files_processed = open('files_processed.txt', 'a+')
                row = [x.strip('\n') for x in files_processed.readlines()]
                if str(illumina_reads_1) in row:
                    logging.info(sample_name + " already processed")
                    continue
                else:
                    sample_directory = os.path.join(output_directory, illumina_reads_1[:-4])
                    if not os.path.exists(sample_directory):
                        os.makedirs(sample_directory)
                    os.chdir(input_directory)
                    logging.info("-----Start processing sample " + sample_name + "-----")
                    illumina_reads_1=os.path.join(input_directory,illumina_reads_1)
                    #VirDBSearch(illumina_reads_1)
                    post_annotation_analysis(illumina_reads_1)
                    files_processed.write(illumina_reads_1 + '\n')
                    files_processed.close()
        else:
            print ""
# Everything deactivated: nothing could have run.
if (Output_flag == 'True' and Resfams_flag == 'True' and prokka_flag == 'True' and virulencefinder_flag == 'True' and emmTyping_flag == 'True' and parSNP_flag == 'True' and VirDBSearch_flag == 'True' and cardSearch_flag == 'True' and resfinder_flag == 'True' and plasmids_finder_flag == 'True' and mlst_typing_flag == 'True' and spades_flag == 'True' and trim_galore_flag == 'True'):
    logging.info("Nothing to do here!!")
logging.info("----------------------Script Completed---------------------")
def show_results(config_f):
with open(config_f, 'r') as f:
config = yaml.load(f)
#deactivated tools
if 'deactivate' in config['mlst_typing']:
mlst_typing_flag= str(config['mlst_typing']['deactivate'])
else:
mlst_typing_flag= 'yes'
if 'deactivate' in config['plasmids_finder']:
plasmids_finder_flag= str(config['plasmids_finder']['deactivate'])
else:
plasmids_finder_flag= 'yes'
if 'deactivate' in config['cardSearch']:
cardSearch_flag= str(config['cardSearch']['deactivate'])
else:
cardSearch_flag= 'yes'
if 'deactivate' in config['VirDBSearch']:
VirDBSearch_flag= str(config['VirDBSearch']['deactivate'])
else:
VirDBSearch_flag= 'yes'
if 'deactivate' in config['emmTyping']:
emmTyping_flag= str(config['emmTyping']['deactivate'])
else:
emmTyping_flag= 'yes'
if 'deactivate' in config['resfinder']:
resfinder_flag= str(config['resfinder']['deactivate'])
else:
resfinder_flag= 'yes'
if 'deactivate' in config['Resfams']:
Resfams_flag= str(config['Resfams']['deactivate'])
else:
Resfams_flag= 'yes'
if 'deactivate' in config['virulencefinder']:
virulencefinder_flag= str(config['virulencefinder']['deactivate'])
else:
virulencefinder_flag= 'yes'
output_path = config["directories"]["output"]
if(mlst_typing_flag=="False"):
try:
global mlst_samples
mlst_output_ = output_path+"/mlst_output.txt"
try:
os.remove(mlst_output_)
except:
print ""
Output_organizer_path=os.path.join(pipeline_path+'/Output_organizer.pl')
call(["perl",Output_organizer_path,mlst_samples,mlst_output_,"none"])
except:
print "Can't find MLST output"
if(plasmids_finder_flag=="False"):
try:
#global output_path
global Plasmidfinder_samples
plasmidsFinder_output_ = output_path+"/plasmids_finder_output.txt"
try:
os.remove(plasmidsFinder_output_)
except:
print ""
Output_organizer_path=os.path.join(pipeline_path+'/Output_organizer.pl')
call(["perl",Output_organizer_path,Plasmidfinder_samples,plasmidsFinder_output_,"none"])
except:
print "Can't find Plasmidfinder output"
if(virulencefinder_flag=="False"):
try:
#global output_path
global virulence_samples
virulencefinder_output_ = output_path+"/virulencefinder_output.txt"
try:
os.remove(virulencefinder_output_)
except:
print ""
Output_organizer_path=os.path.join(pipeline_path+'/Output_organizer.pl')
call(["perl",Output_organizer_path,virulence_samples,virulencefinder_output_,"none"])
except:
print "Can't find virulencefinder output"
if(resfinder_flag=="False"):
try:
#global output_path
global resfinder_samples
resfinder_output_ = output_path+"/resfinder_output.txt"
try:
os.remove(resfinder_output_)
except:
print ""
Output_organizer_path=os.path.join(pipeline_path+'/Output_organizer.pl')
call(["perl",Output_organizer_path,resfinder_samples,resfinder_output_,"none"])
except:
print "Can't find resfinder output"
if(emmTyping_flag=="False"):
try:
#global output_path
global emm_samples
emm_output_ = output_path+"/emm_output.txt"
try:
os.remove(emm_output_)
except:
print ""
Output_organizer_path=os.path.join(pipeline_path+'/Output_organizer.pl')
call(["perl",Output_organizer_path,emm_samples,emm_output_,"none"])
except:
print "Can't find emmTyping output"
if(Resfams_flag=="False"):
try:
#global output_path
global resfams_samples
resfams_output_ = output_path+"/resfams_output.txt"
try:
os.remove(resfams_output_)
except:
print ""
Output_organizer_path=os.path.join(pipeline_path+'/Output_organizer.pl')
call(["perl",Output_organizer_path,resfams_samples,resfams_output_,"resfams"])
except:
print "Can't find resfams output"
if(cardSearch_flag=="False"):
try:
#global output_path
global card_samples
cardSearch_output_ = output_path+"/cardSearch_output.txt"
try:
os.remove(cardSearch_output_)
except:
print ""
Output_organizer_path=os.path.join(pipeline_path+'/Output_organizer.pl')
call(["perl",Output_organizer_path,card_samples,cardSearch_output_,"card"])
except:
print "Can't find cardSearch output"
if(VirDBSearch_flag=="False"):
try:
#global output_path
global virdb_samples
VirDBSearch_output_ = output_path+"/VirDBSearch_output.txt"
try:
os.remove(VirDBSearch_output_)
except:
print ""
Output_organizer_path=os.path.join(pipeline_path+'/Output_organizer.pl')
call(["perl",Output_organizer_path,virdb_samples,VirDBSearch_output_,"vfdb"])
except:
print "Can't find VirDBSearch output"
#####################################
#####################################
#####################################
try:
Processors_=str(args.processors)
print "using %s thereads" % (Processors_)
except ValueError:
print "using the default 4 threads"
pipeline(config_file,Processors_)
os.chdir(pipeline_path)
show_results(config_file)
|
wholeGenomeSequencingAnalysisPipeline/BacPipe
|
Pipeline_cmd.py
|
Python
|
gpl-3.0
| 52,132
|
[
"BLAST"
] |
dacf3b1c3be4f8a6b4a58be867025cd946fbc5b2e1435cd8aae0feb5985bbe51
|
#
# Gramps - a GTK+/GNOME based genealogy program
#
# Copyright (C) 2001-2006 Donald N. Allingham
# Copyright (C) 2008 Brian G. Matherly
# Copyright (C) 2010 Jakim Friant
# Copyright (C) 2011-2012 Paul Franklin
#
# This program is free software; you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation; either version 2 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program; if not, write to the Free Software
# Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA.
#
""" The ReportDialog base class """
#-------------------------------------------------------------------------
#
# Python modules
#
#-------------------------------------------------------------------------
import os
import logging
LOG = logging.getLogger(".")
#-------------------------------------------------------------------------
#
# GTK+ modules
#
#-------------------------------------------------------------------------
from gi.repository import Gtk
#-------------------------------------------------------------------------
#
# Gramps modules
#
#-------------------------------------------------------------------------
from gramps.gen.config import config
from gramps.gen.const import URL_MANUAL_PAGE, DOCGEN_OPTIONS
from gramps.gen.errors import (DatabaseError, FilterError, ReportError,
WindowActiveError)
from ...utils import open_file_with_default_application
from .. import add_gui_options, make_gui_option
from ...user import User
from ...dialog import ErrorDialog, OptionDialog
from gramps.gen.plug.report import (CATEGORY_TEXT, CATEGORY_DRAW, CATEGORY_BOOK,
CATEGORY_CODE, CATEGORY_WEB,
CATEGORY_GRAPHVIZ, standalone_categories)
from gramps.gen.plug.docgen import StyleSheet, StyleSheetList
from ...managedwindow import ManagedWindow
from ._stylecombobox import StyleComboBox
from ._styleeditor import StyleListDisplay
from ._fileentry import FileEntry
from gramps.gen.const import GRAMPS_LOCALE as glocale
_ = glocale.translation.gettext
#-------------------------------------------------------------------------
#
# Private Constants
#
#-------------------------------------------------------------------------
URL_REPORT_PAGE = URL_MANUAL_PAGE + "_-_Reports"
#-------------------------------------------------------------------------
#
# ReportDialog class
#
#-------------------------------------------------------------------------
class ReportDialog(ManagedWindow):
    """
    The ReportDialog base class. This is a base class for generating
    customized dialogs to solicit options for a report. It cannot be
    used as is, but it can be easily sub-classed to create a functional
    dialog for a stand-alone report.
    """
    # Padding (pixels) used around the header label.
    border_pad = 6

    def __init__(self, dbstate, uistate, option_class, name, trans_name,
                 track=[]):
        """Initialize a dialog to request that the user select options
        for a basic *stand-alone* report.

        :param option_class: either an options class to instantiate with
            (name, db), or an already-built options instance (see
            init_options).
        :param name: untranslated report name, used as the window key and
            for loading saved option values.
        :param trans_name: translated report name, shown in the title.
        """
        # NOTE(review): mutable default argument `track=[]` is shared across
        # calls; ManagedWindow appears to only read it, but confirm before
        # relying on that.
        self.style_name = "default"
        self.firstpage_added = False
        self.raw_name = name
        self.dbstate = dbstate
        self.uistate = uistate
        self.db = dbstate.db
        self.report_name = trans_name
        ManagedWindow.__init__(self, uistate, track, self)
        self.init_options(option_class)
        self.init_interface()

    def close(self, *obj):
        """
        Close itself.
        cleanup things that can prevent garbage collection
        """
        # Widgets are removed back-to-front so list indices stay valid
        # while deleting.
        if hasattr(self, 'widgets'): # handle pathlogical bug 4145
            totwidg = list(range(len(self.widgets)))
            totwidg.reverse()
            for ind in totwidg:
                if hasattr(self.widgets[ind][1], 'clean_up'):
                    self.widgets[ind][1].clean_up()
                del self.widgets[ind]
            delattr(self, 'widgets')
        for name, fram in self.frames.items():
            totwidg = list(range(len(fram)))
            totwidg.reverse()
            for ind in totwidg:
                if hasattr(fram[ind][1], 'clean_up'):
                    fram[ind][1].clean_up()
                del fram[ind]
        self.frames.clear()
        self.frames = None
        ManagedWindow.close(self, *obj)

    def init_options(self, option_class):
        """Instantiate or adopt the report options object.

        `option_class` may be a class (instantiated with the raw report
        name and database) or an already-constructed instance; passing an
        instance to issubclass raises TypeError, which is how the two
        cases are distinguished.
        """
        try:
            if issubclass(option_class, object):
                self.options = option_class(self.raw_name, self.db)
        except TypeError:
            self.options = option_class
        self.options.load_previous_values()

    def build_window_key(self, obj):
        """Return the key under which ManagedWindow tracks this dialog."""
        key = self.raw_name
        return key

    def build_menu_names(self, obj):
        """Return the (menu label, window title) pair for ManagedWindow."""
        return (_("Configuration"), self.report_name)

    def init_interface(self):
        """Build the whole dialog: buttons, grid, option widgets, frames."""
        self.widgets = []
        self.doc_widgets = []
        self.frame_names = []
        self.frames = {}
        self.format_menu = None
        self.style_button = None
        self.style_name = self.options.handler.get_default_stylesheet_name()
        window = Gtk.Dialog(title='Gramps')
        self.set_window(window, None, self.get_title())
        self.window.set_modal(True)
        self.help = self.window.add_button(_('_Help'), Gtk.ResponseType.HELP)
        self.help.connect('clicked', self.on_help_clicked)
        self.cancel = self.window.add_button(_('_Cancel'),
                                            Gtk.ResponseType.CANCEL)
        self.cancel.connect('clicked', self.on_cancel)
        self.ok = self.window.add_button(_('_OK'), Gtk.ResponseType.OK)
        self.ok.connect('clicked', self.on_ok_clicked)
        self.window.set_position(Gtk.WindowPosition.CENTER_ON_PARENT)
        self.window.set_default_size(600, -1)
        # Set up and run the dialog. These calls are not in top down
        # order when looking at the dialog box as there is some
        # interaction between the various frames.
        self.setup_title()
        self.setup_header()
        self.grid = Gtk.Grid()
        self.grid.set_column_spacing(12)
        self.grid.set_row_spacing(6)
        self.grid.set_border_width(6)
        self.row = 0
        # Build the list of widgets that are used to extend the Options
        # frame and to create other frames
        self.add_user_options()
        self.setup_init()
        self.setup_format_frame()
        self.setup_target_frame()
        self.setup_style_frame()
        self.notebook = Gtk.Notebook()
        self.notebook.set_scrollable(True)
        self.notebook.set_border_width(6)
        # NOTE(review): bare except used to support both box-like and
        # generic containers; any pack_start failure falls back to add().
        try:
            #assume a vbox or hbox
            self.window.vbox.pack_start(self.notebook,
                                        expand=True, fill=True, padding=0)
        except:
            #general container instead:
            self.window.vbox.add(self.notebook)
        self.setup_report_options_frame()
        self.setup_other_frames()
        self.notebook.set_current_page(0)
        try:
            #assume a vbox or hbox
            self.window.vbox.pack_start(self.grid,
                                        expand=True, fill=True, padding=0)
        except:
            #general container instead:
            self.window.vbox.add(self.grid)
        self.show()

    def get_title(self):
        """The window title for this dialog"""
        name = self.report_name
        category = standalone_categories[self.category][1]
        return "%s - %s - Gramps" % (name, category)

    #------------------------------------------------------------------------
    #
    # Functions related to extending the options
    #
    #------------------------------------------------------------------------
    def add_user_options(self):
        """Called to allow subclasses to add widgets to the dialog form.
        It is called immediately before the window is displayed. All
        calls to add_option or add_frame_option should be called in
        this task."""
        add_gui_options(self)

    def parse_user_options(self):
        """Called to allow parsing of added widgets.
        It is called when OK is pressed in a dialog.
        All custom widgets should provide a parsing code here."""
        try:
            self.options.parse_user_options()
        except:
            # Deliberately swallow so a single bad widget cannot block
            # the whole report; the failure is still logged.
            LOG.error("Failed to parse user options.", exc_info=True)

    def add_option(self, label_text, widget):
        """Takes a text string and a Gtk Widget, and stores them to be
        appended to the Options section of the dialog. The text string
        is used to create a label for the passed widget. This allows the
        subclass to extend the Options section with its own widgets. The
        subclass is responsible for all managing of the widgets, including
        extracting the final value before the report executes. This task
        should only be called in the add_user_options task."""
        self.widgets.append((label_text, widget))

    def add_frame_option(self, frame_name, label_text, widget):
        """Similar to add_option this method takes a frame_name, a
        text string and a Gtk Widget. When the interface is built,
        all widgets with the same frame_name are grouped into a
        GtkFrame. This allows the subclass to create its own sections,
        filling them with its own widgets. The subclass is responsible for
        all managing of the widgets, including extracting the final value
        before the report executes. This task should only be called in
        the add_user_options task."""
        if frame_name in self.frames:
            self.frames[frame_name].append((label_text, widget))
        else:
            self.frames[frame_name] = [(label_text, widget)]
            self.frame_names.append(frame_name)

    #------------------------------------------------------------------------
    #
    # Functions to create a default output style.
    #
    #------------------------------------------------------------------------
    def build_style_menu(self, default=None):
        """Build a menu of style sets that are available for use in
        this report. This menu will always have a default style
        available, and will have any other style set name that the
        user has previously created for this report. This menu is
        created here instead of inline with the rest of the style
        frame, because it must be recreated to reflect any changes
        whenever the user closes the style editor dialog."""
        if default is None:
            default = self.style_name
        style_sheet_map = self.style_sheet_list.get_style_sheet_map()
        self.style_menu.set(style_sheet_map, default)

    #------------------------------------------------------------------------
    #
    # Functions related to setting up the dialog window.
    #
    #------------------------------------------------------------------------
    def setup_title(self):
        """Set up the title bar of the dialog. This function relies
        on the get_title() customization function for what the title
        should be."""
        self.window.set_title(self.get_title())

    def setup_header(self):
        """Set up the header line bar of the dialog."""
        label = Gtk.Label(label='<span size="larger" weight="bold">%s</span>' %
                          self.report_name)
        label.set_use_markup(True)
        self.window.vbox.pack_start(label, False, False, self.border_pad)

    def setup_style_frame(self):
        """Set up the style frame of the dialog. This function relies
        on other routines to create the default style for this report,
        and to read in any user-defined styles for this report. It
        then builds a menu of all the available styles for the user to
        choose from."""
        # Build the default style set for this report.
        self.default_style = StyleSheet()
        self.options.make_default_style(self.default_style)
        if self.default_style.is_empty():
            # Don't display the option if no styles are used
            return
        # Styles Frame
        label = Gtk.Label(label=_("%s:") % _("Style"))
        label.set_halign(Gtk.Align.START)
        self.style_menu = StyleComboBox()
        self.style_menu.set_hexpand(True)
        self.style_button = Gtk.Button(label="%s..." % _("Style Editor"))
        self.style_button.connect('clicked', self.on_style_edit_clicked)
        self.grid.attach(label, 1, self.row, 1, 1)
        self.grid.attach(self.style_menu, 2, self.row, 1, 1)
        self.grid.attach(self.style_button, 3, self.row, 1, 1)
        self.row += 1
        # Build the initial list of available styles sets. This
        # includes the default style set and any style sets saved from
        # previous invocations of gramps.
        self.style_sheet_list = StyleSheetList(
            self.options.handler.get_stylesheet_savefile(), self.default_style)
        # Now build the actual menu.
        style = self.options.handler.get_default_stylesheet_name()
        self.build_style_menu(style)

    def setup_report_options_frame(self):
        """Set up the report options frame of the dialog. This
        function relies on several report_xxx() customization
        functions to determine which of the items should be present in
        this box. *All* of these items are optional, although the
        generations fields is used in most
        (but not all) dialog boxes."""
        row = 0
        max_rows = len(self.widgets)
        if max_rows == 0:
            return
        grid = Gtk.Grid()
        grid.set_border_width(6)
        grid.set_column_spacing(12)
        grid.set_row_spacing(6)
        label = Gtk.Label(label="<b>%s</b>" % _("Report Options"))
        label.set_halign(Gtk.Align.START)
        label.set_use_markup(True)
        self.notebook.append_page(grid, label)
        # Setup requested widgets
        for (text, widget) in self.widgets:
            widget.set_hexpand(True)
            if text:
                # translators: needed for French, ignore otherwise
                text_widget = Gtk.Label(label=_("%s:") % text)
                text_widget.set_halign(Gtk.Align.START)
                grid.attach(text_widget, 1, row, 1, 1)
                grid.attach(widget, 2, row, 1, 1)
            else:
                grid.attach(widget, 2, row, 1, 1)
            row += 1

    def setup_other_frames(self):
        """Create one notebook page per frame name registered through
        add_frame_option, and lay out its widgets in a grid."""
        from .._guioptions import GuiTextOption
        for key in self.frame_names:
            flist = self.frames[key]
            grid = Gtk.Grid()
            grid.set_column_spacing(12)
            grid.set_row_spacing(6)
            grid.set_border_width(6)
            l = Gtk.Label(label="<b>%s</b>" % _(key))
            l.set_use_markup(True)
            self.notebook.append_page(grid, l)
            row = 0
            for (text, widget) in flist:
                widget.set_hexpand(True)
                if text:
                    text_widget = Gtk.Label(label=_('%s:') % text)
                    text_widget.set_halign(Gtk.Align.START)
                    grid.attach(text_widget, 1, row, 1, 1)
                    # NOTE(review): both branches attach identically; the
                    # GuiTextOption distinction appears vestigial here.
                    if isinstance(widget, GuiTextOption):
                        grid.attach(widget, 2, row, 1, 1)
                    else:
                        grid.attach(widget, 2, row, 1, 1)
                else:
                    grid.attach(widget, 2, row, 1, 1)
                row += 1

    #------------------------------------------------------------------------
    #
    # Customization hooks for stand-alone reports (subclass ReportDialog)
    #
    #------------------------------------------------------------------------
    def setup_format_frame(self):
        """Not used in bare report dialogs. Override in the subclass."""
        pass

    #------------------------------------------------------------------------
    #
    # Functions related getting/setting the default directory for a dialog.
    #
    #------------------------------------------------------------------------
    def get_default_directory(self):
        """Get the name of the directory to which the target dialog
        box should default. This value can be set in the preferences
        panel."""
        return config.get('paths.report-directory')

    def set_default_directory(self, value):
        """Save the name of the current directory, so that any future
        reports will default to the most recently used directory.
        This also changes the directory name that will appear in the
        preferences panel, but does not change the preference in disk.
        This means that the last directory used will only be
        remembered for this session of gramps unless the user saves
        his/her preferences."""
        config.set('paths.report-directory', value)

    #------------------------------------------------------------------------
    #
    # Functions related to setting up the dialog window.
    #
    #------------------------------------------------------------------------
    def setup_init(self):
        """Create the filename entry and the 'Document Options' header."""
        # add any elements that we are going to need:
        hid = self.style_name
        if hid[-4:] == ".xml":
            hid = hid[0:-4]
        self.target_fileentry = FileEntry(hid, _("Save As"),
                                          parent=self.window)
        spath = self.get_default_directory()
        self.target_fileentry.set_filename(spath)
        # need any labels at top:
        label = Gtk.Label(label="<b>%s</b>" % _('Document Options'))
        label.set_use_markup(1)
        label.set_halign(Gtk.Align.START)
        self.grid.set_border_width(12)
        self.grid.attach(label, 0, self.row, 4, 1)
        self.row += 1

    def setup_target_frame(self):
        """Set up the target frame of the dialog. This function
        relies on several target_xxx() customization functions to
        determine whether the target is a directory or file, what the
        title of any browser window should be, and what default
        directory should be used."""
        # Save Frame
        self.doc_label = Gtk.Label(label=_("%s:") % _("Filename"))
        self.doc_label.set_halign(Gtk.Align.START)
        self.grid.attach(self.doc_label, 1, self.row, 1, 1)
        self.target_fileentry.set_hexpand(True)
        self.grid.attach(self.target_fileentry, 2, self.row, 2, 1)
        self.row += 1

    #------------------------------------------------------------------------
    #
    # Functions related to retrieving data from the dialog window
    #
    #------------------------------------------------------------------------
    def parse_target_frame(self):
        """Parse the target frame of the dialog. If the target
        filename is empty this routine returns a special value of None
        to tell the calling routine to give up. This function also
        saves the current directory so that any future reports will
        default to the most recently used directory.

        :returns: 1 on success, None if the user must pick again.
        """
        self.target_path = self.target_fileentry.get_full_path(0)
        if not self.target_path:
            return None
        # First we check whether the selected path exists
        if os.path.exists(self.target_path):
            # selected path is an existing dir and we need a dir
            if os.path.isdir(self.target_path):
                # check whether the dir has rwx permissions
                if not os.access(self.target_path, os.R_OK|os.W_OK|os.X_OK):
                    ErrorDialog(_('Permission problem'),
                                _("You do not have permission to write "
                                  "under the directory %s\n\n"
                                  "Please select another directory or correct "
                                  "the permissions.") % self.target_path,
                                parent=self.window)
                    return None
            # selected path is an existing file and we need a file
            if os.path.isfile(self.target_path):
                aaa = OptionDialog(_('File already exists'),
                                   _('You can choose to either overwrite the '
                                     'file, or change the selected filename.'),
                                   _('_Overwrite'), None,
                                   _('_Change filename'), None,
                                   parent=self.window)
                if aaa.get_response() == Gtk.ResponseType.YES:
                    return None
        # selected path does not exist yet
        else:
            # we will need to create the file/dir
            # need to make sure we can create in the parent dir
            parent_dir = os.path.dirname(os.path.normpath(self.target_path))
            if os.path.isdir(parent_dir):
                if not os.access(parent_dir, os.W_OK):
                    ErrorDialog(_('Permission problem'),
                                _("You do not have permission to create "
                                  "%s\n\n"
                                  "Please select another path or correct "
                                  "the permissions.") % self.target_path,
                                parent=self.window)
                    return None
            else:
                ErrorDialog(_('No directory'),
                            _('There is no directory %s.\n\n'
                              'Please select another directory '
                              'or create it.') % parent_dir,
                            parent=self.window)
                return None
        self.set_default_directory(os.path.dirname(self.target_path) + os.sep)
        self.options.handler.output = self.target_path
        return 1

    def parse_style_frame(self):
        """Parse the style frame of the dialog. Save the user
        selected output style for later use. Note that this routine
        retrieves a value whether or not the menu is displayed on the
        screen. The subclass will know whether this menu was enabled.
        This is for simplicity of programming."""
        if not self.default_style.is_empty():
            (style_name, self.selected_style) = self.style_menu.get_value()
            self.options.handler.set_default_stylesheet_name(style_name)

    #------------------------------------------------------------------------
    #
    # Callback functions from the dialog
    #
    #------------------------------------------------------------------------
    def on_ok_clicked(self, obj):
        """The user is satisfied with the dialog choices. Validate
        the output file name before doing anything else. If there is
        a file name, gather the options and create the report."""
        # Is there a filename? This should also test file permissions, etc.
        if not self.parse_target_frame():
            # Re-run the dialog so the user can correct the filename.
            self.window.run()
        # Preparation
        self.parse_style_frame()
        self.parse_user_options()
        # Save options
        self.options.handler.save_options()

    def on_cancel(self, *obj):
        """Cancel: nothing to do; ManagedWindow handles the close."""
        pass

    def on_help_clicked(self, *obj):
        """Open the online manual page for reports."""
        from ...display import display_help
        display_help(URL_REPORT_PAGE, self.report_name.replace(" ", "_"))

    def on_style_edit_clicked(self, *obj):
        """The user has clicked on the 'Edit Styles' button. Create a
        style sheet editor object and let them play. When they are
        done, the previous routine will be called to update the dialog
        menu for selecting a style."""
        StyleListDisplay(self.style_sheet_list, self.uistate, self.track,
                         callback=self.build_style_menu)

    #----------------------------------------------------------------------
    #
    # Functions related to any docgen options for a dialog.
    #
    #----------------------------------------------------------------------
    def setup_doc_options_frame(self):
        """(Re)build the docgen-specific option widgets in the main grid."""
        if self.doc_widgets:
            # Remove the widgets of the previously selected docgen first.
            for option_widget in self.doc_widgets:
                self.grid.remove(option_widget)
            self.doc_widgets = []
        self.doc_options = None
        if not self.doc_option_class:
            return # this docgen type has no options
        self.init_doc_options(self.doc_option_class)
        menu = self.doc_options.menu
        for name in menu.get_option_names(DOCGEN_OPTIONS):
            option = menu.get_option(DOCGEN_OPTIONS, name)
            # override option default with xml-saved value:
            if name in self.doc_options.options_dict:
                option.set_value(self.doc_options.options_dict[name])
            widget, has_label = make_gui_option(option, self.dbstate,
                                                self.uistate, self.track)
            if has_label:
                widget_text = Gtk.Label(label=(_('%s:') % option.get_label()))
                widget_text.set_halign(Gtk.Align.START)
                self.grid.attach(widget_text, 1, self.row, 1, 1)
                self.doc_widgets.append(widget_text)
            self.grid.attach(widget, 2, self.row, 2, 1)
            self.doc_widgets.append(widget)
            self.row += 1

    def init_doc_options(self, option_class):
        """Instantiate or adopt the docgen options object (same
        class-or-instance convention as init_options)."""
        try:
            if issubclass(option_class, object):
                self.doc_options = option_class(self.raw_name, self.db)
        except TypeError:
            self.doc_options = option_class
        self.doc_options.load_previous_values()

    def parse_doc_options(self):
        """
        Called to allow parsing of added docgen widgets.
        It is called when OK is pressed in a dialog.
        """
        if not self.doc_options:
            return
        try:
            self.doc_options.parse_user_options()
            for opt in self.doc_options.options_dict:
                self.options.options_dict[opt] = \
                    [self.basedocname, self.doc_options.options_dict[opt]]
        except:
            # Best-effort: a docgen option failure must not abort the report.
            logging.warning("Failed to parse doc options")
def report(dbstate, uistate, person, report_class, options_class,
           trans_name, name, category, require_active):
    """
    report - task starts the report. The plugin system requires that the
    task be in the format of task that takes a database and a person as
    its arguments.

    Picks the dialog class from the report category, runs the dialog in a
    loop, and on OK builds and executes the report, mapping known failure
    types to user-facing error dialogs.
    """
    if require_active and not person:
        ErrorDialog(
            _('Active person has not been set'),
            _('You must select an active person for this report to work '
              'properly.'),
            parent=uistate.window)
        return
    # Dispatch on category: most categories get a specialized dialog;
    # BOOK/CODE reports run directly without an options dialog.
    if category == CATEGORY_TEXT:
        from ._textreportdialog import TextReportDialog
        dialog_class = TextReportDialog
    elif category == CATEGORY_DRAW:
        from ._drawreportdialog import DrawReportDialog
        dialog_class = DrawReportDialog
    elif category == CATEGORY_GRAPHVIZ:
        from ._graphvizreportdialog import GraphvizReportDialog
        dialog_class = GraphvizReportDialog
    elif category == CATEGORY_WEB:
        from ._webreportdialog import WebReportDialog
        dialog_class = WebReportDialog
    elif category in (CATEGORY_BOOK, CATEGORY_CODE):
        try:
            report_class(dbstate, uistate)
        except WindowActiveError:
            pass
        return
    else:
        dialog_class = ReportDialog
    dialog = dialog_class(dbstate, uistate, options_class, name, trans_name)
    while True:
        response = dialog.window.run()
        if response == Gtk.ResponseType.OK:
            dialog.close()
            try:
                user = User(uistate=uistate)
                my_report = report_class(dialog.db, dialog.options, user)
                my_report.doc.init()
                my_report.begin_report()
                my_report.write_report()
                my_report.end_report()
                # Web reports do not have a target frame
                # The GtkPrint generator can not be "opened"
                if (hasattr(dialog, "open_with_app") and
                        dialog.open_with_app.get_property('sensitive') == True
                        and dialog.open_with_app.get_active()):
                    out_file = dialog.options.get_output()
                    open_file_with_default_application(out_file, uistate)
            except FilterError as msg:
                (msg1, msg2) = msg.messages()
                ErrorDialog(msg1, msg2, parent=uistate.window)
            except IOError as msg:
                ErrorDialog(_("Report could not be created"),
                            str(msg),
                            parent=uistate.window)
            except ReportError as msg:
                (msg1, msg2) = msg.messages()
                ErrorDialog(msg1, msg2, parent=uistate.window)
            except DatabaseError as msg:
                ErrorDialog(_("Report could not be created"),
                            str(msg),
                            parent=uistate.window)
                # The following except statement will catch all "NoneType" exceptions.
                # This is useful for released code where the exception is most likely
                # a corrupt database. But it is less useful for developing new reports
                # where the exception is most likely a report bug.
                # except AttributeError,msg:
                #    if str(msg).startswith("'NoneType' object has no attribute"):
                #        # "'NoneType' object has no attribute ..." usually means
                #        # database corruption
                #        RunDatabaseRepair(str(msg),
                #                          parent=self.window)
                #    else:
                #        raise
                # NOTE(review): re-raise propagates the DatabaseError out of
                # report() after showing the dialog — TODO confirm intended.
                raise
            except:
                LOG.error("Failed to run report.", exc_info=True)
            break
        elif response == Gtk.ResponseType.CANCEL:
            dialog.close()
            break
        elif response == Gtk.ResponseType.DELETE_EVENT:
            #just stop, in ManagedWindow, delete-event is already coupled to
            #correct action.
            break
    #do needed cleanup
    dialog.db = None
    dialog.options = None
    if hasattr(dialog, 'window'):
        delattr(dialog, 'window')
    if hasattr(dialog, 'notebook'):
        delattr(dialog, 'notebook')
    del dialog
|
jralls/gramps
|
gramps/gui/plug/report/_reportdialog.py
|
Python
|
gpl-2.0
| 30,880
|
[
"Brian"
] |
3ff9d5086b802791783c115c79a30af6000d446358b11fbf5f0630a8c8c0c143
|
# -*- coding: utf-8 -*-
"""
Copyright (C) 2017 IBM Corporation
Licensed under the Apache License, Version 2.0 (the “License”);
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an “AS IS” BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
Contributors:
* Roberto Oliveira <rdutra@br.ibm.com>
* Rafael Sene <rpsene@br.ibm.com>
* Diego Fernandez-Merjildo <merjildo@br.ibm.com>
"""
import os
from .report_blocker import ReportBlocker
from .problem_reporter import ProblemReporter
from . import core
class Visitor(object):
    """ Class used to visit the translation unit nodes and run checkers """

    def __init__(self, checker):
        """Store the checker whose check_* predicates drive the reports.

        :param checker: object providing check_node/check_include/check_file
            plus get_problem_type/get_problem_msg/get_solution.
        """
        self.checker = checker
        self.current_file = ''
        # Problem types detected by scanning the raw file text rather than
        # by walking the clang AST (see visit()).
        self.files_treat_list = ["Performance degradation",
                                 "Decimal Floating Point (DFP) API",
                                 "Integrated Performance Primitives (IPP) API",
                                 "Math Kernel Library (MKL) API",
                                 "Message Passing Interface (MPI) API",
                                 "Non Portable Pthread",
                                 "x86-specific compiler built-in"]

    def visit(self, node, file_name):
        """ Run checker accordingly, either using the TranslationUnit Unit or
        a file """
        self.current_file = file_name
        ReportBlocker.block_lines(self.current_file)
        self.visit_includes()
        # File-based checkers scan raw text; the rest walk the AST.
        if self.checker.get_problem_type() in self.files_treat_list:
            self.visit_file()
        else:
            self.visit_nodes(node)

    def visit_nodes(self, node):
        """ Visit all nodes from translation unit and for each node, call all
        activate checkers to seek for problems """
        if self.checker.check_node(node):
            ProblemReporter.report_node(node, self.current_file,
                                        self.checker.get_problem_type(),
                                        self.checker.get_problem_msg(),
                                        self.checker.get_solution(node))
        # Recurse into children under a distinct name; the original reused
        # 'node' as the loop variable, shadowing the parameter.
        for child in node.get_children():
            self.visit_nodes(child)

    def visit_includes(self):
        """ Visit includes from translation unit and for each include, call
        all activate checkers to seek for problems """
        includes_dict = core.get_includes(self.current_file)
        for line, name in includes_dict.items():
            # Compare only the header's basename, not its full path.
            name = os.path.basename(name)
            if self.checker.check_include(name):
                ProblemReporter.report_include(name, self.current_file, line,
                                               self.checker.get_problem_type(),
                                               self.checker.get_problem_msg(),
                                               self.checker.get_solution(name))

    def visit_file(self):
        """ Visit files and look for problems that clang doesn't treat """
        reports = self.checker.check_file(self.current_file)
        for report in reports:
            # Each report is a (name, line_number) pair.
            name = report[0]
            num_line = report[1]
            ProblemReporter.report_file(self.current_file, num_line, name,
                                        self.checker.get_problem_type(),
                                        self.checker.get_problem_msg(),
                                        self.checker.get_solution(name))
|
open-power-sdk/migration-advisor
|
ma/visitor.py
|
Python
|
apache-2.0
| 3,748
|
[
"VisIt"
] |
6c38fab94d6c59ee75d6eb309fef97f1b63a31d65f3e6a5a4383721a1fd44712
|
#!/usr/bin/python
# -*- coding: latin-1 -*-
## This is a class I pulled off of stackoverflow--- may be useful for reference
import calendar
import Bio
from Bio import Entrez
def format_ddate(ddate):
    """Turn a date dictionary into an ISO-type string (YYYY-MM-DD).

    Accepts Entrez-style dicts with 'Year', 'Month', 'Day' keys; 'Month'
    may be numeric or an English abbreviation. Returns None when the
    month abbreviation is unrecognized.
    """
    year = ddate['Year']
    month = ddate['Month']
    day = ddate['Day']
    if not month.isdigit():
        # Translate an abbreviated month name ("Jan") to its number.
        month = months_rdict.get(month, None)
        if not month:
            return None
    return "%s-%s-%s" % (year, month.zfill(2), day.zfill(2))


# This is for translating abbreviated month names to numbers.
months_rdict = {abbr: str(num) for num, abbr in enumerate(calendar.month_abbr)}


def list_or_single(l):
    """Return the list itself if it holds several items, else its only item."""
    return l * (len(l) > 1) or l[0]
class PubmedSearcher:
"""Fetches data from Pubmed using the Entrez module from Biopython."""
# There are the fields that we want to fetch, and all of them are single
# values except grantlist, which is a dictionary (as per Entrez).
# More fields can easily be added, and all that needs to be done is
# to add the appropriate extract_ and fetch_ methods below.
fields = [
'pmid', 'doi', 'vol', 'pages',
'year', 'pub_month', 'pub_day',
'date_pubmed_created', 'date_pubmed_updated',
'date_accepted', 'date_aheadofprint', 'date_pubmed_history',
'grantlist' # this is a dict!
]
# Entrez normally limits the number of queries to 200, so work with block of 100.
nblock = 100
def __init__(self, email):
"""Entrez requires and email address."""
Entrez.email = email
def fetch_xml_round(self, pmid):
"""Tries to fetch and parse the XML for a list of PMIDs, no questions asked."""
return Entrez.read(Entrez.efetch(db="pubmed", id=pmid, retmode="xml"))
def fetch_xml(self, pmid):
"""This breaks the process up into blocks."""
self.nrounds = len(pmid) / self.nblock + (len(pmid) % self.nblock > 0)
if self.nrounds > 1:
print "Will query in %i rounds with %i articles per round." % (self.nrounds, self.nblock)
xml_data = []
for i in range(self.nrounds):
istart = i * self.nblock
ifinish = (i+1) * self.nblock
print "Fetching round %i..." % (i+1)
xml_data += self.fetch_xml_round(pmid[istart:ifinish])
else:
xml_data = fetch_xml_round(pmid)
return xml_data
def extract_id_factory(idtype):
def extract_id(self, xml_data):
"""Extract the %s ID from Entrez XML output.""" % idtype
for id in xml_data['PubmedData']['ArticleIdList']:
if id.attributes['IdType'].lower() == idtype:
return str(id)
return extract_id
extract_pmid = extract_id_factory('pubmed')
extract_doi = extract_id_factory('doi')
def extract_date_factory(datetype):
def extract_date(self, xml_data):
"""Extract %s date from Entrez XML output.""" % datetype
for date in xml_data['PubmedData']['History']:
if date.attributes['PubStatus'].lower() == datetype:
return format_ddate(date)
return extract_date
extract_date_accepted = extract_date_factory('accepted')
extract_date_aheadofprint = extract_date_factory('aheadofprint')
extract_date_pubmed_history = extract_date_factory('pubmed')
# Many fields can be pointed to via an Xpath directly, although we still want
# to mangle the result in many cases, so the factory needs some tweaking.
# To make names shorter, the field 'type' argument passed to the factory
# is prefixed by "xpath_" to get the xpath variable name. Watch out for that!
xpath_year = ['MedlineCitation', 'Article', 'Journal', 'JournalIssue', 'PubDate', 'Year']
xpath_pub_month = ['MedlineCitation', 'Article', 'Journal', 'JournalIssue', 'PubDate', 'Month']
xpath_pub_day = ['MedlineCitation', 'Article', 'Journal', 'JournalIssue', 'PubDate', 'Day']
xpath_vol = ['MedlineCitation', 'Article', 'Journal', 'JournalIssue', 'Volume']
xpath_pages = ['MedlineCitation', 'Article', 'Pagination', 'MedlinePgn']
xpath_date_pubmed_created = ['MedlineCitation', 'DateCreated']
xpath_date_pubmed_updated = ['MedlineCitation', 'DateRevised']
def extract_xpath_factory(type, rdict=None, format=None):
def extract_xpath(self, xml_data):
try:
data = xml_data
for node in getattr(self, 'xpath_'+type):
data = data[node]
if rdict != None:
data = rdict[data]
if format != None:
data = format(data)
return data
except (KeyError, TypeError):
return None
return extract_xpath
extract_year = extract_xpath_factory('year')
extract_pub_month = extract_xpath_factory('pub_month', rdict=months_rdict)
extract_pub_day = extract_xpath_factory('pub_day')
extract_vol = extract_xpath_factory('vol')
extract_pages = extract_xpath_factory('pages')
extract_date_pubmed_created = extract_xpath_factory('date_pubmed_created', format=format_ddate)
extract_date_pubmed_updated = extract_xpath_factory('date_pubmed_updated', format=format_ddate)
# The grantlist is a bit mroe convoluted, because it is itself a dictionary with several
# fields, which normally need to go together to by of use.
xpath_grantlist = ['MedlineCitation', 'Article', 'GrantList']
def extract_grantlist(self, xml_data):
try:
data = xml_data
for node in self.xpath_grantlist:
data = data[node]
fields = {'acronym':'Acronym', 'agency':'Agency', 'country':'Country', 'number':'GrantID'}
return [{k:d.get(v,None) for k,v in fields.items()} for d in data]
except (KeyError, TypeError):
return None
# These are convenience functions that fetch single fields for many PMIDs.
def fetch_field_factory(type):
def fetch_field(self, pmid):
targets = [getattr(self, 'extract_'+type)(parsed) for parsed in self.fetch_xml(pmid)]
return list_or_single(targets)
return fetch_field
fetch_pmid = fetch_field_factory('pmid')
fetch_doi = fetch_field_factory('doi')
fetch_year = fetch_field_factory('year')
fetch_pub_month = fetch_field_factory('pub_month')
fetch_pub_day = fetch_field_factory('pub_day')
fetch_vol = fetch_field_factory('vol')
fetch_pages = fetch_field_factory('pages')
# And this one extracts fetches all the fields defined above for many PMIDs.
def fetch_all(self, pmid):
    """Fetch the given PMID(s) and extract every field in ``self.fields``.

    Returns a list with one {field_name: value} dict per fetched record.
    """
    records = []
    for parsed in self.fetch_xml(pmid):
        records.append({field: getattr(self, 'extract_' + field)(parsed)
                        for field in self.fields})
    return records
|
dgutman/ADRC_Analytics
|
src/PubmedPulled_ExampleClass.py
|
Python
|
apache-2.0
| 7,022
|
[
"Biopython"
] |
fac713d71d068b183815bf90d2aa1949208371cf16f6c9abed3947b9b8f7817e
|
from __future__ import unicode_literals
import unittest
from django.utils import html
class TestUtilsHtml(unittest.TestCase):
    """Tests for the helpers in django.utils.html.

    NOTE(review): in this copy of the file the expected HTML entities
    (&amp;, &lt;, &#39;, &nbsp;, ...) had been unescaped by some tooling,
    which corrupted the expected values and even broke the syntax of
    test_escape (an apostrophe entity collapsed into a stray triple
    quote).  The literals below restore the canonical upstream form.
    """

    def check_output(self, function, value, output=None):
        """
        Check that function(value) equals output. If output is None,
        check that function(value) equals value.
        """
        if output is None:
            output = value
        self.assertEqual(function(value), output)

    def test_escape(self):
        f = html.escape
        items = (
            ('&', '&amp;'),
            ('<', '&lt;'),
            ('>', '&gt;'),
            ('"', '&quot;'),
            ("'", '&#39;'),
        )
        # Substitution patterns for testing the above items.
        patterns = ("%s", "asdf%sfdsa", "%s1", "1%sb")
        for value, output in items:
            for pattern in patterns:
                self.check_output(f, pattern % value, pattern % output)
            # Check repeated values.
            self.check_output(f, value * 2, output * 2)
        # Verify it doesn't double replace &.
        self.check_output(f, '<&', '&lt;&amp;')

    def test_linebreaks(self):
        f = html.linebreaks
        items = (
            ("para1\n\npara2\r\rpara3", "<p>para1</p>\n\n<p>para2</p>\n\n<p>para3</p>"),
            ("para1\nsub1\rsub2\n\npara2", "<p>para1<br />sub1<br />sub2</p>\n\n<p>para2</p>"),
            ("para1\r\n\r\npara2\rsub1\r\rpara4", "<p>para1</p>\n\n<p>para2<br />sub1</p>\n\n<p>para4</p>"),
            ("para1\tmore\n\npara2", "<p>para1\tmore</p>\n\n<p>para2</p>"),
        )
        for value, output in items:
            self.check_output(f, value, output)

    def test_strip_tags(self):
        f = html.strip_tags
        items = (
            ('<adf>a', 'a'),
            ('</adf>a', 'a'),
            ('<asdf><asdf>e', 'e'),
            ('<f', '<f'),
            ('</fe', '</fe'),
            ('<x>b<y>', 'b'),
        )
        for value, output in items:
            self.check_output(f, value, output)

    def test_strip_spaces_between_tags(self):
        f = html.strip_spaces_between_tags
        # Strings that should come out untouched.
        items = (' <adf>', '<adf> ', ' </adf> ', ' <f> x</f>')
        for value in items:
            self.check_output(f, value)
        # Strings that have spaces to strip.
        items = (
            ('<d> </d>', '<d></d>'),
            ('<p>hello </p>\n<p> world</p>', '<p>hello </p><p> world</p>'),
            ('\n<p>\t</p>\n<p> </p>\n', '\n<p></p><p></p>\n'),
        )
        for value, output in items:
            self.check_output(f, value, output)

    def test_strip_entities(self):
        f = html.strip_entities
        # Strings that should come out untouched.
        values = ("&", "&a", "&amp", "a&#a")
        for value in values:
            self.check_output(f, value)
        # Valid entities that should be stripped from the patterns.
        entities = ("&#1;", "&#12;", "&a;", "&fdasdfasdfasdf;")
        patterns = (
            ("asdf %(entity)s ", "asdf  "),
            ("%(entity)s%(entity)s", ""),
            ("&%(entity)s%(entity)s", "&"),
            ("%(entity)s3", "3"),
        )
        for entity in entities:
            for in_pattern, output in patterns:
                self.check_output(f, in_pattern % {'entity': entity}, output)

    def test_fix_ampersands(self):
        f = html.fix_ampersands
        # Strings without ampersands or with ampersands already encoded.
        values = ("a&#1;", "b", "&a;", "&amp; &x; ", "asdf")
        patterns = (
            ("%s", "%s"),
            ("&%s", "&amp;%s"),
            ("&%s&", "&amp;%s&amp;"),
        )
        for value in values:
            for in_pattern, out_pattern in patterns:
                self.check_output(f, in_pattern % value, out_pattern % value)
        # Strings with ampersands that need encoding.
        items = (
            ("&#;", "&amp;#;"),
            ("&#875 ;", "&amp;#875 ;"),
            ("&#4abc;", "&amp;#4abc;"),
        )
        for value, output in items:
            self.check_output(f, value, output)

    def test_escapejs(self):
        f = html.escapejs
        items = (
            ('"double quotes" and \'single quotes\'', '\\u0022double quotes\\u0022 and \\u0027single quotes\\u0027'),
            (r'\ : backslashes, too', '\\u005C : backslashes, too'),
            ('and lots of whitespace: \r\n\t\v\f\b', 'and lots of whitespace: \\u000D\\u000A\\u0009\\u000B\\u000C\\u0008'),
            (r'<script>and this</script>', '\\u003Cscript\\u003Eand this\\u003C/script\\u003E'),
            ('paragraph separator:\u2029and line separator:\u2028', 'paragraph separator:\\u2029and line separator:\\u2028'),
        )
        for value, output in items:
            self.check_output(f, value, output)

    def test_clean_html(self):
        f = html.clean_html
        items = (
            ('<p>I <i>believe</i> in <b>semantic markup</b>!</p>', '<p>I <em>believe</em> in <strong>semantic markup</strong>!</p>'),
            ('I escape & I don\'t <a href="#" target="_blank">target</a>', 'I escape &amp; I don\'t <a href="#" >target</a>'),
            ('<p>I kill whitespace</p><br clear="all"><p>&nbsp;</p>', '<p>I kill whitespace</p>'),
            # also a regression test for #7267: this used to raise an UnicodeDecodeError
            ('<p>* foo</p><p>* bar</p>', '<ul>\n<li> foo</li><li> bar</li>\n</ul>'),
        )
        for value, output in items:
            self.check_output(f, value, output)
|
cobalys/django
|
tests/regressiontests/utils/html.py
|
Python
|
bsd-3-clause
| 5,490
|
[
"ADF"
] |
e8699a659f4cd893789cd41ddf34c89ad1a4a2d0abbc6fced69d5c42d98e28aa
|
#!/usr/bin/env python
# Copyright 2015(c) The Ontario Institute for Cancer Reserach. All rights reserved.
#
# This program and the accompanying materials are made available under the
# terms of the GNU Public License v3.0.
#
# You should have received a copy of the GNU General Public License along with
# this program. If not, see <http://www.gnu.org/licenses/>.i
from setuptools import setup
import sys, os
# Abort early on unsupported interpreters.  The original used
# exec('raise Error, "..."'), which itself fails: `Error` is undefined on
# Python 2 (NameError) and the comma raise syntax is a SyntaxError inside
# exec on Python 3.  Raise SystemExit directly instead.
if sys.version_info < (2, 7, 3):
    raise SystemExit("Python 2.7.3 or later is required")
def read(*path):
    """Read and return the text of a file given as path components relative
    to the directory containing this script.

    Fixed to use a ``with`` block: the original left the file handle open
    (a resource leak, relying on GC to close it).
    """
    full_path = os.path.join(os.path.abspath(os.path.dirname(__file__)), *path)
    with open(full_path) as handle:
        return handle.read()
VERSION = '1.0'
# Long description is assembled from the two reST files shipped alongside
# this setup.py; read() resolves them relative to the script directory.
README = read('README.rst')
NEWS = read('NEWS.rst')
install_requires = ['cython', 'numpy', 'pymongo>=2.8,<3.0', 'biopython', 'scikit-learn', 'scipy']
# NOTE(review): unreachable -- the interpreter guard above already aborts
# for anything below 2.7.3, so argparse is never appended.
if sys.version_info < (2, 7, 3):
    install_requires.append('argparse')
# Get classifiers from http://pypi.python.org/pypi?%3Aaction=list_classifiers
classifiers = """
Development Status :: 5 - Production/Stable
License :: OSI Approved :: GNU General Public License (GPL)
Environment :: Console
Intended Audience :: Science/Research
Intended Audience :: Developers
Programming Language :: Python :: 2.7
Topic :: Scientific/Engineering :: Bio-Informatics
Operating System :: Unix
"""
# Keyword arguments forwarded verbatim to setuptools.setup() by
# setup_package() below.
config = {
    'name': 'cssscl',
    'version': VERSION,
    'description': 'Combining sequence similarity scores for biological sequence classification',
    'long_description': README + '\n\n' + NEWS,
    'license': 'GNU General Public License, Version 3.0',
    'author': 'Ivan Borozan',
    'author_email': 'ivan.borozan@gmail.com',
    #'url': 'https://github.com/cssscl/cssscl',
    #'download_url': 'https://github.com/cssscl/cssscl',
    # NOTE(review): under Python 3 filter() returns an iterator rather than
    # a list; acceptable for this Python-2 package.
    'classifiers': filter(None, classifiers.split("\n")),
    'scripts': ['bin/cssscl'],
    'packages': ['cssscl'],
    'zip_safe': True,
    'install_requires': install_requires
}
def setup_package():
    """Run setuptools.setup() with the module-level ``config`` dictionary."""
    setup(**config)

if __name__ == '__main__':
    setup_package()
|
oicr-ibc/cssscl
|
setup.py
|
Python
|
gpl-3.0
| 2,016
|
[
"Biopython"
] |
1bfe24c5296a66ccf5bf360f3fcdbfca2fb997b33b035f24e0de3aff1fb434f2
|
# $HeadURL$
# $Id$
# Package version and license text, exposed as module-level metadata.
__version__ = "0.10-svn"
__license__ = """
Mirage, a fast GTK+ Image Viewer
Copyright 2007 Scott Horowitz <stonecrest@gmail.com>
This file is part of Mirage.
Mirage is free software; you can redistribute it and/or modify
it under the terms of the GNU General Public License as published by
the Free Software Foundation; either version 3 of the License, or
(at your option) any later version.
Mirage is distributed in the hope that it will be useful,
but WITHOUT ANY WARRANTY; without even the implied warranty of
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
GNU General Public License for more details.
You should have received a copy of the GNU General Public License
along with this program. If not, see <http://www.gnu.org/licenses/>.
"""
import pygtk
pygtk.require('2.0')
import gtk
import os, sys, getopt, ConfigParser, string, gc
import random, urllib, gobject, gettext, locale
import stat, time, subprocess, shutil, filecmp
import tempfile, socket, threading
from fractions import Fraction

# Optional C helper modules: probe each one and degrade gracefully when it
# is missing.  The handlers were bare ``except:`` clauses, which also hid
# unrelated failures (including KeyboardInterrupt/SystemExit and a broken
# .so raising something other than ImportError); narrowed to ImportError.
try:
    import mirage_numacomp as numacomp
    HAVE_NUMACOMP = True
except ImportError:
    HAVE_NUMACOMP = False
    print("mirage_numacomp.so not found, unable to do numerical aware sorting.")
try:
    import hashlib
    HAS_HASHLIB = True
except ImportError:
    HAS_HASHLIB = False
    import md5
try:
    import imgfuncs
    HAS_IMGFUNCS = True
except ImportError:
    HAS_IMGFUNCS = False
    print("imgfuncs.so module not found, rotating/flipping images will be disabled.")
try:
    import xmouse
    HAS_XMOUSE = True
except ImportError:
    HAS_XMOUSE = False
    print("xmouse.so module not found, some screenshot capabilities will be disabled.")
try:
    import pyexiv2
    HAS_EXIF = True
except ImportError:
    HAS_EXIF = False
    print("pyexiv2 module not found, exifdata reading/writing are disabled")
# gconf is optional (GNOME toolbar-style integration only).
try:
    import gconf
except ImportError:
    pass

# Hard requirements on the GTK+/PyGTK versions.
if gtk.gtk_version < (2, 10, 0):
    sys.stderr.write("Mirage requires GTK+ 2.10.0 or newer..\n")
    sys.exit(1)
if gtk.pygtk_version < (2, 12, 0):
    sys.stderr.write("Mirage requires PyGTK 2.12.0 or newer.\n")
    sys.exit(1)
def valid_int(inputstring):
    """Return True if *inputstring* can be converted to an int, else False.

    Narrowed from a bare ``except`` (which also swallowed KeyboardInterrupt
    and SystemExit) to the two exceptions int() actually raises here, and
    dropped the unused local binding.
    """
    try:
        int(inputstring)
    except (ValueError, TypeError):
        return False
    return True
class Base:
def __init__(self):
    """Construct the whole Mirage UI.

    Loads user settings, parses command-line options, builds the menu /
    toolbar / thumbnail pane / slideshow controls, wires up all signal
    handlers, and finally opens any images passed on the command line.
    """
    gtk.gdk.threads_init()
    # FIX THIS! Does not work on windows and what happens if mo-files exists
    # in both dirs?
    gettext.install('mirage', '/usr/share/locale', unicode=1)
    gettext.install('mirage', '/usr/local/share/locale', unicode=1)
    # Constants
    self.open_mode_smart = 0
    self.open_mode_fit = 1
    self.open_mode_1to1 = 2
    self.open_mode_last = 3
    self.min_zoomratio = 0.02
    # Create a dictionary with all settings the users can do in the interface
    self.usettings = {}
    # Initialize vars:
    self.usettings['window_width'] = 600
    self.usettings['window_height'] = 400
    self.usettings['simple_bgcolor'] = False
    # Current image:
    self.curr_img_in_list = 0
    # This is the actual pixbuf that is loaded in Mirage. This will
    # usually be the same as self.curr_img_in_list except for scenarios
    # like when the user presses 'next image' multiple times in a row.
    # In this case, self.curr_img_in_list will increment while
    # self.loaded_img_in_list will retain the current loaded image.
    self.loaded_img_in_list = 0
    self.currimg = ImageData(index=0)
    #self.currimg_name = ""
    #self.currimg_width = 0 #Changed
    #self.currimg_height = 0 #Changed
    #self.currimg_pixbuf = None #Changed
    #self.currimg_pixbuf_original = None #Changed
    #self.currimg_zoomratio = 1 #Changed
    #self.currimg_is_animation = False #Changed
    # Next preloaded image:
    self.nextimg = ImageData(index=-1)
    #self.preloadimg_next_in_list = -1 #Changed
    #self.preloadimg_next_name = "" #Changed
    #self.preloadimg_next_width = 0 #Changed
    #self.preloadimg_next_height = 0 #Changed
    #self.preloadimg_next_pixbuf = None #changed
    #self.preloadimg_next_pixbuf_original = None #Changed
    #self.preloadimg_next_zoomratio = 1 #Changed
    #self.preloadimg_next_is_animation = False #Changed
    # Previous preloaded image:
    self.previmg = ImageData(index=-1)
    #self.preloadimg_prev_in_list = -1
    #self.preloadimg_prev_name = ""
    #self.preloadimg_prev_width = 0 #Changed
    #self.preloadimg_prev_height = 0 #Changed
    #self.preloadimg_prev_pixbuf = None #Changed
    #self.preloadimg_prev_pixbuf_original = None #Changed
    #self.preloadimg_prev_zoomratio = 1 #changed
    #self.preloadimg_prev_is_animation = False #changed
    # Settings, misc:
    self.usettings['toolbar_show'] = True
    self.usettings['thumbpane_show'] = True
    self.usettings['statusbar_show'] = True
    self.fullscreen_mode = False
    self.opendialogpath = ""
    self.zoom_quality = gtk.gdk.INTERP_BILINEAR
    self.recursive = False
    self.verbose = False
    self.image_loaded = False
    self.usettings['open_all_images'] = True # open all images in the directory(ies)
    self.usettings['use_last_dir'] = True
    self.usettings['last_dir'] = os.path.expanduser("~")
    self.usettings['fixed_dir'] = os.path.expanduser("~")
    self.image_list = []
    self.firstimgindex_subfolders_list = []
    self.usettings['open_mode'] = self.open_mode_smart
    self.usettings['last_mode'] = self.open_mode_smart
    self.usettings['listwrap_mode'] = 0 # 0=no, 1=yes, 2=ask
    self.user_prompt_visible = False # the "wrap?" prompt
    self.usettings['slideshow_delay'] = 1 # seconds
    self.slideshow_mode = False
    self.usettings['slideshow_random'] = False
    self.slideshow_controls_visible = False # fullscreen slideshow controls
    self.controls_moving = False
    self.usettings['zoomvalue'] = 2
    self.usettings['quality_save'] = 90
    self.usettings['bgcolor'] = False
    self.updating_adjustments = False
    self.usettings['disable_screensaver'] = False
    self.usettings['slideshow_in_fullscreen'] = False
    self.closing_app = False
    self.usettings['confirm_delete'] = True
    self.usettings['preloading_images'] = True
    self.usettings['action_names'] = [_("Open in GIMP"), _("Create Thumbnail"), _("Create Thumbnails"), _("Move to Favorites")]
    self.usettings['action_shortcuts'] = ["<Control>e", "<Alt>t", "<Control><Alt>t", "<Control><Alt>f"]
    self.usettings['action_commands'] = ["gimp %F", "convert %F -thumbnail 150x150 %Pt_%N.jpg", "convert %F -thumbnail 150x150 %Pt_%N.jpg", "mkdir -p ~/mirage-favs; mv %F ~/mirage-favs; [NEXT]"]
    self.usettings['action_batch'] = [False, False, True, False]
    self.onload_cmd = None
    self.searching_for_images = False
    self.preserve_aspect = True
    self.ignore_preserve_aspect_callback = False
    self.usettings['savemode'] = 2
    self.image_modified = False
    self.image_zoomed = False
    self.usettings['start_in_fullscreen'] = False
    self.running_custom_actions = False
    self.merge_id = None
    self.actionGroupCustom = None
    self.merge_id_recent = None
    self.actionGroupRecent = None
    self.usettings['open_hidden_files'] = False
    self.usettings['use_numacomp'] = False
    self.usettings['case_numacomp'] = False
    self.thumbnail_sizes = ["128", "96", "72", "64", "48", "32"]
    self.usettings['thumbnail_size'] = 128 # Default to 128 x 128
    self.thumbnail_loaded = []
    self.thumbpane_updating = False
    self.usettings['recentfiles'] = ["", "", "", "", ""]
    self.usettings['screenshot_delay'] = 2
    self.thumbpane_bottom_coord_loaded = 0
    # Read any passed options/arguments:
    try:
        opts, args = getopt.getopt(sys.argv[1:], "hRvVsfo:", ["help", "version", "recursive", "verbose", "slideshow", "fullscreen", "onload="])
    except getopt.GetoptError:
        # print help information and exit:
        self.print_usage()
        sys.exit(2)
    # If options were passed, perform action on them.
    if opts != []:
        for o, a in opts:
            if o in ("-v", "--version"):
                self.print_version()
                sys.exit(2)
            elif o in ("-h", "--help"):
                self.print_usage()
                sys.exit(2)
            elif o in ("-R", "--recursive"):
                self.recursive = True
            elif o in ("-V", "--verbose"):
                self.verbose = True
            elif o in ("-s", "--slideshow", "-f", "--fullscreen"):
                #This will be handled later
                None  # deliberate no-op
            elif o in ("-o", "--onload"):
                self.onload_cmd = a
            else:
                self.print_usage()
                sys.exit(2)
    # Determine config dir, first try the environment variable XDG_CONFIG_HOME
    # according to XDG specification and as a fallback use ~/.config/mirage
    self.config_dir = (os.getenv('XDG_CONFIG_HOME') or os.path.expanduser('~/.config')) + '/mirage'
    # Load config from disk:
    self.read_config_and_set_settings()
    # slideshow_delay is the user's preference, whereas curr_slideshow_delay is
    # the current delay (which can be changed without affecting the 'default')
    self.curr_slideshow_delay = self.usettings['slideshow_delay']
    # Same for randomization:
    self.curr_slideshow_random = self.usettings['slideshow_random']
    # Find application images/pixmaps
    self.resource_path_list = False
    self.blank_image = gtk.gdk.pixbuf_new_from_file(self.find_path("mirage_blank.png"))
    # Define the main menubar and toolbar:
    factory = gtk.IconFactory()
    iconname = 'stock_leave-fullscreen.png'
    iconname2 = 'stock_fullscreen.png'
    leave_fullscreen_icon_path = self.find_path(iconname)
    pixbuf = gtk.gdk.pixbuf_new_from_file(leave_fullscreen_icon_path)
    iconset = gtk.IconSet(pixbuf)
    factory.add('leave-fullscreen', iconset)
    factory.add_default()
    fullscreen_icon_path = self.find_path(iconname2)
    pixbuf = gtk.gdk.pixbuf_new_from_file(fullscreen_icon_path)
    iconset = gtk.IconSet(pixbuf)
    factory.add('fullscreen', iconset)
    factory.add_default()
    # Probe for the stock fullscreen icons; fall back to our own on old GTK.
    try:
        test = gtk.Button("", gtk.STOCK_LEAVE_FULLSCREEN)
        leave_fullscreen_icon = gtk.STOCK_LEAVE_FULLSCREEN
        fullscreen_icon = gtk.STOCK_FULLSCREEN
    except:
        # This will allow gtk 2.6 users to run Mirage
        leave_fullscreen_icon = 'leave-fullscreen'
        fullscreen_icon = 'fullscreen'
    # (name, stock-id, label, accelerator, tooltip, callback) tuples consumed
    # by gtk.ActionGroup.add_actions below.
    actions = (
        ('FileMenu', None, _('_File')),
        ('EditMenu', None, _('_Edit')),
        ('ViewMenu', None, _('_View')),
        ('GoMenu', None, _('_Go')),
        ('HelpMenu', None, _('_Help')),
        ('ActionSubMenu', None, _('Custom _Actions')),
        ('Open Image', gtk.STOCK_FILE, _('_Open Image...'), '<Ctrl>O', _('Open Image'), self.open_file),
        ('Open Remote Image', gtk.STOCK_NETWORK, _('Open _Remote image...'), None, _('Open Remote Image'), self.open_file_remote),
        ('Open Folder', gtk.STOCK_DIRECTORY, _('Open _Folder...'), '<Ctrl>F', _('Open Folder'), self.open_folder),
        ('Save', gtk.STOCK_SAVE, _('_Save Image'), '<Ctrl>S', _('Save Image'), self.save_image),
        ('Save As', gtk.STOCK_SAVE, _('Save Image _As...'), '<Shift><Ctrl>S', _('Save Image As'), self.save_image_as),
        ('Crop', None, _('_Crop...'), None, _('Crop Image'), self.crop_image),
        ('Resize', None, _('R_esize...'), None, _('Resize Image'), self.resize_image),
        ('Saturation', None, _('_Saturation...'), None, _('Modify saturation'), self.saturation),
        ('Quit', gtk.STOCK_QUIT, _('_Quit'), '<Ctrl>Q', _('Quit'), self.exit_app),
        ('Previous Image', gtk.STOCK_GO_BACK, _('_Previous Image'), 'Left', _('Previous Image'), self.goto_prev_image),
        ('Previous Subfolder', gtk.STOCK_MEDIA_REWIND, _('Pre_vious Subfolder'), '<Shift>Left', _('Previous Subfolder'), self.goto_first_image_prev_subfolder),
        ('Next Image', gtk.STOCK_GO_FORWARD, _('_Next Image'), 'Right', _('Next Image'), self.goto_next_image),
        ('Next Subfolder', gtk.STOCK_MEDIA_FORWARD, _('Ne_xt Subfolder'), '<Shift>Right', _('Next Subfolder'), self.goto_first_image_next_subfolder),
        ('Previous2', gtk.STOCK_GO_BACK, _('_Previous'), 'Left', _('Previous'), self.goto_prev_image),
        ('Next2', gtk.STOCK_GO_FORWARD, _('_Next'), 'Right', _('Next'), self.goto_next_image),
        ('Random Image', None, _('_Random Image'), 'R', _('Random Image'), self.goto_random_image),
        ('First Image', gtk.STOCK_GOTO_FIRST, _('_First Image'), 'Home', _('First Image'), self.goto_first_image),
        ('Last Image', gtk.STOCK_GOTO_LAST, _('_Last Image'), 'End', _('Last Image'), self.goto_last_image),
        ('In', gtk.STOCK_ZOOM_IN, _('Zoom _In'), '<Ctrl>Up', _('Zoom In'), self.zoom_in),
        ('Out', gtk.STOCK_ZOOM_OUT, _('Zoom _Out'), '<Ctrl>Down', _('Zoom Out'), self.zoom_out),
        ('Fit', gtk.STOCK_ZOOM_FIT, _('Zoom To _Fit'), '<Ctrl>0', _('Fit'), self.zoom_to_fit_window_action),
        ('1:1', gtk.STOCK_ZOOM_100, _('_1:1'), '<Ctrl>1', _('1:1'), self.zoom_1_to_1_action),
        ('Rotate Left', None, _('Rotate _Left'), '<Ctrl>Left', _('Rotate Left'), self.rotate_left),
        ('Rotate Right', None, _('Rotate _Right'), '<Ctrl>Right', _('Rotate Right'), self.rotate_right),
        ('Flip Vertically', None, _('Flip _Vertically'), '<Ctrl>V', _('Flip Vertically'), self.flip_image_vert),
        ('Flip Horizontally', None, _('Flip _Horizontally'), '<Ctrl>H', _('Flip Horizontally'), self.flip_image_horiz),
        ('About', gtk.STOCK_ABOUT, _('_About'), None, _('About'), self.show_about),
        ('Contents', gtk.STOCK_HELP, _('_Contents'), 'F1', _('Contents'), self.show_help),
        ('Preferences', gtk.STOCK_PREFERENCES, _('_Preferences...'), '<Ctrl>P', _('Preferences'), self.show_prefs),
        ('Full Screen', fullscreen_icon, _('_Full Screen'), 'F11', _('Full Screen'), self.enter_fullscreen),
        ('Exit Full Screen', leave_fullscreen_icon, _('E_xit Full Screen'), None, _('Exit Full Screen'), self.leave_fullscreen),
        ('Start Slideshow', gtk.STOCK_MEDIA_PLAY, _('_Start Slideshow'), 'F5', _('Start Slideshow'), self.toggle_slideshow),
        ('Stop Slideshow', gtk.STOCK_MEDIA_STOP, _('_Stop Slideshow'), 'F5', _('Stop Slideshow'), self.toggle_slideshow),
        ('Delete Image', gtk.STOCK_DELETE, _('_Delete...'), 'Delete', _('Delete Image'), self.delete_image),
        ('Rename Image', None, _('Re_name...'), 'F2', _('Rename Image'), self.rename_image),
        ('Take Screenshot', None, _('_Take Screenshot...'), None, _('Take Screenshot'), self.screenshot),
        ('Properties', gtk.STOCK_PROPERTIES, _('_Properties...'), None, _('Properties'), self.show_properties),
        ('Custom Actions', None, _('_Configure...'), None, _('Custom Actions'), self.show_custom_actions),
        ('MiscKeysMenuHidden', None, 'Keys'),
        ('Escape', None, '', 'Escape', _('Exit Full Screen'), self.leave_fullscreen),
        ('Minus', None, '', 'minus', _('Zoom Out'), self.zoom_out),
        ('Plus', None, '', 'plus', _('Zoom In'), self.zoom_in),
        ('Equal', None, '', 'equal', _('Zoom In'), self.zoom_in),
        ('Space', None, '', 'space', _('Next Image'), self.goto_next_image),
        ('Ctrl-KP_Insert', None, '', '<Ctrl>KP_Insert', _('Fit'), self.zoom_to_fit_window_action),
        ('Ctrl-KP_End', None, '', '<Ctrl>KP_End', _('1:1'), self.zoom_1_to_1_action),
        ('Ctrl-KP_Subtract', None, '', '<Ctrl>KP_Subtract', _('Zoom Out'), self.zoom_out),
        ('Ctrl-KP_Add', None, '', '<Ctrl>KP_Add', _('Zoom In'), self.zoom_in),
        ('Ctrl-KP_0', None, '', '<Ctrl>KP_0', _('Fit'), self.zoom_to_fit_window_action),
        ('Ctrl-KP_1', None, '', '<Ctrl>KP_1', _('1:1'), self.zoom_1_to_1_action),
        ('Full Screen Key', None, '', '<Shift>Return', None, self.enter_fullscreen),
        ('Prev', None, '', 'Up', _('Previous Image'), self.goto_prev_image),
        ('Next', None, '', 'Down', _('Next Image'), self.goto_next_image),
        ('PgUp', None, '', 'Page_Up', _('Previous Image'), self.goto_prev_image),
        ('PgDn', None, '', 'Page_Down', _('Next Image'), self.goto_next_image),
        ('BackSpace', None, '', 'BackSpace', _('Previous Image'), self.goto_prev_image),
        ('Prev Subfolder 2', None, '', '<Shift>Up', _('Previous Subfolder'), self.goto_first_image_prev_subfolder),
        ('Next Subfolder 2', None, '', '<Shift>Down', _('Next Subfolder'), self.goto_first_image_next_subfolder),
        ('Prev Subfolder 3', None, '', '<Shift>Page_Up', _('Previous Subfolder'), self.goto_first_image_prev_subfolder),
        ('Next Subfolder 3', None, '', '<Shift>Page_Down', _('Next Subfolder'), self.goto_first_image_next_subfolder),
        ('OriginalSize', None, '', '1', _('1:1'), self.zoom_1_to_1_action),
        ('ZoomIn', None, '', 'KP_Add', _('Zoom In'), self.zoom_in),
        ('ZoomOut', None, '', 'KP_Subtract', _('Zoom Out'), self.zoom_out)
    )
    toggle_actions = (
        ('Status Bar', None, _('_Status Bar'), None, _('Status Bar'), self.toggle_status_bar, self.usettings['statusbar_show']),
        ('Toolbar', None, _('_Toolbar'), None, _('Toolbar'), self.toggle_toolbar, self.usettings['toolbar_show']),
        ('Thumbnails Pane', None, _('Thumbnails _Pane'), None, _('Thumbnails Pane'), self.toggle_thumbpane, self.usettings['thumbpane_show'])
    )
    # Populate keys[]:
    self.keys=[]
    for i in range(len(actions)):
        if len(actions[i]) > 3:
            if actions[i][3] != None:
                self.keys.append([actions[i][4], actions[i][3]])
    uiDescription = """
<ui>
<popup name="Popup">
<menuitem action="Next Image"/>
<menuitem action="Previous Image"/>
<separator name="FM1"/>
<menuitem action="Out"/>
<menuitem action="In"/>
<menuitem action="1:1"/>
<menuitem action="Fit"/>
<separator name="FM4"/>
<menuitem action="Start Slideshow"/>
<menuitem action="Stop Slideshow"/>
<separator name="FM3"/>
<menuitem action="Exit Full Screen"/>
<menuitem action="Full Screen"/>
</popup>
<menubar name="MainMenu">
<menu action="FileMenu">
<menuitem action="Open Image"/>
<menuitem action="Open Folder"/>
<menuitem action="Open Remote Image"/>
<separator name="FM1"/>
<menuitem action="Save"/>
<menuitem action="Save As"/>
<separator name="FM2"/>
<menuitem action="Take Screenshot"/>
<separator name="FM3"/>
<menuitem action="Properties"/>
<separator name="FM4"/>
<placeholder name="Recent Files">
</placeholder>
<separator name="FM5"/>
<menuitem action="Quit"/>
</menu>
<menu action="EditMenu">
<menuitem action="Rotate Left"/>
<menuitem action="Rotate Right"/>
<menuitem action="Flip Vertically"/>
<menuitem action="Flip Horizontally"/>
<separator name="FM1"/>
<menuitem action="Crop"/>
<menuitem action="Resize"/>
<menuitem action="Saturation"/>
<separator name="FM2"/>
<menuitem action="Rename Image"/>
<menuitem action="Delete Image"/>
<separator name="FM3"/>
<menu action="ActionSubMenu">
<separator name="FM4" position="bot"/>
<menuitem action="Custom Actions" position="bot"/>
</menu>
<menuitem action="Preferences"/>
</menu>
<menu action="ViewMenu">
<menuitem action="Out"/>
<menuitem action="In"/>
<menuitem action="1:1"/>
<menuitem action="Fit"/>
<separator name="FM2"/>
<menuitem action="Toolbar"/>
<menuitem action="Thumbnails Pane"/>
<menuitem action="Status Bar"/>
<separator name="FM1"/>
<menuitem action="Full Screen"/>
</menu>
<menu action="GoMenu">
<menuitem action="Next Image"/>
<menuitem action="Previous Image"/>
<menuitem action="Random Image"/>
<separator name="FM1"/>
<menuitem action="First Image"/>
<menuitem action="Last Image"/>
<separator name="FM2"/>
<menuitem action="Next Subfolder"/>
<menuitem action="Previous Subfolder"/>
<separator name="FM3"/>
<menuitem action="Start Slideshow"/>
<menuitem action="Stop Slideshow"/>
</menu>
<menu action="HelpMenu">
<menuitem action="Contents"/>
<menuitem action="About"/>
</menu>
<menu action="MiscKeysMenuHidden">
<menuitem action="Minus"/>
<menuitem action="Escape"/>
<menuitem action="Plus"/>
<menuitem action="Equal"/>
<menuitem action="Space"/>
<menuitem action="Ctrl-KP_Insert"/>
<menuitem action="Ctrl-KP_End"/>
<menuitem action="Ctrl-KP_Subtract"/>
<menuitem action="Ctrl-KP_Add"/>
<menuitem action="Ctrl-KP_0"/>
<menuitem action="Ctrl-KP_1"/>
<menuitem action="Full Screen Key"/>
<menuitem action="Prev"/>
<menuitem action="Next"/>
<menuitem action="PgUp"/>
<menuitem action="PgDn"/>
<menuitem action="Prev Subfolder 2"/>
<menuitem action="Next Subfolder 2"/>
<menuitem action="Prev Subfolder 3"/>
<menuitem action="Next Subfolder 3"/>
<menuitem action="OriginalSize"/>
<menuitem action="BackSpace"/>
<menuitem action="ZoomIn"/>
<menuitem action="ZoomOut"/>
</menu>
</menubar>
<toolbar name="MainToolbar">
<toolitem action="Open Image"/>
<separator name="FM1"/>
<toolitem action="Previous2"/>
<toolitem action="Next2"/>
<separator name="FM2"/>
<toolitem action="Out"/>
<toolitem action="In"/>
<toolitem action="1:1"/>
<toolitem action="Fit"/>
</toolbar>
</ui>
"""
    # Create interface
    self.window = gtk.Window(gtk.WINDOW_TOPLEVEL)
    self.update_title()
    icon_path = self.find_path('mirage.png')
    try:
        gtk.window_set_default_icon_from_file(icon_path)
    except:
        pass
    vbox = gtk.VBox(False, 0)
    self.UIManager = gtk.UIManager()
    actionGroup = gtk.ActionGroup('Actions')
    actionGroup.add_actions(actions)
    actionGroup.add_toggle_actions(toggle_actions)
    self.UIManager.insert_action_group(actionGroup, 0)
    self.UIManager.add_ui_from_string(uiDescription)
    self.refresh_custom_actions_menu()
    self.refresh_recent_files_menu()
    self.window.add_accel_group(self.UIManager.get_accel_group())
    self.menubar = self.UIManager.get_widget('/MainMenu')
    vbox.pack_start(self.menubar, False, False, 0)
    self.toolbar = self.UIManager.get_widget('/MainToolbar')
    vbox.pack_start(self.toolbar, False, False, 0)
    # Image area: a gtk.Layout with external scrollbars in a 3x2 table,
    # with the thumbnail pane on the left.
    self.layout = gtk.Layout()
    self.vscroll = gtk.VScrollbar(None)
    self.vscroll.set_adjustment(self.layout.get_vadjustment())
    self.hscroll = gtk.HScrollbar(None)
    self.hscroll.set_adjustment(self.layout.get_hadjustment())
    self.table = gtk.Table(3, 2, False)
    self.thumblist = gtk.ListStore(gtk.gdk.Pixbuf)
    self.thumbpane = gtk.TreeView(self.thumblist)
    self.thumbcolumn = gtk.TreeViewColumn(None)
    self.thumbcell = gtk.CellRendererPixbuf()
    self.thumbcolumn.set_sizing(gtk.TREE_VIEW_COLUMN_FIXED)
    self.thumbpane_set_size()
    self.thumbpane.append_column(self.thumbcolumn)
    self.thumbcolumn.pack_start(self.thumbcell, True)
    self.thumbcolumn.set_attributes(self.thumbcell, pixbuf=0)
    self.thumbpane.get_selection().set_mode(gtk.SELECTION_SINGLE)
    self.thumbpane.set_headers_visible(False)
    self.thumbpane.set_property('can-focus', False)
    self.thumbscroll = gtk.ScrolledWindow()
    self.thumbscroll.set_policy(gtk.POLICY_NEVER, gtk.POLICY_ALWAYS)
    self.thumbscroll.add(self.thumbpane)
    self.table.attach(self.thumbscroll, 0, 1, 0, 1, 0, gtk.FILL|gtk.EXPAND, 0, 0)
    self.table.attach(self.layout, 1, 2, 0, 1, gtk.FILL|gtk.EXPAND, gtk.FILL|gtk.EXPAND, 0, 0)
    self.table.attach(self.hscroll, 1, 2, 1, 2, gtk.FILL|gtk.SHRINK, gtk.FILL|gtk.SHRINK, 0, 0)
    self.table.attach(self.vscroll, 2, 3, 0, 1, gtk.FILL|gtk.SHRINK, gtk.FILL|gtk.SHRINK, 0, 0)
    vbox.pack_start(self.table, True, True, 0)
    if not self.usettings['bgcolor']:
        self.usettings['bgcolor'] = gtk.gdk.Color(0, 0, 0) # Default to black
    if self.usettings['simple_bgcolor']:
        self.layout.modify_bg(gtk.STATE_NORMAL, None)
    else:
        self.layout.modify_bg(gtk.STATE_NORMAL, self.usettings['bgcolor'])
    self.imageview = gtk.Image()
    self.layout.add(self.imageview)
    self.statusbar = gtk.Statusbar()
    self.statusbar2 = gtk.Statusbar()
    self.statusbar.set_has_resize_grip(False)
    self.statusbar2.set_has_resize_grip(True)
    self.statusbar2.set_size_request(200, -1)
    hbox_statusbar = gtk.HBox()
    hbox_statusbar.pack_start(self.statusbar, expand=True)
    hbox_statusbar.pack_start(self.statusbar2, expand=False)
    vbox.pack_start(hbox_statusbar, False, False, 0)
    self.window.add(vbox)
    self.window.set_property('allow-shrink', False)
    self.window.set_default_size(self.usettings['window_width'],self.usettings['window_height'])
    # Slideshow control:
    self.slideshow_window = gtk.Window(gtk.WINDOW_POPUP)
    self.slideshow_controls = gtk.HBox()
    self.ss_back = gtk.Button()
    self.ss_back.add(gtk.image_new_from_stock(gtk.STOCK_GO_BACK, gtk.ICON_SIZE_BUTTON))
    self.ss_back.set_property('can-focus', False)
    self.ss_back.connect('clicked', self.goto_prev_image)
    self.ss_start = gtk.Button("", gtk.STOCK_MEDIA_PLAY)
    self.ss_start.get_child().get_child().get_children()[1].set_text('')
    self.ss_start.set_property('can-focus', False)
    self.ss_start.connect('clicked', self.toggle_slideshow)
    self.ss_stop = gtk.Button("", gtk.STOCK_MEDIA_STOP)
    self.ss_stop.get_child().get_child().get_children()[1].set_text('')
    self.ss_stop.set_property('can-focus', False)
    self.ss_stop.connect('clicked', self.toggle_slideshow)
    self.ss_forward = gtk.Button("", gtk.STOCK_GO_FORWARD)
    self.ss_forward.get_child().get_child().get_children()[1].set_text('')
    self.ss_forward.set_property('can-focus', False)
    self.ss_forward.connect('clicked', self.goto_next_image)
    self.slideshow_controls.pack_start(self.ss_back, False, False, 0)
    self.slideshow_controls.pack_start(self.ss_start, False, False, 0)
    self.slideshow_controls.pack_start(self.ss_stop, False, False, 0)
    self.slideshow_controls.pack_start(self.ss_forward, False, False, 0)
    self.slideshow_window.add(self.slideshow_controls)
    if self.usettings['simple_bgcolor']:
        self.slideshow_window.modify_bg(gtk.STATE_NORMAL, None)
    else:
        self.slideshow_window.modify_bg(gtk.STATE_NORMAL, self.usettings['bgcolor'])
    # Second slideshow popup: randomize toggle, delay spinner, exit button.
    self.slideshow_window2 = gtk.Window(gtk.WINDOW_POPUP)
    self.slideshow_controls2 = gtk.HBox()
    try:
        self.ss_exit = gtk.Button("", gtk.STOCK_LEAVE_FULLSCREEN)
        self.ss_exit.get_child().get_child().get_children()[1].set_text('')
    except:
        self.ss_exit = gtk.Button()
        self.ss_exit.set_image(gtk.image_new_from_stock('leave-fullscreen', gtk.ICON_SIZE_MENU))
    self.ss_exit.set_property('can-focus', False)
    self.ss_exit.connect('clicked', self.leave_fullscreen)
    self.ss_randomize = gtk.ToggleButton()
    icon_path = self.find_path('stock_shuffle.png')
    try:
        pixbuf = gtk.gdk.pixbuf_new_from_file(icon_path)
        iconset = gtk.IconSet(pixbuf)
        factory.add('stock-shuffle', iconset)
        factory.add_default()
        self.ss_randomize.set_image(gtk.image_new_from_stock('stock-shuffle', gtk.ICON_SIZE_MENU))
    except:
        self.ss_randomize.set_label("Rand")
    self.ss_randomize.connect('toggled', self.random_changed)
    spin_adj = gtk.Adjustment(self.usettings['slideshow_delay'], 0, 50000, 1,100, 0)
    self.ss_delayspin = gtk.SpinButton(spin_adj, 1.0, 0)
    self.ss_delayspin.set_numeric(True)
    self.ss_delayspin.connect('changed', self.delay_changed)
    self.slideshow_controls2.pack_start(self.ss_randomize, False, False, 0)
    self.slideshow_controls2.pack_start(self.ss_delayspin, False, False, 0)
    self.slideshow_controls2.pack_start(self.ss_exit, False, False, 0)
    self.slideshow_window2.add(self.slideshow_controls2)
    if self.usettings['simple_bgcolor']:
        self.slideshow_window2.modify_bg(gtk.STATE_NORMAL, None)
    else:
        self.slideshow_window2.modify_bg(gtk.STATE_NORMAL, self.usettings['bgcolor'])
    # Connect signals
    self.window.connect("delete_event", self.delete_event)
    self.window.connect("destroy", self.destroy)
    self.window.connect("size-allocate", self.window_resized)
    self.window.connect('key-press-event', self.topwindow_keypress)
    self.toolbar.connect('focus', self.toolbar_focused)
    self.layout.drag_dest_set(gtk.DEST_DEFAULT_HIGHLIGHT | gtk.DEST_DEFAULT_DROP, [("text/uri-list", 0, 80)], gtk.gdk.ACTION_DEFAULT)
    self.layout.connect('drag_motion', self.motion_cb)
    self.layout.connect('drag_data_received', self.drop_cb)
    self.layout.add_events(gtk.gdk.KEY_PRESS_MASK | gtk.gdk.POINTER_MOTION_MASK | gtk.gdk.BUTTON_PRESS_MASK | gtk.gdk.BUTTON_MOTION_MASK | gtk.gdk.SCROLL_MASK)
    self.layout.connect("scroll-event", self.mousewheel_scrolled)
    self.layout.add_events(gtk.gdk.BUTTON_PRESS_MASK | gtk.gdk.KEY_PRESS_MASK)
    self.layout.connect("button_press_event", self.button_pressed)
    self.layout.add_events(gtk.gdk.POINTER_MOTION_MASK | gtk.gdk.POINTER_MOTION_HINT_MASK | gtk.gdk.BUTTON_RELEASE_MASK)
    self.layout.connect("motion-notify-event", self.mouse_moved)
    self.layout.connect("button-release-event", self.button_released)
    self.imageview.connect("expose-event", self.expose_event)
    self.thumb_sel_handler = self.thumbpane.get_selection().connect('changed', self.thumbpane_selection_changed)
    self.thumb_scroll_handler = self.thumbscroll.get_vscrollbar().connect("value-changed", self.thumbpane_scrolled)
    # Since GNOME does its own thing for the toolbar style...
    # Requires gnome-python installed to work (but optional)
    try:
        client = gconf.client_get_default()
        style = client.get_string('/desktop/gnome/interface/toolbar_style')
        if style == "both":
            self.toolbar.set_style(gtk.TOOLBAR_BOTH)
        elif style == "both-horiz":
            self.toolbar.set_style(gtk.TOOLBAR_BOTH_HORIZ)
        elif style == "icons":
            self.toolbar.set_style(gtk.TOOLBAR_ICONS)
        elif style == "text":
            self.toolbar.set_style(gtk.TOOLBAR_TEXT)
        client.add_dir("/desktop/gnome/interface", gconf.CLIENT_PRELOAD_NONE)
        client.notify_add("/desktop/gnome/interface/toolbar_style", self.gconf_key_changed)
    except:
        # gconf is optional; ignore any failure (best-effort GNOME integration)
        pass
    # Show GUI:
    if not self.usettings['toolbar_show']:
        self.toolbar.set_property('visible', False)
        self.toolbar.set_no_show_all(True)
    if not self.usettings['statusbar_show']:
        self.statusbar.set_property('visible', False)
        self.statusbar.set_no_show_all(True)
        self.statusbar2.set_property('visible', False)
        self.statusbar2.set_no_show_all(True)
    if not self.usettings['thumbpane_show']:
        self.thumbscroll.set_property('visible', False)
        self.thumbscroll.set_no_show_all(True)
    self.hscroll.set_no_show_all(True)
    self.vscroll.set_no_show_all(True)
    go_into_fullscreen = False
    if opts != []:
        for o, a in opts:
            if (o in ("-f", "--fullscreen")) or ((o in ("-s", "--slideshow")) and self.usettings['slideshow_in_fullscreen']):
                go_into_fullscreen = True
    if go_into_fullscreen or self.usettings['start_in_fullscreen']:
        self.enter_fullscreen(None)
        self.statusbar.set_no_show_all(True)
        self.statusbar2.set_no_show_all(True)
        self.toolbar.set_no_show_all(True)
        self.menubar.set_no_show_all(True)
        self.thumbscroll.set_no_show_all(True)
    self.window.show_all()
    self.ss_exit.set_size_request(self.ss_start.size_request()[0], self.ss_stop.size_request()[1])
    self.ss_randomize.set_size_request(self.ss_start.size_request()[0], -1)
    self.ss_start.set_size_request(self.ss_start.size_request()[0]*2, -1)
    self.ss_stop.set_size_request(self.ss_stop.size_request()[0]*2, -1)
    self.UIManager.get_widget('/Popup/Exit Full Screen').hide()
    self.layout.set_flags(gtk.CAN_FOCUS)
    self.window.set_focus(self.layout)
    #sets the visibility of some menu entries
    self.set_slideshow_sensitivities()
    self.UIManager.get_widget('/MainMenu/MiscKeysMenuHidden').set_property('visible', False)
    if opts != []:
        for o, a in opts:
            if o in ("-f", "--fullscreen"):
                self.UIManager.get_widget('/Popup/Exit Full Screen').show()
    # If arguments (filenames) were passed, try to open them:
    self.image_list = []
    if args != []:
        for i in range(len(args)):
            args[i] = urllib.url2pathname(args[i])
        self.expand_filelist_and_load_image(args)
    else:
        self.set_go_sensitivities(False)
        self.set_image_sensitivities(False)
    if opts != []:
        for o, a in opts:
            if o in ("-s", "--slideshow"):
                self.toggle_slideshow(None)
def read_config_and_set_settings(self):
conf = ConfigParser.ConfigParser()
if os.path.isfile(self.config_dir + '/miragerc'):
conf.read(self.config_dir + '/miragerc')
if conf.has_option('window', 'w'):
self.usettings['window_width'] = conf.getint('window', 'w')
if conf.has_option('window', 'h'):
self.usettings['window_height'] = conf.getint('window', 'h')
if conf.has_option('window', 'toolbar'):
self.usettings['toolbar_show'] = conf.getboolean('window', 'toolbar')
if conf.has_option('window', 'statusbar'):
self.usettings['statusbar_show'] = conf.getboolean('window', 'statusbar')
if conf.has_option('window', 'thumbpane'):
self.usettings['thumbpane_show'] = conf.getboolean('window', 'thumbpane')
if conf.has_option('prefs', 'simple-bgcolor'):
self.usettings['simple_bgcolor'] = conf.getboolean('prefs', 'simple-bgcolor')
if conf.has_option('prefs', 'bgcolor-red'):
bgr = conf.getint('prefs', 'bgcolor-red')
bgg = conf.getint('prefs', 'bgcolor-green')
bgb = conf.getint('prefs', 'bgcolor-blue')
self.usettings['bgcolor'] = gtk.gdk.Color(red=bgr, green=bgg, blue=bgb)
if conf.has_option('prefs', 'use_last_dir'):
self.usettings['use_last_dir'] = conf.getboolean('prefs', 'use_last_dir')
if conf.has_option('prefs', 'last_dir'):
self.usettings['last_dir'] = conf.get('prefs', 'last_dir')
if conf.has_option('prefs', 'fixed_dir'):
self.usettings['fixed_dir'] = conf.get('prefs', 'fixed_dir')
if conf.has_option('prefs', 'open_all'):
self.usettings['open_all_images'] = conf.getboolean('prefs', 'open_all')
if conf.has_option('prefs', 'hidden'):
self.usettings['open_hidden_files'] = conf.getboolean('prefs', 'hidden')
if conf.has_option('prefs', 'use_numacomp'):
if HAVE_NUMACOMP:
self.usettings['use_numacomp'] = conf.getboolean('prefs', 'use_numacomp')
else:
self.usettings['usenumacomp'] = False
if conf.has_option('prefs', 'casesensitive_numacomp'):
self.usettings['case_numacomp'] = conf.getboolean('prefs', 'casesensitive_numacomp')
if conf.has_option('prefs', 'open_mode'):
self.usettings['open_mode'] = conf.getint('prefs', 'open_mode')
if conf.has_option('prefs', 'last_mode'):
self.usettings['last_mode'] = conf.getint('prefs', 'last_mode')
if conf.has_option('prefs', 'listwrap_mode'):
self.usettings['listwrap_mode'] = conf.getint('prefs', 'listwrap_mode')
if conf.has_option('prefs', 'slideshow_delay'):
self.usettings['slideshow_delay'] = conf.getint('prefs', 'slideshow_delay')
if conf.has_option('prefs', 'slideshow_random'):
self.usettings['slideshow_random'] = conf.getboolean('prefs', 'slideshow_random')
if conf.has_option('prefs', 'zoomquality'):
self.usettings['zoomvalue'] = conf.getint('prefs', 'zoomquality')
if int(round(self.usettings['zoomvalue'], 0)) == 0:
self.zoom_quality = gtk.gdk.INTERP_NEAREST
elif int(round(self.usettings['zoomvalue'], 0)) == 1:
self.zoom_quality = gtk.gdk.INTERP_TILES
elif int(round(self.usettings['zoomvalue'], 0)) == 2:
self.zoom_quality = gtk.gdk.INTERP_BILINEAR
elif int(round(self.usettings['zoomvalue'], 0)) == 3:
self.zoom_quality = gtk.gdk.INTERP_HYPER
if conf.has_option('prefs', 'quality_save'):
self.usettings['quality_save'] = conf.getint('prefs', 'quality_save')
if conf.has_option('prefs', 'disable_screensaver'):
self.usettings['disable_screensaver'] = conf.getboolean('prefs', 'disable_screensaver')
if conf.has_option('prefs', 'slideshow_in_fullscreen'):
self.usettings['slideshow_in_fullscreen'] = conf.getboolean('prefs', 'slideshow_in_fullscreen')
if conf.has_option('prefs', 'preloading_images'):
self.usettings['preloading_images'] = conf.getboolean('prefs', 'preloading_images')
if conf.has_option('prefs', 'thumbsize'):
self.usettings['thumbnail_size'] = conf.getint('prefs', 'thumbsize')
if conf.has_option('prefs', 'screenshot_delay'):
self.usettings['screenshot_delay'] = conf.getint('prefs', 'screenshot_delay')
if conf.has_option('actions', 'num_actions'):
num_actions = conf.getint('actions', 'num_actions')
self.usettings['action_names'] = []
self.usettings['action_commands'] = []
self.usettings['action_shortcuts'] = []
self.usettings['action_batch'] = []
for i in range(num_actions):
if conf.has_option('actions', 'names[' + str(i) + ']') and conf.has_option('actions', 'commands[' + str(i) + ']') and conf.has_option('actions', 'shortcuts[' + str(i) + ']') and conf.has_option('actions', 'batch[' + str(i) + ']'):
self.usettings['action_names'].append(conf.get('actions', 'names[' + str(i) + ']'))
self.usettings['action_commands'].append(conf.get('actions', 'commands[' + str(i) + ']'))
self.usettings['action_shortcuts'].append(conf.get('actions', 'shortcuts[' + str(i) + ']'))
self.usettings['action_batch'].append(conf.getboolean('actions', 'batch[' + str(i) + ']'))
if conf.has_option('prefs', 'savemode'):
self.usettings['savemode'] = conf.getint('prefs', 'savemode')
if conf.has_option('prefs', 'start_in_fullscreen'):
self.usettings['start_in_fullscreen'] = conf.getboolean('prefs', 'start_in_fullscreen')
if conf.has_option('prefs', 'confirm_delete'):
self.usettings['confirm_delete'] = conf.getboolean('prefs', 'confirm_delete')
self.usettings['recentfiles'] = []
if conf.has_option('recent', 'num_recent'):
num_recent = conf.getint('recent', 'num_recent')
for i in range(num_recent):
self.usettings['recentfiles'].append('')
if conf.has_option('recent', 'urls[' + str(i) + ',0]'):
self.usettings['recentfiles'][i] = conf.get('recent', 'urls[' + str(i) + ',0]')
# Read accel_map file, if it exists
if os.path.isfile(self.config_dir + '/accel_map'):
gtk.accel_map_load(self.config_dir + '/accel_map')
	def refresh_recent_files_menu(self):
		"""Rebuild the File > Recent Files menu from self.usettings['recentfiles'].

		Tears down the previously merged UI/action group (if any), registers
		one action per non-empty recent entry, then merges a generated UI
		description string into the menubar placeholder.
		"""
		# Remove the previous merge and action group before rebuilding:
		if self.merge_id_recent:
			self.UIManager.remove_ui(self.merge_id_recent)
		if self.actionGroupRecent:
			self.UIManager.remove_action_group(self.actionGroupRecent)
			self.actionGroupRecent = None
		self.actionGroupRecent = gtk.ActionGroup('RecentFiles')
		self.UIManager.ensure_update()
		for i in range(len(self.usettings['recentfiles'])):
			# '' entries are unused slots in the fixed-length recent list:
			if len(self.usettings['recentfiles'][i]) > 0:
				filename = self.usettings['recentfiles'][i].split("/")[-1]
				if len(filename) > 0:
					if len(filename) > 27:
						# Replace end of file name (excluding extension) with ..
						try:
							menu_name = filename[:25] + '..' + os.path.splitext(filename)[1]
						except:
							# NOTE(review): falls back to just the first
							# character of the name -- presumably an arbitrary
							# safe label; confirm intent.
							menu_name = filename[0]
					else:
						menu_name = filename
					# Double underscores so gtk does not treat '_' as a
					# mnemonic marker in the menu label:
					menu_name = menu_name.replace('_','__')
					# Action name is the list index; <Alt>1..<Alt>N shortcuts:
					action = [(str(i), None, menu_name, '<Alt>' + str(i+1), None, self.recent_action_click)]
					self.actionGroupRecent.add_actions(action)
		uiDescription = """
			<ui>
			  <menubar name="MainMenu">
				  <menu action="FileMenu">
					  <placeholder name="Recent Files">
			"""
		for i in range(len(self.usettings['recentfiles'])):
			if len(self.usettings['recentfiles'][i]) > 0:
				uiDescription = uiDescription + """<menuitem action=\"""" + str(i) + """\"/>"""
		uiDescription = uiDescription + """</placeholder></menu></menubar></ui>"""
		self.merge_id_recent = self.UIManager.add_ui_from_string(uiDescription)
		self.UIManager.insert_action_group(self.actionGroupRecent, 0)
		# Keep the hidden accelerator-only menu invisible after the re-merge:
		self.UIManager.get_widget('/MainMenu/MiscKeysMenuHidden').set_property('visible', False)
def refresh_custom_actions_menu(self):
if self.merge_id:
self.UIManager.remove_ui(self.merge_id)
if self.actionGroupCustom:
self.UIManager.remove_action_group(self.actionGroupCustom)
self.actionGroupCustom = None
self.actionGroupCustom = gtk.ActionGroup('CustomActions')
self.UIManager.ensure_update()
for i in range(len(self.usettings['action_names'])):
action = [(self.usettings['action_names'][i], None, self.usettings['action_names'][i], self.usettings['action_shortcuts'][i], None, self.custom_action_click)]
self.actionGroupCustom.add_actions(action)
uiDescription = """
<ui>
<menubar name="MainMenu">
<menu action="EditMenu">
<menu action="ActionSubMenu">
"""
for i in range(len(self.usettings['action_names'])):
uiDescription = uiDescription + """<menuitem action=\"""" + self.usettings['action_names'][len(self.usettings['action_names'])-i-1].replace('&','&') + """\" position="top"/>"""
uiDescription = uiDescription + """</menu></menu></menubar></ui>"""
self.merge_id = self.UIManager.add_ui_from_string(uiDescription)
self.UIManager.insert_action_group(self.actionGroupCustom, 0)
self.UIManager.get_widget('/MainMenu/MiscKeysMenuHidden').set_property('visible', False)
	def thumbpane_update_images(self, clear_first=False, force_upto_imgnum=-1):
		"""Kick off (threaded) generation of thumbnails for visible rows.

		clear_first       -- reset the thumbnail list to blank placeholders first.
		force_upto_imgnum -- also load thumbnails up to at least this list index
		                     so the currently selected image can be shown.
		"""
		self.stop_now = False
		# When first populating the thumbpane, make sure we go up to at least
		# force_upto_imgnum so that we can show this image selected:
		if clear_first:
			self.thumbpane_clear_list()
		# Load all images up to the bottom of the visible thumbpane rect
		# (plus one extra thumbnail's height of lookahead):
		rect = self.thumbpane.get_visible_rect()
		bottom_coord = rect.y + rect.height + self.usettings['thumbnail_size']
		if bottom_coord > self.thumbpane_bottom_coord_loaded:
			self.thumbpane_bottom_coord_loaded = bottom_coord
		# update images in a daemon thread so the GUI stays responsive;
		# only one updater runs at a time:
		if not self.thumbpane_updating:
			thread = threading.Thread(target=self.thumbpane_update_pending_images, args=(force_upto_imgnum, None))
			thread.setDaemon(True)
			thread.start()
def thumbpane_create_dir(self):
if not os.path.exists(os.path.expanduser('~/.thumbnails/')):
os.mkdir(os.path.expanduser('~/.thumbnails/'))
if not os.path.exists(os.path.expanduser('~/.thumbnails/normal/')):
os.mkdir(os.path.expanduser('~/.thumbnails/normal/'))
	def thumbpane_update_pending_images(self, force_upto_imgnum, foo):
		"""Worker (run in a daemon thread) that generates missing thumbnails.

		Walks the image list from the top, accumulating row heights, until it
		has covered the recorded visible extent of the thumbpane -- and at
		least up to force_upto_imgnum, so the current image can be selected.
		``foo`` is an unused placeholder argument.
		"""
		self.thumbpane_updating = True
		self.thumbpane_create_dir()
		# Check to see if any images need their thumbnails generated.
		curr_coord = 0
		imgnum = 0
		while curr_coord < self.thumbpane_bottom_coord_loaded or imgnum <= force_upto_imgnum:
			# Bail out on app shutdown, explicit stop, or hidden thumbpane:
			if self.closing_app or self.stop_now or not self.usettings['thumbpane_show']:
				break
			if imgnum >= len(self.image_list):
				break
			self.thumbpane_set_image(self.image_list[imgnum], imgnum)
			curr_coord += self.thumbpane.get_background_area((imgnum,),self.thumbcolumn).height
			if force_upto_imgnum == imgnum:
				# Verify that the user hasn't switched images while we're loading thumbnails:
				if force_upto_imgnum == self.curr_img_in_list:
					# GUI calls must happen on the main loop, hence idle_add:
					gobject.idle_add(self.thumbpane_select, force_upto_imgnum)
			imgnum += 1
		self.thumbpane_updating = False
	def thumbpane_clear_list(self):
		"""Reset the thumbnail list to blank placeholder images.

		Also resets the loaded-extent bookkeeping and the per-image
		'thumbnail loaded' flags.
		"""
		self.thumbpane_bottom_coord_loaded = 0
		# Block the scroll handler while mutating the model so clearing the
		# list does not trigger another update pass:
		self.thumbscroll.get_vscrollbar().handler_block(self.thumb_scroll_handler)
		self.thumblist.clear()
		self.thumbscroll.get_vscrollbar().handler_unblock(self.thumb_scroll_handler)
		# One correctly-sized blank placeholder per image keeps row heights
		# stable until real thumbnails arrive:
		for image in self.image_list:
			blank_pix = self.get_blank_pix_for_image(image)
			self.thumblist.append([blank_pix])
		self.thumbnail_loaded = [False]*len(self.image_list)
	def thumbpane_set_image(self, image_name, imgnum, force_update=False):
		"""Load/generate the thumbnail for image_name into row imgnum.

		Does nothing when the thumbpane is hidden or the thumbnail is already
		loaded (unless force_update). May be called from the worker thread.
		"""
		if self.usettings['thumbpane_show']:
			if not self.thumbnail_loaded[imgnum] or force_update:
				filename, thumbfile = self.thumbnail_get_name(image_name)
				pix = self.thumbpane_get_pixbuf(thumbfile, filename, force_update)
				if pix:
					if self.usettings['thumbnail_size'] != 128:
						# 128 is the size of the saved thumbnail, so convert if different:
						pix, image_width, image_height = self.get_pixbuf_of_size(pix, self.usettings['thumbnail_size'], gtk.gdk.INTERP_TILES)
					self.thumbnail_loaded[imgnum] = True
					# Block the scroll handler while replacing the row so the
					# model change does not re-trigger an update:
					self.thumbscroll.get_vscrollbar().handler_block(self.thumb_scroll_handler)
					pix = self.pixbuf_add_border(pix)
					try:
						self.thumblist[imgnum] = [pix]
					except:
						# Row may have vanished (list reloaded concurrently);
						# losing one thumbnail update is harmless.
						pass
					self.thumbscroll.get_vscrollbar().handler_unblock(self.thumb_scroll_handler)
	def thumbnail_get_name(self, image_name):
		"""Return (file uri, thumbnail path) for the given image path.

		The thumbnail path follows the freedesktop.org spec:
		~/.thumbnails/normal/<md5 of the file:// uri>.png
		"""
		# NOTE(review): expanduser() on a 'file://...' string is effectively a
		# no-op for absolute paths -- presumably kept for symmetry; confirm.
		filename = os.path.expanduser('file://' + image_name)
		uriname = os.path.expanduser('file://' + urllib.pathname2url(image_name))
		# hashlib on newer pythons, the legacy md5 module otherwise:
		if HAS_HASHLIB:
			m = hashlib.md5()
		else:
			m = md5.new()
		m.update(uriname)
		mhex = m.hexdigest()
		mhex_filename = os.path.expanduser('~/.thumbnails/normal/' + mhex + '.png')
		return filename, mhex_filename
	def thumbpane_get_pixbuf(self, thumb_url, image_url, force_generation):
		# Returns a valid pixbuf or None if a pixbuf cannot be generated. Tries to re-use
		# a thumbnail from ~/.thumbails/normal/, otherwise generates one with the
		# XDG filename: md5(file:///full/path/to/image).png
		imgfile = image_url
		# Strip a 'file://' scheme prefix to get a plain filesystem path:
		if imgfile[:7] == 'file://':
			imgfile = imgfile[7:]
		try:
			if os.path.exists(thumb_url) and not force_generation:
				pix = gtk.gdk.pixbuf_new_from_file(thumb_url)
				# The spec stores the source file's mtime in the thumbnail;
				# a match means the cached thumbnail is still current:
				pix_mtime = pix.get_option('tEXt::Thumb::MTime')
				if pix_mtime:
					st = os.stat(imgfile)
					file_mtime = str(st[stat.ST_MTIME])
					# If the mtimes match, we're good. if not, regenerate the thumbnail..
					if pix_mtime == file_mtime:
						return pix
			# Create the 128x128 thumbnail:
			uri = 'file://' + urllib.pathname2url(imgfile)
			pix = gtk.gdk.pixbuf_new_from_file(imgfile)
			pix, image_width, image_height = self.get_pixbuf_of_size(pix, 128, gtk.gdk.INTERP_TILES)
			st = os.stat(imgfile)
			file_mtime = str(st[stat.ST_MTIME])
			# Save image to .thumbnails:
			pix.save(thumb_url, "png", {'tEXt::Thumb::URI':uri, 'tEXt::Thumb::MTime':file_mtime, 'tEXt::Software':'Mirage' + __version__})
			return pix
		except:
			# Deliberate best-effort: any failure (unreadable file, broken
			# image, unwritable cache dir) just means "no thumbnail".
			return None
def thumbpane_load_image(self, treeview, imgnum):
if imgnum != self.curr_img_in_list:
gobject.idle_add(self.goto_image, str(imgnum), None)
	def thumbpane_selection_changed(self, treeview):
		"""Handle a click/selection change in the thumbnail pane.

		Offers to save unsaved edits first; if the user cancels, the previous
		selection is restored and the switch is aborted.
		"""
		cancel = self.autosave_image()
		if cancel:
			# Revert selection...
			gobject.idle_add(self.thumbpane_select, self.curr_img_in_list)
			return True
		try:
			model, paths = self.thumbpane.get_selection().get_selected_rows()
			imgnum = paths[0][0]
			# Make sure the clicked row has a real thumbnail before showing it:
			if not self.thumbnail_loaded[imgnum]:
				self.thumbpane_set_image(self.image_list[imgnum], imgnum)
			gobject.idle_add(self.thumbpane_load_image, treeview, imgnum)
		except:
			# Selection may be empty (paths == []); ignore.
			pass
	def thumbpane_select(self, imgnum):
		"""Programmatically select and scroll to row imgnum in the thumbpane.

		The selection-changed handler is blocked so this does not recurse
		into thumbpane_selection_changed.
		"""
		if self.usettings['thumbpane_show']:
			self.thumbpane.get_selection().handler_block(self.thumb_sel_handler)
			try:
				self.thumbpane.get_selection().select_path((imgnum,))
				self.thumbpane.scroll_to_cell((imgnum,))
			except:
				# Row may no longer exist (list changed); ignore.
				pass
			self.thumbpane.get_selection().handler_unblock(self.thumb_sel_handler)
def thumbpane_set_size(self):
self.thumbcolumn.set_fixed_width(self.thumbpane_get_size())
self.window_resized(None, self.window.allocation, True)
def thumbpane_get_size(self):
return int(self.usettings['thumbnail_size'] * 1.3)
	def thumbpane_scrolled(self, range):
		# Scrollbar moved: lazily generate thumbnails for newly visible rows.
		self.thumbpane_update_images()
	def get_blank_pix_for_image(self, image):
		# Sizes the "blank image" icon for the thumbpane. This will ensure that we don't
		# load a humongous icon for a small pix, for example, and will keep the thumbnails
		# from shifting around when they are actually loaded.
		try:
			# pixbuf_get_file_info reads only the header: (format, width, height)
			info = gtk.gdk.pixbuf_get_file_info(image)
			imgwidth = float(info[1])
			imgheight = float(info[2])
			# Scale the placeholder height the same way the real thumbnail
			# will be scaled (capped at thumbnail_size, aspect preserved):
			if imgheight > self.usettings['thumbnail_size']:
				if imgheight > imgwidth:
					imgheight = self.usettings['thumbnail_size']
				else:
					imgheight = imgheight/imgwidth * self.usettings['thumbnail_size']
			imgheight = 2 + int(imgheight) # Account for border that will be added to thumbnails..
			imgwidth = self.usettings['thumbnail_size']
		except:
			# Unreadable header: fall back to a square placeholder.
			imgheight = 2 + self.usettings['thumbnail_size']
			imgwidth = self.usettings['thumbnail_size']
		# Transparent canvas with a centered, scaled-down generic icon:
		blank_pix = gtk.gdk.Pixbuf(gtk.gdk.COLORSPACE_RGB, True, 8, imgwidth, imgheight)
		blank_pix.fill(0x00000000)
		imgwidth2 = int(imgheight*0.8)
		imgheight2 = int(imgheight*0.8)
		composite_pix = self.blank_image.scale_simple(imgwidth2, imgheight2, gtk.gdk.INTERP_BILINEAR)
		leftcoord = int((imgwidth - imgwidth2)/2)
		topcoord = int((imgheight - imgheight2)/2)
		composite_pix.copy_area(0, 0, imgwidth2, imgheight2, blank_pix, leftcoord, topcoord)
		return blank_pix
	def find_path(self, filename, exit_on_fail=True):
		""" Find a pixmap or icon by looking through standard dirs.
			If the image isn't found exit with error status 1 unless
			exit_on_fail is set to False, then return None """
		# Build the candidate directory list once and cache it on self:
		if not self.resource_path_list:
			#If executed from mirage in bin this points to the basedir
			basedir_mirage = os.path.split(sys.path[0])[0]
			#If executed from mirage.py module in python lib this points to the basedir
			f0 = os.path.split(__file__)[0].split('/lib')[0]
			# set() removes duplicates; filter keeps only existing dirs:
			self.resource_path_list = list(set(filter(os.path.isdir, [
				os.path.join(basedir_mirage, 'share', 'mirage'),
				os.path.join(basedir_mirage, 'share', 'pixmaps'),
				os.path.join(sys.prefix, 'share', 'mirage'),
				os.path.join(sys.prefix, 'share', 'pixmaps'),
				os.path.join(sys.prefix, 'local', 'share', 'mirage'),
				os.path.join(sys.prefix, 'local', 'share', 'pixmaps'),
				sys.path[0], #If it's run non-installed
				os.path.join(f0, 'share', 'mirage'),
				os.path.join(f0, 'share', 'pixmaps'),
				])))
		for path in self.resource_path_list:
			pix = os.path.join(path, filename)
			if os.path.exists(pix):
				return pix
		# If we reached here, we didn't find the pixmap
		if exit_on_fail:
			print _("Couldn't find the image %s. Please check your installation.") % filename
			sys.exit(1)
		else:
			return None
def gconf_key_changed(self, client, cnxn_id, entry, label):
if entry.value.type == gconf.VALUE_STRING:
style = entry.value.to_string()
if style == "both":
self.toolbar.set_style(gtk.TOOLBAR_BOTH)
elif style == "both-horiz":
self.toolbar.set_style(gtk.TOOLBAR_BOTH_HORIZ)
elif style == "icons":
self.toolbar.set_style(gtk.TOOLBAR_ICONS)
elif style == "text":
self.toolbar.set_style(gtk.TOOLBAR_TEXT)
if self.image_loaded and self.last_image_action_was_fit:
if self.last_image_action_was_smart_fit:
self.zoom_to_fit_or_1_to_1(None, False, False)
else:
self.zoom_to_fit_window(None, False, False)
	def toolbar_focused(self, widget, direction):
		# Never let the toolbar keep keyboard focus; bounce it back to the
		# image layout so arrow-key navigation keeps working. Returning True
		# stops further handling of the focus event.
		self.layout.grab_focus()
		return True
	def topwindow_keypress(self, widget, event):
		# For whatever reason, 'Left' and 'Right' cannot be used as menu
		# accelerators so we will manually check for them here:
		# Only unmodified arrow keys navigate (Shift/Ctrl/Alt combos fall through):
		if (not (event.state & gtk.gdk.SHIFT_MASK)) and not (event.state & gtk.gdk.CONTROL_MASK) and not (event.state & gtk.gdk.MOD1_MASK):
			if event.keyval == gtk.gdk.keyval_from_name('Left') or event.keyval == gtk.gdk.keyval_from_name('Up'):
				self.goto_prev_image(None)
				return
			elif event.keyval == gtk.gdk.keyval_from_name('Right') or event.keyval == gtk.gdk.keyval_from_name('Down'):
				self.goto_next_image(None)
				return
		shortcut = gtk.accelerator_name(event.keyval, event.state)
		# Escape aborts any long-running scan/search and refreshes the title:
		if "Escape" in shortcut:
			self.stop_now = True
			self.searching_for_images = False
			while gtk.events_pending():
				gtk.main_iteration()
			self.update_title()
			return
	def parse_action_command(self, command, batchmode):
		"""Run a custom action command, then refresh any images it changed.

		command   -- the user-configured shell command (with %-macros).
		batchmode -- run once per image in the list instead of only on the
		             current image.

		After running, the current image and the prev/next preload caches are
		re-checked against the files on disk and reloaded/invalidated if the
		command modified or deleted them.
		"""
		self.running_custom_actions = True
		self.change_cursor(gtk.gdk.Cursor(gtk.gdk.WATCH))
		# Flush pending GUI events so the busy cursor actually appears:
		while gtk.events_pending():
			gtk.main_iteration()
		self.curr_custom_action = 0
		if batchmode:
			self.num_custom_actions = len(self.image_list)
			for i in range(self.num_custom_actions):
				# Progress (curr/num) is shown by update_statusbar():
				self.curr_custom_action += 1
				self.update_statusbar()
				while gtk.events_pending():
					gtk.main_iteration()
				imagename = self.image_list[i]
				self.parse_action_command2(command, imagename)
		else:
			self.num_custom_actions = 1
			self.curr_custom_action = 1
			self.update_statusbar()
			while gtk.events_pending():
				gtk.main_iteration()
			self.parse_action_command2(command, self.currimg.name)
		gc.collect()
		self.change_cursor(None)
		# Refresh the current image or any preloaded needed if they have changed:
		if not os.path.exists(self.currimg.name):
			# The action deleted the current image:
			self.currimg.pixbuf_original = None
			self.image_load_failed(False)
		else:
			# Animations and static images are compared differently:
			animtest = gtk.gdk.PixbufAnimation(self.currimg.name)
			if animtest.is_static_image():
				if self.images_are_different(animtest.get_static_image(), self.currimg.pixbuf_original):
					self.load_new_image2(False, False, True, False)
			else:
				if self.images_are_different(animtest, self.currimg.pixbuf_original):
					self.load_new_image2(False, False, True, False)
			self.running_custom_actions = False
			self.update_statusbar()
			while gtk.events_pending():
				gtk.main_iteration()
		# Invalidate and re-preload the prev/next caches if the action
		# changed or removed those files (index == -1 marks a slot stale):
		if not os.path.exists(self.previmg.name):
			self.previmg.index = -1
		else:
			animtest = gtk.gdk.PixbufAnimation(self.previmg.name)
			if animtest.is_static_image():
				if self.images_are_different(animtest.get_static_image(), self.previmg.pixbuf_original):
					self.previmg.index = -1
					self.preload_when_idle = gobject.idle_add(self.preload_prev_image, False)
			else:
				if self.images_are_different(animtest, self.previmg.pixbuf_original):
					self.previmg.index = -1
					self.preload_when_idle = gobject.idle_add(self.preload_prev_image, False)
		if not os.path.exists(self.nextimg.name):
			self.nextimg.index = -1
		else:
			animtest = gtk.gdk.PixbufAnimation(self.nextimg.name)
			if animtest.is_static_image():
				if self.images_are_different(animtest.get_static_image(), self.nextimg.pixbuf_original):
					self.nextimg.index = -1
					self.preload_when_idle = gobject.idle_add(self.preload_next_image, False)
			else:
				if self.images_are_different(animtest, self.nextimg.pixbuf_original):
					self.nextimg.index = -1
					self.preload_when_idle = gobject.idle_add(self.preload_next_image, False)
		self.stop_now = False
		if batchmode:
			# Update all thumbnails:
			gobject.idle_add(self.thumbpane_update_images, True, self.curr_img_in_list)
		else:
			# Update only the current thumbnail:
			gobject.idle_add(self.thumbpane_set_image, self.image_list[self.curr_img_in_list], self.curr_img_in_list, True)
def images_are_different(self, pixbuf1, pixbuf2):
if pixbuf1.get_pixels() == pixbuf2.get_pixels():
return False
else:
return True
	def recent_action_click(self, action):
		"""Open the recent-files entry whose index is the action's name.

		Entries that no longer exist are flagged as failed loads and removed
		from the recent list.
		"""
		self.stop_now = True
		while gtk.events_pending():
			gtk.main_iteration()
		# Offer to save unsaved edits first; abort if the user cancels:
		cancel = self.autosave_image()
		if cancel:
			return
		# The gtk action was registered with the list index as its name:
		index = int(action.get_name())
		if os.path.isfile(self.usettings['recentfiles'][index]) or os.path.exists(self.usettings['recentfiles'][index]) or self.usettings['recentfiles'][index].startswith('http://') or self.usettings['recentfiles'][index].startswith('ftp://'):
			self.expand_filelist_and_load_image([self.usettings['recentfiles'][index]])
		else:
			# File is gone: show the error placeholder and drop the entry.
			self.image_list = []
			self.curr_img_in_list = 0
			self.image_list.append(self.usettings['recentfiles'][index])
			self.image_load_failed(False)
			self.recent_file_remove_and_refresh(index)
def recent_file_remove_and_refresh_name(self, rmfile):
index_num = 0
for imgfile in self.usettings['recentfiles']:
if imgfile == rmfile:
self.recent_file_remove_and_refresh(index_num)
break
index_num += index_num
def recent_file_remove_and_refresh(self, index_num):
i = index_num
while i < len(self.usettings['recentfiles'])-1:
self.usettings['recentfiles'][i] = self.usettings['recentfiles'][i+1]
i = i + 1
# Set last item empty:
self.usettings['recentfiles'][len(self.usettings['recentfiles'])-1] = ''
self.refresh_recent_files_menu()
def recent_file_add_and_refresh(self, addfile):
# First check if the filename is already in the list:
for i in range(len(self.usettings['recentfiles'])):
if len(self.usettings['recentfiles'][i]) > 0:
if addfile == self.usettings['recentfiles'][i]:
# If found in list, put to position 1 and decrement the rest:
j = i
while j > 0:
self.usettings['recentfiles'][j] = self.usettings['recentfiles'][j-1]
j = j - 1
self.usettings['recentfiles'][0] = addfile
self.refresh_recent_files_menu()
return
# If not found, put to position 1, decrement the rest:
j = len(self.usettings['recentfiles'])-1
while j > 0:
self.usettings['recentfiles'][j] = self.usettings['recentfiles'][j-1]
j = j - 1
if len(self.usettings['recentfiles']) > 0:
self.usettings['recentfiles'][0] = addfile
self.refresh_recent_files_menu()
	def custom_action_click(self, action):
		"""Menu callback: run the custom action matching this gtk action.

		The gtk action name equals the configured action name; the matching
		command and batch flag are looked up by index.
		"""
		if self.UIManager.get_widget('/MainMenu/EditMenu/ActionSubMenu/' + action.get_name()).get_property('sensitive'):
			for i in range(len(self.usettings['action_shortcuts'])):
				try:
					if action.get_name() == self.usettings['action_names'][i]:
						self.parse_action_command(self.usettings['action_commands'][i], self.usettings['action_batch'][i])
				except:
					# Best-effort: a malformed/incomplete action entry is
					# silently skipped rather than crashing the menu handler.
					pass
	def parse_action_command2(self, cmd, imagename):
		# Executes the given command using ``os.system``, substituting "%"-macros approprately.
		def sh_esc(s):
			# Backslash-escape every character that is not plainly safe in a
			# shell word, so filenames cannot inject shell syntax.
			import re
			return re.sub(r'[^/._a-zA-Z0-9-]', lambda c: '\\'+c.group(), s)
		cmd = cmd.strip()
		# [NEXT] and [PREV] are only valid alone or at the end of the command
		if cmd == "[NEXT]":
			self.goto_next_image(None)
			return
		elif cmd == "[PREV]":
			self.goto_prev_image(None)
			return
		# -1=go to previous, 1=go to next, 0=don't change
		prev_or_next=0
		if cmd[-6:] == "[NEXT]":
			prev_or_next=1
			cmd = cmd[:-6]
		elif cmd[-6:] == "[PREV]":
			prev_or_next=-1
			cmd = cmd[:-6]
		# Macro substitution (all values shell-escaped):
		# %F full path, %N basename without extension, %P directory,
		# %E extension, %L the whole image list.
		if "%F" in cmd:
			cmd = cmd.replace("%F", sh_esc(imagename))
		if "%N" in cmd:
			cmd = cmd.replace("%N", sh_esc(os.path.splitext(os.path.basename(imagename))[0]))
		if "%P" in cmd:
			cmd = cmd.replace("%P", sh_esc(os.path.dirname(imagename) + "/"))
		if "%E" in cmd:
			cmd = cmd.replace("%E", sh_esc(os.path.splitext(os.path.basename(imagename))[1]))
		if "%L" in cmd:
			cmd = cmd.replace("%L", " ".join([sh_esc(s) for s in self.image_list]))
		if self.verbose:
			print _("Action: %s") % cmd
		# os.system's exit status holds the command's return code in the
		# high byte; shift to recover it.
		shell_rc = os.system(cmd) >> 8
		if self.verbose:
			print _("Action return code: %s") % shell_rc
		if shell_rc != 0:
			msg = _('Unable to launch \"%s\". Please specify a valid command from Edit > Custom Actions.') % cmd
			error_dialog = gtk.MessageDialog(self.window, gtk.DIALOG_MODAL, gtk.MESSAGE_WARNING, gtk.BUTTONS_CLOSE, msg)
			error_dialog.set_title(_("Invalid Custom Action"))
			error_dialog.run()
			error_dialog.destroy()
		elif prev_or_next == 1:
			self.goto_next_image(None)
		elif prev_or_next == -1:
			self.goto_prev_image(None)
		self.running_custom_actions = False
def set_go_sensitivities(self, enable):
self.UIManager.get_widget('/MainMenu/GoMenu/Previous Image').set_sensitive(enable)
self.UIManager.get_widget('/MainMenu/GoMenu/Next Image').set_sensitive(enable)
self.UIManager.get_widget('/MainMenu/GoMenu/Random Image').set_sensitive(enable)
self.UIManager.get_widget('/MainMenu/GoMenu/First Image').set_sensitive(enable)
self.UIManager.get_widget('/MainMenu/GoMenu/Last Image').set_sensitive(enable)
self.UIManager.get_widget('/MainMenu/GoMenu/Previous Subfolder').set_sensitive(enable)
self.UIManager.get_widget('/MainMenu/GoMenu/Next Subfolder').set_sensitive(enable)
self.UIManager.get_widget('/Popup/Previous Image').set_sensitive(enable)
self.UIManager.get_widget('/Popup/Next Image').set_sensitive(enable)
self.UIManager.get_widget('/MainToolbar/Previous2').set_sensitive(enable)
self.UIManager.get_widget('/MainToolbar/Next2').set_sensitive(enable)
self.ss_forward.set_sensitive(enable)
self.ss_back.set_sensitive(enable)
	def set_image_sensitivities(self, enable):
		"""Enable/disable all per-image controls (zoom, edit, save, rotate).

		Save/Properties start disabled and are only re-enabled when the
		current file's format can actually be written; rotate/flip entries
		additionally require the imgfuncs module.
		"""
		self.set_zoom_in_sensitivities(enable)
		self.set_zoom_out_sensitivities(enable)
		self.UIManager.get_widget('/MainMenu/ViewMenu/1:1').set_sensitive(enable)
		self.UIManager.get_widget('/MainMenu/ViewMenu/Fit').set_sensitive(enable)
		self.UIManager.get_widget('/MainMenu/EditMenu/Delete Image').set_sensitive(enable)
		self.UIManager.get_widget('/MainMenu/EditMenu/Rename Image').set_sensitive(enable)
		self.UIManager.get_widget('/MainMenu/EditMenu/Crop').set_sensitive(enable)
		self.UIManager.get_widget('/MainMenu/EditMenu/Resize').set_sensitive(enable)
		self.UIManager.get_widget('/MainMenu/EditMenu/Saturation').set_sensitive(enable)
		self.UIManager.get_widget('/MainToolbar/1:1').set_sensitive(enable)
		self.UIManager.get_widget('/MainToolbar/Fit').set_sensitive(enable)
		self.UIManager.get_widget('/Popup/1:1').set_sensitive(enable)
		self.UIManager.get_widget('/Popup/Fit').set_sensitive(enable)
		self.UIManager.get_widget('/MainMenu/FileMenu/Save As').set_sensitive(enable)
		# Default to disabled; re-enabled below only when supported:
		self.UIManager.get_widget('/MainMenu/FileMenu/Save').set_sensitive(False)
		self.UIManager.get_widget('/MainMenu/FileMenu/Properties').set_sensitive(False)
		# Only jpeg, png, and bmp images are currently supported for saving
		if len(self.image_list) > 0:
			try:
				filetype = gtk.gdk.pixbuf_get_file_info(self.currimg.name)[0]['name']
				self.UIManager.get_widget('/MainMenu/FileMenu/Properties').set_sensitive(True)
				if self.filetype_is_writable(filetype):
					self.UIManager.get_widget('/MainMenu/FileMenu/Save').set_sensitive(enable)
			except:
				# File info unavailable (unreadable/removed image):
				self.UIManager.get_widget('/MainMenu/FileMenu/Save').set_sensitive(False)
		if self.actionGroupCustom:
			for action in self.usettings['action_names']:
				self.UIManager.get_widget('/MainMenu/EditMenu/ActionSubMenu/' + action).set_sensitive(enable)
		# Rotate/flip need the imgfuncs helper module:
		if not HAS_IMGFUNCS:
			enable = False
		self.UIManager.get_widget('/MainMenu/EditMenu/Rotate Left').set_sensitive(enable)
		self.UIManager.get_widget('/MainMenu/EditMenu/Rotate Right').set_sensitive(enable)
		self.UIManager.get_widget('/MainMenu/EditMenu/Flip Vertically').set_sensitive(enable)
		self.UIManager.get_widget('/MainMenu/EditMenu/Flip Horizontally').set_sensitive(enable)
def set_zoom_in_sensitivities(self, enable):
self.UIManager.get_widget('/MainMenu/ViewMenu/In').set_sensitive(enable)
self.UIManager.get_widget('/MainToolbar/In').set_sensitive(enable)
self.UIManager.get_widget('/Popup/In').set_sensitive(enable)
def set_zoom_out_sensitivities(self, enable):
self.UIManager.get_widget('/MainMenu/ViewMenu/Out').set_sensitive(enable)
self.UIManager.get_widget('/MainToolbar/Out').set_sensitive(enable)
self.UIManager.get_widget('/Popup/Out').set_sensitive(enable)
def set_next_image_sensitivities(self, enable):
self.UIManager.get_widget('/MainToolbar/Next2').set_sensitive(enable)
self.UIManager.get_widget('/MainMenu/GoMenu/Next Image').set_sensitive(enable)
self.UIManager.get_widget('/Popup/Next Image').set_sensitive(enable)
self.ss_forward.set_sensitive(enable)
def set_previous_image_sensitivities(self, enable):
self.UIManager.get_widget('/MainToolbar/Previous2').set_sensitive(enable)
self.UIManager.get_widget('/MainMenu/GoMenu/Previous Image').set_sensitive(enable)
self.UIManager.get_widget('/Popup/Previous Image').set_sensitive(enable)
self.ss_back.set_sensitive(enable)
	def set_next_subfolder_sensitivities(self, enable):
		# Toggle the Go > Next Subfolder menu entry.
		self.UIManager.get_widget('/MainMenu/GoMenu/Next Subfolder').set_sensitive(enable)
	def set_previous_subfolder_sensitivities(self, enable):
		# Toggle the Go > Previous Subfolder menu entry.
		self.UIManager.get_widget('/MainMenu/GoMenu/Previous Subfolder').set_sensitive(enable)
	def set_first_image_sensitivities(self, enable):
		# Toggle the Go > First Image menu entry.
		self.UIManager.get_widget('/MainMenu/GoMenu/First Image').set_sensitive(enable)
	def set_last_image_sensitivities(self, enable):
		# Toggle the Go > Last Image menu entry.
		self.UIManager.get_widget('/MainMenu/GoMenu/Last Image').set_sensitive(enable)
	def set_random_image_sensitivities(self, enable):
		# Toggle the Go > Random Image menu entry.
		self.UIManager.get_widget('/MainMenu/GoMenu/Random Image').set_sensitive(enable)
def set_slideshow_sensitivities(self):
if len(self.image_list) <=1:
self.UIManager.get_widget('/MainMenu/GoMenu/Start Slideshow').show()
self.UIManager.get_widget('/MainMenu/GoMenu/Start Slideshow').set_sensitive(False)
self.UIManager.get_widget('/MainMenu/GoMenu/Stop Slideshow').hide()
self.UIManager.get_widget('/MainMenu/GoMenu/Stop Slideshow').set_sensitive(False)
elif self.slideshow_mode:
self.UIManager.get_widget('/MainMenu/GoMenu/Start Slideshow').hide()
self.UIManager.get_widget('/MainMenu/GoMenu/Start Slideshow').set_sensitive(False)
self.UIManager.get_widget('/MainMenu/GoMenu/Stop Slideshow').show()
self.UIManager.get_widget('/MainMenu/GoMenu/Stop Slideshow').set_sensitive(True)
else:
self.UIManager.get_widget('/MainMenu/GoMenu/Start Slideshow').show()
self.UIManager.get_widget('/MainMenu/GoMenu/Start Slideshow').set_sensitive(True)
self.UIManager.get_widget('/MainMenu/GoMenu/Stop Slideshow').hide()
self.UIManager.get_widget('/MainMenu/GoMenu/Stop Slideshow').set_sensitive(False)
if self.slideshow_mode:
self.UIManager.get_widget('/Popup/Start Slideshow').hide()
self.UIManager.get_widget('/Popup/Stop Slideshow').show()
else:
self.UIManager.get_widget('/Popup/Start Slideshow').show()
self.UIManager.get_widget('/Popup/Stop Slideshow').hide()
if len(self.image_list) <=1:
self.UIManager.get_widget('/Popup/Start Slideshow').set_sensitive(False)
else:
self.UIManager.get_widget('/Popup/Start Slideshow').set_sensitive(True)
def set_zoom_sensitivities(self):
if not self.currimg.animation:
self.set_zoom_out_sensitivities(True)
self.set_zoom_in_sensitivities(True)
else:
self.set_zoom_out_sensitivities(False)
self.set_zoom_in_sensitivities(False)
	def print_version(self):
		# Print program version and website to stdout (for the -v option).
		print _("Version: Mirage"), __version__
		print _("Website: http://mirageiv.berlios.de")
	def print_usage(self):
		# Print the command line usage/option help to stdout (used by the
		# -h/--help option). Option descriptions go through gettext.
		self.print_version()
		print ""
		print _("Usage: mirage [OPTION]... FILES|FOLDERS...")
		print ""
		print _("Options") + ":"
		print "  -h, --help           " + _("Show this help and exit")
		print "  -v, --version        " + _("Show version information and exit")
		print "  -V, --verbose        " + _("Show more detailed information")
		print "  -R, --recursive      " + _("Recursively include all images found in")
		print "                       " + _("subdirectories of FOLDERS")
		print "  -s, --slideshow      " + _("Start in slideshow mode")
		print "  -f, --fullscreen     " + _("Start in fullscreen mode")
		print "  -o, --onload 'cmd'   " + _("Execute 'cmd' when an image is loaded")
		print "                       " + _("uses same syntax as custom actions,")
		print "                       " + _("i.e. mirage -o 'echo file is %F'")
def delay_changed(self, action):
self.curr_slideshow_delay = self.ss_delayspin.get_value()
if self.slideshow_mode:
gobject.source_remove(self.timer_delay)
if self.curr_slideshow_random:
self.timer_delay = gobject.timeout_add(int(self.curr_slideshow_delay*1000), self.goto_random_image, "ss", True)
else:
self.timer_delay = gobject.timeout_add(int(self.curr_slideshow_delay*1000), self.goto_next_image, "ss", True)
self.window.set_focus(self.layout)
	def random_changed(self, action):
		# Remember the new state of the "randomize" slideshow checkbox.
		self.curr_slideshow_random = self.ss_randomize.get_active()
	def motion_cb(self, widget, context, x, y, time):
		# Drag-and-drop motion handler: advertise that a drop would be a
		# copy so the drag cursor shows the right feedback.
		context.drag_status(gtk.gdk.ACTION_COPY, time)
		return True
def drop_cb(self, widget, context, x, y, selection, info, time):
uri = selection.data.strip()
path = urllib.url2pathname(uri)
paths = path.rsplit('\n')
for i, path in enumerate(paths):
paths[i] = path.rstrip('\r')
self.expand_filelist_and_load_image(paths)
	def put_error_image_to_window(self):
		# Display the stock "missing image" icon in place of an image that
		# could not be loaded, and disable the image-dependent UI.
		self.imageview.set_from_stock(gtk.STOCK_MISSING_IMAGE, gtk.ICON_SIZE_LARGE_TOOLBAR)
		# Record the placeholder's size so centering works on it too:
		self.currimg.width = self.imageview.size_request()[0]
		self.currimg.height = self.imageview.size_request()[1]
		self.center_image()
		self.set_go_sensitivities(False)
		self.set_image_sensitivities(False)
		self.update_statusbar()
		# -1 marks that no image from the list is currently loaded:
		self.loaded_img_in_list = -1
		return
def expose_event(self, widget, event):
if self.updating_adjustments:
return
self.updating_adjustments = True
if self.hscroll.get_property('visible'):
try:
zoomratio = float(self.currimg.width)/self.previmg_width
newvalue = abs(self.layout.get_hadjustment().get_value() * zoomratio + (self.available_image_width()) * (zoomratio - 1) / 2)
if newvalue >= self.layout.get_hadjustment().lower and newvalue <= (self.layout.get_hadjustment().upper - self.layout.get_hadjustment().page_size):
self.layout.get_hadjustment().set_value(newvalue)
except:
pass
if self.vscroll.get_property('visible'):
try:
newvalue = abs(self.layout.get_vadjustment().get_value() * zoomratio + (self.available_image_height()) * (zoomratio - 1) / 2)
if newvalue >= self.layout.get_vadjustment().lower and newvalue <= (self.layout.get_vadjustment().upper - self.layout.get_vadjustment().page_size):
self.layout.get_vadjustment().set_value(newvalue)
self.previmg_width = self.currimg.width
except:
pass
self.updating_adjustments = False
	def window_resized(self, widget, allocation, force_update=False):
		# size-allocate handler: when the window size actually changed (or
		# force_update is set), re-fit or re-center the current image and
		# regenerate the preload caches for the new size.
		# Update the image size on window resize if the current image was last fit:
		if self.image_loaded:
			if force_update or allocation.width != self.prevwinwidth or allocation.height != self.prevwinheight:
				if self.last_image_action_was_fit:
					if self.last_image_action_was_smart_fit:
						self.zoom_to_fit_or_1_to_1(None, False, False)
					else:
						self.zoom_to_fit_window(None, False, False)
				else:
					self.center_image()
					self.load_new_image_stop_now()
					self.show_scrollbars_if_needed()
				# Also, regenerate preloaded image for new window size:
				self.preload_when_idle = gobject.idle_add(self.preload_next_image, True)
				self.preload_when_idle2 = gobject.idle_add(self.preload_prev_image, True)
		# Remember the size so the next allocation can detect a change:
		self.prevwinwidth = allocation.width
		self.prevwinheight = allocation.height
		return
def save_settings(self):
conf = ConfigParser.ConfigParser()
conf.add_section('window')
conf.set('window', 'w', self.window.get_allocation().width)
conf.set('window', 'h', self.window.get_allocation().height)
conf.set('window', 'toolbar', self.usettings['toolbar_show'])
conf.set('window', 'statusbar', self.usettings['statusbar_show'])
conf.set('window', 'thumbpane', self.usettings['thumbpane_show'])
conf.add_section('prefs')
conf.set('prefs', 'simple-bgcolor', self.usettings['simple_bgcolor'])
conf.set('prefs', 'bgcolor-red', self.usettings['bgcolor'].red)
conf.set('prefs', 'bgcolor-green', self.usettings['bgcolor'].green)
conf.set('prefs', 'bgcolor-blue', self.usettings['bgcolor'].blue)
conf.set('prefs', 'open_all', self.usettings['open_all_images'])
conf.set('prefs', 'hidden', self.usettings['open_hidden_files'])
conf.set('prefs', 'use_numacomp', self.usettings['use_numacomp'])
conf.set('prefs', 'casesensitive_numacomp', self.usettings['case_numacomp'])
conf.set('prefs', 'use_last_dir', self.usettings['use_last_dir'])
conf.set('prefs', 'last_dir', self.usettings['last_dir'])
conf.set('prefs', 'fixed_dir', self.usettings['fixed_dir'])
conf.set('prefs', 'open_mode', self.usettings['open_mode'])
conf.set('prefs', 'last_mode', self.usettings['last_mode'])
conf.set('prefs', 'listwrap_mode', self.usettings['listwrap_mode'])
conf.set('prefs', 'slideshow_delay', int(self.usettings['slideshow_delay']))
conf.set('prefs', 'slideshow_random', self.usettings['slideshow_random'])
conf.set('prefs', 'zoomquality', self.usettings['zoomvalue'])
conf.set('prefs', 'quality_save', int(self.usettings['quality_save']))
conf.set('prefs', 'disable_screensaver', self.usettings['disable_screensaver'])
conf.set('prefs', 'slideshow_in_fullscreen', self.usettings['slideshow_in_fullscreen'])
conf.set('prefs', 'confirm_delete', self.usettings['confirm_delete'])
conf.set('prefs', 'preloading_images', self.usettings['preloading_images'])
conf.set('prefs', 'savemode', self.usettings['savemode'])
conf.set('prefs', 'start_in_fullscreen', self.usettings['start_in_fullscreen'])
conf.set('prefs', 'thumbsize', self.usettings['thumbnail_size'])
conf.set('prefs', 'screenshot_delay', self.usettings['screenshot_delay'])
conf.add_section('actions')
conf.set('actions', 'num_actions', len(self.usettings['action_names']))
for i in range(len(self.usettings['action_names'])):
conf.set('actions', 'names[' + str(i) + ']', self.usettings['action_names'][i])
conf.set('actions', 'commands[' + str(i) + ']', self.usettings['action_commands'][i])
conf.set('actions', 'shortcuts[' + str(i) + ']', self.usettings['action_shortcuts'][i])
conf.set('actions', 'batch[' + str(i) + ']', self.usettings['action_batch'][i])
conf.add_section('recent')
conf.set('recent', 'num_recent', len(self.usettings['recentfiles']))
for i in range(len(self.usettings['recentfiles'])):
conf.set('recent', 'num[' + str(i) + ']', len(self.usettings['recentfiles'][i]))
conf.set('recent', 'urls[' + str(i) + ',0]', self.usettings['recentfiles'][i])
if not os.path.exists(self.config_dir):
os.makedirs(self.config_dir)
conf.write(file(self.config_dir + '/miragerc', 'w'))
# Also, save accel_map:
gtk.accel_map_save(self.config_dir + '/accel_map')
return
	def delete_event(self, widget, event, data=None):
		# Window-close ("X") handler: offer to save a modified image
		# first; returning True cancels the close. Otherwise persist
		# settings and exit the process.
		cancel = self.autosave_image()
		if cancel:
			return True
		self.stop_now = True
		self.closing_app = True
		self.save_settings()
		sys.exit(0)
	def destroy(self, event, data=None):
		# "destroy" signal handler: same save-before-quit flow as
		# delete_event. NOTE(review): unlike delete_event/exit_app this
		# does not call sys.exit(0) — presumably the destroy signal
		# already ends the main loop; verify before unifying the three.
		cancel = self.autosave_image()
		if cancel:
			return True
		self.stop_now = True
		self.closing_app = True
		self.save_settings()
	def exit_app(self, action):
		# File->Quit handler: offer to save a modified image (cancel
		# aborts quitting), persist settings, and exit the process.
		cancel = self.autosave_image()
		if cancel:
			return True
		self.stop_now = True
		self.closing_app = True
		self.save_settings()
		sys.exit(0)
	def put_zoom_image_to_window(self, currimg_preloaded):
		# Render self.currimg into the image view at its current zoom
		# ratio. Unless a preloaded (already scaled) pixbuf is being
		# shown, the scaled pixbuf is regenerated from the pristine
		# original. Updates are frozen while swapping to avoid flicker.
		self.window.window.freeze_updates()
		if not currimg_preloaded:
			# Always start with the original image to preserve quality!
			# Calculate image size:
			finalimg_width = int(self.currimg.pixbuf_original.get_width() * self.currimg.zoomratio)
			finalimg_height = int(self.currimg.pixbuf_original.get_height() * self.currimg.zoomratio)
			if not self.currimg.animation:
				# Scale image:
				if not self.currimg.pixbuf_original.get_has_alpha():
					self.currimg.pixbuf = self.currimg.pixbuf_original.scale_simple(finalimg_width, finalimg_height, self.zoom_quality)
				else:
					# Composite images with transparency over a two-tone
					# grey check pattern so the alpha areas are visible:
					colormap = self.imageview.get_colormap()
					light_grey = colormap.alloc_color('#666666', True, True)
					dark_grey = colormap.alloc_color('#999999', True, True)
					self.currimg.pixbuf = self.currimg.pixbuf_original.composite_color_simple(finalimg_width, finalimg_height, self.zoom_quality, 255, 8, light_grey.pixel, dark_grey.pixel)
			else:
				# Animations are not scaled; display them as-is:
				self.currimg.pixbuf = self.currimg.pixbuf_original
			self.currimg.width, self.currimg.height = finalimg_width, finalimg_height
			self.layout.set_size(self.currimg.width, self.currimg.height)
		self.center_image()
		self.show_scrollbars_if_needed()
		if not self.currimg.animation:
			self.imageview.set_from_pixbuf(self.currimg.pixbuf)
			self.previmage_is_animation = False
		else:
			self.imageview.set_from_animation(self.currimg.pixbuf)
			self.previmage_is_animation = True
		# Clean up (free memory) because I'm lazy
		gc.collect()
		self.window.window.thaw_updates()
		self.loaded_img_in_list = self.curr_img_in_list
def show_scrollbars_if_needed(self):
if self.currimg.width > self.available_image_width():
self.hscroll.show()
else:
self.hscroll.hide()
if self.currimg.height > self.available_image_height():
self.vscroll.show()
else:
self.vscroll.hide()
def center_image(self):
x_shift = int((self.available_image_width() - self.currimg.width)/2)
if x_shift < 0:
x_shift = 0
y_shift = int((self.available_image_height() - self.currimg.height)/2)
if y_shift < 0:
y_shift = 0
self.layout.move(self.imageview, x_shift, y_shift)
def available_image_width(self):
width = self.window.get_size()[0]
if not self.fullscreen_mode:
if self.usettings['thumbpane_show']:
width -= self.thumbscroll.size_request()[0]
return width
def available_image_height(self):
height = self.window.get_size()[1]
if not self.fullscreen_mode:
height -= self.menubar.size_request()[1]
if self.usettings['toolbar_show']:
height -= self.toolbar.size_request()[1]
if self.usettings['statusbar_show']:
height -= self.statusbar.size_request()[1]
return height
	def save_image(self, action):
		# File->Save handler: overwrite the current file in the format it
		# was loaded from (only when the Save menu item is enabled).
		if self.UIManager.get_widget('/MainMenu/FileMenu/Save').get_property('sensitive'):
			self.save_image_now(self.currimg.name, gtk.gdk.pixbuf_get_file_info(self.currimg.name)[0]['name'])
	def save_image_as(self, action):
		# File->Save As handler: prompt for a destination file name and
		# save there, deducing the output format from the typed extension.
		dialog = gtk.FileChooserDialog(title=_("Save As"),action=gtk.FILE_CHOOSER_ACTION_SAVE,buttons=(gtk.STOCK_CANCEL,gtk.RESPONSE_CANCEL,gtk.STOCK_SAVE,gtk.RESPONSE_OK))
		dialog.set_default_response(gtk.RESPONSE_OK)
		filename = os.path.basename(self.currimg.name)
		filetype = None
		dialog.set_current_folder(os.path.dirname(self.currimg.name))
		dialog.set_current_name(filename)
		dialog.set_do_overwrite_confirmation(True)
		response = dialog.run()
		if response == gtk.RESPONSE_OK:
			prev_name = self.currimg.name
			filename = dialog.get_filename()
			dialog.destroy()
			fileext = os.path.splitext(os.path.basename(filename))[1].lower()
			if len(fileext) > 0:
				# Drop the leading dot from the extension:
				fileext = fileext[1:]
			# Override filetype if user typed a filename with a different extension:
			for i in gtk.gdk.pixbuf_get_formats():
				if fileext in i['extensions']:
					filetype = i['name']
			self.save_image_now(filename, filetype)
			self.register_file_with_recent_docs(filename)
		else:
			dialog.destroy()
	def save_image_now(self, dest_name, filetype):
		# Save the current (unscaled) pixbuf to dest_name in the given
		# pixbuf format. Falls back to Save As for non-writable formats
		# and shows an error dialog if the save fails.
		try:
			# Busy cursor while the (potentially slow) save runs:
			self.change_cursor(gtk.gdk.Cursor(gtk.gdk.WATCH))
			while gtk.events_pending():
				gtk.main_iteration()
			if filetype == None:
				# Default to the format the file was loaded from:
				filetype = gtk.gdk.pixbuf_get_file_info(self.currimg.name)[0]['name']
			if self.filetype_is_writable(filetype):
				self.currimg.pixbuf_original.save(dest_name, filetype, {'quality': str(self.usettings['quality_save'])})
				self.currimg.name = dest_name
				self.image_list[self.curr_img_in_list] = dest_name
				self.update_title()
				self.update_statusbar()
				# Update thumbnail:
				gobject.idle_add(self.thumbpane_set_image, dest_name, self.curr_img_in_list, True)
				self.image_modified = False
			else:
				# Format can be read but not written; offer Save As:
				error_dialog = gtk.MessageDialog(self.window, gtk.DIALOG_MODAL, gtk.MESSAGE_WARNING, gtk.BUTTONS_YES_NO, _('The %s format is not supported for saving. Do you wish to save the file in a different format?') % filetype)
				error_dialog.set_title(_("Save"))
				response = error_dialog.run()
				if response == gtk.RESPONSE_YES:
					error_dialog.destroy()
					while gtk.events_pending():
						gtk.main_iteration()
					self.save_image_as(None)
				else:
					error_dialog.destroy()
		except:
			error_dialog = gtk.MessageDialog(self.window, gtk.DIALOG_MODAL, gtk.MESSAGE_WARNING, gtk.BUTTONS_CLOSE, _('Unable to save %s') % dest_name)
			error_dialog.set_title(_("Save"))
			error_dialog.run()
			error_dialog.destroy()
		self.change_cursor(None)
	def autosave_image(self):
		# Returns True if the user has canceled out of the dialog
		# Never call this function from an idle or timeout loop! That will cause
		# the app to freeze.
		# savemode 1 saves silently; savemode 2 asks the user first.
		if self.image_modified:
			if self.usettings['savemode'] == 1:
				# Temporarily force the Save item sensitive so save_image
				# actually runs, then restore its previous state:
				temp = self.UIManager.get_widget('/MainMenu/FileMenu/Save').get_property('sensitive')
				self.UIManager.get_widget('/MainMenu/FileMenu/Save').set_property('sensitive', True)
				self.save_image(None)
				self.UIManager.get_widget('/MainMenu/FileMenu/Save').set_property('sensitive', temp)
			elif self.usettings['savemode'] == 2:
				dialog = gtk.MessageDialog(self.window, gtk.DIALOG_MODAL | gtk.DIALOG_DESTROY_WITH_PARENT, gtk.MESSAGE_QUESTION, gtk.BUTTONS_NONE, _("The current image has been modified. Save changes?"))
				dialog.add_button(gtk.STOCK_CANCEL, gtk.RESPONSE_CANCEL)
				dialog.add_button(gtk.STOCK_NO, gtk.RESPONSE_NO)
				dialog.add_button(gtk.STOCK_SAVE, gtk.RESPONSE_YES)
				dialog.set_title(_("Save?"))
				dialog.set_default_response(gtk.RESPONSE_YES)
				response = dialog.run()
				dialog.destroy()
				if response == gtk.RESPONSE_YES:
					temp = self.UIManager.get_widget('/MainMenu/FileMenu/Save').get_property('sensitive')
					self.UIManager.get_widget('/MainMenu/FileMenu/Save').set_property('sensitive', True)
					self.save_image(None)
					self.UIManager.get_widget('/MainMenu/FileMenu/Save').set_property('sensitive', temp)
					self.image_modified = False
				elif response == gtk.RESPONSE_NO:
					self.image_modified = False
					# Ensures that we don't use the current pixbuf for any preload pixbufs if we are in
					# the process of loading the previous or next image in the list:
					self.currimg.pixbuf = self.currimg.pixbuf_original
					self.nextimg.index = -1
					self.previmg.index = -1
					self.loaded_img_in_list = -1
				else:
					# Cancel: abort whatever operation triggered the autosave.
					return True
def filetype_is_writable(self, filetype):
# Determine if filetype is a writable format
filetype_is_writable = True
for i in gtk.gdk.pixbuf_get_formats():
if filetype in i['extensions']:
if i['is_writable']:
return True
return False
	def open_file(self, action):
		# File->Open handler: stop any in-progress loading, flush pending
		# GTK events, then show the file chooser in file mode.
		self.stop_now = True
		while gtk.events_pending():
			gtk.main_iteration()
		self.open_file_or_folder(action, True)
	def open_file_remote(self, action):
		# File->Open Remote handler: ask for an image URL and load it.
		# Prompt user for the url:
		dialog = gtk.Dialog(_("Open Remote"), self.window, gtk.DIALOG_MODAL, buttons=(gtk.STOCK_CANCEL,gtk.RESPONSE_CANCEL,gtk.STOCK_OPEN,gtk.RESPONSE_OK))
		location = gtk.Entry()
		location.set_size_request(300, -1)
		# Pressing Enter in the entry activates the default (Open) button:
		location.set_activates_default(True)
		hbox = gtk.HBox()
		hbox.pack_start(gtk.Label(_("Image Location (URL):")), False, False, 5)
		hbox.pack_start(location, True, True, 5)
		dialog.vbox.pack_start(hbox, True, True, 10)
		dialog.set_default_response(gtk.RESPONSE_OK)
		dialog.vbox.show_all()
		# Non-modal flow: the response is handled in a callback.
		dialog.connect('response', self.open_file_remote_response, location)
		response = dialog.show()
def open_file_remote_response(self, dialog, response, location):
if response == gtk.RESPONSE_OK:
filenames = []
filenames.append(location.get_text())
dialog.destroy()
while gtk.events_pending():
gtk.main_iteration()
self.expand_filelist_and_load_image(filenames)
else:
dialog.destroy()
	def open_folder(self, action):
		# File->Open Folder handler: stop any in-progress loading, flush
		# pending GTK events, then show the chooser in folder mode.
		self.stop_now = True
		while gtk.events_pending():
			gtk.main_iteration()
		self.open_file_or_folder(action, False)
	def open_file_or_folder(self, action, isfile):
		# Build and show the Open chooser. In file mode it gets image
		# filters and a thumbnail preview; in folder mode it gets a
		# "recurse into subdirectories" checkbox. The response is handled
		# asynchronously in open_file_or_folder_response.
		self.thumbpane_create_dir()
		# Give the user a chance to save a modified image first:
		cancel = self.autosave_image()
		if cancel:
			return
		# If isfile = True, file; If isfile = False, folder
		dialog = gtk.FileChooserDialog(title=_("Open"),action=gtk.FILE_CHOOSER_ACTION_OPEN,buttons=(gtk.STOCK_CANCEL,gtk.RESPONSE_CANCEL,gtk.STOCK_OPEN,gtk.RESPONSE_OK))
		if isfile:
			filter = gtk.FileFilter()
			filter.set_name(_("Images"))
			filter.add_pixbuf_formats()
			dialog.add_filter(filter)
			filter = gtk.FileFilter()
			filter.set_name(_("All files"))
			filter.add_pattern("*")
			dialog.add_filter(filter)
			preview = gtk.Image()
			dialog.set_preview_widget(preview)
			dialog.set_use_preview_label(False)
			dialog.connect("update-preview", self.update_preview, preview)
			recursivebutton = None
		else:
			dialog.set_action(gtk.FILE_CHOOSER_ACTION_SELECT_FOLDER)
			recursivebutton = gtk.CheckButton(label=_("Include images in subdirectories"))
			dialog.set_extra_widget(recursivebutton)
		dialog.set_default_response(gtk.RESPONSE_OK)
		dialog.set_select_multiple(True)
		# Start in the last-used or the configured fixed directory:
		if self.usettings['use_last_dir']:
			if self.usettings['last_dir'] != None:
				dialog.set_current_folder(self.usettings['last_dir'])
		else:
			if self.usettings['fixed_dir'] != None:
				dialog.set_current_folder(self.usettings['fixed_dir'])
		dialog.connect("response", self.open_file_or_folder_response, isfile, recursivebutton)
		response = dialog.show()
	def open_file_or_folder_response(self, dialog, response, isfile, recursivebutton):
		# Response handler for the Open chooser: remember the directory,
		# honor the recurse checkbox (folder mode only), and load the
		# selected files/folders.
		if response == gtk.RESPONSE_OK:
			if self.usettings['use_last_dir']:
				self.usettings['last_dir'] = dialog.get_current_folder()
			if not isfile and recursivebutton.get_property('active'):
				self.recursive = True
			filenames = dialog.get_filenames()
			dialog.destroy()
			while gtk.events_pending():
				gtk.main_iteration()
			self.expand_filelist_and_load_image(filenames)
		else:
			dialog.destroy()
	def update_preview(self, file_chooser, preview):
		# "update-preview" handler for the Open dialog: show a thumbnail
		# of the highlighted file, or a blank 128x128 placeholder when no
		# thumbnail can be produced.
		filename = file_chooser.get_preview_filename()
		if not filename:
			return
		filename, thumbfile = self.thumbnail_get_name(filename)
		pixbuf = self.thumbpane_get_pixbuf(thumbfile, filename, False)
		if pixbuf:
			preview.set_from_pixbuf(pixbuf)
		else:
			# No thumbnail available; fill with fully transparent black:
			pixbuf = gtk.gdk.Pixbuf(gtk.gdk.COLORSPACE_RGB, 1, 8, 128, 128)
			pixbuf.fill(0x00000000)
			preview.set_from_pixbuf(pixbuf)
		have_preview = True
		file_chooser.set_preview_widget_active(have_preview)
		del pixbuf
		gc.collect()
	def hide_cursor(self):
		# Timeout callback: in fullscreen (and with no prompt/controls
		# showing) replace the mouse pointer with a 1x1 transparent cursor
		# built from an inline XPM. Returns False so the gobject timeout
		# fires only once.
		if self.fullscreen_mode and not self.user_prompt_visible and not self.slideshow_controls_visible:
			pix_data = """/* XPM */
			static char * invisible_xpm[] = {
			"1 1 1 1",
			"       c None",
			"  "};"""
			color = gtk.gdk.Color()
			pix = gtk.gdk.pixmap_create_from_data(None, pix_data, 1, 1, 1, color, color)
			invisible = gtk.gdk.Cursor(pix, pix, color, color, 0, 0)
			self.change_cursor(invisible)
		return False
	def enter_fullscreen(self, action):
		# Toggle fullscreen: when not already fullscreen, hide all window
		# chrome (statusbars, toolbar, menubar, thumbnail pane), go
		# fullscreen and start the cursor-hiding timer; otherwise fall
		# through to leave_fullscreen.
		if not self.fullscreen_mode:
			self.fullscreen_mode = True
			self.UIManager.get_widget('/Popup/Full Screen').hide()
			self.UIManager.get_widget('/Popup/Exit Full Screen').show()
			self.statusbar.hide()
			self.statusbar2.hide()
			self.toolbar.hide()
			self.menubar.hide()
			self.thumbscroll.hide()
			self.thumbpane.hide()
			self.window.fullscreen()
			# Hide the mouse pointer after 2 seconds of inactivity:
			self.timer_id = gobject.timeout_add(2000, self.hide_cursor)
			self.set_slideshow_sensitivities()
			if self.usettings['simple_bgcolor']:
				self.layout.modify_bg(gtk.STATE_NORMAL, self.usettings['bgcolor'])
		else:
			# NOTE(review): leave_fullscreen() performs this same bgcolor
			# reset, so this block looks redundant — verify before removing.
			if self.usettings['simple_bgcolor']:
				self.layout.modify_bg(gtk.STATE_NORMAL, None)
			self.leave_fullscreen(action)
	def leave_fullscreen(self, action):
		# Exit fullscreen: dismiss the slideshow control windows, restore
		# whichever chrome the user's settings say should be visible,
		# restore the normal cursor and background color.
		if self.fullscreen_mode:
			self.slideshow_controls_visible = False
			self.slideshow_window.hide_all()
			self.slideshow_window2.hide_all()
			self.fullscreen_mode = False
			self.UIManager.get_widget('/Popup/Full Screen').show()
			self.UIManager.get_widget('/Popup/Exit Full Screen').hide()
			if self.usettings['toolbar_show']:
				self.toolbar.show()
			self.menubar.show()
			if self.usettings['statusbar_show']:
				self.statusbar.show()
				self.statusbar2.show()
			if self.usettings['thumbpane_show']:
				self.thumbscroll.show()
				self.thumbpane.show()
				# Thumbnails were not refreshed while fullscreen:
				self.thumbpane_update_images(False, self.curr_img_in_list)
			self.window.unfullscreen()
			self.change_cursor(None)
			self.set_slideshow_sensitivities()
			if self.usettings['simple_bgcolor']:
				self.layout.modify_bg(gtk.STATE_NORMAL, None)
def toggle_status_bar(self, action):
if self.statusbar.get_property('visible'):
self.statusbar.hide()
self.statusbar2.hide()
self.usettings['statusbar_show'] = False
else:
self.statusbar.show()
self.statusbar2.show()
self.usettings['statusbar_show'] = True
if self.image_loaded and self.last_image_action_was_fit:
if self.last_image_action_was_smart_fit:
self.zoom_to_fit_or_1_to_1(None, False, False)
else:
self.zoom_to_fit_window(None, False, False)
	def toggle_thumbpane(self, action):
		# Toggle the thumbnail pane; when showing it, kick off an idle
		# refresh of the thumbnails, then re-fit the image if it was last
		# sized to fit the window.
		if self.thumbscroll.get_property('visible'):
			self.thumbscroll.hide()
			self.thumbpane.hide()
			self.usettings['thumbpane_show'] = False
		else:
			self.thumbscroll.show()
			self.thumbpane.show()
			self.usettings['thumbpane_show'] = True
			self.stop_now = False
			gobject.idle_add(self.thumbpane_update_images, True, self.curr_img_in_list)
		if self.image_loaded and self.last_image_action_was_fit:
			if self.last_image_action_was_smart_fit:
				self.zoom_to_fit_or_1_to_1(None, False, False)
			else:
				self.zoom_to_fit_window(None, False, False)
def toggle_toolbar(self, action):
if self.toolbar.get_property('visible'):
self.toolbar.hide()
self.usettings['toolbar_show'] = False
else:
self.toolbar.show()
self.usettings['toolbar_show'] = True
if self.image_loaded and self.last_image_action_was_fit:
if self.last_image_action_was_smart_fit:
self.zoom_to_fit_or_1_to_1(None, False, False)
else:
self.zoom_to_fit_window(None, False, False)
	def update_statusbar(self):
		# Refresh both statusbar sections: file name, original dimensions,
		# size (bytes/1000, shown as KB) and zoom percentage on the left;
		# background activity (custom actions / scanning) on the right.
		# Update status bar:
		try:
			st = os.stat(self.currimg.name)
			filesize = st[stat.ST_SIZE]/1000
			ratio = int(100 * self.currimg.zoomratio)
			status_text = os.path.basename(self.currimg.name)+ ": " + str(self.currimg.pixbuf_original.get_width()) + "x" + str(self.currimg.pixbuf_original.get_height()) + "   " + str(filesize) + "KB   " + str(ratio) + "%   "
		except:
			# stat or pixbuf access failed — show a generic message:
			status_text=_("Cannot load image.")
		self.statusbar.push(self.statusbar.get_context_id(""), status_text)
		status_text = ""
		if self.running_custom_actions:
			status_text = _('Custom actions: %(current)i of %(total)i') % {'current': self.curr_custom_action,'total': self.num_custom_actions}
		elif self.searching_for_images:
			status_text = _('Scanning...')
		self.statusbar2.push(self.statusbar2.get_context_id(""), status_text)
	def show_custom_actions(self, action):
		# Build and run the "Configure Custom Actions" dialog: a treeview
		# of the defined actions (batch flag / name / shortcut) with
		# add/edit/remove/reorder buttons and usage help, then rebuild the
		# custom actions menu from the (possibly changed) settings.
		self.actions_dialog = gtk.Dialog(title=_("Configure Custom Actions"), parent=self.window)
		self.actions_dialog.set_has_separator(False)
		self.actions_dialog.set_resizable(False)
		table_actions = gtk.Table(13, 2, False)
		table_actions.attach(gtk.Label(), 1, 2, 1, 2, gtk.FILL|gtk.EXPAND, gtk.FILL|gtk.EXPAND, 0, 0)
		actionscrollwindow = gtk.ScrolledWindow()
		self.actionstore = gtk.ListStore(str, str, str)
		self.actionwidget = gtk.TreeView()
		self.actionwidget.set_enable_search(False)
		self.actionwidget.set_rules_hint(True)
		# Double-clicking a row opens the edit dialog:
		self.actionwidget.connect('row-activated', self.edit_custom_action2)
		actionscrollwindow.add(self.actionwidget)
		actionscrollwindow.set_shadow_type(gtk.SHADOW_IN)
		actionscrollwindow.set_policy(gtk.POLICY_AUTOMATIC, gtk.POLICY_AUTOMATIC)
		actionscrollwindow.set_size_request(500, 200)
		self.actionwidget.set_model(self.actionstore)
		self.cell = gtk.CellRendererText()
		self.cellbool = gtk.CellRendererPixbuf()
		self.tvcolumn0 = gtk.TreeViewColumn(_("Batch"))
		self.tvcolumn1 = gtk.TreeViewColumn(_("Action"), self.cell, markup=0)
		self.tvcolumn2 = gtk.TreeViewColumn(_("Shortcut"))
		self.tvcolumn1.set_max_width(self.actionwidget.size_request()[0] - self.tvcolumn0.get_width() - self.tvcolumn2.get_width())
		self.actionwidget.append_column(self.tvcolumn0)
		self.actionwidget.append_column(self.tvcolumn1)
		self.actionwidget.append_column(self.tvcolumn2)
		self.populate_treeview()
		if len(self.usettings['action_names']) > 0:
			self.actionwidget.get_selection().select_path(0)
		vbox_actions = gtk.VBox()
		# Stock-icon-only buttons (the label text is blanked out):
		addbutton = gtk.Button("", gtk.STOCK_ADD)
		addbutton.get_child().get_child().get_children()[1].set_text('')
		addbutton.connect('clicked', self.add_custom_action, self.actionwidget)
		addbutton.set_tooltip_text(_("Add action"))
		editbutton = gtk.Button("", gtk.STOCK_EDIT)
		editbutton.get_child().get_child().get_children()[1].set_text('')
		editbutton.connect('clicked', self.edit_custom_action, self.actionwidget)
		editbutton.set_tooltip_text(_("Edit selected action."))
		removebutton = gtk.Button("", gtk.STOCK_REMOVE)
		removebutton.get_child().get_child().get_children()[1].set_text('')
		removebutton.connect('clicked', self.remove_custom_action)
		removebutton.set_tooltip_text(_("Remove selected action."))
		upbutton = gtk.Button("", gtk.STOCK_GO_UP)
		upbutton.get_child().get_child().get_children()[1].set_text('')
		upbutton.connect('clicked', self.custom_action_move_up, self.actionwidget)
		upbutton.set_tooltip_text(_("Move selected action up."))
		downbutton = gtk.Button("", gtk.STOCK_GO_DOWN)
		downbutton.get_child().get_child().get_children()[1].set_text('')
		downbutton.connect('clicked', self.custom_action_move_down, self.actionwidget)
		downbutton.set_tooltip_text(_("Move selected action down."))
		vbox_buttons = gtk.VBox()
		# Help text for the %F/%P/%N/%E/%L parameters and [NEXT]/[PREV]:
		propertyinfo = gtk.Label()
		propertyinfo.set_markup('<small>' + _("Parameters") + ':\n<span font_family="Monospace">%F</span> - ' + _("File path, name, and extension") + '\n<span font_family="Monospace">%P</span> - ' + _("File path") + '\n<span font_family="Monospace">%N</span> - ' + _("File name without file extension") + '\n<span font_family="Monospace">%E</span> - ' + _("File extension (i.e. \".png\")") + '\n<span font_family="Monospace">%L</span> - ' + _("List of files, space-separated") + '</small>')
		propertyinfo.set_alignment(0, 0)
		actioninfo = gtk.Label()
		actioninfo.set_markup('<small>' + _("Operations") + ':\n<span font_family="Monospace">[NEXT]</span> - ' + _("Go to next image") + '\n<span font_family="Monospace">[PREV]</span> - ' + _("Go to previous image") +'</small>')
		actioninfo.set_alignment(0, 0)
		hbox_info = gtk.HBox()
		hbox_info.pack_start(propertyinfo, False, False, 15)
		hbox_info.pack_start(actioninfo, False, False, 15)
		vbox_buttons.pack_start(addbutton, False, False, 5)
		vbox_buttons.pack_start(editbutton, False, False, 5)
		vbox_buttons.pack_start(removebutton, False, False, 5)
		vbox_buttons.pack_start(upbutton, False, False, 5)
		vbox_buttons.pack_start(downbutton, False, False, 0)
		hbox_top = gtk.HBox()
		hbox_top.pack_start(actionscrollwindow, True, True, 5)
		hbox_top.pack_start(vbox_buttons, False, False, 5)
		vbox_actions.pack_start(hbox_top, True, True, 5)
		vbox_actions.pack_start(hbox_info, False, False, 5)
		hbox_instructions = gtk.HBox()
		info_image = gtk.Image()
		info_image.set_from_stock(gtk.STOCK_DIALOG_INFO, gtk.ICON_SIZE_BUTTON)
		hbox_instructions.pack_start(info_image, False, False, 5)
		instructions = gtk.Label(_("Here you can define custom actions with shortcuts. Actions use the built-in parameters and operations listed below and can have multiple statements separated by a semicolon. Batch actions apply to all images in the list."))
		instructions.set_line_wrap(True)
		instructions.set_alignment(0, 0.5)
		hbox_instructions.pack_start(instructions, False, False, 5)
		table_actions.attach(hbox_instructions, 1, 3, 2, 3, gtk.FILL|gtk.EXPAND, gtk.FILL|gtk.EXPAND, 5, 0)
		table_actions.attach(gtk.Label(), 1, 3, 3, 4, gtk.FILL|gtk.EXPAND, gtk.FILL|gtk.EXPAND, 15, 0)
		table_actions.attach(vbox_actions, 1, 3, 4, 12, gtk.FILL|gtk.EXPAND, gtk.FILL|gtk.EXPAND, 15, 0)
		table_actions.attach(gtk.Label(), 1, 3, 12, 13, gtk.FILL|gtk.EXPAND, gtk.FILL|gtk.EXPAND, 15, 0)
		self.actions_dialog.vbox.pack_start(table_actions, False, False, 0)
		# Show dialog:
		self.actions_dialog.vbox.show_all()
		instructions.set_size_request(self.actions_dialog.size_request()[0]-50, -1)
		close_button = self.actions_dialog.add_button(gtk.STOCK_CLOSE, gtk.RESPONSE_CLOSE)
		close_button.grab_focus()
		self.actions_dialog.run()
		# Rebuild the menu so new/edited actions become usable immediately:
		self.refresh_custom_actions_menu()
		while gtk.events_pending():
			gtk.main_iteration()
		if len(self.image_list) == 0:
			self.set_image_sensitivities(False)
		self.actions_dialog.destroy()
	def add_custom_action(self, button, treeview):
		# "Add" button handler: open the action dialog with empty values.
		self.open_custom_action_dialog(True, '', '', 'None', False, treeview)
	def edit_custom_action2(self, treeview, path, view_column):
		# Row double-click handler: same as pressing the Edit button.
		self.edit_custom_action(None, treeview)
	def edit_custom_action(self, button, treeview):
		# "Edit" button handler: open the action dialog pre-filled with
		# the selected row's values (no-op if nothing is selected).
		(model, iter) = self.actionwidget.get_selection().get_selected()
		if iter != None:
			(row, ) = self.actionstore.get_path(iter)
			self.open_custom_action_dialog(False, self.usettings['action_names'][row], self.usettings['action_commands'][row], self.usettings['action_shortcuts'][row], self.usettings['action_batch'][row], treeview)
	def open_custom_action_dialog(self, add_call, name, command, shortcut, batch, treeview):
		# Build and run the Add/Edit Custom Action dialog with fields for
		# name, command, shortcut (captured via a button) and batch mode.
		# add_call selects Add vs Edit behavior in the response handler.
		if add_call:
			self.dialog_name = gtk.Dialog(_("Add Custom Action"), self.actions_dialog, gtk.DIALOG_MODAL, (gtk.STOCK_CANCEL, gtk.RESPONSE_REJECT, gtk.STOCK_OK, gtk.RESPONSE_ACCEPT))
		else:
			self.dialog_name = gtk.Dialog(_("Edit Custom Action"), self.actions_dialog, gtk.DIALOG_MODAL, (gtk.STOCK_CANCEL, gtk.RESPONSE_REJECT, gtk.STOCK_OK, gtk.RESPONSE_ACCEPT))
		self.dialog_name.set_modal(True)
		table = gtk.Table(2, 4, False)
		action_name_label = gtk.Label(_("Action Name:"))
		action_name_label.set_alignment(0, 0.5)
		action_command_label = gtk.Label(_("Command:"))
		action_command_label.set_alignment(0, 0.5)
		shortcut_label = gtk.Label(_("Shortcut:"))
		shortcut_label.set_alignment(0, 0.5)
		table.attach(action_name_label, 0, 1, 0, 1, gtk.FILL|gtk.EXPAND, gtk.FILL|gtk.EXPAND, 15, 0)
		table.attach(action_command_label, 0, 1, 1, 2, gtk.FILL|gtk.EXPAND, gtk.FILL|gtk.EXPAND, 15, 0)
		table.attach(shortcut_label, 0, 1, 2, 3, gtk.FILL|gtk.EXPAND, gtk.FILL|gtk.EXPAND, 15, 0)
		action_name = gtk.Entry()
		action_name.set_text(name)
		action_command = gtk.Entry()
		action_command.set_text(command)
		table.attach(action_name, 1, 2, 0, 1, gtk.FILL|gtk.EXPAND, gtk.FILL|gtk.EXPAND, 15, 0)
		table.attach(action_command, 1, 2, 1, 2, gtk.FILL|gtk.EXPAND, gtk.FILL|gtk.EXPAND, 15, 0)
		# Clicking the shortcut button starts key capture:
		self.shortcut = gtk.Button(shortcut)
		self.shortcut.connect('clicked', self.shortcut_clicked)
		table.attach(self.shortcut, 1, 2, 2, 3, gtk.FILL|gtk.EXPAND, gtk.FILL|gtk.EXPAND, 15, 0)
		batchmode = gtk.CheckButton(_("Perform action on all images (Batch)"))
		batchmode.set_active(batch)
		table.attach(batchmode, 0, 2, 3, 4, gtk.FILL|gtk.EXPAND, gtk.FILL|gtk.EXPAND, 15, 0)
		self.dialog_name.vbox.pack_start(table, False, False, 5)
		self.dialog_name.vbox.show_all()
		self.dialog_name.connect('response', self.dialog_name_response, add_call, action_name, action_command, self.shortcut, batchmode, treeview)
		self.dialog_name.run()
	def dialog_name_response(self, dialog, response, add_call, action_name, action_command, shortcut, batchmode, treeview):
		# Response handler for the Add/Edit Custom Action dialog: validate
		# the input, then append (add_call) or overwrite the selected row
		# in the four parallel action lists and refresh the treeview.
		if response == gtk.RESPONSE_ACCEPT:
			# All three of name, command and shortcut must be provided:
			if not (action_command.get_text() == "" or action_name.get_text() == "" or self.shortcut.get_label() == "None"):
				name = action_name.get_text()
				command = action_command.get_text()
				# [NEXT]/[PREV] are only allowed at the end of the command:
				if ((("[NEXT]" in command.strip()) and command.strip()[-6:] != "[NEXT]") or (("[PREV]" in command.strip()) and command.strip()[-6:] != "[PREV]") ):
					error_dialog = gtk.MessageDialog(self.actions_dialog, gtk.DIALOG_MODAL, gtk.MESSAGE_WARNING, gtk.BUTTONS_CLOSE, _('[PREV] and [NEXT] are only valid alone or at the end of the command'))
					error_dialog.set_title(_("Invalid Custom Action"))
					error_dialog.run()
					error_dialog.destroy()
					return
				shortcut = shortcut.get_label()
				batch = batchmode.get_active()
				dialog.destroy()
				if add_call:
					self.usettings['action_names'].append(name)
					self.usettings['action_commands'].append(command)
					self.usettings['action_shortcuts'].append(shortcut)
					self.usettings['action_batch'].append(batch)
				else:
					(model, iter) = self.actionwidget.get_selection().get_selected()
					(rownum, ) = self.actionstore.get_path(iter)
					self.usettings['action_names'][rownum] = name
					self.usettings['action_commands'][rownum] = command
					self.usettings['action_shortcuts'][rownum] = shortcut
					self.usettings['action_batch'][rownum] = batch
				self.populate_treeview()
				if add_call:
					rownum = len(self.usettings['action_names'])-1
				treeview.get_selection().select_path(rownum)
				while gtk.events_pending():
					gtk.main_iteration()
				# Keep item in visible rect:
				visible_rect = treeview.get_visible_rect()
				row_rect = treeview.get_background_area(rownum, self.tvcolumn1)
				if row_rect.y + row_rect.height > visible_rect.height:
					top_coord = (row_rect.y + row_rect.height - visible_rect.height) + visible_rect.y
					treeview.scroll_to_point(-1, top_coord)
				elif row_rect.y < 0:
					treeview.scroll_to_cell(rownum)
			else:
				error_dialog = gtk.MessageDialog(self.actions_dialog, gtk.DIALOG_MODAL, gtk.MESSAGE_WARNING, gtk.BUTTONS_CLOSE, _('Incomplete custom action specified.'))
				error_dialog.set_title(_("Invalid Custom Action"))
				error_dialog.run()
				error_dialog.destroy()
		else:
			dialog.destroy()
def custom_action_move_down(self, button, treeview):
iter = None
selection = treeview.get_selection()
model, iter = selection.get_selected()
if iter:
rownum = int(model.get_string_from_iter(iter))
if rownum < len(self.usettings['action_names'])-1:
# Move item down:
temp_name = self.usettings['action_names'][rownum]
temp_shortcut = self.usettings['action_shortcuts'][rownum]
temp_command = self.usettings['action_commands'][rownum]
temp_batch = self.usettings['action_batch'][rownum]
self.usettings['action_names'][rownum] = self.usettings['action_names'][rownum+1]
self.usettings['action_shortcuts'][rownum] = self.usettings['action_shortcuts'][rownum+1]
self.usettings['action_commands'][rownum] = self.usettings['action_commands'][rownum+1]
self.usettings['action_batch'][rownum] = self.usettings['action_batch'][rownum+1]
self.usettings['action_names'][rownum+1] = temp_name
self.usettings['action_shortcuts'][rownum+1] = temp_shortcut
self.usettings['action_commands'][rownum+1] = temp_command
self.usettings['action_batch'][rownum+1] = temp_batch
# Repopulate treeview and keep item selected:
self.populate_treeview()
selection.select_path((rownum+1,))
while gtk.events_pending():
gtk.main_iteration()
# Keep item in visible rect:
rownum = rownum + 1
visible_rect = treeview.get_visible_rect()
row_rect = treeview.get_background_area(rownum, self.tvcolumn1)
if row_rect.y + row_rect.height > visible_rect.height:
top_coord = (row_rect.y + row_rect.height - visible_rect.height) + visible_rect.y
treeview.scroll_to_point(-1, top_coord)
elif row_rect.y < 0:
treeview.scroll_to_cell(rownum)
def custom_action_move_up(self, button, treeview):
iter = None
selection = treeview.get_selection()
model, iter = selection.get_selected()
if iter:
rownum = int(model.get_string_from_iter(iter))
if rownum > 0:
# Move item down:
temp_name = self.usettings['action_names'][rownum]
temp_shortcut = self.usettings['action_shortcuts'][rownum]
temp_command = self.usettings['action_commands'][rownum]
temp_batch = self.usettings['action_batch'][rownum]
self.usettings['action_names'][rownum] = self.usettings['action_names'][rownum-1]
self.usettings['action_shortcuts'][rownum] = self.usettings['action_shortcuts'][rownum-1]
self.usettings['action_commands'][rownum] = self.usettings['action_commands'][rownum-1]
self.usettings['action_batch'][rownum] = self.usettings['action_batch'][rownum-1]
self.usettings['action_names'][rownum-1] = temp_name
self.usettings['action_shortcuts'][rownum-1] = temp_shortcut
self.usettings['action_commands'][rownum-1] = temp_command
self.usettings['action_batch'][rownum-1] = temp_batch
# Repopulate treeview and keep item selected:
self.populate_treeview()
selection.select_path((rownum-1,))
while gtk.events_pending():
gtk.main_iteration()
# Keep item in visible rect:
rownum = rownum - 1
visible_rect = treeview.get_visible_rect()
row_rect = treeview.get_background_area(rownum, self.tvcolumn1)
if row_rect.y + row_rect.height > visible_rect.height:
top_coord = (row_rect.y + row_rect.height - visible_rect.height) + visible_rect.y
treeview.scroll_to_point(-1, top_coord)
elif row_rect.y < 0:
treeview.scroll_to_cell(rownum)
def shortcut_clicked(self, widget):
self.dialog_shortcut = gtk.Dialog(_("Action Shortcut"), self.dialog_name, gtk.DIALOG_MODAL, (gtk.STOCK_CANCEL, gtk.RESPONSE_REJECT))
self.shortcut_label = gtk.Label(_("Press the desired shortcut for the action."))
hbox = gtk.HBox()
hbox.pack_start(self.shortcut_label, False, False, 15)
self.dialog_shortcut.vbox.pack_start(hbox, False, False, 5)
self.dialog_shortcut.vbox.show_all()
self.dialog_shortcut.connect('key-press-event', self.shortcut_keypress)
self.dialog_shortcut.run()
self.dialog_shortcut.destroy()
def shortcut_keypress(self, widget, event):
shortcut = gtk.accelerator_name(event.keyval, event.state)
if "<Mod2>" in shortcut:
shortcut = shortcut.replace("<Mod2>", "")
if shortcut[(len(shortcut)-2):len(shortcut)] != "_L" and shortcut[(len(shortcut)-2):len(shortcut)] != "_R":
# Validate to make sure the shortcut hasn't already been used:
for i in range(len(self.keys)):
if shortcut == self.keys[i][1]:
error_dialog = gtk.MessageDialog(self.dialog_shortcut, gtk.DIALOG_MODAL, gtk.MESSAGE_WARNING, gtk.BUTTONS_CLOSE, _('The shortcut \'%(shortcut)s\' is already used for \'%(key)s\'.') % {'shortcut': shortcut, 'key': self.keys[i][0]})
error_dialog.set_title(_("Invalid Shortcut"))
error_dialog.run()
error_dialog.destroy()
return
for i in range(len(self.usettings['action_shortcuts'])):
if shortcut == self.usettings['action_shortcuts'][i]:
error_dialog = gtk.MessageDialog(self.dialog_shortcut, gtk.DIALOG_MODAL, gtk.MESSAGE_WARNING, gtk.BUTTONS_CLOSE, _('The shortcut \'%(shortcut)s\' is already used for \'%(key)s\'.') % {'shortcut': shortcut, 'key': self.usettings['action_names'][i]})
error_dialog.set_title(_("Invalid Shortcut"))
error_dialog.run()
error_dialog.destroy()
return
self.shortcut.set_label(shortcut)
widget.destroy()
def remove_custom_action(self, button):
(model, iter) = self.actionwidget.get_selection().get_selected()
if iter != None:
(row, ) = self.actionstore.get_path(iter)
self.usettings['action_names'].pop(row)
self.usettings['action_shortcuts'].pop(row)
self.usettings['action_commands'].pop(row)
self.usettings['action_batch'].pop(row)
self.populate_treeview()
self.actionwidget.grab_focus()
def populate_treeview(self):
self.actionstore.clear()
for i in range(len(self.usettings['action_names'])):
if self.usettings['action_batch'][i]:
pb = gtk.STOCK_APPLY
else:
pb = None
self.actionstore.append([pb, '<big><b>' + self.usettings['action_names'][i].replace('&','&') + '</b></big>\n<small>' + self.usettings['action_commands'][i].replace('&','&') + '</small>', self.usettings['action_shortcuts'][i]])
self.tvcolumn0.clear()
self.tvcolumn1.clear()
self.tvcolumn2.clear()
self.tvcolumn0.pack_start(self.cellbool)
self.tvcolumn1.pack_start(self.cell)
self.tvcolumn2.pack_start(self.cell)
self.tvcolumn0.add_attribute(self.cellbool, "stock-id", 0)
self.tvcolumn1.set_attributes(self.cell, markup=1)
self.tvcolumn2.set_attributes(self.cell, text=2)
self.tvcolumn1.set_expand(True)
def screenshot(self, action):
cancel = self.autosave_image()
if cancel:
return
# Dialog:
dialog = gtk.Dialog(_("Screenshot"), self.window, gtk.DIALOG_MODAL, (gtk.STOCK_CANCEL, gtk.RESPONSE_REJECT))
snapbutton = dialog.add_button(_("_Snap"), gtk.RESPONSE_ACCEPT)
snapimage = gtk.Image()
snapimage.set_from_stock(gtk.STOCK_OK, gtk.ICON_SIZE_BUTTON)
snapbutton.set_image(snapimage)
loc = gtk.Label()
loc.set_markup('<b>' + _('Location') + '</b>')
loc.set_alignment(0, 0)
area = gtk.RadioButton()
area1 = gtk.RadioButton(group=area, label=_("Entire screen"))
area2 = gtk.RadioButton(group=area, label=_("Window under pointer"))
if not HAS_XMOUSE:
area2.set_sensitive(False)
area1.set_active(True)
de = gtk.Label()
de.set_markup('<b>' + _("Delay") + '</b>')
de.set_alignment(0, 0)
delaybox = gtk.HBox()
adj = gtk.Adjustment(self.usettings['screenshot_delay'], 0, 30, 1, 10, 0)
delay = gtk.SpinButton(adj, 0, 0)
delay.set_numeric(True)
delay.set_update_policy(gtk.UPDATE_IF_VALID)
delay.set_wrap(False)
delaylabel = gtk.Label(_(" seconds"))
delaybox.pack_start(delay, False)
delaybox.pack_start(delaylabel, False)
table = gtk.Table()
table.attach(gtk.Label(), 1, 2, 1, 2, gtk.FILL|gtk.EXPAND, gtk.FILL|gtk.EXPAND, 0, 0)
table.attach(loc, 1, 2, 2, 3, gtk.FILL|gtk.EXPAND, gtk.FILL|gtk.EXPAND, 15, 0)
table.attach(gtk.Label(), 1, 2, 3, 4, gtk.FILL|gtk.EXPAND, gtk.FILL|gtk.EXPAND, 0, 0)
table.attach(area1, 1, 2, 4, 5, gtk.FILL|gtk.EXPAND, gtk.FILL|gtk.EXPAND, 30, 0)
table.attach(area2, 1, 2, 5, 6, gtk.FILL|gtk.EXPAND, gtk.FILL|gtk.EXPAND, 30, 0)
table.attach(gtk.Label(), 1, 2, 6, 7, gtk.FILL|gtk.EXPAND, gtk.FILL|gtk.EXPAND, 30, 0)
table.attach(de, 1, 2, 7, 8, gtk.FILL|gtk.EXPAND, gtk.FILL|gtk.EXPAND, 15, 0)
table.attach(gtk.Label(), 1, 2, 8, 9, gtk.FILL|gtk.EXPAND, gtk.FILL|gtk.EXPAND, 30, 0)
table.attach(delaybox, 1, 2, 9, 10, gtk.FILL|gtk.EXPAND, gtk.FILL|gtk.EXPAND, 30, 0)
table.attach(gtk.Label(), 1, 2, 10, 11, gtk.FILL|gtk.EXPAND, gtk.FILL|gtk.EXPAND, 30, 0)
dialog.vbox.pack_start(table)
dialog.set_default_response(gtk.RESPONSE_ACCEPT)
dialog.vbox.show_all()
response = dialog.run()
if response == gtk.RESPONSE_ACCEPT:
dialog.destroy()
while gtk.events_pending():
gtk.main_iteration()
self.usettings['screenshot_delay'] = delay.get_value_as_int()
gobject.timeout_add(int(self.usettings['screenshot_delay']*1000), self._screenshot_grab, area1.get_active())
else:
dialog.destroy()
def _screenshot_grab(self, entire_screen):
root_win = gtk.gdk.get_default_root_window()
if entire_screen:
x = 0
y = 0
width = gtk.gdk.screen_width()
height = gtk.gdk.screen_height()
else:
(x, y, width, height) = xmouse.geometry()
pix = gtk.gdk.Pixbuf(gtk.gdk.COLORSPACE_RGB, True, 8, width, height)
pix = pix.get_from_drawable(root_win, gtk.gdk.colormap_get_system(), x, y, 0, 0, width, height)
# Save as /tmp/mirage-<random>/filename.ext
tmpdir = tempfile.mkdtemp(prefix="mirage-") + "/"
tmpfile = tmpdir + "screenshot.png"
pix.save(tmpfile, 'png')
# Load file:
self.image_list = [tmpfile]
self.curr_img_in_list = 0
gobject.idle_add(self.load_new_image2, False, False, False, False, True)
self.update_statusbar()
self.set_go_navigation_sensitivities(False)
self.set_slideshow_sensitivities()
self.thumbpane_update_images(True, self.curr_img_in_list)
del pix
self.window.present()
def show_properties(self, action):
show_props = gtk.Dialog(_("Properties"), self.window)
show_props.set_has_separator(False)
show_props.set_resizable(False)
table = gtk.Table(3, 4, False)
image = gtk.Image()
animtest = gtk.gdk.PixbufAnimation(self.currimg.name)
image_is_anim = False
if animtest.is_static_image():
pixbuf, image_width, image_height = self.get_pixbuf_of_size(self.currimg.pixbuf_original, 180, self.zoom_quality)
else:
pixbuf, image_width, image_height = self.get_pixbuf_of_size(animtest.get_static_image(), 180, self.zoom_quality)
image_is_anim = True
image.set_from_pixbuf(self.pixbuf_add_border(pixbuf))
# The generic info
vbox_left = gtk.VBox()
title = gtk.Label(_("Generic:"))
title.set_alignment(1, 1)
filename = gtk.Label(_("File name:"))
filename.set_alignment(1, 1)
filedate = gtk.Label(_("File modified:"))
filedate.set_alignment(1, 1)
imagesize = gtk.Label(_("Dimensions:"))
imagesize.set_alignment(1, 1)
filesize = gtk.Label(_("File size:"))
filesize.set_alignment(1, 1)
filetype = gtk.Label(_("File type:"))
filetype.set_alignment(1, 1)
transparency = gtk.Label(_("Transparency:"))
transparency.set_alignment(1, 1)
animation = gtk.Label(_("Animation:"))
animation.set_alignment(1, 1)
bits = gtk.Label(_("Bits per sample:"))
bits.set_alignment(1, 1)
channels = gtk.Label(_("Channels:"))
channels.set_alignment(1, 1)
vbox_left.pack_start(title, False, False, 2)
vbox_left.pack_start(filename, False, False, 2)
vbox_left.pack_start(filedate, False, False, 2)
vbox_left.pack_start(imagesize, False, False, 2)
vbox_left.pack_start(filesize, False, False, 2)
vbox_left.pack_start(filetype, False, False, 2)
vbox_left.pack_start(transparency, False, False, 2)
vbox_left.pack_start(animation, False, False, 2)
vbox_left.pack_start(bits, False, False, 2)
vbox_left.pack_start(channels, False, False, 2)
vbox_right = gtk.VBox()
filestat = os.stat(self.currimg.name)
filename2 = gtk.Label(os.path.basename(self.currimg.name))
filedate2 = gtk.Label(time.strftime('%Y/%m/%d %H:%M', time.localtime(filestat[stat.ST_MTIME])))
imagesize2 = gtk.Label(str(self.currimg.pixbuf_original.get_width()) + "x" + str(self.currimg.pixbuf_original.get_height()))
filetype2 = gtk.Label(gtk.gdk.pixbuf_get_file_info(self.currimg.name)[0]['mime_types'][0])
filesize2 = gtk.Label(str(filestat[stat.ST_SIZE]/1000) + "KB")
if not image_is_anim and pixbuf.get_has_alpha():
transparency2 = gtk.Label(_("Yes"))
else:
transparency2 = gtk.Label(_("No"))
if animtest.is_static_image():
animation2 = gtk.Label(_("No"))
else:
animation2 = gtk.Label(_("Yes"))
bits2 = gtk.Label(str(pixbuf.get_bits_per_sample()))
channels2 = gtk.Label(str(pixbuf.get_n_channels()))
filename2.set_alignment(0, 1)
filedate2.set_alignment(0, 1)
imagesize2.set_alignment(0, 1)
filesize2.set_alignment(0, 1)
filetype2.set_alignment(0, 1)
transparency2.set_alignment(0, 1)
animation2.set_alignment(0, 1)
bits2.set_alignment(0, 1)
channels2.set_alignment(0, 1)
empty = gtk.Label(" ") # An empty label to align the rows correctly
vbox_right.pack_start(empty, False, False, 2)
vbox_right.pack_start(filename2, False, False, 2)
vbox_right.pack_start(filedate2, False, False, 2)
vbox_right.pack_start(imagesize2, False, False, 2)
vbox_right.pack_start(filesize2, False, False, 2)
vbox_right.pack_start(filetype2, False, False, 2)
vbox_right.pack_start(transparency2, False, False, 2)
vbox_right.pack_start(animation2, False, False, 2)
vbox_right.pack_start(bits2, False, False, 2)
vbox_right.pack_start(channels2, False, False, 2)
hbox = gtk.HBox()
hbox.pack_start(vbox_left, False, False, 3)
hbox.pack_start(vbox_right, False, False, 3)
includes_exif = False
if HAS_EXIF:
exifd = pyexiv2.ImageMetadata(self.currimg.name)
exifd.read()
if ([x for x in exifd.exif_keys if "Exif.Photo" in x]):
includes_exif = True
# The exif data
exif_lbox = gtk.VBox()
exif_title = gtk.Label(_("Exifdata"))
exif_title.set_alignment(1,1)
#for line alignment
exif_vbox = gtk.VBox()
exif_empty = gtk.Label(" ")
expo_l, expo_v = self.exif_return_label(exifd, _("Exposure time:"), _("%s sec"),"Exif.Photo.ExposureTime", "rat_frac")
aperture_l, aperture_v = self.exif_return_label(exifd, _("Aperture:"), _("%s"),"Exif.Photo.FNumber", "rat_float")
focal_l, focal_v = self.exif_return_label(exifd, _("Focal length:"), _("%s mm"),"Exif.Photo.FocalLength", "rat_int")
date_l, date_v = self.exif_return_label(exifd, _("Time taken:"), _("%s"),"Exif.Photo.DateTimeOriginal", "str")
ISO_l, ISO_v = self.exif_return_label(exifd, _("ISO Speed:"), _("%s"),"Exif.Photo.ISOSpeedRatings", "int")
bias_l, bias_v = self.exif_return_label(exifd, _("Exposure bias:"), _("%s"),"Exif.Photo.ExposureBiasValue", "rat_frac")
model_l, model_v = self.exif_return_label(exifd, _("Camera:"), _("%s"),"Exif.Image.Model", "str")
exif_lbox.pack_start(exif_title, False, False, 2)
exif_lbox.pack_start(aperture_l, False, False, 2)
exif_lbox.pack_start(focal_l, False, False, 2)
exif_lbox.pack_start(expo_l, False, False, 2)
exif_lbox.pack_start(bias_l, False, False, 2)
exif_lbox.pack_start(ISO_l, False, False, 2)
exif_lbox.pack_start(model_l, False, False, 2)
exif_lbox.pack_start(date_l, False, False, 2)
exif_vbox.pack_start(exif_empty, False, False, 2)
exif_vbox.pack_start(aperture_v, False, False, 2)
exif_vbox.pack_start(focal_v, False, False, 2)
exif_vbox.pack_start(expo_v, False, False, 2)
exif_vbox.pack_start(bias_v, False, False, 2)
exif_vbox.pack_start(ISO_v, False, False, 2)
exif_vbox.pack_start(model_v, False, False, 2)
exif_vbox.pack_start(date_v, False, False, 2)
hbox2 = gtk.HBox()
hbox2.pack_start(exif_lbox, False, False, 2)
hbox2.pack_start(exif_vbox, False, False, 2)
#Show the box
table.attach(image, 1, 2, 1, 3, gtk.FILL|gtk.EXPAND, gtk.FILL|gtk.EXPAND, 15, 0)
table.attach(hbox, 2, 3, 1, 3, gtk.FILL|gtk.EXPAND, gtk.FILL|gtk.EXPAND, 15, 0)
if HAS_EXIF and includes_exif:
table.attach(hbox2, 3, 4, 1, 3, gtk.FILL|gtk.EXPAND, gtk.FILL|gtk.EXPAND, 15, 0)
show_props.vbox.pack_start(table, False, False, 15)
show_props.vbox.show_all()
close_button = show_props.add_button(gtk.STOCK_CLOSE, gtk.RESPONSE_CLOSE)
close_button.grab_focus()
show_props.run()
show_props.destroy()
def exif_return_label(self, exif, label_v, format, tag, type="str"):
label = gtk.Label(label_v)
label.set_alignment(1, 1)
if tag in exif.exif_keys:
raw = exif[tag].value
if type == "rat_frac":
val = Fraction(str(raw))
elif type == "rat_float":
val = raw.to_float()
elif type == "rat_int":
val = int(raw.to_float())
elif type == "int":
val = int(raw)
else:
val = raw
value = gtk.Label(format % str(val))
else:
value = gtk.Label("-")
value.set_alignment(0,1)
return label, value
def show_prefs(self, action):
prev_thumbnail_size = self.usettings['thumbnail_size']
self.prefs_dialog = gtk.Dialog(_("Mirage Preferences"), self.window)
self.prefs_dialog.set_has_separator(False)
self.prefs_dialog.set_resizable(False)
# "Interface" prefs:
table_settings = gtk.Table(14, 3, False)
bglabel = gtk.Label()
bglabel.set_markup('<b>' + _('Interface') + '</b>')
bglabel.set_alignment(0, 1)
color_hbox = gtk.HBox(False, 0)
colortext = gtk.Label(_('Background color:'))
self.colorbutton = gtk.ColorButton(self.usettings['bgcolor'])
self.colorbutton.connect('color-set', self.bgcolor_selected)
self.colorbutton.set_size_request(150, -1)
self.colorbutton.set_tooltip_text(_("Sets the background color for the application."))
color_hbox.pack_start(colortext, False, False, 0)
color_hbox.pack_start(self.colorbutton, False, False, 0)
color_hbox.pack_start(gtk.Label(), True, True, 0)
simplecolor_hbox = gtk.HBox(False, 0)
simplecolortext = gtk.Label(_('Simple background color:'))
simplecolorbutton = gtk.CheckButton()
simplecolorbutton.connect('toggled', self.simple_bgcolor_selected)
simplecolor_hbox.pack_start(simplecolortext, False, False, 0)
simplecolor_hbox.pack_start(simplecolorbutton, False, False, 0)
simplecolor_hbox.pack_start(gtk.Label(), True, True, 0)
if self.usettings['simple_bgcolor']:
simplecolorbutton.set_active(True)
fullscreen = gtk.CheckButton(_("Open Mirage in fullscreen mode"))
fullscreen.set_active(self.usettings['start_in_fullscreen'])
thumbbox = gtk.HBox()
thumblabel = gtk.Label(_("Thumbnail size:"))
thumbbox.pack_start(thumblabel, False, False, 0)
thumbsize = gtk.combo_box_new_text()
option = 0
for size in self.thumbnail_sizes:
thumbsize.append_text(size + " x " + size)
if self.usettings['thumbnail_size'] == int(size):
thumbsize.set_active(option)
option += 1
thumbbox.pack_start(thumbsize, False, False, 5)
table_settings.attach(gtk.Label(), 1, 3, 1, 2, gtk.FILL|gtk.EXPAND, gtk.FILL|gtk.EXPAND, 0, 0)
table_settings.attach(bglabel, 1, 3, 2, 3, gtk.FILL|gtk.EXPAND, gtk.FILL|gtk.EXPAND, 15, 0)
table_settings.attach(gtk.Label(), 1, 3, 3, 4, gtk.FILL|gtk.EXPAND, gtk.FILL|gtk.EXPAND, 0, 0)
table_settings.attach(simplecolor_hbox, 1, 2, 4, 5, gtk.FILL|gtk.EXPAND, gtk.FILL|gtk.EXPAND, 30, 0)
table_settings.attach(color_hbox, 1, 2, 5, 6, gtk.FILL|gtk.EXPAND, gtk.FILL|gtk.EXPAND, 30, 0)
table_settings.attach(gtk.Label(), 1, 3, 6, 7, gtk.FILL|gtk.EXPAND, gtk.FILL|gtk.EXPAND, 0, 0)
table_settings.attach(thumbbox, 1, 3, 7, 8, gtk.FILL|gtk.EXPAND, gtk.FILL|gtk.EXPAND, 30, 0)
table_settings.attach(gtk.Label(), 1, 3, 8, 9, gtk.FILL|gtk.EXPAND, gtk.FILL|gtk.EXPAND, 30, 0)
table_settings.attach(fullscreen, 1, 3, 9, 10, gtk.FILL|gtk.EXPAND, gtk.FILL|gtk.EXPAND, 30, 0)
table_settings.attach(gtk.Label(), 1, 3, 10, 11, gtk.FILL|gtk.EXPAND, gtk.FILL|gtk.EXPAND, 30, 0)
table_settings.attach(gtk.Label(), 1, 3, 11, 12, gtk.FILL|gtk.EXPAND, gtk.FILL|gtk.EXPAND, 30, 0)
table_settings.attach(gtk.Label(), 1, 3, 12, 13, gtk.FILL|gtk.EXPAND, gtk.FILL|gtk.EXPAND, 30, 0)
table_settings.attach(gtk.Label(), 1, 3, 13, 14, gtk.FILL|gtk.EXPAND, gtk.FILL|gtk.EXPAND, 30, 0)
table_settings.attach(gtk.Label(), 1, 3, 14, 15, gtk.FILL|gtk.EXPAND, gtk.FILL|gtk.EXPAND, 30, 0)
# "Behavior" tab:
table_behavior = gtk.Table(14, 2, False)
openlabel = gtk.Label()
openlabel.set_markup('<b>' + _('Open Behavior') + '</b>')
openlabel.set_alignment(0, 1)
hbox_openmode = gtk.HBox()
hbox_openmode.pack_start(gtk.Label(_('Open new image in:')), False, False, 0)
combobox = gtk.combo_box_new_text()
combobox.append_text(_("Smart Mode"))
combobox.append_text(_("Zoom To Fit Mode"))
combobox.append_text(_("1:1 Mode"))
combobox.append_text(_("Last Active Mode"))
combobox.set_active(self.usettings['open_mode'])
hbox_openmode.pack_start(combobox, False, False, 5)
openallimages = gtk.CheckButton(_("Load all images in current directory"))
openallimages.set_active(self.usettings['open_all_images'])
openallimages.set_tooltip_text(_("If enabled, opening an image in Mirage will automatically load all images found in that image's directory."))
hiddenimages = gtk.CheckButton(_("Allow loading hidden files"))
hiddenimages.set_active(self.usettings['open_hidden_files'])
hiddenimages.set_tooltip_text(_("If checked, Mirage will open hidden files. Otherwise, hidden files will be ignored."))
#Numacomp sorting options
usenumacomp = gtk.CheckButton(_("Use Numerical aware sort"))
usenumacomp.set_active(self.usettings['use_numacomp'])
usenumacomp.set_tooltip_text(_("If checked, Mirage will sort the images based on a numerical aware sort."))
usenumacomp.set_sensitive(HAVE_NUMACOMP)
case_numacomp = gtk.CheckButton(_("Casesensitive sort"))
case_numacomp.set_active(self.usettings['case_numacomp'])
case_numacomp.set_tooltip_text(_("If checked, a case-sensitive sort will be used"))
case_numacomp.set_sensitive(usenumacomp.get_active())
usenumacomp.connect('toggled', self.toggle_sensitivy_of_other,case_numacomp)
openpref = gtk.RadioButton()
openpref1 = gtk.RadioButton(group=openpref, label=_("Use last chosen directory"))
openpref1.set_tooltip_text(_("The default 'Open' directory will be the last directory used."))
openpref2 = gtk.RadioButton(group=openpref, label=_("Use this fixed directory:"))
openpref2.connect('toggled', self.prefs_use_fixed_dir_clicked)
openpref2.set_tooltip_text(_("The default 'Open' directory will be this specified directory."))
hbox_defaultdir = gtk.HBox()
self.defaultdir = gtk.Button()
hbox_defaultdir.pack_start(gtk.Label(), True, True, 0)
hbox_defaultdir.pack_start(self.defaultdir, False, False, 0)
hbox_defaultdir.pack_start(gtk.Label(), True, True, 0)
if len(self.usettings['fixed_dir']) > 25:
self.defaultdir.set_label('...' + self.usettings['fixed_dir'][-22:])
else:
self.defaultdir.set_label(self.usettings['fixed_dir'])
self.defaultdir.connect('clicked', self.defaultdir_clicked)
self.defaultdir.set_size_request(250, -1)
if self.usettings['use_last_dir']:
openpref1.set_active(True)
self.defaultdir.set_sensitive(False)
else:
openpref2.set_active(True)
self.defaultdir.set_sensitive(True)
table_behavior.attach(gtk.Label(), 1, 2, 1, 2, gtk.FILL|gtk.EXPAND, gtk.FILL|gtk.EXPAND, 0, 0)
table_behavior.attach(openlabel, 1, 2, 2, 3, gtk.FILL|gtk.EXPAND, gtk.FILL|gtk.EXPAND, 15, 0)
table_behavior.attach(gtk.Label(), 1, 2, 3, 4, gtk.FILL|gtk.EXPAND, gtk.FILL|gtk.EXPAND, 0, 0)
table_behavior.attach(hbox_openmode, 1, 2, 4, 5, gtk.FILL|gtk.EXPAND, gtk.FILL|gtk.EXPAND, 30, 0)
table_behavior.attach(gtk.Label(), 1, 2, 5, 6, gtk.FILL|gtk.EXPAND, gtk.FILL|gtk.EXPAND, 0, 0)
table_behavior.attach(openallimages, 1, 2, 6, 7, gtk.FILL|gtk.EXPAND, gtk.FILL|gtk.EXPAND, 30, 0)
table_behavior.attach(hiddenimages, 1, 2, 7, 8, gtk.FILL|gtk.EXPAND, gtk.FILL|gtk.EXPAND, 30, 0)
table_behavior.attach(usenumacomp, 1, 2, 8, 9, gtk.FILL|gtk.EXPAND, gtk.FILL|gtk.EXPAND, 30, 0)
table_behavior.attach(case_numacomp, 1, 2, 9, 10, gtk.FILL|gtk.EXPAND, gtk.FILL|gtk.EXPAND, 50, 0)
table_behavior.attach(gtk.Label(), 1, 2, 10, 11, gtk.FILL|gtk.EXPAND, gtk.FILL|gtk.EXPAND, 0, 0)
table_behavior.attach(openpref1, 1, 2, 11, 12, gtk.FILL|gtk.EXPAND, gtk.FILL|gtk.EXPAND, 30, 0)
table_behavior.attach(openpref2, 1, 2, 12, 13, gtk.FILL|gtk.EXPAND, gtk.FILL|gtk.EXPAND, 30, 0)
table_behavior.attach(hbox_defaultdir, 1, 2, 13, 14, gtk.FILL|gtk.EXPAND, gtk.FILL|gtk.EXPAND, 45, 0)
table_behavior.attach(gtk.Label(), 1, 2, 14, 15, gtk.FILL|gtk.EXPAND, gtk.FILL|gtk.EXPAND, 45, 0)
# "Navigation" tab:
table_navigation = gtk.Table(14, 2, False)
navlabel = gtk.Label()
navlabel.set_markup('<b>' + _('Navigation') + '</b>')
navlabel.set_alignment(0, 1)
preloadnav = gtk.CheckButton(label=_("Preload images for faster navigation"))
preloadnav.set_active(self.usettings['preloading_images'])
preloadnav.set_tooltip_text(_("If enabled, the next and previous images in the list will be preloaded during idle time. Note that the speed increase comes at the expense of memory usage, so it is recommended to disable this option on machines with limited ram."))
hbox_listwrap = gtk.HBox()
hbox_listwrap.pack_start(gtk.Label(_("Wrap around imagelist:")), False, False, 0)
combobox2 = gtk.combo_box_new_text()
combobox2.append_text(_("No"))
combobox2.append_text(_("Yes"))
combobox2.append_text(_("Prompt User"))
combobox2.set_active(self.usettings['listwrap_mode'])
hbox_listwrap.pack_start(combobox2, False, False, 5)
table_navigation.attach(gtk.Label(), 1, 2, 1, 2, gtk.FILL|gtk.EXPAND, gtk.FILL|gtk.EXPAND, 0, 0)
table_navigation.attach(navlabel, 1, 2, 2, 3, gtk.FILL|gtk.EXPAND, gtk.FILL|gtk.EXPAND, 15, 0)
table_navigation.attach(gtk.Label(), 1, 2, 3, 4, gtk.FILL|gtk.EXPAND, gtk.FILL|gtk.EXPAND, 0, 0)
table_navigation.attach(hbox_listwrap, 1, 2, 4, 5, gtk.FILL|gtk.EXPAND, gtk.FILL|gtk.EXPAND, 30, 0)
table_navigation.attach(gtk.Label(), 1, 2, 5, 6, gtk.FILL|gtk.EXPAND, gtk.FILL|gtk.EXPAND, 0, 0)
table_navigation.attach(preloadnav, 1, 2, 6, 7, gtk.FILL|gtk.EXPAND, gtk.FILL|gtk.EXPAND, 30, 0)
table_navigation.attach(gtk.Label(), 1, 2, 7, 8, gtk.FILL|gtk.EXPAND, gtk.FILL|gtk.EXPAND, 30, 0)
table_navigation.attach(gtk.Label(), 1, 2, 8, 9, gtk.FILL|gtk.EXPAND, gtk.FILL|gtk.EXPAND, 0, 0)
table_navigation.attach(gtk.Label(), 1, 2, 9, 10, gtk.FILL|gtk.EXPAND, gtk.FILL|gtk.EXPAND, 30, 0)
table_navigation.attach(gtk.Label(), 1, 2, 10, 11, gtk.FILL|gtk.EXPAND, gtk.FILL|gtk.EXPAND, 30, 0)
table_navigation.attach(gtk.Label(), 1, 2, 11, 12, gtk.FILL|gtk.EXPAND, gtk.FILL|gtk.EXPAND, 0, 0)
table_navigation.attach(gtk.Label(), 1, 2, 12, 13, gtk.FILL|gtk.EXPAND, gtk.FILL|gtk.EXPAND, 0, 0)
table_navigation.attach(gtk.Label(), 1, 2, 13, 14, gtk.FILL|gtk.EXPAND, gtk.FILL|gtk.EXPAND, 0, 0)
# "Slideshow" tab:
table_slideshow = gtk.Table(14, 2, False)
slideshowlabel = gtk.Label()
slideshowlabel.set_markup('<b>' + _('Slideshow Mode') + '</b>')
slideshowlabel.set_alignment(0, 1)
hbox_delay = gtk.HBox()
hbox_delay.pack_start(gtk.Label(_("Delay between images in seconds:")), False, False, 0)
spin_adj = gtk.Adjustment(self.usettings['slideshow_delay'], 0, 50000, 1, 10, 0)
delayspin = gtk.SpinButton(spin_adj, 1.0, 0)
delayspin.set_numeric(True)
hbox_delay.pack_start(delayspin, False, False, 5)
randomize = gtk.CheckButton(_("Randomize order of images"))
randomize.set_active(self.usettings['slideshow_random'])
randomize.set_tooltip_text(_("If enabled, a random image will be chosen during slideshow mode (without loading any image twice)."))
disable_screensaver = gtk.CheckButton(_("Disable screensaver in slideshow mode"))
disable_screensaver.set_active(self.usettings['disable_screensaver'])
disable_screensaver.set_tooltip_text(_("If enabled, xscreensaver will be temporarily disabled during slideshow mode."))
ss_in_fs = gtk.CheckButton(_("Always start in fullscreen mode"))
ss_in_fs.set_tooltip_text(_("If enabled, starting a slideshow will put the application in fullscreen mode."))
ss_in_fs.set_active(self.usettings['slideshow_in_fullscreen'])
table_slideshow.attach(gtk.Label(), 1, 2, 1, 2, gtk.FILL|gtk.EXPAND, gtk.FILL|gtk.EXPAND, 0, 0)
table_slideshow.attach(slideshowlabel, 1, 2, 2, 3, gtk.FILL|gtk.EXPAND, gtk.FILL|gtk.EXPAND, 15, 0)
table_slideshow.attach(gtk.Label(), 1, 2, 3, 4, gtk.FILL|gtk.EXPAND, gtk.FILL|gtk.EXPAND, 0, 0)
table_slideshow.attach(hbox_delay, 1, 2, 4, 5, gtk.FILL|gtk.EXPAND, gtk.FILL|gtk.EXPAND, 30, 0)
table_slideshow.attach(gtk.Label(), 1, 2, 5, 6, gtk.FILL|gtk.EXPAND, gtk.FILL|gtk.EXPAND, 0, 0)
table_slideshow.attach(disable_screensaver, 1, 2, 6, 7, gtk.FILL|gtk.EXPAND, gtk.FILL|gtk.EXPAND, 30, 0)
table_slideshow.attach(ss_in_fs, 1, 2, 7, 8, gtk.FILL|gtk.EXPAND, gtk.FILL|gtk.EXPAND, 30, 0)
table_slideshow.attach(randomize, 1, 2, 8, 9, gtk.FILL|gtk.EXPAND, gtk.FILL|gtk.EXPAND, 30, 0)
table_slideshow.attach(gtk.Label(), 1, 2, 9, 10, gtk.FILL|gtk.EXPAND, gtk.FILL|gtk.EXPAND, 0, 0)
table_slideshow.attach(gtk.Label(), 1, 2, 10, 11, gtk.FILL|gtk.EXPAND, gtk.FILL|gtk.EXPAND, 0, 0)
table_slideshow.attach(gtk.Label(), 1, 2, 11, 12, gtk.FILL|gtk.EXPAND, gtk.FILL|gtk.EXPAND, 0, 0)
table_slideshow.attach(gtk.Label(), 1, 2, 12, 13, gtk.FILL|gtk.EXPAND, gtk.FILL|gtk.EXPAND, 0, 0)
table_slideshow.attach(gtk.Label(), 1, 2, 13, 14, gtk.FILL|gtk.EXPAND, gtk.FILL|gtk.EXPAND, 0, 0)
# "Image" tab:
table_image = gtk.Table(14, 2, False)
imagelabel = gtk.Label()
imagelabel.set_markup('<b>' + _('Image Editing') + '</b>')
imagelabel.set_alignment(0, 1)
deletebutton = gtk.CheckButton(_("Confirm image delete"))
deletebutton.set_active(self.usettings['confirm_delete'])
zoom_hbox = gtk.HBox()
zoom_hbox.pack_start(gtk.Label(_('Scaling quality:')), False, False, 0)
zoomcombo = gtk.combo_box_new_text()
zoomcombo.append_text(_("Nearest (Fastest)"))
zoomcombo.append_text(_("Tiles"))
zoomcombo.append_text(_("Bilinear"))
zoomcombo.append_text(_("Hyper (Best)"))
zoomcombo.set_active(self.usettings['zoomvalue'])
zoom_hbox.pack_start(zoomcombo, False, False, 0)
zoom_hbox.pack_start(gtk.Label(), True, True, 0)
hbox_save = gtk.HBox()
savelabel = gtk.Label(_("Modified images:"))
savecombo = gtk.combo_box_new_text()
savecombo.append_text(_("Ignore Changes"))
savecombo.append_text(_("Auto-Save"))
savecombo.append_text(_("Prompt For Action"))
savecombo.set_active(self.usettings['savemode'])
hbox_save.pack_start(savelabel, False, False, 0)
hbox_save.pack_start(savecombo, False, False, 5)
hbox_quality = gtk.HBox()
qualitylabel = gtk.Label(_("Quality to save in:"))
qspin_adj = gtk.Adjustment(self.usettings['quality_save'], 0, 100, 1, 100, 0)
qualityspin = gtk.SpinButton(qspin_adj, 1.0, 0)
qualityspin.set_numeric(True)
hbox_quality.pack_start(qualitylabel, False, False, 0)
hbox_quality.pack_start(qualityspin, False, False, 5)
table_image.attach(gtk.Label(), 1, 3, 1, 2, gtk.FILL|gtk.EXPAND, gtk.FILL|gtk.EXPAND, 30, 0)
table_image.attach(imagelabel, 1, 3, 2, 3, gtk.FILL|gtk.EXPAND, gtk.FILL|gtk.EXPAND, 15, 0)
table_image.attach(gtk.Label(), 1, 3, 3, 4, gtk.FILL|gtk.EXPAND, gtk.FILL|gtk.EXPAND, 30, 0)
table_image.attach(zoom_hbox, 1, 3, 4, 5, gtk.FILL|gtk.EXPAND, gtk.FILL|gtk.EXPAND, 30, 0)
table_image.attach(gtk.Label(), 1, 3, 5, 6, gtk.FILL|gtk.EXPAND, gtk.FILL|gtk.EXPAND, 30, 0)
table_image.attach(hbox_save, 1, 3, 6, 7, gtk.FILL|gtk.EXPAND, gtk.FILL|gtk.EXPAND, 30, 0)
table_image.attach(gtk.Label(), 1, 3, 7, 8, gtk.FILL|gtk.EXPAND, gtk.FILL|gtk.EXPAND, 30, 0)
table_image.attach(hbox_quality, 1, 3, 8, 9, gtk.FILL|gtk.EXPAND, gtk.FILL|gtk.EXPAND, 30, 0)
table_image.attach(gtk.Label(), 1, 3, 9, 10, gtk.FILL|gtk.EXPAND, gtk.FILL|gtk.EXPAND, 30, 0)
table_image.attach(deletebutton, 1, 3, 10, 11, gtk.FILL|gtk.EXPAND, gtk.FILL|gtk.EXPAND, 30, 0)
table_image.attach(gtk.Label(), 1, 3, 11, 12, gtk.FILL|gtk.EXPAND, gtk.FILL|gtk.EXPAND, 30, 0)
table_image.attach(gtk.Label(), 1, 3, 12, 13, gtk.FILL|gtk.EXPAND, gtk.FILL|gtk.EXPAND, 30, 0)
table_image.attach(gtk.Label(), 1, 3, 13, 14, gtk.FILL|gtk.EXPAND, gtk.FILL|gtk.EXPAND, 30, 0)
table_image.attach(gtk.Label(), 1, 3, 14, 15, gtk.FILL|gtk.EXPAND, gtk.FILL|gtk.EXPAND, 30, 0)
# Add tabs:
notebook = gtk.Notebook()
notebook.append_page(table_behavior, gtk.Label(_("Behavior")))
notebook.append_page(table_navigation, gtk.Label(_("Navigation")))
notebook.append_page(table_settings, gtk.Label(_("Interface")))
notebook.append_page(table_slideshow, gtk.Label(_("Slideshow")))
notebook.append_page(table_image, gtk.Label(_("Image")))
notebook.set_current_page(0)
hbox = gtk.HBox()
self.prefs_dialog.vbox.pack_start(hbox, False, False, 7)
hbox.pack_start(notebook, False, False, 7)
notebook.connect('switch-page', self.prefs_tab_switched)
# Show prefs:
self.prefs_dialog.vbox.show_all()
self.close_button = self.prefs_dialog.add_button(gtk.STOCK_CLOSE, gtk.RESPONSE_CLOSE)
self.close_button.grab_focus()
response = self.prefs_dialog.run()
if response == gtk.RESPONSE_CLOSE or response == gtk.RESPONSE_DELETE_EVENT:
self.usettings['zoomvalue'] = zoomcombo.get_active()
if int(round(self.usettings['zoomvalue'], 0)) == 0:
self.zoom_quality = gtk.gdk.INTERP_NEAREST
elif int(round(self.usettings['zoomvalue'], 0)) == 1:
self.zoom_quality = gtk.gdk.INTERP_TILES
elif int(round(self.usettings['zoomvalue'], 0)) == 2:
self.zoom_quality = gtk.gdk.INTERP_BILINEAR
elif int(round(self.usettings['zoomvalue'], 0)) == 3:
self.zoom_quality = gtk.gdk.INTERP_HYPER
self.usettings['open_all_images'] = openallimages.get_active()
self.usettings['open_hidden_files'] = hiddenimages.get_active()
self.usettings['use_numacomp'] = usenumacomp.get_active()
self.usettings['case_numacomp'] = case_numacomp.get_active()
if openpref1.get_active():
self.usettings['use_last_dir'] = True
else:
self.usettings['use_last_dir'] = False
open_mode_prev = self.usettings['open_mode']
self.usettings['open_mode'] = combobox.get_active()
preloading_images_prev = self.usettings['preloading_images']
self.usettings['preloading_images'] = preloadnav.get_active()
self.usettings['listwrap_mode'] = combobox2.get_active()
self.usettings['slideshow_delay'] = delayspin.get_value()
self.curr_slideshow_delay = self.usettings['slideshow_delay']
self.usettings['slideshow_random'] = randomize.get_active()
self.curr_slideshow_random = self.usettings['slideshow_random']
self.usettings['disable_screensaver'] = disable_screensaver.get_active()
self.usettings['slideshow_in_fullscreen'] = ss_in_fs.get_active()
self.usettings['savemode'] = savecombo.get_active()
self.usettings['start_in_fullscreen'] = fullscreen.get_active()
self.usettings['confirm_delete'] = deletebutton.get_active()
self.usettings['quality_save'] = qualityspin.get_value()
self.usettings['thumbnail_size'] = int(self.thumbnail_sizes[thumbsize.get_active()])
if self.usettings['thumbnail_size'] != prev_thumbnail_size:
gobject.idle_add(self.thumbpane_set_size)
gobject.idle_add(self.thumbpane_update_images, True, self.curr_img_in_list)
self.prefs_dialog.destroy()
self.set_go_navigation_sensitivities(False)
if (self.usettings['preloading_images'] and not preloading_images_prev) or (open_mode_prev != self.usettings['open_mode']):
# The user just turned on preloading, so do it:
self.nextimg.index = -1
self.previmg.index = -1
self.preload_when_idle = gobject.idle_add(self.preload_next_image, False)
self.preload_when_idle2 = gobject.idle_add(self.preload_prev_image, False)
elif not self.usettings['preloading_images']:
self.nextimg.index = -1
self.previmg.index = -1
def prefs_use_fixed_dir_clicked(self, button):
if button.get_active():
self.defaultdir.set_sensitive(True)
else:
self.defaultdir.set_sensitive(False)
def toggle_sensitivy_of_other(self,toggled_button,to_sensitive):
"""Set widget to_sensitive as sensitive if toggled_button is active."""
if toggled_button.get_active():
to_sensitive.set_sensitive(True)
else:
to_sensitive.set_sensitive(False)
def rename_image(self, action):
if len(self.image_list) > 0:
temp_slideshow_mode = self.slideshow_mode
if self.slideshow_mode:
self.toggle_slideshow(None)
rename_dialog = gtk.Dialog(_('Rename Image'), self.window, gtk.DIALOG_MODAL)
self.rename_txt = gtk.Entry()
filename = os.path.basename(self.currimg.name)
self.rename_txt.set_text(filename)
self.rename_txt.set_activates_default(True)
cancelbutton = rename_dialog.add_button(gtk.STOCK_CANCEL, gtk.RESPONSE_CANCEL)
renamebutton = rename_dialog.add_button(_("_Rename"), gtk.RESPONSE_ACCEPT)
renameimage = gtk.Image()
renameimage.set_from_stock(gtk.STOCK_OK, gtk.ICON_SIZE_BUTTON)
renamebutton.set_image(renameimage)
animtest = gtk.gdk.PixbufAnimation(self.currimg.name)
if animtest.is_static_image():
pixbuf, image_width, image_height = self.get_pixbuf_of_size(self.currimg.pixbuf_original, 60, self.zoom_quality)
else:
pixbuf, image_width, image_height = self.get_pixbuf_of_size(animtest.get_static_image(), 60, self.zoom_quality)
image = gtk.Image()
image.set_from_pixbuf(pixbuf)
instructions = gtk.Label(_("Enter the new name:"))
instructions.set_alignment(0, 1)
hbox = gtk.HBox()
hbox.pack_start(image, False, False, 10)
vbox_stuff = gtk.VBox()
vbox_stuff.pack_start(gtk.Label(), False, False, 0)
vbox_stuff.pack_start(instructions, False, False, 0)
vbox_stuff.pack_start(gtk.Label(), False, False, 0)
vbox_stuff.pack_start(self.rename_txt, True, True, 0)
vbox_stuff.pack_start(gtk.Label(), False, False, 0)
hbox.pack_start(vbox_stuff, True, True, 10)
rename_dialog.vbox.pack_start(hbox, False, False, 0)
rename_dialog.set_has_separator(True)
rename_dialog.set_default_response(gtk.RESPONSE_ACCEPT)
rename_dialog.set_size_request(300, -1)
rename_dialog.vbox.show_all()
rename_dialog.connect('show', self.select_rename_text)
response = rename_dialog.run()
if response == gtk.RESPONSE_ACCEPT:
try:
new_filename = os.path.dirname(self.currimg.name) + "/" + self.rename_txt.get_text()
shutil.move(self.currimg.name, new_filename)
# Update thumbnail filename:
try:
shutil.move(self_get_name(self.currimg.name)[1], self.thumbnail_get_name(new_filename)[1])
except:
pass
self.recent_file_remove_and_refresh_name(self.currimg.name)
self.currimg.name = new_filename
self.register_file_with_recent_docs(self.currimg.name)
self.update_title()
except:
error_dialog = gtk.MessageDialog(self.window, gtk.DIALOG_MODAL, gtk.MESSAGE_WARNING, gtk.BUTTONS_OK, _('Unable to rename %s') % self.currimg.name)
error_dialog.set_title(_("Unable to rename"))
error_dialog.run()
error_dialog.destroy()
rename_dialog.destroy()
if temp_slideshow_mode:
self.toggle_slideshow(None)
def select_rename_text(self, widget):
filename = os.path.basename(self.currimg.name)
fileext = os.path.splitext(os.path.basename(self.currimg.name))[1]
self.rename_txt.select_region(0, len(filename) - len(fileext))
	def delete_image(self, action):
		"""Delete the current image file, optionally after a confirmation dialog.

		Also removes its cached thumbnail, its recent-files entry and its
		thumbnail-pane row, then advances to a neighbouring image or clears
		the viewer if the list is now empty. A running slideshow is paused
		around the dialog and resumed afterwards.
		"""
		if len(self.image_list) > 0:
			temp_slideshow_mode = self.slideshow_mode
			if self.slideshow_mode:
				self.toggle_slideshow(None)
			delete_dialog = gtk.Dialog(_('Delete Image'), self.window, gtk.DIALOG_MODAL)
			if self.usettings['confirm_delete']:
				permlabel = gtk.Label(_('Are you sure you wish to permanently delete %s?') % os.path.split(self.currimg.name)[1])
				permlabel.set_line_wrap(True)
				permlabel.set_alignment(0, 0.1)
				warningicon = gtk.Image()
				warningicon.set_from_stock(gtk.STOCK_DIALOG_WARNING, gtk.ICON_SIZE_DIALOG)
				hbox = gtk.HBox()
				hbox.pack_start(warningicon, False, False, 10)
				hbox.pack_start(permlabel, False, False, 10)
				delete_dialog.vbox.pack_start(gtk.Label(), False, False, 0)
				delete_dialog.vbox.pack_start(hbox, False, False, 0)
				cancelbutton = delete_dialog.add_button(gtk.STOCK_CANCEL, gtk.RESPONSE_CANCEL)
				deletebutton = delete_dialog.add_button(gtk.STOCK_DELETE, gtk.RESPONSE_YES)
				delete_dialog.set_has_separator(False)
				deletebutton.set_property('has-focus', True)
				delete_dialog.set_default_response(gtk.RESPONSE_YES)
				delete_dialog.vbox.show_all()
				response = delete_dialog.run()
			else:
				# Confirmation disabled: proceed as if the user clicked Delete.
				response = gtk.RESPONSE_YES
			if response == gtk.RESPONSE_YES:
				try:
					os.remove(self.currimg.name)
					self.image_modified = False
					# Best-effort removal of the cached thumbnail:
					try:
						os.remove(self.thumbnail_get_name(self.currimg.name)[1])
					except:
						pass
					self.recent_file_remove_and_refresh_name(self.currimg.name)
					iter = self.thumblist.get_iter((self.curr_img_in_list,))
					try:
						self.thumbnail_loaded.pop(self.curr_img_in_list)
						self.thumbpane_update_images()
					except:
						pass
					self.thumblist.remove(iter)
					# Rebuild image_list without the deleted file:
					templist = self.image_list
					self.image_list = []
					for item in templist:
						if item != self.currimg.name:
							self.image_list.append(item)
					if len(self.image_list) >= 1:
						# Keep curr_img_in_list in range, then load a neighbour:
						if len(self.image_list) == 1:
							self.curr_img_in_list = 0
						elif self.curr_img_in_list == len(self.image_list):
							self.curr_img_in_list -= 1
						self.change_cursor(gtk.gdk.Cursor(gtk.gdk.WATCH))
						self.previmg.index = -1
						self.nextimg.index = -1
						self.load_when_idle = gobject.idle_add(self.load_new_image, False, False, True, True, True, True)
						self.set_go_navigation_sensitivities(False)
					else:
						# Nothing left to show; clear the viewer state.
						self.imageview.clear()
						self.update_title()
						self.statusbar.push(self.statusbar.get_context_id(""), "")
						self.image_loaded = False
						self.set_slideshow_sensitivities()
						self.set_image_sensitivities(False)
						self.set_go_navigation_sensitivities(False)
					# Select new item:
					self.thumbpane_select(self.curr_img_in_list)
				except:
					error_dialog = gtk.MessageDialog(self.window, gtk.DIALOG_MODAL, gtk.MESSAGE_WARNING, gtk.BUTTONS_OK, _('Unable to delete %s') % self.currimg.name)
					error_dialog.set_title(_("Unable to delete"))
					error_dialog.run()
					error_dialog.destroy()
			delete_dialog.destroy()
			if temp_slideshow_mode:
				self.toggle_slideshow(None)
def defaultdir_clicked(self, button):
getdir = gtk.FileChooserDialog(title=_("Choose directory"),action=gtk.FILE_CHOOSER_ACTION_OPEN,buttons=(gtk.STOCK_CANCEL,gtk.RESPONSE_CANCEL,gtk.STOCK_OPEN,gtk.RESPONSE_OK))
getdir.set_action(gtk.FILE_CHOOSER_ACTION_SELECT_FOLDER)
getdir.set_filename(self.usettings['fixed_dir'])
getdir.set_default_response(gtk.RESPONSE_OK)
response = getdir.run()
if response == gtk.RESPONSE_OK:
self.usettings['fixed_dir'] = getdir.get_filenames()[0]
if len(self.usettings['fixed_dir']) > 25:
button.set_label('...' + self.usettings['fixed_dir'][-22:])
else:
button.set_label(self.usettings['fixed_dir'])
getdir.destroy()
else:
getdir.destroy()
def prefs_tab_switched(self, notebook, page, page_num):
do_when_idle = gobject.idle_add(self.grab_close_button)
	def grab_close_button(self):
		"""Give keyboard focus to the preferences dialog's Close button."""
		self.close_button.grab_focus()
	def bgcolor_selected(self, widget):
		"""Store and apply the background color chosen in the color button."""
		# When the user selects a color, store this color in self.bgcolor (which will
		# later be saved to .miragerc) and set this background color:
		self.usettings['bgcolor'] = widget.get_property('color')
		# Only repaint when the 'simple' themed background is not in use:
		if not self.usettings['simple_bgcolor']:
			self.layout.modify_bg(gtk.STATE_NORMAL, self.usettings['bgcolor'])
			self.slideshow_window.modify_bg(gtk.STATE_NORMAL, self.usettings['bgcolor'])
			self.slideshow_window2.modify_bg(gtk.STATE_NORMAL, self.usettings['bgcolor'])
def simple_bgcolor_selected(self, widget):
if widget.get_active():
self.usettings['simple_bgcolor'] = True
self.layout.modify_bg(gtk.STATE_NORMAL, None)
else:
self.usettings['simple_bgcolor'] = False
self.bgcolor_selected(self.colorbutton)
	def show_about(self, action):
		"""Build and display the About dialog (Help > About)."""
		# Help > About
		self.about_dialog = gtk.AboutDialog()
		# Older GTK versions lack these setters; ignore failures:
		try:
			self.about_dialog.set_transient_for(self.window)
			self.about_dialog.set_modal(True)
		except:
			pass
		self.about_dialog.set_name('Mirage')
		self.about_dialog.set_version(__version__)
		self.about_dialog.set_comments(_('A fast GTK+ Image Viewer.'))
		self.about_dialog.set_license(__license__)
		self.about_dialog.set_authors(['Scott Horowitz <stonecrest@gmail.com> (retired, original developer)', 'Fredric Johansson <fredric.miscmail@gmail.com>'])
		self.about_dialog.set_artists(['William Rea <sillywilly@gmail.com>'])
		self.about_dialog.set_translator_credits('cs - Petr Pisar <petr.pisar@atlas.cz>\nde - Bjoern Martensen <bjoern.martensen@gmail.com>\nes - Isidro Arribas <cdhotfire@gmail.com>\nfr - Mike Massonnet <mmassonnet@gmail.com>\nhu - Sandor Lisovszki <lisovszki@dunakanyar.net>\nnl - Pascal De Vuyst <pascal.devuyst@gmail.com>\npl - Tomasz Dominikowski <dominikowski@gmail.com>\npt_BR - Danilo Martins <mawkee@gmail.com>\nru - mavka <mavka@justos.org>\nit - Daniele Maggio <dado84@freemail.it>\nzh_CN - Jayden Suen <no.sun@163.com>')
		# Route website clicks through our own browser launcher:
		gtk.about_dialog_set_url_hook(self.show_website, "http://mirageiv.berlios.de")
		self.about_dialog.set_website_label("http://mirageiv.berlios.de")
		icon_path = self.find_path('mirage.png')
		# Missing logo file is non-fatal:
		try:
			icon_pixbuf = gtk.gdk.pixbuf_new_from_file(icon_path)
			self.about_dialog.set_logo(icon_pixbuf)
		except:
			pass
		self.about_dialog.connect('response', self.close_about)
		self.about_dialog.connect('delete_event', self.close_about)
		self.about_dialog.show_all()
	def show_website(self, dialog, blah, link):
		"""AboutDialog URL-hook callback: open the project website."""
		self.browser_load(link)
	def show_help(self, action):
		"""Open the online documentation in a web browser."""
		self.browser_load("http://mirageiv.berlios.de/docs.html")
def browser_load(self, docslink):
try:
pid = subprocess.Popen(["gnome-open", docslink]).pid
except:
try:
pid = subprocess.Popen(["exo-open", docslink]).pid
except:
try:
pid = subprocess.Popen(["kfmclient", "openURL", docslink]).pid
except:
try:
pid = subprocess.Popen(["firefox", docslink]).pid
except:
try:
pid = subprocess.Popen(["mozilla", docslink]).pid
except:
try:
pid = subprocess.Popen(["opera", docslink]).pid
except:
error_dialog = gtk.MessageDialog(self.window, gtk.DIALOG_MODAL, gtk.MESSAGE_WARNING, gtk.BUTTONS_CLOSE, _('Unable to launch a suitable browser.'))
error_dialog.run()
error_dialog.destroy()
	def close_about(self, event, data=None):
		"""Hide (don't destroy) the About dialog so it can be reshown quickly."""
		self.about_dialog.hide()
		# Returning True stops the delete-event from destroying the dialog.
		return True
	def mousewheel_scrolled(self, widget, event):
		"""Handle scroll events: Ctrl+wheel zooms, plain wheel navigates images."""
		if event.type == gtk.gdk.SCROLL:
			# Zooming of the image by Ctrl-mousewheel
			if event.state & gtk.gdk.CONTROL_MASK:
				if event.direction == gtk.gdk.SCROLL_UP:
					self.zoom_in(None)
				elif event.direction == gtk.gdk.SCROLL_DOWN:
					self.zoom_out(None)
				return True
			# Navigation of images with mousewheel:
			else:
				if event.direction == gtk.gdk.SCROLL_UP:
					self.goto_prev_image(None)
				elif event.direction == gtk.gdk.SCROLL_DOWN:
					self.goto_next_image(None)
				return True
	def mouse_moved(self, widget, event):
		"""Pan the image on button-drag; in fullscreen, manage the auto-hiding
		cursor and the slideshow controls near the bottom edge."""
		# This handles the panning of the image
		if event.is_hint:
			x, y, state = event.window.get_pointer()
		else:
			state = event.state
			x, y = event.x_root, event.y_root
		if (state & gtk.gdk.BUTTON2_MASK) or (state & gtk.gdk.BUTTON1_MASK):
			# Prevent self.expose_event() from potentially further changing the
			# adjustments upon the adjustment value changes
			self.updating_adjustments = True
			# Move the scroll adjustments by the pointer delta, clamped to range:
			xadjust = self.layout.get_hadjustment()
			newx = xadjust.value + (self.prevmousex - x)
			if newx >= xadjust.lower and newx <= xadjust.upper - xadjust.page_size:
				xadjust.set_value(newx)
				self.layout.set_hadjustment(xadjust)
			yadjust = self.layout.get_vadjustment()
			newy = yadjust.value + (self.prevmousey - y)
			if newy >= yadjust.lower and newy <= yadjust.upper - yadjust.page_size:
				yadjust.set_value(newy)
				self.layout.set_vadjustment(yadjust)
			self.updating_adjustments = False
		self.prevmousex = x
		self.prevmousey = y
		if self.fullscreen_mode:
			# Show cursor on movement, then hide after 2 seconds of no movement
			self.change_cursor(None)
			if not self.slideshow_controls_visible:
				gobject.source_remove(self.timer_id)
				if not self.closing_app:
					while gtk.events_pending():
						gtk.main_iteration()
					self.timer_id = gobject.timeout_add(2000, self.hide_cursor)
			# Reveal the slideshow controls in the bottom 10% of the screen:
			if y > 0.9*self.available_image_height():
				self.slideshow_controls_show()
			else:
				self.slideshow_controls_hide()
		return True
def button_pressed(self, widget, event):
if self.image_loaded:
# Changes the cursor to the 'resize' cursor, like GIMP, on a middle click:
if (event.button == 2 or event.button == 1) and (self.hscroll.get_property('visible')==True or self.vscroll.get_property('visible')==True):
self.change_cursor(gtk.gdk.Cursor(gtk.gdk.FLEUR))
self.prevmousex = event.x_root
self.prevmousey = event.y_root
# Right-click popup:
elif self.image_loaded and event.button == 3:
self.UIManager.get_widget('/Popup').popup(None, None, None, event.button, event.time)
return True
def button_released(self, widget, event):
# Resets the cursor when middle mouse button is released
if event.button == 2 or event.button == 1:
self.change_cursor(None)
return True
def zoom_in(self, action):
if self.currimg.name != "" and self.UIManager.get_widget('/MainMenu/ViewMenu/In').get_property('sensitive'):
self.image_zoomed = True
self.currimg.zoomratio = self.currimg.zoomratio * 1.25
self.set_zoom_sensitivities()
self.last_image_action_was_fit = False
self.put_zoom_image_to_window(False)
self.update_statusbar()
def zoom_out(self, action):
if self.currimg.name != "" and self.UIManager.get_widget('/MainMenu/ViewMenu/Out').get_property('sensitive'):
if self.currimg.zoomratio == self.min_zoomratio:
# No point in proceeding..
return
self.image_zoomed = True
self.currimg.zoomratio = self.currimg.zoomratio * 1/1.25
if self.currimg.zoomratio < self.min_zoomratio:
self.currimg.zoomratio = self.min_zoomratio
self.set_zoom_sensitivities()
self.last_image_action_was_fit = False
self.put_zoom_image_to_window(False)
self.update_statusbar()
	def zoom_to_fit_window_action(self, action):
		"""Menu callback: fit the current image to the window."""
		self.zoom_to_fit_window(action, False, False)
def zoom_to_fit_window(self, action, is_preloadimg_next, is_preloadimg_prev):
if is_preloadimg_next:
if self.usettings['preloading_images'] and self.nextimg.index != -1:
win_width = self.available_image_width()
win_height = self.available_image_height()
preimg_width = self.nextimg.pixbuf_original.get_width()
preimg_height = self.nextimg.pixbuf_original.get_height()
prewidth_ratio = float(preimg_width)/win_width
preheight_ratio = float(preimg_height)/win_height
if prewidth_ratio < preheight_ratio:
premax_ratio = preheight_ratio
else:
premax_ratio = prewidth_ratio
self.nextimg.zoomratio = 1/float(max_ratio)
elif is_preloadimg_prev:
if self.usettings['preloading_images'] and self.previmg.index != -1:
win_width = self.available_image_width()
win_height = self.available_image_height()
preimg_width = self.previmg.pixbuf_original.get_width()
preimg_height = self.previmg.pixbuf_original.get_height()
prewidth_ratio = float(preimg_width)/win_width
preheight_ratio = float(preimg_height)/win_height
if prewidth_ratio < preheight_ratio:
premax_ratio = preheight_ratio
else:
premax_ratio = prewidth_ratio
self.previmg.zoomratio = 1/float(max_ratio)
else:
if self.currimg.name != "" and (self.slideshow_mode or self.UIManager.get_widget('/MainMenu/ViewMenu/Fit').get_property('sensitive')):
self.image_zoomed = True
self.usettings['last_mode'] = self.open_mode_fit
self.last_image_action_was_fit = True
self.last_image_action_was_smart_fit = False
# Calculate zoomratio needed to fit to window:
win_width = self.available_image_width()
win_height = self.available_image_height()
img_width = self.currimg.pixbuf_original.get_width()
img_height = self.currimg.pixbuf_original.get_height()
width_ratio = float(img_width)/win_width
height_ratio = float(img_height)/win_height
if width_ratio < height_ratio:
max_ratio = height_ratio
else:
max_ratio = width_ratio
self.currimg.zoomratio = 1/float(max_ratio)
self.set_zoom_sensitivities()
self.put_zoom_image_to_window(False)
self.update_statusbar()
	def zoom_to_fit_or_1_to_1(self, action, is_preloadimg_next, is_preloadimg_prev):
		"""'Smart' fit: shrink large images to fit the window, but never enlarge.

		If the computed fit ratio exceeds 1, the image is shown 1:1 instead.
		With the preload flags set, only the preloaded next/previous image's
		zoomratio is computed (no redraw).
		"""
		if is_preloadimg_next:
			if self.usettings['preloading_images'] and self.nextimg.index != -1:
				win_width = self.available_image_width()
				win_height = self.available_image_height()
				preimg_width = self.nextimg.pixbuf_original.get_width()
				preimg_height = self.nextimg.pixbuf_original.get_height()
				prewidth_ratio = float(preimg_width)/win_width
				preheight_ratio = float(preimg_height)/win_height
				# Fit is limited by whichever dimension overflows the most:
				if prewidth_ratio < preheight_ratio:
					premax_ratio = preheight_ratio
				else:
					premax_ratio = prewidth_ratio
				self.nextimg.zoomratio = 1/float(premax_ratio)
				# Never scale above actual size:
				if self.nextimg.zoomratio > 1:
					self.nextimg.zoomratio = 1
		elif is_preloadimg_prev:
			if self.usettings['preloading_images'] and self.previmg.index != -1:
				win_width = self.available_image_width()
				win_height = self.available_image_height()
				preimg_width = self.previmg.pixbuf_original.get_width()
				preimg_height = self.previmg.pixbuf_original.get_height()
				prewidth_ratio = float(preimg_width)/win_width
				preheight_ratio = float(preimg_height)/win_height
				if prewidth_ratio < preheight_ratio:
					premax_ratio = preheight_ratio
				else:
					premax_ratio = prewidth_ratio
				self.previmg.zoomratio = 1/float(premax_ratio)
				if self.previmg.zoomratio > 1:
					self.previmg.zoomratio = 1
		else:
			if self.currimg.name != "":
				self.image_zoomed = True
				# Calculate zoomratio needed to fit to window:
				win_width = self.available_image_width()
				win_height = self.available_image_height()
				img_width = self.currimg.pixbuf_original.get_width()
				img_height = self.currimg.pixbuf_original.get_height()
				width_ratio = float(img_width)/win_width
				height_ratio = float(img_height)/win_height
				if width_ratio < height_ratio:
					max_ratio = height_ratio
				else:
					max_ratio = width_ratio
				self.currimg.zoomratio = 1/float(max_ratio)
				self.set_zoom_sensitivities()
				if self.currimg.zoomratio > 1:
					# Revert to 1:1 zoom
					self.zoom_1_to_1(action, False, False)
				else:
					self.put_zoom_image_to_window(False)
					self.update_statusbar()
				self.last_image_action_was_fit = True
				self.last_image_action_was_smart_fit = True
	def zoom_1_to_1_action(self, action):
		"""Menu callback: show the current image at actual (1:1) size."""
		self.zoom_1_to_1(action, False, False)
def zoom_1_to_1(self, action, is_preloadimg_next, is_preloadimg_prev):
if is_preloadimg_next:
if self.usettings['preloading_images']:
self.nextimg.zoomratio = 1
elif is_preloadimg_prev:
if self.usettings['preloading_images']:
self.previmg.zoomratio = 1
else:
if self.currimg.name != "" and (self.slideshow_mode or self.currimg.animation or (not self.currimg.animation and self.UIManager.get_widget('/MainMenu/ViewMenu/1:1').get_property('sensitive'))):
self.image_zoomed = True
self.usettings['last_mode'] = self.open_mode_1to1
self.last_image_action_was_fit = False
self.currimg.zoomratio = 1
self.put_zoom_image_to_window(False)
self.update_statusbar()
def zoom_check_and_execute(self,action, is_preloadimg_next, is_preloadimg_prev):
if self.usettings['open_mode'] == self.open_mode_smart or (self.usettings['open_mode'] == self.open_mode_last and self.usettings['last_mode'] == self.open_mode_smart):
self.zoom_to_fit_or_1_to_1(action, is_preloadimg_next, is_preloadimg_prev)
elif self.usettings['open_mode'] == self.open_mode_fit or (self.usettings['open_mode'] == self.open_mode_last and self.usettings['last_mode'] == self.open_mode_fit):
self.zoom_to_fit_window(action, is_preloadimg_next, is_preloadimg_prev)
elif self.usettings['open_mode'] == self.open_mode_1to1 or (self.usettings['open_mode'] == self.open_mode_last and self.usettings['last_mode'] == self.open_mode_1to1):
self.zoom_1_to_1(action, is_preloadimg_next, is_preloadimg_prev)
	def rotate_left(self, action):
		"""Menu callback: rotate the current image 90 degrees counter-clockwise."""
		self.rotate_left_or_right(self.UIManager.get_widget('/MainMenu/EditMenu/Rotate Left'), 90)
	def rotate_right(self, action):
		"""Menu callback: rotate the current image 90 degrees clockwise (270 ccw)."""
		self.rotate_left_or_right(self.UIManager.get_widget('/MainMenu/EditMenu/Rotate Right'), 270)
	def rotate_left_or_right(self, widget, angle):
		"""Rotate the current image by `angle` degrees if `widget` is sensitive.

		Re-fits the view when the last action was a fit; otherwise swaps the
		cached width/height and rotates the displayed pixbuf in place.
		"""
		if self.currimg.name != "" and widget.get_property('sensitive'):
			self.currimg.pixbuf_original = self.image_rotate(self.currimg.pixbuf_original, angle)
			if self.last_image_action_was_fit:
				if self.last_image_action_was_smart_fit:
					self.zoom_to_fit_or_1_to_1(None, False, False)
				else:
					self.zoom_to_fit_window(None, False, False)
			else:
				# A 90/270-degree rotation swaps the display dimensions:
				self.currimg.width, self.currimg.height = self.currimg.height, self.currimg.width
				self.layout.set_size(self.currimg.width, self.currimg.height)
				self.currimg.pixbuf = self.image_rotate(self.currimg.pixbuf, angle)
				self.imageview.set_from_pixbuf(self.currimg.pixbuf)
				self.show_scrollbars_if_needed()
				self.center_image()
				self.update_statusbar()
			self.image_modified = True
	def flip_image_vert(self, action):
		"""Menu callback: mirror the current image top-to-bottom."""
		self.flip_image_vert_or_horiz(self.UIManager.get_widget('/MainMenu/EditMenu/Flip Vertically'), True)
	def flip_image_horiz(self, action):
		"""Menu callback: mirror the current image left-to-right."""
		self.flip_image_vert_or_horiz(self.UIManager.get_widget('/MainMenu/EditMenu/Flip Horizontally'), False)
def flip_image_vert_or_horiz(self, widget, vertical):
if self.currimg.name != "" and widget.get_property('sensitive'):
self.currimg.pixbuf = self.image_flip(self.currimg.pixbuf, vertical)
self.currimg.pixbuf_original = self.image_flip(self.currimg.pixbuf_original, vertical)
self.imageview.set_from_pixbuf(self.currimg.pixbuf)
self.image_modified = True
def get_pixbuf_of_size(self, pixbuf, size, zoom_quality):
# Creates a pixbuf that fits in the specified square of sizexsize
# while preserving the aspect ratio
# Returns tuple: (scaled_pixbuf, actual_width, actual_height)
image_width = pixbuf.get_width()
image_height = pixbuf.get_height()
if image_width-size > image_height-size:
if image_width > size:
image_height = int(size/float(image_width)*image_height)
image_width = size
else:
if image_height > size:
image_width = int(size/float(image_height)*image_width)
image_height = size
if not pixbuf.get_has_alpha():
crop_pixbuf = pixbuf.scale_simple(image_width, image_height, zoom_quality)
else:
colormap = self.imageview.get_colormap()
light_grey = colormap.alloc_color('#666666', True, True)
dark_grey = colormap.alloc_color('#999999', True, True)
crop_pixbuf = pixbuf.composite_color_simple(image_width, image_height, zoom_quality, 255, 8, light_grey.pixel, dark_grey.pixel)
return (crop_pixbuf, image_width, image_height)
def pixbuf_add_border(self, pix):
# Add a gray outline to pix. This will increase the pixbuf size by
# 2 pixels lengthwise and heightwise, 1 on each side. Returns pixbuf.
try:
width = pix.get_width()
height = pix.get_height()
newpix = gtk.gdk.Pixbuf(gtk.gdk.COLORSPACE_RGB, True, 8, width+2, height+2)
newpix.fill(0x858585ff)
pix.copy_area(0, 0, width, height, newpix, 1, 1)
return newpix
except:
return pix
	def crop_image(self, action):
		"""Show the crop dialog and, on accept, crop the current image.

		The dialog shows a scaled-down preview (max 400px) the user can drag
		a selection rectangle on, plus X/Y/width/height spinbuttons kept in
		sync with the rectangle. On accept, the selected region (stored in
		self.coords, in original-image coordinates) is copied out of
		pixbuf_original and reloaded as the current image.
		"""
		dialog = gtk.Dialog(_("Crop Image"), self.window, gtk.DIALOG_MODAL, (gtk.STOCK_CANCEL, gtk.RESPONSE_REJECT))
		cropbutton = dialog.add_button(_("C_rop"), gtk.RESPONSE_ACCEPT)
		cropimage = gtk.Image()
		cropimage.set_from_stock(gtk.STOCK_OK, gtk.ICON_SIZE_BUTTON)
		cropbutton.set_image(cropimage)
		# Scaled-down preview the user draws the selection rectangle on:
		image = gtk.DrawingArea()
		crop_pixbuf, image_width, image_height = self.get_pixbuf_of_size(self.currimg.pixbuf_original, 400, self.zoom_quality)
		image.set_size_request(image_width, image_height)
		hbox = gtk.HBox()
		hbox.pack_start(gtk.Label(), expand=True)
		hbox.pack_start(image, expand=False)
		hbox.pack_start(gtk.Label(), expand=True)
		# Left column: X/Y origin spinbuttons (in original-image pixels):
		vbox_left = gtk.VBox()
		x_adj = gtk.Adjustment(0, 0, self.currimg.pixbuf_original.get_width(), 1, 10, 0)
		x = gtk.SpinButton(x_adj, 0, 0)
		x.set_numeric(True)
		x.set_update_policy(gtk.UPDATE_IF_VALID)
		x.set_wrap(False)
		x_label = gtk.Label("X:")
		x_label.set_alignment(0, 0.7)
		y_adj = gtk.Adjustment(0, 0, self.currimg.pixbuf_original.get_height(), 1, 10, 0)
		y = gtk.SpinButton(y_adj, 0, 0)
		y.set_numeric(True)
		y.set_update_policy(gtk.UPDATE_IF_VALID)
		y.set_wrap(False)
		y_label = gtk.Label("Y:")
		x_label.set_size_request(y_label.size_request()[0], -1)
		hbox_x = gtk.HBox()
		hbox_y = gtk.HBox()
		hbox_x.pack_start(x_label, False, False, 10)
		hbox_x.pack_start(x, False, False, 0)
		hbox_x.pack_start(gtk.Label(), False, False, 3)
		hbox_y.pack_start(y_label, False, False, 10)
		hbox_y.pack_start(y, False, False, 0)
		hbox_y.pack_start(gtk.Label(), False, False, 3)
		vbox_left.pack_start(hbox_x, False, False, 0)
		vbox_left.pack_start(hbox_y, False, False, 0)
		# Right column: width/height spinbuttons:
		vbox_right = gtk.VBox()
		width_adj = gtk.Adjustment(self.currimg.pixbuf_original.get_width(), 1, self.currimg.pixbuf_original.get_width(), 1, 10, 0)
		width = gtk.SpinButton(width_adj, 0, 0)
		width.set_numeric(True)
		width.set_update_policy(gtk.UPDATE_IF_VALID)
		width.set_wrap(False)
		width_label = gtk.Label(_("Width:"))
		width_label.set_alignment(0, 0.7)
		height_adj = gtk.Adjustment(self.currimg.pixbuf_original.get_height(), 1, self.currimg.pixbuf_original.get_height(), 1, 10, 0)
		height = gtk.SpinButton(height_adj, 0, 0)
		height.set_numeric(True)
		height.set_update_policy(gtk.UPDATE_IF_VALID)
		height.set_wrap(False)
		height_label = gtk.Label(_("Height:"))
		width_label.set_size_request(height_label.size_request()[0], -1)
		height_label.set_alignment(0, 0.7)
		hbox_width = gtk.HBox()
		hbox_height = gtk.HBox()
		hbox_width.pack_start(width_label, False, False, 10)
		hbox_width.pack_start(width, False, False, 0)
		hbox_height.pack_start(height_label, False, False, 10)
		hbox_height.pack_start(height, False, False, 0)
		vbox_right.pack_start(hbox_width, False, False, 0)
		vbox_right.pack_start(hbox_height, False, False, 0)
		hbox2 = gtk.HBox()
		hbox2.pack_start(gtk.Label(), expand=True)
		hbox2.pack_start(vbox_left, False, False, 0)
		hbox2.pack_start(vbox_right, False, False, 0)
		hbox2.pack_start(gtk.Label(), expand=True)
		dialog.vbox.pack_start(hbox, False, False, 0)
		dialog.vbox.pack_start(hbox2, False, False, 15)
		dialog.set_resizable(False)
		dialog.vbox.show_all()
		# Wire up mouse-driven rectangle drawing and spinbutton syncing;
		# the handler ids are kept so handlers can be blocked during updates:
		image.set_events(gtk.gdk.POINTER_MOTION_MASK | gtk.gdk.POINTER_MOTION_HINT_MASK | gtk.gdk.BUTTON_PRESS_MASK | gtk.gdk.BUTTON_MOTION_MASK | gtk.gdk.BUTTON_RELEASE_MASK)
		image.connect("expose-event", self.crop_image_expose_cb, crop_pixbuf, image_width, image_height)
		image.connect("motion_notify_event", self.crop_image_mouse_moved, image, 0, 0, x, y, width, height, image_width, image_height, width_adj, height_adj)
		image.connect("button_press_event", self.crop_image_button_press, image)
		image.connect("button_release_event", self.crop_image_button_release)
		self.x_changed = x.connect('value-changed', self.crop_value_changed, x, y, width, height, width_adj, height_adj, image_width, image_height, image, 0)
		self.y_changed = y.connect('value-changed', self.crop_value_changed, x, y, width, height, width_adj, height_adj, image_width, image_height, image, 1)
		self.width_changed = width.connect('value-changed', self.crop_value_changed, x, y, width, height, width_adj, height_adj, image_width, image_height, image, 2)
		self.height_changed = height.connect('value-changed', self.crop_value_changed, x, y, width, height, width_adj, height_adj, image_width, image_height, image, 3)
		image.realize()
		# Selection state shared with the crop_* callbacks:
		self.crop_rectangle = [0, 0]
		self.drawing_crop_rectangle = False
		self.update_rectangle = False
		self.rect = None
		response = dialog.run()
		if response == gtk.RESPONSE_ACCEPT:
			dialog.destroy()
			if self.rect != None:
				# Copy the selected region out of the full-size original:
				temp_pixbuf = gtk.gdk.Pixbuf(gtk.gdk.COLORSPACE_RGB, self.currimg.pixbuf_original.get_has_alpha(), 8, self.coords[2], self.coords[3])
				self.currimg.pixbuf_original.copy_area(self.coords[0], self.coords[1], self.coords[2], self.coords[3], temp_pixbuf, 0, 0)
				self.currimg.pixbuf_original = temp_pixbuf
				del temp_pixbuf
				gc.collect()
				self.load_new_image2(False, True, False, False)
				self.image_modified = True
		else:
			dialog.destroy()
	def crop_value_changed(self, currspinbox, x, y, width, height, width_adj, height_adj, image_width, image_height, image, type):
		"""Sync the crop-rectangle overlay when a crop spinbutton changes.

		`type` identifies the changed spinbutton: 0=X, 1=Y, 2=width, 3=height.
		Clamps width/height so the selection stays inside the original image,
		then redraws the preview rectangle at preview-scaled coordinates.
		"""
		if type == 0: # X
			# Shrink width if x would push the selection past the right edge:
			if x.get_value() + width.get_value() > self.currimg.pixbuf_original.get_width():
				width.handler_block(self.width_changed)
				width.set_value(self.currimg.pixbuf_original.get_width() - x.get_value())
				width.handler_unblock(self.width_changed)
		elif type == 1: # Y
			# Shrink height if y would push the selection past the bottom edge:
			if y.get_value() + height.get_value() > self.currimg.pixbuf_original.get_height():
				height.handler_block(self.height_changed)
				height.set_value(self.currimg.pixbuf_original.get_height() - y.get_value())
				height.handler_unblock(self.height_changed)
		# self.coords is in original-image pixels; crop_rectangle / x2 / y2
		# are the same box scaled to the dialog's preview size:
		self.coords = [int(x.get_value()), int(y.get_value()), int(width.get_value()), int(height.get_value())]
		self.crop_rectangle[0] = int(round(float(self.coords[0])/self.currimg.pixbuf_original.get_width()*image_width, 0))
		self.crop_rectangle[1] = int(round(float(self.coords[1])/self.currimg.pixbuf_original.get_height()*image_height, 0))
		x2 = int(round(float(self.coords[2])/self.currimg.pixbuf_original.get_width()*image_width, 0)) + self.crop_rectangle[0]
		y2 = int(round(float(self.coords[3])/self.currimg.pixbuf_original.get_height()*image_height, 0)) + self.crop_rectangle[1]
		self.drawing_crop_rectangle = True
		self.update_rectangle = True
		self.crop_image_mouse_moved(None, None, image, x2, y2, x, y, width, height, image_width, image_height, width_adj, height_adj)
		self.update_rectangle = False
		self.drawing_crop_rectangle = False
	def crop_image_expose_cb(self, image, event, pixbuf, width, height):
		"""Expose handler: repaint the scaled preview pixbuf in the crop dialog."""
		image.window.draw_pixbuf(None, pixbuf, 0, 0, 0, 0, width, height)
	def crop_image_mouse_moved(self, widget, event, image, x2, y2, x, y, width, height, image_width, image_height, width_adj, height_adj):
		"""Draw/update the rubber-band crop rectangle as the pointer moves.

		Called both as the DrawingArea's motion handler (event != None, pointer
		position taken from the event window) and programmatically from
		crop_value_changed (event == None, x2/y2 given in preview pixels).
		Updates self.rect (preview coords) and self.coords (original-image
		coords), keeping the spinbuttons in sync without re-triggering them.
		"""
		if event != None:
			x2, y2, state = event.window.get_pointer()
		if self.drawing_crop_rectangle:
			if self.crop_rectangle != None or self.update_rectangle:
				# NOTE: this local `gc` is a gtk.gdk.GC (graphics context) and
				# shadows the `gc` module inside this method.
				gc = image.window.new_gc(function=gtk.gdk.INVERT)
				if self.rect != None:
					# Get rid of the previous drawn rectangle:
					image.window.draw_rectangle(gc, False, self.rect[0], self.rect[1], self.rect[2], self.rect[3])
				# Normalize so the rectangle is anchored at its top-left,
				# whichever direction the user dragged:
				self.rect = [0, 0, 0, 0]
				if self.crop_rectangle[0] > x2:
					self.rect[0] = x2
					self.rect[2] = self.crop_rectangle[0]-x2
				else:
					self.rect[0] = self.crop_rectangle[0]
					self.rect[2] = x2-self.crop_rectangle[0]
				if self.crop_rectangle[1] > y2:
					self.rect[1] = y2
					self.rect[3] = self.crop_rectangle[1]-y2
				else:
					self.rect[1] = self.crop_rectangle[1]
					self.rect[3] = y2-self.crop_rectangle[1]
				image.window.draw_rectangle(gc, False, self.rect[0], self.rect[1], self.rect[2], self.rect[3])
				# Convert the rectangle coordinates of the current image
				# to coordinates of pixbuf_original
				if self.rect[0] < 0:
					self.rect[2] = self.rect[2] + self.rect[0]
					self.rect[0] = 0
				if self.rect[1] < 0:
					self.rect[3] = self.rect[3] + self.rect[1]
					self.rect[1] = 0
				if event != None:
					# Mouse-driven update: recompute original-image coords and
					# push them into the spinbuttons with handlers blocked so
					# crop_value_changed is not re-entered:
					self.coords = [0,0,0,0]
					self.coords[0] = int(round(float(self.rect[0])/image_width*self.currimg.pixbuf_original.get_width(), 0))
					self.coords[1] = int(round(float(self.rect[1])/image_height*self.currimg.pixbuf_original.get_height(), 0))
					self.coords[2] = int(round(float(self.rect[2])/image_width*self.currimg.pixbuf_original.get_width(), 0))
					self.coords[3] = int(round(float(self.rect[3])/image_height*self.currimg.pixbuf_original.get_height(), 0))
					if self.coords[0] + self.coords[2] > self.currimg.pixbuf_original.get_width():
						self.coords[2] = self.currimg.pixbuf_original.get_width() - self.coords[0]
					if self.coords[1] + self.coords[3] > self.currimg.pixbuf_original.get_height():
						self.coords[3] = self.currimg.pixbuf_original.get_height() - self.coords[1]
					x.handler_block(self.x_changed)
					y.handler_block(self.y_changed)
					width.handler_block(self.width_changed)
					height.handler_block(self.height_changed)
					x.set_value(self.coords[0])
					y.set_value(self.coords[1])
					width.set_value(self.coords[2])
					height.set_value(self.coords[3])
					x.handler_unblock(self.x_changed)
					y.handler_unblock(self.y_changed)
					width_adj.set_property('upper', self.currimg.pixbuf_original.get_width() - self.coords[0])
					height_adj.set_property('upper', self.currimg.pixbuf_original.get_height() - self.coords[1])
					width.handler_unblock(self.width_changed)
					height.handler_unblock(self.height_changed)
	def crop_image_button_press(self, widget, event, image):
		"""Start a new crop rectangle at the pointer on a button-1 press."""
		x, y, state = event.window.get_pointer()
		if (state & gtk.gdk.BUTTON1_MASK):
			self.drawing_crop_rectangle = True
			self.crop_rectangle = [x, y]
			# NOTE: this local `gc` is a gtk.gdk.GC (graphics context) and
			# shadows the `gc` module inside this method.
			gc = image.window.new_gc(function=gtk.gdk.INVERT)
			if self.rect != None:
				# Get rid of the previous drawn rectangle:
				image.window.draw_rectangle(gc, False, self.rect[0], self.rect[1], self.rect[2], self.rect[3])
				self.rect = None
def crop_image_button_release(self, widget, event):
x, y, state = event.window.get_pointer()
if not (state & gtk.gdk.BUTTON1_MASK):
self.drawing_crop_rectangle = False
	def saturation(self, action):
		"""Prompt for and apply a saturation change to the current image.

		Opens a modal dialog with a 0..2 saturation scale (1 = unchanged).
		Moving the scale live-previews the effect via saturation_preview();
		accepting applies it destructively to both the original and the
		displayed pixbuf, cancelling restores the unmodified view.
		"""
		dialog = gtk.Dialog(_("Saturation"), self.window, gtk.DIALOG_MODAL, (gtk.STOCK_CANCEL, gtk.RESPONSE_REJECT))
		resizebutton = dialog.add_button(_("_Saturate"), gtk.RESPONSE_ACCEPT)
		resizeimage = gtk.Image()
		resizeimage.set_from_stock(gtk.STOCK_OK, gtk.ICON_SIZE_BUTTON)
		resizebutton.set_image(resizeimage)
		# Saturation scale: 0 = grayscale, 1 = original, 2 = double saturation.
		scale = gtk.HScale()
		scale.set_draw_value(False)
		scale.set_update_policy(gtk.UPDATE_DISCONTINUOUS)
		scale.set_range(0, 2)
		scale.set_increments(0.1, 0.5)
		scale.set_value(1)
		scale.connect('value-changed', self.saturation_preview)
		label = gtk.Label(_("Saturation level:"))
		label.set_alignment(0, 0.5)
		hbox1 = gtk.HBox()
		hbox1.pack_start(label, True, True, 10)
		hbox2 = gtk.HBox()
		hbox2.pack_start(scale, True, True, 20)
		dialog.vbox.pack_start(gtk.Label(" "))
		dialog.vbox.pack_start(hbox1, False)
		dialog.vbox.pack_start(hbox2, True, True, 10)
		dialog.vbox.pack_start(gtk.Label(" "))
		dialog.set_default_response(gtk.RESPONSE_ACCEPT)
		dialog.vbox.show_all()
		response = dialog.run()
		if response == gtk.RESPONSE_ACCEPT:
			# Apply in place; the False argument disables pixelation, so
			# only the saturation is changed.
			self.currimg.pixbuf_original.saturate_and_pixelate(self.currimg.pixbuf_original, scale.get_value(), False)
			self.currimg.pixbuf.saturate_and_pixelate(self.currimg.pixbuf, scale.get_value(), False)
			self.imageview.set_from_pixbuf(self.currimg.pixbuf)
			self.image_modified = True
			dialog.destroy()
		else:
			# Cancelled: re-show the unmodified pixbuf (a preview may have run).
			self.imageview.set_from_pixbuf(self.currimg.pixbuf)
			dialog.destroy()
def saturation_preview(self, range):
while gtk.events_pending():
gtk.main_iteration()
try:
bak = self.currimg.pixbuf.copy()
self.currimg.pixbuf.saturate_and_pixelate(self.currimg.pixbuf, range.get_value(), False)
self.imageview.set_from_pixbuf(self.currimg.pixbuf)
self.currimg.pixbuf = bak.copy()
del bak
except:
pass
gc.collect()
	def resize_image(self, action):
		"""Show the Resize dialog and scale the current image on accept.

		The dialog has width/height spinbuttons (optionally locked to the
		image aspect ratio), a small thumbnail preview, and on accept
		rescales pixbuf_original in place and reloads the view.
		"""
		dialog = gtk.Dialog(_("Resize Image"), self.window, gtk.DIALOG_MODAL, (gtk.STOCK_CANCEL, gtk.RESPONSE_REJECT))
		resizebutton = dialog.add_button(_("_Resize"), gtk.RESPONSE_ACCEPT)
		resizeimage = gtk.Image()
		resizeimage.set_from_stock(gtk.STOCK_OK, gtk.ICON_SIZE_BUTTON)
		resizebutton.set_image(resizeimage)
		# Width spinbutton, initialized to the image's current width:
		hbox_width = gtk.HBox()
		width_adj = gtk.Adjustment(self.currimg.pixbuf_original.get_width(), 1, 100000000000, 1, 10, 0)
		width = gtk.SpinButton(width_adj, 0, 0)
		width.set_numeric(True)
		width.set_update_policy(gtk.UPDATE_IF_VALID)
		width.set_wrap(False)
		width_label = gtk.Label(_("Width:"))
		width_label.set_alignment(0, 0.7)
		hbox_width.pack_start(width_label, False, False, 10)
		hbox_width.pack_start(width, False, False, 0)
		hbox_width.pack_start(gtk.Label(_("pixels")), False, False, 10)
		# Height spinbutton, initialized to the image's current height:
		hbox_height = gtk.HBox()
		height_adj = gtk.Adjustment(self.currimg.pixbuf_original.get_height(), 1, 100000000000, 1, 10, 0)
		height = gtk.SpinButton(height_adj, 0, 0)
		height.set_numeric(True)
		height.set_update_policy(gtk.UPDATE_IF_VALID)
		height.set_wrap(False)
		height_label = gtk.Label(_("Height:"))
		# Make both labels the same width so the spinbuttons line up:
		width_label.set_size_request(height_label.size_request()[0], -1)
		height_label.set_alignment(0, 0.7)
		hbox_height.pack_start(height_label, False, False, 10)
		hbox_height.pack_start(height, False, False, 0)
		hbox_height.pack_start(gtk.Label(_("pixels")), False, False, 10)
		hbox_aspect = gtk.HBox()
		aspect_checkbox = gtk.CheckButton(_("Preserve aspect ratio"))
		aspect_checkbox.set_active(self.preserve_aspect)
		hbox_aspect.pack_start(aspect_checkbox, False, False, 10)
		vbox = gtk.VBox()
		vbox.pack_start(gtk.Label(), False, False, 0)
		vbox.pack_start(hbox_width, False, False, 0)
		vbox.pack_start(hbox_height, False, False, 0)
		vbox.pack_start(gtk.Label(), False, False, 0)
		vbox.pack_start(hbox_aspect, False, False, 0)
		vbox.pack_start(gtk.Label(), False, False, 0)
		hbox_total = gtk.HBox()
		# Build a 96px thumbnail preview; for animations use the static frame:
		animtest = gtk.gdk.PixbufAnimation(self.currimg.name)
		if animtest.is_static_image():
			pixbuf, image_width, image_height = self.get_pixbuf_of_size(self.currimg.pixbuf_original, 96, self.zoom_quality)
		else:
			pixbuf, image_width, image_height = self.get_pixbuf_of_size(animtest.get_static_image(), 96, self.zoom_quality)
		image = gtk.Image()
		image.set_from_pixbuf(self.pixbuf_add_border(pixbuf))
		hbox_total.pack_start(image, False, False, 10)
		hbox_total.pack_start(vbox, False, False, 10)
		dialog.vbox.pack_start(hbox_total, False, False, 0)
		# Keep width/height synchronized when aspect preservation is on:
		width.connect('value-changed', self.preserve_image_aspect, "width", height)
		height.connect('value-changed', self.preserve_image_aspect, "height", width)
		aspect_checkbox.connect('toggled', self.aspect_ratio_toggled, width, height)
		dialog.set_default_response(gtk.RESPONSE_ACCEPT)
		dialog.vbox.show_all()
		response = dialog.run()
		if response == gtk.RESPONSE_ACCEPT:
			pixelheight = height.get_value_as_int()
			pixelwidth = width.get_value_as_int()
			dialog.destroy()
			# Rescale in place and reload the view from the new original:
			self.currimg.pixbuf_original = self.currimg.pixbuf_original.scale_simple(pixelwidth, pixelheight, self.zoom_quality)
			self.load_new_image2(False, True, False, False)
			self.image_modified = True
		else:
			dialog.destroy()
def aspect_ratio_toggled(self, togglebutton, width, height):
self.preserve_aspect = togglebutton.get_active()
if self.preserve_aspect:
# Set height based on width and aspect ratio
target_value = float(width.get_value_as_int())/self.currimg.pixbuf_original.get_width()
target_value = int(target_value * self.currimg.pixbuf_original.get_height())
self.ignore_preserve_aspect_callback = True
height.set_value(target_value)
self.ignore_preserve_aspect_callback = False
def preserve_image_aspect(self, currspinbox, type, otherspinbox):
if not self.preserve_aspect:
return
if self.ignore_preserve_aspect_callback:
return
if type == "width":
target_value = float(currspinbox.get_value_as_int())/self.currimg.pixbuf_original.get_width()
target_value = int(target_value * self.currimg.pixbuf_original.get_height())
else:
target_value = float(currspinbox.get_value_as_int())/self.currimg.pixbuf_original.get_height()
target_value = int(target_value * self.currimg.pixbuf_original.get_width())
self.ignore_preserve_aspect_callback = True
otherspinbox.set_value(target_value)
self.ignore_preserve_aspect_callback = False
	# Navigation wrappers: each maps a menu/toolbar/slideshow action onto
	# goto_image() with the appropriate symbolic location string.
	def goto_prev_image(self, action):
		self.goto_image("PREV", action)
	def goto_next_image(self, action, through_timeout=False):
		self.goto_image("NEXT", action, through_timeout)
	def goto_random_image(self, action, through_timeout=False):
		self.goto_image("RANDOM", action, through_timeout)
	def goto_first_image(self, action):
		self.goto_image("FIRST", action)
	def goto_last_image(self, action):
		self.goto_image("LAST", action)
	def goto_first_image_prev_subfolder(self, action):
		self.goto_image("PREV_SUBFOLDER", action)
	def goto_first_image_next_subfolder(self, action):
		self.goto_image("NEXT_SUBFOLDER", action)
def goto_image(self, location, action, called_by_timeout=False):
"""Goes to the image specified by location. Location can be "LAST",
"FIRST", "NEXT", "PREV", "RANDOM", or a number. If at last image
and "NEXT" is issued, it will wrap around or not depending on
self.usettings['listwrap_mode']. Same action is made for first image
and "PREV". """
if self.slideshow_mode and action != "ss":
gobject.source_remove(self.timer_delay)
if ((location=="PREV" or location=="NEXT" or location=="RANDOM") and len(self.image_list) > 1) or ((location == "PREV_SUBFOLDER" or location == "NEXT_SUBFOLDER") and len(self.firstimgindex_subfolders_list) >= 2) or (location=="FIRST" and (len(self.image_list) > 1 and self.curr_img_in_list != 0)) or (location=="LAST" and (len(self.image_list) > 1 and self.curr_img_in_list != len(self.image_list)-1)) or valid_int(location):
self.load_new_image_stop_now()
cancel = self.autosave_image()
if cancel:
return
check_wrap = False
prev_img = self.curr_img_in_list
if location != "RANDOM":
self.randomlist = []
if location == "FIRST":
self.curr_img_in_list = 0
elif location == "RANDOM":
if self.randomlist == []:
self.reinitialize_randomlist()
else:
# check if we have seen every image; if so, reinitialize array and repeat:
all_items_are_true = True
for item in self.randomlist:
if not item:
all_items_are_true = False
if all_items_are_true:
check_wrap = True
elif location == "LAST":
self.curr_img_in_list = len(self.image_list)-1
elif location == "PREV":
if self.curr_img_in_list > 0:
self.curr_img_in_list -= 1
else:
check_wrap = True
elif location == "NEXT":
if self.curr_img_in_list < len(self.image_list) - 1:
self.curr_img_in_list += 1
else:
check_wrap = True
elif location == "PREV_SUBFOLDER":
if self.curr_img_in_list >= self.firstimgindex_subfolders_list[1]: #not in first subfolder
self.curr_img_in_list = self.get_firstimgindex_curr_next_prev_subfolder(self.curr_img_in_list)[-1]
else: #in first subfolder
check_wrap = True
elif location == "NEXT_SUBFOLDER":
if self.curr_img_in_list < self.firstimgindex_subfolders_list[-1]: #not in last subfolder
self.curr_img_in_list = self.get_firstimgindex_curr_next_prev_subfolder(self.curr_img_in_list)[1]
else: #in last subfolder
check_wrap = True
if check_wrap: #we are at the beginning or end of the list or all images have been viewed in random mode
if self.usettings['listwrap_mode'] == 0:
if self.slideshow_mode and ((action == "ss" and (location == "NEXT" or location == "RANDOM")) or (action != "ss" and location == "NEXT")): #automatic next/random action or manual next action, stop slideshow
self.toggle_slideshow(None)
return
elif self.slideshow_mode and action != "ss" and location == "PREV": #manual prev action, keep slideshow going
pass
elif not self.slideshow_mode and action != "ss" and (location == "PREV" or location == "NEXT"): #manual prev/next action, ignore as if not pressed
return
elif not self.slideshow_mode and action != "ss" and location == "RANDOM": #always next random image when pressing 'R'
self.reinitialize_randomlist()
elif self.usettings['listwrap_mode'] == 1:
if location == "PREV":
self.curr_img_in_list = len(self.image_list) - 1
elif location == "NEXT" or location == "NEXT_SUBFOLDER":
self.curr_img_in_list = 0
elif location == "PREV_SUBFOLDER":
self.curr_img_in_list = self.firstimgindex_subfolders_list[-1]
elif location == "RANDOM": #always next random image
self.reinitialize_randomlist()
elif self.usettings['listwrap_mode'] == 2:
if self.curr_img_in_list != self.loaded_img_in_list:
# Ensure that the user is looking at the correct "last" image before
# they are asked the wrap question:
if location == "PREV":
self.load_new_image(True, False, True, True, True, True)
else:
self.load_new_image(False, False, True, True, True, True)
self.set_go_navigation_sensitivities(False)
self.thumbpane_select(self.curr_img_in_list)
if self.fullscreen_mode:
self.change_cursor(None)
if location == "PREV":
dialog = gtk.MessageDialog(self.window, gtk.DIALOG_MODAL | gtk.DIALOG_DESTROY_WITH_PARENT, gtk.MESSAGE_QUESTION, gtk.BUTTONS_YES_NO, _("You are viewing the first image in the list. Wrap around to the last image?"))
elif location == "NEXT":
dialog = gtk.MessageDialog(self.window, gtk.DIALOG_MODAL | gtk.DIALOG_DESTROY_WITH_PARENT, gtk.MESSAGE_QUESTION, gtk.BUTTONS_YES_NO, _("You are viewing the last image in the list. Wrap around to the first image?"))
elif location == "PREV_SUBFOLDER":
dialog = gtk.MessageDialog(self.window, gtk.DIALOG_MODAL | gtk.DIALOG_DESTROY_WITH_PARENT, gtk.MESSAGE_QUESTION, gtk.BUTTONS_YES_NO, _("You are viewing the first folder in the list. Wrap around to the first image of the last folder?"))
elif location == "NEXT_SUBFOLDER":
dialog = gtk.MessageDialog(self.window, gtk.DIALOG_MODAL | gtk.DIALOG_DESTROY_WITH_PARENT, gtk.MESSAGE_QUESTION, gtk.BUTTONS_YES_NO, _("You are viewing the last folder in the list. Wrap around to the first image of the first folder?"))
elif location == "RANDOM":
dialog = gtk.MessageDialog(self.window, gtk.DIALOG_MODAL | gtk.DIALOG_DESTROY_WITH_PARENT, gtk.MESSAGE_QUESTION, gtk.BUTTONS_YES_NO, _("All images have been viewed. Would you like to cycle through the images again?"))
dialog.set_title(_("Wrap?"))
dialog.label.set_property('can-focus', False)
dialog.set_default_response(gtk.RESPONSE_YES)
# Wrapping dialog.run() and .destroy() in .threads_enter()/leave() to prevent a hangup on linux
# Could also be done with 'with gtk.gdk.lock:' but that doesn't work on windows.
try:
if called_by_timeout:
gtk.gdk.threads_enter()
self.user_prompt_visible = True
response = dialog.run()
dialog.destroy()
except:
response = None
finally:
if called_by_timeout:
gtk.gdk.threads_leave()
self.user_prompt_visible = False
if response == gtk.RESPONSE_YES:
if location == "PREV":
self.curr_img_in_list = len(self.image_list)-1
elif location == "NEXT" or location == "NEXT_SUBFOLDER":
self.curr_img_in_list = 0
elif location == "PREV_SUBFOLDER":
self.curr_img_in_list = self.firstimgindex_subfolders_list[-1]
elif location == "RANDOM":
self.reinitialize_randomlist()
if self.fullscreen_mode:
self.hide_cursor
else:
if self.fullscreen_mode:
self.hide_cursor
else:
self.change_cursor(None)
if self.slideshow_mode and action != "ss" and location == "PREV": #manual prev action, keep slideshow going
pass
elif self.slideshow_mode and ((action == "ss" and (location == "NEXT" or location == "RANDOM")) or (action != "ss" and location == "NEXT")): #automatic next/random action or manual next action, stop slideshow
self.toggle_slideshow(None)
return
elif not self.slideshow_mode and action != "ss" and (location == "PREV" or location == "NEXT" or location == "RANDOM"): #manual prev/next/random action, ignore as if not pressed
return
if location == "RANDOM":
# Find random image that hasn't already been chosen:
j = random.randint(0, len(self.image_list)-1)
while self.randomlist[j]:
j = random.randint(0, len(self.image_list)-1)
self.curr_img_in_list = j
self.randomlist[j] = True
self.currimg.name = str(self.image_list[self.curr_img_in_list])
if valid_int(location):
self.curr_img_in_list = int(location)
if self.curr_img_in_list != prev_img: #don't load the same image again if already loaded
if not self.fullscreen_mode and (not self.slideshow_mode or (self.slideshow_mode and action != "ss")):
self.change_cursor(gtk.gdk.Cursor(gtk.gdk.WATCH))
if location == "PREV" or (valid_int(location) and int(location) == prev_img-1):
self.load_when_idle = gobject.idle_add(self.load_new_image, True, False, True, True, True, True)
else:
self.load_when_idle = gobject.idle_add(self.load_new_image, False, False, True, True, True, True)
self.set_go_navigation_sensitivities(False)
if self.slideshow_mode:
if self.curr_slideshow_random:
self.timer_delay = gobject.timeout_add(int(self.curr_slideshow_delay*1000), self.goto_random_image, "ss",True)
else:
self.timer_delay = gobject.timeout_add(int(self.curr_slideshow_delay*1000), self.goto_next_image, "ss", True)
gobject.idle_add(self.thumbpane_select, self.curr_img_in_list)
	def set_go_navigation_sensitivities(self, skip_initial_check):
		"""Enable/disable the prev/next/first/last/random/subfolder actions
		based on the current position in the image list and the wrap mode.

		listwrap_mode 0 (no wrap) disables navigation past either end;
		modes 1 and 2 keep it enabled because wrapping is possible.
		"""
		# setting skip_image_list_check to True is useful when calling from
		# expand_filelist_and_load_image() for example, as self.image_list has not
		# yet fully populated
		if (not self.image_loaded or len(self.image_list) == 1) and not skip_initial_check:
			# No image or a single image: nothing to navigate to.
			self.set_common_image_sensitivities(False)
			self.set_previous_subfolder_sensitivities(False)
			self.set_next_subfolder_sensitivities(False)
		elif self.curr_img_in_list == 0:
			# At the first image:
			if self.usettings['listwrap_mode'] == 0:
				self.set_previous_image_sensitivities(False)
				self.set_previous_subfolder_sensitivities(False)
			else:
				self.set_previous_image_sensitivities(True)
				if len(self.firstimgindex_subfolders_list) >= 2: #subfolders
					self.set_previous_subfolder_sensitivities(True)
				else: #no subfolders
					self.set_previous_subfolder_sensitivities(False)
			self.set_first_image_sensitivities(False)
			self.set_next_image_sensitivities(True)
			self.set_last_image_sensitivities(True)
			self.set_random_image_sensitivities(True)
			if len(self.firstimgindex_subfolders_list) >= 2: #subfolders
				self.set_next_subfolder_sensitivities(True)
			else: #no subfolders
				self.set_next_subfolder_sensitivities(False)
		elif self.curr_img_in_list == len(self.image_list)-1:
			# At the last image:
			self.set_previous_image_sensitivities(True)
			self.set_first_image_sensitivities(True)
			if self.usettings['listwrap_mode'] == 0:
				self.set_next_image_sensitivities(False)
				self.set_next_subfolder_sensitivities(False)
			else:
				self.set_next_image_sensitivities(True)
				if len(self.firstimgindex_subfolders_list) >= 2: #subfolders
					self.set_next_subfolder_sensitivities(True)
				else: #no subfolders
					self.set_next_subfolder_sensitivities(False)
			self.set_last_image_sensitivities(False)
			self.set_random_image_sensitivities(True)
		elif len(self.firstimgindex_subfolders_list) >= 2 and self.curr_img_in_list < self.firstimgindex_subfolders_list[1]: #first subfolder
			self.set_common_image_sensitivities(True)
			if self.usettings['listwrap_mode'] == 0:
				self.set_previous_subfolder_sensitivities(False)
			else:
				self.set_previous_subfolder_sensitivities(True)
			self.set_next_subfolder_sensitivities(True)
		elif len(self.firstimgindex_subfolders_list) >= 2 and self.curr_img_in_list >= self.firstimgindex_subfolders_list[-1]: #last subfolder
			self.set_common_image_sensitivities(True)
			self.set_previous_subfolder_sensitivities(True)
			if self.usettings['listwrap_mode'] == 0:
				self.set_next_subfolder_sensitivities(False)
			else:
				self.set_next_subfolder_sensitivities(True)
		else: #inbetween first and last image/subfolder
			self.set_common_image_sensitivities(True)
			if len(self.firstimgindex_subfolders_list) >= 2: #subfolders
				self.set_previous_subfolder_sensitivities(True)
				self.set_next_subfolder_sensitivities(True)
			else: #no subfolders
				self.set_previous_subfolder_sensitivities(False)
				self.set_next_subfolder_sensitivities(False)
def set_common_image_sensitivities(self, enable):
self.set_previous_image_sensitivities(enable)
self.set_first_image_sensitivities(enable)
self.set_next_image_sensitivities(enable)
self.set_last_image_sensitivities(enable)
self.set_random_image_sensitivities(enable)
def reinitialize_randomlist(self):
self.randomlist = []
for i in range(len(self.image_list)):
self.randomlist.append(False)
self.randomlist[self.curr_img_in_list] = True
def image_load_failed(self, reset_cursor, filename=""):
# If a filename is provided, use it for display:
if len(filename) == 0:
self.currimg.name = str(self.image_list[self.curr_img_in_list])
else:
self.currmg_name = filename
if self.verbose and self.currimg.name != "":
print _("Loading: %s") % self.currimg.name
self.update_title()
self.put_error_image_to_window()
self.image_loaded = False
self.currimg.pixbuf_original = None
if reset_cursor:
if not self.fullscreen_mode:
self.change_cursor(None)
def load_new_image_stop_now(self):
try:
gobject.source_remove(self.load_when_idle)
except:
pass
try:
gobject.source_remove(self.preload_when_idle)
except:
pass
try:
gobject.source_remove(self.preload_when_idle2)
except:
pass
def load_new_image(self, check_prev_last, use_current_pixbuf_original, reset_cursor, perform_onload_action, preload_next_image_after, preload_prev_image_after):
try:
self.load_new_image2(check_prev_last, use_current_pixbuf_original, reset_cursor, perform_onload_action)
except:
self.image_load_failed(True)
if preload_next_image_after:
self.preload_when_idle = gobject.idle_add(self.preload_next_image, False)
if preload_prev_image_after:
self.preload_when_idle2 = gobject.idle_add(self.preload_prev_image, False)
def check_preloadimg_prev_for_existing(self, prev_index, reset_preloadimg_prev_in_list):
# Determines if preloadimg_prev needs to be updated; if so,
# checks if the image is already stored in self.currimg
# or self.preloadimg_next and can be reused.
reset_preloadimg_prev_in_list = False
if prev_index != self.previmg.index and prev_index != -1:
# Need to update preloadimg_prev:
if prev_index == self.loaded_img_in_list and not self.image_modified and not self.image_zoomed:
self.previmg = self.currimg
self.previmg.index = self.loaded_img_in_list
elif prev_index == self.nextimg.index:
self.previmg = self.nextimg
else:
reset_preloadimg_prev_in_list = True
elif prev_index == -1:
reset_preloadimg_prev_in_list = True
def check_preloadimg_next_for_existing(self, next_index, reset_preloadimg_next_in_list):
# Determines if preloadimg_next needs to be updated; if so,
# checks if the image is already stored in self.currimg
# or self.preloadimg_prev and can be reused.
reset_preloadimg_next_in_list = False
if next_index != self.nextimg.index and next_index != -1:
# Need to update preloadimg_next:
if next_index == self.loaded_img_in_list and not self.image_modified and not self.image_zoomed:
self.nextimg = self.currimg
self.nextimg.index = self.loaded_img_in_list
elif next_index == self.previmg.index:
self.nextimg = self.previmg
else:
reset_preloadimg_next_in_list = True
elif next_index == -1:
reset_preloadimg_next_in_list = True
def check_currimg_for_existing(self):
# Determines if currimg needs to be updated; if so,
# checks if the image is already stored in self.preloadimg_next
# or self.preloadimg_prev and can be reused (the whole point of
# preloading!)
used_prev = False
used_next = False
if self.curr_img_in_list != self.loaded_img_in_list:
# Need to update currimg:
if self.curr_img_in_list == self.previmg.index:
# Set preload_prev_image as current image
self.currimg = self.previmg
used_prev = True
if self.verbose and self.currimg.name != "":
print _("Loading: %s") % self.currimg.name
self.put_zoom_image_to_window(True)
if not self.currimg.animation:
self.set_image_sensitivities(True)
else:
self.set_image_sensitivities(False)
elif self.curr_img_in_list == self.nextimg.index:
# Use preload_next_image as current image
self.currimg = self.previmg
used_next = True
if self.verbose and self.currimg.name != "":
print _("Loading: %s") % self.currimg.name
self.put_zoom_image_to_window(True)
if not self.currimg.animation:
self.set_image_sensitivities(True)
else:
self.set_image_sensitivities(False)
return used_prev, used_next
def load_new_image2(self, check_prev_last, use_current_pixbuf_original, reset_cursor, perform_onload_action, skip_recentfiles=False):
# check_prev_last is used to determine if we should check whether
# preloadimg_prev can be reused last. This should really only
# be done if the user just clicked the previous image button in
# order to reduce the number of image loads.
# If use_current_pixbuf_original == True, do not reload the
# self.currimg.pixbuf_original from the file; instead, use the existing
# one. This is only currently useful for resizing images.
# Determine the indices in the self.image_list array for the
# previous and next preload images.
next_index = self.curr_img_in_list + 1
if next_index > len(self.image_list)-1:
if self.usettings['listwrap_mode'] == 0:
next_index = -1
else:
next_index = 0
prev_index = self.curr_img_in_list - 1
if prev_index < 0:
if self.usettings['listwrap_mode'] == 0:
prev_index = -1
else:
prev_index = len(self.image_list)-1
if self.usettings['preloading_images']:
reset_preloadimg_next_in_list = False
reset_preloadimg_prev_in_list = False
if check_prev_last:
self.check_preloadimg_next_for_existing(next_index, reset_preloadimg_next_in_list)
else:
self.check_preloadimg_prev_for_existing(prev_index, reset_preloadimg_prev_in_list)
used_prev, used_next = self.check_currimg_for_existing()
if self.usettings['preloading_images']:
if check_prev_last:
self.check_preloadimg_prev_for_existing(prev_index, reset_preloadimg_prev_in_list)
else:
self.check_preloadimg_next_for_existing(next_index, reset_preloadimg_next_in_list)
if reset_preloadimg_prev_in_list:
self.previmg.index = -1
if reset_preloadimg_next_in_list:
self.nextimg.index = -1
if used_prev or used_next:
# If we used a preload image, set the correct boolean variables
if self.usettings['open_mode'] == self.open_mode_smart or (self.usettings['open_mode'] == self.open_mode_last and self.usettings['last_mode'] == self.open_mode_smart):
self.last_image_action_was_fit = True
self.last_image_action_was_smart_fit = True
elif self.usettings['open_mode'] == self.open_mode_fit or (self.usettings['open_mode'] == self.open_mode_last and self.usettings['last_mode'] == self.open_mode_fit):
self.last_image_action_was_fit = True
self.last_image_action_was_smart_fit = False
elif self.usettings['open_mode'] == self.open_mode_1to1 or (self.usettings['open_mode'] == self.open_mode_last and self.usettings['last_mode'] == self.open_mode_1to1):
self.last_image_action_was_fit = False
else:
# Need to load the current image
self.currimg.pixbuf = None
self.currimg.zoomratio = 1
try:
self.currimg.name = str(self.image_list[self.curr_img_in_list])
except:
pass
if self.verbose and self.currimg.name != "":
print _("Loading: %s") % self.currimg.name
animtest = gtk.gdk.PixbufAnimation(self.currimg.name)
if animtest.is_static_image() or (use_current_pixbuf_original and not self.currimg.animation):
self.currimg.animation = False
if not use_current_pixbuf_original:
self.currimg.pixbuf_original = animtest.get_static_image()
self.set_image_sensitivities(True)
# Check zoomratio
self.zoom_check_and_execute(None, False, False)
else:
self.currimg.animation = True
if not use_current_pixbuf_original:
self.currimg.pixbuf_original = animtest
self.zoom_1_to_1(None, False, False)
self.set_image_sensitivities(False)
if self.onload_cmd != None and perform_onload_action:
self.parse_action_command(self.onload_cmd, False)
self.update_statusbar()
self.update_title()
self.image_loaded = True
self.image_modified = False
self.image_zoomed = False
self.set_slideshow_sensitivities()
if not skip_recentfiles:
self.register_file_with_recent_docs(self.currimg.name)
if reset_cursor:
if not self.fullscreen_mode:
self.change_cursor(None)
def preload_next_image(self, use_existing_image):
try:
if self.usettings['preloading_images'] and len(self.image_list) > 1:
if not use_existing_image:
next_index = self.curr_img_in_list + 1
if next_index > len(self.image_list)-1:
if self.usettings['listwrap_mode'] == 0:
self.nextimg.index == -1
return
else:
next_index = 0
if next_index == self.nextimg.index:
return
self.nextimg.index = next_index
self.nextimg.name = str(self.image_list[next_index])
pre_animtest = gtk.gdk.PixbufAnimation(self.nextimg.name)
if pre_animtest.is_static_image():
self.nextimg.animation = False
self.nextimg.pixbuf_original = pre_animtest.get_static_image()
else:
self.nextimg.animation = True
self.nextimg.pixbuf_original = pre_animtest
if self.nextimg.index == -1:
return
# Determine self.nextimg.zoomratio
self.zoom_check_and_execute(None, True, False)
# Always start with the original image to preserve quality!
# Calculate image size:
self.nextimg.width = int(self.nextimg.pixbuf_original.get_width() * self.nextimg.zoomratio)
self.nextimg.height = int(self.nextimg.pixbuf_original.get_height() * self.nextimg.zoomratio)
if not self.nextimg.animation:
# Scale image:
if not self.nextimg.pixbuf_original.get_has_alpha():
self.nextimg.pixbuf = self.nextimg.pixbuf_original.scale_simple(self.nextimg.width, self.nextimg.height, self.zoom_quality)
else:
colormap = self.imageview.get_colormap()
light_grey = colormap.alloc_color('#666666', True, True)
dark_grey = colormap.alloc_color('#999999', True, True)
self.nextimg.pixbuf = self.nextimg.pixbuf_original.composite_color_simple(self.nextimg.width, self.nextimg.height, self.zoom_quality, 255, 8, light_grey.pixel, dark_grey.pixel)
else:
self.nextimg.pixbuf = self.nextimg.pixbuf_original
gc.collect()
if self.verbose:
print _("Preloading: %s") % self.nextimg.name
except:
self.nextimg.index = -1
def preload_prev_image(self, use_existing_image):
try:
if self.usettings['preloading_images'] and len(self.image_list) > 1:
if not use_existing_image:
prev_index = self.curr_img_in_list - 1
if prev_index < 0:
if self.usettings['listwrap_mode'] == 0:
self.previmg.index == -1
return
else:
prev_index = len(self.image_list)-1
if prev_index == self.previmg.index:
return
self.previmg.index = prev_index
self.previmg.name = str(self.image_list[prev_index])
pre_animtest = gtk.gdk.PixbufAnimation(self.previmg.name)
if pre_animtest.is_static_image():
self.previmg.animation = False
self.previmg.pixbuf_original = pre_animtest.get_static_image()
else:
self.previmg.animation = True
self.previmg.pixbuf_original = pre_animtest
if self.previmg.index == -1:
return
# Determine self.previmg.zoomratio
self.zoom_check_and_execute(None, False, True)
# Always start with the original image to preserve quality!
# Calculate image size:
self.previmg.width = int(self.previmg.pixbuf_original.get_width() * self.previmg.zoomratio)
self.previmg.height = int(self.previmg.pixbuf_original.get_height() * self.previmg.zoomratio)
if not self.previmg.animation:
# Scale image:
if not self.previmg.pixbuf_original.get_has_alpha():
self.previmg.pixbuf = self.previmg.pixbuf_original.scale_simple(self.previmg.width, self.previmg.height, self.zoom_quality)
else:
colormap = self.imageview.get_colormap()
light_grey = colormap.alloc_color('#666666', True, True)
dark_grey = colormap.alloc_color('#999999', True, True)
self.previmg.pixbuf = self.previmg.pixbuf_original.composite_color_simple(self.previmg.width, self.previmg.height, self.zoom_quality, 255, 8, light_grey.pixel, dark_grey.pixel)
else:
self.previmg.pixbuf = self.previmg.pixbuf_original
gc.collect()
if self.verbose:
print _("Preloading: %s") % self.previmg.name
except:
self.previmg.index = -1
def change_cursor(self, type):
for i in gtk.gdk.window_get_toplevels():
if i.get_window_type() != gtk.gdk.WINDOW_TEMP and i.get_window_type() != gtk.gdk.WINDOW_CHILD:
i.set_cursor(type)
self.layout.window.set_cursor(type)
def expand_filelist_and_load_image(self, inputlist):
    """Expand *inputlist* (files, directories, file:/http:/ftp: URLs) into
    self.image_list, loading and showing the first usable image as early as
    possible and preloading the second.

    Side effects: rewrites self.image_list, self.curr_img_in_list,
    self.firstimgindex_subfolders_list, preload state, statusbar, title and
    cursor. GTK events are pumped throughout so the UI stays responsive.
    """
    # Takes the current list (i.e. ["pic.jpg", "pic2.gif", "../images"]) and
    # expands it into a list of all pictures found
    self.thumblist.clear()
    first_image_loaded_successfully = False
    self.images_found = 0
    self.stop_now = True  # Make sure that any previous search process is stopped
    self.change_cursor(gtk.gdk.Cursor(gtk.gdk.WATCH))
    # Reset preload images:
    self.nextimg.index = -1
    self.previmg.index = -1
    # If any directories were passed, display "Searching..." in statusbar:
    self.searching_for_images = False
    for item in inputlist:
        if os.path.isdir(item):
            self.searching_for_images = True
    self.update_statusbar()
    if not self.closing_app:
        while gtk.events_pending():
            gtk.main_iteration()
    # Tracking for the first two images encountered: the first is shown
    # immediately, the second is preloaded for fast navigation.
    first_image = ""
    first_image_found = False
    first_image_loaded = False
    second_image = ""
    second_image_found = False
    second_image_preloaded = False
    self.randomlist = []
    folderlist = []
    self.image_list = []
    self.curr_img_in_list = 0
    go_buttons_enabled = False
    self.set_go_sensitivities(False)
    # Clean up list (remove preceding "file://" or "file:" and trailing "/")
    for itemnum in range(len(inputlist)):
        # Strip off preceding file..
        if inputlist[itemnum].startswith('file://'):
            inputlist[itemnum] = inputlist[itemnum][7:]
        elif inputlist[itemnum].startswith('file:'):
            inputlist[itemnum] = inputlist[itemnum][5:]
        # Strip off trailing "/" if it exists:
        if inputlist[itemnum][len(inputlist[itemnum])-1] == "/":
            inputlist[itemnum] = inputlist[itemnum][:(len(inputlist[itemnum])-1)]
        if not (inputlist[itemnum].startswith('http://') or inputlist[itemnum].startswith('ftp://')):
            inputlist[itemnum] = os.path.abspath(inputlist[itemnum])
        else:
            try:
                # Remote file. Save as /tmp/mirage-<random>/filename.ext
                tmpdir = tempfile.mkdtemp(prefix="mirage-") + "/"
                tmpfile = tmpdir + os.path.basename(inputlist[itemnum])
                socket.setdefaulttimeout(5)
                urllib.urlretrieve(inputlist[itemnum], tmpfile)
                inputlist[itemnum] = tmpfile
            except:
                # Best-effort download; an unreachable URL is simply kept
                # as-is and will fail validation later.
                pass
    # Remove hidden files from list:
    if not self.usettings['open_hidden_files']:
        tmplist = []
        for item in inputlist:
            if os.path.basename(item)[0] != '.':
                tmplist.append(item)
            elif self.verbose:
                print _("Skipping: %s") % item
        inputlist = tmplist
    if len(inputlist) == 0:
        # All files/dirs were hidden, exit..
        self.currimg.name = ""
        self.searching_for_images = False
        self.set_go_navigation_sensitivities(False)
        self.set_slideshow_sensitivities()
        if not self.closing_app:
            self.change_cursor(None)
        self.recursive = False
        self.put_error_image_to_window()
        self.update_title()
        return
    # Quick-load the very first item so the user sees something while the
    # (possibly long) directory scan below runs.
    init_image = os.path.abspath(inputlist[0])
    if self.valid_image(init_image):
        try:
            self.currimg.name = init_image
            self.load_new_image2(False, False, True, True)
            # Calling load_new_image2 will reset the following two vars
            # to 0, so ensure they are -1 again (no images preloaded)
            self.previmg.index = -1
            self.nextimg.index = -1
            if not self.currimg.animation:
                self.previmg_width = self.currimg.pixbuf.get_width()
            else:
                self.previmg_width = self.currimg.pixbuf.get_static_image().get_width()
            self.image_loaded = True
            first_image_loaded_successfully = True
            print "quickloaded image ahead of imagelist"
            if not self.closing_app:
                while gtk.events_pending():
                    gtk.main_iteration(True)
        except:
            pass
    self.stop_now = False
    # If open all images in dir...
    if self.usettings['open_all_images']:
        temp = inputlist
        inputlist = []
        for item in temp:
            if os.path.isfile(item):
                # Expand the file's containing directory (non-recursively)
                # into self.image_list.
                itempath = os.path.dirname(os.path.abspath(item))
                temp = self.recursive
                self.recursive = False
                self.stop_now = False
                self.expand_directory(itempath, False, go_buttons_enabled, False, False)
                self.recursive = temp
            else:
                inputlist.append(item)
        for item in self.image_list:
            inputlist.append(item)
            if first_image_found and not second_image_found:
                second_image_found = True
                second_image = item
                second_image_came_from_dir = False
            if item == init_image:
                first_image_found = True
                first_image = item
                first_image_came_from_dir = False
                self.curr_img_in_list = len(inputlist)-1
        self.image_list = []
    for item in inputlist:
        if not self.closing_app:
            if os.path.isfile(item):
                if self.valid_image(item):
                    if not second_image_found and first_image_found:
                        second_image_found = True
                        second_image = item
                        second_image_came_from_dir = False
                    if not first_image_found:
                        first_image_found = True
                        first_image = item
                        first_image_came_from_dir = False
                    self.image_list.append(item)
                    if self.verbose:
                        self.images_found += 1
                        print _("Found: %(item)s [%(number)i]") % {'item': item, 'number': self.images_found}
            else:
                # If it's a directory that was explicitly selected or passed to
                # the program, get all the files in the dir.
                # Retrieve only images in the top directory specified by the user
                # unless explicitly told to recurse (via -R or in Settings>Preferences)
                folderlist.append(item)
                if not second_image_found:
                    # See if we can find an image in this directory:
                    self.stop_now = False
                    self.expand_directory(item, True, go_buttons_enabled, False, False)
                    itemnum = 0
                    while itemnum < len(self.image_list) and not second_image_found:
                        if os.path.isfile(self.image_list[itemnum]):
                            if not second_image_found and first_image_found:
                                second_image_found = True
                                second_image_came_from_dir = True
                                second_image = self.image_list[itemnum]
                                self.set_go_navigation_sensitivities(True)
                                go_buttons_enabled = True
                                while gtk.events_pending():
                                    gtk.main_iteration(True)
                            if not first_image_found:
                                first_image_found = True
                                first_image = self.image_list[itemnum]
                                first_image_came_from_dir = True
                        itemnum += 1
            # Load first image and display:
            if first_image_found and not first_image_loaded and self.curr_img_in_list <= len(self.image_list)-1:
                first_image_loaded = True
                if self.slideshow_mode:
                    self.toggle_slideshow(None)
                if self.verbose and self.currimg.name != "":
                    print _("Loading: %s") % self.currimg.name
                try:
                    self.load_new_image2(False, False, True, True)
                    # Calling load_new_image2 will reset the following two vars
                    # to 0, so ensure they are -1 again (no images preloaded)
                    self.previmg.index = -1
                    self.nextimg.index = -1
                    if not self.currimg.animation:
                        self.previmg_width = self.currimg.pixbuf.get_width()
                    else:
                        self.previmg_width = self.currimg.pixbuf.get_static_image().get_width()
                    self.image_loaded = True
                    first_image_loaded_successfully = True
                    if not self.closing_app:
                        while gtk.events_pending():
                            gtk.main_iteration(True)
                except:
                    pass
                if first_image_came_from_dir:
                    self.image_list = []
            # Pre-load second image:
            if second_image_found and not second_image_preloaded and ((not second_image_came_from_dir and self.curr_img_in_list+1 <= len(self.image_list)-1) or second_image_came_from_dir):
                second_image_preloaded = True
                # Temporarily fake an image_list whose next slot is the
                # second image so preload_next_image() caches it.
                temp = self.image_list
                self.image_list = []
                while len(self.image_list) < self.curr_img_in_list+1:
                    self.image_list.append(first_image)
                self.image_list.append(second_image)
                self.preload_next_image(False)
                self.image_list = temp
    if first_image_found:
        # Sort the filelist and folderlist alphabetically, and recurse into folderlist:
        if first_image_came_from_dir:
            self.add_folderlist_images(folderlist, go_buttons_enabled)
            self.do_image_list_stuff(first_image, second_image)
        else:
            self.do_image_list_stuff(first_image, second_image)
            self.add_folderlist_images(folderlist, go_buttons_enabled)
        # Record the index of the first image of each subfolder (used for
        # per-subfolder position display and navigation).
        prev_image = ''
        self.firstimgindex_subfolders_list = []
        for i, image in enumerate(self.image_list):
            if os.path.dirname(image) != os.path.dirname(prev_image):
                self.firstimgindex_subfolders_list.append(i)
            prev_image = image
        self.update_title()
        if not self.closing_app:
            while gtk.events_pending():
                gtk.main_iteration(True)
        if not first_image_loaded_successfully:
            self.image_load_failed(False, init_image)
        else:
            # The list was sorted after loading, so re-locate the image.
            self.curr_img_in_list = self.image_list.index(self.currimg.name)
    self.searching_for_images = False
    self.update_statusbar()
    self.set_go_navigation_sensitivities(False)
    self.set_slideshow_sensitivities()
    self.thumbpane_update_images(True, self.curr_img_in_list)
    if not self.closing_app:
        self.change_cursor(None)
    self.recursive = False
def add_folderlist_images(self, folderlist, go_buttons_enabled):
    """Expand every directory in *folderlist* into self.image_list, in
    sorted order, honoring the hidden-files preference.

    Bug fix: the original sorted the list first and then deduplicated it
    with list(set(...)), which discards the sort order (sets are
    unordered), so directories were expanded in arbitrary order.
    Deduplicate first, then sort — matching do_image_list_stuff().
    """
    if len(folderlist) > 0:
        # Remove duplicates BEFORE sorting so the sorted order survives.
        folderlist = list(set(folderlist))
        #Sort based on a numerical aware sort or normal alphabetical sort
        if self.usettings['use_numacomp'] and HAVE_NUMACOMP:
            #Use case-sensitive sort?
            if self.usettings['case_numacomp']:
                folderlist.sort(cmp=numacomp.numacomp)
            else:
                folderlist.sort(cmp=numacomp.numacompi)
        else:
            folderlist.sort(locale.strcoll)
        for item in folderlist:
            if not self.closing_app:
                # Skip hidden directories unless the user opted in.
                if (not self.usettings['open_hidden_files'] and os.path.basename(item)[0] != '.') or self.usettings['open_hidden_files']:
                    self.stop_now = False
                    self.expand_directory(item, False, go_buttons_enabled, True, True)
def do_image_list_stuff(self, first_image, second_image):
    """Deduplicate and sort self.image_list and enable the navigation
    buttons. (first_image/second_image are accepted for call-site
    symmetry; this method does not use them.)"""
    if not self.image_list:
        return
    self.set_go_navigation_sensitivities(True)
    self.image_list = list(set(self.image_list))
    # Numerically-aware sort when the numacomp helper is available,
    # otherwise a locale-aware alphabetical sort.
    if self.usettings['use_numacomp'] and HAVE_NUMACOMP:
        comparator = numacomp.numacomp if self.usettings['case_numacomp'] else numacomp.numacompi
        self.image_list.sort(cmp=comparator)
    else:
        self.image_list.sort(locale.strcoll)
def expand_directory(self, item, stop_when_second_image_found, go_buttons_enabled, update_window_title, print_found_msg):
    """Append every valid image under directory *item* to self.image_list.

    Recurses into subdirectories only when self.recursive is set. When
    stop_when_second_image_found is True, returns as soon as
    self.image_list holds two entries. Aborts early if self.stop_now or
    self.closing_app becomes True. Returns False if *item* is unreadable.
    """
    if not self.stop_now and not self.closing_app:
        folderlist = []
        filelist = []
        if not os.access(item, os.R_OK):
            return False
        for item2 in os.listdir(item):
            if not self.closing_app and not self.stop_now:
                # Keep the UI responsive during long scans.
                while gtk.events_pending():
                    gtk.main_iteration(True)
                item2 = item + os.sep + item2
                item_fullpath2 = os.path.abspath(item2)
                if (not self.usettings['open_hidden_files'] and os.path.basename(item_fullpath2)[0] != '.') or self.usettings['open_hidden_files']:
                    if os.path.isfile(item_fullpath2) and self.valid_image(item_fullpath2):
                        filelist.append(item2)
                        if self.verbose and print_found_msg:
                            self.images_found += 1
                            print _("Found: %(fullpath)s [%(number)i]") % {'fullpath': item_fullpath2, 'number': self.images_found}
                    elif os.path.isdir(item_fullpath2) and self.recursive:
                        folderlist.append(item_fullpath2)
                elif self.verbose:
                    print _("Skipping: %s") % item_fullpath2
        if len(self.image_list)>0 and update_window_title:
            self.update_title()
        # Sort the filelist and folderlist alphabetically:
        if len(filelist) > 0:
            #Use numerical aware sort?
            if self.usettings['use_numacomp'] and HAVE_NUMACOMP:
                #Use case-sensitive sort?
                if self.usettings['case_numacomp']:
                    filelist.sort(cmp=numacomp.numacomp)
                else:
                    filelist.sort(cmp=numacomp.numacompi)
            else:
                filelist.sort(locale.strcoll)
            for item2 in filelist:
                if not item2 in self.image_list:
                    self.image_list.append(item2)
                    # Early exit: caller only needed a second image.
                    if stop_when_second_image_found and len(self.image_list)==2:
                        return
                if not go_buttons_enabled and len(self.image_list) > 1:
                    self.set_go_navigation_sensitivities(True)
                    go_buttons_enabled = True
        # Recurse into the folderlist:
        if len(folderlist) > 0:
            #Use numerical aware sort?
            if self.usettings['use_numacomp'] and HAVE_NUMACOMP:
                #Use case-sensitive sort?
                if self.usettings['case_numacomp']:
                    folderlist.sort(cmp=numacomp.numacomp)
                else:
                    folderlist.sort(cmp=numacomp.numacompi)
            else:
                folderlist.sort(locale.strcoll)
            for item2 in folderlist:
                if not self.stop_now:
                    self.expand_directory(item2, stop_when_second_image_found, go_buttons_enabled, update_window_title, print_found_msg)
def register_file_with_recent_docs(self, imgfile):
    """Record *imgfile* in Mirage's own recent-file list and, when GTK
    >= 2.10 is present, in the desktop-wide GTK recent manager."""
    self.recent_file_add_and_refresh(imgfile)
    if not os.path.isfile(imgfile):
        return
    if gtk.check_version(2, 10, 0) is not None:
        return
    try:
        manager = gtk.recent_manager_get_default()
        uri = urllib.pathname2url(os.path.abspath(imgfile))
        if imgfile[:7] != 'file://':
            uri = 'file://' + uri
        manager.add_item(uri)
    except:
        #Isnt currently functional on win32
        if sys.platform == "win32":
            pass
        else:
            raise
def valid_image(self, file):
    """Return True if gdk-pixbuf reports *file* as a loadable image."""
    info = gtk.gdk.pixbuf_get_file_info(file)
    if info is None:
        return False
    if info[0]['name'] != "wbmp":
        return True
    # some regular files are thought to be wbmp for whatever reason,
    # so let's check further by actually attempting to load it.. :(
    try:
        gtk.gdk.pixbuf_new_from_file(file)
        return True
    except:
        return False
def image_flip(self, old_pix, vertical):
    """Return a flipped copy of *old_pix* (vertical=True mirrors top to
    bottom via imgfuncs.vert, otherwise imgfuncs.horiz); return the
    original pixbuf unchanged if the C helper yields no data."""
    pixels = old_pix.get_pixels()
    width = old_pix.get_width()
    height = old_pix.get_height()
    stride = old_pix.get_rowstride()
    channels = old_pix.get_n_channels()
    flipper = imgfuncs.vert if vertical else imgfuncs.horiz
    d, w, h, rws = flipper(pixels, width, height, stride, channels)
    if d:
        return gtk.gdk.pixbuf_new_from_data(d, old_pix.get_colorspace(),
                old_pix.get_has_alpha(), old_pix.get_bits_per_sample(), w, h, rws)
    return old_pix
def image_rotate(self, old_pix, full_angle):
    """Return *old_pix* rotated by *full_angle* degrees.

    Only multiples of 90 are handled; any other angle (or a failed
    transform) falls through and returns the original pixbuf.
    """
    width = old_pix.get_width()
    height = old_pix.get_height()
    # Normalize to [0, 360). NOTE(review): relies on Python 2 integer
    # division; assumes full_angle is a non-negative multiple of 90 --
    # TODO confirm at call sites.
    angle = full_angle - (int(full_angle) / 360) * 360
    if angle:
        d = None
        # Dispatch: 270 -> imgfuncs.right, 180 -> imgfuncs.mirror,
        # 90 -> imgfuncs.left (each returns raw pixel data + geometry).
        if angle % 270 == 0:
            d, w, h, rws = imgfuncs.right(old_pix.get_pixels(), width, height, old_pix.get_rowstride(), old_pix.get_n_channels())
        elif angle % 180 == 0:
            d, w, h, rws = imgfuncs.mirror(old_pix.get_pixels(), width, height, old_pix.get_rowstride(), old_pix.get_n_channels())
        elif angle % 90 == 0:
            d, w, h, rws = imgfuncs.left(old_pix.get_pixels(), width, height, old_pix.get_rowstride(), old_pix.get_n_channels())
        if d:
            new_pix = gtk.gdk.pixbuf_new_from_data(d, old_pix.get_colorspace(), old_pix.get_has_alpha(), old_pix.get_bits_per_sample(), w, h, rws)
            return new_pix
    return old_pix
def toggle_slideshow(self, action):
    """Start the slideshow if it is stopped, stop it if it is running.

    Starting arms a gobject timeout that advances to the next (or a
    random) image; stopping removes that timeout. No-op with fewer than
    two images.
    """
    if len(self.image_list) > 1:
        if not self.slideshow_mode:
            # Optionally enter fullscreen first, per user preference.
            if self.usettings['slideshow_in_fullscreen'] and not self.fullscreen_mode:
                self.enter_fullscreen(None)
            self.slideshow_mode = True
            self.update_title()
            self.set_slideshow_sensitivities()
            if not self.curr_slideshow_random:
                self.timer_delay = gobject.timeout_add(int(self.curr_slideshow_delay*1000), self.goto_next_image, "ss", True)
            else:
                self.reinitialize_randomlist()
                self.timer_delay = gobject.timeout_add(int(self.curr_slideshow_delay*1000), self.goto_random_image, "ss")
            self.ss_start.hide()
            self.ss_stop.show()
            # Kick off the once-per-second screensaver-deactivation poll.
            timer_screensaver = gobject.timeout_add(1000, self.disable_screensaver_in_slideshow_mode)
        else:
            self.slideshow_mode = False
            gobject.source_remove(self.timer_delay)
            self.update_title()
            self.set_slideshow_sensitivities()
            self.set_zoom_sensitivities()
            self.ss_stop.hide()
            self.ss_start.show()
def get_firstimgindex_curr_next_prev_subfolder(self, img_in_list):
    """Returns a tuple (current [0], next [1], previous [-1]) firstimgindex"""
    starts = self.firstimgindex_subfolders_list
    # Subfolder bookkeeping only applies when at least two subfolders
    # were loaded; otherwise signal "no subfolders" with -1s.
    if len(starts) < 2:
        return (-1, -1, -1)
    for pos, start in enumerate(starts):
        if img_in_list < start:
            return starts[pos - 1], starts[pos], starts[pos - 2]
    # img_in_list lies in the last subfolder; wrap around for next/prev.
    return starts[-1], starts[0], starts[-2]
def get_numimg_subfolder(self, firstimgindex_subfolder):
    """Return how many images the subfolder starting at index
    *firstimgindex_subfolder* contains, or -1 if no subfolder starts
    there."""
    starts = self.firstimgindex_subfolders_list
    for pos, start in enumerate(starts):
        if start != firstimgindex_subfolder:
            continue
        if pos + 1 < len(starts):
            # Count runs up to the next subfolder's first image.
            return starts[pos + 1] - start
        # Last subfolder: count runs to the end of the playlist.
        return len(self.image_list) - start
    return -1
def update_title(self):
    """Rebuild the window title: app name, position within the current
    subfolder (when several subfolders are loaded), overall position in
    the playlist, the image's basename, and a slideshow marker."""
    if len(self.image_list) == 0:
        title = "Mirage"
    else:
        subfoldertitle = ''
        # First-image index of the subfolder holding the current image;
        # -1 when fewer than two subfolders are loaded.
        firstimgindex_curr_subfolder = self.get_firstimgindex_curr_next_prev_subfolder(self.curr_img_in_list)[0]
        if firstimgindex_curr_subfolder > -1:
            # 1-based position of the current image inside its subfolder.
            currimg_subfolder = self.curr_img_in_list - firstimgindex_curr_subfolder + 1
            numimg_curr_subfolder = self.get_numimg_subfolder(firstimgindex_curr_subfolder)
            subfoldertitle = _("%(current)i of %(total)i") % {'current': currimg_subfolder, 'total': numimg_curr_subfolder} + ' '
        title = "Mirage - " + subfoldertitle + _("[%(current)i of %(total)i]") % {'current': self.curr_img_in_list+1, 'total': len(self.image_list)} + ' ' + os.path.basename(self.currimg.name)
    if self.slideshow_mode:
        title = title + ' - ' + _('Slideshow Mode')
    self.window.set_title(title)
def slideshow_controls_show(self):
    """Slide the two slideshow control windows up into view, animating
    their position in small steps while pumping GTK events."""
    if not self.slideshow_controls_visible and not self.controls_moving:
        self.slideshow_controls_visible = True
        # Sync the controls with the current slideshow settings.
        self.ss_delayspin.set_value(self.curr_slideshow_delay)
        self.ss_randomize.set_active(self.curr_slideshow_random)
        # Show the start or the stop button depending on slideshow state.
        if self.slideshow_mode:
            self.ss_start.set_no_show_all(True)
            self.ss_stop.set_no_show_all(False)
        else:
            self.ss_start.set_no_show_all(False)
            self.ss_stop.set_no_show_all(True)
        (xpos, ypos) = self.window.get_position()
        screen = self.window.get_screen()
        self.slideshow_window.set_screen(screen)
        self.slideshow_window2.set_screen(screen)
        self.slideshow_window.show_all()
        self.slideshow_window2.show_all()
        if not self.closing_app:
            while gtk.events_pending():
                gtk.main_iteration()
        ss_winheight = self.slideshow_window.allocation.height
        ss_win2width = self.slideshow_window2.allocation.width
        winheight = self.window.allocation.height
        winwidth = self.window.allocation.width
        # Animate from just below the window edge up to full height.
        y = -3.0
        self.controls_moving = True
        while y < ss_winheight:
            self.slideshow_window.move(2+xpos, int(winheight-y-2))
            self.slideshow_window2.move(winwidth-ss_win2width-2+xpos, int(winheight-y-2))
            y += 0.05
            if not self.closing_app:
                while gtk.events_pending():
                    gtk.main_iteration()
        self.controls_moving = False
def slideshow_controls_hide(self):
    """Slide the two slideshow control windows back down out of view,
    mirroring the animation in slideshow_controls_show()."""
    if self.slideshow_controls_visible and not self.controls_moving:
        self.slideshow_controls_visible = False
        (xpos, ypos) = self.window.get_position()
        ss_winheight = self.slideshow_window.allocation.height
        ss_win2width = self.slideshow_window2.allocation.width
        winheight = self.window.allocation.height
        winwidth = self.window.allocation.width
        # Animate from full control height down past the window edge.
        y = float(self.slideshow_window.allocation.height*1.0)
        self.controls_moving = True
        while y > -3:
            self.slideshow_window.move(2+xpos, int(winheight-y-2))
            self.slideshow_window2.move(winwidth-ss_win2width-2+xpos, int(winheight-y-2))
            y -= 0.05
            if not self.closing_app:
                while gtk.events_pending():
                    gtk.main_iteration()
        self.controls_moving = False
def disable_screensaver_in_slideshow_mode(self):
    """While the slideshow runs (and the preference is enabled), poke
    xscreensaver once per second so it never activates."""
    if self.slideshow_mode and self.usettings['disable_screensaver']:
        status = os.spawnlp(os.P_WAIT, "/usr/bin/xscreensaver-command",
                            "xscreensaver-command", "-deactivate")
        # 127 = command not runnable; stop rescheduling in that case.
        if status != 127:
            timer_screensaver = gobject.timeout_add(1000, self.disable_screensaver_in_slideshow_mode)
def main(self):
    """Enter the GTK main loop (blocks until the application quits)."""
    gtk.main()
class ImageData:
    """Value object holding one image's display state (path, pixbufs,
    zoom ratio, playlist position)."""
    def __init__(self, index=-1, name="", width=0, heigth=0, pixbuf=None,
            pixbuf_original=None, pixbuf_rotated=None, zoomratio=1, animation=False):
        # NOTE(review): the keyword parameter 'heigth' is misspelled but is
        # part of the public interface, so it is kept; the attribute itself
        # is spelled 'height'.
        self.index = index  # position in the image list; -1 = not loaded/preloaded
        self.name = name  # path (or URL-derived temp path) of the image file
        self.width = width  # display width in pixels (original width * zoomratio)
        self.height = heigth  # display height in pixels
        self.pixbuf = pixbuf  # scaled pixbuf currently shown
        self.pixbuf_original = pixbuf_original  # full-quality source pixbuf (or animation)
        self.pixbuf_rotated = pixbuf_rotated  # cached rotated variant
        self.zoomratio = zoomratio  # display scale factor (1 = 100%)
        self.animation = animation  # True when gdk reports a non-static (animated) image
# Script entry point: build the application and run the GTK main loop
# with the GDK thread lock held.
if __name__ == "__main__":
    base = Base()
    gtk.gdk.threads_enter()
    base.main()
    gtk.gdk.threads_leave()
|
xiongchiamiov/Mirage
|
mirage.py
|
Python
|
gpl-3.0
| 222,023
|
[
"FLEUR"
] |
335d441952d99b8f448de634fd312dbb9c0745b0cfd57bd4266d506ad3b6d269
|
## serpent.py - pure Python implementation of the Serpent algorithm.
## Bjorn Edstrom <be@bjrn.se> 13 december 2007.
##
## Copyrights
## ==========
##
## This code is derived from an implementation by Dr Brian Gladman
## (gladman@seven77.demon.co.uk) which is subject to the following license.
## This Python implementation is not subject to any other license.
##
##/* This is an independent implementation of the encryption algorithm:
## *
## * Serpent by Ross Anderson, Eli Biham and Lars Knudsen
## *
## * which is a candidate algorithm in the Advanced Encryption Standard
## * programme of the US National Institute of Standards and Technology
## *
## * Copyright in this implementation is held by Dr B R Gladman but I
## * hereby give permission for its free direct or derivative use subject
## * to acknowledgment of its origin and compliance with any conditions
## * that the originators of the algorithm place on its exploitation.
## *
## * Dr Brian Gladman (gladman@seven77.demon.co.uk) 14th January 1999
## */
##
## The above copyright notice must not be removed.
##
## Information
## ===========
##
## Anyone thinking of using this code should reconsider. It's slow.
## Try python-mcrypt instead. In case a faster library is not installed
## on the target system, this code can be used as a portable fallback.
# psyco is an optional JIT accelerator for 32-bit Python 2; use it when
# available, but run fine without it.
try:
    import psyco
    psyco.full()
except ImportError:
    pass

# Cipher parameters in bytes: 128-bit blocks, up to 256-bit keys.
block_size = 16
key_size = 32
class Serpent:

    def __init__(self, key=None):
        """Serpent block cipher (pure Python, ECB). Optionally keyed at
        construction time via *key*."""
        if key:
            self.set_key(key)

    def set_key(self, key):
        """Expand *key* into the round-key schedule.

        *key* must be a byte string whose length is a multiple of 4 and
        at most 32 bytes; anything else raises KeyError (kept as-is for
        backward compatibility with existing callers).
        """
        key_len = len(key)
        if key_len % 4:
            # XXX: add padding?
            raise KeyError("key not a multiple of 4")
        if key_len > 32:
            # XXX: prune?
            raise KeyError("key_len > 32")
        self.key_context = [0] * 140
        words = [0] * 32
        # Split the key into little-endian 32-bit words.
        for pos in range(0, key_len, 4):
            words[pos // 4] = struct.unpack("<L", key[pos:pos + 4])[0]
        # Module-level key-schedule routine (defined later in this file).
        set_key(self.key_context, words, key_len)

    def decrypt(self, block):
        """Decrypt one or more 16-byte blocks; return the plaintext."""
        if len(block) % 16:
            raise ValueError("block size must be a multiple of 16")
        chunks = []
        for off in range(0, len(block), 16):
            state = list(struct.unpack("<4L", block[off:off + 16]))
            decrypt(self.key_context, state)  # mutates state in place
            chunks.append(struct.pack("<4L", *state))
        return ''.join(chunks)

    def encrypt(self, block):
        """Encrypt one or more 16-byte blocks; return the ciphertext."""
        if len(block) % 16:
            raise ValueError("block size must be a multiple of 16")
        chunks = []
        for off in range(0, len(block), 16):
            state = list(struct.unpack("<4L", block[off:off + 16]))
            encrypt(self.key_context, state)  # mutates state in place
            chunks.append(struct.pack("<4L", *state))
        return ''.join(chunks)

    def get_name(self):
        """Return the name of the cipher."""
        return "Serpent"

    def get_block_size(self):
        """Get cipher block size in bytes."""
        return 16

    def get_key_size(self):
        """Get cipher key size in bytes."""
        return 32
#
# Private.
#
import struct
import sys
# Flag set once at import: key words are byteswapped during key setup
# when the host byte order is big-endian.
WORD_BIGENDIAN = 0
if sys.byteorder == 'big':
    WORD_BIGENDIAN = 1
def rotr32(x, n):
    """Rotate the 32-bit value *x* right by *n* bits."""
    return ((x << (32 - n)) & 0xFFFFFFFF) | (x >> n)
def rotl32(x, n):
    """Rotate the 32-bit value *x* left by *n* bits."""
    return (x >> (32 - n)) | ((x << n) & 0xFFFFFFFF)
def byteswap32(x):
    """Reverse the byte order of the 32-bit value *x*."""
    return (((x & 0x000000ff) << 24) |
            ((x & 0x0000ff00) << 8) |
            ((x & 0x00ff0000) >> 8) |
            ((x >> 24) & 0x000000ff))
def set_key(l_key, key, key_len):
key_len *= 8
if key_len > 256:
return False
i = 0
lk = (key_len + 31) / 32
while i < lk:
l_key[i] = key[i]
if WORD_BIGENDIAN:
l_key[i] = byteswap32(key[i])
i += 1
if key_len < 256:
while i < 8:
l_key[i] = 0
i += 1
i = key_len / 32
lk = 1 << (key_len % 32)
l_key[i] = (l_key[i] & (lk - 1)) | lk
for i in xrange(132):
lk = l_key[i] ^ l_key[i + 3] ^ l_key[i + 5] ^ l_key[i + 7] ^ 0x9e3779b9 ^ i
l_key[i + 8] = ((lk << 11) & 0xFFFFFFFF) | (lk >> 21)
key = l_key
# serpent_generate.py
a = key[4 * 0 + 8]
b = key[4 * 0 + 9]
c = key[4 * 0 + 10]
d = key[4 * 0 + 11]
e = 0
f = 0
g = 0
h = 0
t1 = 0
t2 = 0
t3 = 0
t4 = 0
t5 = 0
t6 = 0
t7 = 0
t8 = 0
t9 = 0
t10 = 0
t11 = 0
t12 = 0
t13 = 0
t14 = 0
t15 = 0
t16 = 0
t1 = a ^ c;
t2 = d ^ t1;
t3 = a & t2;
t4 = d ^ t3;
t5 = b & t4;
g = t2 ^ t5;
t7 = a | g;
t8 = b | d;
t11 = a | d;
t9 = t4 & t7;
f = t8 ^ t9;
t12 = b ^ t11;
t13 = g ^ t9;
t15 = t3 ^ t8;
h = t12 ^ t13;
t16 = c & t15;
e = t12 ^ t16
key[4 * 0 + 8] = e
key[4 * 0 + 9] = f
key[4 * 0 + 10] = g
key[4 * 0 + 11] = h
a = key[4 * 1 + 8]
b = key[4 * 1 + 9]
c = key[4 * 1 + 10]
d = key[4 * 1 + 11]
t1 = (~a) % 0x100000000;
t2 = b ^ d;
t3 = c & t1;
t13 = d | t1;
e = t2 ^ t3;
t5 = c ^ t1;
t6 = c ^ e;
t7 = b & t6;
t10 = e | t5;
h = t5 ^ t7;
t9 = d | t7;
t11 = t9 & t10;
t14 = t2 ^ h;
g = a ^ t11;
t15 = g ^ t13;
f = t14 ^ t15
key[4 * 1 + 8] = e
key[4 * 1 + 9] = f
key[4 * 1 + 10] = g
key[4 * 1 + 11] = h
a = key[4 * 2 + 8]
b = key[4 * 2 + 9]
c = key[4 * 2 + 10]
d = key[4 * 2 + 11]
t1 = (~a) % 0x100000000;
t2 = b ^ t1;
t3 = a | t2;
t4 = d | t2;
t5 = c ^ t3;
g = d ^ t5;
t7 = b ^ t4;
t8 = t2 ^ g;
t9 = t5 & t7;
h = t8 ^ t9;
t11 = t5 ^ t7;
f = h ^ t11;
t13 = t8 & t11;
e = t5 ^ t13
key[4 * 2 + 8] = e
key[4 * 2 + 9] = f
key[4 * 2 + 10] = g
key[4 * 2 + 11] = h
a = key[4 * 3 + 8]
b = key[4 * 3 + 9]
c = key[4 * 3 + 10]
d = key[4 * 3 + 11]
t1 = a ^ d;
t2 = a & d;
t3 = c ^ t1;
t6 = b & t1;
t4 = b ^ t3;
t10 = (~t3) % 0x100000000;
h = t2 ^ t4;
t7 = a ^ t6;
t14 = (~t7) % 0x100000000;
t8 = c | t7;
t11 = t3 ^ t7;
g = t4 ^ t8;
t12 = h & t11;
f = t10 ^ t12;
e = t12 ^ t14
key[4 * 3 + 8] = e
key[4 * 3 + 9] = f
key[4 * 3 + 10] = g
key[4 * 3 + 11] = h
a = key[4 * 4 + 8]
b = key[4 * 4 + 9]
c = key[4 * 4 + 10]
d = key[4 * 4 + 11]
t1 = (~c) % 0x100000000;
t2 = b ^ c;
t3 = b | t1;
t4 = d ^ t3;
t5 = a & t4;
t7 = a ^ d;
h = t2 ^ t5;
t8 = b ^ t5;
t9 = t2 | t8;
t11 = d & t3;
f = t7 ^ t9;
t12 = t5 ^ f;
t15 = t1 | t4;
t13 = h & t12;
g = t11 ^ t13;
t16 = t12 ^ g;
e = t15 ^ t16
key[4 * 4 + 8] = e
key[4 * 4 + 9] = f
key[4 * 4 + 10] = g
key[4 * 4 + 11] = h
a = key[4 * 5 + 8]
b = key[4 * 5 + 9]
c = key[4 * 5 + 10]
d = key[4 * 5 + 11]
t1 = (~a) % 0x100000000;
t2 = a ^ d;
t3 = b ^ t2;
t4 = t1 | t2;
t5 = c ^ t4;
f = b ^ t5;
t13 = (~t5) % 0x100000000;
t7 = t2 | f;
t8 = d ^ t7;
t9 = t5 & t8;
g = t3 ^ t9;
t11 = t5 ^ t8;
e = g ^ t11;
t14 = t3 & t11;
h = t13 ^ t14
key[4 * 5 + 8] = e
key[4 * 5 + 9] = f
key[4 * 5 + 10] = g
key[4 * 5 + 11] = h
a = key[4 * 6 + 8]
b = key[4 * 6 + 9]
c = key[4 * 6 + 10]
d = key[4 * 6 + 11]
t1 = (~a) % 0x100000000;
t2 = a ^ b;
t3 = a ^ d;
t4 = c ^ t1;
t5 = t2 | t3;
e = t4 ^ t5;
t7 = d & e;
t8 = t2 ^ e;
t10 = t1 | e;
f = t7 ^ t8;
t11 = t2 | t7;
t12 = t3 ^ t10;
t14 = b ^ t7;
g = t11 ^ t12;
t15 = f & t12;
h = t14 ^ t15
key[4 * 6 + 8] = e
key[4 * 6 + 9] = f
key[4 * 6 + 10] = g
key[4 * 6 + 11] = h
a = key[4 * 7 + 8]
b = key[4 * 7 + 9]
c = key[4 * 7 + 10]
d = key[4 * 7 + 11]
t1 = a ^ d;
t2 = d & t1;
t3 = c ^ t2;
t4 = b | t3;
h = t1 ^ t4;
t6 = (~b) % 0x100000000;
t7 = t1 | t6;
e = t3 ^ t7;
t9 = a & e;
t10 = t1 ^ t6;
t11 = t4 & t10;
g = t9 ^ t11;
t13 = a ^ t3;
t14 = t10 & g;
f = t13 ^ t14
key[4 * 7 + 8] = e
key[4 * 7 + 9] = f
key[4 * 7 + 10] = g
key[4 * 7 + 11] = h
a = key[4 * 8 + 8]
b = key[4 * 8 + 9]
c = key[4 * 8 + 10]
d = key[4 * 8 + 11]
t1 = a ^ c;
t2 = d ^ t1;
t3 = a & t2;
t4 = d ^ t3;
t5 = b & t4;
g = t2 ^ t5;
t7 = a | g;
t8 = b | d;
t11 = a | d;
t9 = t4 & t7;
f = t8 ^ t9;
t12 = b ^ t11;
t13 = g ^ t9;
t15 = t3 ^ t8;
h = t12 ^ t13;
t16 = c & t15;
e = t12 ^ t16
key[4 * 8 + 8] = e
key[4 * 8 + 9] = f
key[4 * 8 + 10] = g
key[4 * 8 + 11] = h
a = key[4 * 9 + 8]
b = key[4 * 9 + 9]
c = key[4 * 9 + 10]
d = key[4 * 9 + 11]
t1 = (~a) % 0x100000000;
t2 = b ^ d;
t3 = c & t1;
t13 = d | t1;
e = t2 ^ t3;
t5 = c ^ t1;
t6 = c ^ e;
t7 = b & t6;
t10 = e | t5;
h = t5 ^ t7;
t9 = d | t7;
t11 = t9 & t10;
t14 = t2 ^ h;
g = a ^ t11;
t15 = g ^ t13;
f = t14 ^ t15
key[4 * 9 + 8] = e
key[4 * 9 + 9] = f
key[4 * 9 + 10] = g
key[4 * 9 + 11] = h
a = key[4 * 10 + 8]
b = key[4 * 10 + 9]
c = key[4 * 10 + 10]
d = key[4 * 10 + 11]
t1 = (~a) % 0x100000000;
t2 = b ^ t1;
t3 = a | t2;
t4 = d | t2;
t5 = c ^ t3;
g = d ^ t5;
t7 = b ^ t4;
t8 = t2 ^ g;
t9 = t5 & t7;
h = t8 ^ t9;
t11 = t5 ^ t7;
f = h ^ t11;
t13 = t8 & t11;
e = t5 ^ t13
key[4 * 10 + 8] = e
key[4 * 10 + 9] = f
key[4 * 10 + 10] = g
key[4 * 10 + 11] = h
a = key[4 * 11 + 8]
b = key[4 * 11 + 9]
c = key[4 * 11 + 10]
d = key[4 * 11 + 11]
t1 = a ^ d;
t2 = a & d;
t3 = c ^ t1;
t6 = b & t1;
t4 = b ^ t3;
t10 = (~t3) % 0x100000000;
h = t2 ^ t4;
t7 = a ^ t6;
t14 = (~t7) % 0x100000000;
t8 = c | t7;
t11 = t3 ^ t7;
g = t4 ^ t8;
t12 = h & t11;
f = t10 ^ t12;
e = t12 ^ t14
key[4 * 11 + 8] = e
key[4 * 11 + 9] = f
key[4 * 11 + 10] = g
key[4 * 11 + 11] = h
a = key[4 * 12 + 8]
b = key[4 * 12 + 9]
c = key[4 * 12 + 10]
d = key[4 * 12 + 11]
t1 = (~c) % 0x100000000;
t2 = b ^ c;
t3 = b | t1;
t4 = d ^ t3;
t5 = a & t4;
t7 = a ^ d;
h = t2 ^ t5;
t8 = b ^ t5;
t9 = t2 | t8;
t11 = d & t3;
f = t7 ^ t9;
t12 = t5 ^ f;
t15 = t1 | t4;
t13 = h & t12;
g = t11 ^ t13;
t16 = t12 ^ g;
e = t15 ^ t16
key[4 * 12 + 8] = e
key[4 * 12 + 9] = f
key[4 * 12 + 10] = g
key[4 * 12 + 11] = h
a = key[4 * 13 + 8]
b = key[4 * 13 + 9]
c = key[4 * 13 + 10]
d = key[4 * 13 + 11]
t1 = (~a) % 0x100000000;
t2 = a ^ d;
t3 = b ^ t2;
t4 = t1 | t2;
t5 = c ^ t4;
f = b ^ t5;
t13 = (~t5) % 0x100000000;
t7 = t2 | f;
t8 = d ^ t7;
t9 = t5 & t8;
g = t3 ^ t9;
t11 = t5 ^ t8;
e = g ^ t11;
t14 = t3 & t11;
h = t13 ^ t14
key[4 * 13 + 8] = e
key[4 * 13 + 9] = f
key[4 * 13 + 10] = g
key[4 * 13 + 11] = h
a = key[4 * 14 + 8]
b = key[4 * 14 + 9]
c = key[4 * 14 + 10]
d = key[4 * 14 + 11]
t1 = (~a) % 0x100000000;
t2 = a ^ b;
t3 = a ^ d;
t4 = c ^ t1;
t5 = t2 | t3;
e = t4 ^ t5;
t7 = d & e;
t8 = t2 ^ e;
t10 = t1 | e;
f = t7 ^ t8;
t11 = t2 | t7;
t12 = t3 ^ t10;
t14 = b ^ t7;
g = t11 ^ t12;
t15 = f & t12;
h = t14 ^ t15
key[4 * 14 + 8] = e
key[4 * 14 + 9] = f
key[4 * 14 + 10] = g
key[4 * 14 + 11] = h
a = key[4 * 15 + 8]
b = key[4 * 15 + 9]
c = key[4 * 15 + 10]
d = key[4 * 15 + 11]
t1 = a ^ d;
t2 = d & t1;
t3 = c ^ t2;
t4 = b | t3;
h = t1 ^ t4;
t6 = (~b) % 0x100000000;
t7 = t1 | t6;
e = t3 ^ t7;
t9 = a & e;
t10 = t1 ^ t6;
t11 = t4 & t10;
g = t9 ^ t11;
t13 = a ^ t3;
t14 = t10 & g;
f = t13 ^ t14
key[4 * 15 + 8] = e
key[4 * 15 + 9] = f
key[4 * 15 + 10] = g
key[4 * 15 + 11] = h
a = key[4 * 16 + 8]
b = key[4 * 16 + 9]
c = key[4 * 16 + 10]
d = key[4 * 16 + 11]
t1 = a ^ c;
t2 = d ^ t1;
t3 = a & t2;
t4 = d ^ t3;
t5 = b & t4;
g = t2 ^ t5;
t7 = a | g;
t8 = b | d;
t11 = a | d;
t9 = t4 & t7;
f = t8 ^ t9;
t12 = b ^ t11;
t13 = g ^ t9;
t15 = t3 ^ t8;
h = t12 ^ t13;
t16 = c & t15;
e = t12 ^ t16
key[4 * 16 + 8] = e
key[4 * 16 + 9] = f
key[4 * 16 + 10] = g
key[4 * 16 + 11] = h
a = key[4 * 17 + 8]
b = key[4 * 17 + 9]
c = key[4 * 17 + 10]
d = key[4 * 17 + 11]
t1 = (~a) % 0x100000000;
t2 = b ^ d;
t3 = c & t1;
t13 = d | t1;
e = t2 ^ t3;
t5 = c ^ t1;
t6 = c ^ e;
t7 = b & t6;
t10 = e | t5;
h = t5 ^ t7;
t9 = d | t7;
t11 = t9 & t10;
t14 = t2 ^ h;
g = a ^ t11;
t15 = g ^ t13;
f = t14 ^ t15
key[4 * 17 + 8] = e
key[4 * 17 + 9] = f
key[4 * 17 + 10] = g
key[4 * 17 + 11] = h
a = key[4 * 18 + 8]
b = key[4 * 18 + 9]
c = key[4 * 18 + 10]
d = key[4 * 18 + 11]
t1 = (~a) % 0x100000000;
t2 = b ^ t1;
t3 = a | t2;
t4 = d | t2;
t5 = c ^ t3;
g = d ^ t5;
t7 = b ^ t4;
t8 = t2 ^ g;
t9 = t5 & t7;
h = t8 ^ t9;
t11 = t5 ^ t7;
f = h ^ t11;
t13 = t8 & t11;
e = t5 ^ t13
key[4 * 18 + 8] = e
key[4 * 18 + 9] = f
key[4 * 18 + 10] = g
key[4 * 18 + 11] = h
a = key[4 * 19 + 8]
b = key[4 * 19 + 9]
c = key[4 * 19 + 10]
d = key[4 * 19 + 11]
t1 = a ^ d;
t2 = a & d;
t3 = c ^ t1;
t6 = b & t1;
t4 = b ^ t3;
t10 = (~t3) % 0x100000000;
h = t2 ^ t4;
t7 = a ^ t6;
t14 = (~t7) % 0x100000000;
t8 = c | t7;
t11 = t3 ^ t7;
g = t4 ^ t8;
t12 = h & t11;
f = t10 ^ t12;
e = t12 ^ t14
key[4 * 19 + 8] = e
key[4 * 19 + 9] = f
key[4 * 19 + 10] = g
key[4 * 19 + 11] = h
a = key[4 * 20 + 8]
b = key[4 * 20 + 9]
c = key[4 * 20 + 10]
d = key[4 * 20 + 11]
t1 = (~c) % 0x100000000;
t2 = b ^ c;
t3 = b | t1;
t4 = d ^ t3;
t5 = a & t4;
t7 = a ^ d;
h = t2 ^ t5;
t8 = b ^ t5;
t9 = t2 | t8;
t11 = d & t3;
f = t7 ^ t9;
t12 = t5 ^ f;
t15 = t1 | t4;
t13 = h & t12;
g = t11 ^ t13;
t16 = t12 ^ g;
e = t15 ^ t16
key[4 * 20 + 8] = e
key[4 * 20 + 9] = f
key[4 * 20 + 10] = g
key[4 * 20 + 11] = h
a = key[4 * 21 + 8]
b = key[4 * 21 + 9]
c = key[4 * 21 + 10]
d = key[4 * 21 + 11]
t1 = (~a) % 0x100000000;
t2 = a ^ d;
t3 = b ^ t2;
t4 = t1 | t2;
t5 = c ^ t4;
f = b ^ t5;
t13 = (~t5) % 0x100000000;
t7 = t2 | f;
t8 = d ^ t7;
t9 = t5 & t8;
g = t3 ^ t9;
t11 = t5 ^ t8;
e = g ^ t11;
t14 = t3 & t11;
h = t13 ^ t14
key[4 * 21 + 8] = e
key[4 * 21 + 9] = f
key[4 * 21 + 10] = g
key[4 * 21 + 11] = h
a = key[4 * 22 + 8]
b = key[4 * 22 + 9]
c = key[4 * 22 + 10]
d = key[4 * 22 + 11]
t1 = (~a) % 0x100000000;
t2 = a ^ b;
t3 = a ^ d;
t4 = c ^ t1;
t5 = t2 | t3;
e = t4 ^ t5;
t7 = d & e;
t8 = t2 ^ e;
t10 = t1 | e;
f = t7 ^ t8;
t11 = t2 | t7;
t12 = t3 ^ t10;
t14 = b ^ t7;
g = t11 ^ t12;
t15 = f & t12;
h = t14 ^ t15
key[4 * 22 + 8] = e
key[4 * 22 + 9] = f
key[4 * 22 + 10] = g
key[4 * 22 + 11] = h
a = key[4 * 23 + 8]
b = key[4 * 23 + 9]
c = key[4 * 23 + 10]
d = key[4 * 23 + 11]
t1 = a ^ d;
t2 = d & t1;
t3 = c ^ t2;
t4 = b | t3;
h = t1 ^ t4;
t6 = (~b) % 0x100000000;
t7 = t1 | t6;
e = t3 ^ t7;
t9 = a & e;
t10 = t1 ^ t6;
t11 = t4 & t10;
g = t9 ^ t11;
t13 = a ^ t3;
t14 = t10 & g;
f = t13 ^ t14
key[4 * 23 + 8] = e
key[4 * 23 + 9] = f
key[4 * 23 + 10] = g
key[4 * 23 + 11] = h
a = key[4 * 24 + 8]
b = key[4 * 24 + 9]
c = key[4 * 24 + 10]
d = key[4 * 24 + 11]
t1 = a ^ c;
t2 = d ^ t1;
t3 = a & t2;
t4 = d ^ t3;
t5 = b & t4;
g = t2 ^ t5;
t7 = a | g;
t8 = b | d;
t11 = a | d;
t9 = t4 & t7;
f = t8 ^ t9;
t12 = b ^ t11;
t13 = g ^ t9;
t15 = t3 ^ t8;
h = t12 ^ t13;
t16 = c & t15;
e = t12 ^ t16
key[4 * 24 + 8] = e
key[4 * 24 + 9] = f
key[4 * 24 + 10] = g
key[4 * 24 + 11] = h
a = key[4 * 25 + 8]
b = key[4 * 25 + 9]
c = key[4 * 25 + 10]
d = key[4 * 25 + 11]
t1 = (~a) % 0x100000000;
t2 = b ^ d;
t3 = c & t1;
t13 = d | t1;
e = t2 ^ t3;
t5 = c ^ t1;
t6 = c ^ e;
t7 = b & t6;
t10 = e | t5;
h = t5 ^ t7;
t9 = d | t7;
t11 = t9 & t10;
t14 = t2 ^ h;
g = a ^ t11;
t15 = g ^ t13;
f = t14 ^ t15
key[4 * 25 + 8] = e
key[4 * 25 + 9] = f
key[4 * 25 + 10] = g
key[4 * 25 + 11] = h
a = key[4 * 26 + 8]
b = key[4 * 26 + 9]
c = key[4 * 26 + 10]
d = key[4 * 26 + 11]
t1 = (~a) % 0x100000000;
t2 = b ^ t1;
t3 = a | t2;
t4 = d | t2;
t5 = c ^ t3;
g = d ^ t5;
t7 = b ^ t4;
t8 = t2 ^ g;
t9 = t5 & t7;
h = t8 ^ t9;
t11 = t5 ^ t7;
f = h ^ t11;
t13 = t8 & t11;
e = t5 ^ t13
key[4 * 26 + 8] = e
key[4 * 26 + 9] = f
key[4 * 26 + 10] = g
key[4 * 26 + 11] = h
a = key[4 * 27 + 8]
b = key[4 * 27 + 9]
c = key[4 * 27 + 10]
d = key[4 * 27 + 11]
t1 = a ^ d;
t2 = a & d;
t3 = c ^ t1;
t6 = b & t1;
t4 = b ^ t3;
t10 = (~t3) % 0x100000000;
h = t2 ^ t4;
t7 = a ^ t6;
t14 = (~t7) % 0x100000000;
t8 = c | t7;
t11 = t3 ^ t7;
g = t4 ^ t8;
t12 = h & t11;
f = t10 ^ t12;
e = t12 ^ t14
key[4 * 27 + 8] = e
key[4 * 27 + 9] = f
key[4 * 27 + 10] = g
key[4 * 27 + 11] = h
a = key[4 * 28 + 8]
b = key[4 * 28 + 9]
c = key[4 * 28 + 10]
d = key[4 * 28 + 11]
t1 = (~c) % 0x100000000;
t2 = b ^ c;
t3 = b | t1;
t4 = d ^ t3;
t5 = a & t4;
t7 = a ^ d;
h = t2 ^ t5;
t8 = b ^ t5;
t9 = t2 | t8;
t11 = d & t3;
f = t7 ^ t9;
t12 = t5 ^ f;
t15 = t1 | t4;
t13 = h & t12;
g = t11 ^ t13;
t16 = t12 ^ g;
e = t15 ^ t16
key[4 * 28 + 8] = e
key[4 * 28 + 9] = f
key[4 * 28 + 10] = g
key[4 * 28 + 11] = h
a = key[4 * 29 + 8]
b = key[4 * 29 + 9]
c = key[4 * 29 + 10]
d = key[4 * 29 + 11]
t1 = (~a) % 0x100000000;
t2 = a ^ d;
t3 = b ^ t2;
t4 = t1 | t2;
t5 = c ^ t4;
f = b ^ t5;
t13 = (~t5) % 0x100000000;
t7 = t2 | f;
t8 = d ^ t7;
t9 = t5 & t8;
g = t3 ^ t9;
t11 = t5 ^ t8;
e = g ^ t11;
t14 = t3 & t11;
h = t13 ^ t14
key[4 * 29 + 8] = e
key[4 * 29 + 9] = f
key[4 * 29 + 10] = g
key[4 * 29 + 11] = h
a = key[4 * 30 + 8]
b = key[4 * 30 + 9]
c = key[4 * 30 + 10]
d = key[4 * 30 + 11]
t1 = (~a) % 0x100000000;
t2 = a ^ b;
t3 = a ^ d;
t4 = c ^ t1;
t5 = t2 | t3;
e = t4 ^ t5;
t7 = d & e;
t8 = t2 ^ e;
t10 = t1 | e;
f = t7 ^ t8;
t11 = t2 | t7;
t12 = t3 ^ t10;
t14 = b ^ t7;
g = t11 ^ t12;
t15 = f & t12;
h = t14 ^ t15
key[4 * 30 + 8] = e
key[4 * 30 + 9] = f
key[4 * 30 + 10] = g
key[4 * 30 + 11] = h
a = key[4 * 31 + 8]
b = key[4 * 31 + 9]
c = key[4 * 31 + 10]
d = key[4 * 31 + 11]
t1 = a ^ d;
t2 = d & t1;
t3 = c ^ t2;
t4 = b | t3;
h = t1 ^ t4;
t6 = (~b) % 0x100000000;
t7 = t1 | t6;
e = t3 ^ t7;
t9 = a & e;
t10 = t1 ^ t6;
t11 = t4 & t10;
g = t9 ^ t11;
t13 = a ^ t3;
t14 = t10 & g;
f = t13 ^ t14
key[4 * 31 + 8] = e
key[4 * 31 + 9] = f
key[4 * 31 + 10] = g
key[4 * 31 + 11] = h
a = key[4 * 32 + 8]
b = key[4 * 32 + 9]
c = key[4 * 32 + 10]
d = key[4 * 32 + 11]
t1 = a ^ c;
t2 = d ^ t1;
t3 = a & t2;
t4 = d ^ t3;
t5 = b & t4;
g = t2 ^ t5;
t7 = a | g;
t8 = b | d;
t11 = a | d;
t9 = t4 & t7;
f = t8 ^ t9;
t12 = b ^ t11;
t13 = g ^ t9;
t15 = t3 ^ t8;
h = t12 ^ t13;
t16 = c & t15;
e = t12 ^ t16
key[4 * 32 + 8] = e
key[4 * 32 + 9] = f
key[4 * 32 + 10] = g
key[4 * 32 + 11] = h
# 32-bit word mask: all arithmetic below is on unsigned 32-bit words.
_MASK32 = 0xFFFFFFFF

# Standalone-safe fallback: the enclosing module defines WORD_BIGENDIAN at the
# top of the file, in which case this is a no-op.  It only binds a default
# (little-endian) when the name is missing, e.g. when this block is imported
# in isolation for testing.
try:
    WORD_BIGENDIAN
except NameError:  # pragma: no cover - only taken outside the full module
    WORD_BIGENDIAN = False


def _sb0(a, b, c, d):
    """Bitsliced Serpent S-box 0 applied to one 4-word state; returns the new state."""
    t1 = a ^ d
    t2 = a & d
    t3 = c ^ t1
    t6 = b & t1
    t4 = b ^ t3
    h = t2 ^ t4
    t7 = a ^ t6
    t8 = c | t7
    t11 = t3 ^ t7
    g = t4 ^ t8
    t12 = h & t11
    f = ((~t3) & _MASK32) ^ t12
    e = t12 ^ ((~t7) & _MASK32)
    return e, f, g, h


def _sb1(a, b, c, d):
    """Bitsliced Serpent S-box 1 applied to one 4-word state; returns the new state."""
    t1 = (~a) & _MASK32
    t2 = b ^ t1
    t3 = a | t2
    t4 = d | t2
    t5 = c ^ t3
    g = d ^ t5
    t7 = b ^ t4
    t8 = t2 ^ g
    t9 = t5 & t7
    h = t8 ^ t9
    t11 = t5 ^ t7
    f = h ^ t11
    t13 = t8 & t11
    e = t5 ^ t13
    return e, f, g, h


def _sb2(a, b, c, d):
    """Bitsliced Serpent S-box 2 applied to one 4-word state; returns the new state."""
    t1 = (~a) & _MASK32
    t2 = b ^ d
    t3 = c & t1
    t13 = d | t1
    e = t2 ^ t3
    t5 = c ^ t1
    t6 = c ^ e
    t7 = b & t6
    t10 = e | t5
    h = t5 ^ t7
    t9 = d | t7
    t11 = t9 & t10
    t14 = t2 ^ h
    g = a ^ t11
    t15 = g ^ t13
    f = t14 ^ t15
    return e, f, g, h


def _sb3(a, b, c, d):
    """Bitsliced Serpent S-box 3 applied to one 4-word state; returns the new state."""
    t1 = a ^ c
    t2 = d ^ t1
    t3 = a & t2
    t4 = d ^ t3
    t5 = b & t4
    g = t2 ^ t5
    t7 = a | g
    t8 = b | d
    t11 = a | d
    t9 = t4 & t7
    f = t8 ^ t9
    t12 = b ^ t11
    t13 = g ^ t9
    t15 = t3 ^ t8
    h = t12 ^ t13
    t16 = c & t15
    e = t12 ^ t16
    return e, f, g, h


def _sb4(a, b, c, d):
    """Bitsliced Serpent S-box 4 applied to one 4-word state; returns the new state."""
    t1 = a ^ d
    t2 = d & t1
    t3 = c ^ t2
    t4 = b | t3
    h = t1 ^ t4
    t6 = (~b) & _MASK32
    t7 = t1 | t6
    e = t3 ^ t7
    t9 = a & e
    t10 = t1 ^ t6
    t11 = t4 & t10
    g = t9 ^ t11
    t13 = a ^ t3
    t14 = t10 & g
    f = t13 ^ t14
    return e, f, g, h


def _sb5(a, b, c, d):
    """Bitsliced Serpent S-box 5 applied to one 4-word state; returns the new state."""
    t1 = (~a) & _MASK32
    t2 = a ^ b
    t3 = a ^ d
    t4 = c ^ t1
    t5 = t2 | t3
    e = t4 ^ t5
    t7 = d & e
    t8 = t2 ^ e
    t10 = t1 | e
    f = t7 ^ t8
    t11 = t2 | t7
    t12 = t3 ^ t10
    t14 = b ^ t7
    g = t11 ^ t12
    t15 = f & t12
    h = t14 ^ t15
    return e, f, g, h


def _sb6(a, b, c, d):
    """Bitsliced Serpent S-box 6 applied to one 4-word state; returns the new state."""
    t1 = (~a) & _MASK32
    t2 = a ^ d
    t3 = b ^ t2
    t4 = t1 | t2
    t5 = c ^ t4
    f = b ^ t5
    t13 = (~t5) & _MASK32
    t7 = t2 | f
    t8 = d ^ t7
    t9 = t5 & t8
    g = t3 ^ t9
    t11 = t5 ^ t8
    e = g ^ t11
    t14 = t3 & t11
    h = t13 ^ t14
    return e, f, g, h


def _sb7(a, b, c, d):
    """Bitsliced Serpent S-box 7 applied to one 4-word state; returns the new state."""
    t1 = (~c) & _MASK32
    t2 = b ^ c
    t3 = b | t1
    t4 = d ^ t3
    t5 = a & t4
    t7 = a ^ d
    h = t2 ^ t5
    t8 = b ^ t5
    t9 = t2 | t8
    t11 = d & t3
    f = t7 ^ t9
    t12 = t5 ^ f
    t15 = t1 | t4
    t13 = h & t12
    g = t11 ^ t13
    t16 = t12 ^ g
    e = t15 ^ t16
    return e, f, g, h


# Round r of the encryption applies S-box r mod 8.
_ENC_SBOXES = (_sb0, _sb1, _sb2, _sb3, _sb4, _sb5, _sb6, _sb7)


def _enc_lt(a, b, c, d):
    """Serpent forward linear transformation on one 4-word state.

    Mixes the four 32-bit words with rotations, shifts and XORs exactly as in
    the reference bitsliced implementation; returns the transformed state.
    """
    a = ((a << 13) | (a >> 19)) & _MASK32   # rotl32(a, 13)
    c = ((c << 3) | (c >> 29)) & _MASK32    # rotl32(c, 3)
    d ^= c ^ ((a << 3) & _MASK32)
    b ^= a ^ c
    d = ((d << 7) | (d >> 25)) & _MASK32    # rotl32(d, 7)
    b = ((b << 1) | (b >> 31)) & _MASK32    # rotl32(b, 1)
    a ^= b ^ d
    c ^= d ^ ((b << 7) & _MASK32)
    a = ((a << 5) | (a >> 27)) & _MASK32    # rotl32(a, 5)
    c = ((c << 22) | (c >> 10)) & _MASK32   # rotl32(c, 22)
    return a, b, c, d


def encrypt(key, in_blk):
    """Encrypt one 128-bit Serpent block in place.

    Replaces the machine-generated fully-unrolled round code with an
    equivalent table-driven loop: 32 rounds of (subkey mix, S-box r mod 8,
    linear transform), where the final round substitutes a last subkey mix
    for the linear transform.

    Parameters
    ----------
    key : sequence of int
        Expanded key schedule; the round subkeys are read from
        ``key[4 * r + 8] .. key[4 * r + 11]`` for r = 0..32, as produced by
        the key-schedule routine earlier in this module.
    in_blk : mutable sequence of int
        Four 32-bit words of plaintext; overwritten with the ciphertext.

    Returns
    -------
    None — the result is stored back into ``in_blk``.
    """
    a = in_blk[0]
    b = in_blk[1]
    c = in_blk[2]
    d = in_blk[3]
    # Serpent is specified little-endian; byte-swap words on big-endian hosts.
    if WORD_BIGENDIAN:
        a = byteswap32(a)
        b = byteswap32(b)
        c = byteswap32(c)
        d = byteswap32(d)
    for rnd in range(32):
        # Mix in the round subkey.
        a ^= key[4 * rnd + 8]
        b ^= key[4 * rnd + 9]
        c ^= key[4 * rnd + 10]
        d ^= key[4 * rnd + 11]
        # Apply this round's bitsliced S-box.
        a, b, c, d = _ENC_SBOXES[rnd & 7](a, b, c, d)
        # Every round but the last is followed by the linear transformation;
        # the last round mixes in the extra 33rd subkey instead.
        if rnd != 31:
            a, b, c, d = _enc_lt(a, b, c, d)
    a ^= key[4 * 32 + 8]
    b ^= key[4 * 32 + 9]
    c ^= key[4 * 32 + 10]
    d ^= key[4 * 32 + 11]
    if WORD_BIGENDIAN:
        a = byteswap32(a)
        b = byteswap32(b)
        c = byteswap32(c)
        d = byteswap32(d)
    in_blk[0] = a
    in_blk[1] = b
    in_blk[2] = c
    in_blk[3] = d
def decrypt(key, in_blk):
# serpent_generate.py
a = in_blk[0]
b = in_blk[1]
c = in_blk[2]
d = in_blk[3]
if WORD_BIGENDIAN:
a = byteswap32(a)
b = byteswap32(b)
c = byteswap32(c)
d = byteswap32(d)
e = 0
f = 0
g = 0
h = 0
t1 = 0
t2 = 0
t3 = 0
t4 = 0
t5 = 0
t6 = 0
t7 = 0
t8 = 0
t9 = 0
t10 = 0
t11 = 0
t12 = 0
t13 = 0
t14 = 0
t15 = 0
t16 = 0
a ^= key[4 * 32 + 8]
b ^= key[4 * 32 + 9]
c ^= key[4 * 32 + 10]
d ^= key[4 * 32 + 11]
t1 = a & b;
t2 = a | b;
t3 = c | t1;
t4 = d & t2;
h = t3 ^ t4;
t6 = (~d) % 0x100000000;
t7 = b ^ t4;
t8 = h ^ t6;
t11 = c ^ t7;
t9 = t7 | t8;
f = a ^ t9;
t12 = d | f;
e = t11 ^ t12;
t14 = a & h;
t15 = t3 ^ f;
t16 = e ^ t14;
g = t15 ^ t16
e ^= key[4 * 31 + 8]
f ^= key[4 * 31 + 9]
g ^= key[4 * 31 + 10]
h ^= key[4 * 31 + 11]
g = rotr32(g, 22)
e = rotr32(e, 5)
g ^= h ^ ((f << 7) & 0xFFFFFFFF)
e ^= f ^ h
h = rotr32(h, 7)
f = rotr32(f, 1)
h ^= g ^ ((e << 3) & 0xFFFFFFFF)
f ^= e ^ g
g = rotr32(g, 3)
e = rotr32(e, 13)
t1 = (~e) % 0x100000000;
t2 = e ^ f;
t3 = g ^ t2;
t4 = g | t1;
t5 = h ^ t4;
t13 = h & t1;
b = t3 ^ t5;
t7 = t3 & t5;
t8 = t2 ^ t7;
t9 = f | t8;
d = t5 ^ t9;
t11 = f | d;
a = t8 ^ t11;
t14 = t3 ^ t11;
c = t13 ^ t14
a ^= key[4 * 30 + 8]
b ^= key[4 * 30 + 9]
c ^= key[4 * 30 + 10]
d ^= key[4 * 30 + 11]
c = rotr32(c, 22)
a = rotr32(a, 5)
c ^= d ^ ((b << 7) & 0xFFFFFFFF)
a ^= b ^ d
d = rotr32(d, 7)
b = rotr32(b, 1)
d ^= c ^ ((a << 3) & 0xFFFFFFFF)
b ^= a ^ c
c = rotr32(c, 3)
a = rotr32(a, 13)
t1 = (~c) % 0x100000000;
t2 = b & t1;
t3 = d ^ t2;
t4 = a & t3;
t5 = b ^ t1;
h = t4 ^ t5;
t7 = b | h;
t8 = a & t7;
f = t3 ^ t8;
t10 = a | d;
t11 = t1 ^ t7;
e = t10 ^ t11;
t13 = a ^ c;
t14 = b & t10;
t15 = t4 | t13;
g = t14 ^ t15
e ^= key[4 * 29 + 8]
f ^= key[4 * 29 + 9]
g ^= key[4 * 29 + 10]
h ^= key[4 * 29 + 11]
g = rotr32(g, 22)
e = rotr32(e, 5)
g ^= h ^ ((f << 7) & 0xFFFFFFFF)
e ^= f ^ h
h = rotr32(h, 7)
f = rotr32(f, 1)
h ^= g ^ ((e << 3) & 0xFFFFFFFF)
f ^= e ^ g
g = rotr32(g, 3)
e = rotr32(e, 13)
t1 = g ^ h;
t2 = g | h;
t3 = f ^ t2;
t4 = e & t3;
b = t1 ^ t4;
t6 = e ^ h;
t7 = f | h;
t8 = t6 & t7;
d = t3 ^ t8;
t10 = (~e) % 0x100000000;
t11 = g ^ d;
t12 = t10 | t11;
a = t3 ^ t12;
t14 = g | t4;
t15 = t7 ^ t14;
t16 = d | t10;
c = t15 ^ t16
a ^= key[4 * 28 + 8]
b ^= key[4 * 28 + 9]
c ^= key[4 * 28 + 10]
d ^= key[4 * 28 + 11]
c = rotr32(c, 22)
a = rotr32(a, 5)
c ^= d ^ ((b << 7) & 0xFFFFFFFF)
a ^= b ^ d
d = rotr32(d, 7)
b = rotr32(b, 1)
d ^= c ^ ((a << 3) & 0xFFFFFFFF)
b ^= a ^ c
c = rotr32(c, 3)
a = rotr32(a, 13)
t1 = b ^ c;
t2 = b | c;
t3 = a ^ c;
t7 = a ^ d;
t4 = t2 ^ t3;
t5 = d | t4;
t9 = t2 ^ t7;
e = t1 ^ t5;
t8 = t1 | t5;
t11 = a & t4;
g = t8 ^ t9;
t12 = e | t9;
f = t11 ^ t12;
t14 = a & g;
t15 = t2 ^ t14;
t16 = e & t15;
h = t4 ^ t16
e ^= key[4 * 27 + 8]
f ^= key[4 * 27 + 9]
g ^= key[4 * 27 + 10]
h ^= key[4 * 27 + 11]
g = rotr32(g, 22)
e = rotr32(e, 5)
g ^= h ^ ((f << 7) & 0xFFFFFFFF)
e ^= f ^ h
h = rotr32(h, 7)
f = rotr32(f, 1)
h ^= g ^ ((e << 3) & 0xFFFFFFFF)
f ^= e ^ g
g = rotr32(g, 3)
e = rotr32(e, 13)
t1 = f ^ h;
t2 = (~t1) % 0x100000000;
t3 = e ^ g;
t4 = g ^ t1;
t7 = e | t2;
t5 = f & t4;
t8 = h ^ t7;
t11 = (~t4) % 0x100000000;
a = t3 ^ t5;
t9 = t3 | t8;
t14 = h & t11;
d = t1 ^ t9;
t12 = a | d;
b = t11 ^ t12;
t15 = t3 ^ t12;
c = t14 ^ t15
a ^= key[4 * 26 + 8]
b ^= key[4 * 26 + 9]
c ^= key[4 * 26 + 10]
d ^= key[4 * 26 + 11]
c = rotr32(c, 22)
a = rotr32(a, 5)
c ^= d ^ ((b << 7) & 0xFFFFFFFF)
a ^= b ^ d
d = rotr32(d, 7)
b = rotr32(b, 1)
d ^= c ^ ((a << 3) & 0xFFFFFFFF)
b ^= a ^ c
c = rotr32(c, 3)
a = rotr32(a, 13)
t1 = a ^ d;
t2 = a & b;
t3 = b ^ c;
t4 = a ^ t3;
t5 = b | d;
t7 = c | t1;
h = t4 ^ t5;
t8 = b ^ t7;
t11 = (~t2) % 0x100000000;
t9 = t4 & t8;
f = t1 ^ t9;
t13 = t9 ^ t11;
t12 = h & f;
g = t12 ^ t13;
t15 = a & d;
t16 = c ^ t13;
e = t15 ^ t16
e ^= key[4 * 25 + 8]
f ^= key[4 * 25 + 9]
g ^= key[4 * 25 + 10]
h ^= key[4 * 25 + 11]
g = rotr32(g, 22)
e = rotr32(e, 5)
g ^= h ^ ((f << 7) & 0xFFFFFFFF)
e ^= f ^ h
h = rotr32(h, 7)
f = rotr32(f, 1)
h ^= g ^ ((e << 3) & 0xFFFFFFFF)
f ^= e ^ g
g = rotr32(g, 3)
e = rotr32(e, 13)
t1 = (~e) % 0x100000000
t2 = e ^ f
t3 = t1 | t2
t4 = h ^ t3
t7 = h & t2
t5 = g ^ t4
t8 = t1 ^ t7
c = t2 ^ t5
t11 = e & t4
t9 = c & t8
t14 = t5 ^ t8
b = t4 ^ t9
t12 = t5 | b
d = t11 ^ t12
a = d ^ t14
a ^= key[4 * 24 + 8]
b ^= key[4 * 24 + 9]
c ^= key[4 * 24 + 10]
d ^= key[4 * 24 + 11]
c = rotr32(c, 22)
a = rotr32(a, 5)
c ^= d ^ ((b << 7) & 0xFFFFFFFF)
a ^= b ^ d
d = rotr32(d, 7)
b = rotr32(b, 1)
d ^= c ^ ((a << 3) & 0xFFFFFFFF)
b ^= a ^ c
c = rotr32(c, 3)
a = rotr32(a, 13)
t1 = a & b;
t2 = a | b;
t3 = c | t1;
t4 = d & t2;
h = t3 ^ t4;
t6 = (~d) % 0x100000000;
t7 = b ^ t4;
t8 = h ^ t6;
t11 = c ^ t7;
t9 = t7 | t8;
f = a ^ t9;
t12 = d | f;
e = t11 ^ t12;
t14 = a & h;
t15 = t3 ^ f;
t16 = e ^ t14;
g = t15 ^ t16
e ^= key[4 * 23 + 8]
f ^= key[4 * 23 + 9]
g ^= key[4 * 23 + 10]
h ^= key[4 * 23 + 11]
g = rotr32(g, 22)
e = rotr32(e, 5)
g ^= h ^ ((f << 7) & 0xFFFFFFFF)
e ^= f ^ h
h = rotr32(h, 7)
f = rotr32(f, 1)
h ^= g ^ ((e << 3) & 0xFFFFFFFF)
f ^= e ^ g
g = rotr32(g, 3)
e = rotr32(e, 13)
t1 = (~e) % 0x100000000;
t2 = e ^ f;
t3 = g ^ t2;
t4 = g | t1;
t5 = h ^ t4;
t13 = h & t1;
b = t3 ^ t5;
t7 = t3 & t5;
t8 = t2 ^ t7;
t9 = f | t8;
d = t5 ^ t9;
t11 = f | d;
a = t8 ^ t11;
t14 = t3 ^ t11;
c = t13 ^ t14
a ^= key[4 * 22 + 8]
b ^= key[4 * 22 + 9]
c ^= key[4 * 22 + 10]
d ^= key[4 * 22 + 11]
c = rotr32(c, 22)
a = rotr32(a, 5)
c ^= d ^ ((b << 7) & 0xFFFFFFFF)
a ^= b ^ d
d = rotr32(d, 7)
b = rotr32(b, 1)
d ^= c ^ ((a << 3) & 0xFFFFFFFF)
b ^= a ^ c
c = rotr32(c, 3)
a = rotr32(a, 13)
t1 = (~c) % 0x100000000;
t2 = b & t1;
t3 = d ^ t2;
t4 = a & t3;
t5 = b ^ t1;
h = t4 ^ t5;
t7 = b | h;
t8 = a & t7;
f = t3 ^ t8;
t10 = a | d;
t11 = t1 ^ t7;
e = t10 ^ t11;
t13 = a ^ c;
t14 = b & t10;
t15 = t4 | t13;
g = t14 ^ t15
e ^= key[4 * 21 + 8]
f ^= key[4 * 21 + 9]
g ^= key[4 * 21 + 10]
h ^= key[4 * 21 + 11]
g = rotr32(g, 22)
e = rotr32(e, 5)
g ^= h ^ ((f << 7) & 0xFFFFFFFF)
e ^= f ^ h
h = rotr32(h, 7)
f = rotr32(f, 1)
h ^= g ^ ((e << 3) & 0xFFFFFFFF)
f ^= e ^ g
g = rotr32(g, 3)
e = rotr32(e, 13)
t1 = g ^ h;
t2 = g | h;
t3 = f ^ t2;
t4 = e & t3;
b = t1 ^ t4;
t6 = e ^ h;
t7 = f | h;
t8 = t6 & t7;
d = t3 ^ t8;
t10 = (~e) % 0x100000000;
t11 = g ^ d;
t12 = t10 | t11;
a = t3 ^ t12;
t14 = g | t4;
t15 = t7 ^ t14;
t16 = d | t10;
c = t15 ^ t16
a ^= key[4 * 20 + 8]
b ^= key[4 * 20 + 9]
c ^= key[4 * 20 + 10]
d ^= key[4 * 20 + 11]
c = rotr32(c, 22)
a = rotr32(a, 5)
c ^= d ^ ((b << 7) & 0xFFFFFFFF)
a ^= b ^ d
d = rotr32(d, 7)
b = rotr32(b, 1)
d ^= c ^ ((a << 3) & 0xFFFFFFFF)
b ^= a ^ c
c = rotr32(c, 3)
a = rotr32(a, 13)
t1 = b ^ c;
t2 = b | c;
t3 = a ^ c;
t7 = a ^ d;
t4 = t2 ^ t3;
t5 = d | t4;
t9 = t2 ^ t7;
e = t1 ^ t5;
t8 = t1 | t5;
t11 = a & t4;
g = t8 ^ t9;
t12 = e | t9;
f = t11 ^ t12;
t14 = a & g;
t15 = t2 ^ t14;
t16 = e & t15;
h = t4 ^ t16
e ^= key[4 * 19 + 8]
f ^= key[4 * 19 + 9]
g ^= key[4 * 19 + 10]
h ^= key[4 * 19 + 11]
g = rotr32(g, 22)
e = rotr32(e, 5)
g ^= h ^ ((f << 7) & 0xFFFFFFFF)
e ^= f ^ h
h = rotr32(h, 7)
f = rotr32(f, 1)
h ^= g ^ ((e << 3) & 0xFFFFFFFF)
f ^= e ^ g
g = rotr32(g, 3)
e = rotr32(e, 13)
t1 = f ^ h;
t2 = (~t1) % 0x100000000;
t3 = e ^ g;
t4 = g ^ t1;
t7 = e | t2;
t5 = f & t4;
t8 = h ^ t7;
t11 = (~t4) % 0x100000000;
a = t3 ^ t5;
t9 = t3 | t8;
t14 = h & t11;
d = t1 ^ t9;
t12 = a | d;
b = t11 ^ t12;
t15 = t3 ^ t12;
c = t14 ^ t15
a ^= key[4 * 18 + 8]
b ^= key[4 * 18 + 9]
c ^= key[4 * 18 + 10]
d ^= key[4 * 18 + 11]
c = rotr32(c, 22)
a = rotr32(a, 5)
c ^= d ^ ((b << 7) & 0xFFFFFFFF)
a ^= b ^ d
d = rotr32(d, 7)
b = rotr32(b, 1)
d ^= c ^ ((a << 3) & 0xFFFFFFFF)
b ^= a ^ c
c = rotr32(c, 3)
a = rotr32(a, 13)
t1 = a ^ d;
t2 = a & b;
t3 = b ^ c;
t4 = a ^ t3;
t5 = b | d;
t7 = c | t1;
h = t4 ^ t5;
t8 = b ^ t7;
t11 = (~t2) % 0x100000000;
t9 = t4 & t8;
f = t1 ^ t9;
t13 = t9 ^ t11;
t12 = h & f;
g = t12 ^ t13;
t15 = a & d;
t16 = c ^ t13;
e = t15 ^ t16
e ^= key[4 * 17 + 8]
f ^= key[4 * 17 + 9]
g ^= key[4 * 17 + 10]
h ^= key[4 * 17 + 11]
g = rotr32(g, 22)
e = rotr32(e, 5)
g ^= h ^ ((f << 7) & 0xFFFFFFFF)
e ^= f ^ h
h = rotr32(h, 7)
f = rotr32(f, 1)
h ^= g ^ ((e << 3) & 0xFFFFFFFF)
f ^= e ^ g
g = rotr32(g, 3)
e = rotr32(e, 13)
t1 = (~e) % 0x100000000
t2 = e ^ f
t3 = t1 | t2
t4 = h ^ t3
t7 = h & t2
t5 = g ^ t4
t8 = t1 ^ t7
c = t2 ^ t5
t11 = e & t4
t9 = c & t8
t14 = t5 ^ t8
b = t4 ^ t9
t12 = t5 | b
d = t11 ^ t12
a = d ^ t14
a ^= key[4 * 16 + 8]
b ^= key[4 * 16 + 9]
c ^= key[4 * 16 + 10]
d ^= key[4 * 16 + 11]
c = rotr32(c, 22)
a = rotr32(a, 5)
c ^= d ^ ((b << 7) & 0xFFFFFFFF)
a ^= b ^ d
d = rotr32(d, 7)
b = rotr32(b, 1)
d ^= c ^ ((a << 3) & 0xFFFFFFFF)
b ^= a ^ c
c = rotr32(c, 3)
a = rotr32(a, 13)
t1 = a & b;
t2 = a | b;
t3 = c | t1;
t4 = d & t2;
h = t3 ^ t4;
t6 = (~d) % 0x100000000;
t7 = b ^ t4;
t8 = h ^ t6;
t11 = c ^ t7;
t9 = t7 | t8;
f = a ^ t9;
t12 = d | f;
e = t11 ^ t12;
t14 = a & h;
t15 = t3 ^ f;
t16 = e ^ t14;
g = t15 ^ t16
e ^= key[4 * 15 + 8]
f ^= key[4 * 15 + 9]
g ^= key[4 * 15 + 10]
h ^= key[4 * 15 + 11]
g = rotr32(g, 22)
e = rotr32(e, 5)
g ^= h ^ ((f << 7) & 0xFFFFFFFF)
e ^= f ^ h
h = rotr32(h, 7)
f = rotr32(f, 1)
h ^= g ^ ((e << 3) & 0xFFFFFFFF)
f ^= e ^ g
g = rotr32(g, 3)
e = rotr32(e, 13)
t1 = (~e) % 0x100000000;
t2 = e ^ f;
t3 = g ^ t2;
t4 = g | t1;
t5 = h ^ t4;
t13 = h & t1;
b = t3 ^ t5;
t7 = t3 & t5;
t8 = t2 ^ t7;
t9 = f | t8;
d = t5 ^ t9;
t11 = f | d;
a = t8 ^ t11;
t14 = t3 ^ t11;
c = t13 ^ t14
a ^= key[4 * 14 + 8]
b ^= key[4 * 14 + 9]
c ^= key[4 * 14 + 10]
d ^= key[4 * 14 + 11]
c = rotr32(c, 22)
a = rotr32(a, 5)
c ^= d ^ ((b << 7) & 0xFFFFFFFF)
a ^= b ^ d
d = rotr32(d, 7)
b = rotr32(b, 1)
d ^= c ^ ((a << 3) & 0xFFFFFFFF)
b ^= a ^ c
c = rotr32(c, 3)
a = rotr32(a, 13)
t1 = (~c) % 0x100000000;
t2 = b & t1;
t3 = d ^ t2;
t4 = a & t3;
t5 = b ^ t1;
h = t4 ^ t5;
t7 = b | h;
t8 = a & t7;
f = t3 ^ t8;
t10 = a | d;
t11 = t1 ^ t7;
e = t10 ^ t11;
t13 = a ^ c;
t14 = b & t10;
t15 = t4 | t13;
g = t14 ^ t15
e ^= key[4 * 13 + 8]
f ^= key[4 * 13 + 9]
g ^= key[4 * 13 + 10]
h ^= key[4 * 13 + 11]
g = rotr32(g, 22)
e = rotr32(e, 5)
g ^= h ^ ((f << 7) & 0xFFFFFFFF)
e ^= f ^ h
h = rotr32(h, 7)
f = rotr32(f, 1)
h ^= g ^ ((e << 3) & 0xFFFFFFFF)
f ^= e ^ g
g = rotr32(g, 3)
e = rotr32(e, 13)
t1 = g ^ h;
t2 = g | h;
t3 = f ^ t2;
t4 = e & t3;
b = t1 ^ t4;
t6 = e ^ h;
t7 = f | h;
t8 = t6 & t7;
d = t3 ^ t8;
t10 = (~e) % 0x100000000;
t11 = g ^ d;
t12 = t10 | t11;
a = t3 ^ t12;
t14 = g | t4;
t15 = t7 ^ t14;
t16 = d | t10;
c = t15 ^ t16
a ^= key[4 * 12 + 8]
b ^= key[4 * 12 + 9]
c ^= key[4 * 12 + 10]
d ^= key[4 * 12 + 11]
c = rotr32(c, 22)
a = rotr32(a, 5)
c ^= d ^ ((b << 7) & 0xFFFFFFFF)
a ^= b ^ d
d = rotr32(d, 7)
b = rotr32(b, 1)
d ^= c ^ ((a << 3) & 0xFFFFFFFF)
b ^= a ^ c
c = rotr32(c, 3)
a = rotr32(a, 13)
t1 = b ^ c;
t2 = b | c;
t3 = a ^ c;
t7 = a ^ d;
t4 = t2 ^ t3;
t5 = d | t4;
t9 = t2 ^ t7;
e = t1 ^ t5;
t8 = t1 | t5;
t11 = a & t4;
g = t8 ^ t9;
t12 = e | t9;
f = t11 ^ t12;
t14 = a & g;
t15 = t2 ^ t14;
t16 = e & t15;
h = t4 ^ t16
e ^= key[4 * 11 + 8]
f ^= key[4 * 11 + 9]
g ^= key[4 * 11 + 10]
h ^= key[4 * 11 + 11]
g = rotr32(g, 22)
e = rotr32(e, 5)
g ^= h ^ ((f << 7) & 0xFFFFFFFF)
e ^= f ^ h
h = rotr32(h, 7)
f = rotr32(f, 1)
h ^= g ^ ((e << 3) & 0xFFFFFFFF)
f ^= e ^ g
g = rotr32(g, 3)
e = rotr32(e, 13)
t1 = f ^ h;
t2 = (~t1) % 0x100000000;
t3 = e ^ g;
t4 = g ^ t1;
t7 = e | t2;
t5 = f & t4;
t8 = h ^ t7;
t11 = (~t4) % 0x100000000;
a = t3 ^ t5;
t9 = t3 | t8;
t14 = h & t11;
d = t1 ^ t9;
t12 = a | d;
b = t11 ^ t12;
t15 = t3 ^ t12;
c = t14 ^ t15
a ^= key[4 * 10 + 8]
b ^= key[4 * 10 + 9]
c ^= key[4 * 10 + 10]
d ^= key[4 * 10 + 11]
c = rotr32(c, 22)
a = rotr32(a, 5)
c ^= d ^ ((b << 7) & 0xFFFFFFFF)
a ^= b ^ d
d = rotr32(d, 7)
b = rotr32(b, 1)
d ^= c ^ ((a << 3) & 0xFFFFFFFF)
b ^= a ^ c
c = rotr32(c, 3)
a = rotr32(a, 13)
t1 = a ^ d;
t2 = a & b;
t3 = b ^ c;
t4 = a ^ t3;
t5 = b | d;
t7 = c | t1;
h = t4 ^ t5;
t8 = b ^ t7;
t11 = (~t2) % 0x100000000;
t9 = t4 & t8;
f = t1 ^ t9;
t13 = t9 ^ t11;
t12 = h & f;
g = t12 ^ t13;
t15 = a & d;
t16 = c ^ t13;
e = t15 ^ t16
e ^= key[4 * 9 + 8]
f ^= key[4 * 9 + 9]
g ^= key[4 * 9 + 10]
h ^= key[4 * 9 + 11]
g = rotr32(g, 22)
e = rotr32(e, 5)
g ^= h ^ ((f << 7) & 0xFFFFFFFF)
e ^= f ^ h
h = rotr32(h, 7)
f = rotr32(f, 1)
h ^= g ^ ((e << 3) & 0xFFFFFFFF)
f ^= e ^ g
g = rotr32(g, 3)
e = rotr32(e, 13)
t1 = (~e) % 0x100000000
t2 = e ^ f
t3 = t1 | t2
t4 = h ^ t3
t7 = h & t2
t5 = g ^ t4
t8 = t1 ^ t7
c = t2 ^ t5
t11 = e & t4
t9 = c & t8
t14 = t5 ^ t8
b = t4 ^ t9
t12 = t5 | b
d = t11 ^ t12
a = d ^ t14
a ^= key[4 * 8 + 8]
b ^= key[4 * 8 + 9]
c ^= key[4 * 8 + 10]
d ^= key[4 * 8 + 11]
c = rotr32(c, 22)
a = rotr32(a, 5)
c ^= d ^ ((b << 7) & 0xFFFFFFFF)
a ^= b ^ d
d = rotr32(d, 7)
b = rotr32(b, 1)
d ^= c ^ ((a << 3) & 0xFFFFFFFF)
b ^= a ^ c
c = rotr32(c, 3)
a = rotr32(a, 13)
t1 = a & b;
t2 = a | b;
t3 = c | t1;
t4 = d & t2;
h = t3 ^ t4;
t6 = (~d) % 0x100000000;
t7 = b ^ t4;
t8 = h ^ t6;
t11 = c ^ t7;
t9 = t7 | t8;
f = a ^ t9;
t12 = d | f;
e = t11 ^ t12;
t14 = a & h;
t15 = t3 ^ f;
t16 = e ^ t14;
g = t15 ^ t16
e ^= key[4 * 7 + 8]
f ^= key[4 * 7 + 9]
g ^= key[4 * 7 + 10]
h ^= key[4 * 7 + 11]
g = rotr32(g, 22)
e = rotr32(e, 5)
g ^= h ^ ((f << 7) & 0xFFFFFFFF)
e ^= f ^ h
h = rotr32(h, 7)
f = rotr32(f, 1)
h ^= g ^ ((e << 3) & 0xFFFFFFFF)
f ^= e ^ g
g = rotr32(g, 3)
e = rotr32(e, 13)
t1 = (~e) % 0x100000000;
t2 = e ^ f;
t3 = g ^ t2;
t4 = g | t1;
t5 = h ^ t4;
t13 = h & t1;
b = t3 ^ t5;
t7 = t3 & t5;
t8 = t2 ^ t7;
t9 = f | t8;
d = t5 ^ t9;
t11 = f | d;
a = t8 ^ t11;
t14 = t3 ^ t11;
c = t13 ^ t14
a ^= key[4 * 6 + 8]
b ^= key[4 * 6 + 9]
c ^= key[4 * 6 + 10]
d ^= key[4 * 6 + 11]
c = rotr32(c, 22)
a = rotr32(a, 5)
c ^= d ^ ((b << 7) & 0xFFFFFFFF)
a ^= b ^ d
d = rotr32(d, 7)
b = rotr32(b, 1)
d ^= c ^ ((a << 3) & 0xFFFFFFFF)
b ^= a ^ c
c = rotr32(c, 3)
a = rotr32(a, 13)
t1 = (~c) % 0x100000000;
t2 = b & t1;
t3 = d ^ t2;
t4 = a & t3;
t5 = b ^ t1;
h = t4 ^ t5;
t7 = b | h;
t8 = a & t7;
f = t3 ^ t8;
t10 = a | d;
t11 = t1 ^ t7;
e = t10 ^ t11;
t13 = a ^ c;
t14 = b & t10;
t15 = t4 | t13;
g = t14 ^ t15
e ^= key[4 * 5 + 8]
f ^= key[4 * 5 + 9]
g ^= key[4 * 5 + 10]
h ^= key[4 * 5 + 11]
g = rotr32(g, 22)
e = rotr32(e, 5)
g ^= h ^ ((f << 7) & 0xFFFFFFFF)
e ^= f ^ h
h = rotr32(h, 7)
f = rotr32(f, 1)
h ^= g ^ ((e << 3) & 0xFFFFFFFF)
f ^= e ^ g
g = rotr32(g, 3)
e = rotr32(e, 13)
t1 = g ^ h;
t2 = g | h;
t3 = f ^ t2;
t4 = e & t3;
b = t1 ^ t4;
t6 = e ^ h;
t7 = f | h;
t8 = t6 & t7;
d = t3 ^ t8;
t10 = (~e) % 0x100000000;
t11 = g ^ d;
t12 = t10 | t11;
a = t3 ^ t12;
t14 = g | t4;
t15 = t7 ^ t14;
t16 = d | t10;
c = t15 ^ t16
a ^= key[4 * 4 + 8]
b ^= key[4 * 4 + 9]
c ^= key[4 * 4 + 10]
d ^= key[4 * 4 + 11]
c = rotr32(c, 22)
a = rotr32(a, 5)
c ^= d ^ ((b << 7) & 0xFFFFFFFF)
a ^= b ^ d
d = rotr32(d, 7)
b = rotr32(b, 1)
d ^= c ^ ((a << 3) & 0xFFFFFFFF)
b ^= a ^ c
c = rotr32(c, 3)
a = rotr32(a, 13)
t1 = b ^ c;
t2 = b | c;
t3 = a ^ c;
t7 = a ^ d;
t4 = t2 ^ t3;
t5 = d | t4;
t9 = t2 ^ t7;
e = t1 ^ t5;
t8 = t1 | t5;
t11 = a & t4;
g = t8 ^ t9;
t12 = e | t9;
f = t11 ^ t12;
t14 = a & g;
t15 = t2 ^ t14;
t16 = e & t15;
h = t4 ^ t16
e ^= key[4 * 3 + 8]
f ^= key[4 * 3 + 9]
g ^= key[4 * 3 + 10]
h ^= key[4 * 3 + 11]
g = rotr32(g, 22)
e = rotr32(e, 5)
g ^= h ^ ((f << 7) & 0xFFFFFFFF)
e ^= f ^ h
h = rotr32(h, 7)
f = rotr32(f, 1)
h ^= g ^ ((e << 3) & 0xFFFFFFFF)
f ^= e ^ g
g = rotr32(g, 3)
e = rotr32(e, 13)
t1 = f ^ h;
t2 = (~t1) % 0x100000000;
t3 = e ^ g;
t4 = g ^ t1;
t7 = e | t2;
t5 = f & t4;
t8 = h ^ t7;
t11 = (~t4) % 0x100000000;
a = t3 ^ t5;
t9 = t3 | t8;
t14 = h & t11;
d = t1 ^ t9;
t12 = a | d;
b = t11 ^ t12;
t15 = t3 ^ t12;
c = t14 ^ t15
a ^= key[4 * 2 + 8]
b ^= key[4 * 2 + 9]
c ^= key[4 * 2 + 10]
d ^= key[4 * 2 + 11]
c = rotr32(c, 22)
a = rotr32(a, 5)
c ^= d ^ ((b << 7) & 0xFFFFFFFF)
a ^= b ^ d
d = rotr32(d, 7)
b = rotr32(b, 1)
d ^= c ^ ((a << 3) & 0xFFFFFFFF)
b ^= a ^ c
c = rotr32(c, 3)
a = rotr32(a, 13)
t1 = a ^ d;
t2 = a & b;
t3 = b ^ c;
t4 = a ^ t3;
t5 = b | d;
t7 = c | t1;
h = t4 ^ t5;
t8 = b ^ t7;
t11 = (~t2) % 0x100000000;
t9 = t4 & t8;
f = t1 ^ t9;
t13 = t9 ^ t11;
t12 = h & f;
g = t12 ^ t13;
t15 = a & d;
t16 = c ^ t13;
e = t15 ^ t16
e ^= key[4 * 1 + 8]
f ^= key[4 * 1 + 9]
g ^= key[4 * 1 + 10]
h ^= key[4 * 1 + 11]
g = rotr32(g, 22)
e = rotr32(e, 5)
g ^= h ^ ((f << 7) & 0xFFFFFFFF)
e ^= f ^ h
h = rotr32(h, 7)
f = rotr32(f, 1)
h ^= g ^ ((e << 3) & 0xFFFFFFFF)
f ^= e ^ g
g = rotr32(g, 3)
e = rotr32(e, 13)
t1 = (~e) % 0x100000000
t2 = e ^ f
t3 = t1 | t2
t4 = h ^ t3
t7 = h & t2
t5 = g ^ t4
t8 = t1 ^ t7
c = t2 ^ t5
t11 = e & t4
t9 = c & t8
t14 = t5 ^ t8
b = t4 ^ t9
t12 = t5 | b
d = t11 ^ t12
a = d ^ t14
a ^= key[4 * 0 + 8]
b ^= key[4 * 0 + 9]
c ^= key[4 * 0 + 10]
d ^= key[4 * 0 + 11]
if WORD_BIGENDIAN:
a = byteswap32(a)
b = byteswap32(b)
c = byteswap32(c)
d = byteswap32(d)
in_blk[0] = a
in_blk[1] = b
in_blk[2] = c
in_blk[3] = d
if __name__ == "__main__":
    # Known-answer self-test (Python 2 byte strings): a 32-byte sequential
    # key encrypting the 16-byte sequential plaintext must yield the
    # reference Serpent ciphertext, and decryption must round-trip.
    # NOTE(review): these are str literals, so this self-test only works on
    # Python 2 -- on Python 3 `Serpent` would need bytes input; confirm.
    __testkey = '\x00\x01\x02\x03\x04\x05\x06\x07\x08\x09\x0a\x0b\x0c\x0d\x0e\x0f\x10\x11\x12\x13\x14\x15\x16\x17\x18\x19\x1a\x1b\x1c\x1d\x1e\x1f'
    __testdat = '\x00\x01\x02\x03\x04\x05\x06\x07\x08\t\n\x0b\x0c\r\x0e\x0f'
    assert '\xde&\x9f\xf83\xe42\xb8[.\x88\xd2p\x1c\xe7\\' == Serpent(__testkey).encrypt(__testdat)
    assert __testdat == Serpent(__testkey).decrypt('\xde&\x9f\xf83\xe42\xb8[.\x88\xd2p\x1c\xe7\\')
|
geheimnis/core-commands
|
cryptoalgo/symmetric/serpent.py
|
Python
|
gpl-3.0
| 70,557
|
[
"Brian"
] |
5ffb70769584db678f222cf90dfdde19d7659e4b0bf2d7dbe5fa70bd633ebaa0
|
from setuptools import setup

# Package metadata for fixnc, a helper library for editing netCDF metadata.
VERSION = '0.0.1'
QUALIFIER = ''
DISTNAME = 'fixnc'
LICENSE = 'MIT'
AUTHOR = 'Nikolay Koldunov'
AUTHOR_EMAIL = 'koldunovn@gmail.com'
URL = 'https://github.com/koldunovn/fixnc/'
CLASSIFIERS = [
    'Development Status :: 3 - Alpha',
    'License :: OSI Approved :: MIT License',
    'Operating System :: OS Independent',
    'Intended Audience :: Science/Research',
    'Programming Language :: Python',
    'Programming Language :: Python :: 2',
    'Programming Language :: Python :: 2.7',
    'Topic :: Scientific/Engineering',
]
INSTALL_REQUIRES = ['netcdf4 >= 1.1.8', 'sh >= 1.11']
TESTS_REQUIRE = ['pytest >= 2.7.1']
DESCRIPTION = "Easy edit of netCDF files."
# Shown on PyPI as the long description.
# Fix: "dimentions" was misspelled four times; corrected to "dimensions".
LONG_DESCRIPTION = """
This package makes changing the meta information of the netCDF file easy.
You can add, delete and rename dimensions, variables and attributes.
What it does
------------
* renames dimensions, variables and attributes in netCDF files.
* changes values of variables and attributes.
* adds dimensions, variables and attributes.
* removes attributes.
* reorders dimensions and variables.
Important links
---------------
- HTML documentation: https://fixnc.readthedocs.io/
- Source code: https://github.com/koldunovn/fixnc/
"""
setup(name=DISTNAME,
      version=VERSION,
      license=LICENSE,
      author=AUTHOR,
      author_email=AUTHOR_EMAIL,
      classifiers=CLASSIFIERS,
      description=DESCRIPTION,
      long_description=LONG_DESCRIPTION,
      install_requires=INSTALL_REQUIRES,
      tests_require=TESTS_REQUIRE,
      url=URL,
      packages=['fixnc'],
      include_package_data=True,
      zip_safe=False)
|
koldunovn/fixnc
|
setup.py
|
Python
|
mit
| 1,661
|
[
"NetCDF"
] |
0bae51c5d911b807c8eb76de7d5e801193ebbd1d7bd4b31b00d8bf006cfc5f6b
|
#!/usr/bin/env python
# Copyright 2014-2018 The PySCF Developers. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
import copy
import unittest
import tempfile
from functools import reduce
import numpy
import scipy.linalg
from pyscf import gto
from pyscf import lib
import pyscf.lib.parameters as param
# Module-level fixture shared by every test in KnownValues: an H-O-H
# molecule in which the oxygen carries the custom label "O1" so that
# per-label basis / ECP / nucmod assignment can be exercised.
mol0 = gto.Mole()
mol0.atom = [
    [1 , (0.,1.,1.)],
    ["O1", (0.,0.,0.)],
    [1 , (1.,1.,0.)], ]
# Gaussian nuclear model, addressed both by symbol and by 1-based atom index.
mol0.nucmod = { "O":'gaussian', 3:'g' }
mol0.unit = 'ang'
# Oxygen: one s shell plus even-tempered s/p/d sets; hydrogen: a
# generally-contracted s shell and a kappa-averaged (l=1, kappa=-2) p shell.
mol0.basis = {
    "O": [(0, 0, (15, 1)), ] + gto.etbs(((0, 4, 1, 1.8),
                                         (1, 3, 2, 1.8),
                                         (2, 2, 1, 1.8),)),
    "H": [(0, 0, (1, 1, 0), (3, 3, 1), (5, 1, 0)),
          (1, -2, (1, 1)), ]}
mol0.symmetry = 1
mol0.charge = 1
mol0.spin = 1
mol0.verbose = 7
mol0.ecp = {'O1': 'lanl2dz'}
# Redirect the verbose build output into a throwaway temp file.
ftmp = tempfile.NamedTemporaryFile()
mol0.output = ftmp.name
mol0.build()
def tearDownModule():
    """Close mol0's log file and release the module-level fixtures."""
    global mol0, ftmp
    mol0.stdout.close()
    del mol0, ftmp
class KnownValues(unittest.TestCase):
def test_intor_cross(self):
    """Cross-molecule overlap integrals between mol0 and a rebuilt copy.

    NOTE(review): a second method named test_intor_cross is defined later
    in this class and shadows this one, so this test never runs --
    consider renaming one of them.
    """
    mol1 = mol0.unpack(mol0.pack())
    mol1.symmetry = True
    mol1.unit = 'Ang'
    mol1.atom = '''
            1    0  1  1
            O    0  0  0
            h    1  1  0'''
    # NWChem-format basis string; the '#' line inside is a comment and
    # must be ignored by the parser.
    mol1.basis = {'O': gto.basis.parse('''
C    S
   3047.5249000              0.0018347
    457.3695100              0.0140373
    103.9486900              0.0688426
     29.2101550              0.2321844
      9.2866630              0.4679413
      3.1639270              0.3623120
#     1.                     0.1
C    SP
      7.8682724             -0.1193324              0.0689991
      1.8812885             -0.1608542              0.3164240
      0.5442493              1.1434564              0.7443083
C    SP
      0.1687144              1.0000000              1.0000000'''),
                  'H': '6-31g'}
    mol1.build()
    v = gto.mole.intor_cross('cint1e_ovlp_sph', mol0, mol1)
    self.assertAlmostEqual(numpy.linalg.norm(v), 3.6489423434168562, 1)
def test_num_basis(self):
    """Spherical and 2-component-spinor AO counts of mol0."""
    self.assertEqual(mol0.nao_nr(), 34)
    self.assertEqual(mol0.nao_2c(), 64)
def test_time_reversal_map(self):
    """Regression check of the spinor time-reversal permutation map."""
    # Reference permutation; signs encode the time-reversal phase.
    tao = [ -2, 1, -4, 3, 8, -7, 6, -5,-10, 9,-12, 11,-14, 13,-16, 15,-18, 17,
            20,-19, 24,-23, 22,-21, 26,-25, 30,-29, 28,-27, 32,-31, 36,-35, 34,-33,
            -40, 39,-38, 37,-46, 45,-44, 43,-42, 41,-50, 49,-48, 47,-56, 55,-54, 53,
            -52, 51,-58, 57,-60, 59, 64,-63, 62,-61]
    self.assertEqual(list(mol0.time_reversal_map()), tao)
def test_check_sanity(self):
    """check_sanity must tolerate unknown/overwritten attributes."""
    mol1 = mol0.copy()
    # Deliberately attach an unknown attribute and clobber a method.
    mol1.x = None
    mol1.copy = None
    mol1.check_sanity()
def test_nao_range(self):
    """AO index ranges and shell-to-AO offset tables (sph and spinor)."""
    self.assertEqual(mol0.nao_nr_range(1,4), (2, 7))
    self.assertEqual(mol0.nao_2c_range(1,4), (4, 12))
    self.assertEqual(numpy.dot(range(mol0.nbas+1), mol0.ao_loc_nr()), 2151)
    self.assertEqual(numpy.dot(range(mol0.nbas+1), mol0.ao_loc_2c()), 4066)
def test_search_bas(self):
    """Shell/AO lookup by (atom, l, m, contraction) in sph and cart modes."""
    self.assertEqual(mol0.search_shell_id(1, 1), 7)
    self.assertRaises(RuntimeError, mol0.search_ao_nr, 1, 1, -1, 5)
    self.assertEqual(mol0.search_ao_nr(1, 1, -1, 4), 16)
    # Temporarily switch the shared fixture to cartesian GTOs.
    mol0.cart = True
    self.assertEqual(mol0.search_ao_nr(2, 1, -1, 1), 30)
    mol0.cart = False
def test_atom_types(self):
    """Atoms are grouped by type; labels without their own basis merge into the bare symbol."""
    atoms = [['H0', ( 0, 0, 0)],
             ['H1', ( 0, 0, 0)],
             ['H',  ( 0, 0, 0)],
             ['H3', ( 0, 0, 0)]]
    basis = {'H':'sto3g', 'H1': '6-31g'}
    # H0 and H3 have no dedicated basis entry, so they fold into 'H'.
    atmgroup = gto.mole.atom_types(atoms, basis)
    self.assertEqual(atmgroup, {'H': [0, 2, 3], 'H1': [1]})
    atoms = [['H0', ( 0, 0, 0)],
             ['H1', ( 0, 0, 0)],
             ['H2', ( 0, 0, 0)],
             ['H3', ( 0, 0, 0)]]
    basis = {'H2':'sto3g', 'H3':'6-31g', 'H0':'sto3g', 'H1': '6-31g'}
    # Every label has its own basis entry, so each keeps its own group.
    atmgroup = gto.mole.atom_types(atoms, basis)
    self.assertEqual(atmgroup, {'H2': [2], 'H3': [3], 'H0': [0], 'H1': [1]})
def test_given_symmetry(self):
    """Irrep assignment when the point group is given explicitly."""
    mol = gto.M(atom='H 0 0 -1; H 0 0 1', symmetry='D2h')
    self.assertEqual(mol.irrep_id, [0, 5])
    mol = gto.M(atom='H 0 0 -1; H 0 0 1', symmetry='D2')
    self.assertEqual(mol.irrep_id, [0, 1])
    mol = gto.M(atom='H 0 0 -1; H 0 0 1', symmetry='C2v')
    self.assertEqual(mol.irrep_id, [0])
def test_dumps_loads(self):
    """dumps() must warn about (and skip) un-serializable attributes."""
    import warnings
    mol1 = gto.M()
    # Deliberately attach an attribute JSON cannot serialize.
    mol1.x = lambda *args: None
    with warnings.catch_warnings(record=True) as w:
        warnings.simplefilter("always")
        d = mol1.dumps()
        # Bug fix: the original `assertTrue(w[0].category, UserWarning)`
        # passed UserWarning as the assertion *message* and only checked
        # that the category is truthy. Actually verify the warning class.
        self.assertTrue(issubclass(w[0].category, UserWarning))
    mol1.loads(mol0.dumps())
def test_symm_orb_serialization(self):
    '''Handle the complex symmetry-adapted orbitals'''
    mol = gto.M(atom='He', basis='ccpvdz', symmetry=True)
    mol.loads(mol.dumps())
    # Build complex Lz eigenfunctions from the two real degenerate irreps,
    # then round-trip through JSON and check the dtypes survive.
    lz_minus = numpy.sqrt(.5) * (mol.symm_orb[3] - mol.symm_orb[2] * 1j)
    lz_plus = -numpy.sqrt(.5) * (mol.symm_orb[3] + mol.symm_orb[2] * 1j)
    mol.symm_orb[2] = lz_minus
    mol.symm_orb[3] = lz_plus
    mol.loads(mol.dumps())
    self.assertTrue(mol.symm_orb[0].dtype == numpy.double)
    self.assertTrue(mol.symm_orb[2].dtype == numpy.complex128)
    self.assertTrue(mol.symm_orb[3].dtype == numpy.complex128)
def test_same_mol1(self):
    """gto.same_mol: basis comparison, geometry tolerance, element mismatch."""
    self.assertTrue(gto.same_mol(mol0, mol0))
    mol1 = gto.M(atom='h   0  1  1; O1  0  0  0; h   1  1  0')
    # Same geometry but different basis: equal only when basis is ignored.
    self.assertTrue(not gto.same_mol(mol0, mol1))
    self.assertTrue(gto.same_mol(mol0, mol1, cmp_basis=False))
    # 0.01 Ang displacement: detected at default tol, accepted at tol=.02.
    mol1 = gto.M(atom='h   0  1  1; O1  0  0  0; h   1  1  0.01')
    self.assertTrue(not gto.same_mol(mol0, mol1, cmp_basis=False))
    self.assertTrue(gto.same_mol(mol0, mol1, tol=.02, cmp_basis=False))
    mol1 = gto.M(atom='''H 0.0052917700 0.0000000000 -0.8746076326
                 F 0.0000000000 0.0000000000  0.0516931447''')
    mol2 = gto.M(atom='''H 0.0000000000 0.0000000000 -0.8746076326
                 F 0.0000000000 0.0000000000  0.0516931447''')
    self.assertTrue(gto.same_mol(mol1, mol2))
    self.assertTrue(not gto.same_mol(mol1, mol2, tol=1e-6))
    # Different elements can never compare equal.
    mol3 = gto.M(atom='''H 0.0000000000 0.0000000000 -0.8746076326
                 H 0.0000000000 0.0000000000  0.0516931447''')
    self.assertTrue(not gto.same_mol(mol3, mol2))
def test_same_mol2(self):
    """same_mol under rigid rotations/reflections and labelled atom groups."""
    mol1 = gto.M(atom='H 0.0052917700 0.0000000000 -0.8746076326; F 0.0000000000 0.0000000000 0.0464013747')
    mol2 = gto.M(atom='H 0.0000000000 0.0000000000 -0.8746076326; F 0.0052917700 0.0000000000 0.0464013747')
    self.assertTrue(gto.same_mol(mol1, mol2))
    # Four symmetry-related placements of the same HF molecule: all pairs
    # must compare equal.
    mol1 = gto.M(atom='H 0.0052917700 0.0000000000 -0.8693158626; F 0.0000000000 0.0000000000 0.0464013747')
    mol2 = gto.M(atom='H 0.0000000000 0.0052917700 -0.8693158626; F 0.0000000000 0.0000000000 0.0464013747')
    mol3 = gto.M(atom='H 0.0000000000 0.0000000000 -0.8693158626; F 0.0052917700 0.0000000000 0.0464013747')
    mol4 = gto.M(atom='H -0.0052917700 0.0000000000 -0.8746076326; F 0.0000000000 0.0000000000 0.0411096047')
    mols = (mol1, mol2, mol3, mol4)
    for i,mi in enumerate(mols):
        for j in range(i):
            self.assertTrue(gto.same_mol(mols[i], mols[j]))
    # Swapping two equivalent H positions must not break equality when the
    # H/H1 labels carry the same basis.
    mol1 = gto.M(atom='''H 0.0000000000 0.0000000000 0.0000000000
                  H 0.9497795800 1.3265673200 0.0000000000
                  H 0.9444878100 -1.3265673200 0.0000000000
                  H1 -0.9444878100 0.0000000000 1.3265673200
                  H1 -0.9444878100 0.0000000000 -1.3265673200''', basis={'H':'sto3g', 'H1':'sto3g'}, charge=1)
    mol2 = gto.M(atom='''H 0.0000000000 0.0000000000 0.0000000000
                  H 0.9444878100 1.3265673200 0.0000000000
                  H 0.9497795800 -1.3265673200 0.0000000000
                  H1 -0.9444878100 0.0000000000 1.3265673200
                  H1 -0.9444878100 0.0000000000 -1.3265673200''', basis={'H':'sto3g', 'H1':'sto3g'}, charge=1)
    self.assertTrue(gto.same_mol(mol1, mol2))
    self.assertEqual(len(gto.atom_types(mol1._atom)), 2)
    # Different basis on the labelled atoms -> not the same molecule.
    mol3 = gto.M(atom='''H 0.0000000000 0.0000000000 0.0000000000
                  H1 0.9497795800 1.3265673200 0.0000000000
                  H1 0.9444878100 -1.3265673200 0.0000000000
                  H1 -0.9444878100 0.0000000000 1.3265673200
                  H1 -0.9444878100 0.0000000000 -1.3265673200''', basis={'H':'sto3g', 'H1':'321g'}, charge=1)
    self.assertTrue(not gto.same_mol(mol3, mol2))
def test_inertia_momentum(self):
    """Inertia moment with default isotope masses vs. an explicit mass list."""
    mol1 = gto.Mole()
    mol1.atom = mol0.atom
    mol1.nucmod = 'G'
    mol1.verbose = 5
    # Override the hydrogen mass (tritium-like) through nucprop.
    mol1.nucprop = {'H': {'mass': 3}}
    mol1.output = '/dev/null'
    mol1.build(False, False)
    self.assertAlmostEqual(lib.fp(gto.inertia_moment(mol1)),
                           2.139593709454326, 9)
    # Supplying isotope-averaged masses explicitly changes the result.
    mass = mol0.atom_mass_list(isotope_avg=True)
    self.assertAlmostEqual(lib.fp(gto.inertia_moment(mol1, mass)),
                           2.1549269955776205, 9)
def test_chiral_mol(self):
    """Chirality detection for mirror-image molecules and single molecules."""
    # mol1/mol2 are mirror images (Li and Be swapped across a reflection).
    mol1 = gto.M(atom='C 0 0 0; H 1 1 1; He -1 -1 1; Li -1 1 -1; Be 1 -1 -1')
    mol2 = gto.M(atom='C 0 0 0; H 1 1 1; He -1 -1 1; Be -1 1 -1; Li 1 -1 -1')
    self.assertTrue(gto.chiral_mol(mol1, mol2))
    self.assertTrue(gto.chiral_mol(mol1))
    mol1 = gto.M(atom='''H 0.9444878100 1.3265673200 0.0052917700
                 H 0.9444878100 -1.3265673200 0.0000000000
                 H -0.9444878100 0.0000000000 1.3265673200
                 H -0.9444878100 0.0000000000 -1.3265673200''')
    mol2 = gto.M(atom='''H 0.9444878100 1.3265673200 0.0000000000
                 H 0.9444878100 -1.3265673200 0.0052917700
                 H -0.9444878100 0.0000000000 1.3265673200
                 H -0.9444878100 0.0000000000 -1.3265673200''')
    self.assertTrue(gto.chiral_mol(mol1, mol2))
    # Single-argument form: a distorted tetrahedron is itself chiral.
    mol1 = gto.M(atom='''H 0.9444878100 1.3265673200 0.0052917700
                 H 0.9444878100 -1.3265673200 0.0000000000
                 H -0.9444878100 0.0000000000 1.3265673200
                 H -0.9444878100 0.0000000000 -1.3265673200''')
    self.assertTrue(gto.chiral_mol(mol1))
def test_first_argument(self):
    """Mole.build accepts the atom specification as its first positional arg."""
    mol1 = gto.Mole()
    mol1.build('He')
    self.assertEqual(mol1.natm, 1)
def test_atom_as_file(self):
    """The atom= argument may name a geometry file (xyz or z-matrix)."""
    ftmp = tempfile.NamedTemporaryFile('w')
    # file in xyz format
    ftmp.write('He 0 0 0\nHe 0 0 1\n')
    ftmp.flush()
    mol1 = gto.M(atom=ftmp.name)
    self.assertEqual(mol1.natm, 2)
    # file in zmatrix format
    ftmp = tempfile.NamedTemporaryFile('w')
    ftmp.write('He\nHe 1 1.5\n')
    ftmp.flush()
    mol1 = gto.M(atom=ftmp.name)
    self.assertEqual(mol1.natm, 2)
def test_format_atom(self):
    """format_atom: mixed input forms, unit conversion, comments, z-matrix."""
    atoms = [['h' , 0,1,1], "O1 0. 0. 0.", [1, 1.,1.,0.],]
    # 'Ang' input is converted to Bohr.
    self.assertTrue(numpy.allclose(gto.mole.format_atom(atoms, unit='Ang')[0][1],
                                   [0.0, 1.8897261245650618, 1.8897261245650618]))
    # unit=1 means coordinates are already in Bohr; '#'-lines are comments.
    atoms = '''h 0 1 1
    O1 0 0 0; 1 1 1 0; #H 0 0 3'''
    self.assertTrue(numpy.allclose(gto.mole.format_atom(atoms, unit=1)[0][1],
                                   [0.0, 1., 1.]))
    # Z-matrix style specification.
    atoms = 'O1; h 1 1; 1 1 1 2 90'
    atoms = gto.mole.format_atom(atoms, unit=1)[2]
    self.assertEqual(atoms[0], 'H')
    self.assertTrue(numpy.allclose(atoms[1], [0, 0, 1.]))
def test_format_basis(self):
    """Accepted basis-input forms: nuclear charge keys, decorated labels,
    'default' entries, parsed NWChem strings, uncontracted/ETB sets,
    mixed tuples, and the error path for a missing element."""
    # Basis keyed by nuclear charge (8 == oxygen).
    mol = gto.M(atom = '''O 0 0 0; 1 0 1 0; H 0 0 1''',
                basis = {8: 'ccpvdz'})
    self.assertEqual(mol.nao_nr(), 14)
    # Labels with ':' / '@' decorations select per-atom basis sets.
    mol = gto.M(atom = '''O 0 0 0; H:1 0 1 0; H@2 0 0 1''',
                basis = {'O': 'ccpvdz', 'H:1': 'sto3g', 'H': 'unc-iglo3'})
    self.assertEqual(mol.nao_nr(), 32)
    # 'default' applies to every atom without an explicit entry.
    mol = gto.M(
        atom = '''O 0 0 0; H1 0 1 0; H2 0 0 1''',
        basis = {'default': ('6-31g', [[0, [.05, 1.]], []]), 'H2': 'sto3g'}
    )
    self.assertEqual(mol.nao_nr(), 14)
    mol = gto.M(
        atom = '''O 0 0 0; H1 0 1 0; H2 0 0 1''',
        basis = {'H1': gto.parse('''
# Parse NWChem format basis string (see https://bse.pnl.gov/bse/portal).
# Comment lines are ignored
#BASIS SET: (6s,3p) -> [2s,1p]
        H    S
              2.9412494             -0.09996723
              0.6834831              0.39951283
              0.2222899              0.70011547
        H    S
              2.9412494             0.15591627
              0.6834831             0.60768372
              0.2222899             0.39195739
        ''', optimize=True),
                 'O': 'unc-ccpvdz',
                 'H2': gto.load('sto-3g', 'He')  # or use basis of another atom
                 }
    )
    self.assertEqual(mol.nao_nr(), 29)
    # A list mixes a named set with a literal (uncontracted) basis string;
    # expand_etbs builds even-tempered shells programmatically.
    mol = gto.M(
        atom = '''O 0 0 0; H1 0 1 0; H2 0 0 1''',
        basis = {'H': ['sto3g', '''unc
        C    S
             71.6168370              0.15432897
             13.0450960              0.53532814
              3.5305122              0.44463454
        C    SP
              2.9412494             -0.09996723             0.15591627
              0.6834831              0.39951283             0.60768372
              0.2222899              0.70011547             0.39195739
        '''],
                 'O': mol.expand_etbs([(0, 4, 1.5, 2.2),  # s-function
                                       (1, 2, 0.5, 2.2)]) # p-function
                 }
    )
    self.assertEqual(mol.nao_nr(), 42)
    # A tuple of several sources is concatenated per atom.
    mol = gto.M(
        atom = '''O 0 0 0; H1 0 1 0; H2 0 0 1''',
        basis = ('sto3g', 'ccpvdz', '3-21g',
                 gto.etbs([(0, 4, 1.5, 2.2), (1, 2, 0.5, 2.2)]),
                 [[0, numpy.array([1e3, 1.])]])
    )
    self.assertEqual(mol.nao_nr(), 77)
    # ccpvdz has no entry for Hg: build() must raise.
    mol.atom = 'Hg'
    mol.basis = 'ccpvdz'
    self.assertRaises(RuntimeError, mol.build)
def test_default_basis(self):
    """'default' basis covers atoms without an explicit per-label entry."""
    mol = gto.M(atom=[['h' , 0,1,1], ["O1", (0.,0.,0.)], [1, 1.,1.,0.],],
                basis={'default':'321g', 'O1': 'sto3g'})
    self.assertEqual(sorted(mol._basis.keys()), ['H', 'O1'])
def test_parse_pople_basis(self):
    """Pople basis-name variants (star / parenthesized polarization) parse
    to the expected shell counts."""
    self.assertEqual(len(gto.basis.load('6-31G(d)'      , 'H')), 2)
    self.assertEqual(len(gto.basis.load('6-31G(d)'      , 'C')), 6)
    self.assertEqual(len(gto.basis.load('6-31Gs'        , 'C')), 6)
    self.assertEqual(len(gto.basis.load('6-31G*'        , 'C')), 6)
    self.assertEqual(len(gto.basis.load('6-31G(d,p)'    , 'H')), 3)
    self.assertEqual(len(gto.basis.load('6-31G(d,p)'    , 'C')), 6)
    self.assertEqual(len(gto.basis.load('6-31G(2d,2p)'  , 'H')), 4)
    self.assertEqual(len(gto.basis.load('6-31G(2d,2p)'  , 'C')), 7)
    self.assertEqual(len(gto.basis.load('6-31G(3df,3pd)', 'H')), 6)
    self.assertEqual(len(gto.basis.load('6-31G(3df,3pd)', 'C')), 9)
def test_parse_basis(self):
    """A single NWChem basis string covering several elements is parsed and
    dispatched to the right atoms."""
    mol = gto.M(atom='''
                6        0    0   -0.5
                8        0    0    0.5
                1        1    0   -1.0
                1       -1    0   -1.0''',
                basis='''
#BASIS SET: (3s) -> [2s]
H    S
      5.4471780              0.1562849787
      0.82454724             0.9046908767
H    S
      0.18319158             1.0000000
#BASIS SET: (6s,3p) -> [3s,2p]
C    S
    172.2560000              0.0617669
     25.9109000              0.3587940
      5.5333500              0.7007130
C    SP
      3.6649800             -0.3958970              0.2364600
      0.7705450              1.2158400              0.8606190
C    SP
      0.1958570              1.0000000              1.0000000
#BASIS SET: (6s,3p) -> [3s,2p]
O    S
    322.0370000              0.0592394
     48.4308000              0.3515000
     10.4206000              0.7076580
O    SP
      7.4029400             -0.4044530              0.2445860
      1.5762000              1.2215600              0.8539550
O    SP
      0.3736840              1.0000000              1.0000000
''')
    self.assertTrue(mol.nao_nr() == 22)
def test_ghost(self):
    """Ghost atoms: bare 'ghost' label, 'ghost-X'/'X_'/'ghost.' prefixes,
    and explicit ghost basis entries."""
    mol = gto.M(
        atom = 'C 0 0 0; ghost 0 0 2',
        basis = {'C': 'sto3g', 'ghost': gto.basis.load('sto3g', 'H')}
    )
    self.assertEqual(mol.nao_nr(), 6)
    # Prefixed ghost labels inherit the basis of the underlying element.
    mol = gto.M(atom='''
    ghost-O     0.000000000     0.000000000     2.500000000
    X_H        -0.663641000    -0.383071000     3.095377000
    ghost.H     0.663588000     0.383072000     3.095377000
    O     1.000000000     0.000000000     2.500000000
    H    -1.663641000    -0.383071000     3.095377000
    H     1.663588000     0.383072000     3.095377000
    ''',
                basis='631g')
    self.assertEqual(mol.nao_nr(), 26)
    # A dedicated 'ghost-O' basis entry overrides the element default.
    mol = gto.M(atom='''
    ghost-O     0.000000000     0.000000000     2.500000000
    X_H        -0.663641000    -0.383071000     3.095377000
    ghost.H     0.663588000     0.383072000     3.095377000
    O     1.000000000     0.000000000     2.500000000
    ''',
                basis={'H': '3-21g', 'o': '3-21g', 'ghost-O': 'sto3g'})
    self.assertEqual(mol.nao_nr(), 18)  # 5 + 2 + 2 + 9
def test_nucmod(self):
    """Nuclear-model parsing and per-atom switching between point and
    Gaussian nuclei (including a callable nucmod)."""
    gto.filatov_nuc_mod(80)
    self.assertEqual(gto.mole._parse_nuc_mod(1), gto.NUC_GAUSS)
    self.assertEqual(gto.mole._parse_nuc_mod('Gaussian'), gto.NUC_GAUSS)
    mol1 = gto.Mole()
    mol1.atom = mol0.atom
    mol1.nucmod = 'G'
    mol1.verbose = 5
    mol1.nucprop = {'H': {'mass': 3}}
    mol1.output = '/dev/null'
    mol1.build(False, False)
    # Explicit zeta on atom 0; atom 1 keeps the mass-derived zeta.
    mol1.set_nuc_mod(0, 2)
    self.assertTrue(mol1._atm[1,gto.NUC_MOD_OF] == gto.NUC_GAUSS)
    self.assertAlmostEqual(mol1._env[mol1._atm[0,gto.PTR_ZETA]], 2, 9)
    self.assertAlmostEqual(mol1._env[mol1._atm[1,gto.PTR_ZETA]], 586314366.54656982, 4)
    # zeta == 0 switches the atom back to a point nucleus.
    mol1.set_nuc_mod(1, 0)
    self.assertTrue(mol1._atm[1,gto.NUC_MOD_OF] == gto.NUC_POINT)
    mol1.nucmod = None
    mol1.build(False, False)
    self.assertTrue(mol1._atm[1,gto.NUC_MOD_OF] == gto.NUC_POINT)
    # A dict of callables applies the Gaussian model per element symbol.
    mol1.nucmod = {'H': gto.filatov_nuc_mod}
    mol1.build(False, False)
    self.assertTrue(mol1._atm[0,gto.NUC_MOD_OF] == gto.NUC_GAUSS)
    self.assertTrue(mol1._atm[1,gto.NUC_MOD_OF] == gto.NUC_POINT)
    self.assertTrue(mol1._atm[2,gto.NUC_MOD_OF] == gto.NUC_GAUSS)
def test_zmat(self):
    """cart2zmat / zmat2cart round-trip on a fixed geometry and on random
    coordinates."""
    coord = numpy.array((
        (0.200000000000, -1.889726124565, 0.000000000000),
        (1.300000000000, -1.889726124565, 0.000000000000),
        (2.400000000000, -1.889726124565, 0.000000000000),
        (3.500000000000, -1.889726124565, 0.000000000000),
        (0.000000000000, 0.000000000000, -1.889726124565),
        (0.000000000000, 1.889726124565, 0.000000000000),
        (0.200000000000, -0.800000000000, 0.000000000000),
        (1.889726124565, 0.000000000000, 1.133835674739)))
    zstr0 = gto.cart2zmat(coord)
    # Prepend an element symbol to each z-matrix line before parsing back.
    zstr = '\n'.join(['H '+x for x in zstr0.splitlines()])
    atoms = gto.zmat2cart(zstr)
    zstr1 = gto.cart2zmat([x[1] for x in atoms])
    self.assertTrue(zstr0 == zstr1)
    # Same round-trip with seeded random coordinates (deterministic).
    numpy.random.seed(1)
    coord = numpy.random.random((6,3))
    zstr0 = gto.cart2zmat(coord)
    zstr = '\n'.join(['H '+x for x in zstr0.splitlines()])
    atoms = gto.zmat2cart(zstr)
    zstr1 = gto.cart2zmat([x[1] for x in atoms])
    self.assertTrue(zstr0 == zstr1)
def test_c2s(self):  # Transformation of cart <-> sph, sph <-> spinor
    """Consistency of cartesian/spherical/spinor transformation matrices:
    composing sph->spinor with cart->sph must reproduce cart->spinor, and
    overlap matrices must agree across representations."""
    c = mol0.sph2spinor_coeff()
    s0 = mol0.intor('int1e_ovlp_spinor')
    s1 = mol0.intor('int1e_ovlp_sph')
    # Assemble the spinor overlap from the two spin blocks of the sph one.
    sa = reduce(numpy.dot, (c[0].T.conj(), s1, c[0]))
    sa+= reduce(numpy.dot, (c[1].T.conj(), s1, c[1]))
    mol0.cart = True
    s2 = mol0.intor('int1e_ovlp')
    mol0.cart = False
    self.assertAlmostEqual(abs(s0 - sa).max(), 0, 12)
    c = mol0.cart2sph_coeff()
    sa = reduce(numpy.dot, (c.T.conj(), s2, c))
    self.assertAlmostEqual(abs(s1 - sa).max(), 0, 12)
    # l = 1, kappa-averaged.
    c0 = gto.mole.cart2sph(1)
    ca, cb = gto.mole.cart2spinor_l(1)
    ua, ub = gto.mole.sph2spinor_l(1)
    self.assertAlmostEqual(abs(c0.dot(ua)-ca).max(), 0, 9)
    self.assertAlmostEqual(abs(c0.dot(ub)-cb).max(), 0, 9)
    # l = 0 with kappa = -1; the 's'-normalized variant differs by the
    # constant Y00 = 0.2820947917...
    c0 = gto.mole.cart2sph(0, normalized='sp')
    ca, cb = gto.mole.cart2spinor_kappa(-1, 0, normalized='sp')
    ua, ub = gto.mole.sph2spinor_kappa(-1, 0)
    self.assertAlmostEqual(abs(c0.dot(ua)-ca).max(), 0, 9)
    self.assertAlmostEqual(abs(c0.dot(ub)-cb).max(), 0, 9)
    c1 = gto.mole.cart2sph(0, numpy.eye(1))
    self.assertAlmostEqual(abs(c0*0.282094791773878143-c1).max(), 0, 12)
    # l = 1 with kappa = +1.
    c0 = gto.mole.cart2sph(1, normalized='sp')
    ca, cb = gto.mole.cart2spinor_kappa(1, 1, normalized='sp')
    ua, ub = gto.mole.sph2spinor_kappa(1, 1)
    self.assertAlmostEqual(abs(c0.dot(ua)-ca).max(), 0, 9)
    self.assertAlmostEqual(abs(c0.dot(ub)-cb).max(), 0, 9)
    c1 = gto.mole.cart2sph(1, numpy.eye(3).T)
    self.assertAlmostEqual(abs(c0*0.488602511902919921-c1).max(), 0, 12)
def test_bas_method(self):
    """Per-shell helpers: cartesian/spinor lengths, contraction coefficients,
    and gto_norm input validation."""
    self.assertEqual([mol0.bas_len_cart(x) for x in range(mol0.nbas)],
                     [1, 3, 1, 1, 1, 1, 1, 3, 3, 3, 6, 6, 1, 3])
    self.assertEqual([mol0.bas_len_spinor(x) for x in range(mol0.nbas)],
                     [2, 4, 2, 2, 2, 2, 2, 6, 6, 6, 10, 10, 2, 4])
    c0 = mol0.bas_ctr_coeff(0)
    self.assertAlmostEqual(abs(c0[:,0]/c0[0,0] - (1,3,1)).max(), 0, 9)
    self.assertAlmostEqual(abs(c0[:,1] - (0,1,0)).max(), 0, 9)
    # Negative angular momentum is rejected.
    self.assertRaises(ValueError, mol0.gto_norm, -1, 1.)
def test_nelectron(self):
    """nelectron / nelec properties, including the inconsistent-spin error
    path and explicit nelec override.

    NOTE(review): this test mutates the shared mol0 fixture (spin) and
    relies on restoring it afterwards.
    """
    mol = gto.Mole()
    mol.atom = [
        [1 , (0.,1.,1.)],
        ["O1", (0.,0.,0.)],
        [1 , (1.,1.,0.)], ]
    mol.charge = 1
    self.assertEqual(mol.nelectron, 9)
    # Setting the property to its own value must be a no-op.
    mol0.nelectron = mol0.nelectron
    mol0.nelectron = mol0.nelectron
    # 9 electrons with spin=2 is inconsistent -> RuntimeError.
    mol0.spin = 2
    self.assertRaises(RuntimeError, lambda *args: mol0.nelec)
    mol0.spin = 1
    mol1 = copy.copy(mol0)
    self.assertEqual(mol1.nelec, (5, 4))
    mol1.nelec = (3, 6)
    self.assertEqual(mol1.nelec, (3, 6))
def test_multiplicity(self):
    """multiplicity property updates spin; inconsistent values make nelec raise."""
    mol1 = copy.copy(mol0)
    self.assertEqual(mol1.multiplicity, 2)
    mol1.multiplicity = 5
    self.assertEqual(mol1.multiplicity, 5)
    self.assertEqual(mol1.spin, 4)
    self.assertRaises(RuntimeError, lambda:mol1.nelec)
def test_ms(self):
    """ms property (Sz) couples to multiplicity and spin consistently."""
    mol1 = copy.copy(mol0)
    self.assertEqual(mol1.ms, 0.5)
    mol1.ms = 1
    self.assertEqual(mol1.multiplicity, 3)
    self.assertEqual(mol1.spin, 2)
    self.assertRaises(RuntimeError, lambda:mol1.nelec)
def test_basis_not_found(self):
    """Atoms whose element has no basis entry get zero AOs (with a warning),
    not an error."""
    mol = gto.M(atom='''
    H        -0.663641000    -0.383071000     3.095377000
    H         0.663588000     0.383072000     3.095377000
    O     0.000000000     0.000000000     2.500000000
    H    -0.663641000    -0.383071000     3.095377000
    H     0.663588000     0.383072000     3.095377000
    O     1.000000000     0.000000000     2.500000000
    H    -0.663641000    -0.383071000     3.095377000
    H     0.663588000     0.383072000     3.095377000
    ''', basis={'O': '3-21g'})
    #TODO: assert the warning "Warn: Basis not found for atom 1 H"
    self.assertEqual(mol.nao_nr(), 18)
    # Basis-less H atoms occupy zero-width AO slices.
    aoslice = mol.aoslice_by_atom()
    self.assertEqual(aoslice[:,0].tolist(), [0, 0, 0, 5, 5, 5,10,10])
    self.assertEqual(aoslice[:,1].tolist(), [0, 0, 5, 5, 5,10,10,10])
def test_atom_method(self):
    """Per-atom accessors agree with aoslice tables (sph and 2c)."""
    aoslice = mol0.aoslice_by_atom()
    for i in range(mol0.natm):
        symb = mol0.atom_pure_symbol(i)
        shls = mol0.atom_shell_ids(i)
        nshls = aoslice[i][1] - aoslice[i][0]
        self.assertEqual(shls[0], aoslice[i][0])
        self.assertEqual(len(shls), nshls)
        self.assertEqual(mol0.atom_nshells(i), nshls)
    aoslice = mol0.aoslice_2c_by_atom()
    mol0.elements  # test property(elements) in Mole
    self.assertEqual([x[2] for x in aoslice], [0, 8, 56])
    self.assertEqual([x[3] for x in aoslice], [8, 56, 64])
def test_dump_loads_skip(self):
    """Serialization round-trips: chkfile save/update, dumps/loads,
    pack/unpack (both returning and in-place variants)."""
    import json
    tmpfile = tempfile.NamedTemporaryFile()
    lib.chkfile.save_mol(mol0, tmpfile.name)
    mol1 = gto.Mole()
    mol1.update(tmpfile.name)
    # dumps() may produce different orders in different runs
    self.assertEqual(json.loads(mol1.dumps()), json.loads(mol0.dumps()))
    tmpfile = None
    mol1.loads(mol1.dumps())
    mol1.loads_(mol0.dumps())
    mol1.unpack(mol1.pack())
    mol1.unpack_(mol0.pack())
def test_set_geom(self):
    """set_geom_ accepts atom lists and coordinate arrays, with unit /
    symmetry / inplace variants."""
    mol1 = gto.Mole()
    mol1.verbose = 5
    mol1.set_geom_(mol0._atom, 'B', symmetry=True)
    mol1.set_geom_(mol0.atom_coords(), 'B', inplace=False)
    mol1.symmetry = False
    mol1.set_geom_(mol0.atom_coords(), 'B')
    mol1.set_geom_(mol0.atom_coords(), inplace=False)
    mol1.set_geom_(mol0.atom_coords(), unit=1.)
    mol1.set_geom_(mol0.atom_coords(), unit='Ang', inplace=False)
def test_apply(self):
    """Mole.apply resolves method names/classes to the proper open-shell
    variants (RHF on an odd-electron mol0 becomes ROHF, MP2 becomes UMP2)."""
    from pyscf import scf, mp
    self.assertTrue(isinstance(mol0.apply('RHF'), scf.rohf.ROHF))
    self.assertTrue(isinstance(mol0.apply('MP2'), mp.ump2.UMP2))
    self.assertTrue(isinstance(mol0.apply(scf.RHF), scf.rohf.ROHF))
    self.assertTrue(isinstance(mol0.apply(scf.uhf.UHF), scf.uhf.UHF))
def test_with_MoleContext(self):
    """The with_* context managers set _env fields inside the block and
    restore them on exit; the bare set_* calls leave them reset."""
    mol1 = mol0.copy()
    with mol1.with_rinv_at_nucleus(1):
        self.assertTrue(mol1._env[gto.PTR_RINV_ZETA] != 0)
        self.assertAlmostEqual(abs(mol1._env[gto.PTR_RINV_ORIG+2]), 0, 9)
    # Restored after the context exits.
    self.assertAlmostEqual(mol1._env[gto.PTR_RINV_ZETA], 0, 9)
    self.assertAlmostEqual(mol1._env[gto.PTR_RINV_ORIG+2], 0, 9)
    with mol1.with_rinv_at_nucleus(0):
        self.assertAlmostEqual(abs(mol1._env[gto.PTR_RINV_ORIG+2]), 1.8897261245650618, 9)
    self.assertAlmostEqual(mol1._env[gto.PTR_RINV_ORIG+2], 0, 9)
    with mol1.with_rinv_zeta(20):
        self.assertAlmostEqual(mol1._env[gto.PTR_RINV_ZETA], 20, 9)
        mol1.set_rinv_zeta(3.)
    self.assertAlmostEqual(mol1._env[gto.PTR_RINV_ZETA], 0, 9)
    with mol1.with_rinv_origin((1,2,3)):
        self.assertAlmostEqual(mol1._env[gto.PTR_RINV_ORIG+2], 3, 9)
    self.assertAlmostEqual(mol1._env[gto.PTR_RINV_ORIG+2], 0, 9)
    with mol1.with_range_coulomb(20):
        self.assertAlmostEqual(mol1._env[gto.PTR_RANGE_OMEGA], 20, 9)
        mol1.set_range_coulomb(2.)
    self.assertAlmostEqual(mol1._env[gto.PTR_RANGE_OMEGA], 0, 9)
    with mol1.with_common_origin((1,2,3)):
        self.assertAlmostEqual(mol1._env[gto.PTR_COMMON_ORIG+2], 3, 9)
    self.assertAlmostEqual(mol1._env[gto.PTR_COMMON_ORIG+2], 0, 9)
    mol1.set_f12_zeta(2.)
def test_input_symmetry(self):
    """Symmetry detection / subgroup selection and the error path when the
    declared group does not match the geometry."""
    mol1 = gto.Mole()
    mol1.atom = 'H 1 1 1; H -1 -1 1; H 1 -1 -1; H -1 1 -1'
    mol1.unit = 'B'
    mol1.symmetry = True
    mol1.verbose = 5
    mol1.output = '/dev/null'
    mol1.build()
    self.assertAlmostEqual(lib.fp(mol1.atom_coords()), 3.4708548731841296, 9)
    # NOTE(review): the next line is duplicated in the original source;
    # the second assignment immediately overwrites the first (harmless).
    mol1 = gto.Mole()
    mol1 = gto.Mole()
    mol1.atom = 'H 0 0 -1; H 0 0 1'
    mol1.cart = True
    mol1.unit = 'B'
    mol1.symmetry = 'Dooh'
    mol1.verbose = 5
    mol1.output = '/dev/null'
    mol1.build()
    self.assertAlmostEqual(lib.fp(mol1.atom_coords()), 0.69980902201036865, 9)
    mol1 = gto.Mole()
    mol1.atom = 'H 0 -1 0; H 0 1 0'
    mol1.unit = 'B'
    mol1.symmetry = True
    mol1.symmetry_subgroup = 'D2h'
    mol1.build()
    self.assertAlmostEqual(lib.fp(mol1.atom_coords()), -1.1939459267317516, 9)
    mol1.atom = 'H 0 0 -1; H 0 0 1'
    mol1.unit = 'B'
    mol1.symmetry = 'Coov'
    mol1.symmetry_subgroup = 'C2'
    mol1.build()
    self.assertAlmostEqual(lib.fp(mol1.atom_coords()), 0.69980902201036865, 9)
    # Geometry incompatible with the declared group -> warning raised.
    mol1.atom = 'H 1 0 -1; H 0 0 1; He 0 0 2'
    mol1.symmetry = 'Coov'
    self.assertRaises(RuntimeWarning, mol1.build)
    mol1.atom = '''
    C 0.  0.  0.7264
    C 0.  0. -.7264
    H 0.92419  0.  1.29252
    H -.92419  0.  1.29252
    H 0.  0.92419 -1.29252
    H 0. -.92419 -1.29252'''
    mol1.symmetry = True
    mol1.symmetry_subgroup = 'C2v'
    mol1.build()
    self.assertAlmostEqual(lib.fp(mol1.atom_coords()), 2.9413856643164618, 9)
def test_symm_orb(self):
    """Symmetry-adapted orbital overlaps are invariant under a rigid
    rotation+translation of the molecule (both sph and cart)."""
    rs = numpy.array([[.1, -.3, -.2],
                      [.3,  .1,  .8]])
    mol = gto.M(atom=[('H', c) for c in rs], unit='Bohr',
                basis={'H': [[0, (1, 1)], [1, (.9, 1)], [2, (.8, 1)], [3, (.7, 1)]]})
    # Random (seeded) orthogonal rotation u plus a shift.
    numpy.random.seed(1)
    u, w, vh = numpy.linalg.svd(numpy.random.random((3,3)))
    rs1 = rs.dot(u) + numpy.array([-.5, -.3, .9])
    mol1 = gto.M(atom=[('H', c) for c in rs1], unit='Bohr',
                 basis={'H': [[0, (1, 1)], [1, (.9, 1)], [2, (.8, 1)], [3, (.7, 1)]]})
    mol.symmetry = 1
    mol.build()
    mol1.symmetry = 1
    mol1.build()
    s0 = mol.intor('int1e_ovlp')
    s0 = [c.T.dot(s0).dot(c) for c in mol.symm_orb]
    s1 = mol1.intor('int1e_ovlp')
    s1 = [c.T.dot(s1).dot(c) for c in mol1.symm_orb]
    self.assertTrue(all(abs(s0[i]-s1[i]).max()<1e-12 for i in range(len(mol.symm_orb))))
    # Repeat with cartesian GTOs.
    mol.cart = True
    mol.symmetry = 1
    mol.build()
    mol1.cart = True
    mol1.symmetry = 1
    mol1.build()
    s0 = mol.intor('int1e_ovlp')
    s0 = [c.T.dot(s0).dot(c) for c in mol.symm_orb]
    s1 = mol1.intor('int1e_ovlp')
    s1 = [c.T.dot(s1).dot(c) for c in mol1.symm_orb]
    self.assertTrue(all(abs(s0[i]-s1[i]).max()<1e-12 for i in range(len(mol.symm_orb))))
def test_search_ao_label(self):
    """search_ao_label accepts regex strings, lists, index lists and
    callables; label formatters work in sph, cart and spinor modes."""
    mol1 = mol0.copy()
    mol1.atom = mol0.atom + ['Mg 1,1,1']
    mol1.ecp['Mg'] = 'lanl2dz'
    mol1.basis['Mg'] = 'lanl2dz'
    mol1.build(0, 0)
    self.assertEqual(list(mol1.search_ao_label('O.*2p')), [10,11,12])
    self.assertEqual(list(mol1.search_ao_label('O1 2p')), [10,11,12])
    self.assertEqual(list(mol1.search_ao_label(['O.*2p','0 H 1s'])), [0, 10,11,12])
    self.assertEqual(list(mol1.search_ao_label([10,11,12])), [10,11,12])
    self.assertEqual(list(mol1.search_ao_label(lambda x: '4d' in x)), [24,25,26,27,28])
    mol1.ao_labels(fmt='%s%s%s%s')
    mol1.sph_labels(fmt=None)
    mol1.cart = True
    self.assertEqual(list(mol1.search_ao_label('4d')), [25,26,27,28,29,30])
    mol1.ao_labels(fmt='%s%s%s%s')
    mol1.ao_labels(fmt=None)
    mol1.cart = False
    mol1.spinor_labels()
    mol1.spinor_labels(fmt='%s%s%s%s')
    mol1.spinor_labels(fmt=None)
def test_input_ecp(self):
    """ECP input forms: global name, load/format helpers, unknown-name error."""
    mol1 = gto.Mole()
    mol1.atom = mol0.atom
    mol1.ecp = 'lanl2dz'
    mol1.build(False, False)
    gto.basis.load_ecp('lanl08', 'O')
    gto.format_ecp({'O':'lanl08', 1:'lanl2dz'})
    # Misspelled ECP name must raise KeyError.
    self.assertRaises(KeyError, gto.format_ecp, {'H':'lan2ldz'})
def test_condense_to_shell(self):
    """condense_to_shell reduces an AO matrix to shell blocks via numpy.max."""
    mol1 = mol0.copy()
    mol1.symmetry = False
    mol1.build(False, False)
    v = gto.condense_to_shell(mol1, mol1.intor('int1e_ovlp'), numpy.max)
    self.assertAlmostEqual(lib.fp(v), 5.7342530154117846, 9)
def test_input_ghost_atom(self):
    """Accept the various ghost-atom spellings: ghost, ghostN, ghost-X/_X/:X, X, XN, X-Y.

    Each gto.M() call is a smoke test — building the molecule must succeed
    with basis functions placed on the charge-free ghost sites.
    """
    # Bare 'ghost' label with an explicitly supplied basis.
    mol = gto.M(
        atom = 'C 0 0 0; ghost 0 0 2',
        basis = {'C': 'sto3g', 'ghost': gto.basis.load('sto3g', 'H')}
    )
    # Numbered ghost labels (ghost1, ghost2, ...) with per-label basis sets.
    mol = gto.M(atom='''
ghost1 0.000000000 0.000000000 2.500000000
ghost2 -0.663641000 -0.383071000 3.095377000
ghost2 0.663588000 0.383072000 3.095377000
O 1.000000000 0.000000000 2.500000000
H -1.663641000 -0.383071000 3.095377000
H 1.663588000 0.383072000 3.095377000
''',
                basis={'ghost1':gto.basis.load('sto3g', 'O'),
                       'ghost2':gto.basis.load('631g', 'H'),
                       'O':'631g', 'H':'631g'}
    )
    # ghost-<element> / ghost_<element> / ghost:<element> inherit the element basis.
    mol = gto.M(atom='''
ghost-O 0.000000000 0.000000000 2.500000000
ghost_H -0.663641000 -0.383071000 3.095377000
ghost:H 0.663588000 0.383072000 3.095377000
O 1.000000000 0.000000000 2.500000000
H -1.663641000 -0.383071000 3.095377000
H 1.663588000 0.383072000 3.095377000
''', basis='631g')
    # 'X' is an alternative ghost prefix; numbered variants need explicit bases.
    mol = gto.M(atom='''
X1 0.000000000 0.000000000 2.500000000
X2 -0.663641000 -0.383071000 3.095377000
X2 0.663588000 0.383072000 3.095377000
O 1.000000000 0.000000000 2.500000000
H -1.663641000 -0.383071000 3.095377000
H 1.663588000 0.383072000 3.095377000
''',
                basis={'X1':gto.basis.load('sto3g', 'O'),
                       'X2':gto.basis.load('631g', 'H'),
                       'O':'631g', 'H':'631g'}
    )
    # X-<element> / X_<element>N / X:<element> spellings.
    mol = gto.M(atom='''
X-O 0.000000000 0.000000000 2.500000000
X_H1 -0.663641000 -0.383071000 3.095377000
X:H 0.663588000 0.383072000 3.095377000
O 1.000000000 0.000000000 2.500000000
H -1.663641000 -0.383071000 3.095377000
H 1.663588000 0.383072000 3.095377000
''', basis='631g')
def test_conc_mole(self):
    """Concatenating two Mole objects (operator +) merges atoms, shells and AOs."""
    mol1 = gto.M(atom='Mg', ecp='LANL2DZ', basis='lanl2dz')
    mol2 = mol1 + mol0
    self.assertEqual(mol2.natm, 4)
    self.assertEqual(mol2.nbas, 18)
    self.assertEqual(mol2.nao_nr(), 42)
    # Counts are independent of the concatenation order.
    mol2 = mol0 + mol1
    self.assertEqual(mol2.natm, 4)
    self.assertEqual(mol2.nbas, 18)
    self.assertEqual(mol2.nao_nr(), 42)
    # Primitive-GTO counts are additive under concatenation.
    n0 = mol0.npgto_nr()
    n1 = mol1.npgto_nr()
    self.assertEqual(mol2.npgto_nr(), n0+n1)
    # Doubling again and switching to cartesian gives the expected total.
    mol2 = mol2 + mol2
    mol2.cart = True
    self.assertEqual(mol2.npgto_nr(), 100)
def test_intor_cross(self):
    """Cross overlap integrals between two different Mole objects."""
    # A single cartesian d shell on He: 6 cartesian components.
    mol1 = gto.M(atom='He', basis={'He': [(2,(1.,1))]}, cart=True)
    s0 = gto.intor_cross('int1e_ovlp', mol1, mol0)
    self.assertEqual(s0.shape, (6, 34))
    # Swapping the molecules transposes the result shape.
    s0 = gto.intor_cross('int1e_ovlp', mol0, mol1)
    self.assertEqual(s0.shape, (34, 6))
    # Forcing the cartesian integral changes mol0's dimension (34 -> 36).
    s0 = gto.intor_cross('int1e_ovlp_cart', mol0, mol1)
    self.assertEqual(s0.shape, (36, 6))
def test_energy_nuc(self):
    """Nuclear repulsion energy; an empty molecule must give zero."""
    self.assertAlmostEqual(mol0.get_enuc(), 6.3611415029455705, 9)
    self.assertAlmostEqual(gto.M().energy_nuc(), 0, 9)
def test_fakemol(self):
    """A fake Mole of point charges reproduces summed 1/r integrals via int3c2e."""
    numpy.random.seed(1)
    coords = numpy.random.random((6,3))*4
    vref = 0
    mol = mol0.copy()
    # Reference: sum of int1e_rinv with the rinv origin moved to each point.
    for c in coords:
        mol.set_rinv_origin(c)
        vref += mol.intor('int1e_rinv')
    fakemol = gto.fakemol_for_charges(coords)
    pmol = mol + fakemol
    # Restrict the 3-center integral so the third index runs over the fake shells only.
    shls_slice = (0, mol.nbas, 0, mol.nbas, mol.nbas, pmol.nbas)
    v = pmol.intor('int3c2e', comp=1, shls_slice=shls_slice)
    # Contract away the charge index; the result must match the reference sum.
    v = numpy.einsum('pqk->pq', v)
    self.assertAlmostEqual(abs(vref-v).max(), 0, 12)
def test_to_uncontracted_cartesian_basis(self):
    """Uncontracted expansion: c.T @ S_prim @ c must equal the contracted overlap."""
    pmol, ctr_coeff = mol0.to_uncontracted_cartesian_basis()
    c = scipy.linalg.block_diag(*ctr_coeff)
    s = reduce(numpy.dot, (c.T, pmol.intor('int1e_ovlp'), c))
    self.assertAlmostEqual(abs(s-mol0.intor('int1e_ovlp')).max(), 0, 9)
    # Same check with mol0 temporarily switched to cartesian AOs.
    mol0.cart = True
    pmol, ctr_coeff = mol0.to_uncontracted_cartesian_basis()
    c = scipy.linalg.block_diag(*ctr_coeff)
    s = reduce(numpy.dot, (c.T, pmol.intor('int1e_ovlp'), c))
    self.assertAlmostEqual(abs(s-mol0.intor('int1e_ovlp')).max(), 0, 9)
    # Restore the shared module-level state for the other tests.
    mol0.cart = False
def test_getattr(self):
    """Mole.__getattr__ shortcuts build the matching SCF/CI/TDDFT method objects."""
    from pyscf import scf, dft, ci, tdscf
    mol = gto.M(atom='He')
    self.assertEqual(mol.HF().__class__, scf.HF(mol).__class__)
    self.assertEqual(mol.KS().__class__, dft.KS(mol).__class__)
    self.assertEqual(mol.UKS().__class__, dft.UKS(mol).__class__)
    self.assertEqual(mol.CISD().__class__, ci.cisd.RCISD)
    self.assertEqual(mol.TDA().__class__, tdscf.rhf.TDA)
    self.assertEqual(mol.dTDA().__class__, tdscf.rks.dTDA)
    # TD<functional> dispatches on the functional type (pure vs. hybrid).
    self.assertEqual(mol.TDBP86().__class__, tdscf.rks.TDDFTNoHybrid)
    self.assertEqual(mol.TDB3LYP().__class__, tdscf.rks.TDDFT)
    # Unknown names must not silently build anything.
    self.assertRaises(AttributeError, lambda: mol.xyz)
    self.assertRaises(AttributeError, lambda: mol.TDxyz)
def test_ao2mo(self):
    """AO->MO integral transformation with an identity coefficient matrix."""
    mol = gto.M(atom='He')
    nao = mol.nao
    # With C = I the MO integrals equal the AO integrals.
    eri = mol.ao2mo(numpy.eye(nao))
    self.assertAlmostEqual(eri[0,0], 1.0557129427350722, 12)
def test_tofile(self):
    """Serialize a molecule to xyz (content checked) and zmat (smoke test)."""
    tmpfile = tempfile.NamedTemporaryFile()
    # Atom symbols given as atomic number (1) and as labeled element ("O1").
    mol = gto.M(atom=[[1 , (0.,1.,1.)],
                      ["O1", (0.,0.,0.)],
                      [1 , (1.,1.,0.)], ])
    out1 = mol.tofile(tmpfile.name, format='xyz')
    ref = '''3
XYZ from PySCF
H 0.00000 1.00000 1.00000
O 0.00000 0.00000 0.00000
H 1.00000 1.00000 0.00000
'''
    with open(tmpfile.name, 'r') as f:
        self.assertEqual(f.read(), ref)
    # tofile() returns the same text without the trailing newline.
    self.assertEqual(out1, ref[:-1])
    # zmat output: only check that writing succeeds.
    tmpfile = tempfile.NamedTemporaryFile(suffix='.zmat')
    str1 = mol.tofile(tmpfile.name, format='zmat')
    #FIXME:self.assertEqual(mol._atom, mol.fromfile(tmpfile.name))
def test_frac_particles(self):
    """Fractional nuclear charges: charge bookkeeping and int1e_nuc consistency."""
    mol = gto.M(atom=[['h', (0.,1.,1.)],
                      ['O', (0.,0.,0.)],
                      ['h', (1.,1.,0.)],],
                basis='sto3g')
    # Mark atom 1 as carrying a fractional charge of 2.5 in the _atm/_env tables.
    mol._atm[1, gto.NUC_MOD_OF] = gto.NUC_FRAC_CHARGE
    mol._env[mol._atm[1, gto.PTR_FRAC_CHARGE]] = 2.5
    self.assertAlmostEqual(mol.atom_charges().sum(), 4.5, 12)
    self.assertAlmostEqual(mol.atom_charge(1), 2.5, 12)

    # Add test after updating cint
    # Reference nuclear attraction assembled atom-by-atom from 1/r integrals,
    # weighted by each (possibly fractional) atomic charge.
    ref = 0
    for ia in range(mol.natm):
        with mol.with_rinv_origin(mol.atom_coord(ia)):
            ref -= mol.intor('int1e_rinv') * mol.atom_charge(ia)
    v = mol.intor('int1e_nuc')
    self.assertAlmostEqual(abs(ref-v).max(), 0, 12)
# Allow running this test module directly from the command line.
if __name__ == "__main__":
    print("test mole.py")
    unittest.main()
|
gkc1000/pyscf
|
pyscf/gto/test/test_mole.py
|
Python
|
apache-2.0
| 39,894
|
[
"Gaussian",
"NWChem",
"PySCF"
] |
d6171d7913bca4a33b725889b7e518f44e77d763f59cc35465ff91c3c43fb6e1
|
"""Base class for mixture models."""
# Author: Wei Xue <xuewei4d@gmail.com>
# Modified by Thierry Guillemot <thierry.guillemot.work@gmail.com>
# License: BSD 3 clause
from __future__ import print_function
import warnings
from abc import ABCMeta, abstractmethod
from time import time
import numpy as np
from .. import cluster
from ..base import BaseEstimator
from ..base import DensityMixin
from ..externals import six
from ..exceptions import ConvergenceWarning
from ..utils import check_array, check_random_state
from ..utils.fixes import logsumexp
def _check_shape(param, param_shape, name):
"""Validate the shape of the input parameter 'param'.
Parameters
----------
param : array
param_shape : tuple
name : string
"""
param = np.array(param)
if param.shape != param_shape:
raise ValueError("The parameter '%s' should have the shape of %s, "
"but got %s" % (name, param_shape, param.shape))
def _check_X(X, n_components=None, n_features=None):
    """Validate the input data X and return it as a float array.

    Parameters
    ----------
    X : array-like, shape (n_samples, n_features)
    n_components : int
        If given, require at least this many samples.
    n_features : int
        If given, require exactly this many features.

    Returns
    -------
    X : array, shape (n_samples, n_features)
    """
    X = check_array(X, dtype=[np.float64, np.float32])

    # Not enough samples to fit the requested number of components.
    too_few_samples = (n_components is not None
                       and X.shape[0] < n_components)
    if too_few_samples:
        raise ValueError('Expected n_samples >= n_components '
                         'but got n_components = %d, n_samples = %d'
                         % (n_components, X.shape[0]))

    # Feature count must match the fitted model exactly.
    wrong_width = (n_features is not None
                   and X.shape[1] != n_features)
    if wrong_width:
        raise ValueError("Expected the input data X have %d features, "
                         "but got %d features"
                         % (n_features, X.shape[1]))
    return X
class BaseMixture(six.with_metaclass(ABCMeta, DensityMixin, BaseEstimator)):
    """Base class for mixture models.

    This abstract class specifies an interface for all mixture classes and
    provides basic common methods for mixture models.
    """

    def __init__(self, n_components, tol, reg_covar,
                 max_iter, n_init, init_params, random_state, warm_start,
                 verbose, verbose_interval):
        self.n_components = n_components
        self.tol = tol
        self.reg_covar = reg_covar
        self.max_iter = max_iter
        self.n_init = n_init
        self.init_params = init_params
        self.random_state = random_state
        self.warm_start = warm_start
        self.verbose = verbose
        self.verbose_interval = verbose_interval

    def _check_initial_parameters(self, X):
        """Check values of the basic parameters.

        Parameters
        ----------
        X : array-like, shape (n_samples, n_features)
        """
        if self.n_components < 1:
            raise ValueError("Invalid value for 'n_components': %d "
                             "Estimation requires at least one component"
                             % self.n_components)

        if self.tol < 0.:
            raise ValueError("Invalid value for 'tol': %.5f "
                             "Tolerance used by the EM must be non-negative"
                             % self.tol)

        if self.n_init < 1:
            raise ValueError("Invalid value for 'n_init': %d "
                             "Estimation requires at least one run"
                             % self.n_init)

        if self.max_iter < 1:
            raise ValueError("Invalid value for 'max_iter': %d "
                             "Estimation requires at least one iteration"
                             % self.max_iter)

        if self.reg_covar < 0.:
            raise ValueError("Invalid value for 'reg_covar': %.5f "
                             "regularization on covariance must be "
                             "non-negative"
                             % self.reg_covar)

        # Check all the parameters values of the derived class
        self._check_parameters(X)

    @abstractmethod
    def _check_parameters(self, X):
        """Check initial parameters of the derived class.

        Parameters
        ----------
        X : array-like, shape (n_samples, n_features)
        """
        pass

    def _initialize_parameters(self, X, random_state):
        """Initialize the model parameters.

        Parameters
        ----------
        X : array-like, shape (n_samples, n_features)

        random_state : RandomState
            A random number generator instance.
        """
        n_samples, _ = X.shape

        if self.init_params == 'kmeans':
            # Hard (one-hot) responsibilities from a single k-means run.
            resp = np.zeros((n_samples, self.n_components))
            label = cluster.KMeans(n_clusters=self.n_components, n_init=1,
                                   random_state=random_state).fit(X).labels_
            resp[np.arange(n_samples), label] = 1
        elif self.init_params == 'random':
            # Soft random responsibilities, normalized row-wise.
            resp = random_state.rand(n_samples, self.n_components)
            resp /= resp.sum(axis=1)[:, np.newaxis]
        else:
            raise ValueError("Unimplemented initialization method '%s'"
                             % self.init_params)

        self._initialize(X, resp)

    @abstractmethod
    def _initialize(self, X, resp):
        """Initialize the model parameters of the derived class.

        Parameters
        ----------
        X : array-like, shape (n_samples, n_features)

        resp : array-like, shape (n_samples, n_components)
        """
        pass

    def fit(self, X, y=None):
        """Estimate model parameters with the EM algorithm.

        The method fit the model `n_init` times and set the parameters with
        which the model has the largest likelihood or lower bound. Within each
        trial, the method iterates between E-step and M-step for `max_iter`
        times until the change of likelihood or lower bound is less than
        `tol`, otherwise, a `ConvergenceWarning` is raised.

        Parameters
        ----------
        X : array-like, shape (n_samples, n_features)
            List of n_features-dimensional data points. Each row
            corresponds to a single data point.

        Returns
        -------
        self
        """
        X = _check_X(X, self.n_components)
        self._check_initial_parameters(X)

        # if we enable warm_start, we will have a unique initialisation
        do_init = not(self.warm_start and hasattr(self, 'converged_'))
        n_init = self.n_init if do_init else 1

        max_lower_bound = -np.infty
        self.converged_ = False

        random_state = check_random_state(self.random_state)

        n_samples, _ = X.shape
        for init in range(n_init):
            self._print_verbose_msg_init_beg(init)

            if do_init:
                self._initialize_parameters(X, random_state)
                self.lower_bound_ = -np.infty

            for n_iter in range(self.max_iter):
                prev_lower_bound = self.lower_bound_

                log_prob_norm, log_resp = self._e_step(X)
                self._m_step(X, log_resp)
                self.lower_bound_ = self._compute_lower_bound(
                    log_resp, log_prob_norm)

                change = self.lower_bound_ - prev_lower_bound
                self._print_verbose_msg_iter_end(n_iter, change)

                if abs(change) < self.tol:
                    self.converged_ = True
                    break

            self._print_verbose_msg_init_end(self.lower_bound_)

            # Keep the parameters of the best initialization seen so far.
            if self.lower_bound_ > max_lower_bound:
                max_lower_bound = self.lower_bound_
                best_params = self._get_parameters()
                best_n_iter = n_iter

        if not self.converged_:
            warnings.warn('Initialization %d did not converge. '
                          'Try different init parameters, '
                          'or increase max_iter, tol '
                          'or check for degenerate data.'
                          % (init + 1), ConvergenceWarning)

        self._set_parameters(best_params)
        self.n_iter_ = best_n_iter
        # BUG FIX: report the lower bound of the *best* initialization rather
        # than leaving the value of the last one, so that ``lower_bound_`` is
        # consistent with the parameters restored just above.
        self.lower_bound_ = max_lower_bound

        return self

    def _e_step(self, X):
        """E step.

        Parameters
        ----------
        X : array-like, shape (n_samples, n_features)

        Returns
        -------
        log_prob_norm : float
            Mean of the logarithms of the probabilities of each sample in X

        log_responsibility : array, shape (n_samples, n_components)
            Logarithm of the posterior probabilities (or responsibilities) of
            the point of each sample in X.
        """
        log_prob_norm, log_resp = self._estimate_log_prob_resp(X)
        return np.mean(log_prob_norm), log_resp

    @abstractmethod
    def _m_step(self, X, log_resp):
        """M step.

        Parameters
        ----------
        X : array-like, shape (n_samples, n_features)

        log_resp : array-like, shape (n_samples, n_components)
            Logarithm of the posterior probabilities (or responsibilities) of
            the point of each sample in X.
        """
        pass

    @abstractmethod
    def _check_is_fitted(self):
        pass

    @abstractmethod
    def _get_parameters(self):
        pass

    @abstractmethod
    def _set_parameters(self, params):
        pass

    def score_samples(self, X):
        """Compute the weighted log probabilities for each sample.

        Parameters
        ----------
        X : array-like, shape (n_samples, n_features)
            List of n_features-dimensional data points. Each row
            corresponds to a single data point.

        Returns
        -------
        log_prob : array, shape (n_samples,)
            Log probabilities of each data point in X.
        """
        self._check_is_fitted()
        X = _check_X(X, None, self.means_.shape[1])

        return logsumexp(self._estimate_weighted_log_prob(X), axis=1)

    def score(self, X, y=None):
        """Compute the per-sample average log-likelihood of the given data X.

        Parameters
        ----------
        X : array-like, shape (n_samples, n_dimensions)
            List of n_features-dimensional data points. Each row
            corresponds to a single data point.

        Returns
        -------
        log_likelihood : float
            Log likelihood of the Gaussian mixture given X.
        """
        return self.score_samples(X).mean()

    def predict(self, X, y=None):
        """Predict the labels for the data samples in X using trained model.

        Parameters
        ----------
        X : array-like, shape (n_samples, n_features)
            List of n_features-dimensional data points. Each row
            corresponds to a single data point.

        Returns
        -------
        labels : array, shape (n_samples,)
            Component labels.
        """
        self._check_is_fitted()
        X = _check_X(X, None, self.means_.shape[1])
        return self._estimate_weighted_log_prob(X).argmax(axis=1)

    def predict_proba(self, X):
        """Predict posterior probability of each component given the data.

        Parameters
        ----------
        X : array-like, shape (n_samples, n_features)
            List of n_features-dimensional data points. Each row
            corresponds to a single data point.

        Returns
        -------
        resp : array, shape (n_samples, n_components)
            Returns the probability of each Gaussian (state) in
            the model given each sample.
        """
        self._check_is_fitted()
        X = _check_X(X, None, self.means_.shape[1])
        _, log_resp = self._estimate_log_prob_resp(X)
        return np.exp(log_resp)

    def sample(self, n_samples=1):
        """Generate random samples from the fitted Gaussian distribution.

        Parameters
        ----------
        n_samples : int, optional
            Number of samples to generate. Defaults to 1.

        Returns
        -------
        X : array, shape (n_samples, n_features)
            Randomly generated sample

        y : array, shape (nsamples,)
            Component labels
        """
        self._check_is_fitted()

        if n_samples < 1:
            raise ValueError(
                "Invalid value for 'n_samples': %d . The sampling requires at "
                "least one sample." % (self.n_components))

        _, n_features = self.means_.shape
        rng = check_random_state(self.random_state)
        # Draw how many samples come from each component.
        n_samples_comp = rng.multinomial(n_samples, self.weights_)

        if self.covariance_type == 'full':
            X = np.vstack([
                rng.multivariate_normal(mean, covariance, int(sample))
                for (mean, covariance, sample) in zip(
                    self.means_, self.covariances_, n_samples_comp)])
        elif self.covariance_type == "tied":
            X = np.vstack([
                rng.multivariate_normal(mean, self.covariances_, int(sample))
                for (mean, sample) in zip(
                    self.means_, n_samples_comp)])
        else:
            # 'diag' and 'spherical': scale i.i.d. normals per dimension.
            X = np.vstack([
                mean + rng.randn(sample, n_features) * np.sqrt(covariance)
                for (mean, covariance, sample) in zip(
                    self.means_, self.covariances_, n_samples_comp)])

        y = np.concatenate([j * np.ones(sample, dtype=int)
                            for j, sample in enumerate(n_samples_comp)])

        return (X, y)

    def _estimate_weighted_log_prob(self, X):
        """Estimate the weighted log-probabilities, log P(X | Z) + log weights.

        Parameters
        ----------
        X : array-like, shape (n_samples, n_features)

        Returns
        -------
        weighted_log_prob : array, shape (n_features, n_component)
        """
        return self._estimate_log_prob(X) + self._estimate_log_weights()

    @abstractmethod
    def _estimate_log_weights(self):
        """Estimate log-weights in EM algorithm, E[ log pi ] in VB algorithm.

        Returns
        -------
        log_weight : array, shape (n_components, )
        """
        pass

    @abstractmethod
    def _estimate_log_prob(self, X):
        """Estimate the log-probabilities log P(X | Z).

        Compute the log-probabilities per each component for each sample.

        Parameters
        ----------
        X : array-like, shape (n_samples, n_features)

        Returns
        -------
        log_prob : array, shape (n_samples, n_component)
        """
        pass

    def _estimate_log_prob_resp(self, X):
        """Estimate log probabilities and responsibilities for each sample.

        Compute the log probabilities, weighted log probabilities per
        component and responsibilities for each sample in X with respect to
        the current state of the model.

        Parameters
        ----------
        X : array-like, shape (n_samples, n_features)

        Returns
        -------
        log_prob_norm : array, shape (n_samples,)
            log p(X)

        log_responsibilities : array, shape (n_samples, n_components)
            logarithm of the responsibilities
        """
        weighted_log_prob = self._estimate_weighted_log_prob(X)
        log_prob_norm = logsumexp(weighted_log_prob, axis=1)
        with np.errstate(under='ignore'):
            # ignore underflow
            log_resp = weighted_log_prob - log_prob_norm[:, np.newaxis]
        return log_prob_norm, log_resp

    def _print_verbose_msg_init_beg(self, n_init):
        """Print verbose message on initialization."""
        if self.verbose == 1:
            print("Initialization %d" % n_init)
        elif self.verbose >= 2:
            print("Initialization %d" % n_init)
            self._init_prev_time = time()
            self._iter_prev_time = self._init_prev_time

    def _print_verbose_msg_iter_end(self, n_iter, diff_ll):
        """Print verbose message on initialization."""
        if n_iter % self.verbose_interval == 0:
            if self.verbose == 1:
                print("  Iteration %d" % n_iter)
            elif self.verbose >= 2:
                cur_time = time()
                print("  Iteration %d\t time lapse %.5fs\t ll change %.5f" % (
                    n_iter, cur_time - self._iter_prev_time, diff_ll))
                self._iter_prev_time = cur_time

    def _print_verbose_msg_init_end(self, ll):
        """Print verbose message on the end of iteration."""
        if self.verbose == 1:
            print("Initialization converged: %s" % self.converged_)
        elif self.verbose >= 2:
            print("Initialization converged: %s\t time lapse %.5fs\t ll %.5f" %
                  (self.converged_, time() - self._init_prev_time, ll))
|
Titan-C/scikit-learn
|
sklearn/mixture/base.py
|
Python
|
bsd-3-clause
| 16,655
|
[
"Gaussian"
] |
bbf310a1c16a67e1f2a8f832e9ddc9cf4fc713781bcd40da2f0e837c8a88cc0c
|
from dateutil.relativedelta import relativedelta
from edc_visit_schedule import VisitSchedule, Schedule, Visit, Panel
from edc_visit_schedule import FormsCollection, Crf, Requisition
# CRFs collected at every visit (five dummy CRF models from edc_metadata).
crfs = FormsCollection(
    Crf(show_order=1, model='edc_metadata.crfone', required=True),
    Crf(show_order=2, model='edc_metadata.crftwo', required=True),
    Crf(show_order=3, model='edc_metadata.crfthree', required=True),
    Crf(show_order=4, model='edc_metadata.crffour', required=True),
    Crf(show_order=5, model='edc_metadata.crffive', required=True),
)


def _requisition(show_order, panel_name):
    """Build one required, non-additional requisition on the shared model."""
    return Requisition(
        show_order=show_order,
        panel=Panel(panel_name,
                    requisition_model='edc_metadata.subjectrequisition'),
        required=True, additional=False)


# Six requisition panels, identical apart from name and display order.
requisitions = FormsCollection(
    _requisition(10, 'one'),
    _requisition(20, 'two'),
    _requisition(30, 'three'),
    _requisition(40, 'four'),
    _requisition(50, 'five'),
    _requisition(60, 'six'),
)

visit_schedule1 = VisitSchedule(
    name='visit_schedule1',
    offstudy_model='edc_visit_tracking.subjectoffstudy',
    death_report_model='edc_visit_tracking.deathreport')

visit_schedule2 = VisitSchedule(
    name='visit_schedule2',
    offstudy_model='edc_visit_tracking.subjectoffstudy',
    death_report_model='edc_visit_tracking.deathreport')

schedule1 = Schedule(
    name='schedule1',
    onschedule_model='edc_visit_tracking.onscheduleone',
    offschedule_model='edc_visit_tracking.offscheduleone',
    consent_model='edc_visit_tracking.subjectconsent',
    appointment_model='edc_appointment.appointment')

schedule2 = Schedule(
    name='schedule2',
    onschedule_model='edc_visit_tracking.onscheduletwo',
    offschedule_model='edc_visit_tracking.offscheduletwo',
    consent_model='edc_visit_tracking.subjectconsent',
    appointment_model='edc_appointment.appointment')


def _make_visits(start, stop):
    """Build one Visit per timepoint in ``range(start, stop)``.

    Visit codes/titles are 1-based (timepoint 0 -> code '1000', 'Day 1');
    every visit shares the same CRF and requisition collections and a
    0..6 day window around ``rbase``.
    """
    return [
        Visit(
            code=f'{index + 1}000',
            title=f'Day {index + 1}',
            timepoint=index,
            rbase=relativedelta(days=index),
            rlower=relativedelta(days=0),
            rupper=relativedelta(days=6),
            requisitions=requisitions,
            crfs=crfs,
            facility_name='default')
        for index in range(start, stop)
    ]


# Timepoints 0-3 go on schedule1, 4-7 on schedule2.
visits = _make_visits(0, 4)
for visit in visits:
    schedule1.add_visit(visit)

visits = _make_visits(4, 8)
for visit in visits:
    schedule2.add_visit(visit)

visit_schedule1.add_schedule(schedule1)
visit_schedule2.add_schedule(schedule2)
|
botswana-harvard/edc-visit-tracking
|
edc_visit_tracking/tests/visit_schedule.py
|
Python
|
gpl-2.0
| 3,492
|
[
"VisIt"
] |
1c73867f12fd3b0ef0c4fcdfa22f6c00c22a32c6f76a5c37567aaeb7bc92bdfd
|
#!/usr/bin/env python
# -*- coding: utf8 -*-
# *****************************************************************
# ** PTS -- Python Toolkit for working with SKIRT **
# ** © Astronomical Observatory, Ghent University **
# *****************************************************************
## \package pts.eagle.connection Accessing the EAGLE public database over the Web
#
# This module allows accessing the EAGLE public database over the Web.
# Slightly adjusted from © John Helly 2015 for the Virgo Consortium.
#
# -----------------------------------------------------------------
import numpy as np
import urllib
import urllib2
import cookielib
import re
from getpass import getpass
import os.path
import tempfile
# -----------------------------------------------------------------
# Mapping between SQL and numpy types
# (JDBC type names reported by the server -> numpy dtypes used when parsing
# the CSV result stream; string columns are capped at 256 bytes)
numpy_dtype = {
    "real"     : np.float32,
    "float"    : np.float64,
    "int"      : np.int32,
    "bigint"   : np.int64,
    "char"     : np.dtype("|S256"),
    "nvarchar" : np.dtype("|S256")
}

# -----------------------------------------------------------------

# Cookie storage - want to avoid creating a new session for every query
# The jar is persisted in the system temp directory so consecutive runs
# reuse the authenticated session cookie; a missing file is not an error.
cookie_file = os.path.join(tempfile.gettempdir(), "pts_eagle_sql_cookies.txt")
cookie_jar = cookielib.LWPCookieJar(cookie_file)
try:
    cookie_jar.load(ignore_discard=True)
except IOError:
    pass
# -----------------------------------------------------------------
## The Connection class allows accessing the EAGLE public database over the Web.
# The constructor establishes a connection for a particular user. SQL queries can
# be executed on the database through the execute_query() function.
class Connection:
    """Authenticated HTTP connection to the EAGLE public database.

    Wraps HTTP basic authentication plus a persistent cookie jar; SQL
    queries are sent with execute_query() and parsed into numpy record
    arrays.  (Python 2 code: uses urllib/urllib2/cookielib.)
    """

    ## The constructor requires a user name; if the password is omitted it is asked at the console.
    def __init__(self, username, password=None):
        # Get password if necessary
        if password is None:
            password = getpass()

        # Get URL for the database
        self.db_url = "http://galaxy-catalogue.dur.ac.uk:8080/Eagle"

        # Set up authentication and cookies
        self.password_mgr = urllib2.HTTPPasswordMgrWithDefaultRealm()
        self.password_mgr.add_password(None, self.db_url, username, password)
        # NOTE(review): self.opener is created but never used; execute_query()
        # installs its own opener built from the two handlers below — confirm
        # whether this attribute can be removed.
        self.opener = urllib2.OpenerDirector()
        self.auth_handler = urllib2.HTTPBasicAuthHandler(self.password_mgr)
        self.cookie_handler = urllib2.HTTPCookieProcessor(cookie_jar)

    ## This functions executes an SQL query on the database and returns the result as a record array.
    def execute_query(self, sql):
        url = self.db_url + "?" + urllib.urlencode({'action': 'doQuery', 'SQL': sql})
        urllib2.install_opener(urllib2.build_opener(self.auth_handler, self.cookie_handler))
        response = urllib2.urlopen(url)
        # Persist the session cookie so later queries skip re-authentication.
        cookie_jar.save(ignore_discard=True)

        # Check for OK response
        line = response.readline()
        if line != "#OK\n":
            raise Exception(response.readlines())

        # Skip rows until we reach QUERYTIMEOUT
        while True:
            line = response.readline()
            if line == "":
                raise Exception("Unexpected end of file while reading result header")
            elif line.startswith("#QUERYTIMEOUT"):
                break

        # Skip QUERYTIME
        if not(response.readline().startswith("#QUERYTIME")):
            raise Exception("Don't understand result header!")

        # Read column info
        # (also discards line with full list of column names)
        # Each header line looks like:
        #   #COLUMN <index> name=<name> JDBC_TYPE=<code> JDBC_TYPENAME=<typename>
        columns = []
        while True:
            line = response.readline()
            if line[0] != "#":
                column_names = line
                break
            else:
                m = re.match("^#COLUMN ([0-9]+) name=([\w]+) JDBC_TYPE=(-?[0-9]+) JDBC_TYPENAME=([\w]+)$", line)
                if m is not None:
                    columns.append(m.groups())
                else:
                    raise Exception("Don't understand column info: "+line)

        # Construct record type for the output
        # (field name from the header, numpy dtype via the JDBC typename map)
        dtype = np.dtype([(col[1],numpy_dtype[col[3]]) for col in columns])

        # Return the data as a record array
        # (the remaining body of the response is comma-separated rows)
        return np.genfromtxt(response, dtype=dtype, delimiter=",")
# -----------------------------------------------------------------
|
SKIRT/PTS
|
eagle/connection.py
|
Python
|
agpl-3.0
| 4,327
|
[
"Galaxy"
] |
480c72b0ee7939c3b14ae77def111ba5abc07f24319454f3d763b6ec0c2a541d
|
#!/usr/bin/env python
# -*- coding: utf-8 -*-
'''
Hybrid LFP scheme example script, applying the methodology with the model of:
Potjans, T. and Diesmann, M. "The Cell-Type Specific Cortical Microcircuit:
Relating Structure and Activity in a Full-Scale Spiking Network Model".
Cereb. Cortex (2014) 24 (3): 785-806.
doi: 10.1093/cercor/bhs358
Synopsis of the main simulation procedure:
1. Loading of parameterset
a. network parameters
b. parameters for hybrid scheme
2. Set up file destinations for different simulation output
3. network simulation
a. execute network simulation using NEST (www.nest-initiative.org)
b. merge network output (spikes, currents, voltages)
4. Create an sqlite3-backed object representation of all the spiking output
5. Iterate over post-synaptic populations:
a. Create Population object with appropriate parameters for
each specific population
b. Run all computations for populations
c. Postprocess simulation output of all cells in population
6. Postprocess all cell- and population-specific output data
7. Create a tarball for all non-redundant simulation output
The full simulation can be invoked by issuing an mpirun call, such as
mpirun -np 64 python cellsim16pops.py
Given the size of the network and demands for the multi-compartment LFP-
predictions using the present scheme, running the model on nothing but a large-
scale compute facility is strongly discouraged.
'''
from cellsim16popsParams_modified_ac_inh import multicompartment_params, \
point_neuron_network_params
import os
import numpy as np
from time import time
import neuron # NEURON compiled with MPI must be imported before NEST and mpi4py
# to avoid NEURON being aware of MPI.
import nest # Import not used, but done in order to ensure correct execution
import nest_simulation
from hybridLFPy import PostProcess, Population, CachedNetwork, setup_file_dest
import nest_output_processing
import lfpykit
# set some seed values
# SEED governs cell placement/rotation randomness; SIMULATIONSEED seeds the
# per-population simulations and the deterministic postprocessing below.
SEED = 12345678
SIMULATIONSEED = 12345678
np.random.seed(SEED)


##########################################################################
# PARAMETERS
##########################################################################

# Full set of parameters including network parameters
params = multicompartment_params()

# set up the file destination (clears output of a previous run with this label)
setup_file_dest(params, clearDestination=True)


###############################################################################
# MAIN simulation procedure
###############################################################################

# tic toc
tic = time()

######## Perform network simulation ######################################

# initiate nest simulation with only the point neuron network parameter class
networkParams = point_neuron_network_params()
nest_simulation.sli_run(parameters=networkParams,
                        fname='microcircuit.sli',
                        verbosity='M_INFO')

# preprocess the gdf files containing spiking output, voltages, weighted and
# spatial input spikes and currents:
nest_output_processing.merge_gdf(networkParams,
                                 raw_label=networkParams.spike_recorder_label,
                                 file_type='dat',
                                 fileprefix=params.networkSimParams['label'],
                                 skiprows=3)
nest_output_processing.merge_gdf(networkParams,
                                 raw_label=networkParams.voltmeter_label,
                                 file_type='dat',
                                 fileprefix='voltages',
                                 skiprows=3)
nest_output_processing.merge_gdf(
    networkParams,
    raw_label=networkParams.weighted_input_spikes_label,
    file_type='dat',
    fileprefix='population_input_spikes',
    skiprows=3)

# spatial input currents (disabled by default)
# nest_output_processing.create_spatial_input_spikes_hdf5(networkParams,
#    fileprefix='depth_res_input_spikes-')


# create tar file archive of <raw_nest_output_path> folder as .dat files are
# no longer needed. Also removes .dat files
nest_output_processing.tar_raw_nest_output(params.raw_nest_output_path,
                                           delete_files=True)

# Create an object representation of the simulation output that uses sqlite3
networkSim = CachedNetwork(**params.networkSimParams)

toc = time() - tic
print('NEST simulation and gdf file processing done in %.3f seconds' % toc)


# Set up LFPykit measurement probes for LFPs and CSDs
probes = []
probes.append(lfpykit.RecExtElectrode(cell=None, **params.electrodeParams))
probes.append(
    lfpykit.LaminarCurrentSourceDensity(
        cell=None,
        **params.CSDParams))

####### Set up populations ###############################################

# iterate over each cell type, and create a Population object for each
for i, y in enumerate(params.y):
    # create population:
    pop = Population(
        # parent class
        cellParams=params.yCellParams[y],
        rand_rot_axis=params.rand_rot_axis[y],
        simulationParams=params.simulationParams,
        populationParams=params.populationParams[y],
        y=y,
        layerBoundaries=params.layerBoundaries,
        probes=probes,
        savelist=params.savelist,
        savefolder=params.savefolder,
        dt_output=params.dt_output,
        # unique seed per population keeps cell draws independent
        POPULATIONSEED=SIMULATIONSEED + i,
        # daughter class kwargs
        X=params.X,
        networkSim=networkSim,
        k_yXL=params.k_yXL[y],
        synParams=params.synParams[y],
        synDelayLoc=params.synDelayLoc[y],
        synDelayScale=params.synDelayScale[y],
        J_yX=params.J_yX[y],
        tau_yX=params.tau_yX[y],
        recordSingleContribFrac=params.recordSingleContribFrac,
    )
    # run population simulation and collect the data
    pop.run()
    pop.collect_data()

    # object no longer needed
    del pop


####### Postprocess the simulation output ################################

# reset seed, but output should be deterministic from now on
np.random.seed(SIMULATIONSEED)

# do some postprocessing on the collected data, i.e., superposition
# of population LFPs, CSDs etc
postproc = PostProcess(y=params.y,
                       dt_output=params.dt_output,
                       probes=probes,
                       savefolder=params.savefolder,
                       mapping_Yy=params.mapping_Yy,
                       savelist=params.savelist
                       )

# run through the procedure
postproc.run()

# create tar-archive with output for plotting
postproc.create_tar_archive()

# tic toc
print('Execution time: %.3f seconds' % (time() - tic))
|
INM-6/hybridLFPy
|
examples/Hagen_et_al_2016_cercor/cellsim16pops_modified_ac_inh.py
|
Python
|
gpl-3.0
| 6,659
|
[
"NEURON"
] |
d71fd09544b0615db18620d2587630fc93ecdb6ec01dd7fcab9bbbf699962281
|
import os
import numpy as np
import sys
import itertools
import multiprocessing as mp
import getopt
# Parse command-line options.
# NOTE(review): -r/--ref-transcriptome is accepted here but never read below,
# and the usage message does not mention it — confirm whether it can go.
try:
    opts, args = getopt.getopt(sys.argv[1:],"r:n:",["ref-transcriptome=","njobs="])
except getopt.GetoptError:
    print ("getopterrror")
    print ('usage is : \n python process_xprs.py [-n number-of-processes-to-use]')
    sys.exit(1)

# Number of worker processes for the multiprocessing pool (default: 1).
num_proc=1
for opt,arg in opts:
    if opt in ("-n","--njobs"):
        num_proc=int(arg)
def get_expression(fltuple):
    """Convert one sample's eXpress results.xprs into a sorted results.t3i.

    Reproduces the original shell pipeline

        cat results.xprs | awk '{print $2, $15}' | tail -n+2 | sort -k 1

    in pure Python (no shell invocation, so no dependency on external
    tools and no command built from the directory name): drop the header
    line, keep whitespace-separated fields 2 and 15 of every remaining
    line, sort the "f2 f15" lines lexicographically (C-locale order, as
    `sort` gives with LC_ALL=C — TODO confirm the original ran under the
    C locale) and write them, one per line, to results.t3i.

    Parameters
    ----------
    fltuple : (str, str)
        (bowtie_dir_ending_with_slash, sample_dir_name).  Only sample
        paths containing 'SRR' that already hold a results.xprs file are
        processed; everything else is a silent no-op.
    """
    bowtie = fltuple[0]
    drname = fltuple[1]
    druse = bowtie + drname + '/'
    if 'SRR' not in druse:
        return
    if 'results.xprs' not in os.listdir(druse):
        return

    with open(druse + 'results.xprs') as fh:
        rows = fh.read().splitlines()

    out_lines = []
    for row in rows[1:]:  # tail -n+2: skip the header line
        fields = row.split()
        # awk leaves missing fields empty rather than failing.
        f2 = fields[1] if len(fields) > 1 else ''
        f15 = fields[14] if len(fields) > 14 else ''
        out_lines.append(f2 + ' ' + f15)
    out_lines.sort()

    with open(druse + 'results.t3i', 'w') as fh:
        for ln in out_lines:
            fh.write(ln + '\n')
# Process every subsample depth; each one lives in its own Bowtie output
# directory named after the depth suffix.
for samp in ['100', '10', '5', '1', '_point5', '_point1']:
    bowtie = './Zeisel_Bowtie_subsample' + samp + '/'
    files = sorted(os.listdir(bowtie))
    fltuple = itertools.product([bowtie], files)
    # BUG FIX: the original created a fresh Pool on every iteration and
    # never closed it, leaking worker processes across the six depths.
    pool = mp.Pool(processes=num_proc)
    try:
        pool.map(get_expression, fltuple)
    finally:
        pool.close()
        pool.join()
|
govinda-kamath/clustering_on_transcript_compatibility_counts
|
Zeisel_pipeline/process_xprs.py
|
Python
|
mit
| 1,102
|
[
"Bowtie"
] |
35429ceb465383a3efb7f5cc34a8d32c2dba48a57c63dab919c1b7b049752ba8
|
#!/usr/bin/python
#
# This source file is part of appleseed.
# Visit http://appleseedhq.net/ for additional information and resources.
#
# This software is released under the MIT license.
#
# Copyright (c) 2013 Francois Beaune, Jupiter Jazz Limited
# Copyright (c) 2014-2017 Francois Beaune, The appleseedhq Organization
#
# Permission is hereby granted, free of charge, to any person obtaining a copy
# of this software and associated documentation files (the "Software"), to deal
# in the Software without restriction, including without limitation the rights
# to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
# copies of the Software, and to permit persons to whom the Software is
# furnished to do so, subject to the following conditions:
#
# The above copyright notice and this permission notice shall be included in
# all copies or substantial portions of the Software.
#
# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
# IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
# FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
# AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
# LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
# OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
# THE SOFTWARE.
#
from __future__ import division
import argparse
import datetime
import glob
import os
import shutil
import string
import sys
import time
import traceback
import xml.dom.minidom as xml
#--------------------------------------------------------------------------------------------------
# Constants.
#--------------------------------------------------------------------------------------------------
VERSION = "2.9"  # script version, reported at startup
RENDERS_DIR = "_renders"  # subdir of the target directory where nodes drop rendered frames
ARCHIVES_DIR = "_archives"  # subdir where completed project files are archived
LOGS_DIR = "_logs"  # subdir holding per-node and manager log files
PAUSE_BETWEEN_UPDATES = 60 # in seconds
MB = 1024 * 1024  # bytes per megabyte
#--------------------------------------------------------------------------------------------------
# Utility functions.
#--------------------------------------------------------------------------------------------------
def safe_get_file_size(filepath):
    """Return the size of `filepath` in bytes, or 0 if it cannot be stat'ed."""
    try:
        return os.path.getsize(filepath)
    except OSError:
        # Narrowed from a bare `except:` — os.path.getsize only raises
        # OSError (missing file, permission denied, broken symlink...),
        # and a bare except would also have swallowed KeyboardInterrupt.
        return 0
def get_directory_size(directory):
    """Return the total size in bytes of every file below `directory`."""
    total = 0
    for dirpath, _dirnames, filenames in os.walk(directory):
        total += sum(safe_get_file_size(os.path.join(dirpath, name))
                     for name in filenames)
    return total
def get_files(directory, pattern="*"):
    """Return the paths inside `directory` matching the glob `pattern`."""
    return list(glob.glob(os.path.join(directory, pattern)))
def safe_mkdir(dir):
    """Create directory `dir` (including parents) unless it already exists."""
    if os.path.exists(dir):
        return
    os.makedirs(dir)
def convert_path_to_local(path):
    """Rewrite `path` to use this platform's directory separator."""
    # On Windows turn '/' into '\'; everywhere else turn '\' into '/'.
    foreign, local = ('/', '\\') if os.name == "nt" else ('\\', '/')
    return path.replace(foreign, local)
def tail_file(f, window=20):
    """
    Returns the last `window` lines of file `f` as a list.
    Based on code from http://stackoverflow.com/a/7047765/393756.

    Reads the file backwards one fixed-size block at a time until at
    least `window` + 1 newlines have been seen, then keeps the final
    `window` lines.  NOTE(review): seeking relative to the file end
    (whence=2) with a nonzero offset requires the file to be opened in
    binary mode on Python 3; this module targets Python 2 where text
    mode works too.
    """
    BUFFER_SIZE = 1024
    # Jump to the end to learn the file size in bytes.
    f.seek(0, 2)
    bytes = f.tell()
    # window + 1 newlines guarantees `window` complete lines even when
    # the file does not end with a newline.
    size = window + 1
    block = -1
    data = []
    while size > 0 and bytes > 0:
        if bytes > BUFFER_SIZE:
            # Seek back one whole block of size BUFFER_SIZE.
            f.seek(block * BUFFER_SIZE, 2)
            # Read one block.
            data.insert(0, f.read(BUFFER_SIZE))
        else:
            # File too small, start from begining.
            f.seek(0, 0)
            # Only read what was not read.
            data.insert(0, f.read(bytes))
        # Count newlines only in the block just read (data[0]); earlier
        # blocks were already counted in previous iterations.
        lines_found = data[0].count('\n')
        size -= lines_found
        bytes -= BUFFER_SIZE
        block -= 1
    return "".join(data).splitlines()[-window:]
def format_message(severity, msg):
    """Prefix every line of `msg` with a timestamp and a padded severity tag."""
    timestamp = datetime.datetime.now().strftime("%Y-%m-%d %H:%M:%S.%f")
    tag = severity.ljust(7)
    prefixed = ["{0} mgr {1} | {2}".format(timestamp, tag, line)
                for line in msg.splitlines()]
    return "\n".join(prefixed)
#--------------------------------------------------------------------------------------------------
# Log backend to write to the console, using colors on systems that support them.
#--------------------------------------------------------------------------------------------------
class ConsoleBackend:
    """Log backend that writes to the console, using ANSI colors where supported."""
    @staticmethod
    def info(msg):
        print("{0}".format(msg))
    @staticmethod
    def warning(msg):
        # Yellow on terminals that understand ANSI escape sequences.
        if ConsoleBackend.is_coloring_supported():
            print("\033[93m{0}\033[0m".format(msg))
        else:
            print("{0}".format(msg))
    @staticmethod
    def error(msg):
        # Red on terminals that understand ANSI escape sequences.
        if ConsoleBackend.is_coloring_supported():
            print("\033[91m{0}\033[0m".format(msg))
        else:
            print("{0}".format(msg))
    @staticmethod
    def is_coloring_supported():
        # BUG FIX: the original compared the *function* os.system against
        # the string 'darwin', which is always False, so coloring never
        # activated.  The intended check is the platform identifier.
        return sys.platform == 'darwin'
#--------------------------------------------------------------------------------------------------
# Log backend to write to a log file.
#--------------------------------------------------------------------------------------------------
class LogFileBackend:
    """Log backend that appends messages to a file on disk."""
    def __init__(self, path):
        self.path = path
    def write(self, msg):
        """Append `msg` plus a newline, creating the log directory if needed."""
        safe_mkdir(os.path.dirname(self.path))
        with open(self.path, "a") as log_handle:
            log_handle.write("{0}\n".format(msg))
#--------------------------------------------------------------------------------------------------
# Log class to simultaneously write to a log file and to the console.
#--------------------------------------------------------------------------------------------------
class Log:
    """Write messages simultaneously to a log file and to the console."""
    def __init__(self, path):
        self.log_file = LogFileBackend(path)
    def info(self, msg):
        self.__dispatch("info", msg, ConsoleBackend.info)
    def warning(self, msg):
        self.__dispatch("warning", msg, ConsoleBackend.warning)
    def error(self, msg):
        self.__dispatch("error", msg, ConsoleBackend.error)
    def __dispatch(self, severity, msg, console_fn):
        # Shared path for all severities: format once, then fan out to
        # both backends.
        formatted_msg = format_message(severity, msg)
        self.log_file.write(formatted_msg)
        console_fn(formatted_msg)
    @staticmethod
    def info_no_log(msg):
        ConsoleBackend.info(format_message("info", msg))
    @staticmethod
    def warning_no_log(msg):
        ConsoleBackend.warning(format_message("warning", msg))
    @staticmethod
    def error_no_log(msg):
        ConsoleBackend.error(format_message("error", msg))
#--------------------------------------------------------------------------------------------------
# Dependency database.
#--------------------------------------------------------------------------------------------------
class DependencyDB:
    """Track the file dependencies of appleseed project files.

    Maps each "root" project filename to the set of dependency paths
    (textures, etc.) referenced from its XML contents.
    """
    def __init__(self, source_directory, log):
        self.source_directory = source_directory
        self.log = log
        self.roots = {}
    def update(self, new_roots):
        """Sync with `new_roots`: parse newly-seen roots, drop vanished ones."""
        for root in new_roots:
            if root not in self.roots:
                success, deps = self.__extract_dependencies(root)
                if success:
                    self.roots[root] = deps
                    self.log.info("  added {0}".format(root))
        updated_roots = {}
        for root in self.roots:
            if root in new_roots:
                updated_roots[root] = self.roots[root]
            else:
                self.log.info("  removed {0}".format(root))
        self.roots = updated_roots
    def get_all_dependencies(self):
        """Return the union of the dependency sets of all tracked roots."""
        deps = set()
        for root in self.roots:
            deps = deps.union(self.roots[root])
        return deps
    def __extract_dependencies(self, filename):
        """Parse `filename` and return (True, deps), or (False, set()) on error."""
        try:
            filepath = os.path.join(self.source_directory, filename)
            with open(filepath, 'r') as file:
                contents = file.read()
            xmldoc = xml.parseString(contents)
            deps = set()
            for node in xmldoc.getElementsByTagName('parameter'):
                if node.getAttribute('name') == 'filename':
                    deps.add(convert_path_to_local(node.getAttribute('value')))
            for node in xmldoc.getElementsByTagName('parameters'):
                if node.getAttribute('name') == 'filename':
                    for child in node.childNodes:
                        if child.nodeType == xml.Node.ELEMENT_NODE:
                            deps.add(convert_path_to_local(child.getAttribute('value')))
            return True, deps
        except (KeyboardInterrupt, SystemExit):
            # BUG FIX: the original read ``except KeyboardInterrupt, SystemExit:``
            # which (Python 2) catches only KeyboardInterrupt and *binds* it to
            # the name SystemExit; SystemExit itself was silently swallowed by
            # the fallback clause below instead of propagating.
            raise
        except Exception:
            # Best-effort: any I/O or XML error marks the root as
            # not-yet-parseable without aborting the manager loop.
            return False, set()
#--------------------------------------------------------------------------------------------------
# Management logic.
#--------------------------------------------------------------------------------------------------
class Manager:
    """Drives one management pass over the watched render farm directory:
    status reporting, rendered-frame collection, dependency tracking,
    orphan cleanup and uploading of project files.

    NOTE(review): this module targets Python 2 — several attributes below
    rely on ``map()`` returning a list (``len()`` is applied to them).
    """
    def __init__(self, args, log):
        self.args = args
        self.log = log
        self.frames_directory = os.path.join(self.args.target_directory, RENDERS_DIR)
        self.archives_directory = os.path.join(self.args.target_directory, ARCHIVES_DIR)
        # One DB for everything currently in the target dir (all shots),
        # and two scoped to this shot's source dir.
        self.all_uploaded_dependency_db = DependencyDB(self.args.target_directory, log)
        self.own_uploaded_dependency_db = DependencyDB(self.args.source_directory, log)
        self.completed_dependency_db = DependencyDB(self.args.source_directory, log)
    def manage(self):
        """Run one full management iteration."""
        self.compute_target_directory_size()
        self.gather_files()
        self.print_status()
        if self.args.frames_directory is not None:
            self.move_frames()
        self.update_dependency_dbs()
        self.remove_orphan_dependencies()
        self.upload_project_files()
        self.upload_missing_dependencies()
    def compute_target_directory_size(self):
        """Cache the current size in bytes of the target directory."""
        self.target_directory_size = get_directory_size(self.args.target_directory)
    def gather_files(self):
        """Collect the source/uploaded/in-progress/completed file lists."""
        self.log.info("gathering files...")
        self.source_files = map(os.path.basename, get_files(self.args.source_directory, "*.appleseed"))
        self.uploaded_files = self.gather_uploaded_files()
        self.inprogress_files = self.gather_inprogress_files()
        self.completed_files = map(os.path.basename, get_files(self.archives_directory, "*.appleseed"))
        self.log.info("  found {0} source files (this shot) in {1}".format(len(self.source_files), self.args.source_directory))
        self.log.info("  found {0} uploaded files (all shots) in {1}".format(len(self.uploaded_files), self.args.target_directory))
        self.log.info("  found {0} in-progress files (all shots) in {1}".format(len(self.inprogress_files), self.args.target_directory))
        self.log.info("  found {0} completed files (all shots) in {1}".format(len(self.completed_files), self.archives_directory))
    def gather_uploaded_files(self):
        """Return basenames of project files already present in the target dir."""
        return map(os.path.basename, get_files(self.args.target_directory, "*.appleseed"))
    def gather_inprogress_files(self):
        """Return {project filename: [node names]} for files a node has claimed.

        A node claims a file by renaming it to ``<name>.appleseed.<owner>``.
        """
        inprogress = {}
        for filename in map(os.path.basename, get_files(self.args.target_directory, "*.appleseed.*")):
            parts = filename.split(".")
            assert len(parts) >= 3
            if parts[-2] == "appleseed":
                owner = parts[-1]
                # Strip the trailing ".<owner>" to recover the project filename.
                stripped_filename = filename[:-(1 + len(owner))]
                inprogress.setdefault(stripped_filename, []).append(owner)
        return inprogress
    def print_status(self):
        """Print a banner-delimited status report to the log."""
        self.log.info("-------------------------------------------------------------------")
        self.print_progress()
        self.print_assignments()
        self.print_pings()
        self.print_target_directory_size()
        self.log.info("-------------------------------------------------------------------")
    def print_progress(self):
        """Log completed/rendering/pending counts for this shot."""
        total = len(self.source_files)
        completed = self.count_completed_frames()
        rendering = self.count_inprogress_frames()
        pending = self.count_pending_frames()
        progress = 100.0 * completed / total if total > 0 else 0.0
        self.log.info("PROGRESS: {0}/{1} completed ({2:.2f} %), {3} rendering, {4} pending"
                      .format(completed, total, progress, rendering, pending))
    def print_assignments(self):
        """Log which render node(s) currently own each of this shot's frames."""
        assignments = {}
        for filename in self.source_files:
            if filename in self.inprogress_files.keys():
                assignments[filename] = ", ".join(self.inprogress_files[filename])
        if len(assignments) > 0:
            self.log.info("frame assignments:")
            for filename in assignments.keys():
                self.log.info("  {0}: {1}".format(filename, assignments[filename]))
        else:
            self.log.info("no frame assigned.")
    def print_pings(self):
        """Log the last-seen timestamp of every node working on this shot."""
        owners = set()
        for filename in self.source_files:
            if filename in self.inprogress_files.keys():
                for owner in self.inprogress_files[filename]:
                    owners.add(owner)
        unsorted_pings = [(owner, self.read_ping(owner)) for owner in owners]
        filtered_pings = [x for x in unsorted_pings if x[1] is not None]
        pings = sorted(filtered_pings, key=lambda x: x[1])
        if len(pings) > 0:
            max_owner_length = max([len(owner) for owner in owners])
            self.log.info("pings:")
            for (owner, ping) in pings:
                # Pad so the ping columns line up across owners.
                padding = " " * (max_owner_length + 1 - len(owner))
                self.log.info("  {0}:{1}{2}".format(owner, padding, self.format_ping(ping) if ping is not None else "n/a"))
        else:
            self.log.info("no pings.")
    def read_ping(self, owner):
        """Return the timestamp of `owner`'s last log line, or None if unreadable."""
        # Length of a "%Y-%m-%d %H:%M:%S.%f" timestamp prefix.
        TIMESTAMP_LENGTH = 26
        try:
            with open(os.path.join(self.args.target_directory, LOGS_DIR, owner + ".log")) as file:
                last_line = tail_file(file, 1)[0]
                return datetime.datetime.strptime(last_line[:TIMESTAMP_LENGTH], "%Y-%m-%d %H:%M:%S.%f")
        except IOError as ex:
            return None
    def format_ping(self, ping):
        """Render a ping timestamp as 'elapsed ago (at absolute-time)'."""
        elapsed = datetime.datetime.now() - ping
        return "{0} ago (at {1})".format(elapsed, ping)
    def print_target_directory_size(self):
        """Log target directory usage against the configured maximum."""
        size_mb = self.target_directory_size / MB
        max_size_mb = self.args.max_size / MB
        full = 100.0 * size_mb / max_size_mb if max_size_mb > 0 else 100.0
        self.log.info("size of target directory: {0:.2f}/{1} mb ({2:.2f} % full)"
                      .format(size_mb, max_size_mb, full))
    def count_completed_frames(self):
        """Number of this shot's frames that are archived as completed."""
        return sum(1 for filename in self.source_files if filename in self.completed_files)
    def count_inprogress_frames(self):
        """Number of this shot's frames currently claimed by a node."""
        return sum(1 for filename in self.source_files if filename in self.inprogress_files)
    def count_pending_frames(self):
        """Number of this shot's frames neither completed nor in progress."""
        return sum(1 for filename in self.source_files
                   if not filename in self.completed_files and not filename in self.inprogress_files)
    def move_frames(self):
        """Move all rendered frames out of the watched dir to the frames dir."""
        self.log.info("moving frames...")
        for filepath in get_files(self.frames_directory):
            self.move_frame(filepath)
    def move_frame(self, source_filepath):
        """Move a single rendered frame into the configured frames directory."""
        filename = os.path.basename(source_filepath)
        dest_filepath = os.path.join(self.args.frames_directory, filename)
        self.log.info("  moving {0}".format(filename))
        safe_mkdir(self.args.frames_directory)
        shutil.move(source_filepath, dest_filepath)
    def update_dependency_dbs(self):
        """Refresh both the uploaded and the completed dependency databases."""
        self.update_uploaded_dependency_db()
        self.update_completed_dependency_db()
    def update_uploaded_dependency_db(self):
        """Re-scan uploaded/in-progress project files (all shots and this shot)."""
        self.log.info("updating dependency database of uploaded and in-progress files (all shots)...")
        all_roots = map(os.path.basename, get_files(self.args.target_directory, "*.appleseed*"))
        self.all_uploaded_dependency_db.update(all_roots)
        self.log.info("updating dependency database of uploaded files (this shot)...")
        own_roots = [filename for filename in self.source_files
                     if filename in self.inprogress_files or filename in self.uploaded_files]
        self.own_uploaded_dependency_db.update(own_roots)
    def update_completed_dependency_db(self):
        """Re-scan this shot's completed project files."""
        self.log.info("updating dependency database of completed files (this shot)...")
        roots = [filename for filename in self.source_files if filename in self.completed_files]
        self.completed_dependency_db.update(roots)
    def remove_orphan_dependencies(self):
        """Delete dependencies of completed frames no uploaded file still needs."""
        self.log.info("removing orphan dependencies...")
        removed = 0
        all_uploaded_files_dependencies = self.all_uploaded_dependency_db.get_all_dependencies()
        for dep in self.completed_dependency_db.get_all_dependencies():
            if not dep in all_uploaded_files_dependencies:
                count = self.remove_file(dep)
                if count > 0:
                    self.log.info("  removed {0}".format(dep))
                removed += count
        if removed > 0:
            self.log.info("  removed {0} dependencies".format(removed))
    def upload_project_files(self):
        """Upload this shot's not-yet-handled project files, then their deps."""
        self.log.info("uploading project files...")
        for filename in self.source_files:
            if not filename in self.inprogress_files and not filename in self.completed_files:
                if self.upload_file(filename) > 0:
                    self.log.info("  uploaded {0}".format(filename))
        self.uploaded_files = self.gather_uploaded_files()
        self.update_uploaded_dependency_db()
        self.upload_missing_dependencies()
    def upload_missing_dependencies(self):
        """Upload any dependency of this shot's uploaded files not yet present."""
        self.log.info("uploading missing dependencies...")
        uploaded = 0
        for dep in self.own_uploaded_dependency_db.get_all_dependencies():
            count = self.upload_file(dep)
            if count > 0:
                self.log.info("  uploaded {0}".format(dep))
            uploaded += count
        if uploaded > 0:
            self.log.info("  uploaded {0} dependencies".format(uploaded))
    def remove_file(self, filename):
        """Remove `filename` from the target dir; return 1 if removed, else 0."""
        filepath = os.path.join(self.args.target_directory, filename)
        if not os.path.isfile(filepath):
            return 0
        try:
            filesize = safe_get_file_size(filepath)
            os.remove(filepath)
            # Keep the cached directory size in sync without re-walking.
            self.target_directory_size = max(self.target_directory_size - filesize, 0)
            return 1
        except IOError as ex:
            self.log.error("  could not remove {0}: {1}".format(filepath, ex.strerror))
            return 0
    def upload_file(self, filename):
        """Copy `filename` into the target dir; return 1 if copied, else 0.

        Skips the copy if the file already exists or would exceed the
        configured maximum target directory size.
        """
        dest_filepath = os.path.join(self.args.target_directory, filename)
        if os.path.isfile(dest_filepath):
            return 0
        source_filepath = os.path.join(self.args.source_directory, filename)
        filesize = safe_get_file_size(source_filepath)
        if self.target_directory_size + filesize > self.args.max_size:
            return 0
        try:
            safe_mkdir(os.path.dirname(dest_filepath))
            shutil.copyfile(source_filepath, dest_filepath)
            self.target_directory_size += filesize
            return 1
        except IOError as ex:
            self.log.error("  could not upload {0}: {1}".format(source_filepath, ex.strerror))
            return 0
#--------------------------------------------------------------------------------------------------
# Entry point.
#--------------------------------------------------------------------------------------------------
def main():
    """Entry point: parse the command line, then run the manager loop forever."""
    # Parse the command line.
    parser = argparse.ArgumentParser(description="send a shot to a folder being watched by "
                                                 "appleseed render nodes.")
    parser.add_argument("-s", "--max-size", metavar="MB",
                        help="set the maximum allowed size in mb of the target directory "
                             "(default is 1 terabyte)")
    parser.add_argument("--source", metavar="source-directory", dest="source_directory",
                        required=True, help="directory containing the source shot data")
    parser.add_argument("--target", metavar="target-directory", dest="target_directory",
                        required=True, help="directory being watched by render nodes")
    parser.add_argument("--frames", metavar="frames-directory", dest="frames_directory",
                        help="directory where the rendered frames should be stored")
    args = parser.parse_args()
    if args.max_size is None:
        args.max_size = 2 ** 40  # default to 1 terabyte
    else:
        # int() auto-promotes to a long on Python 2, so this matches the
        # original long() call while remaining valid on Python 3.
        args.max_size = int(args.max_size)
        args.max_size *= MB  # convert to bytes
    # Start the log.
    log = Log(os.path.join(args.target_directory, LOGS_DIR, "rendermanager.log"))
    log.info("--- starting logging ---")
    log.info("running rendermanager.py version {0}.".format(VERSION))
    manager = Manager(args, log)
    # Main management loop.
    try:
        while True:
            try:
                manager.manage()
            except (KeyboardInterrupt, SystemExit):
                # BUG FIX: the original ``except KeyboardInterrupt, SystemExit:``
                # bound the caught KeyboardInterrupt to the name SystemExit and
                # never actually caught SystemExit.
                raise
            except Exception:
                # Log and keep going: a single failed pass must not kill the
                # manager.
                exc_type, exc_value, exc_traceback = sys.exc_info()
                lines = traceback.format_exception(exc_type, exc_value, exc_traceback)
                log.error("".join(line for line in lines))
            log.info_no_log("waiting {0} seconds...".format(PAUSE_BETWEEN_UPDATES))
            time.sleep(PAUSE_BETWEEN_UPDATES)
    except (KeyboardInterrupt, SystemExit):
        pass
    log.info("exiting...")
if __name__ == '__main__':
    main()
|
Aakash1312/appleseed
|
scripts/rendermanager.py
|
Python
|
mit
| 21,721
|
[
"VisIt"
] |
6a0f16f6b0a241854c4ed164711d534ddaa1c0008d9f70746de4dce8e15816f3
|
def run_singlepoint(parameters):
    """ Drive the calculation, based on passed parameters
    Args:
        parameters (dict): dictionary describing this run (see
        https://github.com/Autodesk/molecular-design-toolkit/wiki/Generic-parameter-names )
    Raises:
        NotImplementedError: always, until the missing body is restored.
    """
    # NOTE(review): the original body ended with the incomplete statement
    # ``mol =`` (a syntax error) — the molecule construction and the rest of
    # the driver are missing from this copy of the file.  Fail loudly rather
    # than silently doing nothing; restore from the upstream source.
    raise NotImplementedError(
        "run_singlepoint is incomplete: molecule construction is missing")
class PySCFCalculator(object):
    """Calculator wrapper around PySCF.

    NOTE(review): the class body appears truncated in this copy of the file
    (the ``class`` statement is immediately followed by another top-level
    class) -- confirm against the upstream source.
    """
class LazyClassMap(object):
    """Lazily resolve classes from absolute dotted import strings.

    Values are stored as import paths (e.g. ``'collections.OrderedDict'``)
    so the owning module is imported only when the class is first looked
    up -- useful when imports carry significant overhead.

    Example:
        >>> myclasses = LazyClassMap({'od': 'collections.OrderedDict'})
        >>> myclasses['od']()
        OrderedDict()
    """
    def __init__(self, mapping):
        self.mapping = mapping

    def __getitem__(self, key):
        import importlib
        dotted = self.mapping[key]
        # Split "pkg.module.Class" into its module path and class name.
        module_name, _, class_name = dotted.rpartition('.')
        module = importlib.import_module(module_name)
        return getattr(module, class_name)

    def __contains__(self, item):
        return item in self.mapping

    def __iter__(self):
        return iter(self.mapping)
# Map of user-facing theory names to PySCF driver classes, resolved lazily
# so that pyscf is imported only when a theory is actually requested.
# Several aliases point at the same class (e.g. 'hf' and 'rhf').
THEORIES = LazyClassMap({'hf': 'pyscf.scf.RHF', 'rhf': 'pyscf.scf.RHF',
                         'uhf': 'pyscf.scf.UHF',
                         'mcscf': 'pyscf.mcscf.CASSCF', 'casscf': 'pyscf.mcscf.CASSCF',
                         'casci': 'pyscf.mcscf.CASCI',
                         'mp2': 'pyscf.mp.MP2',
                         'dft': 'pyscf.dft.RKS', 'rks': 'pyscf.dft.RKS', 'ks': 'pyscf.dft.RKS'})
|
molecular-toolkit/chemistry-docker-images
|
makefiles/buildfiles/pyscf/run.py
|
Python
|
apache-2.0
| 1,558
|
[
"PySCF"
] |
a03aaebf483c501e2304103fdd04655bf9d72ee8ae22b3187fab7213bf39c1c1
|
"""Minimal Python 2 & 3 shim around all Qt bindings
DOCUMENTATION
Qt.py was born in the film and visual effects industry to address
the growing need for the development of software capable of running
with more than one flavour of the Qt bindings for Python - PySide,
PySide2, PyQt4 and PyQt5.
1. Build for one, run with all
2. Explicit is better than implicit
3. Support co-existence
Default resolution order:
- PySide2
- PyQt5
- PySide
- PyQt4
Usage:
>> import sys
>> from Qt import QtWidgets
>> app = QtWidgets.QApplication(sys.argv)
>> button = QtWidgets.QPushButton("Hello World")
>> button.show()
>> app.exec_()
All members of PySide2 are mapped from other bindings, should they exist.
If no equivalent member exist, it is excluded from Qt.py and inaccessible.
The idea is to highlight members that exist across all supported binding,
and guarantee that code that runs on one binding runs on all others.
For more details, visit https://github.com/mottosso/Qt.py
LICENSE
See end of file for license (MIT, BSD) information.
"""
import os
import sys
import types
import shutil
import importlib
__version__ = "1.2.0.b3"
# Enable support for `from Qt import *`
__all__ = []
# Flags from environment variables
QT_VERBOSE = bool(os.getenv("QT_VERBOSE"))
QT_PREFERRED_BINDING = os.getenv("QT_PREFERRED_BINDING", "")
QT_SIP_API_HINT = os.getenv("QT_SIP_API_HINT")
# Reference to Qt.py
Qt = sys.modules[__name__]
Qt.QtCompat = types.ModuleType("QtCompat")
try:
    long
except NameError:
    # Python 3 compatibility
    # (`long` was removed in Python 3; alias it to `int` so code below can
    # use it uniformly on both major versions.)
    long = int
"""Common members of all bindings
This is where each member of Qt.py is explicitly defined.
It is based on a "lowest common denominator" of all bindings;
including members found in each of the 4 bindings.
The "_common_members" dictionary is generated using the
build_membership.sh script.
"""
_common_members = {
"QtCore": [
"QAbstractAnimation",
"QAbstractEventDispatcher",
"QAbstractItemModel",
"QAbstractListModel",
"QAbstractState",
"QAbstractTableModel",
"QAbstractTransition",
"QAnimationGroup",
"QBasicTimer",
"QBitArray",
"QBuffer",
"QByteArray",
"QByteArrayMatcher",
"QChildEvent",
"QCoreApplication",
"QCryptographicHash",
"QDataStream",
"QDate",
"QDateTime",
"QDir",
"QDirIterator",
"QDynamicPropertyChangeEvent",
"QEasingCurve",
"QElapsedTimer",
"QEvent",
"QEventLoop",
"QEventTransition",
"QFile",
"QFileInfo",
"QFileSystemWatcher",
"QFinalState",
"QGenericArgument",
"QGenericReturnArgument",
"QHistoryState",
"QItemSelectionRange",
"QIODevice",
"QLibraryInfo",
"QLine",
"QLineF",
"QLocale",
"QMargins",
"QMetaClassInfo",
"QMetaEnum",
"QMetaMethod",
"QMetaObject",
"QMetaProperty",
"QMimeData",
"QModelIndex",
"QMutex",
"QMutexLocker",
"QObject",
"QParallelAnimationGroup",
"QPauseAnimation",
"QPersistentModelIndex",
"QPluginLoader",
"QPoint",
"QPointF",
"QProcess",
"QProcessEnvironment",
"QPropertyAnimation",
"QReadLocker",
"QReadWriteLock",
"QRect",
"QRectF",
"QRegExp",
"QResource",
"QRunnable",
"QSemaphore",
"QSequentialAnimationGroup",
"QSettings",
"QSignalMapper",
"QSignalTransition",
"QSize",
"QSizeF",
"QSocketNotifier",
"QState",
"QStateMachine",
"QSysInfo",
"QSystemSemaphore",
"QT_TRANSLATE_NOOP",
"QT_TR_NOOP",
"QT_TR_NOOP_UTF8",
"QTemporaryFile",
"QTextBoundaryFinder",
"QTextCodec",
"QTextDecoder",
"QTextEncoder",
"QTextStream",
"QTextStreamManipulator",
"QThread",
"QThreadPool",
"QTime",
"QTimeLine",
"QTimer",
"QTimerEvent",
"QTranslator",
"QUrl",
"QVariantAnimation",
"QWaitCondition",
"QWriteLocker",
"QXmlStreamAttribute",
"QXmlStreamAttributes",
"QXmlStreamEntityDeclaration",
"QXmlStreamEntityResolver",
"QXmlStreamNamespaceDeclaration",
"QXmlStreamNotationDeclaration",
"QXmlStreamReader",
"QXmlStreamWriter",
"Qt",
"QtCriticalMsg",
"QtDebugMsg",
"QtFatalMsg",
"QtMsgType",
"QtSystemMsg",
"QtWarningMsg",
"qAbs",
"qAddPostRoutine",
"qChecksum",
"qCritical",
"qDebug",
"qFatal",
"qFuzzyCompare",
"qIsFinite",
"qIsInf",
"qIsNaN",
"qIsNull",
"qRegisterResourceData",
"qUnregisterResourceData",
"qVersion",
"qWarning",
"qrand",
"qsrand"
],
"QtGui": [
"QAbstractTextDocumentLayout",
"QActionEvent",
"QBitmap",
"QBrush",
"QClipboard",
"QCloseEvent",
"QColor",
"QConicalGradient",
"QContextMenuEvent",
"QCursor",
"QDesktopServices",
"QDoubleValidator",
"QDrag",
"QDragEnterEvent",
"QDragLeaveEvent",
"QDragMoveEvent",
"QDropEvent",
"QFileOpenEvent",
"QFocusEvent",
"QFont",
"QFontDatabase",
"QFontInfo",
"QFontMetrics",
"QFontMetricsF",
"QGradient",
"QHelpEvent",
"QHideEvent",
"QHoverEvent",
"QIcon",
"QIconDragEvent",
"QIconEngine",
"QImage",
"QImageIOHandler",
"QImageReader",
"QImageWriter",
"QInputEvent",
"QInputMethodEvent",
"QIntValidator",
"QKeyEvent",
"QKeySequence",
"QLinearGradient",
"QMatrix2x2",
"QMatrix2x3",
"QMatrix2x4",
"QMatrix3x2",
"QMatrix3x3",
"QMatrix3x4",
"QMatrix4x2",
"QMatrix4x3",
"QMatrix4x4",
"QMouseEvent",
"QMoveEvent",
"QMovie",
"QPaintDevice",
"QPaintEngine",
"QPaintEngineState",
"QPaintEvent",
"QPainter",
"QPainterPath",
"QPainterPathStroker",
"QPalette",
"QPen",
"QPicture",
"QPictureIO",
"QPixmap",
"QPixmapCache",
"QPolygon",
"QPolygonF",
"QQuaternion",
"QRadialGradient",
"QRegExpValidator",
"QRegion",
"QResizeEvent",
"QSessionManager",
"QShortcutEvent",
"QShowEvent",
"QStandardItem",
"QStandardItemModel",
"QStatusTipEvent",
"QSyntaxHighlighter",
"QTabletEvent",
"QTextBlock",
"QTextBlockFormat",
"QTextBlockGroup",
"QTextBlockUserData",
"QTextCharFormat",
"QTextCursor",
"QTextDocument",
"QTextDocumentFragment",
"QTextFormat",
"QTextFragment",
"QTextFrame",
"QTextFrameFormat",
"QTextImageFormat",
"QTextInlineObject",
"QTextItem",
"QTextLayout",
"QTextLength",
"QTextLine",
"QTextList",
"QTextListFormat",
"QTextObject",
"QTextObjectInterface",
"QTextOption",
"QTextTable",
"QTextTableCell",
"QTextTableCellFormat",
"QTextTableFormat",
"QTouchEvent",
"QTransform",
"QValidator",
"QVector2D",
"QVector3D",
"QVector4D",
"QWhatsThisClickedEvent",
"QWheelEvent",
"QWindowStateChangeEvent",
"qAlpha",
"qBlue",
"qGray",
"qGreen",
"qIsGray",
"qRed",
"qRgb",
"qRgba"
],
"QtHelp": [
"QHelpContentItem",
"QHelpContentModel",
"QHelpContentWidget",
"QHelpEngine",
"QHelpEngineCore",
"QHelpIndexModel",
"QHelpIndexWidget",
"QHelpSearchEngine",
"QHelpSearchQuery",
"QHelpSearchQueryWidget",
"QHelpSearchResultWidget"
],
"QtMultimedia": [
"QAbstractVideoBuffer",
"QAbstractVideoSurface",
"QAudio",
"QAudioDeviceInfo",
"QAudioFormat",
"QAudioInput",
"QAudioOutput",
"QVideoFrame",
"QVideoSurfaceFormat"
],
"QtNetwork": [
"QAbstractNetworkCache",
"QAbstractSocket",
"QAuthenticator",
"QHostAddress",
"QHostInfo",
"QLocalServer",
"QLocalSocket",
"QNetworkAccessManager",
"QNetworkAddressEntry",
"QNetworkCacheMetaData",
"QNetworkConfiguration",
"QNetworkConfigurationManager",
"QNetworkCookie",
"QNetworkCookieJar",
"QNetworkDiskCache",
"QNetworkInterface",
"QNetworkProxy",
"QNetworkProxyFactory",
"QNetworkProxyQuery",
"QNetworkReply",
"QNetworkRequest",
"QNetworkSession",
"QSsl",
"QTcpServer",
"QTcpSocket",
"QUdpSocket"
],
"QtOpenGL": [
"QGL",
"QGLContext",
"QGLFormat",
"QGLWidget"
],
"QtPrintSupport": [
"QAbstractPrintDialog",
"QPageSetupDialog",
"QPrintDialog",
"QPrintEngine",
"QPrintPreviewDialog",
"QPrintPreviewWidget",
"QPrinter",
"QPrinterInfo"
],
"QtSql": [
"QSql",
"QSqlDatabase",
"QSqlDriver",
"QSqlDriverCreatorBase",
"QSqlError",
"QSqlField",
"QSqlIndex",
"QSqlQuery",
"QSqlQueryModel",
"QSqlRecord",
"QSqlRelation",
"QSqlRelationalDelegate",
"QSqlRelationalTableModel",
"QSqlResult",
"QSqlTableModel"
],
"QtSvg": [
"QGraphicsSvgItem",
"QSvgGenerator",
"QSvgRenderer",
"QSvgWidget"
],
"QtTest": [
"QTest"
],
"QtWidgets": [
"QAbstractButton",
"QAbstractGraphicsShapeItem",
"QAbstractItemDelegate",
"QAbstractItemView",
"QAbstractScrollArea",
"QAbstractSlider",
"QAbstractSpinBox",
"QAction",
"QActionGroup",
"QApplication",
"QBoxLayout",
"QButtonGroup",
"QCalendarWidget",
"QCheckBox",
"QColorDialog",
"QColumnView",
"QComboBox",
"QCommandLinkButton",
"QCommonStyle",
"QCompleter",
"QDataWidgetMapper",
"QDateEdit",
"QDateTimeEdit",
"QDesktopWidget",
"QDial",
"QDialog",
"QDialogButtonBox",
"QDirModel",
"QDockWidget",
"QDoubleSpinBox",
"QErrorMessage",
"QFileDialog",
"QFileIconProvider",
"QFileSystemModel",
"QFocusFrame",
"QFontComboBox",
"QFontDialog",
"QFormLayout",
"QFrame",
"QGesture",
"QGestureEvent",
"QGestureRecognizer",
"QGraphicsAnchor",
"QGraphicsAnchorLayout",
"QGraphicsBlurEffect",
"QGraphicsColorizeEffect",
"QGraphicsDropShadowEffect",
"QGraphicsEffect",
"QGraphicsEllipseItem",
"QGraphicsGridLayout",
"QGraphicsItem",
"QGraphicsItemGroup",
"QGraphicsLayout",
"QGraphicsLayoutItem",
"QGraphicsLineItem",
"QGraphicsLinearLayout",
"QGraphicsObject",
"QGraphicsOpacityEffect",
"QGraphicsPathItem",
"QGraphicsPixmapItem",
"QGraphicsPolygonItem",
"QGraphicsProxyWidget",
"QGraphicsRectItem",
"QGraphicsRotation",
"QGraphicsScale",
"QGraphicsScene",
"QGraphicsSceneContextMenuEvent",
"QGraphicsSceneDragDropEvent",
"QGraphicsSceneEvent",
"QGraphicsSceneHelpEvent",
"QGraphicsSceneHoverEvent",
"QGraphicsSceneMouseEvent",
"QGraphicsSceneMoveEvent",
"QGraphicsSceneResizeEvent",
"QGraphicsSceneWheelEvent",
"QGraphicsSimpleTextItem",
"QGraphicsTextItem",
"QGraphicsTransform",
"QGraphicsView",
"QGraphicsWidget",
"QGridLayout",
"QGroupBox",
"QHBoxLayout",
"QHeaderView",
"QInputDialog",
"QItemDelegate",
"QItemEditorCreatorBase",
"QItemEditorFactory",
"QKeyEventTransition",
"QLCDNumber",
"QLabel",
"QLayout",
"QLayoutItem",
"QLineEdit",
"QListView",
"QListWidget",
"QListWidgetItem",
"QMainWindow",
"QMdiArea",
"QMdiSubWindow",
"QMenu",
"QMenuBar",
"QMessageBox",
"QMouseEventTransition",
"QPanGesture",
"QPinchGesture",
"QPlainTextDocumentLayout",
"QPlainTextEdit",
"QProgressBar",
"QProgressDialog",
"QPushButton",
"QRadioButton",
"QRubberBand",
"QScrollArea",
"QScrollBar",
"QShortcut",
"QSizeGrip",
"QSizePolicy",
"QSlider",
"QSpacerItem",
"QSpinBox",
"QSplashScreen",
"QSplitter",
"QSplitterHandle",
"QStackedLayout",
"QStackedWidget",
"QStatusBar",
"QStyle",
"QStyleFactory",
"QStyleHintReturn",
"QStyleHintReturnMask",
"QStyleHintReturnVariant",
"QStyleOption",
"QStyleOptionButton",
"QStyleOptionComboBox",
"QStyleOptionComplex",
"QStyleOptionDockWidget",
"QStyleOptionFocusRect",
"QStyleOptionFrame",
"QStyleOptionGraphicsItem",
"QStyleOptionGroupBox",
"QStyleOptionHeader",
"QStyleOptionMenuItem",
"QStyleOptionProgressBar",
"QStyleOptionRubberBand",
"QStyleOptionSizeGrip",
"QStyleOptionSlider",
"QStyleOptionSpinBox",
"QStyleOptionTab",
"QStyleOptionTabBarBase",
"QStyleOptionTabWidgetFrame",
"QStyleOptionTitleBar",
"QStyleOptionToolBar",
"QStyleOptionToolBox",
"QStyleOptionToolButton",
"QStyleOptionViewItem",
"QStylePainter",
"QStyledItemDelegate",
"QSwipeGesture",
"QSystemTrayIcon",
"QTabBar",
"QTabWidget",
"QTableView",
"QTableWidget",
"QTableWidgetItem",
"QTableWidgetSelectionRange",
"QTapAndHoldGesture",
"QTapGesture",
"QTextBrowser",
"QTextEdit",
"QTimeEdit",
"QToolBar",
"QToolBox",
"QToolButton",
"QToolTip",
"QTreeView",
"QTreeWidget",
"QTreeWidgetItem",
"QTreeWidgetItemIterator",
"QUndoCommand",
"QUndoGroup",
"QUndoStack",
"QUndoView",
"QVBoxLayout",
"QWhatsThis",
"QWidget",
"QWidgetAction",
"QWidgetItem",
"QWizard",
"QWizardPage"
],
"QtX11Extras": [
"QX11Info"
],
"QtXml": [
"QDomAttr",
"QDomCDATASection",
"QDomCharacterData",
"QDomComment",
"QDomDocument",
"QDomDocumentFragment",
"QDomDocumentType",
"QDomElement",
"QDomEntity",
"QDomEntityReference",
"QDomImplementation",
"QDomNamedNodeMap",
"QDomNode",
"QDomNodeList",
"QDomNotation",
"QDomProcessingInstruction",
"QDomText",
"QXmlAttributes",
"QXmlContentHandler",
"QXmlDTDHandler",
"QXmlDeclHandler",
"QXmlDefaultHandler",
"QXmlEntityResolver",
"QXmlErrorHandler",
"QXmlInputSource",
"QXmlLexicalHandler",
"QXmlLocator",
"QXmlNamespaceSupport",
"QXmlParseException",
"QXmlReader",
"QXmlSimpleReader"
],
"QtXmlPatterns": [
"QAbstractMessageHandler",
"QAbstractUriResolver",
"QAbstractXmlNodeModel",
"QAbstractXmlReceiver",
"QSourceLocation",
"QXmlFormatter",
"QXmlItem",
"QXmlName",
"QXmlNamePool",
"QXmlNodeModelIndex",
"QXmlQuery",
"QXmlResultItems",
"QXmlSchema",
"QXmlSchemaValidator",
"QXmlSerializer"
]
}
def _qInstallMessageHandler(handler):
    """Install a message handler that works in all bindings

    Args:
        handler: A function that takes 3 arguments, or None
    """
    def messageOutputHandler(*args):
        # Qt4 bindings call handlers with (msgType, msg); Qt5 bindings
        # insert a QMessageLogContext in the middle.  Normalise both
        # shapes to the three-argument Qt5 form before delegating.
        argc = len(args)
        if argc == 2:
            msgType, msg = args
            logContext = None
        elif argc == 3:
            msgType, logContext, msg = args
        else:
            raise TypeError(
                "handler expected 2 or 3 arguments, got {0}".format(argc))

        if isinstance(msg, bytes):
            # Some Python 3 bindings deliver the message as a bytestring;
            # decode so the wrapped handler always receives unicode text.
            msg = msg.decode()

        handler(msgType, logContext, msg)

    # Preserve falsy handlers (e.g. None) verbatim so the native
    # qInstall*Handler call uninstalls any previous handler.
    if handler:
        passObject = messageOutputHandler
    else:
        passObject = handler

    if Qt.IsPySide or Qt.IsPyQt4:
        return Qt._QtCore.qInstallMsgHandler(passObject)
    elif Qt.IsPySide2 or Qt.IsPyQt5:
        return Qt._QtCore.qInstallMessageHandler(passObject)
def _getcpppointer(object):
    """Return the address of the C++ instance wrapped by `object`.

    Uses whichever pointer-introspection module the active binding
    registered on Qt (shiboken2, shiboken, or sip).
    """
    if hasattr(Qt, "_shiboken2"):
        return Qt._shiboken2.getCppPointer(object)[0]
    if hasattr(Qt, "_shiboken"):
        return Qt._shiboken.getCppPointer(object)[0]
    if hasattr(Qt, "_sip"):
        return Qt._sip.unwrapinstance(object)
    raise AttributeError("'module' has no attribute 'getCppPointer'")
def _wrapinstance(ptr, base=None):
    """Enable implicit cast of pointer to most suitable class

    This behaviour is available in sip per default.
    Based on http://nathanhorne.com/pyqtpyside-wrap-instance

    Usage:
        This mechanism kicks in under these circumstances.
        1. Qt.py is using PySide 1 or 2.
        2. A `base` argument is not provided.

        See :func:`QtCompat.wrapInstance()`

    Arguments:
        ptr (long): Pointer to QObject in memory
        base (QObject, optional): Base class to wrap with. Defaults to QObject,
            which should handle anything.
    """
    assert isinstance(ptr, long), "Argument 'ptr' must be of type <long>"
    assert (base is None) or issubclass(base, Qt.QtCore.QObject), (
        "Argument 'base' must be of type <QObject>")

    # Pick the wrap function exposed by the active binding.
    if Qt.IsPyQt4 or Qt.IsPyQt5:
        wrap = Qt._sip.wrapinstance
    elif Qt.IsPySide2:
        wrap = Qt._shiboken2.wrapInstance
    elif Qt.IsPySide:
        wrap = Qt._shiboken.wrapInstance
    else:
        raise AttributeError("'module' has no attribute 'wrapInstance'")

    if base is not None:
        return wrap(long(ptr), base)

    # No explicit base: wrap as a plain QObject first, then consult the
    # meta-object to find the most specific widget class available.
    q_object = wrap(long(ptr), Qt.QtCore.QObject)
    meta_object = q_object.metaObject()
    class_name = meta_object.className()
    super_class_name = meta_object.superClass().className()
    if hasattr(Qt.QtWidgets, class_name):
        base = getattr(Qt.QtWidgets, class_name)
    elif hasattr(Qt.QtWidgets, super_class_name):
        base = getattr(Qt.QtWidgets, super_class_name)
    else:
        # Neither the class nor its parent is a known widget; fall back
        # to a plain QObject wrapper.
        base = Qt.QtCore.QObject
    return wrap(long(ptr), base)
def _translate(context, sourceText, *args):
    """Binding-agnostic wrapper over QCoreApplication.translate.

    Qt4 bindings accept (disambiguation, encoding, n) as the trailing
    arguments; Qt5 bindings drop the encoding argument.  Normalise
    whichever form the caller used, then dispatch appropriately.
    """
    argc = len(args)
    if argc == 2:
        disambiguation, n = args
        encoding = None
    elif argc == 3:
        disambiguation, encoding, n = args
    else:
        raise TypeError(
            "Expected 4 or 5 arguments, got {0}.".format(argc + 2))

    try:
        app = Qt.QtCore.QCoreApplication
    except AttributeError:
        raise NotImplementedError(
            "Missing QCoreApplication implementation for {binding}".format(
                binding=Qt.__binding__,
            )
        )

    if Qt.__binding__ in ("PySide2", "PyQt5"):
        # Qt5: no encoding argument.
        return app.translate(context, sourceText, disambiguation, n)

    # Qt4: pass the encoding through, defaulting to CodecForTr.
    return app.translate(
        context,
        sourceText,
        disambiguation,
        encoding or app.CodecForTr,
        n
    )
def _loadUi(uifile, baseinstance=None):
    """Dynamically load a user interface from the given `uifile`

    This function calls `uic.loadUi` if using PyQt bindings,
    else it implements a comparable binding for PySide.

    Documentation:
        http://pyqt.sourceforge.net/Docs/PyQt5/designer.html#PyQt5.uic.loadUi

    Arguments:
        uifile (str): Absolute path to Qt Designer file.
        baseinstance (QWidget, optional): Instantiated QWidget or subclass
            thereof.  When given, the loaded UI is embedded into it.

    Return:
        The newly created user-interface widget.
    """
    if hasattr(Qt, "_uic"):
        # PyQt ships a native implementation; defer to it.
        return Qt._uic.loadUi(uifile, baseinstance)

    elif hasattr(Qt, "_QtUiTools"):
        # Implement `PyQt5.uic.loadUi` for PySide(2)
        path = uifile
        widget = baseinstance

        loader = Qt._QtUiTools.QUiLoader()
        loader.setWorkingDirectory(os.path.dirname(path))

        f = Qt.QtCore.QFile(path)
        f.open(Qt.QtCore.QFile.ReadOnly)
        try:
            ui = loader.load(path, widget)
        finally:
            # Bugfix: close the file even when the .ui document fails
            # to load, instead of leaking the handle.
            f.close()

        # Bugfix: the original unconditionally called `widget.setLayout`,
        # raising AttributeError whenever `baseinstance` was omitted.
        # Without a base instance there is nothing to embed into, so the
        # freshly loaded widget is the result.
        if widget is None:
            return ui

        # Embed the loaded UI into the provided base instance so the call
        # behaves like PyQt's in-place loadUi.
        layout = Qt.QtWidgets.QVBoxLayout()
        layout.setObjectName("uiLayout")
        layout.addWidget(ui)
        widget.setLayout(layout)
        layout.setContentsMargins(0, 0, 0, 0)

        # Mirror the designer-file size constraints on the base instance.
        baseinstance.setMinimumWidth(ui.minimumWidth())
        baseinstance.setMinimumHeight(ui.minimumHeight())
        baseinstance.setMaximumWidth(ui.maximumWidth())
        baseinstance.setMaximumHeight(ui.maximumHeight())

        Qt.QtCore.QMetaObject.connectSlotsByName(widget)

        return ui

    else:
        raise NotImplementedError("No implementation available for loadUi")
"""Misplaced members
These members from the original submodule are misplaced relative PySide2
"""
_misplaced_members = {
"PySide2": {
"QtCore.QStringListModel": "QtCore.QStringListModel",
"QtGui.QStringListModel": "QtCore.QStringListModel",
"QtCore.Property": "QtCore.Property",
"QtCore.Signal": "QtCore.Signal",
"QtCore.Slot": "QtCore.Slot",
"QtCore.QAbstractProxyModel": "QtCore.QAbstractProxyModel",
"QtCore.QSortFilterProxyModel": "QtCore.QSortFilterProxyModel",
"QtCore.QItemSelection": "QtCore.QItemSelection",
"QtCore.QItemSelectionModel": "QtCore.QItemSelectionModel",
"QtCore.QItemSelectionRange": "QtCore.QItemSelectionRange",
"QtUiTools.QUiLoader": ["QtCompat.loadUi", _loadUi],
"shiboken2.wrapInstance": ["QtCompat.wrapInstance", _wrapinstance],
"shiboken2.getCppPointer": ["QtCompat.getCppPointer", _getcpppointer],
"QtWidgets.qApp": "QtWidgets.QApplication.instance()",
"QtCore.QCoreApplication.translate": [
"QtCompat.translate", _translate
],
"QtWidgets.QApplication.translate": [
"QtCompat.translate", _translate
],
"QtCore.qInstallMessageHandler": [
"QtCompat.qInstallMessageHandler", _qInstallMessageHandler
],
},
"PyQt5": {
"QtCore.pyqtProperty": "QtCore.Property",
"QtCore.pyqtSignal": "QtCore.Signal",
"QtCore.pyqtSlot": "QtCore.Slot",
"QtCore.QAbstractProxyModel": "QtCore.QAbstractProxyModel",
"QtCore.QSortFilterProxyModel": "QtCore.QSortFilterProxyModel",
"QtCore.QStringListModel": "QtCore.QStringListModel",
"QtCore.QItemSelection": "QtCore.QItemSelection",
"QtCore.QItemSelectionModel": "QtCore.QItemSelectionModel",
"QtCore.QItemSelectionRange": "QtCore.QItemSelectionRange",
"uic.loadUi": ["QtCompat.loadUi", _loadUi],
"sip.wrapinstance": ["QtCompat.wrapInstance", _wrapinstance],
"sip.unwrapinstance": ["QtCompat.getCppPointer", _getcpppointer],
"QtWidgets.qApp": "QtWidgets.QApplication.instance()",
"QtCore.QCoreApplication.translate": [
"QtCompat.translate", _translate
],
"QtWidgets.QApplication.translate": [
"QtCompat.translate", _translate
],
"QtCore.qInstallMessageHandler": [
"QtCompat.qInstallMessageHandler", _qInstallMessageHandler
],
},
"PySide": {
"QtGui.QAbstractProxyModel": "QtCore.QAbstractProxyModel",
"QtGui.QSortFilterProxyModel": "QtCore.QSortFilterProxyModel",
"QtGui.QStringListModel": "QtCore.QStringListModel",
"QtGui.QItemSelection": "QtCore.QItemSelection",
"QtGui.QItemSelectionModel": "QtCore.QItemSelectionModel",
"QtCore.Property": "QtCore.Property",
"QtCore.Signal": "QtCore.Signal",
"QtCore.Slot": "QtCore.Slot",
"QtGui.QItemSelectionRange": "QtCore.QItemSelectionRange",
"QtGui.QAbstractPrintDialog": "QtPrintSupport.QAbstractPrintDialog",
"QtGui.QPageSetupDialog": "QtPrintSupport.QPageSetupDialog",
"QtGui.QPrintDialog": "QtPrintSupport.QPrintDialog",
"QtGui.QPrintEngine": "QtPrintSupport.QPrintEngine",
"QtGui.QPrintPreviewDialog": "QtPrintSupport.QPrintPreviewDialog",
"QtGui.QPrintPreviewWidget": "QtPrintSupport.QPrintPreviewWidget",
"QtGui.QPrinter": "QtPrintSupport.QPrinter",
"QtGui.QPrinterInfo": "QtPrintSupport.QPrinterInfo",
"QtUiTools.QUiLoader": ["QtCompat.loadUi", _loadUi],
"shiboken.wrapInstance": ["QtCompat.wrapInstance", _wrapinstance],
"shiboken.unwrapInstance": ["QtCompat.getCppPointer", _getcpppointer],
"QtGui.qApp": "QtWidgets.QApplication.instance()",
"QtCore.QCoreApplication.translate": [
"QtCompat.translate", _translate
],
"QtGui.QApplication.translate": [
"QtCompat.translate", _translate
],
"QtCore.qInstallMsgHandler": [
"QtCompat.qInstallMessageHandler", _qInstallMessageHandler
],
},
"PyQt4": {
"QtGui.QAbstractProxyModel": "QtCore.QAbstractProxyModel",
"QtGui.QSortFilterProxyModel": "QtCore.QSortFilterProxyModel",
"QtGui.QItemSelection": "QtCore.QItemSelection",
"QtGui.QStringListModel": "QtCore.QStringListModel",
"QtGui.QItemSelectionModel": "QtCore.QItemSelectionModel",
"QtCore.pyqtProperty": "QtCore.Property",
"QtCore.pyqtSignal": "QtCore.Signal",
"QtCore.pyqtSlot": "QtCore.Slot",
"QtGui.QItemSelectionRange": "QtCore.QItemSelectionRange",
"QtGui.QAbstractPrintDialog": "QtPrintSupport.QAbstractPrintDialog",
"QtGui.QPageSetupDialog": "QtPrintSupport.QPageSetupDialog",
"QtGui.QPrintDialog": "QtPrintSupport.QPrintDialog",
"QtGui.QPrintEngine": "QtPrintSupport.QPrintEngine",
"QtGui.QPrintPreviewDialog": "QtPrintSupport.QPrintPreviewDialog",
"QtGui.QPrintPreviewWidget": "QtPrintSupport.QPrintPreviewWidget",
"QtGui.QPrinter": "QtPrintSupport.QPrinter",
"QtGui.QPrinterInfo": "QtPrintSupport.QPrinterInfo",
# "QtCore.pyqtSignature": "QtCore.Slot",
"uic.loadUi": ["QtCompat.loadUi", _loadUi],
"sip.wrapinstance": ["QtCompat.wrapInstance", _wrapinstance],
"sip.unwrapinstance": ["QtCompat.getCppPointer", _getcpppointer],
"QtCore.QString": "str",
"QtGui.qApp": "QtWidgets.QApplication.instance()",
"QtCore.QCoreApplication.translate": [
"QtCompat.translate", _translate
],
"QtGui.QApplication.translate": [
"QtCompat.translate", _translate
],
"QtCore.qInstallMsgHandler": [
"QtCompat.qInstallMessageHandler", _qInstallMessageHandler
],
}
}
""" Compatibility Members
This dictionary is used to build Qt.QtCompat objects that provide a consistent
interface for obsolete members, and differences in binding return values.
{
"binding": {
"classname": {
"targetname": "binding_namespace",
}
}
}
"""
# Per-binding lookup tables used by _build_compatibility_members(): for
# each wrapped class, maps the standard QtCompat method name to the dotted
# path of the binding-specific implementation it should forward to.
_compatibility_members = {
    "PySide2": {
        "QWidget": {
            "grab": "QtWidgets.QWidget.grab",
        },
        "QHeaderView": {
            "sectionsClickable": "QtWidgets.QHeaderView.sectionsClickable",
            "setSectionsClickable":
                "QtWidgets.QHeaderView.setSectionsClickable",
            "sectionResizeMode": "QtWidgets.QHeaderView.sectionResizeMode",
            "setSectionResizeMode":
                "QtWidgets.QHeaderView.setSectionResizeMode",
            "sectionsMovable": "QtWidgets.QHeaderView.sectionsMovable",
            "setSectionsMovable": "QtWidgets.QHeaderView.setSectionsMovable",
        },
        "QFileDialog": {
            "getOpenFileName": "QtWidgets.QFileDialog.getOpenFileName",
            "getOpenFileNames": "QtWidgets.QFileDialog.getOpenFileNames",
            "getSaveFileName": "QtWidgets.QFileDialog.getSaveFileName",
        },
    },
    "PyQt5": {
        "QWidget": {
            "grab": "QtWidgets.QWidget.grab",
        },
        "QHeaderView": {
            "sectionsClickable": "QtWidgets.QHeaderView.sectionsClickable",
            "setSectionsClickable":
                "QtWidgets.QHeaderView.setSectionsClickable",
            "sectionResizeMode": "QtWidgets.QHeaderView.sectionResizeMode",
            "setSectionResizeMode":
                "QtWidgets.QHeaderView.setSectionResizeMode",
            "sectionsMovable": "QtWidgets.QHeaderView.sectionsMovable",
            "setSectionsMovable": "QtWidgets.QHeaderView.setSectionsMovable",
        },
        "QFileDialog": {
            "getOpenFileName": "QtWidgets.QFileDialog.getOpenFileName",
            "getOpenFileNames": "QtWidgets.QFileDialog.getOpenFileNames",
            "getSaveFileName": "QtWidgets.QFileDialog.getSaveFileName",
        },
    },
    # Qt4-era bindings use the older method names (isClickable, resizeMode,
    # QPixmap.grabWidget, ...) behind the same QtCompat interface.
    "PySide": {
        "QWidget": {
            "grab": "QtWidgets.QPixmap.grabWidget",
        },
        "QHeaderView": {
            "sectionsClickable": "QtWidgets.QHeaderView.isClickable",
            "setSectionsClickable": "QtWidgets.QHeaderView.setClickable",
            "sectionResizeMode": "QtWidgets.QHeaderView.resizeMode",
            "setSectionResizeMode": "QtWidgets.QHeaderView.setResizeMode",
            "sectionsMovable": "QtWidgets.QHeaderView.isMovable",
            "setSectionsMovable": "QtWidgets.QHeaderView.setMovable",
        },
        "QFileDialog": {
            "getOpenFileName": "QtWidgets.QFileDialog.getOpenFileName",
            "getOpenFileNames": "QtWidgets.QFileDialog.getOpenFileNames",
            "getSaveFileName": "QtWidgets.QFileDialog.getSaveFileName",
        },
    },
    "PyQt4": {
        "QWidget": {
            "grab": "QtWidgets.QPixmap.grabWidget",
        },
        "QHeaderView": {
            "sectionsClickable": "QtWidgets.QHeaderView.isClickable",
            "setSectionsClickable": "QtWidgets.QHeaderView.setClickable",
            "sectionResizeMode": "QtWidgets.QHeaderView.resizeMode",
            "setSectionResizeMode": "QtWidgets.QHeaderView.setResizeMode",
            "sectionsMovable": "QtWidgets.QHeaderView.isMovable",
            "setSectionsMovable": "QtWidgets.QHeaderView.setMovable",
        },
        "QFileDialog": {
            "getOpenFileName": "QtWidgets.QFileDialog.getOpenFileName",
            "getOpenFileNames": "QtWidgets.QFileDialog.getOpenFileNames",
            "getSaveFileName": "QtWidgets.QFileDialog.getSaveFileName",
        },
    },
}
def _apply_site_config():
try:
import QtSiteConfig
except ImportError:
# If no QtSiteConfig module found, no modifications
# to _common_members are needed.
pass
else:
# Provide the ability to modify the dicts used to build Qt.py
if hasattr(QtSiteConfig, 'update_members'):
QtSiteConfig.update_members(_common_members)
if hasattr(QtSiteConfig, 'update_misplaced_members'):
QtSiteConfig.update_misplaced_members(members=_misplaced_members)
if hasattr(QtSiteConfig, 'update_compatibility_members'):
QtSiteConfig.update_compatibility_members(
members=_compatibility_members)
def _new_module(name):
return types.ModuleType(__name__ + "." + name)
def _import_sub_module(module, name):
"""import_sub_module will mimic the function of importlib.import_module"""
module = __import__(module.__name__ + "." + name)
for level in name.split("."):
module = getattr(module, level)
return module
def _setup(module, extras):
    """Install common submodules"""
    Qt.__binding__ = module.__name__

    def _resolve(name):
        # Look for the submodule inside the binding first; fall back to a
        # top-level import for companion modules like sip or shiboken
        # that may not be children of the binding package.
        try:
            return _import_sub_module(module, name)
        except ImportError:
            try:
                return __import__(name)
            except ImportError:
                return None

    for name in list(_common_members) + extras:
        found = _resolve(name)
        if found is None:
            continue

        setattr(Qt, "_" + name, found)

        if name not in extras:
            # Keep a reference to the original binding module, but do not
            # expose speciality modules such as uic or QtUiTools directly.
            setattr(Qt, name, _new_module(name))
def _reassign_misplaced_members(binding):
    """Apply misplaced members from `binding` to Qt.py

    Each entry maps a "module.member" source path in the original binding
    to either a destination path string or a ["path", callable] pair
    (see `_misplaced_members`).

    Arguments:
        binding (str): Binding key into `_misplaced_members`
    """
    for src, dst in _misplaced_members[binding].items():
        dst_value = None

        # Split the source path into its module and remaining member chain.
        src_parts = src.split(".")
        src_module = src_parts[0]
        src_member = None
        if len(src_parts) > 1:
            src_member = src_parts[1:]

        # A list/tuple destination carries an explicit replacement value
        # (e.g. a QtCompat wrapper function) alongside the target path.
        if isinstance(dst, (list, tuple)):
            dst, dst_value = dst

        dst_parts = dst.split(".")
        dst_module = dst_parts[0]
        dst_member = None
        if len(dst_parts) > 1:
            dst_member = dst_parts[1]

        # Get the member we want to store in the namespace.
        if not dst_value:
            try:
                _part = getattr(Qt, "_" + src_module)
                # Walk down the member chain, consuming it as we go.
                while src_member:
                    member = src_member.pop(0)
                    _part = getattr(_part, member)
                dst_value = _part
            except AttributeError:
                # If the member we want to store in the namespace does not
                # exist, there is no need to continue. This can happen if a
                # request was made to rename a member that didn't exist, for
                # example if QtWidgets isn't available on the target platform.
                _log("Misplaced member has no source: {0}".format(src))
                continue

        try:
            src_object = getattr(Qt, dst_module)
        except AttributeError:
            if dst_module not in _common_members:
                # Only create the Qt parent module if its listed in
                # _common_members. Without this check, if you remove QtCore
                # from _common_members, the default _misplaced_members will add
                # Qt.QtCore so it can add Signal, Slot, etc.
                msg = 'Not creating missing member module "{m}" for "{c}"'
                _log(msg.format(m=dst_module, c=dst_member))
                continue
            # If the dst is valid but the Qt parent module does not exist
            # then go ahead and create a new module to contain the member.
            setattr(Qt, dst_module, _new_module(dst_module))
            src_object = getattr(Qt, dst_module)
            # Enable direct import of the new module
            sys.modules[__name__ + "." + dst_module] = src_object

        # NOTE(review): `src_member` is a list here and is normally already
        # drained by the while-loop above, so this branch looks unreachable
        # in practice (getattr with a list would raise) — confirm before
        # relying on it.
        if not dst_value:
            dst_value = getattr(Qt, "_" + src_module)
            if src_member:
                dst_value = getattr(dst_value, src_member)

        setattr(
            src_object,
            dst_member or dst_module,
            dst_value
        )
def _build_compatibility_members(binding, decorators=None):
    """Apply `binding` to QtCompat

    Arguments:
        binding (str): Top level binding in _compatibility_members.
        decorators (dict, optional): Provides the ability to decorate the
            original Qt methods when needed by a binding. This can be used
            to change the returned value to a standard value. The key should
            be the classname, the value is a dict where the keys are the
            target method names, and the values are the decorator functions.
    """
    decorators = decorators or dict()

    # Allow optional site-level customization of the compatibility members.
    # This method does not need to be implemented in QtSiteConfig.
    try:
        import QtSiteConfig
    except ImportError:
        pass
    else:
        if hasattr(QtSiteConfig, 'update_compatibility_decorators'):
            QtSiteConfig.update_compatibility_decorators(binding, decorators)

    _QtCompat = type("QtCompat", (object,), {})

    for classname, members in _compatibility_members[binding].items():
        attrs = {}
        # Fix: the inner loop previously reused the name `binding`,
        # shadowing the function parameter; the member path now has its
        # own name to keep both readable.
        for target, member_path in members.items():
            namespaces = member_path.split('.')
            try:
                src_object = getattr(Qt, "_" + namespaces[0])
            except AttributeError as e:
                _log("QtCompat: AttributeError: %s" % e)
                # Skip reassignment of non-existing members.
                # This can happen if a request was made to
                # rename a member that didn't exist, for example
                # if QtWidgets isn't available on the target platform.
                continue

            # Walk down any remaining namespace getting the object assuming
            # that if the first namespace exists the rest will exist.
            for namespace in namespaces[1:]:
                src_object = getattr(src_object, namespace)

            # decorate the Qt method if a decorator was provided.
            if target in decorators.get(classname, []):
                # staticmethod must be called on the decorated method to
                # prevent a TypeError being raised when the decorated method
                # is called.
                src_object = staticmethod(
                    decorators[classname][target](src_object))

            attrs[target] = src_object

        # Create the QtCompat class and install it into the namespace
        compat_class = type(classname, (_QtCompat,), attrs)
        setattr(Qt.QtCompat, classname, compat_class)
def _pyside2():
    """Initialise PySide2

    These functions serve to test the existence of a binding
    along with set it up in such a way that it aligns with
    the final step; adding members from the original binding
    to Qt.py

    Raises ImportError when PySide2 is unavailable, letting the
    installer fall through to the next candidate binding.
    """
    import PySide2 as module
    extras = ["QtUiTools"]
    try:
        try:
            # Before merge of PySide and shiboken
            import shiboken2
        except ImportError:
            # After merge of PySide and shiboken, May 2017
            from PySide2 import shiboken2
        extras.append("shiboken2")
    except ImportError:
        pass
    _setup(module, extras)
    Qt.__binding_version__ = module.__version__
    # Install the compatibility wrappers only for the helper modules that
    # were actually found by _setup().
    if hasattr(Qt, "_shiboken2"):
        Qt.QtCompat.wrapInstance = _wrapinstance
        Qt.QtCompat.getCppPointer = _getcpppointer
        Qt.QtCompat.delete = shiboken2.delete
    if hasattr(Qt, "_QtUiTools"):
        Qt.QtCompat.loadUi = _loadUi
    if hasattr(Qt, "_QtCore"):
        Qt.__qt_version__ = Qt._QtCore.qVersion()
    if hasattr(Qt, "_QtWidgets"):
        Qt.QtCompat.setSectionResizeMode = \
            Qt._QtWidgets.QHeaderView.setSectionResizeMode
    _reassign_misplaced_members("PySide2")
    _build_compatibility_members("PySide2")
def _pyside():
    """Initialise PySide

    Raises ImportError when PySide is unavailable, letting the installer
    fall through to the next candidate binding.
    """
    import PySide as module
    extras = ["QtUiTools"]
    try:
        try:
            # Before merge of PySide and shiboken
            import shiboken
        except ImportError:
            # After merge of PySide and shiboken, May 2017
            from PySide import shiboken
        extras.append("shiboken")
    except ImportError:
        pass
    _setup(module, extras)
    Qt.__binding_version__ = module.__version__
    if hasattr(Qt, "_shiboken"):
        Qt.QtCompat.wrapInstance = _wrapinstance
        Qt.QtCompat.getCppPointer = _getcpppointer
        Qt.QtCompat.delete = shiboken.delete
    if hasattr(Qt, "_QtUiTools"):
        Qt.QtCompat.loadUi = _loadUi
    if hasattr(Qt, "_QtGui"):
        # PySide is a Qt4 binding: alias its QtGui as QtWidgets so code
        # written against the Qt5 layout keeps working.
        setattr(Qt, "QtWidgets", _new_module("QtWidgets"))
        setattr(Qt, "_QtWidgets", Qt._QtGui)
        if hasattr(Qt._QtGui, "QX11Info"):
            setattr(Qt, "QtX11Extras", _new_module("QtX11Extras"))
            Qt.QtX11Extras.QX11Info = Qt._QtGui.QX11Info
        Qt.QtCompat.setSectionResizeMode = Qt._QtGui.QHeaderView.setResizeMode
    if hasattr(Qt, "_QtCore"):
        Qt.__qt_version__ = Qt._QtCore.qVersion()
    _reassign_misplaced_members("PySide")
    _build_compatibility_members("PySide")
def _pyqt5():
    """Initialise PyQt5

    Raises ImportError when PyQt5 is unavailable, letting the installer
    fall through to the next candidate binding.
    """
    import PyQt5 as module
    extras = ["uic"]
    try:
        import sip
        extras.append(sip.__name__)
    except ImportError:
        sip = None
    _setup(module, extras)
    # Install the compatibility wrappers only for the helper modules that
    # were actually found by _setup().
    if hasattr(Qt, "_sip"):
        Qt.QtCompat.wrapInstance = _wrapinstance
        Qt.QtCompat.getCppPointer = _getcpppointer
        Qt.QtCompat.delete = sip.delete
    if hasattr(Qt, "_uic"):
        Qt.QtCompat.loadUi = _loadUi
    if hasattr(Qt, "_QtCore"):
        Qt.__binding_version__ = Qt._QtCore.PYQT_VERSION_STR
        Qt.__qt_version__ = Qt._QtCore.QT_VERSION_STR
    if hasattr(Qt, "_QtWidgets"):
        Qt.QtCompat.setSectionResizeMode = \
            Qt._QtWidgets.QHeaderView.setSectionResizeMode
    _reassign_misplaced_members("PyQt5")
    _build_compatibility_members('PyQt5')
def _pyqt4():
    """Initialise PyQt4

    Configures the sip API (v2) before importing PyQt4, honouring the
    QT_SIP_API_HINT environment variable, then installs the common
    submodules and QtCompat members.  Raises ImportError when PyQt4 is
    unavailable or its sip API cannot be configured.
    """
    import sip
    # Validation of environment variable. Prevents an error if
    # the variable is invalid since it's just a hint.
    try:
        hint = int(QT_SIP_API_HINT)
    except TypeError:
        hint = None  # Variable was None, i.e. not set.
    except ValueError:
        # Bugfix: the %s placeholder was never substituted, so the error
        # message printed a literal "%s" instead of the offending value.
        raise ImportError(
            "QT_SIP_API_HINT=%s must be a 1 or 2" % QT_SIP_API_HINT)
    for api in ("QString",
                "QVariant",
                "QDate",
                "QDateTime",
                "QTextStream",
                "QTime",
                "QUrl"):
        try:
            sip.setapi(api, hint or 2)
        except AttributeError:
            raise ImportError("PyQt4 < 4.6 isn't supported by Qt.py")
        except ValueError:
            actual = sip.getapi(api)
            if not hint:
                raise ImportError("API version already set to %d" % actual)
            else:
                # Having provided a hint indicates a soft constraint, one
                # that doesn't throw an exception.
                sys.stderr.write(
                    "Warning: API '%s' has already been set to %d.\n"
                    % (api, actual)
                )
    import PyQt4 as module
    extras = ["uic"]
    try:
        import sip
        extras.append(sip.__name__)
    except ImportError:
        sip = None
    _setup(module, extras)
    if hasattr(Qt, "_sip"):
        Qt.QtCompat.wrapInstance = _wrapinstance
        Qt.QtCompat.getCppPointer = _getcpppointer
        Qt.QtCompat.delete = sip.delete
    if hasattr(Qt, "_uic"):
        Qt.QtCompat.loadUi = _loadUi
    if hasattr(Qt, "_QtGui"):
        # PyQt4 is a Qt4 binding: alias its QtGui as QtWidgets so code
        # written against the Qt5 layout keeps working.
        setattr(Qt, "QtWidgets", _new_module("QtWidgets"))
        setattr(Qt, "_QtWidgets", Qt._QtGui)
        if hasattr(Qt._QtGui, "QX11Info"):
            setattr(Qt, "QtX11Extras", _new_module("QtX11Extras"))
            Qt.QtX11Extras.QX11Info = Qt._QtGui.QX11Info
        Qt.QtCompat.setSectionResizeMode = \
            Qt._QtGui.QHeaderView.setResizeMode
    if hasattr(Qt, "_QtCore"):
        Qt.__binding_version__ = Qt._QtCore.PYQT_VERSION_STR
        Qt.__qt_version__ = Qt._QtCore.QT_VERSION_STR
    _reassign_misplaced_members("PyQt4")

    # QFileDialog QtCompat decorator
    def _standardizeQFileDialog(some_function):
        """Decorator that makes PyQt4 return conform to other bindings"""
        def wrapper(*args, **kwargs):
            ret = (some_function(*args, **kwargs))
            # PyQt4 only returns the selected filename, force it to a
            # standard return of the selected filename, and a empty string
            # for the selected filter
            return ret, ''
        wrapper.__doc__ = some_function.__doc__
        wrapper.__name__ = some_function.__name__
        return wrapper

    decorators = {
        "QFileDialog": {
            "getOpenFileName": _standardizeQFileDialog,
            "getOpenFileNames": _standardizeQFileDialog,
            "getSaveFileName": _standardizeQFileDialog,
        }
    }
    _build_compatibility_members('PyQt4', decorators)
def _none():
    """Internal option (used in installer)

    Installs stub modules so importing Qt.py succeeds without any real
    binding present; every attribute on the stubs resolves to None.
    """
    class Mock(object):
        def __getattr__(self, attr):
            return None

    Qt.__binding__ = "None"
    Qt.__qt_version__ = "0.0.0"
    Qt.__binding_version__ = "0.0.0"
    Qt.QtCompat.loadUi = lambda uifile, baseinstance=None: None
    Qt.QtCompat.setSectionResizeMode = lambda *args, **kwargs: None

    for submodule in _common_members:
        setattr(Qt, submodule, Mock())
        setattr(Qt, "_" + submodule, Mock())
def _log(text):
    """Write `text` to stdout, but only when verbose logging is enabled."""
    if not QT_VERBOSE:
        return
    sys.stdout.write("%s\n" % text)
def _convert(lines):
"""Convert compiled .ui file from PySide2 to Qt.py
Arguments:
lines (list): Each line of of .ui file
Usage:
>> with open("myui.py") as f:
.. lines = _convert(f.readlines())
"""
def parse(line):
line = line.replace("from PySide2 import", "from Qt import QtCompat,")
line = line.replace("QtWidgets.QApplication.translate",
"QtCompat.translate")
if "QtCore.SIGNAL" in line:
raise NotImplementedError("QtCore.SIGNAL is missing from PyQt5 "
"and so Qt.py does not support it: you "
"should avoid defining signals inside "
"your ui files.")
return line
parsed = list()
for line in lines:
line = parse(line)
parsed.append(line)
return parsed
def _cli(args):
    """Qt.py command-line interface"""
    import argparse

    parser = argparse.ArgumentParser()
    parser.add_argument("--convert",
                        help="Path to compiled Python module, e.g. my_ui.py")
    parser.add_argument("--compile",
                        help="Accept raw .ui file and compile with native "
                             "PySide2 compiler.")
    parser.add_argument("--stdout",
                        help="Write to stdout instead of file",
                        action="store_true")
    parser.add_argument("--stdin",
                        help="Read from stdin instead of file",
                        action="store_true")

    args = parser.parse_args(args)

    # These modes are reserved but not yet implemented.
    for flag, enabled in (("--stdout", args.stdout),
                          ("--stdin", args.stdin),
                          ("--compile", args.compile)):
        if enabled:
            raise NotImplementedError(flag)

    if not args.convert:
        return

    sys.stdout.write("#\n"
                     "# WARNING: --convert is an ALPHA feature.\n#\n"
                     "# See https://github.com/mottosso/Qt.py/pull/132\n"
                     "# for details.\n"
                     "#\n")

    # Read and transform the compiled module.
    with open(args.convert) as f:
        lines = _convert(f.readlines())

    # Keep an untouched copy alongside the original before overwriting it.
    backup = "%s_backup%s" % os.path.splitext(args.convert)
    sys.stdout.write("Creating \"%s\"..\n" % backup)
    shutil.copy(args.convert, backup)

    # Write the converted source back in place.
    with open(args.convert, "w") as f:
        f.write("".join(lines))

    sys.stdout.write("Successfully converted \"%s\"\n" % args.convert)
def _install():
    """Pick a binding, initialise it, and populate the Qt.* submodules."""
    # Default order (customise order and content via QT_PREFERRED_BINDING)
    default_order = ("PySide2", "PyQt5", "PySide", "PyQt4")
    preferred_order = list(
        b for b in QT_PREFERRED_BINDING.split(os.pathsep) if b
    )

    order = preferred_order or default_order

    available = {
        "PySide2": _pyside2,
        "PyQt5": _pyqt5,
        "PySide": _pyside,
        "PyQt4": _pyqt4,
        "None": _none
    }

    _log("Order: '%s'" % "', '".join(order))

    # Allow site-level customization of the available modules.
    _apply_site_config()

    found_binding = False
    for name in order:
        _log("Trying %s" % name)

        try:
            # Each initialiser raises ImportError when its binding is
            # absent, so the loop falls through to the next candidate.
            available[name]()
            found_binding = True
            break

        except ImportError as e:
            _log("ImportError: %s" % e)

        except KeyError:
            _log("ImportError: Preferred binding '%s' not found." % name)

    if not found_binding:
        # If no binding was found, raise
        raise ImportError("No Qt binding were found.")

    # Install individual members
    for name, members in _common_members.items():
        try:
            their_submodule = getattr(Qt, "_%s" % name)
        except AttributeError:
            continue

        our_submodule = getattr(Qt, name)

        # Enable import *
        __all__.append(name)

        # Enable direct import of submodule,
        # e.g. import Qt.QtCore
        sys.modules[__name__ + "." + name] = our_submodule

        for member in members:
            # Accept that a submodule may miss certain members.
            try:
                their_member = getattr(their_submodule, member)
            except AttributeError:
                _log("'%s.%s' was missing." % (name, member))
                continue

            setattr(our_submodule, member, their_member)

    # Enable direct import of QtCompat
    sys.modules[__name__ + ".QtCompat"] = Qt.QtCompat

    # Backwards compatibility
    if hasattr(Qt.QtCompat, 'loadUi'):
        Qt.QtCompat.load_ui = Qt.QtCompat.loadUi
# Module import side effect: select and initialise a binding immediately.
_install()

# Setup Binding Enum states
Qt.IsPySide2 = Qt.__binding__ == 'PySide2'
Qt.IsPyQt5 = Qt.__binding__ == 'PyQt5'
Qt.IsPySide = Qt.__binding__ == 'PySide'
Qt.IsPyQt4 = Qt.__binding__ == 'PyQt4'

"""Augment QtCompat

QtCompat contains wrappers and added functionality
to the original bindings, such as the CLI interface
and otherwise incompatible members between bindings,
such as `QHeaderView.setSectionResizeMode`.
"""

# Private hooks exposed for the test-suite and the CLI entry point below.
Qt.QtCompat._cli = _cli
Qt.QtCompat._convert = _convert

# Enable command-line interface
if __name__ == "__main__":
    _cli(sys.argv[1:])
# The MIT License (MIT)
#
# Copyright (c) 2016-2017 Marcus Ottosson
#
# Permission is hereby granted, free of charge, to any person obtaining a copy
# of this software and associated documentation files (the "Software"), to deal
# in the Software without restriction, including without limitation the rights
# to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
# copies of the Software, and to permit persons to whom the Software is
# furnished to do so, subject to the following conditions:
#
# The above copyright notice and this permission notice shall be included in
# all copies or substantial portions of the Software.
#
# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
# IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
# FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
# AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
# LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
# OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
# SOFTWARE.
#
# In PySide(2), loadUi does not exist, so we implement it
#
# `_UiLoader` is adapted from the qtpy project, which was further influenced
# by qt-helpers which was released under a 3-clause BSD license which in turn
# is based on a solution at:
#
# - https://gist.github.com/cpbotha/1b42a20c8f3eb9bb7cb8
#
# The License for this code is as follows:
#
# qt-helpers - a common front-end to various Qt modules
#
# Copyright (c) 2015, Chris Beaumont and Thomas Robitaille
#
# All rights reserved.
#
# Redistribution and use in source and binary forms, with or without
# modification, are permitted provided that the following conditions are
# met:
#
# * Redistributions of source code must retain the above copyright
# notice, this list of conditions and the following disclaimer.
# * Redistributions in binary form must reproduce the above copyright
# notice, this list of conditions and the following disclaimer in the
# documentation and/or other materials provided with the
# distribution.
# * Neither the name of the Glue project nor the names of its contributors
# may be used to endorse or promote products derived from this software
# without specific prior written permission.
#
# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS
# IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO,
# THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR
# PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR
# CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL,
# EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO,
# PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR
# PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF
# LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING
# NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS
# SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
#
# Which itself was based on the solution at
#
# https://gist.github.com/cpbotha/1b42a20c8f3eb9bb7cb8
#
# which was released under the MIT license:
#
# Copyright (c) 2011 Sebastian Wiesner <lunaryorn@gmail.com>
# Modifications by Charl Botha <cpbotha@vxlabs.com>
#
# Permission is hereby granted, free of charge, to any person obtaining a
# copy of this software and associated documentation files
# (the "Software"),to deal in the Software without restriction,
# including without limitation
# the rights to use, copy, modify, merge, publish, distribute, sublicense,
# and/or sell copies of the Software, and to permit persons to whom the
# Software is furnished to do so, subject to the following conditions:
#
# The above copyright notice and this permission notice shall be included
# in all copies or substantial portions of the Software.
#
# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS
# OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF
# MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT.
# IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY
# CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT,
# TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE
# SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.
|
krathjen/studiolibrary
|
src/studiovendor/Qt.py
|
Python
|
lgpl-3.0
| 56,512
|
[
"VisIt"
] |
cb0adba3f07913a5333a8aead5e7d8ae2a3a20652a049a84e12b97c6baa27306
|
# -*- coding: utf-8 -*-
# python-holidays
# ---------------
# A fast, efficient Python library for generating country, province and state
# specific sets of holidays on the fly. It aims to make determining whether a
# specific date is a holiday as fast and flexible as possible.
#
# Author: ryanss <ryanssdev@icloud.com> (c) 2014-2017
# dr-prodigy <maurizio.montel@gmail.com> (c) 2017-2020
# Website: https://github.com/dr-prodigy/python-holidays
# License: MIT (see LICENSE file)
from itertools import product
from datetime import date, datetime, timedelta
from dateutil.relativedelta import relativedelta, MO
import unittest
import warnings
import sys
import holidays
class TestBasics(unittest.TestCase):
    """Dict-like behaviour of HolidayBase: lookup, slicing, mutation,
    arithmetic combination of country calendars, and subclassing."""
    def setUp(self):
        # Fresh US calendar per test; years are populated lazily on lookup.
        self.holidays = holidays.US()
    def test_contains(self):
        """`in` works with datetime.date keys."""
        self.assertIn(date(2014, 1, 1), self.holidays)
        self.assertNotIn(date(2014, 1, 2), self.holidays)
    def test_getitem(self):
        """__getitem__/get with single dates and date-slice queries.

        Slicing returns the list of holiday dates in the range; the step
        may be an int (days) or a timedelta, and reversed bounds (start
        after stop) walk backwards.
        """
        self.assertEqual(self.holidays[date(2014, 1, 1)], "New Year's Day")
        self.assertEqual(self.holidays.get(date(2014, 1, 1)), "New Year's Day")
        self.assertRaises(KeyError, lambda: self.holidays[date(2014, 1, 2)])
        self.assertIsNone(self.holidays.get(date(2014, 1, 2)))
        self.assertListEqual(
            self.holidays[date(2013, 12, 31): date(2014, 1, 2)],
            [date(2014, 1, 1)]
        )
        self.assertListEqual(
            self.holidays[date(2013, 12, 24): date(2014, 1, 2)],
            [date(2013, 12, 25), date(2014, 1, 1)]
        )
        self.assertListEqual(
            self.holidays[date(2013, 12, 25): date(2014, 1, 2): 3],
            [date(2013, 12, 25)]
        )
        self.assertListEqual(
            self.holidays[date(2013, 12, 25): date(2014, 1, 2): 7],
            [date(2013, 12, 25), date(2014, 1, 1)]
        )
        self.assertListEqual(
            self.holidays[date(2014, 1, 2): date(2013, 12, 30)],
            [date(2014, 1, 1)]
        )
        self.assertListEqual(
            self.holidays[date(2014, 1, 2): date(2013, 12, 25)],
            [date(2014, 1, 1)]
        )
        self.assertListEqual(
            self.holidays[date(2014, 1, 2): date(2013, 12, 24)],
            [date(2014, 1, 1), date(2013, 12, 25)]
        )
        self.assertListEqual(
            self.holidays[date(2014, 1, 1): date(2013, 12, 24): 3],
            [date(2014, 1, 1)]
        )
        self.assertListEqual(
            self.holidays[date(2014, 1, 1): date(2013, 12, 24): 7],
            [date(2014, 1, 1), date(2013, 12, 25)]
        )
        # A negative step against forward bounds yields nothing.
        self.assertListEqual(
            self.holidays[date(2013, 12, 31): date(2014, 1, 2): -3],
            []
        )
        self.assertListEqual(
            self.holidays[
                date(2014, 1, 1): date(2013, 12, 24): timedelta(days=3)
            ],
            [date(2014, 1, 1)]
        )
        self.assertListEqual(
            self.holidays[
                date(2014, 1, 1): date(2013, 12, 24): timedelta(days=7)
            ],
            [date(2014, 1, 1), date(2013, 12, 25)]
        )
        self.assertListEqual(
            self.holidays[
                date(2013, 12, 31): date(2014, 1, 2): timedelta(days=3)
            ],
            []
        )
        # Open-ended slices are rejected; so are non-date steps and step 0.
        self.assertRaises(ValueError, lambda: self.holidays[date(2014, 1, 1):])
        self.assertRaises(ValueError, lambda: self.holidays[:date(2014, 1, 1)])
        self.assertRaises(
            TypeError,
            lambda: self.holidays[date(2014, 1, 1): date(2014, 1, 2): '']
        )
        self.assertRaises(
            ValueError,
            lambda: self.holidays[date(2014, 1, 1): date(2014, 1, 2): 0]
        )
    def test_get(self):
        """get() accepts date strings and supports a default value."""
        self.assertEqual(self.holidays.get('2014-01-01'), "New Year's Day")
        self.assertIsNone(self.holidays.get('2014-01-02'))
        self.assertFalse(self.holidays.get('2014-01-02', False))
        self.assertTrue(self.holidays.get('2014-01-02', True))
    def test_pop(self):
        """pop() removes a holiday and honours the optional default."""
        self.assertRaises(KeyError, lambda: self.holidays.pop('2014-01-02'))
        self.assertFalse(self.holidays.pop('2014-01-02', False))
        self.assertTrue(self.holidays.pop('2014-01-02', True))
        self.assertIn(date(2014, 1, 1), self.holidays)
        self.assertEqual(self.holidays.pop('2014-01-01'), "New Year's Day")
        self.assertNotIn(date(2014, 1, 1), self.holidays)
        # Popping one date leaves the rest of the year intact.
        self.assertIn(date(2014, 7, 4), self.holidays)
    def test_setitem(self):
        """Assigning a new date grows the calendar."""
        self.holidays = holidays.US(years=[2014])
        self.assertEqual(len(self.holidays), 10)
        self.holidays[date(2014, 1, 3)] = "Fake Holiday"
        self.assertEqual(len(self.holidays), 11)
        self.assertIn(date(2014, 1, 3), self.holidays)
        self.assertEqual(self.holidays.get(date(2014, 1, 3)), "Fake Holiday")
    def test_update(self):
        """update() accepts a mapping with date and string keys."""
        h = holidays.HolidayBase()
        h.update({
            date(2015, 1, 1): "New Year's Day",
            '2015-12-25': "Christmas Day",
        })
        self.assertIn('2015-01-01', h)
        self.assertIn(date(2015, 12, 25), h)
    def test_append(self):
        """append() accepts a list of keys or a single date/string key."""
        h = holidays.HolidayBase()
        h.update({
            date(2015, 1, 1): "New Year's Day",
            '2015-12-25': "Christmas Day",
        })
        h.append([date(2015, 4, 1), '2015-04-03'])
        h.append(date(2015, 4, 6))
        h.append('2015-04-07')
        self.assertIn('2015-01-01', h)
        self.assertIn(date(2015, 12, 25), h)
        self.assertIn('2015-04-01', h)
        self.assertNotIn('2015-04-02', h)
        self.assertIn('2015-04-03', h)
        self.assertNotIn('2015-04-04', h)
        self.assertNotIn('2015-04-05', h)
        self.assertIn('2015-04-06', h)
        self.assertIn('2015-04-07', h)
    def test_eq_ne(self):
        """Equality compares calendars, including their configured years."""
        us1 = holidays.UnitedStates()
        us2 = holidays.US()
        us3 = holidays.UnitedStates(years=[2014])
        us4 = holidays.US(years=[2014])
        ca1 = holidays.Canada()
        ca2 = holidays.CA()
        ca3 = holidays.Canada(years=[2014])
        ca4 = holidays.CA(years=[2014])
        self.assertEqual(us1, us2)
        self.assertEqual(us3, us4)
        self.assertEqual(ca1, ca2)
        self.assertEqual(ca3, ca4)
        self.assertNotEqual(us1, us3)
        self.assertNotEqual(us1, ca1)
        self.assertNotEqual(us3, ca3)
        # NOTE(review): duplicate of the us1/us3 assertion above -- possibly
        # intended to compare a different pair (e.g. us2/us4); confirm.
        self.assertNotEqual(us1, us3)
    def test_add(self):
        """Combining calendars with +, += and sum() merges their holidays."""
        ca = holidays.CA()
        us = holidays.US()
        mx = holidays.MX()
        na = ca + (us + mx)
        self.assertNotIn('2014-07-01', us)
        self.assertIn('2014-07-01', ca)
        self.assertNotIn('2014-07-04', ca)
        self.assertIn('2014-07-04', us)
        self.assertIn('2014-07-04', ca + us)
        self.assertIn('2014-07-04', us + ca)
        self.assertIn('2015-07-04', ca + us)
        self.assertIn('2015-07-04', us + ca)
        self.assertIn('2015-07-01', ca + us)
        self.assertIn('2015-07-01', us + ca)
        self.assertIn('2014-07-04', na)
        self.assertIn('2015-07-04', na)
        self.assertIn('2015-07-01', na)
        self.assertIn('2000-02-05', na)
        # The default CA province ('ON') survives combination in either order.
        self.assertEqual((ca + us).prov, 'ON')
        self.assertEqual((us + ca).prov, 'ON')
        ca = holidays.CA(years=[2014], expand=False)
        us = holidays.US(years=[2014, 2015], expand=True)
        # expand and years are merged (union) across operands.
        self.assertTrue((ca + us).expand)
        self.assertEqual((ca + us).years, {2014, 2015})
        self.assertEqual((us + ca).years, {2014, 2015})
        na = holidays.CA()
        na += holidays.US()
        na += holidays.MX()
        self.assertEqual(na.country, ['CA', 'US', 'MX'])
        self.assertIn('2014-07-04', na)
        self.assertIn('2014-07-04', na)
        self.assertIn('2015-07-04', na)
        self.assertIn('2015-07-04', na)
        self.assertIn('2015-07-01', na)
        self.assertIn('2015-07-01', na)
        self.assertIn('2000-02-05', na)
        self.assertEqual(na.prov, 'ON')
        na = holidays.CA() + holidays.US()
        na += holidays.MX()
        self.assertIn('2014-07-04', na)
        self.assertIn('2014-07-04', na)
        self.assertIn('2015-07-04', na)
        self.assertIn('2015-07-04', na)
        self.assertIn('2015-07-01', na)
        self.assertIn('2015-07-01', na)
        self.assertIn('2000-02-05', na)
        self.assertEqual(na.prov, 'ON')
        self.assertRaises(TypeError, lambda: holidays.US() + {})
        na = ca + (us + mx) + ca + (mx + us + holidays.CA(prov='BC'))
        self.assertIn('2000-02-05', na)
        self.assertIn('2014-02-10', na)
        self.assertIn('2014-02-17', na)
        self.assertIn('2014-07-04', na)
        provs = (holidays.CA(prov='ON', years=[2014]) +
                 holidays.CA(prov='BC', years=[2015]))
        self.assertIn("2015-02-09", provs)
        self.assertIn("2015-02-16", provs)
        self.assertEqual(provs.prov, ['ON', 'BC'])
        # sum() works because radd of 0 is tolerated for accumulation.
        a = sum(holidays.CA(prov=x) for x in holidays.CA.PROVINCES)
        self.assertEqual(a.country, 'CA')
        self.assertEqual(a.prov, holidays.CA.PROVINCES)
        self.assertIn("2015-02-09", a)
        self.assertIn("2015-02-16", a)
        na = holidays.CA() + holidays.US() + holidays.MX()
        self.assertIn(date(1969, 12, 25), na)
        self.assertEqual(na.get(date(1969, 7, 1)), "Dominion Day")
        self.assertEqual(na.get(date(1983, 7, 1)), "Canada Day")
        # Same-day holidays from different countries are comma-joined,
        # in operand order.
        self.assertEqual(na.get(date(1969, 12, 25)),
                         "Christmas Day, Navidad [Christmas]")
        na = holidays.MX() + holidays.CA() + holidays.US()
        self.assertEqual(na.get(date(1969, 12, 25)),
                         "Navidad [Christmas], Christmas Day")
    def test_get_list(self):
        """get_list() splits a combined same-day name into its parts."""
        westland = holidays.NZ(prov='WTL')
        chathams = holidays.NZ(prov='CIT')
        wild = westland + chathams
        self.assertEqual(wild[date(1969, 12, 1)],
                         ("Westland Anniversary Day, " +
                          "Chatham Islands Anniversary Day"))
        self.assertEqual(wild.get_list(date(1969, 12, 1)),
                         ["Westland Anniversary Day",
                          "Chatham Islands Anniversary Day"])
        self.assertEqual(wild.get_list(date(1969, 1, 1)),
                         ["New Year's Day"])
        self.assertEqual(westland.get_list(date(1969, 12, 1)),
                         ["Westland Anniversary Day"])
        self.assertEqual(westland.get_list(date(1969, 1, 1)),
                         ["New Year's Day"])
        self.assertEqual(chathams.get_list(date(1969, 12, 1)),
                         ["Chatham Islands Anniversary Day"])
        self.assertEqual(chathams.get_list(date(1969, 1, 1)),
                         ["New Year's Day"])
        ca = holidays.CA()
        us = holidays.US()
        mx = holidays.MX()
        na = ca + us + mx
        self.assertIn(date(1969, 12, 25), na)
        self.assertEqual(na.get_list(date(1969, 12, 25)),
                         ["Christmas Day", "Navidad [Christmas]"])
        self.assertEqual(na.get_list(date(1969, 7, 1)), ["Dominion Day"])
        # Non-holidays yield an empty list rather than raising.
        self.assertEqual(na.get_list(date(1969, 1, 3)), [])
    def test_list_supported_countries(self):
        """The supported-country list is sorted alphabetically."""
        self.assertEqual(holidays.list_supported_countries()[0], "AR")
        self.assertEqual(holidays.list_supported_countries()[-1], "ZA")
    def test_radd(self):
        """Right-adding a calendar to an int (other than via sum) fails."""
        self.assertRaises(TypeError, lambda: 1 + holidays.US())
    def test_inheritance(self):
        """Subclasses can drop, add or define holidays via _populate()."""
        class NoColumbusHolidays(holidays.US):
            def _populate(self, year):
                holidays.US._populate(self, year)
                # Drop the 2nd Monday of October (Columbus Day).
                self.pop(date(year, 10, 1) + relativedelta(weekday=MO(+2)))
        hdays = NoColumbusHolidays()
        self.assertIn(date(2014, 10, 13), self.holidays)
        self.assertNotIn(date(2014, 10, 13), hdays)
        self.assertIn(date(2014, 1, 1), hdays)
        self.assertIn(date(2020, 10, 12), self.holidays)
        self.assertNotIn(date(2020, 10, 12), hdays)
        self.assertIn(date(2020, 1, 1), hdays)
        class NinjaTurtlesHolidays(holidays.US):
            def _populate(self, year):
                holidays.US._populate(self, year)
                self[date(year, 7, 13)] = "Ninja Turtle's Day"
        hdays = NinjaTurtlesHolidays()
        self.assertNotIn(date(2014, 7, 13), self.holidays)
        self.assertIn(date(2014, 7, 13), hdays)
        self.assertIn(date(2014, 1, 1), hdays)
        self.assertNotIn(date(2020, 7, 13), self.holidays)
        self.assertIn(date(2020, 7, 13), hdays)
        self.assertIn(date(2020, 1, 1), hdays)
        class NewCountry(holidays.HolidayBase):
            def _populate(self, year):
                self[date(year, 1, 2)] = "New New Year's"
        hdays = NewCountry()
        self.assertNotIn(date(2014, 1, 1), hdays)
        self.assertIn(date(2014, 1, 2), hdays)
        class Dec31Holiday(holidays.HolidayBase):
            def _populate(self, year):
                self[date(year, 12, 31)] = "New Year's Eve"
        self.assertIn(date(2014, 12, 31), Dec31Holiday())
class TestArgs(unittest.TestCase):
    """Constructor arguments: country selection, years, expand, observed."""
    def setUp(self):
        self.holidays = holidays.US()
    def test_country(self):
        """Country code and province defaults for US and CA calendars."""
        self.assertEqual(self.holidays.country, 'US')
        self.assertIn(date(2014, 7, 4), self.holidays)
        self.assertNotIn(date(2014, 7, 1), self.holidays)
        self.holidays = holidays.UnitedStates()
        self.assertEqual(self.holidays.country, 'US')
        self.assertIn(date(2014, 7, 4), self.holidays)
        self.assertNotIn(date(2014, 7, 1), self.holidays)
        self.assertEqual(self.holidays.country, 'US')
        self.holidays = holidays.CA()
        self.assertEqual(self.holidays.country, 'CA')
        # Ontario is the default CA province.
        self.assertEqual(self.holidays.prov, 'ON')
        self.assertIn(date(2014, 7, 1), self.holidays)
        self.assertNotIn(date(2014, 7, 4), self.holidays)
        self.holidays = holidays.CA(prov='BC')
        self.assertEqual(self.holidays.country, 'CA')
        self.assertEqual(self.holidays.prov, 'BC')
        self.assertIn(date(2014, 7, 1), self.holidays)
        self.assertNotIn(date(2014, 7, 4), self.holidays)
    def test_years(self):
        """Years populate lazily; duplicates collapse; int/range accepted.

        Statement order matters here: each date lookup triggers population
        of that date's year, so the year set grows as the test proceeds.
        """
        self.assertEqual(len(self.holidays.years), 0)
        self.assertNotIn(date(2014, 1, 2), self.holidays)
        self.assertEqual(len(self.holidays.years), 1)
        self.assertIn(2014, self.holidays.years)
        self.assertNotIn(date(2013, 1, 2), self.holidays)
        self.assertNotIn(date(2014, 1, 2), self.holidays)
        self.assertNotIn(date(2015, 1, 2), self.holidays)
        self.assertEqual(len(self.holidays.years), 3)
        self.assertIn(2013, self.holidays.years)
        self.assertIn(2015, self.holidays.years)
        self.holidays = holidays.US(years=range(2010, 2015 + 1))
        self.assertEqual(len(self.holidays.years), 6)
        self.assertNotIn(2009, self.holidays.years)
        self.assertIn(2010, self.holidays.years)
        self.assertIn(2015, self.holidays.years)
        self.assertNotIn(2016, self.holidays.years)
        self.holidays = holidays.US(years=(2013, 2015, 2015))
        self.assertEqual(len(self.holidays.years), 2)
        self.assertIn(2013, self.holidays.years)
        self.assertNotIn(2014, self.holidays.years)
        self.assertIn(2015, self.holidays.years)
        # Observed New Year's Day 2022 lands on 2021-12-31.
        self.assertIn(date(2021, 12, 31), holidays.US(years=[2022]).keys())
        self.holidays = holidays.US(years=2015)
        self.assertNotIn(2014, self.holidays.years)
        self.assertIn(2015, self.holidays.years)
    def test_expand(self):
        """expand=False keeps the year set fixed on unknown-date lookups."""
        self.holidays = holidays.US(years=(2013, 2015), expand=False)
        self.assertEqual(len(self.holidays.years), 2)
        self.assertIn(2013, self.holidays.years)
        self.assertNotIn(2014, self.holidays.years)
        self.assertIn(2015, self.holidays.years)
        self.assertNotIn(date(2014, 1, 1), self.holidays)
        self.assertEqual(len(self.holidays.years), 2)
        self.assertNotIn(2014, self.holidays.years)
    def test_observed(self):
        """The observed flag toggles weekend-shifted dates on and off."""
        self.holidays = holidays.US(observed=False)
        self.assertIn(date(2000, 1, 1), self.holidays)
        self.assertNotIn(date(1999, 12, 31), self.holidays)
        self.assertIn(date(2012, 1, 1), self.holidays)
        self.assertNotIn(date(2012, 1, 2), self.holidays)
        self.holidays.observed = True
        self.assertIn(date(2000, 1, 1), self.holidays)
        self.assertIn(date(1999, 12, 31), self.holidays)
        self.assertIn(date(2012, 1, 1), self.holidays)
        self.assertIn(date(2012, 1, 2), self.holidays)
        self.holidays.observed = False
        self.assertIn(date(2000, 1, 1), self.holidays)
        self.assertNotIn(date(1999, 12, 31), self.holidays)
        self.assertIn(date(2012, 1, 1), self.holidays)
        self.assertNotIn(date(2012, 1, 2), self.holidays)
        self.holidays = holidays.US(years=[2022], observed=False)
        self.assertNotIn(date(2021, 12, 31), self.holidays.keys())
        self.holidays = holidays.CA(observed=False)
        self.assertNotIn(date(1878, 7, 3), self.holidays)
        self.holidays.observed = True
        self.assertIn(date(2018, 7, 2), self.holidays)
class TestKeyTransforms(unittest.TestCase):
    """Keys given as date, datetime, POSIX timestamp or string must all be
    normalised to calendar dates by the holiday mapping."""
    def setUp(self):
        self.holidays = holidays.US()
    def test_dates(self):
        """Plain datetime.date keys round-trip through get/set/pop."""
        new_years = date(2014, 1, 1)
        fake_day = date(2014, 1, 3)
        self.assertIn(new_years, self.holidays)
        self.assertEqual(self.holidays[new_years], "New Year's Day")
        self.holidays[fake_day] = "Fake Holiday"
        self.assertIn(fake_day, self.holidays)
        self.assertEqual(self.holidays.pop(fake_day), "Fake Holiday")
        self.assertNotIn(fake_day, self.holidays)
    def test_datetimes(self):
        """datetime keys are truncated to their date component, so any
        time-of-day maps to the same holiday."""
        self.assertIn(datetime(2014, 1, 1, 13, 45), self.holidays)
        self.assertEqual(
            self.holidays[datetime(2014, 1, 1, 13, 45)], "New Year's Day")
        self.holidays[datetime(2014, 1, 3, 1, 1)] = "Fake Holiday"
        self.assertIn(datetime(2014, 1, 3, 2, 2), self.holidays)
        self.assertEqual(
            self.holidays.pop(datetime(2014, 1, 3, 4, 4)), "Fake Holiday")
        self.assertNotIn(datetime(2014, 1, 3, 2, 2), self.holidays)
    def test_timestamp(self):
        """Int and float POSIX timestamps resolve to their calendar date."""
        for stamp in (1388552400, 1388552400.01):
            self.assertIn(stamp, self.holidays)
            self.assertEqual(self.holidays[stamp], "New Year's Day")
        self.holidays[1388725200] = "Fake Holiday"
        self.assertIn(1388725201, self.holidays)
        self.assertEqual(self.holidays.pop(1388725202), "Fake Holiday")
        self.assertNotIn(1388725201, self.holidays)
    def test_strings(self):
        """ISO and US-style date strings are parsed into dates."""
        for text in ("2014-01-01", "01/01/2014"):
            self.assertIn(text, self.holidays)
            self.assertEqual(self.holidays[text], "New Year's Day")
        self.holidays["01/03/2014"] = "Fake Holiday"
        self.assertIn("01/03/2014", self.holidays)
        self.assertEqual(self.holidays.pop("01/03/2014"), "Fake Holiday")
        self.assertNotIn("01/03/2014", self.holidays)
    def test_exceptions(self):
        """Unparseable keys raise TypeError/ValueError, not KeyError."""
        bad_key = (TypeError, ValueError)
        self.assertRaises(bad_key, lambda: "abc" in self.holidays)
        self.assertRaises(bad_key, lambda: self.holidays.get("abc123"))
        self.assertRaises(bad_key, self.holidays.__setitem__, "abc", "Test")
        self.assertRaises(bad_key, lambda: {} in self.holidays)
class TestCountryHoliday(unittest.TestCase):
    """CountryHoliday factory: build a calendar from a country-code string."""
    def setUp(self):
        self.holidays = holidays.CountryHoliday('US')
    def test_country(self):
        """The resulting calendar reports the requested country code."""
        self.assertEqual('US', self.holidays.country)
    def test_country_years(self):
        """The years keyword is forwarded and stored as a set."""
        chosen_years = [2015, 2016]
        calendar = holidays.CountryHoliday('US', years=chosen_years)
        self.assertEqual(calendar.years, set(chosen_years))
    def test_country_state(self):
        """The state keyword is forwarded unchanged."""
        self.assertEqual(
            holidays.CountryHoliday('US', state='NY').state, 'NY')
    def test_country_province(self):
        """The prov keyword is forwarded unchanged."""
        self.assertEqual(
            holidays.CountryHoliday('AU', prov='NT').prov, 'NT')
    def test_exceptions(self):
        """An unknown country code raises KeyError."""
        self.assertRaises(KeyError, lambda: holidays.CountryHoliday('XXXX'))
class TestAruba(unittest.TestCase):
    """Aruba (AW) national holidays."""
    def setUp(self):
        self.holidays = holidays.AW()
    def _assert_holiday(self, year, month, day, years=None):
        """Assert (year, month, day) is a holiday; if *years* is given,
        first rebuild the calendar restricted to those years."""
        if years is not None:
            self.holidays = holidays.AW(years=years)
        self.assertIn(date(year, month, day), self.holidays)
    def test_2017(self):
        """All expected 2017 holidays are present."""
        for month, day in [
            (1, 1), (1, 25), (3, 18), (2, 27), (4, 14), (4, 17),
            (4, 27), (5, 1), (5, 25), (12, 25), (12, 26),
        ]:
            self._assert_holiday(2017, month, day)
    def test_new_years(self):
        self._assert_holiday(2017, 1, 1)
    def test_betico_day(self):
        self._assert_holiday(2017, 1, 25)
    def test_carnaval_monday(self):
        self._assert_holiday(2017, 2, 27)
    def test_anthem_and_flag_day(self):
        self._assert_holiday(2017, 3, 18)
    def test_good_friday(self):
        self._assert_holiday(2017, 4, 14)
    def test_easter_monday(self):
        self._assert_holiday(2017, 4, 17)
    def test_labour_day(self):
        self._assert_holiday(2017, 5, 1)
    def test_queens_day_between_1891_and_1948(self):
        # Between 1891 and 1948 Queens Day was celebrated on 8-31
        self._assert_holiday(1901, 8, 31, years=[1901])
    def test_queens_day_between_1891_and_1948_substituted_later(self):
        # Between 1891 and 1948 Queens Day was celebrated on 9-1
        # (one day later) when Queens Day falls on a Sunday
        self._assert_holiday(1947, 9, 1, years=[1947])
    def test_queens_day_between_1949_and_2013(self):
        self._assert_holiday(1965, 4, 30, years=[1965])
    def test_queens_day_between_1949_and_1980_substituted_later(self):
        self._assert_holiday(1967, 5, 1, years=[1967])
    def test_queens_day_between_1980_and_2013_substituted_earlier(self):
        self._assert_holiday(2006, 4, 29, years=[2006])
    def test_kings_day_after_2014(self):
        # No Kings Day on 4-27 before 2014.
        self.holidays = holidays.AW(years=[2013])
        self.assertNotIn(date(2013, 4, 27), self.holidays)
        self._assert_holiday(2017, 4, 27, years=[2017])
    def test_kings_day_after_2014_substituted_earlier(self):
        self._assert_holiday(2188, 4, 26, years=[2188])
    def test_ascension_day(self):
        self._assert_holiday(2017, 5, 25, years=2017)
    def test_christmas(self):
        self._assert_holiday(2017, 12, 25, years=2017)
    def test_second_christmas(self):
        self._assert_holiday(2017, 12, 26, years=2017)
class TestBulgaria(unittest.TestCase):
    """Bulgarian public holidays (generated for 1990 onwards only)."""
    def setUp(self):
        self.holidays = holidays.Bulgaria()
    def _assert_every_year(self, month, day):
        """Assert the fixed-date holiday (month, day) exists for 1990-2019."""
        for year in range(1990, 2020):
            self.assertIn(date(year, month, day), self.holidays)
    def test_before_1990(self):
        """No holidays are produced for years before 1990."""
        self.assertEqual(len(holidays.Bulgaria(years=[1989])), 0)
    def test_new_years_day(self):
        self._assert_every_year(1, 1)
    def test_liberation_day(self):
        self._assert_every_year(3, 3)
    def test_labour_day(self):
        self._assert_every_year(5, 1)
    def test_saint_georges_day(self):
        self._assert_every_year(5, 6)
    def test_twenty_fourth_of_may(self):
        self._assert_every_year(5, 24)
    def test_unification_day(self):
        self._assert_every_year(9, 6)
    def test_independence_day(self):
        self._assert_every_year(9, 22)
    def test_national_awakening_day(self):
        self._assert_every_year(11, 1)
    def test_christmas(self):
        self._assert_every_year(12, 24)
        self._assert_every_year(12, 25)
        self._assert_every_year(12, 26)
    def test_easter(self):
        """Easter Sunday plus the preceding Friday and Saturday."""
        easter_sundays = [
            (2000, 4, 30), (2001, 4, 15), (2002, 5, 5), (2003, 4, 27),
            (2004, 4, 11), (2005, 5, 1), (2006, 4, 23), (2007, 4, 8),
            (2008, 4, 27), (2009, 4, 19), (2010, 4, 4), (2011, 4, 24),
            (2012, 4, 15), (2013, 5, 5), (2014, 4, 20), (2015, 4, 12),
            (2016, 5, 1), (2017, 4, 16), (2018, 4, 8), (2019, 4, 28),
            (2020, 4, 19), (2021, 5, 2), (2022, 4, 24),
        ]
        for year, month, day in easter_sundays:
            sunday = date(year, month, day)
            # offsets: 2 = Good Friday, 1 = Holy Saturday, 0 = Easter Sunday
            for offset in (2, 1, 0):
                self.assertIn(sunday - timedelta(days=offset), self.holidays)
class TestCA(unittest.TestCase):
    """Canadian holidays, nationally and per province/territory."""
    def setUp(self):
        # observed=False so observed-shift assertions start disabled; tests
        # toggle self.holidays.observed where they need the shifted dates.
        self.holidays = holidays.CA(observed=False)
    def test_new_years(self):
        """New Year's Day, with observed shifts when enabled."""
        self.assertNotIn(date(2010, 12, 31), self.holidays)
        self.assertNotIn(date(2017, 1, 2), self.holidays)
        self.holidays.observed = True
        self.assertIn(date(2010, 12, 31), self.holidays)
        self.assertIn(date(2017, 1, 2), self.holidays)
        self.holidays.observed = False
        for year in range(1900, 2100):
            dt = date(year, 1, 1)
            self.assertIn(dt, self.holidays)
            self.assertNotIn(dt + relativedelta(days=-1), self.holidays)
            self.assertNotIn(dt + relativedelta(days=+1), self.holidays)
    def test_islander_day(self):
        """Islander Day (PE only, from 2009)."""
        pei_holidays = holidays.CA(prov="PE")
        for dt in [date(2009, 2, 9), date(2010, 2, 15), date(2011, 2, 21),
                   date(2012, 2, 20), date(2013, 2, 18), date(2014, 2, 17),
                   date(2015, 2, 16), date(2016, 2, 15), date(2020, 2, 17)]:
            if dt.year >= 2010:
                self.assertNotEqual(self.holidays[dt], "Islander Day")
            elif dt.year == 2009:
                self.assertNotIn(dt, self.holidays)
            self.assertIn(dt, pei_holidays)
            self.assertNotIn(dt + relativedelta(days=-1), pei_holidays)
            self.assertNotIn(dt + relativedelta(days=+1), pei_holidays)
    def test_family_day(self):
        """Family Day and provincial equivalents (AB first, others later)."""
        ab_holidays = holidays.CA(prov="AB")
        bc_holidays = holidays.CA(prov="BC")
        mb_holidays = holidays.CA(prov="MB")
        sk_holidays = holidays.CA(prov="SK")
        nb_holidays = holidays.CA(prov="NB")
        # 1990-2006: Alberta only.
        for dt in [date(1990, 2, 19), date(1999, 2, 15), date(2000, 2, 21),
                   date(2006, 2, 20)]:
            self.assertNotIn(dt, self.holidays)
            self.assertIn(dt, ab_holidays)
            self.assertNotIn(dt, bc_holidays)
            self.assertNotIn(dt, mb_holidays)
            self.assertNotIn(dt, sk_holidays)
        # 2007: Saskatchewan joins.
        dt = date(2007, 2, 19)
        self.assertNotIn(dt, self.holidays)
        self.assertIn(dt, ab_holidays)
        self.assertNotIn(dt, bc_holidays)
        self.assertNotIn(dt, mb_holidays)
        self.assertIn(dt, sk_holidays)
        # 2008+: Ontario (default) and Manitoba join.
        for dt in [date(2008, 2, 18), date(2012, 2, 20), date(2014, 2, 17),
                   date(2018, 2, 19)]:
            self.assertIn(dt, self.holidays)
            self.assertIn(dt, ab_holidays)
            self.assertNotIn(dt, bc_holidays)
            self.assertIn(dt, mb_holidays)
            self.assertIn(dt, sk_holidays)
        for dt in [date(2018, 2, 19)]:
            self.assertIn(dt, nb_holidays)
        # 2019+: BC moves to the third Monday, matching the rest.
        for dt in [date(2019, 2, 18), date(2020, 2, 17)]:
            self.assertIn(dt, self.holidays)
            self.assertIn(dt, ab_holidays)
            self.assertIn(dt, bc_holidays)
            self.assertIn(dt, mb_holidays)
            self.assertIn(dt, sk_holidays)
        # 2013-2018: BC used the second Monday.
        for dt in [date(2013, 2, 11), date(2016, 2, 8)]:
            self.assertNotIn(dt, self.holidays)
            self.assertNotIn(dt, ab_holidays)
            self.assertIn(dt, bc_holidays)
            self.assertNotIn(dt, mb_holidays)
            self.assertNotIn(dt, sk_holidays)
        # Manitoba names the same day "Louis Riel Day".
        self.assertEqual(mb_holidays[date(2014, 2, 17)], "Louis Riel Day")
    def test_st_patricks_day(self):
        """St. Patrick's Day (NL only)."""
        nl_holidays = holidays.CA(prov="NL", observed=False)
        for dt in [date(1900, 3, 19), date(1999, 3, 15), date(2000, 3, 20),
                   date(2012, 3, 19), date(2013, 3, 18), date(2014, 3, 17),
                   date(2015, 3, 16), date(2016, 3, 14), date(2020, 3, 16)]:
            self.assertNotIn(dt, self.holidays)
            self.assertIn(dt, nl_holidays)
            self.assertNotIn(dt + relativedelta(days=-1), nl_holidays)
            self.assertNotIn(dt + relativedelta(days=+1), nl_holidays)
    def test_good_friday(self):
        """Good Friday everywhere except Quebec."""
        qc_holidays = holidays.CA(prov="QC")
        for dt in [date(1900, 4, 13), date(1901, 4, 5), date(1902, 3, 28),
                   date(1999, 4, 2), date(2000, 4, 21), date(2010, 4, 2),
                   date(2018, 3, 30), date(2019, 4, 19), date(2020, 4, 10)]:
            self.assertIn(dt, self.holidays)
            self.assertNotIn(dt + relativedelta(days=-1), self.holidays)
            self.assertNotIn(dt + relativedelta(days=+1), self.holidays)
            self.assertNotIn(dt, qc_holidays)
    def test_easter_monday(self):
        """Easter Monday in Quebec only."""
        qc_holidays = holidays.CA(prov="QC")
        for dt in [date(1900, 4, 16), date(1901, 4, 8), date(1902, 3, 31),
                   date(1999, 4, 5), date(2000, 4, 24), date(2010, 4, 5),
                   date(2018, 4, 2), date(2019, 4, 22), date(2020, 4, 13)]:
            self.assertNotIn(dt, self.holidays)
            self.assertIn(dt, qc_holidays)
            self.assertNotIn(dt + relativedelta(days=-1), qc_holidays)
            self.assertNotIn(dt + relativedelta(days=+1), qc_holidays)
    def test_st_georges_day(self):
        """St. George's Day (NL only)."""
        nl_holidays = holidays.CA(prov="NL")
        for dt in [date(1990, 4, 23), date(1999, 4, 26), date(2000, 4, 24),
                   date(2010, 4, 19), date(2016, 4, 25), date(2020, 4, 20)]:
            self.assertNotIn(dt, self.holidays)
            self.assertIn(dt, nl_holidays)
            self.assertNotIn(dt + relativedelta(days=-1), nl_holidays)
            self.assertNotIn(dt + relativedelta(days=+1), nl_holidays)
    def test_victoria_day(self):
        """Victoria Day (Monday preceding May 25)."""
        for dt in [date(1953, 5, 18), date(1999, 5, 24), date(2000, 5, 22),
                   date(2010, 5, 24), date(2015, 5, 18), date(2020, 5, 18)]:
            self.assertIn(dt, self.holidays)
            self.assertNotIn(dt + relativedelta(days=-1), self.holidays)
            self.assertNotIn(dt + relativedelta(days=+1), self.holidays)
    def test_national_aboriginal_day(self):
        """National Aboriginal Day (NT only, from 1996)."""
        nt_holidays = holidays.CA(prov="NT")
        self.assertNotIn(date(1995, 6, 21), nt_holidays)
        for year in range(1996, 2100):
            dt = date(year, 6, 21)
            self.assertNotIn(dt, self.holidays)
            self.assertIn(dt, nt_holidays)
            self.assertNotIn(dt + relativedelta(days=-1), nt_holidays)
            self.assertNotIn(dt + relativedelta(days=+1), nt_holidays)
    def test_st_jean_baptiste_day(self):
        """St. Jean Baptiste Day (QC only, from 1925; observed from 2001)."""
        qc_holidays = holidays.CA(prov="QC", observed=False)
        self.assertNotIn(date(1924, 6, 24), qc_holidays)
        for year in range(1925, 2100):
            dt = date(year, 6, 24)
            self.assertNotIn(dt, self.holidays)
            self.assertIn(dt, qc_holidays)
            self.assertNotIn(dt + relativedelta(days=-1), qc_holidays)
            self.assertNotIn(dt + relativedelta(days=+1), qc_holidays)
        self.assertNotIn(date(2001, 6, 25), qc_holidays)
        qc_holidays.observed = True
        self.assertIn(date(2001, 6, 25), qc_holidays)
    def test_discovery_day(self):
        """Discovery Day: June dates in NL, August dates in YT."""
        nl_holidays = holidays.CA(prov="NL")
        yt_holidays = holidays.CA(prov="YT")
        for dt in [date(1997, 6, 23), date(1999, 6, 21), date(2000, 6, 26),
                   date(2010, 6, 21), date(2016, 6, 27), date(2020, 6, 22)]:
            self.assertNotIn(dt, self.holidays)
            self.assertIn(dt, nl_holidays)
            self.assertNotIn(dt, yt_holidays)
        for dt in [date(1912, 8, 19), date(1999, 8, 16), date(2000, 8, 21),
                   date(2006, 8, 21), date(2016, 8, 15), date(2020, 8, 17)]:
            self.assertNotIn(dt, self.holidays)
            self.assertNotIn(dt, nl_holidays)
            self.assertIn(dt, yt_holidays)
    def test_canada_day(self):
        """Canada Day (July 1), with observed shifts when enabled."""
        for year in range(1900, 2100):
            dt = date(year, 7, 1)
            self.assertIn(dt, self.holidays)
            self.assertNotIn(dt + relativedelta(days=-1), self.holidays)
            self.assertNotIn(dt + relativedelta(days=+1), self.holidays)
        self.assertNotIn(date(2006, 7, 3), self.holidays)
        self.assertNotIn(date(2007, 7, 2), self.holidays)
        self.holidays.observed = True
        self.assertIn(date(2006, 7, 3), self.holidays)
        self.assertIn(date(2007, 7, 2), self.holidays)
    def test_nunavut_day(self):
        """Nunavut Day (NU: April 1 in 2000, July 9 from 2001)."""
        nu_holidays = holidays.CA(prov="NU", observed=False)
        self.assertNotIn(date(1999, 7, 9), nu_holidays)
        self.assertNotIn(date(2000, 7, 9), nu_holidays)
        self.assertIn(date(2000, 4, 1), nu_holidays)
        for year in range(2001, 2100):
            dt = date(year, 7, 9)
            self.assertNotIn(dt, self.holidays)
            self.assertIn(dt, nu_holidays)
            self.assertNotIn(dt + relativedelta(days=-1), nu_holidays)
            self.assertNotIn(dt + relativedelta(days=+1), nu_holidays)
        self.assertNotIn(date(2017, 7, 10), nu_holidays)
        nu_holidays.observed = True
        self.assertIn(date(2017, 7, 10), nu_holidays)
    def test_civic_holiday(self):
        """Civic Holiday (first Monday of August; BC joins in 1974)."""
        bc_holidays = holidays.CA(prov="BC")
        for dt in [date(1900, 8, 6), date(1955, 8, 1), date(1973, 8, 6)]:
            self.assertIn(dt, self.holidays)
            self.assertNotIn(dt, bc_holidays)
        for dt in [date(1974, 8, 5), date(1999, 8, 2), date(2000, 8, 7),
                   date(2010, 8, 2), date(2015, 8, 3), date(2020, 8, 3)]:
            self.assertIn(dt, self.holidays)
            self.assertIn(dt, bc_holidays)
    def test_labour_day(self):
        """Labour Day (first Monday of September, from 1894)."""
        self.assertNotIn(date(1893, 9, 4), self.holidays)
        for dt in [date(1894, 9, 3), date(1900, 9, 3), date(1999, 9, 6),
                   date(2000, 9, 4), date(2014, 9, 1), date(2015, 9, 7)]:
            self.assertIn(dt, self.holidays)
            self.assertNotIn(dt + relativedelta(days=-1), self.holidays)
            self.assertNotIn(dt + relativedelta(days=+1), self.holidays)
    def test_thanksgiving(self):
        """Thanksgiving (second Monday of October), absent in NB."""
        # NOTE(review): the local is named ns_holidays but is built with
        # prov="NB" -- confirm which province this was meant to cover.
        ns_holidays = holidays.CA(prov="NB")
        for dt in [date(1931, 10, 12), date(1990, 10, 8), date(1999, 10, 11),
                   date(2000, 10, 9), date(2013, 10, 14), date(2020, 10, 12)]:
            self.assertIn(dt, self.holidays)
            self.assertNotIn(dt + relativedelta(days=-1), self.holidays)
            self.assertNotIn(dt + relativedelta(days=+1), self.holidays)
            self.assertNotIn(dt, ns_holidays)
    def test_remembrance_day(self):
        """Remembrance Day (AB and NL, from 1931; observed only in NL)."""
        ab_holidays = holidays.CA(prov="AB", observed=False)
        nl_holidays = holidays.CA(prov="NL", observed=False)
        self.assertNotIn(date(1930, 11, 11), ab_holidays)
        self.assertNotIn(date(1930, 11, 11), nl_holidays)
        for year in range(1931, 2100):
            dt = date(year, 11, 11)
            self.assertNotIn(dt, self.holidays)
            self.assertIn(dt, ab_holidays)
            self.assertIn(dt, nl_holidays)
            self.assertNotIn(dt + relativedelta(days=-1), nl_holidays)
            self.assertNotIn(dt + relativedelta(days=+1), nl_holidays)
        self.assertNotIn(date(2007, 11, 12), ab_holidays)
        self.assertNotIn(date(2007, 11, 12), nl_holidays)
        ab_holidays.observed = True
        nl_holidays.observed = True
        # Even with observed=True, AB does not shift Remembrance Day.
        self.assertNotIn(date(2007, 11, 12), ab_holidays)
        self.assertIn(date(2007, 11, 12), nl_holidays)
    def test_christmas_day(self):
        """Christmas Day, with observed shifts when enabled."""
        for year in range(1900, 2100):
            dt = date(year, 12, 25)
            self.assertIn(dt, self.holidays)
            self.assertNotIn(dt + relativedelta(days=-1), self.holidays)
        self.assertNotIn(date(2010, 12, 24), self.holidays)
        self.assertNotEqual(self.holidays[date(2011, 12, 26)],
                            "Christmas Day (Observed)")
        self.holidays.observed = True
        self.assertIn(date(2010, 12, 24), self.holidays)
        self.assertEqual(self.holidays[date(2011, 12, 26)],
                         "Christmas Day (Observed)")
    def test_boxing_day(self):
        """Boxing Day (December 26), with observed shifts when enabled."""
        for year in range(1900, 2100):
            dt = date(year, 12, 26)
            self.assertIn(dt, self.holidays)
            self.assertNotIn(dt + relativedelta(days=+1), self.holidays)
        self.assertNotIn(date(2009, 12, 28), self.holidays)
        self.assertNotIn(date(2010, 12, 27), self.holidays)
        self.holidays.observed = True
        self.assertIn(date(2009, 12, 28), self.holidays)
        self.assertIn(date(2010, 12, 27), self.holidays)
class TestCO(unittest.TestCase):
    """Colombian holidays in observed mode."""
    def setUp(self):
        self.holidays = holidays.CO(observed=True)
    def test_2016(self):
        # http://www.officeholidays.com/countries/colombia/
        expected_2016 = [
            (1, 1), (1, 11), (3, 21), (3, 24), (3, 25), (5, 1),
            (5, 9), (5, 30), (6, 6), (7, 4), (7, 20), (8, 7),
            (8, 15), (10, 17), (11, 7), (11, 14), (12, 8), (12, 25),
        ]
        for month, day in expected_2016:
            self.assertIn(date(2016, month, day), self.holidays)
    def test_others(self):
        # holidays falling on weekend are not reported in observed mode
        for weekend_date in (date(2017, 1, 1), date(2014, 7, 20),
                             date(2018, 8, 12)):
            self.assertNotIn(weekend_date, self.holidays)
        # dates moved to the following Monday are reported instead
        for moved_date in (date(2014, 1, 6), date(2012, 3, 19),
                           date(2015, 6, 29), date(2010, 8, 16),
                           date(2015, 10, 12), date(2010, 11, 1),
                           date(2013, 11, 11)):
            self.assertIn(moved_date, self.holidays)
        # with observation disabled, the original calendar dates are kept
        self.holidays.observed = False
        self.assertIn(date(2016, 5, 5), self.holidays)
        self.assertIn(date(2016, 5, 26), self.holidays)
class TestMX(unittest.TestCase):
    """Tests for Mexican federal holidays."""

    def setUp(self):
        self.holidays = holidays.MX(observed=False)

    def _check_single_day(self, day):
        # The holiday itself is present while the adjacent days are free.
        self.assertIn(day, self.holidays)
        self.assertNotIn(day + relativedelta(days=-1), self.holidays)
        self.assertNotIn(day + relativedelta(days=+1), self.holidays)

    def test_new_years(self):
        self.assertNotIn(date(2010, 12, 31), self.holidays)
        self.assertNotIn(date(2017, 1, 2), self.holidays)
        self.holidays.observed = True
        self.assertIn(date(2010, 12, 31), self.holidays)
        self.assertIn(date(2017, 1, 2), self.holidays)
        self.holidays.observed = False
        for year in range(1900, 2100):
            self._check_single_day(date(year, 1, 1))

    def test_constitution_day(self):
        for day in (date(2005, 2, 5), date(2006, 2, 5), date(2007, 2, 5),
                    date(2008, 2, 4), date(2009, 2, 2), date(2010, 2, 1),
                    date(2015, 2, 2), date(2016, 2, 1), date(2020, 2, 3)):
            self._check_single_day(day)

    def test_benito_juarez(self):
        for day in (date(2005, 3, 21), date(2006, 3, 21), date(2007, 3, 19),
                    date(2008, 3, 17), date(2009, 3, 16), date(2010, 3, 15),
                    date(2015, 3, 16), date(2016, 3, 21), date(2020, 3, 16)):
            self._check_single_day(day)

    def test_labor_day(self):
        self.assertNotIn(date(2010, 4, 30), self.holidays)
        self.assertNotIn(date(2011, 5, 2), self.holidays)
        self.holidays.observed = True
        self.assertIn(date(2010, 4, 30), self.holidays)
        self.assertIn(date(2011, 5, 2), self.holidays)
        self.holidays.observed = False
        # Not observed before 1923.
        self.assertNotIn(date(1922, 5, 1), self.holidays)
        for year in range(1923, 2100):
            self._check_single_day(date(year, 5, 1))

    def test_independence_day(self):
        self.assertNotIn(date(2006, 9, 15), self.holidays)
        self.assertNotIn(date(2007, 9, 17), self.holidays)
        self.holidays.observed = True
        self.assertIn(date(2006, 9, 15), self.holidays)
        self.assertIn(date(2007, 9, 17), self.holidays)
        self.holidays.observed = False
        for year in range(1900, 2100):
            self._check_single_day(date(year, 9, 16))

    def test_revolution_day(self):
        for day in (date(2005, 11, 20), date(2006, 11, 20), date(2007, 11, 19),
                    date(2008, 11, 17), date(2009, 11, 16), date(2010, 11, 15),
                    date(2015, 11, 16), date(2016, 11, 21), date(2020, 11, 16)):
            self._check_single_day(day)

    def test_change_of_government(self):
        self.assertNotIn(date(2012, 11, 30), self.holidays)
        self.assertNotIn(date(2024, 12, 2), self.holidays)
        self.holidays.observed = True
        self.assertIn(date(2012, 11, 30), self.holidays)
        self.assertIn(date(2024, 12, 2), self.holidays)
        self.holidays.observed = False
        for year in range(1970, 2100):
            day = date(year, 12, 1)
            # Inaugurations land every six years relative to 2018.
            if (2018 - year) % 6 == 0:
                self._check_single_day(day)
            else:
                self.assertNotIn(day, self.holidays)

    def test_christmas(self):
        for year in range(1900, 2100):
            self._check_single_day(date(year, 12, 25))
        self.assertNotIn(date(2010, 12, 24), self.holidays)
        self.assertNotIn(date(2016, 12, 26), self.holidays)
        self.holidays.observed = True
        self.assertIn(date(2010, 12, 24), self.holidays)
        self.assertIn(date(2016, 12, 26), self.holidays)
class TestNetherlands(unittest.TestCase):
    """Tests for Dutch national holidays."""

    def setUp(self):
        self.holidays = holidays.NL()

    def test_2017(self):
        # http://www.iamsterdam.com/en/plan-your-trip/practical-info/public-holidays
        for month, day in ((1, 1), (4, 16), (4, 17), (4, 27), (5, 25),
                           (6, 4), (6, 5), (12, 25), (12, 26)):
            self.assertIn(date(2017, month, day), self.holidays)

    def test_new_years(self):
        self.assertIn(date(2017, 1, 1), self.holidays)

    def test_easter(self):
        self.assertIn(date(2017, 4, 16), self.holidays)

    def test_easter_monday(self):
        self.assertIn(date(2017, 4, 17), self.holidays)

    def test_queens_day_between_1891_and_1948(self):
        # Between 1891 and 1948 Queen's Day was celebrated on Aug 31.
        self.holidays = holidays.NL(years=[1901])
        self.assertIn(date(1901, 8, 31), self.holidays)

    def test_queens_day_between_1891_and_1948_substituted_later(self):
        # Celebrated one day later (Sep 1) when Aug 31 fell on a Sunday.
        self.holidays = holidays.NL(years=[1947])
        self.assertIn(date(1947, 9, 1), self.holidays)

    def test_queens_day_between_1949_and_2013(self):
        self.holidays = holidays.NL(years=[1965])
        self.assertIn(date(1965, 4, 30), self.holidays)

    def test_queens_day_between_1949_and_1980_substituted_later(self):
        self.holidays = holidays.NL(years=[1967])
        self.assertIn(date(1967, 5, 1), self.holidays)

    def test_queens_day_between_1980_and_2013_substituted_earlier(self):
        self.holidays = holidays.NL(years=[2006])
        self.assertIn(date(2006, 4, 29), self.holidays)

    def test_kings_day_after_2014(self):
        self.holidays = holidays.NL(years=[2013])
        self.assertNotIn(date(2013, 4, 27), self.holidays)
        self.holidays = holidays.NL(years=[2017])
        self.assertIn(date(2017, 4, 27), self.holidays)

    def test_kings_day_after_2014_substituted_earlier(self):
        self.holidays = holidays.NL(years=[2188])
        self.assertIn(date(2188, 4, 26), self.holidays)

    def test_liberation_day(self):
        self.holidays = holidays.NL(years=1900)
        self.assertNotIn(date(1900, 5, 5), self.holidays)

    def test_liberation_day_after_1990_non_lustrum_year(self):
        self.holidays = holidays.NL(years=2017)
        self.assertNotIn(date(2017, 5, 5), self.holidays)

    def test_liberation_day_after_1990_in_lustrum_year(self):
        self.holidays = holidays.NL(years=2020)
        self.assertIn(date(2020, 5, 5), self.holidays)

    def test_ascension_day(self):
        self.holidays = holidays.NL(years=2017)
        self.assertIn(date(2017, 5, 25), self.holidays)

    def test_whit_sunday(self):
        self.holidays = holidays.NL(years=2017)
        self.assertIn(date(2017, 6, 4), self.holidays)

    def test_whit_monday(self):
        self.holidays = holidays.NL(years=2017)
        self.assertIn(date(2017, 6, 5), self.holidays)

    def test_first_christmas(self):
        self.holidays = holidays.NL(years=2017)
        self.assertIn(date(2017, 12, 25), self.holidays)

    def test_second_christmas(self):
        self.holidays = holidays.NL(years=2017)
        self.assertIn(date(2017, 12, 26), self.holidays)
class TestUS(unittest.TestCase):
    def setUp(self):
        # Every TestUS test starts from a federal calendar with observance
        # shifting disabled; individual tests flip ``observed`` as needed.
        self.holidays = holidays.US(observed=False)
def test_new_years(self):
self.assertNotIn(date(2010, 12, 31), self.holidays)
self.assertNotIn(date(2017, 1, 2), self.holidays)
self.holidays.observed = True
self.assertIn(date(2010, 12, 31), self.holidays)
self.assertIn(date(2017, 1, 2), self.holidays)
self.holidays.observed = False
for year in range(1900, 2100):
dt = date(year, 1, 1)
self.assertIn(dt, self.holidays)
self.assertNotIn(dt + relativedelta(days=-1), self.holidays)
self.assertNotIn(dt + relativedelta(days=+1), self.holidays)
def test_epiphany(self):
pr_holidays = holidays.US(state='PR')
for year in range(2010, 2021):
self.assertNotIn(date(year, 1, 6), self.holidays)
self.assertIn(date(year, 1, 6), pr_holidays)
def test_three_kings_day(self):
vi_holidays = holidays.US(state='VI')
for year in range(2010, 2021):
self.assertNotIn(date(year, 1, 6), self.holidays)
self.assertIn(date(year, 1, 6), vi_holidays)
def test_lee_jackson_day(self):
va_holidays = holidays.US(state='VA')
self.assertNotIn(date(1888, 1, 19), va_holidays)
self.assertNotIn(date(1983, 1, 19), va_holidays)
self.assertNotIn("Lee Jackson Day",
va_holidays.get_list(date(2000, 1, 17)))
for dt in [date(1889, 1, 19), date(1982, 1, 19), date(1983, 1, 17),
date(1999, 1, 18), date(2000, 1, 14), date(2001, 1, 12),
date(2013, 1, 18), date(2014, 1, 17), date(2018, 1, 12)]:
self.assertNotIn("Lee Jackson Day", self.holidays.get_list(dt))
self.assertIn(dt, va_holidays)
self.assertIn("Lee Jackson Day", va_holidays.get_list(dt))
def test_inauguration_day(self):
name = "Inauguration Day"
dc_holidays = holidays.US(state='DC')
la_holidays = holidays.US(state='LA')
md_holidays = holidays.US(state='MD')
va_holidays = holidays.US(state='VA')
for year in (1789, 1793, 1877, 1929, 1933):
self.assertNotIn(name, self.holidays.get_list(date(year, 3, 4)))
self.assertIn(name, dc_holidays.get_list(date(year, 3, 4)))
self.assertIn(name, la_holidays.get_list(date(year, 3, 4)))
self.assertIn(name, md_holidays.get_list(date(year, 3, 4)))
self.assertIn(name, va_holidays.get_list(date(year, 3, 4)))
for year in (1937, 1941, 1957, 2013, 2017):
self.assertNotIn(name, self.holidays.get_list(date(year, 1, 20)))
self.assertIn(name, dc_holidays.get_list(date(year, 1, 20)))
self.assertIn(name, la_holidays.get_list(date(year, 1, 20)))
self.assertIn(name, md_holidays.get_list(date(year, 1, 20)))
self.assertIn(name, va_holidays.get_list(date(year, 1, 20)))
for year in (1785, 1788, 2010, 2011, 2012, 2014, 2015, 2016):
self.assertNotIn(name, dc_holidays.get_list(date(year, 3, 4)))
self.assertNotIn(name, la_holidays.get_list(date(year, 3, 4)))
self.assertNotIn(name, md_holidays.get_list(date(year, 3, 4)))
self.assertNotIn(name, va_holidays.get_list(date(year, 3, 4)))
self.assertNotIn(name, dc_holidays.get_list(date(year, 1, 20)))
self.assertNotIn(name, la_holidays.get_list(date(year, 1, 20)))
self.assertNotIn(name, md_holidays.get_list(date(year, 1, 20)))
self.assertNotIn(name, va_holidays.get_list(date(year, 1, 20)))
def test_marthin_luther(self):
for dt in [date(1986, 1, 20), date(1999, 1, 18), date(2000, 1, 17),
date(2012, 1, 16), date(2013, 1, 21), date(2014, 1, 20),
date(2015, 1, 19), date(2016, 1, 18), date(2020, 1, 20)]:
self.assertIn(dt, self.holidays)
self.assertNotIn(dt + relativedelta(days=-1), self.holidays)
self.assertNotIn(dt + relativedelta(days=+1), self.holidays)
self.assertNotIn("Martin Luther King, Jr. Day",
holidays.US(years=[1985]).values())
self.assertIn("Martin Luther King, Jr. Day",
holidays.US(years=[1986]).values())
self.assertEqual(holidays.US(state='AL').get('2015-01-19'),
"Robert E. Lee/Martin Luther King Birthday")
self.assertEqual(holidays.US(state='AS').get('2015-01-19'),
("Dr. Martin Luther King Jr. "
"and Robert E. Lee's Birthdays"))
self.assertEqual(holidays.US(state='MS').get('2015-01-19'),
("Dr. Martin Luther King Jr. "
"and Robert E. Lee's Birthdays"))
self.assertEqual(holidays.US(state='AZ').get('2015-01-19'),
"Dr. Martin Luther King Jr./Civil Rights Day")
self.assertEqual(holidays.US(state='NH').get('2015-01-19'),
"Dr. Martin Luther King Jr./Civil Rights Day")
self.assertEqual(holidays.US(state='ID').get('2015-01-19'),
"Martin Luther King, Jr. - Idaho Human Rights Day")
self.assertNotEqual(holidays.US(state='ID').get('2000-01-17'),
"Martin Luther King, Jr. - Idaho Human Rights Day")
self.assertEqual(holidays.US(state='GA').get('2011-01-17'),
"Robert E. Lee's Birthday")
def test_lincolns_birthday(self):
ca_holidays = holidays.US(state='CA')
ct_holidays = holidays.US(state='CT')
il_holidays = holidays.US(state='IL')
ia_holidays = holidays.US(state='IA')
nj_holidays = holidays.US(state='NJ')
ny_holidays = holidays.US(state='NY')
for year in range(1971, 2010):
self.assertNotIn(date(year, 2, 12), self.holidays)
self.assertIn(date(year, 2, 12), ca_holidays)
self.assertIn(date(year, 2, 12), ct_holidays)
self.assertIn(date(year, 2, 12), il_holidays)
self.assertIn(date(year, 2, 12), ia_holidays)
self.assertIn(date(year, 2, 12), nj_holidays)
self.assertIn(date(year, 2, 12), ny_holidays)
if date(year, 2, 12).weekday() == 5:
self.assertNotIn(date(year, 2, 11), self.holidays)
self.assertIn(date(year, 2, 11), ca_holidays)
self.assertIn(date(year, 2, 11), ct_holidays)
self.assertIn(date(year, 2, 11), il_holidays)
self.assertIn(date(year, 2, 11), ia_holidays)
self.assertIn(date(year, 2, 11), nj_holidays)
self.assertIn(date(year, 2, 11), ny_holidays)
else:
self.assertNotIn(date(year, 2, 11), ca_holidays)
self.assertNotIn(date(year, 2, 11), ct_holidays)
self.assertNotIn(date(year, 2, 11), il_holidays)
self.assertNotIn(date(year, 2, 11), ia_holidays)
self.assertNotIn(date(year, 2, 11), nj_holidays)
self.assertNotIn(date(year, 2, 11), ny_holidays)
if date(year, 2, 12).weekday() == 6:
self.assertNotIn(date(year, 2, 13), self.holidays)
self.assertIn(date(year, 2, 13), ca_holidays)
self.assertIn(date(year, 2, 13), ct_holidays)
self.assertIn(date(year, 2, 13), il_holidays)
self.assertIn(date(year, 2, 13), ia_holidays)
self.assertIn(date(year, 2, 13), nj_holidays)
self.assertIn(date(year, 2, 13), ny_holidays)
else:
self.assertNotIn(date(year, 2, 13), ca_holidays)
self.assertNotIn(date(year, 2, 13), ct_holidays)
self.assertNotIn(date(year, 2, 13), il_holidays)
self.assertNotIn(date(year, 2, 13), ia_holidays)
self.assertNotIn(date(year, 2, 13), nj_holidays)
self.assertNotIn(date(year, 2, 13), ny_holidays)
for year in range(2010, 2050):
self.assertNotIn(date(year, 2, 12), self.holidays)
self.assertNotIn(date(year, 2, 12), ca_holidays)
self.assertIn(date(year, 2, 12), ct_holidays)
self.assertIn(date(year, 2, 12), il_holidays)
self.assertIn(date(year, 2, 12), ia_holidays)
self.assertIn(date(year, 2, 12), nj_holidays)
self.assertIn(date(year, 2, 12), ny_holidays)
if date(year, 2, 12).weekday() == 5:
self.assertNotIn(date(year, 2, 11), self.holidays)
self.assertNotIn(date(year, 2, 11), ca_holidays)
self.assertIn(date(year, 2, 11), ct_holidays)
self.assertIn(date(year, 2, 11), il_holidays)
self.assertIn(date(year, 2, 11), ia_holidays)
self.assertIn(date(year, 2, 11), nj_holidays)
self.assertIn(date(year, 2, 11), ny_holidays)
else:
self.assertNotIn(date(year, 2, 11), ca_holidays)
self.assertNotIn(date(year, 2, 11), ct_holidays)
self.assertNotIn(date(year, 2, 11), il_holidays)
self.assertNotIn(date(year, 2, 11), ia_holidays)
self.assertNotIn(date(year, 2, 11), nj_holidays)
self.assertNotIn(date(year, 2, 11), ny_holidays)
if date(year, 2, 12).weekday() == 6:
self.assertNotIn(date(year, 2, 13), self.holidays)
self.assertNotIn(date(year, 2, 13), ca_holidays)
self.assertIn(date(year, 2, 13), ct_holidays)
self.assertIn(date(year, 2, 13), il_holidays)
self.assertIn(date(year, 2, 13), ia_holidays)
self.assertIn(date(year, 2, 13), nj_holidays)
self.assertIn(date(year, 2, 13), ny_holidays)
else:
self.assertNotIn(date(year, 2, 13), ca_holidays)
self.assertNotIn(date(year, 2, 13), ct_holidays)
self.assertNotIn(date(year, 2, 13), il_holidays)
self.assertNotIn(date(year, 2, 13), ia_holidays)
self.assertNotIn(date(year, 2, 13), nj_holidays)
self.assertNotIn(date(year, 2, 13), ny_holidays)
def test_susan_b_anthony_day(self):
ca_holidays = holidays.US(state='CA')
fl_holidays = holidays.US(state='FL')
ny_holidays = holidays.US(state='NY')
wi_holidays = holidays.US(state='WI')
self.assertNotIn(date(1975, 2, 15), wi_holidays)
self.assertNotIn(date(2000, 2, 15), ca_holidays)
self.assertNotIn(date(2000, 2, 15), fl_holidays)
self.assertNotIn(date(2000, 2, 15), ny_holidays)
self.assertIn(date(2000, 2, 15), wi_holidays)
self.assertIn(date(2004, 2, 15), ny_holidays)
self.assertNotIn(date(2010, 2, 15), fl_holidays)
self.assertIn(date(2010, 2, 15), ny_holidays)
self.assertNotIn(date(2013, 2, 15), self.holidays)
self.assertNotIn(date(2013, 2, 15), ca_holidays)
self.assertIn(date(2013, 2, 15), fl_holidays)
self.assertIn(date(2013, 2, 15), ny_holidays)
self.assertNotIn(date(2014, 2, 15), self.holidays)
self.assertIn(date(2014, 2, 15), ca_holidays)
self.assertIn(date(2014, 2, 15), fl_holidays)
self.assertIn(date(2014, 2, 15), ny_holidays)
self.assertIn(date(2014, 2, 15), wi_holidays)
def test_washingtons_birthday(self):
de_holidays = holidays.US(state='DE')
fl_holidays = holidays.US(state='FL')
ga_holidays = holidays.US(state='GA')
nm_holidays = holidays.US(state='NM')
for dt in [date(1969, 2, 22), date(1970, 2, 22), date(1971, 2, 15),
date(1997, 2, 17), date(1999, 2, 15), date(2000, 2, 21),
date(2012, 2, 20), date(2013, 2, 18), date(2014, 2, 17),
date(2015, 2, 16), date(2016, 2, 15), date(2020, 2, 17)]:
self.assertIn(dt, self.holidays)
self.assertNotIn(dt + relativedelta(days=-1), self.holidays)
self.assertNotIn(dt + relativedelta(days=+1), self.holidays)
self.assertNotIn(dt, de_holidays)
self.assertNotEqual(fl_holidays.get(dt), "Washington's Birthday")
self.assertNotIn(dt, ga_holidays)
self.assertNotIn(dt, nm_holidays)
for dt in [date(2013, 12, 24), date(2014, 12, 26), date(2015, 12, 24)]:
self.assertIn(dt, ga_holidays)
self.assertIn("Washington's Birthday", ga_holidays.get_list(dt))
self.assertEqual(holidays.US(state='AL').get('2015-02-16'),
"George Washington/Thomas Jefferson Birthday")
self.assertEqual(holidays.US(state='AS').get('2015-02-16'),
("George Washington's Birthday "
"and Daisy Gatson Bates Day"))
self.assertEqual(holidays.US(state='PR').get('2015-02-16'),
"Presidents' Day")
self.assertEqual(holidays.US(state='VI').get('2015-02-16'),
"Presidents' Day")
def test_mardi_gras(self):
la_holidays = holidays.US(state='LA')
self.assertNotIn(date(1856, 2, 5), la_holidays)
for dt in [date(1857, 2, 24), date(2008, 2, 5), date(2011, 3, 8),
date(2012, 2, 21), date(2014, 3, 4), date(2018, 2, 13)]:
self.assertNotIn(dt, self.holidays)
self.assertIn(dt, la_holidays)
def test_guam_discovery_day(self):
gu_holidays = holidays.US(state='GU')
self.assertNotIn(date(1969, 3, 1), gu_holidays)
for dt in [date(1970, 3, 2), date(1971, 3, 1), date(1977, 3, 7),
date(2014, 3, 3), date(2015, 3, 2), date(2016, 3, 7)]:
self.assertNotIn(dt, self.holidays)
self.assertIn(dt, gu_holidays)
self.assertEqual(gu_holidays.get(dt), "Guam Discovery Day")
def test_casimir_pulaski_day(self):
il_holidays = holidays.US(state='IL')
self.assertNotIn(date(1977, 3, 7), il_holidays)
for dt in [date(1978, 3, 6), date(1982, 3, 1), date(1983, 3, 7),
date(2014, 3, 3), date(2015, 3, 2), date(2016, 3, 7)]:
self.assertNotIn(dt, self.holidays)
self.assertIn(dt, il_holidays)
self.assertEqual(il_holidays.get(dt), "Casimir Pulaski Day")
def test_texas_independence_day(self):
tx_holidays = holidays.US(state='TX')
self.assertNotIn(date(1873, 3, 2), tx_holidays)
for year in range(1874, 2050):
self.assertNotIn(date(year, 3, 2), self.holidays)
self.assertIn(date(year, 3, 2), tx_holidays)
def test_town_meeting_day(self):
vt_holidays = holidays.US(state='VT')
self.assertNotIn(date(1799, 3, 5), vt_holidays)
for dt in [date(1800, 3, 4), date(1803, 3, 1), date(1804, 3, 6),
date(2011, 3, 1), date(2015, 3, 3), date(2017, 3, 7)]:
self.assertNotIn(dt, self.holidays)
self.assertIn(dt, vt_holidays)
def test_evacuation_day(self):
ma_holidays = holidays.US(state='MA')
self.assertNotIn(date(1900, 3, 17), ma_holidays)
for year in range(1901, 2050):
self.assertNotIn(date(year, 3, 17), self.holidays)
self.assertIn(date(year, 3, 17), ma_holidays)
self.assertNotIn(date(1995, 3, 20), ma_holidays)
for dt in [date(2012, 3, 19), date(2013, 3, 18), date(2018, 3, 19)]:
self.assertIn(dt, ma_holidays)
ma_holidays.observed = False
for dt in [date(2012, 3, 19), date(2013, 3, 18), date(2018, 3, 19)]:
self.assertNotIn(dt, ma_holidays)
def test_emancipation_day_in_puerto_rico(self):
pr_holidays = holidays.US(state='PR')
for year in range(2010, 2021):
self.assertNotIn(date(year, 3, 22), self.holidays)
self.assertIn(date(year, 3, 22), pr_holidays)
self.assertNotIn(date(2014, 3, 21), pr_holidays)
self.assertNotIn(date(2014, 3, 23), pr_holidays)
self.assertIn(date(2015, 3, 23), pr_holidays)
def test_prince_jonah_kuhio_kalanianaole_day(self):
hi_holidays = holidays.US(state='HI')
self.assertNotIn(date(1948, 3, 26), hi_holidays)
for year in range(1949, 2050):
self.assertNotIn(date(year, 3, 26), self.holidays)
self.assertIn(date(year, 3, 26), hi_holidays)
for dt in [date(1949, 3, 25), date(2016, 3, 25), date(2017, 3, 27)]:
self.assertNotIn(dt, self.holidays)
self.assertIn(dt, hi_holidays)
self.assertEqual(hi_holidays.get(dt),
"Prince Jonah Kuhio Kalanianaole Day (Observed)")
hi_holidays.observed = False
for dt in [date(1949, 3, 25), date(2016, 3, 25), date(2017, 3, 27)]:
self.assertNotIn(dt, hi_holidays)
def test_stewards_day(self):
ak_holidays = holidays.US(state='AK')
self.assertNotIn(date(1917, 3, 30), ak_holidays)
for dt in [date(1918, 3, 30), date(1954, 3, 30), date(1955, 3, 28),
date(2002, 3, 25), date(2014, 3, 31), date(2018, 3, 26)]:
self.assertNotIn(dt, self.holidays)
self.assertIn(dt, ak_holidays)
def test_cesar_chavez_day(self):
ca_holidays = holidays.US(state='CA')
tx_holidays = holidays.US(state='TX')
for year in range(1995, 2000):
self.assertNotIn(date(year, 3, 31), self.holidays)
self.assertIn(date(year, 3, 31), ca_holidays)
for year in range(2000, 2020):
self.assertNotIn(date(year, 3, 31), self.holidays)
self.assertIn(date(year, 3, 31), ca_holidays)
self.assertIn(date(year, 3, 31), tx_holidays)
for year in (1996, 2002, 2013, 2019):
self.assertNotIn(date(year, 4, 1), self.holidays)
self.assertIn(date(year, 4, 1), ca_holidays)
self.assertNotIn(date(year, 4, 1), tx_holidays)
def test_transfer_day(self):
vi_holidays = holidays.US(state='VI')
for year in range(2010, 2021):
self.assertNotIn(date(year, 3, 31), self.holidays)
self.assertIn(date(year, 3, 31), vi_holidays)
def test_emancipation_day(self):
dc_holidays = holidays.US(state='DC')
self.assertNotIn(date(2004, 4, 16), dc_holidays)
for year in range(2005, 2020):
self.assertNotIn(date(year, 4, 16), self.holidays)
self.assertIn(date(year, 4, 16), dc_holidays)
self.assertIn(date(2005, 4, 15), dc_holidays)
self.assertIn(date(2006, 4, 17), dc_holidays)
dc_holidays.observed = False
self.assertNotIn(date(2005, 4, 15), dc_holidays)
self.assertNotIn(date(2006, 4, 17), dc_holidays)
def test_patriots_day(self):
me_holidays = holidays.US(state='ME')
ma_holidays = holidays.US(state='MA')
self.assertNotIn(date(1983, 4, 19), me_holidays)
self.assertNotIn(date(1983, 4, 19), ma_holidays)
for year in range(1894, 1969):
self.assertNotIn(date(year, 4, 19), self.holidays)
self.assertIn(date(year, 4, 19), me_holidays)
self.assertIn(date(year, 4, 19), ma_holidays)
for dt in [date(1969, 4, 21), date(1974, 4, 15), date(1975, 4, 21),
date(2015, 4, 20), date(2016, 4, 18), date(2019, 4, 15)]:
self.assertNotIn(dt, self.holidays)
self.assertIn(dt, me_holidays)
self.assertIn(dt, ma_holidays)
def test_holy_thursday(self):
vi_holidays = holidays.US(state='VI')
for dt in [date(2010, 4, 1), date(2011, 4, 21), date(2013, 3, 28),
date(2014, 4, 17), date(2015, 4, 2), date(2016, 3, 24)]:
self.assertNotIn(dt, self.holidays)
self.assertIn(dt, vi_holidays)
def test_good_friday(self):
ct_holidays = holidays.US(state='CT')
de_holidays = holidays.US(state='DE')
gu_holidays = holidays.US(state='GU')
in_holidays = holidays.US(state='IN')
ky_holidays = holidays.US(state='IN')
la_holidays = holidays.US(state='LA')
nj_holidays = holidays.US(state='NJ')
nc_holidays = holidays.US(state='NC')
tn_holidays = holidays.US(state='TN')
tx_holidays = holidays.US(state='TX')
vi_holidays = holidays.US(state='VI')
for dt in [date(1900, 4, 13), date(1901, 4, 5), date(1902, 3, 28),
date(1999, 4, 2), date(2000, 4, 21), date(2010, 4, 2),
date(2018, 3, 30), date(2019, 4, 19), date(2020, 4, 10)]:
self.assertNotIn(dt, self.holidays)
self.assertIn(dt, ct_holidays)
self.assertIn(dt, de_holidays)
self.assertIn(dt, gu_holidays)
self.assertIn(dt, in_holidays)
self.assertIn(dt, ky_holidays)
self.assertIn(dt, la_holidays)
self.assertIn(dt, nj_holidays)
self.assertIn(dt, nc_holidays)
self.assertIn(dt, tn_holidays)
self.assertIn(dt, tx_holidays)
self.assertIn(dt, vi_holidays)
def test_easter_monday(self):
vi_holidays = holidays.US(state='VI')
for dt in [date(1900, 4, 16), date(1901, 4, 8), date(1902, 3, 31),
date(1999, 4, 5), date(2010, 4, 5),
date(2018, 4, 2), date(2019, 4, 22), date(2020, 4, 13)]:
self.assertNotIn(dt, self.holidays)
self.assertIn(dt, vi_holidays)
def test_confederate_memorial_day(self):
al_holidays = holidays.US(state='AL')
ga_holidays = holidays.US(state='GA')
ms_holidays = holidays.US(state='MS')
sc_holidays = holidays.US(state='SC')
tx_holidays = holidays.US(state='TX')
self.assertNotIn(date(1865, 4, 24), self.holidays)
self.assertNotIn(date(1865, 4, 24), al_holidays)
for dt in [date(1866, 4, 23), date(1878, 4, 22), date(1884, 4, 28),
date(2014, 4, 28), date(2015, 4, 27), date(2019, 4, 22)]:
self.assertNotIn(dt, self.holidays)
self.assertIn(dt, al_holidays)
self.assertIn(dt, ga_holidays)
self.assertIn(dt, ms_holidays)
self.assertIn(dt, sc_holidays)
self.assertNotIn(date(1930, 1, 19), tx_holidays)
self.assertNotIn(date(1931, 1, 19), self.holidays)
self.assertIn(date(1931, 1, 19), tx_holidays)
def test_san_jacinto_day(self):
tx_holidays = holidays.US(state='TX')
self.assertNotIn(date(1874, 4, 21), tx_holidays)
for year in (1875, 2050):
self.assertNotIn(date(year, 4, 21), self.holidays)
self.assertIn(date(year, 4, 21), tx_holidays)
def test_arbor_day(self):
ne_holidays = holidays.US(state='NE')
for dt in [date(1875, 4, 22), date(1988, 4, 22), date(1989, 4, 28),
date(2009, 4, 24), date(2010, 4, 30), date(2014, 4, 25)]:
self.assertNotIn(dt, self.holidays)
self.assertIn(dt, ne_holidays)
def test_primary_election_day(self):
in_holidays = holidays.US(state='IN')
self.assertNotIn(date(2004, 5, 4), in_holidays)
for dt in [date(2006, 5, 2), date(2008, 5, 6), date(2010, 5, 4),
date(2012, 5, 8), date(2014, 5, 6), date(2015, 5, 5),
date(2016, 5, 3)]:
self.assertNotIn(dt, self.holidays)
self.assertIn(dt, in_holidays)
def test_truman_day(self):
mo_holidays = holidays.US(state='MO', observed=False)
self.assertNotIn(date(1948, 5, 8), self.holidays)
self.assertNotIn(date(1948, 5, 8), mo_holidays)
for year in range(1949, 2100):
dt = date(year, 5, 8)
self.assertNotIn(dt, self.holidays)
self.assertIn(dt, mo_holidays)
self.assertNotIn(dt + relativedelta(days=-1), mo_holidays)
self.assertNotIn(dt + relativedelta(days=+1), mo_holidays)
self.assertNotIn(date(2004, 5, 7), mo_holidays)
self.assertNotIn(date(2005, 5, 10), mo_holidays)
mo_holidays.observed = True
self.assertIn(date(2004, 5, 7), mo_holidays)
self.assertIn(date(2005, 5, 10), mo_holidays)
def test_memorial_day(self):
for dt in [date(1969, 5, 30), date(1970, 5, 30), date(1971, 5, 31),
date(1997, 5, 26), date(1999, 5, 31), date(2000, 5, 29),
date(2012, 5, 28), date(2013, 5, 27), date(2014, 5, 26),
date(2015, 5, 25), date(2016, 5, 30), date(2020, 5, 25)]:
self.assertIn(dt, self.holidays)
self.assertNotIn(dt + relativedelta(days=-1), self.holidays)
self.assertNotIn(dt + relativedelta(days=+1), self.holidays)
def test_jefferson_davis_birthday(self):
al_holidays = holidays.US(state='AL')
self.assertNotIn(date(1889, 6, 3), self.holidays)
self.assertNotIn(date(1889, 6, 3), al_holidays)
for dt in [date(1890, 6, 2), date(1891, 6, 1), date(1897, 6, 7),
date(2014, 6, 2), date(2015, 6, 1), date(2016, 6, 6)]:
self.assertNotIn(dt, self.holidays)
self.assertIn(dt, al_holidays)
def test_kamehameha_day(self):
hi_holidays = holidays.US(state='HI')
self.assertNotIn(date(1871, 6, 11), hi_holidays)
for year in range(1872, 2050):
self.assertNotIn(date(year, 6, 11), self.holidays)
self.assertIn(date(year, 6, 11), hi_holidays)
self.assertNotIn(date(2006, 6, 12), hi_holidays)
for dt in [date(2011, 6, 10), date(2016, 6, 10), date(2017, 6, 12)]:
self.assertIn(dt, hi_holidays)
self.assertEqual(hi_holidays.get(dt), "Kamehameha Day (Observed)")
hi_holidays.observed = False
for dt in [date(2011, 6, 10), date(2016, 6, 10), date(2017, 6, 12)]:
self.assertNotIn(dt, hi_holidays)
def test_emancipation_day_in_texas(self):
tx_holidays = holidays.US(state='TX')
self.assertNotIn(date(1979, 6, 19), tx_holidays)
for year in (1980, 2050):
self.assertNotIn(date(year, 6, 19), self.holidays)
self.assertIn(date(year, 6, 19), tx_holidays)
def test_west_virginia_day(self):
wv_holidays = holidays.US(state='WV')
self.assertNotIn(date(1926, 6, 20), wv_holidays)
for year in (1927, 2050):
self.assertNotIn(date(year, 6, 20), self.holidays)
self.assertIn(date(year, 6, 20), wv_holidays)
self.assertIn(date(2015, 6, 19), wv_holidays)
self.assertIn(date(2010, 6, 21), wv_holidays)
wv_holidays.observed = False
self.assertNotIn(date(2015, 6, 19), wv_holidays)
self.assertNotIn(date(2010, 6, 21), wv_holidays)
def test_emancipation_day_in_virgin_islands(self):
vi_holidays = holidays.US(state='VI')
for year in (2010, 2021):
self.assertNotIn(date(year, 7, 3), self.holidays)
self.assertIn(date(year, 7, 3), vi_holidays)
def test_independence_day(self):
for year in range(1900, 2100):
dt = date(year, 7, 4)
self.assertIn(dt, self.holidays)
self.assertNotIn(dt + relativedelta(days=-1), self.holidays)
self.assertNotIn(dt + relativedelta(days=+1), self.holidays)
self.assertNotIn(date(2010, 7, 5), self.holidays)
self.assertNotIn(date(2020, 7, 3), self.holidays)
self.holidays.observed = True
self.assertIn(date(2010, 7, 5), self.holidays)
self.assertIn(date(2020, 7, 3), self.holidays)
def test_liberation_day_guam(self):
gu_holidays = holidays.US(state='GU')
self.assertNotIn(date(1944, 7, 21), gu_holidays)
for year in range(1945, 2100):
self.assertNotIn(date(year, 7, 21), self.holidays)
self.assertIn(date(year, 7, 21), gu_holidays)
def test_pioneer_day(self):
ut_holidays = holidays.US(state='UT')
self.assertNotIn(date(1848, 7, 24), ut_holidays)
for year in (1849, 2050):
self.assertNotIn(date(year, 7, 24), self.holidays)
self.assertIn(date(year, 7, 24), ut_holidays)
self.assertIn('2010-07-23', ut_holidays)
self.assertIn('2011-07-25', ut_holidays)
ut_holidays.observed = False
self.assertNotIn('2010-07-23', ut_holidays)
self.assertNotIn('2011-07-25', ut_holidays)
def test_constitution_day(self):
pr_holidays = holidays.US(state='PR')
for year in range(2010, 2021):
self.assertNotIn(date(year, 7, 25), self.holidays)
self.assertIn(date(year, 7, 25), pr_holidays)
self.assertNotIn(date(2015, 7, 24), pr_holidays)
self.assertNotIn(date(2015, 7, 26), pr_holidays)
self.assertIn(date(2021, 7, 26), pr_holidays)
def test_victory_day(self):
ri_holidays = holidays.US(state='RI')
self.assertNotIn(date(1947, 8, 11), ri_holidays)
for dt in [date(1948, 8, 9), date(1995, 8, 14), date(2005, 8, 8),
date(2015, 8, 10), date(2016, 8, 8), date(2017, 8, 14)]:
self.assertNotIn(dt, self.holidays)
self.assertIn(dt, ri_holidays)
def test_statehood_day(self):
hi_holidays = holidays.US(state='HI')
self.assertNotIn(date(1958, 8, 15), hi_holidays)
for dt in [date(1959, 8, 21), date(1969, 8, 15), date(1999, 8, 20),
date(2014, 8, 15), date(2015, 8, 21), date(2016, 8, 19)]:
self.assertNotIn(dt, self.holidays)
self.assertIn(dt, hi_holidays)
def test_bennington_battle_day(self):
vt_holidays = holidays.US(state='VT')
self.assertNotIn(date(1777, 8, 16), vt_holidays)
for year in range(1778, 2050):
self.assertNotIn(date(year, 8, 16), self.holidays)
self.assertIn(date(year, 8, 16), vt_holidays)
vt_holidays.observed = False
self.assertNotIn("Bennington Battle Day (Observed)",
vt_holidays.get_list(date(1997, 8, 15)))
vt_holidays.observed = True
self.assertIn("Bennington Battle Day (Observed)",
vt_holidays.get_list(date(1997, 8, 15)))
self.assertNotIn("Bennington Battle Day (Observed)",
vt_holidays.get_list(date(1997, 8, 17)))
self.assertIn("Bennington Battle Day (Observed)",
vt_holidays.get_list(date(1998, 8, 17)))
self.assertNotIn("Bennington Battle Day (Observed)",
vt_holidays.get_list(date(1999, 8, 15)))
self.assertNotIn("Bennington Battle Day (Observed)",
vt_holidays.get_list(date(1999, 8, 17)))
def test_lyndon_baines_johnson_day(self):
tx_holidays = holidays.US(state='TX')
self.assertNotIn(date(1972, 8, 27), tx_holidays)
for year in (1973, 2050):
self.assertNotIn(date(year, 8, 27), self.holidays)
self.assertIn(date(year, 8, 27), tx_holidays)
def test_labor_day(self):
    """Labor Day (first Monday of September) is in the federal calendar."""
    first_mondays = (
        date(1997, 9, 1), date(1999, 9, 6), date(2000, 9, 4),
        date(2012, 9, 3), date(2013, 9, 2), date(2014, 9, 1),
        date(2015, 9, 7), date(2016, 9, 5), date(2020, 9, 7),
    )
    one_day = relativedelta(days=1)
    for labor_day in first_mondays:
        self.assertIn(labor_day, self.holidays)
        # Neither the Sunday before nor the Tuesday after is a holiday.
        self.assertNotIn(labor_day - one_day, self.holidays)
        self.assertNotIn(labor_day + one_day, self.holidays)
def test_columbus_day(self):
    """Columbus Day: federal since 1937 (Oct 12 in early years, a Monday
    in the later samples).

    AK, DE, FL and HI do not observe it; SD names it "Native American
    Day" and VI "Columbus Day and Puerto Rico Friendship Day".
    """
    ak_holidays = holidays.US(state='AK')
    de_holidays = holidays.US(state='DE')
    fl_holidays = holidays.US(state='FL')
    hi_holidays = holidays.US(state='HI')
    sd_holidays = holidays.US(state='SD')
    vi_holidays = holidays.US(state='VI')
    for dt in [date(1937, 10, 12), date(1969, 10, 12), date(1970, 10, 12),
               date(1999, 10, 11), date(2000, 10, 9), date(2001, 10, 8),
               date(2013, 10, 14), date(2018, 10, 8), date(2019, 10, 14)]:
        self.assertIn(dt, self.holidays)
        # States that opted out of Columbus Day entirely.
        self.assertNotIn(dt, ak_holidays)
        self.assertNotIn(dt, de_holidays)
        self.assertNotIn(dt, fl_holidays)
        self.assertNotIn(dt, hi_holidays)
        # Adjacent days are never holidays.
        self.assertNotIn(dt + relativedelta(days=-1), self.holidays)
        self.assertNotIn(dt + relativedelta(days=+1), self.holidays)
        # States that observe the same date under a different name.
        self.assertEqual(sd_holidays.get(dt), "Native American Day")
        self.assertEqual(vi_holidays.get(dt),
                         "Columbus Day and Puerto Rico Friendship Day")
    # No federal Columbus Day before 1937.
    self.assertNotIn(date(1936, 10, 12), self.holidays)
def test_alaska_day(self):
    """Alaska Day (Oct 18) is an Alaska-only holiday from 1867.

    With ``observed`` off, only Oct 18 itself is in the calendar; once
    enabled, weekend occurrences shift to the adjacent weekday
    (2014-10-18 and 2015-10-18 fell on a weekend — TODO confirm).
    """
    ak_holidays = holidays.US(state='AK', observed=False)
    # Not yet a holiday the year before it was introduced.
    self.assertNotIn(date(1866, 10, 18), ak_holidays)
    for year in range(1867, 2050):
        self.assertIn(date(year, 10, 18), ak_holidays)
        # Without observed, neither adjacent day is ever included.
        self.assertNotIn(date(year, 10, 17), ak_holidays)
        self.assertNotIn(date(year, 10, 19), ak_holidays)
        # AK-only: never in the federal calendar.
        self.assertNotIn(date(year, 10, 18), self.holidays)
    ak_holidays.observed = True
    self.assertIn(date(2014, 10, 17), ak_holidays)
    self.assertIn(date(2015, 10, 19), ak_holidays)
def test_nevada_day(self):
    """Nevada Day is a Nevada-only holiday from 1933 (Oct 31 originally;
    the later samples fall on the last Friday of October).

    Weekend occurrences gain a "Nevada Day (Observed)" weekday entry
    only while the ``observed`` flag is on.
    """
    nv_holidays = holidays.US(state='NV')
    # Not yet a holiday the year before it was introduced.
    self.assertNotIn(date(1932, 10, 31), nv_holidays)
    for dt in [date(1933, 10, 31), date(1999, 10, 31), date(2000, 10, 27),
               date(2002, 10, 25), date(2014, 10, 31), date(2015, 10, 30)]:
        # NV-only: never in the federal calendar.
        self.assertNotIn(dt, self.holidays)
        self.assertIn(dt, nv_holidays)
    # Observed entries exist by default ...
    self.assertIn("Nevada Day (Observed)",
                  nv_holidays.get_list(date(1998, 10, 30)))
    self.assertIn("Nevada Day (Observed)",
                  nv_holidays.get_list(date(1999, 11, 1)))
    # ... and disappear when observed is switched off.
    nv_holidays.observed = False
    self.assertNotIn("Nevada Day (Observed)",
                     nv_holidays.get_list(date(1998, 10, 30)))
    self.assertNotIn("Nevada Day (Observed)",
                     nv_holidays.get_list(date(1999, 11, 1)))
def test_liberty_day(self):
    """Liberty Day (Nov 1) is a Virgin Islands-only holiday."""
    vi_holidays = holidays.US(state='VI')
    for year in range(2010, 2021):
        liberty_day = date(year, 11, 1)
        # VI-only: never in the federal calendar.
        self.assertNotIn(liberty_day, self.holidays)
        self.assertIn(liberty_day, vi_holidays)
def test_election_day(self):
    """Election Day is a state holiday in ten states for even-year
    general elections; in 2015 (odd year) only IN and NY include it.
    """
    de_holidays = holidays.US(state='DE')
    hi_holidays = holidays.US(state='HI')
    il_holidays = holidays.US(state='IL')
    in_holidays = holidays.US(state='IN')
    la_holidays = holidays.US(state='LA')
    mt_holidays = holidays.US(state='MT')
    nh_holidays = holidays.US(state='NH')
    nj_holidays = holidays.US(state='NJ')
    ny_holidays = holidays.US(state='NY')
    wv_holidays = holidays.US(state='WV')
    # Not present before 2008 in the DE calendar.
    self.assertNotIn(date(2004, 11, 2), de_holidays)
    for dt in [date(2008, 11, 4), date(2010, 11, 2), date(2012, 11, 6),
               date(2014, 11, 4), date(2016, 11, 8), date(2018, 11, 6)]:
        # Never a federal holiday, but present in all ten states.
        self.assertNotIn(dt, self.holidays)
        self.assertIn(dt, de_holidays)
        self.assertIn(dt, hi_holidays)
        self.assertIn(dt, il_holidays)
        self.assertIn(dt, in_holidays)
        self.assertIn(dt, la_holidays)
        self.assertIn(dt, mt_holidays)
        self.assertIn(dt, nh_holidays)
        self.assertIn(dt, nj_holidays)
        self.assertIn(dt, ny_holidays)
        self.assertIn(dt, wv_holidays)
    # Odd-year election day (2015-11-03): only IN and NY observe it.
    self.assertNotIn(date(2015, 11, 3), self.holidays)
    self.assertNotIn(date(2015, 11, 3), de_holidays)
    self.assertNotIn(date(2015, 11, 3), hi_holidays)
    self.assertNotIn(date(2015, 11, 3), il_holidays)
    self.assertIn(date(2015, 11, 3), in_holidays)
    self.assertNotIn(date(2015, 11, 3), la_holidays)
    self.assertNotIn(date(2015, 11, 3), mt_holidays)
    self.assertNotIn(date(2015, 11, 3), nh_holidays)
    self.assertNotIn(date(2015, 11, 3), nj_holidays)
    self.assertIn(date(2015, 11, 3), ny_holidays)
    self.assertNotIn(date(2015, 11, 3), wv_holidays)
def test_all_souls_day(self):
    """All Souls' Day (Nov 2) is a Guam-only holiday."""
    gu_holidays = holidays.US(state='GU')
    for year in range(1945, 2100):
        all_souls = date(year, 11, 2)
        # GU-only: never in the federal calendar.
        self.assertNotIn(all_souls, self.holidays)
        self.assertIn(all_souls, gu_holidays)
def test_veterans_day(self):
    """Veterans Day (Nov 11), named "Armistice Day" before 1954.

    The 1971 and 1977 sample dates show the late-October observance of
    that era; the ``observed`` flag shifts weekend occurrences onto the
    nearest weekday.

    Fix: the original asserted the 1937 "Armistice Day" absence twice
    in a row; the duplicate assertion is removed.
    """
    for dt in [date(1938, 11, 11), date(1939, 11, 11), date(1970, 11, 11),
               date(1971, 10, 25), date(1977, 10, 24), date(1978, 11, 11),
               date(2012, 11, 11), date(2013, 11, 11), date(2014, 11, 11),
               date(2015, 11, 11), date(2016, 11, 11), date(2020, 11, 11)]:
        self.assertIn(dt, self.holidays)
        # Adjacent days are never holidays.
        self.assertNotIn(dt + relativedelta(days=-1), self.holidays)
        self.assertNotIn(dt + relativedelta(days=+1), self.holidays)
    # "Armistice Day" exists from 1938 through 1953; renamed
    # "Veterans Day" from 1954.
    self.assertNotIn("Armistice Day", holidays.US(years=[1937]).values())
    self.assertIn("Armistice Day", holidays.US(years=[1938]).values())
    self.assertIn("Armistice Day", holidays.US(years=[1953]).values())
    self.assertIn("Veterans Day", holidays.US(years=[1954]).values())
    # Weekend occurrences shift only once observed is enabled.
    self.assertNotIn(date(2012, 11, 12), self.holidays)
    self.assertNotIn(date(2017, 11, 10), self.holidays)
    self.holidays.observed = True
    self.assertIn(date(2012, 11, 12), self.holidays)
    self.assertIn(date(2017, 11, 10), self.holidays)
def test_discovery_day(self):
    """Discovery Day (Nov 19) is a Puerto Rico-only holiday."""
    pr_holidays = holidays.US(state='PR')
    for year in range(2010, 2021):
        discovery_day = date(year, 11, 19)
        # PR-only: never in the federal calendar.
        self.assertNotIn(discovery_day, self.holidays)
        self.assertIn(discovery_day, pr_holidays)
    # 2016: neither adjacent day gets an entry; 2017: the following
    # Monday (Nov 20) does.
    self.assertNotIn(date(2016, 11, 18), pr_holidays)
    self.assertNotIn(date(2016, 11, 20), pr_holidays)
    self.assertIn(date(2017, 11, 20), pr_holidays)
def test_thanksgiving_day(self):
    """Thanksgiving (fourth Thursday of November) plus the various
    state-specific names for the Friday after it.

    NOTE(review): NM labels the day after Thanksgiving "Presidents'
    Day" and IN (from 2010) "Lincoln's Birthday" — unusual, but that is
    what this test pins; verify against the package data if surprising.
    """
    de_holidays = holidays.US(state='DE')
    fl_holidays = holidays.US(state='FL')
    in_holidays = holidays.US(state='IN')
    md_holidays = holidays.US(state='MD')
    nv_holidays = holidays.US(state='NV')
    nh_holidays = holidays.US(state='NH')
    nm_holidays = holidays.US(state='NM')
    nc_holidays = holidays.US(state='NC')
    ok_holidays = holidays.US(state='OK')
    tx_holidays = holidays.US(state='TX')
    wv_holidays = holidays.US(state='WV')
    for dt in [date(1997, 11, 27), date(1999, 11, 25), date(2000, 11, 23),
               date(2012, 11, 22), date(2013, 11, 28), date(2014, 11, 27),
               date(2015, 11, 26), date(2016, 11, 24), date(2020, 11, 26)]:
        # Thanksgiving itself is federal; adjacent days are not.
        self.assertIn(dt, self.holidays)
        self.assertNotIn(dt + relativedelta(days=-1), self.holidays)
        self.assertNotIn(dt + relativedelta(days=+1), self.holidays)
        # States calling the Friday "Day After Thanksgiving".
        self.assertIn(dt + relativedelta(days=+1), de_holidays)
        self.assertEqual(de_holidays.get(dt + relativedelta(days=+1)),
                         "Day After Thanksgiving")
        self.assertEqual(nh_holidays.get(dt + relativedelta(days=+1)),
                         "Day After Thanksgiving")
        self.assertEqual(nc_holidays.get(dt + relativedelta(days=+1)),
                         "Day After Thanksgiving")
        self.assertEqual(ok_holidays.get(dt + relativedelta(days=+1)),
                         "Day After Thanksgiving")
        self.assertEqual(wv_holidays.get(dt + relativedelta(days=+1)),
                         "Day After Thanksgiving")
        # States calling it "Friday After Thanksgiving".
        self.assertIn(dt + relativedelta(days=+1), fl_holidays)
        self.assertEqual(fl_holidays.get(dt + relativedelta(days=+1)),
                         "Friday After Thanksgiving")
        self.assertIn(dt + relativedelta(days=+1), tx_holidays)
        self.assertEqual(tx_holidays.get(dt + relativedelta(days=+1)),
                         "Friday After Thanksgiving")
        # State-specific names.
        self.assertEqual(nv_holidays.get(dt + relativedelta(days=+1)),
                         "Family Day")
        self.assertEqual(nm_holidays.get(dt + relativedelta(days=+1)),
                         "Presidents' Day")
        if dt.year >= 2008:
            self.assertEqual(md_holidays.get(dt + relativedelta(days=1)),
                             "American Indian Heritage Day")
        if dt.year >= 2010:
            self.assertEqual(in_holidays.get(dt + relativedelta(days=1)),
                             "Lincoln's Birthday")
        else:
            self.assertNotEqual(
                in_holidays.get(dt + relativedelta(days=1)),
                "Lincoln's Birthday")
def test_robert_lee_birthday(self):
    """Robert E. Lee's Birthday is a Georgia-only holiday (Friday after
    Thanksgiving in the sampled years)."""
    ga_holidays = holidays.US(state='GA')
    # Not present in 1985.
    self.assertNotIn(date(1985, 11, 25), ga_holidays)
    sample_dates = (
        date(2007, 11, 23), date(2008, 11, 28), date(2010, 11, 26),
        date(2013, 11, 29), date(2014, 11, 28), date(2015, 11, 27),
        date(2018, 11, 23), date(2019, 11, 29), date(2020, 11, 27),
    )
    for observed_on in sample_dates:
        # GA-only: never in the federal calendar.
        self.assertNotIn(observed_on, self.holidays)
        self.assertIn(observed_on, ga_holidays)
def test_lady_of_camarin_day(self):
    """Lady of Camarin Day (Dec 8) is a Guam-only holiday."""
    gu_holidays = holidays.US(state='GU')
    for year in range(1945, 2100):
        camarin_day = date(year, 12, 8)
        # GU-only: never in the federal calendar.
        self.assertNotIn(camarin_day, self.holidays)
        self.assertIn(camarin_day, gu_holidays)
def test_christmas_eve(self):
    """Christmas Eve (Dec 24) in the states that observe it.

    AS includes it for every sampled year; KS/MI/NC only from 2013, WI
    from 2012, TX from 1981 (checked by finding "Eve" in the holiday
    name). When Dec 24 falls on a weekend, all six calendars add a
    shifted entry (2016) or a "Christmas Eve (Observed)" entry (2017).
    """
    as_holidays = holidays.US(state='AS')
    ks_holidays = holidays.US(state='KS')
    mi_holidays = holidays.US(state='MI')
    nc_holidays = holidays.US(state='NC')
    tx_holidays = holidays.US(state='TX')
    wi_holidays = holidays.US(state='WI')
    self.holidays.observed = False
    for year in range(1900, 2050):
        # Never a federal holiday; always present for American Samoa.
        self.assertNotIn(date(year, 12, 24), self.holidays)
        self.assertIn(date(year, 12, 24), as_holidays)
        # str.find returns the match index: > 0 means "Eve" occurs in
        # the name, < 0 (with the "" default) means it does not.
        if year >= 2013:
            f = ks_holidays.get(date(year, 12, 24)).find("Eve")
            self.assertGreater(f, 0)
            f = mi_holidays.get(date(year, 12, 24)).find("Eve")
            self.assertGreater(f, 0)
            f = nc_holidays.get(date(year, 12, 24)).find("Eve")
            self.assertGreater(f, 0)
        if year >= 2012:
            f = wi_holidays.get(date(year, 12, 24)).find("Eve")
            self.assertGreater(f, 0)
        if year >= 1981:
            f = tx_holidays.get(date(year, 12, 24)).find("Eve")
            self.assertGreater(f, 0)
        if year < 1981:
            f = ks_holidays.get(date(year, 12, 24), "").find("Eve")
            self.assertLess(f, 0)
            f = mi_holidays.get(date(year, 12, 24), "").find("Eve")
            self.assertLess(f, 0)
            f = nc_holidays.get(date(year, 12, 24), "").find("Eve")
            self.assertLess(f, 0)
            f = tx_holidays.get(date(year, 12, 24), "").find("Eve")
            self.assertLess(f, 0)
            f = wi_holidays.get(date(year, 12, 24), "").find("Eve")
            self.assertLess(f, 0)
    # 2016-12-24 was a Saturday: all six calendars include Friday the
    # 23rd.
    self.assertIn(date(2016, 12, 23), as_holidays)
    self.assertIn(date(2016, 12, 23), ks_holidays)
    self.assertIn(date(2016, 12, 23), mi_holidays)
    self.assertIn(date(2016, 12, 23), nc_holidays)
    self.assertIn(date(2016, 12, 23), tx_holidays)
    self.assertIn(date(2016, 12, 23), wi_holidays)
    # 2017-12-24 was a Sunday: an explicit "(Observed)" entry appears
    # on Friday the 22nd.
    self.assertIn("Christmas Eve (Observed)",
                  as_holidays.get_list(date(2017, 12, 22)))
    self.assertIn("Christmas Eve (Observed)",
                  ks_holidays.get_list(date(2017, 12, 22)))
    self.assertIn("Christmas Eve (Observed)",
                  mi_holidays.get_list(date(2017, 12, 22)))
    self.assertIn("Christmas Eve (Observed)",
                  nc_holidays.get_list(date(2017, 12, 22)))
    self.assertIn("Christmas Eve (Observed)",
                  tx_holidays.get_list(date(2017, 12, 22)))
    self.assertIn("Christmas Eve (Observed)",
                  wi_holidays.get_list(date(2017, 12, 22)))
def test_christmas_day(self):
    """Christmas Day (Dec 25) is federal; weekend occurrences shift to
    the adjacent weekday only once observed is enabled."""
    one_day = relativedelta(days=1)
    for year in range(1900, 2100):
        christmas = date(year, 12, 25)
        self.assertIn(christmas, self.holidays)
        # Neither adjacent day is a holiday while observed is off.
        self.assertNotIn(christmas - one_day, self.holidays)
        self.assertNotIn(christmas + one_day, self.holidays)
    self.assertNotIn(date(2010, 12, 24), self.holidays)
    self.assertNotIn(date(2016, 12, 26), self.holidays)
    # Enabling observed adds the shifted weekday entries.
    self.holidays.observed = True
    self.assertIn(date(2010, 12, 24), self.holidays)
    self.assertIn(date(2016, 12, 26), self.holidays)
def test_day_after_christmas(self):
    """Day After Christmas (Dec 26) in NC and TX.

    Only NC adds a shifted "(Observed)" entry when Dec 26 falls on a
    weekend; TX never does, even with observed enabled.
    """
    nc_holidays = holidays.US(state='NC', observed=False)
    tx_holidays = holidays.US(state='TX', observed=False)
    # With observed off, the shifted weekday dates are absent for both.
    self.assertNotIn(date(2015, 12, 28), nc_holidays)
    self.assertNotIn(date(2016, 12, 27), nc_holidays)
    self.assertNotIn(date(2015, 12, 28), tx_holidays)
    self.assertNotIn(date(2016, 12, 27), tx_holidays)
    # NC gains "(Observed)" entries once enabled ...
    nc_holidays.observed = True
    self.assertIn("Day After Christmas (Observed)",
                  nc_holidays.get_list(date(2015, 12, 28)))
    self.assertIn("Day After Christmas (Observed)",
                  nc_holidays.get_list(date(2016, 12, 27)))
    # ... but TX does not.
    tx_holidays.observed = True
    self.assertNotIn("Day After Christmas (Observed)",
                     tx_holidays.get_list(date(2015, 12, 28)))
    self.assertNotIn("Day After Christmas (Observed)",
                     tx_holidays.get_list(date(2016, 12, 27)))
def test_new_years_eve(self):
    """New Year's Eve (Dec 31) in KY, MI and WI (WI starting 2012,
    KY/MI starting 2013)."""
    ky_holidays = holidays.US(state='KY')
    mi_holidays = holidays.US(state='MI')
    wi_holidays = holidays.US(state='WI')
    # Start years differ: WI has 2012, KY/MI do not.
    self.assertNotIn(date(2012, 12, 31), ky_holidays)
    self.assertNotIn(date(2012, 12, 31), mi_holidays)
    self.assertNotIn(date(2011, 12, 31), wi_holidays)
    self.assertIn(date(2012, 12, 31), wi_holidays)
    for nye in (date(2013, 12, 31), date(2016, 12, 30)):
        # Never a federal holiday, but present in all three states.
        self.assertNotIn(nye, self.holidays)
        self.assertIn(nye, ky_holidays)
        self.assertIn(nye, mi_holidays)
        self.assertIn(nye, wi_holidays)
class TestNZ(unittest.TestCase):
    """Tests for the New Zealand holiday calendar.

    The per-year day-of-month tables are read with ``enumerate(..., start)``
    so each entry corresponds to one year; the inline ``# 2001-05`` style
    comments label which years each row of the table covers. National
    holidays are tested against ``self.holidays``; provincial anniversary
    days against per-province ``holidays.NZ(prov=...)`` instances.
    """

    def setUp(self):
        # observed=True enables "Mondayised" weekend entries.
        self.holidays = holidays.NZ(observed=True)

    def test_new_years(self):
        """New Year's Day, with weekend occurrences shifted to Jan 3."""
        for year in range(1900, 2100):
            dt = date(year, 1, 1)
            self.assertIn(dt, self.holidays)
        for year, day in enumerate([1, 1, 1, 1, 3,  # 2001-05
                                    3, 1, 1, 1, 1,  # 2006-10
                                    3, 3, 1, 1, 1,  # 2011-15
                                    1, 3, 1, 1, 1, 1],  # 2016-21
                                   2001):
            dt = date(year, 1, day)
            self.assertIn(dt, self.holidays)
            self.assertEqual(self.holidays[dt][:10], "New Year's")
        # String (ISO date) lookups; no entries before 1894.
        self.assertNotIn("1893-01-01", self.holidays)
        self.assertIn("1894-01-01", self.holidays)

    def test_day_after_new_years(self):
        """Day after New Year's Day (Jan 2), shifted to Jan 4 off weekends."""
        for year in range(1900, 2100):
            dt = date(year, 1, 2)
            self.assertIn(dt, self.holidays)
        for year, day in enumerate([2, 2, 2, 2, 2,  # 2001-05
                                    2, 2, 2, 2, 4,  # 2006-10
                                    4, 2, 2, 2, 2,  # 2011-15
                                    4, 2, 2, 2, 2, 4],  # 2016-21
                                   2001):
            dt = date(year, 1, day)
            self.assertIn(dt, self.holidays)
            self.assertEqual(self.holidays[dt][:10], "Day after ")
        self.assertNotIn(date(2016, 1, 3), self.holidays)

    def test_waitangi_day(self):
        """Waitangi Day: Northland-only 1964-1973, national from 1974."""
        ntl_holidays = holidays.NZ(prov='Northland')
        for year, day in enumerate([3, 8, 7, 6, 5], 1964):
            dt = date(year, 2, day)
            self.assertIn(dt, ntl_holidays, dt)
            self.assertEqual(ntl_holidays[dt][:8], "Waitangi")
        for year in range(1900, 1974):
            dt = date(year, 2, 6)
            self.assertNotIn(dt, self.holidays)
        for year in range(1974, 2100):
            dt = date(year, 2, 6)
            self.assertIn(dt, self.holidays)
        for year, day in enumerate([6, 6, 6, 6, 6,  # 2001-05
                                    6, 6, 6, 6, 6,  # 2006-10
                                    6, 6, 6, 6, 6,  # 2011-15
                                    8, 6, 6, 6, 6, 8],  # 2016-21
                                   2001):
            dt = date(year, 2, day)
            self.assertIn(dt, self.holidays)
            self.assertEqual(self.holidays[dt][:8], "Waitangi")
        # Mondayisation of Waitangi Day only begins in 2016.
        self.assertNotIn(date(2005, 2, 7), self.holidays)
        self.assertNotIn(date(2010, 2, 8), self.holidays)
        self.assertNotIn(date(2011, 2, 7), self.holidays)

    def test_good_friday(self):
        """Good Friday — movable; sample years across the 1900-2020 range."""
        for dt in [date(1900, 4, 13), date(1901, 4, 5), date(1902, 3, 28),
                   date(1999, 4, 2), date(2000, 4, 21), date(2010, 4, 2),
                   date(2018, 3, 30), date(2019, 4, 19), date(2020, 4, 10)]:
            self.assertIn(dt, self.holidays)
            self.assertNotIn(dt + relativedelta(days=-1), self.holidays)
            self.assertNotIn(dt + relativedelta(days=+1), self.holidays)

    def test_easter_monday(self):
        """Easter Monday — movable; sample years."""
        for dt in [date(1900, 4, 16), date(1901, 4, 8), date(1902, 3, 31),
                   date(1999, 4, 5), date(2010, 4, 5),
                   date(2018, 4, 2), date(2019, 4, 22), date(2020, 4, 13)]:
            self.assertIn(dt, self.holidays)
            self.assertNotIn(dt + relativedelta(days=-1), self.holidays)
            self.assertNotIn(dt + relativedelta(days=+1), self.holidays)

    def test_anzac_day(self):
        """Anzac Day (Apr 25) from 1921; Mondayised from 2015."""
        for year in range(1900, 1921):
            dt = date(year, 4, 25)
            self.assertNotIn(dt, self.holidays)
        for year in range(1921, 2100):
            dt = date(year, 4, 25)
            self.assertIn(dt, self.holidays)
        for year, day in enumerate([25, 25, 25, 25, 25,  # 2001-05
                                    25, 25, 25, 25, 25,  # 2006-10
                                    25, 25, 25, 25, 27,  # 2011-15
                                    25, 25, 25, 25, 27, 26],  # 2016-21
                                   2001):
            dt = date(year, 4, day)
            self.assertIn(dt, self.holidays, dt)
            self.assertEqual(self.holidays[dt][:5], "Anzac")
        # No Mondayised entries before 2015.
        self.assertNotIn(date(2009, 4, 27), self.holidays)
        self.assertNotIn(date(2010, 4, 26), self.holidays)

    def test_sovereigns_birthday(self):
        """Sovereign's Birthday: "King's" vs "Queen's" depending on reign."""
        self.assertIn(date(1909, 11, 9), self.holidays)
        self.assertIn(date(1936, 6, 23), self.holidays)
        self.assertIn(date(1937, 6, 9), self.holidays)
        self.assertIn(date(1940, 6, 3), self.holidays)
        self.assertIn(date(1952, 6, 2), self.holidays)
        for year in range(1912, 1936):
            dt = date(year, 6, 3)
            self.assertIn(dt, self.holidays)
            self.assertEqual(self.holidays[dt], "King's Birthday")
        for year, day in enumerate([4, 3, 2, 7, 6,  # 2001-05
                                    5, 4, 2, 1, 7,  # 2006-10
                                    6, 4, 3, 2, 1,  # 2011-15
                                    6, 5, 4, 3, 1, 7],  # 2016-21
                                   2001):
            dt = date(year, 6, day)
            self.assertIn(dt, self.holidays, dt)
            self.assertEqual(self.holidays[dt], "Queen's Birthday")

    def test_labour_day(self):
        """Labour Day (fourth Monday of October)."""
        for year, day in enumerate([22, 28, 27, 25, 24,  # 2001-05
                                    23, 22, 27, 26, 25,  # 2006-10
                                    24, 22, 28, 27, 26,  # 2011-15
                                    24, 23, 22, 28, 26, 25],  # 2016-21
                                   2001):
            dt = date(year, 10, day)
            self.assertIn(dt, self.holidays, dt)
            self.assertEqual(self.holidays[dt], "Labour Day")

    def test_christmas_day(self):
        """Christmas Day, Mondayised (to Dec 27) only when observed=True."""
        self.holidays.observed = False
        for year in range(1900, 2100):
            dt = date(year, 12, 25)
            self.assertIn(dt, self.holidays)
            self.assertNotIn(dt + relativedelta(days=-1), self.holidays)
        self.assertNotIn(date(2010, 12, 24), self.holidays)
        self.assertNotEqual(self.holidays[date(2011, 12, 26)],
                            "Christmas Day (Observed)")
        self.holidays.observed = True
        self.assertEqual(self.holidays[date(2011, 12, 27)],
                         "Christmas Day (Observed)")
        for year, day in enumerate([25, 25, 25, 27, 27,  # 2001-05
                                    25, 25, 25, 25, 27,  # 2006-10
                                    27, 25, 25, 25, 25,  # 2011-15
                                    27, 25, 25, 25, 25, 25],  # 2016-21
                                   2001):
            dt = date(year, 12, day)
            self.assertIn(dt, self.holidays, dt)
            self.assertEqual(self.holidays[dt][:9], "Christmas")

    def test_boxing_day(self):
        """Boxing Day, Mondayised (to Dec 28) only when observed=True."""
        self.holidays.observed = False
        for year in range(1900, 2100):
            dt = date(year, 12, 26)
            self.assertIn(dt, self.holidays)
            self.assertNotIn(dt + relativedelta(days=+1), self.holidays)
        self.assertNotIn(date(2009, 12, 28), self.holidays)
        self.assertNotIn(date(2010, 12, 27), self.holidays)
        self.holidays.observed = True
        self.assertIn(date(2009, 12, 28), self.holidays)
        self.assertIn(date(2010, 12, 27), self.holidays)
        for year, day in enumerate([26, 26, 26, 28, 26,  # 2001-05
                                    26, 26, 26, 28, 28,  # 2006-10
                                    26, 26, 26, 26, 28,  # 2011-15
                                    26, 26, 26, 26, 28, 28],  # 2016-21
                                   2001):
            dt = date(year, 12, day)
            self.assertIn(dt, self.holidays, dt)
            self.assertEqual(self.holidays[dt][:6], "Boxing")

    def test_auckland_anniversary_day(self):
        """Auckland Anniversary Day (late Jan / early Feb).

        Entries < 9 are February days, larger ones January — hence the
        ``2 if day < 9 else 1`` month selection.
        """
        auk_holidays = holidays.NZ(prov='Auckland')
        for year, day in enumerate([29, 28, 27, 26, 31,  # 2001-05
                                    30, 29, 28, 26, 1,  # 2006-10
                                    31, 30, 28, 27, 26,  # 2011-15
                                    1, 30, 29, 28, 27, 1],  # 2016-21
                                   2001):
            dt = date(year, 2 if day < 9 else 1, day)
            self.assertIn(dt, auk_holidays, dt)
            self.assertEqual(auk_holidays[dt],
                             "Auckland Anniversary Day")

    def test_taranaki_anniversary_day(self):
        """Taranaki Anniversary Day (March)."""
        tki_holidays = holidays.NZ(prov='Taranaki')
        for year, day in enumerate([12, 11, 10, 8, 14,  # 2001-05
                                    13, 12, 10, 9, 8,  # 2006-10
                                    14, 12, 11, 10, 9,  # 2011-15
                                    14, 13, 12, 11, 9, 8],  # 2016-21
                                   2001):
            dt = date(year, 3, day)
            self.assertIn(dt, tki_holidays, dt)
            self.assertEqual(tki_holidays[dt],
                             "Taranaki Anniversary Day")

    def test_hawkes_bay_anniversary_day(self):
        """Hawke's Bay Anniversary Day (October)."""
        hkb_holidays = holidays.NZ(prov="Hawke's Bay")
        for year, day in enumerate([19, 25, 24, 22, 21,  # 2001-05
                                    20, 19, 24, 23, 22,  # 2006-10
                                    21, 19, 25, 24, 23,  # 2011-15
                                    21, 20, 19, 25, 23, 22],  # 2016-21
                                   2001):
            dt = date(year, 10, day)
            self.assertIn(dt, hkb_holidays, dt)
            self.assertEqual(hkb_holidays[dt],
                             "Hawke's Bay Anniversary Day")

    def test_wellington_anniversary_day(self):
        """Wellington Anniversary Day (January)."""
        wgn_holidays = holidays.NZ(prov='Wellington')
        for year, day in enumerate([22, 21, 20, 19, 24,  # 2001-05
                                    23, 22, 21, 19, 25,  # 2006-10
                                    24, 23, 21, 20, 19,  # 2011-15
                                    25, 23, 22, 21, 20, 25],  # 2016-21
                                   2001):
            dt = date(year, 1, day)
            self.assertIn(dt, wgn_holidays, dt)
            self.assertEqual(wgn_holidays[dt],
                             "Wellington Anniversary Day", dt)

    def test_marlborough_anniversary_day(self):
        """Marlborough Anniversary Day (late Oct / early Nov)."""
        mbh_holidays = holidays.NZ(prov='Marlborough')
        for year, day in enumerate([29, 4, 3, 1, 31,  # 2001-05
                                    30, 29, 3, 2, 1,  # 2006-10
                                    31, 29, 4, 3, 2,  # 2011-15
                                    31, 30, 29, 4, 2, 1],  # 2016-21
                                   2001):
            # Small day numbers are November, larger ones October.
            dt = date(year, 11 if day < 9 else 10, day)
            self.assertIn(dt, mbh_holidays, dt)
            self.assertEqual(mbh_holidays[dt],
                             "Marlborough Anniversary Day", dt)

    def test_nelson_anniversary_day(self):
        """Nelson Anniversary Day (late Jan / early Feb)."""
        nsn_holidays = holidays.NZ(prov='Nelson')
        for year, day in enumerate([29, 4, 3, 2, 31,  # 2001-05
                                    30, 29, 4, 2, 1,  # 2006-10
                                    31, 30, 4, 3, 2,  # 2011-15
                                    1, 30, 29, 4, 3, 1],  # 2016-21
                                   2001):
            # Small day numbers are February, larger ones January.
            dt = date(year, 2 if day < 9 else 1, day)
            self.assertIn(dt, nsn_holidays, dt)
            self.assertEqual(nsn_holidays[dt],
                             "Nelson Anniversary Day", dt)

    def test_canterbury_anniversary_day(self):
        """Canterbury Anniversary Day (November)."""
        can_holidays = holidays.NZ(prov='Canterbury')
        for year, day in enumerate([16, 15, 14, 12, 11,  # 2001-05
                                    17, 16, 14, 13, 12,  # 2006-10
                                    11, 16, 15, 14, 13,  # 2011-15
                                    11, 17, 16, 15, 13, 12],  # 2016-21
                                   2001):
            dt = date(year, 11, day)
            self.assertIn(dt, can_holidays, dt)
            self.assertEqual(can_holidays[dt],
                             "Canterbury Anniversary Day", dt)

    def test_south_canterbury_anniversary_day(self):
        """South Canterbury Anniversary Day (September)."""
        stc_holidays = holidays.NZ(prov='South Canterbury')
        for year, day in enumerate([24, 23, 22, 27, 26,  # 2001-05
                                    25, 24, 22, 28, 27,  # 2006-10
                                    26, 24, 23, 22, 28,  # 2011-15
                                    26, 25, 24, 23, 28, 27],  # 2016-21
                                   2001):
            dt = date(year, 9, day)
            self.assertIn(dt, stc_holidays, dt)
            self.assertEqual(stc_holidays[dt],
                             "South Canterbury Anniversary Day", dt)

    def test_westland_anniversary_day(self):
        """Westland Anniversary Day (late Nov / early Dec)."""
        wtc_holidays = holidays.NZ(prov='Westland')
        for year, day in enumerate([3, 2, 1, 29, 5,  # 2001-05
                                    4, 3, 1, 30, 29,  # 2006-10
                                    28, 3, 2, 1, 30,  # 2011-15
                                    28, 4, 3, 2, 30, 29],  # 2016-21
                                   2001):
            # Small day numbers are December, larger ones November.
            dt = date(year, 12 if day < 9 else 11, day)
            self.assertIn(dt, wtc_holidays, dt)
            self.assertEqual(wtc_holidays[dt],
                             "Westland Anniversary Day", dt)

    def test_otago_anniversary_day(self):
        """Otago Anniversary Day (March)."""
        ota_holidays = holidays.NZ(prov='Otago')
        for year, day in enumerate([26, 25, 24, 22, 21,  # 2001-05
                                    20, 26, 25, 23, 22,  # 2006-10
                                    21, 26, 25, 24, 23,  # 2011-15
                                    21, 20, 26, 25, 23, 22],  # 2016-21
                                   2001):
            dt = date(year, 3, day)
            self.assertIn(dt, ota_holidays, dt)
            self.assertEqual(ota_holidays[dt],
                             "Otago Anniversary Day", dt)

    def test_southland_anniversary_day(self):
        """Southland Anniversary Day: January through 2011, then moved
        to around Easter (explicit month/day pairs from 2012)."""
        stl_holidays = holidays.NZ(prov='Southland')
        for year, day in enumerate([15, 14, 20, 19, 17,  # 2001-05
                                    16, 15, 14, 19, 18, 17],  # 2006-11
                                   2001):
            dt = date(year, 1, day)
            self.assertIn(dt, stl_holidays, dt)
            self.assertEqual(stl_holidays[dt],
                             "Southland Anniversary Day", dt)
        for year, (month, day) in enumerate([(4, 10), (4, 2), (4, 22),
                                             (4, 7), (3, 29), (4, 18),
                                             (4, 3), (4, 23), (4, 14),
                                             (4, 6)], 2012):
            dt = date(year, month, day)
            self.assertIn(dt, stl_holidays, dt)
            self.assertEqual(stl_holidays[dt],
                             "Southland Anniversary Day", dt)

    def test_chatham_islands_anniversary_day(self):
        """Chatham Islands Anniversary Day (late Nov / early Dec)."""
        cit_holidays = holidays.NZ(prov='Chatham Islands')
        for year, day in enumerate([3, 2, 1, 29, 28,  # 2001-05
                                    27, 3, 1, 30, 29,  # 2006-10
                                    28, 3, 2, 1, 30,  # 2011-15
                                    28, 27, 3, 2, 30, 29],  # 2016-21
                                   2001):
            # Small day numbers are December, larger ones November.
            dt = date(year, 12 if day < 9 else 11, day)
            self.assertIn(dt, cit_holidays, dt)
            self.assertEqual(cit_holidays[dt],
                             "Chatham Islands Anniversary Day", dt)

    def test_all_holidays_present(self):
        """Every expected holiday name appears in a union of all
        provinces for 1969 and 2015; in 1974 Waitangi Day was named
        "New Zealand Day" instead.
        """
        # Summing province calendars merges them into one mapping.
        nz_1969 = sum(holidays.NZ(years=[1969], prov=p)
                      for p in holidays.NZ.PROVINCES)
        holidays_in_1969 = sum((nz_1969.get_list(key) for key in nz_1969), [])
        nz_2015 = sum(holidays.NZ(years=[2015], prov=p)
                      for p in holidays.NZ.PROVINCES)
        holidays_in_2015 = sum((nz_2015.get_list(key) for key in nz_2015), [])
        nz_1974 = sum(holidays.NZ(years=[1974], prov=p)
                      for p in holidays.NZ.PROVINCES)
        holidays_in_1974 = sum((nz_1974.get_list(key) for key in nz_1974), [])
        all_holidays = ["New Year's Day",
                        "Day after New Year's Day",
                        "Waitangi Day",
                        "Good Friday",
                        "Easter Monday",
                        "Anzac Day",
                        "Queen's Birthday",
                        "Labour Day",
                        "Christmas Day",
                        "Boxing Day",
                        "Auckland Anniversary Day",
                        "Taranaki Anniversary Day",
                        "Hawke's Bay Anniversary Day",
                        "Wellington Anniversary Day",
                        "Marlborough Anniversary Day",
                        "Nelson Anniversary Day",
                        "Canterbury Anniversary Day",
                        "South Canterbury Anniversary Day",
                        "Westland Anniversary Day",
                        "Otago Anniversary Day",
                        "Southland Anniversary Day",
                        "Chatham Islands Anniversary Day",
                        "Queen's Birthday",
                        "Labour Day",
                        "Christmas Day",
                        "Boxing Day"]
        for holiday in all_holidays:
            self.assertIn(holiday, holidays_in_1969, holiday)
            self.assertIn(holiday, holidays_in_2015, holiday)
        # In 1974 the Feb 6 holiday was called "New Zealand Day".
        all_holidays.remove("Waitangi Day")
        all_holidays.insert(2, "New Zealand Day")
        for holiday in all_holidays:
            self.assertIn(holiday, holidays_in_1974, holiday)
        self.assertNotIn("Waitangi Day", holidays_in_1974)
class TestAU(unittest.TestCase):
    """Tests for the Australian holiday calendar.

    National holidays are checked on ``self.holidays``; state-specific
    behaviour on the per-state calendars in ``self.state_hols``. The
    per-year day tables follow the same ``enumerate(..., start)``
    convention as the NZ tests, with ``# 2011-15`` style row labels.
    """

    def setUp(self):
        self.holidays = holidays.AU(observed=True)
        # One calendar per state/territory, keyed by its abbreviation.
        self.state_hols = {state: holidays.AU(observed=True, prov=state)
                           for state in holidays.AU.PROVINCES}

    def test_new_years(self):
        """New Year's Day, with weekend shifts per state."""
        for year in range(1900, 2100):
            dt = date(year, 1, 1)
            self.assertIn(dt, self.holidays)
        for year, day in enumerate([3, 2, 1, 1, 1,  # 2011-15
                                    1, 2, 1, 1, 1, 1],  # 2016-21
                                   2011):
            dt = date(year, 1, day)
            for state, hols in self.state_hols.items():
                self.assertIn(dt, hols, (state, dt))
                self.assertEqual(hols[dt][:10], "New Year's", state)

    def test_australia_day(self):
        """Australia Day (Jan 26), with weekend observance, in every
        state; historically "Anniversary Day" in NSW."""
        for year, day in enumerate([26, 26, 28, 27, 26,  # 2011-15
                                    26, 26, 26, 28, 27, 26],  # 2016-21
                                   2011):
            jan26 = date(year, 1, 26)
            dt = date(year, 1, day)
            self.assertIn(jan26, self.holidays, dt)
            self.assertEqual(self.holidays[jan26], "Australia Day")
            self.assertIn(dt, self.holidays, dt)
            self.assertEqual(self.holidays[dt][:10], "Australia ")
            for state in holidays.AU.PROVINCES:
                self.assertIn(jan26, self.state_hols[state], (state, dt))
                self.assertEqual(self.state_hols[state][jan26],
                                 "Australia Day")
                self.assertIn(dt, self.state_hols[state], (state, dt))
                self.assertEqual(self.state_hols[state][dt][:10], "Australia ")
        self.assertNotIn(date(2016, 1, 27), self.holidays)
        # Nothing before the holiday was first kept.
        self.assertNotIn(date(1887, 1, 26), self.holidays)
        self.assertNotIn(date(1934, 1, 26), self.state_hols['SA'])
        for dt in [date(1889, 1, 26), date(1936, 1, 26), date(1945, 1, 26)]:
            self.assertIn(dt, self.state_hols['NSW'], dt)
            self.assertEqual(self.state_hols['NSW'][dt], "Anniversary Day")

    def test_good_friday(self):
        """Good Friday — movable; sample years."""
        for dt in [date(1900, 4, 13), date(1901, 4, 5), date(1902, 3, 28),
                   date(1999, 4, 2), date(2000, 4, 21), date(2010, 4, 2),
                   date(2018, 3, 30), date(2019, 4, 19), date(2020, 4, 10)]:
            self.assertIn(dt, self.holidays)
            self.assertEqual(self.holidays[dt], "Good Friday")

    def test_easter_saturday(self):
        """Easter Saturday in ACT/NSW/NT/QLD/SA/VIC, not TAS/WA."""
        for dt in [date(1900, 4, 14), date(1901, 4, 6), date(1902, 3, 29),
                   date(1999, 4, 3), date(2000, 4, 22), date(2010, 4, 3),
                   date(2018, 3, 31), date(2019, 4, 20), date(2020, 4, 11)]:
            for state in ['ACT', 'NSW', 'NT', 'QLD', 'SA', 'VIC']:
                self.assertIn(dt, self.state_hols[state], (state, dt))
                self.assertEqual(self.state_hols[state][dt], "Easter Saturday")
            for state in ['TAS', 'WA']:
                self.assertNotIn(dt, self.state_hols[state], (state, dt))

    def test_easter_sunday(self):
        """Easter Sunday in NSW/ACT/QLD/VIC, not NT/SA/TAS/WA."""
        for dt in [date(1900, 4, 15), date(1901, 4, 7), date(1902, 3, 30),
                   date(1999, 4, 4), date(2010, 4, 4),
                   date(2018, 4, 1), date(2019, 4, 21), date(2020, 4, 12)]:
            for state in ['NSW', 'ACT', 'QLD', 'VIC']:
                self.assertIn(dt, self.state_hols[state], (state, dt))
                self.assertEqual(self.state_hols[state][dt], "Easter Sunday")
            for state in ['NT', 'SA', 'TAS', 'WA']:
                self.assertNotIn(dt, self.state_hols[state], (state, dt))

    def test_easter_monday(self):
        """Easter Monday — national; the Tuesday after is not a holiday."""
        for dt in [date(1900, 4, 16), date(1901, 4, 8), date(1902, 3, 31),
                   date(1999, 4, 5), date(2010, 4, 5),
                   date(2018, 4, 2), date(2019, 4, 22), date(2020, 4, 13)]:
            self.assertIn(dt, self.holidays)
            self.assertEqual(self.holidays[dt], "Easter Monday")
            self.assertNotIn(dt + relativedelta(days=+1), self.holidays)

    def test_bank_holiday(self):
        """Bank Holiday (first Monday of August) in NSW."""
        for dt in [date(1912, 8, 5), date(1913, 8, 4),
                   date(1999, 8, 2), date(2018, 8, 6), date(2020, 8, 3)]:
            self.assertIn(dt, self.state_hols['NSW'], dt)
            self.assertEqual(self.state_hols['NSW'][dt], "Bank Holiday")

    def test_labour_day(self):
        """Labour Day: March dates in WA and VIC (VIC-only dates are not
        national holidays)."""
        for year, day in enumerate([7, 5, 4, 3, 2, 7, 6, ], 2011):
            dt = date(year, 3, day)
            self.assertIn(dt, self.state_hols['WA'], dt)
            self.assertEqual(self.state_hols['WA'][dt], "Labour Day")
        for year, day in enumerate([10, 9, 14], 2014):
            dt = date(year, 3, day)
            self.assertNotIn(dt, self.holidays, dt)
            self.assertIn(dt, self.state_hols['VIC'], dt)
            self.assertEqual(self.state_hols['VIC'][dt], "Labour Day")

    def test_anzac_day(self):
        """Anzac Day (Apr 25) from 1921; weekend observance varies by
        state (only NT/WA shift 2015/2020, ACT/NT/QLD/SA/WA in 2021)."""
        for year in range(1900, 1921):
            dt = date(year, 4, 25)
            self.assertNotIn(dt, self.holidays)
        for year in range(1921, 2100):
            dt = date(year, 4, 25)
            self.assertIn(dt, self.holidays)
        for dt in [date(2015, 4, 27), date(2020, 4, 27)]:
            self.assertNotIn(dt, self.holidays, dt)
            for state in ['NT', 'WA']:
                self.assertIn(dt, self.state_hols[state], (state, dt))
                self.assertEqual(self.state_hols[state][dt][:5], "Anzac")
            for state in ['ACT', 'QLD', 'SA', 'NSW', 'TAS', 'VIC']:
                self.assertNotIn(dt, self.state_hols[state], (state, dt))
        dt = date(2021, 4, 26)
        for state in ['ACT', 'NT', 'QLD', 'SA', 'WA']:
            self.assertIn(dt, self.state_hols[state], (state, dt))
            self.assertEqual(self.state_hols[state][dt][:5], "Anzac")
        for state in ['NSW', 'TAS', 'VIC']:
            self.assertNotIn(dt, self.state_hols[state], (state, dt))

    def test_western_australia_day(self):
        """WA's June holiday: "Foundation Day" through 2014, renamed
        "Western Australia Day" from 2015."""
        for year, day in enumerate([4, 3, 2], 2012):
            dt = date(year, 6, day)
            self.assertIn(dt, self.state_hols['WA'], dt)
            self.assertEqual(self.state_hols['WA'][dt], "Foundation Day")
        for year, day in enumerate([1, 6, 5], 2015):
            dt = date(year, 6, day)
            self.assertIn(dt, self.state_hols['WA'], dt)
            self.assertEqual(self.state_hols['WA'][dt],
                             "Western Australia Day")

    def test_adelaide_cup(self):
        """Adelaide Cup day in SA."""
        for dt in [date(2015, 3, 9), date(2016, 3, 14), date(2017, 3, 13)]:
            self.assertIn(dt, self.state_hols['SA'], dt)
            self.assertEqual(self.state_hols['SA'][dt], "Adelaide Cup")

    def test_queens_birthday(self):
        """Queen's Birthday: late September in WA, June elsewhere, and
        October in QLD for 2012/2016+."""
        # Western Australia
        for dt in [date(2012, 10, 1), date(2013, 9, 30), date(2014, 9, 29),
                   date(2015, 9, 28), date(2016, 9, 26), date(2017, 9, 25)]:
            self.assertIn(dt, self.state_hols['WA'], dt)
            self.assertEqual(self.state_hols['WA'][dt], "Queen's Birthday")
        # Other states except Queensland
        other_states = [
            date(2010, 6, 14), date(2011, 6, 13), date(2012, 6, 11),
            date(2013, 6, 10), date(2014, 6, 9), date(2015, 6, 8),
            date(2016, 6, 13), date(2017, 6, 12), date(2018, 6, 11)]
        for dt in other_states:
            self.assertIn(dt, self.state_hols['NSW'], dt)
            self.assertIn(dt, self.state_hols['VIC'], dt)
            self.assertIn(dt, self.state_hols['ACT'], dt)
        # Queensland
        qld_dates = other_states[:-3]
        qld_dates.remove(date(2012, 6, 11))
        qld_dates.extend([date(2012, 10, 1), date(2016, 10, 3),
                          date(2017, 10, 2), date(2018, 10, 1)])
        for dt in qld_dates:
            self.assertIn(dt, self.state_hols['QLD'], dt)
            self.assertEqual(self.state_hols['QLD'][dt], "Queen's Birthday")
        # 2012 QLD kept the June date as well (the October one was extra).
        self.assertIn(date(2012, 6, 11), self.state_hols['QLD'])

    def test_picnic_day(self):
        """Picnic Day (first Monday of August) in NT."""
        for dt in [date(2015, 8, 3), date(2016, 8, 1)]:
            self.assertIn(dt, self.state_hols['NT'], dt)
            self.assertEqual(self.state_hols['NT'][dt], "Picnic Day")

    def test_family_and_community_day(self):
        """Family & Community Day in the ACT (2007-2017, moving dates)."""
        for dt in [date(2007, 11, 6), date(2008, 11, 4), date(2009, 11, 3),
                   date(2010, 9, 26), date(2011, 10, 10), date(2012, 10, 8),
                   date(2013, 9, 30), date(2014, 9, 29), date(2015, 9, 28),
                   date(2016, 9, 26), date(2017, 9, 25)]:
            self.assertIn(dt, self.state_hols['ACT'], dt)
            self.assertEqual(self.state_hols['ACT'][dt],
                             "Family & Community Day")

    def test_reconciliation_day(self):
        """Reconciliation Day in the ACT from 2018."""
        for dt in [date(2018, 5, 28), date(2019, 5, 27), date(2020, 6, 1)]:
            self.assertIn(dt, self.state_hols['ACT'], dt)
            self.assertEqual(self.state_hols['ACT'][dt], "Reconciliation Day")

    def test_grand_final_day(self):
        """AFL Grand Final Day in VIC (2019 sample)."""
        dt = date(2019, 9, 27)
        self.assertIn(dt, self.state_hols['VIC'], dt)
        self.assertEqual(self.state_hols['VIC'][dt], "Grand Final Day")

    def test_melbourne_cup(self):
        """Melbourne Cup day (first Tuesday of November) in VIC."""
        for dt in [date(2014, 11, 4), date(2015, 11, 3), date(2016, 11, 1)]:
            self.assertIn(dt, self.state_hols['VIC'], dt)
            self.assertEqual(self.state_hols['VIC'][dt], "Melbourne Cup")

    def test_royal_queensland_show(self):
        """The Royal Queensland Show holiday in QLD (August)."""
        for year, day in enumerate([15, 14, 12, 11, 10, 16], 2018):
            dt = date(year, 8, day)
            self.assertIn(dt, self.state_hols['QLD'], dt)
            self.assertEqual(self.state_hols['QLD'][dt],
                             "The Royal Queensland Show")

    def test_christmas_day(self):
        """Christmas Day, with observed entries (Dec 27) when it falls
        on a weekend and ``observed`` is on."""
        self.holidays.observed = False
        for year in range(1900, 2100):
            dt = date(year, 12, 25)
            self.assertIn(dt, self.holidays)
            self.assertNotIn(dt + relativedelta(days=-1), self.holidays)
        self.assertNotIn(date(2010, 12, 24), self.holidays)
        self.assertNotEqual(self.holidays[date(2011, 12, 26)],
                            "Christmas Day (Observed)")
        self.holidays.observed = True
        self.assertEqual(self.holidays[date(2011, 12, 27)],
                         "Christmas Day (Observed)")
        for year, day in enumerate([25, 25, 25, 27, 27,  # 2001-05
                                    25, 25, 25, 25, 27,  # 2006-10
                                    27, 25, 25, 25, 25,  # 2011-15
                                    27, 25, 25, 25, 25, 25],  # 2016-21
                                   2001):
            dt = date(year, 12, day)
            self.assertIn(dt, self.holidays, dt)
            self.assertEqual(self.holidays[dt][:9], "Christmas")

    def test_boxing_day(self):
        """Boxing Day, with observed entries (Dec 28) when it falls on
        a weekend and ``observed`` is on."""
        self.holidays.observed = False
        for year in range(1900, 2100):
            dt = date(year, 12, 26)
            self.assertIn(dt, self.holidays)
            self.assertNotIn(dt + relativedelta(days=+1), self.holidays)
        self.assertNotIn(date(2009, 12, 28), self.holidays)
        self.assertNotIn(date(2010, 12, 27), self.holidays)
        self.holidays.observed = True
        self.assertIn(date(2009, 12, 28), self.holidays)
        self.assertIn(date(2010, 12, 27), self.holidays)
        for year, day in enumerate([26, 26, 26, 28, 26,  # 2001-05
                                    26, 26, 26, 28, 28,  # 2006-10
                                    26, 26, 26, 26, 28,  # 2011-15
                                    26, 26, 26, 26, 28, 28],  # 2016-21
                                   2001):
            dt = date(year, 12, day)
            self.assertIn(dt, self.holidays, dt)
            self.assertEqual(self.holidays[dt][:6], "Boxing")

    def test_all_holidays(self):
        """Every expected holiday name appears in a union of all states
        for 1957, 2012 and 2015."""
        au = sum(holidays.AU(years=[1957, 2012, 2015], prov=p)
                 for p in holidays.AU.PROVINCES)
        holidays_found = sum((au.get_list(key) for key in au), [])
        all_holidays = ["New Year's Day",
                        "Australia Day",
                        "Adelaide Cup",
                        "Canberra Day",
                        "Good Friday",
                        "Easter Saturday",
                        "Easter Sunday",
                        "Easter Monday",
                        "Anzac Day",
                        "Queen's Birthday",
                        "Western Australia Day",
                        "Family & Community Day",
                        "Labour Day",
                        "Eight Hours Day",
                        "May Day",
                        "Picnic Day",
                        "Melbourne Cup",
                        "Christmas Day",
                        "Proclamation Day",
                        "Boxing Day"]
        for holiday in all_holidays:
            self.assertIn(holiday, holidays_found, holiday)
class TestDE(unittest.TestCase):
    """Tests for the German (DE) holiday calendar, nationwide and per
    province (Bundesland)."""
    def setUp(self):
        # One nationwide calendar plus one calendar per province.
        self.holidays = holidays.DE()
        self.prov_hols = {prov: holidays.DE(prov=prov)
                          for prov in holidays.DE.PROVINCES}
    def test_no_data_before_1990(self):
        """The DE calendar has no entries before 1990 in any province."""
        de_1989 = sum(holidays.DE(years=[1989], prov=p)
                      for p in holidays.DE.PROVINCES)
        self.assertEqual(len(de_1989), 0)
    def test_all_holidays_present(self):
        """Every known German holiday name occurs somewhere in 2015, and
        no unexpected names occur."""
        de_2015 = sum(holidays.DE(years=[2015], prov=p)
                      for p in holidays.DE.PROVINCES)
        in_2015 = sum((de_2015.get_list(key) for key in de_2015), [])
        all_de = ["Neujahr",
                  "Heilige Drei Könige",
                  "Karfreitag",
                  "Ostersonntag",
                  "Ostermontag",
                  "Erster Mai",
                  "Christi Himmelfahrt",
                  "Pfingstsonntag",
                  "Pfingstmontag",
                  "Fronleichnam",
                  "Mariä Himmelfahrt",
                  "Tag der Deutschen Einheit",
                  "Reformationstag",
                  "Allerheiligen",
                  "Buß- und Bettag",
                  "Erster Weihnachtstag",
                  "Zweiter Weihnachtstag"]
        for holiday in all_de:
            self.assertIn(holiday, in_2015, "missing: {}".format(holiday))
        for holiday in in_2015:
            self.assertIn(holiday, all_de, "extra: {}".format(holiday))
    def test_fixed_holidays(self):
        """Fixed-date holidays apply nationwide every year from 1991 on."""
        fixed_days_whole_country = (
            (1, 1),  # Neujahr
            (5, 1),  # Maifeiertag
            (10, 3),  # Tag der Deutschen Einheit
            (12, 25),  # Erster Weihnachtstag
            (12, 26),  # Zweiter Weihnachtstag
        )
        for y, (m, d) in product(range(1991, 2050), fixed_days_whole_country):
            self.assertIn(date(y, m, d), self.holidays)
    def test_tag_der_deutschen_einheit_in_1990(self):
        """German Unity Day is already present in 1990."""
        self.assertIn(date(1990, 10, 3), self.holidays)
    def test_heilige_drei_koenige(self):
        """Epiphany (Jan 6) only in BW, BY and ST."""
        provinces_that_have = {'BW', 'BY', 'ST'}
        provinces_that_dont = set(holidays.DE.PROVINCES) - provinces_that_have
        for province, year in product(provinces_that_have, range(1991, 2050)):
            self.assertIn(date(year, 1, 6), self.prov_hols[province])
        for province, year in product(provinces_that_dont, range(1991, 2050)):
            self.assertNotIn(date(year, 1, 6), self.prov_hols[province])
    def test_karfreitag(self):
        """Good Friday in every province on the known dates."""
        known_good = [(2014, 4, 18), (2015, 4, 3), (2016, 3, 25),
                      (2017, 4, 14), (2018, 3, 30), (2019, 4, 19),
                      (2020, 4, 10), (2021, 4, 2), (2022, 4, 15),
                      (2023, 4, 7), (2024, 3, 29)]
        for province, (y, m, d) in product(holidays.DE.PROVINCES, known_good):
            self.assertIn(date(y, m, d), self.prov_hols[province])
    def test_ostersonntag(self):
        """Easter Sunday only in BB."""
        known_good = [(2014, 4, 20), (2015, 4, 5), (2016, 3, 27),
                      (2017, 4, 16), (2018, 4, 1), (2019, 4, 21),
                      (2020, 4, 12), (2021, 4, 4), (2022, 4, 17),
                      (2023, 4, 9), (2024, 3, 31)]
        provinces_that_have = {"BB"}
        provinces_that_dont = set(holidays.DE.PROVINCES) - provinces_that_have
        for province, (y, m, d) in product(provinces_that_have, known_good):
            self.assertIn(date(y, m, d), self.prov_hols[province])
        for province, (y, m, d) in product(provinces_that_dont, known_good):
            self.assertNotIn(date(y, m, d), self.prov_hols[province])
    def test_ostermontag(self):
        """Easter Monday in every province on the known dates."""
        known_good = [(2014, 4, 21), (2015, 4, 6), (2016, 3, 28),
                      (2017, 4, 17), (2018, 4, 2), (2019, 4, 22),
                      (2020, 4, 13), (2021, 4, 5), (2022, 4, 18),
                      (2023, 4, 10), (2024, 4, 1)]
        for province, (y, m, d) in product(holidays.DE.PROVINCES, known_good):
            self.assertIn(date(y, m, d), self.prov_hols[province])
    def test_75_jahrestag_beendigung_zweiter_weltkrieg(self):
        """One-off 2020-05-08 holiday (75th anniversary of the end of WWII),
        Berlin only."""
        known_good = [(2020, 5, 8)]
        provinces_that_have = {"BE"}
        provinces_that_dont = set(holidays.DE.PROVINCES) - provinces_that_have
        for province, (y, m, d) in product(provinces_that_have, known_good):
            self.assertIn(date(y, m, d), self.prov_hols[province])
        for province, (y, m, d) in product(provinces_that_dont, known_good):
            self.assertNotIn(date(y, m, d), self.prov_hols[province])
    def test_christi_himmelfahrt(self):
        """Ascension Day in every province on the known dates."""
        known_good = [(2014, 5, 29), (2015, 5, 14), (2016, 5, 5),
                      (2017, 5, 25), (2018, 5, 10), (2019, 5, 30),
                      (2020, 5, 21), (2021, 5, 13), (2022, 5, 26),
                      (2023, 5, 18), (2024, 5, 9)]
        for province, (y, m, d) in product(holidays.DE.PROVINCES, known_good):
            self.assertIn(date(y, m, d), self.prov_hols[province])
    def test_weltkindertag(self):
        """World Children's Day (Sep 20) only in TH."""
        known_good = [(2019, 9, 20), (2021, 9, 20)]
        provinces_that_have = {"TH"}
        provinces_that_dont = set(holidays.DE.PROVINCES) - provinces_that_have
        for province, (y, m, d) in product(provinces_that_have, known_good):
            self.assertIn(date(y, m, d), self.prov_hols[province])
        for province, (y, m, d) in product(provinces_that_dont, known_good):
            self.assertNotIn(date(y, m, d), self.prov_hols[province])
    def test_frauentag(self):
        """Women's Day (Mar 8) only in BE, checked for 2019."""
        known_good = [(2019, 3, 8), ]
        provinces_that_have = {"BE"}
        provinces_that_dont = set(holidays.DE.PROVINCES) - provinces_that_have
        for province, (y, m, d) in product(provinces_that_have, known_good):
            self.assertIn(date(y, m, d), self.prov_hols[province])
        for province, (y, m, d) in product(provinces_that_dont, known_good):
            self.assertNotIn(date(y, m, d), self.prov_hols[province])
    def test_pfingstsonntag(self):
        """Whit Sunday only in BB."""
        known_good = [(2014, 6, 8), (2015, 5, 24), (2016, 5, 15),
                      (2017, 6, 4), (2018, 5, 20), (2019, 6, 9),
                      (2020, 5, 31), (2021, 5, 23), (2022, 6, 5),
                      (2023, 5, 28), (2024, 5, 19)]
        provinces_that_have = {"BB"}
        provinces_that_dont = set(holidays.DE.PROVINCES) - provinces_that_have
        for province, (y, m, d) in product(provinces_that_have, known_good):
            self.assertIn(date(y, m, d), self.prov_hols[province])
        for province, (y, m, d) in product(provinces_that_dont, known_good):
            self.assertNotIn(date(y, m, d), self.prov_hols[province])
    def test_pfingstmontag(self):
        """Whit Monday in every province on the known dates."""
        known_good = [(2014, 6, 9), (2015, 5, 25), (2016, 5, 16),
                      (2017, 6, 5), (2018, 5, 21), (2019, 6, 10),
                      (2020, 6, 1), (2021, 5, 24), (2022, 6, 6),
                      (2023, 5, 29), (2024, 5, 20)]
        for province, (y, m, d) in product(holidays.DE.PROVINCES, known_good):
            self.assertIn(date(y, m, d), self.prov_hols[province])
    def test_fronleichnam(self):
        """Corpus Christi only in BW, BY, HE, NW, RP and SL."""
        known_good = [(2014, 6, 19), (2015, 6, 4), (2016, 5, 26),
                      (2017, 6, 15), (2018, 5, 31), (2019, 6, 20),
                      (2020, 6, 11), (2021, 6, 3), (2022, 6, 16),
                      (2023, 6, 8), (2024, 5, 30)]
        provinces_that_have = {'BW', 'BY', 'HE', 'NW', 'RP', 'SL'}
        provinces_that_dont = set(holidays.DE.PROVINCES) - provinces_that_have
        for province, (y, m, d) in product(provinces_that_have, known_good):
            self.assertIn(date(y, m, d), self.prov_hols[province])
        for province, (y, m, d) in product(provinces_that_dont, known_good):
            self.assertNotIn(date(y, m, d), self.prov_hols[province])
    def test_mariae_himmelfahrt(self):
        """Assumption Day (Aug 15) only in BY and SL."""
        provinces_that_have = {'BY', 'SL'}
        provinces_that_dont = set(holidays.DE.PROVINCES) - provinces_that_have
        for province, year in product(provinces_that_have, range(1991, 2050)):
            self.assertIn(date(year, 8, 15), self.prov_hols[province])
        for province, year in product(provinces_that_dont, range(1991, 2050)):
            self.assertNotIn(date(year, 8, 15), self.prov_hols[province])
    def test_reformationstag(self):
        """Reformation Day (Oct 31): a core set of provinces always has it,
        four more provinces gained it in 2018, and in 2017 (500th
        anniversary) every province had it."""
        prov_that_have = {'BB', 'MV', 'SN', 'ST', 'TH'}
        prov_yes_since_2018 = prov_that_have.union({'HB', 'HH', 'NI', 'SH'})
        prov_that_dont = set(holidays.DE.PROVINCES) - prov_that_have
        prov_not_since_2018 = set(holidays.DE.PROVINCES) - prov_yes_since_2018
        for province, year in product(prov_that_have, range(1991, 2050)):
            # in 2017 all states got the reformationstag for that year
            if year == 2017:
                continue
            self.assertIn(date(year, 10, 31), self.prov_hols[province])
        # additional provinces got this holiday 2018
        for province, year in product(prov_yes_since_2018, range(2018, 2050)):
            self.assertIn(date(year, 10, 31), self.prov_hols[province])
        for province, year in product(prov_that_dont, range(1991, 2017)):
            self.assertNotIn(date(year, 10, 31), self.prov_hols[province])
        for province, year in product(prov_not_since_2018, range(2018, 2050)):
            self.assertNotIn(date(year, 10, 31), self.prov_hols[province])
        # check the 2017 case where all states have the reformationstag
        for province in holidays.DE.PROVINCES:
            self.assertIn(date(2017, 10, 31), self.prov_hols[province])
    def test_allerheiligen(self):
        """All Saints' Day (Nov 1) only in BW, BY, NW, RP and SL."""
        provinces_that_have = {'BW', 'BY', 'NW', 'RP', 'SL'}
        provinces_that_dont = set(holidays.DE.PROVINCES) - provinces_that_have
        for province, year in product(provinces_that_have, range(1991, 2050)):
            self.assertIn(date(year, 11, 1), self.prov_hols[province])
        for province, year in product(provinces_that_dont, range(1991, 2050)):
            self.assertNotIn(date(year, 11, 1), self.prov_hols[province])
    def test_buss_und_bettag(self):
        """Day of Repentance and Prayer only in SN."""
        known_good = [(2014, 11, 19), (2015, 11, 18), (2016, 11, 16),
                      (2017, 11, 22), (2018, 11, 21), (2019, 11, 20),
                      (2020, 11, 18), (2021, 11, 17), (2022, 11, 16),
                      (2023, 11, 22), (2024, 11, 20)]
        provinces_that_have = {'SN'}
        provinces_that_dont = set(holidays.DE.PROVINCES) - provinces_that_have
        for province, (y, m, d) in product(provinces_that_have, known_good):
            self.assertIn(date(y, m, d), self.prov_hols[province])
        for province, (y, m, d) in product(provinces_that_dont, known_good):
            self.assertNotIn(date(y, m, d), self.prov_hols[province])
    def test_internationaler_frauentag(self):
        """International Women's Day (Mar 8): nowhere before 2018, BE only
        from 2019 on."""
        prov_that_have = {'BE'}
        prov_that_dont = set(holidays.DE.PROVINCES) - prov_that_have
        for province, year in product(
                holidays.DE.PROVINCES, range(1991, 2018)):
            self.assertNotIn(date(year, 3, 8), self.prov_hols[province])
        for province, year in product(prov_that_have, range(2019, 2050)):
            self.assertIn(date(year, 3, 8), self.prov_hols[province])
        for province, year in product(prov_that_dont, range(2019, 2050)):
            self.assertNotIn(date(year, 3, 8), self.prov_hols[province])
class TestAT(unittest.TestCase):
    """Tests for the Austrian (AT) holiday calendar."""

    def setUp(self):
        self.holidays = holidays.AT()

    def test_new_years(self):
        """Jan 1 is a holiday every year; the adjacent days are not."""
        for year in range(1900, 2100):
            new_year = date(year, 1, 1)
            self.assertIn(new_year, self.holidays)
            self.assertNotIn(new_year + relativedelta(days=-1), self.holidays)
            self.assertNotIn(new_year + relativedelta(days=+1), self.holidays)

    def test_christmas(self):
        """Dec 25 and 26 are holidays; Dec 24 and 27 are not."""
        for year in range(1900, 2100):
            christmas = date(year, 12, 25)
            self.assertIn(christmas, self.holidays)
            self.assertIn(christmas + relativedelta(days=+1), self.holidays)
            self.assertNotIn(christmas + relativedelta(days=-1), self.holidays)
            self.assertNotIn(christmas + relativedelta(days=+2), self.holidays)

    def test_easter_monday(self):
        """Known Easter Monday dates are holidays; adjacent days are not."""
        known_dates = ((1900, 4, 16), (1901, 4, 8), (1902, 3, 31),
                       (1999, 4, 5), (2000, 4, 24), (2010, 4, 5),
                       (2018, 4, 2), (2019, 4, 22), (2020, 4, 13))
        for y, m, d in known_dates:
            easter_monday = date(y, m, d)
            self.assertIn(easter_monday, self.holidays)
            self.assertNotIn(easter_monday + relativedelta(days=-1),
                             self.holidays)
            self.assertNotIn(easter_monday + relativedelta(days=+1),
                             self.holidays)

    def test_national_day(self):
        """National day: Nov 12 during 1919-1933, Oct 26 from 1967 on."""
        periods = ((range(1919, 1934), 11, 12),
                   (range(1967, 2100), 10, 26))
        for years, month, day in periods:
            for year in years:
                national_day = date(year, month, day)
                self.assertIn(national_day, self.holidays)
                self.assertNotIn(national_day + relativedelta(days=-1),
                                 self.holidays)
                self.assertNotIn(national_day + relativedelta(days=+1),
                                 self.holidays)

    def test_all_holidays_present(self):
        """Every expected Austrian holiday name occurs in 2015."""
        at_2015 = holidays.AT(years=[2015])
        expected_names = ("Neujahr",
                          "Heilige Drei Könige",
                          "Ostermontag",
                          "Staatsfeiertag",
                          "Christi Himmelfahrt",
                          "Pfingstmontag",
                          "Fronleichnam",
                          "Mariä Himmelfahrt",
                          "Nationalfeiertag",
                          "Allerheiligen",
                          "Mariä Empfängnis",
                          "Christtag",
                          "Stefanitag")
        for name in expected_names:
            self.assertIn(name, at_2015.values())
class TestDK(unittest.TestCase):
    """Tests for the Danish (DK) holiday calendar."""

    def setUp(self):
        self.holidays = holidays.DK()

    def test_2016(self):
        # Reference: http://www.officeholidays.com/countries/denmark/2016.php
        expected_2016 = ((1, 1), (3, 24), (3, 25), (3, 28),
                         (4, 22), (5, 5), (5, 16), (12, 25))
        for month, day in expected_2016:
            self.assertIn(date(2016, month, day), self.holidays)
class TestUK(unittest.TestCase):
    """Tests for the United Kingdom holiday calendar.

    All assertions run against the UK-wide calendar; the regional
    calendars are only smoke-tested for constructibility in setUp.
    """

    def setUp(self):
        # Smoke-test that each regional calendar can be constructed.
        # (The previous version rebound self.holidays five times before the
        # final assignment, so those bindings were dead; only holidays.UK()
        # was ever used by the tests below.)
        holidays.England()
        holidays.Wales()
        holidays.Scotland()
        holidays.IsleOfMan()
        holidays.NorthernIreland()
        self.holidays = holidays.UK()

    def test_new_years(self):
        """Jan 1 is a holiday from 1974 on; Dec 31 is a holiday only for
        the year 2000 (millennium special case)."""
        for year in range(1974, 2100):
            dt = date(year, 1, 1)
            self.assertIn(dt, self.holidays)
            if year == 2000:
                self.assertIn(dt + relativedelta(days=-1), self.holidays)
            else:
                self.assertNotIn(dt + relativedelta(days=-1), self.holidays)

    def test_good_friday(self):
        """Known Good Friday dates are holidays; adjacent days are not."""
        for dt in [date(1900, 4, 13), date(1901, 4, 5), date(1902, 3, 28),
                   date(1999, 4, 2), date(2000, 4, 21), date(2010, 4, 2),
                   date(2018, 3, 30), date(2019, 4, 19), date(2020, 4, 10)]:
            self.assertIn(dt, self.holidays)
            self.assertNotIn(dt + relativedelta(days=-1), self.holidays)
            self.assertNotIn(dt + relativedelta(days=+1), self.holidays)

    def test_easter_monday(self):
        """Known Easter Monday dates are holidays; adjacent days are not."""
        for dt in [date(1900, 4, 16), date(1901, 4, 8), date(1902, 3, 31),
                   date(1999, 4, 5), date(2000, 4, 24), date(2010, 4, 5),
                   date(2018, 4, 2), date(2019, 4, 22), date(2020, 4, 13)]:
            self.assertIn(dt, self.holidays)
            self.assertNotIn(dt + relativedelta(days=-1), self.holidays)
            self.assertNotIn(dt + relativedelta(days=+1), self.holidays)

    def test_royal_wedding(self):
        """The 2011-04-29 royal wedding is a one-off holiday."""
        self.assertIn('2011-04-29', self.holidays)
        self.assertNotIn('2010-04-29', self.holidays)
        self.assertNotIn('2012-04-29', self.holidays)

    def test_may_day(self):
        """May Day bank holiday on the known dates; in 2020 it was moved
        to May 8, so May 4 is not a holiday that year."""
        for dt in [date(1978, 5, 1), date(1979, 5, 7), date(1980, 5, 5),
                   date(1999, 5, 3), date(2000, 5, 1), date(2010, 5, 3),
                   date(2018, 5, 7), date(2019, 5, 6), date(2020, 5, 8)]:
            self.assertIn(dt, self.holidays)
            self.assertNotIn(dt + relativedelta(days=-1), self.holidays)
            self.assertNotIn(dt + relativedelta(days=+1), self.holidays)
        self.assertNotIn(date(2020, 5, 4), self.holidays)

    def test_spring_bank_holiday(self):
        """Spring bank holiday on the known dates; adjacent days are not."""
        for dt in [date(1978, 5, 29), date(1979, 5, 28), date(1980, 5, 26),
                   date(1999, 5, 31), date(2000, 5, 29), date(2010, 5, 31),
                   date(2018, 5, 28), date(2019, 5, 27), date(2020, 5, 25)]:
            self.assertIn(dt, self.holidays)
            self.assertNotIn(dt + relativedelta(days=-1), self.holidays)
            self.assertNotIn(dt + relativedelta(days=+1), self.holidays)

    def test_christmas_day(self):
        """Christmas Day plus its observed substitute when on a weekend."""
        self.holidays.observed = False
        for year in range(1900, 2100):
            dt = date(year, 12, 25)
            self.assertIn(dt, self.holidays)
            self.assertNotIn(dt + relativedelta(days=-1), self.holidays)
        self.assertNotIn(date(2010, 12, 24), self.holidays)
        self.assertNotEqual(self.holidays[date(2011, 12, 26)],
                            "Christmas Day (Observed)")
        self.holidays.observed = True
        self.assertEqual(self.holidays[date(2011, 12, 27)],
                         "Christmas Day (Observed)")
        for year, day in enumerate([25, 25, 25, 27, 27,       # 2001-05
                                    25, 25, 25, 25, 27,       # 2006-10
                                    27, 25, 25, 25, 25,       # 2011-15
                                    27, 25, 25, 25, 25, 25],  # 2016-21
                                   2001):
            dt = date(year, 12, day)
            self.assertIn(dt, self.holidays, dt)
            self.assertEqual(self.holidays[dt][:9], "Christmas")

    def test_boxing_day(self):
        """Boxing Day plus its observed substitute when on a weekend."""
        self.holidays.observed = False
        for year in range(1900, 2100):
            dt = date(year, 12, 26)
            self.assertIn(dt, self.holidays)
            self.assertNotIn(dt + relativedelta(days=+1), self.holidays)
        self.assertNotIn(date(2009, 12, 28), self.holidays)
        self.assertNotIn(date(2010, 12, 27), self.holidays)
        self.holidays.observed = True
        self.assertIn(date(2004, 12, 28), self.holidays)
        self.assertIn(date(2010, 12, 28), self.holidays)
        for year, day in enumerate([26, 26, 26, 28, 26,
                                    26, 26, 26, 28, 28,
                                    26, 26, 26, 26, 26,
                                    26, 26, 26, 26, 26, 28],
                                   2001):
            dt = date(year, 12, day)
            self.assertIn(dt, self.holidays, dt)
            self.assertEqual(self.holidays[dt][:6], "Boxing")

    def test_all_holidays_present(self):
        """Every expected UK holiday name occurs in 2015."""
        uk_2015 = holidays.UK(years=[2015])
        all_holidays = ["New Year's Day",
                        "Good Friday",
                        "Easter Monday [England, Wales, Northern Ireland]",
                        "May Day",
                        "Spring Bank Holiday",
                        "Christmas Day",
                        "Boxing Day"]
        for holiday in all_holidays:
            self.assertIn(holiday, uk_2015.values())
class TestScotland(unittest.TestCase):
    """Tests for the Scottish holiday calendar."""

    def setUp(self):
        self.holidays = holidays.Scotland()

    def test_2017(self):
        expected_2017 = ('2017-01-01', '2017-01-02', '2017-01-03',
                         '2017-04-14', '2017-05-01', '2017-05-29',
                         '2017-08-07', '2017-11-30', '2017-12-25',
                         '2017-12-26')
        for day in expected_2017:
            self.assertIn(day, self.holidays)
class TestIsleOfMan(unittest.TestCase):
    """Tests for the Isle of Man holiday calendar."""

    def setUp(self):
        self.holidays = holidays.IsleOfMan()

    def test_2018(self):
        for day in ('2018-06-01', '2018-07-05'):
            self.assertIn(day, self.holidays)
class TestIreland(unittest.TestCase):
    """Tests for the Irish (IE) holiday calendar."""

    def setUp(self):
        self.holidays = holidays.Ireland()

    def test_2020(self):
        expected_2020 = ['2020-01-01',  # New Year's Day
                         '2020-03-17',  # St. Patrick's Day
                         '2020-04-13',  # Easter Monday
                         '2020-05-04',  # May Day in IE
                         '2020-06-01',  # June Bank Holiday
                         '2020-08-03',  # Summer Bank Holiday
                         '2020-10-26',  # October Bank Holiday
                         '2020-12-25',  # Christmas Day
                         '2020-12-26',  # Boxing Day
                         '2020-12-28']  # Boxing Day (Observed)
        for day in expected_2020:
            self.assertIn(day, self.holidays)
        # The UK's moved 2020 May Day (May 8) does not apply in Ireland.
        self.assertNotIn('2020-05-08', self.holidays)
class TestES(unittest.TestCase):
    """Tests for the Spanish (ES) holiday calendar, nationwide and per
    autonomous community (province code)."""
    def setUp(self):
        # Nationwide calendar plus one calendar per province.
        self.holidays = holidays.ES()
        self.prov_holidays = {prov: holidays.ES(prov=prov)
                              for prov in holidays.ES.PROVINCES}
    def test_fixed_holidays(self):
        """Fixed-date national holidays apply every year from 1950 on."""
        fixed_days_whole_country = (
            (1, 1),
            (1, 6),
            (5, 1),
            (8, 15),
            (10, 12),
            (11, 1),
            (12, 6),
            (12, 8),
            (12, 25),
        )
        for y, (m, d) in product(range(1950, 2050), fixed_days_whole_country):
            self.assertIn(date(y, m, d), self.holidays)
    def test_variable_days_in_2016(self):
        """Easter-dependent days of 2016: Mar 24 everywhere except CAT,
        Mar 28 only in the listed provinces."""
        self.assertIn(date(2016, 3, 25), self.holidays)
        for prov, prov_holidays in self.prov_holidays.items():
            self.assertEqual(
                date(2016, 3, 24) in prov_holidays, prov != 'CAT')
            self.assertEqual(
                date(2016, 3, 28) in prov_holidays,
                prov in ['CAT', 'PVA', 'NAV', 'CVA', 'IBA'])
    def test_province_specific_days(self):
        """Each province-specific day (month, day) must appear exactly in
        the provinces listed for it, for every year 2010-2019."""
        province_days = {
            (2, 28): ['AND', 'CAN', 'CAM'],
            (3, 1): ['IBA'],
            (4, 23): ['ARG', 'CAL'],
            (5, 30): ['ICA'],
            (5, 2): ['MAD'],
            (6, 9): ['MUR', 'RIO'],
            (7, 25): ['GAL'],
            (9, 8): ['AST', 'EXT'],
            (9, 11): ['CAT'],
            (9, 27): ['NAV'],
            (10, 9): ['CVA'],
            (10, 25): ['PVA'],
        }
        for prov, prov_holidays in self.prov_holidays.items():
            for year in range(2010, 2020):
                self.assertEqual(
                    date(year, 12, 26) in prov_holidays,
                    prov in ['CAT', 'IBA'])
                self.assertEqual(
                    date(year, 3, 19) in prov_holidays,
                    prov in ['CVA', 'MUR', 'MAD', 'NAV', 'PVA'])
                self.assertEqual(
                    date(year, 6, 24) in prov_holidays,
                    prov in ['CAT', 'GAL'])
                for fest_day, fest_prov in province_days.items():
                    self.assertEqual(
                        date(year, *fest_day) in prov_holidays,
                        prov in fest_prov)
class TestTAR(unittest.TestCase):
    """Tests for the TAR calendar (used as the base of the ECB calendar;
    see TestECB below)."""
    def setUp(self):
        self.holidays = holidays.TAR()
    def test_new_years(self):
        """Jan 1 is a holiday from 1974 on; Dec 31 is not."""
        for year in range(1974, 2100):
            dt = date(year, 1, 1)
            self.assertIn(dt, self.holidays)
            self.assertNotIn(dt + relativedelta(days=-1), self.holidays)
    def test_good_friday(self):
        """Known Good Friday dates are holidays; adjacent days are not."""
        for dt in [date(1900, 4, 13), date(1901, 4, 5), date(1902, 3, 28),
                   date(1999, 4, 2), date(2000, 4, 21), date(2010, 4, 2),
                   date(2018, 3, 30), date(2019, 4, 19), date(2020, 4, 10)]:
            self.assertIn(dt, self.holidays)
            self.assertNotIn(dt + relativedelta(days=-1), self.holidays)
            self.assertNotIn(dt + relativedelta(days=+1), self.holidays)
    def test_easter_monday(self):
        """Known Easter Monday dates are holidays; adjacent days are not."""
        for dt in [date(1900, 4, 16), date(1901, 4, 8), date(1902, 3, 31),
                   date(1999, 4, 5), date(2000, 4, 24), date(2010, 4, 5),
                   date(2018, 4, 2), date(2019, 4, 22), date(2020, 4, 13)]:
            self.assertIn(dt, self.holidays)
            self.assertNotIn(dt + relativedelta(days=-1), self.holidays)
            self.assertNotIn(dt + relativedelta(days=+1), self.holidays)
    def test_labour_day(self):
        """May 1 is a holiday every year; adjacent days are not."""
        for year in range(1900, 2100):
            dt = date(year, 5, 1)
            self.assertIn(dt, self.holidays)
            self.assertNotIn(dt + relativedelta(days=-1), self.holidays)
            self.assertNotIn(dt + relativedelta(days=+1), self.holidays)
    def test_christmas_day(self):
        """Dec 25 is a holiday every year; Dec 24 is not."""
        for year in range(1900, 2100):
            dt = date(year, 12, 25)
            self.assertIn(dt, self.holidays)
            self.assertNotIn(dt + relativedelta(days=-1), self.holidays)
    def test_26_december_day(self):
        """Dec 26 is a holiday every year; Dec 27 is not."""
        for year in range(1900, 2100):
            dt = date(year, 12, 26)
            self.assertIn(dt, self.holidays)
            self.assertNotIn(dt + relativedelta(days=+1), self.holidays)
    def test_all_holidays_present(self):
        """Every expected TAR holiday name occurs in 2015."""
        tar_2015 = holidays.TAR(years=[2015])
        all_holidays = ["New Year's Day",
                        "Good Friday",
                        "Easter Monday",
                        "1 May (Labour Day)",
                        "Christmas Day",
                        "26 December"]
        for holiday in all_holidays:
            self.assertIn(holiday, tar_2015.values())
class TestECB(unittest.TestCase):
    """The ECB calendar must be a superset of the TAR calendar."""

    def setUp(self):
        self.holidays_ecb = holidays.ECB()
        self.holidays_tar = holidays.TAR()

    def test_new_years(self):
        for year in range(1974, 2100):
            # NOTE: _populate is a private API of the holidays package.
            self.holidays_ecb._populate(year)
            self.holidays_tar._populate(year)
            for tar_holiday in self.holidays_tar:
                self.assertIn(tar_holiday, self.holidays_ecb)
class TestCZ(unittest.TestCase):
    """Tests for the Czech (CZ) holiday calendar."""

    def setUp(self):
        self.holidays = holidays.CZ()

    def test_2017(self):
        # Reference:
        # http://www.officeholidays.com/countries/czech_republic/2017.php
        expected_2017 = ((1, 1), (4, 14), (4, 17), (5, 1), (5, 8),
                         (7, 5), (7, 6), (9, 28), (10, 28), (11, 17),
                         (12, 24), (12, 25), (12, 26))
        for month, day in expected_2017:
            self.assertIn(date(2017, month, day), self.holidays)

    def test_others(self):
        self.assertIn(date(1991, 5, 9), self.holidays)

    def test_czech_deprecated(self):
        """holidays.Czech is a deprecated alias of holidays.Czechia."""
        with warnings.catch_warnings(record=True) as caught:
            warnings.simplefilter("always")
            czech = holidays.Czech()
        self.assertIsInstance(czech, holidays.Czechia)
        self.assertEqual(1, len(caught))
        self.assertTrue(issubclass(caught[-1].category, DeprecationWarning))
class TestSK(unittest.TestCase):
    """Tests for the Slovak (SK) holiday calendar."""

    def setUp(self):
        self.holidays = holidays.SK()

    def test_2018(self):
        # Reference: https://www.officeholidays.com/countries/slovakia/2018.php
        expected_2018 = ((1, 1), (1, 6), (3, 30), (5, 1), (5, 8),
                         (4, 2), (7, 5), (8, 29), (9, 1), (9, 15),
                         (10, 30), (11, 1), (11, 17), (12, 24),
                         (12, 25), (12, 26))
        for month, day in expected_2018:
            self.assertIn(date(2018, month, day), self.holidays)

    def test_slovak_deprecated(self):
        """holidays.Slovak is a deprecated alias of holidays.Slovakia."""
        with warnings.catch_warnings(record=True) as caught:
            warnings.simplefilter("always")
            slovakia = holidays.Slovak()
        self.assertIsInstance(slovakia, holidays.Slovakia)
        self.assertEqual(1, len(caught))
        self.assertTrue(issubclass(caught[-1].category, DeprecationWarning))
class TestPL(unittest.TestCase):
    """Tests for the Polish (PL) holiday calendar."""

    def setUp(self):
        self.holidays = holidays.PL()

    def test_2017(self):
        # Reference: http://www.officeholidays.com/countries/poland/2017.php
        expected_2017 = ((1, 1), (1, 6), (4, 16), (4, 17), (5, 1),
                         (5, 3), (6, 4), (6, 15), (8, 15), (11, 1),
                         (11, 11), (12, 25), (12, 26))
        for month, day in expected_2017:
            self.assertIn(date(2017, month, day), self.holidays)

    def test_polish_deprecated(self):
        """holidays.Polish is a deprecated alias of holidays.Poland."""
        with warnings.catch_warnings(record=True) as caught:
            warnings.simplefilter("always")
            poland = holidays.Polish()
        self.assertIsInstance(poland, holidays.Poland)
        self.assertEqual(1, len(caught))
        self.assertTrue(issubclass(caught[-1].category, DeprecationWarning))
class TestPT(unittest.TestCase):
    """Tests for the Portuguese (PT) holiday calendar."""

    def setUp(self):
        self.holidays = holidays.PT()

    def test_2017(self):
        # Reference: http://www.officeholidays.com/countries/portugal/2017.php
        expected_2017 = ((1, 1),    # New Year
                         (4, 14),   # Good Friday
                         (4, 16),   # Easter
                         (4, 25),   # Liberation Day
                         (5, 1),    # Labour Day
                         (6, 10),   # Portugal Day
                         (6, 15),   # Corpus Christi
                         (8, 15),   # Assumption Day
                         (10, 5),   # Republic Day
                         (11, 1),   # All Saints Day
                         (12, 1),   # Independence
                         (12, 8),   # Immaculate
                         (12, 25))  # Christmas
        for month, day in expected_2017:
            self.assertIn(date(2017, month, day), self.holidays)
class TestPortugalExt(unittest.TestCase):
    """Tests for the extended Portuguese calendar (PortugalExt)."""
    def setUp(self):
        self.holidays = holidays.PortugalExt()
    def test_2017(self):
        self.assertIn(date(2017, 12, 24), self.holidays)  # Christmas' Eve
        self.assertIn(date(2017, 12, 26), self.holidays)  # S.Stephan
        # NOTE(review): the next line duplicates the Dec 26 assertion above,
        # yet its original comment said "New Year's Eve" — it was presumably
        # meant to check Dec 31; confirm against the PortugalExt
        # implementation before changing the date.
        self.assertIn(date(2017, 12, 26), self.holidays)
class TestNorway(unittest.TestCase):
    """Tests for the Norwegian holiday calendar, with and without Sundays
    counted as holidays (the ``include_sundays`` option)."""
    def setUp(self):
        self.holidays_without_sundays = holidays.Norway(include_sundays=False)
        self.holidays_with_sundays = holidays.Norway()
    def test_new_years(self):
        """Jan 1 is always a holiday."""
        self.assertIn('1900-01-01', self.holidays_without_sundays)
        self.assertIn('2017-01-01', self.holidays_without_sundays)
        self.assertIn('2999-01-01', self.holidays_without_sundays)
    def test_easter(self):
        """Maundy Thursday through Easter Monday for several years."""
        self.assertIn('2000-04-20', self.holidays_without_sundays)
        self.assertIn('2000-04-21', self.holidays_without_sundays)
        self.assertIn('2000-04-23', self.holidays_without_sundays)
        self.assertIn('2000-04-24', self.holidays_without_sundays)
        self.assertIn('2010-04-01', self.holidays_without_sundays)
        self.assertIn('2010-04-02', self.holidays_without_sundays)
        self.assertIn('2010-04-04', self.holidays_without_sundays)
        self.assertIn('2010-04-05', self.holidays_without_sundays)
        self.assertIn('2021-04-01', self.holidays_without_sundays)
        self.assertIn('2021-04-02', self.holidays_without_sundays)
        self.assertIn('2021-04-04', self.holidays_without_sundays)
        self.assertIn('2021-04-05', self.holidays_without_sundays)
        self.assertIn('2024-03-28', self.holidays_without_sundays)
        self.assertIn('2024-03-29', self.holidays_without_sundays)
        self.assertIn('2024-03-31', self.holidays_without_sundays)
        self.assertIn('2024-04-01', self.holidays_without_sundays)
    def test_workers_day(self):
        """May 1 became a holiday in 1947."""
        self.assertNotIn('1900-05-01', self.holidays_without_sundays)
        self.assertNotIn('1946-05-01', self.holidays_without_sundays)
        self.assertIn('1947-05-01', self.holidays_without_sundays)
        self.assertIn('2017-05-01', self.holidays_without_sundays)
        self.assertIn('2999-05-01', self.holidays_without_sundays)
    def test_constitution_day(self):
        """Constitution Day (May 17) became a holiday in 1947."""
        self.assertNotIn('1900-05-17', self.holidays_without_sundays)
        self.assertNotIn('1946-05-17', self.holidays_without_sundays)
        self.assertIn('1947-05-17', self.holidays_without_sundays)
        self.assertIn('2017-05-17', self.holidays_without_sundays)
        self.assertIn('2999-05-17', self.holidays_without_sundays)
    def test_pentecost(self):
        """Whit Sunday and Whit Monday for several years."""
        self.assertIn('2000-06-11', self.holidays_without_sundays)
        self.assertIn('2000-06-12', self.holidays_without_sundays)
        self.assertIn('2010-05-23', self.holidays_without_sundays)
        self.assertIn('2010-05-24', self.holidays_without_sundays)
        self.assertIn('2021-05-23', self.holidays_without_sundays)
        self.assertIn('2021-05-24', self.holidays_without_sundays)
        self.assertIn('2024-05-19', self.holidays_without_sundays)
        self.assertIn('2024-05-20', self.holidays_without_sundays)
    def test_christmas(self):
        """Dec 25 and 26 are always holidays."""
        self.assertIn('1901-12-25', self.holidays_without_sundays)
        self.assertIn('1901-12-26', self.holidays_without_sundays)
        self.assertIn('2016-12-25', self.holidays_without_sundays)
        self.assertIn('2016-12-26', self.holidays_without_sundays)
        self.assertIn('2500-12-25', self.holidays_without_sundays)
        self.assertIn('2500-12-26', self.holidays_without_sundays)
    def test_sundays(self):
        """
        Sundays are considered holidays in Norway
        :return:
        """
        self.assertIn('1989-12-31', self.holidays_with_sundays)
        self.assertIn('2017-02-05', self.holidays_with_sundays)
        self.assertIn('2017-02-12', self.holidays_with_sundays)
        self.assertIn('2032-02-29', self.holidays_with_sundays)
    def test_not_holiday(self):
        """
        Note: Sundays in Norway are considered holidays,
        so make sure none of these are actually Sundays
        TODO: Should add more dates that are often confused for being a holiday
        :return:
        """
        self.assertNotIn('2017-02-06', self.holidays_without_sundays)
        self.assertNotIn('2017-02-07', self.holidays_without_sundays)
        self.assertNotIn('2017-02-08', self.holidays_without_sundays)
        self.assertNotIn('2017-02-09', self.holidays_without_sundays)
        self.assertNotIn('2017-02-10', self.holidays_without_sundays)
        self.assertNotIn('2001-12-24', self.holidays_without_sundays)
        self.assertNotIn('2001-05-16', self.holidays_without_sundays)
        self.assertNotIn('2001-05-18', self.holidays_without_sundays)
        self.assertNotIn('1999-12-31', self.holidays_without_sundays)
        self.assertNotIn('2016-12-31', self.holidays_without_sundays)
        self.assertNotIn('2016-12-27', self.holidays_without_sundays)
        self.assertNotIn('2016-12-28', self.holidays_without_sundays)
        self.assertNotIn('2017-02-06', self.holidays_with_sundays)
        self.assertNotIn('2017-02-07', self.holidays_with_sundays)
        self.assertNotIn('2017-02-08', self.holidays_with_sundays)
        self.assertNotIn('2017-02-09', self.holidays_with_sundays)
        self.assertNotIn('2017-02-10', self.holidays_with_sundays)
        self.assertNotIn('2001-12-24', self.holidays_with_sundays)
        self.assertNotIn('2001-05-16', self.holidays_with_sundays)
        self.assertNotIn('2001-05-18', self.holidays_with_sundays)
        self.assertNotIn('1999-12-31', self.holidays_with_sundays)
        self.assertNotIn('2016-12-31', self.holidays_with_sundays)
        self.assertNotIn('2016-12-27', self.holidays_with_sundays)
        self.assertNotIn('2016-12-28', self.holidays_with_sundays)
class TestItaly(unittest.TestCase):
    def setUp(self):
        # Fresh Italian calendar for each test; individual tests may rebind
        # self.holidays with explicit years.
        self.holidays = holidays.IT()
def test_2017(self):
# https://www.giorni-festivi.it/
self.assertIn(date(2017, 1, 1), self.holidays)
self.assertIn(date(2017, 1, 6), self.holidays)
self.assertIn(date(2017, 4, 16), self.holidays)
self.assertIn(date(2017, 4, 17), self.holidays)
self.assertIn(date(2017, 4, 25), self.holidays)
self.assertIn(date(2017, 5, 1), self.holidays)
self.assertIn(date(2017, 6, 2), self.holidays)
self.assertIn(date(2017, 8, 15), self.holidays)
self.assertIn(date(2017, 11, 1), self.holidays)
self.assertIn(date(2017, 12, 8), self.holidays)
self.assertIn(date(2017, 12, 25), self.holidays)
self.assertIn(date(2017, 12, 26), self.holidays)
def test_new_years(self):
for year in range(1974, 2100):
self.assertIn(date(year, 1, 1), self.holidays)
def test_easter(self):
self.assertIn(date(2017, 4, 16), self.holidays)
def test_easter_monday(self):
self.assertIn(date(2017, 4, 17), self.holidays)
def test_republic_day_before_1948(self):
self.holidays = holidays.IT(years=[1947])
self.assertNotIn(date(1947, 6, 2), self.holidays)
def test_republic_day_after_1948(self):
self.holidays = holidays.IT(years=[1948])
self.assertIn(date(1948, 6, 2), self.holidays)
def test_liberation_day_before_1946(self):
self.holidays = holidays.IT(years=1945)
self.assertNotIn(date(1945, 4, 25), self.holidays)
def test_liberation_day_after_1946(self):
self.holidays = holidays.IT(years=1946)
self.assertIn(date(1946, 4, 25), self.holidays)
def test_christmas(self):
self.holidays = holidays.IT(years=2017)
self.assertIn(date(2017, 12, 25), self.holidays)
def test_saint_stephan(self):
self.holidays = holidays.IT(years=2017)
self.assertIn(date(2017, 12, 26), self.holidays)
def test_province_specific_days(self):
prov_an = (holidays.IT(prov='AN', years=[2017]))
prov_ao = (holidays.IT(prov='AO', years=[2017]))
prov_ba = (holidays.IT(prov='BA', years=[2017]))
prov_bl = (holidays.IT(prov='BL', years=[2017]))
prov_bo = (holidays.IT(prov='BO', years=[2017]))
prov_bz = (holidays.IT(prov='BZ', years=[2017]))
prov_bs = (holidays.IT(prov='BS', years=[2017]))
prov_cb = (holidays.IT(prov='CB', years=[2017]))
prov_ch = (holidays.IT(prov='CH', years=[2017]))
prov_cs = (holidays.IT(prov='CS', years=[2017]))
prov_ct = (holidays.IT(prov='CT', years=[2017]))
prov_en = (holidays.IT(prov='EN', years=[2017]))
prov_fc = (holidays.IT(prov='FC', years=[2017]))
prov_fe = (holidays.IT(prov='FE', years=[2017]))
prov_fi = (holidays.IT(prov='FI', years=[2017]))
prov_fr = (holidays.IT(prov='FR', years=[2017]))
prov_ge = (holidays.IT(prov='GE', years=[2017]))
prov_go = (holidays.IT(prov='GO', years=[2017]))
prov_is = (holidays.IT(prov='IS', years=[2017]))
prov_kr = (holidays.IT(prov='KR', years=[2017]))
prov_lt = (holidays.IT(prov='LT', years=[2017]))
prov_mb = (holidays.IT(prov='MB', years=[2017]))
prov_me = (holidays.IT(prov='ME', years=[2017]))
prov_mi = (holidays.IT(prov='MI', years=[2017]))
prov_mn = (holidays.IT(prov='MN', years=[2017]))
prov_mo = (holidays.IT(prov='MO', years=[2017]))
prov_ms = (holidays.IT(prov='MS', years=[2017]))
prov_na = (holidays.IT(prov='NA', years=[2017]))
prov_pa = (holidays.IT(prov='PA', years=[2017]))
prov_pc = (holidays.IT(prov='PC', years=[2017]))
prov_pd = (holidays.IT(prov='PD', years=[2017]))
prov_pg = (holidays.IT(prov='PG', years=[2017]))
prov_pr = (holidays.IT(prov='PR', years=[2017]))
prov_rm = (holidays.IT(prov='RM', years=[2017]))
prov_sp = (holidays.IT(prov='SP', years=[2017]))
prov_to = (holidays.IT(prov='TO', years=[2017]))
prov_ts = (holidays.IT(prov='TS', years=[2017]))
prov_vi = (holidays.IT(prov='VI', years=[2017]))
self.assertIn("2017-05-04", prov_an)
self.assertIn("2017-09-07", prov_ao)
self.assertIn("2017-12-06", prov_ba)
self.assertIn("2017-11-11", prov_bl)
self.assertIn("2017-10-04", prov_bo)
self.assertIn("2017-08-15", prov_bz)
self.assertIn("2017-02-15", prov_bs)
self.assertIn("2017-04-23", prov_cb)
self.assertIn("2017-05-11", prov_ch)
self.assertIn("2017-02-12", prov_cs)
self.assertIn("2017-02-05", prov_ct)
self.assertIn("2017-07-02", prov_en)
self.assertIn("2017-06-24", prov_fc)
self.assertIn("2017-02-04", prov_fc)
self.assertIn("2017-04-23", prov_fe)
self.assertIn("2017-06-24", prov_fi)
self.assertIn("2017-06-20", prov_fr)
self.assertIn("2017-06-24", prov_ge)
self.assertIn("2017-03-16", prov_go)
self.assertIn("2017-05-19", prov_is)
self.assertIn("2017-03-19", prov_sp)
self.assertIn("2017-10-09", prov_kr)
self.assertIn("2017-04-25", prov_lt)
self.assertIn("2017-06-24", prov_mb)
self.assertIn("2017-06-03", prov_me)
self.assertIn("2017-12-07", prov_mi)
self.assertIn("2017-03-18", prov_mn)
self.assertIn("2017-01-31", prov_mo)
self.assertIn("2017-10-04", prov_ms)
self.assertIn("2017-09-19", prov_na)
self.assertIn("2017-07-15", prov_pa)
self.assertIn("2017-07-04", prov_pc)
self.assertIn("2017-06-13", prov_pd)
self.assertIn("2017-01-29", prov_pg)
self.assertIn("2017-01-13", prov_pr)
self.assertIn("2017-06-29", prov_rm)
self.assertIn("2017-06-24", prov_to)
self.assertIn("2017-11-03", prov_ts)
self.assertIn("2017-04-25", prov_vi)
class TestSweden(unittest.TestCase):
    """Swedish holidays, with and without Sundays counted as holidays."""

    def setUp(self):
        self.holidays_without_sundays = holidays.Sweden(include_sundays=False)
        self.holidays_with_sundays = holidays.Sweden()

    def test_new_years(self):
        for day in ('1900-01-01', '2017-01-01', '2999-01-01'):
            self.assertIn(day, self.holidays_without_sundays)

    def test_easter(self):
        # Columns: day before Good Friday (not a holiday), Good Friday,
        # Easter Sunday, Easter Monday.
        for eve, friday, sunday, monday in (
                ('2000-04-20', '2000-04-21', '2000-04-23', '2000-04-24'),
                ('2010-04-01', '2010-04-02', '2010-04-04', '2010-04-05'),
                ('2021-04-01', '2021-04-02', '2021-04-04', '2021-04-05'),
                ('2024-03-28', '2024-03-29', '2024-03-31', '2024-04-01')):
            self.assertNotIn(eve, self.holidays_without_sundays)
            self.assertIn(friday, self.holidays_without_sundays)
            self.assertIn(sunday, self.holidays_without_sundays)
            self.assertIn(monday, self.holidays_without_sundays)

    def test_workers_day(self):
        for day in ('1800-05-01', '1879-05-01'):
            self.assertNotIn(day, self.holidays_without_sundays)
        for day in ('1939-05-01', '2017-05-01', '2999-05-01'):
            self.assertIn(day, self.holidays_without_sundays)

    def test_constitution_day(self):
        for day in ('1900-06-06', '2004-06-06'):
            self.assertNotIn(day, self.holidays_without_sundays)
        for day in ('2005-06-06', '2017-06-06', '2999-06-06'):
            self.assertIn(day, self.holidays_without_sundays)

    def test_pentecost(self):
        for day in ('2000-06-11', '2000-06-12', '2010-05-23', '2021-05-23',
                    '2003-06-09', '2024-05-19'):
            self.assertIn(day, self.holidays_without_sundays)
        for day in ('2010-05-24', '2021-05-24', '2024-05-20'):
            self.assertNotIn(day, self.holidays_without_sundays)

    def test_christmas(self):
        for day in ('1901-12-25', '1901-12-26',
                    '2016-12-25', '2016-12-26',
                    '2500-12-25', '2500-12-26'):
            self.assertIn(day, self.holidays_without_sundays)

    def test_sundays(self):
        """
        Sundays are considered holidays in Sweden
        :return:
        """
        for day in ('1989-12-31', '2017-02-05', '2017-02-12', '2032-02-29'):
            self.assertIn(day, self.holidays_with_sundays)

    def test_not_holiday(self):
        """
        Note: Sundays in Sweden are considered holidays,
        so make sure none of these are actually Sundays
        :return:
        """
        weekdays = ('2017-02-06', '2017-02-07', '2017-02-08',
                    '2017-02-09', '2017-02-10', '2016-12-27', '2016-12-28')
        for day in weekdays:
            self.assertNotIn(day, self.holidays_without_sundays)
        for day in weekdays:
            self.assertNotIn(day, self.holidays_with_sundays)
class TestJapan(unittest.TestCase):
    """Japanese public holidays (observance shifting disabled)."""

    def setUp(self):
        self.holidays = holidays.Japan(observed=False)

    def assertHoliday(self, *ymds):
        # Every (year, month, day) triple must be present.
        for ymd in ymds:
            self.assertIn(date(*ymd), self.holidays)

    def assertNoHoliday(self, *ymds):
        # None of the (year, month, day) triples may be present.
        for ymd in ymds:
            self.assertNotIn(date(*ymd), self.holidays)

    def test_new_years_day(self):
        self.assertHoliday((1949, 1, 1), (2017, 1, 1), (2050, 1, 1))

    def test_coming_of_age(self):
        self.assertHoliday((1999, 1, 15), (2000, 1, 10), (2017, 1, 9),
                           (2030, 1, 14), (2050, 1, 10))
        self.assertNoHoliday((2000, 1, 15), (2017, 1, 15), (2030, 1, 15))

    def test_foundation_day(self):
        self.assertHoliday((1949, 2, 11), (2017, 2, 11), (2050, 2, 11))

    def test_vernal_equinox_day(self):
        self.assertHoliday((1956, 3, 21), (1960, 3, 20), (1970, 3, 21),
                           (1980, 3, 20), (1990, 3, 21), (2000, 3, 20),
                           (2010, 3, 21), (2017, 3, 20), (2020, 3, 20),
                           (2030, 3, 20), (2040, 3, 20), (2092, 3, 19))

    def test_showa_day(self):
        self.assertHoliday((1950, 4, 29), (1990, 4, 29), (2010, 4, 29))

    def test_constitution_memorial_day(self):
        self.assertHoliday((1950, 5, 3), (2000, 5, 3), (2050, 5, 3))

    def test_greenery_day(self):
        self.assertNoHoliday((1950, 5, 4))
        self.assertHoliday((2007, 5, 4), (2050, 5, 4))

    def test_childrens_day(self):
        self.assertHoliday((1950, 5, 5), (2000, 5, 5), (2050, 5, 5))

    def test_marine_day(self):
        self.assertNoHoliday((1950, 7, 20))
        self.assertHoliday((2000, 7, 20), (2003, 7, 21), (2017, 7, 17),
                           (2020, 7, 23), (2050, 7, 18))

    def test_mountain_day(self):
        self.assertNoHoliday((1950, 8, 11), (2015, 8, 11))
        self.assertHoliday((2016, 8, 11), (2017, 8, 11), (2020, 8, 10),
                           (2050, 8, 11))

    def test_respect_for_the_aged_day(self):
        self.assertNoHoliday((1965, 9, 15), (2004, 9, 15))
        self.assertHoliday((1966, 9, 15), (2002, 9, 15), (2003, 9, 15),
                           (2004, 9, 20), (2017, 9, 18), (2050, 9, 19))

    def test_autumnal_equinox_day(self):
        self.assertHoliday((2000, 9, 23), (2010, 9, 23), (2017, 9, 23),
                           (2020, 9, 22), (2030, 9, 23), (1979, 9, 24),
                           (2032, 9, 21))

    def test_health_and_sports_day(self):
        self.assertNoHoliday((1965, 10, 10), (2000, 10, 10))
        self.assertHoliday((1966, 10, 10), (1999, 10, 10), (2000, 10, 9),
                           (2017, 10, 9), (2020, 7, 24), (2050, 10, 10))

    def test_culture_day(self):
        self.assertHoliday((1950, 11, 3), (2000, 11, 3), (2050, 11, 3))

    def test_labour_thanks_giving_day(self):
        self.assertHoliday((1950, 11, 23), (2000, 11, 23), (2050, 11, 23))

    def test_emperors_birthday(self):
        self.assertHoliday((1989, 12, 23), (2017, 12, 23), (2020, 2, 23))
        self.assertNoHoliday((2019, 12, 23))

    def test_reiwa_emperor_holidays(self):
        self.assertHoliday((2019, 4, 30), (2019, 5, 1), (2019, 5, 2),
                           (2019, 10, 22))

    def test_invalid_years(self):
        # Membership checks outside the supported year range must raise.
        with self.assertRaises(NotImplementedError):
            date(1948, 1, 1) in self.holidays
        with self.assertRaises(NotImplementedError):
            date(2100, 1, 1) in self.holidays
class TestFrance(unittest.TestCase):
    """French national holidays and the province-specific calendars."""

    def setUp(self):
        self.holidays = holidays.France()
        self.prov_holidays = {prov: holidays.France(prov=prov)
                              for prov in holidays.France.PROVINCES}

    def test_2017(self):
        for month, day in ((1, 1), (4, 17), (5, 1), (5, 8), (5, 25),
                           (6, 5), (7, 14)):
            self.assertIn(date(2017, month, day), self.holidays)

    def test_others(self):
        self.assertEqual(self.holidays[date(1948, 5, 1)],
                         'Fête du Travail et de la Concorde sociale')

    def test_alsace_moselle(self):
        prov = self.prov_holidays['Alsace-Moselle']
        for day in (date(2017, 4, 14), date(2017, 12, 26)):
            self.assertIn(day, prov)

    def test_mayotte(self):
        self.assertIn(date(2017, 4, 27), self.prov_holidays['Mayotte'])

    def test_wallis_et_futuna(self):
        prov = self.prov_holidays['Wallis-et-Futuna']
        for day in (date(2017, 4, 28), date(2017, 7, 29)):
            self.assertIn(day, prov)

    def test_martinique(self):
        self.assertIn(date(2017, 5, 22), self.prov_holidays['Martinique'])

    def test_guadeloupe(self):
        prov = self.prov_holidays['Guadeloupe']
        for day in (date(2017, 5, 27), date(2017, 7, 21)):
            self.assertIn(day, prov)

    def test_guyane(self):
        self.assertIn(date(2017, 6, 10), self.prov_holidays['Guyane'])

    def test_polynesie_francaise(self):
        self.assertIn(date(2017, 6, 29),
                      self.prov_holidays['Polynésie Française'])

    def test_nouvelle_caledonie(self):
        self.assertIn(date(2017, 9, 24),
                      self.prov_holidays['Nouvelle-Calédonie'])

    def test_saint_barthelemy(self):
        self.assertIn(date(2017, 10, 9),
                      self.prov_holidays['Saint-Barthélémy'])

    def test_la_reunion(self):
        self.assertIn(date(2017, 12, 20), self.prov_holidays['La Réunion'])
class TestBelgium(unittest.TestCase):
    """Belgian public holidays."""

    def setUp(self):
        self.holidays = holidays.BE()

    def test_2017(self):
        # https://www.belgium.be/nl/over_belgie/land/belgie_in_een_notendop/feestdagen
        for month, day in ((1, 1), (4, 16), (4, 17), (5, 1), (5, 25),
                           (6, 4), (6, 5), (7, 21), (8, 15), (11, 1),
                           (11, 11), (12, 25)):
            self.assertIn(date(2017, month, day), self.holidays)
class TestSouthAfrica(unittest.TestCase):
    """South African holidays: regular, historic, once-off and elections."""

    def setUp(self):
        self.holidays = holidays.ZA()

    def test_new_years(self):
        self.assertIn('1910-01-01', self.holidays)
        self.assertIn('2017-01-01', self.holidays)
        self.assertIn('2999-01-01', self.holidays)
        # 2017-01-01 fell on a Sunday, so the following Monday is observed.
        self.assertIn('2017-01-02', self.holidays)

    def test_easter(self):
        self.assertIn(date(2017, 4, 14), self.holidays)
        self.assertIn(date(2017, 4, 17), self.holidays)
        self.assertIn(date(1994, 4, 1), self.holidays)

    def test_static(self):
        self.assertIn('2004-08-09', self.holidays)

    def test_not_holiday(self):
        self.assertNotIn('2016-12-28', self.holidays)
        self.assertNotIn('2015-03-02', self.holidays)

    def test_onceoff(self):
        # FIX: only the 1999/2000 dates relate to Y2K; 2008-05-02 was a
        # separate once-off holiday mislabelled "Y2K" before.
        self.assertIn('1999-12-31', self.holidays)  # Y2K changeover
        self.assertIn('2000-01-02', self.holidays)  # Y2K changeover
        self.assertIn('2008-05-02', self.holidays)  # once-off holiday (2008)
        self.assertNotIn('2017-08-03', self.holidays)

    def test_historic(self):
        self.assertIn('1980-05-31', self.holidays)  # Union/Republic Day
        self.assertNotIn('2018-05-31', self.holidays)
        self.assertIn('1952-12-16', self.holidays)  # Day of the Vow
        self.assertIn('1988-05-06', self.holidays)  # Workers' Day
        self.assertIn('1961-07-10', self.holidays)  # Family Day
        self.assertIn('1947-08-04', self.holidays)  # King's Birthday
        self.assertNotIn('1948-08-04', self.holidays)
        self.assertIn('1975-09-01', self.holidays)  # Settler's Day
        self.assertNotIn('1976-09-01', self.holidays)

    def test_elections(self):
        # FIX: assertIn gives a useful failure message, unlike
        # assertTrue(x in y); also 2009-04-22 was the 2009 election,
        # previously mislabelled 2008.
        for day in ('1999-06-02',   # Election Day 1999
                    '2004-04-14',   # Election Day 2004
                    '2006-03-01',   # Local elections 2006
                    '2009-04-22',   # Election Day 2009
                    '2011-05-18',   # Election Day 2011
                    '2014-05-07',   # Election Day 2014
                    '2016-08-03',   # Election Day 2016
                    '2019-05-08'):  # Election Day 2019
            self.assertIn(day, self.holidays)
class TestSI(unittest.TestCase):
    """Slovenian public holidays."""

    def setUp(self):
        self.holidays = holidays.SI()

    def test_holidays(self):
        """
        Test all expected holiday dates
        :return:
        """
        expected = (
            date(2017, 1, 1),    # New Year
            date(2017, 1, 2),
            date(2017, 2, 8),    # Prešeren's day
            date(2016, 3, 28),   # Easter Monday 2016
            date(2017, 4, 17),   # Easter Monday 2017
            date(2017, 4, 27),   # Day of uprising against occupation
            date(2017, 5, 1),    # Labour day
            date(2017, 5, 2),    # Labour day
            date(2017, 6, 25),   # Statehood day
            date(2017, 8, 15),   # Assumption day
            date(2017, 10, 31),  # Reformation day
            date(2017, 11, 1),   # Remembrance day
            date(2017, 12, 25),  # Christmas
            date(2017, 12, 26),  # Day of independence and unity
        )
        for day in expected:
            self.assertIn(day, self.holidays)

    def test_non_holidays(self):
        """
        Test dates that should be excluded from holidays list
        :return:
        """
        # January 2nd was not a public holiday between 2012 and 2017.
        for year in (2013, 2014, 2015, 2016):
            self.assertNotIn(date(year, 1, 2), self.holidays)

    def test_missing_years(self):
        self.assertNotIn(date(1990, 1, 1), self.holidays)
class TestIE(unittest.TestCase):
    """Irish public holidays."""

    def setUp(self):
        self.irish_holidays = holidays.IE()

    def test_new_year_day(self):
        for day in ('2017-01-02', '2018-01-01'):
            self.assertIn(day, self.irish_holidays)

    def test_st_patricks_day(self):
        for day in ('2017-03-17', '2018-03-17'):
            self.assertIn(day, self.irish_holidays)

    def test_easter_monday(self):
        for day in ('2017-04-17', '2018-04-02'):
            self.assertIn(day, self.irish_holidays)

    def test_may_bank_holiday(self):
        for day in ('2017-05-01', '2018-05-07'):
            self.assertIn(day, self.irish_holidays)

    def test_june_bank_holiday(self):
        for day in ('2017-06-05', '2018-06-04'):
            self.assertIn(day, self.irish_holidays)

    def test_august_bank_holiday(self):
        for day in ('2017-08-07', '2018-08-06'):
            self.assertIn(day, self.irish_holidays)

    def test_october_bank_holiday(self):
        for day in ('2017-10-30', '2018-10-29'):
            self.assertIn(day, self.irish_holidays)

    def test_christmas_period(self):
        for day in ('2015-12-25', '2015-12-28', '2016-12-26', '2016-12-27',
                    '2017-12-25', '2017-12-26', '2018-12-25', '2018-12-26'):
            self.assertIn(day, self.irish_holidays)
class TestFinland(unittest.TestCase):
    """Finnish holidays: fixed-date, Easter-relative and midsummer."""

    def setUp(self):
        self.holidays = holidays.FI()

    def _check_named(self, day, name):
        # Assert both membership and the stored Finnish holiday name.
        self.assertIn(day, self.holidays)
        self.assertEqual(self.holidays[day], name)

    def test_fixed_holidays(self):
        for day, name in ((date(2017, 1, 1), "Uudenvuodenpäivä"),
                          (date(2017, 1, 6), "Loppiainen"),
                          (date(2017, 5, 1), "Vappu"),
                          (date(2017, 12, 6), "Itsenäisyyspäivä"),
                          (date(2017, 12, 25), "Joulupäivä"),
                          (date(2017, 12, 26), "Tapaninpäivä")):
            self._check_named(day, name)

    def test_relative_holidays(self):
        for day, name in ((date(2017, 4, 14), "Pitkäperjantai"),
                          (date(2017, 4, 16), "Pääsiäispäivä"),
                          (date(2017, 4, 17), "2. pääsiäispäivä"),
                          (date(2017, 5, 25), "Helatorstai"),
                          (date(2017, 11, 4), "Pyhäinpäivä")):
            self._check_named(day, name)

    def test_Juhannus(self):
        self.assertIn(date(2017, 6, 24), self.holidays)
        self.assertNotIn(date(2017, 6, 20), self.holidays)
        self.assertIn(date(2020, 6, 20), self.holidays)
        self.assertIn(date(2021, 6, 26), self.holidays)
        self.assertIn(date(2018, 6, 22), self.holidays)
        self.assertEqual(self.holidays[date(2018, 6, 22)], "Juhannusaatto")
        self.assertEqual(self.holidays[date(2018, 6, 23)], "Juhannuspäivä")
class TestHungary(unittest.TestCase):
    """Hungarian holidays, including communist-era rules and the extra
    observed days off (checked with observance shifting disabled)."""

    def setUp(self):
        self.holidays = holidays.HU(observed=False)
        self.next_year = date.today().year + 1

    def test_national_day_was_not_celebrated_during_communism(self):
        for year in range(1951, 1988):
            self.assertNotIn(date(year, 3, 15), self.holidays)
        self.assertIn(date(1989, 3, 15), self.holidays)

    def test_holidays_during_communism(self):
        for year in range(1950, 1989):
            self.assertIn(date(year, 3, 21), self.holidays)
            self.assertIn(date(year, 4, 4), self.holidays)
            # November 7th is expected every year of the era except 1956.
            if year != 1956:
                self.assertIn(date(year, 11, 7), self.holidays)
        self.assertIn(date(1989, 3, 21), self.holidays)

    def test_foundation_day_renamed_during_communism(self):
        for year in range(1950, 1990):
            self.assertEqual(self.holidays[date(year, 8, 20)],
                             "A kenyér ünnepe")

    def test_christian_holidays_2nd_day_was_not_held_in_1955(self):
        hu_1955 = holidays.Hungary(years=[1955])
        for day in (date(1955, 4, 11), date(1955, 12, 26)):
            self.assertNotIn(day, hu_1955)

    def test_good_friday_since_2017(self):
        self.assertNotIn(date(2016, 3, 25), self.holidays)
        self.assertIn(date(2017, 4, 14), self.holidays)
        self.assertIn(date(2018, 3, 30), self.holidays)

    def test_whit_monday_since_1992(self):
        self.assertNotIn(date(1991, 5, 20), self.holidays)
        self.assertIn(date(1992, 6, 8), self.holidays)

    def test_labour_day_since_1946(self):
        self.assertNotIn(date(1945, 5, 1), self.holidays)
        for year in range(1946, self.next_year):
            self.assertIn(date(year, 5, 1), self.holidays)

    def test_labour_day_was_doubled_in_early_50s(self):
        for year in range(1950, 1954):
            self.assertIn(date(year, 5, 2), self.holidays)

    def test_october_national_day_since_1991(self):
        for year in range(1991, self.next_year):
            self.assertIn(date(year, 10, 23), self.holidays)

    def test_all_saints_day_since_1999(self):
        for year in range(1999, self.next_year):
            self.assertIn(date(year, 11, 1), self.holidays)

    def test_additional_day_off(self):
        observed_days_off = holidays.HU(
            observed=True, years=range(2010, self.next_year))
        extra_days = (
            date(2010, 12, 24),
            date(2011, 3, 14), date(2011, 10, 31),
            date(2012, 3, 16), date(2012, 4, 30), date(2012, 10, 22),
            date(2012, 11, 2), date(2012, 12, 24),
            date(2013, 8, 19), date(2013, 12, 24), date(2013, 12, 27),
            date(2014, 5, 2), date(2014, 10, 24), date(2014, 12, 24),
            date(2015, 1, 2), date(2015, 8, 21), date(2015, 12, 24),
            date(2016, 3, 14), date(2016, 10, 31),
            date(2018, 3, 16), date(2018, 4, 30), date(2018, 10, 22),
            date(2018, 11, 2), date(2018, 12, 24), date(2018, 12, 31),
            date(2019, 8, 19), date(2019, 12, 24), date(2019, 12, 27),
        )
        # Each extra day only exists when observance shifting is enabled.
        for day in extra_days:
            self.assertNotIn(day, self.holidays)
            self.assertIn(day, observed_days_off)

    def test_monday_new_years_eve_day_off(self):
        observed_day_off = holidays.HU(observed=True)
        self.assertIn(date(2018, 12, 31), observed_day_off)

    def test_2018(self):
        expected_2018 = (
            (1, 1),    # New Year
            (3, 15),   # national holiday
            (3, 30),   # Good Friday
            (4, 1),    # Easter Sunday
            (4, 2),    # Easter Monday
            (5, 1),    # Workers' Day
            (5, 20),   # Pentecost
            (5, 21),   # Pentecost Monday
            (8, 20),   # State Foundation Day
            (10, 23),  # National Day
            (11, 1),   # All Saints' Day
            (12, 25),  # First day of Christmas
            (12, 26),  # Second day of Christmas
        )
        for month, day in expected_2018:
            self.assertIn(date(2018, month, day), self.holidays)
class TestSwitzerland(unittest.TestCase):
def setUp(self):
    self.holidays = holidays.CH()
    # One calendar per canton, keyed by its two-letter code.
    self.prov_hols = {prov: holidays.CH(prov=prov)
                      for prov in holidays.CH.PROVINCES}
def test_all_holidays_present(self):
    # Collect every holiday name produced for 2018 across all cantons.
    ch_2018 = sum(holidays.CH(years=[2018], prov=p)
                  for p in holidays.CH.PROVINCES)
    in_2018 = sum((ch_2018.get_list(key) for key in ch_2018), [])
    all_ch = ['Neujahrestag',
              'Berchtoldstag',
              'Heilige Drei Könige',
              'Jahrestag der Ausrufung der Republik',
              'Josefstag',
              'Näfelser Fahrt',
              'Karfreitag',
              'Ostern',
              'Ostermontag',
              'Tag der Arbeit',
              'Auffahrt',
              'Pfingsten',
              'Pfingstmontag',
              'Fronleichnam',
              'Fest der Unabhängigkeit',
              'Peter und Paul',
              'Nationalfeiertag',
              'Mariä Himmelfahrt',
              'Lundi du Jeûne',
              'Bruder Klaus',
              'Allerheiligen',
              'Mariä Empfängnis',
              'Escalade de Genève',
              'Weihnachten',
              'Stephanstag',
              'Wiederherstellung der Republik']
    # BUG FIX: the original checked each list against itself
    # (`holiday in all_ch` while iterating all_ch), which can never
    # fail.  Cross-check the two collections instead: every expected
    # name must be generated for 2018, and every generated name must
    # be a known holiday.
    for holiday in all_ch:
        self.assertTrue(holiday in in_2018, "missing: {}".format(holiday))
    for holiday in in_2018:
        self.assertTrue(holiday in all_ch, "extra: {}".format(holiday))
def test_fixed_holidays(self):
    # (month, day) pairs expected in every canton for every year.
    # assertIn (instead of assertTrue(x in y)) reports the failing
    # date in the error message.
    fixed_days_whole_country = (
        (1, 1),    # Neujahrestag
        (8, 1),    # Nationalfeiertag
        (12, 25),  # Weihnachten
    )
    for y, (m, d) in product(range(1291, 2050), fixed_days_whole_country):
        self.assertIn(date(y, m, d), self.holidays)
def test_berchtoldstag(self):
    # Jan 2 is a holiday only in these cantons; assertIn/assertNotIn
    # give informative failures, unlike assertTrue(x in y).
    provinces_that_have = {'AG', 'BE', 'FR', 'GE', 'GL', 'GR', 'JU', 'LU',
                           'NE', 'OW', 'SH', 'SO', 'TG', 'VD', 'ZG', 'ZH'}
    provinces_that_dont = set(holidays.CH.PROVINCES) - provinces_that_have
    for province, year in product(provinces_that_have, range(1970, 2050)):
        self.assertIn(date(year, 1, 2), self.prov_hols[province])
    for province, year in product(provinces_that_dont, range(1970, 2050)):
        self.assertNotIn(date(year, 1, 2), self.prov_hols[province])
def test_heilige_drei_koenige(self):
    # Jan 6 (Epiphany) only in SZ, TI, UR; use assertIn/assertNotIn
    # for informative failure messages.
    provinces_that_have = {'SZ', 'TI', 'UR'}
    provinces_that_dont = set(holidays.CH.PROVINCES) - provinces_that_have
    for province, year in product(provinces_that_have, range(1970, 2050)):
        self.assertIn(date(year, 1, 6), self.prov_hols[province])
    for province, year in product(provinces_that_dont, range(1970, 2050)):
        self.assertNotIn(date(year, 1, 6), self.prov_hols[province])
def test_jahrestag_der_ausrufung_der_republik(self):
    # Mar 1 only in NE; assertIn/assertNotIn for clearer failures.
    provinces_that_have = {'NE'}
    provinces_that_dont = set(holidays.CH.PROVINCES) - provinces_that_have
    for province, year in product(provinces_that_have, range(1970, 2050)):
        self.assertIn(date(year, 3, 1), self.prov_hols[province])
    for province, year in product(provinces_that_dont, range(1970, 2050)):
        self.assertNotIn(date(year, 3, 1), self.prov_hols[province])
def test_josefstag(self):
    # Mar 19 only in these cantons; assertIn/assertNotIn for clearer
    # failures.
    provinces_that_have = {'NW', 'SZ', 'TI', 'UR', 'VS'}
    provinces_that_dont = set(holidays.CH.PROVINCES) - provinces_that_have
    for province, year in product(provinces_that_have, range(1970, 2050)):
        self.assertIn(date(year, 3, 19), self.prov_hols[province])
    for province, year in product(provinces_that_dont, range(1970, 2050)):
        self.assertNotIn(date(year, 3, 19), self.prov_hols[province])
def test_naefelser_fahrt(self):
    # Known dates for the GL-only holiday; assertIn/assertNotIn for
    # clearer failure messages.
    known_good = [(2018, 4, 5), (2019, 4, 4), (2020, 4, 2),
                  (2021, 4, 8), (2022, 4, 7), (2023, 4, 13),
                  (2024, 4, 4), (2025, 4, 3), (2026, 4, 9),
                  (2027, 4, 1), (2028, 4, 6), (2029, 4, 5),
                  (2030, 4, 4), (2031, 4, 3), (2032, 4, 1),
                  (2033, 4, 7), (2034, 4, 13), (2035, 4, 5)]
    provinces_that_have = {'GL'}
    provinces_that_dont = set(holidays.CH.PROVINCES) - provinces_that_have
    for province, (y, m, d) in product(provinces_that_have, known_good):
        self.assertIn(date(y, m, d), self.prov_hols[province])
    for province, (y, m, d) in product(provinces_that_dont, known_good):
        self.assertNotIn(date(y, m, d), self.prov_hols[province])
def test_karfreitag(self):
    # Good Friday everywhere except VS; assertIn/assertNotIn for
    # clearer failure messages.
    known_good = [(2018, 3, 30), (2019, 4, 19), (2020, 4, 10),
                  (2021, 4, 2), (2022, 4, 15), (2023, 4, 7),
                  (2024, 3, 29), (2025, 4, 18), (2026, 4, 3),
                  (2027, 3, 26), (2028, 4, 14), (2029, 3, 30),
                  (2030, 4, 19), (2031, 4, 11), (2032, 3, 26),
                  (2033, 4, 15), (2034, 4, 7), (2035, 3, 23)]
    provinces_that_dont = {'VS'}
    provinces_that_have = set(holidays.CH.PROVINCES) - provinces_that_dont
    for province, (y, m, d) in product(provinces_that_have, known_good):
        self.assertIn(date(y, m, d), self.prov_hols[province])
    for province, (y, m, d) in product(provinces_that_dont, known_good):
        self.assertNotIn(date(y, m, d), self.prov_hols[province])
def test_ostern(self):
    # Easter Sunday in every canton; assertIn for clearer failures.
    known_good = [(2018, 4, 1), (2019, 4, 21), (2020, 4, 12),
                  (2021, 4, 4), (2022, 4, 17), (2023, 4, 9),
                  (2024, 3, 31), (2025, 4, 20), (2026, 4, 5),
                  (2027, 3, 28), (2028, 4, 16), (2029, 4, 1),
                  (2030, 4, 21), (2031, 4, 13), (2032, 3, 28),
                  (2033, 4, 17), (2034, 4, 9), (2035, 3, 25)]
    for province, (y, m, d) in product(holidays.CH.PROVINCES, known_good):
        self.assertIn(date(y, m, d), self.prov_hols[province])
def test_ostermontag(self):
    # Easter Monday everywhere except VS; assertIn/assertNotIn for
    # clearer failure messages.
    known_good = [(2018, 4, 2), (2019, 4, 22), (2020, 4, 13),
                  (2021, 4, 5), (2022, 4, 18), (2023, 4, 10),
                  (2024, 4, 1), (2025, 4, 21), (2026, 4, 6),
                  (2027, 3, 29), (2028, 4, 17), (2029, 4, 2),
                  (2030, 4, 22), (2031, 4, 14), (2032, 3, 29),
                  (2033, 4, 18), (2034, 4, 10), (2035, 3, 26)]
    provinces_that_dont = {'VS'}
    provinces_that_have = set(holidays.CH.PROVINCES) - provinces_that_dont
    for province, (y, m, d) in product(provinces_that_have, known_good):
        self.assertIn(date(y, m, d), self.prov_hols[province])
    for province, (y, m, d) in product(provinces_that_dont, known_good):
        self.assertNotIn(date(y, m, d), self.prov_hols[province])
def test_auffahrt(self):
    """Ascension Day is a public holiday in every canton."""
    expected = [(2018, 5, 10), (2019, 5, 30), (2020, 5, 21),
                (2021, 5, 13), (2022, 5, 26), (2023, 5, 18),
                (2024, 5, 9), (2025, 5, 29), (2026, 5, 14),
                (2027, 5, 6), (2028, 5, 25), (2029, 5, 10),
                (2030, 5, 30), (2031, 5, 22), (2032, 5, 6),
                (2033, 5, 26), (2034, 5, 18), (2035, 5, 3)]
    for year, month, day in expected:
        ascension = date(year, month, day)
        for canton in holidays.CH.PROVINCES:
            self.assertIn(ascension, self.prov_hols[canton])
def test_pfingsten(self):
    """Whit Sunday (Pentecost) is a public holiday in every canton."""
    expected = [(2018, 5, 20), (2019, 6, 9), (2020, 5, 31),
                (2021, 5, 23), (2022, 6, 5), (2023, 5, 28),
                (2024, 5, 19), (2025, 6, 8), (2026, 5, 24),
                (2027, 5, 16), (2028, 6, 4), (2029, 5, 20),
                (2030, 6, 9), (2031, 6, 1), (2032, 5, 16),
                (2033, 6, 5), (2034, 5, 28), (2035, 5, 13)]
    for year, month, day in expected:
        whitsun = date(year, month, day)
        for canton in holidays.CH.PROVINCES:
            self.assertIn(whitsun, self.prov_hols[canton])
def test_pfingstmontag(self):
    """Whit Monday is a public holiday in every canton."""
    expected = [(2018, 5, 21), (2019, 6, 10), (2020, 6, 1),
                (2021, 5, 24), (2022, 6, 6), (2023, 5, 29),
                (2024, 5, 20), (2025, 6, 9), (2026, 5, 25),
                (2027, 5, 17), (2028, 6, 5), (2029, 5, 21),
                (2030, 6, 10), (2031, 6, 2), (2032, 5, 17),
                (2033, 6, 6), (2034, 5, 29), (2035, 5, 14)]
    for year, month, day in expected:
        whit_monday = date(year, month, day)
        for canton in holidays.CH.PROVINCES:
            self.assertIn(whit_monday, self.prov_hols[canton])
def test_fronleichnam(self):
    """Corpus Christi is observed only in the listed (mostly Catholic) cantons."""
    expected = [(2014, 6, 19), (2015, 6, 4), (2016, 5, 26),
                (2017, 6, 15), (2018, 5, 31), (2019, 6, 20),
                (2020, 6, 11), (2021, 6, 3), (2022, 6, 16),
                (2023, 6, 8), (2024, 5, 30)]
    cantons_with = {'AI', 'JU', 'LU', 'NW', 'OW', 'SZ', 'TI', 'UR',
                    'VS', 'ZG'}
    cantons_without = set(holidays.CH.PROVINCES) - cantons_with
    for year, month, day in expected:
        holiday = date(year, month, day)
        for canton in cantons_with:
            self.assertIn(holiday, self.prov_hols[canton])
        for canton in cantons_without:
            self.assertNotIn(holiday, self.prov_hols[canton])
def test_fest_der_unabhaengikeit(self):
    """Jura's independence celebration (23 June) is a holiday only in JU."""
    cantons_with = {'JU'}
    cantons_without = set(holidays.CH.PROVINCES) - cantons_with
    for canton in cantons_with:
        for year in range(1970, 2050):
            self.assertIn(date(year, 6, 23), self.prov_hols[canton])
    # 2011 is "Fronleichnam" on the same date, we don't test this year.
    # NOTE(review): these ranges skip 2010 as well as 2011 — confirm that
    # skipping 2010 is intentional.
    for canton in cantons_without:
        for year in range(1970, 2010):
            self.assertNotIn(date(year, 6, 23), self.prov_hols[canton])
        for year in range(2012, 2050):
            self.assertNotIn(date(year, 6, 23), self.prov_hols[canton])
def test_peter_und_paul(self):
    """Saints Peter and Paul (29 June) is a holiday only in Ticino (TI)."""
    cantons_with = {'TI'}
    cantons_without = set(holidays.CH.PROVINCES) - cantons_with
    for year in range(1970, 2050):
        feast = date(year, 6, 29)
        for canton in cantons_with:
            self.assertIn(feast, self.prov_hols[canton])
        for canton in cantons_without:
            self.assertNotIn(feast, self.prov_hols[canton])
def test_mariae_himmelfahrt(self):
    """Assumption Day (15 August) is observed only in the listed cantons."""
    cantons_with = {'AI', 'JU', 'LU', 'NW', 'OW', 'SZ', 'TI', 'UR',
                    'VS', 'ZG'}
    cantons_without = set(holidays.CH.PROVINCES) - cantons_with
    for year in range(1970, 2050):
        assumption = date(year, 8, 15)
        for canton in cantons_with:
            self.assertIn(assumption, self.prov_hols[canton])
        for canton in cantons_without:
            self.assertNotIn(assumption, self.prov_hols[canton])
def test_lundi_du_jeune(self):
    """Lundi du Jeûne (Monday after the federal fast) is a holiday only in VD."""
    expected = [(2014, 9, 22), (2015, 9, 21), (2016, 9, 19),
                (2017, 9, 18), (2018, 9, 17), (2019, 9, 16),
                (2020, 9, 21), (2021, 9, 20), (2022, 9, 19),
                (2023, 9, 18), (2024, 9, 16)]
    cantons_with = {'VD'}
    cantons_without = set(holidays.CH.PROVINCES) - cantons_with
    for year, month, day in expected:
        holiday = date(year, month, day)
        for canton in cantons_with:
            self.assertIn(holiday, self.prov_hols[canton])
        for canton in cantons_without:
            self.assertNotIn(holiday, self.prov_hols[canton])
def test_bruder_chlaus(self):
    """Bruder Klaus day (25 September) is a holiday only in Obwalden (OW)."""
    cantons_with = {'OW'}
    cantons_without = set(holidays.CH.PROVINCES) - cantons_with
    for year in range(1970, 2050):
        feast = date(year, 9, 25)
        for canton in cantons_with:
            self.assertIn(feast, self.prov_hols[canton])
        for canton in cantons_without:
            self.assertNotIn(feast, self.prov_hols[canton])
def test_allerheiligen(self):
    """All Saints' Day (1 November) is observed only in the listed cantons."""
    cantons_with = {'AI', 'GL', 'JU', 'LU', 'NW', 'OW', 'SG', 'SZ',
                    'TI', 'UR', 'VS', 'ZG'}
    cantons_without = set(holidays.CH.PROVINCES) - cantons_with
    for year in range(1970, 2050):
        all_saints = date(year, 11, 1)
        for canton in cantons_with:
            self.assertIn(all_saints, self.prov_hols[canton])
        for canton in cantons_without:
            self.assertNotIn(all_saints, self.prov_hols[canton])
def test_escalade_de_geneve(self):
    """L'Escalade (12 December) is a holiday only in Geneva (GE)."""
    cantons_with = {'GE'}
    cantons_without = set(holidays.CH.PROVINCES) - cantons_with
    for year in range(1970, 2050):
        escalade = date(year, 12, 12)
        for canton in cantons_with:
            self.assertIn(escalade, self.prov_hols[canton])
        for canton in cantons_without:
            self.assertNotIn(escalade, self.prov_hols[canton])
def test_stephanstag(self):
    """St. Stephen's Day (26 December) is observed only in the listed cantons."""
    cantons_with = {'AG', 'AR', 'AI', 'BL', 'BS', 'BE', 'FR', 'GL',
                    'GR', 'LU', 'NE', 'NW', 'OW', 'SG', 'SH', 'SZ',
                    'SO', 'TG', 'TI', 'UR', 'ZG', 'ZH'}
    cantons_without = set(holidays.CH.PROVINCES) - cantons_with
    for year in range(1970, 2050):
        boxing_day = date(year, 12, 26)
        for canton in cantons_with:
            self.assertIn(boxing_day, self.prov_hols[canton])
        for canton in cantons_without:
            self.assertNotIn(boxing_day, self.prov_hols[canton])
def test_wiedererstellung_der_republik(self):
    """Restoration of the Republic (31 December) is a holiday only in GE."""
    cantons_with = {'GE'}
    cantons_without = set(holidays.CH.PROVINCES) - cantons_with
    for year in range(1970, 2050):
        restoration = date(year, 12, 31)
        for canton in cantons_with:
            self.assertIn(restoration, self.prov_hols[canton])
        for canton in cantons_without:
            self.assertNotIn(restoration, self.prov_hols[canton])
class TestAR(unittest.TestCase):
    """Public holidays of Argentina (holidays.AR).

    Fixed-date tests check every year in 1900-2100 and also assert that
    the adjacent days are not holidays; movable/observed holidays toggle
    the ``observed`` flag first.
    """

    def setUp(self):
        self.holidays = holidays.AR(observed=True)

    def test_new_years(self):
        # Observed substitutes (Dec 31 / Jan 2) appear only when enabled.
        self.holidays.observed = False
        self.assertNotIn(date(2010, 12, 31), self.holidays)
        self.assertNotIn(date(2017, 1, 2), self.holidays)
        self.holidays.observed = True
        self.assertIn(date(2017, 1, 1), self.holidays)
        for year in range(1900, 2100):
            dt = date(year, 1, 1)
            self.assertIn(dt, self.holidays)
            self.assertNotIn(dt + relativedelta(days=-1), self.holidays)
            self.assertNotIn(dt + relativedelta(days=+1), self.holidays)

    def test_carnival_day(self):
        # Carnival Monday and Tuesday (47/48 days before Easter).
        for dt in [date(2018, 2, 12), date(2018, 2, 13), date(2017, 2, 27),
                   date(2017, 2, 28), date(2016, 2, 8), date(2016, 2, 9)]:
            self.assertIn(dt, self.holidays)

    def test_memory_national_day(self):
        self.holidays.observed = False
        self.assertNotIn(date(1907, 3, 24), self.holidays)
        self.assertNotIn(date(2002, 3, 24), self.holidays)
        self.holidays.observed = True
        for dt in [date(2018, 3, 24), date(2017, 3, 24),
                   date(2016, 3, 24)]:
            self.assertIn(dt, self.holidays)

    def test_holy_week_day(self):
        # Maundy Thursday and Good Friday.
        for dt in [date(2018, 3, 29), date(2018, 3, 30), date(2017, 4, 13),
                   date(2017, 4, 14), date(2016, 3, 24), date(2016, 3, 25)]:
            self.assertIn(dt, self.holidays)

    def test_malvinas_war_day(self):
        for year in range(1900, 2100):
            dt = date(year, 4, 2)
            self.assertIn(dt, self.holidays)

    def test_labor_day(self):
        self.holidays.observed = False
        self.assertNotIn(date(2010, 4, 30), self.holidays)
        self.assertNotIn(date(2011, 5, 2), self.holidays)
        self.holidays.observed = True
        self.assertIn(date(1922, 5, 1), self.holidays)
        for year in range(1900, 2100):
            dt = date(year, 5, 1)
            self.assertIn(dt, self.holidays)
            self.assertNotIn(dt + relativedelta(days=-1), self.holidays)
            self.assertNotIn(dt + relativedelta(days=+1), self.holidays)

    def test_may_revolution_day(self):
        self.holidays.observed = False
        self.assertNotIn(date(1930, 5, 25), self.holidays)
        self.assertNotIn(date(2014, 5, 25), self.holidays)
        self.holidays.observed = True
        for year in range(1900, 2100):
            # Fixed: the loop previously checked date(year, 5, 1) — a
            # copy-paste of the Labor Day test. The May Revolution is
            # commemorated on 25 May, matching the assertions above.
            dt = date(year, 5, 25)
            self.assertIn(dt, self.holidays)
            self.assertNotIn(dt + relativedelta(days=-1), self.holidays)
            self.assertNotIn(dt + relativedelta(days=+1), self.holidays)

    def test_guemes_day(self):
        for year in range(1900, 2100):
            dt = date(year, 6, 17)
            self.assertIn(dt, self.holidays)
            self.assertNotIn(dt + relativedelta(days=-1), self.holidays)
            self.assertNotIn(dt + relativedelta(days=+1), self.holidays)

    def test_belgrano_day(self):
        for year in range(1900, 2100):
            dt = date(year, 6, 20)
            self.assertIn(dt, self.holidays)
            self.assertNotIn(dt + relativedelta(days=-1), self.holidays)
            self.assertNotIn(dt + relativedelta(days=+1), self.holidays)

    def test_independence_day(self):
        self.holidays.observed = False
        self.assertNotIn(date(2017, 7, 9), self.holidays)
        self.assertNotIn(date(2011, 7, 9), self.holidays)
        self.holidays.observed = True
        self.assertIn(date(2017, 7, 9), self.holidays)
        self.assertIn(date(2011, 7, 9), self.holidays)
        for year in range(1900, 2100):
            dt = date(year, 7, 9)
            self.assertIn(dt, self.holidays)
            self.assertNotIn(dt + relativedelta(days=-1), self.holidays)
            self.assertNotIn(dt + relativedelta(days=+1), self.holidays)

    def test_san_martin_day(self):
        self.holidays.observed = False
        self.assertNotIn(date(1930, 8, 10), self.holidays)
        self.assertNotIn(date(2008, 8, 10), self.holidays)
        self.holidays.observed = True
        for year in range(1900, 2100):
            dt = date(year, 8, 17)
            self.assertIn(dt, self.holidays)
            self.assertNotIn(dt + relativedelta(days=-1), self.holidays)
            self.assertNotIn(dt + relativedelta(days=+1), self.holidays)

    def test_cultural_day(self):
        self.holidays.observed = False
        self.assertNotIn(date(2014, 10, 12), self.holidays)
        self.assertNotIn(date(1913, 10, 12), self.holidays)
        self.holidays.observed = True
        for year in range(1900, 2100):
            dt = date(year, 10, 12)
            self.assertIn(dt, self.holidays)
            self.assertNotIn(dt + relativedelta(days=-1), self.holidays)
            self.assertNotIn(dt + relativedelta(days=+1), self.holidays)

    def test_national_sovereignty_day(self):
        # Introduced in 2010; absent in earlier years.
        for year in range(1900, 2100):
            dt = date(year, 11, 20)
            if year < 2010:
                self.assertNotIn(dt, self.holidays)
            else:
                self.assertIn(dt, self.holidays)
                self.assertNotIn(dt + relativedelta(days=-1), self.holidays)
                self.assertNotIn(dt + relativedelta(days=+1), self.holidays)

    def test_inmaculate_conception_day(self):
        self.holidays.observed = False
        self.assertNotIn(date(1940, 12, 8), self.holidays)
        self.assertNotIn(date(2013, 12, 8), self.holidays)
        self.holidays.observed = True
        for year in range(1900, 2100):
            dt = date(year, 12, 8)
            self.assertIn(dt, self.holidays)
            self.assertNotIn(dt + relativedelta(days=-1), self.holidays)
            self.assertNotIn(dt + relativedelta(days=+1), self.holidays)

    def test_christmas(self):
        for year in range(1900, 2100):
            dt = date(year, 12, 25)
            self.assertIn(dt, self.holidays)
            self.assertNotIn(dt + relativedelta(days=-1), self.holidays)
            self.assertNotIn(dt + relativedelta(days=+1), self.holidays)
class TestIND(unittest.TestCase):
    """National and state-level public holidays of India (holidays.IND)."""

    def setUp(self):
        self.holidays = holidays.IND()

    def test_2018(self):
        # Nation-wide holidays. The original test asserted 2 October twice
        # and repeated the Gujarat loop verbatim; the duplicates are removed.
        self.assertIn(date(2018, 1, 14), self.holidays)
        self.assertIn(date(2018, 1, 26), self.holidays)
        self.assertIn(date(2018, 5, 1), self.holidays)
        self.assertIn(date(2018, 8, 15), self.holidays)
        self.assertIn(date(2018, 10, 2), self.holidays)
        self.assertIn(date(2018, 12, 25), self.holidays)

        # State-specific calendars.
        gj_holidays = holidays.IND(prov="GJ")
        as_holidays = holidays.IND(prov="AS")
        tn_holidays = holidays.IND(prov="TN")
        wb_holidays = holidays.IND(prov="WB")
        cg_holidays = holidays.IND(prov="CG")
        sk_holidays = holidays.IND(prov="SK")
        ka_holidays = holidays.IND(prov="KA")
        br_holidays = holidays.IND(prov="BR")
        rj_holidays = holidays.IND(prov="RJ")
        od_holidays = holidays.IND(prov="OD")
        ap_holidays = holidays.IND(prov="AP")
        kl_holidays = holidays.IND(prov="KL")
        hr_holidays = holidays.IND(prov="HR")
        mh_holidays = holidays.IND(prov="MH")
        mp_holidays = holidays.IND(prov="MP")
        up_holidays = holidays.IND(prov="UP")
        uk_holidays = holidays.IND(prov="UK")
        ts_holidays = holidays.IND(prov="TS")

        for dt in [date(2018, 1, 14), date(2018, 5, 1), date(2018, 10, 31)]:
            self.assertIn(dt, gj_holidays)
        for dt in [date(2018, 4, 15), date(2018, 4, 14)]:
            self.assertIn(dt, tn_holidays)
            self.assertIn(dt, wb_holidays)
        self.assertIn(date(2018, 3, 22), br_holidays)
        self.assertIn(date(2018, 3, 30), rj_holidays)
        self.assertIn(date(2018, 6, 15), rj_holidays)
        self.assertIn(date(2018, 4, 1), od_holidays)
        self.assertIn(date(2018, 4, 6), ts_holidays)
        self.assertIn(date(2018, 4, 15), od_holidays)
        self.assertIn(date(2018, 4, 14), od_holidays)
        self.assertIn(date(2018, 4, 14), br_holidays)
        self.assertIn(date(2018, 4, 14), kl_holidays)
        self.assertIn(date(2018, 4, 14), up_holidays)
        self.assertIn(date(2018, 4, 14), uk_holidays)
        self.assertIn(date(2018, 4, 14), hr_holidays)
        self.assertIn(date(2018, 4, 14), mh_holidays)
        self.assertIn(date(2018, 4, 14), wb_holidays)
        self.assertIn(date(2018, 5, 9), wb_holidays)
        self.assertIn(date(2018, 4, 15), as_holidays)
        self.assertIn(date(2018, 5, 1), mh_holidays)
        self.assertIn(date(2018, 5, 16), sk_holidays)
        self.assertIn(date(2018, 10, 6), ts_holidays)
        self.assertIn(date(2018, 11, 1), ka_holidays)
        self.assertIn(date(2018, 11, 1), ap_holidays)
        self.assertIn(date(2018, 11, 1), hr_holidays)
        self.assertIn(date(2018, 11, 1), mp_holidays)
        self.assertIn(date(2018, 11, 1), kl_holidays)
        self.assertIn(date(2018, 11, 1), cg_holidays)
class TestBelarus(unittest.TestCase):
    """Public holidays of Belarus (holidays.BY)."""

    def setUp(self):
        self.holidays = holidays.BY()

    def test_2018(self):
        # http://calendar.by/procal.php?year=2018
        # https://www.officeholidays.com/countries/belarus/index.php
        for month, day in [(1, 1), (1, 7), (3, 8), (4, 17), (5, 1),
                           (5, 9), (7, 3), (11, 7), (12, 25)]:
            self.assertIn(date(2018, month, day), self.holidays)

    def test_new_year(self):
        # In 2019 only 1 January is a holiday; 2020 and 2021 include
        # 2 January as well.
        self.assertIn(date(2019, 1, 1), self.holidays)
        self.assertNotIn(date(2019, 1, 2), self.holidays)
        for year in (2020, 2021):
            self.assertIn(date(year, 1, 1), self.holidays)
            self.assertIn(date(year, 1, 2), self.holidays)

    def test_radunitsa(self):
        # Radunitsa follows Orthodox Easter; see
        # http://calendar.by/content.php?id=20
        expected = [(2012, 4, 24), (2013, 5, 14), (2014, 4, 29),
                    (2015, 4, 21), (2016, 5, 10), (2017, 4, 25),
                    (2018, 4, 17), (2019, 5, 7), (2020, 4, 28),
                    (2021, 5, 11), (2022, 5, 3), (2023, 4, 25),
                    (2024, 5, 14), (2025, 4, 29), (2026, 4, 21),
                    (2027, 5, 11), (2028, 4, 25), (2029, 4, 17),
                    (2030, 5, 7)]
        for year, month, day in expected:
            self.assertIn(date(year, month, day), self.holidays)

    def test_before_1998(self):
        self.assertNotIn(date(1997, 7, 3), self.holidays)
class TestHonduras(unittest.TestCase):
    """Public holidays of Honduras (holidays.HND)."""

    def setUp(self):
        self.holidays = holidays.HND()

    def test_2014(self):
        expected = [(10, 3),    # Morazan's Day
                    (10, 12),   # Columbus Day
                    (10, 21)]   # Army Day
        for month, day in expected:
            self.assertIn(date(2014, month, day), self.holidays)

    def test_2018(self):
        expected = [(1, 1),     # New Year
                    (4, 14),    # America's Day
                    (5, 1),     # Workers' Day
                    (5, 13),    # Mother's Day
                    (9, 10),    # Children weekend
                    (9, 15),    # Independence Day
                    (9, 17),    # Teacher's Day
                    (10, 3),    # Morazan's weekend
                    (12, 25)]   # Christmas
        for month, day in expected:
            self.assertIn(date(2018, month, day), self.holidays)
        # Mother's Day is 13 May in 2018; 6 May must not be flagged.
        self.assertNotIn(date(2018, 5, 6), self.holidays)
class TestCroatia(unittest.TestCase):
    """Public holidays of Croatia (holidays.HR)."""

    def setUp(self):
        self.holidays = holidays.HR()

    def test_2018(self):
        for month, day in [(1, 1), (1, 6), (4, 1), (4, 2), (5, 1),
                           (6, 25), (8, 15), (10, 8), (11, 1),
                           (12, 25), (12, 26)]:
            self.assertIn(date(2018, month, day), self.holidays)
class TestUkraine(unittest.TestCase):
    """Public holidays of Ukraine (holidays.UA)."""

    def setUp(self):
        self.holidays = holidays.UA()

    def test_before_1918(self):
        self.assertNotIn(date(1917, 12, 31), self.holidays)

    def test_2018(self):
        # http://www.buhoblik.org.ua/kadry-zarplata/vremya/1676-1676-kalendar.html
        for month, day in [(1, 1), (1, 7), (12, 25), (4, 8), (5, 27),
                           (5, 9), (6, 28), (8, 24), (10, 14)]:
            self.assertIn(date(2018, month, day), self.holidays)

    def test_old_holidays(self):
        # Dates that were (or still are) holidays under earlier rules.
        expected = [(2018, 5, 1), (2016, 5, 2), (1991, 7, 16),
                    (1950, 1, 22), (1999, 11, 7), (1999, 11, 8),
                    (1945, 5, 9), (1945, 9, 3), (1981, 10, 7),
                    (1937, 12, 5), (1918, 3, 18)]
        for year, month, day in expected:
            self.assertIn(date(year, month, day), self.holidays)
class TestBrazil(unittest.TestCase):
    """National and state holidays of Brazil (holidays.BR) and their names."""

    def _check(self, hols, year, month, day, name):
        # Assert the date is present via an ISO-string lookup and that the
        # holiday carries exactly the expected (Portuguese) name.
        self.assertIn(date(year, month, day).isoformat(), hols)
        self.assertEqual(hols[date(year, month, day)], name)

    def test_BR_holidays(self):
        self.holidays = holidays.BR(years=2018)
        expected = [
            (1, 1, "Ano novo"),
            (2, 14, "Quarta-feira de cinzas (Início da Quaresma)"),
            (2, 13, "Carnaval"),
            (3, 30, "Sexta-feira Santa"),
            (2, 13, "Carnaval"),
            (4, 1, "Páscoa"),
            (4, 21, "Tiradentes"),
            (5, 1, "Dia Mundial do Trabalho"),
            (5, 31, "Corpus Christi"),
            (9, 7, "Independência do Brasil"),
            (10, 12, "Nossa Senhora Aparecida"),
            (11, 2, "Finados"),
            (11, 15, "Proclamação da República"),
            (12, 25, "Natal"),
        ]
        for month, day, name in expected:
            self._check(self.holidays, 2018, month, day, name)

    def test_AC_holidays(self):
        hols = holidays.BR(state="AC")
        self._check(hols, 2018, 1, 23, "Dia do evangélico")
        self._check(hols, 2018, 6, 15, "Aniversário do Acre")
        self._check(hols, 2018, 9, 5, "Dia da Amazônia")
        self._check(hols, 2018, 11, 17,
                    "Assinatura do Tratado de Petrópolis")

    def test_AL_holidays(self):
        hols = holidays.BR(state="AL")
        self._check(hols, 2018, 6, 24, "São João")
        self._check(hols, 2018, 6, 29, "São Pedro")
        self._check(hols, 2018, 9, 16, "Emancipação política de Alagoas")
        self._check(hols, 2018, 11, 20, "Consciência Negra")

    def test_AP_holidays(self):
        hols = holidays.BR(state="AP")
        self._check(hols, 2018, 3, 19, "Dia de São José")
        self._check(hols, 2018, 7, 25, "São Tiago")
        self._check(hols, 2018, 10, 5, "Criação do estado")
        self._check(hols, 2018, 11, 20, "Consciência Negra")

    def test_AM_holidays(self):
        hols = holidays.BR(state="AM")
        self._check(hols, 2018, 9, 5,
                    "Elevação do Amazonas à categoria de província")
        self._check(hols, 2018, 11, 20, "Consciência Negra")
        self._check(hols, 2018, 12, 8,
                    "Dia de Nossa Senhora da Conceição")

    def test_BA_holidays(self):
        hols = holidays.BR(state="BA")
        self._check(hols, 2018, 7, 2, "Independência da Bahia")

    def test_CE_holidays(self):
        hols = holidays.BR(state="CE")
        self._check(hols, 2018, 3, 19, "São José")
        self._check(hols, 2018, 3, 25, "Data Magna do Ceará")

    def test_DF_holidays(self):
        hols = holidays.BR(state="DF")
        self._check(hols, 2018, 4, 21, "Fundação de Brasília, Tiradentes")
        self._check(hols, 2018, 11, 30, "Dia do Evangélico")

    def test_ES_holidays(self):
        hols = holidays.BR(state="ES")
        self._check(hols, 2018, 10, 28, "Dia do Servidor Público")

    def test_GO_holidays(self):
        hols = holidays.BR(state="GO")
        self._check(hols, 2018, 10, 28, "Dia do Servidor Público")

    def test_MA_holidays(self):
        hols = holidays.BR(state="MA")
        self._check(hols, 2018, 7, 28,
                    "Adesão do Maranhão à independência do Brasil")
        self._check(hols, 2018, 12, 8,
                    "Dia de Nossa Senhora da Conceição")

    def test_MT_holidays(self):
        hols = holidays.BR(state="MT")
        self._check(hols, 2018, 11, 20, "Consciência Negra")

    def test_MS_holidays(self):
        hols = holidays.BR(state="MS")
        self._check(hols, 2018, 10, 11, "Criação do estado")

    def test_MG_holidays(self):
        hols = holidays.BR(state="MG")
        self._check(hols, 2018, 4, 21, "Data Magna de MG, Tiradentes")

    def test_PA_holidays(self):
        hols = holidays.BR(state="PA")
        self._check(hols, 2018, 8, 15,
                    "Adesão do Grão-Pará à independência do Brasil")

    def test_PB_holidays(self):
        hols = holidays.BR(state="PB")
        self._check(hols, 2018, 8, 5, "Fundação do Estado")

    def test_PE_holidays(self):
        hols = holidays.BR(state="PE")
        self._check(hols, 2018, 3, 6,
                    "Revolução Pernambucana (Data Magna)")
        self._check(hols, 2018, 6, 24, "São João")

    def test_PI_holidays(self):
        hols = holidays.BR(state="PI")
        self._check(hols, 2018, 3, 13, "Dia da Batalha do Jenipapo")
        self._check(hols, 2018, 10, 19, "Dia do Piauí")

    def test_PR_holidays(self):
        hols = holidays.BR(state="PR")
        self._check(hols, 2018, 12, 19, "Emancipação do Paraná")

    def test_RJ_holidays(self):
        hols = holidays.BR(state="RJ")
        self._check(hols, 2018, 4, 23, "Dia de São Jorge")
        self._check(hols, 2018, 10, 28, "Dia do Funcionário Público")
        self._check(hols, 2018, 11, 20, "Zumbi dos Palmares")

    def test_RN_holidays(self):
        hols = holidays.BR(state="RN")
        self._check(hols, 2018, 6, 29, "Dia de São Pedro")
        self._check(hols, 2018, 10, 3, "Mártires de Cunhaú e Uruaçuu")

    def test_RS_holidays(self):
        hols = holidays.BR(state="RS")
        self._check(hols, 2018, 9, 20, "Revolução Farroupilha")

    def test_RO_holidays(self):
        hols = holidays.BR(state="RO")
        self._check(hols, 2018, 1, 4, "Criação do estado")
        self._check(hols, 2018, 6, 18, "Dia do Evangélico")

    def test_RR_holidays(self):
        hols = holidays.BR(state="RR")
        self._check(hols, 2018, 10, 5, "Criação de Roraima")

    def test_SC_holidays(self):
        hols = holidays.BR(state="SC")
        self._check(hols, 2018, 8, 11,
                    "Criação da capitania, separando-se de SP")

    def test_SP_holidays(self):
        hols = holidays.BR(state="SP")
        self._check(hols, 2018, 7, 9,
                    "Revolução Constitucionalista de 1932")

    def test_SE_holidays(self):
        hols = holidays.BR(state="SE")
        self._check(hols, 2018, 7, 8, "Autonomia política de Sergipe")

    def test_TO_holidays(self):
        hols = holidays.BR(state="TO")
        self._check(hols, 2018, 1, 1, "Instalação de Tocantins, Ano novo")
        self._check(hols, 2018, 9, 8, "Nossa Senhora da Natividade")
        self._check(hols, 2018, 10, 5, "Criação de Tocantins")
class TestLU(unittest.TestCase):
    """Public holidays of Luxembourg (holidays.LU)."""

    def setUp(self):
        self.holidays = holidays.LU()

    def test_2019(self):
        # https://www.officeholidays.com/countries/luxembourg/2019
        for month, day in [(1, 1), (4, 22), (5, 1), (5, 9), (5, 30),
                           (6, 10), (6, 23), (8, 15), (11, 1),
                           (12, 25), (12, 26)]:
            self.assertIn(date(2019, month, day), self.holidays)
class TestRussia(unittest.TestCase):
    """Public holidays of Russia (holidays.RU)."""

    def setUp(self):
        self.holidays = holidays.RU()

    def test_2018(self):
        # https://en.wikipedia.org/wiki/Public_holidays_in_Russia
        expected = [(1, 1), (1, 2), (1, 3), (1, 4), (1, 5), (1, 6),
                    (1, 7), (1, 8), (2, 23), (3, 8), (5, 1), (5, 9),
                    (6, 12), (11, 4)]
        for month, day in expected:
            self.assertIn(date(2018, month, day), self.holidays)
class TestLithuania(unittest.TestCase):
    """Public holidays of Lithuania (holidays.LT)."""

    def setUp(self):
        self.holidays = holidays.LT()

    def test_2018(self):
        expected = [
            (1, 1),    # New Year's Day
            (2, 16),   # Day of Restoration of the State of Lithuania
            (3, 11),   # Day of Restoration of Independence of Lithuania
            (4, 1),    # Easter
            (4, 2),    # Easter 2nd day
            (5, 1),    # International Workers' Day
            (5, 6),    # Mother's day
            (6, 3),    # Fathers's day
            (6, 24),   # St. John's Day, Day of Dew
            (7, 6),    # Statehood Day
            (8, 15),   # Assumption Day
            (11, 1),   # All Saints' Day
            (12, 24),  # Christmas Eve
            (12, 25),  # Christmas 1st day
            (12, 26),  # Christmas 2nd day
        ]
        for month, day in expected:
            self.assertIn(date(2018, month, day), self.holidays)

    def test_easter(self):
        # Easter Sunday and Monday 2019, bracketed by ordinary days.
        self.assertNotIn(date(2019, 4, 20), self.holidays)
        self.assertIn(date(2019, 4, 21), self.holidays)
        self.assertIn(date(2019, 4, 22), self.holidays)
        self.assertNotIn(date(2019, 4, 23), self.holidays)

    def test_mothers_day(self):
        # 2019-05-05 and 2020-05-03 are holidays; the surrounding days are not.
        for dt in (date(2019, 5, 5), date(2020, 5, 3)):
            self.assertIn(dt, self.holidays)
        for dt in (date(2019, 5, 4), date(2019, 5, 6)):
            self.assertNotIn(dt, self.holidays)

    def test_fathers_day(self):
        # 2019-06-02 and 2020-06-07 are holidays; the surrounding days are not.
        for dt in (date(2019, 6, 2), date(2020, 6, 7)):
            self.assertIn(dt, self.holidays)
        for dt in (date(2019, 6, 1), date(2019, 6, 3)):
            self.assertNotIn(dt, self.holidays)

    def test_day_of_dew(self):
        # 24 June is not observed in 2002 but is in 2020.
        self.assertNotIn(date(2002, 6, 24), self.holidays)
        self.assertIn(date(2020, 6, 24), self.holidays)
class TestEstonia(unittest.TestCase):
    """Estonian public holidays (holidays.EE) and their Estonian names."""

    def setUp(self):
        self.holidays = holidays.EE()
        self.cur_date = datetime.now()

    def _check(self, test_date, name):
        # Assert the date is a holiday carrying exactly the expected name.
        self.assertEqual(self.holidays.get(test_date), name)
        self.assertIn(test_date, self.holidays)

    def test_new_years(self):
        self._check(date(self.cur_date.year, 1, 1), "uusaasta")

    def test_independence_day(self):
        self._check(date(self.cur_date.year, 2, 24), "iseseisvuspäev")

    def test_good_friday(self):
        self._check(date(2019, 4, 19), "suur reede")

    def test_easter_sunday(self):
        self._check(date(2019, 4, 21), "ülestõusmispühade 1. püha")

    def test_spring_day(self):
        self._check(date(self.cur_date.year, 5, 1), "kevadpüha")

    def test_pentecost(self):
        self._check(date(2019, 6, 9), "nelipühade 1. püha")

    def test_victory_day(self):
        self._check(date(self.cur_date.year, 6, 23), "võidupüha")

    def test_midsummers_day(self):
        self._check(date(self.cur_date.year, 6, 24), "jaanipäev")

    def test_restoration_of_independence_day(self):
        self._check(date(self.cur_date.year, 8, 20), "taasiseseisvumispäev")

    def test_christmas_eve(self):
        self._check(date(self.cur_date.year, 12, 24), "jõululaupäev")

    def test_christmas_day(self):
        self._check(date(self.cur_date.year, 12, 25), "esimene jõulupüha")

    def test_boxing_day(self):
        self._check(date(self.cur_date.year, 12, 26), "teine jõulupüha")
class TestIceland(unittest.TestCase):
    """Icelandic public holidays, checked by date and Icelandic name."""

    def setUp(self):
        self.holidays = holidays.Iceland()
        self.cur_date = datetime.now()

    def _check(self, test_date, name):
        # Same assertion order for every holiday: name, then membership.
        self.assertEqual(self.holidays.get(test_date), name)
        self.assertIn(test_date, self.holidays)

    def test_new_year(self):
        self._check(date(self.cur_date.year, 1, 1), "Nýársdagur")

    def test_maundy_thursday(self):
        self._check(date(2019, 4, 18), "Skírdagur")

    def test_first_day_of_summer(self):
        self._check(date(2019, 4, 25), "Sumardagurinn fyrsti")

    def test_commerce_day(self):
        self._check(date(2019, 8, 5), "Frídagur verslunarmanna")

    def test_holy_friday(self):
        self._check(date(2019, 4, 19), "Föstudagurinn langi")
class TestKenya(unittest.TestCase):
    """Kenyan public holidays for 2019."""

    def setUp(self):
        self.holidays = holidays.Kenya()

    def test_2019(self):
        # New Year's Day
        self.assertIn(date(2019, 1, 1), self.holidays)
        # Good Friday
        self.assertIn(date(2019, 4, 19), self.holidays)
        # Easter Monday
        self.assertIn(date(2019, 4, 22), self.holidays)
        # Labour Day
        self.assertIn(date(2019, 5, 1), self.holidays)
        # Madaraka Day
        self.assertIn(date(2019, 6, 1), self.holidays)
        # Mashujaa Day
        self.assertIn(date(2019, 10, 20), self.holidays)
        # Jamhuri (Independence) Day
        self.assertIn(date(2019, 12, 12), self.holidays)
        # Christmas Day
        self.assertIn(date(2019, 12, 25), self.holidays)
        # Boxing Day -- fixed: the year was mistyped as 2018 in this
        # 2019 test (the old assertion still passed because 2018-12-26
        # is also a holiday, hiding the typo).
        self.assertIn(date(2019, 12, 26), self.holidays)
class TestHongKong(unittest.TestCase):
    """Hong Kong general holidays, including lunar-calendar holidays and
    their substitute ("the day following ...") days."""

    def setUp(self):
        self.holidays = holidays.HK()

    def test_common(self):
        self.assertTrue(self.holidays.isLeapYear(2000))
        self.assertFalse(self.holidays.isLeapYear(2100))
        holidaysNoObserved = holidays.HK(observed=False)
        self.assertEqual(holidaysNoObserved[date(2019, 1, 1)],
                         "The first day of January")
        self.assertEqual(self.holidays[date(2015, 9, 3)], "The 70th " +
                         "anniversary day of the victory of the Chinese " +
                         "people's war of resistance against Japanese " +
                         "aggression")

    def test_first_day_of_january(self):
        # In the exception years Jan 2 is the holiday instead of Jan 1
        # (presumably Jan 1 fell on a Sunday in those years -- confirm).
        exception_years = [2006, 2012, 2017]
        for year in range(2006, 2021):
            if year in exception_years:
                self.assertEqual(self.holidays[date(year, 1, 2)],
                                 "The day following the first day of January")
            else:
                self.assertEqual(self.holidays[date(year, 1, 1)],
                                 "The first day of January")

    def test_lunar_new_year(self):
        for year, month, day in [
                (2006, 1, 28), (2007, 2, 17), (2010, 2, 13)]:
            self.assertEqual(self.holidays[date(year, month, day)],
                             "The day preceding Lunar New Year's Day")
        for year, month, day in [
                (2008, 2, 7), (2009, 1, 26), (2011, 2, 3), (2012, 1, 23),
                (2014, 1, 31), (2015, 2, 19), (2016, 2, 8), (2017, 1, 28),
                (2018, 2, 16), (2019, 2, 5), (2020, 1, 25)]:
            self.assertEqual(self.holidays[date(year, month, day)],
                             "Lunar New Year's Day")
        for year, month, day in [
                (2006, 1, 30), (2007, 2, 19), (2008, 2, 8), (2009, 1, 27),
                (2010, 2, 15), (2011, 2, 4), (2012, 1, 24), (2013, 2, 11),
                (2014, 2, 1), (2015, 2, 20), (2016, 2, 9), (2018, 2, 17),
                (2019, 2, 6)]:
            self.assertEqual(self.holidays[date(year, month, day)],
                             "The second day of Lunar New Year")
        for year, month, day in [
                (2006, 1, 31), (2007, 2, 20), (2008, 2, 9), (2009, 1, 28),
                (2010, 2, 16), (2011, 2, 5), (2012, 1, 25), (2013, 2, 12),
                (2015, 2, 21), (2016, 2, 10), (2017, 1, 30), (2019, 2, 7),
                (2020, 1, 27)]:
            self.assertEqual(self.holidays[date(year, month, day)],
                             "The third day of Lunar New Year")
        for year, month, day in [
                (2013, 2, 13), (2014, 2, 3), (2017, 1, 31), (2020, 1, 28),
                (2018, 2, 19)]:
            self.assertEqual(self.holidays[date(year, month, day)],
                             "The fourth day of Lunar New Year")

    def test_ching_ming_festival(self):
        for year, month, day in [
                (2006, 4, 5), (2007, 4, 5), (2008, 4, 4), (2009, 4, 4),
                (2010, 4, 5), (2011, 4, 5), (2012, 4, 4), (2013, 4, 4),
                (2014, 4, 5), (2016, 4, 4), (2017, 4, 4), (2018, 4, 5),
                (2019, 4, 5), (2020, 4, 4)]:
            self.assertEqual(self.holidays[date(year, month, day)],
                             "Ching Ming Festival")
        # 2015 is the substitute-day case.
        self.assertEqual(self.holidays[date(2015, 4, 6)], "The day " +
                         "following Ching Ming Festival")

    def test_easter(self):
        for year, month, day in [
                (2006, 4, 14), (2007, 4, 6), (2008, 3, 21), (2009, 4, 10),
                (2010, 4, 2), (2011, 4, 22), (2012, 4, 6), (2013, 3, 29),
                (2014, 4, 18), (2015, 4, 3), (2016, 3, 25), (2017, 4, 14),
                (2018, 3, 30), (2019, 4, 19), (2020, 4, 10)]:
            self.assertEqual(self.holidays[date(year, month, day)],
                             "Good Friday")
        for year, month, day in [
                (2019, 4, 20), (2013, 3, 30), (2020, 4, 11), (2009, 4, 11),
                (2018, 3, 31), (2008, 3, 22), (2011, 4, 23), (2010, 4, 3),
                (2015, 4, 4), (2006, 4, 15), (2017, 4, 15), (2016, 3, 26),
                (2012, 4, 7), (2007, 4, 7), (2014, 4, 19)]:
            self.assertEqual(self.holidays[date(year, month, day)],
                             "The day following Good Friday")
        for year, month, day in [
                (2006, 4, 17), (2007, 4, 9), (2009, 4, 13), (2008, 3, 24),
                (2011, 4, 25), (2012, 4, 9), (2013, 4, 1), (2014, 4, 21),
                (2016, 3, 28), (2017, 4, 17), (2018, 4, 2), (2019, 4, 22),
                (2020, 4, 13)]:
            self.assertEqual(self.holidays[date(year, month, day)],
                             "Easter Monday")
        name = "The day following Easter Monday"
        self.assertEqual(self.holidays[date(2010, 4, 6)], name)
        self.assertEqual(self.holidays[date(2015, 4, 7)], name)

    def test_labour_day(self):
        for year in [2006, 2007, 2008, 2009, 2010, 2012, 2013, 2014, 2015,
                     2017, 2018, 2019, 2020]:
            self.assertEqual(self.holidays[date(year, 5, 1)],
                             "Labour Day")
        name = "The day following Labour Day"
        self.assertEqual(self.holidays[date(2011, 5, 2)], name)
        self.assertEqual(self.holidays[date(2016, 5, 2)], name)

    def test_tuen_ng_festival(self):
        for year, month, day in [
                (2006, 5, 31), (2007, 6, 19), (2009, 5, 28), (2010, 6, 16),
                (2011, 6, 6), (2012, 6, 23), (2013, 6, 12), (2014, 6, 2),
                (2015, 6, 20), (2016, 6, 9), (2017, 5, 30), (2018, 6, 18),
                (2019, 6, 7), (2020, 6, 25)]:
            self.assertEqual(self.holidays[date(year, month, day)], "Tuen " +
                             "Ng Festival")
        self.assertEqual(self.holidays[date(2008, 6, 9)], "The day " +
                         "following Tuen Ng Festival")

    def test_hksar_day(self):
        for year in [2006, 2008, 2009, 2010, 2011, 2013, 2014, 2015, 2016,
                     2017, 2019, 2020]:
            self.assertEqual(self.holidays[date(year, 7, 1)], "Hong Kong " +
                             "Special Administrative Region Establishment " +
                             "Day")
        name = "The day following Hong Kong Special Administrative Region " + \
               "Establishment Day"
        self.assertEqual(self.holidays[date(2007, 7, 2)], name)
        self.assertEqual(self.holidays[date(2012, 7, 2)], name)
        self.assertEqual(self.holidays[date(2018, 7, 2)], name)

    def test_mid_autumn_festival(self):
        # The public holiday is normally the day AFTER the festival itself.
        for year, month, day in [
                (2006, 10, 7), (2007, 9, 26), (2008, 9, 15), (2010, 9, 23),
                (2011, 9, 13), (2012, 10, 1), (2013, 9, 20), (2014, 9, 9),
                (2015, 9, 28), (2016, 9, 16), (2017, 10, 5), (2018, 9, 25),
                (2019, 9, 14), (2020, 10, 2)]:
            self.assertEqual(self.holidays[date(year, month, day)], "The " +
                             "day following the Chinese Mid-Autumn Festival")
        self.assertEqual(self.holidays[date(2009, 10, 3)], "Chinese " +
                         "Mid-Autumn Festival")

    def test_national_day(self):
        for year in [2007, 2008, 2009, 2010, 2011, 2013, 2014, 2015, 2016,
                     2018, 2019, 2020]:
            self.assertEqual(self.holidays[date(year, 10, 1)], "National Day")
        name = "The day following National Day"
        self.assertEqual(self.holidays[date(2006, 10, 2)], name)
        self.assertEqual(self.holidays[date(2012, 10, 2)], name)
        self.assertEqual(self.holidays[date(2017, 10, 2)], name)

    def test_chung_yeung_festival(self):
        for year, month, day in [
                (2006, 10, 30), (2007, 10, 19), (2008, 10, 7), (2009, 10, 26),
                (2010, 10, 16), (2011, 10, 5), (2012, 10, 23), (2014, 10, 2),
                (2015, 10, 21), (2017, 10, 28), (2018, 10, 17), (2019, 10, 7)]:
            self.assertEqual(self.holidays[date(year, month, day)], "Chung " +
                             "Yeung Festival")
        name = "The day following Chung Yeung Festival"
        self.assertEqual(self.holidays[date(2013, 10, 14)], name)
        self.assertEqual(self.holidays[date(2016, 10, 10)], name)
        self.assertEqual(self.holidays[date(2020, 10, 26)], name)

    def test_christmas_day(self):
        for year in [2006, 2007, 2008, 2009, 2010, 2012, 2013, 2014, 2015,
                     2017, 2018, 2019, 2020]:
            self.assertEqual(self.holidays[date(year, 12, 25)], "Christmas " +
                             "Day")
        name = "The first weekday after Christmas Day"
        for year in range(2006, 2010):
            self.assertEqual(self.holidays[date(year, 12, 26)], name)
        self.assertEqual(self.holidays[date(2010, 12, 27)], name)
        for year in range(2011, 2021):
            self.assertEqual(self.holidays[date(year, 12, 26)], name)
        name = "The second weekday after Christmas Day"
        self.assertEqual(self.holidays[date(2011, 12, 27)], name)
        self.assertEqual(self.holidays[date(2016, 12, 27)], name)
class TestPeru(unittest.TestCase):
    """Peruvian national holidays for 2019."""

    def setUp(self):
        self.holidays = holidays.Peru()

    def test_2019(self):
        # "No laborables" (public-sector only) days are not included.
        expected = (
            (2019, 1, 1),
            (2019, 4, 18),
            (2019, 4, 19),
            (2019, 5, 1),
            (2019, 6, 29),
            (2019, 7, 29),
            (2019, 8, 30),
            (2019, 10, 8),
            (2019, 11, 1),
            (2019, 12, 8),
            (2019, 12, 25),
        )
        for y, m, d in expected:
            self.assertIn(date(y, m, d), self.holidays)
class TestNigeria(unittest.TestCase):
    """Fixed-date Nigerian public holidays for 2019."""

    def setUp(self):
        self.holidays = holidays.Nigeria()

    def test_fixed_holidays(self):
        for month, day in ((1, 1), (5, 1), (5, 27), (6, 12),
                           (10, 1), (12, 25), (12, 26)):
            self.assertIn(date(2019, month, day), self.holidays)
class TestChile(unittest.TestCase):
    """Chilean holidays, mainly for 2019."""

    def setUp(self):
        self.holidays = holidays.Chile()

    def test_2019(self):
        # "No laborables" (public-sector only) days are not included.
        # 2019-04-18 is deliberately not asserted here.
        expected = (
            (2019, 1, 1),
            (2019, 4, 19),
            (2019, 5, 1),
            (2019, 5, 21),
            (2019, 6, 29),
            (2019, 7, 16),
            (2019, 8, 15),
            (2019, 9, 18),
            (2019, 9, 19),
            (2019, 9, 20),
            (2009, 10, 12),
            (2019, 10, 12),
            (2019, 11, 1),
            (2019, 12, 8),
            (2019, 12, 25),
        )
        for y, m, d in expected:
            self.assertIn(date(y, m, d), self.holidays)
class TestDominicanRepublic(unittest.TestCase):
    """Dominican Republic holidays for 2020."""

    def setUp(self):
        self.do_holidays = holidays.DO()

    def test_do_holidays_2020(self):
        expected_2020 = (
            (1, 1),    # New Year's Day
            (1, 6),    # Epiphany
            (1, 21),   # Lady of Altagracia
            (1, 26),   # Juan Pablo Duarte Day
            (2, 27),   # Independence Day
            (4, 10),   # Good Friday
            (5, 4),    # Labor Day
            (6, 11),   # Feast of Corpus Christi
            (8, 16),   # Restoration Day
            (9, 24),   # Our Lady of Mercedes Day
            (11, 9),   # Constitution Day
            (12, 25),  # Christmas Day
        )
        for month, day in expected_2020:
            self.assertIn(date(2020, month, day), self.do_holidays)
        # Change-day-by-law check: New Year's Day 2019.
        self.assertIn(date(2019, 1, 1), self.do_holidays)
class TestNicaragua(unittest.TestCase):
    """Nicaraguan holidays for 2020, including Managua (MN) local days."""

    def setUp(self):
        self.ni_holidays = holidays.NI()

    def test_ni_holidays_2020(self):
        year = 2020
        mn_holidays = holidays.NI(prov="MN")
        national = (
            (1, 1),    # New Year's Day
            (4, 9),    # Maundy Thursday
            (4, 10),   # Good Friday
            (5, 1),    # Labor Day
            (7, 19),   # Revolution Day
            (9, 14),   # Battle of San Jacinto Day
            (9, 15),   # Independence Day
            (12, 8),   # Virgin's Day
            (12, 25),  # Christmas Day
        )
        for month, day in national:
            self.assertIn(date(year, month, day), self.ni_holidays)
        # Managua-only: Santo Domingo Day (down / up)
        self.assertIn(date(year, 8, 1), mn_holidays)
        self.assertIn(date(year, 8, 10), mn_holidays)
class TestSingapore(unittest.TestCase):
    """Singapore public holidays across several years, including lunar- and
    hijri-calendar based holidays."""

    def setUp(self):
        self.holidays = holidays.Singapore()

    def test_Singapore(self):
        # <= 1968 holidays
        self.assertIn(date(1968, 4, 13), self.holidays)
        self.assertIn(date(1968, 4, 15), self.holidays)
        self.assertIn(date(1968, 12, 26), self.holidays)
        # latest polling day
        self.assertIn(date(2015, 9, 11), self.holidays)
        # SG50
        self.assertIn(date(2015, 8, 7), self.holidays)
        # Year with lunar leap month
        # NOTE(review): this repeats the SG50 date above; possibly meant to
        # check a different date -- confirm against the upstream calendar.
        self.assertIn(date(2015, 8, 7), self.holidays)
        # Latest holidays
        # Source: https://www.mom.gov.sg/employment-practices/public-holidays
        # 2018
        self.assertIn(date(2018, 1, 1), self.holidays)
        self.assertIn(date(2018, 2, 16), self.holidays)
        self.assertIn(date(2018, 2, 17), self.holidays)
        self.assertIn(date(2018, 3, 30), self.holidays)
        self.assertIn(date(2018, 5, 1), self.holidays)
        self.assertIn(date(2018, 5, 29), self.holidays)
        self.assertIn(date(2018, 6, 15), self.holidays)
        self.assertIn(date(2018, 8, 9), self.holidays)
        self.assertIn(date(2018, 8, 22), self.holidays)
        self.assertIn(date(2018, 11, 6), self.holidays)
        self.assertIn(date(2018, 12, 25), self.holidays)
        # total holidays (11 + 0 falling on a Sunday)
        self.assertEqual(len(holidays.Singapore(years=[2018])), 11 + 0)
        # 2019
        self.assertIn(date(2019, 1, 1), self.holidays)
        self.assertIn(date(2019, 2, 5), self.holidays)
        self.assertIn(date(2019, 2, 6), self.holidays)
        self.assertIn(date(2019, 4, 19), self.holidays)
        self.assertIn(date(2019, 5, 1), self.holidays)
        self.assertIn(date(2019, 5, 19), self.holidays)
        self.assertIn(date(2019, 6, 5), self.holidays)
        self.assertIn(date(2019, 8, 9), self.holidays)
        self.assertIn(date(2019, 8, 11), self.holidays)
        self.assertIn(date(2019, 10, 27), self.holidays)
        self.assertIn(date(2019, 12, 25), self.holidays)
        # total holidays (11 + 3 falling on a Sunday)
        self.assertEqual(len(holidays.Singapore(years=[2019])), 11 + 3)
        # 2020
        self.assertIn(date(2020, 1, 1), self.holidays)
        self.assertIn(date(2020, 1, 25), self.holidays)
        self.assertIn(date(2020, 1, 26), self.holidays)
        self.assertIn(date(2020, 4, 10), self.holidays)
        self.assertIn(date(2020, 5, 1), self.holidays)
        self.assertIn(date(2020, 5, 7), self.holidays)
        self.assertIn(date(2020, 5, 24), self.holidays)
        self.assertIn(date(2020, 7, 31), self.holidays)
        self.assertIn(date(2020, 8, 9), self.holidays)
        self.assertIn(date(2020, 11, 14), self.holidays)
        self.assertIn(date(2020, 12, 25), self.holidays)
        # total holidays (11 + 3 falling on a Sunday)
        self.assertEqual(len(holidays.Singapore(years=[2020])), 11 + 3)
        # holidays estimated using lunar calendar
        self.assertIn(date(2021, 5, 26), self.holidays)
        self.assertIn(date(2021, 11, 3), self.holidays)
        # holidays estimated using the hijri-converter library
        # (optional dependency, Python >= 3.6 only)
        if sys.version_info >= (3, 6):
            import importlib.util
            if importlib.util.find_spec("hijri_converter"):
                # <= 1968 holidays
                self.assertIn(date(1968, 1, 2), self.holidays)
                # 2021
                self.assertIn(date(2021, 5, 13), self.holidays)
                self.assertIn(date(2021, 7, 20), self.holidays)
class TestSerbia(unittest.TestCase):
    """Serbian holidays, including observed (substitute) days that apply
    when a holiday falls on a weekend."""

    def setUp(self):
        self.holidays = holidays.Serbia(observed=True)

    def test_new_year(self):
        # If January 1st is in a weekend, test the observed day too.
        self.assertIn(date(2017, 1, 1), self.holidays)
        self.assertIn(date(2017, 1, 2), self.holidays)
        self.assertIn(date(2017, 1, 3), self.holidays)
        # With observed disabled, the substitute day disappears.
        self.holidays.observed = False
        self.assertNotIn(date(2017, 1, 3), self.holidays)

    def test_statehood_day(self):
        # If February 15th is in a weekend, test the observed day too.
        self.assertIn(date(2020, 2, 15), self.holidays)
        self.assertIn(date(2020, 2, 16), self.holidays)
        self.assertIn(date(2020, 2, 17), self.holidays)
        self.holidays.observed = False
        self.assertNotIn(date(2020, 2, 17), self.holidays)

    def test_labour_day(self):
        # If May 1st is in a weekend, test the observed day too.
        self.assertIn(date(2016, 5, 1), self.holidays)
        self.assertIn(date(2016, 5, 2), self.holidays)
        self.assertIn(date(2016, 5, 3), self.holidays)
        self.holidays.observed = False
        self.assertNotIn(date(2016, 5, 3), self.holidays)

    def test_armistice_day(self):
        # If November 11th is in a weekend, test the observed day too.
        self.assertIn(date(2018, 11, 11), self.holidays)
        self.assertIn(date(2018, 11, 12), self.holidays)
        self.holidays.observed = False
        self.assertNotIn(date(2018, 11, 12), self.holidays)

    def test_religious_holidays(self):
        # Orthodox Christmas
        self.assertIn(date(2020, 1, 7), self.holidays)
        self.assertNotIn(date(2020, 1, 8), self.holidays)
        # Orthodox Easter
        self.assertNotIn(date(2020, 4, 16), self.holidays)
        self.assertIn(date(2020, 4, 17), self.holidays)
        self.assertIn(date(2020, 4, 18), self.holidays)
        self.assertIn(date(2020, 4, 19), self.holidays)
        self.assertIn(date(2020, 4, 20), self.holidays)
        self.assertNotIn(date(2020, 4, 21), self.holidays)
class TestEgypt(unittest.TestCase):
    """Egyptian holidays: fixed Gregorian dates plus hijri-calendar days."""

    def setUp(self):
        self.holidays = holidays.EG()

    def test_2019(self):
        self.assertIn(date(2019, 1, 7), self.holidays)
        self.assertIn(date(2019, 1, 25), self.holidays)
        self.assertIn(date(2019, 4, 25), self.holidays)
        self.assertIn(date(2019, 4, 28), self.holidays)
        self.assertIn(date(2019, 4, 29), self.holidays)
        self.assertIn(date(2019, 5, 1), self.holidays)
        self.assertIn(date(2019, 6, 30), self.holidays)
        self.assertIn(date(2019, 7, 23), self.holidays)
        self.assertIn(date(2019, 10, 6), self.holidays)

    def test_coptic_christmas(self):
        self.assertIn(date(2019, 1, 7), self.holidays)

    def test_25_jan(self):
        self.assertIn(date(2019, 1, 25), self.holidays)

    def test_labour_day(self):
        self.assertIn(date(2019, 5, 1), self.holidays)

    def test_25_jan_from_2009(self):
        # Before 2009 Jan 25th wasn't celebrated
        self.holidays = holidays.EG(years=[2010])
        self.assertIn(date(2010, 1, 25), self.holidays)

    def test_hijri_based(self):
        # Hijri-based holidays require the optional hijri-converter
        # package, available only on Python >= 3.6.
        if sys.version_info >= (3, 6):
            import importlib.util
            if importlib.util.find_spec("hijri_converter"):
                self.holidays = holidays.EG(years=[2010])
                # NOTE(review): 2019 dates are asserted although only 2010
                # was requested -- presumably missing years are populated on
                # lookup; confirm against the holidays library behavior.
                self.assertIn(date(2019, 6, 5), self.holidays)
                self.assertIn(date(2019, 8, 10), self.holidays)
                self.assertIn(date(2019, 8, 11), self.holidays)
                self.assertIn(date(2019, 8, 12), self.holidays)
                self.assertIn(date(2019, 8, 31), self.holidays)
                self.assertIn(date(2019, 11, 9), self.holidays)
                # eid_alfitr
                self.assertIn(date(2019, 6, 4), self.holidays)
                # eid_aladha
                self.assertIn(date(2019, 8, 11), self.holidays)
                # islamic_new_year
                self.assertIn(date(2019, 8, 31), self.holidays)
                # eid_elfetr_2010
                self.assertIn(date(2010, 9, 10), self.holidays)
                # arafat_2010
                self.assertIn(date(2010, 11, 15), self.holidays)
                # muhammad's birthday
                self.assertIn(date(2010, 2, 26), self.holidays)
class TestIsrael(unittest.TestCase):
    """Israeli Memorial Day / Independence Day observed-date rules:
    the holiday may be postponed, moved earlier, or kept on time."""

    def test_memorial_day(self):
        self._test_observed_holidays('Memorial Day')

    def test_independence_day(self):
        self._test_observed_holidays('Independence Day')

    def _test_observed_holidays(self, holiday_name):
        # Independence Day is checked one day after Memorial Day, so the
        # same date arithmetic below serves both holidays.
        days_delta = 0 if holiday_name == 'Memorial Day' else 1
        # Postponed
        il_holidays = holidays.IL(years=[2017], observed=True)
        official_memorial_day = date(2017, 4, 30) + relativedelta(days=days_delta)
        observed_memorial_day = date(2017, 5, 1) + relativedelta(days=days_delta)
        self.assertIn(official_memorial_day, il_holidays)
        self.assertIn(holiday_name, il_holidays[official_memorial_day])
        self.assertIn(observed_memorial_day, il_holidays)
        self.assertIn(holiday_name + ' (Observed)', il_holidays[observed_memorial_day])
        # Earlier
        il_holidays = holidays.IL(years=[2018], observed=True)
        official_memorial_day = date(2018, 4, 19) + relativedelta(days=days_delta)
        observed_memorial_day = date(2018, 4, 18) + relativedelta(days=days_delta)
        self.assertIn(official_memorial_day, il_holidays)
        self.assertIn(holiday_name, il_holidays[official_memorial_day])
        self.assertIn(observed_memorial_day, il_holidays)
        self.assertIn(holiday_name + ' (Observed)', il_holidays[observed_memorial_day])
        # On time: no "(Observed)" entry anywhere in the year.
        il_holidays = holidays.IL(years=[2020], observed=True)
        official_memorial_day = date(2020, 4, 28) + relativedelta(days=days_delta)
        self.assertIn(official_memorial_day, il_holidays)
        self.assertIn(holiday_name, il_holidays[official_memorial_day])
        for names in il_holidays.values():
            self.assertNotIn(holiday_name + ' (Observed)', names)
class TestGreece(unittest.TestCase):
    """Greek holidays: fixed dates plus Orthodox-Easter-relative movable
    feasts (Clean Monday, Easter Monday, Monday of the Holy Spirit)."""

    def setUp(self):
        self.gr_holidays = holidays.GR()

    def test_fixed_holidays(self):
        years = range(2000, 2025)
        for y in years:
            fdays = ((date(y, 1, 1), "Πρωτοχρονιά [New Year's Day]"),
                     (date(y, 1, 6), "Θεοφάνεια [Epiphany]"),
                     (date(y, 3, 25), "Εικοστή Πέμπτη Μαρτίου " +
                      "[Independence Day]"),
                     (date(y, 5, 1), "Εργατική Πρωτομαγιά [Labour day]"),
                     (date(y, 8, 15), "Κοίμηση της Θεοτόκου " +
                      "[Assumption of Mary]"),
                     (date(y, 10, 28), "Ημέρα του Όχι [Ochi Day]"),
                     (date(y, 12, 25), "Χριστούγεννα [Christmas]"),
                     (date(y, 12, 26), "Επόμενη ημέρα των Χριστουγέννων " +
                      "[Day after Christmas]"))
            for (d, dstr) in fdays:
                self.assertIn(d, self.gr_holidays)
                self.assertIn(dstr, self.gr_holidays[d])

    def test_gr_clean_monday(self):
        checkdates = (date(2018, 2, 19),
                      date(2019, 3, 11),
                      date(2020, 3, 2),
                      date(2021, 3, 15),
                      date(2022, 3, 7),
                      date(2023, 2, 27),
                      date(2024, 3, 18))
        for d in checkdates:
            self.assertIn(d, self.gr_holidays)
            self.assertIn("Καθαρά Δευτέρα [Clean Monday]", self.gr_holidays[d])

    def test_gr_easter_monday(self):
        checkdates = (date(2018, 4, 9),
                      date(2019, 4, 29),
                      date(2020, 4, 20),
                      date(2021, 5, 3),
                      date(2022, 4, 25),
                      date(2023, 4, 17),
                      date(2024, 5, 6))
        for d in checkdates:
            self.assertIn(d, self.gr_holidays)
            self.assertIn("Δευτέρα του Πάσχα [Easter Monday]",
                          self.gr_holidays[d])

    def test_gr_monday_of_the_holy_spirit(self):
        checkdates = (date(2018, 5, 28),
                      date(2019, 6, 17),
                      date(2020, 6, 8),
                      date(2021, 6, 21),
                      date(2022, 6, 13),
                      date(2023, 6, 5),
                      date(2024, 6, 24))
        for d in checkdates:
            self.assertIn(d, self.gr_holidays)
            self.assertIn("Δευτέρα του Αγίου Πνεύματος " +
                          "[Monday of the Holy Spirit]", self.gr_holidays[d])
class TestParaguay(unittest.TestCase):
    """Paraguayan holidays: fixed dates, observed-day handling, and
    Easter-relative days."""

    def setUp(self):
        self.holidays = holidays.PY()

    def test_fixed_holidays(self):
        checkdates = (date(2016, 1, 1),
                      date(2020, 1, 1),
                      date(2020, 3, 2),
                      date(2020, 4, 9),
                      date(2020, 5, 1),
                      date(2020, 5, 15),
                      date(2020, 6, 15),
                      date(2020, 8, 15),
                      date(2020, 9, 29),
                      date(2020, 12, 8),
                      date(2020, 12, 25))
        for d in checkdates:
            self.assertIn(d, self.holidays)

    def test_no_observed(self):
        # no observed dates: these must all be absent once observed is off
        self.holidays.observed = False
        checkdates = (date(2017, 1, 1),
                      date(2014, 3, 2),
                      date(2020, 4, 12),
                      date(2016, 5, 1),
                      date(2016, 5, 15),
                      date(2016, 6, 12),
                      date(2015, 8, 15),
                      date(2018, 9, 29),
                      date(2018, 12, 8))
        for d in checkdates:
            self.assertNotIn(d, self.holidays)

    def test_easter(self):
        # Maundy Thursday (-3 days) and Good Friday (-2 days) are derived
        # from each year's Easter Sunday.
        for year, month, day in [
                (2002, 3, 31), (2003, 4, 20), (2004, 4, 11),
                (2005, 3, 27), (2006, 4, 16), (2007, 4, 8),
                (2008, 3, 23), (2009, 4, 12), (2010, 4, 4),
                (2011, 4, 24), (2012, 4, 8), (2013, 3, 31),
                (2014, 4, 20), (2015, 4, 5), (2016, 3, 27),
                (2017, 4, 16), (2018, 4, 1), (2019, 4, 21),
                (2020, 4, 12), (2021, 4, 4), (2022, 4, 17)]:
            easter = date(year, month, day)
            easter_thursday = easter - timedelta(days=3)
            easter_friday = easter - timedelta(days=2)
            for holiday in [easter_thursday, easter_friday, easter]:
                self.assertIn(holiday, self.holidays)
# Run the whole test suite when this module is executed directly.
if __name__ == "__main__":
    unittest.main()
|
ryanss/python-holidays
|
tests.py
|
Python
|
mit
| 274,276
|
[
"COLUMBUS"
] |
80ca879fbac2322e8903c48868036217b407c02923e1a95d3ee0e076befbae5b
|
#!/usr/bin/env python
# Copyright 2014-2021 The PySCF Developers. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
# Author: Qiming Sun <osirpt.sun@gmail.com>
#
from functools import reduce
import numpy
from pyscf import lib
from pyscf.lib import logger
from pyscf.tdscf import uhf
from pyscf.pbc import scf
from pyscf.pbc.tdscf.krhf import _get_e_ia, purify_krlyov_heff
from pyscf.pbc.lib.kpts_helper import gamma_point
from pyscf.pbc.scf import _response_functions # noqa
from pyscf import __config__
# Eigenvalue-filtering thresholds for the Davidson solvers below,
# overridable via pyscf __config__.
REAL_EIG_THRESHOLD = getattr(__config__, 'pbc_tdscf_uhf_TDDFT_pick_eig_threshold', 1e-3)
# NOTE: the name keeps the historical "POSTIVE" misspelling; renaming it
# would break any external code importing this constant.
POSTIVE_EIG_THRESHOLD = getattr(__config__, 'pbc_tdscf_uhf_TDDFT_positive_eig_threshold', 1e-3)
class TDA(uhf.TDA):
    """Tamm-Dancoff approximation (CIS) excited states for k-point
    unrestricted SCF (KUHF)."""
    # Convergence tolerance for the Davidson diagonalization.
    conv_tol = getattr(__config__, 'pbc_tdscf_rhf_TDA_conv_tol', 1e-6)

    def __init__(self, mf):
        from pyscf.pbc.df.df_ao2mo import warn_pbc2d_eri
        assert(isinstance(mf, scf.khf.KSCF))
        self.cell = mf.cell
        uhf.TDA.__init__(self, mf)
        warn_pbc2d_eri(mf)

    def gen_vind(self, mf):
        '''Compute Ax'''
        mo_coeff = mf.mo_coeff
        mo_energy = mf.mo_energy
        mo_occ = mf.mo_occ
        nkpts = len(mo_occ[0])
        nao, nmo = mo_coeff[0][0].shape
        # Occupied/virtual orbital indices and coefficients per k-point,
        # separately for alpha ("a") and beta ("b") spins.
        occidxa = [numpy.where(mo_occ[0][k]> 0)[0] for k in range(nkpts)]
        occidxb = [numpy.where(mo_occ[1][k]> 0)[0] for k in range(nkpts)]
        viridxa = [numpy.where(mo_occ[0][k]==0)[0] for k in range(nkpts)]
        viridxb = [numpy.where(mo_occ[1][k]==0)[0] for k in range(nkpts)]
        orboa = [mo_coeff[0][k][:,occidxa[k]] for k in range(nkpts)]
        orbob = [mo_coeff[1][k][:,occidxb[k]] for k in range(nkpts)]
        orbva = [mo_coeff[0][k][:,viridxa[k]] for k in range(nkpts)]
        orbvb = [mo_coeff[1][k][:,viridxb[k]] for k in range(nkpts)]
        # Orbital-energy differences e_a - e_i: the diagonal part of A.
        e_ia_a = _get_e_ia(mo_energy[0], mo_occ[0])
        e_ia_b = _get_e_ia(mo_energy[1], mo_occ[1])
        hdiag = numpy.hstack([x.ravel() for x in (e_ia_a + e_ia_b)])
        mem_now = lib.current_memory()[0]
        max_memory = max(2000, self.max_memory*.8-mem_now)
        vresp = mf.gen_response(hermi=0, max_memory=max_memory)

        def vind(zs):
            # Apply the TDA A-matrix to a batch of flat trial vectors zs.
            nz = len(zs)
            zs = [_unpack(z, mo_occ) for z in zs]
            # Transition density matrices in the AO basis, spin-major.
            dmov = numpy.empty((2,nz,nkpts,nao,nao), dtype=numpy.complex128)
            for i in range(nz):
                dm1a, dm1b = zs[i]
                for k in range(nkpts):
                    dmov[0,i,k] = reduce(numpy.dot, (orboa[k], dm1a[k], orbva[k].conj().T))
                    dmov[1,i,k] = reduce(numpy.dot, (orbob[k], dm1b[k], orbvb[k].conj().T))
            # NOTE(review): exxdiv is disabled for the response evaluation --
            # presumably the divergence treatment does not apply to the
            # excitation response; confirm against pyscf conventions.
            with lib.temporary_env(mf, exxdiv=None):
                dmov = dmov.reshape(2*nz,nkpts,nao,nao)
                v1ao = vresp(dmov)
                v1ao = v1ao.reshape(2,nz,nkpts,nao,nao)
            v1s = []
            for i in range(nz):
                dm1a, dm1b = zs[i]
                v1as = []
                v1bs = []
                for k in range(nkpts):
                    # Project the AO response back onto the occ-vir block and
                    # add the diagonal orbital-energy contribution.
                    v1a = reduce(numpy.dot, (orboa[k].conj().T, v1ao[0,i,k], orbva[k]))
                    v1b = reduce(numpy.dot, (orbob[k].conj().T, v1ao[1,i,k], orbvb[k]))
                    v1a += e_ia_a[k] * dm1a[k]
                    v1b += e_ia_b[k] * dm1b[k]
                    v1as.append(v1a.ravel())
                    v1bs.append(v1b.ravel())
                v1s += v1as + v1bs
            return numpy.hstack(v1s).reshape(nz,-1)
        return vind, hdiag

    def init_guess(self, mf, nstates=None):
        # Koopmans-like guess: unit vectors on the smallest orbital-energy
        # differences.
        if nstates is None: nstates = self.nstates
        mo_energy = mf.mo_energy
        mo_occ = mf.mo_occ
        e_ia_a = _get_e_ia(mo_energy[0], mo_occ[0])
        e_ia_b = _get_e_ia(mo_energy[1], mo_occ[1])
        e_ia = numpy.hstack([x.ravel() for x in (e_ia_a + e_ia_b)])
        e_ia_max = e_ia.max()
        nov = e_ia.size
        nstates = min(nstates, nov)
        e_threshold = min(e_ia_max, e_ia[numpy.argsort(e_ia)[nstates-1]])
        # Handle degeneracy
        e_threshold += 1e-6
        idx = numpy.where(e_ia <= e_threshold)[0]
        x0 = numpy.zeros((idx.size, nov))
        for i, j in enumerate(idx):
            x0[i, j] = 1 # Koopmans' excitations
        return x0

    def kernel(self, x0=None):
        '''TDA diagonalization solver
        '''
        self.check_sanity()
        self.dump_flags()
        vind, hdiag = self.gen_vind(self._scf)
        precond = self.get_precond(hdiag)
        if x0 is None:
            x0 = self.init_guess(self._scf, self.nstates)

        def pickeig(w, v, nroots, envs):
            # Keep only eigenvalues above the positive threshold.
            idx = numpy.where(w > POSTIVE_EIG_THRESHOLD)[0]
            return w[idx], v[:,idx], idx
        log = logger.Logger(self.stdout, self.verbose)
        precision = self.cell.precision * 1e-2
        hermi = 1
        self.converged, self.e, x1 = \
            lib.davidson1(vind, x0, precond,
                          tol=self.conv_tol,
                          nroots=self.nstates, lindep=self.lindep,
                          max_space=self.max_space, pick=pickeig,
                          fill_heff=purify_krlyov_heff(precision, hermi, log),
                          verbose=self.verbose)
        mo_occ = self._scf.mo_occ
        self.xy = [(_unpack(xi, mo_occ), # (X_alpha, X_beta)
                    (0, 0)) # (Y_alpha, Y_beta)
                   for xi in x1]
        #TODO: analyze CIS wfn point group symmetry
        return self.e, self.xy

# Common aliases for the TDA solver.
CIS = KTDA = TDA
class TDHF(TDA):
def gen_vind(self, mf):
mo_coeff = mf.mo_coeff
mo_energy = mf.mo_energy
mo_occ = mf.mo_occ
nkpts = len(mo_occ[0])
nao, nmo = mo_coeff[0][0].shape
occidxa = [numpy.where(mo_occ[0][k]> 0)[0] for k in range(nkpts)]
occidxb = [numpy.where(mo_occ[1][k]> 0)[0] for k in range(nkpts)]
viridxa = [numpy.where(mo_occ[0][k]==0)[0] for k in range(nkpts)]
viridxb = [numpy.where(mo_occ[1][k]==0)[0] for k in range(nkpts)]
orboa = [mo_coeff[0][k][:,occidxa[k]] for k in range(nkpts)]
orbob = [mo_coeff[1][k][:,occidxb[k]] for k in range(nkpts)]
orbva = [mo_coeff[0][k][:,viridxa[k]] for k in range(nkpts)]
orbvb = [mo_coeff[1][k][:,viridxb[k]] for k in range(nkpts)]
e_ia_a = _get_e_ia(mo_energy[0], mo_occ[0])
e_ia_b = _get_e_ia(mo_energy[1], mo_occ[1])
hdiag = numpy.hstack([x.ravel() for x in (e_ia_a + e_ia_b)])
hdiag = numpy.hstack((hdiag, -hdiag))
tot_x_a = sum(x.size for x in e_ia_a)
tot_x_b = sum(x.size for x in e_ia_b)
tot_x = tot_x_a + tot_x_b
mem_now = lib.current_memory()[0]
max_memory = max(2000, self.max_memory*.8-mem_now)
vresp = mf.gen_response(hermi=0, max_memory=max_memory)
def vind(xys):
nz = len(xys)
x1s = [_unpack(x[:tot_x], mo_occ) for x in xys]
y1s = [_unpack(x[tot_x:], mo_occ) for x in xys]
dmov = numpy.empty((2,nz,nkpts,nao,nao), dtype=numpy.complex128)
for i in range(nz):
xa, xb = x1s[i]
ya, yb = y1s[i]
for k in range(nkpts):
dmx = reduce(numpy.dot, (orboa[k], xa[k] , orbva[k].conj().T))
dmy = reduce(numpy.dot, (orbva[k], ya[k].T, orboa[k].conj().T))
dmov[0,i,k] = dmx + dmy # AX + BY
dmx = reduce(numpy.dot, (orbob[k], xb[k] , orbvb[k].conj().T))
dmy = reduce(numpy.dot, (orbvb[k], yb[k].T, orbob[k].conj().T))
dmov[1,i,k] = dmx + dmy # AX + BY
with lib.temporary_env(mf, exxdiv=None):
dmov = dmov.reshape(2*nz,nkpts,nao,nao)
v1ao = vresp(dmov)
v1ao = v1ao.reshape(2,nz,nkpts,nao,nao)
v1s = []
for i in range(nz):
xa, xb = x1s[i]
ya, yb = y1s[i]
v1xsa = []
v1xsb = []
v1ysa = []
v1ysb = []
for k in range(nkpts):
v1xa = reduce(numpy.dot, (orboa[k].conj().T, v1ao[0,i,k], orbva[k]))
v1xb = reduce(numpy.dot, (orbob[k].conj().T, v1ao[1,i,k], orbvb[k]))
v1ya = reduce(numpy.dot, (orbva[k].conj().T, v1ao[0,i,k], orboa[k])).T
v1yb = reduce(numpy.dot, (orbvb[k].conj().T, v1ao[1,i,k], orbob[k])).T
v1xa+= e_ia_a[k] * xa[k]
v1xb+= e_ia_b[k] * xb[k]
v1ya+= e_ia_a[k] * ya[k]
v1yb+= e_ia_b[k] * yb[k]
v1xsa.append(v1xa.ravel())
v1xsb.append(v1xb.ravel())
v1ysa.append(-v1ya.ravel())
v1ysb.append(-v1yb.ravel())
v1s += v1xsa + v1xsb + v1ysa + v1ysb
return numpy.hstack(v1s).reshape(nz,-1)
return vind, hdiag
def init_guess(self, mf, nstates=None, wfnsym=None):
    """Build initial TDHF trial vectors: TDA X amplitudes with Y = 0.

    The X part is taken from the TDA initial guess; a zero block of the
    same shape is appended so each vector has the full (X, Y) length.
    """
    x0 = TDA.init_guess(self, mf, nstates)
    return numpy.hstack((x0, numpy.zeros_like(x0)))
def kernel(self, x0=None):
    '''TDHF diagonalization with non-Hermitian eigenvalue solver

    Kwargs:
        x0 : initial guess vectors for the Davidson iterations.  If None,
            TDA-style guesses (Y = 0) are generated from the SCF object.

    Returns:
        (e, xy): ``self.e`` holds the excitation energies of the retained
        solutions and ``self.xy`` the normalized ((Xa, Xb), (Ya, Yb))
        amplitudes, unpacked per k-point.
    '''
    self.check_sanity()
    self.dump_flags()

    vind, hdiag = self.gen_vind(self._scf)
    precond = self.get_precond(hdiag)
    if x0 is None:
        x0 = self.init_guess(self._scf, self.nstates)

    # The eigenproblem is real only for Gamma-point sampling with real
    # SCF orbital coefficients; this flag lets the solver exploit that.
    real_system = (gamma_point(self._scf.kpts) and
                   self._scf.mo_coeff[0][0].dtype == numpy.double)

    # We only need positive eigenvalues
    def pickeig(w, v, nroots, envs):
        # Select eigenvalues that are numerically real and above the
        # positive-energy threshold, then convert to real form.
        realidx = numpy.where((abs(w.imag) < REAL_EIG_THRESHOLD) &
                              (w.real > POSTIVE_EIG_THRESHOLD))[0]
        return lib.linalg_helper._eigs_cmplx2real(w, v, realidx, real_system)

    log = logger.Logger(self.stdout, self.verbose)
    # Tolerance used to purify the Krylov subspace effective Hamiltonian,
    # tied to the cell's integral precision.
    precision = self.cell.precision * 1e-2

    self.converged, w, x1 = \
            lib.davidson_nosym1(vind, x0, precond,
                                tol=self.conv_tol,
                                nroots=self.nstates, lindep=self.lindep,
                                max_space=self.max_space, pick=pickeig,
                                fill_heff=purify_krlyov_heff(precision, 0, log),
                                verbose=self.verbose)

    mo_occ = self._scf.mo_occ
    e = []
    xy = []
    for i, z in enumerate(x1):
        xs, ys = z.reshape(2,-1)
        # Normalize to the TDHF metric |X|^2 - |Y|^2 = 1.  Solutions with
        # a non-positive metric norm are discarded.
        norm = lib.norm(xs)**2 - lib.norm(ys)**2
        if norm > 0:
            norm = 1/numpy.sqrt(norm)
            xs *= norm
            ys *= norm
            e.append(w[i])
            xy.append((_unpack(xs, mo_occ), _unpack(ys, mo_occ)))
    self.e = numpy.array(e)
    self.xy = xy
    return self.e, self.xy
RPA = KTDHF = TDHF
def _unpack(vo, mo_occ):
za = []
zb = []
p1 = 0
for k, occ in enumerate(mo_occ[0]):
no = numpy.count_nonzero(occ > 0)
nv = occ.size - no
p0, p1 = p1, p1 + no * nv
za.append(vo[p0:p1].reshape(no,nv))
for k, occ in enumerate(mo_occ[1]):
no = numpy.count_nonzero(occ > 0)
nv = occ.size - no
p0, p1 = p1, p1 + no * nv
zb.append(vo[p0:p1].reshape(no,nv))
return za, zb
# Attach the k-point TD solvers as methods of the KUHF mean-field class,
# so that mf.TDA() / mf.TDHF() construct the corresponding response object.
scf.kuhf.KUHF.TDA = lib.class_as_method(KTDA)
scf.kuhf.KUHF.TDHF = lib.class_as_method(KTDHF)
if __name__ == '__main__':
    # Example: k-point TDA/TDHF excitation energies for diamond with KUHF.
    from pyscf.pbc import gto
    from pyscf.pbc import scf
    from pyscf.pbc import df

    # Diamond primitive cell (two carbon atoms); lengths in Bohr.
    cell = gto.Cell()
    cell.unit = 'B'
    cell.atom = '''
C 0. 0. 0.
C 1.68506879 1.68506879 1.68506879
'''
    cell.a = '''
0. 3.37013758 3.37013758
3.37013758 0. 3.37013758
3.37013758 3.37013758 0.
'''
    cell.basis = 'gth-szv'
    cell.pseudo = 'gth-pade'
    cell.mesh = [37]*3
    cell.build()

    # Closed-shell reference on a 2x1x1 k-point mesh (exxdiv disabled to
    # match the response evaluation).
    mf = scf.KUHF(cell, cell.make_kpts([2,1,1])).set(exxdiv=None)
    # mf.with_df = df.DF(cell, cell.make_kpts([2,1,1]))
    # mf.with_df.auxbasis = 'weigend'
    # mf.with_df._cderi = 'eri3d-df.h5'
    # mf.with_df.build(with_j3c=False)
    mf.run()

    td = TDA(mf)
    td.verbose = 5
    td.nstates = 5
    print(td.kernel()[0] * 27.2114)  # excitation energies in eV

    td = TDHF(mf)
    td.verbose = 5
    td.nstates = 5
    print(td.kernel()[0] * 27.2114)  # excitation energies in eV

    # Open-shell (triplet) variant of the same cell.
    cell.spin = 2
    mf = scf.KUHF(cell, cell.make_kpts([2,1,1])).set(exxdiv=None)
    mf.run()

    td = TDA(mf)
    td.verbose = 5
    td.nstates = 5
    print(td.kernel()[0] * 27.2114)  # excitation energies in eV

    td = TDHF(mf)
    td.verbose = 5
    td.nstates = 5
    print(td.kernel()[0] * 27.2114)  # excitation energies in eV
|
sunqm/pyscf
|
pyscf/pbc/tdscf/kuhf.py
|
Python
|
apache-2.0
| 13,207
|
[
"PySCF"
] |
8ab7789056265950013e2aa533f92e0c94234451554f727915eed5571c006d40
|
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.