text
stringlengths 12
1.05M
| repo_name
stringlengths 5
86
| path
stringlengths 4
191
| language
stringclasses 1
value | license
stringclasses 15
values | size
int32 12
1.05M
| keyword
listlengths 1
23
| text_hash
stringlengths 64
64
|
|---|---|---|---|---|---|---|---|
# -*- coding: utf-8 -*-
""" Module gifted
Provides functionality for reading and writing animated GIF images.
Use write_gif to write a series of numpy arrays or PIL images as an
animated GIF. Use read_gif to read an animated gif as a series of numpy
arrays.
Note that since July 2004, all patents on the LZW compression patent have
expired. Therefore the GIF format may now be used freely.
Acknowledgements
----------------
Many thanks to Ant1 for:
* noting the use of "palette=PIL.Image.ADAPTIVE", which significantly
improves the results.
* the modifications to save each image with its own palette, or optionally
the global palette (if its the same).
Many thanks to Marius van Voorden for porting the NeuQuant quantization
algorithm of Anthony Dekker to Python (See the NeuQuant class for its
license).
Many thanks to Alex Robinson for implementing the concept of subrectangles,
which (depending on image content) can give a very significant reduction in
file size.
This code is based on gifmaker (in the scripts folder of the source
distribution of PIL)
Useful links
-------------
* http://tronche.com/computer-graphics/gif/
* http://en.wikipedia.org/wiki/Graphics_Interchange_Format
* http://www.w3.org/Graphics/GIF/spec-gif89a.txt
"""
import os
from fnmatch import fnmatch
try:
import PIL
from PIL import Image
from PIL.GifImagePlugin import getheader, getdata
except ImportError:
PIL = None
try:
import numpy as np
except ImportError:
np = None
def get_cKDTree():
    """Return scipy's cKDTree class, or None when scipy is not installed."""
    try:
        from scipy.spatial import cKDTree as tree_cls
    except ImportError:
        tree_cls = None
    return tree_cls
# getheader gives an 87a header and a color palette (two elements in a list).
# getdata()[0] gives the Image Descriptor up to (including) "LZW min code size".
# getdata()[1:] is the image data itself in chunks of 256 bytes (well,
# technically the first byte says how many bytes follow, after which that
# amount (max 255) follows).
def check_images(images):
    """ check_images(images)
    Check numpy images and correct intensity range etc.
    The same for all movie formats.
    """
    checked = []
    for frame in images:
        if PIL and isinstance(frame, PIL.Image.Image):
            # PIL images are assumed to be fine as-is.
            checked.append(frame)
        elif np and isinstance(frame, np.ndarray):
            # Normalize dtype to uint8.
            if frame.dtype == np.uint8:
                checked.append(frame)
            elif frame.dtype in [np.float32, np.float64]:
                # Floats are clipped to [0, 1] and rescaled to [0, 255].
                scaled = frame.copy()
                scaled[scaled < 0] = 0
                scaled[scaled > 1] = 1
                scaled *= 255
                checked.append(scaled.astype(np.uint8))
            else:
                checked.append(frame.astype(np.uint8))
            # Validate dimensionality: grayscale (2D) or RGB/RGBA (3D).
            if frame.ndim not in (2, 3):
                raise ValueError('This array can not represent an image.')
            if frame.ndim == 3 and frame.shape[2] not in (3, 4):
                raise ValueError('This array can not represent an image.')
        else:
            raise ValueError('Invalid image type: ' + str(type(frame)))
    return checked
def int_to_bin(i):
    """Serialize the integer *i* as two bytes, least-significant first."""
    return i.to_bytes(2, 'little')
class GifWriter(object):
    """ GifWriter()
    Class that contains methods for helping write the animated GIF file.
    The static helpers build individual byte-level pieces of the GIF89a
    stream; write_gif_to_file() assembles them for a list of paletted
    PIL images.
    """

    def __init__(self):
        # Transparency flag; set externally (see write_gif) when frames
        # carry an alpha channel, and consulted when writing each frame's
        # Graphics Control Extension.
        self.transparency = None

    @staticmethod
    def get_header_anim(img):
        """ get_header_anim(img)
        Get animation header. To replace PILs getheader()[0].
        """
        header = b'GIF89a'
        header += int_to_bin(img.size[0])  # logical screen width
        header += int_to_bin(img.size[1])  # logical screen height
        # 0x87: global color table present, 256 entries; then background
        # color index 0 and pixel aspect ratio 0.
        header += b'\x87\x00\x00'
        return header

    @staticmethod
    def get_image_descriptor(img, coords=None):
        """ get_image_descriptor(img, coords=None)
        Used for the local color table properties per image.
        Otherwise global color table applies to all frames irrespective of
        whether additional colors comes in play that require a redefined
        palette. Still a maximum of 256 color per frame, obviously.
        Written by Ant1 on 2010-08-22
        Modified by Alex Robinson in January 2011 to implement subrectangles.
        """
        # Default: use full image and place at upper left
        if coords is None:
            coords = (0, 0)
        # Image separator
        descriptor = b'\x2C'
        # Image position and size
        descriptor += int_to_bin(coords[0])  # Left position
        descriptor += int_to_bin(coords[1])  # Top position
        descriptor += int_to_bin(img.size[0])  # image width
        descriptor += int_to_bin(img.size[1])  # image height
        # packed field: local color table flag1, interlace0, sorted table0,
        # reserved00, lct size111=7=2^(7+1)=256.
        descriptor += b'\x87'
        # LZW minimum size code now comes later, beginning of [image data] blocks
        return descriptor

    @staticmethod
    def get_app_ext(loops=float('inf')):
        """ get_app_ext(loops=float('inf'))
        Application extension. This part specifies the amount of loops.
        If loops is 0 or inf, it goes on infinitely.
        """
        if loops == 0 or loops == float('inf'):
            # Use the highest 16-bit value. Formally, omitting the
            # extension means "no looping", and a loop count of zero means
            # infinite, but omitting it did not work in practice.
            loops = 2 ** 16 - 1
        ext = b"\x21\xFF\x0B"  # application extension
        ext += b"NETSCAPE2.0"
        ext += b"\x03\x01"
        ext += int_to_bin(loops)
        ext += b'\x00'  # end
        return ext

    @staticmethod
    def get_graphics_control_ext(
            duration=0.1, dispose=2, transparent_flag=0, transparency_index=0):
        """ get_graphics_control_ext(duration=0.1, dispose=2)
        Graphics Control Extension. A sort of header at the start of
        each image. Specifies duration and transparency.
        Dispose
        -------
        * 0 - No disposal specified.
        * 1 - Do not dispose. The graphic is to be left in place.
        * 2 - Restore to background color. The area used by the graphic
          must be restored to the background color.
        * 3 - Restore to previous. The decoder is required to restore the
          area overwritten by the graphic with what was there prior to
          rendering the graphic.
        * 4-7 - To be defined.
        """
        ext = b'\x21\xF9\x04'
        # Packed byte: low bit 1 == transparency, 2nd bit 1 == user input,
        # next 3 bits (the low two of which are used) are dispose.
        ext += bytes([((dispose & 3) << 2) | (transparent_flag & 1)])
        ext += int_to_bin(int(duration * 100))  # in 100th of seconds
        ext += bytes([transparency_index])  # transparent color index
        ext += b'\x00'  # end
        return ext

    def handle_sub_rectangles(self, images, sub_rectangles):
        """ handle_sub_rectangles(images, sub_rectangles)
        Handle the sub-rectangle stuff. If the rectangles are given by the
        user, the values are checked. Otherwise the subrectangles are
        calculated automatically.
        Returns (images, positions).
        """
        if isinstance(sub_rectangles, (tuple, list)):
            # xy given directly; validate and normalize to a list.
            sub_recs = sub_rectangles
            if hasattr(sub_recs, '__len__'):
                if len(sub_recs) == len(images):
                    sub_recs = [xxyy for xxyy in sub_recs]
                else:
                    raise ValueError("len(sub_recs) doesn't match amount of images.")
            else:
                sub_recs = [sub_recs for image in images]
            # The first frame is always drawn in full at the origin.
            sub_recs[0] = (0, 0)
        else:
            # Calculate xy using some basic image processing.
            if np is None:
                raise RuntimeError("Need Numpy to use auto-sub_rectangles.")
            # First make numpy arrays if required
            for i in range(len(images)):
                image = images[i]
                if isinstance(image, Image.Image):
                    tmp = image.convert()  # Make without palette
                    array_ = np.asarray(tmp)
                    if len(array_.shape) == 0:
                        raise MemoryError("Too little memory to convert PIL image to array")
                    images[i] = array_
            # Determine the sub rectangles
            images, sub_recs = self.get_sub_rectangles(images)
        # Done
        return images, sub_recs

    @staticmethod
    def get_sub_rectangles(images):
        """ get_sub_rectangles(images)
        Calculate the minimal rectangles that need updating each frame.
        Returns a two-element tuple containing the cropped images and a
        list of x-y positions.
        Calculating the subrectangles takes extra time, obviously. However,
        if the image sizes were reduced, the actual writing of the GIF
        goes faster. In some cases applying this method produces a GIF faster.
        """
        # Check image count
        if len(images) < 2:
            return images, [(0, 0) for i in images]
        # We need numpy
        if np is None:
            raise RuntimeError("Need Numpy to calculate sub-rectangles. ")
        # Prepare: the first frame is always stored in full.
        ims2 = [images[0]]
        coords = [(0, 0)]
        # Iterate over images
        prev = images[0]
        for image in images[1:]:
            # Get difference, sum over colors. (uint8 arithmetic wraps,
            # but a wrapped difference is still nonzero exactly when the
            # pixels differ, which is all argwhere() below relies on.)
            diff = np.abs(image - prev)
            if diff.ndim == 3:
                diff = diff.sum(2)
            # Get begin and end for both dimensions
            X = np.argwhere(diff.sum(0))
            Y = np.argwhere(diff.sum(1))
            # Get rect coordinates
            if X.size and Y.size:
                x0, x1 = int(X[0][0]), int(X[-1][0] + 1)
                y0, y1 = int(Y[0][0]), int(Y[-1][0] + 1)
            else:  # No change ... make it minimal
                x0, x1 = 0, 2
                y0, y1 = 0, 2
            # Cut out and store
            im2 = image[y0:y1, x0:x1]
            prev = image
            ims2.append(im2)
            coords.append((x0, y0))
        # Done
        return ims2, coords

    def convert_images_to_PIL(self, images, dither, nq=0):
        """ convert_images_to_PIL(images, dither, nq=0)
        Convert images to paletted PIL images, which can then be
        written to a single animated GIF.
        """
        # Convert to PIL images
        images2 = []
        for image in images:
            if isinstance(image, Image.Image):
                images2.append(image)
            elif np and isinstance(image, np.ndarray):
                if image.ndim == 3 and image.shape[2] == 3:
                    image = Image.fromarray(image, 'RGB')
                elif image.ndim == 3 and image.shape[2] == 4:
                    # Alpha channel present: remember to write transparency.
                    self.transparency = True
                    image = Image.fromarray(image[:, :, :4], 'RGBA')
                elif image.ndim == 2:
                    image = Image.fromarray(image, 'L')
                images2.append(image)
        # Convert to paletted PIL images
        images, images2 = images2, []
        if nq >= 1:
            # NeuQuant algorithm
            for image in images:
                image = image.convert("RGBA")  # NQ assumes RGBA
                # Learn colors from image
                nq_instance = NeuQuant(image, int(nq))
                if dither:
                    image = image.convert("RGB").quantize(
                        palette=nq_instance.paletteImage(), colors=255)
                else:
                    # Quantize the image itself. NOTE: the previous call
                    # passed colors=255, which NeuQuant.quantize() does
                    # not accept; the palette size is fixed at training.
                    image = nq_instance.quantize(image)
                # since NQ assumes transparency
                self.transparency = True
                if self.transparency:
                    alpha = image.split()[3]
                    mask = Image.eval(alpha, lambda a: 255 if a <= 128 else 0)
                    image.paste(255, mask=mask)
                images2.append(image)
        else:
            for i in range(len(images)):
                image = images[i].convert('RGB').convert(
                    'P',
                    palette=Image.ADAPTIVE,  # Adaptive PIL algorithm
                    dither=dither,
                    colors=255
                )
                if self.transparency:
                    # Map mostly-transparent pixels to palette index 255.
                    alpha = images[i].split()[3]
                    mask = Image.eval(alpha, lambda a: 255 if a <= 128 else 0)
                    image.paste(255, mask=mask)
                images2.append(image)
        # Done
        return images2

    def write_gif_to_file(self, file_, images, durations, loops, xys, disposes):
        """ write_gif_to_file(file_, images, durations, loops, xys, disposes)
        Given a set of images writes the bytes to the specified stream.
        Returns the number of frames written.
        """
        # Obtain palette for all images and count each occurrence
        palettes, occur = [], []
        for image in images:
            palettes.append(getheader(image)[0][3])
        for palette in palettes:
            occur.append(palettes.count(palette))
        # Select most-used palette as the global one (or first in case no max)
        global_palette = palettes[occur.index(max(occur))]
        # Init
        frames = 0
        first_frame = True
        for image, palette in zip(images, palettes):
            if first_frame:
                # Write file header, global palette and loop extension once.
                header = self.get_header_anim(image)
                appext = self.get_app_ext(loops)
                file_.write(header)
                file_.write(global_palette)
                file_.write(appext)
                # Next frame is not the first
                first_frame = False
            # Gather the per-frame data
            data = getdata(image)
            imdes, data = data[0], data[1:]
            transparent_flag = 1 if self.transparency else 0
            graphext = self.get_graphics_control_ext(
                durations[frames],
                disposes[frames],
                transparent_flag=transparent_flag,
                transparency_index=255
            )
            # Make image descriptor suitable for using 256 local color palette
            lid = self.get_image_descriptor(image, xys[frames])
            # Write local header
            if (palette != global_palette) or (disposes[frames] != 2):
                # Use local color palette
                file_.write(graphext)
                file_.write(lid)  # write suitable image descriptor
                file_.write(palette)  # write local color table
                file_.write(b'\x08')  # LZW minimum size code
            else:
                # Use global color palette
                file_.write(graphext)
                file_.write(imdes)  # write suitable image descriptor
            # Write image data
            for datum in data:
                file_.write(datum)
            # Prepare for next round
            frames = frames + 1
        file_.write(b';')  # end gif
        return frames
# Exposed functions
def write_gif(filename, images, duration=0.1,
              repeat=True, dither=False, nq=0, sub_rectangles=True, dispose=None):
    """ write_gif(filename, images, duration=0.1, repeat=True, dither=False,
    nq=0, sub_rectangles=True, dispose=None)
    Write an animated gif from the specified images.
    Parameters
    ----------
    filename : string
        The name of the file to write the image to.
    images : list
        Should be a list consisting of PIL images or numpy arrays.
        The latter should be between 0 and 255 for integer types, and
        between 0 and 1 for float types.
    duration : scalar or list of scalars
        The duration for all frames, or (if a list) for each frame.
    repeat : bool or integer
        The amount of loops. If True, loops infinitely.
    dither : bool
        Whether to apply dithering
    nq : integer
        If nonzero, applies the NeuQuant quantization algorithm to create
        the color palette. This algorithm is superior, but slower than
        the standard PIL algorithm. The value of nq is the quality
        parameter. 1 represents the best quality. 10 is in general a
        good tradeoff between quality and speed. When using this option,
        better results are usually obtained when sub_rectangles is False.
    sub_rectangles : False, True, or a list of 2-element tuples
        Whether to use sub-rectangles. If True, the minimal rectangle that
        is required to update each frame is automatically detected. This
        can give significant reductions in file size, particularly if only
        a part of the image changes. One can also give a list of x-y
        coordinates if you want to do the cropping yourself. The default
        is True.
    dispose : int
        How to dispose each frame. 1 means that each frame is to be left
        in place. 2 means the background color should be restored after
        each frame. 3 means the decoder should restore the previous frame.
        If sub_rectangles==False, the default is 2, otherwise it is 1.
    """
    # Check PIL
    if PIL is None:
        raise RuntimeError("Need PIL to write animated gif files.")
    # Check images
    images = check_images(images)
    # Instantiate writer object
    gif_writer = GifWriter()
    gif_writer.transparency = False  # init transparency flag used in GifWriter functions
    # Check loops
    if repeat is False:
        loops = 1
    elif repeat is True:
        loops = 0  # zero means infinite
    else:
        loops = int(repeat)
    # Check duration: either one per frame or a single scalar for all
    if hasattr(duration, '__len__'):
        if len(duration) == len(images):
            duration = [d for d in duration]
        else:
            raise ValueError("len(duration) doesn't match amount of images.")
    else:
        duration = [duration for im in images]
    # Check subrectangles
    if sub_rectangles:
        images, xy = gif_writer.handle_sub_rectangles(images, sub_rectangles)
        default_dispose = 1  # Leave image in place
    else:
        # Normal mode
        xy = [(0, 0) for im in images]
        default_dispose = 2  # Restore to background color.
    # Check dispose
    if dispose is None:
        dispose = default_dispose
    if hasattr(dispose, '__len__'):
        if len(dispose) != len(images):
            # BUGFIX: the message previously referred to len(xy).
            raise ValueError("len(dispose) doesn't match amount of images.")
    else:
        dispose = [dispose for im in images]
    # Make images in a format that we can write easily
    images = gif_writer.convert_images_to_PIL(images, dither, nq)
    # Write
    with open(filename, 'wb') as file_:
        gif_writer.write_gif_to_file(file_, images, duration, loops, xy, dispose)
def read_gif(filename, as_numpy=True):
    """ read_gif(filename, as_numpy=True)
    Read images from an animated GIF file. Returns a list of numpy
    arrays, or, if as_numpy is false, a list of PIL images.
    """
    # Both PIL and numpy are required to decode the frames.
    if PIL is None:
        raise RuntimeError("Need PIL to read animated gif files.")
    if np is None:
        raise RuntimeError("Need Numpy to read animated gif files.")
    if not os.path.isfile(filename):
        raise IOError('File not found: ' + str(filename))
    # Open with PIL and rewind to the first frame.
    gif = PIL.Image.open(filename)
    gif.seek(0)
    # Walk the frames until PIL signals the end with EOFError.
    frames = []
    try:
        while True:
            arr = np.asarray(gif.convert())  # drop the palette
            if len(arr.shape) == 0:
                raise MemoryError("Too little memory to convert PIL image to array")
            frames.append(arr)
            gif.seek(gif.tell() + 1)
    except EOFError:
        pass
    # Optionally convert back to PIL images.
    if not as_numpy:
        frames = [PIL.Image.fromarray(arr) for arr in frames]
    return frames
class NeuQuant:
    """ NeuQuant(image, samplefac=10, colors=256)

    samplefac should be an integer number of 1 or higher, 1
    being the highest quality, but the slowest performance.
    With a value of 10, one tenth of all pixels are used during
    training. This value seems a nice tradeoff between speed
    and quality.

    colors is the amount of colors to reduce the image to. This
    should best be a power of two.

    See also:
    http://members.ozemail.com.au/~dekker/NEUQUANT.HTML

    License of the NeuQuant Neural-Net Quantization Algorithm
    ---------------------------------------------------------
    Copyright (c) 1994 Anthony Dekker
    Ported to python by Marius van Voorden in 2010
    NEUQUANT Neural-Net quantization algorithm by Anthony Dekker, 1994.
    See "Kohonen neural networks for optimal colour quantization"
    in "network: Computation in Neural Systems" Vol. 5 (1994) pp 351-367.
    for a discussion of the algorithm.
    See also http://members.ozemail.com.au/~dekker/NEUQUANT.HTML
    Any party obtaining a copy of these files from the author, directly or
    indirectly, is granted, free of charge, a full and unrestricted irrevocable,
    world-wide, paid up, royalty-free, nonexclusive right and license to deal
    in this software and documentation files (the "Software"), including without
    limitation the rights to use, copy, modify, merge, publish, distribute, sublicense,
    and/or sell copies of the Software, and to permit persons who receive
    copies from any such party to do so, with the only requirement being
    that this copyright notice remain intact.
    """

    NCYCLES = None  # Number of learning cycles
    NETSIZE = None  # Number of colours used
    SPECIALS = None  # Number of reserved colours used
    BGCOLOR = None  # Reserved background colour
    CUTNETSIZE = None
    MAXNETPOS = None
    INITRAD = None  # For 256 colours, radius starts at 32
    RADIUSBIASSHIFT = None
    RADIUSBIAS = None
    INITBIASRADIUS = None
    RADIUSDEC = None  # Factor of 1/30 each cycle
    ALPHABIASSHIFT = None
    INITALPHA = None  # biased by 10 bits
    GAMMA = None
    BETA = None
    BETAGAMMA = None

    network = None  # The network itself
    colormap = None  # The network converted to integer BGR + index
    netindex = None  # For network lookup - really 256
    bias = None  # Bias and freq arrays for learning
    freq = None
    pimage = None

    # Four primes near 500 - assume no image has a length so large
    # that it is divisible by all four primes
    PRIME1 = 499
    PRIME2 = 491
    PRIME3 = 487
    PRIME4 = 503
    MAXPRIME = PRIME4

    pixels = None
    samplefac = None
    a_s = None

    def __init__(self, image, samplefac=10, colors=256):
        """Build and train the quantizer for *image* (an RGBA PIL image)."""
        # Check Numpy
        if np is None:
            raise RuntimeError("Need Numpy for the NeuQuant algorithm.")
        # Check image
        if image.size[0] * image.size[1] < NeuQuant.MAXPRIME:
            raise IOError("Image is too small")
        if image.mode != "RGBA":
            raise IOError("Image mode should be RGBA.")
        # Initialize
        self.setconstants(samplefac, colors)
        # BUGFIX: tobytes()/frombuffer() replace the tostring()/fromstring()
        # pair that was removed from Pillow/NumPy. Each uint32 packs one
        # RGBA pixel; the resulting array is read-only, which is fine since
        # learn() only reads it.
        self.pixels = np.frombuffer(image.tobytes(), np.uint32)
        self.set_up_arrays()
        self.learn()
        self.fix()
        self.inxbuild()

    def setconstants(self, samplefac, colors):
        """
        Sets class constants for a run with *colors* palette entries.
        """
        self.NCYCLES = 100  # Number of learning cycles
        self.NETSIZE = colors  # Number of colours used
        self.SPECIALS = 3  # Number of reserved colours used
        self.BGCOLOR = self.SPECIALS - 1  # Reserved background colour
        self.CUTNETSIZE = self.NETSIZE - self.SPECIALS
        self.MAXNETPOS = self.NETSIZE - 1
        # BUGFIX: integer division; a float here broke the >> shifts in
        # learn() on Python 3.
        self.INITRAD = self.NETSIZE // 8  # For 256 colours, radius starts at 32
        self.RADIUSBIASSHIFT = 6
        self.RADIUSBIAS = 1 << self.RADIUSBIASSHIFT
        self.INITBIASRADIUS = self.INITRAD * self.RADIUSBIAS
        self.RADIUSDEC = 30  # Factor of 1/30 each cycle
        self.ALPHABIASSHIFT = 10  # Alpha starts at 1
        self.INITALPHA = 1 << self.ALPHABIASSHIFT  # biased by 10 bits
        self.GAMMA = 1024.0
        self.BETA = 1.0 / 1024.0
        self.BETAGAMMA = self.BETA * self.GAMMA
        self.network = np.empty((self.NETSIZE, 3), dtype='float64')  # The network itself
        self.colormap = np.empty((self.NETSIZE, 4), dtype='int32')  # Quantized palette
        self.netindex = np.empty(256, dtype='int32')  # For network lookup - really 256
        self.bias = np.empty(self.NETSIZE, dtype='float64')  # Bias and freq arrays for learning
        self.freq = np.empty(self.NETSIZE, dtype='float64')
        self.pixels = None
        self.samplefac = samplefac
        self.a_s = dict()  # cache for geta() neighbourhood profiles

    def write_colour_map(self, rgb, outstream):
        """Write the palette to *outstream*, channel by channel.
        NOTE(review): this writes bare ints to the stream; the stream must
        accept ints (not bytes). The method appears unused here - verify
        before relying on it.
        """
        for i in range(self.NETSIZE):
            blue = self.colormap[i, 0]
            green = self.colormap[i, 1]
            red = self.colormap[i, 2]
            outstream.write(red if rgb else blue)
            outstream.write(green)
            outstream.write(blue if rgb else red)
        return self.NETSIZE

    def set_up_arrays(self):
        """Initialize the network with the special colours plus a grey ramp."""
        self.network[0, 0] = 0.0  # Black
        self.network[0, 1] = 0.0
        self.network[0, 2] = 0.0
        self.network[1, 0] = 255.0  # White
        self.network[1, 1] = 255.0
        self.network[1, 2] = 255.0
        # RESERVED self.BGCOLOR # Background
        for i in range(self.SPECIALS):
            self.freq[i] = 1.0 / self.NETSIZE
            self.bias[i] = 0.0
        for i in range(self.SPECIALS, self.NETSIZE):
            p = self.network[i]
            p[:] = (255.0 * (i - self.SPECIALS)) / self.CUTNETSIZE
            self.freq[i] = 1.0 / self.NETSIZE
            self.bias[i] = 0.0

    def altersingle(self, alpha, i, b, g, r):
        """Move neuron i towards biased (b,g,r) by factor alpha"""
        n = self.network[i]  # Alter hit neuron
        n[0] -= (alpha * (n[0] - b))
        n[1] -= (alpha * (n[1] - g))
        n[2] -= (alpha * (n[2] - r))

    def geta(self, alpha, rad):
        """Return (cached) the neighbourhood alpha profile for radius *rad*."""
        try:
            return self.a_s[(alpha, rad)]
        except KeyError:
            length = rad * 2 - 1
            # BUGFIX: integer division; length is always odd, and mid is
            # used as an array index and range bound below.
            mid = length // 2
            q = np.array(list(range(mid - 1, -1, -1)) + list(range(-1, mid)))
            a = alpha * (rad * rad - q * q) / (rad * rad)
            a[mid] = 0
            self.a_s[(alpha, rad)] = a
            return a

    def alterneigh(self, alpha, rad, i, b, g, r):
        """Move the neurons around neuron *i* towards (b,g,r), attenuated
        by distance via the geta() profile."""
        if i - rad >= self.SPECIALS - 1:
            lo = i - rad
            start = 0
        else:
            lo = self.SPECIALS - 1
            start = (self.SPECIALS - 1 - (i - rad))
        if i + rad <= self.NETSIZE:
            hi = i + rad
            end = rad * 2 - 1
        else:
            hi = self.NETSIZE
            end = (self.NETSIZE - (i + rad))
        a = self.geta(alpha, rad)[start:end]
        p = self.network[lo + 1:hi]
        p -= np.transpose(np.transpose(p - np.array([b, g, r])) * a)

    def contest(self, b, g, r):
        """ Search for biased BGR values
        Finds closest neuron (min dist) and updates self.freq
        finds best neuron (min dist-self.bias) and returns position
        for frequently chosen neurons, self.freq[i] is high and self.bias[i] is negative
        self.bias[i] = self.GAMMA*((1/self.NETSIZE)-self.freq[i])"""
        i, j = self.SPECIALS, self.NETSIZE
        dists = abs(self.network[i:j] - np.array([b, g, r])).sum(1)
        bestpos = i + np.argmin(dists)
        biasdists = dists - self.bias[i:j]
        bestbiaspos = i + np.argmin(biasdists)
        self.freq[i:j] *= (1 - self.BETA)
        self.bias[i:j] += self.BETAGAMMA * self.freq[i:j]
        self.freq[bestpos] += self.BETA
        self.bias[bestpos] -= self.BETAGAMMA
        return bestbiaspos

    def special_find(self, b, g, r):
        """Return the index of a special (reserved) colour matching
        (b,g,r) exactly, or -1 if none matches."""
        for i in range(self.SPECIALS):
            n = self.network[i]
            if n[0] == b and n[1] == g and n[2] == r:
                return i
        return -1

    def learn(self):
        """Train the network by walking a pseudo-random subsample of pixels."""
        biasRadius = self.INITBIASRADIUS
        alphadec = 30 + ((self.samplefac - 1) / 3)
        lengthcount = self.pixels.size
        # BUGFIX: integer arithmetic (the py2-era code relied on int
        # division); delta is clamped to avoid modulo-by-zero on small
        # sample counts.
        samplepixels = lengthcount // self.samplefac
        delta = max(samplepixels // self.NCYCLES, 1)
        alpha = self.INITALPHA
        i = 0
        rad = biasRadius >> self.RADIUSBIASSHIFT
        if rad <= 1:
            rad = 0
        print("Beginning 1D learning: samplepixels = %1.2f rad = %i" % (samplepixels, rad))
        step = 0
        pos = 0
        # Choose a prime step that does not divide the pixel count, so the
        # walk visits pixels in a scattered order.
        if lengthcount % NeuQuant.PRIME1 != 0:
            step = NeuQuant.PRIME1
        elif lengthcount % NeuQuant.PRIME2 != 0:
            step = NeuQuant.PRIME2
        elif lengthcount % NeuQuant.PRIME3 != 0:
            step = NeuQuant.PRIME3
        else:
            step = NeuQuant.PRIME4
        i = 0
        printed_string = ''
        while i < samplepixels:
            if i % 100 == 99:
                tmp = '\b' * len(printed_string)
                printed_string = str((i + 1) * 100 / samplepixels) + "%\n"
                print(tmp + printed_string)
            # Unpack one RGBA pixel (alpha in the top byte is ignored).
            p = self.pixels[pos]
            r = (p >> 16) & 0xff
            g = (p >> 8) & 0xff
            b = (p) & 0xff
            if i == 0:  # Remember background colour
                self.network[self.BGCOLOR] = [b, g, r]
            j = self.special_find(b, g, r)
            if j < 0:
                j = self.contest(b, g, r)
            if j >= self.SPECIALS:  # Don't learn for specials
                a = (1.0 * alpha) / self.INITALPHA
                self.altersingle(a, j, b, g, r)
                if rad > 0:
                    self.alterneigh(a, rad, j, b, g, r)
            pos = (pos + step) % lengthcount
            i += 1
            if i % delta == 0:
                alpha -= alpha / alphadec
                # BUGFIX: floor division keeps biasRadius an int so the
                # shift below stays valid on Python 3.
                biasRadius -= biasRadius // self.RADIUSDEC
                rad = biasRadius >> self.RADIUSBIASSHIFT
                if rad <= 1:
                    rad = 0
        finalAlpha = (1.0 * alpha) / self.INITALPHA
        print("Finished 1D learning: final alpha = %1.2f!" % finalAlpha)

    def fix(self):
        """Round the trained network into the integer colormap; the fourth
        column stores the original neuron index."""
        for i in range(self.NETSIZE):
            for j in range(3):
                x = int(0.5 + self.network[i, j])
                x = max(0, x)
                x = min(255, x)
                self.colormap[i, j] = x
            self.colormap[i, 3] = i

    def inxbuild(self):
        """Sort the colormap on the green channel and build netindex for
        fast nearest-colour lookup."""
        previouscol = 0
        startpos = 0
        for i in range(self.NETSIZE):
            p = self.colormap[i]
            q = None
            smallpos = i
            smallval = p[1]  # Index on g
            # Find smallest in i..self.NETSIZE-1
            for j in range(i + 1, self.NETSIZE):
                q = self.colormap[j]
                if q[1] < smallval:  # Index on g
                    smallpos = j
                    smallval = q[1]  # Index on g
            q = self.colormap[smallpos]
            # Swap p (i) and q (smallpos) entries
            if i != smallpos:
                p[:], q[:] = q, p.copy()
            # smallval entry is now in position i
            if smallval != previouscol:
                self.netindex[previouscol] = (startpos + i) >> 1
                for j in range(previouscol + 1, smallval):
                    self.netindex[j] = i
                previouscol = smallval
                startpos = i
        self.netindex[previouscol] = (startpos + self.MAXNETPOS) >> 1
        for j in range(previouscol + 1, 256):  # Really 256
            self.netindex[j] = self.MAXNETPOS

    def paletteImage(self):
        """ PIL weird interface for making a paletted image: create an image which
        already has the palette, and use that in Image.quantize. This function
        returns this palette image. """
        if self.pimage is None:
            palette = []
            for i in range(self.NETSIZE):
                palette.extend(self.colormap[i][:3])
            palette.extend([0] * (256 - self.NETSIZE) * 3)
            # a palette image to use for quant
            self.pimage = Image.new("P", (1, 1), 0)
            self.pimage.putpalette(palette)
        return self.pimage

    def quantize(self, image, colors=None):
        """ Use a kdtree to quickly find the closest palette colors for the pixels.
        *colors* is accepted for call-site compatibility and ignored: the
        palette size was fixed by NETSIZE at construction time.
        """
        if get_cKDTree():
            return self.quantize_with_scipy(image)
        else:
            print('Scipy not available, falling back to slower version.')
            return self.quantize_without_scipy(image)

    def quantize_with_scipy(self, image):
        """Map every pixel to its nearest palette colour via a cKDTree."""
        w, h = image.size
        px = np.asarray(image).copy()
        px2 = px[:, :, :3].reshape((w * h, 3))
        cKDTree = get_cKDTree()
        kdtree = cKDTree(self.colormap[:, :3], leafsize=10)
        result = kdtree.query(px2)
        colorindex = result[1]
        print("Distance: %1.2f" % (result[0].sum() / (w * h)))
        px2[:] = self.colormap[colorindex, :3]
        return Image.fromarray(px).convert("RGB").quantize(palette=self.paletteImage())

    def quantize_without_scipy(self, image):
        """ This function can be used if no scipy is available.
        It's 7 times slower though.
        """
        w, h = image.size
        px = np.asarray(image).copy()
        memo = {}  # memoize per unique input colour
        for j in range(w):
            for i in range(h):
                key = (px[i, j, 0], px[i, j, 1], px[i, j, 2])
                try:
                    val = memo[key]
                except KeyError:
                    val = self.convert(*key)
                    memo[key] = val
                px[i, j, 0], px[i, j, 1], px[i, j, 2] = val
        return Image.fromarray(px).convert("RGB").quantize(palette=self.paletteImage())

    def convert(self, *color):
        """Return the palette colour closest to *color*."""
        i = self.inxsearch(*color)
        return self.colormap[i, :3]

    def inxsearch(self, r, g, b):
        """Search for colour values 0..255 and return the palette index
        with the smallest squared distance."""
        dists = (self.colormap[:, :3] - np.array([r, g, b]))
        a = np.argmin((dists * dists).sum(1))
        return a
def load_images(image_directory, extension, prefix=None):
    """
    Locates image files in image_directory with the specified extension and/or
    prefix, and loads them into memory as PIL/Pillow objects
    :param image_directory: string
    :param extension: string
    :param prefix: string
    :returns: List of PIL Image objects
    """
    lower_pattern = "*." + extension.lower()
    upper_pattern = "*." + extension.upper()
    # Keep only entries matching the extension (either case).
    names = [n for n in os.listdir(image_directory)
             if fnmatch(n, lower_pattern) or fnmatch(n, upper_pattern)]
    # Optionally restrict to the given prefix.
    if prefix:
        names = [n for n in names if fnmatch(n, prefix + "*")]
    # Sort to maintain order during GIF creation
    names.sort()
    return [Image.open(os.path.join(image_directory, n)) for n in names]
|
levi-rs/gifted
|
gifted/gifted.py
|
Python
|
bsd-3-clause
| 35,791
|
[
"NEURON"
] |
d4cff7f9a898e05a253a5ee978ea665cb18629fd3b6178484bc7a01ed9dd3027
|
# Orca
#
# Copyright 2004-2009 Sun Microsystems Inc.
# Copyright 2010-2011 The Orca Team
# Copyright 2012 Igalia, S.L.
#
# This library is free software; you can redistribute it and/or
# modify it under the terms of the GNU Lesser General Public
# License as published by the Free Software Foundation; either
# version 2.1 of the License, or (at your option) any later version.
#
# This library is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
# Lesser General Public License for more details.
#
# You should have received a copy of the GNU Lesser General Public
# License along with this library; if not, write to the
# Free Software Foundation, Inc., Franklin Street, Fifth Floor,
# Boston MA 02110-1301 USA.
"""The main module for the Orca screen reader."""
__id__ = "$Id$"
__version__ = "$Revision$"
__date__ = "$Date$"
__copyright__ = "Copyright (c) 2004-2009 Sun Microsystems Inc." \
"Copyright (c) 2010-2011 The Orca Team" \
"Copyright (c) 2012 Igalia, S.L."
__license__ = "LGPL"
import faulthandler
import gi
import importlib
import os
import pyatspi
import re
import signal
import subprocess
import sys
try:
from gi.repository.Gio import Settings
a11yAppSettings = Settings(schema_id='org.gnome.desktop.a11y.applications')
except:
a11yAppSettings = None
try:
# This can fail due to gtk not being available. We want to
# be able to recover from that if possible. The main driver
# for this is to allow "orca --text-setup" to work even if
# the desktop is not running.
#
gi.require_version("Gtk", "3.0")
from gi.repository import Gtk
gi.require_version("Gdk", "3.0")
from gi.repository import Gdk
# Note: This last import is here due to bgo #673396.
# See bgo#673397 for the rest of the story.
gi.require_version("GdkX11", "3.0")
from gi.repository.GdkX11 import X11Screen
except:
pass
from . import braille
from . import debug
from . import event_manager
from . import keybindings
from . import logger
from . import messages
from . import mouse_review
from . import notification_messages
from . import orca_state
from . import orca_platform
from . import script_manager
from . import settings
from . import settings_manager
from . import speech
from . import sound
from .input_event import BrailleEvent
# Module-wide singletons fetched once from their respective managers.
_eventManager = event_manager.getManager()
_scriptManager = script_manager.getManager()
_settingsManager = settings_manager.getManager()
_logger = logger.getLogger()
def onEnabledChanged(gsetting, key):
    """Shut Orca down when the screen-reader-enabled setting turns off."""
    try:
        isEnabled = gsetting.get_boolean(key)
    except:
        # Best effort: if the setting cannot be read, do nothing.
        return
    if key == 'screen-reader-enabled' and not isEnabled:
        shutdown()
def getSettingsManager():
    """Return the module-wide settings_manager singleton."""
    return _settingsManager
def getLogger():
    """Return the module-wide logger singleton."""
    return _logger
# Exit code used to signal that Orca was terminated due to a hang.
EXIT_CODE_HANG = 50

# The user-settings module (see loadUserSettings).
#
_userSettings = None

# A subset of the original Xmodmap info prior to our stomping on it.
# Right now, this is just for the user's chosen Orca modifier(s).
#
_originalXmodmap = ""
_orcaModifiers = settings.DESKTOP_MODIFIER_KEYS + settings.LAPTOP_MODIFIER_KEYS
_capsLockCleared = False
_restoreOrcaKeys = False

########################################################################
#                                                                      #
# METHODS TO HANDLE APPLICATION LIST AND FOCUSED OBJECTS               #
#                                                                      #
########################################################################

# Tracking-mode identifiers; used as the *mode* argument of
# emitRegionChanged() below.
CARET_TRACKING = "caret-tracking"
FOCUS_TRACKING = "focus-tracking"
FLAT_REVIEW = "flat-review"
MOUSE_REVIEW = "mouse-review"
SAY_ALL = "say-all"
def emitRegionChanged(obj, startOffset=None, endOffset=None, mode=None):
    """Notifies interested clients that the current region of interest has changed."""
    startOffset = 0 if startOffset is None else startOffset
    endOffset = startOffset if endOffset is None else endOffset
    mode = FOCUS_TRACKING if mode is None else mode

    try:
        obj.emit("mode-changed::" + mode, 1, "")
    except:
        debug.println(debug.LEVEL_INFO,
                      "ORCA: Exception emitting mode-changed notification",
                      True)

    if mode != orca_state.activeMode:
        debug.println(debug.LEVEL_INFO,
                      "ORCA: Switching active mode from %s to %s"
                      % (orca_state.activeMode, mode),
                      True)
        orca_state.activeMode = mode

    try:
        debug.println(debug.LEVEL_INFO,
                      "ORCA: Region of interest: %s (%i, %i)"
                      % (obj, startOffset, endOffset),
                      True)
        obj.emit("region-changed", startOffset, endOffset)
    except:
        debug.println(debug.LEVEL_INFO,
                      "ORCA: Exception emitting region-changed notification",
                      True)
def setLocusOfFocus(event, obj, notifyScript=True, force=False):
    """Sets the locus of focus (i.e., the object with visual focus) and
    notifies the script of the change should the script wish to present
    the change to the user.

    Arguments:
    - event: if not None, the Event that caused this to happen
    - obj: the Accessible with the new locus of focus.
    - notifyScript: if True, propagate this event
    - force: if True, don't worry if this is the same object as the
      current locusOfFocus
    """
    if not force and obj == orca_state.locusOfFocus:
        msg = "ORCA: Setting locusOfFocus to existing locusOfFocus"
        debug.println(debug.LEVEL_INFO, msg, True)
        return

    # If the active script has no associated app, use the event to pick
    # and activate the appropriate script before changing focus.
    if event and (orca_state.activeScript and not orca_state.activeScript.app):
        script = _scriptManager.getScript(event.host_application, event.source)
        _scriptManager.setActiveScript(script, "Setting locusOfFocus")

    oldFocus = orca_state.locusOfFocus
    # Poke the old focus; a defunct accessible raises here, in which case
    # the old focus is treated as None.
    try:
        oldFocus.getRole()
    except:
        msg = "ORCA: Old locusOfFocus is null or defunct"
        debug.println(debug.LEVEL_INFO, msg, True)
        oldFocus = None

    if not obj:
        msg = "ORCA: New locusOfFocus is null (being cleared)"
        debug.println(debug.LEVEL_INFO, msg, True)
        orca_state.locusOfFocus = None
        return

    if orca_state.activeScript:
        msg = "ORCA: Active script is: %s" % orca_state.activeScript
        debug.println(debug.LEVEL_INFO, msg, True)
        # Refuse to move focus to an object known to be zombie or dead.
        if orca_state.activeScript.utilities.isZombie(obj):
            msg = "ERROR: New locusOfFocus (%s) is zombie. Not updating." % obj
            debug.println(debug.LEVEL_INFO, msg, True)
            return
        if orca_state.activeScript.utilities.isDead(obj):
            msg = "ERROR: New locusOfFocus (%s) is dead. Not updating." % obj
            debug.println(debug.LEVEL_INFO, msg, True)
            return

    msg = "ORCA: Changing locusOfFocus from %s to %s" % (oldFocus, obj)
    debug.println(debug.LEVEL_INFO, msg, True)
    orca_state.locusOfFocus = obj

    if not notifyScript:
        return

    if not orca_state.activeScript:
        msg = "ORCA: Cannot notify active script because there isn't one"
        debug.println(debug.LEVEL_INFO, msg, True)
        return

    orca_state.activeScript.locusOfFocusChanged(event, oldFocus, orca_state.locusOfFocus)
########################################################################
# #
# METHODS FOR PRE-PROCESSING AND MASSAGING BRAILLE EVENTS. #
# #
########################################################################
def _processBrailleEvent(event):
    """Called whenever a key is pressed on the Braille display.

    Arguments:
    - event: the BrlAPI event for the key that was pressed.

    Returns True if the event was consumed; otherwise False
    """
    consumed = False

    # Braille key presses always interrupt speech.
    #
    event = BrailleEvent(event)
    if event.event['command'] not in braille.dontInteruptSpeechKeys:
        speech.stop()

    orca_state.lastInputEvent = event

    try:
        consumed = _eventManager.processBrailleEvent(event)
    except:
        debug.printException(debug.LEVEL_SEVERE)

    # In learn mode, swallow unhandled presses so they are only described.
    if (not consumed) and orca_state.learnModeEnabled:
        consumed = True

    return consumed
########################################################################
# #
# METHODS FOR HANDLING INITIALIZATION, SHUTDOWN, AND USE. #
# #
########################################################################
def deviceChangeHandler(deviceManager, device):
    """New keyboards being plugged in stomp on our changes to the keymappings, so we have to re-apply"""
    if device.get_source() == Gdk.InputSource.KEYBOARD:
        debug.println(debug.LEVEL_INFO,
                      "ORCA: Keyboard change detected, re-creating the xmodmap",
                      True)
        _createOrcaXmodmap()
def updateKeyMap(keyboardEvent):
    """Unsupported convenience method to call sad hacks which should go away.

    On release of an Orca modifier key while commands are being bypassed,
    restore the user's original xmodmap; once bypassing ends, re-create
    the Orca xmodmap.
    """
    global _restoreOrcaKeys
    # Only act on key release.
    if keyboardEvent.isPressedKey():
        return

    if keyboardEvent.event_string in settings.orcaModifierKeys \
       and orca_state.bypassNextCommand:
        _restoreXmodmap()
        _restoreOrcaKeys = True
        return

    if _restoreOrcaKeys and not orca_state.bypassNextCommand:
        _createOrcaXmodmap()
        _restoreOrcaKeys = False
def _setXmodmap(xkbmap):
    """Set the keyboard map using xkbcomp."""
    # Pipe the compiled keymap into xkbcomp for the current display and
    # wait for it to finish.
    subprocess.run(['xkbcomp', '-w0', '-', os.environ['DISPLAY']],
                   input=xkbmap, stdout=None, stderr=None)
def _setCapsLockAsOrcaModifier(enable):
    """Enable or disable use of the caps lock key as an Orca modifier key.

    Rewrites the saved xkb map (_originalXmodmap) so that the Caps Lock
    and/or Shift Lock interpret sections either perform NoAction (when
    serving as the Orca modifier) or their normal locking behavior, then
    applies the modified map via _setXmodmap.

    Arguments:
    - enable: if True, make Caps Lock / Shift Lock act as the Orca
      modifier; if False, restore their normal locking behavior.
    """
    interpretCapsLineProg = re.compile(
        r'^\s*interpret\s+Caps[_+]Lock[_+]AnyOfOrNone\s*\(all\)\s*{\s*$', re.I)
    normalCapsLineProg = re.compile(
        r'^\s*action\s*=\s*LockMods\s*\(\s*modifiers\s*=\s*Lock\s*\)\s*;\s*$', re.I)
    interpretShiftLineProg = re.compile(
        r'^\s*interpret\s+Shift[_+]Lock[_+]AnyOf\s*\(\s*Shift\s*\+\s*Lock\s*\)\s*{\s*$', re.I)
    normalShiftLineProg = re.compile(
        r'^\s*action\s*=\s*LockMods\s*\(\s*modifiers\s*=\s*Shift\s*\)\s*;\s*$', re.I)
    disabledModLineProg = re.compile(
        r'^\s*action\s*=\s*NoAction\s*\(\s*\)\s*;\s*$', re.I)
    normalCapsLine = '        action= LockMods(modifiers=Lock);'
    normalShiftLine = '        action= LockMods(modifiers=Shift);'
    disabledModLine = '        action= NoAction();'
    lines = _originalXmodmap.decode('UTF-8').split('\n')
    foundCapsInterpretSection = False
    foundShiftInterpretSection = False
    modified = False
    for i, line in enumerate(lines):
        if not foundCapsInterpretSection and not foundShiftInterpretSection:
            if interpretCapsLineProg.match(line):
                foundCapsInterpretSection = True
            elif interpretShiftLineProg.match(line):
                foundShiftInterpretSection = True
        elif foundCapsInterpretSection:
            if enable:
                if normalCapsLineProg.match(line):
                    lines[i] = disabledModLine
                    modified = True
            else:
                if disabledModLineProg.match(line):
                    lines[i] = normalCapsLine
                    modified = True
            # BUGFIX: the original test was "if line.find('}'):", but
            # str.find() returns -1 (truthy) when '}' is absent, so the
            # section flag was cleared on nearly every line.  Only leave
            # the section at its closing brace.
            if '}' in line:
                foundCapsInterpretSection = False
        else:  # foundShiftInterpretSection
            if enable:
                if normalShiftLineProg.match(line):
                    lines[i] = disabledModLine
                    modified = True
            else:
                if disabledModLineProg.match(line):
                    lines[i] = normalShiftLine
                    modified = True
            # BUGFIX: same '}' detection fix as above.
            if '}' in line:
                foundShiftInterpretSection = False
    if modified:
        _setXmodmap(bytes('\n'.join(lines), 'UTF-8'))
def _createOrcaXmodmap():
    """Makes an Orca-specific Xmodmap so that the keys behave as we
    need them to do.  This is especially the case for the Orca modifier.

    If Caps Lock or Shift Lock is configured as the Orca modifier, its
    normal locking action is disabled; otherwise any earlier remapping
    is undone.
    """
    global _capsLockCleared

    # (Removed: an unused "cmd = []" local from the original code.)
    if "Caps_Lock" in settings.orcaModifierKeys \
       or "Shift_Lock" in settings.orcaModifierKeys:
        _setCapsLockAsOrcaModifier(True)
        _capsLockCleared = True
    elif _capsLockCleared:
        _setCapsLockAsOrcaModifier(False)
        _capsLockCleared = False
def _storeXmodmap(keyList):
    """Save the original xmodmap for the keys in keyList before we alter it.

    Arguments:
    - keyList: A list of named keys to look for.  (Currently unused here:
      the entire keyboard map is captured via xkbcomp.)
    """
    global _originalXmodmap
    # Capture the full compiled keymap for the current display as bytes.
    _originalXmodmap = subprocess.check_output(['xkbcomp', os.environ['DISPLAY'], '-'])
def _restoreXmodmap(keyList=()):
    """Restore the original xmodmap values for the keys in keyList.

    Arguments:
    - keyList: A list of named keys to look for. An empty list means
      to restore the entire saved xmodmap.  (The whole saved map is
      always re-applied; the parameter is kept for compatibility.)
    """
    # BUGFIX (idiom): the default was a mutable list ([]); use an empty
    # tuple instead so no shared mutable default exists.
    global _capsLockCleared
    _capsLockCleared = False
    # Feed the saved xkb map back to the X server via xkbcomp.
    p = subprocess.Popen(['xkbcomp', '-w0', '-', os.environ['DISPLAY']],
                         stdin=subprocess.PIPE, stdout=None, stderr=None)
    p.communicate(_originalXmodmap)
def setKeyHandling(new):
    """Toggle use of the new vs. legacy key handling mode.

    Arguments:
    - new: if True, use the new key handling implementation.
    """
    _eventManager.setKeyHandling(new)
def loadUserSettings(script=None, inputEvent=None, skipReloadMessage=False):
    """Loads (and reloads) the user settings module, reinitializing
    things such as speech if necessary.

    Arguments:
    - script: script on whose behalf to load app settings (defaults to
      the default script)
    - inputEvent: the InputEvent which triggered the (re)load, if any
    - skipReloadMessage: if True, do not announce that settings reloaded

    Returns True to indicate the input event has been consumed.
    """
    debug.println(debug.LEVEL_INFO, 'ORCA: Loading User Settings', True)

    global _userSettings

    # Shutdown the output drivers and give them a chance to die.
    player = sound.getPlayer()
    player.shutdown()
    speech.shutdown()
    braille.shutdown()
    _scriptManager.deactivate()

    reloaded = False
    if _userSettings:
        # Reload: pick up the currently active profile.
        _profile = _settingsManager.getSetting('activeProfile')[1]
        try:
            _userSettings = _settingsManager.getGeneralSettings(_profile)
            _settingsManager.setProfile(_profile)
            reloaded = True
        except ImportError:
            debug.printException(debug.LEVEL_INFO)
        except:
            debug.printException(debug.LEVEL_SEVERE)
    else:
        # First load: use the settings manager's current profile.
        _profile = _settingsManager.profile
        try:
            _userSettings = _settingsManager.getGeneralSettings(_profile)
        except ImportError:
            debug.printException(debug.LEVEL_INFO)
        except:
            debug.printException(debug.LEVEL_SEVERE)

    if not script:
        script = _scriptManager.getDefaultScript()
    _settingsManager.loadAppSettings(script)

    if _settingsManager.getSetting('enableSpeech'):
        msg = 'ORCA: About to enable speech'
        debug.println(debug.LEVEL_INFO, msg, True)
        try:
            speech.init()
            if reloaded and not skipReloadMessage:
                script.speakMessage(messages.SETTINGS_RELOADED)
        except:
            debug.printException(debug.LEVEL_SEVERE)
    else:
        msg = 'ORCA: Speech is not enabled in settings'
        debug.println(debug.LEVEL_INFO, msg, True)

    if _settingsManager.getSetting('enableBraille'):
        msg = 'ORCA: About to enable braille'
        debug.println(debug.LEVEL_INFO, msg, True)
        try:
            braille.init(_processBrailleEvent)
        except:
            debug.printException(debug.LEVEL_WARNING)
            msg = 'ORCA: Could not initialize connection to braille.'
            debug.println(debug.LEVEL_WARNING, msg, True)
    else:
        msg = 'ORCA: Braille is not enabled in settings'
        debug.println(debug.LEVEL_INFO, msg, True)

    if _settingsManager.getSetting('enableMouseReview'):
        mouse_review.reviewer.activate()
    else:
        mouse_review.reviewer.deactivate()

    if _settingsManager.getSetting('enableSound'):
        player.init()

    # Append any user-chosen Orca modifier keys not already tracked.
    global _orcaModifiers
    custom = [k for k in settings.orcaModifierKeys if k not in _orcaModifiers]
    _orcaModifiers += custom
    # Handle the case where a change was made in the Orca Preferences dialog.
    #
    if _originalXmodmap:
        _restoreXmodmap(_orcaModifiers)
    _storeXmodmap(_orcaModifiers)
    _createOrcaXmodmap()

    _scriptManager.activate()
    _eventManager.activate()

    debug.println(debug.LEVEL_INFO, 'ORCA: User Settings Loaded', True)

    return True
def _showPreferencesUI(script, prefs):
    """Create (or re-present) the Orca preferences dialog for the given
    script and preferences dictionary."""
    if orca_state.orcaOS:
        # The dialog already exists; just present it again.
        orca_state.orcaOS.showGUI()
        return

    # Imported lazily so the GUI module is only loaded on demand.
    try:
        module = importlib.import_module('.orca_gui_prefs', 'orca')
    except:
        debug.printException(debug.LEVEL_SEVERE)
        return

    uiFile = os.path.join(orca_platform.datadir,
                          orca_platform.package,
                          "ui",
                          "orca-setup.ui")

    orca_state.orcaOS = module.OrcaSetupGUI(uiFile, "orcaSetupWindow", prefs)
    orca_state.orcaOS.init(script)
    orca_state.orcaOS.showGUI()
def showAppPreferencesGUI(script=None, inputEvent=None):
    """Displays the user interface to configure the settings for a
    specific applications within Orca and set up those app-specific
    user preferences using a GUI.

    Returns True to indicate the input event has been consumed.
    """
    prefs = {key: _settingsManager.getSetting(key)
             for key in settings.userCustomizableSettings}
    _showPreferencesUI(script or orca_state.activeScript, prefs)
    return True
def showPreferencesGUI(script=None, inputEvent=None):
    """Displays the user interface to configure Orca and set up
    user preferences using a GUI.

    Returns True to indicate the input event has been consumed.
    """
    generalPrefs = _settingsManager.getGeneralSettings(_settingsManager.profile)
    # Note: the general preferences dialog always uses the default script.
    _showPreferencesUI(_scriptManager.getDefaultScript(), generalPrefs)
    return True
def helpForOrca(script=None, inputEvent=None, page=""):
    """Show Orca Help window (part of the GNOME Access Guide).

    Returns True to indicate the input event has been consumed.
    """
    orca_state.learnModeEnabled = False
    uri = "help:orca" if not page else "help:orca" + "?%s" % page
    Gtk.show_uri(Gdk.Screen.get_default(), uri, Gtk.get_current_event_time())
    return True
def addKeyGrab(binding):
    """ Add a key grab for the given key binding. """
    device = orca_state.device
    return [device.add_key_grab(kd, None) for kd in binding.keyDefs()]
def removeKeyGrab(id):
    """ Remove the key grab for the given key binding.

    Arguments:
    - id: a grab id previously returned by addKeyGrab.
    """
    orca_state.device.remove_key_grab(id)
def mapModifier(keycode):
    """Map the given hardware keycode as a modifier on the active device."""
    return orca_state.device.map_modifier(keycode)
def quitOrca(script=None, inputEvent=None):
    """Quit Orca. Check if the user wants to confirm this action.
    If so, show the confirmation GUI otherwise just shutdown.

    Returns True to indicate the input event has been consumed.
    """
    # No confirmation dialog is wired up here; shut down directly.
    shutdown()
    return True
def showFindGUI(script=None, inputEvent=None):
    """Displays the user interface to perform an Orca Find.

    Returns True to indicate the input event has been consumed.
    """
    try:
        # Imported lazily so the GUI module is only loaded on demand.
        module = importlib.import_module('.orca_gui_find', 'orca')
        module.showFindUI()
    except:
        debug.printException(debug.LEVEL_SEVERE)
# If True, this module has been initialized (see init() and shutdown()).
#
_initialized = False
def init(registry):
    """Initialize the orca module, which initializes the speech and braille
    modules.  Also builds up the application list, registers for AT-SPI events,
    and creates scripts for all known applications.

    Returns True if the initialization procedure has run, or False if this
    module has already been initialized.
    """
    debug.println(debug.LEVEL_INFO, 'ORCA: Initializing', True)

    global _initialized

    if _initialized and _settingsManager.isScreenReaderServiceEnabled():
        debug.println(debug.LEVEL_INFO, 'ORCA: Already initialized', True)
        return False

    # Do not hang on initialization if we can help it.
    #
    if settings.timeoutCallback and (settings.timeoutTime > 0):
        signal.signal(signal.SIGALRM, settings.timeoutCallback)
        signal.alarm(settings.timeoutTime)

    loadUserSettings()

    if settings.timeoutCallback and (settings.timeoutTime > 0):
        signal.alarm(0)

    _initialized = True
    # In theory, we can do this through dbus. In practice, it fails to
    # work sometimes. Until we know why, we need to leave this as-is
    # so that we respond when gnome-control-center is used to stop Orca.
    if a11yAppSettings:
        a11yAppSettings.connect('changed', onEnabledChanged)

    debug.println(debug.LEVEL_INFO, 'ORCA: Initialized', True)
    return True
def start(registry, cacheValues):
    """Starts Orca.

    Arguments:
    - registry: the AT-SPI registry; this call blocks in registry.start()
      until the registry is stopped.
    - cacheValues: if True, restrict the AT-SPI cache to properties.
    """
    debug.println(debug.LEVEL_INFO, 'ORCA: Starting', True)

    if not _initialized:
        init(registry)

    # Do not hang on startup if we can help it.
    #
    if settings.timeoutCallback and (settings.timeoutTime > 0):
        signal.signal(signal.SIGALRM, settings.timeoutCallback)
        signal.alarm(settings.timeoutTime)
    if settings.timeoutCallback and (settings.timeoutTime > 0):
        signal.alarm(0)

    if cacheValues:
        pyatspi.setCacheLevel(pyatspi.CACHE_PROPERTIES)

    # Event handlers for input devices being plugged in/unplugged.
    # Used to re-create the Xmodmap when a new keyboard is plugged in.
    # Necessary, because plugging in a new keyboard resets the Xmodmap
    # and stomps our changes
    display = Gdk.Display.get_default()
    devmanager = display.get_device_manager()
    devmanager.connect("device-added", deviceChangeHandler)
    devmanager.connect("device-removed", deviceChangeHandler)

    Gdk.notify_startup_complete()
    msg = 'ORCA: Startup complete notification made'
    debug.println(debug.LEVEL_INFO, msg, True)

    debug.println(debug.LEVEL_INFO, 'ORCA: Starting registry', True)
    registry.start(gil=False)
def die(exitCode=1):
    """Terminate the Orca process with the given exit code.

    For EXIT_CODE_HANG the process is killed immediately with SIGKILL,
    since a hung process cannot be trusted to shut down cleanly;
    otherwise a normal shutdown is attempted before exiting.
    """
    pid = os.getpid()
    if exitCode == EXIT_CODE_HANG:
        # Someting is hung and we wish to abort.
        os.kill(pid, signal.SIGKILL)
        return

    shutdown()
    sys.exit(exitCode)
    # NOTE(review): the original ended with "if exitCode > 1:
    # os.kill(pid, signal.SIGTERM)" after sys.exit(); that code was
    # unreachable and has been removed.
def timeout(signum=None, frame=None):
    """SIGALRM handler: something has hung, so log state and abort."""
    msg = 'TIMEOUT: something has hung. Aborting.'
    debug.println(debug.LEVEL_SEVERE, msg, True)
    # Dump the stack and process list to aid post-mortem debugging.
    debug.printStack(debug.LEVEL_SEVERE)
    debug.examineProcesses(force=True)
    die(EXIT_CODE_HANG)
def shutdown(script=None, inputEvent=None):
    """Exits Orca.  Unregisters any event listeners and cleans up.

    Returns True if the shutdown procedure ran or False if this module
    was never initialized.
    """
    debug.println(debug.LEVEL_INFO, 'ORCA: Shutting down', True)

    global _initialized

    if not _initialized:
        return False

    # Try to say goodbye, but be defensive if something has hung.
    #
    if settings.timeoutCallback and (settings.timeoutTime > 0):
        signal.signal(signal.SIGALRM, settings.timeoutCallback)
        signal.alarm(settings.timeoutTime)

    orca_state.activeScript.presentMessage(messages.STOP_ORCA, resetStyles=False)

    _scriptManager.deactivate()
    _eventManager.deactivate()

    # Shutdown all the other support.
    #
    if settings.enableSpeech:
        speech.shutdown()
    if settings.enableBraille:
        braille.shutdown()
    if settings.enableSound:
        player = sound.getPlayer()
        player.shutdown()

    if settings.timeoutCallback and (settings.timeoutTime > 0):
        signal.alarm(0)

    _initialized = False
    # Put the user's keyboard map back the way we found it.
    _restoreXmodmap(_orcaModifiers)

    debug.println(debug.LEVEL_INFO, 'ORCA: Stopping registry', True)
    pyatspi.Registry.stop()

    debug.println(debug.LEVEL_INFO, 'ORCA: Shutdown complete', True)

    return True
# Number of times a termination signal has already been handled.
exitCount = 0

def shutdownOnSignal(signum, frame):
    """Signal handler: attempt a graceful shutdown, dying hard on
    repeated signals or if the shutdown itself hangs."""
    global exitCount

    try:
        # Requires python 3.8
        signalString = '(%s)' % signal.strsignal(signum)
    except:
        signalString = ''

    msg = 'ORCA: Shutting down and exiting due to signal=%d %s' % \
          (signum, signalString)
    debug.println(debug.LEVEL_INFO, msg, True)

    # Well...we'll try to exit nicely, but if we keep getting called,
    # something bad is happening, so just quit.
    #
    if exitCount:
        die(signum)
    else:
        exitCount += 1

    # Try to do a graceful shutdown if we can.
    #
    if settings.timeoutCallback and (settings.timeoutTime > 0):
        signal.signal(signal.SIGALRM, settings.timeoutCallback)
        signal.alarm(settings.timeoutTime)

    try:
        if _initialized:
            shutdown()
        else:
            # We always want to try to shutdown speech since the
            # speech servers are very persistent about living.
            #
            speech.shutdown()
            shutdown()
        cleanExit = True
    except:
        cleanExit = False

    if settings.timeoutCallback and (settings.timeoutTime > 0):
        signal.alarm(0)

    if not cleanExit:
        die(EXIT_CODE_HANG)
def crashOnSignal(signum, frame):
    """Fatal-signal handler: restore the user's keyboard map, then
    re-raise the signal with the default handler so the process
    terminates normally for that signal."""
    signal.signal(signum, signal.SIG_DFL)
    _restoreXmodmap(_orcaModifiers)
    os.kill(os.getpid(), signum)
def main(cacheValues=True):
    """The main entry point for Orca.  The exit codes for Orca will
    loosely be based on signals, where the exit code will be the
    signal used to terminate Orca (if a signal was used).  Otherwise,
    an exit code of 0 means normal completion and an exit code of 50
    means Orca exited because of a hang."""
    msg = "ORCA: Launching version %s" % orca_platform.version
    if orca_platform.revision:
        msg += " (rev %s)" % orca_platform.revision
    debug.println(debug.LEVEL_INFO, msg, True)

    # Dump Python tracebacks on hard crashes, preferably into the debug file.
    if debug.debugFile and os.path.exists(debug.debugFile.name):
        faulthandler.enable(file=debug.debugFile, all_threads=True)
    else:
        faulthandler.enable(all_threads=False)

    # Method to call when we think something might be hung.
    #
    settings.timeoutCallback = timeout

    # Various signal handlers we want to listen for.
    #
    signal.signal(signal.SIGHUP, shutdownOnSignal)
    signal.signal(signal.SIGINT, shutdownOnSignal)
    signal.signal(signal.SIGTERM, shutdownOnSignal)
    signal.signal(signal.SIGQUIT, shutdownOnSignal)
    signal.signal(signal.SIGSEGV, crashOnSignal)

    debug.println(debug.LEVEL_INFO, "ORCA: Enabling accessibility (if needed).", True)
    if not _settingsManager.isAccessibilityEnabled():
        _settingsManager.setAccessibility(True)

    debug.println(debug.LEVEL_INFO, "ORCA: Initializing ATSPI registry.", True)
    init(pyatspi.Registry)
    debug.println(debug.LEVEL_INFO, "ORCA: ATSPI registry initialized.", True)

    try:
        message = messages.START_ORCA
        script = _scriptManager.getDefaultScript()
        script.presentMessage(message)
    except:
        debug.printException(debug.LEVEL_SEVERE)

    # Seed the initial locus of focus from the currently active window.
    script = orca_state.activeScript
    if script:
        window = script.utilities.activeWindow()
        if window and not orca_state.locusOfFocus:
            try:
                app = window.getApplication()
            except:
                msg = "ORCA: Exception getting app for %s" % window
                debug.println(debug.LEVEL_INFO, msg, True)
            else:
                script = _scriptManager.getScript(app, window)
                _scriptManager.setActiveScript(script, "Launching.")

            setLocusOfFocus(None, window)
            focusedObject = script.utilities.focusedObject(window)
            if focusedObject:
                setLocusOfFocus(None, focusedObject)
                script = _scriptManager.getScript(focusedObject.getApplication(), focusedObject)
                _scriptManager.setActiveScript(script, "Found focused object.")

    try:
        debug.println(debug.LEVEL_INFO, "ORCA: Starting ATSPI registry.", True)
        start(pyatspi.Registry, cacheValues)  # waits until we stop the registry
    except:
        debug.println(debug.LEVEL_SEVERE, "ORCA: Exception starting ATSPI registry.", True)
        die(EXIT_CODE_HANG)
    return 0
# Script entry point: run Orca and use its return value as the exit code.
if __name__ == "__main__":
    sys.exit(main())
|
GNOME/orca
|
src/orca/orca.py
|
Python
|
lgpl-2.1
| 28,530
|
[
"ORCA"
] |
be57ace019f253f6ab90683e8b93150aa12254b13ba006ccdf7c4cc334484296
|
# CLEANUP NOTES (for ISHAN):
# - add documentation for each method
# - add comments inline explaining each piece
# - add a unit test for each method (at least)
# future
from __future__ import annotations
# stdlib
from typing import Any
from typing import List as TypeList
from typing import Optional
# third party
from nacl.signing import VerifyKey
# the most generic class
class Scalar:
    """
    A Scalar is the most generic class, which keeps track of the current value, and a data-independent
    min-val and max-val.
    """

    def publish(
        self, acc: Any, user_key: VerifyKey, sigma: float = 1.5
    ) -> TypeList[Any]:
        """Adversarial accountant adds Gaussian noise and publishes the scalar's value"""
        # relative
        from ...publish import publish

        return publish([self], acc=acc, sigma=sigma, user_key=user_key)

    # Concrete subclasses must supply the bounds and the current value.
    @property
    def min_val(self) -> Optional[float]:
        raise NotImplementedError

    @property
    def value(self) -> Optional[float]:
        raise NotImplementedError

    @property
    def max_val(self) -> Optional[float]:
        raise NotImplementedError

    def __str__(self) -> str:
        """Render as '<ClassName: (min_val < value < max_val)>'."""
        kind = type(self).__name__
        return f"<{kind}: ({self.min_val} < {self.value} < {self.max_val})>"

    def __repr__(self) -> str:
        return self.__str__()
|
OpenMined/PySyft
|
packages/syft/src/syft/core/adp/scalar/abstract/scalar.py
|
Python
|
apache-2.0
| 1,478
|
[
"Gaussian"
] |
051f3bb25a3a1228174127d25559591a4c7e84a22877463c805b14b9f5283f4b
|
"""Forest of trees-based ensemble methods
Those methods include random forests and extremely randomized trees.
The module structure is the following:
- The ``BaseForest`` base class implements a common ``fit`` method for all
the estimators in the module. The ``fit`` method of the base ``Forest``
class calls the ``fit`` method of each sub-estimator on random samples
(with replacement, a.k.a. bootstrap) of the training set.
The init of the sub-estimator is further delegated to the
``BaseEnsemble`` constructor.
- The ``ForestClassifier`` and ``ForestRegressor`` base classes further
implement the prediction logic by computing an average of the predicted
outcomes of the sub-estimators.
- The ``RandomForestClassifier`` and ``RandomForestRegressor`` derived
classes provide the user with concrete implementations of
the forest ensemble method using classical, deterministic
``DecisionTreeClassifier`` and ``DecisionTreeRegressor`` as
sub-estimator implementations.
- The ``ExtraTreesClassifier`` and ``ExtraTreesRegressor`` derived
classes provide the user with concrete implementations of the
forest ensemble method using the extremly randomized trees
``ExtraTreeClassifier`` and ``ExtraTreeRegressor`` as
sub-estimator implementations.
Single and multi-output problems are both handled.
"""
# Authors: Gilles Louppe, Brian Holt
# License: BSD 3
import itertools
import numpy as np
from warnings import warn
from abc import ABCMeta, abstractmethod
from ..base import ClassifierMixin, RegressorMixin
from ..externals.joblib import Parallel, delayed, cpu_count
from ..feature_selection.selector_mixin import SelectorMixin
from ..tree import DecisionTreeClassifier, DecisionTreeRegressor, \
ExtraTreeClassifier, ExtraTreeRegressor
from ..tree._tree import DTYPE, DOUBLE
from ..utils import array2d, check_random_state, check_arrays
from ..metrics import r2_score
from .base import BaseEnsemble
# Public API of this module.
__all__ = ["RandomForestClassifier",
           "RandomForestRegressor",
           "ExtraTreesClassifier",
           "ExtraTreesRegressor"]

# Largest 32-bit signed integer; used as the upper bound when drawing
# per-tree random seeds.
MAX_INT = np.iinfo(np.int32).max
def _parallel_build_trees(n_trees, forest, X, y,
                          sample_mask, X_argsorted, seed, verbose):
    """Private function used to build a batch of trees within a job.

    NOTE: legacy Python 2 code (uses xrange).
    """
    random_state = check_random_state(seed)
    trees = []

    for i in xrange(n_trees):
        if verbose > 1:
            print("building tree %d of %d" % (i + 1, n_trees))
        # Derive a fresh seed per tree so trees within this job differ.
        seed = random_state.randint(MAX_INT)

        tree = forest._make_estimator(append=False)
        tree.set_params(compute_importances=forest.compute_importances)
        tree.set_params(random_state=check_random_state(seed))

        if forest.bootstrap:
            # Bootstrap: draw n_samples indices with replacement.
            n_samples = X.shape[0]
            indices = random_state.randint(0, n_samples, n_samples)
            tree.fit(X[indices], y[indices],
                     sample_mask=sample_mask, X_argsorted=X_argsorted)
            # Remember the in-bag indices for later OOB estimation.
            tree.indices_ = indices
        else:
            tree.fit(X, y,
                     sample_mask=sample_mask, X_argsorted=X_argsorted)

        trees.append(tree)

    return trees
def _parallel_predict_proba(trees, X, n_classes, n_outputs):
    """Private function used to compute a batch of predictions within a job.

    Returns, per output, the summed class probabilities over all trees.
    """
    n_samples = X.shape[0]

    # One (n_samples, n_classes[k]) accumulator per output.
    p = []
    for k in xrange(n_outputs):
        p.append(np.zeros((n_samples, n_classes[k])))

    for tree in trees:
        p_tree = tree.predict_proba(X)

        if n_outputs == 1:
            p_tree = [p_tree]

        for k in xrange(n_outputs):
            if n_classes[k] == tree.n_classes_[k]:
                p[k] += p_tree[k]
            else:
                # The tree saw only a subset of the classes; scatter its
                # probabilities into the full-size class columns.
                for j, c in enumerate(tree.classes_[k]):
                    p[k][:, c] += p_tree[k][:, j]

    return p
def _parallel_predict_regression(trees, X):
"""Private function used to compute a batch of predictions within a job."""
return sum(tree.predict(X) for tree in trees)
def _partition_trees(forest):
    """Private function used to partition trees between jobs.

    Returns (n_jobs, n_trees, starts) where n_trees[i] is the number of
    trees assigned to job i and starts holds the cumulative offsets.
    """
    # Compute the number of jobs
    if forest.n_jobs == -1:
        n_jobs = min(cpu_count(), forest.n_estimators)
    else:
        n_jobs = min(forest.n_jobs, forest.n_estimators)

    # Partition trees between jobs
    n_trees = [int(forest.n_estimators / n_jobs)] * n_jobs
    # Spread the remainder over the first few jobs.
    for i in xrange(forest.n_estimators % n_jobs):
        n_trees[i] += 1

    starts = [0] * (n_jobs + 1)
    for i in xrange(1, n_jobs + 1):
        starts[i] = starts[i - 1] + n_trees[i - 1]

    return n_jobs, n_trees, starts
def _parallel_X_argsort(X):
"""Private function used to sort the features of X."""
return np.asarray(np.argsort(X.T, axis=1).T, dtype=np.int32, order="F")
def _partition_features(forest, n_total_features):
    """Private function used to partition features between jobs.

    Returns (n_jobs, n_features, starts) analogous to _partition_trees.
    """
    # Compute the number of jobs
    if forest.n_jobs == -1:
        n_jobs = min(cpu_count(), n_total_features)
    else:
        n_jobs = min(forest.n_jobs, n_total_features)

    # Partition features between jobs
    # NOTE(review): "/" is integer division under Python 2 (this module
    # uses xrange); under Python 3 this would need "//".
    n_features = [n_total_features / n_jobs] * n_jobs
    for i in xrange(n_total_features % n_jobs):
        n_features[i] += 1

    starts = [0] * (n_jobs + 1)
    for i in xrange(1, n_jobs + 1):
        starts[i] = starts[i - 1] + n_features[i - 1]

    return n_jobs, n_features, starts
class BaseForest(BaseEnsemble, SelectorMixin):
    """Base class for forests of trees.

    Warning: This class should not be used directly. Use derived classes
    instead.
    """

    # Legacy Python 2 ABC declaration.
    __metaclass__ = ABCMeta

    @abstractmethod
    def __init__(self, base_estimator,
                 n_estimators=10,
                 estimator_params=[],
                 bootstrap=False,
                 compute_importances=False,
                 oob_score=False,
                 n_jobs=1,
                 random_state=None,
                 verbose=0):
        super(BaseForest, self).__init__(
            base_estimator=base_estimator,
            n_estimators=n_estimators,
            estimator_params=estimator_params)

        self.bootstrap = bootstrap
        self.compute_importances = compute_importances
        self.oob_score = oob_score
        self.n_jobs = n_jobs
        self.random_state = random_state

        # Attributes populated by fit().
        self.n_features_ = None
        self.n_outputs_ = None
        self.classes_ = None
        self.n_classes_ = None
        self.feature_importances_ = None

        self.verbose = verbose

    def fit(self, X, y):
        """Build a forest of trees from the training set (X, y).

        Parameters
        ----------
        X : array-like of shape = [n_samples, n_features]
            The training input samples.

        y : array-like, shape = [n_samples] or [n_samples, n_outputs]
            The target values (integers that correspond to classes in
            classification, real numbers in regression).

        Returns
        -------
        self : object
            Returns self.
        """
        self.random_state = check_random_state(self.random_state)

        # Precompute some data
        X, y = check_arrays(X, y, sparse_format="dense")
        if getattr(X, "dtype", None) != DTYPE or \
           X.ndim != 2 or not X.flags.fortran:
            X = array2d(X, dtype=DTYPE, order="F")

        n_samples, self.n_features_ = X.shape

        if self.bootstrap:
            sample_mask = None
            X_argsorted = None
        else:
            if self.oob_score:
                raise ValueError("Out of bag estimation only available"
                                 " if bootstrap=True")
            sample_mask = np.ones((n_samples,), dtype=np.bool)

            # Pre-sort features in parallel column chunks; the trees can
            # then reuse the sorted order instead of re-sorting.
            n_jobs, _, starts = _partition_features(self, self.n_features_)

            all_X_argsorted = Parallel(n_jobs=n_jobs)(
                delayed(_parallel_X_argsort)(
                    X[:, starts[i]:starts[i + 1]])
                for i in xrange(n_jobs))

            X_argsorted = np.asfortranarray(np.hstack(all_X_argsorted))

        y = np.atleast_1d(y)
        if y.ndim == 1:
            y = y[:, np.newaxis]

        self.classes_ = []
        self.n_classes_ = []
        self.n_outputs_ = y.shape[1]

        if isinstance(self.base_estimator, ClassifierMixin):
            # Encode class labels per output as indices into classes_.
            y = np.copy(y)

            for k in xrange(self.n_outputs_):
                unique = np.unique(y[:, k])
                self.classes_.append(unique)
                self.n_classes_.append(unique.shape[0])
                y[:, k] = np.searchsorted(unique, y[:, k])

        if getattr(y, "dtype", None) != DTYPE or not y.flags.contiguous:
            y = np.ascontiguousarray(y, dtype=DOUBLE)

        # Assign chunk of trees to jobs
        n_jobs, n_trees, _ = _partition_trees(self)

        # Parallel loop
        all_trees = Parallel(n_jobs=n_jobs, verbose=self.verbose)(
            delayed(_parallel_build_trees)(
                n_trees[i],
                self,
                X,
                y,
                sample_mask,
                X_argsorted,
                self.random_state.randint(MAX_INT),
                verbose=self.verbose)
            for i in xrange(n_jobs))

        # Reduce
        self.estimators_ = [tree for tree in itertools.chain(*all_trees)]

        # Calculate out of bag predictions and score
        if self.oob_score:
            if isinstance(self, ClassifierMixin):
                self.oob_decision_function_ = []
                self.oob_score_ = 0.0

                predictions = []

                for k in xrange(self.n_outputs_):
                    predictions.append(np.zeros((n_samples,
                                                 self.n_classes_[k])))

                for estimator in self.estimators_:
                    # Out-of-bag samples are those NOT drawn for this tree.
                    mask = np.ones(n_samples, dtype=np.bool)
                    mask[estimator.indices_] = False
                    p_estimator = estimator.predict_proba(X[mask, :])

                    if self.n_outputs_ == 1:
                        p_estimator = [p_estimator]

                    for k in xrange(self.n_outputs_):
                        predictions[k][mask, :] += p_estimator[k]

                for k in xrange(self.n_outputs_):
                    if (predictions[k].sum(axis=1) == 0).any():
                        warn("Some inputs do not have OOB scores. "
                             "This probably means too few trees were used "
                             "to compute any reliable oob estimates.")

                    decision = predictions[k] \
                               / predictions[k].sum(axis=1)[:, np.newaxis]
                    self.oob_decision_function_.append(decision)
                    self.oob_score_ += np.mean(y[:, k] \
                                               == np.argmax(predictions[k], axis=1))

                if self.n_outputs_ == 1:
                    self.oob_decision_function_ = \
                        self.oob_decision_function_[0]

                self.oob_score_ /= self.n_outputs_

            else:
                # Regression:
                predictions = np.zeros((n_samples, self.n_outputs_))
                n_predictions = np.zeros((n_samples, self.n_outputs_))

                for estimator in self.estimators_:
                    mask = np.ones(n_samples, dtype=np.bool)
                    mask[estimator.indices_] = False
                    p_estimator = estimator.predict(X[mask, :])

                    if self.n_outputs_ == 1:
                        p_estimator = p_estimator[:, np.newaxis]

                    predictions[mask, :] += p_estimator
                    n_predictions[mask, :] += 1

                if (n_predictions == 0).any():
                    warn("Some inputs do not have OOB scores. "
                         "This probably means too few trees were used "
                         "to compute any reliable oob estimates.")
                    # Avoid dividing by zero for never-OOB samples.
                    n_predictions[n_predictions == 0] = 1

                predictions /= n_predictions
                self.oob_prediction_ = predictions

                if self.n_outputs_ == 1:
                    self.oob_prediction_ = \
                        self.oob_prediction_.reshape((n_samples, ))

                self.oob_score_ = 0.0

                for k in xrange(self.n_outputs_):
                    self.oob_score_ += r2_score(y[:, k], predictions[:, k])

                self.oob_score_ /= self.n_outputs_

        # Sum the importances
        if self.compute_importances:
            self.feature_importances_ = \
                sum(tree.feature_importances_ for tree in self.estimators_) \
                / self.n_estimators

        return self
class ForestClassifier(BaseForest, ClassifierMixin):
    """Base class for forest of trees-based classifiers.

    Warning: This class should not be used directly. Use derived classes
    instead.
    """

    # Python 2-style ABC declaration (this file uses ``xrange``, so it
    # targets Python 2; on Python 3 this attribute would have no effect).
    __metaclass__ = ABCMeta

    @abstractmethod
    def __init__(self, base_estimator,
                 n_estimators=10,
                 estimator_params=[],
                 bootstrap=False,
                 compute_importances=False,
                 oob_score=False,
                 n_jobs=1,
                 random_state=None,
                 verbose=0):
        # Delegates all bookkeeping to BaseForest; this subclass only adds
        # classification-specific prediction methods.
        super(ForestClassifier, self).__init__(
            base_estimator,
            n_estimators=n_estimators,
            estimator_params=estimator_params,
            bootstrap=bootstrap,
            compute_importances=compute_importances,
            oob_score=oob_score,
            n_jobs=n_jobs,
            random_state=random_state,
            verbose=verbose)

    def predict(self, X):
        """Predict class for X.

        The predicted class of an input sample is computed as the majority
        prediction of the trees in the forest.

        Parameters
        ----------
        X : array-like of shape = [n_samples, n_features]
            The input samples.

        Returns
        -------
        y : array of shape = [n_samples] or [n_samples, n_outputs]
            The predicted classes.
        """
        n_samples = len(X)
        P = self.predict_proba(X)
        # Normalize to a list of per-output probability arrays so the
        # single-output and multi-output cases share one code path.
        if self.n_outputs_ == 1:
            P = [P]
        predictions = np.zeros((n_samples, self.n_outputs_))
        for k in xrange(self.n_outputs_):
            # argmax gives class indices; ``take`` maps them back to the
            # original class labels stored in classes_[k].
            predictions[:, k] = self.classes_[k].take(np.argmax(P[k], axis=1),
                                                      axis=0)
        if self.n_outputs_ == 1:
            predictions = predictions.reshape((n_samples, ))
        return predictions

    def predict_proba(self, X):
        """Predict class probabilities for X.

        The predicted class probabilities of an input sample is computed as
        the mean predicted class probabilities of the trees in the forest.

        Parameters
        ----------
        X : array-like of shape = [n_samples, n_features]
            The input samples.

        Returns
        -------
        p : array of shape = [n_samples, n_classes], or a list of n_outputs
            such arrays if n_outputs > 1.
            The class probabilities of the input samples. Classes are
            ordered by arithmetical order.
        """
        # Check data
        if getattr(X, "dtype", None) != DTYPE or X.ndim != 2:
            X = array2d(X, dtype=DTYPE)
        # Assign chunk of trees to jobs
        n_jobs, n_trees, starts = _partition_trees(self)
        # Parallel loop: each job sums the probabilities of its chunk of
        # estimators, so all_p holds per-chunk partial sums.
        all_p = Parallel(n_jobs=n_jobs)(
            delayed(_parallel_predict_proba)(
                self.estimators_[starts[i]:starts[i + 1]],
                X,
                self.n_classes_,
                self.n_outputs_)
            for i in xrange(n_jobs))
        # Reduce: all_p[0] is reused in place as the accumulator (it is a
        # fresh array produced by the parallel call, so mutation is safe).
        p = all_p[0]
        for j in xrange(1, len(all_p)):
            for k in xrange(self.n_outputs_):
                p[k] += all_p[j][k]
        # Turn the summed votes into means over the whole forest.
        for k in xrange(self.n_outputs_):
            p[k] /= self.n_estimators
        if self.n_outputs_ == 1:
            return p[0]
        else:
            return p

    def predict_log_proba(self, X):
        """Predict class log-probabilities for X.

        The predicted class log-probabilities of an input sample is computed as
        the mean predicted class log-probabilities of the trees in the forest.

        Parameters
        ----------
        X : array-like of shape = [n_samples, n_features]
            The input samples.

        Returns
        -------
        p : array of shape = [n_samples, n_classes], or a list of n_outputs
            such arrays if n_outputs > 1.
            The class log-probabilities of the input samples. Classes are
            ordered by arithmetical order.
        """
        proba = self.predict_proba(X)
        if self.n_outputs_ == 1:
            return np.log(proba)
        else:
            # Multi-output: take the log of each per-output array in place.
            for k in xrange(self.n_outputs_):
                proba[k] = np.log(proba[k])
            return proba
class ForestRegressor(BaseForest, RegressorMixin):
    """Base class for forests of regression trees.

    Warning: This class should not be used directly. Use derived classes
    instead.
    """

    __metaclass__ = ABCMeta

    @abstractmethod
    def __init__(self, base_estimator,
                 n_estimators=10,
                 estimator_params=[],
                 bootstrap=False,
                 compute_importances=False,
                 oob_score=False,
                 n_jobs=1,
                 random_state=None,
                 verbose=0):
        # All shared machinery lives in BaseForest; this subclass only
        # contributes the regression-flavoured ``predict``.
        super(ForestRegressor, self).__init__(
            base_estimator,
            n_estimators=n_estimators,
            estimator_params=estimator_params,
            bootstrap=bootstrap,
            compute_importances=compute_importances,
            oob_score=oob_score,
            n_jobs=n_jobs,
            random_state=random_state,
            verbose=verbose)

    def predict(self, X):
        """Predict regression target for X.

        The prediction for an input sample is the mean of the predicted
        targets of the individual trees in the forest.

        Parameters
        ----------
        X : array-like of shape = [n_samples, n_features]
            The input samples.

        Returns
        -------
        y: array of shape = [n_samples] or [n_samples, n_outputs]
            The predicted values.
        """
        # Coerce the input to a 2-d array of the tree dtype if needed.
        if getattr(X, "dtype", None) != DTYPE or X.ndim != 2:
            X = array2d(X, dtype=DTYPE)

        # Partition the estimators into one contiguous slice per worker.
        n_jobs, n_trees, starts = _partition_trees(self)

        # Each job returns the summed predictions of its slice of trees.
        chunk_sums = Parallel(n_jobs=n_jobs)(
            delayed(_parallel_predict_regression)(
                self.estimators_[starts[i]:starts[i + 1]], X)
            for i in xrange(n_jobs))

        # Combine the per-chunk sums, then average over the whole forest.
        return sum(chunk_sums) / self.n_estimators
class RandomForestClassifier(ForestClassifier):
    """A random forest classifier.

    A random forest is a meta estimator that fits a number of classification
    decision trees on various sub-samples of the dataset and uses averaging
    to improve the predictive accuracy and control over-fitting.

    Parameters
    ----------
    n_estimators : integer, optional (default=10)
        The number of trees in the forest.

    criterion : string, optional (default="gini")
        The function to measure the quality of a split. Supported criteria are
        "gini" for the Gini impurity and "entropy" for the information gain.
        Note: this parameter is tree-specific.

    max_depth : integer or None, optional (default=None)
        The maximum depth of the tree. If None, then nodes are expanded until
        all leaves are pure or until all leaves contain less than
        min_samples_split samples.
        Note: this parameter is tree-specific.

    min_samples_split : integer, optional (default=1)
        The minimum number of samples required to split an internal node.
        Note: this parameter is tree-specific.

    min_samples_leaf : integer, optional (default=1)
        The minimum number of samples in newly created leaves. A split is
        discarded if after the split, one of the leaves would contain less than
        ``min_samples_leaf`` samples.
        Note: this parameter is tree-specific.

    min_density : float, optional (default=0.1)
        This parameter controls a trade-off in an optimization heuristic. It
        controls the minimum density of the `sample_mask` (i.e. the
        fraction of samples in the mask). If the density falls below this
        threshold the mask is recomputed and the input data is packed
        which results in data copying. If `min_density` equals to one,
        the partitions are always represented as copies of the original
        data. Otherwise, partitions are represented as bit masks (aka
        sample masks).
        Note: this parameter is tree-specific.

    max_features : int, string or None, optional (default="auto")
        The number of features to consider when looking for the best split:
          - If "auto", then `max_features=sqrt(n_features)` on
            classification tasks and `max_features=n_features` on regression
            problems.
          - If "sqrt", then `max_features=sqrt(n_features)`.
          - If "log2", then `max_features=log2(n_features)`.
          - If None, then `max_features=n_features`.
        Note: this parameter is tree-specific.

    bootstrap : boolean, optional (default=True)
        Whether bootstrap samples are used when building trees.

    compute_importances : boolean, optional (default=False)
        Whether feature importances are computed and stored into the
        ``feature_importances_`` attribute when calling fit.

    oob_score : bool
        Whether to use out-of-bag samples to estimate
        the generalization error.

    n_jobs : integer, optional (default=1)
        The number of jobs to run in parallel. If -1, then the number of jobs
        is set to the number of cores.

    random_state : int, RandomState instance or None, optional (default=None)
        If int, random_state is the seed used by the random number generator;
        If RandomState instance, random_state is the random number generator;
        If None, the random number generator is the RandomState instance used
        by `np.random`.

    verbose : int, optional (default=0)
        Controls the verbosity of the tree building process.

    Attributes
    ----------
    `estimators_`: list of DecisionTreeClassifier
        The collection of fitted sub-estimators.

    `feature_importances_` : array, shape = [n_features]
        The feature importances (the higher, the more important the feature).

    `oob_score_` : float
        Score of the training dataset obtained using an out-of-bag estimate.

    `oob_decision_function_` : array, shape = [n_samples, n_classes]
        Decision function computed with out-of-bag estimate on the training
        set.

    References
    ----------
    .. [1] L. Breiman, "Random Forests", Machine Learning, 45(1), 5-32, 2001.

    See also
    --------
    DecisionTreeClassifier, ExtraTreesClassifier
    """

    def __init__(self, n_estimators=10,
                 criterion="gini",
                 max_depth=None,
                 min_samples_split=1,
                 min_samples_leaf=1,
                 min_density=0.1,
                 max_features="auto",
                 bootstrap=True,
                 compute_importances=False,
                 oob_score=False,
                 n_jobs=1,
                 random_state=None,
                 verbose=0):
        super(RandomForestClassifier, self).__init__(
            base_estimator=DecisionTreeClassifier(),
            # Tree-specific parameters forwarded to each sub-estimator.
            estimator_params=("criterion", "max_depth", "min_samples_split",
                              "min_samples_leaf", "min_density", "max_features",
                              "random_state"),
            n_estimators=n_estimators,
            bootstrap=bootstrap,
            compute_importances=compute_importances,
            oob_score=oob_score,
            n_jobs=n_jobs,
            random_state=random_state,
            verbose=verbose)
        self.criterion = criterion
        self.max_depth = max_depth
        self.min_samples_split = min_samples_split
        self.min_samples_leaf = min_samples_leaf
        self.min_density = min_density
        self.max_features = max_features
class RandomForestRegressor(ForestRegressor):
    """A random forest regressor.

    A random forest is a meta estimator that fits a number of regression
    decision trees on various sub-samples of the dataset and uses averaging
    to improve the predictive accuracy and control over-fitting.

    Parameters
    ----------
    n_estimators : integer, optional (default=10)
        The number of trees in the forest.

    criterion : string, optional (default="mse")
        The function to measure the quality of a split. The only supported
        criterion is "mse" for the mean squared error.
        Note: this parameter is tree-specific.

    max_depth : integer or None, optional (default=None)
        The maximum depth of the tree. If None, then nodes are expanded until
        all leaves are pure or until all leaves contain less than
        min_samples_split samples.
        Note: this parameter is tree-specific.

    min_samples_split : integer, optional (default=1)
        The minimum number of samples required to split an internal node.
        Note: this parameter is tree-specific.

    min_samples_leaf : integer, optional (default=1)
        The minimum number of samples in newly created leaves. A split is
        discarded if after the split, one of the leaves would contain less than
        ``min_samples_leaf`` samples.
        Note: this parameter is tree-specific.

    min_density : float, optional (default=0.1)
        This parameter controls a trade-off in an optimization heuristic. It
        controls the minimum density of the `sample_mask` (i.e. the
        fraction of samples in the mask). If the density falls below this
        threshold the mask is recomputed and the input data is packed
        which results in data copying. If `min_density` equals to one,
        the partitions are always represented as copies of the original
        data. Otherwise, partitions are represented as bit masks (aka
        sample masks).
        Note: this parameter is tree-specific.

    max_features : int, string or None, optional (default="auto")
        The number of features to consider when looking for the best split:
          - If "auto", then `max_features=sqrt(n_features)` on
            classification tasks and `max_features=n_features`
            on regression problems.
          - If "sqrt", then `max_features=sqrt(n_features)`.
          - If "log2", then `max_features=log2(n_features)`.
          - If None, then `max_features=n_features`.
        Note: this parameter is tree-specific.

    bootstrap : boolean, optional (default=True)
        Whether bootstrap samples are used when building trees.

    compute_importances : boolean, optional (default=False)
        Whether feature importances are computed and stored into the
        ``feature_importances_`` attribute when calling fit.

    oob_score : bool
        Whether to use out-of-bag samples to estimate
        the generalization error.

    n_jobs : integer, optional (default=1)
        The number of jobs to run in parallel. If -1, then the number of jobs
        is set to the number of cores.

    random_state : int, RandomState instance or None, optional (default=None)
        If int, random_state is the seed used by the random number generator;
        If RandomState instance, random_state is the random number generator;
        If None, the random number generator is the RandomState instance used
        by `np.random`.

    verbose : int, optional (default=0)
        Controls the verbosity of the tree building process.

    Attributes
    ----------
    `estimators_`: list of DecisionTreeRegressor
        The collection of fitted sub-estimators.

    `feature_importances_` : array of shape = [n_features]
        The feature importances (the higher, the more important the feature).

    `oob_score_` : float
        Score of the training dataset obtained using an out-of-bag estimate.

    `oob_prediction_` : array, shape = [n_samples]
        Prediction computed with out-of-bag estimate on the training set.

    References
    ----------
    .. [1] L. Breiman, "Random Forests", Machine Learning, 45(1), 5-32, 2001.

    See also
    --------
    DecisionTreeRegressor, ExtraTreesRegressor
    """

    def __init__(self, n_estimators=10,
                 criterion="mse",
                 max_depth=None,
                 min_samples_split=1,
                 min_samples_leaf=1,
                 min_density=0.1,
                 max_features="auto",
                 bootstrap=True,
                 compute_importances=False,
                 oob_score=False,
                 n_jobs=1,
                 random_state=None,
                 verbose=0):
        super(RandomForestRegressor, self).__init__(
            base_estimator=DecisionTreeRegressor(),
            # Tree-specific parameters forwarded to each sub-estimator.
            estimator_params=("criterion", "max_depth", "min_samples_split",
                              "min_samples_leaf", "min_density", "max_features",
                              "random_state"),
            n_estimators=n_estimators,
            bootstrap=bootstrap,
            compute_importances=compute_importances,
            oob_score=oob_score,
            n_jobs=n_jobs,
            random_state=random_state,
            verbose=verbose)
        self.criterion = criterion
        self.max_depth = max_depth
        self.min_samples_split = min_samples_split
        self.min_samples_leaf = min_samples_leaf
        self.min_density = min_density
        self.max_features = max_features
class ExtraTreesClassifier(ForestClassifier):
    """An extra-trees classifier.

    This class implements a meta estimator that fits a number of
    randomized decision trees (a.k.a. extra-trees) on various sub-samples
    of the dataset and uses averaging to improve the predictive accuracy
    and control over-fitting.

    Parameters
    ----------
    n_estimators : integer, optional (default=10)
        The number of trees in the forest.

    criterion : string, optional (default="gini")
        The function to measure the quality of a split. Supported criteria are
        "gini" for the Gini impurity and "entropy" for the information gain.
        Note: this parameter is tree-specific.

    max_depth : integer or None, optional (default=None)
        The maximum depth of the tree. If None, then nodes are expanded until
        all leaves are pure or until all leaves contain less than
        min_samples_split samples.
        Note: this parameter is tree-specific.

    min_samples_split : integer, optional (default=1)
        The minimum number of samples required to split an internal node.
        Note: this parameter is tree-specific.

    min_samples_leaf : integer, optional (default=1)
        The minimum number of samples in newly created leaves. A split is
        discarded if after the split, one of the leaves would contain less than
        ``min_samples_leaf`` samples.
        Note: this parameter is tree-specific.

    min_density : float, optional (default=0.1)
        This parameter controls a trade-off in an optimization heuristic. It
        controls the minimum density of the `sample_mask` (i.e. the
        fraction of samples in the mask). If the density falls below this
        threshold the mask is recomputed and the input data is packed
        which results in data copying. If `min_density` equals to one,
        the partitions are always represented as copies of the original
        data. Otherwise, partitions are represented as bit masks (aka
        sample masks).
        Note: this parameter is tree-specific.

    max_features : int, string or None, optional (default="auto")
        The number of features to consider when looking for the best split.
          - If "auto", then `max_features=sqrt(n_features)` on
            classification tasks and `max_features=n_features`
            on regression problems.
          - If "sqrt", then `max_features=sqrt(n_features)`.
          - If "log2", then `max_features=log2(n_features)`.
          - If None, then `max_features=n_features`.
        Note: this parameter is tree-specific.

    bootstrap : boolean, optional (default=False)
        Whether bootstrap samples are used when building trees.

    compute_importances : boolean, optional (default=False)
        Whether feature importances are computed and stored into the
        ``feature_importances_`` attribute when calling fit.

    oob_score : bool
        Whether to use out-of-bag samples to estimate
        the generalization error.

    n_jobs : integer, optional (default=1)
        The number of jobs to run in parallel. If -1, then the number of jobs
        is set to the number of cores.

    random_state : int, RandomState instance or None, optional (default=None)
        If int, random_state is the seed used by the random number generator;
        If RandomState instance, random_state is the random number generator;
        If None, the random number generator is the RandomState instance used
        by `np.random`.

    verbose : int, optional (default=0)
        Controls the verbosity of the tree building process.

    Attributes
    ----------
    `estimators_`: list of DecisionTreeClassifier
        The collection of fitted sub-estimators.

    `feature_importances_` : array of shape = [n_features]
        The feature importances (the higher, the more important the feature).

    `oob_score_` : float
        Score of the training dataset obtained using an out-of-bag estimate.

    `oob_decision_function_` : array, shape = [n_samples, n_classes]
        Decision function computed with out-of-bag estimate on the training
        set.

    References
    ----------
    .. [1] P. Geurts, D. Ernst., and L. Wehenkel, "Extremely randomized trees",
           Machine Learning, 63(1), 3-42, 2006.

    See also
    --------
    sklearn.tree.ExtraTreeClassifier : Base classifier for this ensemble.
    RandomForestClassifier : Ensemble Classifier based on trees with optimal
        splits.
    """

    def __init__(self, n_estimators=10,
                 criterion="gini",
                 max_depth=None,
                 min_samples_split=1,
                 min_samples_leaf=1,
                 min_density=0.1,
                 max_features="auto",
                 bootstrap=False,
                 compute_importances=False,
                 oob_score=False,
                 n_jobs=1,
                 random_state=None,
                 verbose=0):
        super(ExtraTreesClassifier, self).__init__(
            base_estimator=ExtraTreeClassifier(),
            # Tree-specific parameters forwarded to each sub-estimator.
            estimator_params=("criterion", "max_depth", "min_samples_split",
                              "min_samples_leaf", "min_density", "max_features",
                              "random_state"),
            n_estimators=n_estimators,
            bootstrap=bootstrap,
            compute_importances=compute_importances,
            oob_score=oob_score,
            n_jobs=n_jobs,
            random_state=random_state,
            verbose=verbose)
        self.criterion = criterion
        self.max_depth = max_depth
        self.min_samples_split = min_samples_split
        self.min_samples_leaf = min_samples_leaf
        self.min_density = min_density
        self.max_features = max_features
class ExtraTreesRegressor(ForestRegressor):
    """An extra-trees regressor.

    This class implements a meta estimator that fits a number of
    randomized decision trees (a.k.a. extra-trees) on various sub-samples
    of the dataset and uses averaging to improve the predictive accuracy
    and control over-fitting.

    Parameters
    ----------
    n_estimators : integer, optional (default=10)
        The number of trees in the forest.

    criterion : string, optional (default="mse")
        The function to measure the quality of a split. The only supported
        criterion is "mse" for the mean squared error.
        Note: this parameter is tree-specific.

    max_depth : integer or None, optional (default=None)
        The maximum depth of the tree. If None, then nodes are expanded until
        all leaves are pure or until all leaves contain less than
        min_samples_split samples.
        Note: this parameter is tree-specific.

    min_samples_split : integer, optional (default=1)
        The minimum number of samples required to split an internal node.
        Note: this parameter is tree-specific.

    min_samples_leaf : integer, optional (default=1)
        The minimum number of samples in newly created leaves. A split is
        discarded if after the split, one of the leaves would contain less than
        ``min_samples_leaf`` samples.
        Note: this parameter is tree-specific.

    min_density : float, optional (default=0.1)
        This parameter controls a trade-off in an optimization heuristic. It
        controls the minimum density of the `sample_mask` (i.e. the
        fraction of samples in the mask). If the density falls below this
        threshold the mask is recomputed and the input data is packed
        which results in data copying. If `min_density` equals to one,
        the partitions are always represented as copies of the original
        data. Otherwise, partitions are represented as bit masks (aka
        sample masks).
        Note: this parameter is tree-specific.

    max_features : int, string or None, optional (default="auto")
        The number of features to consider when looking for the best split:
          - If "auto", then `max_features=sqrt(n_features)` on
            classification tasks and `max_features=n_features`
            on regression problems.
          - If "sqrt", then `max_features=sqrt(n_features)`.
          - If "log2", then `max_features=log2(n_features)`.
          - If None, then `max_features=n_features`.
        Note: this parameter is tree-specific.

    bootstrap : boolean, optional (default=False)
        Whether bootstrap samples are used when building trees.
        Note: this parameter is tree-specific.

    compute_importances : boolean, optional (default=False)
        Whether feature importances are computed and stored into the
        ``feature_importances_`` attribute when calling fit.

    oob_score : bool
        Whether to use out-of-bag samples to estimate
        the generalization error.

    n_jobs : integer, optional (default=1)
        The number of jobs to run in parallel. If -1, then the number of jobs
        is set to the number of cores.

    random_state : int, RandomState instance or None, optional (default=None)
        If int, random_state is the seed used by the random number generator;
        If RandomState instance, random_state is the random number generator;
        If None, the random number generator is the RandomState instance used
        by `np.random`.

    verbose : int, optional (default=0)
        Controls the verbosity of the tree building process.

    Attributes
    ----------
    `estimators_`: list of DecisionTreeRegressor
        The collection of fitted sub-estimators.

    `feature_importances_` : array of shape = [n_features]
        The feature importances (the higher, the more important the feature).

    `oob_score_` : float
        Score of the training dataset obtained using an out-of-bag estimate.

    `oob_prediction_` : array, shape = [n_samples]
        Prediction computed with out-of-bag estimate on the training set.

    References
    ----------
    .. [1] P. Geurts, D. Ernst., and L. Wehenkel, "Extremely randomized trees",
           Machine Learning, 63(1), 3-42, 2006.

    See also
    --------
    sklearn.tree.ExtraTreeRegressor: Base estimator for this ensemble.
    RandomForestRegressor: Ensemble regressor using trees with optimal splits.
    """

    def __init__(self, n_estimators=10,
                 criterion="mse",
                 max_depth=None,
                 min_samples_split=1,
                 min_samples_leaf=1,
                 min_density=0.1,
                 max_features="auto",
                 bootstrap=False,
                 compute_importances=False,
                 oob_score=False,
                 n_jobs=1,
                 random_state=None,
                 verbose=0):
        super(ExtraTreesRegressor, self).__init__(
            base_estimator=ExtraTreeRegressor(),
            # Tree-specific parameters forwarded to each sub-estimator.
            estimator_params=("criterion", "max_depth", "min_samples_split",
                              "min_samples_leaf", "min_density", "max_features",
                              "random_state"),
            n_estimators=n_estimators,
            bootstrap=bootstrap,
            compute_importances=compute_importances,
            oob_score=oob_score,
            n_jobs=n_jobs,
            random_state=random_state,
            verbose=verbose)
        self.criterion = criterion
        self.max_depth = max_depth
        self.min_samples_split = min_samples_split
        self.min_samples_leaf = min_samples_leaf
        self.min_density = min_density
        self.max_features = max_features
|
GbalsaC/bitnamiP
|
venv/lib/python2.7/site-packages/sklearn/ensemble/forest.py
|
Python
|
agpl-3.0
| 41,898
|
[
"Brian"
] |
70332f50177c22d4fc5fbc0263d3b9fe04fe68ac988453836d627fac9f9cc463
|
# Copyright 2003-2009 by Bartek Wilczynski. All rights reserved.
# This code is part of the Biopython distribution and governed by its
# license. Please see the LICENSE file that should have been included
# as part of this package.
"""Tools for sequence motif analysis (DEPRECATED, see Bio.motifs instead).
This module (Bio.Motif) has been deprecated and will be removed in a
future release of release of Biopython. Please use the new module
Bio.motifs instead.
This contains the core Motif class containing various I/O methods as
well as methods for motif comparisons and motif searching in sequences.
It also inlcudes functionality for parsing AlignACE and MEME programs.
"""
from __future__ import print_function

import warnings
from Bio import BiopythonDeprecationWarning

# Warn at import time so any use of the legacy Bio.Motif package alerts
# the user immediately.  The message text is user-visible output and is
# deliberately left untouched.
warnings.warn("The module Bio.Motif has been deprecated and will be "
              "removed in a future release of Biopython. Instead "
              "please use the new module Bio.motifs instead. Please "
              "be aware that though the functionality of Bio.Motif "
              "is retained (and extended) in Bio.motifs, usage may "
              "be different.",
              BiopythonDeprecationWarning)

from Bio.Motif._Motif import Motif
from Bio.Motif.Parsers.AlignAce import read as _AlignAce_read
from Bio.Motif.Parsers.MEME import read as _MEME_read
from Bio.Motif.Thresholds import ScoreDistribution

__docformat__ = "restructuredtext en"

# Multi-motif formats: each value parses a whole output file and returns
# an object exposing a ``motifs`` list (consumed by ``parse`` below).
_parsers = {"AlignAce": _AlignAce_read,
            "MEME": _MEME_read,
            }


def _from_pfm(handle):
    # Single-motif reader for the JASPAR position frequency matrix format.
    return Motif()._from_jaspar_pfm(handle)


def _from_sites(handle):
    # Single-motif reader for the JASPAR sites format.
    return Motif()._from_jaspar_sites(handle)

# Single-motif formats: each value reads exactly one motif from a handle.
_readers = {"jaspar-pfm": _from_pfm,
            "jaspar-sites": _from_sites
            }
def parse(handle, format):
    """Parses an output file of motif finding programs.

    Currently supported formats:
     - AlignAce
     - MEME

    You can also use single-motif formats, although the Bio.Motif.read()
    function is simpler to use in this situation.
     - jaspar-pfm
     - jaspar-sites

    For example:

    >>> from Bio import Motif
    >>> with open("Motif/alignace.out") as handle:
    ...     for motif in Motif.parse(handle, "AlignAce"):
    ...         print(motif.consensus())
    ...
    TCTACGATTGAG
    CTGCACCTAGCTACGAGTGAG
    GTGCCCTAAGCATACTAGGCG
    GCCACTAGCAGAGCAGGGGGC
    CGACTCAGAGGTT
    CCACGCTAAGAGAAGTGCCGGAG
    GCACGTCCCTGAGCA
    GTCCATCGCAAAGCGTGGGGC
    GAGATCAGAGGGCCG
    TGGACGCGGGG
    GACCAGAGCCTCGCATGGGGG
    AGCGCGCGTG
    GCCGGTTGCTGTTCATTAGG
    ACCGACGGCAGCTAAAAGGG
    GACGCCGGGGAT
    CGACTCGCGCTTACAAGG
    """
    try:
        parser = _parsers[format]
    except KeyError:
        try:  # not a true parser, try reader formats
            reader = _readers[format]
        except KeyError:
            # Narrowed from a bare ``except:``: only an unknown format key
            # means a bad format string; any other exception (including
            # KeyboardInterrupt/SystemExit) must propagate unmasked.
            raise ValueError("Wrong parser format")
        else:  # we have a proper reader
            yield reader(handle)
    else:  # we have a proper parser
        for m in parser(handle).motifs:
            yield m
def read(handle, format):
    """Reads a motif from a handle using a specified file-format.

    This supports the same formats as Bio.Motif.parse(), but
    only for files containing exactly one record.  For example,
    reading a pfm file:

    >>> from Bio import Motif
    >>> with open("Motif/SRF.pfm") as handle:
    ...     motif = Motif.read(handle, "jaspar-pfm")
    ...
    >>> motif.consensus()
    Seq('GCCCATATATGG', IUPACUnambiguousDNA())

    Or a single-motif MEME file,

    >>> from Bio import Motif
    >>> with open("Motif/meme.out") as handle:
    ...     motif = Motif.read(handle, "MEME")
    ...
    >>> motif.consensus()
    Seq('CTCAATCGTA', IUPACUnambiguousDNA())

    If the handle contains no records, or more than one record,
    an exception is raised:

    >>> from Bio import Motif
    >>> with open("Motif/alignace.out") as handle:
    ...     motif = Motif.read(handle, "AlignAce")
    ...
    Traceback (most recent call last):
        ...
    ValueError: More than one motif found in handle

    If you actually want the first record of a multi-record file,
    use next() on the Bio.Motif.parse() generator instead:

    >>> from Bio import Motif
    >>> with open("Motif/alignace.out") as handle:
    ...     motif = next(Motif.parse(handle, "AlignAce"))
    ...
    >>> motif.consensus()
    Seq('TCTACGATTGAG', IUPACUnambiguousDNA())

    Use the Bio.Motif.parse(handle, format) function if you want
    to read multiple records from the handle.
    """
    # ``next(iterator, None)`` stands in for the verbose
    # try/except StopIteration bookkeeping; ``parse`` never yields None,
    # so None unambiguously signals exhaustion.
    motifs = parse(handle, format)
    first = next(motifs, None)
    if first is None:
        raise ValueError("No motifs found in handle")
    if next(motifs, None) is not None:
        raise ValueError("More than one motif found in handle")
    return first
if __name__ == "__main__":
from Bio._utils import run_doctest
run_doctest(verbose=0)
|
poojavade/Genomics_Docker
|
Dockerfiles/gedlab-khmer-filter-abund/pymodules/python2.7/lib/python/Bio/Motif/__init__.py
|
Python
|
apache-2.0
| 5,092
|
[
"Biopython"
] |
50f49464dde1ec92bb843b0d8ab985b5bf3bb24937d76bd3da88f8c18c7a305e
|
import sys
sys.path.insert(1, "../../../")
import h2o
def link_incompatible_error(ip, port):
    """Verify that h2o.glm rejects incompatible family/link pairs.

    Each combination below must make the backend raise EnvironmentError.
    """
    print("Reading in original prostate data.")
    prostate = h2o.import_file(path=h2o.locate("smalldata/prostate/prostate.csv.zip"))
    print("Throw error when trying to create model with incompatible logit link.")

    # (x columns, y column, family, link) combinations the backend rejects.
    bad_combos = [(prostate[1:8], prostate[8], "gaussian", "logit"),
                  (prostate[1:8], prostate[8], "tweedie", "log"),
                  (prostate[2:9], prostate[1], "binomial", "inverse")]

    for x, y, family, link in bad_combos:
        try:
            h2o.model = h2o.glm(x=x, y=y, family=family, link=link)
            assert False, "expected an error"
        except EnvironmentError:
            assert True
if __name__ == "__main__":
h2o.run_test(sys.argv, link_incompatible_error)
|
weaver-viii/h2o-3
|
h2o-py/tests/testdir_algos/glm/pyunit_link_incompatible_errorGLM.py
|
Python
|
apache-2.0
| 985
|
[
"Gaussian"
] |
3a9fc598563419cb040746c614494c41bccc4ecf70c4859e4b17b2a3bb6fe05d
|
from setuptools import setup, find_packages
import imp
# Load the version module directly from its source file so the package
# itself (and its heavy dependencies) need not be importable at install
# time.  NOTE(review): the ``imp`` module is deprecated since Python 3.4
# -- consider importlib.util once Python 2 support is dropped.
version = imp.load_source('crema.version', 'crema/version.py')

setup(
    name='crema',
    version=version.version,
    description="Convolutional-recurrent estimators for music analysis",
    author='Brian McFee',
    url='http://github.com/bmcfee/crema',
    download_url='http://github.com/bmcfee/crema/releases',
    packages=find_packages(),
    # Bundle the pretrained model artifacts shipped inside the package.
    package_data={'': ['models/*/*.pkl',
                       'models/*/*.h5',
                       'models/*/*.json',
                       'models/*/*.txt']},
    long_description="Convolutional-recurrent estimators for music analysis",
    classifiers=[
        "License :: OSI Approved :: ISC License (ISCL)",
        "Programming Language :: Python",
        "Development Status :: 3 - Alpha",
        "Intended Audience :: Developers",
        "Topic :: Software Development",
        "Programming Language :: Python :: 3",
        "Programming Language :: Python :: 3.5",
        "Programming Language :: Python :: 3.6",
    ],
    keywords='audio music learning',
    license='ISC',
    install_requires=['six',
                      'librosa>=0.6',
                      'jams>=0.3',
                      'scikit-learn>=0.18',
                      'keras>=2.0',
                      'tensorflow>=1.0',
                      'mir_eval>=0.5',
                      'pumpp>=0.4',
                      'h5py>=2.7'],
    # Optional dependency groups: docs build, test suite, model training.
    extras_require={
        'docs': ['numpydoc', 'sphinx'],
        'tests': ['pytest', 'pytest-cov'],
        'training': ['pescador>=2.0.1', 'muda']
    }
)
|
bmcfee/crema
|
setup.py
|
Python
|
bsd-2-clause
| 1,596
|
[
"Brian"
] |
a4f22d0c98b76281108b318ac7ce0442d560c9c0f21ae153547a89aaa9087b6c
|
"""
Django settings for Exchange project.
For more information on this file, see
https://docs.djangoproject.com/en/1.7/topics/settings/
For the full list of settings and their values, see
https://docs.djangoproject.com/en/1.7/ref/settings/
"""
# Build paths inside the project like this: os.path.join(BASE_DIR, ...)
import os
import uuid
BASE_DIR = os.path.dirname(os.path.dirname(__file__))
# Quick-start development settings - unsuitable for production
# See https://docs.djangoproject.com/en/1.7/howto/deployment/checklist/
# SECURITY WARNING: keep the secret key used in production secret!
SECRET_KEY = '%p*m%z8z)jxkgvt1b0m)ha=e$uexa$i5o5-tifc=-t#9%7p+gg'
# SECURITY WARNING: don't run with debug turned on in production!
DEBUG = True
MEDIA_URL = '/api/1.0/files/'
MEDIA_ROOT = os.path.join(BASE_DIR, 'assets')
TEMPLATE_DEBUG = True
ALLOWED_HOSTS = []
SENDER = '84e847f929d34f66a94bf376cf30d12c'
LOGIN_URL = 'login'
LOGIN_REDIRECT_URL = 'dashboard'
# Application definition
INSTALLED_APPS = (
'sslserver',
'flat',
'activelink',
'django.contrib.admin',
'django.contrib.auth',
'django.contrib.contenttypes',
'django.contrib.sessions',
'django.contrib.messages',
'django.contrib.staticfiles',
'incidents',
'sending',
'corsheaders',
'oauth2_provider',
'rest_framework',
'widget_tweaks',
'datetimewidget',
'bootstrap_pagination',
'a_ppl_e',
)
MIDDLEWARE_CLASSES = (
'django.contrib.sessions.middleware.SessionMiddleware',
'corsheaders.middleware.CorsMiddleware',
'django.middleware.common.CommonMiddleware',
'django.middleware.csrf.CsrfViewMiddleware',
'django.middleware.locale.LocaleMiddleware',
'django.contrib.auth.middleware.AuthenticationMiddleware',
'django.contrib.auth.middleware.SessionAuthenticationMiddleware',
'django.contrib.messages.middleware.MessageMiddleware',
'django.middleware.clickjacking.XFrameOptionsMiddleware',
)
TEMPLATE_DIRS = [os.path.join(BASE_DIR, 'Exchange', 'templates')]
TEMPLATE_CONTEXT_PROCESSORS = (
'django.core.context_processors.request',
'django.contrib.auth.context_processors.auth',
'django.core.context_processors.media',
'django.core.context_processors.request',
)
ROOT_URLCONF = 'Exchange.urls'
WSGI_APPLICATION = 'Exchange.wsgi.application'
# Database
# https://docs.djangoproject.com/en/1.7/ref/settings/#databases
DATABASES = {
'default': {
'ENGINE': 'django.db.backends.sqlite3',
'NAME': os.path.join(BASE_DIR, 'db.sqlite3'),
}
}
# Internationalization
# https://docs.djangoproject.com/en/1.7/topics/i18n/
LANGUAGE_CODE = 'en-us'
TIME_ZONE = 'UTC'
USE_I18N = True
USE_L10N = True
USE_TZ = True
# Static files (CSS, JavaScript, Images)
# https://docs.djangoproject.com/en/1.7/howto/static-files/
STATIC_URL = '/static/'
# Additional locations of static files
STATICFILES_DIRS = (
os.path.join(BASE_DIR, 'assets'),
)
# RestFramework
REST_FRAMEWORK = {
'DEFAULT_AUTHENTICATION_CLASSES': (
'oauth2_provider.ext.rest_framework.OAuth2Authentication',
'rest_framework.authentication.SessionAuthentication',
),
# 'DEFAULT_PERMISSION_CLASSES': (
# 'rest_framework.permissions.IsAuthenticated',
# ),
'PAGINATE_BY': 10,
'DEFAULT_PARSER_CLASSES': (
'rest_framework.parsers.JSONParser',
'rest_framework.renderers.BrowsableAPIRenderer',
)
}
OAUTH2_PROVIDER = {
# this is the list of available scopes
'SCOPES': {'subscriber': 'Subscriber', 'provider': 'Provider'}
}
CORS_ORIGIN_WHITELIST = (
'localhost:8000',
'localhost:8800',
)
OAUTH2_PROVIDER_APPLICATION_MODEL = 'oauth2_provider.Application'
# TLP
TLP_SCHEME = 'enisa' # US-CERT or ENISA
TLP_DEFAULT_VALUE = 'green' # red, amber, green or white
|
SINTEF-Infosec/Incident-Information-Sharing-Tool
|
Exchange/settings.py
|
Python
|
apache-2.0
| 3,811
|
[
"Amber"
] |
95a55d281d2c94d3d57a8c79eaceeff3567a526787b4272e1d7c3a5c4c5eef4b
|
# This file is part of PyEMMA.
#
# Copyright (c) 2015, 2014 Computational Molecular Biology Group, Freie Universitaet Berlin (GER)
#
# PyEMMA is free software: you can redistribute it and/or modify
# it under the terms of the GNU Lesser General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU Lesser General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
from __future__ import absolute_import
import warnings
from pyemma._base.logging import Loggable
from pyemma.util.types import is_string
import mdtraj
import six
from pyemma.coordinates.data.featurization.util import (_parse_pairwise_input,
_parse_groupwise_input)
from .misc import CustomFeature
import numpy as np
__author__ = 'Frank Noe, Martin Scherer'
__all__ = ['MDFeaturizer']
class MDFeaturizer(Loggable):
r"""Extracts features from MD trajectories."""
def __init__(self, topfile):
"""extracts features from MD trajectories.
Parameters
----------
topfile : str or mdtraj.Topology
a path to a topology file (pdb etc.) or an mdtraj Topology() object
"""
self.topologyfile = None
if isinstance(topfile, six.string_types):
self.topology = (mdtraj.load(topfile)).topology
self.topologyfile = topfile
elif isinstance(topfile, mdtraj.Topology):
self.topology = topfile
else:
raise ValueError("no valid topfile arg: type was %s, "
"but only string or mdtraj.Topology allowed." % type(topfile))
self.active_features = []
self._dim = 0
self._showed_warning_empty_feature_list = False
def __add_feature(self, f):
# perform sanity checks
if f.dimension == 0:
self._logger.error("given an empty feature (eg. due to an empty/"
"ineffective selection). Skipping it."
" Feature desc: %s" % f.describe())
return
if f not in self.active_features:
self.active_features.append(f)
else:
self._logger.warning("tried to re-add the same feature %s"
% f.__class__.__name__)
def describe(self):
"""
Returns a list of strings, one for each feature selected,
with human-readable descriptions of the features.
Returns
-------
labels : list of str
An ordered list of strings, one for each feature selected,
with human-readable descriptions of the features.
"""
all_labels = []
for f in self.active_features:
all_labels += f.describe()
return all_labels
def select(self, selstring):
"""
Returns the indexes of atoms matching the given selection
Parameters
----------
selstring : str
Selection string. See mdtraj documentation for details:
http://mdtraj.org/latest/atom_selection.html
Returns
-------
indexes : ndarray((n), dtype=int)
array with selected atom indexes
"""
return self.topology.select(selstring)
def select_Ca(self):
"""
Returns the indexes of all Ca-atoms
Returns
-------
indexes : ndarray((n), dtype=int)
array with selected atom indexes
"""
return self.topology.select("name CA")
def select_Backbone(self):
"""
Returns the indexes of backbone C, CA and N atoms
Returns
-------
indexes : ndarray((n), dtype=int)
array with selected atom indexes
"""
return self.topology.select("backbone and (name C or name CA or name N)")
def select_Heavy(self):
"""
Returns the indexes of all heavy atoms (Mass >= 2)
Returns
-------
indexes : ndarray((n), dtype=int)
array with selected atom indexes
"""
return self.topology.select("mass >= 2")
@staticmethod
def pairs(sel, excluded_neighbors=0):
"""
Creates all pairs between indexes. Will exclude closest neighbors up to :py:obj:`excluded_neighbors`
The self-pair (i,i) is always excluded
Parameters
----------
sel : ndarray((n), dtype=int)
array with selected atom indexes
excluded_neighbors: int, default = 0
number of neighbors that will be excluded when creating the pairs
Returns
-------
sel : ndarray((m,2), dtype=int)
m x 2 array with all pair indexes between different atoms that are at least :obj:`excluded_neighbors`
indexes apart, i.e. if i is the index of an atom, the pairs [i,i-2], [i,i-1], [i,i], [i,i+1], [i,i+2], will
not be in :py:obj:`sel` (n=excluded_neighbors) if :py:obj:`excluded_neighbors` = 2.
Moreover, the list is non-redundant,i.e. if [i,j] is in sel, then [j,i] is not.
"""
assert isinstance(excluded_neighbors,int)
p = []
for i in range(len(sel)):
for j in range(i + 1, len(sel)):
# get ordered pair
I = sel[i]
J = sel[j]
if (I > J):
I = sel[j]
J = sel[i]
# exclude 1 and 2 neighbors
if (J > I + excluded_neighbors):
p.append([I, J])
return np.array(p)
def _check_indices(self, pair_inds, pair_n=2):
"""ensure pairs are valid (shapes, all atom indices available?, etc.)
"""
pair_inds = np.array(pair_inds).astype(dtype=np.int, casting='safe')
if pair_inds.ndim != 2:
raise ValueError("pair indices has to be a matrix.")
if pair_inds.shape[1] != pair_n:
raise ValueError("pair indices shape has to be (x, %i)." % pair_n)
if pair_inds.max() > self.topology.n_atoms:
raise ValueError("index out of bounds: %i."
" Maximum atom index available: %i"
% (pair_inds.max(), self.topology.n_atoms))
return pair_inds
def add_all(self):
"""
Adds all atom coordinates to the feature list.
The coordinates are flattened as follows: [x1, y1, z1, x2, y2, z2, ...]
"""
# TODO: add possibility to align to a reference structure
self.add_selection(list(range(self.topology.n_atoms)))
def add_selection(self, indexes):
"""
Adds the coordinates of the selected atom indexes to the feature list.
The coordinates of the selection [1, 2, ...] are flattened as follows: [x1, y1, z1, x2, y2, z2, ...]
Parameters
----------
indexes : ndarray((n), dtype=int)
array with selected atom indexes
"""
# TODO: add possibility to align to a reference structure
from .misc import SelectionFeature
f = SelectionFeature(self.topology, indexes)
self.__add_feature(f)
def add_distances(self, indices, periodic=True, indices2=None):
r"""
Adds the distances between atoms to the feature list.
Parameters
----------
indices : can be of two types:
ndarray((n, 2), dtype=int):
n x 2 array with the pairs of atoms between which the distances shall be computed
iterable of integers (either list or ndarray(n, dtype=int)):
indices (not pairs of indices) of the atoms between which the distances shall be computed.
periodic : optional, boolean, default is True
If periodic is True and the trajectory contains unitcell information,
distances will be computed under the minimum image convention.
indices2: iterable of integers (either list or ndarray(n, dtype=int)), optional:
Only has effect if :py:obj:`indices` is an iterable of integers. Instead of the above behaviour,
only the distances between the atoms in :py:obj:`indices` and :py:obj:`indices2` will be computed.
.. note::
When using the iterable of integers input, :py:obj:`indices` and :py:obj:`indices2`
will be sorted numerically and made unique before converting them to a pairlist.
Please look carefully at the output of :py:func:`describe()` to see what features exactly have been added.
"""
from .distances import DistanceFeature
atom_pairs = _parse_pairwise_input(
indices, indices2, self._logger, fname='add_distances()')
atom_pairs = self._check_indices(atom_pairs)
f = DistanceFeature(self.topology, atom_pairs, periodic=periodic)
self.__add_feature(f)
def add_distances_ca(self, periodic=True, excluded_neighbors=2):
"""
Adds the distances between all Ca's to the feature list.
Parameters
----------
periodic : boolean, default is True
Use the minimum image convetion when computing distances
excluded_neighbors : int, default is 2
Number of exclusions when compiling the list of pairs. Two CA-atoms are considered
neighbors if they belong to adjacent residues.
"""
# Atom indices for CAs
at_idxs_ca = self.select_Ca()
# Residue indices for residues contatinig CAs
res_idxs_ca = [self.topology.atom(ca).residue.index for ca in at_idxs_ca]
# Pairs of those residues, with possibility to exclude neighbors
res_idxs_ca_pairs = self.pairs(res_idxs_ca, excluded_neighbors=excluded_neighbors)
# Mapping back pairs of residue indices to pairs of CA indices
distance_indexes = []
for ri, rj in res_idxs_ca_pairs:
distance_indexes.append([self.topology.residue(ri).atom('CA').index,
self.topology.residue(rj).atom('CA').index
])
distance_indexes = np.array(distance_indexes)
self.add_distances(distance_indexes, periodic=periodic)
def add_inverse_distances(self, indices, periodic=True, indices2=None):
"""
Adds the inverse distances between atoms to the feature list.
Parameters
----------
indices : can be of two types:
ndarray((n, 2), dtype=int):
n x 2 array with the pairs of atoms between which the inverse distances shall be computed
iterable of integers (either list or ndarray(n, dtype=int)):
indices (not pairs of indices) of the atoms between which the inverse distances shall be computed.
periodic : optional, boolean, default is True
If periodic is True and the trajectory contains unitcell information,
distances will be computed under the minimum image convention.
indices2: iterable of integers (either list or ndarray(n, dtype=int)), optional:
Only has effect if :py:obj:`indices` is an iterable of integers. Instead of the above behaviour,
only the inverse distances between the atoms in :py:obj:`indices` and :py:obj:`indices2` will be computed.
.. note::
When using the *iterable of integers* input, :py:obj:`indices` and :py:obj:`indices2`
will be sorted numerically and made unique before converting them to a pairlist.
Please look carefully at the output of :py:func:`describe()` to see what features exactly have been added.
"""
from .distances import InverseDistanceFeature
atom_pairs = _parse_pairwise_input(
indices, indices2, self._logger, fname='add_inverse_distances()')
atom_pairs = self._check_indices(atom_pairs)
f = InverseDistanceFeature(self.topology, atom_pairs, periodic=periodic)
self.__add_feature(f)
def add_contacts(self, indices, indices2=None, threshold=0.3, periodic=True, count_contacts=False):
r"""
Adds the contacts to the feature list.
Parameters
----------
indices : can be of two types:
ndarray((n, 2), dtype=int):
n x 2 array with the pairs of atoms between which the contacts shall be computed
iterable of integers (either list or ndarray(n, dtype=int)):
indices (not pairs of indices) of the atoms between which the contacts shall be computed.
indices2: iterable of integers (either list or ndarray(n, dtype=int)), optional:
Only has effect if :py:obj:`indices` is an iterable of integers. Instead of the above behaviour,
only the contacts between the atoms in :py:obj:`indices` and :py:obj:`indices2` will be computed.
threshold : float, optional, default = .3
distances below this threshold (in nm) will result in a feature 1.0, distances above will result in 0.0.
The default is set to .3 nm (3 Angstrom)
periodic : boolean, default True
use the minimum image convention if unitcell information is available
count_contacts : boolean, default False
If set to true, this feature will return the number of formed contacts (and not feature values with either 1.0 or 0)
The ouput of this feature will be of shape (Nt,1), and not (Nt, nr_of_contacts)
.. note::
When using the *iterable of integers* input, :py:obj:`indices` and :py:obj:`indices2`
will be sorted numerically and made unique before converting them to a pairlist.
Please look carefully at the output of :py:func:`describe()` to see what features exactly have been added.
"""
from .distances import ContactFeature
atom_pairs = _parse_pairwise_input(
indices, indices2, self._logger, fname='add_contacts()')
atom_pairs = self._check_indices(atom_pairs)
f = ContactFeature(self.topology, atom_pairs, threshold, periodic, count_contacts)
self.__add_feature(f)
def add_residue_mindist(self,
residue_pairs='all',
scheme='closest-heavy',
ignore_nonprotein=True,
threshold=None,
periodic=True):
r"""
Adds the minimum distance between residues to the feature list. See below how
the minimum distance can be defined. If the topology generated out of :py:obj:`topfile`
contains information on periodic boundary conditions, the minimum image convention
will be used when computing distances.
Parameters
----------
residue_pairs : can be of two types:
'all'
Computes distances between all pairs of residues excluding first and second neighbors
ndarray((n, 2), dtype=int):
n x 2 array with the pairs residues for which distances will be computed
scheme : 'ca', 'closest', 'closest-heavy', default is closest-heavy
Within a residue, determines the sub-group atoms that will be considered when computing distances
ignore_nonprotein : boolean, default True
Ignore residues that are not of protein type (e.g. water molecules, post-traslational modifications etc)
threshold : float, optional, default is None
distances below this threshold (in nm) will result in a feature 1.0, distances above will result in 0.0. If
left to None, the numerical value will be returned
periodic : bool, optional, default = True
If `periodic` is True and the trajectory contains unitcell
information, we will treat dihedrals that cross periodic images
using the minimum image convention.
.. note::
Using :py:obj:`scheme` = 'closest' or 'closest-heavy' with :py:obj:`residue pairs` = 'all'
will compute nearly all interatomic distances, for every frame, before extracting the closest pairs.
This can be very time consuming. Those schemes are intended to be used with a subset of residues chosen
via :py:obj:`residue_pairs`.
"""
from .distances import ResidueMinDistanceFeature
if scheme != 'ca' and is_string(residue_pairs):
if residue_pairs == 'all':
self._logger.warning("Using all residue pairs with schemes like closest or closest-heavy is "
"very time consuming. Consider reducing the residue pairs")
f = ResidueMinDistanceFeature(self.topology, residue_pairs, scheme, ignore_nonprotein, threshold, periodic)
self.__add_feature(f)
def add_group_mindist(self,
group_definitions,
group_pairs='all',
threshold=None,
periodic=True,
):
r"""
Adds the minimum distance between groups of atoms to the feature list. If the groups of
atoms are identical to residues, use :py:obj:`add_residue_mindist <pyemma.coordinates.data.featurizer.MDFeaturizer.add_residue_mindist>`.
Parameters
----------
group_definition : list of 1D-arrays/iterables containing the group definitions via atom indices.
If there is only one group_definition, it is assumed the minimum distance within this group (excluding the
self-distance) is wanted. In this case, :py:obj:`group_pairs` is ignored.
group_pairs : Can be of two types:
'all'
Computes minimum distances between all pairs of groups contained in the group definitions
ndarray((n, 2), dtype=int):
n x 2 array with the pairs of groups for which the minimum distances will be computed.
threshold : float, optional, default is None
distances below this threshold (in nm) will result in a feature 1.0, distances above will result in 0.0. If
left to None, the numerical value will be returned
periodic : bool, optional, default = True
If `periodic` is True and the trajectory contains unitcell
information, we will treat dihedrals that cross periodic images
using the minimum image convention.
"""
from .distances import GroupMinDistanceFeature
# Some thorough input checking and reformatting
group_definitions, group_pairs, distance_list, group_identifiers = \
_parse_groupwise_input(group_definitions, group_pairs, self._logger, 'add_group_mindist')
distance_list = self._check_indices(distance_list)
f = GroupMinDistanceFeature(self.topology, group_definitions, group_pairs, distance_list, group_identifiers, threshold, periodic)
self.__add_feature(f)
def add_angles(self, indexes, deg=False, cossin=False, periodic=True):
"""
Adds the list of angles to the feature list
Parameters
----------
indexes : np.ndarray, shape=(num_pairs, 3), dtype=int
an array with triplets of atom indices
deg : bool, optional, default = False
If False (default), angles will be computed in radians.
If True, angles will be computed in degrees.
cossin : bool, optional, default = False
If True, each angle will be returned as a pair of (sin(x), cos(x)).
This is useful, if you calculate the mean (e.g TICA/PCA, clustering)
in that space.
periodic : bool, optional, default = True
If `periodic` is True and the trajectory contains unitcell
information, we will treat dihedrals that cross periodic images
using the minimum image convention.
"""
from .angles import AngleFeature
indexes = self._check_indices(indexes, pair_n=3)
f = AngleFeature(self.topology, indexes, deg=deg, cossin=cossin,
periodic=periodic)
self.__add_feature(f)
def add_dihedrals(self, indexes, deg=False, cossin=False, periodic=True):
"""
Adds the list of dihedrals to the feature list
Parameters
----------
indexes : np.ndarray, shape=(num_pairs, 4), dtype=int
an array with quadruplets of atom indices
deg : bool, optional, default = False
If False (default), angles will be computed in radians.
If True, angles will be computed in degrees.
cossin : bool, optional, default = False
If True, each angle will be returned as a pair of (sin(x), cos(x)).
This is useful, if you calculate the mean (e.g TICA/PCA, clustering)
in that space.
periodic : bool, optional, default = True
If `periodic` is True and the trajectory contains unitcell
information, we will treat dihedrals that cross periodic images
using the minimum image convention.
"""
from .angles import DihedralFeature
indexes = self._check_indices(indexes, pair_n=4)
f = DihedralFeature(self.topology, indexes, deg=deg, cossin=cossin,
periodic=periodic)
self.__add_feature(f)
def add_backbone_torsions(self, selstr=None, deg=False, cossin=False, periodic=True):
"""
Adds all backbone phi/psi angles or the ones specified in :obj:`selstr` to the feature list.
Parameters
----------
selstr : str, optional, default = ""
selection string specifying the atom selection used to specify a specific set of backbone angles
If "" (default), all phi/psi angles found in the topology will be computed
deg : bool, optional, default = False
If False (default), angles will be computed in radians.
If True, angles will be computed in degrees.
cossin : bool, optional, default = False
If True, each angle will be returned as a pair of (sin(x), cos(x)).
This is useful, if you calculate the mean (e.g TICA/PCA, clustering)
in that space.
periodic : bool, optional, default = True
If `periodic` is True and the trajectory contains unitcell
information, we will treat dihedrals that cross periodic images
using the minimum image convention.
"""
from .angles import BackboneTorsionFeature
f = BackboneTorsionFeature(
self.topology, selstr=selstr, deg=deg, cossin=cossin, periodic=periodic)
self.__add_feature(f)
def add_chi1_torsions(self, selstr="", deg=False, cossin=False, periodic=True):
"""
Adds all chi1 angles or the ones specified in :obj:`selstr` to the feature list.
Parameters
----------
selstr : str, optional, default = ""
selection string specifying the atom selection used to specify a specific set of backbone angles
If "" (default), all chi1 angles found in the topology will be computed
deg : bool, optional, default = False
If False (default), angles will be computed in radians.
If True, angles will be computed in degrees.
cossin : bool, optional, default = False
If True, each angle will be returned as a pair of (sin(x), cos(x)).
This is useful, if you calculate the mean (e.g TICA/PCA, clustering)
in that space.
periodic : bool, optional, default = True
If `periodic` is True and the trajectory contains unitcell
information, we will treat dihedrals that cross periodic images
using the minimum image convention.
"""
from .angles import Chi1TorsionFeature
f = Chi1TorsionFeature(
self.topology, selstr=selstr, deg=deg, cossin=cossin, periodic=periodic)
self.__add_feature(f)
def add_custom_feature(self, feature):
"""
Adds a custom feature to the feature list.
Parameters
----------
feature : object
an object with interface like CustomFeature (map, describe methods)
"""
if feature.dimension <= 0:
raise ValueError("Dimension has to be positive. "
"Please override dimension attribute in feature!")
if not hasattr(feature, 'map'):
raise ValueError("no map method in given feature")
else:
if not callable(getattr(feature, 'map')):
raise ValueError("map exists but is not a method")
self.__add_feature(feature)
def add_minrmsd_to_ref(self, ref, ref_frame=0, atom_indices=None, precentered=False):
r"""
Adds the minimum root-mean-square-deviation (minrmsd) with respect to a reference structure to the feature list.
Parameters
----------
ref:
Reference structure for computing the minrmsd. Can be of two types:
1. :py:obj:`mdtraj.Trajectory` object
2. filename for mdtraj to load. In this case, only the :py:obj:`ref_frame` of that file will be used.
ref_frame: integer, default=0
Reference frame of the filename specified in :py:obj:`ref`.
This parameter has no effect if :py:obj:`ref` is not a filename.
atom_indices: array_like, default=None
Atoms that will be used for:
1. aligning the target and reference geometries.
2. computing rmsd after the alignment.
If left to None, all atoms of :py:obj:`ref` will be used.
precentered: bool, default=False
Use this boolean at your own risk to let mdtraj know that the target conformations are already
centered at the origin, i.e., their (uniformly weighted) center of mass lies at the origin.
This will speed up the computation of the rmsd.
"""
from .misc import MinRmsdFeature
f = MinRmsdFeature(ref, ref_frame=ref_frame, atom_indices=atom_indices, topology=self.topology,
precentered=precentered)
self.__add_feature(f)
def add_custom_func(self, func, dim, *args, **kwargs):
""" adds a user defined function to extract features
Parameters
----------
func : function
a user-defined function, which accepts mdtraj.Trajectory object as
first parameter and as many optional and named arguments as desired.
Has to return a numpy.ndarray
dim : int
output dimension of :py:obj:`function`
args : any number of positional arguments
these have to be in the same order as :py:obj:`func` is expecting them
kwargs : dictionary
named arguments passed to func
"""
f = CustomFeature(func, dim=dim, *args, **kwargs)
self.add_custom_feature(f)
def dimension(self):
""" current dimension due to selected features
Returns
-------
dim : int
total dimension due to all selection features
"""
dim = sum(f.dimension for f in self.active_features)
return dim
def transform(self, traj):
"""
Maps an mdtraj Trajectory object to the selected output features
Parameters
----------
traj : mdtraj Trajectory
Trajectory object used as an input
Returns
-------
out : ndarray((T, n), dtype=float32)
Output features: For each of T time steps in the given trajectory,
a vector with all n output features selected.
"""
# if there are no features selected, return given trajectory
if len(self.active_features) == 0:
if not self._showed_warning_empty_feature_list:
warnings.warn("You have no features selected."
" Returning plain coordinates.")
self._showed_warning_empty_feature_list = True
s = traj.xyz.shape
new_shape = (s[0], s[1] * s[2])
return traj.xyz.reshape(new_shape)
# handle empty chunks (which might occur due to time lagged access
if traj.xyz.shape[0] == 0:
return np.empty((0, self.dimension()))
# otherwise build feature vector.
feature_vec = []
# TODO: consider parallel evaluation computation here, this effort is
# only worth it, if computation time dominates memory transfers
for f in self.active_features:
# perform sanity checks for custom feature input
if isinstance(f, CustomFeature):
# NOTE: casting=safe raises in numpy>=1.9
vec = f.transform(traj).astype(np.float32, casting='safe')
if vec.shape[0] == 0:
vec = np.empty((0, f.dimension))
if not isinstance(vec, np.ndarray):
raise ValueError('Your custom feature %s did not return'
' a numpy.ndarray!' % str(f.describe()))
if not vec.ndim == 2:
raise ValueError('Your custom feature %s did not return'
' a 2d array. Shape was %s'
% (str(f.describe()),
str(vec.shape)))
if not vec.shape[0] == traj.xyz.shape[0]:
raise ValueError('Your custom feature %s did not return'
' as many frames as it received!'
'Input was %i, output was %i'
% (str(f.describe()),
traj.xyz.shape[0],
vec.shape[0]))
else:
vec = f.transform(traj).astype(np.float32)
feature_vec.append(vec)
if len(feature_vec) > 1:
res = np.hstack(feature_vec)
else:
res = feature_vec[0]
return res
|
gph82/PyEMMA
|
pyemma/coordinates/data/featurization/featurizer.py
|
Python
|
lgpl-3.0
| 30,879
|
[
"MDTraj"
] |
49dc7204a2fb3d28b1d66bed6ea44ace927f8d09f42d9f001d7bcfc79ad3a925
|
from galaxy.managers import base as manager_base
class LDDAManager( manager_base.ModelManager ):
""" A fairly sparse manager for LDDAs.
"""
def __init__( self ):
"""
Set up and initialize other managers needed by lddas.
"""
pass
def get( self, trans, id, check_accessible=True ):
return manager_base.get_object( trans, id, 'LibraryDatasetDatasetAssociation',
check_ownership=False, check_accessible=check_accessible )
|
mikel-egana-aranguren/SADI-Galaxy-Docker
|
galaxy-dist/lib/galaxy/managers/lddas.py
|
Python
|
gpl-3.0
| 518
|
[
"Galaxy"
] |
4b54a33fb41741bb6224aa150894db633364138649df440007428d39e30e0dd4
|
# Orca
#
# Copyright 2006-2008 Sun Microsystems Inc.
#
# This library is free software; you can redistribute it and/or
# modify it under the terms of the GNU Lesser General Public
# License as published by the Free Software Foundation; either
# version 2.1 of the License, or (at your option) any later version.
#
# This library is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
# Lesser General Public License for more details.
#
# You should have received a copy of the GNU Lesser General Public
# License along with this library; if not, write to the
# Free Software Foundation, Inc., Franklin Street, Fifth Floor,
# Boston MA 02110-1301 USA.
"""Provides an HTTP server for Orca. This currently serves mainly as
something that self-voicing applications can use as their speech
service."""
__id__ = "$Id$"
__version__ = "$Revision$"
__date__ = "$Date$"
__copyright__ = "Copyright (c) 2006-2008 Sun Microsystems Inc."
__license__ = "LGPL"
import threading
import BaseHTTPServer
import debug
import orca_platform
import settings
import speech
_httpRequestThread = None
# Handlers for logging speech and braille output.
#
loggingFileHandlers = {}
loggingStreamHandlers = {}
class _HTTPRequestHandler(BaseHTTPServer.BaseHTTPRequestHandler):
"""Provides support for communicating with Orca via HTTP. This is
mainly to support self-voicing applications that want to use Orca
as a speech service.
The protocol is simple: POST content is 'stop', 'speak:<text>',
or 'isSpeaking'.
To test this, run:
wget --post-data='speak:hello world' localhost:20433
"""
def log_request(self, code=None, size=None):
"""Override to avoid getting a log message on stdout for
each GET, POST, etc. request"""
pass
def do_GET(self):
self.send_response(200)
self.send_header("Content-type", "text/html")
self.end_headers()
self.wfile.write("<html><body><p>Orca %s</p></body></html>" \
% orca_platform.version)
def do_POST(self):
contentLength = self.headers.getheader('content-length')
if contentLength:
contentLength = int(contentLength)
inputBody = self.rfile.read(contentLength)
debug.println(debug.LEVEL_FINEST,
"httpserver._HTTPRequestHandler received %s" \
% inputBody)
if inputBody.startswith("speak:"):
speech.speak(inputBody[6:])
self.send_response(200, 'OK')
elif inputBody == "stop":
speech.stop()
self.send_response(200, 'OK')
elif inputBody == "isSpeaking":
self.send_response(200, 'OK')
self.send_header("Content-type", "text/html")
self.end_headers()
self.wfile.write("%s" % speech.isSpeaking())
else:
debug.println(debug.LEVEL_FINEST,
"httpserver._HTTPRequestHandler received no data")
class _HTTPRequestThread(threading.Thread):
"""Runs a _HTTPRequestHandler in a separate thread."""
def run(self):
"""Try to start an HTTP server on settings.httpServerPort.
If this fails, retry settings.maxHttpServerRetries times,
each time incrementing the server port number by 1. If we
are still unable to start a server, just fail gracefully.
"""
portNo = settings.httpServerPort
connected = False
while not connected and \
(portNo < settings.httpServerPort + settings.maxHttpServerRetries):
try:
httpd = BaseHTTPServer.HTTPServer(('', portNo),
_HTTPRequestHandler)
connected = True
except:
if portNo == settings.httpServerPort:
debug.printException(debug.LEVEL_WARNING)
debug.println(debug.LEVEL_WARNING,
"httpserver._HTTPRequestThread unable to start server " \
"on port %d" % portNo)
portNo += 1
if not connected:
debug.println(debug.LEVEL_WARNING,
"httpserver._HTTPRequestThread server startup failed.")
else:
httpd.serve_forever()
def init():
    """Creates an HTTP server that listens for speak commands from a
    separate port defined by settings.httpServerPort.  We run this
    as a daemon so it will die automatically when orca dies."""
    global _httpRequestThread
    if settings.httpServerPort and (not _httpRequestThread):
        try:
            _httpRequestThread = _HTTPRequestThread()
            # Daemonize so the server thread cannot keep the process
            # alive on exit.  (The daemon attribute replaces the
            # deprecated setDaemon() and works on Python 2 and 3.)
            _httpRequestThread.daemon = True
            _httpRequestThread.start()
        # Narrowed from a bare "except:"; server startup is best-effort,
        # so log the failure and carry on without the HTTP server.
        except Exception:
            debug.printException(debug.LEVEL_WARNING)
def shutdown():
    """Stops the HTTP server.  [[[WDW - not implemented yet.]]]"""
    return None
|
Alberto-Beralix/Beralix
|
i386-squashfs-root/usr/share/pyshared/orca/httpserver.py
|
Python
|
gpl-3.0
| 5,107
|
[
"ORCA"
] |
356d67d915e45a058b3e20f617101c38246fab42fb4dcc54af998eca3affba62
|
import re
from pygments.lexers.theorem import IsabelleLexer
from pygments.lexer import RegexLexer, inherit, bygroups, words
from pygments.token import *
from . import encoding
__all__ = ['IsarLexer']
class IsarLexer(IsabelleLexer):
    # Pygments lexer for Isabelle/Isar theory text.  Extends the stock
    # IsabelleLexer with handling for text cartouches
    # (\<open>...\<close>), document antiquotations (@{...}) inside
    # them, and decoding of \<symbol> escapes into Unicode (see
    # isar_decode below).
    name = 'Isabelle/Isar'

    # Keywords that introduce a cartouche containing document text
    # rather than formal content.
    keyword_cartouche_text = ('text', 'txt', 'text_raw',
        'chapter', 'section', 'subsection', 'subsubsection',
        'paragraph', 'subparagraph',
        )

    tokens = {
        'root': [
            # e.g. "text %tag \<open>"; the cartouche body is lexed in
            # the 'cartouche-text' state.
            (words(keyword_cartouche_text, prefix=r'\b', suffix=r'(%\w+)?(\s*\\<open>)'), bygroups(Keyword, Comment.Preproc, Comment), 'cartouche-text'),
            # Line comment symbol.
            (r'\\<comment>.*$', Comment),
            # Proof-document tag, e.g. %invisible.
            (r'%\w+', Comment.Preproc),
            # Any other cartouche is lexed as a quoted fact.
            (r'\\<open>', String.Other, 'fact'),
            inherit,
        ],
        'cartouche-text': [
            (r'[^\\@]', Comment),
            # Document antiquotation @{name ...}.
            (r'(@\{)(\w+)', bygroups(String.Other, Keyword), 'antiquotation'),
            # Cartouches nest; push/pop the state accordingly.
            (r'\\<open>', Text, '#push'),
            (r'\\<close>', Comment, '#pop'),
            (r'\\<[\^\w]+>', Comment.Symbol),
            (r'\\', Comment),
        ],
        'antiquotation': [
            (r'[^\{\}\\]', Text),
            # Braces nest inside an antiquotation.
            (r'\{', String.Other, '#push'),
            (r'\}', String.Other, '#pop'),
            (r'\\<[\^\w]+>', String.Symbol),
            (r'\\', Text),
        ],
        'fact': [
            (r'\\<close>', String.Other, '#pop'),
            inherit,
        ],
    }

    def get_tokens_unprocessed(self, text):
        # Post-process the inherited token stream, replacing Isabelle
        # \<symbol> escapes in each token value with their Unicode
        # characters.
        for index, token, value in RegexLexer.get_tokens_unprocessed(self, text):
            value = isar_decode(value)
            yield index, token, value
def isar_decode(raw):
    """Replace Isabelle symbol escapes (e.g. "\\<alpha>") in *raw* with
    their Unicode characters according to the ~~/etc/symbols table.

    The table is parsed lazily from the module-level symbols_raw string
    on first use and cached in symbol_table.  Escapes not found in the
    table (or whose code point is >= 0x10000) are left unchanged.
    """
    global symbol_table
    if symbol_table is None:
        symbol_table = {}
        for line in symbols_raw.splitlines():
            if not line:
                continue
            # '#'-prefixed entries in the table are commented out.
            if re.match(r"^#", line):
                continue
            m = re.match(r"^(\\<.*>)\s+code:\s+0x([0-9a-f]+).*$", line)
            assert m, "Failed to parse " + line
            n = int(m.group(2), 16)
            # Only map code points in the Basic Multilingual Plane.
            if n < 0x10000:
                symbol_table[m.group(1)] = chr(n)
    if isinstance(raw, str):
        # presumably normalizes raw to a unicode string -- see the
        # project's encoding module.
        raw = encoding.get_unicode(raw)

    def repl(m):
        return symbol_table.get(m.group(0), m.group(0))

    # FIX: use [\^\w]+ rather than [\^a-zA-Z]+ so symbol names that
    # contain underscores (e.g. \<^theory_text>, which is present in the
    # table) are decoded too; this matches the pattern the lexer states
    # above use for symbols.
    return re.sub(r"\\<[\^\w]+>", repl, raw)
# ~~/etc/symbols from Isabelle2016
# Lazily populated by isar_decode() from the symbols_raw string below;
# maps "\<name>" escapes to their Unicode characters.
symbol_table = None
symbols_raw = """
\<zero> code: 0x01d7ec group: digit
\<one> code: 0x01d7ed group: digit
\<two> code: 0x01d7ee group: digit
\<three> code: 0x01d7ef group: digit
\<four> code: 0x01d7f0 group: digit
\<five> code: 0x01d7f1 group: digit
\<six> code: 0x01d7f2 group: digit
\<seven> code: 0x01d7f3 group: digit
\<eight> code: 0x01d7f4 group: digit
\<nine> code: 0x01d7f5 group: digit
\<A> code: 0x01d49c group: letter
\<B> code: 0x00212c group: letter
\<C> code: 0x01d49e group: letter
\<D> code: 0x01d49f group: letter
\<E> code: 0x002130 group: letter
\<F> code: 0x002131 group: letter
\<G> code: 0x01d4a2 group: letter
\<H> code: 0x00210b group: letter
\<I> code: 0x002110 group: letter
\<J> code: 0x01d4a5 group: letter
\<K> code: 0x01d4a6 group: letter
\<L> code: 0x002112 group: letter
\<M> code: 0x002133 group: letter
\<N> code: 0x01d4a9 group: letter
\<O> code: 0x01d4aa group: letter
\<P> code: 0x01d4ab group: letter
\<Q> code: 0x01d4ac group: letter
\<R> code: 0x00211b group: letter
\<S> code: 0x01d4ae group: letter
\<T> code: 0x01d4af group: letter
\<U> code: 0x01d4b0 group: letter
\<V> code: 0x01d4b1 group: letter
\<W> code: 0x01d4b2 group: letter
\<X> code: 0x01d4b3 group: letter
\<Y> code: 0x01d4b4 group: letter
\<Z> code: 0x01d4b5 group: letter
\<a> code: 0x01d5ba group: letter
\<b> code: 0x01d5bb group: letter
\<c> code: 0x01d5bc group: letter
\<d> code: 0x01d5bd group: letter
\<e> code: 0x01d5be group: letter
\<f> code: 0x01d5bf group: letter
\<g> code: 0x01d5c0 group: letter
\<h> code: 0x01d5c1 group: letter
\<i> code: 0x01d5c2 group: letter
\<j> code: 0x01d5c3 group: letter
\<k> code: 0x01d5c4 group: letter
\<l> code: 0x01d5c5 group: letter
\<m> code: 0x01d5c6 group: letter
\<n> code: 0x01d5c7 group: letter
\<o> code: 0x01d5c8 group: letter
\<p> code: 0x01d5c9 group: letter
\<q> code: 0x01d5ca group: letter
\<r> code: 0x01d5cb group: letter
\<s> code: 0x01d5cc group: letter
\<t> code: 0x01d5cd group: letter
\<u> code: 0x01d5ce group: letter
\<v> code: 0x01d5cf group: letter
\<w> code: 0x01d5d0 group: letter
\<x> code: 0x01d5d1 group: letter
\<y> code: 0x01d5d2 group: letter
\<z> code: 0x01d5d3 group: letter
\<AA> code: 0x01d504 group: letter
\<BB> code: 0x01d505 group: letter
\<CC> code: 0x00212d group: letter
\<DD> code: 0x01d507 group: letter
\<EE> code: 0x01d508 group: letter
\<FF> code: 0x01d509 group: letter
\<GG> code: 0x01d50a group: letter
\<HH> code: 0x00210c group: letter
\<II> code: 0x002111 group: letter
\<JJ> code: 0x01d50d group: letter
\<KK> code: 0x01d50e group: letter
\<LL> code: 0x01d50f group: letter
\<MM> code: 0x01d510 group: letter
\<NN> code: 0x01d511 group: letter
\<OO> code: 0x01d512 group: letter
\<PP> code: 0x01d513 group: letter
\<QQ> code: 0x01d514 group: letter
\<RR> code: 0x00211c group: letter
\<SS> code: 0x01d516 group: letter
\<TT> code: 0x01d517 group: letter
\<UU> code: 0x01d518 group: letter
\<VV> code: 0x01d519 group: letter
\<WW> code: 0x01d51a group: letter
\<XX> code: 0x01d51b group: letter
\<YY> code: 0x01d51c group: letter
\<ZZ> code: 0x002128 group: letter
\<aa> code: 0x01d51e group: letter
\<bb> code: 0x01d51f group: letter
\<cc> code: 0x01d520 group: letter
\<dd> code: 0x01d521 group: letter
\<ee> code: 0x01d522 group: letter
\<ff> code: 0x01d523 group: letter
\<gg> code: 0x01d524 group: letter
\<hh> code: 0x01d525 group: letter
\<ii> code: 0x01d526 group: letter
\<jj> code: 0x01d527 group: letter
\<kk> code: 0x01d528 group: letter
\<ll> code: 0x01d529 group: letter
\<mm> code: 0x01d52a group: letter
\<nn> code: 0x01d52b group: letter
\<oo> code: 0x01d52c group: letter
\<pp> code: 0x01d52d group: letter
\<qq> code: 0x01d52e group: letter
\<rr> code: 0x01d52f group: letter
\<ss> code: 0x01d530 group: letter
\<tt> code: 0x01d531 group: letter
\<uu> code: 0x01d532 group: letter
\<vv> code: 0x01d533 group: letter
\<ww> code: 0x01d534 group: letter
\<xx> code: 0x01d535 group: letter
\<yy> code: 0x01d536 group: letter
\<zz> code: 0x01d537 group: letter
\<alpha> code: 0x0003b1 group: greek
\<beta> code: 0x0003b2 group: greek
\<gamma> code: 0x0003b3 group: greek
\<delta> code: 0x0003b4 group: greek
\<epsilon> code: 0x0003b5 group: greek
\<zeta> code: 0x0003b6 group: greek
\<eta> code: 0x0003b7 group: greek
\<theta> code: 0x0003b8 group: greek
\<iota> code: 0x0003b9 group: greek
\<kappa> code: 0x0003ba group: greek
\<lambda> code: 0x0003bb group: greek abbrev: %
\<mu> code: 0x0003bc group: greek
\<nu> code: 0x0003bd group: greek
\<xi> code: 0x0003be group: greek
\<pi> code: 0x0003c0 group: greek
\<rho> code: 0x0003c1 group: greek
\<sigma> code: 0x0003c3 group: greek
\<tau> code: 0x0003c4 group: greek
\<upsilon> code: 0x0003c5 group: greek
\<phi> code: 0x0003c6 group: greek
\<chi> code: 0x0003c7 group: greek
\<psi> code: 0x0003c8 group: greek
\<omega> code: 0x0003c9 group: greek
\<Gamma> code: 0x000393 group: greek
\<Delta> code: 0x000394 group: greek
\<Theta> code: 0x000398 group: greek
\<Lambda> code: 0x00039b group: greek
\<Xi> code: 0x00039e group: greek
\<Pi> code: 0x0003a0 group: greek
\<Sigma> code: 0x0003a3 group: greek
\<Upsilon> code: 0x0003a5 group: greek
\<Phi> code: 0x0003a6 group: greek
\<Psi> code: 0x0003a8 group: greek
\<Omega> code: 0x0003a9 group: greek
\<bool> code: 0x01d539 group: letter
\<complex> code: 0x002102 group: letter
\<nat> code: 0x002115 group: letter
\<rat> code: 0x00211a group: letter
\<real> code: 0x00211d group: letter
\<int> code: 0x002124 group: letter
\<leftarrow> code: 0x002190 group: arrow abbrev: <.
\<longleftarrow> code: 0x0027f5 group: arrow abbrev: <.
\<longlongleftarrow> code: 0x00290e group: arrow abbrev: <.
\<longlonglongleftarrow> code: 0x0021e0 group: arrow abbrev: <.
\<rightarrow> code: 0x002192 group: arrow abbrev: .> abbrev: ->
\<longrightarrow> code: 0x0027f6 group: arrow abbrev: .> abbrev: -->
\<longlongrightarrow> code: 0x00290f group: arrow abbrev: .> abbrev: --->
\<longlonglongrightarrow> code: 0x0021e2 group: arrow abbrev: .> abbrev: --->
\<Leftarrow> code: 0x0021d0 group: arrow abbrev: <.
\<Longleftarrow> code: 0x0027f8 group: arrow abbrev: <.
\<Lleftarrow> code: 0x0021da group: arrow abbrev: <.
\<Rightarrow> code: 0x0021d2 group: arrow abbrev: .> abbrev: =>
\<Longrightarrow> code: 0x0027f9 group: arrow abbrev: .> abbrev: ==>
\<Rrightarrow> code: 0x0021db group: arrow abbrev: .>
\<leftrightarrow> code: 0x002194 group: arrow abbrev: <> abbrev: <->
\<longleftrightarrow> code: 0x0027f7 group: arrow abbrev: <> abbrev: <-> abbrev: <-->
\<Leftrightarrow> code: 0x0021d4 group: arrow abbrev: <>
\<Longleftrightarrow> code: 0x0027fa group: arrow abbrev: <>
\<mapsto> code: 0x0021a6 group: arrow abbrev: .> abbrev: |->
\<longmapsto> code: 0x0027fc group: arrow abbrev: .> abbrev: |-->
\<midarrow> code: 0x002500 group: arrow abbrev: <>
\<Midarrow> code: 0x002550 group: arrow abbrev: <>
\<hookleftarrow> code: 0x0021a9 group: arrow abbrev: <.
\<hookrightarrow> code: 0x0021aa group: arrow abbrev: .>
\<leftharpoondown> code: 0x0021bd group: arrow abbrev: <.
\<rightharpoondown> code: 0x0021c1 group: arrow abbrev: .>
\<leftharpoonup> code: 0x0021bc group: arrow abbrev: <.
\<rightharpoonup> code: 0x0021c0 group: arrow abbrev: .>
\<rightleftharpoons> code: 0x0021cc group: arrow abbrev: <> abbrev: ==
\<leadsto> code: 0x00219d group: arrow abbrev: .> abbrev: ~>
\<downharpoonleft> code: 0x0021c3 group: arrow
\<downharpoonright> code: 0x0021c2 group: arrow
\<upharpoonleft> code: 0x0021bf group: arrow
#\<upharpoonright> code: 0x0021be group: arrow
\<restriction> code: 0x0021be group: punctuation
\<Colon> code: 0x002237 group: punctuation
\<up> code: 0x002191 group: arrow
\<Up> code: 0x0021d1 group: arrow
\<down> code: 0x002193 group: arrow
\<Down> code: 0x0021d3 group: arrow
\<updown> code: 0x002195 group: arrow
\<Updown> code: 0x0021d5 group: arrow
\<langle> code: 0x0027e8 group: punctuation abbrev: <<
\<rangle> code: 0x0027e9 group: punctuation abbrev: >>
\<lceil> code: 0x002308 group: punctuation abbrev: [.
\<rceil> code: 0x002309 group: punctuation abbrev: .]
\<lfloor> code: 0x00230a group: punctuation abbrev: [.
\<rfloor> code: 0x00230b group: punctuation abbrev: .]
\<lparr> code: 0x002987 group: punctuation abbrev: (|
\<rparr> code: 0x002988 group: punctuation abbrev: |)
\<lbrakk> code: 0x0027e6 group: punctuation abbrev: [|
\<rbrakk> code: 0x0027e7 group: punctuation abbrev: |]
\<lbrace> code: 0x002983 group: punctuation abbrev: {|
\<rbrace> code: 0x002984 group: punctuation abbrev: |}
\<guillemotleft> code: 0x0000ab group: punctuation abbrev: <<
\<guillemotright> code: 0x0000bb group: punctuation abbrev: >>
\<bottom> code: 0x0022a5 group: logic
\<top> code: 0x0022a4 group: logic
\<and> code: 0x002227 group: logic abbrev: /\ abbrev: &
\<And> code: 0x0022c0 group: logic abbrev: !!
\<or> code: 0x002228 group: logic abbrev: \/ abbrev: |
\<Or> code: 0x0022c1 group: logic abbrev: ??
\<forall> code: 0x002200 group: logic abbrev: ! abbrev: ALL
\<exists> code: 0x002203 group: logic abbrev: ? abbrev: EX
\<nexists> code: 0x002204 group: logic abbrev: ~?
\<not> code: 0x0000ac group: logic abbrev: ~
\<box> code: 0x0025a1 group: logic
\<diamond> code: 0x0025c7 group: logic
\<diamondop> code: 0x0022c4 group: operator
\<turnstile> code: 0x0022a2 group: relation abbrev: |-
\<Turnstile> code: 0x0022a8 group: relation abbrev: |=
\<tturnstile> code: 0x0022a9 group: relation abbrev: |-
\<TTurnstile> code: 0x0022ab group: relation abbrev: |=
\<stileturn> code: 0x0022a3 group: relation abbrev: -|
\<surd> code: 0x00221a group: relation
\<le> code: 0x002264 group: relation abbrev: <=
\<ge> code: 0x002265 group: relation abbrev: >=
\<lless> code: 0x00226a group: relation abbrev: <<
\<ggreater> code: 0x00226b group: relation abbrev: >>
\<lesssim> code: 0x002272 group: relation
\<greatersim> code: 0x002273 group: relation
\<lessapprox> code: 0x002a85 group: relation
\<greaterapprox> code: 0x002a86 group: relation
\<in> code: 0x002208 group: relation abbrev: :
\<notin> code: 0x002209 group: relation abbrev: ~:
\<subset> code: 0x002282 group: relation
\<supset> code: 0x002283 group: relation
\<subseteq> code: 0x002286 group: relation abbrev: (=
\<supseteq> code: 0x002287 group: relation abbrev: )=
\<sqsubset> code: 0x00228f group: relation
\<sqsupset> code: 0x002290 group: relation
\<sqsubseteq> code: 0x002291 group: relation abbrev: [=
\<sqsupseteq> code: 0x002292 group: relation abbrev: ]=
\<inter> code: 0x002229 group: operator abbrev: Int
\<Inter> code: 0x0022c2 group: operator abbrev: Inter abbrev: INT
\<union> code: 0x00222a group: operator abbrev: Un
\<Union> code: 0x0022c3 group: operator abbrev: Union abbrev: UN
\<squnion> code: 0x002294 group: operator
\<Squnion> code: 0x002a06 group: operator abbrev: SUP
\<sqinter> code: 0x002293 group: operator
\<Sqinter> code: 0x002a05 group: operator abbrev: INF
\<setminus> code: 0x002216 group: operator
\<propto> code: 0x00221d group: operator
\<uplus> code: 0x00228e group: operator
\<Uplus> code: 0x002a04 group: operator
\<noteq> code: 0x002260 group: relation abbrev: ~=
\<sim> code: 0x00223c group: relation
\<doteq> code: 0x002250 group: relation abbrev: .=
\<simeq> code: 0x002243 group: relation
\<approx> code: 0x002248 group: relation
\<asymp> code: 0x00224d group: relation
\<cong> code: 0x002245 group: relation
\<smile> code: 0x002323 group: relation
\<equiv> code: 0x002261 group: relation abbrev: ==
\<frown> code: 0x002322 group: relation
\<Join> code: 0x0022c8
\<bowtie> code: 0x002a1d
\<prec> code: 0x00227a group: relation
\<succ> code: 0x00227b group: relation
\<preceq> code: 0x00227c group: relation
\<succeq> code: 0x00227d group: relation
\<parallel> code: 0x002225 group: punctuation abbrev: ||
\<bar> code: 0x0000a6 group: punctuation abbrev: ||
\<plusminus> code: 0x0000b1 group: operator
\<minusplus> code: 0x002213 group: operator
\<times> code: 0x0000d7 group: operator abbrev: <*>
\<div> code: 0x0000f7 group: operator
\<cdot> code: 0x0022c5 group: operator
\<star> code: 0x0022c6 group: operator
\<bullet> code: 0x002219 group: operator
\<circ> code: 0x002218 group: operator
\<dagger> code: 0x002020
\<ddagger> code: 0x002021
\<lhd> code: 0x0022b2 group: relation
\<rhd> code: 0x0022b3 group: relation
\<unlhd> code: 0x0022b4 group: relation
\<unrhd> code: 0x0022b5 group: relation
\<triangleleft> code: 0x0025c3 group: relation
\<triangleright> code: 0x0025b9 group: relation
\<triangle> code: 0x0025b3 group: relation
\<triangleq> code: 0x00225c group: relation
\<oplus> code: 0x002295 group: operator
\<Oplus> code: 0x002a01 group: operator
\<otimes> code: 0x002297 group: operator
\<Otimes> code: 0x002a02 group: operator
\<odot> code: 0x002299 group: operator
\<Odot> code: 0x002a00 group: operator
\<ominus> code: 0x002296 group: operator
\<oslash> code: 0x002298 group: operator
\<dots> code: 0x002026 group: punctuation abbrev: ...
\<cdots> code: 0x0022ef group: punctuation
\<Sum> code: 0x002211 group: operator abbrev: SUM
\<Prod> code: 0x00220f group: operator abbrev: PROD
\<Coprod> code: 0x002210 group: operator
\<infinity> code: 0x00221e
\<integral> code: 0x00222b group: operator
\<ointegral> code: 0x00222e group: operator
\<clubsuit> code: 0x002663
\<diamondsuit> code: 0x002662
\<heartsuit> code: 0x002661
\<spadesuit> code: 0x002660
\<aleph> code: 0x002135
\<emptyset> code: 0x002205
\<nabla> code: 0x002207
\<partial> code: 0x002202
\<flat> code: 0x00266d
\<natural> code: 0x00266e
\<sharp> code: 0x00266f
\<angle> code: 0x002220
\<copyright> code: 0x0000a9
\<registered> code: 0x0000ae
\<hyphen> code: 0x0000ad group: punctuation
\<inverse> code: 0x0000af group: punctuation
\<onequarter> code: 0x0000bc group: digit
\<onehalf> code: 0x0000bd group: digit
\<threequarters> code: 0x0000be group: digit
\<ordfeminine> code: 0x0000aa
\<ordmasculine> code: 0x0000ba
\<section> code: 0x0000a7
\<paragraph> code: 0x0000b6
\<exclamdown> code: 0x0000a1
\<questiondown> code: 0x0000bf
\<euro> code: 0x0020ac
\<pounds> code: 0x0000a3
\<yen> code: 0x0000a5
\<cent> code: 0x0000a2
\<currency> code: 0x0000a4
\<degree> code: 0x0000b0
\<amalg> code: 0x002a3f group: operator
\<mho> code: 0x002127 group: operator
\<lozenge> code: 0x0025ca
\<wp> code: 0x002118
\<wrong> code: 0x002240 group: relation
\<acute> code: 0x0000b4
\<index> code: 0x000131
\<dieresis> code: 0x0000a8
\<cedilla> code: 0x0000b8
\<hungarumlaut> code: 0x0002dd
\<bind> code: 0x00291c abbrev: >>=
\<then> code: 0x002aa2 abbrev: >>
\<some> code: 0x0003f5
\<hole> code: 0x002311
\<newline> code: 0x0023ce
\<comment> code: 0x002015 group: document font: IsabelleText
\<open> code: 0x002039 group: punctuation font: IsabelleText abbrev: <<
\<close> code: 0x00203a group: punctuation font: IsabelleText abbrev: >>
\<here> code: 0x002302 font: IsabelleText
\<^undefined> code: 0x002756 font: IsabelleText
\<^noindent> code: 0x0021e4 group: document font: IsabelleText
\<^smallskip> code: 0x002508 group: document font: IsabelleText
\<^medskip> code: 0x002509 group: document font: IsabelleText
\<^bigskip> code: 0x002501 group: document font: IsabelleText
\<^item> code: 0x0025aa group: document font: IsabelleText
\<^enum> code: 0x0025b8 group: document font: IsabelleText
\<^descr> code: 0x0027a7 group: document font: IsabelleText
\<^footnote> code: 0x00204b group: document font: IsabelleText
\<^verbatim> code: 0x0025a9 group: document font: IsabelleText
\<^theory_text> code: 0x002b1a group: document font: IsabelleText
\<^emph> code: 0x002217 group: document font: IsabelleText
\<^bold> code: 0x002759 group: control group: document font: IsabelleText
\<^sub> code: 0x0021e9 group: control font: IsabelleText
\<^sup> code: 0x0021e7 group: control font: IsabelleText
\<^bsub> code: 0x0021d8 group: control_block font: IsabelleText abbrev: =_(
\<^esub> code: 0x0021d9 group: control_block font: IsabelleText abbrev: =_)
\<^bsup> code: 0x0021d7 group: control_block font: IsabelleText abbrev: =^(
\<^esup> code: 0x0021d6 group: control_block font: IsabelleText abbrev: =^)
"""
|
KITPraktomatTeam/Praktomat
|
src/utilities/isar_lexer.py
|
Python
|
gpl-2.0
| 24,376
|
[
"Bowtie"
] |
61c5a9afbe1245838a7e7f294fb623705327ad256a0d1dac8550c029e047e0c9
|
import os
from os.path import join as pjoin
import numpy as np
import h5py
from gps_viewer import read_gps_fields
from WGS84toENU import WGS84toECEF, WGS84toENU
from pipeline_config import EXPORT_STEP, EXPORT_START, MAPPING_PATH, ICP_ITERS, ICP_MAX_DIST, NUM_CPUS
from graphslam_config import MATCH_JSON_DATA, CHUNK_SIZE, GRAPHSLAM_CHUNK_DIR, GRAPHSLAM_ALIGN_DIR, REALIGN_EVERY
from pipeline_utils import print_and_call, dset_dir_from_rss
from joblib import Parallel, delayed
'''
For every alignment, we need to create two small chunks of the
full maps that we want to align. These chunks are selected by
looking at the best NN matches.
We compute a new alignment every REALIGN_EVERY steps
Since the chunks are stored relative to IMU 0, we need to first
transform them by the global position of IMU 0
'''
def get_ecef0(gps_file):
    """Return the ECEF coordinates of the first GPS fix in gps_file."""
    fields = read_gps_fields(gps_file, ['lat', 'long', 'height'])
    llh = np.array(fields, dtype=np.float64).T
    return WGS84toECEF(llh)[:, 0]
def get_enu0(gps_file, gps_ref_file):
    """Return the ENU coordinates of the first fix in gps_file, relative
    to the first fix in gps_ref_file."""
    llh = np.array(read_gps_fields(gps_file, ['lat', 'long', 'height']),
                   dtype=np.float64).T
    llh_ref = np.array(read_gps_fields(gps_ref_file, ['lat', 'long', 'height']),
                       dtype=np.float64).T
    return WGS84toENU(llh_ref[0, :], llh)[:, 0]
def vtk_filename(pcd_file):
    """Return pcd_file's path with its extension replaced by '.vtk'."""
    base, _ = os.path.splitext(pcd_file)
    return base + '.vtk'
# Helper function for chunk_and_align_all (run as a joblib worker).
def chunk_and_align(start1, start2, enu1, enu2, rss1, rss2, pcd_dir1, pcd_dir2, chunk_num, debug=False):
    """Merge one chunk from each of two runs, shift run 1 into run 2's
    frame, and ICP-align the two merged clouds.

    start1, start2: first frame index of the chunk in each run.
    enu1, enu2: ENU position of each run's IMU-0 origin (same reference).
    rss1, rss2: sequences of identifier strings, joined with '--' to
        build output file names.
    pcd_dir1, pcd_dir2: directories holding per-frame "<idx>.pcd" clouds.
    chunk_num: sequence number embedded in the output file names.
    debug: additionally export VTK files for visual inspection.

    Side effects only: shells out to PCL tools and to align_clouds; the
    alignment result lands in an .h5 file under GRAPHSLAM_ALIGN_DIR.
    """
    chunk1_files = list()
    chunk2_files = list()
    # Collect CHUNK_SIZE consecutive per-frame clouds from each run.
    for k in range(0, CHUNK_SIZE):
        ind1 = start1 + k
        chunk1_files.append('%s/%d.pcd' % (pcd_dir1, ind1))
        assert os.path.exists(chunk1_files[-1])
        ind2 = start2 + k
        chunk2_files.append('%s/%d.pcd' % (pcd_dir2, ind2))
        assert os.path.exists(chunk2_files[-1])
    merged_chunks1 = '%s/%s' % (GRAPHSLAM_CHUNK_DIR, '--'.join(rss1) + '+' + '--'.join(rss2) + '%d_1.pcd' % chunk_num)
    merged_chunks2 = '%s/%s' % (GRAPHSLAM_CHUNK_DIR, '--'.join(rss1) + '+' + '--'.join(rss2) + '%d_2.pcd' % chunk_num)
    # Concatenate each chunk's frames into a single cloud.
    cmd = 'concatenate_points_pcd %s %s' % (' '.join(chunk1_files), merged_chunks1)
    print_and_call(cmd)
    cmd = 'concatenate_points_pcd %s %s' % (' '.join(chunk2_files), merged_chunks2)
    print_and_call(cmd)
    # Translate chunk 1 by the offset between the two runs' origins
    # (presumably expressing it relative to run 2's origin -- the
    # commented-out alternative shifted both to a common frame instead).
    cmd = 'transform_point_cloud %s %s -trans %f,%f,%f' % (merged_chunks1, merged_chunks1, enu1[0]-enu2[0], enu1[1]-enu2[1], enu1[2]-enu2[2])
    print_and_call(cmd)
    #cmd = 'transform_point_cloud %s %s -trans %f,%f,%f' % (merged_chunks2, merged_chunks2, -1*enu2[0], -1*enu2[1], -1*enu2[2])
    #print_and_call(cmd)
    # Generate VTK files so we can easily visualize to debug
    if debug:
        cmd = 'pcl_pcd2vtk %s %s' % (merged_chunks1, vtk_filename(merged_chunks1))
        print_and_call(cmd)
        cmd = 'pcl_pcd2vtk %s %s' % (merged_chunks2, vtk_filename(merged_chunks2))
        print_and_call(cmd)
    # Finally perform the ICP alignment; results are written to h5f.
    reg = '%s/bin/align_clouds' % MAPPING_PATH
    h5f = '%s/%s' % (GRAPHSLAM_ALIGN_DIR, '--'.join(rss1) + '+' + '--'.join(rss2) + '--%d' % chunk_num + '.h5')
    cmd = '{reg} --pcd_tgt {tgt} --pcd_src {src} --h5_file {h5f} --icp_iters {iters} --max_dist {dist}'.format(
        reg=reg, tgt=merged_chunks1, src=merged_chunks2, h5f=h5f, iters=ICP_ITERS, dist=ICP_MAX_DIST)
    print_and_call(cmd)
def get_closest_key_value(k, d, max_shift=5):
    """Return d[k'] for the key k' nearest to k, probing outward.

    Probes k-1, k+1, k-2, k+2, ... until a key of d is hit.  Raises
    AssertionError once the probe offset reaches max_shift without a
    match.
    """
    shift = -1
    while k not in d:
        k = k + shift
        # Next offset: magnitude + 1, opposite sign.  (Replaces the
        # Python-2-only cmp() builtin; shift is never 0 here, so the
        # sign expression is exactly cmp(shift, 0).)
        shift = -(abs(shift) + 1) * (1 if shift > 0 else -1)
        assert abs(shift) < max_shift, 'Index %d shift %d' % (k, shift)
    return d[k]
def chunk_and_align_all(d):
    """Queue up and run, in parallel, all chunk alignments for one
    matched pair of runs.

    d: one entry of MATCH_JSON_DATA; uses keys 'match_file' (h5 file of
    NN frame matches), 'rss1'/'rss2' (run identifiers) and
    'gps_file1'/'gps_file2'.

    NOTE: this is Python 2 code (print statement below); the '/'
    divisions of frame indices by EXPORT_STEP rely on integer division.
    """
    print d['match_file']
    rss1 = d['rss1']
    rss2 = d['rss2']
    pcd_dir1 = pjoin(dset_dir_from_rss(rss1), 'pcd_downsampled_normals')
    pcd_dir2 = pjoin(dset_dir_from_rss(rss2), 'pcd_downsampled_normals')
    # Read and save initial transform files: ENU origins of both runs,
    # each relative to run 1's first GPS fix.
    enu1 = get_enu0(d['gps_file1'], d['gps_file1'])
    enu2 = get_enu0(d['gps_file2'], d['gps_file1'])
    h5f = h5py.File(d['match_file'], 'r')
    nn_matches = h5f['matches'][...]
    # Maps values in column 1 to values in column 0 of the match table
    # (presumably run-1 frame -> best-matching run-2 frame; confirm
    # against the producer of match_file).
    nn_dict = dict(zip(nn_matches[:, 1], nn_matches[:, 0]))
    h5f.close()
    assert EXPORT_START == 0
    start1 = nn_matches[0, 1] / EXPORT_STEP
    args_all = list()
    chunk_num = 0
    # One alignment chunk every REALIGN_EVERY exported frames.
    for k in range(start1, nn_matches[-1, 1] / EXPORT_STEP - CHUNK_SIZE, REALIGN_EVERY):
        #def chunk_and_align(start1, start2, enu1, enu2, rss1, rss2, pcd_dir1, pcd_dir2, chunk_num):
        try:
            k2 = get_closest_key_value(k * EXPORT_STEP, nn_dict, max_shift=10)
        except:
            # TODO Think this sometimes occurs near end of alignments
            break
        args_all.append((k, k2 / EXPORT_STEP, enu1, enu2, rss1, rss2, pcd_dir1, pcd_dir2, chunk_num))
        chunk_num += 1
    Parallel(n_jobs=NUM_CPUS)(delayed(chunk_and_align)(*args) for args in args_all)
    # For debugging
    '''
    for args in args_all:
        _, _, _, _, rss1, rss2, _, _, chunk_num = args
        h5f = '%s/%s' % (GRAPHSLAM_ALIGN_DIR, '--'.join(rss1) + '+' + '--'.join(rss2) + '--%d' % chunk_num + '.h5')
        if os.path.exists(h5f):
            continue
        chunk_and_align(*args)
    '''
if __name__ == '__main__':
    # Run all chunk alignments for every configured pair of runs.
    for d in MATCH_JSON_DATA:
        chunk_and_align_all(d)
|
sameeptandon/sail-car-log
|
mapping/sandbox/graphslam/chunk_and_align.py
|
Python
|
bsd-2-clause
| 5,357
|
[
"VTK"
] |
414b8c0d82019578c57f08a0083dc05df46e79e997a55ce50c75a211508ceb39
|
#!/usr/bin/env python3
# -*- coding=utf-8 -*-
"""
cry2cif\n\n
Read the last geometry corresponding to the CRYSTALLOGRAPHIC CELL on a
CRYSTAL09 output file and print it in a cif format. If geometry
optimization did not converge, input geometry is printed instead.
"""
# TODO:
# * returns coordinates instead of write a file
# * make functions for various formats
__author__ = "Germain Vallverdu"
__email__ = "germain.vallverdu@univ-pau.fr"
__licence__ = "GPL"
import os
import argparse
from pymatgen import Structure, Lattice
from crystalio import CrystalOutfile
def get_options():
    """Parse the command line and return the argparse namespace."""
    p = argparse.ArgumentParser(prog="cry2cif", description=__doc__)

    # CRYSTAL output file to read (mandatory positional argument).
    p.add_argument("filename",
                   help="CRYSTAL output file",
                   metavar="FILENAME",
                   type=str)

    # Output format: cif (default) or VASP POSCAR.
    p.add_argument("-t", "--to",
                   help="output format: either cif or VASP (POSCAR)",
                   metavar="format",
                   default="cif",
                   choices=("cif", "vasp"),
                   type=str)

    # Re-center a slab or nanotube inside the box.
    p.add_argument("-i", "--center",
                   help="move the slab or nanotubes in the center of the box",
                   action="store_true",
                   dest="center",
                   default=False)

    # Which structure to extract (default: the last one).
    p.add_argument("-n", "--num_structure",
                   help="Structure number to be extracted (default, the last)",
                   metavar="N",
                   default=-1,
                   type=int)

    # Atom sorting along z (slabs) or x (nanotubes).
    p.add_argument("-z", "--sortz",
                   help="Sort atoms along z axis (for slabs)",
                   dest="sortz",
                   action="store_true",
                   default=False)
    p.add_argument("-x", "--sortx",
                   help="Sort atoms along x axis (for nanotubes)",
                   dest="sortx",
                   action="store_true",
                   default=False)

    # Dummy lattice parameters for slabs and nanotubes.
    p.add_argument("-b",
                   help="lattice parameter b",
                   metavar="b",
                   default=50,
                   type=float)
    p.add_argument("-c",
                   help="lattice parameter c",
                   metavar="c",
                   default=50,
                   type=float)

    return p.parse_args()
def cry2cif(filename, to="cif", center=False, sortx=False, sortz=False,
            b_dum=50, c_dum=50, istruct=-1):
    """
    Read a CRYSTAL output file and write the structure in a cif or POSCAR file.

    Args:
        filename (str): crystal output filename
        to (str): 'cif' or 'vasp', format of the output file (default is cif)
        center (bool): if True, the slab or nanotube is translated to the center of
            the box (default is False)
        sortx (bool): Nanotube: if True, atoms are sorted along x axes (default is False).
        sortz (bool): Slab: if True, atoms are sorted along z axes (default is False).
        b_dum (float): dummy lattice parameter b in angstrom for nanotubes (default 50 A)
        c_dum (float): dummy lattice parameter c in angstrom for nanotubes and slabs (default 50 A)
        istruct (int): structure to be extracted (-1 selects the final structure)
    """
    cryout = CrystalOutfile(filename)
    print("title : ", cryout.title)
    if cryout.group:
        print("group : ", cryout.group)
    # print("Number of structure read: ", len(cryout.structures))
    if istruct == -1:
        print("structure : Final structure")
        structure = cryout.final_structure
    else:
        print("structure : Structure %d" % istruct)
        structure = cryout.get_structure(istruct)
    print("# atom : ", structure.num_sites)
    print("composition: ", structure.composition)
    print("Cell parameters:")
    print("a : %10.4f" % structure.lattice.a)
    print("b : %10.4f" % structure.lattice.b)
    print("c : %10.4f" % structure.lattice.c)
    print("alpha : %10.4f" % structure.lattice.alpha)
    print("beta : %10.4f" % structure.lattice.beta)
    print("gamma : %10.4f" % structure.lattice.gamma)
    # ----------------------------------------------------------
    # New b and c axes: rescale fractional coordinates so cartesian
    # positions are preserved when a dummy axis length is replaced.
    # ----------------------------------------------------------
    if cryout.slab:
        frac_coords = structure.frac_coords
        frac_coords[:, 2] *= structure.lattice.c / c_dum
        matrix = structure.lattice.matrix.copy()
        matrix[2, 2] = c_dum
        structure = Structure(Lattice(matrix), structure.species, frac_coords)
    if cryout.nanotube:
        frac_coords = structure.frac_coords
        # NOTE(review): axis 1 is rescaled by c/c_dum and axis 2 by
        # b/b_dum, while below matrix[1, 1] = b_dum and
        # matrix[2, 2] = c_dum.  This looks swapped relative to the slab
        # branch -- confirm against CRYSTAL's nanotube axis convention
        # before changing anything.
        frac_coords[:, 1] *= structure.lattice.c / c_dum
        frac_coords[:, 2] *= structure.lattice.b / b_dum
        matrix = structure.lattice.matrix.copy()
        matrix[1, 1] = b_dum
        matrix[2, 2] = c_dum
        structure = Structure(Lattice(matrix), structure.species, frac_coords)
    # ----------------------------------------------------------
    # move slab or nanotube to the center of the box
    # ----------------------------------------------------------
    if center:
        if cryout.slab:
            coords = structure.frac_coords.copy()
            coords[:, 2] += .5
            structure = Structure(structure.lattice, structure.species, coords)
        elif cryout.nanotube:
            coords = structure.frac_coords
            coords += .5
            structure = Structure(structure.lattice, structure.species, coords)
    # ----------------------------------------------------------
    # sort atoms along the x or z axis (nanotube / slab)
    # ----------------------------------------------------------
    if sortz:
        isort = 2
    elif sortx:
        isort = 0
    axes = {2: "z", 0: "x"}
    if sortz or sortx:
        print("\nSort atoms along %s" % axes[isort])
        # Sort by decreasing fractional coordinate on the chosen axis.
        data = zip(structure.species, structure.frac_coords)
        data = sorted(data, key=lambda d: d[-1][isort], reverse=True)
        species = [d[0] for d in data]
        coords = [d[1] for d in data]
        structure = Structure(structure.lattice, species, coords)
    # ----------------------------------------------------------
    # export in the given format
    # ----------------------------------------------------------
    basename, _ = os.path.splitext(filename)
    if to.lower() == "cif":
        ext = ".cif"
    elif to.lower() == "vasp":
        to = "POSCAR"
        ext = ".vasp"
    else:
        # NOTE(review): unreachable via get_options() (choices restrict
        # --to to cif/vasp) and identical to the vasp branch anyway.
        to = "POSCAR"
        ext = ".vasp"
    structure.to(to, filename=basename + ext)
if __name__ == "__main__":
# get arguments
args = vars(get_options())
# rename some args
args["b_dum"] = args.pop("b")
args["c_dum"] = args.pop("c")
args["istruct"] = args.pop("num_structure")
# call main program
cry2cif(**args)
|
gVallverdu/myScripts
|
CRYSTAL/cry2cif.py
|
Python
|
gpl-2.0
| 7,246
|
[
"CRYSTAL",
"VASP",
"pymatgen"
] |
9f17d1ca622a9d8ac35c6fbed0c90143cc262316cfc129f48e071549c112913e
|
from __future__ import print_function
from __future__ import division
from __future__ import unicode_literals
import numpy as np
from rdkit import Chem
import itertools, operator
from deepchem.feat import Featurizer
from deepchem.feat.mol_graphs import ConvMol
def one_of_k_encoding(x, allowable_set):
    """One-hot encode x against allowable_set; raise if x is unknown."""
    if x not in allowable_set:
        raise Exception(
            "input {0} not in allowable set{1}:".format(x, allowable_set))
    return [x == item for item in allowable_set]
def one_of_k_encoding_unk(x, allowable_set):
    """One-hot encode x; inputs not in the set map to the last element."""
    if x not in allowable_set:
        x = allowable_set[-1]
    return [x == item for item in allowable_set]
def get_intervals(l):
    """Return positional multipliers for mixed-radix feature packing.

    out[0] is 1 and out[k] = (len(l[k]) + 1) * out[k-1]; the +1 leaves
    room for the extra "unknown" index produced by safe_index.
    """
    n = len(l)
    out = n * [0]
    out[0] = 1  # raises IndexError for an empty input, as before
    for pos in range(1, n):
        out[pos] = (len(l[pos]) + 1) * out[pos - 1]
    return out
def safe_index(l, e):
    """Return the index of e in l, or len(l) if e is not present.

    The fall-through value len(l) is the "unknown" slot that
    get_intervals reserves for each feature list.
    """
    try:
        return l.index(e)
    # Narrowed from a bare "except:" -- list.index raises ValueError
    # when the element is absent; anything else should propagate.
    except ValueError:
        return len(l)
# Atom-feature vocabularies used by get_feature_list / atom_to_id.
# Lookups go through safe_index, so any value outside a list maps to the
# extra "unknown" slot at index len(list).
possible_atom_list = ['C', 'N', 'O', 'S', 'F', 'P', 'Cl', 'Mg', 'Na', 'Br',
                      'Fe', 'Ca', 'Cu', 'Mc', 'Pd', 'Pb',
                      # NOTE(review): 'Mc' (moscovium) looks out of place
                      # among these elements -- possibly a typo; confirm.
                      'K', 'I', 'Al', 'Ni', 'Mn']
possible_numH_list = [0, 1, 2, 3, 4]
possible_valence_list = [0, 1, 2, 3, 4, 5, 6]
possible_formal_charge_list = [-3, -2, -1, 0, 1, 2, 3]
possible_hybridization_list = [Chem.rdchem.HybridizationType.SP,
                               Chem.rdchem.HybridizationType.SP2,
                               Chem.rdchem.HybridizationType.SP3,
                               Chem.rdchem.HybridizationType.SP3D,
                               Chem.rdchem.HybridizationType.SP3D2]
possible_number_radical_e_list = [0, 1, 2]
# Order must match the feature order filled in by get_feature_list.
reference_lists = [possible_atom_list, possible_numH_list,
                   possible_valence_list, possible_formal_charge_list,
                   possible_number_radical_e_list, possible_hybridization_list]
# Mixed-radix multipliers used to pack a feature list into a single id.
intervals = get_intervals(reference_lists)
def get_feature_list(atom):
    """Return the six raw feature indices for an RDKit atom, in the
    order fixed by reference_lists."""
    return [
        safe_index(possible_atom_list, atom.GetSymbol()),
        safe_index(possible_numH_list, atom.GetTotalNumHs()),
        safe_index(possible_valence_list, atom.GetImplicitValence()),
        safe_index(possible_formal_charge_list, atom.GetFormalCharge()),
        safe_index(possible_number_radical_e_list,
                   atom.GetNumRadicalElectrons()),
        safe_index(possible_hybridization_list, atom.GetHybridization()),
    ]
def features_to_id(features, intervals):
    """Convert list of features into index using spacings provided in intervals."""
    total = 0
    # Weighted sum: each feature index scaled by its interval spacing.
    for k, spacing in enumerate(intervals):
        total += features[k] * spacing
    # Allow 0 index to correspond to null molecule 1
    return total + 1
def id_to_features(id, intervals):
    """Invert features_to_id: recover the six feature indices from an id."""
    features = [0] * 6
    # Correct for null
    remainder = id - 1
    # Peel features off from the largest spacing down to index 1.
    for position in range(5, 0, -1):
        features[position], remainder = divmod(remainder, intervals[position])
    # Whatever remains is the first feature (its spacing is 1).
    features[0] = remainder
    return features
def atom_to_id(atom):
    """Return a unique id corresponding to the atom type"""
    # Encode the atom's six discrete features, then collapse them into a
    # single integer using the module-level interval spacings.
    return features_to_id(get_feature_list(atom), intervals)
def atom_features(atom, bool_id_feat=False):
    """Featurize an RDKit atom.

    When ``bool_id_feat`` is True, returns a length-1 array holding the
    atom's unique type id.  Otherwise returns the concatenated
    one-hot/boolean feature vector: symbol, degree, total H count,
    implicit valence, formal charge, radical electrons, hybridization,
    and aromaticity.
    """
    if bool_id_feat:
        return np.array([atom_to_id(atom)])
    symbol_choices = [
        'C', 'N', 'O', 'S', 'F', 'Si', 'P', 'Cl', 'Br', 'Mg', 'Na',
        'Ca', 'Fe', 'As', 'Al', 'I', 'B', 'V', 'K', 'Tl', 'Yb',
        'Sb', 'Sn', 'Ag', 'Pd', 'Co', 'Se', 'Ti', 'Zn', 'H',  # H?
        'Li', 'Ge', 'Cu', 'Au', 'Ni', 'Cd', 'In', 'Mn', 'Zr',
        'Cr', 'Pt', 'Hg', 'Pb', 'Unknown']
    hybridization_choices = [
        Chem.rdchem.HybridizationType.SP,
        Chem.rdchem.HybridizationType.SP2,
        Chem.rdchem.HybridizationType.SP3,
        Chem.rdchem.HybridizationType.SP3D,
        Chem.rdchem.HybridizationType.SP3D2]
    # Concatenation order defines the feature layout; keep it stable.
    feature_vector = (
        one_of_k_encoding_unk(atom.GetSymbol(), symbol_choices)
        + one_of_k_encoding(atom.GetDegree(),
                            [0, 1, 2, 3, 4, 5, 6, 7, 8, 9, 10])
        + one_of_k_encoding_unk(atom.GetTotalNumHs(), [0, 1, 2, 3, 4])
        + one_of_k_encoding_unk(atom.GetImplicitValence(),
                                [0, 1, 2, 3, 4, 5, 6])
        + [atom.GetFormalCharge(), atom.GetNumRadicalElectrons()]
        + one_of_k_encoding_unk(atom.GetHybridization(),
                                hybridization_choices)
        + [atom.GetIsAromatic()])
    return np.array(feature_vector)
def bond_features(bond):
    """Return a 6-element boolean array describing a bond.

    Entries: single / double / triple / aromatic bond-type flags, then
    conjugation and ring-membership flags.
    """
    bond_type = bond.GetBondType()
    flags = [
        bond_type == Chem.rdchem.BondType.SINGLE,
        bond_type == Chem.rdchem.BondType.DOUBLE,
        bond_type == Chem.rdchem.BondType.TRIPLE,
        bond_type == Chem.rdchem.BondType.AROMATIC,
        bond.GetIsConjugated(),
        bond.IsInRing(),
    ]
    return np.array(flags)
class ConvMolFeaturizer(Featurizer):
    """Featurizer that encodes an RDKit molecule as a ConvMol graph."""
    name = ['conv_mol']

    def __init__(self):
        # ConvMol instances are Python objects, not numeric arrays, so the
        # featurizer output dtype must be object.
        self.dtype = object

    def _featurize(self, mol):
        """Encodes mol as a ConvMol object."""
        # Per-atom feature rows keyed by atom index, sorted so row order
        # matches RDKit's atom ordering.
        indexed_rows = [(atom.GetIdx(), atom_features(atom))
                        for atom in mol.GetAtoms()]
        indexed_rows.sort()
        _, feature_rows = zip(*indexed_rows)
        node_matrix = np.vstack(feature_rows)
        # Undirected adjacency list: record each bond in both directions.
        neighbors = [[] for _ in range(len(node_matrix))]
        for bond in mol.GetBonds():
            start, end = bond.GetBeginAtomIdx(), bond.GetEndAtomIdx()
            neighbors[start].append(end)
            neighbors[end].append(start)
        return ConvMol(node_matrix, neighbors)
|
bowenliu16/deepchem
|
deepchem/feat/graph_features.py
|
Python
|
gpl-3.0
| 5,773
|
[
"RDKit"
] |
3db79ed9e095690295e0992c0c4bdeb6ffc10f0a76c2912a83a6ed48e5f5623a
|
from django.conf import settings
from django.db import models
from django.utils import timezone
from patients.models import Patient
class BaseActe(models.Model):
    """
    Abstract base class for the different actions ("actes") recorded
    by a user about a patient.
    """
    # The patient this acte belongs to; deleting the patient cascades.
    patient = models.ForeignKey(
        Patient, related_name="%(class)ss", on_delete=models.CASCADE)
    created = models.DateTimeField(auto_now_add=True)
    modified = models.DateTimeField(default=timezone.now)
    # Authoring user; PROTECT prevents deleting a user who owns actes.
    owner = models.ForeignKey(
        settings.AUTH_USER_MODEL,
        related_name="%(class)ss",
        on_delete=models.PROTECT)

    class Meta:
        abstract = True

    def save(self, *args, **kwargs):
        """Refresh the modification timestamp on every save."""
        self.modified = timezone.now()
        # Bug fix: forward *args/**kwargs (e.g. using=, update_fields=)
        # to Django's save() — they were previously dropped.
        super().save(*args, **kwargs)
class Observation(BaseActe):
    """
    A small text written by a user about a patient.

    motif : purpose of the visit; can't be blank — this is the minimal
    piece of information a user should enter.
    """
    # Short required label for the visit (max 40 chars).
    motif = models.CharField(max_length=40, blank=False)
    # Optional free-text body of the observation.
    body = models.TextField(blank=True)

    def __str__(self):
        # Use the visit purpose as the human-readable representation.
        return self.motif
"""
BAseActe:
non modifiable if not today
Observation :
TA/pouls
conclusion
ordonnance
vaccin
certif
titre
texte
courries
dest
corps
courriers reçus
spé
nom
contenu
pdf
examens:
type
effecteur
pdf
REGROUPER courrier et examens ?
bio
antécédants
intolérances
allergies
"""
|
jgirardet/unolog
|
unolog/actes/models.py
|
Python
|
gpl-3.0
| 1,489
|
[
"VisIt"
] |
0b89dc5a4ddddea18047f66e5b2b2f662314ff240eac48725e7754089f9ed475
|
# Copyright 2013-2021 Lawrence Livermore National Security, LLC and other
# Spack Project Developers. See the top-level COPYRIGHT file for details.
#
# SPDX-License-Identifier: (Apache-2.0 OR MIT)
from collections import defaultdict
from spack import *
from spack.util.environment import is_system_path
class Cdo(AutotoolsPackage):
    """CDO is a collection of command line Operators to manipulate and analyse
    Climate and NWP model Data.
    """

    homepage = 'https://code.mpimet.mpg.de/projects/cdo'
    url = 'https://code.mpimet.mpg.de/attachments/download/12760/cdo-1.7.2.tar.gz'
    list_url = 'https://code.mpimet.mpg.de/projects/cdo/files'

    maintainers = ['skosukhin', 'Try2Code']

    # Each release is a distinct attachment id upstream, hence the
    # per-version download URLs.
    version('2.0.2', sha256='34dfdd0d4126cfd35fc69e37e60901c8622d13ec5b3fa5f0fe6a1cc866cc5a70', url='https://code.mpimet.mpg.de/attachments/download/26654/cdo-2.0.2.tar.gz')
    version('2.0.1', sha256='d0794d261e22efa0adac8e6d18de2b60d54de5e1a4df6127c65fc417feb8fdac', url='https://code.mpimet.mpg.de/attachments/download/26477/cdo-2.0.1.tar.gz')
    version('2.0.0', sha256='6bca54e9d69d8c1f072f1996547b7347a65743d15ba751967e9bb16e0ff7a843', url='https://code.mpimet.mpg.de/attachments/download/26370/cdo-2.0.0.tar.gz')
    version('1.9.10', sha256='cc39c89bbb481d7b3945a06c56a8492047235f46ac363c4f0d980fccdde6677e', url='https://code.mpimet.mpg.de/attachments/download/24638/cdo-1.9.10.tar.gz')
    version('1.9.9', sha256='959b5b58f495d521a7fd1daa84644888ec87d6a0df43f22ad950d17aee5ba98d', url='https://code.mpimet.mpg.de/attachments/download/23323/cdo-1.9.9.tar.gz')
    version('1.9.8', sha256='f2660ac6f8bf3fa071cf2a3a196b3ec75ad007deb3a782455e80f28680c5252a', url='https://code.mpimet.mpg.de/attachments/download/20826/cdo-1.9.8.tar.gz')
    version('1.9.7.1', sha256='3771952e065bcf935d43e492707370ed2a0ecb59a06bea24f9ab69d77943962c', url='https://code.mpimet.mpg.de/attachments/download/20124/cdo-1.9.7.1.tar.gz')
    version('1.9.6', sha256='b31474c94548d21393758caa33f35cf7f423d5dfc84562ad80a2bdcb725b5585', url='https://code.mpimet.mpg.de/attachments/download/19299/cdo-1.9.6.tar.gz')
    version('1.9.5', sha256='48ed65cc5b436753c8e7f9eadd8aa97376698ce230ceafed2a4350a5b1a27148', url='https://code.mpimet.mpg.de/attachments/download/18264/cdo-1.9.5.tar.gz')
    version('1.9.4', sha256='3d1c0fd3f7d38c5d3d88139ca1546c9d24e1b1ff752a794a4194dfe624695def', url='https://code.mpimet.mpg.de/attachments/download/17374/cdo-1.9.4.tar.gz')
    version('1.9.3', sha256='e83a3de7b402600c0d9a5df18073d36d133ff9719d3c561a0efa90f9c1599f3f', url='https://code.mpimet.mpg.de/attachments/download/16435/cdo-1.9.3.tar.gz')
    version('1.9.2', sha256='d1c5092167034a48e4b8ada24cf78a1d4b84e364ffbb08b9ca70d13f428f300c', url='https://code.mpimet.mpg.de/attachments/download/16035/cdo-1.9.2.tar.gz')
    version('1.9.1', sha256='33cba3cfcc27e5896769143c5f8e2f300ca14c7a40d1f19ffd1ed24b49ea3d55', url='https://code.mpimet.mpg.de/attachments/download/15653/cdo-1.9.1.tar.gz')
    version('1.9.0', sha256='df367f8c3abf4ab085bcfc61e0205b28a5ecc69b7b83ba398b4d3c874dd69008', url='https://code.mpimet.mpg.de/attachments/download/15187/cdo-1.9.0.tar.gz')
    version('1.8.2', sha256='6ca6c1263af2237737728ac937a275f8aa27680507636a6b6320f347c69a369a', url='https://code.mpimet.mpg.de/attachments/download/14686/cdo-1.8.2.tar.gz')
    version('1.7.2', sha256='4c43eba7a95f77457bfe0d30fb82382b3b5f2b0cf90aca6f0f0a008f6cc7e697', url='https://code.mpimet.mpg.de/attachments/download/12760/cdo-1.7.2.tar.gz')

    # Optional features; 'grib2' is multi-valued (backend selection).
    variant('netcdf', default=True, description='Enable NetCDF support')
    variant('grib2', default='eccodes', values=('eccodes', 'grib-api', 'none'),
            description='Specify GRIB2 backend')
    variant('external-grib1', default=False,
            description='Ignore the built-in support and use the external '
            'GRIB2 backend for GRIB1 files')
    variant('szip', default=True,
            description='Enable szip compression for GRIB1')
    variant('hdf5', default=True, description='Enable HDF5 support')
    variant('udunits2', default=True, description='Enable UDUNITS2 support')
    variant('libxml2', default=True, description='Enable libxml2 support')
    variant('proj', default=True,
            description='Enable PROJ library for cartographic projections')
    variant('curl', default=False, description='Enable curl support')
    variant('fftw3', default=True, description='Enable support for fftw3')
    variant('magics', default=False,
            description='Enable Magics library support')
    variant('openmp', default=True, description='Enable OpenMP support')

    depends_on('pkgconfig', type='build')

    depends_on('netcdf-c', when='+netcdf')
    # The internal library of CDO implicitly links to hdf5.
    # We also need the backend of netcdf to be thread safe.
    depends_on('hdf5+threadsafe', when='+netcdf')

    depends_on('grib-api', when='grib2=grib-api')
    depends_on('eccodes', when='grib2=eccodes')

    depends_on('szip', when='+szip')
    depends_on('hdf5+threadsafe', when='+hdf5')
    depends_on('udunits', when='+udunits2')
    depends_on('libxml2', when='+libxml2')
    # PROJ version constraints track CDO's changing API requirements.
    depends_on('proj@:5', when='@:1.9.6+proj')
    depends_on('proj@:7', when='@1.9.7+proj')
    depends_on('proj@5:', when='@1.9.8:+proj')
    depends_on('curl', when='+curl')
    depends_on('fftw-api@3:', when='+fftw3')
    depends_on('magics', when='+magics')
    depends_on('uuid')

    conflicts('+szip', when='+external-grib1 grib2=none',
              msg='The configuration does not support GRIB1')
    conflicts('%gcc@9:', when='@:1.9.6',
              msg='GCC 9 changed OpenMP data sharing behavior')

    def configure_args(self):
        """Build the ./configure argument list from the active variants.

        Also accumulates extra CPPFLAGS/LDFLAGS/LIBS in ``flags`` and
        appends them as VAR=value arguments at the end.
        """
        config_args = []

        flags = defaultdict(list)

        def yes_or_prefix(spec_name):
            # Use plain '--with-X=yes' for system-installed dependencies,
            # otherwise point configure at the Spack install prefix.
            prefix = self.spec[spec_name].prefix
            return 'yes' if is_system_path(prefix) else prefix

        if '+netcdf' in self.spec:
            config_args.append('--with-netcdf=' + yes_or_prefix('netcdf-c'))
            # We need to make sure that the libtool script of libcdi - the
            # internal library of CDO - finds the correct version of hdf5.
            # Note that the argument of --with-hdf5 is not passed to the
            # configure script of libcdi, therefore we have to provide
            # additional flags regardless of whether hdf5 support is enabled.
            hdf5_spec = self.spec['hdf5']
            if not is_system_path(hdf5_spec.prefix):
                flags['LDFLAGS'].append(self.spec['hdf5'].libs.search_flags)
        else:
            config_args.append('--without-netcdf')

        if self.spec.variants['grib2'].value == 'eccodes':
            if self.spec.satisfies('@1.9:'):
                config_args.append('--with-eccodes=' + yes_or_prefix('eccodes'))
                config_args.append('--without-grib_api')
            else:
                # Pre-1.9 CDO has no eccodes option; eccodes provides a
                # grib_api compatibility interface instead.
                config_args.append('--with-grib_api=yes')
                eccodes_spec = self.spec['eccodes']
                eccodes_libs = eccodes_spec.libs
                flags['LIBS'].append(eccodes_libs.link_flags)
                if not is_system_path(eccodes_spec.prefix):
                    flags['LDFLAGS'].append(eccodes_libs.search_flags)
        elif self.spec.variants['grib2'].value == 'grib-api':
            config_args.append('--with-grib_api=' + yes_or_prefix('grib-api'))
            if self.spec.satisfies('@1.9:'):
                config_args.append('--without-eccodes')
        else:
            # grib2=none: disable both backends.
            config_args.append('--without-grib_api')
            if self.spec.satisfies('@1.9:'):
                config_args.append('--without-eccodes')

        if '+external-grib1' in self.spec:
            # cgribex is CDO's built-in GRIB1 codec.
            config_args.append('--disable-cgribex')
        else:
            config_args.append('--enable-cgribex')

        if '+szip' in self.spec:
            config_args.append('--with-szlib=' + yes_or_prefix('szip'))
        else:
            config_args.append('--without-szlib')

        config_args += self.with_or_without('hdf5',
                                            activation_value=yes_or_prefix)

        config_args += self.with_or_without(
            'udunits2',
            activation_value=lambda x: yes_or_prefix('udunits'))

        if '+libxml2' in self.spec:
            libxml2_spec = self.spec['libxml2']
            if is_system_path(libxml2_spec.prefix):
                config_args.append('--with-libxml2=yes')
                # Spack does not inject the header search flag in this case,
                # which is still required, unless libxml2 is installed to '/usr'
                # (handled by the configure script of CDO).
                if libxml2_spec.prefix != '/usr':
                    flags['CPPFLAGS'].append(libxml2_spec.headers.include_flags)
            else:
                config_args.append('--with-libxml2=' + libxml2_spec.prefix)
        else:
            config_args.append('--without-libxml2')

        config_args += self.with_or_without('proj',
                                            activation_value=yes_or_prefix)

        config_args += self.with_or_without('curl',
                                            activation_value=yes_or_prefix)

        config_args += self.with_or_without('magics',
                                            activation_value=yes_or_prefix)

        config_args += self.with_or_without('fftw3')

        config_args += self.enable_or_disable('openmp')

        # Starting version 1.9.0 CDO is a C++ program but it uses the C
        # interface of HDF5 without the parallel features. To avoid
        # unnecessary dependencies on mpi's cxx library, we need to set the
        # following flags. This works for OpenMPI, MPICH, MVAPICH, Intel MPI,
        # IBM Spectrum MPI, bullx MPI, and Cray MPI.
        if self.spec.satisfies('@1.9:+hdf5^hdf5+mpi'):
            flags['CPPFLAGS'].append('-DOMPI_SKIP_MPICXX -DMPICH_SKIP_MPICXX')

        config_args.extend(['{0}={1}'.format(var, ' '.join(val))
                            for var, val in flags.items()])

        return config_args
|
LLNL/spack
|
var/spack/repos/builtin/packages/cdo/package.py
|
Python
|
lgpl-2.1
| 10,082
|
[
"NetCDF"
] |
5daf458f2e315d74110c9cee940cb6998d5a53797afdff00c9506eb6954783b6
|
########################################################################
# $HeadURL $
# File: FileCatalogHandler.py
########################################################################
"""
:mod: FileCatalogHandler
.. module: FileCatalogHandler
:synopsis: FileCatalogHandler is a simple Replica and Metadata Catalog service
"""
__RCSID__ = "$Id$"
## imports
import os
from types import IntType, LongType, DictType, StringTypes, BooleanType, ListType
## from DIRAC
from DIRAC.Core.DISET.RequestHandler import RequestHandler, getServiceOption
from DIRAC import gLogger, S_OK, S_ERROR, gMonitor
from DIRAC.DataManagementSystem.DB.FileCatalogDB import FileCatalogDB
from DIRAC.Core.Utilities.List import sortList
# This is a global instance of the FileCatalogDB class
gFileCatalogDB = None
def initializeFileCatalogHandler( serviceInfo ):
  """ Handler initialisation.

  Creates the global FileCatalogDB instance, configures its manager
  plugins and general options from the service configuration, and
  registers the gMonitor activities used by the handler methods.
  """
  global gFileCatalogDB

  # Location of the backing database (CS option 'Database').
  dbLocation = getServiceOption( serviceInfo, 'Database', 'DataManagement/FileCatalogDB' )
  gFileCatalogDB = FileCatalogDB( dbLocation )

  databaseConfig = {}

  # Obtain the plugins to be used for DB interaction
  gLogger.info( "Initializing with FileCatalog with following managers:" )
  defaultManagers = { 'UserGroupManager' : 'UserAndGroupManagerDB',
                      'SEManager' : 'SEManagerDB',
                      'SecurityManager' : 'NoSecurityManager',
                      'DirectoryManager' : 'DirectoryLevelTree',
                      'FileManager' : 'FileManager',
                      'DirectoryMetadata' : 'DirectoryMetadata',
                      'FileMetadata' : 'FileMetadata',
                      'DatasetManager' : 'DatasetManager' }
  # Each manager may be overridden individually in the service options.
  for configKey in sortList( defaultManagers.keys() ):
    defaultValue = defaultManagers[configKey]
    configValue = getServiceOption( serviceInfo, configKey, defaultValue )
    gLogger.info( "%-20s : %-20s" % ( str( configKey ), str( configValue ) ) )
    databaseConfig[configKey] = configValue

  # Obtain some general configuration of the database
  gLogger.info( "Initializing the FileCatalog with the following configuration:" )
  defaultConfig = { 'UniqueGUID' : False,
                    'GlobalReadAccess' : True,
                    'LFNPFNConvention' : 'Strong',
                    'ResolvePFN' : True,
                    'DefaultUmask' : 0775,
                    'ValidFileStatus' : ['AprioriGood','Trash','Removing','Probing'],
                    'ValidReplicaStatus' : ['AprioriGood','Trash','Removing','Probing'],
                    'VisibleFileStatus' : ['AprioriGood'],
                    'VisibleReplicaStatus': ['AprioriGood']}
  for configKey in sortList( defaultConfig.keys() ):
    defaultValue = defaultConfig[configKey]
    configValue = getServiceOption( serviceInfo, configKey, defaultValue )
    gLogger.info( "%-20s : %-20s" % ( str( configKey ), str( configValue ) ) )
    databaseConfig[configKey] = configValue
  res = gFileCatalogDB.setConfig( databaseConfig )

  # Monitoring counters for the handler's file/replica operations
  # (per-minute summed rates).
  gMonitor.registerActivity( "AddFile", "Amount of addFile calls",
                             "FileCatalogHandler", "calls/min", gMonitor.OP_SUM )
  gMonitor.registerActivity( "AddFileSuccessful", "Files successfully added",
                             "FileCatalogHandler", "files/min", gMonitor.OP_SUM )
  gMonitor.registerActivity( "AddFileFailed", "Files failed to add",
                             "FileCatalogHandler", "files/min", gMonitor.OP_SUM )
  gMonitor.registerActivity( "RemoveFile", "Amount of removeFile calls",
                             "FileCatalogHandler", "calls/min", gMonitor.OP_SUM )
  gMonitor.registerActivity( "RemoveFileSuccessful", "Files successfully removed",
                             "FileCatalogHandler", "files/min", gMonitor.OP_SUM )
  gMonitor.registerActivity( "RemoveFileFailed", "Files failed to remove",
                             "FileCatalogHandler", "files/min", gMonitor.OP_SUM )
  gMonitor.registerActivity( "AddReplica", "Amount of addReplica calls",
                             "FileCatalogHandler", "calls/min", gMonitor.OP_SUM )
  gMonitor.registerActivity( "AddReplicaSuccessful", "Replicas successfully added",
                             "FileCatalogHandler", "replicas/min", gMonitor.OP_SUM )
  gMonitor.registerActivity( "AddReplicaFailed", "Replicas failed to add",
                             "FileCatalogHandler", "replicas/min", gMonitor.OP_SUM )
  gMonitor.registerActivity( "RemoveReplica", "Amount of removeReplica calls",
                             "FileCatalogHandler", "calls/min", gMonitor.OP_SUM )
  gMonitor.registerActivity( "RemoveReplicaSuccessful", "Replicas successfully removed",
                             "FileCatalogHandler", "replicas/min", gMonitor.OP_SUM )
  gMonitor.registerActivity( "RemoveReplicaFailed", "Replicas failed to remove",
                             "FileCatalogHandler", "replicas/min", gMonitor.OP_SUM )
  gMonitor.registerActivity( "ListDirectory", "Amount of listDirectory calls",
                             "FileCatalogHandler", "calls/min", gMonitor.OP_SUM )

  return res
class FileCatalogHandler( RequestHandler ):
"""
..class:: FileCatalogHandler
A simple Replica and Metadata Catalog service.
"""
########################################################################
# Path operations (not updated)
#
types_changePathOwner = [ [ ListType, DictType ] + list( StringTypes ) ]
def export_changePathOwner( self, lfns, recursive = False ):
""" Get replica info for the given list of LFNs
"""
return gFileCatalogDB.changePathOwner( lfns, self.getRemoteCredentials(), recursive )
types_changePathGroup = [ [ ListType, DictType ] + list( StringTypes ) ]
def export_changePathGroup( self, lfns, recursive = False ):
""" Get replica info for the given list of LFNs
"""
return gFileCatalogDB.changePathGroup( lfns, self.getRemoteCredentials(), recursive )
types_changePathMode = [ [ ListType, DictType ] + list( StringTypes ) ]
def export_changePathMode( self, lfns, recursive = False ):
""" Get replica info for the given list of LFNs
"""
return gFileCatalogDB.changePathMode( lfns, self.getRemoteCredentials(), recursive )
########################################################################
# ACL Operations
#
types_getPathPermissions = [ [ ListType, DictType ] + list( StringTypes ) ]
def export_getPathPermissions( self, lfns ):
""" Determine the ACL information for a supplied path
"""
return gFileCatalogDB.getPathPermissions( lfns, self.getRemoteCredentials() )
###################################################################
#
# isOK
#
types_isOK = []
@staticmethod
def export_isOK():
""" returns S_OK if DB is connected
"""
if gFileCatalogDB and gFileCatalogDB._connected:
return S_OK()
return S_ERROR( 'Server not connected to DB' )
###################################################################
#
# User/Group write operations
#
types_addUser = [ StringTypes ]
def export_addUser( self, userName ):
""" Add a new user to the File Catalog """
return gFileCatalogDB.addUser( userName, self.getRemoteCredentials() )
types_deleteUser = [ StringTypes ]
def export_deleteUser( self, userName ):
""" Delete user from the File Catalog """
return gFileCatalogDB.deleteUser( userName, self.getRemoteCredentials() )
types_addGroup = [ StringTypes ]
def export_addGroup( self, groupName ):
""" Add a new group to the File Catalog """
return gFileCatalogDB.addGroup( groupName, self.getRemoteCredentials() )
types_deleteGroup = [ StringTypes ]
def export_deleteGroup( self, groupName ):
""" Delete group from the File Catalog """
return gFileCatalogDB.deleteGroup( groupName, self.getRemoteCredentials() )
###################################################################
#
# User/Group read operations
#
types_getUsers = []
def export_getUsers( self ):
""" Get all the users defined in the File Catalog """
return gFileCatalogDB.getUsers( self.getRemoteCredentials() )
types_getGroups = []
def export_getGroups( self ):
""" Get all the groups defined in the File Catalog """
return gFileCatalogDB.getGroups( self.getRemoteCredentials() )
########################################################################
#
# Path read operations
#
types_exists = [ [ ListType, DictType ] + list( StringTypes ) ]
def export_exists( self, lfns ):
""" Check whether the supplied paths exists """
return gFileCatalogDB.exists( lfns, self.getRemoteCredentials() )
########################################################################
#
# File write operations
#
types_addFile = [ [ ListType, DictType ] + list( StringTypes ) ]
def export_addFile( self, lfns ):
""" Register supplied files """
gMonitor.addMark( "AddFile", 1 )
res = gFileCatalogDB.addFile( lfns, self.getRemoteCredentials() )
if res['OK']:
gMonitor.addMark( "AddFileSuccessful", len( res.get( 'Value', {} ).get( 'Successful', [] ) ) )
gMonitor.addMark( "AddFileFailed", len( res.get( 'Value', {} ).get( 'Failed', [] ) ) )
return res
types_removeFile = [ [ ListType, DictType ] + list( StringTypes ) ]
def export_removeFile( self, lfns ):
""" Remove the supplied lfns """
gMonitor.addMark( "RemoveFile", 1 )
res = gFileCatalogDB.removeFile( lfns, self.getRemoteCredentials() )
if res['OK']:
gMonitor.addMark( "RemoveFileSuccessful", len( res.get( 'Value', {} ).get( 'Successful', [] ) ) )
gMonitor.addMark( "RemoveFileFailed", len( res.get( 'Value', {} ).get( 'Failed', [] ) ) )
return res
types_setFileStatus = [ DictType ]
def export_setFileStatus( self, lfns ):
""" Remove the supplied lfns """
return gFileCatalogDB.setFileStatus( lfns, self.getRemoteCredentials() )
types_addReplica = [ [ ListType, DictType ] + list( StringTypes ) ]
def export_addReplica( self, lfns ):
""" Register supplied replicas """
gMonitor.addMark( "AddReplica", 1 )
res = gFileCatalogDB.addReplica( lfns, self.getRemoteCredentials() )
if res['OK']:
gMonitor.addMark( "AddReplicaSuccessful", len( res.get( 'Value', {} ).get( 'Successful', [] ) ) )
gMonitor.addMark( "AddReplicaFailed", len( res.get( 'Value', {} ).get( 'Failed', [] ) ) )
return res
types_removeReplica = [ [ ListType, DictType ] + list( StringTypes ) ]
def export_removeReplica( self, lfns ):
""" Remove the supplied replicas """
gMonitor.addMark( "RemoveReplica", 1 )
res = gFileCatalogDB.removeReplica( lfns, self.getRemoteCredentials() )
if res['OK']:
gMonitor.addMark( "RemoveReplicaSuccessful", len( res.get( 'Value', {} ).get( 'Successful', [] ) ) )
gMonitor.addMark( "RemoveReplicaFailed", len( res.get( 'Value', {} ).get( 'Failed', [] ) ) )
return res
types_setReplicaStatus = [ [ ListType, DictType ] + list( StringTypes ) ]
def export_setReplicaStatus( self, lfns ):
""" Set the status for the supplied replicas """
return gFileCatalogDB.setReplicaStatus( lfns, self.getRemoteCredentials() )
types_setReplicaHost = [ [ ListType, DictType ] + list( StringTypes ) ]
def export_setReplicaHost( self, lfns ):
""" Change the registered SE for the supplied replicas """
return gFileCatalogDB.setReplicaHost( lfns, self.getRemoteCredentials() )
types_addFileAncestors = [ DictType ]
def export_addFileAncestors( self, lfns ):
""" Add file ancestor information for the given list of LFNs """
return gFileCatalogDB.addFileAncestors( lfns, self.getRemoteCredentials() )
########################################################################
#
# File read operations
#
types_isFile = [ [ ListType, DictType ] + list( StringTypes ) ]
def export_isFile( self, lfns ):
""" Check whether the supplied lfns are files """
return gFileCatalogDB.isFile( lfns, self.getRemoteCredentials() )
types_getFileSize = [ [ ListType, DictType ] + list( StringTypes ) ]
def export_getFileSize( self, lfns ):
""" Get the size associated to supplied lfns """
return gFileCatalogDB.getFileSize( lfns, self.getRemoteCredentials() )
types_getFileMetadata = [ [ ListType, DictType ] + list( StringTypes ) ]
def export_getFileMetadata( self, lfns ):
""" Get the metadata associated to supplied lfns """
return gFileCatalogDB.getFileMetadata( lfns, self.getRemoteCredentials() )
types_getReplicas = [ [ ListType, DictType ] + list( StringTypes ), BooleanType ]
def export_getReplicas( self, lfns, allStatus = False ):
""" Get replicas for supplied lfns """
return gFileCatalogDB.getReplicas( lfns, allStatus, self.getRemoteCredentials() )
types_getReplicaStatus = [ [ ListType, DictType ] + list( StringTypes ) ]
def export_getReplicaStatus( self, lfns ):
""" Get the status for the supplied replicas """
return gFileCatalogDB.getReplicaStatus( lfns, self.getRemoteCredentials() )
types_getFileAncestors = [ ListType, [ ListType, IntType, LongType ] ]
def export_getFileAncestors( self, lfns, depths ):
""" Get the status for the supplied replicas """
dList = depths
if type( dList ) != ListType:
dList = [ depths ]
lfnDict = dict.fromkeys( lfns, True )
return gFileCatalogDB.getFileAncestors( lfnDict, dList, self.getRemoteCredentials() )
types_getFileDescendents = [ ListType, [ ListType, IntType, LongType ] ]
def export_getFileDescendents( self, lfns, depths ):
""" Get the status for the supplied replicas """
dList = depths
if type( dList ) != ListType:
dList = [depths]
lfnDict = dict.fromkeys( lfns, True )
return gFileCatalogDB.getFileDescendents( lfnDict, dList, self.getRemoteCredentials() )
########################################################################
#
# Directory write operations
#
types_createDirectory = [ [ ListType, DictType ] + list( StringTypes ) ]
def export_createDirectory( self, lfns ):
""" Create the supplied directories """
return gFileCatalogDB.createDirectory( lfns, self.getRemoteCredentials() )
types_removeDirectory = [ [ ListType, DictType ] + list( StringTypes ) ]
def export_removeDirectory( self, lfns ):
""" Remove the supplied directories """
return gFileCatalogDB.removeDirectory( lfns, self.getRemoteCredentials() )
########################################################################
#
# Directory read operations
#
types_listDirectory = [ [ ListType, DictType ] + list( StringTypes ), BooleanType ]
def export_listDirectory( self, lfns, verbose ):
""" List the contents of supplied directories """
gMonitor.addMark( 'ListDirectory', 1 )
return gFileCatalogDB.listDirectory( lfns, self.getRemoteCredentials(), verbose = verbose )
types_isDirectory = [ [ ListType, DictType ] + list( StringTypes ) ]
def export_isDirectory( self, lfns ):
""" Determine whether supplied path is a directory """
return gFileCatalogDB.isDirectory( lfns, self.getRemoteCredentials() )
types_getDirectorySize = [ [ ListType, DictType ] + list( StringTypes ) ]
def export_getDirectorySize( self, lfns, longOut = False, fromFiles = False ):
""" Get the size of the supplied directory """
return gFileCatalogDB.getDirectorySize( lfns, longOut, fromFiles, self.getRemoteCredentials() )
types_getDirectoryReplicas = [ [ ListType, DictType ] + list( StringTypes ), BooleanType ]
def export_getDirectoryReplicas( self, lfns, allStatus = False ):
""" Get replicas for files in the supplied directory """
return gFileCatalogDB.getDirectoryReplicas( lfns, allStatus, self.getRemoteCredentials() )
########################################################################
#
# Administrative database operations
#
types_getCatalogCounters = []
def export_getCatalogCounters( self ):
""" Get the number of registered directories, files and replicas in various tables """
return gFileCatalogDB.getCatalogCounters( self.getRemoteCredentials() )
types_rebuildDirectoryUsage = []
@staticmethod
def export_rebuildDirectoryUsage():
""" Rebuild DirectoryUsage table from scratch """
return gFileCatalogDB.rebuildDirectoryUsage()
types_repairCatalog = []
def export_repairCatalog( self ):
""" Repair the catalog inconsistencies """
return gFileCatalogDB.repairCatalog( self.getRemoteCredentials() )
########################################################################
# Metadata Catalog Operations
#
types_addMetadataField = [ StringTypes, StringTypes ]
def export_addMetadataField( self, fieldName, fieldType, metaType = '-d' ):
""" Add a new metadata field of the given type
"""
if metaType.lower() == "-d":
return gFileCatalogDB.dmeta.addMetadataField( fieldName, fieldType, self.getRemoteCredentials() )
elif metaType.lower() == "-f":
return gFileCatalogDB.fmeta.addMetadataField( fieldName, fieldType, self.getRemoteCredentials() )
else:
return S_ERROR( 'Unknown metadata type %s' % metaType )
types_deleteMetadataField = [ StringTypes ]
def export_deleteMetadataField( self, fieldName ):
""" Delete the metadata field
"""
result = gFileCatalogDB.dmeta.deleteMetadataField( fieldName, self.getRemoteCredentials() )
error = ''
if not result['OK']:
error = result['Message']
result = gFileCatalogDB.fmeta.deleteMetadataField( fieldName, self.getRemoteCredentials() )
if not result['OK']:
if error:
result["Message"] = error + "; " + result["Message"]
return result
types_getMetadataFields = [ ]
def export_getMetadataFields( self ):
""" Get all the metadata fields
"""
resultDir = gFileCatalogDB.dmeta.getMetadataFields( self.getRemoteCredentials() )
if not resultDir['OK']:
return resultDir
resultFile = gFileCatalogDB.fmeta.getFileMetadataFields( self.getRemoteCredentials() )
if not resultFile['OK']:
return resultFile
return S_OK( { 'DirectoryMetaFields' : resultDir['Value'],
'FileMetaFields' : resultFile['Value'] } )
types_setMetadata = [ StringTypes, DictType ]
def export_setMetadata( self, path, metadatadict ):
""" Set metadata parameter for the given path
"""
return gFileCatalogDB.setMetadata( path, metadatadict, self.getRemoteCredentials() )
types_setMetadataBulk = [ DictType ]
def export_setMetadataBulk( self, pathMetadataDict ):
""" Set metadata parameter for the given path
"""
return gFileCatalogDB.setMetadataBulk( pathMetadataDict, self.getRemoteCredentials() )
types_removeMetadata = [ DictType ]
def export_removeMetadata( self, pathMetadataDict ):
""" Remove the specified metadata for the given path
"""
return gFileCatalogDB.removeMetadata( pathMetadataDict, self.getRemoteCredentials() )
types_getDirectoryMetadata = [ StringTypes ]
def export_getDirectoryMetadata( self, path ):
""" Get all the metadata valid for the given directory path
"""
return gFileCatalogDB.dmeta.getDirectoryMetadata( path, self.getRemoteCredentials() )
types_getFileUserMetadata = [ StringTypes ]
def export_getFileUserMetadata( self, path ):
""" Get all the metadata valid for the given file
"""
return gFileCatalogDB.fmeta.getFileUserMetadata( path, self.getRemoteCredentials() )
types_findDirectoriesByMetadata = [ DictType ]
def export_findDirectoriesByMetadata( self, metaDict, path = '/' ):
""" Find all the directories satisfying the given metadata set
"""
return gFileCatalogDB.dmeta.findDirectoriesByMetadata ( metaDict, path, self.getRemoteCredentials() )
types_findFilesByMetadata = [ DictType, StringTypes ]
def export_findFilesByMetadata( self, metaDict, path = '/' ):
""" Find all the files satisfying the given metadata set
"""
return gFileCatalogDB.fmeta.findFilesByMetadata( metaDict, path, self.getRemoteCredentials() )
types_getReplicasByMetadata = [ DictType, StringTypes, BooleanType ]
def export_getReplicasByMetadata( self, metaDict, path = '/', allStatus = False ):
""" Find all the files satisfying the given metadata set
"""
return gFileCatalogDB.fileManager.getReplicasByMetadata( metaDict,
path,
allStatus,
self.getRemoteCredentials() )
types_findFilesByMetadataDetailed = [ DictType, StringTypes ]
def export_findFilesByMetadataDetailed( self, metaDict, path = '/' ):
""" Find all the files satisfying the given metadata set
"""
result = gFileCatalogDB.fmeta.findFilesByMetadata( metaDict, path, self.getRemoteCredentials() )
if not result['OK'] or not result['Value']:
return result
lfns = []
for directory in result['Value']:
for fname in result['Value'][directory]:
lfns.append( os.path.join( directory, fname ) )
return gFileCatalogDB.getFileDetails( lfns, self.getRemoteCredentials() )
types_findFilesByMetadataWeb = [ DictType, StringTypes, [IntType, LongType], [IntType, LongType]]
def export_findFilesByMetadataWeb( self, metaDict, path, startItem, maxItems ):
  """ Find files satisfying the given metadata set and return the requested
      page of detailed file records for web display

      :param dict metaDict: metadata query dictionary
      :param str path: path to restrict the search to
      :param int startItem: index of the first record of the page
      :param int maxItems: maximum number of records in the page
      :return: S_OK( { 'TotalRecords': int, 'Records': dict } ) / S_ERROR
  """
  # Pagination is delegated to the DB layer, which returns only the file IDs
  # of the requested page together with the total number of matches
  result = gFileCatalogDB.dmeta.findFileIDsByMetadata( metaDict, path, self.getRemoteCredentials(), startItem, maxItems )
  if not result['OK'] or not result['Value']:
    return result
  fileIDs = result['Value']
  totalRecords = result['TotalRecords']
  # Resolve the page of file IDs into LFNs
  result = gFileCatalogDB.fileManager._getFileLFNs( fileIDs )
  if not result['OK']:
    return result
  lfnsResultList = result['Value']['Successful'].values()
  # Collect detailed metadata for the files of the page
  resultDetails = gFileCatalogDB.getFileDetails( lfnsResultList, self.getRemoteCredentials() )
  if not resultDetails['OK']:
    return resultDetails
  result = S_OK( {"TotalRecords":totalRecords, "Records":resultDetails['Value'] } )
  return result
def findFilesByMetadataWeb( self, metaDict, path, startItem, maxItems ):
  """ Find all the files satisfying the given metadata set and return one
      page of detailed file records, paginating in memory

      :param dict metaDict: metadata query dictionary
      :param str path: path to restrict the search to
      :param int startItem: index of the first record of the page
      :param int maxItems: maximum number of records in the page
      :return: S_OK( { 'TotalRecords': int, 'Records': dict } ) / S_ERROR
  """
  matchResult = gFileCatalogDB.fmeta.findFilesByMetadata( metaDict, path, self.getRemoteCredentials() )
  if not matchResult['OK'] or not matchResult['Value']:
    return matchResult
  # Flatten the directory -> file names mapping into a list of full LFNs
  allLfns = [ os.path.join( directory, fname )
              for directory in matchResult['Value']
              for fname in matchResult['Value'][directory] ]
  totalRecords = len( allLfns )
  if startItem > totalRecords:
    return S_ERROR( 'Requested files out of existing range' )
  # Clip the requested page to the number of available records
  pageEnd = min( startItem + maxItems, totalRecords )
  resultDetails = gFileCatalogDB.getFileDetails( allLfns[startItem:pageEnd], self.getRemoteCredentials() )
  if not resultDetails['OK']:
    return resultDetails
  return S_OK( { "TotalRecords": totalRecords, "Records": resultDetails['Value'] } )
types_getCompatibleMetadata = [ DictType, StringTypes ]
def export_getCompatibleMetadata( self, metaDict, path = '/' ):
  """ Get metadata values compatible with the given metadata subset

      :param dict metaDict: metadata query dictionary already selected
      :param str path: path to restrict the evaluation to
      :return: S_OK( compatible metadata values ) / S_ERROR
  """
  return gFileCatalogDB.dmeta.getCompatibleMetadata( metaDict, path, self.getRemoteCredentials() )
types_addMetadataSet = [ StringTypes, DictType ]
def export_addMetadataSet( self, setName, setDict ):
  """ Add a new metadata set

      :param str setName: name of the new metadata set
      :param dict setDict: definition of the metadata set
      :return: S_OK / S_ERROR
  """
  return gFileCatalogDB.dmeta.addMetadataSet( setName, setDict, self.getRemoteCredentials() )
types_getMetadataSet = [ StringTypes, BooleanType ]
def export_getMetadataSet( self, setName, expandFlag ):
  """ Get the definition of the given metadata set

      :param str setName: name of the metadata set
      :param bool expandFlag: whether to expand the set definition
      :return: S_OK( metadata set definition ) / S_ERROR
  """
  return gFileCatalogDB.dmeta.getMetadataSet( setName, expandFlag, self.getRemoteCredentials() )
types_listMetadataSets = []
def export_listMetadataSets(self):
  """ Get the list of metadata sets with their definitions

      :return: S_OK( metadata sets dictionary ) / S_ERROR
  """
  return gFileCatalogDB.dmeta.listMetadataSets(self.getRemoteCredentials())
#########################################################################################
#
# Dataset manipulation methods
#
types_addDataset = [ StringTypes, DictType ]
def export_addDataset( self, datasetName, metaQuery ):
  """ Add a new dynamic dataset defined by its meta query

      :param str datasetName: name of the new dataset
      :param dict metaQuery: metadata query defining the dataset contents
      :return: S_OK / S_ERROR
  """
  return gFileCatalogDB.datasetManager.addDataset( datasetName, metaQuery, self.getRemoteCredentials() )
types_addDatasetAnnotation = [ DictType ]
def export_addDatasetAnnotation( self, datasetDict ):
  """ Add annotation to an already created dataset

      :param dict datasetDict: dataset name -> annotation mapping
      :return: S_OK / S_ERROR
  """
  return gFileCatalogDB.datasetManager.addDatasetAnnotation( datasetDict, self.getRemoteCredentials() )
types_removeDataset = [ StringTypes ]
def export_removeDataset( self, datasetName ):
  """ Remove the given dynamic dataset

      :param str datasetName: name of the dataset to remove
      :return: S_OK / S_ERROR
  """
  return gFileCatalogDB.datasetManager.removeDataset( datasetName, self.getRemoteCredentials() )
types_checkDataset = [ StringTypes ]
def export_checkDataset( self, datasetName ):
  """ Check the given dynamic dataset for changes since its definition

      :param str datasetName: name of the dataset to check
      :return: S_OK / S_ERROR
  """
  return gFileCatalogDB.datasetManager.checkDataset( datasetName, self.getRemoteCredentials() )
types_updateDataset = [ StringTypes ]
def export_updateDataset( self, datasetName ):
  """ Update the given dynamic dataset for changes since its definition

      :param str datasetName: name of the dataset to update
      :return: S_OK / S_ERROR
  """
  return gFileCatalogDB.datasetManager.updateDataset( datasetName, self.getRemoteCredentials() )
types_getDatasets = [ list( StringTypes ) + [ListType] ]
def export_getDatasets( self, datasetName ):
  """ Get the given dynamic datasets as they are stored in the database

      :param datasetName: dataset name or list of dataset names
      :return: S_OK( dataset descriptions ) / S_ERROR
  """
  return gFileCatalogDB.datasetManager.getDatasets( datasetName, self.getRemoteCredentials() )
types_getDatasetParameters = [ StringTypes ]
def export_getDatasetParameters( self, datasetName ):
  """ Get parameters of the given dynamic dataset as they are stored in the database

      :param str datasetName: name of the dataset
      :return: S_OK( parameter dictionary ) / S_ERROR
  """
  return gFileCatalogDB.datasetManager.getDatasetParameters( datasetName, self.getRemoteCredentials() )
types_getDatasetAnnotation = [ list( StringTypes ) + [ListType] ]
def export_getDatasetAnnotation( self, datasetName ):
  """ Get annotation of the given datasets

      :param datasetName: dataset name or list of dataset names
      :return: S_OK( annotations ) / S_ERROR
  """
  return gFileCatalogDB.datasetManager.getDatasetAnnotation( datasetName, self.getRemoteCredentials() )
types_freezeDataset = [ StringTypes ]
def export_freezeDataset( self, datasetName ):
  """ Freeze the contents of the dataset making it effectively static

      :param str datasetName: name of the dataset to freeze
      :return: S_OK / S_ERROR
  """
  return gFileCatalogDB.datasetManager.freezeDataset( datasetName, self.getRemoteCredentials() )
types_releaseDataset = [ StringTypes ]
def export_releaseDataset( self, datasetName ):
  """ Release the contents of the frozen dataset allowing changes in its contents

      :param str datasetName: name of the dataset to release
      :return: S_OK / S_ERROR
  """
  return gFileCatalogDB.datasetManager.releaseDataset( datasetName, self.getRemoteCredentials() )
types_getDatasetFiles = [ StringTypes ]
def export_getDatasetFiles( self, datasetName ):
  """ Get lfns in the given dataset

      :param str datasetName: name of the dataset
      :return: S_OK( LFNs of the dataset ) / S_ERROR
  """
  return gFileCatalogDB.datasetManager.getDatasetFiles( datasetName, self.getRemoteCredentials() )
|
Sbalbp/DIRAC
|
DataManagementSystem/Service/FileCatalogHandler.py
|
Python
|
gpl-3.0
| 27,381
|
[
"DIRAC"
] |
3c8e5859724ef4e5a61a86e61ce03b65130071b9677124f53073409ceeeb5ab9
|
"""
This is a setup.py script generated by py2applet
Usage:
python setup.py py2app
"""
from setuptools import setup
APP = ['main.py']
DATA_FILES = [
'config.json',
'firefly-blacklist.txt',
'firefly-blacklist.meta.json',
'firefly-hosts.txt',
'firefly-hosts.meta.json',
'firefly-hosts-disabled.txt',
'custom-blacklist.txt',
'custom-whitelist.txt',
'meek-relays.txt',
'cacert.pem',
'README.md',
'LICENSE',
('webpanel', ['webpanel/static', ]),
('webpanel', ['webpanel/templates', ])
]
OPTIONS = {
'iconfile': 'firefly.icns',
'plist': {'CFBundleShortVersionString':'0.3.0',},
'argv_emulation': True
}
setup(
name="Firefly",
version="0.3.0",
app=APP,
data_files=DATA_FILES,
options={'py2app': OPTIONS},
setup_requires=['py2app'],
)
|
Jonavin/firefly-proxy
|
setup_mac.py
|
Python
|
bsd-2-clause
| 826
|
[
"Firefly"
] |
71a96ba7e6006da35f4c2442ecc7f67e2eceffc652440d7d032d4d2b6cd8da8e
|
# Copyright 2016 Mingbo Cai, Princeton Neuroscience Instititute,
# Princeton University
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
""" Bayesian Representational Similarity Analysis (BRSA)
This implementation is based on [Cai2016]_ and [Cai2019]_:
.. [Cai2016] "A Bayesian method for reducing bias in neural
representational similarity analysis",
M.B. Cai, N.W. Schuck, J.W. Pillow, Y. Niv,
Advances in Neural Information Processing Systems 29, 2016, 4952--4960
Available at:
http://papers.nips.cc/paper/6131-a-bayesian-method-for-reducing-bias-in-neural-representational-similarity-analysis.pdf
.. [Cai2019] "Representational structure or task structure?
Bias in neural representational similarity analysis and
a Bayesian method for reducing bias",
M.B. Cai, N.W. Schuck, J.W. Pillow, Y. Niv,
PLoS computational biology 15.5 (2019): e1006299.
https://doi.org/10.1371/journal.pcbi.1006299
`.BRSA` is based on [Cai2016] with additional consideration
of spatial noise correlation proposed in [Cai2019].
`.GBRSA` is based on [Cai2019].
`.GBRSA` may perform better than `.BRSA` due to marginalization of all
voxel-wise parameters. It can be used for a single participant as well.
"""
# Authors: Mingbo Cai
# Princeton Neuroscience Institute, Princeton University, 2016
import numpy as np
import scipy
import scipy.optimize
import scipy.stats
import scipy.special
import time
from sklearn.base import BaseEstimator, TransformerMixin
from sklearn.utils import assert_all_finite, check_random_state
from sklearn.decomposition import PCA, FactorAnalysis, SparsePCA, FastICA
import logging
import brainiak.utils.utils as utils
import scipy.spatial.distance as spdist
from nitime import algorithms as alg
import copy
logger = logging.getLogger(__name__)
__all__ = [
"BRSA",
"GBRSA",
"Ncomp_SVHT_MG_DLD_approx",
"prior_GP_var_inv_gamma",
"prior_GP_var_half_cauchy",
]
def prior_GP_var_inv_gamma(y_invK_y, n_y, tau_range):
    """ Imposing an inverse-Gamma prior onto the variance (tau^2)
        parameter of a Gaussian Process, which is in turn a prior
        imposed over an unknown function y = f(x).
        The inverse-Gamma prior of tau^2, tau^2 ~ invgamma(shape, scale)
        is described by a shape parameter alpha=2 and a scale parameter
        beta=tau_range^2. tau_range describes the reasonable range of
        tau in the inverse-Gamma prior.
        The data y's at locations x's are assumed to follow Gaussian Process:
        f(x, x') ~ N(0, K(x, x') / 2 tau^2), where K is a kernel
        function defined on x. For n observations, K(x1, x2, ..., xn) is
        an n by n positive definite matrix.
        Given the prior parameter tau_range, number of observations
        n_y, and y_invK_y = y * inv(K) * y',
        the function returns the MAP estimate of tau^2 and
        the log posterior probability of tau^2 at the MAP value:
        log(p(tau^2|tau_range)).
        This function is written primarily for BRSA but can also
        be used elsewhere. y in this case corresponds to the log of
        SNR in each voxel. GBRSA does not rely on this function.
        An alternative form of prior is half-Cauchy prior on tau.
        Inverse-Gamma prior penalizes for both very small and very
        large values of tau, while half-Cauchy prior only penalizes
        for very large values of tau.
        For more information on usage, see description in BRSA class:
        `.BRSA`
        See also: `.prior_GP_var_half_cauchy`

    Parameters
    ----------
    y_invK_y: float
        y * inv(K) * y^T, where y=f(x) is a vector of observations
        of unknown function f at different locations x.
        K is correlation matrix of f between different locations, based
        on a Gaussian Process (GP) describing the smoothness property
        of f. K fully incorporates the form of the kernel
        and the length scale of the GP, but not the variance of the GP
        (the purpose of this function is to estimate the variance).
    n_y: int, number of observations
    tau_range: float,
        The reasonable range of tau, the standard deviation of the
        Gaussian Process imposed on y=f(x). tau_range is parameter
        of the inverse-Gamma prior. Say, if you expect the standard
        deviation of the Gaussian process to be around 3, tau_range
        can be set to 3.
        The smaller it is, the more penalization is imposed
        on large variation of y.

    Returns
    -------
    tau2: The MAP estimation of tau^2 based on the prior on tau
        and y_invK_y.
    log_ptau: log(p(tau)) of the returned tau^2 based on the
        inverse-Gamma prior.
    """
    # Shape parameter of the inverse-Gamma prior on tau^2.
    alpha = 2
    # Closed-form MAP of the posterior over tau^2 combining the Gaussian
    # likelihood term y_invK_y with the inverse-Gamma prior
    # invgamma(alpha, scale=tau_range^2).
    tau2 = (y_invK_y + 2 * tau_range**2) / (alpha * 2 + 2 + n_y)
    # Use the same alpha here instead of a duplicated literal, so the
    # shape parameter cannot drift out of sync with the MAP formula above.
    log_ptau = scipy.stats.invgamma.logpdf(
        tau2, scale=tau_range**2, a=alpha)
    return tau2, log_ptau
def prior_GP_var_half_cauchy(y_invK_y, n_y, tau_range):
    """ Imposing a half-Cauchy prior onto the standard deviation (tau)
        of the Gaussian Process which is in turn a prior imposed over
        a function y = f(x).
        The scale parameter of the half-Cauchy prior is tau_range.
        The function returns the MAP estimate of tau^2 and
        log(p(tau|tau_range)) for the MAP value of tau^2,
        where tau_range describes the reasonable range of tau
        in the half-Cauchy prior.
        An alternative form of prior is inverse-Gamma prior on tau^2.
        Inverse-Gamma prior penalizes for both very small and very
        large values of tau, while half-Cauchy prior only penalizes
        for very large values of tau.
        For more information on usage, see description in BRSA class:
        `.BRSA`
    """
    # Closed-form MAP of tau^2: positive root of the quadratic obtained by
    # setting the derivative of the log posterior to zero.
    scale2 = tau_range**2
    discriminant = (n_y**2 * scale2**2
                    + (2 * n_y + 8) * scale2 * y_invK_y
                    + y_invK_y**2)
    tau2 = (y_invK_y - n_y * scale2 + np.sqrt(discriminant)) \
        / (2 * (n_y + 2))
    # Evaluate the half-Cauchy prior density at tau (not tau^2).
    log_ptau = scipy.stats.halfcauchy.logpdf(
        tau2**0.5, scale=tau_range)
    return tau2, log_ptau
def Ncomp_SVHT_MG_DLD_approx(X, zscore=True):
    """ This function implements the approximate calculation of the
        optimal hard threshold for singular values, by Matan Gavish
        and David L. Donoho:
        "The optimal hard threshold for singular values is 4 / sqrt(3)"
        http://ieeexplore.ieee.org/stamp/stamp.jsp?arnumber=6846297

    Parameters
    ----------
    X: 2-D numpy array of size [n_T, n_V]
        The data to estimate the optimal rank for selecting principal
        components.
    zscore: Boolean
        Whether to z-score the data before calculating number of components.

    Returns
    -------
    ncomp: integer
        The optimal number of components determined by the method of MG
        and DLD
    """
    # Aspect ratio of the matrix, folded into (0, 1].
    beta = X.shape[0] / X.shape[1]
    if beta > 1:
        beta = 1 / beta
    # Approximate optimal threshold coefficient omega(beta) from the paper.
    omega = 0.56 * beta ** 3 - 0.95 * beta ** 2 + 1.82 * beta + 1.43
    data = _zscore(X) if zscore else X
    # Only singular values are needed, not the singular vectors.
    sing = np.linalg.svd(data, False, False)
    thresh = omega * np.median(sing)
    above_thresh = sing > thresh
    # Exclude singular values that exceed the threshold only within
    # numerical precision. This prevents close-to-zero singular values
    # from being counted when the median of the singular values is close
    # to 0 (which could happen when the input X has lower rank than its
    # minimal size).
    borderline = np.isclose(sing, thresh)
    return int(np.sum(np.logical_and(above_thresh,
                                     np.logical_not(borderline))))
def _zscore(a):
""" Calculating z-score of data on the first axis.
If the numbers in any column are all equal, scipy.stats.zscore
will return NaN for this column. We shall correct them all to
be zeros.
Parameters
----------
a: numpy array
Returns
-------
zscore: numpy array
The z-scores of input "a", with any columns including non-finite
numbers replaced by all zeros.
"""
assert a.ndim > 1, 'a must have more than one dimensions'
zscore = scipy.stats.zscore(a, axis=0)
zscore[:, np.logical_not(np.all(np.isfinite(zscore), axis=0))] = 0
return zscore
class BRSA(BaseEstimator, TransformerMixin):
"""Bayesian representational Similarity Analysis (BRSA)
Given the time series of neural imaging data in a region of interest
(ROI) and the hypothetical neural response (design matrix) to
each experimental condition of interest,
calculate the shared covariance matrix U of
the voxels(recording unit)' response profiles \\beta_i to each condition,
and the relative SNR of each voxels.
The relative SNR could be considered as the degree of contribution
of each voxel to this shared covariance matrix.
A correlation matrix converted from the covariance matrix U
will be provided as a quantification of neural representational similarity.
.. math::
Y = X \\cdot \\beta + X_0 \\cdot \\beta_0 + \\epsilon
\\beta_i \\sim N(0,(s_{i} \\sigma_{i})^2 U)
\\epsilon_i \\sim AR(1)
Please note that the model assumes that the covariance matrix U which
all \\beta_i follow is zero-meaned. This assumption does not imply
there must be both positive and negative responses across voxels.
However, it means that Bayesian RSA treats the task-evoked activity
against baseline BOLD level as signal, while in other RSA tools
the deviation of task-evoked activity in each voxel from the average
task-evoked activity level across voxels may be considered as signal
of interest. Due to this assumption in BRSA, relatively high degree
of similarity may be expected when the activity patterns of two
task conditions both include strong sensory driven signals regardless
of their specific stimuli. When two task conditions elicit exactly
the same activity patterns but only differ in their global magnitudes,
under the assumption in BRSA, their similarity is 1; under the assumption
that only deviation of pattern from average patterns is signal of interest,
their similarity should be -1.
Parameters
----------
n_iter : int.
Number of maximum iterations to run the algorithm.
rank : int. Default: None
The rank of the covariance matrix.
If not provided, the covariance matrix will be assumed
to be full rank. When you have many conditions
(e.g., calculating the similarity matrix of responses to each event),
you might try specifying a lower rank.
auto_nuisance: boolean.
In order to model spatial correlation between voxels that cannot
be accounted for by common response captured in the design matrix,
we assume that a set of time courses not related to the task
conditions are shared across voxels with unknown amplitudes.
One approach is for users to provide time series which they consider
as nuisance but exist in the noise (such as head motion).
The other way is to take the first n_nureg principal components
in the residual after subtracting the response to the design matrix
from the data, and use these components as the nuisance regressor.
This flag is for the second approach. If turned on,
PCA or factor analysis will be applied to the residuals
to obtain new nuisance regressors in each round of fitting.
These two approaches can be combined. If the users provide nuisance
regressors and set this flag as True, then the first n_nureg
principal components of the residuals after subtracting
both the responses to design matrix and the user-supplied nuisance
regressors will be used in addition to the nuisance regressors
provided by the users.
Note that nuisance regressor is not required from user. If it is
not provided, DC components for each run will be included as nuisance
regressor regardless of the auto_nuisance parameter.
n_nureg: Optional[int].
Number of nuisance regressors to use in order to model signals
shared across voxels not captured by the design matrix.
This number is in addition to any nuisance regressor that the user
has already provided.
If set to None, the number of nuisance regressors will be
automatically determined based on M Gavish
and D Donoho's approximate estimation of optimal hard
threshold for singular values.
This only takes effect if auto_nuisance is True.
nureg_zscore: boolean.
A flag to tell the algorithm whether data is z-scored before
estimating the number of nuisance regressor components necessary to
account for spatial noise correlation. It also determines whether
the residual noise is z-scored before estimating the nuisance
regressors from residual.
This only takes effect if auto_nuisance is True.
nureg_method: string, naming a method from sklearn.decomposition.
'PCA', 'ICA', 'FA' or 'SPCA' are currently supported.
The method to estimate the shared component in noise across voxels.
This only takes effect if auto_nuisance is True.
baseline_single: boolean.
A time course of constant 1 will be included to the nuisance
regressor regardless of whether the user requests.
If baseline_single is set to False, one such regressor is included
for each fMRI run, but a single component in beta0\\_ will be
computed as the average of the weight maps corresponding to
these regressors. This might cause underestimation of noise variance.
If baseline_single is True, only one regressor of constant 1 will be
used for the whole dataset. This might be desirable if you
believe the average image intensity might not scale with the
same proportion for different voxels across scan. In other words,
it is possible that some part of the brain is more vulnerable to
change in baseline intensity due to facts such as
field inhomogeneity. Setting baseline_single to True will force the
nuisance regressors automatically estimated from residuals to
capture this. However, when each task condition only occurs in one
run and when the design matrix in each run sums together close to
a flat line, this option can cause the estimated similarity to be
extremely high between conditions occurring in the same run.
GP_space: boolean.
Whether to impose a Gaussian Process (GP) prior on the log(pseudo-SNR).
If true, the GP has a kernel defined over spatial coordinate
of each voxel. The idea behind this option is that
adjacent voxels should have similar SNRs.
This is relatively slow for big ROI. We find that when SNR
is generally low, smoothness can be overestimated.
But such regularization may reduce variance in the estimated
SNR map and similarity matrix.
GP_inten: boolean.
Whether to include a kernel defined over the intensity of image.
GP_space should be True as well if you want to use this,
because the smoothness should be primarily in space.
Smoothness in intensity is just complementary. The idea
behind this option is that voxels should have similar
SNRs when they are both adjacent (imposed by GP_space)
and are of the same tissue type (when their image intensities
are close). If you accept the second assumption, then
you can set GP_inten as True and provide an array to the `inten`
variable, expressing the intensities (brightness) for each voxel.
space_smooth_range: float.
The distance (in unit the same as what
you would use when supplying the spatial coordiates of
each voxel, typically millimeter) which you believe is
the maximum range of the length scale parameter of
Gaussian Process defined over voxel location. This is
used to impose a half-Cauchy prior on the length scale.
If set to None, the program will default to half of the
maximum distance between all voxels.
inten_smooth_range: float.
The difference in image intensity which
you believe is the maximum range of plausible length
scale for the Gaussian Process defined over image
intensity. Length scales larger than this are allowed,
but will be penalized. If set to None, this parameter
will default to half of the maximal intensity difference.
tau_range: float.
The reasonable range of the standard deviation
of log(SNR). This range should not be too
large. 5 is a loose range.
When a Gaussian Process is imposed on the log(SNR),
this parameter is used in a half-Cauchy prior
on the standard deviation, or an inverse-Gamma prior
on the variance of the GP.
tau2_prior: Callable[[float, int, float]], [float, float]],
Default: prior_GP_var_inv_gamma.
Can be prior_GP_var_inv_gamma or prior_GP_var_half_cauchy,
or a custom function.
The function which impose a prior for tau^2, the variance of the
GP prior on log(SNR), and returns the MAP estimate of tau^2.
It can be either prior_GP_var_inv_gamma for inverse-Gamma
or prior_GP_var_half_cauchy for half-Cauchy.
half-Cauchy prior is in fact imposed on tau.
But tau_range describes the range of tau in the prior in both cases.
Both functions are part of brsa module.
See also `.prior_GP_var_inv_gamma` and
`.prior_GP_var_half_cauchy`
To use the default inverse-Gamma prior, you can ignore this argument::
from brainiak.reprsimil.brsa import BRSA
brsa = BRSA()
If you want to try the alternative half-Cauchy prior,
then you need to import it in addition to BRSA::
from brainiak.reprsimil.brsa import BRSA, prior_GP_var_half_cauchy
brsa = BRSA(tau2_prior=prior_GP_var_half_cauchy)
eta: float.
A small number added to the diagonal element of the
covariance matrix in the Gaussian Process prior. This is
to ensure that the matrix is invertible.
init_iter: int.
How many initial iterations to fit the model
without introducing the GP prior before fitting with it,
if GP_space or GP_inten is requested. This initial
fitting is to give the parameters a good starting point.
optimizer: str or callable.
The optimizer to use for minimizing cost function which
scipy.optimize.minimize can accept.
We use 'L-BFGS-B' as a default. Users can try other strings
corresponding to optimizer provided by scipy.optimize.minimize,
or a custom optimizer, such as 'BFGS' or 'CG'.
Note that BRSA fits a lot of parameters. So a chosen optimizer
should accept gradient (Jacobian) of the cost function. Otherwise
the fitting is likely to be unbearably slow. We do not calculate
Hessian of the objective function. So an optimizer which requires
Hessian cannot be used.
random_state : RandomState or an int seed.
A random number generator instance to define the state of
the random permutations generator whenever the module
needs to generate random number (e.g., initial parameter
of the Cholesky factor).
anneal_speed: float.
Annealing is introduced in fitting of the Cholesky
decomposition of the shared covariance matrix. The amount
of perturbation decays exponentially. This parameter sets
the ratio of the maximum number of iteration to the
time constant of the exponential.
anneal_speed=10 means by n_iter/10 iterations,
the amount of perturbation is reduced by 2.713 times.
minimize_options: dictionary.
Default: {'gtol': 1e-4, 'disp': False, 'maxiter': 6}
This is the dictionary passed as the options argument to
scipy.optimize.minimize which minimizes the cost function during
fitting. Notice that the minimization is performed for many times,
alternating between optimizing the covariance matrix U underlying
the pattern similarity matrix, and SNR. At most n_iter times
of this alternation is performed. So within each step of fitting,
the step of iteration performed by scipy.optimize.minimize does not
have to be very large. In other words, scipy.optimize.minimize does
not need to converge within each step of the alternating fitting
procedure.
tol: float.
Tolerance parameter passed to scipy.optimize.minimize. It is also
used for determining convergence of the alternating fitting
procedure.
Attributes
----------
U_ : numpy array, shape=[condition,condition].
The shared covariance matrix.
L_ : numpy array, shape=[condition,rank].
The Cholesky factor of the shared covariance matrix
(lower-triangular matrix).
C_: numpy array, shape=[condition,condition].
The correlation matrix derived from the shared covariance matrix.
This is the estimated similarity matrix between neural patterns
to your task conditions. Notice that it is recommended that
you also check U\\_, which is the covariance matrix underlying
this correlation matrix. In cases there is almost no response
to your task conditions, the diagonal values of U\\_ would become
very small and C\\_ might contain many correlation coefficients
close to 1 or -1. This might not reflect true strong correlation
or strong negative correlation, but a result of lack of
task-related neural activity, design matrix that does not match
true neural response, or not enough data.
It is also recommended to check nSNR\\_ after mapping it back to
the brain. A "reasonable" map should at least have higher values
in gray matter in than white matter.
nSNR_ : numpy array, shape=[voxels,].
The normalized pseudo-SNR of all voxels.
They are normalized such that the geometric mean is 1.
Note that this attribute can not be interpreted as true SNR,
but the relative ratios between voxel indicates the contribution
of each voxel to the representational similarity structure.
sigma_ : numpy array, shape=[voxels,].
The estimated standard deviation of the noise in each voxel
Assuming AR(1) model, this means the standard deviation
of the innovation noise.
rho_ : numpy array, shape=[voxels,].
The estimated autoregressive coefficient of each voxel
bGP_ : float, only if GP_space or GP_inten is True.
The standard deviation of the GP prior
lGPspace_ : float, only if GP_space or GP_inten is True
The length scale of Gaussian Process prior of log(SNR)
lGPinten_: float, only if GP_inten is True
The length scale in fMRI intensity of the GP prior of log(SNR)
beta_: array, shape=[conditions, voxels]
The maximum a posterior estimation of the response amplitudes
of each voxel to each task condition.
beta0_: numpy array, shape=[n_nureg + n_base, voxels]
The loading weights of each voxel for the shared time courses
not captured by the design matrix. This helps capture the
structure of spatial covariance of task-unrelated signal.
n_base is the number of columns of the user-supplied nuisance
regressors plus one for DC component
X0_: numpy array, shape=[time_points, n_nureg + n_base]
The estimated time course that is shared across voxels but
unrelated to the events of interest (design matrix).
beta0_null_: numpy array, shape=[n_nureg + n_base, voxels]
The equivalent of beta0\\_ in a null model which does not
include the design matrix and response pattern beta.
X0_null_: numpy array, shape=[time_points, n_nureg + n_base]
The equivalent of X0\\_ in a null model which does not
include the design matrix and response pattern beta
n_nureg_: int
Number of nuisance regressor in addition to such
regressors provided by the user (if any), if auto_nuisance
is set to True. If n_nureg is set to 'opt',
this will be estimated from data. 'opt' will use M Gavish
and D Donoho's approximate estimation of optimal hard
threshold for singular values.
random_state_: `RandomState`
Random number generator initialized using random_state.
"""
def __init__(
        self, n_iter=100, rank=None,
        auto_nuisance=True, n_nureg=None, nureg_zscore=True,
        nureg_method='PCA', baseline_single=False,
        GP_space=False, GP_inten=False,
        space_smooth_range=None, inten_smooth_range=None,
        tau_range=5.0,
        tau2_prior=prior_GP_var_inv_gamma,
        eta=0.0001, init_iter=20, optimizer='L-BFGS-B',
        random_state=None, anneal_speed=10, tol=1e-4,
        minimize_options={'gtol': 1e-4, 'disp': False,
                          'maxiter': 6}):
    # See the class docstring for the meaning of each parameter.
    # NOTE(review): the mutable dict default for minimize_options is shared
    # across instances; it appears to be only read during fitting, but
    # confirm no caller mutates it in place.
    self.n_iter = n_iter
    self.rank = rank
    self.GP_space = GP_space
    self.GP_inten = GP_inten
    self.tol = tol
    self.auto_nuisance = auto_nuisance
    self.n_nureg = n_nureg
    self.nureg_zscore = nureg_zscore
    if auto_nuisance:
        # n_nureg may be None (estimated from data later); otherwise it
        # must be a positive integer.
        assert (n_nureg is None) \
            or (isinstance(n_nureg, int) and n_nureg > 0), \
            'n_nureg should be a positive integer or None'\
            ' if auto_nuisance is True.'
    # Residual preprocessing applied before extracting nuisance regressors:
    # either z-scoring or the identity.
    if self.nureg_zscore:
        self.preprocess_residual = lambda x: _zscore(x)
    else:
        self.preprocess_residual = lambda x: x
    # Factory returning the decomposition object used to extract shared
    # noise components from residuals; instantiated later with the chosen
    # number of components.
    if nureg_method == 'FA':
        self.nureg_method = lambda x: FactorAnalysis(n_components=x)
    elif nureg_method == 'PCA':
        self.nureg_method = lambda x: PCA(n_components=x, whiten=True)
    elif nureg_method == 'SPCA':
        self.nureg_method = lambda x: SparsePCA(n_components=x,
                                                max_iter=20, tol=tol)
    elif nureg_method == 'ICA':
        self.nureg_method = lambda x: FastICA(n_components=x,
                                              whiten=True)
    else:
        raise ValueError('nureg_method can only be FA, PCA, '
                         'SPCA(for sparse PCA) or ICA')
    self.baseline_single = baseline_single
    self.minimize_options = minimize_options
    self.eta = eta
    # This is a tiny ridge added to the Gaussian Process
    # covariance matrix template to guarantee that it is invertible.
    # Mathematically it means we assume that this proportion of the
    # variance is always independent between voxels for the log(SNR2).
    self.space_smooth_range = space_smooth_range
    self.inten_smooth_range = inten_smooth_range
    # The kernel of the Gaussian Process is the product of a kernel
    # defined on spatial coordinate and a kernel defined on
    # image intensity.
    self.tau_range = tau_range
    self.tau2_prior = tau2_prior
    self.init_iter = init_iter
    # When imposing smoothness prior, fit the model without this
    # prior for this number of iterations.
    self.optimizer = optimizer
    self.random_state = random_state
    self.anneal_speed = anneal_speed
    return
def fit(self, X, design, nuisance=None, scan_onsets=None, coords=None,
inten=None):
"""Compute the Bayesian RSA
Parameters
----------
X: numpy array, shape=[time_points, voxels]
If you have multiple scans of the same participants that you
want to analyze together, you should concatenate them along
the time dimension after proper preprocessing (e.g. spatial
alignment), and specify the onsets of each scan in scan_onsets.
design: numpy array, shape=[time_points, conditions]
This is the design matrix. It should only include the hypothetic
response for task conditions. You should not include
regressors for a DC component or motion parameters, unless you
want to estimate their pattern similarity with response patterns
to your task conditions. If you want to model head motion,
you should include them in nuisance regressors.
If you have multiple run, the design matrix
of all runs should be concatenated along the time dimension,
with every column for one condition across runs.
For example, if you have 3 runs of experiment of one participant,
with each run lasting 200 TR. And you have 4 conditions,
then design should be a 600 x 4 numpy array.
nuisance: optional, numpy array, shape=[time_points, nuisance_factors]
The responses to these regressors will be marginalized out from
each voxel, which means they are considered, but won't be assumed
to share the same pseudo-SNR map with the design matrix.
Therefore, the pseudo-SNR map will only reflect the
relative contribution of design matrix to each voxel.
You can provide time courses such as those for head motion
to this parameter.
Note that if auto_nuisance is set to True, the first
n_nureg principal components of residual (excluding the response
to the design matrix and the user-provided nuisance regressors
and a constant baseline)
will be included as additional nuisance regressor after the
first round of fitting.
If auto_nuisance is set to False, the nuisance regressors supplied
by the users together with DC components will be used as
nuisance time series.
Please do not include time course of constant baseline in nuisance.
scan_onsets: optional, numpy array, shape=[runs,]
This specifies the indices of X which correspond to the onset
of each scanning run. For example, if you have two experimental
runs of the same subject, each with 100 TRs, then scan_onsets
should be [0,100].
If you do not provide the argument, the program will
assume all data are from the same run.
The effect of them is to make the inverse matrix
of the temporal covariance matrix of noise block-diagonal.
coords: optional, numpy array, shape=[voxels,3]
This is the coordinate of each voxel,
used for implementing Gaussian Process prior.
inten: optional, numpy array, shape=[voxel,]
This is the average fMRI intensity in each voxel.
It should be calculated from your data without any preprocessing
such as z-scoring. Because it should reflect
whether a voxel is bright (grey matter) or dark (white matter).
A Gaussian Process kernel defined on both coordinate and intensity
imposes a smoothness prior on adjcent voxels
but with the same tissue type. The Gaussian Process
is experimental and has shown good performance on
some visual datasets.
"""
logger.info('Running Bayesian RSA')
self.random_state_ = check_random_state(self.random_state)
# setting random seed
logger.debug('RandState set to {}'.format(self.random_state_))
assert not self.GP_inten or (self.GP_inten and self.GP_space),\
'You must speficiy GP_space to True'\
'if you want to use GP_inten'
# Check input data
assert_all_finite(X)
assert X.ndim == 2, 'The data should be 2-dimensional ndarray'
assert np.all(np.std(X, axis=0) > 0),\
'The time courses of some voxels do not change at all.'\
' Please make sure all voxels are within the brain'
# check design matrix
assert_all_finite(design)
assert design.ndim == 2,\
'The design matrix should be 2-dimensional ndarray'
assert np.linalg.matrix_rank(design) == design.shape[1], \
'Your design matrix has rank smaller than the number of'\
' columns. Some columns can be explained by linear '\
'combination of other columns. Please check your design matrix.'
assert np.size(design, axis=0) == np.size(X, axis=0),\
'Design matrix and data do not '\
'have the same number of time points.'
assert self.rank is None or self.rank <= design.shape[1],\
'Your design matrix has fewer columns than the rank you set'
# Check the nuisance regressors.
if nuisance is not None:
assert_all_finite(nuisance)
assert nuisance.ndim == 2,\
'The nuisance regressor should be 2-dimensional ndarray'
assert np.linalg.matrix_rank(nuisance) == nuisance.shape[1], \
'The nuisance regressor has rank smaller than the number of'\
'columns. Some columns can be explained by linear '\
'combination of other columns. Please check your nuisance' \
'regressors.'
assert np.size(nuisance, axis=0) == np.size(X, axis=0), \
'Nuisance regressor and data do not have the same '\
'number of time points.'
# check scan_onsets validity
assert scan_onsets is None or\
(np.max(scan_onsets) <= X.shape[0] and np.min(scan_onsets) >= 0),\
'Some scan onsets provided are out of the range of time points.'
# check the size of coords and inten
if self.GP_space:
logger.info('Fitting with Gaussian Process prior on log(SNR)')
assert coords is not None and coords.shape[0] == X.shape[1],\
'Spatial smoothness was requested by setting GP_space. '\
'But the voxel number of coords does not match that of '\
'data X, or voxel coordinates are not provided. '\
'Please make sure that coords is in the shape of '\
'[n_voxel x 3].'
assert coords.ndim == 2,\
'The coordinate matrix should be a 2-d array'
if self.GP_inten:
assert inten is not None and inten.shape[0] == X.shape[1],\
'The voxel number of intensity does not '\
'match that of data X, or intensity not provided.'
assert np.var(inten) > 0,\
'All voxels have the same intensity.'
if (not self.GP_space and coords is not None) or\
(not self.GP_inten and inten is not None):
logger.warning('Coordinates or image intensity provided'
' but GP_space or GP_inten is not set '
'to True. The coordinates or intensity are'
' ignored.')
# Estimate the number of necessary nuisance regressors
if self.auto_nuisance:
if self.n_nureg is None:
logger.info('number of nuisance regressors is determined '
'automatically.')
run_TRs, n_runs = self._run_TR_from_scan_onsets(
X.shape[0], scan_onsets)
ts_dc = self._gen_legendre(run_TRs, [0])
_, ts_base, _ = self._merge_DC_to_base(
ts_dc, nuisance, False)
ts_reg = np.concatenate((ts_base, design), axis=1)
beta_hat = np.linalg.lstsq(ts_reg, X, rcond=None)[0]
residuals = X - np.dot(ts_reg, beta_hat)
self.n_nureg_ = np.max(
[1, Ncomp_SVHT_MG_DLD_approx(residuals,
self.nureg_zscore)])
logger.info('Use {} nuisance regressors to model the spatial '
'correlation in noise.'.format(self.n_nureg_))
self.n_nureg_ = np.int32(self.n_nureg_)
else:
self.n_nureg_ = self.n_nureg
self.n_nureg_ = np.int32(self.n_nureg_)
# Run Bayesian RSA
# Note that we have a change of notation here. Within _fit_RSA_UV,
# design matrix is named X and data is named Y, to reflect the
# generative model that data Y is generated by mixing the response
# X to experiment conditions and other neural activity.
# However, in fit(), we keep the tradition of scikit-learn that
# X is the input data to fit and y, a reserved name not used, is
# the label to map to from X.
if not self.GP_space:
# If GP_space is not requested, then the model is fitted
# without imposing any Gaussian Process prior on log(SNR^2)
self.U_, self.L_, self.nSNR_, self.beta_, self.beta0_,\
self._beta_latent_, self.sigma_, self.rho_, _, _, _,\
self.X0_ = self._fit_RSA_UV(X=design, Y=X, X_base=nuisance,
scan_onsets=scan_onsets)
elif not self.GP_inten:
# If GP_space is requested, but GP_inten is not, a GP prior
# based on spatial locations of voxels will be imposed.
self.U_, self.L_, self.nSNR_, self.beta_, self.beta0_,\
self._beta_latent_, self.sigma_, self.rho_, \
self.lGPspace_, self.bGP_, _, \
self.X0_ = self._fit_RSA_UV(
X=design, Y=X, X_base=nuisance,
scan_onsets=scan_onsets, coords=coords)
else:
# If both self.GP_space and self.GP_inten are True,
# a GP prior based on both location and intensity is imposed.
self.U_, self.L_, self.nSNR_, self.beta_, self.beta0_,\
self._beta_latent_, self.sigma_, self.rho_, \
self.lGPspace_, self.bGP_, self.lGPinten_, self.X0_ = \
self._fit_RSA_UV(X=design, Y=X, X_base=nuisance,
scan_onsets=scan_onsets,
coords=coords, inten=inten)
self.C_ = utils.cov2corr(self.U_)
self.design_ = design.copy()
self._rho_design_, self._sigma2_design_ = \
self._est_AR1(self.design_, same_para=True)
self._rho_X0_, self._sigma2_X0_ = self._est_AR1(self.X0_)
# AR(1) parameters of the design matrix and nuisance regressors,
# which will be used in transform or score.
# Finally, we fit a null model with the same setting except
# that there is no response to X
self.beta0_null_, self.sigma_null_, self.rho_null_, \
self.X0_null_ = self._fit_null(Y=X, X_base=nuisance,
scan_onsets=scan_onsets)
self._rho_X0_null_, self._sigma2_X0_null_ =\
self._est_AR1(self.X0_null_)
return self
def transform(self, X, y=None, scan_onsets=None):
""" Use the model to estimate the time course of response to
each condition (ts), and the time course unrelated to task
(ts0) which is spread across the brain.
This is equivalent to "decoding" the design matrix and
nuisance regressors from a new dataset different from the
training dataset on which fit() was applied. An AR(1) smooth
prior is imposed on the decoded ts and ts0 with the AR(1)
parameters learnt from the corresponding time courses in the
training data.
Notice: if you set the rank to be lower than the number of
experimental conditions (number of columns in the design
matrix), the recovered task-related activity will have
collinearity (the recovered time courses of some conditions
can be linearly explained by the recovered time courses
of other conditions).
Parameters
----------
X : numpy arrays, shape=[time_points, voxels]
fMRI data of new data of the same subject. The voxels should
match those used in the fit() function. If data are z-scored
(recommended) when fitting the model, data should be z-scored
as well when calling transform()
y : not used (as it is unsupervised learning)
scan_onsets : numpy array, shape=[number of runs].
A list of indices corresponding to the onsets of
scans in the data X. If not provided, data will be assumed
to be acquired in a continuous scan.
Returns
-------
ts : numpy arrays, shape = [time_points, condition]
The estimated response to the task conditions which have the
response amplitudes estimated during the fit step.
ts0: numpy array, shape = [time_points, n_nureg]
The estimated time course spread across the brain, with the
loading weights estimated during the fit step.
"""
assert X.ndim == 2 and X.shape[1] == self.beta_.shape[1], \
'The shape of X is not consistent with the shape of data '\
'used in the fitting step. They should have the same number '\
'of voxels'
assert scan_onsets is None or (scan_onsets.ndim == 1 and
0 in scan_onsets), \
'scan_onsets should either be None or an array of indices '\
'If it is given, it should include at least 0'
if scan_onsets is None:
scan_onsets = np.array([0], dtype=int)
else:
scan_onsets = np.int32(scan_onsets)
ts, ts0, log_p = self._transform(
Y=X, scan_onsets=scan_onsets, beta=self.beta_,
beta0=self.beta0_, rho_e=self.rho_, sigma_e=self.sigma_,
rho_X=self._rho_design_, sigma2_X=self._sigma2_design_,
rho_X0=self._rho_X0_, sigma2_X0=self._sigma2_X0_)
return ts, ts0
def score(self, X, design, scan_onsets=None):
""" Use the model and parameters estimated by fit function
from some data of a participant to evaluate the log
likelihood of some new data of the same participant.
Design matrix of the same set of experimental
conditions in the testing data should be provided, with each
column corresponding to the same condition as that column
in the design matrix of the training data.
Unknown nuisance time series will be marginalized, assuming
they follow the same spatial pattern as in the training
data. The hypothetical response captured by the design matrix
will be subtracted from data before the marginalization
when evaluating the log likelihood. For null model,
nothing will be subtracted before marginalization.
There is a difference between the form of likelihood function
used in fit() and score(). In fit(), the response amplitude
beta to design matrix X and the modulation beta0 by nuisance
regressor X0 are both marginalized, with X provided and X0
estimated from data. In score(), posterior estimation of
beta and beta0 from the fitting step are assumed unchanged
to testing data and X0 is marginalized.
The logic underlying score() is to transfer
as much as what we can learn from training data when
calculating a likelihood score for testing data.
If you z-scored your data during fit step, you should
z-score them for score function as well. If you did not
z-score in fitting, you should not z-score here either.
Parameters
----------
X : numpy arrays, shape=[time_points, voxels]
fMRI data of new data of the same subject. The voxels should
match those used in the fit() function. If data are z-scored
(recommended) when fitting the model, data should be z-scored
as well when calling transform()
design : numpy array, shape=[time_points, conditions]
Design matrix expressing the hypothetical response of
the task conditions in data X.
scan_onsets : numpy array, shape=[number of runs].
A list of indices corresponding to the onsets of
scans in the data X. If not provided, data will be assumed
to be acquired in a continuous scan.
Returns
-------
ll: float.
The log likelihood of the new data based on the model and its
parameters fit to the training data.
ll_null: float.
The log likelihood of the new data based on a null model
which assumes the same as the full model for everything
except for that there is no response to any of the
task conditions.
"""
assert X.ndim == 2 and X.shape[1] == self.beta_.shape[1], \
'The shape of X is not consistent with the shape of data '\
'used in the fitting step. They should have the same number '\
'of voxels'
assert scan_onsets is None or (scan_onsets.ndim == 1 and
0 in scan_onsets), \
'scan_onsets should either be None or an array of indices '\
'If it is given, it should include at least 0'
if scan_onsets is None:
scan_onsets = np.array([0], dtype=int)
else:
scan_onsets = np.int32(scan_onsets)
ll = self._score(Y=X, design=design, beta=self.beta_,
scan_onsets=scan_onsets, beta0=self.beta0_,
rho_e=self.rho_, sigma_e=self.sigma_,
rho_X0=self._rho_X0_, sigma2_X0=self._sigma2_X0_)
ll_null = self._score(Y=X, design=None, beta=None,
scan_onsets=scan_onsets, beta0=self.beta0_,
rho_e=self.rho_, sigma_e=self.sigma_,
rho_X0=self._rho_X0_,
sigma2_X0=self._sigma2_X0_)
return ll, ll_null
# The following 2 functions _D_gen and _F_gen generate templates used
# for constructing inverse of covariance matrix of AR(1) noise
    # The inverse of the covariance matrix is
# (I - rho1 * D + rho1**2 * F) / sigma**2. D is a matrix where all the
# elements adjacent to the diagonal are 1 and all others are 0. F is
# a matrix which is 1 on all diagonal elements except for in the first
# and last columns. We denote (I - rho1 * D + rho1**2 * F) with A.
# In the function calculating likelihood function,
# XTAX, YTAY_diag, YTAX all mean multiplying the inverse covariance matrix
# in between either the design matrix or the data.
# As one can see, even though rho1 and sigma2 might update as we keep
# fitting parameters, several terms stay unchanged and do not need to
# be re-calculated.
    # For example, in X'AX = X'(I - rho1*D + rho1**2*F)X / sigma2,
# the products X'X, X'DX, X'FX, etc. can always be re-used if they
# are pre-calculated. Therefore, _D_gen and _F_gen constructs matrices
# D and F, and _prepare_data_* calculates these products that can be
# re-used. In principle, once parameters have been fitted for a
# dataset, they can be updated for new incoming data by adding the
# products X'X, X'DX, X'FX, X'Y etc. from new data to those from
# existing data, and refit the parameters starting from the ones
# fitted from existing data.
def _D_gen(self, TR):
if TR > 0:
return np.diag(np.ones(TR - 1), -1) \
+ np.diag(np.ones(TR - 1), 1)
else:
return np.empty([0, 0])
def _F_gen(self, TR):
if TR > 0:
F = np.eye(TR)
F[0, 0] = 0
F[TR - 1, TR - 1] = 0
return F
else:
return np.empty([0, 0])
def _run_TR_from_scan_onsets(self, n_T, scan_onsets=None):
if scan_onsets is None:
# assume that all data are acquired within the same scan.
n_run = 1
run_TRs = np.array([n_T], dtype=int)
else:
# Each value in the scan_onsets tells the index at which
# a new scan starts. For example, if n_T = 500, and
# scan_onsets = [0,100,200,400], this means that the time points
# of 0-99 are from the first scan, 100-199 are from the second,
# 200-399 are from the third and 400-499 are from the fourth
run_TRs = np.int32(np.diff(np.append(scan_onsets, n_T)))
run_TRs = np.delete(run_TRs, np.where(run_TRs == 0))
n_run = run_TRs.size
# delete run length of 0 in case of duplication in scan_onsets.
logger.info('I infer that the number of volumes'
' in each scan are: {}'.format(run_TRs))
return run_TRs, n_run
def _prepare_DF(self, n_T, scan_onsets=None):
""" Prepare the essential template matrices D and F for
pre-calculating some terms to be re-used.
The inverse covariance matrix of AR(1) noise is
sigma^-2 * (I - rho1*D + rho1**2 * F).
And we denote A = I - rho1*D + rho1**2 * F"""
run_TRs, n_run = self._run_TR_from_scan_onsets(n_T, scan_onsets)
D_ele = map(self._D_gen, run_TRs)
F_ele = map(self._F_gen, run_TRs)
D = scipy.linalg.block_diag(*D_ele)
F = scipy.linalg.block_diag(*F_ele)
# D and F above are templates for constructing
# the inverse of temporal covariance matrix of noise
return D, F, run_TRs, n_run
def _prepare_data_XY(self, X, Y, D, F):
"""Prepares different forms of products of design matrix X
and data Y, or between themselves.
These products are re-used a lot during fitting.
So we pre-calculate them. Because these are reused,
it is in principle possible to update the fitting
as new data come in, by just incrementally adding
the products of new data and their corresponding parts
of design matrix to these pre-calculated terms.
"""
XTY, XTDY, XTFY = self._make_templates(D, F, X, Y)
YTY_diag = np.sum(Y * Y, axis=0)
YTDY_diag = np.sum(Y * np.dot(D, Y), axis=0)
YTFY_diag = np.sum(Y * np.dot(F, Y), axis=0)
XTX, XTDX, XTFX = self._make_templates(D, F, X, X)
return XTY, XTDY, XTFY, YTY_diag, YTDY_diag, YTFY_diag, XTX, \
XTDX, XTFX
def _gen_X_DC(self, run_TRs):
if self.baseline_single:
X_DC = np.ones((np.sum(run_TRs), 1))
else:
X_DC = scipy.linalg.block_diag(*map(np.ones, run_TRs)).T
return X_DC
def _gen_legendre(self, run_TRs, orders):
def reg(x):
return np.concatenate(
[scipy.special.legendre(o)(np.linspace(-1, 1, x))[None, :]
for o in orders], axis=0)
reg_poly = scipy.linalg.block_diag(
*map(reg, run_TRs)).T
return reg_poly
def _prepare_data_XYX0(self, X, Y, X_base, X_res, D, F, run_TRs,
no_DC=False):
"""Prepares different forms of products between design matrix X or
data Y or nuisance regressors X0.
These products are re-used a lot during fitting.
So we pre-calculate them.
no_DC means not inserting regressors for DC components
into nuisance regressor.
It will only take effect if X_base is not None.
"""
X_DC = self._gen_X_DC(run_TRs)
reg_sol = np.linalg.lstsq(X_DC, X, rcond=None)
if np.any(np.isclose(reg_sol[1], 0)):
raise ValueError('Your design matrix appears to have '
'included baseline time series.'
'Either remove them, or move them to'
' nuisance regressors.')
X_DC, X_base, idx_DC = self._merge_DC_to_base(X_DC, X_base,
no_DC)
if X_res is None:
X0 = X_base
else:
X0 = np.concatenate((X_base, X_res), axis=1)
n_X0 = X0.shape[1]
X0TX0, X0TDX0, X0TFX0 = self._make_templates(D, F, X0, X0)
XTX0, XTDX0, XTFX0 = self._make_templates(D, F, X, X0)
X0TY, X0TDY, X0TFY = self._make_templates(D, F, X0, Y)
return X0TX0, X0TDX0, X0TFX0, XTX0, XTDX0, XTFX0, \
X0TY, X0TDY, X0TFY, X0, X_base, n_X0, idx_DC
def _merge_DC_to_base(self, X_DC, X_base, no_DC):
""" Merge DC components X_DC to the baseline time series
X_base (By baseline, this means any fixed nuisance
regressors not updated during fitting, including DC
components and any nuisance regressors provided by
the user.
X_DC is always in the first few columns of X_base.
"""
if X_base is not None:
reg_sol = np.linalg.lstsq(X_DC, X_base, rcond=None)
if not no_DC:
if not np.any(np.isclose(reg_sol[1], 0)):
# No columns in X_base can be explained by the
# baseline regressors. So we insert them.
X_base = np.concatenate((X_DC, X_base), axis=1)
idx_DC = np.arange(0, X_DC.shape[1])
else:
logger.warning('Provided regressors for uninteresting '
'time series already include baseline. '
'No additional baseline is inserted.')
idx_DC = np.where(np.isclose(reg_sol[1], 0))[0]
else:
idx_DC = np.where(np.isclose(reg_sol[1], 0))[0]
else:
# If a set of regressors for non-interested signals is not
# provided, then we simply include one baseline for each run.
X_base = X_DC
idx_DC = np.arange(0, X_base.shape[1])
logger.info('You did not provide time series of no interest '
'such as DC component. Trivial regressors of'
' DC component are included for further modeling.'
' The final covariance matrix won''t '
'reflect these components.')
return X_DC, X_base, idx_DC
def _make_ar1_quad_form(self, XTX, XTDX, XTFX, rho1):
# Calculate the matrix X'AX = X'X - rho1 * X'DX + rho1^2 * X'FX
# Here, rho1 is the AR(1) coefficient. X is a matrix of time series
# with each row corresponding to a vector at one
# time point. The forms of matrices D and F are defined in _prepare_DF
# function. sigma^-2 * A would be the inverse of covariance matrix
# of AR(1) process (precision matrix) with rho1 as the AR coefficient
# and sigma^2 as the variance of independent noise at each time point.
return XTX - rho1 * XTDX + rho1**2 * XTFX
def _make_ar1_quad_form_grad(self, XTDX, XTFX, rho1):
# Calculate the derivative of the quadratic form X'AX with respect to
# AR1 coefficient rho1, given precalculated terms X'DX and X'FX,
# and rho1.
return - XTDX + 2 * rho1 * XTFX
def _make_templates(self, D, F, X, Y):
XTY = np.dot(X.T, Y)
XTDY = np.dot(np.dot(X.T, D), Y)
XTFY = np.dot(np.dot(X.T, F), Y)
return XTY, XTDY, XTFY
    def _precompute_ar1_quad_forms(self, XTY, XTDY, XTFY, YTY_diag, YTDY_diag,
                                   YTFY_diag, XTX, XTDX, XTFX, X0TX0, X0TDX0,
                                   X0TFX0, XTX0, XTDX0, XTFX0, X0TY, X0TDY,
                                   X0TFY, L, rho1, n_V, n_X0):
        """Combine the precalculated pairwise products into the
        voxel-wise quadratic forms X'AX, Y'AY, X0'AX0 etc. for the
        current per-voxel AR(1) coefficients rho1, and into their
        "corrected" versions (X'AcorrX, X'AcorrY, Y'AcorrY) in which the
        contribution explainable by the nuisance regressors X0 has been
        projected out. rho1 has one entry per voxel (length n_V)."""
        # Calculate the sandwich terms which put A between X, Y and X0
        # These terms are used a lot in the likelihood. But in the _fitV
        # step, they only need to be calculated once, since A is fixed.
        # In _fitU step, they need to be calculated at each iteration,
        # because rho1 changes.
        XTAY = self._make_ar1_quad_form(XTY, XTDY, XTFY, rho1)
        # dimension: feature*space
        YTAY = self._make_ar1_quad_form(YTY_diag, YTDY_diag, YTFY_diag, rho1)
        # dimension: space,
        # A/sigma2 is the inverse of noise covariance matrix in each voxel.
        # YTAY means Y'AY
        XTAX = XTX[None, :, :] - rho1[:, None, None] \
            * XTDX[None, :, :] \
            + rho1[:, None, None]**2 * XTFX[None, :, :]
        # dimension: space*feature*feature
        X0TAX0 = X0TX0[None, :, :] - rho1[:, None, None] \
            * X0TDX0[None, :, :] \
            + rho1[:, None, None]**2 * X0TFX0[None, :, :]
        # dimension: space*#baseline*#baseline
        XTAX0 = XTX0[None, :, :] - rho1[:, None, None] \
            * XTDX0[None, :, :] \
            + rho1[:, None, None]**2 * XTFX0[None, :, :]
        # dimension: space*feature*#baseline
        X0TAY = self._make_ar1_quad_form(X0TY, X0TDY, X0TFY, rho1)
        # dimension: #baseline*space
        X0TAX0_i = np.linalg.solve(X0TAX0, np.identity(n_X0)[None, :, :])
        # dimension: space*#baseline*#baseline
        # NOTE: XTAcorrX / XTAcorrY below are aliases of XTAX / XTAY
        # (no copy). The in-place "-=" in the loop therefore also
        # mutates XTAX and XTAY; this is safe only because XTAX and
        # XTAY are not used again after this point.
        XTAcorrX = XTAX
        # dimension: space*feature*feature
        XTAcorrY = XTAY
        # dimension: feature*space
        # Per voxel, subtract the part of X'AX and X'AY that projects
        # onto the nuisance regressors X0 (Schur-complement correction).
        for i_v in range(n_V):
            XTAcorrX[i_v, :, :] -= \
                np.dot(np.dot(XTAX0[i_v, :, :], X0TAX0_i[i_v, :, :]),
                       XTAX0[i_v, :, :].T)
            XTAcorrY[:, i_v] -= np.dot(np.dot(XTAX0[i_v, :, :],
                                              X0TAX0_i[i_v, :, :]),
                                       X0TAY[:, i_v])
        XTAcorrXL = np.dot(XTAcorrX, L)
        # dimension: space*feature*rank
        LTXTAcorrXL = np.tensordot(XTAcorrXL, L, axes=(1, 0))
        # dimension: space*rank*rank
        LTXTAcorrY = np.dot(L.T, XTAcorrY)
        # dimension: rank*space
        YTAcorrY = YTAY - np.sum(X0TAY * np.einsum('ijk,ki->ji',
                                                   X0TAX0_i, X0TAY), axis=0)
        # dimension: space
        return X0TAX0, XTAX0, X0TAY, X0TAX0_i, \
            XTAcorrX, XTAcorrY, YTAcorrY, LTXTAcorrY, XTAcorrXL, LTXTAcorrXL
    def _calc_LL(self, rho1, LTXTAcorrXL, LTXTAcorrY, YTAcorrY, X0TAX0, SNR2,
                 n_V, n_T, n_run, rank, n_X0):
        """Compute the model log likelihood (excluding any GP prior on
        log(SNR)) from the precomputed quadratic forms, together with
        intermediate quantities (LAMBDA_i, LAMBDA, YTAcorrXL_LAMBDA,
        sigma2) that callers reuse. SNR2 and rho1 have one entry per
        voxel (length n_V)."""
        # Calculate the log likelihood (excluding the GP prior of log(SNR))
        # for both _loglike_AR1_diagV_fitU and _loglike_AR1_diagV_fitV,
        # in addition to a few other terms.
        LAMBDA_i = LTXTAcorrXL * SNR2[:, None, None] + np.eye(rank)
        # dimension: space*rank*rank
        LAMBDA = np.linalg.solve(LAMBDA_i, np.identity(rank)[None, :, :])
        # dimension: space*rank*rank
        # LAMBDA is essentially the inverse covariance matrix of the
        # posterior probability of alpha, which bears the relation with
        # beta by beta = L * alpha. L is the Cholesky factor of the
        # shared covariance matrix U. Refer to the explanation below
        # Equation 5 in the NIPS paper.
        # NOTE(review): LAMBDA = inv(LAMBDA_i), so presumably LAMBDA_i
        # is the posterior precision and LAMBDA the posterior covariance
        # of alpha — confirm the comment above against the paper.
        YTAcorrXL_LAMBDA = np.einsum('ji,ijk->ik', LTXTAcorrY, LAMBDA)
        # dimension: space*rank
        # Closed-form residual variance per voxel given the other
        # parameters, with n_X0 degrees of freedom consumed by X0.
        sigma2 = (YTAcorrY - np.sum(LTXTAcorrY * YTAcorrXL_LAMBDA.T, axis=0)
                  * SNR2) / (n_T - n_X0)
        # dimension: space
        LL = - np.sum(np.log(sigma2)) * (n_T - n_X0) * 0.5 \
            + np.sum(np.log(1 - rho1**2)) * n_run * 0.5 \
            - np.sum(self._half_log_det(X0TAX0)) \
            - np.sum(self._half_log_det(LAMBDA_i)) \
            - (n_T - n_X0) * n_V * (1 + np.log(2 * np.pi)) * 0.5
        # Log likelihood
        return LL, LAMBDA_i, LAMBDA, YTAcorrXL_LAMBDA, sigma2
def _calc_dist2_GP(self, coords=None, inten=None,
GP_space=False, GP_inten=False):
# calculate the square of difference between each voxel's location
# coorinates and image intensity.
if GP_space:
assert coords is not None, 'coordinate is not provided'
# square of spatial distance between every two voxels
dist2 = spdist.squareform(spdist.pdist(coords, 'sqeuclidean'))
# set the hyperparameter for the GP process:
if self.space_smooth_range is None:
space_smooth_range = np.max(dist2)**0.5 / 2.0
# By default, we assume the length scale should be
# within half the size of ROI.
else:
space_smooth_range = self.space_smooth_range
if GP_inten:
assert inten is not None, 'intensity is not provided'
# squre of difference between intensities of
# # every two voxels
inten_diff2 = spdist.squareform(
spdist.pdist(inten[:, None], 'sqeuclidean'))
# set the hyperparameter for the GP process:
if self.inten_smooth_range is None:
inten_smooth_range = np.max(inten_diff2)**0.5 / 2.0
# By default, we assume the length scale should be
# within half the maximum difference of intensity.
else:
inten_smooth_range = self.inten_smooth_range
n_smooth = 2
else:
inten_diff2 = None
inten_smooth_range = None
n_smooth = 1
else:
n_smooth = 0
dist2 = None
inten_diff2 = None
GP_inten = False
space_smooth_range = None
inten_smooth_range = None
return dist2, inten_diff2, space_smooth_range, inten_smooth_range,\
n_smooth
def _build_index_param(self, n_l, n_V, n_smooth):
""" Build dictionaries to retrieve each parameter
from the combined parameters.
"""
idx_param_sing = {'Cholesky': np.arange(n_l), 'a1': n_l}
# for simplified fitting
idx_param_fitU = {'Cholesky': np.arange(n_l),
'a1': np.arange(n_l, n_l + n_V)}
# for the likelihood function when we fit U (the shared covariance).
idx_param_fitV = {'log_SNR2': np.arange(n_V - 1),
'c_space': n_V - 1, 'c_inten': n_V,
'c_both': np.arange(n_V - 1, n_V - 1 + n_smooth)}
# for the likelihood function when we fit V (reflected by SNR of
# each voxel)
return idx_param_sing, idx_param_fitU, idx_param_fitV
def _half_log_det(self, M):
""" Return log(|M|)*0.5. For positive definite matrix M
of more than 2 dimensions, calculate this for the
last two dimension and return a value corresponding
to each element in the first few dimensions.
"""
chol = np.linalg.cholesky(M)
if M.ndim == 2:
return np.sum(np.log(np.abs(np.diag(chol))))
else:
return np.sum(np.log(np.abs(np.diagonal(
chol, axis1=-2, axis2=-1))), axis=-1)
def _chol_idx(self, n_C, rank):
l_idx = np.tril_indices(n_C)
if rank is not None:
# The rank of covariance matrix is specified
idx_rank = np.where(l_idx[1] < rank)
l_idx = (l_idx[0][idx_rank], l_idx[1][idx_rank])
logger.info('Using the rank specified by the user: '
'{}'.format(rank))
else:
rank = n_C
# if not specified, we assume you want to
# estimate a full rank matrix
logger.warning('Please be aware that you did not specify the'
' rank of covariance matrix to estimate.'
'I will assume that the covariance matrix '
'shared among voxels is of full rank.'
'Rank = {}'.format(rank))
logger.warning('Please be aware that estimating a matrix of '
'high rank can be very slow.'
'If you have a good reason to specify a rank '
'lower than the number of experiment conditions,'
' do so.')
return l_idx, rank
    def _fit_RSA_UV(self, X, Y, X_base,
                    scan_onsets=None, coords=None, inten=None):
        """ The major utility of fitting Bayesian RSA.
            Note that there is a naming change of variable. X in fit()
            is changed to Y here, and design in fit() is changed to X here.
            This is because we follow the tradition that X expresses the
            variable defined (controlled) by the experimenter, i.e., the
            time course of experimental conditions convolved by an HRF,
            and Y expresses data.
            However, in wrapper function fit(), we follow the naming
            routine of scikit-learn.

            X is the design matrix (time by condition), Y the data
            (time by voxel), X_base the known nuisance regressors.
            scan_onsets gives the first time point of each scan.
            coords and inten (voxel coordinates and intensities) are
            used only when a Gaussian Process prior on log(SNR^2) is
            requested through self.GP_space / self.GP_inten.
            Returns the estimated shared covariance matrix U, its
            Cholesky factor, per-voxel SNR, task betas, nuisance betas,
            "latent" betas (X * L multiplied by them reproduces X * beta,
            used for decoding when L is low-rank), noise standard
            deviation, AR(1) coefficients, GP hyper-parameters, and the
            final nuisance design matrix X0.
        """
        GP_inten = self.GP_inten
        GP_space = self.GP_space
        rank = self.rank
        n_V = np.size(Y, axis=1)
        n_T = np.size(Y, axis=0)
        n_C = np.size(X, axis=1)
        l_idx, rank = self._chol_idx(n_C, rank)
        n_l = np.size(l_idx[0])  # the number of parameters for L
        t_start = time.time()
        D, F, run_TRs, n_run = self._prepare_DF(
            n_T, scan_onsets=scan_onsets)
        XTY, XTDY, XTFY, YTY_diag, YTDY_diag, YTFY_diag, XTX, \
            XTDX, XTFX = self._prepare_data_XY(X, Y, D, F)
        X0TX0, X0TDX0, X0TFX0, XTX0, XTDX0, XTFX0, \
            X0TY, X0TDY, X0TFY, X0, X_base, n_X0, idx_DC = \
            self._prepare_data_XYX0(
                X, Y, X_base, None, D, F, run_TRs, no_DC=False)
        # Prepare the data for fitting. These pre-calculated matrices
        # will be re-used a lot in evaluating likelihood function and
        # gradient.
        # DC component will be added to the nuisance regressors.
        # In later steps, we do not need to add DC components again
        dist2, inten_diff2, space_smooth_range, inten_smooth_range,\
            n_smooth = self._calc_dist2_GP(
                coords=coords, inten=inten,
                GP_space=GP_space, GP_inten=GP_inten)
        # Calculating the distance between voxel locations and betweeen
        # voxel intensities. These are used if a Gaussian Process prior
        # is requested to regularize log(SNR^2)
        idx_param_sing, idx_param_fitU, idx_param_fitV = \
            self._build_index_param(n_l, n_V, n_smooth)
        # Indexes to find each parameter in a combined parameter vector.
        current_GP = np.zeros(n_smooth)
        # We will perform the fitting in 2~3 steps:
        # (1) A preliminary fitting assuming all voxels share
        # exactly the same temporal covariance matrix for their noise.
        # SNR is assumed to be 1 for all voxels in this fitting.
        # Therefore, there are only n_l+2 free parameters.
        # (2) (optional) A fitting which allows each voxel to have their
        # own pseudo-SNR and AR(1) coefficients. But no Gaussian Process
        # prior is imposed on log(SNR). This step is neglected if GP
        # prior is not requested. This step allows the SNR parameters to
        # move closer to their correct values before GP is introduced.
        # This step alternately fits the shared covariance and voxel-
        # specific variance. It fits for init_iter steps and the
        # tolerance is also increased by a factor of 5 to speed up
        # fitting.
        # (3) Final fitting. If GP prior is requested, it will be
        # introduced in this step. Otherwise, just fit as the previous
        # step, but using un-altered tolerance setting, and n_iter
        # as the number of iteration.
        # Step 1 fitting, with a simplified model
        current_vec_U_chlsk_l, current_a1, current_logSigma2 = \
            self._initial_fit_singpara(
                XTX, XTDX, XTFX, YTY_diag, YTDY_diag, YTFY_diag,
                XTY, XTDY, XTFY, X0TX0, X0TDX0, X0TFX0,
                XTX0, XTDX0, XTFX0, X0TY, X0TDY, X0TFY,
                X, Y, X0, idx_param_sing,
                l_idx, n_C, n_T, n_V, n_l, n_run, n_X0, rank)
        # Initialize log(SNR^2) as -log(sigma^2) and remove its mean:
        # the later fitting steps constrain log(SNR^2) to sum to zero
        # across voxels, so only the centered values are fitted.
        current_logSNR2 = -current_logSigma2
        norm_factor = np.mean(current_logSNR2)
        current_logSNR2 = current_logSNR2 - norm_factor
        X_res = None
        # Step 2 fitting, which only happens if
        # GP prior is requested
        if GP_space:
            current_vec_U_chlsk_l, current_a1, current_logSNR2, X_res\
                = self._fit_diagV_noGP(
                    XTY, XTDY, XTFY, YTY_diag, YTDY_diag, YTFY_diag,
                    XTX, XTDX, XTFX, X, Y, X_base, X_res, D, F, run_TRs,
                    current_vec_U_chlsk_l,
                    current_a1, current_logSNR2,
                    idx_param_fitU, idx_param_fitV,
                    l_idx, n_C, n_T, n_V, n_l, n_run, n_X0, rank)
            current_GP[0] = np.log(np.min(
                dist2[np.tril_indices_from(dist2, k=-1)]))
            # We start fitting the model with GP prior with a small
            # length scale: the size of voxels.
            # Alternatively, initialize with a large distance.
            # Further testing of initial parameters need to be done.
            # current_GP[0] = np.log(np.max(dist2)/4.0)
            logger.debug('current GP[0]:{}'.format(current_GP[0]))
            if GP_inten:
                current_GP[1] = np.log(np.maximum(
                    np.percentile(inten_diff2[np.tril_indices_from(
                        inten_diff2, k=-1)], 2), 0.5))
                logger.debug(
                    'current GP[1]:{}'.format(current_GP[1]))
                # We start the length scale for intensity with
                # a small value. A heuristic is 2 percentile of
                # all the square differences. But it should not be
                # smaller than 0.5. This limit is set in case
                # many voxels have close to equal intensities,
                # which might render 2 percentile to 0.
        # Step 3 fitting. GP prior is imposed if requested.
        # In this step, unless auto_nuisance is set to False, X_res
        # will be re-estimated from the residuals after each step
        # of fitting. And X0 will be concatenation of X_base and X_res
        logger.debug('indexing:{}'.format(idx_param_fitV))
        logger.debug('initial GP parameters:{}'.format(current_GP))
        current_vec_U_chlsk_l, current_a1, current_logSNR2,\
            current_GP, X_res = self._fit_diagV_GP(
                XTY, XTDY, XTFY, YTY_diag, YTDY_diag, YTFY_diag,
                XTX, XTDX, XTFX, X, Y, X_base, X_res, D, F, run_TRs,
                current_vec_U_chlsk_l,
                current_a1, current_logSNR2, current_GP, n_smooth,
                idx_param_fitU, idx_param_fitV,
                l_idx, n_C, n_T, n_V, n_l, n_run, n_X0, rank,
                GP_space, GP_inten, dist2, inten_diff2,
                space_smooth_range, inten_smooth_range)
        estU_chlsk_l_AR1_UV = np.zeros([n_C, rank])
        estU_chlsk_l_AR1_UV[l_idx] = current_vec_U_chlsk_l
        # U = L * L', reconstructed from the fitted Cholesky factor.
        est_cov_AR1_UV = np.dot(estU_chlsk_l_AR1_UV, estU_chlsk_l_AR1_UV.T)
        # a1 parametrizes rho1 through arctan, keeping rho1 in (-1, 1).
        est_rho1_AR1_UV = 2 / np.pi * np.arctan(current_a1)
        est_SNR_AR1_UV = np.exp(current_logSNR2 / 2.0)
        # Calculating est_sigma_AR1_UV, est_sigma_AR1_UV,
        # est_beta_AR1_UV and est_beta0_AR1_UV
        X0TX0, X0TDX0, X0TFX0, XTX0, XTDX0, XTFX0, \
            X0TY, X0TDY, X0TFY, X0, X_base, n_X0, _ \
            = self._prepare_data_XYX0(
                X, Y, X_base, X_res, D, F, run_TRs, no_DC=True)
        X0TAX0, XTAX0, X0TAY, X0TAX0_i, \
            XTAcorrX, XTAcorrY, YTAcorrY, LTXTAcorrY, XTAcorrXL, LTXTAcorrXL\
            = self._precompute_ar1_quad_forms(XTY, XTDY, XTFY,
                                              YTY_diag, YTDY_diag, YTFY_diag,
                                              XTX, XTDX, XTFX, X0TX0, X0TDX0,
                                              X0TFX0, XTX0, XTDX0, XTFX0, X0TY,
                                              X0TDY, X0TFY,
                                              estU_chlsk_l_AR1_UV,
                                              est_rho1_AR1_UV, n_V, n_X0)
        LL, LAMBDA_i, LAMBDA, YTAcorrXL_LAMBDA, sigma2 \
            = self._calc_LL(est_rho1_AR1_UV, LTXTAcorrXL, LTXTAcorrY, YTAcorrY,
                            X0TAX0, est_SNR_AR1_UV**2,
                            n_V, n_T, n_run, rank, n_X0)
        est_sigma_AR1_UV = sigma2**0.5
        est_beta_AR1_UV = est_SNR_AR1_UV**2 \
            * np.dot(estU_chlsk_l_AR1_UV, YTAcorrXL_LAMBDA.T)
        est_beta_AR1_UV_latent = \
            est_SNR_AR1_UV**2 * YTAcorrXL_LAMBDA.T
        # the latent term means that X*L multiplied by this term
        # is the same as X*beta. This will be used for decoding
        # and cross-validating, in case L is low-rank
        est_beta0_AR1_UV = np.einsum(
            'ijk,ki->ji', X0TAX0_i,
            (X0TAY - np.einsum('ikj,ki->ji', XTAX0, est_beta_AR1_UV)))
        # Now we want to collapse all beta0 corresponding to DC components
        # of different runs to a single map, and preserve only one DC component
        # across runs. This is because they should express the same component
        # and the new data to transform do not necessarily have the same
        # numbers of runs as the training data.
        if idx_DC.size > 1:
            collapsed_DC = np.sum(X0[:, idx_DC], axis=1)
            X0 = np.insert(np.delete(X0, idx_DC, axis=1), 0,
                           collapsed_DC, axis=1)
            collapsed_beta0 = np.mean(est_beta0_AR1_UV[idx_DC, :], axis=0)
            est_beta0_AR1_UV = np.insert(
                np.delete(est_beta0_AR1_UV, idx_DC, axis=0),
                0, collapsed_beta0, axis=0)
        t_finish = time.time()
        logger.info(
            'total time of fitting: {} seconds'.format(t_finish - t_start))
        logger.debug('final GP parameters:{}'.format(current_GP))
        if GP_space:
            # Recover GP length scales from their log-squared
            # parametrization, and estimate the standard deviation of
            # log(SNR) under the fitted GP kernel.
            est_space_smooth_r = np.exp(current_GP[0] / 2.0)
            if GP_inten:
                est_intensity_kernel_r = np.exp(current_GP[1] / 2.0)
                K_major = np.exp(- (dist2 / est_space_smooth_r**2 +
                                    inten_diff2 / est_intensity_kernel_r**2)
                                 / 2.0)
            else:
                est_intensity_kernel_r = None
                K_major = np.exp(- dist2 / est_space_smooth_r**2 / 2.0)
            # self.eta adds a small diagonal term for numerical stability
            # of the solve below.
            K = K_major + np.diag(np.ones(n_V) * self.eta)
            invK_tilde_log_SNR = np.linalg.solve(K, current_logSNR2) / 2
            log_SNR_invK_tilde_log_SNR = np.dot(current_logSNR2,
                                                invK_tilde_log_SNR) / 2
            tau2, _ = self.tau2_prior(log_SNR_invK_tilde_log_SNR, n_V,
                                      self.tau_range)
            est_std_log_SNR = tau2 ** 0.5
        else:
            est_space_smooth_r = None
            est_intensity_kernel_r = None
            est_std_log_SNR = None
        return est_cov_AR1_UV, estU_chlsk_l_AR1_UV, est_SNR_AR1_UV, \
            est_beta_AR1_UV, est_beta0_AR1_UV, est_beta_AR1_UV_latent,\
            est_sigma_AR1_UV, est_rho1_AR1_UV, est_space_smooth_r, \
            est_std_log_SNR, est_intensity_kernel_r, X0
    def _transform(self, Y, scan_onsets, beta, beta0,
                   rho_e, sigma_e, rho_X, sigma2_X, rho_X0, sigma2_X0):
        """ Given the data Y and the response amplitudes beta and beta0
            estimated in the fit step, estimate the corresponding X and X0.
            It is done by a forward-backward algorithm.
            We assume X and X0 both are vector autoregressive (VAR)
            processes, to capture temporal smoothness. Their VAR
            parameters are estimated from training data at the fit stage.

            Returns the posterior mean time courses X (task-related) and
            X0 (nuisance), concatenated over scans, together with the
            total log likelihood of the data accumulated over scans.
        """
        logger.info('Transforming new data.')
        # Constructing the transition matrix and the variance of
        # innovation noise as prior for the latent variable X and X0
        # in new data.
        n_C = beta.shape[0]
        n_T = Y.shape[0]
        # X and X0 are inferred jointly: their spatial weights and AR
        # parameters are stacked into single matrices/vectors.
        weight = np.concatenate((beta, beta0), axis=0)
        T_X = np.diag(np.concatenate((rho_X, rho_X0)))
        # Marginal (stationary) variance of an AR(1) process:
        # sigma2 / (1 - rho^2).
        Var_X = np.concatenate((sigma2_X / (1 - rho_X**2),
                                sigma2_X0 / (1 - rho_X0**2)))
        Var_dX = np.concatenate((sigma2_X, sigma2_X0))
        sigma2_e = sigma_e ** 2
        # Drop an onset equal to n_T, which would delimit an empty scan.
        scan_onsets = np.setdiff1d(scan_onsets, n_T)
        n_scan = scan_onsets.size
        X = [None] * scan_onsets.size
        X0 = [None] * scan_onsets.size
        total_log_p = 0
        for scan, onset in enumerate(scan_onsets):
            # Forward step
            if scan == n_scan - 1:
                offset = n_T
            else:
                offset = scan_onsets[scan + 1]
            mu, mu_Gamma_inv, Gamma_inv, log_p_data, Lambda_0, \
                Lambda_1, H, deltaY, deltaY_sigma2inv_rho_weightT = \
                self._forward_step(Y[onset:offset, :],
                                   T_X, Var_X, Var_dX, rho_e, sigma2_e,
                                   weight)
            total_log_p += log_p_data
            # Backward step
            mu_hat, mu_Gamma_inv_hat, Gamma_inv_hat \
                = self._backward_step(
                    deltaY, deltaY_sigma2inv_rho_weightT, sigma2_e,
                    weight, mu, mu_Gamma_inv, Gamma_inv,
                    Lambda_0, Lambda_1, H)
            # The first n_C entries of each posterior mean belong to X,
            # the rest to X0.
            X[scan] = np.concatenate(
                [mu_t[None, :n_C] for mu_t in mu_hat])
            X0[scan] = np.concatenate(
                [mu_t[None, n_C:] for mu_t in mu_hat])
        X = np.concatenate(X)
        X0 = np.concatenate(X0)
        return X, X0, total_log_p
    def _score(self, Y, design, beta, scan_onsets, beta0, rho_e, sigma_e,
               rho_X0, sigma2_X0):
        """ Given the data Y, and the spatial pattern beta0
            of nuisance time series, return the cross-validated score
            of the data Y given all parameters of the subject estimated
            during the first step.
            It is assumed that the user has design matrix built for the
            data Y. Both beta and beta0 are posterior expectation estimated
            from training data with the estimated covariance matrix U and
            SNR serving as prior. We marginalize X0 instead of fitting
            it in this function because this function is for the purpose
            of evaluating model on new data. We should avoid doing any
            additional fitting when performing cross-validation.
            The hypothetic response to the task will be subtracted, and
            the unknown nuisance activity which contributes to the data
            through beta0 will be marginalized.

            Returns the total log likelihood of the (residual) data,
            accumulated over scans.
        """
        logger.info('Estimating cross-validated score for new data.')
        n_T = Y.shape[0]
        if design is not None:
            Y = Y - np.dot(design, beta)
        # The function works for both full model and null model.
        # If design matrix is not provided, the whole data is
        # used as input for _forward_step. If design matrix is provided,
        # residual after subtracting design * beta is fed to _forward_step
        T_X = np.diag(rho_X0)
        Var_X = sigma2_X0 / (1 - rho_X0**2)
        Var_dX = sigma2_X0
        # Prior parmeters for X0: T_X is transitioning matrix, Var_X
        # is the marginal variance of the first time point. Var_dX is the
        # variance of the updating noise.
        sigma2_e = sigma_e ** 2
        # variance of voxel-specific updating noise component
        # Drop an onset equal to n_T, which would delimit an empty scan.
        scan_onsets = np.setdiff1d(scan_onsets, n_T).astype(int)
        n_scan = scan_onsets.size
        total_log_p = 0
        for scan, onset in enumerate(scan_onsets):
            # Forward step
            if scan == n_scan - 1:
                offset = n_T
            else:
                offset = scan_onsets[scan + 1]
            # Only the log likelihood is needed; the filtered posterior
            # and pre-computed terms are discarded.
            _, _, _, log_p_data, _, _, _, _, _ = \
                self._forward_step(
                    Y[onset:offset, :], T_X, Var_X, Var_dX, rho_e, sigma2_e,
                    beta0)
            total_log_p += log_p_data
        return total_log_p
def _est_AR1(self, x, same_para=False):
""" Estimate the AR(1) parameters of input x.
Each column of x is assumed as independent from other columns,
and each column is treated as an AR(1) process.
If same_para is set as True, then all columns of x
are concatenated and a single set of AR(1) parameters
is estimated. Strictly speaking the breaking point
between each concatenated column should be considered.
But for long time series, this is ignored.
"""
if same_para:
n_c = x.shape[1]
x = np.reshape(x, x.size, order='F')
rho, sigma2 = alg.AR_est_YW(x, 1)
# We concatenate all the design matrix to estimate common AR(1)
# parameters. This creates some bias because the end of one column
# and the beginning of the next column of the design matrix are
# treated as consecutive samples.
rho = np.ones(n_c) * rho
sigma2 = np.ones(n_c) * sigma2
else:
rho = np.zeros(np.shape(x)[1])
sigma2 = np.zeros(np.shape(x)[1])
for c in np.arange(np.shape(x)[1]):
rho[c], sigma2[c] = alg.AR_est_YW(x[:, c], 1)
return rho, sigma2
    def _forward_step(self, Y, T_X, Var_X, Var_dX, rho_e, sigma2_e, weight):
        """ forward step for HMM, assuming both the hidden state and noise
            have 1-step dependence on the previous value.

            Returns the filtered posterior means mu, the products
            mu * inv(Gamma), the posterior precisions inv(Gamma), the log
            likelihood of the data, and pre-computed quantities
            (Lambda_0, Lambda_1, H, deltaY and
            deltaY_sigma2inv_rho_weightT) that are re-used by
            _backward_step.
        """
        # We currently only implement diagonal form
        # of covariance matrix for Var_X, Var_dX and T_X, which means
        # each dimension of X is independent and their innovation noise
        # are also independent. Note that log_p_data takes this assumption.
        # 1-D inputs are interpreted as the diagonals of the
        # corresponding matrices and expanded here.
        if Var_X.ndim == 1:
            inv_Var_X = np.diag(1 / Var_X)
            half_log_det_Var_X = np.sum(np.log(Var_X)) / 2.0
            Var_X = np.diag(Var_X)
            # the marginal variance of X
        else:
            half_log_det_Var_X = self._half_log_det(Var_X)
            inv_Var_X = np.linalg.inv(Var_X)
        if Var_dX.ndim == 1:
            inv_Var_dX = np.diag(1 / Var_dX)
            half_log_det_Var_dX = np.sum(np.log(Var_dX)) / 2.0
            Var_dX = np.diag(Var_dX)
            # the marginal variance of Delta X (the change of X from
            # previous time point)
        else:
            inv_Var_dX = np.linalg.inv(Var_dX)
            half_log_det_Var_dX = self._half_log_det(Var_dX)
        if T_X.ndim == 1:
            T_X = np.diag(T_X)
            # Transfer function of X: the expected mean of X at t+1
            # time point is T_x * X
        [n_T, n_V] = np.shape(Y)
        # numbers of time points and voxels
        mu = [None] * n_T
        # posterior mean of X, conditioned on all data up till the current
        # time point
        Gamma_inv = [None] * n_T
        # inverse of poterior Gamma.
        mu_Gamma_inv = [None] * n_T
        # mu * inv(Gamma)
        log_p_data = - np.log(np.pi * 2) * (n_T * n_V) / 2 \
            - half_log_det_Var_X - np.sum(np.log(sigma2_e)) * n_T / 2.0\
            + np.sum(np.log(1 - rho_e**2)) / 2.0 - half_log_det_Var_dX \
            * (n_T - 1)
        # This is the term to be incremented by c_n at each time step.
        # We first add all the fixed terms to it.
        # The following are a few fixed terms.
        Lambda_0 = np.dot(T_X, np.dot(inv_Var_dX, T_X.T)) \
            + np.dot(weight * rho_e**2 / sigma2_e, weight.T)
        H = np.dot(inv_Var_dX, T_X.T) + np.dot(weight * rho_e / sigma2_e,
                                               weight.T)
        Lambda_1 = inv_Var_dX + np.dot(weight / sigma2_e, weight.T)
        Gamma_inv[0] = inv_Var_X + np.dot(
            weight * (1 - rho_e**2) / sigma2_e, weight.T)
        # We might not need this and only use linalg.solve for related terms.
        mu_Gamma_inv[0] = np.dot(
            Y[0, :] * (1 - rho_e**2) / sigma2_e, weight.T)
        mu[0] = np.linalg.solve(Gamma_inv[0], mu_Gamma_inv[0])
        log_p_data -= 0.5 * np.sum(Y[0, :]**2 * (1 - rho_e**2) / sigma2_e)
        # This is the term added for the first time point.
        # deltaY removes the AR(1)-predictable part of Y at each step.
        deltaY = Y[1:, :] - rho_e * Y[:-1, :]
        deltaY_sigma2inv_rho_weightT = np.dot(
            deltaY / sigma2_e * rho_e, weight.T)
        # Recursive filtering: each iteration folds the observation at
        # time t into the running posterior.
        for t in np.arange(1, n_T):
            Gamma_tilde_inv = Lambda_0 + Gamma_inv[t - 1]
            tmp = np.linalg.solve(Gamma_tilde_inv, H.T)
            Gamma_inv[t] = Lambda_1 - np.dot(H, tmp)
            mu_Gamma_inv[t] = np.dot(deltaY[t - 1, :] / sigma2_e, weight.T) \
                + np.dot(mu_Gamma_inv[t - 1]
                         - deltaY_sigma2inv_rho_weightT[t - 1, :], tmp)
            mu[t] = np.linalg.solve(Gamma_inv[t], mu_Gamma_inv[t])
            tmp2 = mu_Gamma_inv[t - 1] - deltaY_sigma2inv_rho_weightT[t - 1, :]
            log_p_data += -self._half_log_det(Gamma_tilde_inv) \
                + np.dot(tmp2, np.linalg.solve(Gamma_tilde_inv, tmp2)) / 2.0
        log_p_data += -self._half_log_det(Gamma_inv[-1]) \
            + np.dot(mu_Gamma_inv[-1], mu[-1]) / 2.0 \
            - np.sum(deltaY**2 / sigma2_e) / 2.0
        return mu, mu_Gamma_inv, Gamma_inv, log_p_data, Lambda_0, \
            Lambda_1, H, deltaY, deltaY_sigma2inv_rho_weightT
    def _backward_step(self, deltaY, deltaY_sigma2inv_rho_weightT,
                       sigma2_e, weight, mu, mu_Gamma_inv, Gamma_inv,
                       Lambda_0, Lambda_1, H):
        """ backward step for HMM, assuming both the hidden state and noise
            have 1-step dependence on the previous value.

            Takes the filtered posteriors (mu, mu_Gamma_inv, Gamma_inv)
            and the pre-computed terms returned by _forward_step, and
            returns the smoothed posterior quantities (mu_hat,
            mu_Gamma_inv_hat, Gamma_inv_hat), one per time point.
        """
        n_T = len(Gamma_inv)
        # All the terms with hat before are parameters of posterior
        # distributions of X conditioned on data from all time points,
        # whereas the ones without hat calculated by _forward_step
        # are mean and covariance of posterior of X conditioned on
        # data up to the time point.
        Gamma_inv_hat = [None] * n_T
        mu_Gamma_inv_hat = [None] * n_T
        mu_hat = [None] * n_T
        # At the last time point the filtered and smoothed posteriors
        # coincide; copies avoid aliasing the forward-step results.
        mu_hat[-1] = mu[-1].copy()
        mu_Gamma_inv_hat[-1] = mu_Gamma_inv[-1].copy()
        Gamma_inv_hat[-1] = Gamma_inv[-1].copy()
        # Recurse backwards, folding information from time t+1 into the
        # posterior at time t.
        for t in np.arange(n_T - 2, -1, -1):
            tmp = np.linalg.solve(Gamma_inv_hat[t + 1] - Gamma_inv[t + 1]
                                  + Lambda_1, H)
            Gamma_inv_hat[t] = Gamma_inv[t] + Lambda_0 - np.dot(H.T, tmp)
            mu_Gamma_inv_hat[t] = mu_Gamma_inv[t] \
                - deltaY_sigma2inv_rho_weightT[t, :] + np.dot(
                    mu_Gamma_inv_hat[t + 1] - mu_Gamma_inv[t + 1]
                    + np.dot(deltaY[t, :] / sigma2_e, weight.T), tmp)
            mu_hat[t] = np.linalg.solve(Gamma_inv_hat[t],
                                        mu_Gamma_inv_hat[t])
        return mu_hat, mu_Gamma_inv_hat, Gamma_inv_hat
    def _initial_fit_singpara(self, XTX, XTDX, XTFX,
                              YTY_diag, YTDY_diag, YTFY_diag,
                              XTY, XTDY, XTFY, X0TX0, X0TDX0, X0TFX0,
                              XTX0, XTDX0, XTFX0, X0TY, X0TDY, X0TFY,
                              X, Y, X0, idx_param_sing, l_idx,
                              n_C, n_T, n_V, n_l, n_run, n_X0, rank):
        """ Perform initial fitting of a simplified model, which assumes
            that all voxels share exactly the same temporal covariance
            matrix for their noise (the same noise variance and
            auto-correlation). The SNR is implicitly assumed to be 1
            for all voxels.

            Returns the fitted vectorized Cholesky factor of U, the
            per-voxel a1 parameters (all set to the single fitted value),
            and per-voxel log(sigma^2) estimated from the residuals.
        """
        logger.info('Initial fitting assuming single parameter of '
                    'noise for all voxels')
        # Ordinary least squares on the joint design (nuisance + task)
        # to obtain point estimates of betas and residuals.
        X_joint = np.concatenate((X0, X), axis=1)
        beta_hat = np.linalg.lstsq(X_joint, Y, rcond=None)[0]
        residual = Y - np.dot(X_joint, beta_hat)
        # point estimates of betas and fitting residuals without assuming
        # the Bayesian model underlying RSA.
        # There are several possible ways of initializing the covariance.
        # (1) start from the point estimation of covariance
        cov_point_est = np.cov(beta_hat[n_X0:, :]) / np.var(residual)
        current_vec_U_chlsk_l = \
            np.linalg.cholesky((cov_point_est + np.eye(n_C)) / 2)[l_idx]
        # We use the average of covariance of point estimation and an identity
        # matrix as the initial value of the covariance matrix, just in case
        # the user provides data in which n_V is smaller than n_C.
        # (2) start from identity matrix
        # current_vec_U_chlsk_l = np.eye(n_C)[l_idx]
        # (3) random initialization
        # current_vec_U_chlsk_l = self.random_state_.randn(n_l)
        # vectorized version of L, Cholesky factor of U, the shared
        # covariance matrix of betas across voxels.
        rho1 = np.sum(
            residual[0:-1, :] * residual[1:, :], axis=0) / \
            np.sum(residual[0:-1, :] * residual[0:-1, :], axis=0)
        # Estimate of auto correlation assuming data includes pure noise.
        log_sigma2 = np.log(np.var(
            residual[1:, :] - residual[0:-1, :] * rho1, axis=0))
        # log of estimates of the variance of the "innovation" noise
        # of AR(1) process at each time point.
        param0 = np.empty(np.sum(np.fromiter(
            (np.size(v) for v in idx_param_sing.values()), int)))
        # Initial parameter
        # Then we fill each part of the original guess of parameters
        param0[idx_param_sing['Cholesky']] = current_vec_U_chlsk_l
        # tan(rho1 * pi / 2) inverts the arctan parametrization of a1;
        # the median across voxels serves as the single shared value.
        param0[idx_param_sing['a1']] = np.median(np.tan(rho1 * np.pi / 2))
        # Fit it.
        # maxiter is capped at 100: this step only provides a starting
        # point for the later, more complete fitting stages.
        res = scipy.optimize.minimize(
            self._loglike_AR1_singpara, param0,
            args=(XTX, XTDX, XTFX, YTY_diag, YTDY_diag, YTFY_diag,
                  XTY, XTDY, XTFY, X0TX0, X0TDX0, X0TFX0,
                  XTX0, XTDX0, XTFX0, X0TY, X0TDY, X0TFY,
                  l_idx, n_C, n_T, n_V, n_run, n_X0,
                  idx_param_sing, rank),
            method=self.optimizer, jac=True, tol=self.tol,
            options={'disp': self.minimize_options['disp'],
                     'maxiter': 100})
        current_vec_U_chlsk_l = res.x[idx_param_sing['Cholesky']]
        current_a1 = res.x[idx_param_sing['a1']] * np.ones(n_V)
        # log(sigma^2) assuming the data include no signal is returned,
        # as a starting point for the iteration in the next step.
        # Although it should overestimate the variance,
        # setting it this way might allow it to track log(sigma^2)
        # more closely for each voxel.
        return current_vec_U_chlsk_l, current_a1, log_sigma2
    def _fit_diagV_noGP(
            self, XTY, XTDY, XTFY, YTY_diag, YTDY_diag, YTFY_diag,
            XTX, XTDX, XTFX, X, Y, X_base, X_res, D, F, run_TRs,
            current_vec_U_chlsk_l,
            current_a1, current_logSNR2,
            idx_param_fitU, idx_param_fitV,
            l_idx, n_C, n_T, n_V, n_l, n_run, n_X0, rank):
        """ (optional) second step of fitting, full model but without
            GP prior on log(SNR). This step is only done if GP prior
            is requested.

            Alternates between fitting U (shared covariance + AR(1)
            parameters) and V (per-voxel log(SNR^2)) for up to
            self.init_iter iterations. Returns the updated Cholesky
            vector, a1, log(SNR^2) and the re-estimated nuisance
            regressors X_res.
        """
        init_iter = self.init_iter
        logger.info('second fitting without GP prior'
                    ' for {} times'.format(init_iter))
        # Initial parameters
        param0_fitU = np.empty(np.sum(np.fromiter(
            (np.size(v) for v in idx_param_fitU.values()), int)))
        param0_fitV = np.empty(np.size(idx_param_fitV['log_SNR2']))
        # We cannot use the same logic as the line above because
        # idx_param_fitV also includes entries for GP parameters.
        param0_fitU[idx_param_fitU['Cholesky']] = \
            current_vec_U_chlsk_l.copy()
        param0_fitU[idx_param_fitU['a1']] = current_a1.copy()
        param0_fitV[idx_param_fitV['log_SNR2']] = \
            current_logSNR2[:-1].copy()
        L = np.zeros((n_C, rank))
        # Looser tolerance (5x) speeds up this preliminary stage.
        tol = self.tol * 5
        for it in range(0, init_iter):
            X0TX0, X0TDX0, X0TFX0, XTX0, XTDX0, XTFX0, \
                X0TY, X0TDY, X0TFY, X0, X_base, n_X0, _ \
                = self._prepare_data_XYX0(
                    X, Y, X_base, X_res, D, F, run_TRs, no_DC=True)
            # fit U, the covariance matrix, together with AR(1) param
            # Decaying (annealed) random noise is added to the starting
            # point of the Cholesky factor at each iteration.
            param0_fitU[idx_param_fitU['Cholesky']] = \
                current_vec_U_chlsk_l \
                + self.random_state_.randn(n_l) \
                * np.linalg.norm(current_vec_U_chlsk_l) \
                / n_l**0.5 * np.exp(-it / init_iter * self.anneal_speed - 1)
            param0_fitU[idx_param_fitU['a1']] = current_a1
            res_fitU = scipy.optimize.minimize(
                self._loglike_AR1_diagV_fitU, param0_fitU,
                args=(XTX, XTDX, XTFX, YTY_diag, YTDY_diag, YTFY_diag,
                      XTY, XTDY, XTFY, X0TX0, X0TDX0, X0TFX0,
                      XTX0, XTDX0, XTFX0, X0TY, X0TDY, X0TFY,
                      current_logSNR2, l_idx, n_C,
                      n_T, n_V, n_run, n_X0, idx_param_fitU, rank),
                method=self.optimizer, jac=True, tol=tol,
                options=self.minimize_options)
            current_vec_U_chlsk_l = \
                res_fitU.x[idx_param_fitU['Cholesky']]
            current_a1 = res_fitU.x[idx_param_fitU['a1']]
            norm_fitUchange = np.linalg.norm(res_fitU.x - param0_fitU)
            logger.debug('norm of parameter change after fitting U: '
                         '{}'.format(norm_fitUchange))
            param0_fitU = res_fitU.x.copy()
            # fit V, reflected in the log(SNR^2) of each voxel
            rho1 = np.arctan(current_a1) * 2 / np.pi
            L[l_idx] = current_vec_U_chlsk_l
            X0TAX0, XTAX0, X0TAY, X0TAX0_i, \
                XTAcorrX, XTAcorrY, YTAcorrY, \
                LTXTAcorrY, XTAcorrXL, LTXTAcorrXL = \
                self._precompute_ar1_quad_forms(XTY, XTDY, XTFY,
                                                YTY_diag, YTDY_diag, YTFY_diag,
                                                XTX, XTDX, XTFX,
                                                X0TX0, X0TDX0, X0TFX0,
                                                XTX0, XTDX0, XTFX0,
                                                X0TY, X0TDY, X0TFY,
                                                L, rho1, n_V, n_X0)
            res_fitV = scipy.optimize.minimize(
                self._loglike_AR1_diagV_fitV, param0_fitV,
                args=(X0TAX0, XTAX0, X0TAY,
                      X0TAX0_i, XTAcorrX, XTAcorrY, YTAcorrY,
                      LTXTAcorrY, XTAcorrXL, LTXTAcorrXL,
                      current_vec_U_chlsk_l,
                      current_a1, l_idx, n_C, n_T, n_V, n_run,
                      n_X0, idx_param_fitV, rank,
                      False, False),
                method=self.optimizer, jac=True, tol=tol,
                options=self.minimize_options)
            # The last voxel's log(SNR^2) is fixed so that they sum to
            # zero across voxels.
            current_logSNR2[0:n_V - 1] = res_fitV.x
            current_logSNR2[-1] = - np.sum(current_logSNR2[0:n_V - 1])
            norm_fitVchange = np.linalg.norm(res_fitV.x - param0_fitV)
            logger.debug('norm of parameter change after fitting V: '
                         '{}'.format(norm_fitVchange))
            logger.debug('E[log(SNR2)^2]: {}'.format(
                np.mean(current_logSNR2**2)))
            # The lines below are for debugging purpose.
            # If any voxel's log(SNR^2) gets to non-finite number,
            # something might be wrong -- could be that the data has
            # nothing to do with the design matrix.
            if np.any(np.logical_not(np.isfinite(current_logSNR2))):
                logger.warning('Initial fitting: iteration {}'.format(it))
                logger.warning('current log(SNR^2): '
                               '{}'.format(current_logSNR2))
                logger.warning('log(sigma^2) has non-finite number')
            param0_fitV = res_fitV.x.copy()
            # Re-estimating X_res from residuals
            current_SNR2 = np.exp(current_logSNR2)
            if self.auto_nuisance:
                LL, LAMBDA_i, LAMBDA, YTAcorrXL_LAMBDA, current_sigma2 \
                    = self._calc_LL(rho1, LTXTAcorrXL, LTXTAcorrY, YTAcorrY,
                                    X0TAX0, current_SNR2,
                                    n_V, n_T, n_run, rank, n_X0)
                betas = current_SNR2 * np.dot(L, YTAcorrXL_LAMBDA.T)
                beta0s = np.einsum(
                    'ijk,ki->ji', X0TAX0_i,
                    (X0TAY - np.einsum('ikj,ki->ji', XTAX0, betas)))
                residuals = Y - np.dot(X, betas) - np.dot(
                    X_base, beta0s[:np.shape(X_base)[1], :])
                X_res = self.nureg_method(
                    self.n_nureg_).fit_transform(
                    self.preprocess_residual(residuals))
            # Stop early once both U and V changes fall below tolerance.
            if norm_fitVchange / np.sqrt(param0_fitV.size) < tol \
                    and norm_fitUchange / np.sqrt(param0_fitU.size) \
                    < tol:
                break
        return current_vec_U_chlsk_l, current_a1, current_logSNR2, X_res
    def _fit_diagV_GP(
            self, XTY, XTDY, XTFY, YTY_diag, YTDY_diag, YTFY_diag,
            XTX, XTDX, XTFX, X, Y, X_base, X_res, D, F, run_TRs,
            current_vec_U_chlsk_l,
            current_a1, current_logSNR2, current_GP, n_smooth,
            idx_param_fitU, idx_param_fitV, l_idx,
            n_C, n_T, n_V, n_l, n_run, n_X0, rank, GP_space, GP_inten,
            dist2, inten_diff2, space_smooth_range, inten_smooth_range):
        """ Last step of fitting. If GP is not requested, this step will
            still be done, just without GP prior on log(SNR).

            Alternates between fitting U (shared covariance + AR(1)
            parameters) and V (per-voxel log(SNR^2) plus, if requested,
            the GP hyper-parameters) for up to self.n_iter iterations.
            Returns the updated Cholesky vector, a1, log(SNR^2), GP
            parameters and the re-estimated nuisance regressors X_res.
        """
        tol = self.tol
        n_iter = self.n_iter
        logger.info('Last step of fitting.'
                    ' for maximum {} times'.format(n_iter))
        # Initial parameters
        param0_fitU = np.empty(np.sum(np.fromiter(
            (np.size(v) for v in idx_param_fitU.values()), int)))
        # Unlike _fit_diagV_noGP, the fitV parameter vector here also
        # carries the GP hyper-parameters ('c_both').
        param0_fitV = np.empty(np.size(idx_param_fitV['log_SNR2'])
                               + np.size(idx_param_fitV['c_both']))
        # We cannot use the same logic as the line above because
        # idx_param_fitV also includes entries for GP parameters.
        param0_fitU[idx_param_fitU['Cholesky']] = \
            current_vec_U_chlsk_l.copy()
        param0_fitU[idx_param_fitU['a1']] = current_a1.copy()
        param0_fitV[idx_param_fitV['log_SNR2']] = \
            current_logSNR2[:-1].copy()
        L = np.zeros((n_C, rank))
        L[l_idx] = current_vec_U_chlsk_l
        if self.GP_space:
            param0_fitV[idx_param_fitV['c_both']] = current_GP.copy()
        for it in range(0, n_iter):
            X0TX0, X0TDX0, X0TFX0, XTX0, XTDX0, XTFX0, \
                X0TY, X0TDY, X0TFY, X0, X_base, n_X0, _ = \
                self._prepare_data_XYX0(
                    X, Y, X_base, X_res, D, F, run_TRs, no_DC=True)
            # fit U
            # Decaying (annealed) random noise is added to the starting
            # point of the Cholesky factor at each iteration.
            param0_fitU[idx_param_fitU['Cholesky']] = \
                current_vec_U_chlsk_l \
                + self.random_state_.randn(n_l) \
                * np.linalg.norm(current_vec_U_chlsk_l) \
                / n_l**0.5 * np.exp(-it / n_iter * self.anneal_speed - 1)
            param0_fitU[idx_param_fitU['a1']] = current_a1
            res_fitU = scipy.optimize.minimize(
                self._loglike_AR1_diagV_fitU, param0_fitU,
                args=(XTX, XTDX, XTFX, YTY_diag, YTDY_diag, YTFY_diag,
                      XTY, XTDY, XTFY, X0TX0, X0TDX0, X0TFX0,
                      XTX0, XTDX0, XTFX0, X0TY, X0TDY, X0TFY,
                      current_logSNR2, l_idx, n_C, n_T, n_V,
                      n_run, n_X0, idx_param_fitU, rank),
                method=self.optimizer, jac=True,
                tol=tol,
                options=self.minimize_options)
            current_vec_U_chlsk_l = \
                res_fitU.x[idx_param_fitU['Cholesky']]
            current_a1 = res_fitU.x[idx_param_fitU['a1']]
            L[l_idx] = current_vec_U_chlsk_l
            fitUchange = res_fitU.x - param0_fitU
            norm_fitUchange = np.linalg.norm(fitUchange)
            logger.debug('norm of parameter change after fitting U: '
                         '{}'.format(norm_fitUchange))
            param0_fitU = res_fitU.x.copy()
            # fit V
            rho1 = np.arctan(current_a1) * 2 / np.pi
            X0TAX0, XTAX0, X0TAY, X0TAX0_i, \
                XTAcorrX, XTAcorrY, YTAcorrY, \
                LTXTAcorrY, XTAcorrXL, LTXTAcorrXL = \
                self._precompute_ar1_quad_forms(XTY, XTDY, XTFY,
                                                YTY_diag, YTDY_diag, YTFY_diag,
                                                XTX, XTDX, XTFX,
                                                X0TX0, X0TDX0, X0TFX0,
                                                XTX0, XTDX0, XTFX0,
                                                X0TY, X0TDY, X0TFY,
                                                L, rho1, n_V, n_X0)
            res_fitV = scipy.optimize.minimize(
                self._loglike_AR1_diagV_fitV, param0_fitV, args=(
                    X0TAX0, XTAX0, X0TAY, X0TAX0_i,
                    XTAcorrX, XTAcorrY, YTAcorrY,
                    LTXTAcorrY, XTAcorrXL, LTXTAcorrXL,
                    current_vec_U_chlsk_l, current_a1,
                    l_idx, n_C, n_T, n_V, n_run, n_X0,
                    idx_param_fitV, rank,
                    GP_space, GP_inten, dist2, inten_diff2,
                    space_smooth_range, inten_smooth_range),
                method=self.optimizer, jac=True,
                tol=tol,
                options=self.minimize_options)
            # The last voxel's log(SNR^2) is fixed so that they sum to
            # zero across voxels.
            current_logSNR2[0:n_V - 1] = \
                res_fitV.x[idx_param_fitV['log_SNR2']]
            current_logSNR2[n_V - 1] = -np.sum(current_logSNR2[0:n_V - 1])
            current_GP = res_fitV.x[idx_param_fitV['c_both']]
            fitVchange = res_fitV.x - param0_fitV
            norm_fitVchange = np.linalg.norm(fitVchange)
            param0_fitV = res_fitV.x.copy()
            logger.debug('norm of parameter change after fitting V: '
                         '{}'.format(norm_fitVchange))
            logger.debug('E[log(SNR2)^2]: {}'.format(
                np.mean(current_logSNR2**2)))
            # Re-estimating X_res from residuals
            current_SNR2 = np.exp(current_logSNR2)
            if self.auto_nuisance:
                LL, LAMBDA_i, LAMBDA, YTAcorrXL_LAMBDA, current_sigma2 \
                    = self._calc_LL(rho1, LTXTAcorrXL, LTXTAcorrY, YTAcorrY,
                                    X0TAX0, current_SNR2,
                                    n_V, n_T, n_run, rank, n_X0)
                betas = current_SNR2 \
                    * np.dot(L, YTAcorrXL_LAMBDA.T)
                beta0s = np.einsum(
                    'ijk,ki->ji', X0TAX0_i,
                    (X0TAY - np.einsum('ikj,ki->ji', XTAX0, betas)))
                residuals = Y - np.dot(X, betas) - np.dot(
                    X_base, beta0s[:np.shape(X_base)[1], :])
                X_res = self.nureg_method(self.n_nureg_).fit_transform(
                    self.preprocess_residual(residuals))
            if GP_space:
                logger.debug('current GP[0]: {}'.format(current_GP[0]))
                logger.debug('gradient for GP[0]: {}'.format(
                    res_fitV.jac[idx_param_fitV['c_space']]))
                if GP_inten:
                    logger.debug('current GP[1]: {}'.format(current_GP[1]))
                    logger.debug('gradient for GP[1]: {}'.format(
                        res_fitV.jac[idx_param_fitV['c_inten']]))
            # Stop early once both U and V changes fall below tolerance.
            if np.max(np.abs(fitVchange)) < tol and \
                    np.max(np.abs(fitUchange)) < tol:
                break
        return current_vec_U_chlsk_l, current_a1, current_logSNR2,\
            current_GP, X_res
    def _fit_null(self, Y, X_base, scan_onsets=None):
        """ Fit a null model.

            The null model contains only nuisance regressors (X_base,
            run-specific DC components and, if auto_nuisance is on,
            data-driven nuisance components re-estimated from residuals).
            Returns the nuisance betas, per-voxel noise standard
            deviation, per-voxel AR(1) coefficients, and the final
            nuisance design matrix X0.
        """
        n_V = np.size(Y, axis=1)
        n_T = np.size(Y, axis=0)
        t_start = time.time()
        D, F, run_TRs, n_run = self._prepare_DF(
            n_T, scan_onsets=scan_onsets)
        YTY_diag = np.sum(Y * Y, axis=0)
        YTDY_diag = np.sum(Y * np.dot(D, Y), axis=0)
        YTFY_diag = np.sum(Y * np.dot(F, Y), axis=0)
        tol = self.tol
        n_iter = self.n_iter
        logger.info('Fitting null model'
                    ' for maximum {} times'.format(n_iter))
        # Add DC components capturing run-specific baselines.
        X_DC = self._gen_X_DC(run_TRs)
        X_DC, X_base, idx_DC = self._merge_DC_to_base(
            X_DC, X_base, no_DC=False)
        X_res = None
        # param0 holds the per-voxel a1 (arctan parametrization of the
        # AR(1) coefficient), initialized to zero.
        param0 = np.zeros(n_V)
        for it in range(0, n_iter):
            if X_res is None:
                X0 = X_base
            else:
                X0 = np.concatenate((X_base, X_res), axis=1)
            n_X0 = X0.shape[1]
            X0TX0, X0TDX0, X0TFX0 = self._make_templates(D, F, X0, X0)
            X0TY, X0TDY, X0TFY = self._make_templates(D, F, X0, Y)
            res_null = scipy.optimize.minimize(
                self._loglike_AR1_null, param0, args=(
                    YTY_diag, YTDY_diag, YTFY_diag,
                    X0TX0, X0TDX0, X0TFX0, X0TY, X0TDY, X0TFY,
                    n_T, n_V, n_run, n_X0),
                method=self.optimizer, jac=True, tol=tol,
                options=self.minimize_options)
            param_change = res_null.x - param0
            param0 = res_null.x.copy()
            est_rho1_AR1_null = 2.0 / np.pi * np.arctan(param0)
            if self.auto_nuisance:
                X0TAX0 = X0TX0[None, :, :] \
                    - est_rho1_AR1_null[:, None, None] \
                    * X0TDX0[None, :, :] \
                    + est_rho1_AR1_null[:, None, None]**2 \
                    * X0TFX0[None, :, :]
                # dimension: space*#baseline*#baseline
                X0TAY = self._make_ar1_quad_form(X0TY, X0TDY, X0TFY,
                                                 est_rho1_AR1_null)
                # dimension: #baseline*space
                beta0s = np.linalg.solve(X0TAX0, X0TAY.T).T
                residuals = Y - np.dot(X_base, beta0s[:np.shape(X_base)[1], :])
                X_res = self.nureg_method(self.n_nureg_).fit_transform(
                    self.preprocess_residual(residuals))
            if np.max(np.abs(param_change)) < self.tol:
                logger.info('The change of parameters is smaller than '
                            'the tolerance value {}. Fitting is finished '
                            'after {} iterations'.format(self.tol, it + 1))
                break
        # Final estimates with the converged AR(1) parameters.
        X0TAX0 = X0TX0[None, :, :] \
            - est_rho1_AR1_null[:, None, None] \
            * X0TDX0[None, :, :] \
            + est_rho1_AR1_null[:, None, None]**2 \
            * X0TFX0[None, :, :]
        # dimension: space*#baseline*#baseline
        X0TAY = self._make_ar1_quad_form(X0TY, X0TDY, X0TFY,
                                         est_rho1_AR1_null)
        # dimension: #baseline*space
        est_beta0_AR1_null = np.linalg.solve(X0TAX0, X0TAY.T).T
        YTAY = self._make_ar1_quad_form(YTY_diag, YTDY_diag, YTFY_diag,
                                        est_rho1_AR1_null)
        # dimension: space,
        YTAcorrY = YTAY - np.sum(X0TAY * est_beta0_AR1_null, axis=0)
        # dimension: space,
        est_sigma_AR1_null = (YTAcorrY / (n_T - n_X0)) ** 0.5
        # Collapse the run-specific DC components into a single one, as
        # in _fit_RSA_UV, so the model generalizes to data with a
        # different number of runs.
        if idx_DC.size > 1:
            collapsed_DC = np.sum(X0[:, idx_DC], axis=1)
            X0 = np.insert(np.delete(X0, idx_DC, axis=1), 0,
                           collapsed_DC, axis=1)
            collapsed_beta0 = np.mean(est_beta0_AR1_null[idx_DC, :], axis=0)
            est_beta0_AR1_null = np.insert(
                np.delete(est_beta0_AR1_null, idx_DC, axis=0),
                0, collapsed_beta0, axis=0)
        t_finish = time.time()
        logger.info(
            'total time of fitting: {} seconds'.format(t_finish - t_start))
        return est_beta0_AR1_null, est_sigma_AR1_null, est_rho1_AR1_null, X0
# We fit two parts of the parameters iteratively.
# The following are the corresponding negative log likelihood functions.
    def _loglike_AR1_diagV_fitU(self, param, XTX, XTDX, XTFX, YTY_diag,
                                YTDY_diag, YTFY_diag, XTY, XTDY, XTFY,
                                X0TX0, X0TDX0, X0TFX0,
                                XTX0, XTDX0, XTFX0, X0TY, X0TDY, X0TFY,
                                log_SNR2, l_idx, n_C, n_T, n_V, n_run, n_X0,
                                idx_param_fitU, rank):
        # This function calculates the log likelihood of data given cholesky
        # decomposition of U and AR(1) parameters of noise as free parameters.
        # Free parameters are in param.
        # The log of the square of signal to noise level in each voxel
        # (the ratio of the diagonal elements in V and
        # the noise variance) are fixed. This likelihood is iteratively
        # optimized with the one with suffix _fitV.
        #
        # The meaning of U and V follow this wiki page of matrix normal
        # distribution:
        # https://en.wikipedia.org/wiki/Matrix_normal_distribution
        #
        # We assume betas of all voxels as a matrix follow this distribution.
        # U describe the covariance between conditions. V describe the
        # covariance between voxels.
        #
        # In this version, we assume that beta is independent between voxels
        # and noise is also independent.
        # By the assumption that noise is independent, we only need to pass
        # the products X'X, X'Y and Y'Y, instead of X and Y
        # Y'Y is passed in the form of its diagonal elements.
        # DiagV means we assume that the variance of beta can be different
        # between voxels. This means that V is a diagonal matrix instead of
        # an identity matrix. The parameter includes the lower triangular
        # part of the cholesky decomposition
        # of U (flattened), then tan(rho1*pi/2) where rho1 is
        # each voxel's autoregressive coefficient (assuming AR(1) model).
        # Such parametrization avoids the need of boundaries
        # for parameters.
        #
        # Returns the negative log likelihood and its negative gradient
        # with respect to param, in the form expected by
        # scipy.optimize.minimize (cost, jacobian).
        L = np.zeros([n_C, rank])
        # lower triangular matrix L, cholesky decomposition of U
        L[l_idx] = param[idx_param_fitU['Cholesky']]
        a1 = param[idx_param_fitU['a1']]
        rho1 = 2.0 / np.pi * np.arctan(a1)  # auto-regressive coefficients
        SNR2 = np.exp(log_SNR2)
        # each element of SNR2 is the ratio of the diagonal element on V
        # to the variance of the fresh noise in that voxel
        X0TAX0, XTAX0, X0TAY, X0TAX0_i, \
            XTAcorrX, XTAcorrY, YTAcorrY, \
            LTXTAcorrY, XTAcorrXL, LTXTAcorrXL = \
            self._precompute_ar1_quad_forms(XTY, XTDY, XTFY,
                                            YTY_diag, YTDY_diag, YTFY_diag,
                                            XTX, XTDX, XTFX, X0TX0, X0TDX0,
                                            X0TFX0, XTX0, XTDX0, XTFX0,
                                            X0TY, X0TDY, X0TFY,
                                            L, rho1, n_V, n_X0)
        # Only starting from this point, SNR2 is involved
        LL, LAMBDA_i, LAMBDA, YTAcorrXL_LAMBDA, sigma2 \
            = self._calc_LL(rho1, LTXTAcorrXL, LTXTAcorrY, YTAcorrY,
                            X0TAX0, SNR2, n_V, n_T, n_run, rank, n_X0)
        if not np.isfinite(LL):
            # Log intermediate quantities to help diagnose numerical issues.
            logger.warning('NaN detected!')
            logger.warning('LL: {}'.format(LL))
            logger.warning('sigma2: {}'.format(sigma2))
            logger.warning('YTAcorrY: {}'.format(YTAcorrY))
            logger.warning('LTXTAcorrY: {}'.format(LTXTAcorrY))
            logger.warning('YTAcorrXL_LAMBDA: {}'.format(YTAcorrXL_LAMBDA))
            logger.warning('SNR2: {}'.format(SNR2))
        YTAcorrXL_LAMBDA_LT = np.dot(YTAcorrXL_LAMBDA, L.T)
        # dimension: space*feature (feature can be larger than rank)
        deriv_L = -np.einsum('ijk,ikl,i', XTAcorrXL, LAMBDA, SNR2) \
            - np.dot(np.einsum('ijk,ik->ji', XTAcorrXL, YTAcorrXL_LAMBDA)
                     * SNR2**2 / sigma2, YTAcorrXL_LAMBDA) \
            + np.dot(XTAcorrY / sigma2 * SNR2, YTAcorrXL_LAMBDA)
        # dimension: feature*rank
        # The following are for calculating the derivative to a1
        deriv_a1 = np.empty(n_V)
        dXTAX_drho1 = -XTDX + 2 * rho1[:, None, None] * XTFX
        # dimension: space*feature*feature
        dXTAY_drho1 = self._make_ar1_quad_form_grad(XTDY, XTFY, rho1)
        # dimension: feature*space
        dYTAY_drho1 = self._make_ar1_quad_form_grad(YTDY_diag, YTFY_diag, rho1)
        # dimension: space,
        dX0TAX0_drho1 = - X0TDX0 \
            + 2 * rho1[:, None, None] * X0TFX0
        # dimension: space*rank*rank
        dXTAX0_drho1 = - XTDX0 \
            + 2 * rho1[:, None, None] * XTFX0
        # dimension: space*feature*rank
        dX0TAY_drho1 = self._make_ar1_quad_form_grad(X0TDY, X0TFY, rho1)
        # dimension: rank*space
        # The following are executed for each voxel.
        for i_v in range(n_V):
            # All variables with _ele as suffix are for data of just one voxel
            invX0TAX0_X0TAX_ele = np.dot(X0TAX0_i[i_v, :, :],
                                         XTAX0[i_v, :, :].T)
            invX0TAX0_X0TAY_ele = np.dot(X0TAX0_i[i_v, :, :], X0TAY[:, i_v])
            dXTAX0_drho1_invX0TAX0_X0TAX_ele = np.dot(dXTAX0_drho1[i_v, :, :],
                                                      invX0TAX0_X0TAX_ele)
            # preparation for the variable below
            dXTAcorrX_drho1_ele = dXTAX_drho1[i_v, :, :] \
                - dXTAX0_drho1_invX0TAX0_X0TAX_ele \
                - dXTAX0_drho1_invX0TAX0_X0TAX_ele.T \
                + np.dot(np.dot(invX0TAX0_X0TAX_ele.T,
                                dX0TAX0_drho1[i_v, :, :]),
                         invX0TAX0_X0TAX_ele)
            dXTAcorrY_drho1_ele = dXTAY_drho1[:, i_v] \
                - np.dot(invX0TAX0_X0TAX_ele.T, dX0TAY_drho1[:, i_v]) \
                - np.dot(dXTAX0_drho1[i_v, :, :], invX0TAX0_X0TAY_ele) \
                + np.dot(np.dot(invX0TAX0_X0TAX_ele.T,
                                dX0TAX0_drho1[i_v, :, :]),
                         invX0TAX0_X0TAY_ele)
            dYTAcorrY_drho1_ele = dYTAY_drho1[i_v] \
                - np.dot(dX0TAY_drho1[:, i_v], invX0TAX0_X0TAY_ele) * 2\
                + np.dot(np.dot(invX0TAX0_X0TAY_ele, dX0TAX0_drho1[i_v, :, :]),
                         invX0TAX0_X0TAY_ele)
            # Chain rule: d(rho1)/d(a1) = 2 / (pi * (1 + a1^2))
            deriv_a1[i_v] = 2 / np.pi / (1 + a1[i_v]**2) \
                * (- n_run * rho1[i_v] / (1 - rho1[i_v]**2)
                   - np.einsum('ij,ij', X0TAX0_i[i_v, :, :],
                               dX0TAX0_drho1[i_v, :, :]) * 0.5
                   - np.einsum('ij,ij', LAMBDA[i_v, :, :],
                               np.dot(np.dot(
                                   L.T, dXTAcorrX_drho1_ele), L))
                   * (SNR2[i_v] * 0.5)
                   - dYTAcorrY_drho1_ele * 0.5 / sigma2[i_v]
                   + SNR2[i_v] / sigma2[i_v]
                   * np.dot(dXTAcorrY_drho1_ele,
                            YTAcorrXL_LAMBDA_LT[i_v, :])
                   - (0.5 * SNR2[i_v]**2 / sigma2[i_v])
                   * np.dot(np.dot(YTAcorrXL_LAMBDA_LT[i_v, :],
                                   dXTAcorrX_drho1_ele),
                            YTAcorrXL_LAMBDA_LT[i_v, :]))
        deriv = np.empty(np.size(param))
        deriv[idx_param_fitU['Cholesky']] = deriv_L[l_idx]
        deriv[idx_param_fitU['a1']] = deriv_a1
        return -LL, -deriv
    def _loglike_AR1_diagV_fitV(self, param,
                                X0TAX0, XTAX0, X0TAY, X0TAX0_i,
                                XTAcorrX, XTAcorrY, YTAcorrY,
                                LTXTAcorrY, XTAcorrXL, LTXTAcorrXL,
                                L_l, a1, l_idx, n_C, n_T, n_V, n_run,
                                n_X0, idx_param_fitV, rank=None,
                                GP_space=False, GP_inten=False,
                                dist2=None, inten_dist2=None,
                                space_smooth_range=None,
                                inten_smooth_range=None):
        # This function calculates the log likelihood of data given
        # the log of the square of pseudo signal to noise ratio in each voxel.
        # The free parameter log(SNR^2) is in param
        # This likelihood is iteratively optimized with the one with _fitU.
        # The cholesky factor of U and autoregressive coefficient
        # in temporal AR(1) model for noise are fixed.
        # Because the ML estimate of the variance of noise in each voxel
        # (sigma^2) given other parameters has analytic form,
        # we do not need to explicitly parametrize it.
        # Just set it to the ML value.
        #
        # L_l is the lower triangular part of L, a1 is tan(rho1*pi/2),
        # where rho1 is the autoregressive coefficient in each voxel
        # We can optionally include Gaussian Process prior to log(SNR).
        # This term is not included in _fitU, because log(SNR)
        # are fixed in _fitU.
        # GP_space and GP_inten are Boolean, indicating whether we want to
        # include GP kernels either on voxel coordinates or intensity.
        # dist2 and inten_dist2 are the squares of spatial distances and
        # intensity differences ([n_voxel x n_voxel]. space_smooth_range
        # and inten_smooth_range are the range we believe the GP length
        # scale should reside in. They are used in additional half-cauchy
        # prior to constrain these length scales.
        #
        # Returns the negative log likelihood (negative log posterior
        # when GP priors are requested) and its negative gradient with
        # respect to param, for use with scipy.optimize.minimize.
        n_l = np.size(l_idx[0])
        # the number of parameters in the index of lower-triangular matrix
        if rank is None:
            # Infer rank from the number of lower-triangular entries.
            rank = int((2 * n_C + 1 -
                        np.sqrt(n_C**2 * 4 + n_C * 4 + 1 - 8 * n_l)) / 2)
        L = np.zeros([n_C, rank])
        L[l_idx] = L_l
        log_SNR2 = np.empty(n_V)
        log_SNR2[0:n_V - 1] = param[idx_param_fitV['log_SNR2']]
        log_SNR2[-1] = -np.sum(log_SNR2[0:n_V - 1])
        # This is following the restriction that SNR's have geometric mean
        # of 1. That is why they are called pseudo-SNR. This restriction
        # is imposed because SNR and L are determined only up to a scale
        # Be cautious that during simulation, when there is absolute
        # no signal in the data, sometimes the fitting diverges,
        # presumably because we have created correlation between log_SNR2
        # due to the constraint. But I have not reproduced this often.
        SNR2 = np.exp(log_SNR2)
        # If requested, a GP prior is imposed on log(SNR).
        rho1 = 2.0 / np.pi * np.arctan(a1)
        # AR(1) coefficient, dimension: space
        LL, LAMBDA_i, LAMBDA, YTAcorrXL_LAMBDA, sigma2 \
            = self._calc_LL(rho1, LTXTAcorrXL, LTXTAcorrY, YTAcorrY, X0TAX0,
                            SNR2, n_V, n_T, n_run, rank, n_X0)
        # Log likelihood of data given parameters, without the GP prior.
        deriv_log_SNR2 = (-rank + np.trace(LAMBDA, axis1=1, axis2=2)) * 0.5\
            + np.sum(YTAcorrXL_LAMBDA**2, axis=1) * SNR2 / sigma2 / 2
        # Partial derivative of log likelihood over log(SNR^2)
        # dimension: space,
        # The second term above is due to the equation for calculating
        # sigma2
        if GP_space:
            # Imposing GP prior on log(SNR) at least over
            # spatial coordinates
            c_space = param[idx_param_fitV['c_space']]
            l2_space = np.exp(c_space)
            # The square of the length scale of the GP kernel defined on
            # the spatial coordinates of voxels
            dl2_dc_space = l2_space
            # partial derivative of l^2 over b
            if GP_inten:
                c_inten = param[idx_param_fitV['c_inten']]
                l2_inten = np.exp(c_inten)
                # The square of the length scale of the GP kernel defined
                # on the image intensity of voxels
                dl2_dc_inten = l2_inten
                # partial derivative of l^2 over b
                K_major = np.exp(- (dist2 / l2_space
                                    + inten_dist2 / l2_inten)
                                 / 2.0)
            else:
                K_major = np.exp(- dist2 / l2_space / 2.0)
            # The kernel defined over the spatial coordinates of voxels.
            # This is a template: the diagonal values are all 1, meaning
            # the variance of log(SNR) has not been multiplied
            K_tilde = K_major + np.diag(np.ones(n_V) * self.eta)
            # We add a small number to the diagonal to make sure the matrix
            # is invertible.
            # Note that the K_tilde here is still a template:
            # It is the correct K divided by the variance tau^2
            # So it does not depend on the variance of the GP.
            L_K_tilde = np.linalg.cholesky(K_tilde)
            inv_L_K_tilde = np.linalg.solve(L_K_tilde, np.identity(n_V))
            inv_K_tilde = np.dot(inv_L_K_tilde.T, inv_L_K_tilde)
            log_det_K_tilde = np.sum(np.log(np.diag(L_K_tilde)**2))
            invK_tilde_log_SNR = np.dot(inv_K_tilde, log_SNR2) / 2
            log_SNR_invK_tilde_log_SNR = np.dot(log_SNR2,
                                                invK_tilde_log_SNR) / 2
            # MAP estimate of the variance of the Gaussian Process given
            # other parameters.
            tau2, log_ptau = self.tau2_prior(log_SNR_invK_tilde_log_SNR, n_V,
                                             self.tau_range)
            # log_ptau is log(p(tau)) given the form of prior for tau
            LL += log_ptau
            # GP prior terms added to the log likelihood
            LL = LL - log_det_K_tilde / 2.0 - n_V / 2.0 * np.log(tau2) \
                - np.log(2 * np.pi) * n_V / 2.0 \
                - log_SNR_invK_tilde_log_SNR / tau2 / 2
            deriv_log_SNR2 -= invK_tilde_log_SNR / tau2 / 2.0
            # Note that the derivative to log(SNR) is
            # invK_tilde_log_SNR / tau2, but we are calculating the
            # derivative to log(SNR^2)
            dK_tilde_dl2_space = dist2 * (K_major) / 2.0 \
                / l2_space**2
            deriv_c_space = \
                (np.dot(np.dot(invK_tilde_log_SNR, dK_tilde_dl2_space),
                        invK_tilde_log_SNR) / tau2 / 2.0
                 - np.sum(inv_K_tilde * dK_tilde_dl2_space) / 2.0)\
                * dl2_dc_space
            # Prior on the length scales
            LL += scipy.stats.halfcauchy.logpdf(
                l2_space**0.5, scale=space_smooth_range)
            deriv_c_space -= 1 / (l2_space + space_smooth_range**2)\
                * dl2_dc_space
            if GP_inten:
                dK_tilde_dl2_inten = inten_dist2 * K_major \
                    / 2.0 / l2_inten**2
                deriv_c_inten = \
                    (np.dot(np.dot(invK_tilde_log_SNR, dK_tilde_dl2_inten),
                            invK_tilde_log_SNR) / tau2 / 2.0
                     - np.sum(inv_K_tilde * dK_tilde_dl2_inten) / 2.0)\
                    * dl2_dc_inten
                # Prior on the length scale
                LL += scipy.stats.halfcauchy.logpdf(
                    l2_inten**0.5, scale=inten_smooth_range)
                deriv_c_inten -= 1 / (l2_inten + inten_smooth_range**2)\
                    * dl2_dc_inten
        else:
            LL += np.sum(scipy.stats.norm.logpdf(log_SNR2 / 2.0,
                                                 scale=self.tau_range))
            # If GP prior is not requested, we still want to regularize on
            # the magnitude of log(SNR).
            deriv_log_SNR2 += - log_SNR2 / self.tau_range**2 / 4.0
        deriv = np.empty(np.size(param))
        deriv[idx_param_fitV['log_SNR2']] = \
            deriv_log_SNR2[0:n_V - 1] - deriv_log_SNR2[n_V - 1]
        if GP_space:
            deriv[idx_param_fitV['c_space']] = deriv_c_space
        if GP_inten:
            deriv[idx_param_fitV['c_inten']] = deriv_c_inten
        return -LL, -deriv
    def _loglike_AR1_singpara(self, param, XTX, XTDX, XTFX, YTY_diag,
                              YTDY_diag, YTFY_diag, XTY, XTDY, XTFY,
                              X0TX0, X0TDX0, X0TFX0,
                              XTX0, XTDX0, XTFX0, X0TY, X0TDY, X0TFY,
                              l_idx, n_C, n_T, n_V, n_run, n_X0,
                              idx_param_sing, rank=None):
        # In this version, we assume that beta is independent
        # between voxels and noise is also independent.
        # singpara version uses single parameter of sigma^2 and rho1
        # to all voxels. This serves as the initial fitting to get
        # an estimate of L and sigma^2 and rho1. The SNR is inherently
        # assumed to be 1.
        #
        # Returns the negative log likelihood and its negative gradient
        # with respect to param, for use with scipy.optimize.minimize.
        n_l = np.size(l_idx[0])
        # the number of parameters in the index of lower-triangular matrix
        if rank is None:
            # Infer rank from the number of lower-triangular entries.
            rank = int((2 * n_C + 1
                        - np.sqrt(n_C**2 * 4 + n_C * 4 + 1 - 8 * n_l)) / 2)
        L = np.zeros([n_C, rank])
        # Cholesky factor of U, filled from the free parameters.
        L[l_idx] = param[idx_param_sing['Cholesky']]
        a1 = param[idx_param_sing['a1']]
        # Map the unbounded parameter a1 = tan(rho1*pi/2) back to (-1, 1).
        rho1 = 2.0 / np.pi * np.arctan(a1)
        # Quadratic forms with A = I - rho1*D + rho1^2*F (inverse AR(1)
        # covariance up to the noise variance), shared by all voxels here.
        XTAX = XTX - rho1 * XTDX + rho1**2 * XTFX
        X0TAX0 = X0TX0 - rho1 * X0TDX0 + rho1**2 * X0TFX0
        XTAX0 = XTX0 - rho1 * XTDX0 + rho1**2 * XTFX0
        # "corr" suffix: quadratic forms after projecting out the
        # nuisance regressors X0.
        XTAcorrX = XTAX - np.dot(XTAX0, np.linalg.solve(X0TAX0, XTAX0.T))
        XTAcorrXL = np.dot(XTAcorrX, L)
        LAMBDA_i = np.dot(np.dot(L.T, XTAcorrX), L) + np.eye(rank)
        XTAY = XTY - rho1 * XTDY + rho1**2 * XTFY
        X0TAY = X0TY - rho1 * X0TDY + rho1**2 * X0TFY
        XTAcorrY = XTAY - np.dot(XTAX0, np.linalg.solve(X0TAX0, X0TAY))
        LTXTAcorrY = np.dot(L.T, XTAcorrY)
        YTAY = YTY_diag - rho1 * YTDY_diag + rho1**2 * YTFY_diag
        YTAcorrY = YTAY \
            - np.sum(X0TAY * np.linalg.solve(X0TAX0, X0TAY), axis=0)
        LAMBDA_LTXTAcorrY = np.linalg.solve(LAMBDA_i, LTXTAcorrY)
        L_LAMBDA_LTXTAcorrY = np.dot(L, LAMBDA_LTXTAcorrY)
        # ML estimate of the (single, shared) noise variance.
        sigma2 = np.mean(YTAcorrY -
                         np.sum(LTXTAcorrY * LAMBDA_LTXTAcorrY, axis=0))\
            / (n_T - n_X0)
        LL = n_V * (-np.log(sigma2) * (n_T - n_X0) * 0.5
                    + np.log(1 - rho1**2) * n_run * 0.5
                    - self._half_log_det(X0TAX0)
                    - self._half_log_det(LAMBDA_i))
        deriv_L = np.dot(XTAcorrY, LAMBDA_LTXTAcorrY.T) / sigma2 \
            - np.dot(np.dot(XTAcorrXL, LAMBDA_LTXTAcorrY),
                     LAMBDA_LTXTAcorrY.T) / sigma2 \
            - np.linalg.solve(LAMBDA_i, XTAcorrXL.T).T * n_V
        # These terms are used to construct derivative to a1.
        dXTAX_drho1 = - XTDX + 2 * rho1 * XTFX
        dX0TAX0_drho1 = - X0TDX0 + 2 * rho1 * X0TFX0
        dXTAX0_drho1 = - XTDX0 + 2 * rho1 * XTFX0
        invX0TAX0_X0TAX = np.linalg.solve(X0TAX0, XTAX0.T)
        dXTAX0_drho1_invX0TAX0_X0TAX = np.dot(dXTAX0_drho1, invX0TAX0_X0TAX)
        dXTAcorrX_drho1 = dXTAX_drho1 - dXTAX0_drho1_invX0TAX0_X0TAX \
            - dXTAX0_drho1_invX0TAX0_X0TAX.T \
            + np.dot(np.dot(invX0TAX0_X0TAX.T, dX0TAX0_drho1),
                     invX0TAX0_X0TAX)
        dLTXTAcorrXL_drho1 = np.dot(np.dot(L.T, dXTAcorrX_drho1), L)
        dYTAY_drho1 = - YTDY_diag + 2 * rho1 * YTFY_diag
        dX0TAY_drho1 = - X0TDY + 2 * rho1 * X0TFY
        invX0TAX0_X0TAY = np.linalg.solve(X0TAX0, X0TAY)
        dYTAX0_drho1_invX0TAX0_X0TAY = np.sum(dX0TAY_drho1
                                              * invX0TAX0_X0TAY, axis=0)
        dYTAcorrY_drho1 = dYTAY_drho1 - dYTAX0_drho1_invX0TAX0_X0TAY * 2\
            + np.sum(invX0TAX0_X0TAY *
                     np.dot(dX0TAX0_drho1, invX0TAX0_X0TAY), axis=0)
        dXTAY_drho1 = - XTDY + 2 * rho1 * XTFY
        dXTAcorrY_drho1 = dXTAY_drho1 \
            - np.dot(dXTAX0_drho1, invX0TAX0_X0TAY) \
            - np.dot(invX0TAX0_X0TAX.T, dX0TAY_drho1) \
            + np.dot(np.dot(invX0TAX0_X0TAX.T, dX0TAX0_drho1),
                     invX0TAX0_X0TAY)
        # Chain rule: d(rho1)/d(a1) = 2 / (pi * (1 + a1^2))
        deriv_a1 = 2.0 / (np.pi * (1 + a1**2)) \
            * (n_V * (- n_run * rho1 / (1 - rho1**2)
                      - 0.5 * np.trace(np.linalg.solve(
                          X0TAX0, dX0TAX0_drho1))
                      - 0.5 * np.trace(np.linalg.solve(
                          LAMBDA_i, dLTXTAcorrXL_drho1)))
               - 0.5 * np.sum(dYTAcorrY_drho1) / sigma2
               + np.sum(dXTAcorrY_drho1 * L_LAMBDA_LTXTAcorrY) / sigma2
               - 0.5 * np.sum(np.dot(dXTAcorrX_drho1, L_LAMBDA_LTXTAcorrY)
                              * L_LAMBDA_LTXTAcorrY) / sigma2)
        deriv = np.empty(np.size(param))
        deriv[idx_param_sing['Cholesky']] = deriv_L[l_idx]
        deriv[idx_param_sing['a1']] = deriv_a1
        return -LL, -deriv
def _loglike_AR1_null(self, param, YTY_diag, YTDY_diag, YTFY_diag,
X0TX0, X0TDX0, X0TFX0, X0TY, X0TDY, X0TFY,
n_T, n_V, n_run, n_X0):
# This function calculates the log likelihood of data given AR(1)
# parameters of noise as free parameters.
# Free parameters are in param.
# It serves as a null model which assumes no response to design
# matrix.
a1 = param
rho1 = 2.0 / np.pi * np.arctan(a1) # auto-regressive coefficients
YTAY = self._make_ar1_quad_form(YTY_diag, YTDY_diag, YTFY_diag, rho1)
# dimension: space,
# A/sigma2 is the inverse of noise covariance matrix in each voxel.
# YTAY means Y'AY
X0TAX0 = X0TX0[None, :, :] - rho1[:, None, None] \
* X0TDX0[None, :, :] \
+ rho1[:, None, None]**2 * X0TFX0[None, :, :]
# dimension: space*#baseline*#baseline
X0TAY = self._make_ar1_quad_form(X0TY, X0TDY, X0TFY, rho1)
# dimension: #baseline*space
# X0TAX0_i = np.linalg.solve(X0TAX0, np.identity(n_X0)[None, :, :])
X0TAX0_i = np.linalg.inv(X0TAX0)
# dimension: space*#baseline*#baseline
YTAcorrY = YTAY - np.sum(X0TAY * np.einsum('ijk,ki->ji',
X0TAX0_i, X0TAY), axis=0)
# dimension: space,
sigma2 = YTAcorrY / (n_T - n_X0)
# dimension: space,
LL = - np.sum(np.log(sigma2)) * (n_T - n_X0) * 0.5 \
+ np.sum(np.log(1 - rho1**2)) * n_run * 0.5 \
- np.sum(self._half_log_det(X0TAX0)) \
- (n_T - n_X0) * n_V * (1 + np.log(2 * np.pi)) * 0.5
# The following are for calculating the derivative to a1
deriv_a1 = np.empty(n_V)
dYTAY_drho1 = self._make_ar1_quad_form_grad(YTDY_diag, YTFY_diag, rho1)
# dimension: space,
dX0TAX0_drho1 = - X0TDX0 \
+ 2 * rho1[:, None, None] * X0TFX0
# dimension: space*rank*rank
dX0TAY_drho1 = self._make_ar1_quad_form_grad(X0TDY, X0TFY, rho1)
# dimension: rank*space
# The following are executed for each voxel.
for i_v in range(n_V):
# All variables with _ele as suffix are for data of just one voxel
invX0TAX0_X0TAY_ele = np.dot(X0TAX0_i[i_v, :, :], X0TAY[:, i_v])
# preparation for the variable below
dYTAcorrY_drho1_ele = dYTAY_drho1[i_v] \
- np.dot(dX0TAY_drho1[:, i_v], invX0TAX0_X0TAY_ele) * 2\
+ np.dot(np.dot(invX0TAX0_X0TAY_ele, dX0TAX0_drho1[i_v, :, :]),
invX0TAX0_X0TAY_ele)
deriv_a1[i_v] = 2 / np.pi / (1 + a1[i_v]**2) \
* (- n_run * rho1[i_v] / (1 - rho1[i_v]**2)
- np.einsum('ij,ij', X0TAX0_i[i_v, :, :],
dX0TAX0_drho1[i_v, :, :]) * 0.5
- dYTAcorrY_drho1_ele * 0.5 / sigma2[i_v])
deriv = deriv_a1
return -LL, -deriv
class GBRSA(BRSA):
"""Group Bayesian representational Similarity Analysis (GBRSA)
Given the time series of neural imaging data in a region of interest
(ROI) and the hypothetical neural response (design matrix) to
each experimental condition of interest,
calculate the shared covariance matrix of
the voxels(recording unit)' response to each condition,
and the relative SNR of each voxels.
The relative SNR could be considered as the degree of contribution
of each voxel to this shared covariance matrix.
A correlation matrix converted from the covariance matrix
will be provided as a quantification of neural representational similarity.
Both tools provide estimation of SNR and noise parameters at the end,
and both tools provide empirical Bayesian estimates of activity patterns
beta, together with weight map of nuisance signals beta0.
The differences of this tool from BRSA are:
(1) It allows fitting a shared covariance matrix (which can be converted
to similarity matrix) across multiple subjects.
This is analogous to SRM under funcalign submodule. Because of using
multiple subjects, the result is less noisy.
(2) In the fitting process, the SNR and noise parameters are marginalized
for each voxel. Therefore, this tool should be faster than BRSA
when analyzing an ROI of hundreds to thousands voxels. It does not
provide a spatial smoothness prior on SNR though.
(3) The voxel-wise pseudo-SNR and noise parameters estimated are
posterior mean estimates, while those estimated by BRSA are
maximum-a-posterior estimates.
If your goal is to perform searchlight RSA with relatively fewer voxels
on single subject, BRSA should be faster. However, GBRSA can in principle
be used together with searchlight in a template space such as MNI.
.. math::
Y = X \\cdot \\beta + X_0 \\cdot \\beta_0 + \\epsilon
\\beta_i \\sim N(0,(s_{i} \\sigma_{i})^2 U)
See also `.BRSA`.
Please note that the model assumes that the covariance matrix U which
all \\beta_i follow is zero-meaned. For more details of its implication,
see documentation of `.BRSA`
Parameters
----------
n_iter : int.
Number of maximum iterations to run the algorithm.
rank : int.
The rank of the covariance matrix.
If not provided, the covariance matrix will be assumed
to be full rank. When you have many conditions
(e.g., calculating the similarity matrix of responses to each event),
you might want to start with specifying a lower rank and use metrics
such as AIC or BIC to decide the optimal rank. The log likelihood
for the fitted data can be retrieved through private attributes
_LL_train\\_. Note that this log likelihood score is only used
here for selecting hyperparameters such as rank. For any formal
model comparison, we recommend using score() function on left-out
data.
auto_nuisance: Boolean.
In order to model spatial correlation between voxels that cannot
be accounted for by common response captured in the design matrix,
we assume that a set of time courses not related to the task
conditions are shared across voxels with unknown amplitudes.
One approach is for users to provide time series which they consider
as nuisance but exist in the noise (such as head motion).
The other way is to take the first n_nureg principal components
in the residual after subtracting the response to the design matrix
from the data, and use these components as the nuisance regressor.
This flag is for the second approach. If turned on,
PCA or factor analysis will be applied to the residuals
to obtain new nuisance regressors in each round of fitting.
These two approaches can be combined. If the users provide nuisance
regressors and set this flag as True, then the first n_nureg
principal components of the residuals after subtracting
both the responses to design matrix and the user-supplied nuisance
regressors will be used in addition to the nuisance regressors
provided by the users.
Note that nuisance regressor is not required from user. If it is
not provided, DC components for each run will be included as nuisance
regressor regardless of the auto_nuisance parameter.
n_nureg: Optional[int].
Number of nuisance regressors to use in order to model signals
shared across voxels not captured by the design matrix.
This number is in addition to any nuisance regressor that the user
has already provided.
If set to None, the number of nuisance regressors will be
automatically determined based on M Gavish
and D Donoho's approximate estimation of optimal hard
threshold for singular values. (Gavish & Donoho,
IEEE Transactions on Information Theory 60.8 (2014): 5040-5053.)
This only takes effect if auto_nuisance is True.
nureg_zscore: Boolean.
A flag to tell the algorithm whether data is z-scored before
estimating the number of nuisance regressor components necessary to
        account for spatial noise correlation. It also determines whether
the residual noise is z-scored before estimating the nuisance
regressors from residual.
This only takes effect if auto_nuisance is True.
nureg_method: string, naming a method from sklearn.decomposition.
'PCA', 'ICA', 'FA' or 'SPCA' are currently supported.
The method to estimate the shared component in noise across voxels.
This only takes effect if auto_nuisance is True.
baseline_single: Boolean.
A time course of constant 1 will be included to the nuisance
regressor for each participant. If baseline_single is set to False,
one such regressor is included for each fMRI run, but at the end of
fitting, a single component in beta0\\_ will be computed as the average
of the weight maps corresponding to these regressors. This might
cause underestimation of noise variance.
If baseline_single is True, only one regressor of constant 1 will be
used for the whole dataset. This might be desirable if you
believe the average image intensity might not scale with the
same proportion for different voxels across scan. In other words,
it is possible that some part of the brain is more vulnerable to
change in baseline intensity due to facts such as
field inhomogeneity. Setting baseline_single to True will force the
nuisance regressors automatically estimated from residuals to
capture this. However, when each task condition only occurs in one
run and when the design matrix in each run sums together close to
a flat line, this option can cause the estimated similarity to be
        extremely high between conditions occurring in the same run.
SNR_prior: string.
The type of prior for pseudo-SNR.
If set to 'exp', truncated exponential distribution with scale
parameter of 1 is imposed on pseudo-SNR.
If set to 'lognorm', a truncated log normal prior is imposed.
In this case, the standard deviation of log(SNR) is set
by the parameter logS_range.
If set to 'unif', a uniform prior in [0,1] is imposed.
In all above cases, SNR is numerically
marginalized on a grid of parameters. So the parameter SNR_bins
determines how accurate the numerical integration is. The more
number of bins are used, the more accurate the numerical
integration becomes.
If set to 'equal', all voxels are assumed to have the same fixed
SNR. Pseudo-SNR is 1.0 for all voxels.
In all the cases, the grids used for pseudo-SNR do not really
set an upper bound for SNR, because the real SNR is determined
by both pseudo-SNR and U, the shared covariance structure.
logS_range: float.
The reasonable range of the spread of SNR in log scale.
This parameter only takes effect if SNR_prior is set to 'lognorm'.
It is effectively the `s` parameter of `scipy.stats.lognorm`,
or the standard deviation of the distribution in log scale.
logS_range specifies how variable you believe the SNRs
to vary across voxels in log scale.
This range should not be set too large, otherwise the fitting
may encounter numerical issue.
If it is set too small, the estimated SNR will turn to be too
close to each other and the estimated similarity matrix might
overfit to voxels of low SNR.
If you increase logS_range, it is recommended to increase
SNR_bins accordingly, otherwise the pseudo-SNR values evaluated might
be too sparse, causing the posterior pseudo-SNR estimations
to be clustered around the bins.
SNR_bins: integer.
The number of bins used to numerically marginalize the pseudo-SNR
parameter. In general, you should try to choose a large number
to the degree that decreasing SNR_bins does not change the result
of fitting result. However, very large number of bins also causes
slower computation and larger memory consumption.
For SNR_prior='lognorm', the default value 21 is based on
the default value of logS_range=1.0 and bin width of 0.3 on log scale.
But it is also a reasonable choice for the other two options
for SNR_prior.
rho_bins: integer.
The number of bins to divide the region of (-1, 1) for rho.
This only takes effect for fitting the marginalized version.
If set to 20, discrete numbers of {-0.95, -0.85, ..., 0.95} will
be used to numerically integrate rho from -1 to 1.
optimizer: str or callable.
The optimizer to use for minimizing cost function which
scipy.optimize.minimize can accept.
We use 'L-BFGS-B' as a default. Users can try other strings
corresponding to optimizer provided by scipy.optimize.minimize,
or a custom optimizer, such as 'BFGS' or 'CG'.
Note that BRSA fits a lot of parameters. So a chosen optimizer
should accept gradient (Jacobian) of the cost function. Otherwise
        the fitting is likely to be unbearably slow. We do not calculate
Hessian of the objective function. So an optimizer which requires
Hessian cannot be used.
minimize_options: dictionary.
This is the dictionary passed as the options argument to
        scipy.optimize.minimize which minimizes the cost function during
fitting. Notice that the minimization is performed for up to
n_iter times, with the nuisance regressor re-estimated each time.
So within each of the n_iter steps of fitting,
        scipy.optimize.minimize does not need to fully converge. The key
'maxiter' in this dictionary determines the maximum number of
iteration done by scipy.optimize.minimize within each of the n_iter
steps of fitting.
tol: float.
Tolerance parameter passed to scipy.optimize.minimize. It is also
used for determining convergence of the alternating fitting
procedure.
random_state : RandomState or an int seed.
A random number generator instance to define the state of
the random permutations generator whenever the module
needs to generate random number (e.g., initial parameter
of the Cholesky factor).
anneal_speed: float.
Annealing is introduced in fitting of the Cholesky
decomposition of the shared covariance matrix. The amount
of perturbation decays exponentially. This parameter sets
the ratio of the maximum number of iteration to the
time constant of the exponential.
anneal_speed=10 means by n_iter/10 iterations,
        the amount of perturbation is reduced by a factor of e (~2.718).
Attributes
----------
U_ : numpy array, shape=[condition,condition].
The shared covariance matrix
L_ : numpy array, shape=[condition,condition].
The Cholesky factor of the shared covariance matrix
(lower-triangular matrix).
C_: numpy array, shape=[condition,condition].
The correlation matrix derived from the shared covariance matrix.
This is the estimated similarity matrix between neural patterns
to your task conditions. Notice that it is recommended that
you also check U\\_, which is the covariance matrix underlying
this correlation matrix. In cases there is almost no response
to your task conditions, the diagonal values of U\\_ would become
very small and C\\_ might contain many correlation coefficients
close to 1 or -1. This might not reflect true strong correlation
or strong negative correlation, but a result of lack of
task-related neural activity, design matrix that does not match
true neural response, or not enough data.
It is also recommended to check nSNR\\_ after mapping it back to
the brain. A "reasonable" map should at least have higher values
in gray matter in than white matter.
nSNR_ : list of numpy arrays, shape=[voxels,] for each subject in the list.
        The pseudo-SNR of all voxels. If SNR_prior='lognorm',
the geometric mean of nSNR\\_ would be approximately 1.
If SNR_prior='unif', all nSNR\\_ would be in the range of (0,1).
If SNR_prior='exp' (default), the range of values would vary
depending on the data and SNR_bins, but many should have low
values with few voxels with high values.
Note that this attribute can not be interpreted as true SNR,
but the relative ratios between voxels indicate the contribution
of each voxel to the representational similarity structure.
sigma_ : list of numpy arrays, shape=[voxels,] for each subject.
The estimated standard deviation of the noise in each voxel
Assuming AR(1) model, this means the standard deviation
of the innovation noise.
rho_ : list of numpy arrays, shape=[voxels,] for each subject.
The estimated autoregressive coefficient of each voxel
beta_: list of numpy arrays, shape=[conditions, voxels] for each subject.
The posterior mean estimation of the response amplitudes
of each voxel to each task condition.
beta0_: list of numpy arrays, shape=[n_nureg + n_base, voxels]
for each subject.
The loading weights of each voxel for the shared time courses
not captured by the design matrix.
n_base is the number of columns of the user-supplied nuisance
regressors plus one for DC component.
X0_: list of numpy arrays, shape=[time_points, n_nureg + n_base]
for each subject.
The estimated time course that is shared across voxels but
unrelated to the events of interest (design matrix).
beta0_null_: list of numpy arrays, shape=[n_nureg + n_base, voxels]
for each subject.
The equivalent of beta0\\_ in a null model which does not
include the design matrix and response pattern beta
X0_null_: list of numpy arrays, shape=[time_points, n_nureg + n_base]
for each subject.
The equivalent of X0\\_ in a null model which does not
include the design matrix and response pattern beta
n_nureg_: 1-d numpy array
Number of nuisance regressor used to model the spatial noise
correlation of each participant.
random_state_: `RandomState`
Random number generator initialized using random_state.
"""
def __init__(
self, n_iter=100, rank=None,
auto_nuisance=True, n_nureg=None, nureg_zscore=True,
nureg_method='PCA',
baseline_single=False, logS_range=1.0, SNR_prior='exp',
SNR_bins=21, rho_bins=20, tol=1e-4, optimizer='L-BFGS-B',
minimize_options={'gtol': 1e-4, 'disp': False,
'maxiter': 20}, random_state=None,
anneal_speed=10):
self.n_iter = n_iter
self.rank = rank
self.auto_nuisance = auto_nuisance
self.n_nureg = n_nureg
self.nureg_zscore = nureg_zscore
if auto_nuisance:
assert (n_nureg is None) \
or (isinstance(n_nureg, int) and n_nureg > 0), \
'n_nureg should be a positive integer or None'\
' if auto_nuisance is True.'
if self.nureg_zscore:
self.preprocess_residual = lambda x: _zscore(x)
else:
self.preprocess_residual = lambda x: x
if nureg_method == 'FA':
self.nureg_method = lambda x: FactorAnalysis(n_components=x)
elif nureg_method == 'PCA':
self.nureg_method = lambda x: PCA(n_components=x, whiten=True)
elif nureg_method == 'SPCA':
self.nureg_method = lambda x: SparsePCA(n_components=x,
max_iter=20, tol=tol)
elif nureg_method == 'ICA':
self.nureg_method = lambda x: FastICA(n_components=x,
whiten=True)
else:
raise ValueError('nureg_method can only be FA, PCA, '
'SPCA(for sparse PCA) or ICA')
self.baseline_single = baseline_single
if type(logS_range) is int:
logS_range = float(logS_range)
self.logS_range = logS_range
assert SNR_prior in ['unif', 'lognorm', 'exp', 'equal'], \
'SNR_prior can only be chosen from ''unif'', ''lognorm''' \
' ''exp'' and ''equal'''
self.SNR_prior = SNR_prior
if self.SNR_prior == 'equal':
self.SNR_bins = 1
else:
self.SNR_bins = SNR_bins
self.rho_bins = rho_bins
self.tol = tol
self.optimizer = optimizer
self.minimize_options = minimize_options
self.random_state = random_state
self.anneal_speed = anneal_speed
return
    def fit(self, X, design, nuisance=None, scan_onsets=None):
        """ Fit the model to data of all participants jointly.

        Parameters
        ----------
        X: list of numpy arrays, shape=[time_points, voxels] for each entry.
            Data to be fitted. Each participant corresponds to one item in
            the list. If you have multiple scans of the same participants
            that you want to analyze together, you should concatenate them
            along the time dimension after proper preprocessing (e.g. spatial
            alignment), and specify the onsets of each scan in scan_onsets.
        design: list of numpy arrays, shape=[time_points, conditions] for each.
            This is the design matrix of each participant.
            It should only include the hypothetical response for task
            conditions.
            You should not include regressors for a DC component or
            motion parameters, unless with a strong reason.
            If you want to model head motion, you should include them
            in nuisance regressors.
            If you have multiple runs, the design matrix
            of all runs should be concatenated along the time dimension for
            each participant, with every column for one condition across runs.
            If the design matrix is the same for all subjects,
            either provide a list as required, or provide single numpy array.
        nuisance: optional, list of numpy arrays,
            shape=[time_points, nuisance_factors] for each subject in the list.
            Nuisance regressors of each participant.
            The responses to these regressors will be marginalized out from
            each voxel, which means they are considered, but won't be assumed
            to share the same pseudo-SNR map with the design matrix.
            Therefore, the pseudo-SNR map will only reflect the
            relative contribution of design matrix to each voxel.
            You can provide time courses such as those for head motion
            to this parameter.
            Note that if auto_nuisance is set to True, the first
            n_nureg principal components of residual (excluding the response
            to the design matrix and the user-provided nuisance regressors)
            will be included as additional nuisance regressor after the
            first round of fitting.
            If auto_nuisance is set to False, the nuisance regressors supplied
            by the users together with DC components will be used as
            nuisance time series.
        scan_onsets: optional, list of numpy arrays, shape=[runs,] for each.
            Each item in the list specifies the indices of X which correspond
            to the onset of each scanning run for one participant.
            For example, if you have two experimental runs of
            the first participant, each with 100 TRs, and one run of the
            second participant, with 150 TR, then scan_onsets should be
            [ndarray([0, 100]), ndarray([150])].
            The effect of this argument is to make the inverse matrix
            of the temporal covariance matrix of noise block-diagonal.
            If you do not provide the argument, the program will
            assume all data are from the same run for each participant.

        Returns
        -------
        self
            The fitted estimator itself, with the posterior estimates
            stored in the attributes documented on the class.
        """
        logger.info('Running Group Bayesian RSA (which can also analyze'
                    ' data of a single participant). Voxel-specific parameters'
                    'are all marginalized.')
        self.random_state_ = check_random_state(self.random_state)
        # setting random seed
        logger.debug('RandState set to {}'.format(self.random_state_))
        # Checking all inputs.
        X = self._check_data_GBRSA(X)
        design = self._check_design_GBRSA(design, X)
        nuisance = self._check_nuisance_GBRSA(
            copy.deepcopy(nuisance), X)
        # The reason that we use copy of nuisance is because they
        # may be modified inside our code.
        scan_onsets = self._check_scan_onsets_GBRSA(scan_onsets, X)
        # Run Marginalized Bayesian RSA
        # Note that we have a change of notation here.
        # Within _fit_RSA_marginalized, design matrix is named X
        # and data is named Y, to reflect the
        # generative model that data Y is generated by mixing the response
        # X to experiment conditions and other neural activity.
        # However, in fit(), we keep the scikit-learn API that
        # X is the input data to fit and y, a reserved name not used, is
        # the label to map to from X.
        assert self.SNR_bins >= 10 and self.SNR_prior != 'equal' or \
            self.SNR_bins == 1 and self.SNR_prior == 'equal', \
            'At least 10 bins are required to perform the numerical'\
            ' integration over SNR, unless choosing SNR_prior=''equal'','\
            ' in which case SNR_bins should be 1.'
        assert self.rho_bins >= 10, \
            'At least 10 bins are required to perform the numerical'\
            ' integration over rho'
        assert self.logS_range * 6 / self.SNR_bins < 0.5 \
            or self.SNR_prior != 'lognorm', \
            'The minimum grid of log(SNR) should not be larger than 0.5 '\
            'if log normal prior is chosen for SNR.' \
            ' Please consider increasing SNR_bins or reducing logS_range'
        self.n_subj_ = len(X)
        self.n_V_ = [None] * self.n_subj_
        for subj, x in enumerate(X):
            self.n_V_[subj] = x.shape[1]
        if self.auto_nuisance:
            if self.n_nureg is None:
                logger.info('numbers of nuisance regressors are determined '
                            'automatically.')
                n_runs = np.zeros(self.n_subj_)
                n_comps = np.ones(self.n_subj_)
                for s_id in np.arange(self.n_subj_):
                    # For each subject, determine the number of nuisance
                    # regressors needed to account for the covariance
                    # in residuals.
                    # Residual is calculated by regressing
                    # out the design matrix and DC component and linear trend
                    # from data of each run.
                    run_TRs, n_runs[s_id] = self._run_TR_from_scan_onsets(
                        X[s_id].shape[0], scan_onsets[s_id])
                    ts_dc = self._gen_legendre(run_TRs, [0])
                    _, ts_base, _ = self._merge_DC_to_base(
                        ts_dc, nuisance[s_id], False)
                    ts_reg = np.concatenate((ts_base, design[s_id]), axis=1)
                    beta_hat = np.linalg.lstsq(ts_reg, X[s_id], rcond=None)[0]
                    residuals = X[s_id] - np.dot(ts_reg, beta_hat)
                    n_comps[s_id] = np.min(
                        [np.max([Ncomp_SVHT_MG_DLD_approx(
                            residuals, self.nureg_zscore), 1]),
                         np.linalg.matrix_rank(residuals) - 1])
                    # n_nureg_ should not exceed the rank of
                    # residual minus 1.
                self.n_nureg_ = n_comps
                logger.info('Use {} nuisance regressors to model the spatial '
                            'correlation in noise.'.format(self.n_nureg_))
            else:
                self.n_nureg_ = self.n_nureg * np.ones(self.n_subj_)
            self.n_nureg_ = np.int32(self.n_nureg_)
        # Fit the null model (no task response) first; its results are
        # stored separately and used by score().
        self.beta0_null_, self.sigma_null_, self.rho_null_, self.X0_null_,\
            self._LL_null_train_ = self._fit_RSA_marginalized_null(
                Y=X, X_base=nuisance, scan_onsets=scan_onsets)
        # Fit the full model with the design matrix.
        self.U_, self.L_, self.nSNR_, self.beta_, self.beta0_,\
            self.sigma_, self.rho_, self.X0_, self._LL_train_ = \
            self._fit_RSA_marginalized(
                X=design, Y=X, X_base=nuisance,
                scan_onsets=scan_onsets)
        self.C_ = utils.cov2corr(self.U_)
        self.design_ = design.copy()
        self._rho_design_ = [None] * self.n_subj_
        self._sigma2_design_ = [None] * self.n_subj_
        self._rho_X0_ = [None] * self.n_subj_
        self._sigma2_X0_ = [None] * self.n_subj_
        self._rho_X0_null_ = [None] * self.n_subj_
        self._sigma2_X0_null_ = [None] * self.n_subj_
        for subj in np.arange(self.n_subj_):
            self._rho_design_[subj], self._sigma2_design_[subj] = \
                self._est_AR1(self.design_[subj], same_para=True)
            self._rho_X0_[subj], self._sigma2_X0_[subj] = \
                self._est_AR1(self.X0_[subj])
            self._rho_X0_null_[subj], self._sigma2_X0_null_[subj] =\
                self._est_AR1(self.X0_null_[subj])
        # AR(1) parameters of the design matrix and nuisance regressors,
        # which will be used in transform or score.
        return self
def transform(self, X, y=None, scan_onsets=None):
""" Use the model to estimate the time course of response to
each condition (ts), and the time course unrelated to task
(ts0) which is spread across the brain.
This is equivalent to "decoding" the design matrix and
nuisance regressors from a new dataset different from the
training dataset on which fit() was applied. An AR(1) smooth
prior is imposed on the decoded ts and ts0 with the AR(1)
parameters learnt from the corresponding time courses in the
training data.
Parameters
----------
X : list of 2-D arrays. For each item, shape=[time_points, voxels]
New fMRI data of the same subjects. The voxels should
match those used in the fit() function.
The size of the list should match the size of the list X fed
to fit(), with each item in the list corresponding to data
from the same subject in the X fed to fit(). If you do not
need to transform some subjects' data, leave the entry
corresponding to that subject as None.
If data are z-scored when fitting the model,
data should be z-scored as well when calling transform()
y : not used (as it is unsupervised learning)
scan_onsets : list of 1-D numpy arrays,
Each array corresponds to the onsets of
scans in the data X for the particular subject.
If not provided, data will be assumed
to be acquired in a continuous scan.
Returns
-------
ts : list of 2-D arrays. For each, shape = [time_points, condition]
The estimated response to the cognitive dimensions
(task dimensions) whose response amplitudes were estimated
during the fit step.
One item for each subject. If some subjects' data are
not provided, None will be returned.
ts0: list of 2-D array. For each, shape = [time_points, n_nureg]
The estimated time courses spread across the brain, with the
loading weights estimated during the fit step.
One item for each subject. If some subjects' data are
not provided, None will be returned.
"""
X = self._check_data_GBRSA(X, for_fit=False)
scan_onsets = self._check_scan_onsets_GBRSA(scan_onsets, X)
assert len(X) == self.n_subj_
ts = [None] * self.n_subj_
ts0 = [None] * self.n_subj_
log_p = [None] * self.n_subj_
for i, x in enumerate(X):
if x is not None:
s = scan_onsets[i]
ts[i], ts0[i], log_p[i] = self._transform(
Y=x, scan_onsets=s, beta=self.beta_[i],
beta0=self.beta0_[i], rho_e=self.rho_[i],
sigma_e=self.sigma_[i], rho_X=self._rho_design_[i],
sigma2_X=self._sigma2_design_[i],
rho_X0=self._rho_X0_[i], sigma2_X0=self._sigma2_X0_[i])
return ts, ts0
def score(self, X, design, scan_onsets=None):
""" After fit() is applied to the data of a group of participants,
use the parameters estimated by fit() function to evaluate
from some data of a set of participants to evaluate
the log likelihood of some new data of the same participants
given these estimated parameters.
Design matrices of the same set of experimental
conditions in the testing data should be provided, with each
column corresponding to the same condition as that column
in the design matrix of the training data.
Unknown nuisance time series will be marginalized, assuming
they follow the same spatial pattern as in the training
data. The hypothetical response captured by the design matrix
will be subtracted from data before the marginalization
when evaluating the log likelihood. For null model,
nothing will be subtracted before marginalization.
There is a difference between the form of likelihood function
used in fit() and score(). In fit(), the response amplitude
beta to design matrix X and the modulation beta0 by nuisance
regressor X0 are both marginalized, with X provided and X0
estimated from data. In score(), posterior estimation of
beta and beta0 from the fitting step are assumed unchanged
in testing data; X is assumed given by the user,
and X0 is marginalized.
The logic underlying score() is to transfer
as much as what we can learn from training data when
calculating a likelihood score for testing data. This is done
at the cost of using point estimation for beta and beta0.
If you z-scored your data during fit step, you should
z-score them for score function as well. If you did not
z-score in fitting, you should not z-score here either.
Parameters
----------
X : List of 2-D arrays. For each item, shape=[time_points, voxels]
fMRI data of new data of the same participants.
The voxels of each participants should
match those used in the fit() function. If data are z-scored
(recommended) when fitting the model, data should be z-scored
as well when calling transform()
design : List of 2-D arrays. shape=[time_points, conditions] for each
Each corresponds to one participant.
Design matrices expressing the hypothetical response of
the task conditions in data X.
scan_onsets : List of 2-D arrays, shape=[#fMRI runs] for each
Each array corresponds to one participant.
Lists of indices corresponding to the onsets of
scans in the data X.
If not provided, data will be assumed
to be acquired in a continuous scan.
Returns
-------
ll: list, shape=[number of participants]
The log likelihoods of the new data based on the model and its
parameters fit to the training data.
If data of some participants are not provided, the corresponding
entry will be None.
ll_null: list, shape=[number of participants]
The log likelihood of the new data based on a null model
which assumes the same as the full model for everything
except for that there is no response to any of the
task conditions.
"""
X = self._check_data_GBRSA(X, for_fit=False)
scan_onsets = self._check_scan_onsets_GBRSA(scan_onsets, X)
design = self._check_design_GBRSA(design, X)
assert len(X) == self.n_subj_
ll = [None] * self.n_subj_
ll_null = [None] * self.n_subj_
for subj in np.arange(self.n_subj_):
if X[subj] is not None:
ll[subj] = self._score(
Y=X[subj], design=design[subj], beta=self.beta_[subj],
scan_onsets=scan_onsets[subj], beta0=self.beta0_[subj],
rho_e=self.rho_[subj], sigma_e=self.sigma_[subj],
rho_X0=self._rho_X0_[subj],
sigma2_X0=self._sigma2_X0_[subj])
ll_null[subj] = self._score(
Y=X[subj], design=None, beta=None,
scan_onsets=scan_onsets[subj], beta0=self.beta0_[subj],
rho_e=self.rho_[subj], sigma_e=self.sigma_[subj],
rho_X0=self._rho_X0_[subj],
sigma2_X0=self._sigma2_X0_[subj])
return ll, ll_null
    def _precompute_ar1_quad_forms_marginalized(
            self, XTY, XTDY, XTFY, YTY_diag, YTDY_diag, YTFY_diag,
            XTX, XTDX, XTFX, X0TX0, X0TDX0, X0TFX0,
            XTX0, XTDX0, XTFX0, X0TY, X0TDY, X0TFY,
            rho1, n_V, n_X0):
        """ Compute quadratic forms of X, Y and X0 sandwiching the
        (unscaled) inverse AR(1) noise covariance A, for every grid
        value of the AR(1) coefficient rho1 simultaneously, and then
        residualize them against the nuisance regressors X0
        (the "corr" outputs).

        Every A-sandwiched term follows the pattern
        M'AN = M'N - rho1 * M'DN + rho1**2 * M'FN,
        assembled from the pre-computed template matrices passed in.

        Returns
        -------
        X0TAX0, X0TAX0_i : X0'AX0 and its inverse, per rho grid point.
        XTAcorrX, XTAcorrY, YTAcorrY_diag : the X-X, X-Y and Y-Y
            quadratic forms after projecting out X0.
        X0TAY, XTAX0 : cross terms with X0, needed by the caller.
        """
        # Calculate the sandwich terms which put Acorr between X, Y and X0
        # These terms are used a lot in the likelihood. This function
        # is used for the marginalized version.
        XTAY = XTY - rho1[:, None, None] * XTDY \
            + rho1[:, None, None]**2 * XTFY
        # dimension: #rho*feature*space
        YTAY_diag = YTY_diag - rho1[:, None] * YTDY_diag \
            + rho1[:, None]**2 * YTFY_diag
        # dimension: #rho*space,
        # A/sigma2 is the inverse of noise covariance matrix in each voxel.
        # YTAY means Y'AY
        XTAX = XTX - rho1[:, None, None] * XTDX \
            + rho1[:, None, None]**2 * XTFX
        # dimension: n_rho*feature*feature
        X0TAX0 = X0TX0[None, :, :] - rho1[:, None, None] \
            * X0TDX0[None, :, :] \
            + rho1[:, None, None]**2 * X0TFX0[None, :, :]
        # dimension: #rho*#baseline*#baseline
        XTAX0 = XTX0[None, :, :] - rho1[:, None, None] \
            * XTDX0[None, :, :] \
            + rho1[:, None, None]**2 * XTFX0[None, :, :]
        # dimension: n_rho*feature*#baseline
        X0TAY = X0TY - rho1[:, None, None] * X0TDY \
            + rho1[:, None, None]**2 * X0TFY
        # dimension: #rho*#baseline*space
        X0TAX0_i = np.linalg.solve(X0TAX0, np.identity(n_X0)[None, :, :])
        # dimension: #rho*#baseline*#baseline
        # NOTE: the three assignments below are aliases, not copies; the
        # in-place subtractions in the loop also modify XTAX, XTAY and
        # YTAY_diag, which are local and not returned, so this is safe.
        XTAcorrX = XTAX
        # dimension: #rho*feature*feature
        XTAcorrY = XTAY
        # dimension: #rho*feature*space
        YTAcorrY_diag = YTAY_diag
        # Project out the X0 (baseline/nuisance) contribution at each
        # rho grid point.
        for i_r in range(np.size(rho1)):
            XTAcorrX[i_r, :, :] -= \
                np.dot(np.dot(XTAX0[i_r, :, :], X0TAX0_i[i_r, :, :]),
                       XTAX0[i_r, :, :].T)
            XTAcorrY[i_r, :, :] -= np.dot(np.dot(XTAX0[i_r, :, :],
                                                 X0TAX0_i[i_r, :, :]),
                                          X0TAY[i_r, :, :])
            YTAcorrY_diag[i_r, :] -= np.sum(
                X0TAY[i_r, :, :] * np.dot(X0TAX0_i[i_r, :, :],
                                          X0TAY[i_r, :, :]), axis=0)
        return X0TAX0, X0TAX0_i, XTAcorrX, XTAcorrY, YTAcorrY_diag, \
            X0TAY, XTAX0
    def _fit_RSA_marginalized(self, X, Y, X_base,
                              scan_onsets=None):
        """ The major utility of fitting Bayesian RSA
            (marginalized version).
            Note that there is a naming change of variable. X in fit()
            is changed to Y here, and design in fit() is changed to X here.
            This is because we follow the tradition that X expresses the
            variable defined (controlled) by the experimenter, i.e., the
            time course of experimental conditions convolved by an HRF,
            and Y expresses data.
            However, in wrapper function fit(), we follow the naming
            routine of scikit-learn.

            Returns
            -------
            (U, L, s_post, beta_post, beta0_post, sigma_post, rho_post,
            X0, LL): the shared covariance U = L L' of betas and its
            Cholesky factor L; per-subject posterior-mean estimates of
            pseudo-SNR, response amplitudes, nuisance loadings, noise
            standard deviation and AR(1) coefficient; the per-subject
            nuisance design matrices (with DC components collapsed into
            the first column); and the per-subject total log likelihood
            at the final parameters.
        """
        rank = self.rank
        n_subj = len(Y)
        n_V = [np.size(y, axis=1) for y in Y]
        n_T = [np.size(y, axis=0) for y in Y]
        n_C = np.size(X[0], axis=1)
        l_idx, rank = self._chol_idx(n_C, rank)
        n_l = np.size(l_idx[0])  # the number of parameters for L
        t_start = time.time()
        logger.info('Starting to fit the model. Maximum iteration: '
                    '{}.'.format(self.n_iter))
        # log_SNR_grids, SNR_weights \
        #     = np.polynomial.hermite.hermgauss(SNR_bins)
        # SNR_weights = SNR_weights / np.pi**0.5
        # SNR_grids = np.exp(log_SNR_grids * self.logS_range * 2**.5)
        SNR_grids, SNR_weights = self._set_SNR_grids()
        logger.info('The grids of pseudo-SNR used for numerical integration '
                    'is {}.'.format(SNR_grids))
        assert np.max(SNR_grids) < 1e10, \
            'ATTENTION!! The range of grids of pseudo-SNR' \
            ' to be marginalized is too large. Please ' \
            'consider reducing logS_range to 1 or 2'
        rho_grids, rho_weights = self._set_rho_grids()
        logger.info('The grids of rho used to do numerical integration '
                    'is {}.'.format(rho_grids))
        # The joint (SNR, rho) grid is the outer product of the two
        # 1-D grids, flattened to length n_grid.
        n_grid = self.SNR_bins * self.rho_bins
        log_weights = np.reshape(
            np.log(SNR_weights[:, None]) + np.log(rho_weights), n_grid)
        all_rho_grids = np.reshape(np.repeat(
            rho_grids[None, :], self.SNR_bins, axis=0), n_grid)
        all_SNR_grids = np.reshape(np.repeat(
            SNR_grids[:, None], self.rho_bins, axis=1), n_grid)
        # Prepare the data for fitting. These pre-calculated matrices
        # will be re-used a lot in evaluating likelihood function and
        # gradient.
        D = [None] * n_subj
        F = [None] * n_subj
        run_TRs = [None] * n_subj
        n_run = [None] * n_subj
        XTY = [None] * n_subj
        XTDY = [None] * n_subj
        XTFY = [None] * n_subj
        YTY_diag = [None] * n_subj
        YTDY_diag = [None] * n_subj
        YTFY_diag = [None] * n_subj
        XTX = [None] * n_subj
        XTDX = [None] * n_subj
        XTFX = [None] * n_subj
        X0TX0 = [None] * n_subj
        X0TDX0 = [None] * n_subj
        X0TFX0 = [None] * n_subj
        XTX0 = [None] * n_subj
        XTDX0 = [None] * n_subj
        XTFX0 = [None] * n_subj
        X0TY = [None] * n_subj
        X0TDY = [None] * n_subj
        X0TFY = [None] * n_subj
        X0 = [None] * n_subj
        X_res = [None] * n_subj
        n_X0 = [None] * n_subj
        idx_DC = [None] * n_subj
        log_fixed_terms = [None] * n_subj
        # Initialization for L.
        # There are several possible ways of initializing the covariance.
        # (1) start from the point estimation of covariance
        cov_point_est = np.zeros((n_C, n_C))
        for subj in range(n_subj):
            D[subj], F[subj], run_TRs[subj], n_run[subj] = self._prepare_DF(
                n_T[subj], scan_onsets=scan_onsets[subj])
            XTY[subj], XTDY[subj], XTFY[subj], YTY_diag[subj], \
                YTDY_diag[subj], YTFY_diag[subj], XTX[subj], XTDX[subj], \
                XTFX[subj] = self._prepare_data_XY(
                    X[subj], Y[subj], D[subj], F[subj])
            # The contents above stay fixed during fitting.
            # Initializing X0 as DC baseline
            # DC component will be added to the nuisance regressors.
            # In later steps, we do not need to add DC components again
            X0TX0[subj], X0TDX0[subj], X0TFX0[subj], XTX0[subj], XTDX0[subj], \
                XTFX0[subj], X0TY[subj], X0TDY[subj], X0TFY[subj], X0[subj], \
                X_base[subj], n_X0[subj], idx_DC[subj] = \
                self._prepare_data_XYX0(
                    X[subj], Y[subj], X_base[subj], None, D[subj], F[subj],
                    run_TRs[subj], no_DC=False)
            X_joint = np.concatenate((X0[subj], X[subj]), axis=1)
            beta_hat = np.linalg.lstsq(X_joint, Y[subj], rcond=None)[0]
            residual = Y[subj] - np.dot(X_joint, beta_hat)
            # point estimates of betas and fitting residuals without assuming
            # the Bayesian model underlying RSA.
            cov_point_est += np.cov(beta_hat[n_X0[subj]:, :]
                                    / np.std(residual, axis=0))
            log_fixed_terms[subj] = - (n_T[subj] - n_X0[subj]) \
                / 2 * np.log(2 * np.pi) + n_run[subj] \
                / 2 * np.log(1 - all_rho_grids**2) \
                + scipy.special.gammaln(
                    (n_T[subj] - n_X0[subj] - 2) / 2) \
                + (n_T[subj] - n_X0[subj] - 2) / 2 * np.log(2)
            # These are terms in the log likelihood that do not
            # depend on L. Notice that the last term comes from
            # the term of marginalizing sigma. We take the 2 in
            # the denominator out. Accordingly, the "denominator"
            # variable in the _raw_loglike_grids() function is not
            # divided by 2
        cov_point_est = cov_point_est / n_subj
        current_vec_U_chlsk_l = np.linalg.cholesky(
            (cov_point_est + np.eye(n_C)) / 2)[l_idx]
        # We use the average of covariance of point estimation and an identity
        # matrix as the initial value of the covariance matrix, just in case
        # the user provides data in which n_V is smaller than n_C.
        # (2) start from identity matrix
        # current_vec_U_chlsk_l = np.eye(n_C)[l_idx]
        # (3) random initialization
        # current_vec_U_chlsk_l = self.random_state_.randn(n_l)
        # vectorized version of L, Cholesky factor of U, the shared
        # covariance matrix of betas across voxels.
        L = np.zeros((n_C, rank))
        L[l_idx] = current_vec_U_chlsk_l
        X0TAX0 = [None] * n_subj
        X0TAX0_i = [None] * n_subj
        XTAcorrX = [None] * n_subj
        s2XTAcorrX = [None] * n_subj
        YTAcorrY_diag = [None] * n_subj
        XTAcorrY = [None] * n_subj
        sXTAcorrY = [None] * n_subj
        X0TAY = [None] * n_subj
        XTAX0 = [None] * n_subj
        half_log_det_X0TAX0 = [None] * n_subj
        s_post = [None] * n_subj
        rho_post = [None] * n_subj
        sigma_post = [None] * n_subj
        beta_post = [None] * n_subj
        beta0_post = [None] * n_subj
        # The contents below can be updated during fitting.
        # e.g., X0 will be re-estimated
        logger.info('start real fitting')
        LL = np.zeros(n_subj)
        for it in range(self.n_iter):
            logger.info('Iteration {}'.format(it))
            # Re-estimate part of X0: X_res
            for subj in range(n_subj):
                if self.auto_nuisance and it > 0:
                    residuals = Y[subj] - np.dot(X[subj], beta_post[subj]) \
                        - np.dot(
                            X_base[subj],
                            beta0_post[subj][:np.shape(X_base[subj])[1], :])
                    X_res[subj] = self.nureg_method(
                        self.n_nureg_[subj]).fit_transform(
                        self.preprocess_residual(residuals))
                X0TX0[subj], X0TDX0[subj], X0TFX0[subj], XTX0[subj],\
                    XTDX0[subj], XTFX0[subj], X0TY[subj], X0TDY[subj], \
                    X0TFY[subj], X0[subj], X_base[subj], n_X0[subj], _ = \
                    self._prepare_data_XYX0(
                        X[subj], Y[subj], X_base[subj], X_res[subj],
                        D[subj], F[subj], run_TRs[subj], no_DC=True)
                X0TAX0[subj], X0TAX0_i[subj], XTAcorrX[subj], XTAcorrY[subj],\
                    YTAcorrY_diag[subj], X0TAY[subj], XTAX0[subj] \
                    = self._precompute_ar1_quad_forms_marginalized(
                        XTY[subj], XTDY[subj], XTFY[subj], YTY_diag[subj],
                        YTDY_diag[subj], YTFY_diag[subj], XTX[subj],
                        XTDX[subj], XTFX[subj], X0TX0[subj], X0TDX0[subj],
                        X0TFX0[subj], XTX0[subj], XTDX0[subj], XTFX0[subj],
                        X0TY[subj], X0TDY[subj], X0TFY[subj], rho_grids,
                        n_V[subj], n_X0[subj])
                # Now we expand to another dimension including SNR
                # and collapse the dimension again.
                half_log_det_X0TAX0[subj], X0TAX0[subj], X0TAX0_i[subj], \
                    s2XTAcorrX[subj], YTAcorrY_diag[subj], sXTAcorrY[subj], \
                    X0TAY[subj], XTAX0[subj] = self._matrix_flattened_grid(
                        X0TAX0[subj], X0TAX0_i[subj], SNR_grids,
                        XTAcorrX[subj], YTAcorrY_diag[subj], XTAcorrY[subj],
                        X0TAY[subj], XTAX0[subj], n_C, n_V[subj], n_X0[subj],
                        n_grid)
            # A random perturbation whose scale decays over iterations is
            # added to the starting point of the optimizer — presumably to
            # avoid getting trapped in a local optimum (annealing).
            res = scipy.optimize.minimize(
                self._sum_loglike_marginalized, current_vec_U_chlsk_l
                + self.random_state_.randn(n_l) *
                np.linalg.norm(current_vec_U_chlsk_l)
                / n_l**0.5 * np.exp(-it / self.n_iter
                                    * self.anneal_speed - 1),
                args=(s2XTAcorrX, YTAcorrY_diag, sXTAcorrY,
                      half_log_det_X0TAX0,
                      log_weights, log_fixed_terms,
                      l_idx, n_C, n_T, n_V, n_X0,
                      n_grid, rank),
                method=self.optimizer, jac=True, tol=self.tol,
                options=self.minimize_options)
            param_change = res.x - current_vec_U_chlsk_l
            current_vec_U_chlsk_l = res.x.copy()
            # Estimating a few parameters.
            L[l_idx] = current_vec_U_chlsk_l
            for subj in range(n_subj):
                LL_raw, denominator, L_LAMBDA, L_LAMBDA_LT = \
                    self._raw_loglike_grids(
                        L, s2XTAcorrX[subj], YTAcorrY_diag[subj],
                        sXTAcorrY[subj], half_log_det_X0TAX0[subj],
                        log_weights, log_fixed_terms[subj], n_C, n_T[subj],
                        n_V[subj], n_X0[subj], n_grid, rank)
                result_sum, max_value, result_exp = utils.sumexp_stable(LL_raw)
                LL[subj] = np.sum(np.log(result_sum) + max_value)
                # Posterior weight of each grid point per voxel.
                weight_post = result_exp / result_sum
                s_post[subj] = np.sum(all_SNR_grids[:, None] * weight_post,
                                      axis=0)
                # Mean-posterior estimate of SNR.
                rho_post[subj] = np.sum(all_rho_grids[:, None] * weight_post,
                                        axis=0)
                # Mean-posterior estimate of rho.
                sigma_means = denominator ** 0.5 \
                    * (np.exp(scipy.special.gammaln(
                        (n_T[subj] - n_X0[subj] - 3) / 2)
                        - scipy.special.gammaln(
                        (n_T[subj] - n_X0[subj] - 2) / 2)) / 2**0.5)
                sigma_post[subj] = np.sum(sigma_means * weight_post, axis=0)
                # The mean of inverse-Gamma distribution is beta/(alpha-1)
                # The mode is beta/(alpha+1). Notice that beta here does not
                # refer to the brain activation, but the scale parameter of
                # inverse-Gamma distribution. In the _UV version, we use the
                # maximum likelihood estimate of sigma^2. So we divide by
                # (alpha+1), which is (n_T - n_X0).
                beta_post[subj] = np.zeros((n_C, n_V[subj]))
                beta0_post[subj] = np.zeros((n_X0[subj], n_V[subj]))
                for grid in range(n_grid):
                    beta_post[subj] += np.dot(L_LAMBDA_LT[grid, :, :],
                                              sXTAcorrY[subj][grid, :, :])\
                        * all_SNR_grids[grid] \
                        * weight_post[grid, :]
                    beta0_post[subj] += weight_post[grid, :] * np.dot(
                        X0TAX0_i[subj][grid, :, :],
                        (X0TAY[subj][grid, :, :]
                         - np.dot(np.dot(XTAX0[subj][grid, :, :].T,
                                         L_LAMBDA_LT[grid, :, :]),
                                  sXTAcorrY[subj][grid, :, :])
                         * all_SNR_grids[grid]))
            if np.max(np.abs(param_change)) < self.tol:
                logger.info('The change of parameters is smaller than '
                            'the tolerance value {}. Fitting is finished '
                            'after {} iterations'.format(self.tol, it + 1))
                break
        for subj in range(n_subj):
            if idx_DC[subj].size > 1:
                # Collapse the multiple run-specific DC regressors into a
                # single column at position 0, averaging their loadings.
                collapsed_DC = np.sum(X0[subj][:, idx_DC[subj]], axis=1)
                X0[subj] = np.insert(np.delete(X0[subj], idx_DC[subj], axis=1),
                                     0, collapsed_DC, axis=1)
                collapsed_beta0 = np.mean(beta0_post[subj][idx_DC[subj], :],
                                          axis=0)
                beta0_post[subj] = np.insert(
                    np.delete(beta0_post[subj], idx_DC[subj], axis=0),
                    0, collapsed_beta0, axis=0)
        t_finish = time.time()
        logger.info(
            'total time of fitting: {} seconds'.format(t_finish - t_start))
        return np.dot(L, L.T), L, s_post, \
            beta_post, beta0_post, sigma_post, \
            rho_post, X0, LL
    def _fit_RSA_marginalized_null(self, Y, X_base,
                                   scan_onsets):
        """ The marginalized version of the null model for Bayesian RSA.
            The null model assumes no task-related response to the
            design matrix.
            Note that there is a naming change of variable. X in fit()
            is changed to Y here.
            This is because we follow the tradition that Y corresponds
            to data.
            However, in wrapper function fit(), we follow the naming
            routine of scikit-learn.

            Returns
            -------
            (beta0_post, sigma_post, rho_post, X0, LL_null): per-subject
            posterior-mean estimates of the nuisance loadings, noise
            standard deviation and AR(1) coefficient; the per-subject
            nuisance design matrices (with DC components collapsed into
            the first column); and the per-subject total log likelihood
            under the null model.
        """
        # Because there is nothing to learn that is shared across
        # participants, we can run each subject in serial.
        # The only fitting required is to re-estimate X0 after
        # each iteration
        n_subj = len(Y)
        t_start = time.time()
        logger.info('Starting to fit the model. Maximum iteration: '
                    '{}.'.format(self.n_iter))
        rho_grids, rho_weights = self._set_rho_grids()
        logger.info('The grids of rho used to do numerical integration '
                    'is {}.'.format(rho_grids))
        # With no design matrix, only rho is integrated numerically.
        n_grid = self.rho_bins
        log_weights = np.log(rho_weights)
        rho_post = [None] * n_subj
        sigma_post = [None] * n_subj
        beta0_post = [None] * n_subj
        X0 = [None] * n_subj
        LL_null = np.zeros(n_subj)
        for subj in range(n_subj):
            logger.debug('Running on subject {}.'.format(subj))
            [n_T, n_V] = np.shape(Y[subj])
            D, F, run_TRs, n_run = self._prepare_DF(
                n_T, scan_onsets=scan_onsets[subj])
            YTY_diag = np.sum(Y[subj] * Y[subj], axis=0)
            YTDY_diag = np.sum(Y[subj] * np.dot(D, Y[subj]), axis=0)
            YTFY_diag = np.sum(Y[subj] * np.dot(F, Y[subj]), axis=0)
            # Add DC components capturing run-specific baselines.
            X_DC = self._gen_X_DC(run_TRs)
            X_DC, X_base[subj], idx_DC = self._merge_DC_to_base(
                X_DC, X_base[subj], no_DC=False)
            # X_res starts empty and is re-estimated from residuals on
            # every iteration when auto_nuisance is on.
            X_res = np.empty((n_T, 0))
            for it in range(0, self.n_iter):
                X0[subj] = np.concatenate(
                    (X_base[subj], X_res), axis=1)
                n_X0 = X0[subj].shape[1]
                X0TX0, X0TDX0, X0TFX0 = self._make_templates(
                    D, F, X0[subj], X0[subj])
                X0TY, X0TDY, X0TFY = self._make_templates(
                    D, F, X0[subj], Y[subj])
                YTAY_diag = YTY_diag - rho_grids[:, None] * YTDY_diag \
                    + rho_grids[:, None]**2 * YTFY_diag
                # dimension: #rho*space,
                # A/sigma2 is the inverse of noise covariance matrix.
                # YTAY means Y'AY
                X0TAX0 = X0TX0[None, :, :] \
                    - rho_grids[:, None, None] \
                    * X0TDX0[None, :, :] \
                    + rho_grids[:, None, None]**2 \
                    * X0TFX0[None, :, :]
                # dimension: #rho*#baseline*#baseline
                X0TAY = X0TY - rho_grids[:, None, None] * X0TDY \
                    + rho_grids[:, None, None]**2 * X0TFY
                # dimension: #rho*#baseline*space
                X0TAX0_i = np.linalg.solve(
                    X0TAX0, np.identity(n_X0)[None, :, :])
                # dimension: #rho*#baseline*#baseline
                YTAcorrY_diag = np.empty(np.shape(YTAY_diag))
                for i_r in range(np.size(rho_grids)):
                    YTAcorrY_diag[i_r, :] = YTAY_diag[i_r, :] \
                        - np.sum(X0TAY[i_r, :, :] * np.dot(
                            X0TAX0_i[i_r, :, :], X0TAY[i_r, :, :]),
                            axis=0)
                log_fixed_terms = - (n_T - n_X0) / 2 * np.log(2 * np.pi)\
                    + n_run / 2 * np.log(1 - rho_grids**2) \
                    + scipy.special.gammaln((n_T - n_X0 - 2) / 2) \
                    + (n_T - n_X0 - 2) / 2 * np.log(2)
                # These are terms in the log likelihood that do not
                # depend on L. Notice that the last term comes from
                # the term of marginalizing sigma. We take the 2 in
                # the denominator out. Accordingly, the "denominator"
                # variable in the _raw_loglike_grids() function is not
                # divided by 2
                half_log_det_X0TAX0 = self._half_log_det(X0TAX0)
                LL_raw = -half_log_det_X0TAX0[:, None] \
                    - (n_T - n_X0 - 2) / 2 * np.log(YTAcorrY_diag) \
                    + log_weights[:, None] + log_fixed_terms[:, None]
                # dimension: n_grid * space
                # The log likelihood at each pair of values of rho1.
                # half_log_det_X0TAX0 is 0.5*log(det(X0TAX0)) with the size of
                # number of parameter grids. So is the size of log_weights
                result_sum, max_value, result_exp = utils.sumexp_stable(LL_raw)
                weight_post = result_exp / result_sum
                rho_post[subj] = np.sum(rho_grids[:, None] * weight_post,
                                        axis=0)
                # Mean-posterior estimate of rho.
                sigma_means = YTAcorrY_diag ** 0.5 \
                    * (np.exp(scipy.special.gammaln((n_T - n_X0 - 3) / 2)
                              - scipy.special.gammaln((n_T - n_X0 - 2) / 2))
                       / 2**0.5)
                sigma_post[subj] = np.sum(sigma_means * weight_post, axis=0)
                beta0_post[subj] = np.zeros((n_X0, n_V))
                for grid in range(n_grid):
                    beta0_post[subj] += weight_post[grid, :] * np.dot(
                        X0TAX0_i[grid, :, :], X0TAY[grid, :, :])
                if self.auto_nuisance:
                    residuals = Y[subj] - np.dot(
                        X_base[subj],
                        beta0_post[subj][:np.size(X_base[subj], 1), :])
                    X_res_new = self.nureg_method(
                        self.n_nureg_[subj]).fit_transform(
                        self.preprocess_residual(residuals))
                    if it >= 1:
                        # Stop iterating once the estimated nuisance time
                        # courses stabilize.
                        if np.max(np.abs(X_res_new - X_res)) <= self.tol:
                            logger.info('The change of X_res is '
                                        'smaller than the tolerance value {}.'
                                        'Fitting is finished after {} '
                                        'iterations'.format(self.tol, it + 1))
                            break
                    X_res = X_res_new
            if idx_DC.size > 1:
                # Collapse the multiple run-specific DC regressors into a
                # single column at position 0, averaging their loadings.
                collapsed_DC = np.sum(X0[subj][:, idx_DC], axis=1)
                X0[subj] = np.insert(np.delete(X0[subj], idx_DC, axis=1), 0,
                                     collapsed_DC, axis=1)
                collapsed_beta0 = np.mean(beta0_post[subj][idx_DC, :], axis=0)
                beta0_post[subj] = np.insert(
                    np.delete(beta0_post[subj], idx_DC, axis=0),
                    0, collapsed_beta0, axis=0)
            LL_null[subj] = np.sum(np.log(result_sum) + max_value)
        t_finish = time.time()
        logger.info(
            'total time of fitting: {} seconds'.format(t_finish - t_start))
        return beta0_post, sigma_post, rho_post, X0, LL_null
    def _raw_loglike_grids(self, L, s2XTAcorrX, YTAcorrY_diag,
                           sXTAcorrY, half_log_det_X0TAX0,
                           log_weights, log_fixed_terms,
                           n_C, n_T, n_V, n_X0,
                           n_grid, rank):
        """Evaluate the marginal log likelihood on every parameter grid
        point and every voxel, given the shared Cholesky factor L of the
        covariance of betas.

        The inputs named *XTAcorrX*, *YTAcorrY_diag*, *XTAcorrY* etc. are
        pre-computed sufficient statistics, one slice per grid point of
        the (SNR, rho1) parameter grid (first axis of size n_grid).

        Returns
        -------
        LL_raw : n_grid x n_V array of log likelihoods.
        denominator : n_grid x n_V array reused by the gradient code.
        L_LAMBDA, L_LAMBDA_LT : intermediate matrices reused elsewhere.
        """
        # LAMBDA_i = np.dot(np.einsum('ijk,jl->ilk', s2XTAcorrX, L), L) \
        #     + np.identity(rank)
        LAMBDA_i = np.empty((n_grid, rank, rank))
        for grid in np.arange(n_grid):
            LAMBDA_i[grid, :, :] = np.dot(np.dot(L.T,
                                                 s2XTAcorrX[grid, :, :]), L)
        LAMBDA_i += np.identity(rank)
        # dimension: n_grid * rank * rank
        Chol_LAMBDA_i = np.linalg.cholesky(LAMBDA_i)
        # dimension: n_grid * rank * rank
        # log det of a Cholesky factor is the sum of log of its diagonal;
        # this yields 0.5 * log(det(LAMBDA_i)) per grid point.
        half_log_det_LAMBDA_i = np.sum(
            np.log(np.abs(np.diagonal(Chol_LAMBDA_i, axis1=1, axis2=2))),
            axis=1)
        # dimension: n_grid
        L_LAMBDA = np.empty((n_grid, n_C, rank))
        L_LAMBDA_LT = np.empty((n_grid, n_C, n_C))
        s2YTAcorrXL_LAMBDA_LTXTAcorrY = np.empty((n_grid, n_V))
        # dimension: space * n_grid
        for grid in np.arange(n_grid):
            # Solve with the Cholesky factor instead of forming the
            # inverse of LAMBDA_i explicitly.
            L_LAMBDA[grid, :, :] = scipy.linalg.cho_solve(
                (Chol_LAMBDA_i[grid, :, :], True), L.T).T
            L_LAMBDA_LT[grid, :, :] = np.dot(L_LAMBDA[grid, :, :], L.T)
            s2YTAcorrXL_LAMBDA_LTXTAcorrY[grid, :] = np.sum(
                sXTAcorrY[grid, :, :] * np.dot(L_LAMBDA_LT[grid, :, :],
                                               sXTAcorrY[grid, :, :]),
                axis=0)
        denominator = (YTAcorrY_diag - s2YTAcorrXL_LAMBDA_LTXTAcorrY)
        # dimension: n_grid * space
        # Not necessary the best name for it. But this term appears
        # as the denominator within the gradient wrt L
        # In the equation of the log likelihood, this "denominator"
        # term is in fact divided by 2. But we absorb that into the
        # log fixted term.
        LL_raw = -half_log_det_X0TAX0[:, None] \
            - half_log_det_LAMBDA_i[:, None] \
            - (n_T - n_X0 - 2) / 2 * np.log(denominator) \
            + log_weights[:, None] + log_fixed_terms[:, None]
        # dimension: n_grid * space
        # The log likelihood at each pair of values of SNR and rho1.
        # half_log_det_X0TAX0 is 0.5*log(det(X0TAX0)) with the size of
        # number of parameter grids. So is the size of log_weights
        return LL_raw, denominator, L_LAMBDA, L_LAMBDA_LT
def _sum_loglike_marginalized(self, L_vec, s2XTAcorrX, YTAcorrY_diag,
sXTAcorrY, half_log_det_X0TAX0,
log_weights, log_fixed_terms,
l_idx, n_C, n_T, n_V, n_X0,
n_grid, rank=None):
sum_LL_total = 0
sum_grad_L = np.zeros(np.size(l_idx[0]))
for subj in range(len(YTAcorrY_diag)):
LL_total, grad_L = self._loglike_marginalized(
L_vec, s2XTAcorrX[subj], YTAcorrY_diag[subj],
sXTAcorrY[subj], half_log_det_X0TAX0[subj], log_weights,
log_fixed_terms[subj], l_idx, n_C, n_T[subj],
n_V[subj], n_X0[subj], n_grid, rank)
sum_LL_total += LL_total
sum_grad_L += grad_L
return sum_LL_total, sum_grad_L
    def _loglike_marginalized(self, L_vec, s2XTAcorrX, YTAcorrY_diag,
                              sXTAcorrY, half_log_det_X0TAX0,
                              log_weights, log_fixed_terms,
                              l_idx, n_C, n_T, n_V, n_X0,
                              n_grid, rank=None):
        """Negative marginal log likelihood for one subject and its
        gradient with respect to the free entries of L.

        Returns ``(-LL_total, -grad_L[l_idx])`` so the result can be
        handed directly to a minimizer.
        """
        # In this version, we assume that beta is independent
        # between voxels and noise is also independent. X0 captures the
        # co-flucturation between voxels that is
        # not captured by design matrix X.
        # marginalized version marginalize sigma^2, s and rho1
        # for all voxels. n_grid is the number of grid on which the numeric
        # integration is performed to marginalize s and rho1 for each voxel.
        # The log likelihood is an inverse-Gamma distribution sigma^2,
        # so we can analytically marginalize it assuming uniform prior.
        # n_grid is the number of grid in the parameter space of (s, rho1)
        # that is used for numerical integration over (s, rho1).
        n_l = np.size(l_idx[0])
        # the number of parameters in the index of lower-triangular matrix
        if rank is None:
            # Infer the rank of L from the number of free lower-triangular
            # entries by inverting n_l = rank * n_C - rank*(rank-1)/2.
            rank = int((2 * n_C + 1
                        - np.sqrt(n_C**2 * 4 + n_C * 4 + 1 - 8 * n_l)) / 2)
        L = np.zeros([n_C, rank])
        L[l_idx] = L_vec
        LL_raw, denominator, L_LAMBDA, _ = self._raw_loglike_grids(
            L, s2XTAcorrX, YTAcorrY_diag, sXTAcorrY, half_log_det_X0TAX0,
            log_weights, log_fixed_terms, n_C, n_T, n_V, n_X0, n_grid, rank)
        result_sum, max_value, result_exp = utils.sumexp_stable(LL_raw)
        LL_total = np.sum(np.log(result_sum) + max_value)
        # Now we start the gradient with respect to L
        # s2XTAcorrXL_LAMBDA = np.einsum('ijk,ikl->ijl',
        #                                s2XTAcorrX, L_LAMBDA)
        s2XTAcorrXL_LAMBDA = np.empty((n_grid, n_C, rank))
        for grid in range(n_grid):
            s2XTAcorrXL_LAMBDA[grid, :, :] = np.dot(s2XTAcorrX[grid, :, :],
                                                    L_LAMBDA[grid, :, :])
        # dimension: n_grid * condition * rank
        I_minus_s2XTAcorrXL_LAMBDA_LT = np.identity(n_C) \
            - np.dot(s2XTAcorrXL_LAMBDA, L.T)
        # dimension: n_grid * condition * condition
        # The step above may be calculated by einsum. Not sure
        # which is faster.
        # Posterior weight of each grid point per voxel (softmax of LL_raw).
        weight_grad = result_exp / result_sum
        weight_grad_over_denominator = weight_grad / denominator
        # dimension: n_grid * space
        weighted_sXTAcorrY = sXTAcorrY \
            * weight_grad_over_denominator[:, None, :]
        # dimension: n_grid * condition * space
        # sYTAcorrXL_LAMBDA = np.einsum('ijk,ijl->ikl', sXTAcorrY, L_LAMBDA)
        # dimension: n_grid * space * rank
        grad_L = np.zeros([n_C, rank])
        for grid in range(n_grid):
            grad_L += np.dot(
                np.dot(I_minus_s2XTAcorrXL_LAMBDA_LT[grid, :, :],
                       sXTAcorrY[grid, :, :]),
                np.dot(weighted_sXTAcorrY[grid, :, :].T,
                       L_LAMBDA[grid, :, :])) * (n_T - n_X0 - 2)
        grad_L -= np.sum(s2XTAcorrXL_LAMBDA
                         * np.sum(weight_grad, axis=1)[:, None, None],
                         axis=0)
        # dimension: condition * rank
        # Negate: the optimizer minimizes, while LL_total is a likelihood.
        return -LL_total, -grad_L[l_idx]
def _check_data_GBRSA(self, X, for_fit=True):
# Check input data
if type(X) is np.ndarray:
X = [X]
assert type(X) is list, 'Input data X must be either a list '\
'with each entry for one participant, or a numpy arrary '\
'for single participant.'
if for_fit:
for i, x in enumerate(X):
assert_all_finite(x)
assert x.ndim == 2, 'Each participants'' data should be ' \
'2 dimension ndarray'
assert np.all(np.std(x, axis=0) > 0),\
'The time courses of some voxels in participant {} '\
'do not change at all. Please make sure all voxels '\
'are within the brain'.format(i)
else:
for i, x in enumerate(X):
if x is not None:
assert x.ndim == 2, 'Each participants'' data should be ' \
'2 dimension ndarray'
assert x.shape[1] == self.n_V_[i], 'Number of voxels '\
'does not match that in the data used for fitting: '\
'subject {}'.format(i)
# This program allows to fit a single subject. But to have a consistent
# data structure, we make sure X and design are both lists.
return X
    def _check_design_GBRSA(self, design, X):
        """Check the design matrices against the data.

        A single ndarray design is replicated for every subject (with a
        warning when there are several subjects). Each design must be a
        finite, full-column-rank 2-D array with the same number of time
        points as the corresponding data, and all subjects must share the
        same number of conditions (columns).
        """
        # check design matrix
        if type(design) is np.ndarray:
            design = [design] * len(X)
            if len(X) > 1:
                logger.warning('There are multiple subjects while '
                               'there is only one design matrix. '
                               'I assume that the design matrix '
                               'is shared across all subjects.')
        assert type(design) is list, 'design matrix must be either a list '\
            'with each entry for one participant, or an numpy arrary '\
            'for single participant.'
        for i, d in enumerate(design):
            if X[i] is not None:
                assert_all_finite(d)
                assert d.ndim == 2,\
                    'The design matrix should be 2 dimension ndarray'
                # Rank deficiency would make betas unidentifiable.
                assert np.linalg.matrix_rank(d) == d.shape[1], \
                    'Your design matrix of subject {} has rank ' \
                    'smaller than the number of columns. Some columns '\
                    'can be explained by linear combination of other columns.'\
                    'Please check your design matrix.'.format(i)
                assert np.size(d, axis=0) == np.size(X[i], axis=0),\
                    'Design matrix and data of subject {} do not '\
                    'have the same number of time points.'.format(i)
                assert self.rank is None or self.rank <= d.shape[1],\
                    'Your design matrix of subject {} '\
                    'has fewer columns than the rank you set'.format(i)
                if i == 0:
                    n_C = np.shape(d)[1]
                else:
                    assert n_C == np.shape(d)[1], \
                        'In Group Bayesian RSA, all subjects should have'\
                        ' the same set of experiment conditions, t'\
                        'hus the same number of columns in design matrix'
                if X[i].shape[1] <= d.shape[1]:
                    logger.warning('Your data have fewer voxels than the '
                                   'number of task conditions. This might '
                                   'cause problem in fitting. Please consider '
                                   'increasing the size of your ROI, or set '
                                   'the rank parameter to a lower number to '
                                   'estimate a low-rank representational '
                                   'structure.')
        return design
def _check_nuisance_GBRSA(sef, nuisance, X):
# Check the nuisance regressors.
if nuisance is not None:
if type(nuisance) is np.ndarray:
nuisance = [nuisance] * len(X)
if len(X) > 1:
logger.warning('ATTENTION! There are multiple subjects '
'while there is only one nuisance matrix. '
'I assume that the nuisance matrix '
'is shared across all subjects. '
'Please double check.')
assert type(nuisance) is list, \
'nuisance matrix must be either a list '\
'with each entry for one participant, or an numpy arrary '\
'for single participant.'
for i, n in enumerate(nuisance):
assert_all_finite(n)
if n is not None:
assert n.ndim == 2,\
'The nuisance regressor should be '\
'2 dimension ndarray or None'
assert np.linalg.matrix_rank(n) == n.shape[1], \
'The nuisance regressor of subject {} has rank '\
'smaller than the number of columns.'\
'Some columns can be explained by linear '\
'combination of other columns. Please check your' \
' nuisance regressors.'.format(i)
assert np.size(n, axis=0) == np.size(X[i], axis=0), \
'Nuisance regressor and data do not have the same '\
'number of time points.'
else:
nuisance = [None] * len(X)
logger.info('None was provided for nuisance matrix. Replicating '
'it for all subjects.')
return nuisance
def _check_scan_onsets_GBRSA(self, scan_onsets, X):
# check scan_onsets validity
if scan_onsets is None or type(scan_onsets) is np.ndarray:
if scan_onsets is None:
scan_onsets = np.array([0], dtype=int)
scan_onsets = [scan_onsets] * len(X)
if len(X) > 1:
logger.warning('There are multiple subjects while '
'there is only one set of scan_onsets. '
'I assume that it is the same for all'
' subjects. Please double check')
for i in np.arange(len(scan_onsets)):
if X[i] is not None:
if scan_onsets[i] is None:
scan_onsets[i] = np.array([0], dtype=int)
logger.warning('No scan onsets were provided for subject'
' {}. Treating all data of this subject as'
' coming from the same run.')
else:
scan_onsets[i] = np.int32(scan_onsets[i])
assert (np.max(scan_onsets[i]) <= X[i].shape[0]
and np.min(scan_onsets[i]) >= 0
and 0 in scan_onsets[i]
and scan_onsets[i].ndim == 1), \
'Scan onsets of subject {} has formatting ' \
'issues: {}'.format(i, scan_onsets[i])
return scan_onsets
def _bin_exp(self, n_bin, scale=1.0):
""" Calculate the bin locations to approximate exponential distribution.
It breaks the cumulative probability of exponential distribution
into n_bin equal bins, each covering 1 / n_bin probability. Then it
calculates the center of mass in each bins and returns the
centers of mass. So, it approximates the exponential distribution
with n_bin of Delta function weighted by 1 / n_bin, at the
locations of these centers of mass.
Parameters:
-----------
n_bin: int
The number of bins to approximate the exponential distribution
scale: float.
The scale parameter of the exponential distribution, defined in
the same way as scipy.stats. It does not influence the ratios
between the bins, but just controls the spacing between the bins.
So generally users should not change its default.
Returns:
--------
bins: numpy array of size [n_bin,]
The centers of mass for each segment of the
exponential distribution.
"""
boundaries = np.flip(scipy.stats.expon.isf(
np.linspace(0, 1, n_bin + 1),
scale=scale), axis=0)
bins = np.empty(n_bin)
for i in np.arange(n_bin):
bins[i] = utils.center_mass_exp(
(boundaries[i], boundaries[i + 1]), scale=scale)
return bins
    def _set_SNR_grids(self):
        """ Set the grids and weights for SNR used in numerical integration
            of SNR parameters.
        """
        if self.SNR_prior == 'unif':
            SNR_grids = np.linspace(0, 1, self.SNR_bins)
            # Trapezoid-rule weights: interior points get 1/(bins-1),
            # the two endpoints get half weight.
            SNR_weights = np.ones(self.SNR_bins) / (self.SNR_bins - 1)
            SNR_weights[0] = SNR_weights[0] / 2.0
            SNR_weights[-1] = SNR_weights[-1] / 2.0
        elif self.SNR_prior == 'lognorm':
            dist = scipy.stats.lognorm
            alphas = np.arange(np.mod(self.SNR_bins, 2),
                               self.SNR_bins + 2, 2) / self.SNR_bins
            # The goal here is to divide the area under the pdf curve
            # to segments representing equal probabilities.
            # NOTE(review): this passes an array of alphas to
            # dist.interval, relying on scipy broadcasting over alpha —
            # confirm against the scipy version this project pins.
            bounds = dist.interval(alphas, (self.logS_range,))
            bounds = np.unique(bounds)
            # bounds contain the boundaries which equally separate
            # the probability mass of the distribution
            SNR_grids = np.zeros(self.SNR_bins)
            for i in np.arange(self.SNR_bins):
                # E[x | bin i] * SNR_bins = center of mass of the segment,
                # since each segment holds probability 1 / SNR_bins.
                SNR_grids[i] = dist.expect(
                    lambda x: x, args=(self.logS_range,),
                    lb=bounds[i], ub=bounds[i + 1]) * self.SNR_bins
            # Center of mass of each segment between consecutive
            # bounds are set as the grids for SNR.
            SNR_weights = np.ones(self.SNR_bins) / self.SNR_bins
        elif self.SNR_prior == 'exp':
            SNR_grids = self._bin_exp(self.SNR_bins)
            SNR_weights = np.ones(self.SNR_bins) / self.SNR_bins
        else:
            # Fallback for any other prior name: a single point mass at 1.
            SNR_grids = np.ones(1)
            SNR_weights = np.ones(1)
        # Normalize so the weights always sum to exactly 1.
        SNR_weights = SNR_weights / np.sum(SNR_weights)
        return SNR_grids, SNR_weights
def _set_rho_grids(self):
""" Set the grids and weights for rho used in numerical integration
of AR(1) parameters.
"""
rho_grids = np.arange(self.rho_bins) * 2 / self.rho_bins - 1 \
+ 1 / self.rho_bins
rho_weights = np.ones(self.rho_bins) / self.rho_bins
return rho_grids, rho_weights
    def _matrix_flattened_grid(self, X0TAX0, X0TAX0_i, SNR_grids, XTAcorrX,
                               YTAcorrY_diag, XTAcorrY, X0TAY, XTAX0,
                               n_C, n_V, n_X0, n_grid):
        """ We need to integrate parameters SNR and rho on 2-d discrete grids.
        This function generates matrices which have only one dimension for
        these two parameters, with each slice in that dimension
        corresponding to each combination of the discrete grids of SNR
        and discrete grids of rho.
        """
        # The rho-dependent statistics (computed per rho grid point) are
        # replicated SNR_bins times so that the leading axis enumerates
        # all (SNR, rho) combinations, n_grid = SNR_bins * rho_bins.
        half_log_det_X0TAX0 = np.reshape(
            np.repeat(self._half_log_det(X0TAX0)[None, :],
                      self.SNR_bins, axis=0), n_grid)
        X0TAX0 = np.reshape(
            np.repeat(X0TAX0[None, :, :, :],
                      self.SNR_bins, axis=0),
            (n_grid, n_X0, n_X0))
        X0TAX0_i = np.reshape(np.repeat(
            X0TAX0_i[None, :, :, :],
            self.SNR_bins, axis=0),
            (n_grid, n_X0, n_X0))
        # SNR enters multiplicatively: s^2 * X^T A X ...
        s2XTAcorrX = np.reshape(
            SNR_grids[:, None, None, None]**2 * XTAcorrX,
            (n_grid, n_C, n_C))
        YTAcorrY_diag = np.reshape(np.repeat(
            YTAcorrY_diag[None, :, :],
            self.SNR_bins, axis=0), (n_grid, n_V))
        # ... and s * X^T A Y for the cross term.
        sXTAcorrY = np.reshape(SNR_grids[:, None, None, None]
                               * XTAcorrY, (n_grid, n_C, n_V))
        X0TAY = np.reshape(np.repeat(X0TAY[None, :, :, :],
                                     self.SNR_bins, axis=0),
                           (n_grid, n_X0, n_V))
        XTAX0 = np.reshape(np.repeat(XTAX0[None, :, :, :],
                                     self.SNR_bins, axis=0),
                           (n_grid, n_C, n_X0))
        return half_log_det_X0TAX0, X0TAX0, X0TAX0_i, s2XTAcorrX, \
            YTAcorrY_diag, sXTAcorrY, X0TAY, XTAX0
|
lcnature/brainiak
|
brainiak/reprsimil/brsa.py
|
Python
|
apache-2.0
| 211,992
|
[
"Gaussian"
] |
cd84a148f8270fb7a16c3150d7e0990c60922e2e55f0e7b09b4275ecb5b0bf42
|
class Display:
    """Visitor that prints a state machine's states, transitions and
    transition attributes to stdout.

    Ported from Python 2 ``print`` statements (which are a syntax error
    under Python 3) to the ``print()`` function. The layout is preserved;
    ``end=" "`` stands in for the old trailing-comma behaviour.
    """

    def visit(self, state_machine):
        """Print a summary of every state in *state_machine*."""
        for state in state_machine.states.values():
            print("state: ", state.name)
            for transition in state.transitions_out.values():
                print(" transition: ", transition.name, " attributes: ",
                      end=" ")
                for attribute_or_group in transition.attributes:
                    print(attribute_or_group.name, end=" ")
                print()
|
bwtaylor/statemach
|
visitors/display.py
|
Python
|
apache-2.0
| 392
|
[
"VisIt"
] |
3890a196fc331e38d203619589ea33107eb5ae415f5eb88769a9189946aa9104
|
# $Id$
#
# Copyright (C) 2003 Rational Discovery LLC
# All Rights Reserved
#
from rdkit import six
from rdkit.VLib.Node import VLibNode
class SupplyNode(VLibNode):
  """ base class for nodes which supply things

  Assumptions:

    1) no parents

  Usage Example:
    >>> supplier = SupplyNode(contents=[1,2,3])
    >>> supplier.next()
    1
    >>> supplier.next()
    2
    >>> supplier.next()
    3
    >>> supplier.next()
    Traceback (most recent call last):
        ...
    StopIteration
    >>> supplier.reset()
    >>> supplier.next()
    1
    >>> [x for x in supplier]
    [1, 2, 3]

  """

  def __init__(self, contents=None, **kwargs):
    # *contents* is the finite sequence this node will yield, in order.
    VLibNode.__init__(self, **kwargs)
    if contents is not None:
      self._contents = contents
    else:
      self._contents = []
    # _pos is the index of the next item to hand out.
    self._pos = 0

  def reset(self):
    # Restart iteration from the first item (also resets the base node).
    VLibNode.reset(self)
    self._pos = 0

  def next(self):
    # Hand out the next item; signal exhaustion with StopIteration so the
    # node can be used directly as an iterator.
    if self._pos == len(self._contents):
      raise StopIteration
    res = self._contents[self._pos]
    self._pos += 1
    return res

  def AddParent(self, parent, notify=1):
    # Supply nodes are sources: attaching a parent is always an error.
    raise ValueError('SupplyNodes do not have parents')
if six.PY3:
SupplyNode.__next__ = SupplyNode.next
# ------------------------------------
#
# doctest boilerplate
#
def _runDoctests(verbose=None):  # pragma: nocover
  """Run this module's doctests and exit with the number of failures."""
  import doctest
  import sys
  failure_count, _ = doctest.testmod(optionflags=doctest.ELLIPSIS,
                                     verbose=verbose)
  sys.exit(failure_count)
if __name__ == '__main__': # pragma: nocover
_runDoctests()
|
rvianello/rdkit
|
rdkit/VLib/Supply.py
|
Python
|
bsd-3-clause
| 1,492
|
[
"RDKit"
] |
23af8f1b73244f7ed97781cf1ba0d7f295dd09e7355e900042556e92d6cb4d16
|
import os
import sys
import time
import socket
import inspect
import traceback
import pickle
import uuid
from GangaCore.Runtime.GPIexport import exportToGPI
from GangaCore.GPIDev.Base.Proxy import addProxy, stripProxy
from GangaCore.Utility.Config import getConfig
from GangaCore.Utility.logging import getLogger
#from GangaCore.Core.GangaThread.WorkerThreads.WorkerThreadPool import WorkerThreadPool
#from GangaCore.Core.GangaThread.WorkerThreads.ThreadPoolQueueMonitor import ThreadPoolQueueMonitor
from GangaDirac.Lib.Utilities.DiracUtilities import execute
logger = getLogger()
#user_threadpool = WorkerThreadPool()
#monitoring_threadpool = WorkerThreadPool()
#\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/#
def diracAPI(cmd, timeout=60, cred_req=None):
    '''
    Args:
        cmd (str): This is the command you want to execute from within an active DIRAC session
        timeout (int): This is the maximum time(sec) the session has to complete the task
        cred_req (ICredentialRequirement): This is the (optional) credential passed to construct the correct DIRAC env

    Returns:
        Whatever ``execute`` returns for the command (typically its stdout).

    Execute DIRAC API commands from w/in GangaCore.

    The stdout will be returned, e.g.:

    # this will simply return 87
    diracAPI(\'print 87\')

    # this will return the status of job 66
    # note a Dirac() object is already provided set up as \'dirac\'
    diracAPI(\'print(Dirac().getJobStatus([66]))\')
    diracAPI(\'print(dirac.getJobStatus([66]))\')

    # or can achieve the same using command defined and included from
    # getConfig('DIRAC')['DiracCommandFiles']
    diracAPI(\'status([66])\')

    '''
    # Thin wrapper: all environment setup and execution is delegated to
    # GangaDirac.Lib.Utilities.DiracUtilities.execute.
    return execute(cmd, timeout=timeout, cred_req=cred_req)
exportToGPI('diracAPI', diracAPI, 'Functions')
#\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/#
running_dirac_process = False
dirac_process = None
dirac_process_ids = None
def startDiracProcess():
    '''
    Start a subprocess that runs the DIRAC commands

    Spawns ``DiracProcess.py`` listening on a free localhost port, hands it
    a random session hash over stdin, waits for its TCP server to come up,
    then sends the DIRAC command preamble. Records (pid, port) in the
    module-global ``running_dirac_process`` and (pid, port, hash) in
    ``dirac_process_ids``.
    '''
    HOST = 'localhost'  # Connect to localhost
    end_trans = '###END-TRANS###'  # terminator understood by the server
    import subprocess
    from GangaDirac.Lib.Utilities.DiracUtilities import getDiracEnv, getDiracCommandIncludes, GangaDiracError
    global dirac_process
    # Some magic to locate the python script to run
    from GangaDirac.Lib.Server.InspectionClient import runClient
    # Create a socket and bind it to 0 to find a free port
    # NOTE(review): the port is released before the child binds it, so
    # another process could grab it in between (TOCTOU race).
    s = socket.socket(socket.AF_INET, socket.SOCK_STREAM)
    s.bind((HOST, 0))
    PORT = s.getsockname()[1]
    s.close()
    # Pass the port no as an argument to the popen
    serverpath = os.path.join(os.path.dirname(inspect.getsourcefile(runClient)), 'DiracProcess.py')
    # NOTE(review): runs whatever 'python' resolves to on PATH — presumably
    # intentional so the DIRAC env controls the interpreter; confirm.
    popen_cmd = ['python', serverpath, str(PORT)]
    dirac_process = subprocess.Popen(popen_cmd, env=getDiracEnv(), stdin=subprocess.PIPE)
    global running_dirac_process
    running_dirac_process = (dirac_process.pid, PORT)
    # Now set a random string to make sure only commands from this sessions are executed
    rand_hash = uuid.uuid4()
    global dirac_process_ids
    dirac_process_ids = (dirac_process.pid, PORT, rand_hash)
    # Pipe the random string without waiting for the process to finish.
    dirac_process.stdin.write(str(rand_hash).encode("utf-8"))
    dirac_process.stdin.close()
    data = ''
    # We have to wait a little bit for the subprocess to start the server so we try until the connection stops being refused. Set a limit of one minute.
    connection_timeout = time.time() + 60
    started = False
    s = socket.socket(socket.AF_INET, socket.SOCK_STREAM)
    while time.time() < connection_timeout and not started:
        try:
            s.connect((HOST, PORT))
            started = True
        except socket.error as serr:
            time.sleep(1)
    if not started:
        raise GangaDiracError("Failed to start the Dirac server process!")
    # Now setup the Dirac environment in the subprocess:
    # session hash + DIRAC command includes + terminator.
    dirac_command = str(rand_hash)
    dirac_command = dirac_command + getDiracCommandIncludes()
    dirac_command = dirac_command + end_trans
    s.sendall(dirac_command.encode("utf-8"))
    # Block until the server acknowledges; the reply itself is unused.
    data = s.recv(1024)
    s.close()
exportToGPI('startDiracProcess', startDiracProcess, 'Functions')
def stopDiracProcess():
    '''
    Kill the background DIRAC subprocess, if one has been started;
    otherwise do nothing.
    '''
    global running_dirac_process
    if not running_dirac_process:
        return
    logger.info('Stopping the DIRAC process')
    dirac_process.kill()
    running_dirac_process = False
exportToGPI('stopDiracProcess', stopDiracProcess, 'Functions')
def diracAPI_interactive(connection_attempts=5):
    '''
    Run an interactive server within the DIRAC environment.

    Starts InspectionServer.py inside the DIRAC environment on a worker
    queue, then repeatedly tries to connect a client to it. Returns None
    on success, or the formatted traceback of the final failed attempt.
    '''
    from GangaDirac.Lib.Server.InspectionClient import runClient
    serverpath = os.path.join(os.path.dirname(inspect.getsourcefile(runClient)), 'InspectionServer.py')
    from GangaCore.Core.GangaThread.WorkerThreads import getQueues
    getQueues().add(execute("execfile('%s')" % serverpath, timeout=None, shell=False))

    #time.sleep(1)
    sys.stdout.write( "\nType 'q' or 'Q' or 'exit' or 'exit()' to quit but NOT ctrl-D")
    i = 0
    excpt = None
    while i < connection_attempts:
        try:
            runClient()
            break
        except:
            # NOTE(review): bare except deliberately retries on any error
            # (the server may not be up yet); only the traceback of the
            # last attempt is kept and returned.
            if i == (connection_attempts - 1):
                excpt = traceback.format_exc()
        finally:
            i += 1
    return excpt
exportToGPI('diracAPI_interactive', diracAPI_interactive, 'Functions')
#\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/#
def diracAPI_async(cmd, timeout=120):
    '''
    Execute DIRAC API commands from w/in GangaCore.

    Asynchronous variant of ``diracAPI``: the command is submitted to the
    worker-thread queues instead of being run inline. Returns whatever
    ``getQueues().add`` returns for the queued task.
    '''
    from GangaCore.Core.GangaThread.WorkerThreads import getQueues
    return getQueues().add(execute(cmd, timeout=timeout))
exportToGPI('diracAPI_async', diracAPI_async, 'Functions')
#\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/#
def getDiracFiles():
    """Return a GangaList of DiracFile objects, one per LFN registered
    under the user's LFN base, by running ``dirac-dms-user-lfns``.
    """
    from GangaDirac.Lib.Files.DiracFile import DiracFile
    from GangaCore.GPIDev.Lib.GangaList.GangaList import GangaList
    # dirac-dms-user-lfns writes its result to a file named after the LFN
    # base with '/' replaced by '-'.
    filename = DiracFile.diracLFNBase().replace('/', '-') + '.lfns'
    logger.info('Creating list, this can take a while if you have a large number of SE files, please wait...')
    execute('dirac-dms-user-lfns &> /dev/null', shell=True, timeout=None)
    g = GangaList()
    # filename[1:] drops the leading '-' that came from the LFN base's
    # leading '/' — presumably matching the name the DIRAC tool writes;
    # TODO(review): confirm against dirac-dms-user-lfns output naming.
    with open(filename[1:], 'r') as lfnlist:
        lfnlist.seek(0)
        g.extend((DiracFile(lfn='%s' % lfn.strip()) for lfn in lfnlist.readlines()))
    return addProxy(g)
exportToGPI('getDiracFiles', getDiracFiles, 'Functions')
#\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/#
def dumpObject(object, filename):
    '''
    These are complimentary functions to export/load which are already exported to
    the GPI from GangaCore.GPIDev.Persistency. The difference being that these functions will
    export the objects using the pickle persistency format rather than a Ganga streaming
    (human readable) format.

    Errors are logged rather than raised (best-effort persistency).
    '''
    try:
        with open(os.path.expandvars(os.path.expanduser(filename)), 'wb') as f:
            pickle.dump(stripProxy(object), f)
    except Exception:
        # Previously a bare ``except:`` which also swallowed SystemExit and
        # KeyboardInterrupt; only real errors should be logged-and-ignored.
        logger.error("Problem when dumping file '%s': %s" % (filename, traceback.format_exc()))
exportToGPI('dumpObject', dumpObject, 'Functions')
def loadObject(filename):
    '''
    These are complimentary functions to export/load which are already exported to
    the GPI from GangaCore.GPIDev.Persistency. The difference being that these functions will
    export the objects using the pickle persistency format rather than a Ganga streaming
    (human readable) format.

    Returns the unpickled object wrapped in a proxy, or None (implicitly)
    when loading fails; failures are logged rather than raised.
    '''
    try:
        with open(os.path.expandvars(os.path.expanduser(filename)), 'rb') as f:
            r = pickle.load(f)
    except Exception:
        # Previously a bare ``except:`` which also swallowed SystemExit and
        # KeyboardInterrupt; only real errors should be logged-and-ignored.
        logger.error("Problem when loading file '%s': %s" % (filename, traceback.format_exc()))
    else:
        return addProxy(r)
#\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/#
|
ganga-devs/ganga
|
ganga/GangaDirac/BOOT.py
|
Python
|
gpl-3.0
| 7,982
|
[
"DIRAC"
] |
1fdbc43fc2a4acec244fdc965c9779737728eb093d37a84b3d9ef6e741e02be0
|
"""
NetCDF reader/writer module.
This module is used to read and create NetCDF files. NetCDF files are
accessed through the `netcdf_file` object. Data written to and from NetCDF
files are contained in `netcdf_variable` objects. Attributes are given
as member variables of the `netcdf_file` and `netcdf_variable` objects.
Notes
-----
NetCDF files are a self-describing binary data format. The file contains
metadata that describes the dimensions and variables in the file. More
details about NetCDF files can be found `here
<http://www.unidata.ucar.edu/software/netcdf/docs/netcdf.html>`_. There
are three main sections to a NetCDF data structure:
1. Dimensions
2. Variables
3. Attributes
The dimensions section records the name and length of each dimension used
by the variables. The variables would then indicate which dimensions it
uses and any attributes such as data units, along with containing the data
values for the variable. It is good practice to include a
variable that is the same name as a dimension to provide the values for
that axes. Lastly, the attributes section would contain additional
information such as the name of the file creator or the instrument used to
collect the data.
When writing data to a NetCDF file, there is often the need to indicate the
'record dimension'. A record dimension is the unbounded dimension for a
variable. For example, a temperature variable may have dimensions of
latitude, longitude and time. If one wants to add more temperature data to
the NetCDF file as time progresses, then the temperature variable should
have the time dimension flagged as the record dimension.
This module implements the Scientific.IO.NetCDF API to read and create
NetCDF files. The same API is also used in the PyNIO and pynetcdf
modules, allowing these modules to be used interchangeably when working
with NetCDF files. The major advantage of this module over other
modules is that it doesn't require the code to be linked to the NetCDF
libraries.
In addition, the NetCDF file header contains the position of the data in
the file, so access can be done in an efficient manner without loading
unnecessary data into memory. It uses the ``mmap`` module to create
Numpy arrays mapped to the data on disk, for the same purpose.
Examples
--------
To create a NetCDF file:
>>> from scipy.io import netcdf
>>> f = netcdf.netcdf_file('simple.nc', 'w')
>>> f.history = 'Created for a test'
>>> f.createDimension('time', 10)
>>> time = f.createVariable('time', 'i', ('time',))
>>> time[:] = range(10)
>>> time.units = 'days since 2008-01-01'
>>> f.close()
Note the assignment of ``range(10)`` to ``time[:]``. Exposing the slice
of the time variable allows for the data to be set in the object, rather
than letting ``range(10)`` overwrite the ``time`` variable.
To read the NetCDF file we just created:
>>> from scipy.io import netcdf
>>> f = netcdf.netcdf_file('simple.nc', 'r')
>>> print f.history
Created for a test
>>> time = f.variables['time']
>>> print time.units
days since 2008-01-01
>>> print time.shape
(10,)
>>> print time[-1]
9
>>> f.close()
"""
#TODO:
# * properly implement ``_FillValue``.
# * implement Jeff Whitaker's patch for masked variables.
# * fix character variables.
# * implement PAGESIZE for Python 2.6?
#The Scientific.IO.NetCDF API allows attributes to be added directly to
#instances of ``netcdf_file`` and ``netcdf_variable``. To differentiate
#between user-set attributes and instance attributes, user-set attributes
#are automatically stored in the ``_attributes`` attribute by overloading
#``__setattr__``. This is the reason why the code sometimes uses
#``obj.__dict__['key'] = value``, instead of simply ``obj.key = value``;
#otherwise the key would be inserted into userspace attributes.
__all__ = ['netcdf_file', 'netcdf_variable']
from operator import mul
from mmap import mmap, ACCESS_READ
import numpy as np
from numpy.compat import asbytes, asstr
from numpy import fromstring, ndarray, dtype, empty, array, asarray
from numpy import little_endian as LITTLE_ENDIAN
ABSENT = asbytes('\x00\x00\x00\x00\x00\x00\x00\x00')
ZERO = asbytes('\x00\x00\x00\x00')
NC_BYTE = asbytes('\x00\x00\x00\x01')
NC_CHAR = asbytes('\x00\x00\x00\x02')
NC_SHORT = asbytes('\x00\x00\x00\x03')
NC_INT = asbytes('\x00\x00\x00\x04')
NC_FLOAT = asbytes('\x00\x00\x00\x05')
NC_DOUBLE = asbytes('\x00\x00\x00\x06')
NC_DIMENSION = asbytes('\x00\x00\x00\n')
NC_VARIABLE = asbytes('\x00\x00\x00\x0b')
NC_ATTRIBUTE = asbytes('\x00\x00\x00\x0c')
TYPEMAP = { NC_BYTE: ('b', 1),
NC_CHAR: ('c', 1),
NC_SHORT: ('h', 2),
NC_INT: ('i', 4),
NC_FLOAT: ('f', 4),
NC_DOUBLE: ('d', 8) }
REVERSE = { 'b': NC_BYTE,
'c': NC_CHAR,
'h': NC_SHORT,
'i': NC_INT,
'f': NC_FLOAT,
'd': NC_DOUBLE,
# these come from asarray(1).dtype.char and asarray('foo').dtype.char,
# used when getting the types from generic attributes.
'l': NC_INT,
'S': NC_CHAR }
class netcdf_file(object):
"""
A file object for NetCDF data.
A `netcdf_file` object has two standard attributes: `dimensions` and
`variables`. The values of both are dictionaries, mapping dimension
names to their associated lengths and variable names to variables,
respectively. Application programs should never modify these
dictionaries.
All other attributes correspond to global attributes defined in the
NetCDF file. Global file attributes are created by assigning to an
attribute of the `netcdf_file` object.
Parameters
----------
filename : string or file-like
string -> filename
mode : {'r', 'w'}, optional
read-write mode, default is 'r'
mmap : None or bool, optional
Whether to mmap `filename` when reading. Default is True
when `filename` is a file name, False when `filename` is a
file-like object
version : {1, 2}, optional
version of netcdf to read / write, where 1 means *Classic
format* and 2 means *64-bit offset format*. Default is 1. See
`here <http://www.unidata.ucar.edu/software/netcdf/docs/netcdf/Which-Format.html>`_
for more info.
"""
def __init__(self, filename, mode='r', mmap=None, version=1):
"""Initialize netcdf_file from fileobj (str or file-like)."""
if hasattr(filename, 'seek'): # file-like
self.fp = filename
self.filename = 'None'
if mmap is None:
mmap = False
elif mmap and not hasattr(filename, 'fileno'):
raise ValueError('Cannot use file object for mmap')
else: # maybe it's a string
self.filename = filename
self.fp = open(self.filename, '%sb' % mode)
if mmap is None:
mmap = True
self.use_mmap = mmap
self.version_byte = version
if not mode in 'rw':
raise ValueError("Mode must be either 'r' or 'w'.")
self.mode = mode
self.dimensions = {}
self.variables = {}
self._dims = []
self._recs = 0
self._recsize = 0
self._attributes = {}
if mode == 'r':
self._read()
    def __setattr__(self, attr, value):
        # Store user defined attributes in a separate dict,
        # so we can save them to file later.
        try:
            self._attributes[attr] = value
        except AttributeError:
            # During __init__, ``_attributes`` does not exist yet; in that
            # window values are only stored on the instance itself.
            pass
        # Always keep a normal instance attribute as well.
        self.__dict__[attr] = value
def close(self):
"""Closes the NetCDF file."""
if not self.fp.closed:
try:
self.flush()
finally:
self.fp.close()
__del__ = close
def createDimension(self, name, length):
"""
Adds a dimension to the Dimension section of the NetCDF data structure.
Note that this function merely adds a new dimension that the variables can
reference. The values for the dimension, if desired, should be added as
a variable using `createVariable`, referring to this dimension.
Parameters
----------
name : str
Name of the dimension (Eg, 'lat' or 'time').
length : int
Length of the dimension.
See Also
--------
createVariable
"""
self.dimensions[name] = length
self._dims.append(name)
    def createVariable(self, name, type, dimensions):
        """
        Create an empty variable for the `netcdf_file` object, specifying its data
        type and the dimensions it uses.

        Parameters
        ----------
        name : str
            Name of the new variable.
        type : dtype or str
            Data type of the variable.
        dimensions : sequence of str
            List of the dimension names used by the variable, in the desired order.

        Returns
        -------
        variable : netcdf_variable
            The newly created ``netcdf_variable`` object.
            This object has also been added to the `netcdf_file` object as well.

        See Also
        --------
        createDimension

        Notes
        -----
        Any dimensions to be used by the variable should already exist in the
        NetCDF data structure or should be created by `createDimension` prior to
        creating the NetCDF variable.
        """
        shape = tuple([self.dimensions[dim] for dim in dimensions])
        shape_ = tuple([dim or 0 for dim in shape])  # replace None with 0 for numpy

        # NOTE(review): ``basestring`` only exists on Python 2; this module
        # predates Python 3 support.
        if isinstance(type, basestring): type = dtype(type)
        typecode, size = type.char, type.itemsize
        # Data is stored big-endian on disk, hence the '>' byte order.
        dtype_ = '>%s' % typecode
        if size > 1: dtype_ += str(size)

        data = empty(shape_, dtype=dtype_)
        self.variables[name] = netcdf_variable(data, typecode, shape, dimensions)
        return self.variables[name]
def flush(self):
"""
Perform a sync-to-disk flush if the `netcdf_file` object is in write mode.
See Also
--------
sync : Identical function
"""
if hasattr(self, 'mode') and self.mode is 'w':
self._write()
sync = flush
    def _write(self):
        # Serialize the whole in-memory structure: magic string, version byte,
        # then the header sections in the order required by the classic format.
        self.fp.write(asbytes('CDF'))
        self.fp.write(array(self.version_byte, '>b').tostring())
        # Write headers and data.
        self._write_numrecs()
        self._write_dim_array()
        self._write_gatt_array()
        self._write_var_array()

    def _write_numrecs(self):
        # Get highest record count from all record variables.
        for var in self.variables.values():
            if var.isrec and len(var.data) > self._recs:
                # Assign via __dict__ to bypass the user-attribute-recording
                # __setattr__ defined on this class.
                self.__dict__['_recs'] = len(var.data)
        self._pack_int(self._recs)

    def _write_dim_array(self):
        # Dimension section: tag + count + (name, length) pairs, or the
        # ABSENT sentinel when no dimensions are defined.
        if self.dimensions:
            self.fp.write(NC_DIMENSION)
            self._pack_int(len(self.dimensions))
            for name in self._dims:
                self._pack_string(name)
                length = self.dimensions[name]
                self._pack_int(length or 0)  # replace None with 0 for record dimension
        else:
            self.fp.write(ABSENT)

    def _write_gatt_array(self):
        # Global attributes reuse the generic attribute-array writer.
        self._write_att_array(self._attributes)

    def _write_att_array(self, attributes):
        # Attribute section: tag + count + (name, values) pairs, or ABSENT.
        if attributes:
            self.fp.write(NC_ATTRIBUTE)
            self._pack_int(len(attributes))
            for name, values in attributes.items():
                self._pack_string(name)
                self._write_values(values)
        else:
            self.fp.write(ABSENT)
    def _write_var_array(self):
        # Variable section: tag + count + per-variable metadata, then the data
        # payloads. Non-record variables are written first so that all record
        # data can be interleaved at the end of the file.
        if self.variables:
            self.fp.write(NC_VARIABLE)
            self._pack_int(len(self.variables))
            # Sort variables non-recs first, then recs. We use a DSU
            # since some people use pupynere with Python 2.3.x.
            deco = [ (v._shape and not v.isrec, k) for (k, v) in self.variables.items() ]
            deco.sort()
            variables = [ k for (unused, k) in deco ][::-1]
            # Set the metadata for all variables.
            for name in variables:
                self._write_var_metadata(name)
            # Now that we have the metadata, we know the vsize of
            # each record variable, so we can calculate recsize.
            self.__dict__['_recsize'] = sum([
                var._vsize for var in self.variables.values()
                if var.isrec])
            # Set the data for all variables.
            for name in variables:
                self._write_var_data(name)
        else:
            self.fp.write(ABSENT)
    def _write_var_metadata(self, name):
        # Emit one variable's header entry: name, dimension ids, attributes,
        # type tag, vsize, and a placeholder for the data offset ("begin").
        var = self.variables[name]
        self._pack_string(name)
        self._pack_int(len(var.dimensions))
        for dimname in var.dimensions:
            dimid = self._dims.index(dimname)
            self._pack_int(dimid)
        self._write_att_array(var._attributes)
        nc_type = REVERSE[var.typecode()]
        self.fp.write(asbytes(nc_type))
        if not var.isrec:
            vsize = var.data.size * var.data.itemsize
            vsize += -vsize % 4  # round up to a 4-byte boundary
        else:  # record variable
            try:
                vsize = var.data[0].size * var.data.itemsize
            except IndexError:
                vsize = 0  # no records written yet
            rec_vars = len([var for var in self.variables.values()
                            if var.isrec])
            # Per-record padding applies only when several record variables
            # interleave.
            if rec_vars > 1:
                vsize += -vsize % 4
        self.variables[name].__dict__['_vsize'] = vsize
        self._pack_int(vsize)
        # Pack a bogus begin, and set the real value later.
        self.variables[name].__dict__['_begin'] = self.fp.tell()
        self._pack_begin(0)
    def _write_var_data(self, name):
        # Write one variable's payload and backpatch its "begin" offset in the
        # header now that the real file position is known.
        var = self.variables[name]
        # Set begin in file header.
        the_beguine = self.fp.tell()
        self.fp.seek(var._begin)
        self._pack_begin(the_beguine)
        self.fp.seek(the_beguine)
        # Write data.
        if not var.isrec:
            self.fp.write(var.data.tostring())
            count = var.data.size * var.data.itemsize
            self.fp.write(asbytes('0') * (var._vsize - count))  # pad to vsize
        else:  # record variable
            # Handle rec vars with shape[0] < nrecs.
            if self._recs > len(var.data):
                shape = (self._recs,) + var.data.shape[1:]
                var.data.resize(shape)
            pos0 = pos = self.fp.tell()
            for rec in var.data:
                # Apparently scalars cannot be converted to big endian. If we
                # try to convert a ``=i4`` scalar to, say, '>i4' the dtype
                # will remain as ``=i4``.
                if not rec.shape and (rec.dtype.byteorder == '<' or
                        (rec.dtype.byteorder == '=' and LITTLE_ENDIAN)):
                    rec = rec.byteswap()
                self.fp.write(rec.tostring())
                # Padding
                count = rec.size * rec.itemsize
                self.fp.write(asbytes('0') * (var._vsize - count))
                # Records of different variables interleave, so jump ahead by
                # the full record size before writing the next record.
                pos += self._recsize
                self.fp.seek(pos)
            self.fp.seek(pos0 + var._vsize)
def _write_values(self, values):
if hasattr(values, 'dtype'):
nc_type = REVERSE[values.dtype.char]
else:
types = [
(int, NC_INT),
(long, NC_INT),
(float, NC_FLOAT),
(basestring, NC_CHAR),
]
try:
sample = values[0]
except TypeError:
sample = values
for class_, nc_type in types:
if isinstance(sample, class_): break
typecode, size = TYPEMAP[nc_type]
if typecode is 'c':
dtype_ = '>c'
else:
dtype_ = '>%s' % typecode
if size > 1: dtype_ += str(size)
values = asarray(values, dtype=dtype_)
self.fp.write(asbytes(nc_type))
if values.dtype.char == 'S':
nelems = values.itemsize
else:
nelems = values.size
self._pack_int(nelems)
if not values.shape and (values.dtype.byteorder == '<' or
(values.dtype.byteorder == '=' and LITTLE_ENDIAN)):
values = values.byteswap()
self.fp.write(values.tostring())
count = values.size * values.itemsize
self.fp.write(asbytes('0') * (-count % 4)) # pad
    def _read(self):
        # Parse the whole file: magic string, version byte, then the header
        # sections in format order.
        # Check magic bytes and version
        magic = self.fp.read(3)
        if not magic == asbytes('CDF'):
            raise TypeError("Error: %s is not a valid NetCDF 3 file" %
                            self.filename)
        self.__dict__['version_byte'] = fromstring(self.fp.read(1), '>b')[0]
        # Read file headers and set data.
        self._read_numrecs()
        self._read_dim_array()
        self._read_gatt_array()
        self._read_var_array()

    def _read_numrecs(self):
        # Record count; stored via __dict__ to bypass the attribute-recording
        # __setattr__ on this class.
        self.__dict__['_recs'] = self._unpack_int()

    def _read_dim_array(self):
        # Dimension section: tag + count + (name, length) pairs.
        header = self.fp.read(4)
        assert header in [ZERO, NC_DIMENSION]
        count = self._unpack_int()
        for dim in range(count):
            name = asstr(self._unpack_string())
            length = self._unpack_int() or None  # None for record dimension
            self.dimensions[name] = length
            self._dims.append(name)  # preserve order

    def _read_gatt_array(self):
        # Install global attributes through __setattr__ so they are recorded
        # in _attributes as well as set on the instance.
        for k, v in self._read_att_array().items():
            self.__setattr__(k, v)

    def _read_att_array(self):
        # Attribute section: tag + count + (name, values) pairs.
        header = self.fp.read(4)
        assert header in [ZERO, NC_ATTRIBUTE]
        count = self._unpack_int()
        attributes = {}
        for attr in range(count):
            name = asstr(self._unpack_string())
            attributes[name] = self._read_values()
        return attributes
    def _read_var_array(self):
        # Variable section: read each variable's header, then build its data
        # array. All record variables share one structured "rec_array" whose
        # fields alias the interleaved per-record payloads.
        header = self.fp.read(4)
        assert header in [ZERO, NC_VARIABLE]
        begin = 0
        dtypes = {'names': [], 'formats': []}
        rec_vars = []
        count = self._unpack_int()
        for var in range(count):
            (name, dimensions, shape, attributes,
             typecode, size, dtype_, begin_, vsize) = self._read_var()
            # http://www.unidata.ucar.edu/software/netcdf/docs/netcdf.html
            # Note that vsize is the product of the dimension lengths
            # (omitting the record dimension) and the number of bytes
            # per value (determined from the type), increased to the
            # next multiple of 4, for each variable. If a record
            # variable, this is the amount of space per record. The
            # netCDF "record size" is calculated as the sum of the
            # vsize's of all the record variables.
            #
            # The vsize field is actually redundant, because its value
            # may be computed from other information in the header. The
            # 32-bit vsize field is not large enough to contain the size
            # of variables that require more than 2^32 - 4 bytes, so
            # 2^32 - 1 is used in the vsize field for such variables.
            if shape and shape[0] is None:  # record variable
                rec_vars.append(name)
                # The netCDF "record size" is calculated as the sum of
                # the vsize's of all the record variables.
                self.__dict__['_recsize'] += vsize
                # All record data starts at the first record variable's offset.
                if begin == 0: begin = begin_
                dtypes['names'].append(name)
                dtypes['formats'].append(str(shape[1:]) + dtype_)
                # Handle padding with a virtual variable.
                if typecode in 'bch':
                    actual_size = reduce(mul, (1,) + shape[1:]) * size
                    padding = -actual_size % 4
                    if padding:
                        dtypes['names'].append('_padding_%d' % var)
                        dtypes['formats'].append('(%d,)>b' % padding)
                # Data will be set later.
                data = None
            else:  # not a record variable
                # Calculate size to avoid problems with vsize (above)
                a_size = reduce(mul, shape, 1) * size
                if self.use_mmap:
                    mm = mmap(self.fp.fileno(), begin_+a_size, access=ACCESS_READ)
                    data = ndarray.__new__(ndarray, shape, dtype=dtype_,
                                           buffer=mm, offset=begin_, order=0)
                else:
                    pos = self.fp.tell()
                    self.fp.seek(begin_)
                    data = fromstring(self.fp.read(a_size), dtype=dtype_)
                    data.shape = shape
                    self.fp.seek(pos)
            # Add variable.
            self.variables[name] = netcdf_variable(
                data, typecode, shape, dimensions, attributes)
        if rec_vars:
            # Remove padding when only one record variable.
            if len(rec_vars) == 1:
                dtypes['names'] = dtypes['names'][:1]
                dtypes['formats'] = dtypes['formats'][:1]
            # Build rec array.
            if self.use_mmap:
                mm = mmap(self.fp.fileno(), begin+self._recs*self._recsize, access=ACCESS_READ)
                rec_array = ndarray.__new__(ndarray, (self._recs,), dtype=dtypes,
                                            buffer=mm, offset=begin, order=0)
            else:
                pos = self.fp.tell()
                self.fp.seek(begin)
                rec_array = fromstring(self.fp.read(self._recs*self._recsize), dtype=dtypes)
                rec_array.shape = (self._recs,)
                self.fp.seek(pos)
            for var in rec_vars:
                # Assign through __dict__ to bypass attribute recording.
                self.variables[var].__dict__['data'] = rec_array[var]
def _read_var(self):
name = asstr(self._unpack_string())
dimensions = []
shape = []
dims = self._unpack_int()
for i in range(dims):
dimid = self._unpack_int()
dimname = self._dims[dimid]
dimensions.append(dimname)
dim = self.dimensions[dimname]
shape.append(dim)
dimensions = tuple(dimensions)
shape = tuple(shape)
attributes = self._read_att_array()
nc_type = self.fp.read(4)
vsize = self._unpack_int()
begin = [self._unpack_int, self._unpack_int64][self.version_byte-1]()
typecode, size = TYPEMAP[nc_type]
if typecode is 'c':
dtype_ = '>c'
else:
dtype_ = '>%s' % typecode
if size > 1: dtype_ += str(size)
return name, dimensions, shape, attributes, typecode, size, dtype_, begin, vsize
def _read_values(self):
nc_type = self.fp.read(4)
n = self._unpack_int()
typecode, size = TYPEMAP[nc_type]
count = n*size
values = self.fp.read(int(count))
self.fp.read(-count % 4) # read padding
if typecode is not 'c':
values = fromstring(values, dtype='>%s%d' % (typecode, size))
if values.shape == (1,): values = values[0]
else:
values = values.rstrip(asbytes('\x00'))
return values
    def _pack_begin(self, begin):
        # Offsets are 32-bit in version-1 files and 64-bit in version-2 files.
        if self.version_byte == 1:
            self._pack_int(begin)
        elif self.version_byte == 2:
            self._pack_int64(begin)

    def _pack_int(self, value):
        # Write a big-endian 32-bit signed integer.
        self.fp.write(array(value, '>i').tostring())
    _pack_int32 = _pack_int

    def _unpack_int(self):
        # Read a big-endian 32-bit signed integer as a Python int.
        return int(fromstring(self.fp.read(4), '>i')[0])
    _unpack_int32 = _unpack_int

    def _pack_int64(self, value):
        # Write a big-endian 64-bit signed integer.
        self.fp.write(array(value, '>q').tostring())

    def _unpack_int64(self):
        # Read a big-endian 64-bit signed integer.
        return fromstring(self.fp.read(8), '>q')[0]

    def _pack_string(self, s):
        # Write a counted string padded to a 4-byte boundary.
        count = len(s)
        self._pack_int(count)
        self.fp.write(asbytes(s))
        self.fp.write(asbytes('0') * (-count % 4))  # pad

    def _unpack_string(self):
        # Read a counted string, dropping NUL padding and alignment bytes.
        count = self._unpack_int()
        s = self.fp.read(count).rstrip(asbytes('\x00'))
        self.fp.read(-count % 4)  # read padding
        return s
class netcdf_variable(object):
    """
    A data object for the `netcdf` module.
    `netcdf_variable` objects are constructed by calling the method
    `netcdf_file.createVariable` on the `netcdf_file` object. `netcdf_variable`
    objects behave much like array objects defined in numpy, except that their
    data resides in a file. Data is read by indexing and written by assigning
    to an indexed subset; the entire array can be accessed by the index ``[:]``
    or (for scalars) by using the methods `getValue` and `assignValue`.
    `netcdf_variable` objects also have attribute `shape` with the same meaning
    as for arrays, but the shape cannot be modified. There is another read-only
    attribute `dimensions`, whose value is the tuple of dimension names.
    All other attributes correspond to variable attributes defined in
    the NetCDF file. Variable attributes are created by assigning to an
    attribute of the `netcdf_variable` object.
    Parameters
    ----------
    data : array_like
        The data array that holds the values for the variable.
        Typically, this is initialized as empty, but with the proper shape.
    typecode : dtype character code
        Desired data-type for the data array.
    shape : sequence of ints
        The shape of the array. This should match the lengths of the
        variable's dimensions.
    dimensions : sequence of strings
        The names of the dimensions used by the variable. Must be in the
        same order of the dimension lengths given by `shape`.
    attributes : dict, optional
        Attribute values (any type) keyed by string names. These attributes
        become attributes for the netcdf_variable object.
    Attributes
    ----------
    dimensions : list of str
        List of names of dimensions used by the variable object.
    isrec, shape
        Properties
    See also
    --------
    isrec, shape
    """
    def __init__(self, data, typecode, shape, dimensions, attributes=None):
        self.data = data
        self._typecode = typecode
        self._shape = shape
        self.dimensions = dimensions
        self._attributes = attributes or {}
        # Mirror attributes onto the instance via __dict__ so they are not
        # re-recorded by the __setattr__ below.
        for k, v in self._attributes.items():
            self.__dict__[k] = v
    def __setattr__(self, attr, value):
        # Store user defined attributes in a separate dict,
        # so we can save them to file later.
        try:
            self._attributes[attr] = value
        except AttributeError:
            # _attributes does not exist yet (we are inside __init__).
            pass
        self.__dict__[attr] = value
    def isrec(self):
        # A record variable has data and an unlimited (falsy) first dimension.
        return self.data.shape and not self._shape[0]
    isrec = property(isrec)
    def shape(self):
        # Live shape of the underlying array (grows as records are added).
        return self.data.shape
    shape = property(shape)
    def getValue(self):
        """
        Retrieve a scalar value from a `netcdf_variable` of length one.
        Raises
        ------
        ValueError
            If the netcdf variable is an array of length greater than one,
            this exception will be raised.
        """
        return self.data.item()
    def assignValue(self, value):
        """
        Assign a scalar value to a `netcdf_variable` of length one.
        Parameters
        ----------
        value : scalar
            Scalar value (of compatible type) to assign to a length-one netcdf
            variable. This value will be written to file.
        Raises
        ------
        ValueError
            If the input is not a scalar, or if the destination is not a length-one
            netcdf variable.
        """
        self.data.itemset(value)
    def typecode(self):
        """
        Return the typecode of the variable.
        Returns
        -------
        typecode : char
            The character typecode of the variable (eg, 'i' for int).
        """
        return self._typecode
    def __getitem__(self, index):
        return self.data[index]
    def __setitem__(self, index, data):
        # Expand data for record vars?
        if self.isrec:
            # Work out how many records the assignment touches so the backing
            # array can be grown before writing.
            if isinstance(index, tuple):
                rec_index = index[0]
            else:
                rec_index = index
            if isinstance(rec_index, slice):
                recs = (rec_index.start or 0) + len(data)
            else:
                recs = rec_index + 1
            if recs > len(self.data):
                shape = (recs,) + self._shape[1:]
                self.data.resize(shape)
        self.data[index] = data
# Backwards-compatible aliases matching the Scientific.IO.NetCDF class names.
NetCDFFile = netcdf_file
NetCDFVariable = netcdf_variable
|
scipy/scipy-svn
|
scipy/io/netcdf.py
|
Python
|
bsd-3-clause
| 28,822
|
[
"NetCDF"
] |
2d65540f9b4ca31001e60b018553ac4a6c0c5d60c41031733483ec0a5224e468
|
"""
Tools for the instructor dashboard
"""
import functools
import json
import operator

import dateutil
from django.contrib.auth.models import User  # lint-amnesty, pylint: disable=imported-auth-user
from django.http import HttpResponseBadRequest
from django.utils.translation import ugettext as _
from edx_when import api
from opaque_keys.edx.keys import UsageKey
from pytz import UTC

from common.djangoapps.student.models import CourseEnrollment, get_user_by_username_or_email
from openedx.core.djangoapps.schedules.models import Schedule
class DashboardError(Exception):
    """
    Base exception for errors raised while using the instructor dashboard.
    """
    def response(self):
        """
        Build an HttpResponseBadRequest whose body is JSON describing this error.
        """
        payload = json.dumps({'error': str(self)})
        return HttpResponseBadRequest(payload)
def handle_dashboard_error(view):
    """
    Decorator which adds seamless DashboardError handling to a view. If a
    DashboardError is raised during view processing, an HttpResponseBadRequest
    is sent back to the client with JSON data about the error.
    """
    # functools.wraps preserves the wrapped view's name/docstring/module,
    # which the original wrapper discarded (breaking introspection and
    # debugging output for decorated views).
    @functools.wraps(view)
    def wrapper(request, course_id):
        """
        Wrap the view, converting DashboardError into an error response.
        """
        try:
            return view(request, course_id=course_id)
        except DashboardError as error:
            return error.response()
    return wrapper
def strip_if_string(value):
    """
    Return ``value`` with surrounding whitespace removed when it is a str;
    any non-string value is passed through untouched.
    """
    return value.strip() if isinstance(value, str) else value
def get_student_from_identifier(unique_student_identifier):
    """
    Look up a student by email address or username.

    Returns the user object associated with `unique_student_identifier`.
    Raises User.DoesNotExist if no user object can be found, the user was
    retired, or the user is in the process of being retired.

    DEPRECATED: use student.models.get_user_by_username_or_email instead.
    """
    return get_user_by_username_or_email(unique_student_identifier)
def require_student_from_identifier(unique_student_identifier):
    """
    Like get_student_from_identifier(), but converts a missing student into a
    DashboardError instead of letting User.DoesNotExist propagate.
    """
    try:
        return get_student_from_identifier(unique_student_identifier)
    except User.DoesNotExist:
        message = _("Could not find student matching identifier: {student_identifier}").format(
            student_identifier=unique_student_identifier
        )
        raise DashboardError(message)  # lint-amnesty, pylint: disable=raise-missing-from
def parse_datetime(datestr):
    """
    Parse a user-supplied date string into a timezone-aware UTC datetime.

    Raises DashboardError when the string cannot be parsed.
    """
    try:
        parsed = dateutil.parser.parse(datestr)
    except ValueError:
        raise DashboardError(_("Unable to parse date: ") + datestr)  # lint-amnesty, pylint: disable=raise-missing-from
    return parsed.replace(tzinfo=UTC)
def find_unit(course, url):
    """
    Locate the unit (block/module) with the given url in the course tree and
    return it. Raises DashboardError when no matching unit exists.
    """
    def search(node):
        """Depth-first search for the node whose location matches url."""
        if str(node.location) == url:
            return node
        for child in node.get_children():
            match = search(child)
            if match:
                return match
        return None

    unit = search(course)
    if unit is None:
        raise DashboardError(_("Couldn't find module for url: {0}").format(url))
    return unit
def get_units_with_due_date(course):
    """
    Return the top-most course units that have due dates. Descendants of a
    unit that has a due date are not returned.
    """
    # A throwaway Schedule makes relative course dates resolvable; the actual
    # resolved values are irrelevant here -- only the presence of a 'due'
    # entry matters, so the schedule is never saved.
    throwaway_schedule = Schedule(start_date=course.start)
    course_dates = api.get_dates_for_course(course.id, schedule=throwaway_schedule)
    units = []

    def walk(node):
        """Collect node if it carries a due date, otherwise recurse into children."""
        if (node.location, 'due') in course_dates:
            units.append(node)
        else:
            for child in node.get_children():
                walk(child)

    walk(course)
    return units
def title_or_url(node):
    """
    Return the node's `display_name` when it has a truthy one, otherwise the
    node's location url as a string.
    """
    return getattr(node, 'display_name', None) or str(node.location)
def set_due_date_extension(course, unit, student, due_date, actor=None, reason=''):
    """
    Sets a due date extension.

    Passing a falsy `due_date` clears any existing extension instead of
    setting one.

    Raises:
        DashboardError if the unit or extended, due date is invalid or user is
        not enrolled in the course.
    """
    mode, __ = CourseEnrollment.enrollment_mode_for_user(user=student, course_id=str(course.id))
    if not mode:
        raise DashboardError(_("Could not find student enrollment in the course."))
    # We normally set dates at the subsection level. But technically dates can be anywhere down the tree (and
    # usually are in self paced courses, where the subsection date gets propagated down).
    # So find all children that we need to set the date on, then set those dates.
    course_dates = api.get_dates_for_course(course.id, user=student)
    blocks_to_set = {unit}  # always include the requested unit, even if it doesn't appear to have a due date now
    def visit(node):
        """
        Visit a node. Checks to see if node has a due date and appends to
        `blocks_to_set` if it does. And recurses into children to search for
        nodes with due dates.
        """
        if (node.location, 'due') in course_dates:
            blocks_to_set.add(node)
        for child in node.get_children():
            visit(child)
    visit(unit)
    for block in blocks_to_set:
        if due_date:
            try:
                api.set_date_for_block(course.id, block.location, 'due', due_date, user=student, reason=reason,
                                       actor=actor)
            except api.MissingDateError as ex:
                raise DashboardError(_("Unit {0} has no due date to extend.").format(unit.location)) from ex
            except api.InvalidDateError as ex:
                raise DashboardError(_("An extended due date must be later than the original due date.")) from ex
        else:
            # A null due_date clears the extension for this block.
            api.set_date_for_block(course.id, block.location, 'due', None, user=student, reason=reason, actor=actor)
def dump_module_extensions(course, unit):
    """
    Build a table of the students holding due date extensions for `unit` in
    `course`. Returns a dict with 'header', 'title' and 'data' keys.
    """
    header = [_("Username"), _("Full Name"), _("Extended Due Date")]
    rows = [
        dict(zip(header, (username, fullname, due.strftime('%Y-%m-%d %H:%M'))))
        for username, fullname, due in api.get_overrides_for_block(course.id, unit.location)
    ]
    rows.sort(key=operator.itemgetter(_("Username")))
    return {
        "header": header,
        "title": _("Users with due date extensions for {0}").format(
            title_or_url(unit)),
        "data": rows
    }
def dump_student_extensions(course, student):
    """
    Build a table of the due date extensions granted to `student` in `course`.
    Returns a dict with 'header', 'title' and 'data' keys.
    """
    header = [_("Unit"), _("Extended Due Date")]
    units_by_location = {u.location: u for u in get_units_with_due_date(course)}
    rows = []
    for override in api.get_overrides_for_user(course.id, student):
        location = override['location'].replace(course_key=course.id)
        # Skip overrides for units that are not top-level due-dated units.
        if location not in units_by_location:
            continue
        due_str = override['actual_date'].strftime("%Y-%m-%d %H:%M")
        rows.append(dict(zip(header, (title_or_url(units_by_location[location]), due_str))))
    rows.sort(key=operator.itemgetter(_("Unit")))
    return {
        "header": header,
        "title": _("Due date extensions for {0} {1} ({2})").format(
            student.first_name, student.last_name, student.username),
        "data": rows}
def add_block_ids(payload):
    """
    Annotate each row of payload['data'] that carries a 'module_id' with the
    corresponding 'block_id', so clients need not parse module ids themselves.
    """
    if 'data' not in payload:
        return
    for row in payload['data']:
        if 'module_id' in row:
            row['block_id'] = UsageKey.from_string(row['module_id']).block_id
|
eduNEXT/edunext-platform
|
lms/djangoapps/instructor/views/tools.py
|
Python
|
agpl-3.0
| 8,924
|
[
"VisIt"
] |
e998c91b2b60b135d494acbe790f69025ea0ba0296f749f72d282abb904f5639
|
# coding: utf-8
from __future__ import division, unicode_literals
"""
This module implements a Composition class to represent compositions,
and a ChemicalPotential class to represent potentials.
"""
__author__ = "Shyue Ping Ong"
__copyright__ = "Copyright 2011, The Materials Project"
__version__ = "0.1"
__maintainer__ = "Shyue Ping Ong"
__email__ = "shyuep@gmail.com"
__status__ = "Production"
__date__ = "Nov 10, 2012"
import collections
import numbers
import re
import string
import six
from six.moves import filter, map, zip
from fractions import gcd
from functools import total_ordering
from itertools import chain
from pymatgen.core.periodic_table import get_el_sp, Element
from pymatgen.util.string_utils import formula_double_format
from pymatgen.serializers.json_coders import PMGSONable
from pymatgen.core.units import unitized
@total_ordering
class Composition(collections.Mapping, collections.Hashable, PMGSONable):
"""
Represents a Composition, which is essentially a {element:amount} mapping
type. Composition is written to be immutable and hashable,
unlike a standard Python dict.
Note that the key can be either an Element or a Specie. Elements and Specie
are treated differently. i.e., a Fe2+ is not the same as a Fe3+ Specie and
would be put in separate keys. This differentiation is deliberate to
support using Composition to determine the fraction of a particular Specie.
Works almost completely like a standard python dictionary, except that
__getitem__ is overridden to return 0 when an element is not found.
(somewhat like a defaultdict, except it is immutable).
Also adds more convenience methods relevant to compositions, e.g.,
get_fraction.
It should also be noted that many Composition related functionality takes
in a standard string as a convenient input. For example,
even though the internal representation of a Fe2O3 composition is
{Element("Fe"): 2, Element("O"): 3}, you can obtain the amount of Fe
simply by comp["Fe"] instead of the more verbose comp[Element("Fe")].
>>> comp = Composition("LiFePO4")
>>> comp.get_atomic_fraction(Element("Li"))
0.14285714285714285
>>> comp.num_atoms
7.0
>>> comp.reduced_formula
'LiFePO4'
>>> comp.formula
'Li1 Fe1 P1 O4'
>>> comp.get_wt_fraction(Element("Li"))
0.04399794666951898
>>> comp.num_atoms
7.0
"""
"""
Tolerance in distinguishing different composition amounts.
1e-8 is fairly tight, but should cut out most floating point arithmetic
errors.
"""
amount_tolerance = 1e-8
"""
Special formula handling for peroxides and certain elements. This is so
that formula output does not write LiO instead of Li2O2 for example.
"""
special_formulas = {"LiO": "Li2O2", "NaO": "Na2O2", "KO": "K2O2",
"HO": "H2O2", "CsO": "Cs2O2", "RbO": "Rb2O2",
"O": "O2", "N": "N2", "Cl": "Cl2", "H": "H2"}
    def __init__(self, *args, **kwargs): #allow_negative=False
        """
        Very flexible Composition construction, similar to the built-in Python
        dict(). Also extended to allow simple string init.
        Args:
            Any form supported by the Python built-in dict() function.
            1. A dict of either {Element/Specie: amount},
               {string symbol:amount}, or {atomic number:amount} or any mixture
               of these. E.g., {Element("Li"):2 ,Element("O"):1},
               {"Li":2, "O":1}, {3:2, 8:1} all result in a Li2O composition.
            2. Keyword arg initialization, similar to a dict, e.g.,
               Compostion(Li = 2, O = 1)
            In addition, the Composition constructor also allows a single
            string as an input formula. E.g., Composition("Li2O").
            allow_negative: Whether to allow negative compositions. This
                argument must be popped from the \*\*kwargs due to \*args
                ambiguity.
        """
        # allow_negative must be popped before delegating to dict() semantics.
        self.allow_negative = kwargs.pop('allow_negative', False)
        if len(args) == 1 and isinstance(args[0], six.string_types):
            elmap = self._parse_formula(args[0])
        else:
            elmap = dict(*args, **kwargs)
        for k, v in list(elmap.items()):
            if v < -Composition.amount_tolerance and not self.allow_negative:
                raise CompositionError("Amounts in Composition cannot be "
                                       "negative!")
            elif abs(v) < Composition.amount_tolerance:
                # Drop near-zero amounts so they don't pollute the mapping.
                del elmap[k]
        self._elmap = {get_el_sp(k): v for k, v in elmap.items()}
        # Cached atom count; negative amounts contribute their absolute value.
        self._natoms = sum(map(abs, self._elmap.values()))
def __getitem__(self, el):
"""
Get the amount for element.
"""
return self._elmap.get(get_el_sp(el), 0)
def __eq__(self, other):
for el in chain(self.elements, other.elements):
if abs(self[el] - other[el]) > Composition.amount_tolerance:
return False
return True
def __ge__(self, other):
"""
Defines >= for Compositions. Should ONLY be used for defining a sort
order (the behavior is probably not what you'd expect)
"""
for el in sorted(set(self.elements + other.elements)):
if other[el] - self[el] >= Composition.amount_tolerance:
return False
elif self[el] - other[el] >= Composition.amount_tolerance:
return True
return True
def __ne__(self, other):
return not self.__eq__(other)
def __add__(self, other):
"""
Adds two compositions. For example, an Fe2O3 composition + an FeO
composition gives a Fe3O4 composition.
"""
new_el_map = collections.defaultdict(float)
new_el_map.update(self)
for k, v in other.items():
new_el_map[get_el_sp(k)] += v
return Composition(new_el_map, allow_negative=self.allow_negative)
def __sub__(self, other):
"""
Subtracts two compositions. For example, an Fe2O3 composition - an FeO
composition gives an FeO2 composition.
Raises:
CompositionError if the subtracted composition is greater than the
original composition in any of its elements, unless allow_negative
is True
"""
new_el_map = collections.defaultdict(float)
new_el_map.update(self)
for k, v in other.items():
new_el_map[get_el_sp(k)] -= v
return Composition(new_el_map, allow_negative=self.allow_negative)
def __mul__(self, other):
"""
Multiply a Composition by an integer or a float.
Fe2O3 * 4 -> Fe8O12
"""
if not isinstance(other, numbers.Number):
return NotImplemented
return Composition({el: self[el] * other for el in self},
allow_negative=self.allow_negative)
__rmul__ = __mul__
def __truediv__(self, other):
if not isinstance(other, numbers.Number):
return NotImplemented
return Composition({el: self[el] / other for el in self},
allow_negative=self.allow_negative)
def __hash__(self):
"""
Minimally effective hash function that just distinguishes between
Compositions with different elements.
"""
hashcode = 0
for el in self._elmap.keys():
hashcode += el.Z
return hashcode
def __contains__(self, el):
return el in self._elmap
def __len__(self):
return len(self._elmap)
def __iter__(self):
return self._elmap.__iter__()
@property
def average_electroneg(self):
return sum((el.X * abs(amt) for el, amt in self._elmap.items())) / \
self.num_atoms
def almost_equals(self, other, rtol=0.1, atol=1e-8):
"""
Returns true if compositions are equal within a tolerance.
Args:
other (Composition): Other composition to check
rtol (float): Relative tolerance
atol (float): Absolute tolerance
"""
sps = set(self.elements + other.elements)
for sp in sps:
a = self[sp]
b = other[sp]
tol = atol + rtol * (abs(a) + abs(b)) / 2
if abs(b - a) > tol:
return False
return True
@property
def is_element(self):
"""
True if composition is for an element.
"""
return len(self._elmap) == 1
def copy(self):
return Composition(self._elmap, allow_negative=self.allow_negative)
@property
def formula(self):
"""
Returns a formula string, with elements sorted by electronegativity,
e.g., Li4 Fe4 P4 O16.
"""
sym_amt = self.get_el_amt_dict()
syms = sorted(sym_amt.keys(), key=lambda sym: get_el_sp(sym).X)
formula = [s + formula_double_format(sym_amt[s], False) for s in syms]
return " ".join(formula)
@property
def alphabetical_formula(self):
"""
Returns a formula string, with elements sorted by alphabetically
e.g., Fe4 Li4 O16 P4.
"""
sym_amt = self.get_el_amt_dict()
syms = sorted(sym_amt.keys())
formula = [s + formula_double_format(sym_amt[s], False) for s in syms]
return " ".join(formula)
@property
def element_composition(self):
"""
Returns the composition replacing any species by the corresponding
element.
"""
return Composition(self.get_el_amt_dict(),
allow_negative=self.allow_negative)
@property
def fractional_composition(self):
"""
Returns the normalized composition which the number of species sum to
1.
Returns:
Normalized composition which the number of species sum to 1.
"""
return self / self._natoms
@property
def reduced_composition(self):
"""
Returns the reduced composition,i.e. amounts normalized by greatest
common denominator. e.g., Composition("FePO4") for
Composition("Fe4P4O16").
"""
return self.get_reduced_composition_and_factor()[0]
def get_reduced_composition_and_factor(self):
"""
Calculates a reduced composition and factor.
Returns:
A normalized composition and a multiplicative factor, i.e.,
Li4Fe4P4O16 returns (Composition("LiFePO4"), 4).
"""
factor = self.get_reduced_formula_and_factor()[1]
return self / factor, factor
def get_reduced_formula_and_factor(self):
"""
Calculates a reduced formula and factor.
Returns:
A pretty normalized formula and a multiplicative factor, i.e.,
Li4Fe4P4O16 returns (LiFePO4, 4).
"""
all_int = all([x == int(x) for x in self._elmap.values()])
if not all_int:
return self.formula.replace(" ", ""), 1
d = self.get_el_amt_dict()
(formula, factor) = reduce_formula(d)
if formula in Composition.special_formulas:
formula = Composition.special_formulas[formula]
factor /= 2
return formula, factor
@property
def reduced_formula(self):
"""
Returns a pretty normalized formula, i.e., LiFePO4 instead of
Li4Fe4P4O16.
"""
return self.get_reduced_formula_and_factor()[0]
@property
def elements(self):
"""
Returns view of elements in Composition.
"""
return list(self._elmap.keys())
def __str__(self):
return " ".join([
"{}{}".format(k, formula_double_format(v, ignore_ones=False))
for k, v in self.as_dict().items()])
@property
def num_atoms(self):
"""
Total number of atoms in Composition. For negative amounts, sum
of absolute values
"""
return self._natoms
@property
@unitized("amu")
def weight(self):
"""
Total molecular weight of Composition
"""
return sum([amount * el.atomic_mass
for el, amount in self._elmap.items()])
def get_atomic_fraction(self, el):
    """
    Calculate the atomic fraction of an Element or Specie.

    Args:
        el (Element/Specie): Element or Specie to get fraction for.

    Returns:
        Atomic fraction for element el in this Composition.
    """
    # Absolute value so that negative amounts still yield a fraction.
    return abs(self[el]) / self._natoms
def get_wt_fraction(self, el):
    """
    Calculate the weight fraction of an Element or Specie.

    Args:
        el (Element/Specie): Element or Specie to get fraction for.

    Returns:
        Weight fraction for element el in this Composition.
    """
    el_mass = get_el_sp(el).atomic_mass
    return el_mass * abs(self[el]) / self.weight
def _parse_formula(self, formula):
    """
    Parse a formula string into a symbol -> amount mapping.

    Args:
        formula (str): A string formula, e.g. Fe2O3, Li3Fe2(PO4)3

    Returns:
        dict of {element symbol: amount} parsed from the formula.

    Raises:
        CompositionError: if part of the formula cannot be parsed.
    """
    def get_sym_dict(f, factor):
        # Accumulate amounts per element symbol, scaled by factor.
        sym_dict = collections.defaultdict(float)
        for m in re.finditer(r"([A-Z][a-z]*)([-*\.\d]*)", f):
            el = m.group(1)
            amt = 1
            if m.group(2).strip() != "":
                amt = float(m.group(2))
            sym_dict[el] += amt * factor
            # Chomp matched text so any leftover signals a parse error.
            f = f.replace(m.group(), "", 1)
        if f.strip():
            raise CompositionError("{} is an invalid formula!".format(f))
        return sym_dict

    # Expand the first innermost parenthesized group (with its optional
    # multiplier), then recurse until no parentheses remain.
    m = re.search(r"\(([^\(\)]+)\)([\.\d]*)", formula)
    if m:
        factor = 1
        if m.group(2) != "":
            factor = float(m.group(2))
        unit_sym_dict = get_sym_dict(m.group(1), factor)
        expanded_sym = "".join(["{}{}".format(el, amt)
                                for el, amt in unit_sym_dict.items()])
        expanded_formula = formula.replace(m.group(), expanded_sym)
        return self._parse_formula(expanded_formula)
    return get_sym_dict(formula, 1)
@property
def anonymized_formula(self):
    """
    An anonymized formula. Unique species are arranged in ordering of
    increasing amounts and assigned ascending alphabets. Useful for
    prototyping formulas. For example, all stoichiometric perovskites have
    anonymized_formula ABC3.
    """
    reduced = self.get_reduced_composition_and_factor()[0]
    sorted_els = sorted(reduced.elements, key=lambda el: reduced[el])

    parts = []
    for letter, el in zip(string.ascii_uppercase, sorted_els):
        n = reduced[el]
        if n > 0:
            if n == 1:
                suffix = ""
            elif abs(n % 1) < 1e-8:
                # Effectively-integer amounts are printed without decimals.
                suffix = str(int(n))
            else:
                suffix = str(n)
            parts.append(letter + suffix)
    return "".join(parts)
def __repr__(self):
    """Short debugging representation, e.g. "Comp: Fe4 P4 O16"."""
    return "Comp: {}".format(self.formula)
@classmethod
def from_dict(cls, d):
    """
    Creates a composition from a dict generated by as_dict(). Strictly not
    necessary given that the standard constructor already takes in such an
    input, but this method preserves the standard pymatgen API of having
    from_dict methods to reconstitute objects generated by as_dict(), and
    allows for easier introspection.

    Args:
        d (dict): {symbol: amount} dict.

    Returns:
        Composition built from d.
    """
    return cls(d)
def get_el_amt_dict(self):
    """
    Returns:
        Dict with element symbol and (unreduced) amount e.g.,
        {"Fe": 4.0, "O": 6.0} or {"Fe3+": 4.0, "O2-": 6.0}
    """
    totals = collections.defaultdict(float)
    for sp, amt in self.items():
        totals[sp.symbol] += amt
    return totals
def as_dict(self):
    """
    Returns:
        dict with species symbol and (unreduced) amount e.g.,
        {"Fe": 4.0, "O": 6.0} or {"Fe3+": 4.0, "O2-": 6.0}
    """
    totals = collections.defaultdict(float)
    for sp, amt in self.items():
        totals[str(sp)] += amt
    return totals
@property
def to_reduced_dict(self):
    """
    Returns:
        Dict with element symbol and reduced amount e.g.,
        {"Fe": 2.0, "O": 3.0}
    """
    return Composition(self.reduced_formula).as_dict()
@property
def to_data_dict(self):
    """
    Returns:
        A dict with many keys and values relating to Composition/Formula,
        including reduced_cell_composition, unit_cell_composition,
        reduced_cell_formula, elements and nelements.
    """
    data = {}
    data["reduced_cell_composition"] = self.to_reduced_dict
    data["unit_cell_composition"] = self.as_dict()
    data["reduced_cell_formula"] = self.reduced_formula
    data["elements"] = self.as_dict().keys()
    data["nelements"] = len(self.as_dict().keys())
    return data
@staticmethod
def ranked_compositions_from_indeterminate_formula(fuzzy_formula,
                                                   lock_if_strict=True):
    """
    Takes in a formula where capitalization might not be correctly entered,
    and suggests a ranked list of potential Composition matches.
    Author: Anubhav Jain

    Args:
        fuzzy_formula (str): A formula string, such as "co2o3" or "MN",
            that may or may not have multiple interpretations
        lock_if_strict (bool): If true, a properly entered formula will
            only return the one correct interpretation. For example,
            "Co1" will only return "Co1" if true, but will return both
            "Co1" and "C1 O1" if false.

    Returns:
        A ranked list of potential Composition matches
    """
    if lock_if_strict:
        # Strict parsing may fail; in that case just ignore the error and
        # fall through to fuzzy matching.
        try:
            return [Composition(fuzzy_formula)]
        except (CompositionError, ValueError):
            pass

    # Deduplicate the interpretations, then rank by descending score.
    unique_matches = set(
        Composition._comps_from_fuzzy_formula(fuzzy_formula))
    ranked = sorted(unique_matches, key=lambda match: match[1],
                    reverse=True)
    return [comp for comp, _ in ranked]
@staticmethod
def _comps_from_fuzzy_formula(fuzzy_formula, m_dict={}, m_points=0,
                              factor=1):
    """
    A recursive generator helper for formula parsing that helps in
    interpreting and ranking indeterminate formulas.
    Author: Anubhav Jain

    Args:
        fuzzy_formula (str): A formula string, such as "co2o3" or "MN",
            that may or may not have multiple interpretations.
        m_dict (dict): A symbol:amt dictionary from the previously parsed
            formula.  NOTE(review): mutable default argument -- it appears
            safe only because every code path below copies it with
            dict(m_dict) before mutation; confirm before refactoring.
        m_points: Number of points gained from the previously parsed
            formula.
        factor: Coefficient for this parse, e.g. (PO4)2 will feed in PO4
            as the fuzzy_formula with a coefficient of 2.

    Yields:
        Tuples, with the first element being a Composition and the second
        element being the number of points awarded that Composition
        interpretation.
    """
    def _parse_chomp_and_rank(m, f, m_dict, m_points):
        """
        A helper method for formula parsing that helps in interpreting and
        ranking indeterminate formulas.
        Author: Anubhav Jain

        Args:
            m: A regex match, with the first group being the element and
                the second group being the amount
            f: The formula part containing the match
            m_dict: A symbol:amt dictionary from the previously parsed
                formula
            m_points: Number of points gained from the previously parsed
                formula

        Returns:
            A tuple of (f, m_dict, points) where m_dict now contains data
            from the match and the match has been removed (chomped) from
            the formula f. The "goodness" of the match determines the
            number of points returned for chomping. Returns
            (None, None, None) if no element could be found...
        """
        points = 0
        # Points awarded if the first element of the element is correctly
        # specified as a capital
        points_first_capital = 100
        # Points awarded if the second letter of the element is correctly
        # specified as lowercase
        points_second_lowercase = 100

        # get element and amount from regex match
        el = m.group(1)
        if len(el) > 2 or len(el) < 1:
            raise CompositionError("Invalid element symbol entered!")
        amt = float(m.group(2)) if m.group(2).strip() != "" else 1

        # convert the element string to proper [uppercase,lowercase] format
        # and award points if it is already in that format
        char1 = el[0]
        char2 = el[1] if len(el) > 1 else ""

        if char1 == char1.upper():
            points += points_first_capital
        if char2 and char2 == char2.lower():
            points += points_second_lowercase

        el = char1.upper() + char2.lower()

        # if it's a valid element, chomp and add to the points
        if Element.is_valid_symbol(el):
            if el in m_dict:
                m_dict[el] += amt * factor
            else:
                m_dict[el] = amt * factor
            return f.replace(m.group(), "", 1), m_dict, m_points + points

        # else return None
        return None, None, None

    fuzzy_formula = fuzzy_formula.strip()

    if len(fuzzy_formula) == 0:
        # The entire formula has been parsed into m_dict. Return the
        # corresponding Composition and number of points
        if m_dict:
            yield (Composition.from_dict(m_dict), m_points)
    else:
        # if there is a parenthesis, remove it and match the remaining stuff
        # with the appropriate factor
        for mp in re.finditer(r"\(([^\(\)]+)\)([\.\d]*)", fuzzy_formula):
            mp_points = m_points
            mp_form = fuzzy_formula.replace(mp.group(), " ", 1)
            mp_dict = dict(m_dict)
            mp_factor = 1 if mp.group(2) == "" else float(mp.group(2))
            # Match the stuff inside the parenthesis with the appropriate
            # factor
            for match in \
                Composition._comps_from_fuzzy_formula(mp.group(1),
                                                      mp_dict,
                                                      mp_points,
                                                      factor=mp_factor):
                only_me = True
                # Match the stuff outside the parentheses and return the
                # sum.
                for match2 in \
                    Composition._comps_from_fuzzy_formula(mp_form,
                                                          mp_dict,
                                                          mp_points,
                                                          factor=1):
                    only_me = False
                    yield (match[0] + match2[0], match[1] + match2[1])
                # if the stuff inside the parenthesis is nothing, then just
                # return the stuff inside the parentheses
                if only_me:
                    yield match
            return

        # try to match the single-letter elements
        # NOTE(review): the class [A-z] also matches the ASCII characters
        # between 'Z' and 'a' ([, \, ], ^, _, `); [A-Za-z] was probably
        # intended -- invalid matches are rejected later by
        # Element.is_valid_symbol, so behavior is unaffected in practice.
        m1 = re.match(r"([A-z])([\.\d]*)", fuzzy_formula)
        if m1:
            m_points1 = m_points
            m_form1 = fuzzy_formula
            m_dict1 = dict(m_dict)
            (m_form1, m_dict1, m_points1) = \
                _parse_chomp_and_rank(m1, m_form1, m_dict1, m_points1)
            if m_dict1:
                # there was a real match
                for match in \
                    Composition._comps_from_fuzzy_formula(m_form1,
                                                          m_dict1,
                                                          m_points1,
                                                          factor):
                    yield match

        # try to match two-letter elements
        m2 = re.match(r"([A-z]{2})([\.\d]*)", fuzzy_formula)
        if m2:
            m_points2 = m_points
            m_form2 = fuzzy_formula
            m_dict2 = dict(m_dict)
            (m_form2, m_dict2, m_points2) = \
                _parse_chomp_and_rank(m2, m_form2, m_dict2, m_points2)
            if m_dict2:
                # there was a real match
                for match in \
                    Composition._comps_from_fuzzy_formula(m_form2, m_dict2,
                                                          m_points2,
                                                          factor):
                    yield match
def reduce_formula(sym_amt):
    """
    Helper method to reduce a sym_amt dict to a reduced formula and factor.

    Args:
        sym_amt (dict): {symbol: amount}.

    Returns:
        (reduced_formula, factor).
    """
    # Sort symbols by electronegativity so the formula is written in the
    # conventional electropositive-first order.
    syms = sorted(sym_amt.keys(),
                  key=lambda s: get_el_sp(s).X)

    # Drop species whose amounts are within tolerance of zero.
    syms = list(filter(lambda s: abs(sym_amt[s]) >
                       Composition.amount_tolerance, syms))
    num_el = len(syms)

    # Heuristic: when the two most electronegative elements are close in
    # electronegativity (difference < 1.65), treat them as a polyanion
    # (e.g. the PO4 in LiFePO4) and reduce that part separately.
    contains_polyanion = (num_el >= 3 and
                          get_el_sp(syms[num_el - 1]).X
                          - get_el_sp(syms[num_el - 2]).X < 1.65)

    # Overall reduction factor is the gcd of all amounts.
    # NOTE(review): amounts may be floats; this relies on a gcd that
    # accepts floats (Python 2 fractions.gcd did) -- confirm for the
    # target runtime.
    factor = abs(six.moves.reduce(gcd, sym_amt.values()))

    reduced_form = []
    # Exclude the two polyanion elements from the main part if present.
    n = num_el - 2 if contains_polyanion else num_el
    for i in range(0, n):
        s = syms[i]
        normamt = sym_amt[s] * 1.0 / factor
        reduced_form.append(s)
        reduced_form.append(formula_double_format(normamt))

    if contains_polyanion:
        # Reduce the polyanion part on its own; wrap it in parentheses
        # when it occurs more than once.
        poly_sym_amt = {syms[i]: sym_amt[syms[i]] / factor
                        for i in range(n, num_el)}
        (poly_form, poly_factor) = reduce_formula(poly_sym_amt)

        if poly_factor != 1:
            reduced_form.append("({}){}".format(poly_form, int(poly_factor)))
        else:
            reduced_form.append(poly_form)

    reduced_form = "".join(reduced_form)

    return reduced_form, factor
class CompositionError(Exception):
    """Exception raised for errors in Composition parsing and handling."""
    pass
class ChemicalPotential(dict, PMGSONable):
    """
    Represents a set of chemical potentials, keyed by element/species.

    Supports:
        multiplication/division by a Number,
        multiplication by a Composition (returns an energy),
        addition/subtraction with other ChemicalPotentials.
    """

    def __init__(self, *args, **kwargs):
        """
        Args:
            *args, **kwargs: any valid dict init arguments
        """
        entries = dict(*args, **kwargs)
        super(ChemicalPotential, self).__init__(
            (get_el_sp(sym), pot) for sym, pot in entries.items())
        # Two input keys mapping to the same species indicate a conflict.
        if len(entries) != len(self):
            raise ValueError("Duplicate potential specified")

    def __mul__(self, other):
        if not isinstance(other, numbers.Number):
            return NotImplemented
        return ChemicalPotential({sp: pot * other
                                  for sp, pot in self.items()})

    __rmul__ = __mul__

    def __truediv__(self, other):
        if not isinstance(other, numbers.Number):
            return NotImplemented
        return ChemicalPotential({sp: pot / other
                                  for sp, pot in self.items()})

    def __sub__(self, other):
        if not isinstance(other, ChemicalPotential):
            return NotImplemented
        # Species missing from either side default to a potential of 0.
        all_species = set(self.keys()).union(other.keys())
        return ChemicalPotential({sp: self.get(sp, 0) - other.get(sp, 0)
                                  for sp in all_species})

    def __add__(self, other):
        if not isinstance(other, ChemicalPotential):
            return NotImplemented
        all_species = set(self.keys()).union(other.keys())
        return ChemicalPotential({sp: self.get(sp, 0) + other.get(sp, 0)
                                  for sp in all_species})

    def get_energy(self, composition, strict=True):
        """
        Calculates the energy of a composition.

        Args:
            composition (Composition): input composition
            strict (bool): Whether all potentials must be specified

        Raises:
            ValueError: if strict and a species lacks a potential.
        """
        if strict and set(composition.keys()) > set(self.keys()):
            missing = set(composition.keys()) - set(self.keys())
            raise ValueError(
                "Potentials not specified for {}".format(missing))
        return sum(self.get(sp, 0) * amt
                   for sp, amt in composition.items())

    def __repr__(self):
        return "ChemPots: " + super(ChemicalPotential, self).__repr__()
if __name__ == "__main__":
    # When run directly as a script, execute this module's doctests.
    import doctest
    doctest.testmod()
|
yanikou19/pymatgen
|
pymatgen/core/composition.py
|
Python
|
mit
| 29,842
|
[
"pymatgen"
] |
5668a8b25df2cf6e5ea176586f78aa8b3dc09316e32b260cfec894062031781d
|
# Copyright (c) 2012 Google Inc. All rights reserved.
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.
from __future__ import print_function
import gyp.common
import gyp.simple_copy
import multiprocessing
import optparse
import os.path
import re
import shlex
import signal
import subprocess
import sys
import threading
import time
import traceback
from gyp.common import GypError
from gyp.common import OrderedSet
_PYTHON3 = sys.version_info >= (3, 0, 0)
if not _PYTHON3:
from compiler.ast import Const, Dict, Discard, List, Module, Node, Stmt
import compiler
# A list of types that are treated as linkable.
linkable_types = ['executable', 'shared_library', 'loadable_module']

# A list of sections that contain links to other targets.
dependency_sections = ['dependencies', 'export_dependent_settings']

# base_path_sections is a list of sections defined by GYP that contain
# pathnames.  The generators can provide more keys, the two lists are merged
# into path_sections, but you should call IsPathSection instead of using either
# list directly.
base_path_sections = [
  'destination',
  'files',
  'include_dirs',
  'inputs',
  'libraries',
  'outputs',
  'sources',
]
# Filled in from base_path_sections plus generator-provided sections;
# queried through IsPathSection().
path_sections = set()

# These per-process dictionaries are used to cache build file data when loading
# in parallel mode.
per_process_data = {}
per_process_aux_data = {}
def IsPathSection(section):
  """Return True if |section| is one whose values are pathnames.

  A section name may carry trailing merge operators ('=+?!'), which are
  stripped before the lookup.
  """
  # If section ends in one of the '=+?!' characters, it's applied to a section
  # without the trailing characters.  '/' is notably absent from this list,
  # because there's no way for a regular expression to be treated as a path.
  # BUG FIX: guard on a non-empty section.  '' in '=+?!' evaluates to True,
  # so the original loop spun forever when given an empty section name.
  while section and section[-1] in '=+?!':
    section = section[:-1]

  if section in path_sections:
    return True

  # Sections matching the regexp '_(dir|file|path)s?$' are also
  # considered PathSections.  Using manual string matching since that
  # is much faster than the regexp and this can be called hundreds of
  # thousands of times so micro performance matters.
  if "_" in section:
    tail = section[-6:]
    if tail[-1] == 's':
      tail = tail[:-1]
    if tail[-5:] in ('_file', '_path'):
      return True
    return tail[-4:] == '_dir'

  return False
# base_non_configuration_keys is a list of key names that belong in the target
# itself and should not be propagated into its configurations.  It is merged
# with a list that can come from the generator to
# create non_configuration_keys.
base_non_configuration_keys = [
  # Sections that must exist inside targets and not configurations.
  'actions',
  'configurations',
  'copies',
  'default_configuration',
  'dependencies',
  'dependencies_original',
  'libraries',
  'postbuilds',
  'product_dir',
  'product_extension',
  'product_name',
  'product_prefix',
  'rules',
  'run_as',
  'sources',
  'standalone_static_library',
  'suppress_wildcard',
  'target_name',
  'toolset',
  'toolsets',
  'type',

  # Sections that can be found inside targets or configurations, but that
  # should not be propagated from targets into their configurations.
  'variables',
]
# Filled in later: base_non_configuration_keys merged with any keys the
# generator supplies.
non_configuration_keys = []

# Keys that do not belong inside a configuration dictionary.
invalid_configuration_keys = [
  'actions',
  'all_dependent_settings',
  'configurations',
  'dependencies',
  'direct_dependent_settings',
  'libraries',
  'link_settings',
  'sources',
  'standalone_static_library',
  'target_name',
  'type',
]

# Controls whether or not the generator supports multiple toolsets.
multiple_toolsets = False

# Paths for converting filelist paths to output paths: {
#   toplevel,
#   qualified_output_dir,
# }
generator_filelist_paths = None
def GetIncludedBuildFiles(build_file_path, aux_data, included=None):
  """Return a list of all build files included into build_file_path.

  The returned list will contain build_file_path as well as all other files
  that it included, either directly or indirectly.  Note that the list may
  contain files that were included into a conditional section that evaluated
  to false and was not merged into build_file_path's dict.

  aux_data is a dict containing a key for each build file or included build
  file.  Those keys provide access to dicts whose "included" keys contain
  lists of all other files included by the build file.

  included should be left at its default None value by external callers.  It
  is used for recursion.

  The returned list will not contain any duplicate entries.  Each build file
  in the list will be relative to the current directory.
  """
  # Identity comparison, not equality: the accumulator list is created once
  # per external call and threaded through the recursion.
  if included is None:
    included = []

  if build_file_path in included:
    return included

  included.append(build_file_path)

  for included_build_file in aux_data[build_file_path].get('included', []):
    GetIncludedBuildFiles(included_build_file, aux_data, included)

  return included
def CheckedEval(file_contents):
  """Return the eval of a gyp file.

  The gyp file is restricted to dictionaries and lists only, and
  repeated keys are not allowed.  Note that this is slower than eval() is.

  Args:
    file_contents: string holding the source text of the build file.
  """
  if _PYTHON3:
    # BUG FIX: this previously evaluated the undefined name
    # 'build_file_contents' (a local of LoadOneBuildFile), which raised
    # NameError on every Python 3 invocation.  Note the Python 3 path
    # skips the duplicate-key check performed by the compiler-AST path.
    return eval(file_contents, {'__builtins__': None}, None)
  ast = compiler.parse(file_contents)
  assert isinstance(ast, Module)
  c1 = ast.getChildren()
  assert c1[0] is None
  assert isinstance(c1[1], Stmt)
  c2 = c1[1].getChildren()
  assert isinstance(c2[0], Discard)
  c3 = c2[0].getChildren()
  assert len(c3) == 1
  return CheckNode(c3[0], [])
def CheckNode(node, keypath):
  """Convert a restricted `compiler` AST node into plain Python data.

  Only Dict, List and Const nodes are permitted; a repeated dictionary key
  raises GypError naming the offending key path.  (Python 2 only: relies
  on the `compiler` module, which was removed in Python 3.)

  Args:
    node: a compiler AST node (Dict, List or Const).
    keypath: list of key-path components leading to this node.

  Returns:
    The equivalent dict, list, or constant value.
  """
  if isinstance(node, Dict):
    c = node.getChildren()
    # Named `result` rather than `dict` to avoid shadowing the builtin.
    result = {}
    for n in range(0, len(c), 2):
      assert isinstance(c[n], Const)
      key = c[n].getChildren()[0]
      if key in result:
        raise GypError("Key '" + key + "' repeated at level " +
                       repr(len(keypath) + 1) + " with key path '" +
                       '.'.join(keypath) + "'")
      kp = list(keypath)  # Make a copy of the list for descending this node.
      kp.append(key)
      result[key] = CheckNode(c[n + 1], kp)
    return result
  elif isinstance(node, List):
    c = node.getChildren()
    children = []
    for index, child in enumerate(c):
      kp = list(keypath)  # Copy of the key path for each child.
      kp.append(repr(index))
      children.append(CheckNode(child, kp))
    return children
  elif isinstance(node, Const):
    return node.getChildren()[0]
  else:
    raise TypeError("Unknown AST node at key path '" + '.'.join(keypath) +
                    "': " + repr(node))
def LoadOneBuildFile(build_file_path, data, aux_data, includes,
                     is_target, check):
  """Load one .gyp/.gypi file, cache it in |data|, and merge its includes.

  Args:
    build_file_path: path to the build file, relative to the cwd.
    data: cache dict mapping build file paths to their evaluated dicts.
    aux_data: per-build-file auxiliary data (records included files).
    includes: extra include files to merge when |is_target| is true.
    is_target: True when loading a target build file.
    check: when True, evaluate with the syntax-checking CheckedEval.

  Returns:
    The dict the build file evaluates to.

  Raises:
    GypError: if the file is missing or does not evaluate to a dict.
  """
  if build_file_path in data:
    return data[build_file_path]

  if os.path.exists(build_file_path):
    # Use a context manager so the file handle is closed deterministically
    # instead of leaking until garbage collection.
    with open(build_file_path) as build_file:
      build_file_contents = build_file.read()
  else:
    raise GypError("%s not found (cwd: %s)" % (build_file_path, os.getcwd()))

  build_file_data = None
  try:
    if check:
      build_file_data = CheckedEval(build_file_contents)
    else:
      build_file_data = eval(build_file_contents, {'__builtins__': None},
                             None)
  except SyntaxError:
    # Tag the exception with the file it came from before propagating.
    e = sys.exc_info()[1]
    e.filename = build_file_path
    raise
  except Exception:
    e = sys.exc_info()[1]
    gyp.common.ExceptionAppend(e, 'while reading ' + build_file_path)
    raise

  if type(build_file_data) is not dict:
    raise GypError("%s does not evaluate to a dictionary." % build_file_path)

  data[build_file_path] = build_file_data
  aux_data[build_file_path] = {}

  # Scan for includes and merge them in.
  if ('skip_includes' not in build_file_data or
      not build_file_data['skip_includes']):
    try:
      if is_target:
        LoadBuildFileIncludesIntoDict(build_file_data, build_file_path, data,
                                      aux_data, includes, check)
      else:
        LoadBuildFileIncludesIntoDict(build_file_data, build_file_path, data,
                                      aux_data, None, check)
    except Exception:
      e = sys.exc_info()[1]
      gyp.common.ExceptionAppend(e,
                                 'while reading includes of ' +
                                 build_file_path)
      raise

  return build_file_data
def LoadBuildFileIncludesIntoDict(subdict, subdict_path, data, aux_data,
                                  includes, check):
  """Merge |includes| plus any 'includes' entries into |subdict|, recursively.

  Each merged include is recorded in aux_data[subdict_path]['included'].
  Recurses into nested dicts and lists so deeply-nested 'includes' are
  honored as well.
  """
  includes_list = []
  # Identity comparison, not equality, per PEP 8.
  if includes is not None:
    includes_list.extend(includes)
  if 'includes' in subdict:
    for include in subdict['includes']:
      # "include" is specified relative to subdict_path, so compute the real
      # path to include by appending the provided "include" to the directory
      # in which subdict_path resides.
      relative_include = \
          os.path.normpath(os.path.join(os.path.dirname(subdict_path),
                                        include))
      includes_list.append(relative_include)
    # Unhook the includes list, it's no longer needed.
    del subdict['includes']

  # Merge in the included files.
  for include in includes_list:
    if 'included' not in aux_data[subdict_path]:
      aux_data[subdict_path]['included'] = []
    aux_data[subdict_path]['included'].append(include)

    gyp.DebugOutput(gyp.DEBUG_INCLUDES, "Loading Included File: '%s'", include)

    MergeDicts(subdict,
               LoadOneBuildFile(include, data, aux_data, None, False, check),
               subdict_path, include)

  # Recurse into subdictionaries.
  for k, v in subdict.items():
    if type(v) is dict:
      LoadBuildFileIncludesIntoDict(v, subdict_path, data, aux_data,
                                    None, check)
    elif type(v) is list:
      LoadBuildFileIncludesIntoList(v, subdict_path, data, aux_data,
                                    check)
# This recurses into lists so that it can look for dicts.
def LoadBuildFileIncludesIntoList(sublist, sublist_path, data, aux_data,
                                  check):
  """Walk a list, processing 'includes' in any dicts found inside it."""
  for entry in sublist:
    if type(entry) is dict:
      LoadBuildFileIncludesIntoDict(entry, sublist_path, data, aux_data,
                                    None, check)
    elif type(entry) is list:
      LoadBuildFileIncludesIntoList(entry, sublist_path, data, aux_data,
                                    check)
# Processes toolsets in all the targets. This recurses into condition entries
# since they can contain toolsets as well.
def ProcessToolsetsInDict(data):
  """Expand each target's 'toolsets' list into one target copy per toolset.

  After this runs every target carries a single 'toolset' key.  When the
  global multiple_toolsets is False, everything collapses to the 'target'
  toolset.  Recurses into 'conditions' entries, which may define targets.
  """
  if 'targets' in data:
    target_list = data['targets']
    new_target_list = []
    for target in target_list:
      # If this target already has an explicit 'toolset', and no 'toolsets'
      # list, don't modify it further.
      if 'toolset' in target and 'toolsets' not in target:
        new_target_list.append(target)
        continue
      if multiple_toolsets:
        toolsets = target.get('toolsets', ['target'])
      else:
        toolsets = ['target']
      # Make sure this 'toolsets' definition is only processed once.
      if 'toolsets' in target:
        del target['toolsets']
      if len(toolsets) > 0:
        # Optimization: only do copies if more than one toolset is specified.
        # The first toolset reuses the original target dict; each additional
        # toolset gets a deep copy.
        for build in toolsets[1:]:
          new_target = gyp.simple_copy.deepcopy(target)
          new_target['toolset'] = build
          new_target_list.append(new_target)
        target['toolset'] = toolsets[0]
        new_target_list.append(target)
    data['targets'] = new_target_list
  if 'conditions' in data:
    for condition in data['conditions']:
      if type(condition) is list:
        # condition[0] is the expression; the dicts that follow may hold
        # nested 'targets' sections.
        for condition_dict in condition[1:]:
          if type(condition_dict) is dict:
            ProcessToolsetsInDict(condition_dict)
# TODO(mark): I don't love this name. It just means that it's going to load
# a build file that contains targets and is expected to provide a targets dict
# that contains the targets...
def LoadTargetBuildFile(build_file_path, data, aux_data, variables, includes,
                        depth, check, load_dependencies):
  """Load a target build file, expand toolsets, apply early variables/
  conditions, and merge target_defaults into each target.

  Returns False if |build_file_path| was already loaded (non-parallel
  path).  When load_dependencies is False, returns (build_file_path,
  dependencies).  NOTE(review): when load_dependencies is True the
  function falls off the end and returns None -- callers appear to rely
  only on the truthiness of the result; confirm before changing.
  """
  # If depth is set, predefine the DEPTH variable to be a relative path from
  # this build file's directory to the directory identified by depth.
  if depth:
    # TODO(dglazkov) The backslash/forward-slash replacement at the end is a
    # temporary measure. This should really be addressed by keeping all paths
    # in POSIX until actual project generation.
    d = gyp.common.RelativePath(depth, os.path.dirname(build_file_path))
    if d == '':
      variables['DEPTH'] = '.'
    else:
      variables['DEPTH'] = d.replace('\\', '/')

  # The 'target_build_files' key is only set when loading target build files in
  # the non-parallel code path, where LoadTargetBuildFile is called
  # recursively.  In the parallel code path, we don't need to check whether the
  # |build_file_path| has already been loaded, because the 'scheduled' set in
  # ParallelState guarantees that we never load the same |build_file_path|
  # twice.
  if 'target_build_files' in data:
    if build_file_path in data['target_build_files']:
      # Already loaded.
      return False
    data['target_build_files'].add(build_file_path)

  gyp.DebugOutput(gyp.DEBUG_INCLUDES,
                  "Loading Target Build File '%s'", build_file_path)

  build_file_data = LoadOneBuildFile(build_file_path, data, aux_data,
                                     includes, True, check)

  # Store DEPTH for later use in generators.
  build_file_data['_DEPTH'] = depth

  # Set up the included_files key indicating which .gyp files contributed to
  # this target dict.
  if 'included_files' in build_file_data:
    raise GypError(build_file_path + ' must not contain included_files key')

  included = GetIncludedBuildFiles(build_file_path, aux_data)
  build_file_data['included_files'] = []
  for included_file in included:
    # included_file is relative to the current directory, but it needs to
    # be made relative to build_file_path's directory.
    included_relative = \
        gyp.common.RelativePath(included_file,
                                os.path.dirname(build_file_path))
    build_file_data['included_files'].append(included_relative)

  # Do a first round of toolsets expansion so that conditions can be defined
  # per toolset.
  ProcessToolsetsInDict(build_file_data)

  # Apply "pre"/"early" variable expansions and condition evaluations.
  ProcessVariablesAndConditionsInDict(
      build_file_data, PHASE_EARLY, variables, build_file_path)

  # Since some toolsets might have been defined conditionally, perform
  # a second round of toolsets expansion now.
  ProcessToolsetsInDict(build_file_data)

  # Look at each project's target_defaults dict, and merge settings into
  # targets.
  if 'target_defaults' in build_file_data:
    if 'targets' not in build_file_data:
      raise GypError("Unable to find targets in build file %s" %
                     build_file_path)

    index = 0
    while index < len(build_file_data['targets']):
      # This procedure needs to give the impression that target_defaults is
      # used as defaults, and the individual targets inherit from that.
      # The individual targets need to be merged into the defaults.  Make
      # a deep copy of the defaults for each target, merge the target dict
      # as found in the input file into that copy, and then hook up the
      # copy with the target-specific data merged into it as the replacement
      # target dict.
      old_target_dict = build_file_data['targets'][index]
      new_target_dict = gyp.simple_copy.deepcopy(
        build_file_data['target_defaults'])
      MergeDicts(new_target_dict, old_target_dict,
                 build_file_path, build_file_path)
      build_file_data['targets'][index] = new_target_dict
      index += 1

    # No longer needed.
    del build_file_data['target_defaults']

  # Look for dependencies.  This means that dependency resolution occurs
  # after "pre" conditionals and variable expansion, but before "post" -
  # in other words, you can't put a "dependencies" section inside a "post"
  # conditional within a target.

  dependencies = []
  if 'targets' in build_file_data:
    for target_dict in build_file_data['targets']:
      if 'dependencies' not in target_dict:
        continue
      for dependency in target_dict['dependencies']:
        dependencies.append(
            gyp.common.ResolveTarget(build_file_path, dependency, None)[0])

  if load_dependencies:
    for dependency in dependencies:
      try:
        LoadTargetBuildFile(dependency, data, aux_data, variables,
                            includes, depth, check, load_dependencies)
      except Exception:
        e = sys.exc_info()[1]
        gyp.common.ExceptionAppend(
            e, 'while loading dependencies of %s' % build_file_path)
        raise
  else:
    return (build_file_path, dependencies)
def CallLoadTargetBuildFile(global_flags,
                            build_file_path, variables,
                            includes, depth, check,
                            generator_input_info):
  """Wrapper around LoadTargetBuildFile for parallel processing.

     This wrapper is used when LoadTargetBuildFile is executed in
     a worker process.

     Returns (build_file_path, build_file_data, dependencies) on success,
     or None on error; the parent interprets a falsy result as a failure
     signal.
  """

  try:
    # Workers ignore SIGINT; KeyboardInterrupt is handled by the parent
    # process (see LoadTargetBuildFilesParallel).
    signal.signal(signal.SIGINT, signal.SIG_IGN)

    # Apply globals so that the worker process behaves the same.
    for key, value in global_flags.items():
      globals()[key] = value

    SetGeneratorGlobals(generator_input_info)
    result = LoadTargetBuildFile(build_file_path, per_process_data,
                                 per_process_aux_data, variables,
                                 includes, depth, check, False)
    if not result:
      return result

    (build_file_path, dependencies) = result

    # We can safely pop the build_file_data from per_process_data because it
    # will never be referenced by this process again, so we don't need to keep
    # it in the cache.
    build_file_data = per_process_data.pop(build_file_path)

    # This gets serialized and sent back to the main process via a pipe.
    # It's handled in LoadTargetBuildFileCallback.
    return (build_file_path,
            build_file_data,
            dependencies)
  except GypError:
    # Known gyp errors get a terse one-line report.
    e = sys.exc_info()[1]
    sys.stderr.write("gyp: %s\n" % e)
    return None
  except Exception:
    # Unexpected errors include a full traceback for debugging.
    e = sys.exc_info()[1]
    print('Exception:', e, file=sys.stderr)
    print(traceback.format_exc(), file=sys.stderr)
    return None
class ParallelProcessingError(Exception):
  """Raised when an error occurs while loading build files in parallel."""
  pass
class ParallelState(object):
  """Class to keep track of state when processing input files in parallel.

  If build files are loaded in parallel, use this to keep track of
  state during farming out and processing parallel jobs. It's stored
  in a global so that the callback function can have access to it.
  """

  def __init__(self):
    # The multiprocessing pool.
    self.pool = None
    # The condition variable used to protect this object and notify
    # the main loop when there might be more data to process.
    self.condition = None
    # The "data" dict that was passed to LoadTargetBuildFileParallel
    self.data = None
    # The number of parallel calls outstanding; decremented when a response
    # was received.
    self.pending = 0
    # The set of all build files that have been scheduled, so we don't
    # schedule the same one twice.
    self.scheduled = set()
    # A list of dependency build file paths that haven't been scheduled yet.
    self.dependencies = []
    # Flag to indicate if there was an error in a child process.
    self.error = False

  def LoadTargetBuildFileCallback(self, result):
    """Handle the results of running LoadTargetBuildFile in another process.

    Invoked by multiprocessing on the parent process; synchronizes with
    the scheduling loop through self.condition.
    """
    self.condition.acquire()
    if not result:
      # A falsy result signals a worker error; wake the main loop so it
      # can bail out.  NOTE(review): self.pending is deliberately not
      # decremented on this path -- the main loop exits on self.error.
      self.error = True
      self.condition.notify()
      self.condition.release()
      return
    (build_file_path0, build_file_data0, dependencies0) = result
    self.data[build_file_path0] = build_file_data0
    self.data['target_build_files'].add(build_file_path0)
    # Queue up any dependencies that have not been scheduled yet.
    for new_dependency in dependencies0:
      if new_dependency not in self.scheduled:
        self.scheduled.add(new_dependency)
        self.dependencies.append(new_dependency)
    self.pending -= 1
    self.condition.notify()
    self.condition.release()
def LoadTargetBuildFilesParallel(build_files, data, variables, includes, depth,
                                 check, generator_input_info):
  """Load |build_files| and their transitive dependencies using a pool of
  worker processes, filling |data| in place.

  Calls sys.exit(1) if any worker reported an error.
  """
  parallel_state = ParallelState()
  parallel_state.condition = threading.Condition()
  # Make copies of the build_files argument that we can modify while working.
  parallel_state.dependencies = list(build_files)
  parallel_state.scheduled = set(build_files)
  parallel_state.pending = 0
  parallel_state.data = data

  try:
    parallel_state.condition.acquire()
    # Loop until every scheduled file has been loaded and no calls remain
    # outstanding (or a worker reported an error).
    while parallel_state.dependencies or parallel_state.pending:
      if parallel_state.error:
        break
      if not parallel_state.dependencies:
        # Nothing to schedule right now; wait for a callback to add more.
        parallel_state.condition.wait()
        continue

      dependency = parallel_state.dependencies.pop()

      parallel_state.pending += 1
      # Snapshot of module globals the workers must mirror.
      global_flags = {
        'path_sections': globals()['path_sections'],
        'non_configuration_keys': globals()['non_configuration_keys'],
        'multiple_toolsets': globals()['multiple_toolsets']}

      if not parallel_state.pool:
        parallel_state.pool = multiprocessing.Pool(multiprocessing.cpu_count())
      parallel_state.pool.apply_async(
          CallLoadTargetBuildFile,
          args = (global_flags, dependency,
                  variables, includes, depth, check, generator_input_info),
          callback = parallel_state.LoadTargetBuildFileCallback)
  except KeyboardInterrupt:
    e = sys.exc_info()[1]
    parallel_state.pool.terminate()
    raise e

  # NOTE(review): released outside the try block, so an exception other
  # than KeyboardInterrupt would leave the condition held -- confirm this
  # is intentional before restructuring.
  parallel_state.condition.release()

  parallel_state.pool.close()
  parallel_state.pool.join()
  parallel_state.pool = None

  if parallel_state.error:
    sys.exit(1)
# Look for the bracket that matches the first bracket seen in a
# string, and return the start and end as a tuple.  For example, if
# the input is something like "<(foo <(bar)) blah", then it would
# return (1, 13), indicating the entire string except for the leading
# "<" and trailing " blah".
LBRACKETS = set('{[(')
BRACKETS = {'}': '{', ']': '[', ')': '('}


def FindEnclosingBracketGroup(input_str):
  """Return (start, end) of the first balanced bracket group in input_str.

  'end' is one past the closing bracket.  Returns (-1, -1) when no bracket
  is found, or when brackets are unbalanced or mismatched.
  """
  open_brackets = []
  group_start = -1
  for pos, char in enumerate(input_str):
    if char in LBRACKETS:
      open_brackets.append(char)
      if group_start == -1:
        group_start = pos
    elif char in BRACKETS:
      # A closer with no opener, or the wrong opener, is malformed input.
      if not open_brackets:
        return (-1, -1)
      if open_brackets.pop() != BRACKETS[char]:
        return (-1, -1)
      if not open_brackets:
        return (group_start, pos + 1)
  return (-1, -1)
def IsStrCanonicalInt(string):
  """Returns True if |string| is in its canonical integer form.

  The canonical form is such that str(int(string)) == string.
  """
  # Deliberately avoids regexps: this is called a lot, and the manual scan
  # is roughly twice as fast as an equivalent regex.
  if type(string) is not str:
    return False
  if not string:
    return False
  if string == "0":
    return True
  digits = string[1:] if string[0] == "-" else string
  if not digits:
    # A bare "-" is not an integer.
    return False
  # Canonical integers never have a leading zero (and "-0" is not canonical).
  if '1' <= digits[0] <= '9':
    return digits.isdigit()
  return False
# This matches things like "<(asdf)", "<!(cmd)", "<!@(cmd)", "<|(list)",
# "<!interpreter(arguments)", "<([list])", and even "<([)" and "<(<())".
# In the last case, the inner "<()" is captured in match['content'].
# Named groups: 'replace' is the full span to substitute, 'type' is the
# operator prefix (e.g. '<', '<!', '<!@', '<|'), 'command_string' is an
# optional interpreter name, 'is_array' holds '[' for list-form contents,
# and 'content' is the variable name or command text.
early_variable_re = re.compile(
r'(?P<replace>(?P<type><(?:(?:!?@?)|\|)?)'
r'(?P<command_string>[-a-zA-Z0-9_.]+)?'
r'\((?P<is_array>\s*\[?)'
r'(?P<content>.*?)(\]?)\))')
# This matches the same as early_variable_re, but with '>' instead of '<'.
late_variable_re = re.compile(
r'(?P<replace>(?P<type>>(?:(?:!?@?)|\|)?)'
r'(?P<command_string>[-a-zA-Z0-9_.]+)?'
r'\((?P<is_array>\s*\[?)'
r'(?P<content>.*?)(\]?)\))')
# This matches the same as early_variable_re, but with '^' instead of '<'.
latelate_variable_re = re.compile(
r'(?P<replace>(?P<type>[\^](?:(?:!?@?)|\|)?)'
r'(?P<command_string>[-a-zA-Z0-9_.]+)?'
r'\((?P<is_array>\s*\[?)'
r'(?P<content>.*?)(\]?)\))')
# Global cache of results from running commands so they don't have to be run
# more then once.
# Keyed by (command string, working directory) tuples; see ExpandVariables.
cached_command_results = {}
def FixupPlatformCommand(cmd):
  """Rewrite POSIX-only command prefixes for the host platform.

  On win32, a leading "cat " is translated to "type "; for list-form
  commands only the first element is rewritten.  Everywhere else the
  command is returned untouched.
  """
  if sys.platform != 'win32':
    return cmd
  if type(cmd) is list:
    return [re.sub('^cat ', 'type ', cmd[0])] + cmd[1:]
  return re.sub('^cat ', 'type ', cmd)
# Expansion phases: PHASE_EARLY handles '<' references during initial load,
# PHASE_LATE handles '>' references later, and PHASE_LATELATE handles '^'
# references last (see early_variable_re / late_variable_re /
# latelate_variable_re and ExpandVariables).
PHASE_EARLY = 0
PHASE_LATE = 1
PHASE_LATELATE = 2
def ExpandVariables(input, phase, variables, build_file):
"""Expand GYP variable and command references in |input|.

Handles '<'/'>'/'^' variable references (selected by |phase|), command
expansions ('<!(cmd)' and '<!pymod_do_main(...)'), list-file expansions
('<|(file ...)') and '@' list-context variants.  Returns a str, int, or
list; strings in canonical integer form are converted to int.
"""
# Look for the pattern that gets expanded into variables
if phase == PHASE_EARLY:
variable_re = early_variable_re
expansion_symbol = '<'
elif phase == PHASE_LATE:
variable_re = late_variable_re
expansion_symbol = '>'
elif phase == PHASE_LATELATE:
variable_re = latelate_variable_re
expansion_symbol = '^'
else:
assert False
# Coerce to str for scanning; canonical ints are returned as ints.
input_str = str(input)
if IsStrCanonicalInt(input_str):
return int(input_str)
# Do a quick scan to determine if an expensive regex search is warranted.
if expansion_symbol not in input_str:
return input_str
# Get the entire list of matches as a list of MatchObject instances.
# (using findall here would return strings instead of MatchObjects).
matches = list(variable_re.finditer(input_str))
if not matches:
return input_str
output = input_str
# Reverse the list of matches so that replacements are done right-to-left.
# That ensures that earlier replacements won't mess up the string in a
# way that causes later calls to find the earlier substituted text instead
# of what's intended for replacement.
matches.reverse()
for match_group in matches:
match = match_group.groupdict()
gyp.DebugOutput(gyp.DEBUG_VARIABLES, "Matches: %r", match)
# match['replace'] is the substring to look for, match['type']
# is the character code for the replacement type (< > <! >! <| >| <@
# >@ <!@ >!@), match['is_array'] contains a '[' for command
# arrays, and match['content'] is the name of the variable (< >)
# or command to run (<! >!). match['command_string'] is an optional
# command string. Currently, only 'pymod_do_main' is supported.
# run_command is true if a ! variant is used.
run_command = '!' in match['type']
command_string = match['command_string']
# file_list is true if a | variant is used.
file_list = '|' in match['type']
# Capture these now so we can adjust them later.
replace_start = match_group.start('replace')
replace_end = match_group.end('replace')
# Find the ending paren, and re-evaluate the contained string.
(c_start, c_end) = FindEnclosingBracketGroup(input_str[replace_start:])
# Adjust the replacement range to match the entire command
# found by FindEnclosingBracketGroup (since the variable_re
# probably doesn't match the entire command if it contained
# nested variables).
replace_end = replace_start + c_end
# Find the "real" replacement, matching the appropriate closing
# paren, and adjust the replacement start and end.
replacement = input_str[replace_start:replace_end]
# Figure out what the contents of the variable parens are.
contents_start = replace_start + c_start + 1
contents_end = replace_end - 1
contents = input_str[contents_start:contents_end]
# Do filter substitution now for <|().
# Admittedly, this is different than the evaluation order in other
# contexts. However, since filtration has no chance to run on <|(),
# this seems like the only obvious way to give them access to filters.
if file_list:
processed_variables = gyp.simple_copy.deepcopy(variables)
ProcessListFiltersInDict(contents, processed_variables)
# Recurse to expand variables in the contents
contents = ExpandVariables(contents, phase,
processed_variables, build_file)
else:
# Recurse to expand variables in the contents
contents = ExpandVariables(contents, phase, variables, build_file)
# Strip off leading/trailing whitespace so that variable matches are
# simpler below (and because they are rarely needed).
contents = contents.strip()
# expand_to_list is true if an @ variant is used. In that case,
# the expansion should result in a list. Note that the caller
# is to be expecting a list in return, and not all callers do
# because not all are working in list context. Also, for list
# expansions, there can be no other text besides the variable
# expansion in the input string.
expand_to_list = '@' in match['type'] and input_str == replacement
if run_command or file_list:
# Find the build file's directory, so commands can be run or file lists
# generated relative to it.
build_file_dir = os.path.dirname(build_file)
if build_file_dir == '' and not file_list:
# If build_file is just a leaf filename indicating a file in the
# current directory, build_file_dir might be an empty string. Set
# it to None to signal to subprocess.Popen that it should run the
# command in the current directory.
build_file_dir = None
# Support <|(listfile.txt ...) which generates a file
# containing items from a gyp list, generated at gyp time.
# This works around actions/rules which have more inputs than will
# fit on the command line.
if file_list:
if type(contents) is list:
contents_list = contents
else:
contents_list = contents.split(' ')
replacement = contents_list[0]
if os.path.isabs(replacement):
raise GypError('| cannot handle absolute paths, got "%s"' % replacement)
if not generator_filelist_paths:
path = os.path.join(build_file_dir, replacement)
else:
if os.path.isabs(build_file_dir):
toplevel = generator_filelist_paths['toplevel']
rel_build_file_dir = gyp.common.RelativePath(build_file_dir, toplevel)
else:
rel_build_file_dir = build_file_dir
qualified_out_dir = generator_filelist_paths['qualified_out_dir']
path = os.path.join(qualified_out_dir, rel_build_file_dir, replacement)
gyp.common.EnsureDirExists(path)
replacement = gyp.common.RelativePath(path, build_file_dir)
f = gyp.common.WriteOnDiff(path)
for i in contents_list[1:]:
f.write('%s\n' % i)
f.close()
elif run_command:
use_shell = True
if match['is_array']:
contents = eval(contents)
use_shell = False
# Check for a cached value to avoid executing commands, or generating
# file lists more than once. The cache key contains the command to be
# run as well as the directory to run it from, to account for commands
# that depend on their current directory.
# TODO(http://code.google.com/p/gyp/issues/detail?id=111): In theory,
# someone could author a set of GYP files where each time the command
# is invoked it produces different output by design. When the need
# arises, the syntax should be extended to support no caching off a
# command's output so it is run every time.
cache_key = (str(contents), build_file_dir)
cached_value = cached_command_results.get(cache_key, None)
if cached_value is None:
gyp.DebugOutput(gyp.DEBUG_VARIABLES,
"Executing command '%s' in directory '%s'",
contents, build_file_dir)
replacement = ''
if command_string == 'pymod_do_main':
# <!pymod_do_main(modulename param eters) loads |modulename| as a
# python module and then calls that module's DoMain() function,
# passing ["param", "eters"] as a single list argument. For modules
# that don't load quickly, this can be faster than
# <!(python modulename param eters). Do this in |build_file_dir|.
oldwd = os.getcwd() # Python doesn't like os.open('.'): no fchdir.
if build_file_dir: # build_file_dir may be None (see above).
os.chdir(build_file_dir)
try:
parsed_contents = shlex.split(contents)
try:
py_module = __import__(parsed_contents[0])
except ImportError as e:
raise GypError("Error importing pymod_do_main"
"module (%s): %s" % (parsed_contents[0], e))
replacement = str(py_module.DoMain(parsed_contents[1:])).rstrip()
finally:
os.chdir(oldwd)
assert replacement != None
elif command_string:
raise GypError("Unknown command string '%s' in '%s'." %
(command_string, contents))
else:
# Fix up command with platform specific workarounds.
contents = FixupPlatformCommand(contents)
p = subprocess.Popen(contents, shell=use_shell,
stdout=subprocess.PIPE,
stderr=subprocess.PIPE,
stdin=subprocess.PIPE,
cwd=build_file_dir)
p_stdout, p_stderr = p.communicate('')
if p.wait() != 0 or p_stderr:
sys.stderr.write(p_stderr)
# Simulate check_call behavior, since check_call only exists
# in python 2.5 and later.
raise GypError("Call to '%s' returned exit status %d." %
(contents, p.returncode))
replacement = p_stdout.rstrip()
cached_command_results[cache_key] = replacement
else:
gyp.DebugOutput(gyp.DEBUG_VARIABLES,
"Had cache value for command '%s' in directory '%s'",
contents,build_file_dir)
replacement = cached_value
else:
# Plain variable reference: look it up in |variables|.
if not contents in variables:
if contents[-1] in ['!', '/']:
# In order to allow cross-compiles (nacl) to happen more naturally,
# we will allow references to >(sources/) etc. to resolve to
# and empty list if undefined. This allows actions to:
# 'action!': [
# '>@(_sources!)',
# ],
# 'action/': [
# '>@(_sources/)',
# ],
replacement = []
else:
raise GypError('Undefined variable ' + contents +
' in ' + build_file)
else:
replacement = variables[contents]
if type(replacement) is list:
for item in replacement:
if not contents[-1] == '/' and type(item) not in (str, int):
raise GypError('Variable ' + contents +
' must expand to a string or list of strings; ' +
'list contains a ' +
item.__class__.__name__)
# Run through the list and handle variable expansions in it. Since
# the list is guaranteed not to contain dicts, this won't do anything
# with conditions sections.
ProcessVariablesAndConditionsInList(replacement, phase, variables,
build_file)
elif type(replacement) not in (str, int):
raise GypError('Variable ' + contents +
' must expand to a string or list of strings; ' +
'found a ' + replacement.__class__.__name__)
if expand_to_list:
# Expanding in list context. It's guaranteed that there's only one
# replacement to do in |input_str| and that it's this replacement. See
# above.
if type(replacement) is list:
# If it's already a list, make a copy.
output = replacement[:]
else:
# Split it the same way sh would split arguments.
output = shlex.split(str(replacement))
else:
# Expanding in string context.
encoded_replacement = ''
if type(replacement) is list:
# When expanding a list into string context, turn the list items
# into a string in a way that will work with a subprocess call.
#
# TODO(mark): This isn't completely correct. This should
# call a generator-provided function that observes the
# proper list-to-argument quoting rules on a specific
# platform instead of just calling the POSIX encoding
# routine.
encoded_replacement = gyp.common.EncodePOSIXShellList(replacement)
else:
encoded_replacement = replacement
output = output[:replace_start] + str(encoded_replacement) + \
output[replace_end:]
# Prepare for the next match iteration.
input_str = output
if output == input:
gyp.DebugOutput(gyp.DEBUG_VARIABLES,
"Found only identity matches on %r, avoiding infinite "
"recursion.",
output)
else:
# Look for more matches now that we've replaced some, to deal with
# expanding local variables (variables defined in the same
# variables block as this one).
gyp.DebugOutput(gyp.DEBUG_VARIABLES, "Found output %r, recursing.", output)
if type(output) is list:
if output and type(output[0]) is list:
# Leave output alone if it's a list of lists.
# We don't want such lists to be stringified.
pass
else:
new_output = []
for item in output:
new_output.append(
ExpandVariables(item, phase, variables, build_file))
output = new_output
else:
output = ExpandVariables(output, phase, variables, build_file)
# Convert all strings that are canonically-represented integers into integers.
if type(output) is list:
for index in range(0, len(output)):
if IsStrCanonicalInt(output[index]):
output[index] = int(output[index])
elif IsStrCanonicalInt(output):
output = int(output)
return output
# The same condition is often evaluated over and over again so it
# makes sense to cache as much as possible between evaluations.
# Maps condition expression strings to their compiled code objects.
cached_conditions_asts = {}
def EvalCondition(condition, conditions_key, phase, variables, build_file):
  """Evaluate a conditions-list entry.

  |condition| is a flat list of cond_expr/true_dict pairs, optionally
  followed by a trailing false_dict for the last pair.  Returns the dict
  that should be merged, or None if no condition matched.

  Raises:
    GypError: if |condition| is malformed.
  """
  if type(condition) is not list:
    raise GypError(conditions_key + ' must be a list')
  if len(condition) < 2:
    # It's possible that condition[0] won't work in which case this
    # attempt will raise its own IndexError.  That's probably fine.
    raise GypError(conditions_key + ' ' + condition[0] +
                   ' must be at least length 2, not ' + str(len(condition)))
  i = 0
  result = None
  while i < len(condition):
    cond_expr = condition[i]
    true_dict = condition[i + 1]
    if type(true_dict) is not dict:
      raise GypError('{} {} must be followed by a dictionary, not {}'.format(
          conditions_key, cond_expr, type(true_dict)))
    if len(condition) > i + 2 and type(condition[i + 2]) is dict:
      false_dict = condition[i + 2]
      i = i + 3
      if i != len(condition):
        raise GypError('{} {} has {} unexpected trailing items'.format(
            conditions_key, cond_expr, len(condition) - i))
    else:
      false_dict = None
      i = i + 2
    # Idiom fix: compare with None using 'is' (PEP 8), not '=='.
    # Only the first pair that produces a dict determines the result; later
    # pairs are still walked so their structure gets validated.
    if result is None:
      result = EvalSingleCondition(
          cond_expr, true_dict, false_dict, phase, variables, build_file)
  return result
def EvalSingleCondition(
cond_expr, true_dict, false_dict, phase, variables, build_file):
"""Returns true_dict if cond_expr evaluates to true, and false_dict
otherwise."""
# Do expansions on the condition itself. Since the condition can naturally
# contain variable references without needing to resort to GYP expansion
# syntax, this is of dubious value for variables, but someone might want to
# use a command expansion directly inside a condition.
cond_expr_expanded = ExpandVariables(cond_expr, phase, variables,
build_file)
if type(cond_expr_expanded) not in (str, int):
raise ValueError(
'Variable expansion in this context permits str and int ' + \
'only, found ' + cond_expr_expanded.__class__.__name__)
try:
# Reuse a previously compiled code object for this expression if one is
# cached; the same condition text recurs across many targets.
if cond_expr_expanded in cached_conditions_asts:
ast_code = cached_conditions_asts[cond_expr_expanded]
else:
ast_code = compile(cond_expr_expanded, '<string>', 'eval')
cached_conditions_asts[cond_expr_expanded] = ast_code
# Evaluate with builtins disabled; |variables| supplies the only names
# the expression may reference.
if eval(ast_code, {'__builtins__': None}, variables):
return true_dict
return false_dict
except SyntaxError:
e = sys.exc_info()[1]
# Re-raise with the build file and character position for context.
syntax_error = SyntaxError('%s while evaluating condition \'%s\' in %s '
'at character %d.' %
(str(e.args[0]), e.text, build_file, e.offset),
e.filename, e.lineno, e.offset, e.text)
raise syntax_error
except NameError:
e = sys.exc_info()[1]
gyp.common.ExceptionAppend(e, 'while evaluating condition \'%s\' in %s' %
(cond_expr_expanded, build_file))
raise GypError(e)
def ProcessConditionsInDict(the_dict, phase, variables, build_file):
  """Process a 'conditions' or 'target_conditions' section in the_dict.

  The section processed depends on phase:
    early -> conditions
    late -> target_conditions
    latelate -> no conditions

  Each item in a conditions list consists of cond_expr, a string expression
  evaluated as the condition, and true_dict, a dict that will be merged into
  the_dict if cond_expr evaluates to true.  Optionally, a third item,
  false_dict, may be present.  false_dict is merged into the_dict if
  cond_expr evaluates to false.

  Any dict merged into the_dict will be recursively processed for nested
  conditionals and other expansions, also according to phase, immediately
  prior to being merged.
  """
  if phase == PHASE_EARLY:
    conditions_key = 'conditions'
  elif phase == PHASE_LATE:
    conditions_key = 'target_conditions'
  elif phase == PHASE_LATELATE:
    return
  else:
    assert False
  # Idiom fix: 'x not in d' instead of 'not x in d'.
  if conditions_key not in the_dict:
    return
  conditions_list = the_dict[conditions_key]
  # Unhook the conditions list, it's no longer needed.
  del the_dict[conditions_key]
  for condition in conditions_list:
    merge_dict = EvalCondition(condition, conditions_key, phase, variables,
                               build_file)
    # Idiom fix: compare with None using 'is not' (PEP 8), not '!='.
    if merge_dict is not None:
      # Expand variables and nested conditionals in the merge_dict before
      # merging it.
      ProcessVariablesAndConditionsInDict(merge_dict, phase,
                                          variables, build_file)
      MergeDicts(the_dict, merge_dict, build_file, build_file)
def LoadAutomaticVariablesFromDict(variables, the_dict):
  """Copy automatic variables out of |the_dict| into |variables|.

  Any key in |the_dict| with a str, int, or list value becomes an automatic
  variable named '_' + key.

  PYK extension: if |the_dict| carries a 'toolset', every existing variable
  named '<toolset>_x' is also exposed as plain 'x', and '<toolset>_os'
  additionally populates 'OS'.
  """
  # Any keys with plain string values in the_dict become automatic variables.
  # The variable name is the key name with a "_" character prepended.
  for key, value in the_dict.items():
    if type(value) in (str, int, list):
      variables['_' + key] = value
  if 'toolset' in the_dict:
    prefix = '%s_' % the_dict['toolset']
    # Bug fix: snapshot the items before iterating.  The loop body inserts
    # new keys into |variables|, which raises "RuntimeError: dictionary
    # changed size during iteration" when iterating the live items() view.
    for key, value in list(variables.items()):
      if key.startswith(prefix) and isinstance(value, (str, int, list)):
        variables[key[len(prefix):]] = value
        if key == '%s_os' % the_dict['toolset']:
          variables['OS'] = value
def LoadVariablesFromVariablesDict(variables, the_dict, the_dict_key):
  """Merge the_dict's 'variables' sub-dict (if any) into |variables|.

  The variable name is the key name in the "variables" dict.  Keys ending
  with the '%' character are defaults: they are set only if currently unset
  in |variables|.  the_dict_key is the name of the key that accesses
  the_dict in the_dict's parent dict; if the_dict's parent is not a dict
  (it could be a list or it could be parentless because it is a root dict),
  the_dict_key will be None.
  """
  for key, value in the_dict.get('variables', {}).items():
    # Only plain data values become variables.
    if type(value) not in (str, int, list):
      continue
    if key.endswith('%'):
      variable_name = key[:-1]
      if variable_name in variables:
        # If the variable is already set, don't set it.
        continue
      # Bug fix: compare string contents with '==', not identity with 'is'.
      # Identity comparison with a literal relies on interning and is a
      # SyntaxWarning on Python 3.8+.
      if the_dict_key == 'variables' and variable_name in the_dict:
        # If the variable is set without a % in the_dict, and the_dict is a
        # variables dict (making |variables| a variables sub-dict of a
        # variables dict), use the_dict's definition.
        value = the_dict[variable_name]
    else:
      variable_name = key
    variables[variable_name] = value
def ProcessVariablesAndConditionsInDict(the_dict, phase, variables_in,
build_file, the_dict_key=None):
"""Handle all variable and command expansion and conditional evaluation.
This function is the public entry point for all variable expansions and
conditional evaluations. The variables_in dictionary will not be modified
by this function.

Args:
the_dict: dict to process in place.
phase: one of PHASE_EARLY, PHASE_LATE, PHASE_LATELATE.
variables_in: variables visible in this scope; copied, never mutated.
build_file: path of the build file that produced the_dict, used for
relative paths and error messages.
the_dict_key: key that accesses the_dict in its parent dict, or None.
"""
# Make a copy of the variables_in dict that can be modified during the
# loading of automatics and the loading of the variables dict.
variables = variables_in.copy()
LoadAutomaticVariablesFromDict(variables, the_dict)
if 'variables' in the_dict:
# Make sure all the local variables are added to the variables
# list before we process them so that you can reference one
# variable from another. They will be fully expanded by recursion
# in ExpandVariables.
for key, value in the_dict['variables'].items():
variables[key] = value
# Handle the associated variables dict first, so that any variable
# references within can be resolved prior to using them as variables.
# Pass a copy of the variables dict to avoid having it be tainted.
# Otherwise, it would have extra automatics added for everything that
# should just be an ordinary variable in this scope.
ProcessVariablesAndConditionsInDict(the_dict['variables'], phase,
variables, build_file, 'variables')
LoadVariablesFromVariablesDict(variables, the_dict, the_dict_key)
for key, value in the_dict.items():
# Skip "variables", which was already processed if present.
if key != 'variables' and type(value) is str:
expanded = ExpandVariables(value, phase, variables, build_file)
if type(expanded) not in (str, int):
raise ValueError(
'Variable expansion in this context permits str and int ' + \
'only, found ' + expanded.__class__.__name__ + ' for ' + key)
the_dict[key] = expanded
# Variable expansion may have resulted in changes to automatics. Reload.
# TODO(mark): Optimization: only reload if no changes were made.
variables = variables_in.copy()
LoadAutomaticVariablesFromDict(variables, the_dict)
LoadVariablesFromVariablesDict(variables, the_dict, the_dict_key)
# Process conditions in this dict. This is done after variable expansion
# so that conditions may take advantage of expanded variables. For example,
# if the_dict contains:
# {'type': '<(library_type)',
# 'conditions': [['_type=="static_library"', { ... }]]},
# _type, as used in the condition, will only be set to the value of
# library_type if variable expansion is performed before condition
# processing. However, condition processing should occur prior to recursion
# so that variables (both automatic and "variables" dict type) may be
# adjusted by conditions sections, merged into the_dict, and have the
# intended impact on contained dicts.
#
# This arrangement means that a "conditions" section containing a "variables"
# section will only have those variables effective in subdicts, not in
# the_dict. The workaround is to put a "conditions" section within a
# "variables" section. For example:
# {'conditions': [['os=="mac"', {'variables': {'define': 'IS_MAC'}}]],
# 'defines': ['<(define)'],
# 'my_subdict': {'defines': ['<(define)']}},
# will not result in "IS_MAC" being appended to the "defines" list in the
# current scope but would result in it being appended to the "defines" list
# within "my_subdict". By comparison:
# {'variables': {'conditions': [['os=="mac"', {'define': 'IS_MAC'}]]},
# 'defines': ['<(define)'],
# 'my_subdict': {'defines': ['<(define)']}},
# will append "IS_MAC" to both "defines" lists.
# Evaluate conditions sections, allowing variable expansions within them
# as well as nested conditionals. This will process a 'conditions' or
# 'target_conditions' section, perform appropriate merging and recursive
# conditional and variable processing, and then remove the conditions section
# from the_dict if it is present.
ProcessConditionsInDict(the_dict, phase, variables, build_file)
# Conditional processing may have resulted in changes to automatics or the
# variables dict. Reload.
variables = variables_in.copy()
LoadAutomaticVariablesFromDict(variables, the_dict)
LoadVariablesFromVariablesDict(variables, the_dict, the_dict_key)
# Recurse into child dicts, or process child lists which may result in
# further recursion into descendant dicts.
for key, value in the_dict.items():
# Skip "variables" and string values, which were already processed if
# present.
if key == 'variables' or type(value) is str:
continue
if type(value) is dict:
# Pass a copy of the variables dict so that subdicts can't influence
# parents.
ProcessVariablesAndConditionsInDict(value, phase, variables,
build_file, key)
elif type(value) is list:
# The list itself can't influence the variables dict, and
# ProcessVariablesAndConditionsInList will make copies of the variables
# dict if it needs to pass it to something that can influence it. No
# copy is necessary here.
ProcessVariablesAndConditionsInList(value, phase, variables,
build_file)
elif type(value) is not int:
raise TypeError('Unknown type ' + value.__class__.__name__ + \
' for ' + key)
def ProcessVariablesAndConditionsInList(the_list, phase, variables,
                                        build_file):
  """Expand variables and evaluate conditions for every item in the_list.

  Strings are expanded in place; a string that expands to a list is spliced
  into the_list.  Dicts and nested lists are processed recursively.  Ints
  pass through unchanged; any other type raises TypeError.
  """
  # Iterate using an index so that new values can be assigned into the_list.
  index = 0
  while index < len(the_list):
    item = the_list[index]
    if type(item) is dict:
      # Make a copy of the variables dict so that it won't influence anything
      # outside of its own scope.
      ProcessVariablesAndConditionsInDict(item, phase, variables, build_file)
    elif type(item) is list:
      ProcessVariablesAndConditionsInList(item, phase, variables, build_file)
    elif type(item) is str:
      expanded = ExpandVariables(item, phase, variables, build_file)
      if type(expanded) in (str, int):
        the_list[index] = expanded
      elif type(expanded) is list:
        # Splice the expansion into place and skip past it.
        the_list[index:index+1] = expanded
        index += len(expanded)
        # index now identifies the next item to examine.  Continue right now
        # without falling into the index increment below.
        continue
      else:
        # Bug fix: |index| is an int; the original concatenated it directly
        # to a str, raising a confusing TypeError instead of this ValueError.
        raise ValueError(
            'Variable expansion in this context permits strings and ' + \
            'lists only, found ' + expanded.__class__.__name__ + ' at ' + \
            str(index))
    elif type(item) is not int:
      # Bug fix: same str+int concatenation problem as above; use str(index).
      raise TypeError('Unknown type ' + item.__class__.__name__ + \
                      ' at index ' + str(index))
    index = index + 1
def BuildTargetsDict(data):
  """Builds a dict mapping fully-qualified target names to their target dicts.

  |data| maps loaded build-file pathnames (relative to the current
  directory) to build file contents.  Every target dict found under a
  build file's "targets" key is registered under the fully-qualified name
  built from the build file path, its "target_name", and its "toolset".

  Raises:
    GypError: if the same fully-qualified name occurs twice.
  """
  targets = {}
  for build_file in data['target_build_files']:
    build_file_dict = data[build_file]
    for target_dict in build_file_dict.get('targets', []):
      qualified = gyp.common.QualifiedTarget(build_file,
                                             target_dict['target_name'],
                                             target_dict['toolset'])
      if qualified in targets:
        raise GypError('Duplicate target definitions for ' + qualified)
      targets[qualified] = target_dict
  return targets
def QualifyDependencies(targets):
"""Make dependency links fully-qualified relative to the current directory.
|targets| is a dict mapping fully-qualified target names to their target
dicts. For each target in this dict, keys known to contain dependency
links are examined, and any dependencies referenced will be rewritten
so that they are fully-qualified and relative to the current directory.
All rewritten dependencies are suitable for use as keys to |targets| or a
similar dict.
"""
# Each dependency section also has '!' (exclusion) and '/' (regex) variants.
all_dependency_sections = [dep + op
for dep in dependency_sections
for op in ('', '!', '/')]
for target, target_dict in targets.items():
target_build_file = gyp.common.BuildFile(target)
toolset = target_dict['toolset']
for dependency_key in all_dependency_sections:
dependencies = target_dict.get(dependency_key, [])
# Rewrite each entry in place as a fully-qualified name.
for index in range(0, len(dependencies)):
dep_file, dep_target, dep_toolset = gyp.common.ResolveTarget(
target_build_file, dependencies[index], toolset)
if not multiple_toolsets:
# Ignore toolset specification in the dependency if it is specified.
dep_toolset = toolset
dependency = gyp.common.QualifiedTarget(dep_file,
dep_target,
dep_toolset)
dependencies[index] = dependency
# Make sure anything appearing in a list other than "dependencies" also
# appears in the "dependencies" list.
if dependency_key != 'dependencies' and \
dependency not in target_dict['dependencies']:
raise GypError('Found ' + dependency + ' in ' + dependency_key +
' of ' + target + ', but not in dependencies')
def ExpandWildcardDependencies(targets, data):
"""Expands dependencies specified as build_file:*.
For each target in |targets|, examines sections containing links to other
targets. If any such section contains a link of the form build_file:*, it
is taken as a wildcard link, and is expanded to list each target in
build_file. The |data| dict provides access to build file dicts.
Any target that does not wish to be included by wildcard can provide an
optional "suppress_wildcard" key in its target dict. When present and
true, a wildcard dependency link will not include such targets.
All dependency names, including the keys to |targets| and the values in each
dependency list, must be qualified when this function is called.
"""
for target, target_dict in targets.items():
toolset = target_dict['toolset']
target_build_file = gyp.common.BuildFile(target)
for dependency_key in dependency_sections:
dependencies = target_dict.get(dependency_key, [])
# Loop this way instead of "for dependency in" or "for index in range"
# because the dependencies list will be modified within the loop body.
index = 0
while index < len(dependencies):
(dependency_build_file, dependency_target, dependency_toolset) = \
gyp.common.ParseQualifiedTarget(dependencies[index])
if dependency_target != '*' and dependency_toolset != '*':
# Not a wildcard. Keep it moving.
index = index + 1
continue
if dependency_build_file == target_build_file:
# It's an error for a target to depend on all other targets in
# the same file, because a target cannot depend on itself.
raise GypError('Found wildcard in ' + dependency_key + ' of ' +
target + ' referring to same build file')
# Take the wildcard out and adjust the index so that the next
# dependency in the list will be processed the next time through the
# loop.
del dependencies[index]
index = index - 1
# Loop through the targets in the other build file, adding them to
# this target's list of dependencies in place of the removed
# wildcard.
dependency_target_dicts = data[dependency_build_file]['targets']
for dependency_target_dict in dependency_target_dicts:
if int(dependency_target_dict.get('suppress_wildcard', False)):
continue
dependency_target_name = dependency_target_dict['target_name']
# '*' in the target position matches every name; a concrete name
# (with a '*' toolset) must match exactly.
if (dependency_target != '*' and
dependency_target != dependency_target_name):
continue
dependency_target_toolset = dependency_target_dict['toolset']
if (dependency_toolset != '*' and
dependency_toolset != dependency_target_toolset):
continue
dependency = gyp.common.QualifiedTarget(dependency_build_file,
dependency_target_name,
dependency_target_toolset)
index = index + 1
dependencies.insert(index, dependency)
index = index + 1
def Unify(l):
  """Return l with duplicates removed, keeping each first occurrence."""
  seen = set()
  unique = []
  for element in l:
    if element not in seen:
      seen.add(element)
      unique.append(element)
  return unique
def RemoveDuplicateDependencies(targets):
  """Makes sure every dependency appears only once in all targets's dependency
  lists."""
  # Idiom fix: iterate values() directly; the original iterated items() but
  # never used the target name.
  for target_dict in targets.values():
    for dependency_key in dependency_sections:
      dependencies = target_dict.get(dependency_key, [])
      if dependencies:
        # Unify preserves order while dropping repeated entries.
        target_dict[dependency_key] = Unify(dependencies)
def Filter(l, item):
  """Return a copy of l with every occurrence of item removed."""
  return [element for element in l if element != item]
def RemoveSelfDependencies(targets):
  """Remove self dependencies from targets that have the
  prune_self_dependency variable set."""
  for target_name, target_dict in targets.items():
    for dependency_key in dependency_sections:
      dependencies = target_dict.get(dependency_key, [])
      if not dependencies:
        continue
      for dependency in dependencies:
        if dependency != target_name:
          continue
        # This target depends on itself; drop the edge when asked to.
        if targets[dependency].get('variables', {}).get(
            'prune_self_dependency', 0):
          target_dict[dependency_key] = Filter(dependencies, target_name)
def RemoveLinkDependenciesFromNoneTargets(targets):
  """Remove dependencies having the 'link_dependency' attribute from the 'none'
  targets."""
  # Idiom/perf fix: the original iterated items() without using the key, and
  # re-tested the loop-invariant "is this a 'none' target?" condition once
  # per dependency.  Hoist the check to the target level.
  for target_dict in targets.values():
    if target_dict.get('type', None) != 'none':
      continue
    for dependency_key in dependency_sections:
      dependencies = target_dict.get(dependency_key, [])
      if dependencies:
        for t in dependencies:
          if targets[t].get('variables', {}).get('link_dependency', 0):
            target_dict[dependency_key] = \
                Filter(target_dict[dependency_key], t)
class DependencyGraphNode(object):
  """A node in a dependency graph of targets (or .gyp files).

  Attributes:
    ref: A reference to an object that this DependencyGraphNode represents.
    dependencies: List of DependencyGraphNodes on which this one depends.
    dependents: List of DependencyGraphNodes that depend on this one.
  """
  class CircularException(GypError):
    """Raised when a cycle is detected in the dependency graph."""
    pass
  def __init__(self, ref):
    self.ref = ref
    self.dependencies = []
    self.dependents = []
  def __repr__(self):
    return '<DependencyGraphNode: %r>' % self.ref
  def FlattenToList(self):
    """Returns a topologically sorted list of the refs of this node's
    dependents: every ref appears after all of its dependencies and before
    all of its dependents (Kahn's algorithm).

    Nodes participating in a cycle never reach in-degree zero and are simply
    absent from the result; callers detect cycles by comparing the result's
    length against the expected node count.
    """
    # flat_list is the sorted list of dependencies - actually, the list items
    # are the "ref" attributes of DependencyGraphNodes. Every target will
    # appear in flat_list after all of its dependencies, and before all of its
    # dependents.
    flat_list = OrderedSet()
    # in_degree_zeros is the list of DependencyGraphNodes that have no
    # dependencies not in flat_list. Initially, it is a copy of the children
    # of this node, because when the graph was built, nodes with no
    # dependencies were made implicit dependents of the root node.
    in_degree_zeros = set(self.dependents[:])
    while in_degree_zeros:
      # Nodes in in_degree_zeros have no dependencies not in flat_list, so they
      # can be appended to flat_list. Take these nodes out of in_degree_zeros
      # as work progresses, so that the next node to process from the list can
      # always be accessed at a consistent position.
      node = in_degree_zeros.pop()
      flat_list.add(node.ref)
      # Look at dependents of the node just added to flat_list. Some of them
      # may now belong in in_degree_zeros.
      for node_dependent in node.dependents:
        is_in_degree_zero = True
        # TODO: We want to check through the
        # node_dependent.dependencies list but if it's long and we
        # always start at the beginning, then we get O(n^2) behaviour.
        for node_dependent_dependency in node_dependent.dependencies:
          if not node_dependent_dependency.ref in flat_list:
            # The dependent one or more dependencies not in flat_list. There
            # will be more chances to add it to flat_list when examining
            # it again as a dependent of those other dependencies, provided
            # that there are no cycles.
            is_in_degree_zero = False
            break
        if is_in_degree_zero:
          # All of the dependent's dependencies are already in flat_list. Add
          # it to in_degree_zeros where it will be processed in a future
          # iteration of the outer loop.
          in_degree_zeros.add(node_dependent)
    return list(flat_list)
  def FindCycles(self):
    """
    Returns a list of cycles in the graph, where each cycle is its own list.
    """
    results = []
    visited = set()
    def Visit(node, path):
      # Depth-first walk along "dependents" edges; seeing a node already on
      # the current path means a cycle has been closed.
      for child in node.dependents:
        if child in path:
          results.append([child] + path[:path.index(child) + 1])
        elif not child in visited:
          visited.add(child)
          Visit(child, [child] + path)
    visited.add(self)
    Visit(self, [self])
    return results
  def DirectDependencies(self, dependencies=None):
    """Returns a list of just direct dependencies."""
    if dependencies == None:
      dependencies = []
    for dependency in self.dependencies:
      # Check for None, corresponding to the root node.
      if dependency.ref != None and dependency.ref not in dependencies:
        dependencies.append(dependency.ref)
    return dependencies
  def _AddImportedDependencies(self, targets, dependencies=None):
    """Given a list of direct dependencies, adds indirect dependencies that
    other dependencies have declared to export their settings.
    This method does not operate on self. Rather, it operates on the list
    of dependencies in the |dependencies| argument. For each dependency in
    that list, if any declares that it exports the settings of one of its
    own dependencies, those dependencies whose settings are "passed through"
    are added to the list. As new items are added to the list, they too will
    be processed, so it is possible to import settings through multiple levels
    of dependencies.
    This method is not terribly useful on its own, it depends on being
    "primed" with a list of direct dependencies such as one provided by
    DirectDependencies. DirectAndImportedDependencies is intended to be the
    public entry point.
    """
    if dependencies == None:
      dependencies = []
    index = 0
    while index < len(dependencies):
      dependency = dependencies[index]
      dependency_dict = targets[dependency]
      # Add any dependencies whose settings should be imported to the list
      # if not already present. Newly-added items will be checked for
      # their own imports when the list iteration reaches them.
      # Rather than simply appending new items, insert them after the
      # dependency that exported them. This is done to more closely match
      # the depth-first method used by DeepDependencies.
      add_index = 1
      for imported_dependency in \
          dependency_dict.get('export_dependent_settings', []):
        if imported_dependency not in dependencies:
          dependencies.insert(index + add_index, imported_dependency)
          add_index = add_index + 1
      index = index + 1
    return dependencies
  def DirectAndImportedDependencies(self, targets, dependencies=None):
    """Returns a list of a target's direct dependencies and all indirect
    dependencies that a dependency has advertised settings should be exported
    through the dependency for.
    """
    dependencies = self.DirectDependencies(dependencies)
    return self._AddImportedDependencies(targets, dependencies)
  def DeepDependencies(self, dependencies=None):
    """Returns an OrderedSet of all of a target's dependencies, recursively."""
    if dependencies is None:
      # Using a list to get ordered output and a set to do fast "is it
      # already added" checks.
      dependencies = OrderedSet()
    for dependency in self.dependencies:
      # Check for None, corresponding to the root node.
      if dependency.ref is None:
        continue
      if dependency.ref not in dependencies:
        # Recurse only on first sight so shared subtrees are walked once.
        dependencies.add(dependency.ref)
        dependency.DeepDependencies(dependencies)
    return dependencies
  def _LinkDependenciesInternal(self, targets, include_shared_libraries,
                                dependencies=None, initial=True):
    """Returns an OrderedSet of dependency targets that are linked
    into this target.
    This function has a split personality, depending on the setting of
    |initial|. Outside callers should always leave |initial| at its default
    setting.
    When adding a target to the list of dependencies, this function will
    recurse into itself with |initial| set to False, to collect dependencies
    that are linked into the linkable target for which the list is being built.
    If |include_shared_libraries| is False, the resulting dependencies will not
    include shared_library targets that are linked into this target.
    """
    if dependencies is None:
      # Using a list to get ordered output and a set to do fast "is it
      # already added" checks.
      dependencies = OrderedSet()
    # Check for None, corresponding to the root node.
    if self.ref is None:
      return dependencies
    # It's kind of sucky that |targets| has to be passed into this function,
    # but that's presently the easiest way to access the target dicts so that
    # this function can find target types.
    if 'target_name' not in targets[self.ref]:
      raise GypError("Missing 'target_name' field in target.")
    if 'type' not in targets[self.ref]:
      raise GypError("Missing 'type' field in target %s" %
                     targets[self.ref]['target_name'])
    target_type = targets[self.ref]['type']
    is_linkable = target_type in linkable_types
    if initial and not is_linkable:
      # If this is the first target being examined and it's not linkable,
      # return an empty list of link dependencies, because the link
      # dependencies are intended to apply to the target itself (initial is
      # True) and this target won't be linked.
      return dependencies
    # Don't traverse 'none' targets if explicitly excluded.
    if (target_type == 'none' and
        not targets[self.ref].get('dependencies_traverse', True)):
      dependencies.add(self.ref)
      return dependencies
    # Executables and loadable modules are already fully and finally linked.
    # Nothing else can be a link dependency of them, there can only be
    # dependencies in the sense that a dependent target might run an
    # executable or load the loadable_module.
    if not initial and target_type in ('executable', 'loadable_module'):
      return dependencies
    # Shared libraries are already fully linked. They should only be included
    # in |dependencies| when adjusting static library dependencies (in order to
    # link against the shared_library's import lib), but should not be included
    # in |dependencies| when propagating link_settings.
    # The |include_shared_libraries| flag controls which of these two cases we
    # are handling.
    if (not initial and target_type == 'shared_library' and
        not include_shared_libraries):
      return dependencies
    # The target is linkable, add it to the list of link dependencies.
    if self.ref not in dependencies:
      dependencies.add(self.ref)
      if initial or not is_linkable:
        # If this is a subsequent target and it's linkable, don't look any
        # further for linkable dependencies, as they'll already be linked into
        # this target linkable. Always look at dependencies of the initial
        # target, and always look at dependencies of non-linkables.
        for dependency in self.dependencies:
          dependency._LinkDependenciesInternal(targets,
                                               include_shared_libraries,
                                               dependencies, False)
    return dependencies
  def DependenciesForLinkSettings(self, targets):
    """
    Returns a list of dependency targets whose link_settings should be merged
    into this target.
    """
    # TODO(sbaig) Currently, chrome depends on the bug that shared libraries'
    # link_settings are propagated. So for now, we will allow it, unless the
    # 'allow_sharedlib_linksettings_propagation' flag is explicitly set to
    # False. Once chrome is fixed, we can remove this flag.
    include_shared_libraries = \
        targets[self.ref].get('allow_sharedlib_linksettings_propagation', True)
    return self._LinkDependenciesInternal(targets, include_shared_libraries)
  def DependenciesToLinkAgainst(self, targets):
    """
    Returns a list of dependency targets that are linked into this target.
    """
    return self._LinkDependenciesInternal(targets, True)
def BuildDependencyList(targets):
  """Builds a dependency graph for |targets|.

  Returns a two-element list: a dict mapping each target name to its
  DependencyGraphNode, and a flat list of target names sorted so that every
  target appears after all of its dependencies.

  Raises:
    GypError: if a target depends on a target that was never loaded.
    DependencyGraphNode.CircularException: if the graph contains a cycle.
  """
  # Create a DependencyGraphNode for each target. Put it into a dict for easy
  # access.
  dependency_nodes = {}
  for target, spec in targets.items():
    if target not in dependency_nodes:
      dependency_nodes[target] = DependencyGraphNode(target)
  # Set up the dependency links. Targets that have no dependencies are treated
  # as dependent on root_node.
  root_node = DependencyGraphNode(None)
  for target, spec in targets.items():
    target_node = dependency_nodes[target]
    dependencies = spec.get('dependencies')
    if not dependencies:
      target_node.dependencies = [root_node]
      root_node.dependents.append(target_node)
    else:
      for dependency in dependencies:
        dependency_node = dependency_nodes.get(dependency)
        if not dependency_node:
          raise GypError("Dependency '%s' not found while "
                         "trying to load target %s" % (dependency, target))
        target_node.dependencies.append(dependency_node)
        dependency_node.dependents.append(target_node)
  flat_list = root_node.FlattenToList()
  # If there's anything left unvisited, there must be a circular dependency
  # (cycle).
  if len(flat_list) != len(targets):
    if not root_node.dependents:
      # If all targets have dependencies, add the first target as a dependent
      # of root_node so that the cycle can be discovered from root_node.
      # next(iter(...)) rather than targets.keys()[0]: dict views are not
      # indexable on Python 3.
      target = next(iter(targets))
      target_node = dependency_nodes[target]
      target_node.dependencies.append(root_node)
      root_node.dependents.append(target_node)
    cycles = []
    for cycle in root_node.FindCycles():
      paths = [node.ref for node in cycle]
      cycles.append('Cycle: %s' % ' -> '.join(paths))
    raise DependencyGraphNode.CircularException(
        'Cycles in dependency graph detected:\n' + '\n'.join(cycles))
  return [dependency_nodes, flat_list]
def VerifyNoGYPFileCircularDependencies(targets):
  """Verifies that the .gyp files containing |targets| form an acyclic graph.

  Raises:
    GypError: if a dependency's .gyp file cannot be resolved.
    DependencyGraphNode.CircularException: if the .gyp file graph has a cycle.
  """
  # Create a DependencyGraphNode for each gyp file containing a target. Put
  # it into a dict for easy access.
  dependency_nodes = {}
  for target in targets.keys():
    build_file = gyp.common.BuildFile(target)
    if not build_file in dependency_nodes:
      dependency_nodes[build_file] = DependencyGraphNode(build_file)
  # Set up the dependency links.
  for target, spec in targets.items():
    build_file = gyp.common.BuildFile(target)
    build_file_node = dependency_nodes[build_file]
    target_dependencies = spec.get('dependencies', [])
    for dependency in target_dependencies:
      try:
        dependency_build_file = gyp.common.BuildFile(dependency)
      except GypError:
        e = sys.exc_info()[1]
        gyp.common.ExceptionAppend(
            e, 'while computing dependencies of .gyp file %s' % build_file)
        raise
      if dependency_build_file == build_file:
        # A .gyp file is allowed to refer back to itself.
        continue
      dependency_node = dependency_nodes.get(dependency_build_file)
      if not dependency_node:
        # "Dependency" was previously misspelled "Dependancy" in this message.
        raise GypError("Dependency '%s' not found" % dependency_build_file)
      if dependency_node not in build_file_node.dependencies:
        build_file_node.dependencies.append(dependency_node)
        dependency_node.dependents.append(build_file_node)
  # Files that have no dependencies are treated as dependent on root_node.
  root_node = DependencyGraphNode(None)
  for build_file_node in dependency_nodes.values():
    if len(build_file_node.dependencies) == 0:
      build_file_node.dependencies.append(root_node)
      root_node.dependents.append(build_file_node)
  flat_list = root_node.FlattenToList()
  # If there's anything left unvisited, there must be a circular dependency
  # (cycle).
  if len(flat_list) != len(dependency_nodes):
    if not root_node.dependents:
      # If all files have dependencies, add the first file as a dependent
      # of root_node so that the cycle can be discovered from root_node.
      # next(iter(...)) rather than .values()[0]: dict views are not
      # indexable on Python 3.
      file_node = next(iter(dependency_nodes.values()))
      file_node.dependencies.append(root_node)
      root_node.dependents.append(file_node)
    cycles = []
    for cycle in root_node.FindCycles():
      paths = [node.ref for node in cycle]
      cycles.append('Cycle: %s' % ' -> '.join(paths))
    raise DependencyGraphNode.CircularException(
        'Cycles in .gyp file dependency graph detected:\n' + '\n'.join(cycles))
def DoDependentSettings(key, flat_list, targets, dependency_nodes):
  """Merges each target's |key| section into the targets that depend on it.

  |key| must be one of 'all_dependent_settings',
  'direct_dependent_settings', or 'link_settings'; it selects which slice of
  the dependency graph supplies the dicts to merge.
  """
  for target in flat_list:
    target_dict = targets[target]
    build_file = gyp.common.BuildFile(target)
    node = dependency_nodes[target]
    if key == 'all_dependent_settings':
      dependencies = node.DeepDependencies()
    elif key == 'direct_dependent_settings':
      dependencies = node.DirectAndImportedDependencies(targets)
    elif key == 'link_settings':
      dependencies = node.DependenciesForLinkSettings(targets)
    else:
      raise GypError("DoDependentSettings doesn't know how to determine "
                     'dependencies for ' + key)
    for dependency in dependencies:
      dependency_dict = targets[dependency]
      if key not in dependency_dict:
        continue
      MergeDicts(target_dict, dependency_dict[key],
                 build_file, gyp.common.BuildFile(dependency))
def AdjustStaticLibraryDependencies(flat_list, targets, dependency_nodes,
                                    sort_dependencies):
  """Rewrites each target's 'dependencies' list for linking purposes."""
  # Recompute target "dependencies" properties. For each static library
  # target, remove "dependencies" entries referring to other static libraries,
  # unless the dependency has the "hard_dependency" attribute set. For each
  # linkable target, add a "dependencies" entry referring to all of the
  # target's computed list of link dependencies (including static libraries)
  # if no such entry is already present.
  for target in flat_list:
    target_dict = targets[target]
    target_type = target_dict['type']
    if target_type == 'static_library':
      if not 'dependencies' in target_dict:
        continue
      # Preserve the pre-adjustment list for generators that need it.
      target_dict['dependencies_original'] = target_dict.get(
          'dependencies', [])[:]
      # A static library should not depend on another static library unless
      # the dependency relationship is "hard," which should only be done when
      # a dependent relies on some side effect other than just the build
      # product, like a rule or action output. Further, if a target has a
      # non-hard dependency, but that dependency exports a hard dependency,
      # the non-hard dependency can safely be removed, but the exported hard
      # dependency must be added to the target to keep the same dependency
      # ordering.
      dependencies = \
          dependency_nodes[target].DirectAndImportedDependencies(targets)
      index = 0
      while index < len(dependencies):
        dependency = dependencies[index]
        dependency_dict = targets[dependency]
        # Remove every non-hard static library dependency and remove every
        # non-static library dependency that isn't a direct dependency.
        if (dependency_dict['type'] == 'static_library' and \
            not dependency_dict.get('hard_dependency', False)) or \
           (dependency_dict['type'] != 'static_library' and \
            not dependency in target_dict['dependencies']):
          # Take the dependency out of the list, and don't increment index
          # because the next dependency to analyze will shift into the index
          # formerly occupied by the one being removed.
          del dependencies[index]
        else:
          index = index + 1
      # Update the dependencies. If the dependencies list is empty, it's not
      # needed, so unhook it.
      if len(dependencies) > 0:
        target_dict['dependencies'] = dependencies
      else:
        del target_dict['dependencies']
    elif target_type in linkable_types:
      # Get a list of dependency targets that should be linked into this
      # target. Add them to the dependencies list if they're not already
      # present.
      link_dependencies = \
          dependency_nodes[target].DependenciesToLinkAgainst(targets)
      for dependency in link_dependencies:
        if dependency == target:
          continue
        if not 'dependencies' in target_dict:
          target_dict['dependencies'] = []
        if not dependency in target_dict['dependencies']:
          target_dict['dependencies'].append(dependency)
      # Sort the dependencies list in the order from dependents to dependencies.
      # e.g. If A and B depend on C and C depends on D, sort them in A, B, C, D.
      # Note: flat_list is already sorted in the order from dependencies to
      # dependents.
      if sort_dependencies and 'dependencies' in target_dict:
        target_dict['dependencies'] = [dep for dep in reversed(flat_list)
                                       if dep in target_dict['dependencies']]
# Initialize this here to speed up MakePathRelative.
# Matches items MakePathRelative returns unmodified: an optional leading
# quote followed by one of - / $ < > ^ (see the comment in MakePathRelative
# for what each character means).
exception_re = re.compile(r'''["']?[-/$<>^]''')
def MakePathRelative(to_file, fro_file, item):
  """Rebases relative path |item| from |fro_file|'s directory so that it is
  relative to |to_file|'s directory.

  Exception: any |item| that begins with these special characters is
  returned without modification:
    /  Used when a path is already absolute (shortcut optimization;
       such paths would be returned as absolute anyway)
    $  Used for build environment variables
    -  Used for some build environment flags (such as -lapr-1 in a
       "libraries" section)
    <  Used for our own variable and command expansions (see ExpandVariables)
    >  Used for our own variable and command expansions (see ExpandVariables)
    ^  Used for our own variable and command expansions (see ExpandVariables)
    "' Used when a value is quoted.  If these are present, then we
       check the second character instead.
  """
  if to_file == fro_file or exception_re.match(item):
    return item
  # TODO(dglazkov) The backslash/forward-slash replacement at the end is a
  # temporary measure. This should really be addressed by keeping all paths
  # in POSIX until actual project generation.
  rebased = os.path.join(
      gyp.common.RelativePath(os.path.dirname(fro_file),
                              os.path.dirname(to_file)),
      item)
  rebased = os.path.normpath(rebased).replace('\\', '/')
  # normpath strips a trailing slash; restore it when |item| had one.
  if item[-1] == '/':
    rebased += '/'
  return rebased
def MergeLists(to, fro, to_file, fro_file, is_paths=False, append=True):
  """Merges list |fro| into list |to|, in place.

  If |is_paths| is true, string items are rebased with MakePathRelative so
  that paths relative to |fro_file| become relative to |to_file|.  If
  |append| is true, new items go at the end of |to|; otherwise they are
  prepended in |fro| order.  "Singleton" items (anything except a string
  beginning with '-') appear at most once in the merged list.  Dicts and
  sublists from |fro| are copied, not shared.

  Raises:
    TypeError: if |fro| contains an item of an unsupported type.
  """
  # Python documentation recommends objects which do not support hash
  # set this value to None. Python library objects follow this rule.
  is_hashable = lambda val: val.__hash__
  # If x is hashable, returns whether x is in s. Else returns whether x is in l.
  def is_in_set_or_list(x, s, l):
    if is_hashable(x):
      return x in s
    return x in l
  prepend_index = 0
  # Make membership testing of hashables in |to| (in particular, strings)
  # faster.
  hashable_to_set = set(x for x in to if is_hashable(x))
  for item in fro:
    singleton = False
    if type(item) in (str, int):
      # The cheap and easy case.
      if is_paths:
        to_item = MakePathRelative(to_file, fro_file, item)
      else:
        to_item = item
      if not (type(item) is str and item.startswith('-')):
        # Any string that doesn't begin with a "-" is a singleton - it can
        # only appear once in a list, to be enforced by the list merge append
        # or prepend.
        singleton = True
    elif type(item) is dict:
      # Make a copy of the dictionary, continuing to look for paths to fix.
      # The other intelligent aspects of merge processing won't apply because
      # item is being merged into an empty dict.
      to_item = {}
      MergeDicts(to_item, item, to_file, fro_file)
    elif type(item) is list:
      # Recurse, making a copy of the list.  If the list contains any
      # descendant dicts, path fixing will occur.  Note that here, custom
      # values for is_paths and append are dropped; those are only to be
      # applied to |to| and |fro|, not sublists of |fro|.  append shouldn't
      # matter anyway because the new |to_item| list is empty.
      to_item = []
      MergeLists(to_item, item, to_file, fro_file)
    else:
      raise TypeError(
          'Attempt to merge list item of unsupported type ' + \
          item.__class__.__name__)
    if append:
      # If appending a singleton that's already in the list, don't append.
      # This ensures that the earliest occurrence of the item will stay put.
      if not singleton or not is_in_set_or_list(to_item, hashable_to_set, to):
        to.append(to_item)
        if is_hashable(to_item):
          hashable_to_set.add(to_item)
    else:
      # If prepending a singleton that's already in the list, remove the
      # existing instance and proceed with the prepend.  This ensures that the
      # item appears at the earliest possible position in the list.
      while singleton and to_item in to:
        to.remove(to_item)
      # Don't just insert everything at index 0.  That would prepend the new
      # items to the list in reverse order, which would be an unwelcome
      # surprise.
      to.insert(prepend_index, to_item)
      if is_hashable(to_item):
        hashable_to_set.add(to_item)
      prepend_index = prepend_index + 1
def MergeDicts(to, fro, to_file, fro_file):
  """Merges dict |fro| into dict |to|, in place.

  Scalars overwrite, dicts merge recursively, and lists are dispatched to
  MergeLists with a policy chosen by the last character of the from-key
  ('=' replace, '+' prepend, '?' only if absent, otherwise append).  Values
  are copied rather than shared so later merges can't alias structure.

  Raises:
    TypeError: on a merge between incompatible value types.
    GypError: when two incompatible list policies target the same list.
  """
  # I wanted to name the parameter "from" but it's a Python keyword...
  for k, v in fro.items():
    # It would be nice to do "if not k in to: to[k] = v" but that wouldn't give
    # copy semantics.  Something else may want to merge from the |fro| dict
    # later, and having the same dict ref pointed to twice in the tree isn't
    # what anyone wants considering that the dicts may subsequently be
    # modified.
    if k in to:
      bad_merge = False
      if type(v) in (str, int):
        if type(to[k]) not in (str, int):
          bad_merge = True
      elif type(v) is not type(to[k]):
        bad_merge = True
      if bad_merge:
        raise TypeError(
            'Attempt to merge dict value of type ' + v.__class__.__name__ + \
            ' into incompatible type ' + to[k].__class__.__name__ + \
            ' for key ' + k)
    if type(v) in (str, int):
      # Overwrite the existing value, if any.  Cheap and easy.
      is_path = IsPathSection(k)
      if is_path:
        to[k] = MakePathRelative(to_file, fro_file, v)
      else:
        to[k] = v
    elif type(v) is dict:
      # Recurse, guaranteeing copies will be made of objects that require it.
      if not k in to:
        to[k] = {}
      MergeDicts(to[k], v, to_file, fro_file)
    elif type(v) is list:
      # Lists in dicts can be merged with different policies, depending on
      # how the key in the "from" dict (k, the from-key) is written.
      #
      # If the from-key has          ...the to-list will have this action
      # this character appended:...  applied when receiving the from-list:
      #                           =  replace
      #                           +  prepend
      #                           ?  set, only if to-list does not yet exist
      #                      (none)  append
      #
      # This logic is list-specific, but since it relies on the associated
      # dict key, it's checked in this dict-oriented function.
      ext = k[-1]
      append = True
      if ext == '=':
        list_base = k[:-1]
        lists_incompatible = [list_base, list_base + '?']
        to[list_base] = []
      elif ext == '+':
        list_base = k[:-1]
        lists_incompatible = [list_base + '=', list_base + '?']
        append = False
      elif ext == '?':
        list_base = k[:-1]
        lists_incompatible = [list_base, list_base + '=', list_base + '+']
      else:
        list_base = k
        lists_incompatible = [list_base + '=', list_base + '?']
      # Some combinations of merge policies appearing together are meaningless.
      # It's stupid to replace and append simultaneously, for example.  Append
      # and prepend are the only policies that can coexist.
      for list_incompatible in lists_incompatible:
        if list_incompatible in fro:
          raise GypError('Incompatible list policies ' + k + ' and ' +
                         list_incompatible)
      if list_base in to:
        if ext == '?':
          # If the key ends in "?", the list will only be merged if it doesn't
          # already exist.
          continue
        elif type(to[list_base]) is not list:
          # This may not have been checked above if merging in a list with an
          # extension character.
          raise TypeError(
              'Attempt to merge dict value of type ' + v.__class__.__name__ + \
              ' into incompatible type ' + to[list_base].__class__.__name__ + \
              ' for key ' + list_base + '(' + k + ')')
      else:
        to[list_base] = []
      # Call MergeLists, which will make copies of objects that require it.
      # MergeLists can recurse back into MergeDicts, although this will be
      # to make copies of dicts (with paths fixed), there will be no
      # subsequent dict "merging" once entering a list because lists are
      # always replaced, appended to, or prepended to.
      is_paths = IsPathSection(list_base)
      MergeLists(to[list_base], v, to_file, fro_file, is_paths, append)
    else:
      raise TypeError(
          'Attempt to merge dict value of unsupported type ' + \
          v.__class__.__name__ + ' for key ' + k)
def MergeConfigWithInheritance(new_configuration_dict, build_file,
                               target_dict, configuration, visited):
  """Recursively merges |configuration| and everything it inherits from into
  |new_configuration_dict|, parents first so the configuration's own
  settings are merged last.  |visited| guards against inheritance cycles."""
  if configuration in visited:
    return
  configuration_dict = target_dict['configurations'][configuration]
  # Depth-first over the 'inherit_from' chain before merging this config.
  for parent in configuration_dict.get('inherit_from', []):
    MergeConfigWithInheritance(new_configuration_dict, build_file,
                               target_dict, parent, visited + [configuration])
  MergeDicts(new_configuration_dict, configuration_dict,
             build_file, build_file)
  # 'abstract' is only a marker; it must not survive into the merged result.
  new_configuration_dict.pop('abstract', None)
def SetUpConfigurations(target, target_dict):
  """Builds the concrete per-configuration dicts for |target_dict|.

  Each non-abstract configuration receives a copy of the target's
  configurable settings, merged with its 'inherit_from' parents via
  MergeConfigWithInheritance.  Configurable keys are then stripped from the
  target-level dict, abstract configurations are dropped, and the resulting
  configurations are checked for invalid keys.

  Raises:
    GypError: if a configuration contains a key listed in
        invalid_configuration_keys.
  """
  # key_suffixes is a list of key suffixes that might appear on key names.
  # These suffixes are handled in conditional evaluations (for =, +, and ?)
  # and rules/exclude processing (for ! and /).  Keys with these suffixes
  # should be treated the same as keys without.
  key_suffixes = ['=', '+', '?', '!', '/']
  build_file = gyp.common.BuildFile(target)
  # Provide a single configuration by default if none exists.
  # TODO(mark): Signal an error if default_configurations exists but
  # configurations does not.
  if not 'configurations' in target_dict:
    target_dict['configurations'] = {'Default': {}}
  if not 'default_configuration' in target_dict:
    concrete = [i for (i, config) in target_dict['configurations'].items()
                if not config.get('abstract')]
    target_dict['default_configuration'] = sorted(concrete)[0]
  merged_configurations = {}
  configs = target_dict['configurations']
  for (configuration, old_configuration_dict) in configs.items():
    # Skip abstract configurations (saves work only).
    if old_configuration_dict.get('abstract'):
      continue
    # Configurations inherit (most) settings from the enclosing target scope.
    # Get the inheritance relationship right by making a copy of the target
    # dict.
    new_configuration_dict = {}
    for (key, target_val) in target_dict.items():
      key_ext = key[-1:]
      if key_ext in key_suffixes:
        key_base = key[:-1]
      else:
        key_base = key
      if not key_base in non_configuration_keys:
        new_configuration_dict[key] = gyp.simple_copy.deepcopy(target_val)
    # Merge in configuration (with all its parents first).
    MergeConfigWithInheritance(new_configuration_dict, build_file,
                               target_dict, configuration, [])
    merged_configurations[configuration] = new_configuration_dict
  # Put the new configurations back into the target dict as a configuration.
  for configuration in merged_configurations.keys():
    target_dict['configurations'][configuration] = (
        merged_configurations[configuration])
  # Now drop all the abstract ones.  Iterate over a snapshot of the keys:
  # deleting entries while iterating the live .keys() view raises
  # RuntimeError ("dictionary changed size during iteration") on Python 3.
  for configuration in list(target_dict['configurations'].keys()):
    old_configuration_dict = target_dict['configurations'][configuration]
    if old_configuration_dict.get('abstract'):
      del target_dict['configurations'][configuration]
  # Now that all of the target's configurations have been built, go through
  # the target dict's keys and remove everything that's been moved into a
  # "configurations" section.
  delete_keys = []
  for key in target_dict:
    key_ext = key[-1:]
    if key_ext in key_suffixes:
      key_base = key[:-1]
    else:
      key_base = key
    if not key_base in non_configuration_keys:
      delete_keys.append(key)
  for key in delete_keys:
    del target_dict[key]
  # Check the configurations to see if they contain invalid keys.
  for configuration in target_dict['configurations'].keys():
    configuration_dict = target_dict['configurations'][configuration]
    for key in configuration_dict.keys():
      if key in invalid_configuration_keys:
        raise GypError('%s not allowed in the %s configuration, found in '
                       'target %s' % (key, configuration, target))
def ProcessListFiltersInDict(name, the_dict):
  """Process regular expression and exclusion-based filters on lists.
  An exclusion list is in a dict key named with a trailing "!", like
  "sources!". Every item in such a list is removed from the associated
  main list, which in this example, would be "sources". Removed items are
  placed into a "sources_excluded" list in the dict.
  Regular expression (regex) filters are contained in dict keys named with a
  trailing "/", such as "sources/" to operate on the "sources" list. Regex
  filters in a dict take the form:
    'sources/': [ ['exclude', '_(linux|mac|win)\\.cc$'],
                  ['include', '_mac\\.cc$'] ],
  The first filter says to exclude all files ending in _linux.cc, _mac.cc, and
  _win.cc. The second filter then includes all files ending in _mac.cc that
  are now or were once in the "sources" list. Items matching an "exclude"
  filter are subject to the same processing as would occur if they were listed
  by name in an exclusion list (ending in "!"). Items matching an "include"
  filter are brought back into the main list if previously excluded by an
  exclusion list or exclusion regex filter. Subsequent matching "exclude"
  patterns can still cause items to be excluded after matching an "include".

  Arguments:
    name: string used only in error messages to identify the_dict.
    the_dict: dict whose lists are filtered in place.
  """
  # Look through the dictionary for any lists whose keys end in "!" or "/".
  # These are lists that will be treated as exclude lists and regular
  # expression-based exclude/include lists. Collect the lists that are
  # needed first, looking for the lists that they operate on, and assemble
  # then into |lists|. This is done in a separate loop up front, because
  # the _included and _excluded keys need to be added to the_dict, and that
  # can't be done while iterating through it.
  lists = []
  del_lists = []
  for key, value in the_dict.items():
    # The trailing character of the key determines the filter type.
    operation = key[-1]
    if operation != '!' and operation != '/':
      continue
    # Filter specifications must themselves be lists.
    if type(value) is not list:
      raise ValueError(name + ' key ' + key + ' must be list, not ' + \
                       value.__class__.__name__)
    list_key = key[:-1]
    if list_key not in the_dict:
      # This happens when there's a list like "sources!" but no corresponding
      # "sources" list. Since there's nothing for it to operate on, queue up
      # the "sources!" list for deletion now.
      del_lists.append(key)
      continue
    if type(the_dict[list_key]) is not list:
      value = the_dict[list_key]
      raise ValueError(name + ' key ' + list_key + \
                       ' must be list, not ' + \
                       value.__class__.__name__ + ' when applying ' + \
                       {'!': 'exclusion', '/': 'regex'}[operation])
    # Remember each base list only once, even if it has both a "!" and a
    # "/" filter.
    if not list_key in lists:
      lists.append(list_key)
  # Delete the lists that are known to be unneeded at this point.
  for del_list in del_lists:
    del the_dict[del_list]
  for list_key in lists:
    the_list = the_dict[list_key]
    # Initialize the list_actions list, which is parallel to the_list. Each
    # item in list_actions identifies whether the corresponding item in
    # the_list should be excluded, unconditionally preserved (included), or
    # whether no exclusion or inclusion has been applied. Items for which
    # no exclusion or inclusion has been applied (yet) have value -1, items
    # excluded have value 0, and items included have value 1. Includes and
    # excludes override previous actions. All items in list_actions are
    # initialized to -1 because no excludes or includes have been processed
    # yet.
    list_actions = list((-1,) * len(the_list))
    exclude_key = list_key + '!'
    if exclude_key in the_dict:
      for exclude_item in the_dict[exclude_key]:
        for index in range(0, len(the_list)):
          if exclude_item == the_list[index]:
            # This item matches the exclude_item, so set its action to 0
            # (exclude).
            list_actions[index] = 0
      # The "whatever!" list is no longer needed, dump it.
      del the_dict[exclude_key]
    regex_key = list_key + '/'
    if regex_key in the_dict:
      for regex_item in the_dict[regex_key]:
        [action, pattern] = regex_item
        pattern_re = re.compile(pattern)
        if action == 'exclude':
          # This item matches an exclude regex, so set its value to 0 (exclude).
          action_value = 0
        elif action == 'include':
          # This item matches an include regex, so set its value to 1 (include).
          action_value = 1
        else:
          # This is an action that doesn't make any sense.
          raise ValueError('Unrecognized action ' + action + ' in ' + name + \
                           ' key ' + regex_key)
        for index in range(0, len(the_list)):
          list_item = the_list[index]
          if list_actions[index] == action_value:
            # Even if the regex matches, nothing will change so continue (regex
            # searches are expensive).
            continue
          if pattern_re.search(list_item):
            # Regular expression match.
            list_actions[index] = action_value
      # The "whatever/" list is no longer needed, dump it.
      del the_dict[regex_key]
    # Add excluded items to the excluded list.
    #
    # Note that exclude_key ("sources!") is different from excluded_key
    # ("sources_excluded"). The exclude_key list is input and it was already
    # processed and deleted; the excluded_key list is output and it's about
    # to be created.
    excluded_key = list_key + '_excluded'
    if excluded_key in the_dict:
      raise GypError(name + ' key ' + excluded_key +
                     ' must not be present prior '
                     ' to applying exclusion/regex filters for ' + list_key)
    excluded_list = []
    # Go backwards through the list_actions list so that as items are deleted,
    # the indices of items that haven't been seen yet don't shift. That means
    # that things need to be prepended to excluded_list to maintain them in the
    # same order that they existed in the_list.
    for index in range(len(list_actions) - 1, -1, -1):
      if list_actions[index] == 0:
        # Dump anything with action 0 (exclude). Keep anything with action 1
        # (include) or -1 (no include or exclude seen for the item).
        excluded_list.insert(0, the_list[index])
        del the_list[index]
    # If anything was excluded, put the excluded list into the_dict at
    # excluded_key.
    if len(excluded_list) > 0:
      the_dict[excluded_key] = excluded_list
  # Now recurse into subdicts and lists that may contain dicts.
  for key, value in the_dict.items():
    if type(value) is dict:
      ProcessListFiltersInDict(key, value)
    elif type(value) is list:
      ProcessListFiltersInList(key, value)
def ProcessListFiltersInList(name, the_list):
  """Recursively apply list filters to any dicts nested inside the_list."""
  for entry in the_list:
    if type(entry) is dict:
      ProcessListFiltersInDict(name, entry)
    elif type(entry) is list:
      ProcessListFiltersInList(name, entry)
def ValidateTargetType(target, target_dict):
  """Ensures the 'type' field on the target is one of the known types.
  Arguments:
    target: string, name of target.
    target_dict: dict, target spec.
  Raises an exception on error.
  """
  allowed_types = ('executable', 'loadable_module', 'static_library',
                   'shared_library', 'none')
  target_type = target_dict.get('type', None)
  if target_type not in allowed_types:
    raise GypError("Target %s has an invalid target type '%s'. "
                   "Must be one of %s." %
                   (target, target_type, '/'.join(allowed_types)))
  # 'standalone_static_library' only makes sense for static libraries.
  standalone = target_dict.get('standalone_static_library', 0)
  if standalone and target_type != 'static_library':
    raise GypError('Target %s has type %s but standalone_static_library flag is'
                   ' only valid for static_library type.' % (target,
                                                             target_type))
def ValidateRulesInTarget(target, target_dict, extra_sources_for_rules):
  """Ensures that the rules sections in target_dict are valid and consistent,
  and determines which sources they apply to.

  Arguments:
    target: string, name of target.
    target_dict: dict, target spec containing "rules" and "sources" lists.
    extra_sources_for_rules: a list of keys to scan for rule matches in
        addition to 'sources'.
  """
  # Track rule names and (dot-stripped) extensions already seen so that
  # duplicates can be reported along with the conflicting rule.
  seen_names = {}
  seen_extensions = {}
  for rule in target_dict.get('rules', []):
    rule_name = rule['rule_name']
    if rule_name in seen_names:
      raise GypError('rule %s exists in duplicate, target %s' %
                     (rule_name, target))
    seen_names[rule_name] = rule
    # Normalize the extension by dropping a leading dot, if present.
    rule_extension = rule['extension']
    if rule_extension.startswith('.'):
      rule_extension = rule_extension[1:]
    if rule_extension in seen_extensions:
      raise GypError(('extension %s associated with multiple rules, ' +
                      'target %s rules %s and %s') %
                     (rule_extension, target,
                      seen_extensions[rule_extension]['rule_name'],
                      rule_name))
    seen_extensions[rule_extension] = rule
    # 'rule_sources' is an output of this function; callers must not
    # provide it themselves.
    if 'rule_sources' in rule:
      raise GypError(
        'rule_sources must not exist in input, target %s rule %s' %
        (target, rule_name))
    # Collect every source whose extension matches this rule.
    matching_sources = []
    for source_key in ['sources'] + list(extra_sources_for_rules):
      for source in target_dict.get(source_key, []):
        source_extension = os.path.splitext(source)[1]
        if source_extension.startswith('.'):
          source_extension = source_extension[1:]
        if source_extension == rule_extension:
          matching_sources.append(source)
    if matching_sources:
      rule['rule_sources'] = matching_sources
def ValidateRunAsInTarget(target, target_dict, build_file):
  """Checks that an optional 'run_as' section in a target is well-formed.

  Raises GypError when 'run_as' or one of its fields has the wrong type.
  """
  target_name = target_dict.get('target_name')
  run_as_spec = target_dict.get('run_as')
  if not run_as_spec:
    # No 'run_as' section; nothing to validate.
    return
  if type(run_as_spec) is not dict:
    raise GypError("The 'run_as' in target %s from file %s should be a "
                   "dictionary." %
                   (target_name, build_file))
  command = run_as_spec.get('action')
  if not command:
    raise GypError("The 'run_as' in target %s from file %s must have an "
                   "'action' section." %
                   (target_name, build_file))
  if type(command) is not list:
    raise GypError("The 'action' for 'run_as' in target %s from file %s "
                   "must be a list." %
                   (target_name, build_file))
  workdir = run_as_spec.get('working_directory')
  if workdir and type(workdir) is not str:
    raise GypError("The 'working_directory' for 'run_as' in target %s "
                   "in file %s should be a string." %
                   (target_name, build_file))
  env = run_as_spec.get('environment')
  if env and type(env) is not dict:
    raise GypError("The 'environment' for 'run_as' in target %s "
                   "in file %s should be a dictionary." %
                   (target_name, build_file))
def ValidateActionsInTarget(target, target_dict, build_file):
  '''Validates the inputs to the actions in a target.'''
  target_name = target_dict.get('target_name')
  for action_spec in target_dict.get('actions', []):
    # Every action needs a name for diagnostics and generator output.
    if not action_spec.get('action_name'):
      raise GypError("Anonymous action in target %s. "
                     "An action must have an 'action_name' field." %
                     target_name)
    # An 'inputs' key must be present, even if its list is empty.
    if action_spec.get('inputs', None) is None:
      raise GypError('Action in target %s has no inputs.' % target_name)
    command = action_spec.get('action')
    if command and not command[0]:
      raise GypError("Empty action as command in target %s." % target_name)
def TurnIntIntoStrInDict(the_dict):
  """Given dict the_dict, recursively converts all integers into strings.

  Both int values and int keys are converted; nested dicts and lists are
  processed recursively. The dict is modified in place.
  """
  # Iterate over a snapshot of the items: int keys are deleted and
  # re-inserted as strings below, and mutating a dict while iterating its
  # live items() view raises RuntimeError on Python 3.
  for k, v in list(the_dict.items()):
    if type(v) is int:
      v = str(v)
      the_dict[k] = v
    elif type(v) is dict:
      TurnIntIntoStrInDict(v)
    elif type(v) is list:
      TurnIntIntoStrInList(v)
    if type(k) is int:
      # Re-key the (possibly converted) value under the stringified key.
      del the_dict[k]
      the_dict[str(k)] = v
def TurnIntIntoStrInList(the_list):
  """Given list the_list, recursively converts all integers into strings.
  """
  # Modify in place: only int items are replaced; containers are recursed.
  for position, item in enumerate(the_list):
    if type(item) is int:
      the_list[position] = str(item)
    elif type(item) is dict:
      TurnIntIntoStrInDict(item)
    elif type(item) is list:
      TurnIntIntoStrInList(item)
def PruneUnwantedTargets(targets, flat_list, dependency_nodes, root_targets,
                         data):
  """Return only the targets that are deep dependencies of |root_targets|.

  Returns a (wanted_targets, wanted_flat_list) pair and also prunes the
  per-build-file 'targets' lists inside |data| in place.
  Raises GypError if a root target cannot be resolved in |flat_list|.
  """
  # Resolve each (possibly unqualified) root target to its fully qualified
  # name(s) within flat_list.
  qualified_root_targets = []
  for target in root_targets:
    target = target.strip()
    qualified_targets = gyp.common.FindQualifiedTargets(target, flat_list)
    if not qualified_targets:
      raise GypError("Could not find target %s" % target)
    qualified_root_targets.extend(qualified_targets)
  # Keep each root target plus its entire transitive dependency closure.
  wanted_targets = {}
  for target in qualified_root_targets:
    wanted_targets[target] = targets[target]
    for dependency in dependency_nodes[target].DeepDependencies():
      wanted_targets[dependency] = targets[dependency]
  # Preserve the original (dependency) ordering of flat_list.
  wanted_flat_list = [t for t in flat_list if t in wanted_targets]
  # Prune unwanted targets from each build_file's data dict.
  for build_file in data['target_build_files']:
    if not 'targets' in data[build_file]:
      continue
    new_targets = []
    for target in data[build_file]['targets']:
      qualified_name = gyp.common.QualifiedTarget(build_file,
                                                  target['target_name'],
                                                  target['toolset'])
      if qualified_name in wanted_targets:
        new_targets.append(target)
    data[build_file]['targets'] = new_targets
  return wanted_targets, wanted_flat_list
def VerifyNoCollidingTargets(targets):
  """Verify that no two targets in the same directory share the same name.
  Arguments:
    targets: A list of targets in the form 'path/to/file.gyp:target_name'.
  """
  # Maps 'subdirectory:target_name' to the .gyp file that first claimed it.
  seen = {}
  for target in targets:
    # Split 'path/to/file.gyp:target_name' on the last colon, then split
    # the path into its directory and .gyp file name.
    build_file_path, target_name = target.rsplit(':', 1)
    directory, gyp_file = os.path.split(build_file_path)
    # Use '.' for the current directory '' so error messages read sensibly.
    directory = directory or '.'
    collision_key = directory + ':' + target_name
    if collision_key in seen:
      raise GypError('Duplicate target name "%s" in directory "%s" used both '
                     'in "%s" and "%s".' % (target_name, directory, gyp_file,
                                            seen[collision_key]))
    seen[collision_key] = gyp_file
def SetGeneratorGlobals(generator_input_info):
  """Initialize module-level globals from generator-provided settings.

  Arguments:
    generator_input_info: dict supplied by the active generator, providing
        'path_sections', 'non_configuration_keys',
        'generator_supports_multiple_toolsets' and
        'generator_filelist_paths'.
  """
  # Set up path_sections and non_configuration_keys with the default data plus
  # the generator-specific data.
  global path_sections
  path_sections = set(base_path_sections)
  path_sections.update(generator_input_info['path_sections'])
  global non_configuration_keys
  # Copy the base list so the generator additions don't mutate it.
  non_configuration_keys = base_non_configuration_keys[:]
  non_configuration_keys.extend(generator_input_info['non_configuration_keys'])
  global multiple_toolsets
  multiple_toolsets = generator_input_info[
      'generator_supports_multiple_toolsets']
  global generator_filelist_paths
  generator_filelist_paths = generator_input_info['generator_filelist_paths']
def Load(build_files, variables, includes, depth, generator_input_info, check,
         circular_check, parallel, root_targets):
  """Load and fully process all build files reachable from |build_files|.

  This is the main entry point of the input module: it loads the build
  files, resolves and validates dependencies, merges configurations, and
  applies all variable expansions and list filters.

  Returns [flat_list, targets, data]: the dependency-ordered list of
  qualified target names, a dict mapping qualified names to target dicts,
  and the per-build-file data dict.
  """
  SetGeneratorGlobals(generator_input_info)
  # A generator can have other lists (in addition to sources) be processed
  # for rules.
  extra_sources_for_rules = generator_input_info['extra_sources_for_rules']
  # Load build files. This loads every target-containing build file into
  # the |data| dictionary such that the keys to |data| are build file names,
  # and the values are the entire build file contents after "early" or "pre"
  # processing has been done and includes have been resolved.
  # NOTE: data contains both "target" files (.gyp) and "includes" (.gypi), as
  # well as meta-data (e.g. 'included_files' key). 'target_build_files' keeps
  # track of the keys corresponding to "target" files.
  data = {'target_build_files': set()}
  # Normalize paths everywhere. This is important because paths will be
  # used as keys to the data dict and for references between input files.
  build_files = set(map(os.path.normpath, build_files))
  if parallel:
    LoadTargetBuildFilesParallel(build_files, data, variables, includes, depth,
                                 check, generator_input_info)
  else:
    aux_data = {}
    for build_file in build_files:
      try:
        LoadTargetBuildFile(build_file, data, aux_data,
                            variables, includes, depth, check, True)
      except Exception:
        e = sys.exc_info()[1]
        gyp.common.ExceptionAppend(e, 'while trying to load %s' % build_file)
        raise
  # Build a dict to access each target's subdict by qualified name.
  targets = BuildTargetsDict(data)
  # Fully qualify all dependency links.
  QualifyDependencies(targets)
  # Remove self-dependencies from targets that have 'prune_self_dependencies'
  # set to 1.
  RemoveSelfDependencies(targets)
  # Expand dependencies specified as build_file:*.
  ExpandWildcardDependencies(targets, data)
  # Remove all dependencies marked as 'link_dependency' from the targets of
  # type 'none'.
  RemoveLinkDependenciesFromNoneTargets(targets)
  # Apply exclude (!) and regex (/) list filters only for dependency_sections.
  for target_name, target_dict in targets.items():
    # Filter only the dependency-related keys by moving them into a
    # temporary dict, filtering it, and writing the results back.
    tmp_dict = {}
    for key_base in dependency_sections:
      for op in ('', '!', '/'):
        key = key_base + op
        if key in target_dict:
          tmp_dict[key] = target_dict[key]
          del target_dict[key]
    ProcessListFiltersInDict(target_name, tmp_dict)
    # Write the results back to |target_dict|.
    for key in tmp_dict:
      target_dict[key] = tmp_dict[key]
  # Make sure every dependency appears at most once.
  RemoveDuplicateDependencies(targets)
  if circular_check:
    # Make sure that any targets in a.gyp don't contain dependencies in other
    # .gyp files that further depend on a.gyp.
    VerifyNoGYPFileCircularDependencies(targets)
  [dependency_nodes, flat_list] = BuildDependencyList(targets)
  if root_targets:
    # Remove, from |targets| and |flat_list|, the targets that are not deep
    # dependencies of the targets specified in |root_targets|.
    targets, flat_list = PruneUnwantedTargets(
        targets, flat_list, dependency_nodes, root_targets, data)
  # Check that no two targets in the same directory have the same name.
  VerifyNoCollidingTargets(flat_list)
  # Handle dependent settings of various types.
  for settings_type in ['all_dependent_settings',
                        'direct_dependent_settings',
                        'link_settings']:
    DoDependentSettings(settings_type, flat_list, targets, dependency_nodes)
    # Take out the dependent settings now that they've been published to all
    # of the targets that require them.
    for target in flat_list:
      if settings_type in targets[target]:
        del targets[target][settings_type]
  # Make sure static libraries don't declare dependencies on other static
  # libraries, but that linkables depend on all unlinked static libraries
  # that they need so that their link steps will be correct.
  gii = generator_input_info
  if gii['generator_wants_static_library_dependencies_adjusted']:
    AdjustStaticLibraryDependencies(flat_list, targets, dependency_nodes,
                                    gii['generator_wants_sorted_dependencies'])
  # Apply "post"/"late"/"target" variable expansions and condition evaluations.
  for target in flat_list:
    target_dict = targets[target]
    build_file = gyp.common.BuildFile(target)
    ProcessVariablesAndConditionsInDict(
        target_dict, PHASE_LATE, variables, build_file)
  # Move everything that can go into a "configurations" section into one.
  for target in flat_list:
    target_dict = targets[target]
    SetUpConfigurations(target, target_dict)
  # Apply exclude (!) and regex (/) list filters.
  for target in flat_list:
    target_dict = targets[target]
    ProcessListFiltersInDict(target, target_dict)
  # Apply "latelate" variable expansions and condition evaluations.
  for target in flat_list:
    target_dict = targets[target]
    build_file = gyp.common.BuildFile(target)
    ProcessVariablesAndConditionsInDict(
        target_dict, PHASE_LATELATE, variables, build_file)
  # Make sure that the rules make sense, and build up rule_sources lists as
  # needed. Not all generators will need to use the rule_sources lists, but
  # some may, and it seems best to build the list in a common spot.
  # Also validate actions and run_as elements in targets.
  for target in flat_list:
    target_dict = targets[target]
    build_file = gyp.common.BuildFile(target)
    ValidateTargetType(target, target_dict)
    ValidateRulesInTarget(target, target_dict, extra_sources_for_rules)
    ValidateRunAsInTarget(target, target_dict, build_file)
    ValidateActionsInTarget(target, target_dict, build_file)
  # Generators might not expect ints. Turn them into strs.
  TurnIntIntoStrInDict(data)
  # TODO(mark): Return |data| for now because the generator needs a list of
  # build files that came in. In the future, maybe it should just accept
  # a list, and not the whole data dict.
  return [flat_list, targets, data]
# vim: set ft=python expandtab tabstop=2 shiftwidth=2:
|
pyokagan/gyp
|
pylib/gyp/input.py
|
Python
|
bsd-3-clause
| 114,987
|
[
"VisIt"
] |
10683fcc40c5524ada6a3da4400baf4c735feb5badae30c2c641c399c846e2f7
|
import json
import glob
import time
import re
import os
from collections import Counter
from itertools import chain
import requests
import settings
from nltk.tokenize import word_tokenize
# URL templates for okcupid.com's (undocumented) JSON endpoints; the
# {username} placeholders are filled in by the Session methods below.
PROFILE_URL = u'https://www.okcupid.com/profile/{username}'
QUESTIONS_URL = u'https://www.okcupid.com/profile/{username}/questions'
LOGIN_URL = 'http://www.okcupid.com/login'
MATCH_URL = 'https://www.okcupid.com/match'
QUICKMATCH_URL = 'https://www.okcupid.com/quickmatch/{username}'
VISITORS_URL = 'https://www.okcupid.com/visitors/{username}'
# Default HTTP headers sent with login requests.
HEADERS = {
    'User-agent' : settings.USER_AGENT,
    'content-type': 'application/x-www-form-urlencoded; charset=UTF-8',
}
# Name of the side-car file, stored alongside downloaded profiles, that
# holds per-user metadata such as the number of questions answered.
USER_DATA_FILE = 'USER_DATA'
# Numeric location ids used as the 'locid' parameter by Session.search().
LOCATIONS = {
    'melbourne' : 976925,
    'sydney' : 974455,
    'san_francisco' : 4265540,
}
# Values understood for the 'matchorder' argument of Session.search().
MATCH_ORDERS = (
    'MATCH',
    'SPECIAL_BLEND',
    'RANDOM',
    'ENEMY',
    'JOIN',
    'LOGIN',
    'MATCH_AND_NEW',
    'MATCH_AND_LOGIN',
    'MATCH_AND_DISTANCE',
)
def get_dataframe(path):
    """Load every JSON profile under *path* into a pandas DataFrame.

    Each row holds the flat stats dict produced by get_stats().
    """
    # Local import: pandas is only needed for analysis, not for scraping,
    # and it is not imported at module level.
    import pandas as pd
    # Bug fix: the original called okc.load_user_dicts/okc.get_stats, but
    # this *is* okc.py and it never imports itself (nor pandas), so the
    # function raised NameError. Call the module-local helpers directly.
    users = load_user_dicts(path)
    return pd.DataFrame(get_stats(u) for u in users)
def get_user_paths(path):
    """Return the paths of all '*.json' profile files directly under *path*."""
    pattern = os.path.join(path, '*.json')
    return glob.glob(pattern)
def load_user(json_path):
    """Build a User from the JSON profile stored at *json_path*."""
    with open(json_path, encoding='utf-8') as fh:
        profile = json.load(fh)
    return User(profile)
def load_users(collection_path):
    """Yield User objects for every JSON profile in *collection_path*.

    Profiles that fail to parse or are incomplete are reported and skipped.
    """
    for profile_path in get_user_paths(collection_path):
        try:
            yield load_user(profile_path)
        except (ValueError, OkcIncompleteProfileError) as err:
            print(err)
def load_user_dicts(collection_path):
    """Yield raw profile dicts for every JSON profile in *collection_path*.

    Profiles that fail to parse or are incomplete are reported and skipped.
    """
    for profile_path in get_user_paths(collection_path):
        try:
            with open(profile_path, encoding='utf-8') as fh:
                yield json.load(fh)
        except (ValueError, OkcIncompleteProfileError) as err:
            print(err)
def filter_users(paths, question_min):
    """Yield the user profile paths whose owners answered at least
    *question_min* match questions.

    This is a generator (the original docstring claimed it returned a
    filtered list; it never did, and its `filtered_paths` local was dead
    code — removed). Users missing from the side-car user-data file are
    reported and skipped.
    """
    # Load the additional user data stored alongside the profiles. It maps
    # username -> per-user metadata including 'num_questions'.
    user_data_path = os.path.join(os.path.split(paths[0])[0], USER_DATA_FILE)
    with open(user_data_path, encoding='utf-8') as f:
        user_data = json.loads(f.read())
    for user_path in paths:
        # The username is the profile file name without its extension.
        username = os.path.splitext(os.path.basename(user_path))[0]
        if username not in user_data:
            print("user {} missing from user data dictionary".format(username))
        else:
            if user_data[username]['num_questions'] >= question_min:
                yield user_path
def get_user_data(path):
    """Return the user-data dict stored under *path*, or {} if absent.

    (The previous TODO claimed the missing-file case was unhandled; the
    os.path.exists guard below handles it, so the stale note is removed.)
    """
    user_data_path = os.path.join(path, USER_DATA_FILE)
    if not os.path.exists(user_data_path):
        return {}
    # Read with an explicit encoding for consistency with write_user_data.
    with open(user_data_path, encoding='utf-8') as f:
        return json.loads(f.read())
def write_user_data(data, path):
    """Serialize *data* as JSON into the user-data file under *path*."""
    target = os.path.join(path, USER_DATA_FILE)
    with open(target, 'w', encoding='utf-8') as outfile:
        outfile.write(json.dumps(data))
class OkcError(Exception):
    """Base class for all errors raised by this OKC client module."""
    pass
class OkcNoSuchUserError(OkcError):
    """Raised when a profile or questions request fails for a username."""

    def __str__(self):
        # Bug fix: Exception.message was removed in Python 3 (PEP 352),
        # so the original `self.message` raised AttributeError. The
        # constructor argument is available via self.args instead.
        return 'No such user: {}'.format(self.args[0] if self.args else '')
class OkcIncompleteProfileError(OkcError):
    """Raised when a profile lacks the fields needed to compute stats."""

    def __str__(self):
        # Bug fix: Exception.message was removed in Python 3 (PEP 352),
        # so the original `self.message` raised AttributeError. The
        # constructor argument is available via self.args instead.
        return 'Incomplete profile: {}'.format(self.args[0] if self.args else '')
def get_stats(data):
    """Extract a flat stats dict from a raw OKC profile dict.

    Raises OkcIncompleteProfileError when the profile has no match data
    (no 'matchpercentage' key). The 'height' value is centimetres parsed
    from the metric part of the height string, or None when absent.
    """
    if 'matchpercentage' not in data:
        raise OkcIncompleteProfileError(data['username'])
    height_str = data['skinny']['height']
    if height_str == "":
        height = None
    else:
        # height_str looks like `5' 7" (1.70m)`: grab the parenthesised
        # metric part, drop the trailing 'm', and convert to whole cm.
        # (Dead locals `height_inches`/`bits` and the commented-out
        # imperial conversion were removed.)
        height = int(100*float(re.search(r'\(([^()]+)\)', height_str).group(1)[:-1]))
    stats = {
        'username': data['username'],
        'age': int(data['age']),
        'gender': int(data['gender']),
        'match': int(data['matchpercentage']),
        'enemy': int(data['enemypercentage']),
        'status': int(data['status']),
        'orientation': int(data['orientation']),
        'height': height,
    }
    # NOTE(review): a gender_str-derived field (or one flag per multi-tag)
    # could be added here; 'M'/'F' would need recording too so profiles
    # without extra tags stay distinguishable.
    # orientation codes observed: 1 = straight-ish, 2 = gay-ish,
    # 3 = bisexual-ish — TODO confirm against the API.
    return stats
class User(object):
    """Models an OKC user profile.

    Instance attributes:
      stats       dict of profile stats (see get_stats)
      essays      list of essay contents; a string per answered essay,
                  an empty list for essays the user did not fill out
      text        combined text from all essays (property, string)
      tokens      tokens from all essays (property, lazily computed)
      words       lowercase alphabetic tokens (property, lazily computed)
      vocabulary  set of distinct words (property, lazily computed)

    (The previous docstring documented an `essay_titles` attribute that
    was never set anywhere; it has been removed.)
    """

    def __init__(self, data):
        self.stats = get_stats(data)
        self.essays = self.process_essays(data)
        # Caches backing the tokens/words/vocabulary properties.
        self._tokens = None
        self._words = None
        self._vocabulary = None

    def process_essays(self, data):
        """Return a list of essay texts; an empty list marks an unanswered essay."""
        found_essays = []
        for essay in data['essays']:
            this_essay = essay['essay']
            if this_essay == []:
                # User did not fill this essay out
                found_essays.append([])
            else:
                found_essays.append(this_essay[0]['rawtext'])
        return found_essays

    def get_tokens(self, tokenize=word_tokenize):
        """Compute (and cache) the tokens from all non-empty essays."""
        essay_tokens = (tokenize(essay) for essay in self.essays if essay)
        self._tokens = list(chain.from_iterable(essay_tokens))
        return self._tokens

    def get_words(self):
        """Compute (and cache) the lowercased purely-alphabetic tokens."""
        self._words = [token.lower() for token in self.tokens if token.isalpha()]
        return self._words

    def get_vocabulary(self):
        """Compute (and cache) the set of distinct words."""
        self._vocabulary = set(self.words)
        return self._vocabulary

    @property
    def lexical_diversity(self):
        """Ratio of distinct words to total words; 0 for empty profiles."""
        if len(self.words) == 0:
            return 0
        return len(self.vocabulary) / len(self.words)

    @property
    def vocabulary(self):
        if self._vocabulary is None:
            self.get_vocabulary()
        return self._vocabulary

    @property
    def words(self):
        if self._words is None:
            self.get_words()
        return self._words

    @property
    def tokens(self):
        if self._tokens is None:
            self.get_tokens()
        return self._tokens

    @property
    def text(self):
        """Returns the complete text from all essays in a user's profile"""
        return '\n'.join(essay for essay in self.essays if essay)

    def __str__(self):
        # Bug fix: User has no `username` attribute (nothing ever set one),
        # so the original raised AttributeError. The username is in stats.
        return self.stats['username']
class Session(object):
    """Class for interacting with okcupid.com."""

    def __init__(self, username=settings.USERNAME, password=settings.PASSWORD):
        """Logs into okcupid.com. Uses parameters for logging in if provided,
        otherwise defaults to credentials specified in settings.py.
        """
        self.login(username, password)

    def login(self, username, password):
        """Logs into okcupid.com and stores the session cookies."""
        params = {
            'username': username,
            'password': password,
            'okc_api' : '1',
        }
        r = requests.post(LOGIN_URL, params=params, headers=HEADERS)
        self.cookies = r.cookies

    def search(self, count=1000, matchorder='MATCH', location=None, distance=settings.DISTANCE,
               min_age=settings.MIN_AGE, max_age=settings.MAX_AGE, gender='all', orientation='all',
               time='year'):
        """Make a search GET request to OKC API. Note: POST does not work.

        (NB: the `time` parameter shadows the `time` module inside this
        method; the module is not used here.)

        Returns a dictionary with the following keys:
          'username'        : user logged in as
          'foundany'        : 0 or 1
          'maxmatch'        : highest match percentage found
          'lquery'          : ??
          'amateur_results' : list of user dictionary results
          'total_matches'   : the number of results
          'cache_timekey'   : ??
          'alist_results'   : ??
          'last_online'     : unknown format
          'filters'         : dictionary of filters applied
          'numanswered'     : number of match questions answered
        """
        # Note that gender numbers depend on what has been specified
        # for orientation. This first set is for selecting
        # fine-grained every orientation possible -- ie 4095
        genders = {
            'male'   : 21,
            'female' : 42,
            'all'    : 63,
        }
        # Kept for reference: gender codes when no orientation param is
        # sent (orientation = 'everybody'). Unused by the request below.
        genders_orientation_everybody = {
            'male'   : 16,
            'female' : 32,
            'all'    : 48,
        }
        # Last-online cutoffs, in seconds.
        times = {
            'now'   : 3600,
            'day'   : 86400,
            'week'  : 604800,
            'month' : 2678400,
            'year'  : 31536000,
        }
        # use most promiscuous values possible for rest...
        # This orientation number of 4095 is for selecting every tick
        # box possible in orientation. If you just want 'everyone',
        # omit this param.
        orientation = 4095
        status = 0
        # location: 0 means "no location filter".
        if location is None:
            locid = 0
        else:
            locid = LOCATIONS[location]
        # the filter names (eg 'filter1, 'filter2') are not relevant,
        # just indicate the nth filter applied. The value of the
        # filter parameters are themselves <key,value> pairs separated
        # by commas, indicating the filter type and value to be
        # filtered on.
        params = {
            'okc_api'       : 1,
            'timekey'       : 1,
            'discard_prefs' : 1,
            'count'         : count,
            'matchOrderBy'  : matchorder,
            'locid'         : locid,
            'filter1'       : '0,{}'.format(genders[gender]),
            'filter2'       : '76,{}'.format(orientation),
            'filter3'       : '2,{},{}'.format(min_age, max_age),
            'filter4'       : '3,{}'.format(distance),
            'filter5'       : '5,{}'.format(times[time]),
            'filter6'       : '35,{}'.format(status),
        }
        result = requests.get(MATCH_URL, cookies=self.cookies, params=params)
        return result.json()['amateur_results']

    def get_profile(self, username):
        """Given a username, return their profile as a JSON string."""
        params = {'okc_api' : 1}
        url = PROFILE_URL.format(username=username)
        result = requests.get(url, cookies=self.cookies, params=params)
        if result.status_code != requests.codes.ok:
            message = "Error getting profile: {}\n{}".format(username, result.text)
            raise OkcNoSuchUserError(message)
        return result.json()

    def get_num_questions(self, username):
        """Return the number of match questions *username* has answered."""
        params = {'okc_api' : 1}
        url = QUESTIONS_URL.format(username=username)
        result = requests.get(url, cookies=self.cookies, params=params)
        if result.status_code != requests.codes.ok:
            message = "Error getting profile: {}\n{}".format(username, result.text)
            raise OkcNoSuchUserError(message)
        json_data = result.json()
        num_questions = int(json_data['pagination']['raw']['total_num_results'])
        return num_questions

    def dump_profiles(self, usernames, path, resume=False):
        """Retrieves user profiles and write them all to disk.

        When resume is True, usernames whose profile file already exists
        are skipped.
        """
        user_data = get_user_data(path)
        for count, username in enumerate(usernames):
            outpath = os.path.join(path, "{}.json".format(username))
            if resume and os.path.exists(outpath):
                continue
            try:
                user = self.get_profile(username)
                num_questions = self.get_num_questions(username)
                if username not in user_data:
                    user_data[username] = {}
                user_data[username]['num_questions'] = num_questions
                with open(outpath, 'w', encoding='utf-8') as file:
                    json_string = json.dumps(user)
                    file.write(json_string)
                # Persist the metadata after every profile so an
                # interrupted run loses at most one entry.
                write_user_data(user_data, path)
                print("{}: Wrote {}".format(count+1, username))
            except OkcNoSuchUserError as error:
                print("NO SUCH USER: {}".format(username))
            except requests.ConnectionError as error:
                print("CONNECTION ERROR: {}".format(username))
            time.sleep(settings.SLEEP_TIME)

    def visit_profiles(self, usernames):
        """Retrieves the profiles of a sequence of usernames. Nothing is done
        with the profiles, but this has the effect of making you appear in
        their visitors list if you are not in invisible browsing mode.
        """
        for count, username in enumerate(usernames):
            try:
                self.get_profile(username)
                print("{}: Visited {}".format(count+1, username))
            except OkcNoSuchUserError as error:
                print("NO SUCH USER: {}".format(username))
            except requests.ConnectionError as error:
                print("CONNECTION ERROR: {}".format(username))
            time.sleep(settings.SLEEP_TIME)

    def find_and_visit_profiles(self, cutoff=None, threshold=None, **kwargs):
        """Perform a custom search for users and then visit their profiles.

        threshold argument: minimum match percentage score required.
        cutoff argument: number of profiles to stop at.

        Also accepts all keyword arguments accepted by the search()
        method.
        """
        profiles = self.search(**kwargs)
        # Bug fix: `usernames` was previously only assigned inside the
        # threshold branch, so threshold=None raised NameError below.
        usernames = [profile['username'] for profile in profiles]
        if threshold is not None:
            usernames = [profile['username'] for profile in profiles if
                         int(profile['matchpercentage']) > threshold]
        if cutoff is not None:
            usernames = usernames[:cutoff]
        self.visit_profiles(usernames)

    def find_users(self, **kwargs):
        """Given search parameters as keyword arguments, returns a set of
        usernames. Avoids the 1000 user response limit by repeating
        search for randomly sorted results until no new users are
        found.
        """
        usernames = set()
        num_found = True
        while num_found:
            num_before = len(usernames)
            profiles = self.search(matchorder='RANDOM', **kwargs)
            usernames.update(profile['username'] for profile in profiles)
            num_after = len(usernames)
            num_found = num_after - num_before
            print("Found {} new users".format(num_found))
            time.sleep(settings.SLEEP_TIME)
        return usernames

    def find_all_users(self, binsize=5, **kwargs):
        """In addition to the 1000 user result limit, OKC seems to also
        silently filter out some users from the search results if
        there are too many *potential* matches that could be returned.
        ie there are just some users that won't be returned no matter
        how many times you hit the RANDOM buttom. This can be avoided
        by searching over a smaller age range.

        This function gets around the limitation by invoking
        self.find_users() across a series of ranges of length
        specified by binsize.
        """
        usernames = set()
        # Build non-overlapping (min_age, max_age) brackets covering the
        # configured age range.
        pairs = []
        curr = settings.MIN_AGE
        while curr <= settings.MAX_AGE:
            if curr + binsize > settings.MAX_AGE:
                this_max = settings.MAX_AGE
            else:
                this_max = curr + binsize
            pairs.append((curr, this_max))
            curr += binsize + 1
        # Any caller-supplied age bounds are superseded by the brackets.
        kwargs.pop('min_age', None)
        kwargs.pop('max_age', None)
        for min_age, max_age in pairs:
            found = self.find_users(min_age=min_age, max_age=max_age, **kwargs)
            usernames.update(found)
            print("====================================")
            print("Found {} users in age bracket {},{}\n".format(len(found), min_age, max_age))
        return usernames
|
ned2/okdata
|
okc.py
|
Python
|
mit
| 16,915
|
[
"VisIt"
] |
038678a7ca9dcc7b200a76ce66fcc0829e66b42de9fd17626ee151004ee20b32
|
"""
=======
Plotter
=======
.. moduleauthor:: Adam Ginsburg <adam.g.ginsburg@gmail.com>
"""
from __future__ import print_function
import matplotlib
import matplotlib.pyplot
import matplotlib.figure
import numpy as np
import astropy.units as u
import copy
import inspect
try:
from matplotlib.cbook import BoundMethodProxy
except ImportError:
from matplotlib.cbook import _BoundMethodProxy as BoundMethodProxy
from . import widgets
from ..specwarnings import warn
# Help text printed when '?' is pressed in an active plot window
# (see Plotter.parse_keys).
interactive_help_message = """
Interactive key commands for plotter. An additional help message may appear if
you have initiated the fitter.
'?' - bring up this message
'f' - initiate the /f/itter
'b' - initiate the /b/aseliner
'B' - initiate the /b/aseliner (reset the selection too)
'r' - re-attach matplotlib keys
'R' - redraw the plot cleanly
'i' : individual components / show each fitted component
"""
# Maps a lower-cased physical type (from the x-axis unit) to a nicer
# axis-label word; anything not listed falls back to the type's .title().
xlabel_table = {'speed': 'Velocity'}
class Plotter(object):
    """
    Class to plot a spectrum
    """
    def __init__(self, Spectrum, autorefresh=True, title="",
                 xlabel="", silent=True, plotscale=1.0, **kwargs):
        # matplotlib canvas handles; created lazily when the plotter is called
        self.figure = None
        self.axis = None
        # the Spectrum object being displayed
        self.Spectrum = Spectrum
        # cached copy of the spectrum's x-axis unit (kept in sync by the
        # xmin/xmax property getters)
        self._xunit = Spectrum.xarr.unit
        # plot parameters
        self.offset = 0.0 # vertical offset
        self.autorefresh = autorefresh
        self.xlabel = xlabel
        self.title = title
        self.errorplot = None
        # default keyword arguments forwarded to plot()
        self.plotkwargs = kwargs
        # cached limits backing the xmin/xmax/ymin/ymax properties
        self._xlim = [None,None]
        self._ylim = [None,None]
        self.debug = False
        # matplotlib callback id of the key-press handler (see _mpl_connect)
        self.keyclick = None
        self.silent = silent
        # multiplicative scale applied to the data before plotting
        self.plotscale = plotscale
        # x-positions of the last two recorded mouse clicks (get_two_clicks)
        self._xclick1 = None
        self._xclick2 = None
        self.automake_fitter_tool = False
    # Factory functions (evaluated at class-definition time) that build the
    # getter/setter pairs backing the xmin/xmax/ymin/ymax properties below.
    # Each closure captures which axis ('x'/'y') and which end ('min'/'max')
    # it services.
    def _get_prop(xy, minmax):
        def getprop(self):
            # re-sync the cached unit if the Spectrum's x-axis unit changed
            if self.Spectrum.xarr.unit != self._xunit:
                self._xunit = self.Spectrum.xarr.unit
            if xy == 'x':
                if minmax == 'min':
                    # coerce the stored limit into a Quantity carrying the
                    # current unit (older astropy objects expose _unit)
                    if self._xlim[0] and self._xunit:
                        try:
                            self._xlim[0]._unit = self._xunit
                        except AttributeError:
                            self._xlim[0] = u.Quantity(self._xlim[0], self._xunit)
                    return self._xlim[0]
                elif minmax == 'max':
                    if self._xlim[1] and self._xunit:
                        try:
                            self._xlim[1]._unit = self._xunit
                        except AttributeError:
                            self._xlim[1] = u.Quantity(self._xlim[1], self._xunit)
                    return self._xlim[1]
            elif xy == 'y':
                # y limits are plain numbers; no unit handling needed
                if minmax == 'min':
                    return self._ylim[0]
                elif minmax == 'max':
                    return self._ylim[1]
        return getprop
    def _set_prop(xy, minmax):
        def setprop(self, value):
            if self.debug:
                # report who set the limit, for debugging limit churn
                frm = inspect.stack()
                print(frm[1],"Setting %s%s to %s" % (xy,minmax,value))
            if xy == 'x':
                if minmax == 'min':
                    self._xlim[0] = value
                elif minmax == 'max':
                    self._xlim[1] = value
            elif xy == 'y':
                if minmax == 'min':
                    self._ylim[0] = value
                elif minmax == 'max':
                    self._ylim[1] = value
        return setprop
    xmin = property(fget=_get_prop('x','min'),fset=_set_prop('x','min'))
    xmax = property(fget=_get_prop('x','max'),fset=_set_prop('x','max'))
    ymin = property(fget=_get_prop('y','min'),fset=_set_prop('y','min'))
    ymax = property(fget=_get_prop('y','max'),fset=_set_prop('y','max'))
    def _disconnect_matplotlib_keys(self):
        """
        Disconnect the default matplotlib key-press callbacks, stashing them
        so _reconnect_matplotlib_keys can restore them later.
        """
        if self.figure is not None:
            cbs = self.figure.canvas.callbacks.callbacks
            # this may cause problems since the dict of key press events is a
            # dict, i.e. not ordered, and we want to pop the first one...
            mpl_keypress_handler = self.figure.canvas.manager.key_press_handler_id
            try:
                self._mpl_key_callbacks = {mpl_keypress_handler:
                                           cbs['key_press_event'].pop(mpl_keypress_handler)}
            except KeyError:
                # handler id not present in the registry (newer matplotlib);
                # stash a proxy of the manager's bound key_press method instead
                bmp = BoundMethodProxy(self.figure.canvas.manager.key_press)
                self._mpl_key_callbacks = {mpl_keypress_handler:
                                           bmp}
    def _reconnect_matplotlib_keys(self):
        """
        Reconnect the previously disconnected matplotlib keys
        """
        if self.figure is not None and hasattr(self,'_mpl_key_callbacks'):
            # restore the callbacks stashed by _disconnect_matplotlib_keys
            self.figure.canvas.callbacks.callbacks['key_press_event'].update(self._mpl_key_callbacks)
        elif self.figure is not None:
            # nothing was stashed; re-register a proxy of the manager's
            # key_press method under the standard handler id
            mpl_keypress_handler = self.figure.canvas.manager.key_press_handler_id
            bmp = BoundMethodProxy(self.figure.canvas.manager.key_press)
            self.figure.canvas.callbacks.callbacks['key_press_event'].update({mpl_keypress_handler:
                                                                              bmp})
    def __call__(self, figure=None, axis=None, clear=True, autorefresh=None,
                 plotscale=1.0, override_plotkwargs=False, **kwargs):
        """
        Plot a spectrum

        Keywords:
          figure - either a matplotlib figure instance or a figure number
              to pass into pyplot.figure.
          axis - Alternative to figure, can pass an axis instance and use
              it as the plotting canvas
          clear - Clear the axis before plotting?
        """
        # figure out where to put the plot
        if isinstance(figure,matplotlib.figure.Figure):
            self.figure = figure
            self.axis = self.figure.gca()
        elif type(figure) is int:
            self.figure = matplotlib.pyplot.figure(figure)
            self.axis = self.figure.gca()
        elif self.figure is None:
            if isinstance(axis,matplotlib.axes.Axes):
                self.axis = axis
                self.figure = axis.figure
            else:
                self.figure = matplotlib.pyplot.figure()
        # re-activate the figure if it was closed
        if not matplotlib.pyplot.fignum_exists(self.figure.number):
            self.figure = matplotlib.pyplot.figure(self.figure.number)
        # always re-connect the interactive keys to avoid frustration...
        self._mpl_reconnect()
        if axis is not None:
            # an explicit axis overrides whatever we had; rebind callbacks to
            # the new figure
            self._mpl_disconnect()
            self.axis = axis
            self.figure = axis.figure
            self._mpl_connect()
        elif len(self.figure.axes) > 0 and self.axis is None:
            self.axis = self.figure.axes[0] # default to first axis
        elif self.axis is None:
            self.axis = self.figure.gca()
        # A check to deal with issue #117: if you close the figure, the axis
        # still exists, but it cannot be reattached to a figure
        if not (self.axis.get_figure() is matplotlib.pyplot.figure(self.axis.get_figure().number)):
            self.axis = self.figure.gca()
        if self.axis is not None and self.axis not in self.figure.axes:
            # if you've cleared the axis, but the figure is still open, you
            # need a new axis
            self.figure.add_axes(self.axis)
        if clear and self.axis is not None:
            self.axis.clear()
            # Need to empty the stored model plots
            if hasattr(self.Spectrum, 'fitter'):
                self.Spectrum.fitter.clear()
        if autorefresh is not None:
            self.autorefresh = autorefresh
        self.plotscale = plotscale
        # merge (or replace) the default plot kwargs with this call's kwargs
        if self.plotkwargs and not override_plotkwargs:
            self.plotkwargs.update(kwargs)
        else:
            self.plotkwargs = kwargs
        self.plot(**kwargs)
    def _mpl_connect(self):
        # register parse_keys as this figure's key-press handler; idempotent
        # (only connects when no connection is currently recorded)
        if self.keyclick is None:
            self.keyclick = self.figure.canvas.mpl_connect('key_press_event',self.parse_keys)
    def _mpl_disconnect(self):
        # detach our key-press handler and forget its callback id
        self.figure.canvas.mpl_disconnect(self.keyclick)
        self.keyclick = None
    def _mpl_reconnect(self):
        # force a fresh key-press connection
        self._mpl_disconnect()
        self._mpl_connect()
        # disable fullscreen & grid: remap them to ctrl+ combinations so the
        # bare 'f'/'g' keys stay free for the interactive fitter commands
        matplotlib.pyplot.rcParams['keymap.fullscreen'] = 'ctrl+f'
        matplotlib.pyplot.rcParams['keymap.grid'] = 'ctrl+g'
    def plot(self, offset=0.0, xoffset=0.0, color='k', linestyle='steps-mid',
             linewidth=0.5, errstyle=None, erralpha=0.2, errcolor=None,
             silent=None, reset=True, refresh=True, use_window_limits=None,
             useOffset=False, **kwargs):
        """
        Plot the spectrum!

        Tries to automatically find a reasonable plotting range if one is not
        set.

        Parameters
        ----------
        offset : float
            vertical offset to add to the spectrum before plotting. Useful if
            you want to overlay multiple spectra on a single plot
        xoffset: float
            An x-axis shift. I don't know why you'd want this...
        color : str
            default to plotting spectrum in black
        linestyle : 'steps-mid' or str
            'steps-mid' for histogram-style plotting. See matplotlib's plot
            for more information
        linewidth : float
            Line width in pixels. Narrow lines are helpful when histo-plotting
        errstyle : 'fill', 'bars', or None
            can be "fill", which draws partially transparent boxes around the
            data to show the error region, or "bars" which draws standard
            errorbars. ``None`` will display no errorbars
        useOffset : bool
            Use offset-style X/Y coordinates (e.g., 1 + 1.483e10)? Defaults to
            False because these are usually quite annoying.
        xmin/xmax/ymin/ymax : float
            override defaults for plot range. Once set, these parameters are
            sticky (i.e., replotting will use the same ranges). Passed to
            `reset_limits`
        reset_[xy]limits : bool
            Reset the limits to "sensible defaults". Passed to `reset_limits`
        ypeakscale : float
            Scale up the Y maximum value. Useful to keep the annotations away
            from the data. Passed to `reset_limits`
        reset : bool
            Reset the x/y axis limits? If set, `reset_limits` will be called.
        """
        if self.axis is None:
            raise Exception("You must call the Plotter class to initiate the canvas before plotting.")
        self.offset = offset
        # there is a bug where this only seems to update the second time it is called
        self.label(**kwargs)
        self.label(**kwargs)
        # label-only kwargs must not be forwarded to matplotlib's plot call
        for arg in ['title','xlabel','ylabel']:
            if arg in kwargs:
                kwargs.pop(arg)
        # split out the kwargs destined for reset_limits
        reset_kwargs = {}
        for arg in ['xmin', 'xmax', 'ymin', 'ymax', 'reset_xlimits',
                    'reset_ylimits', 'ypeakscale']:
            if arg in kwargs:
                reset_kwargs[arg] = kwargs.pop(arg)
        # an explicit x-range request overrides keeping the current window
        if (use_window_limits is None and any(k in reset_kwargs for k in
                                              ('xmin','xmax','reset_xlimits'))):
            use_window_limits = False
        if use_window_limits:
            self._stash_window_limits()
        # for filled errorbars, order matters.
        inds = np.argsort(self.Spectrum.xarr)
        if errstyle is not None:
            if errcolor is None:
                errcolor = color
            if errstyle == 'fill':
                # shaded band between data-error and data+error
                self.errorplot = [self.axis.fill_between(steppify(self.Spectrum.xarr.value[inds]+xoffset, isX=True),
                                                         steppify((self.Spectrum.data*self.plotscale+self.offset-self.Spectrum.error*self.plotscale)[inds]),
                                                         steppify((self.Spectrum.data*self.plotscale+self.offset+self.Spectrum.error*self.plotscale)[inds]),
                                                         facecolor=errcolor, edgecolor=errcolor, alpha=erralpha, **kwargs)]
            elif errstyle == 'bars':
                self.errorplot = self.axis.errorbar(self.Spectrum.xarr[inds].value+xoffset,
                                                    self.Spectrum.data[inds]*self.plotscale+self.offset,
                                                    yerr=self.Spectrum.error[inds]*self.plotscale,
                                                    ecolor=errcolor, fmt='none',
                                                    **kwargs)
        self._spectrumplot = self.axis.plot(self.Spectrum.xarr.value[inds]+xoffset,
                                            self.Spectrum.data[inds]*self.plotscale+self.offset,
                                            color=color,
                                            linestyle=linestyle,
                                            linewidth=linewidth, **kwargs)
        self.axis.ticklabel_format(useOffset=useOffset)
        if use_window_limits:
            self._reset_to_stashed_limits()
        if silent is not None:
            self.silent = silent
        if reset:
            self.reset_limits(use_window_limits=use_window_limits, **reset_kwargs)
        if self.autorefresh and refresh:
            self.refresh()
    def _stash_window_limits(self):
        # remember the currently visible view so plot() can restore it after
        # drawing (see _reset_to_stashed_limits)
        self._window_limits = self.axis.get_xlim(),self.axis.get_ylim()
        if self.debug:
            print("Stashed window limits: ",self._window_limits)
    def _reset_to_stashed_limits(self):
        # restore the view saved by _stash_window_limits and keep the cached
        # xmin/xmax/ymin/ymax properties in sync with it
        self.axis.set_xlim(*self._window_limits[0])
        self.axis.set_ylim(*self._window_limits[1])
        self.xmin,self.xmax = self._window_limits[0]
        self.ymin,self.ymax = self._window_limits[1]
        if self.debug:
            print("Recovered window limits: ",self._window_limits)
    def reset_limits(self, xmin=None, xmax=None, ymin=None, ymax=None,
                     reset_xlimits=True, reset_ylimits=True, ypeakscale=1.2,
                     silent=None, use_window_limits=False, **kwargs):
        """
        Automatically or manually reset the plot limits
        """
        # if not use_window_limits: use_window_limits = False
        if self.debug:
            frame = inspect.currentframe()
            args, _, _, values = inspect.getargvalues(frame)
            print(zip(args,values))
        if use_window_limits:
            # this means DO NOT reset!
            # it simply sets self.[xy][min/max] = current value
            self.set_limits_from_visible_window()
        else:
            if silent is not None:
                self.silent = silent
            # if self.xmin and self.xmax:
            if (reset_xlimits or self.Spectrum.xarr.min().value < self.xmin or self.Spectrum.xarr.max().value > self.xmax):
                if not self.silent:
                    warn("Resetting X-axis min/max because the plot is out of bounds.")
                self.xmin = None
                self.xmax = None
            # x limits: explicit argument wins, otherwise fall back to the
            # data extrema (always stored as Quantities in the current unit)
            if xmin is not None:
                self.xmin = u.Quantity(xmin, self._xunit)
            elif self.xmin is None:
                self.xmin = u.Quantity(self.Spectrum.xarr.min().value, self._xunit)
            if xmax is not None:
                self.xmax = u.Quantity(xmax, self._xunit)
            elif self.xmax is None:
                self.xmax = u.Quantity(self.Spectrum.xarr.max().value, self._xunit)
            # pixel indices of the x limits (for y autoscaling below)
            xpixmin = np.argmin(np.abs(self.Spectrum.xarr.value-self.xmin.value))
            xpixmax = np.argmin(np.abs(self.Spectrum.xarr.value-self.xmax.value))
            if xpixmin>xpixmax:
                xpixmin,xpixmax = xpixmax,xpixmin
            elif xpixmin == xpixmax:
                # degenerate x range: retry once with reset_xlimits=True;
                # guard against recursing forever
                if reset_xlimits:
                    raise Exception("Infinite recursion error. Maybe there are no valid data?")
                if not self.silent:
                    warn("ERROR: the X axis limits specified were invalid. Resetting.")
                self.reset_limits(reset_xlimits=True, ymin=ymin, ymax=ymax,
                                  reset_ylimits=reset_ylimits,
                                  ypeakscale=ypeakscale, **kwargs)
                return
            if self.ymin and self.ymax:
                # this is utter nonsense....
                if (self.Spectrum.data.max() < self.ymin or self.Spectrum.data.min() > self.ymax
                    or reset_ylimits):
                    if not self.silent and not reset_ylimits:
                        warn("Resetting Y-axis min/max because the plot is out of bounds.")
                    self.ymin = None
                    self.ymax = None
            if ymin is not None:
                self.ymin = ymin
            elif self.ymin is None:
                # masked arrays: .min() respects the mask; otherwise skip NaNs
                if hasattr(self.Spectrum.data, 'mask'):
                    yminval = self.Spectrum.data[xpixmin:xpixmax].min()
                else:
                    yminval = np.nanmin(self.Spectrum.data[xpixmin:xpixmax])
                # Increase the range fractionally. This means dividing a positive #, multiplying a negative #
                if yminval < 0:
                    self.ymin = float(yminval)*float(ypeakscale)
                else:
                    self.ymin = float(yminval)/float(ypeakscale)
            if ymax is not None:
                self.ymax = ymax
            elif self.ymax is None:
                # ymax is computed relative to ymin, then scaled and shifted back
                if hasattr(self.Spectrum.data, 'mask'):
                    ymaxval = ((self.Spectrum.data[xpixmin:xpixmax]).max()-self.ymin)
                else:
                    ymaxval = (np.nanmax(self.Spectrum.data[xpixmin:xpixmax])-self.ymin)
                if ymaxval > 0:
                    self.ymax = float(ymaxval) * float(ypeakscale) + self.ymin
                else:
                    self.ymax = float(ymaxval) / float(ypeakscale) + self.ymin
            self.ymin += self.offset
            self.ymax += self.offset
        self.axis.set_xlim(self.xmin.value if hasattr(self.xmin, 'value') else self.xmin,
                           self.xmax.value if hasattr(self.xmax, 'value') else self.xmax)
        self.axis.set_ylim(self.ymin, self.ymax)
def label(self, title=None, xlabel=None, ylabel=None, verbose_label=False,
**kwargs):
"""
Label the plot, with an attempt to parse standard units into nice latex labels
Parameters
----------
title : str
xlabel : str
ylabel : str
verbose_label: bool
"""
if title is not None:
self.title = title
elif hasattr(self.Spectrum,'specname'):
self.title = self.Spectrum.specname
if self.title is not "":
self.axis.set_title(self.title)
if xlabel is not None:
self.xlabel = xlabel
elif self._xunit:
# WAS: self.xlabel = self.Spectrum.xarr.xtype.title()
try:
self.xlabel = xlabel_table[self._xunit.physical_type.lower()]
except KeyError:
self.xlabel = self._xunit.physical_type.title()
# WAS: self.xlabel += " ("+u.Unit(self._xunit).to_string()+")"
self.xlabel += " ({0})".format(self._xunit.to_string())
if verbose_label:
self.xlabel = "%s %s" % (self.Spectrum.xarr.velocity_convention.title(),
self.xlabel)
if self.xlabel is not None:
self.axis.set_xlabel(self.xlabel)
if ylabel is not None:
self.axis.set_ylabel(ylabel)
elif self.Spectrum.unit in ['Ta*','Tastar']:
self.axis.set_ylabel("$T_A^*$ (K)")
elif self.Spectrum.unit in ['K']:
self.axis.set_ylabel("Brightness Temperature $T$ (K)")
elif self.Spectrum.unit == 'mJy':
self.axis.set_ylabel("$S_\\nu$ (mJy)")
elif self.Spectrum.unit == 'Jy':
self.axis.set_ylabel("$S_\\nu$ (Jy)")
else:
if isinstance(self.Spectrum.unit, str) and "$" in self.Spectrum.unit:
# assume LaTeX already
self.axis.set_ylabel(self.Spectrum.unit)
elif isinstance(self.Spectrum.unit, str):
self.axis.set_ylabel(self.Spectrum.unit)
else:
label_units = self.Spectrum.unit.to_string(format='latex')
if 'mathring{A}' in label_units:
label_units = label_units.replace('\mathring{A}', 'A')
if '\overset' in label_units:
label_units = label_units.replace('\overset', '^')
self.axis.set_ylabel(label_units)
    @property
    def ylabel(self):
        """The y-axis label currently shown on the axis (read-only)."""
        return self.axis.get_ylabel()
def refresh(self):
if self.axis is not None:
self.axis.figure.canvas.draw()
def savefig(self,fname,bbox_inches='tight',**kwargs):
"""
simple wrapper of maplotlib's savefig.
"""
self.axis.figure.savefig(fname,bbox_inches=bbox_inches,**kwargs)
    def parse_keys(self,event):
        """
        Parse key commands entered from the keyboard
        """
        if hasattr(event,'key'):
            if event.key == '?':
                # show the interactive help text
                print(interactive_help_message)
            elif event.key == 'f':
                # start the interactive fitter; detach matplotlib's own key
                # bindings so they don't swallow the fitter's keys
                print("\n\nFitter initiated from the interactive plotter.")
                # extra optional text:
                # Matplotlib shortcut keys ('g','l','p',etc.) are disabled. Re-enable with 'r'"
                self._disconnect_matplotlib_keys()
                self.Spectrum.specfit(interactive=True)
                # (re)create the fitter tool window if enabled or if its
                # figure has been closed
                if not hasattr(self,'FitterTool') and self.automake_fitter_tool:
                    self.FitterTool = widgets.FitterTools(self.Spectrum.specfit, self.figure)
                elif hasattr(self,'FitterTool') and self.FitterTool.toolfig.number not in matplotlib.pyplot.get_fignums():
                    self.FitterTool = widgets.FitterTools(self.Spectrum.specfit, self.figure)
            elif event.key is not None and event.key.lower() == 'b':
                # 'b' starts the baseliner; 'B' also resets the selection
                if event.key == 'b':
                    print("\n\nBaseline initiated from the interactive plotter")
                elif event.key == 'B':
                    print("\n\nBaseline initiated from the interactive plotter (with reset)")
                print("Matplotlib shortcut keys ('g','l','p',etc.) are disabled. Re-enable with 'r'")
                self._disconnect_matplotlib_keys()
                self.Spectrum.baseline(interactive=True, reset_selection=(event.key=='B'))
                if not hasattr(self,'FitterTool') and self.automake_fitter_tool:
                    self.FitterTool = widgets.FitterTools(self.Spectrum.specfit, self.figure)
                elif hasattr(self,'FitterTool') and self.FitterTool.toolfig.number not in matplotlib.pyplot.get_fignums():
                    self.FitterTool = widgets.FitterTools(self.Spectrum.specfit, self.figure)
            elif event.key == 'r':
                # print("\n\nReconnected matplotlib shortcut keys.")
                self._reconnect_matplotlib_keys()
            elif event.key == 'R':
                # redraw the plot cleanly by re-invoking the plotter
                self()
            elif event.key == 'i':
                # show the individual fitted components
                self.Spectrum.specfit.plot_fit(show_components=True)
def get_two_clicks(self,event):
if self._xclick1 is None:
self._xclick1 = event.xdata
elif self._xclick2 is None:
self._xclick2 = event.xdata
    def set_limits_from_visible_window(self, debug=False):
        """ Hopefully self-descriptive: set the x and y limits from the
        currently visible window (use this if you use the pan/zoom tools or
        manually change the limits) """
        if debug:
            print("Changing x limits from %f,%f to %f,%f" % (self.xmin,self.xmax,self.axis.get_xlim()[0],self.axis.get_xlim()[1]))
            print("Changing y limits from %f,%f to %f,%f" % (self.ymin,self.ymax,self.axis.get_ylim()[0],self.axis.get_ylim()[1]))
        # copy the axis view into the cached limit properties
        self.xmin, self.xmax = self.axis.get_xlim()
        self.ymin, self.ymax = self.axis.get_ylim()
        if debug:
            print("New x limits %f,%f == %f,%f" % (self.xmin,self.xmax,self.axis.get_xlim()[0],self.axis.get_xlim()[1]))
            print("New y limits %f,%f == %f,%f" % (self.ymin,self.ymax,self.axis.get_ylim()[0],self.axis.get_ylim()[1]))
def copy(self, parent=None):
"""
Create a copy of the plotter with blank (uninitialized) axis & figure
[ parent ]
A spectroscopic axis instance that is the parent of the specfit
instance. This needs to be specified at some point, but defaults
to None to prevent overwriting a previous plot.
"""
newplotter = copy.copy(self)
newplotter.Spectrum = parent
newplotter.axis = None
newplotter.figure = None
return newplotter
    def line_ids(self, line_names, line_xvals, xval_units=None, auto_yloc=True,
                 velocity_offset=None, velocity_convention='radio',
                 auto_yloc_fraction=0.9, **kwargs):
        """
        Add line ID labels to a plot using lineid_plot

        http://oneau.wordpress.com/2011/10/01/line-id-plot/
        https://github.com/phn/lineid_plot
        http://packages.python.org/lineid_plot/

        Parameters
        ----------
        line_names : list
            A list of strings to label the specified x-axis values
        line_xvals : list
            List of x-axis values (e.g., wavelengths) at which to label the lines.
            Can be a list of quantities.
        xval_units : string
            The unit of the line_xvals if they are not given as quantities
        velocity_offset : quantity
            A velocity offset to apply to the inputs if they are in frequency
            or wavelength units
        velocity_convention : 'radio' or 'optical' or 'doppler'
            Used if the velocity offset is given
        auto_yloc : bool
            If set, overrides box_loc and arrow_tip (the vertical position of
            the lineid labels) in kwargs to be `auto_yloc_fraction` of the plot
            range
        auto_yloc_fraction: float in range [0,1]
            The fraction of the plot (vertically) at which to place labels

        Examples
        --------
        >>> import numpy as np
        >>> import pyspeckit
        >>> sp = pyspeckit.Spectrum(
                xarr=pyspeckit.units.SpectroscopicAxis(np.linspace(-50,50,101),
                    unit='km/s', refX=6562.8, refX_unit='angstrom'),
                data=np.random.randn(101), error=np.ones(101))
        >>> sp.plotter()
        >>> sp.plotter.line_ids(['H$\\alpha$'],[6562.8],xval_units='angstrom')
        """
        import lineid_plot
        if velocity_offset is not None:
            assert velocity_offset.unit.is_equivalent(u.km/u.s)
            # build the doppler equivalency for the requested convention
            doppler = getattr(u, 'doppler_{0}'.format(velocity_convention))
            equivalency = doppler(self.Spectrum.xarr.refX)
        xvals = []
        for xv in line_xvals:
            if hasattr(xv, 'unit'):
                pass
            else:
                xv = u.Quantity(xv, xval_units)
            # NOTE(review): `equivalency` is only defined when
            # velocity_offset is given — confirm against upstream whether
            # this path is reachable without a velocity_offset
            xv = xv.to(u.km/u.s,
                       equivalencies=equivalency)
            if velocity_offset is not None:
                xv = xv + velocity_offset
            # convert back to the plotted x-axis unit
            xv = xv.to(self.Spectrum.xarr.unit, equivalencies=equivalency)
            xvals.append(xv.value)
        if auto_yloc:
            # place the label boxes / arrow tips at a fixed fraction of the
            # current vertical plot range
            yr = self.axis.get_ylim()
            kwargs['box_loc'] = (yr[1]-yr[0])*auto_yloc_fraction + yr[0]
            kwargs['arrow_tip'] = (yr[1]-yr[0])*(auto_yloc_fraction*0.9) + yr[0]
        lineid_plot.plot_line_ids(self.Spectrum.xarr,
                                  self.Spectrum.data,
                                  xvals,
                                  line_names,
                                  ax=self.axis,
                                  **kwargs)
    def line_ids_from_measurements(self, auto_yloc=True,
                                   auto_yloc_fraction=0.9, **kwargs):
        """
        Add line ID labels to a plot using lineid_plot

        http://oneau.wordpress.com/2011/10/01/line-id-plot/
        https://github.com/phn/lineid_plot
        http://packages.python.org/lineid_plot/

        Parameters
        ----------
        auto_yloc : bool
            If set, overrides box_loc and arrow_tip (the vertical position of
            the lineid labels) in kwargs to be `auto_yloc_fraction` of the plot
            range
        auto_yloc_fraction: float in range [0,1]
            The fraction of the plot (vertically) at which to place labels

        Examples
        --------
        >>> import numpy as np
        >>> import pyspeckit
        >>> sp = pyspeckit.Spectrum(
                xarr=pyspeckit.units.SpectroscopicAxis(np.linspace(-50,50,101),
                    units='km/s', refX=6562.8, refX_unit='angstroms'),
                data=np.random.randn(101), error=np.ones(101))
        >>> sp.plotter()
        >>> sp.specfit(multifit=None, fittype='gaussian', guesses=[1,0,1]) # fitting noise....
        >>> sp.measure()
        >>> sp.plotter.line_ids_from_measurements()
        """
        import lineid_plot
        if hasattr(self.Spectrum,'measurements'):
            measurements = self.Spectrum.measurements
            if auto_yloc:
                # place label boxes / arrow tips at a fixed fraction of the
                # current vertical plot range
                yr = self.axis.get_ylim()
                kwargs['box_loc'] = (yr[1]-yr[0])*auto_yloc_fraction + yr[0]
                kwargs['arrow_tip'] = (yr[1]-yr[0])*(auto_yloc_fraction*0.9) + yr[0]
            # one label per measured line, positioned at the fitted centroid
            lineid_plot.plot_line_ids(self.Spectrum.xarr, self.Spectrum.data,
                                      [v['pos'] for v in
                                       measurements.lines.values()],
                                      measurements.lines.keys(), ax=self.axis,
                                      **kwargs)
        else:
            warn("Cannot add line IDs from measurements unless measurements have been made!")
def parse_units(labelstring):
    """
    Convert plain-text unit strings to LaTeX-formatted equivalents.

    "um" becomes a LaTeX micron symbol, the exponents "-1"/"-2"/"-3"
    become superscripts, and "ergss" is split into "ergs s".

    Parameters
    ----------
    labelstring : str
        The label text to prettify.

    Returns
    -------
    str

    Notes
    -----
    BUGFIX: the micron replacement template previously contained an
    invalid escape sequence (a lone backslash before "mu"); re.sub
    rejects unknown escapes in the replacement template on Python 3.7+
    ("bad escape"), so every call raised. A raw string with a doubled
    backslash yields a literal backslash in the output.
    """
    import re
    labelstring = re.sub("um", r"$\\mu$m", labelstring)
    labelstring = re.sub("-1", "$^{-1}$", labelstring)
    labelstring = re.sub("-2", "$^{-2}$", labelstring)
    labelstring = re.sub("-3", "$^{-3}$", labelstring)
    labelstring = re.sub("ergss", "ergs s", labelstring)
    return labelstring
def parse_norm(norm):
    """
    Expected format: norm = 10E15

    Split a normalization string such as "10E15" or "1e5" on its exponent
    marker and return a LaTeX-style power expression (e.g. "10^{15}").
    A mantissa of 1 is rendered as "10".
    """
    try:
        mantissa, exponent = norm.split('E')
    except ValueError:
        # no (or too many) 'E' separators; try lowercase
        mantissa, exponent = norm.split('e')
    prefix = '10' if float(mantissa) == 1.0 else mantissa
    return prefix + '^{%s}' % exponent
def steppify(arr, isX=False):
    """
    *support function*

    Return a double-length version of ``arr`` suitable for step
    (histogram-style) plotting. With ``isX=True`` the values are treated
    as bin centers and shifted by half the local spacing so they become
    bin edges.
    """
    if not isX:
        # duplicate each sample in place: [a, b] -> [a, a, b, b]
        return np.repeat(arr, 2)
    half_step = abs(arr[1:] - arr[:-1]) / 2.0
    # interleave the left and right edge of each bin
    edges = np.column_stack((arr[:-1] - half_step,
                             arr[:-1] + half_step)).ravel()
    closing = edges[-1] + half_step[-1]
    return np.concatenate([edges, [closing, closing]])
|
mikelum/pyspeckit
|
pyspeckit/spectrum/plotters.py
|
Python
|
mit
| 31,130
|
[
"Gaussian"
] |
9cc62897613a994bdd8d2f4170d1b87e32bd5b29b76e3db73821152e187871ed
|
# Copyright 2000 by Andrew Dalke. All rights reserved.
# This code is part of the Biopython distribution and governed by its
# license. Please see the LICENSE file that should have been included
# as part of this package.
# The Prosite patterns are defined at http://www.expasy.ch/txt/prosuser.txt
#
# The PA (PAttern) lines contains the definition of a PROSITE pattern. The
# patterns are described using the following conventions:
#
# - The standard IUPAC one-letter codes for the amino acids are used.
# - The symbol `x' is used for a position where any amino acid is accepted.
# - Ambiguities are indicated by listing the acceptable amino acids for a
# given position, between square parentheses `[ ]'. For example: [ALT]
# stands for Ala or Leu or Thr.
# - Ambiguities are also indicated by listing between a pair of curly
# brackets `{ }' the amino acids that are not accepted at a given
# position. For example: {AM} stands for any amino acid except Ala and
# Met.
# - Each element in a pattern is separated from its neighbor by a `-'.
# - Repetition of an element of the pattern can be indicated by following
# that element with a numerical value or a numerical range between
# parenthesis. Examples: x(3) corresponds to x-x-x, x(2,4) corresponds to
# x-x or x-x-x or x-x-x-x.
# - When a pattern is restricted to either the N- or C-terminal of a
# sequence, that pattern either starts with a `<' symbol or respectively
# ends with a `>' symbol.
# - A period ends the pattern.
#
# That boils down to doing these conversions
#
# [] -> []
# {} -> [^ ]
# - ->
# () -> {}
# < -> ^
# > -> $
# x->X
# . ->
# Note:
# [G>] is a valid Prosite pattern, equivalent to "([G]|$)"
# I assume then that
# [>G] is equivalent to "(^|[G])"
# It is conceivable that [G>]-G-G is valid, meaning a "G" at the end
# of the sequence or followed by two more Gs. I did not implement
# this. I haven't gotten an answer to my query on either of these two
# non-documented possibilities.
import string, re
from Bio import Seq, Alphabet
# Syntactic conversion to two types of regular expressions
# NOTE: string.maketrans (and the two-argument str.translate used with it
# below) are Python 2 APIs; this module predates Python 3.
_prosite_trans = string.maketrans("abcdefghijklmnopqrstuvwxyzX}()<>",
                                  "ABCDEFGHIJKLMNOPQRSTUVW.YZ.]{}^$")
# This does not verify that the pattern is correct - invalid patterns
# can be converted!
def prosite_to_re(pattern):
    """convert a valid Prosite pattern into an re string"""
    # remember whether the pattern begins "[<" (start-anchor inside a set);
    # this needs special handling after the character translation
    flg = (pattern[:2] == "[<")
    # negated sets: "{...}" -> "[^...]" (the "}" is mapped to "]" below)
    s = pattern.replace("{", "[^")
    # Python 2 two-argument translate: map characters via _prosite_trans
    # and delete "-" and "." entirely
    s = s.translate(_prosite_trans, "-.")
    # special case "[<" and ">]", if they exist
    if flg:
        i = s.index("]")
        s = "(?:^|[" + s[2:i] + "])" + s[i+1:]
    if s[-2:] == "$]":
        i = s.rindex("[")
        s = s[:i] + "(?:" + s[i:-2] + "]|$)"
    elif s[-3:] == "$]$":
        i = s.rindex("[")
        s = s[:i] + "(?:" + s[i:-3] + "]|$)$"
    return s
# This does not verify the pattern is correct - invalid patterns can
# be converted!
def prosite_to_grouped_re(pattern):
    """convert a valid Prosite pattern into an re with groups for each term"""
    flg = (pattern[:2] == "[<")
    s = pattern.replace("{", "[^")
    # Don't delete the "-" characters: use them to place the ()s
    # (Python 2 two-argument translate: only "." is deleted here)
    s = s.translate(_prosite_trans, ".")
    # Get the [< and >] terms correct
    if flg:
        i = s.index("]")
        s = "(?:^|[" + s[2:i] + "])" + s[i+1:]
    if s[-2:] == "$]":
        i = s.rindex("[")
        s = s[:i] + "(?:" + s[i:-2] + "]|$)"
    if s[-3:] == "$]$":
        i = s.rindex("[")
        s = s[:i] + "(?:" + s[i:-3] + "]|$)$"
    # Watch out for unescaped < and > terms
    if s[:1] == "^":
        s = "^(" + s[1:]
    else:
        s = "(" + s
    if s[-1:] == "$":
        s = s[:-1] + ")$"
    else:
        s = s + ")"
    # every "-" separator becomes a group boundary ")(", giving one regex
    # group per Prosite term
    return s.replace("-", ")(")
# Both the Prosite pattern and match result act like sequences.
class PrositeAlphabet(Alphabet.Alphabet):
    """Alphabet marker class for Prosite patterns (no added behavior)."""
    pass
# module-level singleton shared by all Prosite instances
prosite_alphabet = PrositeAlphabet()
def compile(pattern):
    # NOTE: deliberately shadows the builtin compile() inside this module
    # (legacy Biopython API). verify_pattern is defined elsewhere in this
    # module (not shown here).
    if not verify_pattern(pattern):
        raise TypeError("not a legal prosite pattern")
    return Prosite(pattern = pattern)
class Prosite:
    """A Prosite pattern held either as text (``pattern``) or as a parsed
    term list (``data``); the missing representation and the compiled
    regular expressions (``re``, ``grouped_re``) are created lazily and
    cached by __getattr__.
    """
    alphabet = prosite_alphabet
    # Don't like having two different types of input - not very pythonic
    # However, it is faster since I can assume the input has already been
    # verified (if it's a pattern).
    def __init__(self, pattern = None, data = None):
        # exactly one of `pattern` / `data` must be given
        assert (pattern is None and data is not None) ^ \
               (pattern is not None and data is None), \
               "one and only one of pattern and data can have a value"
        if pattern is not None:
            self.pattern = pattern
        if data is not None:
            self.data = data
    def __repr__(self):
        return "Prosite(%s)" % repr(str(self))
    def __str__(self):
        # terms joined by "-", terminated by "." per the Prosite convention
        return '-'.join(map(str, self.data)) + "."
    def __len__(self): return len(self.data)
    def __getitem__(self, i): return self.data[i]
    def __getslice__(self, i, j):
        # NOTE: __getslice__ is only invoked by Python 2
        i = max(i, 0); j = max(j, 0)
        return Prosite(data = self.data[i:j])
    def __getattr__(self, name):
        # Lazy creation of these elements / cache results
        if name == "re":
            self.re = re.compile(prosite_to_re(self.pattern))
            return self.re
        elif name == "grouped_re":
            self.grouped_re = re.compile(prosite_to_grouped_re(self.pattern))
            return self.grouped_re
        elif name == "data":
            # find_terms is defined elsewhere in this module (not shown here)
            self.data = find_terms(self.pattern)
            return self.data
        elif name == "pattern":
            self.pattern = str(self)
            return self.pattern
        raise AttributeError(name)
    def tostring(self):
        return str(self)
    def search(self, seq, pos=0, endpos=None):
        # NOTE: buffer() is a Python 2 builtin; this code predates Python 3
        if endpos is not None:
            m = self.grouped_re.search(buffer(seq.tostring()), pos, endpos)
        else:
            m = self.grouped_re.search(buffer(seq.tostring()), pos)
        if m is None:
            return None
        return PrositeMatch(self, seq, m)
    def match(self, seq, pos=0, endpos=None):
        # anchored variant of search(); same Python 2 buffer() caveat
        if endpos is not None:
            m = self.grouped_re.match(buffer(seq.tostring()), pos, endpos)
        else:
            m = self.grouped_re.match(buffer(seq.tostring()), pos)
        if m is None:
            return None
        return PrositeMatch(self, seq, m)
    # I was thinking about adding sub, subn, findall, etc., but either
    # you just want the string (in which case, use the ".re") or
    # you could be changing to a different alphabet (eg, T->U).
# Elements of a Prosite pattern
class PrositeTerm:
    """A single element of a Prosite pattern.

    Holds the accepted letter set, whether that set is negated
    (``ignore``, the Prosite ``{...}`` notation), start/end anchoring
    flags, and the repeat-count range; ``str()`` renders the term back
    into Prosite text form.
    """
    def __init__(self, letters, ignore, is_begin, is_end,
                 min_count, max_count, can_begin, can_end):
        self.letters = letters
        self.ignore = ignore
        self.is_begin = is_begin
        self.is_end = is_end
        self.min_count = min_count
        self.max_count = max_count
        self.can_begin = can_begin
        self.can_end = can_end
    def copy(self):
        """Return a new PrositeTerm with identical field values."""
        return PrositeTerm(self.letters, self.ignore, self.is_begin,
                           self.is_end, self.min_count, self.max_count,
                           self.can_begin, self.can_end)
    def __str__(self):
        # base form, then the repeat-count suffix, then the end anchor
        text = self.base_str()
        if self.min_count != self.max_count:
            text = text + "(%d,%d)" % (self.min_count, self.max_count)
        elif self.min_count != 1:
            text = text + "(%d)" % self.min_count
        if self.is_end:
            text = text + ">"
        return text
    def base_str(self):
        """Render the term in Prosite form without the repeat-count fields."""
        prefix = "<" if self.is_begin else ""
        if self.ignore:
            # negated set: {ACD}
            return prefix + "{" + self.letters + "}"
        if len(self.letters) == 1 and not (self.can_begin or self.can_end):
            # a bare single residue needs no brackets
            return prefix + self.letters
        # bracketed set, optionally embedding the < / > anchors
        inner = self.letters
        if self.can_begin:
            inner = "<" + inner
        if self.can_end:
            inner = inner + ">"
        return prefix + "[" + inner + "]"
# Results of a Prosite match. Wrapper to the re.MatchObj, but returns
# Seq objects instead of strings. And lookee - it implements the Seq
# interface too!
class PrositeMatch:
    """Wrap a regular-expression match on a sequence.

    Group accessors return ``Seq.Seq`` objects instead of plain strings,
    and the match itself supports the sequence protocol (len, indexing,
    slicing) over the matched text.
    """
    def __init__(self, prosite, seq, match):
        self.prosite = prosite   # the Prosite pattern that produced the match
        self.seq = seq           # the searched sequence object
        self.match = match       # the underlying re match object
        self.pos = match.pos
        # BUGFIX: was `match.pos`, so endpos always duplicated pos instead
        # of reporting the end position of the search region.
        self.endpos = match.endpos
        # for Seq.Seq initialization
        self.data = match.group(0)
        self.alphabet = seq.alphabet
    def __repr__(self):
        # XXX this isn't the right way
        return "<PrositeMatch instance at %x>" % id(self)
    def __str__(self):
        return str(self.data)
    def __len__(self): return len(self.data)
    def __getitem__(self, i): return self.data[i]
    def __getslice__(self, i, j):
        # NOTE: __getslice__ is only invoked by Python 2
        i = max(i, 0); j = max(j, 0)
        return Seq.Seq(self.data[i:j], self.alphabet)
    def mapping(self):
        """return a list of numbers mapping to items of the original pattern

        Indices are zero-based. For example, if the Prosite pattern is
        "[AP](2)-D." matched against "PAD", then the mapping is [0, 0, 1],
        meaning the first character of the match ("P") comes from pattern
        term 0 ("[AP]"), as does the second letter ("A"). The 3rd letter
        ("D") maps to term 1 of the pattern.
        """
        vals = []
        i = 0
        start = self.start(0)
        try:
            # walk the per-term groups; every matched character before the
            # current group's end belongs to term i
            while 1:
                end = self.match.end(i+1)
                while start < end:
                    vals.append(i)
                    start = start + 1
                i = i + 1
        except IndexError:
            # ran past the last group: mapping complete
            pass
        return vals
    def mapped_pattern(self):
        """returns the specific Prosite pattern used to find this sequence

        >>> p = Prosite.compile("[AP](2,3)-D.")
        >>> m = p.search(Seq.Seq("PAD"))
        >>> mapping = m.mapping()
        >>> mapped = m.mapped_pattern()
        >>> print str(m[1]), str(p[mapping[1]]), str(mapped[1])
        P [AP](2,3) [AP]
        >>> print str(mapped)
        [AP]-[AP]-D.
        >>>

        Note that the original term includes the count, while the
        mapped pattern does the expansion.
        """
        return pattern_mapping(self.prosite, self.mapping())
    def start(self, g=0):
        """Start index of group ``g`` (default: the whole match)."""
        return self.match.start(g)
    def end(self, g=0):
        """End index of group ``g`` (default: the whole match)."""
        return self.match.end(g)
    def span(self, g):
        """(start, end) indices of group ``g``."""
        return self.match.span(g)
    def groups(self, default=None):
        """All per-term groups, each wrapped as a Seq."""
        result = []
        alphabet = self.alphabet
        for g in self.match.groups(default):
            result.append( Seq.Seq(g, alphabet) )
        return tuple(result)
    def group(self, *groups):
        """Like re's ``match.group``, but wraps results in Seq objects.

        BUGFIX: the original tested ``len(result) == 1`` to decide whether
        a single group was requested, which mis-handled any single group
        whose match was longer than one character (it returned a tuple of
        per-character Seqs). Dispatch on the actual return type instead:
        re returns a tuple only when multiple group indices are given.
        """
        result = self.match.group(*groups)
        if isinstance(result, tuple):
            return tuple(Seq.Seq(x, self.alphabet) for x in result)
        if result is None:
            # non-participating optional group
            return None
        return Seq.Seq(result, self.alphabet)
def pattern_mapping(prosite, mapping):
    """Build a new Prosite pattern from *prosite* following *mapping*.

    Each index in *mapping* selects a term of the original pattern; the
    term is copied with its repeat count collapsed to exactly 1, so the
    result spells out one term per matched position.
    """
    def _single(term):
        # one copy per matched position, count fields reset
        dup = term.copy()
        dup.min_count = dup.max_count = 1
        return dup
    return Prosite(data=[_single(prosite[idx]) for idx in mapping])
# Matches one dash-separated Prosite term (begin/end anchors "<"/">"
# already stripped by the caller).  Groups: 1=single letter,
# 2="<" inside "[]", 3=letters inside "[]", 4=">" inside "[]",
# 5=letters inside "{}", 6=min count, 7=",max" count (comma included).
prosite_term_re = re.compile(r"""
(?:
([ABCDEFGHIKLMNPQRSTVWXYZx])| # a character OR
\[(<?)([ABCDEFGHIKLMNPQRSTVWXYZ]+)(>?)\]| # something in []s OR
\{([ABCDEFGHIKLMNPQRSTVWXYZ]+)\} # something in {}s
)(?:\((\d+)(,\d+)?\))? # optional count of the form "(i,j)", ",j" optional
$
""", re.VERBOSE)
# This does not verify the pattern is correct - invalid patterns can
# be converted!
def find_terms(pattern):
    """Split a Prosite pattern string into a list of PrositeTerm objects.

    Raises TypeError for malformed terms.  This does not fully validate
    the pattern -- some invalid patterns are still converted; use
    verify_pattern for a syntax check.
    """
    if pattern[-1:] != ".":
        raise TypeError("not a prosite pattern - needs a final '.'")
    terms = pattern[:-1].split("-")
    last = len(terms) - 1
    parsed = []
    for pos, term in enumerate(terms):
        can_begin = can_end = 0
        # leading "<" anchors the term to the sequence start
        is_begin = 0
        if term[:1] == "<":
            term = term[1:]
            is_begin = 1
        # trailing ">" anchors the term to the sequence end
        is_end = 0
        if term[-1:] == ">":
            term = term[:-1]
            is_end = 1
        match = prosite_term_re.match(term)
        if match is None:
            raise TypeError("not a Prosite term (%s)" % repr(term))
        if match.group(1) is not None:
            # a bare single letter
            ignore = 0
            letters = match.group(1)
        elif match.group(3) is not None:
            # letters inside "[]"s; "[<" / ">]" markers only make sense
            # on the first / last term respectively
            ignore = 0
            letters = match.group(3)
            if match.group(2):
                can_begin = 1
                if pos != 0:
                    raise TypeError("[<] only allowed for first term (%s)"
                                    % repr(term))
            if match.group(4):
                can_end = 1
                if pos != last:
                    raise TypeError("[>] only allowed for last term (%s)"
                                    % repr(term))
        elif match.group(5) is not None:
            # letters inside "{}"s: a negated set
            ignore = 1
            letters = match.group(5)
        else:
            raise TypeError("not a prosite term (%s)" % repr(term))
        # repeat counts: "(i)" or "(i,j)"; default is exactly one
        if match.group(6) is not None:
            min_count = int(match.group(6))
        else:
            min_count = 1
        if match.group(7) is not None:
            # group 7 still carries the leading comma
            max_count = int(match.group(7)[1:])
        else:
            max_count = min_count
        parsed.append(PrositeTerm(letters, ignore, is_begin, is_end,
                                  min_count, max_count, can_begin, can_end))
    return parsed
prosite_re = re.compile(r"""
^<? # starts with an optional "<"
(
[ABCDEFGHIKLMNPQRSTVWXYZx]| # a character OR
(\[<?[ABCDEFGHIKLMNPQRSTVWXYZ]+>?\])| # something in []s OR
\{[ABCDEFGHIKLMNPQRSTVWXYZ]+\} # something in {}s
)(\(\d+(,\d+)?\))? # optional count of the form "(i,j)" (",j" is optional)
(- # new terms seperated by a '-'
(
[ABCDEFGHIKLMNPQRSTVWXYZx]| # a character OR
\[[ABCDEFGHIKLMNPQRSTVWXYZ]+>?\]| # something in []s OR
\{[ABCDEFGHIKLMNPQRSTVWXYZ]+\} # something in {}s
)(\(\d+(,\d+)?\))? # optional count
)* # repeat until done
>? # pattern ends with an optional ">"
\.$ # description ends with a required "."
""", re.VERBOSE)
# This verifies the pattern is correct.
def verify_pattern(pattern):
    """returns 1 if the Prosite pattern is syntactically correct, else 0"""
    if prosite_re.match(pattern) is None:
        return 0
    # the regex alone cannot enforce that "[<" appears only at the very
    # beginning and ">]" only at the very end, so check those here
    if pattern.find("[<", 1) != -1:
        return 0
    if pattern.find(">]", 0, len(pattern) - 2) != -1:
        return 0
    return 1
def _verify_test(infile):
    """verify the patterns from a Prosite file handle

    Reads the 'PA' (pattern) lines -- a pattern may span several
    physical lines until one ends with '.' -- compiles each assembled
    pattern, prints the derived regexps, and reports round-trip
    mismatches and compile failures.  Python 2 only (print statements).
    """
    pattern = ""
    while 1:
        line = infile.readline()
        if not line:
            break
        # only the 'PA' data lines matter
        if line[:2] != "PA":
            continue
        # strip the "PA   " prefix and the trailing newline
        pattern = pattern + line[5:-1]
        # a '.' just before the newline terminates the pattern
        if line[-2] == ".":
            try:
                print "*" * 60
                print pattern
                p = compile(pattern)
                print prosite_to_re(pattern)
                print repr(p.re)
                print prosite_to_grouped_re(pattern)
                print repr(p.grouped_re)
                # round-trip check: str(p) should reproduce the input
                terms = str(p)
                if terms != pattern:
                    print "DIFFER", terms, pattern
            except TypeError, msg:
                print "PROBLEM", pattern, msg
            pattern = ""
# Commented out by jchang 4/13/00.
# Specific to Andrew's test environment.
#if __name__ == "__main__":
# import os
# infile = os.popen("bzcat /home/dalke/ftps/prosite/prosite.dat.bz2 | grep ^PA")
# _verify_test(infile)
|
BlogomaticProject/Blogomatic
|
opt/blog-o-matic/usr/lib/python/Bio/Prosite/Pattern.py
|
Python
|
gpl-2.0
| 16,587
|
[
"Biopython"
] |
61bc6d0fe983802639276b2f58443f70563a26a73b7b9eecbf2c3d6428db449a
|
"""
Uniweb validator project
w3c.py : Checks URL against W3C validators in various ways
Copyright (c) 2009 Brian Shumate
Permission is hereby granted, free of charge, to any person obtaining a copy
of this software and associated documentation files (the "Software"), to deal
in the Software without restriction, including without limitation the rights
to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
copies of the Software, and to permit persons to whom the Software is
furnished to do so, subject to the following conditions:
The above copyright notice and this permission notice shall be included in
all copies or substantial portions of the Software.
THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
THE SOFTWARE.
"""
import datetime
import httplib2
import urllib2
from datetime import timedelta
from BeautifulSoup import BeautifulSoup
from surfbot.validator.models import Website
def checkhtml(site_pk):
    """
    Checks URL against W3C HTML validator website

    Fetches the validator result page for the site's root URL, records
    pass/fail counters on the Website row, schedules the next check
    (24 hours after a failure, 7 days after a pass), saves the row and
    returns 'Pass' or 'Fail'.

    Eventually, this will be pointed directly at a local instance as
    part of the whole appliance idea.
    """
    today = datetime.date.today()
    w3chtml = "http://validator.w3.org/check?uri="
    w = Website.objects.get(pk=site_pk)
    w.lastcheck = today
    h = httplib2.Http(".cache")
    resp, content = h.request(w3chtml + w.rooturl, "GET")
    validator = BeautifulSoup(content)
    # bookkeeping shared by both outcomes (previously duplicated in
    # each branch)
    w.checkok = True
    w.checktotal += 1
    if validator.find('h3', 'invalid'):
        # validator flagged the page: record the failure, retry sooner
        w.htmlval = 'Fail'
        w.htmlval_fcount += 1
        w.nextcheck = today + timedelta(hours=24)
    else:
        w.htmlval = 'Pass'
        w.htmlval_pcount += 1
        w.nextcheck = today + timedelta(days=7)
    w.save()
    return w.htmlval
def checkcss(site_pk):
    """
    Checks URL against W3C CSS validator website

    Fetches the CSS validator result page for the site's root URL,
    records pass/fail counters on the Website row, schedules the next
    check (24 hours after a failure, 7 days after a pass), saves the
    row and returns 'Pass' or 'Fail'.
    """
    today = datetime.date.today()
    w3ccss = "http://jigsaw.w3.org/css-validator/validator?uri="
    w = Website.objects.get(pk=site_pk)
    w.lastcheck = today
    h = httplib2.Http(".cache")
    resp, content = h.request(w3ccss + w.rooturl, "GET")
    validator = BeautifulSoup(content)
    # bookkeeping shared by both outcomes (previously duplicated in
    # each branch)
    w.checkok = True
    w.checktotal += 1
    if validator.find('div', id='errors'):
        # errors reported: record the failure, retry sooner
        w.cssval = 'Fail'
        w.cssval_fcount += 1
        w.nextcheck = today + timedelta(hours=24)
    else:
        w.cssval = 'Pass'
        w.cssval_pcount += 1
        w.nextcheck = today + timedelta(days=7)
    w.save()
    return w.cssval
|
brianshumate/uniweb
|
surfbot/utils/w3c.py
|
Python
|
bsd-2-clause
| 3,219
|
[
"Brian"
] |
13f910fd6b559f01446cac87f4b287fc2abd08956a2775895d7a9174147b2657
|
"""
Extremely simple utility class to send mails
"""
import os
import socket
from smtplib import SMTP, SMTP_SSL
from email.mime.application import MIMEApplication
from email.mime.multipart import MIMEMultipart
from email.mime.text import MIMEText
from getpass import getuser
from DIRAC import gLogger, S_OK, S_ERROR
class Mail(object):
    """Extremely simple utility class to send mails via SMTP.

    Configure the attributes (_subject, _message, _mailAddress, _html,
    the _smtp* connection settings, _attachments) and call _send().
    """
    def __init__(self):
        self._subject = ""
        self._message = ""
        self._mailAddress = ""
        self._html = False
        # default sender: local user at this host's FQDN
        self._fromAddress = getuser() + "@" + socket.getfqdn()
        self._attachments = []
        self.esmtp_features = {}
        # SMTP connection settings; all optional
        self._smtpPtcl = None
        self._smtpHost = None
        self._smtpPort = None
        self._smtpLogin = None
        self._smtpPasswd = None
    def _create(self, addresses):
        """create a mail object

        :param list addresses: addresses

        :return: S_OK(object)/S_ERROR() -- contain MIMEMultipart object
        """
        if not isinstance(addresses, list):
            addresses = [addresses]
        if not self._mailAddress:
            gLogger.warn("No mail address was provided. Mail not sent.")
            return S_ERROR("No mail address was provided. Mail not sent.")
        if not self._message:
            gLogger.warn("Message body is empty")
            if not self._subject:
                gLogger.warn("Subject and body empty. Mail not sent")
                return S_ERROR("Subject and body empty. Mail not sent")
        if self._html:
            mail = MIMEText(self._message, "html")
        else:
            mail = MIMEText(self._message, "plain")
        msg = MIMEMultipart()
        msg.attach(mail)
        msg["Subject"] = self._subject
        msg["From"] = self._fromAddress
        msg["To"] = ", ".join(addresses)
        for attachment in self._attachments:
            try:
                with open(attachment, "rb") as fil:
                    part = MIMEApplication(fil.read(), Name=os.path.basename(attachment))
                    part["Content-Disposition"] = 'attachment; filename="%s"' % os.path.basename(attachment)
                    msg.attach(part)
            except IOError as e:
                # best effort: skip unreadable attachments, keep sending
                gLogger.exception("Could not attach %s" % attachment, lException=e)
        return S_OK(msg)
    def _send(self, msg=None):
        """send a single email message. If msg is in input, it is expected to be of email type, otherwise it will create it.

        :param object msg: MIMEMultipart object

        :return: S_OK()/S_ERROR()
        """
        # Recipients always come from self._mailAddress.  BUGFIX: this
        # was previously computed only when msg was None, so passing a
        # prebuilt msg raised NameError at sendmail() below.
        addresses = self._mailAddress
        if isinstance(self._mailAddress, str):
            addresses = self._mailAddress.split(", ")
        if msg is None:
            result = self._create(addresses)
            if not result["OK"]:
                return result
            msg = result["Value"]
        if self._smtpPtcl == "SSL":
            smtp = SMTP_SSL()
        else:
            smtp = SMTP()
        smtp.set_debuglevel(0)
        try:
            connParams = {}
            if self._smtpHost:
                connParams["host"] = self._smtpHost
            if self._smtpPort:
                connParams["port"] = int(self._smtpPort)
            smtp.connect(**connParams)
            smtp.ehlo_or_helo_if_needed()
            if self._smtpPtcl == "TLS":
                smtp.starttls()
            if self._smtpLogin and self._smtpPasswd:
                smtp.login(self._smtpLogin, self._smtpPasswd)
            # refresh EHLO state after STARTTLS/login before sending
            smtp.ehlo_or_helo_if_needed()
            smtp.sendmail(self._fromAddress, addresses, msg.as_string())
        except Exception as x:
            return S_ERROR("Sending mail failed %s" % str(x))
        smtp.quit()
        return S_OK("The mail was successfully sent")
    def __eq__(self, other):
        """Comparing an email object to another"""
        if isinstance(other, Mail):
            if self.__dict__ == other.__dict__:
                return True
        return False
    def __hash__(self):
        """Comparing for sets"""
        # NOTE: assumes subject/message/from/to are all plain strings
        return hash(self._subject + self._message + self._fromAddress + self._mailAddress)
|
DIRACGrid/DIRAC
|
src/DIRAC/Core/Utilities/Mail.py
|
Python
|
gpl-3.0
| 4,124
|
[
"DIRAC"
] |
e1a7568481552cec7beb8ab3ae2fa1917ece72acc0281d8c538724fff3ff2fbb
|
# Adapted from Bio.AlignIO.FastaIO copyright 2008-2011 by Peter Cock.
# Copyright 2012 by Wibowo Arindrarto.
# All rights reserved.
# This code is part of the Biopython distribution and governed by its
# license. Please see the LICENSE file that should have been included
# as part of this package.
"""Bio.SearchIO support for Bill Pearson's FASTA tools.
This module adds support for parsing FASTA outputs. FASTA is a suite of
programs that finds regions of local or global similarity between protein
or nucleotide sequences, either by searching databases or identifying
local duplications.
Bio.SearchIO.FastaIO was tested on the following FASTA flavors and versions:
- flavors: fasta, ssearch, tfastx
- versions: 35, 36
Other flavors and/or versions may introduce some bugs. Please file a bug report
if you see such problems to Biopython's bug tracker.
More information on FASTA are available through these links:
- Website: http://fasta.bioch.virginia.edu/fasta_www2/fasta_list2.shtml
- User guide: http://fasta.bioch.virginia.edu/fasta_www2/fasta_guide.pdf
Supported Formats
=================
Bio.SearchIO.FastaIO supports parsing and indexing FASTA outputs triggered by
the -m 10 flag. Other formats that mimic other programs (e.g. the BLAST tabular
format using the -m 8 flag) may be parseable but using SearchIO's other parsers
(in this case, using the 'blast-tab' parser).
fasta-m10
=========
Note that in FASTA -m 10 outputs, HSPs from different strands are considered to
be from different hits. They are listed as two separate entries in the hit
table. FastaIO recognizes this and will group HSPs with the same hit ID into a
single Hit object, regardless of strand.
FASTA also sometimes output extra sequences adjacent to the HSP match. These
extra sequences are discarded by FastaIO. Only regions containing the actual
sequence match are extracted.
The following object attributes are provided:
+-----------------+-------------------------+----------------------------------+
| Object | Attribute | Value |
+=================+=========================+==================================+
| QueryResult | description | query sequence description |
| +-------------------------+----------------------------------+
| | id | query sequence ID |
| +-------------------------+----------------------------------+
| | program | FASTA flavor |
| +-------------------------+----------------------------------+
| | seq_len | full length of query sequence |
| +-------------------------+----------------------------------+
| | target | target search database |
| +-------------------------+----------------------------------+
| | version | FASTA version |
+-----------------+-------------------------+----------------------------------+
| Hit | seq_len | full length of the hit sequence |
+-----------------+-------------------------+----------------------------------+
| HSP | bitscore | \*_bits line |
| +-------------------------+----------------------------------+
| | evalue | \*_expect line |
| +-------------------------+----------------------------------+
| | ident_pct | \*_ident line |
| +-------------------------+----------------------------------+
| | init1_score | \*_init1 line |
| +-------------------------+----------------------------------+
| | initn_score | \*_initn line |
| +-------------------------+----------------------------------+
| | opt_score | \*_opt line, \*_s-w opt line |
| +-------------------------+----------------------------------+
| | pos_pct | \*_sim line |
| +-------------------------+----------------------------------+
| | sw_score | \*_score line |
| +-------------------------+----------------------------------+
| | z_score | \*_z-score line |
+-----------------+-------------------------+----------------------------------+
| HSPFragment | aln_annotation | al_cons block, if present |
| (also via HSP) +-------------------------+----------------------------------+
| | hit | hit sequence |
| +-------------------------+----------------------------------+
| | hit_end | hit sequence end coordinate |
| +-------------------------+----------------------------------+
| | hit_start | hit sequence start coordinate |
| +-------------------------+----------------------------------+
| | hit_strand | hit sequence strand |
| +-------------------------+----------------------------------+
| | query | query sequence |
| +-------------------------+----------------------------------+
| | query_end | query sequence end coordinate |
| +-------------------------+----------------------------------+
| | query_start | query sequence start coordinate |
| +-------------------------+----------------------------------+
| | query_strand | query sequence strand |
+-----------------+-------------------------+----------------------------------+
"""
import re
from Bio._py3k import _as_bytes, _bytes_to_string
from Bio.Alphabet import generic_dna, generic_protein
from Bio.File import UndoHandle
from Bio.SearchIO._index import SearchIndexer
from Bio.SearchIO._model import QueryResult, Hit, HSP, HSPFragment
__all__ = ['FastaM10Parser', 'FastaM10Indexer']
__docformat__ = "restructuredtext en"
# precompile regex patterns
# regex for program name
_RE_FLAVS = re.compile(r't?fast[afmsxy]|pr[sf][sx]|lalign|[gs]?[glso]search')
# regex for sequence ID and length ~ deals with both \n and \r\n
_PTR_ID_DESC_SEQLEN = r'>>>(.+?)\s+(.*?) *- (\d+) (?:aa|nt)\s*$'
_RE_ID_DESC_SEQLEN = re.compile(_PTR_ID_DESC_SEQLEN)
# bytes flavor of the same pattern, used by the indexer which reads binary
_RE_ID_DESC_SEQLEN_IDX = re.compile(_as_bytes(_PTR_ID_DESC_SEQLEN))
# regex for qresult, hit, or hsp attribute value
_RE_ATTR = re.compile(r'^; [a-z]+(_[ \w-]+):\s+(.*)$')
# regex for capturing excess start and end sequences in alignments
_RE_START_EXC = re.compile(r'^-*')
_RE_END_EXC = re.compile(r'-*$')
# attribute name mappings
# maps the "; xx_name: value" attribute suffix to a
# (SearchIO attribute name, value caster) pair
_HSP_ATTR_MAP = {
    '_initn': ('initn_score', int),
    '_init1': ('init1_score', int),
    '_opt': ('opt_score', int),
    '_s-w opt': ('opt_score', int),
    '_z-score': ('z_score', float),
    '_bits': ('bitscore', float),
    '_expect': ('evalue', float),
    '_score': ('sw_score', int),
    '_ident': ('ident_pct', float),
    '_sim': ('pos_pct', float),
}
# state flags for the hit/hsp block parser
_STATE_NONE = 0
_STATE_QUERY_BLOCK = 1
_STATE_HIT_BLOCK = 2
_STATE_CONS_BLOCK = 3
def _set_qresult_hits(qresult, hit_rows=None):
    """Helper function for appending Hits without alignments into QueryResults.

    :param qresult: QueryResult to fill in
    :param hit_rows: hit table rows (default: none).  BUGFIX: was a
        mutable default argument (``hit_rows=[]``), the classic Python
        pitfall; replaced by ``None`` with the same call-site behavior.
    """
    if hit_rows is None:
        hit_rows = []
    for hit_row in hit_rows:
        hit_id, remainder = hit_row.split(' ', 1)
        # TODO: parse hit and hsp properties properly; by dealing with:
        #   - any character in the description (brackets, spaces, etc.)
        #   - possible [f] or [r] presence (for frame info)
        #   - possible presence of E2() column
        #   - possible incomplete hit_id due to column length limit
        # The current method only looks at the Hit ID, none of the things above
        if hit_id not in qresult:
            # create a placeholder Hit with a single empty HSP fragment
            frag = HSPFragment(hit_id, qresult.id)
            hsp = HSP([frag])
            hit = Hit([hsp])
            qresult.append(hit)
    return qresult
def _set_hsp_seqs(hsp, parsed, program):
    """Helper function for the main parsing code.

    Trims the flanking/filler characters off the parsed alignment
    strings, then writes sequences, coordinates, alphabet, and strand
    onto the HSP's fragment.

    :param hsp: HSP whose properties will be set
    :type hsp: HSP
    :param parsed: parsed values of the HSP attributes
    :type parsed: dictionary {string: object}
    :param program: program name
    :type program: string
    """
    # get aligned sequences and check if they have equal lengths
    start = 0
    for seq_type in ('hit', 'query'):
        # NOTE(review): tfast* flavors skip the trimming entirely --
        # presumably their output carries no flanking context; confirm.
        if 'tfast' not in program:
            pseq = parsed[seq_type]
            # adjust start and end coordinates based on the amount of
            # filler characters
            start, stop = _get_aln_slice_coords(pseq)
            start_adj = len(re.search(_RE_START_EXC, pseq['seq']).group(0))
            stop_adj = len(re.search(_RE_END_EXC, pseq['seq']).group(0))
            start = start + start_adj
            stop = stop + start_adj - stop_adj
            parsed[seq_type]['seq'] = pseq['seq'][start:stop]
    assert len(parsed['query']['seq']) == len(parsed['hit']['seq']), "%r %r" \
            % (len(parsed['query']['seq']), len(parsed['hit']['seq']))
    if 'similarity' in hsp.aln_annotation:
        # only using 'start' since FASTA seems to have trimmed the 'excess'
        # end part
        hsp.aln_annotation['similarity'] = hsp.aln_annotation['similarity'][start:]
        # hit or query works equally well here
        assert len(hsp.aln_annotation['similarity']) == len(parsed['hit']['seq'])
    # query and hit sequence types must be the same
    assert parsed['query']['_type'] == parsed['hit']['_type']
    type_val = parsed['query']['_type'] # hit works fine too
    alphabet = generic_dna if type_val == 'D' else generic_protein
    setattr(hsp.fragment, 'alphabet', alphabet)
    for seq_type in ('hit', 'query'):
        # get and set start and end coordinates
        start = int(parsed[seq_type]['_start'])
        end = int(parsed[seq_type]['_stop'])
        # convert 1-based inclusive FASTA coordinates into Python-style
        # 0-based half-open intervals
        setattr(hsp.fragment, seq_type + '_start', min(start, end) - 1)
        setattr(hsp.fragment, seq_type + '_end', max(start, end))
        # set seq and alphabet
        setattr(hsp.fragment, seq_type, parsed[seq_type]['seq'])
        if alphabet is not generic_protein:
            # get strand from coordinate; start <= end is plus
            # start > end is minus
            if start <= end:
                setattr(hsp.fragment, seq_type + '_strand', 1)
            else:
                setattr(hsp.fragment, seq_type + '_strand', -1)
        else:
            # protein sequences carry no strand information
            setattr(hsp.fragment, seq_type + '_strand', 0)
def _get_aln_slice_coords(parsed_hsp):
"""Helper function for the main parsing code.
To get the actual pairwise alignment sequences, we must first
translate the un-gapped sequence based coordinates into positions
in the gapped sequence (which may have a flanking region shown
using leading - characters). To date, I have never seen any
trailing flanking region shown in the m10 file, but the
following code should also cope with that.
Note that this code seems to work fine even when the "sq_offset"
entries are prsent as a result of using the -X command line option.
"""
seq = parsed_hsp['seq']
seq_stripped = seq.strip('-')
disp_start = int(parsed_hsp['_display_start'])
start = int(parsed_hsp['_start'])
stop = int(parsed_hsp['_stop'])
if start <= stop:
start = start - disp_start
stop = stop - disp_start + 1
else:
start = disp_start - start
stop = disp_start - stop + 1
stop += seq_stripped.count('-')
assert 0 <= start and start < stop and stop <= len(seq_stripped), \
"Problem with sequence start/stop,\n%s[%i:%i]\n%s" \
% (seq, start, stop, parsed_hsp)
return start, stop
class FastaM10Parser(object):
    """Parser for Bill Pearson's FASTA suite's -m 10 output.

    Iterating over the parser yields one QueryResult per query in the
    output file.
    """
    def __init__(self, handle, __parse_hit_table=False):
        # UndoHandle lets the hit parser peek at the next line without
        # consuming it.
        # NOTE(review): __parse_hit_table is accepted but unused here.
        self.handle = UndoHandle(handle)
        self._preamble = self._parse_preamble()
    def __iter__(self):
        for qresult in self._parse_qresult():
            # re-set desc, for hsp query description
            qresult.description = qresult.description
            yield qresult
    def _parse_preamble(self):
        """Parses the Fasta preamble for Fasta flavor and version."""
        preamble = {}
        while True:
            self.line = self.handle.readline()
            # this should be the line just before the first qresult
            if self.line.startswith('Query'):
                break
            # try to match for version line
            elif self.line.startswith(' version'):
                preamble['version'] = self.line.split(' ')[2]
            else:
                # try to match for flavor line
                flav_match = re.match(_RE_FLAVS, self.line.lower())
                if flav_match:
                    preamble['program'] = flav_match.group(0)
        return preamble
    def __parse_hit_table(self):
        """Parses hit table rows."""
        # move to the first row
        self.line = self.handle.readline()
        # parse hit table until we see an empty line
        hit_rows = []
        # BUGFIX: condition was "not self.line.strip()", which collected
        # only blank lines and stopped at the first real row -- the
        # opposite of the comment above; rows are non-blank, so collect
        # while the line is non-empty and non-blank.
        while self.line and self.line.strip():
            hit_rows.append(self.line.strip())
            self.line = self.handle.readline()
        return hit_rows
    def _parse_qresult(self):
        """Generator yielding QueryResult objects from the handle."""
        # initial qresult value
        qresult = None
        hit_rows = []
        # state values
        state_QRES_NEW = 1
        state_QRES_HITTAB = 3
        state_QRES_CONTENT = 5
        state_QRES_END = 7
        while True:
            # one line before the hit table
            if self.line.startswith('The best scores are:'):
                qres_state = state_QRES_HITTAB
            # the end of a query or the file altogether
            elif self.line.strip() == '>>>///' or not self.line:
                qres_state = state_QRES_END
            # the beginning of a new query
            elif not self.line.startswith('>>>') and '>>>' in self.line:
                qres_state = state_QRES_NEW
            # the beginning of the query info and its hits + hsps
            elif self.line.startswith('>>>') and not \
                    self.line.strip() == '>>><<<':
                qres_state = state_QRES_CONTENT
            # default qres mark
            else:
                qres_state = None
            if qres_state is not None:
                if qres_state == state_QRES_HITTAB:
                    # parse hit table if flag is set
                    hit_rows = self.__parse_hit_table()
                elif qres_state == state_QRES_END:
                    yield _set_qresult_hits(qresult, hit_rows)
                    break
                elif qres_state == state_QRES_NEW:
                    # if qresult is filled, yield it first
                    if qresult is not None:
                        yield _set_qresult_hits(qresult, hit_rows)
                    regx = re.search(_RE_ID_DESC_SEQLEN, self.line)
                    query_id = regx.group(1)
                    seq_len = regx.group(3)
                    desc = regx.group(2)
                    qresult = QueryResult(id=query_id)
                    qresult.seq_len = int(seq_len)
                    # get target from the next line
                    self.line = self.handle.readline()
                    qresult.target = [x for x in self.line.split(' ') if x][1].strip()
                    if desc is not None:
                        qresult.description = desc
                    # set values from preamble
                    for key, value in self._preamble.items():
                        setattr(qresult, key, value)
                elif qres_state == state_QRES_CONTENT:
                    assert self.line[3:].startswith(qresult.id), self.line
                    for hit, strand in self._parse_hit(query_id):
                        # HACK: re-set desc, for hsp hit and query description
                        hit.description = hit.description
                        hit.query_description = qresult.description
                        # if hit is not in qresult, append it
                        if hit.id not in qresult:
                            qresult.append(hit)
                        # otherwise, it might be the same hit with a different strand
                        else:
                            # make sure strand is different and then append hsp to
                            # existing hit
                            for hsp in hit.hsps:
                                assert strand != hsp.query_strand
                                qresult[hit.id].append(hsp)
            self.line = self.handle.readline()
    def _parse_hit(self, query_id):
        """Generator yielding (Hit, strand) pairs for the current query."""
        # skip ahead to the first hit marker ('>>')
        while True:
            self.line = self.handle.readline()
            if self.line.startswith('>>'):
                break
        state = _STATE_NONE
        strand = None
        hsp_list = []
        while True:
            peekline = self.handle.peekline()
            # yield hit if we've reached the start of a new query or
            # the end of the search
            if peekline.strip() in [">>><<<", ">>>///"] or \
                    (not peekline.startswith('>>>') and '>>>' in peekline):
                # append last parsed_hsp['hit']['seq'] line
                if state == _STATE_HIT_BLOCK:
                    parsed_hsp['hit']['seq'] += self.line.strip()
                elif state == _STATE_CONS_BLOCK:
                    hsp.aln_annotation['similarity'] += \
                            self.line.strip('\r\n')
                # process HSP alignment and coordinates
                _set_hsp_seqs(hsp, parsed_hsp, self._preamble['program'])
                hit = Hit(hsp_list)
                hit.description = hit_desc
                hit.seq_len = seq_len
                yield hit, strand
                hsp_list = []
                break
            # yield hit and create a new one if we're still in the same query
            elif self.line.startswith('>>'):
                # try yielding, if we have hsps
                if hsp_list:
                    _set_hsp_seqs(hsp, parsed_hsp, self._preamble['program'])
                    hit = Hit(hsp_list)
                    hit.description = hit_desc
                    hit.seq_len = seq_len
                    yield hit, strand
                    hsp_list = []
                # try to get the hit id and desc, and handle cases without descs
                try:
                    hit_id, hit_desc = self.line[2:].strip().split(' ', 1)
                except ValueError:
                    hit_id = self.line[2:].strip().split(' ', 1)[0]
                    hit_desc = ''
                # create the HSP object for Hit
                frag = HSPFragment(hit_id, query_id)
                hsp = HSP([frag])
                hsp_list.append(hsp)
                # set or reset the state to none
                state = _STATE_NONE
                parsed_hsp = {'query': {}, 'hit': {}}
            # create and append a new HSP if line starts with '>--'
            elif self.line.startswith('>--'):
                # set seq attributes of previous hsp
                _set_hsp_seqs(hsp, parsed_hsp, self._preamble['program'])
                # and create a new one
                frag = HSPFragment(hit_id, query_id)
                hsp = HSP([frag])
                hsp_list.append(hsp)
                # set the state ~ none yet
                state = _STATE_NONE
                parsed_hsp = {'query': {}, 'hit': {}}
            # this is either query or hit data in the HSP, depending on the state
            elif self.line.startswith('>'):
                if state == _STATE_NONE:
                    # make sure it's the correct query
                    assert query_id.startswith(self.line[1:].split(' ')[0]), \
                            "%r vs %r" % (query_id, self.line)
                    state = _STATE_QUERY_BLOCK
                    parsed_hsp['query']['seq'] = ''
                elif state == _STATE_QUERY_BLOCK:
                    # make sure it's the correct hit
                    assert hit_id.startswith(self.line[1:].split(' ')[0])
                    state = _STATE_HIT_BLOCK
                    parsed_hsp['hit']['seq'] = ''
            # check for conservation block
            elif self.line.startswith('; al_cons'):
                state = _STATE_CONS_BLOCK
                hsp.fragment.aln_annotation['similarity'] = ''
            elif self.line.startswith(';'):
                # Fasta outputs do not make a clear distinction between Hit
                # and HSPs, so we check the attribute names to determine
                # whether it belongs to a Hit or HSP
                regx = re.search(_RE_ATTR, self.line.strip())
                name = regx.group(1)
                value = regx.group(2)
                # for values before the '>...' query block
                if state == _STATE_NONE:
                    if name in _HSP_ATTR_MAP:
                        attr_name, caster = _HSP_ATTR_MAP[name]
                        if caster is not str:
                            value = caster(value)
                        if name in ['_ident', '_sim']:
                            # identity/similarity are fractions; store as %
                            value *= 100
                        setattr(hsp, attr_name, value)
                # otherwise, pool the values for processing later
                elif state == _STATE_QUERY_BLOCK:
                    parsed_hsp['query'][name] = value
                elif state == _STATE_HIT_BLOCK:
                    if name == '_len':
                        seq_len = int(value)
                    else:
                        parsed_hsp['hit'][name] = value
                # for values in the hit block
                else:
                    raise ValueError("Unexpected line: %r" % self.line)
            # otherwise, it must be lines containing the sequences
            else:
                assert '>' not in self.line
                # if we're in hit, parse into hsp.hit
                if state == _STATE_HIT_BLOCK:
                    parsed_hsp['hit']['seq'] += self.line.strip()
                elif state == _STATE_QUERY_BLOCK:
                    parsed_hsp['query']['seq'] += self.line.strip()
                elif state == _STATE_CONS_BLOCK:
                    hsp.fragment.aln_annotation['similarity'] += \
                            self.line.strip('\r\n')
                # we should not get here!
                else:
                    raise ValueError("Unexpected line: %r" % self.line)
            self.line = self.handle.readline()
class FastaM10Indexer(SearchIndexer):
    """Indexer class for Bill Pearson's FASTA suite's -m 10 output."""
    _parser = FastaM10Parser
    def __init__(self, filename):
        SearchIndexer.__init__(self, filename)
        # wrap in UndoHandle so peekline() is available below
        self._handle = UndoHandle(self._handle)
    def __iter__(self):
        """Yield (query key, start offset, length) for each query block."""
        handle = self._handle
        handle.seek(0)
        start_offset = handle.tell()
        qresult_key = None
        query_mark = _as_bytes('>>>')
        while True:
            line = handle.readline()
            peekline = handle.peekline()
            end_offset = handle.tell()
            # a '>>>' marker appearing mid-line signals a new query block
            if not line.startswith(query_mark) and query_mark in line:
                regx = re.search(_RE_ID_DESC_SEQLEN_IDX, line)
                qresult_key = _bytes_to_string(regx.group(1))
                start_offset = end_offset - len(line)
            # yield whenever we encounter a new query or at the end of the file
            if qresult_key is not None:
                if (not peekline.startswith(query_mark)
                        and query_mark in peekline) or not line:
                    yield qresult_key, start_offset, end_offset - start_offset
                    if not line:
                        break
                    start_offset = end_offset
    def get_raw(self, offset):
        """Return the raw string of the query result starting at *offset*.

        The file header is prepended and a mock '>>><<<' end marker is
        appended, so the result parses standalone.
        """
        handle = self._handle
        qresult_raw = _as_bytes('')
        query_mark = _as_bytes('>>>')
        # read header first
        handle.seek(0)
        while True:
            line = handle.readline()
            peekline = handle.peekline()
            qresult_raw += line
            if not peekline.startswith(query_mark) and query_mark in peekline:
                break
        # and read the qresult raw string
        handle.seek(offset)
        while True:
            # preserve whitespace, don't use read_forward
            line = handle.readline()
            peekline = handle.peekline()
            qresult_raw += line
            # break when we've reached qresult end
            if (not peekline.startswith(query_mark) and query_mark in peekline) or \
                    not line:
                break
        # append mock end marker to qresult_raw, since it's not always present
        return qresult_raw + _as_bytes('>>><<<\n')
# if not used as a module, run the doctest
if __name__ == "__main__":
    # run_doctest executes the doctests embedded in this module's docstrings
    from Bio._utils import run_doctest
    run_doctest()
|
poojavade/Genomics_Docker
|
Dockerfiles/gedlab-khmer-filter-abund/pymodules/python2.7/lib/python/Bio/SearchIO/FastaIO.py
|
Python
|
apache-2.0
| 25,572
|
[
"BLAST",
"Biopython"
] |
bd8d3032fba626e4e600202a7d56298d0e352f1c5ed17de1be0f8edf7d535d2e
|
import os
import glob
import sys
import shutil
import pysam
from bcbio.pipeline import config_utils
from bcbio.distributed.transaction import file_transaction, tx_tmpdir
from bcbio.utils import (safe_makedir, file_exists)
from bcbio.provenance import do
from bcbio import utils
from bcbio.log import logger
from bcbio.pipeline import datadict as dd
from bcbio import bam
from bcbio import broad
from bcbio.wgbsseq import kits
def align(fastq_file, pair_file, ref_file, names, align_dir, data):
    """Align bisulfite-converted reads with Bismark and symlink the result.

    Returns *data* updated with the work BAM and the Bismark report for QC.
    Skips alignment when the final BAM already exists.

    NOTE(review): the ``names`` parameter and the locals ``out_prefix`` and
    ``fastq_files`` are computed/accepted but never used — confirm whether
    they can be removed.
    """
    assert data["analysis"].lower().startswith("wgbs-seq"), "No comparible alignment."
    config = data["config"]
    sample = dd.get_sample_name(data)
    out_prefix = os.path.join(align_dir, dd.get_lane(data))
    out_dir = os.path.join(align_dir, "%s_bismark" % dd.get_lane(data))
    if not ref_file:
        logger.error("bismark index not found. You can install "
                     "the index for your genome with: bcbio_nextgen.py upgrade "
                     "--aligners bismark --genomes genome-build-name --data")
        sys.exit(1)
    final_out = os.path.join(align_dir, "{0}.bam".format(sample))
    # short-circuit: reuse a previously produced alignment
    if file_exists(final_out):
        data = dd.set_work_bam(data, final_out)
        data["bam_report"] = glob.glob(os.path.join(out_dir, "*report.txt"))[0]
        data = dd.update_summary_qc(data, "bismark", base=data["bam_report"])
        return data
    bismark = config_utils.get_program("bismark", config)
    # bismark uses 5 threads/sample and ~12GB RAM/sample (hg38)
    resources = config_utils.get_resources("bismark", data["config"])
    max_cores = dd.get_num_cores(data)
    # convert configured memory (per core) to GB, then scale by core count
    max_mem = config_utils.convert_to_bytes(resources.get("memory", "1G")) / (1024.0 * 1024.0)
    instances = calculate_bismark_instances(max_cores, max_mem * max_cores)
    kit = kits.KITS.get(dd.get_kit(data), None)
    directional = "--non_directional" if kit and not kit.is_directional else ""
    other_opts = resources.get("options", [])
    other_opts = " ".join([str(x) for x in other_opts]).strip()
    fastq_files = " ".join([fastq_file, pair_file]) if pair_file else fastq_file
    safe_makedir(align_dir)
    # the command is formatted from locals() below, after fastq_file may have
    # been rewritten into the paired "-1 ... -2 ..." form
    cmd = "{bismark} {other_opts} {directional} --bowtie2 --temp_dir {tx_out_dir} --gzip --parallel {instances} -o {tx_out_dir} --unmapped {ref_file} {fastq_file} "
    if pair_file:
        fastq_file = "-1 %s -2 %s" % (fastq_file, pair_file)
    raw_bam = glob.glob(out_dir + "/*bismark*bt2*bam")
    if not raw_bam:
        with tx_tmpdir() as tx_out_dir:
            run_message = "Running Bismark aligner on %s and %s" % (fastq_file, ref_file)
            do.run(cmd.format(**locals()), run_message, None)
            shutil.move(tx_out_dir, out_dir)
        raw_bam = glob.glob(out_dir + "/*bismark*bt2*bam")
    # don't process bam in the bismark pipeline!
    utils.symlink_plus(raw_bam[0], final_out)
    data = dd.set_work_bam(data, final_out)
    data["bam_report"] = glob.glob(os.path.join(out_dir, "*report.txt"))[0]
    data = dd.update_summary_qc(data, "bismark", base=data["bam_report"])
    return data
def _process_bam(bam_file, in_fastq, sample, reference, config):
    """Post-process a BAM: fix read groups, reorder to the reference, index.

    Runs the ``picard_fix_rgs`` and ``picard_reorder`` Broad runner functions
    and returns the path to the reordered, indexed BAM.
    """
    broad_runner = broad.runner_from_config(config)
    # read-group metadata; rg/sm are keyed off the input fastq name
    names = {'rg': in_fastq, 'library': 'WGBS_LIB', 'pl': 'Illumina', 'pu': 'R1', 'sm': in_fastq, 'sample': sample}
    out_fix_bam = broad_runner.run_fn("picard_fix_rgs", bam_file, names)
    order_bam = utils.append_stem(out_fix_bam, "_order")
    # reorder contigs to match the reference sequence dictionary
    broad_runner.run_fn("picard_reorder", out_fix_bam, reference, order_bam)
    bam.index(order_bam, config)
    # order_bam = _set_quality(order_bam)
    # bam.index(order_bam, config)
    return order_bam
def remap_index_fn(ref_file):
    """Map a sequence reference file to its equivalent bismark index directory."""
    # the bismark index lives as a sibling of the reference file's parent dir
    seq_dir = os.path.dirname(ref_file)
    genome_dir = os.path.dirname(seq_dir)
    return os.path.join(genome_dir, "bismark")
def _set_quality(in_bam):
    """Return a copy of *in_bam* with every read's mapping quality set to 255.

    255 is the SAM convention for "mapping quality unavailable". The output is
    written next to the input with a ``_normqual`` stem suffix; if it already
    exists it is returned as-is.

    :param in_bam: path to the input BAM file
    :returns: path to the quality-normalized BAM file
    """
    out_file = utils.append_stem(in_bam, "_normqual")
    # check before opening the input so we do not open a file we never read
    if file_exists(out_file):
        return out_file
    with file_transaction(out_file) as tx_out:
        # use a distinct local name (the original used `bam`, shadowing the
        # module-level `from bcbio import bam` import) and close both handles
        # deterministically via context managers
        with pysam.AlignmentFile(in_bam, "rb") as in_handle, \
                pysam.AlignmentFile(tx_out, "wb", template=in_handle) as out_handle:
            for read in in_handle.fetch():
                read.mapping_quality = 255
                out_handle.write(read)
    return out_file
def index(ref_file, out_dir, data):
    """Create a bismark index in the defined reference directory.

    NOTE(review): the command template below references ``{sample}`` and
    ``{in_fastq}``, neither of which is defined in this function, so
    ``cmd.format(**locals())`` will raise a KeyError when this path runs.
    The command also looks like an alignment invocation rather than
    ``bismark_genome_preparation``, and the run message "Index STAR" appears
    to be copy-pasted from the STAR indexer — confirm and fix upstream.
    """
    (ref_dir, local_file) = os.path.split(ref_file)
    gtf_file = dd.get_transcriptome_gtf(data, default=dd.get_gtf_file(data))
    bismark = config_utils.find_program("bismark", data["config"])
    if not utils.file_exists(gtf_file):
        raise ValueError("%s not found, could not create a bismark index." % (gtf_file))
    if not utils.file_exists(out_dir):
        with tx_tmpdir(data, os.path.dirname(out_dir)) as tx_out_dir:
            num_cores = dd.get_cores(data)
            other_opts = config_utils.get_resources("bismark", data["config"]).get("options", [])
            other_opts = " ".join([str(x) for x in other_opts]).strip()
            # see NOTE(review) in the docstring: {sample} and {in_fastq} are undefined here
            cmd = "{bismark} {other_opts} --bowtie2 -p {num_cores} -n 1 -o {tx_out_dir} --basename {sample} --unmapped {ref_file} {in_fastq}"
            do.run(cmd.format(**locals()), "Index STAR")
            if os.path.exists(out_dir):
                shutil.rmtree(out_dir)
            shutil.move(tx_out_dir, out_dir)
    return out_dir
def calculate_bismark_instances(cores, memory):
    """
    Decide how many parallel bismark instances to run.

    Based on the discussion at
    https://github.com/FelixKrueger/Bismark/issues/96. ``cores`` and
    ``memory`` are the maximum amounts available for us to use; the result
    is the smaller of the counts permitted by memory and by cores.
    """
    BISMARK_CORES = 1
    BOWTIE_CORES_PER_INSTANCE = 2
    SAMTOOLS_CORES_PER_INSTANCE = 1
    CORES_PER_INSTANCE = BOWTIE_CORES_PER_INSTANCE + SAMTOOLS_CORES_PER_INSTANCE
    GENOME_MEMORY_GB = 12
    INSTANCE_MEMORY_GB = 10
    # each resource bound independently allows at least one instance
    by_memory = max((memory - GENOME_MEMORY_GB) / INSTANCE_MEMORY_GB, 1)
    by_cores = max((cores - BISMARK_CORES) / CORES_PER_INSTANCE, 1)
    instances = int(min(by_memory, by_cores))
    logger.info(f"{cores} cores and {memory} memory are available. Spinning up {instances} instances of bismark.")
    return instances
|
chapmanb/bcbio-nextgen
|
bcbio/ngsalign/bismark.py
|
Python
|
mit
| 6,398
|
[
"pysam"
] |
69365195a18e0530b7d8eb86b36268b1487ca523a86e49cf415b70de43c73404
|
#!/usr/bin/env python3
#pylint: disable=missing-docstring
#* This file is part of the MOOSE framework
#* https://www.mooseframework.org
#*
#* All rights reserved, see COPYRIGHT for full restrictions
#* https://github.com/idaholab/moose/blob/master/COPYRIGHT
#*
#* Licensed under LGPL 2.1, please see LICENSE for details
#* https://www.gnu.org/licenses/lgpl-2.1.html
import chigger
import time
import shutil
# Open the result: start from a copy so the original input is never modified
shutil.copy('../input/diffusion_1.e', 'new_file.e')
reader = chigger.exodus.ExodusReader('new_file.e')
mug = chigger.exodus.ExodusResult(reader, variable='u', range=[0, 1], cmap='viridis')
# Create the window
window = chigger.RenderWindow(mug, size=[600,600], test=True)
# Render the results and write a file; each iteration swaps in the next
# exodus file so the reader sees the file change on disk
filenames = ['../input/diffusion_2.e', '../input/diffusion_3.e', '../input/diffusion_4.e']
for i in range(4):
    window.write('new_files_' + str(i) + '.png')
    window.update()
    # Update the file
    if i < 3:
        # pause before replacing the file — presumably so the modification
        # time differs and the reader detects the change (TODO confirm)
        time.sleep(1.5)
        print("{} --> {}".format(filenames[i], 'new_file.e'))
        shutil.copy(filenames[i], 'new_file.e')
window.start()
|
nuclear-wizard/moose
|
python/chigger/tests/new_files/new_files.py
|
Python
|
lgpl-2.1
| 1,104
|
[
"MOOSE"
] |
83a177d53678b899e842277ee96e78f45be0fc35a29ca6a2841cb530342c0ce4
|
#mono_gauss_coil model
#conversion of DebyeModel.py
#converted by Steve King, Mar 2016
r"""
This Debye Gaussian coil model strictly describes the scattering from
*monodisperse* polymer chains in theta solvents or polymer melts, conditions
under which the distances between segments follow a Gaussian distribution.
Provided the number of segments is large (ie, high molecular weight polymers)
the single-chain form factor P(Q) is that described by Debye (1947).
To describe the scattering from *polydisperse* polymer chains see the
:ref:`poly-gauss-coil` model.
Definition
----------
.. math::
I(q) = \text{scale} \cdot I_0 \cdot P(q) + \text{background}
where
.. math::
I_0 &= \phi_\text{poly} \cdot V
\cdot (\rho_\text{poly} - \rho_\text{solv})^2 \\
P(q) &= 2 [\exp(-Z) + Z - 1] / Z^2 \\
Z &= (q R_g)^2 \\
V &= M / (N_A \delta)
Here, $\phi_\text{poly}$ is the volume fraction of polymer, $V$ is the
volume of a polymer coil, *M* is the molecular weight of the polymer,
$N_A$ is Avogadro's Number, $\delta$ is the bulk density of the polymer,
$\rho_\text{poly}$ is the sld of the polymer, $\rho_\text{solv}$ is the
sld of the solvent, and $R_g$ is the radius of gyration of the polymer coil.
The 2D scattering intensity is calculated in the same way as the 1D,
but where the *q* vector is redefined as
.. math::
q = \sqrt{q_x^2 + q_y^2}
References
----------
#. P Debye, *J. Phys. Colloid. Chem.*, 51 (1947) 18.
#. R J Roe, *Methods of X-Ray and Neutron Scattering in Polymer Science*,
Oxford University Press, New York (2000).
#. http://www.ncnr.nist.gov/staff/hammouda/distance_learning/chapter_28.pdf
Authorship and Verification
----------------------------
* **Author:**
* **Last Modified by:**
* **Last Reviewed by:**
"""
import numpy as np
from numpy import inf
name = "mono_gauss_coil"
title = "Scattering from monodisperse polymer coils"
description = """
Evaluates the scattering from
monodisperse polymer chains.
"""
category = "shape-independent"
# pylint: disable=bad-whitespace, line-too-long
# ["name", "units", default, [lower, upper], "type", "description"],
parameters = [
["i_zero", "1/cm", 70.0, [0.0, inf], "", "Intensity at q=0"],
["rg", "Ang", 75.0, [0.0, inf], "volume", "Radius of gyration"],
]
# pylint: enable=bad-whitespace, line-too-long
source = ["mono_gauss_coil.c"]
have_Fq = False
radius_effective_modes = ["R_g", "2R_g", "3R_g", "sqrt(5/3)*R_g"]
def random():
    """Return a random parameter set for the model."""
    # radius of gyration drawn log-uniformly between 1 and 10^4 Ang
    radius = 10**np.random.uniform(0, 4)
    # i_zero acts as a simple scale factor
    return {"i_zero": 1e7, "rg": radius}
# these unit test values taken from SasView 3.1.2
tests = [
[{'scale': 1.0, 'i_zero': 70.0, 'rg': 75.0, 'background': 0.0},
[0.0106939, 0.469418], [57.1241, 0.112859]],
]
|
SasView/sasmodels
|
sasmodels/models/mono_gauss_coil.py
|
Python
|
bsd-3-clause
| 2,912
|
[
"Avogadro",
"Gaussian"
] |
120bbf6625d47904d80564e342134261f9d192ab6f7416fee5971d5883c6a183
|
import subprocess
import sys
import time
from datetime import datetime
#print(sys.argv[0])
print("""
    Wifi Packet Sniffer v0.1
-------------------------------
Options:
    -c     Channel to listen on
    -m     Target address
    -i     Interface to use
Credits:
    Re-scripted by scriptedp0ison
    Original program airodump-ng created by Thomas d'Otreppe
    Visit http://www.aircrack-ng.org for more info
""")
# BUG FIX: the original tested `sys.argv > 1 and ... > 6`, which compares a
# list with an int — always True on Python 2 and a TypeError on Python 3.
# Check the argument count instead: 7 entries = script name + 3 flag/value pairs.
if len(sys.argv) > 6:
    #subprocess.call(sys.argv[1], shell=True)
    print('[+] Channel set to: {}'.format(sys.argv[2]))
    time.sleep(2)
    print('[+] Target mac set to: {}'.format(sys.argv[4]))
    time.sleep(2)
    print('[+] Network card set to listen on: {}'.format(sys.argv[6]))
    time.sleep(2)
    print('[+] Compiling system arguments...')
    time.sleep(1)
    print('[+] Executing system arguments...')
    time.sleep(2)
    # SECURITY: user-supplied values are interpolated into a shell command
    # (shell=True) — command injection is possible; prefer
    # subprocess.call(['airodump-ng', '-c', ..., '--bssid', ..., ...]).
    launch_attack = 'airodump-ng -c {} --bssid {} {}'.format(sys.argv[2], sys.argv[4], sys.argv[6])
    subprocess.call(launch_attack, shell=True)
# BUG FIX: guard against IndexError when the script is run with no arguments
if len(sys.argv) > 1 and sys.argv[1] in ('-h', '-help', '--help'):
    print(""" Wifi Dosser v0.1 - Do not use for illegal purposes! Do not test on networks you dont own or have permission to test!
    _____________________________________________________________________________________________________________________
    Options:
    -h Display the help screen
    -m Target MAC Address
    -c channel to listen on
    -i Network Interface card
    Examples:
    python get_client.py -c <channel to listen on> -m <target mac address> -i <NIC>
    """)
|
scriptedp0ison/Kali-Linux-Wifi-Short-Cut-Scripts
|
get_client.py
|
Python
|
gpl-3.0
| 1,554
|
[
"VisIt"
] |
022c12fff59a58346adccb3782c70b2e9f9c1f80c58ee7cda6f2914a468b57ab
|
#!/usr/bin/env python
# -*- coding: utf-8 -*-
#
# king_phisher/client/client_rpc.py
#
# Redistribution and use in source and binary forms, with or without
# modification, are permitted provided that the following conditions are
# met:
#
# * Redistributions of source code must retain the above copyright
# notice, this list of conditions and the following disclaimer.
# * Redistributions in binary form must reproduce the above
# copyright notice, this list of conditions and the following disclaimer
# in the documentation and/or other materials provided with the
# distribution.
# * Neither the name of the project nor the names of its
# contributors may be used to endorse or promote products derived from
# this software without specific prior written permission.
#
# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
# "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
# LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
# A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
# OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
# SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
# LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
# DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
# THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
# (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
# OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
#
import code
import collections
import functools
import logging
import os
import queue
import ssl
import sys
from king_phisher import errors
from king_phisher import find
from king_phisher import geoip
from king_phisher import serializers
from king_phisher import utilities
from king_phisher.client import gui_utilities
import advancedhttpserver
import boltons.typeutils
import smoke_zephyr.utilities
from gi.repository import Gtk
_tag_mixin_slots = ('id', 'name', 'description')
_tag_mixin_types = (int, str, str)
_tag_tables = ('campaign_types', 'campaigns', 'companies', 'company_departments', 'industries')
database_table_objects = utilities.FreezableDict()
UNRESOLVED = boltons.typeutils.make_sentinel('UNRESOLVED', var_name='UNRESOLVED')
"""A sentinel value used for values in rows to indicate that the data has not been loaded from the server."""
_WorkItem = collections.namedtuple('_WorkItem',
('callback_on_success', 'callback_on_error', 'callback_when_idle', 'callback_args', 'callback_kwargs', 'method', 'args', 'kwargs')
)
class RemoteRowMeta(type):
    """Metaclass for :py:class:`.RemoteRow` subclasses.

    Prepends an ``__rpc__`` slot to each class's ``__slots__`` and registers
    any class that defines ``__table__`` in ``database_table_objects`` so row
    classes can later be looked up by table name.
    """
    def __new__(mcs, name, bases, dct):
        # every row instance carries a reference to its RPC client
        dct['__slots__'] = ('__rpc__',) + dct.get('__slots__', ())
        return super(RemoteRowMeta, mcs).__new__(mcs, name, bases, dct)
    def __init__(cls, *args, **kwargs):
        table_name = getattr(cls, '__table__', None)
        if table_name:
            # register by table name for remote_table / remote_table_row
            database_table_objects[table_name] = cls
        super(RemoteRowMeta, cls).__init__(*args, **kwargs)
# stylized metaclass definition to be Python 2.7 and 3.x compatible
class RemoteRow(RemoteRowMeta('_RemoteRow', (object,), {})):
    """
    A generic class representing a row of data from the remote King Phisher
    server.
    """
    __table__ = None
    __xref_attr__ = None
    __slots__ = ()
    def __init__(self, rpc, *args, **kwargs):
        # *args / **kwargs populate the column slots; any column not supplied
        # is left as the UNRESOLVED sentinel until remote_row_resolve loads it
        if not isinstance(rpc, KingPhisherRPCClient):
            raise ValueError('rpc is not a KingPhisherRPCClient instance')
        self.__rpc__ = rpc
        # [1:] skips the __rpc__ slot injected by the metaclass
        slots = self.__slots__[1:]
        values = collections.defaultdict(lambda: UNRESOLVED)
        if args:
            values.update(dict(zip(slots, args)))
        if kwargs:
            values.update(kwargs)
        for key in slots:
            value = values[key]
            # normalize byte strings coming from the server to text
            if isinstance(value, bytes):
                value = value.decode('utf-8')
            setattr(self, key, value)
    def __getattr__(self, item):
        # resolve cross-references: row.campaign fetches the row whose class
        # declares __xref_attr__ == 'campaign', keyed by self.campaign_id
        if hasattr(self, item + '_id'):
            row_id = getattr(self, item + '_id', None)
            for table, table_cls in database_table_objects.items():
                if table_cls.__xref_attr__ == item:
                    return self.__rpc__.remote_table_row(table, row_id)
        raise AttributeError("object has no attribute '{0}'".format(item))
    def _asdict(self):
        # column name -> value mapping (excludes the __rpc__ slot)
        return dict(zip(self.__slots__[1:], (getattr(self, prop) for prop in self.__slots__[1:])))
    def commit(self):
        """Send this object to the server to update the remote instance."""
        values = tuple(getattr(self, attr) for attr in self.__slots__[1:])
        # only send columns that have actually been loaded / set
        values = collections.OrderedDict(((k, v) for (k, v) in zip(self.__slots__[1:], values) if v is not UNRESOLVED))
        self.__rpc__('db/table/set', self.__table__, self.id, tuple(values.keys()), tuple(values.values()))
# Concrete row classes, one per remote database table. Each class's
# __table__ attribute causes RemoteRowMeta to register it in
# database_table_objects; __xref_attr__ (where present) is the attribute name
# other rows use to cross-reference it via RemoteRow.__getattr__.
class AlertSubscription(RemoteRow):
    __table__ = 'alert_subscriptions'
    __slots__ = ('id', 'user_id', 'campaign_id', 'expiration')
class Campaign(RemoteRow):
    __table__ = 'campaigns'
    __xref_attr__ = 'campaign'
    __slots__ = ('id', 'name', 'description', 'user_id', 'created', 'max_credentials', 'expiration', 'campaign_type_id', 'company_id')
class CampaignType(RemoteRow):
    __table__ = 'campaign_types'
    __xref_attr__ = 'campaign_type'
    __slots__ = _tag_mixin_slots
class Company(RemoteRow):
    __table__ = 'companies'
    __xref_attr__ = 'company'
    __slots__ = ('id', 'name', 'description', 'industry_id', 'url_main', 'url_email', 'url_remote_access')
class CompanyDepartment(RemoteRow):
    __table__ = 'company_departments'
    __xref_attr__ = 'company_department'
    __slots__ = _tag_mixin_slots
class Credential(RemoteRow):
    __table__ = 'credentials'
    __slots__ = ('id', 'visit_id', 'message_id', 'campaign_id', 'username', 'password', 'submitted')
class DeaddropConnection(RemoteRow):
    __table__ = 'deaddrop_connections'
    __slots__ = ('id', 'deployment_id', 'campaign_id', 'count', 'ip', 'local_username', 'local_hostname', 'local_ip_addresses', 'first_seen', 'last_seen')
class DeaddropDeployment(RemoteRow):
    __table__ = 'deaddrop_deployments'
    __xref_attr__ = 'deployment'
    __slots__ = ('id', 'campaign_id', 'destination')
class Industry(RemoteRow):
    __table__ = 'industries'
    __xref_attr__ = 'industry'
    __slots__ = _tag_mixin_slots
class LandingPage(RemoteRow):
    __table__ = 'landing_pages'
    __slots__ = ('id', 'campaign_id', 'hostname', 'page')
class Message(RemoteRow):
    __table__ = 'messages'
    __xref_attr__ = 'message'
    __slots__ = ('id', 'campaign_id', 'target_email', 'first_name', 'last_name', 'opened', 'opener_ip', 'opener_user_agent', 'reported', 'delivery_status', 'delivery_details', 'testing', 'sent', 'trained', 'company_department_id')
class User(RemoteRow):
    __table__ = 'users'
    __xref_attr__ = 'user'
    __slots__ = ('id', 'phone_carrier', 'phone_number', 'email_address', 'otp_secret', 'last_login', 'name', 'expiration', 'description')
class Visit(RemoteRow):
    __table__ = 'visits'
    __xref_attr__ = 'visit'
    __slots__ = ('id', 'message_id', 'campaign_id', 'first_landing_page_id', 'count', 'ip', 'details', 'first_seen', 'last_seen', 'user_agent')
database_table_objects.freeze()  # no further row classes may be registered
def _graphql_file(file_or_path):
if isinstance(file_or_path, str):
with open(file_or_path, 'r') as file_h:
query = file_h.read()
else:
query = file_or_path.read()
return query
def _graphql_find_file(query_file):
    """Locate *query_file* in the 'queries' data directory and return its text.

    :raises errors.KingPhisherResourceError: if the file can not be found
    """
    relative_path = os.path.join('queries', query_file)
    path = find.data_file(relative_path)
    if path is None:
        raise errors.KingPhisherResourceError('could not find GraphQL query file: ' + query_file)
    return _graphql_file(path)
class KingPhisherRPCClient(advancedhttpserver.RPCClientCached):
"""
The main RPC object for communicating with the King Phisher Server over RPC.
.. _client-rpc-async-methods:
.. versionadded:: 1.14.0 Asynchronous Methods
This RPC object provides a few methods for asynchronously making RPC calls
to the server. This makes it easier to issue and RPC call and then process
the results without having to either wait (and by extension lock the GUI
thread) or start and manage a separate thread. These methods use the name
``async_`` prefix and have many of the same arguments.
In all cases, the callback parameters *on_success* and *on_error* are called
with the signature :samp:`callback(*(cb_args + ({results},)), **cb_kwargs)`
where ``results`` is either the return value of the RPC method in the case
of *on_success* or the exception instance in the case of *on_error*. The
*when_idle* parameter can be used to specify that the callbacks must be
executed within the main GUI thread and can thus access GObjects such as
widgets.
"""
def __init__(self, *args, **kwargs):
self.logger = logging.getLogger('KingPhisher.Client.RPC')
super(KingPhisherRPCClient, self).__init__(*args, **kwargs)
self.set_serializer('binary/message-pack')
self._async_queue = queue.Queue()
self._async_thread = utilities.Thread(target=self._async_thread_routine, name='RPCAsyncWorker')
self._async_thread.start()
def __repr__(self):
return "<{0} '{1}@{2}:{3}{4}'>".format(self.__class__.__name__, self.username, self.host, self.port, self.uri_base)
def _async_thread_routine(self):
logger = logging.getLogger('KingPhisher.Client.RPC.Async')
logger.debug('the async RPC worker has started')
while True:
work_item = self._async_queue.get()
if work_item is None:
self._async_queue.task_done()
break
args = work_item.args or ()
kwargs = work_item.kwargs or {}
callback_args = work_item.callback_args or ()
callback_kwargs = work_item.callback_kwargs or {}
try:
results = work_item.method(*args, **kwargs)
except Exception as error:
logger.error("async rpc method: {} encountered an error".format(work_item.method.__name__), exc_info=True)
callback = work_item.callback_on_error
callback_args = callback_args + (error,)
else:
callback = work_item.callback_on_success
callback_args = callback_args + (results,)
if callback is not None:
if work_item.callback_when_idle:
gui_utilities.glib_idle_add_once(callback, *callback_args, **callback_kwargs)
else:
try:
callback(*callback_args, **callback_kwargs)
except Exception:
logger.error("async rpc callback: {} encountered an error".format(callback.__name__), exc_info=True)
self._async_queue.task_done()
logger.debug('the async RPC worker is exiting')
def async_call(self, method, args=None, kwargs=None, on_success=None, on_error=None, when_idle=False, cb_args=None, cb_kwargs=None):
"""
Perform an asynchronous RPC call to the server. This will queue a work
item for a thread to issue the RPC call and then specifies the behavior
for completion. See :ref:`Asynchronous Methods
<client-rpc-async-methods>` for more information.
.. versionadded:: 1.14.0
:param str method: The RPC method name to call.
:param tuple args: The arguments to the RPC method.
:param tuple kwargs: The keyword arguments to the RPC method.
:param on_success: A callback function to be called after the RPC method
returns successfully.
:param on_error: A callback function to be called if the RPC method
raises an exception.
:param when_idle: Whether or not the *on_success* and *on_error*
callback functions should be called from the main GUI thread while
it is idle.
:param cb_args: The arguments to the *on_success* and *on_error*
callback functions.
:param cb_kwargs: The keyword arguments to the *on_success* and
*on_error* callback functions.
"""
self._async_queue.put(_WorkItem(
callback_on_success=on_success,
callback_on_error=on_error,
callback_when_idle=when_idle,
callback_args=cb_args,
callback_kwargs=cb_kwargs,
method=self.call,
args=(method,) + (args or ()),
kwargs=kwargs
))
def async_graphql(self, query, query_vars=None, on_success=None, on_error=None, when_idle=False, cb_args=None, cb_kwargs=None):
"""
Perform an asynchronous RPC GraphQL query to the server. This will queue
a work item for a thread to issue the RPC call and then specifies the
behavior for completion. See :ref:`Asynchronous Methods
<client-rpc-async-methods>` for more information.
.. versionadded:: 1.14.0
:param str query: The GraphQL query string to execute asynchronously.
:param dict query_vars: Any variable definitions required by the GraphQL
query.
:param on_success: A callback function to be called after the RPC method
returns successfully.
:param on_error: A callback function to be called if the RPC method
raises an exception.
:param when_idle: Whether or not the *on_success* and *on_error*
callback functions should be called from the main GUI thread while
it is idle.
:param cb_args: The arguments to the *on_success* and *on_error*
callback functions.
:param cb_kwargs: The keyword arguments to the *on_success* and
*on_error* callback functions.
"""
self._async_queue.put(_WorkItem(
callback_on_success=on_success,
callback_on_error=on_error,
callback_when_idle=when_idle,
callback_args=cb_args,
callback_kwargs=cb_kwargs,
method=self.graphql,
args=(query,),
kwargs={'query_vars': query_vars}
))
def async_graphql_file(self, file_or_path, *args, **kwargs):
"""
Perform an asynchronous RPC GraphQL query from a file on the server.
This will queue a work item for a thread to issue the RPC call and then
specifies the behavior for completion. See :ref:`Asynchronous Methods
<client-rpc-async-methods>` for more information.
.. versionadded:: 1.14.0
:param file_or_path: The file object or path to the file from which to read.
"""
query = _graphql_file(file_or_path)
return self.async_graphql(query, *args, **kwargs)
def graphql(self, query, query_vars=None):
"""
Execute a GraphQL query on the server and return the results. This will
raise :py:exc:`~king_phisher.errors.KingPhisherGraphQLQueryError` if
the query fails.
:param str query: The GraphQL query string to execute.
:param query_vars: Any variable definitions required by the GraphQL
*query*.
:return: The query results.
:rtype: dict
"""
response = self.call('graphql', query, query_vars=query_vars)
if response['errors']:
raise errors.KingPhisherGraphQLQueryError(
'the query failed',
errors=response['errors'],
query=query,
query_vars=query_vars
)
return response['data']
def graphql_file(self, file_or_path, query_vars=None):
"""
This method wraps :py:meth:`~.graphql` to provide a convenient way to
execute GraphQL queries from files.
:param file_or_path: The file object or path to the file from which to read.
:param query_vars: The variables for *query*.
:return: The query results.
:rtype: dict
"""
query = _graphql_file(file_or_path)
return self.graphql(query, query_vars=query_vars)
def graphql_find_file(self, query_file, **query_vars):
"""
This method is similar to :py:meth:`~.graphql_file`. The first argument
(*query_file*) is the name of a query file that will be located using
:py:func:`find.data_file`. Additional keyword arguments are passed as
the variables to the query.
:param str query_file: The name of the query file to locate.
:param query_vars: These keyword arguments are passed as the variables to the query.
:return: The query results.
:rtype: dict
"""
query = _graphql_find_file(query_file)
return self.graphql(query, query_vars=query_vars)
def reconnect(self):
"""Reconnect to the remote server."""
self.lock.acquire()
if self.use_ssl:
if (sys.version_info[0] == 2 and sys.version_info >= (2, 7, 9)) or sys.version_info >= (3, 4, 3):
context = ssl.create_default_context()
context.check_hostname = False
context.verify_mode = ssl.CERT_NONE
self.client = advancedhttpserver.http.client.HTTPSConnection(self.host, self.port, context=context)
else:
self.client = advancedhttpserver.http.client.HTTPSConnection(self.host, self.port)
else:
self.client = advancedhttpserver.http.client.HTTPConnection(self.host, self.port)
self.lock.release()
def remote_row_resolve(self, row):
"""
Take a :py:class:`~.RemoteRow` instance and load all fields which are
:py:data:`~.UNRESOLVED`. If all fields are present, no modifications
are made.
:param row: The row who's data is to be resolved.
:rtype: :py:class:`~.RemoteRow`
:return: The row with all of it's fields fully resolved.
:rtype: :py:class:`~.RemoteRow`
"""
utilities.assert_arg_type(row, RemoteRow)
slots = getattr(row, '__slots__')[1:]
if not any(prop for prop in slots if getattr(row, prop) is UNRESOLVED):
return row
for key, value in self.call('db/table/get', getattr(row, '__table__'), row.id).items():
setattr(row, key, value)
return row
def remote_table(self, table, query_filter=None):
"""
Iterate over a remote database table hosted on the server. Rows are
yielded as named tuples whose fields are the columns of the specified
table.
:param str table: The table name to retrieve.
:return: A generator which yields rows of named tuples.
:rtype: tuple
"""
page = 0
results = self.call('db/table/view', table, page, query_filter=query_filter)
if results is None:
return
results_length = len(results['rows'])
row_cls = database_table_objects[table]
while results:
for row in results['rows']:
row = dict(zip(results['columns'], row))
yield row_cls(self, **row)
page += 1
if 'page_size' in results and 'total_rows' in results:
if results['page_size'] * page >= results['total_rows']:
break
if len(results['rows']) < results_length:
break
results = self.call('db/table/view', table, page, query_filter=query_filter)
def remote_table_row(self, table, row_id, cache=False, refresh=False):
"""
Get a row from the specified table by it's id, optionally caching it.
:param str table: The table in which the row exists.
:param row_id: The value of the row's id column.
:param bool cache: Whether to use the cache for this row.
:param bool refresh: If *cache* is True, get the current row value and store it.
:return: The remote row as a named tuple of the specified table.
:rtype: tuple
"""
if cache and refresh:
row = self.cache_call_refresh('db/table/get', table, row_id)
elif cache and not refresh:
row = self.cache_call('db/table/get', table, row_id)
else:
row = self.call('db/table/get', table, row_id)
if row is None:
return None
row_cls = database_table_objects[table]
return row_cls(self, **row)
def remote_table_row_set(self, table, row_id, attributes):
keys, values = zip(*attributes.items())
return self.call('db/table/set', table, row_id, keys, values)
def geoip_lookup(self, ip):
"""
Look up the geographic location information for the specified IP
address in the server's geoip database.
:param ip: The IP address to lookup.
:type ip: :py:class:`ipaddress.IPv4Address`, str
:return: The geographic location information for the specified IP address.
:rtype: :py:class:`~king_phisher.geoip.GeoLocation`
"""
result = self.cache_call('geoip/lookup', str(ip))
if result:
result = geoip.GeoLocation(ip, result=result)
return result
def geoip_lookup_multi(self, ips):
"""
Look up the geographic location information for the specified IP
addresses in the server's geoip database. Because results are cached
for optimal performance, IP addresses to be queried should be grouped
and sorted in a way that is unlikely to change, i.e. by a timestamp.
:param ips: The IP addresses to lookup.
:type ips: list, set, tuple
:return: The geographic location information for the specified IP address.
:rtype: dict
"""
ips = [str(ip) for ip in ips]
results = self.cache_call('geoip/lookup/multi', ips)
for ip, data in results.items():
results[ip] = geoip.GeoLocation(ip, result=data)
return results
def get_tag_model(self, tag_table, model=None):
"""
Load tag information from a remote table into a
:py:class:`Gtk.ListStore` instance. Tables compatible with the tag
interface must have id, name and description fields. If no *model* is
provided a new one will be created, else the current model will be
cleared.
:param str tag_table: The name of the table to load tag information from.
:param model: The model to place the information into.
:type model: :py:class:`Gtk.ListStore`
:return: The model with the loaded data from the server.
:rtype: :py:class:`Gtk.ListStore`
"""
if tag_table not in _tag_tables:
raise ValueError('tag_table is not a valid tag interface exposing table')
tag_table = smoke_zephyr.utilities.parse_case_snake_to_camel(tag_table, upper_first=False)
if model is None:
model = Gtk.ListStore(str, str, str)
# sort by the name column, ascending
model.set_sort_column_id(1, Gtk.SortType.ASCENDING)
else:
model.clear()
graphql_query = 'query getTags { db { ' + tag_table + ' { edges { node { id name description } } } } }'
tags = self.graphql(graphql_query)['db'][tag_table]['edges']
for tag in tags:
tag = tag['node']
model.append((tag['id'], tag['name'], tag['description']))
return model
def login(self, username, password, otp=None):
"""
Authenticate to the remote server. This is required before calling RPC
methods which require an authenticated session.
:param str username: The username to authenticate with.
:param str password: The password to authenticate with.
:param str otp: An optional one time password as a 6 digit string to provide if the account requires it.
:return: The login result and an accompanying reason.
:rtype: tuple
"""
login_result, login_reason, login_session = self.call('login', username, password, otp)
if login_result:
if self.headers is None:
self.headers = {}
self.headers['X-RPC-Auth'] = login_session
return login_result, login_reason
def ping(self):
    """
    Call the ping RPC method on the remote server to ensure that it is
    responsive. On success this method will always return True, otherwise
    an exception will be thrown.

    :return: True
    :rtype: bool
    """
    # thin pass-through; any transport or server error propagates to the caller
    return self.call('ping')
def shutdown(self):
    """Stop the asynchronous worker thread and block until it has exited."""
    self._async_queue.put(None)  # None is the sentinel telling the worker loop to stop
    self._async_queue.join()     # wait until all queued work has been consumed
    self._async_thread.join()
def _magic_graphql(rpc, mode, line):
    """Shared backend for the IPython %graphql and %graphql_file magics.

    In 'query' mode *line* is the raw GraphQL query; in 'file' mode it is a
    path (with env vars and ~ expanded) to a file containing the query.
    Returns the query result, or None after printing an error message.
    """
    if mode == 'query':
        query = line
    elif mode == 'file':
        path = os.path.expanduser(os.path.expandvars(line))
        if not os.access(path, os.R_OK):
            print('GraphQL Exception: invalid query file')
            return
        with open(path, 'r') as file_h:
            query = file_h.read()
    else:
        raise RuntimeError('unsupported magic mode: ' + mode)
    try:
        result = rpc.graphql(query)
    except errors.KingPhisherGraphQLQueryError as error:
        print('GraphQL Exception: ' + error.message)
        for message in error.errors:
            print(message.rstrip())
        return
    return result
def vte_child_routine(config):
    """
    This is the method which is executed within the child process spawned
    by VTE. It expects additional values to be set in the *config*
    object so it can initialize a new :py:class:`.KingPhisherRPCClient`
    instance. It will then drop into an interpreter where the user may directly
    interact with the rpc object.

    :param str config: A JSON encoded client configuration.
    """
    config = serializers.JSON.loads(config)
    # readline + rlcompleter provide tab completion for the plain-console fallback
    try:
        import readline
        import rlcompleter  # pylint: disable=unused-variable
    except ImportError:
        has_readline = False
    else:
        has_readline = True
    # prefer an embedded IPython shell when the package is installed
    try:
        import IPython.terminal.embed
    except ImportError:
        has_ipython = False
    else:
        has_ipython = True
    # make any bundled RPC plugin directories importable from the console
    for plugins_directory in ('rpc_plugins', 'rpc-plugins'):
        plugins_directory = find.data_directory(plugins_directory)
        if not plugins_directory:
            continue
        sys.path.append(plugins_directory)
    # rebuild the RPC client from the serialized settings, restoring headers
    headers = config['rpc_data'].pop('headers')
    rpc = KingPhisherRPCClient(**config['rpc_data'])
    if rpc.headers is None:
        rpc.headers = {}
    for name, value in headers.items():
        rpc.headers[str(name)] = str(value)
    user_data_path = config['user_data_path']
    sys.path.append(config['user_library_path'])
    print("Python {0} on {1}".format(sys.version, sys.platform))  # pylint: disable=superfluous-parens
    print("Campaign Name: '{0}' ID: {1}".format(config['campaign_name'], config['campaign_id']))  # pylint: disable=superfluous-parens
    print('The \'rpc\' object holds the connected KingPhisherRPCClient instance')
    # names exposed in the interactive session
    console_vars = {
        'CAMPAIGN_NAME': config['campaign_name'],
        'CAMPAIGN_ID': config['campaign_id'],
        'os': os,
        'rpc': rpc,
        'sys': sys
    }
    if has_ipython:
        console = IPython.terminal.embed.InteractiveShellEmbed(ipython_dir=os.path.join(user_data_path, 'ipython'))
        # register %graphql and %graphql_file line magics backed by _magic_graphql
        console.register_magic_function(functools.partial(_magic_graphql, rpc, 'query'), 'line', 'graphql')
        console.register_magic_function(functools.partial(_magic_graphql, rpc, 'file'), 'line', 'graphql_file')
        console.mainloop(console_vars)
    else:
        if has_readline:
            readline.parse_and_bind('tab: complete')
        console = code.InteractiveConsole(console_vars)
        # inject the variables as builtins so they survive into nested scopes
        for var in tuple(console_vars.keys()):
            console.push("__builtins__['{0}'] = {0}".format(var))
        console.interact('')
    return
|
securestate/king-phisher
|
king_phisher/client/client_rpc.py
|
Python
|
bsd-3-clause
| 24,921
|
[
"VisIt"
] |
41742e828a64655a879610b5d744472cd750075df2b071d30fb2da4808fb5189
|
# electronics.py ---
#
# Filename: electronics.py
# Description:
# Author: Subhasis Ray
# Maintainer: Dilawar Singh
import numpy as np
import moose
class ClampCircuit(object):
    """Container for a Voltage-Clamp/Current clamp circuit."""
    # NOTE(review): this dict is not referenced inside the class; it appears to
    # duplicate the pulse-generator settings hard-coded in __init__ — confirm
    # whether external code reads it before removing.
    defaults = {
        'level1': 25.0,
        'width1': 50.0,
        'delay1': 2.0,
        'delay2': 1e6,
        'trigMode': 0,
        'delay3': 1e9
    }
    def __init__(self, path, squid):
        """Create the clamp circuitry under the MOOSE element *path* and wire
        it to *squid* (an object exposing a compartment as ``squid.C``)."""
        self.path = path
        moose.Neutral(path)
        self.pulsegen = moose.PulseGen(path+"/pulse") # holding voltage/current generator
        self.pulsegen.count = 2
        self.pulsegen.firstLevel = 25.0
        self.pulsegen.firstWidth = 50.0
        self.pulsegen.firstDelay = 2.0
        self.pulsegen.secondDelay = 0.0
        self.pulsegen.trigMode = 2  # externally gated by self.gate below
        self.gate = moose.PulseGen(path + "/gate") # holding voltage/current generator
        self.gate.level[0] = 1.0
        self.gate.delay[0] = 0.0
        self.gate.width[0] = 1e9
        moose.connect(self.gate, "output", self.pulsegen, "input")
        self.lowpass = moose.RC(path + "/lowpass") # lowpass filter
        self.lowpass.R = 1.0
        self.lowpass.C = 0.03
        # DiffAmps act as on/off switches: gain 1.0 selects a clamp mode,
        # gain 0.0 disables it (see do_voltage_clamp/do_current_clamp)
        self.vclamp = moose.DiffAmp(path + "/vclamp")
        self.vclamp.gain = 0.0
        self.vclamp.saturation = 1e10
        self.iclamp = moose.DiffAmp(path + "/iclamp")
        self.iclamp.gain = 0.0
        self.iclamp.saturation = 1e10
        # PID controller drives the injected current so Vm tracks the command
        self.pid = moose.PIDController(path + "/pid")
        self.pid.gain = 0.5
        self.pid.tauI = 0.02
        self.pid.tauD = 0.005
        self.pid.saturation = 1e10
        # Connect current clamp circuitry
        moose.connect(self.pulsegen, "output", self.iclamp, "plusIn")
        moose.connect(self.iclamp, "output", squid.C, "injectMsg")
        # Connect voltage clamp circuitry
        moose.connect(self.pulsegen, "output", self.lowpass, "injectIn")
        moose.connect(self.lowpass, "output", self.vclamp, "plusIn")
        moose.connect(self.vclamp, "output", self.pid, "commandIn")
        moose.connect(squid.C, "VmOut", self.pid, "sensedIn")
        moose.connect(self.pid, "output", squid.C, "injectMsg")
        # record membrane current; assumes a '/data' container already exists
        # elsewhere in the model — TODO confirm
        current_table = moose.Table("/data/Im")
        moose.connect(current_table, "requestOut", squid.C, "getIm")
    def configure_pulses(
        self,
        baseLevel=0.0,
        firstLevel=0.1,
        firstDelay=5.0,
        firstWidth=40.0,
        secondLevel=0.0,
        secondDelay=1e6,
        secondWidth=0.0,
        singlePulse=True,
    ):
        """Set up the pulse generator."""
        self.pulsegen.baseLevel = baseLevel
        self.pulsegen.firstLevel = firstLevel
        self.pulsegen.firstWidth = firstWidth
        self.pulsegen.firstDelay = firstDelay
        self.pulsegen.secondLevel = secondLevel
        self.pulsegen.secondDelay = secondDelay
        self.pulsegen.secondWidth = secondWidth
        # trigMode 1: fire once on trigger; trigMode 0: free-running
        if singlePulse:
            self.pulsegen.trigMode = 1
        else:
            self.pulsegen.trigMode = 0
    def do_voltage_clamp(self):
        """Switch to voltage clamp circuitry. After this the simdt may
        need to be changed for correct performance."""
        self.vclamp.gain = 1.0
        self.iclamp.gain = 0.0
        self.pid.gain = 0.5
        self.pid.tauD = 0.005
        self.pid.tauI = 0.02
    def do_current_clamp(self):
        """Switch to current clamp circuitry. After this the simdt may
        need to be changed for correct performance."""
        self.iclamp.gain = 1.0
        self.vclamp.gain = 0.0
        self.pid.gain = 0.0  # disable the PID so only iclamp injects current
|
BhallaLab/moose-examples
|
squid/electronics.py
|
Python
|
gpl-2.0
| 3,586
|
[
"MOOSE"
] |
585d9b83d5a7faee10ee1619bfc57d541d6baf9a6de101d11ce80c459a8c7858
|
'''-------------------------------------------------------------------------------
Tool Name: CreateInflowFileFromECMWFRunoff
Source Name: CreateInflowFileFromECMWFRunoff.py
Version: ArcGIS 10.3
Author: Environmental Systems Research Institute Inc.
Updated by: Environmental Systems Research Institute Inc.
Description: Creates RAPID inflow file based on the ECMWF runoff output
             and the weight table previously created.
History: Initial coding - 10/21/2014, version 1.0
Updated: Version 1.0, 10/23/2014, modified names of tool and parameters
Version 1.0, 10/28/2014, added data validation
Version 1.0, 10/30/2014, initial version completed
Version 1.1, 11/05/2014, modified the algorithm for extracting runoff
variable from the netcdf dataset to improve computation efficiency
Version 1.2, 02/03/2015, bug fixing - output netcdf3-classic instead
of netcdf4 as the format of RAPID inflow file
Version 1.2, 02/03/2015, bug fixing - calculate inflow assuming that
ECMWF runoff data is cumulative instead of incremental through time
-------------------------------------------------------------------------------'''
import os
import netCDF4 as NET
import numpy as NUM
import csv
class CreateInflowFileFromECMWFRunoff(object):
def __init__(self):
"""Define the tool (tool name is the name of the class)."""
self.label = "Create Inflow File From ECMWF Runoff"
self.description = ("Creates RAPID NetCDF input of water inflow " +
"based on ECMWF runoff results and previously created weight table.")
self.canRunInBackground = False
self.header_wt = ['StreamID', 'area_sqm', 'lon_index', 'lat_index', 'npoints', 'weight', 'Lon', 'Lat']
self.dims_oi = ['lon', 'lat', 'time']
self.vars_oi = ["lon", "lat", "time", "RO"]
self.length_time = {"LowRes": 61, "HighRes": 125}
self.length_time_opt = {"LowRes": 61, "HighRes-1hr": 91, "HighRes-3hr": 49, "HighRes-6hr": 41}
self.errorMessages = ["Missing Variable 'time'",
"Incorrect dimensions in the input ECMWF runoff file.",
"Incorrect variables in the input ECMWF runoff file.",
"Incorrect time variable in the input ECMWF runoff file",
"Incorrect number of columns in the weight table",
"No or incorrect header in the weight table",
"Incorrect sequence of rows in the weight table"]
def dataValidation(self, in_nc):
"""Check the necessary dimensions and variables in the input netcdf data"""
data_nc = NET.Dataset(in_nc)
dims = data_nc.dimensions.keys()
if dims != self.dims_oi:
raise Exception(self.errorMessages[1])
vars = data_nc.variables.keys()
if vars != self.vars_oi:
raise Exception(self.errorMessages[2])
return
def dataIdentify(self, in_nc):
"""Check if the data is Ensemble 1-51 (low resolution) or 52 (high resolution)"""
data_nc = NET.Dataset(in_nc)
name_time = self.vars_oi[2]
time = data_nc.variables[name_time][:]
diff = NUM.unique(NUM.diff(time))
data_nc.close()
time_interval_highres = NUM.array([1.0,3.0,6.0],dtype=float)
time_interval_lowres = NUM.array([6.0],dtype=float)
if (diff == time_interval_highres).all():
return "HighRes"
elif (diff == time_interval_lowres).all():
return "LowRes"
else:
return None
def execute(self, in_nc, in_weight_table, out_nc, in_time_interval="6hr"):
"""The source code of the tool."""
# Validate the netcdf dataset
self.dataValidation(in_nc)
# identify if the input netcdf data is the High Resolution data with three different time intervals
id_data = self.dataIdentify(in_nc)
if id_data is None:
raise Exception(self.errorMessages[3])
''' Read the netcdf dataset'''
data_in_nc = NET.Dataset(in_nc)
time = data_in_nc.variables[self.vars_oi[2]][:]
# Check the size of time variable in the netcdf data
if len(time) != self.length_time[id_data]:
raise Exception(self.errorMessages[3])
''' Read the weight table '''
print "Reading the weight table..."
dict_list = {self.header_wt[0]:[], self.header_wt[1]:[], self.header_wt[2]:[],
self.header_wt[3]:[], self.header_wt[4]:[], self.header_wt[5]:[],
self.header_wt[6]:[], self.header_wt[7]:[]}
streamID = ""
with open(in_weight_table, "rb") as csvfile:
reader = csv.reader(csvfile)
count = 0
for row in reader:
if count == 0:
#check number of columns in the weight table
if len(row) != len(self.header_wt):
raise Exception(self.errorMessages[4])
#check header
if row[1:len(self.header_wt)] != self.header_wt[1:len(self.header_wt)]:
raise Exception(self.errorMessages[5])
streamID = row[0]
count += 1
else:
for i in range(0,8):
dict_list[self.header_wt[i]].append(row[i])
count += 1
'''Calculate water inflows'''
print "Calculating water inflows..."
# Obtain size information
if id_data == "LowRes":
size_time = self.length_time_opt["LowRes"]
else:
if in_time_interval == "1hr":
size_time = self.length_time_opt["HighRes-1hr"]
elif in_time_interval == "3hr":
size_time = self.length_time_opt["HighRes-3hr"]
else:
size_time = self.length_time_opt["HighRes-6hr"]
size_streamID = len(set(dict_list[self.header_wt[0]]))
# Create output inflow netcdf data
# data_out_nc = NET.Dataset(out_nc, "w") # by default format = "NETCDF4"
data_out_nc = NET.Dataset(out_nc, "w", format = "NETCDF3_CLASSIC")
dim_Time = data_out_nc.createDimension('Time', size_time)
dim_RiverID = data_out_nc.createDimension(streamID, size_streamID)
var_m3_riv = data_out_nc.createVariable('m3_riv', 'f4', ('Time', streamID))
data_temp = NUM.empty(shape = [size_time, size_streamID])
lon_ind_all = [long(i) for i in dict_list[self.header_wt[2]]]
lat_ind_all = [long(j) for j in dict_list[self.header_wt[3]]]
# Obtain a subset of runoff data based on the indices in the weight table
min_lon_ind_all = min(lon_ind_all)
max_lon_ind_all = max(lon_ind_all)
min_lat_ind_all = min(lat_ind_all)
max_lat_ind_all = max(lat_ind_all)
data_subset_all = data_in_nc.variables[self.vars_oi[3]][:, min_lat_ind_all:max_lat_ind_all+1, min_lon_ind_all:max_lon_ind_all+1]
len_time_subset_all = data_subset_all.shape[0]
len_lat_subset_all = data_subset_all.shape[1]
len_lon_subset_all = data_subset_all.shape[2]
data_subset_all = data_subset_all.reshape(len_time_subset_all, (len_lat_subset_all * len_lon_subset_all))
# compute new indices based on the data_subset_all
index_new = []
for r in range(0,count-1):
ind_lat_orig = lat_ind_all[r]
ind_lon_orig = lon_ind_all[r]
index_new.append((ind_lat_orig - min_lat_ind_all)*len_lon_subset_all + (ind_lon_orig - min_lon_ind_all))
# obtain a new subset of data
data_subset_new = data_subset_all[:,index_new]
# start compute inflow
pointer = 0
for s in range(0, size_streamID):
npoints = int(dict_list[self.header_wt[4]][pointer])
# Check if all npoints points correspond to the same streamID
if len(set(dict_list[self.header_wt[0]][pointer : (pointer + npoints)])) != 1:
print "ROW INDEX", pointer
print "COMID", dict_list[self.header_wt[0]][pointer]
raise Exception(self.errorMessages[2])
area_sqm_npoints = [float(k) for k in dict_list[self.header_wt[1]][pointer : (pointer + npoints)]]
area_sqm_npoints = NUM.array(area_sqm_npoints)
area_sqm_npoints = area_sqm_npoints.reshape(1, npoints)
data_goal = data_subset_new[:, pointer:(pointer + npoints)]
''''IMPORTANT NOTE: runoff variable in ECMWF dataset is cumulative instead of incremental through time'''
# For data with Low Resolution, there's only one time interval 6 hrs
if id_data == "LowRes":
#ro_stream = data_goal * area_sqm_npoints
ro_stream = NUM.concatenate([data_goal[0:1,],
NUM.subtract(data_goal[1:,],data_goal[:-1,])]) * area_sqm_npoints
#For data with High Resolution, from Hour 0 to 90 (the first 91 time points) are of 1 hr time interval,
# then from Hour 90 to 144 (19 time points) are of 3 hour time interval, and from Hour 144 to 240 (15 time points)
# are of 6 hour time interval
else:
if in_time_interval == "1hr":
ro_stream = NUM.concatenate([data_goal[0:1,],
NUM.subtract(data_goal[1:91,],data_goal[:90,])]) * area_sqm_npoints
elif in_time_interval == "3hr":
# Hour = 0 is a single data point
ro_3hr_a = data_goal[0:1,]
# calculate time series of 3 hr data from 1 hr data
ro_3hr_b = NUM.subtract(data_goal[3:91:3,],data_goal[:88:3,])
# get the time series of 3 hr data
ro_3hr_c = NUM.subtract(data_goal[91:109,], data_goal[90:108,])
# concatenate all time series
ro_stream = NUM.concatenate([ro_3hr_a, ro_3hr_b, ro_3hr_c]) * area_sqm_npoints
else: # in_time_interval == "6hr"
#arcpy.AddMessage("6hr")
# Hour = 0 is a single data point
ro_6hr_a = data_goal[0:1,]
# calculate time series of 6 hr data from 1 hr data
ro_6hr_b = NUM.subtract(data_goal[6:91:6,], data_goal[:85:6,])
# calculate time series of 6 hr data from 3 hr data
ro_6hr_c = NUM.subtract(data_goal[92:109:2,], data_goal[90:107:2,])
# get the time series of 6 hr data
ro_6hr_d = NUM.subtract(data_goal[109:,], data_goal[108:124,])
# concatenate all time series
ro_stream = NUM.concatenate([ro_6hr_a, ro_6hr_b, ro_6hr_c, ro_6hr_d]) * area_sqm_npoints
data_temp[:,s] = ro_stream.sum(axis = 1)
pointer += npoints
'''Write inflow data'''
print "Writing inflow data..."
var_m3_riv[:] = data_temp
# close the input and output netcdf datasets
data_in_nc.close()
data_out_nc.close()
return
|
CI-WATER/erfp_data_process_ubuntu_aws
|
CreateInflowFileFromECMWFRunoff.py
|
Python
|
mit
| 11,619
|
[
"NetCDF"
] |
88fe13085be57276bc40cd97349ed41642501b727e3eccb1cc1106d818ae8130
|
# DEPTH-FIRST-SEARCH
# A graph is searched depth first by visiting an initial
# vertex and then visiting one of its neighbours. After
# visiting this neighbour you visit one of its neighbours
# and so on. If you come to a vertex with no neighbors you
# backtrack one stage and visit another neighbour of the
# previous vertex.
# The algorithm
# ------------------------------------------------------
# set the colour of v to black
# ITERATE over all w that are neighbours of v
# IF the colour of w is white
# depth first search of G from w
# ------------------------------------------------------
def dfs(v, g):
    """Depth-first search from vertex *v*, colouring visited vertices black.

    :param v: key of the start vertex in *g*.
    :param g: dict mapping vertex -> {'colour': 'white'|'black',
        'neighbours': [vertex, ...]}; mutated in place.
    """
    g[v]['colour'] = 'black'
    for w in g[v]['neighbours']:
        # bug fix: use '==' for string comparison; "is 'white'" compares
        # object identity and only works by accident of CPython interning
        if g[w]['colour'] == 'white':
            dfs(w, g)
# An algorithm traversing the vertices of a graph needs
# some way in which to detect when a vertex has already
# been visited, or it might continue endlessly revisiting
# vertices and never terminate. The traditional way to
# record when a vertex has been visited is to give each
# vertex a colour, white or black. Unvisited vertices are
# coloured white. When a vertex is visited, its colour is
# changed to black and black vertices are not revisited.
def chatty_dfs(vertex, graph):
    """Depth-first search that prints each vertex as it is visited.

    Colours visited vertices black and returns the (mutated) graph.

    :param vertex: key of the start vertex in *graph*.
    :param graph: dict mapping vertex -> {'colour': ..., 'neighbours': [...]}.
    :return: the same graph dict, after mutation.
    """
    v = vertex
    g = graph
    print( "Visited: ", v)
    g[v]['colour'] = 'black'
    for w in g[v]['neighbours']:
        # bug fix: use '==' for string comparison; "is 'white'" compares
        # object identity and only works by accident of CPython interning
        if g[w]['colour'] == 'white':
            g = chatty_dfs(w, g)
    return g
# Demo: traverse a small undirected 5-vertex graph, starting from vertex 3,
# printing the graph before and after (the traversal mutates colours in place).
graph1 = {
    1 : { 'colour' : 'white', 'neighbours' : [2, 3, 4] },
    2 : { 'colour' : 'white', 'neighbours' : [1, 4, 5] },
    3 : { 'colour' : 'white', 'neighbours' : [1, 4] },
    4 : { 'colour' : 'white', 'neighbours' : [1, 2, 3] },
    5 : { 'colour' : 'white', 'neighbours' : [2]}
}
print( graph1 )
print( chatty_dfs(3, graph1) )
|
melayev/algods
|
dfs.py
|
Python
|
mit
| 1,714
|
[
"VisIt"
] |
c482ea83aa7c25cf86e5f87a0b563f2d08bc0814c00a9376a91478c27b33743f
|
"""Module for reading, writing, compressing and converting files.
Please note, some of the functions in this module were created and tested using
VTK 5. VTK 6 introduced a number of backwards-incompatible changes, including
replacing 'SetInput()' with 'SetInputData()' and 'SetInputConnection'.
"""
import glob
import os
import csv
import vtk
import gzip
import StringIO
def compress(path='test.vtp'):
    """Write a gzip-compressed copy of *path* alongside it, suffixed '.gz'."""
    with open(path, 'rb') as source, gzip.open(path + '.gz', 'wb') as target:
        target.writelines(source)
def decompress(path='test.vtp.gz'):
    """Inflate a gzip file; the output name is *path* without its '.gz' suffix."""
    with gzip.open(path, 'rb') as source, open(path[:-3], 'wb') as target:
        target.write(source.read())
def csv_to_list(path):
    """Convert CSV-file to a nested list of strings.

    NOTE(review): opens the file in 'rb' mode, which the csv module requires
    on Python 2; under Python 3 this would need text mode with newline=''.
    """
    with open(path, 'rb') as f:
        reader = csv.reader(f)
        return list(reader)
def csv_to_dict(path):
    """Create nested dictionary from csv file. Workaround for when pandas is
    unavailable and you want to select 2D array elements with row and column
    names rather than integers.

    * First row is used for column names
    * First column is used for row names.
    * Access data from dictionary x using x['rowname']['columnname']
    * Extract all row names with x.keys()
    * Extract all column names with x.values()[0].keys()

    Note: Expects '\n' as newline character.
    """
    x = {}
    with open(path, 'rb') as f:
        # f.next() is Python 2 file iteration: consume the header row first;
        # the leading cell (row-name column header) is dropped
        header = f.next().strip().split(',')[1:]
        for line in f:
            row = line.strip().split(',')
            # map each remaining cell to its column name, keyed by row name
            x[row[0]] = dict(
                (header[i], v) for i, v in enumerate(row[1:]))
    return x
def listdir(path, match='*', dirname=False, extension=False):
    """List all files and folders in specified directory.

    Args:
        path: Path to directory.
        match: Specify file name pattern according to rules used by Unix
            shell. For instance, 'match=*.pdf' gives you a list of names of all
            the pdf-files in 'path'.
        dirname (bool): Include whole path name.
        extension (bool): Include file extension.
    """
    entries = glob.glob(os.path.join(path, match))
    if not dirname:
        entries = [os.path.basename(entry) for entry in entries]
    if not extension:
        entries = [os.path.splitext(entry)[0] for entry in entries]
    return entries
def readvti(path):
    """Read VTI-file, i.e. image in VTK XML format.

    Returns the reader's vtkImageData output.
    """
    reader = vtk.vtkXMLImageDataReader()
    reader.SetFileName(path)
    reader.Update()
    return reader.GetOutput()
def readvtk(path, datatype='polydata'):
    """Read a legacy (non-XML) VTK file.

    Args:
        path: Path to file.
        datatype: 'imagedata', 'polydata' or 'unstructeredgrid'.

    Raises:
        ValueError: If *datatype* is not one of the supported names.
    """
    if datatype == 'imagedata':
        reader = vtk.vtkStructuredPointsReader()
    elif datatype == 'polydata':
        reader = vtk.vtkPolyDataReader()
    elif datatype == 'unstructeredgrid':
        reader = vtk.vtkUnstructuredGridReader()
    else:
        # bug fix: previously only printed a warning and then crashed with an
        # UnboundLocalError on 'reader'; fail explicitly instead
        raise ValueError('Invalid datatype')
    reader.SetFileName(path)
    reader.Update()
    return reader.GetOutput()
def readvtp(path, dataarrays=True):
    """Read VTP-file, i.e. polydata in VTK XML format.

    Args:
        dataarrays (bool): Include point and cell data. When False, every
            point and cell data array is disabled before the final read.
    """
    reader = vtk.vtkXMLPolyDataReader()
    reader.SetFileName(path)
    reader.Update()
    if dataarrays == False:
        for i in range(reader.GetNumberOfPointArrays()):
            arrayname = reader.GetPointArrayName(i)
            reader.SetPointArrayStatus(arrayname, 0)
        for i in range(reader.GetNumberOfCellArrays()):
            arrayname = reader.GetCellArrayName(i)
            # bug fix: was SetPointArrayStatus, which left cell arrays enabled
            reader.SetCellArrayStatus(arrayname, 0)
        reader.Update()
    return reader.GetOutput()
def readvtu(path):
    """Read VTU-file, i.e. unstructured grid in VTK XML format.

    Returns the reader's vtkUnstructuredGrid output.
    """
    reader = vtk.vtkXMLUnstructuredGridReader()
    reader.SetFileName(path)
    reader.Update()
    return reader.GetOutput()
def replacestring(lines, tag, value):
    """Replace string in list of strings.

    Args:
        lines: List of strings.
        tag: String to replace.
        value: String with which to replace 'tag'.

    Returns:
        A new list; the input list is left unmodified.
    """
    return [entry.replace(tag, value) for entry in lines]
def writepoints(points, filename):
    """Write points as VTP-file.

    Each point is wrapped in a one-point vertex cell so that it is rendered.
    NOTE(review): uses the VTK 5 ``SetInput`` API (see module docstring).
    """
    polydata = vtk.vtkPolyData()
    cellarray = vtk.vtkCellArray()
    for i in range(points.GetNumberOfPoints()):
        cellarray.InsertNextCell(1)
        cellarray.InsertCellPoint(i)
    polydata.SetPoints(points)
    polydata.SetVerts(cellarray)
    writer = vtk.vtkXMLPolyDataWriter()
    writer.SetFileName(filename)
    writer.SetInput(polydata)
    writer.Write()
def writevti(image, path):
    """Write VTI-files, i.e. images in VTK XML format.

    NOTE(review): uses the VTK 5 ``SetInput`` API (see module docstring).
    """
    writer = vtk.vtkXMLImageDataWriter()
    writer.SetInput(image)
    writer.SetFileName(path)
    writer.Write()
def writevtp(polydata, path):
    """Write VTP-files, i.e. polydata in VTK XML format.

    NOTE(review): uses the VTK 5 ``SetInput`` API (see module docstring).
    """
    writer = vtk.vtkXMLPolyDataWriter()
    writer.SetInput(polydata)
    writer.SetFileName(path)
    writer.Write()
def writevtu(grid, path):
    """Write VTU-files, i.e. unstructured grids in VTK XML format.

    NOTE(review): uses the VTK 5 ``SetInput`` API (see module docstring).
    """
    writer = vtk.vtkXMLUnstructuredGridWriter()
    writer.SetInput(grid)
    writer.SetFileName(path)
    writer.Write()
#-------------------------------------------------------------------------------
# CFX
#-------------------------------------------------------------------------------
def cfx2vtp(inputfile, outputfile, surface=True, ascii=False):
    """Convert polydata exported from CFX-Post to VTP.

    Args:
        surface (bool): Convert surface or line polydata.
        ascii (bool): Return VTP file in ASCII format.

    Export surface in CFX-Post with following options:

    * file extension: csv
    * export geometry information: line and face connectivity
    * (optional) select variable(s)
    * vector display: scalar
    * separator: comma space
    * include header

    NOTE(review): Python 2 code — binary-mode csv, ``reader.next()`` and the
    VTK 5 ``SetInput`` API.
    """
    f = open(inputfile, 'rb')
    # derive data size from csv file: first pass records the line numbers of
    # the '[Data]' and '[Faces]'/'[Lines]' section markers
    if surface:
        for i, line in enumerate(f):
            if line.strip() == '[Data]':
                datalinenumber = i
            if line.strip() == '[Faces]':
                faceslinenumber = i
            lastlinenumber = i
        # the 3 accounts for the marker, header and blank separator lines
        numberofnodes = faceslinenumber - datalinenumber - 3
        numberofelements = lastlinenumber - faceslinenumber - 1
    else:
        for i, line in enumerate(f):
            if line.strip() == '[Data]':
                datalinenumber = i
            if line.strip() == '[Lines]':
                lineslinenumber = i
        numberofnodes = lineslinenumber - datalinenumber - 3
    # obtain list of variables names: rewind, skip to the header line just
    # after '[Data]', drop the three coordinate columns
    f.seek(0)
    for i in range(datalinenumber + 2):
        arrayline = f.readline()
    arraynames = arrayline.strip().split(', ')
    arraynames[0:3] = []
    # define polydata
    points = vtk.vtkPoints()
    cells = vtk.vtkCellArray()
    points.SetNumberOfPoints(numberofnodes)
    polydata = vtk.vtkPolyData()
    polydata.SetPoints(points)
    polydata.SetPolys(cells) if surface else polydata.SetLines(cells)
    # one point-data array per exported variable
    for arrayname in arraynames:
        array = vtk.vtkDoubleArray()
        array.SetName(arrayname)
        array.SetNumberOfTuples(numberofnodes)
        polydata.GetPointData().AddArray(array)
    # parse through the rest of the file using the csv module
    reader = csv.reader(f)
    # assign x,y,z coordinates and variable values to points
    for i in range(numberofnodes):
        dataline = reader.next()
        point = [float(dataline[0]), float(dataline[1]), float(dataline[2])]
        points.SetPoint(i, point)
        for j in range(len(arraynames)):
            dataarray = polydata.GetPointData().GetArray(arraynames[j])
            dataarray.SetComponent(i, 0, float(dataline[j + 3]))
    # skip element '[Faces]' (or '[Lines]') in csv-file
    reader.next()
    reader.next()
    if surface:
        # obtain and set connectivity
        cellids = vtk.vtkIdList()
        for i in range(numberofelements):
            facesline = reader.next()
            cellids.Initialize()
            for j in range(len(facesline)):
                cellids.InsertNextId(int(facesline[j]))
            cells.InsertNextCell(cellids)
    else:
        # obtain connectivity
        connectivitylist = []
        for row in reader:
            row = [int(item) for item in row]
            connectivitylist.append(row)
        connectivitylist = filter(None, connectivitylist)
        # rearrange connectivity: chain consecutive point-id pairs into
        # polylines; a break in the chain starts a new line
        linecounter = 0
        for i in range(len(connectivitylist)):
            if i == 0:
                connectivity = [connectivitylist[i]]
            elif connectivitylist[i][0] == connectivitylist[i - 1][1]:
                connectivity[linecounter].append(connectivitylist[i][1])
            else:
                connectivity.append([])
                linecounter += 1
                connectivity[linecounter].append(connectivitylist[i][0])
                connectivity[linecounter].append(connectivitylist[i][1])
        # set connectivity
        cellids = vtk.vtkIdList()
        for i in range(len(connectivity)):
            cellids.Initialize()
            for j in range(len(connectivity[i])):
                cellids.InsertNextId(int(connectivity[i][j]))
            cells.InsertNextCell(cellids)
    f.close()
    # write vtk polydata
    writer = vtk.vtkXMLPolyDataWriter()
    writer.SetInput(polydata)
    if ascii: writer.SetDataModeToAscii()
    writer.SetFileName(outputfile)
    writer.Write()
def vtp2cfx(inputfile, outputfile, surface=True):
    """Convert VTP polydata to format that can be imported into CFX-Post.

    Args:
        surface (bool): Convert surface or line polydata.

    Inverse of :func:`cfx2vtp`; writes the same csv layout CFX-Post exports.
    """
    # read vtp file
    reader = vtk.vtkXMLPolyDataReader()
    reader.SetFileName(inputfile)
    reader.Update()
    polydata = reader.GetOutput()
    # read names of data arrays
    arraynames = []
    dataarrays = polydata.GetPointData()
    numberofdataarrays = dataarrays.GetNumberOfArrays()
    for i in range(numberofdataarrays):
        array = dataarrays.GetArray(i)
        arrayname = array.GetName()
        arraynames.append(arrayname)
    # append names of data arrays to header and write header
    f = open(outputfile, 'wb')
    header = "\n[Name]\nSEGMENT\n\n[Data]\nX [ m ], Y [ m ], Z [ m ]"
    for i in range(numberofdataarrays):
        header += ", " + arraynames[i]
    header += "\n"
    f.write(header)
    # write values of x,y,z and data arrays row by row
    for i in range(polydata.GetNumberOfPoints()):
        point = polydata.GetPoint(i)
        line = str(point[0]) + ', ' + str(point[1]) + ', ' + str(point[2])
        for arrayname in arraynames:
            array = dataarrays.GetArray(arrayname)
            line += ', ' + str(array.GetComponent(i, 0))
        line += '\n'
        f.write(line)
    # write list of connectivity
    if surface:
        # one csv row of point ids per polygon cell
        line = '\n[Faces]\n'
        f.write(line)
        for i in range(polydata.GetNumberOfCells()):
            cellpointids = polydata.GetCell(i).GetPointIds()
            line = ''
            for j in range(cellpointids.GetNumberOfIds()):
                if (j > 0):
                    line += ', '
                line += str(cellpointids.GetId(j))
            line += '\n'
            f.write(line)
    else:
        # polylines are emitted as one point-id pair per row
        line = '\n[Lines]\n'
        f.write(line)
        for i in range(polydata.GetNumberOfCells()):
            cellpointids = polydata.GetCell(i).GetPointIds()
            line = ''
            for j in range(cellpointids.GetNumberOfIds() - 1):
                line += (str(cellpointids.GetId(j)) + ', ' +
                         str(cellpointids.GetId(j + 1)) + '\n')
            f.write(line)
    # add blank line to mimic exact same file structure as CFX-generated
    # csv-file
    line = '\n'
    f.write(line)
    f.close()
|
ajgeers/utils
|
utils/iolib.py
|
Python
|
bsd-2-clause
| 12,457
|
[
"VTK"
] |
f14f80c2c947198e53f32800d8fabbb92bea1cbc0693bc2450c45be41462f8ec
|
try:
import hashlib as md5
except:
import md5
from DIRAC import S_OK, S_ERROR, gConfig
from DIRAC.ConfigurationSystem.Client.PathFinder import getServiceSection
from DIRAC.AccountingSystem.private.Plotters import gPlottersList
from DIRAC.AccountingSystem.private.Policies import gPoliciesList
class MainReporter:
    """Dispatch accounting report requests to the registered plotters,
    applying per-type access policies and a time-granularity cache hash."""
    def __init__( self, db, setup ):
        # db: accounting database handle passed through to the plotters
        self._db = db
        self.setup = setup
        # CS location of the ReportGenerator service for this setup
        self.csSection = getServiceSection( "Accounting/ReportGenerator", setup = setup )
    def __calculateReportHash( self, reportRequest ):
        """Return an md5 hex digest identifying this request for caching.

        Start/end times are rounded down to CacheTimeGranularity seconds so
        requests inside the same window hash to the same cache entry.
        """
        requestToHash = dict( reportRequest )
        granularity = gConfig.getValue( "%s/CacheTimeGranularity" % self.csSection, 300 )
        for key in ( 'startTime', 'endTime' ):
            epoch = requestToHash[ key ]
            requestToHash[ key ] = epoch - epoch % granularity
        # NOTE(review): update() with str arguments is Python 2 only; under
        # Python 3 these would have to be encoded to bytes first
        md5Hash = md5.md5()
        md5Hash.update( repr( requestToHash ) )
        md5Hash.update( self.setup )
        return md5Hash.hexdigest()
    def generate( self, reportRequest, credDict ):
        """Run the plotter for reportRequest['typeName'], after checking any
        registered policy for the caller's credentials. Returns S_OK/S_ERROR."""
        typeName = reportRequest[ 'typeName' ]
        plotterClass = gPlottersList.getPlotterClass( typeName )
        if not plotterClass:
            return S_ERROR( "There's no reporter registered for type %s" % typeName )
        if typeName in gPoliciesList:
            retVal = gPoliciesList[ typeName ].checkRequest( reportRequest[ 'reportName' ],
                                                             credDict,
                                                             reportRequest[ 'condDict' ],
                                                             reportRequest[ 'grouping' ] )
            if not retVal[ 'OK' ]:
                return retVal
        reportRequest[ 'hash' ] = self.__calculateReportHash( reportRequest )
        plotter = plotterClass( self._db, self.setup, reportRequest[ 'extraArgs' ] )
        return plotter.generate( reportRequest )
    def list( self, typeName ):
        """Return S_OK with the list of plots available for *typeName*."""
        plotterClass = gPlottersList.getPlotterClass( typeName )
        if not plotterClass:
            return S_ERROR( "There's no plotter registered for type %s" % typeName )
        plotter = plotterClass( self._db, self.setup )
        return S_OK( plotter.plotsList() )
|
sposs/DIRAC
|
AccountingSystem/private/MainReporter.py
|
Python
|
gpl-3.0
| 2,070
|
[
"DIRAC"
] |
505b6b1fa46df3aefa8eb7a19d7302124bf6ac32142463fdf5b24dc9f5ff5f19
|
##########################################################################
#
# Copyright 2008-2010 VMware, Inc.
# All Rights Reserved.
#
# Permission is hereby granted, free of charge, to any person obtaining a copy
# of this software and associated documentation files (the "Software"), to deal
# in the Software without restriction, including without limitation the rights
# to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
# copies of the Software, and to permit persons to whom the Software is
# furnished to do so, subject to the following conditions:
#
# The above copyright notice and this permission notice shall be included in
# all copies or substantial portions of the Software.
#
# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
# IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
# FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
# AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
# LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
# OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
# THE SOFTWARE.
#
##########################################################################/
"""GL tracing generator."""
import re
import sys
from trace import Tracer
from dispatch import function_pointer_type, function_pointer_value
import specs.stdapi as stdapi
import specs.glapi as glapi
import specs.glparams as glparams
from specs.glxapi import glxapi
class TypeGetter(stdapi.Visitor):
    '''Determine which glGet*v function that matches the specified type.'''
    def __init__(self, prefix = 'glGet', long_suffix = True, ext_suffix = ''):
        # prefix: base of the query function name (e.g. 'glGet')
        # long_suffix: use the full-precision variant names (Booleanv,
        #   Doublev, Integerv) instead of the short iv/dv/fv forms
        # ext_suffix: extension suffix appended to the function name
        self.prefix = prefix
        self.long_suffix = long_suffix
        self.ext_suffix = ext_suffix
    def visitConst(self, const):
        # const-ness does not affect which getter is used
        return self.visit(const.type)
    def visitAlias(self, alias):
        """Map a GL scalar type alias to (getter function name, argument type)."""
        if alias.expr == 'GLboolean':
            if self.long_suffix:
                suffix = 'Booleanv'
                arg_type = alias.expr
            else:
                suffix = 'iv'
                arg_type = 'GLint'
        elif alias.expr == 'GLdouble':
            if self.long_suffix:
                suffix = 'Doublev'
                arg_type = alias.expr
            else:
                suffix = 'dv'
                arg_type = alias.expr
        elif alias.expr == 'GLfloat':
            if self.long_suffix:
                suffix = 'Floatv'
                arg_type = alias.expr
            else:
                suffix = 'fv'
                arg_type = alias.expr
        elif alias.expr in ('GLint', 'GLuint', 'GLsizei'):
            if self.long_suffix:
                suffix = 'Integerv'
                arg_type = 'GLint'
            else:
                suffix = 'iv'
                arg_type = 'GLint'
        else:
            # unsupported type: dump it for debugging, then abort (Python 2)
            print alias.expr
            assert False
        function_name = self.prefix + suffix + self.ext_suffix
        return function_name, arg_type
    def visitEnum(self, enum):
        # enums are queried as integers
        return self.visit(glapi.GLint)
    def visitBitmask(self, bitmask):
        # bitmasks are queried as integers
        return self.visit(glapi.GLint)
    def visitOpaque(self, pointer):
        # opaque pointers use the glGetPointerv-style getter
        return self.prefix + 'Pointerv' + self.ext_suffix, 'GLvoid *'
class GlTracer(Tracer):
    """Tracer specialization that generates the C++ GL tracing wrappers."""

    # (CamelCase, UPPER_CASE) spellings of the fixed-function vertex arrays:
    # CamelCase forms the gl*Pointer entry-point names, UPPER_CASE the
    # GL_*_ARRAY / GL_*_ARRAY_BUFFER_BINDING enum names.
    arrays = [
        ("Vertex", "VERTEX"),
        ("Normal", "NORMAL"),
        ("Color", "COLOR"),
        ("Index", "INDEX"),
        ("TexCoord", "TEXTURE_COORD"),
        ("EdgeFlag", "EDGE_FLAG"),
        ("FogCoord", "FOG_COORD"),
        ("SecondaryColor", "SECONDARY_COLOR"),
    ]
    # NOTE(review): reversed so "Vertex" is iterated last; presumably so the
    # faked glVertexPointer call is emitted last when replaying user arrays
    # — confirm against _trace_user_arrays consumers.
    arrays.reverse()

    # arrays available in ES1
    arrays_es1 = ("Vertex", "Normal", "Color", "TexCoord")
    def header(self, api):
        """Emit the C++ prolog shared by all generated wrappers.

        After the base Tracer prolog this emits: the _get_vertex_attrib()
        helper (ARB-style vs NV-style generic vertex attributes),
        _shadow_glGetBufferSubData(), _need_user_arrays(), forward
        declarations for _trace_user_arrays()/_fakeStringMarker(), the
        glLockArraysEXT / buffer-mapping bookkeeping flags, and the
        is_symbolic_pname / is_symbolic_param / _gl_param_size /
        can_unpack_subimage helpers, plus glNotifyMappedBufferRangeVMWX.
        """
        Tracer.header(self, api)
        print '#include <algorithm>'
        print
        print '#include "gltrace.hpp"'
        print
        # Which glVertexAttrib* variant to use
        print 'enum vertex_attrib {'
        print '    VERTEX_ATTRIB,'
        print '    VERTEX_ATTRIB_NV,'
        print '};'
        print
        print 'static vertex_attrib _get_vertex_attrib(void) {'
        print '    gltrace::Context *ctx = gltrace::getContext();'
        print '    if (ctx->user_arrays_nv) {'
        print '        GLboolean _vertex_program = GL_FALSE;'
        print '        _glGetBooleanv(GL_VERTEX_PROGRAM_ARB, &_vertex_program);'
        print '        if (_vertex_program) {'
        print '            if (ctx->user_arrays_nv) {'
        print '                GLint _vertex_program_binding_nv = _glGetInteger(GL_VERTEX_PROGRAM_BINDING_NV);'
        print '                if (_vertex_program_binding_nv) {'
        print '                    return VERTEX_ATTRIB_NV;'
        print '                }'
        print '            }'
        print '        }'
        print '    }'
        print '    return VERTEX_ATTRIB;'
        print '}'
        print
        self.defineShadowBufferHelper()
        # Whether we need user arrays
        print 'static inline bool _need_user_arrays(void)'
        print '{'
        print '    gltrace::Context *ctx = gltrace::getContext();'
        print '    if (!ctx->user_arrays) {'
        print '        return false;'
        print '    }'
        print
        print '    glprofile::Profile profile = ctx->profile;'
        print '    bool es1 = profile.es() && profile.major == 1;'
        print
        # One check per classic vertex array: enabled AND sourced from user
        # memory (binding == 0) means the draw call must fake the pointers.
        for camelcase_name, uppercase_name in self.arrays:
            # in which profile is the array available?
            profile_check = 'profile.desktop()'
            if camelcase_name in self.arrays_es1:
                profile_check = '(' + profile_check + ' || es1)';
            function_name = 'gl%sPointer' % camelcase_name
            enable_name = 'GL_%s_ARRAY' % uppercase_name
            binding_name = 'GL_%s_ARRAY_BUFFER_BINDING' % uppercase_name
            print '    // %s' % function_name
            print '    if (%s) {' % profile_check
            self.array_prolog(api, uppercase_name)
            print '        if (_glIsEnabled(%s) &&' % enable_name
            print '            _glGetInteger(%s) == 0) {' % binding_name
            self.array_cleanup(api, uppercase_name)
            print '            return true;'
            print '        }'
            self.array_epilog(api, uppercase_name)
            print '    }'
            print
        print '    // ES1 does not support generic vertex attributes'
        print '    if (es1)'
        print '        return false;'
        print
        print '    vertex_attrib _vertex_attrib = _get_vertex_attrib();'
        print
        print '    // glVertexAttribPointer'
        print '    if (_vertex_attrib == VERTEX_ATTRIB) {'
        print '        GLint _max_vertex_attribs = _glGetInteger(GL_MAX_VERTEX_ATTRIBS);'
        print '        for (GLint index = 0; index < _max_vertex_attribs; ++index) {'
        print '            if (_glGetVertexAttribi(index, GL_VERTEX_ATTRIB_ARRAY_ENABLED) &&'
        print '                _glGetVertexAttribi(index, GL_VERTEX_ATTRIB_ARRAY_BUFFER_BINDING) == 0) {'
        print '                return true;'
        print '            }'
        print '        }'
        print '    }'
        print
        print '    // glVertexAttribPointerNV'
        print '    if (_vertex_attrib == VERTEX_ATTRIB_NV) {'
        print '        for (GLint index = 0; index < 16; ++index) {'
        print '            if (_glIsEnabled(GL_VERTEX_ATTRIB_ARRAY0_NV + index)) {'
        print '                return true;'
        print '            }'
        print '        }'
        print '    }'
        print
        print '    return false;'
        print '}'
        print
        print r'static void _trace_user_arrays(GLuint count);'
        print
        print r'static void _fakeStringMarker(GLsizei len, const GLvoid * string);'
        print
        print r'static inline void'
        print r'_fakeStringMarker(const std::string &s) {'
        print r'    _fakeStringMarker(s.length(), s.data());'
        print r'}'
        print
        print '// whether glLockArraysEXT() has ever been called'
        print 'static bool _checkLockArraysEXT = false;'
        print
        # Buffer mappings
        print '// whether glMapBufferRange(GL_MAP_WRITE_BIT) has ever been called'
        print 'static bool _checkBufferMapRange = false;'
        print
        print '// whether glBufferParameteriAPPLE(GL_BUFFER_FLUSHING_UNMAP_APPLE, GL_FALSE) has ever been called'
        print 'static bool _checkBufferFlushingUnmapAPPLE = false;'
        print
        # Generate a helper function to determine whether a parameter name
        # refers to a symbolic value or not
        print 'static bool'
        print 'is_symbolic_pname(GLenum pname) {'
        print '    switch (pname) {'
        for function, type, count, name in glparams.parameters:
            if type is glapi.GLenum:
                print '    case %s:' % name
        print '        return true;'
        print '    default:'
        print '        return false;'
        print '    }'
        print '}'
        print
        # Generate a helper function to determine whether a parameter value is
        # potentially symbolic or not; i.e., if the value can be represented in
        # an enum or not
        print 'template<class T>'
        print 'static inline bool'
        print 'is_symbolic_param(T param) {'
        print '    return static_cast<T>(static_cast<GLenum>(param)) == param;'
        print '}'
        print
        # Generate a helper function to know how many elements a parameter has
        print 'static size_t'
        print '_gl_param_size(GLenum pname) {'
        print '    switch (pname) {'
        for function, type, count, name in glparams.parameters:
            # NOTE(review): count forced to 0 for GL_PROGRAM_BINARY_FORMATS —
            # presumably because program-binary queries are stubbed out by
            # doInvokeFunction(); confirm.
            if name == 'GL_PROGRAM_BINARY_FORMATS':
                count = 0
            if type is not None:
                print '    case %s: return %s;' % (name, count)
        print '    default:'
        print r'        os::log("apitrace: warning: %s: unknown GLenum 0x%04X\n", __FUNCTION__, pname);'
        print '        return 1;'
        print '    }'
        print '}'
        print
        # states such as GL_UNPACK_ROW_LENGTH are not available in GLES
        print 'static inline bool'
        print 'can_unpack_subimage(void) {'
        print '    gltrace::Context *ctx = gltrace::getContext();'
        print '    return ctx->profile.desktop();'
        print '}'
        print
        # VMWX_map_buffer_debug
        print r'extern "C" PUBLIC'
        print r'void APIENTRY'
        print r'glNotifyMappedBufferRangeVMWX(const void * start, GLsizeiptr length) {'
        self.emit_memcpy('start', 'length')
        print r'}'
        print
getProcAddressFunctionNames = []
    def traceApi(self, api):
        """Emit wrappers for every function in *api*.

        When getProcAddressFunctionNames is non-empty, also emit
        _wrapProcAddress(), which substitutes our wrapper for any pointer a
        GetProcAddress-style call returns, and supplies fallbacks for debug
        entry-points the driver does not implement.
        """
        if self.getProcAddressFunctionNames:
            # Generate a function to wrap proc addresses
            getProcAddressFunction = api.getFunctionByName(self.getProcAddressFunctionNames[0])
            argType = getProcAddressFunction.args[0].type
            retType = getProcAddressFunction.type
            print 'static %s _wrapProcAddress(%s procName, %s procPtr);' % (retType, argType, retType)
            print
            Tracer.traceApi(self, api)
            print 'static %s _wrapProcAddress(%s procName, %s procPtr) {' % (retType, argType, retType)
            # Provide fallback functions to missing debug functions
            print '    if (!procPtr) {'
            else_ = ''
            for function_name in self.debug_functions:
                if self.api.getFunctionByName(function_name):
                    print '        %sif (strcmp("%s", (const char *)procName) == 0) {' % (else_, function_name)
                    print '            return (%s)&%s;' % (retType, function_name)
                    print '        }'
                    else_ = 'else '
            print '        %s{' % else_
            print '            return NULL;'
            print '        }'
            print '    }'
            # Record the driver's pointer and hand back our wrapper instead.
            for function in api.getAllFunctions():
                ptype = function_pointer_type(function)
                pvalue = function_pointer_value(function)
                print '    if (strcmp("%s", (const char *)procName) == 0) {' % function.name
                print '        assert(procPtr != (%s)&%s);' % (retType, function.name)
                print '        %s = (%s)procPtr;' % (pvalue, ptype)
                print '        return (%s)&%s;' % (retType, function.name,)
                print '    }'
            # Unknown name: warn and pass the driver pointer through untraced.
            print '    os::log("apitrace: warning: unknown function \\"%s\\"\\n", (const char *)procName);'
            print '    return procPtr;'
            print '}'
            print
        else:
            Tracer.traceApi(self, api)
def defineShadowBufferHelper(self):
print 'void _shadow_glGetBufferSubData(GLenum target, GLintptr offset,'
print ' GLsizeiptr size, GLvoid *data)'
print '{'
print ' gltrace::Context *ctx = gltrace::getContext();'
print ' if (!ctx->needsShadowBuffers() || target != GL_ELEMENT_ARRAY_BUFFER) {'
print ' _glGetBufferSubData(target, offset, size, data);'
print ' return;'
print ' }'
print
print ' GLint buffer_binding = _glGetInteger(GL_ELEMENT_ARRAY_BUFFER_BINDING);'
print ' if (buffer_binding > 0) {'
print ' gltrace::Buffer & buf = ctx->buffers[buffer_binding];'
print ' buf.getSubData(offset, size, data);'
print ' }'
print '}'
def shadowBufferMethod(self, method):
# Emit code to fetch the shadow buffer, and invoke a method
print ' gltrace::Context *ctx = gltrace::getContext();'
print ' if (ctx->needsShadowBuffers() && target == GL_ELEMENT_ARRAY_BUFFER) {'
print ' GLint buffer_binding = _glGetInteger(GL_ELEMENT_ARRAY_BUFFER_BINDING);'
print ' if (buffer_binding > 0) {'
print ' gltrace::Buffer & buf = ctx->buffers[buffer_binding];'
print ' buf.' + method + ';'
print ' }'
print ' }'
print
def shadowBufferProlog(self, function):
if function.name == 'glBufferData':
self.shadowBufferMethod('bufferData(size, data)')
if function.name == 'glBufferSubData':
self.shadowBufferMethod('bufferSubData(offset, size, data)')
if function.name == 'glDeleteBuffers':
print ' gltrace::Context *ctx = gltrace::getContext();'
print ' if (ctx->needsShadowBuffers()) {'
print ' for (GLsizei i = 0; i < n; i++) {'
print ' ctx->buffers.erase(buffer[i]);'
print ' }'
print ' }'
array_pointer_function_names = set((
"glVertexPointer",
"glNormalPointer",
"glColorPointer",
"glIndexPointer",
"glTexCoordPointer",
"glEdgeFlagPointer",
"glFogCoordPointer",
"glSecondaryColorPointer",
"glInterleavedArrays",
"glVertexPointerEXT",
"glNormalPointerEXT",
"glColorPointerEXT",
"glIndexPointerEXT",
"glTexCoordPointerEXT",
"glEdgeFlagPointerEXT",
"glFogCoordPointerEXT",
"glSecondaryColorPointerEXT",
"glVertexAttribPointer",
"glVertexAttribPointerARB",
"glVertexAttribPointerNV",
"glVertexAttribIPointer",
"glVertexAttribIPointerEXT",
"glVertexAttribLPointer",
"glVertexAttribLPointerEXT",
#"glMatrixIndexPointerARB",
))
# XXX: We currently ignore the gl*Draw*ElementArray* functions
draw_function_regex = re.compile(r'^gl([A-Z][a-z]+)*Draw(Range)?(Arrays|Elements)([A-Z][a-zA-Z]*)?$' )
interleaved_formats = [
'GL_V2F',
'GL_V3F',
'GL_C4UB_V2F',
'GL_C4UB_V3F',
'GL_C3F_V3F',
'GL_N3F_V3F',
'GL_C4F_N3F_V3F',
'GL_T2F_V3F',
'GL_T4F_V4F',
'GL_T2F_C4UB_V3F',
'GL_T2F_C3F_V3F',
'GL_T2F_N3F_V3F',
'GL_T2F_C4F_N3F_V3F',
'GL_T4F_C4F_N3F_V4F',
]
def traceFunctionImplBody(self, function):
# Defer tracing of user array pointers...
if function.name in self.array_pointer_function_names:
print ' GLint _array_buffer = _glGetInteger(GL_ARRAY_BUFFER_BINDING);'
print ' if (!_array_buffer) {'
print ' static bool warned = false;'
print ' if (!warned) {'
print ' warned = true;'
print ' os::log("apitrace: warning: %s: call will be faked due to pointer to user memory (https://github.com/apitrace/apitrace/blob/master/docs/BUGS.markdown#tracing)\\n", __FUNCTION__);'
print ' }'
print ' gltrace::Context *ctx = gltrace::getContext();'
print ' ctx->user_arrays = true;'
if function.name == "glVertexAttribPointerNV":
print ' ctx->user_arrays_nv = true;'
self.invokeFunction(function)
# And also break down glInterleavedArrays into the individual calls
if function.name == 'glInterleavedArrays':
print
# Initialize the enable flags
for camelcase_name, uppercase_name in self.arrays:
flag_name = '_' + uppercase_name.lower()
print ' GLboolean %s = GL_FALSE;' % flag_name
print
# Switch for the interleaved formats
print ' switch (format) {'
for format in self.interleaved_formats:
print ' case %s:' % format
for camelcase_name, uppercase_name in self.arrays:
flag_name = '_' + uppercase_name.lower()
if format.find('_' + uppercase_name[0]) >= 0:
print ' %s = GL_TRUE;' % flag_name
print ' break;'
print ' default:'
print ' return;'
print ' }'
print
# Emit fake glEnableClientState/glDisableClientState flags
for camelcase_name, uppercase_name in self.arrays:
flag_name = '_' + uppercase_name.lower()
enable_name = 'GL_%s_ARRAY' % uppercase_name
# Emit a fake function
print ' {'
print ' static const trace::FunctionSig &_sig = %s ? _glEnableClientState_sig : _glDisableClientState_sig;' % flag_name
print ' unsigned _call = trace::localWriter.beginEnter(&_sig, true);'
print ' trace::localWriter.beginArg(0);'
self.serializeValue(glapi.GLenum, enable_name)
print ' trace::localWriter.endArg();'
print ' trace::localWriter.endEnter();'
print ' trace::localWriter.beginLeave(_call);'
print ' trace::localWriter.endLeave();'
print ' }'
# Warn about buggy glGet(GL_*ARRAY_SIZE) not returning GL_BGRA
buggyFunctions = {
'glColorPointer': ('glGetIntegerv', '', 'GL_COLOR_ARRAY_SIZE'),
'glSecondaryColorPointer': ('glGetIntegerv', '', 'GL_SECONDARY_COLOR_ARRAY_SIZE'),
'glVertexAttribPointer': ('glGetVertexAttribiv', 'index, ', 'GL_VERTEX_ATTRIB_ARRAY_SIZE'),
'glVertexAttribPointerARB': ('glGetVertexAttribivARB', 'index, ', 'GL_VERTEX_ATTRIB_ARRAY_SIZE_ARB'),
}
if function.name in buggyFunctions:
getter, extraArg, pname = buggyFunctions[function.name]
print r' static bool _checked = false;'
print r' if (!_checked && size == GL_BGRA) {'
print r' GLint _size = 0;'
print r' _%s(%s%s, &_size);' % (getter, extraArg, pname)
print r' if (_size != GL_BGRA) {'
print r' os::log("apitrace: warning: %s(%s) does not return GL_BGRA; trace will be incorrect (https://github.com/apitrace/apitrace/issues/261)\n");' % (getter, pname)
print r' }'
print r' _checked = true;'
print r' }'
print ' return;'
print ' }'
# ... to the draw calls
if self.draw_function_regex.match(function.name):
print ' if (_need_user_arrays()) {'
if 'Indirect' in function.name:
print r' os::log("apitrace: warning: %s: indirect user arrays not supported\n");' % (function.name,)
else:
arg_names = ', '.join([arg.name for arg in function.args[1:]])
print ' GLuint _count = _%s_count(%s);' % (function.name, arg_names)
# Some apps, in particular Quake3, can tell the driver to lock more
# vertices than those actually required for the draw call.
print ' if (_checkLockArraysEXT) {'
print ' GLuint _locked_count = _glGetInteger(GL_ARRAY_ELEMENT_LOCK_FIRST_EXT)'
print ' + _glGetInteger(GL_ARRAY_ELEMENT_LOCK_COUNT_EXT);'
print ' _count = std::max(_count, _locked_count);'
print ' }'
print ' _trace_user_arrays(_count);'
print ' }'
if function.name == 'glLockArraysEXT':
print ' _checkLockArraysEXT = true;'
# Warn if user arrays are used with glBegin/glArrayElement/glEnd.
if function.name == 'glBegin':
print r' gltrace::Context *ctx = gltrace::getContext();'
print r' ctx->userArraysOnBegin = _need_user_arrays();'
if function.name.startswith('glArrayElement'):
print r' gltrace::Context *ctx = gltrace::getContext();'
print r' if (ctx->userArraysOnBegin) {'
print r' os::log("apitrace: warning: user arrays with glArrayElement not supported (https://github.com/apitrace/apitrace/issues/276)\n");'
print r' ctx->userArraysOnBegin = false;'
print r' }'
# Emit a fake memcpy on buffer uploads
if function.name == 'glBufferParameteriAPPLE':
print ' if (pname == GL_BUFFER_FLUSHING_UNMAP_APPLE && param == GL_FALSE) {'
print ' _checkBufferFlushingUnmapAPPLE = true;'
print ' }'
if function.name in ('glUnmapBuffer', 'glUnmapBufferARB'):
if function.name.endswith('ARB'):
suffix = 'ARB'
else:
suffix = ''
print ' GLint access_flags = 0;'
print ' GLint access = 0;'
print ' bool flush;'
print ' // GLES3 does not have GL_BUFFER_ACCESS;'
print ' if (_checkBufferMapRange) {'
print ' _glGetBufferParameteriv%s(target, GL_BUFFER_ACCESS_FLAGS, &access_flags);' % suffix
print ' flush = (access_flags & GL_MAP_WRITE_BIT) && !(access_flags & (GL_MAP_FLUSH_EXPLICIT_BIT | GL_MAP_PERSISTENT_BIT));'
print ' } else {'
print ' _glGetBufferParameteriv%s(target, GL_BUFFER_ACCESS, &access);' % suffix
print ' flush = access != GL_READ_ONLY;'
print ' }'
print ' if (flush) {'
print ' GLvoid *map = NULL;'
print ' _glGetBufferPointerv%s(target, GL_BUFFER_MAP_POINTER, &map);' % suffix
print ' if (map) {'
print ' GLint length = -1;'
print ' if (_checkBufferMapRange) {'
print ' _glGetBufferParameteriv%s(target, GL_BUFFER_MAP_LENGTH, &length);' % suffix
print ' if (length == -1) {'
print ' // Mesa drivers refuse GL_BUFFER_MAP_LENGTH without GL 3.0 up-to'
print ' // http://cgit.freedesktop.org/mesa/mesa/commit/?id=ffee498fb848b253a7833373fe5430f8c7ca0c5f'
print ' static bool warned = false;'
print ' if (!warned) {'
print ' os::log("apitrace: warning: glGetBufferParameteriv%s(GL_BUFFER_MAP_LENGTH) failed\\n");' % suffix
print ' warned = true;'
print ' }'
print ' }'
print ' } else {'
print ' length = 0;'
print ' _glGetBufferParameteriv%s(target, GL_BUFFER_SIZE, &length);' % suffix
print ' }'
print ' if (_checkBufferFlushingUnmapAPPLE) {'
print ' GLint flushing_unmap = GL_TRUE;'
print ' _glGetBufferParameteriv%s(target, GL_BUFFER_FLUSHING_UNMAP_APPLE, &flushing_unmap);' % suffix
print ' flush = flush && flushing_unmap;'
print ' }'
print ' if (flush && length > 0) {'
self.emit_memcpy('map', 'length')
print ' }'
print ' }'
print ' }'
if function.name == 'glUnmapBufferOES':
print ' GLint access_flags = 0;'
print ' GLint access = 0;'
print ' bool flush;'
print ' // GLES3 does not have GL_BUFFER_ACCESS;'
print ' if (_checkBufferMapRange) {'
print ' _glGetBufferParameteriv(target, GL_BUFFER_ACCESS_FLAGS, &access_flags);'
print ' flush = (access_flags & GL_MAP_WRITE_BIT) && !(access_flags & (GL_MAP_FLUSH_EXPLICIT_BIT | GL_MAP_PERSISTENT_BIT));'
print ' } else {'
print ' _glGetBufferParameteriv(target, GL_BUFFER_ACCESS, &access);'
print ' flush = access != GL_READ_ONLY;'
print ' }'
print ' if (flush) {'
print ' GLvoid *map = NULL;'
print ' _glGetBufferPointervOES(target, GL_BUFFER_MAP_POINTER, &map);'
print ' if (map) {'
print ' GLint length = 0;'
print ' GLint offset = 0;'
print ' if (_checkBufferMapRange) {'
print ' _glGetBufferParameteriv(target, GL_BUFFER_MAP_LENGTH, &length);'
print ' _glGetBufferParameteriv(target, GL_BUFFER_MAP_OFFSET, &offset);'
print ' } else {'
print ' _glGetBufferParameteriv(target, GL_BUFFER_SIZE, &length);'
print ' }'
print ' if (flush && length > 0) {'
self.emit_memcpy('map', 'length')
self.shadowBufferMethod('bufferSubData(offset, length, map)')
print ' }'
print ' }'
print ' }'
if function.name == 'glUnmapNamedBuffer':
print ' GLint access_flags = 0;'
print ' _glGetNamedBufferParameteriv(buffer, GL_BUFFER_ACCESS_FLAGS, &access_flags);'
print ' if ((access_flags & GL_MAP_WRITE_BIT) &&'
print ' !(access_flags & (GL_MAP_FLUSH_EXPLICIT_BIT | GL_MAP_PERSISTENT_BIT))) {'
print ' GLvoid *map = NULL;'
print ' _glGetNamedBufferPointerv(buffer, GL_BUFFER_MAP_POINTER, &map);'
print ' GLint length = 0;'
print ' _glGetNamedBufferParameteriv(buffer, GL_BUFFER_MAP_LENGTH, &length);'
print ' if (map && length > 0) {'
self.emit_memcpy('map', 'length')
print ' }'
print ' }'
if function.name == 'glUnmapNamedBufferEXT':
print ' GLint access_flags = 0;'
print ' _glGetNamedBufferParameterivEXT(buffer, GL_BUFFER_ACCESS_FLAGS, &access_flags);'
print ' if ((access_flags & GL_MAP_WRITE_BIT) &&'
print ' !(access_flags & (GL_MAP_FLUSH_EXPLICIT_BIT | GL_MAP_PERSISTENT_BIT))) {'
print ' GLvoid *map = NULL;'
print ' _glGetNamedBufferPointervEXT(buffer, GL_BUFFER_MAP_POINTER, &map);'
print ' GLint length = 0;'
print ' _glGetNamedBufferParameterivEXT(buffer, GL_BUFFER_MAP_LENGTH, &length);'
print ' if (map && length > 0) {'
self.emit_memcpy('map', 'length')
print ' }'
print ' }'
if function.name == 'glFlushMappedBufferRange':
print ' GLvoid *map = NULL;'
print ' _glGetBufferPointerv(target, GL_BUFFER_MAP_POINTER, &map);'
print ' if (map && length > 0) {'
self.emit_memcpy('(const char *)map + offset', 'length')
print ' }'
if function.name == 'glFlushMappedBufferRangeEXT':
print ' GLvoid *map = NULL;'
print ' _glGetBufferPointervOES(target, GL_BUFFER_MAP_POINTER_OES, &map);'
print ' if (map && length > 0) {'
self.emit_memcpy('(const char *)map + offset', 'length')
print ' }'
if function.name == 'glFlushMappedBufferRangeAPPLE':
print ' GLvoid *map = NULL;'
print ' _glGetBufferPointerv(target, GL_BUFFER_MAP_POINTER, &map);'
print ' if (map && size > 0) {'
self.emit_memcpy('(const char *)map + offset', 'size')
print ' }'
if function.name == 'glFlushMappedNamedBufferRange':
print ' GLvoid *map = NULL;'
print ' _glGetNamedBufferPointerv(buffer, GL_BUFFER_MAP_POINTER, &map);'
print ' if (map && length > 0) {'
self.emit_memcpy('(const char *)map + offset', 'length')
print ' }'
if function.name == 'glFlushMappedNamedBufferRangeEXT':
print ' GLvoid *map = NULL;'
print ' _glGetNamedBufferPointervEXT(buffer, GL_BUFFER_MAP_POINTER, &map);'
print ' if (map && length > 0) {'
self.emit_memcpy('(const char *)map + offset', 'length')
print ' }'
# FIXME: We don't support coherent/pinned memory mappings
if function.name in ('glBufferStorage', 'glNamedBufferStorage', 'glNamedBufferStorageEXT'):
print r' if (!(flags & GL_MAP_PERSISTENT_BIT)) {'
print r' os::log("apitrace: warning: %s: MAP_NOTIFY_EXPLICIT_BIT_VMWX set w/o MAP_PERSISTENT_BIT\n", __FUNCTION__);'
print r' }'
print r' flags &= ~GL_MAP_NOTIFY_EXPLICIT_BIT_VMWX;'
if function.name in ('glMapBufferRange', 'glMapBufferRangeEXT', 'glMapNamedBufferRange', 'glMapNamedBufferRangeEXT'):
print r' if (access & GL_MAP_NOTIFY_EXPLICIT_BIT_VMWX) {'
print r' if (!(access & GL_MAP_PERSISTENT_BIT)) {'
print r' os::log("apitrace: warning: %s: MAP_NOTIFY_EXPLICIT_BIT_VMWX set w/o MAP_PERSISTENT_BIT\n", __FUNCTION__);'
print r' }'
print r' if (access & GL_MAP_FLUSH_EXPLICIT_BIT) {'
print r' os::log("apitrace: warning: %s: MAP_NOTIFY_EXPLICIT_BIT_VMWX set w/ MAP_FLUSH_EXPLICIT_BIT\n", __FUNCTION__);'
print r' }'
print r' access &= ~GL_MAP_NOTIFY_EXPLICIT_BIT_VMWX;'
print r' } else if (access & GL_MAP_COHERENT_BIT) {'
print r' os::log("apitrace: warning: %s: MAP_COHERENT_BIT unsupported (https://github.com/apitrace/apitrace/issues/232)\n", __FUNCTION__);'
print r' } else if ((access & GL_MAP_PERSISTENT_BIT) &&'
print r' !(access & GL_MAP_FLUSH_EXPLICIT_BIT)) {'
print r' os::log("apitrace: warning: %s: MAP_PERSISTENT_BIT w/o FLUSH_EXPLICIT_BIT unsupported (https://github.com/apitrace/apitrace/issues/232)\n", __FUNCTION__);'
print r' }'
if function.name in ('glBufferData', 'glBufferDataARB'):
print r' if (target == GL_EXTERNAL_VIRTUAL_MEMORY_BUFFER_AMD) {'
print r' os::log("apitrace: warning: GL_AMD_pinned_memory not fully supported\n");'
print r' }'
# TODO: We don't track GL_INTEL_map_texture mappings
if function.name == 'glMapTexture2DINTEL':
print r' if (access & GL_MAP_WRITE_BIT) {'
print r' os::log("apitrace: warning: GL_INTEL_map_texture not fully supported\n");'
print r' }'
# Don't leave vertex attrib locations to chance. Instead emit fake
# glBindAttribLocation calls to ensure that the same locations will be
# used when retracing. Trying to remap locations after the fact would
# be an herculian task given that vertex attrib locations appear in
# many entry-points, including non-shader related ones.
if function.name == 'glLinkProgram':
Tracer.invokeFunction(self, function)
print ' GLint active_attributes = 0;'
print ' _glGetProgramiv(program, GL_ACTIVE_ATTRIBUTES, &active_attributes);'
print ' for (GLint attrib = 0; attrib < active_attributes; ++attrib) {'
print ' GLint size = 0;'
print ' GLenum type = 0;'
print ' GLchar name[256];'
# TODO: Use ACTIVE_ATTRIBUTE_MAX_LENGTH instead of 256
print ' _glGetActiveAttrib(program, attrib, sizeof name, NULL, &size, &type, name);'
print " if (name[0] != 'g' || name[1] != 'l' || name[2] != '_') {"
print ' GLint location = _glGetAttribLocation(program, name);'
print ' if (location >= 0) {'
bind_function = glapi.glapi.getFunctionByName('glBindAttribLocation')
self.fake_call(bind_function, ['program', 'location', 'name'])
print ' }'
print ' }'
print ' }'
if function.name == 'glLinkProgramARB':
Tracer.invokeFunction(self, function)
print ' GLint active_attributes = 0;'
print ' _glGetObjectParameterivARB(programObj, GL_OBJECT_ACTIVE_ATTRIBUTES_ARB, &active_attributes);'
print ' for (GLint attrib = 0; attrib < active_attributes; ++attrib) {'
print ' GLint size = 0;'
print ' GLenum type = 0;'
print ' GLcharARB name[256];'
# TODO: Use ACTIVE_ATTRIBUTE_MAX_LENGTH instead of 256
print ' _glGetActiveAttribARB(programObj, attrib, sizeof name, NULL, &size, &type, name);'
print " if (name[0] != 'g' || name[1] != 'l' || name[2] != '_') {"
print ' GLint location = _glGetAttribLocationARB(programObj, name);'
print ' if (location >= 0) {'
bind_function = glapi.glapi.getFunctionByName('glBindAttribLocationARB')
self.fake_call(bind_function, ['programObj', 'location', 'name'])
print ' }'
print ' }'
print ' }'
self.shadowBufferProlog(function)
Tracer.traceFunctionImplBody(self, function)
# These entrypoints are only expected to be implemented by tools;
# drivers will probably not implement them.
marker_functions = [
# GL_GREMEDY_string_marker
'glStringMarkerGREMEDY',
# GL_GREMEDY_frame_terminator
'glFrameTerminatorGREMEDY',
]
# These entrypoints may be implemented by drivers, but are also very useful
# for debugging / analysis tools.
debug_functions = [
# GL_KHR_debug
'glDebugMessageControl',
'glDebugMessageInsert',
'glDebugMessageCallback',
'glGetDebugMessageLog',
'glPushDebugGroup',
'glPopDebugGroup',
'glObjectLabel',
'glGetObjectLabel',
'glObjectPtrLabel',
'glGetObjectPtrLabel',
# GL_KHR_debug (for OpenGL ES)
'glDebugMessageControlKHR',
'glDebugMessageInsertKHR',
'glDebugMessageCallbackKHR',
'glGetDebugMessageLogKHR',
'glPushDebugGroupKHR',
'glPopDebugGroupKHR',
'glObjectLabelKHR',
'glGetObjectLabelKHR',
'glObjectPtrLabelKHR',
'glGetObjectPtrLabelKHR',
# GL_ARB_debug_output
'glDebugMessageControlARB',
'glDebugMessageInsertARB',
'glDebugMessageCallbackARB',
'glGetDebugMessageLogARB',
# GL_AMD_debug_output
'glDebugMessageEnableAMD',
'glDebugMessageInsertAMD',
'glDebugMessageCallbackAMD',
'glGetDebugMessageLogAMD',
# GL_EXT_debug_label
'glLabelObjectEXT',
'glGetObjectLabelEXT',
# GL_EXT_debug_marker
'glInsertEventMarkerEXT',
'glPushGroupMarkerEXT',
'glPopGroupMarkerEXT',
]
def invokeFunction(self, function):
if function.name in ('glLinkProgram', 'glLinkProgramARB'):
# These functions have been dispatched already
return
# Force glProgramBinary to fail. Per ARB_get_program_binary this
# should signal the app that it needs to recompile.
if function.name in ('glProgramBinary', 'glProgramBinaryOES'):
print r' binaryFormat = 0xDEADDEAD;'
print r' binary = &binaryFormat;'
print r' length = sizeof binaryFormat;'
Tracer.invokeFunction(self, function)
    def doInvokeFunction(self, function):
        """Same as invokeFunction(), but called both when tracing is enabled
        and when it is disabled.

        Used to modify the behavior of GL entry-points regardless of the
        tracing state.
        """
        # Override GL extensions
        if function.name in ('glGetString', 'glGetIntegerv', 'glGetStringi'):
            Tracer.doInvokeFunction(self, function, prefix = 'gltrace::_', suffix = '_override')
            return

        # We implement GL_GREMEDY_*, etc., and not the driver
        if function.name in self.marker_functions:
            return

        # We may be faking KHR_debug, so ensure the pointer queries result is
        # always zeroed to prevent dereference of uninitialized pointers
        if function.name == 'glGetPointerv':
            print '    if (params &&'
            print '        (pname == GL_DEBUG_CALLBACK_FUNCTION ||'
            print '         pname == GL_DEBUG_CALLBACK_USER_PARAM)) {'
            print '        *params = NULL;'
            print '    }'

        # Intercept GetProcAddress-style lookups: serve our own VMWX/marker
        # implementations, and wrap everything else via _wrapProcAddress().
        if function.name in self.getProcAddressFunctionNames:
            nameArg = function.args[0].name
            print '    if (strcmp("glNotifyMappedBufferRangeVMWX", (const char *)%s) == 0) {' % (nameArg,)
            print '        _result = (%s)&glNotifyMappedBufferRangeVMWX;' % (function.type,)
            for marker_function in self.marker_functions:
                if self.api.getFunctionByName(marker_function):
                    print '    } else if (strcmp("%s", (const char *)%s) == 0) {' % (marker_function, nameArg)
                    print '        _result = (%s)&%s;' % (function.type, marker_function)
            print '    } else {'
            Tracer.doInvokeFunction(self, function)

            # Replace function addresses with ours
            # XXX: Doing this here instead of wrapRet means that the trace will
            # contain the addresses of the wrapper functions, and not the real
            # functions, but in practice this should make no difference.
            if function.name in self.getProcAddressFunctionNames:
                print '        _result = _wrapProcAddress(%s, _result);' % (nameArg,)
            print '    }'
            return

        # Force a zero-sized buffer so no program binary is ever returned
        # (complements the glProgramBinary sabotage in invokeFunction()).
        if function.name in ('glGetProgramBinary', 'glGetProgramBinaryOES'):
            print r'    bufSize = 0;'

        Tracer.doInvokeFunction(self, function)

        if function.name == 'glGetProgramiv':
            print r'    if (params && pname == GL_PROGRAM_BINARY_LENGTH) {'
            print r'        *params = 0;'
            print r'    }'
        if function.name in ('glGetProgramBinary', 'glGetProgramBinaryOES'):
            print r'    if (length) {'
            print r'        *length = 0;'
            print r'    }'
buffer_targets = [
'ARRAY_BUFFER',
'ELEMENT_ARRAY_BUFFER',
'PIXEL_PACK_BUFFER',
'PIXEL_UNPACK_BUFFER',
'UNIFORM_BUFFER',
'TEXTURE_BUFFER',
'TRANSFORM_FEEDBACK_BUFFER',
'COPY_READ_BUFFER',
'COPY_WRITE_BUFFER',
'DRAW_INDIRECT_BUFFER',
'ATOMIC_COUNTER_BUFFER',
]
def wrapRet(self, function, instance):
Tracer.wrapRet(self, function, instance)
# Keep track of buffer mappings
if function.name in ('glMapBufferRange', 'glMapBufferRangeEXT'):
print ' if (access & GL_MAP_WRITE_BIT) {'
print ' _checkBufferMapRange = true;'
print ' }'
boolean_names = [
'GL_FALSE',
'GL_TRUE',
]
def gl_boolean(self, value):
return self.boolean_names[int(bool(value))]
# Regular expression for the names of the functions that unpack from a
# pixel buffer object. See the ARB_pixel_buffer_object specification.
unpack_function_regex = re.compile(r'^gl(' + r'|'.join([
r'Bitmap',
r'PolygonStipple',
r'PixelMap[a-z]+v',
r'DrawPixels',
r'Color(Sub)?Table',
r'(Convolution|Separable)Filter[12]D',
r'(Compressed)?(Multi)?Tex(ture)?(Sub)?Image[1-4]D',
]) + r')[0-9A-Z]*$')
    def serializeArgValue(self, function, arg):
        """Serialize one argument of *function* into the trace, with two
        GL-specific twists: blob arguments of pixel-unpacking functions are
        written as pointers (offsets) when a PBO is bound, and integer/float
        'param' arguments are written as enums when symbolic.
        """
        # Recognize offsets instead of blobs when a PBO is bound
        if self.unpack_function_regex.match(function.name) \
           and (isinstance(arg.type, stdapi.Blob) \
                or (isinstance(arg.type, stdapi.Const) \
                    and isinstance(arg.type.type, stdapi.Blob))):
            print '    {'
            print '        gltrace::Context *ctx = gltrace::getContext();'
            print '        GLint _unpack_buffer = 0;'
            print '        if (ctx->profile.desktop())'
            print '            _glGetIntegerv(GL_PIXEL_UNPACK_BUFFER_BINDING, &_unpack_buffer);'
            print '        if (_unpack_buffer) {'
            print '            trace::localWriter.writePointer((uintptr_t)%s);' % arg.name
            print '        } else {'
            Tracer.serializeArgValue(self, function, arg)
            print '        }'
            print '    }'
            return

        # Several GL state functions take GLenum symbolic names as
        # integer/floats; so dump the symbolic name whenever possible
        if function.name.startswith('gl') \
           and arg.type in (glapi.GLint, glapi.GLfloat, glapi.GLdouble) \
           and arg.name == 'param':
            # 'param' must be preceded by the 'pname' enum that selects it.
            assert arg.index > 0
            assert function.args[arg.index - 1].name == 'pname'
            assert function.args[arg.index - 1].type == glapi.GLenum
            print '    if (is_symbolic_pname(pname) && is_symbolic_param(%s)) {' % arg.name
            self.serializeValue(glapi.GLenum, arg.name)
            print '    } else {'
            Tracer.serializeArgValue(self, function, arg)
            print '    }'
            return

        Tracer.serializeArgValue(self, function, arg)
def footer(self, api):
    """Emit the tail of the generated tracer source (Python 2 code generator).

    Writes to stdout the C++ helper `_trace_user_arrays(count)`, which fakes
    gl*Pointer / glVertexAttribPointer* calls for user-memory (non-VBO)
    arrays so their contents end up in the trace, plus a fake
    glStringMarkerGREMEDY helper.  Each `print` statement emits one line of
    the generated C++ file; the exact order and text of the prints *is* the
    behavior of this method.
    """
    Tracer.footer(self, api)

    # A simple state tracker to track the pointer values
    # update the state
    print 'static void _trace_user_arrays(GLuint count)'
    print '{'
    print ' gltrace::Context *ctx = gltrace::getContext();'
    print
    print ' glprofile::Profile profile = ctx->profile;'
    print ' bool es1 = profile.es() && profile.major == 1;'
    print

    # Temporarily unbind the array buffer
    print ' GLint _array_buffer = _glGetInteger(GL_ARRAY_BUFFER_BINDING);'
    print ' if (_array_buffer) {'
    self.fake_glBindBuffer(api, 'GL_ARRAY_BUFFER', '0')
    print ' }'
    print

    # One generated block per conventional (fixed-function) array.
    for camelcase_name, uppercase_name in self.arrays:
        # in which profile is the array available?
        profile_check = 'profile.desktop()'
        if camelcase_name in self.arrays_es1:
            profile_check = '(' + profile_check + ' || es1)';
        function_name = 'gl%sPointer' % camelcase_name
        enable_name = 'GL_%s_ARRAY' % uppercase_name
        binding_name = 'GL_%s_ARRAY_BUFFER_BINDING' % uppercase_name
        function = api.getFunctionByName(function_name)

        print ' // %s' % function.prototype()
        print ' if (%s) {' % profile_check
        self.array_trace_prolog(api, uppercase_name)
        self.array_prolog(api, uppercase_name)
        # Only fake the call when the array is enabled and not backed by a VBO.
        print ' if (_glIsEnabled(%s)) {' % enable_name
        print ' GLint _binding = _glGetInteger(%s);' % binding_name
        print ' if (!_binding) {'

        # Get the arguments via glGet*
        for arg in function.args:
            arg_get_enum = 'GL_%s_ARRAY_%s' % (uppercase_name, arg.name.upper())
            arg_get_function, arg_type = TypeGetter().visit(arg.type)
            print ' %s %s = 0;' % (arg_type, arg.name)
            print ' _%s(%s, &%s);' % (arg_get_function, arg_get_enum, arg.name)

        arg_names = ', '.join([arg.name for arg in function.args[:-1]])
        print ' size_t _size = _%s_size(%s, count);' % (function.name, arg_names)

        # Emit a fake function
        self.array_trace_intermezzo(api, uppercase_name)
        print ' unsigned _call = trace::localWriter.beginEnter(&_%s_sig, true);' % (function.name,)
        for arg in function.args:
            assert not arg.output
            print ' trace::localWriter.beginArg(%u);' % (arg.index,)
            if arg.name != 'pointer':
                self.serializeValue(arg.type, arg.name)
            else:
                # The user-memory pointer is serialized as a blob of _size bytes.
                print ' trace::localWriter.writeBlob((const void *)%s, _size);' % (arg.name)
            print ' trace::localWriter.endArg();'
        print ' trace::localWriter.endEnter();'
        print ' trace::localWriter.beginLeave(_call);'
        print ' trace::localWriter.endLeave();'
        print ' }'
        print ' }'
        self.array_epilog(api, uppercase_name)
        self.array_trace_epilog(api, uppercase_name)
        print ' }'
        print

    # Same thing, but for glVertexAttribPointer*
    #
    # Some variants of glVertexAttribPointer alias conventional and generic attributes:
    # - glVertexAttribPointer: no
    # - glVertexAttribPointerARB: implementation dependent
    # - glVertexAttribPointerNV: yes
    #
    # This means that the implementations of these functions do not always
    # alias, and they need to be considered independently.
    #
    print ' // ES1 does not support generic vertex attributes'
    print ' if (es1)'
    print ' return;'
    print
    print ' vertex_attrib _vertex_attrib = _get_vertex_attrib();'
    print
    for suffix in ['', 'NV']:
        if suffix:
            SUFFIX = '_' + suffix
        else:
            SUFFIX = suffix
        function_name = 'glVertexAttribPointer' + suffix
        function = api.getFunctionByName(function_name)

        print ' // %s' % function.prototype()
        print ' if (_vertex_attrib == VERTEX_ATTRIB%s) {' % SUFFIX
        if suffix == 'NV':
            # NV_vertex_program has a fixed number of attribute arrays.
            print ' GLint _max_vertex_attribs = 16;'
        else:
            print ' GLint _max_vertex_attribs = _glGetInteger(GL_MAX_VERTEX_ATTRIBS);'
        print ' for (GLint index = 0; index < _max_vertex_attribs; ++index) {'
        print ' GLint _enabled = 0;'
        if suffix == 'NV':
            print ' _glGetIntegerv(GL_VERTEX_ATTRIB_ARRAY0_NV + index, &_enabled);'
        else:
            print ' _glGetVertexAttribiv%s(index, GL_VERTEX_ATTRIB_ARRAY_ENABLED%s, &_enabled);' % (suffix, SUFFIX)
        print ' if (_enabled) {'
        print ' GLint _binding = 0;'
        if suffix != 'NV':
            # It doesn't seem possible to use VBOs with NV_vertex_program.
            print ' _glGetVertexAttribiv%s(index, GL_VERTEX_ATTRIB_ARRAY_BUFFER_BINDING%s, &_binding);' % (suffix, SUFFIX)
        print ' if (!_binding) {'

        # Get the arguments via glGet*
        # (args[0] is the attrib index itself and is not queried).
        for arg in function.args[1:]:
            if suffix == 'NV':
                arg_get_enum = 'GL_ATTRIB_ARRAY_%s%s' % (arg.name.upper(), SUFFIX)
            else:
                arg_get_enum = 'GL_VERTEX_ATTRIB_ARRAY_%s%s' % (arg.name.upper(), SUFFIX)
            arg_get_function, arg_type = TypeGetter('glGetVertexAttrib', False, suffix).visit(arg.type)
            print ' %s %s = 0;' % (arg_type, arg.name)
            print ' _%s(index, %s, &%s);' % (arg_get_function, arg_get_enum, arg.name)

        arg_names = ', '.join([arg.name for arg in function.args[1:-1]])
        print ' size_t _size = _%s_size(%s, count);' % (function.name, arg_names)

        # Emit a fake function
        print ' unsigned _call = trace::localWriter.beginEnter(&_%s_sig, true);' % (function.name,)
        for arg in function.args:
            assert not arg.output
            print ' trace::localWriter.beginArg(%u);' % (arg.index,)
            if arg.name != 'pointer':
                self.serializeValue(arg.type, arg.name)
            else:
                print ' trace::localWriter.writeBlob((const void *)%s, _size);' % (arg.name)
            print ' trace::localWriter.endArg();'
        print ' trace::localWriter.endEnter();'
        print ' trace::localWriter.beginLeave(_call);'
        print ' trace::localWriter.endLeave();'
        print ' }'
        print ' }'
        print ' }'
        print ' }'
        print

    # Restore the original array_buffer
    print ' if (_array_buffer) {'
    self.fake_glBindBuffer(api, 'GL_ARRAY_BUFFER', '_array_buffer')
    print ' }'
    print
    print '}'
    print

    # Fake glStringMarkerGREMEDY
    print r'static void _fakeStringMarker(GLsizei len, const GLvoid * string) {'
    glStringMarkerGREMEDY = api.getFunctionByName('glStringMarkerGREMEDY')
    self.fake_call(glStringMarkerGREMEDY, ['len', 'string'])
    print r'}'
#
# Hooks for glTexCoordPointer, which is identical to the other array
# pointers except the fact that it is indexed by glClientActiveTexture.
#

def array_prolog(self, api, uppercase_name):
    """Emit per-array C++ prologue code.

    For TEXTURE_COORD, emits a do/while loop over all client texture
    units, selecting each with _glClientActiveTexture; other arrays need
    no prologue.  The matching loop close is in array_epilog.
    """
    if uppercase_name == 'TEXTURE_COORD':
        # The unit count query differs between desktop GL and ES.
        print ' GLint max_units = 0;'
        print ' if (ctx->profile.desktop())'
        print ' _glGetIntegerv(GL_MAX_TEXTURE_COORDS, &max_units);'
        print ' else'
        print ' _glGetIntegerv(GL_MAX_TEXTURE_UNITS, &max_units);'
        # Remember the current unit so it can be restored in array_cleanup.
        print ' GLint client_active_texture = GL_TEXTURE0;'
        print ' if (max_units > 0) {'
        print ' _glGetIntegerv(GL_CLIENT_ACTIVE_TEXTURE, &client_active_texture);'
        print ' }'
        print ' GLint unit = 0;'
        print ' do {'
        print ' GLint texture = GL_TEXTURE0 + unit;'
        print ' if (max_units > 0) {'
        print ' _glClientActiveTexture(texture);'
        print ' }'
def array_trace_prolog(self, api, uppercase_name):
    """Emit a dirty-flag declaration used by array_trace_intermezzo/epilog
    to know whether a fake glClientActiveTexture must be traced/undone."""
    if uppercase_name == 'TEXTURE_COORD':
        print ' bool client_active_texture_dirty = false;'
def array_epilog(self, api, uppercase_name):
    """Close the texture-unit loop opened by array_prolog, then restore
    the client active texture via array_cleanup."""
    if uppercase_name == 'TEXTURE_COORD':
        print ' } while (++unit < max_units);'
    self.array_cleanup(api, uppercase_name)
def array_cleanup(self, api, uppercase_name):
    """Emit code restoring the client active texture saved in array_prolog."""
    if uppercase_name == 'TEXTURE_COORD':
        print ' if (max_units > 0) {'
        print ' _glClientActiveTexture(client_active_texture);'
        print ' }'
def array_trace_intermezzo(self, api, uppercase_name):
    """Before faking a glTexCoordPointer call, also fake the
    glClientActiveTexture call that selects the unit, and mark the
    trace-side state dirty so array_trace_epilog restores it."""
    if uppercase_name == 'TEXTURE_COORD':
        print ' if (texture != client_active_texture || client_active_texture_dirty) {'
        print ' client_active_texture_dirty = true;'
        self.fake_glClientActiveTexture_call(api, "texture");
        print ' }'
def array_trace_epilog(self, api, uppercase_name):
    """If array_trace_intermezzo faked any glClientActiveTexture call,
    fake one more restoring the application's original unit."""
    if uppercase_name == 'TEXTURE_COORD':
        print ' if (client_active_texture_dirty) {'
        self.fake_glClientActiveTexture_call(api, "client_active_texture");
        print ' }'
def fake_glBindBuffer(self, api, target, buffer):
    """Emit a fake glBindBuffer(target, buffer) call into the trace."""
    self.fake_call(api.getFunctionByName('glBindBuffer'), [target, buffer])
def fake_glClientActiveTexture_call(self, api, texture):
    """Emit a fake glClientActiveTexture(texture) call into the trace."""
    self.fake_call(api.getFunctionByName('glClientActiveTexture'), [texture])
def emitFakeTexture2D(self):
    """Emit C++ code writing a fake glTexImage2D call into the trace,
    serializing every argument of the real call's signature."""
    function = glapi.glapi.getFunctionByName('glTexImage2D')
    # NOTE(review): 'instances' is assigned but never used below — looks
    # like dead code; confirm before removing.
    instances = function.argNames()
    print ' unsigned _fake_call = trace::localWriter.beginEnter(&_%s_sig, true);' % (function.name,)
    for arg in function.args:
        assert not arg.output
        self.serializeArg(function, arg)
    print ' trace::localWriter.endEnter();'
    print ' trace::localWriter.beginLeave(_fake_call);'
    print ' trace::localWriter.endLeave();'
|
EoD/apitrace
|
wrappers/gltrace.py
|
Python
|
mit
| 55,234
|
[
"VisIt"
] |
ceec5b69fc3bb5024a8f3e82ac6378b982d01f707413fb5c2df99f1717cf693a
|
import numpy as np
import scipy.special
from scipy import constants
import computations
import data_processing
def test_mie():
    """Check the building blocks of the local Mie coefficient computation
    against reference values obtained with Wolfram Alpha."""
    order = 1                      # multipole order n
    radius = 10e-9                 # sphere radius in meters
    omega = 2.48 / constants.hbar * constants.eV
    eps_out = 1.0                  # permittivity of the surrounding medium
    eps_in = -2.377 + 1j*2.856     # permittivity of the sphere
    # Size parameters rho = k * a inside and outside the sphere.
    x_out = np.sqrt(eps_out) * omega / constants.c * radius
    x_in = np.sqrt(eps_in) * omega / constants.c * radius
    # Spherical Bessel and Hankel functions and their derivatives.
    j_out = scipy.special.spherical_jn(order, x_out)
    assert np.isclose(j_out, 0.041827106236)
    j_in = scipy.special.spherical_jn(order, x_in)
    assert np.isclose(j_in.real, 0.034734565982)
    assert np.isclose(j_in.imag, 0.073239296279)
    h_out = computations.spherical_hankel(order, x_out, j_out)
    assert np.isclose(h_out.real, 0.0418271062361)
    assert np.isclose(h_out.imag, -63.8076276033)
    dj_out = scipy.special.spherical_jn(order, x_out, derivative=True)
    assert np.isclose(dj_out, 0.331755278538607)
    dj_in = scipy.special.spherical_jn(order, x_in, derivative=True)
    assert np.isclose(dj_in.real, 0.337084148426481)
    assert np.isclose(dj_in.imag, -0.004531343067615)
    # Riccati-Bessel derivatives.
    dpsi_out = computations.psi_n_prime(x_out, j_out, dj_out)
    assert np.isclose(dpsi_out, 0.0835220176842857)
    dpsi_in = computations.psi_n_prime(x_in, j_in, dj_in)
    assert np.isclose(dpsi_in.real, 0.070389454025903)
    assert np.isclose(dpsi_in.imag, 0.146716099221274)
    dzeta_out = computations.zeta_n_prime(order, x_out, dj_out, h_out)
    assert np.isclose(dzeta_out.real, 0.0835220176842857)
    assert np.isclose(dzeta_out.imag, 62.8155149095646)
    # Mie coefficients (local response: nonlocal correction term is 0.0).
    a_coef = computations.mie_bn(1.0, 1.0, j_out, j_in, h_out, dpsi_out, dpsi_in, dzeta_out, 0.0)
    assert np.isclose(a_coef.real, -1.96544240e-6)
    assert np.isclose(a_coef.imag, -2.34432221e-6)
    b_coef = computations.mie_bn(eps_out, eps_in, j_out, j_in, h_out, dpsi_out, dpsi_in, dzeta_out, 0.0)
    assert np.isclose(b_coef.real, -0.00140178)
    assert np.isclose(b_coef.imag, 0.00149810)
    return 'Tests pass: Mie local'
def test_mie_nonlocal():
    """Check the building blocks of the nonlocal Mie coefficient computation
    against reference values obtained with Wolfram Alpha."""
    order = 1                      # multipole order n
    radius = 10e-9                 # sphere radius in meters
    omega = 2.48 / constants.hbar * constants.eV
    omega_p = 8.1 / constants.hbar * constants.eV
    gamma = 0.047 / constants.hbar * constants.eV
    v_F = 1.40e6                   # Fermi velocity
    D = 8.62e-4                    # diffusion constant
    eps_out = 1.0
    eps_in = -2.377 + 1j*2.856
    eps_inf = 1.0
    k_out = np.sqrt(eps_out) * omega / constants.c
    k_in = np.sqrt(eps_in) * omega / constants.c
    # Longitudinal (nonlocal) wave vector inside the metal.
    k_long = computations.k_longitudinal(True, eps_in, eps_inf, omega_p, gamma, v_F, D, omega)
    assert np.isclose(k_long.real, -2.98397e9, atol=0.0, rtol=1.0e-3)
    assert np.isclose(k_long.imag, 5.25972e9, atol=0.0, rtol=1.0e-3)
    x_out = k_out*radius
    x_in = k_in*radius
    x_long = k_long*radius
    # Transverse Bessel/Hankel functions, as in the local test.
    j_out = scipy.special.spherical_jn(order, x_out)
    j_in = scipy.special.spherical_jn(order, x_in)
    h_out = computations.spherical_hankel(order, x_out, j_out)
    dj_out = scipy.special.spherical_jn(order, x_out, derivative=True)
    dj_in = scipy.special.spherical_jn(order, x_in, derivative=True)
    dpsi_out = computations.psi_n_prime(x_out, j_out, dj_out)
    dpsi_in = computations.psi_n_prime(x_in, j_in, dj_in)
    dzeta_out = computations.zeta_n_prime(order, x_out, dj_out, h_out)
    # Longitudinal Bessel function and its derivative.
    j_long = scipy.special.spherical_jn(order, x_long)
    assert np.isclose(j_long.real, 5.119867079e20)
    assert np.isclose(j_long.imag, -2.786558951e20)
    dj_long = scipy.special.spherical_jn(order, x_long, derivative=True)
    assert np.isclose(dj_long.real, -2.7063597583597e20)
    assert np.isclose(dj_long.imag, -5.0690425090715e20)
    # Nonlocal correction term.
    delta = computations.delta_n(order, x_long, eps_in, eps_inf, j_in)
    assert np.isclose(delta.real, -0.0119636)
    assert np.isclose(delta.imag, 0.00117763)
    # With a zero correction the local coefficients must be recovered.
    a_coef = computations.mie_bn(1.0, 1.0, j_out, j_in, h_out, dpsi_out, dpsi_in, dzeta_out, 0.0)
    assert np.isclose(a_coef.real, -1.96544240e-6)
    assert np.isclose(a_coef.imag, -2.34432221e-6)
    b_coef = computations.mie_bn(eps_out, eps_in, j_out, j_in, h_out, dpsi_out, dpsi_in, dzeta_out, 0.0)
    assert np.isclose(b_coef.real, -0.00140178)
    assert np.isclose(b_coef.imag, 0.00149810)
    return 'Tests pass: Mie nonlocal'
def test_decay_rates():
    """Check the functions involved in the decay rates computation against
    values obtained with finite element methods."""
    r = data_processing.convert_units(30, 'nm')
    eps_inf = 1.0
    hbar_omega_p = 8.1
    omega_p = data_processing.convert_eV_to_Hz(hbar_omega_p)
    hbar_gamma = constants.hbar / (14.0e-15 * constants.eV)
    gamma = data_processing.convert_eV_to_Hz(hbar_gamma)
    n_max = 111
    # Shared Drude-model arguments for the first four FEM comparisons.
    drude_args = (r, 'Drude', eps_inf, hbar_omega_p, omega_p, hbar_gamma, gamma, n_max)
    test_fem_local_emission_air(*drude_args)
    test_fem_local_emission_dielectric(*drude_args)
    test_fem_local_distance_air(*drude_args)
    test_fem_nonlocal_emission_air(*drude_args)
    # Last comparison uses the experimental gold permittivity instead.
    olmon_args = (r, 'Olmon single-crystal gold', eps_inf, hbar_omega_p, omega_p, hbar_gamma, gamma, n_max)
    test_fem_emission_exp_eps(*olmon_args)
    return 'Tests pass: FEM comparison'
def test_fem_local_emission_air(r, metal, eps_inf, hbar_omega_p, omega_p, hbar_gamma, gamma, n_max):
    """Compare decay rates with FEM calculations.

    The comparison is for a varying emission frequency in air.
    Only the relative difference is evaluated.

    r is the sphere radius, metal the permittivity model name, the rest are
    Drude parameters (in eV and Hz) plus the multipole cutoff n_max.
    """
    # BUGFIX: 'nonlocal' is a reserved keyword in Python 3, so the original
    # 'nonlocal = False' was a SyntaxError; use a regular identifier instead.
    nonlocal_response = False
    v_F = 0.0
    D = 0.0
    d = data_processing.convert_units(np.array([5]), 'nm')
    emission = np.linspace(1.0, 4.0, num=10)
    omega = data_processing.convert_emission_to_omega(emission, 'hbar omega (eV)')
    eps_medium = 1.0
    eps_metal = data_processing.permittivity(omega, metal, eps_inf, hbar_omega_p, hbar_gamma)
    # Named eps_local (consistent with the sibling tests) instead of
    # clobbering the eps_inf parameter.
    eps_local = data_processing.bound_response(eps_metal, omega, hbar_omega_p, hbar_gamma)
    orientation = 'radial'
    gamma_tot, gamma_r = computations.decay_rates_vectorized(n_max, nonlocal_response, eps_medium, eps_metal, eps_local, omega_p, gamma, v_F, D, omega, r, d, orientation)
    fem_data = np.loadtxt('Tests/FEM_01.txt', skiprows=17)
    assert np.allclose(emission, fem_data[:, 0])
    assert np.allclose(np.transpose(gamma_tot), fem_data[:, 1], atol=0.0, rtol=1.0e-3)
    orientation = 'tangential'
    gamma_tot, gamma_r = computations.decay_rates_vectorized(n_max, nonlocal_response, eps_medium, eps_metal, eps_local, omega_p, gamma, v_F, D, omega, r, d, orientation)
    fem_data = np.loadtxt('Tests/FEM_02.txt', skiprows=17)
    assert np.allclose(emission, fem_data[:, 0])
    assert np.allclose(np.transpose(gamma_tot), fem_data[:, 1], atol=0.0, rtol=1.0e-2)
    return 'Tests pass: FEM comparison for changing emission parameter in air with local Drude metal'
def test_fem_local_emission_dielectric(r, metal, eps_inf, hbar_omega_p, omega_p, hbar_gamma, gamma, n_max):
    """Compare decay rates with FEM calculations.

    The comparison is for a varying emission frequency in a dielectric medium.
    Only the relative difference is evaluated.
    """
    # BUGFIX: 'nonlocal' is a reserved keyword in Python 3, so the original
    # 'nonlocal = False' was a SyntaxError; use a regular identifier instead.
    nonlocal_response = False
    v_F = 0.0
    D = 0.0
    d = data_processing.convert_units(np.array([5]), 'nm')
    emission = np.linspace(1.0, 4.0, num=10)
    omega = data_processing.convert_emission_to_omega(emission, 'hbar omega (eV)')
    # Recomputed from hbar_gamma (same value as the gamma parameter).
    gamma = data_processing.convert_eV_to_Hz(hbar_gamma)
    eps_medium = 2.0
    eps_metal = data_processing.permittivity(omega, metal, eps_inf, hbar_omega_p, hbar_gamma)
    eps_local = data_processing.bound_response(eps_metal, omega, hbar_omega_p, hbar_gamma)
    orientation = 'radial'
    gamma_tot, gamma_r = computations.decay_rates_vectorized(n_max, nonlocal_response, eps_medium, eps_metal, eps_local, omega_p, gamma, v_F, D, omega, r, d, orientation)
    fem_data = np.loadtxt('Tests/FEM_03.txt', skiprows=17)
    assert np.allclose(emission, fem_data[:, 0])
    assert np.allclose(np.transpose(gamma_tot), fem_data[:, 1], atol=0.0, rtol=1.0e-3)
    orientation = 'tangential'
    gamma_tot, gamma_r = computations.decay_rates_vectorized(n_max, nonlocal_response, eps_medium, eps_metal, eps_local, omega_p, gamma, v_F, D, omega, r, d, orientation)
    fem_data = np.loadtxt('Tests/FEM_04.txt', skiprows=17)
    assert np.allclose(emission, fem_data[:, 0])
    assert np.allclose(np.transpose(gamma_tot), fem_data[:, 1], atol=0.0, rtol=2.0e-2)
    return 'Tests pass: FEM comparison for changing emission parameter in dielectric with local Drude metal'
def test_fem_local_distance_air(r, metal, eps_inf, hbar_omega_p, omega_p, hbar_gamma, gamma, n_max):
    """Compare decay rates with FEM calculations.

    The comparison is for a varying emitter-sphere distance in air at a
    fixed emission frequency.  Only the relative difference is evaluated.
    """
    # BUGFIX: 'nonlocal' is a reserved keyword in Python 3, so the original
    # 'nonlocal = False' was a SyntaxError; use a regular identifier instead.
    nonlocal_response = False
    v_F = 0.0
    D = 0.0
    distance = np.linspace(1.0, 10.0, num=10)
    d = data_processing.convert_units(distance, 'nm')
    emission = 2.5
    omega = data_processing.convert_emission_to_omega(np.array([emission]), 'hbar omega (eV)')
    # Recomputed from hbar_gamma (same value as the gamma parameter).
    gamma = data_processing.convert_eV_to_Hz(hbar_gamma)
    eps_medium = 1.0
    eps_metal = data_processing.permittivity(omega, metal, eps_inf, hbar_omega_p, hbar_gamma)
    eps_local = data_processing.bound_response(eps_metal, omega, hbar_omega_p, hbar_gamma)
    orientation = 'radial'
    gamma_tot, gamma_r = computations.decay_rates_vectorized(n_max, nonlocal_response, eps_medium, eps_metal, eps_local, omega_p, gamma, v_F, D, omega, r, d, orientation)
    fem_data = np.loadtxt('Tests/FEM_05.txt', skiprows=17)
    assert np.allclose(d, fem_data[:, 0])
    assert np.allclose(gamma_tot, fem_data[:, 1], atol=0.0, rtol=3.0e-2)
    orientation = 'tangential'
    gamma_tot, gamma_r = computations.decay_rates_vectorized(n_max, nonlocal_response, eps_medium, eps_metal, eps_local, omega_p, gamma, v_F, D, omega, r, d, orientation)
    fem_data = np.loadtxt('Tests/FEM_06.txt', skiprows=17)
    assert np.allclose(d, fem_data[:, 0])
    assert np.allclose(gamma_tot, fem_data[:, 1], atol=0.0, rtol=3.0e-2)
    return 'Tests pass: FEM comparison for changing distance in air with local Drude metal'
def test_fem_nonlocal_emission_air(r, metal, eps_inf, hbar_omega_p, omega_p, hbar_gamma, gamma, n_max):
    """Compare decay rates with FEM calculations.

    The comparison is for a Drude metal with a nonlocal response.
    Only the relative difference is evaluated.
    """
    # BUGFIX: 'nonlocal' is a reserved keyword in Python 3, so the original
    # 'nonlocal = True' was a SyntaxError; use a regular identifier instead.
    nonlocal_response = True
    v_F = 1.40e6
    D = 0.0
    d = data_processing.convert_units(np.array([5]), 'nm')
    emission = np.linspace(1.0, 4.0, num=10)
    omega = data_processing.convert_emission_to_omega(emission, 'hbar omega (eV)')
    eps_medium = 1.0
    eps_metal = data_processing.permittivity(omega, metal, eps_inf, hbar_omega_p, hbar_gamma)
    eps_local = data_processing.bound_response(eps_metal, omega, hbar_omega_p, hbar_gamma)
    orientation = 'radial'
    # Hydrodynamic model only (D = 0).
    gamma_tot, gamma_r = computations.decay_rates_vectorized(n_max, nonlocal_response, eps_medium, eps_metal, eps_local, omega_p, gamma, v_F, D, omega, r, d, orientation)
    fem_data = np.loadtxt('Tests/FEM_07.txt', skiprows=17)
    assert np.allclose(emission, fem_data[:, 0])
    assert np.allclose(np.transpose(gamma_tot), fem_data[:, 1], atol=0.0, rtol=2.0e-2)
    # With diffusion (GNOR).
    D = 8.62e-4
    gamma_tot, gamma_r = computations.decay_rates_vectorized(n_max, nonlocal_response, eps_medium, eps_metal, eps_local, omega_p, gamma, v_F, D, omega, r, d, orientation)
    fem_data = np.loadtxt('Tests/FEM_08.txt', skiprows=17)
    assert np.allclose(emission, fem_data[:, 0])
    assert np.allclose(np.transpose(gamma_tot), fem_data[:, 1], atol=0.0, rtol=1.0e-2)
    return 'Tests pass: FEM comparison for changing emission parameter in air with nonlocal Drude metal'
def _assert_fem_match(gamma_tot, emission, path, rtol):
    """Assert that gamma_tot matches the FEM reference file at 'path'."""
    fem_data = np.loadtxt(path, skiprows=17)
    assert np.allclose(emission, fem_data[:, 0])
    assert np.allclose(np.transpose(gamma_tot), fem_data[:, 1], atol=0.0, rtol=rtol)

def test_fem_emission_exp_eps(r, metal, eps_inf, hbar_omega_p, omega_p, hbar_gamma, gamma, n_max):
    """Compare decay rates with FEM calculations.

    The comparison is for a metal with permittivity of gold given by Olmon.
    Only the relative difference is evaluated.
    """
    emission = np.linspace(1.0, 4.0, num=10)
    omega = data_processing.convert_emission_to_omega(emission, 'hbar omega (eV)')
    orientation = 'radial'
    d = data_processing.convert_units(np.array([5]), 'nm')
    # BUGFIX: 'nonlocal' is a reserved keyword in Python 3, so the original
    # bare 'nonlocal = ...' assignments were SyntaxErrors; the flag is now a
    # regular identifier.  The eight original copy-pasted stanzas are folded
    # into one data-driven loop: (nonlocal flag, v_F, D, eps_medium,
    # reference file, relative tolerance) per case, same order as before.
    cases = [
        (False, 0.0, 0.0, 1.0, 'Tests/FEM_09.txt', 1.0e-3),
        (False, 0.0, 0.0, 2.0, 'Tests/FEM_10.txt', 1.0e-3),
        (True, 1.40e6, 0.0, 1.0, 'Tests/FEM_11.txt', 2.0e-2),
        (True, 1.40e6, 0.0, 2.0, 'Tests/FEM_12.txt', 2.0e-2),
        (True, 1.40e6, 8.62e-4, 1.0, 'Tests/FEM_13.txt', 1.0e-3),
        (True, 1.40e6, 8.62e-4, 2.0, 'Tests/FEM_14.txt', 1.0e-3),
    ]
    for nonlocal_response, v_F, D, eps_medium, path, rtol in cases:
        eps_metal = data_processing.permittivity(omega, metal, eps_inf, hbar_omega_p, hbar_gamma)
        eps_local = data_processing.bound_response(eps_metal, omega, hbar_omega_p, hbar_gamma)
        gamma_tot, gamma_r = computations.decay_rates_vectorized(n_max, nonlocal_response, eps_medium, eps_metal, eps_local, omega_p, gamma, v_F, D, omega, r, d, orientation)
        _assert_fem_match(gamma_tot, emission, path, rtol)
    return 'Tests pass: FEM comparison for metal with permittivity of gold given by Olmon'
# Run the full reference-test suite when executed directly as a script.
if __name__ == "__main__":
    print(test_mie())
    print(test_mie_nonlocal())
    print(test_decay_rates())
|
rjurga/plasmon-fluorescence
|
tests.py
|
Python
|
mit
| 16,104
|
[
"CRYSTAL"
] |
74b5f81a13fe5e64c44ed9726bacbfa50303489ad0fad9674c5c4d530aa2e7ca
|
# -*- coding: UTF-8 -*-
"""
``trinity_mapping``
-----------------------------------------------------------------
:Authors: Menachem Sklarz
:Affiliation: Bioinformatics core facility
:Organization: National Institute of Biotechnology in the Negev, Ben Gurion University.
A class that defines a module for running ``align_and_estimate_abundance.pl`` on a Trinity assembly and the raw reads.
Tested on versions 2.4.0 and 2.5.0 of Trinity.
See the `align_and_estimate_abundance.pl`_ script documentation.
.. _align_and_estimate_abundance.pl: https://github.com/trinityrnaseq/trinityrnaseq/wiki/Trinity-Transcript-Quantification#estimating-transcript-abundance
Requires
~~~~~~~~~~~~~~~~~~~~~~~~~~~~
* ``fastq`` files in at least one of the following slots:
* ``sample_data[<sample>]["fastq.F"]``
* ``sample_data[<sample>]["fastq.R"]``
* ``sample_data[<sample>]["fastq.S"]``
* A Trinity assembly in one of (depending on ``scope``)
* ``sample_data[<sample>]["fasta.nucl"]``
* ``sample_data["fasta.nucl"]``
Output:
~~~~~~~~~~~~~
* Puts output files in the following slots:
* ``sample_data[<sample>]["bam"]``
* ``sample_data[<sample>]["unsorted_bam"]`` (If ``--coordsort_bam`` is passed in redirects)
* ``sample_data[<sample>]["isoforms.results"]``
* ``sample_data[<sample>]["genes.results"]``
Parameters that can be set
~~~~~~~~~~~~~~~~~~~~~~~~~~
.. csv-table::
:header: "Parameter", "Values", "Comments"
"scope", "sample|project", "Set if project-wide fasta slot should be used"
"redirects: --gene_trans_map", "path or empty", "If empty, use internal gene_trans_map. If path, use path as gene_trans_map for all samples. If not passed, performs analysis on isoform level only"
"redirects: --trinity_mode", "", "If set, will create a gene_trans_map for each sample and store it as sample gene_trans_map"
Lines for parameter file
~~~~~~~~~~~~~~~~~~~~~~~~~~~~
::
trin_map1:
module: trinity_mapping
base: trinity1
script_path: {Vars.paths.align_and_estimate_abundance}
redirects:
--est_method: RSEM
--aln_method: bowtie
--trinity_mode:
--seqType: fq
References
~~~~~~~~~~~~~~~~~~~~~~~~~~~~
Grabherr, M.G., Haas, B.J., Yassour, M., Levin, J.Z., Thompson, D.A., Amit, I., Adiconis, X., Fan, L., Raychowdhury, R., Zeng, Q. and Chen, Z., 2011. **Trinity: reconstructing a full-length transcriptome without a genome from RNA-Seq data**. *Nature biotechnology*, 29(7), p.644.
"""
import os
import sys
import re
from neatseq_flow.PLC_step import Step,AssertionExcept
__author__ = "Menachem Sklarz"
__version__ = "1.6.0"
class Step_trinity_mapping(Step):
def step_specific_init(self):
    """Validate and normalise the module's redirected parameters.

    Requires --est_method, extracts --aln_method into params['aln_method'],
    requires a 'scope', and strips redirects that the module sets itself.
    """
    self.shell = "bash"      # Can be set to "bash" by inheriting instances
    self.file_tag = "trinity_mapping"

    if "--est_method" not in self.params["redir_params"]:
        raise AssertionExcept("You must pass an --est_method to trinity_mapping.\n")
    # Kept on the instance: used later when choosing output file names.
    self.est_method = self.params["redir_params"]["--est_method"]
    if self.est_method == "kallisto":
        raise AssertionExcept("Method 'kallisto' is not defined yet!")
        # To support kallisto, the per-isoform and per-gene output file
        # names must be added to the file_suffix_ind dict in build_scripts.

    # Move --aln_method out of the redirects into its own parameter
    # (None when it was not passed at all).
    self.params["aln_method"] = self.params["redir_params"].pop("--aln_method", None)

    if not self.params["aln_method"] and self.est_method.lower() in ["rsem", "express"]:
        raise AssertionExcept("For RSEM and eXpress, you must supply an 'aln_method' parameter")

    if "scope" not in self.params:
        raise AssertionExcept("Please specify a 'scope': Either 'sample' or 'project'.")

    # These redirects are managed by the module itself; drop them with a warning.
    for managed_redir in ["--transcripts", "--output_dir", "--left", "--right", "--single", "--prep_reference"]:
        if managed_redir in self.params["redir_params"]:
            del self.params["redir_params"][managed_redir]
            self.write_warning("You are not supposed to specify %s in redirects. We set it automatically" % managed_redir)
def step_sample_initiation(self):
    """ A place to do initiation stages following setting of sample_data

    Here you should do testing for dependency output. These will NOT exist at initiation of this instance. They are set only following sample_data updating
    """
    # Check the assembly exists at the requested scope.
    if self.params["scope"] == "sample":
        # Check that "fasta" and "assembly" exist (signs that trinity has been executed)
        for sample in self.sample_data["samples"]:
            if "fasta.nucl" not in self.sample_data[sample]:
                raise AssertionExcept("It seems there is no sample-wide assembly.", sample)
    elif self.params["scope"] == "project":
        # print self.sample_data.keys()
        if "fasta.nucl" not in self.sample_data["project_data"]:
            raise AssertionExcept("It seems there is no project-wide assembly.")
    else:
        raise AssertionExcept("'scope' must be either 'sample' or 'project'.")
    # If "bam" required as input method, make sure a bam exists for all samples:
    if self.params["aln_method"] == "bam":
        for sample in self.sample_data["samples"]:
            if "bam" not in self.sample_data[sample]:
                raise AssertionExcept("It seems there is no BAM file for the sample.", sample)
    # Dealing with gene_trans_map:
    # self.use_gene_trans_map records whether gene-level output will exist.
    if self.params["scope"] == "project":
        if "--gene_trans_map" in self.params["redir_params"]:
            self.use_gene_trans_map = True
            if self.params["redir_params"]["--gene_trans_map"]:  # If value was passed
                self.sample_data["project_data"]["gene_trans_map"] = self.params["redir_params"]["--gene_trans_map"]
            else:  # If passed empty, use internal:
                if "gene_trans_map" in self.sample_data["project_data"]:
                    self.params["redir_params"]["--gene_trans_map"] = self.sample_data["project_data"]["gene_trans_map"]
                else:
                    raise AssertionExcept("Expecting 'gene_trans_map' in project but none found.\n")
        elif "--trinity_mode" in self.params["redir_params"]:
            self.use_gene_trans_map = True
        else:
            self.use_gene_trans_map = False
    else:  # sample scope
        if "--gene_trans_map" in self.params["redir_params"]:
            self.use_gene_trans_map = True
            if self.params["redir_params"]["--gene_trans_map"]:  # If value was passed
                for sample in self.sample_data["samples"]:
                    self.sample_data[sample]["gene_trans_map"] = self.params["redir_params"]["--gene_trans_map"]
            else:  # If passed empty, use internal:
                # NOTE(review): 'sample' below is the leftover value from an
                # earlier loop, not a fresh iteration over all samples — this
                # only inspects/sets the LAST sample. Presumably a latent bug;
                # confirm intended behavior before changing.
                if "gene_trans_map" in self.sample_data[sample]:
                    self.params["redir_params"]["--gene_trans_map"] = self.sample_data[sample]["gene_trans_map"]
                else:
                    raise AssertionExcept("Expecting 'gene_trans_map' in sample but none found.\n", sample)
        elif "--trinity_mode" in self.params["redir_params"]:
            # NOTE(review): same stale-'sample' concern as above — only the
            # last iterated sample gets a gene_trans_map here.
            self.sample_data[sample]["gene_trans_map"] = "%s.gene_trans_map" % self.sample_data[sample]["fasta.nucl"]
            self.use_gene_trans_map = True
        else:
            self.use_gene_trans_map = False
def create_spec_wrapping_up_script(self):
    """ Add stuff to check and agglomerate the output data
    """
    # Intentionally a no-op: this module produces per-sample outputs only.
    pass
def create_spec_preliminary_script(self):
    """ Add script to run BEFORE all other steps

    For project scope with a real aligner (not 'bam'/None), emits a bash
    snippet that links the reference FASTA (and gene_trans_map, if any)
    into this step's Reference dir, then runs align_and_estimate_abundance
    with --prep_reference so the index is built once, locally.  The
    template strings below are emitted verbatim into the bash script, so
    their exact whitespace is part of the behavior.
    """
    if all([self.params["scope"] == "project",
        self.params["aln_method"] not in ["bam", None]]):
        self.script = ""
        # 1. Create link to fasta file in Reference dir
        # 2. Create link to gene_trans_map file as well, if it exists
        # NOTE(review): "sost" in the emitted comment below looks like a
        # typo for "soft" — it is inside a runtime string, so it is left
        # untouched here.
        self.script += """\
# Creating a local sost link to the reference
# The purpose is that the reference will not be built in the original location
mkdir -p {dir}
cp -rsf \\
{ref} \\{map}
{dir}

""".format(ref=self.sample_data["project_data"]["fasta.nucl"],
    map=("\n\t"+self.sample_data["project_data"]["gene_trans_map"]+" \\")
    if "gene_trans_map" in self.sample_data["project_data"]
    else "",
    dir=self.base_dir+"Reference")
        # Set fasta.nucl to new link to original fasta nucl
        self.sample_data["project_data"]["fasta.nucl"] = "{dir}Reference/{fn}".\
            format(dir=self.base_dir,
            fn=os.path.basename(self.sample_data["project_data"]["fasta.nucl"]))
        # If it exists, and therefore linked, set gene_trans_map to new link to original file:
        if "gene_trans_map" in self.sample_data["project_data"]:
            self.sample_data["project_data"]["gene_trans_map"] = "{dir}Reference/{fn}".\
                format(dir=self.base_dir,
                fn=os.path.basename(self.sample_data["project_data"]["gene_trans_map"]))
            # "%s.gene_trans_map" % self.sample_data["project_data"]["fasta.nucl"]
        # Create script and write to SCRPT
        # First: transcript preparation (with --pre_reference arg)
        self.script += """
{const}--aln_method {aln} \\
\t--transcripts {fasta} \\
\t--prep_reference

""".format(const=self.get_script_const(),
    aln=self.params["aln_method"],
    fasta=self.sample_data["project_data"]["fasta.nucl"])
    else:
        pass
def build_scripts(self):
    """ Create one quantification script per sample.

        For each sample: optionally prepares the aligner reference
        (sample scope only -- project scope is handled once in
        create_spec_preliminary_script()), runs the estimation tool on the
        sample's reads (paired F/R or single), renames the method's
        fixed-name output files to sample-specific names, and registers
        isoform/gene results (and any BAM produced) in ``self.sample_data``.
    """
    # Suffix of the isoform-/gene-level result file produced by each
    # supported estimation method.  kallisto has no fixed-name files to
    # rename, hence the empty strings.
    file_suffix_ind = {
        "rsem": {
            "isoforms": "RSEM.isoforms.results",
            "genes": "RSEM.genes.results"},
        "salmon": {
            "isoforms": "quant.sf",
            "genes": "quant.sf.genes"},
        "kallisto": {
            "isoforms": "",
            "genes": ""},
        "express": {
            "isoforms": "results.xprs",
            "genes": "results.xprs.genes"}
    }
    # Loop over samples and concatenate read files to $forward and $reverse respectively
    # add check if paired or single !!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!
    for sample in self.sample_data["samples"]:      # Getting list of samples out of samples_hash
        # Name of specific script:
        self.spec_script_name = self.set_spec_script_name(sample)
        self.script = ""
        # Make a dir for the current sample:
        sample_dir = self.make_folder_for_sample(sample)
        # This line should be left before every new script. It sees to local issues.
        # Use the dir it returns as the base_dir for this step.
        use_dir = self.local_start(sample_dir)
        # Procedure for preparing
        # Repeating procedure as done in trinity step:
        # If both F and R reads exist, adding them to forward and reverse
        # Assuming upstream input testing to check that if there are F reads then there are also R reads.
        # Setting variables to empty strings
        single = forward = reverse = ""
        if "fastq.F" in self.sample_data[sample]:
            forward = self.sample_data[sample]["fastq.F"]
            reverse = self.sample_data[sample]["fastq.R"]
        if "fastq.S" in self.sample_data[sample]:
            single = self.sample_data[sample]["fastq.S"]
        # # Adding single reads to end of left (=forward) reads
        # if single != "" and forward != "":
        #     forward = ",".join([forward,single])
        # Reference transcripts: the per-sample assembly or the project one.
        transcripts = self.sample_data[sample]["fasta.nucl"] \
            if self.params["scope"] == "sample" \
            else self.sample_data["project_data"]["fasta.nucl"]
        # Sample scope with a real aligner: reference prep must run inside
        # this per-sample script (project scope already did it once).
        if all([self.params["scope"] == "sample",
                "aln_method" in self.params,
                self.params["aln_method"] not in ["bam", None]]):
            self.script += "# Preperaing the reference for analysis:\n\n"
            self.script += self.get_script_const()
            self.script += "--aln_method %s \\\n\t" % self.params["aln_method"]
            self.script += "--transcripts %s \\\n\t" % transcripts
            self.script += "--prep_reference \n\n"
        # Create script and write to SCRPT
        # First: transcript preparation (with --prep_reference arg)
        # - This is done with preliminary script (see create_spec_preliminary_script())
        self.script += self.get_script_const()
        if self.params["aln_method"] == "bam":
            self.script += "--aln_method %s \\\n\t" % self.sample_data[sample]["bam"]  # Checked above. BAM must exist.
        elif self.params["aln_method"] == None:
            pass
        else:
            self.script += "--aln_method %s \\\n\t" % self.params["aln_method"]
        self.script += "--transcripts %s \\\n\t" % transcripts
        self.script += "--output_dir %s \\\n\t" % use_dir
        if (forward):
            self.script += "--left %s \\\n\t" % forward
            self.script += "--right %s \\\n\t" % reverse
        elif (single):
            self.script += "--single %s \\\n\t" % single
        else:
            pass  # No reads. This should be caught above...
        # Drop the dangling line-continuation left by the last += above.
        self.script = self.script.rstrip("\\\n\t") + "\n\n"
        # Storing files: rename the method's fixed-name outputs to
        # sample-specific names and register them.
        mv_data = {"dir": use_dir,
                   "src": file_suffix_ind[self.est_method.lower()]["isoforms"],
                   "trg": ".".join([sample, file_suffix_ind[self.est_method.lower()]["isoforms"]])}
        self.script += "mv {dir}{src} {dir}{trg}\n".format(**mv_data)
        self.sample_data[sample]["isoforms.results"] = "{dir}{trg}".format(**mv_data)
        self.stamp_file(self.sample_data[sample]["isoforms.results"])
        if self.use_gene_trans_map:  # Produce gene files:
            mv_data["src"] = file_suffix_ind[self.est_method.lower()]["genes"]
            mv_data["trg"] = ".".join([sample, file_suffix_ind[self.est_method.lower()]["genes"]])
            self.script += "mv {dir}{src} {dir}{trg}\n".format(**mv_data)
            self.sample_data[sample]["genes.results"] = "{dir}{trg}".format(**mv_data)
            self.stamp_file(self.sample_data[sample]["genes.results"])
        # Store bam files
        if self.est_method.lower() in ["rsem", "express"]:
            self.sample_data[sample]["bam"] = "{dir}{method}.bam".format(dir=sample_dir,
                                                                         method=self.params["aln_method"])
            self.stamp_file(self.sample_data[sample]["bam"])
            self.sample_data[sample]["mapper"] = "%s" % self.params["aln_method"]
            # NOTE(review): coord-sorted handling assumed nested under the
            # bam-producing methods (indentation reconstructed) -- confirm.
            if "--coordsort_bam" in self.params["redir_params"]:
                self.sample_data[sample]["unsorted_bam"] = self.sample_data[sample]["bam"]
                self.stamp_file(self.sample_data[sample]["unsorted_bam"])
                self.sample_data[sample]["bam"] = "{dir}{method}.csorted.bam".format(dir=sample_dir,
                                                                                    method=self.params["aln_method"])
                self.stamp_file(self.sample_data[sample]["bam"])
        self.sample_data[sample]["reference"] = transcripts
        # Move all files from temporary local dir to permanent base_dir
        self.local_finish(use_dir, self.base_dir)  # Sees to copying local files to final destination (and other stuff)
        self.create_low_level_script()
|
bioinfo-core-BGU/neatseq-flow_modules
|
neatseq_flow_modules/RNA_seq/trinity_mapping.py
|
Python
|
gpl-3.0
| 17,105
|
[
"Bowtie"
] |
6f1a6baa94708be445935242dc409ac60d0ddac45614a084d6d906edf6e17aae
|
##############################################################################
##
## Copyright (C) 2014-2016, New York University.
## Copyright (C) 2011-2014, NYU-Poly.
## Copyright (C) 2006-2011, University of Utah.
## All rights reserved.
## Contact: contact@vistrails.org
##
## This file is part of VisTrails.
##
## "Redistribution and use in source and binary forms, with or without
## modification, are permitted provided that the following conditions are met:
##
## - Redistributions of source code must retain the above copyright notice,
## this list of conditions and the following disclaimer.
## - Redistributions in binary form must reproduce the above copyright
## notice, this list of conditions and the following disclaimer in the
## documentation and/or other materials provided with the distribution.
## - Neither the name of the New York University nor the names of its
## contributors may be used to endorse or promote products derived from
## this software without specific prior written permission.
##
## THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS"
## AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO,
## THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR
## PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR
## CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL,
## EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO,
## PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS;
## OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY,
## WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR
## OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF
## ADVISED OF THE POSSIBILITY OF SUCH DAMAGE."
##
###############################################################################
"""Configuration variables for controlling specific things in VisTrails.
"""
from __future__ import division
import atexit
import email.utils
import json
import os
import requests
import tempfile
import usagestats
import weakref
from vistrails.core import debug
from vistrails.core.system import vistrails_version, \
vistrails_examples_directory
# Module-level usagestats.Stats instance; created by setup_usage_report()
# and used by every other function in this module.
usage_report = None
def setup_usage_report():
    """Sets up the usagestats module.

    Creates the global ``usage_report`` (pointing at the reprozip-stats
    server, verified against the bundled CA certificate) and records a few
    facts about the current working directory.
    """
    global usage_report
    certificate_file = get_ca_certificate()
    usage_report = usagestats.Stats(
        '~/.vistrails/usage_stats',
        usagestats.Prompt(
            "\nUploading usage statistics is currently disabled\n"
            "Please help us by providing anonymous usage statistics; "
            "you can enable this\neither from the GUI or by using "
            "--enable-usage-stats\n"
            "If you do not want to see this message again, you can disable "
            "it from the GUI or with --disable-usage-stats\n"
            "Nothing will be uploaded before you opt in.\n"),
        'https://reprozip-stats.poly.edu/',
        version='VisTrails %s' % vistrails_version(),
        unique_user_id=True,
        env_var='VISTRAILS_USAGE_STATS',
        ssl_verify=certificate_file)
    cwd = os.getcwd()
    record_usage(cwd_spaces=b' ' in cwd)
    try:
        # Python 2 semantics: os.getcwd() is a byte string, so .decode tells
        # us whether the path is pure ASCII (str has no .decode on Python 3).
        cwd.decode('ascii')
    except UnicodeDecodeError:
        record_usage(cwd_ascii=False)
    else:
        record_usage(cwd_ascii=True)
def update_config(configuration):
    """Apply one-shot usage-reporting flags from *configuration*.

    ``enableUsage`` wins over ``disableUsage``.  Returns True when either
    flag was present (and ``reportUsage`` was updated), False otherwise.
    """
    enable = getattr(configuration, 'enableUsage', False)
    disable = getattr(configuration, 'disableUsage', False)
    if not (enable or disable):
        return False
    if enable:
        usage_report.enable_reporting()
        configuration.reportUsage = 1
    else:
        usage_report.disable_reporting()
        configuration.reportUsage = 0
    return True
def record_usage(**kwargs):
    """Records some info in the current usage report.

    Silently does nothing until setup_usage_report() has run.
    """
    if usage_report is None:
        return
    debug.debug("record_usage %r" % (kwargs,))
    usage_report.note(kwargs)
# Vistrails saved during this session, keyed by id(); weak values let
# closed/garbage-collected vistrails drop out automatically.
saved_vistrails = weakref.WeakValueDictionary()
# Feature names recorded outside the context of any particular vistrail.
features = set()
# id(vistrail) -> set of feature names used while that vistrail was open.
features_for_vistrails = {}
def record_vistrail(what, vistrail):
    """Record info about a vistrail we used.

    *what* is the event name.  'save' only marks the vistrail so that the
    later 'close' (reported as 'saved_close') submits aggregate statistics;
    a 'close' without a prior 'save' is ignored.  *vistrail* may be a
    VistrailController, a Vistrail or a Pipeline.
    """
    if not usage_report.recording:
        return
    from vistrails.core.vistrail.controller import VistrailController
    from vistrails.core.vistrail.pipeline import Pipeline
    from vistrails.core.vistrail.vistrail import Vistrail
    from vistrails.db.services.locator import XMLFileLocator
    if isinstance(vistrail, VistrailController):
        vistrail = vistrail.vistrail
    if what == 'save':
        # Don't report now, but mark it for reporting when it gets closed
        saved_vistrails[id(vistrail)] = vistrail
        return
    elif what == 'close':
        i = id(vistrail)
        if i in saved_vistrails:
            del saved_vistrails[i]
            what = 'saved_close'
        else:
            # Closed without ever being saved: nothing to report.
            return
    if isinstance(vistrail, Vistrail):
        # Aggregate counts from action annotations (upgrades, notes,
        # parameter explorations).
        upgrade_from = set()
        upgrade_to = set()
        nb_notes = 0
        nb_paramexplorations = 0
        for annotation in vistrail.action_annotations:
            if annotation.key == Vistrail.UPGRADE_ANNOTATION:
                upgrade_from.add(annotation.action_id)
                upgrade_to.add(int(annotation.value))
            elif annotation.key == Vistrail.NOTES_ANNOTATION:
                nb_notes += 1
            elif annotation.key == Vistrail.PARAMEXP_ANNOTATION:
                nb_paramexplorations += 1
        # Upgraded-from versions that are not themselves upgrade targets.
        nb_upgrades = len(upgrade_from - upgrade_to)
        if isinstance(vistrail.locator, XMLFileLocator):
            usage_report.note({'in_examples_dir':
                os.path.realpath(vistrail.locator._name).startswith(
                    os.path.realpath(vistrails_examples_directory()))})
        # Count modules/groups/abstractions added or changed, skipping
        # actions that merely performed upgrades.
        nb_modules = 0
        nb_groups = 0
        nb_abstractions = 0
        for action in vistrail.actions:
            if action.id in upgrade_to or action.description == "Upgrade":
                continue
            for operation in action.operations:
                if operation.vtType == 'add' or operation.vtType == 'change':
                    if operation.what == 'module':
                        nb_modules += 1
                        if operation.data.is_group():
                            nb_groups += 1
                        elif operation.data.is_abstraction():
                            nb_abstractions += 1
        usage_report.note(dict(use_vistrail=what,
                               nb_versions=len(vistrail.actionMap),
                               nb_tags=len(vistrail.get_tagMap()),
                               nb_notes=nb_notes,
                               nb_paramexplorations=nb_paramexplorations,
                               nb_upgrades=nb_upgrades,
                               nb_variables=len(vistrail.vistrail_variables),
                               nb_modules=nb_modules,
                               nb_groups=nb_groups,
                               nb_abstractions=nb_abstractions))
        # Flush features that were recorded against this vistrail.
        for feature in features_for_vistrails.pop(id(vistrail), ()):
            usage_report.note({'feature_for_vistrail': feature})
    elif isinstance(vistrail, Pipeline):
        usage_report.note(dict(use_workflow=what,
                               nb_modules=len(vistrail.module_list)))
    else:
        raise TypeError
def record_feature(feature, vistrail=None):
    """Record that a feature was used.

    Without a vistrail the feature is recorded globally; with one, it is
    attached to that vistrail and reported when the vistrail is closed.
    """
    from vistrails.core.vistrail.controller import VistrailController
    if vistrail is None:
        features.add(feature)
        return
    if isinstance(vistrail, VistrailController):
        vistrail = vistrail.vistrail
    features_for_vistrails.setdefault(id(vistrail), set()).add(feature)
def submit_usage_report(**kwargs):
    """Submits the current usage report to the usagestats server.

    Before submitting, notes the versions of optional scientific packages
    (if importable) and flushes all recorded feature names.
    """
    debug.debug("submit_usage_report %r" % (kwargs,))
    for pkg in ('numpy', 'scipy', 'matplotlib'):
        try:
            pkg_o = __import__(pkg, globals(), locals())
            usage_report.note({pkg: getattr(pkg_o, '__version__', '')})
        except ImportError:
            # Package not installed; nothing to report for it.
            pass
    try:
        import vtk
        usage_report.note({'vtk': vtk.vtkVersion().GetVTKVersion()})
    except ImportError:
        pass
    # Merge vistrail-attached features into the global set before reporting.
    features.update(*features_for_vistrails.values())
    for feature in features:
        usage_report.note({'feature': feature})
    usage_report.submit(kwargs,
                        usagestats.OPERATING_SYSTEM,
                        usagestats.SESSION_TIME,
                        usagestats.PYTHON_VERSION)
# Cached result of get_server_news(); populated on first call.
_server_news = None
def get_server_news():
    """Return the server news payload, downloading and caching it.

    Uses If-Modified-Since against the cached ~/.vistrails/server_news.json;
    falls back to the cached file on network errors, and to _default_news
    when no cache exists.  (Python 2 ``except X, e`` syntax throughout.)
    """
    global _server_news
    if _server_news is not None:
        return _server_news
    dot_vistrails = os.path.expanduser('~/.vistrails')
    if not os.path.exists(dot_vistrails):
        os.mkdir(dot_vistrails)
    file_name = os.path.join(dot_vistrails, 'server_news.json')
    file_exists = os.path.exists(file_name)
    headers = {}
    if file_exists:
        # Ask the server to skip the body if our cached copy is current.
        mtime = email.utils.formatdate(os.path.getmtime(file_name),
                                       usegmt=True)
        headers['If-Modified-Since'] = mtime
    try:
        resp = requests.get(
            'https://reprozip-stats.poly.edu/vistrails_news/%s' %
            vistrails_version(), headers=headers,
            # Short timeout when we can fall back to the cached file.
            timeout=2 if file_exists else 10,
            stream=True, verify=get_ca_certificate())
        resp.raise_for_status()
        if resp.status_code == 304:
            # Treat "not modified" as an exception so the download branch
            # below is skipped and the cached file is used as-is.
            raise requests.HTTPError(
                '304 File is up to date, no data returned',
                response=resp)
    except requests.RequestException, e:
        if not e.response or e.response.status_code != 304:
            debug.warning("Can't download server news", e)
    else:
        try:
            with open(file_name, 'wb') as f:
                for chunk in resp.iter_content(4096):
                    f.write(chunk)
            resp.close()
        except Exception, e:
            # Don't leave a half-written cache file behind.
            try:
                os.remove(file_name)
            except OSError:
                pass
            raise e
        debug.log("Downloaded server news")
    if os.path.exists(file_name):
        with open(file_name, 'r') as f:
            _server_news = json.load(f)
    else:
        _server_news = _default_news
    return _server_news
def get_ca_certificate():
    """Write the bundled CA certificate to a temporary .pem file.

    Returns the path of the file; the file is deleted automatically at
    interpreter exit.  Fix over the original: the mkstemp descriptor is
    wrapped with os.fdopen (instead of opening the path a second time and
    closing the raw fd separately), and the atexit cleanup is registered in
    a ``finally`` so a failed write can no longer leak both the open fd and
    the temporary file.
    """
    fd, certificate_file = tempfile.mkstemp(prefix='vistrails_stats_ca_',
                                            suffix='.pem')
    try:
        # fdopen takes ownership of fd; the with-block closes it.
        with os.fdopen(fd, 'wb') as fp:
            fp.write(_ca_certificate)
    finally:
        # Register removal unconditionally so errors above don't leak the file.
        atexit.register(os.remove, certificate_file)
    return certificate_file
# Fallback news payload used by get_server_news() when the server cannot be
# reached and no cached server_news.json exists.
_default_news = {
    'version': '20160304',
    'news_html': None,
    'usage_report_prompt_html':
        u"<p>Please help us by reporting anonymous statistics about how you "
        u"use VisTrails.</p><p>We would like to collect high-level details "
        u"like which packages you use, which features, the size of your "
        u"workflows and version trees, ... This information is reported "
        u"anonymously and will only be used by the VisTrails team, to help "
        u"guide our efforts.</p>",
}
# PEM-encoded CA certificate used to verify TLS connections to
# reprozip-stats.poly.edu (see setup_usage_report / get_server_news).
_ca_certificate = b'''\
-----BEGIN CERTIFICATE-----
MIIDzzCCAregAwIBAgIJAMmlcDnTidBEMA0GCSqGSIb3DQEBCwUAMH4xCzAJBgNV
BAYTAlVTMREwDwYDVQQIDAhOZXcgWW9yazERMA8GA1UEBwwITmV3IFlvcmsxDDAK
BgNVBAoMA05ZVTERMA8GA1UEAwwIUmVwcm9aaXAxKDAmBgkqhkiG9w0BCQEWGXJl
cHJvemlwLWRldkB2Z2MucG9seS5lZHUwHhcNMTQxMTA3MDUxOTA5WhcNMjQxMTA0
MDUxOTA5WjB+MQswCQYDVQQGEwJVUzERMA8GA1UECAwITmV3IFlvcmsxETAPBgNV
BAcMCE5ldyBZb3JrMQwwCgYDVQQKDANOWVUxETAPBgNVBAMMCFJlcHJvWmlwMSgw
JgYJKoZIhvcNAQkBFhlyZXByb3ppcC1kZXZAdmdjLnBvbHkuZWR1MIIBIjANBgkq
hkiG9w0BAQEFAAOCAQ8AMIIBCgKCAQEA1fuTW2snrVji51vGVl9hXAAZbNJ+dxG+
/LOOxZrF2f1RRNy8YWpeCfGbsZqiIEjorBv8lvdd9P+tD3M5sh9L0zQPU9dFvDb+
OOrV0jx59hbK3QcCQju3YFuAtD1lu8TBIPgGEab0eJhLVIX+XU5cYXrfoBmwCpN/
1wXWkUhN91ZVMA0ylATAxTpnoNuMKzfTxT8pyOWajiTskYkKmVBAxgYJQe1YDFA8
fglBNkQuHqP8jgYAniEBCAPZRMMq8WpOtyFx+L9LX9/WcHtAQyDPPb9M81KKgPQq
urtCqtuDKxuqcX9zg4/O8l4nZ50pwaJjbH4kMW/wnLzTPvzZCPtJYQIDAQABo1Aw
TjAdBgNVHQ4EFgQUJjhDDOup4P0cdrAVq1F9ap3yTj8wHwYDVR0jBBgwFoAUJjhD
DOup4P0cdrAVq1F9ap3yTj8wDAYDVR0TBAUwAwEB/zANBgkqhkiG9w0BAQsFAAOC
AQEAeKpTiy2WYPqevHseTCJDIL44zghDJ9w5JmECOhFgPXR9Hl5Nh9S1j4qHBs4G
cn8d1p2+8tgcJpNAysjuSl4/MM6hQNecW0QVqvJDQGPn33bruMB4DYRT5du1Zpz1
YIKRjGU7Of3CycOCbaT50VZHhEd5GS2Lvg41ngxtsE8JKnvPuim92dnCutD0beV+
4TEvoleIi/K4AZWIaekIyqazd0c7eQjgSclNGgePcdbaxIo0u6tmdTYk3RNzo99t
DCfXxuMMg3wo5pbqG+MvTdECaLwt14zWU259z8JX0BoeVG32kHlt2eUpm5PCfxqc
dYuwZmAXksp0T0cWo0DnjJKRGQ==
-----END CERTIFICATE-----
'''
|
VisTrails/VisTrails
|
vistrails/core/reportusage.py
|
Python
|
bsd-3-clause
| 12,618
|
[
"VTK"
] |
68d73abebf4c6c69c6bf8f048be1792fc6af5cdc6139deba2aaee297b8eca5fe
|
from __future__ import print_function
import psi4
from psi4.driver import qcdb
#! A test of the basis specification. Various basis sets are specified outright and in blocks, both
#! orbital and auxiliary. Constructs libmints BasisSet objects through the constructor that calls
#! qcdb.BasisSet infrastructure. Checks that the resulting bases are of the right size and checks
#! that symmetry of the Molecule observes the basis assignment to atoms.
# cc-pvdz aug-cc-pvdz
# BASIS H 5/ 5 C 14/15 H +4/ 4 C +9/10
# RIFIT H 14/15 C 56/66 H +9/10 C +16/20
# JKFIT H 23/25 C 70/81 H +9/10 C +16/20
# Each numbered case below builds a psi4 BasisSet (orbital or auxiliary) and
# checks its size (nbf/nao), the molecule's point-group symmetry after
# per-atom basis assignment, and the basis name/blend strings.
mymol = psi4.geometry("""
C 0.0 0.0 0.0
O 1.4 0.0 0.0
H_r -0.5 -0.7 0.0
H_l -0.5 0.7 0.0
""")

psi4.set_options({'basis': 'cc-pvdz'})

print('[1] <<< uniform cc-pVDZ >>>')
wert = psi4.core.BasisSet.build(mymol, 'BASIS', psi4.core.get_global_option('BASIS'))
psi4.compare_strings('CC-PVDZ', psi4.core.get_global_option('BASIS'), 'name')  #TEST
psi4.compare_integers(38, wert.nbf(), 'nbf()')  #TEST
psi4.compare_integers(40, wert.nao(), 'nao()')  #TEST
psi4.compare_strings('c2v', mymol.schoenflies_symbol(), 'symm')  #TEST
psi4.compare_strings('CC-PVDZ', wert.name(), 'callby')  #TEST
psi4.compare_strings('CC-PVDZ', wert.blend(), 'blend')  #TEST
mymol.print_out()

print('[2] <<< RIFIT (default) >>>')
wert = psi4.core.BasisSet.build(mymol, 'DF_BASIS_MP2', '', 'RIFIT', psi4.core.get_global_option('BASIS'))
psi4.compare_integers(140, wert.nbf(), 'nbf()')  #TEST
psi4.compare_integers(162, wert.nao(), 'nao()')  #TEST
psi4.compare_strings('c2v', mymol.schoenflies_symbol(), 'symm')  #TEST
psi4.compare_strings('(CC-PVDZ AUX)', wert.name(), 'callby')  #TEST
psi4.compare_strings('CC-PVDZ-RI', wert.blend(), 'blend')  #TEST
mymol.print_out()

print('[3] <<< cc-pVDZ w/ aug-cc-pVDZ on C >>>')
psi4.basis_helper("""
assign cc-pvdz
assign c aug-cc-pvdz
""", name='dz_PLUS')
wert = psi4.core.BasisSet.build(mymol, 'BASIS', psi4.core.get_global_option('BASIS'))
psi4.compare_integers(47, wert.nbf(), 'nbf()')  #TEST
psi4.compare_integers(50, wert.nao(), 'nao()')  #TEST
psi4.compare_strings('c2v', mymol.schoenflies_symbol(), 'symm')  #TEST
psi4.compare_strings('DZ_PLUS', wert.name(), 'callby')  #TEST
psi4.compare_strings('AUG-CC-PVDZ + CC-PVDZ', wert.blend(), 'blend')  #TEST
mymol.print_out()

print('[4] <<< RIFIT (default) >>>')
wert = psi4.core.BasisSet.build(mymol, 'DF_BASIS_MP2', '', 'RIFIT', psi4.core.get_global_option('BASIS'))
mymol.print_out()
wert.print_out()
psi4.compare_integers(156, wert.nbf(), 'nbf()')  #TEST
psi4.compare_integers(182, wert.nao(), 'nao()')  #TEST
psi4.compare_strings('c2v', mymol.schoenflies_symbol(), 'symm')  #TEST
psi4.compare_strings('(DZ_PLUS AUX)', wert.name(), 'callby')  #TEST
psi4.compare_strings('AUG-CC-PVDZ-RI + CC-PVDZ-RI', wert.blend(), 'blend')  #TEST
mymol.print_out()

# Assigning a different basis to one symmetry-equivalent H lowers the
# molecular point group from c2v to cs.
print('[5] <<< cc-pVDZ w/ aug-cc-pVDZ on C, H_R >>>')
psi4.basis_helper("""
assign cc-pvdz
assign c aug-cc-pvdz
assign h_r aug-cc-pvdz
""",
                  name='dz_PLUSplus',
                  key='BASis')
wert = psi4.core.BasisSet.build(mymol, 'BASIS', psi4.core.get_global_option('BASIS'))
psi4.compare_strings('DZ_PLUSPLUS', psi4.core.get_global_option('BASIS'), 'name')  #TEST
psi4.compare_integers(51, wert.nbf(), 'nbf()')  #TEST
psi4.compare_integers(54, wert.nao(), 'nao()')  #TEST
psi4.compare_strings('cs', mymol.schoenflies_symbol(), 'symm')  #TEST
psi4.compare_strings('DZ_PLUSPLUS', wert.name(), 'callby')  #TEST
psi4.compare_strings('AUG-CC-PVDZ + CC-PVDZ', wert.blend(), 'blend')  #TEST
mymol.print_out()

print('[6] <<< RIFIT (custom: force cc-pVDZ on H, default on C, O) >>>')
psi4.basis_helper("""
assign h cc-pvdz-ri
""",
                  name='dz_PLUSplusRI',
                  key='df_basis_mp2')
wert = psi4.core.BasisSet.build(mymol, 'DF_BASIS_MP2', psi4.core.get_global_option('DF_BASIS_MP2'), 'RIFIT', psi4.core.get_global_option('BASIS'))
mymol.print_out()
psi4.compare_integers(156, wert.nbf(), 'nbf()')  #TEST
psi4.compare_integers(182, wert.nao(), 'nao()')  #TEST
psi4.compare_strings('cs', mymol.schoenflies_symbol(), 'symm')  #TEST
psi4.compare_strings('DZ_PLUSPLUSRI', wert.name(), 'callby')  #TEST
psi4.compare_strings('AUG-CC-PVDZ-RI + CC-PVDZ-RI', wert.blend(), 'blend')  #TEST
mymol.print_out()

print('[7] <<< cc-pVDZ w/ aug-cc-pVDZ on C, H >>>')
psi4.basis_helper("""
assign cc-pvdz
assign c aug-cc-pvdz
assign h aug-cc-pvdz
""",
                  name = 'dz_PLUSplusplus')
wert = psi4.core.BasisSet.build(mymol, 'BASIS', psi4.core.get_global_option('BASIS'))
psi4.compare_integers(55, wert.nbf(), 'nbf()')  #TEST
psi4.compare_integers(58, wert.nao(), 'nao()')  #TEST
psi4.compare_strings('c2v', mymol.schoenflies_symbol(), 'symm')  #TEST
psi4.compare_strings('DZ_PLUSPLUSPLUS', wert.name(), 'callby')  #TEST
psi4.compare_strings('AUG-CC-PVDZ + CC-PVDZ', wert.blend(), 'blend')  #TEST
mymol.print_out()

print('[8] <<< JKFIT (default) >>>')
wert = psi4.core.BasisSet.build(mymol, 'DF_BASIS_SCF', '', 'JKFIT', psi4.core.get_global_option('BASIS'))
psi4.compare_integers(220, wert.nbf(), 'nbf()')  #TEST
psi4.compare_integers(252, wert.nao(), 'nao()')  #TEST
psi4.compare_strings('c2v', mymol.schoenflies_symbol(), 'symm')  #TEST
psi4.compare_strings('(DZ_PLUSPLUSPLUS AUX)', wert.name(), 'callby')  #TEST
psi4.compare_strings('AUG-CC-PVDZ-JKFIT + CC-PVDZ-JKFIT', wert.blend(), 'blend')  #TEST
mymol.print_out()

psi4.set_options({'basis': 'aug-cc-pvdz'})

print('[9] <<< aug-cc-pVDZ >>>')
wert = psi4.core.BasisSet.build(mymol, 'BASIS', psi4.core.get_global_option('BASIS'))
psi4.compare_integers(64, wert.nbf(), 'nbf()')  #TEST
psi4.compare_integers(68, wert.nao(), 'nao()')  #TEST
psi4.compare_strings('c2v', mymol.schoenflies_symbol(), 'symm')  #TEST
psi4.compare_strings('AUG-CC-PVDZ', wert.name(), 'callby')  #TEST
psi4.compare_strings('AUG-CC-PVDZ', wert.blend(), 'blend')  #TEST
mymol.print_out()

print('[10] <<< JKFIT (default) >>>')
wert = psi4.core.BasisSet.build(mymol, 'DF_BASIS_SCF', '', 'JKFIT', psi4.core.get_global_option('BASIS'))
psi4.compare_integers(236, wert.nbf(), 'nbf()')  #TEST
psi4.compare_integers(272, wert.nao(), 'nao()')  #TEST
psi4.compare_strings('c2v', mymol.schoenflies_symbol(), 'symm')  #TEST
psi4.compare_strings('(AUG-CC-PVDZ AUX)', wert.name(), 'callby')  #TEST
psi4.compare_strings('AUG-CC-PVDZ-JKFIT', wert.blend(), 'blend')  #TEST
mymol.print_out()

# Second molecule with an extra (symmetry-breaking) hydrogen, reusing the
# named basis defined in case [7].
mymol2 = psi4.geometry("""
C 0.0 0.0 0.0
O 1.4 0.0 0.0
H_r -0.5 -0.6 0.3
H_l -0.5 0.6 0.3
H_c -0.5 0.0 0.7
""")
psi4.set_options({'basis': 'dz_plusplusplus'})

print('[11] <<< cc-pVDZ w/ aug-cc-pVDZ on C, H >>>')
wert = psi4.core.BasisSet.build(mymol2, 'BASIS', psi4.core.get_global_option('BASIS'))
psi4.compare_integers(64, wert.nbf(), 'nbf()')  #TEST
psi4.compare_integers(67, wert.nao(), 'nao()')  #TEST
psi4.compare_strings('cs', mymol2.schoenflies_symbol(), 'symm')  #TEST
psi4.compare_strings('DZ_PLUSPLUSPLUS', wert.name(), 'callby')  #TEST
psi4.compare_strings('AUG-CC-PVDZ + CC-PVDZ', wert.blend(), 'blend')  #TEST
mymol2.print_out()

hene = psi4.geometry("""
He
Ne 1 2.0
""")
psi4.basis_helper("""
assign cc-pv5z
""", name='disguised5z')

psi4.core.set_global_option('DF_BASIS_MP2', '')  # clear df_basis_mp2 {...} to get autoaux below

print('[12] <<< cc-pV5Z on HeNe >>>')
wert = psi4.core.BasisSet.build(hene, 'BASIS', psi4.core.get_global_option('BASIS'))
hene.print_out()
psi4.compare_integers(146, wert.nbf(), 'nbf()')  #TEST
psi4.compare_integers(196, wert.nao(), 'nao()')  #TEST
psi4.compare_strings('DISGUISED5Z', wert.name(), 'callby')  #TEST
psi4.compare_strings('CC-PV5Z', wert.blend(), 'blend')  #TEST

print('[13] <<< RI for cc-pV5Z on HeNe >>>')
wert = psi4.core.BasisSet.build(hene, 'DF_BASIS_MP2', '', 'RIFIT', psi4.core.get_global_option('BASIS'))
hene.print_out()
psi4.compare_integers(284, wert.nbf(), 'nbf()')  #TEST
psi4.compare_integers(413, wert.nao(), 'nao()')  #TEST
psi4.compare_strings('(DISGUISED5Z AUX)', wert.name(), 'callby')  #TEST
psi4.compare_strings('CC-PV5Z-RI', wert.blend(), 'blend')  #TEST

# No JKFIT auxiliary exists for He at the 5Z level, so the build must raise.
print('[14] <<< impossible JK for cc-pV5Z on HeNe >>>')
error_tripped = 0
try:
    wert = psi4.core.BasisSet.build(hene, 'DF_BASIS_SCF', '', 'JKFIT', psi4.core.get_global_option('BASIS'))
except qcdb.BasisSetNotFound:
    error_tripped = 1
psi4.compare_integers(1, error_tripped, 'squashed 4z aux for 5z orb')  #TEST

psi4.basis_helper(key='df_basis_scf', name='uggh', block="""
assign he DEF2-QZVPP-JKFIT
""")
hene.print_out()

print('[15] <<< forced JK for cc-pV5Z on HeNe >>>')
wert = psi4.core.BasisSet.build(hene, 'DF_BASIS_SCF', '', 'JKFIT', psi4.core.get_global_option('BASIS'))
psi4.compare_integers(169, wert.nbf(), 'nbf()')  #TEST
psi4.compare_integers(241, wert.nao(), 'nao()')  #TEST
psi4.compare_strings('UGGH', wert.name(), 'callby')  #TEST
psi4.compare_strings('CC-PV5Z-JKFIT + DEF2-QZVPP-JKFIT', wert.blend(), 'blend')  #TEST
|
kratman/psi4public
|
tests/python/mints9/input.py
|
Python
|
gpl-2.0
| 8,970
|
[
"Psi4"
] |
6e5ebbef360a1a6cf2bb4bf50d6d44da19cfe56f43bc5bf83451f1fc46a2bf4a
|
#!/usr/bin/env python
"""
modules for large scale experiment runs includes:
- reading of genome sequence in fasta format
- manual cleaning of genome and annotation files
- feature annotation db for querying details
- creating star genome indicies
Requirement:
STAR aligner: https://github.com/alexdobin/STAR
gfftools: https://github.com/vipints/genomeutils/tree/master/gfftools
Biopython: http://biopython.org
"""
import os
import re
import sys
import shutil
import subprocess
from Bio import SeqIO
from gfftools import helper, GFFParser
def stop_err(msg):
    """
    stop the execution and print out the captured error message.
    """
    sys.stderr.write('{0}\n'.format(msg))
    sys.exit(-1)
def clean_anno_file(chr_names, gtf_file, gtf_out):
    """
    make stable annotation file with valid contig name

    Lines are copied to *gtf_out* when they are fasta headers/comments
    ('#' or '>'), contain no tab (embedded genome sequence), or have a
    first (seqid) column that is a key of *chr_names*.

    Fixes over the original: blank lines are skipped instead of raising
    IndexError on ``line[0]``, and ``except ... as`` syntax is used
    (valid on Python 2.6+ and Python 3).

    @args chr_names: different contig names with a valid genome sequence
    @type chr_names: dict
    @args gtf_file: genome annotation in gtf/gff form
    @type gtf_file: str
    @args gtf_out: new genome annotation in gtf/gff form
    @type gtf_out: str
    """
    # get the filehandler from input file
    try:
        fh = helper.open_file(gtf_file)
    except Exception as errmsg:
        stop_err('error %s in reading file %s' % (errmsg, gtf_file))
    # check the out filehandler
    try:
        outfh = open(gtf_out, "w")
    except Exception as errmsg:
        stop_err('error %s in writing file %s' % (errmsg, gtf_out))
    for line in fh:
        line = line.strip('\n\r')
        ## skip blank lines -- indexing line[0] below would raise IndexError
        if not line:
            continue
        ## preserving the fasta header if present
        if line[0] in ['#', '>']:
            outfh.write(line + '\n')
            continue
        ## preserving the genome sequence if present
        if not re.search('\t', line):
            outfh.write(line + '\n')
            continue
        ## looking for gtf/gff files
        fields = line.split('\t')
        assert len(fields) >= 8, fields
        if fields[0] in chr_names:
            outfh.write(line + '\n')
    fh.close()
    outfh.close()
def read_genome_file(fas_file):
    """
    read genome file in fasta and return the list of chromosomes/contigs

    @args fas_file: genome sequence in fasta file
    @type fas_file: str

    returns a list of (contig_name, length) tuples
    """
    # get the filehandler from input file
    try:
        fh = helper.open_file(fas_file)
    except Exception, errmsg:
        # NOTE(review): 'str' + exception raises TypeError itself; should be
        # str(errmsg) or a %s format -- confirm and fix.
        stop_err('error in reading file '+ errmsg)
    chrom_names = []
    for rec in SeqIO.parse(fh, "fasta"):
        print "parsing contig %s details" % rec.id
        chrom_names.append((rec.id, len(rec.seq)))
    fh.close()
    # return the list with chromosome identifier and its sequence length
    return chrom_names
"""
based on eye inspection, the returned list can be trimmed and
create a dictionary with the best chromosomes , something like:
Take the list 0-15
chr_best = chrom_names[0:15]
change to dict
chr_best = dict(chr_best)
and finally this dictionary can be passed to the genome cleaning
function - clean_genome_file
"""
def clean_genome_file(chr_names, fas_file, fas_out):
    """
    make a stable genome file with valid contigs

    Only records whose identifier appears in *chr_names* are copied.

    @args chr_names: different contig names with a valid genome sequence
    @type chr_names: dict
    @args fas_file: genome sequence in fasta file
    @type fas_file: str
    @args fas_out: new genome sequence file in fasta format
    @type fas_out: str
    """
    # get the filehandler from input file
    try:
        fh = helper.open_file(fas_file)
    except Exception, errmsg:
        # NOTE(review): 'str' + exception raises TypeError; should be str(errmsg).
        stop_err('error in reading file '+ errmsg)
    # check the out filehandler
    try:
        outfh = open(fas_out, "w")
    except Exception, errmsg:
        stop_err('error in writing file '+ errmsg)
    # writing stable contig genome sequence in FASTA format
    for rec in SeqIO.parse(fh, "fasta"):
        if rec.id in chr_names:
            outfh.write(rec.format("fasta"))
            print "writing the contig %s details" % rec.id
    fh.close()
    outfh.close()
def genome_file_rec_extract(chr_pattn, fas_file, fas_out):
    """
    get all contigs based on a matching string in the record identifier

    @args chr_pattn: pattern to be searched in contig names
    @type chr_pattn: str
    @args fas_file: genome sequence in fasta file
    @type fas_file: str
    @args fas_out: new genome sequence file in fasta format
    @type fas_out: str
    """
    # get the filehandler from input file
    try:
        fh = helper.open_file(fas_file)
    except Exception, errmsg:
        # NOTE(review): 'str' + exception raises TypeError; should be str(errmsg).
        stop_err('error in reading file '+ errmsg)
    # check the out filehandler
    try:
        outfh = open(fas_out, "w")
    except Exception, errmsg:
        stop_err('error in writing file '+ errmsg)
    # writing stable contig genome sequence in FASTA format
    for rec in SeqIO.parse(fh, "fasta"):
        if re.search(chr_pattn, rec.id):
            outfh.write(rec.format("fasta"))
            print "writing the contig %s details" % rec.id
    fh.close()
    outfh.close()
def make_anno_db(gff_file):
    """
    extract the features from a gtf/gff file and store efficiently to query

    Returns a dict with min/max intron and exon lengths observed over all
    multi-exon transcripts ('min_intron', 'max_intron', 'min_exon',
    'max_exon'); exits the process when no intron could be collected.

    @args gff_file: genome annotation file
    @type gff_file: str
    """
    gff_cont = GFFParser.Parse(gff_file)
    # Observed lengths used as set-like dicts (value is always 1).
    intron_size = dict()
    exon_size = dict()
    for rec in gff_cont:
        for idx, tid in enumerate(rec['transcripts']):
            if not rec['exons'][idx].any():
                continue
            try: # (Pdb) rec['exons'][0] -> array(nan)
                # NOTE(review): importing numpy inside the loop is repeated
                # work (harmless, module cache) and the bare except hides
                # real errors -- consider hoisting/narrowing.
                import numpy as np
                if np.isnan(rec['exons'][idx]):
                    continue
            except:
                pass
            try:
                exon_cnt = len(rec['exons'][idx])
            except:
                continue
            if exon_cnt > 1:
                intron_start = 0
                for xq, excod in enumerate(rec['exons'][idx]):
                    if xq > 0:
                        #print intron_start, excod[0]-1
                        # Adjacent exons (gap of 1) contribute no intron.
                        if excod[0]-intron_start==1:
                            intron_start = excod[1]+1
                            exon_size[intron_start-excod[0]] = 1
                            continue
                        intron_size[excod[0]-intron_start] = 1
                        #print excod[0]-intron_start
                    intron_start = excod[1]+1
                    exon_size[intron_start-excod[0]] = 1
                    #print intron_start-excod[0]
    feat_db = dict()
    if intron_size:
        keys_int = sorted(intron_size)
        keys_ex = sorted(exon_size)
        #print 'MaxIntronLength %d %d %d' %(keys_int[-1], keys_int[-2], keys_int[-3])
        # NOTE(review): [-3] (third-largest, outlier trimming presumably)
        # raises IndexError with fewer than 3 distinct sizes -- confirm.
        feat_db['min_intron'] = int(keys_int[0])
        feat_db['max_intron'] = int(keys_int[-3])
        feat_db['min_exon'] = int(keys_ex[0])
        feat_db['max_exon'] = int(keys_ex[-3])
        #print 'MaxExonLength %d %d %d' %(keys_ex[-1], keys_ex[-2], keys_ex[-3])
        return feat_db
    else:
        print "Error in feature mapping in file %s, please check the source of parent child features" % gff_file
        sys.exit(-1)
def create_star_genome_index(fasta_file, out_dir, genome_anno=None, num_workers=1, onematelength=100):
    """
    Creating STAR genome index with or without using genome annotation

    @args fasta_file: reference genome sequence file .fasta format
    @type fasta_file: str
    @args out_dir: genome index binary file storage place
    @type out_dir: str
    @args genome_anno: genome annotation file (optional)
    @type genome_anno: str
    @args num_workers: number of threads to run (default value = 1)
    @type num_workers: int
    @args onematelength: One Mate Length (default value=100)
    @type onematelength: int
    """
    ## probe for the STAR executable; the spawn fails if it is not on $PATH
    try:
        subprocess.call(["STAR"], stdout=subprocess.PIPE, stderr=subprocess.PIPE)
    except Exception:
        exit("Please make sure that the `STAR` binary is in your $PATH")

    file_prefx, ext = os.path.splitext(fasta_file)
    if ext in [".bz2", ".gz", ".lzma"]:  ## STAR cannot index a compressed FASTA
        exit("error: STAR - Generating genome indexes - recommended to use the uncompressed FASTA file %s." % fasta_file)

    if not genome_anno:
        cli_cmd = 'STAR --runMode genomeGenerate --genomeDir %s --genomeFastaFiles %s --runThreadN %d' \
            % (out_dir, fasta_file, num_workers)
    else:
        file_prefx, ext = os.path.splitext(genome_anno)
        if ext in [".bz2", ".gz", ".lzma"]:
            exit("error: STAR - Generating genome indexes - recommended to use the uncompressed GTF/GFF file %s." % genome_anno)

        ## detect GTF vs GFF from the first feature row of the annotation.
        ## ftype now defaults to False so an annotation without any feature
        ## rows no longer raises NameError further down (previous behaviour).
        ftype = False
        gff_hand = helper.open_file(genome_anno)
        for rec in gff_hand:
            rec = rec.strip('\n\r')
            # skip empty line fasta identifier and commented line
            if not rec or rec[0] in ['#', '>']:
                continue
            # skip the genome sequence
            if not re.search('\t', rec):
                continue
            parts = rec.split('\t')
            assert len(parts) >= 8, rec
            ftype, tags = GFFParser.attribute_tags(parts[-1])
            break
        gff_hand.close()

        ## pick the exon parent tag according to the detected file type
        if ftype:
            cli_cmd = 'STAR --runMode genomeGenerate --genomeDir %s --genomeFastaFiles %s --runThreadN %d ' \
                '--sjdbGTFfile %s --sjdbGTFtagExonParentTranscript Parent --sjdbOverhang %d' \
                % (out_dir, fasta_file, num_workers, genome_anno, onematelength)
        else:
            cli_cmd = 'STAR --runMode genomeGenerate --genomeDir %s --genomeFastaFiles %s --runThreadN %d ' \
                '--sjdbGTFfile %s --sjdbGTFfeatureExon exon --sjdbOverhang %d' \
                % (out_dir, fasta_file, num_workers, genome_anno, onematelength)

    ## create the output path if it does not exist; otherwise remove any
    ## stale index files left behind by a previous run
    if not os.path.exists(out_dir):
        try:
            os.makedirs(out_dir)
        except OSError:
            exit("error: cannot create the directory %s." % out_dir)
    else:
        for the_file in os.listdir(out_dir):
            file_path = os.path.join(out_dir, the_file)
            try:
                if os.path.isfile(file_path):
                    os.unlink(file_path)
                elif os.path.isdir(file_path):
                    shutil.rmtree(file_path)
            except Exception as e:
                print(e)

    ## start the indexing job
    sys.stdout.write('\trunning program as: %s \n' % cli_cmd)
    try:
        ## STAR writes its working files to the current directory
        os.chdir(out_dir)
        process = subprocess.Popen(cli_cmd, shell=True)
        returncode = process.wait()
        if returncode != 0:
            raise Exception("return code = %i" % returncode)
        print("\nGenome index files are stored at %s\n" % out_dir)
    except Exception as e:
        exit('Error running STAR.\n%s' % str(e))
def parse_list(line, nb_elts):
    """
    specific to BWA-MEM stderr format

    Extract the last *nb_elts* numbers from a line such as
    "[M::mem_pestat] ... (25, 50, 75)" and return them as ints.

    @args line: one line of BWA-MEM stderr output
    @type line: str
    @args nb_elts: how many trailing numbers to extract
    @type nb_elts: int
    """
    ## drop the comma separators, keep the last nb_elts whitespace tokens
    ## and strip the surrounding parentheses
    tokens = line.strip().replace(',', '').split()[-nb_elts:]
    inner = ' '.join(tokens)[1:-1]
    ## int(float(x)) accepts "250" as well as "250.5"-style values; an
    ## explicit list (not a lazy map object) is returned on py2 and py3 alike
    return [int(float(x)) for x in inner.split()]
def calculate_insert_size_fastq(ref_genome, fastq_dir, fq_files):
    """
    calculate the library insert size from raw read files and reference genome sequence

    Runs `bwa mem` on the first mate pair and parses the insert-size
    statistics (pair counts, mean, std.dev per orientation FF/FR/RF/RR)
    that BWA prints on stderr, reporting the most likely orientation.

    @args ref_genome: genome sequence file
    @type ref_genome: str
    @args fastq_dir: fastq directory for the experiment
    @type fastq_dir: str
    @args fq_files: fastq file names in python list
    @type fq_files: list
    """
    ## probe for the bwa executable; the spawn fails if it is not on $PATH
    try:
        subprocess.call(["bwa"], stdout=subprocess.PIPE, stderr=subprocess.PIPE)
    except:
        exit("Please make sure that the `bwa` binary is in your $PATH")

    ## check for the bwa index (bwa index produces a .sa file among others)
    if not os.path.isfile("%s.sa" % ref_genome):
        try:
            cmd_idx = "bwa index %s" % ref_genome

            sys.stdout.write("bwa indexing starting...\n")
            process = subprocess.Popen(cmd_idx, shell=True)
            returncode = process.wait()
            if returncode !=0:
                raise Exception, "Exit status return code = %i" % returncode
            sys.stdout.write("bwa indexing finished\n")
        except Exception, e:
            exit('Error running bwa index.\n%s' % str( e ))

    ##adapted from:
    ##Quickly estimates insert sizes of read datasets, given some sequence(s) they can be mapped to.
    ##Author: Rayan Chikhi
    ##example:
    ## estimate-insert-sizes contigs.fa readsA_1.fq readsA_2.fq readsB_1.fq readsB_2.fq
    if len(fq_files) > 1:
        nb_cpu = 1
        ## decompression command keyed by file extension
        ## NOTE(review): an uncompressed .fastq input would raise KeyError
        ## below - confirm the inputs are always .gz/.bz2 compressed
        zip_type = {".gz" : "gzip -c", ".bz2" : "bzip2 -d -c"}

        file_prefx, ext = os.path.splitext(fq_files[0])

        stats = dict()
        read1 = "%s/%s" % (fastq_dir, fq_files[0])
        read2 = "%s/%s" % (fastq_dir, fq_files[1])
        print("processing : \n %s \n %s " % (read1, read2))
        ## NOTE(review): "<( ... )" process substitution is a bash feature;
        ## with an argv list and the default shell=False, bwa receives these
        ## as literal file names - verify how this command actually runs
        cmd = ["bwa", "mem"] + ["-t %d" % nb_cpu, ref_genome] + ["<(%s %s)" % (zip_type[ext], read1)] \
            +["<(%s %s)" % ( zip_type[ext], read2)]

        DEVNULL = open(os.devnull, 'wb')
        process = subprocess.Popen(cmd, stdout=DEVNULL, stderr=subprocess.PIPE)
        ## stream bwa's stderr line by line and pick out the pestat records
        seen_candidate_line = False
        while True:
            line = process.stderr.readline()
            if line == '' and process.poll() != None:
                break
            if "worker" in line:
                break
            if "pestat" not in line:
                continue
            if "candidate unique pairs for" in line:
                ## first batch of statistics is enough - stop on a repeat
                if seen_candidate_line:
                    break
                seen_candidate_line = True
                nb_pairs = parse_list(line,4)
                for i in xrange(4):
                    stats[['FF', 'FR', 'RF', 'RR'][i]] = { 'nb_pairs' : nb_pairs[i] }
            if "orientation" in line:
                ## remember which orientation the following mean/stdev refer to
                orientation = line.strip().split()[-1].replace('.','')
            if "mem_pestat] mean and std.dev:" in line:
                mean, stdev = parse_list(line,2)
                stats[orientation]['mean'] = mean
                stats[orientation]['stdev'] = stdev
                if orientation == 'RR':
                    # stats are complete
                    break
            sys.stdout.write(line)
            sys.stdout.flush()
        ## we only needed the statistics preamble - stop the mapping job
        if process.poll() is None:
            process.terminate()

        ## the orientation with the most candidate pairs wins
        results = sorted(stats.items(), key = lambda x: x[1]['nb_pairs'], reverse=True)
        most_likely = results[0]
        mean = most_likely[1]['mean']
        stdev = most_likely[1]['stdev']
        print "Orientation", most_likely[0], "mean", mean, "stdev", stdev
if __name__ == "__main__":
    ## print the module usage text when executed directly; the function
    ## form is identical on Python 2 and also valid on Python 3
    print(__doc__)
|
vipints/genomeutils
|
fetch_remote_data/prepare_data.py
|
Python
|
bsd-3-clause
| 15,195
|
[
"BWA",
"Biopython"
] |
51ac0fb0ed692f6e7fc0ae882fe9ff6e7492eecc5430ad614efcb9e0429469fe
|
#!/usr/bin/env python
# -*- coding: utf-8 -*-
#
# shapes.py
#
# Copyright 2015 Carlos Eduardo Sequeiros Borja <casebor@gmail.com>
#
# This program is free software; you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation; either version 2 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program; if not, write to the Free Software
# Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston,
# MA 02110-1301, USA.
#
#
import math
from optparse import OptionParser
"""
##############################################################################
# #
# This script increment the X position of an Atom a determined quantity #
# #
##############################################################################
"""
def amberAtomType(atom):
    """Return True when *atom* is one of the recognised Amber element symbols.

    The match is exact and case-sensitive ('C' is accepted, 'c' is not).
    A much longer historical list of Amber atom types is kept in the file
    history; only these element symbols are checked here.
    """
    recognised = {
        'C', 'O', 'N', 'S', 'P', 'MG', 'F', 'Ar',
        'CL', 'NA', 'H', 'BR', 'CA', 'ZN',
    }
    return atom in recognised
def isANearB(elemA, aX, aY, aZ, radA, cadPdb):
    """Return True if any PDB line mentioning *elemA* is closer than *radA*
    to the point (aX, aY, aZ).

    *cadPdb* is a string of newline-separated PDB ATOM records whose
    whitespace-separated fields 5..7 hold the x/y/z coordinates.  The
    element test is a plain substring match on the whole line.
    """
    for record in cadPdb.split('\n'):
        if elemA not in record:
            continue
        fields = record.split()
        px = float(fields[5])
        py = float(fields[6])
        pz = float(fields[7])
        dist = ((aX - px)**2 + (aY - py)**2 + (aZ - pz)**2)**0.5
        if dist < radA:
            return True
    return False
def complexOK():
    """Validate the --complex option ('E1-E2-E3-E4' of valid Amber atoms).

    Returns False when the option is too short to be a complex spec,
    True when it parses as exactly four valid Amber atoms; otherwise
    reports the problem via parser.error() (which exits).

    Relies on the module-globals `options` and `parser` set in __main__.
    """
    if len(options.complexF) < 7:
        return False
    else:
        elems = options.complexF.split('-')
        ## `!=` replaces the Python-2-only `<>` operator, and `parser`
        ## fixes the NameError previously caused by the undefined `parse`
        if len(elems) != 4:
            parser.error('Option complex must have 4 elements, and they must be valid Amber ATOMS!!!')
            quit()
        else:
            for i in elems:
                if not amberAtomType(i):
                    parser.error('Option complex must have 4 elements, and they must be valid Amber ATOMS!!!')
                    quit()
            return True
def add_tapa():
    """Build the PDB lines of the disc that caps the shape ("tapa" = lid).

    Atoms are laid out as a centre atom plus concentric rings (3.6 A
    apart) at axial position x = -3.6, out to the radius options.Ri.
    Uses and advances the module-global atom counter AT_POS; returns the
    accumulated ATOM records as one string.
    """
    global AT_POS
    cad = ''
    i = -3.6
    # number of rings that fit inside the requested radius
    temp = int(options.Ri/3.6)
    #AT_POS = 1
    # centre atom of the lid
    cad += 'ATOM %5d %4s %3s %5d %8.3f%8.3f%8.3f 1.00 0.00\n' %(AT_POS, options.elem, options.elem, AT_POS, i, 0.0, 0.0)
    AT_POS += 1
    for t in range(1, temp+1):
        rad = 3.6 * t
        perim = 2*math.pi*rad
        # spacing adjusted so an integer number of atoms fits on the ring
        razon = perim/(int(perim/3.6))
        ang = razon/rad
        # NOTE(review): 'temp' is reused here as the running angle while
        # 'ang' holds the angular step - names are swapped vs. main()
        temp = razon/rad
        while temp <= (2*math.pi)+0.1:
            j = rad*math.cos(temp)
            k = rad*math.sin(temp)
            cad += 'ATOM %5d %4s %3s %5d %8.3f%8.3f%8.3f 1.00 0.00\n' %(AT_POS, options.elem, options.elem, AT_POS, i, j, k)
            AT_POS += 1
            temp += ang
    return cad
def main():
    """Generate the requested shape (CONE, TUBE or FUNNEL) as PDB ATOM
    records and write them to the output file.

    Reads everything from the module-global `options` (set in __main__)
    and uses/advances the global atom counter AT_POS.  Atoms are placed
    ring by ring along the x axis; an optional second element
    (options.elem2, or the --complex quadruple) is inserted wherever no
    atom of that element is already within options.interval.  Returns 0.
    """
    global AT_POS
    AT_POS = 1
    # start with the circular lid at the shape entrance
    cad = add_tapa()
    dAtAt = 3.55     # axial distance between consecutive rings
    dAtAt2 = 3.55    # shrinking axial step used by the CONE branch
    cont = 1
    i = 0.0          # current position along the shape (x) axis
    arco = 3.6       # target arc length between neighbouring ring atoms
    if options.shapeS.upper() == 'CONE':
        while (i<options.TxC):
            # one ring of radius options.Ri at axial position i
            perim = 2*math.pi*options.Ri
            razon = perim/(int(perim/arco))
            ang = razon/options.Ri
            temp = ang
            while ang <= (2*math.pi)+0.1:
                j = options.Ri*math.cos(ang)
                k = options.Ri*math.sin(ang)
                ang += temp
                if len(options.elem2)>0 and not isANearB(options.elem2, i, j, k, options.interval, cad):
                    cad += 'ATOM %5d %4s %3s %5d %8.3f%8.3f%8.3f 1.00 0.00\n' %(AT_POS, options.elem2, options.elem2, AT_POS, i, j, k)
                else:
                    cad += 'ATOM %5d %4s %3s %5d %8.3f%8.3f%8.3f 1.00 0.00\n' %(AT_POS, options.elem, options.elem, AT_POS, i, j, k)
                AT_POS += 1
            # every third ring the axial step shrinks a little
            if cont%3 == 0:
                dAtAt2 -= 0.2
            cont += 1
            i += dAtAt2
            # widen the radius as the cone opens
            options.Ri = options.Ri+1.3*math.log10(options.Ri)**2
    elif options.shapeS.upper() == 'TUBE':
        while (i<options.TxT):
            # constant-radius rings along the tube length
            perim = 2*math.pi*options.Ri
            razon = perim/(int(perim/arco))
            ang = razon/options.Ri
            temp = ang
            while ang < (2*math.pi)+0.1:
                j = options.Ri*math.cos(ang)
                k = options.Ri*math.sin(ang)
                ang += temp
                if len(options.elem2)>0 and not isANearB(options.elem2, i, j, k, options.interval, cad):
                    cad += 'ATOM %5d %4s %3s %5d %8.3f%8.3f%8.3f 1.00 0.00\n' %(AT_POS, options.elem2, options.elem2, AT_POS, i, j, k)
                else:
                    cad += 'ATOM %5d %4s %3s %5d %8.3f%8.3f%8.3f 1.00 0.00\n' %(AT_POS, options.elem, options.elem, AT_POS, i, j, k)
                AT_POS += 1
            i += dAtAt
    elif options.shapeS.upper() == 'FUNNEL' and not complexOK():
        # single-element-pair funnel: straight tube part first (i <= TxT),
        # then the widening cone part
        while (i<options.TxT+options.TxC):
            if i<=options.TxT:
                perim = 2*math.pi*options.Ri
                razon = perim/(int(perim/arco))
                ang = razon/options.Ri
                temp = ang
                while ang < (2*math.pi)+0.1:
                    j = options.Ri*math.cos(ang)
                    k = options.Ri*math.sin(ang)
                    ang += temp
                    if len(options.elem2)>0 and not isANearB(options.elem2, i, j, k, options.interval, cad):
                        cad += 'ATOM %5d %4s %3s %5d %8.3f%8.3f%8.3f 1.00 0.00\n' %(AT_POS, options.elem2, options.elem2, AT_POS, i, j, k)
                    else:
                        cad += 'ATOM %5d %4s %3s %5d %8.3f%8.3f%8.3f 1.00 0.00\n' %(AT_POS, options.elem, options.elem, AT_POS, i, j, k)
                    AT_POS += 1
            else:
                perim = 2*math.pi*options.Ri
                razon = perim/(int(perim/arco))
                ang = razon/options.Ri
                temp = ang
                while ang <= (2*math.pi)+0.1:
                    j = options.Ri*math.cos(ang)
                    k = options.Ri*math.sin(ang)
                    ang += temp
                    if len(options.elem2)>0 and not isANearB(options.elem2, i, j, k, options.interval, cad):
                        cad += 'ATOM %5d %4s %3s %5d %8.3f%8.3f%8.3f 1.00 0.00\n' %(AT_POS, options.elem2, options.elem2, AT_POS, i, j, k)
                    else:
                        cad += 'ATOM %5d %4s %3s %5d %8.3f%8.3f%8.3f 1.00 0.00\n' %(AT_POS, options.elem, options.elem, AT_POS, i, j, k)
                    AT_POS += 1
                # cone section: grow the radius between rings
                options.Ri += 1.3*math.log10(options.Ri)**2
            i += dAtAt
    elif options.shapeS.upper() == 'FUNNEL' and complexOK():
        # funnel with distinct element pairs for the tube and cone parts
        elems = options.complexF.split('-')
        elem1 = elems[0]
        elem2 = elems[1]
        elem3 = elems[2]
        elem4 = elems[3]
        while (i<options.TxT+options.TxC):
            if i<=options.TxT:
                perim = 2*math.pi*options.Ri
                razon = perim/(int(perim/arco))
                ang = razon/options.Ri
                temp = ang
                while ang < (2*math.pi)+0.1:
                    j = options.Ri*math.cos(ang)
                    k = options.Ri*math.sin(ang)
                    ang += temp
                    if not isANearB(elem2, i, j, k, options.interval, cad):
                        cad += 'ATOM %5d %4s %3s %5d %8.3f%8.3f%8.3f 1.00 0.00\n' %(AT_POS, elem2, elem2, AT_POS, i, j, k)
                    else:
                        cad += 'ATOM %5d %4s %3s %5d %8.3f%8.3f%8.3f 1.00 0.00\n' %(AT_POS, elem1, elem1, AT_POS, i, j, k)
                    AT_POS += 1
            else:
                perim = 2*math.pi*options.Ri
                razon = perim/(int(perim/arco))
                ang = razon/options.Ri
                temp = ang
                while ang <= (2*math.pi)+0.1:
                    j = options.Ri*math.cos(ang)
                    k = options.Ri*math.sin(ang)
                    ang += temp
                    if not isANearB(elem4, i, j, k, options.interval, cad):
                        cad += 'ATOM %5d %4s %3s %5d %8.3f%8.3f%8.3f 1.00 0.00\n' %(AT_POS, elem4, elem4, AT_POS, i, j, k)
                    else:
                        cad += 'ATOM %5d %4s %3s %5d %8.3f%8.3f%8.3f 1.00 0.00\n' %(AT_POS, elem3, elem3, AT_POS, i, j, k)
                    AT_POS += 1
                # NOTE(review): this branch widens linearly (+3.6) and
                # shortens the axial step instead of the log growth above
                #options.Ri += 1.3*math.log10(options.Ri)**2
                dAtAt -= 0.2
                options.Ri += 3.6
            i += dAtAt
    else:
        print 'Error in shape type'
        parser.print_help()
        quit()
    # write the accumulated ATOM records to the chosen (or derived) file
    if len(options.outFile) > 0:
        arch = open(options.outFile, 'w')
    else:
        # default name: SHAPE-ELEM[-ELEM2].pdb or SHAPE-E1-E2-E3-E4.pdb
        if not complexOK():
            if len(options.elem2)>0:
                outF = options.shapeS + '-' + options.elem + '-' + options.elem2 + '.pdb'
            else:
                outF = options.shapeS + '-' + options.elem + '.pdb'
        else:
            elems = options.complexF.split('-')
            outF = options.shapeS + '-' + elems[0] + '-' + elems[1] + '-' + elems[2] + '-' + elems[3] + '.pdb'
        arch = open(outF, 'w')
    arch.write(cad)
    arch.close()
    return 0
if __name__ == '__main__':
    ## command-line interface; all handlers above read the module-globals
    ## `options` and `parser` created here
    usage = 'usage: \"%prog [options] args\" or \"%prog\"'
    parser = OptionParser(usage)
    parser.add_option('-o', '--out', action='store', type='string', dest='outFile', help='Defines the name of the output pdb file. Default is the SHAPE+_+ELEMENT+.pdb', default='')
    parser.add_option('-w', '--with', action='store', type='string', dest='elem2', help='Use this option to add other ATOM in your shape in regular intervals. You can change the interval with -i or --interval', default='')
    parser.add_option('-s', '--shape', action='store', type='string', dest='shapeS', help='Defines the shape of the object to be created. Values admitted are CONE, TUBE or FUNNEL. Default is CONE', default='CONE')
    parser.add_option('-r', '--radius', action='store', type='float', dest='Ri', help='This option set the radius of the desired shape in ANGSTROMS. Default is 10.0 A', default='10.0')
    parser.add_option('-e', '--element', action='store', type='string', dest='elem', help='Is the element used to make the structure', default='Ar')
    parser.add_option('-c', '--complex', action='store', type='string', dest='complexF', help='Use this option ONLY if you want to make your FUNNEL with different elements on the TUBE part than those in the CONE part. The syntax is: elem1TUBE-elem2TUBE-elem1CONE-elem2CONE, note that you need to use the \'-\' between elements. You can change the interval with -i or --interval', default='')
    parser.add_option('-d', '--distcone', action='store', type='float', dest='TxC', help='If you have chosen the cone shape, this option set the HEIGHT of the cone in ANGSTROMS; if you have chosen the funnel shape, this set the HEIGHT part of the funnel. Default is 40.0 A', default='40.0')
    parser.add_option('-i', '--interval', action='store', type='float', dest='interval', help='Set the interval for the addition of other ATOMS. Default 6.0A. Use this option only if you use -w, --with, -c or --complex!!!', default='6.0')
    parser.add_option('-l', '--disttunnel', action='store', type='float', dest='TxT', help='If you have chosen the tunnel shape, this option ser the LENGTH of the tunnel in ANGSTROMS; if you have chosen the funnel shape, this set the LENGTH of the tunnel part of the funnel. Default is 40.0 A', default='40.0')
    (options, args) = parser.parse_args()

    ## option sanity checks.  parser.error() prints the message and exits
    ## (SystemExit), so no extra exit/quit calls are needed afterwards.
    ## `parser` also fixes the NameError from the previously undefined
    ## `parse`, and the stray no-op bare `exit` has been removed.
    if (options.TxT<0) or (options.TxC<0) or (options.Ri<0):
        parser.error('Options -l, -d and -r must be positive or 0!!!')
    if not amberAtomType(options.elem):
        parser.error('Option element must be a valid Amber ATOM!!!')
    if len(options.elem2)>0 and not amberAtomType(options.elem2):
        parser.error('Option with must be a valid Amber ATOM!!!')
    main()
|
casebor/labioscripts
|
python/shapes_tapa.py
|
Python
|
gpl-3.0
| 10,847
|
[
"Amber"
] |
f3b28716bd3e807e6b0d3a1084800076584283a0c67b59e0db19a9887eebf09f
|
# -*- coding: utf-8 -*-
"""Tests for :mod:`pybel.manager`."""
|
pybel/pybel
|
tests/test_manager/__init__.py
|
Python
|
mit
| 63
|
[
"Pybel"
] |
c559c00364647dcb3fce6dde582359d525e79dc9e80c70d172fb19aedbf0b35a
|
########################################################################
# $Id$
########################################################################
""" FileManagerBase is a base class for all the specific File Managers
"""
__RCSID__ = "$Id$"
from DIRAC import S_OK, S_ERROR, gLogger
from DIRAC.Core.Utilities.List import intListToString
from DIRAC.Core.Utilities.Pfn import pfnparse, pfnunparse
import os
import stat
class FileManagerBase( object ):
def __init__( self, database = None ):
self.db = database
self.statusDict = {}
def _getConnection( self, connection ):
if connection:
return connection
res = self.db._getConnection()
if res['OK']:
return res['Value']
gLogger.warn( "Failed to get MySQL connection", res['Message'] )
return connection
def setDatabase( self, database ):
self.db = database
  def getFileCounters( self, connection = False ):
    """ Get a number of counters to verify the sanity of the Files in the catalog

        :param connection : optional existing DB connection
        :return : S_OK( dict ) with per-check counts, or the first failed
                  query result for the mandatory checks below
    """
    connection = self._getConnection( connection )
    resultDict = {}
    ## total number of catalogued files
    req = "SELECT COUNT(*) FROM FC_Files;"
    res = self.db._query( req, connection )
    if not res['OK']:
      return res
    resultDict['Files'] = res['Value'][0][0]

    ## files that have no replica registered at all
    req = "SELECT COUNT(FileID) FROM FC_Files WHERE FileID NOT IN ( SELECT FileID FROM FC_Replicas )"
    res = self.db._query( req, connection )
    if not res['OK']:
      return res
    resultDict['Files w/o Replicas'] = res['Value'][0][0]

    ## replicas whose file record has disappeared
    req = "SELECT COUNT(RepID) FROM FC_Replicas WHERE FileID NOT IN ( SELECT FileID FROM FC_Files )"
    res = self.db._query( req, connection )
    if not res['OK']:
      return res
    resultDict['Replicas w/o Files'] = res['Value'][0][0]

    ## files pointing at a directory missing from the directory tree
    treeTable = self.db.dtree.getTreeTable()
    req = "SELECT COUNT(FileID) FROM FC_Files WHERE DirID NOT IN ( SELECT DirID FROM %s)" % treeTable
    res = self.db._query( req, connection )
    if not res['OK']:
      return res
    resultDict['Orphan Files'] = res['Value'][0][0]

    ## FC_FileInfo cross-checks: a query failure here is deliberately
    ## reported as a zero count instead of aborting (unlike the checks above)
    req = "SELECT COUNT(FileID) FROM FC_Files WHERE FileID NOT IN ( SELECT FileID FROM FC_FileInfo)"
    res = self.db._query( req, connection )
    if not res['OK']:
      resultDict['Files w/o FileInfo'] = 0
    else:
      resultDict['Files w/o FileInfo'] = res['Value'][0][0]

    req = "SELECT COUNT(FileID) FROM FC_FileInfo WHERE FileID NOT IN ( SELECT FileID FROM FC_Files)"
    res = self.db._query( req, connection )
    if not res['OK']:
      resultDict['FileInfo w/o Files'] = 0
    else:
      resultDict['FileInfo w/o Files'] = res['Value'][0][0]
    return S_OK( resultDict )
def getReplicaCounters( self, connection = False ):
""" Get a number of counters to verify the sanity of the Replicas in the catalog
"""
connection = self._getConnection( connection )
req = "SELECT COUNT(*) FROM FC_Replicas;"
res = self.db._query( req, connection )
if not res['OK']:
return res
return S_OK( {'Replicas':res['Value'][0][0]} )
######################################################
#
# File write methods
#
  ## NOTE(review): the one-line summaries below are inferred from the method
  ## names/signatures and their call sites in this class - confirm against
  ## the concrete manager implementations.
  def _insertFiles( self, lfns, uid, gid, connection = False ):
    """Insert the prepared *lfns* records as files owned by (uid, gid).
       To be implemented on derived class.
    """
    return S_ERROR( "To be implemented on derived class" )

  def _deleteFiles( self, toPurge, connection = False ):
    """Delete the files whose IDs are listed in *toPurge*.
       To be implemented on derived class.
    """
    return S_ERROR( "To be implemented on derived class" )

  def _insertReplicas( self, lfns, master = False, connection = False ):
    """Register replicas for *lfns*; *master* flags the primary replica.
       To be implemented on derived class.
    """
    return S_ERROR( "To be implemented on derived class" )

  def _findFiles( self, lfns, metadata = ["FileID"], allStatus = False, connection = False ):
    """Look up catalog entries for *lfns*, returning the requested *metadata*.
       To be implemented on derived class.
    """
    return S_ERROR( "To be implemented on derived class" )

  def _getFileReplicas( self, fileIDs, fields_input = ['PFN'], allStatus = False, connection = False ):
    """Fetch replica information (the *fields_input* columns) for *fileIDs*.
       To be implemented on derived class.
    """
    return S_ERROR( "To be implemented on derived class" )

  def _getFileIDFromGUID( self, guid, connection = False ):
    """Resolve a file ID from its GUID.
       To be implemented on derived class.
    """
    return S_ERROR( "To be implemented on derived class" )

  def getLFNForGUID( self, guids, connection = False ):
    """Returns the LFN matching a given GUID
       To be implemented on derived class.
    """
    return S_ERROR( "To be implemented on derived class" )

  def _setFileParameter( self, fileID, paramName, paramValue, connection = False ):
    """Set a single metadata parameter of a file.
       To be implemented on derived class.
    """
    return S_ERROR( "To be implemented on derived class" )

  def _deleteReplicas( self, lfns, connection = False ):
    """Remove the replicas of *lfns*.
       To be implemented on derived class.
    """
    return S_ERROR( "To be implemented on derived class" )

  def _setReplicaStatus( self, fileID, se, status, connection = False ):
    """Update the status of the replica of *fileID* at storage element *se*.
       To be implemented on derived class.
    """
    return S_ERROR( "To be implemented on derived class" )

  def _setReplicaHost( self, fileID, se, newSE, connection = False ):
    """Move the replica record of *fileID* from *se* to *newSE*.
       To be implemented on derived class.
    """
    return S_ERROR( "To be implemented on derived class" )

  def _getDirectoryFiles( self, dirID, fileNames, metadata, allStatus = False, connection = False ):
    """Fetch *metadata* for the given *fileNames* inside directory *dirID*.
       To be implemented on derived class.
    """
    return S_ERROR( "To be implemented on derived class" )

  def _getDirectoryFileIDs( self, dirID, requestString = False ):
    """Return the IDs of all files in directory *dirID*.
       To be implemented on derived class.
    """
    return S_ERROR( "To be implemented on derived class" )

  def _findFileIDs( self, lfns, connection=False ):
    """ To be implemented on derived class
        Should return following the successful/failed convention
        Successful is a dictionary with keys the lfn, and values the FileID"""
    return S_ERROR( "To be implemented on derived class" )

  def _getDirectoryReplicas( self, dirID, allStatus = False, connection = False ):
    """ To be implemented on derived class
        Should return with only one value, being a list of all the replicas (FileName,FileID,SEID,PFN)
    """
    return S_ERROR( "To be implemented on derived class" )

  def countFilesInDir( self, dirId ):
    """ Count how many files there is in a given Directory

        :param dirID : directory id
        :returns S_OK(value) or S_ERROR
    """
    return S_ERROR( "To be implemented on derived class" )
def _getFileLFNs(self,fileIDs):
""" Get the file LFNs for a given list of file IDs
"""
stringIDs = intListToString(fileIDs)
treeTable = self.db.dtree.getTreeTable()
req = "SELECT F.FileID, CONCAT(D.DirName,'/',F.FileName) from FC_Files as F, %s as D WHERE F.FileID IN ( %s ) AND F.DirID=D.DirID" % (treeTable,stringIDs)
result = self.db._query(req)
if not result['OK']:
return result
fileNameDict = {}
for row in result['Value']:
fileNameDict[row[0]] = row[1]
failed = {}
successful = fileNameDict
if len(fileNameDict) != len(fileIDs):
for id_ in fileIDs:
if not id_ in fileNameDict:
failed[id_] = "File ID not found"
return S_OK({'Successful':successful,'Failed':failed})
def addFile( self, lfns, credDict, connection = False ):
""" Add files to the catalog
:param lfns : dict { lfn : info}. 'info' is a dict containing PFN, SE, Size and Checksum
the SE parameter can be a list if we have several replicas to register
"""
connection = self._getConnection( connection )
successful = {}
failed = {}
for lfn, info in lfns.items():
res = self._checkInfo( info, ['PFN', 'SE', 'Size', 'Checksum'] )
if not res['OK']:
failed[lfn] = res['Message']
lfns.pop( lfn )
res = self._addFiles( lfns, credDict, connection = connection )
if not res['OK']:
for lfn in lfns.keys():
failed[lfn] = res['Message']
else:
failed.update( res['Value']['Failed'] )
successful.update( res['Value']['Successful'] )
return S_OK( {'Successful':successful, 'Failed':failed} )
  def _addFiles( self, lfns, credDict, connection = False ):
    """ Main file adding method

        Splits the requested replicas into one master replica per LFN plus
        optional extras, registers directories, file records, ancestors and
        replicas in order, and purges partially-registered files on error.

        NOTE(review): `success.keys() + fail.keys()` and popping while
        iterating `.keys()` below are Python-2-only idioms - this method
        needs wrapping with list() for a Python 3 port.
    """
    connection = self._getConnection( connection )
    successful = {}
    result = self.db.ugManager.getUserAndGroupID( credDict )
    if not result['OK']:
      return result
    uid, gid = result['Value']
    # prepare lfns with master replicas - the first in the list or a unique replica
    masterLfns = {}
    extraLfns = {}
    for lfn in lfns:
      masterLfns[lfn] = dict( lfns[lfn] )
      if isinstance( lfns[lfn].get( 'SE' ), list ):
        masterLfns[lfn]['SE'] = lfns[lfn]['SE'][0]
        if len( lfns[lfn]['SE'] ) > 1:
          extraLfns[lfn] = dict( lfns[lfn] )
          extraLfns[lfn]['SE'] = lfns[lfn]['SE'][1:]

    # Check whether the supplied files have been registered already
    existingMetadata, failed = self._getExistingMetadata( masterLfns.keys(), connection = connection )
    if existingMetadata:
      success, fail = self._checkExistingMetadata( existingMetadata, masterLfns )
      successful.update( success )
      failed.update( fail )
      for lfn in ( success.keys() + fail.keys() ):
        masterLfns.pop( lfn )

    # If GUIDs are supposed to be unique check their pre-existance
    if self.db.uniqueGUID:
      fail = self._checkUniqueGUID( masterLfns, connection = connection )
      failed.update( fail )
      for lfn in fail:
        masterLfns.pop( lfn )

    # If we have files left to register
    if masterLfns:
      # Create the directories for the supplied files and store their IDs
      directories = self._getFileDirectories( masterLfns.keys() )
      for directory, fileNames in directories.items():
        res = self.db.dtree.makeDirectories( directory, credDict )
        if not res['OK']:
          ## directory creation failed: fail every file beneath it
          for fileName in fileNames:
            lfn = os.path.join( directory, fileName )
            failed[lfn] = res['Message']
            masterLfns.pop( lfn )
          continue
        for fileName in fileNames:
          if not fileName:
            failed[directory] = "Is no a valid file"
            masterLfns.pop( directory )
            continue
          lfn = "%s/%s" % ( directory, fileName )
          lfn = lfn.replace( '//', '/' )

          # This condition should never be true, we would not be here otherwise...
          if not res['OK']:
            failed[lfn] = "Failed to create directory for file"
            masterLfns.pop( lfn )
          else:
            masterLfns[lfn]['DirID'] = res['Value']

    # If we still have files left to register
    if masterLfns:
      res = self._insertFiles( masterLfns, uid, gid, connection = connection )
      if not res['OK']:
        for lfn in masterLfns.keys():
          failed[lfn] = res['Message']
          masterLfns.pop( lfn )
      else:
        for lfn, error in res['Value']['Failed'].items():
          failed[lfn] = error
          masterLfns.pop( lfn )
        masterLfns = res['Value']['Successful']

    # Add the ancestors
    if masterLfns:
      res = self._populateFileAncestors( masterLfns, connection = connection )
      toPurge = []
      if not res['OK']:
        for lfn in masterLfns.keys():
          failed[lfn] = "Failed while registering ancestors"
          toPurge.append( masterLfns[lfn]['FileID'] )
      else:
        failed.update( res['Value']['Failed'] )
        for lfn, error in res['Value']['Failed'].items():
          toPurge.append( masterLfns[lfn]['FileID'] )
      ## roll back file records whose ancestor registration failed
      if toPurge:
        self._deleteFiles( toPurge, connection = connection )

    # Register the replicas
    newlyRegistered = {}
    if masterLfns:
      res = self._insertReplicas( masterLfns, master = True, connection = connection )
      toPurge = []
      if not res['OK']:
        for lfn in masterLfns.keys():
          failed[lfn] = "Failed while registering replica"
          toPurge.append( masterLfns[lfn]['FileID'] )
      else:
        newlyRegistered = res['Value']['Successful']
        successful.update( newlyRegistered )
        failed.update( res['Value']['Failed'] )
        for lfn, error in res['Value']['Failed'].items():
          toPurge.append( masterLfns[lfn]['FileID'] )
      ## roll back file records whose master replica failed to register
      if toPurge:
        self._deleteFiles( toPurge, connection = connection )

    # Add extra replicas for successfully registered LFNs
    for lfn in extraLfns.keys():
      if not lfn in successful:
        extraLfns.pop( lfn )

    if extraLfns:
      res = self._findFiles( extraLfns.keys(), ['FileID','DirID'], connection=connection )
      if not res['OK']:
        for lfn in lfns.keys():
          failed[lfn] = 'Failed while registering extra replicas'
          successful.pop( lfn )
          extraLfns.pop( lfn )
      else:
        failed.update(res['Value']['Failed'])
        for lfn in res['Value']['Failed'].keys():
          successful.pop(lfn)
          extraLfns.pop( lfn )
        for lfn,fileDict in res['Value']['Successful'].items():
          extraLfns[lfn]['FileID'] = fileDict['FileID']
          extraLfns[lfn]['DirID'] = fileDict['DirID']

      if extraLfns:
        res = self._insertReplicas( extraLfns, master = False, connection = connection )
        if not res['OK']:
          for lfn in extraLfns.keys():
            failed[lfn] = "Failed while registering extra replicas"
            successful.pop( lfn )
        else:
          newlyRegistered = res['Value']['Successful']
          successful.update( newlyRegistered )
          failed.update( res['Value']['Failed'] )

    return S_OK( {'Successful':successful, 'Failed':failed} )
  def _updateDirectoryUsage( self, directorySEDict, change, connection = False ):
    """Propagate a storage-usage change up the directory tree.

    :param directorySEDict : { dirID : { seID : {'Files': n, 'Size': bytes} } }
    :param change : spliced directly into the UPDATE statement to grow or
                    shrink the running totals (NOTE(review): presumably
                    '+' or '-' - confirm against callers)
    :param connection : optional existing DB connection (only used to
                        resolve parent IDs; the update itself uses the
                        default connection)
    """
    connection = self._getConnection( connection )
    for directoryID in directorySEDict.keys():
      ## the change applies to the directory and every parent up to the root
      result = self.db.dtree.getPathIDsByID( directoryID )
      if not result['OK']:
        return result
      parentIDs = result['Value']
      dirDict = directorySEDict[directoryID]
      for seID in dirDict.keys() :
        seDict = dirDict[seID]
        files = seDict['Files']
        size = seDict['Size']
        insertTuples = []
        for dirID in parentIDs:
          insertTuples.append( '(%d,%d,%d,%d,UTC_TIMESTAMP())' % ( dirID, seID, size, files ) )
        ## upsert: insert the usage row or adjust the existing totals
        req = "INSERT INTO FC_DirectoryUsage (DirID,SEID,SESize,SEFiles,LastUpdate) "
        req += "VALUES %s" % ','.join( insertTuples )
        req += " ON DUPLICATE KEY UPDATE SESize=SESize%s%d, SEFiles=SEFiles%s%d, LastUpdate=UTC_TIMESTAMP() " \
               % ( change, size, change, files )
        res = self.db._update( req )
        ## a failed usage update is logged but never fails the caller
        if not res['OK']:
          gLogger.warn( "Failed to update FC_DirectoryUsage", res['Message'] )
    return S_OK()
  def _populateFileAncestors( self, lfns, connection = False ):
    """Register ancestor relations for freshly inserted files.

    Each value of *lfns* must carry 'FileID' and may carry 'Ancestors'
    (a single lfn or a list of lfns) and 'AncestorDepth' (default 1).
    Indirect ancestors are added with accumulated depth.

    :return : S_OK( {'Successful': {lfn: True}, 'Failed': {lfn: reason}} )
    """
    connection = self._getConnection( connection )
    successful = {}
    failed = {}
    for lfn, lfnDict in lfns.items():
      originalFileID = lfnDict['FileID']
      originalDepth = lfnDict.get( 'AncestorDepth', 1 )
      ancestors = lfnDict.get( 'Ancestors', [] )
      ## a single ancestor may be given as a bare string (py2-style check)
      if type( ancestors ) == type( ' ' ):
        ancestors = [ancestors]
      ## a file is never its own ancestor
      if lfn in ancestors:
        ancestors.remove( lfn )
      if not ancestors:
        successful[lfn] = True
        continue
      res = self._findFiles( ancestors, connection = connection )
      if res['Value']['Failed']:
        failed[lfn] = "Failed to resolve ancestor files"
        continue
      ancestorIDs = res['Value']['Successful']
      fileIDLFNs = {}
      toInsert = {}
      ## direct ancestors at the requested depth
      for ancestor in ancestorIDs.keys():
        fileIDLFNs[ancestorIDs[ancestor]['FileID']] = ancestor
        toInsert[ancestorIDs[ancestor]['FileID']] = originalDepth
      ## pull in the ancestors' own ancestors with accumulated depth
      res = self._getFileAncestors( fileIDLFNs.keys() )
      if not res['OK']:
        failed[lfn] = "Failed to obtain all ancestors"
        continue
      fileIDAncestorDict = res['Value']
      for fileIDDict in fileIDAncestorDict.values():
        for ancestorID, relativeDepth in fileIDDict.items():
          toInsert[ancestorID] = relativeDepth + originalDepth
      res = self._insertFileAncestors( originalFileID, toInsert, connection = connection )
      if not res['OK']:
        if "Duplicate" in res['Message']:
          failed[lfn] = "Failed to insert ancestor files: duplicate entry"
        else:
          failed[lfn] = "Failed to insert ancestor files"
      else:
        successful[lfn] = True
    return S_OK( {'Successful':successful, 'Failed':failed} )
def _insertFileAncestors( self, fileID, ancestorDict, connection = False ):
connection = self._getConnection( connection )
ancestorTuples = []
for ancestorID, depth in ancestorDict.items():
ancestorTuples.append( "(%d,%d,%d)" % ( fileID, ancestorID, depth ) )
if not ancestorTuples:
return S_OK()
req = "INSERT INTO FC_FileAncestors (FileID, AncestorID, AncestorDepth) VALUES %s" \
% intListToString( ancestorTuples )
return self.db._update( req, connection )
def _getFileAncestors( self, fileIDs, depths = [], connection = False ):
connection = self._getConnection( connection )
req = "SELECT FileID, AncestorID, AncestorDepth FROM FC_FileAncestors WHERE FileID IN (%s)" \
% intListToString( fileIDs )
if depths:
req = "%s AND AncestorDepth IN (%s);" % ( req, intListToString( depths ) )
res = self.db._query( req, connection )
if not res['OK']:
return res
fileIDAncestors = {}
for fileID, ancestorID, depth in res['Value']:
if not fileIDAncestors.has_key( fileID ):
fileIDAncestors[fileID] = {}
fileIDAncestors[fileID][ancestorID] = depth
return S_OK( fileIDAncestors )
def _getFileDescendents( self, fileIDs, depths, connection = False ):
connection = self._getConnection( connection )
req = "SELECT AncestorID, FileID, AncestorDepth FROM FC_FileAncestors WHERE AncestorID IN (%s)" \
% intListToString( fileIDs )
if depths:
req = "%s AND AncestorDepth IN (%s);" % ( req, intListToString( depths ) )
res = self.db._query( req, connection )
if not res['OK']:
return res
fileIDAncestors = {}
for ancestorID, fileID, depth in res['Value']:
if not fileIDAncestors.has_key( ancestorID ):
fileIDAncestors[ancestorID] = {}
fileIDAncestors[ancestorID][fileID] = depth
return S_OK( fileIDAncestors )
def addFileAncestors(self,lfns, connection = False ):
""" Add file ancestors to the catalog """
connection = self._getConnection( connection )
failed = {}
successful = {}
result = self._findFiles( lfns.keys(), connection = connection )
if not result['OK']:
return result
if result['Value']['Failed']:
failed.update(result['Value']['Failed'])
for lfn in result['Value']['Failed']:
lfns.pop(lfn)
if not lfns:
return S_OK({'Successful':successful,'Failed':failed})
for lfn in result['Value']['Successful']:
lfns[lfn]['FileID'] = result['Value']['Successful'][lfn]['FileID']
result = self._populateFileAncestors(lfns, connection)
if not result['OK']:
return result
failed.update(result['Value']['Failed'])
successful = result['Value']['Successful']
return S_OK({'Successful':successful,'Failed':failed})
def _getFileRelatives( self, lfns, depths, relation, connection = False ):
connection = self._getConnection( connection )
failed = {}
successful = {}
result = self._findFiles( lfns.keys(), connection = connection )
if not result['OK']:
return result
if result['Value']['Failed']:
failed.update(result['Value']['Failed'])
for lfn in result['Value']['Failed']:
lfns.pop(lfn)
if not lfns:
return S_OK({'Successful':successful,'Failed':failed})
inputIDDict = {}
for lfn in result['Value']['Successful']:
inputIDDict[ result['Value']['Successful'][lfn]['FileID'] ] = lfn
inputIDs = inputIDDict.keys()
if relation == 'ancestor':
result = self._getFileAncestors(inputIDs,depths, connection)
else:
result = self._getFileDescendents(inputIDs,depths, connection)
if not result['OK']:
return result
failed = {}
successful = {}
relDict = result['Value']
for id_ in inputIDs:
if id_ in relDict:
aList = relDict[id_].keys()
result = self._getFileLFNs(aList)
if not result['OK']:
failed[inputIDDict[id]] = "Failed to find %s" % relation
else:
if result['Value']['Successful']:
resDict = {}
for aID in result['Value']['Successful']:
resDict[ result['Value']['Successful'][aID] ] = relDict[id_][aID]
successful[inputIDDict[id_]] = resDict
for aID in result['Value']['Failed']:
failed[inputIDDict[id_]] = "Failed to get the ancestor LFN"
else:
successful[inputIDDict[id_]] = {}
return S_OK({'Successful':successful,'Failed':failed})
def getFileAncestors( self, lfns, depths, connection = False ):
return self._getFileRelatives(lfns, depths, 'ancestor', connection)
def getFileDescendents( self, lfns, depths, connection = False ):
return self._getFileRelatives(lfns, depths, 'descendent', connection)
  def _getExistingMetadata( self, lfns, connection = False ):
    """ Return the stored metadata for those of the supplied LFNs that already exist.

        :param lfns: LFNs to look up
        :return: tuple ( successful, failed ) where successful maps
                 lfn -> {'FileID','Size','Checksum','GUID'}; 'No such file or
                 directory' entries are dropped from failed since non-existence
                 is the expected case here, not an error
    """
    connection = self._getConnection( connection )
    # Check whether the files already exist before adding
    res = self._findFiles( lfns, ['FileID', 'Size', 'Checksum', 'GUID'], connection = connection )
    # NOTE(review): res['OK'] is not checked - a failed query would raise a
    # KeyError on 'Value' here; confirm _findFiles cannot fail, or add a check.
    successful = res['Value']['Successful']
    failed = res['Value']['Failed']
    for lfn, error in res['Value']['Failed'].items():
      if error == 'No such file or directory':
        failed.pop( lfn )
    return successful, failed
  def _checkExistingMetadata( self, existingLfns, lfns ):
    """ For files already in the catalog, decide whether re-registration is benign.

        A re-registration succeeds only if GUID, Size and Checksum all match the
        stored values, the file has replicas, and the supplied SE is among them.

        :param existingLfns: { lfn : stored metadata dict } as returned by _getExistingMetadata
        :param lfns: { lfn : newly supplied metadata dict with 'GUID','Size','Checksum','SE' }
        :return: tuple ( successful, failed )
    """
    failed = {}
    successful = {}
    fileIDLFNs = {}
    for lfn, fileDict in existingLfns.items():
      fileIDLFNs[fileDict['FileID']] = lfn
    # For those that exist get the replicas to determine whether they are already registered
    res = self._getFileReplicas( fileIDLFNs.keys() )
    if not res['OK']:
      # Cannot verify replicas: fail every pre-existing file
      for lfn in fileIDLFNs.values():
        failed[lfn] = 'Failed checking pre-existing replicas'
    else:
      replicaDict = res['Value']
      for fileID, lfn in fileIDLFNs.items():
        fileMetadata = existingLfns[lfn]
        existingGuid = fileMetadata['GUID']
        existingSize = fileMetadata['Size']
        existingChecksum = fileMetadata['Checksum']
        newGuid = lfns[lfn]['GUID']
        newSize = lfns[lfn]['Size']
        newChecksum = lfns[lfn]['Checksum']
        # Ensure that the key file metadata is the same
        if ( existingGuid != newGuid ) or \
           ( existingSize != newSize ) or \
           ( existingChecksum != newChecksum ):
          failed[lfn] = "File already registered with alternative metadata"
        # If the DB does not have replicas for this file return an error
        elif not fileID in replicaDict or not replicaDict[fileID]:
          failed[lfn] = "File already registered with no replicas"
        # If the supplied SE is not in the existing replicas return an error
        elif not lfns[lfn]['SE'] in replicaDict[fileID].keys():
          failed[lfn] = "File already registered with alternative replicas"
        # If we get here the file being registered already exists exactly in the DB
        else:
          successful[lfn] = True
    return successful, failed
def _checkUniqueGUID( self, lfns, connection = False ):
connection = self._getConnection( connection )
guidLFNs = {}
failed = {}
for lfn, fileDict in lfns.items():
guidLFNs[fileDict['GUID']] = lfn
res = self._getFileIDFromGUID( guidLFNs.keys(), connection = connection )
if not res['OK']:
return dict.fromkeys( lfns, res['Message'] )
for guid, fileID in res['Value'].items():
failed[guidLFNs[guid]] = "GUID already registered for another file %s" % fileID # resolve this to LFN
return failed
def removeFile( self, lfns, connection = False ):
connection = self._getConnection( connection )
""" Remove file from the catalog """
successful = {}
failed = {}
res = self._findFiles( lfns, ['DirID', 'FileID', 'Size'], connection = connection )
if not res['OK']:
return res
for lfn, error in res['Value']['Failed'].items():
if error == 'No such file or directory':
successful[lfn] = True
else:
failed[lfn] = error
fileIDLfns = {}
lfns = res['Value']['Successful']
for lfn, lfnDict in lfns.items():
fileIDLfns[lfnDict['FileID']] = lfn
res = self._computeStorageUsageOnRemoveFile( lfns, connection = connection )
if not res['OK']:
return res
directorySESizeDict = res['Value']
# Now do removal
res = self._deleteFiles( fileIDLfns.keys(), connection = connection )
if not res['OK']:
for lfn in fileIDLfns.values():
failed[lfn] = res['Message']
else:
# Update the directory usage
self._updateDirectoryUsage( directorySESizeDict, '-', connection = connection )
for lfn in fileIDLfns.values():
successful[lfn] = True
return S_OK( {"Successful":successful, "Failed":failed} )
  def _computeStorageUsageOnRemoveFile( self, lfns, connection = False ):
    """ Compute the per-directory, per-SE storage reduction caused by removing files.

        :param lfns: { lfn : {'FileID':..., 'DirID':..., 'Size':...} }
        :return: S_OK( { dirID : { seID : {'Files':n, 'Size':s} } } );
                 seID 0 holds the logical (SE-independent) totals
    """
    # Resolve the replicas to calculate reduction in storage usage
    fileIDLfns = {}
    for lfn, lfnDict in lfns.items():
      fileIDLfns[lfnDict['FileID']] = lfn
    res = self._getFileReplicas( fileIDLfns.keys(), connection = connection )
    if not res['OK']:
      return res
    directorySESizeDict = {}
    for fileID, seDict in res['Value'].items():
      dirID = lfns[fileIDLfns[fileID]]['DirID']
      size = lfns[fileIDLfns[fileID]]['Size']
      # Slot 0 accumulates the logical totals for the directory
      directorySESizeDict.setdefault( dirID, {} )
      directorySESizeDict[dirID].setdefault( 0, {'Files':0,'Size':0} )
      directorySESizeDict[dirID][0]['Size'] += size
      directorySESizeDict[dirID][0]['Files'] += 1
      # One entry per storage element holding a replica
      for seName in seDict.keys():
        res = self.db.seManager.findSE( seName )
        if not res['OK']:
          return res
        seID = res['Value']
        size = lfns[fileIDLfns[fileID]]['Size']
        directorySESizeDict[dirID].setdefault( seID, {'Files':0,'Size':0} )
        directorySESizeDict[dirID][seID]['Size'] += size
        directorySESizeDict[dirID][seID]['Files'] += 1
    return S_OK( directorySESizeDict )
def setFileStatus( self, lfns, connection = False ):
""" Get set the group for the supplied files """
connection = self._getConnection( connection )
res = self._findFiles( lfns, ['FileID', 'UID'], connection = connection )
if not res['OK']:
return res
failed = res['Value']['Failed']
successful = {}
for lfn in res['Value']['Successful'].keys():
status = lfns[lfn]
if isinstance( status, basestring ):
if not status in self.db.validFileStatus:
failed[lfn] = 'Invalid file status %s' % status
continue
result = self._getStatusInt( status, connection = connection )
if not result['OK']:
failed[lfn] = res['Message']
continue
status = result['Value']
fileID = res['Value']['Successful'][lfn]['FileID']
res = self._setFileParameter( fileID, "Status", status, connection = connection )
if not res['OK']:
failed[lfn] = res['Message']
else:
successful[lfn] = True
return S_OK( {'Successful':successful, 'Failed':failed} )
######################################################
#
# Replica write methods
#
def addReplica( self, lfns, connection = False ):
""" Add replica to the catalog """
connection = self._getConnection( connection )
successful = {}
failed = {}
for lfn, info in lfns.items():
res = self._checkInfo( info, ['PFN', 'SE'] )
if not res['OK']:
failed[lfn] = res['Message']
lfns.pop( lfn )
res = self._addReplicas( lfns, connection = connection )
if not res['OK']:
for lfn in lfns.keys():
failed[lfn] = res['Message']
else:
failed.update( res['Value']['Failed'] )
successful.update( res['Value']['Successful'] )
return S_OK( {'Successful':successful, 'Failed':failed} )
  def _addReplicas( self, lfns, connection = False ):
    """ Resolve the files and insert their replicas.

        :param lfns: dictionary { lfn : replica info }; entries for files that
                     cannot be resolved are removed in place, and the resolved
                     'DirID'/'FileID'/'Size' are merged into the surviving entries
        :return: S_OK( {'Successful':..., 'Failed':...} )
    """
    connection = self._getConnection( connection )
    successful = {}
    res = self._findFiles( lfns.keys(), ['DirID', 'FileID', 'Size'], connection = connection )
    if not res['OK']:
      return res
    failed = res['Value']['Failed']
    # Unresolvable files cannot receive replicas: drop them from the work dict
    for lfn in failed.keys():
      lfns.pop( lfn )
    lfnFileIDDict = res['Value']['Successful']
    for lfn, fileDict in lfnFileIDDict.items():
      lfns[lfn].update( fileDict )
    res = self._insertReplicas( lfns, connection = connection )
    if not res['OK']:
      # Bulk insertion failed: fail all remaining LFNs with the same message
      for lfn in lfns.keys():
        failed[lfn] = res['Message']
    else:
      successful = res['Value']['Successful']
      failed.update( res['Value']['Failed'] )
    return S_OK( {'Successful':successful, 'Failed':failed} )
def removeReplica( self, lfns, connection = False ):
""" Remove replica from catalog """
connection = self._getConnection( connection )
successful = {}
failed = {}
for lfn, info in lfns.items():
res = self._checkInfo( info, ['SE'] )
if not res['OK']:
failed[lfn] = res['Message']
lfns.pop( lfn )
res = self._deleteReplicas( lfns, connection = connection )
if not res['OK']:
for lfn in lfns.keys():
failed[lfn] = res['Message']
else:
failed.update( res['Value']['Failed'] )
successful.update( res['Value']['Successful'] )
return S_OK( {'Successful':successful, 'Failed':failed} )
def setReplicaStatus( self, lfns, connection = False ):
""" Set replica status in the catalog """
connection = self._getConnection( connection )
successful = {}
failed = {}
for lfn, info in lfns.items():
res = self._checkInfo( info, ['SE', 'Status'] )
if not res['OK']:
failed[lfn] = res['Message']
continue
status = info['Status']
se = info['SE']
res = self._findFiles( [lfn], ['FileID'], connection = connection )
if not res['Value']['Successful'].has_key( lfn ):
failed[lfn] = res['Value']['Failed'][lfn]
continue
fileID = res['Value']['Successful'][lfn]['FileID']
res = self._setReplicaStatus( fileID, se, status, connection = connection )
if res['OK']:
successful[lfn] = res['Value']
else:
failed[lfn] = res['Message']
return S_OK( {'Successful':successful, 'Failed':failed} )
def setReplicaHost( self, lfns, connection = False ):
""" Set replica host in the catalog """
connection = self._getConnection( connection )
successful = {}
failed = {}
for lfn, info in lfns.items():
res = self._checkInfo( info, ['SE', 'NewSE'] )
if not res['OK']:
failed[lfn] = res['Message']
continue
newSE = info['NewSE']
se = info['SE']
res = self._findFiles( [lfn], ['FileID'], connection = connection )
if not res['Value']['Successful'].has_key( lfn ):
failed[lfn] = res['Value']['Failed'][lfn]
continue
fileID = res['Value']['Successful'][lfn]['FileID']
res = self._setReplicaHost( fileID, se, newSE, connection = connection )
if res['OK']:
successful[lfn] = res['Value']
else:
failed[lfn] = res['Message']
return S_OK( {'Successful':successful, 'Failed':failed} )
######################################################
#
# File read methods
#
  def exists( self, lfns, connection = False ):
    """ Determine whether a file exists in the catalog.

        :param lfns: list of LFNs, or dictionary { lfn : guid } or
                     { lfn : {'PFN':..., 'GUID':...} } when GUID uniqueness
                     is enforced
        :return: S_OK( {'Successful': { lfn : lfn | real lfn | False }, 'Failed':...} )
                 - the value is the LFN itself if it exists, False if it does not,
                 or the LFN actually owning the GUID when the GUID is taken
    """
    connection = self._getConnection( connection )
    res = self._findFiles( lfns, allStatus = True, connection = connection )
    # NOTE(review): res['OK'] is not checked here; confirm _findFiles cannot fail
    successful = res['Value']['Successful']
    origFailed = res['Value']['Failed']
    # Existing files report their own LFN as the value
    for lfn in successful:
      successful[lfn] = lfn
    failed = {}
    if self.db.uniqueGUID:
      guidList = []
      val = None
      # Try to identify if the GUID is given
      # We consider only 2 options :
      # either {lfn : guid}
      # or { lfn : {PFN : .., GUID : ..} }
      if isinstance( lfns, dict ):
        val = lfns.values()
        # We have values, take the first to identify the type
        if val:
          val = val[0]
        if isinstance( val, dict ) and 'GUID' in val:
          # We are in the case {lfn : {PFN:.., GUID:..}}
          guidList = [lfns[lfn]['GUID'] for lfn in lfns]
          pass
        elif isinstance( val, basestring ):
          # We hope that it is the GUID which is given
          guidList = lfns.values()
      if guidList:
        # A dict { guid: lfn to which it is supposed to be associated }
        guidToGivenLfn = dict( zip( guidList, lfns ) )
        res = self.getLFNForGUID( guidList, connection )
        if not res['OK']:
          return res
        guidLfns = res['Value']['Successful']
        # A GUID already registered under a different LFN counts as existing:
        # report the real owner LFN for the supplied one
        for guid, realLfn in guidLfns.items():
          successful[guidToGivenLfn[guid]] = realLfn
    for lfn, error in origFailed.items():
      # It could be in successful because the guid exists with another lfn
      if lfn in successful:
        continue
      if error == 'No such file or directory':
        successful[lfn] = False
      else:
        failed[lfn] = error
    return S_OK( {"Successful":successful, "Failed":failed} )
def isFile( self, lfns, connection = False ):
""" Determine whether a path is a file in the catalog """
connection = self._getConnection( connection )
#TO DO, should check whether it is a directory if it fails
return self.exists( lfns, connection = connection )
def getFileSize( self, lfns, connection = False ):
""" Get file size from the catalog """
connection = self._getConnection( connection )
#TO DO, should check whether it is a directory if it fails
res = self._findFiles( lfns, ['Size'], connection = connection )
if not res['OK']:
return res
totalSize = 0
for lfn in res['Value']['Successful'].keys():
size = res['Value']['Successful'][lfn]['Size']
res['Value']['Successful'][lfn] = size
totalSize += size
res['TotalSize'] = totalSize
return res
def getFileMetadata( self, lfns, connection = False ):
""" Get file metadata from the catalog """
connection = self._getConnection( connection )
#TO DO, should check whether it is a directory if it fails
return self._findFiles( lfns, ['Size', 'Checksum',
'ChecksumType', 'UID',
'GID', 'GUID',
'CreationDate', 'ModificationDate',
'Mode', 'Status'], connection = connection )
def getPathPermissions( self, paths, credDict, connection = False ):
""" Get the permissions for the supplied paths """
connection = self._getConnection( connection )
res = self.db.ugManager.getUserAndGroupID( credDict )
if not res['OK']:
return res
uid, gid = res['Value']
res = self._findFiles( paths, metadata = ['Mode', 'UID', 'GID'], connection = connection )
if not res['OK']:
return res
successful = {}
for dirName, dirDict in res['Value']['Successful'].items():
mode = dirDict['Mode']
p_uid = dirDict['UID']
p_gid = dirDict['GID']
successful[dirName] = {}
if p_uid == uid:
successful[dirName]['Read'] = mode & stat.S_IRUSR
successful[dirName]['Write'] = mode & stat.S_IWUSR
successful[dirName]['Execute'] = mode & stat.S_IXUSR
elif p_gid == gid:
successful[dirName]['Read'] = mode & stat.S_IRGRP
successful[dirName]['Write'] = mode & stat.S_IWGRP
successful[dirName]['Execute'] = mode & stat.S_IXGRP
else:
successful[dirName]['Read'] = mode & stat.S_IROTH
successful[dirName]['Write'] = mode & stat.S_IWOTH
successful[dirName]['Execute'] = mode & stat.S_IXOTH
return S_OK( {'Successful':successful, 'Failed':res['Value']['Failed']} )
######################################################
#
# Replica read methods
#
  def __getReplicasForIDs( self, fileIDLfnDict, allStatus, connection = False ):
    """ Get replicas for files with already resolved IDs

        :param fileIDLfnDict: dictionary { fileID : lfn }
        :param allStatus: whether replicas in any status are returned
        :return: S_OK( { lfn : { se : pfn } } ); pfn may be '' when not stored
    """
    replicas = {}
    if fileIDLfnDict:
      fields = []
      # Only fetch stored PFNs when they cannot be derived from the LFN
      if not self.db.lfnPfnConvention or self.db.lfnPfnConvention == "Weak":
        fields = ['PFN']
      res = self._getFileReplicas( fileIDLfnDict.keys(), fields_input=fields,
                                   allStatus = allStatus, connection = connection )
      if not res['OK']:
        return res
      for fileID, seDict in res['Value'].items():
        lfn = fileIDLfnDict[fileID]
        replicas[lfn] = {}
        for se, repDict in seDict.items():
          pfn = repDict.get('PFN','')
          #if not pfn or self.db.lfnPfnConvention:
          #  res = self._resolvePFN( lfn, se )
          #  if res['OK']:
          #    pfn = res['Value']
          replicas[lfn][se] = pfn
    result = S_OK( replicas )
    return result
  def getReplicas( self, lfns, allStatus, connection = False ):
    """ Get file replicas from the catalog

        :param lfns: LFNs to resolve
        :param allStatus: whether replicas in any status are returned
        :return: S_OK( {'Successful': { lfn : { se : pfn } }, 'Failed':...} );
                 when the LFN->PFN convention is active, an extra 'SEPrefixes'
                 key maps SE names to their PFN prefixes
    """
    connection = self._getConnection( connection )
    # Get FileID <-> LFN correspondence first
    res = self._findFileIDs( lfns, connection = connection )
    if not res['OK']:
      return res
    failed = res['Value']['Failed']
    fileIDLFNs = {}
    for lfn, fileID in res['Value']['Successful'].items():
      fileIDLFNs[fileID] = lfn
    result = self.__getReplicasForIDs( fileIDLFNs, allStatus, connection)
    if not result['OK']:
      return result
    replicas = result['Value']
    result = S_OK( { "Successful": replicas, 'Failed': failed } )
    if self.db.lfnPfnConvention:
      # Ship the SE prefixes so clients can build PFNs themselves
      sePrefixDict = {}
      resSE = self.db.seManager.getSEPrefixes()
      if resSE['OK']:
        sePrefixDict = resSE['Value']
      result['Value']['SEPrefixes'] = sePrefixDict
    return result
def getReplicasByMetadata( self, metaDict, path, allStatus, credDict, connection = False ):
""" Get file replicas for files corresponding to the given metadata """
connection = self._getConnection( connection )
# Get FileID <-> LFN correspondence first
failed = {}
result = self.db.fmeta.findFilesByMetadata( metaDict, path, credDict, extra = True)
if not result['OK']:
return result
fileIDLFNs = result['Value']
result = self.__getReplicasForIDs( fileIDLFNs, allStatus, connection)
if not result['OK']:
return result
replicas = result['Value']
result = S_OK( { "Successful": replicas, 'Failed': failed } )
if self.db.lfnPfnConvention:
sePrefixDict = {}
resSE = self.db.seManager.getSEPrefixes()
if resSE['OK']:
sePrefixDict = resSE['Value']
result['Value']['SEPrefixes'] = sePrefixDict
return result
  def _resolvePFN(self,lfn,se):
    """ Construct the PFN for an LFN at the given SE from the SE definition.

        :param lfn: logical file name
        :param se: storage element name
        :return: S_OK( pfn ) or the error from the SE lookup / pfnunparse
    """
    resSE = self.db.seManager.getSEDefinition(se)
    if not resSE['OK']:
      return resSE
    pfnDict = dict(resSE['Value']['SEDict'])
    if "PFNPrefix" in pfnDict:
      # Simple convention: PFN is the SE prefix followed by the LFN
      return S_OK(pfnDict['PFNPrefix']+lfn)
    else:
      # Otherwise build the PFN from the full SE description
      pfnDict['FileName'] = lfn
      return pfnunparse(pfnDict)
def getReplicaStatus( self, lfns, connection = False ):
""" Get replica status from the catalog """
connection = self._getConnection( connection )
res = self._findFiles( lfns, connection = connection )
failed = res['Value']['Failed']
fileIDLFNs = {}
for lfn, fileDict in res['Value']['Successful'].items():
fileID = fileDict['FileID']
fileIDLFNs[fileID] = lfn
successful = {}
if fileIDLFNs:
res = self._getFileReplicas( fileIDLFNs.keys(), allStatus = True, connection = connection )
if not res['OK']:
return res
for fileID, seDict in res['Value'].items():
lfn = fileIDLFNs[fileID]
requestedSE = lfns[lfn]
if not requestedSE:
failed[lfn] = "Replica info not supplied"
elif requestedSE not in seDict.keys():
failed[lfn] = "No replica at supplied site"
else:
successful[lfn] = seDict[requestedSE]['Status']
return S_OK( {'Successful':successful, 'Failed':failed} )
######################################################
#
# General usage methods
#
  def _getStatusInt( self, status, connection = False ):
    """ Return the integer ID for a status string, creating it if unknown.

        :param status: status name
        :return: S_OK( statusID )
    """
    # NOTE(review): the status string is interpolated directly into SQL;
    # callers appear to pass only values from self.db.validFileStatus, but
    # confirm this cannot carry user-controlled input (injection risk).
    connection = self._getConnection( connection )
    req = "SELECT StatusID FROM FC_Statuses WHERE Status = '%s';" % status
    res = self.db._query( req, connection )
    if not res['OK']:
      return res
    if res['Value']:
      return S_OK( res['Value'][0][0] )
    # Unknown status: register it and return the new auto-generated ID
    req = "INSERT INTO FC_Statuses (Status) VALUES ('%s');" % status
    res = self.db._update( req, connection )
    if not res['OK']:
      return res
    return S_OK( res['lastRowId'] )
def _getIntStatus(self,statusID,connection=False):
if statusID in self.statusDict:
return S_OK(self.statusDict[statusID])
connection = self._getConnection(connection)
req = "SELECT StatusID,Status FROM FC_Statuses"
res = self.db._query(req,connection)
if not res['OK']:
return res
if res['Value']:
for row in res['Value']:
self.statusDict[int(row[0])] = row[1]
if statusID in self.statusDict:
return S_OK(self.statusDict[statusID])
return S_OK('Unknown')
def getFileIDsInDirectory( self, dirID, requestString = False ):
""" Get a list of IDs for all the files stored in given directories or their
subdirectories
:param mixt dirID: single directory ID or a list of directory IDs
:param boolean requestString: if True return result as a SQL SELECT string
:return: list of file IDs or SELECT string
"""
return self._getDirectoryFileIDs( dirID, requestString = requestString )
  def getFilesInDirectory( self, dirID, verbose = False, connection = False ):
    """ Get the files (metadata, optionally replicas) in the given directory.

        :param dirID: directory ID
        :param verbose: if True, also attach a 'Replicas' dict per file
        :return: S_OK( { fileName : {'MetaData':..., ['Replicas':...]} } )
    """
    connection = self._getConnection( connection )
    files = {}
    res = self._getDirectoryFiles( dirID, [], ['FileID', 'Size', 'GUID',
                                               'Checksum', 'ChecksumType',
                                               'Type', 'UID',
                                               'GID', 'CreationDate',
                                               'ModificationDate', 'Mode',
                                               'Status'], connection = connection )
    if not res['OK']:
      return res
    if not res['Value']:
      return S_OK( files )
    fileIDNames = {}
    for fileName, fileDict in res['Value'].items():
      files[fileName] = {}
      files[fileName]['MetaData'] = fileDict
      fileIDNames[fileDict['FileID']] = fileName
    if verbose:
      # Attach the replica information for every file found
      result = self._getFileReplicas( fileIDNames.keys(), connection = connection )
      if not result['OK']:
        return result
      for fileID, seDict in result['Value'].items():
        fileName = fileIDNames[fileID]
        files[fileName]['Replicas'] = seDict
    return S_OK( files )
def getDirectoryReplicas( self, dirID, path, allStatus = False, connection = False ):
""" Get the replicas for all the Files in the given Directory
:param DirID : ID of the directory
:param path : useless
:param allStatus : whether all replicas and file status are considered
If False, take the visibleFileStatus and visibleReplicaStatus values from the configuration
"""
connection = self._getConnection( connection )
result = self._getDirectoryReplicas( dirID, allStatus, connection)
if not result['OK']:
return result
resultDict = {}
seDict = {}
for fileName, fileID, seID, pfn in result['Value']:
resultDict.setdefault( fileName, {} )
if not seID in seDict:
res = self.db.seManager.getSEName(seID)
if not res['OK']:
seDict[seID] = 'Unknown'
else:
seDict[seID] = res['Value']
se = seDict[seID]
resultDict[fileName][se] = pfn
return S_OK( resultDict )
def _getFileDirectories( self, lfns ):
""" For a list of lfn, returns a dictionary with key the directory, and value
the files in that directory. It does not make any query, just splits the names
:param lfns list of lfns
"""
dirDict = {}
for lfn in lfns:
lfnDir = os.path.dirname( lfn )
lfnFile = os.path.basename( lfn )
dirDict.setdefault( lfnDir, [] )
dirDict[lfnDir].append( lfnFile )
return dirDict
def _checkInfo( self, info, requiredKeys ):
if not info:
return S_ERROR( "Missing parameters" )
for key in requiredKeys:
if not key in info:
return S_ERROR( "Missing '%s' parameter" % key )
return S_OK()
# def _checkLFNPFNConvention( self, lfn, pfn, se ):
# """ Check that the PFN corresponds to the LFN-PFN convention """
# if pfn == lfn:
# return S_OK()
# if ( len( pfn ) < len( lfn ) ) or ( pfn[-len( lfn ):] != lfn ) :
# return S_ERROR( 'PFN does not correspond to the LFN convention' )
# return S_OK()
def changeFileGroup( self, lfns ):
""" Get set the group for the supplied files
:param lfns : dictionary < lfn : group >
:param int/str newGroup: optional new group/groupID the same for all the supplied lfns
"""
res = self._findFiles( lfns, ['FileID', 'GID'] )
if not res['OK']:
return res
failed = res['Value']['Failed']
successful = {}
for lfn in res['Value']['Successful'].keys():
group = lfns[lfn]
if isinstance( group, basestring ):
groupRes = self.db.ugManager.findGroup( group )
if not groupRes['OK']:
return groupRes
group = groupRes['Value']
currentGroup = res['Value']['Successful'][lfn]['GID']
if int( group ) == int( currentGroup ):
successful[lfn] = True
else:
fileID = res['Value']['Successful'][lfn]['FileID']
res = self._setFileParameter( fileID, "GID", group )
if not res['OK']:
failed[lfn] = res['Message']
else:
successful[lfn] = True
return S_OK( {'Successful':successful, 'Failed':failed} )
def changeFileOwner( self, lfns ):
""" Set the owner for the supplied files
:param lfns : dictionary < lfn : owner >
:param int/str newOwner: optional new user/userID the same for all the supplied lfns
"""
res = self._findFiles( lfns, ['FileID', 'UID'] )
if not res['OK']:
return res
failed = res['Value']['Failed']
successful = {}
for lfn in res['Value']['Successful'].keys():
owner = lfns[lfn]
if isinstance( owner, basestring ):
userRes = self.db.ugManager.findUser( owner )
if not userRes['OK']:
return userRes
owner = userRes['Value']
currentOwner = res['Value']['Successful'][lfn]['UID']
if int( owner ) == int( currentOwner ):
successful[lfn] = True
else:
fileID = res['Value']['Successful'][lfn]['FileID']
res = self._setFileParameter( fileID, "UID", owner )
if not res['OK']:
failed[lfn] = res['Message']
else:
successful[lfn] = True
return S_OK( {'Successful':successful, 'Failed':failed} )
def changeFileMode( self, lfns ):
"""" Set the mode for the supplied files
:param lfns : dictionary < lfn : mode >
:param int newMode: optional new mode the same for all the supplied lfns
"""
res = self._findFiles( lfns, ['FileID', 'Mode'] )
if not res['OK']:
return res
failed = res['Value']['Failed']
successful = {}
for lfn in res['Value']['Successful'].keys():
mode = lfns[lfn]
currentMode = res['Value']['Successful'][lfn]['Mode']
if int( currentMode ) == int( mode ):
successful[lfn] = True
else:
fileID = res['Value']['Successful'][lfn]['FileID']
res = self._setFileParameter( fileID, "Mode", mode )
if not res['OK']:
failed[lfn] = res['Message']
else:
successful[lfn] = True
return S_OK( {'Successful':successful, 'Failed':failed} )
def setFileOwner( self, path, owner ):
""" Set the file owner
:param mixed path: file path as a string or int or list of ints or select statement
:param mixt group: new user as a string or int uid
"""
result = self.db.ugManager.findUser( owner )
if not result['OK']:
return result
uid = result['Value']
return self._setFileParameter( path, 'UID', uid )
def setFileGroup( self, path, gname ):
""" Set the file group
:param mixed path: file path as a string or int or list of ints or select statement
:param mixt group: new group as a string or int gid
"""
result = self.db.ugManager.findGroup( gname )
if not result['OK']:
return result
gid = result['Value']
return self._setFileParameter( path, 'GID', gid )
def setFileMode( self, path, mode ):
""" Set the file mode
:param mixed path: file path as a string or int or list of ints or select statement
:param int mode: new mode
"""
return self._setFileParameter( path, 'Mode', mode )
|
vmendez/DIRAC
|
DataManagementSystem/DB/FileCatalogComponents/FileManagerBase.py
|
Python
|
gpl-3.0
| 49,658
|
[
"DIRAC"
] |
e797ffd57e4f23510a8f12a6674886299038f0268225ec538bfaff0e0844d4b5
|
# Copyright (c) 2012 The Chromium Authors. All rights reserved.
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.
"""Top-level presubmit script for Chromium.
See http://dev.chromium.org/developers/how-tos/depottools/presubmit-scripts
for more details about the presubmit API built into depot_tools.
"""
# Path regexes (with escaped '/' or '\' separators) that the presubmit checks
# skip entirely: vendored/third-party trees and generated files.
_EXCLUDED_PATHS = (
    r"^breakpad[\\\/].*",
    r"^native_client_sdk[\\\/]src[\\\/]build_tools[\\\/]make_rules.py",
    r"^native_client_sdk[\\\/]src[\\\/]build_tools[\\\/]make_simple.py",
    r"^native_client_sdk[\\\/]src[\\\/]tools[\\\/].*.mk",
    r"^net[\\\/]tools[\\\/]spdyshark[\\\/].*",
    r"^skia[\\\/].*",
    r"^v8[\\\/].*",
    r".*MakeFile$",
    r".+_autogen\.h$",
    r".+[\\\/]pnacl_shim\.c$",
    r"^gpu[\\\/]config[\\\/].*_list_json\.cc$",
    r"^chrome[\\\/]browser[\\\/]resources[\\\/]pdf[\\\/]index.js"
)

# The NetscapePlugIn library is excluded from pan-project as it will soon
# be deleted together with the rest of the NPAPI and it's not worthwhile to
# update the coding style until then.
_TESTRUNNER_PATHS = (
    r"^content[\\\/]shell[\\\/]tools[\\\/]plugin[\\\/].*",
)

# Fragment of a regular expression that matches C++ and Objective-C++
# implementation files.
_IMPLEMENTATION_EXTENSIONS = r'\.(cc|cpp|cxx|mm)$'

# Regular expression that matches code only used for test binaries
# (best effort).
_TEST_CODE_EXCLUDED_PATHS = (
    r'.*[\\\/](fake_|test_|mock_).+%s' % _IMPLEMENTATION_EXTENSIONS,
    r'.+_test_(base|support|util)%s' % _IMPLEMENTATION_EXTENSIONS,
    r'.+_(api|browser|kif|perf|pixel|unit|ui)?test(_[a-z]+)?%s' %
        _IMPLEMENTATION_EXTENSIONS,
    r'.+profile_sync_service_harness%s' % _IMPLEMENTATION_EXTENSIONS,
    r'.*[\\\/](test|tool(s)?)[\\\/].*',
    # content_shell is used for running layout tests.
    r'content[\\\/]shell[\\\/].*',
    # At request of folks maintaining this folder.
    r'chrome[\\\/]browser[\\\/]automation[\\\/].*',
    # Non-production example code.
    r'mojo[\\\/]examples[\\\/].*',
    # Launcher for running iOS tests on the simulator.
    r'testing[\\\/]iossim[\\\/]iossim\.mm$',
)
# Message shown when production code appears to call *ForTesting functions.
_TEST_ONLY_WARNING = (
    'You might be calling functions intended only for testing from\n'
    'production code. It is OK to ignore this warning if you know what\n'
    'you are doing, as the heuristics used to detect the situation are\n'
    'not perfect. The commit queue will not block on this warning.')

# Message shown when a change breaks the expected #include ordering.
_INCLUDE_ORDER_WARNING = (
    'Your #include order seems to be broken. Remember to use the right '
    'collation (LC_COLLATE=C) and check https://google-styleguide.googlecode'
    '.com/svn/trunk/cppguide.html#Names_and_Order_of_Includes')
# Banned Objective-C patterns. Each entry is a 3-tuple:
#   (pattern, explanation message lines, treat_as_error)
# A pattern starting with '/' is a regex; otherwise it is a plain substring.
_BANNED_OBJC_FUNCTIONS = (
    (
      'addTrackingRect:',
      (
       'The use of -[NSView addTrackingRect:owner:userData:assumeInside:] is'
       'prohibited. Please use CrTrackingArea instead.',
       'http://dev.chromium.org/developers/coding-style/cocoa-dos-and-donts',
      ),
      False,
    ),
    (
      r'/NSTrackingArea\W',
      (
       'The use of NSTrackingAreas is prohibited. Please use CrTrackingArea',
       'instead.',
       'http://dev.chromium.org/developers/coding-style/cocoa-dos-and-donts',
      ),
      False,
    ),
    (
      'convertPointFromBase:',
      (
       'The use of -[NSView convertPointFromBase:] is almost certainly wrong.',
       'Please use |convertPoint:(point) fromView:nil| instead.',
       'http://dev.chromium.org/developers/coding-style/cocoa-dos-and-donts',
      ),
      True,
    ),
    (
      'convertPointToBase:',
      (
       'The use of -[NSView convertPointToBase:] is almost certainly wrong.',
       'Please use |convertPoint:(point) toView:nil| instead.',
       'http://dev.chromium.org/developers/coding-style/cocoa-dos-and-donts',
      ),
      True,
    ),
    (
      'convertRectFromBase:',
      (
       'The use of -[NSView convertRectFromBase:] is almost certainly wrong.',
       'Please use |convertRect:(point) fromView:nil| instead.',
       'http://dev.chromium.org/developers/coding-style/cocoa-dos-and-donts',
      ),
      True,
    ),
    (
      'convertRectToBase:',
      (
       'The use of -[NSView convertRectToBase:] is almost certainly wrong.',
       'Please use |convertRect:(point) toView:nil| instead.',
       'http://dev.chromium.org/developers/coding-style/cocoa-dos-and-donts',
      ),
      True,
    ),
    (
      'convertSizeFromBase:',
      (
       'The use of -[NSView convertSizeFromBase:] is almost certainly wrong.',
       'Please use |convertSize:(point) fromView:nil| instead.',
       'http://dev.chromium.org/developers/coding-style/cocoa-dos-and-donts',
      ),
      True,
    ),
    (
      'convertSizeToBase:',
      (
       'The use of -[NSView convertSizeToBase:] is almost certainly wrong.',
       'Please use |convertSize:(point) toView:nil| instead.',
       'http://dev.chromium.org/developers/coding-style/cocoa-dos-and-donts',
      ),
      True,
    ),
)
# Banned C++ patterns. Each entry is a 4-tuple:
#   (pattern, explanation message lines, treat_as_error, excluded path regexes)
# A pattern starting with '/' is a regex; otherwise it is a plain substring.
# Files matching an entry's excluded paths are exempt from that ban.
_BANNED_CPP_FUNCTIONS = (
    # Make sure that gtest's FRIEND_TEST() macro is not used; the
    # FRIEND_TEST_ALL_PREFIXES() macro from base/gtest_prod_util.h should be
    # used instead since that allows for FLAKY_ and DISABLED_ prefixes.
    (
      'FRIEND_TEST(',
      (
       'Chromium code should not use gtest\'s FRIEND_TEST() macro. Include',
       'base/gtest_prod_util.h and use FRIEND_TEST_ALL_PREFIXES() instead.',
      ),
      False,
      (),
    ),
    (
      'ScopedAllowIO',
      (
       'New code should not use ScopedAllowIO. Post a task to the blocking',
       'pool or the FILE thread instead.',
      ),
      True,
      (
        r"^base[\\\/]process[\\\/]process_metrics_linux\.cc$",
        r"^chrome[\\\/]browser[\\\/]chromeos[\\\/]boot_times_recorder\.cc$",
        r"^chrome[\\\/]browser[\\\/]chromeos[\\\/]"
            "customization_document_browsertest\.cc$",
        r"^components[\\\/]crash[\\\/]app[\\\/]breakpad_mac\.mm$",
        r"^content[\\\/]shell[\\\/]browser[\\\/]shell_browser_main\.cc$",
        r"^content[\\\/]shell[\\\/]browser[\\\/]shell_message_filter\.cc$",
        r"^mojo[\\\/]edk[\\\/]embedder[\\\/]" +
            r"simple_platform_shared_buffer_posix\.cc$",
        r"^net[\\\/]disk_cache[\\\/]cache_util\.cc$",
        r"^net[\\\/]url_request[\\\/]test_url_fetcher_factory\.cc$",
        r"^ui[\\\/]ozone[\\\/]platform[\\\/]drm[\\\/]host[\\\/]"
            "drm_native_display_delegate\.cc$",
      ),
    ),
    (
      'SkRefPtr',
      (
        'The use of SkRefPtr is prohibited. ',
        'Please use skia::RefPtr instead.'
      ),
      True,
      (),
    ),
    (
      'SkAutoRef',
      (
        'The indirect use of SkRefPtr via SkAutoRef is prohibited. ',
        'Please use skia::RefPtr instead.'
      ),
      True,
      (),
    ),
    (
      'SkAutoTUnref',
      (
        'The use of SkAutoTUnref is dangerous because it implicitly ',
        'converts to a raw pointer. Please use skia::RefPtr instead.'
      ),
      True,
      (),
    ),
    (
      'SkAutoUnref',
      (
        'The indirect use of SkAutoTUnref through SkAutoUnref is dangerous ',
        'because it implicitly converts to a raw pointer. ',
        'Please use skia::RefPtr instead.'
      ),
      True,
      (),
    ),
    (
      r'/HANDLE_EINTR\(.*close',
      (
       'HANDLE_EINTR(close) is invalid. If close fails with EINTR, the file',
       'descriptor will be closed, and it is incorrect to retry the close.',
       'Either call close directly and ignore its return value, or wrap close',
       'in IGNORE_EINTR to use its return value. See http://crbug.com/269623'
      ),
      True,
      (),
    ),
    (
      r'/IGNORE_EINTR\((?!.*close)',
      (
       'IGNORE_EINTR is only valid when wrapping close. To wrap other system',
       'calls, use HANDLE_EINTR. See http://crbug.com/269623',
      ),
      True,
      (
        # Files that #define IGNORE_EINTR.
        r'^base[\\\/]posix[\\\/]eintr_wrapper\.h$',
        r'^ppapi[\\\/]tests[\\\/]test_broker\.cc$',
      ),
    ),
    (
      r'/v8::Extension\(',
      (
        'Do not introduce new v8::Extensions into the code base, use',
        'gin::Wrappable instead. See http://crbug.com/334679',
      ),
      True,
      (
        r'extensions[\\\/]renderer[\\\/]safe_builtins\.*',
      ),
    ),
)
# Message shown when the deprecated IPC_ENUM_TRAITS() macro is used.
_IPC_ENUM_TRAITS_DEPRECATED = (
    'You are using IPC_ENUM_TRAITS() in your code. It has been deprecated.\n'
    'See http://www.chromium.org/Home/chromium-security/education/security-tips-for-ipc')

# The complete set of recognized OS_* platform macros; anything else in a
# defined(OS_...) check is flagged as a typo.
_VALID_OS_MACROS = (
    # Please keep sorted.
    'OS_ANDROID',
    'OS_ANDROID_HOST',
    'OS_BSD',
    'OS_CAT',  # For testing.
    'OS_CHROMEOS',
    'OS_FREEBSD',
    'OS_IOS',
    'OS_LINUX',
    'OS_MACOSX',
    'OS_NACL',
    'OS_NACL_NONSFI',
    'OS_NACL_SFI',
    'OS_OPENBSD',
    'OS_POSIX',
    'OS_QNX',
    'OS_SOLARIS',
    'OS_WIN',
)
def _CheckNoProductionCodeUsingTestOnlyFunctions(input_api, output_api):
  """Attempts to prevent use of functions intended only for testing in
  non-testing code. For now this is just a best-effort implementation
  that ignores header files and may have some false positives. A
  better implementation would probably need a proper C++ parser.
  """
  # Only implementation files are scanned: for-testing declarations in header
  # files are hard to tell apart from calls without a real C++ parser.
  impl_files = r'.+%s' % _IMPLEMENTATION_EXTENSIONS
  test_fn = r'[ :]test::[^\s]+|ForTest(ing)?|for_test(ing)?'
  call_re = input_api.re.compile(r'(%s)\s*\(' % test_fn)
  comment_re = input_api.re.compile(r'//.*(%s)' % test_fn)
  # Qualified references and definitions are not calls; skip them.
  definition_re = input_api.re.compile(
      r'::[A-Za-z0-9_]+(%s)|(%s)[^;]+\{' % (test_fn, test_fn))

  def SourceFilter(affected_file):
    return input_api.FilterSourceFile(
        affected_file,
        white_list=(impl_files, ),
        black_list=(_EXCLUDED_PATHS +
                    _TEST_CODE_EXCLUDED_PATHS +
                    input_api.DEFAULT_BLACK_LIST))

  problems = []
  for f in input_api.AffectedSourceFiles(SourceFilter):
    path = f.LocalPath()
    for line_number, line in f.ChangedContents():
      if (call_re.search(line) and
          not comment_re.search(line) and
          not definition_re.search(line)):
        problems.append(
            '%s:%d\n %s' % (path, line_number, line.strip()))

  if problems:
    return [output_api.PresubmitPromptOrNotify(_TEST_ONLY_WARNING, problems)]
  return []
def _CheckNoIOStreamInHeaders(input_api, output_api):
  """Checks to make sure no .h files include <iostream>."""
  offending_files = []
  # <iostream> injects a static initializer into every includer; MULTILINE so
  # the anchor matches each line of the whole-file contents.
  iostream_re = input_api.re.compile(r'^#include\s*<iostream>',
                                     input_api.re.MULTILINE)
  for f in input_api.AffectedSourceFiles(input_api.FilterSourceFile):
    if f.LocalPath().endswith('.h'):
      if iostream_re.search(input_api.ReadFile(f)):
        offending_files.append(f)

  if offending_files:
    return [ output_api.PresubmitError(
        'Do not #include <iostream> in header files, since it inserts static '
        'initialization into every file including the header. Instead, '
        '#include <ostream>. See http://crbug.com/94794',
        offending_files) ]
  return []
def _CheckNoUNIT_TESTInSourceFiles(input_api, output_api):
  """Checks to make sure no source files use UNIT_TEST"""
  problems = []
  for f in input_api.AffectedFiles():
    if f.LocalPath().endswith(('.cc', '.mm')):
      # UNIT_TEST is a header-only macro; flag any changed line that uses it.
      problems.extend(
          ' %s:%d' % (f.LocalPath(), line_num)
          for line_num, line in f.ChangedContents()
          if 'UNIT_TEST ' in line or line.endswith('UNIT_TEST'))

  if not problems:
    return []
  return [output_api.PresubmitPromptWarning('UNIT_TEST is only for headers.\n' +
      '\n'.join(problems))]
def _FindHistogramNameInLine(histogram_name, line):
  """Returns True if |line| mentions |histogram_name| (or a prefix of it)."""
  if 'affected-histogram' not in line:
    return histogram_name in line
  # A histogram_suffixes "affected-histogram" tag names only a prefix of the
  # final histogram name; the prefix is the first double-quoted token.
  if '"' not in line:
    return False
  prefix = line.split('"')[1]
  return prefix in histogram_name
def _CheckUmaHistogramChanges(input_api, output_api):
  """Check that UMA histogram names in touched lines can still be found in other
  lines of the patch or in histograms.xml. Note that this check would not catch
  the reverse: changes in histograms.xml not matched in the code itself."""
  touched_histograms = []
  histograms_xml_modifications = []
  # Raw string literal: the original non-raw literal relied on '\(' not being
  # a recognized escape, which is deprecated-warning territory in newer
  # Pythons; the compiled pattern is identical.
  pattern = input_api.re.compile(r'UMA_HISTOGRAM.*\("(.*)"')
  for f in input_api.AffectedFiles():
    # If histograms.xml itself is modified, keep the modified lines for later.
    if f.LocalPath().endswith(('histograms.xml')):
      histograms_xml_modifications = f.ChangedContents()
      continue
    # NOTE(review): endswith(('cc', 'mm', 'cpp')) also matches names like
    # 'foo.xcc'; kept as-is to preserve behaviour.
    if not f.LocalPath().endswith(('cc', 'mm', 'cpp')):
      continue
    for line_num, line in f.ChangedContents():
      found = pattern.search(line)
      if found:
        # Remember (histogram name, file, line) for the lookup below.
        touched_histograms.append([found.group(1), f, line_num])

  # Search for the touched histogram names in the local modifications to
  # histograms.xml, and, if not found, on the base histograms.xml file.
  unmatched_histograms = []
  for histogram_info in touched_histograms:
    histogram_name_found = False
    for line_num, line in histograms_xml_modifications:
      histogram_name_found = _FindHistogramNameInLine(histogram_info[0], line)
      if histogram_name_found:
        break
    if not histogram_name_found:
      unmatched_histograms.append(histogram_info)

  histograms_xml_path = 'tools/metrics/histograms/histograms.xml'
  problems = []
  if unmatched_histograms:
    with open(histograms_xml_path) as histograms_xml:
      for histogram_name, f, line_num in unmatched_histograms:
        # Rescan the file from the start for every unmatched histogram.
        histograms_xml.seek(0)
        histogram_name_found = False
        for line in histograms_xml:
          histogram_name_found = _FindHistogramNameInLine(histogram_name, line)
          if histogram_name_found:
            break
        if not histogram_name_found:
          problems.append(' [%s:%d] %s' %
                          (f.LocalPath(), line_num, histogram_name))

  if not problems:
    return []
  return [output_api.PresubmitPromptWarning('Some UMA_HISTOGRAM lines have '
      'been modified and the associated histogram name has no match in either '
      '%s or the modifications of it:' % (histograms_xml_path), problems)]
def _CheckNoNewWStrings(input_api, output_api):
  """Checks to make sure we don't introduce use of wstrings."""
  problems = []
  for f in input_api.AffectedFiles():
    # Windows-specific files and tests may legitimately use wstring.
    if (not f.LocalPath().endswith(('.cc', '.h')) or
        f.LocalPath().endswith(('test.cc', '_win.cc', '_win.h')) or
        '/win/' in f.LocalPath()):
      continue

    # 'presubmit: allow wstring' suppresses the warning for the next changed
    # line only: the flag is reset on every subsequent line.
    allowWString = False
    for line_num, line in f.ChangedContents():
      if 'presubmit: allow wstring' in line:
        allowWString = True
      elif not allowWString and 'wstring' in line:
        problems.append(' %s:%d' % (f.LocalPath(), line_num))
        allowWString = False
      else:
        allowWString = False

  if not problems:
    return []
  return [output_api.PresubmitPromptWarning('New code should not use wstrings.'
      ' If you are calling a cross-platform API that accepts a wstring, '
      'fix the API.\n' +
      '\n'.join(problems))]
def _CheckNoDEPSGIT(input_api, output_api):
  """Make sure .DEPS.git is never modified manually."""
  touched = [f for f in input_api.AffectedFiles()
             if f.LocalPath().endswith('.DEPS.git')]
  if not touched:
    return []
  return [output_api.PresubmitError(
      'Never commit changes to .DEPS.git. This file is maintained by an\n'
      'automated system based on what\'s in DEPS and your changes will be\n'
      'overwritten.\n'
      'See https://sites.google.com/a/chromium.org/dev/developers/how-tos/get-the-code#Rolling_DEPS\n'
      'for more information')]
def _CheckValidHostsInDEPS(input_api, output_api):
  """Checks that DEPS file deps are from allowed_hosts."""
  # Run only if DEPS file has been modified to annoy fewer bystanders.
  if all(f.LocalPath() != 'DEPS' for f in input_api.AffectedFiles()):
    return []
  # Outsource work to gclient verify
  try:
    input_api.subprocess.check_output(['gclient', 'verify'])
    return []
  # 'except E as e' (valid since Python 2.6, required in Python 3) replaces
  # the legacy 'except E, e' form, which is a syntax error under Python 3.
  except input_api.subprocess.CalledProcessError as error:
    return [output_api.PresubmitError(
        'DEPS file must have only git dependencies.',
        long_text=error.output)]
def _CheckNoBannedFunctions(input_api, output_api):
  """Make sure that banned functions are not used.

  Objective-C files are checked against _BANNED_OBJC_FUNCTIONS and C++ files
  against _BANNED_CPP_FUNCTIONS; matches are reported as presubmit warnings
  or errors depending on each entry's |error| flag.
  """
  warnings = []
  errors = []

  def IsBlacklisted(affected_file, blacklist):
    # True if the file's path matches any regex in |blacklist|.
    local_path = affected_file.LocalPath()
    for item in blacklist:
      if input_api.re.match(item, local_path):
        return True
    return False

  def CheckForMatch(f, line_num, line, func_name, message, error):
    # Shared matcher for both banned-function tables: a leading '/' marks the
    # entry as a regex, otherwise it is a plain substring.
    matched = False
    if func_name[0:1] == '/':
      regex = func_name[1:]
      if input_api.re.search(regex, line):
        matched = True
    elif func_name in line:
      matched = True
    if matched:
      problems = errors if error else warnings
      problems.append(' %s:%d:' % (f.LocalPath(), line_num))
      for message_line in message:
        problems.append(' %s' % message_line)

  file_filter = lambda f: f.LocalPath().endswith(('.mm', '.m', '.h'))
  for f in input_api.AffectedFiles(file_filter=file_filter):
    for line_num, line in f.ChangedContents():
      for func_name, message, error in _BANNED_OBJC_FUNCTIONS:
        CheckForMatch(f, line_num, line, func_name, message, error)

  file_filter = lambda f: f.LocalPath().endswith(('.cc', '.mm', '.h'))
  for f in input_api.AffectedFiles(file_filter=file_filter):
    for line_num, line in f.ChangedContents():
      for func_name, message, error, excluded_paths in _BANNED_CPP_FUNCTIONS:
        if IsBlacklisted(f, excluded_paths):
          continue
        CheckForMatch(f, line_num, line, func_name, message, error)

  result = []
  if (warnings):
    result.append(output_api.PresubmitPromptWarning(
        'Banned functions were used.\n' + '\n'.join(warnings)))
  if (errors):
    result.append(output_api.PresubmitError(
        'Banned functions were used.\n' + '\n'.join(errors)))
  return result
def _CheckNoPragmaOnce(input_api, output_api):
  """Make sure that #pragma once is not used in header files.

  (The original docstring was a copy-paste from the banned-functions check.)
  """
  files = []
  # MULTILINE so the anchor matches each line of the whole-file contents.
  pattern = input_api.re.compile(r'^#pragma\s+once',
                                 input_api.re.MULTILINE)
  for f in input_api.AffectedSourceFiles(input_api.FilterSourceFile):
    if not f.LocalPath().endswith('.h'):
      continue
    contents = input_api.ReadFile(f)
    if pattern.search(contents):
      files.append(f)

  if files:
    return [output_api.PresubmitError(
        'Do not use #pragma once in header files.\n'
        'See http://www.chromium.org/developers/coding-style#TOC-File-headers',
        files)]
  return []
def _CheckNoTrinaryTrueFalse(input_api, output_api):
  """Checks to make sure we don't introduce use of foo ? true : false."""
  problems = []
  pattern = input_api.re.compile(r'\?\s*(true|false)\s*:\s*(true|false)')
  for f in input_api.AffectedFiles():
    if not f.LocalPath().endswith(('.cc', '.h', '.inl', '.m', '.mm')):
      continue

    for line_num, line in f.ChangedContents():
      # NOTE(review): match() only anchors at the start of the line, so a
      # "? true : false" appearing mid-line is never flagged -- confirm
      # whether search() was intended.
      if pattern.match(line):
        problems.append(' %s:%d' % (f.LocalPath(), line_num))

  if not problems:
    return []
  return [output_api.PresubmitPromptWarning(
      'Please consider avoiding the "? true : false" pattern if possible.\n' +
      '\n'.join(problems))]
def _CheckUnwantedDependencies(input_api, output_api):
  """Runs checkdeps on #include statements added in this
  change. Breaking - rules is an error, breaking ! rules is a
  warning.
  """
  import sys
  # We need to wait until we have an input_api object and use this
  # roundabout construct to import checkdeps because this file is
  # eval-ed and thus doesn't have __file__.
  original_sys_path = sys.path
  try:
    sys.path = sys.path + [input_api.os_path.join(
        input_api.PresubmitLocalPath(), 'buildtools', 'checkdeps')]
    import checkdeps
    from cpp_checker import CppChecker
    from rules import Rule
  finally:
    # Restore sys.path to what it was before.
    sys.path = original_sys_path

  added_includes = []
  for f in input_api.AffectedFiles():
    if not CppChecker.IsCppFile(f.LocalPath()):
      continue
    # All changed lines are passed along; checkdeps presumably picks out the
    # #include lines itself -- confirm against buildtools/checkdeps.
    changed_lines = [line for line_num, line in f.ChangedContents()]
    added_includes.append([f.LocalPath(), changed_lines])

  deps_checker = checkdeps.DepsChecker(input_api.PresubmitLocalPath())

  error_descriptions = []
  warning_descriptions = []
  for path, rule_type, rule_description in deps_checker.CheckAddedCppIncludes(
      added_includes):
    description_with_path = '%s\n %s' % (path, rule_description)
    if rule_type == Rule.DISALLOW:
      error_descriptions.append(description_with_path)
    else:
      warning_descriptions.append(description_with_path)

  results = []
  if error_descriptions:
    results.append(output_api.PresubmitError(
        'You added one or more #includes that violate checkdeps rules.',
        error_descriptions))
  if warning_descriptions:
    results.append(output_api.PresubmitPromptOrNotify(
        'You added one or more #includes of files that are temporarily\n'
        'allowed but being removed. Can you avoid introducing the\n'
        '#include? See relevant DEPS file(s) for details and contacts.',
        warning_descriptions))
  return results
def _CheckFilePermissions(input_api, output_api):
  """Check that all files have their permissions properly set."""
  # File modes are not meaningful on Windows checkouts.
  if input_api.platform == 'win32':
    return []
  cmd = [input_api.python_executable, 'tools/checkperms/checkperms.py',
         '--root', input_api.change.RepositoryRoot()]
  for f in input_api.AffectedFiles():
    cmd.extend(['--file', f.LocalPath()])
  proc = input_api.subprocess.Popen(cmd, stdout=input_api.subprocess.PIPE)
  output = proc.communicate()[0].strip()
  if output:
    return [output_api.PresubmitError('checkperms.py failed.',
                                      output.splitlines())]
  return []
def _CheckNoAuraWindowPropertyHInHeaders(input_api, output_api):
  """Makes sure we don't include ui/aura/window_property.h
  in header files.
  """
  include_re = input_api.re.compile(
      r'^#include\s*"ui/aura/window_property.h"')
  offenders = []
  for f in input_api.AffectedFiles():
    if not f.LocalPath().endswith('.h'):
      continue
    offenders.extend(' %s:%d' % (f.LocalPath(), line_num)
                     for line_num, line in f.ChangedContents()
                     if include_re.match(line))
  if offenders:
    return [output_api.PresubmitError(
        'Header files should not include ui/aura/window_property.h',
        offenders)]
  return []
def _CheckIncludeOrderForScope(scope, input_api, file_path, changed_linenums):
  """Checks that the lines in scope occur in the right order.

  1. C system files in alphabetical order
  2. C++ system files in alphabetical order
  3. Project's .h files

  |scope| is a list of (line_num, line) pairs; warnings are only reported for
  problems touching |changed_linenums|.
  """
  c_system_include_pattern = input_api.re.compile(r'\s*#include <.*\.h>')
  cpp_system_include_pattern = input_api.re.compile(r'\s*#include <.*>')
  custom_include_pattern = input_api.re.compile(r'\s*#include ".*')
  C_SYSTEM_INCLUDES, CPP_SYSTEM_INCLUDES, CUSTOM_INCLUDES = range(3)
  state = C_SYSTEM_INCLUDES
  previous_line = ''
  previous_line_num = 0
  problem_linenums = []
  for line_num, line in scope:
    if c_system_include_pattern.match(line):
      if state != C_SYSTEM_INCLUDES:
        problem_linenums.append((line_num, previous_line_num))
      elif previous_line and previous_line > line:
        problem_linenums.append((line_num, previous_line_num))
    elif cpp_system_include_pattern.match(line):
      if state == C_SYSTEM_INCLUDES:
        state = CPP_SYSTEM_INCLUDES
      elif state == CUSTOM_INCLUDES:
        problem_linenums.append((line_num, previous_line_num))
      elif previous_line and previous_line > line:
        problem_linenums.append((line_num, previous_line_num))
    elif custom_include_pattern.match(line):
      if state != CUSTOM_INCLUDES:
        state = CUSTOM_INCLUDES
      elif previous_line and previous_line > line:
        problem_linenums.append((line_num, previous_line_num))
    else:
      # Fix: append a (line_num, previous_line_num) pair like every other
      # branch; the original appended the bare line_num, which would raise
      # TypeError in the unpacking loop below if this branch ever ran.
      problem_linenums.append((line_num, previous_line_num))
    previous_line = line
    previous_line_num = line_num

  warnings = []
  for (line_num, previous_line_num) in problem_linenums:
    if line_num in changed_linenums or previous_line_num in changed_linenums:
      warnings.append(' %s:%d' % (file_path, line_num))
  return warnings
def _CheckIncludeOrderInFile(input_api, f, changed_linenums):
  """Checks the #include order for the given file f."""
  system_include_pattern = input_api.re.compile(r'\s*#include \<.*')
  # Exclude the following includes from the check:
  # 1) #include <.../...>, e.g., <sys/...> includes often need to appear in a
  # specific order.
  # 2) <atlbase.h>, "build/build_config.h"
  excluded_include_pattern = input_api.re.compile(
      r'\s*#include (\<.*/.*|\<atlbase\.h\>|"build/build_config.h")')
  custom_include_pattern = input_api.re.compile(r'\s*#include "(?P<FILE>.*)"')
  # Match the final or penultimate token if it is xxxtest so we can ignore it
  # when considering the special first include.
  test_file_tag_pattern = input_api.re.compile(
      r'_[a-z]+test(?=(_[a-zA-Z0-9]+)?\.)')
  if_pattern = input_api.re.compile(
      r'\s*#\s*(if|elif|else|endif|define|undef).*')
  # Some files need specialized order of includes; exclude such files from this
  # check.
  uncheckable_includes_pattern = input_api.re.compile(
      r'\s*#include '
      '("ipc/.*macros\.h"|<windows\.h>|".*gl.*autogen.h")\s*')

  contents = f.NewContents()
  warnings = []
  line_num = 0

  # Handle the special first include. If the first include file is
  # some/path/file.h, the corresponding including file can be some/path/file.cc,
  # some/other/path/file.cc, some/path/file_platform.cc, some/path/file-suffix.h
  # etc. It's also possible that no special first include exists.
  # If the included file is some/path/file_platform.h the including file could
  # also be some/path/file_xxxtest_platform.h.
  including_file_base_name = test_file_tag_pattern.sub(
      '', input_api.os_path.basename(f.LocalPath()))

  # Phase 1: consume leading custom includes that look like the file's own
  # header; stop as soon as a system include or an unrelated custom include
  # is seen (line_num is rewound so phase 2 re-processes that line).
  for line in contents:
    line_num += 1
    if system_include_pattern.match(line):
      # No special first include -> process the line again along with normal
      # includes.
      line_num -= 1
      break
    match = custom_include_pattern.match(line)
    if match:
      match_dict = match.groupdict()
      header_basename = test_file_tag_pattern.sub(
          '', input_api.os_path.basename(match_dict['FILE'])).replace('.h', '')
      if header_basename not in including_file_base_name:
        # No special first include -> process the line again along with normal
        # includes.
        line_num -= 1
        break

  # Phase 2: split into scopes. Each region between #if and #endif is its own
  # scope, checked independently for ordering.
  scopes = []
  current_scope = []
  for line in contents[line_num:]:
    line_num += 1
    if uncheckable_includes_pattern.match(line):
      continue
    if if_pattern.match(line):
      scopes.append(current_scope)
      current_scope = []
    elif ((system_include_pattern.match(line) or
           custom_include_pattern.match(line)) and
          not excluded_include_pattern.match(line)):
      current_scope.append((line_num, line))
  scopes.append(current_scope)

  for scope in scopes:
    warnings.extend(_CheckIncludeOrderForScope(scope, input_api, f.LocalPath(),
                                               changed_linenums))
  return warnings
def _CheckIncludeOrder(input_api, output_api):
  """Checks that the #include order is correct.

  1. The corresponding header for source files.
  2. C system files in alphabetical order
  3. C++ system files in alphabetical order
  4. Project's .h files in alphabetical order

  Each region separated by #if, #elif, #else, #endif, #define and #undef
  follows these rules separately.
  """
  def FileFilterIncludeOrder(affected_file):
    return input_api.FilterSourceFile(
        affected_file,
        black_list=(_EXCLUDED_PATHS + input_api.DEFAULT_BLACK_LIST))

  warnings = []
  for f in input_api.AffectedFiles(file_filter=FileFilterIncludeOrder):
    if f.LocalPath().endswith(('.cc', '.h')):
      changed = set(num for num, _ in f.ChangedContents())
      warnings.extend(_CheckIncludeOrderInFile(input_api, f, changed))

  if not warnings:
    return []
  return [output_api.PresubmitPromptOrNotify(_INCLUDE_ORDER_WARNING, warnings)]
def _CheckForVersionControlConflictsInFile(input_api, f):
  """Returns one description per changed line in |f| that looks like a VCS
  conflict marker (<<<<<<<, =======, >>>>>>>)."""
  marker_re = input_api.re.compile('^(?:<<<<<<<|>>>>>>>) |^=======$')
  return [' %s:%d %s' % (f.LocalPath(), num, text)
          for num, text in f.ChangedContents()
          if marker_re.match(text)]
def _CheckForVersionControlConflicts(input_api, output_api):
  """Usually this is not intentional and will cause a compile failure."""
  conflicts = []
  for f in input_api.AffectedFiles():
    conflicts += _CheckForVersionControlConflictsInFile(input_api, f)
  if not conflicts:
    return []
  return [output_api.PresubmitError(
      'Version control conflict markers found, please resolve.', conflicts)]
def _CheckHardcodedGoogleHostsInLowerLayers(input_api, output_api):
  def FilterFile(affected_file):
    """Filter function for use with input_api.AffectedSourceFiles,
    below. This filters out everything except non-test files from
    top-level directories that generally speaking should not hard-code
    service URLs (e.g. src/android_webview/, src/content/ and others).
    """
    return input_api.FilterSourceFile(
        affected_file,
        white_list=(r'^(android_webview|base|content|net)[\\\/].*', ),
        black_list=(_EXCLUDED_PATHS +
                    _TEST_CODE_EXCLUDED_PATHS +
                    input_api.DEFAULT_BLACK_LIST))

  base_pattern = '"[^"]*google\.com[^"]*"'
  in_comment_re = input_api.re.compile('//.*%s' % base_pattern)
  url_re = input_api.re.compile(base_pattern)
  hits = [] # (filename, line_number, line) triples
  for f in input_api.AffectedSourceFiles(FilterFile):
    for line_num, line in f.ChangedContents():
      # A URL inside a // comment is fine; only flag real string literals.
      if url_re.search(line) and not in_comment_re.search(line):
        hits.append((f.LocalPath(), line_num, line))
  if not hits:
    return []
  return [output_api.PresubmitPromptOrNotify(
      'Most layers below src/chrome/ should not hardcode service URLs.\n'
      'Are you sure this is correct?',
      [' %s:%d: %s' % hit for hit in hits])]
def _CheckNoAbbreviationInPngFileName(input_api, output_api):
  """Makes sure there are no abbreviations in the name of PNG files.
  The native_client_sdk directory is excluded because it has auto-generated PNG
  files for documentation.
  """
  png_filter = lambda f: input_api.FilterSourceFile(
      f,
      white_list=(r'.*_[a-z]_.*\.png$|.*_[a-z]\.png$',),
      black_list=(r'^native_client_sdk[\\\/]',))
  bad_names = [' %s' % f.LocalPath()
               for f in input_api.AffectedFiles(include_deletes=False,
                                                file_filter=png_filter)]
  if not bad_names:
    return []
  return [output_api.PresubmitError(
      'The name of PNG files should not have abbreviations. \n'
      'Use _hover.png, _center.png, instead of _h.png, _c.png.\n'
      'Contact oshima@chromium.org if you have questions.', bad_names)]
def _FilesToCheckForIncomingDeps(re, changed_lines):
  """Helper for _CheckAddedDepsHaveTargetApprovals: maps added '+' deps
  entries to the set of paths whose OWNERS should be consulted.

  A directory entry (no header basename) is faked as <dir>/DEPS -- a file
  that will seldom or never be subject to per-file include_rules.
  """
  # Deps entries under auto-generated directories are ignored.
  ignored_roots = ('grit', 'jni')
  # Group 1 is the path without basename; group 2 (optional) is a header
  # basename ending in '.h' -- a simple heuristic for file-level deps.
  entry_re = re.compile(
      r"""['"]\+([^'"]+?)(/[a-zA-Z0-9_]+\.h)?['"].*""")
  lookups = set()
  for line in changed_lines:
    match = entry_re.match(line)
    if not match:
      continue
    path, header = match.group(1), match.group(2)
    if path.split('/')[0] in ignored_roots:
      continue
    lookups.add(path + header if header else path + '/DEPS')
  return lookups
def _CheckAddedDepsHaveTargetApprovals(input_api, output_api):
  """When a dependency prefixed with + is added to a DEPS file, we
  want to make sure that the change is reviewed by an OWNER of the
  target file or directory, to avoid layering violations from being
  introduced. This check verifies that this happens.
  """
  # Collect every changed line from all DEPS files in the change.
  changed_lines = set()
  for f in input_api.AffectedFiles():
    filename = input_api.os_path.basename(f.LocalPath())
    if filename == 'DEPS':
      changed_lines |= set(line.strip()
                           for line_num, line
                           in f.ChangedContents())
  if not changed_lines:
    return []

  virtual_depended_on_files = _FilesToCheckForIncomingDeps(input_api.re,
                                                           changed_lines)
  if not virtual_depended_on_files:
    return []

  # When committing, missing approvals are hard errors (unless TBR'ed);
  # during upload they are only notifications.
  if input_api.is_committing:
    if input_api.tbr:
      return [output_api.PresubmitNotifyResult(
          '--tbr was specified, skipping OWNERS check for DEPS additions')]
    if not input_api.change.issue:
      return [output_api.PresubmitError(
          "DEPS approval by OWNERS check failed: this change has "
          "no Rietveld issue number, so we can't check it for approvals.")]
    output = output_api.PresubmitError
  else:
    output = output_api.PresubmitNotifyResult

  owners_db = input_api.owners_db
  owner_email, reviewers = input_api.canned_checks._RietveldOwnerAndReviewers(
      input_api,
      owners_db.email_regexp,
      approval_needed=input_api.is_committing)

  # Fall back to the change author when Rietveld reports no owner.
  owner_email = owner_email or input_api.change.author_email

  reviewers_plus_owner = set(reviewers)
  if owner_email:
    reviewers_plus_owner.add(owner_email)
  missing_files = owners_db.files_not_covered_by(virtual_depended_on_files,
                                                 reviewers_plus_owner)

  # We strip the /DEPS part that was added by
  # _FilesToCheckForIncomingDeps to fake a path to a file in a
  # directory.
  def StripDeps(path):
    start_deps = path.rfind('/DEPS')
    if start_deps != -1:
      return path[:start_deps]
    else:
      return path
  unapproved_dependencies = ["'+%s'," % StripDeps(path)
                             for path in missing_files]

  if unapproved_dependencies:
    output_list = [
      output('Missing LGTM from OWNERS of dependencies added to DEPS:\n %s' %
             '\n '.join(sorted(unapproved_dependencies)))]
    if not input_api.is_committing:
      suggested_owners = owners_db.reviewers_for(missing_files, owner_email)
      output_list.append(output(
          'Suggested missing target path OWNERS:\n %s' %
          '\n '.join(suggested_owners or [])))
    return output_list

  return []
def _CheckSpamLogging(input_api, output_api):
  """Flags implementation files that add LOG(INFO)/LOG_IF(INFO) or
  printf/fprintf(stdout|stderr), which spam the console log."""
  file_inclusion_pattern = r'.+%s' % _IMPLEMENTATION_EXTENSIONS
  # Paths that legitimately log or print (logging itself, tools, benches...).
  black_list = (_EXCLUDED_PATHS +
                _TEST_CODE_EXCLUDED_PATHS +
                input_api.DEFAULT_BLACK_LIST +
                (r"^base[\\\/]logging\.h$",
                 r"^base[\\\/]logging\.cc$",
                 r"^chrome[\\\/]app[\\\/]chrome_main_delegate\.cc$",
                 r"^chrome[\\\/]browser[\\\/]chrome_browser_main\.cc$",
                 r"^chrome[\\\/]browser[\\\/]ui[\\\/]startup[\\\/]"
                     r"startup_browser_creator\.cc$",
                 r"^chrome[\\\/]installer[\\\/]setup[\\\/].*",
                 r"chrome[\\\/]browser[\\\/]diagnostics[\\\/]" +
                     r"diagnostics_writer\.cc$",
                 r"^chrome_elf[\\\/]dll_hash[\\\/]dll_hash_main\.cc$",
                 r"^chromecast[\\\/]",
                 r"^cloud_print[\\\/]",
                 r"^content[\\\/]common[\\\/]gpu[\\\/]client[\\\/]"
                     r"gl_helper_benchmark\.cc$",
                 r"^courgette[\\\/]courgette_tool\.cc$",
                 r"^extensions[\\\/]renderer[\\\/]logging_native_handler\.cc$",
                 r"^ipc[\\\/]ipc_logging\.cc$",
                 r"^native_client_sdk[\\\/]",
                 r"^remoting[\\\/]base[\\\/]logging\.h$",
                 r"^remoting[\\\/]host[\\\/].*",
                 r"^sandbox[\\\/]linux[\\\/].*",
                 r"^tools[\\\/]",
                 r"^ui[\\\/]aura[\\\/]bench[\\\/]bench_main\.cc$",
                 r"^storage[\\\/]browser[\\\/]fileapi[\\\/]" +
                     r"dump_file_system.cc$",))
  source_file_filter = lambda x: input_api.FilterSourceFile(
      x, white_list=(file_inclusion_pattern,), black_list=black_list)

  log_info = []
  printf = []

  for f in input_api.AffectedSourceFiles(source_file_filter):
    # Whole-file scan (not just changed lines), so the checks fire even for
    # pre-existing occurrences in touched files.
    contents = input_api.ReadFile(f, 'rb')
    if input_api.re.search(r"\bD?LOG\s*\(\s*INFO\s*\)", contents):
      log_info.append(f.LocalPath())
    elif input_api.re.search(r"\bD?LOG_IF\s*\(\s*INFO\s*,", contents):
      log_info.append(f.LocalPath())

    if input_api.re.search(r"\bprintf\(", contents):
      printf.append(f.LocalPath())
    elif input_api.re.search(r"\bfprintf\((stdout|stderr)", contents):
      printf.append(f.LocalPath())

  if log_info:
    return [output_api.PresubmitError(
        'These files spam the console log with LOG(INFO):',
        items=log_info)]

  if printf:
    return [output_api.PresubmitError(
        'These files spam the console log with printf/fprintf:',
        items=printf)]
  return []
def _CheckForAnonymousVariables(input_api, output_api):
  """These types are all expected to hold locks while in scope and
  so should never be anonymous (which causes them to be immediately
  destroyed)."""
  # Scoper types that must always be bound to a named variable.
  they_who_must_be_named = [
    'base::AutoLock',
    'base::AutoReset',
    'base::AutoUnlock',
    'SkAutoAlphaRestore',
    'SkAutoBitmapShaderInstall',
    'SkAutoBlitterChoose',
    'SkAutoBounderCommit',
    'SkAutoCallProc',
    'SkAutoCanvasRestore',
    'SkAutoCommentBlock',
    'SkAutoDescriptor',
    'SkAutoDisableDirectionCheck',
    'SkAutoDisableOvalCheck',
    'SkAutoFree',
    'SkAutoGlyphCache',
    'SkAutoHDC',
    'SkAutoLockColors',
    'SkAutoLockPixels',
    'SkAutoMalloc',
    'SkAutoMaskFreeImage',
    'SkAutoMutexAcquire',
    'SkAutoPathBoundsUpdate',
    'SkAutoPDFRelease',
    'SkAutoRasterClipValidate',
    'SkAutoRef',
    'SkAutoTime',
    'SkAutoTrace',
    'SkAutoUnref',
  ]
  # A scoper name immediately followed by '(' or '{' is a temporary.
  anonymous = r'(%s)\s*[({]' % '|'.join(they_who_must_be_named)
  # bad: base::AutoLock(lock.get());
  # not bad: base::AutoLock lock(lock.get());
  bad_pattern = input_api.re.compile(anonymous)
  # good: new base::AutoLock(lock.get())
  good_pattern = input_api.re.compile(r'\bnew\s*' + anonymous)
  errors = []

  for f in input_api.AffectedFiles():
    if not f.LocalPath().endswith(('.cc', '.h', '.inl', '.m', '.mm')):
      continue
    for linenum, line in f.ChangedContents():
      if bad_pattern.search(line) and not good_pattern.search(line):
        errors.append('%s:%d' % (f.LocalPath(), linenum))

  if errors:
    return [output_api.PresubmitError(
        'These lines create anonymous variables that need to be named:',
        items=errors)]
  return []
def _CheckCygwinShell(input_api, output_api):
  """Flags .gyp/.gypi files whose changed lines set msvs_cygwin_shell."""
  gyp_filter = lambda x: input_api.FilterSourceFile(
      x, white_list=(r'.+\.(gyp|gypi)$',))
  offenders = []
  for f in input_api.AffectedSourceFiles(gyp_filter):
    # One entry per file, however many lines mention it.
    if any('msvs_cygwin_shell' in line for _, line in f.ChangedContents()):
      offenders.append(f.LocalPath())
  if offenders:
    return [output_api.PresubmitError(
        'These files should not use msvs_cygwin_shell (the default is 0):',
        items=offenders)]
  return []
def _CheckUserActionUpdate(input_api, output_api):
  """Checks if any new user action has been added."""
  if any('actions.xml' == input_api.os_path.basename(f) for f in
         input_api.LocalPaths()):
    # If actions.xml is already included in the changelist, the PRESUBMIT
    # for actions.xml will do a more complete presubmit check.
    return []

  file_filter = lambda f: f.LocalPath().endswith(('.cc', '.mm'))
  action_re = r'[^a-zA-Z]UserMetricsAction\("([^"]*)'
  current_actions = None
  for f in input_api.AffectedFiles(file_filter=file_filter):
    for line_num, line in f.ChangedContents():
      match = input_api.re.search(action_re, line)
      if not match:
        continue
      # Loads contents in tools/metrics/actions/actions.xml to memory. It's
      # loaded only once.
      if not current_actions:
        with open('tools/metrics/actions/actions.xml') as actions_f:
          current_actions = actions_f.read()
      # Every captured action name must already be listed in actions.xml.
      for action_name in match.groups():
        entry = 'name="{0}"'.format(action_name)
        if entry not in current_actions:
          return [output_api.PresubmitPromptWarning(
              'File %s line %d: %s is missing in '
              'tools/metrics/actions/actions.xml. Please run '
              'tools/metrics/actions/extract_actions.py to update.'
              % (f.LocalPath(), line_num, action_name))]
  return []
def _GetJSONParseError(input_api, filename, eat_comments=True):
  """Returns the ValueError raised when parsing |filename| as JSON, or None on
  success. When |eat_comments| is set, // comments are first stripped via
  tools/json_comment_eater."""
  try:
    data = input_api.ReadFile(filename)
    if eat_comments:
      eater_script = input_api.os_path.join(
          input_api.PresubmitLocalPath(),
          'tools', 'json_comment_eater', 'json_comment_eater.py')
      eater = input_api.subprocess.Popen(
          [input_api.python_executable, eater_script],
          stdin=input_api.subprocess.PIPE,
          stdout=input_api.subprocess.PIPE,
          universal_newlines=True)
      data, _ = eater.communicate(input=data)
    input_api.json.loads(data)
  except ValueError as e:
    return e
  return None
def _GetIDLParseError(input_api, filename):
  """Returns the error (stderr output or ValueError) from running |filename|
  through tools/json_schema_compiler/idl_schema.py, or None on success."""
  try:
    contents = input_api.ReadFile(filename)
    schema_script = input_api.os_path.join(
        input_api.PresubmitLocalPath(),
        'tools', 'json_schema_compiler', 'idl_schema.py')
    checker = input_api.subprocess.Popen(
        [input_api.python_executable, schema_script],
        stdin=input_api.subprocess.PIPE,
        stdout=input_api.subprocess.PIPE,
        stderr=input_api.subprocess.PIPE,
        universal_newlines=True)
    _, stderr = checker.communicate(input=contents)
    return stderr or None
  except ValueError as e:
    return e
def _CheckParseErrors(input_api, output_api):
  """Check that IDL and JSON files do not contain syntax errors."""
  # Maps file extension to the parse-error getter used for it.
  actions = {
    '.idl': _GetIDLParseError,
    '.json': _GetJSONParseError,
  }
  # These paths contain test data and other known invalid JSON files.
  excluded_patterns = [
    r'test[\\\/]data[\\\/]',
    r'^components[\\\/]policy[\\\/]resources[\\\/]policy_templates\.json$',
  ]
  # Most JSON files are preprocessed and support comments, but these do not.
  json_no_comments_patterns = [
    r'^testing[\\\/]',
  ]
  # Only run IDL checker on files in these directories.
  idl_included_patterns = [
    r'^chrome[\\\/]common[\\\/]extensions[\\\/]api[\\\/]',
    r'^extensions[\\\/]common[\\\/]api[\\\/]',
  ]

  def get_action(affected_file):
    # Returns the parser for the file's extension, or None if unparseable.
    filename = affected_file.LocalPath()
    return actions.get(input_api.os_path.splitext(filename)[1])

  def MatchesFile(patterns, path):
    # True if |path| matches any regex in |patterns|.
    for pattern in patterns:
      if input_api.re.search(pattern, path):
        return True
    return False

  def FilterFile(affected_file):
    # Checkable files: a known extension, not excluded, and (for IDL only)
    # inside one of the included directories.
    action = get_action(affected_file)
    if not action:
      return False
    path = affected_file.LocalPath()

    if MatchesFile(excluded_patterns, path):
      return False

    if (action == _GetIDLParseError and
        not MatchesFile(idl_included_patterns, path)):
      return False
    return True

  results = []
  for affected_file in input_api.AffectedFiles(
      file_filter=FilterFile, include_deletes=False):
    action = get_action(affected_file)
    kwargs = {}
    if (action == _GetJSONParseError and
        MatchesFile(json_no_comments_patterns, affected_file.LocalPath())):
      kwargs['eat_comments'] = False
    parse_error = action(input_api,
                         affected_file.AbsoluteLocalPath(),
                         **kwargs)
    if parse_error:
      results.append(output_api.PresubmitError('%s could not be parsed: %s' %
          (affected_file.LocalPath(), parse_error)))
  return results
def _CheckJavaStyle(input_api, output_api):
  """Runs checkstyle on changed java files and returns errors if any exist."""
  import sys
  saved_path = sys.path
  try:
    # Make the bundled checkstyle wrapper importable for the duration of
    # this check only.
    checkstyle_dir = input_api.os_path.join(
        input_api.PresubmitLocalPath(), 'tools', 'android', 'checkstyle')
    sys.path = sys.path + [checkstyle_dir]
    import checkstyle
  finally:
    # Restore sys.path to what it was before.
    sys.path = saved_path
  return checkstyle.RunCheckstyle(
      input_api, output_api, 'tools/android/checkstyle/chromium-style-5.0.xml',
      black_list=_EXCLUDED_PATHS + input_api.DEFAULT_BLACK_LIST)
def _CheckForCopyrightedCode(input_api, output_api):
  """Verifies that newly added code doesn't contain copyrighted material
  and is properly licensed under the standard Chromium license.

  As there can be false positives, we maintain a whitelist file. This check
  also verifies that the whitelist file is up to date.
  """
  import sys
  saved_path = sys.path
  try:
    # Temporarily extend sys.path so the scanner module can be imported.
    scanner_dir = input_api.os_path.join(
        input_api.PresubmitLocalPath(), 'android_webview', 'tools')
    sys.path = sys.path + [scanner_dir]
    import copyright_scanner
  finally:
    # Restore sys.path to what it was before.
    sys.path = saved_path
  return copyright_scanner.ScanAtPresubmit(input_api, output_api)
def _CheckSingletonInHeaders(input_api, output_api):
  """Checks to make sure no header files have |Singleton<|."""
  def FileFilter(affected_file):
    # It's ok for base/memory/singleton.h to have |Singleton<|.
    black_list = (_EXCLUDED_PATHS +
                  input_api.DEFAULT_BLACK_LIST +
                  (r"^base[\\\/]memory[\\\/]singleton\.h$",))
    return input_api.FilterSourceFile(affected_file, black_list=black_list)
  header_extensions = ('.h', '.hxx', '.hpp', '.inl')
  # Negative lookbehind keeps 'class Singleton<' declarations legal.
  pattern = input_api.re.compile(r'(?<!class\s)Singleton\s*<')
  problem_files = []
  for f in input_api.AffectedSourceFiles(FileFilter):
    if not f.LocalPath().endswith(header_extensions):
      continue
    contents = input_api.ReadFile(f)
    for line in contents.splitlines(False):
      # Strip C++ comment.
      if input_api.re.match(r'//', line):
        continue
      if pattern.search(line):
        problem_files.append(f)
        break
  if not problem_files:
    return []
  return [ output_api.PresubmitError(
      'Found Singleton<T> in the following header files.\n' +
      'Please move them to an appropriate source file so that the ' +
      'template gets instantiated in a single compilation unit.',
      problem_files) ]
# Pairs of (deprecated CSS token, preferred standard replacement).  Consumed
# by _CheckNoDeprecatedCSS, which flags any changed .css line containing the
# first element via simple substring matching.
_DEPRECATED_CSS = [
  # Values
  ( "-webkit-box", "flex" ),
  ( "-webkit-inline-box", "inline-flex" ),
  ( "-webkit-flex", "flex" ),
  ( "-webkit-inline-flex", "inline-flex" ),
  ( "-webkit-min-content", "min-content" ),
  ( "-webkit-max-content", "max-content" ),
  # Properties
  ( "-webkit-background-clip", "background-clip" ),
  ( "-webkit-background-origin", "background-origin" ),
  ( "-webkit-background-size", "background-size" ),
  ( "-webkit-box-shadow", "box-shadow" ),
  # Functions
  ( "-webkit-gradient", "gradient" ),
  ( "-webkit-repeating-gradient", "repeating-gradient" ),
  ( "-webkit-linear-gradient", "linear-gradient" ),
  ( "-webkit-repeating-linear-gradient", "repeating-linear-gradient" ),
  ( "-webkit-radial-gradient", "radial-gradient" ),
  ( "-webkit-repeating-radial-gradient", "repeating-radial-gradient" ),
]
def _CheckNoDeprecatedCSS(input_api, output_api):
  """Makes sure deprecated CSS properties, functions or values are not used.

  Our external documentation is ignored by the hooks as it needs to be
  consumed by WebKit.
  """
  black_list = (_EXCLUDED_PATHS +
                _TEST_CODE_EXCLUDED_PATHS +
                input_api.DEFAULT_BLACK_LIST +
                (r"^chrome/common/extensions/docs",
                 r"^chrome/docs",
                 r"^native_client_sdk"))
  def source_filter(f):
    return input_api.FilterSourceFile(
        f, white_list=(r".+\.css$",), black_list=black_list)
  errors = []
  for affected_file in input_api.AffectedFiles(file_filter=source_filter):
    for line_number, line in affected_file.ChangedContents():
      for deprecated_value, preferred in _DEPRECATED_CSS:
        if deprecated_value in line:
          errors.append(output_api.PresubmitError(
              "%s:%d: Use of deprecated CSS %s, use %s instead" %
              (affected_file.LocalPath(), line_number, deprecated_value,
               preferred)))
  return errors
# Pairs of (deprecated JS API, suggested replacement).  Consumed by
# _CheckNoDeprecatedJS via simple substring matching on changed .js lines.
_DEPRECATED_JS = [
  ( "__lookupGetter__", "Object.getOwnPropertyDescriptor" ),
  ( "__defineGetter__", "Object.defineProperty" ),
  ( "__defineSetter__", "Object.defineProperty" ),
]
def _CheckNoDeprecatedJS(input_api, output_api):
  """Make sure that we don't use deprecated JS in Chrome code."""
  black_list = (_EXCLUDED_PATHS + _TEST_CODE_EXCLUDED_PATHS +
                input_api.DEFAULT_BLACK_LIST)
  def source_filter(f):
    # TODO(dbeam): .html?
    return input_api.FilterSourceFile(
        f, white_list=(r".+\.js$",), black_list=black_list)
  errors = []
  for affected_file in input_api.AffectedFiles(file_filter=source_filter):
    for line_number, line in affected_file.ChangedContents():
      for deprecated, replacement in _DEPRECATED_JS:
        if deprecated in line:
          errors.append(output_api.PresubmitError(
              "%s:%d: Use of deprecated JS %s, use %s instead" %
              (affected_file.LocalPath(), line_number, deprecated,
               replacement)))
  return errors
def _CommonChecks(input_api, output_api):
  """Checks common to both upload and commit."""
  # NOTE(review): order determines the order messages are presented in;
  # individual checks are independent of each other.
  results = []
  results.extend(input_api.canned_checks.PanProjectChecks(
      input_api, output_api,
      excluded_paths=_EXCLUDED_PATHS + _TESTRUNNER_PATHS))
  results.extend(_CheckAuthorizedAuthor(input_api, output_api))
  results.extend(
      _CheckNoProductionCodeUsingTestOnlyFunctions(input_api, output_api))
  results.extend(_CheckNoIOStreamInHeaders(input_api, output_api))
  results.extend(_CheckNoUNIT_TESTInSourceFiles(input_api, output_api))
  results.extend(_CheckNoNewWStrings(input_api, output_api))
  results.extend(_CheckNoDEPSGIT(input_api, output_api))
  results.extend(_CheckNoBannedFunctions(input_api, output_api))
  results.extend(_CheckNoPragmaOnce(input_api, output_api))
  results.extend(_CheckNoTrinaryTrueFalse(input_api, output_api))
  results.extend(_CheckUnwantedDependencies(input_api, output_api))
  results.extend(_CheckFilePermissions(input_api, output_api))
  results.extend(_CheckNoAuraWindowPropertyHInHeaders(input_api, output_api))
  results.extend(_CheckIncludeOrder(input_api, output_api))
  results.extend(_CheckForVersionControlConflicts(input_api, output_api))
  results.extend(_CheckPatchFiles(input_api, output_api))
  results.extend(_CheckHardcodedGoogleHostsInLowerLayers(input_api, output_api))
  results.extend(_CheckNoAbbreviationInPngFileName(input_api, output_api))
  results.extend(_CheckForInvalidOSMacros(input_api, output_api))
  results.extend(_CheckForInvalidIfDefinedMacros(input_api, output_api))
  # TODO(danakj): Remove this when base/move.h is removed.
  results.extend(_CheckForUsingSideEffectsOfPass(input_api, output_api))
  results.extend(_CheckAddedDepsHaveTargetApprovals(input_api, output_api))
  # Tabs are disallowed only in .grd files; other files are covered by the
  # project-wide checks above.
  results.extend(
      input_api.canned_checks.CheckChangeHasNoTabs(
          input_api,
          output_api,
          source_file_filter=lambda x: x.LocalPath().endswith('.grd')))
  results.extend(_CheckSpamLogging(input_api, output_api))
  results.extend(_CheckForAnonymousVariables(input_api, output_api))
  results.extend(_CheckCygwinShell(input_api, output_api))
  results.extend(_CheckUserActionUpdate(input_api, output_api))
  results.extend(_CheckNoDeprecatedCSS(input_api, output_api))
  results.extend(_CheckNoDeprecatedJS(input_api, output_api))
  results.extend(_CheckParseErrors(input_api, output_api))
  results.extend(_CheckForIPCRules(input_api, output_api))
  results.extend(_CheckForCopyrightedCode(input_api, output_api))
  results.extend(_CheckForWindowsLineEndings(input_api, output_api))
  results.extend(_CheckSingletonInHeaders(input_api, output_api))
  # When this presubmit script itself is modified, run its unit tests too.
  if any('PRESUBMIT.py' == f.LocalPath() for f in input_api.AffectedFiles()):
    results.extend(input_api.canned_checks.RunUnitTestsInDirectory(
        input_api, output_api,
        input_api.PresubmitLocalPath(),
        whitelist=[r'^PRESUBMIT_test\.py$']))
  return results
def _CheckAuthorizedAuthor(input_api, output_api):
"""For non-googler/chromites committers, verify the author's email address is
in AUTHORS.
"""
# TODO(maruel): Add it to input_api?
import fnmatch
author = input_api.change.author_email
if not author:
input_api.logging.info('No author, skipping AUTHOR check')
return []
authors_path = input_api.os_path.join(
input_api.PresubmitLocalPath(), 'AUTHORS')
valid_authors = (
input_api.re.match(r'[^#]+\s+\<(.+?)\>\s*$', line)
for line in open(authors_path))
valid_authors = [item.group(1).lower() for item in valid_authors if item]
if not any(fnmatch.fnmatch(author.lower(), valid) for valid in valid_authors):
input_api.logging.info('Valid authors are %s', ', '.join(valid_authors))
return [output_api.PresubmitPromptWarning(
('%s is not in AUTHORS file. If you are a new contributor, please visit'
'\n'
'http://www.chromium.org/developers/contributing-code and read the '
'"Legal" section\n'
'If you are a chromite, verify the contributor signed the CLA.') %
author)]
return []
def _CheckPatchFiles(input_api, output_api):
problems = [f.LocalPath() for f in input_api.AffectedFiles()
if f.LocalPath().endswith(('.orig', '.rej'))]
if problems:
return [output_api.PresubmitError(
"Don't commit .rej and .orig files.", problems)]
else:
return []
def _DidYouMeanOSMacro(bad_macro):
try:
return {'A': 'OS_ANDROID',
'B': 'OS_BSD',
'C': 'OS_CHROMEOS',
'F': 'OS_FREEBSD',
'L': 'OS_LINUX',
'M': 'OS_MACOSX',
'N': 'OS_NACL',
'O': 'OS_OPENBSD',
'P': 'OS_POSIX',
'S': 'OS_SOLARIS',
'W': 'OS_WIN'}[bad_macro[3].upper()]
except KeyError:
return ''
def _CheckForInvalidOSMacrosInFile(input_api, f):
  """Check for sensible looking, totally invalid OS macros."""
  preprocessor_re = input_api.re.compile(r'^\s*#')
  os_macro_re = input_api.re.compile(r'defined\((OS_[^)]+)\)')
  bad_uses = []
  for line_number, line in f.ChangedContents():
    # Only preprocessor lines can legitimately test OS_* macros.
    if not preprocessor_re.search(line):
      continue
    for match in os_macro_re.finditer(line):
      macro = match.group(1)
      if macro in _VALID_OS_MACROS:
        continue
      suggestion = _DidYouMeanOSMacro(macro)
      did_you_mean = ' (did you mean %s?)' % suggestion if suggestion else ''
      bad_uses.append(' %s:%d %s%s' % (f.LocalPath(),
                                       line_number,
                                       macro,
                                       did_you_mean))
  return bad_uses
def _CheckForInvalidOSMacros(input_api, output_api):
  """Check all affected files for invalid OS macros."""
  # These languages have no C preprocessor, so they are skipped.
  skipped_extensions = ('.py', '.js', '.html', '.css')
  bad_macros = []
  for f in input_api.AffectedFiles():
    if f.LocalPath().endswith(skipped_extensions):
      continue
    bad_macros.extend(_CheckForInvalidOSMacrosInFile(input_api, f))
  if bad_macros:
    return [output_api.PresubmitError(
        'Possibly invalid OS macro[s] found. Please fix your code\n'
        'or add your macro to src/PRESUBMIT.py.', bad_macros)]
  return []
def _CheckForInvalidIfDefinedMacrosInFile(input_api, f):
"""Check all affected files for invalid "if defined" macros."""
ALWAYS_DEFINED_MACROS = (
"TARGET_CPU_PPC",
"TARGET_CPU_PPC64",
"TARGET_CPU_68K",
"TARGET_CPU_X86",
"TARGET_CPU_ARM",
"TARGET_CPU_MIPS",
"TARGET_CPU_SPARC",
"TARGET_CPU_ALPHA",
"TARGET_IPHONE_SIMULATOR",
"TARGET_OS_EMBEDDED",
"TARGET_OS_IPHONE",
"TARGET_OS_MAC",
"TARGET_OS_UNIX",
"TARGET_OS_WIN32",
)
ifdef_macro = input_api.re.compile(r'^\s*#.*(?:ifdef\s|defined\()([^\s\)]+)')
results = []
for lnum, line in f.ChangedContents():
for match in ifdef_macro.finditer(line):
if match.group(1) in ALWAYS_DEFINED_MACROS:
always_defined = ' %s is always defined. ' % match.group(1)
did_you_mean = 'Did you mean \'#if %s\'?' % match.group(1)
results.append(' %s:%d %s\n\t%s' % (f.LocalPath(),
lnum,
always_defined,
did_you_mean))
return results
def _CheckForInvalidIfDefinedMacros(input_api, output_api):
  """Check all affected files for invalid "if defined" macros."""
  source_extensions = ('.h', '.c', '.cc', '.m', '.mm')
  bad_macros = []
  for f in input_api.AffectedFiles():
    if f.LocalPath().endswith(source_extensions):
      bad_macros.extend(_CheckForInvalidIfDefinedMacrosInFile(input_api, f))
  if not bad_macros:
    return []
  return [output_api.PresubmitError(
      'Found ifdef check on always-defined macro[s]. Please fix your code\n'
      'or check the list of ALWAYS_DEFINED_MACROS in src/PRESUBMIT.py.',
      bad_macros)]
def _CheckForUsingSideEffectsOfPass(input_api, output_api):
"""Check all affected files for using side effects of Pass."""
errors = []
for f in input_api.AffectedFiles():
if f.LocalPath().endswith(('.h', '.c', '.cc', '.m', '.mm')):
for lnum, line in f.ChangedContents():
# Disallow Foo(*my_scoped_thing.Pass()); See crbug.com/418297.
if input_api.re.search(r'\*[a-zA-Z0-9_]+\.Pass\(\)', line):
errors.append(output_api.PresubmitError(
('%s:%d uses *foo.Pass() to delete the contents of scoped_ptr. ' +
'See crbug.com/418297.') % (f.LocalPath(), lnum)))
return errors
def _CheckForIPCRules(input_api, output_api):
  """Check for same IPC rules described in
  http://www.chromium.org/Home/chromium-security/education/security-tips-for-ipc
  """
  base_pattern = r'IPC_ENUM_TRAITS\('
  inclusion_pattern = input_api.re.compile(r'(%s)' % base_pattern)
  comment_pattern = input_api.re.compile(r'//.*(%s)' % base_pattern)
  problems = []
  for f in input_api.AffectedSourceFiles(None):
    local_path = f.LocalPath()
    if not local_path.endswith('.h'):
      continue
    for line_number, line in f.ChangedContents():
      # A commented-out IPC_ENUM_TRAITS( is fine; a live one is flagged.
      if comment_pattern.search(line):
        continue
      if inclusion_pattern.search(line):
        problems.append(
          '%s:%d\n %s' % (local_path, line_number, line.strip()))
  if not problems:
    return []
  return [output_api.PresubmitPromptWarning(
      _IPC_ENUM_TRAITS_DEPRECATED, problems)]
def _CheckForWindowsLineEndings(input_api, output_api):
  """Check source code and known ascii text files for Windows style line
  endings.

  Returns a single PresubmitPromptWarning listing the offending files, or
  an empty list when none are found.
  """
  known_text_files = r'.*\.(txt|html|htm|mhtml|py|gyp|gypi|gn|isolate)$'
  file_inclusion_pattern = (
    known_text_files,
    r'.+%s' % _IMPLEMENTATION_EXTENSIONS
  )
  # Fix: renamed from 'filter', which shadowed the builtin.
  source_file_filter = lambda f: input_api.FilterSourceFile(
      f, white_list=file_inclusion_pattern, black_list=None)
  problems = []
  for f in input_api.AffectedSourceFiles(source_file_filter):
    path = f.LocalPath()
    # Fix: read in binary mode so universal-newline translation cannot hide
    # the '\r\n' we are looking for, and use a context manager so the handle
    # is closed even when we break out early (the original leaked it).
    with open(path, 'rb') as fp:
      for line in fp:
        if line.endswith(b'\r\n'):
          problems.append(path)
          break
  if problems:
    return [output_api.PresubmitPromptWarning('Are you sure that you want '
        'these files to contain Windows style line endings?\n' +
        '\n'.join(problems))]
  return []
def CheckChangeOnUpload(input_api, output_api):
  """Presubmit checks that run at upload (code review) time."""
  # All of these share the (input_api, output_api) signature; order
  # determines the order messages are presented in.
  checks = (
      _CommonChecks,
      _CheckValidHostsInDEPS,
      _CheckJavaStyle,
      input_api.canned_checks.CheckGNFormatted,
      _CheckUmaHistogramChanges,
  )
  results = []
  for check in checks:
    results.extend(check(input_api, output_api))
  return results
def GetTryServerMasterForBot(bot):
  """Returns the Try Server master for the given bot.

  It tries to guess the master from the bot name, but may still fail
  and return None. There is no longer a default master.
  """
  # Potentially ambiguous bot names are listed explicitly.
  explicit_masters = {
      'chromium_presubmit': 'tryserver.chromium.linux',
      'blink_presubmit': 'tryserver.chromium.linux',
      'tools_build_presubmit': 'tryserver.chromium.linux',
  }
  master = explicit_masters.get(bot)
  if master:
    return master
  # Otherwise guess the platform from substrings of the bot name.
  if 'linux' in bot or 'android' in bot or 'presubmit' in bot:
    return 'tryserver.chromium.linux'
  if 'win' in bot:
    return 'tryserver.chromium.win'
  if 'mac' in bot or 'ios' in bot:
    return 'tryserver.chromium.mac'
  return None
def GetDefaultTryConfigs(bots):
  """Returns a dict mapping try master to {bot: set(['defaulttests'])},
  covering exactly the bots in [bots].

  (Docstring fixed: this returns a dict keyed by master, not a list.)
  """
  builders_and_tests = dict((bot, set(['defaulttests'])) for bot in bots)
  # Build up the mapping from tryserver master to bot/test.
  out = dict()
  # items() (rather than Python-2-only iteritems()) keeps this working on
  # both Python 2 and 3.
  for bot, tests in builders_and_tests.items():
    out.setdefault(GetTryServerMasterForBot(bot), {})[bot] = tests
  return out
def CheckChangeOnCommit(input_api, output_api):
  """Presubmit checks that run at commit time."""
  results = list(_CommonChecks(input_api, output_api))
  # TODO(thestig) temporarily disabled, doesn't work in third_party/
  #results.extend(input_api.canned_checks.CheckSvnModifiedDirectories(
  #    input_api, output_api, sources))
  # Make sure the tree is 'open'.
  results += input_api.canned_checks.CheckTreeIsOpen(
      input_api,
      output_api,
      json_url='http://chromium-status.appspot.com/current?format=json')
  results += input_api.canned_checks.CheckChangeHasBugField(
      input_api, output_api)
  results += input_api.canned_checks.CheckChangeHasDescription(
      input_api, output_api)
  return results
def GetPreferredTryMasters(project, change):
  """Returns the master -> {builder: tests} dict the CQ should launch.

  Reads testing/commit_queue/config.json under the repository root, keeps
  the 'launched' try job builders, removes builders that are only triggered
  by other builders, and strips presubmit builders.
  """
  # Fix: removed an unused 'import re' and an unused 'files' local.
  import os
  import json
  with open(os.path.join(
      change.RepositoryRoot(), 'testing', 'commit_queue', 'config.json')) as f:
    cq_config = json.load(f)
  cq_verifiers = cq_config.get('verifiers_no_patch', {})
  cq_try_jobs = cq_verifiers.get('try_job_verifier', {})
  builders = cq_try_jobs.get('launched', {})
  # Builders in 'triggered' are started by other builders, not directly by
  # the CQ, so drop them from the launch list.  items() instead of the
  # Python-2-only iteritems().
  for master, master_config in cq_try_jobs.get('triggered', {}).items():
    for triggered_bot in master_config:
      builders.get(master, {}).pop(triggered_bot, None)
  # Explicitly iterate over copies of the key lists since we mutate the
  # dicts while iterating (list() is required for that on Python 3).
  for master in list(builders.keys()):
    for builder in list(builders[master].keys()):
      # Do not trigger presubmit builders, since they're likely to fail
      # (e.g. OWNERS checks before finished code review), and we're
      # running local presubmit anyway.
      if 'presubmit' in builder:
        builders[master].pop(builder)
  return builders
|
mou4e/zirconium
|
PRESUBMIT.py
|
Python
|
bsd-3-clause
| 64,946
|
[
"VisIt"
] |
73c03dc325168a0a5d63c158ec65589a3800f14bea235698dcf21bc3646d5706
|
# coding=utf-8
"""Create contour from shakemap raster layer."""
import logging
import os
import shutil
from datetime import datetime
import numpy as np
from osgeo import gdal, ogr
from osgeo.gdalconst import GA_ReadOnly
from qgis.core import QgsFeatureRequest, QgsVectorLayer
from safe.common.exceptions import (
ContourCreationError,
FileNotFoundError,
InvalidLayerError
)
from safe.common.utilities import (
romanise,
temp_dir,
unique_filename
)
from safe.definitions import contour_id_field
from safe.definitions.constants import NUMPY_SMOOTHING
from safe.definitions.fields import (
contour_colour_field,
contour_fields,
contour_halign_field,
contour_length_field, contour_mmi_field,
contour_roman_field, contour_valign_field,
contour_x_field, contour_y_field
)
from safe.definitions.layer_geometry import layer_geometry_line
from safe.definitions.layer_modes import layer_mode_classified
from safe.definitions.layer_purposes import layer_purpose_earthquake_contour
from safe.gis.vector.tools import (
create_ogr_field_from_definition,
field_index_from_definition
)
from safe.utilities.i18n import tr
from safe.utilities.metadata import write_iso19115_metadata
from safe.utilities.resources import resources_path
from safe.utilities.styling import mmi_colour
__copyright__ = "Copyright 2017, The InaSAFE Project"
__license__ = "GPL version 3"
__email__ = "info@inasafe.org"
__revision__ = '$Format:%H$'
LOGGER = logging.getLogger('InaSAFE')
def gaussian_kernel(sigma, truncate=4.0):
    """Return Gaussian that truncates at the given number of std deviations.

    The kernel is normalised so its entries sum to 1, which makes the
    leading constant of the Gaussian irrelevant.

    Adapted from https://github.com/nicjhan/gaussian-filter
    """
    sigma = float(sigma)
    radius = int(truncate * sigma + 0.5)
    # Symmetric integer grid centred on 0; the kernel is (2*radius+1)^2.
    yy, xx = np.mgrid[-radius:radius + 1, -radius:radius + 1]
    variance = sigma ** 2
    kernel = 2 * np.exp(-0.5 * (xx ** 2 + yy ** 2) / variance)
    # Normalise so the kernel sums to exactly 1.
    return kernel / np.sum(kernel)
def tile_and_reflect(input):
    """Return a 3x3 tiling of *input* whose outer tiles are reflections.

    The central tile is *input* itself; the eight surrounding tiles are
    mirrored so that the borders of the centre continue smoothly outward.

    Adapted from https://github.com/nicjhan/gaussian-filter
    """
    rows, cols = input.shape
    tiled = np.tile(input, (3, 3))
    # Mirror the left- and right-hand tile columns horizontally.
    for band in range(3):
        r0, r1 = band * rows, (band + 1) * rows
        tiled[r0:r1, 0:cols] = np.fliplr(tiled[r0:r1, 0:cols])
        tiled[r0:r1, -cols:] = np.fliplr(tiled[r0:r1, -cols:])
    # Mirror the top and bottom tile rows vertically.
    for band in range(3):
        c0, c1 = band * cols, (band + 1) * cols
        tiled[0:rows, c0:c1] = np.flipud(tiled[0:rows, c0:c1])
        tiled[-rows:, c0:c1] = np.flipud(tiled[-rows:, c0:c1])
    # Sanity checks: the central tile is untouched and each edge of the
    # centre matches the bordering tile (top, right, bottom, left).  Can be
    # visually verified by plotting the tiled array.
    assert (np.array_equal(input, tiled[rows:2 * rows, cols:2 * cols]))
    assert (np.array_equal(input[0, :], tiled[rows - 1, cols:2 * cols]))
    assert (np.array_equal(input[:, -1], tiled[rows:2 * rows, 2 * cols]))
    assert (np.array_equal(input[-1, :], tiled[2 * rows, cols:2 * cols]))
    assert (np.array_equal(input[:, 0], tiled[rows:2 * rows, cols - 1]))
    return tiled
def convolve(input, weights, mask=None, slow=False):
    """2 dimensional convolution.

    Borders are handled with edge-inclusive reflection (equivalent to the
    tile_and_reflect helper, done here with one np.pad call).

    Masking is supported in the following way:
      * Masked points are skipped.
      * Parts of the input which are masked have weight 0 in the kernel.
      * Since the kernel as a whole needs to have value 1, the weights of
        the masked parts of the kernel are evenly distributed over the
        non-masked parts.

    Adapted from https://github.com/nicjhan/gaussian-filter

    :param input: 2D array to convolve.
    :param weights: 2D kernel; expected to sum to 1.
    :param mask: Optional boolean array of input's shape; True = skip.
    :param slow: Use the explicit per-weight loop (no masking support).
    :returns: Convolved array with the same shape as *input*.
    """
    assert (len(input.shape) == 2)
    assert (len(weights.shape) == 2)
    # Only one reflection is done on each side so the weights array cannot be
    # bigger than width/height of input +1.
    assert (weights.shape[0] < input.shape[0] + 1)
    assert (weights.shape[1] < input.shape[1] + 1)
    rows = input.shape[0]
    cols = input.shape[1]
    # Reflect-pad by a full tile on each side: identical to the 3x3
    # tile-and-reflect construction, but in one C-level call.
    pad = ((rows, rows), (cols, cols))
    tiled_input = np.pad(input, pad, mode='symmetric')
    tiled_mask = None
    if mask is not None:
        # The slow convolve does not support masking.
        assert (not slow)
        assert (input.shape == mask.shape)
        tiled_mask = np.pad(mask, pad, mode='symmetric')
    output = np.copy(input)
    # Half and full kernel extents.  int() instead of the removed np.int
    # alias, which raises AttributeError on NumPy >= 1.24.
    hw_row = weights.shape[0] // 2
    hw_col = weights.shape[1] // 2
    fw_row = weights.shape[0]
    # BUG FIX: this used to read weights.shape[0], which broke the fast path
    # for non-square kernels.
    fw_col = weights.shape[1]
    # Now do convolution on central array by iterating over tiled_input.
    for i, io in zip(list(range(rows, rows * 2)), list(range(rows))):
        for j, jo in zip(list(range(cols, cols * 2)), list(range(cols))):
            # The current central pixel is at (i, j); skip masked points.
            if mask is not None and tiled_mask[i, j]:
                continue
            average = 0.0
            if slow:
                # Iterate over weights/kernel.
                for k in range(weights.shape[0]):  # NOQA
                    for l in range(weights.shape[1]):  # NOQA
                        # Get coordinates of tiled_input array that match
                        # given weights.
                        m = i + k - hw_row
                        n = j + l - hw_col
                        average += tiled_input[m, n] * weights[k, l]
            else:
                # Find the part of the tiled_input array that overlaps with
                # the weights array.
                overlapping = tiled_input[
                    i - hw_row:i - hw_row + fw_row,
                    j - hw_col:j - hw_col + fw_col]
                assert (overlapping.shape == weights.shape)
                # If any of 'overlapping' is masked then set the
                # corresponding points in the weights matrix to 0 and
                # redistribute these to non-masked points.
                if mask is not None:
                    # BUG FIX: the column extent here used fw_row, giving a
                    # wrongly-shaped window for non-square kernels.
                    overlapping_mask = tiled_mask[
                        i - hw_row:i - hw_row + fw_row,
                        j - hw_col:j - hw_col + fw_col]
                    assert (overlapping_mask.shape == weights.shape)
                    # Total value and number of weights clobbered by the mask.
                    clobber_total = np.sum(weights[overlapping_mask])
                    remaining_num = np.sum(np.logical_not(overlapping_mask))
                    # This is impossible since at least i, j is not masked.
                    assert (remaining_num > 0)
                    correction = clobber_total / remaining_num
                    # It is OK if nothing is masked - the weights will not be
                    # changed.
                    if correction == 0:
                        assert (not overlapping_mask.any())
                    # Redistribute to non-masked points.
                    tmp_weights = np.copy(weights)
                    tmp_weights[overlapping_mask] = 0.0
                    tmp_weights[np.where(tmp_weights != 0)] += correction
                    # Should be very close to 1. May not be exact due to
                    # rounding.
                    assert (abs(np.sum(tmp_weights) - 1) < 1e-15)
                else:
                    tmp_weights = weights
                merged = tmp_weights[:] * overlapping
                average = np.sum(merged)
            # Set new output value.
            output[io, jo] = average
    return output
def create_smooth_contour(
        shakemap_layer,
        output_file_path='',
        active_band=1,
        smoothing_method=NUMPY_SMOOTHING,
        smoothing_sigma=0.9):
    """Create contour from a shake map layer by using smoothing method.

    :param shakemap_layer: The shake map raster layer.
    :type shakemap_layer: QgsRasterLayer

    :param output_file_path: Where the contour file will be written.
    :type output_file_path: basestring

    :param active_band: The band which the data located, default to 1.
    :type active_band: int

    :param smoothing_method: The smoothing method that wanted to be used.
    :type smoothing_method: NONE_SMOOTHING, NUMPY_SMOOTHING, SCIPY_SMOOTHING

    :param smoothing_sigma: Parameter for the gaussian filter used in the
        smoothing function.
    :type smoothing_sigma: float

    :returns: The contour of the shake map layer path.
    :rtype: basestring
    """
    # Smooth into a uniquely named temporary GeoTIFF first, then contour it.
    stamp = datetime.now().strftime('%Y%m%d-%H%M%S')
    smoothed_path = unique_filename(
        prefix='temp-shake-map' + stamp,
        suffix='.tif',
        dir=temp_dir('temp'))
    smoothed_path = smooth_shakemap(
        shakemap_layer.source(),
        output_file_path=smoothed_path,
        active_band=active_band,
        smoothing_method=smoothing_method,
        smoothing_sigma=smoothing_sigma)
    return shakemap_contour(
        smoothed_path,
        output_file_path=output_file_path,
        active_band=active_band)
def smooth_shakemap(
        shakemap_layer_path,
        output_file_path='',
        active_band=1,
        smoothing_method=NUMPY_SMOOTHING,
        smoothing_sigma=0.9):
    """Make a smoother shakemap layer from a shake map.

    :param shakemap_layer_path: The shake map raster layer path.
    :type shakemap_layer_path: basestring

    :param output_file_path: Where the smoothed raster will be written.
    :type output_file_path: basestring

    :param active_band: The band which the data located, default to 1.
    :type active_band: int

    :param smoothing_method: The smoothing method that wanted to be used.
    :type smoothing_method: NONE_SMOOTHING, NUMPY_SMOOTHING, SCIPY_SMOOTHING

    :param smoothing_sigma: Parameter for the gaussian filter used in the
        smoothing function.
    :type smoothing_sigma: float

    :returns: The file path of the smoothed shake map layer.
    :rtype: basestring
    """
    if not output_file_path:
        output_file_path = unique_filename(suffix='.tiff', dir=temp_dir())
    # Read the selected band into a numpy array.
    source = gdal.Open(shakemap_layer_path)
    band_array = np.array(
        source.GetRasterBand(active_band).ReadAsArray())
    # Smooth it.  Only numpy smoothing is handled here; anything else
    # (including NONE_SMOOTHING) passes the data through unchanged.
    if smoothing_method == NUMPY_SMOOTHING:
        smoothed = convolve(band_array, gaussian_kernel(smoothing_sigma))
    else:
        smoothed = band_array
    # Write the result as a single-band float GeoTIFF, carrying over the
    # source projection and geo-transform.
    tiff_driver = gdal.GetDriverByName('GTiff')
    target = tiff_driver.Create(
        output_file_path,
        source.RasterXSize,
        source.RasterYSize,
        1,
        gdal.GDT_Float32  # Important, since the default is integer
    )
    target.GetRasterBand(1).WriteArray(smoothed)
    target.SetProjection(source.GetProjection())
    target.SetGeoTransform(source.GetGeoTransform())
    target.FlushCache()
    # Dropping the reference closes the GDAL dataset and flushes to disk.
    del target
    if not os.path.isfile(output_file_path):
        raise FileNotFoundError(tr(
            'The smoothed shakemap is not created. It should be at '
            '{output_file_path}'.format(output_file_path=output_file_path)))
    return output_file_path
def shakemap_contour(shakemap_layer_path, output_file_path='', active_band=1):
    """Creating contour from a shakemap layer.

    :param shakemap_layer_path: The shake map raster layer path.
    :type shakemap_layer_path: basestring

    :param output_file_path: The path where the contour will be saved.
    :type output_file_path: basestring

    :param active_band: The band which the data located, default to 1.
    :type active_band: int

    :returns: The contour of the shake map layer path.
    :rtype: basestring

    :raises: ContourCreationError when the output datasource cannot be
        created or GDAL contour generation fails.
    """
    # Set output path
    if not output_file_path:
        # There are minor issues with shapefile, so we switch to gpkg
        # See https://github.com/inasafe/inasafe/issues/5063
        output_file_path = unique_filename(suffix='.gpkg', dir=temp_dir())
    output_directory = os.path.dirname(output_file_path)
    output_file_name = os.path.basename(output_file_path)
    output_base_name = os.path.splitext(output_file_name)[0]
    # Based largely on
    # http://svn.osgeo.org/gdal/trunk/autotest/alg/contour.py
    # Use Geopackage driver to overcome this:
    # See https://github.com/inasafe/inasafe/issues/5063
    driver = ogr.GetDriverByName('GPKG')
    ogr_dataset = driver.CreateDataSource(output_file_path)
    if ogr_dataset is None:
        # Probably the file existed and could not be overriden
        raise ContourCreationError(
            'Could not create datasource for:\n%s. Check that the file '
            'does not already exist and that you do not have file system '
            'permissions issues' % output_file_path)
    # Set default fid
    options = ['FID={}'.format(contour_id_field['field_name'])]
    layer = ogr_dataset.CreateLayer('contour', options=options)
    # Create one OGR field per InaSAFE contour field definition.
    for contour_field in contour_fields:
        field_definition = create_ogr_field_from_definition(contour_field)
        layer.CreateField(field_definition)
    shakemap_data = gdal.Open(shakemap_layer_path, GA_ReadOnly)
    # see http://gdal.org/java/org/gdal/gdal/gdal.html for these options
    contour_interval = 0.5  # MMI contour every half unit
    contour_base = 0
    fixed_level_list = []
    use_no_data_flag = 0
    no_data_value = -9999
    id_field = 0  # first field defined above
    elevation_field = 1  # second (MMI) field defined above
    try:
        gdal.ContourGenerate(
            shakemap_data.GetRasterBand(active_band),
            contour_interval,
            contour_base,
            fixed_level_list,
            use_no_data_flag,
            no_data_value,
            layer,
            id_field,
            elevation_field)
    except Exception as e:
        LOGGER.exception('Contour creation failed')
        raise ContourCreationError(str(e))
    finally:
        # Release the dataset so the geopackage is flushed and closed even
        # on failure.
        ogr_dataset.Release()
    # Copy over the standard .prj file since ContourGenerate does not
    # create a projection definition
    projection_path = os.path.join(
        output_directory, output_base_name + '.prj')
    source_projection_path = resources_path(
        'converter_data', 'mmi-contours.prj')
    shutil.copyfile(source_projection_path, projection_path)
    # Lastly copy over the standard qml (QGIS Style file)
    qml_path = os.path.join(
        output_directory, output_base_name + '.qml')
    source_qml_path = resources_path('converter_data', 'mmi-contours.qml')
    shutil.copyfile(source_qml_path, qml_path)
    # Create metadata file
    create_contour_metadata(output_file_path)
    # Now update the additional columns - X,Y, ROMAN and RGB
    # NOTE(review): this except/re-raise is a no-op passthrough; kept as-is.
    try:
        set_contour_properties(output_file_path)
    except InvalidLayerError:
        raise
    # Drop the GDAL dataset reference to close the source raster.
    del shakemap_data
    return output_file_path
def set_contour_properties(contour_file_path):
    """Set the X, Y, RGB, ROMAN attributes of the contour layer.

    The layer is edited in place: for every contour line feature this fills
    in a label anchor point (X, Y), the line length, the MMI colour, the
    roman-numeral label and label alignment hints.

    :param contour_file_path: Path of the contour layer.
    :type contour_file_path: str

    :raise: InvalidLayerError if anything is amiss with the layer.
    """
    LOGGER.debug(
        'Set_contour_properties requested for %s.' % contour_file_path)
    layer = QgsVectorLayer(contour_file_path, 'mmi-contours', "ogr")
    if not layer.isValid():
        raise InvalidLayerError(contour_file_path)
    layer.startEditing()
    # Now loop through the db adding selected features to mem layer
    request = QgsFeatureRequest()
    for feature in layer.getFeatures(request):
        if not feature.isValid():
            LOGGER.debug('Skipping feature')
            continue
        # Work out x and y
        # The label anchor Y is the lowest vertex Y of the line; X is the
        # horizontal midpoint of the line's extent.
        line = feature.geometry().asPolyline()
        y = line[0].y()
        x_max = line[0].x()
        x_min = x_max
        for point in line:
            if point.y() < y:
                y = point.y()
            x = point.x()
            if x < x_min:
                x_min = x
            if x > x_max:
                x_max = x
        x = x_min + ((x_max - x_min) / 2)
        # Get length
        length = feature.geometry().length()
        mmi_value = float(feature[contour_mmi_field['field_name']])
        # We only want labels on the whole number contours
        if mmi_value != round(mmi_value):
            roman = ''
        else:
            roman = romanise(mmi_value)
        # RGB from http://en.wikipedia.org/wiki/Mercalli_intensity_scale
        rgb = mmi_colour(mmi_value)
        # Now update the feature
        feature_id = feature.id()
        layer.changeAttributeValue(
            feature_id, field_index_from_definition(layer, contour_x_field), x)
        layer.changeAttributeValue(
            feature_id, field_index_from_definition(layer, contour_y_field), y)
        layer.changeAttributeValue(
            feature_id,
            field_index_from_definition(layer, contour_colour_field), rgb)
        layer.changeAttributeValue(
            feature_id,
            field_index_from_definition(layer, contour_roman_field), roman)
        layer.changeAttributeValue(
            feature_id,
            field_index_from_definition(layer, contour_halign_field), 'Center')
        layer.changeAttributeValue(
            feature_id,
            field_index_from_definition(layer, contour_valign_field), 'HALF')
        layer.changeAttributeValue(
            feature_id,
            field_index_from_definition(layer, contour_length_field), length)
    # Persist all attribute edits and leave editing mode.
    layer.commitChanges()
def create_contour_metadata(contour_path):
    """Create and write an ISO 19115 metadata file for a contour layer.

    :param contour_path: Path where the contour is located.
    :type contour_path: basestring
    """
    # Map each InaSAFE field key to the concrete attribute name it uses.
    field_map = {
        field['key']: field['field_name'] for field in contour_fields}
    metadata = {
        'title': tr('Earthquake Contour'),
        'layer_purpose': layer_purpose_earthquake_contour['key'],
        'layer_geometry': layer_geometry_line['key'],
        'layer_mode': layer_mode_classified['key'],
        'inasafe_fields': field_map,
    }
    write_iso19115_metadata(contour_path, metadata)
|
AIFDR/inasafe
|
safe/gis/raster/contour.py
|
Python
|
gpl-3.0
| 18,679
|
[
"Gaussian"
] |
61ad390e4f573a37be00e1e748a00f5c41563a5635e07af7e996311935e6f936
|
# Copyright 2013-2020 Lawrence Livermore National Security, LLC and other
# Spack Project Developers. See the top-level COPYRIGHT file for details.
#
# SPDX-License-Identifier: (Apache-2.0 OR MIT)
from spack import *
class RSummarizedexperiment(RPackage):
    """SummarizedExperiment container.

    The SummarizedExperiment container contains one or more assays, each
    represented by a matrix-like object of numeric or other mode. The rows
    typically represent genomic ranges of interest and the columns represent
    samples."""

    homepage = "https://bioconductor.org/packages/SummarizedExperiment"
    git = "https://git.bioconductor.org/packages/SummarizedExperiment.git"

    # Versions are pinned to Bioconductor git commits rather than tarballs.
    # NOTE(review): the commit for 1.18.2 is abbreviated ('e22fafe'); Spack
    # convention is a full 40-character hash — confirm and expand.
    version('1.18.2', commit='e22fafe')
    version('1.14.1', commit='2c68d99e11c7345e5ed388370822ea48395c64a4')
    version('1.12.0', commit='5f8416864636add121ec1d6737ebb89a42227fd7')
    version('1.10.1', commit='7ad2e991c8285bfc4b2e15b29d94cc86d07f8f2b')
    version('1.8.1', commit='9d8a29aa9c78bbc7dcc6472537e13fc0d11dc1f7')
    version('1.6.5', commit='ec69cd5cfbccaef148a9f6abdfb3e22e888695d0')

    # Baseline dependencies valid for all packaged versions.
    depends_on('r@3.2:', type=('build', 'run'))
    depends_on('r-genomicranges@1.27.22:', type=('build', 'run'))
    depends_on('r-biobase', type=('build', 'run'))
    depends_on('r-delayedarray@0.1.9:', type=('build', 'run'))
    depends_on('r-matrix', type=('build', 'run'))
    depends_on('r-biocgenerics@0.15.3:', type=('build', 'run'))
    depends_on('r-s4vectors@0.13.13:', type=('build', 'run'))
    depends_on('r-iranges@2.7.2:', type=('build', 'run'))
    depends_on('r-genomeinfodb@1.11.4:', type=('build', 'run'))
    # Tightened minimum versions required by newer releases ('when=' clauses).
    depends_on('r-genomicranges@1.29.14:', when='@1.8.1:', type=('build', 'run'))
    depends_on('r-delayedarray@0.3.20:', when='@1.8.1:', type=('build', 'run'))
    depends_on('r-iranges@2.11.17:', when='@1.8.1:', type=('build', 'run'))
    depends_on('r-genomeinfodb@1.13.1:', when='@1.8.1:', type=('build', 'run'))
    depends_on('r-genomicranges@1.31.17:', when='@1.10.1:', type=('build', 'run'))
    depends_on('r-s4vectors@0.17.25:', when='@1.10.1:', type=('build', 'run'))
    depends_on('r-iranges@2.13.16:', when='@1.10.1:', type=('build', 'run'))
    depends_on('r-genomicranges@1.33.6:', when='@1.12.0:', type=('build', 'run'))
|
iulian787/spack
|
var/spack/repos/builtin/packages/r-summarizedexperiment/package.py
|
Python
|
lgpl-2.1
| 2,278
|
[
"Bioconductor"
] |
822b0de0d413974025eb6111d683bbb51713ed1eebc3db6bf9b50b02c38ebe2a
|
# -*- coding: utf-8 -*-
# SPDX-FileCopyrightText: 2016-2019 The Atlite Authors
#
# SPDX-License-Identifier: GPL-3.0-or-later
"""
Base class for Atlite.
"""
# There is a binary incompatibility between the pip wheels of netCDF4 and
# rasterio, which leads to the first one loaded working correctly while the
# second one fails. By loading netCDF4 first, we ensure that most of atlite's
# functionality works fine, even when the pip wheels have been used; only for
# resampling the sarah dataset it is important to use conda.
# Refer to
# https://github.com/pydata/xarray/issues/2535,
# https://github.com/rasterio/rasterio-wheels/issues/12
import xarray as xr
import pandas as pd
import numpy as np
import dask
import rasterio as rio
import geopandas as gpd
from tempfile import mktemp
from numpy import atleast_1d, append
from warnings import warn
from shapely.geometry import box
from pathlib import Path
from pyproj import CRS
from .utils import CachedAttribute
from .data import cutout_prepare, available_features
from .gis import get_coords, compute_indicatormatrix, compute_availabilitymatrix
from .convert import (convert_and_aggregate, heat_demand, hydro, temperature,
wind, pv, runoff, solar_thermal, soil_temperature)
from .datasets import modules as datamodules
import logging
logger = logging.getLogger(__name__)
class Cutout:
    """
    Cutout base class.

    This class builds the starting point for most atlite functionalities.
    """

    def __init__(self, path, **cutoutparams):
        """
        Provide an Atlite cutout object.

        Create a cutout object to use atlite operations on it. Based on the
        provided parameters, atlite first checks whether this cutout already
        exists on disk and if yes, loads this cutout.

        If the cutout does not yet exist on disk, then atlite creates an
        "unprepared" cutout object. This does not yet contain the full data.
        The process of preparing (loading the data) can then be started with
        `cutout.prepare()`.

        Parameters
        ----------
        path : str | path-like
            NetCDF from which to load or where to store the cutout.
        module : str or list
            The dataset(s) which works as a basis for the cutout. Available
            modules are "era5", "sarah" and "gebco".
            This is necessary when building a new cutout.
            If more than one module is given, their order determines how atlite
            fills up missing features when preparing the cutout with
            `Cutout.prepare()`. For example `influx_diffuse` is provided by
            the `sarah` and the `era5` module. Prioritizing sarah and setting
            module=['sarah', 'era5'] will load `influx_diffuse` from the sarah
            module and ignoring the era5 'influx_diffuse' data.
        time : str | slice
            Time range to include in the cutout, e.g. "2011" or
            ("2011-01-05", "2011-01-25")
            This is necessary when building a new cutout.
        bounds : GeoSeries.bounds | DataFrame, optional
            The outer bounds of the cutout or as a DataFrame
            containing (min.long, min.lat, max.long, max.lat).
        x : slice, optional
            Outer longitudinal bounds for the cutout (west, east).
        y : slice, optional
            Outer latitudinal bounds for the cutout (south, north).
        dx : float, optional
            Step size of the x coordinate. The default is 0.25.
        dy : float, optional
            Step size of the y coordinate. The default is 0.25.
        dt : str, optional
            Frequency of the time coordinate. The default is 'h'. Valid are all
            pandas offset aliases.
        chunks : dict
            Chunks when opening netcdf files. For cutout preparation recommand
            to chunk only along the time dimension. Defaults to {'time': 20}
        data : xr.Dataset
            User provided cutout data. Save the cutout using `Cutout.to_file()`
            afterwards.

        Other Parameters
        ----------------
        sanitize : bool, default True
            Whether to sanitize the data when preparing the cutout. Takes
            effect for 'era5' data loading.
        sarah_dir : str, Path
            Directory of on-disk sarah data. This must be given when using the
            sarah module.
        sarah_interpolate : bool, default True
            Whether to interpolate NaN's in the SARAH data. This takes effect for
            sarah data which has missing data for areas where dawn and
            nightfall happens (ca. 30 min gap).
        gebco_path: str
            Path to find the gebco netcdf file. Only necessary when including
            the gebco module.
        parallel : bool, default False
            Whether to open dataset in parallel mode. Take effect for all
            xr.open_mfdataset usages.
        """
        # Reject the deprecated directory-based cutout format early.
        name = cutoutparams.get("name", None)
        cutout_dir = cutoutparams.get("cutout_dir", None)
        if cutout_dir or name or Path(path).is_dir():
            raise ValueError(
                "Old style format not supported. You can migrate the old "
                "cutout directory using the function "
                "`atlite.utils.migrate_from_cutout_directory()`. The argument "
                "`cutout_dir` and `name` have been deprecated in favour of `path`.")

        path = Path(path).with_suffix(".nc")
        chunks = cutoutparams.pop('chunks', {'time': 100})
        # Persist the chunking as flat attributes so it survives a netcdf
        # round-trip (xarray attrs cannot store nested dicts).
        storable_chunks = {f'chunksize_{k}': v for k, v in (chunks or {}).items()}

        # Backward compatibility for xs, ys, months and years
        if {'xs', 'ys'}.intersection(cutoutparams):
            warn(
                "The arguments `xs` and `ys` have been deprecated in favour of "
                "`x` and `y`", DeprecationWarning)
            if 'xs' in cutoutparams:
                cutoutparams['x'] = cutoutparams.pop('xs')
            if 'ys' in cutoutparams:
                cutoutparams['y'] = cutoutparams.pop('ys')

        if {'years', 'months'}.intersection(cutoutparams):
            warn("The arguments `years` and `months` have been deprecated in "
                 "favour of `time`", DeprecationWarning)
            assert 'years' in cutoutparams
            months = cutoutparams.pop("months", slice(1, 12))
            years = cutoutparams.pop("years")
            cutoutparams["time"] = slice(f"{years.start}-{months.start}",
                                         f"{years.stop}-{months.stop}")

        # Three cases. First, cutout exists -> take the data.
        # Second, data is given -> take it. Third, else -> build a new cutout
        if path.is_file():
            data = xr.open_dataset(str(path), chunks=chunks)
            data.attrs.update(storable_chunks)
            if cutoutparams:
                warn(f'Arguments {", ".join(cutoutparams)} are ignored, since '
                     'cutout is already built.')
        elif 'data' in cutoutparams:
            data = cutoutparams.pop('data')
        else:
            logger.info(f"Building new cutout {path}")

            if 'bounds' in cutoutparams:
                x1, y1, x2, y2 = cutoutparams.pop('bounds')
                cutoutparams.update(x=slice(x1, x2), y=slice(y1, y2))

            try:
                x = cutoutparams.pop('x')
                y = cutoutparams.pop('y')
                time = cutoutparams.pop('time')
                module = cutoutparams.pop('module')
            except KeyError as exc:
                raise TypeError("Arguments 'time' and 'module' must be "
                                "specified. Spatial bounds must either be "
                                "passed via argument 'bounds' or 'x' and 'y'.") from exc

            # TODO: check for dx, dy, x, y fine with module requirements
            coords = get_coords(x, y, time, **cutoutparams)

            attrs = {'module': module, 'prepared_features': [],
                     **storable_chunks, **cutoutparams}
            data = xr.Dataset(coords=coords, attrs=attrs)

        # Check compatibility of CRS: all modules must agree on one CRS.
        # BUGFIX: the assertion message used `module`, which is unbound when
        # the cutout was loaded from file; use `modules` (always defined).
        modules = atleast_1d(data.attrs.get('module'))
        crs = set(CRS(datamodules[m].crs) for m in modules)
        assert len(crs) == 1, f'CRS of {modules} not compatible'

        self.path = path
        self.data = data

    @property
    def name(self):
        """Name of the cutout (file stem of its path)."""
        return self.path.stem

    @property
    def module(self):
        """Dataset module(s) the cutout is based on (str or list)."""
        return self.data.attrs.get('module')

    @property
    def crs(self):
        """Coordinate reference system of the (first) dataset module."""
        return CRS(datamodules[atleast_1d(self.module)[0]].crs)

    @property
    def available_features(self):
        """Series of features obtainable from the cutout's module(s)."""
        return available_features(self.module)

    @property
    def chunks(self):
        """Chunking of the cutout data as stored in its attributes, or None."""
        # BUGFIX: use prefix slicing, not str.lstrip. lstrip strips any
        # leading characters drawn from the *set* 'chunksize_' and would
        # corrupt dimension names that start with one of those characters
        # (e.g. 'chunksize_season' -> 'ason').
        prefix = 'chunksize_'
        chunks = {k[len(prefix):]: v for k, v in self.data.attrs.items()
                  if k.startswith(prefix)}
        return None if chunks == {} else chunks

    @property
    def coords(self):
        """Coordinates of the cutout data."""
        return self.data.coords

    @property
    def meta(self):
        """Deprecated accessor kept for backward compatibility."""
        warn("The `meta` attribute is deprecated in favour of direct "
             "access to `data`", DeprecationWarning)
        return xr.Dataset(self.coords, attrs=self.data.attrs)

    @property
    def shape(self):
        """Spatial shape of the cutout as (len(y), len(x))."""
        return len(self.coords["y"]), len(self.coords["x"])

    @property
    def extent(self):
        """Total extent of the area covered by the cutout (x, X, y, Y)."""
        xs, ys = self.coords['x'].values, self.coords['y'].values
        dx, dy = self.dx, self.dy
        return np.array([xs[0] - dx / 2, xs[-1] + dx / 2,
                         ys[0] - dy / 2, ys[-1] + dy / 2])

    @property
    def bounds(self):
        """Total bounds of the area covered by the cutout (x, y, X, Y)."""
        return self.extent[[0, 2, 1, 3]]

    @property
    def transform(self):
        """Get the affine transform of the cutout. """
        return rio.Affine(self.dx, 0, self.coords['x'].values[0] - self.dx / 2,
                          0, self.dy, self.coords['y'].values[0] - self.dy / 2)

    @property
    def transform_r(self):
        """Get the affine transform of the cutout with reverse y-order."""
        return rio.Affine(self.dx, 0, self.coords['x'].values[0] - self.dx / 2,
                          0, -self.dy, self.coords['y'].values[-1] + self.dy / 2)

    @property
    def dx(self):
        """Spatial step size along x, derived from the coordinate span."""
        x = self.coords['x']
        return round((x[-1] - x[0]).item() / (x.size - 1), 8)

    @property
    def dy(self):
        """Spatial step size along y, derived from the coordinate span."""
        y = self.coords['y']
        return round((y[-1] - y[0]).item() / (y.size - 1), 8)

    @property
    def dt(self):
        """Temporal frequency inferred from the time coordinate."""
        return pd.infer_freq(self.coords['time'].to_index())

    @property
    def prepared(self):
        """True when every available feature has been prepared."""
        return (self.prepared_features.sort_index()
                .equals(self.available_features.sort_index()))

    @property
    def prepared_features(self):
        """Series of prepared variables indexed by (module, feature)."""
        index = [(self.data[v].attrs['module'], self.data[v].attrs['feature'])
                 for v in self.data]
        index = pd.MultiIndex.from_tuples(index, names=['module', 'feature'])
        return pd.Series(list(self.data), index, dtype=object)

    def grid_coordinates(self):
        """Deprecated: use `grid` instead."""
        warn("The function `grid_coordinates` has been deprecated in favour of "
             "`grid`", DeprecationWarning)
        logger.warning("The order of elements returned by `grid_coordinates` changed. "
                       "Check the output of your workflow for correctness.")
        return self.grid[['x', 'y']].values

    def grid_cells(self):
        """Deprecated: use `grid` instead."""
        warn("The function `grid_cells` has been deprecated in favour of `grid`",
             DeprecationWarning)
        logger.warning("The order of elements in `grid_cells` changed. "
                       "Check the output of your workflow for correctness.")
        return self.grid.geometry.to_list()

    @CachedAttribute
    def grid(self):
        """GeoDataFrame of all grid cells (cell centres and box geometries)."""
        xs, ys = np.meshgrid(self.coords["x"], self.coords["y"])
        coords = np.asarray((np.ravel(xs), np.ravel(ys))).T
        # Half the diagonal offset between two neighbouring cell centres.
        span = (coords[self.shape[1] + 1] - coords[0]) / 2
        cells = [box(*c) for c in np.hstack((coords - span, coords + span))]
        return gpd.GeoDataFrame({'x': coords[:, 0], 'y': coords[:, 1],
                                 'geometry': cells, }, crs=self.crs)

    def sel(self, path=None, bounds=None, buffer=0, **kwargs):
        '''
        Select parts of the cutout.

        Parameters
        ----------
        path : str | path-like
            File where to store the sub-cutout. Defaults to a temporary file.
        bounds : GeoSeries.bounds | DataFrame, optional
            The outer bounds of the cutout or as a DataFrame
            containing (min.long, min.lat, max.long, max.lat).
        buffer : float, optional
            Buffer around the bounds. The default is 0.
        **kwargs :
            Passed to `xr.Dataset.sel` for data selection.

        Returns
        -------
        selected : Cutout
            Selected cutout.
        '''
        if path is None:
            # NOTE: mktemp only reserves a name; the file is written later
            # via `to_file`. Consider tempfile.mkstemp if this ever races.
            path = mktemp(prefix=f"{self.path.stem}-", suffix=self.path.suffix,
                          dir=self.path.parent)
        if bounds is not None:
            if buffer > 0:
                bounds = box(*bounds).buffer(buffer).bounds
            x1, y1, x2, y2 = bounds
            kwargs.update(x=slice(x1, x2), y=slice(y1, y2))
        data = self.data.sel(**kwargs)
        return Cutout(path, data=data)

    def merge(self, other, path=None, **kwargs):
        '''
        Merge two cutouts into a single cutout.

        Parameters
        ----------
        other : atlite.Cutout
            Other cutout to merge.
        path : str | path-like
            File where to store the merged cutout. Defaults to a temporary file.
        **kwargs
            Keyword arguments passed to `xarray.merge()`.

        Returns
        -------
        merged : Cutout
            Merged cutout.
        '''
        assert isinstance(other, Cutout)
        if path is None:
            path = mktemp(prefix=f"{self.path.stem}-", suffix=self.path.suffix,
                          dir=self.path.parent)
        attrs = {**self.data.attrs, **other.data.attrs}
        attrs['module'] = list(set(append(*atleast_1d(self.module, other.module))))
        features = self.prepared_features.index.unique('feature')
        otherfeatures = other.prepared_features.index.unique('feature')
        attrs['prepared_features'] = list(features.union(otherfeatures))
        data = self.data.merge(other.data, **kwargs).assign_attrs(**attrs)
        return Cutout(path, data=data)

    def to_file(self, fn=None):
        '''
        Save cutout to a netcdf file.

        Parameters
        ----------
        fn : str | path-like
            File name where to store the cutout, defaults to `cutout.path`.
        '''
        if fn is None:
            fn = self.path
        self.data.to_netcdf(fn)

    def __repr__(self):
        start = np.datetime_as_string(self.coords['time'].values[0], unit='D')
        end = np.datetime_as_string(self.coords['time'].values[-1], unit='D')
        return ('<Cutout "{}">\n'
                ' x = {:.2f} ⟷ {:.2f}, dx = {:.2f}\n'
                ' y = {:.2f} ⟷ {:.2f}, dy = {:.2f}\n'
                ' time = {} ⟷ {}, dt = {}\n'
                ' module = {}\n'
                ' prepared_features = {}'
                .format(self.name, self.coords['x'].values[0],
                        self.coords['x'].values[-1], self.dx,
                        self.coords['y'].values[0],
                        self.coords['y'].values[-1], self.dy,
                        start, end, self.dt,
                        self.module,
                        list(self.prepared_features.index.unique('feature'))))

    def indicatormatrix(self, shapes, shapes_crs=4326):
        """
        Compute the indicatormatrix.

        The indicatormatrix I[i,j] is a sparse representation of the ratio
        of the area in orig[j] lying in dest[i], where orig and dest are
        collections of polygons, i.e.

        A value of I[i,j] = 1 indicates that the shape orig[j] is fully
        contained in shape dest[j].

        Note that the polygons must be in the same crs.

        Parameters
        ---------
        shapes : Collection of shapely polygons

        Returns
        -------
        I : sp.sparse.lil_matrix
            Indicatormatrix
        """
        return compute_indicatormatrix(self.grid, shapes, self.crs, shapes_crs)

    def uniform_layout(self):
        """Get a uniform capacity layout for all grid cells."""
        return xr.DataArray(1, [self.coords['y'], self.coords['x']])

    def layout_from_capacity_list(self, data, col='Capacity'):
        """
        Get a capacity layout aligned to the cutout based on a capacity list.

        Parameters
        ----------
        data : pandas.DataFrame
            Capacity list with columns 'x', 'y' and col. Each capacity entry
            is added to the grid cell intersecting with the coordinate (x,y).
        col : str, optional
            Name of the column with capacity values. The default is 'Capacity'.

        Returns
        -------
        xr.DataArray
            Capacity layout with dimensions 'x' and 'y' indicating the total
            capacity placed within one grid cell.

        Example
        -------
        >>> import atlite
        >>> import powerplantmatching as pm
        >>> data = pm.data.OPSD_VRE_country('DE')
        >>> data = (data.query('Fueltype == "Solar"')
                    .rename(columns={'lon':'x', 'lat':'y'}))
        >>> cutout = atlite.Cutout('Germany', x = slice(-5, 15), y = slice(40, 55),
                                   time='2013-06-01', module='era5')
        >>> cutout.prepare(features=['influx', 'temperature'])
        >>> layout = cutout.layout_from_capacity_list(data)
        >>> pv = cutout.pv('CdTe', 'latitude_optimal', layout=layout)
        >>> pv.plot()
        """
        with dask.config.set(**{'array.slicing.split_large_chunks': False}):
            # Snap each entry to its nearest grid cell, then sum per cell.
            nearest = (self.uniform_layout().chunk()
                       .sel({'x': data.x.values, 'y': data.y.values}, 'nearest'))
        data = (data.assign(x=nearest.x.data, y=nearest.y.data)
                .groupby(['y', 'x'])[col].sum())
        return data.to_xarray().reindex_like(self.data).fillna(0)

    availabilitymatrix = compute_availabilitymatrix

    # Preparation functions
    prepare = cutout_prepare

    # Conversion and aggregation functions
    convert_and_aggregate = convert_and_aggregate
    heat_demand = heat_demand
    temperature = temperature
    soil_temperature = soil_temperature
    solar_thermal = solar_thermal
    wind = wind
    pv = pv
    runoff = runoff
    hydro = hydro
|
FRESNA/atlite
|
atlite/cutout.py
|
Python
|
gpl-3.0
| 18,723
|
[
"NetCDF"
] |
fa0ce0dd611a34f2b050fc135f65e2aed2ba29d12100772c6d8eef2f9a9df09c
|
__RCSID__ = "$Id$"
import socket
import select
import time
import os
from DIRAC.Core.DISET.private.Transports.BaseTransport import BaseTransport
from DIRAC.FrameworkSystem.Client.Logger import gLogger
from DIRAC.Core.Utilities.ReturnValues import S_ERROR, S_OK
class PlainTransport(BaseTransport):
  """Unencrypted TCP transport for DISET.

  Provides blocking client/server socket handling with an optional
  'timeout' entry in ``extraArgsDict`` governing connect/read/write waits.
  """

  def initAsClient(self):
    """Connect to the configured server address.

    :return: S_OK(socket) on success, S_ERROR otherwise
    """
    timeout = None
    if 'timeout' in self.extraArgsDict:
      timeout = self.extraArgsDict['timeout']
    try:
      self.oSocket = socket.create_connection(self.stServerAddress, timeout)
    except socket.error as e:
      # errno 115 (EINPROGRESS) means a non-blocking connect is pending
      if e.args[0] != 115:
        return S_ERROR("Can't connect: %s" % str(e))
      #Connect in progress
      # BUGFIX: use the local `timeout` instead of indexing extraArgsDict,
      # which raised KeyError when no 'timeout' was configured.
      oL = select.select([], [self.oSocket], [], timeout)[1]
      if len(oL) == 0:
        self.oSocket.close()
        return S_ERROR("Connection timeout")
      errno = self.oSocket.getsockopt(socket.SOL_SOCKET, socket.SO_ERROR)
      if errno != 0:
        return S_ERROR("Can't connect: %s" % str((errno, os.strerror(errno))))
    self.remoteAddress = self.oSocket.getpeername()
    return S_OK(self.oSocket)

  def initAsServer(self):
    """Bind and listen on the configured server address.

    :return: S_OK(socket)
    :raise RuntimeError: if the transport was not created in server mode
    """
    if not self.serverMode():
      raise RuntimeError("Must be initialized as server mode")
    try:
      self.oSocket = socket.socket(socket.AF_INET6, socket.SOCK_STREAM)
    except socket.error:
      # IPv6 is probably disabled on this node, try IPv4 only instead
      self.oSocket = socket.socket(socket.AF_INET, socket.SOCK_STREAM)
    if self.bAllowReuseAddress:
      self.oSocket.setsockopt(socket.SOL_SOCKET, socket.SO_REUSEADDR, 1)
    self.oSocket.bind(self.stServerAddress)
    self.oSocket.listen(self.iListenQueueSize)
    return S_OK(self.oSocket)

  def close(self):
    """Shut down and close the underlying socket (best effort)."""
    gLogger.debug("Closing socket")
    try:
      # Best-effort shutdown; the socket may already be disconnected.
      self.oSocket.shutdown(socket.SHUT_RDWR)
    except BaseException:
      pass
    self.oSocket.close()

  def setClientSocket(self, oSocket):
    """Adopt an already-accepted client socket.

    :raise RuntimeError: if the transport was created in server mode
    """
    if self.serverMode():
      # BUGFIX: corrected "Mustbe" typo in the error message.
      raise RuntimeError("Must be initialized as client mode")
    self.oSocket = oSocket
    if 'timeout' in self.extraArgsDict:
      self.oSocket.settimeout(self.extraArgsDict['timeout'])
    self.remoteAddress = self.oSocket.getpeername()

  def acceptConnection(self):
    """Accept one incoming connection and wrap it in a new transport.

    :return: S_OK(PlainTransport)
    """
    # HACK: Was = PlainTransport( self )
    oClientTransport = PlainTransport(self.stServerAddress)
    oClientSocket, stClientAddress = self.oSocket.accept()
    oClientTransport.setClientSocket(oClientSocket)
    return S_OK(oClientTransport)

  def _read(self, bufSize=4096, skipReadyCheck=False):
    """Read up to bufSize bytes, honouring the optional timeout.

    :return: S_OK(bytes) or S_ERROR
    """
    start = time.time()
    timeout = False
    if 'timeout' in self.extraArgsDict:
      timeout = self.extraArgsDict['timeout']
    while True:
      if timeout:
        if time.time() - start > timeout:
          return S_ERROR("Socket read timeout exceeded")
      try:
        data = self.oSocket.recv(bufSize)
        return S_OK(data)
      except socket.error as e:
        # BUGFIX: e[0] is not valid on Python 3 exceptions; use e.args[0]
        # (errno 11 == EAGAIN: retry shortly).
        if e.args[0] == 11:
          time.sleep(0.001)
        else:
          return S_ERROR("Exception while reading from peer: %s" % str(e))
      except Exception as e:
        return S_ERROR("Exception while reading from peer: %s" % str(e))

  def _write(self, buf):
    """Write the whole buffer, honouring the optional timeout.

    :return: S_OK(number of bytes sent) or S_ERROR
    """
    sentBytes = 0
    timeout = False
    if 'timeout' in self.extraArgsDict:
      timeout = self.extraArgsDict['timeout']
    if timeout:
      start = time.time()
    while sentBytes < len(buf):
      try:
        if timeout:
          if time.time() - start > timeout:
            return S_ERROR("Socket write timeout exceeded")
        sent = self.oSocket.send(buf[sentBytes:])
        if sent == 0:
          return S_ERROR("Connection closed by peer")
        if sent > 0:
          sentBytes += sent
      except socket.error as e:
        # BUGFIX: e[0] -> e.args[0] for Python 3 compatibility (EAGAIN).
        if e.args[0] == 11:
          time.sleep(0.001)
        else:
          return S_ERROR("Exception while sending to peer: %s" % str(e))
      except Exception as e:
        return S_ERROR("Error while sending: %s" % str(e))
    return S_OK(sentBytes)

  def checkSanity(*args, **kwargs):
    """Plain transport has no handshake requirements; always sane."""
    return S_OK({})
def delegate(delegationRequest, kwargs):
  """
  Check delegate!

  The plain transport performs no credential delegation, so this always
  succeeds; *delegationRequest* and *kwargs* are ignored.
  """
  return S_OK()
|
fstagni/DIRAC
|
Core/DISET/private/Transports/PlainTransport.py
|
Python
|
gpl-3.0
| 4,076
|
[
"DIRAC"
] |
cfd65e8191edeee8f91b2c7c23251d1293863b536f2ec1f8bda9fe03a9e808df
|
"""
This is the boilerplate default configuration file.
Changes and additions to settings should be done in the config module
located in the application root rather than this config.
"""
config = {

    # webapp2 sessions
    'webapp2_extras.sessions' : {'secret_key': 'Force_be_with'},

    # webapp2 authentication
    'webapp2_extras.auth' : {'user_model': 'boilerplate.models.User',
                             'cookie_name': 'session_name'},

    # jinja2 templates
    'webapp2_extras.jinja2' : {'template_path': ['templates','boilerplate/templates', 'admin/templates'],
                               'environment_args': {'extensions': ['jinja2.ext.i18n']}},

    # application name
    'app_name' : "reviewringer",

    # the default language code for the application.
    # should match whatever language the site uses when i18n is disabled
    'app_lang' : 'en',

    # Locale code = <language>_<territory> (ie 'en_US')
    # to pick locale codes see http://cldr.unicode.org/index/cldr-spec/picking-the-right-language-code
    # also see http://www.sil.org/iso639-3/codes.asp
    # Language codes defined under iso 639-1 http://en.wikipedia.org/wiki/List_of_ISO_639-1_codes
    # Territory codes defined under iso 3166-1 alpha-2 http://en.wikipedia.org/wiki/ISO_3166-1
    # i18n is disabled if the locales list is empty or None
    'locales' : ['en_US', 'es_ES', 'it_IT', 'zh_CN', 'id_ID', 'fr_FR', 'de_DE', 'ru_RU', 'pt_BR', 'cs_CZ'],

    # contact page email settings
    'contact_sender' : "PUT_SENDER_EMAIL_HERE",
    'contact_recipient' : "PUT_RECIPIENT_EMAIL_HERE",

    # Password AES Encryption Parameters
    # aes_key must be 16, 24 or 32 bytes long; salt is used for SHA-512 hashing
    'aes_key' : "12_24_32_BYTES_KEY_FOR_PASSWORDS",
    'salt' : "_PUT_SALT_HERE_TO_SHA512_PASSWORDS_",

    # get your own consumer key and consumer secret by registering at https://dev.twitter.com/apps
    # callback url must be: http://[YOUR DOMAIN]/login/twitter/complete
    'twitter_consumer_key' : 'PUT_YOUR_TWITTER_CONSUMER_KEY_HERE',
    'twitter_consumer_secret' : 'PUT_YOUR_TWITTER_CONSUMER_SECRET_HERE',

    # Facebook Login
    # get your own consumer key and consumer secret by registering at https://developers.facebook.com/apps
    # Very important: set site_url to your domain in the facebook app settings page
    # callback url must be: http://[YOUR DOMAIN]/login/facebook/complete
    'fb_api_key' : 'PUT_YOUR_FACEBOOK_PUBLIC_KEY_HERE',
    'fb_secret' : 'PUT_YOUR_FACEBOOK_PUBLIC_KEY_HERE',

    # LinkedIn Login
    # get your own api key and secret from https://www.linkedin.com/secure/developer
    'linkedin_api' : 'PUT_YOUR_LINKEDIN_PUBLIC_KEY_HERE',
    'linkedin_secret' : 'PUT_YOUR_LINKEDIN_PUBLIC_KEY_HERE',

    # Github login
    # Register apps here: https://github.com/settings/applications/new
    'github_server' : 'github.com',
    'github_redirect_uri' : 'http://www.example.com/social_login/github/complete',
    'github_client_id' : 'PUT_YOUR_GITHUB_CLIENT_ID_HERE',
    'github_client_secret' : 'PUT_YOUR_GITHUB_CLIENT_SECRET_HERE',

    # get your own recaptcha keys by registering at http://www.google.com/recaptcha/
    'captcha_public_key' : "PUT_YOUR_RECAPCHA_PUBLIC_KEY_HERE",
    'captcha_private_key' : "PUT_YOUR_RECAPCHA_PRIVATE_KEY_HERE",

    # Leave "google_analytics_domain" blank if you only want the Analytics code
    'google_analytics_domain' : "YOUR_PRIMARY_DOMAIN (e.g. google.com)",
    'google_analytics_code' : "UA-XXXXX-X",

    # add status codes and templates used to catch and display errors
    # if a status code is not listed here it will use the default app engine
    # stacktrace error page or browser error page
    'error_templates' : {
        403: 'errors/default_error.html',
        404: 'errors/default_error.html',
        500: 'errors/default_error.html',
    },

    # Enable Federated login (OpenID and OAuth)
    # Google App Engine Settings must be set to Authentication Options: Federated Login
    'enable_federated_login' : True,

    # jinja2 base layout template
    'base_layout' : 'base.html',

    # send error emails to developers
    'send_mail_developer' : False,

    # developers who receive the error emails enabled above
    'developers' : (
        ('Santa Klauss', 'snowypal@northpole.com'),
    ),

    # If true, it will write in datastore a log of every email sent
    'log_email' : True,

    # If true, it will write in datastore a log of every visit
    'log_visit' : True,

    # ----> ADD MORE CONFIGURATION OPTIONS HERE <----

} # end config
|
LuckDragon82/demo
|
config/localhost.py
|
Python
|
lgpl-3.0
| 4,151
|
[
"VisIt"
] |
c70d2e63d2557596455290420b4700112f05bf3dca829fcd9fd18f60ba6d5347
|
# Copyright 2021 Google LLC
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
import os.path as osp
import multiworld.envs.mujoco as mwmj
import rlkit.util.hyperparameter as hyp
from multiworld.envs.mujoco.cameras import sawyer_door_env_camera_v0
from rlkit.launchers.launcher_util import run_experiment
import rlkit.torch.vae.vae_schedules as vae_schedules
from rlkit.launchers.skewfit_experiments import \
skewfit_full_experiment
from rlkit.torch.vae.conv_vae import imsize48_default_architecture
if __name__ == "__main__":
variant = dict(
algorithm='Skew-Fit-SAC',
double_algo=False,
online_vae_exploration=False,
imsize=48,
env_id='SawyerDoorHookResetFreeEnv-v1',
init_camera=sawyer_door_env_camera_v0,
skewfit_variant=dict(
save_video=True,
custom_goal_sampler='replay_buffer',
online_vae_trainer_kwargs=dict(
beta=20,
lr=1e-3,
),
save_video_period=50,
qf_kwargs=dict(
hidden_sizes=[400, 300],
),
policy_kwargs=dict(
hidden_sizes=[400, 300],
),
twin_sac_trainer_kwargs=dict(
reward_scale=1,
discount=0.99,
soft_target_tau=1e-3,
target_update_period=1,
use_automatic_entropy_tuning=True,
),
max_path_length=100,
algo_kwargs=dict(
batch_size=1024,
num_epochs=170,
num_eval_steps_per_epoch=500,
num_expl_steps_per_train_loop=500,
num_trains_per_train_loop=1000,
min_num_steps_before_training=10000,
vae_training_schedule=vae_schedules.custom_schedule,
oracle_data=False,
vae_save_period=50,
parallel_vae_train=False,
),
replay_buffer_kwargs=dict(
start_skew_epoch=10,
max_size=int(100000),
fraction_goals_rollout_goals=0.2,
fraction_goals_env_goals=0.5,
exploration_rewards_type='None',
vae_priority_type='vae_prob',
priority_function_kwargs=dict(
sampling_method='importance_sampling',
decoder_distribution='gaussian_identity_variance',
num_latents_to_sample=10,
),
power=-0.5,
relabeling_goal_sampling_mode='custom_goal_sampler',
),
exploration_goal_sampling_mode='custom_goal_sampler',
evaluation_goal_sampling_mode='presampled',
training_mode='train',
testing_mode='test',
reward_params=dict(
type='latent_distance',
),
observation_key='latent_observation',
desired_goal_key='latent_desired_goal',
presampled_goals_path=osp.join(
osp.dirname(mwmj.__file__),
"goals",
"door_goals.npy",
),
presample_goals=True,
vae_wrapped_env_kwargs=dict(
sample_from_true_prior=True,
),
),
train_vae_variant=dict(
representation_size=16,
beta=20,
num_epochs=0,
dump_skew_debug_plots=False,
decoder_activation='gaussian',
generate_vae_dataset_kwargs=dict(
N=2,
test_p=.9,
use_cached=True,
show=False,
oracle_dataset=False,
n_random_steps=1,
non_presampled_goal_img_is_garbage=True,
),
vae_kwargs=dict(
decoder_distribution='gaussian_identity_variance',
input_channels=3,
architecture=imsize48_default_architecture,
),
algo_kwargs=dict(
lr=1e-3,
),
save_period=1,
),
)
search_space = {
}
sweeper = hyp.DeterministicHyperparameterSweeper(
search_space, default_parameters=variant,
)
n_seeds = 1
mode = 'local'
exp_prefix = 'dev-{}'.format(
__file__.replace('/', '-').replace('_', '-').split('.')[0]
)
for exp_id, variant in enumerate(sweeper.iterate_hyperparameters()):
for _ in range(n_seeds):
run_experiment(
skewfit_full_experiment,
exp_prefix=exp_prefix,
mode=mode,
variant=variant,
use_gpu=True,
)
|
google-research/DBAP-algorithm
|
third_party/rlkit_library/examples/skewfit/sawyer_door.py
|
Python
|
apache-2.0
| 5,220
|
[
"Gaussian"
] |
36815222cc8bc60bb7c582b9f511a6c9a9d150cf860b48da6cca4b61183b7845
|
import logging
import multiprocessing
import re
import os
import tempfile
import yaml
import sys
from collections import OrderedDict
import click
import urllib
# Adapted from: https://github.com/pnnl/atlas/blob/master/atlas/conf.py
# Configure root logging once at import time for CLI output.
logging.basicConfig(level=logging.INFO, datefmt="%Y-%m-%d %H:%M", format="[%(asctime)s %(levelname)s] %(message)s")
# ENA FTP host serving the raw read data.
host = "ftp.sra.ebi.ac.uk"
# Default ENA project accession used as the example remote source.
project = "PRJEB14409"
#project = "PRJNA319605"
# http://stackoverflow.com/a/3675423
def replace_last(source_string, replace_what, replace_with):
    """Replace only the final occurrence of *replace_what* in *source_string*.

    Returns the string unchanged when *replace_what* does not occur.
    """
    prefix, separator, suffix = source_string.rpartition(replace_what)
    # rpartition yields an empty separator when nothing matched.
    return suffix if not separator else prefix + replace_with + suffix
def get_ena(project):
    """Yield os.walk-style tuples for the FASTQ files of an ENA project.

    Queries the EBI ENA warehouse file report for *project* and yields one
    ``(dirpath, "", [filename])`` tuple per FASTQ file, so the result can be
    consumed like ``os.walk`` output by ``get_sample_files``.

    :param project: ENA project accession, e.g. ``PRJEB14409``
    """
    from urllib import request
    samples = ""
    try:
        # First line of the report is the header; skip it.
        samples = request.urlopen("http://www.ebi.ac.uk/ena/data/warehouse/filereport?accession=%s&result=read_run&fields=fastq_ftp" % project).readlines()[1:]
    # NOTE(review): `urllib.error` is only available here because importing
    # urllib.request loads it as a side effect — confirm, or import it
    # explicitly at module level.
    except urllib.error.HTTPError:
        print("Not a valid ENA project")
    for sample in samples:
        # Paired-end runs list multiple FTP paths separated by ';'.
        for fastq in sample.strip().split(b';'):
            dirpath = os.path.dirname(fastq).decode("utf-8")
            filename = os.path.basename(fastq).decode("utf-8")
            yield (dirpath,"",[filename])
def get_sample_files(path, remote):
    """Collect FASTQ files grouped by sample id, locally or from ENA.

    Walks *path* recursively (or the ENA project listing when *remote* is
    given), derives a sample id from each ``.fastq``/``.fq`` file name and
    pairs forward/reverse mates (``_R1``/``_R2`` or ``_1``/``_2``) into a
    single entry with the forward read first.

    :param path: local directory searched recursively for FASTQ files
    :param remote: ENA project accession; when given, *path* is ignored
    :return: OrderedDict mapping sample id -> {'path': [fastq paths]}
    """
    samples = OrderedDict()
    seen = set()
    # Idiom fix: compare to None with `is not`; assign the walker per branch
    # instead of pre-initialising it with a dummy string.
    if remote is not None:
        walker = get_ena(remote)
    else:
        walker = os.walk(path, followlinks=True)
    for dir_name, sub_dirs, files in walker:
        for fname in files:
            if ".fastq" in fname or ".fq" in fname:
                # Derive the sample id: strip extension and read-direction
                # markers, normalise '_' and spaces to '-'.
                sample_id = fname.partition(".fastq")[0]
                if ".fq" in sample_id:
                    sample_id = fname.partition(".fq")[0].replace("_","-")
                sample_id = sample_id.replace("_R1", "").replace("_r1", "").replace("_R2", "").replace("_r2", "")
                sample_id = re.sub("_1$", "", sample_id)
                sample_id = re.sub("_2$", "", sample_id)
                sample_id = sample_id.replace("_", "-").replace(" ", "-")
                fq_path = os.path.join(dir_name, fname)
                fastq_paths = [fq_path]
                # Skip mates that were already attached to their partner.
                if fq_path in seen: continue
                if "_R1" in fname or "_r1" in fname or "_1" in fname:
                    # Predict the reverse mate's name and pre-register it.
                    fname = replace_last(fname,"_1.","_2.")
                    r2_path = os.path.join(dir_name, fname.replace("_R1", "_R2").replace("_r1", "_r2"))
                    if not r2_path == fq_path:
                        seen.add(r2_path)
                        fastq_paths.append(r2_path)
                if "_R2" in fname or "_r2" in fname or "_2" in fname:
                    # Predict the forward mate and keep it first in the list.
                    fname = replace_last(fname,"_2.","_1.")
                    r1_path = os.path.join(dir_name, fname.replace("_R2", "_R1").replace("_r2", "_r1"))
                    if not r1_path == fq_path:
                        seen.add(r1_path)
                        fastq_paths.insert(0, r1_path)
                if sample_id in samples:
                    # Idiom fix: logging.warn is a deprecated alias.
                    logging.warning("Duplicate sample %s was found after renaming; skipping..." % sample_id)
                    continue
                samples[sample_id] = {'path': fastq_paths }
    return samples
def create_metadata_template(outfile, samples):
    """Write a tab-separated metadata template with one row per sample.

    :param outfile: path of the file to (over)write
    :param samples: iterable of sample identifiers; each sample gets an
        identical SampleID and Alias column
    """
    rows = ["#SampleID\tAlias"]
    rows.extend("%s\t%s" % (sample_id, sample_id) for sample_id in samples)
    with open(outfile, "w") as handle:
        handle.write("\n".join(rows) + "\n")
@click.command()
@click.option('--project', prompt="Give your project a unique name", required=True, help='Give your project a nice name')
@click.option('--config', default="config.yaml", show_default=True, help='File to write the configuration to')
@click.option('--remote', help='Specify a ENA project to use as remote data (for example PRJEB14409')
@click.option('--path', default="../data", show_default=True, help='path to data folder')
@click.option('--rename', required=False, help='provide a file for renaming samples')
@click.option('--forward_primer', prompt="Which forward primer did you use?", required=True, default="CCTACGGGNGGCWGCAG", help="Which forward primer did you use?")
@click.option('--reverse_primer', prompt="Which reverse primer did you use?", required=True, default="GACTACHVGGGTATCTAATCC", help="Which reverse primer did you use?")
@click.option('--mergepairs', prompt="Choose wich method to use for stitching paired reads (vsearch, pandaseq)", required=True, default="vsearch", type=click.Choice(['pandaseq', 'vsearch', 'none']), help="Choose wich method to use for stitching paired reads")
@click.option('--classification', prompt="Choose wich classification option you want to use (sina, stampa, rdp, blast)", required=True, type=click.Choice(['sina', 'stampa', 'rdp', 'blast']), help="Choose wich classification option you want to use")
@click.option('--reference_db', prompt="Choose wich reference database to use (silva, unite)", required=True, type=click.Choice(['silva', 'unite']), help="Choose wich reference database to use")
@click.option('--clustering', prompt="Choose wich clustering method you want to use (usearch_smallmem, swarm)", required=True, default="usearch_smallmem", type=click.Choice(['usearch_smallmem', 'swarm']), help="Choose wich clustering method you want to use")
def make_config(project,config,path,remote, rename, forward_primer, reverse_primer, mergepairs, classification, reference_db, clustering):
    """Write the file `config` and complete the sample names and paths for all files in `path`."""
    # Teach PyYAML to emit OrderedDict as a plain mapping so the generated
    # YAML preserves insertion order.
    represent_dict_order = lambda self, data: self.represent_mapping('tag:yaml.org,2002:map', data.items())
    yaml.add_representer(OrderedDict, represent_dict_order)
    path = os.path.realpath(path)
    conf = OrderedDict()
    # Discover sample FASTQ files either locally or from the ENA project.
    samples = get_sample_files(path, remote)
    # Optional renaming: each line of the rename file is "<old> <new>".
    if rename:
        renamed = 0
        for line in open(rename):
            sample, newname = line.split()
            if sample in samples:
                newname = newname.replace("_","-")
                samples[newname] = samples.pop(sample)
                renamed += 1
    # Side effect: always (re)writes metadata.txt in the working directory.
    create_metadata_template("metadata.txt", samples.keys())
    logging.info("Found %d samples under %s" % (len(samples), path if remote == None else "remote project %s " % remote))
    if rename:
        logging.info("Renamed %d samples" % renamed)
    # Static pipeline defaults follow; edit the generated YAML to override.
    conf["project"] = project
    conf["minsize"] = 2
    conf["adapters_fasta"] = "/data/ngs/adapters/contaminant_list.txt"
    conf["pandaseq_overlap"] = "10"
    conf["pandaseq_quality"] = "25"
    conf["pandaseq_minlength"] = "100"
    conf["pandaseq_maxlength"] = "700"
    conf["quality_control"] = OrderedDict()
    conf["quality_control"]["barcode"] = OrderedDict()
    conf["quality_control"]["barcode"]["threshold"] = 5
    conf["quality_control"]["barcode"]["length"] = 8
    conf["quality_control"]["barcode"]["seperator"] = "#"
    conf["quality_control"]["trimming"] = OrderedDict()
    conf["quality_control"]["trimming"]["quality"] = 25
    conf["forward_primer"] = forward_primer
    conf["reverse_primer"] = reverse_primer
    conf["mergepairs"] = mergepairs
    conf["vsearch_minmergelen"] = "200"
    conf["metadata"] = "metadata.txt"
    if remote != None:
        conf["remote"] = True
    else:
        conf["remote"] = False
    conf["barcode_in_header"] = False
    conf["its"] = False
    conf["its_region"] = "ITS2"
    conf["clustering"] = clustering
    conf["classification"] = classification
    conf["use_full_lineage"] = False
    conf["rdp_confidence_cutoff"] = 0.80
    conf["reference_db"] = reference_db
    conf["convert_to_casava1.8"] = False
    conf["data"] = samples
    # Dump the assembled configuration as block-style YAML.
    with open(config, "w") as f:
        print(yaml.dump(conf, default_flow_style=False), file=f)
    logging.info("Configuration file written to %s" % config)
if __name__ == "__main__":
    # Entry point: run the click command when executed as a script.
    make_config()
|
nioo-knaw/hydra
|
conf.py
|
Python
|
mit
| 7,810
|
[
"BLAST"
] |
21439bdfe8c043ee7defdea4c8918ad8d50dcb1cad23c72f055715701c5a66ea
|
"""
Cost layers.
TODO: write more documentation
"""
__docformat__ = 'restructedtext en'
__authors__ = ("Razvan Pascanu "
"KyungHyun Cho "
"Caglar Gulcehre ")
__contact__ = "Razvan Pascanu <r.pascanu@gmail>"
import numpy
import copy
import logging
import theano
import theano.tensor as TT
from theano.sandbox.rng_mrg import MRG_RandomStreams as RandomStreams
from groundhog import utils
from groundhog.utils import sample_weights, sample_weights_classic,\
init_bias, constant_shape, sample_zeros
from .basic import Layer
logger = logging.getLogger(__name__)
class CostLayer(Layer):
"""
Base class for all cost layers
"""
    def __init__(self, rng,
                 n_in,
                 n_out,
                 scale,
                 sparsity,
                 rank_n_approx=0,
                 rank_n_activ='lambda x: x',
                 weight_noise=False,
                 init_fn='sample_weights_classic',
                 bias_fn='init_bias',
                 bias_scale=0.,
                 sum_over_time=True,
                 additional_inputs=None,
                 grad_scale=1.,
                 use_nce=False,
                 # added by Zhaopeng Tu, 2015-11-07
                 use_coverage_cost=False,
                 name=None):
        """
        :type rng: numpy random generator
        :param rng: numpy random generator used to sample weights
        :type n_in: int
        :param n_in: number of input units
        :type n_out: int
        :param n_out: number of output units
        :type scale: float or list of
        :param scale: depending on the initialization function, it can be
            the standard deviation of the Gaussian from which the weights
            are sampled or the largest singular value. If a single value it
            will be used for each layer, otherwise it has to have one value
            for each layer
        :type sparsity: int or list of
        :param sparsity: if a single value, it will be used for each layer,
            otherwise it has to be a list with as many values as layers. If
            negative, it means the weight matrix is dense. Otherwise it
            means this many randomly selected input units are connected to
            an output unit
        :type rank_n_approx: int
        :param rank_n_approx: It applies to the first layer only. If
            positive and larger than 0, the first weight matrix is
            factorized into two matrices. The first one goes from input to
            `rank_n_approx` hidden units, the second from `rank_n_approx` to
            the number of units on the second layer
        :type rank_n_activ: string or function
        :param rank_n_activ: Function that is applied on on the intermediary
            layer formed from factorizing the first weight matrix (Q: do we
            need this?)
        :type weight_noise: bool
        :param weight_noise: If true, the model is used with weight noise
            (and the right shared variable are constructed, to keep track
            of the noise)
        :type init_fn: string or function
        :param init_fn: function used to initialize the weights of the
            layer. We recommend using either `sample_weights_classic` or
            `sample_weights` defined in the utils
        :type bias_fn: string or function
        :param bias_fn: function used to initialize the biases. We recommend
            using `init_bias` defined in the utils
        :type bias_scale: float
        :param bias_scale: argument passed to `bias_fn`, depicting the scale
            of the initial bias
        :type sum_over_time: bool
        :param sum_over_time: flag, stating if, when computing the cost, we
            should take the sum over time, or the mean. If you have variable
            length sequences, please take the sum over time
        :type additional_inputs: None or list of ints
        :param additional_inputs: dimensionality of each additional input
        :type grad_scale: float or theano scalar
        :param grad_scale: factor with which the gradients with respect to
            the parameters of this layer are scaled. It is used for
            differentiating between the different parameters of a model.
        :type use_nce: bool
        :param use_nce: flag, if true, do not use MLE, but NCE-like cost
        :type use_coverage_cost: bool
        :param use_coverage_cost: flag; if true, an extra trainable scalar
            ``CC`` is created in ``_init_params`` (NOTE(review): the coverage
            penalty itself is applied elsewhere -- not visible in this class)
        :type name: string
        :param name: name of the layer (used to name parameters). NB: in
            this library names are very important because certain parts of the
            code relies on name to disambiguate between variables, therefore
            each layer should have a unique name.
        """
        self.grad_scale = grad_scale
        assert rank_n_approx >= 0, "Please enter a valid rank_n_approx"
        self.rank_n_approx = rank_n_approx
        # Callables may be given by name (string) and are resolved with eval
        # so they can be specified in configuration files.
        # NOTE(review): eval on a config string is unsafe with untrusted input.
        if type(rank_n_activ) is str:
            rank_n_activ = eval(rank_n_activ)
        self.rank_n_activ = rank_n_activ
        super(CostLayer, self).__init__(n_in, n_out, rng, name)
        # Theano RNG used e.g. when sampling from the output distribution.
        self.trng = RandomStreams(self.rng.randint(int(1e6)))
        self.scale = scale
        if isinstance(bias_fn, str):
            self.bias_fn = eval(bias_fn)
        else:
            self.bias_fn = bias_fn
        self.bias_scale = bias_scale
        self.sum_over_time = sum_over_time
        self.weight_noise = weight_noise
        self.sparsity = sparsity
        # Negative sparsity means a dense weight matrix.
        if self.sparsity < 0:
            self.sparsity = n_out
        if type(init_fn) is str:
            init_fn = eval(init_fn)
        self.init_fn = init_fn
        self.additional_inputs = additional_inputs
        self.use_nce = use_nce
        # added by Zhaopeng Tu, 2015-11-07
        self.use_coverage_cost = use_coverage_cost
        self._init_params()
def _init_params(self):
"""
Initialize the parameters of the layer, either by using sparse
initialization or small isotropic noise.
"""
# added by Zhaopeng Tu, 2015-11-29
if self.use_coverage_cost:
self.CC = theano.shared(numpy.cast[theano.config.floatX](1.0), name='CC_%s' % self.name)
self.params += [self.CC]
if self.rank_n_approx:
W_em1 = self.init_fn(self.n_in,
self.rank_n_approx,
self.sparsity,
self.scale,
self.rng)
W_em2 = self.init_fn(self.rank_n_approx,
self.n_out,
self.sparsity,
self.scale,
self.rng)
self.W_em1 = theano.shared(W_em1,
name='W1_%s' % self.name)
self.W_em2 = theano.shared(W_em2,
name='W2_%s' % self.name)
self.b_em = theano.shared(
self.bias_fn(self.n_out, self.bias_scale, self.rng),
name='b_%s' % self.name)
self.params += [self.W_em1, self.W_em2, self.b_em]
if self.weight_noise:
self.nW_em1 = theano.shared(W_em1*0.,
name='noise_W1_%s' % self.name)
self.nW_em2 = theano.shared(W_em*0.,
name='noise_W2_%s' % self.name)
self.nb_em = theano.shared(b_em*0.,
name='noise_b_%s' % self.name)
self.noise_params = [self.nW_em1, self.nW_em2, self.nb_em]
self.noise_params_shape_fn = [
constant_shape(x.get_value().shape)
for x in self.noise_params]
else:
W_em = self.init_fn(self.n_in,
self.n_out,
self.sparsity,
self.scale,
self.rng)
self.W_em = theano.shared(W_em,
name='W_%s' % self.name)
self.b_em = theano.shared(
self.bias_fn(self.n_out, self.bias_scale, self.rng),
name='b_%s' % self.name)
self.params += [self.W_em, self.b_em]
if self.weight_noise:
self.nW_em = theano.shared(W_em*0.,
name='noise_W_%s' % self.name)
self.nb_em = theano.shared(
numpy.zeros((self.n_out,), dtype=theano.config.floatX),
name='noise_b_%s' % self.name)
self.noise_params = [self.nW_em, self.nb_em]
self.noise_params_shape_fn = [
constant_shape(x.get_value().shape)
for x in self.noise_params]
self.additional_weights = []
self.noise_additional_weights = []
if self.additional_inputs:
for pos, size in enumerate(self.additional_inputs):
W_add = self.init_fn(size,
self.n_out,
self.sparsity,
self.scale,
self.rng)
self.additional_weights += [theano.shared(W_add,
name='W_add%d_%s'%(pos, self.name))]
if self.weight_noise:
self.noise_additional_weights += [
theano.shared(W_add*0.,
name='noise_W_add%d_%s'%(pos, self.name))]
self.params = self.params + self.additional_weights
self.noise_params += self.noise_additional_weights
self.noise_params_shape_fn += [
constant_shape(x.get_value().shape)
for x in self.noise_additional_weights]
self.params_grad_scale = [self.grad_scale for x in self.params]
def compute_sample(self, state_below, temp=1, use_noise=False):
"""
Constructs the theano expression that samples from the output layer.
:type state_below: tensor or layer
:param state_below: The theano expression (or groundhog layer)
representing the input of the cost layer
:type temp: float or tensor scalar
:param temp: scalar representing the temperature that should be used
when sampling from the output distribution
:type use_noise: bool
:param use_noise: flag. If true, noise is used when computing the
output of the model
"""
raise NotImplemented
def get_cost(self,
state_below,
target=None,
mask=None,
temp=1,
reg=None,
scale=None,
sum_over_time=None,
use_noise=True,
additional_inputs=None,
no_noise_bias=False):
"""
Computes the expression of the cost of the model (given the type of
layer used).
:type state_below: tensor or layer
:param state_below: The theano expression (or groundhog layer)
representing the input of the cost layer
:type target: tensor or layer
:param target: The theano expression (or groundhog layer)
representing the target (used to evaluate the prediction of the
output layer)
:type mask: None or mask or layer
:param mask: Mask, depicting which of the predictions should be
ignored (e.g. due to them resulting from padding a sequence
with 0s)
:type temp: float or tensor scalar
:param temp: scalar representing the temperature that should be used
when sampling from the output distribution
:type reg: None or layer or theano scalar expression
:param reg: additional regularization term that should be added to
the cost
:type scale: float or None or theano scalar
:param scale: scaling factor with which the cost is multiplied
:type sum_over_time: bool or None
:param sum_over_time: this flag overwrites the value given to this
property in the constructor of the class
:type use_noise: bool
:param use_noise: flag. If true, noise is used when computing the
output of the model
:type additional_inputs: list theano variable or layers
:param additional_inputs: list of theano variables or layers
representing the additional inputs
:type no_noise_bias: bool
:param no_noise_bias: flag, stating if weight noise should be added
to the bias as well, or only to the weights
"""
raise NotImplemented
    def get_grads(self,
                  state_below,
                  target=None,
                  mask=None,
                  temp=1,
                  reg=None,
                  scale=None,
                  additional_gradients=None,
                  sum_over_time=None,
                  use_noise=True,
                  additional_inputs=None,
                  # added by Zhaopeng Tu, 2015-11-07
                  coverages=None,
                  c_mask=None,
                  no_noise_bias=False):
        """
        Computes the expression of the gradients of the cost with respect to
        all parameters of the model.
        :type state_below: tensor or layer
        :param state_below: The theano expression (or groundhog layer)
            representing the input of the cost layer
        :type target: tensor or layer
        :param target: The theano expression (or groundhog layer)
            representing the target (used to evaluate the prediction of the
            output layer)
        :type mask: None or mask or layer
        :param mask: Mask, depicting which of the predictions should be
            ignored (e.g. due to them resulting from padding a sequence
            with 0s)
        :type temp: float or tensor scalar
        :param temp: scalar representing the temperature that should be used
            when sampling from the output distribution
        :type reg: None or layer or theano scalar expression
        :param reg: additional regularization term that should be added to
            the cost
        :type scale: float or None or theano scalar
        :param scale: scaling factor with which the cost is multiplied
        :type additional_gradients: list of tuples of the form
            (param, gradient)
        :param additional_gradiens: A list of tuples. Each tuple has as its
            first element the parameter, and as second element a gradient
            expression that should be added to the gradient resulting from the
            cost. Not all parameters need to have an additional gradient.
        :type sum_over_time: bool or None
        :param sum_over_time: this flag overwrites the value given to this
            property in the constructor of the class
        :type use_noise: bool
        :param use_noise: flag. If true, noise is used when computing the
            output of the model
        :type no_noise_bias: bool
        :param no_noise_bias: flag, stating if weight noise should be added
            to the bias as well, or only to the weights
        """
        # NOTE(review): the base-class get_cost signature above does not take
        # coverages/c_mask; presumably subclasses used with coverage override
        # get_cost to accept them -- confirm against callers.
        cost = self.get_cost(state_below,
                             target,
                             mask=mask,
                             reg=reg,
                             scale=scale,
                             sum_over_time=sum_over_time,
                             use_noise=use_noise,
                             additional_inputs=additional_inputs,
                             # added by Zhaopeng Tu, 2015-11-29
                             coverages=coverages,
                             c_mask=c_mask,
                             no_noise_bias=no_noise_bias)
        logger.debug("Get grads")
        # Gradients of the mean cost w.r.t. every parameter of this layer.
        grads = TT.grad(cost.mean(), self.params)
        logger.debug("Got grads")
        # Accumulate externally supplied (param, grad) pairs on top of the
        # gradients computed from the cost.
        if additional_gradients:
            for p, gp in additional_gradients:
                if p in self.params:
                    grads[self.params.index(p)] += gp
        # self.additional_gradients (set elsewhere) provides replacement
        # gradient expressions; clone them with the actual cost gradients
        # substituted in, then add them to the matching parameters.
        if self.additional_gradients:
            for new_grads, to_replace, properties in self.additional_gradients:
                gparams, params = new_grads
                prop_expr = [x[1] for x in properties]
                replace = [(x[0], TT.grad(cost, x[1])) for x in to_replace]
                rval = theano.clone(gparams + prop_expr,
                                    replace=replace)
                gparams = rval[:len(gparams)]
                prop_expr = rval[len(gparams):]
                self.properties += [(x[0], y)
                                    for x, y in zip(properties, prop_expr)]
                for gp, p in zip(gparams, params):
                    grads[self.params.index(p)] += gp
        self.cost = cost
        self.grads = grads
        return cost, grads
def _get_samples(self, model, length=30, temp=1, *inps):
"""
Sample a sequence from the model `model` whose output layer is given
by `self`.
:type model: groundhog model class
:param model: model that has `self` as its output layer
:type length: int
:param length: length of the sequence to sample
:type temp: float
:param temp: temperature to use during sampling
"""
raise NotImplemented
class LinearLayer(CostLayer):
"""
Linear output layer.
"""
def _init_params(self):
"""
Initialize the parameters of the layer, either by using sparse initialization or small
isotropic noise.
"""
if self.rank_n_approx:
W_em1 = self.init_fn(self.nin,
self.rank_n_approx,
self.sparsity,
self.scale,
self.rng)
W_em2 = self.init_fn(self.rank_n_approx,
self.nout,
self.sparsity,
self.scale,
self.rng)
self.W_em1 = theano.shared(W_em1,
name='W1_%s'%self.name)
self.W_em2 = theano.shared(W_em2,
name='W2_%s'%self.name)
self.b_em = theano.shared(
numpy.zeros((self.nout,), dtype=theano.config.floatX),
name='b_%s'%self.name)
self.params += [self.W_em1, self.W_em2, self.b_em]
self.myparams = []#[self.W_em1, self.W_em2, self.b_em]
if self.weight_noise:
self.nW_em1 = theano.shared(W_em1*0.,
name='noise_W1_%s'%self.name)
self.nW_em2 = theano.shared(W_em*0.,
name='noise_W2_%s'%self.name)
self.nb_em = theano.shared(b_em*0.,
name='noise_b_%s'%self.name)
self.noise_params = [self.nW_em1, self.nW_em2, self.nb_em]
self.noise_params_shape_fn = [
constant_shape(x.get_value().shape)
for x in self.noise_params]
else:
W_em = self.init_fn(self.nin,
self.nout,
self.sparsity,
self.scale,
self.rng)
self.W_em = theano.shared(W_em,
name='W_%s'%self.name)
self.b_em = theano.shared(
numpy.zeros((self.nout,), dtype=theano.config.floatX),
name='b_%s'%self.name)
self.add_wghs = []
self.n_add_wghs = []
if self.additional_inputs:
for pos, sz in enumerate(self.additional_inputs):
W_add = self.init_fn(sz,
self.nout,
self.sparsity,
self.scale,
self.rng)
self.add_wghs += [theano.shared(W_add,
name='W_add%d_%s'%(pos, self.name))]
if self.weight_noise:
self.n_add_wghs += [theano.shared(W_add*0.,
name='noise_W_add%d_%s'%(pos,
self.name))]
self.params += [self.W_em, self.b_em] + self.add_wghs
self.myparams = []#[self.W_em, self.b_em] + self.add_wghs
if self.weight_noise:
self.nW_em = theano.shared(W_em*0.,
name='noise_W_%s'%self.name)
self.nb_em = theano.shared(numpy.zeros((self.nout,),
dtype=theano.config.floatX),
name='noise_b_%s'%self.name)
self.noise_params = [self.nW_em, self.nb_em] + self.n_add_wghs
self.noise_params_shape_fn = [
constant_shape(x.get_value().shape)
for x in self.noise_params]
def _check_dtype(self, matrix, inp):
if 'int' in inp.dtype and inp.ndim==2:
return matrix[inp.flatten()]
elif 'int' in inp.dtype:
return matrix[inp]
elif 'float' in inp.dtype and inp.ndim == 3:
shape0 = inp.shape[0]
shape1 = inp.shape[1]
shape2 = inp.shape[2]
return TT.dot(inp.reshape((shape0*shape1, shape2)), matrix)
else:
return TT.dot(inp, matrix)
    def fprop(self, state_below, temp = numpy.float32(1), use_noise=True,
              additional_inputs = None):
        """
        Constructs the computational graph of this layer.

        Computes the affine output `state_below @ W + b` (optionally with the
        factorized weights and/or weight noise), adds one projection per
        additional input, and stores the result on self.out /
        self.model_output.  NOTE(review): `temp` is accepted but unused here.
        """
        if self.rank_n_approx:
            if use_noise and self.noise_params:
                emb_val = self._check_dtype(self.W_em1+self.nW_em1,
                                            state_below)
                emb_val = TT.dot(self.W_em2 + self.nW_em2, emb_val)
            else:
                emb_val = self._check_dtype(self.W_em1, state_below)
                emb_val = TT.dot(self.W_em2, emb_val)
        else:
            if use_noise and self.noise_params:
                emb_val = self._check_dtype(self.W_em + self.nW_em, state_below)
            else:
                emb_val = self._check_dtype(self.W_em, state_below)
        # Project and accumulate each additional input stream.
        if additional_inputs:
            for st, wgs in zip(additional_inputs, self.add_wghs):
                emb_val += self._check_dtype(wgs, st)
        if use_noise and self.noise_params:
            emb_val = (emb_val + self.b_em+ self.nb_em)
        else:
            emb_val = (emb_val + self.b_em)
        self.out = emb_val
        self.state_below = state_below
        self.model_output = emb_val
        return emb_val
    def get_cost(self, state_below, target=None, mask = None, temp=1,
                 reg = None, scale=None, sum_over_time=True, use_noise=True,
                 additional_inputs=None):
        """
        This function computes the cost of this layer.
        :param state_below: theano variable representing the input to the
            softmax layer
        :param target: theano variable representing the target for this
            layer
        :return: squared-error cost (summed over time / batch-averaged or
            mean, depending on `sum_over_time`)
        """
        class_probs = self.fprop(state_below, temp = temp,
                                 use_noise=use_noise,
                                 additional_inputs=additional_inputs)
        pvals = class_probs
        assert target, 'Computing the cost requires a target'
        # Flatten a (time, batch, dim) target to (time*batch, dim).
        if target.ndim == 3:
            target = target.reshape((target.shape[0]*target.shape[1],
                                     target.shape[2]))
        assert 'float' in target.dtype
        # Element-wise squared error between prediction and target.
        cost = (class_probs - target)**2
        if mask:
            mask = mask.flatten()
            cost = cost * TT.cast(mask, theano.config.floatX)
        if sum_over_time is None:
            sum_over_time = self.sum_over_time
        if sum_over_time:
            if state_below.ndim ==3:
                sh0 = TT.cast(state_below.shape[0],
                              theano.config.floatX)
                sh1 = TT.cast(state_below.shape[1],
                              theano.config.floatX)
                # Sum over time, average over the batch dimension.
                self.cost = cost.sum()/sh1
            else:
                self.cost =cost.sum()
        else:
            self.cost = cost.mean()
        if scale:
            self.cost = self.cost*scale
        if reg:
            self.cost = self.cost + reg
        self.out = self.cost
        self.mask = mask
        self.cost_scale = scale
        return self.cost
    def get_grads(self, state_below, target, mask = None, reg = None,
                  scale=None, sum_over_time=True, use_noise=True,
                  additional_inputs=None):
        """
        This function implements both the forward and backwards pass of this
        layer. The reason we do this in a single function is because for the
        factorized softmax layer is hard to rely on grad and get an
        optimized graph. For uniformity I've implemented this method for
        this layer as well (though one doesn't need to use it)
        :param state_below: theano variable representing the input to the
            softmax layer
        :param target: theano variable representing the target for this
            layer
        :return: cost, dC_dstate_below, param_grads, new_properties
            dC_dstate_below is a computational graph representing the
            gradient of the cost wrt to state_below
            param_grads is a list containing the gradients wrt to the
            different parameters of the layer
            new_properties is a dictionary containing additional properties
            of the model; properties are theano expression that are
            evaluated and reported by the model
        """
        cost = self.get_cost(state_below,
                             target,
                             mask = mask,
                             reg = reg,
                             scale=scale,
                             sum_over_time=sum_over_time,
                             use_noise=use_noise,
                             additional_inputs=additional_inputs)
        grads = TT.grad(cost, self.params)
        # Clone externally registered gradient expressions with the actual
        # cost gradients substituted in (see CostLayer.get_grads).
        if self.additional_gradients:
            for new_grads, to_replace, properties in self.additional_gradients:
                gparams, params = new_grads
                prop_expr = [x[1] for x in properties]
                replace = [(x[0], TT.grad(cost, x[1])) for x in to_replace]
                rval = theano.clone(gparams + prop_expr,
                                    replace=replace)
                gparams = rval[:len(gparams)]
                prop_expr = rval[len(gparams):]
                self.properties += [(x[0], y) for x,y in zip(properties,
                                                             prop_expr)]
                for gp, p in zip(gparams, params):
                    grads[self.params.index(p)] += gp
        self.cost = cost
        self.grads = grads
        # Gauss-Newton-style matrix-vector product used by second-order
        # optimizers: Lop/Rop of the model output w.r.t. the parameters.
        def Gvs_fn(*args):
            w = (1 - self.model_output) * self.model_output * state_below.shape[1]
            Gvs = TT.Lop(self.model_output, self.params,
                         TT.Rop(self.model_output, self.params, args)/w)
            return Gvs
        self.Gvs = Gvs_fn
        return cost, grads
class SigmoidLayer(CostLayer):
"""
Sigmoid output layer.
"""
    def _get_samples(self, model, length=30, temp=1, *inps):
        """
        See parent class.

        Draws a sample from `model` and pretty-prints the input sentence, the
        generated output and (when the model maintains 1-D coverage) the
        per-source-word coverage values, either word-by-word or
        character-by-character.
        """
        # Fall back to the target-side vocabulary when no separate source
        # vocabulary is attached to the model.
        if not hasattr(model, 'word_indxs_src'):
            model.word_indxs_src = model.word_indxs
        character_level = False
        if hasattr(model, 'character_level'):
            character_level = model.character_level
        if model.del_noise:
            model.del_noise()
        if model.maintain_coverage:
            [values, probs, coverages] = model.sample_fn(length, temp, *inps)
        else:
            [values, probs] = model.sample_fn(length, temp, *inps)
        # Assumes values matrix
        #print 'Generated sample is:'
        #print
        if values.ndim > 1:
            # Batched sample: print the first two sentences.
            for d in range(2):
                print('%d-th sentence' % d)
                print('Input: ', end=' ')
                if character_level:
                    sen = []
                    for k in range(inps[0].shape[0]):
                        if model.word_indxs_src[inps[0][k][d]] == '<eol>':
                            break
                        sen.append(model.word_indxs_src[inps[0][k][d]])
                    print("".join(sen), end=' ')
                else:
                    for k in range(inps[0].shape[0]):
                        print(model.word_indxs_src[inps[0][k][d]], end=' ')
                        if model.word_indxs_src[inps[0][k][d]] == '<eol>':
                            break
                print('')
                print('Output: ', end=' ')
                if character_level:
                    sen = []
                    for k in range(values.shape[0]):
                        if model.word_indxs[values[k][d]] == '<eol>':
                            break
                        sen.append(model.word_indxs[values[k][d]])
                    print("".join(sen), end=' ')
                else:
                    for k in range(values.shape[0]):
                        print(model.word_indxs[values[k][d]], end=' ')
                        if model.word_indxs[values[k][d]] == '<eol>':
                            break
                print()
                print()
        else:
            # Single sample: print output, remembering where '<eol>' was hit
            # so the matching coverage vector can be reported.
            print('Output: ', end=' ')
            coverage_step = 0
            if character_level:
                sen = []
                for k in range(values.shape[0]):
                    if model.word_indxs[values[k]] == '<eol>':
                        coverage_step = k
                        break
                    sen.append(model.word_indxs[values[k]])
                print("".join(sen), end=' ')
            else:
                for k in range(values.shape[0]):
                    print(model.word_indxs[values[k]], end=' ')
                    if model.word_indxs[values[k]] == '<eol>':
                        coverage_step = k
                        break
            print()
            # Only scalar (1-D) coverage can be printed per source word.
            if model.maintain_coverage and model.coverage_dim == 1:
                coverage = coverages[coverage_step]
                print('Coverage: ', end=' ')
                if character_level:
                    sen = []
                    for k in range(inps[0].shape[0]):
                        if model.word_indxs_src[inps[0][k]] == '<eol>':
                            break
                        sen.append('%s/%.2f'%(model.word_indxs_src[inps[0][k]], coverage[k]))
                    print("".join(sen), end=' ')
                else:
                    for k in range(inps[0].shape[0]):
                        print('%s/%.2f'%(model.word_indxs_src[inps[0][k]], coverage[k]), end=' ')
                        if model.word_indxs_src[inps[0][k]] == '<eol>':
                            break
                print('')
        print()
    def fprop(self,
              state_below,
              temp=numpy.float32(1),
              use_noise=True,
              additional_inputs=None,
              no_noise_bias=False):
        """
        Forward pass through the cost layer.
        :type state_below: tensor or layer
        :param state_below: The theano expression (or groundhog layer)
            representing the input of the cost layer
        :type temp: float or tensor scalar
        :param temp: scalar representing the temperature that should be used
            when sampling from the output distribution
        :type use_noise: bool
        :param use_noise: flag. If true, noise is used when computing the
            output of the model
        :type no_noise_bias: bool
        :param no_noise_bias: flag, stating if weight noise should be added
            to the bias as well, or only to the weights
        """
        if self.rank_n_approx:
            # Factorized projection with the rank-n activation in between.
            if use_noise and self.noise_params:
                emb_val = self.rank_n_activ(utils.dot(state_below,
                                                      self.W_em1+self.nW_em1))
                emb_val = TT.dot(self.W_em2 + self.nW_em2, emb_val)
            else:
                emb_val = self.rank_n_activ(utils.dot(state_below, self.W_em1))
                emb_val = TT.dot(self.W_em2, emb_val)
        else:
            if use_noise and self.noise_params:
                emb_val = utils.dot(state_below, self.W_em + self.nW_em)
            else:
                emb_val = utils.dot(state_below, self.W_em)
        # Project and accumulate each additional input stream.
        if additional_inputs:
            if use_noise and self.noise_params:
                for inp, weight, noise_weight in zip(
                    additional_inputs, self.additional_weights,
                    self.noise_additional_weights):
                    emb_val += utils.dot(inp, (noise_weight + weight))
            else:
                for inp, weight in zip(additional_inputs, self.additional_weights):
                    emb_val += utils.dot(inp, weight)
        self.preactiv = emb_val
        # Sigmoid non-linearity; `temp` scales the pre-activation.
        if use_noise and self.noise_params and not no_noise_bias:
            emb_val = TT.nnet.sigmoid(temp *
                                      (emb_val + self.b_em + self.nb_em))
        else:
            emb_val = TT.nnet.sigmoid(temp * (emb_val + self.b_em))
        self.out = emb_val
        self.state_below = state_below
        self.model_output = emb_val
        return emb_val
    def compute_sample(self,
                       state_below,
                       temp=1,
                       additional_inputs=None,
                       use_noise=False):
        """
        See parent class.

        Samples independent Bernoulli outcomes from the sigmoid activations
        of this layer (one per output unit).
        """
        class_probs = self.fprop(state_below,
                                 temp=temp,
                                 additional_inputs=additional_inputs,
                                 use_noise=use_noise)
        pvals = class_probs
        # binomial expects at least a 2D probability tensor; add a leading
        # broadcastable axis for the 1D case and strip it again afterwards.
        if pvals.ndim == 1:
            pvals = pvals.dimshuffle('x', 0)
        sample = self.trng.binomial(pvals.shape, p=pvals,
                                    dtype='int64')
        if class_probs.ndim == 1:
            sample = sample[0]
        self.sample = sample
        return sample
    def get_cost(self,
                 state_below,
                 target=None,
                 mask=None,
                 temp=1,
                 reg=None,
                 scale=None,
                 sum_over_time=None,
                 use_noise=True,
                 additional_inputs=None,
                 no_noise_bias=False):
        """
        See parent class

        Computes the element-wise binary cross-entropy between the sigmoid
        outputs and a float `target`, optionally masked, then reduced by sum
        over time (batch-averaged for 3D input) or by mean.
        """
        class_probs = self.fprop(state_below,
                                 temp=temp,
                                 use_noise=use_noise,
                                 additional_inputs=additional_inputs,
                                 no_noise_bias=no_noise_bias)
        pvals = class_probs
        assert target, 'Computing the cost requires a target'
        # Flatten a (time, batch, dim) target to (time*batch, dim).
        if target.ndim == 3:
            target = target.reshape((target.shape[0]*target.shape[1],
                                     target.shape[2]))
        assert 'float' in target.dtype
        # Do we need the safety net of 1e-12 ?
        # Clipping at 1e-12 guards the log against exact 0/1 probabilities.
        cost = -TT.log(TT.maximum(1e-12, class_probs)) * target -\
            TT.log(TT.maximum(1e-12, 1 - class_probs)) * (1 - target)
        if cost.ndim > 1:
            cost = cost.sum(1)
        if mask:
            mask = mask.flatten()
            cost = cost * TT.cast(mask, theano.config.floatX)
        if sum_over_time is None:
            sum_over_time = self.sum_over_time
        if sum_over_time:
            if state_below.ndim == 3:
                sh0 = TT.cast(state_below.shape[0],
                              theano.config.floatX)
                sh1 = TT.cast(state_below.shape[1],
                              theano.config.floatX)
                # Sum over time, average over the batch dimension.
                self.cost = cost.sum()/sh1
            else:
                self.cost = cost.sum()
        else:
            self.cost = cost.mean()
        if scale:
            self.cost = self.cost*scale
        if reg:
            self.cost = self.cost + reg
        self.out = self.cost
        self.mask = mask
        self.cost_scale = scale
        return self.cost
class SoftmaxLayer(CostLayer):
"""
Softmax output layer.
"""
    def _get_samples(self, model, length=30, temp=1, *inps):
        """
        See parent class

        Draws a sample from `model` and pretty-prints input, output and
        (when the model maintains 1-D coverage) per-source-word coverage.
        NOTE(review): this duplicates SigmoidLayer._get_samples almost
        verbatim -- candidate for extraction into a shared helper.
        """
        # Fall back to the target-side vocabulary when no separate source
        # vocabulary is attached to the model.
        if not hasattr(model, 'word_indxs_src'):
            model.word_indxs_src = model.word_indxs
        character_level = False
        if hasattr(model, 'character_level'):
            character_level = model.character_level
        if model.del_noise:
            model.del_noise()
        if model.maintain_coverage:
            [values, probs, coverages] = model.sample_fn(length, temp, *inps)
        else:
            [values, probs] = model.sample_fn(length, temp, *inps)
        #print 'Generated sample is:'
        #print
        if values.ndim > 1:
            # Batched sample: print the first two sentences.
            for d in range(2):
                print('%d-th sentence' % d)
                print('Input: ', end=' ')
                if character_level:
                    sen = []
                    for k in range(inps[0].shape[0]):
                        if model.word_indxs_src[inps[0][k][d]] == '<eol>':
                            break
                        sen.append(model.word_indxs_src[inps[0][k][d]])
                    print("".join(sen), end=' ')
                else:
                    for k in range(inps[0].shape[0]):
                        print(model.word_indxs_src[inps[0][k][d]], end=' ')
                        if model.word_indxs_src[inps[0][k][d]] == '<eol>':
                            break
                print('')
                print('Output: ', end=' ')
                if character_level:
                    sen = []
                    for k in range(values.shape[0]):
                        if model.word_indxs[values[k][d]] == '<eol>':
                            break
                        sen.append(model.word_indxs[values[k][d]])
                    print("".join(sen), end=' ')
                else:
                    for k in range(values.shape[0]):
                        print(model.word_indxs[values[k][d]], end=' ')
                        if model.word_indxs[values[k][d]] == '<eol>':
                            break
                print()
                print()
        else:
            # Single sample: remember where '<eol>' was hit so the matching
            # coverage vector can be reported.
            print('Output: ', end=' ')
            coverage_step = 0
            if character_level:
                sen = []
                for k in range(values.shape[0]):
                    if model.word_indxs[values[k]] == '<eol>':
                        coverage_step = k
                        break
                    sen.append(model.word_indxs[values[k]])
                print("".join(sen), end=' ')
            else:
                for k in range(values.shape[0]):
                    print(model.word_indxs[values[k]], end=' ')
                    if model.word_indxs[values[k]] == '<eol>':
                        coverage_step = k
                        break
            print()
            # Only scalar (1-D) coverage can be printed per source word.
            if model.maintain_coverage and model.coverage_dim == 1:
                coverage = coverages[coverage_step]
                print('Coverage: ', end=' ')
                if character_level:
                    sen = []
                    for k in range(inps[0].shape[0]):
                        if model.word_indxs_src[inps[0][k]] == '<eol>':
                            break
                        sen.append('%s/%.2f'%(model.word_indxs_src[inps[0][k]], coverage[k]))
                    print("".join(sen), end=' ')
                else:
                    for k in range(inps[0].shape[0]):
                        print('%s/%.2f'%(model.word_indxs_src[inps[0][k]], coverage[k]), end=' ')
                        if model.word_indxs_src[inps[0][k]] == '<eol>':
                            break
                print('')
        print()
def fprop(self,
state_below,
temp=numpy.float32(1),
use_noise=True,
additional_inputs=None,
no_noise_bias=False,
target=None,
full_softmax=True):
"""
Forward pass through the cost layer.
:type state_below: tensor or layer
:param state_below: The theano expression (or groundhog layer)
representing the input of the cost layer
:type temp: float or tensor scalar
:param temp: scalar representing the temperature that should be used
when sampling from the output distribution
:type use_noise: bool
:param use_noise: flag. If true, noise is used when computing the
output of the model
:type no_noise_bias: bool
:param no_noise_bias: flag, stating if weight noise should be added
to the bias as well, or only to the weights
:type target: tensor of ints or None
:param target: required when full_softmax is False; scores are then
computed only for these target indices
:type full_softmax: bool
:param full_softmax: if True, compute the full softmax distribution;
otherwise compute sigmoid scores only for the given targets
"""
if not full_softmax:
assert target != None, 'target must be given'
# rank_n_approx factorizes the output projection into two smaller
# matrices (W_em1: n_in x rank, W_em2: rank x n_out)
if self.rank_n_approx:
if self.weight_noise and use_noise and self.noise_params:
emb_val = self.rank_n_activ(utils.dot(state_below,
self.W_em1+self.nW_em1))
nW_em = self.nW_em2
# NOTE(review): this branch sets nW_em but never W_em, yet the code
# below reads W_em -- presumably `W_em = self.W_em2` is intended here;
# verify against the upstream groundhog sources
else:
emb_val = self.rank_n_activ(utils.dot(state_below, self.W_em1))
W_em = self.W_em2
else:
W_em = self.W_em
if self.weight_noise:
nW_em = self.nW_em
emb_val = state_below
if full_softmax:
# project onto the full vocabulary, optionally with weight noise
if self.weight_noise and use_noise and self.noise_params:
emb_val = TT.dot(emb_val, W_em + nW_em)
else:
emb_val = TT.dot(emb_val, W_em)
# each extra conditioning input gets its own projection matrix
if additional_inputs:
if use_noise and self.noise_params:
for inp, weight, noise_weight in zip(
additional_inputs, self.additional_weights,
self.noise_additional_weights):
emb_val += utils.dot(inp, (noise_weight + weight))
else:
for inp, weight in zip(additional_inputs, self.additional_weights):
emb_val += utils.dot(inp, weight)
# temperature scales the pre-activation before the softmax
if self.weight_noise and use_noise and self.noise_params and \
not no_noise_bias:
emb_val = temp * (emb_val + self.b_em + self.nb_em)
else:
emb_val = temp * (emb_val + self.b_em)
else:
# score only the requested target columns of the projection
W_em = W_em[:, target]
if self.weight_noise:
nW_em = nW_em[:, target]
W_em += nW_em
if emb_val.ndim == 3:
# flatten (time, batch, dim) into (time*batch, dim)
emb_val = emb_val.reshape([emb_val.shape[0]*emb_val.shape[1], emb_val.shape[2]])
emb_val = (W_em.T * emb_val).sum(1) + self.b_em[target]
if self.weight_noise and use_noise:
emb_val += self.nb_em[target]
emb_val = temp * emb_val
# keep the pre-activation around for callers that need it
self.preactiv = emb_val
if full_softmax:
emb_val = utils.softmax(emb_val)
else:
# independent per-target scores (used e.g. by the NCE cost)
emb_val = TT.nnet.sigmoid(emb_val)
self.out = emb_val
self.state_below = state_below
self.model_output = emb_val
return emb_val
def compute_sample(self,
                   state_below,
                   temp=1,
                   use_noise=False,
                   additional_inputs=None):
    """Draw one multinomial sample per row from the softmax output.

    Runs the forward pass, samples from the resulting distribution with
    the layer's random stream, and caches the result on ``self.sample``.
    """
    probs = self.fprop(state_below,
                       temp=temp,
                       additional_inputs=additional_inputs,
                       use_noise=use_noise)
    # multinomial expects a matrix of distributions; lift a vector to 2D
    pvals = probs if probs.ndim > 1 else probs.dimshuffle('x', 0)
    drawn = self.trng.multinomial(pvals=pvals, dtype='int64').argmax(axis=-1)
    if probs.ndim == 1:
        # undo the lift: return a scalar sample for a single distribution
        drawn = drawn[0]
    self.sample = drawn
    return drawn
def get_cost(self,
state_below,
target=None,
mask=None,
temp=1,
reg=None,
scale=None,
sum_over_time=False,
no_noise_bias=False,
additional_inputs=None,
# added by Zhaopeng Tu, 2015-11-07
coverages=None,
c_mask=None,
use_noise=True):
"""
See parent class

Computes the negative log-likelihood cost of `target` under the
softmax output (or an NCE objective when self.use_nce is set), with
optional masking, scaling, regularisation, and a coverage-variance
penalty (coverages/c_mask).
"""
# Pull P(target) out of a flattened (rows x vocab) softmax matrix by
# computing each row's flat index.
def _grab_probs(class_probs, target):
shape0 = class_probs.shape[0]
shape1 = class_probs.shape[1]
target_ndim = target.ndim
target_shape = target.shape
if target.ndim > 1:
target = target.flatten()
assert target.ndim == 1, 'make sure target is a vector of ints'
assert 'int' in target.dtype
pos = TT.arange(shape0)*shape1
new_targ = target + pos
return class_probs.flatten()[new_targ]
assert target, 'Computing the cost requires a target'
target_shape = target.shape
target_ndim = target.ndim
if self.use_nce:
logger.debug("Using NCE")
# positive samples: true targets
class_probs = self.fprop(state_below,
temp=temp,
use_noise=use_noise,
additional_inputs=additional_inputs,
no_noise_bias=no_noise_bias,
target=target.flatten(),
full_softmax=False)
# negative samples: a single uniform random sample per training sample
# NOTE(review): `shape[0].reshape([1])` as the size argument of
# trng.uniform looks suspicious -- confirm against a working checkout
nsamples = TT.cast(self.trng.uniform(class_probs.shape[0].reshape([1])) * self.n_out, 'int64')
neg_probs = self.fprop(state_below,
temp=temp,
use_noise=use_noise,
additional_inputs=additional_inputs,
no_noise_bias=no_noise_bias,
target=nsamples.flatten(),
full_softmax=False)
# NCE objective: reward true targets, penalise the noise samples
cost_target = class_probs
cost_nsamples = 1. - neg_probs
cost = -TT.log(cost_target)
cost = cost - TT.cast(neg_probs.shape[0], 'float32') * TT.log(cost_nsamples)
else:
# standard cross-entropy over the full softmax
class_probs = self.fprop(state_below,
temp=temp,
use_noise=use_noise,
additional_inputs=additional_inputs,
no_noise_bias=no_noise_bias)
cost = -TT.log(_grab_probs(class_probs, target))
self.word_probs = TT.exp(-cost.reshape(target_shape))
# Set all the probs after the end-of-line to one
if mask:
self.word_probs = self.word_probs * mask + 1 - mask
if mask:
cost = cost * TT.cast(mask.flatten(), theano.config.floatX)
# per-sample cost: sum over time when target is (time, batch)
self.cost_per_sample = (cost.reshape(target_shape).sum(axis=0)
if target_ndim > 1
else cost)
# added by Zhaopeng Tu, 2015-11-27
# use coverage variance for cost
# we expect the coverage be in a uniform distribution, to make all the input words are touched equally
# coverages is only available in EVALUATION mode
if self.use_coverage_cost and coverages:
# here we only consider the variance for all non-zero elems (with mask)
# coverages consists of coverage at each time step
# coverages: (target_length, source_length, batch_size)
# we use the variance of final coverage as the bias
coverage = coverages[-1][:,:,0]
length = c_mask.sum(axis=0)
coverage_mean = coverage.sum(axis=0)/length
# samples in a batch have different lengths
# added by Zhaopeng Tu, 2015-12-02
# for additive or subtractive coverage, the coverage_var would be too large (especially for long sentence)
# which means the cost would be negative thus the traning would be stopped
# additive: .. iter 27502 cost -988.954 grad_norm 7.71e+03 log2_p_word -1.78e+00 log2_p_expl 4.89e+01 step time 0.713 sec whole time 12.753 h lr 1.00e+00
# therefore, we use standard variance
# coverage_var = TT.sqr((coverage-coverage_mean.dimshuffle('x', 0))*c_mask).sum(axis=0)/length
coverage_var = TT.sqrt(TT.sqr((coverage-coverage_mean.dimshuffle('x', 0))*c_mask).sum(axis=0)/length)
# added by Zhaopeng Tu, 2015-12-02
# we should not add the coverage_var to each word, which will remove the effect of normalizer length
# thus the longer the sentence, the heavier effect the coverage_var, which is not suitable
# therefore, we add the coverage_var to the final cost (as an overall bias)
'''
# expand the coverage_var to match the shape of targets
coverage_var = (coverage_var.dimshuffle('x', 0) * mask).flatten()
# self.cost = self.cost + coverage_var * self.CC.dimshuffle('x', 0)
cost = cost + coverage_var * self.CC
'''
if sum_over_time is None:
sum_over_time = self.sum_over_time
if sum_over_time:
if state_below.ndim == 3:
# (time, batch) -> mean over batch, sum over time
cost = cost.reshape((state_below.shape[0],
state_below.shape[1]))
self.cost = cost.mean(1).sum()
else:
self.cost = cost.sum()
# added by Zhaopeng Tu, 2015-12-02
if self.use_coverage_cost and coverages:
coverage_var = coverage_var.sum()
else:
self.cost = cost.mean()
# added by Zhaopeng Tu, 2015-12-02
if self.use_coverage_cost and coverages:
coverage_var = coverage_var.mean()
# added by Zhaopeng Tu, 2015-12-02
# weight the coverage penalty by the CC hyper-parameter
if self.use_coverage_cost and coverages:
self.cost = self.cost + self.CC * coverage_var
if scale:
self.cost = self.cost*scale
if reg:
self.cost = self.cost + reg
self.mask = mask
self.cost_scale = scale
return self.cost
class HierarchicalSoftmaxLayer(SoftmaxLayer):
"""
Hierarchical Softmax output layer (2 layer)
This is a preliminary implementation of 2-level hierarchical softmax layer (GPU only)

The vocabulary of size n_out is split into n_class classes of
n_words_class words each (roughly sqrt(n_out) per level), so the
probability of a word is P(class) * P(word | class).
"""
def __init__(self, rng,
n_in,
n_out,
scale,
sparsity,
weight_noise=False,
init_fn='sample_weights_classic',
bias_fn='init_bias',
bias_scale=0.,
sum_over_time=True,
grad_scale=1.,
name=None,
**kwargs):
# sparse_block_dot_SS only exists in the CUDA sandbox, hence GPU-only
assert theano.config.device[:3] == 'gpu', 'Hierarchical softmax is not supported without GPU'
from theano.sandbox.cuda.blocksparse import sparse_block_dot_SS
self.sparse_block_dot_SS = sparse_block_dot_SS
self.grad_scale = grad_scale
# NOTE(review): super(CostLayer, self) skips the SoftmaxLayer/CostLayer
# initializers and runs the next class in the MRO -- presumably
# intentional (this class builds its own params), but verify
super(CostLayer, self).__init__(n_in, n_out, rng, name)
# factorize the vocabulary: ~sqrt(n_out) words per class
self.n_words_class = numpy.ceil(numpy.sqrt(self.n_out)).astype('int64') # oSize
self.n_class = numpy.ceil(self.n_out/float(self.n_words_class)).astype('int64') # oBlocks
logger.debug("n_words_class = %d, n_class = %d"%(self.n_words_class, self.n_class))
self.trng = RandomStreams(self.rng.randint(int(1e6)))
# string arguments name functions in this module's namespace
# (eval on a config-supplied string -- trusted input only)
if isinstance(bias_fn, str):
self.bias_fn = eval(bias_fn)
else:
self.bias_fn = bias_fn
self.bias_scale = bias_scale
self.scale = scale
self.sum_over_time = sum_over_time
self.weight_noise = weight_noise
self.sparsity = sparsity
if self.sparsity < 0:
self.sparsity = n_out
if type(init_fn) is str:
init_fn = eval(init_fn)
self.init_fn = init_fn
self._init_params()
def _init_params(self):
# W_em/b_em: class-level predictor; U_em/c_em: per-class word predictors
self.iBlocks = 1 # number of blocks in the input (from lower layer)
W_em = self.init_fn(self.n_in,
self.n_class,
self.sparsity,
self.scale,
self.rng)
self.W_em = theano.shared(W_em,
name='W_%s' % self.name)
self.b_em = theano.shared(
self.bias_fn(self.n_class, self.bias_scale, self.rng),
name='b_%s' % self.name)
# one (n_in x n_words_class) projection per class, small uniform init
U_em = theano.shared(((self.rng.rand(self.iBlocks, self.n_class,
self.n_in, self.n_words_class)-0.5)/(self.n_words_class*self.n_in)
).astype(theano.config.floatX), name='U_%s'%self.name)
self.U_em = U_em
c_em = numpy.zeros((self.n_class, self.n_words_class), dtype='float32')
n_words_last_class = self.n_out % self.n_words_class
#c_em[-1, n_words_last_class:] = -numpy.inf
self.c_em = theano.shared(c_em, name='c_%s' % self.name)
self.params = [self.W_em, self.b_em, self.U_em, self.c_em]
self.params_grad_scale = [self.grad_scale for x in self.params]
def fprop(self,
state_below,
temp=numpy.float32(1),
use_noise=True,
additional_inputs=None,
no_noise_bias=False,
target=None,
full_softmax=True,
**kwargs):
"""
Forward pass: either the full word distribution (scan over all
classes) or, when full_softmax is False, only P(target) via the
block-sparse dot over the targets' classes.
"""
if not full_softmax:
assert target != None, 'target must be given'
W_em = self.W_em
U_em = self.U_em
b_em = self.b_em
c_em = self.c_em
emb_val = state_below
bs = emb_val.shape[0]
if full_softmax:
# compute the probability of every word using scan
# for all classes
class_vecs = TT.arange(self.n_class)
class_val = utils.softmax(TT.dot(emb_val, W_em) + b_em)
def _compute_inclass(classid):
# compute the word probabilities
outputIdx = TT.alloc(classid, bs)[:, None]
# P(word | class) * P(class) for this class
word_val = utils.softmax(TT.dot(emb_val, U_em[0, classid, :, :])+c_em[classid,:])
word_val = word_val * class_val[:, classid][:,None]
return word_val.T
rval = theano.scan(_compute_inclass, class_vecs, None, name='compute_inclass')
# concatenate per-class blocks and trim padding past n_out
all_word_val = rval[0].reshape([rval[0].shape[0]*rval[0].shape[1], rval[0].shape[2]]).T
all_word_val = all_word_val[:,:self.n_out]
emb_val = all_word_val
else:
# compute only the probability of given targets
if emb_val.ndim == 3:
emb_val = emb_val.reshape([emb_val.shape[0]*emb_val.shape[1], emb_val.shape[2]])
# extract class id's from target indices
target = target.flatten()
class_vec = target // self.n_words_class # need to be int/int
class_idx_vec = target % self.n_words_class
outputIdx = class_vec[:, None]
# compute the class probabilities
class_val = utils.softmax(TT.dot(emb_val, W_em) + b_em)
# compute the word probabilities
word_val = utils.softmax(self.sparse_block_dot_SS(U_em,
emb_val[:, None, :], TT.zeros((bs, 1), dtype='int64'), c_em, outputIdx)[:, 0, :])
# P(target) = P(class of target) * P(target | its class)
class_val = class_val[TT.arange(bs), class_vec]
word_val = word_val[TT.arange(bs), class_idx_vec]
emb_val = class_val * word_val
#self.preactiv = emb_val
self.out = emb_val
self.state_below = state_below
self.model_output = emb_val
return emb_val
def get_cost(self,
state_below,
target=None,
mask=None,
temp=1,
reg=None,
scale=None,
sum_over_time=False,
no_noise_bias=False,
additional_inputs=None,
use_noise=True):
"""
See parent class

Negative log-likelihood of `target` under the two-level softmax,
with optional masking, time-summing, scaling and regularisation.
"""
assert target, 'Computing the cost requires a target'
target_shape = target.shape
target_ndim = target.ndim
target_shape = target.shape
if state_below.ndim == 3:
# flatten (time, batch, dim) to (time*batch, dim)
shp = state_below.shape
state_below = state_below.reshape([shp[0]*shp[1], shp[2]])
# fprop with a target returns P(target) directly
class_probs = self.fprop(state_below,
temp=temp,
target=target,
full_softmax=False,
use_noise=use_noise,
additional_inputs=additional_inputs,
no_noise_bias=no_noise_bias)
cost = -TT.log(class_probs)
self.word_probs = TT.exp(-cost.reshape(target_shape))
# Set all the probs after the end-of-line to one
if mask:
self.word_probs = self.word_probs * mask + 1 - mask
if mask:
cost = cost * TT.cast(mask.flatten(), theano.config.floatX)
self.cost_per_sample = (cost.reshape(target_shape).sum(axis=0)
if target_ndim > 1
else cost)
if sum_over_time is None:
sum_over_time = self.sum_over_time
if sum_over_time:
if state_below.ndim == 3:
cost = cost.reshape((state_below.shape[0],
state_below.shape[1]))
self.cost = cost.mean(1).sum()
else:
self.cost = cost.sum()
else:
self.cost = cost.mean()
if scale:
self.cost = self.cost*scale
if reg:
self.cost = self.cost + reg
self.mask = mask
self.cost_scale = scale
return self.cost
|
neozhangthe1/coverage_model
|
build/lib/groundhog/layers/cost_layers.py
|
Python
|
bsd-3-clause
| 59,101
|
[
"Gaussian"
] |
97f8ad9d8431fd85098bdfcfca04910d0432d701d38b38fa2189e2dd2402f4fd
|
#!/usr/bin/env python3
##ON APP MACHINE
import sys
from os import listdir, mkdir
from os.path import isdir, dirname, abspath
import os
import subprocess
import weakref
from scipy import fftpack
import numpy as np
## some global variables, this needs to be fixed at some point
default_raw_data_loc = None#"/exp_app2/appexp1/public/raw_data"
default_processed_data_loc = None#"/home/brian/processed_files"
MetaData_directory = dirname(abspath(__file__)) + '/data' ## change this if antenna_response_model is in a folder different from this module
#### constants
C = 299792458.0
RTD = 180.0/3.1415926 ##radians to degrees
n_air = 1.000293
v_air = C/n_air
latlonCS002 = np.array([52.91512249, 6.869837540]) ## lattitude and longitude of CS002 in degrees
#### log data to screen and to a file
class logger(object):
    """Tee-style logger: mirrors messages to a log file and, optionally,
    the original stdout. Can also capture sys.stdout/sys.stderr so that
    all program output goes through the log file."""

    class std_writer(object):
        """File-like proxy installed in place of sys.stdout/sys.stderr;
        forwards writes to the owning logger's file (and screen)."""

        def __init__(self, logger):
            # weak reference so this proxy never keeps the logger alive
            self.logger_ref = weakref.ref(logger)

        def write(self, msg):
            owner = self.logger_ref()
            owner.out_file.write(msg)
            if owner.to_screen:
                owner.old_stdout.write(msg)

        def flush(self):
            owner = self.logger_ref()
            owner.out_file.flush()

    def __init__(self):
        self.has_stderr = False
        self.has_stdout = False
        # remember the real streams so output can still reach the terminal
        self.old_stderr = sys.stderr
        self.old_stdout = sys.stdout
        self.set("out_log")

    def set(self, fname, to_screen=True):
        # (re)open the log file and choose whether to echo to the terminal
        self.out_file = open(fname, 'w')
        self.set_to_screen(to_screen)

    def __call__(self, *args):
        # print-like: space-separated args, newline-terminated, flushed
        for arg in args:
            piece = str(arg) + " "
            if self.to_screen:
                self.old_stdout.write(piece)
            self.out_file.write(piece)
        self.out_file.write("\n")
        if self.to_screen:
            self.old_stdout.write("\n")
        self.out_file.flush()
        self.old_stdout.flush()

    def set_to_screen(self, to_screen=True):
        self.to_screen = to_screen

    def take_stdout(self):
        # route sys.stdout through this logger (idempotent)
        if not self.has_stdout:
            sys.stdout = self.std_writer(self)
            self.has_stdout = True

    def take_stderr(self):
        # route sys.stderr through this logger (idempotent)
        if not self.has_stderr:
            sys.stderr = self.std_writer(self)
            self.has_stderr = True

    def restore_stdout(self):
        if self.has_stdout:
            sys.stdout = self.old_stdout
            self.has_stdout = False

    def restore_stderr(self):
        if self.has_stderr:
            sys.stderr = self.old_stderr
            self.has_stderr = False

    def flush(self):
        self.out_file.flush()

    # def __del__(self):
    #     self.restore_stderr()
    #     self.restore_stdout()
#log = logger()
def iterate_pairs(list_one, list_two, list_one_avoid=(), list_two_avoid=()):
    """returns an iterator that loops over all pairs of the two lists,
    skipping any item found in the corresponding avoid collection.

    Fix: the avoid defaults were mutable lists (a classic shared-default
    pitfall); immutable empty tuples behave identically for the membership
    tests performed here and accept the same call patterns.
    """
    for item_one in list_one:
        if item_one in list_one_avoid:
            continue
        for item_two in list_two:
            if item_two in list_two_avoid:
                continue
            yield (item_one, item_two)
import re
natural_regex_pattern = re.compile('([0-9]+)')
def natural_sort( l ):
    """ Sort the given iterable in the way that humans expect. Usefull for sorting station names."""
    def _natural_key(text):
        # split into digit / non-digit runs; digit runs compare numerically
        return [int(chunk) if chunk.isdigit() else chunk
                for chunk in natural_regex_pattern.split(text)]
    return sorted(l, key=_natural_key)
#### some file utils
def Fname_data(Fpath):
    """ takes both pulse data file names and h5 file names and returns UTC_time, station_name, Fpath"""
    # file names look like <prefix>_<timeID>_<station>_S<number>
    fields = Fpath.split('/')[-1].split('_')
    timeID = fields[1]
    station_name = fields[2]
    number_text = fields[3][1:]  # drop the leading marker character
    file_number = int(number_text) if number_text else 0
    return timeID, station_name, Fpath, file_number
##note that timeID is a string representing the datetime of a LOFAR trigger. such as: D20130619T094846.507Z
## the timeID is used to uniquely identify triggers
def get_timeID(fname):
    """Extract the timeID (second '_'-separated token) from a file name."""
    return fname.split("_")[1]
def year_from_timeID(timeID):
    """Return the 4-digit year from a timeID such as D20130619T094846.507Z."""
    return timeID[1:5]
def raw_data_dir(timeID, data_loc=None):
    """gives path to the raw data folder for a particular timeID, given location of data structure. Defaults to default_raw_data_loc"""
    if data_loc is None:
        data_loc = default_raw_data_loc
    # Fix: check the *effective* location. Previously the error condition
    # tested the module-level default, which misfires when a data_loc is
    # passed explicitly while the default is unset.
    if data_loc is None:
        print("ERROR: 'default_raw_data_loc' in utilities is not set.")
        quit()
    return data_loc + '/' + year_from_timeID(timeID) + "/" + timeID
def processed_data_dir(timeID, data_loc=None):
    """gives path to the analysis folders for a particular timeID, given location of data structure. Defaults to default_processed_data_loc
    makes the directory if it doesn't exist"""
    if data_loc is None:
        data_loc = default_processed_data_loc
    # Fix: check the *effective* location (an explicitly passed data_loc
    # should work even when the module default is unset).
    if data_loc is None:
        print("ERROR: 'default_processed_data_loc' in utilities is not set.")
        quit()
    path = data_loc + "/" + year_from_timeID(timeID) + "/" + timeID
    # makedirs also creates the intermediate year directory (plain mkdir
    # fails if it does not exist yet) and is race-free with exist_ok
    os.makedirs(path, exist_ok=True)
    return path
## active station-ID -> station-name entries; IDs absent from this table
## (commented out in the original survey list) stay None in SId_to_Sname
_ACTIVE_STATIONS = {
    1: "CS001", 2: "CS002", 3: "CS003", 4: "CS004", 5: "CS005",
    6: "CS006", 7: "CS007", 11: "CS011", 13: "CS013", 17: "CS017",
    21: "CS021", 24: "CS024", 26: "CS026", 28: "CS028", 30: "CS030",
    31: "CS031", 32: "CS032", 101: "CS101", 103: "CS103", 106: "RS106",
    121: "CS201", 125: "RS205", 128: "RS208", 130: "RS210", 141: "CS301",
    142: "CS302", 145: "RS305", 146: "RS306", 147: "RS307", 150: "RS310",
    161: "CS401", 166: "RS406", 167: "RS407", 169: "RS409", 181: "CS501",
    183: "RS503", 188: "RS508", 189: "RS509", 201: "DE601", 202: "DE602",
    203: "DE603", 204: "DE604", 205: "DE605", 206: "FR606", 207: "SE607",
    208: "UK608",
}
## a python list where the keys are the number of a station and the values are the station name
SId_to_Sname = [_ACTIVE_STATIONS.get(station_id) for station_id in range(209)]
## this just "inverts" the previous list, discarding unused values
Sname_to_SId_dict = {name: ID for ID, name in enumerate(SId_to_Sname) if name is not None}
def even_antName_to_odd(even_ant_name):
    """Given an even antenna name, return the 9-digit name of its odd partner."""
    return str(int(even_ant_name) + 1).zfill(9)
def antName_is_even(ant_name):
    """Return True when the antenna number is even."""
    return int(ant_name) % 2 == 0
def odd_antName_to_even(odd_ant_name):
    """Given an odd antenna name, return the 9-digit name of its even partner."""
    return str(int(odd_ant_name) - 1).zfill(9)
def antName_to_even(ant_name):
    """Return the even form of an antenna name (identity if already even).

    Fix: the odd branch computed the converted name but never returned it,
    so the function silently returned None for odd antenna names.
    """
    if antName_is_even(ant_name):
        return ant_name
    return odd_antName_to_even(ant_name)
def antName_to_odd(ant_name):
    """Return the odd form of an antenna name (identity if already odd).

    Fix: the even branch computed the converted name but never returned it,
    so the function silently returned None for even antenna names.
    """
    if antName_is_even(ant_name):
        return even_antName_to_odd(ant_name)
    return ant_name
#### plotting utilities ####
def set_axes_equal(ax):
    '''Make axes of 3D plot have equal scale so that spheres appear as spheres,
    cubes as cubes, etc.. This is one possible solution to Matplotlib's
    ax.set_aspect('equal') and ax.axis('equal') not working for 3D.
    Input
    ax: a matplotlib axis, e.g., as output from plt.gca().
    '''
    limits = [ax.get_xlim3d(), ax.get_ylim3d(), ax.get_zlim3d()]
    middles = [np.mean(pair) for pair in limits]
    ranges = [abs(pair[1] - pair[0]) for pair in limits]
    # The plot bounding box is a sphere in the sense of the infinity
    # norm, so half the largest range serves as a common plot radius.
    radius = 0.5 * max(ranges)
    ax.set_xlim3d([middles[0] - radius, middles[0] + radius])
    ax.set_ylim3d([middles[1] - radius, middles[1] + radius])
    ax.set_zlim3d([middles[2] - radius, middles[2] + radius])
### some math functions? ###
def normalize_angle_radians( angle_radians ):
    """For an angle in radians, return the equivalent angle that is garunteed be between -pi and pi"""
    two_pi = 2.0 * np.pi
    wrapped = angle_radians
    # shift down by full turns while above +pi, then up while below -pi
    while wrapped > np.pi:
        wrapped -= two_pi
    while wrapped < -np.pi:
        wrapped += two_pi
    return wrapped
def BoundingBox_collision(BB1, BB2):
    """ return true if two N-D bounding boxes collide, False otherwise"""
    # boxes collide iff their intervals overlap along every axis
    return all(not (b1[1] < b2[0] or b2[1] < b1[0])
               for b1, b2 in zip(BB1, BB2))
### some build tools ####
def GSL_include():
    """return directory for location of GSL headers, useful when combining GSL and cython"""
    try:
        # ask gsl-config; strip the leading "-I" and trailing newline
        flags = subprocess.check_output('gsl-config --cflags', shell=True)
        gsl_include = flags.decode('utf-8')[2:-1]
    except subprocess.CalledProcessError:
        env_dir = os.getenv('LIB_GSL')
        if env_dir is None:
            # Environmental variable LIB_GSL not set, use hardcoded path.
            gsl_include = r"c:\Program Files\GnuWin32\include"
        else:
            gsl_include = env_dir + "/include"
    assert gsl_include != '', "Couldn't find gsl. Make sure it's installed and in the path."
    return gsl_include
def GSL_library_dir():
    """return directory for location of GSL binaries, useful when combining GSL and cython"""
    try:
        # first "-L<dir>" token of the gsl-config link line
        link_flags = subprocess.check_output('gsl-config --libs', shell=True)
        lib_gsl_dir = link_flags.decode('utf-8').split()[0][2:]
    except subprocess.CalledProcessError:
        env_dir = os.getenv('LIB_GSL')
        if env_dir is None:
            # Environmental variable LIB_GSL not set, use hardcoded path.
            lib_gsl_dir = r"c:\Program Files\GnuWin32\lib"
        else:
            lib_gsl_dir = env_dir + "/lib"
    return lib_gsl_dir
|
Bhare8972/LOFAR-LIM
|
LIM_scripts/utilities.py
|
Python
|
mit
| 11,568
|
[
"Brian"
] |
de9aee678156fe3e80ea9ddab17931f7cf83cd3694d606bc560ca66b24286ea8
|
#!/usr/bin/env python
# Script to convert Amber protein hydrogens (created by ambpdb) into Gromacs
# names. For some reason, eg. HB2 + HB3 are expected to be HB1 and HB2 in the
# Gromacs aminoacids.rtp parameter files.
# Requirements: Biopython
# Desired PDB file should be the first argument. Output is renamed PDB with
# 'GMX_' prepended.
import sys
from Bio.PDB import PDBIO
from Bio.PDB.PDBParser import PDBParser
proteins = ['ALA','ARG','ASH','ASP','ASN','CYS','CYX','GLH','GLN','GLU','GLY','HID','HIE','HIP','HIS','ILE','LEU','LYN','LYS','MET','PHE','PRO','SER','THR','TRP','TYR','VAL']
# Argparse this...
# NOTE: this is a Python 2 script (print statements); ad-hoc argv handling:
# argv[1] = PDB file, any argv[2] enables terminus renaming
try:
pdbname = sys.argv[1]
except IndexError:
print "PDB file required as first argument. "
sys.exit()
try:
# only the presence of a second argument matters, not its value
sys.argv[2]
print "Renaming termini (providing any character as a second argument will turn this on)"
termini = True
except IndexError:
termini = False
print "Not renaming termini (add a second argument if you want to turn this on)"
parser = PDBParser(QUIET=True) # Lack of atom symbols = warnings if verbose
structure = parser.get_structure('ambpdb',pdbname)
# Convert first and last residues to termini
def rename_termini(struct):
"""Rename termini of chains to be NXXX and CXXX. TAKE CARE - RESULTS IN
NON-STANDARD PDB FORMAT (residue names should be 3 characters).
Input = Biopython structure object, returns structure object with
modified first/last resnames in each chain."""
for chain in struct.get_chains():
first = chain.child_list[0]
last = chain.child_list[-1]
if first.resname in proteins:
first.resname = 'N'+first.resname
else:
print """The first residue is either already an N-terminus, or doesn't
appear to be a natural amino acid. Continuing anyway."""
if last.resname in proteins:
last.resname = 'C'+last.resname
else:
# the chain ends in non-protein residues: walk the chain and use the
# enumerate start of -1 so that idx points at the *previous* residue
for idx,res in enumerate(chain.child_list,-1):
if res.resname in proteins:
continue
else:
# NOTE(review): presumably intended to tag the last protein residue
# before the non-protein tail; as written this fires for every
# non-protein residue in the chain (including the first residue,
# where idx is -1) -- confirm against expected input files
last = chain.child_list[idx] # idx starts from -1
print "You seem to have non-protein residues in a chain. Renaming residue %s %s as C-terminus" % (last.resname,last.id[1])
last.resname = 'C'+last.resname
return struct
def amb2gmx(struct):
    """Rename Amber atoms to those expected in the Gromacs amber parameter files.
    Mainly e.g. HB2/HB3 -> HB1/HB2. Cycles over all residues. Input =
    Biopython structure object, returns structure object with modified atom
    names"""
    # (Amber name, Gromacs fullname) pairs shared by many residues.
    # The leading space in the replacement names is significant: PDB atom
    # name fields are column-aligned 4-character strings.
    hb = [('HB2', ' HB1'), ('HB3', ' HB2')]
    hg = [('HG2', ' HG1'), ('HG3', ' HG2')]
    hd = [('HD2', ' HD1'), ('HD3', ' HD2')]
    he = [('HE2', ' HE1'), ('HE3', ' HE2')]
    base_renames = {
        'GLY': [('HA2', ' HA1'), ('HA3', ' HA2')],
        'SER': hb,
        'LEU': hb,
        'ILE': [('HG12', 'HG11'), ('HG13', 'HG12')],
        'ASN': hb,
        'GLN': hb + hg,
        'ARG': hb + hg + hd,
        'HID': hb, 'HIE': hb, 'HIP': hb, 'HIS': hb,
        'TRP': hb,
        'PHE': hb,
        'TYR': hb,
        'GLU': hb + hg, 'GLH': hb + hg,
        'ASP': hb, 'ASH': hb,
        'LYS': hb + hg + hd + he, 'LYN': hb + hg + hd + he,
        'PRO': hb + hg + hd,
        'CYS': hb, 'CYX': hb,
        'MET': hb + hg,
    }
    # every residue also has N-terminal and C-terminal name variants
    renames = {}
    for resname, pairs in base_renames.items():
        for prefix in ('', 'N', 'C'):
            renames[prefix + resname] = pairs
    for residue in struct.get_residues():
        for amber_name, gromacs_name in renames.get(residue.resname, ()):
            residue[amber_name].fullname = gromacs_name
    return struct
def write_struct(struct, name):
    """Writes a corrected PDB file to disk with 'GMX_' prepended"""
    pdb_writer = PDBIO()
    pdb_writer.set_structure(struct)
    pdb_writer.save('GMX_' + name)
#### Main below here ####
# Python 2 script body: optionally rename termini, then convert atom
# names and write the result as GMX_<input name>.
if termini is True:
structure = rename_termini(structure)
print """If you have separate chains in your PDB (separated by a TER card),
make sure they have a chain identifier in column 22, in accordance
with the PDB version 3 format. Biopython is expecting this!"""
structure = amb2gmx(structure)
write_struct(structure,pdbname)
print "Done."
|
rtb1c13/scripts
|
SAXS_MD/AmbPDB_2_GMX.py
|
Python
|
gpl-2.0
| 6,501
|
[
"Amber",
"Biopython",
"Gromacs"
] |
ef2ce9d4ecd396bf5562d88bec49c12ec76cbe6a0c9dd41ca5ef1a6ce5a4eb81
|
"""
Test Logger Wrapper
"""
__RCSID__ = "$Id$"
import logging
from io import StringIO
from DIRAC.FrameworkSystem.private.standardLogging.LoggingRoot import LoggingRoot
from DIRAC.FrameworkSystem.private.standardLogging.Logging import Logging
gLogger = LoggingRoot()
def cleaningLog(log):
    """
    Remove date and space from the log string
    """
    # keep only what follows the last "UTC " marker; unchanged when absent
    _, _, tail = log.rpartition("UTC ")
    return tail if tail or "UTC " in log else log
def captureBackend():
    """
    Dirac logger is wrapped by LoggingRoot and represent the root of the DIRAC logging system
    Modify the output to capture logs of LoggingRoot
    """
    bufferDirac = StringIO()
    dirac_handlers = logging.getLogger("dirac").handlers
    if dirac_handlers:
        # redirect the first backend's stream into the in-memory buffer
        dirac_handlers[0].stream = bufferDirac
    return bufferDirac
def gLoggerReset():
"""
Reinitialize gLogger as only one instance exists
It avoids any unexpected behaviour due to multiple different usages

Returns a tuple (bufferDirac, log, sublog): the capture buffer for the
root backend, the "log" sub-logger and its "sublog" child, all reset to
their default display options.
"""
# Reinitialize the system/component name after other tests
# because LoggingRoot is a singleton and can not be reinstancied
Logging._componentName = "Framework"
# reset gLogger
gLogger.setLevel("notice")
gLogger.showHeaders(True)
gLogger.showThreadIDs(False)
gLogger.showContexts(True)
gLogger.showTimeStamps(True)
# modify the output to capture the log records into a buffer
bufferDirac = captureBackend()
# drop every handler/backend except the captured first one
del logging.getLogger("dirac").handlers[1:]
del gLogger._backendsList[1:]
# reset log
logging.getLogger("dirac").getChild("log").setLevel(logging.NOTSET)
log = gLogger.getSubLogger("log")
log.showHeaders(True)
log.showThreadIDs(False)
log.showContexts(True)
log.showTimeStamps(True)
# clear the "option was modified" flags so children inherit again
for option in log._optionsModified:
log._optionsModified[option] = False
del logging.getLogger("dirac.log").handlers[:]
del log._backendsList[:]
# reset sublog
logging.getLogger("dirac.log").getChild("sublog").setLevel(logging.NOTSET)
sublog = log.getSubLogger("sublog")
sublog.showHeaders(True)
sublog.showThreadIDs(False)
sublog.showContexts(True)
sublog.showTimeStamps(True)
for option in sublog._optionsModified:
sublog._optionsModified[option] = False
del logging.getLogger("dirac.log.sublog").handlers[:]
del sublog._backendsList[:]
return (bufferDirac, log, sublog)
|
ic-hep/DIRAC
|
src/DIRAC/FrameworkSystem/private/standardLogging/test/TestLogUtilities.py
|
Python
|
gpl-3.0
| 2,343
|
[
"DIRAC"
] |
0c73c24856295f6a80de3e83612c8a746db0d7a3b3ad20b15810cb2f7985bfa5
|
import unittest
from .enum import Enum
class TestEnum(unittest.TestCase):
    """Unit tests for the ENUM column definition."""

    def test_string_conversion(self):
        # a valid default renders inside the ENUM DDL and raises no schema issues
        column = Enum('test_column', 'ENUM', enum_values=['asdf', 'qwerty'], default='qwerty')
        # Fix: assertEquals is a deprecated alias removed in Python 3.12 -- use assertEqual
        self.assertEqual("`test_column` ENUM('asdf', 'qwerty') DEFAULT \'qwerty\'", str(column))
        self.assertEqual([], column.schema_errors)
        self.assertEqual([], column.schema_warnings)

    def test_default_errors(self):
        # a default outside the allowed values must be reported as a schema error
        column = Enum('test_column', 'ENUM', enum_values=['asdf', 'qwerty'], default='adf')
        self.assertEqual(["Default value for 'ENUM' column 'test_column' is not in the list of allowed values"],
                         column.schema_errors)
        # no default at all is fine
        self.assertEqual([], Enum('test_column', 'ENUM', enum_values=['asdf', 'qwerty']).schema_errors)
|
cmancone/mygrations
|
mygrations/core/definitions/columns/enum_test.py
|
Python
|
mit
| 808
|
[
"ADF"
] |
bc3fb2e51273b0caa965ff6bfee4401902a2ada14a27681259bade9939c00070
|
#!/usr/bin/env python
#
# Copyright (C) 2014 Brian Caswell <bmc@lungetech.com>
#
# Permission is hereby granted, free of charge, to any person obtaining a
# copy of this software and associated documentation files (the "Software"),
# to deal in the Software without restriction, including without limitation
# the rights to use, copy, modify, merge, publish, distribute, sublicense,
# and/or sell copies of the Software, and to permit persons to whom the
# Software is furnished to do so, subject to the following conditions:
#
# The above copyright notice and this permission notice shall be included
# in all copies or substantial portions of the Software.
#
# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS
# OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF
# MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT.
# IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY
# CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT,
# TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE
# SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.
#
from generator.actions import Actions
from random import choice, randint
import imp
import os
import random
import string
# NOTE(review): Python 2 script (print statement, 'imp' module).
print os.path.realpath(__file__)
# Load the book list from books.py located next to this script.
filename = os.path.sep.join(os.path.realpath(__file__).split(os.path.sep)[:-1] + ['books.py'])
books = imp.load_source('books', filename).books
def random_alpha(a, b):
    """Return a random string of ASCII letters with length in [a, b].

    Uses string.ascii_letters instead of string.letters: the latter is
    locale-dependent and was removed in Python 3.
    """
    return ''.join(choice(string.ascii_letters) for _ in range(randint(a, b)))
def random_string(a, b):
    """Return a random alphanumeric string with length in [a, b].

    Uses string.ascii_letters instead of string.letters: the latter is
    locale-dependent and was removed in Python 3.
    """
    chars = string.ascii_letters + string.digits
    return ''.join(choice(chars) for _ in range(randint(a, b)))
class Bookworm(Actions):
    """Poll generator for the book review service.

    Drives the remote menu loop: listing books, reviewing them and
    checking previously submitted reviews.  Submitted reviews are kept
    in ``self.state['reviews']`` as ``"<title>: <rating>"`` strings.
    """

    def start(self):
        # Consume the banner and initialise the review history.
        self.read(delim='\n', expect='Welcome to the book review server\n')
        self.state['reviews'] = []

    def menu(self):
        # The main menu is re-displayed before every selection.
        self.read(delim='Selection: ', expect="1. List Available "
                  "Books\n2. Books you have reviewed\n3. Review "
                  "more books\n4. Exit\nSelection: ")

    def list_books(self):
        """Select option 1 and verify the full, numbered book list."""
        self.write_number(1)
        expected = ['\nAvailable Books']
        for i, book in enumerate(books, 1):
            expected.append('%d: %s' % (i, book))
        self.read(delim='\n\n', expect='\n'.join(expected) + '\n\n')

    def reviewed(self):
        """Select option 2 and verify the list of reviews made so far."""
        self.write_number(2)
        if 'reviews' not in self.state:
            self.state['reviews'] = []
        expected = ['Book Title: Your rating'] + self.state['reviews']
        self.read(delim='\n\n', expect='\n' + '\n'.join(expected) + '\n\n')

    def write_number(self, digit):
        """Send a numeric selection; 1% of the time append alpha garbage."""
        trail = ''
        if self.chance(.01):
            trail = random_alpha(1, 30)
        self.write('%d%s\n' % (digit, trail))

    def review(self):
        """Select option 3 and submit a (mostly valid) book review."""
        # BUGFIX: 're' is used below but never imported at module level,
        # which made the invalid-book branch raise NameError at runtime.
        import re

        self.write_number(3)
        self.read(delim='?: ', expect='\nWhat book would you like to '
                  'review (1 - 100)?: ')
        # 1% of the time, review an invalid book. sometimes with strings,
        # sometimes with numbers, sometimes both
        if self.chance(.01):
            invalid = None
            while invalid is None:
                invalid = random_string(1, 30)
                # Reject candidates that accidentally start with a valid
                # book number (1-100) followed by a non-digit.
                if re.match(r'^0*(\d|\d{2}|100)\D', invalid):
                    invalid = None
            self.write("%s\n" % invalid)
            self.read(delim='\n', expect='Invalid book\n')
        else:
            # Renamed from 'choice' to avoid shadowing random.choice.
            book = randint(1, 100)
            self.write_number(book)
            self.read(delim='? ', expect='What rating would you like '
                      'to give this book? ')
            rating = 0
            # 1% of the time, send random jiberish as a response
            if self.chance(.01):
                self.write(random_alpha(1, 30))
            else:
                rating = randint(0, 0xFFFF)
                self.write_number(rating)
            if 'reviews' not in self.state:
                self.state['reviews'] = []
            self.state['reviews'].append("%s: %d" % (books[book - 1],
                                                     rating))

    def bad_menu(self):
        """Send an out-of-range or non-numeric menu selection."""
        if self.chance(.5):
            self.write_number(randint(6, 30))
        else:
            self.write('%s\n' % random_alpha(1, 98))
        self.read(delim='\n', expect='Invalid menu selection\n')

    def exit(self):
        """Select option 4 and expect the goodbye message."""
        self.write_number(4)
        self.read(delim='\n', expect='k, bye\n')

    def hidden(self):
        # This is the vulnerable condition. Do we want to expose this?
        if 0:
            self.write_number(5)
|
f0rki/cb-multios
|
tools/generate-polls/examples/bookworm/bookworm.py
|
Python
|
mit
| 4,548
|
[
"Brian"
] |
2195f59586e15efd01f33e7f70afe61bafd52618b64cdb1d9c8dafd3fefbe1f3
|
# This module is free software. You can redistribute it and/or modify it under
# the terms of the MIT License, see the file COPYING included with this
# distribution.
from __future__ import division
"""
General utils files for general purpose functions
"""
import re
import os
#Biopython
from Bio import SeqIO
import motif_utils
def getSeqMotifDict(fimoDict):
    """
    Make a dict between the seq names and list of motifs that occur in it

    Args:
        fimoDict: dict between motif names and the seqs it hits
    Returns:
        a dict between seq names and a list of motif IDs that hit it
    """
    seqMotifDict = {}
    for motifId, seqList in fimoDict.items():
        for seqName in seqList:
            # First time we see this sequence: start its motif list.
            motifList = seqMotifDict.setdefault(seqName, [])
            # Guard against duplicate hits of the same motif on one sequence.
            if motifId not in motifList:
                motifList.append(motifId)
    return seqMotifDict
def makeMotifDictFromPwmFile(pwmFileName):
    """
    read a PWM file that we would like to scan and make a motif dict between motif ID and motif object

    Args:
        pwmFileName: PWM file in MEME format
    Returns:
        dict between motif names and MyMotif objects with their PWM lines attached
    """
    #print 'PP:', pwmFileName
    #dict between motif IDs and motif objects
    motifDict = {}
    motifName = ''
    motifId = 0
    #dicts between motif IDs and motif names
    # NOTE(review): 'rb' + str processing is a Python-2 idiom; under
    # Python 3 this loop would yield bytes — confirm intended interpreter.
    with open(pwmFileName, 'rb') as handler:
        for line in handler:
            line = line.strip()
            # Skip blanks and the MEME header/background boilerplate lines.
            if not line.strip() or re.search(r'letter', line) or re.search(r'version', line) or re.search(r'ALPHABET',line) or re.search(r'strands', line)\
                or re.search(r'A 0.25000', line):
                continue
            #print 'l:',line
            if re.search(r'MOTIF', line):
                # New motif: its name is everything after 'MOTIF', joined by '_'.
                split = line.split()
                line = '_'.join(split[1:])
                motifName = line
                motifId += 1
                motifObj = motif_utils.MyMotif()
                motifObj.Id = motifId
                motifObj.regExp = motifName #notice here it is actually the motif name not a reg. expression
                motifDict[motifName] = motifObj
                continue
            #add the PWM lines to the motif
            motifDict[motifName].pwmLines.append(line)
    ##check the hash
    #for motifName in motifDict.iterkeys():
        #print motifName
        #motifObj = motifDict[motifName]
        #print '\t',motifObj.regExp
        #for line in motifObj.pwmLines:
            #print '\t', line
    return motifDict
def processMotifHitFile_1(inFile):
    """ Process a motif hit/occurrence file and return a universe set (U) and a dict of set of sets
    The inFile should look like:
    >motif_name_1
    sequence name
    sequence name
    >motif_name_2
    sequence name
    sequence name
    You can check the wiki page about set cover problem to get an idea about Universe (U) set and set S of subsets
    Args:
        inFile: name of file of motif hits in the format explained above
    Returns:
        motifIdDict: A dict between motif names and motif numbers (IDs)
        idMotifDict: A dict between motif numbers (IDs) and motif names
        seqIdDict: A dict between seq names and seq numbers (IDs)
        idSeqDict: A dict between seq numbers (IDs) and seq names
        Uset : universe set of all seq IDs
        Sdict: dict between motif IDs and the sequence IDs the motif hits
    """
    # NOTE(review): Python 2 function (print statements below).
    #a dict between motif names in the file and motif numbers (IDs)
    motifIdDict = {}
    #a dict between motif numbers (IDs) and motif names
    idMotifDict = {}
    #a dict between seq names and seq numbers (IDs)
    seqIdDict = {}
    #a dict between seq numbers and seq names
    idSeqDict = {}
    #all numbers/IDs start from 1 inside the dicts/sets
    motifId = 0
    seqId = 0
    #Universe set which is the set of all the sequences, the U set
    Uset = set()
    #A dict between motif names and the set of seqs that occur in them, the S set
    Sdict = {}
    print 'in process motif hit'
    #start reading the file
    with open(inFile, 'rb') as handler:
        for line in handler:
            line = line.strip()
            #skip empty spaces
            if not line.strip():
                continue
            if re.search(r'>', line):#found a motif
                motifId += 1
                motifName = line[1:]#take off the > character
                if motifName not in motifIdDict:
                    motifIdDict[motifName] = motifId
                else:
                    # Duplicate motif names would corrupt the ID mapping.
                    print 'There are motif names duplicates. the file should have unique motif names'
                    exit()
                idMotifDict[motifId] = motifName
                #initialize the element in the Sdict
                Sdict[motifId] = set()
                continue
            #all the other lines are sequence names, check if the seq name has been checked and inserted or not
            seqName = line
            if seqName not in seqIdDict:
                seqId += 1
                seqIdDict[seqName] = seqId
                idSeqDict[seqId] = seqName
                tmpSeqId = seqId
            else:
                tmpSeqId = seqIdDict[seqName]
            if tmpSeqId not in Sdict[motifId]:
                Sdict[motifId].add(tmpSeqId)
            #add the seqs to the U universe dict
            # NOTE(review): the branch only fires on a sequence's first
            # occurrence, when tmpSeqId == seqId, so adding seqId here is
            # equivalent to adding tmpSeqId (confusing but not a bug).
            if tmpSeqId not in Uset:
                Uset.add(seqId)
    #check the U dict
    #print Uset
    #check the S dict
    #for motifId, seqSet in Sdict.items():
        #print 'motifId:', motifId, 'set:', seqSet
    return motifIdDict, idMotifDict, seqIdDict, idSeqDict, Uset, Sdict
def processMotifHitFile(foreMotifFile, backMotifFile):
    """Read the fore and back motif hit files and make motif dict and dict of motifs with their hits
    The input motif hit file should look like:
    >motif_name_1
    sequence name
    sequence name
    >motif_name_2
    sequence name
    sequence name

    Args:
        foreMotifFile: foreground motif hit file in the format above
        backMotifFile: background motif hit file, or the string 'none' to skip
    Returns:
        (motifObjDict, foreHitDict, backHitDict, foreSeqList, backSeqList)
    """
    # NOTE(review): Python 2 function (print statements below).
    motifId = 0
    #dict between motif names and motif object
    motifObjDict = {}
    #dict between the motif name and list of seqs it occurs in
    foreHitDict = {}
    #list of sequences in the foreground file
    foreSeqList = []
    motifName = ''
    #start reading the foreground file
    with open(foreMotifFile, 'rb') as handler:
        for line in handler:
            line = line.strip()
            #skip empty spaces
            if not line.strip():
                continue
            if re.search(r'>', line):#found a motif
                motifId += 1
                motifName = line[1:]#take off the > character
                #make a motif object
                motifObj = motif_utils.MyMotif()
                motifObj.Id = motifId
                motifObj.regExp = motifName #notice here it is actually the motif name not a reg. expression
                if motifName not in motifObjDict:
                    motifObjDict[motifName] = motifObj
                else:
                    # Duplicate motif names would corrupt the mapping.
                    print 'motif names should be unique. Exiting'
                    exit()
                continue
            #add the seqs to the motif
            if motifName not in foreHitDict:
                foreHitDict[motifName] = []
            foreHitDict[motifName].append(line)
            if line not in foreSeqList:
                foreSeqList.append(line)
    ##check the motif dict
    #for motifName in motifObjDict:
        #print 'mName:', motifName
    #for motifName in foreHitDict:
        #print 'mName:', motifName,'seqs:', foreHitDict[motifName]
    #print 'whole seq list:', foreSeqList
    #process the background
    #dict between the motif name and list of seqs it occurs in the background
    backHitDict = {}
    #list of sequences in the background file
    backSeqList = []
    #check if there is a background file as well and make data for it
    backMotifName = ''
    if backMotifFile != 'none':
        with open(backMotifFile, 'rb') as handler:
            for line in handler:
                line = line.strip()
                #skip empty spaces
                if not line.strip():
                    continue
                if re.search(r'>', line):#found a motif
                    backMotifName = line[1:]#take off the > character
                    continue
                #add the seqs to the motif
                if backMotifName not in backHitDict:
                    backHitDict[backMotifName] = []
                backHitDict[backMotifName].append(line)
                if line not in backSeqList:
                    backSeqList.append(line)
    #for motifName in backHitDict:
    #    print 'mName:', motifName,'seqs:', backHitDict[motifName]
    #print 'whole seq list:', backSeqList
    return motifObjDict, foreHitDict, backHitDict, foreSeqList, backSeqList
def writePWMFromMotifs(motifDict, pwmFileName):
    """Go thru the motif dict of BioPython objects and write them to a file in MEME PWM format
    Args:
        motifDict: dictionary between motif IDs and MyMotif objects (with .bioMotifObj)
        pwmFileName: name of the PWM motif file to write
    """
    # NOTE(review): Python 2 function ('wb' with str writes, iterkeys()).
    pwmFile = open(pwmFileName, 'wb')
    # Standard MEME header with a uniform ACGT background.
    pwmFile.write('MEME version 4.4\nALPHABET= ACGT\nstrands: + -\nBackground letter frequencies (from web form):\nA 0.25000 C 0.25000 G 0.25000 T 0.25000\n\n')
    alphaList = ['A', 'C', 'G', 'T']
    #loop thru the motifs
    for motifId in motifDict.iterkeys():
        motifObj = motifDict[motifId].bioMotifObj
        #get motif length
        motifLength = len(motifObj)
        pwmFile.write('\nMOTIF ' + str(motifId) + '\n')
        pwmFile.write('letter-probability matrix:\n')
        # Column frequencies from the raw counts, no pseudocounts added.
        pwm = motifObj.counts.normalize(pseudocounts=0)
        for i in range(motifLength):
            for alpha in alphaList:
                pwmFile.write(' ' + str(pwm[alpha][i]))
            pwmFile.write('\n')
    #close the file
    pwmFile.close()
def writePWMFromMotifsSelected(motifDict, pwmFileName, selectIdList, idMotifDict):
    """Write the PWMs to a file, only write the IDs chosen in the selectIdList
    Args:
        motifDict: dict between motif names and MyMotif objects (with .bioMotifObj)
        pwmFileName: MEME-format output file name
        selectIdList: motif IDs (or names) to write
        idMotifDict: dict from motif IDs to motif names
    """
    pwmFile = open(pwmFileName, 'wb')
    # Standard MEME header with a uniform ACGT background.
    pwmFile.write('MEME version 4.4\nALPHABET= ACGT\nstrands: + -\nBackground letter frequencies (from web form):\nA 0.25000 C 0.25000 G 0.25000 T 0.25000\n\n')
    alphaList = ['A', 'C', 'G', 'T']
    #loop thru the motifs
    for motifId in selectIdList:
        # IDs missing from the mapping are assumed to already be names.
        if motifId not in idMotifDict:
            motifName = motifId
        else:
            motifName = idMotifDict[motifId]
        #print 'motifId:', motifId,motifName
        motifObj = motifDict[motifName].bioMotifObj
        #get motif length
        motifLength = len(motifObj)
        pwmFile.write('\nMOTIF ' + str(motifName) + '\n')
        pwmFile.write('letter-probability matrix:\n')
        # Column frequencies from the raw counts, no pseudocounts added.
        pwm = motifObj.counts.normalize(pseudocounts=0)
        for i in range(motifLength):
            for alpha in alphaList:
                pwmFile.write(' ' + str(pwm[alpha][i]))
            pwmFile.write('\n')
    #close the file
    pwmFile.close()
def writePWMFromMotifsSelectedScan(motifDict, pwmFileName, selectIdList, idMotifDict, finalSelectMotifList):
    """Write the PWMs to a file, only write the IDs chosen in the selectIdList
    This is for the motif PWM scan problem where the PWM is already provided as input
    (the raw PWM lines stored on each MyMotif are echoed verbatim).
    """
    pwmFile = open(pwmFileName, 'wb')
    # Standard MEME header with a uniform ACGT background.
    pwmFile.write('MEME version 4.4\nALPHABET= ACGT\nstrands: + -\nBackground letter frequencies (from web form):\nA 0.25000 C 0.25000 G 0.25000 T 0.25000\n\n')
    for motifId in selectIdList:
        # IDs missing from the mapping are assumed to already be names.
        if motifId not in idMotifDict:
            motifName = motifId
        else:
            motifName = idMotifDict[motifId]
        # Skip motifs that did not survive the final filtering step.
        if motifName not in finalSelectMotifList:
            continue
        motifObj = motifDict[motifName]
        #motifName = motifObj.regExp
        pwmFile.write('\nMOTIF ' + motifName + '\n')
        pwmFile.write('letter-probability matrix:\n')
        # Echo the PWM lines exactly as read from the input file.
        for line in motifObj.pwmLines:
            pwmFile.write(line+'\n')
    #close the file
    pwmFile.close()
def writeMotifSeqFile(motifDict, outFileName):
    """take a dict between motif IDs and seqs it occurs in and write to a file
    The format is:
    >motif_name
    seq_1
    seq_2
    Args:
        motifDict: dict between motif IDs/names and a list of sequences the motif occurs in
        outFileName: name of file to write to
    """
    # Text mode ('w') instead of 'wb': the function writes str, not bytes,
    # which would raise TypeError under Python 3. The context manager
    # guarantees the file is closed even on error.
    with open(outFileName, 'w') as outFile:
        # sorted(dict) iterates the keys; iterkeys() is Python-2 only.
        for motifId in sorted(motifDict):
            outFile.write('>' + str(motifId) + '\n')
            for seqName in motifDict[motifId]:
                outFile.write(seqName + '\n')
def writeMotifSeqFileFromMotifDict(motifDict, outFileName):
    """Take a motif dict between motif Ids and MyMotif objects and write the seqs of each motif to a file
    use the seq coverage file format
    >motif_name
    seq_1
    seq_2
    Args:
        motifDict: dict between motif IDs and MyMotif objects (their
            .foreSeqList attribute holds the foreground sequence names)
        outFileName: name of file to write to
    """
    # Text mode ('w') instead of 'wb': the function writes str, not bytes,
    # which would raise TypeError under Python 3. The context manager
    # guarantees the file is closed even on error.
    with open(outFileName, 'w') as outFile:
        # Iterating items() replaces the Python-2-only iterkeys().
        for motifId, motifObj in motifDict.items():
            outFile.write('>' + str(motifId) + '\n')
            for seqName in motifObj.foreSeqList:
                outFile.write(seqName + '\n')
def findNumSeqs(fastaFile):
    """Use Biopython to find the number of fasta sequences in a fasta file
    Args:
        fastaFile: fasta file
    Returns:
        number of fasta sequences
    """
    # 'r' instead of 'rU': universal newlines are the default in text mode
    # and the 'U' flag was removed in Python 3.11. The context manager
    # guarantees the handle is closed.
    with open(fastaFile, "r") as handle:
        # Count records lazily instead of keeping a manual counter.
        return sum(1 for _ in SeqIO.parse(handle, "fasta"))
def findSeqList(fastaFile):
    """
    Read the fasta file and return a list of sequence names in the fasta file
    """
    # 'r' instead of 'rU': universal newlines are the default in text mode
    # and the 'U' flag was removed in Python 3.11. The context manager
    # guarantees the handle is closed.
    with open(fastaFile, "r") as handle:
        return [str(record.id) for record in SeqIO.parse(handle, "fasta")]
def getSeqList(fastaFile):
    """Use Biopython to return a list of fasta seq names
    (kept as a duplicate of findSeqList for backward compatibility)
    Args:
        fastaFile: fasta file
    Returns:
        list of seqNames
    """
    # 'r' instead of 'rU': universal newlines are the default in text mode
    # and the 'U' flag was removed in Python 3.11. The context manager
    # guarantees the handle is closed.
    with open(fastaFile, "r") as handle:
        return [str(record.id) for record in SeqIO.parse(handle, "fasta")]
def makeMotifLogo(motifIdList, motifDict, outDirName, pwmFileName, idMotifDict, op, finalSelectMotifList):
    """Make motif logos for a set of motifs as specified in the motifIdList
    Add a motif logo ID (logo file name) to each motif object
    Args:
        motifIdList: list of motif IDs to make logos for
        motifDict: a dict between motif ID and a MyMotif object
        outDirName: directory the PNG logos are written to
        pwmFileName: MEME-format PWM file written as meme2images input
        idMotifDict: dict from IDs to motif names
        op: operation; 'disc' for motif discovery, 'cov' for coverage/scan
        finalSelectMotifList: list of motif names, final filtered list
    Return:
    """
    # NOTE(review): Python 2 function (print statement below).
    if op == 'disc':
        writePWMFromMotifsSelected(motifDict, pwmFileName, motifIdList, idMotifDict)
    if op== 'cov':
        writePWMFromMotifsSelectedScan(motifDict, pwmFileName, motifIdList, idMotifDict, finalSelectMotifList)
    #get the path of the module
    utils_path = os.path.realpath(__file__)
    #remove the name of the module from the end of it. Replace won't work since w ehave DME.py and DME.pyc
    split = utils_path.split('/')
    utils_path = '/'.join(split[:len(split)-1])
    # meme2images is expected two directory levels above this module.
    inPath = os.path.realpath(__file__)
    split = inPath.split('/')
    inPath = '/'.join(split[:len(split)-2])
    command = inPath + '/' + 'meme2images -png ' + pwmFileName + ' ' + outDirName
    #print 'meme2image command:', command
    # NOTE(review): os.system returns the exit status rather than raising,
    # so this bare except will not catch a failing meme2images command.
    try:
        os.system(command)
    except:
        print 'meme2image execution failed. Exiting'
        exit()
def makeMotifLogoFromPwm(pwmFileName, outDirName):
    """
    Given the pwm file in MEME format make the logos
    (PNG files written by the external meme2images tool into outDirName)
    """
    # NOTE(review): Python 2 function (print statements below).
    #get the path of the module
    utils_path = os.path.realpath(__file__)
    #remove the name of the module from the end of it. Replace won't work since w ehave DME.py and DME.pyc
    split = utils_path.split('/')
    utils_path = '/'.join(split[:len(split)-1])
    # meme2images is expected two directory levels above this module.
    inPath = os.path.realpath(__file__)
    split = inPath.split('/')
    inPath = '/'.join(split[:len(split)-2])
    command = inPath + '/' + 'meme2images -png ' + pwmFileName + ' ' + outDirName
    print 'meme2image command:', command
    # NOTE(review): os.system returns the exit status rather than raising,
    # so this bare except will not catch a failing meme2images command.
    try:
        os.system(command)
    except:
        print 'meme2image execution failed. Exiting'
        exit()
def writeMotifBasicStat(motifDict, foreFimoDict, backFimoDict, foreNumSeqs, backNumSeqs, outFileName, tomtomDict):
    """
    Write a CSV file with basic info about the motif stats like coverage

    Args:
        motifDict: dict between motif names and motif objects (only the keys are used here)
        foreFimoDict: dict motif name -> sequences hit in the foreground
        backFimoDict: dict motif name -> sequences hit in the background
        foreNumSeqs: total number of foreground sequences
        backNumSeqs: total number of background sequences
        outFileName: name of the CSV file to write
        tomtomDict: dict motif name -> list of similar motif names (tomtom)
    """
    # Text mode: the function writes str rows, not bytes ('wb' would break
    # under Python 3). Context manager guarantees the file is closed.
    with open(outFileName, 'w') as outFile:
        headerList = ['#MotifName','Num_Fg_seqs','Fg_cov','Num_Bg_seqs', 'Bg_cov','FG/BG','similar_motifs']
        outFile.write(','.join(headerList) + '\n')
        # Iterating the dict directly replaces the Python-2-only iterkeys().
        for motifName in motifDict:
            if motifName in foreFimoDict:
                foreSeqs = len(foreFimoDict[motifName])
                foreCov = 100*(foreSeqs/foreNumSeqs)
            else:
                foreSeqs = 0
                foreCov = 0
            if motifName in backFimoDict:
                backSeqs = len(backFimoDict[motifName])
                backCov = 100*(backSeqs/backNumSeqs)
            else:
                backSeqs = 0
                backCov = 0
            # Guard against zero background coverage: an empty background
            # hit list previously raised ZeroDivisionError. Fall back to
            # dividing by 1, matching the missing-background case.
            fg_over_bg = foreCov/backCov if backCov else foreCov/1
            #find similar motifs to this motif
            simStr = '//'.join(tomtomDict[motifName]) if motifName in tomtomDict else ''
            lineList = [motifName, str(foreSeqs), str(foreCov), str(backSeqs), str(backCov), str(fg_over_bg), simStr]
            outFile.write(','.join(lineList) + '\n')
def main(args):
    """Entry point placeholder; command-line arguments are currently unused."""
    pass
##
if __name__ == "__main__":
    # BUGFIX: 'sys' is never imported at module level, so running this
    # module as a script raised NameError; import it locally here.
    import sys
    main(sys.argv)
#--eof--#
|
RamiOran/SeqCov
|
utils/general_utils.py
|
Python
|
mit
| 15,763
|
[
"Biopython"
] |
388d15a4d3ae61d1a7589262922c3f9e662edcb16e5fc76211258538833db0bd
|
# (c) 2014, Brian Coca <bcoca@ansible.com>
#
# This file is part of Ansible
#
# Ansible is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# Ansible is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with Ansible. If not, see <http://www.gnu.org/licenses/>.
from __future__ import absolute_import
import math
import collections
from ansible import errors
def unique(a):
    """Return the unique elements of *a*.

    If *a* itself is hashable (e.g. a string or tuple) the result is a
    set; otherwise a list preserving first-seen order is returned.
    """
    # collections.Hashable was removed in Python 3.10; the ABC lives in
    # collections.abc (available since Python 3.3).
    if isinstance(a, collections.abc.Hashable):
        c = set(a)
    else:
        c = []
        for x in a:
            if x not in c:
                c.append(x)
    return c
def intersect(a, b):
    """Return elements common to *a* and *b*.

    A set when both inputs are hashable, otherwise an order-preserving list.
    """
    # collections.abc.Hashable replaces collections.Hashable (removed 3.10).
    if isinstance(a, collections.abc.Hashable) and isinstance(b, collections.abc.Hashable):
        c = set(a) & set(b)
    else:
        # List comprehension instead of filter(): under Python 3, filter()
        # returns a (hashable) iterator, which made unique() wrongly take
        # the set branch; a list restores the Python-2 list semantics.
        c = unique([x for x in a if x in b])
    return c
def difference(a, b):
    """Return elements of *a* not in *b*.

    A set when both inputs are hashable, otherwise an order-preserving list.
    """
    # collections.abc.Hashable replaces collections.Hashable (removed 3.10).
    if isinstance(a, collections.abc.Hashable) and isinstance(b, collections.abc.Hashable):
        c = set(a) - set(b)
    else:
        # List comprehension instead of filter(): keeps Python-2 list
        # semantics (a filter iterator is hashable and would confuse unique()).
        c = unique([x for x in a if x not in b])
    return c
def symmetric_difference(a, b):
    """Return elements in exactly one of *a* and *b*.

    A set when both inputs are hashable, otherwise an order-preserving list.
    """
    # collections.abc.Hashable replaces collections.Hashable (removed 3.10).
    if isinstance(a, collections.abc.Hashable) and isinstance(b, collections.abc.Hashable):
        c = set(a) ^ set(b)
    else:
        # Hoist the intersection: the original recomputed intersect(a, b)
        # for every element. List comprehension keeps Python-2 semantics.
        common = intersect(a, b)
        c = unique([x for x in union(a, b) if x not in common])
    return c
def union(a, b):
    """Return all elements of *a* and *b*.

    A set when both inputs are hashable, otherwise an order-preserving list.
    """
    # collections.abc.Hashable replaces collections.Hashable (removed 3.10).
    if isinstance(a, collections.abc.Hashable) and isinstance(b, collections.abc.Hashable):
        c = set(a) | set(b)
    else:
        c = unique(a + b)
    return c
def min(a):
    """Return the smallest element of *a* using the real builtin min."""
    # This module shadows the builtin. __builtins__ is a dict in imported
    # modules but a *module* in __main__, so __builtins__.get('min') is
    # fragile; the builtins module works in both contexts.
    import builtins
    return builtins.min(a)
def max(a):
    """Return the largest element of *a* using the real builtin max."""
    # This module shadows the builtin. __builtins__ is a dict in imported
    # modules but a *module* in __main__, so __builtins__.get('max') is
    # fragile; the builtins module works in both contexts.
    import builtins
    return builtins.max(a)
def isnotanumber(x):
    """Return True when *x* is a float NaN; False for everything else."""
    nan = False
    try:
        nan = math.isnan(x)
    except TypeError:
        # Non-numeric values cannot be NaN.
        pass
    return nan
def logarithm(x, base=math.e):
    """Return the logarithm of *x* in *base* (natural log by default).

    Raises AnsibleFilterError when *x* or *base* is not a number.
    """
    try:
        # log10 is more precise than the two-argument form for base 10.
        result = math.log10(x) if base == 10 else math.log(x, base)
    except TypeError as e:
        raise errors.AnsibleFilterError('log() can only be used on numbers: %s' % str(e))
    return result
def power(x, y):
    """Return *x* raised to *y*; raise AnsibleFilterError for non-numbers."""
    try:
        result = math.pow(x, y)
    except TypeError as e:
        raise errors.AnsibleFilterError('pow() can only be used on numbers: %s' % str(e))
    return result
def inversepower(x, base=2):
    """Return the *base*-th root of *x* (square root by default).

    Raises AnsibleFilterError when the inputs are not numbers.
    """
    try:
        if base == 2:
            # sqrt is more accurate than pow(x, 0.5) for the common case.
            return math.sqrt(x)
        return math.pow(x, 1.0/float(base))
    except TypeError as e:
        raise errors.AnsibleFilterError('root() can only be used on numbers: %s' % str(e))
def human_readable(size, isbits=False, unit=None):
    """Format *size* as a human-readable string in bits or Bytes.

    When *unit* is given (e.g. 'M'), the value is expressed in that unit;
    otherwise the largest unit not exceeding *size* is chosen.
    """
    base = 'bits' if isbits else 'Bytes'
    suffix = ''
    # Binary thresholds from zetta down to the raw unit, largest first.
    ranges = (
        (1 << 70, 'Z'),
        (1 << 60, 'E'),
        (1 << 50, 'P'),
        (1 << 40, 'T'),
        (1 << 30, 'G'),
        (1 << 20, 'M'),
        (1 << 10, 'K'),
        (1, base),
    )
    for limit, suffix in ranges:
        if unit is None:
            # Auto-select: first threshold the size reaches.
            if size >= limit:
                break
        elif unit.upper() == suffix:
            # Explicit unit requested by the caller.
            break
    if limit != 1:
        # Append 'B' or 'b' to the magnitude prefix (KB, Mb, ...).
        suffix += base[0]
    return '%.2f %s' % (float(size) / limit, suffix)
class FilterModule(object):
    ''' Ansible math jinja2 filters '''

    def filters(self):
        """Map jinja2 filter names to their implementations."""
        filter_map = {}
        # general math
        filter_map['isnan'] = isnotanumber
        filter_map['min'] = min
        filter_map['max'] = max
        # exponents and logarithms
        filter_map['log'] = logarithm
        filter_map['pow'] = power
        filter_map['root'] = inversepower
        # set theory
        filter_map['unique'] = unique
        filter_map['intersect'] = intersect
        filter_map['difference'] = difference
        filter_map['symmetric_difference'] = symmetric_difference
        filter_map['union'] = union
        # computer theory
        filter_map['human_readable'] = human_readable
        return filter_map
|
pheanex/ansible
|
lib/ansible/plugins/filter/mathstuff.py
|
Python
|
gpl-3.0
| 4,023
|
[
"Brian"
] |
aa6a5ec4800e08af8d376ca09c52ff18e4728d95b885aba743cf79c1edd148c8
|
# -*- coding: utf-8 -*-
#
#
# TheVirtualBrain-Scientific Package. This package holds all simulators, and
# analysers necessary to run brain-simulations. You can use it stand alone or
# in conjunction with TheVirtualBrain-Framework Package. See content of the
# documentation-folder for more details. See also http://www.thevirtualbrain.org
#
# (c) 2012-2013, Baycrest Centre for Geriatric Care ("Baycrest")
#
# This program is free software; you can redistribute it and/or modify it under
# the terms of the GNU General Public License version 2 as published by the Free
# Software Foundation. This program is distributed in the hope that it will be
# useful, but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public
# License for more details. You should have received a copy of the GNU General
# Public License along with this program; if not, you can download it here
# http://www.gnu.org/licenses/old-licenses/gpl-2.0
#
#
# CITATION:
# When using The Virtual Brain for scientific publications, please cite it as follows:
#
# Paula Sanz Leon, Stuart A. Knock, M. Marmaduke Woodman, Lia Domide,
# Jochen Mersmann, Anthony R. McIntosh, Viktor Jirsa (2013)
# The Virtual Brain: a simulator of primate brain network dynamics.
# Frontiers in Neuroinformatics (7:10. doi: 10.3389/fninf.2013.00010)
#
#
"""
The Data component of Spectral datatypes.
.. moduleauthor:: Stuart A. Knock <Stuart@tvb.invalid>
.. moduleauthor:: Paula Sanz Leon <Paula@tvb.invalid>
"""
import tvb.basic.traits.core as core
import tvb.basic.traits.types_basic as basic
import tvb.datatypes.arrays as arrays
import tvb.datatypes.time_series as time_series
from tvb.basic.traits.types_mapped import MappedType
class PrincipalComponentsData(MappedType):
    """
    Result of a Principal Component Analysis (PCA).
    """
    # The time-series the PCA was computed from.
    source = time_series.TimeSeries(
        label="Source time-series",
        doc="Links to the time-series on which the PCA is applied.")

    # Per-component weight vectors; FILE_STORAGE_EXPAND allows the array
    # to be written incrementally as it can grow large.
    weights = arrays.FloatArray(
        label="Principal vectors",
        doc="""The vectors of the 'weights' with which each time-series is
        represented in each component.""",
        file_storage=core.FILE_STORAGE_EXPAND)

    # Fraction of variance explained per principal component.
    fractions = arrays.FloatArray(
        label="Fraction explained",
        doc="""A vector or collection of vectors representing the fraction of
        the variance explained by each principal component.""",
        file_storage=core.FILE_STORAGE_EXPAND)

    # Normalised input time-series.
    norm_source = arrays.FloatArray(
        label="Normalised source time series",
        file_storage=core.FILE_STORAGE_EXPAND)

    # Time-series projected onto the principal components.
    component_time_series = arrays.FloatArray(
        label="Component time series",
        file_storage=core.FILE_STORAGE_EXPAND)

    # Normalised version of the component time-series.
    normalised_component_time_series = arrays.FloatArray(
        label="Normalised component time series",
        file_storage=core.FILE_STORAGE_EXPAND)

    # Instructs the traits machinery to generate a DB table for this type.
    __generate_table__ = True
class IndependentComponentsData(MappedType):
    """
    Result of TEMPORAL (Fast) Independent Component Analysis
    """
    # The time-series the ICA was computed from.
    source = time_series.TimeSeries(
        label="Source time-series",
        doc="Links to the time-series on which the ICA is applied.")

    # Linear mixing matrix (spatial maps).
    mixing_matrix = arrays.FloatArray(
        label="Mixing matrix - Spatial Maps",
        doc="""The linear mixing matrix (Mixing matrix) """)

    # Estimated unmixing matrix used to recover the sources.
    unmixing_matrix = arrays.FloatArray(
        label="Unmixing matrix - Spatial maps",
        doc="""The estimated unmixing matrix used to obtain the unmixed
        sources from the data""")

    prewhitening_matrix = arrays.FloatArray(
        label="Pre-whitening matrix",
        doc=""" """)

    # Number of non-Gaussian independent components assumed in the data.
    n_components = basic.Integer(
        label="Number of independent components",
        doc=""" Observed data matrix is considered to be a linear combination
        of :math:`n` non-Gaussian independent components""")

    # Zero-centred, whitened input time-series.
    norm_source = arrays.FloatArray(
        label="Normalised source time series. Zero centered and whitened.",
        file_storage=core.FILE_STORAGE_EXPAND)

    # Unmixed source time-series.
    component_time_series = arrays.FloatArray(
        label="Component time series. Unmixed sources.",
        file_storage=core.FILE_STORAGE_EXPAND)

    # Normalised version of the unmixed sources.
    normalised_component_time_series = arrays.FloatArray(
        label="Normalised component time series",
        file_storage=core.FILE_STORAGE_EXPAND)

    # Instructs the traits machinery to generate a DB table for this type.
    __generate_table__ = True
|
rajul/tvb-library
|
tvb/datatypes/mode_decompositions_data.py
|
Python
|
gpl-2.0
| 4,446
|
[
"Gaussian"
] |
ccb5844d1e3a2c76a1fc63ab70b20c9914c2027022f29247faac97560e8f09a4
|
#!/usr/bin/env python
#
from distutils.core import setup,Extension
from distutils import sysconfig
import os, sys, re, glob, shutil

# Release identifier written into the package metadata.
version='2009.Q1b2'
# Platform-specific extension-module suffix (e.g. '.so' / '.pyd').
module_ext = sysconfig.get_config_var('SO')
if sys.platform=="win32":
    install_base="Lib/site-packages"
else:
    install_base = os.path.join(sysconfig.get_config_var('LIBDEST'),'site-packages')
ext_modules=[]
# Sub-packages shipped below the top-level 'rdkit' package.
child_packages = [
    "rdkit.Chem",
    "rdkit.DataManip",
    "rdkit.DataStructs",
    "rdkit.Dbase",
    "rdkit.DistanceGeometry",
    "rdkit.ForceField",
    "rdkit.Geometry",
    "rdkit.Logger",
    "rdkit.ML",
    "rdkit.Numerics",
    "rdkit.SimDivFilters",
    "rdkit.VLib",
    "rdkit.sping",
    "rdkit.utils",
]
# (install_dir, files) pairs for the prebuilt shared objects.
sos = [(os.path.join(install_base,'rdkit'),['rdkit/rdBase'+module_ext])]
py_packages = ["rdkit"]+child_packages
# Walk each child package: register nested packages (dirs holding
# __init__.py) and collect the prebuilt extension modules to install.
for pkg in child_packages:
    for root,dirs,files in os.walk(pkg.replace('.','/')):
        if '.svn' in dirs: dirs.remove('.svn')
        if 'test_data' in dirs: dirs.remove('test_data')
        modName=root.replace(os.path.sep,'.')
        if '__init__.py' in files and modName not in py_packages:
            py_packages.append(modName)
        # NOTE(review): keeps files whose extension matches the module
        # suffix OR that live under a test_data path — though test_data
        # dirs are pruned from the walk above; confirm intent.
        files=[os.path.join(root,file) for file in files if (os.path.splitext(file)[-1]==module_ext or\
            'test_data' in root)]
        sos.extend([(os.path.join(install_base,root),files)])
# Everything below is installed under share/rdkit in the install tree.
extraBase='share/rdkit'
# Example projects shipped alongside the library.
projects=[]
for root,dirs,files in os.walk('Projects'):
    if '.svn' in dirs: dirs.remove('.svn')
    files=[os.path.join(root,filen) for filen in files]
    projects.append((extraBase+'/'+root,files))
# Data files, top-level text files and the native libraries.
data_files = [(extraBase+'/Data',glob.glob('Data/*.*'))]
data_files.extend([(extraBase,glob.glob('./*.txt'))])
if sys.platform=='win32':
    data_files.extend([(extraBase+'/lib',glob.glob('bin/*.dll'))])
else:
    data_files.extend([(extraBase+'/lib',glob.glob('bin/*'))])
data_files.extend(sos)
# Documentation tree, installed as-is.
documentation = []
for root,dirs,files in os.walk('Docs'):
    if '.svn' in dirs: dirs.remove('.svn')
    files=[os.path.join(root,filen) for filen in files]
    documentation.append((extraBase+'/'+root,files))
# Drive the distutils build/install with all metadata and collected files.
setup(
    name='rdkit',
    version=version,
    description='RDKit Cheminformatics Library',
    long_description="""Data structures, algorithms, and scripts for cheminformatics.""",
    author='Greg Landrum',
    author_email='glandrum@users.sourceforge.net',
    url='http://www.rdkit.org/',
    download_url = 'http://code.google.com/p/rdkit/downloads/list',
    license='BSD',
    platforms=['Windows','Linux','Mac OS-X'],
    classifiers = ['Development Status :: 5 - Production/Stable',
                   'Environment :: Console',
                   'Intended Audience :: Developers',
                   'Intended Audience :: Science/Research',
                   'Programming Language :: Python',
                   'Programming Language :: C++',
                   'License :: OSI Approved :: BSD License',
                   'Topic :: Scientific/Engineering :: Chemistry',
                   ],
    packages=py_packages,
    ext_modules=ext_modules,
    package_dir={'rdkit':'rdkit'},
    data_files=data_files+documentation+projects,
)
|
rdkit/rdkit-orig
|
setup.py
|
Python
|
bsd-3-clause
| 3,461
|
[
"RDKit"
] |
8ebfd24860d2f7a12d003e962f02b1ab2a9714329175073c423c7dbd49d517e6
|
'''Parameter sweep (2D): shared procedures.'''
from __future__ import absolute_import, print_function
import numpy as np
from grid_cell_model.submitting.factory import SubmitterFactory
from grid_cell_model.submitting.arguments import ArgumentCreator
from grid_cell_model.submitting.noise.slopes import (DefaultSelector,
NoThetaSelector)
from grid_cell_model.otherpkg.log import log_info
from simtools.storage import DataStorage
def submitParamSweep(p, startG, endG, Nvals, ENV, simRootDir, simLabel, appName,
                     rtLimit, numCPU, blocking, timePrefix, numRepeat, dry_run,
                     extraIterparams=None, rc=None, **kwargs):
    '''Submit and save metadata for the gE vs gI parameter sweep.

    Builds an Nvals x Nvals Cartesian grid of excitatory/inhibitory
    coupling strengths between startG and endG, submits one job per grid
    point (numRepeat times) and saves the iteration parameters.
    '''
    printout = kwargs.pop('printout', True)
    if extraIterparams is None:
        extraIterparams = {}
    ac = ArgumentCreator(p, printout=printout)

    GArr = np.linspace(startG, endG, Nvals)
    print(GArr)
    g_AMPA_total_arr = []
    g_GABA_total_arr = []
    # Cartesian product of E and I coupling strengths, row-major order
    # (E varies slowest) — must match the rc-based job index below.
    for E_coupling in GArr:
        for I_coupling in GArr:
            g_AMPA_total_arr.append(E_coupling)
            g_GABA_total_arr.append(I_coupling)
    iterparams = {
        'g_AMPA_total' : np.array(g_AMPA_total_arr),
        'g_GABA_total' : np.array(g_GABA_total_arr),
    }
    dimension_labels = ['g_AMPA_total', 'g_GABA_total']
    dimensions = [Nvals, Nvals]
    iterparams.update(extraIterparams)
    ac.insertDict(iterparams, mult=False)

    ###############################################################################
    submitter = SubmitterFactory.getSubmitter(
        ac, appName, envType=ENV, rtLimit=rtLimit, output_dir=simRootDir,
        label=simLabel, blocking=blocking, timePrefix=timePrefix, numCPU=numCPU,
        **kwargs)
    ac.setOption('output_dir', submitter.outputDir())
    startJobNum = 0
    # NOTE(review): 'filter' shadows the builtin; when rc=(row, col) is
    # given, it selects the single job at that position of the sweep grid.
    filter = rc[0]*len(GArr) + rc[1] if rc is not None else None
    submitter.submitAll(startJobNum, numRepeat, dry_run=dry_run, filter=filter)
    submitter.saveIterParams(iterparams, dimension_labels, dimensions,
                             dry_run=dry_run)
###############################################################################
def getBumpCurrentSlope(noise_sigma, threshold=0, type=None):
    '''Load the bump slope data for the given noise level.

    Parameters
    ----------
    noise_sigma : int
        Noise level (sigma of the Gaussian)
    threshold : float
        Threshold below which slope values will be replaced with ``NaN``.
    type : string, optional
        If ``None`` the regular bump slope files will be used. If
        ``no_theta``, the bump slope files specific for the simulations
        without theta oscillations will be used.
    '''
    if type is None:
        chosen_cls = DefaultSelector
    elif type == 'no_theta':
        chosen_cls = NoThetaSelector
    else:
        raise ValueError('Invalid bump slope type.')
    # Slope files live in a fixed directory relative to the working dir.
    return chosen_cls('bump_slope_data', threshold).get_slopes(noise_sigma)
def getSpeedPercentile(p, path, grid_lambda, Nx):
    '''Compute the bump speed at a given percentile of animal speeds.

    Loads recorded animal positions, converts them to bump speeds on the
    neural sheet and returns the requested percentile.

    Parameters
    ----------
    p : float
        The specified percentile.
    path : string
        Path to the file containing rat velocities.
    grid_lambda : float
        Grid field spacing (cm).
    Nx : int
        Neural sheet size (neurons). The bump has to travel this distance (in
        units of neurons) to return to its original position, i.e. form a
        grid field.

    Returns
    -------
    float
        The bump speed at the p-th percentile.
    '''
    from scipy.io import loadmat
    recordings = loadmat(path)
    dt = float(recordings['dt'])
    # Finite-difference velocities along each coordinate axis.
    velocities = [np.diff(recordings[key].flatten()) / dt
                  for key in ('pos_x', 'pos_y')]
    animal_speed = np.abs(np.hstack(velocities))
    # Convert animal speed (cm/s) to bump speed (neurons/s).
    bump_speed = float(Nx) / grid_lambda * animal_speed
    res = np.percentile(bump_speed, p)
    log_info("getAnimalSpeedPercentile",
             "Loaded velocity data from: {0}".format(path))
    log_info("getAnimalSpeedPercentile",
             "{0:.2f}th percentile: {1:.3f}".format(p, res))
    return res
|
MattNolanLab/ei-attractor
|
grid_cell_model/simulations/007_noise/param_sweep.py
|
Python
|
gpl-3.0
| 4,305
|
[
"Gaussian"
] |
4627cc6cbde8cb855d56bc2f334bbf45457401c9b88c9ae8074d576265cd0c2b
|
# NEB (nudged elastic band) smoke test for ASE (Python 2 syntax).
# Builds a 5-image band between the trajectory's last frame and a mirrored
# copy of it, relaxes the band with QuasiNewton and prints the energies.
from ase import *
# Energies of every frame stored in the input trajectory.
print [a.get_potential_energy() for a in PickleTrajectory('H.traj')]
# Start from the last frame; add four copies to form the remaining images.
images = [PickleTrajectory('H.traj')[-1]]
for i in range(4):
    images.append(images[0].copy())
# Reflect atom 6's y coordinate about y = 1 to define the end state.
images[-1].positions[6, 1] = 2 - images[0].positions[6, 1]
neb = NEB(images)
neb.interpolate()
# Every image needs its own calculator instance.
for image in images:
    image.set_calculator(LennardJones())
# Energies and last-atom positions before relaxation.
for a in neb.images:
    print a.positions[-1], a.get_potential_energy()
# Relax the band; the minimum-energy path is logged to 'mep.traj'.
dyn = QuasiNewton(neb, trajectory='mep.traj')
print dyn.run(fmax=0.01, steps=25)
# Energies and last-atom positions after relaxation.
for a in neb.images:
    print a.positions[-1], a.get_potential_energy()
|
freephys/python_ase
|
ase/test/neb.py
|
Python
|
gpl-3.0
| 571
|
[
"ASE"
] |
a5485ed1457edd09bf32274909b48ba1b9e3dfe381c1c620cd4f14b014f8578e
|
# This file is part of PyEMMA.
#
# Copyright (c) 2015, 2014 Computational Molecular Biology Group, Freie Universitaet Berlin (GER)
#
# PyEMMA is free software: you can redistribute it and/or modify
# it under the terms of the GNU Lesser General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU Lesser General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
import os
import unittest
import pkg_resources
import mdtraj
import numpy as np
import pyemma.coordinates as coor
from pyemma.coordinates.data.fragmented_trajectory_reader import FragmentedTrajectoryReader
class TestFragmentedTrajectory(unittest.TestCase):
    """Tests for FragmentedTrajectoryReader.

    Uses a shared 100-frame, 1-dimensional array ``cls.d`` as the trajectory
    fragment; fragmented trajectories are built by concatenating it.
    """
    @classmethod
    def setUpClass(cls):
        # 100 frames, one dimension: [[0], [1], ..., [99]]
        d = np.array([[i] for i in range(0, 100)]) # np.atleast_2d(np.arange(100))
        cls.d = d
        return cls
    # Reading two concatenated fragments without chunking must equal vstack.
    def test_full_trajectory(self):
        reader = FragmentedTrajectoryReader([self.d, self.d])
        reader.chunksize = 0
        expected = np.vstack((self.d, self.d))
        output = reader.get_output(stride=1)[0]
        np.testing.assert_array_almost_equal(expected, output)
    # Random access indices spanning the fragment boundary (frame 100 maps
    # to the first frame of the second fragment).
    def test_full_trajectory_random_access(self):
        reader = FragmentedTrajectoryReader([self.d, self.d])
        indices = np.asarray([[0, 1], [0, 3], [0, 3], [0, 99], [0, 100], [0, 199]])
        out = reader.get_output(stride=indices, chunk=0)
        np.testing.assert_array_equal(np.array(out).squeeze(), np.array([1, 3, 3, 99, 0, 99]))
    # Same as above but with chunked reading.
    def test_chunked_trajectory_random_access(self):
        reader = FragmentedTrajectoryReader([self.d, self.d])
        indices = np.asarray([[0, 1], [0, 3], [0, 3], [0, 99], [0, 100], [0, 199]])
        out = reader.get_output(stride=indices, chunk=1)
        np.testing.assert_array_equal(np.array(out).squeeze(), np.array([1,3,3,99,0,99]))
    # Striding across the fragment boundary must match striding the
    # concatenated array.
    def test_full_trajectory_stridden(self):
        for stride in [1, 3, 5, 7, 13, 20]:
            reader = FragmentedTrajectoryReader([self.d, self.d])
            reader.chunksize = 0
            expected = np.vstack((self.d, self.d))[::stride]
            out = reader.get_output(stride=stride)[0]
            np.testing.assert_array_almost_equal(expected, out, err_msg="Failed for stride=%s" % stride)
    # Stride combined with a time lag (X, Y pairs) in unchunked mode.
    def test_full_trajectory_stridden_with_lag(self):
        reader = FragmentedTrajectoryReader([self.d, self.d])
        data = np.vstack((self.d, self.d))
        for lag in [1, 5, 7]:
            for stride in [1, 3, 5, 7, 13, 20]:
                reader.chunksize = 0
                X, Y = None, None
                # not chunked
                for itraj, X, Y in reader.iterator(stride=stride, lag=lag):
                    pass
                np.testing.assert_array_almost_equal(data[::stride][0:len(Y)], X)
                np.testing.assert_array_almost_equal(data[lag::stride], Y, err_msg='lag={lag}, stride={stride}'.format(stride=stride, lag=lag))
    # End-to-end test with real xtc trajectory files created on the fly.
    def test_fragmented_xtc(self):
        from pyemma.coordinates.tests.util import create_traj
        top_file = pkg_resources.resource_filename(__name__, 'data/test.pdb')
        trajfiles = []
        for _ in range(3):
            f, _, _ = create_traj(top_file)
            trajfiles.append(f)
        try:
            # three trajectories: one consisting of all three, one consisting of the first,
            # one consisting of the first and the last
            source = coor.source([trajfiles, [trajfiles[0]], [trajfiles[0], trajfiles[2]]], top=top_file)
            source.chunksize = 1000
            out = source.get_output(stride=1)
            trajs = [mdtraj.load(trajfiles[i], top=top_file).xyz.reshape(-1,9) for i in range(0,3)]
            np.testing.assert_equal(out[0], np.vstack(trajs))
            np.testing.assert_equal(out[1], trajs[0])
            np.testing.assert_equal(out[2], np.vstack((trajs[0], trajs[2])))
        finally:
            # Clean up the temporary trajectory files in any case.
            for t in trajfiles:
                try:
                    os.unlink(t)
                except EnvironmentError:
                    pass
    # Random access with several input trajectories (fragmented and plain),
    # for a range of chunk sizes.
    def test_multiple_input_trajectories_random_access(self):
        indices = np.asarray([
            [0, 1], [0, 3], [0, 3], [0, 99], [0, 100], [0, 199],
            [1, 0], [1, 5], [1, 99],
            [2, 5], [2, 7], [2, 23]
        ])
        expected = [np.array([1, 3, 3, 99, 0, 99]), np.array([0, 5, 99]), np.array([5, 7, 23])]
        for chunk_size in [0, 1, 3, 5, 13]:
            reader = FragmentedTrajectoryReader([[self.d, self.d], self.d, [self.d, self.d]])
            out_full_trajectory_mode = reader.get_output(chunk=chunk_size, stride=indices)
            for i in range(3):
                np.testing.assert_array_equal(expected[i], out_full_trajectory_mode[i].squeeze())
    # Chunked and unchunked output must agree for mixed input trajectories.
    def test_multiple_input_trajectories(self):
        reader = FragmentedTrajectoryReader([[self.d, self.d], self.d, [self.d, self.d]])
        reader.chunksize = 37
        out = reader.get_output()
        reader.chunksize = 0
        out2 = reader.get_output()
        expected0_2 = np.vstack((self.d, self.d))
        for itraj in range(0, 3):
            np.testing.assert_array_almost_equal(out[itraj], out2[itraj])
        np.testing.assert_array_almost_equal(out[0], expected0_2)
        np.testing.assert_array_almost_equal(out[1], self.d)
        np.testing.assert_array_almost_equal(out[2], expected0_2)
    # Lagged, strided, chunked iteration: collected chunks must reassemble
    # to the strided/lagged views of the concatenated data.
    def test_chunked_trajectory_with_lag(self):
        data = np.vstack((self.d, self.d))
        reader = FragmentedTrajectoryReader([self.d, self.d])
        for lag in [0, 1, 3]:
            for stride in [1, 3, 5]:
                for chunksize in [1, 34, 53, 72]:
                    reader.chunksize = chunksize
                    if lag > 0:
                        collected = None
                        collected_lagged = None
                        for itraj, X, Y in reader.iterator(stride=stride, lag=lag):
                            collected = X if collected is None else np.vstack((collected, X))
                            collected_lagged = Y if collected_lagged is None else np.vstack((collected_lagged, Y))
                        np.testing.assert_array_almost_equal(data[::stride][0:len(collected_lagged)], collected,
                                                             err_msg="lag={}, stride={}, cs={}".format(
                                                                 lag, stride, chunksize
                                                             ))
                        np.testing.assert_array_almost_equal(data[lag::stride], collected_lagged)
                    else:
                        collected = None
                        for itraj, X in reader.iterator(stride=stride):
                            collected = X if collected is None else np.vstack((collected, X))
                        np.testing.assert_array_almost_equal(data[::stride], collected)
    # Mapping of a global frame index to (fragment index, local frame index).
    def test_index_to_reader_index(self):
        reader = FragmentedTrajectoryReader([self.d, self.d])
        assert (0, 0) == reader._index_to_reader_index(0, 0), "first frame is first frame of first reader"
        assert (0, 1) == reader._index_to_reader_index(1, 0), "second frame is second frame of first reader"
        assert (1, 0) == reader._index_to_reader_index(100, 0), "101'st frame is first frame of second reader"
        assert (1, 1) == reader._index_to_reader_index(101, 0), "102'nd frame is second frame of second reader"
        with self.assertRaises(ValueError):
            reader._index_to_reader_index(-1, 0)
        with self.assertRaises(ValueError):
            reader._index_to_reader_index(200, 0)
    # Column selection (cols=...) must slice each fragment's dimensions.
    def test_cols(self):
        dim = 5
        arr = np.arange(60).reshape(-1, dim)
        data = [(arr, arr), arr, (arr, arr, arr)]
        reader = FragmentedTrajectoryReader(data)
        cols = (0, 3)
        for itraj, x in reader.iterator(chunk=0, return_trajindex=True, cols=cols):
            if isinstance(data[itraj], tuple):
                syn_traj = np.concatenate(data[itraj])
            else:
                syn_traj = data[itraj]
            np.testing.assert_equal(x, syn_traj[:, cols])
    # Fragments with mismatching dimensions must be rejected.
    def test_raise_different_dims(self):
        data = [self.d, np.array([[1,2,3], [4,5,6]])]
        with self.assertRaises(ValueError):
            FragmentedTrajectoryReader(data)
    # save_traj must resolve frames that live in different fragments.
    def test_with_save_traj(self):
        path = pkg_resources.resource_filename(__name__, 'data') + os.path.sep
        pdb_file = os.path.join(path, 'bpti_ca.pdb')
        traj_files = [
            os.path.join(path, 'bpti_001-033.xtc'),
            os.path.join(path, 'bpti_034-066.xtc'),
            os.path.join(path, 'bpti_067-100.xtc')
        ]
        source_frag = coor.source([traj_files], top=pdb_file)
        full_data = source_frag.get_output()[0]
        last_frame_fragment_0 = [0,32]
        first_frame_fragment_1 = [0,33]
        first_frame_fragment_2 = [0,66]
        reshape = lambda f: f.xyz.reshape((f.xyz.shape[0],f.xyz.shape[1] * f.xyz.shape[2])).squeeze()
        # Frames in the first fragment:
        frames = coor.save_traj(source_frag, [last_frame_fragment_0], None)
        np.testing.assert_equal(reshape(frames), full_data[32])
        # Frames the first and second fragments
        frames = coor.save_traj(source_frag, [last_frame_fragment_0, first_frame_fragment_1], None)
        np.testing.assert_equal(reshape(frames), full_data[np.array([32, 33])])
        # Frames only in the second fragment
        frames = coor.save_traj(source_frag, [first_frame_fragment_1], None)
        np.testing.assert_equal(reshape(frames), full_data[33])
        # Frames only in the second and third fragment
        frames = coor.save_traj(source_frag, [first_frame_fragment_1, first_frame_fragment_2], None)
        np.testing.assert_equal(reshape(frames), full_data[np.array([33, 66])])
|
markovmodel/PyEMMA
|
pyemma/coordinates/tests/test_fragmented_trajectory.py
|
Python
|
lgpl-3.0
| 10,158
|
[
"MDTraj"
] |
30fe81e077be5276421e4b866bcad5eea5d7341a1a80220eab70e42df09a6612
|
"""
.. versionadded:: v6r20
FTS3Agent implementation.
It is in charge of submitting and monitoring all the transfers. It can be duplicated.
.. literalinclude:: ../ConfigTemplate.cfg
:start-after: ##BEGIN FTS3Agent
:end-before: ##END FTS3Agent
:dedent: 2
:caption: FTS3Agent options
"""
from __future__ import absolute_import
from __future__ import division
from __future__ import print_function
__RCSID__ = "$Id$"
import errno
import time
# from threading import current_thread
from multiprocessing.pool import ThreadPool
# We use the dummy module because we use the ThreadPool
from multiprocessing.dummy import current_process
from socket import gethostname
from DIRAC import S_OK, S_ERROR
from DIRAC.AccountingSystem.Client.Types.DataOperation import DataOperation
from DIRAC.Core.Base.AgentModule import AgentModule
from DIRAC.Core.Utilities.DErrno import cmpError
from DIRAC.Core.Utilities.DictCache import DictCache
from DIRAC.Core.Utilities.Time import fromString
from DIRAC.ConfigurationSystem.Client.Helpers.Resources import getFTS3ServerDict
from DIRAC.ConfigurationSystem.Client.Helpers.Operations import Operations as opHelper
from DIRAC.ConfigurationSystem.Client.Helpers.Registry import getDNForUsername
from DIRAC.FrameworkSystem.Client.Logger import gLogger
from DIRAC.FrameworkSystem.Client.ProxyManagerClient import gProxyManager
from DIRAC.DataManagementSystem.private import FTS3Utilities
from DIRAC.DataManagementSystem.DB.FTS3DB import FTS3DB
from DIRAC.DataManagementSystem.Client.FTS3Job import FTS3Job
from DIRAC.RequestManagementSystem.Client.ReqClient import ReqClient
# pylint: disable=attribute-defined-outside-init
AGENT_NAME = "DataManagement/FTS3Agent"
# Lifetime in seconds of the proxy we download for submission
PROXY_LIFETIME = 43200 # 12 hours
class FTS3Agent(AgentModule):
  """
  This Agent is responsible of interacting with the FTS3 services.
  Several of them can run in parallel.
  It first treats the Operations, by creating new FTS jobs and performing
  callback.
  Then, it monitors the current jobs.
  CAUTION: This agent and the FTSAgent cannot run together.
  """
  def __readConf(self):
    """ Read configurations

        :return: S_OK()/S_ERROR()
    """
    # Getting all the possible servers
    res = getFTS3ServerDict()
    if not res['OK']:
      gLogger.error(res['Message'])
      return res
    srvDict = res['Value']
    serverPolicyType = opHelper().getValue('DataManagement/FTSPlacement/FTS3/ServerPolicy', 'Random')
    self._serverPolicy = FTS3Utilities.FTS3ServerPolicy(srvDict, serverPolicy=serverPolicyType)
    self.maxNumberOfThreads = self.am_getOption("MaxThreads", 10)
    # Number of Operation we treat in one loop
    self.operationBulkSize = self.am_getOption("OperationBulkSize", 20)
    # Number of Jobs we treat in one loop
    self.jobBulkSize = self.am_getOption("JobBulkSize", 20)
    self.maxFilesPerJob = self.am_getOption("MaxFilesPerJob", 100)
    self.maxAttemptsPerFile = self.am_getOption("MaxAttemptsPerFile", 256)
    # Stuck-entity kick / final-entity deletion tuning
    self.kickDelay = self.am_getOption("KickAssignedHours", 1)
    self.maxKick = self.am_getOption("KickLimitPerCycle", 100)
    self.deleteDelay = self.am_getOption("DeleteGraceDays", 180)
    self.maxDelete = self.am_getOption("DeleteLimitPerCycle", 100)
    # lifetime of the proxy we download to delegate to FTS
    self.proxyLifetime = self.am_getOption("ProxyLifetime", PROXY_LIFETIME)
    return S_OK()
  def initialize(self):
    """ Agent's initialization

        :return: S_OK()/S_ERROR()
    """
    # Per-thread cache of FTS3 contexts, see getFTS3Context.
    self._globalContextCache = {}
    # name that will be used in DB for assignment tag
    self.assignmentTag = gethostname().split('.')[0]
    res = self.__readConf()
    # We multiply by two because of the two threadPools
    self.fts3db = FTS3DB(pool_size=2 * self.maxNumberOfThreads)
    self.jobsThreadPool = ThreadPool(self.maxNumberOfThreads)
    self.opsThreadPool = ThreadPool(self.maxNumberOfThreads)
    return res
  def beginExecution(self):
    """ Reload configurations before start of a cycle

        :return: S_OK()/S_ERROR()
    """
    return self.__readConf()
  def getFTS3Context(self, username, group, ftsServer, threadID):
    """ Returns an fts3 context for a given user, group and fts server

        The context pool is per thread, and there is one context
        per tuple (user, group, server).
        We dump the proxy of a user to a file (shared by all the threads),
        and use it to make the context.
        The proxy needs a lifetime of self.proxyLifetime, is cached for cacheTime = (2*lifeTime/3) - 10mn,
        and the lifetime of the context is 45mn
        The reason for cacheTime to be what it is is because the FTS3 server will ask for a new proxy
        after 2/3rd of the existing proxy has expired, so we renew it just before

        :param str username: name of the user
        :param str group: group of the user
        :param str ftsServer: address of the server
        :param str threadID: thread ID

        :returns: S_OK with the context object
    """
    log = gLogger.getSubLogger("getFTS3Context", child=True)
    contextes = self._globalContextCache.setdefault(threadID, DictCache())
    idTuple = (username, group, ftsServer)
    log.debug("Getting context for %s" % (idTuple, ))
    # We keep a context in the cache for 45 minutes
    # (so it needs to be valid at least 15 since we add it for one hour)
    if not contextes.exists(idTuple, 15 * 60):
      res = getDNForUsername(username)
      if not res['OK']:
        return res
      # We take the first DN returned
      userDN = res['Value'][0]
      log.debug("UserDN %s" % userDN)
      # We dump the proxy to a file.
      # It has to have a lifetime of self.proxyLifetime
      # Because the FTS3 servers cache it for 2/3rd of the lifetime
      # we should make our cache a bit less than 2/3rd of the lifetime
      cacheTime = int(2 * self.proxyLifetime / 3) - 600
      res = gProxyManager.downloadVOMSProxyToFile(
          userDN, group, requiredTimeLeft=self.proxyLifetime, cacheTime=cacheTime)
      if not res['OK']:
        return res
      proxyFile = res['Value']
      log.debug("Proxy file %s" % proxyFile)
      # We generate the context
      # In practice, the lifetime will be less than proxyLifetime
      # because we reuse a cached proxy. However, the cached proxy will
      # never forced a redelegation, because it is recent enough for FTS3 servers.
      # The delegation is forced when 2/3 rd of the lifetime are left, and we get a fresh
      # one just before. So no problem
      res = FTS3Job.generateContext(ftsServer, proxyFile, lifetime=self.proxyLifetime)
      if not res['OK']:
        return res
      context = res['Value']
      # we add it to the cache for this thread for 1h
      contextes.add(idTuple, 3600, context)
    return S_OK(contextes.get(idTuple))
  def _monitorJob(self, ftsJob):
    """ * query the FTS servers
        * update the FTSFile status
        * update the FTSJob status

        :param ftsJob: FTS job

        :return: ftsJob, S_OK()/S_ERROR()
    """
    # General try catch to avoid that the tread dies
    try:
      # NOTE(review): ``log`` is bound inside the try; if current_process()
      # itself raised, the except handler's log call would NameError — confirm.
      threadID = current_process().name
      log = gLogger.getSubLogger("_monitorJob/%s" % ftsJob.jobID, child=True)
      res = self.getFTS3Context(
          ftsJob.username, ftsJob.userGroup, ftsJob.ftsServer, threadID=threadID)
      if not res['OK']:
        log.error("Error getting context", res)
        return ftsJob, res
      context = res['Value']
      res = ftsJob.monitor(context=context)
      if not res['OK']:
        log.error("Error monitoring job", res)
        # If the job was not found on the server, update the DB
        if cmpError(res, errno.ESRCH):
          res = self.fts3db.cancelNonExistingJob(ftsJob.operationID, ftsJob.ftsGUID)
        return ftsJob, res
      # { fileID : { Status, Error } }
      filesStatus = res['Value']
      # Specify the job ftsGUID to make sure we do not overwrite
      # status of files already taken by newer jobs
      res = self.fts3db.updateFileStatus(filesStatus, ftsGUID=ftsJob.ftsGUID)
      if not res['OK']:
        log.error("Error updating file fts status", "%s, %s" % (ftsJob.ftsGUID, res))
        return ftsJob, res
      upDict = {
          ftsJob.jobID: {
              'status': ftsJob.status,
              'error': ftsJob.error,
              'completeness': ftsJob.completeness,
              'operationID': ftsJob.operationID,
              'lastMonitor': True,
          }
      }
      res = self.fts3db.updateJobStatus(upDict)
      # Jobs in a final state are accounted for exactly once, here.
      if ftsJob.status in ftsJob.FINAL_STATES:
        self.__sendAccounting(ftsJob)
      return ftsJob, res
    except Exception as e:
      log.exception("Exception while monitoring job", repr(e))
      return ftsJob, S_ERROR(0, "Exception %s" % repr(e))
  @staticmethod
  def _monitorJobCallback(returnedValue):
    """ Callback when a job has been monitored
        :param returnedValue: value returned by the _monitorJob method
                              (ftsJob, standard dirac return struct)
    """
    ftsJob, res = returnedValue
    log = gLogger.getSubLogger("_monitorJobCallback/%s" % ftsJob.jobID, child=True)
    if not res['OK']:
      log.error("Error updating job status", res)
    else:
      log.debug("Successfully updated job status")
  def monitorJobsLoop(self):
    """ * fetch the active FTSJobs from the DB
        * spawn a thread to monitor each of them

        :return: S_OK()/S_ERROR()
    """
    log = gLogger.getSubLogger("monitorJobs", child=True)
    log.debug("Size of the context cache %s" % len(self._globalContextCache))
    log.debug("Getting active jobs")
    # get jobs from DB
    res = self.fts3db.getActiveJobs(limit=self.jobBulkSize, jobAssignmentTag=self.assignmentTag)
    if not res['OK']:
      log.error("Could not retrieve ftsJobs from the DB", res)
      return res
    activeJobs = res['Value']
    log.info("%s jobs to queue for monitoring" % len(activeJobs))
    # We store here the AsyncResult object on which we are going to wait
    applyAsyncResults = []
    # Starting the monitoring threads
    for ftsJob in activeJobs:
      log.debug("Queuing executing of ftsJob %s" % ftsJob.jobID)
      # queue the execution of self._monitorJob( ftsJob ) in the thread pool
      # The returned value is passed to _monitorJobCallback
      applyAsyncResults.append(self.jobsThreadPool.apply_async(
          self._monitorJob, (ftsJob, ), callback=self._monitorJobCallback))
    log.debug("All execution queued")
    # Waiting for all the monitoring to finish
    # (busy-wait with a short sleep rather than pool.join, since the pool
    # is reused across cycles)
    while not all([r.ready() for r in applyAsyncResults]):
      log.debug("Not all the tasks are finished")
      time.sleep(0.5)
    log.debug("All the tasks have completed")
    return S_OK()
  @staticmethod
  def _treatOperationCallback(returnedValue):
    """ Callback when an operation has been treated

        :param returnedValue: value returned by the _treatOperation method
                              (ftsOperation, standard dirac return struct)
    """
    operation, res = returnedValue
    log = gLogger.getSubLogger("_treatOperationCallback/%s" % operation.operationID, child=True)
    if not res['OK']:
      log.error("Error treating operation", res)
    else:
      log.debug("Successfully treated operation")
  def _treatOperation(self, operation):
    """ Treat one operation:
          * does the callback if the operation is finished
          * generate new jobs and submits them

        :param operation: the operation to treat

        :return: operation, S_OK()/S_ERROR()
    """
    try:
      threadID = current_process().name
      log = gLogger.getSubLogger("treatOperation/%s" % operation.operationID, child=True)
      # If the operation is totally processed
      # we perform the callback
      if operation.isTotallyProcessed():
        log.debug("FTS3Operation %s is totally processed" % operation.operationID)
        res = operation.callback()
        if not res['OK']:
          log.error("Error performing the callback", res)
          log.info("Putting back the operation")
          dbRes = self.fts3db.persistOperation(operation)
          if not dbRes['OK']:
            log.error("Could not persist operation", dbRes)
        return operation, res
      else:
        log.debug("FTS3Operation %s is not totally processed yet" % operation.operationID)
        # This flag is set to False if we want to stop the ongoing processing
        # of an operation, typically when the matching RMS Request has been
        # canceled (see below)
        continueOperationProcessing = True
        # Check the status of the associated RMS Request.
        # If it is canceled or does not exist anymore then we will not create new FTS3Jobs, and mark
        # this as FTS3Operation canceled.
        if operation.rmsReqID:
          res = ReqClient().getRequestStatus(operation.rmsReqID)
          if not res['OK']:
            # If the Request does not exist anymore
            if cmpError(res, errno.ENOENT):
              log.info(
                  "The RMS Request does not exist anymore, canceling the FTS3Operation",
                  "rmsReqID: %s, FTS3OperationID: %s" %
                  (operation.rmsReqID,
                   operation.operationID))
              operation.status = 'Canceled'
              continueOperationProcessing = False
            else:
              log.error("Could not get request status", res)
              return operation, res
          else:
            rmsReqStatus = res['Value']
            if rmsReqStatus == 'Canceled':
              log.info(
                  "The RMS Request is canceled, canceling the FTS3Operation",
                  "rmsReqID: %s, FTS3OperationID: %s" %
                  (operation.rmsReqID,
                   operation.operationID))
              operation.status = 'Canceled'
              continueOperationProcessing = False
        if continueOperationProcessing:
          res = operation.prepareNewJobs(
              maxFilesPerJob=self.maxFilesPerJob, maxAttemptsPerFile=self.maxAttemptsPerFile)
          if not res['OK']:
            log.error("Cannot prepare new Jobs", "FTS3Operation %s : %s" %
                      (operation.operationID, res))
            return operation, res
          newJobs = res['Value']
          log.debug("FTS3Operation %s: %s new jobs to be submitted" %
                    (operation.operationID, len(newJobs)))
          for ftsJob in newJobs:
            # Pick an FTS3 server per job according to the configured policy.
            res = self._serverPolicy.chooseFTS3Server()
            if not res['OK']:
              log.error(res)
              continue
            ftsServer = res['Value']
            log.debug("Use %s server" % ftsServer)
            ftsJob.ftsServer = ftsServer
            res = self.getFTS3Context(
                ftsJob.username, ftsJob.userGroup, ftsServer, threadID=threadID)
            if not res['OK']:
              log.error("Could not get context", res)
              continue
            context = res['Value']
            try:
              tpcProtocols = operation.fts3Plugin.selectTPCProtocols(ftsJob=ftsJob)
            except ValueError as e:
              log.error("Could not select TPC list", repr(e))
              continue
            res = ftsJob.submit(context=context, protocols=tpcProtocols)
            if not res['OK']:
              log.error("Could not submit FTS3Job", "FTS3Operation %s : %s" %
                        (operation.operationID, res))
              continue
            operation.ftsJobs.append(ftsJob)
            submittedFileIds = res['Value']
            log.info("FTS3Operation %s: Submitted job for %s transfers" %
                     (operation.operationID, len(submittedFileIds)))
        # new jobs are put in the DB at the same time
        res = self.fts3db.persistOperation(operation)
        if not res['OK']:
          log.error("Could not persist operation", res)
        return operation, res
    except Exception as e:
      log.exception('Exception in the thread', repr(e))
      return operation, S_ERROR("Exception %s" % repr(e))
  def treatOperationsLoop(self):
    """ * Fetch all the FTSOperations which are not finished
        * Spawn a thread to treat each operation

        :return: S_OK()/S_ERROR()
    """
    log = gLogger.getSubLogger("treatOperations", child=True)
    log.debug("Size of the context cache %s" % len(self._globalContextCache))
    log.info("Getting non finished operations")
    res = self.fts3db.getNonFinishedOperations(
        limit=self.operationBulkSize, operationAssignmentTag=self.assignmentTag)
    if not res['OK']:
      log.error("Could not get incomplete operations", res)
      return res
    incompleteOperations = res['Value']
    log.info("Treating %s incomplete operations" % len(incompleteOperations))
    applyAsyncResults = []
    for operation in incompleteOperations:
      log.debug("Queuing executing of operation %s" % operation.operationID)
      # queue the execution of self._treatOperation( operation ) in the thread pool
      # The returned value is passed to _treatOperationCallback
      applyAsyncResults.append(self.opsThreadPool.apply_async(
          self._treatOperation, (operation, ), callback=self._treatOperationCallback))
    log.debug("All execution queued")
    # Waiting for all the treatments to finish
    while not all([r.ready() for r in applyAsyncResults]):
      log.debug("Not all the tasks are finished")
      time.sleep(0.5)
    log.debug("All the tasks have completed")
    return S_OK()
  def kickOperations(self):
    """ Kick stuck operations

        :return: S_OK()/S_ERROR()
    """
    log = gLogger.getSubLogger("kickOperations", child=True)
    res = self.fts3db.kickStuckOperations(limit=self.maxKick, kickDelay=self.kickDelay)
    if not res['OK']:
      return res
    kickedOperations = res['Value']
    log.info("Kicked %s stuck operations" % kickedOperations)
    return S_OK()
  def kickJobs(self):
    """ Kick stuck jobs

        :return: S_OK()/S_ERROR()
    """
    log = gLogger.getSubLogger("kickJobs", child=True)
    res = self.fts3db.kickStuckJobs(limit=self.maxKick, kickDelay=self.kickDelay)
    if not res['OK']:
      return res
    kickedJobs = res['Value']
    log.info("Kicked %s stuck jobs" % kickedJobs)
    return S_OK()
  def deleteOperations(self):
    """ Delete final operations

        :return: S_OK()/S_ERROR()
    """
    log = gLogger.getSubLogger("deleteOperations", child=True)
    res = self.fts3db.deleteFinalOperations(limit=self.maxDelete, deleteDelay=self.deleteDelay)
    if not res['OK']:
      return res
    deletedOperations = res['Value']
    log.info("Deleted %s final operations" % deletedOperations)
    return S_OK()
  def finalize(self):
    """ Finalize processing

        :return: S_OK()/S_ERROR()
    """
    # Joining all the ThreadPools
    log = gLogger.getSubLogger("Finalize")
    log.debug("Closing jobsThreadPool")
    self.jobsThreadPool.close()
    self.jobsThreadPool.join()
    log.debug("jobsThreadPool joined")
    log.debug("Closing opsThreadPool")
    self.opsThreadPool.close()
    self.opsThreadPool.join()
    log.debug("opsThreadPool joined")
    return S_OK()
  def execute(self):
    """ One cycle execution

        :return: S_OK()/S_ERROR()
    """
    log = gLogger.getSubLogger("execute", child=True)
    # Monitor first, then treat operations, then housekeeping; any failure
    # aborts the cycle.
    log.info("Monitoring job")
    res = self.monitorJobsLoop()
    if not res['OK']:
      log.error("Error monitoring jobs", res)
      return res
    log.info("Treating operations")
    res = self.treatOperationsLoop()
    if not res['OK']:
      log.error("Error treating operations", res)
      return res
    log.info("Kicking stuck jobs")
    res = self.kickJobs()
    if not res['OK']:
      log.error("Error kicking jobs", res)
      return res
    log.info("Kicking stuck operations")
    res = self.kickOperations()
    if not res['OK']:
      log.error("Error kicking operations", res)
      return res
    log.info("Deleting final operations")
    res = self.deleteOperations()
    if not res['OK']:
      log.error("Error deleting operations", res)
      return res
    return S_OK()
  @staticmethod
  def __sendAccounting(ftsJob):
    """ prepare and send DataOperation to AccountingDB

        :param ftsJob: the FTS3Job from which we send the accounting info
    """
    dataOp = DataOperation()
    dataOp.setStartTime(fromString(ftsJob.submitTime))
    dataOp.setEndTime(fromString(ftsJob.lastUpdate))
    dataOp.setValuesFromDict(ftsJob.accountingDict)
    dataOp.delayedCommit()
|
yujikato/DIRAC
|
src/DIRAC/DataManagementSystem/Agent/FTS3Agent.py
|
Python
|
gpl-3.0
| 20,683
|
[
"DIRAC"
] |
71fa26820bd2db8ffe5b25e6d3df90c58dd7ac29bf9d4354921be905923ffd1a
|
# Rebinding to a cluster
# Run by:
# $ python run.py [N] [runs] [outFilename] [Logmode, default=False]
#
# Arguments:
# - N: Number of particles in cluster
# - runs: Number of simulation runs
# - outFilename: Name of output file
# - Logmode: false by default, if True, only 1 VTK-logged run is
# performed.
#
# E.g.:
# $ python run.py 7 1000 data.out
# Or:
# $ python run.py 7 1 data.out True
# Modules
# ===============================
import sys
#Also, set relative egfrd directory path
sys.path.append('../../')
import os
import shutil
import datetime
import math
from egfrd import *
import model
import gfrdbase
import _gfrd
from visualization import vtklogger
if __name__ == "__main__":
# Constants
# ===============================
# Number of particles in the cluster
N = int(sys.argv[1])
# Number of runs
runs = int(sys.argv[2])
# Output file
outFilename = sys.argv[3]
# LOGGING mode
try:
LOGGING = bool(sys.argv[4])
if (LOGGING==True):
print "* Performing only 1 logging run."
runs = 1
except: LOGGING = False
# Particle constants
sigma = 1e-5 # Diameter particle; big:1e-5
D = 1e-8 # Diffusion constant; big:1e-8
world_size = 1e-3 # Lengths of simulation box; normal: 1e-3
k1 = 1e-10
k2 = 1e2
# Spacing inbetween cluster particles AND cluster/B-particle
spacing = sigma/1e5
# Create "unique" seed
# ===============================
currenttime = (long(datetime.datetime.now().year*3600*24*365+
datetime.datetime.now().month*30.5*24*3600+
datetime.datetime.now().day*24*3600+datetime.datetime.now().hour*3600+
datetime.datetime.now().minute*60+datetime.datetime.now().second))
myrandom.seed(currenttime)
print str('Seed: '+str(currenttime))
# Functions
# ===============================
def cartesian(g1, g2, ng1, ng2):
""" Converts lattice coordinates (ng1, ng2) to cartesian coordinates
Arguments:
- Lattice vectors g1=[x1,y1] and g2=[x2,y2]
- Lattice coordinates (ng1, ng2)
"""
cartesian_x = (ng1 * (g1[0]) + ng2 * (g2[0]))
cartesian_y = (ng2 * (g2[1]) + ng1 * (g1[1]))
return cartesian_x, cartesian_y
def distance(x1, y1, x2, y2):
""" Calculates the distance between points (x1, y1) and (x2, y2) """
return math.sqrt(math.pow((x1-x2),2)+math.pow((y1-y2),2))
def generate_possible_positions(N):
    """ Generates a list with possible coordinates and distances to center
    This function does the following:
    - It loops over square area of an hexagonal lattice (of size
      calculated to facilitate N particles)
    - Lattice coordinates and distance to the center of the area
      are stored if they are within circular boundaries. (The
      latter check is only done to be absolutely sure a circular
      shape is formed.)
    This information can be used in a later function. If the list of
    generated coordinates is sorted by distance to the center, N
    particles can be placed within a circular geometry by looping over
    the coordinates.
    Arguments:
    - N: number of particles to place.

    Returns a list of [distanceToCenter, x, y, z] entries (z is always 0),
    so sorting the list orders candidates by distance to the centre.
    """
    # sigma/spacing are module-level constants set at script start-up;
    # w and A are only read/published here, not assigned.
    global w, A, spacing, sigma #, g1, g2, cartesian_x, cartesian_y, ng1, ng2
    # Lattice vectors
    d1 = [1,0]
    d2 = [math.cos((math.pi)/3.0),math.sin((math.pi)/3.0)]
    # Scaled lattice vectors
    lengthLatticeVector = (sigma+spacing)
    g1 = [lengthLatticeVector*d1[0], lengthLatticeVector*d1[1]]
    g2 = [lengthLatticeVector*d2[0], lengthLatticeVector*d2[1]]
    # Factors to calculate square area of lattice needed
    """
    factorNonSquareGrid = math.sqrt(1/(d1[0]*d2[1]-d1[1]*d2[0]))
    factorSquareToCircle = math.pi/2 #math.sqrt(math.pow((math.pi*0.5),2))
    # Calculate diameter of sphere
    L = (spacing+sigma)*(math.ceil(math.sqrt(N))+2)*factorNonSquareGrid*factorSquareToCircle
    # "(math.ceil(math.sqrt(N))+2)"; ceil and +2: some margin
    diamondSide = (L/2)*math.tan(pi/6)+(L/2)/math.tan(pi/6)
    diamondDiagonal = sin(pi/3)*L #2*sin(pi/3)*L/2
    """
    # d1x*d2y-d1y*d2x
    diamondToSquare = math.sqrt((4*d2[0]*d2[1])/(0.25*math.pi))
    vectorsOnDiameter = math.ceil(math.sqrt(N)+2)
    numberOfVectors = diamondToSquare * vectorsOnDiameter
    diameterCircle = math.ceil(math.sqrt(N)+2)*lengthLatticeVector
    radiusCircle = diameterCircle/2
    # Cartesian coordinates
    cartesian_x = 0
    cartesian_y = 0
    cartesian_z = 0
    # The list with possible coordinates and distances to center
    clusterParticleCoordinates = []
    # Scan a square patch of lattice points centred on the origin; keep
    # only those strictly inside the circle of radius radiusCircle.
    for ng1 in range(int(-math.ceil(numberOfVectors/2)),
            int(math.ceil(numberOfVectors/2))):
        for ng2 in range(int(-math.ceil(numberOfVectors/2)),
                int(math.ceil(numberOfVectors/2))):
            cartesian_x, cartesian_y = cartesian(g1, g2, ng1, ng2)
            distanceToCenter = distance(cartesian_x, cartesian_y, 0, 0)
            if (distanceToCenter < radiusCircle):
                clusterParticleCoordinates.append([
                    distanceToCenter,
                    cartesian_x,
                    cartesian_y,
                    cartesian_z
                    ])
    return clusterParticleCoordinates
def make_cluster(N, coord_x, coord_y, coord_z):
    """ Places N particles on hexagonal lattice in spherical symmetry
    Arguments:
    - N: number of particles to place
    - coord_x, coord_y, coord_z: Coordinates where to place cluster

    The centre position receives one C particle; the remaining N-1
    positions receive A particles, filled outward from the centre.
    """
    # Generate list of positions and there distance to the center
    clusterParticleCoordinates = generate_possible_positions(N)
    # Sort this list; entries are [distance, x, y, z], so this orders
    # candidate positions by distance to the cluster centre
    clusterParticleCoordinates.sort(key=lambda x: x[0])
    # Place center particle (species C)
    place_particle(w, C,
        [clusterParticleCoordinates[0][1]+coord_x,
        clusterParticleCoordinates[0][2]+coord_y,
        clusterParticleCoordinates[0][3]+coord_z])
    # Place N particles starting with the particle closest to the
    # center, then placing the second closest particle, etc..
    for i in range(1,N):
        try:
            place_particle(w, A,
                [clusterParticleCoordinates[i][1]+coord_x,
                clusterParticleCoordinates[i][2]+coord_y,
                clusterParticleCoordinates[i][3]+coord_z])
        except:
            # NOTE(review): bare except hides the actual failure reason
            # (e.g. overlap or out-of-bounds); consider reporting it.
            print "ERROR: couldn't place particle."
def single_run(N, LOGGING):
    """ Single run of simulation

    Builds the model/world/simulator from scratch, places the cluster,
    then steps the simulator until the SECOND reaction event and returns
    the time interval between the first and second reactions.

    Arguments:
    - N: number of cluster particles
    - LOGGING: when True, every step is also written to the VTK logger.
    """
    global w, A, C, k1, k2
    # Basic set up simulator
    # ===============================
    # Model
    m = model.ParticleModel(world_size)
    # Species: A and C are created with diffusion constant 0 (immobile);
    # only B diffuses.
    A = model.Species('A', 0, sigma/2)
    m.add_species_type(A)
    B = model.Species('B', D, sigma/2)
    m.add_species_type(B)
    C = model.Species('C', 0, sigma/2)
    m.add_species_type(C)
    # Reaction rules: A+B -> C (rate k1) and C -> A+B (rate k2)
    r1 = model.create_binding_reaction_rule(A, B, C, k1)
    m.network_rules.add_reaction_rule(r1)
    r2 = model.create_unbinding_reaction_rule(C, A, B, k2)
    m.network_rules.add_reaction_rule(r2)
    # World
    w = gfrdbase.create_world(m, 3)
    # Simulator
    s = EGFRDSimulator(w, myrandom.rng)
    # Put in cluster, centred in the simulation box
    make_cluster(N, world_size/2, world_size/2, world_size/2)
    # Put in reactants
    # place_particle(w, B, [world_size/2, world_size/2, world_size/2+sigma+spacing])
    # Enable VTK Logger
    # ===============================
    if (LOGGING == True):
        vtk_output_directory = 'VTK_out'
        if (os.path.exists(vtk_output_directory)):
            print '** Warning: VTK directory already exists.'
        l = vtklogger.VTKLogger(s, vtk_output_directory, extra_particle_step=True)
    # Running
    # ===============================
    # previous_time is set on the first reaction; reaction_time is the
    # interval between the first and second reaction events.
    numberDetected = 0
    if (LOGGING == True):
        while 1:
            l.log() # log
            s.step() # and make eGFRD step
            if s.last_reaction:
                numberDetected = numberDetected+1
                if (numberDetected == 2):
                    # print "2nd Reaction detected at: " + str(s.t) + "(" + str(s.last_reaction) + ")"
                    reaction_time = s.t - previous_time
                    break
                else: previous_time = s.t
        l.stop()
    else:
        while 1:
            s.step() # make eGFRD step
            if s.last_reaction:
                numberDetected = numberDetected+1
                if (numberDetected == 2):
                    # print "2nd Reaction detected at: " + str(s.t) + "(" + str(s.last_reaction) + ")"
                    reaction_time = s.t - previous_time
                    break
                else: previous_time = s.t
        s.stop(s.t)
    #TODO
    #del w
    #del s # If this one is thrown in, the runs suddenly stop?!
    #del m
    return (reaction_time)
# Main part
# ===============================
if __name__ == "__main__":
# Output file
outFile = open(outFilename, 'w')
outFile.close()
outFile = open(outFilename, 'a')
for M in range(runs):
outFile.write(str(single_run(N, LOGGING)) + '\n')
outFile.flush
outFile.close()
timetaken = (long(datetime.datetime.now().year*3600*24*365+
datetime.datetime.now().month*30.5*24*3600+
datetime.datetime.now().day*24*3600+datetime.datetime.now().hour*3600+
datetime.datetime.now().minute*60+datetime.datetime.now().second)-currenttime)
print "Done in "+ str(timetaken) +" seconds."
# #### OLD CODE
#def in_circle(cartesian_x, cartesian_y, r_x, r_y, R):
# if (math.sqrt(math.pow((cartesian_x-r_x),2)+math.pow((cartesian_y-r_y),2)) < R):
# return True
# else:
# return False
# while end time hasn't come and #timesteps still below limit
# while ((s.get_next_time() < endTime) and (n < EARLY_STOP)): # TODO
# Terminate the simulation at appropriate time
#"""
#if (n >= EARLY_STOP):
# print "Early stop (timestep limit)."
# s.stop(s.t)
# reaction_time = INF
#else:
# print "Early stop (time limit)."
# s.stop(s.get_next_time())
# reaction_time = INF
#"""
# Make sure logger writes away information.
# l.stop() #TODO REMOVED LOGGING
|
gfrd/egfrd
|
samples/memorytools/cluster.py
|
Python
|
gpl-2.0
| 10,318
|
[
"VTK"
] |
1e5590b7745cf6758174b45f4093df4c6ea49e2ed2d5317b6521b7d139bef950
|
#! /usr/bin/env python
"""Calculate viral taxonomy abundance from BLAST and Bowtie inputs.
Use '-h' for parameter help.
"""
import os
import re
import argparse
import blast
# Command-line interface
parser = argparse.ArgumentParser(description =
    ('Calculate viral taxonomy abundance for each sample from PHAST'
    'viral database BLAST results and bowtie mapping information.'))
parser.add_argument('--blast',
    help=('BLAST output file from search against the viral db.'
        'Output format should be "-m 8".'),
    type=str,
    required=True)
parser.add_argument('--bowtie_dir',
    help=('Directory containing output from bowtie in which reads '
        'were mapped to ORFs. Filenames should end in ".psl".'),
    type=str,
    required=True)
parser.add_argument('--viral_db',
    help=('Phast viral database used for assigning taxonomy '
        'downloaded via'
        'wget http://www.phantome.org/Downloads/Viruses/2016_05_01.tgz'),
    type=str,
    required=True)
args = parser.parse_args()

# Map ORFs to genes, then to viral family/genus taxonomy.
orf_gene_dict = blast.get_orf_gene_dict(args.blast)
blast.setup_viral_taxa_sql_db()
viral_orf_fam_dict, viral_orf_genus_dict = blast.get_viral_orf_taxa_dicts(
    orf_gene_dict, args.viral_db)

bowtie_files = [os.path.join(args.bowtie_dir, f)
                for f in os.listdir(args.bowtie_dir) if f.endswith('.psl')]

# Running totals across all samples.
count_mapped_reads = 0
count_fam_reads = 0
count_genus_reads = 0
count_total_reads = 0

for bowtie_file in bowtie_files:
    # BUG FIX: the original extracted the sample name with
    # re.search(r'([\w.]+/)([\w.]+)([.]psl)', ...), which raises
    # AttributeError for any filename containing characters outside
    # [A-Za-z0-9_.] (e.g. '-'). Deriving it from the path is robust and
    # equivalent for the names the regex did accept.
    sample = os.path.splitext(os.path.basename(bowtie_file))[0]
    read_count, mapped_reads = blast.count_sample_reads_bowtie(bowtie_file)
    count_mapped_reads += mapped_reads
    count_total_reads += read_count
    orf_abundance_dict = blast.get_orf_abundance_dict(bowtie_file)
    sample_viral_fam_dict, sample_viral_genus_dict = (
        blast.get_sample_viral_taxa_abundances(
            orf_abundance_dict, viral_orf_fam_dict, viral_orf_genus_dict)
    )

    # Raw family abundances for this sample; 'with' guarantees the file
    # is closed even if a write fails (original left handles open then).
    fam_count = 0
    viral_raw_fam_ab_file = sample + '.fam_raw_abundance.txt'
    with open(viral_raw_fam_ab_file, 'w') as fam_out:
        fam_out.write('ID\t%s\n' % sample)
        for fam, ab in sample_viral_fam_dict.items():
            fam_count += 1
            fam_out.write('%s\t%d\n' % (fam, ab))

    # Raw genus abundances for this sample.
    genus_count = 0
    viral_raw_genus_ab_file = sample + '.genus_raw_abundance.txt'
    with open(viral_raw_genus_ab_file, 'w') as genus_out:
        genus_out.write('ID\t%s\n' % sample)
        for genus, ab in sample_viral_genus_dict.items():
            genus_count += 1
            genus_out.write('%s\t%d\n' % (genus, ab))

    count_fam_reads += sum(sample_viral_fam_dict.values())
    count_genus_reads += sum(sample_viral_genus_dict.values())
    output = ('Sample: %s\nReads - %d\nFamilies: %s\nGenera: %s\n\n'
              % (sample, read_count, fam_count, genus_count))
    print(output)

output = ('Number of total reads: %d\nNumber of mapped reads: %d\n'
          'Number of mapped reads with family annotation: %d\n'
          'Number of mapped reads with genus annotation: %d') % (
    count_total_reads, count_mapped_reads, count_fam_reads,
    count_genus_reads)
print(output)
|
chrisLanderson/rumen_virome
|
scripts/viral_taxa_blast2tsv.py
|
Python
|
mit
| 3,865
|
[
"BLAST",
"Bowtie"
] |
48ad6214500daaa4c9527c3fca2bf10b925a68d821e8ff3834d5b1aa5d889467
|
# coding: utf-8
# Copyright (c) Pymatgen Development Team.
# Distributed under the terms of the MIT License.
"""
This module implements representations of slabs and surfaces, as well as
algorithms for generating them. If you use this module, please consider
citing the following work::
R. Tran, Z. Xu, B. Radhakrishnan, D. Winston, W. Sun, K. A. Persson,
S. P. Ong, "Surface Energies of Elemental Crystals", Scientific Data,
2016, 3:160080, doi: 10.1038/sdata.2016.80.
as well as::
Sun, W.; Ceder, G. Efficient creation and convergence of surface slabs,
Surface Science, 2013, 617, 53–59, doi:10.1016/j.susc.2013.05.016.
"""
import copy
import itertools
import json
import logging
import math
import os
import warnings
from functools import reduce
from math import gcd
import numpy as np
from monty.fractions import lcm
from scipy.cluster.hierarchy import fcluster, linkage
from scipy.spatial.distance import squareform
from pymatgen.analysis.structure_matcher import StructureMatcher
from pymatgen.core.lattice import Lattice
from pymatgen.core.periodic_table import get_el_sp
from pymatgen.core.sites import PeriodicSite
from pymatgen.core.structure import Structure
from pymatgen.symmetry.analyzer import SpacegroupAnalyzer
from pymatgen.util.coord import in_coord_list
__author__ = "Richard Tran, Wenhao Sun, Zihan Xu, Shyue Ping Ong"
logger = logging.getLogger(__name__)
class Slab(Structure):
    """
    Subclass of Structure representing a Slab. Implements additional
    attributes pertaining to slabs, but the init method does not
    actually implement any algorithm that creates a slab. This is a
    DUMMY class whose init method only holds information about the
    slab. Also has additional methods that return other information
    about a slab such as the surface area, normal, and atom adsorption.
    Note that all Slabs have the surface normal oriented perpendicular to the a
    and b lattice vectors. This means the lattice vectors a and b are in the
    surface plane and the c vector is out of the surface plane (though not
    necessarily perpendicular to the surface).
    .. attribute:: miller_index
        Miller index of plane parallel to surface.
    .. attribute:: scale_factor
        Final computed scale factor that brings the parent cell to the
        surface cell.
    .. attribute:: shift
        The shift value in Angstrom that indicates how much this
        slab has been shifted.
    """
    def __init__(
        self,
        lattice,
        species,
        coords,
        miller_index,
        oriented_unit_cell,
        shift,
        scale_factor,
        reorient_lattice=True,
        validate_proximity=False,
        to_unit_cell=False,
        reconstruction=None,
        coords_are_cartesian=False,
        site_properties=None,
        energy=None,
    ):
        """
        Makes a Slab structure, a structure object with additional information
        and methods pertaining to slabs.
        Args:
            lattice (Lattice/3x3 array): The lattice, either as a
                :class:`pymatgen.core.lattice.Lattice` or
                simply as any 2D array. Each row should correspond to a lattice
                vector. E.g., [[10,0,0], [20,10,0], [0,0,30]] specifies a
                lattice with lattice vectors [10,0,0], [20,10,0] and [0,0,30].
            species ([Species]): Sequence of species on each site. Can take in
                flexible input, including:
                i. A sequence of element / species specified either as string
                   symbols, e.g. ["Li", "Fe2+", "P", ...] or atomic numbers,
                   e.g., (3, 56, ...) or actual Element or Species objects.
                ii. List of dict of elements/species and occupancies, e.g.,
                    [{"Fe" : 0.5, "Mn":0.5}, ...]. This allows the setup of
                    disordered structures.
            coords (Nx3 array): list of fractional/cartesian coordinates of
                each species.
            miller_index ([h, k, l]): Miller index of plane parallel to
                surface. Note that this is referenced to the input structure. If
                you need this to be based on the conventional cell,
                you should supply the conventional structure.
            oriented_unit_cell (Structure): The oriented_unit_cell from which
                this Slab is created (by scaling in the c-direction).
            shift (float): The shift in the c-direction applied to get the
                termination.
            scale_factor (np.ndarray): scale_factor Final computed scale factor
                that brings the parent cell to the surface cell.
            reorient_lattice (bool): reorients the lattice parameters such that
                the c direction is along the z axis.
            validate_proximity (bool): Whether to check if there are sites
                that are less than 0.01 Ang apart. Defaults to False.
            to_unit_cell (bool): Passed through to Structure.__init__.
                Defaults to False.
            reconstruction (str): Type of reconstruction. Defaults to None if
                the slab is not reconstructed.
            coords_are_cartesian (bool): Set to True if you are providing
                coordinates in cartesian coordinates. Defaults to False.
            site_properties (dict): Properties associated with the sites as a
                dict of sequences, e.g., {"magmom":[5,5,5,5]}. The sequences
                have to be the same length as the atomic species and
                fractional_coords. Defaults to None for no properties.
            energy (float): A value for the energy.
        """
        self.oriented_unit_cell = oriented_unit_cell
        self.miller_index = tuple(miller_index)
        self.shift = shift
        self.reconstruction = reconstruction
        self.scale_factor = np.array(scale_factor)
        self.energy = energy
        self.reorient_lattice = reorient_lattice
        if self.reorient_lattice:
            # Rebuilding the lattice from its parameters puts c along z;
            # cartesian input is converted to fractional first so sites
            # follow the reorientation.
            if coords_are_cartesian:
                coords = lattice.get_fractional_coords(coords)
                coords_are_cartesian = False
            lattice = Lattice.from_parameters(
                lattice.a,
                lattice.b,
                lattice.c,
                lattice.alpha,
                lattice.beta,
                lattice.gamma,
            )
        super().__init__(
            lattice,
            species,
            coords,
            validate_proximity=validate_proximity,
            to_unit_cell=to_unit_cell,
            coords_are_cartesian=coords_are_cartesian,
            site_properties=site_properties,
        )
def get_orthogonal_c_slab(self):
"""
This method returns a Slab where the normal (c lattice vector) is
"forced" to be exactly orthogonal to the surface a and b lattice
vectors. **Note that this breaks inherent symmetries in the slab.**
It should be pointed out that orthogonality is not required to get good
surface energies, but it can be useful in cases where the slabs are
subsequently used for postprocessing of some kind, e.g. generating
GBs or interfaces.
"""
a, b, c = self.lattice.matrix
new_c = np.cross(a, b)
new_c /= np.linalg.norm(new_c)
new_c = np.dot(c, new_c) * new_c
new_latt = Lattice([a, b, new_c])
return Slab(
lattice=new_latt,
species=self.species_and_occu,
coords=self.cart_coords,
miller_index=self.miller_index,
oriented_unit_cell=self.oriented_unit_cell,
shift=self.shift,
scale_factor=self.scale_factor,
coords_are_cartesian=True,
energy=self.energy,
reorient_lattice=self.reorient_lattice,
site_properties=self.site_properties,
)
    def get_tasker2_slabs(self, tol=0.01, same_species_only=True):
        """
        Get a list of slabs that have been Tasker 2 corrected.
        Args:
            tol (float): Tolerance to determine if atoms are within same plane.
                This is a fractional tolerance, not an absolute one.
            same_species_only (bool): If True, only that are of the exact same
                species as the atom at the outermost surface are considered for
                moving. Otherwise, all atoms regardless of species that is
                within tol are considered for moving. Default is True (usually
                the desired behavior).
        Returns:
            ([Slab]) List of tasker 2 corrected slabs.
        """
        sites = list(self.sites)
        slabs = []
        sortedcsites = sorted(sites, key=lambda site: site.c)
        # Determine what fraction the slab is of the total cell size in the
        # c direction. Round to nearest rational number.
        nlayers_total = int(round(self.lattice.c / self.oriented_unit_cell.lattice.c))
        nlayers_slab = int(round((sortedcsites[-1].c - sortedcsites[0].c) * nlayers_total))
        slab_ratio = nlayers_slab / nlayers_total
        a = SpacegroupAnalyzer(self)
        symm_structure = a.get_symmetrized_structure()
        def equi_index(site):
            # Index of the symmetry-equivalence class this site belongs to.
            for i, equi_sites in enumerate(symm_structure.equivalent_sites):
                if site in equi_sites:
                    return i
            raise ValueError("Cannot determine equi index!")
        # Try moving half of the outermost layer from the bottom surface to
        # the top (+slab_ratio) and vice versa (-slab_ratio).
        for surface_site, shift in [
            (sortedcsites[0], slab_ratio),
            (sortedcsites[-1], -slab_ratio),
        ]:
            tomove = []
            fixed = []
            for site in sites:
                if abs(site.c - surface_site.c) < tol and (
                    (not same_species_only) or site.species == surface_site.species
                ):
                    tomove.append(site)
                else:
                    fixed.append(site)
            # Sort and group the sites by the species and symmetry equivalence
            tomove = sorted(tomove, key=lambda s: equi_index(s))
            grouped = [list(sites) for k, sites in itertools.groupby(tomove, key=lambda s: equi_index(s))]
            if len(tomove) == 0 or any(len(g) % 2 != 0 for g in grouped):
                warnings.warn(
                    "Odd number of sites to divide! Try changing "
                    "the tolerance to ensure even division of "
                    "sites or create supercells in a or b directions "
                    "to allow for atoms to be moved!"
                )
                continue
            # Enumerate every way of choosing half of each equivalence
            # group to stay on this surface; the rest are shifted across.
            combinations = []
            for g in grouped:
                combinations.append(list(itertools.combinations(g, int(len(g) / 2))))
            for selection in itertools.product(*combinations):
                species = [site.species for site in fixed]
                fcoords = [site.frac_coords for site in fixed]
                for s in tomove:
                    species.append(s.species)
                    for group in selection:
                        if s in group:
                            fcoords.append(s.frac_coords)
                            break
                    else:
                        # Move unselected atom to the opposite surface.
                        fcoords.append(s.frac_coords + [0, 0, shift])
                # sort by species to put all similar species together.
                sp_fcoord = sorted(zip(species, fcoords), key=lambda x: x[0])
                species = [x[0] for x in sp_fcoord]
                fcoords = [x[1] for x in sp_fcoord]
                slab = Slab(
                    self.lattice,
                    species,
                    fcoords,
                    self.miller_index,
                    self.oriented_unit_cell,
                    self.shift,
                    self.scale_factor,
                    energy=self.energy,
                    reorient_lattice=self.reorient_lattice,
                )
                slabs.append(slab)
        # Collapse structurally identical candidates to one representative.
        s = StructureMatcher()
        unique = [ss[0] for ss in s.group_structures(slabs)]
        return unique
def is_symmetric(self, symprec=0.1):
"""
Checks if slab is symmetric, i.e., contains inversion symmetry.
Args:
symprec (float): Symmetry precision used for SpaceGroup analyzer.
Returns:
(bool) Whether slab contains inversion symmetry.
"""
sg = SpacegroupAnalyzer(self, symprec=symprec)
return sg.is_laue()
def get_sorted_structure(self, key=None, reverse=False):
"""
Get a sorted copy of the structure. The parameters have the same
meaning as in list.sort. By default, sites are sorted by the
electronegativity of the species. Note that Slab has to override this
because of the different __init__ args.
Args:
key: Specifies a function of one argument that is used to extract
a comparison key from each list element: key=str.lower. The
default value is None (compare the elements directly).
reverse (bool): If set to True, then the list elements are sorted
as if each comparison were reversed.
"""
sites = sorted(self, key=key, reverse=reverse)
s = Structure.from_sites(sites)
return Slab(
s.lattice,
s.species_and_occu,
s.frac_coords,
self.miller_index,
self.oriented_unit_cell,
self.shift,
self.scale_factor,
site_properties=s.site_properties,
reorient_lattice=self.reorient_lattice,
)
def copy(self, site_properties=None, sanitize=False):
"""
Convenience method to get a copy of the structure, with options to add
site properties.
Args:
site_properties (dict): Properties to add or override. The
properties are specified in the same way as the constructor,
i.e., as a dict of the form {property: [values]}. The
properties should be in the order of the *original* structure
if you are performing sanitization.
sanitize (bool): If True, this method will return a sanitized
structure. Sanitization performs a few things: (i) The sites are
sorted by electronegativity, (ii) a LLL lattice reduction is
carried out to obtain a relatively orthogonalized cell,
(iii) all fractional coords for sites are mapped into the
unit cell.
Returns:
A copy of the Structure, with optionally new site_properties and
optionally sanitized.
"""
props = self.site_properties
if site_properties:
props.update(site_properties)
return Slab(
self.lattice,
self.species_and_occu,
self.frac_coords,
self.miller_index,
self.oriented_unit_cell,
self.shift,
self.scale_factor,
site_properties=props,
reorient_lattice=self.reorient_lattice,
)
@property
def dipole(self):
"""
Calculates the dipole of the Slab in the direction of the surface
normal. Note that the Slab must be oxidation state-decorated for this
to work properly. Otherwise, the Slab will always have a dipole of 0.
"""
dipole = np.zeros(3)
mid_pt = np.sum(self.cart_coords, axis=0) / len(self)
normal = self.normal
for site in self:
charge = sum([getattr(sp, "oxi_state", 0) * amt for sp, amt in site.species.items()])
dipole += charge * np.dot(site.coords - mid_pt, normal) * normal
return dipole
def is_polar(self, tol_dipole_per_unit_area=1e-3):
"""
Checks whether the surface is polar by computing the dipole per unit
area. Note that the Slab must be oxidation state-decorated for this
to work properly. Otherwise, the Slab will always be non-polar.
Args:
tol_dipole_per_unit_area (float): A tolerance. If the dipole
magnitude per unit area is less than this value, the Slab is
considered non-polar. Defaults to 1e-3, which is usually
pretty good. Normalized dipole per unit area is used as it is
more reliable than using the total, which tends to be larger for
slabs with larger surface areas.
"""
dip_per_unit_area = self.dipole / self.surface_area
return np.linalg.norm(dip_per_unit_area) > tol_dipole_per_unit_area
@property
def normal(self):
"""
Calculates the surface normal vector of the slab
"""
normal = np.cross(self.lattice.matrix[0], self.lattice.matrix[1])
normal /= np.linalg.norm(normal)
return normal
@property
def surface_area(self):
"""
Calculates the surface area of the slab
"""
m = self.lattice.matrix
return np.linalg.norm(np.cross(m[0], m[1]))
@property
def center_of_mass(self):
"""
Calculates the center of mass of the slab
"""
weights = [s.species.weight for s in self]
center_of_mass = np.average(self.frac_coords, weights=weights, axis=0)
return center_of_mass
def add_adsorbate_atom(self, indices, specie, distance):
"""
Gets the structure of single atom adsorption.
slab structure from the Slab class(in [0, 0, 1])
Args:
indices ([int]): Indices of sites on which to put the absorbate.
Absorbed atom will be displaced relative to the center of
these sites.
specie (Species/Element/str): adsorbed atom species
distance (float): between centers of the adsorbed atom and the
given site in Angstroms.
"""
# Let's do the work in cartesian coords
center = np.sum([self[i].coords for i in indices], axis=0) / len(indices)
coords = center + self.normal * distance / np.linalg.norm(self.normal)
self.append(specie, coords, coords_are_cartesian=True)
    def __str__(self):
        # Human-readable summary: formula, Miller index, shift/scale
        # factor, lattice parameters and a per-site coordinate table.
        comp = self.composition
        outs = [
            "Slab Summary (%s)" % comp.formula,
            "Reduced Formula: %s" % comp.reduced_formula,
            "Miller index: %s" % (self.miller_index,),
            "Shift: %.4f, Scale Factor: %s" % (self.shift, self.scale_factor.__str__()),
        ]
        def to_s(x):
            # Fixed 6-decimal formatting used for all floats below.
            return "%0.6f" % x
        outs.append("abc : " + " ".join([to_s(i).rjust(10) for i in self.lattice.abc]))
        outs.append("angles: " + " ".join([to_s(i).rjust(10) for i in self.lattice.angles]))
        outs.append("Sites ({i})".format(i=len(self)))
        for i, site in enumerate(self):
            outs.append(
                " ".join(
                    [
                        str(i + 1),
                        site.species_string,
                        " ".join([to_s(j).rjust(12) for j in site.frac_coords]),
                    ]
                )
            )
        return "\n".join(outs)
def as_dict(self):
"""
:return: MSONAble dict
"""
d = super().as_dict()
d["@module"] = self.__class__.__module__
d["@class"] = self.__class__.__name__
d["oriented_unit_cell"] = self.oriented_unit_cell.as_dict()
d["miller_index"] = self.miller_index
d["shift"] = self.shift
d["scale_factor"] = self.scale_factor.tolist()
d["reconstruction"] = self.reconstruction
d["energy"] = self.energy
return d
@classmethod
def from_dict(cls, d):
"""
:param d: dict
:return: Creates slab from dict.
"""
lattice = Lattice.from_dict(d["lattice"])
sites = [PeriodicSite.from_dict(sd, lattice) for sd in d["sites"]]
s = Structure.from_sites(sites)
return Slab(
lattice=lattice,
species=s.species_and_occu,
coords=s.frac_coords,
miller_index=d["miller_index"],
oriented_unit_cell=Structure.from_dict(d["oriented_unit_cell"]),
shift=d["shift"],
scale_factor=d["scale_factor"],
site_properties=s.site_properties,
energy=d["energy"],
)
    def get_surface_sites(self, tag=False):
        """
        Returns the surface sites and their indices in a dictionary. The
        oriented unit cell of the slab will determine the coordination number
        of a typical site. We use VoronoiNN to determine the
        coordination number of bulk sites and slab sites. Due to the
        pathological error resulting from some surface sites in the
        VoronoiNN, we assume any site that has this error is a surface
        site as well. This will work for elemental systems only for now. Useful
        for analysis involving broken bonds and for finding adsorption sites.
        Args:
            tag (bool): Option to adds site attribute "is_surfsite" (bool)
                to all sites of slab. Defaults to False
        Returns:
            A dictionary grouping sites on top and bottom of the slab
            together.
            {"top": [sites with indices], "bottom": [sites with indices}
        TODO:
            Is there a way to determine site equivalence between sites in a slab
            and bulk system? This would allow us get the coordination number of
            a specific site for multi-elemental systems or systems with more
            than one unequivalent site. This will allow us to use this for
            compound systems.
        """
        # Local import to avoid a circular dependency at module load time.
        from pymatgen.analysis.local_env import VoronoiNN
        # Get a dictionary of coordination numbers
        # for each distinct site in the structure
        a = SpacegroupAnalyzer(self.oriented_unit_cell)
        ucell = a.get_symmetrized_structure()
        cn_dict = {}
        v = VoronoiNN()
        unique_indices = [equ[0] for equ in ucell.equivalent_indices]
        for i in unique_indices:
            el = ucell[i].species_string
            if el not in cn_dict.keys():
                cn_dict[el] = []
            # Since this will get the cn as a result of the weighted polyhedra, the
            # slightest difference in cn will indicate a different environment for a
            # species, eg. bond distance of each neighbor or neighbor species. The
            # decimal place to get some cn to be equal.
            cn = v.get_cn(ucell, i, use_weights=True)
            cn = float("%.5f" % (round(cn, 5)))
            if cn not in cn_dict[el]:
                cn_dict[el].append(cn)
        v = VoronoiNN()
        surf_sites_dict, properties = {"top": [], "bottom": []}, []
        for i, site in enumerate(self):
            # Determine if site is closer to the top or bottom of the slab
            top = site.frac_coords[2] > self.center_of_mass[2]
            try:
                # A site is a surface site, if its environment does
                # not fit the environment of other sites
                cn = float("%.5f" % (round(v.get_cn(self, i, use_weights=True), 5)))
                if cn < min(cn_dict[site.species_string]):
                    properties.append(True)
                    key = "top" if top else "bottom"
                    surf_sites_dict[key].append([site, i])
                else:
                    properties.append(False)
            except RuntimeError:
                # or if pathological error is returned, indicating a surface site
                properties.append(True)
                key = "top" if top else "bottom"
                surf_sites_dict[key].append([site, i])
        if tag:
            self.add_site_property("is_surf_site", properties)
        return surf_sites_dict
def have_equivalent_surfaces(self):
"""
Check if we have same number of equivalent sites on both surfaces.
This is an alternative to checking Laue symmetry (is_symmetric())
if we want to ensure both surfaces in the slab are the same
"""
# tag the sites as either surface sites or not
self.get_surface_sites(tag=True)
a = SpacegroupAnalyzer(self)
symm_structure = a.get_symmetrized_structure()
# ensure each site on one surface has a
# corresponding equivalent site on the other
equal_surf_sites = []
for equ in symm_structure.equivalent_sites:
# Top and bottom are arbitrary, we will just determine
# if one site is on one side of the slab or the other
top, bottom = 0, 0
for s in equ:
if s.is_surf_site:
if s.frac_coords[2] > self.center_of_mass[2]:
top += 1
else:
bottom += 1
# Check to see if the number of equivalent sites
# on one side of the slab are equal to the other
equal_surf_sites.append(top == bottom)
return all(equal_surf_sites)
    def get_symmetric_site(self, point, cartesian=False):
        """
        This method uses symmetry operations to find equivalent sites on
        both sides of the slab. Works mainly for slabs with Laue
        symmetry. This is useful for retaining the non-polar and
        symmetric properties of a slab when creating adsorbed
        structures or symmetric reconstructions.
        Arg:
            point: Fractional coordinate.
            cartesian (bool): whether ``point`` (and the result) are in
                cartesian rather than fractional coordinates.
        Returns:
            point: Fractional coordinate. A point equivalent to the
                parameter point, but on the other side of the slab
        """
        sg = SpacegroupAnalyzer(self)
        ops = sg.get_symmetry_operations(cartesian=cartesian)
        # Each operation on a point will return an equivalent point.
        # We want to find the point on the other side of the slab.
        for op in ops:
            slab = self.copy()
            site2 = op.operate(point)
            # NOTE(review): c-coordinates are compared after "%.6f"
            # formatting, i.e. equality to six decimal places.
            if "%.6f" % (site2[2]) == "%.6f" % (point[2]):
                continue
            # Add dummy site to check the overall structure is symmetric
            slab.append("O", point, coords_are_cartesian=cartesian)
            slab.append("O", site2, coords_are_cartesian=cartesian)
            sg = SpacegroupAnalyzer(slab)
            if sg.is_laue():
                break
            # If not symmetric, remove the two added
            # sites and try another symmetry operator
            slab.remove_sites([len(slab) - 1])
            slab.remove_sites([len(slab) - 1])
        # NOTE(review): if every operation is skipped (or ops is empty),
        # site2 may be unbound or the last candidate is returned without
        # the Laue check having passed — callers should be aware.
        return site2
def symmetrically_add_atom(self, specie, point, coords_are_cartesian=False):
"""
Class method for adding a site at a specified point in a slab.
Will add the corresponding site on the other side of the
slab to maintain equivalent surfaces.
Arg:
specie (str): The specie to add
point (coords): The coordinate of the site in the slab to add.
coords_are_cartesian (bool): Is the point in cartesian coordinates
Returns:
(Slab): The modified slab
"""
# For now just use the species of the
# surface atom as the element to add
# Get the index of the corresponding site at the bottom
point2 = self.get_symmetric_site(point, cartesian=coords_are_cartesian)
self.append(specie, point, coords_are_cartesian=coords_are_cartesian)
self.append(specie, point2, coords_are_cartesian=coords_are_cartesian)
def symmetrically_remove_atoms(self, indices):
    """
    Class method for removing sites corresponding to a list of indices.
    Will remove the corresponding site on the other side of the
    slab to maintain equivalent surfaces.

    Arg:
        indices ([indices]): The indices of the sites
            in the slab to remove.
    """
    # Work on a symmetrized copy so equivalent_sites/equivalent_indices
    # are available for pairing top and bottom sites.
    slabcopy = SpacegroupAnalyzer(self.copy()).get_symmetrized_structure()
    points = [slabcopy[i].frac_coords for i in indices]
    removal_list = []

    for pt in points:
        # Get the index of the original site on top
        # (nearest site in the symmetrized copy to the requested point).
        cart_point = slabcopy.lattice.get_cartesian_coords(pt)
        dist = [site.distance_from_point(cart_point) for site in slabcopy]
        site1 = dist.index(min(dist))

        # Get the index of the corresponding site at the bottom
        for i, eq_sites in enumerate(slabcopy.equivalent_sites):
            if slabcopy[site1] in eq_sites:
                eq_indices = slabcopy.equivalent_indices[i]
                break
        i1 = eq_indices[eq_sites.index(slabcopy[site1])]

        for i2 in eq_indices:
            if i2 == i1:
                continue
            # Skip candidates on the same c plane — we need the site on
            # the opposite surface, not a lateral equivalent.
            if slabcopy[i2].frac_coords[2] == slabcopy[i1].frac_coords[2]:
                continue
            # Test site remove to see if it results in symmetric slab
            s = self.copy()
            s.remove_sites([i1, i2])
            if s.is_symmetric():
                removal_list.extend([i1, i2])
                break

    # If expected, 2 atoms are removed per index
    if len(removal_list) == 2 * len(indices):
        self.remove_sites(removal_list)
    else:
        warnings.warn("Equivalent sites could not be found for removal for all indices. Surface unchanged.")
class SlabGenerator:
    """
    This class generates different slabs using shift values determined by where
    a unique termination can be found along with other criterias such as where a
    termination doesn't break a polyhedral bond. The shift value then indicates
    where the slab layer will begin and terminate in the slab-vacuum system.

    .. attribute:: oriented_unit_cell

        A unit cell of the parent structure with the miller
        index of plane parallel to surface

    .. attribute:: parent

        Parent structure from which Slab was derived.

    .. attribute:: lll_reduce

        Whether or not the slabs will be orthogonalized

    .. attribute:: center_slab

        Whether or not the slabs will be centered between
        the vacuum layer

    .. attribute:: slab_scale_factor

        Final computed scale factor that brings the parent cell to the
        surface cell.

    .. attribute:: miller_index

        Miller index of plane parallel to surface.

    .. attribute:: min_slab_size

        Minimum size in angstroms of layers containing atoms

    .. attribute:: min_vac_size

        Minimize size in angstroms of layers containing vacuum
    """

    def __init__(
        self,
        initial_structure,
        miller_index,
        min_slab_size,
        min_vacuum_size,
        lll_reduce=False,
        center_slab=False,
        in_unit_planes=False,
        primitive=True,
        max_normal_search=None,
        reorient_lattice=True,
    ):
        """
        Calculates the slab scale factor and uses it to generate a unit cell
        of the initial structure that has been oriented by its miller index.
        Also stores the initial information needed later on to generate a slab.

        Args:
            initial_structure (Structure): Initial input structure. Note that to
                ensure that the miller indices correspond to usual
                crystallographic definitions, you should supply a conventional
                unit cell structure.
            miller_index ([h, k, l]): Miller index of plane parallel to
                surface. Note that this is referenced to the input structure. If
                you need this to be based on the conventional cell,
                you should supply the conventional structure.
            min_slab_size (float): In Angstroms or number of hkl planes
            min_vacuum_size (float): In Angstroms or number of hkl planes
            lll_reduce (bool): Whether to perform an LLL reduction on the
                eventual structure.
            center_slab (bool): Whether to center the slab in the cell with
                equal vacuum spacing from the top and bottom.
            in_unit_planes (bool): Whether to set min_slab_size and min_vac_size
                in units of hkl planes (True) or Angstrom (False/default).
                Setting in units of planes is useful for ensuring some slabs
                have a certain nlayer of atoms. e.g. for Cs (100), a 10 Ang
                slab will result in a slab with only 2 layer of atoms, whereas
                Fe (100) will have more layer of atoms. By using units of hkl
                planes instead, we ensure both slabs
                have the same number of atoms. The slab thickness will be in
                min_slab_size/math.ceil(self._proj_height/dhkl)
                multiples of oriented unit cells.
            primitive (bool): Whether to reduce any generated slabs to a
                primitive cell (this does **not** mean the slab is generated
                from a primitive cell, it simply means that after slab
                generation, we attempt to find shorter lattice vectors,
                which lead to less surface area and smaller cells).
            max_normal_search (int): If set to a positive integer, the code will
                conduct a search for a normal lattice vector that is as
                perpendicular to the surface as possible by considering
                multiples linear combinations of lattice vectors up to
                max_normal_search. This has no bearing on surface energies,
                but may be useful as a preliminary step to generating slabs
                for absorption and other sizes. It is typical that this will
                not be the smallest possible cell for simulation. Normality
                is not guaranteed, but the oriented cell will have the c
                vector as normal as possible (within the search range) to the
                surface. A value of up to the max absolute Miller index is
                usually sufficient.
            reorient_lattice (bool): reorients the lattice parameters such that
                the c direction is the third vector of the lattice matrix
        """
        # pylint: disable=E1130
        # Add Wyckoff symbols of the bulk, will help with
        # identfying types of sites in the slab system
        sg = SpacegroupAnalyzer(initial_structure)
        initial_structure.add_site_property("bulk_wyckoff", sg.get_symmetry_dataset()["wyckoffs"])
        initial_structure.add_site_property("bulk_equivalent", sg.get_symmetry_dataset()["equivalent_atoms"].tolist())
        latt = initial_structure.lattice
        miller_index = _reduce_vector(miller_index)
        # Calculate the surface normal using the reciprocal lattice vector.
        recp = latt.reciprocal_lattice_crystallographic
        normal = recp.get_cartesian_coords(miller_index)
        normal /= np.linalg.norm(normal)
        slab_scale_factor = []
        non_orth_ind = []
        eye = np.eye(3, dtype=np.int_)
        for i, j in enumerate(miller_index):
            if j == 0:
                # Lattice vector is perpendicular to surface normal, i.e.,
                # in plane of surface. We will simply choose this lattice
                # vector as one of the basis vectors.
                slab_scale_factor.append(eye[i])
            else:
                # Calculate projection of lattice vector onto surface normal.
                d = abs(np.dot(normal, latt.matrix[i])) / latt.abc[i]
                non_orth_ind.append((i, d))

        # We want the vector that has maximum magnitude in the
        # direction of the surface normal as the c-direction.
        # Results in a more "orthogonal" unit cell.
        c_index, dist = max(non_orth_ind, key=lambda t: t[1])

        if len(non_orth_ind) > 1:
            # Pair up the non-orthogonal lattice vectors to build in-plane
            # basis vectors whose projections onto the normal cancel.
            lcm_miller = lcm(*[miller_index[i] for i, d in non_orth_ind])
            for (i, di), (j, dj) in itertools.combinations(non_orth_ind, 2):
                l = [0, 0, 0]
                l[i] = -int(round(lcm_miller / miller_index[i]))
                l[j] = int(round(lcm_miller / miller_index[j]))
                slab_scale_factor.append(l)
                if len(slab_scale_factor) == 2:
                    break

        if max_normal_search is None:
            slab_scale_factor.append(eye[c_index])
        else:
            # Search over integer combinations (sorted by increasing |index|)
            # for a c vector as close to the surface normal as possible.
            index_range = sorted(
                reversed(range(-max_normal_search, max_normal_search + 1)),
                key=lambda x: abs(x),
            )
            candidates = []
            for uvw in itertools.product(index_range, index_range, index_range):
                # Skip the zero vector and any uvw coplanar with the two
                # in-plane basis vectors (degenerate cell).
                if (not any(uvw)) or abs(np.linalg.det(slab_scale_factor + [uvw])) < 1e-8:
                    continue
                vec = latt.get_cartesian_coords(uvw)
                l = np.linalg.norm(vec)
                cosine = abs(np.dot(vec, normal) / l)
                candidates.append((uvw, cosine, l))
                if abs(abs(cosine) - 1) < 1e-8:
                    # If cosine of 1 is found, no need to search further.
                    break
            # We want the indices with the maximum absolute cosine,
            # but smallest possible length.
            uvw, cosine, l = max(candidates, key=lambda x: (x[1], -x[2]))
            slab_scale_factor.append(uvw)

        slab_scale_factor = np.array(slab_scale_factor)

        # Let's make sure we have a left-handed crystallographic system
        if np.linalg.det(slab_scale_factor) < 0:
            slab_scale_factor *= -1

        # Make sure the slab_scale_factor is reduced to avoid
        # unnecessarily large slabs
        reduced_scale_factor = [_reduce_vector(v) for v in slab_scale_factor]
        slab_scale_factor = np.array(reduced_scale_factor)

        single = initial_structure.copy()
        single.make_supercell(slab_scale_factor)

        # When getting the OUC, lets return the most reduced
        # structure as possible to reduce calculations
        self.oriented_unit_cell = Structure.from_sites(single, to_unit_cell=True)
        self.max_normal_search = max_normal_search
        self.parent = initial_structure
        self.lll_reduce = lll_reduce
        self.center_slab = center_slab
        self.slab_scale_factor = slab_scale_factor
        self.miller_index = miller_index
        self.min_vac_size = min_vacuum_size
        self.min_slab_size = min_slab_size
        self.in_unit_planes = in_unit_planes
        self.primitive = primitive
        self._normal = normal
        a, b, c = self.oriented_unit_cell.lattice.matrix
        # Height of one oriented unit cell projected onto the surface normal.
        self._proj_height = abs(np.dot(normal, c))
        self.reorient_lattice = reorient_lattice

    def get_slab(self, shift=0, tol=0.1, energy=None):
        """
        This method takes in shift value for the c lattice direction and
        generates a slab based on the given shift. You should rarely use this
        method. Instead, it is used by other generation algorithms to obtain
        all slabs.

        Arg:
            shift (float): A shift value in Angstrom that determines how much a
                slab should be shifted.
            tol (float): Tolerance to determine primitive cell.
            energy (float): An energy to assign to the slab.

        Returns:
            (Slab) A Slab object with a particular shifted oriented unit cell.
        """
        h = self._proj_height
        p = round(h / self.parent.lattice.d_hkl(self.miller_index), 8)
        # Number of oriented-unit-cell repeats needed for the slab and the
        # vacuum, in either units of hkl planes or Angstroms.
        if self.in_unit_planes:
            nlayers_slab = int(math.ceil(self.min_slab_size / p))
            nlayers_vac = int(math.ceil(self.min_vac_size / p))
        else:
            nlayers_slab = int(math.ceil(self.min_slab_size / h))
            nlayers_vac = int(math.ceil(self.min_vac_size / h))
        nlayers = nlayers_slab + nlayers_vac

        species = self.oriented_unit_cell.species_and_occu
        props = self.oriented_unit_cell.site_properties
        props = {k: v * nlayers_slab for k, v in props.items()}
        frac_coords = self.oriented_unit_cell.frac_coords
        # Apply the termination shift along c and wrap back into [0, 1).
        frac_coords = np.array(frac_coords) + np.array([0, 0, -shift])[None, :]
        frac_coords -= np.floor(frac_coords)
        a, b, c = self.oriented_unit_cell.lattice.matrix
        # Stretch the cell along c to hold slab layers plus vacuum.
        new_lattice = [a, b, nlayers * c]
        frac_coords[:, 2] = frac_coords[:, 2] / nlayers
        all_coords = []
        for i in range(nlayers_slab):
            fcoords = frac_coords.copy()
            fcoords[:, 2] += i / nlayers
            all_coords.extend(fcoords)

        slab = Structure(new_lattice, species * nlayers_slab, all_coords, site_properties=props)

        scale_factor = self.slab_scale_factor
        # Whether or not to orthogonalize the structure
        if self.lll_reduce:
            lll_slab = slab.copy(sanitize=True)
            mapping = lll_slab.lattice.find_mapping(slab.lattice)
            scale_factor = np.dot(mapping[2], scale_factor)
            slab = lll_slab

        # Whether or not to center the slab layer around the vacuum
        if self.center_slab:
            avg_c = np.average([c[2] for c in slab.frac_coords])
            slab.translate_sites(list(range(len(slab))), [0, 0, 0.5 - avg_c])

        if self.primitive:
            prim = slab.get_primitive_structure(tolerance=tol)
            if energy is not None:
                # Rescale the assigned energy by the cell-size reduction.
                energy = prim.volume / slab.volume * energy
            slab = prim

        # Reorient the lattice to get the correct reduced cell
        ouc = self.oriented_unit_cell.copy()
        if self.primitive:
            # find a reduced ouc
            slab_l = slab.lattice
            ouc = ouc.get_primitive_structure(
                constrain_latt={
                    "a": slab_l.a,
                    "b": slab_l.b,
                    "alpha": slab_l.alpha,
                    "beta": slab_l.beta,
                    "gamma": slab_l.gamma,
                }
            )

            # Check this is the correct oriented unit cell
            ouc = self.oriented_unit_cell if slab_l.a != ouc.lattice.a or slab_l.b != ouc.lattice.b else ouc

        return Slab(
            slab.lattice,
            slab.species_and_occu,
            slab.frac_coords,
            self.miller_index,
            ouc,
            shift,
            scale_factor,
            energy=energy,
            site_properties=slab.site_properties,
            reorient_lattice=self.reorient_lattice,
        )

    def _calculate_possible_shifts(self, tol=0.1):
        """
        Find c shifts that place the termination between atomic layers.

        Sites in the oriented unit cell are clustered by their c coordinate
        (with PBC taken into account); each midpoint between adjacent
        clusters is a candidate shift.

        Arg:
            tol (float): Threshold (Angstrom, along the surface normal) for
                fcluster to consider two atoms as lying on the same plane.

        Returns:
            ([float]) Possible shift values, each wrapped into [0, 1).
        """
        frac_coords = self.oriented_unit_cell.frac_coords
        n = len(frac_coords)

        if n == 1:
            # Clustering does not work when there is only one data point.
            shift = frac_coords[0][2] + 0.5
            return [shift - math.floor(shift)]

        # We cluster the sites according to the c coordinates. But we need to
        # take into account PBC. Let's compute a fractional c-coordinate
        # distance matrix that accounts for PBC.
        dist_matrix = np.zeros((n, n))
        h = self._proj_height
        # Projection of c lattice vector in
        # direction of surface normal.
        for i, j in itertools.combinations(list(range(n)), 2):
            if i != j:
                cdist = frac_coords[i][2] - frac_coords[j][2]
                cdist = abs(cdist - round(cdist)) * h
                dist_matrix[i, j] = cdist
                dist_matrix[j, i] = cdist

        condensed_m = squareform(dist_matrix)
        z = linkage(condensed_m)
        clusters = fcluster(z, tol, criterion="distance")

        # Generate dict of cluster# to c val - doesn't matter what the c is.
        c_loc = {c: frac_coords[i][2] for i, c in enumerate(clusters)}

        # Put all c into the unit cell.
        possible_c = [c - math.floor(c) for c in sorted(c_loc.values())]

        # Calculate the shifts
        nshifts = len(possible_c)
        shifts = []
        for i in range(nshifts):
            if i == nshifts - 1:
                # There is an additional shift between the first and last c
                # coordinate. But this needs special handling because of PBC.
                shift = (possible_c[0] + 1 + possible_c[i]) * 0.5
                if shift > 1:
                    shift -= 1
            else:
                shift = (possible_c[i] + possible_c[i + 1]) * 0.5
            shifts.append(shift - math.floor(shift))
        shifts = sorted(shifts)
        return shifts

    def _get_c_ranges(self, bonds):
        """
        Compute the fractional c ranges spanned by each specified bond in
        the oriented unit cell. A shift falling inside any of these ranges
        would cleave that bond.

        Arg:
            bonds ({(specie1, specie2): max_bond_dist}): Dict mapping a pair
                of species to the maximum bonding distance between them.

        Returns:
            ([(float, float)]) List of (c_min, c_max) ranges, wrapped for PBC.
        """
        c_ranges = []
        bonds = {(get_el_sp(s1), get_el_sp(s2)): dist for (s1, s2), dist in bonds.items()}
        for (sp1, sp2), bond_dist in bonds.items():
            for site in self.oriented_unit_cell:
                if sp1 in site.species:
                    for nn in self.oriented_unit_cell.get_neighbors(site, bond_dist):
                        if sp2 in nn.species:
                            c_range = tuple(sorted([site.frac_coords[2], nn.frac_coords[2]]))
                            if c_range[1] > 1:
                                # Takes care of PBC when c coordinate of site
                                # goes beyond the upper boundary of the cell
                                c_ranges.append((c_range[0], 1))
                                c_ranges.append((0, c_range[1] - 1))
                            elif c_range[0] < 0:
                                # Takes care of PBC when c coordinate of site
                                # is below the lower boundary of the unit cell
                                c_ranges.append((0, c_range[1]))
                                c_ranges.append((c_range[0] + 1, 1))
                            elif c_range[0] != c_range[1]:
                                c_ranges.append((c_range[0], c_range[1]))
        return c_ranges

    def get_slabs(
        self,
        bonds=None,
        ftol=0.1,
        tol=0.1,
        max_broken_bonds=0,
        symmetrize=False,
        repair=False,
    ):
        """
        This method returns a list of slabs that are generated using the list of
        shift values from the method, _calculate_possible_shifts(). Before the
        shifts are used to create the slabs however, if the user decides to take
        into account whether or not a termination will break any polyhedral
        structure (bonds is not None), this method will filter out any shift
        values that do so.

        Args:
            bonds ({(specie1, specie2): max_bond_dist}: bonds are
                specified as a dict of tuples: float of specie1, specie2
                and the max bonding distance. For example, PO4 groups may be
                defined as {("P", "O"): 3}.
            tol (float): General tolerance paramter for getting primitive
                cells and matching structures
            ftol (float): Threshold parameter in fcluster in order to check
                if two atoms are lying on the same plane. Default thresh set
                to 0.1 Angstrom in the direction of the surface normal.
            max_broken_bonds (int): Maximum number of allowable broken bonds
                for the slab. Use this to limit # of slabs (some structures
                may have a lot of slabs). Defaults to zero, which means no
                defined bonds must be broken.
            symmetrize (bool): Whether or not to ensure the surfaces of the
                slabs are equivalent.
            repair (bool): Whether to repair terminations with broken bonds
                or just omit them. Set to False as repairing terminations can
                lead to many possible slabs as oppose to just omitting them.

        Returns:
            ([Slab]) List of all possible terminations of a particular surface.
            Slabs are sorted by the # of bonds broken.
        """
        c_ranges = [] if bonds is None else self._get_c_ranges(bonds)

        slabs = []
        for shift in self._calculate_possible_shifts(tol=ftol):
            # Count the bonds this termination would cleave.
            bonds_broken = 0
            for r in c_ranges:
                if r[0] <= shift <= r[1]:
                    bonds_broken += 1
            # Note the broken-bond count is stored as the slab's energy so
            # the final sort ranks terminations by bonds broken.
            slab = self.get_slab(shift, tol=tol, energy=bonds_broken)
            if bonds_broken <= max_broken_bonds:
                slabs.append(slab)
            elif repair:
                # If the number of broken bonds is exceeded,
                # we repair the broken bonds on the slab
                slabs.append(self.repair_broken_bonds(slab, bonds))

        # Further filters out any surfaces made that might be the same
        m = StructureMatcher(ltol=tol, stol=tol, primitive_cell=False, scale=False)

        new_slabs = []
        for g in m.group_structures(slabs):
            # For each unique termination, symmetrize the
            # surfaces by removing sites from the bottom.
            if symmetrize:
                slabs = self.nonstoichiometric_symmetrized_slab(g[0])
                new_slabs.extend(slabs)
            else:
                new_slabs.append(g[0])

        match = StructureMatcher(ltol=tol, stol=tol, primitive_cell=False, scale=False)
        new_slabs = [g[0] for g in match.group_structures(new_slabs)]

        return sorted(new_slabs, key=lambda s: s.energy)

    def repair_broken_bonds(self, slab, bonds):
        """
        This method will find undercoordinated atoms due to slab
        cleaving specified by the bonds parameter and move them
        to the other surface to make sure the bond is kept intact.
        In a future release of surface.py, the ghost_sites will be
        used to tell us how the repair bonds should look like.

        Arg:
            slab (structure): A structure object representing a slab.
            bonds ({(specie1, specie2): max_bond_dist}: bonds are
                specified as a dict of tuples: float of specie1, specie2
                and the max bonding distance. For example, PO4 groups may be
                defined as {("P", "O"): 3}.

        Returns:
            (Slab) A Slab object with a particular shifted oriented unit cell.
        """
        for pair in bonds.keys():
            blength = bonds[pair]

            # First lets determine which element should be the
            # reference (center element) to determine broken bonds.
            # e.g. P for a PO4 bond. Find integer coordination
            # numbers of the pair of elements wrt to each other
            cn_dict = {}
            for i, el in enumerate(pair):
                cnlist = []
                for site in self.oriented_unit_cell:
                    poly_coord = 0
                    if site.species_string == el:
                        for nn in self.oriented_unit_cell.get_neighbors(site, blength):
                            if nn[0].species_string == pair[i - 1]:
                                poly_coord += 1
                        cnlist.append(poly_coord)
                cn_dict[el] = cnlist

            # We make the element with the higher coordination our reference
            if max(cn_dict[pair[0]]) > max(cn_dict[pair[1]]):
                element1, element2 = pair
            else:
                element2, element1 = pair

            for i, site in enumerate(slab):
                # Determine the coordination of our reference
                if site.species_string == element1:
                    poly_coord = 0
                    for neighbor in slab.get_neighbors(site, blength):
                        poly_coord += 1 if neighbor.species_string == element2 else 0

                    # suppose we find an undercoordinated reference atom
                    if poly_coord not in cn_dict[element1]:
                        # We get the reference atom of the broken bonds
                        # (undercoordinated), move it to the other surface
                        slab = self.move_to_other_side(slab, [i])

                        # find its NNs with the corresponding
                        # species it should be coordinated with
                        neighbors = slab.get_neighbors(slab[i], blength, include_index=True)
                        tomove = [nn[2] for nn in neighbors if nn[0].species_string == element2]
                        tomove.append(i)
                        # and then move those NNs along with the central
                        # atom back to the other side of the slab again
                        slab = self.move_to_other_side(slab, tomove)

        return slab

    def move_to_other_side(self, init_slab, index_of_sites):
        """
        This method will Move a set of sites to the
        other side of the slab (opposite surface).

        Arg:
            init_slab (structure): A structure object representing a slab.
            index_of_sites (list of ints): The list of indices representing
                the sites we want to move to the other side.

        Returns:
            (Slab) A Slab object with a particular shifted oriented unit cell.
        """
        slab = init_slab.copy()

        # Determine what fraction the slab is of the total cell size
        # in the c direction. Round to nearest rational number.
        h = self._proj_height
        p = h / self.parent.lattice.d_hkl(self.miller_index)
        if self.in_unit_planes:
            nlayers_slab = int(math.ceil(self.min_slab_size / p))
            nlayers_vac = int(math.ceil(self.min_vac_size / p))
        else:
            nlayers_slab = int(math.ceil(self.min_slab_size / h))
            nlayers_vac = int(math.ceil(self.min_vac_size / h))
        nlayers = nlayers_slab + nlayers_vac
        slab_ratio = nlayers_slab / nlayers

        # Sort the index of sites based on which side they are on
        top_site_index = [i for i in index_of_sites if slab[i].frac_coords[2] > slab.center_of_mass[2]]
        bottom_site_index = [i for i in index_of_sites if slab[i].frac_coords[2] < slab.center_of_mass[2]]

        # Translate sites to the opposite surfaces
        slab.translate_sites(top_site_index, [0, 0, slab_ratio])
        slab.translate_sites(bottom_site_index, [0, 0, -slab_ratio])

        return Slab(
            init_slab.lattice,
            slab.species,
            slab.frac_coords,
            init_slab.miller_index,
            init_slab.oriented_unit_cell,
            init_slab.shift,
            init_slab.scale_factor,
            energy=init_slab.energy,
        )

    def nonstoichiometric_symmetrized_slab(self, init_slab, tol=1e-3):
        """
        This method checks whether or not the two surfaces of the slab are
        equivalent. If the point group of the slab has an inversion symmetry (
        ie. belong to one of the Laue groups), then it is assumed that the
        surfaces should be equivalent. Otherwise, sites at the bottom of the
        slab will be removed until the slab is symmetric. Note the removal of sites
        can destroy the stoichiometry of the slab. For non-elemental
        structures, the chemical potential will be needed to calculate surface energy.

        Arg:
            init_slab (Structure): A single slab structure
            tol (float): Tolerance for SpaceGroupanalyzer.

        Returns:
            Slab (structure): A symmetrized Slab object.
        """
        sg = SpacegroupAnalyzer(init_slab, symprec=tol)
        if sg.is_laue():
            # Already symmetric; nothing to remove.
            return [init_slab]

        nonstoich_slabs = []
        # Build an equivalent surface slab for each of the different surfaces
        for top in [True, False]:
            asym = True
            slab = init_slab.copy()
            slab.energy = init_slab.energy

            while asym:
                # Keep removing sites from the bottom one by one until both
                # surfaces are symmetric or the number of sites removed has
                # exceeded 10 percent of the original slab

                c_dir = [site[2] for i, site in enumerate(slab.frac_coords)]

                if top:
                    slab.remove_sites([c_dir.index(max(c_dir))])
                else:
                    slab.remove_sites([c_dir.index(min(c_dir))])

                if len(slab) <= len(self.parent):
                    # Removed too much; give up on this surface.
                    break

                # Check if the altered surface is symmetric
                sg = SpacegroupAnalyzer(slab, symprec=tol)
                if sg.is_laue():
                    asym = False
                    nonstoich_slabs.append(slab)

            if len(slab) <= len(self.parent):
                warnings.warn("Too many sites removed, please use a larger slab " "size.")

        return nonstoich_slabs
# Load the archive of pre-defined reconstruction instructions shipped next
# to this module; ReconstructionGenerator looks up reconstructions by name
# in this dict.
module_dir = os.path.dirname(os.path.abspath(__file__))
with open(os.path.join(module_dir, "reconstructions_archive.json")) as data_file:
    reconstructions_archive = json.load(data_file)
class ReconstructionGenerator:
    """
    This class takes in a pre-defined dictionary specifying the parameters
    need to build a reconstructed slab such as the SlabGenerator parameters,
    transformation matrix, sites to remove/add and slab/vacuum size. It will
    then use the formatted instructions provided by the dictionary to build
    the desired reconstructed slab from the initial structure.

    .. attribute:: slabgen_params

        Parameters for the SlabGenerator

    .. trans_matrix::

        A 3x3 transformation matrix to generate the reconstructed
        slab. Only the a and b lattice vectors are actually
        changed while the c vector remains the same. This
        matrix is what the Wood's notation is based on.

    .. reconstruction_json::

        The full json or dictionary containing the instructions
        for building the reconstructed slab

    .. termination::

        The index of the termination of the slab

    TODO:
    - Right now there is no way to specify what atom is being
        added. In the future, use basis sets?
    """

    def __init__(self, initial_structure, min_slab_size, min_vacuum_size, reconstruction_name):
        """
        Generates reconstructed slabs from a set of instructions
        specified by a dictionary or json file.

        Args:
            initial_structure (Structure): Initial input structure. Note
                that to ensure that the miller indices correspond to usual
                crystallographic definitions, you should supply a conventional
                unit cell structure.
            min_slab_size (float): In Angstroms
            min_vacuum_size (float): In Angstroms
            reconstruction (str): Name of the dict containing the instructions
                for building a reconstructed slab. The dictionary can contain
                any item the creator deems relevant, however any instructions
                archived in pymatgen for public use needs to contain the
                following keys and items to ensure compatibility with the
                ReconstructionGenerator:

                    "name" (str): A descriptive name for the type of
                        reconstruction. Typically the name will have the type
                        of structure the reconstruction is for, the Miller
                        index, and Wood's notation along with anything to
                        describe the reconstruction: e.g.:
                        "fcc_110_missing_row_1x2"
                    "description" (str): A longer description of your
                        reconstruction. This is to help future contributors who
                        want to add other types of reconstructions to the
                        archive on pymatgen to check if the reconstruction
                        already exists. Please read the descriptions carefully
                        before adding a new type of reconstruction to ensure it
                        is not in the archive yet.
                    "reference" (str): Optional reference to where the
                        reconstruction was taken from or first observed.
                    "spacegroup" (dict): e.g. {"symbol": "Fm-3m", "number": 225}
                        Indicates what kind of structure is this reconstruction.
                    "miller_index" ([h,k,l]): Miller index of your reconstruction
                    "Woods_notation" (str): For a reconstruction, the a and b
                        lattice may change to accomodate the symmetry of the
                        reconstruction. This notation indicates the change in
                        the vectors relative to the primitive (p) or
                        conventional (c) slab cell. E.g. p(2x1):

                        Wood, E. A. (1964). Vocabulary of surface
                        crystallography. Journal of Applied Physics, 35(4),
                        1306–1312.

                    "transformation_matrix" (numpy array): A 3x3 matrix to
                        transform the slab. Only the a and b lattice vectors
                        should change while the c vector remains the same.
                    "SlabGenerator_parameters" (dict): A dictionary containing
                        the parameters for the SlabGenerator class excluding the
                        miller_index, min_slab_size and min_vac_size as the
                        Miller index is already specified and the min_slab_size
                        and min_vac_size can be changed regardless of what type
                        of reconstruction is used. Having a consistent set of
                        SlabGenerator parameters allows for the instructions to
                        be reused to consistently build a reconstructed slab.
                    "points_to_remove" (list of coords): A list of sites to
                        remove where the first two indices are fraction (in a
                        and b) and the third index is in units of 1/d (in c).
                    "points_to_add" (list of frac_coords): A list of sites to add
                        where the first two indices are fraction (in a an b) and
                        the third index is in units of 1/d (in c).

                    "base_reconstruction" (dict): Option to base a reconstruction on
                        an existing reconstruction model also exists to easily build
                        the instructions without repeating previous work. E.g. the
                        alpha reconstruction of halites is based on the octopolar
                        reconstruction but with the topmost atom removed. The dictionary
                        for the alpha reconstruction would therefore contain the item
                        "reconstruction_base": "halite_111_octopolar_2x2", and
                        additional sites for "points_to_remove" and "points_to_add"
                        can be added to modify this reconstruction.

                    For "points_to_remove" and "points_to_add", the third index for
                        the c vector is in units of 1/d where d is the spacing
                        between atoms along hkl (the c vector) and is relative to
                        the topmost site in the unreconstructed slab. e.g. a point
                        of [0.5, 0.25, 1] corresponds to the 0.5 frac_coord of a,
                        0.25 frac_coord of b and a distance of 1 atomic layer above
                        the topmost site. [0.5, 0.25, -0.5] where the third index
                        corresponds to a point half a atomic layer below the topmost
                        site. [0.5, 0.25, 0] corresponds to a point in the same
                        position along c as the topmost site. This is done because
                        while the primitive units of a and b will remain constant,
                        the user can vary the length of the c direction by changing
                        the slab layer or the vacuum layer.

            NOTE: THE DICTIONARY SHOULD ONLY CONTAIN "points_to_remove" AND
            "points_to_add" FOR THE TOP SURFACE. THE ReconstructionGenerator
            WILL MODIFY THE BOTTOM SURFACE ACCORDINGLY TO RETURN A SLAB WITH
            EQUIVALENT SURFACES.
        """
        if reconstruction_name not in reconstructions_archive.keys():
            raise KeyError(
                "The reconstruction_name entered (%s) does not exist in the "
                "archive. Please select from one of the following reconstructions: %s "
                "or add the appropriate dictionary to the archive file "
                "reconstructions_archive.json." % (reconstruction_name, list(reconstructions_archive.keys()))
            )

        # Get the instructions to build the reconstruction
        # from the reconstruction_archive
        recon_json = copy.deepcopy(reconstructions_archive[reconstruction_name])
        new_points_to_add, new_points_to_remove = [], []
        if "base_reconstruction" in recon_json.keys():
            # Keep any points declared on the derived reconstruction; they
            # override/extend the base reconstruction's points below.
            if "points_to_add" in recon_json.keys():
                new_points_to_add = recon_json["points_to_add"]
            if "points_to_remove" in recon_json.keys():
                new_points_to_remove = recon_json["points_to_remove"]

            # Build new instructions from a base reconstruction
            recon_json = copy.deepcopy(reconstructions_archive[recon_json["base_reconstruction"]])
            if "points_to_add" in recon_json.keys():
                del recon_json["points_to_add"]
            if "points_to_remove" in recon_json.keys():
                del recon_json["points_to_remove"]
            if new_points_to_add:
                recon_json["points_to_add"] = new_points_to_add
            if new_points_to_remove:
                recon_json["points_to_remove"] = new_points_to_remove

        slabgen_params = copy.deepcopy(recon_json["SlabGenerator_parameters"])
        slabgen_params["initial_structure"] = initial_structure.copy()
        slabgen_params["miller_index"] = recon_json["miller_index"]
        slabgen_params["min_slab_size"] = min_slab_size
        slabgen_params["min_vacuum_size"] = min_vacuum_size

        self.slabgen_params = slabgen_params
        self.trans_matrix = recon_json["transformation_matrix"]
        self.reconstruction_json = recon_json
        self.name = reconstruction_name

    def build_slabs(self):
        """
        Builds the reconstructed slab by:
            (1) Obtaining the unreconstructed slab using the specified
                parameters for the SlabGenerator.
            (2) Applying the appropriate lattice transformation in the
                a and b lattice vectors.
            (3) Remove any specified sites from both surfaces.
            (4) Add any specified sites to both surfaces.

        Returns:
            (Slab): The reconstructed slab.
        """
        slabs = self.get_unreconstructed_slabs()
        recon_slabs = []

        for slab in slabs:
            # Layer spacing along c and the cartesian position of the topmost
            # site; the archive's c values are in units of this spacing,
            # relative to the topmost site.
            d = get_d(slab)
            top_site = sorted(slab, key=lambda site: site.frac_coords[2])[-1].coords

            # Remove any specified sites
            if "points_to_remove" in self.reconstruction_json.keys():
                pts_to_rm = copy.deepcopy(self.reconstruction_json["points_to_remove"])
                for p in pts_to_rm:
                    p[2] = slab.lattice.get_fractional_coords([top_site[0], top_site[1], top_site[2] + p[2] * d])[2]
                    cart_point = slab.lattice.get_cartesian_coords(p)
                    dist = [site.distance_from_point(cart_point) for site in slab]
                    site1 = dist.index(min(dist))
                    slab.symmetrically_remove_atoms([site1])

            # Add any specified sites
            if "points_to_add" in self.reconstruction_json.keys():
                pts_to_add = copy.deepcopy(self.reconstruction_json["points_to_add"])
                for p in pts_to_add:
                    p[2] = slab.lattice.get_fractional_coords([top_site[0], top_site[1], top_site[2] + p[2] * d])[2]
                    slab.symmetrically_add_atom(slab[0].specie, p)

            slab.reconstruction = self.name
            setattr(slab, "recon_trans_matrix", self.trans_matrix)

            # Get the oriented_unit_cell with the same axb area.
            ouc = slab.oriented_unit_cell.copy()
            ouc.make_supercell(self.trans_matrix)
            slab.oriented_unit_cell = ouc
            recon_slabs.append(slab)

        return recon_slabs

    def get_unreconstructed_slabs(self):
        """
        Generates the unreconstructed or pristine super slab.
        """
        slabs = []
        for slab in SlabGenerator(**self.slabgen_params).get_slabs():
            # Expand a and b according to the reconstruction's matrix.
            slab.make_supercell(self.trans_matrix)
            slabs.append(slab)
        return slabs
def get_d(slab):
    """
    Determine the distance of space between
    each layer of atoms along c.

    Arg:
        slab: A slab-like structure: iterable of sites with ``frac_coords``
            and a ``lattice`` with ``get_cartesian_coords``.

    Returns:
        float: Cartesian z-distance between the first two distinct atomic
            layers along c (measured from the bottom of the slab).

    Raises:
        ValueError: If every site lies on the same c plane, so no layer
            spacing exists. (The previous implementation raised a confusing
            IndexError in this case, because it indexed ``sorted_sites[i + 1]``
            on the last iteration.)
    """
    sorted_sites = sorted(slab, key=lambda site: site.frac_coords[2])
    d = None
    # Walk consecutive pairs from the bottom up until the c coordinates
    # differ; comparison at 6-decimal precision treats numerically-noisy
    # coordinates within one layer as equal, as elsewhere in this module.
    for lower, upper in zip(sorted_sites, sorted_sites[1:]):
        if "%.6f" % (lower.frac_coords[2]) != "%.6f" % (upper.frac_coords[2]):
            d = abs(lower.frac_coords[2] - upper.frac_coords[2])
            break
    if d is None:
        raise ValueError("Cannot determine layer spacing: all sites lie on the same c plane.")
    return slab.lattice.get_cartesian_coords([0, 0, d])[2]
def is_already_analyzed(miller_index: tuple, miller_list: list, symm_ops: list) -> bool:
    """
    Check whether a given Miller index belongs to the symmetry family of
    any index already present in a list.

    Args:
        miller_index (tuple): The Miller index to analyze.
        miller_list (list): List of Miller indices. If the given Miller
            index belongs in the same family as any of the indices in this
            list, return True, else return False.
        symm_ops (list): Symmetry operations of a lattice, used to define
            the family of indices.

    Returns:
        bool: True if some symmetry image of ``miller_index`` is found in
            ``miller_list``.
    """
    return any(in_coord_list(miller_list, op.operate(miller_index)) for op in symm_ops)
def get_symmetrically_equivalent_miller_indices(structure, miller_index, return_hkil=True):
    """
    Returns all symmetrically equivalent indices for a given structure. Analysis
    is based on the symmetry of the reciprocal lattice of the structure.

    Args:
        structure (Structure): Structure whose reciprocal-lattice symmetry
            defines the family of equivalent indices.
        miller_index (tuple): Designates the family of Miller indices
            to find. Can be hkl or hkil for hexagonal systems
        return_hkil (bool): If true, return hkil form of Miller
            index for hexagonal systems, otherwise return hkl

    Returns:
        list: Miller indices equivalent to ``miller_index`` (tuples of 3,
            or of 4 when ``return_hkil`` applies).
    """
    # Change to hkl if hkil because in_coord_list only handles tuples of 3
    miller_index = (miller_index[0], miller_index[1], miller_index[3]) if len(miller_index) == 4 else miller_index
    mmi = max(np.abs(miller_index))
    r = list(range(-mmi, mmi + 1))
    r.reverse()

    sg = SpacegroupAnalyzer(structure)
    # Get distinct hkl planes from the rhombohedral setting if trigonal
    if sg.get_crystal_system() == "trigonal":
        prim_structure = SpacegroupAnalyzer(structure).get_primitive_standard_structure()
        symm_ops = prim_structure.lattice.get_recp_symmetry_operation()
    else:
        symm_ops = structure.lattice.get_recp_symmetry_operation()

    equivalent_millers = [miller_index]
    # Enumerate every candidate index up to the max component of the input
    # and keep those that map onto an already-collected index under a
    # reciprocal-lattice symmetry operation.
    for miller in itertools.product(r, r, r):
        if miller == miller_index:
            continue
        if any(i != 0 for i in miller):
            if is_already_analyzed(miller, equivalent_millers, symm_ops):
                equivalent_millers.append(miller)

            # include larger Miller indices in the family of planes
            if all(mmi > i for i in np.abs(miller)) and not in_coord_list(equivalent_millers, miller):
                if is_already_analyzed(mmi * np.array(miller), equivalent_millers, symm_ops):
                    equivalent_millers.append(miller)

    if return_hkil and sg.get_crystal_system() in ["trigonal", "hexagonal"]:
        # Convert (h, k, l) to the four-index form (h, k, i, l), i = -h - k.
        return [(hkl[0], hkl[1], -1 * hkl[0] - hkl[1], hkl[2]) for hkl in equivalent_millers]
    return equivalent_millers
def get_symmetrically_distinct_miller_indices(structure, max_index, return_hkil=False):
    """
    Returns all symmetrically distinct indices below a certain max-index for
    a given structure. Analysis is based on the symmetry of the reciprocal
    lattice of the structure.

    Args:
        structure (Structure): input structure.
        max_index (int): The maximum index. For example, a max_index of 1
            means that (100), (110), and (111) are returned for the cubic
            structure. All other indices are equivalent to one of these.
        return_hkil (bool): If true, return hkil form of Miller
            index for hexagonal systems, otherwise return hkl

    Returns:
        list: Symmetrically distinct Miller indices (in the conventional
            setting), one representative per symmetry family.
    """
    r = list(range(-max_index, max_index + 1))
    r.reverse()

    # First we get a list of all hkls for conventional (including equivalent)
    conv_hkl_list = [miller for miller in itertools.product(r, r, r) if any(i != 0 for i in miller)]

    sg = SpacegroupAnalyzer(structure)
    # Get distinct hkl planes from the rhombohedral setting if trigonal
    if sg.get_crystal_system() == "trigonal":
        transf = sg.get_conventional_to_primitive_transformation_matrix()
        miller_list = [hkl_transformation(transf, hkl) for hkl in conv_hkl_list]
        prim_structure = SpacegroupAnalyzer(structure).get_primitive_standard_structure()
        symm_ops = prim_structure.lattice.get_recp_symmetry_operation()
    else:
        miller_list = conv_hkl_list
        symm_ops = structure.lattice.get_recp_symmetry_operation()

    unique_millers, unique_millers_conv = [], []

    for i, miller in enumerate(miller_list):
        # Reduce the index by the GCD of its components before comparison.
        d = abs(reduce(gcd, miller))
        miller = tuple(int(i / d) for i in miller)
        if not is_already_analyzed(miller, unique_millers, symm_ops):
            if sg.get_crystal_system() == "trigonal":
                # Now we find the distinct primitive hkls using
                # the primitive symmetry operations and their
                # corresponding hkls in the conventional setting
                unique_millers.append(miller)
                d = abs(reduce(gcd, conv_hkl_list[i]))
                cmiller = tuple(int(i / d) for i in conv_hkl_list[i])
                unique_millers_conv.append(cmiller)
            else:
                unique_millers.append(miller)
                unique_millers_conv.append(miller)

    if return_hkil and sg.get_crystal_system() in ["trigonal", "hexagonal"]:
        # Convert (h, k, l) to the four-index form (h, k, i, l), i = -h - k.
        return [(hkl[0], hkl[1], -1 * hkl[0] - hkl[1], hkl[2]) for hkl in unique_millers_conv]
    return unique_millers_conv
def hkl_transformation(transf, miller_index):
    """
    Transform a Miller index from lattice setting A to setting B.

    Args:
        transf (3x3 array): The transformation matrix
            that transforms a lattice of A to B.
        miller_index ([h, k, l]): Miller index to transform to setting B.

    Returns:
        tuple: The transformed, GCD-reduced Miller index, oriented so that
            at most one component is negative.
    """

    # Scale the (possibly fractional) transformation matrix up to whole
    # numbers using the LCM of the reciprocals of its nonzero entries.
    def lcm(a, b):
        return a * b // math.gcd(a, b)

    reciprocal_entries = [int(1 / entry) for entry in itertools.chain(*transf) if entry != 0]
    integer_transf = (reduce(lcm, reciprocal_entries) * transf).astype(int)

    # Apply the integer transformation to the index.
    transformed = np.dot(integer_transf, miller_index)

    # Reduce by the GCD of the components.
    common = abs(reduce(gcd, transformed))
    transformed = np.array([int(component / common) for component in transformed])

    # Flip the sign so the index is mostly positively oriented.
    if sum(1 for component in transformed if component < 0) > 1:
        transformed *= -1
    return tuple(transformed)
def generate_all_slabs(
    structure,
    max_index,
    min_slab_size,
    min_vacuum_size,
    bonds=None,
    tol=0.1,
    ftol=0.1,
    max_broken_bonds=0,
    lll_reduce=False,
    center_slab=False,
    primitive=True,
    max_normal_search=None,
    symmetrize=False,
    repair=False,
    include_reconstructions=False,
    in_unit_planes=False,
):
    """
    A function that finds all different slabs up to a certain miller index.
    Slabs oriented under certain Miller indices that are equivalent to other
    slabs in other Miller indices are filtered out using symmetry operations
    to get rid of any repetitive slabs. For example, under symmetry operations,
    CsCl has equivalent slabs in the (0,0,1), (0,1,0), and (1,0,0) direction.

    Args:
        structure (Structure): Initial input structure. Note that to
            ensure that the miller indices correspond to usual
            crystallographic definitions, you should supply a conventional
            unit cell structure.
        max_index (int): The maximum Miller index to go up to.
        min_slab_size (float): In Angstroms
        min_vacuum_size (float): In Angstroms
        bonds ({(specie1, specie2): max_bond_dist}: bonds are
            specified as a dict of tuples: float of specie1, specie2
            and the max bonding distance. For example, PO4 groups may be
            defined as {("P", "O"): 3}.
        tol (float): Threshold parameter in fcluster in order to check
            if two atoms are lying on the same plane. Default thresh set
            to 0.1 Angstrom in the direction of the surface normal.
        ftol (float): Forwarded to SlabGenerator.get_slabs; clustering
            tolerance used when identifying terminations (see
            SlabGenerator.get_slabs for the exact semantics).
        max_broken_bonds (int): Maximum number of allowable broken bonds
            for the slab. Use this to limit # of slabs (some structures
            may have a lot of slabs). Defaults to zero, which means no
            defined bonds must be broken.
        lll_reduce (bool): Whether to perform an LLL reduction on the
            eventual structure.
        center_slab (bool): Whether to center the slab in the cell with
            equal vacuum spacing from the top and bottom.
        primitive (bool): Whether to reduce any generated slabs to a
            primitive cell (this does **not** mean the slab is generated
            from a primitive cell, it simply means that after slab
            generation, we attempt to find shorter lattice vectors,
            which lead to less surface area and smaller cells).
        max_normal_search (int): If set to a positive integer, the code will
            conduct a search for a normal lattice vector that is as
            perpendicular to the surface as possible by considering
            multiples linear combinations of lattice vectors up to
            max_normal_search. This has no bearing on surface energies,
            but may be useful as a preliminary step to generating slabs
            for absorption and other sizes. It is typical that this will
            not be the smallest possible cell for simulation. Normality
            is not guaranteed, but the oriented cell will have the c
            vector as normal as possible (within the search range) to the
            surface. A value of up to the max absolute Miller index is
            usually sufficient.
        symmetrize (bool): Whether or not to ensure the surfaces of the
            slabs are equivalent.
        repair (bool): Whether to repair terminations with broken bonds
            or just omit them
        include_reconstructions (bool): Whether to include reconstructed
            slabs available in the reconstructions_archive.json file.
        in_unit_planes (bool): Forwarded to SlabGenerator; whether
            min_slab_size/min_vacuum_size are interpreted in units of hkl
            planes rather than Angstroms (see SlabGenerator).

    Returns:
        list: All symmetrically distinct slabs (plus any matching
            reconstructed slabs when ``include_reconstructions`` is True).
    """
    all_slabs = []

    # Generate slabs only for one representative of each symmetry family.
    for miller in get_symmetrically_distinct_miller_indices(structure, max_index):
        gen = SlabGenerator(
            structure,
            miller,
            min_slab_size,
            min_vacuum_size,
            lll_reduce=lll_reduce,
            center_slab=center_slab,
            primitive=primitive,
            max_normal_search=max_normal_search,
            in_unit_planes=in_unit_planes,
        )
        slabs = gen.get_slabs(
            bonds=bonds,
            tol=tol,
            ftol=ftol,
            symmetrize=symmetrize,
            max_broken_bonds=max_broken_bonds,
            repair=repair,
        )

        if len(slabs) > 0:
            logger.debug("%s has %d slabs... " % (miller, len(slabs)))
            all_slabs.extend(slabs)

    if include_reconstructions:
        sg = SpacegroupAnalyzer(structure)
        symbol = sg.get_space_group_symbol()
        # enumerate through all posisble reconstructions in the
        # archive available for this particular structure (spacegroup)
        for name, instructions in reconstructions_archive.items():
            if "base_reconstruction" in instructions.keys():
                # Resolve derived reconstructions to their base entry.
                instructions = reconstructions_archive[instructions["base_reconstruction"]]
            if instructions["spacegroup"]["symbol"] == symbol:
                # check if this reconstruction has a max index
                # equal or less than the given max index
                if max(instructions["miller_index"]) > max_index:
                    continue
                recon = ReconstructionGenerator(structure, min_slab_size, min_vacuum_size, name)
                all_slabs.extend(recon.build_slabs())

    return all_slabs
def get_slab_regions(slab, blength=3.5):
    """
    Function to get the ranges of the slab regions. Useful for discerning where
    the slab ends and vacuum begins if the slab is not fully within the cell.

    Args:
        slab (Structure): Structure object modelling the surface
        blength (float, Ang): The bondlength between atoms. You generally
            want this value to be larger than the actual bondlengths in
            order to find atoms that are part of the slab

    Returns:
        list: One [min_c, max_c] fractional range when the slab is
            contiguous in the cell, or two ranges ([0, top], [bottom, 1])
            when it wraps around the cell boundary.
    """
    fcoords, indices, all_indices = [], [], []
    for site in slab:
        # find sites with c < 0 (noncontiguous)
        neighbors = slab.get_neighbors(site, blength, include_index=True, include_image=True)
        for nn in neighbors:
            if nn[0].frac_coords[2] < 0:
                # sites are noncontiguous within cell
                fcoords.append(nn[0].frac_coords[2])
                indices.append(nn[-2])
                if nn[-2] not in all_indices:
                    all_indices.append(nn[-2])

    if fcoords:
        # If slab is noncontiguous, locate the lowest
        # site within the upper region of the slab
        while fcoords:
            # Remember the previous iteration's candidates; they are used
            # after the loop when the walk terminates.
            last_fcoords = copy.copy(fcoords)
            last_indices = copy.copy(indices)
            site = slab[indices[fcoords.index(min(fcoords))]]
            neighbors = slab.get_neighbors(site, blength, include_index=True, include_image=True)
            fcoords, indices = [], []
            for nn in neighbors:
                if 1 > nn[0].frac_coords[2] > 0 and nn[0].frac_coords[2] < site.frac_coords[2]:
                    # sites are noncontiguous within cell
                    fcoords.append(nn[0].frac_coords[2])
                    indices.append(nn[-2])
                    if nn[-2] not in all_indices:
                        all_indices.append(nn[-2])

        # Now locate the highest site within the lower region of the slab
        upper_fcoords = []
        for site in slab:
            # Sites with no neighbor in the upper (wrapped) region belong
            # to the lower portion of the slab.
            if all(nn.index not in all_indices for nn in slab.get_neighbors(site, blength)):
                upper_fcoords.append(site.frac_coords[2])
        coords = copy.copy(last_fcoords) if not fcoords else copy.copy(fcoords)
        min_top = slab[last_indices[coords.index(min(coords))]].frac_coords[2]
        ranges = [[0, max(upper_fcoords)], [min_top, 1]]
    else:
        # If the entire slab region is within the slab cell, just
        # set the range as the highest and lowest site in the slab
        sorted_sites = sorted(slab, key=lambda site: site.frac_coords[2])
        ranges = [[sorted_sites[0].frac_coords[2], sorted_sites[-1].frac_coords[2]]]

    return ranges
def miller_index_from_sites(lattice, coords, coords_are_cartesian=True, round_dp=4, verbose=True):
    """
    Get the Miller index of a plane from a list of site coordinates.

    A minimum of 3 sets of coordinates are required. If more than 3 sets of
    coordinates are given, the best plane that minimises the distance to all
    points will be calculated.

    Args:
        lattice (list or Lattice): A 3x3 lattice matrix or `Lattice` object (for
            example obtained from Structure.lattice).
        coords (iterable): A list or numpy array of coordinates. Can be
            cartesian or fractional coordinates. If more than three sets of
            coordinates are provided, the best plane that minimises the
            distance to all sites will be calculated.
        coords_are_cartesian (bool, optional): Whether the coordinates are
            in cartesian space. If using fractional coordinates set to False.
        round_dp (int, optional): The number of decimal places to round the
            miller index to.
        verbose (bool, optional): Whether to print warnings.

    Returns:
        (tuple): The Miller index.
    """
    # Accept either a Lattice object or a raw 3x3 matrix.
    lat = lattice if isinstance(lattice, Lattice) else Lattice(lattice)
    return lat.get_miller_index_from_coords(
        coords,
        coords_are_cartesian=coords_are_cartesian,
        round_dp=round_dp,
        verbose=verbose,
    )
def center_slab(slab):
    """
    The goal here is to ensure the center of the slab region
    is centered close to c=0.5. This makes it easier to
    find the surface sites and apply operations like doping.

    There are three cases where the slab in not centered:

    1. The slab region is completely between two vacuums in the
       box but not necessarily centered. We simply shift the
       slab by the difference in its center of mass and 0.5
       along the c direction.
    2. The slab completely spills outside the box from the bottom
       and into the top. This makes it incredibly difficult to
       locate surface sites. We iterate through all sites that
       spill over (z>c) and shift all sites such that this specific
       site is now on the other side. Repeat for all sites with z>c.
    3. This is a simpler case of scenario 2. Either the top or bottom
       slab sites are at c=0 or c=1. Treat as scenario 2.

    Args:
        slab (Slab): Slab structure to center

    Returns:
        Returns a centered slab structure
    """
    # get a reasonable r cutoff to sample neighbors
    bdists = sorted([nn[1] for nn in slab.get_neighbors(slab[0], 10) if nn[1] > 0])
    r = bdists[0] * 3

    all_indices = [i for i, site in enumerate(slab)]

    # check if structure is case 2 or 3, shift all the
    # sites up to the other side until it is case 1
    for site in slab:
        # A neighbor distance greater than the c lattice parameter means the
        # bonded partner is reached through a periodic image, i.e. the slab
        # wraps around the cell boundary at this site.
        if any(nn[1] > slab.lattice.c for nn in slab.get_neighbors(site, r)):
            # Shift so this site lands just above c=0 (+0.05 margin).
            shift = 1 - site.frac_coords[2] + 0.05
            slab.translate_sites(all_indices, [0, 0, shift])

    # now the slab is case 1, shift the center of mass of the slab to 0.5
    weights = [s.species.weight for s in slab]
    center_of_mass = np.average(slab.frac_coords, weights=weights, axis=0)
    shift = 0.5 - center_of_mass[2]
    slab.translate_sites(all_indices, [0, 0, shift])

    return slab
def _reduce_vector(vector):
# small function to reduce vectors
d = abs(reduce(gcd, vector))
vector = tuple(int(i / d) for i in vector)
return vector
|
richardtran415/pymatgen
|
pymatgen/core/surface.py
|
Python
|
mit
| 87,059
|
[
"pymatgen"
] |
fcd6904ea24f3d4fb398844871fb7b03f16e5f70221c17f3a63a111965856383
|
# -*- coding: utf-8 -*-
#
# tsodyks_facilitating.py
#
# This file is part of NEST.
#
# Copyright (C) 2004 The NEST Initiative
#
# NEST is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 2 of the License, or
# (at your option) any later version.
#
# NEST is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with NEST. If not, see <http://www.gnu.org/licenses/>.
"""Tsodyks facilitating example
--------------------------------
This scripts simulates two neurons. One is driven with dc-input and
connected to the other one with a facilitating Tsodyks synapse. The
membrane potential trace of the second neuron is recorded.
This example reproduces figure 1B of [1]_
This example is analog to ``tsodyks_depressing.py``, except that
different synapse parameters are used. Here, a small facilitation
parameter ``U`` causes a slow saturation of the synaptic efficacy
(Eq. 2.2), enabling a facilitating behavior.
References
~~~~~~~~~~~~
.. [1] Tsodyks M, Pawelzik K, Markram H (1998). Neural networks with dynamic synapses. Neural
computation, http://dx.doi.org/10.1162/089976698300017502
See Also
~~~~~~~~~~
:doc:`tsodyks_depressing`
"""
###############################################################################
# First, we import all necessary modules for simulation and plotting.
import nest
import nest.voltage_trace
from numpy import exp
###############################################################################
# Second, the simulation parameters are assigned to variables. The neuron
# and synapse parameters are stored into a dictionary.

h = 0.1  # simulation step size (ms)
Tau = 40.  # membrane time constant
Theta = 15.  # threshold
E_L = 0.  # reset potential of membrane potential
R = 1.  # membrane resistance (GOhm)
C = Tau / R  # Tau (ms)/R in NEST units
TauR = 2.  # refractory time
Tau_psc = 1.5  # time constant of PSC (= Tau_inact)
Tau_rec = 130.  # recovery time
Tau_fac = 530.  # facilitation time
U = 0.03  # facilitation parameter U
A = 1540.  # PSC weight in pA
f = 20. / 1000.  # frequency in Hz converted to 1/ms
Tend = 1200.  # simulation time
TIstart = 50.  # start time of dc
TIend = 1050.  # end time of dc
# DC amplitude chosen so the driven neuron fires near the target rate f;
# NOTE(review): formula looks like the inverted LIF steady-state
# rate relation — confirm against [1].
I0 = Theta * C / Tau / (1 - exp(-(1 / f - TauR) / Tau))  # dc amplitude

# Parameters shared by both iaf_psc_exp neurons.
neuron_param = {"tau_m": Tau,
                "t_ref": TauR,
                "tau_syn_ex": Tau_psc,
                "tau_syn_in": Tau_psc,
                "C_m": C,
                "V_reset": E_L,
                "E_L": E_L,
                "V_m": E_L,
                "V_th": Theta}

# Parameters of the facilitating Tsodyks synapse (neuron 1 -> neuron 2).
syn_param = {"tau_psc": Tau_psc,
             "tau_rec": Tau_rec,
             "tau_fac": Tau_fac,
             "U": U,
             "delay": 0.1,
             "weight": A,
             "u": 0.0,
             "x": 1.0}
###############################################################################
# Third, we reset the kernel and set the resolution using ``SetKernelStatus``.

nest.ResetKernel()
nest.SetKernelStatus({"resolution": h})

###############################################################################
# Fourth, the nodes are created using ``Create``. We store the returned
# handles in variables for later reference.

neurons = nest.Create("iaf_psc_exp", 2)
dc_gen = nest.Create("dc_generator")
volts = nest.Create("voltmeter")

###############################################################################
# Fifth, the ``iaf_psc_exp`` neurons, the ``dc_generator`` and the ``voltmeter``
# are configured using ``SetStatus``, which expects a list of node handles and
# a parameter dictionary or a list of parameter dictionaries.

neurons.set(neuron_param)
dc_gen.set(amplitude=I0, start=TIstart, stop=TIend)
volts.set(label="voltmeter", interval=1.)

###############################################################################
# Sixth, the ``dc_generator`` is connected to the first neuron
# (`neurons[0]`) and the `voltmeter` is connected to the second neuron
# (`neurons[1]`). The command `Connect` has different variants. Plain
# ``Connect`` just takes the handles of pre- and postsynaptic nodes and
# uses the default values for weight and delay. Note that the connection
# direction for the ``voltmeter`` reflects the signal flow in the simulation
# kernel, because it observes the neuron instead of receiving events from it.

nest.Connect(dc_gen, neurons[0])
nest.Connect(volts, neurons[1])

###############################################################################
# Seventh, the first neuron (`neurons[0]`) is connected to the second
# neuron (`neurons[1]`). The command ``CopyModel`` copies the
# ``tsodyks_synapse`` model to the new name ``syn`` with parameters
# ``syn_param``. The manually defined model ``syn`` is used in the
# connection routine via the ``syn_spec`` parameter.

nest.CopyModel("tsodyks_synapse", "syn", syn_param)
nest.Connect(neurons[0], neurons[1], syn_spec="syn")

###############################################################################
# Finally, we simulate the configuration using the command ``Simulate``,
# where the simulation time `Tend` is passed as the argument. We plot the
# target neuron's membrane potential as function of time.

nest.Simulate(Tend)
nest.voltage_trace.from_device(volts)
nest.voltage_trace.show()
|
SepehrMN/nest-simulator
|
pynest/examples/tsodyks_facilitating.py
|
Python
|
gpl-2.0
| 5,620
|
[
"NEURON"
] |
bfa8dd5e82947eb4a03baee8f29d510fd0dd9295ed84e02ee11602a5720cd14f
|
# Generate UV-Vis spectra from electronic structure TDHF/TDDFT output files.
# Copyright (C) 2014 Li Research Group (University of Washington)
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
"""Generate UV-Vis spectra from electronic structure TDHF/TDDFT output files.
The top-level and main functions for the ``uvspecgen`` script are stored
here. Functions for updating and resetting the Gaussian fit parameters are
defined here.
"""
import sys
from uvspec.config import settings
from uvspec.config.settings import ConfigFile
from uvspec.spectrum import AbsorptionSpectrum
def _update_fit_parameters():
    """Update the Gaussian fit parameters in the configuration file.

    Each parameter/value pair supplied by the user at run-time replaces the
    stored default; parameters not specified keep their current values.
    """
    config = ConfigFile()
    for parameter, value in settings.parameters.iteritems():
        config.update(parameter, value)
    # Typo fixed in user-facing message ("paramters" -> "parameters");
    # print written in function form (identical output for one argument
    # under Python 2, and forward-compatible).
    print(' Fit parameters have been updated')
def _reset_fit_parameters():
    """Restore the originally installed default fit parameter values."""
    config = ConfigFile()
    config.reset()
    print(' Fit parameters have been reset to their original default values')
def _generate_spectrum():
    """Extract, generate, write, and optionally plot the absorption spectrum."""
    absorption = AbsorptionSpectrum()
    # Either merge several logfiles or extract from the single one given.
    if settings.join:
        absorption.join(settings.logfile)
    else:
        absorption.extract(settings.logfile[0])
    absorption.generate(settings.parameters)
    absorption.write(settings.outfile, settings.output, settings.nometa)
    if settings.plot:
        absorption.plot()
def main():
    """The core function that drives the ``uvspecgen`` program.

    First handle updates/resetting of the configuration file containing the
    Gaussian fit parameters. The ``uvspecgen`` program can be run without
    an electronic structure output filename specified solely for the purposes
    of updating/resetting the Gaussian fit parameters in the configuration
    file.

    If the ``--save`` or ``--reset`` flag is not specified, and a logfile is
    not given, the program will terminate with an error message and usage
    instructions.
    """
    # Configuration maintenance first; --save takes precedence over --reset.
    if settings.save:
        _update_fit_parameters()
    elif settings.reset:
        _reset_fit_parameters()
    # A spectrum is generated whenever a logfile is supplied, regardless of
    # whether the fit parameters were just updated or reset.
    if settings.logfile:
        _generate_spectrum()
    elif not settings.save and not settings.reset:
        # No logfile and no maintenance flag: nothing to do.
        settings.parser.error('Must specify at least one logfile name')
    else:
        # Maintenance-only invocation: exit cleanly.
        sys.exit()
|
liresearchgroup/uvspecgen
|
uvspec/generate.py
|
Python
|
gpl-3.0
| 3,106
|
[
"Gaussian"
] |
c708c61675dc0c8ed7e330569fd2ffcbf42c133336e46c11495d163fa9f02a38
|
# This file is part of Buildbot. Buildbot is free software: you can
# redistribute it and/or modify it under the terms of the GNU General Public
# License as published by the Free Software Foundation, version 2.
#
# This program is distributed in the hope that it will be useful, but WITHOUT
# ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS
# FOR A PARTICULAR PURPOSE. See the GNU General Public License for more
# details.
#
# You should have received a copy of the GNU General Public License along with
# this program; if not, write to the Free Software Foundation, Inc., 51
# Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA.
#
# Copyright Buildbot Team Members
from __future__ import with_statement
# Based on the work of Dave Peticolas for the P4poll
# Changed to svn (using xml.dom.minidom) by Niklaus Giger
# Hacked beyond recognition by Brian Warner
from twisted.python import log
from twisted.internet import defer, utils
from buildbot import util
from buildbot.changes import base
import xml.dom.minidom
import os, urllib
# these split_file_* functions are available for use as values to the
# split_file= argument.
def split_file_alwaystrunk(path):
    """Treat every repository path as being on the default (trunk) branch."""
    return {'path': path}
def split_file_branches(path):
    """Split a repository path into (branch, relative_path).

    trunk paths map to branch None; ``branches/<name>/...`` paths map to
    branch ``branches/<name>``; anything else is ignored (returns None).

    # turn "trunk/subdir/file.c" into (None, "subdir/file.c")
    # and "branches/1.5.x/subdir/file.c" into ("branches/1.5.x", "subdir/file.c")
    """
    parts = path.split('/')
    if parts[0] == 'trunk' and len(parts) > 1:
        return (None, '/'.join(parts[1:]))
    if parts[0] == 'branches' and len(parts) > 2:
        return ('/'.join(parts[:2]), '/'.join(parts[2:]))
    return None
def split_file_projects_branches(path):
    """Split a path of the form project/branchpath into change metadata.

    # turn projectname/trunk/subdir/file.c into
    # dict(project=projectname, branch=trunk, path=subdir/file.c)
    """
    if "/" not in path:
        return None
    project, remainder = path.split("/", 1)
    split = split_file_branches(remainder)
    if not split:
        # Propagate the ignore/None result unchanged.
        return split
    info = dict(project=project, path=split[1])
    if split[0]:
        info['branch'] = split[0]
    return info
class SVNPoller(base.PollingChangeSource, util.ComparableMixin):
    """
    Poll a Subversion repository for changes and submit them to the change
    master.
    """

    # Attributes that determine equality between two poller instances
    # (see util.ComparableMixin).
    compare_attrs = ["svnurl", "split_file",
                     "svnuser", "svnpasswd", "project",
                     "pollInterval", "histmax",
                     "svnbin", "category", "cachepath"]

    parent = None  # filled in when we're added
    last_change = None  # newest revision number seen so far (None = never polled)
    loop = None
def __init__(self, svnurl, split_file=None,
svnuser=None, svnpasswd=None,
pollInterval=10*60, histmax=100,
svnbin='svn', revlinktmpl='', category=None,
project='', cachepath=None, pollinterval=-2,
extra_args=None):
# for backward compatibility; the parameter used to be spelled with 'i'
if pollinterval != -2:
pollInterval = pollinterval
base.PollingChangeSource.__init__(self, name=svnurl, pollInterval=pollInterval)
if svnurl.endswith("/"):
svnurl = svnurl[:-1] # strip the trailing slash
self.svnurl = svnurl
self.extra_args = extra_args
self.split_file = split_file or split_file_alwaystrunk
self.svnuser = svnuser
self.svnpasswd = svnpasswd
self.revlinktmpl = revlinktmpl
self.environ = os.environ.copy() # include environment variables
# required for ssh-agent auth
self.svnbin = svnbin
self.histmax = histmax
self._prefix = None
self.category = category
self.project = project
self.cachepath = cachepath
if self.cachepath and os.path.exists(self.cachepath):
try:
with open(self.cachepath, "r") as f:
self.last_change = int(f.read().strip())
log.msg("SVNPoller: SVNPoller(%s) setting last_change to %s" % (self.svnurl, self.last_change))
# try writing it, too
with open(self.cachepath, "w") as f:
f.write(str(self.last_change))
except:
self.cachepath = None
log.msg(("SVNPoller: SVNPoller(%s) cache file corrupt or unwriteable; " +
"skipping and not using") % self.svnurl)
log.err()
def describe(self):
return "SVNPoller: watching %s" % self.svnurl
    def poll(self):
        """Run one polling pass as a Deferred chain: discover the repository
        prefix (once), fetch and parse 'svn log --xml', filter to new
        entries, convert them to changes and submit them.
        """
        # Our return value is only used for unit testing.

        # we need to figure out the repository root, so we can figure out
        # repository-relative pathnames later. Each SVNURL is in the form
        # (ROOT)/(PROJECT)/(BRANCH)/(FILEPATH), where (ROOT) is something
        # like svn://svn.twistedmatrix.com/svn/Twisted (i.e. there is a
        # physical repository at /svn/Twisted on that host), (PROJECT) is
        # something like Projects/Twisted (i.e. within the repository's
        # internal namespace, everything under Projects/Twisted/ has
        # something to do with Twisted, but these directory names do not
        # actually appear on the repository host), (BRANCH) is something like
        # "trunk" or "branches/2.0.x", and (FILEPATH) is a tree-relative
        # filename like "twisted/internet/defer.py".

        # our self.svnurl attribute contains (ROOT)/(PROJECT) combined
        # together in a way that we can't separate without svn's help. If the
        # user is not using the split_file= argument, then self.svnurl might
        # be (ROOT)/(PROJECT)/(BRANCH) . In any case, the filenames we will
        # get back from 'svn log' will be of the form
        # (PROJECT)/(BRANCH)/(FILEPATH), but we want to be able to remove
        # that (PROJECT) prefix from them. To do this without requiring the
        # user to tell us how svnurl is split into ROOT and PROJECT, we do an
        # 'svn info --xml' command at startup. This command will include a
        # <root> element that tells us ROOT. We then strip this prefix from
        # self.svnurl to determine PROJECT, and then later we strip the
        # PROJECT prefix from the filenames reported by 'svn log --xml' to
        # get a (BRANCH)/(FILEPATH) that can be passed to split_file() to
        # turn into separate BRANCH and FILEPATH values.

        # whew.

        if self.project:
            log.msg("SVNPoller: polling " + self.project)
        else:
            log.msg("SVNPoller: polling")

        d = defer.succeed(None)
        # The prefix is determined lazily on the first poll and cached.
        if not self._prefix:
            d.addCallback(lambda _ : self.get_prefix())

            def set_prefix(prefix):
                self._prefix = prefix
            d.addCallback(set_prefix)

        d.addCallback(self.get_logs)
        d.addCallback(self.parse_logs)
        d.addCallback(self.get_new_logentries)
        d.addCallback(self.create_changes)
        d.addCallback(self.submit_changes)
        d.addCallback(self.finished_ok)
        d.addErrback(log.err, 'SVNPoller: Error in while polling') # eat errors
        return d
def getProcessOutput(self, args):
# this exists so we can override it during the unit tests
d = utils.getProcessOutput(self.svnbin, args, self.environ)
return d
    def get_prefix(self):
        """Run 'svn info --xml' and return (via Deferred) the prefix: the
        portion of self.svnurl below the repository root URL.
        """
        args = ["info", "--xml", "--non-interactive", self.svnurl]
        if self.svnuser:
            args.extend(["--username=%s" % self.svnuser])
        if self.svnpasswd:
            args.extend(["--password=%s" % self.svnpasswd])
        if self.extra_args:
            args.extend(self.extra_args)
        d = self.getProcessOutput(args)

        def determine_prefix(output):
            # Parse the XML output and extract the <root> element.
            try:
                doc = xml.dom.minidom.parseString(output)
            except xml.parsers.expat.ExpatError:
                log.msg("SVNPoller: SVNPoller._determine_prefix_2: ExpatError in '%s'"
                        % output)
                raise
            rootnodes = doc.getElementsByTagName("root")
            if not rootnodes:
                # this happens if the URL we gave was already the root. In this
                # case, our prefix is empty.
                # NOTE(review): this branch also assigns self._prefix directly,
                # while the normal path relies on poll()'s set_prefix callback
                # to store the returned value — presumably harmless; confirm.
                self._prefix = ""
                return self._prefix
            rootnode = rootnodes[0]
            root = "".join([c.data for c in rootnode.childNodes])
            # root will be a unicode string
            if not self.svnurl.startswith(root):
                log.msg(format="svnurl='%(svnurl)s' doesn't start with <root>='%(root)s'",
                        svnurl=self.svnurl, root=root)
                raise RuntimeError("Can't handle redirected svn connections!? "
                                   "This shouldn't happen.")
            prefix = self.svnurl[len(root):]
            if prefix.startswith("/"):
                prefix = prefix[1:]
            log.msg("SVNPoller: svnurl=%s, root=%s, so prefix=%s" %
                    (self.svnurl, root, prefix))
            return prefix
        d.addCallback(determine_prefix)
        return d
def get_logs(self, _):
args = []
args.extend(["log", "--xml", "--verbose", "--non-interactive"])
if self.svnuser:
args.extend(["--username=%s" % self.svnuser])
if self.svnpasswd:
args.extend(["--password=%s" % self.svnpasswd])
if self.extra_args:
args.extend(self.extra_args)
args.extend(["--limit=%d" % (self.histmax), self.svnurl])
d = self.getProcessOutput(args)
return d
def parse_logs(self, output):
# parse the XML output, return a list of <logentry> nodes
try:
doc = xml.dom.minidom.parseString(output)
except xml.parsers.expat.ExpatError:
log.msg("SVNPoller: SVNPoller.parse_logs: ExpatError in '%s'" % output)
raise
logentries = doc.getElementsByTagName("logentry")
return logentries
def get_new_logentries(self, logentries):
last_change = old_last_change = self.last_change
# given a list of logentries, calculate new_last_change, and
# new_logentries, where new_logentries contains only the ones after
# last_change
new_last_change = None
new_logentries = []
if logentries:
new_last_change = int(logentries[0].getAttribute("revision"))
if last_change is None:
# if this is the first time we've been run, ignore any changes
# that occurred before now. This prevents a build at every
# startup.
log.msg('SVNPoller: starting at change %s' % new_last_change)
elif last_change == new_last_change:
# an unmodified repository will hit this case
log.msg('SVNPoller: no changes')
else:
for el in logentries:
if last_change == int(el.getAttribute("revision")):
break
new_logentries.append(el)
new_logentries.reverse() # return oldest first
self.last_change = new_last_change
log.msg('SVNPoller: _process_changes %s .. %s' %
(old_last_change, new_last_change))
return new_logentries
def _get_text(self, element, tag_name):
try:
child_nodes = element.getElementsByTagName(tag_name)[0].childNodes
text = "".join([t.data for t in child_nodes])
except:
text = "<unknown>"
return text
def _transform_path(self, path):
if not path.startswith(self._prefix):
log.msg(format="SVNPoller: ignoring path '%(path)s' which doesn't"
"start with prefix '%(prefix)s'",
path=path, prefix=self._prefix)
return
relative_path = path[len(self._prefix):]
if relative_path.startswith("/"):
relative_path = relative_path[1:]
where = self.split_file(relative_path)
# 'where' is either None, (branch, final_path) or a dict
if not where:
return
if isinstance(where, tuple):
where = dict(branch=where[0], path=where[1])
return where
    def create_changes(self, new_logentries):
        """Convert <logentry> DOM nodes into a list of change dicts suitable
        for addChange().  Changed paths are grouped per branch (as decided
        by _transform_path/split_file); a deletion whose only path is the
        branch root itself is ignored."""
        changes = []
        for el in new_logentries:
            revision = str(el.getAttribute("revision"))
            revlink=''
            if self.revlinktmpl:
                if revision:
                    # build a clickable link to this revision
                    revlink = self.revlinktmpl % urllib.quote_plus(revision)
            log.msg("Adding change revision %s" % (revision,))
            author   = self._get_text(el, "author")
            comments = self._get_text(el, "msg")
            # there is a "date" field, but it provides localtime in the
            # repository's timezone, whereas we care about buildmaster's
            # localtime (since this will get used to position the boxes on
            # the Waterfall display, etc). So ignore the date field, and
            # addChange will fill in with the current time
            branches = {}
            try:
                pathlist = el.getElementsByTagName("paths")[0]
            except IndexError: # weird, we got an empty revision
                log.msg("ignoring commit with no paths")
                continue
            for p in pathlist.getElementsByTagName("path"):
                kind = p.getAttribute("kind")
                action = p.getAttribute("action")
                path = "".join([t.data for t in p.childNodes])
                # the rest of buildbot is certaily not yet ready to handle
                # unicode filenames, because they get put in RemoteCommands
                # which get sent via PB to the buildslave, and PB doesn't
                # handle unicode.
                path = path.encode("ascii")
                if path.startswith("/"):
                    path = path[1:]
                # normalize directories to a trailing slash so they can be
                # distinguished from files below
                if kind == "dir" and not path.endswith("/"):
                    path += "/"
                where = self._transform_path(path)
                # if 'where' is None, the file was outside any project that
                # we care about and we should ignore it
                if where:
                    branch = where.get("branch", None)
                    filename = where["path"]
                    if not branch in branches:
                        branches[branch] = { 'files': [], 'number_of_directories': 0}
                    if filename == "":
                        # root directory of branch
                        branches[branch]['files'].append(filename)
                        branches[branch]['number_of_directories'] += 1
                    elif filename.endswith("/"):
                        # subdirectory of branch
                        branches[branch]['files'].append(filename[:-1])
                        branches[branch]['number_of_directories'] += 1
                    else:
                        branches[branch]['files'].append(filename)
                    # remember the first action seen for this branch; used
                    # below to detect whole-branch deletions
                    if not branches[branch].has_key('action'):
                        branches[branch]['action'] = action
                    for key in ("repository", "project", "codebase"):
                        if key in where:
                            branches[branch][key] = where[key]
            for branch in branches.keys():
                action = branches[branch]['action']
                files  = branches[branch]['files']
                number_of_directories_changed = branches[branch]['number_of_directories']
                number_of_files_changed = len(files)
                # a 'D' whose only path is the branch root means the whole
                # branch was deleted -- not a buildable change
                if action == u'D' and number_of_directories_changed == 1 and number_of_files_changed == 1 and files[0] == '':
                    log.msg("Ignoring deletion of branch '%s'" % branch)
                else:
                    chdict = dict(
                            author=author,
                            files=files,
                            comments=comments,
                            revision=revision,
                            branch=branch,
                            revlink=revlink,
                            category=self.category,
                            repository=branches[branch].get('repository', self.svnurl),
                            project=branches[branch].get('project', self.project),
                            codebase=branches[branch].get('codebase', None))
                    changes.append(chdict)
        return changes
    @defer.inlineCallbacks
    def submit_changes(self, changes):
        """Feed each change dict produced by create_changes() into the
        buildmaster's change database, one at a time, in order."""
        for chdict in changes:
            yield self.master.addChange(src='svn', **chdict)
def finished_ok(self, res):
if self.cachepath:
with open(self.cachepath, "w") as f:
f.write(str(self.last_change))
log.msg("SVNPoller: finished polling %s" % res)
return res
|
denny820909/builder
|
lib/python2.7/site-packages/buildbot-0.8.8-py2.7.egg/buildbot/changes/svnpoller.py
|
Python
|
mit
| 17,125
|
[
"Brian"
] |
c6ee6cebb5316101c9122b06b364bf802cc40a90f4ed3ada65b71884900d110d
|
# Databricks notebook source
# MAGIC %md
# MAGIC
# MAGIC # [SDS-2.2, Scalable Data Science](https://lamastex.github.io/scalable-data-science/sds/2/2/)
# MAGIC
# MAGIC This is used in a non-profit educational setting with kind permission of [Adam Breindel](https://www.linkedin.com/in/adbreind).
# MAGIC This is not licensed by Adam for use in a for-profit setting. Please contact Adam directly at `adbreind@gmail.com` to request or report such use cases or abuses.
# MAGIC A few minor modifications and additional mathematical statistical pointers have been added by Raazesh Sainudiin when teaching PhD students in Uppsala University.
# COMMAND ----------
# MAGIC %md
# MAGIC Archived YouTube video of this live unedited lab-lecture:
# MAGIC
# MAGIC [](https://www.youtube.com/embed/Vwou20grUD4?start=378&end=2146&autoplay=1) [](https://www.youtube.com/embed/-LLL3MUl9ps?start=0&end=2467&autoplay=1)
# COMMAND ----------
# MAGIC %md
# MAGIC #### We can also implement the model with mini-batches -- this will let us see matrix ops in action:
# MAGIC
# MAGIC (N.b., feed_dict is intended for small data / experimentation. For more info on ingesting data at scale, see https://www.tensorflow.org/api_guides/python/reading_data)
# COMMAND ----------
# we know these params, but we're making TF learn them
REAL_SLOPE_X1 = 2  # slope along axis 1 (x-axis)
REAL_SLOPE_X2 = 3  # slope along axis 2 (y-axis)
REAL_INTERCEPT = 5  # intercept along axis 3 (z-axis), think of (x,y,z) axes in the usual way
# COMMAND ----------
import numpy as np
# GENERATE a batch of true data, with a little Gaussian noise added
def make_mini_batch(size=10, noise=0.2):
    """Simulate one mini-batch from the plane z = 2*x1 + 3*x2 + 5.

    size  -- number of samples in the batch.
    noise -- scale of the additive standard-normal noise (default 0.2,
             matching the original notebook; pass 0.0 for noise-free data).

    Returns (X, Y) with X of shape (size, 2) drawn from Uniform[0, 1)
    and Y of shape (size, 1).
    """
    X = np.random.rand(size, 2)
    Y = (np.matmul(X, [REAL_SLOPE_X1, REAL_SLOPE_X2])
         + REAL_INTERCEPT + noise * np.random.randn(size))
    return X.reshape(size, 2), Y.reshape(size, 1)
# COMMAND ----------
# MAGIC %md
# MAGIC To digest what's going on inside the function above, let's take it step by step.
# COMMAND ----------
Xex = np.random.rand(10, 2) # Xex is simulating PRNGs from independent Uniform [0,1] RVs
Xex # visualize these as 10 ordered pairs of points in the x-y plane that makes up our x-axis and y-axis (or x1 and x2 axes)
# COMMAND ----------
Yex = np.matmul(Xex, [REAL_SLOPE_X1, REAL_SLOPE_X2]) # + REAL_INTERCEPT + 0.2 * np.random.randn(size)
Yex
# COMMAND ----------
# MAGIC %md
# MAGIC The first entry in Yex is obtained as follows (change the numbers in the product below if you re-evaluated the cells above) and geometrically it is the location in the z-axis of the plane with slopes given by REAL_SLOPE_X1 in the x-axis and REAL_SLOPE_X2 in the y-axis with intercept 0 at the point in the x-y or x1-x2 plane given by (0.68729439, 0.58462379).
# COMMAND ----------
0.68729439*REAL_SLOPE_X1 + 0.58462379*REAL_SLOPE_X2
# COMMAND ----------
# MAGIC %md
# MAGIC The next steps are adding an intercept term to translate the plane in the z-axis and then a scaled (the multiplication by 0.2 here) Gaussian noise from independently drawn pseudo-random samples of the standard normal or Normal(0,1) random variable via `np.random.randn(size)`.
# COMMAND ----------
Yex = np.matmul(Xex, [REAL_SLOPE_X1, REAL_SLOPE_X2]) + REAL_INTERCEPT # + 0.2 * np.random.randn(10)
Yex
# COMMAND ----------
Yex = np.matmul(Xex, [REAL_SLOPE_X1, REAL_SLOPE_X2]) + REAL_INTERCEPT + 0.2 * np.random.randn(10)
Yex # note how each entry in Yex is jiggled independently a bit by 0.2 * np.random.randn()
# COMMAND ----------
# MAGIC %md
# MAGIC Thus we can now fully appreciate what is going on in `make_mini_batch`. This is meant to substitute for pulling random sub-samples of batches of the real data during stochastic gradient descent.
# COMMAND ----------
make_mini_batch() # our mini-batch of Xs and Ys
# COMMAND ----------
import tensorflow as tf
# NOTE(review): this cell uses the TensorFlow 1.x graph/session API
# (placeholders, get_variable, Session); it will not run under TF 2.x
# without tf.compat.v1 -- confirm the cluster's TF version.
batch = 5 # size of batch
tf.reset_default_graph() # this is important to do before you do something new in TF
# we will work with single floating point precision and this is specified in the tf.float32 type argument to each tf object/method
x = tf.placeholder(tf.float32, shape=(batch, 2)) # placeholder node for the pairs of x variables (predictors) in batches of size batch
x_aug = tf.concat( (x, tf.ones((batch, 1))), 1 ) # x_aug is a concatenation of a vector of 1`s along the first dimension
y = tf.placeholder(tf.float32, shape=(batch, 1)) # placeholder node for the univariate response y with batch many rows and 1 column
model_params = tf.get_variable("model_params", [3,1]) # these are the x1 slope, x2 slope and the intercept (3 rows and 1 column)
y_model = tf.matmul(x_aug, model_params) # our two-factor regression model is defined by this matrix multiplication
# note that the noise is formally part of the model and what we are actually modeling is the mean response...
error = tf.reduce_sum(tf.square(y - y_model))/batch # this is mean square error where the sum is computed by a reduce call on addition
train_op = tf.train.GradientDescentOptimizer(0.02).minimize(error) # learning rate is set to 0.02
init = tf.global_variables_initializer() # our way into running the TF session
errors = [] # list to track errors over iterations
with tf.Session() as session:
    session.run(init)
    # 500 steps of stochastic gradient descent, each on a fresh mini-batch
    for i in range(500):
        x_data, y_data = make_mini_batch(batch) # simulate the mini-batch of data x1,x2 and response y with noise
        _, error_val = session.run([train_op, error], feed_dict={x: x_data, y: y_data})
        errors.append(error_val)
    out = session.run(model_params)
    print(out)
# COMMAND ----------
REAL_SLOPE_X1, REAL_SLOPE_X2, REAL_INTERCEPT # compare with true parameter values - it's not too far from the estimates
# COMMAND ----------
import matplotlib.pyplot as plt
# Plot the per-mini-batch mean squared error collected during training.
fig, ax = plt.subplots()
fig.set_size_inches((4,3))
plt.plot(errors)
display(fig)
|
raazesh-sainudiin/scalable-data-science
|
db/2/2/054_DLbyABr_03a-BatchTensorFlowWithMatrices.py
|
Python
|
unlicense
| 6,018
|
[
"Gaussian"
] |
92f383182ba078dd5dc208a7c21eb9590de9c853b924891811d41203a89d0556
|
#! CCSD dipole with user-specified basis set
import psi4
# Route all Psi4 output to a file rather than stdout.
psi4.set_output_file("output.dat", False)
# Water monomer in Z-matrix form: O-H = 0.957 Angstrom,
# H-O-H angle = 104.5 degrees; charge 0, singlet.
h2o = psi4.geometry("""
0 1
H
O 1 0.957
H 2 0.957 1 104.5
""")
# Correlate all electrons (no frozen core) in the CCSD below.
psi4.set_options({'freeze_core': 'false'})
psi4.basis_helper("""
# Sadlej-pVTZ
spherical
****
H 0
S 4 1.00
33.8650140000 0.0060680000
5.0947880000 0.0453160000
1.1587860000 0.2028460000
0.3258400000 0.5037090000
S 1 1.00
0.1027410000 1.0000000000
S 1 1.00
0.0324000000 1.0000000000
P 2 1.00
1.1588000000 0.1884400000
0.3258000000 0.8824200000
P 2 1.00
0.1027000000 0.1178000000
0.0324000000 0.0042000000
****
C 0
S 5 1.00
5240.6353000000 0.0009370000
782.2048000000 0.0072280000
178.3508300000 0.0363440000
50.8159420000 0.1306000000
16.8235620000 0.3189310000
S 2 1.00
6.1757760000 0.4387420000
2.4180490000 0.2149740000
S 1 1.00
0.5119000000 1.0000000000
S 1 1.00
0.1565900000 1.0000000000
S 1 1.00
0.0479000000 1.0000000000
P 4 1.00
18.8418000000 0.0138870000
4.1592400000 0.0862790000
1.2067100000 0.2887440000
0.3855400000 0.4994110000
P 1 1.00
0.1219400000 1.0000000000
P 1 1.00
0.0385680000 1.0000000000
D 2 1.00
1.2067000000 0.2628500000
0.3855000000 0.8043000000
D 2 1.00
0.1219000000 0.6535000000
0.0386000000 0.8636000000
****
O 0
S 5 1.00
10662.2850000000 0.0007990000
1599.7097000000 0.0061530000
364.7252600000 0.0311570000
103.6517900000 0.1155960000
33.9058050000 0.3015520000
S 2 1.00
12.2874690000 0.4448700000
4.7568050000 0.2431720000
S 1 1.00
1.0042710000 1.0000000000
S 1 1.00
0.3006860000 1.0000000000
S 1 1.00
0.0900300000 1.0000000000
P 4 1.00
34.8564630000 0.0156480000
7.8431310000 0.0981970000
2.3062490000 0.3077680000
0.7231640000 0.4924700000
P 1 1.00
0.2148820000 1.0000000000
P 1 1.00
0.0638500000 1.0000000000
D 2 1.00
2.3062000000 0.2027000000
0.7232000000 0.5791000000
D 2 1.00
0.2149000000 0.7854500000
0.0639000000 0.5338700000
****
""")
# Run CCSD requesting the dipole as a one-electron property, keeping the
# wavefunction so OEPROP can compute multipoles from it afterwards.
ccsd_e, wfn = psi4.properties('ccsd',properties=['dipole'],return_wfn=True)
psi4.oeprop(wfn,"DIPOLE", "QUADRUPOLE", title="(OEPROP)CC")
import warnings #TEST
with warnings.catch_warnings(): #TEST
    warnings.simplefilter("ignore") #TEST
    # Reference values checked to 1e-6.  The x/y dipole components and the
    # off-diagonal quadrupole components are zero (consistent with the C2v
    # symmetry of water in this orientation); only the z dipole is nonzero.
    psi4.compare_values(psi4.variable("(OEPROP)CC DIPOLE X"), 0.000000000000,6,"CC DIPOLE X") #TEST
    psi4.compare_values(psi4.variable("(OEPROP)CC DIPOLE Y"), 0.000000000000,6,"CC DIPOLE Y") #TEST
    psi4.compare_values(psi4.variable("(OEPROP)CC DIPOLE Z"),-1.840334899884,6,"CC DIPOLE Z") #TEST
    psi4.compare_values(psi4.variable("(OEPROP)CC QUADRUPOLE XX"),-7.864006962064,6,"CC QUADRUPOLE XX") #TEST
    psi4.compare_values(psi4.variable("(OEPROP)CC QUADRUPOLE XY"), 0.000000000000,6,"CC QUADRUPOLE XY") #TEST
    psi4.compare_values(psi4.variable("(OEPROP)CC QUADRUPOLE XZ"), 0.000000000000,6,"CC QUADRUPOLE XZ") #TEST
    psi4.compare_values(psi4.variable("(OEPROP)CC QUADRUPOLE YY"),-4.537386915305,6,"CC QUADRUPOLE YY") #TEST
    psi4.compare_values(psi4.variable("(OEPROP)CC QUADRUPOLE YZ"), 0.000000000000,6,"CC QUADRUPOLE YZ") #TEST
    psi4.compare_values(psi4.variable("(OEPROP)CC QUADRUPOLE ZZ"),-6.325836255265,6,"CC QUADRUPOLE ZZ") #TEST
psi4.core.print_variables()
|
ashutoshvt/psi4
|
tests/python/cc54/input.py
|
Python
|
lgpl-3.0
| 4,053
|
[
"Psi4"
] |
185c8664d8dbe6d1925c118436247e52e717768f8114d504ada7143e2181dfe2
|
from __future__ import print_function
import numpy as np
from numpy import zeros, ones, einsum
from .constants import tol6, tol8, tol12, Ha2eV, kb_HaK
from .mathutil import delta_lorentzian
from . import EigFile, Eigr2dFile, FanFile, DdbFile, GkkFile
__author__ = "Gabriel Antonius"
__all__ = ['QptAnalyzer']
class QptAnalyzer(object):
    def __init__(self,
                 ddb_fname=None,
                 eigq_fname=None,
                 eigk_fname=None,
                 eigr2d_fname=None,
                 eigr2d0_fname=None,
                 eigi2d_fname=None,
                 fan_fname=None,
                 fan0_fname=None,
                 gkk_fname=None,
                 gkk0_fname=None,
                 wtq=1.0,
                 smearing=0.00367,
                 temperatures=None,
                 omegase=None,
                 asr=True,
                 mu=None,
                 ):
        """Wrap the per-q-point data files without reading them yet.

        The *_fname arguments name the DDB, eigenvalue (at k and k+q),
        EIGR2D/EIGI2D, FAN and GKK netCDF files; the '0' variants hold the
        corresponding q=0 data.  wtq is the q-point weight, smearing the
        imaginary broadening (presumably in Ha -- 0.00367 ~ 0.1 eV; confirm),
        temperatures a list of temperatures, omegase the self-energy
        frequency mesh, asr toggles the acoustic sum rule applied to the
        DDB, and mu is the Fermi level (None to determine it later).
        """
        # Files (all constructed with read=False; call the read_* methods later)
        self.ddb = DdbFile(ddb_fname, read=False, asr=asr)
        self.eigq = EigFile(eigq_fname, read=False)
        self.eigr2d = Eigr2dFile(eigr2d_fname, read=False)
        self.eigi2d = Eigr2dFile(eigi2d_fname, read=False)
        self.fan = FanFile(fan_fname, read=False)
        self.eig0 = EigFile(eigk_fname, read=False)
        self.eigr2d0 = Eigr2dFile(eigr2d0_fname, read=False)
        self.fan0 = FanFile(fan0_fname, read=False)
        self.gkk = GkkFile(gkk_fname, read=False)
        self.gkk0 = GkkFile(gkk0_fname, read=False)
        self.wtq = wtq
        self.smearing = smearing
        self.omegase = omegase if omegase else list()
        self.temperatures = temperatures if temperatures else list()
        self.mu = mu
    @property
    def nkpt(self):
        """Number of k-points, read from whichever perturbation file is set."""
        if self.eigr2d.fname:
            return self.eigr2d.nkpt
        elif self.fan.fname:
            return self.fan.nkpt
        elif self.gkk.fname:
            return self.gkk.nkpt
        else:
            raise Exception("Don't know nkpt. No files to read.")
    @property
    def nband(self):
        """Number of bands, read from whichever perturbation file is set."""
        if self.eigr2d.fname:
            return self.eigr2d.nband
        elif self.fan.fname:
            return self.fan.nband
        elif self.gkk.fname:
            return self.gkk.nband
        else:
            raise Exception("Don't know nband. No files to read.")
    @property
    def natom(self):
        # Number of atoms, from the DDB.
        return self.ddb.natom
    @property
    def nmode(self):
        # Number of phonon modes (3 * natom), from the DDB.
        return self.ddb.nmode
    @property
    def is_gamma(self):
        # True when this q-point is the Gamma point.
        return self.ddb.is_gamma
    @property
    def qred(self):
        # Reduced coordinates of the q-point.
        return self.ddb.qred
    @property
    def omega(self):
        # Phonon frequencies of this q-point.
        return self.ddb.omega
    @property
    def nomegase(self):
        # Size of the self-energy frequency mesh.
        return len(self.omegase)
    @property
    def ntemp(self):
        # Number of temperatures.
        return len(self.temperatures)
    @property
    def use_gkk(self):
        """True when both q and q=0 GKK files are available (preferred input)."""
        return (bool(self.gkk.fname) and bool(self.gkk0.fname))
    @property
    def has_active(self):
        """True when the active-space contribution can be computed
        (either both FAN files or both GKK files are available)."""
        return (bool(self.fan.fname) and bool(self.fan0.fname)) or self.use_gkk
def read_nonzero_files(self):
"""Read all nc files that are not specifically related to q=0."""
for f in (self.ddb, self.eigq, self.eigr2d, self.eigi2d,
self.fan, self.gkk):
if f.fname:
f.read_nc()
self.ddb.compute_dynmat()
    def read_ddb(self):
        """Read the ddb and diagonalize the dynamical matrix, setting omega
        (the phonon frequencies of this q-point)."""
        self.ddb.read_nc()
        self.ddb.compute_dynmat()
    def read_zero_files(self):
        """Read all nc files that are specifically related to q=0.

        (The previous docstring said "not related to q=0" -- a copy-paste
        from read_nonzero_files; these are the q=0 files.)
        """
        for f in (self.eig0, self.eigr2d0, self.fan0, self.gkk0):
            if f.fname:
                f.read_nc()
def broadcast_zero_files(self):
"""Broadcast the data related to q=0 from master to all workers."""
if self.eig0.fname:
self.eig0.broadcast()
self.eig0.get_degen()
if self.eigr2d0.fname:
self.eigr2d0.broadcast()
if self.fan0.fname:
self.fan0.broadcast()
if self.gkk0.fname:
self.gkk0.broadcast()
def get_occ_kq_nospin(self):
"""
Get the occupations, being either 0 or 1, regardless of spinor.
Assumes a gapped system, where occupations are the same at all kpts.
Returns: occ[nband]
"""
if self.eigr2d.fname:
occ = self.eigr2d.occ[0,0,:]
elif self.fan.fname:
occ = self.fan.occ[0,0,:]
elif self.gkk.fname:
occ = self.gkk.occ[0,0,:]
else:
raise Exception("Don't know nband. No files to read.")
if any(occ == 2.0):
occ = occ / 2.0
return occ
def get_max_val(self):
"""Get the maximum valence band energy."""
occ0 = self.get_occ_kq_nospin()
eig = self.eigq.EIG[0,0,:]
E_last = eig[0]
for f, E in zip(occ0, eig):
if f < 0.5:
break
E_last = E
return E_last
def get_min_cond(self):
"""Get the minimum conduction band energy."""
occ0 = self.get_occ_kq_nospin()
eig = self.eigq.EIG[0,0,:]
for f, E in zip(occ0, eig):
if f <= 0.5:
break
return E
def find_fermi_level(self):
"""
Find the Fermi level locally, using the eigenvalues
at all k+q points available. Assuming a gapped system.
"""
return (self.get_max_val() + self.get_min_cond()) / 2.0
@staticmethod
def reduce_array(arr, mode=False, temperature=False, omega=False):
"""
Eliminate dimensions from an array of shape
(nmode, ntemp, nomegase, nkpt, nband)
by summing over any or all of the first three dimension.
mode:
Keep the first dimension
temperature:
Keep the second dimension
omega:
Keep the third dimension
"""
# Find the final order of
final_indices = ''
if mode:
final_indices += 'o'
if temperature:
final_indices += 't'
if omega:
final_indices += 'l'
final_indices += 'kn'
summation = 'otlkn->' + final_indices
return einsum(summation, arr)
    def get_fan_ddw_sternheimer(self, mode=False, omega=False, temperature=False):
        """
        Compute the fan and ddw contribution to the self-energy
        obtained from the Sternheimer equation,
        that is, the contribution of the upper bands.
        Do not include the q-point weight.
        Returns: fan, ddw
        The return arrays vary in dimensions, depending on the input arguments.
        These arrays are at most of dimension 5, as
        fan[nmode, ntemp, nomegase, nkpt, nband]
        ddw[nmode, ntemp, nomegase, nkpt, nband]
        Depending on the truth value of the input arguments,
        the dimension nomegase (omega) and ntemp (temperature)
        will be eliminated.
        The dimension nmode will be summed over in case mode=False.
        In the semi-static approximation, these quantities do not actually
        depend on omega, so the arrays are simply repeated along the omega axis.
        """
        nkpt = self.nkpt
        nband = self.nband
        natom = self.natom
        nmode = self.nmode
        nomegase = self.nomegase
        ntemp = self.ntemp
        # Get reduced displacement (scaled with frequency)
        displ_red_FAN2, displ_red_DDW2 = self.ddb.get_reduced_displ_squared()
        # FIXME this will not work for nsppol=2
        # Contract the second-order matrix elements with the displacements.
        # nmode, nkpt, nband
        fan = einsum('knabij,objai->okn', self.eigr2d.EIG2D, displ_red_FAN2)
        ddw = einsum('knabij,objai->okn', self.eigr2d0.EIG2D, displ_red_DDW2)
        # Temperature dependence factor: 2*n_B + 1 with n_B the Bose-Einstein
        # occupation of each mode.
        n_B = self.ddb.get_bose(self.temperatures)
        tdep = 2 * n_B + 1 if temperature else ones((nmode,1))
        # Omega dependence factor (constant: no true omega dependence here)
        odep = ones(nomegase) if omega else ones(1)
        # nmode, ntemp, nkpt, nband
        fan = einsum('okn,ot->otkn', fan, tdep)
        ddw = einsum('okn,ot->otkn', ddw, tdep)
        # nmode, ntemp, nomega, nkpt, nband
        fan = einsum('otkn,l->otlkn', fan, odep)
        ddw = einsum('otkn,l->otlkn', ddw, odep)
        # Reduce the arrays
        fan = self.reduce_array(fan, mode=mode, temperature=temperature, omega=omega)
        ddw = self.reduce_array(ddw, mode=mode, temperature=temperature, omega=omega)
        return fan, ddw
    def get_fan_ddw_gkk2_active(self):
        """
        Compute the squared gkk elements for the fan ddw terms.
        Returns:
        fan[nkpt, nband, nband, nmode]
        ddw[nkpt, nband, nband, nmode]
        """
        if not self.has_active:
            raise Exception('You should provide GKK files or FAN files '
                            'to compute active space contribution.')
        # Get reduced displacement (scaled with frequency)
        displ_red_FAN2, displ_red_DDW2 = self.ddb.get_reduced_displ_squared()
        if self.use_gkk:
            gkk2 = self.gkk.get_gkk_squared()
            gkk02 = self.gkk0.get_gkk_squared()
        else:
            # FAN files presumably hold the squared elements directly
            # (no squaring step here) -- confirm against FanFile.
            gkk2 = self.fan.FAN
            gkk02 = self.fan0.FAN
        # nkpt, nband, nband, nmode
        fan = einsum('kniajbm,oabij->knmo', gkk2, displ_red_FAN2)
        ddw = einsum('kniajbm,oabij->knmo', gkk02, displ_red_DDW2)
        # Enforce the diagonal coupling terms to be zero at Gamma
        ddw = self.eig0.symmetrize_fan_degen(ddw)
        if self.is_gamma:
            fan = self.eig0.symmetrize_fan_degen(fan)
        return fan, ddw
    def get_fan_ddw_active(self, mode=False, omega=False, temperature=False,
                           dynamical=True):
        """
        Compute the fan and ddw contributions to the self-energy
        from the active space, that is, the lower bands.
        Do not include the q-point weight.
        Returns: fan, ddw
        The return arrays vary in dimensions, depending on the input arguments.
        These arrays are at most of dimension 5, as
        fan[nmode, ntemp, nomegase, nkpt, nband]
        ddw[nmode, ntemp, nomegase, nkpt, nband]
        Depending on the truth value of the input arguments,
        the dimension nomegase (omega) and ntemp (temperature)
        will be eliminated.
        The dimension nmode will be summed over in case mode=False.
        The Debye-Waller term does not actually depends on omega,
        but this dimension is kept anyway.
        """
        nkpt = self.nkpt
        nband = self.nband
        nmode = self.nmode
        if temperature:
            ntemp = self.ntemp
            temperatures = self.temperatures
            # Bose-Enstein occupation number
            # nmode, ntemp
            n_B = self.ddb.get_bose(temperatures)
        else:
            ntemp = 1
            temperatures = zeros(1)
            n_B = zeros((nmode,1))
        if omega:
            nomegase = self.nomegase
            omega_se = self.omegase
        else:
            # omega_se is measured from the bare eigenvalues
            nomegase = 1
            omega_se = zeros(1)
        if dynamical:
            omega_q = self.ddb.omega[:].real
        else:
            # static limit: phonon frequencies dropped from the denominators
            omega_q = zeros(nmode)
        # Fermi-Dirac occupation number
        # nspin, nkpt, nband, ntemp
        occ = self.eigq.get_fermi_function(self.mu, temperatures)
        # G^2
        # nkpt, nband, nband, nmode
        fan_g2, ddw_g2 = self.get_fan_ddw_gkk2_active()
        # DDW term
        # --------
        # nkpt, nband
        occ0 = self.eig0.get_fermi_function_T0(self.mu)[0,:,:]
        # Energy denominator E_kn - E_km, with an imaginary shift whose sign
        # follows the occupation (occupied: +i*smearing, empty: -i*smearing).
        # nkpt, nband, nband
        delta_E_ddw = (einsum('kn,m->knm', self.eig0.EIG[0,:,:].real, ones(nband))
                     - einsum('kn,m->kmn', self.eig0.EIG[0,:,:].real, ones(nband))
                     - einsum('m,kn->knm', ones(nband), (2*occ0-1)) * self.smearing * 1j)
        # nmode, nkpt, nband
        ddw = einsum('knmo,knm->okn', ddw_g2, 1.0 / delta_E_ddw)
        # nmode, ntemp
        tdep = 2 * n_B + 1
        # FIXME This is not optimal: The mode indices will be summed
        #       so there is no need to create an array this big.
        #       in case omega=True and mode=False
        # nmode, ntemp, nkpt, nband
        ddw = einsum('okn,ot->otkn', ddw, tdep)
        # NOTE(review): 'odep' is computed but never used below (the einsum
        # uses ones(nomegase) directly); 'ones(0)' in the else-branch looks
        # like leftover code -- candidate for removal.
        odep = ones(nomegase) if omega else ones(0)
        # ntemp, nomega, nkpt, nband
        ddw = einsum('otkn,l->otlkn', ddw, ones(nomegase))
        # Reduce the arrays
        ddw = self.reduce_array(ddw, mode=mode,
                                temperature=temperature, omega=omega)
        # Fan term
        # --------
        # nmode, ntemp, nomegase, nkpt, nband
        fan = zeros((nmode, ntemp, nomegase, nkpt, nband), dtype=complex)
        # n + 1 - f
        # nkpt, nband, nmode, ntemp
        num1 = (einsum('ot,kn->knot', n_B, ones((nkpt,nband)))
                + 1. - einsum('knt,o->knot', occ[0,:,:,:], ones(nmode)))
        # n + f
        # nkpt, nband, nmode, ntemp
        num2 = (einsum('ot,kn->knot', n_B, ones((nkpt,nband)))
                + einsum('knt,o->knot', occ[0,:,:,:], ones(nmode)))
        # imaginary shift, sign set by occupation (see delta_E_ddw above)
        # nkpt, nband
        eta = (2 * occ0 - 1) * self.smearing * 1j
        # Accumulate over intermediate bands one at a time to bound memory.
        for jband in range(nband):
            # nkpt, nband
            delta_E = (
                self.eig0.EIG[0,:,:].real
                - einsum('k,n->kn', self.eigq.EIG[0,:,jband].real, ones(nband))
                - eta)
            # nkpt, nband, nomegase
            delta_E_omega = (einsum('kn,l->knl', delta_E, ones(nomegase))
                           + einsum('kn,l->knl', ones((nkpt,nband)), omega_se))
            # emission-like denominator: delta_E + omega - omega_q
            # nkpt, nband, nomegase, nmode
            deno1 = (einsum('knl,o->knlo', delta_E_omega, ones(nmode))
                   - einsum('knl,o->knlo', ones((nkpt,nband,nomegase)), omega_q))
            # nmode, nkpt, nband, nomegase, ntemp
            div1 = einsum('kot,knlo->oknlt', num1[:,jband,:,:], 1.0 / deno1)
            del deno1
            # absorption-like denominator: delta_E + omega + omega_q
            # nkpt, nband, nomegase, nmode
            deno2 = (einsum('knl,o->knlo', delta_E_omega, ones(nmode))
                   + einsum('knl,o->knlo', ones((nkpt,nband,nomegase)), omega_q))
            # nmode, nkpt, nband, nomegase, ntemp
            div2 = einsum('kot,knlo->oknlt', num2[:,jband,:,:], 1.0 / deno2)
            del deno2
            # FIXME This is not optimal: The mode indices will be summed
            #       so there is no need to create an array this big.
            #       in case omega=True and mode=False
            # nmode, ntemp, nomegase, nkpt, nband
            fan += einsum('kno,oknlt->otlkn', fan_g2[:,:,jband,:], div1 + div2)
            del div1, div2
        # Reduce the arrays
        fan = self.reduce_array(fan, mode=mode, temperature=temperature, omega=omega)
        return fan, ddw
def get_fan_ddw(self, mode=False, temperature=False,
omega=False, dynamical=False):
kwargs = dict(
mode=mode,
temperature=temperature,
omega=omega,
dynamical=dynamical)
fan_stern, ddw_stern = self.get_fan_ddw_sternheimer(
mode=mode,
temperature=temperature,
omega=omega,
)
fan_active, ddw_active = self.get_fan_ddw_active(
mode=mode,
temperature=temperature,
omega=omega,
dynamical=dynamical)
fan = fan_active + fan_stern
ddw = ddw_active + ddw_stern
return fan, ddw
    def get_self_energy(self,
                        mode=False,
                        temperature=False,
                        omega=False,
                        dynamical=True,
                        only_sternheimer=False,
                        only_active=False,
                        only_fan=False,
                        only_ddw=False,
                        ):
        """
        Assemble the electron-phonon self-energy for this q-point.

        Selects the Sternheimer-only, active-only, or total Fan/DDW terms,
        optionally keeping only the Fan or only the (negated) DDW part,
        then multiplies by the q-point weight and averages over degenerate
        states.  only_sternheimer and only_active are mutually exclusive.
        """
        if only_sternheimer and only_active:
            raise Exception(
                'only_sternheimer and only_active cannot be True at the same time')
        elif only_sternheimer:
            fan, ddw = self.get_fan_ddw_sternheimer(
                mode=mode,
                temperature=temperature,
                omega=omega,
                )
        elif only_active:
            fan, ddw = self.get_fan_ddw_active(
                mode=mode,
                temperature=temperature,
                omega=omega,
                dynamical=dynamical)
        else:
            fan, ddw = self.get_fan_ddw(
                mode=mode,
                temperature=temperature,
                omega=omega,
                dynamical=dynamical)
        # The DDW term enters the self-energy with a minus sign.
        if only_fan:
            se_q = fan
        elif only_ddw:
            se_q = - ddw
        else:
            se_q = fan - ddw
        # Apply the q-point weight, then average over degenerate states.
        se = self.wtq * se_q
        se = self.eig0.make_average(se)
        return se
    def get_broadening(self, mode=False, temperature=False,
                       omega=False, dynamical=True):
        """
        Compute the zp broadening contribution from one q-point in a dynamical scheme.
        Only take the active space contribution.
        Includes the q-point weight and the average over degenerate states.
        """
        nkpt = self.nkpt
        nband = self.nband
        nmode = self.nmode
        if temperature:
            ntemp = self.ntemp
            temperatures = self.temperatures
            # Bose-Enstein occupation number
            # nmode, ntemp
            n_B = self.ddb.get_bose(temperatures)
        else:
            ntemp = 1
            temperatures = zeros(1)
            n_B = zeros((nmode,1))
        if omega:
            nomegase = self.nomegase
            omega_se = self.omegase
        else:
            # omega_se is measured from the bare eigenvalues
            nomegase = 1
            omega_se = zeros(1)
        if dynamical:
            omega_q = self.ddb.omega[:].real
        else:
            # static limit: drop the phonon frequency from the delta arguments
            omega_q = zeros(nmode)
        # Fermi-Dirac occupation number
        # nspin, nkpt, nband, ntemp
        occ = self.eigq.get_fermi_function(self.mu, temperatures)
        # nkpt, nband, ntemp
        f = occ[0]
        # nkpt, nband
        occ0 = self.eig0.get_fermi_function_T0(self.mu)[0,:,:]
        # nkpt, nband, nband, nmode
        fan_g2, ddw_g2 = self.get_fan_ddw_gkk2_active()
        # nmode, ntemp, nkpt
        n_B = einsum('ot,q->otq', n_B, ones(nkpt))
        # nmode, ntemp, nkpt,nband
        f = einsum('qmt,o->otqm', f, ones(nmode))
        # sign from the state occupation (occupied vs empty)
        # nkpt, nband
        sign = np.sign(- (2 * occ0 - 1.))
        broadening = zeros((nmode,ntemp,nomegase,nkpt,nband))
        # Accumulate over intermediate bands one at a time to bound memory.
        for jband in range(nband):
            # nmode, ntemp, nkpt
            num1 = (n_B + f[...,jband])
            num2 = (n_B + 1 - f[...,jband])
            # nkpt, nband
            delta_E = (
                self.eig0.EIG[0,:,:].real
                - einsum('q,n->qn', self.eigq.EIG[0,:,jband].real, ones(nband))
                )
            # nkpt, nband, nomegase
            delta_E_omega = (einsum('kn,l->knl', delta_E, ones(nomegase))
                           + einsum('kn,l->knl', ones((nkpt,nband)), omega_se))
            # nmode, nkpt, nband, nomegase
            deno1 = (
                einsum('knl,o->oknl', delta_E_omega, ones(nmode))
                + einsum('o,knl->oknl', omega_q, ones((nkpt,nband,nomegase)))
                )
            # nmode, nkpt, nband, nomegase
            deno2 = (
                einsum('knl,o->oknl', delta_E_omega, ones(nmode))
                - einsum('o,knl->oknl', omega_q, ones((nkpt,nband,nomegase)))
                )
            # Lorentzian representation of the energy-conserving deltas
            # nmode, nkpt, nband, nomegase
            delta1 = np.pi * delta_lorentzian(deno1, self.smearing)
            delta2 = np.pi * delta_lorentzian(deno2, self.smearing)
            # nmode, ntemp, nomegase, nkpt, nband
            term1 = einsum('otk,oknl->otlkn', num1, delta1)
            term2 = einsum('otk,oknl->otlkn', num2, delta2)
            deltas = einsum('kn,otlkn->otlkn', sign, term1 + term2)
            broadening_j = einsum('kno,otlkn->otlkn', fan_g2[:,:,jband,:], deltas)
            broadening += broadening_j.real
        # Reduce the arrays
        broadening = self.reduce_array(broadening, mode=mode,
                                       temperature=temperature, omega=omega)
        broadening *= self.wtq
        broadening = self.eig0.make_average(broadening)
        return broadening
def get_zp_self_energy(self):
"""
Compute the zp frequency-dependent dynamical self-energy
from one q-point.
The self-energy is evaluated on a frequency mesh 'omegase'
that is shifted by the bare energies, such that, what is retured is
Simga'_kn(omega) = Sigma_kn(omega + E^0_kn)
Returns: sigma[nkpt,nband,nomegase]
"""
self.sigma = self.get_self_energy(
mode=False,
temperature=False,
omega=True,
dynamical=True,
only_sternheimer=False,
only_active=False,
)
# nkpt, nband, nomegase, nband
self.sigma = einsum('lkn->knl', self.sigma) # FIXME why??
return self.sigma
def get_td_self_energy(self):
"""
Compute the temperature depended and frequency-dependent
dynamical self-energy from one q-point.
The self-energy is evaluated on a frequency mesh 'omegase'
that is shifted by the bare energies, such that, what is retured is
Simga'_kn(omega,T) = Sigma_kn(omega + E^0_kn, T)
Returns: sigma[nkpt,nband,nomegase,ntemp]
"""
self.sigma = self.get_self_energy(
mode=False,
temperature=True,
omega=True,
dynamical=True,
only_sternheimer=False,
only_active=False,
)
# nkpt, nband, nomegase, nband
self.sigma = einsum('tlkn->knlt', self.sigma) # FIXME why??
return self.sigma
def get_zp_self_energy_active(self):
"""
Compute the zp frequency-dependent dynamical self-energy
from one q-point.
Only include the active space contribution.
The self-energy is evaluated on a frequency mesh 'omegase'
that is shifted by the bare energies, such that, what is retured is
Simga'_kn(omega) = Sigma_kn(omega + E^0_kn)
Returns: sigma[nkpt,nband,nomegase]
"""
self.sigma = self.get_self_energy(
mode=False,
temperature=False,
omega=True,
dynamical=True,
only_sternheimer=False,
only_active=True,
)
# nkpt, nband, nomegase, nband
self.sigma = einsum('lkn->knl', self.sigma) # FIXME why??
return self.sigma
def get_zp_self_energy_sternheimer(self):
"""
Compute the zp frequency-dependent dynamical self-energy
from one q-point.
Only include the Sternheimer contribution.
The self-energy is evaluated on a frequency mesh 'omegase'
that is shifted by the bare energies, such that, what is retured is
Simga'_kn(omega) = Sigma_kn(omega + E^0_kn)
Returns: sigma[nkpt,nband,nomegase]
"""
self.sigma = self.get_self_energy(
mode=False,
temperature=False,
omega=True,
dynamical=True,
only_sternheimer=True,
only_active=False,
)
# nkpt, nband, nomegase, nband
self.sigma = einsum('lkn->knl', self.sigma) # FIXME why??
return self.sigma
def get_td_self_energy_active(self):
    """
    Compute the temperature- and frequency-dependent dynamical
    self-energy from one q-point, including only the active space
    contribution.

    The self-energy is evaluated on a frequency mesh 'omegase' that is
    shifted by the bare energies, such that what is returned is
    Sigma'_kn(omega, T) = Sigma_kn(omega + E^0_kn, T).

    Returns: sigma[nkpt, nband, nomegase, ntemp]
    """
    sigma = self.get_self_energy(
        mode=False, temperature=True, omega=True, dynamical=True,
        only_sternheimer=False, only_active=True)
    # Reorder axes (t, l, k, n) -> (k, n, l, t).
    self.sigma = einsum('tlkn->knlt', sigma)
    return self.sigma
def get_td_self_energy_sternheimer(self):
    """
    Compute the temperature- and frequency-dependent dynamical
    self-energy from one q-point, including only the Sternheimer
    contribution.

    The self-energy is evaluated on a frequency mesh 'omegase' that is
    shifted by the bare energies, such that what is returned is
    Sigma'_kn(omega, T) = Sigma_kn(omega + E^0_kn, T).

    Returns: sigma[nkpt, nband, nomegase, ntemp]
    """
    sigma = self.get_self_energy(
        mode=False, temperature=True, omega=True, dynamical=True,
        only_sternheimer=True, only_active=False)
    # Reorder axes (t, l, k, n) -> (k, n, l, t).
    self.sigma = einsum('tlkn->knlt', sigma)
    return self.sigma
def get_zpr_static_sternheimer(self):
    """
    Compute the q-point zero-point renormalization contribution in a
    static scheme, from the Sternheimer contribution only.
    """
    sigma = self.get_self_energy(
        mode=False, temperature=False, omega=False, dynamical=False,
        only_sternheimer=True, only_active=False)
    self.zpr = sigma.real
    return self.zpr
def get_zpr_static(self):
    """
    Compute the q-point zero-point renormalization contribution in a
    static scheme, with the transitions split between active and
    Sternheimer.
    """
    sigma = self.get_self_energy(
        mode=False, temperature=False, omega=False, dynamical=False,
        only_sternheimer=False, only_active=False)
    self.zpr = sigma.real
    return self.zpr
def get_zpr_dynamical(self):
    """
    Compute the q-point zero-point renormalization contribution in a
    dynamical scheme, with the transitions split between active and
    Sternheimer.
    """
    sigma = self.get_self_energy(
        mode=False, temperature=False, omega=False, dynamical=True,
        only_sternheimer=False, only_active=False)
    self.zpr = sigma.real
    return self.zpr
def get_tdr_static(self):
    """
    Compute the q-point contribution to the temperature-dependent
    renormalization in a static scheme, with the transitions split
    between active and Sternheimer.

    Returns: tdr[nkpt, nband, ntemp]
    """
    tdr = self.get_self_energy(
        mode=False, temperature=True, omega=False, dynamical=False,
        only_sternheimer=False, only_active=False).real
    # Reorder axes (t, k, n) -> (k, n, t): temperature axis moved last.
    self.tdr = einsum('tkn->knt', tdr)
    return self.tdr
def get_tdr_dynamical(self):
    """
    Compute the q-point contribution to the temperature-dependent
    renormalization in a dynamical scheme.

    Returns: tdr[nkpt, nband, ntemp]
    """
    tdr = self.get_self_energy(
        mode=False, temperature=True, omega=False, dynamical=True,
        only_sternheimer=False, only_active=False).real
    # Reorder axes (t, k, n) -> (k, n, t): temperature axis moved last.
    self.tdr = einsum('tkn->knt', tdr)
    return self.tdr
def get_tdr_static_nosplit(self):
    """
    Compute the q-point contribution to the temperature-dependent
    renormalization in a static scheme (Sternheimer contribution only;
    no active/Sternheimer split).

    Returns: tdr[nkpt, nband, ntemp]
    """
    tdr = self.get_self_energy(
        mode=False, temperature=True, omega=False, dynamical=False,
        only_sternheimer=True, only_active=False).real
    # Reorder axes (t, k, n) -> (k, n, t): temperature axis moved last.
    self.tdr = einsum('tkn->knt', tdr)
    return self.tdr
def get_tdr_dynamical_active(self):
    """
    Compute the q-point contribution to the temperature-dependent
    renormalization in a dynamical scheme, taking only the active space
    contribution.

    Returns: tdr[nkpt, nband, ntemp]
    """
    tdr = self.get_self_energy(
        mode=False, temperature=True, omega=False, dynamical=True,
        only_sternheimer=False, only_active=True).real
    # Reorder axes (t, k, n) -> (k, n, t): temperature axis moved last.
    self.tdr = einsum('tkn->knt', tdr)
    return self.tdr
def get_zpr_dynamical_active(self):
    """
    Compute the q-point contribution to the zero-point renormalization
    in a dynamical scheme, taking only the active space contribution.
    """
    sigma = self.get_self_energy(
        mode=False, temperature=False, omega=False, dynamical=True,
        only_sternheimer=False, only_active=True)
    self.zpr = sigma.real
    return self.zpr
def get_zpr_static_modes(self):
    """
    Compute the q-point zpr contribution in a static scheme, with the
    transitions split between active and Sternheimer, retaining the
    phonon-mode decomposition of the zpr.

    Returns: zpr[nmode, nkpt, nband]
    """
    sigma = self.get_self_energy(
        mode=True, temperature=False, omega=False, dynamical=False,
        only_sternheimer=False, only_active=False)
    # FIXME: should this be stored as self.zpr_mode instead?
    self.zpr = sigma.real
    return self.zpr
def get_zpb_dynamical(self):
    """
    Compute the zero-point broadening contribution from one q-point in a
    dynamical scheme. Only take the active space contribution.

    Returns: zpb[nkpt, nband]
    """
    self.zpb = self.get_broadening(
        mode=False, temperature=False, omega=False, dynamical=True)
    return self.zpb
def get_tdb_dynamical(self):
    """
    Compute the temperature-dependent broadening contribution from one
    q-point in a dynamical scheme. Only take the active space
    contribution.

    Returns: tdb[nkpt, nband, ntemp]
    """
    tdb = self.get_broadening(
        mode=False, temperature=True, omega=False, dynamical=True)
    # Reorder axes (t, k, n) -> (k, n, t): temperature axis moved last.
    self.tdb = einsum('tkn->knt', tdb)
    return self.tdb
def get_zpb_static(self):
    """
    Compute the zero-point broadening contribution from one q-point in a
    static scheme. Only take the active space contribution.

    Returns: zpb[nkpt, nband]
    """
    self.zpb = self.get_broadening(
        mode=False, temperature=False, omega=False, dynamical=False)
    return self.zpb
def get_tdb_static(self):
    """
    Compute the temperature-dependent broadening contribution from one
    q-point in a static scheme. Only take the active space contribution.

    Returns: tdb[nkpt, nband, ntemp]
    """
    tdb = self.get_broadening(
        mode=False, temperature=True, omega=False, dynamical=False)
    # Reorder axes (t, k, n) -> (k, n, t): temperature axis moved last.
    self.tdb = einsum('tkn->knt', tdb)
    return self.tdb
def get_tdb_static_nosplit(self):
    """
    Compute the q-point contribution to the temperature-dependent
    broadening in a static scheme from the EIGI2D files.

    Returns: tdb[nkpt, nband, ntemp]
    """
    # Accumulate as (ntemp, nkpt, nband); the axes are swapped at the end.
    self.tdb = zeros((self.ntemp, self.nkpt, self.nband), dtype=complex)

    # Reduced displacements (scaled with frequency).
    displ_red_FAN2, displ_red_DDW2 = self.ddb.get_reduced_displ_squared()
    bose = self.ddb.get_bose(self.temperatures)

    # Contract the second-order matrix elements with the displacements:
    # one (nkpt, nband) correction per phonon mode.
    fan_corrQ = einsum('ijklmn,olnkm->oij', self.eigi2d.EIG2D, displ_red_FAN2)

    # Thermal occupation factor (2 n_B + 1) per mode and temperature.
    for imode in np.arange(3 * self.natom):
        for tt, T in enumerate(self.temperatures):
            self.tdb[tt, :, :] += np.pi * fan_corrQ[imode, :, :] * (2 * bose[imode, tt] + 1.)

    self.tdb = self.tdb * self.wtq
    self.tdb = self.eig0.make_average(self.tdb)
    # Reorder (ntemp, nkpt, nband) -> (nkpt, nband, ntemp).
    self.tdb = np.einsum('tkn->knt', self.tdb)
    return self.tdb
def get_zpb_static_nosplit(self):
    """
    Compute the zero-point broadening contribution from one q-point in a
    static scheme from the EIGI2D files.

    Returns: zpb[nkpt, nband]
    """
    nkpt = self.nkpt
    nband = self.nband
    self.zpb = zeros((nkpt, nband), dtype=complex)

    # Get reduced displacement (scaled with frequency)
    displ_red_FAN2, displ_red_DDW2 = self.ddb.get_reduced_displ_squared()

    # Contract the second-order matrix elements with the displacements,
    # then sum over phonon modes.
    fan_corrQ = einsum('ijklmn,olnkm->oij', self.eigi2d.EIG2D, displ_red_FAN2)
    self.zpb += np.pi * np.sum(fan_corrQ, axis=0)
    self.zpb = self.zpb * self.wtq

    # Bug fix: the original formatted an undefined name 'broadening' here,
    # raising NameError whenever the warning fired; it also said "real
    # part" although the quantity checked is the imaginary part.
    if np.any(self.zpb[:, :].imag > tol12):
        warnings.warn(
            "The imaginary part of the broadening is non zero: {}".format(self.zpb))

    self.zpb = self.eig0.make_average(self.zpb)
    return self.zpb
def get_zpr_ddw_active(self):
    """
    Compute the q-point zpr contribution in a static scheme, keeping
    only the active-space Debye-Waller term (only_active=True,
    only_ddw=True).
    """
    sigma = self.get_self_energy(
        mode=False, temperature=False, omega=False, dynamical=False,
        only_sternheimer=False, only_active=True, only_ddw=True)
    self.zpr = sigma.real
    return self.zpr
|
jmbeuken/abinit
|
scripts/post_processing/ElectronPhononCoupling/ElectronPhononCoupling/core/qptanalyzer.py
|
Python
|
gpl-3.0
| 34,212
|
[
"DIRAC"
] |
b0b8dbe6d1c83ea8b9b09f42206c7561554a5156367c4b677ffa30ccc2ff52aa
|
#
# tsne.py
#
# Implementation of t-SNE in Python. The implementation was tested on Python 2.5.1, and it requires a working
# installation of NumPy. The implementation comes with an example on the MNIST dataset. In order to plot the
# results of this example, a working installation of matplotlib is required.
# The example can be run by executing: ipython tsne.py -pylab
#
#
# Created by Laurens van der Maaten on 20-12-08.
# Copyright (c) 2008 Tilburg University. All rights reserved.
import numpy as np
def Hbeta(D = np.array([]), beta = 1.0):
    """Compute the perplexity (as Shannon entropy H) and the P-row for a
    specific value of the precision (beta) of a Gaussian distribution."""
    # Unnormalized Gaussian kernel over the distances.
    kernel = np.exp(-beta * D.copy())
    total = sum(kernel)
    # H = log(Z) + beta * <D>_P
    H = np.log(total) + beta * np.sum(D * kernel) / total
    return H, kernel / total
def x2p(X = np.array([]), tol = 1e-5, perplexity = 30.0):
    """Perform a binary search over the Gaussian precision (beta) of every
    point so that each conditional Gaussian has the same perplexity.

    Fix: converted Python-2-only print statements to print() calls so the
    module imports under Python 3.

    :param X: (n, d) data array.
    :param tol: Tolerance on the entropy difference.
    :param perplexity: Target perplexity of each conditional distribution.
    :return: (n, n) conditional probability matrix P (zero diagonal).
    """
    # Initialize some variables
    print("Computing pairwise distances...")
    (n, d) = X.shape
    sum_X = np.sum(np.square(X), 1)
    # Squared Euclidean distances: D_ij = |x_i|^2 + |x_j|^2 - 2 x_i . x_j
    D = np.add(np.add(-2 * np.dot(X, X.T), sum_X).T, sum_X)
    P = np.zeros((n, n))
    beta = np.ones((n, 1))
    logU = np.log(perplexity)

    # Loop over all datapoints
    for i in range(n):
        # Print progress
        if i % 500 == 0:
            print("Computing P-values for point", i, "of", n, "...")

        # Compute the Gaussian kernel and entropy for the current precision,
        # excluding the point itself from its own distribution.
        betamin = -np.inf
        betamax = np.inf
        Di = D[i, np.concatenate((np.r_[0:i], np.r_[i+1:n]))]
        (H, thisP) = Hbeta(Di, beta[i])

        # Evaluate whether the perplexity is within tolerance
        Hdiff = H - logU
        tries = 0
        while np.abs(Hdiff) > tol and tries < 50:
            # Entropy too high -> increase precision; too low -> decrease it.
            if Hdiff > 0:
                betamin = beta[i]
                if betamax == np.inf or betamax == -np.inf:
                    beta[i] = beta[i] * 2
                else:
                    beta[i] = (beta[i] + betamax) / 2
            else:
                betamax = beta[i]
                if betamin == np.inf or betamin == -np.inf:
                    beta[i] = beta[i] / 2
                else:
                    beta[i] = (beta[i] + betamin) / 2

            # Recompute the values
            (H, thisP) = Hbeta(Di, beta[i])
            Hdiff = H - logU
            tries = tries + 1

        # Set the final row of P (the diagonal entry stays zero)
        P[i, np.concatenate((np.r_[0:i], np.r_[i+1:n]))] = thisP

    # Return final P-matrix
    print("Mean value of sigma:", np.mean(np.sqrt(1 / beta)))
    return P
def pca(X = np.array([]), no_dims = 50):
    """Run PCA on the NxD array X in order to reduce its dimensionality to
    no_dims dimensions.

    Fix: converted the Python-2-only print statement to a print() call so
    the module imports under Python 3.

    :param X: (n, d) data array.
    :param no_dims: Number of leading principal components to keep.
    :return: (n, no_dims) real-valued projection of the centered data.
    """
    print("Preprocessing the data using PCA...")
    (n, d) = X.shape
    # Center the data.
    X = X - np.tile(np.mean(X, 0), (n, 1))
    # Eigendecomposition of the (unscaled) covariance matrix X^T X.
    (l, M) = np.linalg.eig(np.dot(X.T, X))
    Y = np.dot(X, M[:, 0:no_dims])
    # X^T X is symmetric, so the projection must be (numerically) real.
    np.testing.assert_array_almost_equal(np.imag(Y), np.zeros(Y.shape))
    return np.real(Y)
def tsne(X = np.array([]), no_dims = 2, initial_dims = 50, perplexity = 30.0):
    """Run t-SNE on the dataset in the NxD array X to reduce its
    dimensionality to no_dims dimensions.

    The syntax of the function is Y = tsne.tsne(X, no_dims, perplexity),
    where X is an NxD NumPy array of dtype float64.

    Fixes: converted Python-2-only print statements to print() calls so the
    module imports under Python 3; renamed the loop variable `iter`, which
    shadowed the builtin; index the diagonal with np.arange for clarity.

    :return: (n, no_dims) embedding, or -1 on invalid input.
    """
    # Check inputs
    if X.dtype != "float64":
        print("Error: array X should have type float64.")
        return -1
    #if no_dims.__class__ != "<type 'int'>": # doesn't work yet!
    #   print("Error: number of dimensions should be an integer.")
    #   return -1

    # Initialize variables
    X = pca(X, initial_dims)
    (n, d) = X.shape
    max_iter = 1000
    initial_momentum = 0.5
    final_momentum = 0.8
    eta = 500                       # learning rate
    min_gain = 0.01
    Y = np.random.randn(n, no_dims)
    dY = np.zeros((n, no_dims))     # gradient
    iY = np.zeros((n, no_dims))     # momentum (update) term
    gains = np.ones((n, no_dims))

    # Compute symmetrized, normalized P-values
    P = x2p(X, 1e-5, perplexity)
    P = P + np.transpose(P)
    P = P / np.sum(P)
    P = P * 4                       # early exaggeration
    P = np.maximum(P, 1e-12)

    # Run iterations
    for iteration in range(max_iter):
        # Compute pairwise affinities (Student-t kernel), zero diagonal.
        sum_Y = np.sum(np.square(Y), 1)
        num = 1 / (1 + np.add(np.add(-2 * np.dot(Y, Y.T), sum_Y).T, sum_Y))
        num[np.arange(n), np.arange(n)] = 0
        Q = num / np.sum(num)
        Q = np.maximum(Q, 1e-12)

        # Compute gradient
        PQ = P - Q
        for i in range(n):
            dY[i, :] = np.sum(np.tile(PQ[:, i] * num[:, i], (no_dims, 1)).T * (Y[i, :] - Y), 0)

        # Perform the update
        if iteration < 20:
            momentum = initial_momentum
        else:
            momentum = final_momentum
        # Increase gain where gradient and velocity disagree, decay otherwise.
        gains = (gains + 0.2) * ((dY > 0) != (iY > 0)) + \
                (gains * 0.8) * ((dY > 0) == (iY > 0))
        gains[gains < min_gain] = min_gain
        iY = momentum * iY - eta * (gains * dY)
        Y = Y + iY
        Y = Y - np.tile(np.mean(Y, 0), (n, 1))

        # Compute current value of cost function
        if (iteration + 1) % 10 == 0:
            C = np.sum(P * np.log(P / Q))
            print("Iteration", (iteration + 1), ": error is", C)

        # Stop lying about P-values (end of early exaggeration)
        if iteration == 100:
            P = P / 4

    # Return solution
    return Y
if __name__ == "__main__":
    # Example: embed 2,500 MNIST digits. Requires the two data files below
    # in the working directory, plus matplotlib (pylab).
    # Fix: converted Python-2-only print statements to print() calls.
    import pylab
    print("Run Y = tsne.tsne(X, no_dims, perplexity) to perform t-SNE on your dataset.")
    print("Running example on 2,500 MNIST digits...")
    X = np.loadtxt("mnist2500_X.txt")
    labels = np.loadtxt("mnist2500_labels.txt")
    Y = tsne(X, 2, 50, 20.0)
    pylab.scatter(Y[:, 0], Y[:, 1], 20, labels)
    # NOTE(review): the original never calls pylab.show(); outside an
    # interactive/pylab session the figure may not be displayed.
|
afraser/CellProfiler-Analyst
|
cpa/tsne.py
|
Python
|
gpl-2.0
| 5,205
|
[
"Gaussian"
] |
19443cc85cf3ba62aad79dc3d4331e460883e83ce4333dcc8455d6f9c891d732
|
# Copyright 2017 Amazon.com, Inc. or its affiliates. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License"). You may not
# use this file except in compliance with the License. A copy of the License
# is located at
#
# http://aws.amazon.com/apache2.0/
#
# or in the "license" file accompanying this file. This file is distributed on
# an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either
# express or implied. See the License for the specific language governing
# permissions and limitations under the License.
import logging
import math
from typing import Dict, Optional, Tuple, Union
import mxnet as mx
import numpy as np
from . import constants as C
from . import utils
logger = logging.getLogger(__name__)
def activation(data: mx.sym.Symbol, act_type: str) -> mx.sym.Symbol:
    """
    Apply custom or standard activation.

    Custom activation types include:
     - Swish-1, also called Sigmoid-Weighted Linear Unit (SiLU): Ramachandran et
       al. (https://arxiv.org/pdf/1710.05941.pdf), Elfwing et al.
       (https://arxiv.org/pdf/1702.03118.pdf)
     - Gaussian Error Linear Unit (GELU): Hendrycks and Gimpel
       (https://arxiv.org/pdf/1606.08415.pdf)

    :param data: input Symbol of any shape.
    :param act_type: Type of activation.
    :return: output Symbol with same shape as input.
    """
    # TODO: Contribute these to MXNet? For now it appears that registered
    # activation types must be implemented in C++.
    if act_type == C.SWISH1:
        # x * sigmoid(x)
        return data * mx.sym.Activation(data, act_type="sigmoid")
    if act_type == C.GELU:
        # Tanh approximation of x * gaussian_cdf(x) used by Hendrycks and
        # Gimpel: 0.5 * x * (1 + tanh(sqrt(2/pi) * (x + 0.044715 * x^3)))
        inner = math.sqrt(2 / math.pi) * (data + (0.044715 * (data ** 3)))
        return 0.5 * data * (1 + mx.sym.Activation(inner, act_type="tanh"))
    # Fall back to MXNet's built-in activations.
    return mx.sym.Activation(data, act_type=act_type)
class LayerNormalization:
    """
    Implements Ba et al, Layer Normalization (https://arxiv.org/abs/1607.06450).

    :param prefix: Optional prefix of layer name.
    :param scale: Optional variable for scaling of shape (num_hidden,). Will be created if None.
    :param shift: Optional variable for shifting of shape (num_hidden,). Will be created if None.
    :param scale_init: Initial value of scale variable if scale is None. Default 1.0.
    :param shift_init: Initial value of shift variable if shift is None. Default 0.0.
    """

    def __init__(self,
                 prefix: str = 'layernorm',
                 scale: Optional[mx.sym.Symbol] = None,
                 shift: Optional[mx.sym.Symbol] = None,
                 scale_init: float = 1.0,
                 shift_init: float = 0.0) -> None:
        self.prefix = prefix
        # Create gamma/beta parameter variables unless they are shared in.
        if scale is None:
            scale = mx.sym.Variable('%s_gamma' % prefix,
                                    init=mx.init.Constant(value=scale_init))
        if shift is None:
            shift = mx.sym.Variable('%s_beta' % prefix,
                                    init=mx.init.Constant(value=shift_init))
        self.scale = scale
        self.shift = shift

    def __call__(self, data: mx.sym.Symbol, eps: float = 1e-06) -> mx.sym.Symbol:
        """
        Normalizes hidden units of data as follows:

        data = scale * (data - mean) / sqrt(var + eps) + shift

        Normalization is performed over the last dimension of the input data.

        :param data: Data to normalize. Shape: (d0, ..., dn, num_hidden).
        :param eps: Variance epsilon.
        :return: inputs_norm: Normalized inputs. Shape: (d0, ..., dn, num_hidden).
        """
        return mx.sym.LayerNorm(data=data, gamma=self.scale, beta=self.shift,
                                axis=-1, eps=eps, output_mean_var=False,
                                name=self.prefix)
class LHUC:
    """
    Learning Hidden Unit Contribution

    David Vilar. "Learning Hidden Unit Contribution for Adapting Neural
    Machine Translation Models" NAACL 2018

    :param num_hidden: Number of hidden units of the layer to be modified.
    :param weight: Optional parameter vector.
    :param prefix: Optional prefix for created parameters (if not given as weight).
    """

    def __init__(self,
                 num_hidden: int,
                 weight: Optional[mx.sym.Symbol] = None,
                 prefix: str = "") -> None:
        self.num_hidden = num_hidden
        self.prefix = prefix
        if weight is not None:
            self.params = weight
        else:
            # One learnable contribution weight per hidden unit.
            self.params = mx.sym.Variable(self.prefix + C.LHUC_NAME,
                                          shape=(self.num_hidden,),
                                          init=mx.init.Uniform(0.1),
                                          dtype="float32")

    def __call__(self,
                 inputs: mx.sym.Symbol,
                 name: Optional[str] = None) -> mx.sym.Symbol:
        # A sigmoid with amplitude 2 weights the hidden units: the activation
        # is dampened when the sigmoid is close to 0 and strengthened when it
        # is close to 2 (see the original paper).
        weight_vector = 2 * mx.sym.Activation(data=self.params, act_type="sigmoid")
        return mx.sym.broadcast_mul(weight_vector, inputs, name=name)
class WeightNormalization:
    """
    Implements Weight Normalization, see Salimans & Kingma 2016 (https://arxiv.org/abs/1602.07868).
    For a given tensor the normalization is done per hidden dimension.

    :param weight: Weight tensor of shape: (num_hidden, d1, d2, ...).
    :param num_hidden: Size of the first dimension.
    :param ndim: The total number of dimensions of the weight tensor.
    :param prefix: The prefix used for naming.
    """

    def __init__(self, weight, num_hidden, ndim=2, prefix: str = '') -> None:
        self.prefix = prefix
        self.weight = weight
        self.num_hidden = num_hidden
        # One learnable scale per hidden unit, broadcastable over the
        # remaining (ndim - 1) axes.
        scale_shape = tuple([num_hidden] + [1] * (ndim - 1))
        self.scale = mx.sym.Variable("%swn_scale" % prefix,
                                     shape=scale_shape,
                                     init=mx.init.Constant(value=1.0))

    def __call__(self, weight: Optional[mx.nd.NDArray] = None, scale: Optional[mx.nd.NDArray] = None) -> mx.sym.Symbol:
        """
        Normalize each hidden dimension and scale afterwards

        :return: A weight normalized weight tensor.
        """
        if weight is None and scale is None:
            # Symbolic path: use the stored weight/scale variables.
            normalized = mx.sym.L2Normalization(self.weight, mode='instance')
            return mx.sym.broadcast_mul(lhs=normalized, rhs=self.scale,
                                        name="%swn_scale" % self.prefix)
        # Imperative path: both weight and scale must be NDArrays.
        assert isinstance(weight, mx.nd.NDArray)
        assert isinstance(scale, mx.nd.NDArray)
        return mx.nd.broadcast_mul(lhs=mx.nd.L2Normalization(weight, mode='instance'), rhs=scale)
class OutputLayer:
    """
    Defines the output layer of Sockeye decoders. Supports weight tying and weight normalization.

    :param hidden_size: Decoder hidden size.
    :param vocab_size: Target vocabulary size.
    :param weight: Optional shared weight Symbol (weight tying); created here if None.
    :param weight_normalization: Whether to apply weight normalization.
    :param prefix: Prefix used for naming.
    """

    def __init__(self,
                 hidden_size: int,
                 vocab_size: int,
                 weight: Optional[mx.sym.Symbol],
                 weight_normalization: bool,
                 prefix: str = C.DEFAULT_OUTPUT_LAYER_PREFIX) -> None:
        self.vocab_size = vocab_size
        self.prefix = prefix

        if weight is None:
            # Projection weight of shape (vocab_size, hidden_size).
            self.w = mx.sym.Variable("%sweight" % self.prefix, shape=(vocab_size, hidden_size))
        else:
            # Weight tying: reuse an externally provided weight (e.g. the
            # target embedding matrix).
            self.w = weight

        self.weight_normalization = weight_normalization
        if weight_normalization:
            logger.info("Normalizing output layer weights.")
            self.weight_norm = WeightNormalization(self.w,
                                                   num_hidden=vocab_size,
                                                   ndim=2,
                                                   prefix=self.prefix)
            # Replace the raw weight with its weight-normalized symbol.
            self.w = self.weight_norm()

        self.b = mx.sym.Variable("%sbias" % self.prefix)

    def __call__(self,
                 hidden: Union[mx.sym.Symbol, mx.nd.NDArray],
                 weight: Optional[mx.nd.NDArray] = None,
                 bias: Optional[mx.nd.NDArray] = None):
        """
        Linear transformation to vocab size. Returns logits.

        :param hidden: Decoder representation for n elements. Shape: (n, self.num_hidden).
        :param weight: Weight NDArray (required for the NDArray path only).
        :param bias: Bias NDArray (required for the NDArray path only).
        :return: Logits. Shape(n, self.vocab_size).
        """
        if isinstance(hidden, mx.sym.Symbol):
            # Symbolic (training/graph) path, using the stored parameters.
            # TODO dropout?
            return mx.sym.FullyConnected(data=hidden,
                                         num_hidden=self.vocab_size,
                                         weight=self.w,
                                         bias=self.b,
                                         flatten=False,
                                         name=C.LOGITS_NAME)

        # Equivalent NDArray implementation (requires passed weights/biases)
        assert isinstance(hidden, mx.nd.NDArray)
        utils.check_condition(weight is not None and bias is not None,
                              "OutputLayer NDArray implementation requires passing weight and bias NDArrays.")
        return mx.nd.FullyConnected(data=hidden,
                                    num_hidden=bias.shape[0],
                                    weight=weight,
                                    bias=bias,
                                    flatten=False)
def split_heads(x: mx.sym.Symbol, depth_per_head: int, heads: int) -> mx.sym.Symbol:
    """
    Fold the head dimension into the batch dimension, dividing depth by the
    number of heads.

    :param x: Symbol of shape (batch, length, depth).
    :param depth_per_head: Depth per head.
    :param heads: Number of heads.
    :return: Symbol of shape (batch * heads, length, depth_per_head).
    """
    # (batch, length, depth) -> (batch, length, heads, depth_per_head)
    reshaped = mx.sym.reshape(data=x, shape=(0, -1, heads, depth_per_head))
    # -> (batch, heads, length, depth_per_head)
    transposed = mx.sym.transpose(data=reshaped, axes=(0, 2, 1, 3))
    # -> (batch * heads, length, depth_per_head); -3 merges the first two axes.
    return mx.sym.reshape(data=transposed, shape=(-3, -1, depth_per_head))
def combine_heads(x: mx.sym.Symbol, depth_per_head: int, heads: int) -> mx.sym.Symbol:
    """
    Inverse of split_heads: separate the heads out of the batch dimension and
    merge them back into the depth dimension.

    :param x: Symbol of shape (batch * heads, length, depth_per_head).
    :param depth_per_head: Depth per head.
    :param heads: Number of heads.
    :return: Symbol of shape (batch, length, depth).
    """
    # (batch * heads, length, depth_per_head) -> (batch, heads, length, depth_per_head);
    # -4 splits the first axis into (batch, heads).
    unfolded = mx.sym.reshape(data=x, shape=(-4, -1, heads, 0, depth_per_head))
    # -> (batch, length, heads, depth_per_head)
    unfolded = mx.sym.transpose(unfolded, axes=(0, 2, 1, 3))
    # -> (batch, length, depth)
    return mx.sym.reshape(unfolded, shape=(-1, 0, depth_per_head * heads))
def broadcast_to_heads(x: mx.sym.Symbol, num_heads: int, ndim: int, fold_heads: bool = True) -> mx.sym.Symbol:
    """
    Broadcasts batch-major input of shape (batch, d1 ... dn-1) to
    (batch * heads, d1 ... dn-1).

    :param x: Batch-major input. Shape: (batch, d1 ... dn-1).
    :param num_heads: Number of heads.
    :param ndim: Number of dimensions in x.
    :param fold_heads: Whether to fold heads dimension into batch dimension.
    :return: Tensor with each sample repeated heads-many times.
             Shape: (batch * heads, d1 ... dn-1) if fold_heads == True,
             (batch, heads, d1 ... dn-1) else.
    """
    # Zeros in a reshape/broadcast shape mean "keep this axis unchanged".
    trailing = [0] * (ndim - 1)
    # (batch, 1, d1 ... dn-1)
    expanded = mx.sym.expand_dims(x, axis=1)
    # (batch, heads, d1 ... dn-1)
    tiled = mx.sym.broadcast_to(expanded, shape=[0, num_heads] + trailing)
    if not fold_heads:
        return tiled
    # (batch * heads, d1 ... dn-1); -3 merges the first two axes.
    return mx.sym.reshape(tiled, shape=[-3] + trailing)
def dot_attention(queries: mx.sym.Symbol,
                  keys: mx.sym.Symbol,
                  values: mx.sym.Symbol,
                  lengths: Optional[mx.sym.Symbol] = None,
                  dropout: float = 0.0,
                  bias: Optional[mx.sym.Symbol] = None,
                  prefix: Optional[str] = ''):
    """
    Computes dot attention for a set of queries, keys, and values.

    :param queries: Attention queries. Shape: (n, lq, d).
    :param keys: Attention keys. Shape: (n, lk, d).
    :param values: Attention values. Shape: (n, lk, dv).
    :param lengths: Optional sequence lengths of the keys. Shape: (n,).
    :param dropout: Dropout probability.
    :param bias: Optional 3d bias tensor.
    :param prefix: Optional prefix
    :return: 'Context' vectors for each query. Shape: (n, lq, dv).
    """
    # At least one masking mechanism (lengths or additive bias) is required.
    utils.check_condition(lengths is not None or bias is not None,
                          "Must provide either length or bias argument for masking")

    # Raw attention scores: (n, lq, lk)
    logits = mx.sym.batch_dot(lhs=queries, rhs=keys, transpose_b=True, name='%sdot' % prefix)

    if lengths is not None:
        # mask lk dimension
        # SequenceMask masks along the first axis, so bring lk to the front:
        # (lk, n, lq)
        logits = mx.sym.transpose(data=logits, axes=(2, 0, 1))
        # Positions past each key length receive a large negative value so
        # they effectively vanish after the softmax.
        logits = mx.sym.SequenceMask(data=logits,
                                     use_sequence_length=True,
                                     sequence_length=lengths,
                                     value=C.LARGE_NEGATIVE_VALUE)
        # restore (n, lq, lk)
        logits = mx.sym.transpose(data=logits, axes=(1, 2, 0))

    if bias is not None:
        # Additive mask/bias, broadcast over the score tensor.
        logits = mx.sym.broadcast_add(logits, bias, name='%sbias_add' % prefix)

    # Normalize scores over the key axis; optionally drop out attention probs.
    probs = mx.sym.softmax(logits, axis=-1)
    probs = mx.sym.Dropout(probs, p=dropout) if dropout > 0.0 else probs

    # (n, lq, lk) x (n, lk, dv) -> (n, lq, dv)
    return mx.sym.batch_dot(lhs=probs, rhs=values, name='%scontexts' % prefix)
class MultiHeadAttentionBase:
    """
    Base class for Multi-head attention.

    :param prefix: Attention prefix.
    :param depth_att: Attention depth / number of hidden units.
    :param heads: Number of attention heads.
    :param depth_out: Output depth / number of output units.
    :param dropout: Dropout probability on attention scores
    """

    def __init__(self,
                 prefix: str,
                 depth_att: int = 512,
                 heads: int = 8,
                 depth_out: int = 512,
                 dropout: float = 0.0) -> None:
        self.prefix = prefix
        utils.check_condition(depth_att % heads == 0,
                              "Number of heads (%d) must divide attention depth (%d)" % (heads, depth_att))
        self.depth = depth_att
        self.heads = heads
        self.depth_out = depth_out
        self.dropout = dropout
        self.depth_per_head = self.depth // self.heads

        # Final projection from concatenated heads to the output depth.
        self.w_h2o = mx.sym.Variable("%sh2o_weight" % prefix)

    def _attend(self,
                queries: mx.sym.Symbol,
                keys: mx.sym.Symbol,
                values: mx.sym.Symbol,
                lengths: Optional[mx.sym.Symbol] = None,
                bias: Optional[mx.sym.Symbol] = None) -> mx.sym.Symbol:
        """
        Returns context vectors of multi-head dot attention.

        :param queries: Query tensor. Shape: (batch_size, query_max_length, depth).
        :param keys: Keys. Shape: (batch_size, memory_max_length, depth).
        :param values: Values. Shape: (batch_size, memory_max_length, depth).
        :param lengths: Optional lengths of keys. Shape: (batch_size,).
        :param bias: Optional 3d bias.
        :return: Context vectors. Shape: (batch_size, query_max_length, output_depth).
        """
        # scale by sqrt(depth_per_head) to keep dot-product magnitudes stable
        queries = queries * (self.depth_per_head ** -0.5)

        # Fold heads into the batch dimension: (batch*heads, length, depth/heads)
        queries = split_heads(queries, self.depth_per_head, self.heads)
        keys = split_heads(keys, self.depth_per_head, self.heads)
        values = split_heads(values, self.depth_per_head, self.heads)
        # Repeat lengths per head to match the folded batch dimension.
        lengths = broadcast_to_heads(lengths, self.heads, ndim=1, fold_heads=True) if lengths is not None else lengths

        # (batch*heads, query_max_length, depth_per_head)
        contexts = dot_attention(queries, keys, values,
                                 lengths=lengths, dropout=self.dropout, bias=bias, prefix=self.prefix)

        # (batch, query_max_length, depth)
        contexts = combine_heads(contexts, self.depth_per_head, self.heads)

        # contexts: (batch, query_max_length, output_depth)
        contexts = mx.sym.FullyConnected(data=contexts,
                                         weight=self.w_h2o,
                                         no_bias=True,
                                         num_hidden=self.depth_out,
                                         flatten=False)

        return contexts
class MultiHeadSelfAttention(MultiHeadAttentionBase):
    """
    Multi-head self-attention. Independent linear projections of inputs serve as
    queries, keys, and values for the attention.

    :param prefix: Attention prefix.
    :param depth_att: Attention depth / number of hidden units.
    :param heads: Number of attention heads.
    :param depth_out: Output depth / number of output units.
    :param dropout: Dropout probability on attention scores
    """

    def __init__(self,
                 prefix: str,
                 depth_att: int = 512,
                 heads: int = 8,
                 depth_out: int = 512,
                 dropout: float = 0.0) -> None:
        super().__init__(prefix, depth_att, heads, depth_out, dropout)

        # Single combined projection producing queries, keys, and values.
        self.w_i2h = mx.sym.Variable("%si2h_weight" % prefix)

    def __call__(self,
                 inputs: mx.sym.Symbol,
                 input_lengths: Optional[mx.sym.Symbol] = None,
                 bias: Optional[mx.sym.Symbol] = None,
                 cache: Optional[Dict[str, Optional[mx.sym.Symbol]]] = None) -> mx.sym.Symbol:
        """
        Computes multi-head attention on a set of inputs, serving as queries, keys, and values.
        If sequence lengths are provided, they will be used to mask the attention scores.
        A bias mask may also be used to mask the attention scores.
        May also use a cache of previously computed inputs.
        Returns a symbol of shape (batch, max_length, output_depth).

        :param inputs: Input Data. Shape: (batch, max_length, input_depth).
        :param input_lengths: Optional lengths of inputs to mask attention scores. Shape: (batch, 1).
        :param bias: Optional 3d bias tensor to mask attention scores.
        :param cache: Optional dictionary of previously computed keys and values.
                      NOTE: mutated in place with the updated keys/values.
        :return: Symbol of shape (batch, max_length, output_depth).
        """
        # combined: (batch, max_length, depth * 3)
        combined = mx.sym.FullyConnected(data=inputs,
                                         weight=self.w_i2h,
                                         no_bias=True,
                                         num_hidden=self.depth * 3,
                                         flatten=False,
                                         name="%sqkv_transform" % self.prefix)
        # split into query, keys and values
        # (batch, max_length, depth)
        # pylint: disable=unbalanced-tuple-unpacking
        queries, keys, values = mx.sym.split(data=combined, num_outputs=3, axis=2)

        if cache is not None:
            # append new keys & values to cache, update the cache
            # (used for incremental decoding: each step contributes one
            # position along the time axis)
            keys = cache['k'] = keys if cache['k'] is None else mx.sym.concat(cache['k'], keys, dim=1)
            values = cache['v'] = values if cache['v'] is None else mx.sym.concat(cache['v'], values, dim=1)

        return self._attend(queries,
                            keys,
                            values,
                            lengths=input_lengths,
                            bias=bias)
class MultiHeadAttention(MultiHeadAttentionBase):
    """
    Multi-head attention layer for queries independent from keys/values.

    :param prefix: Attention prefix.
    :param depth_att: Attention depth / number of hidden units.
    :param heads: Number of attention heads.
    :param depth_out: Output depth / number of output units.
    :param dropout: Dropout probability on attention scores
    """

    def __init__(self,
                 prefix: str,
                 depth_att: int = 512,
                 heads: int = 8,
                 depth_out: int = 512,
                 dropout: float = 0.0) -> None:
        super().__init__(prefix, depth_att, heads, depth_out, dropout)

        # Separate projections: queries come from the decoder side, keys and
        # values from the memory (e.g. encoder) side.
        self.w_q2h = mx.sym.Variable("%sq2h_weight" % prefix)
        self.w_k2h = mx.sym.Variable("%sk2h_weight" % prefix)
        self.w_v2h = mx.sym.Variable("%sv2h_weight" % prefix)

    def __call__(self,
                 queries: mx.sym.Symbol,
                 memory: mx.sym.Symbol,
                 memory_lengths: Optional[mx.sym.Symbol] = None,
                 bias: Optional[mx.sym.Symbol] = None) -> mx.sym.Symbol:
        """
        Computes multi-head attention for queries given a memory tensor.
        If sequence lengths are provided, they will be used to mask the attention scores.
        A bias mask may also be used to mask the attention scores.
        Returns a symbol of shape (batch, max_length, output_depth).

        :param queries: Query tensor. Shape: (batch, query_max_length, input_depth).
        :param memory: Memory data to attend to. Shape: (batch, memory_max_length, input_depth).
        :param memory_lengths: Optional lengths of memory to mask attention scores. Shape: (batch, 1).
        :param bias: Optional 3d bias tensor to mask attention scores.
        :return: Symbol of shape (batch, query_seq_len, output_depth).
        """
        # (batch, query_max_length, depth)
        queries = mx.sym.FullyConnected(data=queries,
                                        weight=self.w_q2h,
                                        no_bias=True,
                                        num_hidden=self.depth,
                                        flatten=False,
                                        name="%sq_transform" % self.prefix)

        # (batch, memory_max_length, depth)
        keys = mx.sym.FullyConnected(data=memory,
                                     weight=self.w_k2h,
                                     no_bias=True,
                                     num_hidden=self.depth,
                                     flatten=False,
                                     name="%sk_transform" % self.prefix)

        # (batch, memory_max_length, depth)
        values = mx.sym.FullyConnected(data=memory,
                                       weight=self.w_v2h,
                                       no_bias=True,
                                       num_hidden=self.depth,
                                       flatten=False,
                                       name="%sv_transform" % self.prefix)

        return self._attend(queries,
                            keys,
                            values,
                            bias=bias,
                            lengths=memory_lengths)
class ProjectedDotAttention:
    """
    Dot attention layer for queries independent from keys/values.
    Single-head: queries and memory are projected to num_hidden, but heads
    are not split.

    :param prefix: Attention prefix.
    :param num_hidden: Attention depth / number of hidden units.
    """

    def __init__(self,
                 prefix: str,
                 num_hidden) -> None:
        self.prefix = prefix
        self.num_hidden = num_hidden
        # Query projection parameters.
        self.w_q2h = mx.sym.Variable("%sq2h_weight" % prefix)
        self.b_q2h = mx.sym.Variable("%sq2h_bias" % prefix)
        # Combined key/value projection parameters (split after projection).
        self.w_kv2h = mx.sym.Variable("%skv2h_weight" % prefix)
        self.b_kv2h = mx.sym.Variable("%skv2h_bias" % prefix)

    def __call__(self,
                 queries: mx.sym.Symbol,
                 memory: mx.sym.Symbol,
                 memory_lengths: mx.sym.Symbol) -> mx.sym.Symbol:
        """
        Apply project, apply dot attention and return new context vectors.

        :param queries: Symbol of shape (batch, queries_max_length, input_num_hidden).
        :param memory: Symbol of shape (batch, memory_max_length, input_num_hidden).
        :param memory_lengths: Symbol of shape (batch, 1).
        :return: Symbol of shape (batch, queries_max_length, num_hidden).
        """
        # (batch, memory_max_length, num_hidden * 2)
        combined = mx.sym.FullyConnected(data=memory,
                                         weight=self.w_kv2h,
                                         bias=self.b_kv2h,
                                         num_hidden=self.num_hidden * 2,
                                         flatten=False,
                                         name="%skv_transform" % self.prefix)

        # split into keys and values
        # pylint: disable=unbalanced-tuple-unpacking
        keys, values = mx.sym.split(data=combined, num_outputs=2, axis=2)

        # (batch, queries_max_length, num_hidden)
        queries = mx.sym.FullyConnected(data=queries,
                                        weight=self.w_q2h,
                                        bias=self.b_q2h,
                                        num_hidden=self.num_hidden,
                                        flatten=False,
                                        name="%sq_transform" % self.prefix)

        # scale by sqrt(num_hidden) to keep dot-product magnitudes stable
        queries = queries * (self.num_hidden ** -0.5)

        # (batch, queries_max_length, num_hidden)
        contexts = dot_attention(queries, keys, values, memory_lengths)

        return contexts
class PlainDotAttention:
    """
    Unprojected dot attention: queries attend directly over the memory, which
    serves as both the keys and the values.
    """

    def __call__(self,
                 queries: mx.sym.Symbol,
                 memory: mx.sym.Symbol,
                 memory_lengths: mx.sym.Symbol) -> mx.sym.Symbol:
        """
        Returns a symbol of shape (batch, max_length, output_depth).

        :param queries: Symbol of shape (batch, queries_max_length, input_depth).
        :param memory: Symbol of shape (batch, memory_max_length, input_depth).
        :param memory_lengths: Symbol of shape (batch, 1).
        :return: Symbol of shape (batch, queries_max_length, output_depth).
        """
        # The memory acts as both keys and values; lengths mask the scores.
        return dot_attention(queries, memory, memory, memory_lengths)
class PositionalEncodings(mx.operator.CustomOp):
    """
    Custom operator producing a constant symbol of shape (1, max_seq_len, num_embed)
    with fixed positional encodings as in Vaswani et al, 2017.

    :param length: Maximum sequence length.
    :param depth: Embedding size.
    """

    def __init__(self, length: int, depth: int) -> None:
        super().__init__()
        # Precomputed once; forward() simply copies this constant out.
        self.encodings = self.get_encodings(length, depth)

    @staticmethod
    def get_encodings(length, depth) -> np.ndarray:
        """Build the (1, length, depth) sinusoidal encoding table."""
        utils.check_condition(depth % 2 == 0, "Positional embeddings require an even embedding size it "
                                              "is however %d." % depth)
        # (1, depth/2) channel indices i
        channels = np.arange(depth // 2).reshape((1, -1))
        # (length, 1) position indices p
        positions = np.arange(0, length).reshape((-1, 1))
        # angle(p, i) = p / 10000^(2i / depth); broadcasts to (length, depth/2)
        scaled_positions = positions / np.power(10000, (2 * channels) / depth)
        # sinusoids:
        sin = np.sin(scaled_positions)
        # cosines:
        cos = np.cos(scaled_positions)
        # NOTE: hstack CONCATENATES the sin block and the cos block along the
        # depth axis (first depth/2 channels are sin, last depth/2 are cos);
        # it does not interleave them. Result shape: (1, length, depth).
        encodings = np.hstack([sin, cos]).reshape(1, length, depth)
        return encodings

    def forward(self, is_train, req, in_data, out_data, aux):
        # Copy the precomputed constant into the output slot according to req.
        self.assign(out_data[0], req[0], self.encodings)

    def backward(self, req, out_grad, in_data, out_data, in_grad, aux):
        # Constant output: no gradient flows to any input.
        pass
@mx.operator.register("positional_encodings")
class PositionalEncodingsProp(mx.operator.CustomOpProp):
    """
    Shape/type inference and factory for the positional_encodings custom op.

    MXNet passes custom-op constructor arguments as strings, hence the str
    parameters converted to int here.
    """

    def __init__(self, length: str, depth: str) -> None:
        super().__init__()
        self.length = int(length)
        self.depth = int(depth)

    def list_arguments(self):
        # The op takes no inputs.
        return []

    def list_outputs(self):
        return ['output']

    def infer_shape(self, in_shape):
        # No inputs; a single output of shape (1, length, depth); no aux state.
        return [], [(1, self.length, self.depth)], []

    def infer_type(self, in_type):
        return [], [np.float32], []

    def create_operator(self, ctx, shapes, dtypes):
        return PositionalEncodings(length=self.length, depth=self.depth)
|
artemsok/sockeye
|
sockeye/layers.py
|
Python
|
apache-2.0
| 28,551
|
[
"Gaussian"
] |
9a3f8675dfffca44bf2ab512c14ef444a2c07019fbbd675c1ad0352c1412c641
|
import copy
import hyperchamber as hc
import inspect
import math
import operator
import os
import re
from functools import reduce
import pyparsing
import hypergan
import torch
import torch.nn as nn
from .gan_component import GANComponent
from hypergan.gan_component import ValidationException
from hypergan.layer_shape import LayerShape
from hypergan.distributions.base_distribution import BaseDistribution
from hypergan.modules.adaptive_instance_norm import AdaptiveInstanceNorm
from hypergan.modules.attention import Attention
from hypergan.modules.const import Const
from hypergan.modules.learned_noise import LearnedNoise
from hypergan.modules.modulated_conv2d import ModulatedConv2d, Blur, EqualLinear
from hypergan.modules.multi_head_attention import MultiHeadAttention
from hypergan.modules.reshape import Reshape
from hypergan.modules.no_op import NoOp
from hypergan.modules.scaled_conv2d import ScaledConv2d
from hypergan.modules.variational import Variational
from hypergan.modules.pixel_norm import PixelNorm
import torchvision
import hypergan as hg
class ConfigurableComponent(GANComponent):
custom_layers = {}
def __init__(self, gan, config, input=None, input_shape=None, context_shapes = {}, input_is_latent=False):
self.current_size = LayerShape(gan.channels(), gan.height(), gan.width())
if isinstance(input, GANComponent):
if hasattr(input, 'current_height'):
self.current_size = LayerShape(input.current_channels, input.current_height, input.current_width)
elif hasattr(input, 'current_channels'):
self.current_size = LayerShape(input.current_channels)
else:
self.current_size = input.current_size
if input_shape is not None:
self.current_size = LayerShape(*input_shape)
self.layers = []
self.layer_shapes = []
self.untrainable_parameters = set()
self.layer_output_sizes = {}
self.nn_layers = []
self.layer_options = {}
self.parsed_layers = []
self.parser = hypergan.parser.Parser()
self.context_shapes = context_shapes
for key, shape in self.context_shapes.items():
self.layer_output_sizes[key] = shape
if isinstance(input, BaseDistribution):
self.is_latent = True
else:
self.is_latent = False
self._latent_parameters = []
self.layer_ops = {**self.activations(),
**ConfigurableComponent.custom_layers,
"add": hg.layers.Add,
"cat": hg.layers.Cat,
"channel_attention": hg.layers.ChannelAttention,
"efficient_attention": hg.layers.EfficientAttention,
"ez_norm": hg.layers.EzNorm,
"layer": hg.layers.Layer,
"minibatch": hg.layers.Minibatch,
"mul": hg.layers.Mul,
"multi_head_attention2": hg.layers.MultiHeadAttention, #TODO rename
"noise": hg.layers.Noise,
"pixel_shuffle": hg.layers.PixelShuffle,
"residual": hg.layers.Residual,
"resizable_stack": hg.layers.ResizableStack,
"segment_softmax": hg.layers.SegmentSoftmax,
"upsample": hg.layers.Upsample,
#easy to convert
"dropout": self.layer_dropout,
"identity": self.layer_identity,
"flatten": self.layer_flatten,
"pretrained": self.layer_pretrained,
"avg_pool": self.layer_avg_pool,#TODO handle dims
"pad": self.layer_pad,
"reshape": self.layer_reshape,
"split": self.layer_split,
#hard to convert
"adaptive_avg_pool": self.layer_adaptive_avg_pool,
"adaptive_avg_pool1d": self.layer_adaptive_avg_pool1d,
"adaptive_avg_pool3d": self.layer_adaptive_avg_pool3d,
"adaptive_instance_norm": self.layer_adaptive_instance_norm,
"attention": self.layer_attention,
"batch_norm": self.layer_batch_norm,
"batch_norm1d": self.layer_batch_norm1d,
"blur": self.layer_blur,
"const": self.layer_const,
"conv": self.layer_conv,
"conv1d": self.layer_conv1d,
"conv2d": self.layer_conv2d,
"conv3d": self.layer_conv3d,
"deconv": self.layer_deconv,
"equal_linear": self.layer_equal_linear,
"instance_norm": self.layer_instance_norm,
"instance_norm1d": self.layer_instance_norm1d,
"instance_norm3d": self.layer_instance_norm3d,
"latent": self.layer_latent,
"layer_norm": self.layer_norm,
"learned_noise": self.layer_learned_noise,
"linear": self.layer_linear,
"modulated_conv2d": self.layer_modulated_conv2d,
"multi_head_attention": self.layer_multi_head_attention,
"pixel_norm": self.layer_pixel_norm,
"resize_conv": self.layer_resize_conv,
"resize_conv2d": self.layer_resize_conv2d,
"resize_conv1d": self.layer_resize_conv1d,
"scaled_conv2d": self.layer_scaled_conv2d,
"subpixel": self.layer_subpixel,
"vae": self.layer_vae
#"linear_attention": hg.layers.LinearAttention,
#"make2d": self.layer_make2d,
#"make3d": self.layer_make3d,
# "crop": self.layer_crop,
# "dropout": self.layer_dropout,
# "noise": self.layer_noise, #TODO
#"attention": self.layer_attention, #TODO
#"const": self.layer_const, #TODO
#"gram_matrix": self.layer_gram_matrix, #TODO
#"image_statistics": self.layer_image_statistics, #TODO
#"knowledge_base": self.layer_knowledge_base, #TODO
#"layer_norm": self.layer_layer_norm,#TODO
#"mask": self.layer_mask,#TODO
#"match_support": self.layer_match_support,#TODO
#"pixel_norm": self.layer_pixel_norm,#TODO
#"progressive_replace": self.layer_progressive_replace,#TODO
#"reduce_sum": self.layer_reduce_sum,#TODO might want to just do "reduce sum" instead
#"relational": self.layer_relational,#TODO
#"unpool": self.layer_unpool, #TODO https://arxiv.org/abs/1505.04366
#"squash": self.layer_squash, #TODO
#"tensorflowcv": self.layer_tensorflowcv, #TODO layer torchvision instead?
#"turing_test": self.layer_turing_test, #TODO
#"two_sample_stack": self.layer_two_sample_stack, #TODO
#"zeros": self.layer_zeros, #TODO
#"zeros_like": self.layer_zeros_like #TODO
}
self.named_layers = {}
if not hasattr(gan, "named_layers"):
gan.named_layers = {}
self.subnets = hc.Config(hc.Config(config).subnets or {})
GANComponent.__init__(self, gan, config)
self.device = self.config.device or "cuda:0"
self.const_two = torch.Tensor([2.0]).float()[0].cuda()
self.const_one = torch.Tensor([1.0]).float()[0].cuda()
def required(self):
    """Configuration keys that must be present for this component."""
    return ["layers"]
def layer(self, name):
    """Look up a named layer, preferring GAN-global registrations over local ones.

    Returns None when no layer was registered under `name`.
    """
    for registry in (self.gan.named_layers, self.named_layers):
        if name in registry:
            return registry[name]
    return None
def create(self):
for layer in self.config.layers:
net = self.create_parsed_layer(layer)
self.nn_layers.append(net)
self.net = nn.ModuleList(self.nn_layers)
def create_parsed_layer(self, layer_defn):
config = self.config
parsed, layer = self.parse_layer(layer_defn)
self.parsed_layers.append(parsed)
self.layer_shapes.append(self.current_size)
return layer
def parse_layer(self, layer_defn):
print("Parsing layer:", layer_defn)
parsed = self.parser.parse_string(layer_defn)
parsed.parsed_options = hc.Config(parsed.options)
parsed.layer_defn = layer_defn
print("Parsed layer:", parsed.to_list())
layer = self.build_layer(parsed.layer_name, parsed.args, parsed.parsed_options)
return parsed, layer
def build_layer(self, op, args, options):
    """Instantiate the nn.Module for one parsed layer op.

    Dispatches on the registered handler for `op`: hg.Layer subclasses are
    constructed with this component and update current_size themselves;
    pre-built nn.Module instances (activations) are reused as-is; everything
    else is treated as a builder method.

    NOTE(review): when `op` is unknown this prints a warning and implicitly
    returns None, which will fail later in nn.ModuleList — confirm whether
    raising here would be preferable.
    """
    if self.layer_ops[op]:
        try:
            # issubclass raises TypeError for non-class handlers (bound methods,
            # module instances), which is how we detect hg.Layer classes.
            is_hg_layer = issubclass(self.layer_ops[op], hg.Layer)
        except TypeError:
            is_hg_layer = False
        if is_hg_layer:
            net = self.layer_ops[op](self, args, options)
            # hg.Layer classes know their own output size.
            self.current_size = net.output_size()
            if self.is_latent:
                # The first trainable layer after a latent consumes latent state.
                self._latent_parameters += net.latent_parameters()
                self.is_latent = False
        elif isinstance(self.layer_ops[op], nn.Module):
            # Shared pre-built module (e.g. an activation); no size change.
            net = self.layer_ops[op]
        else:
            # Builder method; updates self.current_size as a side effect.
            net = self.layer_ops[op](None, args, options)
        if 'name' in options:
            self.set_layer(options['name'], net)
        if options.trainable == False:
            self.untrainable_parameters = self.untrainable_parameters.union(set(net.parameters()))
        return net
    else:
        print("ConfigurableComponent: Op not defined", op)
def set_layer(self, name, net):
self.gan.named_layers[name] = net
self.named_layers[name] = net
self.layer_output_sizes[name] = self.current_size
def activations(self):
return {
"celu": nn.CELU(),
"gelu": nn.GELU(),
"lrelu": nn.LeakyReLU(0.2),
"prelu": nn.PReLU(),
"relu": nn.ReLU(),
"relu6": nn.ReLU6(),
"selu": nn.SELU(),
"sigmoid": nn.Sigmoid(),
"softplus": nn.Softplus(),
"softshrink": nn.Softshrink(),
"softsign": nn.Softsign(),
"hardsigmoid": nn.Hardsigmoid(),
"hardtanh": nn.Hardtanh(),
"tanh": nn.Tanh(),
"tanhshrink": nn.Tanhshrink()
}
def layer_dropout(self, net, args, options):
return nn.Dropout2d(float(args[0]))
def layer_identity(self, net, args, options):
return NoOp()
def layer_equal_linear(self, net, args, options):
lr_mul = 1
if options.lr_mul is not None:
lr_mul = options.lr_mul
result = EqualLinear(options.input_size or self.current_size.size(), args[0], lr_mul=lr_mul)
self.current_size = LayerShape(args[0])
return result
def get_device(self):
return torch.device(self.device or "cuda:0")
def get_same_padding(self, input_rows, filter_rows, stride, dilation):
out_rows = (input_rows + stride - 1) // stride
return max(0, (out_rows - 1) * stride + (filter_rows - 1) * dilation + 1 - input_rows) // 2
def layer_const(self, net, args, options):
return Const(*self.current_size.dims)
#from https://discuss.pytorch.org/t/utility-function-for-calculating-the-shape-of-a-conv-output/11173/3
def conv_output_shape(self, h_w, kernel_size=1, stride=1, pad=0, dilation=1):
    """Return the (height, width) a 2d convolution produces for input `h_w`.

    Uses the standard PyTorch Conv2d output formula:
    floor((size + 2*pad - dilation*(kernel - 1) - 1) / stride + 1).
    """
    if type(kernel_size) is not tuple:
        kernel_size = (kernel_size, kernel_size)

    def _out_dim(size, kernel):
        return math.floor(((size + (2 * pad) - (dilation * (kernel - 1)) - 1) / stride) + 1)

    return _out_dim(h_w[0], kernel_size[0]), _out_dim(h_w[1], kernel_size[1])
def layer_conv(self, net, args, options):
return self.layer_conv2d(net, args, options)
def layer_conv2d(self, net, args, options):
    """`conv2d [channels]` layer: a 2d convolution with optional
    stride/filter/padding/initializer options.

    Updates self.current_size to the computed output shape and returns the
    nn.Conv2d module.
    """
    if len(args) > 0:
        channels = args[0]
    else:
        # Default: preserve the channel count.
        channels = self.current_size.channels
    options = hc.Config(options)
    stride = 1
    if options.stride is not None:
        stride = options.stride
    # NOTE: `filter` shadows the builtin; kept for byte-compatibility.
    filter = 3
    if options.filter is not None:
        filter = options.filter
    padding = 1
    if options.padding is not None:
        padding = options.padding
    # Dilation is fixed at 1 (no option exposed).
    dilation = 1
    layer = nn.Conv2d(options.input_channels or self.current_size.channels, channels, filter, stride, padding = (padding, padding))
    self.nn_init(layer, options.initializer)
    # Symmetric padding, so passing the scalar `padding` here matches the
    # (padding, padding) tuple given to Conv2d.
    h, w = self.conv_output_shape((self.current_size.height, self.current_size.width), filter, stride, padding, dilation)
    self.current_size = LayerShape(channels, h, w)
    return layer
def layer_conv1d(self, net, args, options):
    """`conv1d [channels]` layer: a 1d convolution with optional
    stride/filter/padding/initializer options.

    Updates self.current_size to (channels, out_length) and returns the module.
    """
    if len(args) > 0:
        channels = args[0]
    else:
        # Default: preserve the channel count.
        channels = self.current_size.channels
    # Fix: removed stray debug `print("Options:", options)` left in the build path.
    options = hc.Config(options)
    stride = options.stride or 1
    fltr = options.filter or 3
    padding = 1
    if options.padding is not None:
        padding = options.padding
    layers = [nn.Conv1d(options.input_channels or self.current_size.channels, channels, fltr, stride, padding = padding)]
    self.nn_init(layers[-1], options.initializer)
    # conv_output_shape expects an (h, w) pair; only the first dim matters in 1d.
    h, _ = self.conv_output_shape((self.current_size.height, self.current_size.height), fltr, stride, padding, 1)
    self.current_size = LayerShape(channels, h)
    return nn.Sequential(*layers)
def layer_conv3d(self, net, args, options):
    """`conv3d [channels]` layer: a 3d convolution.

    `padding0`/`stride0` set the leading (temporal) padding/stride separately
    from the spatial ones.
    """
    if len(args) > 0:
        channels = args[0]
    else:
        channels = self.current_size.channels
    options = hc.Config(options)
    stride = options.stride or 1
    fltr = options.filter or 3
    padding = options.padding or 1
    if options.padding0:
        padding = [options.padding0, padding, padding]
    if options.stride0:
        stride = [options.stride0, stride, stride]
    else:
        stride = [stride, stride, stride]
    layers = [nn.Conv3d(options.input_channels or self.current_size.channels, channels, fltr, stride, padding = padding)]
    # Fix: previously called self.nn_init(layer, ...) on the undefined name
    # `layer`, raising NameError whenever conv3d was used.
    self.nn_init(layers[-1], options.initializer)
    # Fix: `frames` was undefined. Take the frame count from the current 4d
    # shape when available, else fall back to 1.
    # TODO: h/w should use conv_output_shape like conv2d instead of plain
    # integer division by the stride.
    frames = self.current_size.dims[0] if len(self.current_size.dims) == 4 else 1
    self.current_size = LayerShape(frames, channels, self.current_size.height // stride[1], self.current_size.width // stride[2])
    return nn.Sequential(*layers)
def layer_linear(self, net, args, options):
options = hc.Config(options)
shape = [int(x) for x in str(args[0]).split("*")]
bias = True
if options.bias == False:
bias = False
output_size = 1
for dim in shape:
output_size *= dim
layers = []
if len(self.current_size.dims) != 1:
layers += [nn.Flatten()]
layers += [nn.Linear(options.input_size or self.current_size.size(), output_size, bias=bias)]
self.nn_init(layers[-1], options.initializer)
self.current_size = LayerShape(*list(reversed(shape)))
if len(shape) != 1:
layers.append(Reshape(*self.current_size.dims))
if self.is_latent:
self._latent_parameters += [layers[0].weight]
self.is_latent = False
return nn.Sequential(*layers)
def layer_modulated_conv2d(self, net, args, options):
channels = self.current_size.channels
if len(args) > 0:
channels = args[0]
method = "conv"
if len(args) > 1:
method = args[1]
upsample = method == "upsample"
downsample = method == "downsample"
demodulate = True
if options.demodulate == False:
demodulate = False
filter = 3
if options.filter:
filter = options.filter
lr_mul = 1.0
if options.lr_mul:
lr_mul = options.lr_mul
input_channels = self.current_size.channels
if options.input_channels:
input_channels = options.input_channels
result = ModulatedConv2d(input_channels, channels, filter, self.layer_output_sizes['w'].size(), upsample=upsample, demodulate=demodulate, downsample=downsample, lr_mul=lr_mul)
if upsample:
self.current_size = LayerShape(channels, self.current_size.height * 2, self.current_size.width * 2)
elif downsample:
self.current_size = LayerShape(channels, self.current_size.height // 2, self.current_size.width // 2)
return result
def layer_blur(self, net, args, options):
blur_kernel=[1, 3, 3, 1]
kernel_size=3
factor = 2
p = (len(blur_kernel) - factor) - (kernel_size - 1)
pad0 = (p + 1) // 2 + factor - 1
pad1 = p // 2 + 1
return Blur(blur_kernel, pad=(pad0, pad1), upsample_factor=factor)
def layer_reshape(self, net, args, options):
    """`reshape w*h*c`: reshape to the given dims, reversed into CHW order."""
    target_dims = list(reversed([int(dim) for dim in args[0].split("*")]))
    self.current_size = LayerShape(*target_dims)
    return Reshape(*target_dims)
def layer_adaptive_avg_pool(self, net, args, options):
self.current_size = LayerShape(self.current_size.channels, self.current_size.height // 2, self.current_size.width // 2)
return nn.AdaptiveAvgPool2d([self.current_size.height, self.current_size.width])
def layer_adaptive_avg_pool1d (self, net, args, options):
self.current_size = LayerShape(self.current_size.channels, self.current_size.height // 2)
return nn.AdaptiveAvgPool1d(self.current_size.height)
def layer_avg_pool(self, net, args, options):
self.current_size = LayerShape(self.current_size.channels, self.current_size.height // 2, self.current_size.width // 2)
return nn.AvgPool2d(2, 2)
def layer_adaptive_avg_pool3d(self, net, args, options):
frames = 4 #TODO
self.current_size = LayerShape(frames, self.current_size.channels, self.current_size.height // 2, self.current_size.width // 2)
return nn.AdaptiveAvgPool3d([self.current_size.frames, self.current_size.height, self.current_size.width]) #TODO looks wrong
def layer_instance_norm(self, net, args, options):
options = hc.Config(options)
affine = True
if options.affine == False:
affine = False
return nn.InstanceNorm2d(self.current_size.channels, affine=affine)
def layer_instance_norm1d(self, net, args, options):
options = hc.Config(options)
affine = True
if options.affine == False:
affine = False
return nn.InstanceNorm1d(self.current_size.channels, affine=affine)
def layer_instance_norm3d(self, net, args, options):
options = hc.Config(options)
affine = True
if options.affine == False:
affine = False
return nn.InstanceNorm3d(self.current_size.channels, affine=affine)
def layer_batch_norm(self, net, args, options):
return nn.BatchNorm2d(self.current_size.channels)
def layer_batch_norm1d(self, net, args, options):
return nn.BatchNorm1d(self.current_size.size())
def get_conv_options(self, config, options):
stride = options.stride or self.ops.config_option("stride", [1,1])
fltr = options.filter or self.ops.config_option("filter", [3,3])
avg_pool = options.avg_pool or self.ops.config_option("avg_pool", [1,1])
if type(stride) != type([]):
stride = [stride, stride]
if type(avg_pool) != type([]):
avg_pool = [avg_pool, avg_pool]
if type(fltr) != type([]):
fltr = [fltr, fltr]
return stride, fltr, avg_pool
def layer_deconv(self, net, args, options):
if len(args) > 0:
channels = args[0]
else:
channels = self.current_size.channels
options = hc.Config(options)
filter = 4 #TODO
if options.filter:
filter = options.filter
stride = 2
if options.stride:
stride = options.stride
padding = 1
if options.padding:
padding = options.padding
layer = nn.ConvTranspose2d(options.input_channels or self.current_size.channels, channels, filter, stride, padding)
self.nn_init(layer, options.initializer)
self.current_size = LayerShape(channels, self.current_size.height * 2, self.current_size.width * 2)
return layer
def layer_pad(self, net, args, options):
options = hc.Config(options)
return nn.ZeroPad2d((args[0], args[1], args[2], args[3]))
def layer_pixel_norm(self, net, args, options):
return PixelNorm()
def layer_pretrained(self, net, args, options):
    """`pretrained model_name` layer: truncate a torchvision model.

    Requires `layer=N` (take the first N children); `sublayer=M` optionally
    truncates inside the last selected child. Without `layer=` the available
    children are printed and a ValidationException is raised so the user can
    pick one.
    """
    model = getattr(torchvision.models, args[0])(pretrained=True)
    model.train(True)
    if options.layer:
        layers = list(model.children())[:options.layer]
        if options.sublayer:
            layers[-1] = nn.Sequential(*layers[-1][:options.sublayer])
    else:
        # Fix: the raise was previously unconditional (the trailing `return`
        # was dead code), making the pretrained op unusable even with layer=.
        # List the children so the user can choose a truncation point, then abort.
        layers = [model]
        print("List of pretrained layers:", layers)
        raise ValidationException("layer=-1 required for pretrained, sublayer=-1 optional. Layers outputted above.")
    return nn.Sequential(*layers)
def layer_resize_conv(self, net, args, options):
return self.layer_resize_conv2d(net, args, options)
def layer_resize_conv2d(self, net, args, options):
options = hc.Config(options)
channels = args[0]
w = options.w or self.current_size.width * 2
h = options.h or self.current_size.height * 2
layers = [nn.Upsample((h, w), mode="bilinear"),
nn.Conv2d(options.input_channels or self.current_size.channels, channels, options.filter or 3, 1, 1)]
self.nn_init(layers[-1], options.initializer)
self.current_size = LayerShape(channels, h, w)
return nn.Sequential(*layers)
def layer_resize_conv1d(self, net, args, options):
options = hc.Config(options)
channels = args[0]
h = options.h or self.current_size.height * 2
padding = 1
if options.padding is not None:
padding = options.padding
layers = [nn.Upsample((h)),
nn.Conv1d(options.input_channels or self.current_size.channels, channels, options.filter or 3, 1, padding=padding)]
self.nn_init(layers[-1], options.initializer)
h, _ = self.conv_output_shape((h, h), options.filter or 3, 1, padding, 1)
self.current_size = LayerShape(channels, h)
return nn.Sequential(*layers)
def layer_scaled_conv2d(self, net, args, options):
channels = self.current_size.channels
if len(args) > 0:
channels = args[0]
method = "conv"
if len(args) > 1:
method = args[1]
upsample = method == "upsample"
downsample = method == "downsample"
demodulate = True
if options.demodulate == False:
demodulate = False
filter = 3
if options.filter:
filter = options.filter
lr_mul = 1.0
if options.lr_mul:
lr_mul = options.lr_mul
input_channels = self.current_size.channels
if options.input_channels:
input_channels = options.input_channels
result = ScaledConv2d(input_channels, channels, filter, 0, upsample=upsample, demodulate=demodulate, downsample=downsample, lr_mul=lr_mul)
self.nn_init(result, options.initializer)
if upsample:
self.current_size = LayerShape(channels, self.current_size.height * 2, self.current_size.width * 2)
else:
self.current_size = LayerShape(channels, self.current_size.height - 2, self.current_size.width - 2)
return result
def layer_split(self, net, args, options):
options = hc.Config(options)
split_size = args[0]
select = args[1]
dim = -1
if options.dim:
dim = options.dim
#TODO better validation
#TODO increase dim options
if dim == -1:
dims = list(self.current_size.dims).copy()
dims[0] = split_size
if (select + 1) * split_size > self.current_size.channels:
dims[0] = self.current_size.channels % split_size
self.current_size = LayerShape(*dims)
return NoOp()
def layer_subpixel(self, net, args, options):
options = hc.Config(options)
channels = args[0]
layers = [nn.Conv2d(options.input_channels or self.current_size.channels, channels*4, options.filter or 3, 1, 1), nn.PixelShuffle(2)]
self.nn_init(layers[0], options.initializer)
self.current_size = LayerShape(channels, self.current_size.height * 2, self.current_size.width * 2)
return nn.Sequential(*layers)
def layer_latent(self, net, args, options):
self.current_size = LayerShape(self.gan.latent.current_input_size)
self.is_latent = True
return NoOp()
def layer_linformer(self, net, args, options):
model = Linformer(
input_size = self.current_size.size(),
channels = self.current_size.height # TODO wtf
)
return model
def layer_vae(self, net, args, options):
self.vae = Variational(self.current_size.channels)
return self.vae
def layer_multi_head_attention(self, net, args, options):
output_size = self.current_size.size()
if len(args) > 0:
output_size = args[0]
layer = MultiHeadAttention(self.current_size.size(), output_size, heads=options.heads or 4)
self.current_size = LayerShape(output_size)
self.nn_init(layer.o, options.initializer)
self.nn_init(layer.h, options.initializer)
self.nn_init(layer.g, options.initializer)
self.nn_init(layer.f, options.initializer)
if self.is_latent:
self._latent_parameters += [layer.h.weight, layer.g.weight, layer.f.weight]
self.is_latent = False
return layer
def layer_attention(self, net, args, options):
layer = Attention(self.current_size.channels)
self.nn_init(layer.v, options.initializer)
self.nn_init(layer.h, options.initializer)
self.nn_init(layer.g, options.initializer)
self.nn_init(layer.f, options.initializer)
return layer
def layer_norm(self, net, args, options):
    """`layer_norm`: LayerNorm over the full current shape.

    `affine=false` disables the learned elementwise scale/shift.
    """
    use_affine = not (options.affine == False)
    return nn.LayerNorm(self.current_size.dims, elementwise_affine=use_affine)
def layer_learned_noise(self, net, args, options):
return LearnedNoise(*([self.gan.batch_size(), *self.current_size.dims]))
def layer_adaptive_instance_norm(self, net, args, options):
return AdaptiveInstanceNorm(self.layer_output_sizes['w'].size(), self.current_size.channels, equal_linear=options.equal_linear)
def layer_flatten(self, net, args, options):
    """`flatten`: collapse all non-batch dimensions into one."""
    flat_shape = LayerShape(self.current_size.size())
    self.current_size = flat_shape
    return nn.Flatten()
def layer_zeros_like(self, net, args, options):
return Zeros(self.gan.latent.sample().shape)
def nn_init(self, layer, initializer_option):
    """Apply the configured weight initializer to `layer.weight` in place.

    `initializer_option` is either None (do nothing), a parsed
    `initializer_name(args, options)` pattern, or a bare initializer name.
    An optional `gain=` option post-multiplies the weights for initializers
    that don't take gain natively.

    NOTE(review): returns NoOp() on the initialized path but None on the early
    return — callers appear to ignore the return value.
    """
    if initializer_option is None:
        return
    if type(initializer_option) == pyparsing.ParseResults and type(initializer_option[0]) == hypergan.parser.Pattern:
        # Full pattern form: name(args, options)
        args = [initializer_option[0].layer_name] + initializer_option[0].args
        options = hc.Config(initializer_option[0].options)
    else:
        # Bare string form: just the initializer name.
        args = [initializer_option]
        options = hc.Config({})
    layer_data = layer.weight.data
    if args[0] == "uniform":
        a = float(args[1])
        b = float(args[2])
        nn.init.uniform_(layer_data, a, b)
    elif args[0] == "normal":
        mean = float(args[1])
        std = float(args[2])
        nn.init.normal_(layer_data, mean, std)
    elif args[0] == "constant":
        val = float(args[1])
        nn.init.constant_(layer_data, val)
    elif args[0] == "ones":
        nn.init.ones_(layer_data)
    elif args[0] == "zeros":
        nn.init.zeros_(layer_data)
    elif args[0] == "eye":
        nn.init.eye_(layer_data)
    elif args[0] == "dirac":
        nn.init.dirac_(layer_data)
    elif args[0] == "xavier_uniform":
        gain = nn.init.calculate_gain(options.gain or "relu")
        nn.init.xavier_uniform_(layer_data, gain=gain)
    elif args[0] == "xavier_normal":
        gain = nn.init.calculate_gain(options.gain or "relu")
        nn.init.xavier_normal_(layer_data, gain=gain)
    elif args[0] == "kaiming_uniform":
        a = 0 # TODO: wrong — `a` is computed but never passed to the initializer
        nn.init.kaiming_uniform_(layer_data, mode=(options.mode or "fan_in"), nonlinearity=options.gain or "relu")
    elif args[0] == "kaiming_normal":
        a = 0 # TODO: wrong — `a` is computed but never passed to the initializer
        nn.init.kaiming_normal_(layer_data, mode=(options.mode or "fan_in"), nonlinearity=options.gain or "relu")
    elif args[0] == "orthogonal":
        if "gain" in options:
            gain = nn.init.calculate_gain(options["gain"])
        else:
            gain = 1
        nn.init.orthogonal_(layer_data, gain=gain)
    else:
        print("Warning: No initializer found for " + args[0])
    if "gain" in options:
        # Post-scale by the requested gain regardless of initializer.
        layer_data.mul_(nn.init.calculate_gain(options["gain"]))
    return NoOp()
def forward(self, input, context=None):
    """Run `input` through every configured layer in order.

    :param input: input tensor (moved to this component's device if needed).
    :param context: optional dict of named intermediate outputs. Layers with a
        `name=` option store their output here; style-modulated layers read
        `context['w']`.
    :return: the final layer's output (also stored in self.sample).
    :raises ValidationException: if any layer fails or a shape mismatch is
        detected on the first training step.
    """
    # Fix: the default used to be a shared mutable `{}` which this method
    # writes into (context[name] = input), leaking state across calls.
    if context is None:
        context = {}
    if self.get_device().index != input.device.index:
        input = input.to(self.get_device())
    for module, parsed, layer_shape in zip(self.net, self.parsed_layers, self.layer_shapes):
        try:
            options = parsed.parsed_options
            args = parsed.args
            layer_name = parsed.layer_name
            name = options.name
            if isinstance(module, hg.Layer):
                input = module(input, context)
            elif layer_name in ("adaptive_instance_norm", "ez_norm", "modulated_conv2d"):
                # Style-modulated layers take the 'w' vector from context.
                input = module(input, context['w'])
            elif layer_name == "split":
                input = torch.split(input, args[0], options.dim or -1)[args[1]]
            elif layer_name == "latent":
                input = self.gan.latent.z  # sample()
            elif layer_name == "pretrained":
                # Map [-1, 1] input to [0, 1], then apply ImageNet normalization.
                # Fix: in_zero_one was previously computed but never used, so the
                # pretrained model received unnormalized [-1, 1] input.
                in_zero_one = (input + self.const_one) / self.const_two
                # Fix: create the constants on the input's device instead of
                # hard-coded 'cuda:0'.
                mean = torch.as_tensor([0.485, 0.456, 0.406], device=input.device).view(1, 3, 1, 1)
                std = torch.as_tensor([0.229, 0.224, 0.225], device=input.device).view(1, 3, 1, 1)
                input = module(in_zero_one.clone().sub_(mean).div_(std))
            else:
                input = module(input)
            if self.gan.steps == 0:
                # First step only: verify runtime shapes against the statically
                # computed per-layer shapes.
                size = LayerShape(*list(input.shape[1:]))
                if size.squeeze_dims() != layer_shape.squeeze_dims():
                    print("Error: Size error on", layer_name)
                    print("Error: Expected output size", layer_shape.dims)
                    print("Error: Actual output size", size.dims)
                    # Fix: `raise "..."` raises TypeError in Python 3; raise a
                    # real exception instead.
                    raise ValidationException("Layer size error, cannot continue")
            if name is not None:
                context[name] = input
        except Exception:
            # Fix: narrowed from a bare `except:` so KeyboardInterrupt and
            # SystemExit are no longer swallowed.
            raise ValidationException("Error on " + parsed.layer_defn + " - input size " + ",".join([str(x) for x in input.shape]))
    self.sample = input
    return input
def latent_parameters(self):
return self._latent_parameters
def set_trainable(self, flag):
    """Set requires_grad on every parameter not marked untrainable."""
    trainable = set(self.parameters()) - self.untrainable_parameters
    for param in trainable:
        param.requires_grad = flag
def layer_shape(self):
return self.current_size
def __getstate__(self):
    """Pickle support: drop the unpicklable parser (restored in __setstate__)."""
    state = dict(self.__dict__)
    state.pop("parser")
    return state
def __setstate__(self, d):
    """Pickle support: restore state and recreate the parser dropped by __getstate__."""
    self.__dict__ = d
    self.parser = hypergan.parser.Parser()
|
255BITS/HyperGAN
|
hypergan/configurable_component.py
|
Python
|
mit
| 32,342
|
[
"DIRAC"
] |
57f453c1a9b980ac591391d18e935227cc73f49512d324d5cb70ff70a6a0272e
|
# ----------------------------------------------------------------------------------------------------
#
# Copyright (c) 2020, Oracle and/or its affiliates. All rights reserved.
# DO NOT ALTER OR REMOVE COPYRIGHT NOTICES OR THIS FILE HEADER.
#
# This code is free software; you can redistribute it and/or modify it
# under the terms of the GNU General Public License version 2 only, as
# published by the Free Software Foundation.
#
# This code is distributed in the hope that it will be useful, but WITHOUT
# ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or
# FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License
# version 2 for more details (a copy is included in the LICENSE file that
# accompanied this code).
#
# You should have received a copy of the GNU General Public License version
# 2 along with this work; if not, write to the Free Software Foundation,
# Inc., 51 Franklin St, Fifth Floor, Boston, MA 02110-1301 USA.
#
# Please contact Oracle, 500 Oracle Parkway, Redwood Shores, CA 94065 USA
# or visit www.oracle.com if you need additional information or have any
# questions.
#
# ----------------------------------------------------------------------------------------------------
import json
import os
import re
import subprocess
from multiprocessing import Lock
import mx
_bear_version_regex = re.compile(r"bear ([0-9]+).([0-9]+).([0-9]+)", re.IGNORECASE)
_bear_version = '<uninitialized>'
def _get_bear_version():
    """Return bear's major version as an int, or None if bear is unavailable.

    The probe runs once; its result is cached in the module-level _bear_version.
    """
    global _bear_version
    if _bear_version != '<uninitialized>':
        return _bear_version
    try:
        output = mx._check_output_str(['bear', '--version'], stderr=subprocess.STDOUT)
    except OSError:
        output = ''
    match = _bear_version_regex.search(output)
    if match:
        _bear_version = int(match.group(1))
    else:
        mx.warn("Could not find bear, will not produce compilation database for make projects.")
        _bear_version = None
    return _bear_version
def gmake_with_bear(out=None, append=False, context=None):
    """Return a gmake command line wrapped in bear so a compilation database
    is produced, falling back to plain gmake when bear is not installed.

    :param out: optional output path for the database (bear -o/--output).
    :param append: append to an existing database (bear -a/--append).
    :param context: passed through to mx.gmake_cmd.
    """
    version = _get_bear_version()
    if version is None:
        return [mx.gmake_cmd(context=context)]
    # bear >= 3 uses long options and requires '--' before the build command.
    modern = version >= 3
    cmd = ['bear']
    if append:
        cmd += ['--append' if modern else '-a']
    if out is not None:
        cmd += ['--output' if modern else '-o', out]
    if modern:
        cmd += ['--']
    cmd += [mx.gmake_cmd(context=context)]
    return cmd
_compdb_path = None
_compdb_lock = None
def _default_compdb_path():
    """Return the default location for compile_commands.json.

    The database is placed next to the primary suite's version-control
    directory when one exists, otherwise inside the suite directory.
    Returns None when there is no primary suite.
    """
    primary = mx.primary_suite()
    if primary is None:
        # No primary suite, don't try to enable compdb.
        return None
    base = os.path.dirname(primary.vc_dir) if primary.vc_dir else primary.dir
    return os.path.join(base, 'compile_commands.json')
def init():
    """Initialize compilation-database support from --compdb / MX_COMPDB.

    Accepted values: 'none' (disabled), 'default' (suite-relative path),
    or an explicit path for the database file.
    """
    global _compdb_path
    global _compdb_lock
    choice = mx.get_opts().compdb
    if choice is None:
        # Fall back to the environment when the option was not given.
        choice = mx.get_env('MX_COMPDB')
    if choice is None or choice == 'none':
        return
    _compdb_lock = Lock()
    _compdb_path = _default_compdb_path() if choice == 'default' else os.path.abspath(choice)
def enabled():
    """Return True when a compilation database is being maintained (see init())."""
    return _compdb_path is not None
def gmake_with_compdb_cmd(context=None):
    """Return the gmake command, wrapped in bear when compdb output is enabled."""
    if not enabled():
        return [mx.gmake_cmd(context=context)]
    # Append so that successive make projects accumulate into one database.
    return gmake_with_bear(append=True, context=context)
class Compdb:
    """Accumulates and persists a JSON compilation database.

    Used as a context manager: entering acquires the global compdb lock and
    loads the existing database file (if any); exiting writes the merged
    entries back and releases the lock.  Entries are keyed by the
    normalized absolute path of the compiled source file so that repeated
    compilations of the same file keep only the latest entry.
    """
    def __init__(self):
        # normalized absolute source path -> compilation database entry
        self.content = {}
    def __enter__(self):
        _compdb_lock.acquire()
        if os.path.exists(_compdb_path):
            self.mergeFile(_compdb_path)
        return self
    def __exit__(self, *args):
        with open(_compdb_path, 'w') as f:
            json.dump(list(self.content.values()), f, indent=4)
        _compdb_lock.release()
    def merge(self, data):
        """Merge a list of compilation database entries into this database."""
        for item in data:
            key = item['file']
            if not os.path.isabs(key):
                key = os.path.normpath(os.path.join(item['directory'], item['file']))
            # Index by the normalized absolute path so relative and absolute
            # references to the same source file collapse into one entry.
            # (Bug fix: the raw item['file'] value was previously used as the
            # key, which made the normalization above dead code and could
            # leave duplicate entries for the same file.)
            self.content[key] = item
    def mergeString(self, string):
        """Merge entries from a JSON string; malformed input is ignored with a warning."""
        try:
            self.merge(json.loads(string))
        except json.JSONDecodeError:
            mx.warn("Error decoding JSON compilation database. Ignoring.")
    def mergeFile(self, path):
        """Merge entries from a JSON file; malformed input is ignored with a warning."""
        with open(path, 'r') as f:
            try:
                self.merge(json.load(f))
            except json.JSONDecodeError:
                mx.warn("Error decoding JSON compilation database from '%s'. Ignoring." % path)
class CompdbCapture:
    """Callable sink that buffers bear's JSON output and merges it on exit.

    Instances are used both as a context manager and as an output callback:
    the context manager yields the callback when compdb support is enabled
    (None otherwise), and merges everything captured when it closes.
    """
    def __init__(self, suite):
        # Captured JSON text; the suite argument is accepted for API
        # compatibility but not used.
        self.data = ""
    def __call__(self, data):
        # Output callback: accumulate every chunk handed to us.
        self.data = self.data + data
    def __enter__(self):
        return self if enabled() else None
    def __exit__(self, *args):
        if not enabled():
            return
        with Compdb() as db:
            db.mergeString(self.data)
def merge_compdb(subject, path):
    """Merge the compilation database produced under *path* into the global one.

    :param subject: the build subject, used only in the warning message.
    :param path: directory expected to contain compile_commands.json.
    """
    if not enabled():
        return
    candidate = os.path.join(path, 'compile_commands.json')
    with Compdb() as db:
        if not os.path.exists(candidate):
            mx.warn("JSON compilation database for %s not found (expected at %s)." % (subject, candidate))
        else:
            db.mergeFile(candidate)
|
graalvm/mx
|
mx_compdb.py
|
Python
|
gpl-2.0
| 5,256
|
[
"VisIt"
] |
318b30996a0bfb1c7510967f8cdff0925a84f2be3ee143d009c6bec87092ec4a
|
"""Rewrite assertion AST to produce nice error messages"""
import ast
import _ast
import errno
import itertools
import imp
import marshal
import os
import re
import struct
import sys
import types
from fnmatch import fnmatch
import py
from _pytest.assertion import util
# pytest caches rewritten pycs in __pycache__.
if hasattr(imp, "get_tag"):
    # Python 3.2+: reuse the interpreter's own cache tag (e.g. "cpython-35").
    PYTEST_TAG = imp.get_tag() + "-PYTEST"
else:
    # Older interpreters: synthesize an equivalent tag by hand.
    if hasattr(sys, "pypy_version_info"):
        impl = "pypy"
    elif sys.platform == "java":
        impl = "jython"
    else:
        impl = "cpython"
    ver = sys.version_info
    PYTEST_TAG = "%s-%s%s-PYTEST" % (impl, ver[0], ver[1])
    del ver, impl
# ".pyc" normally, ".pyo" when running optimized (-O makes __debug__ False).
PYC_EXT = ".py" + (__debug__ and "c" or "o")
PYC_TAIL = "." + PYTEST_TAG + PYC_EXT
# Interpreters that need newline normalization before parsing; see _rewrite_test.
REWRITE_NEWLINES = sys.version_info[:2] != (2, 7) and sys.version_info < (3, 2)
ASCII_IS_DEFAULT_ENCODING = sys.version_info[0] < 3
# ast.Call lost its starargs/kwargs constructor arguments in Python 3.5;
# this shim gives both eras a common three-argument call form.
if sys.version_info >= (3,5):
    ast_Call = ast.Call
else:
    ast_Call = lambda a,b,c: ast.Call(a, b, c, None, None)
class AssertionRewritingHook(object):
    """PEP302 Import hook which rewrites asserts.

    find_module() decides whether a module should be rewritten and caches
    the rewritten code object (and a pyc on disk); load_module() then
    executes that code.  Registering the hook on sys.meta_path makes all
    matching test modules import with assertion introspection enabled.
    """
    def __init__(self, config):
        self.config = config
        # Glob patterns (the python_files ini option) naming test modules.
        self.fnpats = config.getini("python_files")
        self.session = None
        # Maps module name -> (code object, pyc path) between find_module()
        # and the subsequent load_module() call.
        self.modules = {}
        self._rewritten_names = set()
        self._register_with_pkg_resources()
        # Import-name prefixes registered explicitly via mark_rewrite().
        self._must_rewrite = set()
    def set_session(self, session):
        # The session (when set) lets _should_rewrite() recognize files that
        # were passed explicitly on the command line.
        self.session = session
    def find_module(self, name, path=None):
        """Locate *name*'s source and, if it should be rewritten, cache the
        rewritten code object; returning self marks us as its loader."""
        state = self.config._assertstate
        state.trace("find_module called for: %s" % name)
        names = name.rsplit(".", 1)
        lastname = names[-1]
        pth = None
        if path is not None:
            # Starting with Python 3.3, path is a _NamespacePath(), which
            # causes problems if not converted to list.
            path = list(path)
            if len(path) == 1:
                pth = path[0]
        if pth is None:
            try:
                fd, fn, desc = imp.find_module(lastname, path)
            except ImportError:
                return None
            if fd is not None:
                fd.close()
            tp = desc[2]
            if tp == imp.PY_COMPILED:
                # Only a compiled file was found; map it back to its source.
                if hasattr(imp, "source_from_cache"):
                    try:
                        fn = imp.source_from_cache(fn)
                    except ValueError:
                        # Python 3 doesn't like orphaned but still-importable
                        # .pyc files.
                        fn = fn[:-1]
                else:
                    fn = fn[:-1]
            elif tp != imp.PY_SOURCE:
                # Don't know what this is.
                return None
        else:
            fn = os.path.join(pth, name.rpartition(".")[2] + ".py")
        fn_pypath = py.path.local(fn)
        if not self._should_rewrite(name, fn_pypath, state):
            return None
        self._rewritten_names.add(name)
        # The requested module looks like a test file, so rewrite it. This is
        # the most magical part of the process: load the source, rewrite the
        # asserts, and load the rewritten source. We also cache the rewritten
        # module code in a special pyc. We must be aware of the possibility of
        # concurrent pytest processes rewriting and loading pycs. To avoid
        # tricky race conditions, we maintain the following invariant: The
        # cached pyc is always a complete, valid pyc. Operations on it must be
        # atomic. POSIX's atomic rename comes in handy.
        write = not sys.dont_write_bytecode
        cache_dir = os.path.join(fn_pypath.dirname, "__pycache__")
        if write:
            try:
                os.mkdir(cache_dir)
            except OSError:
                e = sys.exc_info()[1].errno
                if e == errno.EEXIST:
                    # Either the __pycache__ directory already exists (the
                    # common case) or it's blocked by a non-dir node. In the
                    # latter case, we'll ignore it in _write_pyc.
                    pass
                elif e in [errno.ENOENT, errno.ENOTDIR]:
                    # One of the path components was not a directory, likely
                    # because we're in a zip file.
                    write = False
                elif e in [errno.EACCES, errno.EROFS, errno.EPERM]:
                    state.trace("read only directory: %r" % fn_pypath.dirname)
                    write = False
                else:
                    raise
        cache_name = fn_pypath.basename[:-3] + PYC_TAIL
        pyc = os.path.join(cache_dir, cache_name)
        # Notice that even if we're in a read-only directory, I'm going
        # to check for a cached pyc. This may not be optimal...
        co = _read_pyc(fn_pypath, pyc, state.trace)
        if co is None:
            state.trace("rewriting %r" % (fn,))
            source_stat, co = _rewrite_test(self.config, fn_pypath)
            if co is None:
                # Probably a SyntaxError in the test.
                return None
            if write:
                _make_rewritten_pyc(state, source_stat, pyc, co)
        else:
            state.trace("found cached rewritten pyc for %r" % (fn,))
        self.modules[name] = co, pyc
        return self
    def _should_rewrite(self, name, fn_pypath, state):
        """Decide whether module *name* located at *fn_pypath* gets rewritten."""
        # always rewrite conftest files
        fn = str(fn_pypath)
        if fn_pypath.basename == 'conftest.py':
            state.trace("rewriting conftest file: %r" % (fn,))
            return True
        if self.session is not None:
            if self.session.isinitpath(fn):
                state.trace("matched test file (was specified on cmdline): %r" %
                            (fn,))
                return True
        # modules not passed explicitly on the command line are only
        # rewritten if they match the naming convention for test files
        for pat in self.fnpats:
            # use fnmatch instead of fn_pypath.fnmatch because the
            # latter might trigger an import to fnmatch.fnmatch
            # internally, which would cause this method to be
            # called recursively
            if fnmatch(fn_pypath.basename, pat):
                state.trace("matched test file %r" % (fn,))
                return True
        for marked in self._must_rewrite:
            if name.startswith(marked):
                state.trace("matched marked file %r (from %r)" % (name, marked))
                return True
        return False
    def mark_rewrite(self, *names):
        """Mark import names as needing to be re-written.
        The named module or package as well as any nested modules will
        be re-written on import.
        """
        already_imported = set(names).intersection(set(sys.modules))
        if already_imported:
            for name in already_imported:
                if name not in self._rewritten_names:
                    # Too late to rewrite these; tell the user why.
                    self._warn_already_imported(name)
        self._must_rewrite.update(names)
    def _warn_already_imported(self, name):
        self.config.warn(
            'P1',
            'Module already imported so can not be re-written: %s' % name)
    def load_module(self, name):
        """PEP302 loader: execute the rewritten code cached by find_module()."""
        # If there is an existing module object named 'fullname' in
        # sys.modules, the loader must use that existing module. (Otherwise,
        # the reload() builtin will not work correctly.)
        if name in sys.modules:
            return sys.modules[name]
        co, pyc = self.modules.pop(name)
        # I wish I could just call imp.load_compiled here, but __file__ has to
        # be set properly. In Python 3.2+, this all would be handled correctly
        # by load_compiled.
        mod = sys.modules[name] = imp.new_module(name)
        try:
            mod.__file__ = co.co_filename
            # Normally, this attribute is 3.2+.
            mod.__cached__ = pyc
            mod.__loader__ = self
            py.builtin.exec_(co, mod.__dict__)
        except:
            # Remove the partially-initialized module so a retry starts clean.
            if name in sys.modules:
                del sys.modules[name]
            raise
        return sys.modules[name]
    def is_package(self, name):
        """PEP302 extension: report whether *name* names a package."""
        try:
            fd, fn, desc = imp.find_module(name)
        except ImportError:
            return False
        if fd is not None:
            fd.close()
        tp = desc[2]
        return tp == imp.PKG_DIRECTORY
    @classmethod
    def _register_with_pkg_resources(cls):
        """
        Ensure package resources can be loaded from this loader. May be called
        multiple times, as the operation is idempotent.
        """
        try:
            import pkg_resources
            # access an attribute in case a deferred importer is present
            pkg_resources.__name__
        except ImportError:
            return
        # Since pytest tests are always located in the file system, the
        # DefaultProvider is appropriate.
        pkg_resources.register_loader_type(cls, pkg_resources.DefaultProvider)
    def get_data(self, pathname):
        """Optional PEP302 get_data API.
        """
        with open(pathname, 'rb') as f:
            return f.read()
def _write_pyc(state, co, source_stat, pyc):
    """Write code object *co* to the path *pyc* in pyc format.

    Returns True on success and False when the target cannot be opened;
    write failures are deliberately tolerated (read-only trees, a
    __pycache__ blocked by a file, permission errors, ...).
    """
    # Technically, we don't have to have the same pyc format as (C)Python,
    # since these "pycs" are never seen by the builtin import machinery.
    # Matching it anyway keeps the door open for imp.load_compiled later.
    try:
        fp = open(pyc, "wb")
    except IOError:
        errcode = sys.exc_info()[1].errno
        state.trace("error writing pyc file at %s: errno=%s" %(pyc, errcode))
        # We ignore any failure to write the cache file: permission denied,
        # __pycache__ being a regular file, and so on.
        return False
    with fp:
        # Header: magic number, then source mtime and (truncated) size.
        mtime = int(source_stat.mtime)
        size = source_stat.size & 0xFFFFFFFF
        fp.write(imp.get_magic())
        fp.write(struct.pack("<ll", mtime, size))
        marshal.dump(co, fp)
    return True
# Byte-string newline variants used by _rewrite_test for newline normalization.
RN = "\r\n".encode("utf-8")
N = "\n".encode("utf-8")
# Matches a PEP 263 coding-declaration comment, e.g. "# -*- coding: utf-8 -*-".
cookie_re = re.compile(r"^[ \t\f]*#.*coding[:=][ \t]*[-\w.]+")
# UTF-8 byte-order mark; its presence implies a known (non-ASCII) encoding.
BOM_UTF8 = '\xef\xbb\xbf'
def _rewrite_test(config, fn):
    """Try to read and rewrite *fn* and return the code object.

    Returns a (stat, code object) pair, or (None, None) if the file could
    not be read, decoded, parsed, or compiled.
    """
    state = config._assertstate
    try:
        stat = fn.stat()
        source = fn.read("rb")
    except EnvironmentError:
        return None, None
    if ASCII_IS_DEFAULT_ENCODING:
        # ASCII is the default encoding in Python 2. Without a coding
        # declaration, Python 2 will complain about any bytes in the file
        # outside the ASCII range. Sadly, this behavior does not extend to
        # compile() or ast.parse(), which prefer to interpret the bytes as
        # latin-1. (At least they properly handle explicit coding cookies.) To
        # preserve this error behavior, we could force ast.parse() to use ASCII
        # as the encoding by inserting a coding cookie. Unfortunately, that
        # messes up line numbers. Thus, we have to check ourselves if anything
        # is outside the ASCII range in the case no encoding is explicitly
        # declared. For more context, see issue #269. Yay for Python 3 which
        # gets this right.
        end1 = source.find("\n")
        end2 = source.find("\n", end1 + 1)
        # A coding cookie is only honored on the first two lines.
        if (not source.startswith(BOM_UTF8) and
            cookie_re.match(source[0:end1]) is None and
            cookie_re.match(source[end1 + 1:end2]) is None):
            if hasattr(state, "_indecode"):
                # encodings imported us again, so don't rewrite.
                return None, None
            state._indecode = True
            try:
                try:
                    source.decode("ascii")
                except UnicodeDecodeError:
                    # Let it fail in real import.
                    return None, None
            finally:
                del state._indecode
    # On Python versions which are not 2.7 and less than or equal to 3.1, the
    # parser expects *nix newlines.
    if REWRITE_NEWLINES:
        source = source.replace(RN, N) + N
    try:
        tree = ast.parse(source)
    except SyntaxError:
        # Let this pop up again in the real import.
        state.trace("failed to parse: %r" % (fn,))
        return None, None
    rewrite_asserts(tree, fn, config)
    try:
        co = compile(tree, fn.strpath, "exec")
    except SyntaxError:
        # It's possible that this error is from some bug in the
        # assertion rewriting, but I don't know of a fast way to tell.
        state.trace("failed to compile: %r" % (fn,))
        return None, None
    return stat, co
def _make_rewritten_pyc(state, source_stat, pyc, co):
    """Try to dump rewritten code to *pyc*."""
    if not sys.platform.startswith("win"):
        # POSIX rename is atomic: write to a process-unique temporary file
        # first and move it into place so readers never observe a partial pyc.
        proc_pyc = "%s.%s" % (pyc, os.getpid())
        if _write_pyc(state, co, source_stat, proc_pyc):
            os.rename(proc_pyc, pyc)
    else:
        # Windows grants exclusive access to open files and doesn't have
        # atomic rename, so just write into the final file.
        _write_pyc(state, co, source_stat, pyc)
def _read_pyc(source, pyc, trace=lambda x: None):
    """Possibly read a pytest pyc containing rewritten code.

    Return rewritten code if successful or None if not.
    """
    try:
        fp = open(pyc, "rb")
    except IOError:
        return None
    with fp:
        try:
            mtime = int(source.mtime())
            size = source.size()
            data = fp.read(12)
        except EnvironmentError as e:
            trace('_read_pyc(%s): EnvironmentError %s' % (source, e))
            return None
        # The 12-byte header is: 4-byte magic, then little-endian source
        # mtime and size; any mismatch means the cache is stale or bogus.
        if len(data) != 12 or data[:4] != imp.get_magic():
            trace('_read_pyc(%s): invalid or out of date pyc' % source)
            return None
        if struct.unpack("<ll", data[4:]) != (mtime, size):
            trace('_read_pyc(%s): invalid or out of date pyc' % source)
            return None
        try:
            co = marshal.load(fp)
        except Exception as e:
            trace('_read_pyc(%s): marshal.load error %s' % (source, e))
            return None
        if not isinstance(co, types.CodeType):
            trace('_read_pyc(%s): not a code object' % source)
            return None
        return co
def rewrite_asserts(mod, module_path=None, config=None):
    """Rewrite the assert statements in mod."""
    rewriter = AssertionRewriter(module_path, config)
    rewriter.run(mod)
def _saferepr(obj):
    """Get a safe repr of an object for assertion error messages.

    The assertion formatting (util.format_explanation()) treats newlines
    as special characters, so any newline in the repr must be escaped.
    util.format_explanation() normally handles this, but a custom repr may
    itself contain escape sequences (notably '\\n{' / '\\n}' in JSON reprs),
    so the escaping is done here as well.
    """
    r = py.io.saferepr(obj)
    # Choose the text or bytes constructor matching the repr's type so the
    # replacement literals are type-compatible on both Python 2 and 3.
    t = py.builtin.text if py.builtin._istext(r) else py.builtin.bytes
    return r.replace(t("\n"), t("\\n"))
from _pytest.assertion.util import format_explanation as _format_explanation # noqa
def _format_assertmsg(obj):
    """Format the custom assertion message given.

    For strings this simply replaces newlines with '\\n~' so that
    util.format_explanation() will preserve them instead of escaping
    newlines. For other objects py.io.saferepr() is used first.
    """
    # A text/bytes message is used verbatim; anything else is repr()'d.
    # reprlib escapes newlines inside strings, but an object's custom
    # __repr__() may emit literal ones -- both must end up preserved,
    # hence the is_repr bookkeeping below.
    is_repr = not (py.builtin._istext(obj) or py.builtin._isbytes(obj))
    s = py.io.saferepr(obj) if is_repr else obj
    t = py.builtin.text if py.builtin._istext(s) else py.builtin.bytes
    s = s.replace(t("\n"), t("\n~")).replace(t("%"), t("%%"))
    if is_repr:
        # Undo reprlib's escaping so repr'd newlines are preserved too.
        s = s.replace(t("\\n"), t("\n~"))
    return s
def _should_repr_global_name(obj):
    """Only show the repr of globals that are plain data.

    Objects carrying a __name__ (modules, functions, classes) and
    callables are self-describing and are left as-is.
    """
    if hasattr(obj, "__name__"):
        return False
    return not py.builtin.callable(obj)
def _format_boolop(explanations, is_or):
    """Join the sub-explanations of a boolean operation with 'or'/'and'."""
    joiner = " or " if is_or else " and "
    explanation = "(" + joiner.join(explanations) + ")"
    t = py.builtin.text if py.builtin._istext(explanation) else py.builtin.bytes
    # Escape %-signs: the explanation later goes through %-formatting.
    return explanation.replace(t('%'), t('%%'))
def _call_reprcompare(ops, results, expls, each_obj):
    # Walk the chained comparison left to right until the first falsey
    # result (or one whose truth value itself raises); after the loop,
    # `i` and `expl` refer to the failing (or last) comparison.
    for i, res, expl in zip(range(len(ops)), results, expls):
        try:
            done = not res
        except Exception:
            done = True
        if done:
            break
    # Prefer a custom explanation from the _reprcompare hook (set by the
    # assertion plugin) when it provides one for this operand pair.
    if util._reprcompare is not None:
        custom = util._reprcompare(ops[i], each_obj[i], each_obj[i + 1])
        if custom is not None:
            return custom
    return expl
# Templates used to render unary operators in assertion explanations.
unary_map = {
    ast.Not: "not %s",
    ast.Invert: "~%s",
    ast.USub: "-%s",
    ast.UAdd: "+%s"
}
# Operator symbols used to render binary operators and comparisons in
# assertion explanations.
binop_map = {
    ast.BitOr: "|",
    ast.BitXor: "^",
    ast.BitAnd: "&",
    ast.LShift: "<<",
    ast.RShift: ">>",
    ast.Add: "+",
    ast.Sub: "-",
    ast.Mult: "*",
    ast.Div: "/",
    ast.FloorDiv: "//",
    ast.Mod: "%%", # escaped for string formatting
    ast.Eq: "==",
    ast.NotEq: "!=",
    ast.Lt: "<",
    ast.LtE: "<=",
    ast.Gt: ">",
    ast.GtE: ">=",
    ast.Pow: "**",
    ast.Is: "is",
    ast.IsNot: "is not",
    ast.In: "in",
    ast.NotIn: "not in"
}
# Python 3.5+ compatibility
try:
    binop_map[ast.MatMult] = "@"
except AttributeError:
    pass
# Python 3.4+ compatibility
if hasattr(ast, "NameConstant"):
    _NameConstant = ast.NameConstant
else:
    def _NameConstant(c):
        # Older Pythons have no NameConstant node; a plain Name (e.g.
        # "None") evaluates to the same constant.
        return ast.Name(str(c), ast.Load())
def set_location(node, lineno, col_offset):
    """Recursively stamp *node* and all children with the given location.

    Only attributes the node type actually declares (lineno/col_offset)
    are assigned.  Returns *node* for convenient chaining.
    """
    def _apply(n):
        if "lineno" in n._attributes:
            n.lineno = lineno
        if "col_offset" in n._attributes:
            n.col_offset = col_offset
        for child in ast.iter_child_nodes(n):
            _apply(child)
    _apply(node)
    return node
class AssertionRewriter(ast.NodeVisitor):
    """Assertion rewriting implementation.
    The main entrypoint is to call .run() with an ast.Module instance,
    this will then find all the assert statements and re-write them to
    provide intermediate values and a detailed assertion error.  See
    http://pybites.blogspot.be/2011/07/behind-scenes-of-pytests-new-assertion.html
    for an overview of how this works.
    The entry point here is .run() which will iterate over all the
    statements in an ast.Module and for each ast.Assert statement it
    finds call .visit() with it.  Then .visit_Assert() takes over and
    is responsible for creating new ast statements to replace the
    original assert statement: it re-writes the test of an assertion
    to provide intermediate values and replace it with an if statement
    which raises an assertion error with a detailed explanation in
    case the expression is false.
    For this .visit_Assert() uses the visitor pattern to visit all the
    AST nodes of the ast.Assert.test field, each visit call returning
    an AST node and the corresponding explanation string.  During this
    state is kept in several instance attributes:
    :statements: All the AST statements which will replace the assert
       statement.
    :variables: This is populated by .variable() with each variable
       used by the statements so that they can all be set to None at
       the end of the statements.
    :variable_counter: Counter to create new unique variables needed
       by statements.  Variables are created using .variable() and
       have the form of "@py_assert0".
    :on_failure: The AST statements which will be executed if the
       assertion test fails.  This is the code which will construct
       the failure message and raises the AssertionError.
    :explanation_specifiers: A dict filled by .explanation_param()
       with %-formatting placeholders and their corresponding
       expressions to use in the building of an assertion message.
       This is used by .pop_format_context() to build a message.
    :stack: A stack of the explanation_specifiers dicts maintained by
       .push_format_context() and .pop_format_context() which allows
       to build another %-formatted string while already building one.
    This state is reset on every new assert statement visited and used
    by the other visitors.
    """
    def __init__(self, module_path, config):
        super(AssertionRewriter, self).__init__()
        self.module_path = module_path
        self.config = config
    def run(self, mod):
        """Find all assert statements in *mod* and rewrite them."""
        if not mod.body:
            # Nothing to do.
            return
        # Insert some special imports at the top of the module but after any
        # docstrings and __future__ imports.
        aliases = [ast.alias(py.builtin.builtins.__name__, "@py_builtins"),
                   ast.alias("_pytest.assertion.rewrite", "@pytest_ar")]
        expect_docstring = True
        pos = 0
        lineno = 0
        for item in mod.body:
            if (expect_docstring and isinstance(item, ast.Expr) and
                    isinstance(item.value, ast.Str)):
                doc = item.value.s
                if "PYTEST_DONT_REWRITE" in doc:
                    # The module has disabled assertion rewriting.
                    return
                lineno += len(doc) - 1
                expect_docstring = False
            elif (not isinstance(item, ast.ImportFrom) or item.level > 0 or
                  item.module != "__future__"):
                lineno = item.lineno
                break
            pos += 1
        imports = [ast.Import([alias], lineno=lineno, col_offset=0)
                   for alias in aliases]
        mod.body[pos:pos] = imports
        # Collect asserts.
        # Breadth-first walk over every statement list in the module,
        # replacing each ast.Assert with the statements .visit() returns.
        nodes = [mod]
        while nodes:
            node = nodes.pop()
            for name, field in ast.iter_fields(node):
                if isinstance(field, list):
                    new = []
                    for i, child in enumerate(field):
                        if isinstance(child, ast.Assert):
                            # Transform assert.
                            new.extend(self.visit(child))
                        else:
                            new.append(child)
                            if isinstance(child, ast.AST):
                                nodes.append(child)
                    setattr(node, name, new)
                elif (isinstance(field, ast.AST) and
                      # Don't recurse into expressions as they can't contain
                      # asserts.
                      not isinstance(field, ast.expr)):
                    nodes.append(field)
    def variable(self):
        """Get a new variable."""
        # Use a character invalid in python identifiers to avoid clashing.
        name = "@py_assert" + str(next(self.variable_counter))
        self.variables.append(name)
        return name
    def assign(self, expr):
        """Give *expr* a name."""
        name = self.variable()
        self.statements.append(ast.Assign([ast.Name(name, ast.Store())], expr))
        return ast.Name(name, ast.Load())
    def display(self, expr):
        """Call py.io.saferepr on the expression."""
        return self.helper("saferepr", expr)
    def helper(self, name, *args):
        """Call a helper in this module."""
        # Helpers live on the "@pytest_ar" alias inserted by run().
        py_name = ast.Name("@pytest_ar", ast.Load())
        attr = ast.Attribute(py_name, "_" + name, ast.Load())
        return ast_Call(attr, list(args), [])
    def builtin(self, name):
        """Return the builtin called *name*."""
        builtin_name = ast.Name("@py_builtins", ast.Load())
        return ast.Attribute(builtin_name, name, ast.Load())
    def explanation_param(self, expr):
        """Return a new named %-formatting placeholder for expr.
        This creates a %-formatting placeholder for expr in the
        current formatting context, e.g. ``%(py0)s``.  The placeholder
        and expr are placed in the current format context so that it
        can be used on the next call to .pop_format_context().
        """
        specifier = "py" + str(next(self.variable_counter))
        self.explanation_specifiers[specifier] = expr
        return "%(" + specifier + ")s"
    def push_format_context(self):
        """Create a new formatting context.
        The format context is used for when an explanation wants to
        have a variable value formatted in the assertion message.  In
        this case the value required can be added using
        .explanation_param().  Finally .pop_format_context() is used
        to format a string of %-formatted values as added by
        .explanation_param().
        """
        self.explanation_specifiers = {}
        self.stack.append(self.explanation_specifiers)
    def pop_format_context(self, expl_expr):
        """Format the %-formatted string with current format context.
        The expl_expr should be an ast.Str instance constructed from
        the %-placeholders created by .explanation_param().  This will
        add the required code to format said string to .on_failure and
        return the ast.Name instance of the formatted string.
        """
        current = self.stack.pop()
        if self.stack:
            self.explanation_specifiers = self.stack[-1]
        keys = [ast.Str(key) for key in current.keys()]
        format_dict = ast.Dict(keys, list(current.values()))
        form = ast.BinOp(expl_expr, ast.Mod(), format_dict)
        name = "@py_format" + str(next(self.variable_counter))
        self.on_failure.append(ast.Assign([ast.Name(name, ast.Store())], form))
        return ast.Name(name, ast.Load())
    def generic_visit(self, node):
        """Handle expressions we don't have custom code for."""
        assert isinstance(node, ast.expr)
        res = self.assign(node)
        return res, self.explanation_param(self.display(res))
    def visit_Assert(self, assert_):
        """Return the AST statements to replace the ast.Assert instance.
        This re-writes the test of an assertion to provide
        intermediate values and replace it with an if statement which
        raises an assertion error with a detailed explanation in case
        the expression is false.
        """
        if isinstance(assert_.test, ast.Tuple) and self.config is not None:
            # A non-empty tuple literal is always truthy; almost certainly
            # a mistaken "assert (x, msg)".
            fslocation = (self.module_path, assert_.lineno)
            self.config.warn('R1', 'assertion is always true, perhaps '
                             'remove parentheses?', fslocation=fslocation)
        # Reset the per-assert rewriting state (see the class docstring).
        self.statements = []
        self.variables = []
        self.variable_counter = itertools.count()
        self.stack = []
        self.on_failure = []
        self.push_format_context()
        # Rewrite assert into a bunch of statements.
        top_condition, explanation = self.visit(assert_.test)
        # Create failure message.
        body = self.on_failure
        negation = ast.UnaryOp(ast.Not(), top_condition)
        self.statements.append(ast.If(negation, body, []))
        if assert_.msg:
            assertmsg = self.helper('format_assertmsg', assert_.msg)
            explanation = "\n>assert " + explanation
        else:
            assertmsg = ast.Str("")
            explanation = "assert " + explanation
        template = ast.BinOp(assertmsg, ast.Add(), ast.Str(explanation))
        msg = self.pop_format_context(template)
        fmt = self.helper("format_explanation", msg)
        err_name = ast.Name("AssertionError", ast.Load())
        exc = ast_Call(err_name, [fmt], [])
        if sys.version_info[0] >= 3:
            raise_ = ast.Raise(exc, None)
        else:
            raise_ = ast.Raise(exc, None, None)
        body.append(raise_)
        # Clear temporary variables by setting them to None.
        if self.variables:
            variables = [ast.Name(name, ast.Store())
                         for name in self.variables]
            clear = ast.Assign(variables, _NameConstant(None))
            self.statements.append(clear)
        # Fix line numbers.
        for stmt in self.statements:
            set_location(stmt, assert_.lineno, assert_.col_offset)
        return self.statements
    def visit_Name(self, name):
        # Display the repr of the name if it's a local variable or
        # _should_repr_global_name() thinks it's acceptable.
        locs = ast_Call(self.builtin("locals"), [], [])
        inlocs = ast.Compare(ast.Str(name.id), [ast.In()], [locs])
        dorepr = self.helper("should_repr_global_name", name)
        test = ast.BoolOp(ast.Or(), [inlocs, dorepr])
        expr = ast.IfExp(test, self.display(name), ast.Str(name.id))
        return name, self.explanation_param(expr)
    def visit_BoolOp(self, boolop):
        # Rewrite and/or chains preserving short-circuiting: each operand
        # is evaluated inside an if-statement guarded by the previous one.
        res_var = self.variable()
        expl_list = self.assign(ast.List([], ast.Load()))
        app = ast.Attribute(expl_list, "append", ast.Load())
        is_or = int(isinstance(boolop.op, ast.Or))
        body = save = self.statements
        fail_save = self.on_failure
        levels = len(boolop.values) - 1
        self.push_format_context()
        # Process each operand, short-circuting if needed.
        for i, v in enumerate(boolop.values):
            if i:
                fail_inner = []
                # cond is set in a prior loop iteration below
                self.on_failure.append(ast.If(cond, fail_inner, []))  # noqa
                self.on_failure = fail_inner
            self.push_format_context()
            res, expl = self.visit(v)
            body.append(ast.Assign([ast.Name(res_var, ast.Store())], res))
            expl_format = self.pop_format_context(ast.Str(expl))
            call = ast_Call(app, [expl_format], [])
            self.on_failure.append(ast.Expr(call))
            if i < levels:
                cond = res
                if is_or:
                    cond = ast.UnaryOp(ast.Not(), cond)
                inner = []
                self.statements.append(ast.If(cond, inner, []))
                self.statements = body = inner
        self.statements = save
        self.on_failure = fail_save
        expl_template = self.helper("format_boolop", expl_list, ast.Num(is_or))
        expl = self.pop_format_context(expl_template)
        return ast.Name(res_var, ast.Load()), self.explanation_param(expl)
    def visit_UnaryOp(self, unary):
        # e.g. "not x" -> evaluate operand, apply operator, render "not %s".
        pattern = unary_map[unary.op.__class__]
        operand_res, operand_expl = self.visit(unary.operand)
        res = self.assign(ast.UnaryOp(unary.op, operand_res))
        return res, pattern % (operand_expl,)
    def visit_BinOp(self, binop):
        # Evaluate both operands, then the operation, rendering "(l op r)".
        symbol = binop_map[binop.op.__class__]
        left_expr, left_expl = self.visit(binop.left)
        right_expr, right_expl = self.visit(binop.right)
        explanation = "(%s %s %s)" % (left_expl, symbol, right_expl)
        res = self.assign(ast.BinOp(left_expr, binop.op, right_expr))
        return res, explanation
    def visit_Call_35(self, call):
        """
        visit `ast.Call` nodes on Python3.5 and after
        """
        new_func, func_expl = self.visit(call.func)
        arg_expls = []
        new_args = []
        new_kwargs = []
        for arg in call.args:
            res, expl = self.visit(arg)
            arg_expls.append(expl)
            new_args.append(res)
        for keyword in call.keywords:
            res, expl = self.visit(keyword.value)
            new_kwargs.append(ast.keyword(keyword.arg, res))
            if keyword.arg:
                arg_expls.append(keyword.arg + "=" + expl)
            else: ## **args have `arg` keywords with an .arg of None
                arg_expls.append("**" + expl)
        expl = "%s(%s)" % (func_expl, ', '.join(arg_expls))
        new_call = ast.Call(new_func, new_args, new_kwargs)
        res = self.assign(new_call)
        res_expl = self.explanation_param(self.display(res))
        outer_expl = "%s\n{%s = %s\n}" % (res_expl, res_expl, expl)
        return res, outer_expl
    def visit_Starred(self, starred):
        # From Python 3.5, a Starred node can appear in a function call
        res, expl = self.visit(starred.value)
        return starred, '*' + expl
    def visit_Call_legacy(self, call):
        """
        visit `ast.Call nodes on 3.4 and below`
        """
        new_func, func_expl = self.visit(call.func)
        arg_expls = []
        new_args = []
        new_kwargs = []
        new_star = new_kwarg = None
        for arg in call.args:
            res, expl = self.visit(arg)
            new_args.append(res)
            arg_expls.append(expl)
        for keyword in call.keywords:
            res, expl = self.visit(keyword.value)
            new_kwargs.append(ast.keyword(keyword.arg, res))
            arg_expls.append(keyword.arg + "=" + expl)
        if call.starargs:
            new_star, expl = self.visit(call.starargs)
            arg_expls.append("*" + expl)
        if call.kwargs:
            new_kwarg, expl = self.visit(call.kwargs)
            arg_expls.append("**" + expl)
        expl = "%s(%s)" % (func_expl, ', '.join(arg_expls))
        new_call = ast.Call(new_func, new_args, new_kwargs,
                            new_star, new_kwarg)
        res = self.assign(new_call)
        res_expl = self.explanation_param(self.display(res))
        outer_expl = "%s\n{%s = %s\n}" % (res_expl, res_expl, expl)
        return res, outer_expl
    # ast.Call signature changed on 3.5,
    # conditionally change which methods is named
    # visit_Call depending on Python version
    if sys.version_info >= (3, 5):
        visit_Call = visit_Call_35
    else:
        visit_Call = visit_Call_legacy
    def visit_Attribute(self, attr):
        # Only Load contexts are introspected; Store/Del fall through to
        # generic handling.
        if not isinstance(attr.ctx, ast.Load):
            return self.generic_visit(attr)
        value, value_expl = self.visit(attr.value)
        res = self.assign(ast.Attribute(value, attr.attr, ast.Load()))
        res_expl = self.explanation_param(self.display(res))
        pat = "%s\n{%s = %s.%s\n}"
        expl = pat % (res_expl, res_expl, value_expl, attr.attr)
        return res, expl
    def visit_Compare(self, comp):
        # Chained comparisons (a < b < c) are unrolled pairwise so that
        # each intermediate result and operand can be reported.
        self.push_format_context()
        left_res, left_expl = self.visit(comp.left)
        if isinstance(comp.left, (_ast.Compare, _ast.BoolOp)):
            left_expl = "({0})".format(left_expl)
        res_variables = [self.variable() for i in range(len(comp.ops))]
        load_names = [ast.Name(v, ast.Load()) for v in res_variables]
        store_names = [ast.Name(v, ast.Store()) for v in res_variables]
        it = zip(range(len(comp.ops)), comp.ops, comp.comparators)
        expls = []
        syms = []
        results = [left_res]
        for i, op, next_operand in it:
            next_res, next_expl = self.visit(next_operand)
            if isinstance(next_operand, (_ast.Compare, _ast.BoolOp)):
                next_expl = "({0})".format(next_expl)
            results.append(next_res)
            sym = binop_map[op.__class__]
            syms.append(ast.Str(sym))
            expl = "%s %s %s" % (left_expl, sym, next_expl)
            expls.append(ast.Str(expl))
            res_expr = ast.Compare(left_res, [op], [next_res])
            self.statements.append(ast.Assign([store_names[i]], res_expr))
            left_res, left_expl = next_res, next_expl
        # Use pytest.assertion.util._reprcompare if that's available.
        expl_call = self.helper("call_reprcompare",
                                ast.Tuple(syms, ast.Load()),
                                ast.Tuple(load_names, ast.Load()),
                                ast.Tuple(expls, ast.Load()),
                                ast.Tuple(results, ast.Load()))
        if len(comp.ops) > 1:
            res = ast.BoolOp(ast.And(), load_names)
        else:
            res = load_names[0]
        return res, self.explanation_param(self.pop_format_context(expl_call))
|
jaraco/pytest
|
_pytest/assertion/rewrite.py
|
Python
|
mit
| 36,408
|
[
"VisIt"
] |
eb323067c6c1fa56007179e82feaebc742ee2ed5bfdff1b103a1e8c0c2e52b69
|
# -*- coding: utf-8 -*-
# Form implementation generated from reading ui file 'ui/ui_semiautomaticclassificationplugin.ui'
#
# Created by: PyQt5 UI code generator 5.11.3
#
# WARNING! All changes made in this file will be lost!
from PyQt5 import QtCore, QtGui, QtWidgets
class Ui_SemiAutomaticClassificationPlugin(object):
def setupUi(self, SemiAutomaticClassificationPlugin):
SemiAutomaticClassificationPlugin.setObjectName("SemiAutomaticClassificationPlugin")
SemiAutomaticClassificationPlugin.setEnabled(True)
SemiAutomaticClassificationPlugin.resize(951, 529)
sizePolicy = QtWidgets.QSizePolicy(QtWidgets.QSizePolicy.Minimum, QtWidgets.QSizePolicy.Expanding)
sizePolicy.setHorizontalStretch(0)
sizePolicy.setVerticalStretch(0)
sizePolicy.setHeightForWidth(SemiAutomaticClassificationPlugin.sizePolicy().hasHeightForWidth())
SemiAutomaticClassificationPlugin.setSizePolicy(sizePolicy)
SemiAutomaticClassificationPlugin.setMinimumSize(QtCore.QSize(400, 400))
icon = QtGui.QIcon()
icon.addPixmap(QtGui.QPixmap(":/plugins/semiautomaticclassificationplugin/icons/semiautomaticclassificationplugin.svg"), QtGui.QIcon.Normal, QtGui.QIcon.Off)
SemiAutomaticClassificationPlugin.setWindowIcon(icon)
SemiAutomaticClassificationPlugin.setSizeGripEnabled(True)
self.gridLayout_301 = QtWidgets.QGridLayout(SemiAutomaticClassificationPlugin)
self.gridLayout_301.setContentsMargins(2, 2, 2, 2)
self.gridLayout_301.setObjectName("gridLayout_301")
self.splitter = QtWidgets.QSplitter(SemiAutomaticClassificationPlugin)
self.splitter.setFrameShape(QtWidgets.QFrame.NoFrame)
self.splitter.setOrientation(QtCore.Qt.Horizontal)
self.splitter.setChildrenCollapsible(False)
self.splitter.setObjectName("splitter")
self.widget = QtWidgets.QWidget(self.splitter)
self.widget.setMinimumSize(QtCore.QSize(50, 0))
self.widget.setMaximumSize(QtCore.QSize(250, 16777215))
self.widget.setObjectName("widget")
self.gridLayout_193 = QtWidgets.QGridLayout(self.widget)
self.gridLayout_193.setContentsMargins(1, 1, 1, 1)
self.gridLayout_193.setObjectName("gridLayout_193")
self.f_filter_lineEdit = QtWidgets.QLineEdit(self.widget)
sizePolicy = QtWidgets.QSizePolicy(QtWidgets.QSizePolicy.Preferred, QtWidgets.QSizePolicy.Fixed)
sizePolicy.setHorizontalStretch(0)
sizePolicy.setVerticalStretch(0)
sizePolicy.setHeightForWidth(self.f_filter_lineEdit.sizePolicy().hasHeightForWidth())
self.f_filter_lineEdit.setSizePolicy(sizePolicy)
self.f_filter_lineEdit.setMinimumSize(QtCore.QSize(100, 0))
self.f_filter_lineEdit.setObjectName("f_filter_lineEdit")
self.gridLayout_193.addWidget(self.f_filter_lineEdit, 0, 0, 1, 1)
self.menu_treeWidget = QtWidgets.QTreeWidget(self.widget)
sizePolicy = QtWidgets.QSizePolicy(QtWidgets.QSizePolicy.Preferred, QtWidgets.QSizePolicy.Expanding)
sizePolicy.setHorizontalStretch(0)
sizePolicy.setVerticalStretch(0)
sizePolicy.setHeightForWidth(self.menu_treeWidget.sizePolicy().hasHeightForWidth())
self.menu_treeWidget.setSizePolicy(sizePolicy)
self.menu_treeWidget.setMinimumSize(QtCore.QSize(100, 0))
self.menu_treeWidget.setFrameShape(QtWidgets.QFrame.StyledPanel)
self.menu_treeWidget.setFrameShadow(QtWidgets.QFrame.Sunken)
self.menu_treeWidget.setEditTriggers(QtWidgets.QAbstractItemView.CurrentChanged|QtWidgets.QAbstractItemView.DoubleClicked)
self.menu_treeWidget.setProperty("showDropIndicator", False)
self.menu_treeWidget.setAlternatingRowColors(True)
self.menu_treeWidget.setTextElideMode(QtCore.Qt.ElideNone)
self.menu_treeWidget.setIndentation(15)
self.menu_treeWidget.setRootIsDecorated(True)
self.menu_treeWidget.setHeaderHidden(True)
self.menu_treeWidget.setObjectName("menu_treeWidget")
item_0 = QtWidgets.QTreeWidgetItem(self.menu_treeWidget)
font = QtGui.QFont()
font.setBold(True)
font.setWeight(75)
item_0.setFont(0, font)
icon1 = QtGui.QIcon()
icon1.addPixmap(QtGui.QPixmap(":/plugins/semiautomaticclassificationplugin/icons/semiautomaticclassificationplugin_bandset_tool.svg"), QtGui.QIcon.Normal, QtGui.QIcon.Off)
item_0.setIcon(0, icon1)
item_0 = QtWidgets.QTreeWidgetItem(self.menu_treeWidget)
font = QtGui.QFont()
font.setBold(True)
font.setWeight(75)
item_0.setFont(0, font)
icon2 = QtGui.QIcon()
icon2.addPixmap(QtGui.QPixmap(":/plugins/semiautomaticclassificationplugin/icons/semiautomaticclassificationplugin_roi_tool.svg"), QtGui.QIcon.Normal, QtGui.QIcon.Off)
item_0.setIcon(0, icon2)
item_1 = QtWidgets.QTreeWidgetItem(item_0)
icon3 = QtGui.QIcon()
icon3.addPixmap(QtGui.QPixmap(":/plugins/semiautomaticclassificationplugin/icons/semiautomaticclassificationplugin_weight_tool.svg"), QtGui.QIcon.Normal, QtGui.QIcon.Off)
item_1.setIcon(0, icon3)
item_1 = QtWidgets.QTreeWidgetItem(item_0)
item_1.setIcon(0, icon1)
item_1 = QtWidgets.QTreeWidgetItem(item_0)
icon4 = QtGui.QIcon()
icon4.addPixmap(QtGui.QPixmap(":/plugins/semiautomaticclassificationplugin/icons/semiautomaticclassificationplugin_export_spectral_library.svg"), QtGui.QIcon.Normal, QtGui.QIcon.Off)
item_1.setIcon(0, icon4)
item_1 = QtWidgets.QTreeWidgetItem(item_0)
icon5 = QtGui.QIcon()
icon5.addPixmap(QtGui.QPixmap(":/plugins/semiautomaticclassificationplugin/icons/semiautomaticclassificationplugin_import_spectral_library.svg"), QtGui.QIcon.Normal, QtGui.QIcon.Off)
item_1.setIcon(0, icon5)
item_1 = QtWidgets.QTreeWidgetItem(item_0)
icon6 = QtGui.QIcon()
icon6.addPixmap(QtGui.QPixmap(":/plugins/semiautomaticclassificationplugin/icons/semiautomaticclassificationplugin_LCS_threshold_tool.svg"), QtGui.QIcon.Normal, QtGui.QIcon.Off)
item_1.setIcon(0, icon6)
item_1 = QtWidgets.QTreeWidgetItem(item_0)
icon7 = QtGui.QIcon()
icon7.addPixmap(QtGui.QPixmap(":/plugins/semiautomaticclassificationplugin/icons/semiautomaticclassificationplugin_roi_multiple.svg"), QtGui.QIcon.Normal, QtGui.QIcon.Off)
item_1.setIcon(0, icon7)
item_1 = QtWidgets.QTreeWidgetItem(item_0)
icon8 = QtGui.QIcon()
icon8.addPixmap(QtGui.QPixmap(":/plugins/semiautomaticclassificationplugin/icons/semiautomaticclassificationplugin_rgb_tool.svg"), QtGui.QIcon.Normal, QtGui.QIcon.Off)
item_1.setIcon(0, icon8)
item_1 = QtWidgets.QTreeWidgetItem(item_0)
icon9 = QtGui.QIcon()
icon9.addPixmap(QtGui.QPixmap(":/plugins/semiautomaticclassificationplugin/icons/semiautomaticclassificationplugin_threshold_tool.svg"), QtGui.QIcon.Normal, QtGui.QIcon.Off)
item_1.setIcon(0, icon9)
item_0 = QtWidgets.QTreeWidgetItem(self.menu_treeWidget)
font = QtGui.QFont()
font.setBold(True)
font.setWeight(75)
item_0.setFont(0, font)
icon10 = QtGui.QIcon()
icon10.addPixmap(QtGui.QPixmap(":/plugins/semiautomaticclassificationplugin/icons/semiautomaticclassificationplugin_download_arrow.svg"), QtGui.QIcon.Normal, QtGui.QIcon.Off)
item_0.setIcon(0, icon10)
item_0 = QtWidgets.QTreeWidgetItem(self.menu_treeWidget)
font = QtGui.QFont()
font.setBold(True)
font.setWeight(75)
item_0.setFont(0, font)
icon11 = QtGui.QIcon()
icon11.addPixmap(QtGui.QPixmap(":/plugins/semiautomaticclassificationplugin/icons/semiautomaticclassificationplugin_class_tool.svg"), QtGui.QIcon.Normal, QtGui.QIcon.Off)
item_0.setIcon(0, icon11)
item_1 = QtWidgets.QTreeWidgetItem(item_0)
icon12 = QtGui.QIcon()
icon12.addPixmap(QtGui.QPixmap(":/plugins/semiautomaticclassificationplugin/icons/semiautomaticclassificationplugin_aster_tool.svg"), QtGui.QIcon.Normal, QtGui.QIcon.Off)
item_1.setIcon(0, icon12)
item_1 = QtWidgets.QTreeWidgetItem(item_0)
icon13 = QtGui.QIcon()
icon13.addPixmap(QtGui.QPixmap(":/plugins/semiautomaticclassificationplugin/icons/semiautomaticclassificationplugin_goes_tool.svg"), QtGui.QIcon.Normal, QtGui.QIcon.Off)
item_1.setIcon(0, icon13)
item_1 = QtWidgets.QTreeWidgetItem(item_0)
icon14 = QtGui.QIcon()
icon14.addPixmap(QtGui.QPixmap(":/plugins/semiautomaticclassificationplugin/icons/semiautomaticclassificationplugin_landsat8_tool.svg"), QtGui.QIcon.Normal, QtGui.QIcon.Off)
item_1.setIcon(0, icon14)
item_1 = QtWidgets.QTreeWidgetItem(item_0)
icon15 = QtGui.QIcon()
icon15.addPixmap(QtGui.QPixmap(":/plugins/semiautomaticclassificationplugin/icons/semiautomaticclassificationplugin_modis_tool.svg"), QtGui.QIcon.Normal, QtGui.QIcon.Off)
item_1.setIcon(0, icon15)
item_1 = QtWidgets.QTreeWidgetItem(item_0)
icon16 = QtGui.QIcon()
icon16.addPixmap(QtGui.QPixmap(":/plugins/semiautomaticclassificationplugin/icons/semiautomaticclassificationplugin_sentinel1_tool.svg"), QtGui.QIcon.Normal, QtGui.QIcon.Off)
item_1.setIcon(0, icon16)
item_1 = QtWidgets.QTreeWidgetItem(item_0)
icon17 = QtGui.QIcon()
icon17.addPixmap(QtGui.QPixmap(":/plugins/semiautomaticclassificationplugin/icons/semiautomaticclassificationplugin_sentinel_tool.svg"), QtGui.QIcon.Normal, QtGui.QIcon.Off)
item_1.setIcon(0, icon17)
item_1 = QtWidgets.QTreeWidgetItem(item_0)
icon18 = QtGui.QIcon()
icon18.addPixmap(QtGui.QPixmap(":/plugins/semiautomaticclassificationplugin/icons/semiautomaticclassificationplugin_sentinel3_tool.svg"), QtGui.QIcon.Normal, QtGui.QIcon.Off)
item_1.setIcon(0, icon18)
item_1 = QtWidgets.QTreeWidgetItem(item_0)
icon19 = QtGui.QIcon()
icon19.addPixmap(QtGui.QPixmap(":/plugins/semiautomaticclassificationplugin/icons/semiautomaticclassificationplugin_clip_tool.svg"), QtGui.QIcon.Normal, QtGui.QIcon.Off)
item_1.setIcon(0, icon19)
item_1 = QtWidgets.QTreeWidgetItem(item_0)
icon20 = QtGui.QIcon()
icon20.addPixmap(QtGui.QPixmap(":/plugins/semiautomaticclassificationplugin/icons/semiautomaticclassificationplugin_cloud_masking_tool.svg"), QtGui.QIcon.Normal, QtGui.QIcon.Off)
item_1.setIcon(0, icon20)
item_1 = QtWidgets.QTreeWidgetItem(item_0)
icon21 = QtGui.QIcon()
icon21.addPixmap(QtGui.QPixmap(":/plugins/semiautomaticclassificationplugin/icons/semiautomaticclassificationplugin_mosaic_tool.svg"), QtGui.QIcon.Normal, QtGui.QIcon.Off)
item_1.setIcon(0, icon21)
item_1 = QtWidgets.QTreeWidgetItem(item_0)
icon22 = QtGui.QIcon()
icon22.addPixmap(QtGui.QPixmap(":/plugins/semiautomaticclassificationplugin/icons/semiautomaticclassificationplugin_neighbor_pixels.svg"), QtGui.QIcon.Normal, QtGui.QIcon.Off)
item_1.setIcon(0, icon22)
item_1 = QtWidgets.QTreeWidgetItem(item_0)
icon23 = QtGui.QIcon()
icon23.addPixmap(QtGui.QPixmap(":/plugins/semiautomaticclassificationplugin/icons/semiautomaticclassificationplugin_reproject_raster_bands.svg"), QtGui.QIcon.Normal, QtGui.QIcon.Off)
item_1.setIcon(0, icon23)
item_1 = QtWidgets.QTreeWidgetItem(item_0)
icon24 = QtGui.QIcon()
icon24.addPixmap(QtGui.QPixmap(":/plugins/semiautomaticclassificationplugin/icons/semiautomaticclassificationplugin_split_raster.svg"), QtGui.QIcon.Normal, QtGui.QIcon.Off)
item_1.setIcon(0, icon24)
item_1 = QtWidgets.QTreeWidgetItem(item_0)
icon25 = QtGui.QIcon()
icon25.addPixmap(QtGui.QPixmap(":/plugins/semiautomaticclassificationplugin/icons/semiautomaticclassificationplugin_stack_raster.svg"), QtGui.QIcon.Normal, QtGui.QIcon.Off)
item_1.setIcon(0, icon25)
item_1 = QtWidgets.QTreeWidgetItem(item_0)
icon26 = QtGui.QIcon()
icon26.addPixmap(QtGui.QPixmap(":/plugins/semiautomaticclassificationplugin/icons/semiautomaticclassificationplugin_vector_to_raster_tool.svg"), QtGui.QIcon.Normal, QtGui.QIcon.Off)
item_1.setIcon(0, icon26)
item_0 = QtWidgets.QTreeWidgetItem(self.menu_treeWidget)
font = QtGui.QFont()
font.setBold(True)
font.setWeight(75)
item_0.setFont(0, font)
icon27 = QtGui.QIcon()
icon27.addPixmap(QtGui.QPixmap(":/plugins/semiautomaticclassificationplugin/icons/semiautomaticclassificationplugin_band_processing.svg"), QtGui.QIcon.Normal, QtGui.QIcon.Off)
item_0.setIcon(0, icon27)
item_1 = QtWidgets.QTreeWidgetItem(item_0)
icon28 = QtGui.QIcon()
icon28.addPixmap(QtGui.QPixmap(":/plugins/semiautomaticclassificationplugin/icons/semiautomaticclassificationplugin_band_combination_tool.svg"), QtGui.QIcon.Normal, QtGui.QIcon.Off)
item_1.setIcon(0, icon28)
item_1 = QtWidgets.QTreeWidgetItem(item_0)
icon29 = QtGui.QIcon()
icon29.addPixmap(QtGui.QPixmap(":/plugins/semiautomaticclassificationplugin/icons/semiautomaticclassificationplugin_classification.svg"), QtGui.QIcon.Normal, QtGui.QIcon.Off)
item_1.setIcon(0, icon29)
item_1 = QtWidgets.QTreeWidgetItem(item_0)
icon30 = QtGui.QIcon()
icon30.addPixmap(QtGui.QPixmap(":/plugins/semiautomaticclassificationplugin/icons/semiautomaticclassificationplugin_kmeans_tool.svg"), QtGui.QIcon.Normal, QtGui.QIcon.Off)
item_1.setIcon(0, icon30)
item_1 = QtWidgets.QTreeWidgetItem(item_0)
icon31 = QtGui.QIcon()
icon31.addPixmap(QtGui.QPixmap(":/plugins/semiautomaticclassificationplugin/icons/semiautomaticclassificationplugin_pca_tool.svg"), QtGui.QIcon.Normal, QtGui.QIcon.Off)
item_1.setIcon(0, icon31)
item_1 = QtWidgets.QTreeWidgetItem(item_0)
icon32 = QtGui.QIcon()
icon32.addPixmap(QtGui.QPixmap(":/plugins/semiautomaticclassificationplugin/icons/semiautomaticclassificationplugin_random_forest.svg"), QtGui.QIcon.Normal, QtGui.QIcon.Off)
item_1.setIcon(0, icon32)
item_1 = QtWidgets.QTreeWidgetItem(item_0)
icon33 = QtGui.QIcon()
icon33.addPixmap(QtGui.QPixmap(":/plugins/semiautomaticclassificationplugin/icons/semiautomaticclassificationplugin_spectral_distance.svg"), QtGui.QIcon.Normal, QtGui.QIcon.Off)
item_1.setIcon(0, icon33)
item_0 = QtWidgets.QTreeWidgetItem(self.menu_treeWidget)
font = QtGui.QFont()
font.setBold(True)
font.setWeight(75)
item_0.setFont(0, font)
icon34 = QtGui.QIcon()
icon34.addPixmap(QtGui.QPixmap(":/plugins/semiautomaticclassificationplugin/icons/semiautomaticclassificationplugin_post_process.svg"), QtGui.QIcon.Normal, QtGui.QIcon.Off)
item_0.setIcon(0, icon34)
item_1 = QtWidgets.QTreeWidgetItem(item_0)
icon35 = QtGui.QIcon()
icon35.addPixmap(QtGui.QPixmap(":/plugins/semiautomaticclassificationplugin/icons/semiautomaticclassificationplugin_accuracy_tool.svg"), QtGui.QIcon.Normal, QtGui.QIcon.Off)
item_1.setIcon(0, icon35)
item_1 = QtWidgets.QTreeWidgetItem(item_0)
icon36 = QtGui.QIcon()
icon36.addPixmap(QtGui.QPixmap(":/plugins/semiautomaticclassificationplugin/icons/semiautomaticclassificationplugin_classification_dilation.svg"), QtGui.QIcon.Normal, QtGui.QIcon.Off)
item_1.setIcon(0, icon36)
item_1 = QtWidgets.QTreeWidgetItem(item_0)
icon37 = QtGui.QIcon()
icon37.addPixmap(QtGui.QPixmap(":/plugins/semiautomaticclassificationplugin/icons/semiautomaticclassificationplugin_classification_erosion.svg"), QtGui.QIcon.Normal, QtGui.QIcon.Off)
item_1.setIcon(0, icon37)
item_1 = QtWidgets.QTreeWidgetItem(item_0)
icon38 = QtGui.QIcon()
icon38.addPixmap(QtGui.QPixmap(":/plugins/semiautomaticclassificationplugin/icons/semiautomaticclassificationplugin_report_tool.svg"), QtGui.QIcon.Normal, QtGui.QIcon.Off)
item_1.setIcon(0, icon38)
item_1 = QtWidgets.QTreeWidgetItem(item_0)
icon39 = QtGui.QIcon()
icon39.addPixmap(QtGui.QPixmap(":/plugins/semiautomaticclassificationplugin/icons/semiautomaticclassificationplugin_class_to_vector_tool.svg"), QtGui.QIcon.Normal, QtGui.QIcon.Off)
item_1.setIcon(0, icon39)
item_1 = QtWidgets.QTreeWidgetItem(item_0)
icon40 = QtGui.QIcon()
icon40.addPixmap(QtGui.QPixmap(":/plugins/semiautomaticclassificationplugin/icons/semiautomaticclassificationplugin_classification_sieve.svg"), QtGui.QIcon.Normal, QtGui.QIcon.Off)
item_1.setIcon(0, icon40)
item_1 = QtWidgets.QTreeWidgetItem(item_0)
icon41 = QtGui.QIcon()
icon41.addPixmap(QtGui.QPixmap(":/plugins/semiautomaticclassificationplugin/icons/semiautomaticclassificationplugin_class_signature_tool.svg"), QtGui.QIcon.Normal, QtGui.QIcon.Off)
item_1.setIcon(0, icon41)
item_1 = QtWidgets.QTreeWidgetItem(item_0)
icon42 = QtGui.QIcon()
icon42.addPixmap(QtGui.QPixmap(":/plugins/semiautomaticclassificationplugin/icons/semiautomaticclassificationplugin_cross_classification.svg"), QtGui.QIcon.Normal, QtGui.QIcon.Off)
item_1.setIcon(0, icon42)
item_1 = QtWidgets.QTreeWidgetItem(item_0)
icon43 = QtGui.QIcon()
icon43.addPixmap(QtGui.QPixmap(":/plugins/semiautomaticclassificationplugin/icons/semiautomaticclassificationplugin_edit_raster.svg"), QtGui.QIcon.Normal, QtGui.QIcon.Off)
item_1.setIcon(0, icon43)
item_1 = QtWidgets.QTreeWidgetItem(item_0)
icon44 = QtGui.QIcon()
icon44.addPixmap(QtGui.QPixmap(":/plugins/semiautomaticclassificationplugin/icons/semiautomaticclassificationplugin_land_cover_change.svg"), QtGui.QIcon.Normal, QtGui.QIcon.Off)
item_1.setIcon(0, icon44)
item_1 = QtWidgets.QTreeWidgetItem(item_0)
icon45 = QtGui.QIcon()
icon45.addPixmap(QtGui.QPixmap(":/plugins/semiautomaticclassificationplugin/icons/semiautomaticclassificationplugin_reclassification_tool.svg"), QtGui.QIcon.Normal, QtGui.QIcon.Off)
item_1.setIcon(0, icon45)
item_1 = QtWidgets.QTreeWidgetItem(item_0)
icon46 = QtGui.QIcon()
icon46.addPixmap(QtGui.QPixmap(":/plugins/semiautomaticclassificationplugin/icons/semiautomaticclassificationplugin_zonal_stat_raster_tool.svg"), QtGui.QIcon.Normal, QtGui.QIcon.Off)
item_1.setIcon(0, icon46)
item_0 = QtWidgets.QTreeWidgetItem(self.menu_treeWidget)
font = QtGui.QFont()
font.setBold(True)
font.setWeight(75)
item_0.setFont(0, font)
icon47 = QtGui.QIcon()
icon47.addPixmap(QtGui.QPixmap(":/plugins/semiautomaticclassificationplugin/icons/semiautomaticclassificationplugin_bandcalc_tool.svg"), QtGui.QIcon.Normal, QtGui.QIcon.Off)
item_0.setIcon(0, icon47)
item_0 = QtWidgets.QTreeWidgetItem(self.menu_treeWidget)
font = QtGui.QFont()
font.setBold(True)
font.setWeight(75)
item_0.setFont(0, font)
icon48 = QtGui.QIcon()
icon48.addPixmap(QtGui.QPixmap(":/plugins/semiautomaticclassificationplugin/icons/semiautomaticclassificationplugin_batch.svg"), QtGui.QIcon.Normal, QtGui.QIcon.Off)
item_0.setIcon(0, icon48)
item_0 = QtWidgets.QTreeWidgetItem(self.menu_treeWidget)
font = QtGui.QFont()
font.setBold(True)
font.setWeight(75)
item_0.setFont(0, font)
icon49 = QtGui.QIcon()
icon49.addPixmap(QtGui.QPixmap(":/plugins/semiautomaticclassificationplugin/icons/semiautomaticclassificationplugin_settings_tool.svg"), QtGui.QIcon.Normal, QtGui.QIcon.Off)
item_0.setIcon(0, icon49)
item_1 = QtWidgets.QTreeWidgetItem(item_0)
item_1 = QtWidgets.QTreeWidgetItem(item_0)
item_1 = QtWidgets.QTreeWidgetItem(item_0)
item_0 = QtWidgets.QTreeWidgetItem(self.menu_treeWidget)
icon50 = QtGui.QIcon()
icon50.addPixmap(QtGui.QPixmap(":/plugins/semiautomaticclassificationplugin/icons/guide.svg"), QtGui.QIcon.Normal, QtGui.QIcon.Off)
item_0.setIcon(0, icon50)
item_0 = QtWidgets.QTreeWidgetItem(self.menu_treeWidget)
icon51 = QtGui.QIcon()
icon51.addPixmap(QtGui.QPixmap(":/plugins/semiautomaticclassificationplugin/icons/help.svg"), QtGui.QIcon.Normal, QtGui.QIcon.Off)
item_0.setIcon(0, icon51)
item_0 = QtWidgets.QTreeWidgetItem(self.menu_treeWidget)
font = QtGui.QFont()
font.setBold(True)
font.setWeight(75)
item_0.setFont(0, font)
icon52 = QtGui.QIcon()
icon52.addPixmap(QtGui.QPixmap(":/plugins/semiautomaticclassificationplugin/icons/fromGIStoRS.png"), QtGui.QIcon.Normal, QtGui.QIcon.Off)
item_0.setIcon(0, icon52)
item_0 = QtWidgets.QTreeWidgetItem(self.menu_treeWidget)
font = QtGui.QFont()
font.setBold(True)
font.setWeight(75)
font.setStrikeOut(False)
item_0.setFont(0, font)
brush = QtGui.QBrush(QtGui.QColor(92, 184, 92))
brush.setStyle(QtCore.Qt.SolidPattern)
item_0.setBackground(0, brush)
brush = QtGui.QBrush(QtGui.QColor(255, 255, 255))
brush.setStyle(QtCore.Qt.NoBrush)
item_0.setForeground(0, brush)
item_0.setIcon(0, icon)
self.gridLayout_193.addWidget(self.menu_treeWidget, 1, 0, 1, 1)
self.main_tabWidget = QtWidgets.QTabWidget(self.splitter)
sizePolicy = QtWidgets.QSizePolicy(QtWidgets.QSizePolicy.Preferred, QtWidgets.QSizePolicy.Minimum)
sizePolicy.setHorizontalStretch(0)
sizePolicy.setVerticalStretch(0)
sizePolicy.setHeightForWidth(self.main_tabWidget.sizePolicy().hasHeightForWidth())
self.main_tabWidget.setSizePolicy(sizePolicy)
self.main_tabWidget.setMinimumSize(QtCore.QSize(400, 0))
self.main_tabWidget.setTabPosition(QtWidgets.QTabWidget.East)
self.main_tabWidget.setTabShape(QtWidgets.QTabWidget.Rounded)
self.main_tabWidget.setIconSize(QtCore.QSize(12, 12))
self.main_tabWidget.setDocumentMode(True)
self.main_tabWidget.setObjectName("main_tabWidget")
self.tool_tab = QtWidgets.QWidget()
self.tool_tab.setObjectName("tool_tab")
self.gridLayout_262 = QtWidgets.QGridLayout(self.tool_tab)
self.gridLayout_262.setObjectName("gridLayout_262")
self.SCP_tabs = QtWidgets.QTabWidget(self.tool_tab)
sizePolicy = QtWidgets.QSizePolicy(QtWidgets.QSizePolicy.Expanding, QtWidgets.QSizePolicy.Expanding)
sizePolicy.setHorizontalStretch(0)
sizePolicy.setVerticalStretch(0)
sizePolicy.setHeightForWidth(self.SCP_tabs.sizePolicy().hasHeightForWidth())
self.SCP_tabs.setSizePolicy(sizePolicy)
self.SCP_tabs.setLocale(QtCore.QLocale(QtCore.QLocale.English, QtCore.QLocale.UnitedStates))
self.SCP_tabs.setIconSize(QtCore.QSize(20, 20))
self.SCP_tabs.setDocumentMode(True)
self.SCP_tabs.setObjectName("SCP_tabs")
self.tab_band_set = QtWidgets.QWidget()
self.tab_band_set.setObjectName("tab_band_set")
self.gridLayout_219 = QtWidgets.QGridLayout(self.tab_band_set)
self.gridLayout_219.setObjectName("gridLayout_219")
self.gridLayout_52 = QtWidgets.QGridLayout()
self.gridLayout_52.setObjectName("gridLayout_52")
self.label_59 = QtWidgets.QLabel(self.tab_band_set)
sizePolicy = QtWidgets.QSizePolicy(QtWidgets.QSizePolicy.Maximum, QtWidgets.QSizePolicy.Preferred)
sizePolicy.setHorizontalStretch(0)
sizePolicy.setVerticalStretch(0)
sizePolicy.setHeightForWidth(self.label_59.sizePolicy().hasHeightForWidth())
self.label_59.setSizePolicy(sizePolicy)
self.label_59.setFrameShadow(QtWidgets.QFrame.Sunken)
self.label_59.setAlignment(QtCore.Qt.AlignLeading|QtCore.Qt.AlignLeft|QtCore.Qt.AlignVCenter)
self.label_59.setWordWrap(False)
self.label_59.setObjectName("label_59")
self.gridLayout_52.addWidget(self.label_59, 0, 0, 1, 1)
self.wavelength_sat_combo = QtWidgets.QComboBox(self.tab_band_set)
self.wavelength_sat_combo.setObjectName("wavelength_sat_combo")
self.gridLayout_52.addWidget(self.wavelength_sat_combo, 0, 1, 1, 1)
self.gridLayout_50 = QtWidgets.QGridLayout()
self.gridLayout_50.setObjectName("gridLayout_50")
self.label_60 = QtWidgets.QLabel(self.tab_band_set)
sizePolicy = QtWidgets.QSizePolicy(QtWidgets.QSizePolicy.Maximum, QtWidgets.QSizePolicy.Preferred)
sizePolicy.setHorizontalStretch(0)
sizePolicy.setVerticalStretch(0)
sizePolicy.setHeightForWidth(self.label_60.sizePolicy().hasHeightForWidth())
self.label_60.setSizePolicy(sizePolicy)
self.label_60.setFrameShadow(QtWidgets.QFrame.Sunken)
self.label_60.setAlignment(QtCore.Qt.AlignLeading|QtCore.Qt.AlignLeft|QtCore.Qt.AlignVCenter)
self.label_60.setObjectName("label_60")
self.gridLayout_50.addWidget(self.label_60, 0, 0, 1, 1)
self.unit_combo = QtWidgets.QComboBox(self.tab_band_set)
self.unit_combo.setMinimumSize(QtCore.QSize(100, 0))
self.unit_combo.setObjectName("unit_combo")
self.gridLayout_50.addWidget(self.unit_combo, 0, 1, 1, 1)
self.export_bandset_toolButton = QtWidgets.QToolButton(self.tab_band_set)
self.export_bandset_toolButton.setStyleSheet("margin: 0px;padding: 0px")
icon53 = QtGui.QIcon()
icon53.addPixmap(QtGui.QPixmap(":/plugins/semiautomaticclassificationplugin/icons/semiautomaticclassificationplugin_export.svg"), QtGui.QIcon.Normal, QtGui.QIcon.Off)
self.export_bandset_toolButton.setIcon(icon53)
self.export_bandset_toolButton.setIconSize(QtCore.QSize(22, 22))
self.export_bandset_toolButton.setObjectName("export_bandset_toolButton")
self.gridLayout_50.addWidget(self.export_bandset_toolButton, 0, 5, 1, 1)
self.import_bandset_toolButton = QtWidgets.QToolButton(self.tab_band_set)
self.import_bandset_toolButton.setStyleSheet("margin: 0px;padding: 0px")
icon54 = QtGui.QIcon()
icon54.addPixmap(QtGui.QPixmap(":/plugins/semiautomaticclassificationplugin/icons/semiautomaticclassificationplugin_import.svg"), QtGui.QIcon.Normal, QtGui.QIcon.Off)
self.import_bandset_toolButton.setIcon(icon54)
self.import_bandset_toolButton.setIconSize(QtCore.QSize(22, 22))
self.import_bandset_toolButton.setObjectName("import_bandset_toolButton")
self.gridLayout_50.addWidget(self.import_bandset_toolButton, 0, 4, 1, 1)
self.bandset_dateEdit = QtWidgets.QDateEdit(self.tab_band_set)
self.bandset_dateEdit.setDateTime(QtCore.QDateTime(QtCore.QDate(2020, 1, 1), QtCore.QTime(0, 0, 0)))
self.bandset_dateEdit.setMaximumDate(QtCore.QDate(2045, 12, 31))
self.bandset_dateEdit.setMinimumDate(QtCore.QDate(1972, 1, 1))
self.bandset_dateEdit.setCalendarPopup(True)
self.bandset_dateEdit.setDate(QtCore.QDate(2020, 1, 1))
self.bandset_dateEdit.setObjectName("bandset_dateEdit")
self.gridLayout_50.addWidget(self.bandset_dateEdit, 0, 3, 1, 1)
self.label_3 = QtWidgets.QLabel(self.tab_band_set)
self.label_3.setObjectName("label_3")
self.gridLayout_50.addWidget(self.label_3, 0, 2, 1, 1)
self.gridLayout_52.addLayout(self.gridLayout_50, 0, 2, 1, 1)
self.gridLayout_219.addLayout(self.gridLayout_52, 2, 0, 1, 2)
self.splitter_3 = QtWidgets.QSplitter(self.tab_band_set)
self.splitter_3.setOrientation(QtCore.Qt.Vertical)
self.splitter_3.setChildrenCollapsible(False)
self.splitter_3.setObjectName("splitter_3")
self.widget_5 = QtWidgets.QWidget(self.splitter_3)
sizePolicy = QtWidgets.QSizePolicy(QtWidgets.QSizePolicy.Preferred, QtWidgets.QSizePolicy.MinimumExpanding)
sizePolicy.setHorizontalStretch(0)
sizePolicy.setVerticalStretch(0)
sizePolicy.setHeightForWidth(self.widget_5.sizePolicy().hasHeightForWidth())
self.widget_5.setSizePolicy(sizePolicy)
self.widget_5.setMinimumSize(QtCore.QSize(0, 50))
self.widget_5.setObjectName("widget_5")
self.gridLayout_203 = QtWidgets.QGridLayout(self.widget_5)
self.gridLayout_203.setContentsMargins(1, 1, 1, 1)
self.gridLayout_203.setObjectName("gridLayout_203")
self.gridLayout_59 = QtWidgets.QGridLayout()
self.gridLayout_59.setObjectName("gridLayout_59")
spacerItem = QtWidgets.QSpacerItem(20, 40, QtWidgets.QSizePolicy.Minimum, QtWidgets.QSizePolicy.Expanding)
self.gridLayout_59.addItem(spacerItem, 2, 1, 1, 1)
self.label_52 = QtWidgets.QLabel(self.widget_5)
sizePolicy = QtWidgets.QSizePolicy(QtWidgets.QSizePolicy.Preferred, QtWidgets.QSizePolicy.Maximum)
sizePolicy.setHorizontalStretch(0)
sizePolicy.setVerticalStretch(0)
sizePolicy.setHeightForWidth(self.label_52.sizePolicy().hasHeightForWidth())
self.label_52.setSizePolicy(sizePolicy)
self.label_52.setStyleSheet("background-color : #656565; color : white")
self.label_52.setFrameShape(QtWidgets.QFrame.Panel)
self.label_52.setFrameShadow(QtWidgets.QFrame.Sunken)
self.label_52.setObjectName("label_52")
self.gridLayout_59.addWidget(self.label_52, 0, 0, 1, 1)
self.bands_filter_lineEdit = QtWidgets.QLineEdit(self.widget_5)
sizePolicy = QtWidgets.QSizePolicy(QtWidgets.QSizePolicy.Minimum, QtWidgets.QSizePolicy.Fixed)
sizePolicy.setHorizontalStretch(0)
sizePolicy.setVerticalStretch(0)
sizePolicy.setHeightForWidth(self.bands_filter_lineEdit.sizePolicy().hasHeightForWidth())
self.bands_filter_lineEdit.setSizePolicy(sizePolicy)
self.bands_filter_lineEdit.setObjectName("bands_filter_lineEdit")
self.gridLayout_59.addWidget(self.bands_filter_lineEdit, 0, 1, 1, 1)
self.gridLayout_69 = QtWidgets.QGridLayout()
self.gridLayout_69.setObjectName("gridLayout_69")
self.bands_tableWidget = QtWidgets.QTableWidget(self.widget_5)
sizePolicy = QtWidgets.QSizePolicy(QtWidgets.QSizePolicy.Expanding, QtWidgets.QSizePolicy.MinimumExpanding)
sizePolicy.setHorizontalStretch(0)
sizePolicy.setVerticalStretch(0)
sizePolicy.setHeightForWidth(self.bands_tableWidget.sizePolicy().hasHeightForWidth())
self.bands_tableWidget.setSizePolicy(sizePolicy)
self.bands_tableWidget.setMinimumSize(QtCore.QSize(0, 30))
self.bands_tableWidget.setFrameShadow(QtWidgets.QFrame.Sunken)
self.bands_tableWidget.setEditTriggers(QtWidgets.QAbstractItemView.NoEditTriggers)
self.bands_tableWidget.setTabKeyNavigation(True)
self.bands_tableWidget.setSelectionBehavior(QtWidgets.QAbstractItemView.SelectRows)
self.bands_tableWidget.setObjectName("bands_tableWidget")
self.bands_tableWidget.setColumnCount(1)
self.bands_tableWidget.setRowCount(0)
item = QtWidgets.QTableWidgetItem()
self.bands_tableWidget.setHorizontalHeaderItem(0, item)
self.bands_tableWidget.horizontalHeader().setVisible(False)
self.bands_tableWidget.horizontalHeader().setStretchLastSection(True)
self.bands_tableWidget.verticalHeader().setVisible(False)
self.gridLayout_69.addWidget(self.bands_tableWidget, 0, 0, 1, 1)
self.gridLayout_60 = QtWidgets.QGridLayout()
self.gridLayout_60.setObjectName("gridLayout_60")
self.toolButton_reload_3 = QtWidgets.QToolButton(self.widget_5)
self.toolButton_reload_3.setStyleSheet("margin: 0px;padding: 0px")
icon55 = QtGui.QIcon()
icon55.addPixmap(QtGui.QPixmap(":/plugins/semiautomaticclassificationplugin/icons/semiautomaticclassificationplugin_reload.svg"), QtGui.QIcon.Normal, QtGui.QIcon.Off)
self.toolButton_reload_3.setIcon(icon55)
self.toolButton_reload_3.setIconSize(QtCore.QSize(22, 22))
self.toolButton_reload_3.setObjectName("toolButton_reload_3")
self.gridLayout_60.addWidget(self.toolButton_reload_3, 1, 0, 1, 1)
self.select_all_bands_Button = QtWidgets.QToolButton(self.widget_5)
self.select_all_bands_Button.setStyleSheet("margin: 0px;padding: 0px;")
icon56 = QtGui.QIcon()
icon56.addPixmap(QtGui.QPixmap(":/plugins/semiautomaticclassificationplugin/icons/semiautomaticclassificationplugin_select_all.svg"), QtGui.QIcon.Normal, QtGui.QIcon.Off)
self.select_all_bands_Button.setIcon(icon56)
self.select_all_bands_Button.setIconSize(QtCore.QSize(22, 22))
self.select_all_bands_Button.setObjectName("select_all_bands_Button")
self.gridLayout_60.addWidget(self.select_all_bands_Button, 2, 0, 1, 1)
self.add_raster_bands_Button = QtWidgets.QToolButton(self.widget_5)
self.add_raster_bands_Button.setStyleSheet("margin: 0px;padding: 0px")
icon57 = QtGui.QIcon()
icon57.addPixmap(QtGui.QPixmap(":/plugins/semiautomaticclassificationplugin/icons/semiautomaticclassificationplugin_plus.svg"), QtGui.QIcon.Normal, QtGui.QIcon.Off)
self.add_raster_bands_Button.setIcon(icon57)
self.add_raster_bands_Button.setIconSize(QtCore.QSize(22, 22))
self.add_raster_bands_Button.setObjectName("add_raster_bands_Button")
self.gridLayout_60.addWidget(self.add_raster_bands_Button, 3, 0, 1, 1)
self.gridLayout_69.addLayout(self.gridLayout_60, 0, 1, 1, 1)
self.gridLayout_59.addLayout(self.gridLayout_69, 1, 0, 2, 2)
self.gridLayout_203.addLayout(self.gridLayout_59, 0, 0, 1, 1)
self.widget_6 = QtWidgets.QWidget(self.splitter_3)
sizePolicy = QtWidgets.QSizePolicy(QtWidgets.QSizePolicy.Preferred, QtWidgets.QSizePolicy.MinimumExpanding)
sizePolicy.setHorizontalStretch(0)
sizePolicy.setVerticalStretch(0)
sizePolicy.setHeightForWidth(self.widget_6.sizePolicy().hasHeightForWidth())
self.widget_6.setSizePolicy(sizePolicy)
self.widget_6.setMinimumSize(QtCore.QSize(0, 50))
self.widget_6.setObjectName("widget_6")
self.gridLayout_217 = QtWidgets.QGridLayout(self.widget_6)
self.gridLayout_217.setContentsMargins(1, 1, 1, 1)
self.gridLayout_217.setObjectName("gridLayout_217")
self.gridLayout_11 = QtWidgets.QGridLayout()
self.gridLayout_11.setObjectName("gridLayout_11")
self.gridLayout_42 = QtWidgets.QGridLayout()
self.gridLayout_42.setObjectName("gridLayout_42")
self.label_53 = QtWidgets.QLabel(self.widget_6)
sizePolicy = QtWidgets.QSizePolicy(QtWidgets.QSizePolicy.Preferred, QtWidgets.QSizePolicy.Maximum)
sizePolicy.setHorizontalStretch(0)
sizePolicy.setVerticalStretch(0)
sizePolicy.setHeightForWidth(self.label_53.sizePolicy().hasHeightForWidth())
self.label_53.setSizePolicy(sizePolicy)
self.label_53.setStyleSheet("background-color : #656565; color : white")
self.label_53.setFrameShape(QtWidgets.QFrame.Panel)
self.label_53.setFrameShadow(QtWidgets.QFrame.Sunken)
self.label_53.setAlignment(QtCore.Qt.AlignLeading|QtCore.Qt.AlignLeft|QtCore.Qt.AlignVCenter)
self.label_53.setObjectName("label_53")
self.gridLayout_42.addWidget(self.label_53, 0, 0, 1, 1)
self.gridLayout_11.addLayout(self.gridLayout_42, 0, 0, 1, 2)
self.Band_set_tabWidget = QtWidgets.QTabWidget(self.widget_6)
sizePolicy = QtWidgets.QSizePolicy(QtWidgets.QSizePolicy.Expanding, QtWidgets.QSizePolicy.MinimumExpanding)
sizePolicy.setHorizontalStretch(0)
sizePolicy.setVerticalStretch(0)
sizePolicy.setHeightForWidth(self.Band_set_tabWidget.sizePolicy().hasHeightForWidth())
self.Band_set_tabWidget.setSizePolicy(sizePolicy)
self.Band_set_tabWidget.setMinimumSize(QtCore.QSize(0, 20))
self.Band_set_tabWidget.setStyleSheet("QTabBar::tab {\n"
"min-height: 30px;\n"
"min-width: 120px;\n"
"}\n"
"QTabBar::tab:selected { font: bold; color: green; }")
self.Band_set_tabWidget.setTabPosition(QtWidgets.QTabWidget.North)
self.Band_set_tabWidget.setDocumentMode(True)
self.Band_set_tabWidget.setTabsClosable(True)
self.Band_set_tabWidget.setMovable(True)
self.Band_set_tabWidget.setObjectName("Band_set_tabWidget")
self.gridLayout_11.addWidget(self.Band_set_tabWidget, 1, 0, 1, 1)
self.gridLayout_65 = QtWidgets.QGridLayout()
self.gridLayout_65.setObjectName("gridLayout_65")
self.gridLayout_169 = QtWidgets.QGridLayout()
self.gridLayout_169.setObjectName("gridLayout_169")
self.remove_toolButton = QtWidgets.QToolButton(self.widget_6)
self.remove_toolButton.setStyleSheet("margin: 0px;padding: 0px")
icon58 = QtGui.QIcon()
icon58.addPixmap(QtGui.QPixmap(":/plugins/semiautomaticclassificationplugin/icons/semiautomaticclassificationplugin_remove.svg"), QtGui.QIcon.Normal, QtGui.QIcon.Off)
self.remove_toolButton.setIcon(icon58)
self.remove_toolButton.setIconSize(QtCore.QSize(22, 22))
self.remove_toolButton.setObjectName("remove_toolButton")
self.gridLayout_169.addWidget(self.remove_toolButton, 0, 0, 1, 1)
self.clear_bandset_toolButton = QtWidgets.QToolButton(self.widget_6)
self.clear_bandset_toolButton.setStyleSheet("margin: 0px;padding: 0px;")
icon59 = QtGui.QIcon()
icon59.addPixmap(QtGui.QPixmap(":/plugins/semiautomaticclassificationplugin/icons/semiautomaticclassificationplugin_reset.svg"), QtGui.QIcon.Normal, QtGui.QIcon.Off)
self.clear_bandset_toolButton.setIcon(icon59)
self.clear_bandset_toolButton.setIconSize(QtCore.QSize(22, 22))
self.clear_bandset_toolButton.setObjectName("clear_bandset_toolButton")
self.gridLayout_169.addWidget(self.clear_bandset_toolButton, 1, 0, 1, 1)
self.gridLayout_65.addLayout(self.gridLayout_169, 1, 0, 1, 2)
self.gridLayout_127 = QtWidgets.QGridLayout()
self.gridLayout_127.setObjectName("gridLayout_127")
self.sort_by_name_toolButton = QtWidgets.QToolButton(self.widget_6)
self.sort_by_name_toolButton.setStyleSheet("margin: 0px;padding: 0px;")
icon60 = QtGui.QIcon()
icon60.addPixmap(QtGui.QPixmap(":/plugins/semiautomaticclassificationplugin/icons/semiautomaticclassificationplugin_order_by_name.svg"), QtGui.QIcon.Normal, QtGui.QIcon.Off)
self.sort_by_name_toolButton.setIcon(icon60)
self.sort_by_name_toolButton.setIconSize(QtCore.QSize(22, 22))
self.sort_by_name_toolButton.setObjectName("sort_by_name_toolButton")
self.gridLayout_127.addWidget(self.sort_by_name_toolButton, 3, 0, 1, 1)
self.move_up_toolButton = QtWidgets.QToolButton(self.widget_6)
self.move_up_toolButton.setStyleSheet("margin: 0px;padding: 0px;")
icon61 = QtGui.QIcon()
icon61.addPixmap(QtGui.QPixmap(":/plugins/semiautomaticclassificationplugin/icons/semiautomaticclassificationplugin_move_up.svg"), QtGui.QIcon.Normal, QtGui.QIcon.Off)
self.move_up_toolButton.setIcon(icon61)
self.move_up_toolButton.setIconSize(QtCore.QSize(22, 22))
self.move_up_toolButton.setObjectName("move_up_toolButton")
self.gridLayout_127.addWidget(self.move_up_toolButton, 1, 0, 1, 1)
self.move_down_toolButton = QtWidgets.QToolButton(self.widget_6)
self.move_down_toolButton.setStyleSheet("margin: 0px;padding: 0px;")
icon62 = QtGui.QIcon()
icon62.addPixmap(QtGui.QPixmap(":/plugins/semiautomaticclassificationplugin/icons/semiautomaticclassificationplugin_move_down.svg"), QtGui.QIcon.Normal, QtGui.QIcon.Off)
self.move_down_toolButton.setIcon(icon62)
self.move_down_toolButton.setIconSize(QtCore.QSize(22, 22))
self.move_down_toolButton.setObjectName("move_down_toolButton")
self.gridLayout_127.addWidget(self.move_down_toolButton, 2, 0, 1, 1)
self.add_band_set_toolButton = QtWidgets.QToolButton(self.widget_6)
self.add_band_set_toolButton.setStyleSheet("margin: 0px;padding: 0px")
icon63 = QtGui.QIcon()
icon63.addPixmap(QtGui.QPixmap(":/plugins/semiautomaticclassificationplugin/icons/semiautomaticclassificationplugin_add_bandset_tool.svg"), QtGui.QIcon.Normal, QtGui.QIcon.Off)
self.add_band_set_toolButton.setIcon(icon63)
self.add_band_set_toolButton.setIconSize(QtCore.QSize(22, 22))
self.add_band_set_toolButton.setObjectName("add_band_set_toolButton")
self.gridLayout_127.addWidget(self.add_band_set_toolButton, 0, 0, 1, 1)
self.gridLayout_65.addLayout(self.gridLayout_127, 0, 0, 1, 2)
self.gridLayout_11.addLayout(self.gridLayout_65, 1, 1, 1, 1)
self.gridLayout_217.addLayout(self.gridLayout_11, 0, 0, 1, 1)
self.gridLayout_219.addWidget(self.splitter_3, 1, 0, 1, 2)
self.gridLayout_53 = QtWidgets.QGridLayout()
self.gridLayout_53.setObjectName("gridLayout_53")
self.virtual_raster_bandset_checkBox = QtWidgets.QCheckBox(self.tab_band_set)
self.virtual_raster_bandset_checkBox.setObjectName("virtual_raster_bandset_checkBox")
self.gridLayout_53.addWidget(self.virtual_raster_bandset_checkBox, 1, 0, 1, 1)
self.band_calc_checkBox = QtWidgets.QCheckBox(self.tab_band_set)
self.band_calc_checkBox.setObjectName("band_calc_checkBox")
self.gridLayout_53.addWidget(self.band_calc_checkBox, 1, 3, 1, 1)
self.stack_raster_bandset_checkBox = QtWidgets.QCheckBox(self.tab_band_set)
self.stack_raster_bandset_checkBox.setObjectName("stack_raster_bandset_checkBox")
self.gridLayout_53.addWidget(self.stack_raster_bandset_checkBox, 1, 1, 1, 1)
self.overview_raster_bandset_checkBox = QtWidgets.QCheckBox(self.tab_band_set)
self.overview_raster_bandset_checkBox.setObjectName("overview_raster_bandset_checkBox")
self.gridLayout_53.addWidget(self.overview_raster_bandset_checkBox, 1, 2, 1, 1)
self.label_94 = QtWidgets.QLabel(self.tab_band_set)
self.label_94.setStyleSheet("background-color : #656565; color : white")
self.label_94.setFrameShape(QtWidgets.QFrame.Panel)
self.label_94.setFrameShadow(QtWidgets.QFrame.Sunken)
self.label_94.setAlignment(QtCore.Qt.AlignLeading|QtCore.Qt.AlignLeft|QtCore.Qt.AlignVCenter)
self.label_94.setObjectName("label_94")
self.gridLayout_53.addWidget(self.label_94, 0, 0, 1, 7)
spacerItem1 = QtWidgets.QSpacerItem(40, 20, QtWidgets.QSizePolicy.Expanding, QtWidgets.QSizePolicy.Minimum)
self.gridLayout_53.addItem(spacerItem1, 1, 4, 1, 1)
self.band_set_process_toolButton = QtWidgets.QToolButton(self.tab_band_set)
font = QtGui.QFont()
font.setBold(True)
font.setWeight(75)
self.band_set_process_toolButton.setFont(font)
self.band_set_process_toolButton.setLayoutDirection(QtCore.Qt.RightToLeft)
self.band_set_process_toolButton.setStyleSheet("margin: 0px;padding: 0px;")
icon64 = QtGui.QIcon()
icon64.addPixmap(QtGui.QPixmap(":/plugins/semiautomaticclassificationplugin/icons/semiautomaticclassificationplugin_run.svg"), QtGui.QIcon.Normal, QtGui.QIcon.Off)
self.band_set_process_toolButton.setIcon(icon64)
self.band_set_process_toolButton.setIconSize(QtCore.QSize(34, 34))
self.band_set_process_toolButton.setToolButtonStyle(QtCore.Qt.ToolButtonTextBesideIcon)
self.band_set_process_toolButton.setObjectName("band_set_process_toolButton")
self.gridLayout_53.addWidget(self.band_set_process_toolButton, 1, 5, 1, 1)
self.gridLayout_219.addLayout(self.gridLayout_53, 3, 0, 1, 2)
self.gridLayout_130 = QtWidgets.QGridLayout()
self.gridLayout_130.setObjectName("gridLayout_130")
self.toolButton_reload = QtWidgets.QToolButton(self.tab_band_set)
self.toolButton_reload.setStyleSheet("margin: 0px;padding: 0px;")
self.toolButton_reload.setIcon(icon55)
self.toolButton_reload.setIconSize(QtCore.QSize(22, 22))
self.toolButton_reload.setObjectName("toolButton_reload")
self.gridLayout_130.addWidget(self.toolButton_reload, 1, 4, 1, 1)
self.label_39 = QtWidgets.QLabel(self.tab_band_set)
self.label_39.setStyleSheet("background-color : #656565; color : white")
self.label_39.setFrameShape(QtWidgets.QFrame.Panel)
self.label_39.setFrameShadow(QtWidgets.QFrame.Sunken)
self.label_39.setObjectName("label_39")
self.gridLayout_130.addWidget(self.label_39, 0, 0, 1, 5)
self.toolButton_input_raster = QtWidgets.QToolButton(self.tab_band_set)
self.toolButton_input_raster.setStyleSheet("margin: 0px;padding: 0px;")
icon65 = QtGui.QIcon()
icon65.addPixmap(QtGui.QPixmap(":/plugins/semiautomaticclassificationplugin/icons/semiautomaticclassificationplugin_open_file.svg"), QtGui.QIcon.Normal, QtGui.QIcon.Off)
self.toolButton_input_raster.setIcon(icon65)
self.toolButton_input_raster.setIconSize(QtCore.QSize(22, 22))
self.toolButton_input_raster.setObjectName("toolButton_input_raster")
self.gridLayout_130.addWidget(self.toolButton_input_raster, 1, 3, 1, 1)
self.image_raster_name_combo = QtWidgets.QComboBox(self.tab_band_set)
self.image_raster_name_combo.setObjectName("image_raster_name_combo")
self.gridLayout_130.addWidget(self.image_raster_name_combo, 1, 0, 1, 3)
self.gridLayout_219.addLayout(self.gridLayout_130, 0, 0, 1, 2)
self.SCP_tabs.addTab(self.tab_band_set, "")
self.tab_basic_tools = QtWidgets.QWidget()
self.tab_basic_tools.setObjectName("tab_basic_tools")
self.gridLayout_216 = QtWidgets.QGridLayout(self.tab_basic_tools)
self.gridLayout_216.setObjectName("gridLayout_216")
self.tabWidget_5 = QtWidgets.QTabWidget(self.tab_basic_tools)
self.tabWidget_5.setStyleSheet("")
self.tabWidget_5.setIconSize(QtCore.QSize(20, 20))
self.tabWidget_5.setDocumentMode(True)
self.tabWidget_5.setObjectName("tabWidget_5")
self.tab_RGB = QtWidgets.QWidget()
self.tab_RGB.setObjectName("tab_RGB")
self.gridLayout_213 = QtWidgets.QGridLayout(self.tab_RGB)
self.gridLayout_213.setObjectName("gridLayout_213")
self.gridLayout_243 = QtWidgets.QGridLayout()
self.gridLayout_243.setObjectName("gridLayout_243")
self.gridLayout_234 = QtWidgets.QGridLayout()
self.gridLayout_234.setObjectName("gridLayout_234")
self.label_126 = QtWidgets.QLabel(self.tab_RGB)
self.label_126.setStyleSheet("background-color : #656565; color : white")
self.label_126.setFrameShape(QtWidgets.QFrame.Panel)
self.label_126.setFrameShadow(QtWidgets.QFrame.Sunken)
self.label_126.setAlignment(QtCore.Qt.AlignLeading|QtCore.Qt.AlignLeft|QtCore.Qt.AlignVCenter)
self.label_126.setObjectName("label_126")
self.gridLayout_234.addWidget(self.label_126, 0, 0, 1, 1)
self.gridLayout_243.addLayout(self.gridLayout_234, 0, 0, 1, 2)
self.gridLayout_244 = QtWidgets.QGridLayout()
self.gridLayout_244.setObjectName("gridLayout_244")
spacerItem2 = QtWidgets.QSpacerItem(20, 40, QtWidgets.QSizePolicy.Minimum, QtWidgets.QSizePolicy.Expanding)
self.gridLayout_244.addItem(spacerItem2, 3, 0, 1, 1)
self.sort_by_name_toolButton_2 = QtWidgets.QToolButton(self.tab_RGB)
self.sort_by_name_toolButton_2.setStyleSheet("margin: 0px;padding: 0px;")
self.sort_by_name_toolButton_2.setIcon(icon60)
self.sort_by_name_toolButton_2.setIconSize(QtCore.QSize(22, 22))
self.sort_by_name_toolButton_2.setObjectName("sort_by_name_toolButton_2")
self.gridLayout_244.addWidget(self.sort_by_name_toolButton_2, 2, 0, 1, 1)
self.move_down_toolButton_3 = QtWidgets.QToolButton(self.tab_RGB)
self.move_down_toolButton_3.setStyleSheet("margin: 0px;padding: 0px;")
self.move_down_toolButton_3.setIcon(icon62)
self.move_down_toolButton_3.setIconSize(QtCore.QSize(22, 22))
self.move_down_toolButton_3.setObjectName("move_down_toolButton_3")
self.gridLayout_244.addWidget(self.move_down_toolButton_3, 1, 0, 1, 1)
self.move_up_toolButton_3 = QtWidgets.QToolButton(self.tab_RGB)
self.move_up_toolButton_3.setStyleSheet("margin: 0px;padding: 0px;")
self.move_up_toolButton_3.setIcon(icon61)
self.move_up_toolButton_3.setIconSize(QtCore.QSize(22, 22))
self.move_up_toolButton_3.setObjectName("move_up_toolButton_3")
self.gridLayout_244.addWidget(self.move_up_toolButton_3, 0, 0, 1, 1)
self.gridLayout_243.addLayout(self.gridLayout_244, 1, 1, 1, 1)
self.gridLayout_245 = QtWidgets.QGridLayout()
self.gridLayout_245.setObjectName("gridLayout_245")
self.add_RGB_pushButton = QtWidgets.QToolButton(self.tab_RGB)
self.add_RGB_pushButton.setStyleSheet("margin: 0px;padding: 0px")
icon66 = QtGui.QIcon()
icon66.addPixmap(QtGui.QPixmap(":/plugins/semiautomaticclassificationplugin/icons/semiautomaticclassificationplugin_add.svg"), QtGui.QIcon.Normal, QtGui.QIcon.Off)
self.add_RGB_pushButton.setIcon(icon66)
self.add_RGB_pushButton.setIconSize(QtCore.QSize(22, 22))
self.add_RGB_pushButton.setObjectName("add_RGB_pushButton")
self.gridLayout_245.addWidget(self.add_RGB_pushButton, 0, 0, 1, 1)
self.export_RGB_List_toolButton = QtWidgets.QToolButton(self.tab_RGB)
self.export_RGB_List_toolButton.setStyleSheet("margin: 0px;padding: 0px")
self.export_RGB_List_toolButton.setIcon(icon53)
self.export_RGB_List_toolButton.setIconSize(QtCore.QSize(22, 22))
self.export_RGB_List_toolButton.setObjectName("export_RGB_List_toolButton")
self.gridLayout_245.addWidget(self.export_RGB_List_toolButton, 4, 0, 1, 1)
self.import_RGB_List_toolButton = QtWidgets.QToolButton(self.tab_RGB)
self.import_RGB_List_toolButton.setStyleSheet("margin: 0px;padding: 0px")
self.import_RGB_List_toolButton.setIcon(icon54)
self.import_RGB_List_toolButton.setIconSize(QtCore.QSize(22, 22))
self.import_RGB_List_toolButton.setObjectName("import_RGB_List_toolButton")
self.gridLayout_245.addWidget(self.import_RGB_List_toolButton, 5, 0, 1, 1)
self.clear_RGB_list_toolButton = QtWidgets.QToolButton(self.tab_RGB)
self.clear_RGB_list_toolButton.setStyleSheet("margin: 0px;padding: 0px;")
self.clear_RGB_list_toolButton.setIcon(icon59)
self.clear_RGB_list_toolButton.setIconSize(QtCore.QSize(22, 22))
self.clear_RGB_list_toolButton.setObjectName("clear_RGB_list_toolButton")
self.gridLayout_245.addWidget(self.clear_RGB_list_toolButton, 2, 0, 1, 1)
self.remove_RGB_toolButton = QtWidgets.QToolButton(self.tab_RGB)
self.remove_RGB_toolButton.setStyleSheet("margin: 0px;padding: 0px")
self.remove_RGB_toolButton.setIcon(icon58)
self.remove_RGB_toolButton.setIconSize(QtCore.QSize(22, 22))
self.remove_RGB_toolButton.setObjectName("remove_RGB_toolButton")
self.gridLayout_245.addWidget(self.remove_RGB_toolButton, 1, 0, 1, 1)
spacerItem3 = QtWidgets.QSpacerItem(20, 40, QtWidgets.QSizePolicy.Minimum, QtWidgets.QSizePolicy.Expanding)
self.gridLayout_245.addItem(spacerItem3, 3, 0, 1, 1)
self.gridLayout_243.addLayout(self.gridLayout_245, 2, 1, 1, 1)
self.RGB_tableWidget = QtWidgets.QTableWidget(self.tab_RGB)
sizePolicy = QtWidgets.QSizePolicy(QtWidgets.QSizePolicy.Expanding, QtWidgets.QSizePolicy.Expanding)
sizePolicy.setHorizontalStretch(0)
sizePolicy.setVerticalStretch(0)
sizePolicy.setHeightForWidth(self.RGB_tableWidget.sizePolicy().hasHeightForWidth())
self.RGB_tableWidget.setSizePolicy(sizePolicy)
self.RGB_tableWidget.setEditTriggers(QtWidgets.QAbstractItemView.DoubleClicked)
self.RGB_tableWidget.setAlternatingRowColors(True)
self.RGB_tableWidget.setSelectionMode(QtWidgets.QAbstractItemView.MultiSelection)
self.RGB_tableWidget.setSelectionBehavior(QtWidgets.QAbstractItemView.SelectRows)
self.RGB_tableWidget.setObjectName("RGB_tableWidget")
self.RGB_tableWidget.setColumnCount(1)
self.RGB_tableWidget.setRowCount(0)
item = QtWidgets.QTableWidgetItem()
self.RGB_tableWidget.setHorizontalHeaderItem(0, item)
self.RGB_tableWidget.horizontalHeader().setDefaultSectionSize(50)
self.RGB_tableWidget.horizontalHeader().setStretchLastSection(True)
self.RGB_tableWidget.verticalHeader().setDefaultSectionSize(20)
self.gridLayout_243.addWidget(self.RGB_tableWidget, 1, 0, 2, 1)
self.gridLayout_213.addLayout(self.gridLayout_243, 0, 0, 1, 1)
self.gridLayout_246 = QtWidgets.QGridLayout()
self.gridLayout_246.setObjectName("gridLayout_246")
self.label_196 = QtWidgets.QLabel(self.tab_RGB)
sizePolicy = QtWidgets.QSizePolicy(QtWidgets.QSizePolicy.Preferred, QtWidgets.QSizePolicy.Maximum)
sizePolicy.setHorizontalStretch(0)
sizePolicy.setVerticalStretch(0)
sizePolicy.setHeightForWidth(self.label_196.sizePolicy().hasHeightForWidth())
self.label_196.setSizePolicy(sizePolicy)
self.label_196.setStyleSheet("background-color : #656565; color : white")
self.label_196.setFrameShape(QtWidgets.QFrame.Panel)
self.label_196.setFrameShadow(QtWidgets.QFrame.Sunken)
self.label_196.setAlignment(QtCore.Qt.AlignLeading|QtCore.Qt.AlignLeft|QtCore.Qt.AlignVCenter)
self.label_196.setObjectName("label_196")
self.gridLayout_246.addWidget(self.label_196, 0, 0, 1, 2)
self.horizontalLayout_27 = QtWidgets.QHBoxLayout()
self.horizontalLayout_27.setObjectName("horizontalLayout_27")
self.label_192 = QtWidgets.QLabel(self.tab_RGB)
sizePolicy = QtWidgets.QSizePolicy(QtWidgets.QSizePolicy.Preferred, QtWidgets.QSizePolicy.Maximum)
sizePolicy.setHorizontalStretch(0)
sizePolicy.setVerticalStretch(0)
sizePolicy.setHeightForWidth(self.label_192.sizePolicy().hasHeightForWidth())
self.label_192.setSizePolicy(sizePolicy)
self.label_192.setFrameShadow(QtWidgets.QFrame.Sunken)
self.label_192.setAlignment(QtCore.Qt.AlignLeading|QtCore.Qt.AlignLeft|QtCore.Qt.AlignVCenter)
self.label_192.setObjectName("label_192")
self.horizontalLayout_27.addWidget(self.label_192)
self.all_RGB_list_toolButton = QtWidgets.QToolButton(self.tab_RGB)
self.all_RGB_list_toolButton.setStyleSheet("margin: 0px;padding: 0px;")
icon67 = QtGui.QIcon()
icon67.addPixmap(QtGui.QPixmap(":/plugins/semiautomaticclassificationplugin/icons/semiautomaticclassificationplugin_enter.svg"), QtGui.QIcon.Normal, QtGui.QIcon.Off)
self.all_RGB_list_toolButton.setIcon(icon67)
self.all_RGB_list_toolButton.setIconSize(QtCore.QSize(22, 22))
self.all_RGB_list_toolButton.setObjectName("all_RGB_list_toolButton")
self.horizontalLayout_27.addWidget(self.all_RGB_list_toolButton)
spacerItem4 = QtWidgets.QSpacerItem(40, 20, QtWidgets.QSizePolicy.Expanding, QtWidgets.QSizePolicy.Minimum)
self.horizontalLayout_27.addItem(spacerItem4)
self.gridLayout_246.addLayout(self.horizontalLayout_27, 1, 0, 1, 2)
self.gridLayout_213.addLayout(self.gridLayout_246, 1, 0, 1, 1)
self.tabWidget_5.addTab(self.tab_RGB, "")
self.tab_band_set_list = QtWidgets.QWidget()
self.tab_band_set_list.setObjectName("tab_band_set_list")
self.gridLayout_197 = QtWidgets.QGridLayout(self.tab_band_set_list)
self.gridLayout_197.setObjectName("gridLayout_197")
self.gridLayout_267 = QtWidgets.QGridLayout()
self.gridLayout_267.setObjectName("gridLayout_267")
self.label_208 = QtWidgets.QLabel(self.tab_band_set_list)
self.label_208.setStyleSheet("background-color : #656565; color : white")
self.label_208.setFrameShape(QtWidgets.QFrame.Panel)
self.label_208.setFrameShadow(QtWidgets.QFrame.Sunken)
self.label_208.setAlignment(QtCore.Qt.AlignLeading|QtCore.Qt.AlignLeft|QtCore.Qt.AlignVCenter)
self.label_208.setObjectName("label_208")
self.gridLayout_267.addWidget(self.label_208, 0, 0, 1, 1)
self.band_set_filter_lineEdit = QtWidgets.QLineEdit(self.tab_band_set_list)
sizePolicy = QtWidgets.QSizePolicy(QtWidgets.QSizePolicy.Minimum, QtWidgets.QSizePolicy.Fixed)
sizePolicy.setHorizontalStretch(0)
sizePolicy.setVerticalStretch(0)
sizePolicy.setHeightForWidth(self.band_set_filter_lineEdit.sizePolicy().hasHeightForWidth())
self.band_set_filter_lineEdit.setSizePolicy(sizePolicy)
self.band_set_filter_lineEdit.setObjectName("band_set_filter_lineEdit")
self.gridLayout_267.addWidget(self.band_set_filter_lineEdit, 0, 1, 1, 1)
self.gridLayout_197.addLayout(self.gridLayout_267, 0, 0, 1, 1)
self.band_set_list_tableWidget = QtWidgets.QTableWidget(self.tab_band_set_list)
self.band_set_list_tableWidget.setEditTriggers(QtWidgets.QAbstractItemView.NoEditTriggers)
self.band_set_list_tableWidget.setAlternatingRowColors(True)
self.band_set_list_tableWidget.setSelectionMode(QtWidgets.QAbstractItemView.MultiSelection)
self.band_set_list_tableWidget.setSelectionBehavior(QtWidgets.QAbstractItemView.SelectRows)
self.band_set_list_tableWidget.setObjectName("band_set_list_tableWidget")
self.band_set_list_tableWidget.setColumnCount(3)
self.band_set_list_tableWidget.setRowCount(0)
item = QtWidgets.QTableWidgetItem()
self.band_set_list_tableWidget.setHorizontalHeaderItem(0, item)
item = QtWidgets.QTableWidgetItem()
self.band_set_list_tableWidget.setHorizontalHeaderItem(1, item)
item = QtWidgets.QTableWidgetItem()
self.band_set_list_tableWidget.setHorizontalHeaderItem(2, item)
self.band_set_list_tableWidget.horizontalHeader().setDefaultSectionSize(68)
self.band_set_list_tableWidget.horizontalHeader().setStretchLastSection(True)
self.band_set_list_tableWidget.verticalHeader().setDefaultSectionSize(20)
self.gridLayout_197.addWidget(self.band_set_list_tableWidget, 1, 0, 2, 1)
self.verticalLayout_2 = QtWidgets.QVBoxLayout()
self.verticalLayout_2.setObjectName("verticalLayout_2")
self.move_down_toolButton_4 = QtWidgets.QToolButton(self.tab_band_set_list)
self.move_down_toolButton_4.setStyleSheet("margin: 0px;padding: 0px;")
self.move_down_toolButton_4.setIcon(icon62)
self.move_down_toolButton_4.setIconSize(QtCore.QSize(22, 22))
self.move_down_toolButton_4.setObjectName("move_down_toolButton_4")
self.verticalLayout_2.addWidget(self.move_down_toolButton_4)
self.move_up_toolButton_4 = QtWidgets.QToolButton(self.tab_band_set_list)
self.move_up_toolButton_4.setStyleSheet("margin: 0px;padding: 0px;")
self.move_up_toolButton_4.setIcon(icon61)
self.move_up_toolButton_4.setIconSize(QtCore.QSize(22, 22))
self.move_up_toolButton_4.setObjectName("move_up_toolButton_4")
self.verticalLayout_2.addWidget(self.move_up_toolButton_4)
spacerItem5 = QtWidgets.QSpacerItem(20, 40, QtWidgets.QSizePolicy.Minimum, QtWidgets.QSizePolicy.Expanding)
self.verticalLayout_2.addItem(spacerItem5)
self.rgb_toolButton = QtWidgets.QToolButton(self.tab_band_set_list)
self.rgb_toolButton.setStyleSheet("margin: 0px;padding: 0px")
self.rgb_toolButton.setIcon(icon8)
self.rgb_toolButton.setIconSize(QtCore.QSize(22, 22))
self.rgb_toolButton.setObjectName("rgb_toolButton")
self.verticalLayout_2.addWidget(self.rgb_toolButton)
self.gridLayout_197.addLayout(self.verticalLayout_2, 1, 1, 1, 1)
self.gridLayout_269 = QtWidgets.QGridLayout()
self.gridLayout_269.setObjectName("gridLayout_269")
self.add_bandset_pushButton = QtWidgets.QToolButton(self.tab_band_set_list)
self.add_bandset_pushButton.setStyleSheet("margin: 0px;padding: 0px")
self.add_bandset_pushButton.setIcon(icon66)
self.add_bandset_pushButton.setIconSize(QtCore.QSize(22, 22))
self.add_bandset_pushButton.setObjectName("add_bandset_pushButton")
self.gridLayout_269.addWidget(self.add_bandset_pushButton, 2, 0, 1, 1)
self.export_bandset_List_toolButton = QtWidgets.QToolButton(self.tab_band_set_list)
self.export_bandset_List_toolButton.setStyleSheet("margin: 0px;padding: 0px")
self.export_bandset_List_toolButton.setIcon(icon53)
self.export_bandset_List_toolButton.setIconSize(QtCore.QSize(22, 22))
self.export_bandset_List_toolButton.setObjectName("export_bandset_List_toolButton")
self.gridLayout_269.addWidget(self.export_bandset_List_toolButton, 5, 0, 1, 1)
self.import_bandset_List_toolButton = QtWidgets.QToolButton(self.tab_band_set_list)
self.import_bandset_List_toolButton.setStyleSheet("margin: 0px;padding: 0px")
self.import_bandset_List_toolButton.setIcon(icon54)
self.import_bandset_List_toolButton.setIconSize(QtCore.QSize(22, 22))
self.import_bandset_List_toolButton.setObjectName("import_bandset_List_toolButton")
self.gridLayout_269.addWidget(self.import_bandset_List_toolButton, 6, 0, 1, 1)
spacerItem6 = QtWidgets.QSpacerItem(20, 40, QtWidgets.QSizePolicy.Minimum, QtWidgets.QSizePolicy.Expanding)
self.gridLayout_269.addItem(spacerItem6, 4, 0, 1, 1)
self.remove_bandset_toolButton = QtWidgets.QToolButton(self.tab_band_set_list)
self.remove_bandset_toolButton.setStyleSheet("margin: 0px;padding: 0px")
self.remove_bandset_toolButton.setIcon(icon58)
self.remove_bandset_toolButton.setIconSize(QtCore.QSize(22, 22))
self.remove_bandset_toolButton.setObjectName("remove_bandset_toolButton")
self.gridLayout_269.addWidget(self.remove_bandset_toolButton, 3, 0, 1, 1)
spacerItem7 = QtWidgets.QSpacerItem(20, 40, QtWidgets.QSizePolicy.Minimum, QtWidgets.QSizePolicy.Expanding)
self.gridLayout_269.addItem(spacerItem7, 1, 0, 1, 1)
self.sort_by_date = QtWidgets.QToolButton(self.tab_band_set_list)
self.sort_by_date.setStyleSheet("margin: 0px;padding: 0px;")
self.sort_by_date.setIcon(icon60)
self.sort_by_date.setIconSize(QtCore.QSize(22, 22))
self.sort_by_date.setObjectName("sort_by_date")
self.gridLayout_269.addWidget(self.sort_by_date, 0, 0, 1, 1)
self.gridLayout_197.addLayout(self.gridLayout_269, 2, 1, 1, 1)
self.tabWidget_5.addTab(self.tab_band_set_list, "")
self.tab_algorithm_weight = QtWidgets.QWidget()
self.tab_algorithm_weight.setObjectName("tab_algorithm_weight")
self.gridLayout_150 = QtWidgets.QGridLayout(self.tab_algorithm_weight)
self.gridLayout_150.setObjectName("gridLayout_150")
self.gridLayout_100 = QtWidgets.QGridLayout()
self.gridLayout_100.setObjectName("gridLayout_100")
self.gridLayout_108 = QtWidgets.QGridLayout()
self.gridLayout_108.setObjectName("gridLayout_108")
self.label_79 = QtWidgets.QLabel(self.tab_algorithm_weight)
self.label_79.setStyleSheet("background-color : #656565; color : white")
self.label_79.setFrameShape(QtWidgets.QFrame.Panel)
self.label_79.setFrameShadow(QtWidgets.QFrame.Sunken)
self.label_79.setAlignment(QtCore.Qt.AlignLeading|QtCore.Qt.AlignLeft|QtCore.Qt.AlignVCenter)
self.label_79.setObjectName("label_79")
self.gridLayout_108.addWidget(self.label_79, 0, 0, 1, 1)
self.gridLayout_100.addLayout(self.gridLayout_108, 0, 0, 1, 2)
self.gridLayout_101 = QtWidgets.QGridLayout()
self.gridLayout_101.setObjectName("gridLayout_101")
spacerItem8 = QtWidgets.QSpacerItem(20, 40, QtWidgets.QSizePolicy.Minimum, QtWidgets.QSizePolicy.Expanding)
self.gridLayout_101.addItem(spacerItem8, 6, 1, 1, 1)
self.gridLayout_104 = QtWidgets.QGridLayout()
self.gridLayout_104.setObjectName("gridLayout_104")
self.gridLayout_102 = QtWidgets.QGridLayout()
self.gridLayout_102.setObjectName("gridLayout_102")
self.reset_weights_pushButton = QtWidgets.QToolButton(self.tab_algorithm_weight)
self.reset_weights_pushButton.setStyleSheet("margin: 0px;padding: 0px;")
self.reset_weights_pushButton.setIcon(icon59)
self.reset_weights_pushButton.setIconSize(QtCore.QSize(22, 22))
self.reset_weights_pushButton.setObjectName("reset_weights_pushButton")
self.gridLayout_102.addWidget(self.reset_weights_pushButton, 0, 0, 1, 1)
self.gridLayout_104.addLayout(self.gridLayout_102, 0, 0, 1, 1)
self.gridLayout_101.addLayout(self.gridLayout_104, 5, 0, 1, 3)
spacerItem9 = QtWidgets.QSpacerItem(20, 40, QtWidgets.QSizePolicy.Minimum, QtWidgets.QSizePolicy.Expanding)
self.gridLayout_101.addItem(spacerItem9, 3, 1, 1, 1)
self.gridLayout_100.addLayout(self.gridLayout_101, 1, 1, 1, 1)
self.gridLayout_103 = QtWidgets.QGridLayout()
self.gridLayout_103.setObjectName("gridLayout_103")
self.set_weight_value_pushButton = QtWidgets.QToolButton(self.tab_algorithm_weight)
self.set_weight_value_pushButton.setStyleSheet("margin: 0px;padding: 0px;")
self.set_weight_value_pushButton.setIcon(icon67)
self.set_weight_value_pushButton.setIconSize(QtCore.QSize(22, 22))
self.set_weight_value_pushButton.setObjectName("set_weight_value_pushButton")
self.gridLayout_103.addWidget(self.set_weight_value_pushButton, 1, 2, 1, 1)
self.label_131 = QtWidgets.QLabel(self.tab_algorithm_weight)
sizePolicy = QtWidgets.QSizePolicy(QtWidgets.QSizePolicy.Minimum, QtWidgets.QSizePolicy.Preferred)
sizePolicy.setHorizontalStretch(0)
sizePolicy.setVerticalStretch(0)
sizePolicy.setHeightForWidth(self.label_131.sizePolicy().hasHeightForWidth())
self.label_131.setSizePolicy(sizePolicy)
self.label_131.setAlignment(QtCore.Qt.AlignLeading|QtCore.Qt.AlignLeft|QtCore.Qt.AlignVCenter)
self.label_131.setObjectName("label_131")
self.gridLayout_103.addWidget(self.label_131, 1, 0, 1, 1)
self.weight_doubleSpinBox = QtWidgets.QDoubleSpinBox(self.tab_algorithm_weight)
self.weight_doubleSpinBox.setMaximum(1000.0)
self.weight_doubleSpinBox.setProperty("value", 1.0)
self.weight_doubleSpinBox.setObjectName("weight_doubleSpinBox")
self.gridLayout_103.addWidget(self.weight_doubleSpinBox, 1, 1, 1, 1)
spacerItem10 = QtWidgets.QSpacerItem(40, 20, QtWidgets.QSizePolicy.Expanding, QtWidgets.QSizePolicy.Minimum)
self.gridLayout_103.addItem(spacerItem10, 1, 3, 1, 1)
self.label_93 = QtWidgets.QLabel(self.tab_algorithm_weight)
sizePolicy = QtWidgets.QSizePolicy(QtWidgets.QSizePolicy.Preferred, QtWidgets.QSizePolicy.Maximum)
sizePolicy.setHorizontalStretch(0)
sizePolicy.setVerticalStretch(0)
sizePolicy.setHeightForWidth(self.label_93.sizePolicy().hasHeightForWidth())
self.label_93.setSizePolicy(sizePolicy)
self.label_93.setStyleSheet("background-color : #656565; color : white")
self.label_93.setFrameShape(QtWidgets.QFrame.Panel)
self.label_93.setFrameShadow(QtWidgets.QFrame.Sunken)
self.label_93.setAlignment(QtCore.Qt.AlignLeading|QtCore.Qt.AlignLeft|QtCore.Qt.AlignVCenter)
self.label_93.setObjectName("label_93")
self.gridLayout_103.addWidget(self.label_93, 0, 0, 1, 4)
self.gridLayout_100.addLayout(self.gridLayout_103, 2, 0, 1, 2)
self.alg_band_weight_tabWidget = QtWidgets.QTabWidget(self.tab_algorithm_weight)
self.alg_band_weight_tabWidget.setTabPosition(QtWidgets.QTabWidget.North)
self.alg_band_weight_tabWidget.setObjectName("alg_band_weight_tabWidget")
self.gridLayout_100.addWidget(self.alg_band_weight_tabWidget, 1, 0, 1, 1)
self.gridLayout_150.addLayout(self.gridLayout_100, 0, 0, 1, 1)
self.tabWidget_5.addTab(self.tab_algorithm_weight, "")
self.tab_multiple_ROI = QtWidgets.QWidget()
self.tab_multiple_ROI.setObjectName("tab_multiple_ROI")
self.gridLayout_247 = QtWidgets.QGridLayout(self.tab_multiple_ROI)
self.gridLayout_247.setObjectName("gridLayout_247")
self.gridLayout_8 = QtWidgets.QGridLayout()
self.gridLayout_8.setObjectName("gridLayout_8")
self.point_distance_spinBox = QtWidgets.QSpinBox(self.tab_multiple_ROI)
sizePolicy = QtWidgets.QSizePolicy(QtWidgets.QSizePolicy.Minimum, QtWidgets.QSizePolicy.Fixed)
sizePolicy.setHorizontalStretch(0)
sizePolicy.setVerticalStretch(0)
sizePolicy.setHeightForWidth(self.point_distance_spinBox.sizePolicy().hasHeightForWidth())
self.point_distance_spinBox.setSizePolicy(sizePolicy)
self.point_distance_spinBox.setMinimumSize(QtCore.QSize(40, 0))
self.point_distance_spinBox.setMaximumSize(QtCore.QSize(100, 16777215))
self.point_distance_spinBox.setAlignment(QtCore.Qt.AlignRight|QtCore.Qt.AlignTrailing|QtCore.Qt.AlignVCenter)
self.point_distance_spinBox.setMinimum(1)
self.point_distance_spinBox.setMaximum(999999999)
self.point_distance_spinBox.setProperty("value", 100)
self.point_distance_spinBox.setObjectName("point_distance_spinBox")
self.gridLayout_8.addWidget(self.point_distance_spinBox, 1, 5, 1, 1)
self.point_grid_spinBox = QtWidgets.QSpinBox(self.tab_multiple_ROI)
sizePolicy = QtWidgets.QSizePolicy(QtWidgets.QSizePolicy.Minimum, QtWidgets.QSizePolicy.Fixed)
sizePolicy.setHorizontalStretch(0)
sizePolicy.setVerticalStretch(0)
sizePolicy.setHeightForWidth(self.point_grid_spinBox.sizePolicy().hasHeightForWidth())
self.point_grid_spinBox.setSizePolicy(sizePolicy)
self.point_grid_spinBox.setMinimumSize(QtCore.QSize(40, 0))
self.point_grid_spinBox.setMaximumSize(QtCore.QSize(100, 16777215))
self.point_grid_spinBox.setAlignment(QtCore.Qt.AlignRight|QtCore.Qt.AlignTrailing|QtCore.Qt.AlignVCenter)
self.point_grid_spinBox.setMinimum(1)
self.point_grid_spinBox.setMaximum(999999999)
self.point_grid_spinBox.setProperty("value", 10000)
self.point_grid_spinBox.setObjectName("point_grid_spinBox")
self.gridLayout_8.addWidget(self.point_grid_spinBox, 1, 3, 1, 1)
self.label_48 = QtWidgets.QLabel(self.tab_multiple_ROI)
self.label_48.setStyleSheet("background-color : #656565; color : white")
self.label_48.setFrameShape(QtWidgets.QFrame.Panel)
self.label_48.setFrameShadow(QtWidgets.QFrame.Sunken)
self.label_48.setObjectName("label_48")
self.gridLayout_8.addWidget(self.label_48, 0, 0, 1, 9)
self.label_139 = QtWidgets.QLabel(self.tab_multiple_ROI)
sizePolicy = QtWidgets.QSizePolicy(QtWidgets.QSizePolicy.Minimum, QtWidgets.QSizePolicy.Preferred)
sizePolicy.setHorizontalStretch(0)
sizePolicy.setVerticalStretch(0)
sizePolicy.setHeightForWidth(self.label_139.sizePolicy().hasHeightForWidth())
self.label_139.setSizePolicy(sizePolicy)
self.label_139.setAlignment(QtCore.Qt.AlignLeading|QtCore.Qt.AlignLeft|QtCore.Qt.AlignVCenter)
self.label_139.setObjectName("label_139")
self.gridLayout_8.addWidget(self.label_139, 1, 7, 1, 1)
self.label_19 = QtWidgets.QLabel(self.tab_multiple_ROI)
self.label_19.setFrameShadow(QtWidgets.QFrame.Sunken)
self.label_19.setAlignment(QtCore.Qt.AlignLeading|QtCore.Qt.AlignLeft|QtCore.Qt.AlignVCenter)
self.label_19.setObjectName("label_19")
self.gridLayout_8.addWidget(self.label_19, 1, 0, 1, 1)
self.point_number_spinBox = QtWidgets.QSpinBox(self.tab_multiple_ROI)
sizePolicy = QtWidgets.QSizePolicy(QtWidgets.QSizePolicy.Minimum, QtWidgets.QSizePolicy.Fixed)
sizePolicy.setHorizontalStretch(0)
sizePolicy.setVerticalStretch(0)
sizePolicy.setHeightForWidth(self.point_number_spinBox.sizePolicy().hasHeightForWidth())
self.point_number_spinBox.setSizePolicy(sizePolicy)
self.point_number_spinBox.setMinimumSize(QtCore.QSize(40, 0))
self.point_number_spinBox.setMaximumSize(QtCore.QSize(100, 16777215))
self.point_number_spinBox.setAlignment(QtCore.Qt.AlignRight|QtCore.Qt.AlignTrailing|QtCore.Qt.AlignVCenter)
self.point_number_spinBox.setMinimum(1)
self.point_number_spinBox.setMaximum(999999999)
self.point_number_spinBox.setProperty("value", 100)
self.point_number_spinBox.setObjectName("point_number_spinBox")
self.gridLayout_8.addWidget(self.point_number_spinBox, 1, 1, 1, 1)
self.add_random_point_pushButton = QtWidgets.QToolButton(self.tab_multiple_ROI)
self.add_random_point_pushButton.setStyleSheet("margin: 0px;padding: 0px;")
self.add_random_point_pushButton.setIcon(icon67)
self.add_random_point_pushButton.setIconSize(QtCore.QSize(22, 22))
self.add_random_point_pushButton.setObjectName("add_random_point_pushButton")
self.gridLayout_8.addWidget(self.add_random_point_pushButton, 1, 8, 1, 1)
self.point_distance_checkBox = QtWidgets.QCheckBox(self.tab_multiple_ROI)
self.point_distance_checkBox.setObjectName("point_distance_checkBox")
self.gridLayout_8.addWidget(self.point_distance_checkBox, 1, 4, 1, 1)
self.point_grid_checkBox = QtWidgets.QCheckBox(self.tab_multiple_ROI)
self.point_grid_checkBox.setObjectName("point_grid_checkBox")
self.gridLayout_8.addWidget(self.point_grid_checkBox, 1, 2, 1, 1)
spacerItem11 = QtWidgets.QSpacerItem(40, 20, QtWidgets.QSizePolicy.Expanding, QtWidgets.QSizePolicy.Minimum)
self.gridLayout_8.addItem(spacerItem11, 1, 6, 1, 1)
self.stratified_point_checkBox = QtWidgets.QCheckBox(self.tab_multiple_ROI)
self.stratified_point_checkBox.setObjectName("stratified_point_checkBox")
self.gridLayout_8.addWidget(self.stratified_point_checkBox, 2, 0, 1, 2)
self.stratified_lineEdit = QtWidgets.QLineEdit(self.tab_multiple_ROI)
sizePolicy = QtWidgets.QSizePolicy(QtWidgets.QSizePolicy.MinimumExpanding, QtWidgets.QSizePolicy.Fixed)
sizePolicy.setHorizontalStretch(0)
sizePolicy.setVerticalStretch(0)
sizePolicy.setHeightForWidth(self.stratified_lineEdit.sizePolicy().hasHeightForWidth())
self.stratified_lineEdit.setSizePolicy(sizePolicy)
self.stratified_lineEdit.setMinimumSize(QtCore.QSize(400, 0))
self.stratified_lineEdit.setMaxLength(10000)
self.stratified_lineEdit.setObjectName("stratified_lineEdit")
self.gridLayout_8.addWidget(self.stratified_lineEdit, 2, 2, 1, 4)
self.band_set_comb_spinBox_10 = QtWidgets.QSpinBox(self.tab_multiple_ROI)
self.band_set_comb_spinBox_10.setMinimum(1)
self.band_set_comb_spinBox_10.setMaximum(100000)
self.band_set_comb_spinBox_10.setObjectName("band_set_comb_spinBox_10")
self.gridLayout_8.addWidget(self.band_set_comb_spinBox_10, 2, 8, 1, 1)
self.label_25 = QtWidgets.QLabel(self.tab_multiple_ROI)
self.label_25.setFrameShadow(QtWidgets.QFrame.Sunken)
self.label_25.setAlignment(QtCore.Qt.AlignLeading|QtCore.Qt.AlignLeft|QtCore.Qt.AlignVCenter)
self.label_25.setObjectName("label_25")
self.gridLayout_8.addWidget(self.label_25, 2, 6, 1, 2)
self.gridLayout_247.addLayout(self.gridLayout_8, 0, 0, 1, 1)
self.gridLayout_32 = QtWidgets.QGridLayout()
self.gridLayout_32.setObjectName("gridLayout_32")
self.label_47 = QtWidgets.QLabel(self.tab_multiple_ROI)
self.label_47.setStyleSheet("background-color : #656565; color : white")
self.label_47.setFrameShape(QtWidgets.QFrame.Panel)
self.label_47.setFrameShadow(QtWidgets.QFrame.Sunken)
self.label_47.setObjectName("label_47")
self.gridLayout_32.addWidget(self.label_47, 0, 0, 1, 2)
self.point_tableWidget = QtWidgets.QTableWidget(self.tab_multiple_ROI)
self.point_tableWidget.setAlternatingRowColors(True)
self.point_tableWidget.setObjectName("point_tableWidget")
self.point_tableWidget.setColumnCount(10)
self.point_tableWidget.setRowCount(0)
item = QtWidgets.QTableWidgetItem()
self.point_tableWidget.setHorizontalHeaderItem(0, item)
item = QtWidgets.QTableWidgetItem()
self.point_tableWidget.setHorizontalHeaderItem(1, item)
item = QtWidgets.QTableWidgetItem()
self.point_tableWidget.setHorizontalHeaderItem(2, item)
item = QtWidgets.QTableWidgetItem()
self.point_tableWidget.setHorizontalHeaderItem(3, item)
item = QtWidgets.QTableWidgetItem()
self.point_tableWidget.setHorizontalHeaderItem(4, item)
item = QtWidgets.QTableWidgetItem()
self.point_tableWidget.setHorizontalHeaderItem(5, item)
item = QtWidgets.QTableWidgetItem()
self.point_tableWidget.setHorizontalHeaderItem(6, item)
item = QtWidgets.QTableWidgetItem()
self.point_tableWidget.setHorizontalHeaderItem(7, item)
item = QtWidgets.QTableWidgetItem()
self.point_tableWidget.setHorizontalHeaderItem(8, item)
item = QtWidgets.QTableWidgetItem()
self.point_tableWidget.setHorizontalHeaderItem(9, item)
self.point_tableWidget.horizontalHeader().setDefaultSectionSize(90)
self.point_tableWidget.verticalHeader().setDefaultSectionSize(24)
self.gridLayout_32.addWidget(self.point_tableWidget, 1, 0, 1, 1)
self.gridLayout_72 = QtWidgets.QGridLayout()
self.gridLayout_72.setObjectName("gridLayout_72")
self.gridLayout_39 = QtWidgets.QGridLayout()
self.gridLayout_39.setObjectName("gridLayout_39")
spacerItem12 = QtWidgets.QSpacerItem(20, 40, QtWidgets.QSizePolicy.Minimum, QtWidgets.QSizePolicy.Expanding)
self.gridLayout_39.addItem(spacerItem12, 3, 0, 1, 1)
self.add_point_pushButton = QtWidgets.QToolButton(self.tab_multiple_ROI)
self.add_point_pushButton.setStyleSheet("margin: 0px;padding: 0px")
self.add_point_pushButton.setIcon(icon66)
self.add_point_pushButton.setIconSize(QtCore.QSize(22, 22))
self.add_point_pushButton.setObjectName("add_point_pushButton")
self.gridLayout_39.addWidget(self.add_point_pushButton, 1, 0, 1, 1)
self.remove_point_pushButton = QtWidgets.QToolButton(self.tab_multiple_ROI)
self.remove_point_pushButton.setStyleSheet("margin: 0px;padding: 0px")
self.remove_point_pushButton.setIcon(icon58)
self.remove_point_pushButton.setIconSize(QtCore.QSize(22, 22))
self.remove_point_pushButton.setObjectName("remove_point_pushButton")
self.gridLayout_39.addWidget(self.remove_point_pushButton, 2, 0, 1, 1)
self.export_point_list_pushButton = QtWidgets.QToolButton(self.tab_multiple_ROI)
self.export_point_list_pushButton.setStyleSheet("margin: 0px;padding: 0px")
self.export_point_list_pushButton.setIcon(icon53)
self.export_point_list_pushButton.setIconSize(QtCore.QSize(22, 22))
self.export_point_list_pushButton.setObjectName("export_point_list_pushButton")
self.gridLayout_39.addWidget(self.export_point_list_pushButton, 5, 0, 1, 1)
spacerItem13 = QtWidgets.QSpacerItem(20, 40, QtWidgets.QSizePolicy.Minimum, QtWidgets.QSizePolicy.Expanding)
self.gridLayout_39.addItem(spacerItem13, 6, 0, 1, 1)
self.import_point_list_pushButton = QtWidgets.QToolButton(self.tab_multiple_ROI)
self.import_point_list_pushButton.setStyleSheet("margin: 0px;padding: 0px")
self.import_point_list_pushButton.setIcon(icon54)
self.import_point_list_pushButton.setIconSize(QtCore.QSize(22, 22))
self.import_point_list_pushButton.setObjectName("import_point_list_pushButton")
self.gridLayout_39.addWidget(self.import_point_list_pushButton, 4, 0, 1, 1)
spacerItem14 = QtWidgets.QSpacerItem(20, 40, QtWidgets.QSizePolicy.Minimum, QtWidgets.QSizePolicy.Expanding)
self.gridLayout_39.addItem(spacerItem14, 0, 0, 1, 1)
self.gridLayout_72.addLayout(self.gridLayout_39, 2, 0, 1, 1)
self.gridLayout_32.addLayout(self.gridLayout_72, 1, 1, 1, 1)
self.gridLayout_73 = QtWidgets.QGridLayout()
self.gridLayout_73.setObjectName("gridLayout_73")
spacerItem15 = QtWidgets.QSpacerItem(40, 20, QtWidgets.QSizePolicy.Expanding, QtWidgets.QSizePolicy.Minimum)
self.gridLayout_73.addItem(spacerItem15, 1, 4, 1, 1)
self.signature_checkBox2 = QtWidgets.QCheckBox(self.tab_multiple_ROI)
self.signature_checkBox2.setChecked(True)
self.signature_checkBox2.setObjectName("signature_checkBox2")
self.gridLayout_73.addWidget(self.signature_checkBox2, 1, 5, 1, 1)
self.label_159 = QtWidgets.QLabel(self.tab_multiple_ROI)
self.label_159.setStyleSheet("background-color : #656565; color : white")
self.label_159.setFrameShape(QtWidgets.QFrame.Panel)
self.label_159.setFrameShadow(QtWidgets.QFrame.Sunken)
self.label_159.setAlignment(QtCore.Qt.AlignLeading|QtCore.Qt.AlignLeft|QtCore.Qt.AlignVCenter)
self.label_159.setObjectName("label_159")
self.gridLayout_73.addWidget(self.label_159, 0, 4, 1, 3)
self.save_point_rois_pushButton = QtWidgets.QToolButton(self.tab_multiple_ROI)
font = QtGui.QFont()
font.setBold(True)
font.setWeight(75)
self.save_point_rois_pushButton.setFont(font)
self.save_point_rois_pushButton.setLayoutDirection(QtCore.Qt.RightToLeft)
self.save_point_rois_pushButton.setStyleSheet("margin: 0px;padding: 0px;")
self.save_point_rois_pushButton.setIcon(icon64)
self.save_point_rois_pushButton.setIconSize(QtCore.QSize(34, 34))
self.save_point_rois_pushButton.setToolButtonStyle(QtCore.Qt.ToolButtonTextBesideIcon)
self.save_point_rois_pushButton.setObjectName("save_point_rois_pushButton")
self.gridLayout_73.addWidget(self.save_point_rois_pushButton, 1, 6, 1, 1)
self.gridLayout_32.addLayout(self.gridLayout_73, 3, 0, 1, 2)
self.gridLayout_247.addLayout(self.gridLayout_32, 1, 0, 1, 1)
self.tabWidget_5.addTab(self.tab_multiple_ROI, "")
self.tab_Import = QtWidgets.QWidget()
self.tab_Import.setObjectName("tab_Import")
self.gridLayout_168 = QtWidgets.QGridLayout(self.tab_Import)
self.gridLayout_168.setObjectName("gridLayout_168")
self.toolBox_4 = QtWidgets.QToolBox(self.tab_Import)
self.toolBox_4.setObjectName("toolBox_4")
self.page_8 = QtWidgets.QWidget()
self.page_8.setGeometry(QtCore.QRect(0, 0, 357, 448))
self.page_8.setObjectName("page_8")
self.gridLayout_4 = QtWidgets.QGridLayout(self.page_8)
self.gridLayout_4.setObjectName("gridLayout_4")
self.gridLayout_31 = QtWidgets.QGridLayout()
self.gridLayout_31.setObjectName("gridLayout_31")
self.usgs_chapter_comboBox = QtWidgets.QComboBox(self.page_8)
self.usgs_chapter_comboBox.setObjectName("usgs_chapter_comboBox")
self.gridLayout_31.addWidget(self.usgs_chapter_comboBox, 0, 1, 1, 1)
self.usgs_library_comboBox = QtWidgets.QComboBox(self.page_8)
self.usgs_library_comboBox.setObjectName("usgs_library_comboBox")
self.gridLayout_31.addWidget(self.usgs_library_comboBox, 1, 1, 1, 1)
self.gridLayout_14 = QtWidgets.QGridLayout()
self.gridLayout_14.setObjectName("gridLayout_14")
self.gridLayout_31.addLayout(self.gridLayout_14, 2, 1, 1, 1)
self.label_123 = QtWidgets.QLabel(self.page_8)
sizePolicy = QtWidgets.QSizePolicy(QtWidgets.QSizePolicy.Maximum, QtWidgets.QSizePolicy.Preferred)
sizePolicy.setHorizontalStretch(0)
sizePolicy.setVerticalStretch(0)
sizePolicy.setHeightForWidth(self.label_123.sizePolicy().hasHeightForWidth())
self.label_123.setSizePolicy(sizePolicy)
self.label_123.setFrameShadow(QtWidgets.QFrame.Sunken)
self.label_123.setObjectName("label_123")
self.gridLayout_31.addWidget(self.label_123, 0, 0, 1, 1)
self.label_124 = QtWidgets.QLabel(self.page_8)
sizePolicy = QtWidgets.QSizePolicy(QtWidgets.QSizePolicy.Minimum, QtWidgets.QSizePolicy.Preferred)
sizePolicy.setHorizontalStretch(0)
sizePolicy.setVerticalStretch(0)
sizePolicy.setHeightForWidth(self.label_124.sizePolicy().hasHeightForWidth())
self.label_124.setSizePolicy(sizePolicy)
self.label_124.setFrameShadow(QtWidgets.QFrame.Sunken)
self.label_124.setObjectName("label_124")
self.gridLayout_31.addWidget(self.label_124, 1, 0, 1, 1)
self.gridLayout_12 = QtWidgets.QGridLayout()
self.gridLayout_12.setObjectName("gridLayout_12")
spacerItem16 = QtWidgets.QSpacerItem(40, 20, QtWidgets.QSizePolicy.Expanding, QtWidgets.QSizePolicy.Minimum)
self.gridLayout_12.addItem(spacerItem16, 0, 0, 1, 1)
self.label_130 = QtWidgets.QLabel(self.page_8)
sizePolicy = QtWidgets.QSizePolicy(QtWidgets.QSizePolicy.Maximum, QtWidgets.QSizePolicy.Preferred)
sizePolicy.setHorizontalStretch(0)
sizePolicy.setVerticalStretch(0)
sizePolicy.setHeightForWidth(self.label_130.sizePolicy().hasHeightForWidth())
self.label_130.setSizePolicy(sizePolicy)
self.label_130.setAlignment(QtCore.Qt.AlignLeading|QtCore.Qt.AlignLeft|QtCore.Qt.AlignVCenter)
self.label_130.setObjectName("label_130")
self.gridLayout_12.addWidget(self.label_130, 0, 1, 1, 1)
self.add_usgs_library_pushButton = QtWidgets.QToolButton(self.page_8)
self.add_usgs_library_pushButton.setStyleSheet("margin: 0px;padding: 0px;")
self.add_usgs_library_pushButton.setIcon(icon67)
self.add_usgs_library_pushButton.setIconSize(QtCore.QSize(22, 22))
self.add_usgs_library_pushButton.setObjectName("add_usgs_library_pushButton")
self.gridLayout_12.addWidget(self.add_usgs_library_pushButton, 0, 2, 1, 1)
self.gridLayout_31.addLayout(self.gridLayout_12, 3, 0, 1, 2)
self.USGS_library_textBrowser = QtWidgets.QTextBrowser(self.page_8)
self.USGS_library_textBrowser.setFrameShape(QtWidgets.QFrame.Panel)
self.USGS_library_textBrowser.setFrameShadow(QtWidgets.QFrame.Sunken)
self.USGS_library_textBrowser.setOpenExternalLinks(True)
self.USGS_library_textBrowser.setObjectName("USGS_library_textBrowser")
self.gridLayout_31.addWidget(self.USGS_library_textBrowser, 5, 0, 1, 2)
self.label = QtWidgets.QLabel(self.page_8)
font = QtGui.QFont()
font.setPointSize(8)
self.label.setFont(font)
self.label.setFrameShape(QtWidgets.QFrame.Panel)
self.label.setFrameShadow(QtWidgets.QFrame.Sunken)
self.label.setWordWrap(True)
self.label.setOpenExternalLinks(True)
self.label.setObjectName("label")
self.gridLayout_31.addWidget(self.label, 6, 0, 1, 2)
self.label_129 = QtWidgets.QLabel(self.page_8)
self.label_129.setStyleSheet("background-color : #656565; color : white")
self.label_129.setFrameShape(QtWidgets.QFrame.Panel)
self.label_129.setFrameShadow(QtWidgets.QFrame.Sunken)
self.label_129.setAlignment(QtCore.Qt.AlignLeading|QtCore.Qt.AlignLeft|QtCore.Qt.AlignVCenter)
self.label_129.setObjectName("label_129")
self.gridLayout_31.addWidget(self.label_129, 4, 0, 1, 2)
self.gridLayout_4.addLayout(self.gridLayout_31, 0, 1, 1, 1)
self.toolBox_4.addItem(self.page_8, "")
self.page_6 = QtWidgets.QWidget()
self.page_6.setGeometry(QtCore.QRect(0, 0, 604, 53))
self.page_6.setObjectName("page_6")
self.gridLayout_175 = QtWidgets.QGridLayout(self.page_6)
self.gridLayout_175.setObjectName("gridLayout_175")
self.gridLayout_174 = QtWidgets.QGridLayout()
self.gridLayout_174.setObjectName("gridLayout_174")
self.label_9 = QtWidgets.QLabel(self.page_6)
self.label_9.setFrameShadow(QtWidgets.QFrame.Sunken)
self.label_9.setObjectName("label_9")
self.gridLayout_174.addWidget(self.label_9, 0, 0, 1, 1)
self.open_library_pushButton = QtWidgets.QToolButton(self.page_6)
self.open_library_pushButton.setStyleSheet("margin: 0px;padding: 0px;")
self.open_library_pushButton.setIcon(icon65)
self.open_library_pushButton.setIconSize(QtCore.QSize(22, 22))
self.open_library_pushButton.setObjectName("open_library_pushButton")
self.gridLayout_174.addWidget(self.open_library_pushButton, 0, 1, 1, 1)
spacerItem17 = QtWidgets.QSpacerItem(20, 40, QtWidgets.QSizePolicy.Minimum, QtWidgets.QSizePolicy.Expanding)
self.gridLayout_174.addItem(spacerItem17, 2, 0, 1, 1)
self.gridLayout_175.addLayout(self.gridLayout_174, 0, 0, 1, 1)
self.toolBox_4.addItem(self.page_6, "")
self.page_9 = QtWidgets.QWidget()
self.page_9.setGeometry(QtCore.QRect(0, 0, 417, 165))
self.page_9.setObjectName("page_9")
self.gridLayout_181 = QtWidgets.QGridLayout(self.page_9)
self.gridLayout_181.setObjectName("gridLayout_181")
self.gridLayout_178 = QtWidgets.QGridLayout()
self.gridLayout_178.setObjectName("gridLayout_178")
self.open_shapefile_pushButton = QtWidgets.QToolButton(self.page_9)
self.open_shapefile_pushButton.setStyleSheet("margin: 0px;padding: 0px;")
self.open_shapefile_pushButton.setIcon(icon65)
self.open_shapefile_pushButton.setIconSize(QtCore.QSize(20, 20))
self.open_shapefile_pushButton.setObjectName("open_shapefile_pushButton")
self.gridLayout_178.addWidget(self.open_shapefile_pushButton, 0, 2, 1, 1)
self.label_120 = QtWidgets.QLabel(self.page_9)
sizePolicy = QtWidgets.QSizePolicy(QtWidgets.QSizePolicy.Maximum, QtWidgets.QSizePolicy.Preferred)
sizePolicy.setHorizontalStretch(0)
sizePolicy.setVerticalStretch(0)
sizePolicy.setHeightForWidth(self.label_120.sizePolicy().hasHeightForWidth())
self.label_120.setSizePolicy(sizePolicy)
self.label_120.setFrameShadow(QtWidgets.QFrame.Sunken)
self.label_120.setObjectName("label_120")
self.gridLayout_178.addWidget(self.label_120, 0, 0, 1, 1)
self.gridLayout_179 = QtWidgets.QGridLayout()
self.gridLayout_179.setObjectName("gridLayout_179")
self.C_ID_combo = QtWidgets.QComboBox(self.page_9)
sizePolicy = QtWidgets.QSizePolicy(QtWidgets.QSizePolicy.Preferred, QtWidgets.QSizePolicy.Fixed)
sizePolicy.setHorizontalStretch(1)
sizePolicy.setVerticalStretch(0)
sizePolicy.setHeightForWidth(self.C_ID_combo.sizePolicy().hasHeightForWidth())
self.C_ID_combo.setSizePolicy(sizePolicy)
self.C_ID_combo.setObjectName("C_ID_combo")
self.gridLayout_179.addWidget(self.C_ID_combo, 2, 2, 1, 1)
self.MC_ID_combo = QtWidgets.QComboBox(self.page_9)
sizePolicy = QtWidgets.QSizePolicy(QtWidgets.QSizePolicy.Preferred, QtWidgets.QSizePolicy.Fixed)
sizePolicy.setHorizontalStretch(1)
sizePolicy.setVerticalStretch(0)
sizePolicy.setHeightForWidth(self.MC_ID_combo.sizePolicy().hasHeightForWidth())
self.MC_ID_combo.setSizePolicy(sizePolicy)
self.MC_ID_combo.setObjectName("MC_ID_combo")
self.gridLayout_179.addWidget(self.MC_ID_combo, 2, 0, 1, 1)
self.MC_Info_combo = QtWidgets.QComboBox(self.page_9)
sizePolicy = QtWidgets.QSizePolicy(QtWidgets.QSizePolicy.Preferred, QtWidgets.QSizePolicy.Fixed)
sizePolicy.setHorizontalStretch(1)
sizePolicy.setVerticalStretch(0)
sizePolicy.setHeightForWidth(self.MC_Info_combo.sizePolicy().hasHeightForWidth())
self.MC_Info_combo.setSizePolicy(sizePolicy)
self.MC_Info_combo.setObjectName("MC_Info_combo")
self.gridLayout_179.addWidget(self.MC_Info_combo, 2, 1, 1, 1)
self.label_99 = QtWidgets.QLabel(self.page_9)
self.label_99.setFrameShape(QtWidgets.QFrame.Panel)
self.label_99.setFrameShadow(QtWidgets.QFrame.Sunken)
self.label_99.setObjectName("label_99")
self.gridLayout_179.addWidget(self.label_99, 1, 3, 1, 1)
self.C_Info_combo = QtWidgets.QComboBox(self.page_9)
sizePolicy = QtWidgets.QSizePolicy(QtWidgets.QSizePolicy.Preferred, QtWidgets.QSizePolicy.Fixed)
sizePolicy.setHorizontalStretch(1)
sizePolicy.setVerticalStretch(0)
sizePolicy.setHeightForWidth(self.C_Info_combo.sizePolicy().hasHeightForWidth())
self.C_Info_combo.setSizePolicy(sizePolicy)
self.C_Info_combo.setObjectName("C_Info_combo")
self.gridLayout_179.addWidget(self.C_Info_combo, 2, 3, 1, 1)
self.label_119 = QtWidgets.QLabel(self.page_9)
self.label_119.setStyleSheet("background-color : #656565; color : white")
self.label_119.setFrameShape(QtWidgets.QFrame.Panel)
self.label_119.setFrameShadow(QtWidgets.QFrame.Sunken)
self.label_119.setAlignment(QtCore.Qt.AlignLeading|QtCore.Qt.AlignLeft|QtCore.Qt.AlignVCenter)
self.label_119.setObjectName("label_119")
self.gridLayout_179.addWidget(self.label_119, 0, 0, 1, 5)
self.MC_ID_combo_2 = QtWidgets.QLabel(self.page_9)
self.MC_ID_combo_2.setFrameShape(QtWidgets.QFrame.Panel)
self.MC_ID_combo_2.setFrameShadow(QtWidgets.QFrame.Sunken)
self.MC_ID_combo_2.setObjectName("MC_ID_combo_2")
self.gridLayout_179.addWidget(self.MC_ID_combo_2, 1, 2, 1, 1)
self.label_121 = QtWidgets.QLabel(self.page_9)
self.label_121.setFrameShape(QtWidgets.QFrame.Panel)
self.label_121.setFrameShadow(QtWidgets.QFrame.Sunken)
self.label_121.setObjectName("label_121")
self.gridLayout_179.addWidget(self.label_121, 1, 0, 1, 1)
self.label_122 = QtWidgets.QLabel(self.page_9)
self.label_122.setFrameShape(QtWidgets.QFrame.Panel)
self.label_122.setFrameShadow(QtWidgets.QFrame.Sunken)
self.label_122.setObjectName("label_122")
self.gridLayout_179.addWidget(self.label_122, 1, 1, 1, 1)
self.gridLayout_178.addLayout(self.gridLayout_179, 3, 0, 1, 3)
spacerItem18 = QtWidgets.QSpacerItem(20, 40, QtWidgets.QSizePolicy.Minimum, QtWidgets.QSizePolicy.Expanding)
self.gridLayout_178.addItem(spacerItem18, 5, 0, 1, 1)
self.gridLayout_180 = QtWidgets.QGridLayout()
self.gridLayout_180.setObjectName("gridLayout_180")
self.label_2 = QtWidgets.QLabel(self.page_9)
self.label_2.setObjectName("label_2")
self.gridLayout_180.addWidget(self.label_2, 0, 2, 1, 1)
spacerItem19 = QtWidgets.QSpacerItem(40, 20, QtWidgets.QSizePolicy.Expanding, QtWidgets.QSizePolicy.Minimum)
self.gridLayout_180.addItem(spacerItem19, 0, 0, 1, 1)
self.signature_checkBox_2 = QtWidgets.QCheckBox(self.page_9)
self.signature_checkBox_2.setChecked(True)
self.signature_checkBox_2.setObjectName("signature_checkBox_2")
self.gridLayout_180.addWidget(self.signature_checkBox_2, 0, 1, 1, 1)
self.import_shapefile_pushButton = QtWidgets.QToolButton(self.page_9)
self.import_shapefile_pushButton.setStyleSheet("margin: 0px;padding: 0px;")
self.import_shapefile_pushButton.setIcon(icon67)
self.import_shapefile_pushButton.setIconSize(QtCore.QSize(22, 22))
self.import_shapefile_pushButton.setObjectName("import_shapefile_pushButton")
self.gridLayout_180.addWidget(self.import_shapefile_pushButton, 0, 3, 1, 1)
self.gridLayout_178.addLayout(self.gridLayout_180, 4, 0, 1, 3)
self.select_shapefile_label = QtWidgets.QLabel(self.page_9)
self.select_shapefile_label.setFrameShape(QtWidgets.QFrame.Panel)
self.select_shapefile_label.setFrameShadow(QtWidgets.QFrame.Sunken)
self.select_shapefile_label.setText("")
self.select_shapefile_label.setObjectName("select_shapefile_label")
self.gridLayout_178.addWidget(self.select_shapefile_label, 0, 1, 1, 1)
self.gridLayout_181.addLayout(self.gridLayout_178, 0, 1, 1, 1)
self.toolBox_4.addItem(self.page_9, "")
self.gridLayout_168.addWidget(self.toolBox_4, 0, 0, 1, 1)
self.tabWidget_5.addTab(self.tab_Import, "")
self.tab_export = QtWidgets.QWidget()
self.tab_export.setObjectName("tab_export")
self.gridLayout_142 = QtWidgets.QGridLayout(self.tab_export)
self.gridLayout_142.setObjectName("gridLayout_142")
self.gridLayout_176 = QtWidgets.QGridLayout()
self.gridLayout_176.setObjectName("gridLayout_176")
self.label_97 = QtWidgets.QLabel(self.tab_export)
self.label_97.setFrameShadow(QtWidgets.QFrame.Sunken)
self.label_97.setObjectName("label_97")
self.gridLayout_176.addWidget(self.label_97, 1, 0, 1, 1)
self.export_SCP_pushButton = QtWidgets.QToolButton(self.tab_export)
self.export_SCP_pushButton.setStyleSheet("margin: 0px;padding: 0px;")
icon68 = QtGui.QIcon()
icon68.addPixmap(QtGui.QPixmap(":/plugins/semiautomaticclassificationplugin/icons/semiautomaticclassificationplugin_new_file.svg"), QtGui.QIcon.Normal, QtGui.QIcon.Off)
self.export_SCP_pushButton.setIcon(icon68)
self.export_SCP_pushButton.setIconSize(QtCore.QSize(22, 22))
self.export_SCP_pushButton.setObjectName("export_SCP_pushButton")
self.gridLayout_176.addWidget(self.export_SCP_pushButton, 1, 1, 1, 1)
self.export_CSV_library_toolButton = QtWidgets.QToolButton(self.tab_export)
self.export_CSV_library_toolButton.setStyleSheet("margin: 0px;padding: 0px;")
icon69 = QtGui.QIcon()
icon69.addPixmap(QtGui.QPixmap(":/plugins/semiautomaticclassificationplugin/icons/semiautomaticclassificationplugin_open_dir.svg"), QtGui.QIcon.Normal, QtGui.QIcon.Off)
self.export_CSV_library_toolButton.setIcon(icon69)
self.export_CSV_library_toolButton.setIconSize(QtCore.QSize(22, 22))
self.export_CSV_library_toolButton.setObjectName("export_CSV_library_toolButton")
self.gridLayout_176.addWidget(self.export_CSV_library_toolButton, 3, 1, 1, 1)
self.label_96 = QtWidgets.QLabel(self.tab_export)
self.label_96.setStyleSheet("background-color : #656565; color : white")
self.label_96.setFrameShape(QtWidgets.QFrame.Panel)
self.label_96.setFrameShadow(QtWidgets.QFrame.Sunken)
self.label_96.setAlignment(QtCore.Qt.AlignLeading|QtCore.Qt.AlignLeft|QtCore.Qt.AlignVCenter)
self.label_96.setObjectName("label_96")
self.gridLayout_176.addWidget(self.label_96, 0, 0, 1, 2)
self.label_222 = QtWidgets.QLabel(self.tab_export)
self.label_222.setFrameShadow(QtWidgets.QFrame.Sunken)
self.label_222.setObjectName("label_222")
self.gridLayout_176.addWidget(self.label_222, 2, 0, 1, 1)
self.label_20 = QtWidgets.QLabel(self.tab_export)
self.label_20.setFrameShadow(QtWidgets.QFrame.Sunken)
self.label_20.setObjectName("label_20")
self.gridLayout_176.addWidget(self.label_20, 3, 0, 1, 1)
self.export_SHP_pushButton = QtWidgets.QToolButton(self.tab_export)
self.export_SHP_pushButton.setStyleSheet("margin: 0px;padding: 0px;")
self.export_SHP_pushButton.setIcon(icon68)
self.export_SHP_pushButton.setIconSize(QtCore.QSize(22, 22))
self.export_SHP_pushButton.setObjectName("export_SHP_pushButton")
self.gridLayout_176.addWidget(self.export_SHP_pushButton, 2, 1, 1, 1)
spacerItem20 = QtWidgets.QSpacerItem(20, 40, QtWidgets.QSizePolicy.Minimum, QtWidgets.QSizePolicy.Expanding)
self.gridLayout_176.addItem(spacerItem20, 4, 0, 1, 1)
self.gridLayout_142.addLayout(self.gridLayout_176, 0, 0, 1, 1)
self.tabWidget_5.addTab(self.tab_export, "")
self.tab_threshold = QtWidgets.QWidget()
self.tab_threshold.setObjectName("tab_threshold")
self.gridLayout_177 = QtWidgets.QGridLayout(self.tab_threshold)
self.gridLayout_177.setObjectName("gridLayout_177")
self.gridLayout_110 = QtWidgets.QGridLayout()
self.gridLayout_110.setObjectName("gridLayout_110")
self.gridLayout_161 = QtWidgets.QGridLayout()
self.gridLayout_161.setObjectName("gridLayout_161")
self.gridLayout_198 = QtWidgets.QGridLayout()
self.gridLayout_198.setObjectName("gridLayout_198")
spacerItem21 = QtWidgets.QSpacerItem(20, 40, QtWidgets.QSizePolicy.Minimum, QtWidgets.QSizePolicy.Expanding)
self.gridLayout_198.addItem(spacerItem21, 0, 0, 1, 1)
self.gridLayout_161.addLayout(self.gridLayout_198, 0, 0, 1, 1)
spacerItem22 = QtWidgets.QSpacerItem(20, 40, QtWidgets.QSizePolicy.Minimum, QtWidgets.QSizePolicy.Expanding)
self.gridLayout_161.addItem(spacerItem22, 3, 0, 1, 1)
self.gridLayout_129 = QtWidgets.QGridLayout()
self.gridLayout_129.setObjectName("gridLayout_129")
self.reset_threshold_pushButton = QtWidgets.QToolButton(self.tab_threshold)
self.reset_threshold_pushButton.setStyleSheet("margin: 0px;padding: 0px;")
self.reset_threshold_pushButton.setIcon(icon59)
self.reset_threshold_pushButton.setIconSize(QtCore.QSize(22, 22))
self.reset_threshold_pushButton.setObjectName("reset_threshold_pushButton")
self.gridLayout_129.addWidget(self.reset_threshold_pushButton, 0, 0, 1, 1)
self.gridLayout_161.addLayout(self.gridLayout_129, 1, 0, 1, 1)
self.gridLayout_110.addLayout(self.gridLayout_161, 1, 1, 1, 1)
self.signature_threshold_tableWidget = QtWidgets.QTableWidget(self.tab_threshold)
self.signature_threshold_tableWidget.setSelectionMode(QtWidgets.QAbstractItemView.ExtendedSelection)
self.signature_threshold_tableWidget.setSelectionBehavior(QtWidgets.QAbstractItemView.SelectItems)
self.signature_threshold_tableWidget.setObjectName("signature_threshold_tableWidget")
self.signature_threshold_tableWidget.setColumnCount(7)
self.signature_threshold_tableWidget.setRowCount(0)
item = QtWidgets.QTableWidgetItem()
self.signature_threshold_tableWidget.setHorizontalHeaderItem(0, item)
item = QtWidgets.QTableWidgetItem()
self.signature_threshold_tableWidget.setHorizontalHeaderItem(1, item)
item = QtWidgets.QTableWidgetItem()
self.signature_threshold_tableWidget.setHorizontalHeaderItem(2, item)
item = QtWidgets.QTableWidgetItem()
self.signature_threshold_tableWidget.setHorizontalHeaderItem(3, item)
item = QtWidgets.QTableWidgetItem()
self.signature_threshold_tableWidget.setHorizontalHeaderItem(4, item)
item = QtWidgets.QTableWidgetItem()
self.signature_threshold_tableWidget.setHorizontalHeaderItem(5, item)
item = QtWidgets.QTableWidgetItem()
self.signature_threshold_tableWidget.setHorizontalHeaderItem(6, item)
self.signature_threshold_tableWidget.horizontalHeader().setDefaultSectionSize(50)
self.signature_threshold_tableWidget.horizontalHeader().setStretchLastSection(True)
self.signature_threshold_tableWidget.verticalHeader().setDefaultSectionSize(20)
self.gridLayout_110.addWidget(self.signature_threshold_tableWidget, 1, 0, 1, 1)
self.gridLayout_109 = QtWidgets.QGridLayout()
self.gridLayout_109.setObjectName("gridLayout_109")
self.label_80 = QtWidgets.QLabel(self.tab_threshold)
self.label_80.setStyleSheet("background-color : #656565; color : white")
self.label_80.setFrameShape(QtWidgets.QFrame.Panel)
self.label_80.setFrameShadow(QtWidgets.QFrame.Sunken)
self.label_80.setAlignment(QtCore.Qt.AlignLeading|QtCore.Qt.AlignLeft|QtCore.Qt.AlignVCenter)
self.label_80.setObjectName("label_80")
self.gridLayout_109.addWidget(self.label_80, 0, 0, 1, 1)
self.gridLayout_110.addLayout(self.gridLayout_109, 0, 0, 1, 2)
self.gridLayout_111 = QtWidgets.QGridLayout()
self.gridLayout_111.setObjectName("gridLayout_111")
self.label_85 = QtWidgets.QLabel(self.tab_threshold)
self.label_85.setFrameShadow(QtWidgets.QFrame.Sunken)
self.label_85.setAlignment(QtCore.Qt.AlignLeading|QtCore.Qt.AlignLeft|QtCore.Qt.AlignVCenter)
self.label_85.setObjectName("label_85")
self.gridLayout_111.addWidget(self.label_85, 1, 2, 1, 1)
self.multiplicative_threshold_doubleSpinBox = QtWidgets.QDoubleSpinBox(self.tab_threshold)
self.multiplicative_threshold_doubleSpinBox.setDecimals(1)
self.multiplicative_threshold_doubleSpinBox.setMaximum(10000.0)
self.multiplicative_threshold_doubleSpinBox.setProperty("value", 1.0)
self.multiplicative_threshold_doubleSpinBox.setObjectName("multiplicative_threshold_doubleSpinBox")
self.gridLayout_111.addWidget(self.multiplicative_threshold_doubleSpinBox, 1, 3, 1, 1)
self.automatic_threshold_pushButton = QtWidgets.QToolButton(self.tab_threshold)
self.automatic_threshold_pushButton.setStyleSheet("margin: 0px;padding: 0px;")
self.automatic_threshold_pushButton.setIcon(icon67)
self.automatic_threshold_pushButton.setIconSize(QtCore.QSize(22, 22))
self.automatic_threshold_pushButton.setObjectName("automatic_threshold_pushButton")
self.gridLayout_111.addWidget(self.automatic_threshold_pushButton, 1, 4, 1, 1)
self.horizontalLayout_3 = QtWidgets.QHBoxLayout()
self.horizontalLayout_3.setObjectName("horizontalLayout_3")
self.label_132 = QtWidgets.QLabel(self.tab_threshold)
self.label_132.setAlignment(QtCore.Qt.AlignLeading|QtCore.Qt.AlignLeft|QtCore.Qt.AlignVCenter)
self.label_132.setObjectName("label_132")
self.horizontalLayout_3.addWidget(self.label_132)
self.threshold_doubleSpinBox = QtWidgets.QDoubleSpinBox(self.tab_threshold)
self.threshold_doubleSpinBox.setDecimals(4)
self.threshold_doubleSpinBox.setMaximum(10000.0)
self.threshold_doubleSpinBox.setProperty("value", 0.0)
self.threshold_doubleSpinBox.setObjectName("threshold_doubleSpinBox")
self.horizontalLayout_3.addWidget(self.threshold_doubleSpinBox)
self.set_threshold_value_pushButton = QtWidgets.QToolButton(self.tab_threshold)
self.set_threshold_value_pushButton.setStyleSheet("margin: 0px;padding: 0px;")
self.set_threshold_value_pushButton.setIcon(icon67)
self.set_threshold_value_pushButton.setIconSize(QtCore.QSize(22, 22))
self.set_threshold_value_pushButton.setObjectName("set_threshold_value_pushButton")
self.horizontalLayout_3.addWidget(self.set_threshold_value_pushButton)
self.gridLayout_111.addLayout(self.horizontalLayout_3, 1, 0, 1, 1)
spacerItem23 = QtWidgets.QSpacerItem(40, 20, QtWidgets.QSizePolicy.Expanding, QtWidgets.QSizePolicy.Minimum)
self.gridLayout_111.addItem(spacerItem23, 1, 1, 1, 1)
spacerItem24 = QtWidgets.QSpacerItem(40, 20, QtWidgets.QSizePolicy.Expanding, QtWidgets.QSizePolicy.Minimum)
self.gridLayout_111.addItem(spacerItem24, 1, 5, 1, 1)
self.label_88 = QtWidgets.QLabel(self.tab_threshold)
sizePolicy = QtWidgets.QSizePolicy(QtWidgets.QSizePolicy.Preferred, QtWidgets.QSizePolicy.Maximum)
sizePolicy.setHorizontalStretch(0)
sizePolicy.setVerticalStretch(0)
sizePolicy.setHeightForWidth(self.label_88.sizePolicy().hasHeightForWidth())
self.label_88.setSizePolicy(sizePolicy)
self.label_88.setStyleSheet("background-color : #656565; color : white")
self.label_88.setFrameShape(QtWidgets.QFrame.Panel)
self.label_88.setFrameShadow(QtWidgets.QFrame.Sunken)
self.label_88.setAlignment(QtCore.Qt.AlignLeading|QtCore.Qt.AlignLeft|QtCore.Qt.AlignVCenter)
self.label_88.setObjectName("label_88")
self.gridLayout_111.addWidget(self.label_88, 0, 0, 1, 6)
self.gridLayout_110.addLayout(self.gridLayout_111, 2, 0, 1, 2)
self.gridLayout_177.addLayout(self.gridLayout_110, 0, 0, 1, 1)
self.tabWidget_5.addTab(self.tab_threshold, "")
self.tab_LCS_threshold = QtWidgets.QWidget()
self.tab_LCS_threshold.setObjectName("tab_LCS_threshold")
self.gridLayout_105 = QtWidgets.QGridLayout(self.tab_LCS_threshold)
self.gridLayout_105.setObjectName("gridLayout_105")
self.gridLayout_137 = QtWidgets.QGridLayout()
self.gridLayout_137.setObjectName("gridLayout_137")
self.LCS_tableWidget = QtWidgets.QTableWidget(self.tab_LCS_threshold)
sizePolicy = QtWidgets.QSizePolicy(QtWidgets.QSizePolicy.Expanding, QtWidgets.QSizePolicy.Expanding)
sizePolicy.setHorizontalStretch(0)
sizePolicy.setVerticalStretch(0)
sizePolicy.setHeightForWidth(self.LCS_tableWidget.sizePolicy().hasHeightForWidth())
self.LCS_tableWidget.setSizePolicy(sizePolicy)
self.LCS_tableWidget.setEditTriggers(QtWidgets.QAbstractItemView.SelectedClicked)
self.LCS_tableWidget.setAlternatingRowColors(True)
self.LCS_tableWidget.setObjectName("LCS_tableWidget")
self.LCS_tableWidget.setColumnCount(5)
self.LCS_tableWidget.setRowCount(0)
item = QtWidgets.QTableWidgetItem()
self.LCS_tableWidget.setHorizontalHeaderItem(0, item)
item = QtWidgets.QTableWidgetItem()
self.LCS_tableWidget.setHorizontalHeaderItem(1, item)
item = QtWidgets.QTableWidgetItem()
self.LCS_tableWidget.setHorizontalHeaderItem(2, item)
item = QtWidgets.QTableWidgetItem()
self.LCS_tableWidget.setHorizontalHeaderItem(3, item)
item = QtWidgets.QTableWidgetItem()
self.LCS_tableWidget.setHorizontalHeaderItem(4, item)
self.LCS_tableWidget.horizontalHeader().setDefaultSectionSize(50)
self.LCS_tableWidget.horizontalHeader().setStretchLastSection(True)
self.LCS_tableWidget.verticalHeader().setDefaultSectionSize(20)
self.gridLayout_137.addWidget(self.LCS_tableWidget, 1, 0, 1, 1)
self.gridLayout_138 = QtWidgets.QGridLayout()
self.gridLayout_138.setObjectName("gridLayout_138")
self.label_86 = QtWidgets.QLabel(self.tab_LCS_threshold)
self.label_86.setStyleSheet("background-color : #656565; color : white")
self.label_86.setFrameShape(QtWidgets.QFrame.Panel)
self.label_86.setFrameShadow(QtWidgets.QFrame.Sunken)
self.label_86.setAlignment(QtCore.Qt.AlignLeading|QtCore.Qt.AlignLeft|QtCore.Qt.AlignVCenter)
self.label_86.setObjectName("label_86")
self.gridLayout_138.addWidget(self.label_86, 0, 0, 1, 1)
self.gridLayout_137.addLayout(self.gridLayout_138, 0, 0, 1, 2)
self.verticalLayout = QtWidgets.QVBoxLayout()
self.verticalLayout.setObjectName("verticalLayout")
self.signature_spectral_plot_toolButton_2 = QtWidgets.QToolButton(self.tab_LCS_threshold)
self.signature_spectral_plot_toolButton_2.setStyleSheet("margin: 0px;padding: 0px;")
icon70 = QtGui.QIcon()
icon70.addPixmap(QtGui.QPixmap(":/plugins/semiautomaticclassificationplugin/icons/semiautomaticclassificationplugin_sign_tool.svg"), QtGui.QIcon.Normal, QtGui.QIcon.Off)
self.signature_spectral_plot_toolButton_2.setIcon(icon70)
self.signature_spectral_plot_toolButton_2.setIconSize(QtCore.QSize(22, 22))
self.signature_spectral_plot_toolButton_2.setObjectName("signature_spectral_plot_toolButton_2")
self.verticalLayout.addWidget(self.signature_spectral_plot_toolButton_2)
self.gridLayout_137.addLayout(self.verticalLayout, 1, 1, 1, 1)
self.gridLayout_139 = QtWidgets.QGridLayout()
self.gridLayout_139.setObjectName("gridLayout_139")
self.horizontalLayout = QtWidgets.QHBoxLayout()
self.horizontalLayout.setObjectName("horizontalLayout")
self.label_102 = QtWidgets.QLabel(self.tab_LCS_threshold)
sizePolicy = QtWidgets.QSizePolicy(QtWidgets.QSizePolicy.Preferred, QtWidgets.QSizePolicy.Maximum)
sizePolicy.setHorizontalStretch(0)
sizePolicy.setVerticalStretch(0)
sizePolicy.setHeightForWidth(self.label_102.sizePolicy().hasHeightForWidth())
self.label_102.setSizePolicy(sizePolicy)
self.label_102.setFrameShadow(QtWidgets.QFrame.Sunken)
self.label_102.setAlignment(QtCore.Qt.AlignLeading|QtCore.Qt.AlignLeft|QtCore.Qt.AlignVCenter)
self.label_102.setObjectName("label_102")
self.horizontalLayout.addWidget(self.label_102)
self.set_min_max_Button = QtWidgets.QToolButton(self.tab_LCS_threshold)
self.set_min_max_Button.setStyleSheet("margin: 0px;padding: 0px;")
self.set_min_max_Button.setIcon(icon67)
self.set_min_max_Button.setIconSize(QtCore.QSize(22, 22))
self.set_min_max_Button.setObjectName("set_min_max_Button")
self.horizontalLayout.addWidget(self.set_min_max_Button)
self.gridLayout_139.addLayout(self.horizontalLayout, 1, 0, 1, 1)
self.horizontalLayout_2 = QtWidgets.QHBoxLayout()
self.horizontalLayout_2.setObjectName("horizontalLayout_2")
spacerItem25 = QtWidgets.QSpacerItem(40, 20, QtWidgets.QSizePolicy.Expanding, QtWidgets.QSizePolicy.Minimum)
self.horizontalLayout_2.addItem(spacerItem25)
self.label_101 = QtWidgets.QLabel(self.tab_LCS_threshold)
self.label_101.setFrameShadow(QtWidgets.QFrame.Sunken)
self.label_101.setAlignment(QtCore.Qt.AlignLeading|QtCore.Qt.AlignLeft|QtCore.Qt.AlignVCenter)
self.label_101.setObjectName("label_101")
self.horizontalLayout_2.addWidget(self.label_101)
self.multiplicative_threshold_doubleSpinBox_2 = QtWidgets.QDoubleSpinBox(self.tab_LCS_threshold)
sizePolicy = QtWidgets.QSizePolicy(QtWidgets.QSizePolicy.MinimumExpanding, QtWidgets.QSizePolicy.Fixed)
sizePolicy.setHorizontalStretch(0)
sizePolicy.setVerticalStretch(0)
sizePolicy.setHeightForWidth(self.multiplicative_threshold_doubleSpinBox_2.sizePolicy().hasHeightForWidth())
self.multiplicative_threshold_doubleSpinBox_2.setSizePolicy(sizePolicy)
self.multiplicative_threshold_doubleSpinBox_2.setDecimals(1)
self.multiplicative_threshold_doubleSpinBox_2.setMaximum(10000.0)
self.multiplicative_threshold_doubleSpinBox_2.setProperty("value", 1.0)
self.multiplicative_threshold_doubleSpinBox_2.setObjectName("multiplicative_threshold_doubleSpinBox_2")
self.horizontalLayout_2.addWidget(self.multiplicative_threshold_doubleSpinBox_2)
self.automatic_threshold_pushButton_2 = QtWidgets.QToolButton(self.tab_LCS_threshold)
self.automatic_threshold_pushButton_2.setStyleSheet("margin: 0px;padding: 0px;")
self.automatic_threshold_pushButton_2.setIcon(icon67)
self.automatic_threshold_pushButton_2.setIconSize(QtCore.QSize(22, 22))
self.automatic_threshold_pushButton_2.setObjectName("automatic_threshold_pushButton_2")
self.horizontalLayout_2.addWidget(self.automatic_threshold_pushButton_2)
spacerItem26 = QtWidgets.QSpacerItem(40, 20, QtWidgets.QSizePolicy.Expanding, QtWidgets.QSizePolicy.Minimum)
self.horizontalLayout_2.addItem(spacerItem26)
self.gridLayout_140 = QtWidgets.QGridLayout()
self.gridLayout_140.setObjectName("gridLayout_140")
self.label_89 = QtWidgets.QLabel(self.tab_LCS_threshold)
sizePolicy = QtWidgets.QSizePolicy(QtWidgets.QSizePolicy.Preferred, QtWidgets.QSizePolicy.Maximum)
sizePolicy.setHorizontalStretch(0)
sizePolicy.setVerticalStretch(0)
sizePolicy.setHeightForWidth(self.label_89.sizePolicy().hasHeightForWidth())
self.label_89.setSizePolicy(sizePolicy)
self.label_89.setFrameShadow(QtWidgets.QFrame.Sunken)
self.label_89.setAlignment(QtCore.Qt.AlignLeading|QtCore.Qt.AlignLeft|QtCore.Qt.AlignVCenter)
self.label_89.setObjectName("label_89")
self.gridLayout_140.addWidget(self.label_89, 0, 2, 1, 3)
self.LCS_pointerButton = QtWidgets.QToolButton(self.tab_LCS_threshold)
self.LCS_pointerButton.setStyleSheet("margin: 0px;padding: 0px;")
icon71 = QtGui.QIcon()
icon71.addPixmap(QtGui.QPixmap(":/plugins/semiautomaticclassificationplugin/icons/semiautomaticclassificationplugin_LCS_threshold_set_tool.svg"), QtGui.QIcon.Normal, QtGui.QIcon.Off)
self.LCS_pointerButton.setIcon(icon71)
self.LCS_pointerButton.setIconSize(QtCore.QSize(22, 22))
self.LCS_pointerButton.setObjectName("LCS_pointerButton")
self.gridLayout_140.addWidget(self.LCS_pointerButton, 0, 5, 1, 1)
self.gridLayout_144 = QtWidgets.QGridLayout()
self.gridLayout_144.setObjectName("gridLayout_144")
self.LCS_include_checkBox = QtWidgets.QCheckBox(self.tab_LCS_threshold)
self.LCS_include_checkBox.setText("")
self.LCS_include_checkBox.setIcon(icon57)
self.LCS_include_checkBox.setChecked(True)
self.LCS_include_checkBox.setObjectName("LCS_include_checkBox")
self.gridLayout_144.addWidget(self.LCS_include_checkBox, 0, 0, 1, 1)
self.LCS_cut_checkBox = QtWidgets.QCheckBox(self.tab_LCS_threshold)
self.LCS_cut_checkBox.setText("")
icon72 = QtGui.QIcon()
icon72.addPixmap(QtGui.QPixmap(":/plugins/semiautomaticclassificationplugin/icons/semiautomaticclassificationplugin_minus.svg"), QtGui.QIcon.Normal, QtGui.QIcon.Off)
self.LCS_cut_checkBox.setIcon(icon72)
self.LCS_cut_checkBox.setObjectName("LCS_cut_checkBox")
self.gridLayout_144.addWidget(self.LCS_cut_checkBox, 1, 0, 1, 1)
self.gridLayout_140.addLayout(self.gridLayout_144, 0, 6, 1, 1)
self.label_178 = QtWidgets.QLabel(self.tab_LCS_threshold)
sizePolicy = QtWidgets.QSizePolicy(QtWidgets.QSizePolicy.Preferred, QtWidgets.QSizePolicy.Maximum)
sizePolicy.setHorizontalStretch(0)
sizePolicy.setVerticalStretch(0)
sizePolicy.setHeightForWidth(self.label_178.sizePolicy().hasHeightForWidth())
self.label_178.setSizePolicy(sizePolicy)
self.label_178.setFrameShadow(QtWidgets.QFrame.Sunken)
self.label_178.setAlignment(QtCore.Qt.AlignLeading|QtCore.Qt.AlignLeft|QtCore.Qt.AlignVCenter)
self.label_178.setObjectName("label_178")
self.gridLayout_140.addWidget(self.label_178, 0, 0, 1, 1)
self.LCS_ROI_button = QtWidgets.QToolButton(self.tab_LCS_threshold)
self.LCS_ROI_button.setStyleSheet("margin: 0px;padding: 0px;")
icon73 = QtGui.QIcon()
icon73.addPixmap(QtGui.QPixmap(":/plugins/semiautomaticclassificationplugin/icons/semiautomaticclassificationplugin_LCS_threshold_ROI_tool.svg"), QtGui.QIcon.Normal, QtGui.QIcon.Off)
self.LCS_ROI_button.setIcon(icon73)
self.LCS_ROI_button.setIconSize(QtCore.QSize(22, 22))
self.LCS_ROI_button.setObjectName("LCS_ROI_button")
self.gridLayout_140.addWidget(self.LCS_ROI_button, 0, 1, 1, 1)
self.horizontalLayout_2.addLayout(self.gridLayout_140)
self.gridLayout_139.addLayout(self.horizontalLayout_2, 1, 1, 1, 1)
self.label_125 = QtWidgets.QLabel(self.tab_LCS_threshold)
sizePolicy = QtWidgets.QSizePolicy(QtWidgets.QSizePolicy.Preferred, QtWidgets.QSizePolicy.Maximum)
sizePolicy.setHorizontalStretch(0)
sizePolicy.setVerticalStretch(0)
sizePolicy.setHeightForWidth(self.label_125.sizePolicy().hasHeightForWidth())
self.label_125.setSizePolicy(sizePolicy)
self.label_125.setStyleSheet("background-color : #656565; color : white")
self.label_125.setFrameShape(QtWidgets.QFrame.Panel)
self.label_125.setFrameShadow(QtWidgets.QFrame.Sunken)
self.label_125.setAlignment(QtCore.Qt.AlignLeading|QtCore.Qt.AlignLeft|QtCore.Qt.AlignVCenter)
self.label_125.setObjectName("label_125")
self.gridLayout_139.addWidget(self.label_125, 0, 0, 1, 2)
self.gridLayout_137.addLayout(self.gridLayout_139, 2, 0, 1, 2)
self.gridLayout_105.addLayout(self.gridLayout_137, 0, 0, 1, 1)
self.tabWidget_5.addTab(self.tab_LCS_threshold, "")
self.gridLayout_216.addWidget(self.tabWidget_5, 0, 0, 1, 1)
self.SCP_tabs.addTab(self.tab_basic_tools, "")
self.tab_download_products = QtWidgets.QWidget()
self.tab_download_products.setObjectName("tab_download_products")
self.gridLayout_68 = QtWidgets.QGridLayout(self.tab_download_products)
self.gridLayout_68.setObjectName("gridLayout_68")
self.gridLayout_113 = QtWidgets.QGridLayout()
self.gridLayout_113.setObjectName("gridLayout_113")
self.tabWidget_3 = QtWidgets.QTabWidget(self.tab_download_products)
self.tabWidget_3.setStyleSheet("QTabBar::tab {\n"
"padding: 10px;\n"
"min-height: 18px;\n"
"}")
self.tabWidget_3.setTabPosition(QtWidgets.QTabWidget.North)
self.tabWidget_3.setObjectName("tabWidget_3")
self.tab_login = QtWidgets.QWidget()
self.tab_login.setObjectName("tab_login")
self.gridLayout_238 = QtWidgets.QGridLayout(self.tab_login)
self.gridLayout_238.setObjectName("gridLayout_238")
self.gridLayout_227 = QtWidgets.QGridLayout()
self.gridLayout_227.setObjectName("gridLayout_227")
self.remember_user_checkBox_2 = QtWidgets.QCheckBox(self.tab_login)
self.remember_user_checkBox_2.setChecked(True)
self.remember_user_checkBox_2.setObjectName("remember_user_checkBox_2")
self.gridLayout_227.addWidget(self.remember_user_checkBox_2, 1, 4, 1, 1)
self.password_usgs_lineEdit = QtWidgets.QLineEdit(self.tab_login)
self.password_usgs_lineEdit.setObjectName("password_usgs_lineEdit")
self.gridLayout_227.addWidget(self.password_usgs_lineEdit, 1, 3, 1, 1)
self.password_scihub_label_3 = QtWidgets.QLabel(self.tab_login)
self.password_scihub_label_3.setObjectName("password_scihub_label_3")
self.gridLayout_227.addWidget(self.password_scihub_label_3, 1, 2, 1, 1)
self.label_180 = QtWidgets.QLabel(self.tab_login)
self.label_180.setStyleSheet("background-color : #656565; color : white")
self.label_180.setFrameShape(QtWidgets.QFrame.Panel)
self.label_180.setFrameShadow(QtWidgets.QFrame.Sunken)
self.label_180.setAlignment(QtCore.Qt.AlignLeading|QtCore.Qt.AlignLeft|QtCore.Qt.AlignVCenter)
self.label_180.setOpenExternalLinks(True)
self.label_180.setObjectName("label_180")
self.gridLayout_227.addWidget(self.label_180, 0, 0, 1, 6)
self.user_usgs_lineEdit = QtWidgets.QLineEdit(self.tab_login)
self.user_usgs_lineEdit.setObjectName("user_usgs_lineEdit")
self.gridLayout_227.addWidget(self.user_usgs_lineEdit, 1, 1, 1, 1)
self.user_scihub_label_2 = QtWidgets.QLabel(self.tab_login)
self.user_scihub_label_2.setObjectName("user_scihub_label_2")
self.gridLayout_227.addWidget(self.user_scihub_label_2, 1, 0, 1, 1)
self.gridLayout_238.addLayout(self.gridLayout_227, 0, 0, 1, 1)
self.gridLayout_242 = QtWidgets.QGridLayout()
self.gridLayout_242.setObjectName("gridLayout_242")
self.remember_user_checkBox_3 = QtWidgets.QCheckBox(self.tab_login)
self.remember_user_checkBox_3.setChecked(True)
self.remember_user_checkBox_3.setObjectName("remember_user_checkBox_3")
self.gridLayout_242.addWidget(self.remember_user_checkBox_3, 1, 4, 1, 1)
self.password_usgs_lineEdit_2 = QtWidgets.QLineEdit(self.tab_login)
self.password_usgs_lineEdit_2.setObjectName("password_usgs_lineEdit_2")
self.gridLayout_242.addWidget(self.password_usgs_lineEdit_2, 1, 3, 1, 1)
self.password_scihub_label_4 = QtWidgets.QLabel(self.tab_login)
self.password_scihub_label_4.setObjectName("password_scihub_label_4")
self.gridLayout_242.addWidget(self.password_scihub_label_4, 1, 2, 1, 1)
self.label_191 = QtWidgets.QLabel(self.tab_login)
self.label_191.setStyleSheet("background-color : #656565; color : white")
self.label_191.setFrameShape(QtWidgets.QFrame.Panel)
self.label_191.setFrameShadow(QtWidgets.QFrame.Sunken)
self.label_191.setAlignment(QtCore.Qt.AlignLeading|QtCore.Qt.AlignLeft|QtCore.Qt.AlignVCenter)
self.label_191.setOpenExternalLinks(True)
self.label_191.setObjectName("label_191")
self.gridLayout_242.addWidget(self.label_191, 0, 0, 1, 6)
self.user_usgs_lineEdit_2 = QtWidgets.QLineEdit(self.tab_login)
self.user_usgs_lineEdit_2.setObjectName("user_usgs_lineEdit_2")
self.gridLayout_242.addWidget(self.user_usgs_lineEdit_2, 1, 1, 1, 1)
self.user_scihub_label_3 = QtWidgets.QLabel(self.tab_login)
self.user_scihub_label_3.setObjectName("user_scihub_label_3")
self.gridLayout_242.addWidget(self.user_scihub_label_3, 1, 0, 1, 1)
self.gridLayout_238.addLayout(self.gridLayout_242, 1, 0, 1, 1)
self.gridLayout_159 = QtWidgets.QGridLayout()
self.gridLayout_159.setObjectName("gridLayout_159")
self.user_scihub_lineEdit = QtWidgets.QLineEdit(self.tab_login)
self.user_scihub_lineEdit.setObjectName("user_scihub_lineEdit")
self.gridLayout_159.addWidget(self.user_scihub_lineEdit, 2, 1, 1, 1)
self.password_scihub_label = QtWidgets.QLabel(self.tab_login)
self.password_scihub_label.setObjectName("password_scihub_label")
self.gridLayout_159.addWidget(self.password_scihub_label, 2, 2, 1, 1)
self.label_147 = QtWidgets.QLabel(self.tab_login)
self.label_147.setStyleSheet("background-color : #656565; color : white")
self.label_147.setFrameShape(QtWidgets.QFrame.Panel)
self.label_147.setFrameShadow(QtWidgets.QFrame.Sunken)
self.label_147.setAlignment(QtCore.Qt.AlignLeading|QtCore.Qt.AlignLeft|QtCore.Qt.AlignVCenter)
self.label_147.setOpenExternalLinks(True)
self.label_147.setObjectName("label_147")
self.gridLayout_159.addWidget(self.label_147, 0, 0, 1, 6)
self.remember_user_checkBox = QtWidgets.QCheckBox(self.tab_login)
self.remember_user_checkBox.setChecked(True)
self.remember_user_checkBox.setObjectName("remember_user_checkBox")
self.gridLayout_159.addWidget(self.remember_user_checkBox, 2, 4, 1, 1)
self.user_scihub_label = QtWidgets.QLabel(self.tab_login)
self.user_scihub_label.setObjectName("user_scihub_label")
self.gridLayout_159.addWidget(self.user_scihub_label, 2, 0, 1, 1)
self.password_scihub_lineEdit = QtWidgets.QLineEdit(self.tab_login)
self.password_scihub_lineEdit.setObjectName("password_scihub_lineEdit")
self.gridLayout_159.addWidget(self.password_scihub_lineEdit, 2, 3, 1, 1)
self.horizontalLayout_10 = QtWidgets.QHBoxLayout()
self.horizontalLayout_10.setObjectName("horizontalLayout_10")
self.password_scihub_label_2 = QtWidgets.QLabel(self.tab_login)
self.password_scihub_label_2.setObjectName("password_scihub_label_2")
self.horizontalLayout_10.addWidget(self.password_scihub_label_2)
self.sentinel_service_lineEdit = QtWidgets.QLineEdit(self.tab_login)
self.sentinel_service_lineEdit.setText("")
self.sentinel_service_lineEdit.setObjectName("sentinel_service_lineEdit")
self.horizontalLayout_10.addWidget(self.sentinel_service_lineEdit)
self.reset_sentinel_service_toolButton = QtWidgets.QToolButton(self.tab_login)
self.reset_sentinel_service_toolButton.setStyleSheet("margin: 0px;padding: 0px;")
self.reset_sentinel_service_toolButton.setIcon(icon59)
self.reset_sentinel_service_toolButton.setIconSize(QtCore.QSize(22, 22))
self.reset_sentinel_service_toolButton.setObjectName("reset_sentinel_service_toolButton")
self.horizontalLayout_10.addWidget(self.reset_sentinel_service_toolButton)
self.gridLayout_159.addLayout(self.horizontalLayout_10, 1, 0, 1, 5)
self.sentinel2_alternative_search_checkBox = QtWidgets.QCheckBox(self.tab_login)
self.sentinel2_alternative_search_checkBox.setObjectName("sentinel2_alternative_search_checkBox")
self.gridLayout_159.addWidget(self.sentinel2_alternative_search_checkBox, 3, 0, 1, 5)
self.gridLayout_238.addLayout(self.gridLayout_159, 2, 0, 1, 1)
spacerItem27 = QtWidgets.QSpacerItem(20, 251, QtWidgets.QSizePolicy.Minimum, QtWidgets.QSizePolicy.Expanding)
self.gridLayout_238.addItem(spacerItem27, 3, 0, 1, 1)
icon74 = QtGui.QIcon()
icon74.addPixmap(QtGui.QPixmap(":/plugins/semiautomaticclassificationplugin/icons/semiautomaticclassificationplugin_download_login.svg"), QtGui.QIcon.Normal, QtGui.QIcon.Off)
self.tabWidget_3.addTab(self.tab_login, icon74, "")
self.tab_search = QtWidgets.QWidget()
self.tab_search.setObjectName("tab_search")
self.gridLayout_264 = QtWidgets.QGridLayout(self.tab_search)
self.gridLayout_264.setObjectName("gridLayout_264")
self.gridLayout_search = QtWidgets.QGridLayout()
self.gridLayout_search.setObjectName("gridLayout_search")
self.gridLayout_132 = QtWidgets.QGridLayout()
self.gridLayout_132.setObjectName("gridLayout_132")
self.remove_image_toolButton = QtWidgets.QToolButton(self.tab_search)
self.remove_image_toolButton.setStyleSheet("margin: 0px;padding: 0px;")
self.remove_image_toolButton.setIcon(icon58)
self.remove_image_toolButton.setIconSize(QtCore.QSize(22, 22))
self.remove_image_toolButton.setObjectName("remove_image_toolButton")
self.gridLayout_132.addWidget(self.remove_image_toolButton, 1, 1, 1, 1)
self.toolButton_display = QtWidgets.QToolButton(self.tab_search)
self.toolButton_display.setStyleSheet("margin: 0px;padding: 0px;")
icon75 = QtGui.QIcon()
icon75.addPixmap(QtGui.QPixmap(":/plugins/semiautomaticclassificationplugin/icons/semiautomaticclassificationplugin_download_image_preview.svg"), QtGui.QIcon.Normal, QtGui.QIcon.Off)
self.toolButton_display.setIcon(icon75)
self.toolButton_display.setIconSize(QtCore.QSize(22, 22))
self.toolButton_display.setObjectName("toolButton_display")
self.gridLayout_132.addWidget(self.toolButton_display, 0, 1, 1, 1)
self.clear_table_toolButton = QtWidgets.QToolButton(self.tab_search)
self.clear_table_toolButton.setStyleSheet("margin: 0px;padding: 0px;")
self.clear_table_toolButton.setIcon(icon59)
self.clear_table_toolButton.setIconSize(QtCore.QSize(22, 22))
self.clear_table_toolButton.setObjectName("clear_table_toolButton")
self.gridLayout_132.addWidget(self.clear_table_toolButton, 2, 1, 1, 1)
self.export_table_pushButton = QtWidgets.QToolButton(self.tab_search)
self.export_table_pushButton.setStyleSheet("margin: 0px;padding: 0px")
self.export_table_pushButton.setIcon(icon53)
self.export_table_pushButton.setIconSize(QtCore.QSize(22, 22))
self.export_table_pushButton.setObjectName("export_table_pushButton")
self.gridLayout_132.addWidget(self.export_table_pushButton, 4, 1, 1, 1)
self.import_table_pushButton = QtWidgets.QToolButton(self.tab_search)
self.import_table_pushButton.setStyleSheet("margin: 0px;padding: 0px")
self.import_table_pushButton.setIcon(icon54)
self.import_table_pushButton.setIconSize(QtCore.QSize(22, 22))
self.import_table_pushButton.setObjectName("import_table_pushButton")
self.gridLayout_132.addWidget(self.import_table_pushButton, 3, 1, 1, 1)
self.gridLayout_121 = QtWidgets.QGridLayout()
self.gridLayout_121.setObjectName("gridLayout_121")
self.image_preview_label = QtWidgets.QLabel(self.tab_search)
sizePolicy = QtWidgets.QSizePolicy(QtWidgets.QSizePolicy.Fixed, QtWidgets.QSizePolicy.Fixed)
sizePolicy.setHorizontalStretch(0)
sizePolicy.setVerticalStretch(0)
sizePolicy.setHeightForWidth(self.image_preview_label.sizePolicy().hasHeightForWidth())
self.image_preview_label.setSizePolicy(sizePolicy)
self.image_preview_label.setMinimumSize(QtCore.QSize(300, 300))
self.image_preview_label.setFrameShape(QtWidgets.QFrame.Panel)
self.image_preview_label.setFrameShadow(QtWidgets.QFrame.Sunken)
self.image_preview_label.setAlignment(QtCore.Qt.AlignCenter)
self.image_preview_label.setObjectName("image_preview_label")
self.gridLayout_121.addWidget(self.image_preview_label, 0, 0, 1, 1)
self.gridLayout_132.addLayout(self.gridLayout_121, 0, 0, 5, 1)
self.gridLayout_search.addLayout(self.gridLayout_132, 2, 1, 1, 1)
self.download_images_tableWidget = QtWidgets.QTableWidget(self.tab_search)
self.download_images_tableWidget.setFrameShadow(QtWidgets.QFrame.Sunken)
self.download_images_tableWidget.setEditTriggers(QtWidgets.QAbstractItemView.NoEditTriggers)
self.download_images_tableWidget.setTabKeyNavigation(True)
self.download_images_tableWidget.setAlternatingRowColors(True)
self.download_images_tableWidget.setSelectionBehavior(QtWidgets.QAbstractItemView.SelectRows)
self.download_images_tableWidget.setObjectName("download_images_tableWidget")
self.download_images_tableWidget.setColumnCount(14)
self.download_images_tableWidget.setRowCount(0)
item = QtWidgets.QTableWidgetItem()
self.download_images_tableWidget.setHorizontalHeaderItem(0, item)
item = QtWidgets.QTableWidgetItem()
self.download_images_tableWidget.setHorizontalHeaderItem(1, item)
item = QtWidgets.QTableWidgetItem()
self.download_images_tableWidget.setHorizontalHeaderItem(2, item)
item = QtWidgets.QTableWidgetItem()
self.download_images_tableWidget.setHorizontalHeaderItem(3, item)
item = QtWidgets.QTableWidgetItem()
self.download_images_tableWidget.setHorizontalHeaderItem(4, item)
item = QtWidgets.QTableWidgetItem()
self.download_images_tableWidget.setHorizontalHeaderItem(5, item)
item = QtWidgets.QTableWidgetItem()
self.download_images_tableWidget.setHorizontalHeaderItem(6, item)
item = QtWidgets.QTableWidgetItem()
self.download_images_tableWidget.setHorizontalHeaderItem(7, item)
item = QtWidgets.QTableWidgetItem()
self.download_images_tableWidget.setHorizontalHeaderItem(8, item)
item = QtWidgets.QTableWidgetItem()
self.download_images_tableWidget.setHorizontalHeaderItem(9, item)
item = QtWidgets.QTableWidgetItem()
self.download_images_tableWidget.setHorizontalHeaderItem(10, item)
item = QtWidgets.QTableWidgetItem()
self.download_images_tableWidget.setHorizontalHeaderItem(11, item)
item = QtWidgets.QTableWidgetItem()
self.download_images_tableWidget.setHorizontalHeaderItem(12, item)
item = QtWidgets.QTableWidgetItem()
self.download_images_tableWidget.setHorizontalHeaderItem(13, item)
self.download_images_tableWidget.verticalHeader().setDefaultSectionSize(20)
self.gridLayout_search.addWidget(self.download_images_tableWidget, 2, 0, 1, 1)
self.label_100 = QtWidgets.QLabel(self.tab_search)
self.label_100.setStyleSheet("background-color : #656565; color : white")
self.label_100.setFrameShape(QtWidgets.QFrame.Panel)
self.label_100.setFrameShadow(QtWidgets.QFrame.Sunken)
self.label_100.setObjectName("label_100")
self.gridLayout_search.addWidget(self.label_100, 0, 0, 1, 1)
self.products_filter_lineEdit = QtWidgets.QLineEdit(self.tab_search)
sizePolicy = QtWidgets.QSizePolicy(QtWidgets.QSizePolicy.Minimum, QtWidgets.QSizePolicy.Fixed)
sizePolicy.setHorizontalStretch(0)
sizePolicy.setVerticalStretch(0)
sizePolicy.setHeightForWidth(self.products_filter_lineEdit.sizePolicy().hasHeightForWidth())
self.products_filter_lineEdit.setSizePolicy(sizePolicy)
self.products_filter_lineEdit.setObjectName("products_filter_lineEdit")
self.gridLayout_search.addWidget(self.products_filter_lineEdit, 0, 1, 1, 1)
self.gridLayout_264.addLayout(self.gridLayout_search, 1, 0, 1, 1)
self.gridLayout_133 = QtWidgets.QGridLayout()
self.gridLayout_133.setObjectName("gridLayout_133")
self.gridLayout_54 = QtWidgets.QGridLayout()
self.gridLayout_54.setObjectName("gridLayout_54")
self.toolButton_OSM = QtWidgets.QToolButton(self.tab_search)
self.toolButton_OSM.setStyleSheet("margin: 0px;padding: 0px;")
icon76 = QtGui.QIcon()
icon76.addPixmap(QtGui.QPixmap(":/plugins/semiautomaticclassificationplugin/icons/semiautomaticclassificationplugin_osm_add.svg"), QtGui.QIcon.Normal, QtGui.QIcon.Off)
self.toolButton_OSM.setIcon(icon76)
self.toolButton_OSM.setIconSize(QtCore.QSize(22, 22))
self.toolButton_OSM.setObjectName("toolButton_OSM")
self.gridLayout_54.addWidget(self.toolButton_OSM, 0, 0, 1, 1)
self.label_205 = QtWidgets.QLabel(self.tab_search)
self.label_205.setFrameShadow(QtWidgets.QFrame.Sunken)
self.label_205.setAlignment(QtCore.Qt.AlignCenter)
self.label_205.setOpenExternalLinks(True)
self.label_205.setObjectName("label_205")
self.gridLayout_54.addWidget(self.label_205, 0, 1, 1, 1)
self.label_206 = QtWidgets.QLabel(self.tab_search)
self.label_206.setFrameShadow(QtWidgets.QFrame.Sunken)
self.label_206.setAlignment(QtCore.Qt.AlignCenter)
self.label_206.setOpenExternalLinks(True)
self.label_206.setObjectName("label_206")
self.gridLayout_54.addWidget(self.label_206, 0, 2, 1, 1)
spacerItem28 = QtWidgets.QSpacerItem(40, 20, QtWidgets.QSizePolicy.Expanding, QtWidgets.QSizePolicy.Minimum)
self.gridLayout_54.addItem(spacerItem28, 0, 3, 1, 2)
self.gridLayout_133.addLayout(self.gridLayout_54, 3, 0, 1, 2)
self.label_103 = QtWidgets.QLabel(self.tab_search)
self.label_103.setStyleSheet("background-color : #656565; color : white")
self.label_103.setFrameShape(QtWidgets.QFrame.Panel)
self.label_103.setFrameShadow(QtWidgets.QFrame.Sunken)
self.label_103.setAlignment(QtCore.Qt.AlignLeading|QtCore.Qt.AlignLeft|QtCore.Qt.AlignVCenter)
self.label_103.setObjectName("label_103")
self.gridLayout_133.addWidget(self.label_103, 0, 0, 1, 2)
self.gridLayout_122 = QtWidgets.QGridLayout()
self.gridLayout_122.setObjectName("gridLayout_122")
self.selectUL_toolButton_3 = QtWidgets.QToolButton(self.tab_search)
self.selectUL_toolButton_3.setStyleSheet("margin: 0px;padding: 0px;")
icon77 = QtGui.QIcon()
icon77.addPixmap(QtGui.QPixmap(":/plugins/semiautomaticclassificationplugin/icons/semiautomaticclassificationplugin_pointer_tool.svg"), QtGui.QIcon.Normal, QtGui.QIcon.Off)
self.selectUL_toolButton_3.setIcon(icon77)
self.selectUL_toolButton_3.setIconSize(QtCore.QSize(22, 22))
self.selectUL_toolButton_3.setObjectName("selectUL_toolButton_3")
self.gridLayout_122.addWidget(self.selectUL_toolButton_3, 0, 7, 1, 1)
self.LX_lineEdit_3 = QtWidgets.QLineEdit(self.tab_search)
self.LX_lineEdit_3.setText("")
self.LX_lineEdit_3.setMaxLength(15)
self.LX_lineEdit_3.setObjectName("LX_lineEdit_3")
self.gridLayout_122.addWidget(self.LX_lineEdit_3, 0, 4, 1, 1)
self.UX_lineEdit_3 = QtWidgets.QLineEdit(self.tab_search)
self.UX_lineEdit_3.setMaxLength(15)
self.UX_lineEdit_3.setObjectName("UX_lineEdit_3")
self.gridLayout_122.addWidget(self.UX_lineEdit_3, 0, 1, 1, 1)
self.label_105 = QtWidgets.QLabel(self.tab_search)
self.label_105.setFrameShadow(QtWidgets.QFrame.Sunken)
self.label_105.setAlignment(QtCore.Qt.AlignCenter)
self.label_105.setObjectName("label_105")
self.gridLayout_122.addWidget(self.label_105, 0, 3, 1, 1)
self.label_107 = QtWidgets.QLabel(self.tab_search)
self.label_107.setFrameShadow(QtWidgets.QFrame.Sunken)
self.label_107.setAlignment(QtCore.Qt.AlignCenter)
self.label_107.setObjectName("label_107")
self.gridLayout_122.addWidget(self.label_107, 0, 0, 1, 1)
self.LY_lineEdit_3 = QtWidgets.QLineEdit(self.tab_search)
self.LY_lineEdit_3.setMaxLength(15)
self.LY_lineEdit_3.setObjectName("LY_lineEdit_3")
self.gridLayout_122.addWidget(self.LY_lineEdit_3, 0, 5, 1, 1)
self.UY_lineEdit_3 = QtWidgets.QLineEdit(self.tab_search)
self.UY_lineEdit_3.setMaxLength(15)
self.UY_lineEdit_3.setObjectName("UY_lineEdit_3")
self.gridLayout_122.addWidget(self.UY_lineEdit_3, 0, 2, 1, 1)
self.show_area_radioButton_2 = QtWidgets.QRadioButton(self.tab_search)
self.show_area_radioButton_2.setChecked(True)
self.show_area_radioButton_2.setAutoExclusive(False)
self.show_area_radioButton_2.setObjectName("show_area_radioButton_2")
self.gridLayout_122.addWidget(self.show_area_radioButton_2, 0, 6, 1, 1)
self.gridLayout_115 = QtWidgets.QGridLayout()
self.gridLayout_115.setObjectName("gridLayout_115")
self.find_images_toolButton = QtWidgets.QToolButton(self.tab_search)
self.find_images_toolButton.setStyleSheet("margin: 0px;padding: 0px;")
icon78 = QtGui.QIcon()
icon78.addPixmap(QtGui.QPixmap(":/plugins/semiautomaticclassificationplugin/icons/semiautomaticclassificationplugin_search_images.svg"), QtGui.QIcon.Normal, QtGui.QIcon.Off)
self.find_images_toolButton.setIcon(icon78)
self.find_images_toolButton.setIconSize(QtCore.QSize(22, 22))
self.find_images_toolButton.setObjectName("find_images_toolButton")
self.gridLayout_115.addWidget(self.find_images_toolButton, 1, 8, 1, 1)
self.label_35 = QtWidgets.QLabel(self.tab_search)
self.label_35.setAlignment(QtCore.Qt.AlignRight|QtCore.Qt.AlignTrailing|QtCore.Qt.AlignVCenter)
self.label_35.setObjectName("label_35")
self.gridLayout_115.addWidget(self.label_35, 1, 7, 1, 1)
self.gridLayout_114 = QtWidgets.QGridLayout()
self.gridLayout_114.setObjectName("gridLayout_114")
self.landsat_satellite_combo = QtWidgets.QComboBox(self.tab_search)
self.landsat_satellite_combo.setObjectName("landsat_satellite_combo")
self.gridLayout_114.addWidget(self.landsat_satellite_combo, 0, 1, 1, 1)
self.dateEdit_from = QtWidgets.QDateEdit(self.tab_search)
self.dateEdit_from.setDateTime(QtCore.QDateTime(QtCore.QDate(2016, 1, 1), QtCore.QTime(0, 0, 0)))
self.dateEdit_from.setMaximumDate(QtCore.QDate(2045, 12, 31))
self.dateEdit_from.setMinimumDate(QtCore.QDate(1972, 1, 1))
self.dateEdit_from.setCalendarPopup(True)
self.dateEdit_from.setDate(QtCore.QDate(2016, 1, 1))
self.dateEdit_from.setObjectName("dateEdit_from")
self.gridLayout_114.addWidget(self.dateEdit_from, 0, 4, 1, 1)
self.label_110 = QtWidgets.QLabel(self.tab_search)
self.label_110.setFrameShadow(QtWidgets.QFrame.Sunken)
self.label_110.setAlignment(QtCore.Qt.AlignCenter)
self.label_110.setObjectName("label_110")
self.gridLayout_114.addWidget(self.label_110, 0, 8, 1, 1)
self.dateEdit_to = QtWidgets.QDateEdit(self.tab_search)
self.dateEdit_to.setMaximumDate(QtCore.QDate(2045, 12, 31))
self.dateEdit_to.setMinimumDate(QtCore.QDate(1980, 1, 2))
self.dateEdit_to.setCalendarPopup(True)
self.dateEdit_to.setDate(QtCore.QDate(2045, 12, 31))
self.dateEdit_to.setObjectName("dateEdit_to")
self.gridLayout_114.addWidget(self.dateEdit_to, 0, 6, 1, 1)
self.label_112 = QtWidgets.QLabel(self.tab_search)
self.label_112.setFrameShadow(QtWidgets.QFrame.Sunken)
self.label_112.setAlignment(QtCore.Qt.AlignCenter)
self.label_112.setObjectName("label_112")
self.gridLayout_114.addWidget(self.label_112, 0, 5, 1, 1)
self.label_111 = QtWidgets.QLabel(self.tab_search)
self.label_111.setFrameShadow(QtWidgets.QFrame.Sunken)
self.label_111.setAlignment(QtCore.Qt.AlignCenter)
self.label_111.setObjectName("label_111")
self.gridLayout_114.addWidget(self.label_111, 0, 3, 1, 1)
self.cloud_cover_spinBox = QtWidgets.QSpinBox(self.tab_search)
self.cloud_cover_spinBox.setMinimumSize(QtCore.QSize(50, 0))
self.cloud_cover_spinBox.setMaximumSize(QtCore.QSize(100, 16777215))
self.cloud_cover_spinBox.setAlignment(QtCore.Qt.AlignRight|QtCore.Qt.AlignTrailing|QtCore.Qt.AlignVCenter)
self.cloud_cover_spinBox.setMinimum(0)
self.cloud_cover_spinBox.setMaximum(100)
self.cloud_cover_spinBox.setSingleStep(10)
self.cloud_cover_spinBox.setProperty("value", 100)
self.cloud_cover_spinBox.setObjectName("cloud_cover_spinBox")
self.gridLayout_114.addWidget(self.cloud_cover_spinBox, 0, 9, 1, 1)
self.label_114 = QtWidgets.QLabel(self.tab_search)
self.label_114.setFrameShadow(QtWidgets.QFrame.Sunken)
self.label_114.setAlignment(QtCore.Qt.AlignCenter)
self.label_114.setObjectName("label_114")
self.gridLayout_114.addWidget(self.label_114, 0, 0, 1, 1)
spacerItem29 = QtWidgets.QSpacerItem(40, 20, QtWidgets.QSizePolicy.Expanding, QtWidgets.QSizePolicy.Minimum)
self.gridLayout_114.addItem(spacerItem29, 0, 7, 1, 1)
spacerItem30 = QtWidgets.QSpacerItem(40, 20, QtWidgets.QSizePolicy.Expanding, QtWidgets.QSizePolicy.Minimum)
self.gridLayout_114.addItem(spacerItem30, 0, 2, 1, 1)
self.gridLayout_115.addLayout(self.gridLayout_114, 0, 0, 1, 9)
self.gridLayout_120 = QtWidgets.QGridLayout()
self.gridLayout_120.setObjectName("gridLayout_120")
self.label_194 = QtWidgets.QLabel(self.tab_search)
self.label_194.setFrameShadow(QtWidgets.QFrame.Sunken)
self.label_194.setAlignment(QtCore.Qt.AlignCenter)
self.label_194.setObjectName("label_194")
self.gridLayout_120.addWidget(self.label_194, 0, 0, 1, 1)
self.label_113 = QtWidgets.QLabel(self.tab_search)
self.label_113.setFrameShadow(QtWidgets.QFrame.Sunken)
self.label_113.setAlignment(QtCore.Qt.AlignCenter)
self.label_113.setObjectName("label_113")
self.gridLayout_120.addWidget(self.label_113, 0, 2, 1, 1)
self.imageID_lineEdit = QtWidgets.QLineEdit(self.tab_search)
sizePolicy = QtWidgets.QSizePolicy(QtWidgets.QSizePolicy.MinimumExpanding, QtWidgets.QSizePolicy.Fixed)
sizePolicy.setHorizontalStretch(0)
sizePolicy.setVerticalStretch(0)
sizePolicy.setHeightForWidth(self.imageID_lineEdit.sizePolicy().hasHeightForWidth())
self.imageID_lineEdit.setSizePolicy(sizePolicy)
self.imageID_lineEdit.setMinimumSize(QtCore.QSize(200, 0))
self.imageID_lineEdit.setMaxLength(10000)
self.imageID_lineEdit.setObjectName("imageID_lineEdit")
self.gridLayout_120.addWidget(self.imageID_lineEdit, 0, 3, 1, 1)
self.result_number_spinBox_2 = QtWidgets.QSpinBox(self.tab_search)
self.result_number_spinBox_2.setMinimumSize(QtCore.QSize(50, 0))
self.result_number_spinBox_2.setMaximumSize(QtCore.QSize(100, 16777215))
self.result_number_spinBox_2.setAlignment(QtCore.Qt.AlignRight|QtCore.Qt.AlignTrailing|QtCore.Qt.AlignVCenter)
self.result_number_spinBox_2.setMinimum(5)
self.result_number_spinBox_2.setMaximum(2000)
self.result_number_spinBox_2.setSingleStep(5)
self.result_number_spinBox_2.setProperty("value", 20)
self.result_number_spinBox_2.setObjectName("result_number_spinBox_2")
self.gridLayout_120.addWidget(self.result_number_spinBox_2, 0, 1, 1, 1)
self.gridLayout_115.addLayout(self.gridLayout_120, 1, 0, 1, 7)
self.gridLayout_122.addLayout(self.gridLayout_115, 1, 0, 1, 8)
self.gridLayout_133.addLayout(self.gridLayout_122, 1, 0, 2, 2)
self.gridLayout_264.addLayout(self.gridLayout_133, 0, 0, 1, 1)
icon79 = QtGui.QIcon()
icon79.addPixmap(QtGui.QPixmap(":/plugins/semiautomaticclassificationplugin/icons/semiautomaticclassificationplugin_download_search.svg"), QtGui.QIcon.Normal, QtGui.QIcon.Off)
self.tabWidget_3.addTab(self.tab_search, icon79, "")
self.tab_options = QtWidgets.QWidget()
self.tab_options.setObjectName("tab_options")
self.gridLayout_199 = QtWidgets.QGridLayout(self.tab_options)
self.gridLayout_199.setObjectName("gridLayout_199")
self.gridLayout_116 = QtWidgets.QGridLayout()
self.gridLayout_116.setObjectName("gridLayout_116")
self.checkBox_band_6 = QtWidgets.QCheckBox(self.tab_options)
self.checkBox_band_6.setChecked(True)
self.checkBox_band_6.setObjectName("checkBox_band_6")
self.gridLayout_116.addWidget(self.checkBox_band_6, 1, 5, 1, 1)
self.checkBox_band_4 = QtWidgets.QCheckBox(self.tab_options)
self.checkBox_band_4.setChecked(True)
self.checkBox_band_4.setObjectName("checkBox_band_4")
self.gridLayout_116.addWidget(self.checkBox_band_4, 1, 3, 1, 1)
self.checkBox_band_1 = QtWidgets.QCheckBox(self.tab_options)
self.checkBox_band_1.setChecked(True)
self.checkBox_band_1.setObjectName("checkBox_band_1")
self.gridLayout_116.addWidget(self.checkBox_band_1, 1, 0, 1, 1)
self.checkBox_band_3 = QtWidgets.QCheckBox(self.tab_options)
self.checkBox_band_3.setChecked(True)
self.checkBox_band_3.setObjectName("checkBox_band_3")
self.gridLayout_116.addWidget(self.checkBox_band_3, 1, 2, 1, 1)
self.checkBox_band_12 = QtWidgets.QCheckBox(self.tab_options)
self.checkBox_band_12.setChecked(True)
self.checkBox_band_12.setObjectName("checkBox_band_12")
self.gridLayout_116.addWidget(self.checkBox_band_12, 2, 5, 1, 1)
self.checkBox_band_2 = QtWidgets.QCheckBox(self.tab_options)
self.checkBox_band_2.setChecked(True)
self.checkBox_band_2.setObjectName("checkBox_band_2")
self.gridLayout_116.addWidget(self.checkBox_band_2, 1, 1, 1, 1)
self.checkBox_band_11 = QtWidgets.QCheckBox(self.tab_options)
self.checkBox_band_11.setChecked(True)
self.checkBox_band_11.setObjectName("checkBox_band_11")
self.gridLayout_116.addWidget(self.checkBox_band_11, 2, 4, 1, 1)
self.checkBox_band_5 = QtWidgets.QCheckBox(self.tab_options)
self.checkBox_band_5.setChecked(True)
self.checkBox_band_5.setObjectName("checkBox_band_5")
self.gridLayout_116.addWidget(self.checkBox_band_5, 1, 4, 1, 1)
self.check_toolButton = QtWidgets.QToolButton(self.tab_options)
self.check_toolButton.setStyleSheet("margin: 0px;padding: 0px;")
self.check_toolButton.setIcon(icon56)
self.check_toolButton.setIconSize(QtCore.QSize(22, 22))
self.check_toolButton.setObjectName("check_toolButton")
self.gridLayout_116.addWidget(self.check_toolButton, 1, 6, 1, 1)
self.label_108 = QtWidgets.QLabel(self.tab_options)
self.label_108.setStyleSheet("background-color : #656565; color : white")
self.label_108.setFrameShape(QtWidgets.QFrame.Panel)
self.label_108.setFrameShadow(QtWidgets.QFrame.Sunken)
self.label_108.setObjectName("label_108")
self.gridLayout_116.addWidget(self.label_108, 0, 0, 1, 7)
self.gridLayout_117 = QtWidgets.QGridLayout()
self.gridLayout_117.setObjectName("gridLayout_117")
spacerItem31 = QtWidgets.QSpacerItem(20, 40, QtWidgets.QSizePolicy.Minimum, QtWidgets.QSizePolicy.Expanding)
self.gridLayout_117.addItem(spacerItem31, 3, 1, 1, 1)
self.gridLayout_136 = QtWidgets.QGridLayout()
self.gridLayout_136.setObjectName("gridLayout_136")
self.checkBoxs_band_9 = QtWidgets.QCheckBox(self.tab_options)
self.checkBoxs_band_9.setChecked(True)
self.checkBoxs_band_9.setObjectName("checkBoxs_band_9")
self.gridLayout_136.addWidget(self.checkBoxs_band_9, 1, 8, 1, 1)
self.checkBoxs_band_1 = QtWidgets.QCheckBox(self.tab_options)
self.checkBoxs_band_1.setChecked(True)
self.checkBoxs_band_1.setObjectName("checkBoxs_band_1")
self.gridLayout_136.addWidget(self.checkBoxs_band_1, 1, 0, 1, 1)
self.check_toolButton_2 = QtWidgets.QToolButton(self.tab_options)
self.check_toolButton_2.setStyleSheet("margin: 0px;padding: 0px;")
self.check_toolButton_2.setIcon(icon56)
self.check_toolButton_2.setIconSize(QtCore.QSize(22, 22))
self.check_toolButton_2.setObjectName("check_toolButton_2")
self.gridLayout_136.addWidget(self.check_toolButton_2, 1, 14, 1, 1)
self.label_118 = QtWidgets.QLabel(self.tab_options)
self.label_118.setStyleSheet("background-color : #656565; color : white")
self.label_118.setFrameShape(QtWidgets.QFrame.Panel)
self.label_118.setFrameShadow(QtWidgets.QFrame.Sunken)
self.label_118.setObjectName("label_118")
self.gridLayout_136.addWidget(self.label_118, 0, 0, 1, 15)
self.checkBoxs_band_2 = QtWidgets.QCheckBox(self.tab_options)
self.checkBoxs_band_2.setChecked(True)
self.checkBoxs_band_2.setObjectName("checkBoxs_band_2")
self.gridLayout_136.addWidget(self.checkBoxs_band_2, 1, 1, 1, 1)
self.checkBoxs_band_3 = QtWidgets.QCheckBox(self.tab_options)
self.checkBoxs_band_3.setChecked(True)
self.checkBoxs_band_3.setObjectName("checkBoxs_band_3")
self.gridLayout_136.addWidget(self.checkBoxs_band_3, 1, 2, 1, 1)
self.checkBoxs_band_4 = QtWidgets.QCheckBox(self.tab_options)
self.checkBoxs_band_4.setChecked(True)
self.checkBoxs_band_4.setObjectName("checkBoxs_band_4")
self.gridLayout_136.addWidget(self.checkBoxs_band_4, 1, 3, 1, 1)
self.checkBoxs_band_5 = QtWidgets.QCheckBox(self.tab_options)
self.checkBoxs_band_5.setChecked(True)
self.checkBoxs_band_5.setObjectName("checkBoxs_band_5")
self.gridLayout_136.addWidget(self.checkBoxs_band_5, 1, 4, 1, 1)
self.checkBoxs_band_6 = QtWidgets.QCheckBox(self.tab_options)
self.checkBoxs_band_6.setChecked(True)
self.checkBoxs_band_6.setObjectName("checkBoxs_band_6")
self.gridLayout_136.addWidget(self.checkBoxs_band_6, 1, 5, 1, 1)
self.checkBoxs_band_7 = QtWidgets.QCheckBox(self.tab_options)
self.checkBoxs_band_7.setChecked(True)
self.checkBoxs_band_7.setObjectName("checkBoxs_band_7")
self.gridLayout_136.addWidget(self.checkBoxs_band_7, 1, 6, 1, 1)
self.checkBoxs_band_12 = QtWidgets.QCheckBox(self.tab_options)
self.checkBoxs_band_12.setChecked(True)
self.checkBoxs_band_12.setObjectName("checkBoxs_band_12")
self.gridLayout_136.addWidget(self.checkBoxs_band_12, 1, 11, 1, 1)
self.checkBoxs_band_8 = QtWidgets.QCheckBox(self.tab_options)
self.checkBoxs_band_8.setChecked(True)
self.checkBoxs_band_8.setObjectName("checkBoxs_band_8")
self.gridLayout_136.addWidget(self.checkBoxs_band_8, 1, 7, 1, 1)
self.checkBoxs_band_10 = QtWidgets.QCheckBox(self.tab_options)
self.checkBoxs_band_10.setChecked(True)
self.checkBoxs_band_10.setObjectName("checkBoxs_band_10")
self.gridLayout_136.addWidget(self.checkBoxs_band_10, 1, 9, 1, 1)
self.ancillary_data_checkBox = QtWidgets.QCheckBox(self.tab_options)
self.ancillary_data_checkBox.setChecked(True)
self.ancillary_data_checkBox.setObjectName("ancillary_data_checkBox")
self.gridLayout_136.addWidget(self.ancillary_data_checkBox, 1, 13, 1, 1)
self.checkBoxs_band_13 = QtWidgets.QCheckBox(self.tab_options)
self.checkBoxs_band_13.setChecked(True)
self.checkBoxs_band_13.setObjectName("checkBoxs_band_13")
self.gridLayout_136.addWidget(self.checkBoxs_band_13, 1, 12, 1, 1)
self.checkBoxs_band_11 = QtWidgets.QCheckBox(self.tab_options)
self.checkBoxs_band_11.setChecked(True)
self.checkBoxs_band_11.setObjectName("checkBoxs_band_11")
self.gridLayout_136.addWidget(self.checkBoxs_band_11, 1, 10, 1, 1)
self.gridLayout_117.addLayout(self.gridLayout_136, 0, 1, 1, 1)
self.gridLayout_160 = QtWidgets.QGridLayout()
self.gridLayout_160.setObjectName("gridLayout_160")
self.checkBoxs3_band_6 = QtWidgets.QCheckBox(self.tab_options)
self.checkBoxs3_band_6.setChecked(True)
self.checkBoxs3_band_6.setObjectName("checkBoxs3_band_6")
self.gridLayout_160.addWidget(self.checkBoxs3_band_6, 1, 5, 1, 1)
self.checkBoxs3_band_2 = QtWidgets.QCheckBox(self.tab_options)
self.checkBoxs3_band_2.setChecked(True)
self.checkBoxs3_band_2.setObjectName("checkBoxs3_band_2")
self.gridLayout_160.addWidget(self.checkBoxs3_band_2, 1, 1, 1, 1)
self.checkBoxs3_band_5 = QtWidgets.QCheckBox(self.tab_options)
self.checkBoxs3_band_5.setChecked(True)
self.checkBoxs3_band_5.setObjectName("checkBoxs3_band_5")
self.gridLayout_160.addWidget(self.checkBoxs3_band_5, 1, 4, 1, 1)
self.checkBoxs3_band_8 = QtWidgets.QCheckBox(self.tab_options)
self.checkBoxs3_band_8.setChecked(True)
self.checkBoxs3_band_8.setObjectName("checkBoxs3_band_8")
self.gridLayout_160.addWidget(self.checkBoxs3_band_8, 1, 7, 1, 1)
self.checkBoxs3_band_1 = QtWidgets.QCheckBox(self.tab_options)
self.checkBoxs3_band_1.setChecked(True)
self.checkBoxs3_band_1.setObjectName("checkBoxs3_band_1")
self.gridLayout_160.addWidget(self.checkBoxs3_band_1, 1, 0, 1, 1)
self.checkBoxs3_band_16 = QtWidgets.QCheckBox(self.tab_options)
self.checkBoxs3_band_16.setChecked(True)
self.checkBoxs3_band_16.setObjectName("checkBoxs3_band_16")
self.gridLayout_160.addWidget(self.checkBoxs3_band_16, 2, 4, 1, 1)
self.checkBoxs3_band_10 = QtWidgets.QCheckBox(self.tab_options)
self.checkBoxs3_band_10.setChecked(True)
self.checkBoxs3_band_10.setObjectName("checkBoxs3_band_10")
self.gridLayout_160.addWidget(self.checkBoxs3_band_10, 1, 9, 1, 1)
self.checkBoxs3_band_12 = QtWidgets.QCheckBox(self.tab_options)
self.checkBoxs3_band_12.setChecked(True)
self.checkBoxs3_band_12.setObjectName("checkBoxs3_band_12")
self.gridLayout_160.addWidget(self.checkBoxs3_band_12, 2, 0, 1, 1)
self.s3_ancillary_data_checkBox = QtWidgets.QCheckBox(self.tab_options)
self.s3_ancillary_data_checkBox.setChecked(True)
self.s3_ancillary_data_checkBox.setObjectName("s3_ancillary_data_checkBox")
self.gridLayout_160.addWidget(self.s3_ancillary_data_checkBox, 2, 10, 1, 1)
self.checkBoxs3_band_3 = QtWidgets.QCheckBox(self.tab_options)
self.checkBoxs3_band_3.setChecked(True)
self.checkBoxs3_band_3.setObjectName("checkBoxs3_band_3")
self.gridLayout_160.addWidget(self.checkBoxs3_band_3, 1, 2, 1, 1)
self.label_127 = QtWidgets.QLabel(self.tab_options)
self.label_127.setStyleSheet("background-color : #656565; color : white")
self.label_127.setFrameShape(QtWidgets.QFrame.Panel)
self.label_127.setFrameShadow(QtWidgets.QFrame.Sunken)
self.label_127.setObjectName("label_127")
self.gridLayout_160.addWidget(self.label_127, 0, 0, 1, 12)
self.checkBoxs3_band_20 = QtWidgets.QCheckBox(self.tab_options)
self.checkBoxs3_band_20.setChecked(True)
self.checkBoxs3_band_20.setObjectName("checkBoxs3_band_20")
self.gridLayout_160.addWidget(self.checkBoxs3_band_20, 2, 8, 1, 1)
self.checkBoxs3_band_17 = QtWidgets.QCheckBox(self.tab_options)
self.checkBoxs3_band_17.setChecked(True)
self.checkBoxs3_band_17.setObjectName("checkBoxs3_band_17")
self.gridLayout_160.addWidget(self.checkBoxs3_band_17, 2, 5, 1, 1)
self.checkBoxs3_band_14 = QtWidgets.QCheckBox(self.tab_options)
self.checkBoxs3_band_14.setChecked(True)
self.checkBoxs3_band_14.setObjectName("checkBoxs3_band_14")
self.gridLayout_160.addWidget(self.checkBoxs3_band_14, 2, 2, 1, 1)
self.checkBoxs3_band_9 = QtWidgets.QCheckBox(self.tab_options)
self.checkBoxs3_band_9.setChecked(True)
self.checkBoxs3_band_9.setObjectName("checkBoxs3_band_9")
self.gridLayout_160.addWidget(self.checkBoxs3_band_9, 1, 8, 1, 1)
self.checkBoxs3_band_13 = QtWidgets.QCheckBox(self.tab_options)
self.checkBoxs3_band_13.setChecked(True)
self.checkBoxs3_band_13.setObjectName("checkBoxs3_band_13")
self.gridLayout_160.addWidget(self.checkBoxs3_band_13, 2, 1, 1, 1)
self.checkBoxs3_band_19 = QtWidgets.QCheckBox(self.tab_options)
self.checkBoxs3_band_19.setChecked(True)
self.checkBoxs3_band_19.setObjectName("checkBoxs3_band_19")
self.gridLayout_160.addWidget(self.checkBoxs3_band_19, 2, 7, 1, 1)
self.check_toolButton_3 = QtWidgets.QToolButton(self.tab_options)
self.check_toolButton_3.setStyleSheet("margin: 0px;padding: 0px;")
self.check_toolButton_3.setIcon(icon56)
self.check_toolButton_3.setIconSize(QtCore.QSize(22, 22))
self.check_toolButton_3.setObjectName("check_toolButton_3")
self.gridLayout_160.addWidget(self.check_toolButton_3, 1, 11, 1, 1)
self.checkBoxs3_band_7 = QtWidgets.QCheckBox(self.tab_options)
self.checkBoxs3_band_7.setChecked(True)
self.checkBoxs3_band_7.setObjectName("checkBoxs3_band_7")
self.gridLayout_160.addWidget(self.checkBoxs3_band_7, 1, 6, 1, 1)
self.checkBoxs3_band_4 = QtWidgets.QCheckBox(self.tab_options)
self.checkBoxs3_band_4.setChecked(True)
self.checkBoxs3_band_4.setObjectName("checkBoxs3_band_4")
self.gridLayout_160.addWidget(self.checkBoxs3_band_4, 1, 3, 1, 1)
self.checkBoxs3_band_11 = QtWidgets.QCheckBox(self.tab_options)
self.checkBoxs3_band_11.setChecked(True)
self.checkBoxs3_band_11.setObjectName("checkBoxs3_band_11")
self.gridLayout_160.addWidget(self.checkBoxs3_band_11, 1, 10, 1, 1)
self.checkBoxs3_band_15 = QtWidgets.QCheckBox(self.tab_options)
self.checkBoxs3_band_15.setChecked(True)
self.checkBoxs3_band_15.setObjectName("checkBoxs3_band_15")
self.gridLayout_160.addWidget(self.checkBoxs3_band_15, 2, 3, 1, 1)
self.checkBoxs3_band_21 = QtWidgets.QCheckBox(self.tab_options)
self.checkBoxs3_band_21.setChecked(True)
self.checkBoxs3_band_21.setObjectName("checkBoxs3_band_21")
self.gridLayout_160.addWidget(self.checkBoxs3_band_21, 2, 9, 1, 1)
self.checkBoxs3_band_18 = QtWidgets.QCheckBox(self.tab_options)
self.checkBoxs3_band_18.setChecked(True)
self.checkBoxs3_band_18.setObjectName("checkBoxs3_band_18")
self.gridLayout_160.addWidget(self.checkBoxs3_band_18, 2, 6, 1, 1)
self.gridLayout_117.addLayout(self.gridLayout_160, 1, 1, 1, 1)
self.gridLayout_141 = QtWidgets.QGridLayout()
self.gridLayout_141.setObjectName("gridLayout_141")
self.checkBoxs_goes_band_1 = QtWidgets.QCheckBox(self.tab_options)
self.checkBoxs_goes_band_1.setChecked(True)
self.checkBoxs_goes_band_1.setObjectName("checkBoxs_goes_band_1")
self.gridLayout_141.addWidget(self.checkBoxs_goes_band_1, 1, 0, 1, 1)
self.label_272 = QtWidgets.QLabel(self.tab_options)
self.label_272.setStyleSheet("background-color : #656565; color : white")
self.label_272.setFrameShape(QtWidgets.QFrame.Panel)
self.label_272.setFrameShadow(QtWidgets.QFrame.Sunken)
self.label_272.setObjectName("label_272")
self.gridLayout_141.addWidget(self.label_272, 0, 0, 1, 8)
self.checkBoxs_goes_band_5 = QtWidgets.QCheckBox(self.tab_options)
self.checkBoxs_goes_band_5.setChecked(True)
self.checkBoxs_goes_band_5.setObjectName("checkBoxs_goes_band_5")
self.gridLayout_141.addWidget(self.checkBoxs_goes_band_5, 1, 4, 1, 1)
self.checkBoxs_goes_band_3 = QtWidgets.QCheckBox(self.tab_options)
self.checkBoxs_goes_band_3.setChecked(True)
self.checkBoxs_goes_band_3.setObjectName("checkBoxs_goes_band_3")
self.gridLayout_141.addWidget(self.checkBoxs_goes_band_3, 1, 2, 1, 1)
self.checkBoxs_goes_band_4 = QtWidgets.QCheckBox(self.tab_options)
self.checkBoxs_goes_band_4.setChecked(True)
self.checkBoxs_goes_band_4.setObjectName("checkBoxs_goes_band_4")
self.gridLayout_141.addWidget(self.checkBoxs_goes_band_4, 1, 3, 1, 1)
self.checkBoxs_goes_band_2 = QtWidgets.QCheckBox(self.tab_options)
self.checkBoxs_goes_band_2.setChecked(True)
self.checkBoxs_goes_band_2.setObjectName("checkBoxs_goes_band_2")
self.gridLayout_141.addWidget(self.checkBoxs_goes_band_2, 1, 1, 1, 1)
self.checkBoxs_goes_band_6 = QtWidgets.QCheckBox(self.tab_options)
self.checkBoxs_goes_band_6.setChecked(True)
self.checkBoxs_goes_band_6.setObjectName("checkBoxs_goes_band_6")
self.gridLayout_141.addWidget(self.checkBoxs_goes_band_6, 1, 5, 1, 1)
self.check_toolButton_4 = QtWidgets.QToolButton(self.tab_options)
self.check_toolButton_4.setStyleSheet("margin: 0px;padding: 0px;")
self.check_toolButton_4.setIcon(icon56)
self.check_toolButton_4.setIconSize(QtCore.QSize(22, 22))
self.check_toolButton_4.setObjectName("check_toolButton_4")
self.gridLayout_141.addWidget(self.check_toolButton_4, 1, 7, 1, 1)
spacerItem32 = QtWidgets.QSpacerItem(40, 20, QtWidgets.QSizePolicy.Expanding, QtWidgets.QSizePolicy.Minimum)
self.gridLayout_141.addItem(spacerItem32, 1, 6, 1, 1)
self.gridLayout_117.addLayout(self.gridLayout_141, 2, 1, 1, 1)
self.gridLayout_116.addLayout(self.gridLayout_117, 3, 0, 1, 7)
self.checkBox_band_8 = QtWidgets.QCheckBox(self.tab_options)
self.checkBox_band_8.setChecked(True)
self.checkBox_band_8.setObjectName("checkBox_band_8")
self.gridLayout_116.addWidget(self.checkBox_band_8, 2, 1, 1, 1)
self.checkBox_band_10 = QtWidgets.QCheckBox(self.tab_options)
self.checkBox_band_10.setChecked(True)
self.checkBox_band_10.setObjectName("checkBox_band_10")
self.gridLayout_116.addWidget(self.checkBox_band_10, 2, 3, 1, 1)
self.checkBox_band_9 = QtWidgets.QCheckBox(self.tab_options)
self.checkBox_band_9.setChecked(True)
self.checkBox_band_9.setObjectName("checkBox_band_9")
self.gridLayout_116.addWidget(self.checkBox_band_9, 2, 2, 1, 1)
self.checkBox_band_7 = QtWidgets.QCheckBox(self.tab_options)
self.checkBox_band_7.setChecked(True)
self.checkBox_band_7.setObjectName("checkBox_band_7")
self.gridLayout_116.addWidget(self.checkBox_band_7, 2, 0, 1, 1)
self.gridLayout_199.addLayout(self.gridLayout_116, 0, 0, 1, 1)
icon80 = QtGui.QIcon()
icon80.addPixmap(QtGui.QPixmap(":/plugins/semiautomaticclassificationplugin/icons/semiautomaticclassificationplugin_download_options.svg"), QtGui.QIcon.Normal, QtGui.QIcon.Off)
self.tabWidget_3.addTab(self.tab_options, icon80, "")
self.gridLayout_113.addWidget(self.tabWidget_3, 0, 1, 1, 1)
self.gridLayout_68.addLayout(self.gridLayout_113, 0, 0, 1, 1)
self.gridLayout_320 = QtWidgets.QGridLayout()
self.gridLayout_320.setObjectName("gridLayout_320")
self.label_258 = QtWidgets.QLabel(self.tab_download_products)
self.label_258.setStyleSheet("background-color : #656565; color : white")
self.label_258.setFrameShape(QtWidgets.QFrame.Panel)
self.label_258.setFrameShadow(QtWidgets.QFrame.Sunken)
self.label_258.setAlignment(QtCore.Qt.AlignLeading|QtCore.Qt.AlignLeft|QtCore.Qt.AlignVCenter)
self.label_258.setOpenExternalLinks(True)
self.label_258.setObjectName("label_258")
self.gridLayout_320.addWidget(self.label_258, 0, 0, 1, 8)
self.preprocess_checkBox = QtWidgets.QCheckBox(self.tab_download_products)
self.preprocess_checkBox.setChecked(True)
self.preprocess_checkBox.setObjectName("preprocess_checkBox")
self.gridLayout_320.addWidget(self.preprocess_checkBox, 1, 2, 1, 1)
self.load_in_QGIS_checkBox = QtWidgets.QCheckBox(self.tab_download_products)
self.load_in_QGIS_checkBox.setChecked(True)
self.load_in_QGIS_checkBox.setObjectName("load_in_QGIS_checkBox")
self.gridLayout_320.addWidget(self.load_in_QGIS_checkBox, 1, 3, 1, 1)
self.download_if_preview_in_legend_checkBox = QtWidgets.QCheckBox(self.tab_download_products)
self.download_if_preview_in_legend_checkBox.setChecked(True)
self.download_if_preview_in_legend_checkBox.setObjectName("download_if_preview_in_legend_checkBox")
self.gridLayout_320.addWidget(self.download_if_preview_in_legend_checkBox, 1, 1, 1, 1)
spacerItem33 = QtWidgets.QSpacerItem(40, 20, QtWidgets.QSizePolicy.Expanding, QtWidgets.QSizePolicy.Minimum)
self.gridLayout_320.addItem(spacerItem33, 1, 5, 1, 1)
self.export_links_Button = QtWidgets.QToolButton(self.tab_download_products)
self.export_links_Button.setStyleSheet("margin: 0px;padding: 0px;")
self.export_links_Button.setIcon(icon53)
self.export_links_Button.setIconSize(QtCore.QSize(22, 22))
self.export_links_Button.setObjectName("export_links_Button")
self.gridLayout_320.addWidget(self.export_links_Button, 1, 6, 1, 1)
self.download_images_Button = QtWidgets.QToolButton(self.tab_download_products)
font = QtGui.QFont()
font.setBold(True)
font.setWeight(75)
self.download_images_Button.setFont(font)
self.download_images_Button.setLayoutDirection(QtCore.Qt.RightToLeft)
self.download_images_Button.setStyleSheet("margin: 0px;padding: 0px;")
self.download_images_Button.setIcon(icon64)
self.download_images_Button.setIconSize(QtCore.QSize(34, 34))
self.download_images_Button.setToolButtonStyle(QtCore.Qt.ToolButtonTextBesideIcon)
self.download_images_Button.setObjectName("download_images_Button")
self.gridLayout_320.addWidget(self.download_images_Button, 1, 7, 1, 1)
self.virtual_download_checkBox = QtWidgets.QCheckBox(self.tab_download_products)
self.virtual_download_checkBox.setObjectName("virtual_download_checkBox")
self.gridLayout_320.addWidget(self.virtual_download_checkBox, 1, 4, 1, 1)
self.gridLayout_68.addLayout(self.gridLayout_320, 1, 0, 1, 1)
self.SCP_tabs.addTab(self.tab_download_products, "")
self.tab_preprocessing = QtWidgets.QWidget()
self.tab_preprocessing.setStyleSheet("")
self.tab_preprocessing.setObjectName("tab_preprocessing")
self.gridLayout_6 = QtWidgets.QGridLayout(self.tab_preprocessing)
self.gridLayout_6.setObjectName("gridLayout_6")
self.tabWidget_preprocessing = QtWidgets.QTabWidget(self.tab_preprocessing)
self.tabWidget_preprocessing.setStyleSheet("")
self.tabWidget_preprocessing.setIconSize(QtCore.QSize(20, 20))
self.tabWidget_preprocessing.setDocumentMode(True)
self.tabWidget_preprocessing.setObjectName("tabWidget_preprocessing")
self.tab_Landsat = QtWidgets.QWidget()
self.tab_Landsat.setObjectName("tab_Landsat")
self.gridLayout_18 = QtWidgets.QGridLayout(self.tab_Landsat)
self.gridLayout_18.setObjectName("gridLayout_18")
self.gridLayout_37 = QtWidgets.QGridLayout()
self.gridLayout_37.setObjectName("gridLayout_37")
self.label_36 = QtWidgets.QLabel(self.tab_Landsat)
sizePolicy = QtWidgets.QSizePolicy(QtWidgets.QSizePolicy.Minimum, QtWidgets.QSizePolicy.Preferred)
sizePolicy.setHorizontalStretch(0)
sizePolicy.setVerticalStretch(0)
sizePolicy.setHeightForWidth(self.label_36.sizePolicy().hasHeightForWidth())
self.label_36.setSizePolicy(sizePolicy)
self.label_36.setMinimumSize(QtCore.QSize(229, 0))
font = QtGui.QFont()
font.setBold(False)
font.setWeight(50)
self.label_36.setFont(font)
self.label_36.setFrameShadow(QtWidgets.QFrame.Sunken)
self.label_36.setAlignment(QtCore.Qt.AlignLeading|QtCore.Qt.AlignLeft|QtCore.Qt.AlignVCenter)
self.label_36.setObjectName("label_36")
self.gridLayout_37.addWidget(self.label_36, 1, 0, 1, 1)
self.label_37 = QtWidgets.QLabel(self.tab_Landsat)
sizePolicy = QtWidgets.QSizePolicy(QtWidgets.QSizePolicy.Expanding, QtWidgets.QSizePolicy.Preferred)
sizePolicy.setHorizontalStretch(0)
sizePolicy.setVerticalStretch(0)
sizePolicy.setHeightForWidth(self.label_37.sizePolicy().hasHeightForWidth())
self.label_37.setSizePolicy(sizePolicy)
self.label_37.setStyleSheet("background-color : #656565; color : white")
self.label_37.setFrameShape(QtWidgets.QFrame.Panel)
self.label_37.setFrameShadow(QtWidgets.QFrame.Sunken)
self.label_37.setObjectName("label_37")
self.gridLayout_37.addWidget(self.label_37, 0, 0, 1, 4)
self.celsius_checkBox = QtWidgets.QCheckBox(self.tab_Landsat)
self.celsius_checkBox.setChecked(False)
self.celsius_checkBox.setTristate(False)
self.celsius_checkBox.setObjectName("celsius_checkBox")
self.gridLayout_37.addWidget(self.celsius_checkBox, 3, 0, 1, 1)
self.DOS1_checkBox = QtWidgets.QCheckBox(self.tab_Landsat)
self.DOS1_checkBox.setChecked(False)
self.DOS1_checkBox.setTristate(False)
self.DOS1_checkBox.setObjectName("DOS1_checkBox")
self.gridLayout_37.addWidget(self.DOS1_checkBox, 4, 0, 1, 1)
self.gridLayout_29 = QtWidgets.QGridLayout()
self.gridLayout_29.setObjectName("gridLayout_29")
self.nodata_spinBox_3 = QtWidgets.QSpinBox(self.tab_Landsat)
self.nodata_spinBox_3.setMinimum(-999)
self.nodata_spinBox_3.setMaximum(100000)
self.nodata_spinBox_3.setObjectName("nodata_spinBox_3")
self.gridLayout_29.addWidget(self.nodata_spinBox_3, 0, 2, 1, 1)
self.nodata_checkBox_2 = QtWidgets.QCheckBox(self.tab_Landsat)
self.nodata_checkBox_2.setChecked(True)
self.nodata_checkBox_2.setObjectName("nodata_checkBox_2")
self.gridLayout_29.addWidget(self.nodata_checkBox_2, 0, 1, 1, 1)
spacerItem34 = QtWidgets.QSpacerItem(40, 20, QtWidgets.QSizePolicy.Expanding, QtWidgets.QSizePolicy.Minimum)
self.gridLayout_29.addItem(spacerItem34, 0, 0, 1, 1)
self.gridLayout_37.addLayout(self.gridLayout_29, 4, 1, 1, 3)
self.label_41 = QtWidgets.QLabel(self.tab_Landsat)
sizePolicy = QtWidgets.QSizePolicy(QtWidgets.QSizePolicy.Minimum, QtWidgets.QSizePolicy.Preferred)
sizePolicy.setHorizontalStretch(0)
sizePolicy.setVerticalStretch(0)
sizePolicy.setHeightForWidth(self.label_41.sizePolicy().hasHeightForWidth())
self.label_41.setSizePolicy(sizePolicy)
self.label_41.setMinimumSize(QtCore.QSize(229, 0))
self.label_41.setFrameShadow(QtWidgets.QFrame.Sunken)
self.label_41.setAlignment(QtCore.Qt.AlignLeading|QtCore.Qt.AlignLeft|QtCore.Qt.AlignVCenter)
self.label_41.setObjectName("label_41")
self.gridLayout_37.addWidget(self.label_41, 2, 0, 1, 1)
self.toolButton_directoryInput = QtWidgets.QToolButton(self.tab_Landsat)
self.toolButton_directoryInput.setStyleSheet("margin: 0px;padding: 0px;")
self.toolButton_directoryInput.setIcon(icon69)
self.toolButton_directoryInput.setIconSize(QtCore.QSize(22, 22))
self.toolButton_directoryInput.setObjectName("toolButton_directoryInput")
self.gridLayout_37.addWidget(self.toolButton_directoryInput, 1, 3, 1, 1)
self.label_26 = QtWidgets.QLabel(self.tab_Landsat)
self.label_26.setFrameShape(QtWidgets.QFrame.Panel)
self.label_26.setFrameShadow(QtWidgets.QFrame.Sunken)
self.label_26.setText("")
self.label_26.setObjectName("label_26")
self.gridLayout_37.addWidget(self.label_26, 1, 1, 1, 2)
self.label_27 = QtWidgets.QLabel(self.tab_Landsat)
self.label_27.setFrameShape(QtWidgets.QFrame.Panel)
self.label_27.setFrameShadow(QtWidgets.QFrame.Sunken)
self.label_27.setText("")
self.label_27.setObjectName("label_27")
self.gridLayout_37.addWidget(self.label_27, 2, 1, 1, 2)
self.gridLayout_183 = QtWidgets.QGridLayout()
self.gridLayout_183.setObjectName("gridLayout_183")
self.pansharpening_checkBox = QtWidgets.QCheckBox(self.tab_Landsat)
self.pansharpening_checkBox.setChecked(False)
self.pansharpening_checkBox.setTristate(False)
self.pansharpening_checkBox.setObjectName("pansharpening_checkBox")
self.gridLayout_183.addWidget(self.pansharpening_checkBox, 0, 0, 1, 1)
self.create_bandset_checkBox = QtWidgets.QCheckBox(self.tab_Landsat)
self.create_bandset_checkBox.setChecked(True)
self.create_bandset_checkBox.setTristate(False)
self.create_bandset_checkBox.setObjectName("create_bandset_checkBox")
self.gridLayout_183.addWidget(self.create_bandset_checkBox, 1, 0, 1, 1)
self.add_new_bandset_checkBox_1 = QtWidgets.QCheckBox(self.tab_Landsat)
self.add_new_bandset_checkBox_1.setChecked(True)
self.add_new_bandset_checkBox_1.setTristate(False)
self.add_new_bandset_checkBox_1.setObjectName("add_new_bandset_checkBox_1")
self.gridLayout_183.addWidget(self.add_new_bandset_checkBox_1, 1, 1, 1, 1)
spacerItem35 = QtWidgets.QSpacerItem(40, 20, QtWidgets.QSizePolicy.Expanding, QtWidgets.QSizePolicy.Minimum)
self.gridLayout_183.addItem(spacerItem35, 1, 2, 1, 1)
self.gridLayout_37.addLayout(self.gridLayout_183, 5, 0, 1, 4)
self.toolButton_directoryInput_MTL = QtWidgets.QToolButton(self.tab_Landsat)
self.toolButton_directoryInput_MTL.setStyleSheet("margin: 0px;padding: 0px;")
self.toolButton_directoryInput_MTL.setIcon(icon65)
self.toolButton_directoryInput_MTL.setIconSize(QtCore.QSize(22, 22))
self.toolButton_directoryInput_MTL.setObjectName("toolButton_directoryInput_MTL")
self.gridLayout_37.addWidget(self.toolButton_directoryInput_MTL, 2, 3, 1, 1)
self.gridLayout_18.addLayout(self.gridLayout_37, 0, 0, 1, 1)
self.gridLayout_95 = QtWidgets.QGridLayout()
self.gridLayout_95.setObjectName("gridLayout_95")
self.landsat_tableWidget = QtWidgets.QTableWidget(self.tab_Landsat)
self.landsat_tableWidget.setObjectName("landsat_tableWidget")
self.landsat_tableWidget.setColumnCount(13)
self.landsat_tableWidget.setRowCount(0)
item = QtWidgets.QTableWidgetItem()
self.landsat_tableWidget.setHorizontalHeaderItem(0, item)
item = QtWidgets.QTableWidgetItem()
self.landsat_tableWidget.setHorizontalHeaderItem(1, item)
item = QtWidgets.QTableWidgetItem()
self.landsat_tableWidget.setHorizontalHeaderItem(2, item)
item = QtWidgets.QTableWidgetItem()
self.landsat_tableWidget.setHorizontalHeaderItem(3, item)
item = QtWidgets.QTableWidgetItem()
self.landsat_tableWidget.setHorizontalHeaderItem(4, item)
item = QtWidgets.QTableWidgetItem()
self.landsat_tableWidget.setHorizontalHeaderItem(5, item)
item = QtWidgets.QTableWidgetItem()
self.landsat_tableWidget.setHorizontalHeaderItem(6, item)
item = QtWidgets.QTableWidgetItem()
self.landsat_tableWidget.setHorizontalHeaderItem(7, item)
item = QtWidgets.QTableWidgetItem()
self.landsat_tableWidget.setHorizontalHeaderItem(8, item)
item = QtWidgets.QTableWidgetItem()
self.landsat_tableWidget.setHorizontalHeaderItem(9, item)
item = QtWidgets.QTableWidgetItem()
self.landsat_tableWidget.setHorizontalHeaderItem(10, item)
item = QtWidgets.QTableWidgetItem()
self.landsat_tableWidget.setHorizontalHeaderItem(11, item)
item = QtWidgets.QTableWidgetItem()
self.landsat_tableWidget.setHorizontalHeaderItem(12, item)
self.landsat_tableWidget.horizontalHeader().setDefaultSectionSize(155)
self.gridLayout_95.addWidget(self.landsat_tableWidget, 1, 0, 1, 1)
self.gridLayout_15 = QtWidgets.QGridLayout()
self.gridLayout_15.setObjectName("gridLayout_15")
self.pushButton_remove_band = QtWidgets.QToolButton(self.tab_Landsat)
self.pushButton_remove_band.setStyleSheet("margin: 0px;padding: 0px;")
self.pushButton_remove_band.setIcon(icon58)
self.pushButton_remove_band.setIconSize(QtCore.QSize(22, 22))
self.pushButton_remove_band.setObjectName("pushButton_remove_band")
self.gridLayout_15.addWidget(self.pushButton_remove_band, 0, 0, 1, 1)
self.gridLayout_95.addLayout(self.gridLayout_15, 1, 1, 1, 1)
self.gridLayout_98 = QtWidgets.QGridLayout()
self.gridLayout_98.setObjectName("gridLayout_98")
self.satellite_label = QtWidgets.QLabel(self.tab_Landsat)
sizePolicy = QtWidgets.QSizePolicy(QtWidgets.QSizePolicy.Maximum, QtWidgets.QSizePolicy.Preferred)
sizePolicy.setHorizontalStretch(0)
sizePolicy.setVerticalStretch(0)
sizePolicy.setHeightForWidth(self.satellite_label.sizePolicy().hasHeightForWidth())
self.satellite_label.setSizePolicy(sizePolicy)
self.satellite_label.setFrameShadow(QtWidgets.QFrame.Sunken)
self.satellite_label.setAlignment(QtCore.Qt.AlignLeading|QtCore.Qt.AlignLeft|QtCore.Qt.AlignVCenter)
self.satellite_label.setObjectName("satellite_label")
self.gridLayout_98.addWidget(self.satellite_label, 1, 0, 1, 1)
self.satellite_label_3 = QtWidgets.QLabel(self.tab_Landsat)
sizePolicy = QtWidgets.QSizePolicy(QtWidgets.QSizePolicy.Maximum, QtWidgets.QSizePolicy.Preferred)
sizePolicy.setHorizontalStretch(0)
sizePolicy.setVerticalStretch(0)
sizePolicy.setHeightForWidth(self.satellite_label_3.sizePolicy().hasHeightForWidth())
self.satellite_label_3.setSizePolicy(sizePolicy)
self.satellite_label_3.setFrameShadow(QtWidgets.QFrame.Sunken)
self.satellite_label_3.setAlignment(QtCore.Qt.AlignLeading|QtCore.Qt.AlignLeft|QtCore.Qt.AlignVCenter)
self.satellite_label_3.setObjectName("satellite_label_3")
self.gridLayout_98.addWidget(self.satellite_label_3, 1, 4, 1, 1)
self.date_lineEdit = QtWidgets.QLineEdit(self.tab_Landsat)
self.date_lineEdit.setObjectName("date_lineEdit")
self.gridLayout_98.addWidget(self.date_lineEdit, 1, 3, 1, 1)
self.satellite_label_2 = QtWidgets.QLabel(self.tab_Landsat)
sizePolicy = QtWidgets.QSizePolicy(QtWidgets.QSizePolicy.Maximum, QtWidgets.QSizePolicy.Preferred)
sizePolicy.setHorizontalStretch(0)
sizePolicy.setVerticalStretch(0)
sizePolicy.setHeightForWidth(self.satellite_label_2.sizePolicy().hasHeightForWidth())
self.satellite_label_2.setSizePolicy(sizePolicy)
self.satellite_label_2.setFrameShadow(QtWidgets.QFrame.Sunken)
self.satellite_label_2.setAlignment(QtCore.Qt.AlignLeading|QtCore.Qt.AlignLeft|QtCore.Qt.AlignVCenter)
self.satellite_label_2.setObjectName("satellite_label_2")
self.gridLayout_98.addWidget(self.satellite_label_2, 1, 2, 1, 1)
self.satellite_label_4 = QtWidgets.QLabel(self.tab_Landsat)
sizePolicy = QtWidgets.QSizePolicy(QtWidgets.QSizePolicy.Maximum, QtWidgets.QSizePolicy.Preferred)
sizePolicy.setHorizontalStretch(0)
sizePolicy.setVerticalStretch(0)
sizePolicy.setHeightForWidth(self.satellite_label_4.sizePolicy().hasHeightForWidth())
self.satellite_label_4.setSizePolicy(sizePolicy)
self.satellite_label_4.setFrameShadow(QtWidgets.QFrame.Sunken)
self.satellite_label_4.setAlignment(QtCore.Qt.AlignLeading|QtCore.Qt.AlignLeft|QtCore.Qt.AlignVCenter)
self.satellite_label_4.setObjectName("satellite_label_4")
self.gridLayout_98.addWidget(self.satellite_label_4, 1, 6, 1, 1)
self.sun_elev_lineEdit = QtWidgets.QLineEdit(self.tab_Landsat)
self.sun_elev_lineEdit.setObjectName("sun_elev_lineEdit")
self.gridLayout_98.addWidget(self.sun_elev_lineEdit, 1, 5, 1, 1)
self.earth_sun_dist_lineEdit = QtWidgets.QLineEdit(self.tab_Landsat)
self.earth_sun_dist_lineEdit.setObjectName("earth_sun_dist_lineEdit")
self.gridLayout_98.addWidget(self.earth_sun_dist_lineEdit, 1, 7, 1, 1)
self.label_74 = QtWidgets.QLabel(self.tab_Landsat)
sizePolicy = QtWidgets.QSizePolicy(QtWidgets.QSizePolicy.Expanding, QtWidgets.QSizePolicy.Preferred)
sizePolicy.setHorizontalStretch(0)
sizePolicy.setVerticalStretch(0)
sizePolicy.setHeightForWidth(self.label_74.sizePolicy().hasHeightForWidth())
self.label_74.setSizePolicy(sizePolicy)
self.label_74.setStyleSheet("background-color : #656565; color : white")
self.label_74.setFrameShape(QtWidgets.QFrame.Panel)
self.label_74.setFrameShadow(QtWidgets.QFrame.Sunken)
self.label_74.setObjectName("label_74")
self.gridLayout_98.addWidget(self.label_74, 0, 0, 1, 9)
self.satellite_lineEdit = QtWidgets.QLineEdit(self.tab_Landsat)
self.satellite_lineEdit.setObjectName("satellite_lineEdit")
self.gridLayout_98.addWidget(self.satellite_lineEdit, 1, 1, 1, 1)
self.gridLayout_95.addLayout(self.gridLayout_98, 0, 0, 1, 2)
self.gridLayout_18.addLayout(self.gridLayout_95, 1, 0, 1, 1)
self.gridLayout_97 = QtWidgets.QGridLayout()
self.gridLayout_97.setObjectName("gridLayout_97")
spacerItem36 = QtWidgets.QSpacerItem(40, 20, QtWidgets.QSizePolicy.Expanding, QtWidgets.QSizePolicy.Minimum)
self.gridLayout_97.addItem(spacerItem36, 1, 1, 1, 1)
self.label_161 = QtWidgets.QLabel(self.tab_Landsat)
self.label_161.setStyleSheet("background-color : #656565; color : white")
self.label_161.setFrameShape(QtWidgets.QFrame.Panel)
self.label_161.setFrameShadow(QtWidgets.QFrame.Sunken)
self.label_161.setAlignment(QtCore.Qt.AlignLeading|QtCore.Qt.AlignLeft|QtCore.Qt.AlignVCenter)
self.label_161.setObjectName("label_161")
self.gridLayout_97.addWidget(self.label_161, 0, 1, 1, 3)
self.pushButton_Conversion = QtWidgets.QToolButton(self.tab_Landsat)
font = QtGui.QFont()
font.setBold(True)
font.setWeight(75)
self.pushButton_Conversion.setFont(font)
self.pushButton_Conversion.setLayoutDirection(QtCore.Qt.RightToLeft)
self.pushButton_Conversion.setStyleSheet("margin: 0px;padding: 0px;")
self.pushButton_Conversion.setIcon(icon64)
self.pushButton_Conversion.setIconSize(QtCore.QSize(34, 34))
self.pushButton_Conversion.setToolButtonStyle(QtCore.Qt.ToolButtonTextBesideIcon)
self.pushButton_Conversion.setObjectName("pushButton_Conversion")
self.gridLayout_97.addWidget(self.pushButton_Conversion, 1, 3, 1, 1)
self.landsat_conversion = QtWidgets.QToolButton(self.tab_Landsat)
font = QtGui.QFont()
font.setBold(True)
font.setWeight(75)
self.landsat_conversion.setFont(font)
self.landsat_conversion.setLayoutDirection(QtCore.Qt.RightToLeft)
self.landsat_conversion.setStyleSheet("margin: 0px;padding: 0px;")
self.landsat_conversion.setIcon(icon48)
self.landsat_conversion.setIconSize(QtCore.QSize(34, 34))
self.landsat_conversion.setToolButtonStyle(QtCore.Qt.ToolButtonTextBesideIcon)
self.landsat_conversion.setObjectName("landsat_conversion")
self.gridLayout_97.addWidget(self.landsat_conversion, 1, 2, 1, 1)
self.gridLayout_18.addLayout(self.gridLayout_97, 2, 0, 1, 1)
self.tabWidget_preprocessing.addTab(self.tab_Landsat, "")
self.tab_Sentinel1 = QtWidgets.QWidget()
self.tab_Sentinel1.setObjectName("tab_Sentinel1")
self.gridLayout_167 = QtWidgets.QGridLayout(self.tab_Sentinel1)
self.gridLayout_167.setObjectName("gridLayout_167")
self.gridLayout_265 = QtWidgets.QGridLayout()
self.gridLayout_265.setObjectName("gridLayout_265")
self.gridLayout_279 = QtWidgets.QGridLayout()
self.gridLayout_279.setObjectName("gridLayout_279")
self.S1_label_95 = QtWidgets.QLabel(self.tab_Sentinel1)
sizePolicy = QtWidgets.QSizePolicy(QtWidgets.QSizePolicy.Maximum, QtWidgets.QSizePolicy.Preferred)
sizePolicy.setHorizontalStretch(0)
sizePolicy.setVerticalStretch(0)
sizePolicy.setHeightForWidth(self.S1_label_95.sizePolicy().hasHeightForWidth())
self.S1_label_95.setSizePolicy(sizePolicy)
self.S1_label_95.setMinimumSize(QtCore.QSize(229, 0))
self.S1_label_95.setFrameShadow(QtWidgets.QFrame.Sunken)
self.S1_label_95.setAlignment(QtCore.Qt.AlignLeading|QtCore.Qt.AlignLeft|QtCore.Qt.AlignVCenter)
self.S1_label_95.setObjectName("S1_label_95")
self.gridLayout_279.addWidget(self.S1_label_95, 0, 0, 1, 1)
self.S1_label_96 = QtWidgets.QLabel(self.tab_Sentinel1)
sizePolicy = QtWidgets.QSizePolicy(QtWidgets.QSizePolicy.Expanding, QtWidgets.QSizePolicy.Preferred)
sizePolicy.setHorizontalStretch(0)
sizePolicy.setVerticalStretch(0)
sizePolicy.setHeightForWidth(self.S1_label_96.sizePolicy().hasHeightForWidth())
self.S1_label_96.setSizePolicy(sizePolicy)
self.S1_label_96.setFrameShape(QtWidgets.QFrame.Panel)
self.S1_label_96.setFrameShadow(QtWidgets.QFrame.Sunken)
self.S1_label_96.setText("")
self.S1_label_96.setObjectName("S1_label_96")
self.gridLayout_279.addWidget(self.S1_label_96, 0, 1, 1, 1)
self.S1_toolButton_directoryInput_xml = QtWidgets.QToolButton(self.tab_Sentinel1)
self.S1_toolButton_directoryInput_xml.setStyleSheet("margin: 0px;padding: 0px;")
self.S1_toolButton_directoryInput_xml.setIcon(icon65)
self.S1_toolButton_directoryInput_xml.setIconSize(QtCore.QSize(22, 22))
self.S1_toolButton_directoryInput_xml.setObjectName("S1_toolButton_directoryInput_xml")
self.gridLayout_279.addWidget(self.S1_toolButton_directoryInput_xml, 0, 2, 1, 1)
self.horizontalLayout_48 = QtWidgets.QHBoxLayout()
self.horizontalLayout_48.setObjectName("horizontalLayout_48")
self.S1_label_97 = QtWidgets.QLabel(self.tab_Sentinel1)
sizePolicy = QtWidgets.QSizePolicy(QtWidgets.QSizePolicy.Maximum, QtWidgets.QSizePolicy.Preferred)
sizePolicy.setHorizontalStretch(0)
sizePolicy.setVerticalStretch(0)
sizePolicy.setHeightForWidth(self.S1_label_97.sizePolicy().hasHeightForWidth())
self.S1_label_97.setSizePolicy(sizePolicy)
self.S1_label_97.setFrameShadow(QtWidgets.QFrame.Sunken)
self.S1_label_97.setAlignment(QtCore.Qt.AlignLeading|QtCore.Qt.AlignLeft|QtCore.Qt.AlignVCenter)
self.S1_label_97.setObjectName("S1_label_97")
self.horizontalLayout_48.addWidget(self.S1_label_97)
self.VH_checkBox_S1 = QtWidgets.QCheckBox(self.tab_Sentinel1)
sizePolicy = QtWidgets.QSizePolicy(QtWidgets.QSizePolicy.Maximum, QtWidgets.QSizePolicy.Fixed)
sizePolicy.setHorizontalStretch(0)
sizePolicy.setVerticalStretch(0)
sizePolicy.setHeightForWidth(self.VH_checkBox_S1.sizePolicy().hasHeightForWidth())
self.VH_checkBox_S1.setSizePolicy(sizePolicy)
self.VH_checkBox_S1.setChecked(True)
self.VH_checkBox_S1.setTristate(False)
self.VH_checkBox_S1.setObjectName("VH_checkBox_S1")
self.horizontalLayout_48.addWidget(self.VH_checkBox_S1)
self.VV_checkBox_S1 = QtWidgets.QCheckBox(self.tab_Sentinel1)
sizePolicy = QtWidgets.QSizePolicy(QtWidgets.QSizePolicy.Maximum, QtWidgets.QSizePolicy.Fixed)
sizePolicy.setHorizontalStretch(0)
sizePolicy.setVerticalStretch(0)
sizePolicy.setHeightForWidth(self.VV_checkBox_S1.sizePolicy().hasHeightForWidth())
self.VV_checkBox_S1.setSizePolicy(sizePolicy)
self.VV_checkBox_S1.setChecked(True)
self.VV_checkBox_S1.setTristate(False)
self.VV_checkBox_S1.setObjectName("VV_checkBox_S1")
self.horizontalLayout_48.addWidget(self.VV_checkBox_S1)
self.gridLayout_279.addLayout(self.horizontalLayout_48, 1, 0, 1, 1)
self.gridLayout_265.addLayout(self.gridLayout_279, 3, 0, 1, 3)
self.label_209 = QtWidgets.QLabel(self.tab_Sentinel1)
sizePolicy = QtWidgets.QSizePolicy(QtWidgets.QSizePolicy.Expanding, QtWidgets.QSizePolicy.Preferred)
sizePolicy.setHorizontalStretch(0)
sizePolicy.setVerticalStretch(0)
sizePolicy.setHeightForWidth(self.label_209.sizePolicy().hasHeightForWidth())
self.label_209.setSizePolicy(sizePolicy)
self.label_209.setStyleSheet("background-color : #656565; color : white")
self.label_209.setFrameShape(QtWidgets.QFrame.Panel)
self.label_209.setFrameShadow(QtWidgets.QFrame.Sunken)
self.label_209.setObjectName("label_209")
self.gridLayout_265.addWidget(self.label_209, 0, 0, 1, 3)
self.S1_create_bandset_checkBox = QtWidgets.QCheckBox(self.tab_Sentinel1)
self.S1_create_bandset_checkBox.setChecked(True)
self.S1_create_bandset_checkBox.setTristate(False)
self.S1_create_bandset_checkBox.setObjectName("S1_create_bandset_checkBox")
self.gridLayout_265.addWidget(self.S1_create_bandset_checkBox, 5, 0, 1, 1)
self.add_new_bandset_checkBox_6 = QtWidgets.QCheckBox(self.tab_Sentinel1)
self.add_new_bandset_checkBox_6.setChecked(True)
self.add_new_bandset_checkBox_6.setTristate(False)
self.add_new_bandset_checkBox_6.setObjectName("add_new_bandset_checkBox_6")
self.gridLayout_265.addWidget(self.add_new_bandset_checkBox_6, 5, 1, 1, 2)
self.gridLayout_266 = QtWidgets.QGridLayout()
self.gridLayout_266.setObjectName("gridLayout_266")
self.S1_label_87 = QtWidgets.QLabel(self.tab_Sentinel1)
sizePolicy = QtWidgets.QSizePolicy(QtWidgets.QSizePolicy.Expanding, QtWidgets.QSizePolicy.Preferred)
sizePolicy.setHorizontalStretch(0)
sizePolicy.setVerticalStretch(0)
sizePolicy.setHeightForWidth(self.S1_label_87.sizePolicy().hasHeightForWidth())
self.S1_label_87.setSizePolicy(sizePolicy)
self.S1_label_87.setFrameShape(QtWidgets.QFrame.Panel)
self.S1_label_87.setFrameShadow(QtWidgets.QFrame.Sunken)
self.S1_label_87.setText("")
self.S1_label_87.setObjectName("S1_label_87")
self.gridLayout_266.addWidget(self.S1_label_87, 0, 1, 1, 1)
self.S1_toolButton_fileInput = QtWidgets.QToolButton(self.tab_Sentinel1)
self.S1_toolButton_fileInput.setStyleSheet("margin: 0px;padding: 0px;")
self.S1_toolButton_fileInput.setIcon(icon65)
self.S1_toolButton_fileInput.setIconSize(QtCore.QSize(22, 22))
self.S1_toolButton_fileInput.setObjectName("S1_toolButton_fileInput")
self.gridLayout_266.addWidget(self.S1_toolButton_fileInput, 0, 2, 1, 1)
self.label_207 = QtWidgets.QLabel(self.tab_Sentinel1)
sizePolicy = QtWidgets.QSizePolicy(QtWidgets.QSizePolicy.Maximum, QtWidgets.QSizePolicy.Preferred)
sizePolicy.setHorizontalStretch(0)
sizePolicy.setVerticalStretch(0)
sizePolicy.setHeightForWidth(self.label_207.sizePolicy().hasHeightForWidth())
self.label_207.setSizePolicy(sizePolicy)
self.label_207.setMinimumSize(QtCore.QSize(229, 0))
self.label_207.setFrameShadow(QtWidgets.QFrame.Sunken)
self.label_207.setAlignment(QtCore.Qt.AlignLeading|QtCore.Qt.AlignLeft|QtCore.Qt.AlignVCenter)
self.label_207.setObjectName("label_207")
self.gridLayout_266.addWidget(self.label_207, 0, 0, 1, 1)
self.gridLayout_265.addLayout(self.gridLayout_266, 2, 0, 1, 3)
self.horizontalLayout_46 = QtWidgets.QHBoxLayout()
self.horizontalLayout_46.setObjectName("horizontalLayout_46")
self.projection_checkBox_S1 = QtWidgets.QCheckBox(self.tab_Sentinel1)
sizePolicy = QtWidgets.QSizePolicy(QtWidgets.QSizePolicy.Maximum, QtWidgets.QSizePolicy.Fixed)
sizePolicy.setHorizontalStretch(0)
sizePolicy.setVerticalStretch(0)
sizePolicy.setHeightForWidth(self.projection_checkBox_S1.sizePolicy().hasHeightForWidth())
self.projection_checkBox_S1.setSizePolicy(sizePolicy)
self.projection_checkBox_S1.setTristate(False)
self.projection_checkBox_S1.setObjectName("projection_checkBox_S1")
self.horizontalLayout_46.addWidget(self.projection_checkBox_S1)
self.band_set_comb_spinBox_11 = QtWidgets.QSpinBox(self.tab_Sentinel1)
self.band_set_comb_spinBox_11.setMinimum(1)
self.band_set_comb_spinBox_11.setMaximum(100000)
self.band_set_comb_spinBox_11.setObjectName("band_set_comb_spinBox_11")
self.horizontalLayout_46.addWidget(self.band_set_comb_spinBox_11)
spacerItem37 = QtWidgets.QSpacerItem(40, 20, QtWidgets.QSizePolicy.Expanding, QtWidgets.QSizePolicy.Minimum)
self.horizontalLayout_46.addItem(spacerItem37)
self.convert_to_db_checkBox = QtWidgets.QCheckBox(self.tab_Sentinel1)
self.convert_to_db_checkBox.setChecked(True)
self.convert_to_db_checkBox.setTristate(False)
self.convert_to_db_checkBox.setObjectName("convert_to_db_checkBox")
self.horizontalLayout_46.addWidget(self.convert_to_db_checkBox)
spacerItem38 = QtWidgets.QSpacerItem(40, 20, QtWidgets.QSizePolicy.Expanding, QtWidgets.QSizePolicy.Minimum)
self.horizontalLayout_46.addItem(spacerItem38)
self.S1_nodata_checkBox = QtWidgets.QCheckBox(self.tab_Sentinel1)
self.S1_nodata_checkBox.setChecked(True)
self.S1_nodata_checkBox.setObjectName("S1_nodata_checkBox")
self.horizontalLayout_46.addWidget(self.S1_nodata_checkBox)
self.S1_nodata_spinBox = QtWidgets.QSpinBox(self.tab_Sentinel1)
self.S1_nodata_spinBox.setMinimum(-999)
self.S1_nodata_spinBox.setMaximum(100000)
self.S1_nodata_spinBox.setObjectName("S1_nodata_spinBox")
self.horizontalLayout_46.addWidget(self.S1_nodata_spinBox)
self.gridLayout_265.addLayout(self.horizontalLayout_46, 4, 0, 1, 3)
self.gridLayout_167.addLayout(self.gridLayout_265, 0, 0, 1, 1)
spacerItem39 = QtWidgets.QSpacerItem(20, 296, QtWidgets.QSizePolicy.Minimum, QtWidgets.QSizePolicy.Expanding)
self.gridLayout_167.addItem(spacerItem39, 1, 0, 1, 1)
self.gridLayout_268 = QtWidgets.QGridLayout()
self.gridLayout_268.setObjectName("gridLayout_268")
self.label_210 = QtWidgets.QLabel(self.tab_Sentinel1)
self.label_210.setStyleSheet("background-color : #656565; color : white")
self.label_210.setFrameShape(QtWidgets.QFrame.Panel)
self.label_210.setFrameShadow(QtWidgets.QFrame.Sunken)
self.label_210.setAlignment(QtCore.Qt.AlignLeading|QtCore.Qt.AlignLeft|QtCore.Qt.AlignVCenter)
self.label_210.setObjectName("label_210")
self.gridLayout_268.addWidget(self.label_210, 0, 0, 1, 3)
spacerItem40 = QtWidgets.QSpacerItem(782, 20, QtWidgets.QSizePolicy.Expanding, QtWidgets.QSizePolicy.Minimum)
self.gridLayout_268.addItem(spacerItem40, 1, 0, 1, 1)
self.pushButton_Conversion_6 = QtWidgets.QToolButton(self.tab_Sentinel1)
font = QtGui.QFont()
font.setBold(True)
font.setWeight(75)
self.pushButton_Conversion_6.setFont(font)
self.pushButton_Conversion_6.setLayoutDirection(QtCore.Qt.RightToLeft)
self.pushButton_Conversion_6.setStyleSheet("margin: 0px;padding: 0px;")
self.pushButton_Conversion_6.setIcon(icon64)
self.pushButton_Conversion_6.setIconSize(QtCore.QSize(34, 34))
self.pushButton_Conversion_6.setToolButtonStyle(QtCore.Qt.ToolButtonTextBesideIcon)
self.pushButton_Conversion_6.setObjectName("pushButton_Conversion_6")
self.gridLayout_268.addWidget(self.pushButton_Conversion_6, 1, 2, 1, 1)
self.sentinel1_conversion = QtWidgets.QToolButton(self.tab_Sentinel1)
font = QtGui.QFont()
font.setBold(True)
font.setWeight(75)
self.sentinel1_conversion.setFont(font)
self.sentinel1_conversion.setLayoutDirection(QtCore.Qt.RightToLeft)
self.sentinel1_conversion.setStyleSheet("margin: 0px;padding: 0px;")
self.sentinel1_conversion.setIcon(icon48)
self.sentinel1_conversion.setIconSize(QtCore.QSize(34, 34))
self.sentinel1_conversion.setToolButtonStyle(QtCore.Qt.ToolButtonTextBesideIcon)
self.sentinel1_conversion.setObjectName("sentinel1_conversion")
self.gridLayout_268.addWidget(self.sentinel1_conversion, 1, 1, 1, 1)
self.gridLayout_167.addLayout(self.gridLayout_268, 2, 0, 1, 1)
self.tabWidget_preprocessing.addTab(self.tab_Sentinel1, "")
self.tab_Sentinel2 = QtWidgets.QWidget()
self.tab_Sentinel2.setObjectName("tab_Sentinel2")
self.gridLayout_164 = QtWidgets.QGridLayout(self.tab_Sentinel2)
self.gridLayout_164.setObjectName("gridLayout_164")
self.gridLayout_146 = QtWidgets.QGridLayout()
self.gridLayout_146.setObjectName("gridLayout_146")
self.label_90 = QtWidgets.QLabel(self.tab_Sentinel2)
sizePolicy = QtWidgets.QSizePolicy(QtWidgets.QSizePolicy.Maximum, QtWidgets.QSizePolicy.Preferred)
sizePolicy.setHorizontalStretch(0)
sizePolicy.setVerticalStretch(0)
sizePolicy.setHeightForWidth(self.label_90.sizePolicy().hasHeightForWidth())
self.label_90.setSizePolicy(sizePolicy)
self.label_90.setMinimumSize(QtCore.QSize(229, 0))
self.label_90.setFrameShadow(QtWidgets.QFrame.Sunken)
self.label_90.setAlignment(QtCore.Qt.AlignLeading|QtCore.Qt.AlignLeft|QtCore.Qt.AlignVCenter)
self.label_90.setObjectName("label_90")
self.gridLayout_146.addWidget(self.label_90, 1, 0, 1, 1)
self.gridLayout_147 = QtWidgets.QGridLayout()
self.gridLayout_147.setObjectName("gridLayout_147")
self.S2_toolButton_directoryInput = QtWidgets.QToolButton(self.tab_Sentinel2)
self.S2_toolButton_directoryInput.setStyleSheet("margin: 0px;padding: 0px;")
self.S2_toolButton_directoryInput.setIcon(icon69)
self.S2_toolButton_directoryInput.setIconSize(QtCore.QSize(22, 22))
self.S2_toolButton_directoryInput.setObjectName("S2_toolButton_directoryInput")
self.gridLayout_147.addWidget(self.S2_toolButton_directoryInput, 0, 1, 1, 1)
self.S2_label_86 = QtWidgets.QLabel(self.tab_Sentinel2)
sizePolicy = QtWidgets.QSizePolicy(QtWidgets.QSizePolicy.Expanding, QtWidgets.QSizePolicy.Preferred)
sizePolicy.setHorizontalStretch(0)
sizePolicy.setVerticalStretch(0)
sizePolicy.setHeightForWidth(self.S2_label_86.sizePolicy().hasHeightForWidth())
self.S2_label_86.setSizePolicy(sizePolicy)
self.S2_label_86.setFrameShape(QtWidgets.QFrame.Panel)
self.S2_label_86.setFrameShadow(QtWidgets.QFrame.Sunken)
self.S2_label_86.setText("")
self.S2_label_86.setObjectName("S2_label_86")
self.gridLayout_147.addWidget(self.S2_label_86, 0, 0, 1, 1)
self.gridLayout_146.addLayout(self.gridLayout_147, 1, 1, 1, 2)
self.DOS1_checkBox_S2 = QtWidgets.QCheckBox(self.tab_Sentinel2)
sizePolicy = QtWidgets.QSizePolicy(QtWidgets.QSizePolicy.Maximum, QtWidgets.QSizePolicy.Fixed)
sizePolicy.setHorizontalStretch(0)
sizePolicy.setVerticalStretch(0)
sizePolicy.setHeightForWidth(self.DOS1_checkBox_S2.sizePolicy().hasHeightForWidth())
self.DOS1_checkBox_S2.setSizePolicy(sizePolicy)
self.DOS1_checkBox_S2.setChecked(False)
self.DOS1_checkBox_S2.setTristate(False)
self.DOS1_checkBox_S2.setObjectName("DOS1_checkBox_S2")
self.gridLayout_146.addWidget(self.DOS1_checkBox_S2, 3, 0, 1, 1)
self.gridLayout_148 = QtWidgets.QGridLayout()
self.gridLayout_148.setObjectName("gridLayout_148")
self.S2_nodata_spinBox = QtWidgets.QSpinBox(self.tab_Sentinel2)
self.S2_nodata_spinBox.setMinimum(-999)
self.S2_nodata_spinBox.setMaximum(100000)
self.S2_nodata_spinBox.setObjectName("S2_nodata_spinBox")
self.gridLayout_148.addWidget(self.S2_nodata_spinBox, 0, 2, 1, 1)
self.S2_nodata_checkBox = QtWidgets.QCheckBox(self.tab_Sentinel2)
self.S2_nodata_checkBox.setChecked(True)
self.S2_nodata_checkBox.setObjectName("S2_nodata_checkBox")
self.gridLayout_148.addWidget(self.S2_nodata_checkBox, 0, 1, 1, 1)
spacerItem41 = QtWidgets.QSpacerItem(40, 20, QtWidgets.QSizePolicy.Expanding, QtWidgets.QSizePolicy.Minimum)
self.gridLayout_148.addItem(spacerItem41, 0, 0, 1, 1)
self.gridLayout_146.addLayout(self.gridLayout_148, 3, 1, 1, 2)
self.label_91 = QtWidgets.QLabel(self.tab_Sentinel2)
sizePolicy = QtWidgets.QSizePolicy(QtWidgets.QSizePolicy.Expanding, QtWidgets.QSizePolicy.Preferred)
sizePolicy.setHorizontalStretch(0)
sizePolicy.setVerticalStretch(0)
sizePolicy.setHeightForWidth(self.label_91.sizePolicy().hasHeightForWidth())
self.label_91.setSizePolicy(sizePolicy)
self.label_91.setStyleSheet("background-color : #656565; color : white")
self.label_91.setFrameShape(QtWidgets.QFrame.Panel)
self.label_91.setFrameShadow(QtWidgets.QFrame.Sunken)
self.label_91.setObjectName("label_91")
self.gridLayout_146.addWidget(self.label_91, 0, 0, 1, 3)
self.S2_label_93 = QtWidgets.QLabel(self.tab_Sentinel2)
sizePolicy = QtWidgets.QSizePolicy(QtWidgets.QSizePolicy.Maximum, QtWidgets.QSizePolicy.Preferred)
sizePolicy.setHorizontalStretch(0)
sizePolicy.setVerticalStretch(0)
sizePolicy.setHeightForWidth(self.S2_label_93.sizePolicy().hasHeightForWidth())
self.S2_label_93.setSizePolicy(sizePolicy)
self.S2_label_93.setMinimumSize(QtCore.QSize(229, 0))
self.S2_label_93.setFrameShadow(QtWidgets.QFrame.Sunken)
self.S2_label_93.setAlignment(QtCore.Qt.AlignLeading|QtCore.Qt.AlignLeft|QtCore.Qt.AlignVCenter)
self.S2_label_93.setObjectName("S2_label_93")
self.gridLayout_146.addWidget(self.S2_label_93, 2, 0, 1, 1)
self.gridLayout_156 = QtWidgets.QGridLayout()
self.gridLayout_156.setObjectName("gridLayout_156")
self.S2_label_94 = QtWidgets.QLabel(self.tab_Sentinel2)
sizePolicy = QtWidgets.QSizePolicy(QtWidgets.QSizePolicy.Expanding, QtWidgets.QSizePolicy.Preferred)
sizePolicy.setHorizontalStretch(0)
sizePolicy.setVerticalStretch(0)
sizePolicy.setHeightForWidth(self.S2_label_94.sizePolicy().hasHeightForWidth())
self.S2_label_94.setSizePolicy(sizePolicy)
self.S2_label_94.setFrameShape(QtWidgets.QFrame.Panel)
self.S2_label_94.setFrameShadow(QtWidgets.QFrame.Sunken)
self.S2_label_94.setText("")
self.S2_label_94.setObjectName("S2_label_94")
self.gridLayout_156.addWidget(self.S2_label_94, 0, 0, 1, 1)
self.S2_toolButton_directoryInput_xml2 = QtWidgets.QToolButton(self.tab_Sentinel2)
self.S2_toolButton_directoryInput_xml2.setStyleSheet("margin: 0px;padding: 0px;")
self.S2_toolButton_directoryInput_xml2.setIcon(icon65)
self.S2_toolButton_directoryInput_xml2.setIconSize(QtCore.QSize(22, 22))
self.S2_toolButton_directoryInput_xml2.setObjectName("S2_toolButton_directoryInput_xml2")
self.gridLayout_156.addWidget(self.S2_toolButton_directoryInput_xml2, 0, 1, 1, 1)
self.gridLayout_146.addLayout(self.gridLayout_156, 2, 1, 1, 2)
self.S2_create_bandset_checkBox = QtWidgets.QCheckBox(self.tab_Sentinel2)
self.S2_create_bandset_checkBox.setChecked(True)
self.S2_create_bandset_checkBox.setTristate(False)
self.S2_create_bandset_checkBox.setObjectName("S2_create_bandset_checkBox")
self.gridLayout_146.addWidget(self.S2_create_bandset_checkBox, 5, 0, 1, 1)
self.add_new_bandset_checkBox_2 = QtWidgets.QCheckBox(self.tab_Sentinel2)
self.add_new_bandset_checkBox_2.setChecked(True)
self.add_new_bandset_checkBox_2.setTristate(False)
self.add_new_bandset_checkBox_2.setObjectName("add_new_bandset_checkBox_2")
self.gridLayout_146.addWidget(self.add_new_bandset_checkBox_2, 5, 1, 1, 2)
self.preprocess_b_1_9_10_checkBox = QtWidgets.QCheckBox(self.tab_Sentinel2)
self.preprocess_b_1_9_10_checkBox.setTristate(False)
self.preprocess_b_1_9_10_checkBox.setObjectName("preprocess_b_1_9_10_checkBox")
self.gridLayout_146.addWidget(self.preprocess_b_1_9_10_checkBox, 4, 0, 1, 1)
self.gridLayout_164.addLayout(self.gridLayout_146, 0, 0, 1, 1)
self.gridLayout_166 = QtWidgets.QGridLayout()
self.gridLayout_166.setObjectName("gridLayout_166")
self.S2_satellite_lineEdit = QtWidgets.QLineEdit(self.tab_Sentinel2)
self.S2_satellite_lineEdit.setObjectName("S2_satellite_lineEdit")
self.gridLayout_166.addWidget(self.S2_satellite_lineEdit, 1, 1, 1, 1)
self.satellite_label_5 = QtWidgets.QLabel(self.tab_Sentinel2)
sizePolicy = QtWidgets.QSizePolicy(QtWidgets.QSizePolicy.Maximum, QtWidgets.QSizePolicy.Preferred)
sizePolicy.setHorizontalStretch(0)
sizePolicy.setVerticalStretch(0)
sizePolicy.setHeightForWidth(self.satellite_label_5.sizePolicy().hasHeightForWidth())
self.satellite_label_5.setSizePolicy(sizePolicy)
self.satellite_label_5.setFrameShadow(QtWidgets.QFrame.Sunken)
self.satellite_label_5.setAlignment(QtCore.Qt.AlignLeading|QtCore.Qt.AlignLeft|QtCore.Qt.AlignVCenter)
self.satellite_label_5.setObjectName("satellite_label_5")
self.gridLayout_166.addWidget(self.satellite_label_5, 1, 0, 1, 1)
spacerItem42 = QtWidgets.QSpacerItem(40, 20, QtWidgets.QSizePolicy.Expanding, QtWidgets.QSizePolicy.Minimum)
self.gridLayout_166.addItem(spacerItem42, 1, 2, 1, 1)
self.satellite_label_6 = QtWidgets.QLabel(self.tab_Sentinel2)
sizePolicy = QtWidgets.QSizePolicy(QtWidgets.QSizePolicy.Maximum, QtWidgets.QSizePolicy.Preferred)
sizePolicy.setHorizontalStretch(0)
sizePolicy.setVerticalStretch(0)
sizePolicy.setHeightForWidth(self.satellite_label_6.sizePolicy().hasHeightForWidth())
self.satellite_label_6.setSizePolicy(sizePolicy)
self.satellite_label_6.setFrameShadow(QtWidgets.QFrame.Sunken)
self.satellite_label_6.setAlignment(QtCore.Qt.AlignLeading|QtCore.Qt.AlignLeft|QtCore.Qt.AlignVCenter)
self.satellite_label_6.setObjectName("satellite_label_6")
self.gridLayout_166.addWidget(self.satellite_label_6, 1, 7, 1, 1)
self.S2_product_lineEdit = QtWidgets.QLineEdit(self.tab_Sentinel2)
self.S2_product_lineEdit.setObjectName("S2_product_lineEdit")
self.gridLayout_166.addWidget(self.S2_product_lineEdit, 1, 8, 1, 1)
self.label_92 = QtWidgets.QLabel(self.tab_Sentinel2)
sizePolicy = QtWidgets.QSizePolicy(QtWidgets.QSizePolicy.Expanding, QtWidgets.QSizePolicy.Preferred)
sizePolicy.setHorizontalStretch(0)
sizePolicy.setVerticalStretch(0)
sizePolicy.setHeightForWidth(self.label_92.sizePolicy().hasHeightForWidth())
self.label_92.setSizePolicy(sizePolicy)
self.label_92.setStyleSheet("background-color : #656565; color : white")
self.label_92.setFrameShape(QtWidgets.QFrame.Panel)
self.label_92.setFrameShadow(QtWidgets.QFrame.Sunken)
self.label_92.setObjectName("label_92")
self.gridLayout_166.addWidget(self.label_92, 0, 0, 1, 9)
self.date_lineEdit_3 = QtWidgets.QLineEdit(self.tab_Sentinel2)
self.date_lineEdit_3.setObjectName("date_lineEdit_3")
self.gridLayout_166.addWidget(self.date_lineEdit_3, 1, 4, 1, 1)
spacerItem43 = QtWidgets.QSpacerItem(40, 20, QtWidgets.QSizePolicy.Expanding, QtWidgets.QSizePolicy.Minimum)
self.gridLayout_166.addItem(spacerItem43, 1, 6, 1, 1)
self.satellite_label_15 = QtWidgets.QLabel(self.tab_Sentinel2)
sizePolicy = QtWidgets.QSizePolicy(QtWidgets.QSizePolicy.Maximum, QtWidgets.QSizePolicy.Preferred)
sizePolicy.setHorizontalStretch(0)
sizePolicy.setVerticalStretch(0)
sizePolicy.setHeightForWidth(self.satellite_label_15.sizePolicy().hasHeightForWidth())
self.satellite_label_15.setSizePolicy(sizePolicy)
self.satellite_label_15.setFrameShadow(QtWidgets.QFrame.Sunken)
self.satellite_label_15.setAlignment(QtCore.Qt.AlignLeading|QtCore.Qt.AlignLeft|QtCore.Qt.AlignVCenter)
self.satellite_label_15.setObjectName("satellite_label_15")
self.gridLayout_166.addWidget(self.satellite_label_15, 1, 3, 1, 1)
self.gridLayout_164.addLayout(self.gridLayout_166, 1, 0, 1, 1)
self.gridLayout_162 = QtWidgets.QGridLayout()
self.gridLayout_162.setObjectName("gridLayout_162")
self.sentinel_2_tableWidget = QtWidgets.QTableWidget(self.tab_Sentinel2)
self.sentinel_2_tableWidget.setTextElideMode(QtCore.Qt.ElideMiddle)
self.sentinel_2_tableWidget.setObjectName("sentinel_2_tableWidget")
self.sentinel_2_tableWidget.setColumnCount(3)
self.sentinel_2_tableWidget.setRowCount(0)
item = QtWidgets.QTableWidgetItem()
self.sentinel_2_tableWidget.setHorizontalHeaderItem(0, item)
item = QtWidgets.QTableWidgetItem()
self.sentinel_2_tableWidget.setHorizontalHeaderItem(1, item)
item = QtWidgets.QTableWidgetItem()
self.sentinel_2_tableWidget.setHorizontalHeaderItem(2, item)
self.sentinel_2_tableWidget.horizontalHeader().setDefaultSectionSize(155)
self.sentinel_2_tableWidget.horizontalHeader().setStretchLastSection(True)
self.gridLayout_162.addWidget(self.sentinel_2_tableWidget, 0, 0, 1, 1)
self.S2_pushButton_remove_band = QtWidgets.QToolButton(self.tab_Sentinel2)
self.S2_pushButton_remove_band.setStyleSheet("margin: 0px;padding: 0px;")
self.S2_pushButton_remove_band.setIcon(icon58)
self.S2_pushButton_remove_band.setIconSize(QtCore.QSize(22, 22))
self.S2_pushButton_remove_band.setObjectName("S2_pushButton_remove_band")
self.gridLayout_162.addWidget(self.S2_pushButton_remove_band, 0, 1, 1, 1)
self.gridLayout_164.addLayout(self.gridLayout_162, 2, 0, 1, 1)
self.gridLayout_165 = QtWidgets.QGridLayout()
self.gridLayout_165.setObjectName("gridLayout_165")
spacerItem44 = QtWidgets.QSpacerItem(40, 20, QtWidgets.QSizePolicy.Expanding, QtWidgets.QSizePolicy.Minimum)
self.gridLayout_165.addItem(spacerItem44, 1, 1, 1, 1)
self.label_162 = QtWidgets.QLabel(self.tab_Sentinel2)
self.label_162.setStyleSheet("background-color : #656565; color : white")
self.label_162.setFrameShape(QtWidgets.QFrame.Panel)
self.label_162.setFrameShadow(QtWidgets.QFrame.Sunken)
self.label_162.setAlignment(QtCore.Qt.AlignLeading|QtCore.Qt.AlignLeft|QtCore.Qt.AlignVCenter)
self.label_162.setObjectName("label_162")
self.gridLayout_165.addWidget(self.label_162, 0, 1, 1, 3)
self.pushButton_Conversion_2 = QtWidgets.QToolButton(self.tab_Sentinel2)
font = QtGui.QFont()
font.setBold(True)
font.setWeight(75)
self.pushButton_Conversion_2.setFont(font)
self.pushButton_Conversion_2.setLayoutDirection(QtCore.Qt.RightToLeft)
self.pushButton_Conversion_2.setStyleSheet("margin: 0px;padding: 0px;")
self.pushButton_Conversion_2.setIcon(icon64)
self.pushButton_Conversion_2.setIconSize(QtCore.QSize(34, 34))
self.pushButton_Conversion_2.setToolButtonStyle(QtCore.Qt.ToolButtonTextBesideIcon)
self.pushButton_Conversion_2.setObjectName("pushButton_Conversion_2")
self.gridLayout_165.addWidget(self.pushButton_Conversion_2, 1, 3, 1, 1)
self.sentinel2_conversion = QtWidgets.QToolButton(self.tab_Sentinel2)
font = QtGui.QFont()
font.setBold(True)
font.setWeight(75)
self.sentinel2_conversion.setFont(font)
self.sentinel2_conversion.setLayoutDirection(QtCore.Qt.RightToLeft)
self.sentinel2_conversion.setStyleSheet("margin: 0px;padding: 0px;")
self.sentinel2_conversion.setIcon(icon48)
self.sentinel2_conversion.setIconSize(QtCore.QSize(34, 34))
self.sentinel2_conversion.setToolButtonStyle(QtCore.Qt.ToolButtonTextBesideIcon)
self.sentinel2_conversion.setObjectName("sentinel2_conversion")
self.gridLayout_165.addWidget(self.sentinel2_conversion, 1, 2, 1, 1)
self.gridLayout_164.addLayout(self.gridLayout_165, 3, 0, 1, 1)
self.tabWidget_preprocessing.addTab(self.tab_Sentinel2, "")
self.tab_Sentinel3 = QtWidgets.QWidget()
self.tab_Sentinel3.setObjectName("tab_Sentinel3")
self.gridLayout_158 = QtWidgets.QGridLayout(self.tab_Sentinel3)
self.gridLayout_158.setObjectName("gridLayout_158")
self.gridLayout_153 = QtWidgets.QGridLayout()
self.gridLayout_153.setObjectName("gridLayout_153")
self.gridLayout_157 = QtWidgets.QGridLayout()
self.gridLayout_157.setObjectName("gridLayout_157")
self.S2_nodata_spinBox_2 = QtWidgets.QSpinBox(self.tab_Sentinel3)
self.S2_nodata_spinBox_2.setMinimum(-999)
self.S2_nodata_spinBox_2.setMaximum(100000)
self.S2_nodata_spinBox_2.setObjectName("S2_nodata_spinBox_2")
self.gridLayout_157.addWidget(self.S2_nodata_spinBox_2, 0, 2, 1, 1)
self.S3_nodata_checkBox = QtWidgets.QCheckBox(self.tab_Sentinel3)
self.S3_nodata_checkBox.setChecked(True)
self.S3_nodata_checkBox.setObjectName("S3_nodata_checkBox")
self.gridLayout_157.addWidget(self.S3_nodata_checkBox, 0, 1, 1, 1)
spacerItem45 = QtWidgets.QSpacerItem(40, 20, QtWidgets.QSizePolicy.Expanding, QtWidgets.QSizePolicy.Minimum)
self.gridLayout_157.addItem(spacerItem45, 0, 0, 1, 1)
self.gridLayout_153.addLayout(self.gridLayout_157, 3, 1, 1, 2)
self.label_109 = QtWidgets.QLabel(self.tab_Sentinel3)
sizePolicy = QtWidgets.QSizePolicy(QtWidgets.QSizePolicy.Expanding, QtWidgets.QSizePolicy.Preferred)
sizePolicy.setHorizontalStretch(0)
sizePolicy.setVerticalStretch(0)
sizePolicy.setHeightForWidth(self.label_109.sizePolicy().hasHeightForWidth())
self.label_109.setSizePolicy(sizePolicy)
self.label_109.setStyleSheet("background-color : #656565; color : white")
self.label_109.setFrameShape(QtWidgets.QFrame.Panel)
self.label_109.setFrameShadow(QtWidgets.QFrame.Sunken)
self.label_109.setObjectName("label_109")
self.gridLayout_153.addWidget(self.label_109, 0, 0, 1, 3)
self.S3_create_bandset_checkBox = QtWidgets.QCheckBox(self.tab_Sentinel3)
self.S3_create_bandset_checkBox.setChecked(True)
self.S3_create_bandset_checkBox.setTristate(False)
self.S3_create_bandset_checkBox.setObjectName("S3_create_bandset_checkBox")
self.gridLayout_153.addWidget(self.S3_create_bandset_checkBox, 4, 0, 1, 1)
self.label_106 = QtWidgets.QLabel(self.tab_Sentinel3)
sizePolicy = QtWidgets.QSizePolicy(QtWidgets.QSizePolicy.Maximum, QtWidgets.QSizePolicy.Preferred)
sizePolicy.setHorizontalStretch(0)
sizePolicy.setVerticalStretch(0)
sizePolicy.setHeightForWidth(self.label_106.sizePolicy().hasHeightForWidth())
self.label_106.setSizePolicy(sizePolicy)
self.label_106.setMinimumSize(QtCore.QSize(229, 0))
self.label_106.setFrameShadow(QtWidgets.QFrame.Sunken)
self.label_106.setAlignment(QtCore.Qt.AlignLeading|QtCore.Qt.AlignLeft|QtCore.Qt.AlignVCenter)
self.label_106.setObjectName("label_106")
self.gridLayout_153.addWidget(self.label_106, 2, 0, 1, 1)
self.gridLayout_155 = QtWidgets.QGridLayout()
self.gridLayout_155.setObjectName("gridLayout_155")
self.S3_toolButton_directoryInput = QtWidgets.QToolButton(self.tab_Sentinel3)
self.S3_toolButton_directoryInput.setStyleSheet("margin: 0px;padding: 0px;")
self.S3_toolButton_directoryInput.setIcon(icon69)
self.S3_toolButton_directoryInput.setIconSize(QtCore.QSize(22, 22))
self.S3_toolButton_directoryInput.setObjectName("S3_toolButton_directoryInput")
self.gridLayout_155.addWidget(self.S3_toolButton_directoryInput, 0, 1, 1, 1)
self.S3_label_87 = QtWidgets.QLabel(self.tab_Sentinel3)
sizePolicy = QtWidgets.QSizePolicy(QtWidgets.QSizePolicy.Expanding, QtWidgets.QSizePolicy.Preferred)
sizePolicy.setHorizontalStretch(0)
sizePolicy.setVerticalStretch(0)
sizePolicy.setHeightForWidth(self.S3_label_87.sizePolicy().hasHeightForWidth())
self.S3_label_87.setSizePolicy(sizePolicy)
self.S3_label_87.setFrameShape(QtWidgets.QFrame.Panel)
self.S3_label_87.setFrameShadow(QtWidgets.QFrame.Sunken)
self.S3_label_87.setText("")
self.S3_label_87.setObjectName("S3_label_87")
self.gridLayout_155.addWidget(self.S3_label_87, 0, 0, 1, 1)
self.gridLayout_153.addLayout(self.gridLayout_155, 2, 1, 1, 2)
self.DOS1_checkBox_S3 = QtWidgets.QCheckBox(self.tab_Sentinel3)
sizePolicy = QtWidgets.QSizePolicy(QtWidgets.QSizePolicy.Maximum, QtWidgets.QSizePolicy.Fixed)
sizePolicy.setHorizontalStretch(0)
sizePolicy.setVerticalStretch(0)
sizePolicy.setHeightForWidth(self.DOS1_checkBox_S3.sizePolicy().hasHeightForWidth())
self.DOS1_checkBox_S3.setSizePolicy(sizePolicy)
self.DOS1_checkBox_S3.setChecked(False)
self.DOS1_checkBox_S3.setTristate(False)
self.DOS1_checkBox_S3.setObjectName("DOS1_checkBox_S3")
self.gridLayout_153.addWidget(self.DOS1_checkBox_S3, 3, 0, 1, 1)
self.add_new_bandset_checkBox_3 = QtWidgets.QCheckBox(self.tab_Sentinel3)
self.add_new_bandset_checkBox_3.setChecked(True)
self.add_new_bandset_checkBox_3.setTristate(False)
self.add_new_bandset_checkBox_3.setObjectName("add_new_bandset_checkBox_3")
self.gridLayout_153.addWidget(self.add_new_bandset_checkBox_3, 4, 1, 1, 2)
self.gridLayout_158.addLayout(self.gridLayout_153, 0, 0, 1, 1)
self.gridLayout_212 = QtWidgets.QGridLayout()
self.gridLayout_212.setObjectName("gridLayout_212")
self.sentinel_3_tableWidget = QtWidgets.QTableWidget(self.tab_Sentinel3)
self.sentinel_3_tableWidget.setTextElideMode(QtCore.Qt.ElideMiddle)
self.sentinel_3_tableWidget.setObjectName("sentinel_3_tableWidget")
self.sentinel_3_tableWidget.setColumnCount(1)
self.sentinel_3_tableWidget.setRowCount(0)
item = QtWidgets.QTableWidgetItem()
self.sentinel_3_tableWidget.setHorizontalHeaderItem(0, item)
self.sentinel_3_tableWidget.horizontalHeader().setDefaultSectionSize(155)
self.sentinel_3_tableWidget.horizontalHeader().setStretchLastSection(True)
self.gridLayout_212.addWidget(self.sentinel_3_tableWidget, 1, 0, 1, 1)
self.gridLayout_229 = QtWidgets.QGridLayout()
self.gridLayout_229.setObjectName("gridLayout_229")
self.S3_pushButton_remove_band = QtWidgets.QToolButton(self.tab_Sentinel3)
self.S3_pushButton_remove_band.setStyleSheet("margin: 0px;padding: 0px;")
self.S3_pushButton_remove_band.setIcon(icon58)
self.S3_pushButton_remove_band.setIconSize(QtCore.QSize(22, 22))
self.S3_pushButton_remove_band.setObjectName("S3_pushButton_remove_band")
self.gridLayout_229.addWidget(self.S3_pushButton_remove_band, 0, 0, 1, 1)
self.gridLayout_212.addLayout(self.gridLayout_229, 1, 1, 1, 1)
self.gridLayout_230 = QtWidgets.QGridLayout()
self.gridLayout_230.setObjectName("gridLayout_230")
self.S3_satellite_lineEdit = QtWidgets.QLineEdit(self.tab_Sentinel3)
self.S3_satellite_lineEdit.setObjectName("S3_satellite_lineEdit")
self.gridLayout_230.addWidget(self.S3_satellite_lineEdit, 1, 1, 1, 1)
self.satellite_label_12 = QtWidgets.QLabel(self.tab_Sentinel3)
sizePolicy = QtWidgets.QSizePolicy(QtWidgets.QSizePolicy.Maximum, QtWidgets.QSizePolicy.Preferred)
sizePolicy.setHorizontalStretch(0)
sizePolicy.setVerticalStretch(0)
sizePolicy.setHeightForWidth(self.satellite_label_12.sizePolicy().hasHeightForWidth())
self.satellite_label_12.setSizePolicy(sizePolicy)
self.satellite_label_12.setFrameShadow(QtWidgets.QFrame.Sunken)
self.satellite_label_12.setAlignment(QtCore.Qt.AlignLeading|QtCore.Qt.AlignLeft|QtCore.Qt.AlignVCenter)
self.satellite_label_12.setObjectName("satellite_label_12")
self.gridLayout_230.addWidget(self.satellite_label_12, 1, 0, 1, 1)
spacerItem46 = QtWidgets.QSpacerItem(40, 20, QtWidgets.QSizePolicy.Expanding, QtWidgets.QSizePolicy.Minimum)
self.gridLayout_230.addItem(spacerItem46, 1, 2, 1, 1)
self.satellite_label_14 = QtWidgets.QLabel(self.tab_Sentinel3)
sizePolicy = QtWidgets.QSizePolicy(QtWidgets.QSizePolicy.Maximum, QtWidgets.QSizePolicy.Preferred)
sizePolicy.setHorizontalStretch(0)
sizePolicy.setVerticalStretch(0)
sizePolicy.setHeightForWidth(self.satellite_label_14.sizePolicy().hasHeightForWidth())
self.satellite_label_14.setSizePolicy(sizePolicy)
self.satellite_label_14.setFrameShadow(QtWidgets.QFrame.Sunken)
self.satellite_label_14.setAlignment(QtCore.Qt.AlignLeading|QtCore.Qt.AlignLeft|QtCore.Qt.AlignVCenter)
self.satellite_label_14.setObjectName("satellite_label_14")
self.gridLayout_230.addWidget(self.satellite_label_14, 1, 3, 1, 1)
self.S3_product_lineEdit = QtWidgets.QLineEdit(self.tab_Sentinel3)
self.S3_product_lineEdit.setObjectName("S3_product_lineEdit")
self.gridLayout_230.addWidget(self.S3_product_lineEdit, 1, 4, 1, 1)
self.label_115 = QtWidgets.QLabel(self.tab_Sentinel3)
sizePolicy = QtWidgets.QSizePolicy(QtWidgets.QSizePolicy.Expanding, QtWidgets.QSizePolicy.Preferred)
sizePolicy.setHorizontalStretch(0)
sizePolicy.setVerticalStretch(0)
sizePolicy.setHeightForWidth(self.label_115.sizePolicy().hasHeightForWidth())
self.label_115.setSizePolicy(sizePolicy)
self.label_115.setStyleSheet("background-color : #656565; color : white")
self.label_115.setFrameShape(QtWidgets.QFrame.Panel)
self.label_115.setFrameShadow(QtWidgets.QFrame.Sunken)
self.label_115.setObjectName("label_115")
self.gridLayout_230.addWidget(self.label_115, 0, 0, 1, 5)
self.gridLayout_212.addLayout(self.gridLayout_230, 0, 0, 1, 2)
self.gridLayout_232 = QtWidgets.QGridLayout()
self.gridLayout_232.setObjectName("gridLayout_232")
spacerItem47 = QtWidgets.QSpacerItem(40, 20, QtWidgets.QSizePolicy.Expanding, QtWidgets.QSizePolicy.Minimum)
self.gridLayout_232.addItem(spacerItem47, 1, 1, 1, 1)
self.label_181 = QtWidgets.QLabel(self.tab_Sentinel3)
self.label_181.setStyleSheet("background-color : #656565; color : white")
self.label_181.setFrameShape(QtWidgets.QFrame.Panel)
self.label_181.setFrameShadow(QtWidgets.QFrame.Sunken)
self.label_181.setAlignment(QtCore.Qt.AlignLeading|QtCore.Qt.AlignLeft|QtCore.Qt.AlignVCenter)
self.label_181.setObjectName("label_181")
self.gridLayout_232.addWidget(self.label_181, 0, 1, 1, 3)
self.pushButton_Conversion_5 = QtWidgets.QToolButton(self.tab_Sentinel3)
font = QtGui.QFont()
font.setBold(True)
font.setWeight(75)
self.pushButton_Conversion_5.setFont(font)
self.pushButton_Conversion_5.setLayoutDirection(QtCore.Qt.RightToLeft)
self.pushButton_Conversion_5.setStyleSheet("margin: 0px;padding: 0px;")
self.pushButton_Conversion_5.setIcon(icon64)
self.pushButton_Conversion_5.setIconSize(QtCore.QSize(34, 34))
self.pushButton_Conversion_5.setToolButtonStyle(QtCore.Qt.ToolButtonTextBesideIcon)
self.pushButton_Conversion_5.setObjectName("pushButton_Conversion_5")
self.gridLayout_232.addWidget(self.pushButton_Conversion_5, 1, 3, 1, 1)
self.sentinel3_conversion = QtWidgets.QToolButton(self.tab_Sentinel3)
font = QtGui.QFont()
font.setBold(True)
font.setWeight(75)
self.sentinel3_conversion.setFont(font)
self.sentinel3_conversion.setLayoutDirection(QtCore.Qt.RightToLeft)
self.sentinel3_conversion.setStyleSheet("margin: 0px;padding: 0px;")
self.sentinel3_conversion.setIcon(icon48)
self.sentinel3_conversion.setIconSize(QtCore.QSize(34, 34))
self.sentinel3_conversion.setToolButtonStyle(QtCore.Qt.ToolButtonTextBesideIcon)
self.sentinel3_conversion.setObjectName("sentinel3_conversion")
self.gridLayout_232.addWidget(self.sentinel3_conversion, 1, 2, 1, 1)
self.gridLayout_212.addLayout(self.gridLayout_232, 2, 0, 1, 2)
self.gridLayout_158.addLayout(self.gridLayout_212, 1, 0, 1, 1)
self.tabWidget_preprocessing.addTab(self.tab_Sentinel3, "")
self.tab_ASTER = QtWidgets.QWidget()
self.tab_ASTER.setObjectName("tab_ASTER")
self.gridLayout_118 = QtWidgets.QGridLayout(self.tab_ASTER)
self.gridLayout_118.setObjectName("gridLayout_118")
self.gridLayout_92 = QtWidgets.QGridLayout()
self.gridLayout_92.setObjectName("gridLayout_92")
self.gridLayout_94 = QtWidgets.QGridLayout()
self.gridLayout_94.setObjectName("gridLayout_94")
self.nodata_spinBox_6 = QtWidgets.QSpinBox(self.tab_ASTER)
self.nodata_spinBox_6.setMinimum(-999)
self.nodata_spinBox_6.setMaximum(100000)
self.nodata_spinBox_6.setObjectName("nodata_spinBox_6")
self.gridLayout_94.addWidget(self.nodata_spinBox_6, 0, 2, 1, 1)
self.nodata_checkBox_5 = QtWidgets.QCheckBox(self.tab_ASTER)
self.nodata_checkBox_5.setChecked(True)
self.nodata_checkBox_5.setObjectName("nodata_checkBox_5")
self.gridLayout_94.addWidget(self.nodata_checkBox_5, 0, 1, 1, 1)
spacerItem48 = QtWidgets.QSpacerItem(40, 20, QtWidgets.QSizePolicy.Expanding, QtWidgets.QSizePolicy.Minimum)
self.gridLayout_94.addItem(spacerItem48, 0, 0, 1, 1)
self.gridLayout_92.addLayout(self.gridLayout_94, 3, 1, 1, 3)
self.label_143 = QtWidgets.QLabel(self.tab_ASTER)
self.label_143.setFrameShape(QtWidgets.QFrame.Panel)
self.label_143.setFrameShadow(QtWidgets.QFrame.Sunken)
self.label_143.setText("")
self.label_143.setObjectName("label_143")
self.gridLayout_92.addWidget(self.label_143, 1, 1, 1, 2)
self.toolButton_directoryInput_ASTER = QtWidgets.QToolButton(self.tab_ASTER)
self.toolButton_directoryInput_ASTER.setStyleSheet("margin: 0px;padding: 0px;")
self.toolButton_directoryInput_ASTER.setIcon(icon65)
self.toolButton_directoryInput_ASTER.setIconSize(QtCore.QSize(22, 22))
self.toolButton_directoryInput_ASTER.setObjectName("toolButton_directoryInput_ASTER")
self.gridLayout_92.addWidget(self.toolButton_directoryInput_ASTER, 1, 3, 1, 1)
self.DOS1_checkBox_2 = QtWidgets.QCheckBox(self.tab_ASTER)
self.DOS1_checkBox_2.setChecked(False)
self.DOS1_checkBox_2.setTristate(False)
self.DOS1_checkBox_2.setObjectName("DOS1_checkBox_2")
self.gridLayout_92.addWidget(self.DOS1_checkBox_2, 3, 0, 1, 1)
self.label_67 = QtWidgets.QLabel(self.tab_ASTER)
sizePolicy = QtWidgets.QSizePolicy(QtWidgets.QSizePolicy.Expanding, QtWidgets.QSizePolicy.Preferred)
sizePolicy.setHorizontalStretch(0)
sizePolicy.setVerticalStretch(0)
sizePolicy.setHeightForWidth(self.label_67.sizePolicy().hasHeightForWidth())
self.label_67.setSizePolicy(sizePolicy)
self.label_67.setStyleSheet("background-color : #656565; color : white")
self.label_67.setFrameShape(QtWidgets.QFrame.Panel)
self.label_67.setFrameShadow(QtWidgets.QFrame.Sunken)
self.label_67.setObjectName("label_67")
self.gridLayout_92.addWidget(self.label_67, 0, 0, 1, 4)
self.label_55 = QtWidgets.QLabel(self.tab_ASTER)
sizePolicy = QtWidgets.QSizePolicy(QtWidgets.QSizePolicy.Minimum, QtWidgets.QSizePolicy.Preferred)
sizePolicy.setHorizontalStretch(0)
sizePolicy.setVerticalStretch(0)
sizePolicy.setHeightForWidth(self.label_55.sizePolicy().hasHeightForWidth())
self.label_55.setSizePolicy(sizePolicy)
self.label_55.setMinimumSize(QtCore.QSize(229, 0))
self.label_55.setFrameShadow(QtWidgets.QFrame.Sunken)
self.label_55.setAlignment(QtCore.Qt.AlignLeading|QtCore.Qt.AlignLeft|QtCore.Qt.AlignVCenter)
self.label_55.setObjectName("label_55")
self.gridLayout_92.addWidget(self.label_55, 1, 0, 1, 1)
self.gridLayout_221 = QtWidgets.QGridLayout()
self.gridLayout_221.setObjectName("gridLayout_221")
self.create_bandset_checkBox_2 = QtWidgets.QCheckBox(self.tab_ASTER)
self.create_bandset_checkBox_2.setChecked(True)
self.create_bandset_checkBox_2.setTristate(False)
self.create_bandset_checkBox_2.setObjectName("create_bandset_checkBox_2")
self.gridLayout_221.addWidget(self.create_bandset_checkBox_2, 0, 0, 1, 1)
self.add_new_bandset_checkBox_4 = QtWidgets.QCheckBox(self.tab_ASTER)
self.add_new_bandset_checkBox_4.setChecked(True)
self.add_new_bandset_checkBox_4.setTristate(False)
self.add_new_bandset_checkBox_4.setObjectName("add_new_bandset_checkBox_4")
self.gridLayout_221.addWidget(self.add_new_bandset_checkBox_4, 0, 1, 1, 1)
spacerItem49 = QtWidgets.QSpacerItem(40, 20, QtWidgets.QSizePolicy.Expanding, QtWidgets.QSizePolicy.Minimum)
self.gridLayout_221.addItem(spacerItem49, 0, 2, 1, 1)
self.gridLayout_92.addLayout(self.gridLayout_221, 4, 0, 1, 4)
self.celsius_checkBox_2 = QtWidgets.QCheckBox(self.tab_ASTER)
self.celsius_checkBox_2.setChecked(False)
self.celsius_checkBox_2.setTristate(False)
self.celsius_checkBox_2.setObjectName("celsius_checkBox_2")
self.gridLayout_92.addWidget(self.celsius_checkBox_2, 2, 0, 1, 3)
self.gridLayout_118.addLayout(self.gridLayout_92, 0, 0, 1, 1)
self.gridLayout_96 = QtWidgets.QGridLayout()
self.gridLayout_96.setObjectName("gridLayout_96")
self.ASTER_tableWidget = QtWidgets.QTableWidget(self.tab_ASTER)
self.ASTER_tableWidget.setObjectName("ASTER_tableWidget")
self.ASTER_tableWidget.setColumnCount(3)
self.ASTER_tableWidget.setRowCount(0)
item = QtWidgets.QTableWidgetItem()
self.ASTER_tableWidget.setHorizontalHeaderItem(0, item)
item = QtWidgets.QTableWidgetItem()
self.ASTER_tableWidget.setHorizontalHeaderItem(1, item)
item = QtWidgets.QTableWidgetItem()
self.ASTER_tableWidget.setHorizontalHeaderItem(2, item)
self.ASTER_tableWidget.horizontalHeader().setDefaultSectionSize(155)
self.ASTER_tableWidget.horizontalHeader().setStretchLastSection(True)
self.gridLayout_96.addWidget(self.ASTER_tableWidget, 1, 0, 1, 1)
self.gridLayout_222 = QtWidgets.QGridLayout()
self.gridLayout_222.setObjectName("gridLayout_222")
self.pushButton_remove_band_2 = QtWidgets.QToolButton(self.tab_ASTER)
self.pushButton_remove_band_2.setStyleSheet("margin: 0px;padding: 0px;")
self.pushButton_remove_band_2.setIcon(icon58)
self.pushButton_remove_band_2.setIconSize(QtCore.QSize(22, 22))
self.pushButton_remove_band_2.setObjectName("pushButton_remove_band_2")
self.gridLayout_222.addWidget(self.pushButton_remove_band_2, 0, 0, 1, 1)
self.gridLayout_96.addLayout(self.gridLayout_222, 1, 1, 1, 1)
self.gridLayout_223 = QtWidgets.QGridLayout()
self.gridLayout_223.setObjectName("gridLayout_223")
self.date_lineEdit_2 = QtWidgets.QLineEdit(self.tab_ASTER)
self.date_lineEdit_2.setObjectName("date_lineEdit_2")
self.gridLayout_223.addWidget(self.date_lineEdit_2, 1, 2, 1, 1)
self.earth_sun_dist_lineEdit_2 = QtWidgets.QLineEdit(self.tab_ASTER)
self.earth_sun_dist_lineEdit_2.setObjectName("earth_sun_dist_lineEdit_2")
self.gridLayout_223.addWidget(self.earth_sun_dist_lineEdit_2, 1, 6, 1, 1)
self.satellite_label_9 = QtWidgets.QLabel(self.tab_ASTER)
sizePolicy = QtWidgets.QSizePolicy(QtWidgets.QSizePolicy.Maximum, QtWidgets.QSizePolicy.Preferred)
sizePolicy.setHorizontalStretch(0)
sizePolicy.setVerticalStretch(0)
sizePolicy.setHeightForWidth(self.satellite_label_9.sizePolicy().hasHeightForWidth())
self.satellite_label_9.setSizePolicy(sizePolicy)
self.satellite_label_9.setFrameShadow(QtWidgets.QFrame.Sunken)
self.satellite_label_9.setAlignment(QtCore.Qt.AlignLeading|QtCore.Qt.AlignLeft|QtCore.Qt.AlignVCenter)
self.satellite_label_9.setObjectName("satellite_label_9")
self.gridLayout_223.addWidget(self.satellite_label_9, 1, 5, 1, 1)
self.satellite_label_8 = QtWidgets.QLabel(self.tab_ASTER)
sizePolicy = QtWidgets.QSizePolicy(QtWidgets.QSizePolicy.Maximum, QtWidgets.QSizePolicy.Preferred)
sizePolicy.setHorizontalStretch(0)
sizePolicy.setVerticalStretch(0)
sizePolicy.setHeightForWidth(self.satellite_label_8.sizePolicy().hasHeightForWidth())
self.satellite_label_8.setSizePolicy(sizePolicy)
self.satellite_label_8.setFrameShadow(QtWidgets.QFrame.Sunken)
self.satellite_label_8.setAlignment(QtCore.Qt.AlignLeading|QtCore.Qt.AlignLeft|QtCore.Qt.AlignVCenter)
self.satellite_label_8.setObjectName("satellite_label_8")
self.gridLayout_223.addWidget(self.satellite_label_8, 1, 1, 1, 1)
self.satellite_label_7 = QtWidgets.QLabel(self.tab_ASTER)
sizePolicy = QtWidgets.QSizePolicy(QtWidgets.QSizePolicy.Maximum, QtWidgets.QSizePolicy.Preferred)
sizePolicy.setHorizontalStretch(0)
sizePolicy.setVerticalStretch(0)
sizePolicy.setHeightForWidth(self.satellite_label_7.sizePolicy().hasHeightForWidth())
self.satellite_label_7.setSizePolicy(sizePolicy)
self.satellite_label_7.setFrameShadow(QtWidgets.QFrame.Sunken)
self.satellite_label_7.setAlignment(QtCore.Qt.AlignLeading|QtCore.Qt.AlignLeft|QtCore.Qt.AlignVCenter)
self.satellite_label_7.setObjectName("satellite_label_7")
self.gridLayout_223.addWidget(self.satellite_label_7, 1, 3, 1, 1)
self.sun_elev_lineEdit_2 = QtWidgets.QLineEdit(self.tab_ASTER)
self.sun_elev_lineEdit_2.setObjectName("sun_elev_lineEdit_2")
self.gridLayout_223.addWidget(self.sun_elev_lineEdit_2, 1, 4, 1, 1)
self.ulm_lineEdit = QtWidgets.QLineEdit(self.tab_ASTER)
self.ulm_lineEdit.setObjectName("ulm_lineEdit")
self.gridLayout_223.addWidget(self.ulm_lineEdit, 1, 10, 1, 1)
self.satellite_label_10 = QtWidgets.QLabel(self.tab_ASTER)
sizePolicy = QtWidgets.QSizePolicy(QtWidgets.QSizePolicy.Maximum, QtWidgets.QSizePolicy.Preferred)
sizePolicy.setHorizontalStretch(0)
sizePolicy.setVerticalStretch(0)
sizePolicy.setHeightForWidth(self.satellite_label_10.sizePolicy().hasHeightForWidth())
self.satellite_label_10.setSizePolicy(sizePolicy)
self.satellite_label_10.setFrameShadow(QtWidgets.QFrame.Sunken)
self.satellite_label_10.setAlignment(QtCore.Qt.AlignLeading|QtCore.Qt.AlignLeft|QtCore.Qt.AlignVCenter)
self.satellite_label_10.setObjectName("satellite_label_10")
self.gridLayout_223.addWidget(self.satellite_label_10, 1, 7, 1, 1)
self.utm_zone_lineEdit = QtWidgets.QLineEdit(self.tab_ASTER)
self.utm_zone_lineEdit.setObjectName("utm_zone_lineEdit")
self.gridLayout_223.addWidget(self.utm_zone_lineEdit, 1, 8, 1, 1)
self.satellite_label_11 = QtWidgets.QLabel(self.tab_ASTER)
sizePolicy = QtWidgets.QSizePolicy(QtWidgets.QSizePolicy.Maximum, QtWidgets.QSizePolicy.Preferred)
sizePolicy.setHorizontalStretch(0)
sizePolicy.setVerticalStretch(0)
sizePolicy.setHeightForWidth(self.satellite_label_11.sizePolicy().hasHeightForWidth())
self.satellite_label_11.setSizePolicy(sizePolicy)
self.satellite_label_11.setFrameShadow(QtWidgets.QFrame.Sunken)
self.satellite_label_11.setAlignment(QtCore.Qt.AlignLeading|QtCore.Qt.AlignLeft|QtCore.Qt.AlignVCenter)
self.satellite_label_11.setObjectName("satellite_label_11")
self.gridLayout_223.addWidget(self.satellite_label_11, 1, 9, 1, 1)
self.satellite_label_17 = QtWidgets.QLabel(self.tab_ASTER)
sizePolicy = QtWidgets.QSizePolicy(QtWidgets.QSizePolicy.Maximum, QtWidgets.QSizePolicy.Preferred)
sizePolicy.setHorizontalStretch(0)
sizePolicy.setVerticalStretch(0)
sizePolicy.setHeightForWidth(self.satellite_label_17.sizePolicy().hasHeightForWidth())
self.satellite_label_17.setSizePolicy(sizePolicy)
self.satellite_label_17.setFrameShadow(QtWidgets.QFrame.Sunken)
self.satellite_label_17.setAlignment(QtCore.Qt.AlignLeading|QtCore.Qt.AlignLeft|QtCore.Qt.AlignVCenter)
self.satellite_label_17.setObjectName("satellite_label_17")
self.gridLayout_223.addWidget(self.satellite_label_17, 1, 11, 1, 1)
self.lrm_lineEdit = QtWidgets.QLineEdit(self.tab_ASTER)
self.lrm_lineEdit.setObjectName("lrm_lineEdit")
self.gridLayout_223.addWidget(self.lrm_lineEdit, 1, 12, 1, 1)
self.label_160 = QtWidgets.QLabel(self.tab_ASTER)
sizePolicy = QtWidgets.QSizePolicy(QtWidgets.QSizePolicy.Expanding, QtWidgets.QSizePolicy.Preferred)
sizePolicy.setHorizontalStretch(0)
sizePolicy.setVerticalStretch(0)
sizePolicy.setHeightForWidth(self.label_160.sizePolicy().hasHeightForWidth())
self.label_160.setSizePolicy(sizePolicy)
self.label_160.setStyleSheet("background-color : #656565; color : white")
self.label_160.setFrameShape(QtWidgets.QFrame.Panel)
self.label_160.setFrameShadow(QtWidgets.QFrame.Sunken)
self.label_160.setObjectName("label_160")
self.gridLayout_223.addWidget(self.label_160, 0, 0, 1, 13)
self.gridLayout_96.addLayout(self.gridLayout_223, 0, 0, 1, 2)
self.gridLayout_118.addLayout(self.gridLayout_96, 1, 0, 1, 1)
self.gridLayout_224 = QtWidgets.QGridLayout()
self.gridLayout_224.setObjectName("gridLayout_224")
spacerItem50 = QtWidgets.QSpacerItem(40, 20, QtWidgets.QSizePolicy.Expanding, QtWidgets.QSizePolicy.Minimum)
self.gridLayout_224.addItem(spacerItem50, 1, 1, 1, 1)
self.label_163 = QtWidgets.QLabel(self.tab_ASTER)
self.label_163.setStyleSheet("background-color : #656565; color : white")
self.label_163.setFrameShape(QtWidgets.QFrame.Panel)
self.label_163.setFrameShadow(QtWidgets.QFrame.Sunken)
self.label_163.setAlignment(QtCore.Qt.AlignLeading|QtCore.Qt.AlignLeft|QtCore.Qt.AlignVCenter)
self.label_163.setObjectName("label_163")
self.gridLayout_224.addWidget(self.label_163, 0, 1, 1, 3)
self.pushButton_Conversion_3 = QtWidgets.QToolButton(self.tab_ASTER)
font = QtGui.QFont()
font.setBold(True)
font.setWeight(75)
self.pushButton_Conversion_3.setFont(font)
self.pushButton_Conversion_3.setLayoutDirection(QtCore.Qt.RightToLeft)
self.pushButton_Conversion_3.setStyleSheet("margin: 0px;padding: 0px;")
self.pushButton_Conversion_3.setIcon(icon64)
self.pushButton_Conversion_3.setIconSize(QtCore.QSize(34, 34))
self.pushButton_Conversion_3.setToolButtonStyle(QtCore.Qt.ToolButtonTextBesideIcon)
self.pushButton_Conversion_3.setObjectName("pushButton_Conversion_3")
self.gridLayout_224.addWidget(self.pushButton_Conversion_3, 1, 3, 1, 1)
self.aster_conversion = QtWidgets.QToolButton(self.tab_ASTER)
font = QtGui.QFont()
font.setBold(True)
font.setWeight(75)
self.aster_conversion.setFont(font)
self.aster_conversion.setLayoutDirection(QtCore.Qt.RightToLeft)
self.aster_conversion.setStyleSheet("margin: 0px;padding: 0px;")
self.aster_conversion.setIcon(icon48)
self.aster_conversion.setIconSize(QtCore.QSize(34, 34))
self.aster_conversion.setToolButtonStyle(QtCore.Qt.ToolButtonTextBesideIcon)
self.aster_conversion.setObjectName("aster_conversion")
self.gridLayout_224.addWidget(self.aster_conversion, 1, 2, 1, 1)
self.gridLayout_118.addLayout(self.gridLayout_224, 2, 0, 1, 1)
self.tabWidget_preprocessing.addTab(self.tab_ASTER, "")
self.tab_MODIS = QtWidgets.QWidget()
self.tab_MODIS.setObjectName("tab_MODIS")
self.gridLayout_70 = QtWidgets.QGridLayout(self.tab_MODIS)
self.gridLayout_70.setObjectName("gridLayout_70")
self.gridLayout_270 = QtWidgets.QGridLayout()
self.gridLayout_270.setObjectName("gridLayout_270")
self.label_218 = QtWidgets.QLabel(self.tab_MODIS)
sizePolicy = QtWidgets.QSizePolicy(QtWidgets.QSizePolicy.Expanding, QtWidgets.QSizePolicy.Preferred)
sizePolicy.setHorizontalStretch(0)
sizePolicy.setVerticalStretch(0)
sizePolicy.setHeightForWidth(self.label_218.sizePolicy().hasHeightForWidth())
self.label_218.setSizePolicy(sizePolicy)
self.label_218.setStyleSheet("background-color : #656565; color : white")
self.label_218.setFrameShape(QtWidgets.QFrame.Panel)
self.label_218.setFrameShadow(QtWidgets.QFrame.Sunken)
self.label_218.setObjectName("label_218")
self.gridLayout_270.addWidget(self.label_218, 0, 0, 1, 4)
self.label_217 = QtWidgets.QLabel(self.tab_MODIS)
self.label_217.setFrameShape(QtWidgets.QFrame.Panel)
self.label_217.setFrameShadow(QtWidgets.QFrame.Sunken)
self.label_217.setText("")
self.label_217.setObjectName("label_217")
self.gridLayout_270.addWidget(self.label_217, 2, 1, 1, 2)
self.gridLayout_272 = QtWidgets.QGridLayout()
self.gridLayout_272.setObjectName("gridLayout_272")
self.create_bandset_checkBox_3 = QtWidgets.QCheckBox(self.tab_MODIS)
self.create_bandset_checkBox_3.setChecked(True)
self.create_bandset_checkBox_3.setTristate(False)
self.create_bandset_checkBox_3.setObjectName("create_bandset_checkBox_3")
self.gridLayout_272.addWidget(self.create_bandset_checkBox_3, 0, 0, 1, 1)
self.add_new_bandset_checkBox_5 = QtWidgets.QCheckBox(self.tab_MODIS)
self.add_new_bandset_checkBox_5.setChecked(True)
self.add_new_bandset_checkBox_5.setTristate(False)
self.add_new_bandset_checkBox_5.setObjectName("add_new_bandset_checkBox_5")
self.gridLayout_272.addWidget(self.add_new_bandset_checkBox_5, 0, 1, 1, 1)
spacerItem51 = QtWidgets.QSpacerItem(40, 20, QtWidgets.QSizePolicy.Expanding, QtWidgets.QSizePolicy.Minimum)
self.gridLayout_272.addItem(spacerItem51, 0, 2, 1, 1)
self.gridLayout_270.addLayout(self.gridLayout_272, 4, 0, 1, 4)
self.label_219 = QtWidgets.QLabel(self.tab_MODIS)
sizePolicy = QtWidgets.QSizePolicy(QtWidgets.QSizePolicy.Minimum, QtWidgets.QSizePolicy.Preferred)
sizePolicy.setHorizontalStretch(0)
sizePolicy.setVerticalStretch(0)
sizePolicy.setHeightForWidth(self.label_219.sizePolicy().hasHeightForWidth())
self.label_219.setSizePolicy(sizePolicy)
self.label_219.setMinimumSize(QtCore.QSize(229, 0))
self.label_219.setFrameShadow(QtWidgets.QFrame.Sunken)
self.label_219.setAlignment(QtCore.Qt.AlignLeading|QtCore.Qt.AlignLeft|QtCore.Qt.AlignVCenter)
self.label_219.setObjectName("label_219")
self.gridLayout_270.addWidget(self.label_219, 2, 0, 1, 1)
self.toolButton_directoryInput_MODIS = QtWidgets.QToolButton(self.tab_MODIS)
self.toolButton_directoryInput_MODIS.setStyleSheet("margin: 0px;padding: 0px;")
self.toolButton_directoryInput_MODIS.setIcon(icon65)
self.toolButton_directoryInput_MODIS.setIconSize(QtCore.QSize(22, 22))
self.toolButton_directoryInput_MODIS.setObjectName("toolButton_directoryInput_MODIS")
self.gridLayout_270.addWidget(self.toolButton_directoryInput_MODIS, 2, 3, 1, 1)
self.gridLayout_271 = QtWidgets.QGridLayout()
self.gridLayout_271.setObjectName("gridLayout_271")
self.nodata_spinBox_8 = QtWidgets.QSpinBox(self.tab_MODIS)
self.nodata_spinBox_8.setMinimum(-100000)
self.nodata_spinBox_8.setMaximum(100000)
self.nodata_spinBox_8.setProperty("value", -999)
self.nodata_spinBox_8.setObjectName("nodata_spinBox_8")
self.gridLayout_271.addWidget(self.nodata_spinBox_8, 0, 2, 1, 1)
self.nodata_checkBox_7 = QtWidgets.QCheckBox(self.tab_MODIS)
self.nodata_checkBox_7.setChecked(True)
self.nodata_checkBox_7.setObjectName("nodata_checkBox_7")
self.gridLayout_271.addWidget(self.nodata_checkBox_7, 0, 1, 1, 1)
spacerItem52 = QtWidgets.QSpacerItem(40, 20, QtWidgets.QSizePolicy.Expanding, QtWidgets.QSizePolicy.Minimum)
self.gridLayout_271.addItem(spacerItem52, 0, 0, 1, 1)
self.gridLayout_270.addLayout(self.gridLayout_271, 3, 1, 1, 3)
self.reproject_modis_checkBox = QtWidgets.QCheckBox(self.tab_MODIS)
self.reproject_modis_checkBox.setChecked(True)
self.reproject_modis_checkBox.setTristate(False)
self.reproject_modis_checkBox.setObjectName("reproject_modis_checkBox")
self.gridLayout_270.addWidget(self.reproject_modis_checkBox, 3, 0, 1, 1)
self.gridLayout_70.addLayout(self.gridLayout_270, 0, 0, 1, 1)
self.gridLayout_273 = QtWidgets.QGridLayout()
self.gridLayout_273.setObjectName("gridLayout_273")
self.MODIS_tableWidget = QtWidgets.QTableWidget(self.tab_MODIS)
self.MODIS_tableWidget.setObjectName("MODIS_tableWidget")
self.MODIS_tableWidget.setColumnCount(2)
self.MODIS_tableWidget.setRowCount(0)
item = QtWidgets.QTableWidgetItem()
self.MODIS_tableWidget.setHorizontalHeaderItem(0, item)
item = QtWidgets.QTableWidgetItem()
self.MODIS_tableWidget.setHorizontalHeaderItem(1, item)
self.MODIS_tableWidget.horizontalHeader().setDefaultSectionSize(155)
self.MODIS_tableWidget.horizontalHeader().setStretchLastSection(True)
self.gridLayout_273.addWidget(self.MODIS_tableWidget, 1, 0, 1, 1)
self.gridLayout_274 = QtWidgets.QGridLayout()
self.gridLayout_274.setObjectName("gridLayout_274")
self.pushButton_remove_band_3 = QtWidgets.QToolButton(self.tab_MODIS)
self.pushButton_remove_band_3.setStyleSheet("margin: 0px;padding: 0px;")
self.pushButton_remove_band_3.setIcon(icon58)
self.pushButton_remove_band_3.setIconSize(QtCore.QSize(22, 22))
self.pushButton_remove_band_3.setObjectName("pushButton_remove_band_3")
self.gridLayout_274.addWidget(self.pushButton_remove_band_3, 0, 0, 1, 1)
self.gridLayout_273.addLayout(self.gridLayout_274, 1, 1, 1, 1)
self.gridLayout_275 = QtWidgets.QGridLayout()
self.gridLayout_275.setObjectName("gridLayout_275")
self.satellite_label_16 = QtWidgets.QLabel(self.tab_MODIS)
sizePolicy = QtWidgets.QSizePolicy(QtWidgets.QSizePolicy.Maximum, QtWidgets.QSizePolicy.Preferred)
sizePolicy.setHorizontalStretch(0)
sizePolicy.setVerticalStretch(0)
sizePolicy.setHeightForWidth(self.satellite_label_16.sizePolicy().hasHeightForWidth())
self.satellite_label_16.setSizePolicy(sizePolicy)
self.satellite_label_16.setFrameShadow(QtWidgets.QFrame.Sunken)
self.satellite_label_16.setAlignment(QtCore.Qt.AlignLeading|QtCore.Qt.AlignLeft|QtCore.Qt.AlignVCenter)
self.satellite_label_16.setObjectName("satellite_label_16")
self.gridLayout_275.addWidget(self.satellite_label_16, 1, 4, 1, 1)
self.MODIS_ID_lineEdit = QtWidgets.QLineEdit(self.tab_MODIS)
self.MODIS_ID_lineEdit.setObjectName("MODIS_ID_lineEdit")
self.gridLayout_275.addWidget(self.MODIS_ID_lineEdit, 1, 2, 1, 1)
self.satellite_label_13 = QtWidgets.QLabel(self.tab_MODIS)
sizePolicy = QtWidgets.QSizePolicy(QtWidgets.QSizePolicy.Maximum, QtWidgets.QSizePolicy.Preferred)
sizePolicy.setHorizontalStretch(0)
sizePolicy.setVerticalStretch(0)
sizePolicy.setHeightForWidth(self.satellite_label_13.sizePolicy().hasHeightForWidth())
self.satellite_label_13.setSizePolicy(sizePolicy)
self.satellite_label_13.setFrameShadow(QtWidgets.QFrame.Sunken)
self.satellite_label_13.setAlignment(QtCore.Qt.AlignLeading|QtCore.Qt.AlignLeft|QtCore.Qt.AlignVCenter)
self.satellite_label_13.setObjectName("satellite_label_13")
self.gridLayout_275.addWidget(self.satellite_label_13, 1, 1, 1, 1)
self.MODIS_date_lineEdit = QtWidgets.QLineEdit(self.tab_MODIS)
self.MODIS_date_lineEdit.setObjectName("MODIS_date_lineEdit")
self.gridLayout_275.addWidget(self.MODIS_date_lineEdit, 1, 5, 1, 1)
self.label_220 = QtWidgets.QLabel(self.tab_MODIS)
sizePolicy = QtWidgets.QSizePolicy(QtWidgets.QSizePolicy.Expanding, QtWidgets.QSizePolicy.Preferred)
sizePolicy.setHorizontalStretch(0)
sizePolicy.setVerticalStretch(0)
sizePolicy.setHeightForWidth(self.label_220.sizePolicy().hasHeightForWidth())
self.label_220.setSizePolicy(sizePolicy)
self.label_220.setStyleSheet("background-color : #656565; color : white")
self.label_220.setFrameShape(QtWidgets.QFrame.Panel)
self.label_220.setFrameShadow(QtWidgets.QFrame.Sunken)
self.label_220.setObjectName("label_220")
self.gridLayout_275.addWidget(self.label_220, 0, 0, 1, 6)
spacerItem53 = QtWidgets.QSpacerItem(40, 20, QtWidgets.QSizePolicy.Expanding, QtWidgets.QSizePolicy.Minimum)
self.gridLayout_275.addItem(spacerItem53, 1, 3, 1, 1)
self.gridLayout_273.addLayout(self.gridLayout_275, 0, 0, 1, 2)
self.gridLayout_70.addLayout(self.gridLayout_273, 1, 0, 1, 1)
self.gridLayout_276 = QtWidgets.QGridLayout()
self.gridLayout_276.setObjectName("gridLayout_276")
spacerItem54 = QtWidgets.QSpacerItem(40, 20, QtWidgets.QSizePolicy.Expanding, QtWidgets.QSizePolicy.Minimum)
self.gridLayout_276.addItem(spacerItem54, 1, 1, 1, 1)
self.label_221 = QtWidgets.QLabel(self.tab_MODIS)
self.label_221.setStyleSheet("background-color : #656565; color : white")
self.label_221.setFrameShape(QtWidgets.QFrame.Panel)
self.label_221.setFrameShadow(QtWidgets.QFrame.Sunken)
self.label_221.setAlignment(QtCore.Qt.AlignLeading|QtCore.Qt.AlignLeft|QtCore.Qt.AlignVCenter)
self.label_221.setObjectName("label_221")
self.gridLayout_276.addWidget(self.label_221, 0, 1, 1, 3)
self.pushButton_Conversion_4 = QtWidgets.QToolButton(self.tab_MODIS)
font = QtGui.QFont()
font.setBold(True)
font.setWeight(75)
self.pushButton_Conversion_4.setFont(font)
self.pushButton_Conversion_4.setLayoutDirection(QtCore.Qt.RightToLeft)
self.pushButton_Conversion_4.setStyleSheet("margin: 0px;padding: 0px;")
self.pushButton_Conversion_4.setIcon(icon64)
self.pushButton_Conversion_4.setIconSize(QtCore.QSize(34, 34))
self.pushButton_Conversion_4.setToolButtonStyle(QtCore.Qt.ToolButtonTextBesideIcon)
self.pushButton_Conversion_4.setObjectName("pushButton_Conversion_4")
self.gridLayout_276.addWidget(self.pushButton_Conversion_4, 1, 3, 1, 1)
self.modis_conversion = QtWidgets.QToolButton(self.tab_MODIS)
font = QtGui.QFont()
font.setBold(True)
font.setWeight(75)
self.modis_conversion.setFont(font)
self.modis_conversion.setLayoutDirection(QtCore.Qt.RightToLeft)
self.modis_conversion.setStyleSheet("margin: 0px;padding: 0px;")
self.modis_conversion.setIcon(icon48)
self.modis_conversion.setIconSize(QtCore.QSize(34, 34))
self.modis_conversion.setToolButtonStyle(QtCore.Qt.ToolButtonTextBesideIcon)
self.modis_conversion.setObjectName("modis_conversion")
self.gridLayout_276.addWidget(self.modis_conversion, 1, 2, 1, 1)
self.gridLayout_70.addLayout(self.gridLayout_276, 2, 0, 1, 1)
self.tabWidget_preprocessing.addTab(self.tab_MODIS, "")
self.tab_spectral_distance = QtWidgets.QWidget()
self.tab_spectral_distance.setObjectName("tab_spectral_distance")
self.gridLayout_61 = QtWidgets.QGridLayout(self.tab_spectral_distance)
self.gridLayout_61.setObjectName("gridLayout_61")
self.gridLayout_119 = QtWidgets.QGridLayout()
self.gridLayout_119.setObjectName("gridLayout_119")
self.label_142 = QtWidgets.QLabel(self.tab_spectral_distance)
self.label_142.setStyleSheet("background-color : #656565; color : white")
self.label_142.setFrameShape(QtWidgets.QFrame.Panel)
self.label_142.setFrameShadow(QtWidgets.QFrame.Sunken)
self.label_142.setObjectName("label_142")
self.gridLayout_119.addWidget(self.label_142, 0, 0, 1, 1)
self.gridLayout_61.addLayout(self.gridLayout_119, 0, 0, 1, 1)
self.horizontalLayout_22 = QtWidgets.QHBoxLayout()
self.horizontalLayout_22.setObjectName("horizontalLayout_22")
self.label_64 = QtWidgets.QLabel(self.tab_spectral_distance)
sizePolicy = QtWidgets.QSizePolicy(QtWidgets.QSizePolicy.Minimum, QtWidgets.QSizePolicy.Preferred)
sizePolicy.setHorizontalStretch(0)
sizePolicy.setVerticalStretch(0)
sizePolicy.setHeightForWidth(self.label_64.sizePolicy().hasHeightForWidth())
self.label_64.setSizePolicy(sizePolicy)
self.label_64.setMinimumSize(QtCore.QSize(229, 0))
self.label_64.setFrameShadow(QtWidgets.QFrame.Sunken)
self.label_64.setAlignment(QtCore.Qt.AlignLeading|QtCore.Qt.AlignLeft|QtCore.Qt.AlignVCenter)
self.label_64.setObjectName("label_64")
self.horizontalLayout_22.addWidget(self.label_64)
self.vector_name_combo = QtWidgets.QComboBox(self.tab_spectral_distance)
sizePolicy = QtWidgets.QSizePolicy(QtWidgets.QSizePolicy.Preferred, QtWidgets.QSizePolicy.Fixed)
sizePolicy.setHorizontalStretch(1)
sizePolicy.setVerticalStretch(0)
sizePolicy.setHeightForWidth(self.vector_name_combo.sizePolicy().hasHeightForWidth())
self.vector_name_combo.setSizePolicy(sizePolicy)
self.vector_name_combo.setObjectName("vector_name_combo")
self.horizontalLayout_22.addWidget(self.vector_name_combo)
self.toolButton_reload_16 = QtWidgets.QToolButton(self.tab_spectral_distance)
self.toolButton_reload_16.setStyleSheet("margin: 0px;padding: 0px;")
self.toolButton_reload_16.setIcon(icon55)
self.toolButton_reload_16.setIconSize(QtCore.QSize(22, 22))
self.toolButton_reload_16.setObjectName("toolButton_reload_16")
self.horizontalLayout_22.addWidget(self.toolButton_reload_16)
self.gridLayout_61.addLayout(self.horizontalLayout_22, 1, 0, 1, 1)
self.horizontalLayout_19 = QtWidgets.QHBoxLayout()
self.horizontalLayout_19.setObjectName("horizontalLayout_19")
self.field_checkBox = QtWidgets.QCheckBox(self.tab_spectral_distance)
self.field_checkBox.setChecked(True)
self.field_checkBox.setObjectName("field_checkBox")
self.horizontalLayout_19.addWidget(self.field_checkBox)
self.field_comboBox = QtWidgets.QComboBox(self.tab_spectral_distance)
sizePolicy = QtWidgets.QSizePolicy(QtWidgets.QSizePolicy.Preferred, QtWidgets.QSizePolicy.Fixed)
sizePolicy.setHorizontalStretch(1)
sizePolicy.setVerticalStretch(0)
sizePolicy.setHeightForWidth(self.field_comboBox.sizePolicy().hasHeightForWidth())
self.field_comboBox.setSizePolicy(sizePolicy)
self.field_comboBox.setObjectName("field_comboBox")
self.horizontalLayout_19.addWidget(self.field_comboBox)
self.gridLayout_61.addLayout(self.horizontalLayout_19, 2, 0, 1, 1)
self.horizontalLayout_25 = QtWidgets.QHBoxLayout()
self.horizontalLayout_25.setObjectName("horizontalLayout_25")
self.constant_value_checkBox = QtWidgets.QCheckBox(self.tab_spectral_distance)
self.constant_value_checkBox.setObjectName("constant_value_checkBox")
self.horizontalLayout_25.addWidget(self.constant_value_checkBox)
self.constant_value_spinBox = QtWidgets.QSpinBox(self.tab_spectral_distance)
self.constant_value_spinBox.setMinimum(-100000)
self.constant_value_spinBox.setMaximum(100000)
self.constant_value_spinBox.setProperty("value", 1)
self.constant_value_spinBox.setObjectName("constant_value_spinBox")
self.horizontalLayout_25.addWidget(self.constant_value_spinBox)
self.gridLayout_61.addLayout(self.horizontalLayout_25, 3, 0, 1, 1)
self.horizontalLayout_20 = QtWidgets.QHBoxLayout()
self.horizontalLayout_20.setObjectName("horizontalLayout_20")
self.label_157 = QtWidgets.QLabel(self.tab_spectral_distance)
sizePolicy = QtWidgets.QSizePolicy(QtWidgets.QSizePolicy.Minimum, QtWidgets.QSizePolicy.Preferred)
sizePolicy.setHorizontalStretch(0)
sizePolicy.setVerticalStretch(0)
sizePolicy.setHeightForWidth(self.label_157.sizePolicy().hasHeightForWidth())
self.label_157.setSizePolicy(sizePolicy)
self.label_157.setMinimumSize(QtCore.QSize(229, 0))
self.label_157.setFrameShadow(QtWidgets.QFrame.Sunken)
self.label_157.setAlignment(QtCore.Qt.AlignLeading|QtCore.Qt.AlignLeft|QtCore.Qt.AlignVCenter)
self.label_157.setObjectName("label_157")
self.horizontalLayout_20.addWidget(self.label_157)
self.conversion_type_combo = QtWidgets.QComboBox(self.tab_spectral_distance)
sizePolicy = QtWidgets.QSizePolicy(QtWidgets.QSizePolicy.Preferred, QtWidgets.QSizePolicy.Fixed)
sizePolicy.setHorizontalStretch(1)
sizePolicy.setVerticalStretch(0)
sizePolicy.setHeightForWidth(self.conversion_type_combo.sizePolicy().hasHeightForWidth())
self.conversion_type_combo.setSizePolicy(sizePolicy)
self.conversion_type_combo.setObjectName("conversion_type_combo")
self.horizontalLayout_20.addWidget(self.conversion_type_combo)
self.gridLayout_61.addLayout(self.horizontalLayout_20, 4, 0, 1, 1)
self.gridLayout_194 = QtWidgets.QGridLayout()
self.gridLayout_194.setObjectName("gridLayout_194")
self.label_156 = QtWidgets.QLabel(self.tab_spectral_distance)
sizePolicy = QtWidgets.QSizePolicy(QtWidgets.QSizePolicy.Minimum, QtWidgets.QSizePolicy.Preferred)
sizePolicy.setHorizontalStretch(0)
sizePolicy.setVerticalStretch(0)
sizePolicy.setHeightForWidth(self.label_156.sizePolicy().hasHeightForWidth())
self.label_156.setSizePolicy(sizePolicy)
self.label_156.setMinimumSize(QtCore.QSize(229, 0))
self.label_156.setFrameShadow(QtWidgets.QFrame.Sunken)
self.label_156.setAlignment(QtCore.Qt.AlignLeading|QtCore.Qt.AlignLeft|QtCore.Qt.AlignVCenter)
self.label_156.setObjectName("label_156")
self.gridLayout_194.addWidget(self.label_156, 0, 0, 1, 1)
self.toolButton_reload_17 = QtWidgets.QToolButton(self.tab_spectral_distance)
self.toolButton_reload_17.setStyleSheet("margin: 0px;padding: 0px;")
self.toolButton_reload_17.setIcon(icon55)
self.toolButton_reload_17.setIconSize(QtCore.QSize(22, 22))
self.toolButton_reload_17.setObjectName("toolButton_reload_17")
self.gridLayout_194.addWidget(self.toolButton_reload_17, 0, 2, 1, 1)
self.reference_raster_name_combo = QtWidgets.QComboBox(self.tab_spectral_distance)
sizePolicy = QtWidgets.QSizePolicy(QtWidgets.QSizePolicy.Preferred, QtWidgets.QSizePolicy.Fixed)
sizePolicy.setHorizontalStretch(1)
sizePolicy.setVerticalStretch(0)
sizePolicy.setHeightForWidth(self.reference_raster_name_combo.sizePolicy().hasHeightForWidth())
self.reference_raster_name_combo.setSizePolicy(sizePolicy)
self.reference_raster_name_combo.setObjectName("reference_raster_name_combo")
self.gridLayout_194.addWidget(self.reference_raster_name_combo, 0, 1, 1, 1)
self.gridLayout_61.addLayout(self.gridLayout_194, 5, 0, 1, 1)
self.horizontalLayout_47 = QtWidgets.QHBoxLayout()
self.horizontalLayout_47.setObjectName("horizontalLayout_47")
self.extent_checkBox_2 = QtWidgets.QCheckBox(self.tab_spectral_distance)
self.extent_checkBox_2.setObjectName("extent_checkBox_2")
self.horizontalLayout_47.addWidget(self.extent_checkBox_2)
self.gridLayout_61.addLayout(self.horizontalLayout_47, 6, 0, 1, 1)
self.gridLayout_210 = QtWidgets.QGridLayout()
self.gridLayout_210.setObjectName("gridLayout_210")
spacerItem55 = QtWidgets.QSpacerItem(20, 40, QtWidgets.QSizePolicy.Minimum, QtWidgets.QSizePolicy.Expanding)
self.gridLayout_210.addItem(spacerItem55, 0, 2, 1, 1)
self.label_167 = QtWidgets.QLabel(self.tab_spectral_distance)
self.label_167.setStyleSheet("background-color : #656565; color : white")
self.label_167.setFrameShape(QtWidgets.QFrame.Panel)
self.label_167.setFrameShadow(QtWidgets.QFrame.Sunken)
self.label_167.setAlignment(QtCore.Qt.AlignLeading|QtCore.Qt.AlignLeft|QtCore.Qt.AlignVCenter)
self.label_167.setObjectName("label_167")
self.gridLayout_210.addWidget(self.label_167, 1, 0, 1, 3)
spacerItem56 = QtWidgets.QSpacerItem(40, 20, QtWidgets.QSizePolicy.Expanding, QtWidgets.QSizePolicy.Minimum)
self.gridLayout_210.addItem(spacerItem56, 2, 0, 1, 1)
self.convert_vector_toolButton = QtWidgets.QToolButton(self.tab_spectral_distance)
font = QtGui.QFont()
font.setBold(True)
font.setWeight(75)
self.convert_vector_toolButton.setFont(font)
self.convert_vector_toolButton.setLayoutDirection(QtCore.Qt.RightToLeft)
self.convert_vector_toolButton.setStyleSheet("margin: 0px;padding: 0px;")
self.convert_vector_toolButton.setIcon(icon64)
self.convert_vector_toolButton.setIconSize(QtCore.QSize(34, 34))
self.convert_vector_toolButton.setToolButtonStyle(QtCore.Qt.ToolButtonTextBesideIcon)
self.convert_vector_toolButton.setObjectName("convert_vector_toolButton")
self.gridLayout_210.addWidget(self.convert_vector_toolButton, 2, 2, 1, 1)
self.vector_to_raster = QtWidgets.QToolButton(self.tab_spectral_distance)
font = QtGui.QFont()
font.setBold(True)
font.setWeight(75)
self.vector_to_raster.setFont(font)
self.vector_to_raster.setLayoutDirection(QtCore.Qt.RightToLeft)
self.vector_to_raster.setStyleSheet("margin: 0px;padding: 0px;")
self.vector_to_raster.setIcon(icon48)
self.vector_to_raster.setIconSize(QtCore.QSize(34, 34))
self.vector_to_raster.setToolButtonStyle(QtCore.Qt.ToolButtonTextBesideIcon)
self.vector_to_raster.setObjectName("vector_to_raster")
self.gridLayout_210.addWidget(self.vector_to_raster, 2, 1, 1, 1)
self.gridLayout_61.addLayout(self.gridLayout_210, 7, 0, 1, 1)
self.tabWidget_preprocessing.addTab(self.tab_spectral_distance, "")
self.tab_clip = QtWidgets.QWidget()
self.tab_clip.setObjectName("tab_clip")
self.gridLayout_58 = QtWidgets.QGridLayout(self.tab_clip)
self.gridLayout_58.setObjectName("gridLayout_58")
self.gridLayout_51 = QtWidgets.QGridLayout()
self.gridLayout_51.setObjectName("gridLayout_51")
self.label_128 = QtWidgets.QLabel(self.tab_clip)
self.label_128.setStyleSheet("background-color : #656565; color : white")
self.label_128.setFrameShape(QtWidgets.QFrame.Panel)
self.label_128.setFrameShadow(QtWidgets.QFrame.Sunken)
self.label_128.setObjectName("label_128")
self.gridLayout_51.addWidget(self.label_128, 0, 0, 1, 1)
self.gridLayout_58.addLayout(self.gridLayout_51, 0, 0, 1, 1)
self.gridLayout_24 = QtWidgets.QGridLayout()
self.gridLayout_24.setObjectName("gridLayout_24")
self.nodata_spinBox = QtWidgets.QSpinBox(self.tab_clip)
self.nodata_spinBox.setMinimum(-2147483647)
self.nodata_spinBox.setMaximum(2147483647)
self.nodata_spinBox.setObjectName("nodata_spinBox")
self.gridLayout_24.addWidget(self.nodata_spinBox, 1, 1, 1, 1)
self.band_set_comb_spinBox_2 = QtWidgets.QSpinBox(self.tab_clip)
self.band_set_comb_spinBox_2.setMinimum(1)
self.band_set_comb_spinBox_2.setMaximum(100000)
self.band_set_comb_spinBox_2.setObjectName("band_set_comb_spinBox_2")
self.gridLayout_24.addWidget(self.band_set_comb_spinBox_2, 0, 1, 1, 1)
self.label_251 = QtWidgets.QLabel(self.tab_clip)
sizePolicy = QtWidgets.QSizePolicy(QtWidgets.QSizePolicy.Minimum, QtWidgets.QSizePolicy.Preferred)
sizePolicy.setHorizontalStretch(0)
sizePolicy.setVerticalStretch(0)
sizePolicy.setHeightForWidth(self.label_251.sizePolicy().hasHeightForWidth())
self.label_251.setSizePolicy(sizePolicy)
font = QtGui.QFont()
font.setBold(False)
font.setWeight(50)
self.label_251.setFont(font)
self.label_251.setFrameShadow(QtWidgets.QFrame.Sunken)
self.label_251.setAlignment(QtCore.Qt.AlignLeading|QtCore.Qt.AlignLeft|QtCore.Qt.AlignVCenter)
self.label_251.setObjectName("label_251")
self.gridLayout_24.addWidget(self.label_251, 0, 0, 1, 1)
self.label_62 = QtWidgets.QLabel(self.tab_clip)
sizePolicy = QtWidgets.QSizePolicy(QtWidgets.QSizePolicy.Minimum, QtWidgets.QSizePolicy.Preferred)
sizePolicy.setHorizontalStretch(0)
sizePolicy.setVerticalStretch(0)
sizePolicy.setHeightForWidth(self.label_62.sizePolicy().hasHeightForWidth())
self.label_62.setSizePolicy(sizePolicy)
self.label_62.setMinimumSize(QtCore.QSize(150, 0))
self.label_62.setMaximumSize(QtCore.QSize(100, 16777215))
font = QtGui.QFont()
font.setBold(False)
font.setWeight(50)
self.label_62.setFont(font)
self.label_62.setFrameShadow(QtWidgets.QFrame.Sunken)
self.label_62.setAlignment(QtCore.Qt.AlignLeading|QtCore.Qt.AlignLeft|QtCore.Qt.AlignVCenter)
self.label_62.setObjectName("label_62")
self.gridLayout_24.addWidget(self.label_62, 2, 0, 1, 1)
self.label_16 = QtWidgets.QLabel(self.tab_clip)
self.label_16.setFrameShadow(QtWidgets.QFrame.Sunken)
self.label_16.setObjectName("label_16")
self.gridLayout_24.addWidget(self.label_16, 1, 0, 1, 1)
spacerItem57 = QtWidgets.QSpacerItem(40, 20, QtWidgets.QSizePolicy.Expanding, QtWidgets.QSizePolicy.Minimum)
self.gridLayout_24.addItem(spacerItem57, 1, 2, 1, 1)
self.output_clip_name_lineEdit = QtWidgets.QLineEdit(self.tab_clip)
self.output_clip_name_lineEdit.setMaxLength(10)
self.output_clip_name_lineEdit.setObjectName("output_clip_name_lineEdit")
self.gridLayout_24.addWidget(self.output_clip_name_lineEdit, 2, 1, 1, 2)
self.gridLayout_58.addLayout(self.gridLayout_24, 1, 0, 1, 1)
self.gridLayout_20 = QtWidgets.QGridLayout()
self.gridLayout_20.setObjectName("gridLayout_20")
spacerItem58 = QtWidgets.QSpacerItem(40, 20, QtWidgets.QSizePolicy.Expanding, QtWidgets.QSizePolicy.Minimum)
self.gridLayout_20.addItem(spacerItem58, 1, 8, 1, 1)
self.LX_lineEdit = QtWidgets.QLineEdit(self.tab_clip)
self.LX_lineEdit.setMaxLength(10)
self.LX_lineEdit.setObjectName("LX_lineEdit")
self.gridLayout_20.addWidget(self.LX_lineEdit, 1, 6, 1, 1)
self.UX_lineEdit = QtWidgets.QLineEdit(self.tab_clip)
self.UX_lineEdit.setMaxLength(10)
self.UX_lineEdit.setObjectName("UX_lineEdit")
self.gridLayout_20.addWidget(self.UX_lineEdit, 1, 3, 1, 1)
self.UY_lineEdit = QtWidgets.QLineEdit(self.tab_clip)
self.UY_lineEdit.setMaxLength(10)
self.UY_lineEdit.setObjectName("UY_lineEdit")
self.gridLayout_20.addWidget(self.UY_lineEdit, 1, 4, 1, 1)
self.LY_lineEdit = QtWidgets.QLineEdit(self.tab_clip)
self.LY_lineEdit.setMaxLength(10)
self.LY_lineEdit.setObjectName("LY_lineEdit")
self.gridLayout_20.addWidget(self.LY_lineEdit, 1, 7, 1, 1)
self.label_12 = QtWidgets.QLabel(self.tab_clip)
self.label_12.setFrameShadow(QtWidgets.QFrame.Sunken)
self.label_12.setAlignment(QtCore.Qt.AlignCenter)
self.label_12.setObjectName("label_12")
self.gridLayout_20.addWidget(self.label_12, 1, 5, 1, 1)
self.selectUL_toolButton = QtWidgets.QToolButton(self.tab_clip)
self.selectUL_toolButton.setStyleSheet("margin: 0px;padding: 0px;")
self.selectUL_toolButton.setIcon(icon77)
self.selectUL_toolButton.setIconSize(QtCore.QSize(22, 22))
self.selectUL_toolButton.setObjectName("selectUL_toolButton")
self.gridLayout_20.addWidget(self.selectUL_toolButton, 1, 10, 1, 1)
self.label_29 = QtWidgets.QLabel(self.tab_clip)
self.label_29.setStyleSheet("background-color : #656565; color : white")
self.label_29.setFrameShape(QtWidgets.QFrame.Panel)
self.label_29.setFrameShadow(QtWidgets.QFrame.Sunken)
self.label_29.setAlignment(QtCore.Qt.AlignLeading|QtCore.Qt.AlignLeft|QtCore.Qt.AlignVCenter)
self.label_29.setObjectName("label_29")
self.gridLayout_20.addWidget(self.label_29, 0, 0, 1, 11)
self.show_area_radioButton_3 = QtWidgets.QRadioButton(self.tab_clip)
self.show_area_radioButton_3.setChecked(True)
self.show_area_radioButton_3.setAutoExclusive(False)
self.show_area_radioButton_3.setObjectName("show_area_radioButton_3")
self.gridLayout_20.addWidget(self.show_area_radioButton_3, 1, 9, 1, 1)
self.label_11 = QtWidgets.QLabel(self.tab_clip)
self.label_11.setFrameShadow(QtWidgets.QFrame.Sunken)
self.label_11.setAlignment(QtCore.Qt.AlignCenter)
self.label_11.setObjectName("label_11")
self.gridLayout_20.addWidget(self.label_11, 1, 0, 1, 3)
self.gridLayout_58.addLayout(self.gridLayout_20, 2, 0, 1, 1)
self.gridLayout_22 = QtWidgets.QGridLayout()
self.gridLayout_22.setObjectName("gridLayout_22")
self.shapefile_comboBox = QtWidgets.QComboBox(self.tab_clip)
sizePolicy = QtWidgets.QSizePolicy(QtWidgets.QSizePolicy.Preferred, QtWidgets.QSizePolicy.Fixed)
sizePolicy.setHorizontalStretch(1)
sizePolicy.setVerticalStretch(0)
sizePolicy.setHeightForWidth(self.shapefile_comboBox.sizePolicy().hasHeightForWidth())
self.shapefile_comboBox.setSizePolicy(sizePolicy)
self.shapefile_comboBox.setObjectName("shapefile_comboBox")
self.gridLayout_22.addWidget(self.shapefile_comboBox, 0, 1, 1, 1)
self.shapefile_checkBox = QtWidgets.QCheckBox(self.tab_clip)
self.shapefile_checkBox.setObjectName("shapefile_checkBox")
self.gridLayout_22.addWidget(self.shapefile_checkBox, 0, 0, 1, 1)
self.toolButton_reload_8 = QtWidgets.QToolButton(self.tab_clip)
self.toolButton_reload_8.setStyleSheet("margin: 0px;padding: 0px;")
self.toolButton_reload_8.setIcon(icon55)
self.toolButton_reload_8.setIconSize(QtCore.QSize(22, 22))
self.toolButton_reload_8.setObjectName("toolButton_reload_8")
self.gridLayout_22.addWidget(self.toolButton_reload_8, 0, 2, 1, 1)
self.temporary_ROI_checkBox = QtWidgets.QCheckBox(self.tab_clip)
self.temporary_ROI_checkBox.setObjectName("temporary_ROI_checkBox")
self.gridLayout_22.addWidget(self.temporary_ROI_checkBox, 2, 0, 1, 1)
self.gridLayout_19 = QtWidgets.QGridLayout()
self.gridLayout_19.setObjectName("gridLayout_19")
self.vector_field_checkBox = QtWidgets.QCheckBox(self.tab_clip)
self.vector_field_checkBox.setObjectName("vector_field_checkBox")
self.gridLayout_19.addWidget(self.vector_field_checkBox, 0, 0, 1, 1)
self.class_field_comboBox_3 = QtWidgets.QComboBox(self.tab_clip)
sizePolicy = QtWidgets.QSizePolicy(QtWidgets.QSizePolicy.MinimumExpanding, QtWidgets.QSizePolicy.Preferred)
sizePolicy.setHorizontalStretch(1)
sizePolicy.setVerticalStretch(0)
sizePolicy.setHeightForWidth(self.class_field_comboBox_3.sizePolicy().hasHeightForWidth())
self.class_field_comboBox_3.setSizePolicy(sizePolicy)
self.class_field_comboBox_3.setObjectName("class_field_comboBox_3")
self.gridLayout_19.addWidget(self.class_field_comboBox_3, 0, 1, 1, 1)
self.gridLayout_22.addLayout(self.gridLayout_19, 1, 1, 1, 1)
self.gridLayout_58.addLayout(self.gridLayout_22, 3, 0, 1, 1)
self.gridLayout_28 = QtWidgets.QGridLayout()
self.gridLayout_28.setObjectName("gridLayout_28")
spacerItem59 = QtWidgets.QSpacerItem(40, 20, QtWidgets.QSizePolicy.Expanding, QtWidgets.QSizePolicy.Minimum)
self.gridLayout_28.addItem(spacerItem59, 2, 0, 1, 1)
self.label_164 = QtWidgets.QLabel(self.tab_clip)
self.label_164.setStyleSheet("background-color : #656565; color : white")
self.label_164.setFrameShape(QtWidgets.QFrame.Panel)
self.label_164.setFrameShadow(QtWidgets.QFrame.Sunken)
self.label_164.setAlignment(QtCore.Qt.AlignLeading|QtCore.Qt.AlignLeft|QtCore.Qt.AlignVCenter)
self.label_164.setObjectName("label_164")
self.gridLayout_28.addWidget(self.label_164, 1, 0, 1, 3)
self.clip_Button = QtWidgets.QToolButton(self.tab_clip)
font = QtGui.QFont()
font.setBold(True)
font.setWeight(75)
self.clip_Button.setFont(font)
self.clip_Button.setLayoutDirection(QtCore.Qt.RightToLeft)
self.clip_Button.setStyleSheet("margin: 0px;padding: 0px;")
self.clip_Button.setIcon(icon64)
self.clip_Button.setIconSize(QtCore.QSize(34, 34))
self.clip_Button.setToolButtonStyle(QtCore.Qt.ToolButtonTextBesideIcon)
self.clip_Button.setObjectName("clip_Button")
self.gridLayout_28.addWidget(self.clip_Button, 2, 2, 1, 1)
spacerItem60 = QtWidgets.QSpacerItem(20, 40, QtWidgets.QSizePolicy.Minimum, QtWidgets.QSizePolicy.Expanding)
self.gridLayout_28.addItem(spacerItem60, 0, 0, 1, 1)
self.clip_multiple_rasters = QtWidgets.QToolButton(self.tab_clip)
font = QtGui.QFont()
font.setBold(True)
font.setWeight(75)
self.clip_multiple_rasters.setFont(font)
self.clip_multiple_rasters.setLayoutDirection(QtCore.Qt.RightToLeft)
self.clip_multiple_rasters.setStyleSheet("margin: 0px;padding: 0px;")
self.clip_multiple_rasters.setIcon(icon48)
self.clip_multiple_rasters.setIconSize(QtCore.QSize(34, 34))
self.clip_multiple_rasters.setToolButtonStyle(QtCore.Qt.ToolButtonTextBesideIcon)
self.clip_multiple_rasters.setObjectName("clip_multiple_rasters")
self.gridLayout_28.addWidget(self.clip_multiple_rasters, 2, 1, 1, 1)
self.gridLayout_58.addLayout(self.gridLayout_28, 4, 0, 1, 1)
self.tabWidget_preprocessing.addTab(self.tab_clip, "")
self.tab_reproject_bands = QtWidgets.QWidget()
self.tab_reproject_bands.setObjectName("tab_reproject_bands")
self.gridLayout_295 = QtWidgets.QGridLayout(self.tab_reproject_bands)
self.gridLayout_295.setObjectName("gridLayout_295")
self.gridLayout_289 = QtWidgets.QGridLayout()
self.gridLayout_289.setObjectName("gridLayout_289")
self.band_set_comb_spinBox_14 = QtWidgets.QSpinBox(self.tab_reproject_bands)
self.band_set_comb_spinBox_14.setMinimum(1)
self.band_set_comb_spinBox_14.setMaximum(100000)
self.band_set_comb_spinBox_14.setObjectName("band_set_comb_spinBox_14")
self.gridLayout_289.addWidget(self.band_set_comb_spinBox_14, 1, 1, 1, 1)
self.label_264 = QtWidgets.QLabel(self.tab_reproject_bands)
sizePolicy = QtWidgets.QSizePolicy(QtWidgets.QSizePolicy.Minimum, QtWidgets.QSizePolicy.Preferred)
sizePolicy.setHorizontalStretch(0)
sizePolicy.setVerticalStretch(0)
sizePolicy.setHeightForWidth(self.label_264.sizePolicy().hasHeightForWidth())
self.label_264.setSizePolicy(sizePolicy)
self.label_264.setFrameShadow(QtWidgets.QFrame.Sunken)
self.label_264.setAlignment(QtCore.Qt.AlignLeading|QtCore.Qt.AlignLeft|QtCore.Qt.AlignVCenter)
self.label_264.setObjectName("label_264")
self.gridLayout_289.addWidget(self.label_264, 1, 0, 1, 1)
spacerItem61 = QtWidgets.QSpacerItem(605, 20, QtWidgets.QSizePolicy.Expanding, QtWidgets.QSizePolicy.Minimum)
self.gridLayout_289.addItem(spacerItem61, 1, 2, 1, 1)
self.label_249 = QtWidgets.QLabel(self.tab_reproject_bands)
sizePolicy = QtWidgets.QSizePolicy(QtWidgets.QSizePolicy.Expanding, QtWidgets.QSizePolicy.Preferred)
sizePolicy.setHorizontalStretch(0)
sizePolicy.setVerticalStretch(0)
sizePolicy.setHeightForWidth(self.label_249.sizePolicy().hasHeightForWidth())
self.label_249.setSizePolicy(sizePolicy)
self.label_249.setStyleSheet("background-color : #656565; color : white")
self.label_249.setFrameShape(QtWidgets.QFrame.Panel)
self.label_249.setFrameShadow(QtWidgets.QFrame.Sunken)
self.label_249.setObjectName("label_249")
self.gridLayout_289.addWidget(self.label_249, 0, 0, 1, 3)
self.gridLayout_295.addLayout(self.gridLayout_289, 0, 0, 1, 1)
self.gridLayout_291 = QtWidgets.QGridLayout()
self.gridLayout_291.setObjectName("gridLayout_291")
self.raster_align_comboBox = QtWidgets.QComboBox(self.tab_reproject_bands)
sizePolicy = QtWidgets.QSizePolicy(QtWidgets.QSizePolicy.Preferred, QtWidgets.QSizePolicy.Fixed)
sizePolicy.setHorizontalStretch(1)
sizePolicy.setVerticalStretch(0)
sizePolicy.setHeightForWidth(self.raster_align_comboBox.sizePolicy().hasHeightForWidth())
self.raster_align_comboBox.setSizePolicy(sizePolicy)
self.raster_align_comboBox.setObjectName("raster_align_comboBox")
self.gridLayout_291.addWidget(self.raster_align_comboBox, 0, 2, 1, 1)
self.use_align_raster_checkBox = QtWidgets.QCheckBox(self.tab_reproject_bands)
self.use_align_raster_checkBox.setObjectName("use_align_raster_checkBox")
self.gridLayout_291.addWidget(self.use_align_raster_checkBox, 0, 0, 1, 1)
self.toolButton_reload_25 = QtWidgets.QToolButton(self.tab_reproject_bands)
self.toolButton_reload_25.setStyleSheet("margin: 0px;padding: 0px;")
self.toolButton_reload_25.setIcon(icon55)
self.toolButton_reload_25.setIconSize(QtCore.QSize(22, 22))
self.toolButton_reload_25.setObjectName("toolButton_reload_25")
self.gridLayout_291.addWidget(self.toolButton_reload_25, 0, 3, 1, 1)
self.same_extent_raster_checkBox = QtWidgets.QCheckBox(self.tab_reproject_bands)
self.same_extent_raster_checkBox.setObjectName("same_extent_raster_checkBox")
self.gridLayout_291.addWidget(self.same_extent_raster_checkBox, 0, 1, 1, 1)
self.gridLayout_295.addLayout(self.gridLayout_291, 1, 0, 1, 1)
self.gridLayout_292 = QtWidgets.QGridLayout()
self.gridLayout_292.setObjectName("gridLayout_292")
self.epsg_code_lineEdit = QtWidgets.QLineEdit(self.tab_reproject_bands)
self.epsg_code_lineEdit.setText("")
self.epsg_code_lineEdit.setMaxLength(10)
self.epsg_code_lineEdit.setObjectName("epsg_code_lineEdit")
self.gridLayout_292.addWidget(self.epsg_code_lineEdit, 0, 1, 1, 1)
self.use_epsg_checkBox = QtWidgets.QCheckBox(self.tab_reproject_bands)
self.use_epsg_checkBox.setObjectName("use_epsg_checkBox")
self.gridLayout_292.addWidget(self.use_epsg_checkBox, 0, 0, 1, 1)
self.label_267 = QtWidgets.QLabel(self.tab_reproject_bands)
sizePolicy = QtWidgets.QSizePolicy(QtWidgets.QSizePolicy.MinimumExpanding, QtWidgets.QSizePolicy.Preferred)
sizePolicy.setHorizontalStretch(0)
sizePolicy.setVerticalStretch(0)
sizePolicy.setHeightForWidth(self.label_267.sizePolicy().hasHeightForWidth())
self.label_267.setSizePolicy(sizePolicy)
self.label_267.setFrameShadow(QtWidgets.QFrame.Sunken)
self.label_267.setAlignment(QtCore.Qt.AlignLeading|QtCore.Qt.AlignLeft|QtCore.Qt.AlignVCenter)
self.label_267.setObjectName("label_267")
self.gridLayout_292.addWidget(self.label_267, 0, 6, 1, 1)
self.label_266 = QtWidgets.QLabel(self.tab_reproject_bands)
sizePolicy = QtWidgets.QSizePolicy(QtWidgets.QSizePolicy.MinimumExpanding, QtWidgets.QSizePolicy.Preferred)
sizePolicy.setHorizontalStretch(0)
sizePolicy.setVerticalStretch(0)
sizePolicy.setHeightForWidth(self.label_266.sizePolicy().hasHeightForWidth())
self.label_266.setSizePolicy(sizePolicy)
self.label_266.setFrameShadow(QtWidgets.QFrame.Sunken)
self.label_266.setAlignment(QtCore.Qt.AlignLeading|QtCore.Qt.AlignLeft|QtCore.Qt.AlignVCenter)
self.label_266.setObjectName("label_266")
self.gridLayout_292.addWidget(self.label_266, 0, 3, 1, 1)
self.x_resolution_lineEdit = QtWidgets.QLineEdit(self.tab_reproject_bands)
self.x_resolution_lineEdit.setText("")
self.x_resolution_lineEdit.setMaxLength(10)
self.x_resolution_lineEdit.setObjectName("x_resolution_lineEdit")
self.gridLayout_292.addWidget(self.x_resolution_lineEdit, 0, 4, 1, 1)
self.y_resolution_lineEdit = QtWidgets.QLineEdit(self.tab_reproject_bands)
self.y_resolution_lineEdit.setText("")
self.y_resolution_lineEdit.setMaxLength(10)
self.y_resolution_lineEdit.setObjectName("y_resolution_lineEdit")
self.gridLayout_292.addWidget(self.y_resolution_lineEdit, 0, 7, 1, 1)
spacerItem62 = QtWidgets.QSpacerItem(40, 20, QtWidgets.QSizePolicy.Expanding, QtWidgets.QSizePolicy.Minimum)
self.gridLayout_292.addItem(spacerItem62, 0, 2, 1, 1)
# --- Reproject/resample raster bands tab (auto-generated by pyuic5; layout wiring only) ---
spacerItem63 = QtWidgets.QSpacerItem(40, 20, QtWidgets.QSizePolicy.Expanding, QtWidgets.QSizePolicy.Minimum)
self.gridLayout_292.addItem(spacerItem63, 0, 5, 1, 1)
self.gridLayout_295.addLayout(self.gridLayout_292, 2, 0, 1, 1)
# Resample option: checkbox plus free-text pixel-size entry (max 10 chars).
self.horizontalLayout_63 = QtWidgets.QHBoxLayout()
self.horizontalLayout_63.setObjectName("horizontalLayout_63")
self.resample_checkBox = QtWidgets.QCheckBox(self.tab_reproject_bands)
self.resample_checkBox.setObjectName("resample_checkBox")
self.horizontalLayout_63.addWidget(self.resample_checkBox)
self.resample_lineEdit = QtWidgets.QLineEdit(self.tab_reproject_bands)
self.resample_lineEdit.setMaxLength(10)
self.resample_lineEdit.setObjectName("resample_lineEdit")
self.horizontalLayout_63.addWidget(self.resample_lineEdit)
self.gridLayout_295.addLayout(self.horizontalLayout_63, 3, 0, 1, 1)
# Resampling-method and output raster-type selectors on one row.
self.horizontalLayout_64 = QtWidgets.QHBoxLayout()
self.horizontalLayout_64.setObjectName("horizontalLayout_64")
self.label_269 = QtWidgets.QLabel(self.tab_reproject_bands)
sizePolicy = QtWidgets.QSizePolicy(QtWidgets.QSizePolicy.MinimumExpanding, QtWidgets.QSizePolicy.Preferred)
sizePolicy.setHorizontalStretch(0)
sizePolicy.setVerticalStretch(0)
sizePolicy.setHeightForWidth(self.label_269.sizePolicy().hasHeightForWidth())
self.label_269.setSizePolicy(sizePolicy)
self.label_269.setFrameShadow(QtWidgets.QFrame.Sunken)
self.label_269.setAlignment(QtCore.Qt.AlignLeading|QtCore.Qt.AlignLeft|QtCore.Qt.AlignVCenter)
self.label_269.setObjectName("label_269")
self.horizontalLayout_64.addWidget(self.label_269)
# Combo with 9 resampling methods; item texts are filled in retranslateUi.
self.resampling_method_comboBox = QtWidgets.QComboBox(self.tab_reproject_bands)
sizePolicy = QtWidgets.QSizePolicy(QtWidgets.QSizePolicy.Preferred, QtWidgets.QSizePolicy.Fixed)
sizePolicy.setHorizontalStretch(1)
sizePolicy.setVerticalStretch(0)
sizePolicy.setHeightForWidth(self.resampling_method_comboBox.sizePolicy().hasHeightForWidth())
self.resampling_method_comboBox.setSizePolicy(sizePolicy)
self.resampling_method_comboBox.setObjectName("resampling_method_comboBox")
self.resampling_method_comboBox.addItem("")
self.resampling_method_comboBox.addItem("")
self.resampling_method_comboBox.addItem("")
self.resampling_method_comboBox.addItem("")
self.resampling_method_comboBox.addItem("")
self.resampling_method_comboBox.addItem("")
self.resampling_method_comboBox.addItem("")
self.resampling_method_comboBox.addItem("")
self.resampling_method_comboBox.addItem("")
self.horizontalLayout_64.addWidget(self.resampling_method_comboBox)
spacerItem64 = QtWidgets.QSpacerItem(40, 20, QtWidgets.QSizePolicy.Expanding, QtWidgets.QSizePolicy.Minimum)
self.horizontalLayout_64.addItem(spacerItem64)
self.label_270 = QtWidgets.QLabel(self.tab_reproject_bands)
sizePolicy = QtWidgets.QSizePolicy(QtWidgets.QSizePolicy.MinimumExpanding, QtWidgets.QSizePolicy.Preferred)
sizePolicy.setHorizontalStretch(0)
sizePolicy.setVerticalStretch(0)
sizePolicy.setHeightForWidth(self.label_270.sizePolicy().hasHeightForWidth())
self.label_270.setSizePolicy(sizePolicy)
self.label_270.setFrameShadow(QtWidgets.QFrame.Sunken)
self.label_270.setAlignment(QtCore.Qt.AlignLeading|QtCore.Qt.AlignLeft|QtCore.Qt.AlignVCenter)
self.label_270.setObjectName("label_270")
self.horizontalLayout_64.addWidget(self.label_270)
# Combo with 7 output raster types; item texts are filled in retranslateUi.
self.raster_type_combo_2 = QtWidgets.QComboBox(self.tab_reproject_bands)
sizePolicy = QtWidgets.QSizePolicy(QtWidgets.QSizePolicy.Maximum, QtWidgets.QSizePolicy.Fixed)
sizePolicy.setHorizontalStretch(1)
sizePolicy.setVerticalStretch(0)
sizePolicy.setHeightForWidth(self.raster_type_combo_2.sizePolicy().hasHeightForWidth())
self.raster_type_combo_2.setSizePolicy(sizePolicy)
self.raster_type_combo_2.setObjectName("raster_type_combo_2")
self.raster_type_combo_2.addItem("")
self.raster_type_combo_2.addItem("")
self.raster_type_combo_2.addItem("")
self.raster_type_combo_2.addItem("")
self.raster_type_combo_2.addItem("")
self.raster_type_combo_2.addItem("")
self.raster_type_combo_2.addItem("")
self.horizontalLayout_64.addWidget(self.raster_type_combo_2)
self.gridLayout_295.addLayout(self.horizontalLayout_64, 4, 0, 1, 1)
# Optional NoData override: checkbox + 32-bit signed spinbox (default -32768).
self.horizontalLayout_66 = QtWidgets.QHBoxLayout()
self.horizontalLayout_66.setObjectName("horizontalLayout_66")
self.change_nodata_checkBox = QtWidgets.QCheckBox(self.tab_reproject_bands)
self.change_nodata_checkBox.setObjectName("change_nodata_checkBox")
self.horizontalLayout_66.addWidget(self.change_nodata_checkBox)
self.nodata_spinBox_14 = QtWidgets.QSpinBox(self.tab_reproject_bands)
self.nodata_spinBox_14.setMinimum(-2147483647)
self.nodata_spinBox_14.setMaximum(2147483647)
self.nodata_spinBox_14.setProperty("value", -32768)
self.nodata_spinBox_14.setObjectName("nodata_spinBox_14")
self.horizontalLayout_66.addWidget(self.nodata_spinBox_14)
spacerItem65 = QtWidgets.QSpacerItem(40, 20, QtWidgets.QSizePolicy.Expanding, QtWidgets.QSizePolicy.Minimum)
self.horizontalLayout_66.addItem(spacerItem65)
self.gridLayout_295.addLayout(self.horizontalLayout_66, 5, 0, 1, 1)
# Output name prefix entry for the reprojected rasters.
self.horizontalLayout_68 = QtWidgets.QHBoxLayout()
self.horizontalLayout_68.setObjectName("horizontalLayout_68")
self.label_265 = QtWidgets.QLabel(self.tab_reproject_bands)
sizePolicy = QtWidgets.QSizePolicy(QtWidgets.QSizePolicy.Minimum, QtWidgets.QSizePolicy.Preferred)
sizePolicy.setHorizontalStretch(0)
sizePolicy.setVerticalStretch(0)
sizePolicy.setHeightForWidth(self.label_265.sizePolicy().hasHeightForWidth())
self.label_265.setSizePolicy(sizePolicy)
self.label_265.setMinimumSize(QtCore.QSize(229, 0))
self.label_265.setFrameShadow(QtWidgets.QFrame.Sunken)
self.label_265.setAlignment(QtCore.Qt.AlignLeading|QtCore.Qt.AlignLeft|QtCore.Qt.AlignVCenter)
self.label_265.setObjectName("label_265")
self.horizontalLayout_68.addWidget(self.label_265)
self.reproj_output_name_lineEdit = QtWidgets.QLineEdit(self.tab_reproject_bands)
self.reproj_output_name_lineEdit.setMaxLength(10)
self.reproj_output_name_lineEdit.setObjectName("reproj_output_name_lineEdit")
self.horizontalLayout_68.addWidget(self.reproj_output_name_lineEdit)
self.gridLayout_295.addLayout(self.horizontalLayout_68, 6, 0, 1, 1)
spacerItem66 = QtWidgets.QSpacerItem(20, 198, QtWidgets.QSizePolicy.Minimum, QtWidgets.QSizePolicy.Expanding)
self.gridLayout_295.addItem(spacerItem66, 7, 0, 1, 1)
# "Run" section header plus the two bold action buttons
# (add-to-batch with icon48, run-now with icon64 — icons are created earlier in setupUi).
self.gridLayout_290 = QtWidgets.QGridLayout()
self.gridLayout_290.setObjectName("gridLayout_290")
self.label_263 = QtWidgets.QLabel(self.tab_reproject_bands)
self.label_263.setStyleSheet("background-color : #656565; color : white")
self.label_263.setFrameShape(QtWidgets.QFrame.Panel)
self.label_263.setFrameShadow(QtWidgets.QFrame.Sunken)
self.label_263.setAlignment(QtCore.Qt.AlignLeading|QtCore.Qt.AlignLeft|QtCore.Qt.AlignVCenter)
self.label_263.setObjectName("label_263")
self.gridLayout_290.addWidget(self.label_263, 0, 0, 1, 2)
self.horizontalLayout_59 = QtWidgets.QHBoxLayout()
self.horizontalLayout_59.setObjectName("horizontalLayout_59")
spacerItem67 = QtWidgets.QSpacerItem(40, 20, QtWidgets.QSizePolicy.Expanding, QtWidgets.QSizePolicy.Minimum)
self.horizontalLayout_59.addItem(spacerItem67)
self.reproject_raster_bands = QtWidgets.QToolButton(self.tab_reproject_bands)
font = QtGui.QFont()
font.setBold(True)
font.setWeight(75)
self.reproject_raster_bands.setFont(font)
self.reproject_raster_bands.setLayoutDirection(QtCore.Qt.RightToLeft)
self.reproject_raster_bands.setStyleSheet("margin: 0px;padding: 0px;")
self.reproject_raster_bands.setIcon(icon48)
self.reproject_raster_bands.setIconSize(QtCore.QSize(34, 34))
self.reproject_raster_bands.setToolButtonStyle(QtCore.Qt.ToolButtonTextBesideIcon)
self.reproject_raster_bands.setObjectName("reproject_raster_bands")
self.horizontalLayout_59.addWidget(self.reproject_raster_bands)
self.reproject_Button = QtWidgets.QToolButton(self.tab_reproject_bands)
font = QtGui.QFont()
font.setBold(True)
font.setWeight(75)
self.reproject_Button.setFont(font)
self.reproject_Button.setLayoutDirection(QtCore.Qt.RightToLeft)
self.reproject_Button.setStyleSheet("margin: 0px;padding: 0px;")
self.reproject_Button.setIcon(icon64)
self.reproject_Button.setIconSize(QtCore.QSize(34, 34))
self.reproject_Button.setToolButtonStyle(QtCore.Qt.ToolButtonTextBesideIcon)
self.reproject_Button.setObjectName("reproject_Button")
self.horizontalLayout_59.addWidget(self.reproject_Button)
self.gridLayout_290.addLayout(self.horizontalLayout_59, 2, 0, 1, 2)
self.gridLayout_295.addLayout(self.gridLayout_290, 8, 0, 1, 1)
# Register the finished page as a tab; tab title is set in retranslateUi.
self.tabWidget_preprocessing.addTab(self.tab_reproject_bands, "")
# --- Split raster bands tab (auto-generated by pyuic5; layout wiring only) ---
self.tab_split_raster = QtWidgets.QWidget()
self.tab_split_raster.setObjectName("tab_split_raster")
self.gridLayout_57 = QtWidgets.QGridLayout(self.tab_split_raster)
self.gridLayout_57.setObjectName("gridLayout_57")
spacerItem68 = QtWidgets.QSpacerItem(20, 302, QtWidgets.QSizePolicy.Minimum, QtWidgets.QSizePolicy.Expanding)
self.gridLayout_57.addItem(spacerItem68, 3, 1, 1, 1)
# Input selection: raster combo + section-header label + reload button.
self.gridLayout_190 = QtWidgets.QGridLayout()
self.gridLayout_190.setObjectName("gridLayout_190")
self.raster_name_combo = QtWidgets.QComboBox(self.tab_split_raster)
sizePolicy = QtWidgets.QSizePolicy(QtWidgets.QSizePolicy.Preferred, QtWidgets.QSizePolicy.Fixed)
sizePolicy.setHorizontalStretch(1)
sizePolicy.setVerticalStretch(0)
sizePolicy.setHeightForWidth(self.raster_name_combo.sizePolicy().hasHeightForWidth())
self.raster_name_combo.setSizePolicy(sizePolicy)
self.raster_name_combo.setObjectName("raster_name_combo")
self.gridLayout_190.addWidget(self.raster_name_combo, 1, 1, 1, 1)
self.label_57 = QtWidgets.QLabel(self.tab_split_raster)
sizePolicy = QtWidgets.QSizePolicy(QtWidgets.QSizePolicy.Expanding, QtWidgets.QSizePolicy.Preferred)
sizePolicy.setHorizontalStretch(0)
sizePolicy.setVerticalStretch(0)
sizePolicy.setHeightForWidth(self.label_57.sizePolicy().hasHeightForWidth())
self.label_57.setSizePolicy(sizePolicy)
self.label_57.setStyleSheet("background-color : #656565; color : white")
self.label_57.setFrameShape(QtWidgets.QFrame.Panel)
self.label_57.setFrameShadow(QtWidgets.QFrame.Sunken)
self.label_57.setObjectName("label_57")
self.gridLayout_190.addWidget(self.label_57, 0, 0, 1, 3)
self.label_50 = QtWidgets.QLabel(self.tab_split_raster)
sizePolicy = QtWidgets.QSizePolicy(QtWidgets.QSizePolicy.Minimum, QtWidgets.QSizePolicy.Preferred)
sizePolicy.setHorizontalStretch(0)
sizePolicy.setVerticalStretch(0)
sizePolicy.setHeightForWidth(self.label_50.sizePolicy().hasHeightForWidth())
self.label_50.setSizePolicy(sizePolicy)
self.label_50.setMinimumSize(QtCore.QSize(229, 0))
self.label_50.setFrameShadow(QtWidgets.QFrame.Sunken)
self.label_50.setAlignment(QtCore.Qt.AlignLeading|QtCore.Qt.AlignLeft|QtCore.Qt.AlignVCenter)
self.label_50.setObjectName("label_50")
self.gridLayout_190.addWidget(self.label_50, 1, 0, 1, 1)
self.toolButton_reload_9 = QtWidgets.QToolButton(self.tab_split_raster)
self.toolButton_reload_9.setStyleSheet("margin: 0px;padding: 0px;")
self.toolButton_reload_9.setIcon(icon55)
self.toolButton_reload_9.setIconSize(QtCore.QSize(22, 22))
self.toolButton_reload_9.setObjectName("toolButton_reload_9")
self.gridLayout_190.addWidget(self.toolButton_reload_9, 1, 2, 1, 1)
self.gridLayout_57.addLayout(self.gridLayout_190, 0, 0, 1, 4)
self.label_165 = QtWidgets.QLabel(self.tab_split_raster)
self.label_165.setStyleSheet("background-color : #656565; color : white")
self.label_165.setFrameShape(QtWidgets.QFrame.Panel)
self.label_165.setFrameShadow(QtWidgets.QFrame.Sunken)
self.label_165.setAlignment(QtCore.Qt.AlignLeading|QtCore.Qt.AlignLeft|QtCore.Qt.AlignVCenter)
self.label_165.setObjectName("label_165")
self.gridLayout_57.addWidget(self.label_165, 4, 0, 1, 4)
# Output name prefix entry (max 10 chars).
self.horizontalLayout_57 = QtWidgets.QHBoxLayout()
self.horizontalLayout_57.setObjectName("horizontalLayout_57")
self.label_61 = QtWidgets.QLabel(self.tab_split_raster)
sizePolicy = QtWidgets.QSizePolicy(QtWidgets.QSizePolicy.Minimum, QtWidgets.QSizePolicy.Preferred)
sizePolicy.setHorizontalStretch(0)
sizePolicy.setVerticalStretch(0)
sizePolicy.setHeightForWidth(self.label_61.sizePolicy().hasHeightForWidth())
self.label_61.setSizePolicy(sizePolicy)
self.label_61.setMinimumSize(QtCore.QSize(229, 0))
self.label_61.setFrameShadow(QtWidgets.QFrame.Sunken)
self.label_61.setAlignment(QtCore.Qt.AlignLeading|QtCore.Qt.AlignLeft|QtCore.Qt.AlignVCenter)
self.label_61.setObjectName("label_61")
self.horizontalLayout_57.addWidget(self.label_61)
self.output_name_lineEdit = QtWidgets.QLineEdit(self.tab_split_raster)
self.output_name_lineEdit.setMaxLength(10)
self.output_name_lineEdit.setObjectName("output_name_lineEdit")
self.horizontalLayout_57.addWidget(self.output_name_lineEdit)
self.gridLayout_57.addLayout(self.horizontalLayout_57, 1, 0, 1, 4)
# Action row: add-to-batch (icon48) and run-now (icon64) tool buttons.
self.horizontalLayout_54 = QtWidgets.QHBoxLayout()
self.horizontalLayout_54.setObjectName("horizontalLayout_54")
spacerItem69 = QtWidgets.QSpacerItem(667, 38, QtWidgets.QSizePolicy.Expanding, QtWidgets.QSizePolicy.Minimum)
self.horizontalLayout_54.addItem(spacerItem69)
self.split_raster_bands = QtWidgets.QToolButton(self.tab_split_raster)
font = QtGui.QFont()
font.setBold(True)
font.setWeight(75)
self.split_raster_bands.setFont(font)
self.split_raster_bands.setLayoutDirection(QtCore.Qt.RightToLeft)
self.split_raster_bands.setStyleSheet("margin: 0px;padding: 0px;")
self.split_raster_bands.setIcon(icon48)
self.split_raster_bands.setIconSize(QtCore.QSize(34, 34))
self.split_raster_bands.setToolButtonStyle(QtCore.Qt.ToolButtonTextBesideIcon)
self.split_raster_bands.setObjectName("split_raster_bands")
self.horizontalLayout_54.addWidget(self.split_raster_bands)
self.split_Button = QtWidgets.QToolButton(self.tab_split_raster)
font = QtGui.QFont()
font.setBold(True)
font.setWeight(75)
self.split_Button.setFont(font)
self.split_Button.setLayoutDirection(QtCore.Qt.RightToLeft)
self.split_Button.setStyleSheet("margin: 0px;padding: 0px;")
self.split_Button.setIcon(icon64)
self.split_Button.setIconSize(QtCore.QSize(34, 34))
self.split_Button.setToolButtonStyle(QtCore.Qt.ToolButtonTextBesideIcon)
self.split_Button.setObjectName("split_Button")
self.horizontalLayout_54.addWidget(self.split_Button)
self.gridLayout_57.addLayout(self.horizontalLayout_54, 5, 0, 1, 4)
# Register the page; tab title is set in retranslateUi.
self.tabWidget_preprocessing.addTab(self.tab_split_raster, "")
# --- Stack raster bands tab (auto-generated by pyuic5; layout wiring only) ---
self.tab_stack_bands = QtWidgets.QWidget()
self.tab_stack_bands.setObjectName("tab_stack_bands")
self.gridLayout_23 = QtWidgets.QGridLayout(self.tab_stack_bands)
self.gridLayout_23.setObjectName("gridLayout_23")
self.label_252 = QtWidgets.QLabel(self.tab_stack_bands)
sizePolicy = QtWidgets.QSizePolicy(QtWidgets.QSizePolicy.Minimum, QtWidgets.QSizePolicy.Preferred)
sizePolicy.setHorizontalStretch(0)
sizePolicy.setVerticalStretch(0)
sizePolicy.setHeightForWidth(self.label_252.sizePolicy().hasHeightForWidth())
self.label_252.setSizePolicy(sizePolicy)
self.label_252.setFrameShadow(QtWidgets.QFrame.Sunken)
self.label_252.setAlignment(QtCore.Qt.AlignLeading|QtCore.Qt.AlignLeft|QtCore.Qt.AlignVCenter)
self.label_252.setObjectName("label_252")
self.gridLayout_23.addWidget(self.label_252, 1, 0, 1, 1)
# Band-set number selector (1..100000).
self.band_set_comb_spinBox_3 = QtWidgets.QSpinBox(self.tab_stack_bands)
self.band_set_comb_spinBox_3.setMinimum(1)
self.band_set_comb_spinBox_3.setMaximum(100000)
self.band_set_comb_spinBox_3.setObjectName("band_set_comb_spinBox_3")
self.gridLayout_23.addWidget(self.band_set_comb_spinBox_3, 1, 1, 1, 1)
spacerItem70 = QtWidgets.QSpacerItem(647, 20, QtWidgets.QSizePolicy.Expanding, QtWidgets.QSizePolicy.Minimum)
self.gridLayout_23.addItem(spacerItem70, 1, 3, 1, 1)
spacerItem71 = QtWidgets.QSpacerItem(20, 339, QtWidgets.QSizePolicy.Minimum, QtWidgets.QSizePolicy.Expanding)
self.gridLayout_23.addItem(spacerItem71, 2, 0, 1, 1)
# Action row: add-to-batch (icon48) and run-now (icon64) tool buttons.
self.horizontalLayout_53 = QtWidgets.QHBoxLayout()
self.horizontalLayout_53.setObjectName("horizontalLayout_53")
spacerItem72 = QtWidgets.QSpacerItem(40, 20, QtWidgets.QSizePolicy.Expanding, QtWidgets.QSizePolicy.Minimum)
self.horizontalLayout_53.addItem(spacerItem72)
self.stack_raster_bands = QtWidgets.QToolButton(self.tab_stack_bands)
font = QtGui.QFont()
font.setBold(True)
font.setWeight(75)
self.stack_raster_bands.setFont(font)
self.stack_raster_bands.setLayoutDirection(QtCore.Qt.RightToLeft)
self.stack_raster_bands.setStyleSheet("margin: 0px;padding: 0px;")
self.stack_raster_bands.setIcon(icon48)
self.stack_raster_bands.setIconSize(QtCore.QSize(34, 34))
self.stack_raster_bands.setToolButtonStyle(QtCore.Qt.ToolButtonTextBesideIcon)
self.stack_raster_bands.setObjectName("stack_raster_bands")
self.horizontalLayout_53.addWidget(self.stack_raster_bands)
self.stack_Button = QtWidgets.QToolButton(self.tab_stack_bands)
font = QtGui.QFont()
font.setBold(True)
font.setWeight(75)
self.stack_Button.setFont(font)
self.stack_Button.setLayoutDirection(QtCore.Qt.RightToLeft)
self.stack_Button.setStyleSheet("margin: 0px;padding: 0px;")
self.stack_Button.setIcon(icon64)
self.stack_Button.setIconSize(QtCore.QSize(34, 34))
self.stack_Button.setToolButtonStyle(QtCore.Qt.ToolButtonTextBesideIcon)
self.stack_Button.setObjectName("stack_Button")
self.horizontalLayout_53.addWidget(self.stack_Button)
self.gridLayout_23.addLayout(self.horizontalLayout_53, 4, 0, 1, 4)
# Dark section-header labels (texts set in retranslateUi).
self.label_223 = QtWidgets.QLabel(self.tab_stack_bands)
self.label_223.setStyleSheet("background-color : #656565; color : white")
self.label_223.setFrameShape(QtWidgets.QFrame.Panel)
self.label_223.setFrameShadow(QtWidgets.QFrame.Sunken)
self.label_223.setObjectName("label_223")
self.gridLayout_23.addWidget(self.label_223, 0, 0, 1, 4)
self.label_226 = QtWidgets.QLabel(self.tab_stack_bands)
self.label_226.setStyleSheet("background-color : #656565; color : white")
self.label_226.setFrameShape(QtWidgets.QFrame.Panel)
self.label_226.setFrameShadow(QtWidgets.QFrame.Sunken)
self.label_226.setAlignment(QtCore.Qt.AlignLeading|QtCore.Qt.AlignLeft|QtCore.Qt.AlignVCenter)
self.label_226.setObjectName("label_226")
self.gridLayout_23.addWidget(self.label_226, 3, 0, 1, 4)
# Register the page; tab title is set in retranslateUi.
self.tabWidget_preprocessing.addTab(self.tab_stack_bands, "")
# --- Mosaic band sets tab (auto-generated by pyuic5; layout wiring only) ---
self.tab_mosaic_band_sets = QtWidgets.QWidget()
self.tab_mosaic_band_sets.setObjectName("tab_mosaic_band_sets")
self.gridLayout_278 = QtWidgets.QGridLayout(self.tab_mosaic_band_sets)
self.gridLayout_278.setObjectName("gridLayout_278")
self.horizontalLayout_28 = QtWidgets.QHBoxLayout()
self.horizontalLayout_28.setObjectName("horizontalLayout_28")
self.label_134 = QtWidgets.QLabel(self.tab_mosaic_band_sets)
self.label_134.setStyleSheet("background-color : #656565; color : white")
self.label_134.setFrameShape(QtWidgets.QFrame.Panel)
self.label_134.setFrameShadow(QtWidgets.QFrame.Sunken)
self.label_134.setObjectName("label_134")
self.horizontalLayout_28.addWidget(self.label_134)
self.gridLayout_278.addLayout(self.horizontalLayout_28, 0, 0, 1, 1)
# Options grid: band-set list entry, NoData value (checked by default),
# virtual-raster toggle, and output name prefix.
self.gridLayout_66 = QtWidgets.QGridLayout()
self.gridLayout_66.setObjectName("gridLayout_66")
self.label_135 = QtWidgets.QLabel(self.tab_mosaic_band_sets)
sizePolicy = QtWidgets.QSizePolicy(QtWidgets.QSizePolicy.Minimum, QtWidgets.QSizePolicy.Preferred)
sizePolicy.setHorizontalStretch(0)
sizePolicy.setVerticalStretch(0)
sizePolicy.setHeightForWidth(self.label_135.sizePolicy().hasHeightForWidth())
self.label_135.setSizePolicy(sizePolicy)
self.label_135.setMinimumSize(QtCore.QSize(229, 0))
self.label_135.setFrameShadow(QtWidgets.QFrame.Sunken)
self.label_135.setAlignment(QtCore.Qt.AlignLeading|QtCore.Qt.AlignLeft|QtCore.Qt.AlignVCenter)
self.label_135.setObjectName("label_135")
self.gridLayout_66.addWidget(self.label_135, 3, 0, 1, 1)
self.nodata_checkBox_9 = QtWidgets.QCheckBox(self.tab_mosaic_band_sets)
self.nodata_checkBox_9.setChecked(True)
self.nodata_checkBox_9.setObjectName("nodata_checkBox_9")
self.gridLayout_66.addWidget(self.nodata_checkBox_9, 1, 0, 1, 1)
self.nodata_spinBox_10 = QtWidgets.QSpinBox(self.tab_mosaic_band_sets)
self.nodata_spinBox_10.setMinimum(-2147483647)
self.nodata_spinBox_10.setMaximum(2147483647)
self.nodata_spinBox_10.setObjectName("nodata_spinBox_10")
self.gridLayout_66.addWidget(self.nodata_spinBox_10, 1, 1, 1, 1)
spacerItem73 = QtWidgets.QSpacerItem(40, 20, QtWidgets.QSizePolicy.Expanding, QtWidgets.QSizePolicy.Minimum)
self.gridLayout_66.addItem(spacerItem73, 1, 2, 1, 1)
self.mosaic_output_name_lineEdit = QtWidgets.QLineEdit(self.tab_mosaic_band_sets)
self.mosaic_output_name_lineEdit.setMaxLength(10)
self.mosaic_output_name_lineEdit.setObjectName("mosaic_output_name_lineEdit")
self.gridLayout_66.addWidget(self.mosaic_output_name_lineEdit, 3, 1, 1, 2)
self.label_144 = QtWidgets.QLabel(self.tab_mosaic_band_sets)
sizePolicy = QtWidgets.QSizePolicy(QtWidgets.QSizePolicy.Maximum, QtWidgets.QSizePolicy.Preferred)
sizePolicy.setHorizontalStretch(0)
sizePolicy.setVerticalStretch(0)
sizePolicy.setHeightForWidth(self.label_144.sizePolicy().hasHeightForWidth())
self.label_144.setSizePolicy(sizePolicy)
self.label_144.setFrameShadow(QtWidgets.QFrame.Sunken)
self.label_144.setAlignment(QtCore.Qt.AlignLeading|QtCore.Qt.AlignLeft|QtCore.Qt.AlignVCenter)
self.label_144.setObjectName("label_144")
self.gridLayout_66.addWidget(self.label_144, 0, 0, 1, 1)
self.mosaic_band_sets_lineEdit = QtWidgets.QLineEdit(self.tab_mosaic_band_sets)
self.mosaic_band_sets_lineEdit.setObjectName("mosaic_band_sets_lineEdit")
self.gridLayout_66.addWidget(self.mosaic_band_sets_lineEdit, 0, 1, 1, 2)
self.mosaic_virtual_checkBox = QtWidgets.QCheckBox(self.tab_mosaic_band_sets)
self.mosaic_virtual_checkBox.setObjectName("mosaic_virtual_checkBox")
self.gridLayout_66.addWidget(self.mosaic_virtual_checkBox, 2, 0, 1, 1)
self.gridLayout_278.addLayout(self.gridLayout_66, 1, 0, 1, 1)
spacerItem74 = QtWidgets.QSpacerItem(20, 40, QtWidgets.QSizePolicy.Minimum, QtWidgets.QSizePolicy.Expanding)
self.gridLayout_278.addItem(spacerItem74, 2, 0, 1, 1)
# Action grid: header label plus add-to-batch (icon48) and run-now (icon64) buttons.
self.gridLayout_228 = QtWidgets.QGridLayout()
self.gridLayout_228.setObjectName("gridLayout_228")
spacerItem75 = QtWidgets.QSpacerItem(40, 20, QtWidgets.QSizePolicy.Expanding, QtWidgets.QSizePolicy.Minimum)
self.gridLayout_228.addItem(spacerItem75, 1, 0, 1, 1)
self.mosaic_bandsets_toolButton = QtWidgets.QToolButton(self.tab_mosaic_band_sets)
font = QtGui.QFont()
font.setBold(True)
font.setWeight(75)
self.mosaic_bandsets_toolButton.setFont(font)
self.mosaic_bandsets_toolButton.setLayoutDirection(QtCore.Qt.RightToLeft)
self.mosaic_bandsets_toolButton.setStyleSheet("margin: 0px;padding: 0px;")
self.mosaic_bandsets_toolButton.setIcon(icon64)
self.mosaic_bandsets_toolButton.setIconSize(QtCore.QSize(34, 34))
self.mosaic_bandsets_toolButton.setToolButtonStyle(QtCore.Qt.ToolButtonTextBesideIcon)
self.mosaic_bandsets_toolButton.setObjectName("mosaic_bandsets_toolButton")
self.gridLayout_228.addWidget(self.mosaic_bandsets_toolButton, 1, 2, 1, 1)
self.mosaic_bandsets = QtWidgets.QToolButton(self.tab_mosaic_band_sets)
font = QtGui.QFont()
font.setBold(True)
font.setWeight(75)
self.mosaic_bandsets.setFont(font)
self.mosaic_bandsets.setLayoutDirection(QtCore.Qt.RightToLeft)
self.mosaic_bandsets.setStyleSheet("margin: 0px;padding: 0px;")
self.mosaic_bandsets.setIcon(icon48)
self.mosaic_bandsets.setIconSize(QtCore.QSize(34, 34))
self.mosaic_bandsets.setToolButtonStyle(QtCore.Qt.ToolButtonTextBesideIcon)
self.mosaic_bandsets.setObjectName("mosaic_bandsets")
self.gridLayout_228.addWidget(self.mosaic_bandsets, 1, 1, 1, 1)
self.label_182 = QtWidgets.QLabel(self.tab_mosaic_band_sets)
self.label_182.setStyleSheet("background-color : #656565; color : white")
self.label_182.setFrameShape(QtWidgets.QFrame.Panel)
self.label_182.setFrameShadow(QtWidgets.QFrame.Sunken)
self.label_182.setAlignment(QtCore.Qt.AlignLeading|QtCore.Qt.AlignLeft|QtCore.Qt.AlignVCenter)
self.label_182.setObjectName("label_182")
self.gridLayout_228.addWidget(self.label_182, 0, 0, 1, 3)
self.gridLayout_278.addLayout(self.gridLayout_228, 3, 0, 1, 1)
# Register the page; tab title is set in retranslateUi.
self.tabWidget_preprocessing.addTab(self.tab_mosaic_band_sets, "")
# --- Cloud masking tab (auto-generated by pyuic5; layout wiring only) ---
self.tab_cloud_mask = QtWidgets.QWidget()
self.tab_cloud_mask.setObjectName("tab_cloud_mask")
self.gridLayout_296 = QtWidgets.QGridLayout(self.tab_cloud_mask)
self.gridLayout_296.setObjectName("gridLayout_296")
# Band-set selector row (1..100000).
self.gridLayout_261 = QtWidgets.QGridLayout()
self.gridLayout_261.setObjectName("gridLayout_261")
self.label_260 = QtWidgets.QLabel(self.tab_cloud_mask)
sizePolicy = QtWidgets.QSizePolicy(QtWidgets.QSizePolicy.Minimum, QtWidgets.QSizePolicy.Preferred)
sizePolicy.setHorizontalStretch(0)
sizePolicy.setVerticalStretch(0)
sizePolicy.setHeightForWidth(self.label_260.sizePolicy().hasHeightForWidth())
self.label_260.setSizePolicy(sizePolicy)
self.label_260.setFrameShadow(QtWidgets.QFrame.Sunken)
self.label_260.setAlignment(QtCore.Qt.AlignLeading|QtCore.Qt.AlignLeft|QtCore.Qt.AlignVCenter)
self.label_260.setObjectName("label_260")
self.gridLayout_261.addWidget(self.label_260, 0, 0, 1, 1)
spacerItem76 = QtWidgets.QSpacerItem(40, 20, QtWidgets.QSizePolicy.Expanding, QtWidgets.QSizePolicy.Minimum)
self.gridLayout_261.addItem(spacerItem76, 0, 2, 1, 1)
self.band_set_comb_spinBox_9 = QtWidgets.QSpinBox(self.tab_cloud_mask)
self.band_set_comb_spinBox_9.setMinimum(1)
self.band_set_comb_spinBox_9.setMaximum(100000)
self.band_set_comb_spinBox_9.setObjectName("band_set_comb_spinBox_9")
self.gridLayout_261.addWidget(self.band_set_comb_spinBox_9, 0, 1, 1, 1)
self.gridLayout_296.addLayout(self.gridLayout_261, 1, 0, 1, 2)
# Classification input: classification combo, class-values entry, reload button.
self.gridLayout_106 = QtWidgets.QGridLayout()
self.gridLayout_106.setObjectName("gridLayout_106")
self.classification_name_combo_4 = QtWidgets.QComboBox(self.tab_cloud_mask)
sizePolicy = QtWidgets.QSizePolicy(QtWidgets.QSizePolicy.Preferred, QtWidgets.QSizePolicy.Fixed)
sizePolicy.setHorizontalStretch(1)
sizePolicy.setVerticalStretch(0)
sizePolicy.setHeightForWidth(self.classification_name_combo_4.sizePolicy().hasHeightForWidth())
self.classification_name_combo_4.setSizePolicy(sizePolicy)
self.classification_name_combo_4.setObjectName("classification_name_combo_4")
self.gridLayout_106.addWidget(self.classification_name_combo_4, 0, 1, 1, 1)
self.cloud_mask_classes_lineEdit = QtWidgets.QLineEdit(self.tab_cloud_mask)
sizePolicy = QtWidgets.QSizePolicy(QtWidgets.QSizePolicy.MinimumExpanding, QtWidgets.QSizePolicy.Fixed)
sizePolicy.setHorizontalStretch(0)
sizePolicy.setVerticalStretch(0)
sizePolicy.setHeightForWidth(self.cloud_mask_classes_lineEdit.sizePolicy().hasHeightForWidth())
self.cloud_mask_classes_lineEdit.setSizePolicy(sizePolicy)
self.cloud_mask_classes_lineEdit.setMinimumSize(QtCore.QSize(400, 26))
self.cloud_mask_classes_lineEdit.setText("")
self.cloud_mask_classes_lineEdit.setMaxLength(10000)
self.cloud_mask_classes_lineEdit.setObjectName("cloud_mask_classes_lineEdit")
self.gridLayout_106.addWidget(self.cloud_mask_classes_lineEdit, 1, 1, 1, 1)
self.label_203 = QtWidgets.QLabel(self.tab_cloud_mask)
sizePolicy = QtWidgets.QSizePolicy(QtWidgets.QSizePolicy.MinimumExpanding, QtWidgets.QSizePolicy.Preferred)
sizePolicy.setHorizontalStretch(0)
sizePolicy.setVerticalStretch(0)
sizePolicy.setHeightForWidth(self.label_203.sizePolicy().hasHeightForWidth())
self.label_203.setSizePolicy(sizePolicy)
self.label_203.setFrameShadow(QtWidgets.QFrame.Sunken)
self.label_203.setAlignment(QtCore.Qt.AlignLeading|QtCore.Qt.AlignLeft|QtCore.Qt.AlignVCenter)
self.label_203.setObjectName("label_203")
self.gridLayout_106.addWidget(self.label_203, 1, 0, 1, 1)
self.label_186 = QtWidgets.QLabel(self.tab_cloud_mask)
sizePolicy = QtWidgets.QSizePolicy(QtWidgets.QSizePolicy.Minimum, QtWidgets.QSizePolicy.Preferred)
sizePolicy.setHorizontalStretch(0)
sizePolicy.setVerticalStretch(0)
sizePolicy.setHeightForWidth(self.label_186.sizePolicy().hasHeightForWidth())
self.label_186.setSizePolicy(sizePolicy)
self.label_186.setMinimumSize(QtCore.QSize(229, 0))
self.label_186.setFrameShadow(QtWidgets.QFrame.Sunken)
self.label_186.setAlignment(QtCore.Qt.AlignLeading|QtCore.Qt.AlignLeft|QtCore.Qt.AlignVCenter)
self.label_186.setObjectName("label_186")
self.gridLayout_106.addWidget(self.label_186, 0, 0, 1, 1)
self.toolButton_reload_23 = QtWidgets.QToolButton(self.tab_cloud_mask)
self.toolButton_reload_23.setStyleSheet("margin: 0px;padding: 0px;")
self.toolButton_reload_23.setIcon(icon55)
self.toolButton_reload_23.setIconSize(QtCore.QSize(22, 22))
self.toolButton_reload_23.setObjectName("toolButton_reload_23")
self.gridLayout_106.addWidget(self.toolButton_reload_23, 0, 2, 1, 1)
self.gridLayout_296.addLayout(self.gridLayout_106, 2, 0, 1, 2)
# Mask options: output name, buffer size (1..1000), NoData value (default -32768).
self.gridLayout_143 = QtWidgets.QGridLayout()
self.gridLayout_143.setObjectName("gridLayout_143")
self.label_140 = QtWidgets.QLabel(self.tab_cloud_mask)
sizePolicy = QtWidgets.QSizePolicy(QtWidgets.QSizePolicy.Minimum, QtWidgets.QSizePolicy.Preferred)
sizePolicy.setHorizontalStretch(0)
sizePolicy.setVerticalStretch(0)
sizePolicy.setHeightForWidth(self.label_140.sizePolicy().hasHeightForWidth())
self.label_140.setSizePolicy(sizePolicy)
self.label_140.setMinimumSize(QtCore.QSize(229, 0))
self.label_140.setFrameShadow(QtWidgets.QFrame.Sunken)
self.label_140.setAlignment(QtCore.Qt.AlignLeading|QtCore.Qt.AlignLeft|QtCore.Qt.AlignVCenter)
self.label_140.setObjectName("label_140")
self.gridLayout_143.addWidget(self.label_140, 1, 0, 1, 1)
self.mask_output_name_lineEdit = QtWidgets.QLineEdit(self.tab_cloud_mask)
self.mask_output_name_lineEdit.setMaxLength(10)
self.mask_output_name_lineEdit.setObjectName("mask_output_name_lineEdit")
self.gridLayout_143.addWidget(self.mask_output_name_lineEdit, 1, 1, 1, 1)
self.gridLayout_145 = QtWidgets.QGridLayout()
self.gridLayout_145.setObjectName("gridLayout_145")
self.cloud_buffer_spinBox = QtWidgets.QSpinBox(self.tab_cloud_mask)
self.cloud_buffer_spinBox.setMinimum(1)
self.cloud_buffer_spinBox.setMaximum(1000)
self.cloud_buffer_spinBox.setProperty("value", 1)
self.cloud_buffer_spinBox.setObjectName("cloud_buffer_spinBox")
self.gridLayout_145.addWidget(self.cloud_buffer_spinBox, 0, 1, 1, 1)
spacerItem77 = QtWidgets.QSpacerItem(40, 20, QtWidgets.QSizePolicy.Expanding, QtWidgets.QSizePolicy.Minimum)
self.gridLayout_145.addItem(spacerItem77, 0, 2, 1, 1)
self.cloud_buffer_checkBox = QtWidgets.QCheckBox(self.tab_cloud_mask)
self.cloud_buffer_checkBox.setEnabled(True)
self.cloud_buffer_checkBox.setObjectName("cloud_buffer_checkBox")
self.gridLayout_145.addWidget(self.cloud_buffer_checkBox, 0, 0, 1, 1)
self.nodata_spinBox_11 = QtWidgets.QSpinBox(self.tab_cloud_mask)
self.nodata_spinBox_11.setMinimum(-2147483647)
self.nodata_spinBox_11.setMaximum(2147483647)
self.nodata_spinBox_11.setProperty("value", -32768)
self.nodata_spinBox_11.setObjectName("nodata_spinBox_11")
self.gridLayout_145.addWidget(self.nodata_spinBox_11, 1, 1, 1, 1)
self.label_141 = QtWidgets.QLabel(self.tab_cloud_mask)
sizePolicy = QtWidgets.QSizePolicy(QtWidgets.QSizePolicy.Maximum, QtWidgets.QSizePolicy.Preferred)
sizePolicy.setHorizontalStretch(0)
sizePolicy.setVerticalStretch(0)
sizePolicy.setHeightForWidth(self.label_141.sizePolicy().hasHeightForWidth())
self.label_141.setSizePolicy(sizePolicy)
self.label_141.setFrameShadow(QtWidgets.QFrame.Sunken)
self.label_141.setAlignment(QtCore.Qt.AlignLeading|QtCore.Qt.AlignLeft|QtCore.Qt.AlignVCenter)
self.label_141.setObjectName("label_141")
self.gridLayout_145.addWidget(self.label_141, 1, 0, 1, 1)
self.gridLayout_143.addLayout(self.gridLayout_145, 0, 0, 1, 2)
self.gridLayout_296.addLayout(self.gridLayout_143, 3, 0, 1, 2)
spacerItem78 = QtWidgets.QSpacerItem(20, 173, QtWidgets.QSizePolicy.Minimum, QtWidgets.QSizePolicy.Expanding)
self.gridLayout_296.addItem(spacerItem78, 4, 1, 1, 1)
# Action grid: header label plus add-to-batch (icon48) and run-now (icon64) buttons.
self.gridLayout_257 = QtWidgets.QGridLayout()
self.gridLayout_257.setObjectName("gridLayout_257")
self.cloud_mask_toolButton = QtWidgets.QToolButton(self.tab_cloud_mask)
font = QtGui.QFont()
font.setBold(True)
font.setWeight(75)
self.cloud_mask_toolButton.setFont(font)
self.cloud_mask_toolButton.setLayoutDirection(QtCore.Qt.RightToLeft)
self.cloud_mask_toolButton.setStyleSheet("margin: 0px;padding: 0px;")
self.cloud_mask_toolButton.setIcon(icon64)
self.cloud_mask_toolButton.setIconSize(QtCore.QSize(34, 34))
self.cloud_mask_toolButton.setToolButtonStyle(QtCore.Qt.ToolButtonTextBesideIcon)
self.cloud_mask_toolButton.setObjectName("cloud_mask_toolButton")
self.gridLayout_257.addWidget(self.cloud_mask_toolButton, 1, 2, 1, 1)
self.cloud_masking = QtWidgets.QToolButton(self.tab_cloud_mask)
font = QtGui.QFont()
font.setBold(True)
font.setWeight(75)
self.cloud_masking.setFont(font)
self.cloud_masking.setLayoutDirection(QtCore.Qt.RightToLeft)
self.cloud_masking.setStyleSheet("margin: 0px;padding: 0px;")
self.cloud_masking.setIcon(icon48)
self.cloud_masking.setIconSize(QtCore.QSize(34, 34))
self.cloud_masking.setToolButtonStyle(QtCore.Qt.ToolButtonTextBesideIcon)
self.cloud_masking.setObjectName("cloud_masking")
self.gridLayout_257.addWidget(self.cloud_masking, 1, 1, 1, 1)
spacerItem79 = QtWidgets.QSpacerItem(40, 20, QtWidgets.QSizePolicy.Expanding, QtWidgets.QSizePolicy.Minimum)
self.gridLayout_257.addItem(spacerItem79, 1, 0, 1, 1)
self.label_185 = QtWidgets.QLabel(self.tab_cloud_mask)
self.label_185.setStyleSheet("background-color : #656565; color : white")
self.label_185.setFrameShape(QtWidgets.QFrame.Panel)
self.label_185.setFrameShadow(QtWidgets.QFrame.Sunken)
self.label_185.setAlignment(QtCore.Qt.AlignLeading|QtCore.Qt.AlignLeft|QtCore.Qt.AlignVCenter)
self.label_185.setObjectName("label_185")
self.gridLayout_257.addWidget(self.label_185, 0, 0, 1, 3)
self.gridLayout_296.addLayout(self.gridLayout_257, 5, 0, 1, 2)
# Top section-header row for the tab.
self.horizontalLayout_33 = QtWidgets.QHBoxLayout()
self.horizontalLayout_33.setObjectName("horizontalLayout_33")
self.label_138 = QtWidgets.QLabel(self.tab_cloud_mask)
self.label_138.setStyleSheet("background-color : #656565; color : white")
self.label_138.setFrameShape(QtWidgets.QFrame.Panel)
self.label_138.setFrameShadow(QtWidgets.QFrame.Sunken)
self.label_138.setObjectName("label_138")
self.horizontalLayout_33.addWidget(self.label_138)
self.gridLayout_296.addLayout(self.horizontalLayout_33, 0, 0, 1, 2)
# Register the page; tab title is set in retranslateUi.
self.tabWidget_preprocessing.addTab(self.tab_cloud_mask, "")
# --- GOES conversion tab (auto-generated by pyuic5; continues past this chunk) ---
self.tab_GOES = QtWidgets.QWidget()
self.tab_GOES.setObjectName("tab_GOES")
self.gridLayout_308 = QtWidgets.QGridLayout(self.tab_GOES)
self.gridLayout_308.setObjectName("gridLayout_308")
self.gridLayout_297 = QtWidgets.QGridLayout()
self.gridLayout_297.setObjectName("gridLayout_297")
# NoData row: spinbox (-999..100000) with checkbox checked by default.
self.gridLayout_298 = QtWidgets.QGridLayout()
self.gridLayout_298.setObjectName("gridLayout_298")
self.GOES_nodata_spinBox = QtWidgets.QSpinBox(self.tab_GOES)
self.GOES_nodata_spinBox.setMinimum(-999)
self.GOES_nodata_spinBox.setMaximum(100000)
self.GOES_nodata_spinBox.setObjectName("GOES_nodata_spinBox")
self.gridLayout_298.addWidget(self.GOES_nodata_spinBox, 0, 2, 1, 1)
self.GOES_nodata_checkBox = QtWidgets.QCheckBox(self.tab_GOES)
self.GOES_nodata_checkBox.setChecked(True)
self.GOES_nodata_checkBox.setObjectName("GOES_nodata_checkBox")
self.gridLayout_298.addWidget(self.GOES_nodata_checkBox, 0, 1, 1, 1)
spacerItem80 = QtWidgets.QSpacerItem(40, 20, QtWidgets.QSizePolicy.Expanding, QtWidgets.QSizePolicy.Minimum)
self.gridLayout_298.addItem(spacerItem80, 0, 0, 1, 1)
self.gridLayout_297.addLayout(self.gridLayout_298, 3, 1, 1, 2)
self.label_273 = QtWidgets.QLabel(self.tab_GOES)
sizePolicy = QtWidgets.QSizePolicy(QtWidgets.QSizePolicy.Expanding, QtWidgets.QSizePolicy.Preferred)
sizePolicy.setHorizontalStretch(0)
sizePolicy.setVerticalStretch(0)
sizePolicy.setHeightForWidth(self.label_273.sizePolicy().hasHeightForWidth())
self.label_273.setSizePolicy(sizePolicy)
self.label_273.setStyleSheet("background-color : #656565; color : white")
self.label_273.setFrameShape(QtWidgets.QFrame.Panel)
self.label_273.setFrameShadow(QtWidgets.QFrame.Sunken)
self.label_273.setObjectName("label_273")
self.gridLayout_297.addWidget(self.label_273, 0, 0, 1, 3)
# "Create band set" option, checked by default, two-state only.
self.GOES_create_bandset_checkBox = QtWidgets.QCheckBox(self.tab_GOES)
self.GOES_create_bandset_checkBox.setChecked(True)
self.GOES_create_bandset_checkBox.setTristate(False)
self.GOES_create_bandset_checkBox.setObjectName("GOES_create_bandset_checkBox")
self.gridLayout_297.addWidget(self.GOES_create_bandset_checkBox, 4, 0, 1, 1)
self.label_274 = QtWidgets.QLabel(self.tab_GOES)
sizePolicy = QtWidgets.QSizePolicy(QtWidgets.QSizePolicy.Maximum, QtWidgets.QSizePolicy.Preferred)
sizePolicy.setHorizontalStretch(0)
sizePolicy.setVerticalStretch(0)
sizePolicy.setHeightForWidth(self.label_274.sizePolicy().hasHeightForWidth())
self.label_274.setSizePolicy(sizePolicy)
self.label_274.setMinimumSize(QtCore.QSize(229, 0))
self.label_274.setFrameShadow(QtWidgets.QFrame.Sunken)
self.label_274.setAlignment(QtCore.Qt.AlignLeading|QtCore.Qt.AlignLeft|QtCore.Qt.AlignVCenter)
self.label_274.setObjectName("label_274")
self.gridLayout_297.addWidget(self.label_274, 2, 0, 1, 1)
self.gridLayout_299 = QtWidgets.QGridLayout()
self.gridLayout_299.setObjectName("gridLayout_299")
self.GOES_toolButton_directoryInput = QtWidgets.QToolButton(self.tab_GOES)
self.GOES_toolButton_directoryInput.setStyleSheet("margin: 0px;padding: 0px;")
self.GOES_toolButton_directoryInput.setIcon(icon69)
self.GOES_toolButton_directoryInput.setIconSize(QtCore.QSize(22, 22))
self.GOES_toolButton_directoryInput.setObjectName("GOES_toolButton_directoryInput")
self.gridLayout_299.addWidget(self.GOES_toolButton_directoryInput, 0, 1, 1, 1)
self.GOES_label = QtWidgets.QLabel(self.tab_GOES)
sizePolicy = QtWidgets.QSizePolicy(QtWidgets.QSizePolicy.Expanding, QtWidgets.QSizePolicy.Preferred)
sizePolicy.setHorizontalStretch(0)
sizePolicy.setVerticalStretch(0)
sizePolicy.setHeightForWidth(self.GOES_label.sizePolicy().hasHeightForWidth())
self.GOES_label.setSizePolicy(sizePolicy)
self.GOES_label.setFrameShape(QtWidgets.QFrame.Panel)
self.GOES_label.setFrameShadow(QtWidgets.QFrame.Sunken)
self.GOES_label.setText("")
self.GOES_label.setObjectName("GOES_label")
self.gridLayout_299.addWidget(self.GOES_label, 0, 0, 1, 1)
self.gridLayout_297.addLayout(self.gridLayout_299, 2, 1, 1, 2)
self.add_new_bandset_checkBox_7 = QtWidgets.QCheckBox(self.tab_GOES)
self.add_new_bandset_checkBox_7.setChecked(True)
self.add_new_bandset_checkBox_7.setTristate(False)
self.add_new_bandset_checkBox_7.setObjectName("add_new_bandset_checkBox_7")
self.gridLayout_297.addWidget(self.add_new_bandset_checkBox_7, 4, 1, 1, 2)
self.gridLayout_308.addLayout(self.gridLayout_297, 0, 0, 1, 1)
self.gridLayout_304 = QtWidgets.QGridLayout()
self.gridLayout_304.setObjectName("gridLayout_304")
self.GOES_tableWidget = QtWidgets.QTableWidget(self.tab_GOES)
self.GOES_tableWidget.setTextElideMode(QtCore.Qt.ElideMiddle)
self.GOES_tableWidget.setObjectName("GOES_tableWidget")
self.GOES_tableWidget.setColumnCount(1)
self.GOES_tableWidget.setRowCount(0)
item = QtWidgets.QTableWidgetItem()
self.GOES_tableWidget.setHorizontalHeaderItem(0, item)
self.GOES_tableWidget.horizontalHeader().setDefaultSectionSize(155)
self.GOES_tableWidget.horizontalHeader().setStretchLastSection(True)
self.gridLayout_304.addWidget(self.GOES_tableWidget, 1, 0, 1, 1)
self.gridLayout_305 = QtWidgets.QGridLayout()
self.gridLayout_305.setObjectName("gridLayout_305")
self.GOES_pushButton_remove_band = QtWidgets.QToolButton(self.tab_GOES)
self.GOES_pushButton_remove_band.setStyleSheet("margin: 0px;padding: 0px;")
self.GOES_pushButton_remove_band.setIcon(icon58)
self.GOES_pushButton_remove_band.setIconSize(QtCore.QSize(22, 22))
self.GOES_pushButton_remove_band.setObjectName("GOES_pushButton_remove_band")
self.gridLayout_305.addWidget(self.GOES_pushButton_remove_band, 0, 0, 1, 1)
self.gridLayout_304.addLayout(self.gridLayout_305, 1, 1, 1, 1)
self.gridLayout_306 = QtWidgets.QGridLayout()
self.gridLayout_306.setObjectName("gridLayout_306")
self.label_277 = QtWidgets.QLabel(self.tab_GOES)
sizePolicy = QtWidgets.QSizePolicy(QtWidgets.QSizePolicy.Expanding, QtWidgets.QSizePolicy.Preferred)
sizePolicy.setHorizontalStretch(0)
sizePolicy.setVerticalStretch(0)
sizePolicy.setHeightForWidth(self.label_277.sizePolicy().hasHeightForWidth())
self.label_277.setSizePolicy(sizePolicy)
self.label_277.setStyleSheet("background-color : #656565; color : white")
self.label_277.setFrameShape(QtWidgets.QFrame.Panel)
self.label_277.setFrameShadow(QtWidgets.QFrame.Sunken)
self.label_277.setObjectName("label_277")
self.gridLayout_306.addWidget(self.label_277, 0, 0, 1, 4)
self.satellite_label_20 = QtWidgets.QLabel(self.tab_GOES)
sizePolicy = QtWidgets.QSizePolicy(QtWidgets.QSizePolicy.Maximum, QtWidgets.QSizePolicy.Preferred)
sizePolicy.setHorizontalStretch(0)
sizePolicy.setVerticalStretch(0)
sizePolicy.setHeightForWidth(self.satellite_label_20.sizePolicy().hasHeightForWidth())
self.satellite_label_20.setSizePolicy(sizePolicy)
self.satellite_label_20.setFrameShadow(QtWidgets.QFrame.Sunken)
self.satellite_label_20.setAlignment(QtCore.Qt.AlignLeading|QtCore.Qt.AlignLeft|QtCore.Qt.AlignVCenter)
self.satellite_label_20.setObjectName("satellite_label_20")
self.gridLayout_306.addWidget(self.satellite_label_20, 1, 0, 1, 1)
self.GOES_satellite_lineEdit = QtWidgets.QLineEdit(self.tab_GOES)
self.GOES_satellite_lineEdit.setObjectName("GOES_satellite_lineEdit")
self.gridLayout_306.addWidget(self.GOES_satellite_lineEdit, 1, 1, 1, 1)
spacerItem81 = QtWidgets.QSpacerItem(40, 20, QtWidgets.QSizePolicy.Expanding, QtWidgets.QSizePolicy.Minimum)
self.gridLayout_306.addItem(spacerItem81, 1, 2, 1, 1)
self.gridLayout_304.addLayout(self.gridLayout_306, 0, 0, 1, 2)
self.gridLayout_307 = QtWidgets.QGridLayout()
self.gridLayout_307.setObjectName("gridLayout_307")
spacerItem82 = QtWidgets.QSpacerItem(40, 20, QtWidgets.QSizePolicy.Expanding, QtWidgets.QSizePolicy.Minimum)
self.gridLayout_307.addItem(spacerItem82, 1, 1, 1, 1)
self.label_278 = QtWidgets.QLabel(self.tab_GOES)
self.label_278.setStyleSheet("background-color : #656565; color : white")
self.label_278.setFrameShape(QtWidgets.QFrame.Panel)
self.label_278.setFrameShadow(QtWidgets.QFrame.Sunken)
self.label_278.setAlignment(QtCore.Qt.AlignLeading|QtCore.Qt.AlignLeft|QtCore.Qt.AlignVCenter)
self.label_278.setObjectName("label_278")
self.gridLayout_307.addWidget(self.label_278, 0, 1, 1, 3)
self.pushButton_Conversion_8 = QtWidgets.QToolButton(self.tab_GOES)
font = QtGui.QFont()
font.setBold(True)
font.setWeight(75)
self.pushButton_Conversion_8.setFont(font)
self.pushButton_Conversion_8.setLayoutDirection(QtCore.Qt.RightToLeft)
self.pushButton_Conversion_8.setStyleSheet("margin: 0px;padding: 0px;")
self.pushButton_Conversion_8.setIcon(icon64)
self.pushButton_Conversion_8.setIconSize(QtCore.QSize(34, 34))
self.pushButton_Conversion_8.setToolButtonStyle(QtCore.Qt.ToolButtonTextBesideIcon)
self.pushButton_Conversion_8.setObjectName("pushButton_Conversion_8")
self.gridLayout_307.addWidget(self.pushButton_Conversion_8, 1, 3, 1, 1)
self.goes_conversion = QtWidgets.QToolButton(self.tab_GOES)
font = QtGui.QFont()
font.setBold(True)
font.setWeight(75)
self.goes_conversion.setFont(font)
self.goes_conversion.setLayoutDirection(QtCore.Qt.RightToLeft)
self.goes_conversion.setStyleSheet("margin: 0px;padding: 0px;")
self.goes_conversion.setIcon(icon48)
self.goes_conversion.setIconSize(QtCore.QSize(34, 34))
self.goes_conversion.setToolButtonStyle(QtCore.Qt.ToolButtonTextBesideIcon)
self.goes_conversion.setObjectName("goes_conversion")
self.gridLayout_307.addWidget(self.goes_conversion, 1, 2, 1, 1)
self.gridLayout_304.addLayout(self.gridLayout_307, 2, 0, 1, 2)
self.gridLayout_308.addLayout(self.gridLayout_304, 1, 0, 1, 1)
self.tabWidget_preprocessing.addTab(self.tab_GOES, "")
self.tab_neighbor_pixels = QtWidgets.QWidget()
self.tab_neighbor_pixels.setObjectName("tab_neighbor_pixels")
self.gridLayout_310 = QtWidgets.QGridLayout(self.tab_neighbor_pixels)
self.gridLayout_310.setObjectName("gridLayout_310")
self.gridLayout_240 = QtWidgets.QGridLayout()
self.gridLayout_240.setObjectName("gridLayout_240")
self.label_283 = QtWidgets.QLabel(self.tab_neighbor_pixels)
self.label_283.setStyleSheet("background-color : #656565; color : white")
self.label_283.setFrameShape(QtWidgets.QFrame.Panel)
self.label_283.setFrameShadow(QtWidgets.QFrame.Sunken)
self.label_283.setAlignment(QtCore.Qt.AlignLeading|QtCore.Qt.AlignLeft|QtCore.Qt.AlignVCenter)
self.label_283.setObjectName("label_283")
self.gridLayout_240.addWidget(self.label_283, 8, 0, 1, 3)
self.gridLayout_309 = QtWidgets.QGridLayout()
self.gridLayout_309.setObjectName("gridLayout_309")
self.label_287 = QtWidgets.QLabel(self.tab_neighbor_pixels)
sizePolicy = QtWidgets.QSizePolicy(QtWidgets.QSizePolicy.Expanding, QtWidgets.QSizePolicy.Preferred)
sizePolicy.setHorizontalStretch(0)
sizePolicy.setVerticalStretch(0)
sizePolicy.setHeightForWidth(self.label_287.sizePolicy().hasHeightForWidth())
self.label_287.setSizePolicy(sizePolicy)
self.label_287.setFrameShape(QtWidgets.QFrame.Panel)
self.label_287.setFrameShadow(QtWidgets.QFrame.Sunken)
self.label_287.setText("")
self.label_287.setObjectName("label_287")
self.gridLayout_309.addWidget(self.label_287, 0, 1, 1, 1)
self.label_281 = QtWidgets.QLabel(self.tab_neighbor_pixels)
sizePolicy = QtWidgets.QSizePolicy(QtWidgets.QSizePolicy.Minimum, QtWidgets.QSizePolicy.Preferred)
sizePolicy.setHorizontalStretch(0)
sizePolicy.setVerticalStretch(0)
sizePolicy.setHeightForWidth(self.label_281.sizePolicy().hasHeightForWidth())
self.label_281.setSizePolicy(sizePolicy)
self.label_281.setMinimumSize(QtCore.QSize(229, 0))
font = QtGui.QFont()
font.setBold(False)
font.setWeight(50)
self.label_281.setFont(font)
self.label_281.setFrameShadow(QtWidgets.QFrame.Sunken)
self.label_281.setAlignment(QtCore.Qt.AlignLeading|QtCore.Qt.AlignLeft|QtCore.Qt.AlignVCenter)
self.label_281.setObjectName("label_281")
self.gridLayout_309.addWidget(self.label_281, 0, 0, 1, 1)
self.toolButton_input_matrix = QtWidgets.QToolButton(self.tab_neighbor_pixels)
self.toolButton_input_matrix.setStyleSheet("margin: 0px;padding: 0px;")
self.toolButton_input_matrix.setIcon(icon65)
self.toolButton_input_matrix.setIconSize(QtCore.QSize(22, 22))
self.toolButton_input_matrix.setObjectName("toolButton_input_matrix")
self.gridLayout_309.addWidget(self.toolButton_input_matrix, 0, 2, 1, 1)
self.horizontalLayout_71 = QtWidgets.QHBoxLayout()
self.horizontalLayout_71.setObjectName("horizontalLayout_71")
self.label_279 = QtWidgets.QLabel(self.tab_neighbor_pixels)
sizePolicy = QtWidgets.QSizePolicy(QtWidgets.QSizePolicy.Minimum, QtWidgets.QSizePolicy.Preferred)
sizePolicy.setHorizontalStretch(0)
sizePolicy.setVerticalStretch(0)
sizePolicy.setHeightForWidth(self.label_279.sizePolicy().hasHeightForWidth())
self.label_279.setSizePolicy(sizePolicy)
self.label_279.setMinimumSize(QtCore.QSize(229, 0))
self.label_279.setFrameShadow(QtWidgets.QFrame.Sunken)
self.label_279.setAlignment(QtCore.Qt.AlignLeading|QtCore.Qt.AlignLeft|QtCore.Qt.AlignVCenter)
self.label_279.setObjectName("label_279")
self.horizontalLayout_71.addWidget(self.label_279)
self.neighbor_output_name_lineEdit = QtWidgets.QLineEdit(self.tab_neighbor_pixels)
self.neighbor_output_name_lineEdit.setMaxLength(10)
self.neighbor_output_name_lineEdit.setObjectName("neighbor_output_name_lineEdit")
self.horizontalLayout_71.addWidget(self.neighbor_output_name_lineEdit)
self.gridLayout_309.addLayout(self.horizontalLayout_71, 2, 0, 1, 3)
self.neighbor_virtual_checkBox = QtWidgets.QCheckBox(self.tab_neighbor_pixels)
self.neighbor_virtual_checkBox.setObjectName("neighbor_virtual_checkBox")
self.gridLayout_309.addWidget(self.neighbor_virtual_checkBox, 1, 0, 1, 1)
self.gridLayout_240.addLayout(self.gridLayout_309, 2, 0, 1, 3)
self.neighbor_pixels = QtWidgets.QToolButton(self.tab_neighbor_pixels)
font = QtGui.QFont()
font.setBold(True)
font.setWeight(75)
self.neighbor_pixels.setFont(font)
self.neighbor_pixels.setLayoutDirection(QtCore.Qt.RightToLeft)
self.neighbor_pixels.setStyleSheet("margin: 0px;padding: 0px;")
self.neighbor_pixels.setIcon(icon48)
self.neighbor_pixels.setIconSize(QtCore.QSize(34, 34))
self.neighbor_pixels.setToolButtonStyle(QtCore.Qt.ToolButtonTextBesideIcon)
self.neighbor_pixels.setObjectName("neighbor_pixels")
self.gridLayout_240.addWidget(self.neighbor_pixels, 9, 1, 1, 1)
spacerItem83 = QtWidgets.QSpacerItem(40, 20, QtWidgets.QSizePolicy.Expanding, QtWidgets.QSizePolicy.Minimum)
self.gridLayout_240.addItem(spacerItem83, 9, 0, 1, 1)
self.class_neighbor_toolButton = QtWidgets.QToolButton(self.tab_neighbor_pixels)
font = QtGui.QFont()
font.setBold(True)
font.setWeight(75)
self.class_neighbor_toolButton.setFont(font)
self.class_neighbor_toolButton.setLayoutDirection(QtCore.Qt.RightToLeft)
self.class_neighbor_toolButton.setStyleSheet("margin: 0px;padding: 0px;")
self.class_neighbor_toolButton.setIcon(icon64)
self.class_neighbor_toolButton.setIconSize(QtCore.QSize(34, 34))
self.class_neighbor_toolButton.setToolButtonStyle(QtCore.Qt.ToolButtonTextBesideIcon)
self.class_neighbor_toolButton.setObjectName("class_neighbor_toolButton")
self.gridLayout_240.addWidget(self.class_neighbor_toolButton, 9, 2, 1, 1)
spacerItem84 = QtWidgets.QSpacerItem(20, 40, QtWidgets.QSizePolicy.Minimum, QtWidgets.QSizePolicy.Expanding)
self.gridLayout_240.addItem(spacerItem84, 7, 2, 1, 1)
self.horizontalLayout_70 = QtWidgets.QHBoxLayout()
self.horizontalLayout_70.setObjectName("horizontalLayout_70")
self.label_286 = QtWidgets.QLabel(self.tab_neighbor_pixels)
self.label_286.setStyleSheet("background-color : #656565; color : white")
self.label_286.setFrameShape(QtWidgets.QFrame.Panel)
self.label_286.setFrameShadow(QtWidgets.QFrame.Sunken)
self.label_286.setObjectName("label_286")
self.horizontalLayout_70.addWidget(self.label_286)
self.gridLayout_240.addLayout(self.horizontalLayout_70, 0, 0, 1, 3)
self.gridLayout_277 = QtWidgets.QGridLayout()
self.gridLayout_277.setObjectName("gridLayout_277")
self.statistic_lineEdit_2 = QtWidgets.QLineEdit(self.tab_neighbor_pixels)
sizePolicy = QtWidgets.QSizePolicy(QtWidgets.QSizePolicy.MinimumExpanding, QtWidgets.QSizePolicy.Fixed)
sizePolicy.setHorizontalStretch(0)
sizePolicy.setVerticalStretch(0)
sizePolicy.setHeightForWidth(self.statistic_lineEdit_2.sizePolicy().hasHeightForWidth())
self.statistic_lineEdit_2.setSizePolicy(sizePolicy)
self.statistic_lineEdit_2.setMaximumSize(QtCore.QSize(200, 16777215))
self.statistic_lineEdit_2.setText("")
self.statistic_lineEdit_2.setMaxLength(10000)
self.statistic_lineEdit_2.setObjectName("statistic_lineEdit_2")
self.gridLayout_277.addWidget(self.statistic_lineEdit_2, 1, 2, 1, 1)
self.statistic_name_combobox_2 = QtWidgets.QComboBox(self.tab_neighbor_pixels)
sizePolicy = QtWidgets.QSizePolicy(QtWidgets.QSizePolicy.Preferred, QtWidgets.QSizePolicy.Fixed)
sizePolicy.setHorizontalStretch(1)
sizePolicy.setVerticalStretch(0)
sizePolicy.setHeightForWidth(self.statistic_name_combobox_2.sizePolicy().hasHeightForWidth())
self.statistic_name_combobox_2.setSizePolicy(sizePolicy)
self.statistic_name_combobox_2.setMaximumSize(QtCore.QSize(200, 16777215))
self.statistic_name_combobox_2.setObjectName("statistic_name_combobox_2")
self.gridLayout_277.addWidget(self.statistic_name_combobox_2, 1, 1, 1, 1)
self.label_284 = QtWidgets.QLabel(self.tab_neighbor_pixels)
self.label_284.setFrameShadow(QtWidgets.QFrame.Sunken)
self.label_284.setAlignment(QtCore.Qt.AlignLeading|QtCore.Qt.AlignLeft|QtCore.Qt.AlignVCenter)
self.label_284.setObjectName("label_284")
self.gridLayout_277.addWidget(self.label_284, 1, 0, 1, 1)
spacerItem85 = QtWidgets.QSpacerItem(40, 20, QtWidgets.QSizePolicy.Expanding, QtWidgets.QSizePolicy.Minimum)
self.gridLayout_277.addItem(spacerItem85, 1, 3, 1, 1)
self.label_285 = QtWidgets.QLabel(self.tab_neighbor_pixels)
self.label_285.setStyleSheet("background-color : #656565; color : white")
self.label_285.setFrameShape(QtWidgets.QFrame.Panel)
self.label_285.setFrameShadow(QtWidgets.QFrame.Sunken)
self.label_285.setObjectName("label_285")
self.gridLayout_277.addWidget(self.label_285, 0, 0, 1, 4)
self.gridLayout_240.addLayout(self.gridLayout_277, 6, 0, 1, 3)
self.gridLayout_300 = QtWidgets.QGridLayout()
self.gridLayout_300.setObjectName("gridLayout_300")
self.label_282 = QtWidgets.QLabel(self.tab_neighbor_pixels)
sizePolicy = QtWidgets.QSizePolicy(QtWidgets.QSizePolicy.Minimum, QtWidgets.QSizePolicy.Preferred)
sizePolicy.setHorizontalStretch(0)
sizePolicy.setVerticalStretch(0)
sizePolicy.setHeightForWidth(self.label_282.sizePolicy().hasHeightForWidth())
self.label_282.setSizePolicy(sizePolicy)
self.label_282.setFrameShadow(QtWidgets.QFrame.Sunken)
self.label_282.setAlignment(QtCore.Qt.AlignLeading|QtCore.Qt.AlignLeft|QtCore.Qt.AlignVCenter)
self.label_282.setObjectName("label_282")
self.gridLayout_300.addWidget(self.label_282, 0, 0, 1, 1)
self.label_280 = QtWidgets.QLabel(self.tab_neighbor_pixels)
sizePolicy = QtWidgets.QSizePolicy(QtWidgets.QSizePolicy.Minimum, QtWidgets.QSizePolicy.Preferred)
sizePolicy.setHorizontalStretch(0)
sizePolicy.setVerticalStretch(0)
sizePolicy.setHeightForWidth(self.label_280.sizePolicy().hasHeightForWidth())
self.label_280.setSizePolicy(sizePolicy)
self.label_280.setMinimumSize(QtCore.QSize(229, 0))
self.label_280.setFrameShadow(QtWidgets.QFrame.Sunken)
self.label_280.setAlignment(QtCore.Qt.AlignLeading|QtCore.Qt.AlignLeft|QtCore.Qt.AlignVCenter)
self.label_280.setObjectName("label_280")
self.gridLayout_300.addWidget(self.label_280, 1, 0, 1, 1)
self.band_set_comb_spinBox_15 = QtWidgets.QSpinBox(self.tab_neighbor_pixels)
self.band_set_comb_spinBox_15.setMinimum(1)
self.band_set_comb_spinBox_15.setMaximum(100000)
self.band_set_comb_spinBox_15.setObjectName("band_set_comb_spinBox_15")
self.gridLayout_300.addWidget(self.band_set_comb_spinBox_15, 0, 1, 1, 1)
self.class_neighbor_threshold_spinBox = QtWidgets.QSpinBox(self.tab_neighbor_pixels)
self.class_neighbor_threshold_spinBox.setMinimum(1)
self.class_neighbor_threshold_spinBox.setMaximum(1000)
self.class_neighbor_threshold_spinBox.setProperty("value", 1)
self.class_neighbor_threshold_spinBox.setObjectName("class_neighbor_threshold_spinBox")
self.gridLayout_300.addWidget(self.class_neighbor_threshold_spinBox, 1, 1, 1, 1)
spacerItem86 = QtWidgets.QSpacerItem(40, 20, QtWidgets.QSizePolicy.Expanding, QtWidgets.QSizePolicy.Minimum)
self.gridLayout_300.addItem(spacerItem86, 1, 3, 1, 1)
self.circular_structure_checkBox = QtWidgets.QCheckBox(self.tab_neighbor_pixels)
self.circular_structure_checkBox.setObjectName("circular_structure_checkBox")
self.gridLayout_300.addWidget(self.circular_structure_checkBox, 1, 2, 1, 1)
self.gridLayout_240.addLayout(self.gridLayout_300, 1, 0, 1, 3)
self.horizontalLayout_72 = QtWidgets.QHBoxLayout()
self.horizontalLayout_72.setObjectName("horizontalLayout_72")
spacerItem87 = QtWidgets.QSpacerItem(40, 20, QtWidgets.QSizePolicy.Expanding, QtWidgets.QSizePolicy.Minimum)
self.horizontalLayout_72.addItem(spacerItem87)
self.gridLayout_240.addLayout(self.horizontalLayout_72, 3, 0, 1, 3)
self.gridLayout_310.addLayout(self.gridLayout_240, 0, 0, 1, 1)
self.tabWidget_preprocessing.addTab(self.tab_neighbor_pixels, "")
self.gridLayout_6.addWidget(self.tabWidget_preprocessing, 0, 0, 1, 1)
self.SCP_tabs.addTab(self.tab_preprocessing, "")
self.tab_band_processing = QtWidgets.QWidget()
self.tab_band_processing.setObjectName("tab_band_processing")
self.gridLayout_163 = QtWidgets.QGridLayout(self.tab_band_processing)
self.gridLayout_163.setObjectName("gridLayout_163")
self.tabWidget_4 = QtWidgets.QTabWidget(self.tab_band_processing)
self.tabWidget_4.setStyleSheet("")
self.tabWidget_4.setIconSize(QtCore.QSize(20, 20))
self.tabWidget_4.setDocumentMode(True)
self.tabWidget_4.setObjectName("tabWidget_4")
self.tab_bandset_combination_2 = QtWidgets.QWidget()
self.tab_bandset_combination_2.setObjectName("tab_bandset_combination_2")
self.gridLayout_62 = QtWidgets.QGridLayout(self.tab_bandset_combination_2)
self.gridLayout_62.setObjectName("gridLayout_62")
self.toolBox_band_set_combination = QtWidgets.QToolBox(self.tab_bandset_combination_2)
self.toolBox_band_set_combination.setObjectName("toolBox_band_set_combination")
self.page_29 = QtWidgets.QWidget()
self.page_29.setGeometry(QtCore.QRect(0, 0, 723, 351))
self.page_29.setObjectName("page_29")
self.gridLayout_330 = QtWidgets.QGridLayout(self.page_29)
self.gridLayout_330.setObjectName("gridLayout_330")
self.gridLayout_333 = QtWidgets.QGridLayout()
self.gridLayout_333.setObjectName("gridLayout_333")
spacerItem88 = QtWidgets.QSpacerItem(20, 40, QtWidgets.QSizePolicy.Minimum, QtWidgets.QSizePolicy.Expanding)
self.gridLayout_333.addItem(spacerItem88, 0, 2, 1, 1)
spacerItem89 = QtWidgets.QSpacerItem(40, 20, QtWidgets.QSizePolicy.Expanding, QtWidgets.QSizePolicy.Minimum)
self.gridLayout_333.addItem(spacerItem89, 2, 0, 1, 1)
self.label_253 = QtWidgets.QLabel(self.page_29)
self.label_253.setStyleSheet("background-color : #656565; color : white")
self.label_253.setFrameShape(QtWidgets.QFrame.Panel)
self.label_253.setFrameShadow(QtWidgets.QFrame.Sunken)
self.label_253.setAlignment(QtCore.Qt.AlignLeading|QtCore.Qt.AlignLeft|QtCore.Qt.AlignVCenter)
self.label_253.setObjectName("label_253")
self.gridLayout_333.addWidget(self.label_253, 1, 0, 1, 3)
self.calculateBandSetComb_toolButton = QtWidgets.QToolButton(self.page_29)
font = QtGui.QFont()
font.setBold(True)
font.setWeight(75)
self.calculateBandSetComb_toolButton.setFont(font)
self.calculateBandSetComb_toolButton.setLayoutDirection(QtCore.Qt.RightToLeft)
self.calculateBandSetComb_toolButton.setStyleSheet("margin: 0px;padding: 0px;")
self.calculateBandSetComb_toolButton.setIcon(icon64)
self.calculateBandSetComb_toolButton.setIconSize(QtCore.QSize(34, 34))
self.calculateBandSetComb_toolButton.setToolButtonStyle(QtCore.Qt.ToolButtonTextBesideIcon)
self.calculateBandSetComb_toolButton.setObjectName("calculateBandSetComb_toolButton")
self.gridLayout_333.addWidget(self.calculateBandSetComb_toolButton, 2, 2, 1, 1)
self.band_combination = QtWidgets.QToolButton(self.page_29)
font = QtGui.QFont()
font.setBold(True)
font.setWeight(75)
self.band_combination.setFont(font)
self.band_combination.setLayoutDirection(QtCore.Qt.RightToLeft)
self.band_combination.setStyleSheet("margin: 0px;padding: 0px;")
self.band_combination.setIcon(icon48)
self.band_combination.setIconSize(QtCore.QSize(34, 34))
self.band_combination.setToolButtonStyle(QtCore.Qt.ToolButtonTextBesideIcon)
self.band_combination.setObjectName("band_combination")
self.gridLayout_333.addWidget(self.band_combination, 2, 1, 1, 1)
self.gridLayout_330.addLayout(self.gridLayout_333, 2, 1, 1, 1)
self.gridLayout_331 = QtWidgets.QGridLayout()
self.gridLayout_331.setObjectName("gridLayout_331")
self.label_250 = QtWidgets.QLabel(self.page_29)
sizePolicy = QtWidgets.QSizePolicy(QtWidgets.QSizePolicy.Minimum, QtWidgets.QSizePolicy.Preferred)
sizePolicy.setHorizontalStretch(0)
sizePolicy.setVerticalStretch(0)
sizePolicy.setHeightForWidth(self.label_250.sizePolicy().hasHeightForWidth())
self.label_250.setSizePolicy(sizePolicy)
self.label_250.setMinimumSize(QtCore.QSize(229, 0))
self.label_250.setFrameShadow(QtWidgets.QFrame.Sunken)
self.label_250.setAlignment(QtCore.Qt.AlignLeading|QtCore.Qt.AlignLeft|QtCore.Qt.AlignVCenter)
self.label_250.setWordWrap(True)
self.label_250.setObjectName("label_250")
self.gridLayout_331.addWidget(self.label_250, 1, 0, 1, 2)
self.band_set_comb_spinBox = QtWidgets.QSpinBox(self.page_29)
self.band_set_comb_spinBox.setMinimum(1)
self.band_set_comb_spinBox.setMaximum(100000)
self.band_set_comb_spinBox.setObjectName("band_set_comb_spinBox")
self.gridLayout_331.addWidget(self.band_set_comb_spinBox, 1, 2, 1, 1)
spacerItem90 = QtWidgets.QSpacerItem(40, 20, QtWidgets.QSizePolicy.Expanding, QtWidgets.QSizePolicy.Minimum)
self.gridLayout_331.addItem(spacerItem90, 1, 3, 1, 1)
self.gridLayout_330.addLayout(self.gridLayout_331, 1, 1, 1, 1)
self.horizontalLayout_34 = QtWidgets.QHBoxLayout()
self.horizontalLayout_34.setObjectName("horizontalLayout_34")
self.label_72 = QtWidgets.QLabel(self.page_29)
self.label_72.setStyleSheet("background-color : #656565; color : white")
self.label_72.setFrameShape(QtWidgets.QFrame.Panel)
self.label_72.setFrameShadow(QtWidgets.QFrame.Sunken)
self.label_72.setObjectName("label_72")
self.horizontalLayout_34.addWidget(self.label_72)
self.gridLayout_330.addLayout(self.horizontalLayout_34, 0, 1, 1, 1)
self.toolBox_band_set_combination.addItem(self.page_29, "")
self.page_30 = QtWidgets.QWidget()
self.page_30.setGeometry(QtCore.QRect(0, 0, 90, 90))
self.page_30.setObjectName("page_30")
self.gridLayout_334 = QtWidgets.QGridLayout(self.page_30)
self.gridLayout_334.setObjectName("gridLayout_334")
self.gridLayout_335 = QtWidgets.QGridLayout()
self.gridLayout_335.setObjectName("gridLayout_335")
self.band_set_comb_textBrowser = QtWidgets.QTextBrowser(self.page_30)
font = QtGui.QFont()
font.setFamily("Courier 10 Pitch")
self.band_set_comb_textBrowser.setFont(font)
self.band_set_comb_textBrowser.setTabChangesFocus(True)
self.band_set_comb_textBrowser.setLineWrapMode(QtWidgets.QTextEdit.NoWrap)
self.band_set_comb_textBrowser.setTabStopWidth(120)
self.band_set_comb_textBrowser.setOpenLinks(False)
self.band_set_comb_textBrowser.setObjectName("band_set_comb_textBrowser")
self.gridLayout_335.addWidget(self.band_set_comb_textBrowser, 0, 0, 1, 1)
self.gridLayout_334.addLayout(self.gridLayout_335, 0, 0, 1, 1)
self.toolBox_band_set_combination.addItem(self.page_30, "")
self.gridLayout_62.addWidget(self.toolBox_band_set_combination, 0, 0, 1, 1)
self.tabWidget_4.addTab(self.tab_bandset_combination_2, "")
self.PCA_tab = QtWidgets.QWidget()
self.PCA_tab.setObjectName("PCA_tab")
self.gridLayout_170 = QtWidgets.QGridLayout(self.PCA_tab)
self.gridLayout_170.setObjectName("gridLayout_170")
self.toolBox_PCA = QtWidgets.QToolBox(self.PCA_tab)
self.toolBox_PCA.setStyleSheet("")
self.toolBox_PCA.setObjectName("toolBox_PCA")
self.page_16 = QtWidgets.QWidget()
self.page_16.setGeometry(QtCore.QRect(0, 0, 459, 196))
self.page_16.setObjectName("page_16")
self.gridLayout_182 = QtWidgets.QGridLayout(self.page_16)
self.gridLayout_182.setObjectName("gridLayout_182")
self.horizontalLayout_5 = QtWidgets.QHBoxLayout()
self.horizontalLayout_5.setObjectName("horizontalLayout_5")
self.label_58 = QtWidgets.QLabel(self.page_16)
self.label_58.setStyleSheet("background-color : #656565; color : white")
self.label_58.setFrameShape(QtWidgets.QFrame.Panel)
self.label_58.setFrameShadow(QtWidgets.QFrame.Sunken)
self.label_58.setObjectName("label_58")
self.horizontalLayout_5.addWidget(self.label_58)
self.gridLayout_182.addLayout(self.horizontalLayout_5, 0, 0, 1, 1)
self.gridLayout_173 = QtWidgets.QGridLayout()
self.gridLayout_173.setObjectName("gridLayout_173")
self.nodata_checkBox_4 = QtWidgets.QCheckBox(self.page_16)
self.nodata_checkBox_4.setObjectName("nodata_checkBox_4")
self.gridLayout_173.addWidget(self.nodata_checkBox_4, 2, 0, 1, 1)
self.num_comp_checkBox = QtWidgets.QCheckBox(self.page_16)
self.num_comp_checkBox.setObjectName("num_comp_checkBox")
self.gridLayout_173.addWidget(self.num_comp_checkBox, 1, 0, 1, 1)
self.nodata_spinBox_5 = QtWidgets.QSpinBox(self.page_16)
self.nodata_spinBox_5.setMinimum(-999999999)
self.nodata_spinBox_5.setMaximum(999999999)
self.nodata_spinBox_5.setProperty("value", 0)
self.nodata_spinBox_5.setObjectName("nodata_spinBox_5")
self.gridLayout_173.addWidget(self.nodata_spinBox_5, 2, 1, 1, 1)
spacerItem91 = QtWidgets.QSpacerItem(20, 40, QtWidgets.QSizePolicy.Minimum, QtWidgets.QSizePolicy.Expanding)
self.gridLayout_173.addItem(spacerItem91, 3, 4, 1, 1)
self.pca_Button = QtWidgets.QToolButton(self.page_16)
font = QtGui.QFont()
font.setBold(True)
font.setWeight(75)
self.pca_Button.setFont(font)
self.pca_Button.setLayoutDirection(QtCore.Qt.RightToLeft)
self.pca_Button.setStyleSheet("margin: 0px;padding: 0px;")
self.pca_Button.setIcon(icon64)
self.pca_Button.setIconSize(QtCore.QSize(34, 34))
self.pca_Button.setToolButtonStyle(QtCore.Qt.ToolButtonTextBesideIcon)
self.pca_Button.setObjectName("pca_Button")
self.gridLayout_173.addWidget(self.pca_Button, 5, 4, 1, 1)
self.label_254 = QtWidgets.QLabel(self.page_16)
sizePolicy = QtWidgets.QSizePolicy(QtWidgets.QSizePolicy.Minimum, QtWidgets.QSizePolicy.Preferred)
sizePolicy.setHorizontalStretch(0)
sizePolicy.setVerticalStretch(0)
sizePolicy.setHeightForWidth(self.label_254.sizePolicy().hasHeightForWidth())
self.label_254.setSizePolicy(sizePolicy)
self.label_254.setFrameShadow(QtWidgets.QFrame.Sunken)
self.label_254.setAlignment(QtCore.Qt.AlignLeading|QtCore.Qt.AlignLeft|QtCore.Qt.AlignVCenter)
self.label_254.setObjectName("label_254")
self.gridLayout_173.addWidget(self.label_254, 0, 0, 1, 1)
self.band_set_comb_spinBox_4 = QtWidgets.QSpinBox(self.page_16)
self.band_set_comb_spinBox_4.setMinimum(1)
self.band_set_comb_spinBox_4.setMaximum(100000)
self.band_set_comb_spinBox_4.setObjectName("band_set_comb_spinBox_4")
self.gridLayout_173.addWidget(self.band_set_comb_spinBox_4, 0, 1, 1, 1)
self.pca_components_spinBox = QtWidgets.QSpinBox(self.page_16)
self.pca_components_spinBox.setMinimum(2)
self.pca_components_spinBox.setMaximum(1000)
self.pca_components_spinBox.setObjectName("pca_components_spinBox")
self.gridLayout_173.addWidget(self.pca_components_spinBox, 1, 1, 1, 1)
self.label_166 = QtWidgets.QLabel(self.page_16)
self.label_166.setStyleSheet("background-color : #656565; color : white")
self.label_166.setFrameShape(QtWidgets.QFrame.Panel)
self.label_166.setFrameShadow(QtWidgets.QFrame.Sunken)
self.label_166.setAlignment(QtCore.Qt.AlignLeading|QtCore.Qt.AlignLeft|QtCore.Qt.AlignVCenter)
self.label_166.setObjectName("label_166")
self.gridLayout_173.addWidget(self.label_166, 4, 0, 1, 5)
spacerItem92 = QtWidgets.QSpacerItem(40, 20, QtWidgets.QSizePolicy.Expanding, QtWidgets.QSizePolicy.Minimum)
self.gridLayout_173.addItem(spacerItem92, 5, 2, 1, 1)
self.pca = QtWidgets.QToolButton(self.page_16)
font = QtGui.QFont()
font.setBold(True)
font.setWeight(75)
self.pca.setFont(font)
self.pca.setLayoutDirection(QtCore.Qt.RightToLeft)
self.pca.setStyleSheet("margin: 0px;padding: 0px;")
self.pca.setIcon(icon48)
self.pca.setIconSize(QtCore.QSize(34, 34))
self.pca.setToolButtonStyle(QtCore.Qt.ToolButtonTextBesideIcon)
self.pca.setObjectName("pca")
self.gridLayout_173.addWidget(self.pca, 5, 3, 1, 1)
self.gridLayout_182.addLayout(self.gridLayout_173, 1, 0, 1, 1)
self.toolBox_PCA.addItem(self.page_16, "")
self.page_17 = QtWidgets.QWidget()
self.page_17.setGeometry(QtCore.QRect(0, 0, 90, 90))
self.page_17.setObjectName("page_17")
self.gridLayout_200 = QtWidgets.QGridLayout(self.page_17)
self.gridLayout_200.setObjectName("gridLayout_200")
self.gridLayout_201 = QtWidgets.QGridLayout()
self.gridLayout_201.setObjectName("gridLayout_201")
self.report_textBrowser_2 = QtWidgets.QTextBrowser(self.page_17)
font = QtGui.QFont()
font.setFamily("Courier 10 Pitch")
self.report_textBrowser_2.setFont(font)
self.report_textBrowser_2.setTabChangesFocus(True)
self.report_textBrowser_2.setLineWrapMode(QtWidgets.QTextEdit.NoWrap)
self.report_textBrowser_2.setTabStopWidth(160)
self.report_textBrowser_2.setOpenLinks(False)
self.report_textBrowser_2.setObjectName("report_textBrowser_2")
self.gridLayout_201.addWidget(self.report_textBrowser_2, 0, 0, 1, 1)
self.gridLayout_200.addLayout(self.gridLayout_201, 0, 0, 1, 1)
self.toolBox_PCA.addItem(self.page_17, "")
self.gridLayout_170.addWidget(self.toolBox_PCA, 0, 0, 1, 1)
self.tabWidget_4.addTab(self.PCA_tab, "")
self.tab_kmeans = QtWidgets.QWidget()
self.tab_kmeans.setObjectName("tab_kmeans")
self.gridLayout_208 = QtWidgets.QGridLayout(self.tab_kmeans)
self.gridLayout_208.setObjectName("gridLayout_208")
self.toolBox_kmeans = QtWidgets.QToolBox(self.tab_kmeans)
self.toolBox_kmeans.setStyleSheet("")
self.toolBox_kmeans.setObjectName("toolBox_kmeans")
self.page_18 = QtWidgets.QWidget()
self.page_18.setGeometry(QtCore.QRect(0, 0, 764, 390))
self.page_18.setObjectName("page_18")
self.gridLayout_152 = QtWidgets.QGridLayout(self.page_18)
self.gridLayout_152.setObjectName("gridLayout_152")
self.horizontalLayout_29 = QtWidgets.QHBoxLayout()
self.horizontalLayout_29.setObjectName("horizontalLayout_29")
self.label_78 = QtWidgets.QLabel(self.page_18)
self.label_78.setStyleSheet("background-color : #656565; color : white")
self.label_78.setFrameShape(QtWidgets.QFrame.Panel)
self.label_78.setFrameShadow(QtWidgets.QFrame.Sunken)
self.label_78.setObjectName("label_78")
self.horizontalLayout_29.addWidget(self.label_78)
self.gridLayout_152.addLayout(self.horizontalLayout_29, 0, 0, 1, 1)
self.gridLayout_151 = QtWidgets.QGridLayout()
self.gridLayout_151.setObjectName("gridLayout_151")
self.isodata_radioButton = QtWidgets.QRadioButton(self.page_18)
self.isodata_radioButton.setChecked(False)
self.isodata_radioButton.setAutoExclusive(False)
self.isodata_radioButton.setObjectName("isodata_radioButton")
self.gridLayout_151.addWidget(self.isodata_radioButton, 0, 5, 1, 1)
self.band_set_comb_spinBox_5 = QtWidgets.QSpinBox(self.page_18)
self.band_set_comb_spinBox_5.setMinimum(1)
self.band_set_comb_spinBox_5.setMaximum(100000)
self.band_set_comb_spinBox_5.setObjectName("band_set_comb_spinBox_5")
self.gridLayout_151.addWidget(self.band_set_comb_spinBox_5, 0, 1, 1, 1)
self.label_230 = QtWidgets.QLabel(self.page_18)
sizePolicy = QtWidgets.QSizePolicy(QtWidgets.QSizePolicy.Minimum, QtWidgets.QSizePolicy.Preferred)
sizePolicy.setHorizontalStretch(0)
sizePolicy.setVerticalStretch(0)
sizePolicy.setHeightForWidth(self.label_230.sizePolicy().hasHeightForWidth())
self.label_230.setSizePolicy(sizePolicy)
self.label_230.setFrameShadow(QtWidgets.QFrame.Sunken)
self.label_230.setAlignment(QtCore.Qt.AlignLeading|QtCore.Qt.AlignLeft|QtCore.Qt.AlignVCenter)
self.label_230.setObjectName("label_230")
self.gridLayout_151.addWidget(self.label_230, 0, 3, 1, 1)
self.label_255 = QtWidgets.QLabel(self.page_18)
sizePolicy = QtWidgets.QSizePolicy(QtWidgets.QSizePolicy.Minimum, QtWidgets.QSizePolicy.Preferred)
sizePolicy.setHorizontalStretch(0)
sizePolicy.setVerticalStretch(0)
sizePolicy.setHeightForWidth(self.label_255.sizePolicy().hasHeightForWidth())
self.label_255.setSizePolicy(sizePolicy)
self.label_255.setFrameShadow(QtWidgets.QFrame.Sunken)
self.label_255.setAlignment(QtCore.Qt.AlignLeading|QtCore.Qt.AlignLeft|QtCore.Qt.AlignVCenter)
self.label_255.setObjectName("label_255")
self.gridLayout_151.addWidget(self.label_255, 0, 0, 1, 1)
self.kmeans_radioButton = QtWidgets.QRadioButton(self.page_18)
self.kmeans_radioButton.setChecked(True)
self.kmeans_radioButton.setAutoExclusive(False)
self.kmeans_radioButton.setObjectName("kmeans_radioButton")
self.gridLayout_151.addWidget(self.kmeans_radioButton, 0, 4, 1, 1)
spacerItem93 = QtWidgets.QSpacerItem(40, 20, QtWidgets.QSizePolicy.Expanding, QtWidgets.QSizePolicy.Minimum)
self.gridLayout_151.addItem(spacerItem93, 0, 2, 1, 1)
spacerItem94 = QtWidgets.QSpacerItem(40, 20, QtWidgets.QSizePolicy.Expanding, QtWidgets.QSizePolicy.Minimum)
self.gridLayout_151.addItem(spacerItem94, 0, 6, 1, 1)
self.gridLayout_152.addLayout(self.gridLayout_151, 1, 0, 1, 1)
self.gridLayout_125 = QtWidgets.QGridLayout()
self.gridLayout_125.setObjectName("gridLayout_125")
self.label_225 = QtWidgets.QLabel(self.page_18)
sizePolicy = QtWidgets.QSizePolicy(QtWidgets.QSizePolicy.Minimum, QtWidgets.QSizePolicy.Preferred)
sizePolicy.setHorizontalStretch(0)
sizePolicy.setVerticalStretch(0)
sizePolicy.setHeightForWidth(self.label_225.sizePolicy().hasHeightForWidth())
self.label_225.setSizePolicy(sizePolicy)
self.label_225.setFrameShadow(QtWidgets.QFrame.Sunken)
self.label_225.setAlignment(QtCore.Qt.AlignLeading|QtCore.Qt.AlignLeft|QtCore.Qt.AlignVCenter)
self.label_225.setObjectName("label_225")
self.gridLayout_125.addWidget(self.label_225, 1, 0, 1, 1)
self.thresh_doubleSpinBox = QtWidgets.QDoubleSpinBox(self.page_18)
self.thresh_doubleSpinBox.setDecimals(7)
self.thresh_doubleSpinBox.setMinimum(1e-06)
self.thresh_doubleSpinBox.setMaximum(10000000.0)
self.thresh_doubleSpinBox.setSingleStep(1e-05)
self.thresh_doubleSpinBox.setProperty("value", 0.0001)
self.thresh_doubleSpinBox.setObjectName("thresh_doubleSpinBox")
self.gridLayout_125.addWidget(self.thresh_doubleSpinBox, 0, 1, 1, 1)
self.std_dev_doubleSpinBox = QtWidgets.QDoubleSpinBox(self.page_18)
self.std_dev_doubleSpinBox.setDecimals(7)
self.std_dev_doubleSpinBox.setMinimum(1e-06)
self.std_dev_doubleSpinBox.setMaximum(1000000000.0)
self.std_dev_doubleSpinBox.setSingleStep(1e-05)
self.std_dev_doubleSpinBox.setProperty("value", 0.0001)
self.std_dev_doubleSpinBox.setObjectName("std_dev_doubleSpinBox")
self.gridLayout_125.addWidget(self.std_dev_doubleSpinBox, 2, 1, 1, 1)
self.kmean_threshold_checkBox = QtWidgets.QCheckBox(self.page_18)
self.kmean_threshold_checkBox.setChecked(True)
self.kmean_threshold_checkBox.setObjectName("kmean_threshold_checkBox")
self.gridLayout_125.addWidget(self.kmean_threshold_checkBox, 0, 0, 1, 1)
self.nodata_spinBox_9 = QtWidgets.QSpinBox(self.page_18)
self.nodata_spinBox_9.setMinimum(-999999999)
self.nodata_spinBox_9.setMaximum(999999999)
self.nodata_spinBox_9.setProperty("value", 0)
self.nodata_spinBox_9.setObjectName("nodata_spinBox_9")
self.gridLayout_125.addWidget(self.nodata_spinBox_9, 4, 1, 1, 1)
self.kmeans_iter_spinBox = QtWidgets.QSpinBox(self.page_18)
self.kmeans_iter_spinBox.setMinimum(1)
self.kmeans_iter_spinBox.setMaximum(1000)
self.kmeans_iter_spinBox.setProperty("value", 10)
self.kmeans_iter_spinBox.setObjectName("kmeans_iter_spinBox")
self.gridLayout_125.addWidget(self.kmeans_iter_spinBox, 1, 1, 1, 1)
self.label_228 = QtWidgets.QLabel(self.page_18)
sizePolicy = QtWidgets.QSizePolicy(QtWidgets.QSizePolicy.Minimum, QtWidgets.QSizePolicy.Preferred)
sizePolicy.setHorizontalStretch(0)
sizePolicy.setVerticalStretch(0)
sizePolicy.setHeightForWidth(self.label_228.sizePolicy().hasHeightForWidth())
self.label_228.setSizePolicy(sizePolicy)
self.label_228.setFrameShadow(QtWidgets.QFrame.Sunken)
self.label_228.setAlignment(QtCore.Qt.AlignLeading|QtCore.Qt.AlignLeft|QtCore.Qt.AlignVCenter)
self.label_228.setWordWrap(True)
self.label_228.setObjectName("label_228")
self.gridLayout_125.addWidget(self.label_228, 2, 0, 1, 1)
self.nodata_checkBox_8 = QtWidgets.QCheckBox(self.page_18)
self.nodata_checkBox_8.setObjectName("nodata_checkBox_8")
self.gridLayout_125.addWidget(self.nodata_checkBox_8, 4, 0, 1, 1)
self.kmeans_classes_spinBox = QtWidgets.QSpinBox(self.page_18)
self.kmeans_classes_spinBox.setMinimum(1)
self.kmeans_classes_spinBox.setMaximum(1000)
self.kmeans_classes_spinBox.setProperty("value", 10)
self.kmeans_classes_spinBox.setObjectName("kmeans_classes_spinBox")
self.gridLayout_125.addWidget(self.kmeans_classes_spinBox, 0, 3, 1, 1)
self.label_224 = QtWidgets.QLabel(self.page_18)
sizePolicy = QtWidgets.QSizePolicy(QtWidgets.QSizePolicy.Minimum, QtWidgets.QSizePolicy.Preferred)
sizePolicy.setHorizontalStretch(0)
sizePolicy.setVerticalStretch(0)
sizePolicy.setHeightForWidth(self.label_224.sizePolicy().hasHeightForWidth())
self.label_224.setSizePolicy(sizePolicy)
self.label_224.setFrameShadow(QtWidgets.QFrame.Sunken)
self.label_224.setAlignment(QtCore.Qt.AlignLeading|QtCore.Qt.AlignLeft|QtCore.Qt.AlignVCenter)
self.label_224.setObjectName("label_224")
self.gridLayout_125.addWidget(self.label_224, 0, 2, 1, 1)
spacerItem95 = QtWidgets.QSpacerItem(40, 20, QtWidgets.QSizePolicy.Expanding, QtWidgets.QSizePolicy.Minimum)
self.gridLayout_125.addItem(spacerItem95, 0, 4, 1, 1)
self.label_229 = QtWidgets.QLabel(self.page_18)
sizePolicy = QtWidgets.QSizePolicy(QtWidgets.QSizePolicy.Minimum, QtWidgets.QSizePolicy.Preferred)
sizePolicy.setHorizontalStretch(0)
sizePolicy.setVerticalStretch(0)
sizePolicy.setHeightForWidth(self.label_229.sizePolicy().hasHeightForWidth())
self.label_229.setSizePolicy(sizePolicy)
self.label_229.setFrameShadow(QtWidgets.QFrame.Sunken)
self.label_229.setAlignment(QtCore.Qt.AlignLeading|QtCore.Qt.AlignLeft|QtCore.Qt.AlignVCenter)
self.label_229.setWordWrap(True)
self.label_229.setObjectName("label_229")
self.gridLayout_125.addWidget(self.label_229, 2, 2, 1, 1)
self.min_size_class_spinBox = QtWidgets.QSpinBox(self.page_18)
self.min_size_class_spinBox.setMinimum(1)
self.min_size_class_spinBox.setMaximum(1000000)
self.min_size_class_spinBox.setProperty("value", 10)
self.min_size_class_spinBox.setObjectName("min_size_class_spinBox")
self.gridLayout_125.addWidget(self.min_size_class_spinBox, 2, 3, 1, 1)
self.gridLayout_152.addLayout(self.gridLayout_125, 2, 0, 1, 1)
self.gridLayout_231 = QtWidgets.QGridLayout()
self.gridLayout_231.setObjectName("gridLayout_231")
self.gridLayout_135 = QtWidgets.QGridLayout()
self.gridLayout_135.setObjectName("gridLayout_135")
self.min_distance_radioButton = QtWidgets.QRadioButton(self.page_18)
self.min_distance_radioButton.setChecked(True)
self.min_distance_radioButton.setAutoExclusive(False)
self.min_distance_radioButton.setObjectName("min_distance_radioButton")
self.gridLayout_135.addWidget(self.min_distance_radioButton, 2, 1, 1, 1)
self.kmean_save_siglist_checkBox = QtWidgets.QCheckBox(self.page_18)
self.kmean_save_siglist_checkBox.setObjectName("kmean_save_siglist_checkBox")
self.gridLayout_135.addWidget(self.kmean_save_siglist_checkBox, 3, 0, 1, 4)
self.label_227 = QtWidgets.QLabel(self.page_18)
sizePolicy = QtWidgets.QSizePolicy(QtWidgets.QSizePolicy.Minimum, QtWidgets.QSizePolicy.Preferred)
sizePolicy.setHorizontalStretch(0)
sizePolicy.setVerticalStretch(0)
sizePolicy.setHeightForWidth(self.label_227.sizePolicy().hasHeightForWidth())
self.label_227.setSizePolicy(sizePolicy)
self.label_227.setFrameShadow(QtWidgets.QFrame.Sunken)
self.label_227.setAlignment(QtCore.Qt.AlignLeading|QtCore.Qt.AlignLeft|QtCore.Qt.AlignVCenter)
self.label_227.setObjectName("label_227")
self.gridLayout_135.addWidget(self.label_227, 2, 0, 1, 1)
self.horizontalLayout_30 = QtWidgets.QHBoxLayout()
self.horizontalLayout_30.setObjectName("horizontalLayout_30")
self.label_104 = QtWidgets.QLabel(self.page_18)
self.label_104.setStyleSheet("background-color : #656565; color : white")
self.label_104.setFrameShape(QtWidgets.QFrame.Panel)
self.label_104.setFrameShadow(QtWidgets.QFrame.Sunken)
self.label_104.setObjectName("label_104")
self.horizontalLayout_30.addWidget(self.label_104)
self.gridLayout_135.addLayout(self.horizontalLayout_30, 0, 0, 1, 4)
self.gridLayout_10 = QtWidgets.QGridLayout()
self.gridLayout_10.setObjectName("gridLayout_10")
self.gridLayout_226 = QtWidgets.QGridLayout()
self.gridLayout_226.setObjectName("gridLayout_226")
self.kmean_siglist_radioButton = QtWidgets.QRadioButton(self.page_18)
self.kmean_siglist_radioButton.setChecked(False)
self.kmean_siglist_radioButton.setAutoExclusive(False)
self.kmean_siglist_radioButton.setObjectName("kmean_siglist_radioButton")
self.gridLayout_226.addWidget(self.kmean_siglist_radioButton, 0, 1, 1, 1)
self.kmean_randomsiglist_radioButton = QtWidgets.QRadioButton(self.page_18)
self.kmean_randomsiglist_radioButton.setChecked(False)
self.kmean_randomsiglist_radioButton.setAutoExclusive(False)
self.kmean_randomsiglist_radioButton.setObjectName("kmean_randomsiglist_radioButton")
self.gridLayout_226.addWidget(self.kmean_randomsiglist_radioButton, 0, 2, 1, 1)
self.kmean_minmax_radioButton = QtWidgets.QRadioButton(self.page_18)
self.kmean_minmax_radioButton.setChecked(True)
self.kmean_minmax_radioButton.setAutoExclusive(False)
self.kmean_minmax_radioButton.setObjectName("kmean_minmax_radioButton")
self.gridLayout_226.addWidget(self.kmean_minmax_radioButton, 0, 0, 1, 1)
self.gridLayout_10.addLayout(self.gridLayout_226, 1, 0, 1, 1)
self.gridLayout_135.addLayout(self.gridLayout_10, 1, 0, 1, 4)
self.spectral_angle_map_radioButton = QtWidgets.QRadioButton(self.page_18)
self.spectral_angle_map_radioButton.setChecked(False)
self.spectral_angle_map_radioButton.setAutoExclusive(False)
self.spectral_angle_map_radioButton.setObjectName("spectral_angle_map_radioButton")
self.gridLayout_135.addWidget(self.spectral_angle_map_radioButton, 2, 2, 1, 1)
self.gridLayout_231.addLayout(self.gridLayout_135, 0, 0, 1, 4)
self.label_179 = QtWidgets.QLabel(self.page_18)
self.label_179.setStyleSheet("background-color : #656565; color : white")
self.label_179.setFrameShape(QtWidgets.QFrame.Panel)
self.label_179.setFrameShadow(QtWidgets.QFrame.Sunken)
self.label_179.setAlignment(QtCore.Qt.AlignLeading|QtCore.Qt.AlignLeft|QtCore.Qt.AlignVCenter)
self.label_179.setObjectName("label_179")
self.gridLayout_231.addWidget(self.label_179, 2, 0, 1, 4)
spacerItem96 = QtWidgets.QSpacerItem(40, 20, QtWidgets.QSizePolicy.Expanding, QtWidgets.QSizePolicy.Minimum)
self.gridLayout_231.addItem(spacerItem96, 3, 0, 1, 2)
self.kmeans_Button = QtWidgets.QToolButton(self.page_18)
font = QtGui.QFont()
font.setBold(True)
font.setWeight(75)
self.kmeans_Button.setFont(font)
self.kmeans_Button.setLayoutDirection(QtCore.Qt.RightToLeft)
self.kmeans_Button.setStyleSheet("margin: 0px;padding: 0px;")
self.kmeans_Button.setIcon(icon64)
self.kmeans_Button.setIconSize(QtCore.QSize(34, 34))
self.kmeans_Button.setToolButtonStyle(QtCore.Qt.ToolButtonTextBesideIcon)
self.kmeans_Button.setObjectName("kmeans_Button")
self.gridLayout_231.addWidget(self.kmeans_Button, 3, 3, 1, 1)
self.clustering = QtWidgets.QToolButton(self.page_18)
font = QtGui.QFont()
font.setBold(True)
font.setWeight(75)
self.clustering.setFont(font)
self.clustering.setLayoutDirection(QtCore.Qt.RightToLeft)
self.clustering.setStyleSheet("margin: 0px;padding: 0px;")
self.clustering.setIcon(icon48)
self.clustering.setIconSize(QtCore.QSize(34, 34))
self.clustering.setToolButtonStyle(QtCore.Qt.ToolButtonTextBesideIcon)
self.clustering.setObjectName("clustering")
self.gridLayout_231.addWidget(self.clustering, 3, 2, 1, 1)
spacerItem97 = QtWidgets.QSpacerItem(38, 0, QtWidgets.QSizePolicy.Minimum, QtWidgets.QSizePolicy.Expanding)
self.gridLayout_231.addItem(spacerItem97, 1, 3, 1, 1)
self.gridLayout_152.addLayout(self.gridLayout_231, 3, 0, 1, 1)
self.toolBox_kmeans.addItem(self.page_18, "")
self.page_23 = QtWidgets.QWidget()
self.page_23.setGeometry(QtCore.QRect(0, 0, 90, 90))
self.page_23.setObjectName("page_23")
self.gridLayout_235 = QtWidgets.QGridLayout(self.page_23)
self.gridLayout_235.setObjectName("gridLayout_235")
self.gridLayout_236 = QtWidgets.QGridLayout()
self.gridLayout_236.setObjectName("gridLayout_236")
self.report_textBrowser_3 = QtWidgets.QTextBrowser(self.page_23)
font = QtGui.QFont()
font.setFamily("Courier 10 Pitch")
self.report_textBrowser_3.setFont(font)
self.report_textBrowser_3.setTabChangesFocus(True)
self.report_textBrowser_3.setLineWrapMode(QtWidgets.QTextEdit.NoWrap)
self.report_textBrowser_3.setTabStopWidth(160)
self.report_textBrowser_3.setOpenLinks(False)
self.report_textBrowser_3.setObjectName("report_textBrowser_3")
self.gridLayout_236.addWidget(self.report_textBrowser_3, 0, 0, 1, 1)
self.gridLayout_235.addLayout(self.gridLayout_236, 0, 0, 1, 1)
self.toolBox_kmeans.addItem(self.page_23, "")
self.gridLayout_208.addWidget(self.toolBox_kmeans, 0, 0, 1, 1)
self.tabWidget_4.addTab(self.tab_kmeans, "")
self.tab_spectral_dist = QtWidgets.QWidget()
self.tab_spectral_dist.setObjectName("tab_spectral_dist")
self.gridLayout_154 = QtWidgets.QGridLayout(self.tab_spectral_dist)
self.gridLayout_154.setObjectName("gridLayout_154")
self.gridLayout_149 = QtWidgets.QGridLayout()
self.gridLayout_149.setObjectName("gridLayout_149")
self.min_distance_radioButton_2 = QtWidgets.QRadioButton(self.tab_spectral_dist)
self.min_distance_radioButton_2.setChecked(True)
self.min_distance_radioButton_2.setAutoExclusive(False)
self.min_distance_radioButton_2.setObjectName("min_distance_radioButton_2")
self.gridLayout_149.addWidget(self.min_distance_radioButton_2, 2, 1, 1, 1)
self.label_231 = QtWidgets.QLabel(self.tab_spectral_dist)
sizePolicy = QtWidgets.QSizePolicy(QtWidgets.QSizePolicy.Minimum, QtWidgets.QSizePolicy.Preferred)
sizePolicy.setHorizontalStretch(0)
sizePolicy.setVerticalStretch(0)
sizePolicy.setHeightForWidth(self.label_231.sizePolicy().hasHeightForWidth())
self.label_231.setSizePolicy(sizePolicy)
self.label_231.setFrameShadow(QtWidgets.QFrame.Sunken)
self.label_231.setAlignment(QtCore.Qt.AlignLeading|QtCore.Qt.AlignLeft|QtCore.Qt.AlignVCenter)
self.label_231.setObjectName("label_231")
self.gridLayout_149.addWidget(self.label_231, 2, 0, 1, 1)
self.spectral_angle_map_radioButton_2 = QtWidgets.QRadioButton(self.tab_spectral_dist)
self.spectral_angle_map_radioButton_2.setChecked(False)
self.spectral_angle_map_radioButton_2.setAutoExclusive(False)
self.spectral_angle_map_radioButton_2.setObjectName("spectral_angle_map_radioButton_2")
self.gridLayout_149.addWidget(self.spectral_angle_map_radioButton_2, 2, 2, 1, 1)
self.horizontalLayout_32 = QtWidgets.QHBoxLayout()
self.horizontalLayout_32.setObjectName("horizontalLayout_32")
self.horizontalLayout_31 = QtWidgets.QHBoxLayout()
self.horizontalLayout_31.setObjectName("horizontalLayout_31")
self.label_137 = QtWidgets.QLabel(self.tab_spectral_dist)
self.label_137.setStyleSheet("background-color : #656565; color : white")
self.label_137.setFrameShape(QtWidgets.QFrame.Panel)
self.label_137.setFrameShadow(QtWidgets.QFrame.Sunken)
self.label_137.setObjectName("label_137")
self.horizontalLayout_31.addWidget(self.label_137)
self.horizontalLayout_32.addLayout(self.horizontalLayout_31)
self.gridLayout_149.addLayout(self.horizontalLayout_32, 0, 0, 1, 4)
self.distance_threshold_checkBox = QtWidgets.QCheckBox(self.tab_spectral_dist)
self.distance_threshold_checkBox.setChecked(True)
self.distance_threshold_checkBox.setObjectName("distance_threshold_checkBox")
self.gridLayout_149.addWidget(self.distance_threshold_checkBox, 3, 0, 1, 1)
self.thresh_doubleSpinBox_2 = QtWidgets.QDoubleSpinBox(self.tab_spectral_dist)
self.thresh_doubleSpinBox_2.setDecimals(7)
self.thresh_doubleSpinBox_2.setMinimum(1e-06)
self.thresh_doubleSpinBox_2.setMaximum(1000.0)
self.thresh_doubleSpinBox_2.setSingleStep(1.0)
self.thresh_doubleSpinBox_2.setProperty("value", 0.1)
self.thresh_doubleSpinBox_2.setObjectName("thresh_doubleSpinBox_2")
self.gridLayout_149.addWidget(self.thresh_doubleSpinBox_2, 3, 1, 1, 1)
self.gridLayout_41 = QtWidgets.QGridLayout()
self.gridLayout_41.setObjectName("gridLayout_41")
self.label_256 = QtWidgets.QLabel(self.tab_spectral_dist)
sizePolicy = QtWidgets.QSizePolicy(QtWidgets.QSizePolicy.Minimum, QtWidgets.QSizePolicy.Preferred)
sizePolicy.setHorizontalStretch(0)
sizePolicy.setVerticalStretch(0)
sizePolicy.setHeightForWidth(self.label_256.sizePolicy().hasHeightForWidth())
self.label_256.setSizePolicy(sizePolicy)
self.label_256.setFrameShadow(QtWidgets.QFrame.Sunken)
self.label_256.setAlignment(QtCore.Qt.AlignLeading|QtCore.Qt.AlignLeft|QtCore.Qt.AlignVCenter)
self.label_256.setObjectName("label_256")
self.gridLayout_41.addWidget(self.label_256, 0, 0, 1, 1)
self.band_set_comb_spinBox_7 = QtWidgets.QSpinBox(self.tab_spectral_dist)
self.band_set_comb_spinBox_7.setMinimum(1)
self.band_set_comb_spinBox_7.setMaximum(100000)
self.band_set_comb_spinBox_7.setProperty("value", 2)
self.band_set_comb_spinBox_7.setObjectName("band_set_comb_spinBox_7")
self.gridLayout_41.addWidget(self.band_set_comb_spinBox_7, 1, 1, 1, 1)
self.label_257 = QtWidgets.QLabel(self.tab_spectral_dist)
sizePolicy = QtWidgets.QSizePolicy(QtWidgets.QSizePolicy.Minimum, QtWidgets.QSizePolicy.Preferred)
sizePolicy.setHorizontalStretch(0)
sizePolicy.setVerticalStretch(0)
sizePolicy.setHeightForWidth(self.label_257.sizePolicy().hasHeightForWidth())
self.label_257.setSizePolicy(sizePolicy)
self.label_257.setFrameShadow(QtWidgets.QFrame.Sunken)
self.label_257.setAlignment(QtCore.Qt.AlignLeading|QtCore.Qt.AlignLeft|QtCore.Qt.AlignVCenter)
self.label_257.setObjectName("label_257")
self.gridLayout_41.addWidget(self.label_257, 1, 0, 1, 1)
self.band_set_comb_spinBox_6 = QtWidgets.QSpinBox(self.tab_spectral_dist)
self.band_set_comb_spinBox_6.setMinimum(1)
self.band_set_comb_spinBox_6.setMaximum(100000)
self.band_set_comb_spinBox_6.setObjectName("band_set_comb_spinBox_6")
self.gridLayout_41.addWidget(self.band_set_comb_spinBox_6, 0, 1, 1, 1)
spacerItem98 = QtWidgets.QSpacerItem(40, 20, QtWidgets.QSizePolicy.Expanding, QtWidgets.QSizePolicy.Minimum)
self.gridLayout_41.addItem(spacerItem98, 1, 2, 1, 1)
self.gridLayout_149.addLayout(self.gridLayout_41, 1, 0, 1, 4)
self.gridLayout_154.addLayout(self.gridLayout_149, 0, 0, 1, 1)
self.gridLayout_233 = QtWidgets.QGridLayout()
self.gridLayout_233.setObjectName("gridLayout_233")
self.label_183 = QtWidgets.QLabel(self.tab_spectral_dist)
self.label_183.setStyleSheet("background-color : #656565; color : white")
self.label_183.setFrameShape(QtWidgets.QFrame.Panel)
self.label_183.setFrameShadow(QtWidgets.QFrame.Sunken)
self.label_183.setAlignment(QtCore.Qt.AlignLeading|QtCore.Qt.AlignLeft|QtCore.Qt.AlignVCenter)
self.label_183.setObjectName("label_183")
self.gridLayout_233.addWidget(self.label_183, 1, 0, 1, 4)
spacerItem99 = QtWidgets.QSpacerItem(40, 20, QtWidgets.QSizePolicy.Expanding, QtWidgets.QSizePolicy.Minimum)
self.gridLayout_233.addItem(spacerItem99, 2, 0, 1, 2)
spacerItem100 = QtWidgets.QSpacerItem(38, 37, QtWidgets.QSizePolicy.Minimum, QtWidgets.QSizePolicy.Expanding)
self.gridLayout_233.addItem(spacerItem100, 0, 3, 1, 1)
self.spectral_distance_bandsets_toolButton = QtWidgets.QToolButton(self.tab_spectral_dist)
font = QtGui.QFont()
font.setBold(True)
font.setWeight(75)
self.spectral_distance_bandsets_toolButton.setFont(font)
self.spectral_distance_bandsets_toolButton.setLayoutDirection(QtCore.Qt.RightToLeft)
self.spectral_distance_bandsets_toolButton.setStyleSheet("margin: 0px;padding: 0px;")
self.spectral_distance_bandsets_toolButton.setIcon(icon64)
self.spectral_distance_bandsets_toolButton.setIconSize(QtCore.QSize(34, 34))
self.spectral_distance_bandsets_toolButton.setToolButtonStyle(QtCore.Qt.ToolButtonTextBesideIcon)
self.spectral_distance_bandsets_toolButton.setObjectName("spectral_distance_bandsets_toolButton")
self.gridLayout_233.addWidget(self.spectral_distance_bandsets_toolButton, 2, 3, 1, 1)
self.spectral_distance = QtWidgets.QToolButton(self.tab_spectral_dist)
font = QtGui.QFont()
font.setBold(True)
font.setWeight(75)
self.spectral_distance.setFont(font)
self.spectral_distance.setLayoutDirection(QtCore.Qt.RightToLeft)
self.spectral_distance.setStyleSheet("margin: 0px;padding: 0px;")
self.spectral_distance.setIcon(icon48)
self.spectral_distance.setIconSize(QtCore.QSize(34, 34))
self.spectral_distance.setToolButtonStyle(QtCore.Qt.ToolButtonTextBesideIcon)
self.spectral_distance.setObjectName("spectral_distance")
self.gridLayout_233.addWidget(self.spectral_distance, 2, 2, 1, 1)
self.gridLayout_154.addLayout(self.gridLayout_233, 1, 0, 1, 1)
self.tabWidget_4.addTab(self.tab_spectral_dist, "")
self.tab_classification = QtWidgets.QWidget()
self.tab_classification.setObjectName("tab_classification")
self.gridLayout_260 = QtWidgets.QGridLayout(self.tab_classification)
self.gridLayout_260.setObjectName("gridLayout_260")
self.gridLayout_218 = QtWidgets.QGridLayout()
self.gridLayout_218.setObjectName("gridLayout_218")
self.horizontalLayout_55 = QtWidgets.QHBoxLayout()
self.horizontalLayout_55.setObjectName("horizontalLayout_55")
self.label_32 = QtWidgets.QLabel(self.tab_classification)
sizePolicy = QtWidgets.QSizePolicy(QtWidgets.QSizePolicy.Maximum, QtWidgets.QSizePolicy.Preferred)
sizePolicy.setHorizontalStretch(0)
sizePolicy.setVerticalStretch(0)
sizePolicy.setHeightForWidth(self.label_32.sizePolicy().hasHeightForWidth())
self.label_32.setSizePolicy(sizePolicy)
self.label_32.setObjectName("label_32")
self.horizontalLayout_55.addWidget(self.label_32)
self.macroclass_checkBox = QtWidgets.QCheckBox(self.tab_classification)
sizePolicy = QtWidgets.QSizePolicy(QtWidgets.QSizePolicy.Maximum, QtWidgets.QSizePolicy.Fixed)
sizePolicy.setHorizontalStretch(0)
sizePolicy.setVerticalStretch(0)
sizePolicy.setHeightForWidth(self.macroclass_checkBox.sizePolicy().hasHeightForWidth())
self.macroclass_checkBox.setSizePolicy(sizePolicy)
self.macroclass_checkBox.setObjectName("macroclass_checkBox")
self.horizontalLayout_55.addWidget(self.macroclass_checkBox)
self.class_checkBox = QtWidgets.QCheckBox(self.tab_classification)
sizePolicy = QtWidgets.QSizePolicy(QtWidgets.QSizePolicy.Maximum, QtWidgets.QSizePolicy.Fixed)
sizePolicy.setHorizontalStretch(0)
sizePolicy.setVerticalStretch(0)
sizePolicy.setHeightForWidth(self.class_checkBox.sizePolicy().hasHeightForWidth())
self.class_checkBox.setSizePolicy(sizePolicy)
self.class_checkBox.setObjectName("class_checkBox")
self.horizontalLayout_55.addWidget(self.class_checkBox)
spacerItem101 = QtWidgets.QSpacerItem(40, 20, QtWidgets.QSizePolicy.Expanding, QtWidgets.QSizePolicy.Minimum)
self.horizontalLayout_55.addItem(spacerItem101)
self.algorithm_weight_button = QtWidgets.QToolButton(self.tab_classification)
self.algorithm_weight_button.setStyleSheet("margin: 0px;padding: 0px;")
self.algorithm_weight_button.setIcon(icon3)
self.algorithm_weight_button.setIconSize(QtCore.QSize(22, 22))
self.algorithm_weight_button.setObjectName("algorithm_weight_button")
self.horizontalLayout_55.addWidget(self.algorithm_weight_button)
self.gridLayout_218.addLayout(self.horizontalLayout_55, 2, 0, 1, 4)
self.algorithm_combo = QtWidgets.QComboBox(self.tab_classification)
sizePolicy = QtWidgets.QSizePolicy(QtWidgets.QSizePolicy.Expanding, QtWidgets.QSizePolicy.Fixed)
sizePolicy.setHorizontalStretch(0)
sizePolicy.setVerticalStretch(0)
sizePolicy.setHeightForWidth(self.algorithm_combo.sizePolicy().hasHeightForWidth())
self.algorithm_combo.setSizePolicy(sizePolicy)
self.algorithm_combo.setMinimumSize(QtCore.QSize(100, 0))
self.algorithm_combo.setObjectName("algorithm_combo")
self.algorithm_combo.addItem("")
self.algorithm_combo.addItem("")
self.algorithm_combo.addItem("")
self.gridLayout_218.addWidget(self.algorithm_combo, 4, 0, 1, 3)
self.band_set_comb_spinBox_12 = QtWidgets.QSpinBox(self.tab_classification)
self.band_set_comb_spinBox_12.setMinimum(1)
self.band_set_comb_spinBox_12.setMaximum(100000)
self.band_set_comb_spinBox_12.setObjectName("band_set_comb_spinBox_12")
self.gridLayout_218.addWidget(self.band_set_comb_spinBox_12, 1, 1, 1, 1)
self.label_261 = QtWidgets.QLabel(self.tab_classification)
sizePolicy = QtWidgets.QSizePolicy(QtWidgets.QSizePolicy.Minimum, QtWidgets.QSizePolicy.Preferred)
sizePolicy.setHorizontalStretch(0)
sizePolicy.setVerticalStretch(0)
sizePolicy.setHeightForWidth(self.label_261.sizePolicy().hasHeightForWidth())
self.label_261.setSizePolicy(sizePolicy)
self.label_261.setFrameShadow(QtWidgets.QFrame.Sunken)
self.label_261.setAlignment(QtCore.Qt.AlignLeading|QtCore.Qt.AlignLeft|QtCore.Qt.AlignVCenter)
self.label_261.setObjectName("label_261")
self.gridLayout_218.addWidget(self.label_261, 1, 0, 1, 1)
self.label_240 = QtWidgets.QLabel(self.tab_classification)
self.label_240.setStyleSheet("background-color : #656565; color : white")
self.label_240.setFrameShape(QtWidgets.QFrame.Panel)
self.label_240.setFrameShadow(QtWidgets.QFrame.Sunken)
self.label_240.setAlignment(QtCore.Qt.AlignLeading|QtCore.Qt.AlignLeft|QtCore.Qt.AlignVCenter)
self.label_240.setObjectName("label_240")
self.gridLayout_218.addWidget(self.label_240, 3, 0, 1, 4)
self.gridLayout_255 = QtWidgets.QGridLayout()
self.gridLayout_255.setObjectName("gridLayout_255")
self.alg_threshold_SpinBox = QtWidgets.QDoubleSpinBox(self.tab_classification)
sizePolicy = QtWidgets.QSizePolicy(QtWidgets.QSizePolicy.Maximum, QtWidgets.QSizePolicy.Fixed)
sizePolicy.setHorizontalStretch(0)
sizePolicy.setVerticalStretch(0)
sizePolicy.setHeightForWidth(self.alg_threshold_SpinBox.sizePolicy().hasHeightForWidth())
self.alg_threshold_SpinBox.setSizePolicy(sizePolicy)
self.alg_threshold_SpinBox.setMaximumSize(QtCore.QSize(100, 16777215))
self.alg_threshold_SpinBox.setAlignment(QtCore.Qt.AlignRight|QtCore.Qt.AlignTrailing|QtCore.Qt.AlignVCenter)
self.alg_threshold_SpinBox.setDecimals(4)
self.alg_threshold_SpinBox.setMaximum(10000.0)
self.alg_threshold_SpinBox.setObjectName("alg_threshold_SpinBox")
self.gridLayout_255.addWidget(self.alg_threshold_SpinBox, 0, 1, 1, 1)
self.algorithm_threshold_button = QtWidgets.QToolButton(self.tab_classification)
self.algorithm_threshold_button.setStyleSheet("margin: 0px;padding: 0px;")
self.algorithm_threshold_button.setIcon(icon9)
self.algorithm_threshold_button.setIconSize(QtCore.QSize(22, 22))
self.algorithm_threshold_button.setObjectName("algorithm_threshold_button")
self.gridLayout_255.addWidget(self.algorithm_threshold_button, 0, 3, 1, 1)
self.label_234 = QtWidgets.QLabel(self.tab_classification)
sizePolicy = QtWidgets.QSizePolicy(QtWidgets.QSizePolicy.Minimum, QtWidgets.QSizePolicy.Minimum)
sizePolicy.setHorizontalStretch(0)
sizePolicy.setVerticalStretch(0)
sizePolicy.setHeightForWidth(self.label_234.sizePolicy().hasHeightForWidth())
self.label_234.setSizePolicy(sizePolicy)
self.label_234.setMaximumSize(QtCore.QSize(100, 16777215))
self.label_234.setObjectName("label_234")
self.gridLayout_255.addWidget(self.label_234, 0, 0, 1, 1)
spacerItem102 = QtWidgets.QSpacerItem(40, 20, QtWidgets.QSizePolicy.Expanding, QtWidgets.QSizePolicy.Minimum)
self.gridLayout_255.addItem(spacerItem102, 0, 2, 1, 1)
self.gridLayout_218.addLayout(self.gridLayout_255, 4, 3, 1, 1)
self.label_243 = QtWidgets.QLabel(self.tab_classification)
self.label_243.setStyleSheet("background-color : #656565; color : white")
self.label_243.setFrameShape(QtWidgets.QFrame.Panel)
self.label_243.setFrameShadow(QtWidgets.QFrame.Sunken)
self.label_243.setAlignment(QtCore.Qt.AlignLeading|QtCore.Qt.AlignLeft|QtCore.Qt.AlignVCenter)
self.label_243.setObjectName("label_243")
self.gridLayout_218.addWidget(self.label_243, 0, 0, 1, 4)
spacerItem103 = QtWidgets.QSpacerItem(40, 20, QtWidgets.QSizePolicy.Expanding, QtWidgets.QSizePolicy.Minimum)
self.gridLayout_218.addItem(spacerItem103, 1, 3, 1, 1)
self.gridLayout_260.addLayout(self.gridLayout_218, 0, 0, 1, 1)
self.verticalLayout_4 = QtWidgets.QVBoxLayout()
self.verticalLayout_4.setObjectName("verticalLayout_4")
self.gridLayout_225 = QtWidgets.QGridLayout()
self.gridLayout_225.setObjectName("gridLayout_225")
self.LC_signature_checkBox = QtWidgets.QCheckBox(self.tab_classification)
self.LC_signature_checkBox.setObjectName("LC_signature_checkBox")
self.gridLayout_225.addWidget(self.LC_signature_checkBox, 1, 1, 1, 1)
self.label_235 = QtWidgets.QLabel(self.tab_classification)
sizePolicy = QtWidgets.QSizePolicy(QtWidgets.QSizePolicy.Maximum, QtWidgets.QSizePolicy.Preferred)
sizePolicy.setHorizontalStretch(0)
sizePolicy.setVerticalStretch(0)
sizePolicy.setHeightForWidth(self.label_235.sizePolicy().hasHeightForWidth())
self.label_235.setSizePolicy(sizePolicy)
self.label_235.setObjectName("label_235")
self.gridLayout_225.addWidget(self.label_235, 1, 0, 1, 1)
self.LC_signature_button = QtWidgets.QToolButton(self.tab_classification)
self.LC_signature_button.setStyleSheet("margin: 0px;padding: 0px;")
self.LC_signature_button.setText("")
self.LC_signature_button.setIcon(icon6)
self.LC_signature_button.setIconSize(QtCore.QSize(22, 22))
self.LC_signature_button.setObjectName("LC_signature_button")
self.gridLayout_225.addWidget(self.LC_signature_button, 1, 5, 1, 1)
self.LCS_leave_unclassified_checkBox = QtWidgets.QCheckBox(self.tab_classification)
self.LCS_leave_unclassified_checkBox.setObjectName("LCS_leave_unclassified_checkBox")
self.gridLayout_225.addWidget(self.LCS_leave_unclassified_checkBox, 1, 3, 1, 1)
self.label_241 = QtWidgets.QLabel(self.tab_classification)
self.label_241.setStyleSheet("background-color : #656565; color : white")
self.label_241.setFrameShape(QtWidgets.QFrame.Panel)
self.label_241.setFrameShadow(QtWidgets.QFrame.Sunken)
self.label_241.setAlignment(QtCore.Qt.AlignLeading|QtCore.Qt.AlignLeft|QtCore.Qt.AlignVCenter)
self.label_241.setObjectName("label_241")
self.gridLayout_225.addWidget(self.label_241, 0, 0, 1, 6)
self.LCS_class_algorithm_checkBox = QtWidgets.QCheckBox(self.tab_classification)
self.LCS_class_algorithm_checkBox.setObjectName("LCS_class_algorithm_checkBox")
self.gridLayout_225.addWidget(self.LCS_class_algorithm_checkBox, 1, 2, 1, 1)
self.verticalLayout_4.addLayout(self.gridLayout_225)
self.label_242 = QtWidgets.QLabel(self.tab_classification)
self.label_242.setStyleSheet("background-color : #656565; color : white")
self.label_242.setFrameShape(QtWidgets.QFrame.Panel)
self.label_242.setFrameShadow(QtWidgets.QFrame.Sunken)
self.label_242.setAlignment(QtCore.Qt.AlignLeading|QtCore.Qt.AlignLeft|QtCore.Qt.AlignVCenter)
self.label_242.setObjectName("label_242")
self.verticalLayout_4.addWidget(self.label_242)
self.gridLayout_239 = QtWidgets.QGridLayout()
self.gridLayout_239.setSpacing(4)
self.gridLayout_239.setObjectName("gridLayout_239")
self.resetQmlButton = QtWidgets.QToolButton(self.tab_classification)
self.resetQmlButton.setStyleSheet("margin: 0px;padding: 0px;")
self.resetQmlButton.setIcon(icon59)
self.resetQmlButton.setIconSize(QtCore.QSize(22, 22))
self.resetQmlButton.setObjectName("resetQmlButton")
self.gridLayout_239.addWidget(self.resetQmlButton, 0, 3, 1, 1)
self.label_238 = QtWidgets.QLabel(self.tab_classification)
sizePolicy = QtWidgets.QSizePolicy(QtWidgets.QSizePolicy.Maximum, QtWidgets.QSizePolicy.Preferred)
sizePolicy.setHorizontalStretch(0)
sizePolicy.setVerticalStretch(0)
sizePolicy.setHeightForWidth(self.label_238.sizePolicy().hasHeightForWidth())
self.label_238.setSizePolicy(sizePolicy)
self.label_238.setObjectName("label_238")
self.gridLayout_239.addWidget(self.label_238, 0, 0, 1, 1)
self.qml_Button = QtWidgets.QToolButton(self.tab_classification)
self.qml_Button.setStyleSheet("margin: 0px;padding: 0px;")
self.qml_Button.setIcon(icon65)
self.qml_Button.setIconSize(QtCore.QSize(22, 22))
self.qml_Button.setObjectName("qml_Button")
self.gridLayout_239.addWidget(self.qml_Button, 0, 2, 1, 1)
self.qml_lineEdit = QtWidgets.QLineEdit(self.tab_classification)
self.qml_lineEdit.setEnabled(False)
self.qml_lineEdit.setObjectName("qml_lineEdit")
self.gridLayout_239.addWidget(self.qml_lineEdit, 0, 1, 1, 1)
self.verticalLayout_4.addLayout(self.gridLayout_239)
self.gridLayout_241 = QtWidgets.QGridLayout()
self.gridLayout_241.setObjectName("gridLayout_241")
self.mask_checkBox = QtWidgets.QCheckBox(self.tab_classification)
self.mask_checkBox.setObjectName("mask_checkBox")
self.gridLayout_241.addWidget(self.mask_checkBox, 0, 0, 1, 1)
self.resetMaskButton = QtWidgets.QToolButton(self.tab_classification)
self.resetMaskButton.setStyleSheet("margin: 0px;padding: 0px;")
self.resetMaskButton.setIcon(icon59)
self.resetMaskButton.setIconSize(QtCore.QSize(22, 22))
self.resetMaskButton.setObjectName("resetMaskButton")
self.gridLayout_241.addWidget(self.resetMaskButton, 0, 3, 1, 1)
self.mask_lineEdit = QtWidgets.QLineEdit(self.tab_classification)
self.mask_lineEdit.setEnabled(False)
self.mask_lineEdit.setInputMethodHints(QtCore.Qt.ImhUrlCharactersOnly)
self.mask_lineEdit.setText("")
self.mask_lineEdit.setObjectName("mask_lineEdit")
self.gridLayout_241.addWidget(self.mask_lineEdit, 0, 1, 1, 2)
self.verticalLayout_4.addLayout(self.gridLayout_241)
self.horizontalLayout_56 = QtWidgets.QHBoxLayout()
self.horizontalLayout_56.setObjectName("horizontalLayout_56")
self.vector_output_checkBox = QtWidgets.QCheckBox(self.tab_classification)
self.vector_output_checkBox.setObjectName("vector_output_checkBox")
self.horizontalLayout_56.addWidget(self.vector_output_checkBox)
self.report_checkBox = QtWidgets.QCheckBox(self.tab_classification)
self.report_checkBox.setObjectName("report_checkBox")
self.horizontalLayout_56.addWidget(self.report_checkBox)
self.alg_files_checkBox = QtWidgets.QCheckBox(self.tab_classification)
self.alg_files_checkBox.setObjectName("alg_files_checkBox")
self.horizontalLayout_56.addWidget(self.alg_files_checkBox)
self.verticalLayout_4.addLayout(self.horizontalLayout_56)
self.gridLayout_249 = QtWidgets.QGridLayout()
self.gridLayout_249.setObjectName("gridLayout_249")
self.button_classification = QtWidgets.QToolButton(self.tab_classification)
font = QtGui.QFont()
font.setBold(True)
font.setWeight(75)
self.button_classification.setFont(font)
self.button_classification.setLayoutDirection(QtCore.Qt.RightToLeft)
self.button_classification.setStyleSheet("margin: 0px;padding: 0px;")
self.button_classification.setIcon(icon64)
self.button_classification.setIconSize(QtCore.QSize(34, 34))
self.button_classification.setToolButtonStyle(QtCore.Qt.ToolButtonTextBesideIcon)
self.button_classification.setObjectName("button_classification")
self.gridLayout_249.addWidget(self.button_classification, 2, 2, 1, 1)
self.label_239 = QtWidgets.QLabel(self.tab_classification)
self.label_239.setStyleSheet("background-color : #656565; color : white")
self.label_239.setFrameShape(QtWidgets.QFrame.Panel)
self.label_239.setFrameShadow(QtWidgets.QFrame.Sunken)
self.label_239.setAlignment(QtCore.Qt.AlignLeading|QtCore.Qt.AlignLeft|QtCore.Qt.AlignVCenter)
self.label_239.setObjectName("label_239")
self.gridLayout_249.addWidget(self.label_239, 1, 0, 1, 3)
spacerItem104 = QtWidgets.QSpacerItem(20, 40, QtWidgets.QSizePolicy.Minimum, QtWidgets.QSizePolicy.Expanding)
self.gridLayout_249.addItem(spacerItem104, 0, 2, 1, 1)
spacerItem105 = QtWidgets.QSpacerItem(40, 20, QtWidgets.QSizePolicy.Expanding, QtWidgets.QSizePolicy.Minimum)
self.gridLayout_249.addItem(spacerItem105, 2, 0, 1, 1)
self.classification = QtWidgets.QToolButton(self.tab_classification)
font = QtGui.QFont()
font.setBold(True)
font.setWeight(75)
self.classification.setFont(font)
self.classification.setLayoutDirection(QtCore.Qt.RightToLeft)
self.classification.setStyleSheet("margin: 0px;padding: 0px;")
self.classification.setIcon(icon48)
self.classification.setIconSize(QtCore.QSize(34, 34))
self.classification.setToolButtonStyle(QtCore.Qt.ToolButtonTextBesideIcon)
self.classification.setObjectName("classification")
self.gridLayout_249.addWidget(self.classification, 2, 1, 1, 1)
self.verticalLayout_4.addLayout(self.gridLayout_249)
self.gridLayout_260.addLayout(self.verticalLayout_4, 1, 0, 1, 1)
self.tabWidget_4.addTab(self.tab_classification, "")
self.tab_random_forest = QtWidgets.QWidget()
self.tab_random_forest.setObjectName("tab_random_forest")
self.gridLayout_288 = QtWidgets.QGridLayout(self.tab_random_forest)
self.gridLayout_288.setObjectName("gridLayout_288")
self.toolBox_random_forest = QtWidgets.QToolBox(self.tab_random_forest)
self.toolBox_random_forest.setStyleSheet("")
self.toolBox_random_forest.setObjectName("toolBox_random_forest")
self.page_21 = QtWidgets.QWidget()
self.page_21.setGeometry(QtCore.QRect(0, 0, 634, 327))
self.page_21.setObjectName("page_21")
self.gridLayout_287 = QtWidgets.QGridLayout(self.page_21)
self.gridLayout_287.setObjectName("gridLayout_287")
self.gridLayout_280 = QtWidgets.QGridLayout()
self.gridLayout_280.setObjectName("gridLayout_280")
self.horizontalLayout_58 = QtWidgets.QHBoxLayout()
self.horizontalLayout_58.setObjectName("horizontalLayout_58")
self.label_233 = QtWidgets.QLabel(self.page_21)
sizePolicy = QtWidgets.QSizePolicy(QtWidgets.QSizePolicy.Maximum, QtWidgets.QSizePolicy.Preferred)
sizePolicy.setHorizontalStretch(0)
sizePolicy.setVerticalStretch(0)
sizePolicy.setHeightForWidth(self.label_233.sizePolicy().hasHeightForWidth())
self.label_233.setSizePolicy(sizePolicy)
self.label_233.setObjectName("label_233")
self.horizontalLayout_58.addWidget(self.label_233)
self.macroclass_checkBox_rf = QtWidgets.QCheckBox(self.page_21)
sizePolicy = QtWidgets.QSizePolicy(QtWidgets.QSizePolicy.Maximum, QtWidgets.QSizePolicy.Fixed)
sizePolicy.setHorizontalStretch(0)
sizePolicy.setVerticalStretch(0)
sizePolicy.setHeightForWidth(self.macroclass_checkBox_rf.sizePolicy().hasHeightForWidth())
self.macroclass_checkBox_rf.setSizePolicy(sizePolicy)
self.macroclass_checkBox_rf.setChecked(True)
self.macroclass_checkBox_rf.setObjectName("macroclass_checkBox_rf")
self.horizontalLayout_58.addWidget(self.macroclass_checkBox_rf)
self.class_checkBox_rf = QtWidgets.QCheckBox(self.page_21)
sizePolicy = QtWidgets.QSizePolicy(QtWidgets.QSizePolicy.Maximum, QtWidgets.QSizePolicy.Fixed)
sizePolicy.setHorizontalStretch(0)
sizePolicy.setVerticalStretch(0)
sizePolicy.setHeightForWidth(self.class_checkBox_rf.sizePolicy().hasHeightForWidth())
self.class_checkBox_rf.setSizePolicy(sizePolicy)
self.class_checkBox_rf.setObjectName("class_checkBox_rf")
self.horizontalLayout_58.addWidget(self.class_checkBox_rf)
spacerItem106 = QtWidgets.QSpacerItem(40, 20, QtWidgets.QSizePolicy.Expanding, QtWidgets.QSizePolicy.Minimum)
self.horizontalLayout_58.addItem(spacerItem106)
self.gridLayout_280.addLayout(self.horizontalLayout_58, 2, 0, 1, 3)
self.band_set_comb_spinBox_13 = QtWidgets.QSpinBox(self.page_21)
self.band_set_comb_spinBox_13.setMinimum(1)
self.band_set_comb_spinBox_13.setMaximum(100000)
self.band_set_comb_spinBox_13.setObjectName("band_set_comb_spinBox_13")
self.gridLayout_280.addWidget(self.band_set_comb_spinBox_13, 1, 1, 1, 1)
self.label_262 = QtWidgets.QLabel(self.page_21)
sizePolicy = QtWidgets.QSizePolicy(QtWidgets.QSizePolicy.Minimum, QtWidgets.QSizePolicy.Preferred)
sizePolicy.setHorizontalStretch(0)
sizePolicy.setVerticalStretch(0)
sizePolicy.setHeightForWidth(self.label_262.sizePolicy().hasHeightForWidth())
self.label_262.setSizePolicy(sizePolicy)
self.label_262.setFrameShadow(QtWidgets.QFrame.Sunken)
self.label_262.setAlignment(QtCore.Qt.AlignLeading|QtCore.Qt.AlignLeft|QtCore.Qt.AlignVCenter)
self.label_262.setObjectName("label_262")
self.gridLayout_280.addWidget(self.label_262, 1, 0, 1, 1)
self.label_245 = QtWidgets.QLabel(self.page_21)
self.label_245.setStyleSheet("background-color : #656565; color : white")
self.label_245.setFrameShape(QtWidgets.QFrame.Panel)
self.label_245.setFrameShadow(QtWidgets.QFrame.Sunken)
self.label_245.setAlignment(QtCore.Qt.AlignLeading|QtCore.Qt.AlignLeft|QtCore.Qt.AlignVCenter)
self.label_245.setObjectName("label_245")
self.gridLayout_280.addWidget(self.label_245, 0, 0, 1, 3)
self.gridLayout_284 = QtWidgets.QGridLayout()
self.gridLayout_284.setObjectName("gridLayout_284")
self.number_trees_SpinBox = QtWidgets.QDoubleSpinBox(self.page_21)
sizePolicy = QtWidgets.QSizePolicy(QtWidgets.QSizePolicy.Maximum, QtWidgets.QSizePolicy.Fixed)
sizePolicy.setHorizontalStretch(0)
sizePolicy.setVerticalStretch(0)
sizePolicy.setHeightForWidth(self.number_trees_SpinBox.sizePolicy().hasHeightForWidth())
self.number_trees_SpinBox.setSizePolicy(sizePolicy)
self.number_trees_SpinBox.setMaximumSize(QtCore.QSize(100, 16777215))
self.number_trees_SpinBox.setAlignment(QtCore.Qt.AlignRight|QtCore.Qt.AlignTrailing|QtCore.Qt.AlignVCenter)
self.number_trees_SpinBox.setDecimals(0)
self.number_trees_SpinBox.setMinimum(1.0)
self.number_trees_SpinBox.setMaximum(1000000.0)
self.number_trees_SpinBox.setSingleStep(1.0)
self.number_trees_SpinBox.setProperty("value", 10.0)
self.number_trees_SpinBox.setObjectName("number_trees_SpinBox")
self.gridLayout_284.addWidget(self.number_trees_SpinBox, 0, 1, 1, 1)
self.label_237 = QtWidgets.QLabel(self.page_21)
sizePolicy = QtWidgets.QSizePolicy(QtWidgets.QSizePolicy.Minimum, QtWidgets.QSizePolicy.Minimum)
sizePolicy.setHorizontalStretch(0)
sizePolicy.setVerticalStretch(0)
sizePolicy.setHeightForWidth(self.label_237.sizePolicy().hasHeightForWidth())
self.label_237.setSizePolicy(sizePolicy)
self.label_237.setMaximumSize(QtCore.QSize(150, 16777215))
self.label_237.setObjectName("label_237")
self.gridLayout_284.addWidget(self.label_237, 0, 0, 1, 1)
spacerItem107 = QtWidgets.QSpacerItem(40, 20, QtWidgets.QSizePolicy.Expanding, QtWidgets.QSizePolicy.Minimum)
self.gridLayout_284.addItem(spacerItem107, 0, 2, 1, 1)
self.gridLayout_280.addLayout(self.gridLayout_284, 4, 0, 1, 3)
self.gridLayout_282 = QtWidgets.QGridLayout()
self.gridLayout_282.setObjectName("gridLayout_282")
self.number_training_samples_SpinBox = QtWidgets.QDoubleSpinBox(self.page_21)
sizePolicy = QtWidgets.QSizePolicy(QtWidgets.QSizePolicy.Maximum, QtWidgets.QSizePolicy.Fixed)
sizePolicy.setHorizontalStretch(0)
sizePolicy.setVerticalStretch(0)
sizePolicy.setHeightForWidth(self.number_training_samples_SpinBox.sizePolicy().hasHeightForWidth())
self.number_training_samples_SpinBox.setSizePolicy(sizePolicy)
self.number_training_samples_SpinBox.setMaximumSize(QtCore.QSize(100, 16777215))
self.number_training_samples_SpinBox.setAlignment(QtCore.Qt.AlignRight|QtCore.Qt.AlignTrailing|QtCore.Qt.AlignVCenter)
self.number_training_samples_SpinBox.setDecimals(0)
self.number_training_samples_SpinBox.setMinimum(1.0)
self.number_training_samples_SpinBox.setMaximum(1000000.0)
self.number_training_samples_SpinBox.setSingleStep(100.0)
self.number_training_samples_SpinBox.setProperty("value", 5000.0)
self.number_training_samples_SpinBox.setObjectName("number_training_samples_SpinBox")
self.gridLayout_282.addWidget(self.number_training_samples_SpinBox, 0, 1, 1, 1)
self.label_236 = QtWidgets.QLabel(self.page_21)
sizePolicy = QtWidgets.QSizePolicy(QtWidgets.QSizePolicy.Minimum, QtWidgets.QSizePolicy.Minimum)
sizePolicy.setHorizontalStretch(0)
sizePolicy.setVerticalStretch(0)
sizePolicy.setHeightForWidth(self.label_236.sizePolicy().hasHeightForWidth())
self.label_236.setSizePolicy(sizePolicy)
self.label_236.setMaximumSize(QtCore.QSize(208, 16777215))
self.label_236.setObjectName("label_236")
self.gridLayout_282.addWidget(self.label_236, 0, 0, 1, 1)
spacerItem108 = QtWidgets.QSpacerItem(40, 20, QtWidgets.QSizePolicy.Expanding, QtWidgets.QSizePolicy.Minimum)
self.gridLayout_282.addItem(spacerItem108, 0, 2, 1, 1)
self.gridLayout_280.addLayout(self.gridLayout_282, 3, 0, 1, 3)
spacerItem109 = QtWidgets.QSpacerItem(40, 20, QtWidgets.QSizePolicy.Expanding, QtWidgets.QSizePolicy.Minimum)
self.gridLayout_280.addItem(spacerItem109, 1, 2, 1, 1)
self.gridLayout_287.addLayout(self.gridLayout_280, 0, 0, 1, 1)
self.gridLayout_285 = QtWidgets.QGridLayout()
self.gridLayout_285.setObjectName("gridLayout_285")
self.evaluate_classifier_checkBox = QtWidgets.QCheckBox(self.page_21)
sizePolicy = QtWidgets.QSizePolicy(QtWidgets.QSizePolicy.Maximum, QtWidgets.QSizePolicy.Fixed)
sizePolicy.setHorizontalStretch(0)
sizePolicy.setVerticalStretch(0)
sizePolicy.setHeightForWidth(self.evaluate_classifier_checkBox.sizePolicy().hasHeightForWidth())
self.evaluate_classifier_checkBox.setSizePolicy(sizePolicy)
self.evaluate_classifier_checkBox.setObjectName("evaluate_classifier_checkBox")
self.gridLayout_285.addWidget(self.evaluate_classifier_checkBox, 0, 0, 1, 1)
self.evaluate_feature_power_set_checkBox = QtWidgets.QCheckBox(self.page_21)
sizePolicy = QtWidgets.QSizePolicy(QtWidgets.QSizePolicy.Maximum, QtWidgets.QSizePolicy.Fixed)
sizePolicy.setHorizontalStretch(0)
sizePolicy.setVerticalStretch(0)
sizePolicy.setHeightForWidth(self.evaluate_feature_power_set_checkBox.sizePolicy().hasHeightForWidth())
self.evaluate_feature_power_set_checkBox.setSizePolicy(sizePolicy)
self.evaluate_feature_power_set_checkBox.setObjectName("evaluate_feature_power_set_checkBox")
self.gridLayout_285.addWidget(self.evaluate_feature_power_set_checkBox, 0, 1, 1, 1)
spacerItem110 = QtWidgets.QSpacerItem(40, 20, QtWidgets.QSizePolicy.Expanding, QtWidgets.QSizePolicy.Minimum)
self.gridLayout_285.addItem(spacerItem110, 0, 6, 1, 1)
self.label_248 = QtWidgets.QLabel(self.page_21)
sizePolicy = QtWidgets.QSizePolicy(QtWidgets.QSizePolicy.Minimum, QtWidgets.QSizePolicy.Minimum)
sizePolicy.setHorizontalStretch(0)
sizePolicy.setVerticalStretch(0)
sizePolicy.setHeightForWidth(self.label_248.sizePolicy().hasHeightForWidth())
self.label_248.setSizePolicy(sizePolicy)
self.label_248.setMaximumSize(QtCore.QSize(100, 16777215))
self.label_248.setObjectName("label_248")
self.gridLayout_285.addWidget(self.label_248, 0, 4, 1, 1)
self.rf_power_min_SpinBox = QtWidgets.QDoubleSpinBox(self.page_21)
sizePolicy = QtWidgets.QSizePolicy(QtWidgets.QSizePolicy.Maximum, QtWidgets.QSizePolicy.Fixed)
sizePolicy.setHorizontalStretch(0)
sizePolicy.setVerticalStretch(0)
sizePolicy.setHeightForWidth(self.rf_power_min_SpinBox.sizePolicy().hasHeightForWidth())
self.rf_power_min_SpinBox.setSizePolicy(sizePolicy)
self.rf_power_min_SpinBox.setMaximumSize(QtCore.QSize(100, 16777215))
self.rf_power_min_SpinBox.setAlignment(QtCore.Qt.AlignRight|QtCore.Qt.AlignTrailing|QtCore.Qt.AlignVCenter)
self.rf_power_min_SpinBox.setDecimals(0)
self.rf_power_min_SpinBox.setMinimum(1.0)
self.rf_power_min_SpinBox.setMaximum(1000000.0)
self.rf_power_min_SpinBox.setSingleStep(1.0)
self.rf_power_min_SpinBox.setProperty("value", 2.0)
self.rf_power_min_SpinBox.setObjectName("rf_power_min_SpinBox")
self.gridLayout_285.addWidget(self.rf_power_min_SpinBox, 0, 3, 1, 1)
self.label_247 = QtWidgets.QLabel(self.page_21)
sizePolicy = QtWidgets.QSizePolicy(QtWidgets.QSizePolicy.Minimum, QtWidgets.QSizePolicy.Minimum)
sizePolicy.setHorizontalStretch(0)
sizePolicy.setVerticalStretch(0)
sizePolicy.setHeightForWidth(self.label_247.sizePolicy().hasHeightForWidth())
self.label_247.setSizePolicy(sizePolicy)
self.label_247.setMaximumSize(QtCore.QSize(100, 16777215))
self.label_247.setObjectName("label_247")
self.gridLayout_285.addWidget(self.label_247, 0, 2, 1, 1)
self.rf_power_max_SpinBox = QtWidgets.QDoubleSpinBox(self.page_21)
sizePolicy = QtWidgets.QSizePolicy(QtWidgets.QSizePolicy.Maximum, QtWidgets.QSizePolicy.Fixed)
sizePolicy.setHorizontalStretch(0)
sizePolicy.setVerticalStretch(0)
sizePolicy.setHeightForWidth(self.rf_power_max_SpinBox.sizePolicy().hasHeightForWidth())
self.rf_power_max_SpinBox.setSizePolicy(sizePolicy)
self.rf_power_max_SpinBox.setMaximumSize(QtCore.QSize(100, 16777215))
self.rf_power_max_SpinBox.setAlignment(QtCore.Qt.AlignRight|QtCore.Qt.AlignTrailing|QtCore.Qt.AlignVCenter)
self.rf_power_max_SpinBox.setDecimals(0)
self.rf_power_max_SpinBox.setMinimum(1.0)
self.rf_power_max_SpinBox.setMaximum(1000000.0)
self.rf_power_max_SpinBox.setSingleStep(1.0)
self.rf_power_max_SpinBox.setProperty("value", 7.0)
self.rf_power_max_SpinBox.setObjectName("rf_power_max_SpinBox")
self.gridLayout_285.addWidget(self.rf_power_max_SpinBox, 0, 5, 1, 1)
self.save_classifier_checkBox = QtWidgets.QCheckBox(self.page_21)
sizePolicy = QtWidgets.QSizePolicy(QtWidgets.QSizePolicy.Maximum, QtWidgets.QSizePolicy.Fixed)
sizePolicy.setHorizontalStretch(0)
sizePolicy.setVerticalStretch(0)
sizePolicy.setHeightForWidth(self.save_classifier_checkBox.sizePolicy().hasHeightForWidth())
self.save_classifier_checkBox.setSizePolicy(sizePolicy)
self.save_classifier_checkBox.setObjectName("save_classifier_checkBox")
self.gridLayout_285.addWidget(self.save_classifier_checkBox, 1, 0, 1, 1)
self.gridLayout_287.addLayout(self.gridLayout_285, 1, 0, 1, 1)
self.gridLayout_286 = QtWidgets.QGridLayout()
self.gridLayout_286.setObjectName("gridLayout_286")
self.label_244 = QtWidgets.QLabel(self.page_21)
sizePolicy = QtWidgets.QSizePolicy(QtWidgets.QSizePolicy.Maximum, QtWidgets.QSizePolicy.Preferred)
sizePolicy.setHorizontalStretch(0)
sizePolicy.setVerticalStretch(0)
sizePolicy.setHeightForWidth(self.label_244.sizePolicy().hasHeightForWidth())
self.label_244.setSizePolicy(sizePolicy)
self.label_244.setObjectName("label_244")
self.gridLayout_286.addWidget(self.label_244, 0, 0, 1, 1)
self.classifier_Button = QtWidgets.QToolButton(self.page_21)
self.classifier_Button.setStyleSheet("margin: 0px;padding: 0px;")
self.classifier_Button.setIcon(icon65)
self.classifier_Button.setIconSize(QtCore.QSize(22, 22))
self.classifier_Button.setObjectName("classifier_Button")
self.gridLayout_286.addWidget(self.classifier_Button, 0, 2, 1, 1)
self.classifier_lineEdit_ = QtWidgets.QLineEdit(self.page_21)
self.classifier_lineEdit_.setEnabled(False)
self.classifier_lineEdit_.setObjectName("classifier_lineEdit_")
self.gridLayout_286.addWidget(self.classifier_lineEdit_, 0, 1, 1, 1)
self.resetClassifierButton = QtWidgets.QToolButton(self.page_21)
self.resetClassifierButton.setStyleSheet("margin: 0px;padding: 0px;")
self.resetClassifierButton.setIcon(icon59)
self.resetClassifierButton.setIconSize(QtCore.QSize(22, 22))
self.resetClassifierButton.setObjectName("resetClassifierButton")
self.gridLayout_286.addWidget(self.resetClassifierButton, 0, 3, 1, 1)
self.gridLayout_287.addLayout(self.gridLayout_286, 2, 0, 1, 1)
self.gridLayout_283 = QtWidgets.QGridLayout()
self.gridLayout_283.setObjectName("gridLayout_283")
self.button_random_forest = QtWidgets.QToolButton(self.page_21)
font = QtGui.QFont()
font.setBold(True)
font.setWeight(75)
self.button_random_forest.setFont(font)
self.button_random_forest.setLayoutDirection(QtCore.Qt.RightToLeft)
self.button_random_forest.setStyleSheet("margin: 0px;padding: 0px;")
self.button_random_forest.setIcon(icon64)
self.button_random_forest.setIconSize(QtCore.QSize(34, 34))
self.button_random_forest.setToolButtonStyle(QtCore.Qt.ToolButtonTextBesideIcon)
self.button_random_forest.setObjectName("button_random_forest")
self.gridLayout_283.addWidget(self.button_random_forest, 2, 2, 1, 1)
self.label_246 = QtWidgets.QLabel(self.page_21)
self.label_246.setStyleSheet("background-color : #656565; color : white")
self.label_246.setFrameShape(QtWidgets.QFrame.Panel)
self.label_246.setFrameShadow(QtWidgets.QFrame.Sunken)
self.label_246.setAlignment(QtCore.Qt.AlignLeading|QtCore.Qt.AlignLeft|QtCore.Qt.AlignVCenter)
self.label_246.setObjectName("label_246")
self.gridLayout_283.addWidget(self.label_246, 1, 0, 1, 3)
spacerItem111 = QtWidgets.QSpacerItem(20, 40, QtWidgets.QSizePolicy.Minimum, QtWidgets.QSizePolicy.Expanding)
self.gridLayout_283.addItem(spacerItem111, 0, 2, 1, 1)
spacerItem112 = QtWidgets.QSpacerItem(40, 20, QtWidgets.QSizePolicy.Expanding, QtWidgets.QSizePolicy.Minimum)
self.gridLayout_283.addItem(spacerItem112, 2, 0, 1, 1)
self.random_forest = QtWidgets.QToolButton(self.page_21)
font = QtGui.QFont()
font.setBold(True)
font.setWeight(75)
self.random_forest.setFont(font)
self.random_forest.setLayoutDirection(QtCore.Qt.RightToLeft)
self.random_forest.setStyleSheet("margin: 0px;padding: 0px;")
self.random_forest.setIcon(icon48)
self.random_forest.setIconSize(QtCore.QSize(34, 34))
self.random_forest.setToolButtonStyle(QtCore.Qt.ToolButtonTextBesideIcon)
self.random_forest.setObjectName("random_forest")
self.gridLayout_283.addWidget(self.random_forest, 2, 1, 1, 1)
self.gridLayout_287.addLayout(self.gridLayout_283, 3, 0, 1, 1)
self.toolBox_random_forest.addItem(self.page_21, "")
self.page_25 = QtWidgets.QWidget()
self.page_25.setGeometry(QtCore.QRect(0, 0, 90, 90))
self.page_25.setObjectName("page_25")
self.gridLayout_293 = QtWidgets.QGridLayout(self.page_25)
self.gridLayout_293.setObjectName("gridLayout_293")
self.gridLayout_294 = QtWidgets.QGridLayout()
self.gridLayout_294.setObjectName("gridLayout_294")
self.report_textBrowser_5 = QtWidgets.QTextBrowser(self.page_25)
font = QtGui.QFont()
font.setFamily("Courier 10 Pitch")
self.report_textBrowser_5.setFont(font)
self.report_textBrowser_5.setTabChangesFocus(True)
self.report_textBrowser_5.setLineWrapMode(QtWidgets.QTextEdit.NoWrap)
self.report_textBrowser_5.setTabStopWidth(160)
self.report_textBrowser_5.setOpenLinks(False)
self.report_textBrowser_5.setObjectName("report_textBrowser_5")
self.gridLayout_294.addWidget(self.report_textBrowser_5, 0, 0, 1, 1)
self.gridLayout_293.addLayout(self.gridLayout_294, 0, 0, 1, 1)
self.toolBox_random_forest.addItem(self.page_25, "")
self.gridLayout_288.addWidget(self.toolBox_random_forest, 0, 0, 1, 1)
self.tabWidget_4.addTab(self.tab_random_forest, "")
self.gridLayout_163.addWidget(self.tabWidget_4, 0, 0, 1, 1)
self.SCP_tabs.addTab(self.tab_band_processing, "")
self.tab_postProcessing = QtWidgets.QWidget()
self.tab_postProcessing.setObjectName("tab_postProcessing")
self.gridLayout_552 = QtWidgets.QGridLayout(self.tab_postProcessing)
self.gridLayout_552.setObjectName("gridLayout_552")
self.tabWidget_2 = QtWidgets.QTabWidget(self.tab_postProcessing)
self.tabWidget_2.setStyleSheet("")
self.tabWidget_2.setIconSize(QtCore.QSize(20, 20))
self.tabWidget_2.setDocumentMode(True)
self.tabWidget_2.setObjectName("tabWidget_2")
self.tab_accuracy = QtWidgets.QWidget()
self.tab_accuracy.setObjectName("tab_accuracy")
self.gridLayout_184 = QtWidgets.QGridLayout(self.tab_accuracy)
self.gridLayout_184.setObjectName("gridLayout_184")
self.toolBox_accuracy = QtWidgets.QToolBox(self.tab_accuracy)
self.toolBox_accuracy.setObjectName("toolBox_accuracy")
self.page_10 = QtWidgets.QWidget()
self.page_10.setGeometry(QtCore.QRect(0, 0, 723, 351))
self.page_10.setObjectName("page_10")
self.gridLayout_36 = QtWidgets.QGridLayout(self.page_10)
self.gridLayout_36.setObjectName("gridLayout_36")
self.gridLayout_33 = QtWidgets.QGridLayout()
self.gridLayout_33.setObjectName("gridLayout_33")
self.label_33 = QtWidgets.QLabel(self.page_10)
sizePolicy = QtWidgets.QSizePolicy(QtWidgets.QSizePolicy.Minimum, QtWidgets.QSizePolicy.Preferred)
sizePolicy.setHorizontalStretch(0)
sizePolicy.setVerticalStretch(0)
sizePolicy.setHeightForWidth(self.label_33.sizePolicy().hasHeightForWidth())
self.label_33.setSizePolicy(sizePolicy)
self.label_33.setMinimumSize(QtCore.QSize(229, 0))
self.label_33.setFrameShadow(QtWidgets.QFrame.Sunken)
self.label_33.setAlignment(QtCore.Qt.AlignLeading|QtCore.Qt.AlignLeft|QtCore.Qt.AlignVCenter)
self.label_33.setObjectName("label_33")
self.gridLayout_33.addWidget(self.label_33, 1, 0, 1, 1)
self.label_34 = QtWidgets.QLabel(self.page_10)
sizePolicy = QtWidgets.QSizePolicy(QtWidgets.QSizePolicy.Minimum, QtWidgets.QSizePolicy.Preferred)
sizePolicy.setHorizontalStretch(0)
sizePolicy.setVerticalStretch(0)
sizePolicy.setHeightForWidth(self.label_34.sizePolicy().hasHeightForWidth())
self.label_34.setSizePolicy(sizePolicy)
self.label_34.setFrameShadow(QtWidgets.QFrame.Sunken)
self.label_34.setAlignment(QtCore.Qt.AlignLeading|QtCore.Qt.AlignLeft|QtCore.Qt.AlignVCenter)
self.label_34.setObjectName("label_34")
self.gridLayout_33.addWidget(self.label_34, 2, 0, 1, 1)
self.classification_name_combo = QtWidgets.QComboBox(self.page_10)
sizePolicy = QtWidgets.QSizePolicy(QtWidgets.QSizePolicy.Preferred, QtWidgets.QSizePolicy.Fixed)
sizePolicy.setHorizontalStretch(1)
sizePolicy.setVerticalStretch(0)
sizePolicy.setHeightForWidth(self.classification_name_combo.sizePolicy().hasHeightForWidth())
self.classification_name_combo.setSizePolicy(sizePolicy)
self.classification_name_combo.setObjectName("classification_name_combo")
self.gridLayout_33.addWidget(self.classification_name_combo, 1, 1, 1, 3)
self.buttonReload_shape_4 = QtWidgets.QToolButton(self.page_10)
self.buttonReload_shape_4.setStyleSheet("margin: 0px;padding: 0px;")
self.buttonReload_shape_4.setIcon(icon55)
self.buttonReload_shape_4.setIconSize(QtCore.QSize(22, 22))
self.buttonReload_shape_4.setObjectName("buttonReload_shape_4")
self.gridLayout_33.addWidget(self.buttonReload_shape_4, 2, 4, 1, 1)
self.toolButton_reload_4 = QtWidgets.QToolButton(self.page_10)
self.toolButton_reload_4.setStyleSheet("margin: 0px;padding: 0px;")
self.toolButton_reload_4.setIcon(icon55)
self.toolButton_reload_4.setIconSize(QtCore.QSize(22, 22))
self.toolButton_reload_4.setObjectName("toolButton_reload_4")
self.gridLayout_33.addWidget(self.toolButton_reload_4, 1, 4, 1, 1)
self.reference_name_combo = QtWidgets.QComboBox(self.page_10)
sizePolicy = QtWidgets.QSizePolicy(QtWidgets.QSizePolicy.Preferred, QtWidgets.QSizePolicy.Fixed)
sizePolicy.setHorizontalStretch(1)
sizePolicy.setVerticalStretch(0)
sizePolicy.setHeightForWidth(self.reference_name_combo.sizePolicy().hasHeightForWidth())
self.reference_name_combo.setSizePolicy(sizePolicy)
self.reference_name_combo.setObjectName("reference_name_combo")
self.gridLayout_33.addWidget(self.reference_name_combo, 2, 1, 1, 3)
self.horizontalLayout_36 = QtWidgets.QHBoxLayout()
self.horizontalLayout_36.setObjectName("horizontalLayout_36")
self.label_145 = QtWidgets.QLabel(self.page_10)
self.label_145.setStyleSheet("background-color : #656565; color : white")
self.label_145.setFrameShape(QtWidgets.QFrame.Panel)
self.label_145.setFrameShadow(QtWidgets.QFrame.Sunken)
self.label_145.setObjectName("label_145")
self.horizontalLayout_36.addWidget(self.label_145)
self.gridLayout_33.addLayout(self.horizontalLayout_36, 0, 0, 1, 5)
self.label_82 = QtWidgets.QLabel(self.page_10)
sizePolicy = QtWidgets.QSizePolicy(QtWidgets.QSizePolicy.MinimumExpanding, QtWidgets.QSizePolicy.Preferred)
sizePolicy.setHorizontalStretch(0)
sizePolicy.setVerticalStretch(0)
sizePolicy.setHeightForWidth(self.label_82.sizePolicy().hasHeightForWidth())
self.label_82.setSizePolicy(sizePolicy)
self.label_82.setMinimumSize(QtCore.QSize(6, 0))
self.label_82.setMaximumSize(QtCore.QSize(100, 200))
self.label_82.setFrameShadow(QtWidgets.QFrame.Sunken)
self.label_82.setAlignment(QtCore.Qt.AlignLeading|QtCore.Qt.AlignLeft|QtCore.Qt.AlignVCenter)
self.label_82.setObjectName("label_82")
self.gridLayout_33.addWidget(self.label_82, 3, 1, 1, 1)
self.class_field_comboBox = QtWidgets.QComboBox(self.page_10)
sizePolicy = QtWidgets.QSizePolicy(QtWidgets.QSizePolicy.MinimumExpanding, QtWidgets.QSizePolicy.Preferred)
sizePolicy.setHorizontalStretch(1)
sizePolicy.setVerticalStretch(0)
sizePolicy.setHeightForWidth(self.class_field_comboBox.sizePolicy().hasHeightForWidth())
self.class_field_comboBox.setSizePolicy(sizePolicy)
self.class_field_comboBox.setObjectName("class_field_comboBox")
self.gridLayout_33.addWidget(self.class_field_comboBox, 3, 2, 1, 2)
self.horizontalLayout_67 = QtWidgets.QHBoxLayout()
self.horizontalLayout_67.setObjectName("horizontalLayout_67")
self.nodata_checkBox_11 = QtWidgets.QCheckBox(self.page_10)
self.nodata_checkBox_11.setObjectName("nodata_checkBox_11")
self.horizontalLayout_67.addWidget(self.nodata_checkBox_11)
self.nodata_spinBox_15 = QtWidgets.QSpinBox(self.page_10)
self.nodata_spinBox_15.setMinimum(-2147483647)
self.nodata_spinBox_15.setMaximum(2147483647)
self.nodata_spinBox_15.setProperty("value", 0)
self.nodata_spinBox_15.setObjectName("nodata_spinBox_15")
self.horizontalLayout_67.addWidget(self.nodata_spinBox_15)
spacerItem113 = QtWidgets.QSpacerItem(40, 20, QtWidgets.QSizePolicy.Expanding, QtWidgets.QSizePolicy.Minimum)
self.horizontalLayout_67.addItem(spacerItem113)
self.gridLayout_33.addLayout(self.horizontalLayout_67, 4, 0, 1, 5)
self.gridLayout_36.addLayout(self.gridLayout_33, 0, 0, 1, 1)
self.gridLayout_25 = QtWidgets.QGridLayout()
self.gridLayout_25.setObjectName("gridLayout_25")
spacerItem114 = QtWidgets.QSpacerItem(20, 40, QtWidgets.QSizePolicy.Minimum, QtWidgets.QSizePolicy.Expanding)
self.gridLayout_25.addItem(spacerItem114, 0, 2, 1, 1)
spacerItem115 = QtWidgets.QSpacerItem(40, 20, QtWidgets.QSizePolicy.Expanding, QtWidgets.QSizePolicy.Minimum)
self.gridLayout_25.addItem(spacerItem115, 2, 0, 1, 1)
self.label_168 = QtWidgets.QLabel(self.page_10)
self.label_168.setStyleSheet("background-color : #656565; color : white")
self.label_168.setFrameShape(QtWidgets.QFrame.Panel)
self.label_168.setFrameShadow(QtWidgets.QFrame.Sunken)
self.label_168.setAlignment(QtCore.Qt.AlignLeading|QtCore.Qt.AlignLeft|QtCore.Qt.AlignVCenter)
self.label_168.setObjectName("label_168")
self.gridLayout_25.addWidget(self.label_168, 1, 0, 1, 3)
self.calculateMatrix_toolButton = QtWidgets.QToolButton(self.page_10)
font = QtGui.QFont()
font.setBold(True)
font.setWeight(75)
self.calculateMatrix_toolButton.setFont(font)
self.calculateMatrix_toolButton.setLayoutDirection(QtCore.Qt.RightToLeft)
self.calculateMatrix_toolButton.setStyleSheet("margin: 0px;padding: 0px;")
self.calculateMatrix_toolButton.setIcon(icon64)
self.calculateMatrix_toolButton.setIconSize(QtCore.QSize(34, 34))
self.calculateMatrix_toolButton.setToolButtonStyle(QtCore.Qt.ToolButtonTextBesideIcon)
self.calculateMatrix_toolButton.setObjectName("calculateMatrix_toolButton")
self.gridLayout_25.addWidget(self.calculateMatrix_toolButton, 2, 2, 1, 1)
self.accuracy = QtWidgets.QToolButton(self.page_10)
font = QtGui.QFont()
font.setBold(True)
font.setWeight(75)
self.accuracy.setFont(font)
self.accuracy.setLayoutDirection(QtCore.Qt.RightToLeft)
self.accuracy.setStyleSheet("margin: 0px;padding: 0px;")
self.accuracy.setIcon(icon48)
self.accuracy.setIconSize(QtCore.QSize(34, 34))
self.accuracy.setToolButtonStyle(QtCore.Qt.ToolButtonTextBesideIcon)
self.accuracy.setObjectName("accuracy")
self.gridLayout_25.addWidget(self.accuracy, 2, 1, 1, 1)
self.gridLayout_36.addLayout(self.gridLayout_25, 1, 0, 1, 1)
self.toolBox_accuracy.addItem(self.page_10, "")
self.page_11 = QtWidgets.QWidget()
self.page_11.setGeometry(QtCore.QRect(0, 0, 90, 90))
self.page_11.setObjectName("page_11")
self.gridLayout_35 = QtWidgets.QGridLayout(self.page_11)
self.gridLayout_35.setObjectName("gridLayout_35")
self.gridLayout_34 = QtWidgets.QGridLayout()
self.gridLayout_34.setObjectName("gridLayout_34")
self.error_matrix_textBrowser = QtWidgets.QTextBrowser(self.page_11)
font = QtGui.QFont()
font.setFamily("Courier 10 Pitch")
self.error_matrix_textBrowser.setFont(font)
self.error_matrix_textBrowser.setTabChangesFocus(True)
self.error_matrix_textBrowser.setLineWrapMode(QtWidgets.QTextEdit.NoWrap)
self.error_matrix_textBrowser.setTabStopWidth(120)
self.error_matrix_textBrowser.setOpenLinks(False)
self.error_matrix_textBrowser.setObjectName("error_matrix_textBrowser")
self.gridLayout_34.addWidget(self.error_matrix_textBrowser, 0, 0, 1, 1)
self.gridLayout_35.addLayout(self.gridLayout_34, 0, 0, 1, 1)
self.toolBox_accuracy.addItem(self.page_11, "")
self.gridLayout_184.addWidget(self.toolBox_accuracy, 0, 0, 1, 1)
self.tabWidget_2.addTab(self.tab_accuracy, "")
self.tab_landCoverChange = QtWidgets.QWidget()
self.tab_landCoverChange.setObjectName("tab_landCoverChange")
self.gridLayout_187 = QtWidgets.QGridLayout(self.tab_landCoverChange)
self.gridLayout_187.setObjectName("gridLayout_187")
self.toolBox_landCoverChange = QtWidgets.QToolBox(self.tab_landCoverChange)
self.toolBox_landCoverChange.setObjectName("toolBox_landCoverChange")
self.page_12 = QtWidgets.QWidget()
self.page_12.setGeometry(QtCore.QRect(0, 0, 385, 193))
self.page_12.setObjectName("page_12")
self.gridLayout_186 = QtWidgets.QGridLayout(self.page_12)
self.gridLayout_186.setObjectName("gridLayout_186")
self.gridLayout_44 = QtWidgets.QGridLayout()
self.gridLayout_44.setObjectName("gridLayout_44")
self.mask_unchanged_checkBox = QtWidgets.QCheckBox(self.page_12)
self.mask_unchanged_checkBox.setChecked(True)
self.mask_unchanged_checkBox.setObjectName("mask_unchanged_checkBox")
self.gridLayout_44.addWidget(self.mask_unchanged_checkBox, 3, 0, 1, 1)
self.classification_reference_name_combo = QtWidgets.QComboBox(self.page_12)
sizePolicy = QtWidgets.QSizePolicy(QtWidgets.QSizePolicy.Preferred, QtWidgets.QSizePolicy.Fixed)
sizePolicy.setHorizontalStretch(1)
sizePolicy.setVerticalStretch(0)
sizePolicy.setHeightForWidth(self.classification_reference_name_combo.sizePolicy().hasHeightForWidth())
self.classification_reference_name_combo.setSizePolicy(sizePolicy)
self.classification_reference_name_combo.setObjectName("classification_reference_name_combo")
self.gridLayout_44.addWidget(self.classification_reference_name_combo, 1, 1, 1, 1)
self.label_40 = QtWidgets.QLabel(self.page_12)
sizePolicy = QtWidgets.QSizePolicy(QtWidgets.QSizePolicy.Minimum, QtWidgets.QSizePolicy.Preferred)
sizePolicy.setHorizontalStretch(0)
sizePolicy.setVerticalStretch(0)
sizePolicy.setHeightForWidth(self.label_40.sizePolicy().hasHeightForWidth())
self.label_40.setSizePolicy(sizePolicy)
self.label_40.setFrameShadow(QtWidgets.QFrame.Sunken)
self.label_40.setAlignment(QtCore.Qt.AlignLeading|QtCore.Qt.AlignLeft|QtCore.Qt.AlignVCenter)
self.label_40.setObjectName("label_40")
self.gridLayout_44.addWidget(self.label_40, 2, 0, 1, 1)
self.label_38 = QtWidgets.QLabel(self.page_12)
sizePolicy = QtWidgets.QSizePolicy(QtWidgets.QSizePolicy.Minimum, QtWidgets.QSizePolicy.Preferred)
sizePolicy.setHorizontalStretch(0)
sizePolicy.setVerticalStretch(0)
sizePolicy.setHeightForWidth(self.label_38.sizePolicy().hasHeightForWidth())
self.label_38.setSizePolicy(sizePolicy)
self.label_38.setMinimumSize(QtCore.QSize(229, 0))
self.label_38.setFrameShadow(QtWidgets.QFrame.Sunken)
self.label_38.setAlignment(QtCore.Qt.AlignLeading|QtCore.Qt.AlignLeft|QtCore.Qt.AlignVCenter)
self.label_38.setObjectName("label_38")
self.gridLayout_44.addWidget(self.label_38, 1, 0, 1, 1)
self.new_classification_name_combo = QtWidgets.QComboBox(self.page_12)
sizePolicy = QtWidgets.QSizePolicy(QtWidgets.QSizePolicy.Preferred, QtWidgets.QSizePolicy.Fixed)
sizePolicy.setHorizontalStretch(1)
sizePolicy.setVerticalStretch(0)
sizePolicy.setHeightForWidth(self.new_classification_name_combo.sizePolicy().hasHeightForWidth())
self.new_classification_name_combo.setSizePolicy(sizePolicy)
self.new_classification_name_combo.setObjectName("new_classification_name_combo")
self.gridLayout_44.addWidget(self.new_classification_name_combo, 2, 1, 1, 1)
self.toolButton_reload_5 = QtWidgets.QToolButton(self.page_12)
self.toolButton_reload_5.setStyleSheet("margin: 0px;padding: 0px;")
self.toolButton_reload_5.setIcon(icon55)
self.toolButton_reload_5.setIconSize(QtCore.QSize(22, 22))
self.toolButton_reload_5.setObjectName("toolButton_reload_5")
self.gridLayout_44.addWidget(self.toolButton_reload_5, 1, 2, 1, 1)
self.toolButton_reload_6 = QtWidgets.QToolButton(self.page_12)
self.toolButton_reload_6.setStyleSheet("margin: 0px;padding: 0px;")
self.toolButton_reload_6.setIcon(icon55)
self.toolButton_reload_6.setIconSize(QtCore.QSize(22, 22))
self.toolButton_reload_6.setObjectName("toolButton_reload_6")
self.gridLayout_44.addWidget(self.toolButton_reload_6, 2, 2, 1, 1)
self.horizontalLayout_35 = QtWidgets.QHBoxLayout()
self.horizontalLayout_35.setObjectName("horizontalLayout_35")
self.label_116 = QtWidgets.QLabel(self.page_12)
self.label_116.setStyleSheet("background-color : #656565; color : white")
self.label_116.setFrameShape(QtWidgets.QFrame.Panel)
self.label_116.setFrameShadow(QtWidgets.QFrame.Sunken)
self.label_116.setObjectName("label_116")
self.horizontalLayout_35.addWidget(self.label_116)
self.gridLayout_44.addLayout(self.horizontalLayout_35, 0, 0, 1, 3)
self.gridLayout_186.addLayout(self.gridLayout_44, 0, 0, 1, 1)
self.gridLayout_45 = QtWidgets.QGridLayout()
self.gridLayout_45.setObjectName("gridLayout_45")
spacerItem116 = QtWidgets.QSpacerItem(40, 20, QtWidgets.QSizePolicy.Expanding, QtWidgets.QSizePolicy.Minimum)
self.gridLayout_45.addItem(spacerItem116, 2, 0, 1, 1)
spacerItem117 = QtWidgets.QSpacerItem(20, 40, QtWidgets.QSizePolicy.Minimum, QtWidgets.QSizePolicy.Expanding)
self.gridLayout_45.addItem(spacerItem117, 0, 2, 1, 1)
self.label_169 = QtWidgets.QLabel(self.page_12)
self.label_169.setStyleSheet("background-color : #656565; color : white")
self.label_169.setFrameShape(QtWidgets.QFrame.Panel)
self.label_169.setFrameShadow(QtWidgets.QFrame.Sunken)
self.label_169.setAlignment(QtCore.Qt.AlignLeading|QtCore.Qt.AlignLeft|QtCore.Qt.AlignVCenter)
self.label_169.setObjectName("label_169")
self.gridLayout_45.addWidget(self.label_169, 1, 0, 1, 3)
self.calculateLandCoverChange_toolButton = QtWidgets.QToolButton(self.page_12)
font = QtGui.QFont()
font.setBold(True)
font.setWeight(75)
self.calculateLandCoverChange_toolButton.setFont(font)
self.calculateLandCoverChange_toolButton.setLayoutDirection(QtCore.Qt.RightToLeft)
self.calculateLandCoverChange_toolButton.setStyleSheet("margin: 0px;padding: 0px;")
self.calculateLandCoverChange_toolButton.setIcon(icon64)
self.calculateLandCoverChange_toolButton.setIconSize(QtCore.QSize(34, 34))
self.calculateLandCoverChange_toolButton.setToolButtonStyle(QtCore.Qt.ToolButtonTextBesideIcon)
self.calculateLandCoverChange_toolButton.setObjectName("calculateLandCoverChange_toolButton")
self.gridLayout_45.addWidget(self.calculateLandCoverChange_toolButton, 2, 2, 1, 1)
self.land_cover_change = QtWidgets.QToolButton(self.page_12)
font = QtGui.QFont()
font.setBold(True)
font.setWeight(75)
self.land_cover_change.setFont(font)
self.land_cover_change.setLayoutDirection(QtCore.Qt.RightToLeft)
self.land_cover_change.setStyleSheet("margin: 0px;padding: 0px;")
self.land_cover_change.setIcon(icon48)
self.land_cover_change.setIconSize(QtCore.QSize(34, 34))
self.land_cover_change.setToolButtonStyle(QtCore.Qt.ToolButtonTextBesideIcon)
self.land_cover_change.setObjectName("land_cover_change")
self.gridLayout_45.addWidget(self.land_cover_change, 2, 1, 1, 1)
self.gridLayout_186.addLayout(self.gridLayout_45, 1, 0, 1, 1)
self.toolBox_landCoverChange.addItem(self.page_12, "")
self.page_13 = QtWidgets.QWidget()
self.page_13.setGeometry(QtCore.QRect(0, 0, 90, 90))
self.page_13.setObjectName("page_13")
self.gridLayout_185 = QtWidgets.QGridLayout(self.page_13)
self.gridLayout_185.setObjectName("gridLayout_185")
self.gridLayout_46 = QtWidgets.QGridLayout()
self.gridLayout_46.setObjectName("gridLayout_46")
self.change_textBrowser = QtWidgets.QTextBrowser(self.page_13)
font = QtGui.QFont()
font.setFamily("Courier 10 Pitch")
self.change_textBrowser.setFont(font)
self.change_textBrowser.setTabChangesFocus(True)
self.change_textBrowser.setLineWrapMode(QtWidgets.QTextEdit.NoWrap)
self.change_textBrowser.setTabStopWidth(160)
self.change_textBrowser.setOpenLinks(False)
self.change_textBrowser.setObjectName("change_textBrowser")
self.gridLayout_46.addWidget(self.change_textBrowser, 0, 0, 1, 1)
self.gridLayout_185.addLayout(self.gridLayout_46, 0, 0, 1, 1)
self.toolBox_landCoverChange.addItem(self.page_13, "")
self.gridLayout_187.addWidget(self.toolBox_landCoverChange, 0, 0, 1, 1)
self.tabWidget_2.addTab(self.tab_landCoverChange, "")
self.tab_class_report = QtWidgets.QWidget()
self.tab_class_report.setObjectName("tab_class_report")
self.gridLayout_27 = QtWidgets.QGridLayout(self.tab_class_report)
self.gridLayout_27.setObjectName("gridLayout_27")
self.toolBox_class_report = QtWidgets.QToolBox(self.tab_class_report)
self.toolBox_class_report.setStyleSheet("")
self.toolBox_class_report.setObjectName("toolBox_class_report")
self.page_14 = QtWidgets.QWidget()
self.page_14.setGeometry(QtCore.QRect(0, 0, 444, 167))
self.page_14.setObjectName("page_14")
self.gridLayout_48 = QtWidgets.QGridLayout(self.page_14)
self.gridLayout_48.setObjectName("gridLayout_48")
self.gridLayout_47 = QtWidgets.QGridLayout()
self.gridLayout_47.setObjectName("gridLayout_47")
self.classification_report_name_combo = QtWidgets.QComboBox(self.page_14)
sizePolicy = QtWidgets.QSizePolicy(QtWidgets.QSizePolicy.Preferred, QtWidgets.QSizePolicy.Fixed)
sizePolicy.setHorizontalStretch(1)
sizePolicy.setVerticalStretch(0)
sizePolicy.setHeightForWidth(self.classification_report_name_combo.sizePolicy().hasHeightForWidth())
self.classification_report_name_combo.setSizePolicy(sizePolicy)
self.classification_report_name_combo.setObjectName("classification_report_name_combo")
self.gridLayout_47.addWidget(self.classification_report_name_combo, 1, 1, 1, 1)
self.label_44 = QtWidgets.QLabel(self.page_14)
sizePolicy = QtWidgets.QSizePolicy(QtWidgets.QSizePolicy.Minimum, QtWidgets.QSizePolicy.Preferred)
sizePolicy.setHorizontalStretch(0)
sizePolicy.setVerticalStretch(0)
sizePolicy.setHeightForWidth(self.label_44.sizePolicy().hasHeightForWidth())
self.label_44.setSizePolicy(sizePolicy)
self.label_44.setMinimumSize(QtCore.QSize(229, 0))
self.label_44.setFrameShadow(QtWidgets.QFrame.Sunken)
self.label_44.setAlignment(QtCore.Qt.AlignLeading|QtCore.Qt.AlignLeft|QtCore.Qt.AlignVCenter)
self.label_44.setObjectName("label_44")
self.gridLayout_47.addWidget(self.label_44, 1, 0, 1, 1)
self.gridLayout_26 = QtWidgets.QGridLayout()
self.gridLayout_26.setObjectName("gridLayout_26")
self.nodata_checkBox = QtWidgets.QCheckBox(self.page_14)
self.nodata_checkBox.setObjectName("nodata_checkBox")
self.gridLayout_26.addWidget(self.nodata_checkBox, 0, 0, 1, 1)
self.nodata_spinBox_2 = QtWidgets.QSpinBox(self.page_14)
self.nodata_spinBox_2.setMinimum(-2147483647)
self.nodata_spinBox_2.setMaximum(2147483647)
self.nodata_spinBox_2.setProperty("value", 0)
self.nodata_spinBox_2.setObjectName("nodata_spinBox_2")
self.gridLayout_26.addWidget(self.nodata_spinBox_2, 0, 1, 1, 1)
self.gridLayout_47.addLayout(self.gridLayout_26, 2, 0, 1, 1)
self.toolButton_reload_10 = QtWidgets.QToolButton(self.page_14)
self.toolButton_reload_10.setStyleSheet("margin: 0px;padding: 0px;")
self.toolButton_reload_10.setIcon(icon55)
self.toolButton_reload_10.setIconSize(QtCore.QSize(22, 22))
self.toolButton_reload_10.setObjectName("toolButton_reload_10")
self.gridLayout_47.addWidget(self.toolButton_reload_10, 1, 2, 1, 1)
self.gridLayout_48.addLayout(self.gridLayout_47, 1, 1, 1, 1)
self.gridLayout_189 = QtWidgets.QGridLayout()
self.gridLayout_189.setObjectName("gridLayout_189")
spacerItem118 = QtWidgets.QSpacerItem(358, 20, QtWidgets.QSizePolicy.Expanding, QtWidgets.QSizePolicy.Minimum)
self.gridLayout_189.addItem(spacerItem118, 2, 1, 1, 1)
self.label_170 = QtWidgets.QLabel(self.page_14)
self.label_170.setStyleSheet("background-color : #656565; color : white")
self.label_170.setFrameShape(QtWidgets.QFrame.Panel)
self.label_170.setFrameShadow(QtWidgets.QFrame.Sunken)
self.label_170.setAlignment(QtCore.Qt.AlignLeading|QtCore.Qt.AlignLeft|QtCore.Qt.AlignVCenter)
self.label_170.setObjectName("label_170")
self.gridLayout_189.addWidget(self.label_170, 1, 0, 1, 4)
self.calculateReport_toolButton = QtWidgets.QToolButton(self.page_14)
font = QtGui.QFont()
font.setBold(True)
font.setWeight(75)
self.calculateReport_toolButton.setFont(font)
self.calculateReport_toolButton.setLayoutDirection(QtCore.Qt.RightToLeft)
self.calculateReport_toolButton.setStyleSheet("margin: 0px;padding: 0px;")
self.calculateReport_toolButton.setIcon(icon64)
self.calculateReport_toolButton.setIconSize(QtCore.QSize(34, 34))
self.calculateReport_toolButton.setToolButtonStyle(QtCore.Qt.ToolButtonTextBesideIcon)
self.calculateReport_toolButton.setObjectName("calculateReport_toolButton")
self.gridLayout_189.addWidget(self.calculateReport_toolButton, 2, 3, 1, 1)
spacerItem119 = QtWidgets.QSpacerItem(20, 82, QtWidgets.QSizePolicy.Minimum, QtWidgets.QSizePolicy.Expanding)
self.gridLayout_189.addItem(spacerItem119, 0, 0, 1, 2)
self.classification_report = QtWidgets.QToolButton(self.page_14)
font = QtGui.QFont()
font.setBold(True)
font.setWeight(75)
self.classification_report.setFont(font)
self.classification_report.setLayoutDirection(QtCore.Qt.RightToLeft)
self.classification_report.setStyleSheet("margin: 0px;padding: 0px;")
self.classification_report.setIcon(icon48)
self.classification_report.setIconSize(QtCore.QSize(34, 34))
self.classification_report.setToolButtonStyle(QtCore.Qt.ToolButtonTextBesideIcon)
self.classification_report.setObjectName("classification_report")
self.gridLayout_189.addWidget(self.classification_report, 2, 2, 1, 1)
self.gridLayout_48.addLayout(self.gridLayout_189, 2, 1, 1, 1)
self.horizontalLayout_37 = QtWidgets.QHBoxLayout()
self.horizontalLayout_37.setObjectName("horizontalLayout_37")
self.label_148 = QtWidgets.QLabel(self.page_14)
self.label_148.setStyleSheet("background-color : #656565; color : white")
self.label_148.setFrameShape(QtWidgets.QFrame.Panel)
self.label_148.setFrameShadow(QtWidgets.QFrame.Sunken)
self.label_148.setObjectName("label_148")
self.horizontalLayout_37.addWidget(self.label_148)
self.gridLayout_48.addLayout(self.horizontalLayout_37, 0, 1, 1, 1)
self.toolBox_class_report.addItem(self.page_14, "")
self.page_15 = QtWidgets.QWidget()
self.page_15.setGeometry(QtCore.QRect(0, 0, 90, 90))
self.page_15.setObjectName("page_15")
self.gridLayout_188 = QtWidgets.QGridLayout(self.page_15)
self.gridLayout_188.setObjectName("gridLayout_188")
self.gridLayout_43 = QtWidgets.QGridLayout()
self.gridLayout_43.setObjectName("gridLayout_43")
self.report_textBrowser = QtWidgets.QTextBrowser(self.page_15)
font = QtGui.QFont()
font.setFamily("Courier 10 Pitch")
self.report_textBrowser.setFont(font)
self.report_textBrowser.setTabChangesFocus(True)
self.report_textBrowser.setLineWrapMode(QtWidgets.QTextEdit.NoWrap)
self.report_textBrowser.setTabStopWidth(160)
self.report_textBrowser.setOpenLinks(False)
self.report_textBrowser.setObjectName("report_textBrowser")
self.gridLayout_43.addWidget(self.report_textBrowser, 0, 0, 1, 1)
self.gridLayout_188.addLayout(self.gridLayout_43, 0, 0, 1, 1)
self.toolBox_class_report.addItem(self.page_15, "")
self.gridLayout_27.addWidget(self.toolBox_class_report, 0, 0, 1, 1)
self.tabWidget_2.addTab(self.tab_class_report, "")
self.tab_cross_classification = QtWidgets.QWidget()
self.tab_cross_classification.setObjectName("tab_cross_classification")
self.gridLayout_254 = QtWidgets.QGridLayout(self.tab_cross_classification)
self.gridLayout_254.setObjectName("gridLayout_254")
self.toolBox_cross_classification = QtWidgets.QToolBox(self.tab_cross_classification)
self.toolBox_cross_classification.setObjectName("toolBox_cross_classification")
self.page_19 = QtWidgets.QWidget()
self.page_19.setGeometry(QtCore.QRect(0, 0, 456, 229))
self.page_19.setObjectName("page_19")
self.gridLayout_192 = QtWidgets.QGridLayout(self.page_19)
self.gridLayout_192.setObjectName("gridLayout_192")
self.horizontalLayout_38 = QtWidgets.QHBoxLayout()
self.horizontalLayout_38.setObjectName("horizontalLayout_38")
self.label_187 = QtWidgets.QLabel(self.page_19)
self.label_187.setStyleSheet("background-color : #656565; color : white")
self.label_187.setFrameShape(QtWidgets.QFrame.Panel)
self.label_187.setFrameShadow(QtWidgets.QFrame.Sunken)
self.label_187.setObjectName("label_187")
self.horizontalLayout_38.addWidget(self.label_187)
self.gridLayout_192.addLayout(self.horizontalLayout_38, 0, 0, 1, 1)
self.gridLayout_250 = QtWidgets.QGridLayout()
self.gridLayout_250.setObjectName("gridLayout_250")
self.gridLayout_248 = QtWidgets.QGridLayout()
self.gridLayout_248.setObjectName("gridLayout_248")
self.nodata_checkBox_6 = QtWidgets.QCheckBox(self.page_19)
self.nodata_checkBox_6.setObjectName("nodata_checkBox_6")
self.gridLayout_248.addWidget(self.nodata_checkBox_6, 0, 0, 1, 1)
self.nodata_spinBox_7 = QtWidgets.QSpinBox(self.page_19)
self.nodata_spinBox_7.setMinimum(-2147483647)
self.nodata_spinBox_7.setMaximum(2147483647)
self.nodata_spinBox_7.setObjectName("nodata_spinBox_7")
self.gridLayout_248.addWidget(self.nodata_spinBox_7, 0, 1, 1, 1)
self.gridLayout_250.addLayout(self.gridLayout_248, 2, 0, 1, 1)
self.label_197 = QtWidgets.QLabel(self.page_19)
sizePolicy = QtWidgets.QSizePolicy(QtWidgets.QSizePolicy.Minimum, QtWidgets.QSizePolicy.Preferred)
sizePolicy.setHorizontalStretch(0)
sizePolicy.setVerticalStretch(0)
sizePolicy.setHeightForWidth(self.label_197.sizePolicy().hasHeightForWidth())
self.label_197.setSizePolicy(sizePolicy)
self.label_197.setMinimumSize(QtCore.QSize(229, 0))
self.label_197.setFrameShadow(QtWidgets.QFrame.Sunken)
self.label_197.setAlignment(QtCore.Qt.AlignLeading|QtCore.Qt.AlignLeft|QtCore.Qt.AlignVCenter)
self.label_197.setObjectName("label_197")
self.gridLayout_250.addWidget(self.label_197, 1, 0, 1, 1)
self.label_199 = QtWidgets.QLabel(self.page_19)
sizePolicy = QtWidgets.QSizePolicy(QtWidgets.QSizePolicy.MinimumExpanding, QtWidgets.QSizePolicy.Preferred)
sizePolicy.setHorizontalStretch(0)
sizePolicy.setVerticalStretch(0)
sizePolicy.setHeightForWidth(self.label_199.sizePolicy().hasHeightForWidth())
self.label_199.setSizePolicy(sizePolicy)
self.label_199.setMinimumSize(QtCore.QSize(6, 0))
self.label_199.setMaximumSize(QtCore.QSize(100, 200))
self.label_199.setFrameShadow(QtWidgets.QFrame.Sunken)
self.label_199.setAlignment(QtCore.Qt.AlignLeading|QtCore.Qt.AlignLeft|QtCore.Qt.AlignVCenter)
self.label_199.setObjectName("label_199")
self.gridLayout_250.addWidget(self.label_199, 4, 1, 1, 1)
self.class_field_comboBox_2 = QtWidgets.QComboBox(self.page_19)
sizePolicy = QtWidgets.QSizePolicy(QtWidgets.QSizePolicy.MinimumExpanding, QtWidgets.QSizePolicy.Preferred)
sizePolicy.setHorizontalStretch(1)
sizePolicy.setVerticalStretch(0)
sizePolicy.setHeightForWidth(self.class_field_comboBox_2.sizePolicy().hasHeightForWidth())
self.class_field_comboBox_2.setSizePolicy(sizePolicy)
self.class_field_comboBox_2.setObjectName("class_field_comboBox_2")
self.gridLayout_250.addWidget(self.class_field_comboBox_2, 4, 2, 1, 2)
self.toolButton_reload_21 = QtWidgets.QToolButton(self.page_19)
self.toolButton_reload_21.setStyleSheet("margin: 0px;padding: 0px;")
self.toolButton_reload_21.setIcon(icon55)
self.toolButton_reload_21.setIconSize(QtCore.QSize(22, 22))
self.toolButton_reload_21.setObjectName("toolButton_reload_21")
self.gridLayout_250.addWidget(self.toolButton_reload_21, 1, 4, 1, 1)
self.classification_name_combo_2 = QtWidgets.QComboBox(self.page_19)
sizePolicy = QtWidgets.QSizePolicy(QtWidgets.QSizePolicy.Preferred, QtWidgets.QSizePolicy.Fixed)
sizePolicy.setHorizontalStretch(1)
sizePolicy.setVerticalStretch(0)
sizePolicy.setHeightForWidth(self.classification_name_combo_2.sizePolicy().hasHeightForWidth())
self.classification_name_combo_2.setSizePolicy(sizePolicy)
self.classification_name_combo_2.setObjectName("classification_name_combo_2")
self.gridLayout_250.addWidget(self.classification_name_combo_2, 1, 1, 1, 3)
self.buttonReload_shape_5 = QtWidgets.QToolButton(self.page_19)
self.buttonReload_shape_5.setStyleSheet("margin: 0px;padding: 0px;")
self.buttonReload_shape_5.setIcon(icon55)
self.buttonReload_shape_5.setIconSize(QtCore.QSize(22, 22))
self.buttonReload_shape_5.setObjectName("buttonReload_shape_5")
self.gridLayout_250.addWidget(self.buttonReload_shape_5, 3, 4, 1, 1)
self.reference_name_combo_2 = QtWidgets.QComboBox(self.page_19)
sizePolicy = QtWidgets.QSizePolicy(QtWidgets.QSizePolicy.Preferred, QtWidgets.QSizePolicy.Fixed)
sizePolicy.setHorizontalStretch(1)
sizePolicy.setVerticalStretch(0)
sizePolicy.setHeightForWidth(self.reference_name_combo_2.sizePolicy().hasHeightForWidth())
self.reference_name_combo_2.setSizePolicy(sizePolicy)
self.reference_name_combo_2.setObjectName("reference_name_combo_2")
self.gridLayout_250.addWidget(self.reference_name_combo_2, 3, 1, 1, 3)
self.label_198 = QtWidgets.QLabel(self.page_19)
sizePolicy = QtWidgets.QSizePolicy(QtWidgets.QSizePolicy.Minimum, QtWidgets.QSizePolicy.Preferred)
sizePolicy.setHorizontalStretch(0)
sizePolicy.setVerticalStretch(0)
sizePolicy.setHeightForWidth(self.label_198.sizePolicy().hasHeightForWidth())
self.label_198.setSizePolicy(sizePolicy)
self.label_198.setFrameShadow(QtWidgets.QFrame.Sunken)
self.label_198.setAlignment(QtCore.Qt.AlignLeading|QtCore.Qt.AlignLeft|QtCore.Qt.AlignVCenter)
self.label_198.setObjectName("label_198")
self.gridLayout_250.addWidget(self.label_198, 3, 0, 1, 1)
self.gridLayout_192.addLayout(self.gridLayout_250, 1, 0, 1, 1)
self.gridLayout_251 = QtWidgets.QGridLayout()
self.gridLayout_251.setObjectName("gridLayout_251")
spacerItem120 = QtWidgets.QSpacerItem(20, 40, QtWidgets.QSizePolicy.Minimum, QtWidgets.QSizePolicy.Expanding)
self.gridLayout_251.addItem(spacerItem120, 0, 2, 1, 1)
spacerItem121 = QtWidgets.QSpacerItem(40, 20, QtWidgets.QSizePolicy.Expanding, QtWidgets.QSizePolicy.Minimum)
self.gridLayout_251.addItem(spacerItem121, 2, 0, 1, 1)
self.label_200 = QtWidgets.QLabel(self.page_19)
self.label_200.setStyleSheet("background-color : #656565; color : white")
self.label_200.setFrameShape(QtWidgets.QFrame.Panel)
self.label_200.setFrameShadow(QtWidgets.QFrame.Sunken)
self.label_200.setAlignment(QtCore.Qt.AlignLeading|QtCore.Qt.AlignLeft|QtCore.Qt.AlignVCenter)
self.label_200.setObjectName("label_200")
self.gridLayout_251.addWidget(self.label_200, 1, 0, 1, 3)
self.calculatecrossClass_toolButton = QtWidgets.QToolButton(self.page_19)
font = QtGui.QFont()
font.setBold(True)
font.setWeight(75)
self.calculatecrossClass_toolButton.setFont(font)
self.calculatecrossClass_toolButton.setLayoutDirection(QtCore.Qt.RightToLeft)
self.calculatecrossClass_toolButton.setStyleSheet("margin: 0px;padding: 0px;")
self.calculatecrossClass_toolButton.setIcon(icon64)
self.calculatecrossClass_toolButton.setIconSize(QtCore.QSize(34, 34))
self.calculatecrossClass_toolButton.setToolButtonStyle(QtCore.Qt.ToolButtonTextBesideIcon)
self.calculatecrossClass_toolButton.setObjectName("calculatecrossClass_toolButton")
self.gridLayout_251.addWidget(self.calculatecrossClass_toolButton, 2, 2, 1, 1)
self.cross_classification = QtWidgets.QToolButton(self.page_19)
font = QtGui.QFont()
font.setBold(True)
font.setWeight(75)
self.cross_classification.setFont(font)
self.cross_classification.setLayoutDirection(QtCore.Qt.RightToLeft)
self.cross_classification.setStyleSheet("margin: 0px;padding: 0px;")
self.cross_classification.setIcon(icon48)
self.cross_classification.setIconSize(QtCore.QSize(34, 34))
self.cross_classification.setToolButtonStyle(QtCore.Qt.ToolButtonTextBesideIcon)
self.cross_classification.setObjectName("cross_classification")
self.gridLayout_251.addWidget(self.cross_classification, 2, 1, 1, 1)
self.gridLayout_192.addLayout(self.gridLayout_251, 2, 0, 1, 1)
self.toolBox_cross_classification.addItem(self.page_19, "")
self.page_22 = QtWidgets.QWidget()
self.page_22.setGeometry(QtCore.QRect(0, 0, 90, 90))
self.page_22.setObjectName("page_22")
self.gridLayout_252 = QtWidgets.QGridLayout(self.page_22)
self.gridLayout_252.setObjectName("gridLayout_252")
self.gridLayout_253 = QtWidgets.QGridLayout()
self.gridLayout_253.setObjectName("gridLayout_253")
self.cross_matrix_textBrowser = QtWidgets.QTextBrowser(self.page_22)
font = QtGui.QFont()
font.setFamily("Courier 10 Pitch")
self.cross_matrix_textBrowser.setFont(font)
self.cross_matrix_textBrowser.setTabChangesFocus(True)
self.cross_matrix_textBrowser.setLineWrapMode(QtWidgets.QTextEdit.NoWrap)
self.cross_matrix_textBrowser.setTabStopWidth(120)
self.cross_matrix_textBrowser.setOpenLinks(False)
self.cross_matrix_textBrowser.setObjectName("cross_matrix_textBrowser")
self.gridLayout_253.addWidget(self.cross_matrix_textBrowser, 0, 0, 1, 1)
self.gridLayout_252.addLayout(self.gridLayout_253, 0, 0, 1, 1)
self.toolBox_cross_classification.addItem(self.page_22, "")
self.gridLayout_254.addWidget(self.toolBox_cross_classification, 0, 0, 1, 1)
self.tabWidget_2.addTab(self.tab_cross_classification, "")
self.tab_class_signature = QtWidgets.QWidget()
self.tab_class_signature.setObjectName("tab_class_signature")
self.gridLayout_123 = QtWidgets.QGridLayout(self.tab_class_signature)
self.gridLayout_123.setObjectName("gridLayout_123")
self.toolBox_class_signature = QtWidgets.QToolBox(self.tab_class_signature)
self.toolBox_class_signature.setStyleSheet("")
self.toolBox_class_signature.setObjectName("toolBox_class_signature")
self.page_20 = QtWidgets.QWidget()
self.page_20.setGeometry(QtCore.QRect(0, 0, 387, 196))
self.page_20.setObjectName("page_20")
self.gridLayout_209 = QtWidgets.QGridLayout(self.page_20)
self.gridLayout_209.setObjectName("gridLayout_209")
self.horizontalLayout_39 = QtWidgets.QHBoxLayout()
self.horizontalLayout_39.setObjectName("horizontalLayout_39")
self.label_188 = QtWidgets.QLabel(self.page_20)
self.label_188.setStyleSheet("background-color : #656565; color : white")
self.label_188.setFrameShape(QtWidgets.QFrame.Panel)
self.label_188.setFrameShadow(QtWidgets.QFrame.Sunken)
self.label_188.setObjectName("label_188")
self.horizontalLayout_39.addWidget(self.label_188)
self.gridLayout_209.addLayout(self.horizontalLayout_39, 0, 0, 1, 1)
self.gridLayout_211 = QtWidgets.QGridLayout()
self.gridLayout_211.setObjectName("gridLayout_211")
self.gridLayout_112 = QtWidgets.QGridLayout()
self.gridLayout_112.setObjectName("gridLayout_112")
self.classification_name_combo_3 = QtWidgets.QComboBox(self.page_20)
sizePolicy = QtWidgets.QSizePolicy(QtWidgets.QSizePolicy.Preferred, QtWidgets.QSizePolicy.Fixed)
sizePolicy.setHorizontalStretch(1)
sizePolicy.setVerticalStretch(0)
sizePolicy.setHeightForWidth(self.classification_name_combo_3.sizePolicy().hasHeightForWidth())
self.classification_name_combo_3.setSizePolicy(sizePolicy)
self.classification_name_combo_3.setObjectName("classification_name_combo_3")
self.gridLayout_112.addWidget(self.classification_name_combo_3, 0, 1, 1, 1)
self.label_201 = QtWidgets.QLabel(self.page_20)
sizePolicy = QtWidgets.QSizePolicy(QtWidgets.QSizePolicy.Minimum, QtWidgets.QSizePolicy.Preferred)
sizePolicy.setHorizontalStretch(0)
sizePolicy.setVerticalStretch(0)
sizePolicy.setHeightForWidth(self.label_201.sizePolicy().hasHeightForWidth())
self.label_201.setSizePolicy(sizePolicy)
self.label_201.setMinimumSize(QtCore.QSize(229, 0))
self.label_201.setFrameShadow(QtWidgets.QFrame.Sunken)
self.label_201.setAlignment(QtCore.Qt.AlignLeading|QtCore.Qt.AlignLeft|QtCore.Qt.AlignVCenter)
self.label_201.setObjectName("label_201")
self.gridLayout_112.addWidget(self.label_201, 0, 0, 1, 1)
self.toolButton_reload_22 = QtWidgets.QToolButton(self.page_20)
self.toolButton_reload_22.setStyleSheet("margin: 0px;padding: 0px;")
self.toolButton_reload_22.setIcon(icon55)
self.toolButton_reload_22.setIconSize(QtCore.QSize(22, 22))
self.toolButton_reload_22.setObjectName("toolButton_reload_22")
self.gridLayout_112.addWidget(self.toolButton_reload_22, 0, 2, 1, 1)
self.gridLayout_211.addLayout(self.gridLayout_112, 0, 0, 1, 3)
self.label_259 = QtWidgets.QLabel(self.page_20)
sizePolicy = QtWidgets.QSizePolicy(QtWidgets.QSizePolicy.Minimum, QtWidgets.QSizePolicy.Preferred)
sizePolicy.setHorizontalStretch(0)
sizePolicy.setVerticalStretch(0)
sizePolicy.setHeightForWidth(self.label_259.sizePolicy().hasHeightForWidth())
self.label_259.setSizePolicy(sizePolicy)
self.label_259.setFrameShadow(QtWidgets.QFrame.Sunken)
self.label_259.setAlignment(QtCore.Qt.AlignLeading|QtCore.Qt.AlignLeft|QtCore.Qt.AlignVCenter)
self.label_259.setObjectName("label_259")
self.gridLayout_211.addWidget(self.label_259, 1, 0, 1, 1)
self.band_set_comb_spinBox_8 = QtWidgets.QSpinBox(self.page_20)
self.band_set_comb_spinBox_8.setMinimum(1)
self.band_set_comb_spinBox_8.setMaximum(100000)
self.band_set_comb_spinBox_8.setObjectName("band_set_comb_spinBox_8")
self.gridLayout_211.addWidget(self.band_set_comb_spinBox_8, 1, 1, 1, 1)
spacerItem122 = QtWidgets.QSpacerItem(40, 20, QtWidgets.QSizePolicy.Expanding, QtWidgets.QSizePolicy.Minimum)
self.gridLayout_211.addItem(spacerItem122, 1, 2, 1, 1)
self.gridLayout_209.addLayout(self.gridLayout_211, 1, 0, 1, 1)
self.gridLayout_256 = QtWidgets.QGridLayout()
self.gridLayout_256.setObjectName("gridLayout_256")
self.label_184 = QtWidgets.QLabel(self.page_20)
self.label_184.setStyleSheet("background-color : #656565; color : white")
self.label_184.setFrameShape(QtWidgets.QFrame.Panel)
self.label_184.setFrameShadow(QtWidgets.QFrame.Sunken)
self.label_184.setAlignment(QtCore.Qt.AlignLeading|QtCore.Qt.AlignLeft|QtCore.Qt.AlignVCenter)
self.label_184.setObjectName("label_184")
self.gridLayout_256.addWidget(self.label_184, 2, 0, 1, 4)
self.class_signature_Button = QtWidgets.QToolButton(self.page_20)
font = QtGui.QFont()
font.setBold(True)
font.setWeight(75)
self.class_signature_Button.setFont(font)
self.class_signature_Button.setLayoutDirection(QtCore.Qt.RightToLeft)
self.class_signature_Button.setStyleSheet("margin: 0px;padding: 0px;")
self.class_signature_Button.setIcon(icon64)
self.class_signature_Button.setIconSize(QtCore.QSize(34, 34))
self.class_signature_Button.setToolButtonStyle(QtCore.Qt.ToolButtonTextBesideIcon)
self.class_signature_Button.setObjectName("class_signature_Button")
self.gridLayout_256.addWidget(self.class_signature_Button, 3, 3, 1, 1)
spacerItem123 = QtWidgets.QSpacerItem(40, 20, QtWidgets.QSizePolicy.Expanding, QtWidgets.QSizePolicy.Minimum)
self.gridLayout_256.addItem(spacerItem123, 3, 0, 1, 2)
spacerItem124 = QtWidgets.QSpacerItem(38, 37, QtWidgets.QSizePolicy.Minimum, QtWidgets.QSizePolicy.Expanding)
self.gridLayout_256.addItem(spacerItem124, 1, 0, 1, 1)
self.class_signature_save_siglist_checkBox = QtWidgets.QCheckBox(self.page_20)
self.class_signature_save_siglist_checkBox.setChecked(True)
self.class_signature_save_siglist_checkBox.setObjectName("class_signature_save_siglist_checkBox")
self.gridLayout_256.addWidget(self.class_signature_save_siglist_checkBox, 0, 0, 1, 4)
self.class_signature = QtWidgets.QToolButton(self.page_20)
font = QtGui.QFont()
font.setBold(True)
font.setWeight(75)
self.class_signature.setFont(font)
self.class_signature.setLayoutDirection(QtCore.Qt.RightToLeft)
self.class_signature.setStyleSheet("margin: 0px;padding: 0px;")
self.class_signature.setIcon(icon48)
self.class_signature.setIconSize(QtCore.QSize(34, 34))
self.class_signature.setToolButtonStyle(QtCore.Qt.ToolButtonTextBesideIcon)
self.class_signature.setObjectName("class_signature")
self.gridLayout_256.addWidget(self.class_signature, 3, 2, 1, 1)
self.gridLayout_209.addLayout(self.gridLayout_256, 2, 0, 1, 1)
self.toolBox_class_signature.addItem(self.page_20, "")
self.page_24 = QtWidgets.QWidget()
self.page_24.setGeometry(QtCore.QRect(0, 0, 90, 90))
self.page_24.setObjectName("page_24")
self.gridLayout_258 = QtWidgets.QGridLayout(self.page_24)
self.gridLayout_258.setObjectName("gridLayout_258")
self.gridLayout_259 = QtWidgets.QGridLayout()
self.gridLayout_259.setObjectName("gridLayout_259")
self.report_textBrowser_4 = QtWidgets.QTextBrowser(self.page_24)
font = QtGui.QFont()
font.setFamily("Courier 10 Pitch")
self.report_textBrowser_4.setFont(font)
self.report_textBrowser_4.setTabChangesFocus(True)
self.report_textBrowser_4.setLineWrapMode(QtWidgets.QTextEdit.NoWrap)
self.report_textBrowser_4.setTabStopWidth(160)
self.report_textBrowser_4.setOpenLinks(False)
self.report_textBrowser_4.setObjectName("report_textBrowser_4")
self.gridLayout_259.addWidget(self.report_textBrowser_4, 0, 0, 1, 1)
self.gridLayout_258.addLayout(self.gridLayout_259, 0, 0, 1, 1)
self.toolBox_class_signature.addItem(self.page_24, "")
self.gridLayout_123.addWidget(self.toolBox_class_signature, 0, 0, 1, 1)
self.tabWidget_2.addTab(self.tab_class_signature, "")
self.tab_class_to_vector = QtWidgets.QWidget()
self.tab_class_to_vector.setObjectName("tab_class_to_vector")
self.gridLayout_49 = QtWidgets.QGridLayout(self.tab_class_to_vector)
self.gridLayout_49.setObjectName("gridLayout_49")
self.horizontalLayout_40 = QtWidgets.QHBoxLayout()
self.horizontalLayout_40.setObjectName("horizontalLayout_40")
self.label_189 = QtWidgets.QLabel(self.tab_class_to_vector)
self.label_189.setStyleSheet("background-color : #656565; color : white")
self.label_189.setFrameShape(QtWidgets.QFrame.Panel)
self.label_189.setFrameShadow(QtWidgets.QFrame.Sunken)
self.label_189.setObjectName("label_189")
self.horizontalLayout_40.addWidget(self.label_189)
self.gridLayout_49.addLayout(self.horizontalLayout_40, 0, 0, 1, 1)
self.gridLayout_71 = QtWidgets.QGridLayout()
self.gridLayout_71.setObjectName("gridLayout_71")
self.label_63 = QtWidgets.QLabel(self.tab_class_to_vector)
sizePolicy = QtWidgets.QSizePolicy(QtWidgets.QSizePolicy.Minimum, QtWidgets.QSizePolicy.Preferred)
sizePolicy.setHorizontalStretch(0)
sizePolicy.setVerticalStretch(0)
sizePolicy.setHeightForWidth(self.label_63.sizePolicy().hasHeightForWidth())
self.label_63.setSizePolicy(sizePolicy)
self.label_63.setMinimumSize(QtCore.QSize(229, 0))
self.label_63.setFrameShadow(QtWidgets.QFrame.Sunken)
self.label_63.setAlignment(QtCore.Qt.AlignLeading|QtCore.Qt.AlignLeft|QtCore.Qt.AlignVCenter)
self.label_63.setObjectName("label_63")
self.gridLayout_71.addWidget(self.label_63, 0, 0, 1, 1)
self.classification_vector_name_combo = QtWidgets.QComboBox(self.tab_class_to_vector)
sizePolicy = QtWidgets.QSizePolicy(QtWidgets.QSizePolicy.Preferred, QtWidgets.QSizePolicy.Fixed)
sizePolicy.setHorizontalStretch(1)
sizePolicy.setVerticalStretch(0)
sizePolicy.setHeightForWidth(self.classification_vector_name_combo.sizePolicy().hasHeightForWidth())
self.classification_vector_name_combo.setSizePolicy(sizePolicy)
self.classification_vector_name_combo.setObjectName("classification_vector_name_combo")
self.gridLayout_71.addWidget(self.classification_vector_name_combo, 0, 1, 1, 1)
self.toolButton_reload_11 = QtWidgets.QToolButton(self.tab_class_to_vector)
self.toolButton_reload_11.setStyleSheet("margin: 0px;padding: 0px;")
self.toolButton_reload_11.setIcon(icon55)
self.toolButton_reload_11.setIconSize(QtCore.QSize(22, 22))
self.toolButton_reload_11.setObjectName("toolButton_reload_11")
self.gridLayout_71.addWidget(self.toolButton_reload_11, 0, 2, 1, 1)
self.gridLayout_49.addLayout(self.gridLayout_71, 1, 0, 1, 1)
self.gridLayout_74 = QtWidgets.QGridLayout()
self.gridLayout_74.setObjectName("gridLayout_74")
self.class_macroclass_comboBox = QtWidgets.QComboBox(self.tab_class_to_vector)
sizePolicy = QtWidgets.QSizePolicy(QtWidgets.QSizePolicy.Preferred, QtWidgets.QSizePolicy.Fixed)
sizePolicy.setHorizontalStretch(1)
sizePolicy.setVerticalStretch(0)
sizePolicy.setHeightForWidth(self.class_macroclass_comboBox.sizePolicy().hasHeightForWidth())
self.class_macroclass_comboBox.setSizePolicy(sizePolicy)
self.class_macroclass_comboBox.setObjectName("class_macroclass_comboBox")
self.class_macroclass_comboBox.addItem("")
self.class_macroclass_comboBox.addItem("")
self.gridLayout_74.addWidget(self.class_macroclass_comboBox, 1, 1, 1, 1)
self.use_class_code_checkBox = QtWidgets.QCheckBox(self.tab_class_to_vector)
self.use_class_code_checkBox.setObjectName("use_class_code_checkBox")
self.gridLayout_74.addWidget(self.use_class_code_checkBox, 1, 0, 1, 1)
self.label_49 = QtWidgets.QLabel(self.tab_class_to_vector)
sizePolicy = QtWidgets.QSizePolicy(QtWidgets.QSizePolicy.Expanding, QtWidgets.QSizePolicy.Preferred)
sizePolicy.setHorizontalStretch(0)
sizePolicy.setVerticalStretch(0)
sizePolicy.setHeightForWidth(self.label_49.sizePolicy().hasHeightForWidth())
self.label_49.setSizePolicy(sizePolicy)
self.label_49.setStyleSheet("background-color : #656565; color : white")
self.label_49.setFrameShape(QtWidgets.QFrame.Panel)
self.label_49.setFrameShadow(QtWidgets.QFrame.Sunken)
self.label_49.setObjectName("label_49")
self.gridLayout_74.addWidget(self.label_49, 0, 0, 1, 2)
self.dissolve_output_checkBox = QtWidgets.QCheckBox(self.tab_class_to_vector)
self.dissolve_output_checkBox.setObjectName("dissolve_output_checkBox")
self.gridLayout_74.addWidget(self.dissolve_output_checkBox, 2, 0, 1, 1)
self.gridLayout_49.addLayout(self.gridLayout_74, 2, 0, 1, 1)
self.gridLayout_75 = QtWidgets.QGridLayout()
self.gridLayout_75.setObjectName("gridLayout_75")
spacerItem125 = QtWidgets.QSpacerItem(20, 40, QtWidgets.QSizePolicy.Minimum, QtWidgets.QSizePolicy.Expanding)
self.gridLayout_75.addItem(spacerItem125, 0, 1, 1, 1)
spacerItem126 = QtWidgets.QSpacerItem(40, 20, QtWidgets.QSizePolicy.Expanding, QtWidgets.QSizePolicy.Minimum)
self.gridLayout_75.addItem(spacerItem126, 0, 0, 1, 1)
self.convert_toolButton = QtWidgets.QToolButton(self.tab_class_to_vector)
font = QtGui.QFont()
font.setBold(True)
font.setWeight(75)
self.convert_toolButton.setFont(font)
self.convert_toolButton.setLayoutDirection(QtCore.Qt.RightToLeft)
self.convert_toolButton.setStyleSheet("margin: 0px;padding: 0px;")
self.convert_toolButton.setIcon(icon64)
self.convert_toolButton.setIconSize(QtCore.QSize(34, 34))
self.convert_toolButton.setToolButtonStyle(QtCore.Qt.ToolButtonTextBesideIcon)
self.convert_toolButton.setObjectName("convert_toolButton")
self.gridLayout_75.addWidget(self.convert_toolButton, 2, 2, 1, 1)
self.label_171 = QtWidgets.QLabel(self.tab_class_to_vector)
self.label_171.setStyleSheet("background-color : #656565; color : white")
self.label_171.setFrameShape(QtWidgets.QFrame.Panel)
self.label_171.setFrameShadow(QtWidgets.QFrame.Sunken)
self.label_171.setAlignment(QtCore.Qt.AlignLeading|QtCore.Qt.AlignLeft|QtCore.Qt.AlignVCenter)
self.label_171.setObjectName("label_171")
self.gridLayout_75.addWidget(self.label_171, 1, 0, 1, 3)
self.classification_to_vector = QtWidgets.QToolButton(self.tab_class_to_vector)
font = QtGui.QFont()
font.setBold(True)
font.setWeight(75)
self.classification_to_vector.setFont(font)
self.classification_to_vector.setLayoutDirection(QtCore.Qt.RightToLeft)
self.classification_to_vector.setStyleSheet("margin: 0px;padding: 0px;")
self.classification_to_vector.setIcon(icon48)
self.classification_to_vector.setIconSize(QtCore.QSize(34, 34))
self.classification_to_vector.setToolButtonStyle(QtCore.Qt.ToolButtonTextBesideIcon)
self.classification_to_vector.setObjectName("classification_to_vector")
self.gridLayout_75.addWidget(self.classification_to_vector, 2, 1, 1, 1)
self.gridLayout_49.addLayout(self.gridLayout_75, 3, 0, 1, 1)
self.tabWidget_2.addTab(self.tab_class_to_vector, "")
self.tab_reclassification = QtWidgets.QWidget()
self.tab_reclassification.setObjectName("tab_reclassification")
self.gridLayout_191 = QtWidgets.QGridLayout(self.tab_reclassification)
self.gridLayout_191.setObjectName("gridLayout_191")
self.gridLayout_78 = QtWidgets.QGridLayout()
self.gridLayout_78.setObjectName("gridLayout_78")
self.label_65 = QtWidgets.QLabel(self.tab_reclassification)
sizePolicy = QtWidgets.QSizePolicy(QtWidgets.QSizePolicy.Minimum, QtWidgets.QSizePolicy.Preferred)
sizePolicy.setHorizontalStretch(0)
sizePolicy.setVerticalStretch(0)
sizePolicy.setHeightForWidth(self.label_65.sizePolicy().hasHeightForWidth())
self.label_65.setSizePolicy(sizePolicy)
self.label_65.setMinimumSize(QtCore.QSize(229, 0))
self.label_65.setFrameShadow(QtWidgets.QFrame.Sunken)
self.label_65.setAlignment(QtCore.Qt.AlignLeading|QtCore.Qt.AlignLeft|QtCore.Qt.AlignVCenter)
self.label_65.setObjectName("label_65")
self.gridLayout_78.addWidget(self.label_65, 1, 0, 1, 1)
self.reclassification_name_combo = QtWidgets.QComboBox(self.tab_reclassification)
sizePolicy = QtWidgets.QSizePolicy(QtWidgets.QSizePolicy.Preferred, QtWidgets.QSizePolicy.Fixed)
sizePolicy.setHorizontalStretch(1)
sizePolicy.setVerticalStretch(0)
sizePolicy.setHeightForWidth(self.reclassification_name_combo.sizePolicy().hasHeightForWidth())
self.reclassification_name_combo.setSizePolicy(sizePolicy)
self.reclassification_name_combo.setObjectName("reclassification_name_combo")
self.gridLayout_78.addWidget(self.reclassification_name_combo, 1, 1, 1, 1)
self.toolButton_reload_12 = QtWidgets.QToolButton(self.tab_reclassification)
self.toolButton_reload_12.setStyleSheet("margin: 0px;padding: 0px;")
self.toolButton_reload_12.setIcon(icon55)
self.toolButton_reload_12.setIconSize(QtCore.QSize(22, 22))
self.toolButton_reload_12.setObjectName("toolButton_reload_12")
self.gridLayout_78.addWidget(self.toolButton_reload_12, 1, 2, 1, 1)
self.horizontalLayout_41 = QtWidgets.QHBoxLayout()
self.horizontalLayout_41.setObjectName("horizontalLayout_41")
self.label_190 = QtWidgets.QLabel(self.tab_reclassification)
self.label_190.setStyleSheet("background-color : #656565; color : white")
self.label_190.setFrameShape(QtWidgets.QFrame.Panel)
self.label_190.setFrameShadow(QtWidgets.QFrame.Sunken)
self.label_190.setObjectName("label_190")
self.horizontalLayout_41.addWidget(self.label_190)
self.gridLayout_78.addLayout(self.horizontalLayout_41, 0, 0, 1, 3)
self.gridLayout_191.addLayout(self.gridLayout_78, 0, 0, 1, 1)
self.gridLayout_79 = QtWidgets.QGridLayout()
self.gridLayout_79.setObjectName("gridLayout_79")
self.calculate_unique_values_toolButton = QtWidgets.QToolButton(self.tab_reclassification)
self.calculate_unique_values_toolButton.setStyleSheet("margin: 0px;padding: 0px;")
self.calculate_unique_values_toolButton.setIcon(icon67)
self.calculate_unique_values_toolButton.setIconSize(QtCore.QSize(22, 22))
self.calculate_unique_values_toolButton.setObjectName("calculate_unique_values_toolButton")
self.gridLayout_79.addWidget(self.calculate_unique_values_toolButton, 1, 4, 1, 1)
self.CID_MCID_code_checkBox = QtWidgets.QCheckBox(self.tab_reclassification)
self.CID_MCID_code_checkBox.setObjectName("CID_MCID_code_checkBox")
self.gridLayout_79.addWidget(self.CID_MCID_code_checkBox, 1, 2, 1, 1)
self.label_98 = QtWidgets.QLabel(self.tab_reclassification)
sizePolicy = QtWidgets.QSizePolicy(QtWidgets.QSizePolicy.Maximum, QtWidgets.QSizePolicy.Preferred)
sizePolicy.setHorizontalStretch(0)
sizePolicy.setVerticalStretch(0)
sizePolicy.setHeightForWidth(self.label_98.sizePolicy().hasHeightForWidth())
self.label_98.setSizePolicy(sizePolicy)
self.label_98.setAlignment(QtCore.Qt.AlignLeading|QtCore.Qt.AlignLeft|QtCore.Qt.AlignVCenter)
self.label_98.setObjectName("label_98")
self.gridLayout_79.addWidget(self.label_98, 1, 3, 1, 1)
self.label_54 = QtWidgets.QLabel(self.tab_reclassification)
sizePolicy = QtWidgets.QSizePolicy(QtWidgets.QSizePolicy.Expanding, QtWidgets.QSizePolicy.Preferred)
sizePolicy.setHorizontalStretch(0)
sizePolicy.setVerticalStretch(0)
sizePolicy.setHeightForWidth(self.label_54.sizePolicy().hasHeightForWidth())
self.label_54.setSizePolicy(sizePolicy)
self.label_54.setStyleSheet("background-color : #656565; color : white")
self.label_54.setFrameShape(QtWidgets.QFrame.Panel)
self.label_54.setFrameShadow(QtWidgets.QFrame.Sunken)
self.label_54.setObjectName("label_54")
self.gridLayout_79.addWidget(self.label_54, 0, 2, 1, 3)
self.incremental_new_values_toolButton = QtWidgets.QToolButton(self.tab_reclassification)
self.incremental_new_values_toolButton.setStyleSheet("margin: 0px;padding: 0px;")
self.incremental_new_values_toolButton.setIcon(icon67)
self.incremental_new_values_toolButton.setIconSize(QtCore.QSize(22, 22))
self.incremental_new_values_toolButton.setObjectName("incremental_new_values_toolButton")
self.gridLayout_79.addWidget(self.incremental_new_values_toolButton, 2, 4, 1, 1)
self.label_271 = QtWidgets.QLabel(self.tab_reclassification)
sizePolicy = QtWidgets.QSizePolicy(QtWidgets.QSizePolicy.Maximum, QtWidgets.QSizePolicy.Preferred)
sizePolicy.setHorizontalStretch(0)
sizePolicy.setVerticalStretch(0)
sizePolicy.setHeightForWidth(self.label_271.sizePolicy().hasHeightForWidth())
self.label_271.setSizePolicy(sizePolicy)
self.label_271.setAlignment(QtCore.Qt.AlignLeading|QtCore.Qt.AlignLeft|QtCore.Qt.AlignVCenter)
self.label_271.setObjectName("label_271")
self.gridLayout_79.addWidget(self.label_271, 2, 3, 1, 1)
self.gridLayout_191.addLayout(self.gridLayout_79, 1, 0, 1, 1)
self.gridLayout_77 = QtWidgets.QGridLayout()
self.gridLayout_77.setObjectName("gridLayout_77")
self.reclass_values_tableWidget = QtWidgets.QTableWidget(self.tab_reclassification)
self.reclass_values_tableWidget.setAlternatingRowColors(True)
self.reclass_values_tableWidget.setObjectName("reclass_values_tableWidget")
self.reclass_values_tableWidget.setColumnCount(2)
self.reclass_values_tableWidget.setRowCount(0)
item = QtWidgets.QTableWidgetItem()
self.reclass_values_tableWidget.setHorizontalHeaderItem(0, item)
item = QtWidgets.QTableWidgetItem()
self.reclass_values_tableWidget.setHorizontalHeaderItem(1, item)
self.reclass_values_tableWidget.horizontalHeader().setStretchLastSection(True)
self.reclass_values_tableWidget.verticalHeader().setDefaultSectionSize(24)
self.gridLayout_77.addWidget(self.reclass_values_tableWidget, 0, 0, 1, 1)
self.gridLayout_81 = QtWidgets.QGridLayout()
self.gridLayout_81.setObjectName("gridLayout_81")
self.add_value_pushButton = QtWidgets.QToolButton(self.tab_reclassification)
self.add_value_pushButton.setStyleSheet("margin: 0px;padding: 0px;")
self.add_value_pushButton.setIcon(icon66)
self.add_value_pushButton.setIconSize(QtCore.QSize(22, 22))
self.add_value_pushButton.setObjectName("add_value_pushButton")
self.gridLayout_81.addWidget(self.add_value_pushButton, 1, 0, 1, 1)
self.remove_row_pushButton = QtWidgets.QToolButton(self.tab_reclassification)
self.remove_row_pushButton.setStyleSheet("margin: 0px;padding: 0px;")
self.remove_row_pushButton.setIcon(icon58)
self.remove_row_pushButton.setIconSize(QtCore.QSize(22, 22))
self.remove_row_pushButton.setObjectName("remove_row_pushButton")
self.gridLayout_81.addWidget(self.remove_row_pushButton, 2, 0, 1, 1)
spacerItem127 = QtWidgets.QSpacerItem(20, 40, QtWidgets.QSizePolicy.Minimum, QtWidgets.QSizePolicy.Expanding)
self.gridLayout_81.addItem(spacerItem127, 0, 0, 1, 1)
spacerItem128 = QtWidgets.QSpacerItem(20, 40, QtWidgets.QSizePolicy.Minimum, QtWidgets.QSizePolicy.Expanding)
self.gridLayout_81.addItem(spacerItem128, 6, 0, 1, 1)
self.import_reclass_toolButton = QtWidgets.QToolButton(self.tab_reclassification)
self.import_reclass_toolButton.setStyleSheet("margin: 0px;padding: 0px")
self.import_reclass_toolButton.setIcon(icon54)
self.import_reclass_toolButton.setIconSize(QtCore.QSize(22, 22))
self.import_reclass_toolButton.setObjectName("import_reclass_toolButton")
self.gridLayout_81.addWidget(self.import_reclass_toolButton, 4, 0, 1, 1)
self.export_reclass_toolButton = QtWidgets.QToolButton(self.tab_reclassification)
self.export_reclass_toolButton.setStyleSheet("margin: 0px;padding: 0px")
self.export_reclass_toolButton.setIcon(icon53)
self.export_reclass_toolButton.setIconSize(QtCore.QSize(22, 22))
self.export_reclass_toolButton.setObjectName("export_reclass_toolButton")
self.gridLayout_81.addWidget(self.export_reclass_toolButton, 5, 0, 1, 1)
spacerItem129 = QtWidgets.QSpacerItem(20, 40, QtWidgets.QSizePolicy.Minimum, QtWidgets.QSizePolicy.Expanding)
self.gridLayout_81.addItem(spacerItem129, 3, 0, 1, 1)
self.gridLayout_77.addLayout(self.gridLayout_81, 0, 1, 1, 1)
self.gridLayout_191.addLayout(self.gridLayout_77, 2, 0, 1, 1)
self.gridLayout_80 = QtWidgets.QGridLayout()
self.gridLayout_80.setObjectName("gridLayout_80")
self.gridLayout_82 = QtWidgets.QGridLayout()
self.gridLayout_82.setObjectName("gridLayout_82")
self.apply_symbology_checkBox = QtWidgets.QCheckBox(self.tab_reclassification)
self.apply_symbology_checkBox.setChecked(False)
self.apply_symbology_checkBox.setObjectName("apply_symbology_checkBox")
self.gridLayout_82.addWidget(self.apply_symbology_checkBox, 1, 0, 1, 1)
self.class_macroclass_comboBox_2 = QtWidgets.QComboBox(self.tab_reclassification)
sizePolicy = QtWidgets.QSizePolicy(QtWidgets.QSizePolicy.Preferred, QtWidgets.QSizePolicy.Fixed)
sizePolicy.setHorizontalStretch(1)
sizePolicy.setVerticalStretch(0)
sizePolicy.setHeightForWidth(self.class_macroclass_comboBox_2.sizePolicy().hasHeightForWidth())
self.class_macroclass_comboBox_2.setSizePolicy(sizePolicy)
self.class_macroclass_comboBox_2.setObjectName("class_macroclass_comboBox_2")
self.class_macroclass_comboBox_2.addItem("")
self.class_macroclass_comboBox_2.addItem("")
self.gridLayout_82.addWidget(self.class_macroclass_comboBox_2, 1, 1, 1, 1)
self.label_51 = QtWidgets.QLabel(self.tab_reclassification)
sizePolicy = QtWidgets.QSizePolicy(QtWidgets.QSizePolicy.Expanding, QtWidgets.QSizePolicy.Preferred)
sizePolicy.setHorizontalStretch(0)
sizePolicy.setVerticalStretch(0)
sizePolicy.setHeightForWidth(self.label_51.sizePolicy().hasHeightForWidth())
self.label_51.setSizePolicy(sizePolicy)
self.label_51.setStyleSheet("background-color : #656565; color : white")
self.label_51.setFrameShape(QtWidgets.QFrame.Panel)
self.label_51.setFrameShadow(QtWidgets.QFrame.Sunken)
self.label_51.setObjectName("label_51")
self.gridLayout_82.addWidget(self.label_51, 0, 0, 1, 2)
self.gridLayout_80.addLayout(self.gridLayout_82, 0, 0, 1, 1)
self.gridLayout_191.addLayout(self.gridLayout_80, 3, 0, 1, 1)
self.gridLayout_83 = QtWidgets.QGridLayout()
self.gridLayout_83.setObjectName("gridLayout_83")
spacerItem130 = QtWidgets.QSpacerItem(40, 20, QtWidgets.QSizePolicy.Expanding, QtWidgets.QSizePolicy.Minimum)
self.gridLayout_83.addItem(spacerItem130, 1, 0, 1, 1)
self.label_172 = QtWidgets.QLabel(self.tab_reclassification)
self.label_172.setStyleSheet("background-color : #656565; color : white")
self.label_172.setFrameShape(QtWidgets.QFrame.Panel)
self.label_172.setFrameShadow(QtWidgets.QFrame.Sunken)
self.label_172.setAlignment(QtCore.Qt.AlignLeading|QtCore.Qt.AlignLeft|QtCore.Qt.AlignVCenter)
self.label_172.setObjectName("label_172")
self.gridLayout_83.addWidget(self.label_172, 0, 0, 1, 3)
self.reclassify_toolButton = QtWidgets.QToolButton(self.tab_reclassification)
font = QtGui.QFont()
font.setBold(True)
font.setWeight(75)
self.reclassify_toolButton.setFont(font)
self.reclassify_toolButton.setLayoutDirection(QtCore.Qt.RightToLeft)
self.reclassify_toolButton.setStyleSheet("margin: 0px;padding: 0px;")
self.reclassify_toolButton.setIcon(icon64)
self.reclassify_toolButton.setIconSize(QtCore.QSize(34, 34))
self.reclassify_toolButton.setToolButtonStyle(QtCore.Qt.ToolButtonTextBesideIcon)
self.reclassify_toolButton.setObjectName("reclassify_toolButton")
self.gridLayout_83.addWidget(self.reclassify_toolButton, 1, 2, 1, 1)
self.reclassification = QtWidgets.QToolButton(self.tab_reclassification)
font = QtGui.QFont()
font.setBold(True)
font.setWeight(75)
self.reclassification.setFont(font)
self.reclassification.setLayoutDirection(QtCore.Qt.RightToLeft)
self.reclassification.setStyleSheet("margin: 0px;padding: 0px;")
self.reclassification.setIcon(icon48)
self.reclassification.setIconSize(QtCore.QSize(34, 34))
self.reclassification.setToolButtonStyle(QtCore.Qt.ToolButtonTextBesideIcon)
self.reclassification.setObjectName("reclassification")
self.gridLayout_83.addWidget(self.reclassification, 1, 1, 1, 1)
self.gridLayout_191.addLayout(self.gridLayout_83, 4, 0, 1, 1)
self.tabWidget_2.addTab(self.tab_reclassification, "")
self.tab = QtWidgets.QWidget()
self.tab.setObjectName("tab")
self.gridLayout_359 = QtWidgets.QGridLayout(self.tab)
self.gridLayout_359.setObjectName("gridLayout_359")
self.horizontalLayout_42 = QtWidgets.QHBoxLayout()
self.horizontalLayout_42.setObjectName("horizontalLayout_42")
self.label_193 = QtWidgets.QLabel(self.tab)
self.label_193.setStyleSheet("background-color : #656565; color : white")
self.label_193.setFrameShape(QtWidgets.QFrame.Panel)
self.label_193.setFrameShadow(QtWidgets.QFrame.Sunken)
self.label_193.setObjectName("label_193")
self.horizontalLayout_42.addWidget(self.label_193)
self.gridLayout_359.addLayout(self.horizontalLayout_42, 0, 0, 1, 1)
self.gridLayout_196 = QtWidgets.QGridLayout()
self.gridLayout_196.setObjectName("gridLayout_196")
self.undo_edit_Button = QtWidgets.QToolButton(self.tab)
self.undo_edit_Button.setEnabled(False)
self.undo_edit_Button.setStyleSheet("margin: 0px;padding: 0px;")
icon81 = QtGui.QIcon()
icon81.addPixmap(QtGui.QPixmap(":/plugins/semiautomaticclassificationplugin/icons/semiautomaticclassificationplugin_undo_edit_raster.svg"), QtGui.QIcon.Normal, QtGui.QIcon.Off)
self.undo_edit_Button.setIcon(icon81)
self.undo_edit_Button.setIconSize(QtCore.QSize(22, 22))
self.undo_edit_Button.setObjectName("undo_edit_Button")
self.gridLayout_196.addWidget(self.undo_edit_Button, 10, 0, 1, 1)
self.label_173 = QtWidgets.QLabel(self.tab)
self.label_173.setStyleSheet("background-color : #656565; color : white")
self.label_173.setFrameShape(QtWidgets.QFrame.Panel)
self.label_173.setFrameShadow(QtWidgets.QFrame.Sunken)
self.label_173.setAlignment(QtCore.Qt.AlignLeading|QtCore.Qt.AlignLeft|QtCore.Qt.AlignVCenter)
self.label_173.setObjectName("label_173")
self.gridLayout_196.addWidget(self.label_173, 9, 0, 1, 4)
self.horizontalLayout_8 = QtWidgets.QHBoxLayout()
self.horizontalLayout_8.setObjectName("horizontalLayout_8")
self.label_66 = QtWidgets.QLabel(self.tab)
sizePolicy = QtWidgets.QSizePolicy(QtWidgets.QSizePolicy.Minimum, QtWidgets.QSizePolicy.Preferred)
sizePolicy.setHorizontalStretch(0)
sizePolicy.setVerticalStretch(0)
sizePolicy.setHeightForWidth(self.label_66.sizePolicy().hasHeightForWidth())
self.label_66.setSizePolicy(sizePolicy)
self.label_66.setMinimumSize(QtCore.QSize(229, 0))
self.label_66.setFrameShadow(QtWidgets.QFrame.Sunken)
self.label_66.setAlignment(QtCore.Qt.AlignLeading|QtCore.Qt.AlignLeft|QtCore.Qt.AlignVCenter)
self.label_66.setObjectName("label_66")
self.horizontalLayout_8.addWidget(self.label_66)
self.edit_raster_name_combo = QtWidgets.QComboBox(self.tab)
sizePolicy = QtWidgets.QSizePolicy(QtWidgets.QSizePolicy.Preferred, QtWidgets.QSizePolicy.Fixed)
sizePolicy.setHorizontalStretch(1)
sizePolicy.setVerticalStretch(0)
sizePolicy.setHeightForWidth(self.edit_raster_name_combo.sizePolicy().hasHeightForWidth())
self.edit_raster_name_combo.setSizePolicy(sizePolicy)
self.edit_raster_name_combo.setObjectName("edit_raster_name_combo")
self.horizontalLayout_8.addWidget(self.edit_raster_name_combo)
self.toolButton_reload_14 = QtWidgets.QToolButton(self.tab)
self.toolButton_reload_14.setStyleSheet("margin: 0px;padding: 0px;")
self.toolButton_reload_14.setIcon(icon55)
self.toolButton_reload_14.setIconSize(QtCore.QSize(22, 22))
self.toolButton_reload_14.setObjectName("toolButton_reload_14")
self.horizontalLayout_8.addWidget(self.toolButton_reload_14)
self.gridLayout_196.addLayout(self.horizontalLayout_8, 0, 0, 1, 4)
self.horizontalLayout_6 = QtWidgets.QHBoxLayout()
self.horizontalLayout_6.setObjectName("horizontalLayout_6")
self.use_constant_val_checkBox = QtWidgets.QCheckBox(self.tab)
self.use_constant_val_checkBox.setChecked(True)
self.use_constant_val_checkBox.setObjectName("use_constant_val_checkBox")
self.horizontalLayout_6.addWidget(self.use_constant_val_checkBox)
self.value_spinBox = QtWidgets.QSpinBox(self.tab)
self.value_spinBox.setMinimum(-2147483647)
self.value_spinBox.setMaximum(2147483647)
self.value_spinBox.setObjectName("value_spinBox")
self.horizontalLayout_6.addWidget(self.value_spinBox)
spacerItem131 = QtWidgets.QSpacerItem(40, 20, QtWidgets.QSizePolicy.Expanding, QtWidgets.QSizePolicy.Minimum)
self.horizontalLayout_6.addItem(spacerItem131)
self.gridLayout_196.addLayout(self.horizontalLayout_6, 6, 0, 1, 4)
self.horizontalLayout_7 = QtWidgets.QHBoxLayout()
self.horizontalLayout_7.setObjectName("horizontalLayout_7")
self.use_expression_checkBox = QtWidgets.QCheckBox(self.tab)
self.use_expression_checkBox.setObjectName("use_expression_checkBox")
self.horizontalLayout_7.addWidget(self.use_expression_checkBox)
self.expression_lineEdit = QtWidgets.QLineEdit(self.tab)
sizePolicy = QtWidgets.QSizePolicy(QtWidgets.QSizePolicy.MinimumExpanding, QtWidgets.QSizePolicy.Fixed)
sizePolicy.setHorizontalStretch(0)
sizePolicy.setVerticalStretch(0)
sizePolicy.setHeightForWidth(self.expression_lineEdit.sizePolicy().hasHeightForWidth())
self.expression_lineEdit.setSizePolicy(sizePolicy)
self.expression_lineEdit.setMinimumSize(QtCore.QSize(400, 26))
self.expression_lineEdit.setMaxLength(10000)
self.expression_lineEdit.setObjectName("expression_lineEdit")
self.horizontalLayout_7.addWidget(self.expression_lineEdit)
self.gridLayout_196.addLayout(self.horizontalLayout_7, 7, 0, 1, 4)
spacerItem132 = QtWidgets.QSpacerItem(40, 20, QtWidgets.QSizePolicy.Expanding, QtWidgets.QSizePolicy.Minimum)
self.gridLayout_196.addItem(spacerItem132, 10, 1, 1, 1)
spacerItem133 = QtWidgets.QSpacerItem(20, 40, QtWidgets.QSizePolicy.Minimum, QtWidgets.QSizePolicy.Expanding)
self.gridLayout_196.addItem(spacerItem133, 8, 3, 1, 1)
self.horizontalLayout_9 = QtWidgets.QHBoxLayout()
self.horizontalLayout_9.setObjectName("horizontalLayout_9")
self.label_81 = QtWidgets.QLabel(self.tab)
self.label_81.setStyleSheet("background-color : #656565; color : white")
self.label_81.setFrameShape(QtWidgets.QFrame.Panel)
self.label_81.setFrameShadow(QtWidgets.QFrame.Sunken)
self.label_81.setObjectName("label_81")
self.horizontalLayout_9.addWidget(self.label_81)
self.gridLayout_196.addLayout(self.horizontalLayout_9, 1, 0, 1, 4)
self.horizontalLayout_24 = QtWidgets.QHBoxLayout()
self.horizontalLayout_24.setObjectName("horizontalLayout_24")
self.use_field_vector_checkBox = QtWidgets.QCheckBox(self.tab)
self.use_field_vector_checkBox.setObjectName("use_field_vector_checkBox")
self.horizontalLayout_24.addWidget(self.use_field_vector_checkBox)
self.field_comboBox_2 = QtWidgets.QComboBox(self.tab)
sizePolicy = QtWidgets.QSizePolicy(QtWidgets.QSizePolicy.Preferred, QtWidgets.QSizePolicy.Fixed)
sizePolicy.setHorizontalStretch(1)
sizePolicy.setVerticalStretch(0)
sizePolicy.setHeightForWidth(self.field_comboBox_2.sizePolicy().hasHeightForWidth())
self.field_comboBox_2.setSizePolicy(sizePolicy)
self.field_comboBox_2.setObjectName("field_comboBox_2")
self.horizontalLayout_24.addWidget(self.field_comboBox_2)
self.gridLayout_196.addLayout(self.horizontalLayout_24, 5, 0, 1, 4)
self.horizontalLayout_21 = QtWidgets.QHBoxLayout()
self.horizontalLayout_21.setObjectName("horizontalLayout_21")
self.edit_val_use_vector_radioButton = QtWidgets.QRadioButton(self.tab)
self.edit_val_use_vector_radioButton.setObjectName("edit_val_use_vector_radioButton")
self.horizontalLayout_21.addWidget(self.edit_val_use_vector_radioButton)
self.vector_name_combo_2 = QtWidgets.QComboBox(self.tab)
sizePolicy = QtWidgets.QSizePolicy(QtWidgets.QSizePolicy.Preferred, QtWidgets.QSizePolicy.Fixed)
sizePolicy.setHorizontalStretch(1)
sizePolicy.setVerticalStretch(0)
sizePolicy.setHeightForWidth(self.vector_name_combo_2.sizePolicy().hasHeightForWidth())
self.vector_name_combo_2.setSizePolicy(sizePolicy)
self.vector_name_combo_2.setObjectName("vector_name_combo_2")
self.horizontalLayout_21.addWidget(self.vector_name_combo_2)
self.toolButton_reload_20 = QtWidgets.QToolButton(self.tab)
self.toolButton_reload_20.setStyleSheet("margin: 0px;padding: 0px;")
self.toolButton_reload_20.setIcon(icon55)
self.toolButton_reload_20.setIconSize(QtCore.QSize(22, 22))
self.toolButton_reload_20.setObjectName("toolButton_reload_20")
self.horizontalLayout_21.addWidget(self.toolButton_reload_20)
self.gridLayout_196.addLayout(self.horizontalLayout_21, 3, 0, 1, 4)
self.horizontalLayout_23 = QtWidgets.QHBoxLayout()
self.horizontalLayout_23.setObjectName("horizontalLayout_23")
self.edit_val_use_ROI_radioButton = QtWidgets.QRadioButton(self.tab)
self.edit_val_use_ROI_radioButton.setChecked(True)
self.edit_val_use_ROI_radioButton.setObjectName("edit_val_use_ROI_radioButton")
self.horizontalLayout_23.addWidget(self.edit_val_use_ROI_radioButton)
self.gridLayout_196.addLayout(self.horizontalLayout_23, 2, 0, 1, 4)
self.horizontalLayout_26 = QtWidgets.QHBoxLayout()
self.horizontalLayout_26.setObjectName("horizontalLayout_26")
self.label_158 = QtWidgets.QLabel(self.tab)
self.label_158.setStyleSheet("background-color : #656565; color : white")
self.label_158.setFrameShape(QtWidgets.QFrame.Panel)
self.label_158.setFrameShadow(QtWidgets.QFrame.Sunken)
self.label_158.setObjectName("label_158")
self.horizontalLayout_26.addWidget(self.label_158)
self.gridLayout_196.addLayout(self.horizontalLayout_26, 4, 0, 1, 4)
self.raster_set_value_toolButton = QtWidgets.QToolButton(self.tab)
font = QtGui.QFont()
font.setBold(True)
font.setWeight(75)
self.raster_set_value_toolButton.setFont(font)
self.raster_set_value_toolButton.setLayoutDirection(QtCore.Qt.RightToLeft)
self.raster_set_value_toolButton.setStyleSheet("margin: 0px;padding: 0px;")
self.raster_set_value_toolButton.setIcon(icon64)
self.raster_set_value_toolButton.setIconSize(QtCore.QSize(34, 34))
self.raster_set_value_toolButton.setToolButtonStyle(QtCore.Qt.ToolButtonTextBesideIcon)
self.raster_set_value_toolButton.setObjectName("raster_set_value_toolButton")
self.gridLayout_196.addWidget(self.raster_set_value_toolButton, 10, 3, 1, 1)
self.edit_raster_using_vector = QtWidgets.QToolButton(self.tab)
font = QtGui.QFont()
font.setBold(True)
font.setWeight(75)
self.edit_raster_using_vector.setFont(font)
self.edit_raster_using_vector.setLayoutDirection(QtCore.Qt.RightToLeft)
self.edit_raster_using_vector.setStyleSheet("margin: 0px;padding: 0px;")
self.edit_raster_using_vector.setIcon(icon48)
self.edit_raster_using_vector.setIconSize(QtCore.QSize(34, 34))
self.edit_raster_using_vector.setToolButtonStyle(QtCore.Qt.ToolButtonTextBesideIcon)
self.edit_raster_using_vector.setObjectName("edit_raster_using_vector")
self.gridLayout_196.addWidget(self.edit_raster_using_vector, 10, 2, 1, 1)
self.gridLayout_359.addLayout(self.gridLayout_196, 1, 0, 1, 1)
self.tabWidget_2.addTab(self.tab, "")
self.tab_sieve = QtWidgets.QWidget()
self.tab_sieve.setObjectName("tab_sieve")
self.gridLayout_202 = QtWidgets.QGridLayout(self.tab_sieve)
self.gridLayout_202.setObjectName("gridLayout_202")
self.gridLayout_64 = QtWidgets.QGridLayout()
self.gridLayout_64.setObjectName("gridLayout_64")
spacerItem134 = QtWidgets.QSpacerItem(20, 40, QtWidgets.QSizePolicy.Minimum, QtWidgets.QSizePolicy.Expanding)
self.gridLayout_64.addItem(spacerItem134, 2, 2, 1, 1)
spacerItem135 = QtWidgets.QSpacerItem(40, 20, QtWidgets.QSizePolicy.Expanding, QtWidgets.QSizePolicy.Minimum)
self.gridLayout_64.addItem(spacerItem135, 4, 0, 1, 1)
self.horizontalLayout_11 = QtWidgets.QHBoxLayout()
self.horizontalLayout_11.setObjectName("horizontalLayout_11")
self.label_70 = QtWidgets.QLabel(self.tab_sieve)
sizePolicy = QtWidgets.QSizePolicy(QtWidgets.QSizePolicy.Minimum, QtWidgets.QSizePolicy.Preferred)
sizePolicy.setHorizontalStretch(0)
sizePolicy.setVerticalStretch(0)
sizePolicy.setHeightForWidth(self.label_70.sizePolicy().hasHeightForWidth())
self.label_70.setSizePolicy(sizePolicy)
self.label_70.setMinimumSize(QtCore.QSize(229, 0))
self.label_70.setFrameShadow(QtWidgets.QFrame.Sunken)
self.label_70.setAlignment(QtCore.Qt.AlignLeading|QtCore.Qt.AlignLeft|QtCore.Qt.AlignVCenter)
self.label_70.setObjectName("label_70")
self.horizontalLayout_11.addWidget(self.label_70)
self.sieve_raster_name_combo = QtWidgets.QComboBox(self.tab_sieve)
sizePolicy = QtWidgets.QSizePolicy(QtWidgets.QSizePolicy.Preferred, QtWidgets.QSizePolicy.Fixed)
sizePolicy.setHorizontalStretch(1)
sizePolicy.setVerticalStretch(0)
sizePolicy.setHeightForWidth(self.sieve_raster_name_combo.sizePolicy().hasHeightForWidth())
self.sieve_raster_name_combo.setSizePolicy(sizePolicy)
self.sieve_raster_name_combo.setObjectName("sieve_raster_name_combo")
self.horizontalLayout_11.addWidget(self.sieve_raster_name_combo)
self.toolButton_reload_15 = QtWidgets.QToolButton(self.tab_sieve)
self.toolButton_reload_15.setStyleSheet("margin: 0px;padding: 0px;")
self.toolButton_reload_15.setIcon(icon55)
self.toolButton_reload_15.setIconSize(QtCore.QSize(22, 22))
self.toolButton_reload_15.setObjectName("toolButton_reload_15")
self.horizontalLayout_11.addWidget(self.toolButton_reload_15)
self.gridLayout_64.addLayout(self.horizontalLayout_11, 0, 0, 1, 3)
self.horizontalLayout_12 = QtWidgets.QHBoxLayout()
self.horizontalLayout_12.setObjectName("horizontalLayout_12")
self.label_133 = QtWidgets.QLabel(self.tab_sieve)
sizePolicy = QtWidgets.QSizePolicy(QtWidgets.QSizePolicy.Minimum, QtWidgets.QSizePolicy.Preferred)
sizePolicy.setHorizontalStretch(0)
sizePolicy.setVerticalStretch(0)
sizePolicy.setHeightForWidth(self.label_133.sizePolicy().hasHeightForWidth())
self.label_133.setSizePolicy(sizePolicy)
self.label_133.setMinimumSize(QtCore.QSize(229, 0))
self.label_133.setFrameShadow(QtWidgets.QFrame.Sunken)
self.label_133.setAlignment(QtCore.Qt.AlignLeading|QtCore.Qt.AlignLeft|QtCore.Qt.AlignVCenter)
self.label_133.setObjectName("label_133")
self.horizontalLayout_12.addWidget(self.label_133)
self.sieve_threshold_spinBox = QtWidgets.QSpinBox(self.tab_sieve)
self.sieve_threshold_spinBox.setMinimum(2)
self.sieve_threshold_spinBox.setMaximum(10000)
self.sieve_threshold_spinBox.setObjectName("sieve_threshold_spinBox")
self.horizontalLayout_12.addWidget(self.sieve_threshold_spinBox)
spacerItem136 = QtWidgets.QSpacerItem(40, 20, QtWidgets.QSizePolicy.Expanding, QtWidgets.QSizePolicy.Minimum)
self.horizontalLayout_12.addItem(spacerItem136)
self.label_136 = QtWidgets.QLabel(self.tab_sieve)
sizePolicy = QtWidgets.QSizePolicy(QtWidgets.QSizePolicy.Minimum, QtWidgets.QSizePolicy.Preferred)
sizePolicy.setHorizontalStretch(0)
sizePolicy.setVerticalStretch(0)
sizePolicy.setHeightForWidth(self.label_136.sizePolicy().hasHeightForWidth())
self.label_136.setSizePolicy(sizePolicy)
self.label_136.setMinimumSize(QtCore.QSize(229, 0))
self.label_136.setFrameShadow(QtWidgets.QFrame.Sunken)
self.label_136.setAlignment(QtCore.Qt.AlignLeading|QtCore.Qt.AlignLeft|QtCore.Qt.AlignVCenter)
self.label_136.setObjectName("label_136")
self.horizontalLayout_12.addWidget(self.label_136)
self.sieve_connection_combo = QtWidgets.QComboBox(self.tab_sieve)
sizePolicy = QtWidgets.QSizePolicy(QtWidgets.QSizePolicy.Preferred, QtWidgets.QSizePolicy.Fixed)
sizePolicy.setHorizontalStretch(1)
sizePolicy.setVerticalStretch(0)
sizePolicy.setHeightForWidth(self.sieve_connection_combo.sizePolicy().hasHeightForWidth())
self.sieve_connection_combo.setSizePolicy(sizePolicy)
self.sieve_connection_combo.setMaximumSize(QtCore.QSize(80, 16777215))
self.sieve_connection_combo.setObjectName("sieve_connection_combo")
self.sieve_connection_combo.addItem("")
self.sieve_connection_combo.addItem("")
self.horizontalLayout_12.addWidget(self.sieve_connection_combo)
self.gridLayout_64.addLayout(self.horizontalLayout_12, 1, 0, 1, 3)
self.label_174 = QtWidgets.QLabel(self.tab_sieve)
self.label_174.setStyleSheet("background-color : #656565; color : white")
self.label_174.setFrameShape(QtWidgets.QFrame.Panel)
self.label_174.setFrameShadow(QtWidgets.QFrame.Sunken)
self.label_174.setAlignment(QtCore.Qt.AlignLeading|QtCore.Qt.AlignLeft|QtCore.Qt.AlignVCenter)
self.label_174.setObjectName("label_174")
self.gridLayout_64.addWidget(self.label_174, 3, 0, 1, 3)
self.sieve_toolButton = QtWidgets.QToolButton(self.tab_sieve)
font = QtGui.QFont()
font.setBold(True)
font.setWeight(75)
self.sieve_toolButton.setFont(font)
self.sieve_toolButton.setLayoutDirection(QtCore.Qt.RightToLeft)
self.sieve_toolButton.setStyleSheet("margin: 0px;padding: 0px;")
self.sieve_toolButton.setIcon(icon64)
self.sieve_toolButton.setIconSize(QtCore.QSize(34, 34))
self.sieve_toolButton.setToolButtonStyle(QtCore.Qt.ToolButtonTextBesideIcon)
self.sieve_toolButton.setObjectName("sieve_toolButton")
self.gridLayout_64.addWidget(self.sieve_toolButton, 4, 2, 1, 1)
self.classification_sieve = QtWidgets.QToolButton(self.tab_sieve)
font = QtGui.QFont()
font.setBold(True)
font.setWeight(75)
self.classification_sieve.setFont(font)
self.classification_sieve.setLayoutDirection(QtCore.Qt.RightToLeft)
self.classification_sieve.setStyleSheet("margin: 0px;padding: 0px;")
self.classification_sieve.setIcon(icon48)
self.classification_sieve.setIconSize(QtCore.QSize(34, 34))
self.classification_sieve.setToolButtonStyle(QtCore.Qt.ToolButtonTextBesideIcon)
self.classification_sieve.setObjectName("classification_sieve")
self.gridLayout_64.addWidget(self.classification_sieve, 4, 1, 1, 1)
self.gridLayout_202.addLayout(self.gridLayout_64, 1, 0, 1, 1)
self.horizontalLayout_43 = QtWidgets.QHBoxLayout()
self.horizontalLayout_43.setObjectName("horizontalLayout_43")
self.label_195 = QtWidgets.QLabel(self.tab_sieve)
self.label_195.setStyleSheet("background-color : #656565; color : white")
self.label_195.setFrameShape(QtWidgets.QFrame.Panel)
self.label_195.setFrameShadow(QtWidgets.QFrame.Sunken)
self.label_195.setObjectName("label_195")
self.horizontalLayout_43.addWidget(self.label_195)
self.gridLayout_202.addLayout(self.horizontalLayout_43, 0, 0, 1, 1)
self.tabWidget_2.addTab(self.tab_sieve, "")
self.tab_erosion = QtWidgets.QWidget()
self.tab_erosion.setObjectName("tab_erosion")
self.gridLayout_205 = QtWidgets.QGridLayout(self.tab_erosion)
self.gridLayout_205.setObjectName("gridLayout_205")
self.horizontalLayout_44 = QtWidgets.QHBoxLayout()
self.horizontalLayout_44.setObjectName("horizontalLayout_44")
self.label_202 = QtWidgets.QLabel(self.tab_erosion)
self.label_202.setStyleSheet("background-color : #656565; color : white")
self.label_202.setFrameShape(QtWidgets.QFrame.Panel)
self.label_202.setFrameShadow(QtWidgets.QFrame.Sunken)
self.label_202.setObjectName("label_202")
self.horizontalLayout_44.addWidget(self.label_202)
self.gridLayout_205.addLayout(self.horizontalLayout_44, 0, 0, 1, 1)
self.gridLayout_204 = QtWidgets.QGridLayout()
self.gridLayout_204.setObjectName("gridLayout_204")
spacerItem137 = QtWidgets.QSpacerItem(20, 40, QtWidgets.QSizePolicy.Minimum, QtWidgets.QSizePolicy.Expanding)
self.gridLayout_204.addItem(spacerItem137, 3, 2, 1, 1)
spacerItem138 = QtWidgets.QSpacerItem(40, 20, QtWidgets.QSizePolicy.Expanding, QtWidgets.QSizePolicy.Minimum)
self.gridLayout_204.addItem(spacerItem138, 5, 0, 1, 1)
self.horizontalLayout_13 = QtWidgets.QHBoxLayout()
self.horizontalLayout_13.setObjectName("horizontalLayout_13")
self.label_146 = QtWidgets.QLabel(self.tab_erosion)
sizePolicy = QtWidgets.QSizePolicy(QtWidgets.QSizePolicy.Minimum, QtWidgets.QSizePolicy.Preferred)
sizePolicy.setHorizontalStretch(0)
sizePolicy.setVerticalStretch(0)
sizePolicy.setHeightForWidth(self.label_146.sizePolicy().hasHeightForWidth())
self.label_146.setSizePolicy(sizePolicy)
self.label_146.setMinimumSize(QtCore.QSize(229, 0))
self.label_146.setFrameShadow(QtWidgets.QFrame.Sunken)
self.label_146.setAlignment(QtCore.Qt.AlignLeading|QtCore.Qt.AlignLeft|QtCore.Qt.AlignVCenter)
self.label_146.setObjectName("label_146")
self.horizontalLayout_13.addWidget(self.label_146)
self.erosion_raster_name_combo = QtWidgets.QComboBox(self.tab_erosion)
sizePolicy = QtWidgets.QSizePolicy(QtWidgets.QSizePolicy.Preferred, QtWidgets.QSizePolicy.Fixed)
sizePolicy.setHorizontalStretch(1)
sizePolicy.setVerticalStretch(0)
sizePolicy.setHeightForWidth(self.erosion_raster_name_combo.sizePolicy().hasHeightForWidth())
self.erosion_raster_name_combo.setSizePolicy(sizePolicy)
self.erosion_raster_name_combo.setObjectName("erosion_raster_name_combo")
self.horizontalLayout_13.addWidget(self.erosion_raster_name_combo)
self.toolButton_reload_18 = QtWidgets.QToolButton(self.tab_erosion)
self.toolButton_reload_18.setStyleSheet("margin: 0px;padding: 0px;")
self.toolButton_reload_18.setIcon(icon55)
self.toolButton_reload_18.setIconSize(QtCore.QSize(22, 22))
self.toolButton_reload_18.setObjectName("toolButton_reload_18")
self.horizontalLayout_13.addWidget(self.toolButton_reload_18)
self.gridLayout_204.addLayout(self.horizontalLayout_13, 0, 0, 1, 3)
self.horizontalLayout_14 = QtWidgets.QHBoxLayout()
self.horizontalLayout_14.setObjectName("horizontalLayout_14")
self.label_149 = QtWidgets.QLabel(self.tab_erosion)
sizePolicy = QtWidgets.QSizePolicy(QtWidgets.QSizePolicy.Minimum, QtWidgets.QSizePolicy.Preferred)
sizePolicy.setHorizontalStretch(0)
sizePolicy.setVerticalStretch(0)
sizePolicy.setHeightForWidth(self.label_149.sizePolicy().hasHeightForWidth())
self.label_149.setSizePolicy(sizePolicy)
self.label_149.setMinimumSize(QtCore.QSize(229, 0))
self.label_149.setFrameShadow(QtWidgets.QFrame.Sunken)
self.label_149.setAlignment(QtCore.Qt.AlignLeading|QtCore.Qt.AlignLeft|QtCore.Qt.AlignVCenter)
self.label_149.setObjectName("label_149")
self.horizontalLayout_14.addWidget(self.label_149)
self.erosion_threshold_spinBox = QtWidgets.QSpinBox(self.tab_erosion)
self.erosion_threshold_spinBox.setMinimum(1)
self.erosion_threshold_spinBox.setMaximum(1000)
self.erosion_threshold_spinBox.setProperty("value", 1)
self.erosion_threshold_spinBox.setObjectName("erosion_threshold_spinBox")
self.horizontalLayout_14.addWidget(self.erosion_threshold_spinBox)
self.circular_structure_checkBox_3 = QtWidgets.QCheckBox(self.tab_erosion)
self.circular_structure_checkBox_3.setObjectName("circular_structure_checkBox_3")
self.horizontalLayout_14.addWidget(self.circular_structure_checkBox_3)
spacerItem139 = QtWidgets.QSpacerItem(40, 20, QtWidgets.QSizePolicy.Expanding, QtWidgets.QSizePolicy.Minimum)
self.horizontalLayout_14.addItem(spacerItem139)
self.gridLayout_204.addLayout(self.horizontalLayout_14, 2, 0, 1, 3)
self.horizontalLayout_15 = QtWidgets.QHBoxLayout()
self.horizontalLayout_15.setObjectName("horizontalLayout_15")
self.label_151 = QtWidgets.QLabel(self.tab_erosion)
sizePolicy = QtWidgets.QSizePolicy(QtWidgets.QSizePolicy.Minimum, QtWidgets.QSizePolicy.Preferred)
sizePolicy.setHorizontalStretch(0)
sizePolicy.setVerticalStretch(0)
sizePolicy.setHeightForWidth(self.label_151.sizePolicy().hasHeightForWidth())
self.label_151.setSizePolicy(sizePolicy)
self.label_151.setFrameShadow(QtWidgets.QFrame.Sunken)
self.label_151.setAlignment(QtCore.Qt.AlignLeading|QtCore.Qt.AlignLeft|QtCore.Qt.AlignVCenter)
self.label_151.setObjectName("label_151")
self.horizontalLayout_15.addWidget(self.label_151)
self.erosion_classes_lineEdit = QtWidgets.QLineEdit(self.tab_erosion)
sizePolicy = QtWidgets.QSizePolicy(QtWidgets.QSizePolicy.MinimumExpanding, QtWidgets.QSizePolicy.Fixed)
sizePolicy.setHorizontalStretch(0)
sizePolicy.setVerticalStretch(0)
sizePolicy.setHeightForWidth(self.erosion_classes_lineEdit.sizePolicy().hasHeightForWidth())
self.erosion_classes_lineEdit.setSizePolicy(sizePolicy)
self.erosion_classes_lineEdit.setMinimumSize(QtCore.QSize(400, 26))
self.erosion_classes_lineEdit.setText("")
self.erosion_classes_lineEdit.setMaxLength(10000)
self.erosion_classes_lineEdit.setObjectName("erosion_classes_lineEdit")
self.horizontalLayout_15.addWidget(self.erosion_classes_lineEdit)
self.gridLayout_204.addLayout(self.horizontalLayout_15, 1, 0, 1, 3)
self.label_175 = QtWidgets.QLabel(self.tab_erosion)
self.label_175.setStyleSheet("background-color : #656565; color : white")
self.label_175.setFrameShape(QtWidgets.QFrame.Panel)
self.label_175.setFrameShadow(QtWidgets.QFrame.Sunken)
self.label_175.setAlignment(QtCore.Qt.AlignLeading|QtCore.Qt.AlignLeft|QtCore.Qt.AlignVCenter)
self.label_175.setObjectName("label_175")
self.gridLayout_204.addWidget(self.label_175, 4, 0, 1, 3)
self.class_erosion_toolButton = QtWidgets.QToolButton(self.tab_erosion)
font = QtGui.QFont()
font.setBold(True)
font.setWeight(75)
self.class_erosion_toolButton.setFont(font)
self.class_erosion_toolButton.setLayoutDirection(QtCore.Qt.RightToLeft)
self.class_erosion_toolButton.setStyleSheet("margin: 0px;padding: 0px;")
self.class_erosion_toolButton.setIcon(icon64)
self.class_erosion_toolButton.setIconSize(QtCore.QSize(34, 34))
self.class_erosion_toolButton.setToolButtonStyle(QtCore.Qt.ToolButtonTextBesideIcon)
self.class_erosion_toolButton.setObjectName("class_erosion_toolButton")
self.gridLayout_204.addWidget(self.class_erosion_toolButton, 5, 2, 1, 1)
self.classification_erosion = QtWidgets.QToolButton(self.tab_erosion)
font = QtGui.QFont()
font.setBold(True)
font.setWeight(75)
self.classification_erosion.setFont(font)
self.classification_erosion.setLayoutDirection(QtCore.Qt.RightToLeft)
self.classification_erosion.setStyleSheet("margin: 0px;padding: 0px;")
self.classification_erosion.setIcon(icon48)
self.classification_erosion.setIconSize(QtCore.QSize(34, 34))
self.classification_erosion.setToolButtonStyle(QtCore.Qt.ToolButtonTextBesideIcon)
self.classification_erosion.setObjectName("classification_erosion")
self.gridLayout_204.addWidget(self.classification_erosion, 5, 1, 1, 1)
self.gridLayout_205.addLayout(self.gridLayout_204, 1, 0, 1, 1)
self.tabWidget_2.addTab(self.tab_erosion, "")
self.tab_dilation = QtWidgets.QWidget()
self.tab_dilation.setObjectName("tab_dilation")
self.gridLayout_207 = QtWidgets.QGridLayout(self.tab_dilation)
self.gridLayout_207.setObjectName("gridLayout_207")
self.horizontalLayout_45 = QtWidgets.QHBoxLayout()
self.horizontalLayout_45.setObjectName("horizontalLayout_45")
self.label_204 = QtWidgets.QLabel(self.tab_dilation)
self.label_204.setStyleSheet("background-color : #656565; color : white")
self.label_204.setFrameShape(QtWidgets.QFrame.Panel)
self.label_204.setFrameShadow(QtWidgets.QFrame.Sunken)
self.label_204.setObjectName("label_204")
self.horizontalLayout_45.addWidget(self.label_204)
self.gridLayout_207.addLayout(self.horizontalLayout_45, 0, 0, 1, 1)
self.gridLayout_206 = QtWidgets.QGridLayout()
self.gridLayout_206.setObjectName("gridLayout_206")
spacerItem140 = QtWidgets.QSpacerItem(20, 40, QtWidgets.QSizePolicy.Minimum, QtWidgets.QSizePolicy.Expanding)
self.gridLayout_206.addItem(spacerItem140, 3, 2, 1, 1)
spacerItem141 = QtWidgets.QSpacerItem(40, 20, QtWidgets.QSizePolicy.Expanding, QtWidgets.QSizePolicy.Minimum)
self.gridLayout_206.addItem(spacerItem141, 5, 0, 1, 1)
self.horizontalLayout_16 = QtWidgets.QHBoxLayout()
self.horizontalLayout_16.setObjectName("horizontalLayout_16")
self.label_152 = QtWidgets.QLabel(self.tab_dilation)
sizePolicy = QtWidgets.QSizePolicy(QtWidgets.QSizePolicy.Minimum, QtWidgets.QSizePolicy.Preferred)
sizePolicy.setHorizontalStretch(0)
sizePolicy.setVerticalStretch(0)
sizePolicy.setHeightForWidth(self.label_152.sizePolicy().hasHeightForWidth())
self.label_152.setSizePolicy(sizePolicy)
self.label_152.setMinimumSize(QtCore.QSize(229, 0))
self.label_152.setFrameShadow(QtWidgets.QFrame.Sunken)
self.label_152.setAlignment(QtCore.Qt.AlignLeading|QtCore.Qt.AlignLeft|QtCore.Qt.AlignVCenter)
self.label_152.setObjectName("label_152")
self.horizontalLayout_16.addWidget(self.label_152)
self.dilation_raster_name_combo = QtWidgets.QComboBox(self.tab_dilation)
sizePolicy = QtWidgets.QSizePolicy(QtWidgets.QSizePolicy.Preferred, QtWidgets.QSizePolicy.Fixed)
sizePolicy.setHorizontalStretch(1)
sizePolicy.setVerticalStretch(0)
sizePolicy.setHeightForWidth(self.dilation_raster_name_combo.sizePolicy().hasHeightForWidth())
self.dilation_raster_name_combo.setSizePolicy(sizePolicy)
self.dilation_raster_name_combo.setObjectName("dilation_raster_name_combo")
self.horizontalLayout_16.addWidget(self.dilation_raster_name_combo)
self.toolButton_reload_19 = QtWidgets.QToolButton(self.tab_dilation)
self.toolButton_reload_19.setStyleSheet("margin: 0px;padding: 0px;")
self.toolButton_reload_19.setIcon(icon55)
self.toolButton_reload_19.setIconSize(QtCore.QSize(22, 22))
self.toolButton_reload_19.setObjectName("toolButton_reload_19")
self.horizontalLayout_16.addWidget(self.toolButton_reload_19)
self.gridLayout_206.addLayout(self.horizontalLayout_16, 0, 0, 1, 3)
self.horizontalLayout_17 = QtWidgets.QHBoxLayout()
self.horizontalLayout_17.setObjectName("horizontalLayout_17")
self.label_153 = QtWidgets.QLabel(self.tab_dilation)
sizePolicy = QtWidgets.QSizePolicy(QtWidgets.QSizePolicy.Minimum, QtWidgets.QSizePolicy.Preferred)
sizePolicy.setHorizontalStretch(0)
sizePolicy.setVerticalStretch(0)
sizePolicy.setHeightForWidth(self.label_153.sizePolicy().hasHeightForWidth())
self.label_153.setSizePolicy(sizePolicy)
self.label_153.setMinimumSize(QtCore.QSize(229, 0))
self.label_153.setFrameShadow(QtWidgets.QFrame.Sunken)
self.label_153.setAlignment(QtCore.Qt.AlignLeading|QtCore.Qt.AlignLeft|QtCore.Qt.AlignVCenter)
self.label_153.setObjectName("label_153")
self.horizontalLayout_17.addWidget(self.label_153)
self.dilation_threshold_spinBox = QtWidgets.QSpinBox(self.tab_dilation)
self.dilation_threshold_spinBox.setMinimum(1)
self.dilation_threshold_spinBox.setMaximum(1000)
self.dilation_threshold_spinBox.setProperty("value", 1)
self.dilation_threshold_spinBox.setObjectName("dilation_threshold_spinBox")
self.horizontalLayout_17.addWidget(self.dilation_threshold_spinBox)
self.circular_structure_checkBox_2 = QtWidgets.QCheckBox(self.tab_dilation)
self.circular_structure_checkBox_2.setObjectName("circular_structure_checkBox_2")
self.horizontalLayout_17.addWidget(self.circular_structure_checkBox_2)
spacerItem142 = QtWidgets.QSpacerItem(40, 20, QtWidgets.QSizePolicy.Expanding, QtWidgets.QSizePolicy.Minimum)
self.horizontalLayout_17.addItem(spacerItem142)
self.gridLayout_206.addLayout(self.horizontalLayout_17, 2, 0, 1, 3)
self.horizontalLayout_18 = QtWidgets.QHBoxLayout()
self.horizontalLayout_18.setObjectName("horizontalLayout_18")
self.label_155 = QtWidgets.QLabel(self.tab_dilation)
sizePolicy = QtWidgets.QSizePolicy(QtWidgets.QSizePolicy.Minimum, QtWidgets.QSizePolicy.Preferred)
sizePolicy.setHorizontalStretch(0)
sizePolicy.setVerticalStretch(0)
sizePolicy.setHeightForWidth(self.label_155.sizePolicy().hasHeightForWidth())
self.label_155.setSizePolicy(sizePolicy)
self.label_155.setFrameShadow(QtWidgets.QFrame.Sunken)
self.label_155.setAlignment(QtCore.Qt.AlignLeading|QtCore.Qt.AlignLeft|QtCore.Qt.AlignVCenter)
self.label_155.setObjectName("label_155")
self.horizontalLayout_18.addWidget(self.label_155)
self.dilation_classes_lineEdit = QtWidgets.QLineEdit(self.tab_dilation)
sizePolicy = QtWidgets.QSizePolicy(QtWidgets.QSizePolicy.MinimumExpanding, QtWidgets.QSizePolicy.Fixed)
sizePolicy.setHorizontalStretch(0)
sizePolicy.setVerticalStretch(0)
sizePolicy.setHeightForWidth(self.dilation_classes_lineEdit.sizePolicy().hasHeightForWidth())
self.dilation_classes_lineEdit.setSizePolicy(sizePolicy)
self.dilation_classes_lineEdit.setMinimumSize(QtCore.QSize(400, 26))
self.dilation_classes_lineEdit.setText("")
self.dilation_classes_lineEdit.setMaxLength(10000)
self.dilation_classes_lineEdit.setObjectName("dilation_classes_lineEdit")
self.horizontalLayout_18.addWidget(self.dilation_classes_lineEdit)
self.gridLayout_206.addLayout(self.horizontalLayout_18, 1, 0, 1, 3)
self.label_176 = QtWidgets.QLabel(self.tab_dilation)
self.label_176.setStyleSheet("background-color : #656565; color : white")
self.label_176.setFrameShape(QtWidgets.QFrame.Panel)
self.label_176.setFrameShadow(QtWidgets.QFrame.Sunken)
self.label_176.setAlignment(QtCore.Qt.AlignLeading|QtCore.Qt.AlignLeft|QtCore.Qt.AlignVCenter)
self.label_176.setObjectName("label_176")
self.gridLayout_206.addWidget(self.label_176, 4, 0, 1, 3)
self.class_dilation_toolButton = QtWidgets.QToolButton(self.tab_dilation)
font = QtGui.QFont()
font.setBold(True)
font.setWeight(75)
self.class_dilation_toolButton.setFont(font)
self.class_dilation_toolButton.setLayoutDirection(QtCore.Qt.RightToLeft)
self.class_dilation_toolButton.setStyleSheet("margin: 0px;padding: 0px;")
self.class_dilation_toolButton.setIcon(icon64)
self.class_dilation_toolButton.setIconSize(QtCore.QSize(34, 34))
self.class_dilation_toolButton.setToolButtonStyle(QtCore.Qt.ToolButtonTextBesideIcon)
self.class_dilation_toolButton.setObjectName("class_dilation_toolButton")
self.gridLayout_206.addWidget(self.class_dilation_toolButton, 5, 2, 1, 1)
self.classification_dilation = QtWidgets.QToolButton(self.tab_dilation)
font = QtGui.QFont()
font.setBold(True)
font.setWeight(75)
self.classification_dilation.setFont(font)
self.classification_dilation.setLayoutDirection(QtCore.Qt.RightToLeft)
self.classification_dilation.setStyleSheet("margin: 0px;padding: 0px;")
self.classification_dilation.setIcon(icon48)
self.classification_dilation.setIconSize(QtCore.QSize(34, 34))
self.classification_dilation.setToolButtonStyle(QtCore.Qt.ToolButtonTextBesideIcon)
self.classification_dilation.setObjectName("classification_dilation")
self.gridLayout_206.addWidget(self.classification_dilation, 5, 1, 1, 1)
self.gridLayout_207.addLayout(self.gridLayout_206, 1, 0, 1, 1)
self.tabWidget_2.addTab(self.tab_dilation, "")
self.tab_zonal_stats_rasters = QtWidgets.QWidget()
self.tab_zonal_stats_rasters.setObjectName("tab_zonal_stats_rasters")
self.gridLayout_281 = QtWidgets.QGridLayout(self.tab_zonal_stats_rasters)
self.gridLayout_281.setObjectName("gridLayout_281")
self.gridLayout_87 = QtWidgets.QGridLayout()
self.gridLayout_87.setObjectName("gridLayout_87")
self.horizontalLayout_50 = QtWidgets.QHBoxLayout()
self.horizontalLayout_50.setObjectName("horizontalLayout_50")
self.label_212 = QtWidgets.QLabel(self.tab_zonal_stats_rasters)
self.label_212.setStyleSheet("background-color : #656565; color : white")
self.label_212.setFrameShape(QtWidgets.QFrame.Panel)
self.label_212.setFrameShadow(QtWidgets.QFrame.Sunken)
self.label_212.setObjectName("label_212")
self.horizontalLayout_50.addWidget(self.label_212)
self.gridLayout_87.addLayout(self.horizontalLayout_50, 0, 0, 1, 1)
self.horizontalLayout_51 = QtWidgets.QHBoxLayout()
self.horizontalLayout_51.setObjectName("horizontalLayout_51")
self.label_77 = QtWidgets.QLabel(self.tab_zonal_stats_rasters)
sizePolicy = QtWidgets.QSizePolicy(QtWidgets.QSizePolicy.Minimum, QtWidgets.QSizePolicy.Preferred)
sizePolicy.setHorizontalStretch(0)
sizePolicy.setVerticalStretch(0)
sizePolicy.setHeightForWidth(self.label_77.sizePolicy().hasHeightForWidth())
self.label_77.setSizePolicy(sizePolicy)
self.label_77.setMinimumSize(QtCore.QSize(229, 0))
self.label_77.setFrameShadow(QtWidgets.QFrame.Sunken)
self.label_77.setAlignment(QtCore.Qt.AlignLeading|QtCore.Qt.AlignLeft|QtCore.Qt.AlignVCenter)
self.label_77.setObjectName("label_77")
self.horizontalLayout_51.addWidget(self.label_77)
self.classification_name_combo_5 = QtWidgets.QComboBox(self.tab_zonal_stats_rasters)
sizePolicy = QtWidgets.QSizePolicy(QtWidgets.QSizePolicy.Preferred, QtWidgets.QSizePolicy.Fixed)
sizePolicy.setHorizontalStretch(1)
sizePolicy.setVerticalStretch(0)
sizePolicy.setHeightForWidth(self.classification_name_combo_5.sizePolicy().hasHeightForWidth())
self.classification_name_combo_5.setSizePolicy(sizePolicy)
self.classification_name_combo_5.setObjectName("classification_name_combo_5")
self.horizontalLayout_51.addWidget(self.classification_name_combo_5)
self.toolButton_reload_24 = QtWidgets.QToolButton(self.tab_zonal_stats_rasters)
self.toolButton_reload_24.setStyleSheet("margin: 0px;padding: 0px;")
self.toolButton_reload_24.setIcon(icon55)
self.toolButton_reload_24.setIconSize(QtCore.QSize(22, 22))
self.toolButton_reload_24.setObjectName("toolButton_reload_24")
self.horizontalLayout_51.addWidget(self.toolButton_reload_24)
self.gridLayout_87.addLayout(self.horizontalLayout_51, 1, 0, 1, 1)
self.horizontalLayout_52 = QtWidgets.QHBoxLayout()
self.horizontalLayout_52.setObjectName("horizontalLayout_52")
self.nodata_checkBox_10 = QtWidgets.QCheckBox(self.tab_zonal_stats_rasters)
self.nodata_checkBox_10.setObjectName("nodata_checkBox_10")
self.horizontalLayout_52.addWidget(self.nodata_checkBox_10)
self.nodata_spinBox_12 = QtWidgets.QSpinBox(self.tab_zonal_stats_rasters)
self.nodata_spinBox_12.setMinimum(-2147483647)
self.nodata_spinBox_12.setMaximum(2147483647)
self.nodata_spinBox_12.setObjectName("nodata_spinBox_12")
self.horizontalLayout_52.addWidget(self.nodata_spinBox_12)
spacerItem143 = QtWidgets.QSpacerItem(40, 20, QtWidgets.QSizePolicy.Expanding, QtWidgets.QSizePolicy.Minimum)
self.horizontalLayout_52.addItem(spacerItem143)
self.gridLayout_87.addLayout(self.horizontalLayout_52, 2, 0, 1, 1)
self.gridLayout_281.addLayout(self.gridLayout_87, 0, 0, 1, 1)
self.gridLayout_91 = QtWidgets.QGridLayout()
self.gridLayout_91.setObjectName("gridLayout_91")
self.class_field_comboBox_4 = QtWidgets.QComboBox(self.tab_zonal_stats_rasters)
sizePolicy = QtWidgets.QSizePolicy(QtWidgets.QSizePolicy.MinimumExpanding, QtWidgets.QSizePolicy.Preferred)
sizePolicy.setHorizontalStretch(1)
sizePolicy.setVerticalStretch(0)
sizePolicy.setHeightForWidth(self.class_field_comboBox_4.sizePolicy().hasHeightForWidth())
self.class_field_comboBox_4.setSizePolicy(sizePolicy)
self.class_field_comboBox_4.setObjectName("class_field_comboBox_4")
self.gridLayout_91.addWidget(self.class_field_comboBox_4, 1, 2, 1, 1)
self.label_214 = QtWidgets.QLabel(self.tab_zonal_stats_rasters)
sizePolicy = QtWidgets.QSizePolicy(QtWidgets.QSizePolicy.Minimum, QtWidgets.QSizePolicy.Preferred)
sizePolicy.setHorizontalStretch(0)
sizePolicy.setVerticalStretch(0)
sizePolicy.setHeightForWidth(self.label_214.sizePolicy().hasHeightForWidth())
self.label_214.setSizePolicy(sizePolicy)
self.label_214.setFrameShadow(QtWidgets.QFrame.Sunken)
self.label_214.setAlignment(QtCore.Qt.AlignLeading|QtCore.Qt.AlignLeft|QtCore.Qt.AlignVCenter)
self.label_214.setObjectName("label_214")
self.gridLayout_91.addWidget(self.label_214, 0, 0, 1, 1)
self.buttonReload_shape_6 = QtWidgets.QToolButton(self.tab_zonal_stats_rasters)
self.buttonReload_shape_6.setStyleSheet("margin: 0px;padding: 0px;")
self.buttonReload_shape_6.setIcon(icon55)
self.buttonReload_shape_6.setIconSize(QtCore.QSize(22, 22))
self.buttonReload_shape_6.setObjectName("buttonReload_shape_6")
self.gridLayout_91.addWidget(self.buttonReload_shape_6, 0, 3, 1, 1)
self.label_213 = QtWidgets.QLabel(self.tab_zonal_stats_rasters)
sizePolicy = QtWidgets.QSizePolicy(QtWidgets.QSizePolicy.MinimumExpanding, QtWidgets.QSizePolicy.Preferred)
sizePolicy.setHorizontalStretch(0)
sizePolicy.setVerticalStretch(0)
sizePolicy.setHeightForWidth(self.label_213.sizePolicy().hasHeightForWidth())
self.label_213.setSizePolicy(sizePolicy)
self.label_213.setMinimumSize(QtCore.QSize(6, 0))
self.label_213.setMaximumSize(QtCore.QSize(100, 200))
self.label_213.setFrameShadow(QtWidgets.QFrame.Sunken)
self.label_213.setAlignment(QtCore.Qt.AlignLeading|QtCore.Qt.AlignLeft|QtCore.Qt.AlignVCenter)
self.label_213.setObjectName("label_213")
self.gridLayout_91.addWidget(self.label_213, 1, 1, 1, 1)
self.reference_name_combo_3 = QtWidgets.QComboBox(self.tab_zonal_stats_rasters)
sizePolicy = QtWidgets.QSizePolicy(QtWidgets.QSizePolicy.Preferred, QtWidgets.QSizePolicy.Fixed)
sizePolicy.setHorizontalStretch(1)
sizePolicy.setVerticalStretch(0)
sizePolicy.setHeightForWidth(self.reference_name_combo_3.sizePolicy().hasHeightForWidth())
self.reference_name_combo_3.setSizePolicy(sizePolicy)
self.reference_name_combo_3.setObjectName("reference_name_combo_3")
self.gridLayout_91.addWidget(self.reference_name_combo_3, 0, 1, 1, 2)
self.gridLayout_281.addLayout(self.gridLayout_91, 1, 0, 1, 1)
self.gridLayout_131 = QtWidgets.QGridLayout()
self.gridLayout_131.setObjectName("gridLayout_131")
self.statistic_lineEdit = QtWidgets.QLineEdit(self.tab_zonal_stats_rasters)
sizePolicy = QtWidgets.QSizePolicy(QtWidgets.QSizePolicy.MinimumExpanding, QtWidgets.QSizePolicy.Fixed)
sizePolicy.setHorizontalStretch(0)
sizePolicy.setVerticalStretch(0)
sizePolicy.setHeightForWidth(self.statistic_lineEdit.sizePolicy().hasHeightForWidth())
self.statistic_lineEdit.setSizePolicy(sizePolicy)
self.statistic_lineEdit.setMaximumSize(QtCore.QSize(200, 16777215))
self.statistic_lineEdit.setText("")
self.statistic_lineEdit.setMaxLength(10000)
self.statistic_lineEdit.setObjectName("statistic_lineEdit")
self.gridLayout_131.addWidget(self.statistic_lineEdit, 1, 2, 1, 1)
self.statistic_name_combobox = QtWidgets.QComboBox(self.tab_zonal_stats_rasters)
sizePolicy = QtWidgets.QSizePolicy(QtWidgets.QSizePolicy.Preferred, QtWidgets.QSizePolicy.Fixed)
sizePolicy.setHorizontalStretch(1)
sizePolicy.setVerticalStretch(0)
sizePolicy.setHeightForWidth(self.statistic_name_combobox.sizePolicy().hasHeightForWidth())
self.statistic_name_combobox.setSizePolicy(sizePolicy)
self.statistic_name_combobox.setMaximumSize(QtCore.QSize(200, 16777215))
self.statistic_name_combobox.setObjectName("statistic_name_combobox")
self.gridLayout_131.addWidget(self.statistic_name_combobox, 1, 1, 1, 1)
self.label_232 = QtWidgets.QLabel(self.tab_zonal_stats_rasters)
self.label_232.setFrameShadow(QtWidgets.QFrame.Sunken)
self.label_232.setAlignment(QtCore.Qt.AlignLeading|QtCore.Qt.AlignLeft|QtCore.Qt.AlignVCenter)
self.label_232.setObjectName("label_232")
self.gridLayout_131.addWidget(self.label_232, 1, 0, 1, 1)
spacerItem144 = QtWidgets.QSpacerItem(40, 20, QtWidgets.QSizePolicy.Expanding, QtWidgets.QSizePolicy.Minimum)
self.gridLayout_131.addItem(spacerItem144, 1, 3, 1, 1)
self.label_216 = QtWidgets.QLabel(self.tab_zonal_stats_rasters)
self.label_216.setStyleSheet("background-color : #656565; color : white")
self.label_216.setFrameShape(QtWidgets.QFrame.Panel)
self.label_216.setFrameShadow(QtWidgets.QFrame.Sunken)
self.label_216.setObjectName("label_216")
self.gridLayout_131.addWidget(self.label_216, 0, 0, 1, 4)
self.gridLayout_281.addLayout(self.gridLayout_131, 2, 0, 1, 1)
self.gridLayout_128 = QtWidgets.QGridLayout()
self.gridLayout_128.setObjectName("gridLayout_128")
self.zonal_stat_raster_toolButton = QtWidgets.QToolButton(self.tab_zonal_stats_rasters)
font = QtGui.QFont()
font.setBold(True)
font.setWeight(75)
self.zonal_stat_raster_toolButton.setFont(font)
self.zonal_stat_raster_toolButton.setLayoutDirection(QtCore.Qt.RightToLeft)
self.zonal_stat_raster_toolButton.setStyleSheet("margin: 0px;padding: 0px;")
self.zonal_stat_raster_toolButton.setIcon(icon64)
self.zonal_stat_raster_toolButton.setIconSize(QtCore.QSize(34, 34))
self.zonal_stat_raster_toolButton.setToolButtonStyle(QtCore.Qt.ToolButtonTextBesideIcon)
self.zonal_stat_raster_toolButton.setObjectName("zonal_stat_raster_toolButton")
self.gridLayout_128.addWidget(self.zonal_stat_raster_toolButton, 2, 2, 1, 1)
self.label_215 = QtWidgets.QLabel(self.tab_zonal_stats_rasters)
self.label_215.setStyleSheet("background-color : #656565; color : white")
self.label_215.setFrameShape(QtWidgets.QFrame.Panel)
self.label_215.setFrameShadow(QtWidgets.QFrame.Sunken)
self.label_215.setAlignment(QtCore.Qt.AlignLeading|QtCore.Qt.AlignLeft|QtCore.Qt.AlignVCenter)
self.label_215.setObjectName("label_215")
self.gridLayout_128.addWidget(self.label_215, 1, 0, 1, 3)
spacerItem145 = QtWidgets.QSpacerItem(40, 20, QtWidgets.QSizePolicy.Expanding, QtWidgets.QSizePolicy.Minimum)
self.gridLayout_128.addItem(spacerItem145, 2, 0, 1, 1)
spacerItem146 = QtWidgets.QSpacerItem(20, 40, QtWidgets.QSizePolicy.Minimum, QtWidgets.QSizePolicy.Expanding)
self.gridLayout_128.addItem(spacerItem146, 0, 0, 1, 1)
self.zonal_stat_raster = QtWidgets.QToolButton(self.tab_zonal_stats_rasters)
font = QtGui.QFont()
font.setBold(True)
font.setWeight(75)
self.zonal_stat_raster.setFont(font)
self.zonal_stat_raster.setLayoutDirection(QtCore.Qt.RightToLeft)
self.zonal_stat_raster.setStyleSheet("margin: 0px;padding: 0px;")
self.zonal_stat_raster.setIcon(icon48)
self.zonal_stat_raster.setIconSize(QtCore.QSize(34, 34))
self.zonal_stat_raster.setToolButtonStyle(QtCore.Qt.ToolButtonTextBesideIcon)
self.zonal_stat_raster.setObjectName("zonal_stat_raster")
self.gridLayout_128.addWidget(self.zonal_stat_raster, 2, 1, 1, 1)
self.gridLayout_281.addLayout(self.gridLayout_128, 3, 0, 1, 1)
self.tabWidget_2.addTab(self.tab_zonal_stats_rasters, "")
self.gridLayout_552.addWidget(self.tabWidget_2, 0, 0, 1, 1)
self.SCP_tabs.addTab(self.tab_postProcessing, "")
self.tab_band_calc = QtWidgets.QWidget()
self.tab_band_calc.setObjectName("tab_band_calc")
self.gridLayout_303 = QtWidgets.QGridLayout(self.tab_band_calc)
self.gridLayout_303.setObjectName("gridLayout_303")
self.splitter_band_calc = QtWidgets.QSplitter(self.tab_band_calc)
self.splitter_band_calc.setOrientation(QtCore.Qt.Vertical)
self.splitter_band_calc.setChildrenCollapsible(False)
self.splitter_band_calc.setObjectName("splitter_band_calc")
self.widget_2 = QtWidgets.QWidget(self.splitter_band_calc)
sizePolicy = QtWidgets.QSizePolicy(QtWidgets.QSizePolicy.Preferred, QtWidgets.QSizePolicy.Maximum)
sizePolicy.setHorizontalStretch(0)
sizePolicy.setVerticalStretch(0)
sizePolicy.setHeightForWidth(self.widget_2.sizePolicy().hasHeightForWidth())
self.widget_2.setSizePolicy(sizePolicy)
self.widget_2.setMinimumSize(QtCore.QSize(0, 150))
self.widget_2.setObjectName("widget_2")
self.gridLayout_126 = QtWidgets.QGridLayout(self.widget_2)
self.gridLayout_126.setContentsMargins(1, 1, 1, 1)
self.gridLayout_126.setObjectName("gridLayout_126")
self.gridLayout_86 = QtWidgets.QGridLayout()
self.gridLayout_86.setObjectName("gridLayout_86")
self.gridLayout_171 = QtWidgets.QGridLayout()
self.gridLayout_171.setObjectName("gridLayout_171")
self.toolButton_reload_13 = QtWidgets.QToolButton(self.widget_2)
self.toolButton_reload_13.setStyleSheet("margin: 0px;padding: 0px")
self.toolButton_reload_13.setIcon(icon55)
self.toolButton_reload_13.setIconSize(QtCore.QSize(22, 22))
self.toolButton_reload_13.setObjectName("toolButton_reload_13")
self.gridLayout_171.addWidget(self.toolButton_reload_13, 0, 0, 1, 1)
self.gridLayout_86.addLayout(self.gridLayout_171, 1, 1, 2, 1)
self.tableWidget_band_calc = QtWidgets.QTableWidget(self.widget_2)
sizePolicy = QtWidgets.QSizePolicy(QtWidgets.QSizePolicy.Expanding, QtWidgets.QSizePolicy.MinimumExpanding)
sizePolicy.setHorizontalStretch(0)
sizePolicy.setVerticalStretch(0)
sizePolicy.setHeightForWidth(self.tableWidget_band_calc.sizePolicy().hasHeightForWidth())
self.tableWidget_band_calc.setSizePolicy(sizePolicy)
self.tableWidget_band_calc.setFrameShape(QtWidgets.QFrame.WinPanel)
self.tableWidget_band_calc.setFrameShadow(QtWidgets.QFrame.Sunken)
self.tableWidget_band_calc.setAlternatingRowColors(True)
self.tableWidget_band_calc.setSelectionMode(QtWidgets.QAbstractItemView.NoSelection)
self.tableWidget_band_calc.setSelectionBehavior(QtWidgets.QAbstractItemView.SelectRows)
self.tableWidget_band_calc.setColumnCount(2)
self.tableWidget_band_calc.setObjectName("tableWidget_band_calc")
self.tableWidget_band_calc.setRowCount(0)
item = QtWidgets.QTableWidgetItem()
self.tableWidget_band_calc.setHorizontalHeaderItem(0, item)
item = QtWidgets.QTableWidgetItem()
self.tableWidget_band_calc.setHorizontalHeaderItem(1, item)
self.tableWidget_band_calc.horizontalHeader().setDefaultSectionSize(200)
self.tableWidget_band_calc.horizontalHeader().setStretchLastSection(True)
self.gridLayout_86.addWidget(self.tableWidget_band_calc, 1, 0, 2, 1)
self.gridLayout_85 = QtWidgets.QGridLayout()
self.gridLayout_85.setObjectName("gridLayout_85")
self.bandcalc_filter_lineEdit = QtWidgets.QLineEdit(self.widget_2)
sizePolicy = QtWidgets.QSizePolicy(QtWidgets.QSizePolicy.Minimum, QtWidgets.QSizePolicy.Fixed)
sizePolicy.setHorizontalStretch(0)
sizePolicy.setVerticalStretch(0)
sizePolicy.setHeightForWidth(self.bandcalc_filter_lineEdit.sizePolicy().hasHeightForWidth())
self.bandcalc_filter_lineEdit.setSizePolicy(sizePolicy)
self.bandcalc_filter_lineEdit.setObjectName("bandcalc_filter_lineEdit")
self.gridLayout_85.addWidget(self.bandcalc_filter_lineEdit, 0, 1, 1, 1)
self.label_71 = QtWidgets.QLabel(self.widget_2)
self.label_71.setStyleSheet("background-color : #656565; color : white")
self.label_71.setFrameShape(QtWidgets.QFrame.Panel)
self.label_71.setFrameShadow(QtWidgets.QFrame.Sunken)
self.label_71.setObjectName("label_71")
self.gridLayout_85.addWidget(self.label_71, 0, 0, 1, 1)
self.gridLayout_86.addLayout(self.gridLayout_85, 0, 0, 1, 2)
self.gridLayout_126.addLayout(self.gridLayout_86, 0, 0, 1, 1)
self.widget_3 = QtWidgets.QWidget(self.splitter_band_calc)
sizePolicy = QtWidgets.QSizePolicy(QtWidgets.QSizePolicy.Preferred, QtWidgets.QSizePolicy.Maximum)
sizePolicy.setHorizontalStretch(0)
sizePolicy.setVerticalStretch(0)
sizePolicy.setHeightForWidth(self.widget_3.sizePolicy().hasHeightForWidth())
self.widget_3.setSizePolicy(sizePolicy)
self.widget_3.setMinimumSize(QtCore.QSize(0, 250))
self.widget_3.setObjectName("widget_3")
self.gridLayout_40 = QtWidgets.QGridLayout(self.widget_3)
self.gridLayout_40.setContentsMargins(0, 0, 0, 0)
self.gridLayout_40.setObjectName("gridLayout_40")
self.band_calc_tabWidget = QtWidgets.QTabWidget(self.widget_3)
self.band_calc_tabWidget.setStyleSheet("QTabBar::tab {\n"
"padding: 10px;\n"
"min-height: 18px;\n"
"}")
self.band_calc_tabWidget.setTabPosition(QtWidgets.QTabWidget.North)
self.band_calc_tabWidget.setObjectName("band_calc_tabWidget")
self.tab_expression = QtWidgets.QWidget()
self.tab_expression.setObjectName("tab_expression")
self.gridLayout_2 = QtWidgets.QGridLayout(self.tab_expression)
self.gridLayout_2.setObjectName("gridLayout_2")
self.splitter_2 = QtWidgets.QSplitter(self.tab_expression)
self.splitter_2.setOrientation(QtCore.Qt.Horizontal)
self.splitter_2.setChildrenCollapsible(False)
self.splitter_2.setObjectName("splitter_2")
self.plainTextEdit_calc = QtWidgets.QPlainTextEdit(self.splitter_2)
self.plainTextEdit_calc.setMinimumSize(QtCore.QSize(100, 0))
self.plainTextEdit_calc.setMaximumSize(QtCore.QSize(16777215, 400))
font = QtGui.QFont()
font.setPointSize(11)
self.plainTextEdit_calc.setFont(font)
self.plainTextEdit_calc.setPlainText("")
self.plainTextEdit_calc.setObjectName("plainTextEdit_calc")
self.frame = QtWidgets.QFrame(self.splitter_2)
sizePolicy = QtWidgets.QSizePolicy(QtWidgets.QSizePolicy.Maximum, QtWidgets.QSizePolicy.Preferred)
sizePolicy.setHorizontalStretch(0)
sizePolicy.setVerticalStretch(0)
sizePolicy.setHeightForWidth(self.frame.sizePolicy().hasHeightForWidth())
self.frame.setSizePolicy(sizePolicy)
self.frame.setMinimumSize(QtCore.QSize(100, 0))
self.frame.setMaximumSize(QtCore.QSize(300, 16777215))
self.frame.setFrameShape(QtWidgets.QFrame.StyledPanel)
self.frame.setFrameShadow(QtWidgets.QFrame.Raised)
self.frame.setObjectName("frame")
self.gridLayout_67 = QtWidgets.QGridLayout(self.frame)
self.gridLayout_67.setContentsMargins(2, 2, 2, 2)
self.gridLayout_67.setObjectName("gridLayout_67")
self.gridLayout_88 = QtWidgets.QGridLayout()
self.gridLayout_88.setObjectName("gridLayout_88")
self.gridLayout_90 = QtWidgets.QGridLayout()
self.gridLayout_90.setObjectName("gridLayout_90")
self.horizontalLayout_49 = QtWidgets.QHBoxLayout()
self.horizontalLayout_49.setObjectName("horizontalLayout_49")
self.toolButton_less = QtWidgets.QToolButton(self.frame)
self.toolButton_less.setMinimumSize(QtCore.QSize(25, 25))
self.toolButton_less.setObjectName("toolButton_less")
self.horizontalLayout_49.addWidget(self.toolButton_less)
self.toolButton_greater = QtWidgets.QToolButton(self.frame)
self.toolButton_greater.setMinimumSize(QtCore.QSize(25, 25))
self.toolButton_greater.setObjectName("toolButton_greater")
self.horizontalLayout_49.addWidget(self.toolButton_greater)
self.toolButton_lbracket = QtWidgets.QToolButton(self.frame)
self.toolButton_lbracket.setMinimumSize(QtCore.QSize(25, 25))
self.toolButton_lbracket.setObjectName("toolButton_lbracket")
self.horizontalLayout_49.addWidget(self.toolButton_lbracket)
self.toolButton_rbracket = QtWidgets.QToolButton(self.frame)
self.toolButton_rbracket.setMinimumSize(QtCore.QSize(25, 25))
self.toolButton_rbracket.setObjectName("toolButton_rbracket")
self.horizontalLayout_49.addWidget(self.toolButton_rbracket)
self.toolButton_power = QtWidgets.QToolButton(self.frame)
self.toolButton_power.setMinimumSize(QtCore.QSize(25, 25))
self.toolButton_power.setObjectName("toolButton_power")
self.horizontalLayout_49.addWidget(self.toolButton_power)
self.toolButton_sqrt = QtWidgets.QToolButton(self.frame)
self.toolButton_sqrt.setMinimumSize(QtCore.QSize(25, 25))
self.toolButton_sqrt.setObjectName("toolButton_sqrt")
self.horizontalLayout_49.addWidget(self.toolButton_sqrt)
self.gridLayout_90.addLayout(self.horizontalLayout_49, 1, 0, 1, 1)
self.horizontalLayout_4 = QtWidgets.QHBoxLayout()
self.horizontalLayout_4.setObjectName("horizontalLayout_4")
self.toolButton_plus = QtWidgets.QToolButton(self.frame)
self.toolButton_plus.setMinimumSize(QtCore.QSize(25, 25))
self.toolButton_plus.setObjectName("toolButton_plus")
self.horizontalLayout_4.addWidget(self.toolButton_plus)
self.toolButton_minus = QtWidgets.QToolButton(self.frame)
self.toolButton_minus.setMinimumSize(QtCore.QSize(25, 25))
self.toolButton_minus.setObjectName("toolButton_minus")
self.horizontalLayout_4.addWidget(self.toolButton_minus)
self.toolButton_product = QtWidgets.QToolButton(self.frame)
self.toolButton_product.setMinimumSize(QtCore.QSize(25, 25))
self.toolButton_product.setObjectName("toolButton_product")
self.horizontalLayout_4.addWidget(self.toolButton_product)
self.toolButton_ratio = QtWidgets.QToolButton(self.frame)
self.toolButton_ratio.setMinimumSize(QtCore.QSize(25, 25))
self.toolButton_ratio.setObjectName("toolButton_ratio")
self.horizontalLayout_4.addWidget(self.toolButton_ratio)
self.toolButton_equal = QtWidgets.QToolButton(self.frame)
self.toolButton_equal.setMinimumSize(QtCore.QSize(45, 25))
self.toolButton_equal.setObjectName("toolButton_equal")
self.horizontalLayout_4.addWidget(self.toolButton_equal)
self.toolButton_unequal = QtWidgets.QToolButton(self.frame)
self.toolButton_unequal.setMinimumSize(QtCore.QSize(45, 25))
self.toolButton_unequal.setObjectName("toolButton_unequal")
self.horizontalLayout_4.addWidget(self.toolButton_unequal)
self.gridLayout_90.addLayout(self.horizontalLayout_4, 0, 0, 1, 2)
self.toolButton_import_expression = QtWidgets.QToolButton(self.frame)
self.toolButton_import_expression.setStyleSheet("margin: 0px;padding: 0px;")
self.toolButton_import_expression.setIcon(icon65)
self.toolButton_import_expression.setIconSize(QtCore.QSize(22, 22))
self.toolButton_import_expression.setObjectName("toolButton_import_expression")
self.gridLayout_90.addWidget(self.toolButton_import_expression, 1, 1, 1, 1)
self.gridLayout_88.addLayout(self.gridLayout_90, 0, 0, 2, 3)
self.gridLayout_93 = QtWidgets.QGridLayout()
self.gridLayout_93.setObjectName("gridLayout_93")
self.gridLayout_38 = QtWidgets.QGridLayout()
self.gridLayout_38.setObjectName("gridLayout_38")
self.band_calc_function_tableWidget = QtWidgets.QTableWidget(self.frame)
sizePolicy = QtWidgets.QSizePolicy(QtWidgets.QSizePolicy.MinimumExpanding, QtWidgets.QSizePolicy.Expanding)
sizePolicy.setHorizontalStretch(0)
sizePolicy.setVerticalStretch(0)
sizePolicy.setHeightForWidth(self.band_calc_function_tableWidget.sizePolicy().hasHeightForWidth())
self.band_calc_function_tableWidget.setSizePolicy(sizePolicy)
self.band_calc_function_tableWidget.setEditTriggers(QtWidgets.QAbstractItemView.NoEditTriggers)
self.band_calc_function_tableWidget.setAlternatingRowColors(True)
self.band_calc_function_tableWidget.setSelectionMode(QtWidgets.QAbstractItemView.SingleSelection)
self.band_calc_function_tableWidget.setObjectName("band_calc_function_tableWidget")
self.band_calc_function_tableWidget.setColumnCount(1)
self.band_calc_function_tableWidget.setRowCount(0)
item = QtWidgets.QTableWidgetItem()
self.band_calc_function_tableWidget.setHorizontalHeaderItem(0, item)
self.band_calc_function_tableWidget.horizontalHeader().setStretchLastSection(True)
self.band_calc_function_tableWidget.verticalHeader().setVisible(False)
self.gridLayout_38.addWidget(self.band_calc_function_tableWidget, 0, 0, 1, 1)
self.gridLayout_93.addLayout(self.gridLayout_38, 0, 0, 1, 2)
self.gridLayout_88.addLayout(self.gridLayout_93, 3, 0, 1, 3)
self.gridLayout_67.addLayout(self.gridLayout_88, 0, 0, 1, 1)
self.gridLayout_2.addWidget(self.splitter_2, 0, 0, 1, 1)
icon82 = QtGui.QIcon()
icon82.addPixmap(QtGui.QPixmap(":/plugins/semiautomaticclassificationplugin/icons/semiautomaticclassificationplugin_bandcalc_expression.svg"), QtGui.QIcon.Normal, QtGui.QIcon.Off)
self.band_calc_tabWidget.addTab(self.tab_expression, icon82, "")
self.tab_decision_rules = QtWidgets.QWidget()
self.tab_decision_rules.setObjectName("tab_decision_rules")
self.gridLayout_89 = QtWidgets.QGridLayout(self.tab_decision_rules)
self.gridLayout_89.setObjectName("gridLayout_89")
self.gridLayout_215 = QtWidgets.QGridLayout()
self.gridLayout_215.setObjectName("gridLayout_215")
self.decision_rules_tableWidget = QtWidgets.QTableWidget(self.tab_decision_rules)
font = QtGui.QFont()
font.setPointSize(10)
self.decision_rules_tableWidget.setFont(font)
self.decision_rules_tableWidget.setObjectName("decision_rules_tableWidget")
self.decision_rules_tableWidget.setColumnCount(2)
self.decision_rules_tableWidget.setRowCount(0)
item = QtWidgets.QTableWidgetItem()
self.decision_rules_tableWidget.setHorizontalHeaderItem(0, item)
item = QtWidgets.QTableWidgetItem()
self.decision_rules_tableWidget.setHorizontalHeaderItem(1, item)
self.decision_rules_tableWidget.horizontalHeader().setStretchLastSection(True)
self.gridLayout_215.addWidget(self.decision_rules_tableWidget, 0, 0, 1, 1)
self.gridLayout_89.addLayout(self.gridLayout_215, 0, 0, 1, 1)
self.gridLayout_220 = QtWidgets.QGridLayout()
self.gridLayout_220.setObjectName("gridLayout_220")
spacerItem147 = QtWidgets.QSpacerItem(20, 40, QtWidgets.QSizePolicy.Minimum, QtWidgets.QSizePolicy.Expanding)
self.gridLayout_220.addItem(spacerItem147, 6, 0, 1, 1)
self.remove_rule_toolButton = QtWidgets.QToolButton(self.tab_decision_rules)
self.remove_rule_toolButton.setStyleSheet("margin: 0px;padding: 0px")
self.remove_rule_toolButton.setIcon(icon58)
self.remove_rule_toolButton.setIconSize(QtCore.QSize(22, 22))
self.remove_rule_toolButton.setObjectName("remove_rule_toolButton")
self.gridLayout_220.addWidget(self.remove_rule_toolButton, 4, 0, 1, 1)
self.move_up_toolButton_2 = QtWidgets.QToolButton(self.tab_decision_rules)
self.move_up_toolButton_2.setStyleSheet("margin: 0px;padding: 0px;")
self.move_up_toolButton_2.setIcon(icon61)
self.move_up_toolButton_2.setIconSize(QtCore.QSize(22, 22))
self.move_up_toolButton_2.setObjectName("move_up_toolButton_2")
self.gridLayout_220.addWidget(self.move_up_toolButton_2, 0, 0, 1, 1)
self.import_rules_toolButton = QtWidgets.QToolButton(self.tab_decision_rules)
self.import_rules_toolButton.setStyleSheet("margin: 0px;padding: 0px")
self.import_rules_toolButton.setIcon(icon54)
self.import_rules_toolButton.setIconSize(QtCore.QSize(22, 22))
self.import_rules_toolButton.setObjectName("import_rules_toolButton")
self.gridLayout_220.addWidget(self.import_rules_toolButton, 7, 0, 1, 1)
self.clear_rules_toolButton = QtWidgets.QToolButton(self.tab_decision_rules)
self.clear_rules_toolButton.setStyleSheet("margin: 0px;padding: 0px;")
self.clear_rules_toolButton.setIcon(icon59)
self.clear_rules_toolButton.setIconSize(QtCore.QSize(22, 22))
self.clear_rules_toolButton.setObjectName("clear_rules_toolButton")
self.gridLayout_220.addWidget(self.clear_rules_toolButton, 5, 0, 1, 1)
spacerItem148 = QtWidgets.QSpacerItem(20, 40, QtWidgets.QSizePolicy.Minimum, QtWidgets.QSizePolicy.Expanding)
self.gridLayout_220.addItem(spacerItem148, 9, 0, 1, 1)
self.add_rule_toolButton = QtWidgets.QToolButton(self.tab_decision_rules)
self.add_rule_toolButton.setStyleSheet("margin: 0px;padding: 0px")
self.add_rule_toolButton.setIcon(icon66)
self.add_rule_toolButton.setIconSize(QtCore.QSize(22, 22))
self.add_rule_toolButton.setObjectName("add_rule_toolButton")
self.gridLayout_220.addWidget(self.add_rule_toolButton, 3, 0, 1, 1)
spacerItem149 = QtWidgets.QSpacerItem(20, 40, QtWidgets.QSizePolicy.Minimum, QtWidgets.QSizePolicy.Expanding)
self.gridLayout_220.addItem(spacerItem149, 2, 0, 1, 1)
self.move_down_toolButton_2 = QtWidgets.QToolButton(self.tab_decision_rules)
self.move_down_toolButton_2.setStyleSheet("margin: 0px;padding: 0px;")
self.move_down_toolButton_2.setIcon(icon62)
self.move_down_toolButton_2.setIconSize(QtCore.QSize(22, 22))
self.move_down_toolButton_2.setObjectName("move_down_toolButton_2")
self.gridLayout_220.addWidget(self.move_down_toolButton_2, 1, 0, 1, 1)
self.export_rules_toolButton = QtWidgets.QToolButton(self.tab_decision_rules)
self.export_rules_toolButton.setStyleSheet("margin: 0px;padding: 0px")
self.export_rules_toolButton.setIcon(icon53)
self.export_rules_toolButton.setIconSize(QtCore.QSize(22, 22))
self.export_rules_toolButton.setObjectName("export_rules_toolButton")
self.gridLayout_220.addWidget(self.export_rules_toolButton, 8, 0, 1, 1)
self.gridLayout_89.addLayout(self.gridLayout_220, 0, 1, 1, 1)
icon83 = QtGui.QIcon()
icon83.addPixmap(QtGui.QPixmap(":/plugins/semiautomaticclassificationplugin/icons/semiautomaticclassificationplugin_bandcalc_rules.svg"), QtGui.QIcon.Normal, QtGui.QIcon.Off)
self.band_calc_tabWidget.addTab(self.tab_decision_rules, icon83, "")
self.gridLayout_40.addWidget(self.band_calc_tabWidget, 0, 0, 1, 1)
self.horizontalLayout_73 = QtWidgets.QHBoxLayout()
self.horizontalLayout_73.setObjectName("horizontalLayout_73")
self.nodata_as_value_checkBox = QtWidgets.QCheckBox(self.widget_3)
self.nodata_as_value_checkBox.setObjectName("nodata_as_value_checkBox")
self.horizontalLayout_73.addWidget(self.nodata_as_value_checkBox)
self.nodata_checkBox_3 = QtWidgets.QCheckBox(self.widget_3)
self.nodata_checkBox_3.setObjectName("nodata_checkBox_3")
self.horizontalLayout_73.addWidget(self.nodata_checkBox_3)
self.nodata_spinBox_13 = QtWidgets.QSpinBox(self.widget_3)
self.nodata_spinBox_13.setMinimum(-2147483647)
self.nodata_spinBox_13.setMaximum(2147483647)
self.nodata_spinBox_13.setObjectName("nodata_spinBox_13")
self.horizontalLayout_73.addWidget(self.nodata_spinBox_13)
self.label_4 = QtWidgets.QLabel(self.widget_3)
self.label_4.setObjectName("label_4")
self.horizontalLayout_73.addWidget(self.label_4)
self.calc_type_combo = QtWidgets.QComboBox(self.widget_3)
sizePolicy = QtWidgets.QSizePolicy(QtWidgets.QSizePolicy.Maximum, QtWidgets.QSizePolicy.Fixed)
sizePolicy.setHorizontalStretch(1)
sizePolicy.setVerticalStretch(0)
sizePolicy.setHeightForWidth(self.calc_type_combo.sizePolicy().hasHeightForWidth())
self.calc_type_combo.setSizePolicy(sizePolicy)
self.calc_type_combo.setObjectName("calc_type_combo")
self.calc_type_combo.addItem("")
self.calc_type_combo.addItem("")
self.calc_type_combo.addItem("")
self.calc_type_combo.addItem("")
self.calc_type_combo.addItem("")
self.calc_type_combo.addItem("")
self.horizontalLayout_73.addWidget(self.calc_type_combo)
spacerItem150 = QtWidgets.QSpacerItem(214, 20, QtWidgets.QSizePolicy.Expanding, QtWidgets.QSizePolicy.Minimum)
self.horizontalLayout_73.addItem(spacerItem150)
self.label_83 = QtWidgets.QLabel(self.widget_3)
sizePolicy = QtWidgets.QSizePolicy(QtWidgets.QSizePolicy.Maximum, QtWidgets.QSizePolicy.Preferred)
sizePolicy.setHorizontalStretch(0)
sizePolicy.setVerticalStretch(0)
sizePolicy.setHeightForWidth(self.label_83.sizePolicy().hasHeightForWidth())
self.label_83.setSizePolicy(sizePolicy)
self.label_83.setMinimumSize(QtCore.QSize(40, 0))
self.label_83.setMaximumSize(QtCore.QSize(120, 16777215))
self.label_83.setFrameShadow(QtWidgets.QFrame.Sunken)
self.label_83.setAlignment(QtCore.Qt.AlignLeading|QtCore.Qt.AlignLeft|QtCore.Qt.AlignVCenter)
self.label_83.setObjectName("label_83")
self.horizontalLayout_73.addWidget(self.label_83)
self.intersection_checkBox = QtWidgets.QCheckBox(self.widget_3)
self.intersection_checkBox.setChecked(True)
self.intersection_checkBox.setObjectName("intersection_checkBox")
self.horizontalLayout_73.addWidget(self.intersection_checkBox)
self.extent_checkBox = QtWidgets.QCheckBox(self.widget_3)
self.extent_checkBox.setObjectName("extent_checkBox")
self.horizontalLayout_73.addWidget(self.extent_checkBox)
self.raster_extent_combo = QtWidgets.QComboBox(self.widget_3)
sizePolicy = QtWidgets.QSizePolicy(QtWidgets.QSizePolicy.Expanding, QtWidgets.QSizePolicy.Fixed)
sizePolicy.setHorizontalStretch(1)
sizePolicy.setVerticalStretch(0)
sizePolicy.setHeightForWidth(self.raster_extent_combo.sizePolicy().hasHeightForWidth())
self.raster_extent_combo.setSizePolicy(sizePolicy)
self.raster_extent_combo.setObjectName("raster_extent_combo")
self.horizontalLayout_73.addWidget(self.raster_extent_combo)
self.align_radioButton = QtWidgets.QRadioButton(self.widget_3)
self.align_radioButton.setChecked(True)
self.align_radioButton.setObjectName("align_radioButton")
self.horizontalLayout_73.addWidget(self.align_radioButton)
self.gridLayout_40.addLayout(self.horizontalLayout_73, 1, 0, 1, 1)
self.label_84 = QtWidgets.QLabel(self.widget_3)
self.label_84.setStyleSheet("background-color : #656565; color : white")
self.label_84.setFrameShape(QtWidgets.QFrame.Panel)
self.label_84.setFrameShadow(QtWidgets.QFrame.Sunken)
self.label_84.setObjectName("label_84")
self.gridLayout_40.addWidget(self.label_84, 2, 0, 1, 1)
self.horizontalLayout_60 = QtWidgets.QHBoxLayout()
self.horizontalLayout_60.setObjectName("horizontalLayout_60")
self.raster_type_combo = QtWidgets.QComboBox(self.widget_3)
sizePolicy = QtWidgets.QSizePolicy(QtWidgets.QSizePolicy.Maximum, QtWidgets.QSizePolicy.Fixed)
sizePolicy.setHorizontalStretch(1)
sizePolicy.setVerticalStretch(0)
sizePolicy.setHeightForWidth(self.raster_type_combo.sizePolicy().hasHeightForWidth())
self.raster_type_combo.setSizePolicy(sizePolicy)
self.raster_type_combo.setObjectName("raster_type_combo")
self.raster_type_combo.addItem("")
self.raster_type_combo.addItem("")
self.raster_type_combo.addItem("")
self.raster_type_combo.addItem("")
self.raster_type_combo.addItem("")
self.raster_type_combo.addItem("")
self.horizontalLayout_60.addWidget(self.raster_type_combo)
self.label_268 = QtWidgets.QLabel(self.widget_3)
sizePolicy = QtWidgets.QSizePolicy(QtWidgets.QSizePolicy.Maximum, QtWidgets.QSizePolicy.Preferred)
sizePolicy.setHorizontalStretch(0)
sizePolicy.setVerticalStretch(0)
sizePolicy.setHeightForWidth(self.label_268.sizePolicy().hasHeightForWidth())
self.label_268.setSizePolicy(sizePolicy)
self.label_268.setMinimumSize(QtCore.QSize(50, 0))
self.label_268.setMaximumSize(QtCore.QSize(130, 16777215))
self.label_268.setFrameShadow(QtWidgets.QFrame.Sunken)
self.label_268.setAlignment(QtCore.Qt.AlignLeading|QtCore.Qt.AlignLeft|QtCore.Qt.AlignVCenter)
self.label_268.setObjectName("label_268")
self.horizontalLayout_60.addWidget(self.label_268)
self.nodata_spinBox_4 = QtWidgets.QSpinBox(self.widget_3)
self.nodata_spinBox_4.setMinimum(-2147483647)
self.nodata_spinBox_4.setMaximum(2147483647)
self.nodata_spinBox_4.setProperty("value", -32768)
self.nodata_spinBox_4.setObjectName("nodata_spinBox_4")
self.horizontalLayout_60.addWidget(self.nodata_spinBox_4)
self.nodata_mask_checkBox = QtWidgets.QCheckBox(self.widget_3)
self.nodata_mask_checkBox.setChecked(True)
self.nodata_mask_checkBox.setObjectName("nodata_mask_checkBox")
self.horizontalLayout_60.addWidget(self.nodata_mask_checkBox)
self.set_scale_checkBox = QtWidgets.QCheckBox(self.widget_3)
self.set_scale_checkBox.setObjectName("set_scale_checkBox")
self.horizontalLayout_60.addWidget(self.set_scale_checkBox)
self.scale_doubleSpinBox = QtWidgets.QDoubleSpinBox(self.widget_3)
sizePolicy = QtWidgets.QSizePolicy(QtWidgets.QSizePolicy.Fixed, QtWidgets.QSizePolicy.Fixed)
sizePolicy.setHorizontalStretch(0)
sizePolicy.setVerticalStretch(0)
sizePolicy.setHeightForWidth(self.scale_doubleSpinBox.sizePolicy().hasHeightForWidth())
self.scale_doubleSpinBox.setSizePolicy(sizePolicy)
self.scale_doubleSpinBox.setMaximumSize(QtCore.QSize(100, 16777215))
self.scale_doubleSpinBox.setDecimals(7)
self.scale_doubleSpinBox.setMinimum(-1e+34)
self.scale_doubleSpinBox.setMaximum(1e+34)
self.scale_doubleSpinBox.setProperty("value", 1.0)
self.scale_doubleSpinBox.setObjectName("scale_doubleSpinBox")
self.horizontalLayout_60.addWidget(self.scale_doubleSpinBox)
self.set_offset_checkBox = QtWidgets.QCheckBox(self.widget_3)
self.set_offset_checkBox.setObjectName("set_offset_checkBox")
self.horizontalLayout_60.addWidget(self.set_offset_checkBox)
self.offset_doubleSpinBox = QtWidgets.QDoubleSpinBox(self.widget_3)
sizePolicy = QtWidgets.QSizePolicy(QtWidgets.QSizePolicy.Fixed, QtWidgets.QSizePolicy.Fixed)
sizePolicy.setHorizontalStretch(0)
sizePolicy.setVerticalStretch(0)
sizePolicy.setHeightForWidth(self.offset_doubleSpinBox.sizePolicy().hasHeightForWidth())
self.offset_doubleSpinBox.setSizePolicy(sizePolicy)
self.offset_doubleSpinBox.setMaximumSize(QtCore.QSize(100, 16777215))
self.offset_doubleSpinBox.setDecimals(7)
self.offset_doubleSpinBox.setMinimum(-1e+34)
self.offset_doubleSpinBox.setMaximum(1e+34)
self.offset_doubleSpinBox.setProperty("value", 0.0)
self.offset_doubleSpinBox.setObjectName("offset_doubleSpinBox")
self.horizontalLayout_60.addWidget(self.offset_doubleSpinBox)
spacerItem151 = QtWidgets.QSpacerItem(40, 20, QtWidgets.QSizePolicy.Expanding, QtWidgets.QSizePolicy.Minimum)
self.horizontalLayout_60.addItem(spacerItem151)
self.band_calc = QtWidgets.QToolButton(self.widget_3)
font = QtGui.QFont()
font.setBold(True)
font.setWeight(75)
self.band_calc.setFont(font)
self.band_calc.setLayoutDirection(QtCore.Qt.RightToLeft)
self.band_calc.setStyleSheet("margin: 0px;padding: 0px;")
self.band_calc.setIcon(icon48)
self.band_calc.setIconSize(QtCore.QSize(34, 34))
self.band_calc.setToolButtonStyle(QtCore.Qt.ToolButtonTextBesideIcon)
self.band_calc.setObjectName("band_calc")
self.horizontalLayout_60.addWidget(self.band_calc)
self.toolButton_calculate = QtWidgets.QToolButton(self.widget_3)
font = QtGui.QFont()
font.setBold(True)
font.setWeight(75)
self.toolButton_calculate.setFont(font)
self.toolButton_calculate.setLayoutDirection(QtCore.Qt.RightToLeft)
self.toolButton_calculate.setStyleSheet("margin: 0px;padding: 0px;")
self.toolButton_calculate.setIcon(icon64)
self.toolButton_calculate.setIconSize(QtCore.QSize(34, 34))
self.toolButton_calculate.setToolButtonStyle(QtCore.Qt.ToolButtonTextBesideIcon)
self.toolButton_calculate.setObjectName("toolButton_calculate")
self.horizontalLayout_60.addWidget(self.toolButton_calculate)
self.gridLayout_40.addLayout(self.horizontalLayout_60, 3, 0, 1, 1)
self.gridLayout_303.addWidget(self.splitter_band_calc, 0, 0, 1, 1)
self.SCP_tabs.addTab(self.tab_band_calc, "")
self.tab_batch = QtWidgets.QWidget()
self.tab_batch.setObjectName("tab_batch")
self.verticalLayout_5 = QtWidgets.QVBoxLayout(self.tab_batch)
self.verticalLayout_5.setObjectName("verticalLayout_5")
self.verticalLayout_3 = QtWidgets.QVBoxLayout()
self.verticalLayout_3.setObjectName("verticalLayout_3")
self.label_73 = QtWidgets.QLabel(self.tab_batch)
sizePolicy = QtWidgets.QSizePolicy(QtWidgets.QSizePolicy.Preferred, QtWidgets.QSizePolicy.Maximum)
sizePolicy.setHorizontalStretch(0)
sizePolicy.setVerticalStretch(0)
sizePolicy.setHeightForWidth(self.label_73.sizePolicy().hasHeightForWidth())
self.label_73.setSizePolicy(sizePolicy)
self.label_73.setStyleSheet("background-color : #656565; color : white")
self.label_73.setFrameShape(QtWidgets.QFrame.Panel)
self.label_73.setFrameShadow(QtWidgets.QFrame.Sunken)
self.label_73.setObjectName("label_73")
self.verticalLayout_3.addWidget(self.label_73)
self.splitter_batch = QtWidgets.QSplitter(self.tab_batch)
sizePolicy = QtWidgets.QSizePolicy(QtWidgets.QSizePolicy.Expanding, QtWidgets.QSizePolicy.MinimumExpanding)
sizePolicy.setHorizontalStretch(0)
sizePolicy.setVerticalStretch(0)
sizePolicy.setHeightForWidth(self.splitter_batch.sizePolicy().hasHeightForWidth())
self.splitter_batch.setSizePolicy(sizePolicy)
self.splitter_batch.setMinimumSize(QtCore.QSize(100, 0))
self.splitter_batch.setOrientation(QtCore.Qt.Horizontal)
self.splitter_batch.setChildrenCollapsible(False)
self.splitter_batch.setObjectName("splitter_batch")
self.plainTextEdit_batch = QtWidgets.QPlainTextEdit(self.splitter_batch)
self.plainTextEdit_batch.setMinimumSize(QtCore.QSize(200, 0))
font = QtGui.QFont()
font.setPointSize(11)
self.plainTextEdit_batch.setFont(font)
self.plainTextEdit_batch.setPlainText("")
self.plainTextEdit_batch.setObjectName("plainTextEdit_batch")
self.widget_4 = QtWidgets.QWidget(self.splitter_batch)
sizePolicy = QtWidgets.QSizePolicy(QtWidgets.QSizePolicy.Preferred, QtWidgets.QSizePolicy.Ignored)
sizePolicy.setHorizontalStretch(0)
sizePolicy.setVerticalStretch(0)
sizePolicy.setHeightForWidth(self.widget_4.sizePolicy().hasHeightForWidth())
self.widget_4.setSizePolicy(sizePolicy)
self.widget_4.setMinimumSize(QtCore.QSize(100, 400))
self.widget_4.setMaximumSize(QtCore.QSize(250, 16777215))
self.widget_4.setObjectName("widget_4")
self.gridLayout_76 = QtWidgets.QGridLayout(self.widget_4)
self.gridLayout_76.setContentsMargins(1, 1, 1, 1)
self.gridLayout_76.setObjectName("gridLayout_76")
self.gridLayout_214 = QtWidgets.QGridLayout()
self.gridLayout_214.setObjectName("gridLayout_214")
self.export_batch_toolButton = QtWidgets.QToolButton(self.widget_4)
self.export_batch_toolButton.setStyleSheet("margin: 0px;padding: 0px")
self.export_batch_toolButton.setIcon(icon53)
self.export_batch_toolButton.setIconSize(QtCore.QSize(22, 22))
self.export_batch_toolButton.setObjectName("export_batch_toolButton")
self.gridLayout_214.addWidget(self.export_batch_toolButton, 0, 2, 1, 1)
self.clear_batch_toolButton = QtWidgets.QToolButton(self.widget_4)
self.clear_batch_toolButton.setStyleSheet("margin: 0px;padding: 0px;")
self.clear_batch_toolButton.setIcon(icon59)
self.clear_batch_toolButton.setIconSize(QtCore.QSize(22, 22))
self.clear_batch_toolButton.setObjectName("clear_batch_toolButton")
self.gridLayout_214.addWidget(self.clear_batch_toolButton, 0, 0, 1, 1)
self.import_batch_toolButton = QtWidgets.QToolButton(self.widget_4)
self.import_batch_toolButton.setStyleSheet("margin: 0px;padding: 0px")
self.import_batch_toolButton.setIcon(icon54)
self.import_batch_toolButton.setIconSize(QtCore.QSize(22, 22))
self.import_batch_toolButton.setObjectName("import_batch_toolButton")
self.gridLayout_214.addWidget(self.import_batch_toolButton, 0, 1, 1, 1)
self.gridLayout_76.addLayout(self.gridLayout_214, 1, 0, 1, 1)
self.batch_tableWidget = QtWidgets.QTableWidget(self.widget_4)
sizePolicy = QtWidgets.QSizePolicy(QtWidgets.QSizePolicy.MinimumExpanding, QtWidgets.QSizePolicy.Expanding)
sizePolicy.setHorizontalStretch(0)
sizePolicy.setVerticalStretch(0)
sizePolicy.setHeightForWidth(self.batch_tableWidget.sizePolicy().hasHeightForWidth())
self.batch_tableWidget.setSizePolicy(sizePolicy)
self.batch_tableWidget.setMaximumSize(QtCore.QSize(300, 16777215))
self.batch_tableWidget.setEditTriggers(QtWidgets.QAbstractItemView.NoEditTriggers)
self.batch_tableWidget.setAlternatingRowColors(True)
self.batch_tableWidget.setSelectionMode(QtWidgets.QAbstractItemView.SingleSelection)
self.batch_tableWidget.setObjectName("batch_tableWidget")
self.batch_tableWidget.setColumnCount(1)
self.batch_tableWidget.setRowCount(0)
item = QtWidgets.QTableWidgetItem()
self.batch_tableWidget.setHorizontalHeaderItem(0, item)
self.batch_tableWidget.horizontalHeader().setStretchLastSection(True)
self.batch_tableWidget.verticalHeader().setVisible(False)
self.gridLayout_76.addWidget(self.batch_tableWidget, 0, 0, 1, 1)
self.verticalLayout_3.addWidget(self.splitter_batch)
self.batch_label = QtWidgets.QLabel(self.tab_batch)
sizePolicy = QtWidgets.QSizePolicy(QtWidgets.QSizePolicy.Preferred, QtWidgets.QSizePolicy.Maximum)
sizePolicy.setHorizontalStretch(0)
sizePolicy.setVerticalStretch(0)
sizePolicy.setHeightForWidth(self.batch_label.sizePolicy().hasHeightForWidth())
self.batch_label.setSizePolicy(sizePolicy)
self.batch_label.setText("")
self.batch_label.setObjectName("batch_label")
self.verticalLayout_3.addWidget(self.batch_label)
self.label_177 = QtWidgets.QLabel(self.tab_batch)
sizePolicy = QtWidgets.QSizePolicy(QtWidgets.QSizePolicy.Preferred, QtWidgets.QSizePolicy.Maximum)
sizePolicy.setHorizontalStretch(0)
sizePolicy.setVerticalStretch(0)
sizePolicy.setHeightForWidth(self.label_177.sizePolicy().hasHeightForWidth())
self.label_177.setSizePolicy(sizePolicy)
self.label_177.setStyleSheet("background-color : #656565; color : white")
self.label_177.setFrameShape(QtWidgets.QFrame.Panel)
self.label_177.setFrameShadow(QtWidgets.QFrame.Sunken)
self.label_177.setAlignment(QtCore.Qt.AlignLeading|QtCore.Qt.AlignLeft|QtCore.Qt.AlignVCenter)
self.label_177.setObjectName("label_177")
self.verticalLayout_3.addWidget(self.label_177)
self.horizontalLayout_69 = QtWidgets.QHBoxLayout()
self.horizontalLayout_69.setObjectName("horizontalLayout_69")
spacerItem152 = QtWidgets.QSpacerItem(40, 20, QtWidgets.QSizePolicy.Expanding, QtWidgets.QSizePolicy.Minimum)
self.horizontalLayout_69.addItem(spacerItem152)
self.check_batch = QtWidgets.QToolButton(self.tab_batch)
font = QtGui.QFont()
font.setBold(True)
font.setWeight(75)
self.check_batch.setFont(font)
self.check_batch.setLayoutDirection(QtCore.Qt.RightToLeft)
self.check_batch.setStyleSheet("margin: 0px;padding: 0px;")
icon84 = QtGui.QIcon()
icon84.addPixmap(QtGui.QPixmap(":/plugins/semiautomaticclassificationplugin/icons/semiautomaticclassificationplugin_batch_check.svg"), QtGui.QIcon.Normal, QtGui.QIcon.Off)
self.check_batch.setIcon(icon84)
self.check_batch.setIconSize(QtCore.QSize(34, 34))
self.check_batch.setToolButtonStyle(QtCore.Qt.ToolButtonTextBesideIcon)
self.check_batch.setObjectName("check_batch")
self.horizontalLayout_69.addWidget(self.check_batch)
self.toolButton_run_batch = QtWidgets.QToolButton(self.tab_batch)
font = QtGui.QFont()
font.setBold(True)
font.setWeight(75)
self.toolButton_run_batch.setFont(font)
self.toolButton_run_batch.setLayoutDirection(QtCore.Qt.RightToLeft)
self.toolButton_run_batch.setStyleSheet("margin: 0px;padding: 0px;")
self.toolButton_run_batch.setIcon(icon64)
self.toolButton_run_batch.setIconSize(QtCore.QSize(34, 34))
self.toolButton_run_batch.setToolButtonStyle(QtCore.Qt.ToolButtonTextBesideIcon)
self.toolButton_run_batch.setObjectName("toolButton_run_batch")
self.horizontalLayout_69.addWidget(self.toolButton_run_batch)
self.verticalLayout_3.addLayout(self.horizontalLayout_69)
self.verticalLayout_5.addLayout(self.verticalLayout_3)
self.SCP_tabs.addTab(self.tab_batch, "")
self.tab_Settings = QtWidgets.QWidget()
sizePolicy = QtWidgets.QSizePolicy(QtWidgets.QSizePolicy.Expanding, QtWidgets.QSizePolicy.Expanding)
sizePolicy.setHorizontalStretch(0)
sizePolicy.setVerticalStretch(0)
sizePolicy.setHeightForWidth(self.tab_Settings.sizePolicy().hasHeightForWidth())
self.tab_Settings.setSizePolicy(sizePolicy)
self.tab_Settings.setMinimumSize(QtCore.QSize(454, 0))
self.tab_Settings.setMaximumSize(QtCore.QSize(16777215, 16777215))
self.tab_Settings.setObjectName("tab_Settings")
self.gridLayout_134 = QtWidgets.QGridLayout(self.tab_Settings)
self.gridLayout_134.setObjectName("gridLayout_134")
self.settings_tabWidget = QtWidgets.QTabWidget(self.tab_Settings)
self.settings_tabWidget.setDocumentMode(True)
self.settings_tabWidget.setObjectName("settings_tabWidget")
self.tabWidgetProcessing = QtWidgets.QWidget()
self.tabWidgetProcessing.setObjectName("tabWidgetProcessing")
self.gridLayout_195 = QtWidgets.QGridLayout(self.tabWidgetProcessing)
self.gridLayout_195.setObjectName("gridLayout_195")
self.gridLayout_3 = QtWidgets.QGridLayout()
self.gridLayout_3.setObjectName("gridLayout_3")
self.label_28 = QtWidgets.QLabel(self.tabWidgetProcessing)
self.label_28.setStyleSheet("background-color : #656565; color : white")
self.label_28.setFrameShape(QtWidgets.QFrame.Panel)
self.label_28.setFrameShadow(QtWidgets.QFrame.Sunken)
self.label_28.setObjectName("label_28")
self.gridLayout_3.addWidget(self.label_28, 0, 0, 1, 1)
self.gridLayout = QtWidgets.QGridLayout()
self.gridLayout.setObjectName("gridLayout")
self.RAM_spinBox = QtWidgets.QSpinBox(self.tabWidgetProcessing)
sizePolicy = QtWidgets.QSizePolicy(QtWidgets.QSizePolicy.Fixed, QtWidgets.QSizePolicy.Fixed)
sizePolicy.setHorizontalStretch(0)
sizePolicy.setVerticalStretch(0)
sizePolicy.setHeightForWidth(self.RAM_spinBox.sizePolicy().hasHeightForWidth())
self.RAM_spinBox.setSizePolicy(sizePolicy)
self.RAM_spinBox.setMinimumSize(QtCore.QSize(50, 0))
self.RAM_spinBox.setMaximumSize(QtCore.QSize(100, 16777215))
self.RAM_spinBox.setAlignment(QtCore.Qt.AlignRight|QtCore.Qt.AlignTrailing|QtCore.Qt.AlignVCenter)
self.RAM_spinBox.setMinimum(128)
self.RAM_spinBox.setMaximum(196608)
self.RAM_spinBox.setSingleStep(10)
self.RAM_spinBox.setProperty("value", 512)
self.RAM_spinBox.setObjectName("RAM_spinBox")
self.gridLayout.addWidget(self.RAM_spinBox, 0, 1, 1, 1)
self.label_23 = QtWidgets.QLabel(self.tabWidgetProcessing)
self.label_23.setFrameShadow(QtWidgets.QFrame.Sunken)
self.label_23.setObjectName("label_23")
self.gridLayout.addWidget(self.label_23, 0, 0, 1, 1)
self.label_56 = QtWidgets.QLabel(self.tabWidgetProcessing)
self.label_56.setFrameShadow(QtWidgets.QFrame.Sunken)
self.label_56.setObjectName("label_56")
self.gridLayout.addWidget(self.label_56, 1, 0, 1, 1)
self.CPU_spinBox = QtWidgets.QSpinBox(self.tabWidgetProcessing)
sizePolicy = QtWidgets.QSizePolicy(QtWidgets.QSizePolicy.Fixed, QtWidgets.QSizePolicy.Fixed)
sizePolicy.setHorizontalStretch(0)
sizePolicy.setVerticalStretch(0)
sizePolicy.setHeightForWidth(self.CPU_spinBox.sizePolicy().hasHeightForWidth())
self.CPU_spinBox.setSizePolicy(sizePolicy)
self.CPU_spinBox.setMinimumSize(QtCore.QSize(50, 0))
self.CPU_spinBox.setMaximumSize(QtCore.QSize(100, 16777215))
self.CPU_spinBox.setAlignment(QtCore.Qt.AlignRight|QtCore.Qt.AlignTrailing|QtCore.Qt.AlignVCenter)
self.CPU_spinBox.setMinimum(1)
self.CPU_spinBox.setMaximum(1000)
self.CPU_spinBox.setSingleStep(1)
self.CPU_spinBox.setProperty("value", 1)
self.CPU_spinBox.setObjectName("CPU_spinBox")
self.gridLayout.addWidget(self.CPU_spinBox, 1, 1, 1, 1)
self.gridLayout_3.addLayout(self.gridLayout, 1, 0, 1, 1)
self.gridLayout_195.addLayout(self.gridLayout_3, 0, 0, 1, 1)
self.gridLayout_237 = QtWidgets.QGridLayout()
self.gridLayout_237.setObjectName("gridLayout_237")
self.label_13 = QtWidgets.QLabel(self.tabWidgetProcessing)
self.label_13.setFrameShape(QtWidgets.QFrame.Panel)
self.label_13.setFrameShadow(QtWidgets.QFrame.Sunken)
self.label_13.setObjectName("label_13")
self.gridLayout_237.addWidget(self.label_13, 1, 1, 1, 1)
self.label_18 = QtWidgets.QLabel(self.tabWidgetProcessing)
self.label_18.setFrameShape(QtWidgets.QFrame.Panel)
self.label_18.setFrameShadow(QtWidgets.QFrame.Sunken)
self.label_18.setObjectName("label_18")
self.gridLayout_237.addWidget(self.label_18, 1, 3, 1, 1)
self.smtp_user_lineEdit = QtWidgets.QLineEdit(self.tabWidgetProcessing)
self.smtp_user_lineEdit.setObjectName("smtp_user_lineEdit")
self.gridLayout_237.addWidget(self.smtp_user_lineEdit, 2, 2, 1, 1)
self.smtp_checkBox = QtWidgets.QCheckBox(self.tabWidgetProcessing)
self.smtp_checkBox.setChecked(True)
self.smtp_checkBox.setTristate(False)
self.smtp_checkBox.setObjectName("smtp_checkBox")
self.gridLayout_237.addWidget(self.smtp_checkBox, 3, 1, 1, 1)
self.label_117 = QtWidgets.QLabel(self.tabWidgetProcessing)
self.label_117.setStyleSheet("background-color : #656565; color : white")
self.label_117.setFrameShape(QtWidgets.QFrame.Panel)
self.label_117.setFrameShadow(QtWidgets.QFrame.Sunken)
self.label_117.setObjectName("label_117")
self.gridLayout_237.addWidget(self.label_117, 0, 0, 1, 5)
self.smtp_password_lineEdit = QtWidgets.QLineEdit(self.tabWidgetProcessing)
self.smtp_password_lineEdit.setObjectName("smtp_password_lineEdit")
self.gridLayout_237.addWidget(self.smtp_password_lineEdit, 2, 3, 1, 1)
self.smtp_server_lineEdit = QtWidgets.QLineEdit(self.tabWidgetProcessing)
self.smtp_server_lineEdit.setObjectName("smtp_server_lineEdit")
self.gridLayout_237.addWidget(self.smtp_server_lineEdit, 2, 1, 1, 1)
self.label_14 = QtWidgets.QLabel(self.tabWidgetProcessing)
self.label_14.setFrameShape(QtWidgets.QFrame.Panel)
self.label_14.setFrameShadow(QtWidgets.QFrame.Sunken)
self.label_14.setObjectName("label_14")
self.gridLayout_237.addWidget(self.label_14, 1, 2, 1, 1)
self.to_email_lineEdit = QtWidgets.QLineEdit(self.tabWidgetProcessing)
self.to_email_lineEdit.setObjectName("to_email_lineEdit")
self.gridLayout_237.addWidget(self.to_email_lineEdit, 3, 2, 1, 3)
self.remeber_settings_checkBox = QtWidgets.QCheckBox(self.tabWidgetProcessing)
self.remeber_settings_checkBox.setChecked(True)
self.remeber_settings_checkBox.setObjectName("remeber_settings_checkBox")
self.gridLayout_237.addWidget(self.remeber_settings_checkBox, 2, 4, 1, 1)
self.gridLayout_195.addLayout(self.gridLayout_237, 2, 0, 1, 1)
self.gridLayout_124 = QtWidgets.QGridLayout()
self.gridLayout_124.setObjectName("gridLayout_124")
self.reset_temp_directory_Button = QtWidgets.QToolButton(self.tabWidgetProcessing)
self.reset_temp_directory_Button.setStyleSheet("margin: 0px;padding: 0px;")
self.reset_temp_directory_Button.setIcon(icon59)
self.reset_temp_directory_Button.setIconSize(QtCore.QSize(22, 22))
self.reset_temp_directory_Button.setObjectName("reset_temp_directory_Button")
self.gridLayout_124.addWidget(self.reset_temp_directory_Button, 2, 3, 1, 1)
self.temp_directory_Button = QtWidgets.QToolButton(self.tabWidgetProcessing)
self.temp_directory_Button.setStyleSheet("margin: 0px;padding: 0px;")
self.temp_directory_Button.setIcon(icon69)
self.temp_directory_Button.setIconSize(QtCore.QSize(22, 22))
self.temp_directory_Button.setObjectName("temp_directory_Button")
self.gridLayout_124.addWidget(self.temp_directory_Button, 2, 0, 1, 1)
self.label_87 = QtWidgets.QLabel(self.tabWidgetProcessing)
self.label_87.setStyleSheet("background-color : #656565; color : white")
self.label_87.setFrameShape(QtWidgets.QFrame.Panel)
self.label_87.setFrameShadow(QtWidgets.QFrame.Sunken)
self.label_87.setObjectName("label_87")
self.gridLayout_124.addWidget(self.label_87, 0, 0, 1, 4)
self.temp_directory_label = QtWidgets.QLabel(self.tabWidgetProcessing)
self.temp_directory_label.setFrameShape(QtWidgets.QFrame.Panel)
self.temp_directory_label.setFrameShadow(QtWidgets.QFrame.Sunken)
self.temp_directory_label.setText("")
self.temp_directory_label.setObjectName("temp_directory_label")
self.gridLayout_124.addWidget(self.temp_directory_label, 2, 1, 1, 2)
self.gridLayout_195.addLayout(self.gridLayout_124, 3, 0, 1, 1)
self.gridLayout_107 = QtWidgets.QGridLayout()
self.gridLayout_107.setObjectName("gridLayout_107")
self.verticalLayout_6 = QtWidgets.QVBoxLayout()
self.verticalLayout_6.setObjectName("verticalLayout_6")
self.SNAP_label = QtWidgets.QLabel(self.tabWidgetProcessing)
sizePolicy = QtWidgets.QSizePolicy(QtWidgets.QSizePolicy.Maximum, QtWidgets.QSizePolicy.Preferred)
sizePolicy.setHorizontalStretch(0)
sizePolicy.setVerticalStretch(0)
sizePolicy.setHeightForWidth(self.SNAP_label.sizePolicy().hasHeightForWidth())
self.SNAP_label.setSizePolicy(sizePolicy)
self.SNAP_label.setFrameShadow(QtWidgets.QFrame.Sunken)
self.SNAP_label.setAlignment(QtCore.Qt.AlignLeading|QtCore.Qt.AlignLeft|QtCore.Qt.AlignVCenter)
self.SNAP_label.setObjectName("SNAP_label")
self.verticalLayout_6.addWidget(self.SNAP_label)
self.label_276 = QtWidgets.QLabel(self.tabWidgetProcessing)
self.label_276.setFrameShadow(QtWidgets.QFrame.Sunken)
self.label_276.setObjectName("label_276")
self.verticalLayout_6.addWidget(self.label_276)
self.label_288 = QtWidgets.QLabel(self.tabWidgetProcessing)
self.label_288.setFrameShadow(QtWidgets.QFrame.Sunken)
self.label_288.setObjectName("label_288")
self.verticalLayout_6.addWidget(self.label_288)
self.label_275 = QtWidgets.QLabel(self.tabWidgetProcessing)
self.label_275.setFrameShadow(QtWidgets.QFrame.Sunken)
self.label_275.setObjectName("label_275")
self.verticalLayout_6.addWidget(self.label_275)
self.gridLayout_107.addLayout(self.verticalLayout_6, 1, 0, 1, 1)
self.verticalLayout_7 = QtWidgets.QVBoxLayout()
self.verticalLayout_7.setObjectName("verticalLayout_7")
self.SNAP_GPT_lineEdit = QtWidgets.QLineEdit(self.tabWidgetProcessing)
self.SNAP_GPT_lineEdit.setObjectName("SNAP_GPT_lineEdit")
self.verticalLayout_7.addWidget(self.SNAP_GPT_lineEdit)
self.python_path_lineEdit = QtWidgets.QLineEdit(self.tabWidgetProcessing)
self.python_path_lineEdit.setObjectName("python_path_lineEdit")
self.verticalLayout_7.addWidget(self.python_path_lineEdit)
self.python_path_lineEdit_2 = QtWidgets.QLineEdit(self.tabWidgetProcessing)
self.python_path_lineEdit_2.setObjectName("python_path_lineEdit_2")
self.verticalLayout_7.addWidget(self.python_path_lineEdit_2)
self.gdal_path_lineEdit = QtWidgets.QLineEdit(self.tabWidgetProcessing)
self.gdal_path_lineEdit.setObjectName("gdal_path_lineEdit")
self.verticalLayout_7.addWidget(self.gdal_path_lineEdit)
self.gridLayout_107.addLayout(self.verticalLayout_7, 1, 1, 1, 1)
self.label_211 = QtWidgets.QLabel(self.tabWidgetProcessing)
self.label_211.setStyleSheet("background-color : #656565; color : white")
self.label_211.setFrameShape(QtWidgets.QFrame.Panel)
self.label_211.setFrameShadow(QtWidgets.QFrame.Sunken)
self.label_211.setObjectName("label_211")
self.gridLayout_107.addWidget(self.label_211, 0, 0, 1, 2)
self.gridLayout_195.addLayout(self.gridLayout_107, 4, 0, 1, 1)
self.gridLayout_30 = QtWidgets.QGridLayout()
self.gridLayout_30.setObjectName("gridLayout_30")
self.sound_checkBox = QtWidgets.QCheckBox(self.tabWidgetProcessing)
self.sound_checkBox.setChecked(True)
self.sound_checkBox.setTristate(False)
self.sound_checkBox.setObjectName("sound_checkBox")
self.gridLayout_30.addWidget(self.sound_checkBox, 1, 0, 1, 1)
self.virtual_raster_checkBox = QtWidgets.QCheckBox(self.tabWidgetProcessing)
self.virtual_raster_checkBox.setChecked(True)
self.virtual_raster_checkBox.setObjectName("virtual_raster_checkBox")
self.gridLayout_30.addWidget(self.virtual_raster_checkBox, 1, 1, 1, 1)
self.label_45 = QtWidgets.QLabel(self.tabWidgetProcessing)
self.label_45.setStyleSheet("background-color : #656565; color : white")
self.label_45.setFrameShape(QtWidgets.QFrame.Panel)
self.label_45.setFrameShadow(QtWidgets.QFrame.Sunken)
self.label_45.setObjectName("label_45")
self.gridLayout_30.addWidget(self.label_45, 0, 0, 1, 3)
self.raster_compression_checkBox = QtWidgets.QCheckBox(self.tabWidgetProcessing)
self.raster_compression_checkBox.setChecked(True)
self.raster_compression_checkBox.setObjectName("raster_compression_checkBox")
self.gridLayout_30.addWidget(self.raster_compression_checkBox, 1, 2, 1, 1)
self.horizontalLayout_65 = QtWidgets.QHBoxLayout()
self.horizontalLayout_65.setObjectName("horizontalLayout_65")
self.parallel_writing_checkBox = QtWidgets.QCheckBox(self.tabWidgetProcessing)
self.parallel_writing_checkBox.setObjectName("parallel_writing_checkBox")
self.horizontalLayout_65.addWidget(self.parallel_writing_checkBox)
self.gridLayout_30.addLayout(self.horizontalLayout_65, 2, 0, 1, 3)
self.gridLayout_195.addLayout(self.gridLayout_30, 1, 0, 1, 1)
spacerItem153 = QtWidgets.QSpacerItem(17, 17, QtWidgets.QSizePolicy.Minimum, QtWidgets.QSizePolicy.Expanding)
self.gridLayout_195.addItem(spacerItem153, 5, 0, 1, 1)
self.settings_tabWidget.addTab(self.tabWidgetProcessing, "")
self.tabWidgetInterface = QtWidgets.QWidget()
self.tabWidgetInterface.setObjectName("tabWidgetInterface")
self.gridLayout_63 = QtWidgets.QGridLayout(self.tabWidgetInterface)
self.gridLayout_63.setObjectName("gridLayout_63")
self.gridLayout_13 = QtWidgets.QGridLayout()
self.gridLayout_13.setObjectName("gridLayout_13")
self.label_31 = QtWidgets.QLabel(self.tabWidgetInterface)
self.label_31.setFrameShape(QtWidgets.QFrame.Panel)
self.label_31.setFrameShadow(QtWidgets.QFrame.Sunken)
self.label_31.setObjectName("label_31")
self.gridLayout_13.addWidget(self.label_31, 1, 3, 1, 1)
self.Info_field_name_lineEdit = QtWidgets.QLineEdit(self.tabWidgetInterface)
self.Info_field_name_lineEdit.setMaxLength(10)
self.Info_field_name_lineEdit.setObjectName("Info_field_name_lineEdit")
self.gridLayout_13.addWidget(self.Info_field_name_lineEdit, 2, 3, 1, 1)
self.label_24 = QtWidgets.QLabel(self.tabWidgetInterface)
self.label_24.setStyleSheet("background-color : #656565; color : white")
self.label_24.setFrameShape(QtWidgets.QFrame.Panel)
self.label_24.setFrameShadow(QtWidgets.QFrame.Sunken)
self.label_24.setAlignment(QtCore.Qt.AlignLeading|QtCore.Qt.AlignLeft|QtCore.Qt.AlignVCenter)
self.label_24.setObjectName("label_24")
self.gridLayout_13.addWidget(self.label_24, 0, 0, 1, 5)
self.ID_field_name_lineEdit = QtWidgets.QLineEdit(self.tabWidgetInterface)
self.ID_field_name_lineEdit.setMaxLength(10)
self.ID_field_name_lineEdit.setObjectName("ID_field_name_lineEdit")
self.gridLayout_13.addWidget(self.ID_field_name_lineEdit, 2, 2, 1, 1)
self.MID_field_name_lineEdit = QtWidgets.QLineEdit(self.tabWidgetInterface)
self.MID_field_name_lineEdit.setMaxLength(10)
self.MID_field_name_lineEdit.setObjectName("MID_field_name_lineEdit")
self.gridLayout_13.addWidget(self.MID_field_name_lineEdit, 2, 0, 1, 1)
self.label_10 = QtWidgets.QLabel(self.tabWidgetInterface)
self.label_10.setFrameShape(QtWidgets.QFrame.Panel)
self.label_10.setFrameShadow(QtWidgets.QFrame.Sunken)
self.label_10.setObjectName("label_10")
self.gridLayout_13.addWidget(self.label_10, 1, 2, 1, 1)
self.MCInfo_field_name_lineEdit = QtWidgets.QLineEdit(self.tabWidgetInterface)
self.MCInfo_field_name_lineEdit.setMaxLength(10)
self.MCInfo_field_name_lineEdit.setObjectName("MCInfo_field_name_lineEdit")
self.gridLayout_13.addWidget(self.MCInfo_field_name_lineEdit, 2, 1, 1, 1)
self.label_17 = QtWidgets.QLabel(self.tabWidgetInterface)
self.label_17.setFrameShape(QtWidgets.QFrame.Panel)
self.label_17.setFrameShadow(QtWidgets.QFrame.Sunken)
self.label_17.setObjectName("label_17")
self.gridLayout_13.addWidget(self.label_17, 1, 0, 1, 1)
self.label_46 = QtWidgets.QLabel(self.tabWidgetInterface)
self.label_46.setFrameShape(QtWidgets.QFrame.Panel)
self.label_46.setFrameShadow(QtWidgets.QFrame.Sunken)
self.label_46.setObjectName("label_46")
self.gridLayout_13.addWidget(self.label_46, 1, 1, 1, 1)
self.reset_field_names_Button = QtWidgets.QToolButton(self.tabWidgetInterface)
self.reset_field_names_Button.setStyleSheet("margin: 0px;padding: 0px;")
self.reset_field_names_Button.setIcon(icon59)
self.reset_field_names_Button.setIconSize(QtCore.QSize(22, 22))
self.reset_field_names_Button.setObjectName("reset_field_names_Button")
self.gridLayout_13.addWidget(self.reset_field_names_Button, 2, 4, 1, 1)
self.gridLayout_63.addLayout(self.gridLayout_13, 0, 0, 1, 1)
self.gridLayout_21 = QtWidgets.QGridLayout()
self.gridLayout_21.setObjectName("gridLayout_21")
self.label_21 = QtWidgets.QLabel(self.tabWidgetInterface)
self.label_21.setStyleSheet("background-color : #656565; color : white")
self.label_21.setFrameShape(QtWidgets.QFrame.Panel)
self.label_21.setFrameShadow(QtWidgets.QFrame.Sunken)
self.label_21.setAlignment(QtCore.Qt.AlignLeading|QtCore.Qt.AlignLeft|QtCore.Qt.AlignVCenter)
self.label_21.setObjectName("label_21")
self.gridLayout_21.addWidget(self.label_21, 0, 1, 1, 3)
self.gridLayout_172 = QtWidgets.QGridLayout()
self.gridLayout_172.setObjectName("gridLayout_172")
self.change_color_Button = QtWidgets.QPushButton(self.tabWidgetInterface)
self.change_color_Button.setStyleSheet("background-color : #FFAA00")
self.change_color_Button.setText("")
self.change_color_Button.setObjectName("change_color_Button")
self.gridLayout_172.addWidget(self.change_color_Button, 0, 1, 1, 1)
self.label_22 = QtWidgets.QLabel(self.tabWidgetInterface)
sizePolicy = QtWidgets.QSizePolicy(QtWidgets.QSizePolicy.Maximum, QtWidgets.QSizePolicy.Preferred)
sizePolicy.setHorizontalStretch(0)
sizePolicy.setVerticalStretch(0)
sizePolicy.setHeightForWidth(self.label_22.sizePolicy().hasHeightForWidth())
self.label_22.setSizePolicy(sizePolicy)
self.label_22.setFrameShadow(QtWidgets.QFrame.Sunken)
self.label_22.setObjectName("label_22")
self.gridLayout_172.addWidget(self.label_22, 0, 0, 1, 1)
self.transparency_Label = QtWidgets.QLabel(self.tabWidgetInterface)
sizePolicy = QtWidgets.QSizePolicy(QtWidgets.QSizePolicy.Maximum, QtWidgets.QSizePolicy.Preferred)
sizePolicy.setHorizontalStretch(0)
sizePolicy.setVerticalStretch(0)
sizePolicy.setHeightForWidth(self.transparency_Label.sizePolicy().hasHeightForWidth())
self.transparency_Label.setSizePolicy(sizePolicy)
self.transparency_Label.setMinimumSize(QtCore.QSize(100, 0))
self.transparency_Label.setFrameShadow(QtWidgets.QFrame.Sunken)
self.transparency_Label.setObjectName("transparency_Label")
self.gridLayout_172.addWidget(self.transparency_Label, 0, 2, 1, 1)
self.transparency_Slider = QtWidgets.QSlider(self.tabWidgetInterface)
self.transparency_Slider.setMaximum(100)
self.transparency_Slider.setSingleStep(10)
self.transparency_Slider.setProperty("value", 50)
self.transparency_Slider.setOrientation(QtCore.Qt.Horizontal)
self.transparency_Slider.setTickPosition(QtWidgets.QSlider.TicksBelow)
self.transparency_Slider.setTickInterval(10)
self.transparency_Slider.setObjectName("transparency_Slider")
self.gridLayout_172.addWidget(self.transparency_Slider, 0, 3, 1, 1)
self.gridLayout_21.addLayout(self.gridLayout_172, 2, 1, 1, 1)
self.reset_color_Button = QtWidgets.QToolButton(self.tabWidgetInterface)
self.reset_color_Button.setStyleSheet("margin: 0px;padding: 0px;")
self.reset_color_Button.setIcon(icon59)
self.reset_color_Button.setIconSize(QtCore.QSize(22, 22))
self.reset_color_Button.setObjectName("reset_color_Button")
self.gridLayout_21.addWidget(self.reset_color_Button, 2, 2, 1, 1)
self.gridLayout_63.addLayout(self.gridLayout_21, 1, 0, 1, 1)
self.gridLayout_84 = QtWidgets.QGridLayout()
self.gridLayout_84.setObjectName("gridLayout_84")
self.label_68 = QtWidgets.QLabel(self.tabWidgetInterface)
sizePolicy = QtWidgets.QSizePolicy(QtWidgets.QSizePolicy.Maximum, QtWidgets.QSizePolicy.Preferred)
sizePolicy.setHorizontalStretch(0)
sizePolicy.setVerticalStretch(0)
sizePolicy.setHeightForWidth(self.label_68.sizePolicy().hasHeightForWidth())
self.label_68.setSizePolicy(sizePolicy)
self.label_68.setFrameShadow(QtWidgets.QFrame.Sunken)
self.label_68.setAlignment(QtCore.Qt.AlignLeading|QtCore.Qt.AlignLeft|QtCore.Qt.AlignVCenter)
self.label_68.setObjectName("label_68")
self.gridLayout_84.addWidget(self.label_68, 1, 0, 1, 1)
self.variable_name_lineEdit = QtWidgets.QLineEdit(self.tabWidgetInterface)
sizePolicy = QtWidgets.QSizePolicy(QtWidgets.QSizePolicy.Minimum, QtWidgets.QSizePolicy.Fixed)
sizePolicy.setHorizontalStretch(0)
sizePolicy.setVerticalStretch(0)
sizePolicy.setHeightForWidth(self.variable_name_lineEdit.sizePolicy().hasHeightForWidth())
self.variable_name_lineEdit.setSizePolicy(sizePolicy)
self.variable_name_lineEdit.setMaxLength(20)
self.variable_name_lineEdit.setObjectName("variable_name_lineEdit")
self.gridLayout_84.addWidget(self.variable_name_lineEdit, 1, 1, 1, 1)
self.label_69 = QtWidgets.QLabel(self.tabWidgetInterface)
self.label_69.setStyleSheet("background-color : #656565; color : white")
self.label_69.setFrameShape(QtWidgets.QFrame.Panel)
self.label_69.setFrameShadow(QtWidgets.QFrame.Sunken)
self.label_69.setAlignment(QtCore.Qt.AlignLeading|QtCore.Qt.AlignLeft|QtCore.Qt.AlignVCenter)
self.label_69.setObjectName("label_69")
self.gridLayout_84.addWidget(self.label_69, 0, 0, 1, 3)
self.reset_variable_name_Button = QtWidgets.QToolButton(self.tabWidgetInterface)
self.reset_variable_name_Button.setStyleSheet("margin: 0px;padding: 0px;")
self.reset_variable_name_Button.setIcon(icon59)
self.reset_variable_name_Button.setIconSize(QtCore.QSize(22, 22))
self.reset_variable_name_Button.setObjectName("reset_variable_name_Button")
self.gridLayout_84.addWidget(self.reset_variable_name_Button, 1, 2, 1, 1)
self.gridLayout_63.addLayout(self.gridLayout_84, 2, 0, 1, 1)
self.gridLayout_17 = QtWidgets.QGridLayout()
self.gridLayout_17.setObjectName("gridLayout_17")
self.label_95 = QtWidgets.QLabel(self.tabWidgetInterface)
self.label_95.setStyleSheet("background-color : #656565; color : white")
self.label_95.setFrameShape(QtWidgets.QFrame.Panel)
self.label_95.setFrameShadow(QtWidgets.QFrame.Sunken)
self.label_95.setAlignment(QtCore.Qt.AlignLeading|QtCore.Qt.AlignLeft|QtCore.Qt.AlignVCenter)
self.label_95.setObjectName("label_95")
self.gridLayout_17.addWidget(self.label_95, 0, 0, 1, 1)
spacerItem154 = QtWidgets.QSpacerItem(20, 40, QtWidgets.QSizePolicy.Minimum, QtWidgets.QSizePolicy.Expanding)
self.gridLayout_17.addItem(spacerItem154, 2, 0, 1, 1)
self.download_news_checkBox = QtWidgets.QCheckBox(self.tabWidgetInterface)
self.download_news_checkBox.setChecked(True)
self.download_news_checkBox.setObjectName("download_news_checkBox")
self.gridLayout_17.addWidget(self.download_news_checkBox, 1, 0, 1, 1)
self.gridLayout_63.addLayout(self.gridLayout_17, 4, 0, 1, 1)
self.gridLayout_99 = QtWidgets.QGridLayout()
self.gridLayout_99.setObjectName("gridLayout_99")
self.label_76 = QtWidgets.QLabel(self.tabWidgetInterface)
self.label_76.setStyleSheet("background-color : #656565; color : white")
self.label_76.setFrameShape(QtWidgets.QFrame.Panel)
self.label_76.setFrameShadow(QtWidgets.QFrame.Sunken)
self.label_76.setAlignment(QtCore.Qt.AlignLeading|QtCore.Qt.AlignLeft|QtCore.Qt.AlignVCenter)
self.label_76.setObjectName("label_76")
self.gridLayout_99.addWidget(self.label_76, 0, 0, 1, 3)
self.reset_group_name_Button = QtWidgets.QToolButton(self.tabWidgetInterface)
self.reset_group_name_Button.setStyleSheet("margin: 0px;padding: 0px;")
self.reset_group_name_Button.setIcon(icon59)
self.reset_group_name_Button.setIconSize(QtCore.QSize(22, 22))
self.reset_group_name_Button.setObjectName("reset_group_name_Button")
self.gridLayout_99.addWidget(self.reset_group_name_Button, 1, 2, 1, 1)
self.virtual_raster_load_checkBox = QtWidgets.QCheckBox(self.tabWidgetInterface)
self.virtual_raster_load_checkBox.setObjectName("virtual_raster_load_checkBox")
self.gridLayout_99.addWidget(self.virtual_raster_load_checkBox, 2, 0, 1, 1)
self.horizontalLayout_61 = QtWidgets.QHBoxLayout()
self.horizontalLayout_61.setObjectName("horizontalLayout_61")
self.label_75 = QtWidgets.QLabel(self.tabWidgetInterface)
sizePolicy = QtWidgets.QSizePolicy(QtWidgets.QSizePolicy.Maximum, QtWidgets.QSizePolicy.Preferred)
sizePolicy.setHorizontalStretch(0)
sizePolicy.setVerticalStretch(0)
sizePolicy.setHeightForWidth(self.label_75.sizePolicy().hasHeightForWidth())
self.label_75.setSizePolicy(sizePolicy)
self.label_75.setFrameShadow(QtWidgets.QFrame.Sunken)
self.label_75.setAlignment(QtCore.Qt.AlignLeading|QtCore.Qt.AlignLeft|QtCore.Qt.AlignVCenter)
self.label_75.setObjectName("label_75")
self.horizontalLayout_61.addWidget(self.label_75)
self.group_name_lineEdit = QtWidgets.QLineEdit(self.tabWidgetInterface)
sizePolicy = QtWidgets.QSizePolicy(QtWidgets.QSizePolicy.Minimum, QtWidgets.QSizePolicy.Fixed)
sizePolicy.setHorizontalStretch(0)
sizePolicy.setVerticalStretch(0)
sizePolicy.setHeightForWidth(self.group_name_lineEdit.sizePolicy().hasHeightForWidth())
self.group_name_lineEdit.setSizePolicy(sizePolicy)
self.group_name_lineEdit.setMaxLength(20)
self.group_name_lineEdit.setObjectName("group_name_lineEdit")
self.horizontalLayout_61.addWidget(self.group_name_lineEdit)
self.gridLayout_99.addLayout(self.horizontalLayout_61, 1, 0, 1, 2)
self.gridLayout_63.addLayout(self.gridLayout_99, 3, 0, 1, 1)
self.settings_tabWidget.addTab(self.tabWidgetInterface, "")
self.tabWidgetDebug = QtWidgets.QWidget()
self.tabWidgetDebug.setObjectName("tabWidgetDebug")
self.gridLayout_56 = QtWidgets.QGridLayout(self.tabWidgetDebug)
self.gridLayout_56.setObjectName("gridLayout_56")
self.gridLayout_7 = QtWidgets.QGridLayout()
self.gridLayout_7.setObjectName("gridLayout_7")
self.log_checkBox = QtWidgets.QCheckBox(self.tabWidgetDebug)
self.log_checkBox.setChecked(False)
self.log_checkBox.setTristate(False)
self.log_checkBox.setObjectName("log_checkBox")
self.gridLayout_7.addWidget(self.log_checkBox, 1, 0, 1, 1)
self.exportLog_Button = QtWidgets.QToolButton(self.tabWidgetDebug)
self.exportLog_Button.setStyleSheet("margin: 0px;padding: 0px;")
self.exportLog_Button.setIcon(icon53)
self.exportLog_Button.setIconSize(QtCore.QSize(22, 22))
self.exportLog_Button.setObjectName("exportLog_Button")
self.gridLayout_7.addWidget(self.exportLog_Button, 1, 1, 1, 1)
self.clearLog_Button = QtWidgets.QToolButton(self.tabWidgetDebug)
self.clearLog_Button.setStyleSheet("margin: 0px;padding: 0px;")
self.clearLog_Button.setIcon(icon59)
self.clearLog_Button.setIconSize(QtCore.QSize(22, 22))
self.clearLog_Button.setObjectName("clearLog_Button")
self.gridLayout_7.addWidget(self.clearLog_Button, 1, 2, 1, 1)
self.label_30 = QtWidgets.QLabel(self.tabWidgetDebug)
self.label_30.setStyleSheet("background-color : #656565; color : white")
self.label_30.setFrameShape(QtWidgets.QFrame.Panel)
self.label_30.setFrameShadow(QtWidgets.QFrame.Sunken)
self.label_30.setObjectName("label_30")
self.gridLayout_7.addWidget(self.label_30, 0, 0, 1, 3)
self.log_tableWidget = QtWidgets.QTableWidget(self.tabWidgetDebug)
self.log_tableWidget.setObjectName("log_tableWidget")
self.log_tableWidget.setColumnCount(3)
self.log_tableWidget.setRowCount(0)
item = QtWidgets.QTableWidgetItem()
self.log_tableWidget.setHorizontalHeaderItem(0, item)
item = QtWidgets.QTableWidgetItem()
self.log_tableWidget.setHorizontalHeaderItem(1, item)
item = QtWidgets.QTableWidgetItem()
self.log_tableWidget.setHorizontalHeaderItem(2, item)
self.log_tableWidget.horizontalHeader().setStretchLastSection(True)
self.gridLayout_7.addWidget(self.log_tableWidget, 2, 0, 1, 3)
self.gridLayout_56.addLayout(self.gridLayout_7, 0, 0, 1, 1)
self.gridLayout_55 = QtWidgets.QGridLayout()
self.gridLayout_55.setObjectName("gridLayout_55")
self.test_dependencies_Button = QtWidgets.QToolButton(self.tabWidgetDebug)
self.test_dependencies_Button.setStyleSheet("margin: 0px;padding: 0px;")
self.test_dependencies_Button.setIcon(icon67)
self.test_dependencies_Button.setIconSize(QtCore.QSize(22, 22))
self.test_dependencies_Button.setObjectName("test_dependencies_Button")
self.gridLayout_55.addWidget(self.test_dependencies_Button, 2, 2, 1, 1)
self.label_42 = QtWidgets.QLabel(self.tabWidgetDebug)
self.label_42.setAlignment(QtCore.Qt.AlignLeading|QtCore.Qt.AlignLeft|QtCore.Qt.AlignVCenter)
self.label_42.setObjectName("label_42")
self.gridLayout_55.addWidget(self.label_42, 2, 1, 1, 1)
spacerItem155 = QtWidgets.QSpacerItem(40, 20, QtWidgets.QSizePolicy.Expanding, QtWidgets.QSizePolicy.Minimum)
self.gridLayout_55.addItem(spacerItem155, 2, 3, 1, 1)
self.label_43 = QtWidgets.QLabel(self.tabWidgetDebug)
self.label_43.setStyleSheet("background-color : #656565; color : white")
self.label_43.setFrameShape(QtWidgets.QFrame.Panel)
self.label_43.setFrameShadow(QtWidgets.QFrame.Sunken)
self.label_43.setObjectName("label_43")
self.gridLayout_55.addWidget(self.label_43, 0, 0, 1, 4)
self.gridLayout_56.addLayout(self.gridLayout_55, 1, 0, 1, 1)
self.settings_tabWidget.addTab(self.tabWidgetDebug, "")
self.gridLayout_134.addWidget(self.settings_tabWidget, 0, 0, 1, 1)
self.SCP_tabs.addTab(self.tab_Settings, "")
self.tab_About = QtWidgets.QWidget()
sizePolicy = QtWidgets.QSizePolicy(QtWidgets.QSizePolicy.Minimum, QtWidgets.QSizePolicy.Expanding)
sizePolicy.setHorizontalStretch(0)
sizePolicy.setVerticalStretch(0)
sizePolicy.setHeightForWidth(self.tab_About.sizePolicy().hasHeightForWidth())
self.tab_About.setSizePolicy(sizePolicy)
self.tab_About.setLocale(QtCore.QLocale(QtCore.QLocale.English, QtCore.QLocale.UnitedStates))
self.tab_About.setObjectName("tab_About")
self.gridLayout_16 = QtWidgets.QGridLayout(self.tab_About)
self.gridLayout_16.setObjectName("gridLayout_16")
self.gridLayout_5 = QtWidgets.QGridLayout()
self.gridLayout_5.setObjectName("gridLayout_5")
self.plugin_version_label = QtWidgets.QLabel(self.tab_About)
self.plugin_version_label.setFrameShape(QtWidgets.QFrame.NoFrame)
self.plugin_version_label.setFrameShadow(QtWidgets.QFrame.Raised)
self.plugin_version_label.setText("")
self.plugin_version_label.setTextFormat(QtCore.Qt.PlainText)
self.plugin_version_label.setAlignment(QtCore.Qt.AlignCenter)
self.plugin_version_label.setObjectName("plugin_version_label")
self.gridLayout_5.addWidget(self.plugin_version_label, 1, 0, 1, 1)
self.plugin_label = QtWidgets.QLabel(self.tab_About)
self.plugin_label.setFrameShape(QtWidgets.QFrame.NoFrame)
self.plugin_label.setFrameShadow(QtWidgets.QFrame.Raised)
self.plugin_label.setTextFormat(QtCore.Qt.PlainText)
self.plugin_label.setAlignment(QtCore.Qt.AlignCenter)
self.plugin_label.setObjectName("plugin_label")
self.gridLayout_5.addWidget(self.plugin_label, 0, 0, 1, 1)
self.gridLayout_16.addLayout(self.gridLayout_5, 0, 0, 1, 1)
self.gridLayout_9 = QtWidgets.QGridLayout()
self.gridLayout_9.setObjectName("gridLayout_9")
self.textBrowser = QtWidgets.QTextBrowser(self.tab_About)
self.textBrowser.setFrameShape(QtWidgets.QFrame.Panel)
self.textBrowser.setFrameShadow(QtWidgets.QFrame.Sunken)
self.textBrowser.setOpenExternalLinks(True)
self.textBrowser.setObjectName("textBrowser")
self.gridLayout_9.addWidget(self.textBrowser, 0, 0, 1, 1)
self.gridLayout_16.addLayout(self.gridLayout_9, 1, 0, 1, 1)
self.SCP_tabs.addTab(self.tab_About, "")
self.gridLayout_262.addWidget(self.SCP_tabs, 0, 0, 1, 1)
self.main_tabWidget.addTab(self.tool_tab, icon, "")
self.help_tab = QtWidgets.QWidget()
self.help_tab.setObjectName("help_tab")
self.gridLayout_263 = QtWidgets.QGridLayout(self.help_tab)
self.gridLayout_263.setObjectName("gridLayout_263")
self.help_textBrowser = QtWidgets.QTextBrowser(self.help_tab)
self.help_textBrowser.setOpenExternalLinks(True)
self.help_textBrowser.setOpenLinks(False)
self.help_textBrowser.setObjectName("help_textBrowser")
self.gridLayout_263.addWidget(self.help_textBrowser, 1, 0, 1, 1)
self.main_tabWidget.addTab(self.help_tab, icon51, "")
self.gridLayout_301.addWidget(self.splitter, 0, 0, 1, 1)
self.retranslateUi(SemiAutomaticClassificationPlugin)
self.main_tabWidget.setCurrentIndex(0)
self.SCP_tabs.setCurrentIndex(0)
self.Band_set_tabWidget.setCurrentIndex(-1)
self.tabWidget_5.setCurrentIndex(0)
self.alg_band_weight_tabWidget.setCurrentIndex(-1)
self.toolBox_4.setCurrentIndex(2)
self.tabWidget_3.setCurrentIndex(1)
self.tabWidget_preprocessing.setCurrentIndex(0)
self.tabWidget_4.setCurrentIndex(0)
self.toolBox_band_set_combination.setCurrentIndex(0)
self.toolBox_PCA.setCurrentIndex(0)
self.toolBox_kmeans.setCurrentIndex(0)
self.toolBox_random_forest.setCurrentIndex(0)
self.tabWidget_2.setCurrentIndex(0)
self.toolBox_accuracy.setCurrentIndex(0)
self.toolBox_landCoverChange.setCurrentIndex(0)
self.toolBox_class_report.setCurrentIndex(0)
self.toolBox_cross_classification.setCurrentIndex(0)
self.toolBox_class_signature.setCurrentIndex(0)
self.band_calc_tabWidget.setCurrentIndex(0)
self.settings_tabWidget.setCurrentIndex(0)
QtCore.QMetaObject.connectSlotsByName(SemiAutomaticClassificationPlugin)
def retranslateUi(self, SemiAutomaticClassificationPlugin):
_translate = QtCore.QCoreApplication.translate
SemiAutomaticClassificationPlugin.setWindowTitle(_translate("SemiAutomaticClassificationPlugin", "Semi-Automatic Classification Plugin"))
self.f_filter_lineEdit.setPlaceholderText(_translate("SemiAutomaticClassificationPlugin", "Filter"))
__sortingEnabled = self.menu_treeWidget.isSortingEnabled()
self.menu_treeWidget.setSortingEnabled(False)
self.menu_treeWidget.topLevelItem(0).setText(0, _translate("SemiAutomaticClassificationPlugin", "Band set"))
self.menu_treeWidget.topLevelItem(1).setText(0, _translate("SemiAutomaticClassificationPlugin", "Basic tools"))
self.menu_treeWidget.topLevelItem(1).child(0).setText(0, _translate("SemiAutomaticClassificationPlugin", "Algorithm band weight"))
self.menu_treeWidget.topLevelItem(1).child(1).setText(0, _translate("SemiAutomaticClassificationPlugin", "Band set list"))
self.menu_treeWidget.topLevelItem(1).child(2).setText(0, _translate("SemiAutomaticClassificationPlugin", "Export signatures"))
self.menu_treeWidget.topLevelItem(1).child(3).setText(0, _translate("SemiAutomaticClassificationPlugin", "Import signatures"))
self.menu_treeWidget.topLevelItem(1).child(4).setText(0, _translate("SemiAutomaticClassificationPlugin", "LCS threshold"))
self.menu_treeWidget.topLevelItem(1).child(5).setText(0, _translate("SemiAutomaticClassificationPlugin", "Multiple ROI creation"))
self.menu_treeWidget.topLevelItem(1).child(6).setText(0, _translate("SemiAutomaticClassificationPlugin", "RGB list"))
self.menu_treeWidget.topLevelItem(1).child(7).setText(0, _translate("SemiAutomaticClassificationPlugin", "Signature threshold"))
self.menu_treeWidget.topLevelItem(2).setText(0, _translate("SemiAutomaticClassificationPlugin", "Download products"))
self.menu_treeWidget.topLevelItem(3).setText(0, _translate("SemiAutomaticClassificationPlugin", "Preprocessing"))
self.menu_treeWidget.topLevelItem(3).child(0).setText(0, _translate("SemiAutomaticClassificationPlugin", "ASTER"))
self.menu_treeWidget.topLevelItem(3).child(1).setText(0, _translate("SemiAutomaticClassificationPlugin", "GOES"))
self.menu_treeWidget.topLevelItem(3).child(2).setText(0, _translate("SemiAutomaticClassificationPlugin", "Landsat"))
self.menu_treeWidget.topLevelItem(3).child(3).setText(0, _translate("SemiAutomaticClassificationPlugin", "MODIS"))
self.menu_treeWidget.topLevelItem(3).child(4).setText(0, _translate("SemiAutomaticClassificationPlugin", "Sentinel-1"))
self.menu_treeWidget.topLevelItem(3).child(5).setText(0, _translate("SemiAutomaticClassificationPlugin", "Sentinel-2"))
self.menu_treeWidget.topLevelItem(3).child(6).setText(0, _translate("SemiAutomaticClassificationPlugin", "Sentinel-3"))
self.menu_treeWidget.topLevelItem(3).child(7).setText(0, _translate("SemiAutomaticClassificationPlugin", "Clip multiple rasters"))
self.menu_treeWidget.topLevelItem(3).child(8).setText(0, _translate("SemiAutomaticClassificationPlugin", "Cloud masking"))
self.menu_treeWidget.topLevelItem(3).child(9).setText(0, _translate("SemiAutomaticClassificationPlugin", "Mosaic band sets"))
self.menu_treeWidget.topLevelItem(3).child(10).setText(0, _translate("SemiAutomaticClassificationPlugin", "Neighbor pixels"))
self.menu_treeWidget.topLevelItem(3).child(11).setText(0, _translate("SemiAutomaticClassificationPlugin", "Reproject raster bands"))
self.menu_treeWidget.topLevelItem(3).child(12).setText(0, _translate("SemiAutomaticClassificationPlugin", "Split raster bands"))
self.menu_treeWidget.topLevelItem(3).child(13).setText(0, _translate("SemiAutomaticClassificationPlugin", "Stack raster bands"))
self.menu_treeWidget.topLevelItem(3).child(14).setText(0, _translate("SemiAutomaticClassificationPlugin", "Vector to raster"))
self.menu_treeWidget.topLevelItem(4).setText(0, _translate("SemiAutomaticClassificationPlugin", "Band processing"))
self.menu_treeWidget.topLevelItem(4).child(0).setText(0, _translate("SemiAutomaticClassificationPlugin", "Band combination"))
self.menu_treeWidget.topLevelItem(4).child(1).setText(0, _translate("SemiAutomaticClassificationPlugin", "Classification"))
self.menu_treeWidget.topLevelItem(4).child(2).setText(0, _translate("SemiAutomaticClassificationPlugin", "Clustering"))
self.menu_treeWidget.topLevelItem(4).child(3).setText(0, _translate("SemiAutomaticClassificationPlugin", "PCA"))
self.menu_treeWidget.topLevelItem(4).child(4).setText(0, _translate("SemiAutomaticClassificationPlugin", "Random forest"))
self.menu_treeWidget.topLevelItem(4).child(5).setText(0, _translate("SemiAutomaticClassificationPlugin", "Spectral distance"))
self.menu_treeWidget.topLevelItem(5).setText(0, _translate("SemiAutomaticClassificationPlugin", "Postprocessing"))
self.menu_treeWidget.topLevelItem(5).child(0).setText(0, _translate("SemiAutomaticClassificationPlugin", "Accuracy"))
self.menu_treeWidget.topLevelItem(5).child(1).setText(0, _translate("SemiAutomaticClassificationPlugin", "Classification dilation"))
self.menu_treeWidget.topLevelItem(5).child(2).setText(0, _translate("SemiAutomaticClassificationPlugin", "Classification erosion"))
self.menu_treeWidget.topLevelItem(5).child(3).setText(0, _translate("SemiAutomaticClassificationPlugin", "Classification report"))
self.menu_treeWidget.topLevelItem(5).child(4).setText(0, _translate("SemiAutomaticClassificationPlugin", "Classification to vector"))
self.menu_treeWidget.topLevelItem(5).child(5).setText(0, _translate("SemiAutomaticClassificationPlugin", "Classification sieve"))
self.menu_treeWidget.topLevelItem(5).child(6).setText(0, _translate("SemiAutomaticClassificationPlugin", "Class signature"))
self.menu_treeWidget.topLevelItem(5).child(7).setText(0, _translate("SemiAutomaticClassificationPlugin", "Cross classification"))
self.menu_treeWidget.topLevelItem(5).child(8).setText(0, _translate("SemiAutomaticClassificationPlugin", "Edit raster"))
self.menu_treeWidget.topLevelItem(5).child(9).setText(0, _translate("SemiAutomaticClassificationPlugin", "Land cover change"))
self.menu_treeWidget.topLevelItem(5).child(10).setText(0, _translate("SemiAutomaticClassificationPlugin", "Reclassification"))
self.menu_treeWidget.topLevelItem(5).child(11).setText(0, _translate("SemiAutomaticClassificationPlugin", "Zonal stat raster"))
self.menu_treeWidget.topLevelItem(6).setText(0, _translate("SemiAutomaticClassificationPlugin", "Band calc"))
self.menu_treeWidget.topLevelItem(7).setText(0, _translate("SemiAutomaticClassificationPlugin", "Batch"))
self.menu_treeWidget.topLevelItem(8).setText(0, _translate("SemiAutomaticClassificationPlugin", "Settings"))
self.menu_treeWidget.topLevelItem(8).child(0).setText(0, _translate("SemiAutomaticClassificationPlugin", "Debug"))
self.menu_treeWidget.topLevelItem(8).child(1).setText(0, _translate("SemiAutomaticClassificationPlugin", "Interface"))
self.menu_treeWidget.topLevelItem(8).child(2).setText(0, _translate("SemiAutomaticClassificationPlugin", "Processing setting"))
self.menu_treeWidget.topLevelItem(9).setText(0, _translate("SemiAutomaticClassificationPlugin", "User manual"))
self.menu_treeWidget.topLevelItem(10).setText(0, _translate("SemiAutomaticClassificationPlugin", "Help"))
self.menu_treeWidget.topLevelItem(11).setText(0, _translate("SemiAutomaticClassificationPlugin", "About"))
self.menu_treeWidget.topLevelItem(12).setText(0, _translate("SemiAutomaticClassificationPlugin", "Support the SCP"))
self.menu_treeWidget.setSortingEnabled(__sortingEnabled)
self.label_59.setText(_translate("SemiAutomaticClassificationPlugin", "Wavelength\n"
"quick settings"))
self.wavelength_sat_combo.setToolTip(_translate("SemiAutomaticClassificationPlugin", "<html><head/><body><p>Select a configuration for setting band center wavelengths</p></body></html>"))
self.label_60.setText(_translate("SemiAutomaticClassificationPlugin", "Wavelength \n"
"unit"))
self.unit_combo.setToolTip(_translate("SemiAutomaticClassificationPlugin", "<html><head/><body><p>Wavelength unit</p></body></html>"))
self.export_bandset_toolButton.setToolTip(_translate("SemiAutomaticClassificationPlugin", "<html><head/><body><p>Export band set to text file</p></body></html>"))
self.export_bandset_toolButton.setText(_translate("SemiAutomaticClassificationPlugin", "Plot"))
self.import_bandset_toolButton.setToolTip(_translate("SemiAutomaticClassificationPlugin", "<html><head/><body><p>Import band set from text file</p></body></html>"))
self.import_bandset_toolButton.setText(_translate("SemiAutomaticClassificationPlugin", "Plot"))
self.bandset_dateEdit.setDisplayFormat(_translate("SemiAutomaticClassificationPlugin", "yyyy-MM-dd"))
self.label_3.setText(_translate("SemiAutomaticClassificationPlugin", "Date"))
self.label_52.setText(_translate("SemiAutomaticClassificationPlugin", " Single band list"))
self.bands_filter_lineEdit.setToolTip(_translate("SemiAutomaticClassificationPlugin", "<html><head/><body><p>Filter</p></body></html>"))
self.bands_filter_lineEdit.setPlaceholderText(_translate("SemiAutomaticClassificationPlugin", "Filter"))
item = self.bands_tableWidget.horizontalHeaderItem(0)
item.setText(_translate("SemiAutomaticClassificationPlugin", "Band"))
self.toolButton_reload_3.setToolTip(_translate("SemiAutomaticClassificationPlugin", "<html><head/><body><p >Refresh list</p></body></html>"))
self.toolButton_reload_3.setText(_translate("SemiAutomaticClassificationPlugin", "Plot"))
self.select_all_bands_Button.setToolTip(_translate("SemiAutomaticClassificationPlugin", "<html><head/><body><p>Select all / Unselect all</p></body></html>"))
self.select_all_bands_Button.setText(_translate("SemiAutomaticClassificationPlugin", "Plot"))
self.add_raster_bands_Button.setToolTip(_translate("SemiAutomaticClassificationPlugin", "<html><head/><body><p>Add band to Band set</p></body></html>"))
self.add_raster_bands_Button.setText(_translate("SemiAutomaticClassificationPlugin", "Plot"))
self.label_53.setText(_translate("SemiAutomaticClassificationPlugin", " Band set definition"))
self.remove_toolButton.setToolTip(_translate("SemiAutomaticClassificationPlugin", "<html><head/><body><p >Delete row</p></body></html>"))
self.remove_toolButton.setText(_translate("SemiAutomaticClassificationPlugin", "Plot"))
self.clear_bandset_toolButton.setToolTip(_translate("SemiAutomaticClassificationPlugin", "<html><head/><body><p><span >Reset</span></p></body></html>"))
self.clear_bandset_toolButton.setText(_translate("SemiAutomaticClassificationPlugin", "Import library"))
self.sort_by_name_toolButton.setToolTip(_translate("SemiAutomaticClassificationPlugin", "<html><head/><body><p>Sort bands by name (priority to ending number)</p></body></html>"))
self.sort_by_name_toolButton.setText(_translate("SemiAutomaticClassificationPlugin", "Import library"))
self.move_up_toolButton.setToolTip(_translate("SemiAutomaticClassificationPlugin", "<html><head/><body><p>Move highlighted band up</p></body></html>"))
self.move_up_toolButton.setText(_translate("SemiAutomaticClassificationPlugin", "Import library"))
self.move_down_toolButton.setToolTip(_translate("SemiAutomaticClassificationPlugin", "<html><head/><body><p>Move highlighted band down</p></body></html>"))
self.move_down_toolButton.setText(_translate("SemiAutomaticClassificationPlugin", "Import library"))
self.add_band_set_toolButton.setToolTip(_translate("SemiAutomaticClassificationPlugin", "<html><head/><body><p>Add a new band set</p></body></html>"))
self.add_band_set_toolButton.setText(_translate("SemiAutomaticClassificationPlugin", "Plot"))
self.virtual_raster_bandset_checkBox.setToolTip(_translate("SemiAutomaticClassificationPlugin", "<html><head/><body><p>Create a virtual raster of active band set</p></body></html>"))
self.virtual_raster_bandset_checkBox.setText(_translate("SemiAutomaticClassificationPlugin", "Create virtual raster of band set"))
self.band_calc_checkBox.setToolTip(_translate("SemiAutomaticClassificationPlugin", "<html><head/><body><p>Calculate expression in Band calc</p></body></html>"))
self.band_calc_checkBox.setText(_translate("SemiAutomaticClassificationPlugin", "Band calc expressions"))
self.stack_raster_bandset_checkBox.setToolTip(_translate("SemiAutomaticClassificationPlugin", "<html><head/><body><p>Create a .tif raster stacking the bands of the active band set</p></body></html>"))
self.stack_raster_bandset_checkBox.setText(_translate("SemiAutomaticClassificationPlugin", "Create raster of band set \n"
"(stack bands)"))
self.overview_raster_bandset_checkBox.setToolTip(_translate("SemiAutomaticClassificationPlugin", "<html><head/><body><p>Build band overviews (external pyramids) of active band set for faster visualization</p></body></html>"))
self.overview_raster_bandset_checkBox.setText(_translate("SemiAutomaticClassificationPlugin", "Build band overviews"))
self.label_94.setText(_translate("SemiAutomaticClassificationPlugin", " Band set tools"))
self.band_set_process_toolButton.setToolTip(_translate("SemiAutomaticClassificationPlugin", "<html><head/><body><p><span >Run</span></p></body></html>"))
self.band_set_process_toolButton.setText(_translate("SemiAutomaticClassificationPlugin", " RUN"))
self.toolButton_reload.setToolTip(_translate("SemiAutomaticClassificationPlugin", "<html><head/><body><p >Refresh list</p></body></html>"))
self.toolButton_reload.setText(_translate("SemiAutomaticClassificationPlugin", "Plot"))
self.label_39.setText(_translate("SemiAutomaticClassificationPlugin", " Multiband image list"))
self.toolButton_input_raster.setToolTip(_translate("SemiAutomaticClassificationPlugin", "<html><head/><body><p><span >Open a file</span></p></body></html>"))
self.image_raster_name_combo.setToolTip(_translate("SemiAutomaticClassificationPlugin", "<html><head/><body><p>Select a multiband image</p></body></html>"))
self.SCP_tabs.setTabText(self.SCP_tabs.indexOf(self.tab_band_set), _translate("SemiAutomaticClassificationPlugin", "Band set"))
self.label_126.setText(_translate("SemiAutomaticClassificationPlugin", "RGB list"))
self.sort_by_name_toolButton_2.setToolTip(_translate("SemiAutomaticClassificationPlugin", "<html><head/><body><p>Sort RGB automatically</p></body></html>"))
self.sort_by_name_toolButton_2.setText(_translate("SemiAutomaticClassificationPlugin", "Import library"))
self.move_down_toolButton_3.setToolTip(_translate("SemiAutomaticClassificationPlugin", "<html><head/><body><p>Move highlighted RGB down</p></body></html>"))
self.move_down_toolButton_3.setText(_translate("SemiAutomaticClassificationPlugin", "Import library"))
self.move_up_toolButton_3.setToolTip(_translate("SemiAutomaticClassificationPlugin", "<html><head/><body><p>Move highlighted RGB up</p></body></html>"))
self.move_up_toolButton_3.setText(_translate("SemiAutomaticClassificationPlugin", "Import library"))
self.add_RGB_pushButton.setToolTip(_translate("SemiAutomaticClassificationPlugin", "<html><head/><body><p >Add row</p></body></html>"))
self.add_RGB_pushButton.setText(_translate("SemiAutomaticClassificationPlugin", "Plot"))
self.export_RGB_List_toolButton.setToolTip(_translate("SemiAutomaticClassificationPlugin", "<html><head/><body><p>Export RGB list to text file</p></body></html>"))
self.export_RGB_List_toolButton.setText(_translate("SemiAutomaticClassificationPlugin", "Plot"))
self.import_RGB_List_toolButton.setToolTip(_translate("SemiAutomaticClassificationPlugin", "<html><head/><body><p>Import RGB list from text file</p></body></html>"))
self.import_RGB_List_toolButton.setText(_translate("SemiAutomaticClassificationPlugin", "Plot"))
self.clear_RGB_list_toolButton.setToolTip(_translate("SemiAutomaticClassificationPlugin", "<html><head/><body><p><span >Reset</span></p></body></html>"))
self.clear_RGB_list_toolButton.setText(_translate("SemiAutomaticClassificationPlugin", "Import library"))
self.remove_RGB_toolButton.setToolTip(_translate("SemiAutomaticClassificationPlugin", "<html><head/><body><p >Delete row</p></body></html>"))
self.remove_RGB_toolButton.setText(_translate("SemiAutomaticClassificationPlugin", "Plot"))
item = self.RGB_tableWidget.horizontalHeaderItem(0)
item.setText(_translate("SemiAutomaticClassificationPlugin", "RGB"))
self.label_196.setText(_translate("SemiAutomaticClassificationPlugin", "Automatic RGB"))
self.label_192.setText(_translate("SemiAutomaticClassificationPlugin", "Band combinations"))
self.all_RGB_list_toolButton.setToolTip(_translate("SemiAutomaticClassificationPlugin", "<html><head/><body><p>Add all combinations of bands</p></body></html>"))
self.all_RGB_list_toolButton.setText(_translate("SemiAutomaticClassificationPlugin", "Import library"))
self.tabWidget_5.setTabText(self.tabWidget_5.indexOf(self.tab_RGB), _translate("SemiAutomaticClassificationPlugin", "RGB list"))
self.label_208.setText(_translate("SemiAutomaticClassificationPlugin", "Band set list"))
self.band_set_filter_lineEdit.setToolTip(_translate("SemiAutomaticClassificationPlugin", "<html><head/><body><p>Filter</p></body></html>"))
self.band_set_filter_lineEdit.setPlaceholderText(_translate("SemiAutomaticClassificationPlugin", "Filter"))
item = self.band_set_list_tableWidget.horizontalHeaderItem(0)
item.setText(_translate("SemiAutomaticClassificationPlugin", "Number"))
item = self.band_set_list_tableWidget.horizontalHeaderItem(1)
item.setText(_translate("SemiAutomaticClassificationPlugin", "Bands"))
item = self.band_set_list_tableWidget.horizontalHeaderItem(2)
item.setText(_translate("SemiAutomaticClassificationPlugin", "Date"))
self.move_down_toolButton_4.setToolTip(_translate("SemiAutomaticClassificationPlugin", "<html><head/><body><p>Move highlighted Band sets down</p></body></html>"))
self.move_down_toolButton_4.setText(_translate("SemiAutomaticClassificationPlugin", "Import library"))
self.move_up_toolButton_4.setToolTip(_translate("SemiAutomaticClassificationPlugin", "<html><head/><body><p>Move highlighted Band sets up</p></body></html>"))
self.move_up_toolButton_4.setText(_translate("SemiAutomaticClassificationPlugin", "Import library"))
self.rgb_toolButton.setToolTip(_translate("SemiAutomaticClassificationPlugin", "<html><head/><body><p >Add row</p></body></html>"))
self.rgb_toolButton.setText(_translate("SemiAutomaticClassificationPlugin", "Plot"))
self.add_bandset_pushButton.setToolTip(_translate("SemiAutomaticClassificationPlugin", "<html><head/><body><p >Add row</p></body></html>"))
self.add_bandset_pushButton.setText(_translate("SemiAutomaticClassificationPlugin", "Plot"))
self.export_bandset_List_toolButton.setToolTip(_translate("SemiAutomaticClassificationPlugin", "<html><head/><body><p>Export Band set list to file</p></body></html>"))
self.export_bandset_List_toolButton.setText(_translate("SemiAutomaticClassificationPlugin", "Plot"))
self.import_bandset_List_toolButton.setToolTip(_translate("SemiAutomaticClassificationPlugin", "<html><head/><body><p>Import Band set list from file</p></body></html>"))
self.import_bandset_List_toolButton.setText(_translate("SemiAutomaticClassificationPlugin", "Plot"))
self.remove_bandset_toolButton.setToolTip(_translate("SemiAutomaticClassificationPlugin", "<html><head/><body><p >Delete row</p></body></html>"))
self.remove_bandset_toolButton.setText(_translate("SemiAutomaticClassificationPlugin", "Plot"))
self.sort_by_date.setToolTip(_translate("SemiAutomaticClassificationPlugin", "<html><head/><body><p>Sort band sets by date</p></body></html>"))
self.sort_by_date.setText(_translate("SemiAutomaticClassificationPlugin", "Import library"))
self.tabWidget_5.setTabText(self.tabWidget_5.indexOf(self.tab_band_set_list), _translate("SemiAutomaticClassificationPlugin", "Band set list"))
self.label_79.setText(_translate("SemiAutomaticClassificationPlugin", "Algorithm band weight"))
self.reset_weights_pushButton.setToolTip(_translate("SemiAutomaticClassificationPlugin", "<html><head/><body><p >Reset</p></body></html>\n"
""))
self.set_weight_value_pushButton.setToolTip(_translate("SemiAutomaticClassificationPlugin", "<html><head/><body><p >Set</p></body></html>\n"
""))
self.label_131.setText(_translate("SemiAutomaticClassificationPlugin", "Set weight"))
self.weight_doubleSpinBox.setToolTip(_translate("SemiAutomaticClassificationPlugin", "<html><head/><body><p>Set a value</p></body></html>"))
self.label_93.setText(_translate("SemiAutomaticClassificationPlugin", "Automatic weight"))
self.tabWidget_5.setTabText(self.tabWidget_5.indexOf(self.tab_algorithm_weight), _translate("SemiAutomaticClassificationPlugin", "Algorithm band weight"))
self.point_distance_spinBox.setToolTip(_translate("SemiAutomaticClassificationPlugin", "<html><head/><body><p align=\"justify\">Minimum distance between points</p></body></html>"))
self.point_grid_spinBox.setToolTip(_translate("SemiAutomaticClassificationPlugin", "<html><head/><body><p align=\"justify\">Size of a grid cell within points are created randomly</p></body></html>"))
self.label_48.setText(_translate("SemiAutomaticClassificationPlugin", " Create random points"))
self.label_139.setText(_translate("SemiAutomaticClassificationPlugin", "Create points"))
self.label_19.setText(_translate("SemiAutomaticClassificationPlugin", "Number of points"))
self.point_number_spinBox.setToolTip(_translate("SemiAutomaticClassificationPlugin", "<html><head/><body><p align=\"justify\">Number of points created randomly</p></body></html>"))
self.add_random_point_pushButton.setToolTip(_translate("SemiAutomaticClassificationPlugin", "<html><head/><body><p>Create points</p></body></html>"))
self.point_distance_checkBox.setToolTip(_translate("SemiAutomaticClassificationPlugin", "<html><head/><body><p>Create random points with a minimum distance</p></body></html>"))
self.point_distance_checkBox.setText(_translate("SemiAutomaticClassificationPlugin", "min distance"))
self.point_grid_checkBox.setToolTip(_translate("SemiAutomaticClassificationPlugin", "<html><head/><body><p>Create random points inside each cell of a grid with this size</p></body></html>"))
self.point_grid_checkBox.setText(_translate("SemiAutomaticClassificationPlugin", "inside grid"))
self.stratified_point_checkBox.setToolTip(_translate("SemiAutomaticClassificationPlugin", "<html><head/><body><p>Create stratified random points</p></body></html>"))
self.stratified_point_checkBox.setText(_translate("SemiAutomaticClassificationPlugin", "stratified for the values"))
self.stratified_lineEdit.setToolTip(_translate("SemiAutomaticClassificationPlugin", "<html><head/><body><p>Enter one or more rules separated by semicolon (e.g. raster > 0; raster == 1 )</p></body></html>"))
self.stratified_lineEdit.setText(_translate("SemiAutomaticClassificationPlugin", "raster > 0"))
self.band_set_comb_spinBox_10.setToolTip(_translate("SemiAutomaticClassificationPlugin", "<html><head/><body><p>Band set number</p></body></html>"))
self.label_25.setText(_translate("SemiAutomaticClassificationPlugin", "of first band of band set"))
self.label_47.setText(_translate("SemiAutomaticClassificationPlugin", " Point coordinates and ROI definition"))
self.point_tableWidget.setSortingEnabled(False)
item = self.point_tableWidget.horizontalHeaderItem(0)
item.setText(_translate("SemiAutomaticClassificationPlugin", "X"))
item = self.point_tableWidget.horizontalHeaderItem(1)
item.setText(_translate("SemiAutomaticClassificationPlugin", "Y"))
item = self.point_tableWidget.horizontalHeaderItem(2)
item.setText(_translate("SemiAutomaticClassificationPlugin", "MC ID"))
item = self.point_tableWidget.horizontalHeaderItem(3)
item.setText(_translate("SemiAutomaticClassificationPlugin", "MC Name"))
item = self.point_tableWidget.horizontalHeaderItem(4)
item.setText(_translate("SemiAutomaticClassificationPlugin", "C ID"))
item = self.point_tableWidget.horizontalHeaderItem(5)
item.setText(_translate("SemiAutomaticClassificationPlugin", "C Name"))
item = self.point_tableWidget.horizontalHeaderItem(6)
item.setText(_translate("SemiAutomaticClassificationPlugin", "Min"))
item = self.point_tableWidget.horizontalHeaderItem(7)
item.setText(_translate("SemiAutomaticClassificationPlugin", "Max"))
item = self.point_tableWidget.horizontalHeaderItem(8)
item.setText(_translate("SemiAutomaticClassificationPlugin", "Dist"))
item = self.point_tableWidget.horizontalHeaderItem(9)
item.setText(_translate("SemiAutomaticClassificationPlugin", "Rapid ROI band"))
self.add_point_pushButton.setToolTip(_translate("SemiAutomaticClassificationPlugin", "<html><head/><body><p >Add row</p></body></html>"))
self.add_point_pushButton.setText(_translate("SemiAutomaticClassificationPlugin", "Plot"))
self.remove_point_pushButton.setToolTip(_translate("SemiAutomaticClassificationPlugin", "<html><head/><body><p >Delete row</p></body></html>"))
self.remove_point_pushButton.setText(_translate("SemiAutomaticClassificationPlugin", "Plot"))
self.export_point_list_pushButton.setToolTip(_translate("SemiAutomaticClassificationPlugin", "<html><head/><body><p >Export point list to text file</p></body></html>"))
self.export_point_list_pushButton.setText(_translate("SemiAutomaticClassificationPlugin", "Plot"))
self.import_point_list_pushButton.setToolTip(_translate("SemiAutomaticClassificationPlugin", "<html><head/><body><p >Import point list from text file</p></body></html>"))
self.import_point_list_pushButton.setText(_translate("SemiAutomaticClassificationPlugin", "Plot"))
self.signature_checkBox2.setToolTip(_translate("SemiAutomaticClassificationPlugin", "<html><head/><body><p>Add ROI spectral signatures to signature list</p></body></html>"))
self.signature_checkBox2.setText(_translate("SemiAutomaticClassificationPlugin", "Calculate sig."))
self.label_159.setText(_translate("SemiAutomaticClassificationPlugin", " Run"))
self.save_point_rois_pushButton.setToolTip(_translate("SemiAutomaticClassificationPlugin", "<html><head/><body><p><span >Run</span></p></body></html>"))
self.save_point_rois_pushButton.setText(_translate("SemiAutomaticClassificationPlugin", " RUN"))
self.tabWidget_5.setTabText(self.tabWidget_5.indexOf(self.tab_multiple_ROI), _translate("SemiAutomaticClassificationPlugin", "Multiple ROI creation"))
self.usgs_chapter_comboBox.setToolTip(_translate("SemiAutomaticClassificationPlugin", "<html><head/><body><p>Select a chapter</p></body></html>"))
self.usgs_library_comboBox.setToolTip(_translate("SemiAutomaticClassificationPlugin", "<html><head/><body><p>Select a library</p></body></html>"))
self.label_123.setText(_translate("SemiAutomaticClassificationPlugin", "<html><head/><body><p>Select a chapter</p></body></html>"))
self.label_124.setText(_translate("SemiAutomaticClassificationPlugin", "<html><head/><body><p>Select a library</p></body></html>"))
self.label_130.setText(_translate("SemiAutomaticClassificationPlugin", "Import spectral library"))
self.add_usgs_library_pushButton.setToolTip(_translate("SemiAutomaticClassificationPlugin", "<html><head/><body><p>Import spectral library</p></body></html>"))
self.USGS_library_textBrowser.setHtml(_translate("SemiAutomaticClassificationPlugin", "<!DOCTYPE HTML PUBLIC \"-//W3C//DTD HTML 4.0//EN\" \"http://www.w3.org/TR/REC-html40/strict.dtd\">\n"
"<html><head><meta name=\"qrichtext\" content=\"1\" /><style type=\"text/css\">\n"
"p, li { white-space: pre-wrap; }\n"
"</style></head><body style=\" font-family:\'Sans\'; font-size:10pt; font-weight:400; font-style:normal;\">\n"
"<p style=\"-qt-paragraph-type:empty; margin-top:0px; margin-bottom:0px; margin-left:0px; margin-right:0px; -qt-block-indent:0; text-indent:0px; font-family:\'Droid Sans\'; font-size:9pt;\"><br /></p></body></html>"))
self.label.setText(_translate("SemiAutomaticClassificationPlugin", "<html><head/><body><p>USGS Spectral Library Version 7 downloaded from <a href=\"https://crustal.usgs.gov/speclab/QueryAll07a.php\"><span style=\" text-decoration: underline; color:#0000ff;\">https://crustal.usgs.gov/speclab/QueryAll07a.php</span></a>.<br/><span style=\" font-weight:600;\">Reference</span>: Kokaly, R.F., Clark, R.N., Swayze, G.A., Livo, K.E., Hoefen, T.M., Pearson, N.C., Wise, R.A., Benzel, W.M., Lowers, H.A., Driscoll, R.L., and Klein, A.J., 2017, USGS Spectral Library Version 7: U.S. Geological Survey Data Series 1035, 61 p., https://doi.org/10.3133/ds1035.</p></body></html>"))
self.label_129.setText(_translate("SemiAutomaticClassificationPlugin", " Library Description (requires internet connection)"))
self.toolBox_4.setItemText(self.toolBox_4.indexOf(self.page_8), _translate("SemiAutomaticClassificationPlugin", "Download USGS Spectral Library"))
self.label_9.setText(_translate("SemiAutomaticClassificationPlugin", "<html><head/><body><p>Select a file: SCP file (*.scp) ; USGS library (*.asc) ; ASTER library (*.txt) ; CSV (*.csv)</p></body></html>"))
self.open_library_pushButton.setToolTip(_translate("SemiAutomaticClassificationPlugin", "<html><head/><body><p><span >Open a file</span></p></body></html>"))
self.toolBox_4.setItemText(self.toolBox_4.indexOf(self.page_6), _translate("SemiAutomaticClassificationPlugin", "Import library file"))
self.open_shapefile_pushButton.setToolTip(_translate("SemiAutomaticClassificationPlugin", "<html><head/><body><p>Open a file</p></body></html>"))
self.label_120.setText(_translate("SemiAutomaticClassificationPlugin", "<html><head/><body><p>Select a vector (*.shp;*.gpkg)</p></body></html>"))
self.C_ID_combo.setToolTip(_translate("SemiAutomaticClassificationPlugin", "<html><head/><body><p>C ID field</p></body></html>"))
self.MC_ID_combo.setToolTip(_translate("SemiAutomaticClassificationPlugin", "<html><head/><body><p>MC ID field</p></body></html>"))
self.MC_Info_combo.setToolTip(_translate("SemiAutomaticClassificationPlugin", "<html><head/><body><p>MC Name field</p></body></html>"))
self.label_99.setText(_translate("SemiAutomaticClassificationPlugin", "C Name field"))
self.C_Info_combo.setToolTip(_translate("SemiAutomaticClassificationPlugin", "<html><head/><body><p>C Name field</p></body></html>"))
self.label_119.setText(_translate("SemiAutomaticClassificationPlugin", " Vector fields"))
self.MC_ID_combo_2.setText(_translate("SemiAutomaticClassificationPlugin", "C ID field"))
self.label_121.setText(_translate("SemiAutomaticClassificationPlugin", "MC ID field"))
self.label_122.setText(_translate("SemiAutomaticClassificationPlugin", "MC Name field"))
self.label_2.setText(_translate("SemiAutomaticClassificationPlugin", " Import vector"))
self.signature_checkBox_2.setToolTip(_translate("SemiAutomaticClassificationPlugin", "<html><head/><body><p>Add ROI spectral signature to signature list</p></body></html>"))
self.signature_checkBox_2.setText(_translate("SemiAutomaticClassificationPlugin", "Calculate sig."))
self.import_shapefile_pushButton.setToolTip(_translate("SemiAutomaticClassificationPlugin", "<html><head/><body><p>Import vector</p></body></html>"))
self.import_shapefile_pushButton.setText(_translate("SemiAutomaticClassificationPlugin", "Import library"))
self.toolBox_4.setItemText(self.toolBox_4.indexOf(self.page_9), _translate("SemiAutomaticClassificationPlugin", "Import vector"))
self.tabWidget_5.setTabText(self.tabWidget_5.indexOf(self.tab_Import), _translate("SemiAutomaticClassificationPlugin", "Import signatures"))
self.label_97.setText(_translate("SemiAutomaticClassificationPlugin", "<html><head/><body><p>Export as SCP file (*.scp)</p></body></html>"))
self.export_SCP_pushButton.setToolTip(_translate("SemiAutomaticClassificationPlugin", "<html><head/><body><p >Export highlighted spectral signatures</p></body></html>\n"
""))
self.export_CSV_library_toolButton.setToolTip(_translate("SemiAutomaticClassificationPlugin", "<html><head/><body><p>Select a directory where highlighted spectral signatures are saved as .csv</p></body></html>"))
self.export_CSV_library_toolButton.setText(_translate("SemiAutomaticClassificationPlugin", "Import library"))
self.label_96.setText(_translate("SemiAutomaticClassificationPlugin", "Export "))
self.label_222.setText(_translate("SemiAutomaticClassificationPlugin", "<html><head/><body><p>Export as shapefile (*.shp) or geopackage (*.gpkg)</p></body></html>"))
self.label_20.setText(_translate("SemiAutomaticClassificationPlugin", "<html><head/><body><p>Export as CSV file (.csv)</p></body></html>"))
self.export_SHP_pushButton.setToolTip(_translate("SemiAutomaticClassificationPlugin", "<html><head/><body><p >Export highlighted spectral signatures</p></body></html>\n"
""))
self.tabWidget_5.setTabText(self.tabWidget_5.indexOf(self.tab_export), _translate("SemiAutomaticClassificationPlugin", "Export signatures"))
self.reset_threshold_pushButton.setToolTip(_translate("SemiAutomaticClassificationPlugin", "<html><head/><body><p >Reset</p></body></html>\n"
""))
self.signature_threshold_tableWidget.setSortingEnabled(True)
item = self.signature_threshold_tableWidget.horizontalHeaderItem(0)
item.setText(_translate("SemiAutomaticClassificationPlugin", "MC ID"))
item = self.signature_threshold_tableWidget.horizontalHeaderItem(1)
item.setText(_translate("SemiAutomaticClassificationPlugin", "MC Name"))
item = self.signature_threshold_tableWidget.horizontalHeaderItem(2)
item.setText(_translate("SemiAutomaticClassificationPlugin", "C ID"))
item = self.signature_threshold_tableWidget.horizontalHeaderItem(3)
item.setText(_translate("SemiAutomaticClassificationPlugin", "C Name"))
item = self.signature_threshold_tableWidget.horizontalHeaderItem(4)
item.setText(_translate("SemiAutomaticClassificationPlugin", "MD Threshold"))
item = self.signature_threshold_tableWidget.horizontalHeaderItem(5)
item.setText(_translate("SemiAutomaticClassificationPlugin", "ML Threshold"))
item = self.signature_threshold_tableWidget.horizontalHeaderItem(6)
item.setText(_translate("SemiAutomaticClassificationPlugin", "SAM Threshold"))
self.label_80.setText(_translate("SemiAutomaticClassificationPlugin", " Signature threshold"))
self.label_85.setText(_translate("SemiAutomaticClassificationPlugin", "Set threshold = σ *"))
self.multiplicative_threshold_doubleSpinBox.setToolTip(_translate("SemiAutomaticClassificationPlugin", "<html><head/><body><p>Set a value that will be multiplied by standard deviation</p></body></html>"))
self.automatic_threshold_pushButton.setToolTip(_translate("SemiAutomaticClassificationPlugin", "<html><head/><body><p>Set automatic threshold σ</p></body></html>"))
self.label_132.setText(_translate("SemiAutomaticClassificationPlugin", "Set threshold"))
self.threshold_doubleSpinBox.setToolTip(_translate("SemiAutomaticClassificationPlugin", "<html><head/><body><p>Set a value</p></body></html>"))
self.set_threshold_value_pushButton.setToolTip(_translate("SemiAutomaticClassificationPlugin", "<html><head/><body><p >Set</p></body></html>\n"
""))
self.label_88.setText(_translate("SemiAutomaticClassificationPlugin", " Automatic thresholds"))
self.tabWidget_5.setTabText(self.tabWidget_5.indexOf(self.tab_threshold), _translate("SemiAutomaticClassificationPlugin", "Signature threshold"))
self.LCS_tableWidget.setSortingEnabled(True)
item = self.LCS_tableWidget.horizontalHeaderItem(0)
item.setText(_translate("SemiAutomaticClassificationPlugin", "MC ID"))
item = self.LCS_tableWidget.horizontalHeaderItem(1)
item.setText(_translate("SemiAutomaticClassificationPlugin", "MC Name"))
item = self.LCS_tableWidget.horizontalHeaderItem(2)
item.setText(_translate("SemiAutomaticClassificationPlugin", "C ID"))
item = self.LCS_tableWidget.horizontalHeaderItem(3)
item.setText(_translate("SemiAutomaticClassificationPlugin", "C Name"))
item = self.LCS_tableWidget.horizontalHeaderItem(4)
item.setText(_translate("SemiAutomaticClassificationPlugin", "Color [overlap MC_ID-C_ID]"))
self.label_86.setText(_translate("SemiAutomaticClassificationPlugin", " LC Signature threshold"))
self.signature_spectral_plot_toolButton_2.setToolTip(_translate("SemiAutomaticClassificationPlugin", "<html><head/><body><p >Add highlighted signatures to spectral signature plot</p></body></html>"))
self.signature_spectral_plot_toolButton_2.setText(_translate("SemiAutomaticClassificationPlugin", "Plot"))
self.label_102.setText(_translate("SemiAutomaticClassificationPlugin", "Min Max"))
self.set_min_max_Button.setToolTip(_translate("SemiAutomaticClassificationPlugin", "<html><head/><body><p>Set automatic threshold Min Max</p></body></html>"))
self.label_101.setText(_translate("SemiAutomaticClassificationPlugin", "σ *"))
self.multiplicative_threshold_doubleSpinBox_2.setToolTip(_translate("SemiAutomaticClassificationPlugin", "<html><head/><body><p>Set a value that will be multiplied by standard deviation</p></body></html>"))
self.automatic_threshold_pushButton_2.setToolTip(_translate("SemiAutomaticClassificationPlugin", "<html><head/><body><p>Set automatic threshold σ</p></body></html>"))
self.label_89.setText(_translate("SemiAutomaticClassificationPlugin", "From pixel"))
self.LCS_pointerButton.setToolTip(_translate("SemiAutomaticClassificationPlugin", "<html><head/><body><p>Activate pointer for setting thresholds from pixel</p></body></html>"))
self.LCS_include_checkBox.setToolTip(_translate("SemiAutomaticClassificationPlugin", "<html><head/><body><p>If checked, signature threshold is extended to include pixel signature</p></body></html>"))
self.LCS_cut_checkBox.setToolTip(_translate("SemiAutomaticClassificationPlugin", "<html><head/><body><p>If checked, signature threshold is reduced to exclude pixel signature</p></body></html>"))
self.label_178.setText(_translate("SemiAutomaticClassificationPlugin", "From ROI"))
self.LCS_ROI_button.setToolTip(_translate("SemiAutomaticClassificationPlugin", "<html><head/><body><p>Set thresholds from temporary ROI</p></body></html>"))
self.label_125.setText(_translate("SemiAutomaticClassificationPlugin", "Automatic thresholds"))
self.tabWidget_5.setTabText(self.tabWidget_5.indexOf(self.tab_LCS_threshold), _translate("SemiAutomaticClassificationPlugin", "LCS threshold"))
self.SCP_tabs.setTabText(self.SCP_tabs.indexOf(self.tab_basic_tools), _translate("SemiAutomaticClassificationPlugin", "Basic tools"))
self.remember_user_checkBox_2.setToolTip(_translate("SemiAutomaticClassificationPlugin", "<html><head/><body><p>If checked, remember user name and password locally in QGIS</p></body></html>"))
self.remember_user_checkBox_2.setText(_translate("SemiAutomaticClassificationPlugin", "remember"))
self.password_usgs_lineEdit.setToolTip(_translate("SemiAutomaticClassificationPlugin", "<html><head/><body><p>Password</p></body></html>"))
self.password_scihub_label_3.setText(_translate("SemiAutomaticClassificationPlugin", "Password"))
self.label_180.setText(_translate("SemiAutomaticClassificationPlugin", "<html><head/><body><p>Login Landsat (<a href=\"https://ers.cr.usgs.gov\"><span style=\" text-decoration: underline; color:#ffffff;\">https://ers.cr.usgs.gov</span></a>)</p></body></html>"))
self.user_usgs_lineEdit.setToolTip(_translate("SemiAutomaticClassificationPlugin", "<html><head/><body><p>User name</p></body></html>"))
self.user_scihub_label_2.setText(_translate("SemiAutomaticClassificationPlugin", "User"))
self.remember_user_checkBox_3.setToolTip(_translate("SemiAutomaticClassificationPlugin", "<html><head/><body><p>If checked, remember user name and password locally in QGIS</p></body></html>"))
self.remember_user_checkBox_3.setText(_translate("SemiAutomaticClassificationPlugin", "remember"))
self.password_usgs_lineEdit_2.setToolTip(_translate("SemiAutomaticClassificationPlugin", "<html><head/><body><p>Password</p></body></html>"))
self.password_scihub_label_4.setText(_translate("SemiAutomaticClassificationPlugin", "Password"))
self.label_191.setText(_translate("SemiAutomaticClassificationPlugin", "<html><head/><body><p>Login ASTER and MODIS (<a href=\"https://urs.earthdata.nasa.gov\"><span style=\" text-decoration: underline; color:#ffffff;\">https://urs.earthdata.nasa.gov</span></a>)</p></body></html>"))
self.user_usgs_lineEdit_2.setToolTip(_translate("SemiAutomaticClassificationPlugin", "<html><head/><body><p>User name</p></body></html>"))
self.user_scihub_label_3.setText(_translate("SemiAutomaticClassificationPlugin", "User"))
self.user_scihub_lineEdit.setToolTip(_translate("SemiAutomaticClassificationPlugin", "<html><head/><body><p>User name</p></body></html>"))
self.password_scihub_label.setText(_translate("SemiAutomaticClassificationPlugin", "Password"))
self.label_147.setText(_translate("SemiAutomaticClassificationPlugin", "<html><head/><body><p>Login Sentinels</p></body></html>"))
self.remember_user_checkBox.setToolTip(_translate("SemiAutomaticClassificationPlugin", "<html><head/><body><p>If checked, remember user name and password locally in QGIS</p></body></html>"))
self.remember_user_checkBox.setText(_translate("SemiAutomaticClassificationPlugin", "remember"))
self.user_scihub_label.setText(_translate("SemiAutomaticClassificationPlugin", "User"))
self.password_scihub_lineEdit.setToolTip(_translate("SemiAutomaticClassificationPlugin", "<html><head/><body><p>Password</p></body></html>"))
self.password_scihub_label_2.setText(_translate("SemiAutomaticClassificationPlugin", "Service"))
self.sentinel_service_lineEdit.setToolTip(_translate("SemiAutomaticClassificationPlugin", "<html><head/><body><p>Service</p></body></html>"))
self.reset_sentinel_service_toolButton.setToolTip(_translate("SemiAutomaticClassificationPlugin", "<html><head/><body><p><span >Reset</span></p></body></html>"))
self.reset_sentinel_service_toolButton.setText(_translate("SemiAutomaticClassificationPlugin", "Import library"))
self.sentinel2_alternative_search_checkBox.setToolTip(_translate("SemiAutomaticClassificationPlugin", "<html><head/><body><p>If checked, use alternative search for Sentinel-2 (no authentication required)</p></body></html>"))
self.sentinel2_alternative_search_checkBox.setText(_translate("SemiAutomaticClassificationPlugin", "Use alternative search for Sentinel-2 (no authentication required)"))
self.tabWidget_3.setTabText(self.tabWidget_3.indexOf(self.tab_login), _translate("SemiAutomaticClassificationPlugin", "Login data"))
self.remove_image_toolButton.setToolTip(_translate("SemiAutomaticClassificationPlugin", "<html><head/><body><p >Delete row</p></body></html>"))
self.remove_image_toolButton.setText(_translate("SemiAutomaticClassificationPlugin", "Plot"))
self.toolButton_display.setToolTip(_translate("SemiAutomaticClassificationPlugin", "<html><head/><body><p>Display preview of highlighted images in map</p></body></html>"))
self.toolButton_display.setText(_translate("SemiAutomaticClassificationPlugin", "Plot"))
self.clear_table_toolButton.setToolTip(_translate("SemiAutomaticClassificationPlugin", "<html><head/><body><p><span >Reset</span></p></body></html>"))
self.clear_table_toolButton.setText(_translate("SemiAutomaticClassificationPlugin", "Import library"))
self.export_table_pushButton.setToolTip(_translate("SemiAutomaticClassificationPlugin", "<html><head/><body><p>Export table to text file</p></body></html>"))
self.export_table_pushButton.setText(_translate("SemiAutomaticClassificationPlugin", "Plot"))
self.import_table_pushButton.setToolTip(_translate("SemiAutomaticClassificationPlugin", "<html><head/><body><p>Import table from text file</p></body></html>"))
self.import_table_pushButton.setText(_translate("SemiAutomaticClassificationPlugin", "Plot"))
self.image_preview_label.setText(_translate("SemiAutomaticClassificationPlugin", "Preview"))
self.download_images_tableWidget.setSortingEnabled(True)
item = self.download_images_tableWidget.horizontalHeaderItem(0)
item.setText(_translate("SemiAutomaticClassificationPlugin", "Product"))
item = self.download_images_tableWidget.horizontalHeaderItem(1)
item.setText(_translate("SemiAutomaticClassificationPlugin", "ProductID"))
item = self.download_images_tableWidget.horizontalHeaderItem(2)
item.setText(_translate("SemiAutomaticClassificationPlugin", "AcquisitionDate"))
item = self.download_images_tableWidget.horizontalHeaderItem(3)
item.setText(_translate("SemiAutomaticClassificationPlugin", "CloudCover"))
item = self.download_images_tableWidget.horizontalHeaderItem(4)
item.setText(_translate("SemiAutomaticClassificationPlugin", "Zone/Path"))
item = self.download_images_tableWidget.horizontalHeaderItem(5)
item.setText(_translate("SemiAutomaticClassificationPlugin", "Row/DayNight"))
item = self.download_images_tableWidget.horizontalHeaderItem(6)
item.setText(_translate("SemiAutomaticClassificationPlugin", "min_lat"))
item = self.download_images_tableWidget.horizontalHeaderItem(7)
item.setText(_translate("SemiAutomaticClassificationPlugin", "min_lon"))
item = self.download_images_tableWidget.horizontalHeaderItem(8)
item.setText(_translate("SemiAutomaticClassificationPlugin", "max_lat"))
item = self.download_images_tableWidget.horizontalHeaderItem(9)
item.setText(_translate("SemiAutomaticClassificationPlugin", "max_lon"))
item = self.download_images_tableWidget.horizontalHeaderItem(10)
item.setText(_translate("SemiAutomaticClassificationPlugin", "Collection/Size"))
item = self.download_images_tableWidget.horizontalHeaderItem(11)
item.setText(_translate("SemiAutomaticClassificationPlugin", "Preview"))
item = self.download_images_tableWidget.horizontalHeaderItem(12)
item.setText(_translate("SemiAutomaticClassificationPlugin", "Collection/ID"))
item = self.download_images_tableWidget.horizontalHeaderItem(13)
item.setText(_translate("SemiAutomaticClassificationPlugin", "Collection/Image"))
self.label_100.setText(_translate("SemiAutomaticClassificationPlugin", " Product list"))
self.products_filter_lineEdit.setToolTip(_translate("SemiAutomaticClassificationPlugin", "<html><head/><body><p>Filter</p></body></html>"))
self.products_filter_lineEdit.setPlaceholderText(_translate("SemiAutomaticClassificationPlugin", "Filter"))
self.toolButton_OSM.setToolTip(_translate("SemiAutomaticClassificationPlugin", "<html><head/><body><p>Add OpenStreetMap to the map</p></body></html>"))
self.toolButton_OSM.setText(_translate("SemiAutomaticClassificationPlugin", "Plot"))
self.label_205.setText(_translate("SemiAutomaticClassificationPlugin", "<html><head/><body><p><span style=\" color:#000000;\">Add OpenStreetMap to the map</span></p></body></html>"))
self.label_206.setText(_translate("SemiAutomaticClassificationPlugin", "<html><head/><body><p>(© <a href=\"http://www.openstreetmap.org/copyright\"><span style=\" text-decoration: underline; color:#0000ff;\">OpenStreetMap</span></a> contributors. The cartography is licensed as CC BY-SA. <a href=\"https://operations.osmfoundation.org/policies/tiles/\"><span style=\" text-decoration: underline; color:#0000ff;\">Tile Usage Policy</span></a>)</p></body></html>"))
self.label_103.setText(_translate("SemiAutomaticClassificationPlugin", " Search parameters"))
self.selectUL_toolButton_3.setToolTip(_translate("SemiAutomaticClassificationPlugin", "<html><head/><body><p>Set area in the map</p></body></html>"))
self.LX_lineEdit_3.setToolTip(_translate("SemiAutomaticClassificationPlugin", "<html><head/><body><p>Lower right X</p></body></html>"))
self.LX_lineEdit_3.setPlaceholderText(_translate("SemiAutomaticClassificationPlugin", "X (Lon)"))
self.UX_lineEdit_3.setToolTip(_translate("SemiAutomaticClassificationPlugin", "<html><head/><body><p>Upper left X</p></body></html>"))
self.UX_lineEdit_3.setPlaceholderText(_translate("SemiAutomaticClassificationPlugin", "X (Lon)"))
self.label_105.setText(_translate("SemiAutomaticClassificationPlugin", "LR"))
self.label_107.setText(_translate("SemiAutomaticClassificationPlugin", "UL"))
self.LY_lineEdit_3.setToolTip(_translate("SemiAutomaticClassificationPlugin", "<html><head/><body><p>Lower right Y</p></body></html>"))
self.LY_lineEdit_3.setPlaceholderText(_translate("SemiAutomaticClassificationPlugin", "Y (Lat)"))
self.UY_lineEdit_3.setToolTip(_translate("SemiAutomaticClassificationPlugin", "<html><head/><body><p>Upper left Y</p></body></html>"))
self.UY_lineEdit_3.setPlaceholderText(_translate("SemiAutomaticClassificationPlugin", "Y (Lat)"))
self.show_area_radioButton_2.setToolTip(_translate("SemiAutomaticClassificationPlugin", "<html><head/><body><p>Show / hide area</p></body></html>"))
self.show_area_radioButton_2.setText(_translate("SemiAutomaticClassificationPlugin", "Show"))
self.find_images_toolButton.setToolTip(_translate("SemiAutomaticClassificationPlugin", "<html><head/><body><p>Find images</p></body></html>"))
self.label_35.setText(_translate("SemiAutomaticClassificationPlugin", "Find"))
self.landsat_satellite_combo.setToolTip(_translate("SemiAutomaticClassificationPlugin", "<html><head/><body><p>Select a product</p></body></html>"))
self.dateEdit_from.setDisplayFormat(_translate("SemiAutomaticClassificationPlugin", "yyyy-MM-dd"))
self.label_110.setText(_translate("SemiAutomaticClassificationPlugin", "Max cloud cover (%)"))
self.dateEdit_to.setDisplayFormat(_translate("SemiAutomaticClassificationPlugin", "yyyy-MM-dd"))
self.label_112.setText(_translate("SemiAutomaticClassificationPlugin", "to"))
self.label_111.setText(_translate("SemiAutomaticClassificationPlugin", "Date from"))
self.cloud_cover_spinBox.setToolTip(_translate("SemiAutomaticClassificationPlugin", "<html><head/><body><p>Maximum cloud cover percentage</p></body></html>"))
self.label_114.setText(_translate("SemiAutomaticClassificationPlugin", "Products"))
self.label_194.setText(_translate("SemiAutomaticClassificationPlugin", "Results"))
self.label_113.setText(_translate("SemiAutomaticClassificationPlugin", "Advanced search"))
self.imageID_lineEdit.setToolTip(_translate("SemiAutomaticClassificationPlugin", "<html><head/><body><p>Filter images</p></body></html>"))
self.result_number_spinBox_2.setToolTip(_translate("SemiAutomaticClassificationPlugin", "<html><head/><body><p>Maximum number of results (images)</p></body></html>"))
self.tabWidget_3.setTabText(self.tabWidget_3.indexOf(self.tab_search), _translate("SemiAutomaticClassificationPlugin", "Search"))
self.checkBox_band_6.setText(_translate("SemiAutomaticClassificationPlugin", "6 (Landsat 1-8)"))
self.checkBox_band_4.setText(_translate("SemiAutomaticClassificationPlugin", "4 (Landsat 1-8)"))
self.checkBox_band_1.setText(_translate("SemiAutomaticClassificationPlugin", "1 (Landsat 4-8)"))
self.checkBox_band_3.setText(_translate("SemiAutomaticClassificationPlugin", "3 (Landsat 4-8)"))
self.checkBox_band_12.setText(_translate("SemiAutomaticClassificationPlugin", "Ancillary data"))
self.checkBox_band_2.setText(_translate("SemiAutomaticClassificationPlugin", "2 (Landsat 4-8)"))
self.checkBox_band_11.setText(_translate("SemiAutomaticClassificationPlugin", "11 (Landsat 8)"))
self.checkBox_band_5.setText(_translate("SemiAutomaticClassificationPlugin", "5 (Landsat 1-8)"))
self.check_toolButton.setToolTip(_translate("SemiAutomaticClassificationPlugin", "<html><head/><body><p >Select all</p></body></html>"))
self.check_toolButton.setText(_translate("SemiAutomaticClassificationPlugin", "Plot"))
self.label_108.setText(_translate("SemiAutomaticClassificationPlugin", " Landsat bands"))
self.checkBoxs_band_9.setText(_translate("SemiAutomaticClassificationPlugin", "8A"))
self.checkBoxs_band_1.setText(_translate("SemiAutomaticClassificationPlugin", "1"))
self.check_toolButton_2.setToolTip(_translate("SemiAutomaticClassificationPlugin", "<html><head/><body><p >Select all</p></body></html>"))
self.check_toolButton_2.setText(_translate("SemiAutomaticClassificationPlugin", "Plot"))
self.label_118.setText(_translate("SemiAutomaticClassificationPlugin", " Sentinel-2 bands"))
self.checkBoxs_band_2.setText(_translate("SemiAutomaticClassificationPlugin", "2"))
self.checkBoxs_band_3.setText(_translate("SemiAutomaticClassificationPlugin", "3"))
self.checkBoxs_band_4.setText(_translate("SemiAutomaticClassificationPlugin", "4"))
self.checkBoxs_band_5.setText(_translate("SemiAutomaticClassificationPlugin", "5"))
self.checkBoxs_band_6.setText(_translate("SemiAutomaticClassificationPlugin", "6"))
self.checkBoxs_band_7.setText(_translate("SemiAutomaticClassificationPlugin", "7"))
self.checkBoxs_band_12.setText(_translate("SemiAutomaticClassificationPlugin", "11"))
self.checkBoxs_band_8.setText(_translate("SemiAutomaticClassificationPlugin", "8"))
self.checkBoxs_band_10.setText(_translate("SemiAutomaticClassificationPlugin", "9"))
self.ancillary_data_checkBox.setText(_translate("SemiAutomaticClassificationPlugin", "Ancillary data"))
self.checkBoxs_band_13.setText(_translate("SemiAutomaticClassificationPlugin", "12"))
self.checkBoxs_band_11.setText(_translate("SemiAutomaticClassificationPlugin", "10"))
self.checkBoxs3_band_6.setText(_translate("SemiAutomaticClassificationPlugin", "6"))
self.checkBoxs3_band_2.setText(_translate("SemiAutomaticClassificationPlugin", "2"))
self.checkBoxs3_band_5.setText(_translate("SemiAutomaticClassificationPlugin", "5"))
self.checkBoxs3_band_8.setText(_translate("SemiAutomaticClassificationPlugin", "8"))
self.checkBoxs3_band_1.setText(_translate("SemiAutomaticClassificationPlugin", "1"))
self.checkBoxs3_band_16.setText(_translate("SemiAutomaticClassificationPlugin", "16"))
self.checkBoxs3_band_10.setText(_translate("SemiAutomaticClassificationPlugin", "10"))
self.checkBoxs3_band_12.setText(_translate("SemiAutomaticClassificationPlugin", "12"))
self.s3_ancillary_data_checkBox.setText(_translate("SemiAutomaticClassificationPlugin", "Ancillary data"))
self.checkBoxs3_band_3.setText(_translate("SemiAutomaticClassificationPlugin", "3"))
self.label_127.setText(_translate("SemiAutomaticClassificationPlugin", " Sentinel-3 bands"))
self.checkBoxs3_band_20.setText(_translate("SemiAutomaticClassificationPlugin", "20"))
self.checkBoxs3_band_17.setText(_translate("SemiAutomaticClassificationPlugin", "17"))
self.checkBoxs3_band_14.setText(_translate("SemiAutomaticClassificationPlugin", "14"))
self.checkBoxs3_band_9.setText(_translate("SemiAutomaticClassificationPlugin", "9"))
self.checkBoxs3_band_13.setText(_translate("SemiAutomaticClassificationPlugin", "13"))
self.checkBoxs3_band_19.setText(_translate("SemiAutomaticClassificationPlugin", "19"))
self.check_toolButton_3.setToolTip(_translate("SemiAutomaticClassificationPlugin", "<html><head/><body><p >Select all</p></body></html>"))
self.check_toolButton_3.setText(_translate("SemiAutomaticClassificationPlugin", "Plot"))
self.checkBoxs3_band_7.setText(_translate("SemiAutomaticClassificationPlugin", "7"))
self.checkBoxs3_band_4.setText(_translate("SemiAutomaticClassificationPlugin", "4"))
self.checkBoxs3_band_11.setText(_translate("SemiAutomaticClassificationPlugin", "11"))
self.checkBoxs3_band_15.setText(_translate("SemiAutomaticClassificationPlugin", "15"))
self.checkBoxs3_band_21.setText(_translate("SemiAutomaticClassificationPlugin", "21"))
self.checkBoxs3_band_18.setText(_translate("SemiAutomaticClassificationPlugin", "18"))
self.checkBoxs_goes_band_1.setText(_translate("SemiAutomaticClassificationPlugin", "1"))
self.label_272.setText(_translate("SemiAutomaticClassificationPlugin", " GOES bands"))
self.checkBoxs_goes_band_5.setText(_translate("SemiAutomaticClassificationPlugin", "5"))
self.checkBoxs_goes_band_3.setText(_translate("SemiAutomaticClassificationPlugin", "3"))
self.checkBoxs_goes_band_4.setText(_translate("SemiAutomaticClassificationPlugin", "4"))
self.checkBoxs_goes_band_2.setText(_translate("SemiAutomaticClassificationPlugin", "2"))
self.checkBoxs_goes_band_6.setText(_translate("SemiAutomaticClassificationPlugin", "6"))
self.check_toolButton_4.setToolTip(_translate("SemiAutomaticClassificationPlugin", "<html><head/><body><p >Select all</p></body></html>"))
self.check_toolButton_4.setText(_translate("SemiAutomaticClassificationPlugin", "Plot"))
self.checkBox_band_8.setText(_translate("SemiAutomaticClassificationPlugin", "8 (Landsat 7, 8)"))
self.checkBox_band_10.setText(_translate("SemiAutomaticClassificationPlugin", "10 (Landsat 8)"))
self.checkBox_band_9.setText(_translate("SemiAutomaticClassificationPlugin", "9 (Landsat 8)"))
self.checkBox_band_7.setText(_translate("SemiAutomaticClassificationPlugin", "7 (Landsat 1-8)"))
self.tabWidget_3.setTabText(self.tabWidget_3.indexOf(self.tab_options), _translate("SemiAutomaticClassificationPlugin", "Download options"))
self.label_258.setText(_translate("SemiAutomaticClassificationPlugin", "<html><head/><body><p><span style=\" color:#ffffff;\"> Download</span></p></body></html>"))
self.preprocess_checkBox.setToolTip(_translate("SemiAutomaticClassificationPlugin", "<html><head/><body><p>Preprocess images</p></body></html>"))
self.preprocess_checkBox.setText(_translate("SemiAutomaticClassificationPlugin", "Preprocess images"))
self.load_in_QGIS_checkBox.setToolTip(_translate("SemiAutomaticClassificationPlugin", "<html><head/><body><p>Load images in QGIS after download</p></body></html>"))
self.load_in_QGIS_checkBox.setText(_translate("SemiAutomaticClassificationPlugin", "Load bands in QGIS"))
self.download_if_preview_in_legend_checkBox.setToolTip(_translate("SemiAutomaticClassificationPlugin", "<html><head/><body><p>Download images from list only if the corresponding previews are loaded in QGIS</p></body></html>"))
self.download_if_preview_in_legend_checkBox.setText(_translate("SemiAutomaticClassificationPlugin", "Only if preview in Layers"))
self.export_links_Button.setToolTip(_translate("SemiAutomaticClassificationPlugin", "<html><head/><body><p>Export download links to a text file</p></body></html>"))
self.export_links_Button.setText(_translate("SemiAutomaticClassificationPlugin", "Import library"))
self.download_images_Button.setToolTip(_translate("SemiAutomaticClassificationPlugin", "<html><head/><body><p><span >Run</span></p></body></html>"))
self.download_images_Button.setText(_translate("SemiAutomaticClassificationPlugin", " RUN"))
self.virtual_download_checkBox.setToolTip(_translate("SemiAutomaticClassificationPlugin", "<html><head/><body><p>If checked, download as virtual file only the portion of the image defined by search coordinates (does not work for all the sources)</p></body></html>"))
self.virtual_download_checkBox.setText(_translate("SemiAutomaticClassificationPlugin", "Virtual download"))
self.SCP_tabs.setTabText(self.SCP_tabs.indexOf(self.tab_download_products), _translate("SemiAutomaticClassificationPlugin", "Download products"))
self.label_36.setText(_translate("SemiAutomaticClassificationPlugin", "Directory containing Landsat bands"))
self.label_37.setText(_translate("SemiAutomaticClassificationPlugin", " Landsat conversion to TOA reflectance and brightness temperature"))
self.celsius_checkBox.setToolTip(_translate("SemiAutomaticClassificationPlugin", "<html><head/><body><p>Enable/Disable calculation of temperature in Celsius from thermal band</p></body></html>"))
self.celsius_checkBox.setText(_translate("SemiAutomaticClassificationPlugin", " Brightness temperature in Celsius"))
self.DOS1_checkBox.setToolTip(_translate("SemiAutomaticClassificationPlugin", "<html><head/><body><p>Enable/Disable the DOS1 atmospheric correction (thermal band is not corrected)</p></body></html>"))
self.DOS1_checkBox.setText(_translate("SemiAutomaticClassificationPlugin", " Apply DOS1 atmospheric correction"))
self.nodata_spinBox_3.setToolTip(_translate("SemiAutomaticClassificationPlugin", "<html><head/><body><p>No data value</p></body></html>"))
self.nodata_checkBox_2.setToolTip(_translate("SemiAutomaticClassificationPlugin", "<html><head/><body><p>If checked, pixels equal to NoData value will be excluded from the output raster</p></body></html>"))
self.nodata_checkBox_2.setText(_translate("SemiAutomaticClassificationPlugin", "Use value as NoData"))
self.label_41.setToolTip(_translate("SemiAutomaticClassificationPlugin", "<html><head/><body><p>Select MTL file (if not in Landsat directory)</p></body></html>"))
self.label_41.setText(_translate("SemiAutomaticClassificationPlugin", "Select MTL file"))
self.toolButton_directoryInput.setToolTip(_translate("SemiAutomaticClassificationPlugin", "<html><head/><body><p >Select a directory</p></body></html>"))
self.toolButton_directoryInput.setText(_translate("SemiAutomaticClassificationPlugin", "Import library"))
self.pansharpening_checkBox.setToolTip(_translate("SemiAutomaticClassificationPlugin", "<html><head/><body><p>Perform pan-sharpening (Brovey Transform)</p></body></html>"))
self.pansharpening_checkBox.setText(_translate("SemiAutomaticClassificationPlugin", "Perform pansharpening (Landsat 7 or 8)"))
self.create_bandset_checkBox.setToolTip(_translate("SemiAutomaticClassificationPlugin", "<html><head/><body><p>Create the Band set automatically and use the checked Band set tools</p></body></html>"))
self.create_bandset_checkBox.setText(_translate("SemiAutomaticClassificationPlugin", "Create Band set and use Band set tools"))
self.add_new_bandset_checkBox_1.setToolTip(_translate("SemiAutomaticClassificationPlugin", "<html><head/><body><p>Create a new band set where bands are added</p></body></html>"))
self.add_new_bandset_checkBox_1.setText(_translate("SemiAutomaticClassificationPlugin", "Add bands in a new Band set"))
self.toolButton_directoryInput_MTL.setToolTip(_translate("SemiAutomaticClassificationPlugin", "<html><head/><body><p><span >Open a file</span></p></body></html>"))
self.landsat_tableWidget.setToolTip(_translate("SemiAutomaticClassificationPlugin", "<html><head/><body><p>Edit metadata</p></body></html>"))
item = self.landsat_tableWidget.horizontalHeaderItem(0)
item.setText(_translate("SemiAutomaticClassificationPlugin", "Band"))
item = self.landsat_tableWidget.horizontalHeaderItem(1)
item.setText(_translate("SemiAutomaticClassificationPlugin", "RADIANCE_MULT"))
item = self.landsat_tableWidget.horizontalHeaderItem(2)
item.setText(_translate("SemiAutomaticClassificationPlugin", "RADIANCE_ADD"))
item = self.landsat_tableWidget.horizontalHeaderItem(3)
item.setText(_translate("SemiAutomaticClassificationPlugin", "REFLECTANCE_MULT"))
item = self.landsat_tableWidget.horizontalHeaderItem(4)
item.setText(_translate("SemiAutomaticClassificationPlugin", "REFLECTANCE_ADD"))
item = self.landsat_tableWidget.horizontalHeaderItem(5)
item.setText(_translate("SemiAutomaticClassificationPlugin", "RADIANCE_MAXIMUM"))
item = self.landsat_tableWidget.horizontalHeaderItem(6)
item.setText(_translate("SemiAutomaticClassificationPlugin", "REFLECTANCE_MAXIMUM"))
item = self.landsat_tableWidget.horizontalHeaderItem(7)
item.setText(_translate("SemiAutomaticClassificationPlugin", "K1_CONSTANT"))
item = self.landsat_tableWidget.horizontalHeaderItem(8)
item.setText(_translate("SemiAutomaticClassificationPlugin", "K2_CONSTANT"))
item = self.landsat_tableWidget.horizontalHeaderItem(9)
item.setText(_translate("SemiAutomaticClassificationPlugin", "LMAX"))
item = self.landsat_tableWidget.horizontalHeaderItem(10)
item.setText(_translate("SemiAutomaticClassificationPlugin", "LMIN"))
item = self.landsat_tableWidget.horizontalHeaderItem(11)
item.setText(_translate("SemiAutomaticClassificationPlugin", "QCALMAX"))
item = self.landsat_tableWidget.horizontalHeaderItem(12)
item.setText(_translate("SemiAutomaticClassificationPlugin", "QCALMIN"))
self.pushButton_remove_band.setToolTip(_translate("SemiAutomaticClassificationPlugin", "<html><head/><body><p >Delete row</p></body></html>"))
self.pushButton_remove_band.setText(_translate("SemiAutomaticClassificationPlugin", "Plot"))
self.satellite_label.setText(_translate("SemiAutomaticClassificationPlugin", "Satellite"))
self.satellite_label_3.setText(_translate("SemiAutomaticClassificationPlugin", "Sun elevation"))
self.date_lineEdit.setToolTip(_translate("SemiAutomaticClassificationPlugin", "<html><head/><body><p>DATE ACQUIRED</p></body></html>"))
self.satellite_label_2.setText(_translate("SemiAutomaticClassificationPlugin", "Date (YYYY-MM-DD)"))
self.satellite_label_4.setText(_translate("SemiAutomaticClassificationPlugin", "Earth sun distance"))
self.sun_elev_lineEdit.setToolTip(_translate("SemiAutomaticClassificationPlugin", "<html><head/><body><p>SUN ELEVATION</p></body></html>"))
self.earth_sun_dist_lineEdit.setToolTip(_translate("SemiAutomaticClassificationPlugin", "<html><head/><body><p>Earth sun distance</p></body></html>"))
self.label_74.setText(_translate("SemiAutomaticClassificationPlugin", "Metadata"))
self.satellite_lineEdit.setToolTip(_translate("SemiAutomaticClassificationPlugin", "<html><head/><body><p>Satellite (e.g. LANDSAT8)</p></body></html>"))
self.label_161.setText(_translate("SemiAutomaticClassificationPlugin", " Run"))
self.pushButton_Conversion.setToolTip(_translate("SemiAutomaticClassificationPlugin", "<html><head/><body><p><span >Run</span></p></body></html>"))
self.pushButton_Conversion.setText(_translate("SemiAutomaticClassificationPlugin", " RUN"))
self.landsat_conversion.setToolTip(_translate("SemiAutomaticClassificationPlugin", "<html><head/><body><p>Function to Batch</p></body></html>"))
self.landsat_conversion.setText(_translate("SemiAutomaticClassificationPlugin", " BATCH"))
self.tabWidget_preprocessing.setTabText(self.tabWidget_preprocessing.indexOf(self.tab_Landsat), _translate("SemiAutomaticClassificationPlugin", "Landsat"))
self.S1_label_95.setText(_translate("SemiAutomaticClassificationPlugin", "Select SNAP xml graph (optional)"))
self.S1_toolButton_directoryInput_xml.setToolTip(_translate("SemiAutomaticClassificationPlugin", "<html><head/><body><p><span >Open a file</span></p></body></html>"))
self.S1_label_97.setText(_translate("SemiAutomaticClassificationPlugin", "Polarization"))
self.VH_checkBox_S1.setToolTip(_translate("SemiAutomaticClassificationPlugin", "<html><head/><body><p>Select VH polarization</p></body></html>"))
self.VH_checkBox_S1.setText(_translate("SemiAutomaticClassificationPlugin", "VH"))
self.VV_checkBox_S1.setToolTip(_translate("SemiAutomaticClassificationPlugin", "<html><head/><body><p>Select VV polarization</p></body></html>"))
self.VV_checkBox_S1.setText(_translate("SemiAutomaticClassificationPlugin", "VV"))
self.label_209.setText(_translate("SemiAutomaticClassificationPlugin", " Sentinel-1 conversion (ESA SNAP software required)"))
self.S1_create_bandset_checkBox.setToolTip(_translate("SemiAutomaticClassificationPlugin", "<html><head/><body><p>Create the Band set automatically and use the checked Band set tools</p></body></html>"))
self.S1_create_bandset_checkBox.setText(_translate("SemiAutomaticClassificationPlugin", "Create Band set and use Band set tools"))
self.add_new_bandset_checkBox_6.setToolTip(_translate("SemiAutomaticClassificationPlugin", "<html><head/><body><p>Create a new band set where bands are added</p></body></html>"))
self.add_new_bandset_checkBox_6.setText(_translate("SemiAutomaticClassificationPlugin", "Add bands in a new Band set"))
self.S1_toolButton_fileInput.setToolTip(_translate("SemiAutomaticClassificationPlugin", "<html><head/><body><p><span >Open a file</span></p></body></html>"))
self.label_207.setText(_translate("SemiAutomaticClassificationPlugin", "Sentinel-1 file"))
self.projection_checkBox_S1.setToolTip(_translate("SemiAutomaticClassificationPlugin", "<html><head/><body><p>If checked, project the output to the same projection as selected Band set</p></body></html>"))
self.projection_checkBox_S1.setText(_translate("SemiAutomaticClassificationPlugin", "Raster projection as Band set"))
self.band_set_comb_spinBox_11.setToolTip(_translate("SemiAutomaticClassificationPlugin", "<html><head/><body><p>Band set number</p></body></html>"))
self.convert_to_db_checkBox.setToolTip(_translate("SemiAutomaticClassificationPlugin", "<html><head/><body><p>If checked, convert to dB</p></body></html>"))
self.convert_to_db_checkBox.setText(_translate("SemiAutomaticClassificationPlugin", " convert to dB"))
self.S1_nodata_checkBox.setToolTip(_translate("SemiAutomaticClassificationPlugin", "<html><head/><body><p>If checked, pixels equal to NoData value will be excluded from the output raster</p></body></html>"))
self.S1_nodata_checkBox.setText(_translate("SemiAutomaticClassificationPlugin", "Use value as NoData"))
self.S1_nodata_spinBox.setToolTip(_translate("SemiAutomaticClassificationPlugin", "<html><head/><body><p>No data value</p></body></html>"))
self.label_210.setText(_translate("SemiAutomaticClassificationPlugin", " Run"))
self.pushButton_Conversion_6.setToolTip(_translate("SemiAutomaticClassificationPlugin", "<html><head/><body><p><span >Run</span></p></body></html>"))
self.pushButton_Conversion_6.setText(_translate("SemiAutomaticClassificationPlugin", " RUN"))
self.sentinel1_conversion.setToolTip(_translate("SemiAutomaticClassificationPlugin", "<html><head/><body><p>Function to Batch</p></body></html>"))
self.sentinel1_conversion.setText(_translate("SemiAutomaticClassificationPlugin", " BATCH"))
self.tabWidget_preprocessing.setTabText(self.tabWidget_preprocessing.indexOf(self.tab_Sentinel1), _translate("SemiAutomaticClassificationPlugin", "Sentinel-1"))
self.label_90.setText(_translate("SemiAutomaticClassificationPlugin", "Directory containing Sentinel-2 bands"))
self.S2_toolButton_directoryInput.setToolTip(_translate("SemiAutomaticClassificationPlugin", "<html><head/><body><p >Select a directory</p></body></html>"))
self.S2_toolButton_directoryInput.setText(_translate("SemiAutomaticClassificationPlugin", "Import library"))
self.DOS1_checkBox_S2.setToolTip(_translate("SemiAutomaticClassificationPlugin", "<html><head/><body><p>Enable/Disable the DOS1 atmospheric correction</p></body></html>"))
self.DOS1_checkBox_S2.setText(_translate("SemiAutomaticClassificationPlugin", " Apply DOS1 atmospheric correction"))
self.S2_nodata_spinBox.setToolTip(_translate("SemiAutomaticClassificationPlugin", "<html><head/><body><p>No data value</p></body></html>"))
self.S2_nodata_checkBox.setToolTip(_translate("SemiAutomaticClassificationPlugin", "<html><head/><body><p>If checked, pixels equal to NoData value will be excluded from the output raster</p></body></html>"))
self.S2_nodata_checkBox.setText(_translate("SemiAutomaticClassificationPlugin", "Use value as NoData"))
self.label_91.setText(_translate("SemiAutomaticClassificationPlugin", " Sentinel-2 conversion"))
self.S2_label_93.setText(_translate("SemiAutomaticClassificationPlugin", "Select metadata file (MTD_MSI)"))
self.S2_toolButton_directoryInput_xml2.setToolTip(_translate("SemiAutomaticClassificationPlugin", "<html><head/><body><p><span >Open a file</span></p></body></html>"))
self.S2_create_bandset_checkBox.setToolTip(_translate("SemiAutomaticClassificationPlugin", "<html><head/><body><p>Create the Band set automatically and use the checked Band set tools</p></body></html>"))
self.S2_create_bandset_checkBox.setText(_translate("SemiAutomaticClassificationPlugin", "Create Band set and use Band set tools"))
self.add_new_bandset_checkBox_2.setToolTip(_translate("SemiAutomaticClassificationPlugin", "<html><head/><body><p>Create a new band set where bands are added</p></body></html>"))
self.add_new_bandset_checkBox_2.setText(_translate("SemiAutomaticClassificationPlugin", "Add bands in a new Band set"))
self.preprocess_b_1_9_10_checkBox.setToolTip(_translate("SemiAutomaticClassificationPlugin", "<html><head/><body><p>Create a new band set where bands are added</p></body></html>"))
self.preprocess_b_1_9_10_checkBox.setText(_translate("SemiAutomaticClassificationPlugin", "Preprocess bands 1, 9, 10"))
self.S2_satellite_lineEdit.setToolTip(_translate("SemiAutomaticClassificationPlugin", "<html><head/><body><p>Satellite (e.g. Sentinel-2A)</p></body></html>"))
self.satellite_label_5.setText(_translate("SemiAutomaticClassificationPlugin", "Satellite"))
self.satellite_label_6.setText(_translate("SemiAutomaticClassificationPlugin", "Product"))
self.S2_product_lineEdit.setToolTip(_translate("SemiAutomaticClassificationPlugin", "<html><head/><body><p>Satellite (e.g. Sentinel-2A)</p></body></html>"))
self.label_92.setText(_translate("SemiAutomaticClassificationPlugin", "Metadata"))
self.date_lineEdit_3.setToolTip(_translate("SemiAutomaticClassificationPlugin", "<html><head/><body><p>DATE ACQUIRED</p></body></html>"))
self.satellite_label_15.setText(_translate("SemiAutomaticClassificationPlugin", "Date (YYYY-MM-DD)"))
self.sentinel_2_tableWidget.setToolTip(_translate("SemiAutomaticClassificationPlugin", "<html><head/><body><p>Edit metadata</p></body></html>"))
item = self.sentinel_2_tableWidget.horizontalHeaderItem(0)
item.setText(_translate("SemiAutomaticClassificationPlugin", "Band"))
item = self.sentinel_2_tableWidget.horizontalHeaderItem(1)
item.setText(_translate("SemiAutomaticClassificationPlugin", "Quantification value"))
item = self.sentinel_2_tableWidget.horizontalHeaderItem(2)
item.setText(_translate("SemiAutomaticClassificationPlugin", "Solar irradiance"))
self.S2_pushButton_remove_band.setToolTip(_translate("SemiAutomaticClassificationPlugin", "<html><head/><body><p >Delete row</p></body></html>"))
self.S2_pushButton_remove_band.setText(_translate("SemiAutomaticClassificationPlugin", "Plot"))
self.label_162.setText(_translate("SemiAutomaticClassificationPlugin", " Run"))
self.pushButton_Conversion_2.setToolTip(_translate("SemiAutomaticClassificationPlugin", "<html><head/><body><p><span >Run</span></p></body></html>"))
self.pushButton_Conversion_2.setText(_translate("SemiAutomaticClassificationPlugin", " RUN"))
self.sentinel2_conversion.setToolTip(_translate("SemiAutomaticClassificationPlugin", "<html><head/><body><p>Function to Batch</p></body></html>"))
self.sentinel2_conversion.setText(_translate("SemiAutomaticClassificationPlugin", " BATCH"))
self.tabWidget_preprocessing.setTabText(self.tabWidget_preprocessing.indexOf(self.tab_Sentinel2), _translate("SemiAutomaticClassificationPlugin", "Sentinel-2"))
self.S2_nodata_spinBox_2.setToolTip(_translate("SemiAutomaticClassificationPlugin", "<html><head/><body><p>No data value</p></body></html>"))
self.S3_nodata_checkBox.setToolTip(_translate("SemiAutomaticClassificationPlugin", "<html><head/><body><p>If checked, pixels equal to NoData value will be excluded from the output raster</p></body></html>"))
self.S3_nodata_checkBox.setText(_translate("SemiAutomaticClassificationPlugin", "Use value as NoData"))
self.label_109.setText(_translate("SemiAutomaticClassificationPlugin", " Sentinel-3 conversion"))
self.S3_create_bandset_checkBox.setToolTip(_translate("SemiAutomaticClassificationPlugin", "<html><head/><body><p>Create the Band set automatically and use the checked Band set tools</p></body></html>"))
self.S3_create_bandset_checkBox.setText(_translate("SemiAutomaticClassificationPlugin", "Create Band set and use Band set tools"))
self.label_106.setText(_translate("SemiAutomaticClassificationPlugin", "Directory containing Sentinel-3 bands"))
self.S3_toolButton_directoryInput.setToolTip(_translate("SemiAutomaticClassificationPlugin", "<html><head/><body><p >Select a directory</p></body></html>"))
self.S3_toolButton_directoryInput.setText(_translate("SemiAutomaticClassificationPlugin", "Import library"))
self.DOS1_checkBox_S3.setToolTip(_translate("SemiAutomaticClassificationPlugin", "<html><head/><body><p>Enable/Disable the DOS1 atmospheric correction</p></body></html>"))
self.DOS1_checkBox_S3.setText(_translate("SemiAutomaticClassificationPlugin", " Apply DOS1 atmospheric correction"))
self.add_new_bandset_checkBox_3.setToolTip(_translate("SemiAutomaticClassificationPlugin", "<html><head/><body><p>Create a new band set where bands are added</p></body></html>"))
self.add_new_bandset_checkBox_3.setText(_translate("SemiAutomaticClassificationPlugin", "Add bands in a new Band set"))
self.sentinel_3_tableWidget.setToolTip(_translate("SemiAutomaticClassificationPlugin", "<html><head/><body><p>Edit metadata</p></body></html>"))
item = self.sentinel_3_tableWidget.horizontalHeaderItem(0)
item.setText(_translate("SemiAutomaticClassificationPlugin", "Band"))
self.S3_pushButton_remove_band.setToolTip(_translate("SemiAutomaticClassificationPlugin", "<html><head/><body><p >Delete row</p></body></html>"))
self.S3_pushButton_remove_band.setText(_translate("SemiAutomaticClassificationPlugin", "Plot"))
self.S3_satellite_lineEdit.setToolTip(_translate("SemiAutomaticClassificationPlugin", "<html><head/><body><p>Satellite (e.g. Sentinel-3A)</p></body></html>"))
self.satellite_label_12.setText(_translate("SemiAutomaticClassificationPlugin", "Satellite"))
self.satellite_label_14.setText(_translate("SemiAutomaticClassificationPlugin", "Product"))
self.S3_product_lineEdit.setToolTip(_translate("SemiAutomaticClassificationPlugin", "<html><head/><body><p>Satellite (e.g. Sentinel-3A)</p></body></html>"))
self.label_115.setText(_translate("SemiAutomaticClassificationPlugin", "Metadata"))
self.label_181.setText(_translate("SemiAutomaticClassificationPlugin", " Run"))
self.pushButton_Conversion_5.setToolTip(_translate("SemiAutomaticClassificationPlugin", "<html><head/><body><p><span >Run</span></p></body></html>"))
self.pushButton_Conversion_5.setText(_translate("SemiAutomaticClassificationPlugin", " RUN"))
self.sentinel3_conversion.setToolTip(_translate("SemiAutomaticClassificationPlugin", "<html><head/><body><p>Function to Batch</p></body></html>"))
self.sentinel3_conversion.setText(_translate("SemiAutomaticClassificationPlugin", " BATCH"))
self.tabWidget_preprocessing.setTabText(self.tabWidget_preprocessing.indexOf(self.tab_Sentinel3), _translate("SemiAutomaticClassificationPlugin", "Sentinel-3"))
self.nodata_spinBox_6.setToolTip(_translate("SemiAutomaticClassificationPlugin", "<html><head/><body><p>No data value</p></body></html>"))
self.nodata_checkBox_5.setToolTip(_translate("SemiAutomaticClassificationPlugin", "<html><head/><body><p>If checked, pixels equal to NoData value will be excluded from the output raster</p></body></html>"))
self.nodata_checkBox_5.setText(_translate("SemiAutomaticClassificationPlugin", "Use value as NoData"))
self.toolButton_directoryInput_ASTER.setToolTip(_translate("SemiAutomaticClassificationPlugin", "<html><head/><body><p>Open a file</p></body></html>"))
self.toolButton_directoryInput_ASTER.setText(_translate("SemiAutomaticClassificationPlugin", "Import library"))
self.DOS1_checkBox_2.setToolTip(_translate("SemiAutomaticClassificationPlugin", "<html><head/><body><p>Enable/Disable the DOS1 atmospheric correction (thermal band is not corrected)</p></body></html>"))
self.DOS1_checkBox_2.setText(_translate("SemiAutomaticClassificationPlugin", " Apply DOS1 atmospheric correction"))
self.label_67.setText(_translate("SemiAutomaticClassificationPlugin", " ASTER conversion to TOA reflectance and brightness temperature"))
self.label_55.setText(_translate("SemiAutomaticClassificationPlugin", "Select file ASTER L1T (.hdf)"))
self.create_bandset_checkBox_2.setToolTip(_translate("SemiAutomaticClassificationPlugin", "<html><head/><body><p>Create the Band set automatically and use the checked Band set tools</p></body></html>"))
self.create_bandset_checkBox_2.setText(_translate("SemiAutomaticClassificationPlugin", "Create Band set and use Band set tools"))
self.add_new_bandset_checkBox_4.setToolTip(_translate("SemiAutomaticClassificationPlugin", "<html><head/><body><p>Create a new band set where bands are added</p></body></html>"))
self.add_new_bandset_checkBox_4.setText(_translate("SemiAutomaticClassificationPlugin", "Add bands in a new Band set"))
self.celsius_checkBox_2.setToolTip(_translate("SemiAutomaticClassificationPlugin", "<html><head/><body><p>Enable/Disable calculation of temperature in Celsius from thermal band</p></body></html>"))
self.celsius_checkBox_2.setText(_translate("SemiAutomaticClassificationPlugin", " Brightness temperature in Celsius"))
self.ASTER_tableWidget.setToolTip(_translate("SemiAutomaticClassificationPlugin", "<html><head/><body><p>Edit metadata</p></body></html>"))
item = self.ASTER_tableWidget.horizontalHeaderItem(0)
item.setText(_translate("SemiAutomaticClassificationPlugin", "Band"))
item = self.ASTER_tableWidget.horizontalHeaderItem(1)
item.setText(_translate("SemiAutomaticClassificationPlugin", "UnitConversionCoeff"))
item = self.ASTER_tableWidget.horizontalHeaderItem(2)
item.setText(_translate("SemiAutomaticClassificationPlugin", "PixelSize"))
self.pushButton_remove_band_2.setToolTip(_translate("SemiAutomaticClassificationPlugin", "<html><head/><body><p >Delete row</p></body></html>"))
self.pushButton_remove_band_2.setText(_translate("SemiAutomaticClassificationPlugin", "Plot"))
self.date_lineEdit_2.setToolTip(_translate("SemiAutomaticClassificationPlugin", "<html><head/><body><p>DATE ACQUIRED</p></body></html>"))
self.earth_sun_dist_lineEdit_2.setToolTip(_translate("SemiAutomaticClassificationPlugin", "<html><head/><body><p>Earth sun distance</p></body></html>"))
self.satellite_label_9.setText(_translate("SemiAutomaticClassificationPlugin", "Earth sun \n"
"distance"))
self.satellite_label_8.setText(_translate("SemiAutomaticClassificationPlugin", "Date\n"
" (YYYYMMDD)"))
self.satellite_label_7.setText(_translate("SemiAutomaticClassificationPlugin", "Sun elevation"))
self.sun_elev_lineEdit_2.setToolTip(_translate("SemiAutomaticClassificationPlugin", "<html><head/><body><p>SUN ELEVATION</p></body></html>"))
self.ulm_lineEdit.setToolTip(_translate("SemiAutomaticClassificationPlugin", "<html><head/><body><p>Upper left</p></body></html>"))
self.satellite_label_10.setText(_translate("SemiAutomaticClassificationPlugin", "UTM zone"))
self.utm_zone_lineEdit.setToolTip(_translate("SemiAutomaticClassificationPlugin", "<html><head/><body><p>UTM zone</p></body></html>"))
self.satellite_label_11.setText(_translate("SemiAutomaticClassificationPlugin", "UPPERLEFTM"))
self.satellite_label_17.setText(_translate("SemiAutomaticClassificationPlugin", "LOWERRIGHTM"))
self.lrm_lineEdit.setToolTip(_translate("SemiAutomaticClassificationPlugin", "<html><head/><body><p>Lower right</p></body></html>"))
self.label_160.setText(_translate("SemiAutomaticClassificationPlugin", "Metadata"))
self.label_163.setText(_translate("SemiAutomaticClassificationPlugin", " Run"))
self.pushButton_Conversion_3.setToolTip(_translate("SemiAutomaticClassificationPlugin", "<html><head/><body><p><span >Run</span></p></body></html>"))
self.pushButton_Conversion_3.setText(_translate("SemiAutomaticClassificationPlugin", " RUN"))
self.aster_conversion.setToolTip(_translate("SemiAutomaticClassificationPlugin", "<html><head/><body><p>Function to Batch</p></body></html>"))
self.aster_conversion.setText(_translate("SemiAutomaticClassificationPlugin", " BATCH"))
self.tabWidget_preprocessing.setTabText(self.tabWidget_preprocessing.indexOf(self.tab_ASTER), _translate("SemiAutomaticClassificationPlugin", "ASTER"))
self.label_218.setText(_translate("SemiAutomaticClassificationPlugin", " MODIS conversion"))
self.create_bandset_checkBox_3.setToolTip(_translate("SemiAutomaticClassificationPlugin", "<html><head/><body><p>Create the Band set automatically and use the checked Band set tools</p></body></html>"))
self.create_bandset_checkBox_3.setText(_translate("SemiAutomaticClassificationPlugin", "Create Band set and use Band set tools"))
self.add_new_bandset_checkBox_5.setToolTip(_translate("SemiAutomaticClassificationPlugin", "<html><head/><body><p>Create a new band set where bands are added</p></body></html>"))
self.add_new_bandset_checkBox_5.setText(_translate("SemiAutomaticClassificationPlugin", "Add bands in a new Band set"))
self.label_219.setText(_translate("SemiAutomaticClassificationPlugin", "Select file MODIS (.hdf)"))
self.toolButton_directoryInput_MODIS.setToolTip(_translate("SemiAutomaticClassificationPlugin", "<html><head/><body><p>Open a file</p></body></html>"))
self.toolButton_directoryInput_MODIS.setText(_translate("SemiAutomaticClassificationPlugin", "Import library"))
self.nodata_spinBox_8.setToolTip(_translate("SemiAutomaticClassificationPlugin", "<html><head/><body><p>No data value</p></body></html>"))
self.nodata_checkBox_7.setToolTip(_translate("SemiAutomaticClassificationPlugin", "<html><head/><body><p>If checked, pixels equal to NoData value will be excluded from the output raster</p></body></html>"))
self.nodata_checkBox_7.setText(_translate("SemiAutomaticClassificationPlugin", "Use value as NoData"))
self.reproject_modis_checkBox.setToolTip(_translate("SemiAutomaticClassificationPlugin", "<html><head/><body><p>Reproject bands to WGS 84</p></body></html>"))
self.reproject_modis_checkBox.setText(_translate("SemiAutomaticClassificationPlugin", "Reproject to WGS 84"))
self.MODIS_tableWidget.setToolTip(_translate("SemiAutomaticClassificationPlugin", "<html><head/><body><p>Edit metadata</p></body></html>"))
item = self.MODIS_tableWidget.horizontalHeaderItem(0)
item.setText(_translate("SemiAutomaticClassificationPlugin", "Band"))
item = self.MODIS_tableWidget.horizontalHeaderItem(1)
item.setText(_translate("SemiAutomaticClassificationPlugin", "UnitConversionCoeff"))
self.pushButton_remove_band_3.setToolTip(_translate("SemiAutomaticClassificationPlugin", "<html><head/><body><p >Delete row</p></body></html>"))
self.pushButton_remove_band_3.setText(_translate("SemiAutomaticClassificationPlugin", "Plot"))
self.satellite_label_16.setText(_translate("SemiAutomaticClassificationPlugin", "Date (YYYY-MM-DD)"))
self.MODIS_ID_lineEdit.setToolTip(_translate("SemiAutomaticClassificationPlugin", "<html><head/><body><p>DATE ACQUIRED</p></body></html>"))
self.satellite_label_13.setText(_translate("SemiAutomaticClassificationPlugin", "ID"))
self.MODIS_date_lineEdit.setToolTip(_translate("SemiAutomaticClassificationPlugin", "<html><head/><body><p>DATE ACQUIRED</p></body></html>"))
self.label_220.setText(_translate("SemiAutomaticClassificationPlugin", "Metadata"))
self.label_221.setText(_translate("SemiAutomaticClassificationPlugin", " Run"))
self.pushButton_Conversion_4.setToolTip(_translate("SemiAutomaticClassificationPlugin", "<html><head/><body><p><span >Run</span></p></body></html>"))
self.pushButton_Conversion_4.setText(_translate("SemiAutomaticClassificationPlugin", " RUN"))
self.modis_conversion.setToolTip(_translate("SemiAutomaticClassificationPlugin", "<html><head/><body><p>Function to Batch</p></body></html>"))
self.modis_conversion.setText(_translate("SemiAutomaticClassificationPlugin", " BATCH"))
self.tabWidget_preprocessing.setTabText(self.tabWidget_preprocessing.indexOf(self.tab_MODIS), _translate("SemiAutomaticClassificationPlugin", "MODIS"))
self.label_142.setText(_translate("SemiAutomaticClassificationPlugin", " Convert vector to raster"))
self.label_64.setText(_translate("SemiAutomaticClassificationPlugin", "Select the vector"))
self.vector_name_combo.setToolTip(_translate("SemiAutomaticClassificationPlugin", "<html><head/><body><p>Select the vector</p></body></html>"))
self.toolButton_reload_16.setToolTip(_translate("SemiAutomaticClassificationPlugin", "<html><head/><body><p >Refresh list</p></body></html>"))
self.toolButton_reload_16.setText(_translate("SemiAutomaticClassificationPlugin", "Plot"))
self.field_checkBox.setToolTip(_translate("SemiAutomaticClassificationPlugin", "<html><head/><body><p>Use the value field of the vector</p></body></html>"))
self.field_checkBox.setText(_translate("SemiAutomaticClassificationPlugin", "Use the value field of the vector"))
self.field_comboBox.setToolTip(_translate("SemiAutomaticClassificationPlugin", "<html><head/><body><p>Select the value field</p></body></html>"))
self.constant_value_checkBox.setToolTip(_translate("SemiAutomaticClassificationPlugin", "<html><head/><body><p>Use constant value</p></body></html>"))
self.constant_value_checkBox.setText(_translate("SemiAutomaticClassificationPlugin", "Use constant value"))
self.constant_value_spinBox.setToolTip(_translate("SemiAutomaticClassificationPlugin", "<html><head/><body><p>Value</p></body></html>"))
self.label_157.setText(_translate("SemiAutomaticClassificationPlugin", "Select the type of conversion"))
self.conversion_type_combo.setToolTip(_translate("SemiAutomaticClassificationPlugin", "<html><head/><body><p>Select the type of conversion</p></body></html>"))
self.label_156.setText(_translate("SemiAutomaticClassificationPlugin", "Select the reference raster"))
self.toolButton_reload_17.setToolTip(_translate("SemiAutomaticClassificationPlugin", "<html><head/><body><p >Refresh list</p></body></html>"))
self.toolButton_reload_17.setText(_translate("SemiAutomaticClassificationPlugin", "Plot"))
self.reference_raster_name_combo.setToolTip(_translate("SemiAutomaticClassificationPlugin", "<html><head/><body><p>Select the reference raster</p></body></html>"))
self.extent_checkBox_2.setToolTip(_translate("SemiAutomaticClassificationPlugin", "<html><head/><body><p>Use the same extent as reference raster</p></body></html>"))
self.extent_checkBox_2.setText(_translate("SemiAutomaticClassificationPlugin", "Same extent as reference raster"))
self.label_167.setText(_translate("SemiAutomaticClassificationPlugin", " Run"))
self.convert_vector_toolButton.setToolTip(_translate("SemiAutomaticClassificationPlugin", "<html><head/><body><p><span >Run</span></p></body></html>"))
self.convert_vector_toolButton.setText(_translate("SemiAutomaticClassificationPlugin", " RUN"))
self.vector_to_raster.setToolTip(_translate("SemiAutomaticClassificationPlugin", "<html><head/><body><p>Function to Batch</p></body></html>"))
self.vector_to_raster.setText(_translate("SemiAutomaticClassificationPlugin", " BATCH"))
self.tabWidget_preprocessing.setTabText(self.tabWidget_preprocessing.indexOf(self.tab_spectral_distance), _translate("SemiAutomaticClassificationPlugin", "Vector to raster"))
self.label_128.setText(_translate("SemiAutomaticClassificationPlugin", " Clip band set"))
self.nodata_spinBox.setToolTip(_translate("SemiAutomaticClassificationPlugin", "<html><head/><body><p>NoData value</p></body></html>"))
self.band_set_comb_spinBox_2.setToolTip(_translate("SemiAutomaticClassificationPlugin", "<html><head/><body><p>Band set number</p></body></html>"))
self.label_251.setText(_translate("SemiAutomaticClassificationPlugin", "Select input band set"))
self.label_62.setText(_translate("SemiAutomaticClassificationPlugin", "Output name prefix"))
self.label_16.setText(_translate("SemiAutomaticClassificationPlugin", "Use value as NoData"))
self.output_clip_name_lineEdit.setToolTip(_translate("SemiAutomaticClassificationPlugin", "<html><head/><body><p>Output name prefix</p></body></html>"))
self.output_clip_name_lineEdit.setText(_translate("SemiAutomaticClassificationPlugin", "clip"))
self.LX_lineEdit.setToolTip(_translate("SemiAutomaticClassificationPlugin", "<html><head/><body><p>Lower right X</p></body></html>"))
self.LX_lineEdit.setPlaceholderText(_translate("SemiAutomaticClassificationPlugin", "X"))
self.UX_lineEdit.setToolTip(_translate("SemiAutomaticClassificationPlugin", "<html><head/><body><p>Upper left X</p></body></html>"))
self.UX_lineEdit.setPlaceholderText(_translate("SemiAutomaticClassificationPlugin", "X"))
self.UY_lineEdit.setToolTip(_translate("SemiAutomaticClassificationPlugin", "<html><head/><body><p>Upper left Y</p></body></html>"))
self.UY_lineEdit.setPlaceholderText(_translate("SemiAutomaticClassificationPlugin", "Y"))
self.LY_lineEdit.setToolTip(_translate("SemiAutomaticClassificationPlugin", "<html><head/><body><p>Lower right Y</p></body></html>"))
self.LY_lineEdit.setPlaceholderText(_translate("SemiAutomaticClassificationPlugin", "Y"))
self.label_12.setText(_translate("SemiAutomaticClassificationPlugin", "LR"))
self.selectUL_toolButton.setToolTip(_translate("SemiAutomaticClassificationPlugin", "<html><head/><body><p>Set area in the map</p></body></html>"))
self.label_29.setText(_translate("SemiAutomaticClassificationPlugin", " Clip coordinates"))
self.show_area_radioButton_3.setToolTip(_translate("SemiAutomaticClassificationPlugin", "<html><head/><body><p>Show / hide area</p></body></html>"))
self.show_area_radioButton_3.setText(_translate("SemiAutomaticClassificationPlugin", "Show"))
self.label_11.setText(_translate("SemiAutomaticClassificationPlugin", "UL"))
self.shapefile_comboBox.setToolTip(_translate("SemiAutomaticClassificationPlugin", "<html><head/><body><p>Select the vector for clipping</p></body></html>"))
self.shapefile_checkBox.setToolTip(_translate("SemiAutomaticClassificationPlugin", "<html><head/><body><p>Use vector boundaries for clipping rasters</p></body></html>"))
self.shapefile_checkBox.setText(_translate("SemiAutomaticClassificationPlugin", "Use vector for clipping"))
self.toolButton_reload_8.setToolTip(_translate("SemiAutomaticClassificationPlugin", "<html><head/><body><p >Refresh list</p></body></html>"))
self.toolButton_reload_8.setText(_translate("SemiAutomaticClassificationPlugin", "Plot"))
self.temporary_ROI_checkBox.setToolTip(_translate("SemiAutomaticClassificationPlugin", "<html><head/><body><p>Use temporary ROI boundaries for clipping rasters</p></body></html>"))
self.temporary_ROI_checkBox.setText(_translate("SemiAutomaticClassificationPlugin", "Use temporary ROI for clipping"))
self.vector_field_checkBox.setToolTip(_translate("SemiAutomaticClassificationPlugin", "<html><head/><body><p>If checked, clip iterating through each vector polygon and add field value to the output name</p></body></html>"))
self.vector_field_checkBox.setText(_translate("SemiAutomaticClassificationPlugin", "Use vector field for output name"))
self.class_field_comboBox_3.setToolTip(_translate("SemiAutomaticClassificationPlugin", "<html><head/><body><p>Select the vector field</p></body></html>"))
self.label_164.setText(_translate("SemiAutomaticClassificationPlugin", " Run"))
self.clip_Button.setToolTip(_translate("SemiAutomaticClassificationPlugin", "<html><head/><body><p><span >Run</span></p></body></html>"))
self.clip_Button.setText(_translate("SemiAutomaticClassificationPlugin", " RUN"))
self.clip_multiple_rasters.setToolTip(_translate("SemiAutomaticClassificationPlugin", "<html><head/><body><p>Function to Batch</p></body></html>"))
self.clip_multiple_rasters.setText(_translate("SemiAutomaticClassificationPlugin", " BATCH"))
self.tabWidget_preprocessing.setTabText(self.tabWidget_preprocessing.indexOf(self.tab_clip), _translate("SemiAutomaticClassificationPlugin", "Clip multiple rasters"))
self.band_set_comb_spinBox_14.setToolTip(_translate("SemiAutomaticClassificationPlugin", "<html><head/><body><p>Band set number</p></body></html>"))
self.label_264.setText(_translate("SemiAutomaticClassificationPlugin", "Select input band set"))
self.label_249.setText(_translate("SemiAutomaticClassificationPlugin", " Reproject raster bands"))
self.raster_align_comboBox.setToolTip(_translate("SemiAutomaticClassificationPlugin", "<html><head/><body><p>Select the reference raster</p></body></html>"))
self.use_align_raster_checkBox.setToolTip(_translate("SemiAutomaticClassificationPlugin", "<html><head/><body><p>Align to raster</p></body></html>"))
self.use_align_raster_checkBox.setText(_translate("SemiAutomaticClassificationPlugin", "Align to raster"))
self.toolButton_reload_25.setToolTip(_translate("SemiAutomaticClassificationPlugin", "<html><head/><body><p >Refresh list</p></body></html>"))
self.toolButton_reload_25.setText(_translate("SemiAutomaticClassificationPlugin", "Plot"))
self.same_extent_raster_checkBox.setToolTip(_translate("SemiAutomaticClassificationPlugin", "<html><head/><body><p>Keep the same extent as the reference raster</p></body></html>"))
self.same_extent_raster_checkBox.setText(_translate("SemiAutomaticClassificationPlugin", "same extent as reference"))
self.epsg_code_lineEdit.setToolTip(_translate("SemiAutomaticClassificationPlugin", "<html><head/><body><p>EPSG value</p></body></html>"))
self.use_epsg_checkBox.setToolTip(_translate("SemiAutomaticClassificationPlugin", "<html><head/><body><p>Use EPSG value</p></body></html>"))
self.use_epsg_checkBox.setText(_translate("SemiAutomaticClassificationPlugin", "Use EPSG code"))
self.label_267.setText(_translate("SemiAutomaticClassificationPlugin", " Y resolution"))
self.label_266.setText(_translate("SemiAutomaticClassificationPlugin", " X resolution"))
self.x_resolution_lineEdit.setToolTip(_translate("SemiAutomaticClassificationPlugin", "<html><head/><body><p>X resolution</p></body></html>"))
self.y_resolution_lineEdit.setToolTip(_translate("SemiAutomaticClassificationPlugin", "<html><head/><body><p>Y resolution</p></body></html>"))
self.resample_checkBox.setToolTip(_translate("SemiAutomaticClassificationPlugin", "<html><head/><body><p>If checked, new pixel size is original pixel size times this factor</p></body></html>"))
self.resample_checkBox.setText(_translate("SemiAutomaticClassificationPlugin", "Resample pixel factor"))
self.resample_lineEdit.setToolTip(_translate("SemiAutomaticClassificationPlugin", "<html><head/><body><p>Resample factor</p></body></html>"))
self.resample_lineEdit.setText(_translate("SemiAutomaticClassificationPlugin", "1"))
self.label_269.setText(_translate("SemiAutomaticClassificationPlugin", "Resampling method"))
self.resampling_method_comboBox.setToolTip(_translate("SemiAutomaticClassificationPlugin", "<html><head/><body><p>Select the resampling method</p></body></html>"))
self.resampling_method_comboBox.setItemText(0, _translate("SemiAutomaticClassificationPlugin", "nearest_neighbour"))
self.resampling_method_comboBox.setItemText(1, _translate("SemiAutomaticClassificationPlugin", "average"))
self.resampling_method_comboBox.setItemText(2, _translate("SemiAutomaticClassificationPlugin", "sum"))
self.resampling_method_comboBox.setItemText(3, _translate("SemiAutomaticClassificationPlugin", "maximum"))
self.resampling_method_comboBox.setItemText(4, _translate("SemiAutomaticClassificationPlugin", "minimum"))
self.resampling_method_comboBox.setItemText(5, _translate("SemiAutomaticClassificationPlugin", "mode"))
self.resampling_method_comboBox.setItemText(6, _translate("SemiAutomaticClassificationPlugin", "median"))
self.resampling_method_comboBox.setItemText(7, _translate("SemiAutomaticClassificationPlugin", "first_quartile"))
self.resampling_method_comboBox.setItemText(8, _translate("SemiAutomaticClassificationPlugin", "third_quartile"))
self.label_270.setText(_translate("SemiAutomaticClassificationPlugin", "Output type"))
self.raster_type_combo_2.setToolTip(_translate("SemiAutomaticClassificationPlugin", "<html><head/><body><p>Select a type</p></body></html>"))
self.raster_type_combo_2.setItemText(0, _translate("SemiAutomaticClassificationPlugin", "Auto"))
self.raster_type_combo_2.setItemText(1, _translate("SemiAutomaticClassificationPlugin", "Float32"))
self.raster_type_combo_2.setItemText(2, _translate("SemiAutomaticClassificationPlugin", "Int32"))
self.raster_type_combo_2.setItemText(3, _translate("SemiAutomaticClassificationPlugin", "UInt32"))
self.raster_type_combo_2.setItemText(4, _translate("SemiAutomaticClassificationPlugin", "Int16"))
self.raster_type_combo_2.setItemText(5, _translate("SemiAutomaticClassificationPlugin", "UInt16"))
self.raster_type_combo_2.setItemText(6, _translate("SemiAutomaticClassificationPlugin", "Byte"))
self.change_nodata_checkBox.setToolTip(_translate("SemiAutomaticClassificationPlugin", "<html><head/><body><p>If checked, change output NoData value</p></body></html>"))
self.change_nodata_checkBox.setText(_translate("SemiAutomaticClassificationPlugin", "Change output NoData value"))
self.nodata_spinBox_14.setToolTip(_translate("SemiAutomaticClassificationPlugin", "<html><head/><body><p>NoData value of the output raster</p></body></html>"))
self.label_265.setText(_translate("SemiAutomaticClassificationPlugin", "Output name prefix"))
self.reproj_output_name_lineEdit.setToolTip(_translate("SemiAutomaticClassificationPlugin", "<html><head/><body><p>Output name prefix</p></body></html>"))
self.reproj_output_name_lineEdit.setText(_translate("SemiAutomaticClassificationPlugin", "reproj"))
self.label_263.setText(_translate("SemiAutomaticClassificationPlugin", " Run"))
self.reproject_raster_bands.setToolTip(_translate("SemiAutomaticClassificationPlugin", "<html><head/><body><p>Function to Batch</p></body></html>"))
self.reproject_raster_bands.setText(_translate("SemiAutomaticClassificationPlugin", " BATCH"))
self.reproject_Button.setToolTip(_translate("SemiAutomaticClassificationPlugin", "<html><head/><body><p><span >Run</span></p></body></html>"))
self.reproject_Button.setText(_translate("SemiAutomaticClassificationPlugin", " RUN"))
self.tabWidget_preprocessing.setTabText(self.tabWidget_preprocessing.indexOf(self.tab_reproject_bands), _translate("SemiAutomaticClassificationPlugin", "Reproject raster bands"))
self.raster_name_combo.setToolTip(_translate("SemiAutomaticClassificationPlugin", "<html><head/><body><p>Select the image to be split</p></body></html>"))
self.label_57.setText(_translate("SemiAutomaticClassificationPlugin", " Split raster bands"))
self.label_50.setText(_translate("SemiAutomaticClassificationPlugin", "Select a multiband raster"))
self.toolButton_reload_9.setToolTip(_translate("SemiAutomaticClassificationPlugin", "<html><head/><body><p >Refresh list</p></body></html>"))
self.toolButton_reload_9.setText(_translate("SemiAutomaticClassificationPlugin", "Plot"))
self.label_165.setText(_translate("SemiAutomaticClassificationPlugin", " Run"))
self.label_61.setText(_translate("SemiAutomaticClassificationPlugin", "Output name prefix"))
self.output_name_lineEdit.setToolTip(_translate("SemiAutomaticClassificationPlugin", "<html><head/><body><p>Output name prefix</p></body></html>"))
self.output_name_lineEdit.setText(_translate("SemiAutomaticClassificationPlugin", "split"))
self.split_raster_bands.setToolTip(_translate("SemiAutomaticClassificationPlugin", "<html><head/><body><p>Function to Batch</p></body></html>"))
self.split_raster_bands.setText(_translate("SemiAutomaticClassificationPlugin", " BATCH"))
self.split_Button.setToolTip(_translate("SemiAutomaticClassificationPlugin", "<html><head/><body><p><span >Run</span></p></body></html>"))
self.split_Button.setText(_translate("SemiAutomaticClassificationPlugin", " RUN"))
self.tabWidget_preprocessing.setTabText(self.tabWidget_preprocessing.indexOf(self.tab_split_raster), _translate("SemiAutomaticClassificationPlugin", "Split raster bands"))
self.label_252.setText(_translate("SemiAutomaticClassificationPlugin", "Select input band set"))
self.band_set_comb_spinBox_3.setToolTip(_translate("SemiAutomaticClassificationPlugin", "<html><head/><body><p>Band set number</p></body></html>"))
self.stack_raster_bands.setToolTip(_translate("SemiAutomaticClassificationPlugin", "<html><head/><body><p>Function to Batch</p></body></html>"))
self.stack_raster_bands.setText(_translate("SemiAutomaticClassificationPlugin", " BATCH"))
self.stack_Button.setToolTip(_translate("SemiAutomaticClassificationPlugin", "<html><head/><body><p><span >Run</span></p></body></html>"))
self.stack_Button.setText(_translate("SemiAutomaticClassificationPlugin", " RUN"))
self.label_223.setText(_translate("SemiAutomaticClassificationPlugin", " Stack band set"))
self.label_226.setText(_translate("SemiAutomaticClassificationPlugin", " Run"))
self.tabWidget_preprocessing.setTabText(self.tabWidget_preprocessing.indexOf(self.tab_stack_bands), _translate("SemiAutomaticClassificationPlugin", "Stack raster bands"))
self.label_134.setText(_translate("SemiAutomaticClassificationPlugin", " Mosaic of band sets"))
self.label_135.setText(_translate("SemiAutomaticClassificationPlugin", "Output name prefix"))
self.nodata_checkBox_9.setToolTip(_translate("SemiAutomaticClassificationPlugin", "<html><head/><body><p>If checked, pixels equal to NoData value will be excluded from the output raster</p></body></html>"))
self.nodata_checkBox_9.setText(_translate("SemiAutomaticClassificationPlugin", "Use value as NoData"))
self.nodata_spinBox_10.setToolTip(_translate("SemiAutomaticClassificationPlugin", "<html><head/><body><p>NoData value</p></body></html>"))
self.mosaic_output_name_lineEdit.setToolTip(_translate("SemiAutomaticClassificationPlugin", "<html><head/><body><p>Output name prefix</p></body></html>"))
self.mosaic_output_name_lineEdit.setText(_translate("SemiAutomaticClassificationPlugin", "mosaic"))
self.label_144.setText(_translate("SemiAutomaticClassificationPlugin", "Band set list"))
self.mosaic_band_sets_lineEdit.setToolTip(_translate("SemiAutomaticClassificationPlugin", "<html><head/><body><p>List of band set numbers separated by comma ,<br/>Use * for selecting all the band sets</p></body></html>"))
self.mosaic_band_sets_lineEdit.setText(_translate("SemiAutomaticClassificationPlugin", "1, 2"))
self.mosaic_virtual_checkBox.setToolTip(_translate("SemiAutomaticClassificationPlugin", "<html><head/><body><p>If checked, output bands are virtual rasters</p></body></html>"))
self.mosaic_virtual_checkBox.setText(_translate("SemiAutomaticClassificationPlugin", "Create virtual raster output"))
self.mosaic_bandsets_toolButton.setToolTip(_translate("SemiAutomaticClassificationPlugin", "<html><head/><body><p><span >Run</span></p></body></html>"))
self.mosaic_bandsets_toolButton.setText(_translate("SemiAutomaticClassificationPlugin", " RUN"))
self.mosaic_bandsets.setToolTip(_translate("SemiAutomaticClassificationPlugin", "<html><head/><body><p>Function to Batch</p></body></html>"))
self.mosaic_bandsets.setText(_translate("SemiAutomaticClassificationPlugin", " BATCH"))
self.label_182.setText(_translate("SemiAutomaticClassificationPlugin", " Run"))
self.tabWidget_preprocessing.setTabText(self.tabWidget_preprocessing.indexOf(self.tab_mosaic_band_sets), _translate("SemiAutomaticClassificationPlugin", "Mosaic band sets"))
self.label_260.setText(_translate("SemiAutomaticClassificationPlugin", "Select input band set"))
self.band_set_comb_spinBox_9.setToolTip(_translate("SemiAutomaticClassificationPlugin", "<html><head/><body><p>Band set number</p></body></html>"))
self.classification_name_combo_4.setToolTip(_translate("SemiAutomaticClassificationPlugin", "<html><head/><body><p>Select the classification</p></body></html>"))
self.cloud_mask_classes_lineEdit.setToolTip(_translate("SemiAutomaticClassificationPlugin", "<html><head/><body><p>Enter class values separated by , or -</p></body></html>"))
self.label_203.setText(_translate("SemiAutomaticClassificationPlugin", "Mask class values"))
self.label_186.setText(_translate("SemiAutomaticClassificationPlugin", "Select the classification"))
self.toolButton_reload_23.setToolTip(_translate("SemiAutomaticClassificationPlugin", "<html><head/><body><p >Refresh list</p></body></html>"))
self.toolButton_reload_23.setText(_translate("SemiAutomaticClassificationPlugin", "Plot"))
self.label_140.setText(_translate("SemiAutomaticClassificationPlugin", "Output name prefix"))
self.mask_output_name_lineEdit.setToolTip(_translate("SemiAutomaticClassificationPlugin", "<html><head/><body><p>Output name prefix</p></body></html>"))
self.mask_output_name_lineEdit.setText(_translate("SemiAutomaticClassificationPlugin", "mask"))
self.cloud_buffer_spinBox.setToolTip(_translate("SemiAutomaticClassificationPlugin", "<html><head/><body><p>Size in pixels</p></body></html>"))
self.cloud_buffer_checkBox.setToolTip(_translate("SemiAutomaticClassificationPlugin", "<html><head/><body><p>If checked, create a buffer for class values</p></body></html>"))
self.cloud_buffer_checkBox.setText(_translate("SemiAutomaticClassificationPlugin", "Use buffer of pixel size"))
self.nodata_spinBox_11.setToolTip(_translate("SemiAutomaticClassificationPlugin", "<html><head/><body><p>NoData value</p></body></html>"))
self.label_141.setText(_translate("SemiAutomaticClassificationPlugin", "Output NoData value"))
self.cloud_mask_toolButton.setToolTip(_translate("SemiAutomaticClassificationPlugin", "<html><head/><body><p><span >Run</span></p></body></html>"))
self.cloud_mask_toolButton.setText(_translate("SemiAutomaticClassificationPlugin", " RUN"))
self.cloud_masking.setToolTip(_translate("SemiAutomaticClassificationPlugin", "<html><head/><body><p>Function to Batch</p></body></html>"))
self.cloud_masking.setText(_translate("SemiAutomaticClassificationPlugin", " BATCH"))
self.label_185.setText(_translate("SemiAutomaticClassificationPlugin", " Run"))
self.label_138.setText(_translate("SemiAutomaticClassificationPlugin", " Mask of band set"))
self.tabWidget_preprocessing.setTabText(self.tabWidget_preprocessing.indexOf(self.tab_cloud_mask), _translate("SemiAutomaticClassificationPlugin", "Cloud masking"))
self.GOES_nodata_spinBox.setToolTip(_translate("SemiAutomaticClassificationPlugin", "<html><head/><body><p>No data value</p></body></html>"))
self.GOES_nodata_checkBox.setToolTip(_translate("SemiAutomaticClassificationPlugin", "<html><head/><body><p>If checked, pixels equal to NoData value will be excluded from the output raster</p></body></html>"))
self.GOES_nodata_checkBox.setText(_translate("SemiAutomaticClassificationPlugin", "Use value as NoData"))
self.label_273.setText(_translate("SemiAutomaticClassificationPlugin", " GOES conversion"))
self.GOES_create_bandset_checkBox.setToolTip(_translate("SemiAutomaticClassificationPlugin", "<html><head/><body><p>Create the Band set automatically and use the checked Band set tools</p></body></html>"))
self.GOES_create_bandset_checkBox.setText(_translate("SemiAutomaticClassificationPlugin", "Create Band set and use Band set tools"))
self.label_274.setText(_translate("SemiAutomaticClassificationPlugin", "Directory containing GOES bands"))
self.GOES_toolButton_directoryInput.setToolTip(_translate("SemiAutomaticClassificationPlugin", "<html><head/><body><p >Select a directory</p></body></html>"))
self.GOES_toolButton_directoryInput.setText(_translate("SemiAutomaticClassificationPlugin", "Import library"))
self.add_new_bandset_checkBox_7.setToolTip(_translate("SemiAutomaticClassificationPlugin", "<html><head/><body><p>Create a new band set where bands are added</p></body></html>"))
self.add_new_bandset_checkBox_7.setText(_translate("SemiAutomaticClassificationPlugin", "Add bands in a new Band set"))
self.GOES_tableWidget.setToolTip(_translate("SemiAutomaticClassificationPlugin", "<html><head/><body><p>Edit metadata</p></body></html>"))
item = self.GOES_tableWidget.horizontalHeaderItem(0)
item.setText(_translate("SemiAutomaticClassificationPlugin", "Band"))
self.GOES_pushButton_remove_band.setToolTip(_translate("SemiAutomaticClassificationPlugin", "<html><head/><body><p >Delete row</p></body></html>"))
self.GOES_pushButton_remove_band.setText(_translate("SemiAutomaticClassificationPlugin", "Plot"))
self.label_277.setText(_translate("SemiAutomaticClassificationPlugin", "Metadata"))
self.satellite_label_20.setText(_translate("SemiAutomaticClassificationPlugin", "Satellite"))
self.GOES_satellite_lineEdit.setToolTip(_translate("SemiAutomaticClassificationPlugin", "<html><head/><body><p>Satellite (e.g. Sentinel-3A)</p></body></html>"))
self.label_278.setText(_translate("SemiAutomaticClassificationPlugin", " Run"))
self.pushButton_Conversion_8.setToolTip(_translate("SemiAutomaticClassificationPlugin", "<html><head/><body><p><span >Run</span></p></body></html>"))
self.pushButton_Conversion_8.setText(_translate("SemiAutomaticClassificationPlugin", " RUN"))
self.goes_conversion.setToolTip(_translate("SemiAutomaticClassificationPlugin", "<html><head/><body><p>Function to Batch</p></body></html>"))
self.goes_conversion.setText(_translate("SemiAutomaticClassificationPlugin", " BATCH"))
self.tabWidget_preprocessing.setTabText(self.tabWidget_preprocessing.indexOf(self.tab_GOES), _translate("SemiAutomaticClassificationPlugin", "GOES"))
self.label_283.setText(_translate("SemiAutomaticClassificationPlugin", " Run"))
self.label_281.setText(_translate("SemiAutomaticClassificationPlugin", "Matrix file (optional)"))
self.toolButton_input_matrix.setToolTip(_translate("SemiAutomaticClassificationPlugin", "<html><head/><body><p><span >Open a file</span></p></body></html>"))
self.label_279.setText(_translate("SemiAutomaticClassificationPlugin", "Output name prefix"))
self.neighbor_output_name_lineEdit.setToolTip(_translate("SemiAutomaticClassificationPlugin", "<html><head/><body><p>Output name prefix</p></body></html>"))
self.neighbor_output_name_lineEdit.setText(_translate("SemiAutomaticClassificationPlugin", "neighbor"))
self.neighbor_virtual_checkBox.setToolTip(_translate("SemiAutomaticClassificationPlugin", "<html><head/><body><p>If checked, output bands are virtual rasters</p></body></html>"))
self.neighbor_virtual_checkBox.setText(_translate("SemiAutomaticClassificationPlugin", "Create virtual raster output"))
self.neighbor_pixels.setToolTip(_translate("SemiAutomaticClassificationPlugin", "<html><head/><body><p>Function to Batch</p></body></html>"))
self.neighbor_pixels.setText(_translate("SemiAutomaticClassificationPlugin", " BATCH"))
self.class_neighbor_toolButton.setToolTip(_translate("SemiAutomaticClassificationPlugin", "<html><head/><body><p><span >Run</span></p></body></html>"))
self.class_neighbor_toolButton.setText(_translate("SemiAutomaticClassificationPlugin", " RUN"))
self.label_286.setText(_translate("SemiAutomaticClassificationPlugin", " Neighbor pixels"))
self.statistic_lineEdit_2.setToolTip(_translate("SemiAutomaticClassificationPlugin", "<html><head/><body><p>Enter a value</p></body></html>"))
self.statistic_name_combobox_2.setToolTip(_translate("SemiAutomaticClassificationPlugin", "<html><head/><body><p>Select a statistic</p></body></html>"))
self.label_284.setText(_translate("SemiAutomaticClassificationPlugin", "Select a statistic"))
self.label_285.setText(_translate("SemiAutomaticClassificationPlugin", " Statistic"))
self.label_282.setText(_translate("SemiAutomaticClassificationPlugin", "Select input band set"))
self.label_280.setText(_translate("SemiAutomaticClassificationPlugin", "Neighbor distance in pixels"))
self.band_set_comb_spinBox_15.setToolTip(_translate("SemiAutomaticClassificationPlugin", "<html><head/><body><p>Band set number</p></body></html>"))
self.class_neighbor_threshold_spinBox.setToolTip(_translate("SemiAutomaticClassificationPlugin", "<html><head/><body><p>Distance in pixels</p></body></html>"))
self.circular_structure_checkBox.setToolTip(_translate("SemiAutomaticClassificationPlugin", "<html><head/><body><p>If checked, neighbor pixels are calculated inside a circle of radius equal to the distance in pixels</p></body></html>"))
self.circular_structure_checkBox.setText(_translate("SemiAutomaticClassificationPlugin", "Circular"))
self.tabWidget_preprocessing.setTabText(self.tabWidget_preprocessing.indexOf(self.tab_neighbor_pixels), _translate("SemiAutomaticClassificationPlugin", "Neighbor pixels"))
self.SCP_tabs.setTabText(self.SCP_tabs.indexOf(self.tab_preprocessing), _translate("SemiAutomaticClassificationPlugin", "Preprocessing"))
self.label_253.setText(_translate("SemiAutomaticClassificationPlugin", " Run"))
self.calculateBandSetComb_toolButton.setToolTip(_translate("SemiAutomaticClassificationPlugin", "<html><head/><body><p><span >Run</span></p></body></html>"))
self.calculateBandSetComb_toolButton.setText(_translate("SemiAutomaticClassificationPlugin", " RUN"))
self.band_combination.setToolTip(_translate("SemiAutomaticClassificationPlugin", "<html><head/><body><p>Function to Batch</p></body></html>"))
self.band_combination.setText(_translate("SemiAutomaticClassificationPlugin", " BATCH"))
self.label_250.setText(_translate("SemiAutomaticClassificationPlugin", "Select input band set (of classifications)"))
self.band_set_comb_spinBox.setToolTip(_translate("SemiAutomaticClassificationPlugin", "<html><head/><body><p>Band set number</p></body></html>"))
self.label_72.setText(_translate("SemiAutomaticClassificationPlugin", " Combination of band values"))
self.toolBox_band_set_combination.setItemText(self.toolBox_band_set_combination.indexOf(self.page_29), _translate("SemiAutomaticClassificationPlugin", "Input"))
self.band_set_comb_textBrowser.setHtml(_translate("SemiAutomaticClassificationPlugin", "<!DOCTYPE HTML PUBLIC \"-//W3C//DTD HTML 4.0//EN\" \"http://www.w3.org/TR/REC-html40/strict.dtd\">\n"
"<html><head><meta name=\"qrichtext\" content=\"1\" /><style type=\"text/css\">\n"
"p, li { white-space: pre-wrap; }\n"
"</style></head><body style=\" font-family:\'Courier 10 Pitch\'; font-size:10pt; font-weight:400; font-style:normal;\">\n"
"<p style=\"-qt-paragraph-type:empty; margin-top:0px; margin-bottom:0px; margin-left:0px; margin-right:0px; -qt-block-indent:0; text-indent:0px; font-size:9pt;\"><br /></p></body></html>"))
self.toolBox_band_set_combination.setItemText(self.toolBox_band_set_combination.indexOf(self.page_30), _translate("SemiAutomaticClassificationPlugin", "Output"))
self.tabWidget_4.setTabText(self.tabWidget_4.indexOf(self.tab_bandset_combination_2), _translate("SemiAutomaticClassificationPlugin", "Band combination"))
self.label_58.setText(_translate("SemiAutomaticClassificationPlugin", " Principal Components Analysis of band set"))
self.nodata_checkBox_4.setToolTip(_translate("SemiAutomaticClassificationPlugin", "<html><head/><body><p>If checked, pixels equal to NoData value will be excluded from the output raster</p></body></html>"))
self.nodata_checkBox_4.setText(_translate("SemiAutomaticClassificationPlugin", "Use value as NoData"))
self.num_comp_checkBox.setToolTip(_translate("SemiAutomaticClassificationPlugin", "<html><head/><body><p>If checked, calculate this number of components only</p></body></html>"))
self.num_comp_checkBox.setText(_translate("SemiAutomaticClassificationPlugin", "Number of components"))
self.nodata_spinBox_5.setToolTip(_translate("SemiAutomaticClassificationPlugin", "<html><head/><body><p>NoData value</p></body></html>"))
self.pca_Button.setToolTip(_translate("SemiAutomaticClassificationPlugin", "<html><head/><body><p><span >Run</span></p></body></html>"))
self.pca_Button.setText(_translate("SemiAutomaticClassificationPlugin", " RUN"))
self.label_254.setText(_translate("SemiAutomaticClassificationPlugin", "Select input band set"))
self.band_set_comb_spinBox_4.setToolTip(_translate("SemiAutomaticClassificationPlugin", "<html><head/><body><p>Band set number</p></body></html>"))
self.pca_components_spinBox.setToolTip(_translate("SemiAutomaticClassificationPlugin", "<html><head/><body><p>Number of components</p></body></html>"))
self.label_166.setText(_translate("SemiAutomaticClassificationPlugin", " Run"))
self.pca.setToolTip(_translate("SemiAutomaticClassificationPlugin", "<html><head/><body><p>Function to Batch</p></body></html>"))
self.pca.setText(_translate("SemiAutomaticClassificationPlugin", " BATCH"))
self.toolBox_PCA.setItemText(self.toolBox_PCA.indexOf(self.page_16), _translate("SemiAutomaticClassificationPlugin", "Input"))
self.toolBox_PCA.setItemText(self.toolBox_PCA.indexOf(self.page_17), _translate("SemiAutomaticClassificationPlugin", "Output"))
self.tabWidget_4.setTabText(self.tabWidget_4.indexOf(self.PCA_tab), _translate("SemiAutomaticClassificationPlugin", "PCA"))
self.label_78.setText(_translate("SemiAutomaticClassificationPlugin", " Clustering of band set"))
self.isodata_radioButton.setToolTip(_translate("SemiAutomaticClassificationPlugin", "<html><head/><body><p>If checked, use ISODATA</p></body></html>"))
self.isodata_radioButton.setText(_translate("SemiAutomaticClassificationPlugin", "ISODATA"))
self.band_set_comb_spinBox_5.setToolTip(_translate("SemiAutomaticClassificationPlugin", "<html><head/><body><p>Band set number</p></body></html>"))
self.label_230.setText(_translate("SemiAutomaticClassificationPlugin", "Method "))
self.label_255.setText(_translate("SemiAutomaticClassificationPlugin", "Select input band set"))
self.kmeans_radioButton.setToolTip(_translate("SemiAutomaticClassificationPlugin", "<html><head/><body><p>If checked, use K-means</p></body></html>"))
self.kmeans_radioButton.setText(_translate("SemiAutomaticClassificationPlugin", "&K-means "))
self.label_225.setText(_translate("SemiAutomaticClassificationPlugin", "Max number of iterations"))
self.thresh_doubleSpinBox.setToolTip(_translate("SemiAutomaticClassificationPlugin", "<html><head/><body><p>Threshold</p></body></html>"))
self.std_dev_doubleSpinBox.setToolTip(_translate("SemiAutomaticClassificationPlugin", "<html><head/><body><p>Threshold</p></body></html>"))
self.kmean_threshold_checkBox.setToolTip(_translate("SemiAutomaticClassificationPlugin", "<html><head/><body><p>If checked, for K-means: iteration is terminated if distance is lower than threshold; for ISODATA: signatures are merged if distance is greater than threshold</p></body></html>"))
self.kmean_threshold_checkBox.setText(_translate("SemiAutomaticClassificationPlugin", "Distance threshold"))
self.nodata_spinBox_9.setToolTip(_translate("SemiAutomaticClassificationPlugin", "<html><head/><body><p>NoData value</p></body></html>"))
self.kmeans_iter_spinBox.setToolTip(_translate("SemiAutomaticClassificationPlugin", "<html><head/><body><p>Set the maximum number of iterations</p></body></html>"))
self.label_228.setText(_translate("SemiAutomaticClassificationPlugin", "ISODATA max standard deviation"))
self.nodata_checkBox_8.setToolTip(_translate("SemiAutomaticClassificationPlugin", "<html><head/><body><p>If checked, pixels equal to NoData value will be excluded from the output raster</p></body></html>"))
self.nodata_checkBox_8.setText(_translate("SemiAutomaticClassificationPlugin", "Use value as NoData"))
self.kmeans_classes_spinBox.setToolTip(_translate("SemiAutomaticClassificationPlugin", "<html><head/><body><p>Number of classes</p></body></html>"))
self.label_224.setText(_translate("SemiAutomaticClassificationPlugin", "Number of classes"))
self.label_229.setText(_translate("SemiAutomaticClassificationPlugin", "ISODATA minimum class size in pixels"))
self.min_size_class_spinBox.setToolTip(_translate("SemiAutomaticClassificationPlugin", "<html><head/><body><p>Minimum class size in pixels</p></body></html>"))
self.min_distance_radioButton.setToolTip(_translate("SemiAutomaticClassificationPlugin", "<html><head/><body><p>If checked, use Minimum Distance algorithm</p></body></html>"))
self.min_distance_radioButton.setText(_translate("SemiAutomaticClassificationPlugin", "Minimum Distance"))
self.kmean_save_siglist_checkBox.setToolTip(_translate("SemiAutomaticClassificationPlugin", "<html><head/><body><p>If checked, save the resulting signatures to Signature list</p></body></html>"))
self.kmean_save_siglist_checkBox.setText(_translate("SemiAutomaticClassificationPlugin", "Save resulting signatures to Signature list"))
self.label_227.setText(_translate("SemiAutomaticClassificationPlugin", "Distance algorithm"))
self.label_104.setText(_translate("SemiAutomaticClassificationPlugin", " Seed signatures"))
self.kmean_siglist_radioButton.setToolTip(_translate("SemiAutomaticClassificationPlugin", "<html><head/><body><p>If checked, use signatures in Signature list as seed signatures</p></body></html>"))
self.kmean_siglist_radioButton.setText(_translate("SemiAutomaticClassificationPlugin", "Use Signature list as seed signatures"))
self.kmean_randomsiglist_radioButton.setToolTip(_translate("SemiAutomaticClassificationPlugin", "<html><head/><body><p>If checked, calculate seed signatures from random pixels</p></body></html>"))
self.kmean_randomsiglist_radioButton.setText(_translate("SemiAutomaticClassificationPlugin", "Use random seed signatures"))
self.kmean_minmax_radioButton.setToolTip(_translate("SemiAutomaticClassificationPlugin", "<html><head/><body><p>If checked, calculate seed signatures from minimum and maximum values of bands</p></body></html>"))
self.kmean_minmax_radioButton.setText(_translate("SemiAutomaticClassificationPlugin", "Seed signatures from band values"))
self.spectral_angle_map_radioButton.setToolTip(_translate("SemiAutomaticClassificationPlugin", "<html><head/><body><p>If checked, use Spectral Angle Mapping algorithm (only for K-means)</p></body></html>"))
self.spectral_angle_map_radioButton.setText(_translate("SemiAutomaticClassificationPlugin", "Spectral Angle Mapping"))
self.label_179.setText(_translate("SemiAutomaticClassificationPlugin", " Run"))
self.kmeans_Button.setToolTip(_translate("SemiAutomaticClassificationPlugin", "<html><head/><body><p><span >Run</span></p></body></html>"))
self.kmeans_Button.setText(_translate("SemiAutomaticClassificationPlugin", " RUN"))
self.clustering.setToolTip(_translate("SemiAutomaticClassificationPlugin", "<html><head/><body><p>Function to Batch</p></body></html>"))
self.clustering.setText(_translate("SemiAutomaticClassificationPlugin", " BATCH"))
self.toolBox_kmeans.setItemText(self.toolBox_kmeans.indexOf(self.page_18), _translate("SemiAutomaticClassificationPlugin", "Input"))
self.toolBox_kmeans.setItemText(self.toolBox_kmeans.indexOf(self.page_23), _translate("SemiAutomaticClassificationPlugin", "Output"))
self.tabWidget_4.setTabText(self.tabWidget_4.indexOf(self.tab_kmeans), _translate("SemiAutomaticClassificationPlugin", "Clustering"))
self.min_distance_radioButton_2.setToolTip(_translate("SemiAutomaticClassificationPlugin", "<html><head/><body><p>If checked, use Minimum Distance algorithm</p></body></html>"))
self.min_distance_radioButton_2.setText(_translate("SemiAutomaticClassificationPlugin", "Minimum Distance"))
self.label_231.setText(_translate("SemiAutomaticClassificationPlugin", "Distance algorithm"))
self.spectral_angle_map_radioButton_2.setToolTip(_translate("SemiAutomaticClassificationPlugin", "<html><head/><body><p>If checked, use Spectral Angle Mapping algorithm (only for K-means)</p></body></html>"))
self.spectral_angle_map_radioButton_2.setText(_translate("SemiAutomaticClassificationPlugin", "Spectral Angle Mapping"))
self.label_137.setText(_translate("SemiAutomaticClassificationPlugin", "Spectral distance of band sets"))
self.distance_threshold_checkBox.setToolTip(_translate("SemiAutomaticClassificationPlugin", "<html><head/><body><p>If checked, calculate a raster of changes where distance is above threshold</p></body></html>"))
self.distance_threshold_checkBox.setText(_translate("SemiAutomaticClassificationPlugin", "Distance threshold"))
self.thresh_doubleSpinBox_2.setToolTip(_translate("SemiAutomaticClassificationPlugin", "<html><head/><body><p>Threshold</p></body></html>"))
self.label_256.setText(_translate("SemiAutomaticClassificationPlugin", "Select first input band set"))
self.band_set_comb_spinBox_7.setToolTip(_translate("SemiAutomaticClassificationPlugin", "<html><head/><body><p>Band set number</p></body></html>"))
self.label_257.setText(_translate("SemiAutomaticClassificationPlugin", "Select second input band set"))
self.band_set_comb_spinBox_6.setToolTip(_translate("SemiAutomaticClassificationPlugin", "<html><head/><body><p>Band set number</p></body></html>"))
self.label_183.setText(_translate("SemiAutomaticClassificationPlugin", " Run"))
self.spectral_distance_bandsets_toolButton.setToolTip(_translate("SemiAutomaticClassificationPlugin", "<html><head/><body><p><span >Run</span></p></body></html>"))
self.spectral_distance_bandsets_toolButton.setText(_translate("SemiAutomaticClassificationPlugin", " RUN"))
self.spectral_distance.setToolTip(_translate("SemiAutomaticClassificationPlugin", "<html><head/><body><p>Function to Batch</p></body></html>"))
self.spectral_distance.setText(_translate("SemiAutomaticClassificationPlugin", " BATCH"))
self.tabWidget_4.setTabText(self.tabWidget_4.indexOf(self.tab_spectral_dist), _translate("SemiAutomaticClassificationPlugin", "Spectral distance"))
self.label_32.setText(_translate("SemiAutomaticClassificationPlugin", "Use"))
self.macroclass_checkBox.setToolTip(_translate("SemiAutomaticClassificationPlugin", "<html><head/><body><p>Use the ID of macroclasses for the classification</p></body></html>"))
self.macroclass_checkBox.setText(_translate("SemiAutomaticClassificationPlugin", "MC ID"))
self.class_checkBox.setToolTip(_translate("SemiAutomaticClassificationPlugin", "<html><head/><body><p>Use the ID of classes for the classification</p></body></html>"))
self.class_checkBox.setText(_translate("SemiAutomaticClassificationPlugin", "C ID"))
self.algorithm_weight_button.setToolTip(_translate("SemiAutomaticClassificationPlugin", "<html><head/><body><p>Open tab Algorithm band weight</p></body></html>"))
self.algorithm_weight_button.setText(_translate("SemiAutomaticClassificationPlugin", "W"))
self.algorithm_combo.setToolTip(_translate("SemiAutomaticClassificationPlugin", "<html><head/><body><p>Select a classification algorithm</p></body></html>"))
self.algorithm_combo.setItemText(0, _translate("SemiAutomaticClassificationPlugin", "Minimum Distance"))
self.algorithm_combo.setItemText(1, _translate("SemiAutomaticClassificationPlugin", "Maximum Likelihood"))
self.algorithm_combo.setItemText(2, _translate("SemiAutomaticClassificationPlugin", "Spectral Angle Mapping"))
self.band_set_comb_spinBox_12.setToolTip(_translate("SemiAutomaticClassificationPlugin", "<html><head/><body><p>Band set number</p></body></html>"))
self.label_261.setText(_translate("SemiAutomaticClassificationPlugin", "Select input band set"))
self.label_240.setText(_translate("SemiAutomaticClassificationPlugin", " Algorithm"))
self.alg_threshold_SpinBox.setToolTip(_translate("SemiAutomaticClassificationPlugin", "<html><head/><body><p>Set a classification threshold for all signatures</p></body></html>"))
self.algorithm_threshold_button.setToolTip(_translate("SemiAutomaticClassificationPlugin", "<html><head/><body><p>Open tab Signature threshold</p></body></html>"))
self.algorithm_threshold_button.setText(_translate("SemiAutomaticClassificationPlugin", "W"))
self.label_234.setText(_translate("SemiAutomaticClassificationPlugin", "Threshold"))
self.label_243.setText(_translate("SemiAutomaticClassificationPlugin", "Classification"))
self.LC_signature_checkBox.setToolTip(_translate("SemiAutomaticClassificationPlugin", "<html><head/><body><p>If checked, the Land Cover Signature Classification is used</p></body></html>"))
self.LC_signature_checkBox.setText(_translate("SemiAutomaticClassificationPlugin", "LCS"))
self.label_235.setText(_translate("SemiAutomaticClassificationPlugin", "Use"))
self.LC_signature_button.setToolTip(_translate("SemiAutomaticClassificationPlugin", "<html><head/><body><p>Open tab LCS threshold</p></body></html>"))
self.LCS_leave_unclassified_checkBox.setToolTip(_translate("SemiAutomaticClassificationPlugin", "<html><head/><body><p>If checked, the selected Algorithm is used only for class overlapping pixels of the Land Cover Signature Classification</p></body></html>"))
self.LCS_leave_unclassified_checkBox.setText(_translate("SemiAutomaticClassificationPlugin", "only overlap"))
self.label_241.setText(_translate("SemiAutomaticClassificationPlugin", " Land Cover Signature Classification"))
self.LCS_class_algorithm_checkBox.setToolTip(_translate("SemiAutomaticClassificationPlugin", "<html><head/><body><p>If checked, the selected Algorithm is used for unclassified pixels of the Land Cover Signature Classification</p></body></html>"))
self.LCS_class_algorithm_checkBox.setText(_translate("SemiAutomaticClassificationPlugin", "Algorithm"))
self.label_242.setText(_translate("SemiAutomaticClassificationPlugin", " Classification output"))
self.resetQmlButton.setToolTip(_translate("SemiAutomaticClassificationPlugin", "<html><head/><body><p><span >Reset</span></p></body></html>"))
self.resetQmlButton.setText(_translate("SemiAutomaticClassificationPlugin", "Import library"))
self.label_238.setText(_translate("SemiAutomaticClassificationPlugin", "Load qml style"))
self.qml_Button.setToolTip(_translate("SemiAutomaticClassificationPlugin", "<html><head/><body><p><span >Select qml style</span></p></body></html>"))
self.qml_Button.setText(_translate("SemiAutomaticClassificationPlugin", "Import library"))
self.qml_lineEdit.setToolTip(_translate("SemiAutomaticClassificationPlugin", "<html><head/><body><p>Qml file path</p></body></html>"))
self.mask_checkBox.setToolTip(_translate("SemiAutomaticClassificationPlugin", "<html><head/><body><p>Select an optional mask vector</p></body></html>"))
self.mask_checkBox.setText(_translate("SemiAutomaticClassificationPlugin", "Apply mask"))
self.resetMaskButton.setToolTip(_translate("SemiAutomaticClassificationPlugin", "<html><head/><body><p><span >Reset</span></p></body></html>"))
self.resetMaskButton.setText(_translate("SemiAutomaticClassificationPlugin", "Import library"))
self.mask_lineEdit.setToolTip(_translate("SemiAutomaticClassificationPlugin", "<html><head/><body><p>Path of the optional mask shapefile</p></body></html>"))
self.vector_output_checkBox.setToolTip(_translate("SemiAutomaticClassificationPlugin", "<html><head/><body><p>Create a classification shapefile after the classification process</p></body></html>"))
self.vector_output_checkBox.setText(_translate("SemiAutomaticClassificationPlugin", "Create vector"))
self.report_checkBox.setToolTip(_translate("SemiAutomaticClassificationPlugin", "<html><head/><body><p>Calculate a classification report</p></body></html>"))
self.report_checkBox.setText(_translate("SemiAutomaticClassificationPlugin", "Classification report"))
self.alg_files_checkBox.setToolTip(_translate("SemiAutomaticClassificationPlugin", "<html><head/><body><p>If enabled, the rasters calculated by the classification algorithm (one per signature) are saved along with the classification</p></body></html>"))
self.alg_files_checkBox.setText(_translate("SemiAutomaticClassificationPlugin", "Save algorithm files"))
self.button_classification.setToolTip(_translate("SemiAutomaticClassificationPlugin", "<html><head/><body><p><span >Run</span></p></body></html>"))
self.button_classification.setText(_translate("SemiAutomaticClassificationPlugin", " RUN"))
self.label_239.setText(_translate("SemiAutomaticClassificationPlugin", " Run"))
self.classification.setToolTip(_translate("SemiAutomaticClassificationPlugin", "<html><head/><body><p>Function to Batch</p></body></html>"))
self.classification.setText(_translate("SemiAutomaticClassificationPlugin", " BATCH"))
self.tabWidget_4.setTabText(self.tabWidget_4.indexOf(self.tab_classification), _translate("SemiAutomaticClassificationPlugin", "Classification"))
self.label_233.setText(_translate("SemiAutomaticClassificationPlugin", "Use"))
self.macroclass_checkBox_rf.setToolTip(_translate("SemiAutomaticClassificationPlugin", "<html><head/><body><p>Use the ID of macroclasses for the classification</p></body></html>"))
self.macroclass_checkBox_rf.setText(_translate("SemiAutomaticClassificationPlugin", "MC ID"))
self.class_checkBox_rf.setToolTip(_translate("SemiAutomaticClassificationPlugin", "<html><head/><body><p>Use the ID of classes for the classification</p></body></html>"))
self.class_checkBox_rf.setText(_translate("SemiAutomaticClassificationPlugin", "C ID"))
self.band_set_comb_spinBox_13.setToolTip(_translate("SemiAutomaticClassificationPlugin", "<html><head/><body><p>Band set number</p></body></html>"))
self.label_262.setText(_translate("SemiAutomaticClassificationPlugin", "Select input band set"))
self.label_245.setText(_translate("SemiAutomaticClassificationPlugin", "Random Forest classification (ESA SNAP software required)"))
self.number_trees_SpinBox.setToolTip(_translate("SemiAutomaticClassificationPlugin", "<html><head/><body><p>Number of trees</p></body></html>"))
self.label_237.setText(_translate("SemiAutomaticClassificationPlugin", "Number of trees"))
self.number_training_samples_SpinBox.setToolTip(_translate("SemiAutomaticClassificationPlugin", "<html><head/><body><p>Number of training samples</p></body></html>"))
self.label_236.setText(_translate("SemiAutomaticClassificationPlugin", "Number of training samples "))
self.evaluate_classifier_checkBox.setToolTip(_translate("SemiAutomaticClassificationPlugin", "<html><head/><body><p>Evaluate classifier</p></body></html>"))
self.evaluate_classifier_checkBox.setText(_translate("SemiAutomaticClassificationPlugin", "Evaluate classifier"))
self.evaluate_feature_power_set_checkBox.setToolTip(_translate("SemiAutomaticClassificationPlugin", "<html><head/><body><p>If checked, evaluate feature power set</p></body></html>"))
self.evaluate_feature_power_set_checkBox.setText(_translate("SemiAutomaticClassificationPlugin", "Evaluate feature power set"))
self.label_248.setText(_translate("SemiAutomaticClassificationPlugin", "Max"))
self.rf_power_min_SpinBox.setToolTip(_translate("SemiAutomaticClassificationPlugin", "<html><head/><body><p>Minumum power</p></body></html>"))
self.label_247.setText(_translate("SemiAutomaticClassificationPlugin", "Min"))
self.rf_power_max_SpinBox.setToolTip(_translate("SemiAutomaticClassificationPlugin", "<html><head/><body><p>Maximum power</p></body></html>"))
self.save_classifier_checkBox.setToolTip(_translate("SemiAutomaticClassificationPlugin", "<html><head/><body><p>If checked, save classifier</p></body></html>"))
self.save_classifier_checkBox.setText(_translate("SemiAutomaticClassificationPlugin", "Save classifier"))
self.label_244.setText(_translate("SemiAutomaticClassificationPlugin", "Load classifier"))
self.classifier_Button.setToolTip(_translate("SemiAutomaticClassificationPlugin", "<html><head/><body><p>Select a previosly saved classifier</p></body></html>"))
self.classifier_Button.setText(_translate("SemiAutomaticClassificationPlugin", "Import library"))
self.classifier_lineEdit_.setToolTip(_translate("SemiAutomaticClassificationPlugin", "<html><head/><body><p>Classifier file path</p></body></html>"))
self.resetClassifierButton.setToolTip(_translate("SemiAutomaticClassificationPlugin", "<html><head/><body><p><span >Reset</span></p></body></html>"))
self.resetClassifierButton.setText(_translate("SemiAutomaticClassificationPlugin", "Import library"))
self.button_random_forest.setToolTip(_translate("SemiAutomaticClassificationPlugin", "<html><head/><body><p><span >Run</span></p></body></html>"))
self.button_random_forest.setText(_translate("SemiAutomaticClassificationPlugin", " RUN"))
self.label_246.setText(_translate("SemiAutomaticClassificationPlugin", " Run"))
self.random_forest.setToolTip(_translate("SemiAutomaticClassificationPlugin", "<html><head/><body><p>Function to Batch</p></body></html>"))
self.random_forest.setText(_translate("SemiAutomaticClassificationPlugin", " BATCH"))
self.toolBox_random_forest.setItemText(self.toolBox_random_forest.indexOf(self.page_21), _translate("SemiAutomaticClassificationPlugin", "Input"))
self.toolBox_random_forest.setItemText(self.toolBox_random_forest.indexOf(self.page_25), _translate("SemiAutomaticClassificationPlugin", "Output"))
self.tabWidget_4.setTabText(self.tabWidget_4.indexOf(self.tab_random_forest), _translate("SemiAutomaticClassificationPlugin", "Random forest"))
self.SCP_tabs.setTabText(self.SCP_tabs.indexOf(self.tab_band_processing), _translate("SemiAutomaticClassificationPlugin", "Band processing"))
self.label_33.setText(_translate("SemiAutomaticClassificationPlugin", "Select the classification to assess"))
self.label_34.setText(_translate("SemiAutomaticClassificationPlugin", "Select the reference vector or raster"))
self.classification_name_combo.setToolTip(_translate("SemiAutomaticClassificationPlugin", "<html><head/><body><p>Select the classification to assess</p></body></html>"))
self.buttonReload_shape_4.setToolTip(_translate("SemiAutomaticClassificationPlugin", "<html><head/><body><p >Refresh list</p></body></html>"))
self.buttonReload_shape_4.setText(_translate("SemiAutomaticClassificationPlugin", "Plot"))
self.toolButton_reload_4.setToolTip(_translate("SemiAutomaticClassificationPlugin", "<html><head/><body><p >Refresh list</p></body></html>"))
self.toolButton_reload_4.setText(_translate("SemiAutomaticClassificationPlugin", "Plot"))
self.reference_name_combo.setToolTip(_translate("SemiAutomaticClassificationPlugin", "<html><head/><body><p>Select the reference vector or raster</p></body></html>"))
self.label_145.setText(_translate("SemiAutomaticClassificationPlugin", " Accuracy assessment"))
self.label_82.setText(_translate("SemiAutomaticClassificationPlugin", "Vector field"))
self.class_field_comboBox.setToolTip(_translate("SemiAutomaticClassificationPlugin", "<html><head/><body><p>Select the field of the classification code </p></body></html>"))
self.nodata_checkBox_11.setToolTip(_translate("SemiAutomaticClassificationPlugin", "<html><head/><body><p>If checked, pixels equal to NoData value will be excluded from the output raster</p></body></html>"))
self.nodata_checkBox_11.setText(_translate("SemiAutomaticClassificationPlugin", "Use value as NoData"))
self.nodata_spinBox_15.setToolTip(_translate("SemiAutomaticClassificationPlugin", "<html><head/><body><p>NoData value</p></body></html>"))
self.label_168.setText(_translate("SemiAutomaticClassificationPlugin", " Run"))
self.calculateMatrix_toolButton.setToolTip(_translate("SemiAutomaticClassificationPlugin", "<html><head/><body><p><span >Run</span></p></body></html>"))
self.calculateMatrix_toolButton.setText(_translate("SemiAutomaticClassificationPlugin", " RUN"))
self.accuracy.setToolTip(_translate("SemiAutomaticClassificationPlugin", "<html><head/><body><p>Function to Batch</p></body></html>"))
self.accuracy.setText(_translate("SemiAutomaticClassificationPlugin", " BATCH"))
self.toolBox_accuracy.setItemText(self.toolBox_accuracy.indexOf(self.page_10), _translate("SemiAutomaticClassificationPlugin", "Input"))
self.error_matrix_textBrowser.setHtml(_translate("SemiAutomaticClassificationPlugin", "<!DOCTYPE HTML PUBLIC \"-//W3C//DTD HTML 4.0//EN\" \"http://www.w3.org/TR/REC-html40/strict.dtd\">\n"
"<html><head><meta name=\"qrichtext\" content=\"1\" /><style type=\"text/css\">\n"
"p, li { white-space: pre-wrap; }\n"
"</style></head><body style=\" font-family:\'Courier 10 Pitch\'; font-size:10pt; font-weight:400; font-style:normal;\">\n"
"<p style=\"-qt-paragraph-type:empty; margin-top:0px; margin-bottom:0px; margin-left:0px; margin-right:0px; -qt-block-indent:0; text-indent:0px; font-size:9pt;\"><br /></p></body></html>"))
self.toolBox_accuracy.setItemText(self.toolBox_accuracy.indexOf(self.page_11), _translate("SemiAutomaticClassificationPlugin", "Output"))
self.tabWidget_2.setTabText(self.tabWidget_2.indexOf(self.tab_accuracy), _translate("SemiAutomaticClassificationPlugin", "Accuracy"))
self.mask_unchanged_checkBox.setToolTip(_translate("SemiAutomaticClassificationPlugin", "<html><head/><body><p align=\"justify\">If enabled, pixels having the same values in both classifications will be reported; if not enabled, 0 value is set for unchanged pixels</p></body></html>"))
self.mask_unchanged_checkBox.setText(_translate("SemiAutomaticClassificationPlugin", "Report unchanged pixels"))
self.classification_reference_name_combo.setToolTip(_translate("SemiAutomaticClassificationPlugin", "<html><head/><body><p>Select the reference classification raster</p></body></html>"))
self.label_40.setText(_translate("SemiAutomaticClassificationPlugin", "Select the new classification"))
self.label_38.setText(_translate("SemiAutomaticClassificationPlugin", "Select the reference classification"))
self.new_classification_name_combo.setToolTip(_translate("SemiAutomaticClassificationPlugin", "<html><head/><body><p>Select a new raster to be compared with the reference raster</p></body></html>"))
self.toolButton_reload_5.setToolTip(_translate("SemiAutomaticClassificationPlugin", "<html><head/><body><p >Refresh list</p></body></html>"))
self.toolButton_reload_5.setText(_translate("SemiAutomaticClassificationPlugin", "Plot"))
self.toolButton_reload_6.setToolTip(_translate("SemiAutomaticClassificationPlugin", "<html><head/><body><p >Refresh list</p></body></html>"))
self.toolButton_reload_6.setText(_translate("SemiAutomaticClassificationPlugin", "Plot"))
self.label_116.setText(_translate("SemiAutomaticClassificationPlugin", " Land cover change"))
self.label_169.setText(_translate("SemiAutomaticClassificationPlugin", " Run"))
self.calculateLandCoverChange_toolButton.setToolTip(_translate("SemiAutomaticClassificationPlugin", "<html><head/><body><p><span >Run</span></p></body></html>"))
self.calculateLandCoverChange_toolButton.setText(_translate("SemiAutomaticClassificationPlugin", " RUN"))
self.land_cover_change.setToolTip(_translate("SemiAutomaticClassificationPlugin", "<html><head/><body><p>Function to Batch</p></body></html>"))
self.land_cover_change.setText(_translate("SemiAutomaticClassificationPlugin", " BATCH"))
self.toolBox_landCoverChange.setItemText(self.toolBox_landCoverChange.indexOf(self.page_12), _translate("SemiAutomaticClassificationPlugin", "Input"))
self.toolBox_landCoverChange.setItemText(self.toolBox_landCoverChange.indexOf(self.page_13), _translate("SemiAutomaticClassificationPlugin", "Output"))
self.tabWidget_2.setTabText(self.tabWidget_2.indexOf(self.tab_landCoverChange), _translate("SemiAutomaticClassificationPlugin", "Land cover change"))
self.classification_report_name_combo.setToolTip(_translate("SemiAutomaticClassificationPlugin", "<html><head/><body><p>Select the classification raster</p></body></html>"))
self.label_44.setText(_translate("SemiAutomaticClassificationPlugin", "Select the classification"))
self.nodata_checkBox.setToolTip(_translate("SemiAutomaticClassificationPlugin", "<html><head/><body><p>If checked, pixels equal to NoData value will be excluded from the report</p></body></html>"))
self.nodata_checkBox.setText(_translate("SemiAutomaticClassificationPlugin", "Use value as NoData"))
self.nodata_spinBox_2.setToolTip(_translate("SemiAutomaticClassificationPlugin", "<html><head/><body><p>NoData value</p></body></html>"))
self.toolButton_reload_10.setToolTip(_translate("SemiAutomaticClassificationPlugin", "<html><head/><body><p >Refresh list</p></body></html>"))
self.toolButton_reload_10.setText(_translate("SemiAutomaticClassificationPlugin", "Plot"))
self.label_170.setText(_translate("SemiAutomaticClassificationPlugin", " Run"))
self.calculateReport_toolButton.setToolTip(_translate("SemiAutomaticClassificationPlugin", "<html><head/><body><p><span >Run</span></p></body></html>"))
self.calculateReport_toolButton.setText(_translate("SemiAutomaticClassificationPlugin", " RUN"))
self.classification_report.setToolTip(_translate("SemiAutomaticClassificationPlugin", "<html><head/><body><p>Function to Batch</p></body></html>"))
self.classification_report.setText(_translate("SemiAutomaticClassificationPlugin", " BATCH"))
self.label_148.setText(_translate("SemiAutomaticClassificationPlugin", " Classification report"))
self.toolBox_class_report.setItemText(self.toolBox_class_report.indexOf(self.page_14), _translate("SemiAutomaticClassificationPlugin", "Input"))
self.toolBox_class_report.setItemText(self.toolBox_class_report.indexOf(self.page_15), _translate("SemiAutomaticClassificationPlugin", "Output"))
self.tabWidget_2.setTabText(self.tabWidget_2.indexOf(self.tab_class_report), _translate("SemiAutomaticClassificationPlugin", "Classification report"))
self.label_187.setText(_translate("SemiAutomaticClassificationPlugin", " Cross classification"))
self.nodata_checkBox_6.setToolTip(_translate("SemiAutomaticClassificationPlugin", "<html><head/><body><p>If checked, pixels equal to NoData value will be excluded from the output raster</p></body></html>"))
self.nodata_checkBox_6.setText(_translate("SemiAutomaticClassificationPlugin", "Use value as NoData"))
self.nodata_spinBox_7.setToolTip(_translate("SemiAutomaticClassificationPlugin", "<html><head/><body><p>NoData value</p></body></html>"))
self.label_197.setText(_translate("SemiAutomaticClassificationPlugin", "Select the classification"))
self.label_199.setText(_translate("SemiAutomaticClassificationPlugin", "Vector field"))
self.class_field_comboBox_2.setToolTip(_translate("SemiAutomaticClassificationPlugin", "<html><head/><body><p>Select the vector field</p></body></html>"))
self.toolButton_reload_21.setToolTip(_translate("SemiAutomaticClassificationPlugin", "<html><head/><body><p >Refresh list</p></body></html>"))
self.toolButton_reload_21.setText(_translate("SemiAutomaticClassificationPlugin", "Plot"))
self.classification_name_combo_2.setToolTip(_translate("SemiAutomaticClassificationPlugin", "<html><head/><body><p>Select the classification</p></body></html>"))
self.buttonReload_shape_5.setToolTip(_translate("SemiAutomaticClassificationPlugin", "<html><head/><body><p >Refresh list</p></body></html>"))
self.buttonReload_shape_5.setText(_translate("SemiAutomaticClassificationPlugin", "Plot"))
self.reference_name_combo_2.setToolTip(_translate("SemiAutomaticClassificationPlugin", "<html><head/><body><p>Select the reference vector or raster</p></body></html>"))
self.label_198.setText(_translate("SemiAutomaticClassificationPlugin", "Select the reference vector or raster"))
self.label_200.setText(_translate("SemiAutomaticClassificationPlugin", " Run"))
self.calculatecrossClass_toolButton.setToolTip(_translate("SemiAutomaticClassificationPlugin", "<html><head/><body><p><span >Run</span></p></body></html>"))
self.calculatecrossClass_toolButton.setText(_translate("SemiAutomaticClassificationPlugin", " RUN"))
self.cross_classification.setToolTip(_translate("SemiAutomaticClassificationPlugin", "<html><head/><body><p>Function to Batch</p></body></html>"))
self.cross_classification.setText(_translate("SemiAutomaticClassificationPlugin", " BATCH"))
self.toolBox_cross_classification.setItemText(self.toolBox_cross_classification.indexOf(self.page_19), _translate("SemiAutomaticClassificationPlugin", "Input"))
self.cross_matrix_textBrowser.setHtml(_translate("SemiAutomaticClassificationPlugin", "<!DOCTYPE HTML PUBLIC \"-//W3C//DTD HTML 4.0//EN\" \"http://www.w3.org/TR/REC-html40/strict.dtd\">\n"
"<html><head><meta name=\"qrichtext\" content=\"1\" /><style type=\"text/css\">\n"
"p, li { white-space: pre-wrap; }\n"
"</style></head><body style=\" font-family:\'Courier 10 Pitch\'; font-size:10pt; font-weight:400; font-style:normal;\">\n"
"<p style=\"-qt-paragraph-type:empty; margin-top:0px; margin-bottom:0px; margin-left:0px; margin-right:0px; -qt-block-indent:0; text-indent:0px; font-size:9pt;\"><br /></p></body></html>"))
self.toolBox_cross_classification.setItemText(self.toolBox_cross_classification.indexOf(self.page_22), _translate("SemiAutomaticClassificationPlugin", "Output"))
self.tabWidget_2.setTabText(self.tabWidget_2.indexOf(self.tab_cross_classification), _translate("SemiAutomaticClassificationPlugin", "Cross classification"))
self.label_188.setText(_translate("SemiAutomaticClassificationPlugin", " Class signature"))
self.classification_name_combo_3.setToolTip(_translate("SemiAutomaticClassificationPlugin", "<html><head/><body><p>Select the classification</p></body></html>"))
self.label_201.setText(_translate("SemiAutomaticClassificationPlugin", "Select the classification"))
self.toolButton_reload_22.setToolTip(_translate("SemiAutomaticClassificationPlugin", "<html><head/><body><p >Refresh list</p></body></html>"))
self.toolButton_reload_22.setText(_translate("SemiAutomaticClassificationPlugin", "Plot"))
self.label_259.setText(_translate("SemiAutomaticClassificationPlugin", "Select input band set"))
self.band_set_comb_spinBox_8.setToolTip(_translate("SemiAutomaticClassificationPlugin", "<html><head/><body><p>Band set number</p></body></html>"))
self.label_184.setText(_translate("SemiAutomaticClassificationPlugin", " Run"))
self.class_signature_Button.setToolTip(_translate("SemiAutomaticClassificationPlugin", "<html><head/><body><p><span >Run</span></p></body></html>"))
self.class_signature_Button.setText(_translate("SemiAutomaticClassificationPlugin", " RUN"))
self.class_signature_save_siglist_checkBox.setToolTip(_translate("SemiAutomaticClassificationPlugin", "<html><head/><body><p>If checked, save the resulting signatures to Signature list</p></body></html>"))
self.class_signature_save_siglist_checkBox.setText(_translate("SemiAutomaticClassificationPlugin", "Save resulting signatures to Signature list"))
self.class_signature.setToolTip(_translate("SemiAutomaticClassificationPlugin", "<html><head/><body><p>Function to Batch</p></body></html>"))
self.class_signature.setText(_translate("SemiAutomaticClassificationPlugin", " BATCH"))
self.toolBox_class_signature.setItemText(self.toolBox_class_signature.indexOf(self.page_20), _translate("SemiAutomaticClassificationPlugin", "Input"))
self.toolBox_class_signature.setItemText(self.toolBox_class_signature.indexOf(self.page_24), _translate("SemiAutomaticClassificationPlugin", "Output"))
self.tabWidget_2.setTabText(self.tabWidget_2.indexOf(self.tab_class_signature), _translate("SemiAutomaticClassificationPlugin", "Class signature"))
self.label_189.setText(_translate("SemiAutomaticClassificationPlugin", " Classification to vector"))
self.label_63.setText(_translate("SemiAutomaticClassificationPlugin", "Select the classification"))
self.classification_vector_name_combo.setToolTip(_translate("SemiAutomaticClassificationPlugin", "<html><head/><body><p>Select the classification raster</p></body></html>"))
self.toolButton_reload_11.setToolTip(_translate("SemiAutomaticClassificationPlugin", "<html><head/><body><p >Refresh list</p></body></html>"))
self.toolButton_reload_11.setText(_translate("SemiAutomaticClassificationPlugin", "Plot"))
self.class_macroclass_comboBox.setToolTip(_translate("SemiAutomaticClassificationPlugin", "<html><head/><body><p>Select the code field</p></body></html>"))
self.class_macroclass_comboBox.setItemText(0, _translate("SemiAutomaticClassificationPlugin", "C_ID"))
self.class_macroclass_comboBox.setItemText(1, _translate("SemiAutomaticClassificationPlugin", "MC_ID"))
self.use_class_code_checkBox.setToolTip(_translate("SemiAutomaticClassificationPlugin", "<html><head/><body><p>Use the codes from Signature list table for vector symbology</p></body></html>"))
self.use_class_code_checkBox.setText(_translate("SemiAutomaticClassificationPlugin", "Use code from Signature list"))
self.label_49.setText(_translate("SemiAutomaticClassificationPlugin", " Symbology"))
self.dissolve_output_checkBox.setToolTip(_translate("SemiAutomaticClassificationPlugin", "<html><head/><body><p>If checked, the polygons are dissolved to avoid discontinuity between processed blocks (slower)</p></body></html>"))
self.dissolve_output_checkBox.setText(_translate("SemiAutomaticClassificationPlugin", "Dissolve output"))
self.convert_toolButton.setToolTip(_translate("SemiAutomaticClassificationPlugin", "<html><head/><body><p><span >Run</span></p></body></html>"))
self.convert_toolButton.setText(_translate("SemiAutomaticClassificationPlugin", " RUN"))
self.label_171.setText(_translate("SemiAutomaticClassificationPlugin", " Run"))
self.classification_to_vector.setToolTip(_translate("SemiAutomaticClassificationPlugin", "<html><head/><body><p>Function to Batch</p></body></html>"))
self.classification_to_vector.setText(_translate("SemiAutomaticClassificationPlugin", " BATCH"))
self.tabWidget_2.setTabText(self.tabWidget_2.indexOf(self.tab_class_to_vector), _translate("SemiAutomaticClassificationPlugin", "Classification to vector"))
self.label_65.setText(_translate("SemiAutomaticClassificationPlugin", "Select the classification"))
self.reclassification_name_combo.setToolTip(_translate("SemiAutomaticClassificationPlugin", "<html><head/><body><p>Select the classification raster</p></body></html>"))
self.toolButton_reload_12.setToolTip(_translate("SemiAutomaticClassificationPlugin", "<html><head/><body><p >Refresh list</p></body></html>"))
self.toolButton_reload_12.setText(_translate("SemiAutomaticClassificationPlugin", "Plot"))
self.label_190.setText(_translate("SemiAutomaticClassificationPlugin", " Reclassification"))
self.calculate_unique_values_toolButton.setToolTip(_translate("SemiAutomaticClassificationPlugin", "<html><head/><body><p>Calculate unique values</p></body></html>"))
self.CID_MCID_code_checkBox.setToolTip(_translate("SemiAutomaticClassificationPlugin", "<html><head/><body><p>Enable this for reclassification from C ID to MC ID; if checked, unique values are calculated from the Signature list, setting old value C ID and new value MC ID</p></body></html>"))
self.CID_MCID_code_checkBox.setText(_translate("SemiAutomaticClassificationPlugin", "calculate C ID to MC ID values"))
self.label_98.setText(_translate("SemiAutomaticClassificationPlugin", "Calculate unique values"))
self.label_54.setText(_translate("SemiAutomaticClassificationPlugin", " Values"))
self.incremental_new_values_toolButton.setToolTip(_translate("SemiAutomaticClassificationPlugin", "<html><head/><body><p>Set incremental new values</p></body></html>"))
self.label_271.setText(_translate("SemiAutomaticClassificationPlugin", "Incremental new values"))
self.reclass_values_tableWidget.setSortingEnabled(False)
item = self.reclass_values_tableWidget.horizontalHeaderItem(0)
item.setText(_translate("SemiAutomaticClassificationPlugin", "Old value"))
item = self.reclass_values_tableWidget.horizontalHeaderItem(1)
item.setText(_translate("SemiAutomaticClassificationPlugin", "New value"))
self.add_value_pushButton.setToolTip(_translate("SemiAutomaticClassificationPlugin", "<html><head/><body><p >Add row</p></body></html>"))
self.add_value_pushButton.setText(_translate("SemiAutomaticClassificationPlugin", "Plot"))
self.remove_row_pushButton.setToolTip(_translate("SemiAutomaticClassificationPlugin", "<html><head/><body><p >Delete row</p></body></html>"))
self.remove_row_pushButton.setText(_translate("SemiAutomaticClassificationPlugin", "Plot"))
self.import_reclass_toolButton.setToolTip(_translate("SemiAutomaticClassificationPlugin", "<html><head/><body><p>Import reclassification table from text file</p></body></html>"))
self.import_reclass_toolButton.setText(_translate("SemiAutomaticClassificationPlugin", "Plot"))
self.export_reclass_toolButton.setToolTip(_translate("SemiAutomaticClassificationPlugin", "<html><head/><body><p>Export reclassification table to text file</p></body></html>"))
self.export_reclass_toolButton.setText(_translate("SemiAutomaticClassificationPlugin", "Plot"))
self.apply_symbology_checkBox.setToolTip(_translate("SemiAutomaticClassificationPlugin", "<html><head/><body><p>Use the codes from Signature list table for vector symbology</p></body></html>"))
self.apply_symbology_checkBox.setText(_translate("SemiAutomaticClassificationPlugin", "Use code from Signature list"))
self.class_macroclass_comboBox_2.setToolTip(_translate("SemiAutomaticClassificationPlugin", "<html><head/><body><p>Select the code field</p></body></html>"))
self.class_macroclass_comboBox_2.setItemText(0, _translate("SemiAutomaticClassificationPlugin", "MC_ID"))
self.class_macroclass_comboBox_2.setItemText(1, _translate("SemiAutomaticClassificationPlugin", "C_ID"))
self.label_51.setText(_translate("SemiAutomaticClassificationPlugin", " Symbology"))
self.label_172.setText(_translate("SemiAutomaticClassificationPlugin", " Run"))
self.reclassify_toolButton.setToolTip(_translate("SemiAutomaticClassificationPlugin", "<html><head/><body><p><span >Run</span></p></body></html>"))
self.reclassify_toolButton.setText(_translate("SemiAutomaticClassificationPlugin", " RUN"))
self.reclassification.setToolTip(_translate("SemiAutomaticClassificationPlugin", "<html><head/><body><p>Function to Batch</p></body></html>"))
self.reclassification.setText(_translate("SemiAutomaticClassificationPlugin", " BATCH"))
self.tabWidget_2.setTabText(self.tabWidget_2.indexOf(self.tab_reclassification), _translate("SemiAutomaticClassificationPlugin", "Reclassification"))
self.label_193.setText(_translate("SemiAutomaticClassificationPlugin", " Edit raster"))
self.undo_edit_Button.setToolTip(_translate("SemiAutomaticClassificationPlugin", "<html><head/><body><p>Undo edit (only for ROI polygons)</p></body></html>"))
self.undo_edit_Button.setText(_translate("SemiAutomaticClassificationPlugin", "Import library"))
self.label_173.setText(_translate("SemiAutomaticClassificationPlugin", " Run"))
self.label_66.setText(_translate("SemiAutomaticClassificationPlugin", "Select the input raster"))
self.edit_raster_name_combo.setToolTip(_translate("SemiAutomaticClassificationPlugin", "<html><head/><body><p>Select the raster to edit</p></body></html>"))
self.toolButton_reload_14.setToolTip(_translate("SemiAutomaticClassificationPlugin", "<html><head/><body><p >Refresh list</p></body></html>"))
self.toolButton_reload_14.setText(_translate("SemiAutomaticClassificationPlugin", "Plot"))
self.use_constant_val_checkBox.setToolTip(_translate("SemiAutomaticClassificationPlugin", "<html><head/><body><p>Use constant value</p></body></html>"))
self.use_constant_val_checkBox.setText(_translate("SemiAutomaticClassificationPlugin", "Use constant value"))
self.value_spinBox.setToolTip(_translate("SemiAutomaticClassificationPlugin", "<html><head/><body><p>Value</p></body></html>"))
self.use_expression_checkBox.setToolTip(_translate("SemiAutomaticClassificationPlugin", "<html><head/><body><p>Use expression</p></body></html>"))
self.use_expression_checkBox.setText(_translate("SemiAutomaticClassificationPlugin", "Use expression"))
self.expression_lineEdit.setToolTip(_translate("SemiAutomaticClassificationPlugin", "<html><head/><body><p>Enter expression</p></body></html>"))
self.expression_lineEdit.setText(_translate("SemiAutomaticClassificationPlugin", "where(raster == 1, 2, raster)"))
self.label_81.setText(_translate("SemiAutomaticClassificationPlugin", " Edit raster values"))
self.use_field_vector_checkBox.setToolTip(_translate("SemiAutomaticClassificationPlugin", "<html><head/><body><p>Use the value field of the vector</p></body></html>"))
self.use_field_vector_checkBox.setText(_translate("SemiAutomaticClassificationPlugin", "Use the value field of the vector"))
self.field_comboBox_2.setToolTip(_translate("SemiAutomaticClassificationPlugin", "<html><head/><body><p>Select the value field</p></body></html>"))
self.edit_val_use_vector_radioButton.setToolTip(_translate("SemiAutomaticClassificationPlugin", "<html><head/><body><p>Edit values using a vector</p></body></html>"))
self.edit_val_use_vector_radioButton.setText(_translate("SemiAutomaticClassificationPlugin", " Edit values using a vector"))
self.vector_name_combo_2.setToolTip(_translate("SemiAutomaticClassificationPlugin", "<html><head/><body><p>Select the vector</p></body></html>"))
self.toolButton_reload_20.setToolTip(_translate("SemiAutomaticClassificationPlugin", "<html><head/><body><p >Refresh list</p></body></html>"))
self.toolButton_reload_20.setText(_translate("SemiAutomaticClassificationPlugin", "Plot"))
self.edit_val_use_ROI_radioButton.setToolTip(_translate("SemiAutomaticClassificationPlugin", "<html><head/><body><p>Edit values using temporary ROIs</p></body></html>"))
self.edit_val_use_ROI_radioButton.setText(_translate("SemiAutomaticClassificationPlugin", " Edit values using ROI polygons"))
self.label_158.setText(_translate("SemiAutomaticClassificationPlugin", " Edit options"))
self.raster_set_value_toolButton.setToolTip(_translate("SemiAutomaticClassificationPlugin", "<html><head/><body><p><span >Run</span></p></body></html>"))
self.raster_set_value_toolButton.setText(_translate("SemiAutomaticClassificationPlugin", " RUN"))
self.edit_raster_using_vector.setToolTip(_translate("SemiAutomaticClassificationPlugin", "<html><head/><body><p>Function to Batch</p></body></html>"))
self.edit_raster_using_vector.setText(_translate("SemiAutomaticClassificationPlugin", " BATCH"))
self.tabWidget_2.setTabText(self.tabWidget_2.indexOf(self.tab), _translate("SemiAutomaticClassificationPlugin", "Edit raster"))
self.label_70.setText(_translate("SemiAutomaticClassificationPlugin", "Select the classification"))
self.sieve_raster_name_combo.setToolTip(_translate("SemiAutomaticClassificationPlugin", "<html><head/><body><p>Select the classification</p></body></html>"))
self.toolButton_reload_15.setToolTip(_translate("SemiAutomaticClassificationPlugin", "<html><head/><body><p >Refresh list</p></body></html>"))
self.toolButton_reload_15.setText(_translate("SemiAutomaticClassificationPlugin", "Plot"))
self.label_133.setText(_translate("SemiAutomaticClassificationPlugin", "Size threshold"))
self.sieve_threshold_spinBox.setToolTip(_translate("SemiAutomaticClassificationPlugin", "<html><head/><body><p>Size threshold in pixels</p></body></html>"))
self.label_136.setText(_translate("SemiAutomaticClassificationPlugin", "Pixel connection"))
self.sieve_connection_combo.setToolTip(_translate("SemiAutomaticClassificationPlugin", "<html><head/><body><p>Pixel connection</p></body></html>"))
self.sieve_connection_combo.setItemText(0, _translate("SemiAutomaticClassificationPlugin", "4"))
self.sieve_connection_combo.setItemText(1, _translate("SemiAutomaticClassificationPlugin", "8"))
self.label_174.setText(_translate("SemiAutomaticClassificationPlugin", " Run"))
self.sieve_toolButton.setToolTip(_translate("SemiAutomaticClassificationPlugin", "<html><head/><body><p><span >Run</span></p></body></html>"))
self.sieve_toolButton.setText(_translate("SemiAutomaticClassificationPlugin", " RUN"))
self.classification_sieve.setToolTip(_translate("SemiAutomaticClassificationPlugin", "<html><head/><body><p>Function to Batch</p></body></html>"))
self.classification_sieve.setText(_translate("SemiAutomaticClassificationPlugin", " BATCH"))
self.label_195.setText(_translate("SemiAutomaticClassificationPlugin", " Classification sieve"))
self.tabWidget_2.setTabText(self.tabWidget_2.indexOf(self.tab_sieve), _translate("SemiAutomaticClassificationPlugin", "Classification sieve"))
self.label_202.setText(_translate("SemiAutomaticClassificationPlugin", " Classification erosion"))
self.label_146.setText(_translate("SemiAutomaticClassificationPlugin", "Select the classification"))
self.erosion_raster_name_combo.setToolTip(_translate("SemiAutomaticClassificationPlugin", "<html><head/><body><p>Select the classification</p></body></html>"))
self.toolButton_reload_18.setToolTip(_translate("SemiAutomaticClassificationPlugin", "<html><head/><body><p >Refresh list</p></body></html>"))
self.toolButton_reload_18.setText(_translate("SemiAutomaticClassificationPlugin", "Plot"))
self.label_149.setText(_translate("SemiAutomaticClassificationPlugin", "Size in pixels"))
self.erosion_threshold_spinBox.setToolTip(_translate("SemiAutomaticClassificationPlugin", "<html><head/><body><p>Size in pixels</p></body></html>"))
self.circular_structure_checkBox_3.setToolTip(_translate("SemiAutomaticClassificationPlugin", "<html><head/><body><p>If checked, neighbor pixels are calculated inside a circle of radius equal to the distance in pixels</p></body></html>"))
self.circular_structure_checkBox_3.setText(_translate("SemiAutomaticClassificationPlugin", "Circular"))
self.label_151.setText(_translate("SemiAutomaticClassificationPlugin", "Class values"))
self.erosion_classes_lineEdit.setToolTip(_translate("SemiAutomaticClassificationPlugin", "<html><head/><body><p>Enter class values separated by , or -</p></body></html>"))
self.label_175.setText(_translate("SemiAutomaticClassificationPlugin", " Run"))
self.class_erosion_toolButton.setToolTip(_translate("SemiAutomaticClassificationPlugin", "<html><head/><body><p><span >Run</span></p></body></html>"))
self.class_erosion_toolButton.setText(_translate("SemiAutomaticClassificationPlugin", " RUN"))
self.classification_erosion.setToolTip(_translate("SemiAutomaticClassificationPlugin", "<html><head/><body><p>Function to Batch</p></body></html>"))
self.classification_erosion.setText(_translate("SemiAutomaticClassificationPlugin", " BATCH"))
self.tabWidget_2.setTabText(self.tabWidget_2.indexOf(self.tab_erosion), _translate("SemiAutomaticClassificationPlugin", "Classification erosion"))
self.label_204.setText(_translate("SemiAutomaticClassificationPlugin", " Classification dilation"))
self.label_152.setText(_translate("SemiAutomaticClassificationPlugin", "Select the classification"))
self.dilation_raster_name_combo.setToolTip(_translate("SemiAutomaticClassificationPlugin", "<html><head/><body><p>Select the classification</p></body></html>"))
self.toolButton_reload_19.setToolTip(_translate("SemiAutomaticClassificationPlugin", "<html><head/><body><p >Refresh list</p></body></html>"))
self.toolButton_reload_19.setText(_translate("SemiAutomaticClassificationPlugin", "Plot"))
self.label_153.setText(_translate("SemiAutomaticClassificationPlugin", "Size in pixels"))
self.dilation_threshold_spinBox.setToolTip(_translate("SemiAutomaticClassificationPlugin", "<html><head/><body><p>Size in pixels</p></body></html>"))
self.circular_structure_checkBox_2.setToolTip(_translate("SemiAutomaticClassificationPlugin", "<html><head/><body><p>If checked, neighbor pixels are calculated inside a circle of radius equal to the distance in pixels</p></body></html>"))
self.circular_structure_checkBox_2.setText(_translate("SemiAutomaticClassificationPlugin", "Circular"))
self.label_155.setText(_translate("SemiAutomaticClassificationPlugin", "Class values"))
self.dilation_classes_lineEdit.setToolTip(_translate("SemiAutomaticClassificationPlugin", "<html><head/><body><p>Enter class values separated by , or -</p></body></html>"))
self.label_176.setText(_translate("SemiAutomaticClassificationPlugin", " Run"))
self.class_dilation_toolButton.setToolTip(_translate("SemiAutomaticClassificationPlugin", "<html><head/><body><p><span >Run</span></p></body></html>"))
self.class_dilation_toolButton.setText(_translate("SemiAutomaticClassificationPlugin", " RUN"))
self.classification_dilation.setToolTip(_translate("SemiAutomaticClassificationPlugin", "<html><head/><body><p>Function to Batch</p></body></html>"))
self.classification_dilation.setText(_translate("SemiAutomaticClassificationPlugin", " BATCH"))
self.tabWidget_2.setTabText(self.tabWidget_2.indexOf(self.tab_dilation), _translate("SemiAutomaticClassificationPlugin", "Classification dilation"))
self.label_212.setText(_translate("SemiAutomaticClassificationPlugin", " Zonal stat rasters"))
self.label_77.setText(_translate("SemiAutomaticClassificationPlugin", "Select the input raster"))
self.classification_name_combo_5.setToolTip(_translate("SemiAutomaticClassificationPlugin", "<html><head/><body><p>Select the raster to edit</p></body></html>"))
self.toolButton_reload_24.setToolTip(_translate("SemiAutomaticClassificationPlugin", "<html><head/><body><p >Refresh list</p></body></html>"))
self.toolButton_reload_24.setText(_translate("SemiAutomaticClassificationPlugin", "Plot"))
self.nodata_checkBox_10.setToolTip(_translate("SemiAutomaticClassificationPlugin", "<html><head/><body><p>If checked, pixels equal to NoData value will be excluded from the output raster</p></body></html>"))
self.nodata_checkBox_10.setText(_translate("SemiAutomaticClassificationPlugin", "Use value as NoData"))
self.nodata_spinBox_12.setToolTip(_translate("SemiAutomaticClassificationPlugin", "<html><head/><body><p>NoData value</p></body></html>"))
self.class_field_comboBox_4.setToolTip(_translate("SemiAutomaticClassificationPlugin", "<html><head/><body><p>Select the vector field</p></body></html>"))
self.label_214.setText(_translate("SemiAutomaticClassificationPlugin", "Select the reference vector or raster"))
self.buttonReload_shape_6.setToolTip(_translate("SemiAutomaticClassificationPlugin", "<html><head/><body><p >Refresh list</p></body></html>"))
self.buttonReload_shape_6.setText(_translate("SemiAutomaticClassificationPlugin", "Plot"))
self.label_213.setText(_translate("SemiAutomaticClassificationPlugin", "Vector field"))
self.reference_name_combo_3.setToolTip(_translate("SemiAutomaticClassificationPlugin", "<html><head/><body><p>Select the reference vector or raster</p></body></html>"))
self.statistic_lineEdit.setToolTip(_translate("SemiAutomaticClassificationPlugin", "<html><head/><body><p>Enter a value</p></body></html>"))
self.statistic_name_combobox.setToolTip(_translate("SemiAutomaticClassificationPlugin", "<html><head/><body><p>Select a statistic</p></body></html>"))
self.label_232.setText(_translate("SemiAutomaticClassificationPlugin", "Select a statistic"))
self.label_216.setText(_translate("SemiAutomaticClassificationPlugin", " Statistic"))
self.zonal_stat_raster_toolButton.setToolTip(_translate("SemiAutomaticClassificationPlugin", "<html><head/><body><p><span >Run</span></p></body></html>"))
self.zonal_stat_raster_toolButton.setText(_translate("SemiAutomaticClassificationPlugin", " RUN"))
self.label_215.setText(_translate("SemiAutomaticClassificationPlugin", " Run"))
self.zonal_stat_raster.setToolTip(_translate("SemiAutomaticClassificationPlugin", "<html><head/><body><p>Function to Batch</p></body></html>"))
self.zonal_stat_raster.setText(_translate("SemiAutomaticClassificationPlugin", " BATCH"))
self.tabWidget_2.setTabText(self.tabWidget_2.indexOf(self.tab_zonal_stats_rasters), _translate("SemiAutomaticClassificationPlugin", " Zonal stat rasters"))
self.SCP_tabs.setTabText(self.SCP_tabs.indexOf(self.tab_postProcessing), _translate("SemiAutomaticClassificationPlugin", "Postprocessing"))
self.toolButton_reload_13.setToolTip(_translate("SemiAutomaticClassificationPlugin", "<html><head/><body><p >Refresh list</p></body></html>"))
self.toolButton_reload_13.setText(_translate("SemiAutomaticClassificationPlugin", "Plot"))
self.tableWidget_band_calc.setToolTip(_translate("SemiAutomaticClassificationPlugin", "<html><head/><body><p>Band list</p></body></html>"))
item = self.tableWidget_band_calc.horizontalHeaderItem(0)
item.setText(_translate("SemiAutomaticClassificationPlugin", "Variable"))
item = self.tableWidget_band_calc.horizontalHeaderItem(1)
item.setText(_translate("SemiAutomaticClassificationPlugin", "Band name"))
self.bandcalc_filter_lineEdit.setToolTip(_translate("SemiAutomaticClassificationPlugin", "<html><head/><body><p>Filter</p></body></html>"))
self.bandcalc_filter_lineEdit.setPlaceholderText(_translate("SemiAutomaticClassificationPlugin", "Filter"))
self.label_71.setText(_translate("SemiAutomaticClassificationPlugin", " Band list"))
# Inner quotes around raster names must be escaped, otherwise the string
# literal terminates early and the file fails to parse.
self.plainTextEdit_calc.setToolTip(_translate("SemiAutomaticClassificationPlugin", "<html><head/><body><p>Enter an expression (e.g. \"raster1\" + \"raster2\" )</p></body></html>"))
self.toolButton_less.setToolTip(_translate("SemiAutomaticClassificationPlugin", "<html><head/><body><p>Less than</p></body></html>"))
self.toolButton_less.setText(_translate("SemiAutomaticClassificationPlugin", "<"))
self.toolButton_greater.setToolTip(_translate("SemiAutomaticClassificationPlugin", "<html><head/><body><p>Greater than</p></body></html>"))
self.toolButton_greater.setText(_translate("SemiAutomaticClassificationPlugin", ">"))
self.toolButton_lbracket.setToolTip(_translate("SemiAutomaticClassificationPlugin", "<html><head/><body><p>Open parenthesis</p></body></html>"))
self.toolButton_lbracket.setText(_translate("SemiAutomaticClassificationPlugin", "("))
self.toolButton_rbracket.setToolTip(_translate("SemiAutomaticClassificationPlugin", "<html><head/><body><p>Close parenthesis</p></body></html>"))
self.toolButton_rbracket.setText(_translate("SemiAutomaticClassificationPlugin", ")"))
self.toolButton_power.setToolTip(_translate("SemiAutomaticClassificationPlugin", "<html><head/><body><p>Power</p></body></html>"))
self.toolButton_power.setText(_translate("SemiAutomaticClassificationPlugin", "^"))
self.toolButton_sqrt.setToolTip(_translate("SemiAutomaticClassificationPlugin", "<html><head/><body><p>Square root</p></body></html>"))
self.toolButton_sqrt.setText(_translate("SemiAutomaticClassificationPlugin", "√"))
self.toolButton_plus.setToolTip(_translate("SemiAutomaticClassificationPlugin", "<html><head/><body><p>Plus</p></body></html>"))
self.toolButton_plus.setText(_translate("SemiAutomaticClassificationPlugin", "+"))
self.toolButton_minus.setToolTip(_translate("SemiAutomaticClassificationPlugin", "<html><head/><body><p>Minus</p></body></html>"))
self.toolButton_minus.setText(_translate("SemiAutomaticClassificationPlugin", "-"))
self.toolButton_product.setToolTip(_translate("SemiAutomaticClassificationPlugin", "<html><head/><body><p>Multiplication</p></body></html>"))
self.toolButton_product.setText(_translate("SemiAutomaticClassificationPlugin", "*"))
self.toolButton_ratio.setToolTip(_translate("SemiAutomaticClassificationPlugin", "<html><head/><body><p>Division</p></body></html>"))
self.toolButton_ratio.setText(_translate("SemiAutomaticClassificationPlugin", "/"))
self.toolButton_equal.setToolTip(_translate("SemiAutomaticClassificationPlugin", "<html><head/><body><p>Equals</p></body></html>"))
self.toolButton_equal.setText(_translate("SemiAutomaticClassificationPlugin", "=="))
self.toolButton_unequal.setToolTip(_translate("SemiAutomaticClassificationPlugin", "<html><head/><body><p>Not equals</p></body></html>"))
self.toolButton_unequal.setText(_translate("SemiAutomaticClassificationPlugin", "!="))
self.toolButton_import_expression.setToolTip(_translate("SemiAutomaticClassificationPlugin", "<html><head/><body><p>Open a text file to add custom functions</p></body></html>"))
item = self.band_calc_function_tableWidget.horizontalHeaderItem(0)
item.setText(_translate("SemiAutomaticClassificationPlugin", "Functions"))
self.band_calc_tabWidget.setTabText(self.band_calc_tabWidget.indexOf(self.tab_expression), _translate("SemiAutomaticClassificationPlugin", "Expression"))
# Inner quotes around raster names must be escaped, otherwise the string
# literal terminates early and the file fails to parse.
self.decision_rules_tableWidget.setToolTip(_translate("SemiAutomaticClassificationPlugin", "<html><head/><body><p>Enter one or more rules separated by semicolon (e.g. \"raster1\" > 0; \"raster2\" > 0 )</p></body></html>"))
item = self.decision_rules_tableWidget.horizontalHeaderItem(0)
item.setText(_translate("SemiAutomaticClassificationPlugin", "Value"))
item = self.decision_rules_tableWidget.horizontalHeaderItem(1)
item.setText(_translate("SemiAutomaticClassificationPlugin", "Rule"))
self.remove_rule_toolButton.setToolTip(_translate("SemiAutomaticClassificationPlugin", "<html><head/><body><p >Delete row</p></body></html>"))
self.remove_rule_toolButton.setText(_translate("SemiAutomaticClassificationPlugin", "Plot"))
self.move_up_toolButton_2.setToolTip(_translate("SemiAutomaticClassificationPlugin", "<html><head/><body><p>Move highlighted rule up</p></body></html>"))
self.move_up_toolButton_2.setText(_translate("SemiAutomaticClassificationPlugin", "Import library"))
self.import_rules_toolButton.setToolTip(_translate("SemiAutomaticClassificationPlugin", "<html><head/><body><p>Import rules from text file</p></body></html>"))
self.import_rules_toolButton.setText(_translate("SemiAutomaticClassificationPlugin", "Plot"))
self.clear_rules_toolButton.setToolTip(_translate("SemiAutomaticClassificationPlugin", "<html><head/><body><p><span >Reset</span></p></body></html>"))
self.clear_rules_toolButton.setText(_translate("SemiAutomaticClassificationPlugin", "Import library"))
self.add_rule_toolButton.setToolTip(_translate("SemiAutomaticClassificationPlugin", "<html><head/><body><p >Add row</p></body></html>"))
self.add_rule_toolButton.setText(_translate("SemiAutomaticClassificationPlugin", "Plot"))
self.move_down_toolButton_2.setToolTip(_translate("SemiAutomaticClassificationPlugin", "<html><head/><body><p>Move highlighted rule down</p></body></html>"))
self.move_down_toolButton_2.setText(_translate("SemiAutomaticClassificationPlugin", "Import library"))
self.export_rules_toolButton.setToolTip(_translate("SemiAutomaticClassificationPlugin", "<html><head/><body><p>Export rules to text file</p></body></html>"))
self.export_rules_toolButton.setText(_translate("SemiAutomaticClassificationPlugin", "Plot"))
self.band_calc_tabWidget.setTabText(self.band_calc_tabWidget.indexOf(self.tab_decision_rules), _translate("SemiAutomaticClassificationPlugin", "Decision rules"))
self.nodata_as_value_checkBox.setToolTip(_translate("SemiAutomaticClassificationPlugin", "<html><head/><body><p>If checked, input NoData pixels will be evaluated as regular values</p></body></html>"))
self.nodata_as_value_checkBox.setText(_translate("SemiAutomaticClassificationPlugin", "Input NoData \n"
" as value"))
self.nodata_checkBox_3.setToolTip(_translate("SemiAutomaticClassificationPlugin", "<html><head/><body><p>If checked, pixels equal to NoData value will be excluded from the output raster</p></body></html>"))
self.nodata_checkBox_3.setText(_translate("SemiAutomaticClassificationPlugin", "Use value\n"
"as NoData"))
self.nodata_spinBox_13.setToolTip(_translate("SemiAutomaticClassificationPlugin", "<html><head/><body><p>NoData value</p></body></html>"))
self.label_4.setText(_translate("SemiAutomaticClassificationPlugin", "Calculation\n"
"data type"))
self.calc_type_combo.setToolTip(_translate("SemiAutomaticClassificationPlugin", "<html><head/><body><p>Select a type</p></body></html>"))
self.calc_type_combo.setItemText(0, _translate("SemiAutomaticClassificationPlugin", "Float32"))
self.calc_type_combo.setItemText(1, _translate("SemiAutomaticClassificationPlugin", "Int32"))
self.calc_type_combo.setItemText(2, _translate("SemiAutomaticClassificationPlugin", "UInt32"))
self.calc_type_combo.setItemText(3, _translate("SemiAutomaticClassificationPlugin", "Int16"))
self.calc_type_combo.setItemText(4, _translate("SemiAutomaticClassificationPlugin", "UInt16"))
self.calc_type_combo.setItemText(5, _translate("SemiAutomaticClassificationPlugin", "Byte"))
self.label_83.setText(_translate("SemiAutomaticClassificationPlugin", "Extent:"))
# Fix user-facing typo: "ouput" -> "output". NOTE(review): this file looks
# pyuic-generated; the same fix should be applied in the source .ui file so
# regeneration does not reintroduce the typo.
self.intersection_checkBox.setToolTip(_translate("SemiAutomaticClassificationPlugin", "<html><head/><body><p>If checked, the extent of raster output equals the intersection of input rasters</p></body></html>"))
self.intersection_checkBox.setText(_translate("SemiAutomaticClassificationPlugin", "Intersection"))
# Fix user-facing typo: "ouput" -> "output". NOTE(review): this file looks
# pyuic-generated; the same fix should be applied in the source .ui file so
# regeneration does not reintroduce the typo.
self.extent_checkBox.setToolTip(_translate("SemiAutomaticClassificationPlugin", "<html><head/><body><p>If checked, the extent of raster output equals the extent of selected raster</p></body></html>"))
self.extent_checkBox.setText(_translate("SemiAutomaticClassificationPlugin", "Same as"))
self.raster_extent_combo.setToolTip(_translate("SemiAutomaticClassificationPlugin", "<html><head/><body><p>Select a raster</p></body></html>"))
self.align_radioButton.setText(_translate("SemiAutomaticClassificationPlugin", "Align"))
self.label_84.setText(_translate("SemiAutomaticClassificationPlugin", "Output raster"))
self.raster_type_combo.setToolTip(_translate("SemiAutomaticClassificationPlugin", "<html><head/><body><p>Select a type</p></body></html>"))
self.raster_type_combo.setItemText(0, _translate("SemiAutomaticClassificationPlugin", "Float32"))
self.raster_type_combo.setItemText(1, _translate("SemiAutomaticClassificationPlugin", "Int32"))
self.raster_type_combo.setItemText(2, _translate("SemiAutomaticClassificationPlugin", "UInt32"))
self.raster_type_combo.setItemText(3, _translate("SemiAutomaticClassificationPlugin", "Int16"))
self.raster_type_combo.setItemText(4, _translate("SemiAutomaticClassificationPlugin", "UInt16"))
self.raster_type_combo.setItemText(5, _translate("SemiAutomaticClassificationPlugin", "Byte"))
self.label_268.setText(_translate("SemiAutomaticClassificationPlugin", "Output \n"
"NoData value"))
self.nodata_spinBox_4.setToolTip(_translate("SemiAutomaticClassificationPlugin", "<html><head/><body><p>NoData value of the output raster</p></body></html>"))
self.nodata_mask_checkBox.setToolTip(_translate("SemiAutomaticClassificationPlugin", "<html><head/><body><p>If checked, every NoData pixel in input will be NoData pixel in output</p></body></html>"))
self.nodata_mask_checkBox.setText(_translate("SemiAutomaticClassificationPlugin", "NoData mask"))
self.set_scale_checkBox.setToolTip(_translate("SemiAutomaticClassificationPlugin", "<html><head/><body><p>If checked, set a scale</p></body></html>"))
self.set_scale_checkBox.setText(_translate("SemiAutomaticClassificationPlugin", "Set \n"
"scale"))
self.scale_doubleSpinBox.setToolTip(_translate("SemiAutomaticClassificationPlugin", "<html><head/><body><p>Scale</p></body></html>"))
self.set_offset_checkBox.setToolTip(_translate("SemiAutomaticClassificationPlugin", "<html><head/><body><p>If checked, set an offset</p></body></html>"))
self.set_offset_checkBox.setText(_translate("SemiAutomaticClassificationPlugin", "Set \n"
"offset"))
self.band_calc.setToolTip(_translate("SemiAutomaticClassificationPlugin", "<html><head/><body><p>Function to Batch</p></body></html>"))
self.band_calc.setText(_translate("SemiAutomaticClassificationPlugin", " BATCH"))
self.toolButton_calculate.setToolTip(_translate("SemiAutomaticClassificationPlugin", "<html><head/><body><p><span >Run</span></p></body></html>"))
self.toolButton_calculate.setText(_translate("SemiAutomaticClassificationPlugin", " RUN"))
self.SCP_tabs.setTabText(self.SCP_tabs.indexOf(self.tab_band_calc), _translate("SemiAutomaticClassificationPlugin", "Band calc"))
self.label_73.setText(_translate("SemiAutomaticClassificationPlugin", "Batch"))
self.plainTextEdit_batch.setToolTip(_translate("SemiAutomaticClassificationPlugin", "<html><head/><body><p>Enter a batch function</p></body></html>"))
self.export_batch_toolButton.setToolTip(_translate("SemiAutomaticClassificationPlugin", "<html><head/><body><p>Export batch to text file</p></body></html>"))
self.export_batch_toolButton.setText(_translate("SemiAutomaticClassificationPlugin", "Plot"))
self.clear_batch_toolButton.setToolTip(_translate("SemiAutomaticClassificationPlugin", "<html><head/><body><p><span >Reset</span></p></body></html>"))
self.clear_batch_toolButton.setText(_translate("SemiAutomaticClassificationPlugin", "Import library"))
self.import_batch_toolButton.setToolTip(_translate("SemiAutomaticClassificationPlugin", "<html><head/><body><p>Import batch from text file</p></body></html>"))
self.import_batch_toolButton.setText(_translate("SemiAutomaticClassificationPlugin", "Plot"))
item = self.batch_tableWidget.horizontalHeaderItem(0)
item.setText(_translate("SemiAutomaticClassificationPlugin", "Functions"))
self.label_177.setText(_translate("SemiAutomaticClassificationPlugin", " Run"))
self.check_batch.setToolTip(_translate("SemiAutomaticClassificationPlugin", "<html><head/><body><p>Check batch function</p></body></html>"))
self.check_batch.setText(_translate("SemiAutomaticClassificationPlugin", " CHECK"))
self.toolButton_run_batch.setToolTip(_translate("SemiAutomaticClassificationPlugin", "<html><head/><body><p><span >Run</span></p></body></html>"))
self.toolButton_run_batch.setText(_translate("SemiAutomaticClassificationPlugin", " RUN"))
self.SCP_tabs.setTabText(self.SCP_tabs.indexOf(self.tab_batch), _translate("SemiAutomaticClassificationPlugin", "Batch"))
self.label_28.setText(_translate("SemiAutomaticClassificationPlugin", " System"))
self.RAM_spinBox.setToolTip(_translate("SemiAutomaticClassificationPlugin", "<html><head/><body><p>Set available RAM for processes</p></body></html>"))
self.label_23.setText(_translate("SemiAutomaticClassificationPlugin", "Available RAM (MB)"))
self.label_56.setText(_translate("SemiAutomaticClassificationPlugin", "CPU threads"))
self.CPU_spinBox.setToolTip(_translate("SemiAutomaticClassificationPlugin", "<html><head/><body><p>Set the number of CPU threads </p></body></html>"))
self.label_13.setText(_translate("SemiAutomaticClassificationPlugin", "SMTP server"))
self.label_18.setText(_translate("SemiAutomaticClassificationPlugin", "password"))
self.smtp_user_lineEdit.setToolTip(_translate("SemiAutomaticClassificationPlugin", "<html><head/><body><p>Set the Class ID field name</p><p>[max 10 characters]</p></body></html>"))
self.smtp_checkBox.setToolTip(_translate("SemiAutomaticClassificationPlugin", "<html><head/><body><p>Enable/Disable the sending of email of completed process</p></body></html>"))
self.smtp_checkBox.setText(_translate("SemiAutomaticClassificationPlugin", "Send email of completed process to"))
self.label_117.setText(_translate("SemiAutomaticClassificationPlugin", "SMTP process notification"))
self.smtp_password_lineEdit.setToolTip(_translate("SemiAutomaticClassificationPlugin", "<html><head/><body><p>Password</p></body></html>"))
self.smtp_server_lineEdit.setToolTip(_translate("SemiAutomaticClassificationPlugin", "<html><head/><body><p>Set the Class ID field name</p><p>[max 10 characters]</p></body></html>"))
self.label_14.setText(_translate("SemiAutomaticClassificationPlugin", "user"))
self.to_email_lineEdit.setToolTip(_translate("SemiAutomaticClassificationPlugin", "<html><head/><body><p>A list of addresses (separated by comma , ) to send this mail to </p></body></html>"))
self.remeber_settings_checkBox.setToolTip(_translate("SemiAutomaticClassificationPlugin", "<html><head/><body><p>If checked, remember user name and password locally in QGIS</p></body></html>"))
self.remeber_settings_checkBox.setText(_translate("SemiAutomaticClassificationPlugin", "remember"))
self.reset_temp_directory_Button.setToolTip(_translate("SemiAutomaticClassificationPlugin", "<html><head/><body><p>Reset to default temporary directory</p></body></html>"))
self.reset_temp_directory_Button.setText(_translate("SemiAutomaticClassificationPlugin", "Import library"))
self.temp_directory_Button.setToolTip(_translate("SemiAutomaticClassificationPlugin", "<html><head/><body><p >Select a directory</p></body></html>"))
self.temp_directory_Button.setText(_translate("SemiAutomaticClassificationPlugin", "Import library"))
self.label_87.setText(_translate("SemiAutomaticClassificationPlugin", "Temporary directory"))
self.SNAP_label.setText(_translate("SemiAutomaticClassificationPlugin", "<html><head/><body><p><a href=\"https://step.esa.int/main/download/snap-download/\"><span style=\" text-decoration: underline; color:#0000ff;\">ESA SNAP GPT executable</span></a></p></body></html>"))
self.label_276.setText(_translate("SemiAutomaticClassificationPlugin", "Python executable path"))
self.label_288.setText(_translate("SemiAutomaticClassificationPlugin", "Python modules path"))
self.label_275.setText(_translate("SemiAutomaticClassificationPlugin", "GDAL installation directory"))
self.SNAP_GPT_lineEdit.setToolTip(_translate("SemiAutomaticClassificationPlugin", "<html><head/><body><p>Path to the GPT executable (e.g. C:\\Program Files\\snap\\bin\\gpt.exe)</p></body></html>"))
self.python_path_lineEdit.setToolTip(_translate("SemiAutomaticClassificationPlugin", "<html><head/><body><p>Path to the Python executable (e.g. /usr/local/bin/python3)</p></body></html>"))
self.python_path_lineEdit_2.setToolTip(_translate("SemiAutomaticClassificationPlugin", "<html><head/><body><p>Path to Python modules (e.g. /Applications/QGIS.app/Contents/MacOS/lib/python3.8/site-packages).<br/>Multiple paths can be entered separated by ;</p><p>Restart is required.</p></body></html>"))
self.gdal_path_lineEdit.setToolTip(_translate("SemiAutomaticClassificationPlugin", "<html><head/><body><p>Path to the GDAL directory containing tools such as gdal_translate and gdalwarp (e.g. /usr/bin)</p></body></html>"))
self.label_211.setText(_translate("SemiAutomaticClassificationPlugin", "External programs"))
self.sound_checkBox.setToolTip(_translate("SemiAutomaticClassificationPlugin", "<html><head/><body><p>Enable/Disable the sound when the process is finished</p></body></html>"))
self.sound_checkBox.setText(_translate("SemiAutomaticClassificationPlugin", "Play sound when finished"))
self.virtual_raster_checkBox.setToolTip(_translate("SemiAutomaticClassificationPlugin", "<html><head/><body><p>If checked, create virtual rasters for certain temporary files</p></body></html>"))
self.virtual_raster_checkBox.setText(_translate("SemiAutomaticClassificationPlugin", "Use virtual raster for temp files"))
self.label_45.setText(_translate("SemiAutomaticClassificationPlugin", "Calculation process"))
self.raster_compression_checkBox.setToolTip(_translate("SemiAutomaticClassificationPlugin", "<html><head/><body><p>If checked, a lossless compression is applied to rasters in order to save disk space</p></body></html>"))
self.raster_compression_checkBox.setText(_translate("SemiAutomaticClassificationPlugin", "Raster compression"))
self.parallel_writing_checkBox.setToolTip(_translate("SemiAutomaticClassificationPlugin", "<html><head/><body><p>If checked, the output raster is compared to output calculation to avoid writing errors. It could slightly slow the process.</p></body></html>"))
self.parallel_writing_checkBox.setText(_translate("SemiAutomaticClassificationPlugin", "Enable writing verification"))
self.settings_tabWidget.setTabText(self.settings_tabWidget.indexOf(self.tabWidgetProcessing), _translate("SemiAutomaticClassificationPlugin", "Processing"))
self.label_31.setText(_translate("SemiAutomaticClassificationPlugin", "C Name field"))
self.Info_field_name_lineEdit.setToolTip(_translate("SemiAutomaticClassificationPlugin", "<html><head/><body><p>Set the Class name field</p><p>[max 10 characters]</p></body></html>"))
self.label_24.setText(_translate("SemiAutomaticClassificationPlugin", " Field names of training input"))
self.ID_field_name_lineEdit.setToolTip(_translate("SemiAutomaticClassificationPlugin", "<html><head/><body><p>Set the Class ID field name</p><p>[max 10 characters]</p></body></html>"))
self.MID_field_name_lineEdit.setToolTip(_translate("SemiAutomaticClassificationPlugin", "<html><head/><body><p>Set the Macroclass ID field name</p><p>[max 10 characters]</p></body></html>"))
self.label_10.setText(_translate("SemiAutomaticClassificationPlugin", "C ID field"))
self.MCInfo_field_name_lineEdit.setToolTip(_translate("SemiAutomaticClassificationPlugin", "<html><head/><body><p>Set the Macroclass name field</p><p>[max 10 characters]</p></body></html>"))
self.label_17.setText(_translate("SemiAutomaticClassificationPlugin", "MC ID field"))
self.label_46.setText(_translate("SemiAutomaticClassificationPlugin", "MC Name field"))
self.reset_field_names_Button.setToolTip(_translate("SemiAutomaticClassificationPlugin", "<html><head/><body><p><span >Reset</span></p></body></html>"))
self.reset_field_names_Button.setText(_translate("SemiAutomaticClassificationPlugin", "Import library"))
self.label_21.setText(_translate("SemiAutomaticClassificationPlugin", " ROI style"))
self.change_color_Button.setToolTip(_translate("SemiAutomaticClassificationPlugin", "<html><head/><body><p>Select temporary ROI color</p></body></html>"))
self.label_22.setText(_translate("SemiAutomaticClassificationPlugin", "ROI color"))
self.transparency_Label.setText(_translate("SemiAutomaticClassificationPlugin", "Transparency"))
self.transparency_Slider.setToolTip(_translate("SemiAutomaticClassificationPlugin", "<html><head/><body><p>Change temporary ROI transparency</p></body></html>"))
self.reset_color_Button.setToolTip(_translate("SemiAutomaticClassificationPlugin", "<html><head/><body><p><span >Reset</span></p></body></html>"))
self.reset_color_Button.setText(_translate("SemiAutomaticClassificationPlugin", "Import library"))
self.label_68.setText(_translate("SemiAutomaticClassificationPlugin", " Variable name"))
self.variable_name_lineEdit.setToolTip(_translate("SemiAutomaticClassificationPlugin", "<html><head/><body><p>Variable name for expressions</p></body></html>"))
self.variable_name_lineEdit.setText(_translate("SemiAutomaticClassificationPlugin", "raster"))
self.label_69.setText(_translate("SemiAutomaticClassificationPlugin", " Variable name for expressions (tab Reclassification and Edit raster)"))
self.reset_variable_name_Button.setToolTip(_translate("SemiAutomaticClassificationPlugin", "<html><head/><body><p><span >Reset</span></p></body></html>"))
self.reset_variable_name_Button.setText(_translate("SemiAutomaticClassificationPlugin", "Import library"))
self.label_95.setText(_translate("SemiAutomaticClassificationPlugin", " Dock"))
self.download_news_checkBox.setToolTip(_translate("SemiAutomaticClassificationPlugin", "<html><head/><body><p>If checked, news about the SCP are downloaded on startup and displayed in Dock</p></body></html>"))
self.download_news_checkBox.setText(_translate("SemiAutomaticClassificationPlugin", "Download news on startup"))
self.label_76.setText(_translate("SemiAutomaticClassificationPlugin", " Project"))
self.reset_group_name_Button.setToolTip(_translate("SemiAutomaticClassificationPlugin", "<html><head/><body><p><span >Reset</span></p></body></html>"))
self.reset_group_name_Button.setText(_translate("SemiAutomaticClassificationPlugin", "Import library"))
self.virtual_raster_load_checkBox.setToolTip(_translate("SemiAutomaticClassificationPlugin", "<html><head/><body><p>If checked, an RGB composite of the active band set is created when a previous project is loaded</p></body></html>"))
self.virtual_raster_load_checkBox.setText(_translate("SemiAutomaticClassificationPlugin", "Create RGB composite of band set when a project is loaded"))
self.label_75.setText(_translate("SemiAutomaticClassificationPlugin", "Group name"))
self.group_name_lineEdit.setToolTip(_translate("SemiAutomaticClassificationPlugin", "<html><head/><body><p>Group name</p></body></html>"))
self.group_name_lineEdit.setText(_translate("SemiAutomaticClassificationPlugin", "Class_temp_group"))
self.settings_tabWidget.setTabText(self.settings_tabWidget.indexOf(self.tabWidgetInterface), _translate("SemiAutomaticClassificationPlugin", "Interface"))
self.log_checkBox.setToolTip(_translate("SemiAutomaticClassificationPlugin", "<html><head/><body><p>Enable/Disable the Log of events</p></body></html>"))
self.log_checkBox.setText(_translate("SemiAutomaticClassificationPlugin", "Record events in a Log file"))
self.exportLog_Button.setToolTip(_translate("SemiAutomaticClassificationPlugin", "<html><head/><body><p>Export the Log file</p></body></html>"))
self.exportLog_Button.setText(_translate("SemiAutomaticClassificationPlugin", "Import library"))
self.clearLog_Button.setToolTip(_translate("SemiAutomaticClassificationPlugin", "<html><head/><body><p>Clear the Log file content</p></body></html>"))
self.clearLog_Button.setText(_translate("SemiAutomaticClassificationPlugin", "Import library"))
self.label_30.setText(_translate("SemiAutomaticClassificationPlugin", " Log file"))
item = self.log_tableWidget.horizontalHeaderItem(0)
item.setText(_translate("SemiAutomaticClassificationPlugin", "Date"))
item = self.log_tableWidget.horizontalHeaderItem(1)
item.setText(_translate("SemiAutomaticClassificationPlugin", "Function"))
item = self.log_tableWidget.horizontalHeaderItem(2)
item.setText(_translate("SemiAutomaticClassificationPlugin", "Message"))
self.test_dependencies_Button.setToolTip(_translate("SemiAutomaticClassificationPlugin", "<html><head/><body><p>Test dependencies</p></body></html>"))
self.label_42.setText(_translate("SemiAutomaticClassificationPlugin", "Test dependencies"))
self.label_43.setText(_translate("SemiAutomaticClassificationPlugin", " Test"))
self.settings_tabWidget.setTabText(self.settings_tabWidget.indexOf(self.tabWidgetDebug), _translate("SemiAutomaticClassificationPlugin", "Debug"))
self.SCP_tabs.setTabText(self.SCP_tabs.indexOf(self.tab_Settings), _translate("SemiAutomaticClassificationPlugin", "Settings"))
self.plugin_label.setText(_translate("SemiAutomaticClassificationPlugin", "Semi-Automatic Classification Plugin"))
self.textBrowser.setHtml(_translate("SemiAutomaticClassificationPlugin", "<!DOCTYPE HTML PUBLIC \"-//W3C//DTD HTML 4.0//EN\" \"http://www.w3.org/TR/REC-html40/strict.dtd\">\n"
"<html><head><meta name=\"qrichtext\" content=\"1\" /><style type=\"text/css\">\n"
"p, li { white-space: pre-wrap; }\n"
"</style></head><body style=\" font-family:\'Sans\'; font-size:10pt; font-weight:400; font-style:normal;\">\n"
"<p style=\" margin-top:12px; margin-bottom:12px; margin-left:0px; margin-right:0px; -qt-block-indent:0; text-indent:0px;\"><span style=\" font-family:\'Droid Sans\';\">Developed by </span><a href=\"http://www.researchgate.net/profile/Luca_Congedo\"><span style=\" font-family:\'Droid Sans\'; text-decoration: underline; color:#0057ae;\">Luca Congedo</span></a><span style=\" font-family:\'Droid Sans\';\"> (ing.congedoluca@gmail.com), the </span><span style=\" font-family:\'Droid Sans\'; font-weight:600;\">Semi-Automatic Classification Plugin</span><span style=\" font-family:\'Droid Sans\';\"> (SCP) is a free open source plugin for QGIS that allows for the semi-automatic classification (also supervised classification) of remote sensing images.</span></p>\n"
"<p style=\" margin-top:12px; margin-bottom:12px; margin-left:0px; margin-right:0px; -qt-block-indent:0; text-indent:0px;\"><span style=\" font-family:\'Droid Sans\';\">It provides several tools for the download of free images, the preprocessing, the postprocessing, and the raster calculation.</span></p>\n"
"<p align=\"justify\" style=\" margin-top:12px; margin-bottom:12px; margin-left:0px; margin-right:0px; -qt-block-indent:0; text-indent:0px;\"><span style=\" font-family:\'Droid Sans\';\">For more information and tutorials visit the official site </span><span style=\" font-family:\'Droid Sans\'; font-weight:600;\">From GIS to Remote Sensing.</span></p>\n"
"<p align=\"center\" style=\" margin-top:12px; margin-bottom:12px; margin-left:0px; margin-right:0px; -qt-block-indent:0; text-indent:0px;\"><img src=\":/plugins/semiautomaticclassificationplugin/icons/fromGIStoRS.png\" /><a href=\"https://fromgistors.blogspot.com/p/semi-automatic-classification-plugin.html?spref=sacp\"><span style=\" font-family:\'Droid Sans\'; font-size:24pt; text-decoration: underline; color:#0000ff;\">From GIS to Remote Sensing</span></a></p>\n"
"<p style=\" margin-top:12px; margin-bottom:12px; margin-left:0px; margin-right:0px; -qt-block-indent:0; text-indent:0px;\"><span style=\" font-family:\'Droid Sans\'; font-size:9pt;\"><br />Please join the </span><a href=\"https://www.facebook.com/groups/SemiAutomaticClassificationPlugin\"><span style=\" font-family:\'Droid Sans\'; font-size:9pt; text-decoration: underline; color:#0057ae;\">Semi-Automatic Classification Plugin group on Facebook</span></a><span style=\" font-size:9pt;\"> or </span><a href=\"https://github.com/semiautomaticgit/SemiAutomaticClassificationPlugin/discussions\"><span style=\" font-size:9pt; text-decoration: underline; color:#0000ff;\">GitHub discussions</span></a></p>\n"
"<p style=\"-qt-paragraph-type:empty; margin-top:12px; margin-bottom:12px; margin-left:0px; margin-right:0px; -qt-block-indent:0; text-indent:0px; font-size:9pt;\"><br /></p>\n"
"<p align=\"justify\" style=\" margin-top:12px; margin-bottom:12px; margin-left:0px; margin-right:0px; -qt-block-indent:0; text-indent:0px;\"><span style=\" font-family:\'Droid Sans\'; font-style:italic;\">This plugin requires the installation of GDAL, OGR, Numpy, SciPy, and Matplotlib (already bundled with QGIS).</span></p>\n"
"<p align=\"justify\" style=\" margin-top:12px; margin-bottom:12px; margin-left:0px; margin-right:0px; -qt-block-indent:0; text-indent:0px;\"><span style=\" font-family:\'Droid Sans\'; font-style:italic;\">Some tools require the additional installation of: ESA SNAP</span></p>\n"
"<hr />\n"
"<p style=\" margin-top:12px; margin-bottom:12px; margin-left:0px; margin-right:0px; -qt-block-indent:0; text-indent:0px;\"><span style=\" font-weight:600;\">How to cite:</span></p>\n"
"<p align=\"justify\" style=\" margin-top:12px; margin-bottom:12px; margin-left:0px; margin-right:0px; -qt-block-indent:0; text-indent:0px;\"><span style=\" font-family:\'Droid Sans\';\">Congedo, Luca, (2021). Semi-Automatic Classification Plugin: A Python tool for the download and processing of remote sensing images in QGIS. Journal of Open Source Software, 6(64), 3172, </span><a href=\"https://doi.org/10.21105/joss.03172\"><span style=\" text-decoration: underline; color:#0000ff;\">https://doi.org/10.21105/joss.03172</span></a></p>\n"
"<hr />\n"
"<p style=\" margin-top:12px; margin-bottom:12px; margin-left:0px; margin-right:0px; -qt-block-indent:0; text-indent:0px;\"><span style=\" font-family:\'Droid Sans\'; font-size:8pt;\"><br />The Semi-Automatic Classification Plugin is developed by Luca Congedo.</span></p>\n"
"<p style=\" margin-top:12px; margin-bottom:12px; margin-left:0px; margin-right:0px; -qt-block-indent:0; text-indent:0px;\"><span style=\" font-family:\'Droid Sans\'; font-size:8pt;\">Translators:</span></p>\n"
"<p style=\" margin-top:12px; margin-bottom:12px; margin-left:0px; margin-right:0px; -qt-block-indent:0; text-indent:0px;\"><span style=\" font-family:\'Droid Sans\'; font-size:8pt;\">Language: Author<br /></span></p>\n"
"<p align=\"justify\" style=\" margin-top:12px; margin-bottom:12px; margin-left:0px; margin-right:0px; -qt-block-indent:0; text-indent:0px;\"><span style=\" font-family:\'Droid Sans\'; font-size:8pt;\">Semi-Automatic Classification Plugin is free software: you can redistribute it and/or modify it under the terms of the GNU General Public License as published by the Free Software Foundation, version 3 of the License.</span></p>\n"
"<p style=\" margin-top:12px; margin-bottom:12px; margin-left:0px; margin-right:0px; -qt-block-indent:0; text-indent:0px;\"><span style=\" font-family:\'Droid Sans\'; font-size:8pt;\">Semi-Automatic Classification Plugin is distributed in the hope that it will be useful, but WITHOUT ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.</span></p>\n"
"<p style=\" margin-top:12px; margin-bottom:12px; margin-left:0px; margin-right:0px; -qt-block-indent:0; text-indent:0px;\"><span style=\" font-family:\'Droid Sans\'; font-size:8pt;\">See the GNU General Public License for more details. You should have received a copy of the GNU General Public License along with Semi-Automatic Classification Plugin. If not, see <</span><a href=\"http://www.gnu.org/licenses/\"><span style=\" font-family:\'Droid Sans\'; font-size:8pt; text-decoration: underline; color:#0000ff;\">http://www.gnu.org/licenses/</span></a><span style=\" font-family:\'Droid Sans\'; font-size:8pt;\">>.</span></p></body></html>"))
self.SCP_tabs.setTabText(self.SCP_tabs.indexOf(self.tab_About), _translate("SemiAutomaticClassificationPlugin", "About"))
self.main_tabWidget.setTabText(self.main_tabWidget.indexOf(self.tool_tab), _translate("SemiAutomaticClassificationPlugin", "Tool"))
self.main_tabWidget.setTabText(self.main_tabWidget.indexOf(self.help_tab), _translate("SemiAutomaticClassificationPlugin", "Help"))
from . import resources_rc
|
semiautomaticgit/SemiAutomaticClassificationPlugin
|
ui/ui_semiautomaticclassificationplugin.py
|
Python
|
gpl-3.0
| 841,489
|
[
"VisIt"
] |
67be0e08edb3b7528cc32326b3247b48a02507661e0af18d4a74ce0f927cc464
|
#!/usr/bin/env python
# Author: Andrew Jewett (jewett.aij at g mail)
# http://www.chem.ucsb.edu/~sheagroup
# License: 3-clause BSD License (See LICENSE.TXT)
# Copyright (c) 2011, Regents of the University of California
# All rights reserved.
# Usage / documentation text.  Printed verbatim in response to the
# "-?" / "-help" flags and appended to error messages when the argument
# list is malformed (see main() below).
man_page_text = """
nbody_by_type.py reads a LAMMPS data file (or an excerpt of a LAMMPS)
data file containing bonded many-body interactions by atom type
(and bond type), and generates a list of additional interactions
in LAMMPS format consistent with those type (to the standard out).
Typical Usage:
nbody_by_type.py X < old.data > new.data
--or--
nbody_by_type.py X \\
-atoms atoms.data \\
-bonds bonds.data \\
-nbody X.data \\
-nbodybytype X_by_type.data \\
> new_X.data
In both cases "X" denotes the interaction type, which
is either "Angles", "Dihedrals", or "Impropers".
(Support for other interaction types can be added by the user. See below.)
-------- Example 1 -------
nbody_by_type.py X < old.data > new.data
In this example, nbody_by_type.py reads a LAMMPS data file
"orig.data", and extracts the relevant section ("Angles",
"Dihedrals", or "Impropers"). It also looks a section named "X By Type",
(eg. "Angles By type", "Impropers By type", "Impropers By type")
which contains a list of criteria for automatically defining additional
interactions of that type. For example, this file might contain:
Angle By Type
7 1 2 1 * *
8 2 2 * * *
9 3 4 3 * *
The first column is an interaction type ID.
The next 3 columns are atom type identifiers.
The final 2 columns are bond type identifiers.
The * is a wildcard symbol indicating there is no preference for bond types
in this example. (Optionally, regular expressions can also be used to
define a type match, by enclosing the atom or bond type in / slashes.)
The first line tells us to that there should be a 3-body "Angle"
interaction of type "7" whenever an atom of type 1 is bonded to an atom
of type "2", which is bonded to another atom of type "1" again.
The second line tells us that an angle is defined whenever three atoms
are bonded together and the first two are of type "2".
(Redundant angle interactions are filtered.)
New interactions are created for every group of bonded
atoms which match these criteria if they are bonded together
in the relevant way for that interaction type (as determined by
nbody_X.py), and printed to the standard output. For example,
suppose you are automatically generating 3-body "Angle" interactions using:
nbody_by_type Angles < old.data > new.data
The file "new.data" will be identical to "old.data", however the
"Angles By Type" section will be deleted, and the following lines of
text will be added to the "Angles" section:
394 7 5983 5894 5895
395 7 5984 5895 5896
396 7 5985 5896 5897
: : : : :
847 9 14827 14848 14849
The numbers in the first column are counters which assign a ID to
every interaction of that type, and start where the original "Angles"
data left off (New angle ID numbers do not overlap with old ID numbers).
The text in the second column ("7", "9", ...) matches the text from the
first column of the "Angle By Type" section of the input file.
-------- Example 2 -------
nbody_by_type.py X \\
-atoms atoms.data \\
-bonds bonds.data \\
-nbody X.data \\
-nbodybytype X_by_type.data \\
-prefix "SOMESTRING" -suffix "ANOTHERSTRING" \\
> new_X.data
In particular, for Angle interactions:
nbody_by_type.py Angles \\
-atoms atoms.data \\
-bonds bonds.data \\
-nbody angles.data \\
-nbodybytype angles_by_type.data \\
> new_Angles.data
When run this way, nbody_by_type.py behaves exactly the same way
as in Example 1, however only the lines of text corresponding to
the new generated interactions are printed, (not the entire data file).
Also note, that when run this way, nbody_by_type.py does not read the
LAMMPS data from the standard input. Instead, it reads each section of
the data file from a different file indicated by the arguments following
the "-atoms", "-bonds", "-nbody", and "-nbodybytype" flags.
"Angles" is a 3-body interaction style. So when run this way,
nbody_by_type.py will create a 5 (=3+2) column file (new_Angles.data).
Note: the atom, bond and other IDs/types in need not be integers.
Note: This program must be distributed with several python modules, including:
nbody_Angles.py, nbody_Dihedrals.py, and nbody_Impropers.py. These
contain bond definitions for angular, dihedral, and improper interactions.
(In case any new interaction types are ever added to LAMMPS, with only
a few lines of python it is easy to edit to define new bonded
interaction types by supplying new "nbody_X.py" python module.
Refer to the modules listed above for examples.)
Note: Optional "-prefix" and "-suffix" arguments can be included to decorate
the interaction IDs (first column). For example, -prefix "auto_" and
-suffix "_angle", causes "new_Angles.data" to contain the following text:
auto_394_angle 7 5983 5894 5895
auto_395_angle 7 5984 5895 5896
auto_396_angle 7 5985 5896 5897
: : : : :
auto_847_angle 9 14827 14848 14849
"""
# Program metadata, reported in the version banner written to stderr by main().
g_program_name = __file__.split('/')[-1]  # = 'nbody_by_type.py'
g_date_str = '2017-2-06'
g_version_str = '0.20.0'
# Module-level default for the bond-pattern module name.
# NOTE(review): main() assigns this name without a "global" statement, so
# inside main() the name is treated as a *local* variable and this default
# is shadowed there -- confirm this is intentional.
bond_pattern_module_name = ""
import os
import sys
sys.path.append(os.getcwd())  # so user-supplied "-subgraph" modules in cwd can be imported
import importlib
if sys.version < '2.6':
    # NOTE(review): InputError is only imported further below (via the
    # ttree_lex star-import), so on python < 2.6 this raise would itself
    # fail with a NameError before reporting the version problem.
    raise InputError('Error: Using python ' + sys.version + '\n'
                     ' Alas, you must upgrade to a newer version of python (2.6 or later).')
elif sys.version < '2.7':
    # Python 2.6 lacks collections.OrderedDict; fall back to the
    # third-party "ordereddict" backport after warning the user.
    sys.stderr.write('--------------------------------------------------------\n'
                     '----------------- WARNING: OLD PYTHON VERSION ----------\n'
                     ' This program is untested on your python version (' +
                     sys.version + ').\n'
                     ' PLEASE LET ME KNOW IF THIS PROGRAM CRASHES (and upgrade python).\n'
                     ' -Andrew 2013-10-25\n'
                     '--------------------------------------------------------\n'
                     '--------------------------------------------------------\n')
    from ordereddict import OrderedDict
else:
    from collections import OrderedDict
# These helpers come from the rest of the moltemplate package; fall back to
# plain (non-package) imports when this file is run as a stand-alone script.
try:
    from .extract_lammps_data import *
    from .nbody_by_type_lib import GenInteractions_str
    from .ttree_lex import *
    from .lttree_styles import AtomStyle2ColNames, ColNames2AidAtypeMolid
except (ImportError, SystemError, ValueError):
    from extract_lammps_data import *
    from nbody_by_type_lib import GenInteractions_str
    from ttree_lex import *
    from lttree_styles import AtomStyle2ColNames, ColNames2AidAtypeMolid
def GenInteractions_lines(lines_atoms,
                          lines_bonds,
                          lines_nbody,
                          lines_nbodybytype,
                          atom_style,
                          g_bond_pattern,
                          canonical_order,  # function to sort atoms and bonds
                          prefix='',
                          suffix='',
                          report_progress=False,
                          check_undefined=False):
    """
    Generate new n-body interactions (angles/dihedrals/impropers/...) from
    the "By Type" rules and return them as a list of data-file text lines.

    Arguments:
      lines_atoms       -- lines from the "Atoms" section of a LAMMPS data file
      lines_bonds       -- lines from the "Bonds" section
      lines_nbody       -- pre-existing lines of the target section (used only
                           so the new interaction-ID counter continues after them)
      lines_nbodybytype -- lines from the "X By Type" rules section
      atom_style        -- LAMMPS atom_style name (determines column layout)
      g_bond_pattern    -- graph describing the bonded subgraph to search for
      canonical_order   -- function which puts atoms/bonds in canonical order
      prefix, suffix    -- strings decorating each generated interaction ID
      report_progress   -- forwarded to GenInteractions_str()
      check_undefined   -- forwarded to GenInteractions_str()

    Returns a list of strings, one per generated interaction, each of the
    form "<prefix><n><suffix> <coefftype> <atomid> <atomid> ...\\n".
    Raises InputError when an input line has too few columns.
    """
    column_names = AtomStyle2ColNames(atom_style)
    i_atomid, i_atomtype, i_molid = ColNames2AidAtypeMolid(column_names)

    # ---- Parse the "Atoms" section: collect atom IDs and atom types ----
    atomids_str = []
    atomtypes_str = []
    for iv in range(0, len(lines_atoms)):
        line = lines_atoms[iv].strip()
        if '#' in line:
            # discard trailing comments
            icomment = line.find('#')
            line = (line[:icomment]).strip()
        if len(line) > 0:
            tokens = SplitQuotedString(line)
            if ((len(tokens) <= i_atomid) or (len(tokens) <= i_atomtype)):
                raise InputError('Error not enough columns on line ' +
                                 str(iv + 1) + ' of \"Atoms\" section.')
            # (Fixed: the original re-tokenized the same line with a second
            #  redundant SplitQuotedString() call here.)
            atomids_str.append(EscCharStrToChar(tokens[i_atomid]))
            atomtypes_str.append(EscCharStrToChar(tokens[i_atomtype]))

    # ---- Parse the "Bonds" section: bond IDs, bond types, atom pairs ----
    bondids_str = []
    bondtypes_str = []
    bond_pairs = []
    for ie in range(0, len(lines_bonds)):
        line = lines_bonds[ie].strip()
        if '#' in line:
            icomment = line.find('#')
            line = (line[:icomment]).strip()
        if len(line) > 0:
            tokens = SplitQuotedString(line)
            if len(tokens) < 4:
                raise InputError('Error not enough columns on line ' +
                                 str(ie + 1) + ' of \"Bonds\" section.')
            bondids_str.append(EscCharStrToChar(tokens[0]))
            bondtypes_str.append(EscCharStrToChar(tokens[1]))
            bond_pairs.append((EscCharStrToChar(tokens[2]),
                               EscCharStrToChar(tokens[3])))

    # ---- Parse the "X By Type" rules section ----
    typepattern_to_coefftypes = []
    for i in range(0, len(lines_nbodybytype)):
        line = lines_nbodybytype[i].strip()
        if '#' in line:
            icomment = line.find('#')
            line = (line[:icomment]).strip()
        if len(line) > 0:
            tokens = SplitQuotedString(line)
            # Each rule has 1 coeff-type column, one column per vertex (atom
            # type pattern), and optionally one per edge (bond type pattern).
            if ((len(tokens) != 1 + g_bond_pattern.GetNumVerts()) and
                (len(tokens) != 1 + g_bond_pattern.GetNumVerts()
                 + g_bond_pattern.GetNumEdges())):
                raise InputError('Error: Wrong number of columns in \"By Type\" section of data file.\n'
                                 'Offending line:\n' +
                                 '\"' + line + '\"\n'
                                 'Expected either ' +
                                 str(1 + g_bond_pattern.GetNumVerts()) + ' or ' +
                                 str(1 + g_bond_pattern.GetNumVerts() +
                                     g_bond_pattern.GetNumEdges())
                                 + ' columns.')
            coefftype = EscCharStrToChar(tokens[0])
            typepattern = []
            for typestr in tokens[1:]:
                # Types enclosed in /slashes/ are regular expressions;
                # anything else is matched literally (or is a '*' wildcard).
                if ((len(typestr) >= 2) and
                        (typestr[0] == '/') and (typestr[-1] == '/')):
                    regex_str = typestr[1:-1]
                    typepattern.append(re.compile(regex_str))
                else:
                    typepattern.append(EscCharStrToChar(typestr))
            # If the user neglected to specify the bond types, assume '*'
            if len(tokens) == 1 + g_bond_pattern.GetNumVerts():
                typepattern += ['*'] * g_bond_pattern.GetNumEdges()
            typepattern_to_coefftypes.append([typepattern, coefftype])

    # ---- Search for matching subgraphs and collect the new interactions ----
    coefftype_to_atomids_str = GenInteractions_str(bond_pairs,
                                                   g_bond_pattern,
                                                   typepattern_to_coefftypes,
                                                   canonical_order,
                                                   atomids_str,
                                                   atomtypes_str,
                                                   bondids_str,
                                                   bondtypes_str,
                                                   report_progress,
                                                   check_undefined)
    lines_nbody_new = []
    for coefftype, atomids_list in coefftype_to_atomids_str.items():
        for atomids_found in atomids_list:
            # Continue numbering where the pre-existing interactions left off.
            n = len(lines_nbody) + len(lines_nbody_new) + 1
            line = prefix + str(n) + suffix + ' ' + \
                coefftype + ' ' + (' '.join(atomids_found)) + '\n'
            lines_nbody_new.append(line)
    return lines_nbody_new
def GenInteractions_files(lines_data,
                          src_bond_pattern,
                          fname_atoms,
                          fname_bonds,
                          fname_nbody,
                          fname_nbodybytype,
                          section_name,
                          section_name_bytype,
                          atom_style,
                          prefix='',
                          suffix='',
                          report_progress=False,
                          check_undefined=False):
    """
    Collect the input sections, import the bond-pattern module, and delegate
    to GenInteractions_lines().

    Each fname_* argument which is None is read from the corresponding
    section of lines_data (the whole data file read from stdin); otherwise
    the section is read from the named file, skipping blank and '#'-comment
    lines.  The module named by src_bond_pattern (eg. "nbody_Angles") is
    located either next to this script, inside the package, or inside the
    package's "nbody_alt_symmetry" subdirectory; it must provide the
    "bond_pattern" and "canonical_order" attributes.

    Exits with status -1 when a required file cannot be opened or the
    bond-pattern module cannot be located.
    """
    # ---- "Atoms" section ----
    if fname_atoms is None:
        lines_atoms = [
            line for line in ExtractDataSection(lines_data, 'Atoms')]
    else:
        try:
            f = open(fname_atoms, 'r')
        except IOError:
            # (Fixed: was a bare "except:", inconsistent with the
            #  "-bonds" branch below.)
            sys.stderr.write('Error: Unable to open file \"' +
                             fname_atoms + '\" for reading.\n')
            sys.exit(-1)
        lines_atoms = [line for line in f.readlines()
                       if ((len(line.strip()) > 0) and (line.strip()[0] != '#'))]
        f.close()

    # ---- "Bonds" section ----
    if fname_bonds is None:
        lines_bonds = [
            line for line in ExtractDataSection(lines_data, 'Bonds')]
    else:
        try:
            f = open(fname_bonds, 'r')
        except IOError:
            sys.stderr.write('Error: Unable to open file \"' +
                             fname_bonds + '\" for reading.\n')
            sys.exit(-1)
        lines_bonds = [line for line in f.readlines()
                       if ((len(line.strip()) > 0) and (line.strip()[0] != '#'))]
        f.close()

    # ---- Pre-existing lines of the target section (optional) ----
    if fname_nbody is None:
        lines_nbody = [line for line in ExtractDataSection(
            lines_data, section_name)]
    else:
        try:
            f = open(fname_nbody, 'r')
            lines_nbody = [line for line in f.readlines()
                           if ((len(line.strip()) > 0) and (line.strip()[0] != '#'))]
            f.close()
        except IOError:
            # This file is optional; proceed with no pre-existing entries.
            lines_nbody = []

    # ---- "By Type" rules section ----
    if fname_nbodybytype is None:
        lines_nbodybytype = [line for
                             line in ExtractDataSection(lines_data,
                                                        section_name_bytype)]
    else:
        try:
            f = open(fname_nbodybytype, 'r')
        except IOError:
            # (Fixed: was a bare "except:".)
            sys.stderr.write('Error: Unable to open file \"' +
                             fname_nbodybytype + '\" for reading.\n')
            sys.exit(-1)
        lines_nbodybytype = [line for line in f.readlines()
                             if ((len(line.strip()) > 0) and (line.strip()[0] != '#'))]
        f.close()

    # ---- Locate and import the bond-pattern module ----
    # search locations
    package_opts = [[src_bond_pattern, __package__],
                    ['nbody_alt_symmetry.' + src_bond_pattern, __package__]]
    if __package__:
        # When running as part of a package, try relative imports too.
        for i in range(0, len(package_opts)):
            package_opts[i][0] = '.' + package_opts[i][0]
        package_opts.append(['.' + src_bond_pattern,
                             __package__ + '.nbody_alt_symmetry'])
    g = None
    for name, pkg in package_opts:
        try:
            g = importlib.import_module(name, pkg)
            break
        except (ImportError, SystemError, ValueError):
            pass
    if g is None:
        sys.stderr.write('Error: Unable to locate file \"' +
                         src_bond_pattern + '.py\"\n'
                         ' (Did you mispell the file name?\n'
                         ' Check the \"nbody_alt_symmetry/\" directory.)\n')
        sys.exit(-1)

    # ---- Delegate the real work ----
    return GenInteractions_lines(lines_atoms,
                                 lines_bonds,
                                 lines_nbody,
                                 lines_nbodybytype,
                                 atom_style,
                                 g.bond_pattern,
                                 g.canonical_order,
                                 prefix,
                                 suffix,
                                 report_progress,
                                 check_undefined)
def main():
    """
    Command-line entry point.  Parses the argument list documented in
    man_page_text, reads the relevant data sections (from stdin, or from the
    files named by -atoms/-bonds/-nbody/-nbodybytype), generates the new
    interactions, and writes the result to stdout.  Exits with status -1 on
    malformed arguments or unreadable input.
    """
    sys.stderr.write(g_program_name + ' v' +
                     g_version_str + ' ' + g_date_str + ' ')
    if sys.version < '3':
        sys.stderr.write(' (python version < 3)\n')
    else:
        sys.stderr.write('\n')
    try:
        fname_atoms = None
        fname_bonds = None
        fname_nbody = None
        fname_nbodybytype = None
        atom_style = 'full'
        prefix = ''
        suffix = ''
        check_undefined = False
        # Fixed: initialize locally.  Assignments further below make this
        # name local to main(), so reading it at the "if not set by user"
        # check without a prior assignment (i.e. whenever the "-subgraph"
        # flag was absent) raised UnboundLocalError.
        bond_pattern_module_name = ''
        argv = [arg for arg in sys.argv]
        if len(argv) == 1:
            raise InputError('Error: Missing argument required.\n'
                             ' The \"' + g_program_name +
                             '\" program requires an argument containing the\n'
                             ' name of a section from a LAMMPS data file storing bonded interactions.\n'
                             ' (For example: "Angles", "Dihedrals", or "Impropers".)\n'
                             '\n'
                             '--------------- general documentation -------------\n'
                             '\n' + man_page_text + '\n')
        section_name = ''  # (This will be replaced later.)
        section_name_bytype = ''  # (This will be replaced later.)
        # Loop over the remaining arguments not processed yet.
        # These arguments are specific to the lttree.py program
        # and are not understood by ttree.py:
        i = 1
        while i < len(argv):
            if ((argv[i].lower() == '-?') or
                    (argv[i].lower() == '--?') or
                    (argv[i].lower() == '-help') or
                    (argv[i].lower() == '--help')):
                # Fixed: the fourth test was a duplicate of '-help' where
                # '--help' was clearly intended, and the help text was only
                # printed when the flag was the *last* argument -- otherwise
                # "i" was never advanced and the loop spun forever.  Now the
                # help text is printed unconditionally.
                sys.stdout.write(man_page_text + '\n')
                sys.exit(0)
            elif argv[i].lower() == '-atoms':
                if i + 1 >= len(argv):
                    raise InputError('Error: ' + argv[i] + ' flag should be followed by a file name containing lines of\n'
                                     ' text which might appear in the "Atoms" section of a LAMMPS data file.\n')
                fname_atoms = argv[i + 1]
                del argv[i:i + 2]
            elif argv[i].lower() == '-bonds':
                if i + 1 >= len(argv):
                    raise InputError('Error: ' + argv[i] + ' flag should be followed by a file name containing lines of\n'
                                     ' text which might appear in the "Bonds" section of a LAMMPS data file.\n')
                fname_bonds = argv[i + 1]
                del argv[i:i + 2]
            elif argv[i].lower() == '-nbody':
                if i + 1 >= len(argv):
                    raise InputError(
                        'Error: ' + argv[i] + ' flag should be followed by a file name\n')
                fname_nbody = argv[i + 1]
                del argv[i:i + 2]
            elif argv[i].lower() == '-nbodybytype':
                if i + 1 >= len(argv):
                    raise InputError(
                        'Error: ' + argv[i] + ' flag should be followed by a file name\n')
                fname_nbodybytype = argv[i + 1]
                del argv[i:i + 2]
            elif ((argv[i].lower() == '-atom-style') or
                    (argv[i].lower() == '-atom_style')):
                if i + 1 >= len(argv):
                    raise InputError('Error: ' + argv[i] + ' flag should be followed by a an atom_style name.\n'
                                     ' (Or single quoted string which includes a space-separated\n'
                                     ' list of column names.)\n')
                atom_style = argv[i + 1]
                del argv[i:i + 2]
            elif argv[i].lower() == '-prefix':
                if i + 1 >= len(argv):
                    raise InputError('Error: ' + argv[i] + ' flag should be followed by a prefix string\n'
                                     ' (a string you want to appear to the left of the integer\n'
                                     ' which counts the bonded interactions you have generated.)\n')
                prefix = argv[i + 1]
                del argv[i:i + 2]
            elif argv[i].lower() == '-suffix':
                if i + 1 >= len(argv):
                    raise InputError('Error: ' + argv[i] + ' flag should be followed by a suffix string\n'
                                     ' (a string you want to appear to the right of the integer\n'
                                     ' which counts the bonded interactions you have generated.)\n')
                # Fixed: this previously assigned to "prefix", silently
                # discarding the requested suffix (and clobbering any
                # prefix supplied earlier).
                suffix = argv[i + 1]
                del argv[i:i + 2]
            elif argv[i].lower() == '-subgraph':
                if i + 1 >= len(argv):
                    raise InputError('Error: ' + argv[i] + ' flag should be followed by the name of a python file\n'
                                     ' containing the definition of the subgraph you are searching for\n'
                                     ' and it\'s symmetry properties.\n'
                                     ' (See nbody_Dihedrals.py for example.)\n')
                bond_pattern_module_name = argv[i + 1]
                # If the file name ends in ".py", then strip off this suffix.
                # (Note: str.rstrip('.py') would strip any trailing 'p'/'y'/'.'
                #  characters, not the literal suffix, so search for it instead.)
                pc = bond_pattern_module_name.rfind('.py')
                if pc != -1:
                    bond_pattern_module_name = bond_pattern_module_name[0:pc]
                del argv[i:i + 2]
            elif argv[i].lower() == '-section':
                if i + 1 >= len(argv):
                    raise InputError('Error: ' + argv[i] + ' flag should be followed by the name of the LAMMPS\n'
                                     ' Data section describing the type of interaction being generated.\n'
                                     ' (For example: \"Angles\", \"Dihedrals\", \"Impropers\", etc...)\n')
                section_name = argv[i + 1]
                del argv[i:i + 2]
            elif argv[i].lower() == '-sectionbytype':
                if i + 1 >= len(argv):
                    raise InputError('Error: ' + argv[i] + ' flag should be followed by the name of the\n'
                                     ' write_once(\"???? By Type\") section describing how to create the\n'
                                     ' interactions. (For example: \"Angles By Type\", \"Dihedrals By Type\",\n'
                                     ' \"Impropers By Type\", etc... Note that this argument\n'
                                     ' will contain spaces, so surround it with quotes.)\n')
                section_name_bytype = argv[i + 1]
                del argv[i:i + 2]
            elif argv[i].lower() == '-checkff':
                check_undefined = True
                del argv[i:i + 1]
            elif argv[i][0] == '-':
                raise InputError('Error(' + g_program_name + '):\n'
                                 'Unrecogized command line argument \"' + argv[i] + '\"\n')
            else:
                # Not a flag: leave it in place (it is the section name).
                i += 1
        if len(argv) == 1:
            # Section name was supplied via -section/-sectionbytype flags.
            pass
        elif len(argv) == 2:
            section_name = argv[1]
            section_name_bytype = section_name + ' By Type'
            # default bond_pattern_module name
            if bond_pattern_module_name == '':  # <--if not set by user
                bond_pattern_module_name = 'nbody_' + section_name
            del argv[1:2]
        else:
            # if there are more than 2 remaining arguments,
            problem_args = ['\"' + arg + '\"' for arg in argv[1:]]
            raise InputError('Syntax Error(' + g_program_name + '):\n\n'
                             ' Problem with argument list.\n'
                             ' The remaining arguments are:\n\n'
                             ' ' + (' '.join(problem_args)) + '\n\n'
                             ' (The actual problem may be earlier in the argument list.)\n')
        if ((section_name == '') or
                (section_name_bytype == '') or
                (bond_pattern_module_name == '')):
            raise InputError('Syntax Error(' + g_program_name + '):\n\n'
                             ' You have not defined the following arguments:\n'
                             ' -section name\n'
                             ' -sectionbytype namebytype\n'
                             ' -subgraph pythonfile.py\n')
        # ------------ Done parsing argument list ----------
        # If any section was supplied as a separate file, only the generated
        # interactions are printed; otherwise the whole data file is read
        # from stdin and echoed back (with modifications).
        if (fname_atoms or fname_bonds or fname_nbody or fname_nbodybytype):
            output_full_DATA_file = False
            lines_data = []
        else:
            output_full_DATA_file = True
            lines_data = sys.stdin.readlines()
        # Calculate the interactions and generate a list of lines of text
        lines_new_interactions = \
            GenInteractions_files(lines_data,
                                  bond_pattern_module_name,
                                  fname_atoms,
                                  fname_bonds,
                                  fname_nbody,
                                  fname_nbodybytype,
                                  section_name,
                                  section_name_bytype,
                                  atom_style,
                                  prefix,
                                  suffix,
                                  True,
                                  check_undefined)
        if not output_full_DATA_file:
            # ...then only print out the interactions which were generated
            # by this program, omitting any lines from the original data file.
            for line in lines_new_interactions:
                sys.stdout.write(line)
        else:
            # ...then print out the entire data file: update the interaction
            # counter in the header (eg. replace "2 Angles" with "102 Angles"
            # if 100 new interactions were generated), insert the new lines
            # into the corresponding section, and delete the "By Type" section.
            for i in range(0, len(lines_data)):
                line = lines_data[i].strip()
                tokens = SplitQuotedString(line)
                # updating the interaction counter
                if ((len(tokens) == 2) and (tokens[1] == (section_name).lower())):
                    tokens[0] = str(int(tokens[0]) +
                                    len(lines_new_interactions))
                    lines_data[i] = ' '.join(tokens) + '\n'
                # stop when you come to a section header
                elif line in lammps_data_sections:
                    # "lammps_data_sections" is defined in "extract_lammps_data.py"
                    break
            # locate the appropriate section of the data file
            # (storing the type of interactions we just created)
            i_nbody_a, i_nbody_b = \
                FindDataSection(lines_data, section_name)
            if i_nbody_a == -1:
                if len(lines_new_interactions) > 0:
                    # If not found, create a new section at the end of the file,
                    # containing a section name followed by the list of lines
                    lines_data += ['\n', section_name + '\n', '\n'] + \
                        lines_new_interactions + ['\n']
            else:
                # Insert the new lines into the existing section
                lines_data[i_nbody_b:i_nbody_b] = lines_new_interactions
            # Figure out where the "By Type" section is located
            # (so we skip over it)
            i_bytype_a, i_bytype_b = \
                FindDataSection(lines_data, section_name_bytype)
            in_bytype_section = False
            for i in range(0, len(lines_data)):
                line = lines_data[i].strip()
                # Omit all lines of text in the 'By Type' section (including
                # the header and comments or blank lines which immediately
                # follow it.)
                if line == section_name_bytype:
                    in_bytype_section = True
                elif i == i_bytype_b:
                    in_bytype_section = False
                if not in_bytype_section:
                    sys.stdout.write(lines_data[i])
    except (ValueError, InputError) as err:
        sys.stderr.write('\n' + str(err) + '\n')
        sys.exit(-1)
    return


if __name__ == '__main__':
    main()
|
smsaladi/moltemplate
|
moltemplate/nbody_by_type.py
|
Python
|
bsd-3-clause
| 30,585
|
[
"LAMMPS"
] |
fd6035b4c399d258841e8f670a900cfc9dfd4c7e1e9510eb84222f416d182e67
|
import os
from gpaw import GPAW, restart
from ase import Atoms
from gpaw.test import equal
from math import sqrt
import numpy as np
# Restart-file round-trip test: run a short spin-polarized Na3 calculation,
# write restart files in every available format, reload each one, and check
# that energies, forces, magnetic moments and eigenvalues are reproduced.
modes = ['gpw']
try:
    # hdf5 output is only exercised when h5py is installed
    import h5py
    modes.append('hdf5')
except ImportError:
    pass
d = 3.0
# Spin-polarized Na trimer in a small periodic cell.
atoms = Atoms('Na3', positions=[( 0, 0, 0),
                                ( 0, 0, d),
                                ( 0, d*sqrt(3./4.), d/2.)],
              magmoms=[1.0, 1.0, 1.0],
              cell=(3.5, 3.5, 4.+2/3.),
              pbc=True)
# Only a short, non-converged calculation
conv = {'eigenstates': 1e-2, 'energy':2e-1, 'density':1e-1}
calc = GPAW(h=0.30, nbands=3, convergence=conv)
atoms.set_calculator(calc)
# Reference values from the initial run.
e0 = atoms.get_potential_energy()
niter0 = calc.get_number_of_iterations()
f0 = atoms.get_forces()
m0 = atoms.get_magnetic_moments()
eig00 = calc.get_eigenvalues(spin=0)
eig01 = calc.get_eigenvalues(spin=1)
# Write the restart file(s)
for mode in modes:
    calc.write('tmp.%s' % mode)
del atoms, calc
# Try restarting from all the files
for mode in modes:
    atoms, calc = restart('tmp.%s' % mode)
    e1 = atoms.get_potential_energy()
    # NOTE(review): the bare except silently ignores any failure here
    # (presumably backends without iteration info) -- confirm intent.
    try: # number of iterations needed in restart
        niter1 = calc.get_number_of_iterations()
    except: pass
    f1 = atoms.get_forces()
    m1 = atoms.get_magnetic_moments()
    eig10 = calc.get_eigenvalues(spin=0)
    eig11 = calc.get_eigenvalues(spin=1)
    # Restarted values must match the reference run essentially exactly.
    print e0, e1
    equal(e0, e1, 1e-10)
    print f0, f1
    for ff0, ff1 in zip(f0, f1):
        err = np.linalg.norm(ff0-ff1)
        assert err <= 1e-10
    print m0, m1
    for mm0, mm1 in zip(m0, m1):
        equal(mm0, mm1, 1e-10)
    print 'A',eig00, eig10
    for eig0, eig1 in zip(eig00, eig10):
        equal(eig0, eig1, 1e-10)
    print 'B',eig01, eig11
    for eig0, eig1 in zip(eig01, eig11):
        equal(eig0, eig1, 1e-10)
# Regression check against stored reference values for this system.
energy_tolerance = 0.0002
niter_tolerance = 0
equal(e0, -0.52198, energy_tolerance)
equal(niter0, 6, niter_tolerance)
equal(e1, -0.52198, energy_tolerance)
|
qsnake/gpaw
|
gpaw/test/restart.py
|
Python
|
gpl-3.0
| 1,993
|
[
"ASE",
"GPAW"
] |
17ba3efe1c985142c070950707458a585896b4e2b80e1a0c46c6cea70e9d53a7
|
#!/usr/bin/env python
# -*- coding: utf-8 -*-
# (c) 2012 Michal Kalewski <mkalewski at cs.put.poznan.pl>
#
# This file is a part of the Simple Network Simulator (sim2net) project.
# USE, MODIFICATION, COPYING AND DISTRIBUTION OF THIS SOFTWARE IS SUBJECT TO
# THE TERMS AND CONDITIONS OF THE MIT LICENSE. YOU SHOULD HAVE RECEIVED A COPY
# OF THE MIT LICENSE ALONG WITH THIS SOFTWARE; IF NOT, YOU CAN DOWNLOAD A COPY
# FROM HTTP://WWW.OPENSOURCE.ORG/.
#
# For bug reports, feature and support requests please visit
# <https://github.com/mkalewski/sim2net/issues>.
"""
This package contains miscellaneous utility modules and classes.
"""
__docformat__ = 'reStructuredText'
__all__ = ['logger', 'randomness', 'verification']
|
mkalewski/sim2net
|
sim2net/utility/__init__.py
|
Python
|
mit
| 728
|
[
"VisIt"
] |
49b99d9de0234e377c759063a39809ee521f369092b1270de7eb4906e7ba0a9b
|
#!/usr/bin/python
# -*- coding: utf-8 -*-
#
# --- BEGIN_HEADER ---
#
# tail - CGI entry point that forwards requests to the shared tail functionality
# Copyright (C) 2003-2009 The MiG Project lead by Brian Vinter
#
# This file is part of MiG.
#
# MiG is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation; either version 2 of the License, or
# (at your option) any later version.
#
# MiG is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program; if not, write to the Free Software
# Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301, USA.
#
# -- END_HEADER ---
#
# NOTE(review): ``cgi`` appears unused below -- confirm before removing.
import cgi
import cgitb
# Render detailed tracebacks in the browser if this CGI script fails.
cgitb.enable()
from shared.functionality.tail import main
from shared.cgiscriptstub import run_cgi_script
# Delegate all request handling to the shared CGI wrapper around tail's main().
run_cgi_script(main)
|
heromod/migrid
|
mig/cgi-bin/tail.py
|
Python
|
gpl-2.0
| 1,096
|
[
"Brian"
] |
35c1a80cdf3cdf2f65ed0b700bce4192ff847f506b471b5e68068faf1a3ab094
|
# Python standard modules
import numpy as np
import pandas as pd
import matplotlib.pyplot as plt
import matplotlib.mlab as mlab
from mpl_toolkits.mplot3d import Axes3D
import seaborn as sns
import statsmodels.api as sm
import matplotlib.colors as mcolors
import matplotlib as mpl
from matplotlib.pylab import *
from mpl_toolkits.axes_grid1 import host_subplot
# --- Plot 1: stacked bar chart of system cost components (energy, wind,
# uncovered fixed/variable cost, PLR) per wind-penetration level. ---
resultsdf_plot1 = pd.read_excel('PLR_plots.xlsx', 'plot1')
# Results
WindLevel = resultsdf_plot1['wind_pen'].tolist()
Energycost = resultsdf_plot1['cost_energy'].tolist()
EnergycostWind = resultsdf_plot1['cost_wind'].tolist()
PLRcost = resultsdf_plot1['cost_plr'].tolist()
Fixedcost = resultsdf_plot1['mm_fixedcost'].tolist()
sns.set_color_codes("dark")
sns.set_style('ticks')
fig = plt.figure(1)
width = 0.35
ind = np.arange(len(WindLevel))
# Each bar segment is stacked on the running total ``new`` via bottom=.
p3 = plt.bar(ind, Energycost, width, color=sns.xkcd_rgb["windows blue"], label='$Cost\ of\ Energy$')
p2 = plt.bar(ind, EnergycostWind, width, color=sns.xkcd_rgb["medium green"],bottom=Energycost, label='$Cost\ of\ Wind$')
new = [x + y for x, y in zip(Energycost, EnergycostWind)]
p1 = plt.bar(ind, Fixedcost, width, color=sns.xkcd_rgb["amber"],bottom=new, label='$Variable\ and\ Fixed\ Cost\ Not\ Covered$')
new = [x + y for x, y in zip(new, Fixedcost)]
p4 = plt.bar(ind, PLRcost, width, color=sns.xkcd_rgb["pale red"],bottom=new, label='$Cost\ of\ PLR$')
plt.grid(True, linestyle='-', which='major', color='lightgray', alpha=0.5)
plt.axis([-1, 10, 0, 2.5])
plt.ylabel('$Cost\ (M$€$/day)$')
plt.xlabel('$Average\ Wind\ Penetration\ Level\ ($%$)$')
plt.xticks(ind + width/2., ('0', '9.5', '19', '28', '38', '47', '56', '66','70','77'))
plt.yticks(np.arange(0, 3, 0.5))
plt.legend(loc='upper center', bbox_to_anchor=(.5, 1.15), ncol=2, fancybox=True, shadow=True)
axes = plt.gca()
# NOTE(review): set_axis_bgcolor is the pre-2.0 matplotlib spelling of
# set_facecolor -- confirm the pinned matplotlib version before upgrading.
axes.set_axis_bgcolor('whitesmoke')
# plt.savefig('SEpeakloadreserve.pdf')
plt.show()
# --- Plot 6: number of PLR activations vs. wind-penetration level, one
# line per PLR demand size (5/10/15/20 %). ---
resultsdf_plot6 = pd.read_excel('PLR_plots.xlsx', 'plot6')
# Results
WindLevel = resultsdf_plot6['wind_pen'].tolist()
PLR20 = resultsdf_plot6['PLR20'].tolist()
PLR15 = resultsdf_plot6['PLR15'].tolist()
PLR10 = resultsdf_plot6['PLR10'].tolist()
PLR5 = resultsdf_plot6['PLR5'].tolist()
sns.set_color_codes("dark")
sns.set_style('ticks')
fig = plt.figure(1)
plt.plot(WindLevel, PLR5, c=sns.xkcd_rgb["windows blue"],alpha=1, label = r"$5\;$%$\;PLR$")
plt.plot(WindLevel, PLR10, c=sns.xkcd_rgb["medium green"],alpha=1, label = r"$10\;$%$\;PLR$")
plt.plot(WindLevel, PLR15, c=sns.xkcd_rgb["amber"],alpha=1, label = r"$15\;$%$\;PLR$")
plt.plot(WindLevel, PLR20, c=sns.xkcd_rgb["pale red"],alpha=1, label = r"$20\;$%$\;PLR$")
plt.grid(True, linestyle='-', which='major', color='lightgrey', alpha=0.5)
plt.axis([0.0, 77, 0, 35])
plt.ylabel('$Number\ of\ Activations$')
plt.xlabel('$Average\ Wind\ Penetration\ Level\ ($%$)$')
plt.legend(loc='upper center', bbox_to_anchor=(.5, 1.10), ncol=4, fancybox=True, shadow=True)
axes = plt.gca()
axes.set_axis_bgcolor('whitesmoke')
# plt.savefig('peakloadreserveNOA.pdf')
plt.show()
# --- Plots 2 & 3: two-panel figure (pylab state interface) showing total
# cost (left) and missing money (right) vs. wind penetration for different
# PLR demand sizes. ---
resultsdf_plot2 = pd.read_excel('PLR_plots.xlsx', 'plot2')
resultsdf_plot3 = pd.read_excel('PLR_plots.xlsx', 'plot3')
WindLevel = resultsdf_plot2['wind_pen'].tolist()
Energycost5 = resultsdf_plot2['CostOfEnergy005'].tolist()
Energycost10 = resultsdf_plot2['CostOfEnergy010'].tolist()
Energycost15 = resultsdf_plot2['CostOfEnergy015'].tolist()
Energycost20 = resultsdf_plot2['CostOfEnergy020'].tolist()
MM5 = resultsdf_plot3['mm005'].tolist()
MM10 = resultsdf_plot3['mm010'].tolist()
MM15 = resultsdf_plot3['mm015'].tolist()
MM20 = resultsdf_plot3['mm020'].tolist()
sns.set_color_codes("dark")
sns.set_style('ticks')
# ``figure``/``subplot2grid`` come from the ``matplotlib.pylab`` star import.
f0 = figure(num = 0, figsize = (8, 12))#, dpi = 300)
f0.suptitle("$Peak\ Load\ Reserve\ Demand\ Increase$", fontsize=14)
ax01 = subplot2grid((2, 2), (0, 0))
ax02 = subplot2grid((2, 2), (0, 1))
ax01.set_ylim(0,4)
ax02.set_ylim(0,0.4)
ax01.set_xlim(0,77)
ax02.set_xlim(0,77)
ax01.grid(True)
ax02.grid(True)
ax01.set_xlabel("$Average\ Wind\ Penetration\ Level\ ($%$)$")
ax01.set_ylabel("$Total\ Cost\ (M$€$/day)$")
ax02.set_xlabel("$Average\ Wind\ Penetration\ Level\ ($%$)$")
ax02.set_ylabel("$Missing\ Money\ from\ Var.\ and\ Fixed\ Cost\ (M$€$/day)$")
ax02.yaxis.tick_right()
p011, = ax01.plot(WindLevel, Energycost5, c=sns.xkcd_rgb["windows blue"],alpha=1, label = "$5$%$\ PLR$")
p012, = ax01.plot(WindLevel, Energycost10, c=sns.xkcd_rgb["medium green"],alpha=1, label = "$10$%$\ PLR$")
p013, = ax01.plot(WindLevel, Energycost15, c=sns.xkcd_rgb["amber"],alpha=1, label = "$15$%$\ PLR$")
p014, = ax01.plot(WindLevel, Energycost20, c=sns.xkcd_rgb["pale red"],alpha=1, label = "$20$%$\ PLR$")
p021, = ax02.plot(WindLevel, MM5, c=sns.xkcd_rgb["windows blue"],alpha=1, label = "$5$%$\ PLR$")
p022, = ax02.plot(WindLevel, MM10, c=sns.xkcd_rgb["medium green"],alpha=1, label = "$10$%$\ PLR$")
p023, = ax02.plot(WindLevel, MM15, c=sns.xkcd_rgb["amber"],alpha=1, label = "$15$%$\ PLR$")
p024, = ax02.plot(WindLevel, MM20, c=sns.xkcd_rgb["pale red"],alpha=1, label = "$20$%$\ PLR$")
# The second legend() call replaces the first; only its placement is kept.
ax01.legend([p011,p012,p013,p014], [p011.get_label(),p012.get_label(),p013.get_label(),p014.get_label()])
legend = ax01.legend(loc='upper center', bbox_to_anchor=(1, 1.15),
                     ncol=4, fancybox=True, shadow=True)
ax01.set_axis_bgcolor('whitesmoke')
ax02.set_axis_bgcolor('whitesmoke')
figure(0)
# f0.savefig('PLRdemandincrease.pdf')
show()
# --- Plots 7 & 8: missing money vs. wind penetration for 15 % (left) and
# 20 % (right) PLR demand, one line per activation-price cap. ---
resultsdf_plot7 = pd.read_excel('PLR_plots.xlsx', 'plot7')
resultsdf_plot8 = pd.read_excel('PLR_plots.xlsx', 'plot8')
WindLevel = resultsdf_plot7['wind_pen'].tolist()
MMHB = resultsdf_plot7['HighestBid'].tolist()
MM500 = resultsdf_plot7['AP500'].tolist()
MM1000 = resultsdf_plot7['AP1000'].tolist()
# NOTE(review): this reads the 'wind_pen' column but is plotted and labelled
# as the "Highest Bid" series; the parallel plot9/plot10 section reads
# 'HighestBid' here -- looks like a copy-paste slip, confirm against the
# workbook before changing.
MMHB2 = resultsdf_plot8['wind_pen'].tolist()
MM150 = resultsdf_plot8['AP150'].tolist()
MM250 = resultsdf_plot8['AP250'].tolist()
sns.set_color_codes("dark")
sns.set_style('ticks')
f0 = figure(num = 0, figsize = (8, 12))
ax01 = subplot2grid((2, 2), (0, 0))
ax02 = subplot2grid((2, 2), (0, 1))
ax01.set_title(r'$PLR\ demand\ 15\ \%$', y=1.12)
ax02.set_title(r'$PLR\ demand\ 20\ \%$', y=1.12)
ax01.set_ylim(0,0.4)
ax02.set_ylim(0,0.3)
ax01.set_xlim(0,77)
ax02.set_xlim(0,77)
ax01.grid(True)
ax02.grid(True)
ax01.set_xlabel("$Average\ Wind\ Penetration\ Level\ ($%$)$")
ax01.set_ylabel("$Missing\ Money\ from\ Var.\ and\ Fixed\ Cost\ (M$€$/day)$")
ax02.set_xlabel("$Average\ Wind\ Penetration\ Level\ ($%$)$")
ax02.set_ylabel("$Missing\ Money\ from\ Var.\ and\ Fixed\ Cost\ (M$€$/day)$")
p011, = ax01.plot(WindLevel, MMHB, c=sns.xkcd_rgb["windows blue"],alpha=1, label = "$Highest\ Bid$")
p012, = ax01.plot(WindLevel, MM500, c=sns.xkcd_rgb["medium green"],alpha=1, label = "$500\ $€$/MWh$")
p013, = ax01.plot(WindLevel, MM1000, c=sns.xkcd_rgb["pale red"],alpha=1, label = "$1000\ $€$/MWh$")
p021, = ax02.plot(WindLevel, MMHB2, c=sns.xkcd_rgb["windows blue"],alpha=1, label = "$Highest\ Bid$")
p022, = ax02.plot(WindLevel, MM150, c=sns.xkcd_rgb["medium green"],alpha=1, label = "$150\ $€$/MWh$")
p023, = ax02.plot(WindLevel, MM250, c=sns.xkcd_rgb["pale red"],alpha=1, label = "$250\ $€$/MWh$")
ax02.yaxis.tick_right()
# The second legend() call per axes replaces the first.
ax01.legend([p011,p012,p013], [p011.get_label(),p012.get_label(),p013.get_label()])
ax02.legend([p021,p022,p023], [p021.get_label(),p022.get_label(),p023.get_label()])
legend = ax01.legend(loc='upper center', bbox_to_anchor=(0.5, 1.15),
                     ncol=2, fancybox=True, shadow=True)
legend = ax02.legend(loc='upper center', bbox_to_anchor=(0.5, 1.15),
                     ncol=2, fancybox=True, shadow=True)
ax01.set_axis_bgcolor('whitesmoke')
ax02.set_axis_bgcolor('whitesmoke')
figure(0)
# f0.savefig('PLRmissingmoney.pdf')
show()
# --- Plots 9 & 10: total cost vs. wind penetration for 15 % (left) and
# 20 % (right) PLR demand, one line per activation-price cap. ---
resultsdf_plot9 = pd.read_excel('PLR_plots.xlsx', 'plot9')
resultsdf_plot10 = pd.read_excel('PLR_plots.xlsx', 'plot10')
WindLevel = resultsdf_plot9['wind_pen'].tolist()
MMHB = resultsdf_plot9['HighestBid'].tolist()
MM500 = resultsdf_plot9['AP500'].tolist()
MM1000 = resultsdf_plot9['AP1000'].tolist()
MMHB2 = resultsdf_plot10['HighestBid'].tolist()
MM150 = resultsdf_plot10['AP150'].tolist()
MM250 = resultsdf_plot10['AP250'].tolist()
sns.set_color_codes("dark")
sns.set_style('ticks')
f0 = figure(num = 0, figsize = (8, 12))
ax01 = subplot2grid((2, 2), (0, 0))
ax02 = subplot2grid((2, 2), (0, 1))
ax01.set_title(r'$PLR\ demand\ 15\ \%$', y=1.12)
ax02.set_title(r'$PLR\ demand\ 20\ \%$', y=1.12)
ax01.set_ylim(0,9)
ax02.set_ylim(0,6.5)
ax01.set_xlim(0,77)
ax02.set_xlim(0,77)
ax01.grid(True)
ax02.grid(True)
ax01.set_xlabel("$Total\ Wind\ Penetration\ Level\ ($%$)$")
ax01.set_ylabel("$Total\ Cost\ (M$€$/day)$")
ax02.set_xlabel("$Average\ Wind\ Penetration\ Level\ ($%$)$")
ax02.set_ylabel("$Total\ Cost\ (M$€$/day)$")
p011, = ax01.plot(WindLevel, MMHB, c=sns.xkcd_rgb["windows blue"],alpha=1, label = "$Highest\ Bid$")
p012, = ax01.plot(WindLevel, MM500, c=sns.xkcd_rgb["medium green"],alpha=1, label = "$500\ $€$/MWh$")
p013, = ax01.plot(WindLevel, MM1000, c=sns.xkcd_rgb["pale red"],alpha=1, label = "$1000\ $€$/MWh$")
p021, = ax02.plot(WindLevel, MMHB2, c=sns.xkcd_rgb["windows blue"],alpha=1, label = "$Highest\ Bid$")
p022, = ax02.plot(WindLevel, MM150, c=sns.xkcd_rgb["medium green"],alpha=1, label = "$150\ $€$/MWh$")
p023, = ax02.plot(WindLevel, MM250, c=sns.xkcd_rgb["pale red"],alpha=1, label = "$250\ $€$/MWh$")
ax02.yaxis.tick_right()
# The second legend() call per axes replaces the first.
ax01.legend([p011,p012,p013], [p011.get_label(),p012.get_label(),p013.get_label()])
ax02.legend([p021,p022,p023], [p021.get_label(),p022.get_label(),p023.get_label()])
legend = ax01.legend(loc='upper center', bbox_to_anchor=(0.5, 1.15),
                     ncol=2, fancybox=True, shadow=True)
legend = ax02.legend(loc='upper center', bbox_to_anchor=(0.5, 1.15),
                     ncol=2, fancybox=True, shadow=True)
ax01.set_axis_bgcolor('whitesmoke')
ax02.set_axis_bgcolor('whitesmoke')
figure(0)
# f0.savefig('PLRenergycost.pdf')
show()
# --- Plot 11: EU-case stacked bar chart of cost components with PLR. ---
resultsdf_plot1 = pd.read_excel('PLR_plots.xlsx', 'plot11')
WindLevel = resultsdf_plot1['wind_pen'].tolist()
# NOTE(review): Energycost/Fixedcost are read but never plotted below --
# confirm whether they are leftovers from an earlier figure variant.
Energycost = resultsdf_plot1['cost_energy'].tolist()
Fixedcost = resultsdf_plot1['relfixedcosts'].tolist()
EnergycostPLR = resultsdf_plot1['cost_energyplr'].tolist()
EnergycostWindPLR = resultsdf_plot1['cost_wind'].tolist()
FixedcostPLR = resultsdf_plot1['relfixedcosts_plr'].tolist()
PLRcost = resultsdf_plot1['cost_plr'].tolist()
sns.set_color_codes("dark")
sns.set_style('ticks')
fig = plt.figure(1)
width = 0.35
ind = np.arange(len(WindLevel))
# NOTE(review): ``p3`` is assigned twice; only the handles collected by
# legend() matter here, so behavior is unaffected.
p3 = plt.bar(ind, EnergycostPLR, width, color=sns.xkcd_rgb["windows blue"], label='$Cost\ of\ Energy$')
p3 = plt.bar(ind, EnergycostWindPLR, width, color=sns.xkcd_rgb["medium green"], bottom=EnergycostPLR,label='$Cost\ of\ Wind$')
new = [x + y for x, y in zip(EnergycostPLR, EnergycostWindPLR)]
p5 = plt.bar(ind, FixedcostPLR, width, color=sns.xkcd_rgb["amber"],bottom=new, label='$Variable\ and\ Fixed\ Cost\ Not\ Covered$')
new = [x + y for x, y in zip(new, FixedcostPLR)]
p4 = plt.bar(ind, PLRcost, width, color=sns.xkcd_rgb["pale red"],bottom=new, label='$Cost\ of\ PLR$')
plt.grid(True, linestyle='-', which='major', color='lightgrey', alpha=0.5)
plt.axis([-1, 6, 0, 450])
plt.ylabel('$Cost\ (M$€$/day)$')
plt.xlabel('$Average\ Wind\ Penetration\ Level\ ($%$)$')
plt.xticks(ind + width/2., ('0', '19', '33', '43', '50', '55'))
plt.yticks(np.arange(0, 475, 50))
plt.legend(loc='upper center', bbox_to_anchor=(.5, 1.15), ncol=2, fancybox=True, shadow=True)
axes = plt.gca()
axes.set_axis_bgcolor('whitesmoke')
# plt.savefig('EUPLRBARPLOT.pdf')
plt.show()
# --- Plot 12: installed capacity per technology across solver iterations
# (stacked bars) with the capacity requirement as a dashed line. ---
resultsdf_plot1 = pd.read_excel('PLR_plots.xlsx', 'plot12')
# Results
Iteration = resultsdf_plot1['Iterations'].tolist()
Nuclear = resultsdf_plot1['NuclearProd'].tolist()
Coal = resultsdf_plot1['CoalProd'].tolist()
Gas = resultsdf_plot1['GasProd'].tolist()
Hydro = resultsdf_plot1['HydroProd'].tolist()
Oil = resultsdf_plot1['OilProd'].tolist()
CapReq = resultsdf_plot1['CapRequirement'].tolist()
sns.set_color_codes("dark")
sns.set_style('ticks')
fig, ax1 = plt.subplots()
width = 0.35 # the width of the bars: can also be len(x) sequence
ind = np.arange(len(Iteration))
p3 = ax1.bar(ind, Nuclear, width, color='olive', label='$Nuclear$')
p4 = ax1.bar(ind, Coal, width, color='darkgreen',bottom=Nuclear, label='$Coal$')
new = [x + y for x, y in zip(Nuclear, Coal)]
p5 = ax1.bar(ind, Hydro, width, color='midnightblue',bottom=new, label='$Hydro$')
new = [x + y for x, y in zip(new, Hydro)]
p6 = ax1.bar(ind, Gas, width, color='blue',bottom=new, label='$Gas$')
new = [x + y for x, y in zip(new, Gas)]
p7 = ax1.bar(ind, Oil, width, color='silver',bottom=new, label='$Oil$')
# Shift the line's x positions to the bar centers.
newxlist = (ind + width/2).tolist()
Iteration = newxlist
ax1.plot(Iteration, CapReq, linestyle='--', c='darkred',alpha=1, label = '$Cap.\ Req.$')
# NOTE(review): alpha=10000 exceeds the valid [0, 1] range -- presumably a
# typo for 1.0; matplotlib's handling of it should be confirmed.
ax1.grid(True, linestyle='-', which='major', color='lightgray', alpha=10000)
ax1.set_xlim([-1,5])
ax1.set_ylim([0,4500])
ax1.set_ylabel('$Installed\ Capacity\ (MW)$')
ax1.set_xlabel('$Iteration\ Number$')
plt.xticks(ind + width/2., ('0', '1', '2', '3', '4'))
ax1.legend(loc='upper center', bbox_to_anchor=(.5, 1.1), ncol=6, fancybox=True, shadow=True)
ax1.set_axis_bgcolor('whitesmoke')
# plt.savefig('PLR24busIteration.pdf')
plt.show()
|
Thomsen22/MissingMoney
|
Plots/Peak Load Reserve Plots/plotsPLR.py
|
Python
|
gpl-3.0
| 13,539
|
[
"Amber"
] |
8eeba3750c47fb27ad41bc92813e226e75697c0502a966e8b04a30a40259dc9b
|
import HTSeq
import argparse
import os.path
from CommonFastaFunctions import Create_Blastdb
from CommonFastaFunctions import LoadAlelleFasta
from CommonFastaFunctions import LoadAlellicProfileGeneric
from CommonFastaFunctions import WriteFasta
from CommonFastaFunctions import runBlast
from CommonFastaFunctions import runBlastParser
from Bio.Blast.Applications import NcbiblastnCommandline
def main():
parser = argparse.ArgumentParser(description="Given an ffn file, recovers the genes that are not paralogs and have a size bigger than the g parameter provided")
parser.add_argument('-i', nargs='?', type=str, help='ffn file', required=True)
parser.add_argument('-g', nargs='?', type=int, help='int minimum size', required=True)
args = parser.parse_args()
genes = args.i
sizethresh = args.g
gene_fp = HTSeq.FastaReader(genes)
geneFile = os.path.abspath( genes )
Gene_Blast_DB_name = Create_Blastdb( geneFile, 1 )
geneF = os.path.splitext( geneFile )[0]
blast_out_file = geneF + '.xml'
# list of results - the output of the function
resultsList = []
# ------------------------------ RUNNING BLAST ------------------------------ #
cline = NcbiblastnCommandline(query=geneFile, db=Gene_Blast_DB_name, evalue=0.001, out=blast_out_file, outfmt=5)
blast_records = runBlastParser(cline, blast_out_file, geneFile)
paralogs=[]
for blast_record in blast_records:
try:
alignment=blast_record.alignments[1]
paralogs.append( alignment.hit_def)
except:
continue
pathfiles=os.path.dirname(geneFile)
pathfiles=pathfiles+"/"
print pathfiles
g_fp = HTSeq.FastaReader( genes )
removedparalogs=0
removedsize=0
for contig in g_fp:
name = contig.name+" "+contig.descr
if name not in paralogs:
if int(len(contig.seq))>sizethresh:
namefile=contig.name
namefile=namefile.replace("|","_")
with open(pathfiles+namefile+".fasta", "wb") as f:
f.write(">1\n"+contig.seq+"\n")
else:
removedsize+=1
else:
print name
removedparalogs+=1
print "Removed %s paralog genes" % str(removedparalogs)
print "Removed %s because of size :" % str(removedsize)
if __name__ == "__main__":
main()
|
mickaelsilva/pythonscripts
|
AlleleCalling/ParalogRemove.py
|
Python
|
gpl-2.0
| 2,157
|
[
"BLAST",
"HTSeq"
] |
f83585c846b49b185a113d693e3bb14157435f965a52d0696133988828962194
|
# encoding: utf-8
import datetime
from south.db import db
from south.v2 import SchemaMigration
from django.db import models
class Migration(SchemaMigration):
    """South schema migration: add the ``source`` field to ``Indicator``.

    ``forwards`` adds a ``source`` CharField (default 'U.S. Census Bureau')
    to the ``profiles_indicator`` table; ``backwards`` drops the column.
    """

    def forwards(self, orm):
        # Adding field 'Indicator.source'
        db.add_column('profiles_indicator', 'source', self.gf('django.db.models.fields.CharField')(default='U.S. Census Bureau', max_length=300, blank=True), keep_default=False)

    def backwards(self, orm):
        # Deleting field 'Indicator.source'
        db.delete_column('profiles_indicator', 'source')

    # Frozen ORM snapshot used by South to build the ``orm`` argument above.
    # Auto-generated -- do not edit by hand.
    models = {
        'auth.group': {
            'Meta': {'object_name': 'Group'},
            'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
            'name': ('django.db.models.fields.CharField', [], {'unique': 'True', 'max_length': '80'}),
            'permissions': ('django.db.models.fields.related.ManyToManyField', [], {'to': "orm['auth.Permission']", 'symmetrical': 'False', 'blank': 'True'})
        },
        'auth.permission': {
            'Meta': {'ordering': "('content_type__app_label', 'content_type__model', 'codename')", 'unique_together': "(('content_type', 'codename'),)", 'object_name': 'Permission'},
            'codename': ('django.db.models.fields.CharField', [], {'max_length': '100'}),
            'content_type': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['contenttypes.ContentType']"}),
            'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
            'name': ('django.db.models.fields.CharField', [], {'max_length': '50'})
        },
        'auth.user': {
            'Meta': {'object_name': 'User'},
            'date_joined': ('django.db.models.fields.DateTimeField', [], {'default': 'datetime.datetime.now'}),
            'email': ('django.db.models.fields.EmailField', [], {'max_length': '75', 'blank': 'True'}),
            'first_name': ('django.db.models.fields.CharField', [], {'max_length': '30', 'blank': 'True'}),
            'groups': ('django.db.models.fields.related.ManyToManyField', [], {'to': "orm['auth.Group']", 'symmetrical': 'False', 'blank': 'True'}),
            'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
            'is_active': ('django.db.models.fields.BooleanField', [], {'default': 'True'}),
            'is_staff': ('django.db.models.fields.BooleanField', [], {'default': 'False'}),
            'is_superuser': ('django.db.models.fields.BooleanField', [], {'default': 'False'}),
            'last_login': ('django.db.models.fields.DateTimeField', [], {'default': 'datetime.datetime.now'}),
            'last_name': ('django.db.models.fields.CharField', [], {'max_length': '30', 'blank': 'True'}),
            'password': ('django.db.models.fields.CharField', [], {'max_length': '128'}),
            'user_permissions': ('django.db.models.fields.related.ManyToManyField', [], {'to': "orm['auth.Permission']", 'symmetrical': 'False', 'blank': 'True'}),
            'username': ('django.db.models.fields.CharField', [], {'unique': 'True', 'max_length': '30'})
        },
        'contenttypes.contenttype': {
            'Meta': {'ordering': "('name',)", 'unique_together': "(('app_label', 'model'),)", 'object_name': 'ContentType', 'db_table': "'django_content_type'"},
            'app_label': ('django.db.models.fields.CharField', [], {'max_length': '100'}),
            'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
            'model': ('django.db.models.fields.CharField', [], {'max_length': '100'}),
            'name': ('django.db.models.fields.CharField', [], {'max_length': '100'})
        },
        'profiles.datadomain': {
            'Meta': {'ordering': "['weight']", 'object_name': 'DataDomain'},
            'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
            'indicators': ('django.db.models.fields.related.ManyToManyField', [], {'to': "orm['profiles.Indicator']", 'through': "orm['profiles.IndicatorDomain']", 'symmetrical': 'False'}),
            'name': ('django.db.models.fields.CharField', [], {'unique': 'True', 'max_length': '100'}),
            'slug': ('django.db.models.fields.SlugField', [], {'unique': 'True', 'max_length': '100', 'db_index': 'True'}),
            'subdomain_only': ('django.db.models.fields.BooleanField', [], {'default': 'False'}),
            'subdomains': ('django.db.models.fields.related.ManyToManyField', [], {'to': "orm['profiles.DataDomain']", 'symmetrical': 'False', 'blank': 'True'}),
            'weight': ('django.db.models.fields.PositiveIntegerField', [], {'default': '1'})
        },
        'profiles.datapoint': {
            'Meta': {'unique_together': "(('indicator', 'record', 'time'),)", 'object_name': 'DataPoint'},
            'change_from_time': ('django.db.models.fields.related.ForeignKey', [], {'related_name': "'datapoint_as_change_from'", 'null': 'True', 'to': "orm['profiles.Time']"}),
            'change_to_time': ('django.db.models.fields.related.ForeignKey', [], {'related_name': "'datapoint_as_change_to'", 'null': 'True', 'to': "orm['profiles.Time']"}),
            'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
            'image': ('sorl.thumbnail.fields.ImageField', [], {'max_length': '100', 'null': 'True', 'blank': 'True'}),
            'indicator': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['profiles.Indicator']"}),
            'record': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['profiles.GeoRecord']"}),
            'time': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['profiles.Time']", 'null': 'True'})
        },
        'profiles.datasource': {
            'Meta': {'object_name': 'DataSource'},
            'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
            'implementation': ('django.db.models.fields.CharField', [], {'unique': 'True', 'max_length': '100'}),
            'name': ('django.db.models.fields.CharField', [], {'unique': 'True', 'max_length': '100'})
        },
        'profiles.denominator': {
            'Meta': {'object_name': 'Denominator'},
            'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
            'indicator': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['profiles.Indicator']"}),
            'label': ('django.db.models.fields.CharField', [], {'max_length': '50'}),
            'multiplier': ('django.db.models.fields.PositiveIntegerField', [], {'null': 'True', 'blank': 'True'}),
            'sort': ('django.db.models.fields.PositiveIntegerField', [], {'default': '1'})
        },
        'profiles.denominatorpart': {
            'Meta': {'object_name': 'DenominatorPart'},
            'data': ('django.db.models.fields.files.FileField', [], {'max_length': '100', 'null': 'True', 'blank': 'True'}),
            'data_source': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['profiles.DataSource']"}),
            'denominator': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['profiles.Denominator']"}),
            'formula': ('django.db.models.fields.TextField', [], {'blank': 'True'}),
            'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
            'indicator': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['profiles.Indicator']"}),
            'part': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['profiles.IndicatorPart']"})
        },
        'profiles.geolevel': {
            'Meta': {'object_name': 'GeoLevel'},
            'data_sources': ('django.db.models.fields.related.ManyToManyField', [], {'to': "orm['profiles.DataSource']", 'symmetrical': 'False'}),
            'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
            'name': ('django.db.models.fields.CharField', [], {'unique': 'True', 'max_length': '200'}),
            'parent': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['profiles.GeoLevel']", 'null': 'True', 'blank': 'True'}),
            'slug': ('django.db.models.fields.SlugField', [], {'unique': 'True', 'max_length': '200', 'db_index': 'True'})
        },
        'profiles.georecord': {
            'Meta': {'unique_together': "(('slug', 'level'), ('level', 'geo_id', 'custom_name', 'owner'))", 'object_name': 'GeoRecord'},
            'components': ('django.db.models.fields.related.ManyToManyField', [], {'related_name': "'components_rel_+'", 'blank': 'True', 'to': "orm['profiles.GeoRecord']"}),
            'custom_name': ('django.db.models.fields.CharField', [], {'max_length': '100', 'null': 'True', 'blank': 'True'}),
            'geo_id': ('django.db.models.fields.TextField', [], {'null': 'True', 'blank': 'True'}),
            'geom': ('django.contrib.gis.db.models.fields.GeometryField', [], {'null': 'True', 'blank': 'True'}),
            'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
            'level': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['profiles.GeoLevel']"}),
            'mappings': ('django.db.models.fields.related.ManyToManyField', [], {'related_name': "'mappings_rel_+'", 'blank': 'True', 'to': "orm['profiles.GeoRecord']"}),
            'name': ('django.db.models.fields.CharField', [], {'max_length': '100'}),
            'notes': ('django.db.models.fields.TextField', [], {'null': 'True', 'blank': 'True'}),
            'owner': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['auth.User']", 'null': 'True', 'blank': 'True'}),
            'parent': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['profiles.GeoRecord']", 'null': 'True', 'blank': 'True'}),
            'slug': ('django.db.models.fields.SlugField', [], {'db_index': 'True', 'max_length': '100', 'blank': 'True'})
        },
        'profiles.indicator': {
            'Meta': {'object_name': 'Indicator'},
            'data_domains': ('django.db.models.fields.related.ManyToManyField', [], {'to': "orm['profiles.DataDomain']", 'through': "orm['profiles.IndicatorDomain']", 'symmetrical': 'False'}),
            'data_type': ('django.db.models.fields.CharField', [], {'default': "'COUNT'", 'max_length': '30'}),
            'display_change': ('django.db.models.fields.BooleanField', [], {'default': 'True'}),
            'display_distribution': ('django.db.models.fields.BooleanField', [], {'default': 'True'}),
            'display_name': ('django.db.models.fields.CharField', [], {'max_length': '100'}),
            'display_percent': ('django.db.models.fields.BooleanField', [], {'default': 'True'}),
            'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
            'levels': ('django.db.models.fields.related.ManyToManyField', [], {'to': "orm['profiles.GeoLevel']", 'symmetrical': 'False'}),
            'limitations': ('django.db.models.fields.TextField', [], {'blank': 'True'}),
            'long_definition': ('django.db.models.fields.TextField', [], {'blank': 'True'}),
            'name': ('django.db.models.fields.CharField', [], {'unique': 'True', 'max_length': '100'}),
            'notes': ('django.db.models.fields.TextField', [], {'blank': 'True'}),
            'published': ('django.db.models.fields.BooleanField', [], {'default': 'True'}),
            'purpose': ('django.db.models.fields.TextField', [], {'blank': 'True'}),
            'routine_use': ('django.db.models.fields.TextField', [], {'blank': 'True'}),
            'short_definition': ('django.db.models.fields.TextField', [], {'blank': 'True'}),
            'slug': ('django.db.models.fields.SlugField', [], {'unique': 'True', 'max_length': '100', 'db_index': 'True'}),
            'source': ('django.db.models.fields.CharField', [], {'default': "'U.S. Census Bureau'", 'max_length': '300', 'blank': 'True'}),
            'universe': ('django.db.models.fields.CharField', [], {'max_length': '300', 'blank': 'True'})
        },
        'profiles.indicatordomain': {
            'Meta': {'object_name': 'IndicatorDomain'},
            'default': ('django.db.models.fields.BooleanField', [], {'default': 'False'}),
            'domain': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['profiles.DataDomain']"}),
            'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
            'indicator': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['profiles.Indicator']"})
        },
        'profiles.indicatorpart': {
            'Meta': {'object_name': 'IndicatorPart'},
            'data': ('django.db.models.fields.files.FileField', [], {'max_length': '100', 'null': 'True', 'blank': 'True'}),
            'data_source': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['profiles.DataSource']"}),
            'formula': ('django.db.models.fields.TextField', [], {'blank': 'True'}),
            'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
            'indicator': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['profiles.Indicator']"}),
            'time': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['profiles.Time']"})
        },
        'profiles.precalculatedvalue': {
            'Meta': {'object_name': 'PrecalculatedValue'},
            'data_source': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['profiles.DataSource']"}),
            'geo_record': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['profiles.GeoRecord']"}),
            'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
            'notes': ('django.db.models.fields.TextField', [], {'blank': 'True'}),
            'table': ('django.db.models.fields.TextField', [], {'blank': 'True'}),
            'value': ('django.db.models.fields.TextField', [], {'blank': 'True'})
        },
        'profiles.time': {
            'Meta': {'object_name': 'Time'},
            'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
            'name': ('django.db.models.fields.CharField', [], {'max_length': '20'}),
            'sort': ('django.db.models.fields.DecimalField', [], {'max_digits': '5', 'decimal_places': '1'})
        },
        'profiles.value': {
            'Meta': {'object_name': 'Value'},
            'datapoint': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['profiles.DataPoint']"}),
            'denominator': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['profiles.Denominator']", 'null': 'True'}),
            'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
            'moe': ('django.db.models.fields.DecimalField', [], {'null': 'True', 'max_digits': '10', 'decimal_places': '2', 'blank': 'True'}),
            'number': ('django.db.models.fields.DecimalField', [], {'null': 'True', 'max_digits': '10', 'decimal_places': '2', 'blank': 'True'}),
            'percent': ('django.db.models.fields.DecimalField', [], {'null': 'True', 'max_digits': '10', 'decimal_places': '2', 'blank': 'True'})
        }
    }

    # Apps whose frozen state this migration fully describes.
    complete_apps = ['profiles']
|
216software/Profiles
|
communityprofiles/profiles/oldmigrations/0045_auto__add_field_indicator_source.py
|
Python
|
mit
| 15,170
|
[
"MOE"
] |
0501be25706d5ba972b8c72750649f4327d8fc1a03b25d4bad9751e6f62d4086
|
# Licensed under a 3-clause BSD style license - see LICENSE.rst
"""
LAMDA Query Tool
----------------
:Author: Brian Svoboda (svobodb@email.arizona.edu)
This package is for querying the Leiden Atomic and Molecular Database (LAMDA)
hosted at: http://home.strw.leidenuniv.nl/~moldata/.
Note:
If you use the data files from LAMDA in your research work please refer to
the publication by Schoier, F.L., van der Tak, F.F.S., van Dishoeck E.F.,
Black, J.H. 2005, A&A 432, 369-379. When individual molecules are considered,
references to the original papers providing the spectroscopic and collisional
data are encouraged.
"""
from .core import Lamda, parse_lamda_datafile, write_lamda_datafile
|
ceb8/astroquery
|
astroquery/lamda/__init__.py
|
Python
|
bsd-3-clause
| 701
|
[
"Brian"
] |
3a2623fb883c350d2543abca0bfee4951d9f0ce1a0980c9277e3bce4f3ae77de
|
# -*- coding: utf-8 -*-
from __future__ import absolute_import
from django.conf import settings
from django.http import HttpResponse
from django.test import TestCase
from zilencer.models import Deployment
from zerver.views import get_invitee_emails_set, do_change_password
from zerver.models import (
get_realm, get_user_profile_by_email,
PreregistrationUser, Realm, Recipient, ScheduledJob, UserProfile, UserMessage,
)
from zerver.lib.actions import (
create_stream_if_needed,
do_add_subscription,
set_default_streams,
)
from zerver.lib.initial_password import initial_password
from zerver.lib.actions import do_set_realm_default_language
from zerver.lib.digest import send_digest_email
from zerver.lib.notifications import enqueue_welcome_emails, one_click_unsubscribe_link
from zerver.lib.test_helpers import ZulipTestCase, find_key_by_email, queries_captured
from zerver.lib.test_runner import slow
from zerver.lib.session_user import get_session_dict_user
import re
import ujson
from six.moves import urllib
from six.moves import range
import six
from six import text_type
class PublicURLTest(ZulipTestCase):
    """
    Account creation URLs are accessible even when not logged in. Authenticated
    URLs redirect to a page.
    """
    def fetch(self, method, urls, expected_status):
        # type: (str, List[str], int) -> None
        """Hit every URL with the given client method and check the status code."""
        for url in urls:
            response = getattr(self.client, method)(url) # e.g. self.client_post(url) if method is "post"
            self.assertEqual(response.status_code, expected_status,
                             msg="Expected %d, received %d for %s to %s" % (
                    expected_status, response.status_code, method, url))
    def test_public_urls(self):
        # type: () -> None
        """
        Test which views are accessible when not logged in.
        """
        # FIXME: We should also test the Tornado URLs -- this codepath
        # can't do so because this Django test mechanism doesn't go
        # through Tornado.
        get_urls = {200: ["/accounts/home/", "/accounts/login/"],
                    302: ["/"],
                    401: ["/api/v1/streams/Denmark/members",
                          "/api/v1/users/me/subscriptions",
                          "/api/v1/messages",
                          "/json/messages",
                          "/json/streams",
                          ],
                    }
        post_urls = {200: ["/accounts/login/"],
                     302: ["/accounts/logout/"],
                     401: ["/json/messages",
                           "/json/invite_users",
                           "/json/settings/change",
                           "/json/subscriptions/remove",
                           "/json/subscriptions/exists",
                           "/json/subscriptions/property",
                           "/json/get_subscribers",
                           "/json/fetch_api_key",
                           "/json/users/me/subscriptions",
                           "/api/v1/users/me/subscriptions",
                           ],
                     400: ["/api/v1/send_message",
                           "/api/v1/external/github",
                           "/api/v1/fetch_api_key",
                           ],
                     }
        put_urls = {401: ["/json/users/me/pointer"],
                    }
        for status_code, url_set in six.iteritems(get_urls):
            self.fetch("get", url_set, status_code)
        for status_code, url_set in six.iteritems(post_urls):
            self.fetch("post", url_set, status_code)
        for status_code, url_set in six.iteritems(put_urls):
            self.fetch("put", url_set, status_code)
    def test_get_gcid_when_not_configured(self):
        # type: () -> None
        """Without GOOGLE_CLIENT_ID configured, the endpoint returns an error."""
        with self.settings(GOOGLE_CLIENT_ID=None):
            resp = self.client_get("/api/v1/fetch_google_client_id")
            # assertEqual (not the deprecated assertEquals alias).
            self.assertEqual(400, resp.status_code,
                             msg="Expected 400, received %d for GET /api/v1/fetch_google_client_id" % resp.status_code,
                             )
            data = ujson.loads(resp.content)
            self.assertEqual('error', data['result'])
    def test_get_gcid_when_configured(self):
        # type: () -> None
        """With GOOGLE_CLIENT_ID configured, the endpoint returns it."""
        with self.settings(GOOGLE_CLIENT_ID="ABCD"):
            resp = self.client_get("/api/v1/fetch_google_client_id")
            self.assertEqual(200, resp.status_code,
                             msg="Expected 200, received %d for GET /api/v1/fetch_google_client_id" % resp.status_code,
                             )
            data = ujson.loads(resp.content)
            self.assertEqual('success', data['result'])
            self.assertEqual('ABCD', data['google_client_id'])
class PasswordResetTest(ZulipTestCase):
    """
    Log in, reset password, log out, log in with new password.
    """
    def test_password_reset(self):
        # type: () -> None
        email = 'hamlet@zulip.com'
        old_password = initial_password(email)
        self.login(email)
        # start the password reset process by supplying an email address
        result = self.client_post('/accounts/password/reset/', {'email': email})
        # check the redirect link telling you to check mail for password reset link
        self.assertEqual(result.status_code, 302)
        self.assertTrue(result["Location"].endswith(
            "/accounts/password/reset/done/"))
        result = self.client_get(result["Location"])
        self.assert_in_response("Check your email to finish the process.", result)
        # visit password reset link
        from django.core.mail import outbox
        for message in reversed(outbox):
            if email in message.to:
                # Raw string so \S is a regex token, not an (invalid) string escape.
                password_reset_pattern = re.compile(settings.EXTERNAL_HOST + r"(\S+)")
                password_reset_url = password_reset_pattern.search(
                    message.body).groups()[0]
                break
        else:
            raise ValueError("Couldn't find a password reset email.")
        result = self.client_get(password_reset_url)
        self.assertEqual(result.status_code, 200)
        # Reset your password
        result = self.client_post(password_reset_url,
                                  {'new_password1': 'new_password',
                                   'new_password2': 'new_password'})
        # password reset succeeded
        self.assertEqual(result.status_code, 302)
        self.assertTrue(result["Location"].endswith("/password/done/"))
        # log back in with new password
        self.login(email, password='new_password')
        user_profile = get_user_profile_by_email('hamlet@zulip.com')
        self.assertEqual(get_session_dict_user(self.client.session), user_profile.id)
        # make sure old password no longer works
        self.login(email, password=old_password, fails=True)
class LoginTest(ZulipTestCase):
    """
    Logging in, registration, and logging out.
    """
    def test_login(self):
        # type: () -> None
        self.login("hamlet@zulip.com")
        user_profile = get_user_profile_by_email('hamlet@zulip.com')
        self.assertEqual(get_session_dict_user(self.client.session), user_profile.id)
    def test_login_bad_password(self):
        # type: () -> None
        self.login("hamlet@zulip.com", password="wrongpassword", fails=True)
        self.assertIsNone(get_session_dict_user(self.client.session))
    def test_login_nonexist_user(self):
        # type: () -> None
        result = self.login_with_return("xxx@zulip.com", "xxx")
        self.assert_in_response("Please enter a correct email and password", result)
    def test_register(self):
        # type: () -> None
        realm = get_realm("zulip.com")
        streams = ["stream_%s" % i for i in range(40)]
        for stream in streams:
            create_stream_if_needed(realm, stream)
        set_default_streams(realm, streams)
        with queries_captured() as queries:
            self.register("test", "test")
        # Ensure the number of queries we make is not O(streams)
        self.assert_length(queries, 67)
        user_profile = get_user_profile_by_email('test@zulip.com')
        self.assertEqual(get_session_dict_user(self.client.session), user_profile.id)
    def test_register_deactivated(self):
        # type: () -> None
        """
        If you try to register for a deactivated realm, you get a clear error
        page.
        """
        realm = get_realm("zulip.com")
        realm.deactivated = True
        realm.save(update_fields=["deactivated"])
        result = self.register("test", "test")
        self.assert_in_response("has been deactivated", result)
        with self.assertRaises(UserProfile.DoesNotExist):
            get_user_profile_by_email('test@zulip.com')
    def test_login_deactivated(self):
        # type: () -> None
        """
        If you try to log in to a deactivated realm, you get a clear error page.
        """
        realm = get_realm("zulip.com")
        realm.deactivated = True
        realm.save(update_fields=["deactivated"])
        result = self.login_with_return("hamlet@zulip.com")
        self.assert_in_response("has been deactivated", result)
    def test_logout(self):
        # type: () -> None
        self.login("hamlet@zulip.com")
        self.client_post('/accounts/logout/')
        self.assertIsNone(get_session_dict_user(self.client.session))
    def test_non_ascii_login(self):
        # type: () -> None
        """
        You can log in even if your password contain non-ASCII characters.
        """
        email = "test@zulip.com"
        password = u"hümbüǵ"
        # Registering succeeds.
        self.register("test", password)
        user_profile = get_user_profile_by_email(email)
        self.assertEqual(get_session_dict_user(self.client.session), user_profile.id)
        self.client_post('/accounts/logout/')
        self.assertIsNone(get_session_dict_user(self.client.session))
        # Logging in succeeds.
        self.client_post('/accounts/logout/')
        self.login(email, password)
        self.assertEqual(get_session_dict_user(self.client.session), user_profile.id)
    def test_register_first_user_with_invites(self):
        # type: () -> None
        """
        The first user in a realm has a special step in their signup workflow
        for inviting other users. Do as realistic an end-to-end test as we can
        without Tornado running.
        """
        username = "user1"
        password = "test"
        domain = "test.com"
        email = "user1@test.com"
        # Create a new realm to ensure that we're the first user in it.
        Realm.objects.create(domain=domain, name="Test Inc.")
        # Start the signup process by supplying an email address.
        result = self.client_post('/accounts/home/', {'email': email})
        # Check the redirect telling you to check your mail for a confirmation
        # link.
        self.assertEqual(result.status_code, 302)
        self.assertTrue(result["Location"].endswith(
            "/accounts/send_confirm/%s@%s" % (username, domain)))
        result = self.client_get(result["Location"])
        self.assert_in_response("Check your email so we can get started.", result)
        # Visit the confirmation link.
        from django.core.mail import outbox
        for message in reversed(outbox):
            if email in message.to:
                # Raw string so \S is a regex token, not an (invalid) string escape.
                confirmation_link_pattern = re.compile(settings.EXTERNAL_HOST + r"(\S+)>")
                confirmation_url = confirmation_link_pattern.search(
                    message.body).groups()[0]
                break
        else:
            raise ValueError("Couldn't find a confirmation email.")
        result = self.client_get(confirmation_url)
        self.assertEqual(result.status_code, 200)
        # Pick a password and agree to the ToS.
        result = self.submit_reg_form_for_user(username, password, domain)
        self.assertEqual(result.status_code, 302)
        self.assertTrue(result["Location"].endswith("/invite/"))
        # Invite other users to join you.
        result = self.client_get(result["Location"])
        self.assert_in_response("You're the first one here!", result)
        # Reset the outbox for our invites.
        outbox.pop()
        invitees = ['alice@' + domain, 'bob@' + domain]
        params = {
            'invitee_emails': ujson.dumps(invitees)
        }
        result = self.client_post('/json/bulk_invite_users', params)
        self.assert_json_success(result)
        # We really did email these users, and they have PreregistrationUser
        # objects.
        email_recipients = [message.recipients()[0] for message in outbox]
        self.assertEqual(len(outbox), len(invitees))
        self.assertEqual(sorted(email_recipients), sorted(invitees))
        user_profile = get_user_profile_by_email(email)
        self.assertEqual(len(invitees), PreregistrationUser.objects.filter(
            referred_by=user_profile).count())
        # After this we start manipulating browser information, so stop here.
class InviteUserTest(ZulipTestCase):
    """Tests for /json/invite_users and /json/bulk_invite_users."""
    def invite(self, users, streams):
        # type: (str, List[text_type]) -> HttpResponse
        """
        Invites the specified users to Zulip with the specified streams.
        users should be a string containing the users to invite, comma or
        newline separated.
        streams should be a list of strings.
        """
        return self.client_post("/json/invite_users",
                                {"invitee_emails": users,
                                 "stream": streams})
    def check_sent_emails(self, correct_recipients):
        # type: (List[str]) -> None
        """Assert that exactly `correct_recipients` received an email."""
        from django.core.mail import outbox
        self.assertEqual(len(outbox), len(correct_recipients))
        email_recipients = [email.recipients()[0] for email in outbox]
        self.assertEqual(sorted(email_recipients), sorted(correct_recipients))
    def test_bulk_invite_users(self):
        # type: () -> None
        """The bulk_invite_users code path is for the first user in a realm."""
        self.login('hamlet@zulip.com')
        invitees = ['alice@zulip.com', 'bob@zulip.com']
        params = {
            'invitee_emails': ujson.dumps(invitees)
        }
        result = self.client_post('/json/bulk_invite_users', params)
        self.assert_json_success(result)
        self.check_sent_emails(invitees)
    def test_successful_invite_user(self):
        # type: () -> None
        """
        A call to /json/invite_users with valid parameters causes an invitation
        email to be sent.
        """
        self.login("hamlet@zulip.com")
        invitee = "alice-test@zulip.com"
        self.assert_json_success(self.invite(invitee, ["Denmark"]))
        self.assertTrue(find_key_by_email(invitee))
        self.check_sent_emails([invitee])
    def test_successful_invite_user_with_name(self):
        # type: () -> None
        """
        A call to /json/invite_users with valid parameters causes an invitation
        email to be sent.
        """
        self.login("hamlet@zulip.com")
        email = "alice-test@zulip.com"
        # "Name <address>" form should be accepted and reduced to the address.
        invitee = "Alice Test <{}>".format(email)
        self.assert_json_success(self.invite(invitee, ["Denmark"]))
        self.assertTrue(find_key_by_email(email))
        self.check_sent_emails([email])
    def test_successful_invite_user_with_name_and_normal_one(self):
        # type: () -> None
        """
        A call to /json/invite_users with valid parameters causes an invitation
        email to be sent.
        """
        self.login("hamlet@zulip.com")
        email = "alice-test@zulip.com"
        email2 = "bob-test@zulip.com"
        # Mixed "Name <address>" and bare-address forms in one request.
        invitee = "Alice Test <{}>, {}".format(email, email2)
        self.assert_json_success(self.invite(invitee, ["Denmark"]))
        self.assertTrue(find_key_by_email(email))
        self.assertTrue(find_key_by_email(email2))
        self.check_sent_emails([email, email2])
    def test_invite_user_signup_initial_history(self):
        # type: () -> None
        """
        Test that a new user invited to a stream receives some initial
        history but only from public streams.
        """
        self.login("hamlet@zulip.com")
        user_profile = get_user_profile_by_email("hamlet@zulip.com")
        private_stream_name = "Secret"
        (stream, _) = create_stream_if_needed(user_profile.realm, private_stream_name, invite_only=True)
        do_add_subscription(user_profile, stream)
        public_msg_id = self.send_message("hamlet@zulip.com", "Denmark", Recipient.STREAM,
                                          "Public topic", "Public message")
        secret_msg_id = self.send_message("hamlet@zulip.com", private_stream_name, Recipient.STREAM,
                                          "Secret topic", "Secret message")
        invitee = "alice-test@zulip.com"
        self.assert_json_success(self.invite(invitee, [private_stream_name, "Denmark"]))
        self.assertTrue(find_key_by_email(invitee))
        self.submit_reg_form_for_user("alice-test", "password")
        invitee_profile = get_user_profile_by_email(invitee)
        invitee_msg_ids = [um.message_id for um in
                           UserMessage.objects.filter(user_profile=invitee_profile)]
        # The invitee sees the public message but not the private-stream one.
        self.assertTrue(public_msg_id in invitee_msg_ids)
        self.assertFalse(secret_msg_id in invitee_msg_ids)
    def test_multi_user_invite(self):
        # type: () -> None
        """
        Invites multiple users with a variety of delimiters.
        """
        self.login("hamlet@zulip.com")
        # Intentionally use a weird string.
        self.assert_json_success(self.invite(
            """bob-test@zulip.com, carol-test@zulip.com,
            dave-test@zulip.com
            earl-test@zulip.com""", ["Denmark"]))
        for user in ("bob", "carol", "dave", "earl"):
            self.assertTrue(find_key_by_email("%s-test@zulip.com" % (user,)))
        self.check_sent_emails(["bob-test@zulip.com", "carol-test@zulip.com",
                                "dave-test@zulip.com", "earl-test@zulip.com"])
    def test_missing_or_invalid_params(self):
        # type: () -> None
        """
        Tests inviting with various missing or invalid parameters.
        """
        self.login("hamlet@zulip.com")
        self.assert_json_error(
            self.client_post("/json/invite_users", {"invitee_emails": "foo@zulip.com"}),
            "You must specify at least one stream for invitees to join.")
        for address in ("noatsign.com", "outsideyourdomain@example.net"):
            self.assert_json_error(
                self.invite(address, ["Denmark"]),
                "Some emails did not validate, so we didn't send any invitations.")
        self.check_sent_emails([])
    def test_invalid_stream(self):
        # type: () -> None
        """
        Tests inviting to a non-existent stream.
        """
        self.login("hamlet@zulip.com")
        self.assert_json_error(self.invite("iago-test@zulip.com", ["NotARealStream"]),
                               "Stream does not exist: NotARealStream. No invites were sent.")
        self.check_sent_emails([])
    def test_invite_existing_user(self):
        # type: () -> None
        """
        If you invite an address already using Zulip, no invitation is sent.
        """
        self.login("hamlet@zulip.com")
        self.assert_json_error(
            self.client_post("/json/invite_users",
                             {"invitee_emails": "hamlet@zulip.com",
                              "stream": ["Denmark"]}),
            "We weren't able to invite anyone.")
        self.assertRaises(PreregistrationUser.DoesNotExist,
                          lambda: PreregistrationUser.objects.get(
                              email="hamlet@zulip.com"))
        self.check_sent_emails([])
    def test_invite_some_existing_some_new(self):
        # type: () -> None
        """
        If you invite a mix of already existing and new users, invitations are
        only sent to the new users.
        """
        self.login("hamlet@zulip.com")
        existing = ["hamlet@zulip.com", "othello@zulip.com"]
        new = ["foo-test@zulip.com", "bar-test@zulip.com"]
        result = self.client_post("/json/invite_users",
                                  {"invitee_emails": "\n".join(existing + new),
                                   "stream": ["Denmark"]})
        self.assert_json_error(result,
                               "Some of those addresses are already using Zulip, \
so we didn't send them an invitation. We did send invitations to everyone else!")
        # We only created accounts for the new users.
        for email in existing:
            self.assertRaises(PreregistrationUser.DoesNotExist,
                              lambda: PreregistrationUser.objects.get(
                                  email=email))
        for email in new:
            self.assertTrue(PreregistrationUser.objects.get(email=email))
        # We only sent emails to the new users.
        self.check_sent_emails(new)
    def test_invite_outside_domain_in_closed_realm(self):
        # type: () -> None
        """
        In a realm with `restricted_to_domain = True`, you can't invite people
        with a different domain from that of the realm or your e-mail address.
        """
        zulip_realm = get_realm("zulip.com")
        zulip_realm.restricted_to_domain = True
        zulip_realm.save()
        self.login("hamlet@zulip.com")
        external_address = "foo@example.com"
        self.assert_json_error(
            self.invite(external_address, ["Denmark"]),
            "Some emails did not validate, so we didn't send any invitations.")
    def test_invite_outside_domain_in_open_realm(self):
        # type: () -> None
        """
        In a realm with `restricted_to_domain = False`, you can invite people
        with a different domain from that of the realm or your e-mail address.
        """
        zulip_realm = get_realm("zulip.com")
        zulip_realm.restricted_to_domain = False
        zulip_realm.save()
        self.login("hamlet@zulip.com")
        external_address = "foo@example.com"
        self.assert_json_success(self.invite(external_address, ["Denmark"]))
        self.check_sent_emails([external_address])
    def test_invite_with_non_ascii_streams(self):
        # type: () -> None
        """
        Inviting someone to streams with non-ASCII characters succeeds.
        """
        self.login("hamlet@zulip.com")
        invitee = "alice-test@zulip.com"
        stream_name = u"hümbüǵ"
        realm = get_realm("zulip.com")
        stream, _ = create_stream_if_needed(realm, stream_name)
        # Make sure we're subscribed before inviting someone.
        do_add_subscription(
            get_user_profile_by_email("hamlet@zulip.com"),
            stream, no_log=True)
        self.assert_json_success(self.invite(invitee, [stream_name]))
class InviteeEmailsParserTests(TestCase):
    """Tests for get_invitee_emails_set, which parses a raw invitee string
    (comma/newline separated, optionally in "Name <address>" form) into a
    set of bare email addresses."""
    def setUp(self):
        # type: () -> None
        # Three addresses reused by every test below.
        self.email1 = "email1@zulip.com"
        self.email2 = "email2@zulip.com"
        self.email3 = "email3@zulip.com"
    def test_if_emails_separated_by_commas_are_parsed_and_striped_correctly(self):
        # type: () -> None
        emails_raw = "{} ,{}, {}".format(self.email1, self.email2, self.email3)
        expected_set = {self.email1, self.email2, self.email3}
        self.assertEqual(get_invitee_emails_set(emails_raw), expected_set)
    def test_if_emails_separated_by_newlines_are_parsed_and_striped_correctly(self):
        # type: () -> None
        emails_raw = "{}\n {}\n {} ".format(self.email1, self.email2, self.email3)
        expected_set = {self.email1, self.email2, self.email3}
        self.assertEqual(get_invitee_emails_set(emails_raw), expected_set)
    def test_if_emails_from_email_client_separated_by_newlines_are_parsed_correctly(self):
        # type: () -> None
        # "Name <address>" lines, as pasted from an email client.
        emails_raw = "Email One <{}>\nEmailTwo<{}>\nEmail Three<{}>".format(self.email1, self.email2, self.email3)
        expected_set = {self.email1, self.email2, self.email3}
        self.assertEqual(get_invitee_emails_set(emails_raw), expected_set)
    def test_if_emails_in_mixed_style_are_parsed_correctly(self):
        # type: () -> None
        emails_raw = "Email One <{}>,EmailTwo<{}>\n{}".format(self.email1, self.email2, self.email3)
        expected_set = {self.email1, self.email2, self.email3}
        self.assertEqual(get_invitee_emails_set(emails_raw), expected_set)
class EmailUnsubscribeTests(ZulipTestCase):
    """Tests for the one-click unsubscribe endpoints reached from links in
    outgoing e-mails (missed messages, welcome, digest)."""
    def test_missedmessage_unsubscribe(self):
        # type: () -> None
        """
        We provide one-click unsubscribe links in missed message
        e-mails that you can click even when logged out to update your
        email notification settings.
        """
        user_profile = get_user_profile_by_email("hamlet@zulip.com")
        user_profile.enable_offline_email_notifications = True
        user_profile.save()
        unsubscribe_link = one_click_unsubscribe_link(user_profile,
                                                      "missed_messages")
        # Request only the path, so the test client handles the host part.
        result = self.client_get(urllib.parse.urlparse(unsubscribe_link).path)
        self.assertEqual(result.status_code, 200)
        # Circumvent user_profile caching.
        user_profile = UserProfile.objects.get(email="hamlet@zulip.com")
        self.assertFalse(user_profile.enable_offline_email_notifications)
    def test_welcome_unsubscribe(self):
        # type: () -> None
        """
        We provide one-click unsubscribe links in welcome e-mails that you can
        click even when logged out to stop receiving them.
        """
        email = "hamlet@zulip.com"
        user_profile = get_user_profile_by_email("hamlet@zulip.com")
        # Simulate a new user signing up, which enqueues 2 welcome e-mails.
        enqueue_welcome_emails(email, "King Hamlet")
        self.assertEqual(2, len(ScheduledJob.objects.filter(
            type=ScheduledJob.EMAIL, filter_string__iexact=email)))
        # Simulate unsubscribing from the welcome e-mails.
        unsubscribe_link = one_click_unsubscribe_link(user_profile, "welcome")
        result = self.client_get(urllib.parse.urlparse(unsubscribe_link).path)
        # The welcome email jobs are no longer scheduled.
        self.assertEqual(result.status_code, 200)
        self.assertEqual(0, len(ScheduledJob.objects.filter(
            type=ScheduledJob.EMAIL, filter_string__iexact=email)))
    def test_digest_unsubscribe(self):
        # type: () -> None
        """
        We provide one-click unsubscribe links in digest e-mails that you can
        click even when logged out to stop receiving them.
        Unsubscribing from these emails also dequeues any digest email jobs that
        have been queued.
        """
        email = "hamlet@zulip.com"
        user_profile = get_user_profile_by_email("hamlet@zulip.com")
        self.assertTrue(user_profile.enable_digest_emails)
        # Enqueue a fake digest email.
        send_digest_email(user_profile, "", "")
        self.assertEqual(1, len(ScheduledJob.objects.filter(
            type=ScheduledJob.EMAIL, filter_string__iexact=email)))
        # Simulate unsubscribing from digest e-mails.
        unsubscribe_link = one_click_unsubscribe_link(user_profile, "digest")
        result = self.client_get(urllib.parse.urlparse(unsubscribe_link).path)
        # The setting is toggled off, and scheduled jobs have been removed.
        self.assertEqual(result.status_code, 200)
        # Circumvent user_profile caching.
        user_profile = UserProfile.objects.get(email="hamlet@zulip.com")
        self.assertFalse(user_profile.enable_digest_emails)
        self.assertEqual(0, len(ScheduledJob.objects.filter(
            type=ScheduledJob.EMAIL, filter_string__iexact=email)))
class RealmCreationTest(ZulipTestCase):
    def test_create_realm(self):
        # type: () -> None
        """End-to-end realm creation: confirm email, register, land on /invite/."""
        username = "user1"
        password = "test"
        domain = "test.com"
        email = "user1@test.com"
        # Make sure the realm does not exist
        self.assertIsNone(get_realm("test.com"))
        with self.settings(OPEN_REALM_CREATION=True):
            # Create new realm with the email
            result = self.client_post('/create_realm/', {'email': email})
            self.assertEqual(result.status_code, 302)
            self.assertTrue(result["Location"].endswith(
                "/accounts/send_confirm/%s@%s" % (username, domain)))
            result = self.client_get(result["Location"])
            self.assert_in_response("Check your email so we can get started.", result)
            # Visit the confirmation link.
            from django.core.mail import outbox
            for message in reversed(outbox):
                if email in message.to:
                    # Raw string so \S is a regex token, not an (invalid) string escape.
                    confirmation_link_pattern = re.compile(settings.EXTERNAL_HOST + r"(\S+)>")
                    confirmation_url = confirmation_link_pattern.search(
                        message.body).groups()[0]
                    break
            else:
                raise ValueError("Couldn't find a confirmation email.")
            result = self.client_get(confirmation_url)
            self.assertEqual(result.status_code, 200)
            result = self.submit_reg_form_for_user(username, password, domain)
            self.assertEqual(result.status_code, 302)
            # Make sure the realm is created
            realm = get_realm("test.com")
            self.assertIsNotNone(realm)
            self.assertEqual(realm.domain, domain)
            self.assertEqual(get_user_profile_by_email(email).realm, realm)
            self.assertTrue(result["Location"].endswith("/invite/"))
            result = self.client_get(result["Location"])
            self.assert_in_response("You're the first one here!", result)
class UserSignUpTest(ZulipTestCase):
    def test_user_default_language(self):
        # type: () -> None
        """
        Check if the default language of new user is the default language
        of the realm.
        """
        username = "newguy"
        email = "newguy@zulip.com"
        domain = "zulip.com"
        password = "newpassword"
        realm = get_realm(domain)
        do_set_realm_default_language(realm, "de")
        result = self.client_post('/accounts/home/', {'email': email})
        self.assertEqual(result.status_code, 302)
        self.assertTrue(result["Location"].endswith(
            "/accounts/send_confirm/%s@%s" % (username, domain)))
        result = self.client_get(result["Location"])
        self.assert_in_response("Check your email so we can get started.", result)
        # Visit the confirmation link.
        from django.core.mail import outbox
        for message in reversed(outbox):
            if email in message.to:
                # Raw string so \S is a regex token, not an (invalid) string escape.
                confirmation_link_pattern = re.compile(settings.EXTERNAL_HOST + r"(\S+)>")
                confirmation_url = confirmation_link_pattern.search(
                    message.body).groups()[0]
                break
        else:
            raise ValueError("Couldn't find a confirmation email.")
        result = self.client_get(confirmation_url)
        self.assertEqual(result.status_code, 200)
        # Pick a password and agree to the ToS.
        result = self.submit_reg_form_for_user(username, password, domain)
        self.assertEqual(result.status_code, 302)
        user_profile = get_user_profile_by_email(email)
        self.assertEqual(user_profile.default_language, realm.default_language)
        # Pop the confirmation email so later assertions on the outbox are clean.
        outbox.pop()
|
ahmadassaf/zulip
|
zerver/tests/test_signup.py
|
Python
|
apache-2.0
| 31,655
|
[
"VisIt"
] |
ea8c3e7a34939c0b000c13755754e3d1b118bf899c57479e78f62df46521e184
|
# -*- coding: utf-8 -*-
## begin license ##
#
# "Meresco Components" are components to build searchengines, repositories
# and archives, based on "Meresco Core".
#
# Copyright (C) 2007-2008 SURF Foundation. http://www.surf.nl
# Copyright (C) 2007-2011 Seek You Too (CQ2) http://www.cq2.nl
# Copyright (C) 2007-2009 Stichting Kennisnet Ict op school. http://www.kennisnetictopschool.nl
# Copyright (C) 2009 Delft University of Technology http://www.tudelft.nl
# Copyright (C) 2009 Tilburg University http://www.uvt.nl
# Copyright (C) 2011, 2015, 2020 Stichting Kennisnet https://www.kennisnet.nl
# Copyright (C) 2012, 2015, 2017-2018, 2020 Seecr (Seek You Too B.V.) https://seecr.nl
# Copyright (C) 2017, 2020 SURF https://www.surf.nl
# Copyright (C) 2020 Data Archiving and Network Services https://dans.knaw.nl
# Copyright (C) 2020 The Netherlands Institute for Sound and Vision https://beeldengeluid.nl
#
# This file is part of "Meresco Components"
#
# "Meresco Components" is free software; you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation; either version 2 of the License, or
# (at your option) any later version.
#
# "Meresco Components" is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with "Meresco Components"; if not, write to the Free Software
# Foundation, Inc., 51 Franklin St, Fifth Floor, Boston, MA 02110-1301 USA
#
## end license ##
import re
# Token patterns for splitting a free-form web query into parts.
# Each alternative captures into its own named group; _valueFromGroupdict
# picks whichever group matched.
UNQUOTED_STRING = r'(?P<unquoted>[\+\-]?[^"\s]+)'  # bare word, optional +/- prefix
QUOTED_STRING = r'(?P<quotedString>[\+\-]?(?P<quot>\")(?P<quoted>.+?)((?<!\\)(?P=quot)))'  # "..." (backslash-escaped quotes allowed inside)
QUOTED_LABEL_STRING = r'(?P<labelString>[\+\-]?(?P<label>[^"\s]+)=(?P<quot1>\")(?P<quoted1>.+?)((?<!\\)(?P=quot1)))'  # label="..."
# Order matters: the most specific pattern (label="...") must be tried first.
STRINGS = [QUOTED_LABEL_STRING ,QUOTED_STRING, UNQUOTED_STRING]
SPLITTED_STRINGS = re.compile(r'\s*(%s)' % '|'.join(STRINGS))
from cqlparser import parseString, CQLParseException, cql2string, CqlIdentityVisitor, cqlToExpression, CQLTokenizerException, quotTerm
from cqlparser.cqlparser import CQL_QUERY, SCOPED_CLAUSE, SEARCH_CLAUSE, SEARCH_TERM, TERM, BOOLEAN, INDEX, RELATION, COMPARITOR
# Query classification kinds (stored in WebQuery._kind). A range unpacks
# directly; wrapping it in list() was redundant.
DEFAULT_KIND, PLUSMINUS_KIND, BOOLEAN_KIND = range(3)
class WebQuery(object):
    """Wraps a free-form user query string, classifies it (default, +/-, or
    boolean style), and converts it into a CQL abstract syntax tree. Extra
    filter clauses can be AND-ed onto the query after construction."""
    def __init__(self, aString, antiUnaryClause=""):
        # antiUnaryClause supplies a left-hand side for leading NOTs,
        # since CQL has no unary NOT.
        self.original = aString
        try:
            plusminus = _feelsLikePlusMinusQuery(aString)
            boolean = _feelsLikeBooleanQuery(aString)
            # A query that looks like both styles is ambiguous; flag it.
            self._needsHelp = boolean and plusminus
            if plusminus and not boolean:
                self._kind = PLUSMINUS_KIND
                self.ast = parseString(_plusminus2Cql(aString, antiUnaryClause))
            elif boolean and not plusminus:
                try:
                    self._kind = BOOLEAN_KIND
                    self.ast = parseString(_boolean2Cql(aString, antiUnaryClause))
                except CQLParseException:
                    # Boolean interpretation failed; fall back to quoting
                    # every token and AND-ing them.
                    self._needsHelp = True
                    self._kind = DEFAULT_KIND
                    self.ast = parseString(_default2CqlWithQuotes(aString, antiUnaryClause=antiUnaryClause))
            else:
                self._kind = DEFAULT_KIND
                try:
                    self.ast = parseString(_default2Cql(aString, antiUnaryClause=antiUnaryClause))
                except CQLParseException:
                    self._needsHelp = True
                    self.ast = parseString(_default2CqlWithQuotes(aString, antiUnaryClause=antiUnaryClause))
        except (CQLParseException, CQLTokenizerException):
            # Last resort: treat the entire original string as one quoted term.
            self.ast = parseString(quotTerm(self.original))
        # Keep the pre-filter AST so replaceTerm/replaceIndex can re-apply filters.
        self.originalAst = self.ast
        self._filters = []
    def addTermFilter(self, term):
        """AND an unfielded term filter onto the query."""
        self._addFilter(SEARCH_CLAUSE(
            SEARCH_TERM(
                TERM(term)
            )
        ))
    @property
    def query(self):
        """The current query (filters included) as a cqlparser expression."""
        return cqlToExpression(self.ast)
    def addFilter(self, field, term):
        """AND a `field exact term` filter onto the query."""
        self._addFilter(SEARCH_CLAUSE(
            INDEX(TERM(field)),
            RELATION(COMPARITOR('exact')),
            SEARCH_TERM(TERM(term))
        ))
    def _addFilter(self, filterQuery):
        # The first filter wraps the original AST in a scoped clause; later
        # filters chain onto the existing top-level scoped clause.
        self._filters.append(filterQuery)
        insertOriginalAst = SCOPED_CLAUSE(SEARCH_CLAUSE(self.ast)) if len(self._filters) == 1 else self.ast.children[0]
        self.ast = CQL_QUERY(
            SCOPED_CLAUSE(
                insertOriginalAst,
                BOOLEAN('and'),
                filterQuery
            )
        )
    def replaceTerm(self, oldTerm, newTerm):
        """Return a new WebQuery with oldTerm replaced; filters are re-applied."""
        newAst = CqlReplaceTerm(self.originalAst, oldTerm, newTerm).visit()
        result = WebQuery(cql2string(newAst))
        for f in self._filters:
            result._addFilter(f)
        return result
    def replaceIndex(self, mapping):
        """Return a new WebQuery with index names mapped; filters are re-applied."""
        newAst = CqlReplaceIndex(self.originalAst, mapping).visit()
        result = WebQuery(cql2string(newAst))
        for f in self._filters:
            result._addFilter(f)
        return result
    def asString(self):
        """Render the current query (filters included) back to a CQL string."""
        return cql2string(self.ast)
    def isBooleanQuery(self):
        return self._kind == BOOLEAN_KIND
    def isPlusMinusQuery(self):
        return self._kind == PLUSMINUS_KIND
    def isDefaultQuery(self):
        return self._kind == DEFAULT_KIND
    def needsBooleanHelp(self):
        # True when the query was ambiguous or its first interpretation failed.
        return self._needsHelp
    def hasFilters(self):
        return len(self._filters) > 0
class CqlReplaceTerm(CqlIdentityVisitor):
    """Identity visitor that rewrites every TERM node equal to one value
    into another, leaving the rest of the AST untouched."""
    def __init__(self, ast, oldTerm, newTerm):
        CqlIdentityVisitor.__init__(self, ast)
        self._oldTerm = oldTerm
        self._newTerm = newTerm
    def visitTERM(self, node):
        currentTerm = node.children[0]
        if currentTerm != self._oldTerm:
            return CqlIdentityVisitor.visitTERM(self, node)
        return node.__class__(self._newTerm)
class CqlReplaceIndex(CqlIdentityVisitor):
    """Identity visitor that renames index nodes through `mapping`;
    indexes absent from the mapping pass through unchanged."""
    def __init__(self, ast, mapping):
        CqlIdentityVisitor.__init__(self, ast)
        self._mapping = mapping
    def visitINDEX(self, node):
        indexTerm = node.children[0].children[0]
        if indexTerm not in self._mapping:
            return CqlIdentityVisitor.visitINDEX(self, node)
        return INDEX(TERM(self._mapping[indexTerm]))
def _feelsLikePlusMinusQuery(aString):
    """Return True iff the first '+'/'-'-prefixed token looks like a real
    plus/minus query (i.e. the sign is not doubled, as in '--foo')."""
    for match in SPLITTED_STRINGS.finditer(aString):
        token = _valueFromGroupdict(match.groupdict()).lower()
        if len(token) > 1 and token[0] in '+-':
            return token[1] not in '+-'
    return False
def _feelsLikeBooleanQuery(aString):
    """Return True iff the query contains boolean keywords or parentheses."""
    for match in SPLITTED_STRINGS.finditer(aString):
        token = _valueFromGroupdict(match.groupdict()).lower()
        if token in ('and', 'or', 'not'):
            return True
        if token.startswith('(') or token.endswith(')'):
            return True
    return False
def _joinFieldAndTerm(fieldAndTermList):
results = []
for field, term in (tuple(fieldAndTerm.split(':', 1)) for fieldAndTerm in fieldAndTermList):
if ' ' in term:
term = '"%s"' % term
results.append('%s exact %s' % (field, term))
if len(results) == 1:
return results[0]
return ' AND '.join('(%s)' % result for result in results)
def _plusminus2Cql(aString, antiUnaryClause):
    """Translate '+term'/'-term' syntax into a CQL AND expression.

    '-term' becomes 'NOT term' appended to the preceding clause; a NOT with
    nothing before it is anchored on *antiUnaryClause*, because CQL has no
    unary NOT.
    """
    clauses = []
    for match in SPLITTED_STRINGS.finditer(aString):
        token = _valueFromGroupdict(match.groupdict())
        prefix = token[0]
        if prefix == '+':
            clauses.append(token[1:])
        elif prefix == '-':
            negated = 'NOT ' + token[1:]
            if clauses:
                clauses[-1] = clauses[-1] + ' ' + negated
            else:
                clauses.append(antiUnaryClause + ' ' + negated)
        else:
            clauses.append(token)
    return ' AND '.join(clauses)
def _boolean2Cql(aString, antiUnaryClause):
    """Normalize a boolean query into CQL.

    Operators are uppercased, and a NOT at the start of the query or right
    after '(' is anchored on *antiUnaryClause*, since CQL has no unary NOT.
    """
    # Space out parentheses so the tokenizer sees them as separate tokens.
    spaced = aString.replace('(', ' ( ').replace(')', ' ) ')
    tokens = []
    for match in SPLITTED_STRINGS.finditer(spaced):
        token = _valueFromGroupdict(match.groupdict())
        lowered = token.lower()
        if lowered == 'not' and (not tokens or tokens[-1] == '('):
            tokens.append(antiUnaryClause)
        if lowered in ('not', 'and', 'or'):
            token = token.upper()
        tokens.append(token)
    return ' '.join(tokens)
def _default2CqlWithQuotes(aString, antiUnaryClause="ignored"):
    """Quote every token and join them with AND; blank input yields *antiUnaryClause*."""
    if not aString.strip():
        return antiUnaryClause
    quotedTokens = (
        quot(_valueFromGroupdict(match.groupdict()))
        for match in SPLITTED_STRINGS.finditer(aString))
    return ' AND '.join(quotedTokens)
def _default2Cql(aString, antiUnaryClause="ignored"):
    """Pass valid CQL through unchanged; otherwise AND the tokens together.

    Blank input yields *antiUnaryClause*.
    """
    if not aString.strip():
        return antiUnaryClause
    try:
        # Probe whether the string already parses as CQL.
        cqlToExpression(aString)
    except (CQLParseException, CQLTokenizerException):
        return ' AND '.join(
            _valueFromGroupdict(match.groupdict())
            for match in SPLITTED_STRINGS.finditer(aString))
    else:
        return aString
def quot(aString):
    """Wrap *aString* in double quotes unless it is already quoted.

    A string counts as already quoted only when it is at least two
    characters long and both starts and ends with '"'. The length check
    fixes two edge cases in the original: an empty string raised
    IndexError on aString[-1], and a lone '"' character was mistaken for
    an already-quoted value.
    """
    if len(aString) >= 2 and aString[0] == '"' == aString[-1]:
        return aString
    return '"%s"' % aString
def _valueFromGroupdict(groupdict):
return groupdict['unquoted'] or groupdict['quotedString'] or groupdict['labelString']
|
seecr/meresco-components
|
meresco/components/web/webquery.py
|
Python
|
gpl-2.0
| 9,266
|
[
"VisIt"
] |
1cc83756ba59b8ce612424796ea1a397539c1b273ed29d9c8d5ed3a8605d799d
|
from suggestive.util import retry_function, retry
from suggestive.error import RetryError
import logging
import webbrowser
import pylastfm
logger = logging.getLogger(__name__)
logger.addHandler(logging.NullHandler())
def get(data, keys, default=None):
    """Walk nested dicts along *keys*; return *default* when a key is absent.

    With an empty key sequence, *data* itself is returned unchecked.
    Raises TypeError when a lookup is attempted on a non-dict value.
    """
    current = data
    for key in keys:
        if not isinstance(current, dict):
            raise TypeError('not a dictionary')
        if key not in current:
            return default
        current = current[key]
    return current
class LastFM(object):
    """
    Helper class for communicating with Last.FM servers
    """

    def __init__(self, config):
        # config supplies the lastfm.* settings (api_key, api_secret, user,
        # url) and general.session_file read during client initialization.
        self.config = config
        self.client = self._initialize_client()

    def _get_user_permission(self, token):
        """Attempt to open up authorization URL in browser. If this fails, simply
        display a message in the console asking user to manually open URL"""
        url = 'http://www.last.fm/api/auth/?api_key={0}&token={1}'.format(
            self.config.lastfm.api_key, token)
        try:
            webbrowser.open_new_tab(url)
        except Exception:
            # Best-effort: the console prompt below covers the failure case.
            pass
        message = """\
No LastFM session found; to authorize suggestive, visit this URL and click
'Yes, allow access', then return to this window:
{url}
Press Enter to continue...""".format(url=url)
        # Block until the user confirms they have authorized the token.
        input(message)

    def _save_session(self, session_key):
        """Save session key (in plaintext) to a file"""
        logger.info('Saving session key to file')
        with open(self.config.general.session_file, 'w') as handle:
            handle.write(session_key)

    def _authorize_application(self, client):
        """Go through the LastFM desktop application authorization process, saving
        a session key to a file for future use"""
        # 1) request an auth token, 2) have the user approve it in a browser,
        # 3) exchange the approved token for a session key and persist it.
        token = retry_function(client.auth.get_token)
        self._get_user_permission(token)
        try:
            session_key = retry_function(client.auth.get_session, token)
        except pylastfm.AuthenticationError as exc:
            # User likely did not approve the token; log and re-raise.
            logger.debug('Unable to get authenticated LastFM session',
                         exc_info=exc)
            raise
        self._save_session(session_key)

    @retry(exceptions=RetryError)
    def _initialize_client(self):
        # Build and authenticate a pylastfm client. On the first run there is
        # no saved session file: pylastfm raises FileError, we walk the user
        # through authorization (which saves the session file), then raise
        # RetryError so the @retry decorator re-invokes this method and
        # authentication succeeds against the freshly saved session.
        config = self.config
        client = pylastfm.LastFM(config.lastfm.api_key,
                                 config.lastfm.api_secret,
                                 username=config.lastfm.user,
                                 url=config.lastfm.url,
                                 auth_method='session_key_file',
                                 session_key=config.general.session_file)
        try:
            client.authenticate()
            return client
        except pylastfm.FileError:
            # No session file yet: run the interactive authorization flow.
            logger.info('Authenticating suggestive with LastFM')
            self._authorize_application(client)
            raise RetryError
        except pylastfm.AuthenticationError as exc:
            logger.debug('Failed to authenticate', exc_info=exc)
            raise
        except pylastfm.LastfmError as exc:
            logger.error(
                'Unable to authenticate to LastFM due to unknown error',
                exc_info=exc)
            raise

    def scrobbles(self, user, start=None, end=None):
        """Get user scrobbles in the given date range"""
        return self.client.user.get_recent_tracks(user, start=start, end=end)

    def loved_tracks(self, user):
        """Get all of the user's loved tracks"""
        return self.client.user.get_loved_tracks(user)

    def love_track(self, artist, track):
        """Mark the given track loved"""
        # Returns True on success, False when the API call fails.
        try:
            self.client.track.love(artist, track)
            return True
        except pylastfm.APIError as exc:
            logger.error('Unable to love track', exc_info=exc)
            return False

    def unlove_track(self, artist, track):
        """Set the track as not loved"""
        # Returns True on success, False when the API call fails.
        try:
            self.client.track.unlove(artist, track)
            return True
        except pylastfm.APIError as exc:
            logger.error('Unable to unlove track', exc_info=exc)
            return False
|
thesquelched/suggestive
|
suggestive/lastfm.py
|
Python
|
bsd-2-clause
| 4,198
|
[
"VisIt"
] |
be5fb648a8b05eaf2c9a8451e1de9c123d77d50f527c1c656624b31cbe60fba2
|
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.